-
Notifications
You must be signed in to change notification settings - Fork 17
/
bitfield.hpp
191 lines (156 loc) · 5.12 KB
/
bitfield.hpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
#ifndef BITFIELD_H
#define BITFIELD_H
// A bitfield based on raw data considerably larger than the largest elementary type
// with the ability to select sub-ranges of bits
// This might seem like a good use case for a STL bitfield, but the differences
// are large enough to make a specific implementation worth while.
#include <tr1/cstdint> // legacy TR1 header, kept for backward compatibility
#include <cstdint>     // uint64_t
#include <cassert>     // assert() used under EXPENSIVE_SANITY_CHECKS
#include <cmath>
#include <cstdlib>
#include <cstring>     // memset
#include <bitset>
#include <iostream>
#include "debug_levels.h"
extern int debug_level;
// C is the unsigned integer type used for chunking; I is the index type.
// Bit indices are zero-based; bit i lives at bit (i % bits_per_type) of
// chunk data[i / bits_per_type] (LSB-first within a chunk).
template<typename C, typename I>
class bitfield {
public:
	// Empty bitfield; attach storage later via set_raw_data().
	bitfield() : data(nullptr), n(0) {
		bits_per_type = sizeof(C)*8;
	};
	// Wrap untyped raw storage holding n bits. Caller retains ownership.
	// FIX: the original computed bits_per_type from sizeof(*data), where
	// data is the shadowing void* parameter -- dereferencing void* is
	// ill-formed (gcc extension: sizeof(void) == 1), which silently set
	// bits_per_type to 8 regardless of C. Use the chunk type directly.
	bitfield(void *data, I n) {
		bits_per_type = sizeof(C)*8;
		set_raw_data(data, n);
	};
	// Wrap typed raw storage holding n bits. Caller retains ownership.
	bitfield(C *data, I n) {
		bits_per_type = sizeof(C)*8;
		this->data = data;
		this->n = n;
	};
	// (Re-)attach raw storage holding n bits. Caller retains ownership.
	void set_raw_data(void *data, I n) {
		this->data = reinterpret_cast<C*>(data);
		this->n = n;
	};
	// Copy the inclusive bit range [start, end] into res, packed starting
	// at bit 0 of res[0].
	// Caller is responsible to provide enough storage space in res
	// (at least (end - start)/bits_per_type + 1 chunks).
	// NOTE: Bit indices are, as usual, zero-based
	void get_bit_range(I start, I end, C *res) {
#ifdef EXPENSIVE_SANITY_CHECKS
		// FIX: was "end > n || start > n || start >= end" -- bit n is
		// already one past the end, and start == end is a legitimate
		// single-bit range for an inclusive interval.
		if (end >= n || start > end) {
			// TODO: Throw an exception instead
			std::cerr << "Internal error: Bit index bogous" << std::endl;
			std::cerr << "(start=" << start << ", end=" << end
				  << ", n=" << n << ")" << std::endl;
			std::exit(-1);
		}
#endif
		// Zero every result chunk we may touch. FIX: the original zeroed
		// only ceil(bits/8) bytes (via a float round-trip through ceil()),
		// but the code below ORs into whole chunks, so the tail bytes of
		// the last chunk could keep garbage.
		I res_chunks = (end - start)/bits_per_type + 1;
		memset(res, 0, res_chunks*sizeof(C));
		// Compute chain elements and bit within this element for the
		// start and end positions
		I start_chunk = start/bits_per_type;
		I end_chunk = end/bits_per_type;
		unsigned short start_bit = start % bits_per_type;
		unsigned short end_bit;
		I dest_chunk = 0;
		C chunk;
		C mask;
		if (start_chunk == end_chunk)
			end_bit = end % bits_per_type;
		else
			end_bit = bits_per_type - 1;
		mask = compute_mask(start_bit, end_bit);
		res[dest_chunk] = (data[start_chunk] & mask) >> start_bit;
		if (start_chunk == end_chunk)
			return;
		// Bits already produced for the first destination chunk
		I dest_start_bit = end_bit - start_bit + 1;
		if (dest_start_bit == bits_per_type) {
			dest_chunk++;
			dest_start_bit = 0;
		}
		I dest_bits;
		for (I curr_chunk = start_chunk+1; curr_chunk < end_chunk; curr_chunk++) {
			// For the inner chunks, we can always select the full chunk from
			// the input data, but need to split it across the output
			// data field
			chunk = data[curr_chunk];
			// How many bits remain in the destination chunk
			dest_bits = bits_per_type - dest_start_bit;
			// Fill up the current destination chunk
			mask = compute_mask(0, dest_bits - 1);
			res[dest_chunk] |= ((chunk & mask) << dest_start_bit);
			dest_chunk++;
			// ... and fill the next destination chunk as far as
			// possible unless the previous chunk was completely
			// drained and there is nothing left for the new chunk
			if (dest_bits != bits_per_type) {
				mask = compute_mask(dest_bits, bits_per_type - 1);
				res[dest_chunk] = (chunk & mask) >> dest_bits;
			}
			// Compute new starting position in the destination chunk
			dest_start_bit = bits_per_type - dest_bits;
		}
		// Handle the (partial) final source chunk
		end_bit = end % bits_per_type;
		dest_bits = bits_per_type - dest_start_bit;
		mask = compute_mask(0, end_bit);
		chunk = data[end_chunk] & mask;
#ifdef EXPENSIVE_SANITY_CHECKS
		if (debug_level >= EXCESSIVE_INFO) {
			std::cerr << "end_bit: " << end_bit << ", dest_bits: " << dest_bits
				  << ", mask: " << std::bitset<32>(mask) << ", end_chunk: "
				  << end_chunk << std::endl;
			std::cerr << "data[end_chunk]: " << std::bitset<32>(data[end_chunk])
				  << std::endl;
			std::cerr << "masked data: " << std::bitset<32>(chunk)
				  << std::endl;
			std::cerr << "dest_start_bit: " << dest_start_bit << std::endl;
		}
#endif
		// Any excess bits that do not fit into the current result chunk
		// are shifted out to the left here
		res[dest_chunk] |= (chunk << dest_start_bit);
		if (end_bit + 1 > dest_bits) {
			// We need to split the final chunk contribution
			// across two result chunks
			dest_chunk++;
			// Shift out the already consumed bits to the right,
			// and include the remaining bits into the final result chunk
			res[dest_chunk] = (chunk >> dest_bits);
		}
	};
	// Return bit i of the field.
	inline bool get_bit(I i) {
#ifdef EXPENSIVE_SANITY_CHECKS
		// FIX: was "i > n" -- bit n is already one past the end
		if (i >= n) {
			// TODO: Throw an exception instead
			std::cerr << "Internal error: Bit index out of range" << std::endl;
			std::cerr << "(total length: " << n << ", requested: "
				  << i << ")" << std::endl;
			std::exit(-1);
		}
#endif
		// Compute chain element the bit is contained in
		I idx = i/bits_per_type;
		C chunk = data[idx];
		// FIX: shift in the chunk type C, not the index type I -- if I is
		// narrower than C, a shift count >= width(I) is undefined behaviour.
		bool bit = chunk & (static_cast<C>(1) << (i % bits_per_type));
		return bit;
	};
private:
	C *data;                       // Raw storage (not owned)
	uint64_t n;                    // Length of the field in bits
	unsigned short bits_per_type;  // Bits per chunk: sizeof(C)*8
	// Build a mask of type C with bits [start_bit, end_bit] (inclusive) set.
	inline C compute_mask(I start_bit, I end_bit) {
		C mask = 0;
#ifdef EXPENSIVE_SANITY_CHECKS
		assert(end_bit < bits_per_type);
		assert(end_bit >= start_bit);
#endif
		if (end_bit < bits_per_type-1) {
			// FIX: shift a value of type C, not the int literal 1 --
			// for C wider than int, shift counts >= 31 overflow int (UB).
			mask = (static_cast<C>(1) << (end_bit+1)) - 1;
		} else {
			mask = ~static_cast<C>(0);
		}
		mask &= ~((static_cast<C>(1) << start_bit) - static_cast<C>(1));
		return mask;
	}
};
#endif