/* Doxygen page-header residue from the HTML export ("Blender V4.3 / BLI_bit_vector.hh / Go to
 * the documentation of this file.") — not part of the original header. */
/* SPDX-FileCopyrightText: 2023 Blender Authors
 *
 * SPDX-License-Identifier: GPL-2.0-or-later */

5#pragma once
6
40#include "BLI_allocator.hh"
42#include "BLI_bit_span.hh"
43#include "BLI_span.hh"
44
45namespace blender::bits {
46
47template<
51 int64_t InlineBufferCapacity = 64,
56 typename Allocator = GuardedAllocator>
57class BitVector {
58 private:
59 static constexpr int64_t required_ints_for_bits(const int64_t number_of_bits)
60 {
61 return (number_of_bits + BitsPerInt - 1) / BitsPerInt;
62 }
63
64 static constexpr int64_t IntsInInlineBuffer = required_ints_for_bits(InlineBufferCapacity);
65 static constexpr int64_t BitsInInlineBuffer = IntsInInlineBuffer * BitsPerInt;
66 static constexpr int64_t AllocationAlignment = alignof(BitInt);
67
72 BitInt *data_;
73
75 int64_t size_in_bits_;
76
78 int64_t capacity_in_bits_;
79
81 BLI_NO_UNIQUE_ADDRESS Allocator allocator_;
82
85
86 public:
87 BitVector(Allocator allocator = {}) noexcept : allocator_(allocator)
88 {
89 data_ = inline_buffer_;
90 size_in_bits_ = 0;
91 capacity_in_bits_ = BitsInInlineBuffer;
92 uninitialized_fill_n(data_, IntsInInlineBuffer, BitInt(0));
93 }
94
95 BitVector(NoExceptConstructor, Allocator allocator = {}) noexcept : BitVector(allocator) {}
96
98 {
99 const int64_t ints_to_copy = required_ints_for_bits(span.size());
100 if (span.size() <= BitsInInlineBuffer) {
101 /* The data is copied into the owned inline buffer. */
102 data_ = inline_buffer_;
103 capacity_in_bits_ = BitsInInlineBuffer;
104 }
105 else {
106 /* Allocate a new array because the inline buffer is too small. */
107 data_ = static_cast<BitInt *>(
108 allocator_.allocate(ints_to_copy * sizeof(BitInt), AllocationAlignment, __func__));
109 capacity_in_bits_ = ints_to_copy * BitsPerInt;
110 }
111 size_in_bits_ = span.size();
112 uninitialized_copy_n(span.data(), ints_to_copy, data_);
113 }
114
116 {
117 allocator_ = other.allocator_;
118 }
119
120 BitVector(BitVector &&other) noexcept : BitVector(NoExceptConstructor(), other.allocator_)
121 {
122 if (other.is_inline()) {
123 /* Copy the data into the inline buffer. */
124 /* For small inline buffers, always copy all the bits because checking how many bits to copy
125 * would add additional overhead. */
126 int64_t ints_to_copy = IntsInInlineBuffer;
127 if constexpr (IntsInInlineBuffer > 8) {
128 /* Avoid copying too much unnecessary data in case the inline buffer is large. */
129 ints_to_copy = other.used_ints_amount();
130 }
131 data_ = inline_buffer_;
132 uninitialized_copy_n(other.data_, ints_to_copy, data_);
133 }
134 else {
135 /* Steal the pointer. */
136 data_ = other.data_;
137 }
138 size_in_bits_ = other.size_in_bits_;
139 capacity_in_bits_ = other.capacity_in_bits_;
140
141 /* Clear the other vector because it has been moved from. */
142 other.data_ = other.inline_buffer_;
143 other.size_in_bits_ = 0;
144 other.capacity_in_bits_ = BitsInInlineBuffer;
145 }
146
150 BitVector(const int64_t size_in_bits, const bool value = false, Allocator allocator = {})
151 : BitVector(NoExceptConstructor(), allocator)
152 {
153 this->resize(size_in_bits, value);
154 }
155
159 explicit BitVector(const Span<bool> values, Allocator allocator = {})
160 : BitVector(NoExceptConstructor(), allocator)
161 {
162 this->resize(values.size(), false);
163 or_bools_into_bits(values, *this);
164 }
165
167 {
168 if (!this->is_inline()) {
169 allocator_.deallocate(data_);
170 }
171 }
172
174 {
175 return copy_assign_container(*this, other);
176 }
177
179 {
180 return move_assign_container(*this, std::move(other));
181 }
182
183 operator BoundedBitSpan() const
184 {
185 return {data_, IndexRange(size_in_bits_)};
186 }
187
189 {
190 return {data_, IndexRange(size_in_bits_)};
191 }
192
196 int64_t size() const
197 {
198 return size_in_bits_;
199 }
200
205 {
206 return capacity_in_bits_;
207 }
208
209 bool is_empty() const
210 {
211 return size_in_bits_ == 0;
212 }
213
215 {
216 return data_;
217 }
218
219 const BitInt *data() const
220 {
221 return data_;
222 }
223
227 [[nodiscard]] BitRef operator[](const int64_t index) const
228 {
229 BLI_assert(index >= 0);
230 BLI_assert(index < size_in_bits_);
231 return {data_, index};
232 }
233
237 [[nodiscard]] MutableBitRef operator[](const int64_t index)
238 {
239 BLI_assert(index >= 0);
240 BLI_assert(index < size_in_bits_);
241 return {data_, index};
242 }
243
245 {
246 return IndexRange(size_in_bits_);
247 }
248
252 void append(const bool value)
253 {
254 this->ensure_space_for_one();
255 MutableBitRef bit{data_, size_in_bits_};
256 bit.set(value);
257 size_in_bits_++;
258 }
259
261 {
262 return {data_, 0};
263 }
264
266 {
267 return {data_, size_in_bits_};
268 }
269
271 {
272 return {data_, 0};
273 }
274
276 {
277 return {data_, size_in_bits_};
278 }
279
284 void resize(const int64_t new_size_in_bits, const bool value = false)
285 {
286 BLI_assert(new_size_in_bits >= 0);
287 const int64_t old_size_in_bits = size_in_bits_;
288 if (new_size_in_bits > old_size_in_bits) {
289 this->reserve(new_size_in_bits);
290 }
291 size_in_bits_ = new_size_in_bits;
292 if (old_size_in_bits < new_size_in_bits) {
293 MutableBitSpan(data_, IndexRange::from_begin_end(old_size_in_bits, new_size_in_bits))
294 .set_all(value);
295 }
296 }
297
301 void fill(const bool value)
302 {
303 MutableBitSpan(data_, size_in_bits_).set_all(value);
304 }
305
310 void reserve(const int new_capacity_in_bits)
311 {
312 this->realloc_to_at_least(new_capacity_in_bits);
313 }
314
319 void clear()
320 {
321 size_in_bits_ = 0;
322 }
323
328 {
329 size_in_bits_ = 0;
330 capacity_in_bits_ = 0;
331 if (!this->is_inline()) {
332 allocator_.deallocate(data_);
333 }
334 data_ = inline_buffer_;
335 }
336
337 private:
338 void ensure_space_for_one()
339 {
340 if (UNLIKELY(size_in_bits_ >= capacity_in_bits_)) {
341 this->realloc_to_at_least(size_in_bits_ + 1);
342 }
343 }
344
345 BLI_NOINLINE void realloc_to_at_least(const int64_t min_capacity_in_bits,
346 const BitInt initial_value_for_new_ints = 0)
347 {
348 if (capacity_in_bits_ >= min_capacity_in_bits) {
349 return;
350 }
351
352 const int64_t min_capacity_in_ints = this->required_ints_for_bits(min_capacity_in_bits);
353
354 /* At least double the size of the previous allocation. */
355 const int64_t min_new_capacity_in_ints = 2 * this->required_ints_for_bits(capacity_in_bits_);
356
357 const int64_t new_capacity_in_ints = std::max(min_capacity_in_ints, min_new_capacity_in_ints);
358 const int64_t ints_to_copy = this->used_ints_amount();
359
360 BitInt *new_data = static_cast<BitInt *>(
361 allocator_.allocate(new_capacity_in_ints * sizeof(BitInt), AllocationAlignment, __func__));
362 uninitialized_copy_n(data_, ints_to_copy, new_data);
363 /* Always initialize new capacity even if it isn't used yet. That's necessary to avoid warnings
364 * caused by using uninitialized memory. This happens when e.g. setting a clearing a bit in an
365 * uninitialized int. */
367 new_data + ints_to_copy, new_capacity_in_ints - ints_to_copy, initial_value_for_new_ints);
368
369 if (!this->is_inline()) {
370 allocator_.deallocate(data_);
371 }
372
373 data_ = new_data;
374 capacity_in_bits_ = new_capacity_in_ints * BitsPerInt;
375 }
376
377 bool is_inline() const
378 {
379 return data_ == inline_buffer_;
380 }
381
382 int64_t used_ints_amount() const
383 {
384 return this->required_ints_for_bits(size_in_bits_);
385 }
386};
387
388template<int64_t InlineBufferCapacity, typename Allocator>
393
394template<int64_t InlineBufferCapacity, typename Allocator>
399
400} // namespace blender::bits
401
402namespace blender {
403using bits::BitVector;
404} // namespace blender
/* NOTE(review): everything below is cross-reference residue from the Doxygen HTML export (macro
 * stubs and a member-signature index), not part of the real header. It is preserved verbatim but
 * fenced in a comment so it cannot be mistaken for compilable code.
#define BLI_assert(a)
Definition BLI_assert.h:50
#define BLI_NOINLINE
#define UNLIKELY(x)
#define BLI_NO_UNIQUE_ADDRESS
static constexpr IndexRange from_begin_end(const int64_t begin, const int64_t end)
constexpr int64_t size() const
Definition BLI_span.hh:253
const BitInt * data() const
int64_t size() const
MutableBitIterator begin()
BitVector(const int64_t size_in_bits, const bool value=false, Allocator allocator={})
BitIterator end() const
void resize(const int64_t new_size_in_bits, const bool value=false)
MutableBitRef operator[](const int64_t index)
BitVector(const Span< bool > values, Allocator allocator={})
BitIterator begin() const
void reserve(const int new_capacity_in_bits)
void append(const bool value)
BitVector(const BoundedBitSpan span)
BitVector(Allocator allocator={}) noexcept
BitVector & operator=(BitVector &&other)
BitVector & operator=(const BitVector &other)
BitVector(BitVector &&other) noexcept
BitVector(NoExceptConstructor, Allocator allocator={}) noexcept
void fill(const bool value)
MutableBitIterator end()
IndexRange index_range() const
BitVector(const BitVector &other)
const BitInt * data() const
BitRef operator[](const int64_t index) const
uint64_t BitInt
bool or_bools_into_bits(Span< bool > bools, MutableBitSpan r_bits, int64_t allowed_overshoot=0)
static constexpr int64_t BitsPerInt
T to_best_bit_span(const T &data)
Container & copy_assign_container(Container &dst, const Container &src)
Container & move_assign_container(Container &dst, Container &&src) noexcept(std::is_nothrow_move_constructible_v< Container >)
void uninitialized_fill_n(T *dst, int64_t n, const T &value)
void uninitialized_copy_n(const T *src, int64_t n, T *dst)
__int64 int64_t
Definition stdint.h:89
*/