OffsetIndices<int> accumulate_counts_to_offsets(MutableSpan<int> counts_to_offsets,
                                                const int start_offset)
{
  int offset = start_offset;
  /* 64-bit shadow of `offset`, used to detect integer overflow in debug builds. */
  int64_t offset_i64 = start_offset;
  for (const int i : counts_to_offsets.index_range().drop_back(1)) {
    const int count = counts_to_offsets[i];
    counts_to_offsets[i] = offset;
    offset += count;
    offset_i64 += count;
  }
  counts_to_offsets.last() = offset;
  BLI_assert_msg(offset_i64 == offset, "Integer overflow while accumulating offsets.");
  UNUSED_VARS_NDEBUG(offset_i64);
  return OffsetIndices<int>(counts_to_offsets);
}
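A typical caller fills a buffer with per-group counts and converts it to offsets in place. A minimal usage sketch, assuming a counts buffer with one extra trailing slot (the variable names are illustrative, not from the source):

/* Counts {3, 1, 2} become offsets {0, 3, 4, 6} in place. */
Array<int> data = {3, 1, 2, 0}; /* One extra slot for the final total. */
const OffsetIndices<int> offsets = offset_indices::accumulate_counts_to_offsets(data);
/* offsets[1] is now the IndexRange [3, 4), i.e. group 1 has size 1. */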
std::optional<OffsetIndices<int>> accumulate_counts_to_offsets_with_overflow_check(
    MutableSpan<int> counts_to_offsets, const int start_offset)
{
  int64_t offset = start_offset;
  for (const int i : counts_to_offsets.index_range().drop_back(1)) {
    const int count = counts_to_offsets[i];
    counts_to_offsets[i] = int(offset);
    offset += count;
  }
  counts_to_offsets.last() = int(offset);
  const bool has_overflow = offset >= std::numeric_limits<int>::max();
  if (has_overflow) {
    return std::nullopt;
  }
  return OffsetIndices<int>(counts_to_offsets);
}
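Callers that cannot rule out totals beyond the `int` range use this variant and handle the failure. A hedged usage sketch, reusing the hypothetical `data` buffer from above:

if (const std::optional<OffsetIndices<int>> offsets =
        offset_indices::accumulate_counts_to_offsets_with_overflow_check(data))
{
  /* The accumulated total fits into `int`; use *offsets. */
}
else {
  /* Overflow: the counts sum to at least INT_MAX. */
}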
void fill_constant_group_size(const int size, const int start_offset, MutableSpan<int> offsets)
{
  threading::parallel_for(offsets.index_range(), 1024, [&](const IndexRange range) {
    for (const int64_t i : range) {
      offsets[i] = size * i + start_offset;
    }
  });
}
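For example, with size = 3 and start_offset = 0 this writes 0, 3, 6, ..., so group i covers the range [3 * i, 3 * (i + 1)).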
void copy_group_sizes(const OffsetIndices<int> offsets, const IndexMask &mask, MutableSpan<int> sizes)
{
  mask.foreach_index_optimized<int64_t>(
      [&](const int64_t i) { sizes[i] = offsets[i].size(); });
}
void gather_group_sizes(const OffsetIndices<int> offsets,
                        const Span<int> indices,
                        MutableSpan<int> sizes)
{
  threading::memory_bandwidth_bound_task(
      sizes.size_in_bytes() + offsets.data().size_in_bytes() + indices.size_in_bytes(), [&]() {
        threading::parallel_for(indices.index_range(), 4096, [&](const IndexRange range) {
          for (const int i : range) {
            sizes[i] = offsets[indices[i]].size();
          }
        });
      });
}
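Note the difference between the two: copy_group_sizes writes each group's size at that group's own index, while gather_group_sizes compacts the sizes of the indexed groups, writing to sizes[i] for each position i in indices. The threading::memory_bandwidth_bound_task wrapper is given an estimate of the bytes touched so the scheduler can limit parallelism for work that is bound by memory bandwidth rather than compute.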
int sum_group_sizes(const OffsetIndices<int> offsets, const IndexMask &mask)
{
  int sum = 0;
  mask.foreach_segment_optimized([&](const auto segment) {
    if constexpr (std::is_same_v<std::decay_t<decltype(segment)>, IndexRange>) {
      /* A contiguous run of groups: sum their sizes with a single subtraction. */
      sum += offsets[segment.last()].one_after_last() - offsets[segment.first()].first();
    }
    else {
      for (const int64_t i : segment) {
        sum += offsets[i].size();
      }
    }
  });
  return sum;
}
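foreach_segment_optimized hands the callback either a span of indices or an IndexRange when a segment of the mask is contiguous; the if constexpr branch exploits the latter case, since the sizes of a contiguous run of groups collapse into a subtraction of the two bounding offsets.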
OffsetIndices<int> gather_selected_offsets(const OffsetIndices<int> src_offsets,
                                           const IndexMask &selection,
                                           const int start_offset,
                                           MutableSpan<int> dst_offsets)
{
  if (selection.is_empty()) {
    return {};
  }
  int offset = start_offset;
  selection.foreach_index_optimized<int>([&](const int i, const int pos) {
    dst_offsets[pos] = offset;
    offset += src_offsets[i].size();
  });
  dst_offsets.last() = offset;
  return OffsetIndices<int>(dst_offsets);
}
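Selecting a subset of groups thus produces a densely packed offset array. A hypothetical sketch (src_offsets and selection are assumed to exist, with dst_offsets sized to selection.size() + 1):

/* If the source groups have sizes {3, 1, 2} and the selection is {0, 2},
 * dst holds {0, 3, 5} afterwards: sizes {3, 2} packed together. */
Array<int> dst(selection.size() + 1);
const OffsetIndices<int> dst_offsets =
    offset_indices::gather_selected_offsets(src_offsets, selection, 0, dst);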
void build_reverse_map(const OffsetIndices<int> offsets, MutableSpan<int> r_map)
{
  threading::parallel_for(offsets.index_range(), 1024, [&](const IndexRange range) {
    for (const int64_t i : range) {
      r_map.slice(offsets[i]).fill(i);
    }
  });
}
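Each group fills its own slice of r_map with its group index, so afterwards r_map[j] answers "which group contains element j".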
void build_reverse_offsets(const Span<int> indices, MutableSpan<int> offsets)
{
  BLI_assert(std::all_of(offsets.begin(), offsets.end(), [](int value) { return value == 0; }));
  array_utils::count_indices(indices, offsets);
  offset_indices::accumulate_counts_to_offsets(offsets);
}
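This is the usual two-pass counting pattern: count occurrences per bucket, then turn the counts into offsets. A minimal sketch with illustrative values:

/* Hypothetical: indices {1, 0, 1, 2} into 3 buckets. Counting yields
 * {1, 2, 1, 0}; accumulation turns that into offsets {0, 1, 3, 4}. */
const Array<int> indices = {1, 0, 1, 2};
Array<int> offsets(4, 0); /* Bucket count + 1; must start zeroed. */
offset_indices::build_reverse_offsets(indices, offsets);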
Referenced declarations:

#define BLI_assert_msg(a, msg)
#define UNUSED_VARS_NDEBUG(...)
constexpr IndexRange drop_back(int64_t n) const
constexpr int64_t size_in_bytes() const
constexpr T *end() const
constexpr T *begin() const
constexpr IndexRange index_range() const
constexpr T &last(const int64_t n = 0) const
IndexRange index_range() const
void copy_group_sizes(OffsetIndices<int> offsets, const IndexMask &mask, MutableSpan<int> sizes)
OffsetIndices<int> accumulate_counts_to_offsets(MutableSpan<int> counts_to_offsets, int start_offset = 0)
std::optional<OffsetIndices<int>> accumulate_counts_to_offsets_with_overflow_check(MutableSpan<int> counts_to_offsets, int start_offset = 0)
void gather_group_sizes(OffsetIndices<int> offsets, const IndexMask &mask, MutableSpan<int> sizes)
void fill_constant_group_size(int size, int start_offset, MutableSpan<int> offsets)
void build_reverse_offsets(Span<int> indices, MutableSpan<int> offsets)
int sum_group_sizes(OffsetIndices<int> offsets, const IndexMask &mask)
OffsetIndices<int> gather_selected_offsets(OffsetIndices<int> src_offsets, const IndexMask &selection, int start_offset, MutableSpan<int> dst_offsets)
void memory_bandwidth_bound_task(const int64_t approximate_bytes_touched, const Function &function)