Blender V4.5
BLI_index_mask.hh
Go to the documentation of this file.
1/* SPDX-FileCopyrightText: 2023 Blender Authors
2 *
3 * SPDX-License-Identifier: GPL-2.0-or-later */
4
8
9#pragma once
10
11#include <array>
12#include <optional>
13#include <variant>
14
15#include "BLI_bit_span.hh"
16#include "BLI_function_ref.hh"
17#include "BLI_index_mask_fwd.hh"
20#include "BLI_offset_indices.hh"
21#include "BLI_offset_span.hh"
22#include "BLI_task.hh"
24#include "BLI_vector.hh"
25#include "BLI_vector_set.hh"
27
29
/* log2 of the maximum number of indices that a single mask segment can hold. */
45static constexpr int64_t max_segment_size_shift = 14;
/* Segments store at most this many 16-bit indices (2^14). */
46static constexpr int64_t max_segment_size = (1 << max_segment_size_shift); /* 16384 */
49
61
105
113 private:
115 AlignedBuffer<1024, 8> inline_buffer_;
116
117 public:
119 {
120 this->provide_buffer(inline_buffer_);
121 }
122};
123
128class IndexMaskSegment : public OffsetSpan<int64_t, int16_t> {
129 public:
130 using OffsetSpan<int64_t, int16_t>::OffsetSpan;
131
133
134 IndexMaskSegment slice(const IndexRange &range) const;
135 IndexMaskSegment slice(const int64_t start, const int64_t size) const;
136
141 IndexMaskSegment shift(const int64_t shift) const;
142};
143
188class IndexMask : private IndexMaskData {
189 public:
191 IndexMask();
193 explicit IndexMask(int64_t size);
195 IndexMask(IndexRange range);
196
198 template<typename T> static IndexMask from_indices(Span<T> indices, IndexMaskMemory &memory);
202 static IndexMask from_bits(const IndexMask &universe, BitSpan bits, IndexMaskMemory &memory);
204 static IndexMask from_bools(Span<bool> bools, IndexMaskMemory &memory);
205 static IndexMask from_bools(const VArray<bool> &bools, IndexMaskMemory &memory);
206 static IndexMask from_bools_inverse(const VArray<bool> &bools, IndexMaskMemory &memory);
208 static IndexMask from_bools(const IndexMask &universe,
209 Span<bool> bools,
210 IndexMaskMemory &memory);
211 static IndexMask from_bools_inverse(const IndexMask &universe,
212 Span<bool> bools,
213 IndexMaskMemory &memory);
214 static IndexMask from_bools(const IndexMask &universe,
215 const VArray<bool> &bools,
216 IndexMaskMemory &memory);
217 static IndexMask from_bools_inverse(const IndexMask &universe,
218 const VArray<bool> &bools,
219 IndexMaskMemory &memory);
221 template<typename T>
223 const IndexMask &mask,
224 IndexMaskMemory &memory);
230 static IndexMask from_repeating(const IndexMask &mask_to_repeat,
231 int64_t repetitions,
232 int64_t stride,
233 int64_t initial_offset,
234 IndexMaskMemory &memory);
239 int64_t indices_num,
240 const int64_t initial_offset,
241 IndexMaskMemory &memory);
251 using Initializer = std::variant<IndexRange, Span<int64_t>, Span<int>, int64_t>;
252 static IndexMask from_initializers(const Span<Initializer> initializers,
253 IndexMaskMemory &memory);
255 static IndexMask from_union(const IndexMask &mask_a,
256 const IndexMask &mask_b,
257 IndexMaskMemory &memory);
261 static IndexMask from_difference(const IndexMask &mask_a,
262 const IndexMask &mask_b,
263 IndexMaskMemory &memory);
265 static IndexMask from_intersection(const IndexMask &mask_a,
266 const IndexMask &mask_b,
267 IndexMaskMemory &memory);
269 template<typename Fn>
270 static IndexMask from_predicate(const IndexMask &universe,
271 GrainSize grain_size,
272 IndexMaskMemory &memory,
273 Fn &&predicate);
283 const IndexMask &universe,
284 GrainSize grain_size,
285 IndexMaskMemory &memory,
286 FunctionRef<int64_t(const IndexMaskSegment &universe_segment,
287 IndexRangesBuilder<int16_t> &builder)> batch_predicate);
289 template<typename T, typename Fn>
290 static void from_groups(const IndexMask &universe,
291 IndexMaskMemory &memory,
292 Fn &&get_group_index,
293 MutableSpan<IndexMask> r_masks);
294
296 static Vector<IndexMask, 4> from_group_ids(const VArray<int> &group_ids,
297 IndexMaskMemory &memory,
298 VectorSet<int> &r_index_by_group_id);
299 static Vector<IndexMask, 4> from_group_ids(const IndexMask &universe,
300 const VArray<int> &group_ids,
301 IndexMaskMemory &memory,
302 VectorSet<int> &r_index_by_group_id);
303
304 int64_t size() const;
305 bool is_empty() const;
306 IndexRange index_range() const;
307 int64_t first() const;
308 int64_t last() const;
309
313 IndexRange bounds() const;
314
319 int64_t min_array_size() const;
320
324 std::optional<RawMaskIterator> find(int64_t query_index) const;
325 std::optional<RawMaskIterator> find_larger_equal(int64_t query_index) const;
326 std::optional<RawMaskIterator> find_smaller_equal(int64_t query_index) const;
330 bool contains(int64_t query_index) const;
331
336
346 int64_t operator[](const RawMaskIterator &it) const;
347
352 IndexMask slice(IndexRange range) const;
353 IndexMask slice(int64_t start, int64_t size) const;
354 IndexMask slice(RawMaskIterator first_it, RawMaskIterator last_it, int64_t size) const;
365 IndexMask slice_and_shift(IndexRange range, int64_t offset, IndexMaskMemory &memory) const;
368 int64_t offset,
369 IndexMaskMemory &memory) const;
370
374 IndexMask shift(const int64_t offset, IndexMaskMemory &memory) const;
375
380 IndexMask complement(const IndexMask &universe, IndexMaskMemory &memory) const;
381
385 int64_t segments_num() const;
389 IndexMaskSegment segment(int64_t segment_i) const;
390
404 static void foreach_segment_zipped(Span<IndexMask> masks,
405 FunctionRef<bool(Span<IndexMaskSegment> segments)> fn);
406
417 template<typename Fn> void foreach_index(Fn &&fn) const;
418 template<typename Fn> void foreach_index(GrainSize grain_size, Fn &&fn) const;
419
425 template<typename IndexT, typename Fn> void foreach_index_optimized(Fn &&fn) const;
426 template<typename IndexT, typename Fn>
427 void foreach_index_optimized(GrainSize grain_size, Fn &&fn) const;
428
440 template<typename Fn> void foreach_segment(Fn &&fn) const;
441 template<typename Fn> void foreach_segment(GrainSize grain_size, Fn &&fn) const;
442
452 template<typename Fn> void foreach_segment_optimized(Fn &&fn) const;
453 template<typename Fn> void foreach_segment_optimized(GrainSize grain_size, Fn &&fn) const;
454
463 template<typename Fn> void foreach_range(Fn &&fn) const;
464
469 template<typename T> void to_indices(MutableSpan<T> r_indices) const;
473 void set_bits(MutableBitSpan r_bits, int64_t offset = 0) const;
477 void to_bits(MutableBitSpan r_bits, int64_t offset = 0) const;
481 void to_bools(MutableSpan<bool> r_bools) const;
486 std::optional<IndexRange> to_range() const;
501 template<int64_t N = 4>
503
508
509 friend bool operator==(const IndexMask &a, const IndexMask &b);
510 friend bool operator!=(const IndexMask &a, const IndexMask &b);
511};
512
519 private:
520 int64_t segment_offset_;
521 const int16_t *segment_indices_;
522 std::array<int64_t, 2> cumulative_segment_sizes_;
523 IndexMask mask_;
524
525 public:
527 const IndexMask &update(IndexMaskSegment segment);
528};
529
531{
532 IndexMaskData &data = mask_.data_for_inplace_construction();
533 cumulative_segment_sizes_[0] = 0;
534 data.segments_num_ = 1;
535 data.indices_by_segment_ = &segment_indices_;
536 data.segment_offsets_ = &segment_offset_;
537 data.cumulative_segment_sizes_ = cumulative_segment_sizes_.data();
538 data.begin_index_in_segment_ = 0;
539}
540
542{
543 const Span<int16_t> indices = segment.base_span();
544 BLI_assert(!indices.is_empty());
545 BLI_assert(std::is_sorted(indices.begin(), indices.end()));
546 BLI_assert(indices[0] >= 0);
548 const int64_t indices_num = indices.size();
549
550 IndexMaskData &data = mask_.data_for_inplace_construction();
551 segment_offset_ = segment.offset();
552 segment_indices_ = indices.data();
553 cumulative_segment_sizes_[1] = int16_t(indices_num);
554 data.indices_num_ = indices_num;
555 data.end_index_in_segment_ = indices_num;
556
557 return mask_;
558}
559
560std::array<int16_t, max_segment_size> build_static_indices_array();
562std::ostream &operator<<(std::ostream &stream, const IndexMask &mask);
563
564/* -------------------------------------------------------------------- */
567
568inline const std::array<int16_t, max_segment_size> &get_static_indices_array()
569{
570 alignas(64) static const std::array<int16_t, max_segment_size> data =
572 return data;
573}
574
575template<typename T>
576inline void masked_fill(MutableSpan<T> data, const T &value, const IndexMask &mask)
577{
578 mask.foreach_index_optimized<int64_t>([&](const int64_t i) { data[i] = value; });
579}
580
586template<typename T> void build_reverse_map(const IndexMask &mask, MutableSpan<T> r_map);
587
596 IndexMaskMemory &memory);
597
602template<int64_t N>
604
605/* -------------------------------------------------------------------- */
608
609inline bool operator!=(const RawMaskIterator &a, const RawMaskIterator &b)
610{
611 return a.segment_i != b.segment_i || a.index_in_segment != b.index_in_segment;
612}
613
614inline bool operator==(const RawMaskIterator &a, const RawMaskIterator &b)
615{
616 return !(a != b);
617}
618
619/* -------------------------------------------------------------------- */
622
627
629{
630 return IndexMaskSegment(static_cast<const OffsetSpan<int64_t, int16_t> *>(this)->slice(range));
631}
632
634{
635 return IndexMaskSegment(
636 static_cast<const OffsetSpan<int64_t, int16_t> *>(this)->slice(start, size));
637}
638
640{
641 BLI_assert(this->is_empty() || (*this)[0] + shift >= 0);
642 return IndexMaskSegment(this->offset() + shift, this->base_span());
643}
644
645/* -------------------------------------------------------------------- */
648
650{
651 static constexpr int64_t cumulative_sizes_for_empty_mask[1] = {0};
652
653 data.indices_num_ = 0;
654 data.segments_num_ = 0;
655 data.cumulative_segment_sizes_ = cumulative_sizes_for_empty_mask;
656 /* Intentionally leave some pointers uninitialized which must not be accessed on empty masks
657 * anyway. */
658}
659
661{
662 init_empty_mask(*this);
663}
664
677
679{
680 if (range.is_empty()) {
681 init_empty_mask(*this);
682 return;
683 }
684 const int64_t one_after_last = range.one_after_last();
685 *this = get_static_index_mask_for_min_size(one_after_last);
686
687 const int64_t first_segment_i = range.first() >> max_segment_size_shift;
688 const int64_t last_segment_i = range.last() >> max_segment_size_shift;
689
690 indices_num_ = range.size();
691 segments_num_ = last_segment_i - first_segment_i + 1;
692 indices_by_segment_ += first_segment_i;
693 segment_offsets_ += first_segment_i;
694 cumulative_segment_sizes_ += first_segment_i;
696 end_index_in_segment_ = one_after_last - ((one_after_last - 1) & max_segment_size_mask_high);
697}
698
700{
701 return indices_num_;
702}
703
704inline bool IndexMask::is_empty() const
705{
706 return indices_num_ == 0;
707}
708
710{
711 return IndexRange(indices_num_);
712}
713
715{
716 if (this->is_empty()) {
717 return IndexRange();
718 }
719 const int64_t first = this->first();
720 const int64_t last = this->last();
722}
723
729
731{
733 const int64_t last_segment_i = segments_num_ - 1;
734 return segment_offsets_[last_segment_i] +
735 indices_by_segment_[last_segment_i][end_index_in_segment_ - 1];
736}
737
739{
740 if (indices_num_ == 0) {
741 return 0;
742 }
743 return this->last() + 1;
744}
745
747{
748 BLI_assert(index >= 0);
749 BLI_assert(index < indices_num_);
751 const int64_t full_index = index + cumulative_segment_sizes_[0] + begin_index_in_segment_;
755 [&](const int64_t cumulative_size) { return cumulative_size <= full_index; });
757 return it;
758}
759
770
772{
773 const RawMaskIterator it = this->index_to_iterator(i);
774 return (*this)[it];
775}
776
781
783{
784 return segments_num_;
785}
786
787inline IndexMaskSegment IndexMask::segment(const int64_t segment_i) const
788{
789 BLI_assert(segment_i >= 0);
790 BLI_assert(segment_i < segments_num_);
791 const int64_t full_segment_size = cumulative_segment_sizes_[segment_i + 1] -
792 cumulative_segment_sizes_[segment_i];
793 const int64_t begin_index = (segment_i == 0) ? begin_index_in_segment_ : 0;
794 const int64_t end_index = (segment_i == segments_num_ - 1) ? end_index_in_segment_ :
795 full_segment_size;
796 const int64_t segment_size = end_index - begin_index;
797 return IndexMaskSegment{segment_offsets_[segment_i],
798 {indices_by_segment_[segment_i] + begin_index, segment_size}};
799}
800
801inline IndexMask IndexMask::slice(const IndexRange range) const
802{
803 return this->slice(range.start(), range.size());
804}
805
807{
808 return *this;
809}
810
811template<typename Fn>
813 std::is_invocable_r_v<void, Fn, IndexMaskSegment, int64_t> ||
814 std::is_invocable_r_v<void, Fn, IndexRange, int64_t>;
815
816template<typename Fn> inline void IndexMask::foreach_index(Fn &&fn) const
817{
818 this->foreach_segment(
819 [&](const IndexMaskSegment indices, [[maybe_unused]] const int64_t start_segment_pos) {
820 if constexpr (std::is_invocable_r_v<void, Fn, int64_t, int64_t>) {
821 for (const int64_t i : indices.index_range()) {
822 fn(indices[i], start_segment_pos + i);
823 }
824 }
825 else {
826 for (const int64_t index : indices) {
827 fn(index);
828 }
829 }
830 });
831}
832
833template<typename Fn>
834inline void IndexMask::foreach_index(const GrainSize grain_size, Fn &&fn) const
835{
836 threading::parallel_for(this->index_range(), grain_size.value, [&](const IndexRange range) {
837 const IndexMask sub_mask = this->slice(range);
838 sub_mask.foreach_index([&](const int64_t i, [[maybe_unused]] const int64_t index_pos) {
839 if constexpr (std::is_invocable_r_v<void, Fn, int64_t, int64_t>) {
840 fn(i, index_pos + range.start());
841 }
842 else {
843 fn(i);
844 }
845 });
846 });
847}
848
849template<typename T, typename Fn>
850#if (defined(__GNUC__) && !defined(__clang__))
851[[gnu::optimize("O3")]]
852#endif
853inline void
855{
856 BLI_assert(segment.last() < std::numeric_limits<T>::max());
857 if (unique_sorted_indices::non_empty_is_range(segment.base_span())) {
858 const T start = T(segment[0]);
859 const T last = T(segment.last());
860 for (T i = start; i <= last; i++) {
861 fn(i);
862 }
863 }
864 else {
865 for (const int64_t i : segment) {
866 fn(T(i));
867 }
868 }
869}
870
871template<typename T, typename Fn>
872#if (defined(__GNUC__) && !defined(__clang__))
873[[gnu::optimize("O3")]]
874#endif
875inline void
877 const int64_t segment_pos,
878 const Fn fn)
879{
880 BLI_assert(segment.last() < std::numeric_limits<T>::max());
881 BLI_assert(segment.size() + segment_pos < std::numeric_limits<T>::max());
882 if (unique_sorted_indices::non_empty_is_range(segment.base_span())) {
883 const T start = T(segment[0]);
884 const T last = T(segment.last());
885 for (T i = start, pos = T(segment_pos); i <= last; i++, pos++) {
886 fn(i, pos);
887 }
888 }
889 else {
890 T pos = T(segment_pos);
891 for (const int64_t i : segment.index_range()) {
892 const T index = T(segment[i]);
893 fn(index, pos);
894 pos++;
895 }
896 }
897}
898
899template<typename IndexT, typename Fn>
901{
902 this->foreach_segment(
903 [&](const IndexMaskSegment segment, [[maybe_unused]] const int64_t segment_pos) {
904 if constexpr (std::is_invocable_r_v<void, Fn, IndexT, IndexT>) {
906 }
907 else {
909 }
910 });
911}
912
913template<typename IndexT, typename Fn>
914inline void IndexMask::foreach_index_optimized(const GrainSize grain_size, Fn &&fn) const
915{
916 threading::parallel_for(this->index_range(), grain_size.value, [&](const IndexRange range) {
917 const IndexMask sub_mask = this->slice(range);
918 sub_mask.foreach_segment(
919 [&](const IndexMaskSegment segment, [[maybe_unused]] const int64_t segment_pos) {
920 if constexpr (std::is_invocable_r_v<void, Fn, IndexT, IndexT>) {
921 optimized_foreach_index_with_pos<IndexT>(segment, segment_pos + range.start(), fn);
922 }
923 else {
924 optimized_foreach_index<IndexT>(segment, fn);
925 }
926 });
927 });
928}
929
930template<typename Fn> inline void IndexMask::foreach_segment_optimized(Fn &&fn) const
931{
932 this->foreach_segment(
933 [&](const IndexMaskSegment segment, [[maybe_unused]] const int64_t start_segment_pos) {
935 const IndexRange range(segment[0], segment.size());
937 fn(range, start_segment_pos);
938 }
939 else {
940 fn(range);
941 }
942 }
943 else {
945 fn(segment, start_segment_pos);
946 }
947 else {
948 fn(segment);
949 }
950 }
951 });
952}
953
954template<typename Fn>
955inline void IndexMask::foreach_segment_optimized(const GrainSize grain_size, Fn &&fn) const
956{
957 threading::parallel_for(this->index_range(), grain_size.value, [&](const IndexRange range) {
958 const IndexMask sub_mask = this->slice(range);
959 sub_mask.foreach_segment_optimized(
960 [&fn, range_start = range.start()](const auto segment,
961 [[maybe_unused]] const int64_t start_segment_pos) {
962 if constexpr (has_segment_and_start_parameter<Fn>) {
963 fn(segment, start_segment_pos + range_start);
964 }
965 else {
966 fn(segment);
967 }
968 });
969 });
970}
971
972template<typename Fn> inline void IndexMask::foreach_segment(Fn &&fn) const
973{
974 [[maybe_unused]] int64_t segment_pos = 0;
975 for (const int64_t segment_i : IndexRange(segments_num_)) {
976 const IndexMaskSegment segment = this->segment(segment_i);
978 fn(segment, segment_pos);
979 segment_pos += segment.size();
980 }
981 else {
982 fn(segment);
983 }
984 }
985}
986
987template<typename Fn>
988inline void IndexMask::foreach_segment(const GrainSize grain_size, Fn &&fn) const
989{
990 threading::parallel_for(this->index_range(), grain_size.value, [&](const IndexRange range) {
991 const IndexMask sub_mask = this->slice(range);
992 sub_mask.foreach_segment(
993 [&fn, range_start = range.start()](const IndexMaskSegment mask_segment,
994 [[maybe_unused]] const int64_t segment_pos) {
995 if constexpr (has_segment_and_start_parameter<Fn>) {
996 fn(mask_segment, segment_pos + range_start);
997 }
998 else {
999 fn(mask_segment);
1000 }
1001 });
1002 });
1003}
1004
1005template<typename Fn> inline void IndexMask::foreach_range(Fn &&fn) const
1006{
1007 this->foreach_segment([&](const IndexMaskSegment indices, [[maybe_unused]] int64_t segment_pos) {
1008 Span<int16_t> base_indices = indices.base_span();
1009 while (!base_indices.is_empty()) {
1010 const int64_t next_range_size = unique_sorted_indices::find_size_of_next_range(base_indices);
1011 const IndexRange range(int64_t(base_indices[0]) + indices.offset(), next_range_size);
1013 fn(range, segment_pos);
1014 }
1015 else {
1016 fn(range);
1017 }
1018 segment_pos += next_range_size;
1019 base_indices = base_indices.drop_front(next_range_size);
1020 }
1021 });
1022}
1023
1024namespace detail {
1026 const IndexMask &universe,
1027 GrainSize grain_size,
1028 IndexMaskMemory &memory,
1029 FunctionRef<int64_t(IndexMaskSegment indices, int16_t *r_true_indices)> filter_indices);
1030}
1031
1032template<typename Fn>
1034 const GrainSize grain_size,
1035 IndexMaskMemory &memory,
1036 Fn &&predicate)
1037{
1039 universe,
1040 grain_size,
1041 memory,
1042 [&](const IndexMaskSegment indices, int16_t *__restrict r_true_indices) {
1043 int16_t *r_current = r_true_indices;
1044 const int16_t *in_end = indices.base_span().end();
1045 const int64_t offset = indices.offset();
1046 for (const int16_t *in_current = indices.base_span().data(); in_current < in_end;
1047 in_current++) {
1048 const int16_t local_index = *in_current;
1049 const int64_t global_index = int64_t(local_index) + offset;
1050 const bool condition = predicate(global_index);
1051 *r_current = local_index;
1052 /* This expects the boolean to be either 0 or 1 which is generally the case but may not
1053 * be if the values are uninitialized. */
1054 BLI_assert(ELEM(int8_t(condition), 0, 1));
1055 /* Branchless conditional increment. */
1056 r_current += condition;
1057 }
1058 const int16_t true_indices_num = int16_t(r_current - r_true_indices);
1059 return true_indices_num;
1060 });
1061}
1062
1063template<typename T, typename Fn>
1065 IndexMaskMemory &memory,
1066 Fn &&get_group_index,
1067 MutableSpan<IndexMask> r_masks)
1068{
1069 Vector<Vector<T>> indices_by_group(r_masks.size());
1070 universe.foreach_index([&](const int64_t i) {
1071 const int group_index = get_group_index(i);
1072 indices_by_group[group_index].append(T(i));
1073 });
1074 for (const int64_t i : r_masks.index_range()) {
1075 r_masks[i] = IndexMask::from_indices<T>(indices_by_group[i], memory);
1076 }
1077}
1078
1079std::optional<IndexRange> inline IndexMask::to_range() const
1080{
1081 if (indices_num_ == 0) {
1082 return IndexRange{};
1083 }
1084 const int64_t first_index = this->first();
1085 const int64_t last_index = this->last();
1086 if (last_index - first_index == indices_num_ - 1) {
1087 return IndexRange(first_index, indices_num_);
1088 }
1089 return std::nullopt;
1090}
1091
1092template<int64_t N>
1094{
1096 this->foreach_segment_optimized([&](const auto segment) { segments.append(segment); });
1097 return segments;
1098}
1099
1100inline bool operator!=(const IndexMask &a, const IndexMask &b)
1101{
1102 return !(a == b);
1103}
1104
1105template<int64_t N>
1107 Vector<IndexMaskSegment, N> &r_segments)
1108{
1109 const std::array<int16_t, max_segment_size> &static_indices_array = get_static_indices_array();
1110
1111 const int64_t full_size = range.size();
1112 for (int64_t i = 0; i < full_size; i += max_segment_size) {
1113 const int64_t size = std::min(i + max_segment_size, full_size) - i;
1114 r_segments.append(
1115 IndexMaskSegment(range.first() + i, Span(static_indices_array).take_front(size)));
1116 }
1117}
1118
1129 const int64_t universe_size,
1130 const uint32_t random_seed,
1131 const float probability,
1132 IndexMaskMemory &memory);
1133
1134IndexMask random_mask(const int64_t universe_size,
1135 const uint32_t random_seed,
1136 const float probability,
1137 IndexMaskMemory &memory);
1138
1139} // namespace blender::index_mask
1140
1141namespace blender {
1142using index_mask::IndexMask;
1143using index_mask::IndexMaskFromSegment;
1144using index_mask::IndexMaskMemory;
1145using index_mask::IndexMaskSegment;
1146} // namespace blender
#define BLI_assert(a)
Definition BLI_assert.h:46
#define ELEM(...)
BMesh const char void * data
long long int int64_t
static DBVT_INLINE btScalar size(const btDbvtVolume &a)
Definition btDbvt.cpp:52
const IndexMask & update(IndexMaskSegment segment)
IndexMaskSegment slice(const IndexRange &range) const
IndexMaskSegment shift(const int64_t shift) const
IndexMaskSegment(const OffsetSpan< int64_t, int16_t > span)
static IndexMask from_predicate(const IndexMask &universe, GrainSize grain_size, IndexMaskMemory &memory, Fn &&predicate)
IndexMaskSegment segment(int64_t segment_i) const
int64_t last() const
std::optional< IndexRange > to_range() const
void foreach_index_optimized(Fn &&fn) const
IndexMask slice(IndexRange range) const
static IndexMask from_indices(Span< T > indices, IndexMaskMemory &memory)
IndexRange bounds() const
int64_t operator[](int64_t i) const
int64_t iterator_to_index(const RawMaskIterator &it) const
int64_t min_array_size() const
static void from_groups(const IndexMask &universe, IndexMaskMemory &memory, Fn &&get_group_index, MutableSpan< IndexMask > r_masks)
IndexRange index_range() const
Vector< std::variant< IndexRange, IndexMaskSegment >, N > to_spans_and_ranges() const
IndexMaskData & data_for_inplace_construction()
int64_t size() const
void foreach_range(Fn &&fn) const
void foreach_segment_optimized(Fn &&fn) const
int64_t first() const
void foreach_index(Fn &&fn) const
RawMaskIterator index_to_iterator(int64_t index) const
int64_t segments_num() const
void foreach_segment(Fn &&fn) const
constexpr int64_t first() const
constexpr int64_t one_after_last() const
constexpr int64_t last(const int64_t n=0) const
constexpr int64_t size() const
constexpr bool is_empty() const
constexpr int64_t start() const
static constexpr IndexRange from_begin_end_inclusive(const int64_t begin, const int64_t last)
void provide_buffer(void *buffer, const int64_t size)
constexpr int64_t size() const
Definition BLI_span.hh:493
constexpr IndexRange index_range() const
Definition BLI_span.hh:670
NonCopyable(const NonCopyable &other)=delete
NonMovable(NonMovable &&other)=delete
constexpr Span drop_front(int64_t n) const
Definition BLI_span.hh:171
constexpr bool is_empty() const
Definition BLI_span.hh:260
void append(const T &value)
const IndexMask & update(IndexMaskSegment segment)
IndexMaskSegment slice(const IndexRange &range) const
IndexMaskSegment shift(const int64_t shift) const
IndexMaskSegment(const OffsetSpan< int64_t, int16_t > span)
IndexMask slice_and_shift(IndexRange range, int64_t offset, IndexMaskMemory &memory) const
static IndexMask from_predicate(const IndexMask &universe, GrainSize grain_size, IndexMaskMemory &memory, Fn &&predicate)
static IndexMask from_every_nth(int64_t n, int64_t indices_num, const int64_t initial_offset, IndexMaskMemory &memory)
IndexMaskSegment segment(int64_t segment_i) const
void to_indices(MutableSpan< T > r_indices) const
IndexMask slice_content(IndexRange range) const
Vector< IndexRange > to_ranges_invert(IndexRange universe) const
static IndexMask from_bits(BitSpan bits, IndexMaskMemory &memory)
Vector< IndexRange > to_ranges() const
std::optional< RawMaskIterator > find(int64_t query_index) const
std::optional< IndexRange > to_range() const
static Vector< IndexMask, 4 > from_group_ids(const VArray< int > &group_ids, IndexMaskMemory &memory, VectorSet< int > &r_index_by_group_id)
static IndexMask from_repeating(const IndexMask &mask_to_repeat, int64_t repetitions, int64_t stride, int64_t initial_offset, IndexMaskMemory &memory)
static IndexMask from_union(const IndexMask &mask_a, const IndexMask &mask_b, IndexMaskMemory &memory)
IndexMask shift(const int64_t offset, IndexMaskMemory &memory) const
void foreach_index_optimized(Fn &&fn) const
std::optional< RawMaskIterator > find_larger_equal(int64_t query_index) const
IndexMask slice(IndexRange range) const
static IndexMask from_ranges(OffsetIndices< T > offsets, const IndexMask &mask, IndexMaskMemory &memory)
static IndexMask from_intersection(const IndexMask &mask_a, const IndexMask &mask_b, IndexMaskMemory &memory)
static IndexMask from_indices(Span< T > indices, IndexMaskMemory &memory)
int64_t operator[](int64_t i) const
int64_t iterator_to_index(const RawMaskIterator &it) const
static IndexMask from_segments(Span< IndexMaskSegment > segments, IndexMaskMemory &memory)
static void from_groups(const IndexMask &universe, IndexMaskMemory &memory, Fn &&get_group_index, MutableSpan< IndexMask > r_masks)
static IndexMask from_batch_predicate(const IndexMask &universe, GrainSize grain_size, IndexMaskMemory &memory, FunctionRef< int64_t(const IndexMaskSegment &universe_segment, IndexRangesBuilder< int16_t > &builder)> batch_predicate)
static IndexMask from_initializers(const Span< Initializer > initializers, IndexMaskMemory &memory)
void set_bits(MutableBitSpan r_bits, int64_t offset=0) const
Vector< std::variant< IndexRange, IndexMaskSegment >, N > to_spans_and_ranges() const
std::optional< RawMaskIterator > find_smaller_equal(int64_t query_index) const
friend bool operator!=(const IndexMask &a, const IndexMask &b)
IndexMaskData & data_for_inplace_construction()
bool contains(int64_t query_index) const
IndexMask complement(const IndexMask &universe, IndexMaskMemory &memory) const
static IndexMask from_bools(Span< bool > bools, IndexMaskMemory &memory)
void to_bits(MutableBitSpan r_bits, int64_t offset=0) const
static IndexMask from_bools_inverse(const VArray< bool > &bools, IndexMaskMemory &memory)
void foreach_segment_optimized(Fn &&fn) const
friend bool operator==(const IndexMask &a, const IndexMask &b)
void to_bools(MutableSpan< bool > r_bools) const
void foreach_index(Fn &&fn) const
RawMaskIterator index_to_iterator(int64_t index) const
static void foreach_segment_zipped(Span< IndexMask > masks, FunctionRef< bool(Span< IndexMaskSegment > segments)> fn)
static IndexMask from_difference(const IndexMask &mask_a, const IndexMask &mask_b, IndexMaskMemory &memory)
std::variant< IndexRange, Span< int64_t >, Span< int >, int64_t > Initializer
void foreach_segment(Fn &&fn) const
static ushort indices[]
uint pos
ccl_device_inline float2 mask(const MaskType mask, const float2 a)
#define N
#define T
static int64_t last_if(Iterator begin, Iterator end, Predicate &&predicate)
IndexMask from_predicate_impl(const IndexMask &universe, GrainSize grain_size, IndexMaskMemory &memory, FunctionRef< int64_t(IndexMaskSegment indices, int16_t *r_true_indices)> filter_indices)
constexpr bool has_segment_and_start_parameter
void index_range_to_mask_segments(const IndexRange range, Vector< IndexMaskSegment, N > &r_segments)
static constexpr int64_t max_segment_size
static constexpr int64_t max_segment_size_mask_high
void build_reverse_map(const IndexMask &mask, MutableSpan< T > r_map)
Definition index_mask.cc:34
static constexpr int64_t max_segment_size_mask_low
std::ostream & operator<<(std::ostream &stream, const IndexMask &mask)
void optimized_foreach_index(const IndexMaskSegment segment, const Fn fn)
bool operator==(const RawMaskIterator &a, const RawMaskIterator &b)
std::array< int16_t, max_segment_size > build_static_indices_array()
Definition index_mask.cc:47
void optimized_foreach_index_with_pos(const IndexMaskSegment segment, const int64_t segment_pos, const Fn fn)
const IndexMask & get_static_index_mask_for_min_size(const int64_t min_size)
Definition index_mask.cc:56
int64_t consolidate_index_mask_segments(MutableSpan< IndexMaskSegment > segments, IndexMaskMemory &memory)
IndexMask random_mask(const IndexMask &mask, const int64_t universe_size, const uint32_t random_seed, const float probability, IndexMaskMemory &memory)
bool operator!=(const RawMaskIterator &a, const RawMaskIterator &b)
const std::array< int16_t, max_segment_size > & get_static_indices_array()
void masked_fill(MutableSpan< T > data, const T &value, const IndexMask &mask)
void init_empty_mask(IndexMaskData &data)
static constexpr int64_t max_segment_size_shift
void parallel_for(const IndexRange range, const int64_t grain_size, const Function &function, const TaskSizeHints &size_hints=detail::TaskSizeHints_Static(1))
Definition BLI_task.hh:93
int64_t find_size_of_next_range(const Span< T > indices)
bool non_empty_is_range(const Span< T > indices)
i
Definition text_draw.cc:230