Blender V4.3
BLI_index_mask.hh
Go to the documentation of this file.
1/* SPDX-FileCopyrightText: 2023 Blender Authors
2 *
3 * SPDX-License-Identifier: GPL-2.0-or-later */
4
5#pragma once
6
7#include <array>
8#include <limits>
9#include <optional>
10#include <variant>
11
12#include "BLI_bit_span.hh"
13#include "BLI_function_ref.hh"
14#include "BLI_index_mask_fwd.hh"
17#include "BLI_offset_span.hh"
18#include "BLI_task.hh"
20#include "BLI_vector.hh"
21#include "BLI_vector_set.hh"
23
25
41static constexpr int64_t max_segment_size_shift = 14;
42static constexpr int64_t max_segment_size = (1 << max_segment_size_shift); /* 16384 */
44static constexpr int64_t max_segment_size_mask_high = ~max_segment_size_mask_low;
45
57
101
109 private:
111 AlignedBuffer<1024, 8> inline_buffer_;
112
113 public:
115 {
116 this->provide_buffer(inline_buffer_);
117 }
118};
119
124class IndexMaskSegment : public OffsetSpan<int64_t, int16_t> {
125 public:
127
129
130 IndexMaskSegment slice(const IndexRange &range) const;
131 IndexMaskSegment slice(const int64_t start, const int64_t size) const;
132
137 IndexMaskSegment shift(const int64_t shift) const;
138};
139
184class IndexMask : private IndexMaskData {
185 public:
187 IndexMask();
189 explicit IndexMask(int64_t size);
191 IndexMask(IndexRange range);
192
194 template<typename T> static IndexMask from_indices(Span<T> indices, IndexMaskMemory &memory);
196 static IndexMask from_bits(BitSpan bits, IndexMaskMemory &memory);
198 static IndexMask from_bits(const IndexMask &universe, BitSpan bits, IndexMaskMemory &memory);
200 static IndexMask from_bools(Span<bool> bools, IndexMaskMemory &memory);
201 static IndexMask from_bools(const VArray<bool> &bools, IndexMaskMemory &memory);
203 static IndexMask from_bools(const IndexMask &universe,
204 Span<bool> bools,
205 IndexMaskMemory &memory);
206 static IndexMask from_bools_inverse(const IndexMask &universe,
207 Span<bool> bools,
208 IndexMaskMemory &memory);
209 static IndexMask from_bools(const IndexMask &universe,
210 const VArray<bool> &bools,
211 IndexMaskMemory &memory);
217 static IndexMask from_repeating(const IndexMask &mask_to_repeat,
218 int64_t repetitions,
219 int64_t stride,
220 int64_t initial_offset,
221 IndexMaskMemory &memory);
226 int64_t indices_num,
227 const int64_t initial_offset,
228 IndexMaskMemory &memory);
238 using Initializer = std::variant<IndexRange, Span<int64_t>, Span<int>, int64_t>;
239 static IndexMask from_initializers(const Span<Initializer> initializers,
240 IndexMaskMemory &memory);
242 static IndexMask from_union(const IndexMask &mask_a,
243 const IndexMask &mask_b,
244 IndexMaskMemory &memory);
246 static IndexMask from_difference(const IndexMask &mask_a,
247 const IndexMask &mask_b,
248 IndexMaskMemory &memory);
250 static IndexMask from_intersection(const IndexMask &mask_a,
251 const IndexMask &mask_b,
252 IndexMaskMemory &memory);
254 template<typename Fn>
255 static IndexMask from_predicate(const IndexMask &universe,
256 GrainSize grain_size,
257 IndexMaskMemory &memory,
258 Fn &&predicate);
268 const IndexMask &universe,
269 GrainSize grain_size,
270 IndexMaskMemory &memory,
271 FunctionRef<int64_t(const IndexMaskSegment &universe_segment,
272 IndexRangesBuilder<int16_t> &builder)> batch_predicate);
274 template<typename T, typename Fn>
275 static void from_groups(const IndexMask &universe,
276 IndexMaskMemory &memory,
277 Fn &&get_group_index,
278 MutableSpan<IndexMask> r_masks);
279
281 static Vector<IndexMask, 4> from_group_ids(const VArray<int> &group_ids,
282 IndexMaskMemory &memory,
283 VectorSet<int> &r_index_by_group_id);
284 static Vector<IndexMask, 4> from_group_ids(const IndexMask &universe,
285 const VArray<int> &group_ids,
286 IndexMaskMemory &memory,
287 VectorSet<int> &r_index_by_group_id);
288
289 int64_t size() const;
290 bool is_empty() const;
291 IndexRange index_range() const;
292 int64_t first() const;
293 int64_t last() const;
294
298 IndexRange bounds() const;
299
304 int64_t min_array_size() const;
305
309 std::optional<RawMaskIterator> find(int64_t query_index) const;
310 std::optional<RawMaskIterator> find_larger_equal(int64_t query_index) const;
311 std::optional<RawMaskIterator> find_smaller_equal(int64_t query_index) const;
315 bool contains(int64_t query_index) const;
316
321
326 int64_t operator[](int64_t i) const;
331 int64_t operator[](const RawMaskIterator &it) const;
332
337 IndexMask slice(IndexRange range) const;
338 IndexMask slice(int64_t start, int64_t size) const;
339 IndexMask slice(RawMaskIterator first_it, RawMaskIterator last_it, int64_t size) const;
345 IndexMask slice_content(int64_t start, int64_t size) const;
350 IndexMask slice_and_shift(IndexRange range, int64_t offset, IndexMaskMemory &memory) const;
352 int64_t size,
353 int64_t offset,
354 IndexMaskMemory &memory) const;
355
359 IndexMask shift(const int64_t offset, IndexMaskMemory &memory) const;
360
365 IndexMask complement(const IndexMask &universe, IndexMaskMemory &memory) const;
366
370 int64_t segments_num() const;
374 IndexMaskSegment segment(int64_t segment_i) const;
375
389 static void foreach_segment_zipped(Span<IndexMask> masks,
390 FunctionRef<bool(Span<IndexMaskSegment> segments)> fn);
391
402 template<typename Fn> void foreach_index(Fn &&fn) const;
403 template<typename Fn> void foreach_index(GrainSize grain_size, Fn &&fn) const;
404
410 template<typename IndexT, typename Fn> void foreach_index_optimized(Fn &&fn) const;
411 template<typename IndexT, typename Fn>
412 void foreach_index_optimized(GrainSize grain_size, Fn &&fn) const;
413
425 template<typename Fn> void foreach_segment(Fn &&fn) const;
426 template<typename Fn> void foreach_segment(GrainSize grain_size, Fn &&fn) const;
427
437 template<typename Fn> void foreach_segment_optimized(Fn &&fn) const;
438 template<typename Fn> void foreach_segment_optimized(GrainSize grain_size, Fn &&fn) const;
439
448 template<typename Fn> void foreach_range(Fn &&fn) const;
449
454 template<typename T> void to_indices(MutableSpan<T> r_indices) const;
458 void set_bits(MutableBitSpan r_bits, int64_t offset = 0) const;
462 void to_bits(MutableBitSpan r_bits, int64_t offset = 0) const;
466 void to_bools(MutableSpan<bool> r_bools) const;
471 std::optional<IndexRange> to_range() const;
486 template<int64_t N = 4>
488
493
494 friend bool operator==(const IndexMask &a, const IndexMask &b);
495 friend bool operator!=(const IndexMask &a, const IndexMask &b);
496};
497
504 private:
505 int64_t segment_offset_;
506 const int16_t *segment_indices_;
507 std::array<int64_t, 2> cumulative_segment_sizes_;
508 IndexMask mask_;
509
510 public:
512 const IndexMask &update(IndexMaskSegment segment);
513};
514
516{
518 cumulative_segment_sizes_[0] = 0;
519 data.segments_num_ = 1;
520 data.indices_by_segment_ = &segment_indices_;
521 data.segment_offsets_ = &segment_offset_;
522 data.cumulative_segment_sizes_ = cumulative_segment_sizes_.data();
523 data.begin_index_in_segment_ = 0;
524}
525
527{
528 const Span<int16_t> indices = segment.base_span();
529 BLI_assert(!indices.is_empty());
530 BLI_assert(std::is_sorted(indices.begin(), indices.end()));
531 BLI_assert(indices[0] >= 0);
532 BLI_assert(indices.last() < max_segment_size);
533 const int64_t indices_num = indices.size();
534
536 segment_offset_ = segment.offset();
537 segment_indices_ = indices.data();
538 cumulative_segment_sizes_[1] = int16_t(indices_num);
539 data.indices_num_ = indices_num;
540 data.end_index_in_segment_ = indices_num;
541
542 return mask_;
543}
544
545std::array<int16_t, max_segment_size> build_static_indices_array();
547std::ostream &operator<<(std::ostream &stream, const IndexMask &mask);
548
549/* -------------------------------------------------------------------- */
553inline const std::array<int16_t, max_segment_size> &get_static_indices_array()
554{
555 alignas(64) static const std::array<int16_t, max_segment_size> data =
557 return data;
558}
559
560template<typename T>
561inline void masked_fill(MutableSpan<T> data, const T &value, const IndexMask &mask)
562{
563 mask.foreach_index_optimized<int64_t>([&](const int64_t i) { data[i] = value; });
564}
565
571template<typename T> void build_reverse_map(const IndexMask &mask, MutableSpan<T> r_map);
572
581 IndexMaskMemory &memory);
582
583/* -------------------------------------------------------------------- */
587inline bool operator!=(const RawMaskIterator &a, const RawMaskIterator &b)
588{
589 return a.segment_i != b.segment_i || a.index_in_segment != b.index_in_segment;
590}
591
592inline bool operator==(const RawMaskIterator &a, const RawMaskIterator &b)
593{
594 return !(a != b);
595}
596
597/* -------------------------------------------------------------------- */
605
607{
608 return IndexMaskSegment(static_cast<const OffsetSpan<int64_t, int16_t> *>(this)->slice(range));
609}
610
611inline IndexMaskSegment IndexMaskSegment::slice(const int64_t start, const int64_t size) const
612{
613 return IndexMaskSegment(
614 static_cast<const OffsetSpan<int64_t, int16_t> *>(this)->slice(start, size));
615}
616
618{
619 BLI_assert(this->is_empty() || (*this)[0] + shift >= 0);
620 return IndexMaskSegment(this->offset() + shift, this->base_span());
621}
622
623/* -------------------------------------------------------------------- */
628{
629 static constexpr int64_t cumulative_sizes_for_empty_mask[1] = {0};
630
631 data.indices_num_ = 0;
632 data.segments_num_ = 0;
633 data.cumulative_segment_sizes_ = cumulative_sizes_for_empty_mask;
634 /* Intentionally leave some pointers uninitialized which must not be accessed on empty masks
635 * anyway. */
636}
637
639{
640 init_empty_mask(*this);
641}
642
644{
645 if (size == 0) {
646 init_empty_mask(*this);
647 return;
648 }
654}
655
657{
658 if (range.is_empty()) {
659 init_empty_mask(*this);
660 return;
661 }
662 const int64_t one_after_last = range.one_after_last();
663 *this = get_static_index_mask_for_min_size(one_after_last);
664
665 const int64_t first_segment_i = range.first() >> max_segment_size_shift;
666 const int64_t last_segment_i = range.last() >> max_segment_size_shift;
667
668 indices_num_ = range.size();
669 segments_num_ = last_segment_i - first_segment_i + 1;
670 indices_by_segment_ += first_segment_i;
671 segment_offsets_ += first_segment_i;
672 cumulative_segment_sizes_ += first_segment_i;
674 end_index_in_segment_ = one_after_last - ((one_after_last - 1) & max_segment_size_mask_high);
675}
676
678{
679 return indices_num_;
680}
681
682inline bool IndexMask::is_empty() const
683{
684 return indices_num_ == 0;
685}
686
688{
689 return IndexRange(indices_num_);
690}
691
693{
694 if (this->is_empty()) {
695 return IndexRange();
696 }
697 const int64_t first = this->first();
698 const int64_t last = this->last();
700}
701
707
709{
711 const int64_t last_segment_i = segments_num_ - 1;
712 return segment_offsets_[last_segment_i] +
713 indices_by_segment_[last_segment_i][end_index_in_segment_ - 1];
714}
715
717{
718 if (indices_num_ == 0) {
719 return 0;
720 }
721 return this->last() + 1;
722}
723
725{
726 BLI_assert(index >= 0);
727 BLI_assert(index < indices_num_);
729 const int64_t full_index = index + cumulative_segment_sizes_[0] + begin_index_in_segment_;
730 it.segment_i = -1 +
734 [&](const int64_t cumulative_size) { return cumulative_size > full_index; });
735 it.index_in_segment = full_index - cumulative_segment_sizes_[it.segment_i];
736 return it;
737}
738
740{
741 BLI_assert(it.segment_i >= 0);
742 BLI_assert(it.segment_i < segments_num_);
743 BLI_assert(it.index_in_segment >= 0);
744 BLI_assert(it.index_in_segment < cumulative_segment_sizes_[it.segment_i + 1] -
745 cumulative_segment_sizes_[it.segment_i]);
746 return it.index_in_segment + cumulative_segment_sizes_[it.segment_i] -
748}
749
751{
752 const RawMaskIterator it = this->index_to_iterator(i);
753 return (*this)[it];
754}
755
757{
758 return segment_offsets_[it.segment_i] + indices_by_segment_[it.segment_i][it.index_in_segment];
759}
760
762{
763 return segments_num_;
764}
765
/**
 * Get the indices stored in the segment with the given index as an #IndexMaskSegment.
 */
inline IndexMaskSegment IndexMask::segment(const int64_t segment_i) const
{
  BLI_assert(segment_i >= 0);
  BLI_assert(segment_i < segments_num_);
  /* Full size of the stored segment; the mask may only use a sub-range of it. */
  const int64_t full_segment_size = cumulative_segment_sizes_[segment_i + 1] -
                                    cumulative_segment_sizes_[segment_i];
  /* The first segment may begin partway into its stored indices. */
  const int64_t begin_index = (segment_i == 0) ? begin_index_in_segment_ : 0;
  /* The last segment may end before its stored indices do. */
  const int64_t end_index = (segment_i == segments_num_ - 1) ? end_index_in_segment_ :
                                                               full_segment_size;
  const int64_t segment_size = end_index - begin_index;
  return IndexMaskSegment{segment_offsets_[segment_i],
                          {indices_by_segment_[segment_i] + begin_index, segment_size}};
}
779
780inline IndexMask IndexMask::slice(const IndexRange range) const
781{
782 return this->slice(range.start(), range.size());
783}
784
786{
787 return *this;
788}
789
790template<typename Fn>
792 std::is_invocable_r_v<void, Fn, IndexMaskSegment, int64_t> ||
793 std::is_invocable_r_v<void, Fn, IndexRange, int64_t>;
794
/**
 * Call #fn for every index in the mask, in ascending order. #fn may take either one
 * parameter (the index) or two (the index and its position within the mask).
 */
template<typename Fn> inline void IndexMask::foreach_index(Fn &&fn) const
{
  this->foreach_segment(
      [&](const IndexMaskSegment indices, [[maybe_unused]] const int64_t start_segment_pos) {
        /* Dispatch at compile time on whether the callback also wants the mask position. */
        if constexpr (std::is_invocable_r_v<void, Fn, int64_t, int64_t>) {
          for (const int64_t i : indices.index_range()) {
            fn(indices[i], start_segment_pos + i);
          }
        }
        else {
          for (const int64_t index : indices) {
            fn(index);
          }
        }
      });
}
811
/**
 * Parallel variant of #foreach_index: the mask is split by position into chunks of
 * roughly #grain_size indices and each chunk is processed on its own.
 */
template<typename Fn>
inline void IndexMask::foreach_index(const GrainSize grain_size, Fn &&fn) const
{
  threading::parallel_for(this->index_range(), grain_size.value, [&](const IndexRange range) {
    const IndexMask sub_mask = this->slice(range);
    sub_mask.foreach_index([&](const int64_t i, [[maybe_unused]] const int64_t index_pos) {
      if constexpr (std::is_invocable_r_v<void, Fn, int64_t, int64_t>) {
        /* Positions from the sliced sub-mask are local, so shift them back into the
         * coordinate space of the whole mask. */
        fn(i, index_pos + range.start());
      }
      else {
        fn(i);
      }
    });
  });
}
827
/**
 * Call #fn for every index in #segment, cast to the narrower index type #T.
 * When the segment is a contiguous range, a plain counted loop is used, which lets
 * the compiler generate better (e.g. vectorized) code.
 */
template<typename T, typename Fn>
#if (defined(__GNUC__) && !defined(__clang__))
[[gnu::optimize("O3")]]
#endif
inline void
optimized_foreach_index(const IndexMaskSegment segment, const Fn fn)
{
  /* All indices must be representable in #T. */
  BLI_assert(segment.last() < std::numeric_limits<T>::max());
  if (unique_sorted_indices::non_empty_is_range(segment.base_span())) {
    /* Contiguous case: iterate the range [start, last] directly. */
    const T start = T(segment[0]);
    const T last = T(segment.last());
    for (T i = start; i <= last; i++) {
      fn(i);
    }
  }
  else {
    /* General case: forward each stored index individually. */
    for (const int64_t i : segment) {
      fn(T(i));
    }
  }
}
849
850template<typename T, typename Fn>
851#if (defined(__GNUC__) && !defined(__clang__))
852[[gnu::optimize("O3")]]
853#endif
854inline void
856 const int64_t segment_pos,
857 const Fn fn)
858{
859 BLI_assert(segment.last() < std::numeric_limits<T>::max());
860 BLI_assert(segment.size() + segment_pos < std::numeric_limits<T>::max());
861 if (unique_sorted_indices::non_empty_is_range(segment.base_span())) {
862 const T start = T(segment[0]);
863 const T last = T(segment.last());
864 for (T i = start, pos = T(segment_pos); i <= last; i++, pos++) {
865 fn(i, pos);
866 }
867 }
868 else {
869 T pos = T(segment_pos);
870 for (const int64_t i : segment.index_range()) {
871 const T index = T(segment[i]);
872 fn(index, pos);
873 pos++;
874 }
875 }
876}
877
878template<typename IndexT, typename Fn>
879inline void IndexMask::foreach_index_optimized(Fn &&fn) const
880{
881 this->foreach_segment(
882 [&](const IndexMaskSegment segment, [[maybe_unused]] const int64_t segment_pos) {
883 if constexpr (std::is_invocable_r_v<void, Fn, IndexT, IndexT>) {
884 optimized_foreach_index_with_pos<IndexT>(segment, segment_pos, fn);
885 }
886 else {
888 }
889 });
890}
891
/**
 * Parallel variant of #foreach_index_optimized: splits the mask into chunks of roughly
 * #grain_size indices and iterates each chunk with the type-narrowed fast path.
 */
template<typename IndexT, typename Fn>
inline void IndexMask::foreach_index_optimized(const GrainSize grain_size, Fn &&fn) const
{
  threading::parallel_for(this->index_range(), grain_size.value, [&](const IndexRange range) {
    const IndexMask sub_mask = this->slice(range);
    sub_mask.foreach_segment(
        [&](const IndexMaskSegment segment, [[maybe_unused]] const int64_t segment_pos) {
          /* Dispatch on whether the callback also wants the index position. */
          if constexpr (std::is_invocable_r_v<void, Fn, IndexT, IndexT>) {
            /* Shift the local sub-mask position back into whole-mask coordinates. */
            optimized_foreach_index_with_pos<IndexT>(segment, segment_pos + range.start(), fn);
          }
          else {
            optimized_foreach_index<IndexT>(segment, fn);
          }
        });
  });
}
908
909template<typename Fn> inline void IndexMask::foreach_segment_optimized(Fn &&fn) const
910{
911 this->foreach_segment(
912 [&](const IndexMaskSegment segment, [[maybe_unused]] const int64_t start_segment_pos) {
913 if (unique_sorted_indices::non_empty_is_range(segment.base_span())) {
914 const IndexRange range(segment[0], segment.size());
915 if constexpr (has_segment_and_start_parameter<Fn>) {
916 fn(range, start_segment_pos);
917 }
918 else {
919 fn(range);
920 }
921 }
922 else {
924 fn(segment, start_segment_pos);
925 }
926 else {
927 fn(segment);
928 }
929 }
930 });
931}
932
/**
 * Parallel variant of #foreach_segment_optimized. The callback receives either an
 * #IndexRange or an #IndexMaskSegment (hence `const auto segment`), optionally with the
 * segment's start position in the whole mask.
 */
template<typename Fn>
inline void IndexMask::foreach_segment_optimized(const GrainSize grain_size, Fn &&fn) const
{
  threading::parallel_for(this->index_range(), grain_size.value, [&](const IndexRange range) {
    const IndexMask sub_mask = this->slice(range);
    sub_mask.foreach_segment_optimized(
        [&fn, range_start = range.start()](const auto segment,
                                           [[maybe_unused]] const int64_t start_segment_pos) {
          if constexpr (has_segment_and_start_parameter<Fn>) {
            /* Translate the sub-mask-local position into whole-mask coordinates. */
            fn(segment, start_segment_pos + range_start);
          }
          else {
            fn(segment);
          }
        });
  });
}
950
951template<typename Fn> inline void IndexMask::foreach_segment(Fn &&fn) const
952{
953 [[maybe_unused]] int64_t segment_pos = 0;
954 for (const int64_t segment_i : IndexRange(segments_num_)) {
955 const IndexMaskSegment segment = this->segment(segment_i);
957 fn(segment, segment_pos);
958 segment_pos += segment.size();
959 }
960 else {
961 fn(segment);
962 }
963 }
964}
965
/**
 * Parallel variant of #foreach_segment: splits the mask into chunks of roughly
 * #grain_size indices and visits each chunk's segments on its own task.
 */
template<typename Fn>
inline void IndexMask::foreach_segment(const GrainSize grain_size, Fn &&fn) const
{
  threading::parallel_for(this->index_range(), grain_size.value, [&](const IndexRange range) {
    const IndexMask sub_mask = this->slice(range);
    sub_mask.foreach_segment(
        [&fn, range_start = range.start()](const IndexMaskSegment mask_segment,
                                           [[maybe_unused]] const int64_t segment_pos) {
          if constexpr (has_segment_and_start_parameter<Fn>) {
            /* Positions reported by the sub-mask are local; shift into whole-mask space. */
            fn(mask_segment, segment_pos + range_start);
          }
          else {
            fn(mask_segment);
          }
        });
  });
}
983
/**
 * Call #fn for every maximal contiguous range of indices in the mask, in ascending
 * order. #fn may optionally take the start position of the range within the mask.
 */
template<typename Fn> inline void IndexMask::foreach_range(Fn &&fn) const
{
  this->foreach_segment([&](const IndexMaskSegment indices, [[maybe_unused]] int64_t segment_pos) {
    Span<int16_t> base_indices = indices.base_span();
    /* Repeatedly peel the leading contiguous run off the segment. */
    while (!base_indices.is_empty()) {
      const int64_t next_range_size = unique_sorted_indices::find_size_of_next_range(base_indices);
      /* Convert segment-local 16-bit indices back to absolute indices via the offset. */
      const IndexRange range(int64_t(base_indices[0]) + indices.offset(), next_range_size);
      if constexpr (has_segment_and_start_parameter<Fn>) {
        fn(range, segment_pos);
      }
      else {
        fn(range);
      }
      segment_pos += next_range_size;
      base_indices = base_indices.drop_front(next_range_size);
    }
  });
}
1002
1003namespace detail {
1005 const IndexMask &universe,
1006 GrainSize grain_size,
1007 IndexMaskMemory &memory,
1008 FunctionRef<int64_t(IndexMaskSegment indices, int16_t *r_true_indices)> filter_indices);
1009}
1010
1011template<typename Fn>
1013 const GrainSize grain_size,
1014 IndexMaskMemory &memory,
1015 Fn &&predicate)
1016{
1018 universe,
1019 grain_size,
1020 memory,
1021 [&](const IndexMaskSegment indices, int16_t *__restrict r_true_indices) {
1022 int16_t *r_current = r_true_indices;
1023 const int16_t *in_end = indices.base_span().end();
1024 const int64_t offset = indices.offset();
1025 for (const int16_t *in_current = indices.base_span().data(); in_current < in_end;
1026 in_current++) {
1027 const int16_t local_index = *in_current;
1028 const int64_t global_index = int64_t(local_index) + offset;
1029 const bool condition = predicate(global_index);
1030 *r_current = local_index;
1031 /* This expects the boolean to be either 0 or 1 which is generally the case but may not
1032 * be if the values are uninitialized. */
1033 BLI_assert(ELEM(int8_t(condition), 0, 1));
1034 /* Branchless conditional increment. */
1035 r_current += condition;
1036 }
1037 const int16_t true_indices_num = int16_t(r_current - r_true_indices);
1038 return true_indices_num;
1039 });
1040}
1041
1042template<typename T, typename Fn>
1044 IndexMaskMemory &memory,
1045 Fn &&get_group_index,
1046 MutableSpan<IndexMask> r_masks)
1047{
1048 Vector<Vector<T>> indices_by_group(r_masks.size());
1049 universe.foreach_index([&](const int64_t i) {
1050 const int group_index = get_group_index(i);
1051 indices_by_group[group_index].append(T(i));
1052 });
1053 for (const int64_t i : r_masks.index_range()) {
1054 r_masks[i] = IndexMask::from_indices<T>(indices_by_group[i], memory);
1055 }
1056}
1057
1058std::optional<IndexRange> inline IndexMask::to_range() const
1059{
1060 if (indices_num_ == 0) {
1061 return IndexRange{};
1062 }
1063 const int64_t first_index = this->first();
1064 const int64_t last_index = this->last();
1065 if (last_index - first_index == indices_num_ - 1) {
1066 return IndexRange(first_index, indices_num_);
1067 }
1068 return std::nullopt;
1069}
1070
1071template<int64_t N>
1073{
1075 this->foreach_segment_optimized([&](const auto segment) { segments.append(segment); });
1076 return segments;
1077}
1078
1079inline bool operator!=(const IndexMask &a, const IndexMask &b)
1080{
1081 return !(a == b);
1082}
1083
1084} // namespace blender::index_mask
1085
1086namespace blender {
1091} // namespace blender
#define BLI_assert(a)
Definition BLI_assert.h:50
#define ELEM(...)
constexpr int64_t start() const
static constexpr IndexRange from_begin_end_inclusive(const int64_t begin, const int64_t last)
void provide_buffer(void *buffer, const int64_t size)
constexpr int64_t size() const
Definition BLI_span.hh:494
constexpr IndexRange index_range() const
Definition BLI_span.hh:671
constexpr Span drop_front(int64_t n) const
Definition BLI_span.hh:172
constexpr bool is_empty() const
Definition BLI_span.hh:261
void append(const T &value)
const IndexMask & update(IndexMaskSegment segment)
IndexMaskSegment slice(const IndexRange &range) const
IndexMaskSegment shift(const int64_t shift) const
IndexMaskSegment(const OffsetSpan< int64_t, int16_t > span)
IndexMask slice_and_shift(IndexRange range, int64_t offset, IndexMaskMemory &memory) const
static IndexMask from_predicate(const IndexMask &universe, GrainSize grain_size, IndexMaskMemory &memory, Fn &&predicate)
static IndexMask from_every_nth(int64_t n, int64_t indices_num, const int64_t initial_offset, IndexMaskMemory &memory)
static IndexMask from_bools_inverse(const IndexMask &universe, Span< bool > bools, IndexMaskMemory &memory)
IndexMaskSegment segment(int64_t segment_i) const
void to_indices(MutableSpan< T > r_indices) const
IndexMask slice_content(IndexRange range) const
Vector< IndexRange > to_ranges_invert(IndexRange universe) const
static IndexMask from_bits(BitSpan bits, IndexMaskMemory &memory)
Vector< IndexRange > to_ranges() const
std::optional< RawMaskIterator > find(int64_t query_index) const
std::optional< IndexRange > to_range() const
static Vector< IndexMask, 4 > from_group_ids(const VArray< int > &group_ids, IndexMaskMemory &memory, VectorSet< int > &r_index_by_group_id)
static IndexMask from_repeating(const IndexMask &mask_to_repeat, int64_t repetitions, int64_t stride, int64_t initial_offset, IndexMaskMemory &memory)
static IndexMask from_union(const IndexMask &mask_a, const IndexMask &mask_b, IndexMaskMemory &memory)
IndexMask shift(const int64_t offset, IndexMaskMemory &memory) const
void foreach_index_optimized(Fn &&fn) const
std::optional< RawMaskIterator > find_larger_equal(int64_t query_index) const
IndexMask slice(IndexRange range) const
static IndexMask from_intersection(const IndexMask &mask_a, const IndexMask &mask_b, IndexMaskMemory &memory)
static IndexMask from_indices(Span< T > indices, IndexMaskMemory &memory)
int64_t operator[](int64_t i) const
int64_t iterator_to_index(const RawMaskIterator &it) const
static IndexMask from_segments(Span< IndexMaskSegment > segments, IndexMaskMemory &memory)
static void from_groups(const IndexMask &universe, IndexMaskMemory &memory, Fn &&get_group_index, MutableSpan< IndexMask > r_masks)
static IndexMask from_batch_predicate(const IndexMask &universe, GrainSize grain_size, IndexMaskMemory &memory, FunctionRef< int64_t(const IndexMaskSegment &universe_segment, IndexRangesBuilder< int16_t > &builder)> batch_predicate)
static IndexMask from_initializers(const Span< Initializer > initializers, IndexMaskMemory &memory)
void set_bits(MutableBitSpan r_bits, int64_t offset=0) const
Vector< std::variant< IndexRange, IndexMaskSegment >, N > to_spans_and_ranges() const
std::optional< RawMaskIterator > find_smaller_equal(int64_t query_index) const
friend bool operator!=(const IndexMask &a, const IndexMask &b)
IndexMaskData & data_for_inplace_construction()
bool contains(int64_t query_index) const
IndexMask complement(const IndexMask &universe, IndexMaskMemory &memory) const
static IndexMask from_bools(Span< bool > bools, IndexMaskMemory &memory)
void foreach_range(Fn &&fn) const
void to_bits(MutableBitSpan r_bits, int64_t offset=0) const
void foreach_segment_optimized(Fn &&fn) const
friend bool operator==(const IndexMask &a, const IndexMask &b)
void to_bools(MutableSpan< bool > r_bools) const
void foreach_index(Fn &&fn) const
RawMaskIterator index_to_iterator(int64_t index) const
static void foreach_segment_zipped(Span< IndexMask > masks, FunctionRef< bool(Span< IndexMaskSegment > segments)> fn)
static IndexMask from_difference(const IndexMask &mask_a, const IndexMask &mask_b, IndexMaskMemory &memory)
std::variant< IndexRange, Span< int64_t >, Span< int >, int64_t > Initializer
void foreach_segment(Fn &&fn) const
local_group_size(16, 16) .push_constant(Type b
static ushort indices[]
#define N
#define T
int64_t find_predicate_begin(Iterator begin, Iterator end, Predicate &&predicate)
IndexMask from_predicate_impl(const IndexMask &universe, GrainSize grain_size, IndexMaskMemory &memory, FunctionRef< int64_t(IndexMaskSegment indices, int16_t *r_true_indices)> filter_indices)
constexpr bool has_segment_and_start_parameter
static constexpr int64_t max_segment_size
static constexpr int64_t max_segment_size_mask_high
void build_reverse_map(const IndexMask &mask, MutableSpan< T > r_map)
Definition index_mask.cc:31
static constexpr int64_t max_segment_size_mask_low
std::ostream & operator<<(std::ostream &stream, const IndexMask &mask)
Definition index_mask.cc:99
void optimized_foreach_index(const IndexMaskSegment segment, const Fn fn)
bool operator==(const RawMaskIterator &a, const RawMaskIterator &b)
std::array< int16_t, max_segment_size > build_static_indices_array()
Definition index_mask.cc:44
void optimized_foreach_index_with_pos(const IndexMaskSegment segment, const int64_t segment_pos, const Fn fn)
const IndexMask & get_static_index_mask_for_min_size(const int64_t min_size)
Definition index_mask.cc:53
int64_t consolidate_index_mask_segments(MutableSpan< IndexMaskSegment > segments, IndexMaskMemory &memory)
bool operator!=(const RawMaskIterator &a, const RawMaskIterator &b)
const std::array< int16_t, max_segment_size > & get_static_indices_array()
void masked_fill(MutableSpan< T > data, const T &value, const IndexMask &mask)
void init_empty_mask(IndexMaskData &data)
static constexpr int64_t max_segment_size_shift
void parallel_for(const IndexRange range, const int64_t grain_size, const Function &function, const TaskSizeHints &size_hints=detail::TaskSizeHints_Static(1))
Definition BLI_task.hh:95
bool non_empty_is_range(const Span< T > indices)
signed short int16_t
Definition stdint.h:76
__int64 int64_t
Definition stdint.h:89
signed char int8_t
Definition stdint.h:75