Blender V5.0
BLI_virtual_array.hh
Go to the documentation of this file.
1/* SPDX-FileCopyrightText: 2023 Blender Authors
2 *
3 * SPDX-License-Identifier: GPL-2.0-or-later */
4
5#pragma once
6
27
28#include <functional>
29#include <optional>
30
31#include "BLI_any.hh"
32#include "BLI_array.hh"
34#include "BLI_index_mask.hh"
35#include "BLI_span.hh"
36
37namespace blender {
38
40class GVArray;
41class GVMutableArray;
42
/**
 * Summarizes how a virtual array stores its data, so that the common special cases (a
 * contiguous span, or a single repeated value) can be handled without a virtual call per
 * element.
 */
struct CommonVArrayInfo {
  enum class Type : uint8_t {
    /* Is not one of the common special types below. */
    Any,
    Span,
    Single,
  };

  Type type = Type::Any;

  /* NOTE(review): reconstructed meaning — presumably true when #data may become invalid once the
   * virtual array is destructed; confirm against the upstream header. */
  bool may_have_ownership = true;

  /**
   * Points to the array of elements when #type is #Type::Span, or to the single value when #type
   * is #Type::Single. Unused for #Type::Any.
   */
  const void *data;

  CommonVArrayInfo() = default;
  CommonVArrayInfo(const Type _type, const bool _may_have_ownership, const void *_data)
      : type(_type), may_have_ownership(_may_have_ownership), data(_data)
  {
  }
};
72
77template<typename T> class VArrayImpl {
78 protected:
84
85 public:
87 {
88 BLI_assert(size_ >= 0);
89 }
90
91 virtual ~VArrayImpl() = default;
92
93 int64_t size() const
94 {
95 return size_;
96 }
97
102 virtual T get(int64_t index) const = 0;
103
105 {
106 return {};
107 }
108
113 virtual void materialize(const IndexMask &mask, T *dst, const bool dst_is_uninitialized) const
114 {
115 if constexpr (std::is_trivially_copyable_v<T>) {
116 mask.foreach_index([&](const int64_t i) { dst[i] = this->get(i); });
117 }
118 else {
119 if (dst_is_uninitialized) {
120 mask.foreach_index([&](const int64_t i) { new (dst + i) T(this->get(i)); });
121 }
122 else {
123 mask.foreach_index([&](const int64_t i) { dst[i] = this->get(i); });
124 }
125 }
126 }
127
134 T *dst,
135 const bool dst_is_uninitialized) const
136 {
137 if constexpr (std::is_trivially_copyable_v<T>) {
138 mask.foreach_index([&](const int64_t i, const int64_t pos) { dst[pos] = this->get(i); });
139 }
140 else {
141 if (dst_is_uninitialized) {
142 mask.foreach_index(
143 [&](const int64_t i, const int64_t pos) { new (dst + pos) T(this->get(i)); });
144 }
145 else {
146 mask.foreach_index([&](const int64_t i, const int64_t pos) { dst[pos] = this->get(i); });
147 }
148 }
149 }
150
157 virtual bool try_assign_GVArray(GVArray & /*varray*/) const
158 {
159 return false;
160 }
161};
162
164template<typename T> class VMutableArrayImpl : public VArrayImpl<T> {
165 public:
167
171 virtual void set(int64_t index, T value) = 0;
172
176 virtual void set_all(Span<T> src)
177 {
178 const CommonVArrayInfo info = this->common_info();
181 src.data(), this->size_, const_cast<T *>(static_cast<const T *>(info.data)));
182 }
183 else {
184 const int64_t size = this->size_;
185 for (int64_t i = 0; i < size; i++) {
186 this->set(i, src[i]);
187 }
188 }
189 }
190
194 virtual bool try_assign_GVMutableArray(GVMutableArray & /*varray*/) const
195 {
196 return false;
197 }
198};
199
204template<typename T> class VArrayImpl_For_Span : public VMutableArrayImpl<T> {
205 protected:
206 T *data_ = nullptr;
207
208 public:
213
214 protected:
216
217 T get(const int64_t index) const final
218 {
219 return data_[index];
220 }
221
222 void set(const int64_t index, T value) final
223 {
224 data_[index] = value;
225 }
226
228 {
230 }
231
232 void materialize(const IndexMask &mask, T *dst, const bool dst_is_uninitialized) const override
233 {
234 if constexpr (std::is_trivially_copyable_v<T>) {
235 mask.foreach_index_optimized<int64_t>([&](const int64_t i) { dst[i] = data_[i]; });
236 }
237 else {
238 if (dst_is_uninitialized) {
239 mask.foreach_index_optimized<int64_t>([&](const int64_t i) { new (dst + i) T(data_[i]); });
240 }
241 else {
242 mask.foreach_index_optimized<int64_t>([&](const int64_t i) { dst[i] = data_[i]; });
243 }
244 }
245 }
246
248 T *dst,
249 const bool dst_is_uninitialized) const override
250 {
251 if constexpr (std::is_trivially_copyable_v<T>) {
252 mask.foreach_index_optimized<int64_t>(
253 [&](const int64_t i, const int64_t pos) { dst[pos] = data_[i]; });
254 }
255 else {
256 if (dst_is_uninitialized) {
257 mask.foreach_index_optimized<int64_t>(
258 [&](const int64_t i, const int64_t pos) { new (dst + pos) T(data_[i]); });
259 }
260 else {
261 mask.foreach_index_optimized<int64_t>(
262 [&](const int64_t i, const int64_t pos) { dst[pos] = data_[i]; });
263 }
264 }
265 }
266};
267
272template<typename T> class VArrayImpl_For_Span_final final : public VArrayImpl_For_Span<T> {
273 public:
275
277 /* Cast const away, because the implementation for const and non const spans is shared. */
278 : VArrayImpl_For_Span<T>({const_cast<T *>(data.data()), data.size()})
279 {
280 }
281
282 private:
283 CommonVArrayInfo common_info() const final
284 {
285 return CommonVArrayInfo(CommonVArrayInfo::Type::Span, false, this->data_);
286 }
287};
288
289template<typename T>
291
298template<typename Container, typename T = typename Container::value_type>
300 private:
301 Container container_;
302
303 public:
304 VArrayImpl_For_ArrayContainer(Container container)
305 : VArrayImpl_For_Span<T>(int64_t(container.size())), container_(std::move(container))
306 {
307 this->data_ = const_cast<T *>(container_.data());
308 }
309};
310
316template<typename T> class VArrayImpl_For_Single final : public VArrayImpl<T> {
317 private:
318 T value_;
319
320 public:
322 : VArrayImpl<T>(size), value_(std::move(value))
323 {
324 }
325
326 protected:
327 T get(const int64_t /*index*/) const override
328 {
329 return value_;
330 }
331
333 {
335 }
336
337 void materialize(const IndexMask &mask, T *dst, const bool dst_is_uninitialized) const override
338 {
339 if constexpr (std::is_trivially_copyable_v<T>) {
340 mask.foreach_index([&](const int64_t i) { dst[i] = value_; });
341 }
342 else {
343 if (dst_is_uninitialized) {
344 mask.foreach_index([&](const int64_t i) { new (dst + i) T(value_); });
345 }
346 else {
347 mask.foreach_index([&](const int64_t i) { dst[i] = value_; });
348 }
349 }
350 }
351
353 T *dst,
354 const bool dst_is_uninitialized) const override
355 {
356 if constexpr (std::is_trivially_copyable_v<T>) {
357 initialized_fill_n(dst, mask.size(), value_);
358 }
359 else {
360 if (dst_is_uninitialized) {
361 uninitialized_fill_n(dst, mask.size(), value_);
362 }
363 else {
364 initialized_fill_n(dst, mask.size(), value_);
365 }
366 }
367 }
368};
369
370template<typename T>
372
377template<typename T, typename GetFunc> class VArrayImpl_For_Func final : public VArrayImpl<T> {
378 private:
379 GetFunc get_func_;
380
381 public:
382 VArrayImpl_For_Func(const int64_t size, GetFunc get_func)
383 : VArrayImpl<T>(size), get_func_(std::move(get_func))
384 {
385 }
386
387 private:
388 T get(const int64_t index) const override
389 {
390 return get_func_(index);
391 }
392
393 void materialize(const IndexMask &mask, T *dst, const bool dst_is_uninitialized) const override
394 {
395 if constexpr (std::is_trivially_copyable_v<T>) {
396 mask.foreach_index([&](const int64_t i) { dst[i] = get_func_(i); });
397 }
398 else {
399 if (dst_is_uninitialized) {
400 mask.foreach_index([&](const int64_t i) { new (dst + i) T(get_func_(i)); });
401 }
402 else {
403 mask.foreach_index([&](const int64_t i) { dst[i] = get_func_(i); });
404 }
405 }
406 }
407
408 void materialize_compressed(const IndexMask &mask,
409 T *dst,
410 const bool dst_is_uninitialized) const override
411 {
412 if constexpr (std::is_trivially_copyable_v<T>) {
413 mask.foreach_index([&](const int64_t i, const int64_t pos) { dst[pos] = get_func_(i); });
414 }
415 else {
416 if (dst_is_uninitialized) {
417 mask.foreach_index(
418 [&](const int64_t i, const int64_t pos) { new (dst + pos) T(get_func_(i)); });
419 }
420 else {
421 mask.foreach_index([&](const int64_t i, const int64_t pos) { dst[pos] = get_func_(i); });
422 }
423 }
424 }
425};
426
430template<typename StructT,
431 typename ElemT,
432 ElemT (*GetFunc)(const StructT &),
433 void (*SetFunc)(StructT &, ElemT) = nullptr>
435 private:
436 StructT *data_;
437
438 public:
443
444 template<typename OtherStructT,
445 typename OtherElemT,
446 OtherElemT (*OtherGetFunc)(const OtherStructT &),
447 void (*OtherSetFunc)(OtherStructT &, OtherElemT)>
449
450 private:
451 ElemT get(const int64_t index) const override
452 {
453 return GetFunc(data_[index]);
454 }
455
456 void set(const int64_t index, ElemT value) override
457 {
458 SetFunc(data_[index], std::move(value));
459 }
460
461 void materialize(const IndexMask &mask,
462 ElemT *dst,
463 const bool dst_is_uninitialized) const override
464 {
465 if constexpr (std::is_trivially_copyable_v<ElemT>) {
466 mask.foreach_index_optimized<int64_t>([&](const int64_t i) { dst[i] = GetFunc(data_[i]); });
467 }
468 else {
469 if (dst_is_uninitialized) {
470 mask.foreach_index_optimized<int64_t>(
471 [&](const int64_t i) { new (dst + i) ElemT(GetFunc(data_[i])); });
472 }
473 else {
474 mask.foreach_index_optimized<int64_t>(
475 [&](const int64_t i) { dst[i] = GetFunc(data_[i]); });
476 }
477 }
478 }
479
480 void materialize_compressed(const IndexMask &mask,
481 ElemT *dst,
482 const bool dst_is_uninitialized) const override
483 {
484 if constexpr (std::is_trivially_copyable_v<ElemT>) {
485 mask.foreach_index_optimized<int64_t>(
486 [&](const int64_t i, const int64_t pos) { dst[pos] = GetFunc(data_[i]); });
487 }
488 else {
489 if (dst_is_uninitialized) {
490 mask.foreach_index_optimized<int64_t>(
491 [&](const int64_t i, const int64_t pos) { new (dst + pos) ElemT(GetFunc(data_[i])); });
492 }
493 else {
494 mask.foreach_index_optimized<int64_t>(
495 [&](const int64_t i, const int64_t pos) { dst[pos] = GetFunc(data_[i]); });
496 }
497 }
498 }
499};
500
501template<typename StructT,
502 typename ElemT,
503 ElemT (*GetFunc)(const StructT &),
504 void (*SetFunc)(StructT &, ElemT)>
505inline constexpr bool
507
508namespace detail {
509
514template<typename T> struct VArrayAnyExtraInfo {
518 const VArrayImpl<T> *(*get_varray)(const void *buffer);
519
520 template<typename StorageT> static constexpr VArrayAnyExtraInfo get()
521 {
522 /* These are the only allowed types in the #Any. */
523 static_assert(
524 std::is_base_of_v<VArrayImpl<T>, StorageT> ||
525 is_same_any_v<StorageT, const VArrayImpl<T> *, std::shared_ptr<const VArrayImpl<T>>>);
526
527 /* Depending on how the virtual array implementation is stored in the #Any, a different
528 * #get_varray function is required. */
529 if constexpr (std::is_base_of_v<VArrayImpl<T>, StorageT>) {
530 return {[](const void *buffer) {
531 return static_cast<const VArrayImpl<T> *>((const StorageT *)buffer);
532 }};
533 }
534 else if constexpr (std::is_same_v<StorageT, const VArrayImpl<T> *>) {
535 return {[](const void *buffer) { return *(const StorageT *)buffer; }};
536 }
537 else if constexpr (std::is_same_v<StorageT, std::shared_ptr<const VArrayImpl<T>>>) {
538 return {[](const void *buffer) { return ((const StorageT *)buffer)->get(); }};
539 }
540 else {
542 return {};
543 }
544 }
545};
546
547} // namespace detail
548
556template<typename T> class VArrayCommon {
557 protected:
566
570 const VArrayImpl<T> *impl_ = nullptr;
579
580 VArrayCommon() = default;
581
584 {
585 impl_ = this->impl_from_storage();
586 }
587
589 VArrayCommon(VArrayCommon &&other) noexcept : storage_(std::move(other.storage_))
590 {
591 impl_ = this->impl_from_storage();
592 other.storage_.reset();
593 other.impl_ = nullptr;
594 }
595
600 VArrayCommon(const VArrayImpl<T> *impl) : impl_(impl)
601 {
602 storage_ = impl_;
603 }
604
608 VArrayCommon(std::shared_ptr<const VArrayImpl<T>> impl) : impl_(impl.get())
609 {
610 if (impl) {
611 storage_ = std::move(impl);
612 }
613 }
614
618 template<typename ImplT, typename... Args> void emplace(Args &&...args)
619 {
620 /* Make sure we are actually constructing a #VArrayImpl. */
621 static_assert(std::is_base_of_v<VArrayImpl<T>, ImplT>);
622 if constexpr (std::is_copy_constructible_v<ImplT> && Storage::template is_inline_v<ImplT>) {
623 /* Only inline the implementation when it is copyable and when it fits into the inline
624 * buffer of the storage. */
625 impl_ = &storage_.template emplace<ImplT>(std::forward<Args>(args)...);
626 }
627 else {
628 /* If it can't be inlined, create a new #std::shared_ptr instead and store that in the
629 * storage. */
630 std::shared_ptr<const VArrayImpl<T>> ptr = std::make_shared<ImplT>(
631 std::forward<Args>(args)...);
632 impl_ = &*ptr;
633 storage_ = std::move(ptr);
634 }
635 }
636
638 void copy_from(const VArrayCommon &other)
639 {
640 if (this == &other) {
641 return;
642 }
643 storage_ = other.storage_;
644 impl_ = this->impl_from_storage();
645 }
646
648 void move_from(VArrayCommon &&other) noexcept
649 {
650 if (this == &other) {
651 return;
652 }
653 storage_ = std::move(other.storage_);
654 impl_ = this->impl_from_storage();
655 other.storage_.reset();
656 other.impl_ = nullptr;
657 }
658
662 {
663 if (!storage_.has_value()) {
664 return nullptr;
665 }
666 return storage_.extra_info().get_varray(storage_.get());
667 }
668
669 public:
671 operator bool() const
672 {
673 return impl_ != nullptr;
674 }
675
681 T operator[](const int64_t index) const
682 {
683 BLI_assert(*this);
684 BLI_assert(index >= 0);
685 BLI_assert(index < this->size());
686 return impl_->get(index);
687 }
688
693 T get(const int64_t index) const
694 {
695 return (*this)[index];
696 }
697
702 int64_t size() const
703 {
704 if (impl_ == nullptr) {
705 return 0;
706 }
707 return impl_->size();
708 }
709
712 T first() const
713 {
714 return (*this)[0];
715 }
716
719 T last(const int64_t n = 0) const
720 {
721 return (*this)[this->size() - 1 - n];
722 }
723
725 bool is_empty() const
726 {
727 return this->size() == 0;
728 }
729
731 {
732 return IndexRange(this->size());
733 }
734
736 {
737 BLI_assert(*this);
738 return impl_->common_info();
739 }
740
742 bool is_span() const
743 {
744 BLI_assert(*this);
745 const CommonVArrayInfo info = impl_->common_info();
746 return info.type == CommonVArrayInfo::Type::Span;
747 }
748
754 {
755 BLI_assert(this->is_span());
756 const CommonVArrayInfo info = impl_->common_info();
757 return Span<T>(static_cast<const T *>(info.data), this->size());
758 }
759
761 bool is_single() const
762 {
763 BLI_assert(*this);
764 const CommonVArrayInfo info = impl_->common_info();
766 }
767
773 {
774 BLI_assert(this->is_single());
775 const CommonVArrayInfo info = impl_->common_info();
776 return *static_cast<const T *>(info.data);
777 }
778
782 std::optional<T> get_if_single() const
783 {
784 const CommonVArrayInfo info = impl_->common_info();
786 return std::nullopt;
787 }
788 return *static_cast<const T *>(info.data);
789 }
790
792 void materialize(MutableSpan<T> r_span) const
793 {
794 this->materialize(IndexMask(this->size()), r_span);
795 }
796
798 void materialize(const IndexMask &mask, MutableSpan<T> r_span) const
799 {
800 BLI_assert(mask.min_array_size() <= this->size());
801 impl_->materialize(mask, r_span.data(), false);
802 }
803
805 {
806 this->materialize_to_uninitialized(IndexMask(this->size()), r_span);
807 }
808
810 {
811 BLI_assert(mask.min_array_size() <= this->size());
812 impl_->materialize(mask, r_span.data(), true);
813 }
814
817 {
818 impl_->materialize_compressed(mask, r_span.data(), false);
819 }
820
822 {
823 impl_->materialize_compressed(mask, r_span.data(), true);
824 }
825
827 bool try_assign_GVArray(GVArray &varray) const
828 {
829 return impl_->try_assign_GVArray(varray);
830 }
831
833 {
834 return impl_;
835 }
836};
837
838template<typename T> class VMutableArray;
839
/**
 * Tag types that select a specific #VArray constructor overload, so that e.g. the span and
 * single-value constructors cannot be confused.
 */
namespace varray_tag {
struct span {};
struct single_ref {};
struct single {};
}  // namespace varray_tag
852
858template<typename T> class VArray : public VArrayCommon<T> {
859 friend VMutableArray<T>;
860
861 public:
862 VArray() = default;
863 VArray(const VArray &other) = default;
864 VArray(VArray &&other) noexcept = default;
865
866 VArray(const VArrayImpl<T> *impl) : VArrayCommon<T>(impl) {}
867
868 VArray(std::shared_ptr<const VArrayImpl<T>> impl) : VArrayCommon<T>(std::move(impl)) {}
869
871 {
872 this->template emplace<VArrayImpl_For_Span_final<T>>(span);
873 }
874
875 VArray(varray_tag::single /*tag*/, T value, const int64_t size)
876 {
877 this->template emplace<VArrayImpl_For_Single<T>>(std::move(value), size);
878 }
879
883 template<typename ImplT, typename... Args> static VArray from(Args &&...args)
884 {
885 static_assert(std::is_base_of_v<VArrayImpl<T>, ImplT>);
886 VArray varray;
887 varray.template emplace<ImplT>(std::forward<Args>(args)...);
888 return varray;
889 }
890
894 static VArray from_single(T value, const int64_t size)
895 {
896 return VArray(varray_tag::single{}, std::move(value), size);
897 }
898
903 static VArray from_span(Span<T> values)
904 {
905 return VArray(varray_tag::span{}, values);
906 }
907
912 template<typename GetFunc> static VArray from_func(const int64_t size, GetFunc get_func)
913 {
914 return VArray::from<VArrayImpl_For_Func<T, decltype(get_func)>>(size, std::move(get_func));
915 }
916
921 static VArray from_std_func(const int64_t size, std::function<T(int64_t index)> get_func)
922 {
923 return VArray::from_func(size, get_func);
924 }
925
930 template<typename StructT, T (*GetFunc)(const StructT &)>
932 {
933 /* Cast const away, because the virtual array implementation for const and non const derived
934 * spans is shared. */
935 MutableSpan<StructT> span{const_cast<StructT *>(values.data()), values.size()};
937 }
938
944 template<typename ContainerT> static VArray from_container(ContainerT container)
945 {
946 return VArray::from<VArrayImpl_For_ArrayContainer<ContainerT>>(std::move(container));
947 }
948
949 VArray &operator=(const VArray &other)
950 {
951 this->copy_from(other);
952 return *this;
953 }
954
955 VArray &operator=(VArray &&other) noexcept
956 {
957 this->move_from(std::move(other));
958 return *this;
959 }
960};
961
965template<typename T> class VMutableArray : public VArrayCommon<T> {
966 public:
967 VMutableArray() = default;
968 VMutableArray(const VMutableArray &other) = default;
969 VMutableArray(VMutableArray &&other) noexcept = default;
970
972
973 VMutableArray(std::shared_ptr<const VMutableArrayImpl<T>> impl)
974 : VArrayCommon<T>(std::move(impl))
975 {
976 }
977
981 template<typename ImplT, typename... Args> static VMutableArray from(Args &&...args)
982 {
983 static_assert(std::is_base_of_v<VMutableArrayImpl<T>, ImplT>);
984 VMutableArray varray;
985 varray.template emplace<ImplT>(std::forward<Args>(args)...);
986 return varray;
987 }
988
996
1001 template<typename StructT, T (*GetFunc)(const StructT &), void (*SetFunc)(StructT &, T)>
1006
1012 template<typename ContainerT> static VMutableArray from_container(ContainerT container)
1013 {
1015 }
1016
1018 operator VArray<T>() const &
1019 {
1020 VArray<T> varray;
1021 varray.copy_from(*this);
1022 return varray;
1023 }
1024
1026 operator VArray<T>() && noexcept
1027 {
1028 VArray<T> varray;
1029 varray.move_from(std::move(*this));
1030 return varray;
1031 }
1032
1034 {
1035 this->copy_from(other);
1036 return *this;
1037 }
1038
1040 {
1041 this->move_from(std::move(other));
1042 return *this;
1043 }
1044
1050 {
1051 BLI_assert(this->is_span());
1052 const CommonVArrayInfo info = this->get_impl()->common_info();
1053 return MutableSpan<T>(const_cast<T *>(static_cast<const T *>(info.data)), this->size());
1054 }
1055
1059 void set(const int64_t index, T value)
1060 {
1061 BLI_assert(index >= 0);
1062 BLI_assert(index < this->size());
1063 this->get_impl()->set(index, std::move(value));
1064 }
1065
1070 {
1071 BLI_assert(src.size() == this->size());
1072 this->get_impl()->set_all(src);
1073 }
1074
1077 {
1078 return this->get_impl()->try_assign_GVMutableArray(varray);
1079 }
1080
1081 private:
1083 VMutableArrayImpl<T> *get_impl() const
1084 {
1085 /* This cast is valid by the invariant that a #VMutableArray->impl_ is always a
1086 * #VMutableArrayImpl. */
1087 return (VMutableArrayImpl<T> *)this->impl_;
1088 }
1089};
1090
1091template<typename T> static constexpr bool is_VArray_v = false;
1092template<typename T> static constexpr bool is_VArray_v<VArray<T>> = true;
1093
1094template<typename T> static constexpr bool is_VMutableArray_v = false;
1095template<typename T> static constexpr bool is_VMutableArray_v<VMutableArray<T>> = true;
1096
1108template<typename T> class VArraySpan final : public Span<T> {
1109 private:
1110 VArray<T> varray_;
1111 Array<T> owned_data_;
1112
1113 public:
1114 VArraySpan() = default;
1115
1116 VArraySpan(const VArray<T> &varray) : VArraySpan(VArray<T>(varray)) {}
1117
1118 VArraySpan(VArray<T> &&varray) : Span<T>(), varray_(std::move(varray))
1119 {
1120 if (!varray_) {
1121 return;
1122 }
1123 this->size_ = varray_.size();
1124 const CommonVArrayInfo info = varray_.common_info();
1125 if (info.type == CommonVArrayInfo::Type::Span) {
1126 this->data_ = static_cast<const T *>(info.data);
1127 }
1128 else {
1129 owned_data_.~Array();
1130 new (&owned_data_) Array<T>(varray_.size(), NoInitialization{});
1131 varray_.materialize_to_uninitialized(owned_data_);
1132 this->data_ = owned_data_.data();
1133 }
1134 }
1135
1137 : varray_(std::move(other.varray_)), owned_data_(std::move(other.owned_data_))
1138 {
1139 if (!varray_) {
1140 return;
1141 }
1142 this->size_ = varray_.size();
1143 const CommonVArrayInfo info = varray_.common_info();
1144 if (info.type == CommonVArrayInfo::Type::Span) {
1145 this->data_ = static_cast<const T *>(info.data);
1146 }
1147 else {
1148 this->data_ = owned_data_.data();
1149 }
1150 other.data_ = nullptr;
1151 other.size_ = 0;
1152 }
1153
1155 {
1156 if (this == &other) {
1157 return *this;
1158 }
1159 std::destroy_at(this);
1160 new (this) VArraySpan(std::move(other));
1161 return *this;
1162 }
1163};
1164
namespace internal {
/** Prints a debug warning when a #MutableVArraySpan is destructed without #save being called. */
void print_mutable_varray_span_warning();
}  // namespace internal
1168
1176template<typename T> class MutableVArraySpan final : public MutableSpan<T> {
1177 private:
1178 VMutableArray<T> varray_;
1179 Array<T> owned_data_;
1180 bool save_has_been_called_ = false;
1181 bool show_not_saved_warning_ = true;
1182
1183 public:
1185
1186 /* Create a span for any virtual array. This is cheap when the virtual array is a span itself. If
1187 * not, a new array has to be allocated as a wrapper for the underlying virtual array. */
1188 MutableVArraySpan(VMutableArray<T> varray, const bool copy_values_to_span = true)
1189 : MutableSpan<T>(), varray_(std::move(varray))
1190 {
1191 if (!varray_) {
1192 return;
1193 }
1194
1195 this->size_ = varray_.size();
1196 const CommonVArrayInfo info = varray_.common_info();
1197 if (info.type == CommonVArrayInfo::Type::Span) {
1198 this->data_ = const_cast<T *>(static_cast<const T *>(info.data));
1199 }
1200 else {
1201 if (copy_values_to_span) {
1202 owned_data_.~Array();
1203 new (&owned_data_) Array<T>(varray_.size(), NoInitialization{});
1204 varray_.materialize_to_uninitialized(owned_data_);
1205 }
1206 else {
1207 owned_data_.reinitialize(varray_.size());
1208 }
1209 this->data_ = owned_data_.data();
1210 }
1211 }
1212
1214 : varray_(std::move(other.varray_)),
1215 owned_data_(std::move(other.owned_data_)),
1216 show_not_saved_warning_(other.show_not_saved_warning_)
1217 {
1218 if (!varray_) {
1219 return;
1220 }
1221
1222 this->size_ = varray_.size();
1223 const CommonVArrayInfo info = varray_.common_info();
1224 if (info.type == CommonVArrayInfo::Type::Span) {
1225 this->data_ = static_cast<T *>(const_cast<void *>(info.data));
1226 }
1227 else {
1228 this->data_ = owned_data_.data();
1229 }
1230 other.data_ = nullptr;
1231 other.size_ = 0;
1232 }
1233
1235 {
1236 if (varray_) {
1237 if (show_not_saved_warning_) {
1238 if (!save_has_been_called_) {
1240 }
1241 }
1242 }
1243 }
1244
1246 {
1247 if (this == &other) {
1248 return *this;
1249 }
1250 std::destroy_at(this);
1251 new (this) MutableVArraySpan(std::move(other));
1252 return *this;
1253 }
1254
1256 {
1257 return varray_;
1258 }
1259
1260 /* Write back all values from a temporary allocated array to the underlying virtual array. */
1261 void save()
1262 {
1263 save_has_been_called_ = true;
1264 if (this->data_ != owned_data_.data()) {
1265 return;
1266 }
1267 varray_.set_all(owned_data_);
1268 }
1269
1271 {
1272 show_not_saved_warning_ = false;
1273 }
1274};
1275
1276template<typename T> class SingleAsSpan {
1277 private:
1278 T value_;
1279 int64_t size_;
1280
1281 public:
1282 SingleAsSpan(T value, int64_t size) : value_(std::move(value)), size_(size)
1283 {
1284 BLI_assert(size_ >= 0);
1285 }
1286
1287 SingleAsSpan(const VArray<T> &varray) : SingleAsSpan(varray.get_internal_single(), varray.size())
1288 {
1289 }
1290
1291 const T &operator[](const int64_t index) const
1292 {
1293 BLI_assert(index >= 0);
1294 BLI_assert(index < size_);
1295 UNUSED_VARS_NDEBUG(index);
1296 return value_;
1297 }
1298};
1299
1300template<typename T> class VArrayRef {
1301 private:
1302 const VArray<T> &ref_;
1303
1304 public:
1305 VArrayRef(const VArray<T> &ref) : ref_(ref) {}
1306
1307 T operator[](const int64_t index) const
1308 {
1309 return ref_[index];
1310 }
1311};
1312
1314template<typename T, bool UseSingle, bool UseSpan> struct VArrayDevirtualizer {
1316
1317 template<typename Fn> bool devirtualize(const Fn &fn) const
1318 {
1319 const CommonVArrayInfo info = this->varray.common_info();
1320 const int64_t size = this->varray.size();
1321 if constexpr (UseSingle) {
1323 return fn(SingleAsSpan<T>(*static_cast<const T *>(info.data), size));
1324 }
1325 }
1326 if constexpr (UseSpan) {
1327 if (info.type == CommonVArrayInfo::Type::Span) {
1328 return fn(Span<T>(static_cast<const T *>(info.data), size));
1329 }
1330 }
1331 return false;
1332 }
1333};
1334
1343template<typename T, typename Func>
1344inline void devirtualize_varray(const VArray<T> &varray, const Func &func, bool enable = true)
1345{
1346 if (enable) {
1348 std::make_tuple(VArrayDevirtualizer<T, true, true>{varray}), func))
1349 {
1350 return;
1351 }
1352 }
1353 func(VArrayRef<T>(varray));
1354}
1355
1361template<typename T1, typename T2, typename Func>
1362inline void devirtualize_varray2(const VArray<T1> &varray1,
1363 const VArray<T2> &varray2,
1364 const Func &func,
1365 bool enable = true)
1366{
1367 if (enable) {
1369 std::make_tuple(VArrayDevirtualizer<T1, true, true>{varray1},
1371 func))
1372 {
1373 return;
1374 }
1375 }
1376 func(VArrayRef<T1>(varray1), VArrayRef<T2>(varray2));
1377}
1378
1379} // namespace blender
#define BLI_assert_unreachable()
Definition BLI_assert.h:93
#define BLI_assert(a)
Definition BLI_assert.h:46
#define final(a, b, c)
Definition BLI_hash.h:19
#define UNUSED_VARS_NDEBUG(...)
BMesh const char void * data
long long int int64_t
static DBVT_INLINE btScalar size(const btDbvtVolume &a)
Definition btDbvt.cpp:52
constexpr MutableSpan()=default
constexpr T * data() const
Definition BLI_span.hh:539
const VMutableArray< T > & varray() const
MutableVArraySpan & operator=(MutableVArraySpan &&other)
MutableVArraySpan(MutableVArraySpan &&other)
MutableVArraySpan(VMutableArray< T > varray, const bool copy_values_to_span=true)
SingleAsSpan(const VArray< T > &varray)
const T & operator[](const int64_t index) const
SingleAsSpan(T value, int64_t size)
constexpr const T * data() const
Definition BLI_span.hh:215
constexpr Span()=default
int64_t size_
Definition BLI_span.hh:86
constexpr int64_t size() const
Definition BLI_span.hh:252
std::optional< T > get_if_single() const
IndexRange index_range() const
VArrayCommon(std::shared_ptr< const VArrayImpl< T > > impl)
void materialize(MutableSpan< T > r_span) const
T operator[](const int64_t index) const
T last(const int64_t n=0) const
VArrayCommon(const VArrayImpl< T > *impl)
void copy_from(const VArrayCommon &other)
void materialize_compressed_to_uninitialized(const IndexMask &mask, MutableSpan< T > r_span) const
VArrayCommon(const VArrayCommon &other)
const VArrayImpl< T > * impl_from_storage() const
const VArrayImpl< T > * impl_
void materialize(const IndexMask &mask, MutableSpan< T > r_span) const
VArrayCommon(VArrayCommon &&other) noexcept
void move_from(VArrayCommon &&other) noexcept
T get(const int64_t index) const
void materialize_to_uninitialized(const IndexMask &mask, MutableSpan< T > r_span) const
const VArrayImpl< T > * get_implementation() const
bool try_assign_GVArray(GVArray &varray) const
void materialize_to_uninitialized(MutableSpan< T > r_span) const
void materialize_compressed(const IndexMask &mask, MutableSpan< T > r_span) const
CommonVArrayInfo common_info() const
Span< T > get_internal_span() const
void emplace(Args &&...args)
Any< blender::detail::VArrayAnyExtraInfo< T >, 24, 8 > Storage
VArrayImpl_For_DerivedSpan(const MutableSpan< StructT > data)
VArrayImpl_For_Func(const int64_t size, GetFunc get_func)
void materialize_compressed(const IndexMask &mask, T *dst, const bool dst_is_uninitialized) const override
VArrayImpl_For_Single(T value, const int64_t size)
void materialize(const IndexMask &mask, T *dst, const bool dst_is_uninitialized) const override
CommonVArrayInfo common_info() const override
T get(const int64_t) const override
VArrayImpl_For_Span_final(const Span< T > data)
CommonVArrayInfo common_info() const override
void set(const int64_t index, T value) final
VArrayImpl_For_Span(const int64_t size)
void materialize(const IndexMask &mask, T *dst, const bool dst_is_uninitialized) const override
T get(const int64_t index) const final
VArrayImpl_For_Span(const MutableSpan< T > data)
void materialize_compressed(const IndexMask &mask, T *dst, const bool dst_is_uninitialized) const override
virtual bool try_assign_GVArray(GVArray &) const
virtual void materialize(const IndexMask &mask, T *dst, const bool dst_is_uninitialized) const
virtual CommonVArrayInfo common_info() const
virtual void materialize_compressed(const IndexMask &mask, T *dst, const bool dst_is_uninitialized) const
virtual T get(int64_t index) const =0
VArrayImpl(const int64_t size)
virtual ~VArrayImpl()=default
VArrayRef(const VArray< T > &ref)
T operator[](const int64_t index) const
VArraySpan(VArray< T > &&varray)
VArraySpan(VArraySpan &&other)
VArraySpan(const VArray< T > &varray)
VArraySpan & operator=(VArraySpan &&other)
static VArray from_derived_span(Span< StructT > values)
VArray(const VArrayImpl< T > *impl)
VArray(const VArray &other)=default
static VArray from(Args &&...args)
VArray(varray_tag::single, T value, const int64_t size)
static VArray from_std_func(const int64_t size, std::function< T(int64_t index)> get_func)
VArray(VArray &&other) noexcept=default
static VArray from_single(T value, const int64_t size)
VArray & operator=(const VArray &other)
static VArray from_func(const int64_t size, GetFunc get_func)
VArray(varray_tag::span, Span< T > span)
VArray(std::shared_ptr< const VArrayImpl< T > > impl)
static VArray from_span(Span< T > values)
static VArray from_container(ContainerT container)
VArray & operator=(VArray &&other) noexcept
VArray()=default
virtual void set(int64_t index, T value)=0
virtual bool try_assign_GVMutableArray(GVMutableArray &) const
virtual void set_all(Span< T > src)
static VMutableArray from_derived_span(MutableSpan< StructT > values)
void set(const int64_t index, T value)
VMutableArray(std::shared_ptr< const VMutableArrayImpl< T > > impl)
static VMutableArray from_container(ContainerT container)
VMutableArray & operator=(const VMutableArray &other)
static VMutableArray from(Args &&...args)
bool try_assign_GVMutableArray(GVMutableArray &varray) const
VMutableArray & operator=(VMutableArray &&other) noexcept
VMutableArray(VMutableArray &&other) noexcept=default
VMutableArray(const VMutableArray &other)=default
MutableSpan< T > get_internal_span() const
VMutableArray(const VMutableArrayImpl< T > *impl)
static VMutableArray from_span(MutableSpan< T > values)
void set_all(Span< T > src)
uint pos
ccl_device_inline float2 mask(const MaskType mask, const float2 a)
#define T
void print_mutable_varray_span_warning()
constexpr bool is_same_any_v
void devirtualize_varray(const VArray< T > &varray, const Func &func, bool enable=true)
void devirtualize_varray2(const VArray< T1 > &varray1, const VArray< T2 > &varray2, const Func &func, bool enable=true)
static constexpr bool is_VArray_v
void initialized_fill_n(T *dst, int64_t n, const T &value)
bool call_with_devirtualized_parameters(const std::tuple< Devirtualizers... > &devis, const Fn &fn)
static constexpr bool is_VMutableArray_v
void uninitialized_fill_n(T *dst, int64_t n, const T &value)
void initialized_copy_n(const T *src, int64_t n, T *dst)
constexpr bool is_trivial_extended_v
CommonVArrayInfo(const Type _type, const bool _may_have_ownership, const void *_data)
bool devirtualize(const Fn &fn) const
static constexpr VArrayAnyExtraInfo get()
i
Definition text_draw.cc:230
PointerRNA * ptr
Definition wm_files.cc:4238