Blender V5.0
sculpt_undo.cc
Go to the documentation of this file.
1/* SPDX-FileCopyrightText: 2006 by Nicholas Bishop. All rights reserved.
2 *
3 * SPDX-License-Identifier: GPL-2.0-or-later */
4
25#include "sculpt_undo.hh"
26
27#include <mutex>
28#include <zstd.h>
29
30#include "CLG_log.h"
31
32#include "BLI_array.hh"
34#include "BLI_compression.hh"
36#include "BLI_listbase.h"
37#include "BLI_map.hh"
38#include "BLI_memory_counter.hh"
39#include "BLI_string_utf8.h"
40#include "BLI_task.h"
41#include "BLI_utildefines.h"
42#include "BLI_vector.hh"
43
44#include "DNA_key_types.h"
45#include "DNA_object_types.h"
46#include "DNA_scene_types.h"
47#include "DNA_screen_types.h"
48
49#include "BKE_attribute.hh"
51#include "BKE_ccg.hh"
52#include "BKE_context.hh"
53#include "BKE_customdata.hh"
54#include "BKE_global.hh"
55#include "BKE_key.hh"
56#include "BKE_layer.hh"
57#include "BKE_main.hh"
58#include "BKE_mesh.hh"
59#include "BKE_multires.hh"
60#include "BKE_object.hh"
61#include "BKE_paint.hh"
62#include "BKE_paint_types.hh"
63#include "BKE_scene.hh"
64#include "BKE_subdiv_ccg.hh"
65#include "BKE_undo_system.hh"
66
67/* TODO(sergey): Ideally should be no direct call to such low level things. */
68#include "BKE_subdiv_eval.hh"
69
70#include "DEG_depsgraph.hh"
71
72#include "WM_api.hh"
73#include "WM_types.hh"
74
75#include "ED_geometry.hh"
76#include "ED_object.hh"
77#include "ED_sculpt.hh"
78#include "ED_undo.hh"
79
80#include "bmesh.hh"
81#include "mesh_brush_common.hh"
82#include "paint_hide.hh"
83#include "sculpt_color.hh"
84#include "sculpt_dyntopo.hh"
85#include "sculpt_face_set.hh"
86#include "sculpt_intern.hh"
87
88// #define DEBUG_TIME
89
90#ifdef DEBUG_TIME
91# include "BLI_timeit.hh"
92#endif
93
94static CLG_LogRef LOG = {"undo.sculpt"};
95
97
98/* Implementation of undo system for objects in sculpt mode.
99 *
100 * Each undo step in sculpt mode consists of list of nodes, each node contains a flat array of data
101 * related to the step type.
102 *
103 * Node type used for undo depends on specific operation and active sculpt mode ("regular" or
104 * dynamic topology).
105 *
106 * Regular sculpt brushes will use Position, HideVert, HideFace, Mask, Face Set nodes. These
107 * nodes are created for every BVH node which is affected by the brush. The undo push for the node
108 * happens BEFORE modifications. This makes the operation undo to work in the following way: for
109 * every node in the undo step swap happens between node in the undo stack and the corresponding
110 * value in the BVH. This is how redo is possible after undo.
111 *
112 * The COORDS, HIDDEN or MASK type of nodes contains arrays of the corresponding values.
113 *
114 * Operations like Symmetrize are using GEOMETRY type of nodes which pushes the entire state of the
115 * mesh to the undo stack. This node contains all CustomData layers.
116 *
117 * The tricky aspect of this undo node type is that it stores mesh before and after modification.
118 * This allows the undo system to both undo and redo the symmetrize operation within the
119 * pre-modified-push of other node type behavior, but it uses more memory than it seems it
120 * should.
121 *
122 * The dynamic topology undo nodes are handled somewhat separately from all other ones and the idea
123 * there is to store log of operations: which vertices and faces have been added or removed.
124 *
125 * Begin of dynamic topology sculpting mode have own node type. It contains an entire copy of mesh
126 * since just enabling the dynamic topology mode already does modifications on it.
127 *
128 * End of dynamic topology and symmetrize in this mode are handled in a special manner as well. */
129
130#define NO_ACTIVE_LAYER bke::AttrDomain::Auto
131
165
172
173/* Storage of geometry for the undo node.
174 * Is used as a storage for either original or modified geometry. */
176 /* Is used for sanity check, helping with ensuring that two and only two
177 * geometry pushes happened in the undo stack. */
179
190};
191
192struct Node;
194
195struct StepData {
196 private:
197 bool applied_ = true;
198
199 public:
205
207 std::string object_name;
208
211
212 /* TODO: Combine the three structs into a variant, since they specify data that is only valid
213 * within a single mode. */
214 struct {
215 /* The number of vertices in the entire mesh. */
217 /* The number of face corners in the entire mesh. */
220
221 struct {
227
228 struct {
240
241 /* Geometry at the bmesh enter moment. */
244
247
248 /* Geometry modification operations. */
249 /* Original geometry is stored before the modification and is restored from when undoing. */
251 /* Modified geometry is stored after the modification and is restored from when redoing. */
253
255
266
269 std::unique_ptr<PositionUndoStorage> position_step_storage;
270 size_t undo_size;
271
273 bool needs_undo() const
274 {
275 return applied_;
276 }
277
279 {
280 applied_ = true;
281 }
282
284 {
285 applied_ = false;
286 }
287};
288
289namespace compression {
290
295template<typename T>
297 Vector<std::byte> &filter_buffer,
298 Vector<std::byte> &compress_buffer)
299{
300 filter_buffer.resize(src.size_in_bytes());
301 filter_transpose_delta(reinterpret_cast<const uint8_t *>(src.data()),
302 reinterpret_cast<uint8_t *>(filter_buffer.data()),
303 src.size(),
304 sizeof(T));
305
306 /* Level 3 gives a good balance of compression performance and ratio, and is also used elsewhere
307 * across Blender for calls to #ZSTD_compress. */
308 constexpr int zstd_level = 3;
309 compress_buffer.resize(ZSTD_compressBound(src.size_in_bytes()));
310 const size_t dst_size = ZSTD_compress(compress_buffer.data(),
311 compress_buffer.size(),
312 filter_buffer.data(),
313 filter_buffer.size(),
314 zstd_level);
315 if (ZSTD_isError(dst_size)) {
316 compress_buffer.clear();
317 return;
318 }
319
320 compress_buffer.resize(dst_size);
321}
322
323template<typename T>
325{
326 const unsigned long long dst_size_in_bytes = ZSTD_getFrameContentSize(src.data(), src.size());
327 if (ELEM(dst_size_in_bytes, ZSTD_CONTENTSIZE_ERROR, ZSTD_CONTENTSIZE_UNKNOWN)) {
328 dst.clear();
329 return;
330 }
331
332 buffer.resize(dst_size_in_bytes);
333 const size_t result = ZSTD_decompress(buffer.data(), buffer.size(), src.data(), src.size());
334 if (ZSTD_isError(result)) {
335 dst.clear();
336 return;
337 }
338
339 dst.resize(buffer.size() / sizeof(T));
340 unfilter_transpose_delta(reinterpret_cast<const uint8_t *>(buffer.data()),
341 reinterpret_cast<uint8_t *>(dst.data()),
342 dst.size(),
343 sizeof(T));
344}
345
348
351
352} // namespace compression
353
357
359
360 /* As undo and redo happen, the data in these arrays is swapped (an undo step becomes a redo
361 * step, and vice versa). */
363
365
367 std::atomic<bool> compression_ready = false;
368 std::atomic<bool> compression_started = false;
370
371 explicit PositionUndoStorage(StepData &step_data)
372 : nodes_to_compress(std::move(step_data.nodes)), owner_step_data(&step_data)
373 {
374 this->multires_undo = step_data.grids.grids_num != 0;
375 if (!multires_undo) {
376 this->unique_verts_nums.reinitialize(this->nodes_to_compress.size());
377 for (const int i : this->nodes_to_compress.index_range()) {
378 this->unique_verts_nums[i] = this->nodes_to_compress[i]->unique_verts_num;
379 }
380 }
381
382 this->compression_task_pool = BLI_task_pool_create_background(this, TASK_PRIORITY_LOW);
383 this->compression_started = true;
384
385 BLI_task_pool_push(this->compression_task_pool, compress_fn, this, false, nullptr);
386 }
387
395
397 {
398 if (!compression_ready.load(std::memory_order_acquire)) {
400 }
401 }
402
403 static void compress_fn(TaskPool * /*pool*/, void *task_data)
404 {
405#ifdef DEBUG_TIME
406 SCOPED_TIMER_AVERAGED(__func__);
407#endif
408 auto *data = static_cast<PositionUndoStorage *>(task_data);
409 MutableSpan<std::unique_ptr<Node>> nodes = data->nodes_to_compress;
410 const int nodes_num = nodes.size();
411
413 Array<Array<std::byte>> compressed_data(nodes.size(), NoInitialization());
414 struct CompressLocalData {
415 Vector<std::byte> filtered;
416 Vector<std::byte> compressed;
417 };
420 threading::parallel_for(IndexRange(nodes_num), 1, [&](const IndexRange range) {
421 CompressLocalData &local_data = all_tls.local();
422 for (const int i : range) {
423 const Span<int> indices = data->multires_undo ? nodes[i]->grids : nodes[i]->vert_indices;
424 const Span<float3> positions = !nodes[i]->orig_position.is_empty() ?
425 nodes[i]->orig_position :
426 nodes[i]->position;
427 compression::filter_compress(indices, local_data.filtered, local_data.compressed);
428 new (&compressed_indices[i]) Array<std::byte>(local_data.compressed.as_span());
429 compression::filter_compress(positions, local_data.filtered, local_data.compressed);
430 new (&compressed_data[i]) Array<std::byte>(local_data.compressed.as_span());
431 nodes[i].reset();
432 }
433 });
434 });
435 data->nodes_to_compress.clear_and_shrink();
436
437 size_t memory_size = 0;
438 for (const int i : IndexRange(nodes_num)) {
439 memory_size += compressed_indices[i].as_span().size_in_bytes();
440 memory_size += compressed_data[i].as_span().size_in_bytes();
441 }
442
443 data->compressed_indices = std::move(compressed_indices);
444 data->compressed_positions = std::move(compressed_data);
445 data->owner_step_data->undo_size += memory_size;
446
447 data->compression_ready.store(true, std::memory_order_release);
448 }
449};
450
453 /* NOTE: will split out into list for multi-object-sculpt-mode. */
455
456 /* Active color attribute at the start of this undo step. */
458
459 /* Active color attribute at the end of this undo step. */
461};
462
/* Undo-system callback: report the memory footprint of one sculpt undo step.
 * Returns 0 for steps that are not sculpt steps. */
464{
465 if (step->type != BKE_UNDOSYS_TYPE_SCULPT) {
466 return 0;
467 }
468
469 SculptUndoStep *sculpt_step = reinterpret_cast<SculptUndoStep *>(step);
470
 /* Position data is compressed on a background task; `undo_size` only includes the
  * compressed buffers after that task finishes, so wait for it before reporting. */
471 if (sculpt_step->data.position_step_storage) {
472 sculpt_step->data.position_step_storage->ensure_compression_complete();
473 }
474
475 return sculpt_step->data.undo_size;
476}
477
479{
480 UndoStack *ustack = ED_undo_stack_get();
482 return reinterpret_cast<SculptUndoStep *>(us);
483}
484
486{
487 if (SculptUndoStep *us = get_active_step()) {
488 return &us->data;
489 }
490 return nullptr;
491}
492
493static bool use_multires_undo(const StepData &step_data, const SculptSession &ss)
494{
495 return step_data.grids.grids_num != 0 && ss.subdiv_ccg != nullptr;
496}
497
498static bool topology_matches(const StepData &step_data, const Object &object)
499{
500 const SculptSession &ss = *object.sculpt;
501 if (use_multires_undo(step_data, ss)) {
502 const SubdivCCG &subdiv_ccg = *ss.subdiv_ccg;
503 return subdiv_ccg.grids_num == step_data.grids.grids_num &&
504 subdiv_ccg.grid_size == step_data.grids.grid_size;
505 }
506 const Mesh &mesh = *static_cast<Mesh *>(object.data);
507 return mesh.verts_num == step_data.mesh.verts_num;
508}
509
 /* Return true when `data` is true at any of the given `indices`. */
511{
512 return std::any_of(indices.begin(), indices.end(), [&](const int i) { return data[i]; });
513}
514
516 Depsgraph &depsgraph,
517 const StepData &step_data,
518 Object &object)
519{
520 const SculptSession &ss = *object.sculpt;
521 if (ss.shapekey_active && ss.shapekey_active->name != step_data.active_shape_key_name) {
522 /* Shape key has been changed before calling undo operator. */
523
524 Key *key = BKE_key_from_object(&object);
525 const KeyBlock *kb = key ?
526 BKE_keyblock_find_name(key, step_data.active_shape_key_name.c_str()) :
527 nullptr;
528
529 if (kb) {
530 object.shapenr = BLI_findindex(&key->block, kb) + 1;
531
534 }
535 else {
536 /* Key has been removed -- skip this undo node. */
537 return false;
538 }
539 }
540 return true;
541}
542
543template<typename T>
545{
546 BLI_assert(full.size() == indices.size());
547 for (const int i : indices.index_range()) {
548 std::swap(full[i], indexed[indices[i]]);
549 }
550}
551
552static void restore_position_mesh(Object &object,
553 PositionUndoStorage &undo_data,
554 const MutableSpan<bool> modified_verts)
555{
556#ifdef DEBUG_TIME
557 SCOPED_TIMER_AVERAGED(__func__);
558#endif
559 SculptSession &ss = *object.sculpt;
560 Mesh &mesh = *static_cast<Mesh *>(object.data);
561 MutableSpan<float3> positions = mesh.vert_positions_for_write();
562 std::optional<ShapeKeyData> shape_key_data = ShapeKeyData::from_object(object);
563
564 undo_data.ensure_compression_complete();
565
566 const int nodes_num = undo_data.unique_verts_nums.size();
567
568 struct LocalData {
569 Vector<std::byte> compress_buffer;
570 Vector<std::byte> filter_buffer;
572 Vector<float3> positions;
573 };
575 threading::parallel_for(IndexRange(nodes_num), 1, [&](const IndexRange range) {
576 LocalData &tls = all_tls.local();
577 for (const int i : range) {
579 undo_data.compressed_indices[i], tls.compress_buffer, tls.indices);
580 const int unique_verts_num = undo_data.unique_verts_nums[i];
581 const Span<int> verts = tls.indices.as_span().take_front(unique_verts_num);
582
584 undo_data.compressed_positions[i], tls.compress_buffer, tls.positions);
585 MutableSpan undo_positions = tls.positions.as_mutable_span();
586
587 if (!ss.deform_modifiers_active) {
588 /* When original positions aren't written separately in the undo step, there are no
589 * deform modifiers. Therefore the original and evaluated deform positions will be the
590 * same, and modifying the positions from the original mesh is enough. */
591 swap_indexed_data(undo_positions.take_front(unique_verts_num), verts, positions);
592 }
593 else {
594 /* When original positions are stored in the undo step, undo/redo will cause a reevaluation
595 * of the object. The evaluation will recompute the evaluated positions, so dealing with
596 * them here is unnecessary. */
597 if (shape_key_data) {
598 MutableSpan<float3> active_data = shape_key_data->active_key_data;
599
600 if (!shape_key_data->dependent_keys.is_empty()) {
601 Array<float3, 1024> translations(verts.size());
603 undo_positions.take_front(unique_verts_num), verts, active_data, translations);
604 for (MutableSpan<float3> data : shape_key_data->dependent_keys) {
605 apply_translations(translations, verts, data);
606 }
607 }
608
609 if (shape_key_data->basis_key_active) {
610 /* The basis key positions and the mesh positions are always kept in sync. */
611 scatter_data_mesh(undo_positions.as_span(), verts, positions);
612 }
613 swap_indexed_data(undo_positions.take_front(unique_verts_num), verts, active_data);
614 }
615 else {
616 /* There is a deform modifier, but no shape keys. */
617 swap_indexed_data(undo_positions.take_front(unique_verts_num), verts, positions);
618 }
619 }
620
621 modified_verts.fill_indices(verts, true);
622
623 compression::filter_compress<float3>(undo_positions, tls.filter_buffer, tls.compress_buffer);
624 undo_data.compressed_positions[i] = tls.compress_buffer.as_span();
625 }
626 });
627}
628
629static void restore_position_grids(const MutableSpan<float3> positions,
630 const CCGKey &key,
631 PositionUndoStorage &undo_data,
632 const MutableSpan<bool> modified_grids)
633{
634 const int nodes_num = undo_data.compressed_indices.size();
635
636 struct LocalData {
637 Vector<std::byte> compress_buffer;
638 Vector<std::byte> filter_buffer;
640 Vector<float3> positions;
641 };
643 threading::parallel_for(IndexRange(nodes_num), 1, [&](const IndexRange range) {
644 LocalData &tls = all_tls.local();
645 for (const int i : range) {
647 undo_data.compressed_indices[i], tls.compress_buffer, tls.indices);
648 const Span<int> grids = tls.indices.as_span();
649
651 undo_data.compressed_positions[i], tls.compress_buffer, tls.positions);
652 MutableSpan node_positions = tls.positions.as_mutable_span();
653
654 for (const int i : grids.index_range()) {
655 MutableSpan data = positions.slice(bke::ccg::grid_range(key, grids[i]));
656 MutableSpan undo_data = node_positions.slice(bke::ccg::grid_range(key, i));
657 for (const int offset : data.index_range()) {
658 std::swap(data[offset], undo_data[offset]);
659 }
660 }
661
662 modified_grids.fill_indices(grids, true);
663
664 compression::filter_compress<float3>(node_positions, tls.filter_buffer, tls.compress_buffer);
665 undo_data.compressed_positions[i] = tls.compress_buffer.as_span();
666 }
667 });
668}
669
671 Node &unode,
672 const MutableSpan<bool> modified_verts)
673{
674 Mesh &mesh = *static_cast<Mesh *>(object.data);
675 bke::MutableAttributeAccessor attributes = mesh.attributes_for_write();
677 ".hide_vert", bke::AttrDomain::Point);
678 for (const int i : unode.vert_indices.index_range().take_front(unode.unique_verts_num)) {
679 const int vert = unode.vert_indices[i];
680 if (unode.vert_hidden[i].test() != hide_vert.span[vert]) {
681 unode.vert_hidden[i].set(!unode.vert_hidden[i].test());
682 hide_vert.span[vert] = !hide_vert.span[vert];
683 modified_verts[vert] = true;
684 }
685 }
686 hide_vert.finish();
687}
688
690 Node &unode,
691 const MutableSpan<bool> modified_grids)
692{
693 if (unode.grid_hidden.is_empty()) {
695 return;
696 }
697
698 BitGroupVector<> &grid_hidden = BKE_subdiv_ccg_grid_hidden_ensure(subdiv_ccg);
699 const Span<int> grids = unode.grids;
700 for (const int i : grids.index_range()) {
701 /* Swap the two bit spans. */
703 MutableBoundedBitSpan b = grid_hidden[grids[i]];
704 for (const int j : a.index_range()) {
705 const bool value_a = a[j];
706 const bool value_b = b[j];
707 a[j].set(value_b);
708 b[j].set(value_a);
709 }
710 }
711
712 modified_grids.fill_indices(grids, true);
713}
714
715static void restore_hidden_face(Object &object,
716 Node &unode,
717 const MutableSpan<bool> modified_faces)
718{
719 Mesh &mesh = *static_cast<Mesh *>(object.data);
720 bke::MutableAttributeAccessor attributes = mesh.attributes_for_write();
721 bke::SpanAttributeWriter hide_poly = attributes.lookup_or_add_for_write_span<bool>(
722 ".hide_poly", bke::AttrDomain::Face);
723
724 const Span<int> face_indices = unode.face_indices;
725
726 for (const int i : face_indices.index_range()) {
727 const int face = face_indices[i];
728 if (unode.face_hidden[i].test() != hide_poly.span[face]) {
729 unode.face_hidden[i].set(!unode.face_hidden[i].test());
730 hide_poly.span[face] = !hide_poly.span[face];
731 modified_faces[face] = true;
732 }
733 }
734 hide_poly.finish();
735}
736
737static void restore_color(Object &object,
738 StepData &step_data,
739 const MutableSpan<bool> modified_verts)
740{
741 Mesh &mesh = *static_cast<Mesh *>(object.data);
743
744 for (std::unique_ptr<Node> &unode : step_data.nodes) {
745 if (color_attribute.domain == bke::AttrDomain::Point && !unode->col.is_empty()) {
747 unode->vert_indices.as_span().take_front(unode->unique_verts_num),
748 color_attribute.span,
749 unode->col);
750 }
751 else if (color_attribute.domain == bke::AttrDomain::Corner && !unode->loop_col.is_empty()) {
752 color::swap_gathered_colors(unode->corner_indices, color_attribute.span, unode->loop_col);
753 }
754
755 modified_verts.fill_indices(unode->vert_indices.as_span(), true);
756 }
757
758 color_attribute.finish();
759}
760
/* Swap the mask values saved in the undo node with the mesh's ".sculpt_mask" point
 * attribute, marking vertices whose mask changed in `modified_verts`. Swapping makes the
 * same node usable for the subsequent redo. */
761static void restore_mask_mesh(Object &object, Node &unode, const MutableSpan<bool> modified_verts)
762{
764
765 bke::MutableAttributeAccessor attributes = mesh->attributes_for_write();
767 ".sculpt_mask", bke::AttrDomain::Point);
768
 /* Only the node's unique vertices carry stored mask data. */
769 const Span<int> index = unode.vert_indices.as_span().take_front(unode.unique_verts_num);
770
771 for (const int i : index.index_range()) {
772 const int vert = index[i];
773 if (mask.span[vert] != unode.mask[i]) {
774 std::swap(mask.span[vert], unode.mask[i]);
775 modified_verts[vert] = true;
776 }
777 }
778
779 mask.finish();
780}
781
782static void restore_mask_grids(Object &object, Node &unode, const MutableSpan<bool> modified_grids)
783{
784 SculptSession &ss = *object.sculpt;
785 SubdivCCG *subdiv_ccg = ss.subdiv_ccg;
786 MutableSpan<float> masks = subdiv_ccg->masks;
787
788 const CCGKey key = BKE_subdiv_ccg_key_top_level(*subdiv_ccg);
789
790 const Span<int> grids = unode.grids;
791 MutableSpan<float> undo_mask = unode.mask;
792
793 for (const int i : grids.index_range()) {
794 MutableSpan data = masks.slice(bke::ccg::grid_range(key, grids[i]));
795 MutableSpan undo_data = undo_mask.slice(bke::ccg::grid_range(key, i));
796 for (const int offset : data.index_range()) {
797 std::swap(data[offset], undo_data[offset]);
798 }
799 }
800
801 modified_grids.fill_indices(unode.grids.as_span(), true);
802}
803
/* Swap the face set IDs stored in the undo node with the mesh's current face sets.
 * Faces whose ID changed are flagged in `modified_face_set_faces`.
 * Returns true if any face set was actually modified. */
804static bool restore_face_sets(Object &object,
805 Node &unode,
806 const MutableSpan<bool> modified_face_set_faces)
807{
808 const Span<int> face_indices = unode.face_indices;
809
811 *static_cast<Mesh *>(object.data));
812 bool modified = false;
813 for (const int i : face_indices.index_range()) {
814 const int face = face_indices[i];
 /* Unchanged faces are skipped so the modified flags stay accurate. */
815 if (unode.face_sets[i] == face_sets.span[face]) {
816 continue;
817 }
818 std::swap(unode.face_sets[i], face_sets.span[face]);
819 modified_face_set_faces[face] = true;
820 modified = true;
821 }
822 face_sets.finish();
823 return modified;
824}
825
/* Generic undo/redo for dynamic-topology steps: replay or roll back the BMesh log and
 * flip the step's applied state so the opposite direction works next time. */
826static void bmesh_restore_generic(StepData &step_data, Object &object)
827{
828 SculptSession &ss = *object.sculpt;
829 if (step_data.needs_undo()) {
830 BM_log_undo(ss.bm, ss.bm_log);
831 step_data.tag_needs_redo();
832 }
833 else {
834 BM_log_redo(ss.bm, ss.bm_log);
835 step_data.tag_needs_undo();
836 }
837
 /* Mask-only steps refresh the mask data on all BVH leaf nodes. */
838 if (step_data.type == Type::Mask) {
839 bke::pbvh::Tree &pbvh = *bke::object::pbvh_get(object);
840 IndexMaskMemory memory;
841 const IndexMask node_mask = bke::pbvh::all_leaf_nodes(pbvh, memory);
842 pbvh.tag_masks_changed(node_mask);
843 bke::pbvh::update_mask_bmesh(*ss.bm, node_mask, pbvh);
844 }
845 else {
849 }
850}
851
852/* Create empty sculpt BMesh and enable logging. */
853static void bmesh_enable(Object &object, const StepData &step_data)
854{
855 SculptSession &ss = *object.sculpt;
856 Mesh *mesh = static_cast<Mesh *>(object.data);
857
860
861 /* Create empty BMesh and enable logging. */
862 BMeshCreateParams bmesh_create_params{};
863 bmesh_create_params.use_toolflags = false;
864
865 ss.bm = BM_mesh_create(&bm_mesh_allocsize_default, &bmesh_create_params);
866 BM_data_layer_add_named(ss.bm, &ss.bm->vdata, CD_PROP_FLOAT, ".sculpt_mask");
867
869
870 /* Restore the BMLog using saved entries. */
872}
873
874static void bmesh_handle_dyntopo_begin(bContext *C, StepData &step_data, Object &object)
875{
876 if (step_data.needs_undo()) {
877 dyntopo::disable(C, &step_data);
878 step_data.tag_needs_redo();
879 }
880 else /* needs_redo */ {
881 SculptSession &ss = *object.sculpt;
882 bmesh_enable(object, step_data);
883
884 /* Restore the mesh from the first log entry. */
885 BM_log_redo(ss.bm, ss.bm_log);
886
887 step_data.tag_needs_undo();
888 }
889}
890
891static void bmesh_handle_dyntopo_end(bContext *C, StepData &step_data, Object &object)
892{
893 if (step_data.needs_undo()) {
894 SculptSession &ss = *object.sculpt;
895 bmesh_enable(object, step_data);
896
897 /* Restore the mesh from the last log entry. */
898 BM_log_undo(ss.bm, ss.bm_log);
899
900 step_data.tag_needs_redo();
901 }
902 else /* needs_redo */ {
903 /* Disable dynamic topology sculpting. */
904 dyntopo::disable(C, nullptr);
905 step_data.tag_needs_undo();
906 }
907}
908
910{
911 const Mesh *mesh = static_cast<const Mesh *>(object.data);
912
913 BLI_assert(!geometry->is_initialized);
914 geometry->is_initialized = true;
915
917 &mesh->vert_data, &geometry->vert_data, CD_MASK_MESH.vmask, mesh->verts_num);
919 &mesh->edge_data, &geometry->edge_data, CD_MASK_MESH.emask, mesh->edges_num);
921 &mesh->corner_data, &geometry->corner_data, CD_MASK_MESH.lmask, mesh->corners_num);
923 &mesh->face_data, &geometry->face_data, CD_MASK_MESH.pmask, mesh->faces_num);
924 implicit_sharing::copy_shared_pointer(mesh->face_offset_indices,
925 mesh->runtime->face_offsets_sharing_info,
926 &geometry->face_offset_indices,
927 &geometry->face_offsets_sharing_info);
928
929 geometry->verts_num = mesh->verts_num;
930 geometry->edges_num = mesh->edges_num;
931 geometry->corners_num = mesh->corners_num;
932 geometry->faces_num = mesh->faces_num;
933}
934
936{
937 BLI_assert(geometry->is_initialized);
938
940
941 mesh->verts_num = geometry->verts_num;
942 mesh->edges_num = geometry->edges_num;
943 mesh->corners_num = geometry->corners_num;
944 mesh->faces_num = geometry->faces_num;
945 mesh->totface_legacy = 0;
946
948 &geometry->vert_data, &mesh->vert_data, CD_MASK_MESH.vmask, geometry->verts_num);
950 &geometry->edge_data, &mesh->edge_data, CD_MASK_MESH.emask, geometry->edges_num);
952 &geometry->corner_data, &mesh->corner_data, CD_MASK_MESH.lmask, geometry->corners_num);
954 &geometry->face_data, &mesh->face_data, CD_MASK_MESH.pmask, geometry->faces_num);
956 geometry->face_offsets_sharing_info,
957 &mesh->face_offset_indices,
958 &mesh->runtime->face_offsets_sharing_info);
959}
960
962{
963 CustomData_free(&geometry->vert_data);
964 CustomData_free(&geometry->edge_data);
965 CustomData_free(&geometry->corner_data);
966 CustomData_free(&geometry->face_data);
967 implicit_sharing::free_shared_data(&geometry->face_offset_indices,
968 &geometry->face_offsets_sharing_info);
969}
970
971static void restore_geometry(StepData &step_data, Object &object)
972{
975
976 Mesh *mesh = static_cast<Mesh *>(object.data);
977
978 if (step_data.needs_undo()) {
980 step_data.tag_needs_redo();
981 }
982 else {
984 step_data.tag_needs_undo();
985 }
986}
987
988/* Handle all dynamic-topology updates
989 *
990 * Returns true if this was a dynamic-topology undo step, otherwise
991 * returns false to indicate the non-dyntopo code should run. */
992static int bmesh_restore(bContext *C, Depsgraph &depsgraph, StepData &step_data, Object &object)
993{
994 SculptSession &ss = *object.sculpt;
995 switch (step_data.type) {
998 bmesh_handle_dyntopo_begin(C, step_data, object);
999 return true;
1000
1001 case Type::DyntopoEnd:
1003 bmesh_handle_dyntopo_end(C, step_data, object);
1004 return true;
1005 default:
1006 if (ss.bm_log) {
1008 bmesh_restore_generic(step_data, object);
1009 return true;
1010 }
1011 break;
1012 }
1013
1014 return false;
1015}
1016
1021
1023{
1024 return get_step_data()->bmesh.bm_entry;
1025}
1026
1027/* Geometry updates (such as Apply Base, for example) will re-evaluate the object and refine its
1028 * Subdiv descriptor. Upon undo it is required that mesh, grids, and subdiv all stay consistent
1029 * with each other. This means that when geometry coordinate changes the undo should refine the
1030 * subdiv to the new coarse mesh coordinates. Tricky part is: this needs to happen without using
1031 * dependency graph tag: tagging object for geometry update will either loose sculpted data from
1032 * the sculpt grids, or will wrongly "commit" them to the CD_MDISPS.
1033 *
1034 * So what we do instead is do minimum object evaluation to get base mesh coordinates for the
1035 * multires modifier input. While this is expensive, it is less expensive than dependency graph
1036 * evaluation and is only happening when geometry coordinates changes on undo.
1037 *
1038 * Note that the dependency graph is ensured to be evaluated prior to the undo step is decoded,
1039 * so if the object's modifier stack references other object it is all fine. */
1040static void refine_subdiv(Depsgraph *depsgraph,
1041 const SculptSession &ss,
1042 Object &object,
1043 bke::subdiv::Subdiv *subdiv)
1044{
1046 depsgraph, &object, ss.multires.modifier);
1047
1049 subdiv, static_cast<const Mesh *>(object.data), deformed_verts);
1050}
1051
1052static void restore_list(bContext *C, Depsgraph *depsgraph, StepData &step_data)
1053{
1054 Scene *scene = CTX_data_scene(C);
1055 ViewLayer *view_layer = CTX_data_view_layer(C);
1057 BKE_view_layer_synced_ensure(scene, view_layer);
1058 Object &object = *BKE_view_layer_active_object_get(view_layer);
1059 if (step_data.object_name != object.id.name) {
1060 return;
1061 }
1062 SculptSession &ss = *object.sculpt;
1063 bke::pbvh::Tree &pbvh = *bke::object::pbvh_get(object);
1064
1065 /* Restore pivot. */
1066 ss.pivot_pos = step_data.pivot_pos;
1067 ss.pivot_rot = step_data.pivot_rot;
1068
1069 if (bmesh_restore(C, *depsgraph, step_data, object)) {
1070 return;
1071 }
1072
1073 /* Switching to sculpt mode does not push a particular type.
1074 * See #124484. */
1075 /* TODO: Add explicit type for switching into Sculpt Mode. */
1076 if (step_data.type == Type::None && step_data.nodes.is_empty()) {
1077 return;
1078 }
1079
1080 /* Adding multires via the `subdivision_set` operator results in the subsequent undo step
1081 * not correctly performing a global undo step; we exit early here to avoid crashing.
1082 * See: #131478 */
1083 const bool multires_undo_step = use_multires_undo(step_data, ss);
1084 if ((multires_undo_step && pbvh.type() != bke::pbvh::Type::Grids) ||
1085 (!multires_undo_step && pbvh.type() != bke::pbvh::Type::Mesh))
1086 {
1087 CLOG_WARN(&LOG,
1088 "Undo step type and sculpt geometry type do not match: skipping undo state restore");
1089 return;
1090 }
1091
1092 const bool tag_update = ID_REAL_USERS(object.data) > 1 ||
1093 !BKE_sculptsession_use_pbvh_draw(&object, rv3d) || ss.shapekey_active ||
1095
1096 switch (step_data.type) {
1097 case Type::None: {
1099 break;
1100 }
1101 case Type::Position: {
1102 IndexMaskMemory memory;
1103 const IndexMask node_mask = bke::pbvh::all_leaf_nodes(pbvh, memory);
1104
1106 if (!topology_matches(step_data, object)) {
1107 return;
1108 }
1109
1110 if (use_multires_undo(step_data, ss)) {
1112 SubdivCCG &subdiv_ccg = *ss.subdiv_ccg;
1113 const CCGKey key = BKE_subdiv_ccg_key_top_level(subdiv_ccg);
1114
1115 Array<bool> modified_grids(subdiv_ccg.grids_num, false);
1117 subdiv_ccg.positions, key, *step_data.position_step_storage, modified_grids);
1118
1119 const IndexMask changed_nodes = IndexMask::from_predicate(
1120 node_mask, GrainSize(1), memory, [&](const int i) {
1121 return indices_contain_true(modified_grids, nodes[i].grids());
1122 });
1123 pbvh.tag_positions_changed(changed_nodes);
1125 }
1126 else {
1128 if (!restore_active_shape_key(*C, *depsgraph, step_data, object)) {
1129 return;
1130 }
1131 const Mesh &mesh = *static_cast<const Mesh *>(object.data);
1132 Array<bool> modified_verts(mesh.verts_num, false);
1133 restore_position_mesh(object, *step_data.position_step_storage, modified_verts);
1134
1135 const IndexMask changed_nodes = IndexMask::from_predicate(
1136 node_mask, GrainSize(1), memory, [&](const int i) {
1137 return indices_contain_true(modified_verts, nodes[i].all_verts());
1138 });
1139 pbvh.tag_positions_changed(changed_nodes);
1140 }
1141
1142 if (tag_update) {
1143 Mesh &mesh = *static_cast<Mesh *>(object.data);
1144 mesh.tag_positions_changed();
1146 }
1147 else {
1148 Mesh &mesh = *static_cast<Mesh *>(object.data);
1149 /* The BVH normals recalculation that will happen later (caused by
1150 * `pbvh.tag_positions_changed`) won't recalculate the face corner normals.
1151 * We need to manually clear that cache. */
1152 mesh.runtime->corner_normals_cache.tag_dirty();
1153 }
1154 pbvh.update_bounds(*depsgraph, object);
1156 break;
1157 }
1158 case Type::HideVert: {
1159 IndexMaskMemory memory;
1160 const IndexMask node_mask = bke::pbvh::all_leaf_nodes(pbvh, memory);
1161
1163 if (!topology_matches(step_data, object)) {
1164 return;
1165 }
1166
1167 if (use_multires_undo(step_data, ss)) {
1169 SubdivCCG &subdiv_ccg = *ss.subdiv_ccg;
1170 Array<bool> modified_grids(subdiv_ccg.grids_num, false);
1171 for (std::unique_ptr<Node> &unode : step_data.nodes) {
1172 restore_vert_visibility_grids(subdiv_ccg, *unode, modified_grids);
1173 }
1174 const IndexMask changed_nodes = IndexMask::from_predicate(
1175 node_mask, GrainSize(1), memory, [&](const int i) {
1176 return indices_contain_true(modified_grids, nodes[i].grids());
1177 });
1178 pbvh.tag_visibility_changed(changed_nodes);
1179 }
1180 else {
1182 const Mesh &mesh = *static_cast<const Mesh *>(object.data);
1183 Array<bool> modified_verts(mesh.verts_num, false);
1184 for (std::unique_ptr<Node> &unode : step_data.nodes) {
1185 restore_vert_visibility_mesh(object, *unode, modified_verts);
1186 }
1187 const IndexMask changed_nodes = IndexMask::from_predicate(
1188 node_mask, GrainSize(1), memory, [&](const int i) {
1189 return indices_contain_true(modified_verts, nodes[i].all_verts());
1190 });
1191 pbvh.tag_visibility_changed(changed_nodes);
1192 }
1193
1195 pbvh.update_visibility(object);
1196 if (BKE_sculpt_multires_active(scene, &object)) {
1198 }
1199 break;
1200 }
1201 case Type::HideFace: {
1202 IndexMaskMemory memory;
1203 const IndexMask node_mask = bke::pbvh::all_leaf_nodes(pbvh, memory);
1204
1206 if (!topology_matches(step_data, object)) {
1207 return;
1208 }
1209
1210 const Mesh &mesh = *static_cast<const Mesh *>(object.data);
1211 Array<bool> modified_faces(mesh.faces_num, false);
1212 for (std::unique_ptr<Node> &unode : step_data.nodes) {
1213 restore_hidden_face(object, *unode, modified_faces);
1214 }
1215
1216 if (use_multires_undo(step_data, ss)) {
1218 const SubdivCCG &subdiv_ccg = *ss.subdiv_ccg;
1219 const IndexMask changed_nodes = IndexMask::from_predicate(
1220 node_mask, GrainSize(1), memory, [&](const int i) {
1221 Vector<int> faces_vector;
1223 subdiv_ccg, nodes[i], faces_vector);
1224 return indices_contain_true(modified_faces, faces);
1225 });
1226 pbvh.tag_visibility_changed(changed_nodes);
1227 }
1228 else {
1230 const IndexMask changed_nodes = IndexMask::from_predicate(
1231 node_mask, GrainSize(1), memory, [&](const int i) {
1232 return indices_contain_true(modified_faces, nodes[i].faces());
1233 });
1234 pbvh.tag_visibility_changed(changed_nodes);
1235 }
1236
1238 pbvh.update_visibility(object);
1239 break;
1240 }
1241 case Type::Mask: {
1242 IndexMaskMemory memory;
1243 const IndexMask node_mask = bke::pbvh::all_leaf_nodes(pbvh, memory);
1244
1246 if (!topology_matches(step_data, object)) {
1247 return;
1248 }
1249
1250 if (use_multires_undo(step_data, ss)) {
1252 Array<bool> modified_grids(ss.subdiv_ccg->grids_num, false);
1253 for (std::unique_ptr<Node> &unode : step_data.nodes) {
1254 restore_mask_grids(object, *unode, modified_grids);
1255 }
1256 const IndexMask changed_nodes = IndexMask::from_predicate(
1257 node_mask, GrainSize(1), memory, [&](const int i) {
1258 return indices_contain_true(modified_grids, nodes[i].grids());
1259 });
1260 bke::pbvh::update_mask_grids(*ss.subdiv_ccg, changed_nodes, pbvh);
1261 pbvh.tag_masks_changed(changed_nodes);
1262 }
1263 else {
1265 const Mesh &mesh = *static_cast<const Mesh *>(object.data);
1266 Array<bool> modified_verts(mesh.verts_num, false);
1267 for (std::unique_ptr<Node> &unode : step_data.nodes) {
1268 restore_mask_mesh(object, *unode, modified_verts);
1269 }
1270 const IndexMask changed_nodes = IndexMask::from_predicate(
1271 node_mask, GrainSize(1), memory, [&](const int i) {
1272 return indices_contain_true(modified_verts, nodes[i].all_verts());
1273 });
1274 bke::pbvh::update_mask_mesh(mesh, changed_nodes, pbvh);
1275 pbvh.tag_masks_changed(changed_nodes);
1276 }
1277 break;
1278 }
1279 case Type::FaceSet: {
1280 IndexMaskMemory memory;
1281 const IndexMask node_mask = bke::pbvh::all_leaf_nodes(pbvh, memory);
1282
1284 if (!topology_matches(step_data, object)) {
1285 return;
1286 }
1287
1288 const Mesh &mesh = *static_cast<const Mesh *>(object.data);
1289 Array<bool> modified_faces(mesh.faces_num, false);
1290 for (std::unique_ptr<Node> &unode : step_data.nodes) {
1291 restore_face_sets(object, *unode, modified_faces);
1292 }
1293 if (use_multires_undo(step_data, ss)) {
1295 const SubdivCCG &subdiv_ccg = *ss.subdiv_ccg;
1296 const IndexMask changed_nodes = IndexMask::from_predicate(
1297 node_mask, GrainSize(1), memory, [&](const int i) {
1298 Vector<int> faces_vector;
1300 subdiv_ccg, nodes[i], faces_vector);
1301 return indices_contain_true(modified_faces, faces);
1302 });
1303 pbvh.tag_face_sets_changed(changed_nodes);
1304 }
1305 else {
1307 const IndexMask changed_nodes = IndexMask::from_predicate(
1308 node_mask, GrainSize(1), memory, [&](const int i) {
1309 return indices_contain_true(modified_faces, nodes[i].faces());
1310 });
1311 pbvh.tag_face_sets_changed(changed_nodes);
1312 }
1313 break;
1314 }
1315 case Type::Color: {
1316 IndexMaskMemory memory;
1317 const IndexMask node_mask = bke::pbvh::all_leaf_nodes(pbvh, memory);
1318
1320 if (!topology_matches(step_data, object)) {
1321 return;
1322 }
1323
1325
1326 const Mesh &mesh = *static_cast<const Mesh *>(object.data);
1327 Array<bool> modified_verts(mesh.verts_num, false);
1328 restore_color(object, step_data, modified_verts);
1329 const IndexMask changed_nodes = IndexMask::from_predicate(
1330 node_mask, GrainSize(1), memory, [&](const int i) {
1331 return indices_contain_true(modified_verts, nodes[i].all_verts());
1332 });
1333 pbvh.tag_attribute_changed(changed_nodes, mesh.active_color_attribute);
1334 break;
1335 }
1336 case Type::Geometry: {
1337 BLI_assert(!ss.bm);
1338
1339 restore_geometry(step_data, object);
1341 if (SubdivCCG *subdiv_ccg = ss.subdiv_ccg) {
1342 refine_subdiv(depsgraph, ss, object, subdiv_ccg->subdiv);
1343 }
1344 break;
1345 }
1346 case Type::DyntopoBegin:
1347 case Type::DyntopoEnd:
1348 /* Handled elsewhere. */
1350 break;
1351 }
1352
1354 if (tag_update) {
1356 }
1357}
1358
/* Release all heap data owned by a sculpt undo step. */
1359 static void free_step_data(StepData &step_data)
1360 {
/* The BMesh log entry is owned by this step and is not managed by the StepData
 * destructor, so it must be dropped explicitly. */
1364 if (step_data.bmesh.bm_entry) {
1365 BM_log_entry_drop(step_data.bmesh.bm_entry);
1366 }
/* Explicit destructor call: the StepData lives inside the undo-step allocation and
 * was constructed with placement-new (see #step_encode_init). */
1367 step_data.~StepData();
1368 }
1369
1376static const Node *get_node(const bke::pbvh::Node *node, const Type type)
1377{
1378 StepData *step_data = get_step_data();
1379 if (!step_data) {
1380 return nullptr;
1381 }
1382 if (step_data->type != type) {
1383 return nullptr;
1384 }
1385 /* This access does not need to be locked because this function is not expected to be called
1386 * while the per-node undo data is being pushed. In other words, this must not be called
1387 * concurrently with #push_node. */
1388 std::unique_ptr<Node> *node_ptr = step_data->undo_nodes_by_pbvh_node.lookup_ptr(node);
1389 if (!node_ptr) {
1390 return nullptr;
1391 }
1392 return node_ptr->get();
1393}
1394
1395static void store_vert_visibility_grids(const SubdivCCG &subdiv_ccg,
1396 const bke::pbvh::GridsNode &node,
1397 Node &unode)
1398{
1399 const BitGroupVector<> &grid_hidden = subdiv_ccg.grid_hidden;
1400 if (grid_hidden.is_empty()) {
1401 return;
1402 }
1403
1404 const Span<int> grid_indices = node.grids();
1405 unode.grid_hidden = BitGroupVector<0>(grid_indices.size(), grid_hidden.group_size());
1406 for (const int i : grid_indices.index_range()) {
1407 unode.grid_hidden[i].copy_from(grid_hidden[grid_indices[i]]);
1408 }
1409}
1410
/* Gather the node's vertex positions (and normals, needed for original-data lookup)
 * into the undo node. NOTE(review): the gather call names on the elided lines
 * (1414/1417/1428 in the full source) are not visible in this extract. */
1411 static void store_positions_mesh(const Depsgraph &depsgraph, const Object &object, Node &unode)
1412 {
1413 const SculptSession &ss = *object.sculpt;
1415 unode.vert_indices.as_span(),
1416 unode.position.as_mutable_span());
1418 unode.vert_indices.as_span(),
1419 unode.normal.as_mutable_span());
1420
/* With active deform modifiers the evaluated positions differ from the original ones;
 * store the originals too, preferring the active shape key's data when present. */
1421 if (ss.deform_modifiers_active) {
1422 const Mesh &mesh = *static_cast<const Mesh *>(object.data);
1423 const Span<float3> orig_positions = ss.shapekey_active ? Span(static_cast<const float3 *>(
1424 ss.shapekey_active->data),
1425 mesh.verts_num) :
1426 mesh.vert_positions();
1427
1429 orig_positions, unode.vert_indices.as_span(), unode.orig_position.as_mutable_span());
1430 }
1431 }
1432
/* Gather positions and normals for the undo node's grids from the subdiv CCG.
 * NOTE(review): the gather function names sit on lines elided from this extract. */
1433 static void store_positions_grids(const SubdivCCG &subdiv_ccg, Node &unode)
1434 {
1436 subdiv_ccg, subdiv_ccg.positions.as_span(), unode.grids, unode.position.as_mutable_span());
1438 subdiv_ccg, subdiv_ccg.normals.as_span(), unode.grids, unode.normal.as_mutable_span());
1439 }
1440
/* Store the ".hide_vert" state of the node's vertices in the undo node. When the
 * attribute does not exist, no vertex is hidden and nothing needs to be stored. */
1441 static void store_vert_visibility_mesh(const Mesh &mesh, const bke::pbvh::Node &node, Node &unode)
1442 {
1443 const bke::AttributeAccessor attributes = mesh.attributes();
1444 const VArraySpan<bool> hide_vert = *attributes.lookup<bool>(".hide_vert",
1446 if (hide_vert.is_empty()) {
1447 return;
1448 }
1449
/* `unode.vert_hidden` is pre-sized to the node's vertex count by the caller
 * (see #fill_node_data_mesh). */
1450 const Span<int> verts = static_cast<const bke::pbvh::MeshNode &>(node).all_verts();
1451 for (const int i : verts.index_range()) {
1452 unode.vert_hidden[i].set(hide_vert[verts[i]]);
1453 }
1454 }
1455
1456static void store_face_visibility(const Mesh &mesh, Node &unode)
1457{
1458 const bke::AttributeAccessor attributes = mesh.attributes();
1459 const VArraySpan<bool> hide_poly = *attributes.lookup<bool>(".hide_poly", bke::AttrDomain::Face);
1460 if (hide_poly.is_empty()) {
1461 unode.face_hidden.fill(false);
1462 return;
1463 }
1464 const Span<int> faces = unode.face_indices;
1465 for (const int i : faces.index_range()) {
1466 unode.face_hidden[i].set(hide_poly[faces[i]]);
1467 }
1468}
1469
/* Store the ".sculpt_mask" values of the undo node's vertices; zeros are stored when
 * the mesh has no mask attribute. NOTE(review): the gather call in the else-branch
 * sits on a line elided from this extract. */
1470 static void store_mask_mesh(const Mesh &mesh, Node &unode)
1471 {
1472 const bke::AttributeAccessor attributes = mesh.attributes();
1473 const VArraySpan mask = *attributes.lookup<float>(".sculpt_mask", bke::AttrDomain::Point);
1474 if (mask.is_empty()) {
1475 unode.mask.fill(0.0f);
1476 }
1477 else {
1479 }
1480 }
1481
/* Store mask values for the undo node's grids from the subdiv CCG; zeros are stored
 * when the CCG carries no mask data. */
1482 static void store_mask_grids(const SubdivCCG &subdiv_ccg, Node &unode)
1483 {
1484 if (!subdiv_ccg.masks.is_empty()) {
1486 subdiv_ccg, subdiv_ccg.masks.as_span(), unode.grids, unode.mask.as_mutable_span());
1487 }
1488 else {
1489 unode.mask.fill(0.0f);
1490 }
1491 }
1492
/* Store the active color attribute's values for the node's vertices, plus the exact
 * per-corner values when the attribute lives on the corner domain.
 * NOTE(review): the declaration of `color_attribute` is on a line elided from this
 * extract — presumably a lookup of the mesh's active color attribute; confirm. */
1493 static void store_color(const Mesh &mesh, const bke::pbvh::MeshNode &node, Node &unode)
1494 {
1495 const OffsetIndices<int> faces = mesh.faces();
1496 const Span<int> corner_verts = mesh.corner_verts();
1497 const GroupedSpan<int> vert_to_face_map = mesh.vert_to_face_map();
1499 const GVArraySpan colors(*color_attribute);
1500
1501 /* NOTE: even with loop colors we still store (derived)
1502 * vertex colors for original data lookup. */
1503 const Span<int> verts = node.verts();
1504 unode.col.reinitialize(verts.size());
1506 faces, corner_verts, vert_to_face_map, colors, color_attribute.domain, verts, unode.col);
1507
1508 if (color_attribute.domain == bke::AttrDomain::Corner) {
/* Collect all corner indices of the node's faces, then gather their colors. */
1509 for (const int face : node.faces()) {
1510 for (const int corner : faces[face]) {
1511 unode.corner_indices.append(corner);
1512 }
1513 }
1514 unode.loop_col.reinitialize(unode.corner_indices.size());
1515 color::gather_colors(colors, unode.corner_indices, unode.loop_col);
1516 }
1517 }
1518
1520{
1521 if (!step_data.geometry_original.is_initialized) {
1522 return &step_data.geometry_original;
1523 }
1524
1526
1527 return &step_data.geometry_modified;
1528}
1529
/* Store a full copy of the object's current geometry into the active undo step.
 * NOTE(review): the line that actually stores the geometry into `geometry` is elided
 * from this extract. */
1530 static void geometry_push(const Object &object)
1531 {
1532 StepData *step_data = get_step_data();
1533
1534 step_data->type = Type::Geometry;
1535
1536 NodeGeometry *geometry = geometry_get(*step_data);
1538 }
1539
1540static void store_face_sets(const Mesh &mesh, Node &unode)
1541{
1542 const bke::AttributeAccessor attributes = mesh.attributes();
1543 const VArraySpan face_sets = *attributes.lookup<int>(".sculpt_face_set", bke::AttrDomain::Face);
1544 if (face_sets.is_empty()) {
1545 unode.face_sets.fill(1);
1546 }
1547 else {
1548 gather_data_mesh(face_sets, unode.face_indices.as_span(), unode.face_sets.as_mutable_span());
1549 }
1550}
1551
/* Fill a freshly created undo node with the data needed to later restore \a type for
 * one mesh BVH node (non-multires, non-dyntopo path). */
1552 static void fill_node_data_mesh(const Depsgraph &depsgraph,
1553 const Object &object,
1554 const bke::pbvh::MeshNode &node,
1555 const Type type,
1556 Node &unode)
1557 {
1558 const SculptSession &ss = *object.sculpt;
1559 const Mesh &mesh = *static_cast<Mesh *>(object.data);
1560
/* Vertex indices are always stored; unique verts come first in `all_verts()`. */
1561 unode.vert_indices = node.all_verts();
1562 unode.unique_verts_num = node.verts().size();
1563
1564 const int verts_num = unode.vert_indices.size();
1565
/* Face-domain undo types additionally need the node's face indices. */
1566 if (ELEM(type, Type::FaceSet, Type::HideFace)) {
1567 unode.face_indices = node.faces();
1568 }
1569
1570 switch (type) {
1571 case Type::None:
1573 break;
1574 case Type::Position: {
1575 unode.position.reinitialize(verts_num);
1576 /* Needed for original data lookup. */
1577 unode.normal.reinitialize(verts_num);
1578 if (ss.deform_modifiers_active) {
1579 unode.orig_position.reinitialize(verts_num);
1580 }
1581 store_positions_mesh(depsgraph, object, unode);
1582 break;
1583 }
1584 case Type::HideVert: {
1585 unode.vert_hidden.resize(unode.vert_indices.size());
1586 store_vert_visibility_mesh(mesh, node, unode);
1587 break;
1588 }
1589 case Type::HideFace: {
/* NOTE(review): the store call for face visibility is elided from this extract;
 * presumably #store_face_visibility as in the grids variant. */
1590 unode.face_hidden.resize(unode.face_indices.size());
1592 break;
1593 }
1594 case Type::Mask: {
1595 unode.mask.reinitialize(verts_num);
1596 store_mask_mesh(mesh, unode);
1597 break;
1598 }
1599 case Type::Color: {
1600 store_color(mesh, node, unode);
1601 break;
1602 }
1603 case Type::DyntopoBegin:
1604 case Type::DyntopoEnd:
1605 /* Dyntopo should be handled elsewhere. */
1607 break;
1608 case Type::Geometry:
1609 /* See #geometry_push. */
1611 break;
1612 case Type::FaceSet: {
1613 unode.face_sets.reinitialize(unode.face_indices.size());
1614 store_face_sets(mesh, unode);
1615 break;
1616 }
1617 }
1618 }
1619
/* Fill a freshly created undo node with the data needed to later restore \a type for
 * one grids (multires) BVH node. Mirrors #fill_node_data_mesh. */
1620 static void fill_node_data_grids(const Object &object,
1621 const bke::pbvh::GridsNode &node,
1622 const Type type,
1623 Node &unode)
1624 {
1625 const SculptSession &ss = *object.sculpt;
1626 const Mesh &base_mesh = *static_cast<const Mesh *>(object.data);
1627 const SubdivCCG &subdiv_ccg = *ss.subdiv_ccg;
1628
1629 unode.grids = node.grids();
1630
/* Total CCG vertex count covered by this node: one grid_size^2 patch per grid. */
1631 const int grid_area = subdiv_ccg.grid_size * subdiv_ccg.grid_size;
1632 const int verts_num = unode.grids.size() * grid_area;
1633
/* NOTE(review): the body of this branch is elided from this extract; presumably it
 * stores base-mesh face indices, matching the mesh variant. */
1634 if (ELEM(type, Type::FaceSet, Type::HideFace)) {
1636 }
1637
1638 switch (type) {
1639 case Type::None:
1641 break;
1642 case Type::Position: {
1643 unode.position.reinitialize(verts_num);
1644 /* Needed for original data lookup. */
1645 unode.normal.reinitialize(verts_num);
1646 store_positions_grids(subdiv_ccg, unode);
1647 break;
1648 }
1649 case Type::HideVert: {
1650 store_vert_visibility_grids(subdiv_ccg, node, unode);
1651 break;
1652 }
1653 case Type::HideFace: {
/* Face visibility lives on the base mesh, even in multires. */
1654 unode.face_hidden.resize(unode.face_indices.size());
1655 store_face_visibility(base_mesh, unode);
1656 break;
1657 }
1658 case Type::Mask: {
1659 unode.mask.reinitialize(verts_num);
1660 store_mask_grids(subdiv_ccg, unode);
1661 break;
1662 }
1663 case Type::Color: {
1665 break;
1666 }
1667 case Type::DyntopoBegin:
1668 case Type::DyntopoEnd:
1669 /* Dyntopo should be handled elsewhere. */
1671 break;
1672 case Type::Geometry:
1673 /* See #geometry_push. */
1675 break;
1676 case Type::FaceSet: {
1677 unode.face_sets.reinitialize(unode.face_indices.size());
1678 store_face_sets(base_mesh, unode);
1679 break;
1680 }
1681 }
1682 }
1683
/* Push undo data for dynamic-topology (BMesh) sculpting. Unlike the mesh/grids paths,
 * the data is recorded through the BMesh log rather than copied into Node arrays. */
1688 BLI_NOINLINE static void bmesh_push(const Object &object,
1689 const bke::pbvh::BMeshNode *node,
1690 Type type)
1691 {
1692 StepData *step_data = get_step_data();
1693 const SculptSession &ss = *object.sculpt;
1694
/* Serialize pushes: this function may be called from multiple threads via #push_nodes. */
1695 std::scoped_lock lock(step_data->nodes_mutex);
1696
/* First push of this step: set up the step type and the BMesh log entry. */
1697 if (step_data->nodes.is_empty()) {
1698 /* We currently need to append data here so that the overall undo system knows to indicate that
1699 * data should be flushed to the memfile */
1700 /* TODO: Once we store entering Sculpt Mode as a specific type of action, we can remove this
1701 * call. */
1702 step_data->nodes.append(std::make_unique<Node>());
1703
1704 step_data->type = type;
1705
1706 if (type == Type::DyntopoEnd) {
1707 step_data->bmesh.bm_entry = BM_log_entry_add(ss.bm_log);
1709 }
1710 else if (type == Type::DyntopoBegin) {
1711 /* Store a copy of the mesh's current vertices, loops, and
1712 * faces. A full copy like this is needed because entering
1713 * dynamic-topology immediately does topological edits
1714 * (converting faces to triangles) that the BMLog can't
1715 * fully restore from. */
1718
1719 step_data->bmesh.bm_entry = BM_log_entry_add(ss.bm_log);
1720 BM_log_all_added(ss.bm, ss.bm_log);
1721 }
1722 else {
1723 step_data->bmesh.bm_entry = BM_log_entry_add(ss.bm_log);
1724 }
1725 }
1726
1727 if (node) {
1728 const int cd_vert_mask_offset = CustomData_get_offset_named(
1729 &ss.bm->vdata, CD_PROP_FLOAT, ".sculpt_mask");
1730
1731 /* The vertices and node aren't changed, though pointers to them are stored in the log. */
1732 bke::pbvh::BMeshNode *node_mut = const_cast<bke::pbvh::BMeshNode *>(node);
1733
1734 switch (type) {
1735 case Type::None:
1737 break;
1738 case Type::Position:
1739 case Type::Mask:
1740 /* Before any vertex values get modified, ensure their
1741 * original positions are logged. */
1742 for (BMVert *vert : BKE_pbvh_bmesh_node_unique_verts(node_mut)) {
1743 BM_log_vert_before_modified(ss.bm_log, vert, cd_vert_mask_offset);
1744 }
1745 for (BMVert *vert : BKE_pbvh_bmesh_node_other_verts(node_mut)) {
1746 BM_log_vert_before_modified(ss.bm_log, vert, cd_vert_mask_offset);
1747 }
1748 break;
1749
1750 case Type::HideFace:
1751 case Type::HideVert: {
/* Visibility changes touch both vertex and face flags, so log both. */
1752 for (BMVert *vert : BKE_pbvh_bmesh_node_unique_verts(node_mut)) {
1753 BM_log_vert_before_modified(ss.bm_log, vert, cd_vert_mask_offset);
1754 }
1755 for (BMVert *vert : BKE_pbvh_bmesh_node_other_verts(node_mut)) {
1756 BM_log_vert_before_modified(ss.bm_log, vert, cd_vert_mask_offset);
1757 }
1758
/* NOTE(review): the face-logging call inside this loop is elided from this extract. */
1759 for (BMFace *f : BKE_pbvh_bmesh_node_faces(node_mut)) {
1761 }
1762 break;
1763 }
1764
1765 case Type::DyntopoBegin:
1766 case Type::DyntopoEnd:
1767 case Type::Geometry:
1768 case Type::FaceSet:
1769 case Type::Color:
1770 break;
1771 }
1772 }
1773 }
1774
1779static Node *ensure_node(StepData &step_data, const bke::pbvh::Node &node, bool &r_new)
1780{
1781 std::scoped_lock lock(step_data.nodes_mutex);
1782 r_new = false;
1783 std::unique_ptr<Node> &unode = step_data.undo_nodes_by_pbvh_node.lookup_or_add_cb(&node, [&]() {
1784 std::unique_ptr<Node> new_unode = std::make_unique<Node>();
1785 r_new = true;
1786 return new_unode;
1787 });
1788 return unode.get();
1789}
1790
/* Push undo data for a single BVH node. Safe to call multiple times per step for the
 * same node: only the first call stores data. */
1791 void push_node(const Depsgraph &depsgraph,
1792 const Object &object,
1793 const bke::pbvh::Node *node,
1794 const Type type)
1795 {
1796 SculptSession &ss = *object.sculpt;
1797 const bke::pbvh::Tree &pbvh = *bke::object::pbvh_get(object);
/* Dynamic topology uses the BMesh-log path regardless of node type. */
1798 if (ss.bm || ELEM(type, Type::DyntopoBegin, Type::DyntopoEnd)) {
1799 bmesh_push(object, static_cast<const bke::pbvh::BMeshNode *>(node), type);
1800 return;
1801 }
1802
/* A step holds exactly one data type; it may only go from None to `type`. */
1803 StepData *step_data = get_step_data();
1804 BLI_assert(ELEM(step_data->type, Type::None, type));
1805 step_data->type = type;
1806
1807 bool newly_added;
1808 Node *unode = ensure_node(*step_data, *node, newly_added);
1809 if (!newly_added) {
1810 /* The node was already filled with data for this undo step. */
1811 return;
1812 }
1813
1814 ss.needs_flush_to_id = true;
1815
/* NOTE(review): the case labels of this switch are elided from this extract; presumably
 * Mesh / Grids / BMesh as in #push_nodes. */
1816 switch (pbvh.type()) {
1819 depsgraph, object, static_cast<const bke::pbvh::MeshNode &>(*node), type, *unode);
1820 break;
1822 fill_node_data_grids(object, static_cast<const bke::pbvh::GridsNode &>(*node), type, *unode);
1823 break;
1826 break;
1827 }
1828 }
1829
/* Push undo data for many BVH nodes at once. Map insertion happens serially; filling
 * the newly created undo nodes with data is parallelized. */
1830 void push_nodes(const Depsgraph &depsgraph,
1831 Object &object,
1832 const IndexMask &node_mask,
1833 const Type type)
1834 {
1835 SculptSession &ss = *object.sculpt;
1836
1837 ss.needs_flush_to_id = true;
1838
1839 const bke::pbvh::Tree &pbvh = *bke::object::pbvh_get(object);
/* Dynamic topology uses the BMesh-log path regardless of node type. */
1840 if (ss.bm || ELEM(type, Type::DyntopoBegin, Type::DyntopoEnd)) {
1842 node_mask.foreach_index([&](const int i) { bmesh_push(object, &nodes[i], type); });
1843 return;
1844 }
1845
1846 StepData *step_data = get_step_data();
1847 BLI_assert(ELEM(step_data->type, Type::None, type));
1848 step_data->type = type;
1849
1850 switch (pbvh.type()) {
1851 case bke::pbvh::Type::Mesh: {
/* NOTE(review): declarations of `nodes` and `nodes_to_fill` are elided from this
 * extract (same pattern in the grids branch below). */
1854 node_mask.foreach_index([&](const int i) {
1855 bool newly_added;
1856 Node *unode = ensure_node(*step_data, nodes[i], newly_added);
1857 if (newly_added) {
1858 nodes_to_fill.append({&nodes[i], unode});
1859 }
1860 });
/* Only nodes that were just created need their data gathered; do it in parallel. */
1861 threading::parallel_for(nodes_to_fill.index_range(), 1, [&](const IndexRange range) {
1862 for (const auto &[node, unode] : nodes_to_fill.as_span().slice(range)) {
1863 fill_node_data_mesh(depsgraph, object, *node, type, *unode);
1864 }
1865 });
1866 break;
1867 }
1871 node_mask.foreach_index([&](const int i) {
1872 bool newly_added;
1873 Node *unode = ensure_node(*step_data, nodes[i], newly_added);
1874 if (newly_added) {
1875 nodes_to_fill.append({&nodes[i], unode});
1876 }
1877 });
1878 threading::parallel_for(nodes_to_fill.index_range(), 1, [&](const IndexRange range) {
1879 for (const auto &[node, unode] : nodes_to_fill.as_span().slice(range)) {
1880 fill_node_data_grids(object, *node, type, *unode);
1881 }
1882 });
1883 break;
1884 }
1887 break;
1888 }
1889 }
1891
/* Record the object's active color attribute (name, domain, type) so undo can restore
 * it later. `attr` is reset to "no active layer" when the attribute is missing or is
 * not a valid color attribute. NOTE(review): the declaration of `mesh` is elided from
 * this extract — presumably fetched from the object's data; confirm. */
1892 static void save_active_attribute(Object &object, SculptAttrRef *attr)
1893 {
1895 attr->was_set = true;
1896 attr->domain = NO_ACTIVE_LAYER;
1897 attr->name[0] = 0;
1898 if (!mesh) {
1899 return;
1900 }
1901 const char *name = mesh->active_color_attribute;
1902 const bke::AttributeAccessor attributes = mesh->attributes();
1903 const std::optional<bke::AttributeMetaData> meta_data = attributes.lookup_meta_data(name);
1904 if (!meta_data) {
1905 return;
1906 }
/* Only attributes on a color-capable domain (and, per the elided condition, a color
 * data type) qualify as the active color layer. */
1907 if (!(ATTR_DOMAIN_AS_MASK(meta_data->domain) & ATTR_DOMAIN_MASK_COLOR) ||
1909 {
1910 return;
1911 }
1912 attr->domain = meta_data->domain;
1913 STRNCPY_UTF8(attr->name, name);
1914 attr->type = *bke::attr_type_to_custom_data_type(meta_data->data_type);
1915 }
1916
1922{
1923 us->data.object_name = ob.id.name;
1924
1925 if (!us->active_color_start.was_set) {
1927 }
1928
1929 /* Set end attribute in case push_end is not called,
1930 * so we don't end up with corrupted state.
1931 */
1932 if (!us->active_color_end.was_set) {
1934 us->active_color_end.was_set = false;
1935 }
1936
1937 const SculptSession &ss = *ob.sculpt;
1938
1939 us->data.pivot_pos = ss.pivot_pos;
1940 us->data.pivot_rot = ss.pivot_rot;
1941
1942 if (const KeyBlock *key = BKE_keyblock_from_object(&ob)) {
1943 us->data.active_shape_key_name = key->name;
1944 }
1945}
1946
/* Begin a named sculpt undo step. Also records the mesh's element counts so later
 * restores can detect topology mismatches (see the `topology_matches` checks). */
1947 void push_begin_ex(const Scene & /*scene*/, Object &ob, const char *name)
1948 {
1949 UndoStack *ustack = ED_undo_stack_get();
1950
1951 /* If possible, we need to tag the object and its geometry data as 'changed in the future' in
1952 * the previous undo step if it's a memfile one. */
1954 ED_undosys_stack_memfile_id_changed_tag(ustack, static_cast<ID *>(ob.data));
1955
1956 /* Special case, we never read from this. */
1957 bContext *C = nullptr;
1958
/* NOTE(review): the step-initialization call is elided from this extract. */
1959 SculptUndoStep *us = reinterpret_cast<SculptUndoStep *>(
1961
1962 const SculptSession &ss = *ob.sculpt;
1963 const bke::pbvh::Tree &pbvh = *bke::object::pbvh_get(ob);
1964
1965 save_common_data(ob, us);
1966
1967 switch (pbvh.type()) {
1968 case bke::pbvh::Type::Mesh: {
1969 const Mesh &mesh = *static_cast<const Mesh *>(ob.data);
1970 us->data.mesh.verts_num = mesh.verts_num;
1971 us->data.mesh.corners_num = mesh.corners_num;
1972 break;
1973 }
/* NOTE(review): the Grids and BMesh case bodies are elided from this extract. */
1977 break;
1978 }
1980 break;
1981 }
1982 }
1983 }
1984
1985void push_begin(const Scene &scene, Object &ob, const wmOperator *op)
1986{
1987 push_begin_ex(scene, ob, op->type->name);
1988}
1989
/* Push an undo step for entering sculpt mode. Unlike #push_begin_ex this stores no
 * mesh element counts, only the common per-step data. */
1990 void push_enter_sculpt_mode(const Scene & /*scene*/, Object &ob, const wmOperator *op)
1991 {
1992 UndoStack *ustack = ED_undo_stack_get();
1993
1994 /* If possible, we need to tag the object and its geometry data as 'changed in the future' in
1995 * the previous undo step if it's a memfile one. */
1997 ED_undosys_stack_memfile_id_changed_tag(ustack, static_cast<ID *>(ob.data));
1998
1999 /* Special case, we never read from this. */
2000 bContext *C = nullptr;
2001
/* NOTE(review): the step-initialization call is elided from this extract. */
2002 SculptUndoStep *us = reinterpret_cast<SculptUndoStep *>(
2004 save_common_data(ob, us);
2005 }
2006
/* Approximate memory footprint of an undo node, used for undo-size accounting. */
2007 static size_t node_size_in_bytes(const Node &node)
2008 {
2009 size_t size = sizeof(Node);
2010 size += node.position.as_span().size_in_bytes();
2011 size += node.orig_position.as_span().size_in_bytes();
2012 size += node.normal.as_span().size_in_bytes();
2013 size += node.col.as_span().size_in_bytes();
2014 size += node.mask.as_span().size_in_bytes();
2015 size += node.loop_col.as_span().size_in_bytes();
/* Bit vectors: divide bit count by 8 to get bytes. */
2018 size += node.vert_hidden.size() / 8;
2019 size += node.face_hidden.size() / 8;
2020 size += node.grids.as_span().size_in_bytes();
2021 size += node.grid_hidden.all_bits().size() / 8;
2022 size += node.face_sets.as_span().size_in_bytes();
2024 return size;
2025 }
2026
/* Finish the current sculpt undo step: consolidate the per-node data, compute the
 * step size, push the step onto the undo stack and record the final active color
 * attribute. */
2027 void push_end_ex(Object &ob, const bool use_nested_undo)
2028 {
2029 StepData *step_data = get_step_data();
2030
2031 /* Move undo node storage from map to vector. */
2032 step_data->nodes.reserve(step_data->undo_nodes_by_pbvh_node.size());
2033 for (std::unique_ptr<Node> &node : step_data->undo_nodes_by_pbvh_node.values()) {
2034 step_data->nodes.append(std::move(node));
2035 }
2036 step_data->undo_nodes_by_pbvh_node.clear();
2037
2038 /* We don't need normals in the undo stack. */
2039 for (std::unique_ptr<Node> &unode : step_data->nodes) {
2040 unode->normal = {};
2041 }
2042 /* TODO: When #Node.orig_positions is stored, #Node.positions is unnecessary, don't keep it in
2043 * the stored undo step. In the future the stored undo step should use a different format with
2044 * just one positions array that has a different semantic meaning depending on whether there are
2045 * deform modifiers. */
2046
2047 if (step_data->type == Type::Position) {
/* Position steps are converted to a compact (compressed) storage format. */
2048 step_data->position_step_storage = std::make_unique<PositionUndoStorage>(*step_data);
2049 }
2050 else {
/* NOTE(review): the parallel-reduce call that assigns the summed size (presumably to
 * `step_data->undo_size`) starts on a line elided from this extract. */
2052 step_data->nodes.index_range(),
2053 16,
2054 0,
2055 [&](const IndexRange range, size_t size) {
2056 for (const int i : range) {
2057 size += node_size_in_bytes(*step_data->nodes[i]);
2058 }
2059 return size;
2060 },
2061 std::plus<size_t>());
2062 }
2063
2064 /* We could remove this and enforce all callers run in an operator using 'OPTYPE_UNDO'. */
2065 wmWindowManager *wm = static_cast<wmWindowManager *>(G_MAIN->wm.first);
2066 if (wm->op_undo_depth == 0 || use_nested_undo) {
2067 UndoStack *ustack = ED_undo_stack_get();
2068 BKE_undosys_step_push(ustack, nullptr, nullptr);
2069 if (wm->op_undo_depth == 0) {
2071 }
2073 }
2074
2075 UndoStack *ustack = ED_undo_stack_get();
2076 SculptUndoStep *us = reinterpret_cast<SculptUndoStep *>(
2078
2079 save_active_attribute(ob, &us->active_color_end);
2080 }
2081
2083{
2084 push_end_ex(ob, false);
2085}
2086
2087/* -------------------------------------------------------------------- */
2090
/* Restore the mesh's active color attribute from a saved reference, re-creating or
 * converting the layer when memfile undo removed or changed it. */
2091 static void set_active_layer(bContext *C, const SculptAttrRef *attr)
2092 {
/* AttrDomain::Auto is the "nothing was saved" marker; nothing to restore. */
2093 if (attr->domain == bke::AttrDomain::Auto) {
2094 return;
2095 }
2096
/* NOTE(review): the declarations of `ob`, `mesh` and `owner` are on lines elided
 * from this extract. */
2100 SculptAttrRef existing;
2101 save_active_attribute(*ob, &existing);
2102
2104 CustomDataLayer *layer = BKE_attribute_find(owner, attr->name, attr->type, attr->domain);
2105
2106 /* Temporary fix for #97408. This is a fundamental
2107 * bug in the undo stack; the operator code needs to push
2108 * an extra undo step before running an operator if a
2109 * non-memfile undo system is active.
2110 *
2111 * For now, detect if the layer does exist but with a different
2112 * domain and just unconvert it.
2113 */
2114 if (!layer) {
/* NOTE(review): the lookup of the layer on its other domain is elided from this
 * extract; `layer` is presumably re-assigned before this check. */
2117 if (layer) {
2119 mesh->attributes_for_write(),
2120 attr->name,
2121 attr->domain,
2123 nullptr))
2124 {
2125 layer = BKE_attribute_find(owner, attr->name, attr->type, attr->domain);
2126 }
2127 }
2128 }
2129
2130 if (!layer) {
2131 /* Memfile undo killed the layer; re-create it. */
2132 mesh->attributes_for_write().add(attr->name,
2133 attr->domain,
2136 layer = BKE_attribute_find(owner, attr->name, attr->type, attr->domain);
2138 }
2139
/* NOTE(review): the call that actually activates `layer` is elided from this extract. */
2140 if (layer) {
2142 }
2143 }
2144
2145static void step_encode_init(bContext * /*C*/, UndoStep *us_p)
2146{
2147 SculptUndoStep *us = reinterpret_cast<SculptUndoStep *>(us_p);
2148 new (&us->data) StepData();
2149}
2150
2151static bool step_encode(bContext * /*C*/, Main *bmain, UndoStep *us_p)
2152{
2153 /* Dummy, encoding is done along the way by adding tiles
2154 * to the current 'SculptUndoStep' added by encode_init. */
2155 SculptUndoStep *us = reinterpret_cast<SculptUndoStep *>(us_p);
2156 us->step.data_size = us->data.undo_size;
2157
2158 if (us->data.type == Type::DyntopoEnd) {
2159 us->step.use_memfile_step = true;
2160 }
2161 us->step.is_applied = true;
2162
2163 /* We do not flush data when entering sculpt mode - this is currently indicated by Type::None */
2164 if (us->data.type != Type::None) {
2165 bmain->is_memfile_undo_flush_needed = true;
2166 }
2167
2168 return true;
2169}
2170
2172{
2173 BLI_assert(us->step.is_applied == true);
2174
2176 us->step.is_applied = false;
2177}
2178
2180{
2181 BLI_assert(us->step.is_applied == false);
2182
2184 us->step.is_applied = true;
2185}
2186
/* Undo direction of #step_decode. NOTE(review): the first signature line (2187,
 * presumably `static void step_decode_undo(bContext *C,`) is elided from this
 * extract. */
2188 Depsgraph *depsgraph,
2189 SculptUndoStep *us,
2190 const bool is_final)
2191 {
2192 /* Walk forward over any applied steps of same type,
2193 * then walk back in the next loop, un-applying them. */
2194 SculptUndoStep *us_iter = us;
2195 while (us_iter->step.next && (us_iter->step.next->type == us_iter->step.type)) {
2196 if (us_iter->step.next->is_applied == false) {
2197 break;
2198 }
2199 us_iter = reinterpret_cast<SculptUndoStep *>(us_iter->step.next);
2200 }
2201
/* Walk backwards, un-applying each step until (and, when `is_final`, including) `us`. */
2202 while ((us_iter != us) || (!is_final && us_iter == us)) {
2203 BLI_assert(us_iter->step.type == us->step.type); /* Previous loop ensures this. */
2204
/* NOTE(review): the per-step un-apply call (lines 2205-2206) is elided from this
 * extract. */
2207
2208 if (us_iter == us) {
/* Restore the active color layer recorded at the end of the previous sculpt step. */
2209 if (us_iter->step.prev && us_iter->step.prev->type == BKE_UNDOSYS_TYPE_SCULPT) {
2211 C, &reinterpret_cast<SculptUndoStep *>(us_iter->step.prev)->active_color_end);
2212 }
2213 break;
2214 }
2215
2216 us_iter = reinterpret_cast<SculptUndoStep *>(us_iter->step.prev);
2217 }
2218 }
2219
2221{
2222 SculptUndoStep *us_iter = us;
2223 while (us_iter->step.prev && (us_iter->step.prev->type == us_iter->step.type)) {
2224 if (us_iter->step.prev->is_applied == true) {
2225 break;
2226 }
2227 us_iter = reinterpret_cast<SculptUndoStep *>(us_iter->step.prev);
2228 }
2229 while (us_iter && (us_iter->step.is_applied == false)) {
2232
2233 if (us_iter == us) {
2235 break;
2236 }
2237 us_iter = reinterpret_cast<SculptUndoStep *>(us_iter->step.next);
2238 }
2239}
2240
/* Undo-system decode callback: ensure the active object is in sculpt mode, then
 * dispatch to the undo or redo walker. */
2241 static void step_decode(
2242 bContext *C, Main *bmain, UndoStep *us_p, const eUndoStepDir dir, const bool is_final)
2243 {
2244 /* NOTE: behavior for undo/redo closely matches image undo. */
2245 BLI_assert(dir != STEP_INVALID);
2246
/* NOTE(review): the depsgraph acquisition (line 2247) is elided from this extract. */
2248
2249 /* Ensure sculpt mode. */
2250 {
2251 Scene *scene = CTX_data_scene(C);
2252 ViewLayer *view_layer = CTX_data_view_layer(C);
2253 BKE_view_layer_synced_ensure(scene, view_layer);
2254 Object *ob = BKE_view_layer_active_object_get(view_layer);
2255 if (ob && (ob->type == OB_MESH)) {
/* NOTE(review): the condition checked here (line 2256, presumably "already in sculpt
 * mode") is elided from this extract. */
2257 /* Pass. */
2258 }
2259 else {
2260 object::mode_generic_exit(bmain, depsgraph, scene, ob);
2261
2262 /* Sculpt needs evaluated state.
2263 * NOTE: needs to be done here, as #object::mode_generic_exit will usually invalidate
2264 * (some) evaluated data. */
2266
2267 Mesh *mesh = static_cast<Mesh *>(ob->data);
2268 /* Don't add sculpt topology undo steps when reading back undo state.
2269 * The undo steps must enter/exit for us. */
2271 object_sculpt_mode_enter(*bmain, *depsgraph, *scene, *ob, true, nullptr);
2272 }
2273
2274 if (ob->sculpt) {
2275 ob->sculpt->needs_flush_to_id = true;
2276 }
2277 bmain->is_memfile_undo_flush_needed = true;
2278 }
2279 else {
/* Sculpt undo steps should only exist with an active mesh object. */
2280 BLI_assert(0);
2281 return;
2282 }
2283 }
2284
2285 SculptUndoStep *us = reinterpret_cast<SculptUndoStep *>(us_p);
2286 if (dir == STEP_UNDO) {
2287 step_decode_undo(C, depsgraph, us, is_final);
2288 }
2289 else if (dir == STEP_REDO) {
/* NOTE(review): the redo call (line 2290) is elided from this extract. */
2291 }
2292 }
2293
2294static void step_free(UndoStep *us_p)
2295{
2296 SculptUndoStep *us = reinterpret_cast<SculptUndoStep *>(us_p);
2297 free_step_data(us->data);
2298}
2299
2300void geometry_begin(const Scene &scene, Object &ob, const wmOperator *op)
2301{
2302 geometry_begin_ex(scene, ob, op->type->name);
2303}
2304
/* Begin a named geometry undo step and immediately store the object's current
 * ("original") geometry. */
2305 void geometry_begin_ex(const Scene & /*scene*/, Object &ob, const char *name)
2306 {
2307 UndoStack *ustack = ED_undo_stack_get();
2308
2309 /* If possible, we need to tag the object and its geometry data as 'changed in the future' in
2310 * the previous undo step if it's a memfile one. */
2312 ED_undosys_stack_memfile_id_changed_tag(ustack, static_cast<ID *>(ob.data));
2313
2314 /* Special case, we never read from this. */
2315 bContext *C = nullptr;
2316
/* NOTE(review): the step-initialization call is elided from this extract. */
2317 SculptUndoStep *us = reinterpret_cast<SculptUndoStep *>(
2319 save_common_data(ob, us);
2320 geometry_push(ob);
2321 }
2322
/* Measure the memory footprint of one stored geometry snapshot, counting shared data
 * (like the face offsets) only once. NOTE(review): the construction of
 * `memory_counter` (line 2328) is elided from this extract — presumably a
 * MemoryCounter wrapping `memory`; confirm. */
2323 static size_t calculate_node_geometry_allocated_size(const NodeGeometry &node_geometry)
2324 {
2325 BLI_assert(node_geometry.is_initialized);
2326
2327 MemoryCount memory;
2329
2330 memory_counter.add_shared(node_geometry.face_offsets_sharing_info,
2331 sizeof(int) * (node_geometry.faces_num + 1));
2332
2333 CustomData_count_memory(node_geometry.corner_data, node_geometry.corners_num, memory_counter);
2334 CustomData_count_memory(node_geometry.face_data, node_geometry.faces_num, memory_counter);
2335 CustomData_count_memory(node_geometry.vert_data, node_geometry.verts_num, memory_counter);
2336 CustomData_count_memory(node_geometry.edge_data, node_geometry.edges_num, memory_counter);
2337
2338 return memory.total_bytes;
2339 }
2340
/* Rough estimate of the memory a geometry undo step occupies, used for the undo stack's
 * memory limit.
 * NOTE(review): lines 2348-2349 (the accumulation into `step_size`, presumably over the
 * step's geometry nodes) were lost in doc extraction — as shown the function would always
 * return 0; confirm against the original source file. */
2341 static size_t estimate_geometry_step_size(const StepData &step_data)
2342{
2343 size_t step_size = 0;
2344
2345 /* TODO: This calculation is not entirely accurate, as the current amount of memory consumed by
2346 * Sculpt Undo is not updated when elements are evicted. Further changes to the overall undo
2347 * system would be needed to measure this accurately. */
2350
2351 return step_size;
2352}
2353
/* NOTE(review): the signature line (2354) was lost in doc extraction. From the body, this
 * finishes a standalone geometry undo step for `ob`: it pushes the "after" geometry state,
 * records the step's estimated size, and commits the step when not already inside an
 * undo-capable operator. Confirm the exact name/signature in the original source file. */
2355{
2356 geometry_push(ob);
2357
/* Record an estimated footprint so the undo stack can enforce its memory limit. */
2358 StepData *step_data = get_step_data();
2359 step_data->undo_size = estimate_geometry_step_size(*step_data);
2360
2361 /* We could remove this and enforce all callers run in an operator using 'OPTYPE_UNDO'. */
2362 wmWindowManager *wm = static_cast<wmWindowManager *>(G_MAIN->wm.first);
2363 if (wm->op_undo_depth == 0) {
2364 UndoStack *ustack = ED_undo_stack_get();
2365 BKE_undosys_step_push(ustack, nullptr, nullptr);
2366 if (wm->op_undo_depth == 0) {
/* NOTE(review): lines 2367 and 2369 were lost in doc extraction (possibly
 * #BKE_undosys_stack_limit_steps_and_memory_defaults / #WM_file_tag_modified) — confirm. */
2368 }
2370 }
2371}
2372
/* NOTE(review): the signature line (2373) was lost in doc extraction. This fills in the
 * #UndoType callbacks for the sculpt undo system (conventionally the function registering
 * #BKE_UNDOSYS_TYPE_SCULPT); confirm the exact name in the original source file. */
2374{
2375 ut->name = "Sculpt";
2376 ut->poll = nullptr; /* No poll from context for now. */
/* NOTE(review): lines 2377-2379 (presumably the step_encode_init / step_encode / step_decode
 * callback assignments) were lost in doc extraction — confirm. */
2380 ut->step_free = step_free;
2381
2383
2384 ut->step_size = sizeof(SculptUndoStep);
2385}
2386
2388
2389/* -------------------------------------------------------------------- */
2408
/* True when the active object is being sculpted through an active multires modifier.
 * NOTE(review): the signature line (2409, `static bool use_multires_mesh(bContext *C)` per
 * this file's declarations) and the guard condition on line 2411 (which makes the early
 * `return false` reachable) were lost in doc extraction — confirm against the original. */
2410{
2412 return false;
2413 }
2414
2415 const Object *object = CTX_data_active_object(C);
2416 const SculptSession *sculpt_session = object->sculpt;
2417
2418 return sculpt_session->multires.active;
2419}
2420
/* Begin an undo step for an edit that changes the multires mesh.
 * NOTE(review): the signature line (2421,
 * `void push_multires_mesh_begin(bContext *C, const char *str)` per this file's
 * declarations) and line 2430 were lost in doc extraction — confirm against the original. */
2422{
/* Without an active multires mesh there is nothing special to store here. */
2423 if (!use_multires_mesh(C)) {
2424 return;
2425 }
2426
2427 const Scene &scene = *CTX_data_scene(C);
2428 Object *object = CTX_data_active_object(C);
2429
2431
/* Store the "before" state: common step data plus a full geometry snapshot. */
2432 push_begin_ex(scene, *object, str);
2433
2434 geometry_push(*object);
2435}
2436
/* Finish the undo step started by #push_multires_mesh_begin.
 * NOTE(review): the signature line (2437,
 * `void push_multires_mesh_end(bContext *C, const char *str)` per this file's declarations)
 * was lost in doc extraction — confirm against the original source file. */
2438{
/* No multires: fall back to a plain named undo push. */
2439 if (!use_multires_mesh(C)) {
2440 ED_undo_push(C, str);
2441 return;
2442 }
2443
2444 Object *object = CTX_data_active_object(C);
2445
/* Store the "after" geometry state, then finalize the step. */
2446 geometry_push(*object);
2447
2448 push_end(*object);
2449}
2450
2452
2453} // namespace blender::ed::sculpt_paint::undo
2454
2455namespace blender::ed::sculpt_paint {
2456
2457std::optional<OrigPositionData> orig_position_data_lookup_mesh_all_verts(
2458 const Object & /*object*/, const bke::pbvh::MeshNode &node)
2459{
2460 const undo::Node *unode = undo::get_node(&node, undo::Type::Position);
2461 if (!unode) {
2462 return std::nullopt;
2463 }
2464 return OrigPositionData{unode->position.as_span(), unode->normal.as_span()};
2465}
2466
2467std::optional<OrigPositionData> orig_position_data_lookup_mesh(const Object &object,
2468 const bke::pbvh::MeshNode &node)
2469{
2470 const std::optional<OrigPositionData> result = orig_position_data_lookup_mesh_all_verts(object,
2471 node);
2472 if (!result) {
2473 return std::nullopt;
2474 }
2475 return OrigPositionData{result->positions.take_front(node.verts().size()),
2476 result->normals.take_front(node.verts().size())};
2477}
2478
2479std::optional<OrigPositionData> orig_position_data_lookup_grids(const Object & /*object*/,
2480 const bke::pbvh::GridsNode &node)
2481{
2482 const undo::Node *unode = undo::get_node(&node, undo::Type::Position);
2483 if (!unode) {
2484 return std::nullopt;
2485 }
2486 return OrigPositionData{unode->position.as_span(), unode->normal.as_span()};
2487}
2488
/* Fetch the pre-stroke ("original") position and normal of every vert in `verts` from the
 * BMesh log, writing them into `positions` / `normals` in the set's iteration order; either
 * output span may be empty to skip that channel.
 * NOTE(review): lines 2489 and 2492 (the function name plus `bm_log` parameter, and the
 * `normals` parameter — `void orig_position_data_gather_bmesh(const BMLog &bm_log, ...)`
 * per this file's declarations) were lost in doc extraction — confirm against the original. */
2490 const Set<BMVert *, 0> &verts,
2491 const MutableSpan<float3> positions,
2493{
2494 int i = 0;
2495 for (const BMVert *vert : verts) {
2496 const float *co;
2497 const float *no;
/* BM_log's query API takes non-const pointers; the const_casts only satisfy that interface. */
2498 BM_log_original_vert_data(&const_cast<BMLog &>(bm_log), const_cast<BMVert *>(vert), &co, &no);
2499 if (!positions.is_empty()) {
2500 positions[i] = co;
2501 }
2502 if (!normals.is_empty()) {
2503 normals[i] = no;
2504 }
2505 i++;
2506 }
2507}
2508
2509std::optional<Span<float4>> orig_color_data_lookup_mesh(const Object & /*object*/,
2510 const bke::pbvh::MeshNode &node)
2511{
2512 const undo::Node *unode = undo::get_node(&node, undo::Type::Color);
2513 if (!unode) {
2514 return std::nullopt;
2515 }
2516 return unode->col.as_span();
2517}
2518
2519std::optional<Span<int>> orig_face_set_data_lookup_mesh(const Object & /*object*/,
2520 const bke::pbvh::MeshNode &node)
2521{
2522 const undo::Node *unode = undo::get_node(&node, undo::Type::FaceSet);
2523 if (!unode) {
2524 return std::nullopt;
2525 }
2526 return unode->face_sets.as_span();
2527}
2528
2529std::optional<Span<int>> orig_face_set_data_lookup_grids(const Object & /*object*/,
2530 const bke::pbvh::GridsNode &node)
2531{
2532 const undo::Node *unode = undo::get_node(&node, undo::Type::FaceSet);
2533 if (!unode) {
2534 return std::nullopt;
2535 }
2536 return unode->face_sets.as_span();
2537}
2538
2539std::optional<Span<float>> orig_mask_data_lookup_mesh(const Object & /*object*/,
2540 const bke::pbvh::MeshNode &node)
2541{
2542 const undo::Node *unode = undo::get_node(&node, undo::Type::Mask);
2543 if (!unode) {
2544 return std::nullopt;
2545 }
2546 return unode->mask.as_span();
2547}
2548
2549std::optional<Span<float>> orig_mask_data_lookup_grids(const Object & /*object*/,
2550 const bke::pbvh::GridsNode &node)
2551{
2552 const undo::Node *unode = undo::get_node(&node, undo::Type::Mask);
2553 if (!unode) {
2554 return std::nullopt;
2555 }
2556 return unode->mask.as_span();
2557}
2558
2559} // namespace blender::ed::sculpt_paint
struct CustomDataLayer * BKE_attribute_search_for_write(AttributeOwner &owner, blender::StringRef name, eCustomDataMask type, AttrDomainMask domain_mask)
Definition attribute.cc:672
struct CustomDataLayer * BKE_attribute_find(const AttributeOwner &owner, blender::StringRef name, eCustomDataType type, blender::bke::AttrDomain domain)
Definition attribute.cc:614
@ ATTR_DOMAIN_MASK_ALL
#define ATTR_DOMAIN_MASK_COLOR
#define ATTR_DOMAIN_AS_MASK(domain)
void BKE_id_attributes_active_color_set(struct ID *id, std::optional< blender::StringRef > name)
Definition attribute.cc:985
Depsgraph * CTX_data_ensure_evaluated_depsgraph(const bContext *C)
Object * CTX_data_active_object(const bContext *C)
Scene * CTX_data_scene(const bContext *C)
RegionView3D * CTX_wm_region_view3d(const bContext *C)
ViewLayer * CTX_data_view_layer(const bContext *C)
CustomData interface, see also DNA_customdata_types.h.
void CustomData_count_memory(const CustomData &data, int totelem, blender::MemoryCounter &memory)
int CustomData_get_offset_named(const CustomData *data, eCustomDataType type, blender::StringRef name)
void CustomData_free(CustomData *data)
void CustomData_init_from(const CustomData *source, CustomData *dest, eCustomDataMask mask, int totelem)
const CustomData_MeshMasks CD_MASK_MESH
#define G_MAIN
KeyBlock * BKE_keyblock_from_object(Object *ob)
Definition key.cc:1889
KeyBlock * BKE_keyblock_find_name(Key *key, const char name[])
Definition key.cc:1916
Key * BKE_key_from_object(Object *ob)
Definition key.cc:1791
void BKE_view_layer_synced_ensure(const Scene *scene, ViewLayer *view_layer)
Object * BKE_view_layer_active_object_get(const ViewLayer *view_layer)
void BKE_mesh_clear_geometry(Mesh *mesh)
blender::Array< blender::float3 > BKE_multires_create_deformed_base_mesh_vert_coords(Depsgraph *depsgraph, Object *object, MultiresModifierData *mmd)
Definition multires.cc:119
@ MULTIRES_HIDDEN_MODIFIED
@ MULTIRES_COORDS_MODIFIED
void multires_flush_sculpt_updates(Object *object)
Definition multires.cc:270
void multires_mark_as_modified(Depsgraph *depsgraph, Object *object, MultiresModifiedFlags flags)
Definition multires.cc:247
General operations, lookup, etc. for blender objects.
Mesh * BKE_object_get_original_mesh(const Object *object)
bool BKE_sculptsession_use_pbvh_draw(const Object *ob, const RegionView3D *rv3d)
Definition paint.cc:3068
void BKE_sculptsession_free_deformMats(SculptSession *ss)
Definition paint.cc:2239
MultiresModifierData * BKE_sculpt_multires_active(const Scene *scene, Object *ob)
Definition paint.cc:2513
void BKE_sculpt_update_object_for_edit(Depsgraph *depsgraph, Object *ob_orig, bool is_paint_tool)
Definition paint.cc:2797
void BKE_sculptsession_free_pbvh(Object &object)
Definition paint.cc:2286
PaintMode BKE_paintmode_get_active_from_context(const bContext *C)
Definition paint.cc:505
const blender::Set< BMFace *, 0 > & BKE_pbvh_bmesh_node_faces(blender::bke::pbvh::BMeshNode *node)
const blender::Set< BMVert *, 0 > & BKE_pbvh_bmesh_node_unique_verts(blender::bke::pbvh::BMeshNode *node)
const blender::Set< BMVert *, 0 > & BKE_pbvh_bmesh_node_other_verts(blender::bke::pbvh::BMeshNode *node)
void BKE_pbvh_sync_visibility_from_verts(Object &object)
Definition pbvh.cc:2546
void BKE_scene_graph_evaluated_ensure(Depsgraph *depsgraph, Main *bmain)
Definition scene.cc:2626
CCGKey BKE_subdiv_ccg_key_top_level(const SubdivCCG &subdiv_ccg)
blender::BitGroupVector & BKE_subdiv_ccg_grid_hidden_ensure(SubdivCCG &subdiv_ccg)
void BKE_subdiv_ccg_grid_hidden_free(SubdivCCG &subdiv_ccg)
@ UNDOTYPE_FLAG_DECODE_ACTIVE_STEP
const UndoType * BKE_UNDOSYS_TYPE_SCULPT
eUndoPushReturn BKE_undosys_step_push(UndoStack *ustack, bContext *C, const char *name)
eUndoStepDir
@ STEP_INVALID
@ STEP_UNDO
@ STEP_REDO
UndoStep * BKE_undosys_step_push_init_with_type(UndoStack *ustack, bContext *C, const char *name, const UndoType *ut)
#define BKE_undosys_stack_limit_steps_and_memory_defaults(ustack)
UndoStep * BKE_undosys_stack_init_or_active_with_type(UndoStack *ustack, const UndoType *ut)
#define BLI_assert_unreachable()
Definition BLI_assert.h:93
#define BLI_assert(a)
Definition BLI_assert.h:46
#define BLI_NOINLINE
int BLI_findindex(const ListBase *listbase, const void *vlink) ATTR_WARN_UNUSED_RESULT ATTR_NONNULL(1)
Definition listbase.cc:586
#define STRNCPY_UTF8(dst, src)
@ TASK_PRIORITY_LOW
Definition BLI_task.h:52
void BLI_task_pool_work_and_wait(TaskPool *pool)
Definition task_pool.cc:535
TaskPool * BLI_task_pool_create_background(void *userdata, eTaskPriority priority)
Definition task_pool.cc:489
void BLI_task_pool_free(TaskPool *pool)
Definition task_pool.cc:521
void BLI_task_pool_push(TaskPool *pool, TaskRunFunction run, void *taskdata, bool free_taskdata, TaskFreeFunction freedata)
Definition task_pool.cc:526
#define SCOPED_TIMER_AVERAGED(name)
Definition BLI_timeit.hh:76
#define ELEM(...)
#define CLOG_WARN(clg_ref,...)
Definition CLG_log.h:189
void DEG_id_tag_update(ID *id, unsigned int flags)
@ ID_RECALC_SHADING
Definition DNA_ID.h:1094
@ ID_RECALC_GEOMETRY
Definition DNA_ID.h:1074
#define ID_REAL_USERS(id)
Definition DNA_ID.h:676
#define MAX_CUSTOMDATA_LAYER_NAME
#define CD_MASK_PROP_ALL
@ CD_PROP_FLOAT
@ ME_SCULPT_DYNAMIC_TOPOLOGY
@ OB_MODE_SCULPT
@ OB_MODE_VERTEX_PAINT
Object is a sort of wrapper for general info.
@ OB_MESH
void ED_undo_push(bContext *C, const char *str)
Definition ed_undo.cc:98
UndoStack * ED_undo_stack_get()
Definition ed_undo.cc:442
void ED_undosys_stack_memfile_id_changed_tag(UndoStack *ustack, ID *id)
#define C
Definition RandGen.cpp:29
#define ND_DATA
Definition WM_types.hh:509
#define NC_OBJECT
Definition WM_types.hh:379
volatile int lock
void BM_data_layer_add_named(BMesh *bm, CustomData *data, int type, const StringRef name)
BMesh const char void * data
void BM_log_all_added(BMesh *bm, BMLog *log)
Definition bmesh_log.cc:737
void BM_log_original_vert_data(BMLog *log, BMVert *v, const float **r_co, const float **r_no)
Definition bmesh_log.cc:806
void BM_log_face_modified(BMLog *log, BMFace *f)
Definition bmesh_log.cc:676
void BM_log_redo(BMesh *bm, BMLog *log)
Definition bmesh_log.cc:618
void BM_log_vert_before_modified(BMLog *log, BMVert *v, const int cd_vert_mask_offset)
Definition bmesh_log.cc:652
void BM_log_entry_drop(BMLogEntry *entry)
Definition bmesh_log.cc:526
BMLog * BM_log_from_existing_entries_create(BMesh *bm, BMLogEntry *entry)
Definition bmesh_log.cc:444
void BM_log_undo(BMesh *bm, BMLog *log)
Definition bmesh_log.cc:597
void BM_log_before_all_removed(BMesh *bm, BMLog *log)
Definition bmesh_log.cc:765
BMLogEntry * BM_log_entry_add(BMLog *log)
Definition bmesh_log.cc:512
const BMAllocTemplate bm_mesh_allocsize_default
Definition bmesh_mesh.cc:30
BMesh * BM_mesh_create(const BMAllocTemplate *allocsize, const BMeshCreateParams *params)
BMesh Make Mesh.
void BM_mesh_normals_update(BMesh *bm)
BPy_StructRNA * depsgraph
static DBVT_INLINE btScalar size(const btDbvtVolume &a)
Definition btDbvt.cpp:52
int64_t size() const
Definition BLI_array.hh:256
Span< T > as_span() const
Definition BLI_array.hh:243
MutableSpan< T > as_mutable_span()
Definition BLI_array.hh:248
IndexRange index_range() const
Definition BLI_array.hh:360
void fill(const T &value) const
Definition BLI_array.hh:272
void reinitialize(const int64_t new_size)
Definition BLI_array.hh:419
bool is_empty() const
Definition BLI_array.hh:264
static AttributeOwner from_id(ID *id)
Definition attribute.cc:44
constexpr int64_t size_in_bytes() const
Definition BLI_span.hh:268
constexpr int64_t size() const
Definition BLI_span.hh:252
constexpr Span take_front(int64_t n) const
Definition BLI_span.hh:193
int64_t size() const
void append(const T &value)
Span< T > as_span() const
Span< T > as_span() const
Definition BLI_array.hh:243
void reinitialize(const int64_t new_size)
Definition BLI_array.hh:419
static IndexMask from_predicate(const IndexMask &universe, GrainSize grain_size, IndexMaskMemory &memory, Fn &&predicate)
constexpr IndexRange take_front(int64_t n) const
constexpr int64_t size() const
Definition BLI_span.hh:493
constexpr MutableSpan slice(const int64_t start, const int64_t size) const
Definition BLI_span.hh:573
constexpr bool is_empty() const
Definition BLI_span.hh:509
constexpr Span< T > as_span() const
Definition BLI_span.hh:661
constexpr void fill_indices(Span< IndexT > indices, const T &value) const
Definition BLI_span.hh:526
constexpr MutableSpan take_front(const int64_t n) const
Definition BLI_span.hh:629
NonMovable(NonMovable &&other)=delete
constexpr int64_t size_in_bytes() const
Definition BLI_span.hh:268
constexpr const T * data() const
Definition BLI_span.hh:215
constexpr int64_t size() const
Definition BLI_span.hh:252
constexpr IndexRange index_range() const
Definition BLI_span.hh:401
constexpr bool is_empty() const
Definition BLI_span.hh:260
int64_t size() const
IndexRange index_range() const
void resize(const int64_t new_size)
Span< T > as_span() const
IndexRange index_range() const
GAttributeReader lookup(const StringRef attribute_id) const
std::optional< AttributeMetaData > lookup_meta_data(StringRef attribute_id) const
GSpanAttributeWriter lookup_or_add_for_write_span(StringRef attribute_id, AttrDomain domain, AttrType data_type, const AttributeInit &initializer=AttributeInitDefaultValue())
void tag_attribute_changed(const IndexMask &node_mask, StringRef attribute_name)
Definition pbvh.cc:676
void tag_positions_changed(const IndexMask &node_mask)
Definition pbvh.cc:635
Span< NodeT > nodes() const
void update_bounds(const Depsgraph &depsgraph, const Object &object)
Definition pbvh.cc:1386
void tag_face_sets_changed(const IndexMask &node_mask)
Definition pbvh.cc:662
void tag_masks_changed(const IndexMask &node_mask)
Definition pbvh.cc:669
void tag_visibility_changed(const IndexMask &node_mask)
Definition pbvh.cc:646
void update_visibility(const Object &object)
Definition pbvh.cc:1579
void foreach_index(Fn &&fn) const
#define str(s)
static ushort indices[]
static float verts[][3]
static float normals[][3]
VecBase< float, D > step(VecOp< float, D >, VecOp< float, D >) RET
#define LOG(level)
Definition log.h:97
#define T
static char faces[256]
IndexRange grid_range(const int grid_area, const int grid)
pbvh::Tree * pbvh_get(Object &object)
Definition paint.cc:3052
void update_mask_bmesh(const BMesh &bm, const IndexMask &node_mask, Tree &pbvh)
Definition pbvh.cc:1496
void update_mask_mesh(const Mesh &mesh, const IndexMask &node_mask, Tree &pbvh)
Definition pbvh.cc:1432
IndexMask all_leaf_nodes(const Tree &pbvh, IndexMaskMemory &memory)
Definition pbvh.cc:2628
Span< float3 > vert_normals_eval(const Depsgraph &depsgraph, const Object &object_orig)
Definition pbvh.cc:1059
void update_mask_grids(const SubdivCCG &subdiv_ccg, const IndexMask &node_mask, Tree &pbvh)
Definition pbvh.cc:1463
void store_bounds_orig(Tree &pbvh)
Definition pbvh.cc:1408
Span< int > node_face_indices_calc_grids(const SubdivCCG &subdiv_ccg, const GridsNode &node, Vector< int > &faces)
Definition pbvh.cc:1767
Span< float3 > vert_positions_eval(const Depsgraph &depsgraph, const Object &object_orig)
Definition pbvh.cc:1040
bool eval_refine_from_mesh(Subdiv *subdiv, const Mesh *mesh, Span< float3 > coarse_vert_positions)
std::optional< eCustomDataType > attr_type_to_custom_data_type(AttrType attr_type)
std::optional< AttrType > custom_data_type_to_attr_type(eCustomDataType data_type)
bool convert_attribute(AttributeOwner &owner, bke::MutableAttributeAccessor attributes, const StringRef name, const bke::AttrDomain dst_domain, const bke::AttrType dst_type, ReportList *reports)
void mode_generic_exit(Main *bmain, Depsgraph *depsgraph, Scene *scene, Object *ob)
bke::GSpanAttributeWriter active_color_attribute_for_write(Mesh &mesh)
void gather_colors_vert(OffsetIndices< int > faces, Span< int > corner_verts, GroupedSpan< int > vert_to_face_map, GSpan color_attribute, bke::AttrDomain color_domain, Span< int > verts, MutableSpan< float4 > r_colors)
void gather_colors(GSpan color_attribute, Span< int > indices, MutableSpan< float4 > r_colors)
void swap_gathered_colors(Span< int > indices, GMutableSpan color_attribute, MutableSpan< float4 > r_colors)
bke::GAttributeReader active_color_attribute(const Mesh &mesh)
static void disable(Main &bmain, Depsgraph &depsgraph, Scene &scene, Object &ob, undo::StepData *undo_step)
bke::SpanAttributeWriter< int > ensure_face_sets_mesh(Mesh &mesh)
void sync_all_from_faces(Object &object)
Definition paint_hide.cc:55
void filter_decompress(const Span< std::byte > src, Vector< std::byte > &buffer, Vector< T > &dst)
template void filter_decompress< float3 >(Span< std::byte >, Vector< std::byte > &, Vector< float3 > &)
template void filter_decompress< int >(Span< std::byte >, Vector< std::byte > &, Vector< int > &)
template void filter_compress< float3 >(Span< float3 >, Vector< std::byte > &, Vector< std::byte > &)
template void filter_compress< int >(Span< int >, Vector< std::byte > &, Vector< std::byte > &)
void filter_compress(const Span< T > src, Vector< std::byte > &filter_buffer, Vector< std::byte > &compress_buffer)
static void save_active_attribute(Object &object, SculptAttrRef *attr)
static void bmesh_enable(Object &object, const StepData &step_data)
void push_nodes(const Depsgraph &depsgraph, Object &object, const IndexMask &node_mask, const Type type)
static void store_vert_visibility_grids(const SubdivCCG &subdiv_ccg, const bke::pbvh::GridsNode &node, Node &unode)
static bool use_multires_mesh(bContext *C)
static void step_decode(bContext *C, Main *bmain, UndoStep *us_p, const eUndoStepDir dir, const bool is_final)
static void geometry_push(const Object &object)
static void store_positions_grids(const SubdivCCG &subdiv_ccg, Node &unode)
static BLI_NOINLINE void bmesh_push(const Object &object, const bke::pbvh::BMeshNode *node, Type type)
static void free_step_data(StepData &step_data)
void push_multires_mesh_begin(bContext *C, const char *str)
void push_multires_mesh_end(bContext *C, const char *str)
static void fill_node_data_mesh(const Depsgraph &depsgraph, const Object &object, const bke::pbvh::MeshNode &node, const Type type, Node &unode)
static size_t node_size_in_bytes(const Node &node)
static bool topology_matches(const StepData &step_data, const Object &object)
static SculptUndoStep * get_active_step()
static void store_face_visibility(const Mesh &mesh, Node &unode)
size_t step_memory_size_get(UndoStep *step)
static void refine_subdiv(Depsgraph *depsgraph, const SculptSession &ss, Object &object, bke::subdiv::Subdiv *subdiv)
static void bmesh_handle_dyntopo_end(bContext *C, StepData &step_data, Object &object)
static void restore_vert_visibility_mesh(Object &object, Node &unode, const MutableSpan< bool > modified_verts)
static void step_encode_init(bContext *, UndoStep *us_p)
static size_t estimate_geometry_step_size(const StepData &step_data)
static bool use_multires_undo(const StepData &step_data, const SculptSession &ss)
void push_begin_ex(const Scene &, Object &ob, const char *name)
static void step_decode_redo_impl(bContext *C, Depsgraph *depsgraph, SculptUndoStep *us)
static void restore_geometry_data(const NodeGeometry *geometry, Mesh *mesh)
void push_enter_sculpt_mode(const Scene &, Object &ob, const wmOperator *op)
static StepData * get_step_data()
static void store_vert_visibility_mesh(const Mesh &mesh, const bke::pbvh::Node &node, Node &unode)
static void set_active_layer(bContext *C, const SculptAttrRef *attr)
static void store_mask_grids(const SubdivCCG &subdiv_ccg, Node &unode)
static NodeGeometry * geometry_get(StepData &step_data)
static void store_positions_mesh(const Depsgraph &depsgraph, const Object &object, Node &unode)
static void fill_node_data_grids(const Object &object, const bke::pbvh::GridsNode &node, const Type type, Node &unode)
static void geometry_free_data(NodeGeometry *geometry)
static void step_decode_undo(bContext *C, Depsgraph *depsgraph, SculptUndoStep *us, const bool is_final)
static void restore_hidden_face(Object &object, Node &unode, const MutableSpan< bool > modified_faces)
void geometry_begin_ex(const Scene &scene, Object &ob, const char *name)
static void restore_position_grids(const MutableSpan< float3 > positions, const CCGKey &key, PositionUndoStorage &undo_data, const MutableSpan< bool > modified_grids)
static void step_free(UndoStep *us_p)
static void restore_position_mesh(Object &object, PositionUndoStorage &undo_data, const MutableSpan< bool > modified_verts)
static const Node * get_node(const bke::pbvh::Node *node, const Type type)
static bool step_encode(bContext *, Main *bmain, UndoStep *us_p)
static void restore_vert_visibility_grids(SubdivCCG &subdiv_ccg, Node &unode, const MutableSpan< bool > modified_grids)
static bool indices_contain_true(const Span< bool > data, const Span< int > indices)
static Node * ensure_node(StepData &step_data, const bke::pbvh::Node &node, bool &r_new)
static void save_common_data(Object &ob, SculptUndoStep *us)
void geometry_begin(const Scene &scene, Object &ob, const wmOperator *op)
static void restore_mask_grids(Object &object, Node &unode, const MutableSpan< bool > modified_grids)
static void step_decode_undo_impl(bContext *C, Depsgraph *depsgraph, SculptUndoStep *us)
static void restore_list(bContext *C, Depsgraph *depsgraph, StepData &step_data)
static void step_decode_redo(bContext *C, Depsgraph *depsgraph, SculptUndoStep *us)
static void restore_color(Object &object, StepData &step_data, const MutableSpan< bool > modified_verts)
static void store_geometry_data(NodeGeometry *geometry, const Object &object)
static void store_mask_mesh(const Mesh &mesh, Node &unode)
static void bmesh_restore_generic(StepData &step_data, Object &object)
void push_node(const Depsgraph &depsgraph, const Object &object, const bke::pbvh::Node *node, const Type type)
static bool restore_active_shape_key(bContext &C, Depsgraph &depsgraph, const StepData &step_data, Object &object)
static void restore_mask_mesh(Object &object, Node &unode, const MutableSpan< bool > modified_verts)
static void store_color(const Mesh &mesh, const bke::pbvh::MeshNode &node, Node &unode)
void push_begin(const Scene &scene, Object &ob, const wmOperator *op)
void restore_from_bmesh_enter_geometry(const StepData &step_data, Mesh &mesh)
static void store_face_sets(const Mesh &mesh, Node &unode)
static bool restore_face_sets(Object &object, Node &unode, const MutableSpan< bool > modified_face_set_faces)
static int bmesh_restore(bContext *C, Depsgraph &depsgraph, StepData &step_data, Object &object)
static size_t calculate_node_geometry_allocated_size(const NodeGeometry &node_geometry)
void push_end_ex(Object &ob, const bool use_nested_undo)
static void swap_indexed_data(MutableSpan< T > full, const Span< int > indices, MutableSpan< T > indexed)
static void restore_geometry(StepData &step_data, Object &object)
static void bmesh_handle_dyntopo_begin(bContext *C, StepData &step_data, Object &object)
void object_sculpt_mode_enter(Main &bmain, Depsgraph &depsgraph, Scene &scene, Object &ob, bool force_dyntopo, ReportList *reports)
std::optional< Span< float > > orig_mask_data_lookup_grids(const Object &object, const bke::pbvh::GridsNode &node)
std::optional< Span< int > > orig_face_set_data_lookup_mesh(const Object &object, const bke::pbvh::MeshNode &node)
void gather_data_grids(const SubdivCCG &subdiv_ccg, Span< T > src, Span< int > grids, MutableSpan< T > node_data)
Definition sculpt.cc:6405
std::optional< OrigPositionData > orig_position_data_lookup_grids(const Object &object, const bke::pbvh::GridsNode &node)
void orig_position_data_gather_bmesh(const BMLog &bm_log, const Set< BMVert *, 0 > &verts, MutableSpan< float3 > positions, MutableSpan< float3 > normals)
void translations_from_new_positions(Span< float3 > new_positions, Span< int > verts, Span< float3 > old_positions, MutableSpan< float3 > translations)
Definition sculpt.cc:7542
std::optional< Span< float4 > > orig_color_data_lookup_mesh(const Object &object, const bke::pbvh::MeshNode &node)
void scatter_data_mesh(Span< T > src, Span< int > indices, MutableSpan< T > dst)
Definition sculpt.cc:6435
void apply_translations(Span< float3 > translations, Span< int > verts, MutableSpan< float3 > positions)
Definition sculpt.cc:7268
void gather_data_mesh(Span< T > src, Span< int > indices, MutableSpan< T > dst)
Definition sculpt.cc:6395
std::optional< Span< float > > orig_mask_data_lookup_mesh(const Object &object, const bke::pbvh::MeshNode &node)
std::optional< OrigPositionData > orig_position_data_lookup_mesh_all_verts(const Object &object, const bke::pbvh::MeshNode &node)
std::optional< OrigPositionData > orig_position_data_lookup_mesh(const Object &object, const bke::pbvh::MeshNode &node)
std::optional< Span< int > > orig_face_set_data_lookup_grids(const Object &object, const bke::pbvh::GridsNode &node)
void copy_shared_pointer(T *src_ptr, const ImplicitSharingInfo *src_sharing_info, T **r_dst_ptr, const ImplicitSharingInfo **r_dst_sharing_info)
void free_shared_data(T **data, const ImplicitSharingInfo **sharing_info)
void isolate_task(const Function &function)
Definition BLI_task.hh:248
void parallel_for(const IndexRange range, const int64_t grain_size, const Function &function, const TaskSizeHints &size_hints=detail::TaskSizeHints_Static(1))
Definition BLI_task.hh:93
Value parallel_reduce(IndexRange range, int64_t grain_size, const Value &identity, const Function &function, const Reduction &reduction)
Definition BLI_task.hh:151
void filter_transpose_delta(const uint8_t *src, uint8_t *dst, size_t items_num, size_t item_size)
VecBase< float, 4 > float4
std::mutex Mutex
Definition BLI_mutex.hh:47
VecBase< float, 3 > float3
void unfilter_transpose_delta(const uint8_t *src, uint8_t *dst, size_t items_num, size_t item_size)
const char * name
#define NO_ACTIVE_LAYER
CustomData vdata
Definition DNA_ID.h:414
char name[258]
Definition DNA_ID.h:432
char name[64]
void * data
ListBase block
bool is_memfile_undo_flush_needed
Definition BKE_main.hh:213
struct SculptSession * sculpt
bool needs_flush_to_id
Definition BKE_paint.hh:501
BMLog * bm_log
Definition BKE_paint.hh:392
KeyBlock * shapekey_active
Definition BKE_paint.hh:377
blender::float4 pivot_rot
Definition BKE_paint.hh:467
SubdivCCG * subdiv_ccg
Definition BKE_paint.hh:395
blender::float3 pivot_pos
Definition BKE_paint.hh:466
struct SculptSession::@300305335361021334214041350300054316061376210174 multires
MultiresModifierData * modifier
Definition BKE_paint.hh:373
bool deform_modifiers_active
Definition BKE_paint.hh:401
blender::Array< blender::float3 > normals
blender::BitGroupVector grid_hidden
blender::Array< float > masks
blender::Array< blender::float3 > positions
size_t data_size
UndoStep * prev
UndoStep * next
const UndoType * type
bool use_memfile_step
void(* step_encode_init)(bContext *C, UndoStep *us)
const char * name
void(* step_free)(UndoStep *us)
bool(* poll)(struct bContext *C)
void(* step_decode)(bContext *C, Main *bmain, UndoStep *us, eUndoStepDir dir, bool is_final)
bool(* step_encode)(bContext *C, Main *bmain, UndoStep *us)
Span< int > all_verts() const
static std::optional< ShapeKeyData > from_object(Object &object)
Definition sculpt.cc:7416
const ImplicitSharingInfo * face_offsets_sharing_info
static void compress_fn(TaskPool *, void *task_data)
Vector< std::unique_ptr< Node > > nodes_to_compress
char name[MAX_CUSTOMDATA_LAYER_NAME]
Vector< std::unique_ptr< Node > > nodes
struct blender::ed::sculpt_paint::undo::StepData::@003361057031346165147254064063355303362213163250 bmesh
std::unique_ptr< PositionUndoStorage > position_step_storage
Map< const bke::pbvh::Node *, std::unique_ptr< Node > > undo_nodes_by_pbvh_node
struct blender::ed::sculpt_paint::undo::StepData::@366317265143151251156360154171061027264271376261 mesh
struct blender::ed::sculpt_paint::undo::StepData::@100246257151307200201214251232151127242076121134 grids
const char * name
Definition WM_types.hh:1033
struct wmOperatorType * type
i
Definition text_draw.cc:230
void WM_event_add_notifier(const bContext *C, uint type, void *reference)
void WM_file_tag_modified()
Definition wm_files.cc:177