// Set when an update requires dependency-graph relations to be rebuilt; checked
// at the end of the main update pass (see use near original line 415).
316 bool needs_relations_update_ =
false;
// Constructor member-initializer fragment: stores the main database pointer and
// update params, and builds the tree-relations helper from `bmain`.
320 : bmain_(bmain), params_(
params), relations_(bmain)
// NOTE(review): fragmentary, hard-wrapped view of the main update driver. What
// is visible: per-tree update results are collected in `update_result_by_tree_`,
// a fast path is taken when there is a single root tree whose interface/output
// did not change, changed flags are reset, and registered callbacks are invoked.
329 changed_ntrees.
append(ntree);
342 bool is_single_tree_update =
false;
// Single-root fast path: if the one updated tree changed neither its interface
// nor its output, the remaining trees do not need to be processed.
344 if (root_ntrees.
size() == 1) {
350 update_result_by_tree_.add_new(ntree,
result);
351 if (!
result.interface_changed && !
result.output_changed) {
352 is_single_tree_update =
true;
356 if (!is_single_tree_update) {
358 for (
bNodeTree *ntree : ntrees_in_order) {
362 if (!update_result_by_tree_.contains(ntree)) {
364 update_result_by_tree_.add_new(ntree,
result);
368 if (
result.output_changed) {
373 if (
result.interface_changed) {
// Post-pass over all recorded results: reset flags and notify dependents.
381 for (
const auto item : update_result_by_tree_.items()) {
385 this->reset_changed_flags(*ntree);
387 if (
result.interface_changed) {
389 relations_.ensure_modifier_users();
391 Object *
object = pair.first;
401 if (
result.output_changed) {
// Invalidate the cached lazy-function graph when the tree's output changed.
402 ntree->
runtime->geometry_nodes_lazy_function_graph_info_mutex.tag_dirty();
406 ID &owner_or_self_id = owner_id ? *owner_id : ntree->
id;
// Notify registered callbacks about the changed tree (and its owner ID).
407 if (params_.tree_changed_fn) {
408 params_.tree_changed_fn(*ntree, owner_or_self_id);
410 if (params_.tree_output_changed_fn &&
result.output_changed) {
411 params_.tree_output_changed_fn(*ntree, owner_or_self_id);
// Presumably tags depsgraph relations for rebuild — TODO confirm in full file.
415 if (needs_relations_update_) {
// DFS mark states for the topological sort of trees (fragmentary view; enum
// body lines are not visible here).
426 enum class ToposortMark {
// Computes an update order over the given trees: every tree is first marked
// None, then unvisited trees are visited recursively; the visit appends in
// post-order, so the final result is reversed before returning.
446 ToposortMarkMap marks;
447 for (
bNodeTree *ntree : trees_to_update) {
448 marks.add_new(ntree, ToposortMark::None);
450 for (
bNodeTree *ntree : trees_to_update) {
451 if (marks.lookup(ntree) == ToposortMark::None) {
// Visit returns false on a cycle (negated here into `cycle_detected`).
452 const bool cycle_detected = !this->get_tree_update_order__visit_recursive(
453 ntree, marks, sorted_ntrees);
460 std::reverse(sorted_ntrees.
begin(), sorted_ntrees.
end());
462 return sorted_ntrees;
// Classic three-color DFS visit used by the toposort above: Permanent = done,
// Temporary = on the current DFS path (re-entering one indicates a cycle).
// NOTE(review): the early-return bodies for the Permanent/Temporary checks are
// not visible in this fragmentary view.
465 bool get_tree_update_order__visit_recursive(
bNodeTree *ntree,
466 ToposortMarkMap &marks,
467 Vector<bNodeTree *> &sorted_ntrees)
469 ToposortMark &mark = marks.lookup(ntree);
470 if (mark == ToposortMark::Permanent) {
473 if (mark == ToposortMark::Temporary) {
478 mark = ToposortMark::Temporary;
// Recurse into all trees that contain a group node referencing this tree.
480 for (
const TreeNodePair &pair : relations_.get_group_node_users(ntree)) {
481 this->get_tree_update_order__visit_recursive(pair.first, marks, sorted_ntrees);
// Post-order append; the caller reverses the vector afterwards.
483 sorted_ntrees.
append(ntree);
485 mark = ToposortMark::Permanent;
// Worklist traversal: starting from the root trees, follows "group node user"
// edges to collect every tree that is (transitively) affected by the roots.
489 Set<bNodeTree *> get_trees_to_update(Span<bNodeTree *> root_ntrees)
491 relations_.ensure_group_node_users();
493 Set<bNodeTree *> reachable_trees;
494 VectorSet<bNodeTree *> trees_to_check = root_ntrees;
496 while (!trees_to_check.
is_empty()) {
// Set::add returns true on first insertion, so each tree is expanded once.
498 if (reachable_trees.
add(ntree)) {
499 for (
const TreeNodePair &pair : relations_.get_group_node_users(ntree)) {
500 trees_to_check.
add(pair.first);
505 return reachable_trees;
// Runs the full per-tree update pipeline and reports whether the tree's
// interface and/or output changed. NOTE(review): fragmentary view — several
// steps between the visible lines are missing; the visible order is: clear
// cached error state, update panels/sockets/nodes/links, propagate runtime
// flags and enum definitions, infer fields and socket shapes, recompute eval
// dependencies, then detect output changes and validate links.
508 TreeUpdateResult update_tree(
bNodeTree &ntree)
// Clear per-update caches of errors before recomputing them below.
512 ntree.
runtime->link_errors.clear();
513 ntree.
runtime->invalid_zone_output_node_ids.clear();
514 ntree.
runtime->shader_node_errors.clear();
516 if (this->update_panel_toggle_names(ntree)) {
517 result.interface_changed =
true;
520 this->update_socket_link_and_use(ntree);
521 this->update_individual_nodes(ntree);
522 this->update_internal_links(ntree);
523 this->update_generic_callback(ntree);
524 this->remove_unused_previews_when_necessary(ntree);
525 this->make_node_previews_dirty(ntree);
527 this->propagate_runtime_flags(ntree);
529 if (this->propagate_enum_definitions(ntree)) {
530 result.interface_changed =
true;
536 result.interface_changed =
true;
542 result.interface_changed =
true;
547 this->update_from_field_inference(ntree);
549 result.interface_changed =
true;
552 result.interface_changed =
true;
557 this->update_socket_shapes(ntree);
561 this->update_eval_dependencies(ntree);
564 result.output_changed = this->check_if_output_changed(ntree);
// Socket link/use state is refreshed again after the steps above may have
// changed link availability.
566 this->update_socket_link_and_use(ntree);
567 this->update_link_validation(ntree);
569 if (this->update_nested_node_refs(ntree)) {
570 result.interface_changed =
true;
578 result.interface_changed =
true;
// NOTE(review): fragments of several small routines. First: iterating all
// nodes while tracking seen identifiers in a set (purpose of the set not
// visible here — TODO confirm). Then: caching each socket's single "link"
// pointer from its directly-linked links, then updating socket used tags.
583 Set<int32_t> node_identifiers;
584 const Span<const bNode *> nodes = ntree.all_nodes();
586 const bNode &node = *nodes[
i];
// Refresh each socket's convenience `link` pointer: null when unlinked,
// otherwise the first directly-linked link.
598 tree.ensure_topology_cache();
600 if (socket->directly_linked_links().is_empty()) {
601 socket->link =
nullptr;
604 socket->link = socket->directly_linked_links()[0];
608 this->update_socket_used_tags(
tree);
// A socket counts as "linked" when it has at least one directly-linked link.
613 tree.ensure_topology_cache();
615 const bool socket_is_linked = !socket->directly_linked_links().is_empty();
// Updates each node that needs it (per should_update_individual_node); for
// undefined node types the cached declarations of the node and its sockets are
// freed/cleared. NOTE(review): fragmentary view — the per-type update call
// between the visible lines is not shown.
620 void update_individual_nodes(
bNodeTree &ntree)
622 for (
bNode *node : ntree.all_nodes()) {
624 if (this->should_update_individual_node(ntree, *node)) {
625 bke::bNodeType &ntype = *node->typeinfo;
627 this->remove_unused_geometry_nodes_viewer_sockets(ntree, *node);
636 else if (node->is_undefined()) {
// Undefined nodes cannot keep a declaration; drop node and socket declarations.
639 delete node->runtime->declaration;
640 node->runtime->declaration =
nullptr;
642 socket->runtime->declaration =
nullptr;
645 socket->runtime->declaration =
nullptr;
// Decides whether a node must be re-evaluated during this update. Visible
// special cases: group input/output nodes, and zone nodes (checked through
// their corresponding output node). NOTE(review): most condition bodies are
// outside this fragmentary view.
655 bool should_update_individual_node(
const bNodeTree &ntree,
const bNode &node)
668 if (node.is_group_input() || node.is_group_output()) {
675 if (
const bNode *output_node = zone_type.get_corresponding_output(ntree, node)) {
// Removes viewer-node items whose input socket is not linked. Indices are
// collected first and then processed in reverse order so that removals do not
// invalidate the remaining indices.
684 void remove_unused_geometry_nodes_viewer_sockets(
bNodeTree &ntree,
bNode &viewer_node)
686 ntree.ensure_topology_cache();
687 Vector<int> item_indices_to_remove;
689 for (
const int i : IndexRange(storage.items_num)) {
695 const bNodeSocket *socket = viewer_node.input_by_identifier(identifier_str.c_str());
699 if (!socket->is_directly_linked()) {
700 item_indices_to_remove.
append(
i);
// Reverse so higher indices are removed first (keeps lower indices valid).
703 std::reverse(item_indices_to_remove.
begin(), item_indices_to_remove.
end());
704 for (
const int i : item_indices_to_remove) {
707 &storage.active_index,
// Value type describing an expected internal (mute/pass-through) link inside a
// node; `multi_input_sort_id` orders links targeting a multi-input socket.
713 struct InternalLink {
716 int multi_input_sort_id = 0;
// Fragment of a helper that scans links and skips those coming from dangling
// reroutes (see first_non_dangling_link usage in update_internal_links).
722 const Span<const bNodeLink *> links)
const
725 if (!link->fromnode->is_dangling_reroute()) {
// Recomputes the internal (muted pass-through) links of every node that needs
// updating: for each available output socket, an internally-linked input is
// chosen and an expected InternalLink is built; the node's cached internal
// links are replaced only when they differ from the expected set.
732 void update_internal_links(
bNodeTree &ntree)
734 bke::node_tree_runtime::AllowUsingOutdatedInfo allow_outdated_info{ntree};
735 ntree.ensure_topology_cache();
736 for (
bNode *node : ntree.all_nodes()) {
737 if (!this->should_update_individual_node(ntree, *node)) {
742 for (
const bNodeSocket *output_socket : node->output_sockets()) {
743 if (!output_socket->is_available()) {
// Sockets whose declaration forbids mute links get no internal link.
746 if (output_socket->runtime->declaration &&
747 output_socket->runtime->declaration->no_mute_links)
751 const bNodeSocket *input_socket = this->find_internally_linked_input(ntree, output_socket);
752 if (input_socket ==
nullptr) {
756 const Span<const bNodeLink *> connected_links = input_socket->directly_linked_links();
757 const bNodeLink *connected_link = first_non_dangling_link(ntree, connected_links);
760 std::max<int>(0, connected_links.
size() - 1);
761 expected_internal_links.
append(InternalLink{
const_cast<bNodeSocket *
>(input_socket),
// Cheap size check first; only then compare the actual link contents.
767 if (node->
runtime->internal_links.size() != expected_internal_links.
size()) {
768 this->update_internal_links_in_node(ntree, *node, expected_internal_links);
772 const bool all_expected_internal_links_exist = std::all_of(
773 node->
runtime->internal_links.begin(),
774 node->
runtime->internal_links.end(),
776 const InternalLink internal_link{link.fromsock, link.tosock, link.multi_input_sort_id};
777 return expected_internal_links.
as_span().contains(internal_link);
// Unchanged set: nothing to do; otherwise rebuild the node's internal links.
780 if (all_expected_internal_links_exist) {
784 this->update_internal_links_in_node(ntree, *node, expected_internal_links);
// NOTE(review): body fragment of the input-selection helper (signature above
// this view; called as find_internally_linked_input). If the node type supplies
// its own `internally_linked_input` callback, defer to it; otherwise pick the
// best candidate input by priority, preferring linked sockets as a tie-breaker.
791 const bNode &node = output_socket->owner_node();
792 if (node.
typeinfo->internally_linked_input) {
793 return node.
typeinfo->internally_linked_input(ntree, node, *output_socket);
797 int selected_priority = -1;
798 bool selected_is_linked =
false;
799 for (
const bNodeSocket *input_socket : node.input_sockets()) {
800 if (!input_socket->is_available()) {
// Declarations can opt a socket out of mute links entirely.
803 if (input_socket->
runtime->declaration && input_socket->
runtime->declaration->no_mute_links)
812 const bool is_linked = input_socket->is_directly_linked();
813 const bool is_preferred = priority > selected_priority || (is_linked && !selected_is_linked);
817 selected_socket = input_socket;
818 selected_priority = priority;
819 selected_is_linked = is_linked;
821 return selected_socket;
// Replaces a node's cached internal links with the freshly computed expected
// set; clears, reserves, then appends one bNodeLink per InternalLink.
824 void update_internal_links_in_node(
bNodeTree &ntree,
826 Span<InternalLink> internal_links)
828 node.
runtime->internal_links.clear();
829 node.
runtime->internal_links.reserve(internal_links.
size());
830 for (
const InternalLink &internal_link : internal_links) {
835 link.
tosock = internal_link.to;
838 node.
runtime->internal_links.append(link);
// Invokes the tree-type's generic `update` callback, if the type defines one
// (early-out when the callback pointer is null).
843 void update_generic_callback(
bNodeTree &ntree)
845 if (ntree.
typeinfo->update ==
nullptr) {
// Skips preview removal when the tree's changed flags are all within an
// allowed set (i.e. no change that could orphan previews happened).
851 void remove_unused_previews_when_necessary(
bNodeTree &ntree)
856 if ((ntree.
runtime->changed_flag & allowed_flags) == ntree.
runtime->changed_flag) {
// Bumps the tree's preview refresh counter and recurses into the trees
// referenced by group nodes so nested previews are invalidated too.
862 void make_node_previews_dirty(
bNodeTree &ntree)
864 ntree.
runtime->previews_refresh_state++;
865 for (
bNode *node : ntree.all_nodes()) {
866 if (!node->is_group()) {
870 this->make_node_previews_dirty(*nested_tree);
// Recomputes the tree's runtime flags from scratch: flags are cleared and then
// re-derived from contained group nodes and from the presence of specific node
// types (image/environment textures, shader outputs, simulation zones).
875 void propagate_runtime_flags(
const bNodeTree &ntree)
877 ntree.ensure_topology_cache();
879 ntree.
runtime->runtime_flag = 0;
// Inherit flags from nested group trees (null when the group datablock is
// missing).
881 for (
const bNode *group_node : ntree.group_nodes()) {
883 if (group !=
nullptr) {
890 for (
const StringRefNull idname : {
"ShaderNodeTexImage",
"ShaderNodeTexEnvironment"}) {
891 for (
const bNode *node : ntree.nodes_by_type(idname)) {
900 for (
const StringRefNull idname : {
"ShaderNodeOutputMaterial",
901 "ShaderNodeOutputLight",
902 "ShaderNodeOutputWorld",
903 "ShaderNodeOutputAOV"})
905 const Span<const bNode *> nodes = ntree.nodes_by_type(idname);
914 if (!ntree.nodes_by_type(
"GeometryNodeSimulationOutput").is_empty()) {
// Applies field-inference results to nodes; the visible part walks Bake nodes
// and their items, checking per-socket whether the value may be a field.
920 void update_from_field_inference(
bNodeTree &ntree)
925 for (
bNode *node : ntree.nodes_by_type(
"GeometryNodeBake")) {
927 for (
const int i : IndexRange(storage.
items_num)) {
930 if (socket.may_be_field()) {
// Maps a socket's (declared or inferred) structure type to a display shape.
// When `use_inferred_structure_type` is set, the runtime-inferred structure
// type is used instead of the declared one.
937 static int get_socket_shape(
const bNodeSocket &socket,
938 const bool use_inferred_structure_type =
false)
943 const SocketDeclaration *decl = socket.
runtime->declaration;
950 const StructureType display_structure_type = use_inferred_structure_type ?
951 socket.
runtime->inferred_structure_type :
// One shape per structure kind; case bodies are outside this fragmentary view.
953 switch (display_structure_type) {
954 case StructureType::Single:
956 case StructureType::Dynamic:
958 case StructureType::Field:
960 case StructureType::Grid:
962 case StructureType::List:
// Recomputes display shapes for every socket of every node. NOTE(review):
// fragmentary view — the visible structure is a per-node loop with special
// handling for item-based storages (including closure input/output item lists)
// followed by generic input/output socket loops.
969 void update_socket_shapes(
bNodeTree &ntree)
971 ntree.ensure_topology_cache();
972 for (
bNode *node : ntree.all_nodes()) {
973 if (node->is_undefined()) {
986 for (
bNodeSocket *socket : node->input_sockets()) {
989 for (
bNodeSocket *socket : node->output_sockets()) {
996 for (
const int i : IndexRange(storage.
items_num)) {
1006 for (
const int i : IndexRange(storage.
items_num)) {
// Closure nodes keep separate input/output item arrays; both are visited on
// the input- and output-node sides.
1019 closure_output_node->storage);
1020 for (
const int i : IndexRange(storage.input_items.
items_num)) {
1031 for (
const int i : IndexRange(storage.output_items.
items_num)) {
1041 for (
const int i : IndexRange(storage.input_items.
items_num)) {
1047 for (
const int i : IndexRange(storage.output_items.
items_num)) {
1057 for (
bNodeSocket *socket : node->input_sockets()) {
1060 for (
bNodeSocket *socket : node->output_sockets()) {
// Recomputes the geometry-nodes evaluation dependencies; when they differ from
// the cached ones (or no cache exists) the cache is replaced and a relations
// rebuild is requested via needs_relations_update_.
1069 void update_eval_dependencies(
bNodeTree &ntree)
1071 ntree.ensure_topology_cache();
1072 nodes::GeometryNodesEvalDependencies new_deps =
1076 if (!ntree.
runtime->geometry_nodes_eval_dependencies ||
1077 new_deps != *ntree.
runtime->geometry_nodes_eval_dependencies)
1079 needs_relations_update_ =
true;
1080 ntree.
runtime->geometry_nodes_eval_dependencies =
1081 std::make_unique<nodes::GeometryNodesEvalDependencies>(std::move(new_deps));
// Propagates menu/enum definitions through the tree (right-to-left over the
// toposort), tracking locally defined enums, clearing stale enum references,
// and pulling definitions across group nodes, menu switches, and for-each
// zone nodes. A final pass reconciles group-input sockets per interface input:
// conflicting definitions clear all enum items; a single agreed definition is
// shared (with user-count bookkeeping). NOTE(review): fragmentary view.
1085 bool propagate_enum_definitions(
bNodeTree &ntree)
1087 ntree.ensure_interface_cache();
1091 for (
bNode *node : ntree.toposort_right_to_left()) {
1092 const bool node_updated = this->should_update_individual_node(ntree, *node);
// Menu-switch nodes define their own enum items locally.
1095 if (node->is_type(
"GeometryNodeMenuSwitch")) {
1106 enum_items->remove_user_and_delete_if_last();
1108 locally_defined_enums.
append(&enum_input);
1110 else if (!node->is_group()) {
// Non-group nodes may define enums through Menu socket declarations.
1112 for (
bNodeSocket *input_socket : node->input_sockets()) {
1113 if (!input_socket->is_available()) {
1119 const auto *socket_decl =
dynamic_cast<const nodes::decl::Menu *
>(
1120 input_socket->
runtime->declaration);
1125 socket_decl->items.get());
1126 locally_defined_enums.
append(input_socket);
// Sockets without a local definition lose their enum reference before
// re-propagation below.
1131 for (
bNodeSocket *socket : node->input_sockets()) {
1133 !locally_defined_enums.
contains(socket))
1135 clear_enum_reference(*socket);
1138 for (
bNodeSocket *socket : node->output_sockets()) {
1140 clear_enum_reference(*socket);
// Pull definitions from referenced group trees (skipping missing datablocks).
1158 if (node->is_group()) {
1160 if (node->
id ==
nullptr) {
1164 group_tree->ensure_interface_cache();
1166 for (
const int socket_i : group_tree->interface_inputs().index_range()) {
1172 this->update_socket_enum_definition(
1178 else if (node->is_type(
"GeometryNodeMenuSwitch")) {
1183 this->update_socket_enum_definition(
1189 else if (node->is_type(
"GeometryNodeForeachGeometryElementInput")) {
1191 BLI_assert(node->input_sockets().size() == node->output_sockets().size());
// First two sockets are skipped; remaining inputs map 1:1 onto outputs.
1193 const IndexRange sockets = node->input_sockets().index_range().drop_front(2);
1194 for (
const int socket_i : sockets) {
1200 this->update_socket_enum_definition(
1215 this->update_socket_enum_definition(
// Reconcile enum definitions across all group-input nodes per interface input.
1226 const Span<bNode *> group_input_nodes = ntree.group_input_nodes();
1227 for (
const int interface_input_i : ntree.interface_inputs().index_range()) {
1229 *ntree.interface_inputs()[interface_input_i];
1230 if (interface_socket.
socket_type != StringRef(
"NodeSocketMenu")) {
1234 bool found_conflict =
false;
1235 for (
bNode *input_node : group_input_nodes) {
1236 const bNodeSocket &socket = input_node->output_socket(interface_input_i);
1238 if (socket_value.has_conflict()) {
1239 found_conflict =
true;
1242 if (found_enum_items ==
nullptr) {
1243 found_enum_items = socket_value.enum_items;
1245 else if (socket_value.enum_items !=
nullptr) {
// Two distinct non-null item sets for the same interface input is a conflict.
1246 if (found_enum_items != socket_value.enum_items) {
1247 found_conflict =
true;
1252 if (found_conflict) {
// On conflict: drop all enum item references (with user-count maintenance).
1254 for (
bNode *input_node : group_input_nodes) {
1255 bNodeSocket &socket = input_node->output_socket(interface_input_i);
1257 if (socket_value.enum_items) {
1258 socket_value.enum_items->remove_user_and_delete_if_last();
1259 socket_value.enum_items =
nullptr;
1264 else if (found_enum_items !=
nullptr) {
// No conflict: share the single found definition with sockets lacking one.
1266 for (
bNode *input_node : group_input_nodes) {
1267 bNodeSocket &socket = input_node->output_socket(interface_input_i);
1269 if (socket_value.enum_items ==
nullptr) {
1270 found_enum_items->add_user();
1271 socket_value.enum_items = found_enum_items;
// Propagate group-input socket definitions back onto the interface items.
1279 for (
const bNode *group_input_node : ntree.group_input_nodes()) {
1280 for (
const int socket_i : ntree.interface_inputs().index_range()) {
1284 this->update_socket_enum_definition(interface_enum_items[socket_i],
// NOTE(review): fragments of several enum-definition helpers (signatures are
// outside this view — presumably including update_socket_enum_definition and
// an interface-items updater; confirm in the full file). Visible behavior:
// reference-counted enum item sets are copied/replaced with
// remove_user_and_delete_if_last()/add_user() bookkeeping, items are rebuilt
// from an enum definition, and conflicts reset the destination pointer.
1291 bool changed =
false;
1292 for (
const int socket_i : ntree.interface_inputs().index_range()) {
1300 dst.
enum_items->remove_user_and_delete_if_last();
1309 src.
enum_items->remove_user_and_delete_if_last();
// Rebuild the item array from the definition, defaulting missing names to "".
1325 enum_items->items.reinitialize(enum_def.
items_num);
1326 for (
const int i : enum_def.items().index_range()) {
1331 dst.name = src.
name ? src.
name :
"";
1341 this->reset_enum_ptr(default_value);
// Conflicts on either side reset the destination's enum reference.
1347 if (dst.has_conflict()) {
1353 if (src.has_conflict()) {
1355 this->reset_enum_ptr(dst);
1364 this->reset_enum_ptr(dst);
1372 dst.
enum_items->remove_user_and_delete_if_last();
1380 dst.
enum_items->remove_user_and_delete_if_last();
1384 enum_items->add_user();
// Validates every link and records errors: links are invalidated (by clearing
// NODE_LINK_VALID) when a socket is unavailable, references an invalid enum,
// the link goes backwards in toposort order, the tree type's validate_link
// callback rejects the conversion, or zone constraints are violated; finally a
// structure-type check can also invalidate the link.
1389 void update_link_validation(
bNodeTree &ntree)
1392 const auto is_invalid_enum_ref = [](
const bNodeSocket &socket) ->
bool {
// Zones from the last valid state serve as a fallback for zone checks.
1401 ntree.
runtime->last_valid_zones)
1403 fallback_zones = ntree.
runtime->last_valid_zones.get();
1408 if (!link->fromsock->is_available() || !link->tosock->is_available()) {
1409 link->flag &= ~NODE_LINK_VALID;
1412 if (is_invalid_enum_ref(*link->fromsock) || is_invalid_enum_ref(*link->tosock)) {
1413 link->flag &= ~NODE_LINK_VALID;
1414 ntree.
runtime->link_errors.add(
// A link whose source comes after its target in left-to-right toposort order
// would form a cycle and is invalid.
1419 const bNode &from_node = *link->fromnode;
1420 const bNode &to_node = *link->tonode;
1421 if (from_node.
runtime->toposort_left_to_right_index >
1422 to_node.
runtime->toposort_left_to_right_index)
1424 link->flag &= ~NODE_LINK_VALID;
1425 ntree.
runtime->link_errors.add(
1430 if (ntree.
typeinfo->validate_link) {
1434 link->flag &= ~NODE_LINK_VALID;
1435 ntree.
runtime->link_errors.add(
1439 TIP_(
"Conversion is not supported"),
1440 TIP_(link->fromsock->typeinfo->label),
1441 TIP_(link->tosock->typeinfo->label))});
1445 if (fallback_zones) {
1449 ntree.
runtime->invalid_zone_output_node_ids.add(*from_zone->output_node_id);
1452 link->flag &= ~NODE_LINK_VALID;
1453 ntree.
runtime->link_errors.add(
1459 if (
const char *
error = this->get_structure_type_link_error(*link)) {
1460 link->flag &= ~NODE_LINK_VALID;
// Returns a translated error message when a link's inferred structure type on
// the source side is incompatible with what the target expects, or (by
// implication of the visible returns) null when the link is fine.
1467 const char *get_structure_type_link_error(
const bNodeLink &link)
1469 const nodes::StructureType from_inferred_type =
// Dynamic sources are compatible with everything visible here.
1471 if (from_inferred_type == StructureType::Dynamic) {
1486 if (from_inferred_type == StructureType::Single) {
1489 return TIP_(
"Input expects a single value");
1495 if (
ELEM(from_inferred_type, StructureType::Single, StructureType::Field)) {
1498 return TIP_(
"Input expects a field or single value");
1504 if (from_inferred_type == StructureType::Grid) {
1507 return TIP_(
"Input expects a volume grid");
1513 if (from_inferred_type == StructureType::List) {
1516 return TIP_(
"Input expects a list");
// NOTE(review): body fragment of check_if_output_changed (called from
// update_tree). Compares a topology hash over the tree's output sockets with
// the cached one, then falls back to a flag-based per-socket change check;
// also scans output nodes' input sockets (ignoring virtual sockets).
1524 tree.ensure_topology_cache();
// Hash comparison detects structural output changes cheaply.
1530 const uint32_t old_topology_hash =
tree.runtime->output_topology_hash;
1531 const uint32_t new_topology_hash = this->get_combined_socket_topology_hash(
1532 tree, tree_output_sockets);
1533 tree.runtime->output_topology_hash = new_topology_hash;
1545 const StringRef expression = driver->
expression;
1561 if (old_topology_hash != new_topology_hash) {
1567 if (
tree.runtime->changed_flag ==
1570 if (old_topology_hash == new_topology_hash) {
1575 if (!this->check_if_socket_outputs_changed_based_on_flags(
tree, tree_output_sockets)) {
// Inspect output nodes' inputs; virtual (extension) sockets are skipped.
1585 for (
const bNode *node :
tree.all_nodes()) {
1586 if (!this->is_output_node(*node)) {
1589 for (
const bNodeSocket *socket : node->input_sockets()) {
1590 if (!
STREQ(socket->
idname,
"NodeSocketVirtual")) {
// Classifies nodes whose state contributes to the tree's observable output:
// group outputs, warning nodes, and group nodes (further checked via the
// referenced group — condition continues past this fragmentary view).
1598 bool is_output_node(
const bNode &node)
const
1603 if (node.is_group_output()) {
1606 if (node.is_type(
"GeometryNodeWarning")) {
1613 if (node.is_group()) {
1615 if (node_group !=
nullptr &&
// NOTE(review): fragment of get_combined_socket_topology_hash — bails early on
// trees with an available link cycle, otherwise folds the per-socket hashes
// into a single combined hash.
1629 Span<const bNodeSocket *> sockets)
1631 if (
tree.has_available_link_cycle()) {
1638 for (uint32_t
hash : hashes) {
1641 return combined_hash;
// NOTE(review): fragment of the per-socket topology hash computation (function
// name not visible here — presumably the helper used by
// get_combined_socket_topology_hash; confirm in full file). Uses an explicit
// stack instead of recursion: a socket's hash is deferred until all of its
// origin/input hashes are available, then combined. Inputs hash their linked
// origins (zone outputs expand to the zone input node's inputs); outputs hash
// the owner node's available inputs, with special cases for reroutes, muted
// nodes, and the Image Texture color/alpha coupling.
1645 const Span<const bNodeSocket *> sockets)
1649 Stack<const bNodeSocket *> sockets_to_check = sockets;
1651 auto get_socket_ptr_hash = [&](
const bNodeSocket &socket) {
1657 while (!sockets_to_check.
is_empty()) {
1659 const bNode &node = socket.owner_node();
// Already computed: nothing to do for this stack entry.
1661 if (hash_by_socket_id[socket.index_in_tree()].has_value()) {
1662 sockets_to_check.
pop();
1668 if (socket.is_input()) {
1670 bool all_origins_computed =
true;
1671 bool get_value_from_origin =
false;
1673 for (
const bNodeLink *link : socket.directly_linked_links()) {
1674 if (link->is_muted()) {
1677 if (!link->is_available()) {
// Links leaving a zone's output node take their origins from the zone's input
// node instead.
1684 if (zone->output_node_id == node.
identifier) {
1685 if (
const bNode *input_node = zone->input_node()) {
1686 origin_sockets.
extend(input_node->input_sockets());
1691 for (
const bNodeSocket *origin_socket : origin_sockets) {
1692 const std::optional<uint32_t> origin_hash =
1693 hash_by_socket_id[origin_socket->index_in_tree()];
1694 if (origin_hash.has_value()) {
// Multiple origins (or a type mismatch) are mixed into the hash; a single
// same-type origin is taken over directly.
1695 if (get_value_from_origin || socket.
type != origin_socket->type) {
1696 socket_hash =
noise::hash(socket_hash, *origin_hash);
1700 socket_hash = *origin_hash;
1702 get_value_from_origin =
true;
1705 sockets_to_check.
push(origin_socket);
1706 all_origins_computed =
false;
// Retry this socket after its origins have been processed.
1709 if (!all_origins_computed) {
1713 if (!get_value_from_origin) {
1714 socket_hash = get_socket_ptr_hash(socket);
// Output sockets: require all available input hashes first.
1718 bool all_available_inputs_computed =
true;
1719 for (
const bNodeSocket *input_socket : node.input_sockets()) {
1720 if (input_socket->is_available()) {
1721 if (!hash_by_socket_id[input_socket->index_in_tree()].has_value()) {
1722 sockets_to_check.
push(input_socket);
1723 all_available_inputs_computed =
false;
1727 if (!all_available_inputs_computed) {
// Reroutes pass their single input's hash straight through.
1730 if (node.is_reroute()) {
1731 socket_hash = *hash_by_socket_id[node.input_socket(0).index_in_tree()];
// Muted nodes use the internal link's input hash when the types match.
1733 else if (node.is_muted()) {
1734 const bNodeSocket *internal_input = socket.internal_link_input();
1735 if (internal_input ==
nullptr) {
1736 socket_hash = get_socket_ptr_hash(socket);
1739 if (internal_input->
type == socket.
type) {
1740 socket_hash = *hash_by_socket_id[internal_input->index_in_tree()];
1743 socket_hash = get_socket_ptr_hash(socket);
// Regular nodes: fold every available input hash into the socket's own hash.
1748 socket_hash = get_socket_ptr_hash(socket);
1749 for (
const bNodeSocket *input_socket : node.input_sockets()) {
1750 if (input_socket->is_available()) {
1751 const uint32_t input_socket_hash = *hash_by_socket_id[input_socket->index_in_tree()];
1752 socket_hash =
noise::hash(socket_hash, input_socket_hash);
// Image Texture: color output (index 0) also depends on whether the alpha
// output is linked.
1758 if (node.is_type(
"ShaderNodeTexImage") && socket.index() == 0) {
1760 const bNodeSocket &alpha_socket = node.output_socket(1);
1762 if (alpha_socket.is_directly_linked()) {
1768 hash_by_socket_id[socket.index_in_tree()] = socket_hash;
1771 sockets_to_check.
pop();
// Collect the requested sockets' final hashes into the output array.
1777 hashes[
i] = *hash_by_socket_id[sockets[
i]->index_in_tree()];
// Flag-based change propagation: walks backwards from the given output sockets
// (deduplicated via pushed_by_socket_id) and reports whether any reachable
// change actually affects the output; changes that only touch unused internal
// links or only a parent are ignored. Includes a special case coupling the
// Normal node's dot output to its normal output.
1786 bool check_if_socket_outputs_changed_based_on_flags(
const bNodeTree &
tree,
1787 Span<const bNodeSocket *> sockets)
1791 Stack<const bNodeSocket *> sockets_to_check = sockets;
1794 pushed_by_socket_id[socket->index_in_tree()] =
true;
1797 while (!sockets_to_check.
is_empty()) {
1799 const bNode &node = socket.owner_node();
// Changes affecting only unused internal links (node not muted) or only the
// parent do not change the output.
1804 const bool only_unused_internal_link_changed = !node.is_muted() &&
1808 const bool change_affects_output = !(only_unused_internal_link_changed ||
1809 only_parent_changed);
1810 if (change_affects_output) {
// Inputs continue the walk through their directly linked origin sockets.
1814 if (socket.is_input()) {
1815 for (
const bNodeSocket *origin_socket : socket.directly_linked_sockets()) {
1816 bool &pushed = pushed_by_socket_id[origin_socket->index_in_tree()];
1818 sockets_to_check.
push(origin_socket);
// Outputs continue through the owner node's available inputs.
1824 for (
const bNodeSocket *input_socket : node.input_sockets()) {
1825 if (input_socket->is_available()) {
1826 bool &pushed = pushed_by_socket_id[input_socket->index_in_tree()];
1828 sockets_to_check.
push(input_socket);
1848 if (input_socket->is_available()) {
1849 bool &pushed = pushed_by_socket_id[input_socket->index_in_tree()];
1851 sockets_to_check.
push(input_socket);
// Normal nodes: the dot output (index 1) also depends on the normal output.
1861 if ((node.is_type(
"ShaderNodeNormal") || node.is_type(
"CompositorNodeNormal")) &&
1862 socket.index() == 1)
1865 const bNodeSocket &normal_output = node.output_socket(0);
1867 bool &pushed = pushed_by_socket_id[normal_output.index_in_tree()];
1869 sockets_to_check.
push(&normal_output);
// Rebuilds the tree's nested-node references: records existing path->id
// mappings, gathers current nested paths (through group nodes and node types
// such as simulation output and bake nodes), then reuses old ids where paths
// match and assigns new ids otherwise; the stored refs are only rewritten when
// something actually changed.
1882 bool update_nested_node_refs(
bNodeTree &ntree)
1884 ntree.ensure_topology_cache();
// Snapshot the previous mapping so stable paths keep their ids.
1887 Map<bNestedNodePath, int32_t> old_id_by_path;
1888 Set<int32_t> old_ids;
1889 for (
const bNestedNodeRef &ref : ntree.nested_node_refs_span()) {
1890 old_id_by_path.
add(ref.path, ref.id);
1891 old_ids.
add(ref.id);
// Only paths through group nodes with an assigned datablock are kept.
1898 const bNode *node = ntree.node_by_id(path.node_id);
1899 if (node && node->is_group() && node->
id) {
1901 nested_node_paths.
append(path);
1908 for (StringRefNull idname : {
"GeometryNodeSimulationOutput",
"GeometryNodeBake"}) {
1909 for (
const bNode *node : ntree.nodes_by_type(idname)) {
// Extend with refs coming from nested group trees (skip missing groups).
1915 for (
const bNode *node : ntree.group_nodes()) {
1917 if (group ==
nullptr) {
1920 for (
const int i : group->nested_node_refs_span().index_range()) {
1929 Map<int32_t, bNestedNodePath> new_path_by_id;
1934 new_path_by_id.
add(old_id, path);
1945 new_path_by_id.
add(new_id, path);
// No difference against the stored refs: report unchanged.
1949 if (!this->nested_node_refs_changed(ntree, new_path_by_id)) {
1963 for (
const auto item : new_path_by_id.
items()) {
1966 ref.
path = item.value;
// Compares the tree's stored nested-node refs with a freshly computed mapping;
// a stored ref whose id is missing from the new mapping indicates a change.
1976 bool nested_node_refs_changed(
const bNodeTree &ntree,
1977 const Map<int32_t, bNestedNodePath> &new_path_by_id)
1982 for (
const bNestedNodeRef &ref : ntree.nested_node_refs_span()) {
1983 if (!new_path_by_id.
contains(ref.id)) {
// Clears per-update change flags on the tree and all of its nodes (flag
// assignments themselves are outside this fragmentary view).
1990 void reset_changed_flags(
bNodeTree &ntree)
1993 for (
bNode *node : ntree.all_nodes()) {
// Keeps panel names in sync with their toggle sockets; returns whether any
// name changed. NOTE(review): the body continues past the end of this view.
2010 bool update_panel_toggle_names(
bNodeTree &ntree)
2012 bool changed =
false;
2013 ntree.ensure_interface_cache();
// Only rename when the panel name and toggle socket name actually differ.
2020 if (!
STREQ(panel->
name, toggle_socket->name)) {