/* SPDX-FileCopyrightText: 2023 Blender Authors
 *
 * SPDX-License-Identifier: GPL-2.0-or-later */
#include "BKE_node.hh"
#include "BKE_node_legacy_types.hh"
#include "BKE_node_runtime.hh"

#include "NOD_geometry.hh"
#include "NOD_node_declaration.hh"
#include "NOD_socket.hh"

#include "BLI_resource_scope.hh"
#include "BLI_set.hh"
#include "BLI_stack.hh"

namespace blender::bke::node_field_inferencing {

using nodes::FieldInferencingInterface;
using nodes::InputSocketFieldType;
using nodes::NodeDeclaration;
using nodes::OutputFieldDependency;
using nodes::OutputSocketFieldType;
using nodes::SocketDeclaration;

26static bool is_field_socket_type(const bNodeSocket &socket)
27{
29}
30
32{
33 for (const bNodeSocket *socket : sockets) {
34 if (!socket->owner_node().is_dangling_reroute()) {
35 return false;
36 }
37 }
38 return true;
39}
40
42 const bNodeSocket &socket)
43{
44 if (!is_field_socket_type(socket)) {
45 return InputSocketFieldType::None;
46 }
47 if (node.is_reroute()) {
48 return InputSocketFieldType::IsSupported;
49 }
50 if (node.is_group_output()) {
51 /* Outputs always support fields when the data type is correct. */
52 return InputSocketFieldType::IsSupported;
53 }
54 if (node.is_undefined()) {
55 return InputSocketFieldType::None;
56 }
57 if (node.type_legacy == NODE_CUSTOM) {
58 return InputSocketFieldType::None;
59 }
60
61 /* TODO: Ensure declaration exists. */
62 const NodeDeclaration *node_decl = node.declaration();
63
64 /* Node declarations should be implemented for nodes involved here. */
65 BLI_assert(node_decl != nullptr);
66
67 /* Get the field type from the declaration. */
68 const SocketDeclaration &socket_decl = *node_decl->inputs[socket.index()];
69 const InputSocketFieldType field_type = socket_decl.input_field_type;
70 return field_type;
71}
72
74 const bNodeSocket &socket)
75{
76 if (!is_field_socket_type(socket)) {
77 /* Non-field sockets always output data. */
79 }
80 if (node.is_reroute()) {
81 /* The reroute just forwards what is passed in. */
83 }
84 if (node.is_group_input()) {
85 /* Input nodes get special treatment in #determine_group_input_states. */
87 }
88 if (node.is_undefined()) {
90 }
91 if (node.type_legacy == NODE_CUSTOM) {
93 }
94
95 const NodeDeclaration *node_decl = node.declaration();
96
97 /* Node declarations should be implemented for nodes involved here. */
98 BLI_assert(node_decl != nullptr);
99
100 /* Use the socket declaration. */
101 const SocketDeclaration &socket_decl = *node_decl->outputs[socket.index()];
102 return socket_decl.output_field_dependency;
103}
104
106 ResourceScope &scope)
107{
108 auto &inferencing_interface = scope.construct<FieldInferencingInterface>();
109 inferencing_interface.inputs = Array<InputSocketFieldType>(node.input_sockets().size(),
110 InputSocketFieldType::None);
111 inferencing_interface.outputs = Array<OutputFieldDependency>(
112 node.output_sockets().size(), OutputFieldDependency::ForDataSource());
113 return inferencing_interface;
114}
115
122 ResourceScope &scope)
123{
124 /* Node groups already reference all required information, so just return that. */
125 if (node.is_group()) {
126 bNodeTree *group = (bNodeTree *)node.id;
127 if (group == nullptr) {
128 static const FieldInferencingInterface empty_interface;
129 return empty_interface;
130 }
131 if (!bke::node_tree_is_registered(*group)) {
132 /* This can happen when there is a linked node group that was not found (see #92799). */
133 return get_dummy_field_inferencing_interface(node, scope);
134 }
135 if (!group->runtime->field_inferencing_interface) {
136 /* This shouldn't happen because referenced node groups should always be updated first. */
138 }
139 return *group->runtime->field_inferencing_interface;
140 }
141
142 auto &inferencing_interface = scope.construct<FieldInferencingInterface>();
143
144 const Span<const bNodeSocket *> input_sockets = node.input_sockets();
145 inferencing_interface.inputs.reinitialize(input_sockets.size());
146 for (const int i : input_sockets.index_range()) {
147 inferencing_interface.inputs[i] = get_interface_input_field_type(node, *input_sockets[i]);
148 }
149
150 const Span<const bNodeSocket *> output_sockets = node.output_sockets();
151 inferencing_interface.outputs.reinitialize(output_sockets.size());
152 for (const int i : output_sockets.index_range()) {
153 inferencing_interface.outputs[i] = get_interface_output_field_dependency(node,
154 *output_sockets[i]);
155 }
156 return inferencing_interface;
157}
158
/**
 * This struct contains information for every socket. The values are propagated through the
 * network during inferencing.
 */
struct SocketFieldState {
  /* This socket starts a new field. */
  bool is_field_source = false;
  /* This socket can never become a field, because the node itself does not support it. */
  bool is_always_single = false;
  /* This socket is currently a single value. It could become a field though. */
  bool is_single = true;
  /* This socket is required to be a single value. This can be because the node itself only
   * supports this socket to be a single value, or because a node afterwards requires this to be a
   * single value. */
  bool requires_single = false;
};

177 const OutputFieldDependency &field_dependency, const bNode &node)
178{
179 const OutputSocketFieldType type = field_dependency.field_type();
180 Vector<const bNodeSocket *> input_sockets;
181 switch (type) {
182 case OutputSocketFieldType::FieldSource:
183 case OutputSocketFieldType::None: {
184 break;
185 }
186 case OutputSocketFieldType::DependentField: {
187 /* This output depends on all inputs. */
188 input_sockets.extend(node.input_sockets());
189 break;
190 }
191 case OutputSocketFieldType::PartiallyDependent: {
192 /* This output depends only on a few inputs. */
193 for (const int i : field_dependency.linked_input_indices()) {
194 input_sockets.append(&node.input_socket(i));
195 }
196 break;
197 }
198 }
199 return input_sockets;
200}
201
207 const bNodeSocket &group_output_socket,
208 const Span<const FieldInferencingInterface *> interface_by_node,
209 const Span<SocketFieldState> field_state_by_socket_id)
210{
211 if (!is_field_socket_type(group_output_socket)) {
213 }
214
215 /* Use a Set here instead of an array indexed by socket id, because we my only need to look at
216 * very few sockets. */
217 Set<const bNodeSocket *> handled_sockets;
218 Stack<const bNodeSocket *> sockets_to_check;
219
220 handled_sockets.add(&group_output_socket);
221 sockets_to_check.push(&group_output_socket);
222
223 /* Keeps track of group input indices that are (indirectly) connected to the output. */
224 Vector<int> linked_input_indices;
225
226 while (!sockets_to_check.is_empty()) {
227 const bNodeSocket *input_socket = sockets_to_check.pop();
228
229 if (!input_socket->is_directly_linked() &&
230 !field_state_by_socket_id[input_socket->index_in_tree()].is_single)
231 {
232 /* This socket uses a field as input by default. */
234 }
235
236 for (const bNodeSocket *origin_socket : input_socket->directly_linked_sockets()) {
237 const bNode &origin_node = origin_socket->owner_node();
238 const SocketFieldState &origin_state =
239 field_state_by_socket_id[origin_socket->index_in_tree()];
240
241 if (origin_state.is_field_source) {
242 if (origin_node.is_group_input()) {
243 /* Found a group input that the group output depends on. */
244 linked_input_indices.append_non_duplicates(origin_socket->index());
245 }
246 else {
247 /* Found a field source that is not the group input. So the output is always a field. */
249 }
250 }
251 else if (!origin_state.is_single) {
252 const FieldInferencingInterface &inferencing_interface =
253 *interface_by_node[origin_node.index()];
254 const OutputFieldDependency &field_dependency =
255 inferencing_interface.outputs[origin_socket->index()];
256
257 /* Propagate search further to the left. */
258 for (const bNodeSocket *origin_input_socket :
259 gather_input_socket_dependencies(field_dependency, origin_node))
260 {
261 if (!origin_input_socket->is_available()) {
262 continue;
263 }
264 if (!field_state_by_socket_id[origin_input_socket->index_in_tree()].is_single) {
265 if (handled_sockets.add(origin_input_socket)) {
266 sockets_to_check.push(origin_input_socket);
267 }
268 }
269 }
270 }
271 }
272 }
273 return OutputFieldDependency::ForPartiallyDependentField(std::move(linked_input_indices));
274}
275
277enum class FieldStateSyncResult : int8_t {
278 /* Nothing changed. */
279 NONE = 0,
280 /* State A has been modified. */
281 CHANGED_A = (1 << 0),
282 /* State B has been modified. */
283 CHANGED_B = (1 << 1),
284};
286
287
293{
294 const bool requires_single = a.requires_single || b.requires_single;
295 const bool is_single = a.is_single && b.is_single;
296
298 if (a.requires_single != requires_single || a.is_single != is_single) {
299 res |= FieldStateSyncResult::CHANGED_A;
300 }
301 if (b.requires_single != requires_single || b.is_single != is_single) {
302 res |= FieldStateSyncResult::CHANGED_B;
303 }
304
305 a.requires_single = requires_single;
306 b.requires_single = requires_single;
307 a.is_single = is_single;
308 b.is_single = is_single;
309
310 return res;
311}
312
319 const bNode &input_node,
320 const bNode &output_node,
321 const MutableSpan<SocketFieldState> field_state_by_socket_id)
322{
324 for (const int i : output_node.output_sockets().index_range()) {
325 /* First input node output is Delta Time which does not appear in the output node outputs. */
326 const bNodeSocket &input_socket = input_node.output_socket(i + 1);
327 const bNodeSocket &output_socket = output_node.output_socket(i);
328 SocketFieldState &input_state = field_state_by_socket_id[input_socket.index_in_tree()];
329 SocketFieldState &output_state = field_state_by_socket_id[output_socket.index_in_tree()];
330 res |= sync_field_states(input_state, output_state);
331 }
332 return res;
333}
334
336 const bNode &input_node,
337 const bNode &output_node,
338 const MutableSpan<SocketFieldState> field_state_by_socket_id)
339{
341 const auto &storage = *static_cast<const NodeGeometryRepeatOutput *>(output_node.storage);
342 for (const int i : IndexRange(storage.items_num)) {
343 const bNodeSocket &input_socket = input_node.output_socket(i + 1);
344 const bNodeSocket &output_socket = output_node.output_socket(i);
345 SocketFieldState &input_state = field_state_by_socket_id[input_socket.index_in_tree()];
346 SocketFieldState &output_state = field_state_by_socket_id[output_socket.index_in_tree()];
347 res |= sync_field_states(input_state, output_state);
348 }
349 return res;
350}
351
353 const bNodeTree &tree,
354 const bNode &node,
355 const MutableSpan<SocketFieldState> field_state_by_socket_id)
356{
357 tree.ensure_topology_cache();
358
359 bool need_update = false;
360
361 /* Sync field state between zone nodes and schedule another pass if necessary. */
362 switch (node.type_legacy) {
364 const auto &data = *static_cast<const NodeGeometrySimulationInput *>(node.storage);
365 if (const bNode *output_node = tree.node_by_id(data.output_node_id)) {
367 node, *output_node, field_state_by_socket_id);
368 if (bool(sync_result & FieldStateSyncResult::CHANGED_B)) {
369 need_update = true;
370 }
371 }
372 break;
373 }
375 for (const bNode *input_node : tree.nodes_by_type("GeometryNodeSimulationInput")) {
376 const auto &data = *static_cast<const NodeGeometrySimulationInput *>(input_node->storage);
377 if (node.identifier == data.output_node_id) {
379 *input_node, node, field_state_by_socket_id);
380 if (bool(sync_result & FieldStateSyncResult::CHANGED_A)) {
381 need_update = true;
382 }
383 }
384 }
385 break;
386 }
388 const auto &data = *static_cast<const NodeGeometryRepeatInput *>(node.storage);
389 if (const bNode *output_node = tree.node_by_id(data.output_node_id)) {
391 node, *output_node, field_state_by_socket_id);
392 if (bool(sync_result & FieldStateSyncResult::CHANGED_B)) {
393 need_update = true;
394 }
395 }
396 break;
397 }
399 for (const bNode *input_node : tree.nodes_by_type("GeometryNodeRepeatInput")) {
400 const auto &data = *static_cast<const NodeGeometryRepeatInput *>(input_node->storage);
401 if (node.identifier == data.output_node_id) {
403 *input_node, node, field_state_by_socket_id);
404 if (bool(sync_result & FieldStateSyncResult::CHANGED_A)) {
405 need_update = true;
406 }
407 }
408 }
409 break;
410 }
411 }
412
413 return need_update;
414}
415
417 const bNodeTree &tree,
418 const Span<const FieldInferencingInterface *> interface_by_node,
419 const MutableSpan<SocketFieldState> field_state_by_socket_id)
420{
421 const Span<const bNode *> toposort_result = tree.toposort_right_to_left();
422
423 while (true) {
424 /* Node updates may require several passes due to cyclic dependencies caused by simulation or
425 * repeat input/output nodes. */
426 bool need_update = false;
427
428 for (const bNode *node : toposort_result) {
429 const FieldInferencingInterface &inferencing_interface = *interface_by_node[node->index()];
430
431 for (const bNodeSocket *output_socket : node->output_sockets()) {
432 SocketFieldState &state = field_state_by_socket_id[output_socket->index_in_tree()];
433
434 const OutputFieldDependency &field_dependency =
435 inferencing_interface.outputs[output_socket->index()];
436
437 if (field_dependency.field_type() == OutputSocketFieldType::FieldSource) {
438 continue;
439 }
440 if (field_dependency.field_type() == OutputSocketFieldType::None) {
441 state.requires_single = true;
442 state.is_always_single = true;
443 continue;
444 }
445
446 /* The output is required to be a single value when it is connected to any input that does
447 * not support fields. */
448 for (const bNodeSocket *target_socket : output_socket->directly_linked_sockets()) {
449 if (target_socket->is_available()) {
450 state.requires_single |=
451 field_state_by_socket_id[target_socket->index_in_tree()].requires_single;
452 }
453 }
454
455 if (state.requires_single) {
456 bool any_input_is_field_implicitly = false;
458 field_dependency, *node);
459 for (const bNodeSocket *input_socket : connected_inputs) {
460 if (!input_socket->is_available()) {
461 continue;
462 }
463 if (inferencing_interface.inputs[input_socket->index()] ==
464 InputSocketFieldType::Implicit)
465 {
466 if (!input_socket->is_logically_linked()) {
467 any_input_is_field_implicitly = true;
468 break;
469 }
470 }
471 }
472 if (any_input_is_field_implicitly) {
473 /* This output isn't a single value actually. */
474 state.requires_single = false;
475 }
476 else {
477 /* If the output is required to be a single value, the connected inputs in the same
478 * node must not be fields as well. */
479 for (const bNodeSocket *input_socket : connected_inputs) {
480 field_state_by_socket_id[input_socket->index_in_tree()].requires_single = true;
481 }
482 }
483 }
484 }
485
486 /* Some inputs do not require fields independent of what the outputs are connected to. */
487 for (const bNodeSocket *input_socket : node->input_sockets()) {
488 SocketFieldState &state = field_state_by_socket_id[input_socket->index_in_tree()];
489 if (inferencing_interface.inputs[input_socket->index()] == InputSocketFieldType::None) {
490 state.requires_single = true;
491 state.is_always_single = true;
492 }
493 }
494
495 /* Find reverse dependencies and resolve conflicts, which may require another pass. */
496 if (propagate_special_data_requirements(tree, *node, field_state_by_socket_id)) {
497 need_update = true;
498 }
499 }
500
501 if (!need_update) {
502 break;
503 }
504 }
505}
506
508 const bNodeTree &tree,
509 FieldInferencingInterface &new_inferencing_interface,
510 const MutableSpan<SocketFieldState> field_state_by_socket_id)
511{
512 {
513 /* Non-field inputs never support fields. */
514 for (const int index : tree.interface_inputs().index_range()) {
515 const bNodeTreeInterfaceSocket *group_input = tree.interface_inputs()[index];
516 const bNodeSocketType *typeinfo = group_input->socket_typeinfo();
517 const eNodeSocketDatatype type = typeinfo ? typeinfo->type : SOCK_CUSTOM;
519 new_inferencing_interface.inputs[index] = InputSocketFieldType::None;
520 }
521 else if (group_input->default_input != NODE_DEFAULT_INPUT_VALUE) {
522 new_inferencing_interface.inputs[index] = InputSocketFieldType::Implicit;
523 }
524 else if (is_layer_selection_field(*group_input)) {
525 new_inferencing_interface.inputs[index] = InputSocketFieldType::Implicit;
526 }
528 new_inferencing_interface.inputs[index] = InputSocketFieldType::None;
529 }
530 }
531 }
532 /* Check if group inputs are required to be single values, because they are (indirectly)
533 * connected to some socket that does not support fields. */
534 for (const bNode *node : tree.group_input_nodes()) {
535 for (const bNodeSocket *output_socket : node->output_sockets().drop_back(1)) {
536 SocketFieldState &state = field_state_by_socket_id[output_socket->index_in_tree()];
537 const int output_index = output_socket->index();
538 if (state.requires_single) {
539 if (new_inferencing_interface.inputs[output_index] == InputSocketFieldType::Implicit) {
540 /* Don't override hard-coded implicit fields. */
541 continue;
542 }
543 new_inferencing_interface.inputs[output_index] = InputSocketFieldType::None;
544 }
545 }
546 }
547 /* If an input does not support fields, this should be reflected in all Group Input nodes. */
548 for (const bNode *node : tree.group_input_nodes()) {
549 for (const bNodeSocket *output_socket : node->output_sockets().drop_back(1)) {
550 SocketFieldState &state = field_state_by_socket_id[output_socket->index_in_tree()];
551 const bool supports_field = new_inferencing_interface.inputs[output_socket->index()] !=
552 InputSocketFieldType::None;
553 if (supports_field) {
554 state.is_single = false;
555 state.is_field_source = true;
556 }
557 else {
558 state.requires_single = true;
559 }
560 }
561 SocketFieldState &dummy_socket_state =
562 field_state_by_socket_id[node->output_sockets().last()->index_in_tree()];
563 dummy_socket_state.requires_single = true;
564 }
565}
566
568 const bNodeTree &tree,
569 const Span<const FieldInferencingInterface *> interface_by_node,
570 const MutableSpan<SocketFieldState> field_state_by_socket_id)
571{
572 const Span<const bNode *> toposort_result = tree.toposort_left_to_right();
573
574 while (true) {
575 /* Node updates may require several passes due to cyclic dependencies. */
576 bool need_update = false;
577
578 for (const bNode *node : toposort_result) {
579 if (node->is_group_input()) {
580 continue;
581 }
582
583 const FieldInferencingInterface &inferencing_interface = *interface_by_node[node->index()];
584
585 /* Update field state of input sockets, also taking into account linked origin sockets. */
586 for (const bNodeSocket *input_socket : node->input_sockets()) {
587 SocketFieldState &state = field_state_by_socket_id[input_socket->index_in_tree()];
588 if (state.is_always_single) {
589 state.is_single = true;
590 continue;
591 }
592 state.is_single = true;
593 if (!input_socket->is_directly_linked() ||
594 all_dangling_reroutes(input_socket->directly_linked_sockets()))
595 {
596 if (inferencing_interface.inputs[input_socket->index()] ==
597 InputSocketFieldType::Implicit)
598 {
599 state.is_single = false;
600 }
601 }
602 else {
603 for (const bNodeSocket *origin_socket : input_socket->directly_linked_sockets()) {
604 if (!field_state_by_socket_id[origin_socket->index_in_tree()].is_single) {
605 state.is_single = false;
606 break;
607 }
608 }
609 }
610 }
611
612 /* Update field state of output sockets, also taking into account input sockets. */
613 for (const bNodeSocket *output_socket : node->output_sockets()) {
614 SocketFieldState &state = field_state_by_socket_id[output_socket->index_in_tree()];
615 const OutputFieldDependency &field_dependency =
616 inferencing_interface.outputs[output_socket->index()];
617
618 switch (field_dependency.field_type()) {
619 case OutputSocketFieldType::None: {
620 state.is_single = true;
621 break;
622 }
623 case OutputSocketFieldType::FieldSource: {
624 state.is_single = false;
625 state.is_field_source = true;
626 break;
627 }
628 case OutputSocketFieldType::PartiallyDependent:
629 case OutputSocketFieldType::DependentField: {
630 for (const bNodeSocket *input_socket :
631 gather_input_socket_dependencies(field_dependency, *node))
632 {
633 if (!input_socket->is_available()) {
634 continue;
635 }
636 if (!field_state_by_socket_id[input_socket->index_in_tree()].is_single) {
637 state.is_single = false;
638 break;
639 }
640 }
641 break;
642 }
643 }
644 }
645
646 /* Find reverse dependencies and resolve conflicts, which may require another pass. */
647 if (propagate_special_data_requirements(tree, *node, field_state_by_socket_id)) {
648 need_update = true;
649 }
650 }
651
652 if (!need_update) {
653 break;
654 }
655 }
656}
657
659 const bNodeTree &tree,
660 FieldInferencingInterface &new_inferencing_interface,
661 const Span<const FieldInferencingInterface *> interface_by_node,
662 const Span<SocketFieldState> field_state_by_socket_id)
663{
664 const bNode *group_output_node = tree.group_output_node();
665 if (!group_output_node) {
666 return;
667 }
668
669 for (const bNodeSocket *group_output_socket : group_output_node->input_sockets().drop_back(1)) {
671 *group_output_socket, interface_by_node, field_state_by_socket_id);
672 new_inferencing_interface.outputs[group_output_socket->index()] = std::move(field_dependency);
673 }
674}
675
677 const Span<SocketFieldState> field_state_by_socket_id)
678{
679 auto get_state_to_store = [&](const SocketFieldState &state) {
680 if (state.is_always_single) {
682 }
683 if (!state.is_single) {
685 }
686 if (state.requires_single) {
688 }
690 };
691
692 Array<FieldSocketState> result(field_state_by_socket_id.size());
693 for (const int i : field_state_by_socket_id.index_range()) {
694 result[i] = get_state_to_store(field_state_by_socket_id[i]);
695 }
696 return result;
697}
698
702 ResourceScope &scope)
703{
704 for (const int i : nodes.index_range()) {
705 interface_by_node[i] = &get_node_field_inferencing_interface(*nodes[i], scope);
706 }
707}
708
710{
712 tree.ensure_topology_cache();
713 tree.ensure_interface_cache();
714
715 const Span<const bNode *> nodes = tree.all_nodes();
716 ResourceScope scope;
717 Array<const FieldInferencingInterface *> interface_by_node(nodes.size());
718 prepare_inferencing_interfaces(nodes, interface_by_node, scope);
719
720 /* Create new inferencing interface for this node group. */
721 std::unique_ptr<FieldInferencingInterface> new_inferencing_interface =
722 std::make_unique<FieldInferencingInterface>();
723 new_inferencing_interface->inputs = Array<InputSocketFieldType>(
724 tree.interface_inputs().size(), InputSocketFieldType::IsSupported);
725 new_inferencing_interface->outputs = Array<OutputFieldDependency>(
726 tree.interface_outputs().size(), OutputFieldDependency::ForDataSource());
727
728 /* Keep track of the state of all sockets. The index into this array is #SocketRef::id(). */
729 Array<SocketFieldState> field_state_by_socket_id(tree.all_sockets().size());
730
732 tree, interface_by_node, field_state_by_socket_id);
733 determine_group_input_states(tree, *new_inferencing_interface, field_state_by_socket_id);
734 propagate_field_status_from_left_to_right(tree, interface_by_node, field_state_by_socket_id);
736 tree, *new_inferencing_interface, interface_by_node, field_state_by_socket_id);
737
738 /* Update the previous group interface. */
739 const bool group_interface_changed = !tree.runtime->field_inferencing_interface ||
740 *tree.runtime->field_inferencing_interface !=
741 *new_inferencing_interface;
742 tree.runtime->field_inferencing_interface = std::move(new_inferencing_interface);
743 tree.runtime->field_states = calc_socket_states(field_state_by_socket_id);
744
745 return group_interface_changed;
746}
747
748} // namespace blender::bke::node_field_inferencing
/* NOTE(review): the lines that followed here were residue of the generated documentation index
 * (cross-references into BKE_node.hh, BKE_node_runtime.hh, BLI_assert.h, BLI_span.hh, BLI_set.hh,
 * BLI_stack.hh and node.cc for the symbols used above: NODE_CUSTOM, GEO_NODE_SIMULATION_INPUT /
 * GEO_NODE_SIMULATION_OUTPUT, GEO_NODE_REPEAT_INPUT / GEO_NODE_REPEAT_OUTPUT, BLI_assert(),
 * BLI_assert_unreachable(), ENUM_OPERATORS(), eNodeSocketDatatype, SOCK_CUSTOM, NTREE_GEOMETRY,
 * NODE_INTERFACE_SOCKET_STRUCTURE_TYPE_SINGLE, socket_type_supports_fields(),
 * is_layer_selection_field(), node_tree_is_registered(),
 * OutputFieldDependency::ForPartiallyDependentField()). They were not part of the source file
 * proper and are preserved only as this comment. */