Blender V4.3
deg_eval.cc
Go to the documentation of this file.
1/* SPDX-FileCopyrightText: 2013 Blender Authors
2 *
3 * SPDX-License-Identifier: GPL-2.0-or-later */
4
12
13#include "BLI_compiler_attrs.h"
14#include "BLI_function_ref.hh"
15#include "BLI_gsqueue.h"
16#include "BLI_task.h"
17#include "BLI_time.h"
18#include "BLI_utildefines.h"
19
20#include "BKE_global.hh"
21
22#include "DNA_node_types.h"
23#include "DNA_object_types.h"
24#include "DNA_scene_types.h"
25
26#include "DEG_depsgraph.hh"
28
29#ifdef WITH_PYTHON
30# include "BPY_extern.hh"
31#endif
32
33#include "atomic_ops.h"
34
35#include "intern/depsgraph.hh"
47
48namespace blender::deg {
49
50namespace {
51
52struct DepsgraphEvalState;
53
54void deg_task_run_func(TaskPool *pool, void *taskdata);
55
56void schedule_children(DepsgraphEvalState *state,
57 OperationNode *node,
58 FunctionRef<void(OperationNode *node)> schedule_fn);
59
/* Denotes which part of dependency graph is being evaluated. */
enum class EvaluationStage {
  /* Stage 1: Only copy-on-evaluation operations are to be evaluated, prior to anything else.
   * This allows other operations to access its dependencies when there is a dependency cycle
   * involved. */
  COPY_ON_EVAL,

  /* Evaluate actual ID nodes visibility based on the current state of animation and drivers. */
  DYNAMIC_VISIBILITY,

  /* Threaded evaluation of all possible operations. */
  THREADED_EVALUATION,

  /* Workaround for areas which can not be evaluated in threads.
   *
   * For example, meta-balls, which are iterating over all bases and are requesting dupli-lists
   * to see whether there are meta-balls inside. */
  SINGLE_THREADED_WORKAROUND,
};
79
80struct DepsgraphEvalState {
81 Depsgraph *graph;
83 EvaluationStage stage;
86};
87
88void evaluate_node(const DepsgraphEvalState *state, OperationNode *operation_node)
89{
90 ::Depsgraph *depsgraph = reinterpret_cast<::Depsgraph *>(state->graph);
91
92 /* Sanity checks. */
93 BLI_assert_msg(!operation_node->is_noop(), "NOOP nodes should not actually be scheduled");
94 /* Perform operation. */
95 if (state->do_stats) {
96 const double start_time = BLI_time_now_seconds();
97 operation_node->evaluate(depsgraph);
98 operation_node->stats.current_time += BLI_time_now_seconds() - start_time;
99 }
100 else {
101 operation_node->evaluate(depsgraph);
102 }
103
104 /* Clear the flag early on, allowing partial updates without re-evaluating the same node multiple
105 * times.
106 * This is a thread-safe modification as the node's flags are only read for a non-scheduled nodes
107 * and this node has been scheduled. */
108 operation_node->flag &= ~DEPSOP_FLAG_CLEAR_ON_EVAL;
109}
110
111void deg_task_run_func(TaskPool *pool, void *taskdata)
112{
113 void *userdata_v = BLI_task_pool_user_data(pool);
114 DepsgraphEvalState *state = (DepsgraphEvalState *)userdata_v;
115
116 /* Evaluate node. */
117 OperationNode *operation_node = reinterpret_cast<OperationNode *>(taskdata);
118 evaluate_node(state, operation_node);
119
120 /* Schedule children. */
121 schedule_children(state, operation_node, [&](OperationNode *node) {
122 BLI_task_pool_push(pool, deg_task_run_func, node, false, nullptr);
123 });
124}
125
126bool check_operation_node_visible(const DepsgraphEvalState *state, OperationNode *op_node)
127{
128 const ComponentNode *comp_node = op_node->owner;
129 /* Special case for copy-on-eval component: it is to be always evaluated, to keep copied
130 * "database" in a consistent state. */
131 if (comp_node->type == NodeType::COPY_ON_EVAL) {
132 return true;
133 }
134
135 /* Special case for dynamic visibility pass: the actual visibility is not yet known, so limit to
136 * only operations which affects visibility. */
137 if (state->stage == EvaluationStage::DYNAMIC_VISIBILITY) {
139 }
140
141 return comp_node->affects_visible_id;
142}
143
144void calculate_pending_parents_for_node(const DepsgraphEvalState *state, OperationNode *node)
145{
146 /* Update counters, applies for both visible and invisible IDs. */
147 node->num_links_pending = 0;
148 node->scheduled = false;
149 /* Invisible IDs requires no pending operations. */
150 if (!check_operation_node_visible(state, node)) {
151 return;
152 }
153 /* No need to bother with anything if node is not tagged for update. */
154 if ((node->flag & DEPSOP_FLAG_NEEDS_UPDATE) == 0) {
155 return;
156 }
157 for (Relation *rel : node->inlinks) {
158 if (rel->from->type == NodeType::OPERATION && (rel->flag & RELATION_FLAG_CYCLIC) == 0) {
159 OperationNode *from = (OperationNode *)rel->from;
160 /* TODO(sergey): This is how old layer system was checking for the
161 * calculation, but how is it possible that visible object depends
162 * on an invisible? This is something what is prohibited after
163 * deg_graph_build_flush_layers(). */
164 if (!check_operation_node_visible(state, from)) {
165 continue;
166 }
167 /* No need to wait for operation which is up to date. */
168 if ((from->flag & DEPSOP_FLAG_NEEDS_UPDATE) == 0) {
169 continue;
170 }
171 ++node->num_links_pending;
172 }
173 }
174}
175
176void calculate_pending_parents_if_needed(DepsgraphEvalState *state)
177{
178 if (!state->need_update_pending_parents) {
179 return;
180 }
181
182 for (OperationNode *node : state->graph->operations) {
183 calculate_pending_parents_for_node(state, node);
184 }
185
186 state->need_update_pending_parents = false;
187}
188
189void initialize_execution(DepsgraphEvalState *state, Depsgraph *graph)
190{
191 /* Clear tags and other things which needs to be clear. */
192 if (state->do_stats) {
193 for (OperationNode *node : graph->operations) {
194 node->stats.reset_current();
195 }
196 }
197}
198
199bool is_metaball_object_operation(const OperationNode *operation_node)
200{
201 const ComponentNode *component_node = operation_node->owner;
202 const IDNode *id_node = component_node->owner;
203 if (GS(id_node->id_cow->name) != ID_OB) {
204 return false;
205 }
206 const Object *object = reinterpret_cast<const Object *>(id_node->id_cow);
207 return object->type == OB_MBALL;
208}
209
210bool need_evaluate_operation_at_stage(DepsgraphEvalState *state,
211 const OperationNode *operation_node)
212{
213 const ComponentNode *component_node = operation_node->owner;
214 switch (state->stage) {
215 case EvaluationStage::COPY_ON_EVAL:
216 return (component_node->type == NodeType::COPY_ON_EVAL);
217
218 case EvaluationStage::DYNAMIC_VISIBILITY:
219 return operation_node->flag & OperationFlag::DEPSOP_FLAG_AFFECTS_VISIBILITY;
220
221 case EvaluationStage::THREADED_EVALUATION:
222 if (is_metaball_object_operation(operation_node)) {
223 state->need_single_thread_pass = true;
224 return false;
225 }
226 return true;
227
228 case EvaluationStage::SINGLE_THREADED_WORKAROUND:
229 return true;
230 }
231 BLI_assert_msg(0, "Unhandled evaluation stage, should never happen.");
232 return false;
233}
234
235/* Schedule a node if it needs evaluation.
236 * dec_parents: Decrement pending parents count, true when child nodes are
237 * scheduled after a task has been completed.
238 */
239void schedule_node(DepsgraphEvalState *state,
240 OperationNode *node,
241 bool dec_parents,
242 const FunctionRef<void(OperationNode *node)> schedule_fn)
243{
244 /* No need to schedule nodes of invisible ID. */
245 if (!check_operation_node_visible(state, node)) {
246 return;
247 }
248 /* No need to schedule operations which are not tagged for update, they are
249 * considered to be up to date. */
250 if ((node->flag & DEPSOP_FLAG_NEEDS_UPDATE) == 0) {
251 return;
252 }
253 /* TODO(sergey): This is not strictly speaking safe to read
254 * num_links_pending. */
255 if (dec_parents) {
256 BLI_assert(node->num_links_pending > 0);
257 atomic_sub_and_fetch_uint32(&node->num_links_pending, 1);
258 }
259 /* Cal not schedule operation while its dependencies are not yet
260 * evaluated. */
261 if (node->num_links_pending != 0) {
262 return;
263 }
264 /* During the copy-on-eval stage only schedule copy-on-eval nodes. */
265 if (!need_evaluate_operation_at_stage(state, node)) {
266 return;
267 }
268 /* Actually schedule the node. */
269 bool is_scheduled = atomic_fetch_and_or_uint8((uint8_t *)&node->scheduled, uint8_t(true));
270 if (!is_scheduled) {
271 if (node->is_noop()) {
272 /* Clear flags to avoid affecting subsequent update propagation.
273 * For normal nodes these are cleared when it is evaluated. */
274 node->flag &= ~DEPSOP_FLAG_CLEAR_ON_EVAL;
275
276 /* skip NOOP node, schedule children right away */
277 schedule_children(state, node, schedule_fn);
278 }
279 else {
280 /* children are scheduled once this task is completed */
281 schedule_fn(node);
282 }
283 }
284}
285
286void schedule_graph(DepsgraphEvalState *state,
287 const FunctionRef<void(OperationNode *node)> schedule_fn)
288{
289 for (OperationNode *node : state->graph->operations) {
290 schedule_node(state, node, false, schedule_fn);
291 }
292}
293
294void schedule_children(DepsgraphEvalState *state,
295 OperationNode *node,
296 const FunctionRef<void(OperationNode *node)> schedule_fn)
297{
298 for (Relation *rel : node->outlinks) {
299 OperationNode *child = (OperationNode *)rel->to;
300 BLI_assert(child->type == NodeType::OPERATION);
301 if (child->scheduled) {
302 /* Happens when having cyclic dependencies. */
303 continue;
304 }
305 schedule_node(state, child, (rel->flag & RELATION_FLAG_CYCLIC) == 0, schedule_fn);
306 }
307}
308
309/* Evaluate given stage of the dependency graph evaluation using multiple threads.
310 *
311 * NOTE: Will assign the `state->stage` to the given stage. */
312void evaluate_graph_threaded_stage(DepsgraphEvalState *state,
314 const EvaluationStage stage)
315{
316 state->stage = stage;
317
318 calculate_pending_parents_if_needed(state);
319
320 schedule_graph(state, [&](OperationNode *node) {
321 BLI_task_pool_push(task_pool, deg_task_run_func, node, false, nullptr);
322 });
324}
325
326/* Evaluate remaining operations of the dependency graph in a single threaded manner. */
327void evaluate_graph_single_threaded_if_needed(DepsgraphEvalState *state)
328{
329 if (!state->need_single_thread_pass) {
330 return;
331 }
332
333 BLI_assert(!state->need_update_pending_parents);
334
335 state->stage = EvaluationStage::SINGLE_THREADED_WORKAROUND;
336
337 GSQueue *evaluation_queue = BLI_gsqueue_new(sizeof(OperationNode *));
338 auto schedule_node_to_queue = [&](OperationNode *node) {
339 BLI_gsqueue_push(evaluation_queue, &node);
340 };
341 schedule_graph(state, schedule_node_to_queue);
342
343 while (!BLI_gsqueue_is_empty(evaluation_queue)) {
344 OperationNode *operation_node;
345 BLI_gsqueue_pop(evaluation_queue, &operation_node);
346
347 evaluate_node(state, operation_node);
348 schedule_children(state, operation_node, schedule_node_to_queue);
349 }
350
351 BLI_gsqueue_free(evaluation_queue);
352}
353
354void depsgraph_ensure_view_layer(Depsgraph *graph)
355{
356 /* We update evaluated scene in the following cases:
357 * - It was not expanded yet.
358 * - It was tagged for update of evaluated component.
359 * This allows us to have proper view layer pointer. */
360 Scene *scene_cow = graph->scene_cow;
361 if (deg_eval_copy_is_expanded(&scene_cow->id) &&
362 (scene_cow->id.recalc & ID_RECALC_SYNC_TO_EVAL) == 0)
363 {
364 return;
365 }
366
367 const IDNode *scene_id_node = graph->find_id_node(&graph->scene->id);
368 deg_update_eval_copy_datablock(graph, scene_id_node);
369}
370
371TaskPool *deg_evaluate_task_pool_create(DepsgraphEvalState *state)
372{
373 if (G.debug & G_DEBUG_DEPSGRAPH_NO_THREADS) {
375 }
376
378}
379
380} // namespace
381
383{
384 /* Nothing to update, early out. */
385 if (graph->entry_tags.is_empty()) {
386 return;
387 }
388
389 graph->update_count++;
390
391 graph->debug.begin_graph_evaluation();
392
393#ifdef WITH_PYTHON
394 /* Release the GIL so that Python drivers can be evaluated. See #91046. */
396#endif
397
398 graph->is_evaluating = true;
399 depsgraph_ensure_view_layer(graph);
400
401 /* Set up evaluation state. */
402 DepsgraphEvalState state;
403 state.graph = graph;
404 state.do_stats = graph->debug.do_time_debug();
405
406 /* Prepare all nodes for evaluation. */
407 initialize_execution(&state, graph);
408
409 /* Evaluation happens in several incremental steps:
410 *
411 * - Start with the copy-on-evaluation operations which never form dependency cycles. This will
412 * ensure that if a dependency graph has a cycle evaluation functions will always "see" valid
413 * expanded datablock. It might not be evaluated yet, but at least the datablock will be valid.
414 *
415 * - If there is potentially dynamically changing visibility in the graph update the actual
416 * nodes visibilities, so that actual heavy data evaluation can benefit from knowledge that
417 * something heavy is not currently visible.
418 *
419 * - Multi-threaded evaluation of all possible nodes.
420 * Certain operations (and their subtrees) could be ignored. For example, meta-balls are not
421 * safe from threading point of view, so the threaded evaluation will stop at the metaball
422 * operation node.
423 *
424 * - Single-threaded pass of all remaining operations. */
425
426 TaskPool *task_pool = deg_evaluate_task_pool_create(&state);
427
428 evaluate_graph_threaded_stage(&state, task_pool, EvaluationStage::COPY_ON_EVAL);
429
430 if (graph->has_animated_visibility || graph->need_update_nodes_visibility) {
431 /* Update pending parents including only the ones which are affecting operations which are
432 * affecting visibility. */
433 state.need_update_pending_parents = true;
434
435 evaluate_graph_threaded_stage(&state, task_pool, EvaluationStage::DYNAMIC_VISIBILITY);
436
438
439 /* Update parents to an updated visibility and evaluation stage.
440 *
441 * Need to do it regardless of whether visibility is actually changed or not: current state of
442 * the pending parents are all zeroes because it was previously calculated for only visibility
443 * related nodes and those are fully evaluated by now. */
444 state.need_update_pending_parents = true;
445 }
446
447 evaluate_graph_threaded_stage(&state, task_pool, EvaluationStage::THREADED_EVALUATION);
448
450
451 evaluate_graph_single_threaded_if_needed(&state);
452
453 /* Finalize statistics gathering. This is because we only gather single
454 * operation timing here, without aggregating anything to avoid any extra
455 * synchronization. */
456 if (state.do_stats) {
458 }
459
460 /* Clear any uncleared tags. */
462 graph->is_evaluating = false;
463
464#ifdef WITH_PYTHON
466#endif
467
468 graph->debug.end_graph_evaluation();
469}
470
471} // namespace blender::deg
@ G_DEBUG_DEPSGRAPH_NO_THREADS
#define BLI_assert(a)
Definition BLI_assert.h:50
#define BLI_assert_msg(a, msg)
Definition BLI_assert.h:57
void BLI_gsqueue_free(GSQueue *queue)
Definition gsqueue.c:93
void BLI_gsqueue_push(GSQueue *queue, const void *item)
Definition gsqueue.c:100
void BLI_gsqueue_pop(GSQueue *queue, void *r_item)
Definition gsqueue.c:134
GSQueue * BLI_gsqueue_new(size_t elem_size)
Definition gsqueue.c:72
bool BLI_gsqueue_is_empty(const GSQueue *queue)
Definition gsqueue.c:162
@ TASK_PRIORITY_HIGH
Definition BLI_task.h:57
TaskPool * BLI_task_pool_create_suspended(void *userdata, eTaskPriority priority)
Definition task_pool.cc:413
void * BLI_task_pool_user_data(TaskPool *pool)
Definition task_pool.cc:516
TaskPool * BLI_task_pool_create_no_threads(void *userdata)
Definition task_pool.cc:421
void BLI_task_pool_work_and_wait(TaskPool *pool)
Definition task_pool.cc:471
void BLI_task_pool_free(TaskPool *pool)
Definition task_pool.cc:431
void BLI_task_pool_push(TaskPool *pool, TaskRunFunction run, void *taskdata, bool free_taskdata, TaskFreeFunction freedata)
Definition task_pool.cc:450
Platform independent time functions.
double BLI_time_now_seconds(void)
Definition time.c:65
#define BPy_BEGIN_ALLOW_THREADS
Definition BPY_extern.hh:50
#define BPy_END_ALLOW_THREADS
Definition BPY_extern.hh:54
@ ID_RECALC_SYNC_TO_EVAL
Definition DNA_ID.h:1085
@ ID_OB
Object is a sort of wrapper for general info.
@ OB_MBALL
Provides wrapper around system-specific atomic primitives, and some extensions (faked-atomic operatio...
ATOMIC_INLINE uint8_t atomic_fetch_and_or_uint8(uint8_t *p, uint8_t b)
ATOMIC_INLINE uint32_t atomic_sub_and_fetch_uint32(uint32_t *p, uint32_t x)
OperationNode * node
Depsgraph * graph
const IDNode * id_node
bool need_update_pending_parents
Definition deg_eval.cc:84
bool do_stats
Definition deg_eval.cc:82
bool need_single_thread_pass
Definition deg_eval.cc:85
EvaluationStage stage
Definition deg_eval.cc:83
const Depsgraph * depsgraph
TaskPool * task_pool
#define GS(x)
Definition iris.cc:202
static ulong state[N]
#define G(x, y, z)
void deg_eval_stats_aggregate(Depsgraph *graph)
bool deg_eval_copy_is_expanded(const ID *id_cow)
void deg_graph_clear_tags(Depsgraph *graph)
ID * deg_update_eval_copy_datablock(const Depsgraph *depsgraph, const IDNode *id_node)
void deg_evaluate_on_refresh(Depsgraph *graph)
Definition deg_eval.cc:382
void deg_graph_flush_visibility_flags_if_needed(Depsgraph *graph)
unsigned char uint8_t
Definition stdint.h:78
unsigned int recalc
Definition DNA_ID.h:437
char name[66]
Definition DNA_ID.h:425
DepsgraphDebug debug
Definition depsgraph.hh:159