Blender V4.3
sync.cpp
Go to the documentation of this file.
1/* SPDX-FileCopyrightText: 2011-2022 Blender Foundation
2 *
3 * SPDX-License-Identifier: Apache-2.0 */
4
5#include "scene/background.h"
6#include "scene/camera.h"
7#include "scene/curves.h"
8#include "scene/film.h"
9#include "scene/integrator.h"
10#include "scene/light.h"
11#include "scene/mesh.h"
12#include "scene/object.h"
13#include "scene/procedural.h"
14#include "scene/scene.h"
15#include "scene/shader.h"
16#include "scene/shader_graph.h"
17#include "scene/shader_nodes.h"
18
19#include "device/device.h"
20
21#include "blender/device.h"
22#include "blender/session.h"
23#include "blender/sync.h"
24#include "blender/util.h"
25
26#include "integrator/denoiser.h"
27
28#include "util/debug.h"
29#include "util/foreach.h"
30#include "util/hash.h"
31#include "util/log.h"
33
35
/* Common name prefix of all Cryptomatte render passes (e.g. "CryptoObject00");
 * used to recognize them by name when mapping Blender passes to Cycles. */
static const char *cryptomatte_prefix = "Crypto";
37
38/* Constructor */
39
/* Construct the synchronization state between a Blender session and a Cycles
 * scene. `has_updates_` starts true so the first sync_data() call always runs. */
BlenderSync::BlenderSync(BL::RenderEngine &b_engine,
                         BL::BlendData &b_data,
                         BL::Scene &b_scene,
                         Scene *scene,
                         bool preview,
                         bool use_developer_ui,
                         Progress &progress)
    : b_engine(b_engine),
      b_data(b_data),
      b_scene(b_scene),
      shader_map(scene),
      object_map(scene),
      procedural_map(scene),
      geometry_map(scene),
      light_map(scene),
      particle_system_map(scene),
      world_map(NULL),
      world_recalc(false),
      scene(scene),
      preview(preview),
      experimental(false),
      use_developer_ui(use_developer_ui),
      dicing_rate(1.0f),
      max_subdivisions(12),
      progress(progress),
      has_updates_(true)
{
  /* Cache the initial dicing settings so sync_recalc() can later detect when
   * the user changes them and re-tag subdivision meshes for export. */
  PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
  dicing_rate = preview ? RNA_float_get(&cscene, "preview_dicing_rate") :
                          RNA_float_get(&cscene, "dicing_rate");
  max_subdivisions = RNA_int_get(&cscene, "max_subdivisions");
}
72
74
/* Re-point the sync at (possibly re-created) Blender data-block handles after
 * a session reset. */
void BlenderSync::reset(BL::BlendData &b_data, BL::Scene &b_scene)
{
  /* Update data and scene pointers in case they change in session reset,
   * for example after undo.
   * Note that we do not modify the `has_updates_` flag here because the sync
   * reset is also used during viewport navigation. */
  this->b_data = b_data;
  this->b_scene = b_scene;
}
84
86{
87 has_updates_ = true;
88}
89
90/* Sync */
91
/* Translate the depsgraph's per-ID update tags into recalc flags on the
 * Cycles-side ID maps, and raise `has_updates_` when anything changed. */
void BlenderSync::sync_recalc(BL::Depsgraph &b_depsgraph, BL::SpaceView3D &b_v3d)
{
  /* Sync recalc flags from blender to cycles. Actual update is done separate,
   * so we can do it later on if doing it immediate is not suitable. */

  if (experimental) {
    /* Mark all meshes as needing to be exported again if dicing changed. */
    PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
    bool dicing_prop_changed = false;

    float updated_dicing_rate = preview ? RNA_float_get(&cscene, "preview_dicing_rate") :
                                          RNA_float_get(&cscene, "dicing_rate");

    if (dicing_rate != updated_dicing_rate) {
      dicing_rate = updated_dicing_rate;
      dicing_prop_changed = true;
    }

    int updated_max_subdivisions = RNA_int_get(&cscene, "max_subdivisions");

    if (max_subdivisions != updated_max_subdivisions) {
      max_subdivisions = updated_max_subdivisions;
      dicing_prop_changed = true;
    }

    if (dicing_prop_changed) {
      has_updates_ = true;

      /* Only subdivision meshes are affected by dicing settings; tag just those. */
      for (const pair<const GeometryKey, Geometry *> &iter : geometry_map.key_to_scene_data()) {
        Geometry *geom = iter.second;
        if (geom->is_mesh()) {
          Mesh *mesh = static_cast<Mesh *>(geom);
          if (mesh->get_subdivision_type() != Mesh::SUBDIVISION_NONE) {
            PointerRNA id_ptr = RNA_id_pointer_create((::ID *)iter.first.id);
            geometry_map.set_recalc(BL::ID(id_ptr));
          }
        }
      }
    }
  }

  /* Iterate over all IDs in this depsgraph. */
  for (BL::DepsgraphUpdate &b_update : b_depsgraph.updates) {
    /* TODO(sergey): Can do more selective filter here. For example, ignore changes made to
     * screen data-block. Note that sync_data() needs to be called after object deletion, and
     * currently this is ensured by the scene ID tagged for update, which sets the `has_updates_`
     * flag. */
    has_updates_ = true;

    BL::ID b_id(b_update.id());

    /* Material */
    if (b_id.is_a(&RNA_Material)) {
      BL::Material b_mat(b_id);
      shader_map.set_recalc(b_mat);
    }
    /* Light */
    else if (b_id.is_a(&RNA_Light)) {
      BL::Light b_light(b_id);
      shader_map.set_recalc(b_light);
    }
    /* Object */
    else if (b_id.is_a(&RNA_Object)) {
      BL::Object b_ob(b_id);
      const bool can_have_geometry = object_can_have_geometry(b_ob);
      const bool is_light = !can_have_geometry && object_is_light(b_ob);

      if (b_ob.is_instancer() && b_update.is_updated_shading()) {
        /* Needed for e.g. object color updates on instancer. */
        object_map.set_recalc(b_ob);
      }

      if (can_have_geometry || is_light) {
        const bool updated_geometry = b_update.is_updated_geometry();

        /* Geometry (mesh, hair, volume). */
        if (can_have_geometry) {
          if (b_update.is_updated_transform() || b_update.is_updated_shading()) {
            object_map.set_recalc(b_ob);
          }

          /* Subdivision meshes are always re-tagged since dicing may depend on
           * the (possibly changed) object-to-camera relation. */
          if (updated_geometry ||
              (object_subdivision_type(b_ob, preview, experimental) != Mesh::SUBDIVISION_NONE))
          {
            /* Modified objects key geometry by object, others by the data-block. */
            BL::ID key = BKE_object_is_modified(b_ob) ? b_ob : b_ob.data();
            geometry_map.set_recalc(key);

            /* Sync all contained geometry instances as well when the object changed. */
            map<void *, set<BL::ID>>::const_iterator instance_geometries =
                instance_geometries_by_object.find(b_ob.ptr.data);
            if (instance_geometries != instance_geometries_by_object.end()) {
              for (BL::ID geometry : instance_geometries->second) {
                geometry_map.set_recalc(geometry);
              }
            }
          }

          if (updated_geometry) {
            /* Particle systems are keyed by their owning object. */
            BL::Object::particle_systems_iterator b_psys;
            for (b_ob.particle_systems.begin(b_psys); b_psys != b_ob.particle_systems.end();
                 ++b_psys)
            {
              particle_system_map.set_recalc(b_ob);
            }
          }
        }
        /* Light */
        else if (is_light) {
          if (b_update.is_updated_transform() || b_update.is_updated_shading()) {
            object_map.set_recalc(b_ob);
            light_map.set_recalc(b_ob);
          }

          if (updated_geometry) {
            light_map.set_recalc(b_ob);
          }
        }
      }
      else if (object_is_camera(b_ob)) {
        shader_map.set_recalc(b_ob);
      }
    }
    /* Mesh */
    else if (b_id.is_a(&RNA_Mesh)) {
      BL::Mesh b_mesh(b_id);
      geometry_map.set_recalc(b_mesh);
    }
    /* World */
    else if (b_id.is_a(&RNA_World)) {
      BL::World b_world(b_id);
      if (world_map == b_world.ptr.data) {
        world_recalc = true;
      }
      shader_map.set_recalc(b_world);
    }
    /* Scene */
    else if (b_id.is_a(&RNA_Scene)) {
      shader_map.set_recalc(b_id);
    }
    /* Volume */
    else if (b_id.is_a(&RNA_Volume)) {
      BL::Volume b_volume(b_id);
      geometry_map.set_recalc(b_volume);
    }
  }

  if (b_v3d) {
    /* Viewport shading/display changes do not come through the depsgraph;
     * compare against the previously-seen viewport parameters instead. */
    BlenderViewportParameters new_viewport_parameters(b_v3d, use_developer_ui);

    if (viewport_parameters.shader_modified(new_viewport_parameters)) {
      world_recalc = true;
      has_updates_ = true;
    }

    has_updates_ |= viewport_parameters.modified(new_viewport_parameters);
  }
}
249
/* Top-level synchronization entry point: pushes all changed Blender data
 * (view layer, integrator, film, shaders, images, objects, motion) into the
 * Cycles scene, then clears the `has_updates_` flag. */
void BlenderSync::sync_data(BL::RenderSettings &b_render,
                            BL::Depsgraph &b_depsgraph,
                            BL::SpaceView3D &b_v3d,
                            BL::Object &b_override,
                            int width,
                            int height,
                            void **python_thread_state,
                            const DeviceInfo &denoise_device_info)
{
  /* For auto refresh images. */
  ImageManager *image_manager = scene->image_manager;
  const int frame = b_scene.frame_current();
  const bool auto_refresh_update = image_manager->set_animation_frame_update(frame);

  /* Early out when nothing changed since the previous sync. */
  if (!has_updates_ && !auto_refresh_update) {
    return;
  }

  /* NOTE(review): `timer`, used by the VLOG_INFO at the end of this function,
   * has no visible declaration here — a line (presumably `scoped_timer timer;`)
   * appears to have been lost; confirm against the upstream file. */

  BL::ViewLayer b_view_layer = b_depsgraph.view_layer_eval();

  /* TODO(sergey): This feels weak to pass view layer to the integrator, and even weaker to have an
   * implicit check on whether it is a background render or not. What is the nicer thing here? */
  const bool background = !b_v3d;

  sync_view_layer(b_view_layer);
  sync_integrator(b_view_layer, background, denoise_device_info);
  sync_film(b_view_layer, b_v3d);
  sync_shaders(b_depsgraph, b_v3d, auto_refresh_update);
  sync_images();

  geometry_synced.clear(); /* use for objects and motion sync */

  /* Object sync runs now unless motion blur needs it deferred to the shutter
   * center (handled inside sync_motion below). */
  if (scene->need_motion() == Scene::MOTION_PASS || scene->need_motion() == Scene::MOTION_NONE ||
      scene->camera->get_motion_position() == MOTION_POSITION_CENTER)
  {
    sync_objects(b_depsgraph, b_v3d);
  }
  sync_motion(b_render, b_depsgraph, b_v3d, b_override, width, height, python_thread_state);

  geometry_synced.clear();

  /* Shader sync done at the end, since object sync uses it.
   * false = don't delete unused shaders, not supported. */
  shader_map.post_sync(false);

  VLOG_INFO << "Total time spent synchronizing data: " << timer.get_time();

  has_updates_ = false;
}
301
302/* Integrator */
303
/* Push all integrator-related settings (bounces, sampling, seed, adaptive
 * sampling, path guiding, denoising) from the scene's "cycles" RNA properties
 * into the Cycles integrator. */
void BlenderSync::sync_integrator(BL::ViewLayer &b_view_layer,
                                  bool background,
                                  const DeviceInfo &denoise_device_info)
{
  PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");

  /* Non-zero feature set means "Experimental". */
  experimental = (get_enum(cscene, "feature_set") != 0);

  Integrator *integrator = scene->integrator;

  integrator->set_min_bounce(get_int(cscene, "min_light_bounces"));
  integrator->set_max_bounce(get_int(cscene, "max_bounces"));

  integrator->set_max_diffuse_bounce(get_int(cscene, "diffuse_bounces"));
  integrator->set_max_glossy_bounce(get_int(cscene, "glossy_bounces"));
  integrator->set_max_transmission_bounce(get_int(cscene, "transmission_bounces"));
  integrator->set_max_volume_bounce(get_int(cscene, "volume_bounces"));

  integrator->set_transparent_min_bounce(get_int(cscene, "min_transparent_bounces"));
  integrator->set_transparent_max_bounce(get_int(cscene, "transparent_max_bounces"));

  integrator->set_volume_max_steps(get_int(cscene, "volume_max_steps"));
  float volume_step_rate = (preview) ? get_float(cscene, "volume_preview_step_rate") :
                                       get_float(cscene, "volume_step_rate");
  integrator->set_volume_step_rate(volume_step_rate);

  integrator->set_caustics_reflective(get_boolean(cscene, "caustics_reflective"));
  integrator->set_caustics_refractive(get_boolean(cscene, "caustics_refractive"));
  integrator->set_filter_glossy(get_float(cscene, "blur_glossy"));

  /* Animated seed mixes the frame (and subframe) into the user seed so each
   * frame gets different noise. */
  int seed = get_int(cscene, "seed");
  if (get_boolean(cscene, "use_animated_seed")) {
    seed = hash_uint2(b_scene.frame_current(), get_int(cscene, "seed"));
    if (b_scene.frame_subframe() != 0.0f) {
      /* TODO(sergey): Ideally should be some sort of hash_merge,
       * but this is good enough for now.
       */
      seed += hash_uint2((int)(b_scene.frame_subframe() * (float)INT_MAX),
                         get_int(cscene, "seed"));
    }
  }

  integrator->set_seed(seed);

  integrator->set_sample_clamp_direct(get_float(cscene, "sample_clamp_direct"));
  integrator->set_sample_clamp_indirect(get_float(cscene, "sample_clamp_indirect"));
  if (!preview) {
    /* Motion blur only applies to final renders; viewport ignores it here. */
    integrator->set_motion_blur(view_layer.use_motion_blur);
  }

  bool use_light_tree = get_boolean(cscene, "use_light_tree");
  integrator->set_use_light_tree(use_light_tree);
  integrator->set_light_sampling_threshold(get_float(cscene, "light_sampling_threshold"));

  if (integrator->use_light_tree_is_modified()) {
    /* Toggling the light tree changes how lights are sampled; rebuild light data. */
    scene->light_manager->tag_update(scene, LightManager::UPDATE_ALL);
  }

  SamplingPattern sampling_pattern = (SamplingPattern)get_enum(
      cscene, "sampling_pattern", SAMPLING_NUM_PATTERNS, SAMPLING_PATTERN_TABULATED_SOBOL);

  switch (sampling_pattern) {
      /* NOTE(review): the `case` labels of this switch appear to have been
       * lost in extraction (this branch presumably handled the automatic
       * pattern) — confirm against the upstream file. */
      if (!background) {
        /* For interactive rendering, ensure that the first sample is in itself
         * blue-noise-distributed for smooth viewport navigation. */
        sampling_pattern = SAMPLING_PATTERN_BLUE_NOISE_FIRST;
      }
      else {
        /* For non-interactive rendering, default to a full blue-noise pattern. */
        sampling_pattern = SAMPLING_PATTERN_BLUE_NOISE_PURE;
      }
      break;
      /* NOTE(review): further `case` labels lost in extraction here. */
      /* Always allowed. */
      break;
    default:
      /* If not using developer UI, default to blue noise for "advanced" patterns. */
      if (!use_developer_ui) {
        sampling_pattern = SAMPLING_PATTERN_BLUE_NOISE_PURE;
      }
      break;
  }

  /* Vertex baking: output "pixels" are unrelated to neighbors, see below. */
  const bool is_vertex_baking = scene->bake_manager->get_baking() &&
                                b_scene.render().bake().target() !=
                                    BL::BakeSettings::target_IMAGE_TEXTURES;
  scene->bake_manager->set_use_seed(is_vertex_baking);
  if (is_vertex_baking) {
    /* When baking vertex colors, the "pixels" in the output are unrelated to their neighbors,
     * so blue-noise sampling makes no sense. */
    sampling_pattern = SAMPLING_PATTERN_TABULATED_SOBOL;
  }

  integrator->set_sampling_pattern(sampling_pattern);

  /* Adaptive sampling uses separate preview/final settings. */
  int samples = 1;
  bool use_adaptive_sampling = false;
  if (preview) {
    samples = get_int(cscene, "preview_samples");
    use_adaptive_sampling = RNA_boolean_get(&cscene, "use_preview_adaptive_sampling");
    integrator->set_use_adaptive_sampling(use_adaptive_sampling);
    integrator->set_adaptive_threshold(get_float(cscene, "preview_adaptive_threshold"));
    integrator->set_adaptive_min_samples(get_int(cscene, "preview_adaptive_min_samples"));
  }
  else {
    samples = get_int(cscene, "samples");
    use_adaptive_sampling = RNA_boolean_get(&cscene, "use_adaptive_sampling");
    integrator->set_use_adaptive_sampling(use_adaptive_sampling);
    integrator->set_adaptive_threshold(get_float(cscene, "adaptive_threshold"));
    integrator->set_adaptive_min_samples(get_int(cscene, "adaptive_min_samples"));
  }

  float scrambling_distance = get_float(cscene, "scrambling_distance");
  bool auto_scrambling_distance = get_boolean(cscene, "auto_scrambling_distance");
  if (auto_scrambling_distance) {
    if (samples == 0) {
      /* If samples is 0, then viewport rendering is set to render infinitely. In that case we
       * override the samples value with 4096 so the Automatic Scrambling Distance algorithm
       * picks a Scrambling Distance value with a good balance of performance and correlation
       * artifacts when rendering to high sample counts. */
      samples = 4096;
    }

    if (use_adaptive_sampling) {
      /* If Adaptive Sampling is enabled, use "min_samples" in the Automatic Scrambling Distance
       * algorithm to avoid artifacts common with Adaptive Sampling + Scrambling Distance. */
      const AdaptiveSampling adaptive_sampling = integrator->get_adaptive_sampling();
      samples = min(samples, adaptive_sampling.min_samples);
    }
    scrambling_distance *= 4.0f / sqrtf(samples);
  }

  /* Only use scrambling distance in the viewport if user wants to. */
  bool preview_scrambling_distance = get_boolean(cscene, "preview_scrambling_distance");
  if ((preview && !preview_scrambling_distance) ||
      sampling_pattern != SAMPLING_PATTERN_TABULATED_SOBOL)
  {
    scrambling_distance = 1.0f;
  }

  if (scrambling_distance != 1.0f) {
    VLOG_INFO << "Using scrambling distance: " << scrambling_distance;
  }
  integrator->set_scrambling_distance(scrambling_distance);

  /* Fast GI: limit AO bounces (0 disables the approximation). */
  if (get_boolean(cscene, "use_fast_gi")) {
    if (preview) {
      integrator->set_ao_bounces(get_int(cscene, "ao_bounces"));
    }
    else {
      integrator->set_ao_bounces(get_int(cscene, "ao_bounces_render"));
    }
  }
  else {
    integrator->set_ao_bounces(0);
  }

#ifdef WITH_CYCLES_DEBUG
  DirectLightSamplingType direct_light_sampling_type = (DirectLightSamplingType)get_enum(
      cscene, "direct_light_sampling_type", DIRECT_LIGHT_SAMPLING_NUM, DIRECT_LIGHT_SAMPLING_MIS);
  integrator->set_direct_light_sampling_type(direct_light_sampling_type);
#endif

  /* Path guiding settings. */
  integrator->set_use_guiding(get_boolean(cscene, "use_guiding"));
  integrator->set_use_surface_guiding(get_boolean(cscene, "use_surface_guiding"));
  integrator->set_use_volume_guiding(get_boolean(cscene, "use_volume_guiding"));
  integrator->set_guiding_training_samples(get_int(cscene, "guiding_training_samples"));

  if (use_developer_ui) {
    /* Advanced guiding controls are only exposed in the developer UI. */
    integrator->set_deterministic_guiding(get_boolean(cscene, "use_deterministic_guiding"));
    integrator->set_surface_guiding_probability(get_float(cscene, "surface_guiding_probability"));
    integrator->set_volume_guiding_probability(get_float(cscene, "volume_guiding_probability"));
    integrator->set_use_guiding_direct_light(get_boolean(cscene, "use_guiding_direct_light"));
    integrator->set_use_guiding_mis_weights(get_boolean(cscene, "use_guiding_mis_weights"));
    GuidingDistributionType guiding_distribution_type = (GuidingDistributionType)get_enum(
        cscene, "guiding_distribution_type", GUIDING_NUM_TYPES, GUIDING_TYPE_PARALLAX_AWARE_VMM);
    integrator->set_guiding_distribution_type(guiding_distribution_type);
    GuidingDirectionalSamplingType guiding_directional_sampling_type =
        /* NOTE(review): the `get_enum(...)` call initializing this value
         * appears truncated in extraction — confirm against the upstream file. */
            "guiding_directional_sampling_type",
    integrator->set_guiding_directional_sampling_type(guiding_directional_sampling_type);
    integrator->set_guiding_roughness_threshold(get_float(cscene, "guiding_roughness_threshold"));
  }

  DenoiseParams denoise_params = get_denoise_params(
      b_scene, b_view_layer, background, denoise_device_info);

  /* No denoising support for vertex color baking, vertices packed into image
   * buffer have no relation to neighbors. */
  if (is_vertex_baking) {
    denoise_params.use = false;
  }

  integrator->set_use_denoise(denoise_params.use);

  /* Only update denoiser parameters if the denoiser is actually used. This allows to tweak
   * denoiser parameters before enabling it without render resetting on every change. The downside
   * is that the interface and the integrator are technically out of sync. */
  if (denoise_params.use) {
    integrator->set_denoiser_type(denoise_params.type);
    integrator->set_denoise_use_gpu(denoise_params.use_gpu);
    integrator->set_denoise_start_sample(denoise_params.start_sample);
    integrator->set_use_denoise_pass_albedo(denoise_params.use_pass_albedo);
    integrator->set_use_denoise_pass_normal(denoise_params.use_pass_normal);
    integrator->set_denoiser_prefilter(denoise_params.prefilter);
    integrator->set_denoiser_quality(denoise_params.quality);
  }

  /* UPDATE_NONE as we don't want to tag the integrator as modified (this was done by the
   * set calls above), but we need to make sure that the dependent things are tagged. */
  integrator->tag_update(scene, Integrator::UPDATE_NONE);
}
520
521/* Film */
522
523void BlenderSync::sync_film(BL::ViewLayer &b_view_layer, BL::SpaceView3D &b_v3d)
524{
525 PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
526 PointerRNA crl = RNA_pointer_get(&b_view_layer.ptr, "cycles");
527
528 Film *film = scene->film;
529
530 if (b_v3d) {
531 const BlenderViewportParameters new_viewport_parameters(b_v3d, use_developer_ui);
532 film->set_display_pass(new_viewport_parameters.display_pass);
533 film->set_show_active_pixels(new_viewport_parameters.show_active_pixels);
534 }
535
536 film->set_exposure(get_float(cscene, "film_exposure"));
537 film->set_filter_type(
538 (FilterType)get_enum(cscene, "pixel_filter_type", FILTER_NUM_TYPES, FILTER_BLACKMAN_HARRIS));
539 float filter_width = (film->get_filter_type() == FILTER_BOX) ? 1.0f :
540 get_float(cscene, "filter_width");
541 film->set_filter_width(filter_width);
542
543 if (b_scene.world()) {
544 BL::WorldMistSettings b_mist = b_scene.world().mist_settings();
545
546 film->set_mist_start(b_mist.start());
547 film->set_mist_depth(b_mist.depth());
548
549 switch (b_mist.falloff()) {
550 case BL::WorldMistSettings::falloff_QUADRATIC:
551 film->set_mist_falloff(2.0f);
552 break;
553 case BL::WorldMistSettings::falloff_LINEAR:
554 film->set_mist_falloff(1.0f);
555 break;
556 case BL::WorldMistSettings::falloff_INVERSE_QUADRATIC:
557 film->set_mist_falloff(0.5f);
558 break;
559 }
560 }
561
562 /* Blender viewport does not support proper shadow catcher compositing, so force an approximate
563 * mode to improve visual feedback. */
564 if (b_v3d) {
565 film->set_use_approximate_shadow_catcher(true);
566 }
567 else {
568 film->set_use_approximate_shadow_catcher(!get_boolean(crl, "use_pass_shadow_catcher"));
569 }
570}
571
572/* Render Layer */
573
574void BlenderSync::sync_view_layer(BL::ViewLayer &b_view_layer)
575{
576 view_layer.name = b_view_layer.name();
577
578 /* Filter. */
579 view_layer.use_background_shader = b_view_layer.use_sky();
580 /* Always enable surfaces for baking, otherwise there is nothing to bake to. */
581 view_layer.use_surfaces = b_view_layer.use_solid() || scene->bake_manager->get_baking();
582 view_layer.use_hair = b_view_layer.use_strand();
583 view_layer.use_volumes = b_view_layer.use_volumes();
584 view_layer.use_motion_blur = b_view_layer.use_motion_blur() &&
585 b_scene.render().use_motion_blur();
586
587 /* Material override. */
588 view_layer.material_override = b_view_layer.material_override();
589 /* World override. */
590 view_layer.world_override = b_view_layer.world_override();
591
592 /* Sample override. */
593 PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
594 int use_layer_samples = get_enum(cscene, "use_layer_samples");
595
596 view_layer.bound_samples = (use_layer_samples == 1);
597 view_layer.samples = 0;
598
599 if (use_layer_samples != 2) {
600 int samples = b_view_layer.samples();
601 view_layer.samples = samples;
602 }
603}
604
605/* Images */
606void BlenderSync::sync_images()
607{
608 /* Sync is a convention for this API, but currently it frees unused buffers. */
609
610 const bool is_interface_locked = b_engine.render() && b_engine.render().use_lock_interface();
611 if (is_interface_locked == false && BlenderSession::headless == false) {
612 /* If interface is not locked, it's possible image is needed for
613 * the display.
614 */
615 return;
616 }
617 /* Free buffers used by images which are not needed for render. */
618 for (BL::Image &b_image : b_data.images) {
619 const bool is_builtin = image_is_builtin(b_image, b_engine);
620 if (is_builtin == false) {
621 b_image.buffers_free();
622 }
623 /* TODO(sergey): Free builtin images not used by any shader. */
624 }
625}
626
627/* Passes */
628
629static bool get_known_pass_type(BL::RenderPass &b_pass, PassType &type, PassMode &mode)
630{
631 string name = b_pass.name();
632#define MAP_PASS(passname, passtype, noisy) \
633 if (name == passname) { \
634 type = passtype; \
635 mode = (noisy) ? PassMode::NOISY : PassMode::DENOISED; \
636 return true; \
637 } \
638 ((void)0)
639
640 /* NOTE: Keep in sync with defined names from engine.py */
641
642 MAP_PASS("Combined", PASS_COMBINED, false);
643 MAP_PASS("Noisy Image", PASS_COMBINED, true);
644
645 MAP_PASS("Depth", PASS_DEPTH, false);
646 MAP_PASS("Mist", PASS_MIST, false);
647 MAP_PASS("Position", PASS_POSITION, false);
648 MAP_PASS("Normal", PASS_NORMAL, false);
649 MAP_PASS("IndexOB", PASS_OBJECT_ID, false);
650 MAP_PASS("UV", PASS_UV, false);
651 MAP_PASS("Vector", PASS_MOTION, false);
652 MAP_PASS("IndexMA", PASS_MATERIAL_ID, false);
653
654 MAP_PASS("DiffDir", PASS_DIFFUSE_DIRECT, false);
655 MAP_PASS("GlossDir", PASS_GLOSSY_DIRECT, false);
656 MAP_PASS("TransDir", PASS_TRANSMISSION_DIRECT, false);
657 MAP_PASS("VolumeDir", PASS_VOLUME_DIRECT, false);
658
659 MAP_PASS("DiffInd", PASS_DIFFUSE_INDIRECT, false);
660 MAP_PASS("GlossInd", PASS_GLOSSY_INDIRECT, false);
661 MAP_PASS("TransInd", PASS_TRANSMISSION_INDIRECT, false);
662 MAP_PASS("VolumeInd", PASS_VOLUME_INDIRECT, false);
663
664 MAP_PASS("DiffCol", PASS_DIFFUSE_COLOR, false);
665 MAP_PASS("GlossCol", PASS_GLOSSY_COLOR, false);
666 MAP_PASS("TransCol", PASS_TRANSMISSION_COLOR, false);
667
668 MAP_PASS("Emit", PASS_EMISSION, false);
669 MAP_PASS("Env", PASS_BACKGROUND, false);
670 MAP_PASS("AO", PASS_AO, false);
671
672 MAP_PASS("BakePrimitive", PASS_BAKE_PRIMITIVE, false);
673 MAP_PASS("BakeSeed", PASS_BAKE_SEED, false);
674 MAP_PASS("BakeDifferential", PASS_BAKE_DIFFERENTIAL, false);
675
676 MAP_PASS("Denoising Normal", PASS_DENOISING_NORMAL, true);
677 MAP_PASS("Denoising Albedo", PASS_DENOISING_ALBEDO, true);
678 MAP_PASS("Denoising Depth", PASS_DENOISING_DEPTH, true);
679
680 MAP_PASS("Shadow Catcher", PASS_SHADOW_CATCHER, false);
681 MAP_PASS("Noisy Shadow Catcher", PASS_SHADOW_CATCHER, true);
682
683 MAP_PASS("AdaptiveAuxBuffer", PASS_ADAPTIVE_AUX_BUFFER, false);
684 MAP_PASS("Debug Sample Count", PASS_SAMPLE_COUNT, false);
685
686 MAP_PASS("Guiding Color", PASS_GUIDING_COLOR, false);
687 MAP_PASS("Guiding Probability", PASS_GUIDING_PROBABILITY, false);
688 MAP_PASS("Guiding Average Roughness", PASS_GUIDING_AVG_ROUGHNESS, false);
689
691 type = PASS_CRYPTOMATTE;
692 mode = PassMode::DENOISED;
693 return true;
694 }
695
696#undef MAP_PASS
697
698 return false;
699}
700
701static Pass *pass_add(Scene *scene,
702 PassType type,
703 const char *name,
705{
706 Pass *pass = scene->create_node<Pass>();
707
708 pass->set_type(type);
709 pass->set_name(ustring(name));
710 pass->set_mode(mode);
711
712 return pass;
713}
714
/* Rebuild the scene's pass list from the view layer settings and the render
 * passes Blender created (cryptomatte, AOVs, light groups, engine.py passes). */
void BlenderSync::sync_render_passes(BL::RenderLayer &b_rlay, BL::ViewLayer &b_view_layer)
{
  /* Delete all existing passes. */
  set<Pass *> clear_passes(scene->passes.begin(), scene->passes.end());
  scene->delete_nodes(clear_passes);

  /* Always add combined pass. */
  pass_add(scene, PASS_COMBINED, "Combined");

  /* Cryptomatte stores two ID/weight pairs per RGBA layer.
   * User facing parameter is the number of pairs. */
  int crypto_depth = divide_up(min(16, b_view_layer.pass_cryptomatte_depth()), 2);
  scene->film->set_cryptomatte_depth(crypto_depth);
  CryptomatteType cryptomatte_passes = CRYPT_NONE;
  if (b_view_layer.use_pass_cryptomatte_object()) {
    cryptomatte_passes = (CryptomatteType)(cryptomatte_passes | CRYPT_OBJECT);
  }
  if (b_view_layer.use_pass_cryptomatte_material()) {
    cryptomatte_passes = (CryptomatteType)(cryptomatte_passes | CRYPT_MATERIAL);
  }
  if (b_view_layer.use_pass_cryptomatte_asset()) {
    cryptomatte_passes = (CryptomatteType)(cryptomatte_passes | CRYPT_ASSET);
  }
  scene->film->set_cryptomatte_passes(cryptomatte_passes);

  /* Names of passes created below; used to silence the "unknown pass" error
   * for passes we added ourselves. */
  unordered_set<string> expected_passes;

  /* Custom AOV passes. */
  BL::ViewLayer::aovs_iterator b_aov_iter;
  for (b_view_layer.aovs.begin(b_aov_iter); b_aov_iter != b_view_layer.aovs.end(); ++b_aov_iter) {
    BL::AOV b_aov(*b_aov_iter);
    if (!b_aov.is_valid()) {
      continue;
    }

    string name = b_aov.name();
    PassType type = (b_aov.type() == BL::AOV::type_COLOR) ? PASS_AOV_COLOR : PASS_AOV_VALUE;

    pass_add(scene, type, name.c_str());
    expected_passes.insert(name);
  }

  /* Light Group passes. */
  BL::ViewLayer::lightgroups_iterator b_lightgroup_iter;
  for (b_view_layer.lightgroups.begin(b_lightgroup_iter);
       b_lightgroup_iter != b_view_layer.lightgroups.end();
       ++b_lightgroup_iter)
  {
    BL::Lightgroup b_lightgroup(*b_lightgroup_iter);

    string name = string_printf("Combined_%s", b_lightgroup.name().c_str());

    Pass *pass = pass_add(scene, PASS_COMBINED, name.c_str(), PassMode::NOISY);
    pass->set_lightgroup(ustring(b_lightgroup.name()));
    expected_passes.insert(name);
  }

  /* Sync the passes that were defined in engine.py. */
  for (BL::RenderPass &b_pass : b_rlay.passes) {
    PassType pass_type = PASS_NONE;
    PassMode pass_mode = PassMode::DENOISED;

    if (!get_known_pass_type(b_pass, pass_type, pass_mode)) {
      if (!expected_passes.count(b_pass.name())) {
        LOG(ERROR) << "Unknown pass " << b_pass.name();
      }
      continue;
    }

    /* The motion (Vector) pass is skipped when motion blur is enabled. */
    if (pass_type == PASS_MOTION &&
        (b_view_layer.use_motion_blur() && b_scene.render().use_motion_blur()))
    {
      continue;
    }

    pass_add(scene, pass_type, b_pass.name().c_str(), pass_mode);
  }

  scene->film->set_pass_alpha_threshold(b_view_layer.pass_alpha_threshold());
}
795
796void BlenderSync::free_data_after_sync(BL::Depsgraph &b_depsgraph)
797{
798 /* When viewport display is not needed during render we can force some
799 * caches to be releases from blender side in order to reduce peak memory
800 * footprint during synchronization process.
801 */
802
803 const bool is_interface_locked = b_engine.render() && b_engine.render().use_lock_interface();
804 const bool is_persistent_data = b_engine.render() && b_engine.render().use_persistent_data();
805 const bool can_free_caches =
806 (BlenderSession::headless || is_interface_locked) &&
807 /* Baking re-uses the depsgraph multiple times, clearing crashes
808 * reading un-evaluated mesh data which isn't aligned with the
809 * geometry we're baking, see #71012. */
810 !scene->bake_manager->get_baking() &&
811 /* Persistent data must main caches for performance and correctness. */
812 !is_persistent_data;
813
814 if (!can_free_caches) {
815 return;
816 }
817 /* TODO(sergey): We can actually remove the whole dependency graph,
818 * but that will need some API support first.
819 */
820 for (BL::Object &b_ob : b_depsgraph.objects) {
821 /* Grease pencil render requires all evaluated objects available as-is after Cycles is done
822 * with its part. */
823 if (b_ob.type() == BL::Object::type_GREASEPENCIL || b_ob.type() == BL::Object::type_GPENCIL) {
824 continue;
825 }
826 b_ob.cache_release();
827 }
828}
829
830/* Scene Parameters */
831
    const bool background,
    const bool use_developer_ui)
{
  /* NOTE(review): the signature line(s) of this function (return type, name,
   * and the parameter providing `b_scene`) and the declaration of `params`
   * (presumably `SceneParams params;`) appear to have been lost in extraction
   * — confirm against the upstream file. */
  PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
  const bool shadingsystem = RNA_boolean_get(&cscene, "shading_system");

  /* Shading system: 0 = SVM, 1 = OSL. */
  if (shadingsystem == 0) {
    params.shadingsystem = SHADINGSYSTEM_SVM;
  }
  else if (shadingsystem == 1) {
    params.shadingsystem = SHADINGSYSTEM_OSL;
  }

  /* Background renders always use a static BVH; the viewport uses a dynamic
   * one unless overridden through the developer UI. */
  if (background || (use_developer_ui && get_enum(cscene, "debug_bvh_type"))) {
    params.bvh_type = BVH_TYPE_STATIC;
  }
  else {
    params.bvh_type = BVH_TYPE_DYNAMIC;
  }

  params.use_bvh_spatial_split = RNA_boolean_get(&cscene, "debug_use_spatial_splits");
  params.use_bvh_compact_structure = RNA_boolean_get(&cscene, "debug_use_compact_bvh");
  params.use_bvh_unaligned_nodes = RNA_boolean_get(&cscene, "debug_use_hair_bvh");
  params.num_bvh_time_steps = RNA_int_get(&cscene, "debug_bvh_time_steps");

  /* Hair/curve settings come from a separate "cycles_curves" property group. */
  PointerRNA csscene = RNA_pointer_get(&b_scene.ptr, "cycles_curves");
  params.hair_subdivisions = get_int(csscene, "subdivisions");
  params.hair_shape = (CurveShapeType)get_enum(
      csscene, "shape", CURVE_NUM_SHAPE_TYPES, CURVE_THICK);

  /* Texture size limit (only when simplify is on); enum value N maps to a
   * limit of 2^(N+6) pixels, i.e. 128, 256, ... */
  int texture_limit;
  if (background) {
    texture_limit = RNA_enum_get(&cscene, "texture_limit_render");
  }
  else {
    texture_limit = RNA_enum_get(&cscene, "texture_limit");
  }
  if (texture_limit > 0 && b_scene.render().use_simplify()) {
    params.texture_limit = 1 << (texture_limit + 6);
  }
  else {
    params.texture_limit = 0;
  }

  params.bvh_layout = DebugFlags().cpu.bvh_layout;

  params.background = background;

  return params;
}
884
885/* Session Parameters */
886
887bool BlenderSync::get_session_pause(BL::Scene &b_scene, bool background)
888{
889 PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
890 return (background) ? false : get_boolean(cscene, "preview_pause");
891}
892
/* Build the SessionParams for a Cycles render session from the Blender scene
 * RNA settings and user preferences.
 * NOTE(review): this generated listing drops a few hyperlinked source lines
 * (the `get_session_params` signature at original line 893, plus lines 898,
 * 913, 918 and 975) — the code below is reproduced verbatim from what is
 * visible; confirm the missing lines against the real sync.cpp. */
894 BL::Preferences &b_preferences,
895 BL::Scene &b_scene,
896 bool background)
897 {
899 PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
900
901 if (background && !b_engine.is_preview()) {
902 /* Viewport and preview renders do not require temp directory and do request session
903 * parameters more often than the background render.
904 * Optimize RNA-C++ usage and memory allocation a bit by saving string access which we know is
905 * not needed for viewport render. */
906 params.temp_dir = b_engine.temporary_directory();
907 }
908
909 /* Feature set: any non-zero enum value means the experimental set. */
910 params.experimental = (get_enum(cscene, "feature_set") != 0);
911
912 /* Headless and background rendering. */
914 params.background = background;
915
916 /* Device */
917 params.threads = blender_device_threads(b_scene);
919 b_preferences, b_scene, params.background, b_engine.is_preview(), params.denoise_device);
920
921 /* samples */
922 int samples = get_int(cscene, "samples");
923 int preview_samples = get_int(cscene, "preview_samples");
924 int sample_offset = get_int(cscene, "sample_offset");
925
926 if (background) {
927 params.samples = samples;
928 params.sample_offset = sample_offset;
929 }
930 else {
/* Viewport: 0 preview samples means "unlimited", i.e. keep refining. */
931 params.samples = preview_samples;
932 if (params.samples == 0) {
933 params.samples = INT_MAX;
934 }
935 params.sample_offset = 0;
936 }
937
938 /* Clamp sample offset. */
939 params.sample_offset = clamp(params.sample_offset, 0, Integrator::MAX_SAMPLES);
940
/* Clamp so that offset + samples never exceeds the integrator's hard cap. */
941 /* Clamp samples. */
942 params.samples = clamp(params.samples, 0, Integrator::MAX_SAMPLES - params.sample_offset);
943
944 /* Viewport Performance */
945 params.pixel_size = b_engine.get_preview_pixel_size(b_scene);
946
/* Final renders always render at full resolution, one pixel per pixel. */
947 if (background) {
948 params.pixel_size = 1;
949 }
950
951 /* shading system - scene level needs full refresh */
952 const bool shadingsystem = RNA_boolean_get(&cscene, "shading_system");
953
/* Boolean RNA property maps false -> SVM, true -> OSL. */
954 if (shadingsystem == 0) {
955 params.shadingsystem = SHADINGSYSTEM_SVM;
956 }
957 else if (shadingsystem == 1) {
958 params.shadingsystem = SHADINGSYSTEM_OSL;
959 }
960
961 /* Time limit. */
962 if (background) {
963 params.time_limit = (double)get_float(cscene, "time_limit");
964 }
965 else {
966 /* For the viewport it kind of makes more sense to think in terms of the noise floor, which is
967 * usually higher than acceptable level for the final frame. */
968 /* TODO: It might be useful to support time limit in the viewport as well, but needs some
969 * extra thoughts and input. */
970 params.time_limit = 0.0;
971 }
972
/* NOTE(review): the trailing `&&` operand on the next statement (original
 * line 975) is missing from this listing — verify against the source. */
973 /* Profiling. */
974 params.use_profiling = params.device.has_profiling && !b_engine.is_preview() && background &&
976
/* Tiling is a final-render feature only; the viewport renders untiled. */
977 if (background) {
978 params.use_auto_tile = RNA_boolean_get(&cscene, "use_auto_tile");
979 params.tile_size = max(get_int(cscene, "tile_size"), 8);
980 }
981 else {
982 params.use_auto_tile = false;
983 }
984
985 return params;
986}
987
/* Build DenoiseParams from the scene (and, for final renders, per-view-layer)
 * RNA settings, choosing between final-render and viewport denoising options.
 * NOTE(review): this generated listing drops the function signature (original
 * line 988) and the opening `denoising.prefilter = (DenoiserPrefilter)get_enum(`
 * lines (originals 1011 and 1032) — code below is reproduced verbatim from
 * what is visible; confirm against the real sync.cpp. */
989 BL::ViewLayer &b_view_layer,
990 bool background,
991 const DeviceInfo &denoise_device_info)
992 {
/* Local mirror of the RNA "denoising input passes" enum values. */
993 enum DenoiserInput {
994 DENOISER_INPUT_RGB = 1,
995 DENOISER_INPUT_RGB_ALBEDO = 2,
996 DENOISER_INPUT_RGB_ALBEDO_NORMAL = 3,
997
998 DENOISER_INPUT_NUM,
999 };
1000
1001 DenoiseParams denoising;
1002 PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
1003
1004 int input_passes = -1;
1005
1006 if (background) {
1007 /* Final Render Denoising */
1008 denoising.use = get_boolean(cscene, "use_denoising");
1009 denoising.type = (DenoiserType)get_enum(cscene, "denoiser", DENOISER_NUM, DENOISER_NONE);
1010 denoising.use_gpu = get_boolean(cscene, "denoising_use_gpu");
1012 cscene, "denoising_prefilter", DENOISER_PREFILTER_NUM, DENOISER_PREFILTER_NONE);
1013 denoising.quality = (DenoiserQuality)get_enum(
1014 cscene, "denoising_quality", DENOISER_QUALITY_NUM, DENOISER_QUALITY_HIGH);
1015
1016 input_passes = (DenoiserInput)get_enum(
1017 cscene, "denoising_input_passes", DENOISER_INPUT_NUM, DENOISER_INPUT_RGB_ALBEDO_NORMAL);
1018
/* A view layer can opt out of denoising even when the scene enables it. */
1019 if (b_view_layer) {
1020 PointerRNA clayer = RNA_pointer_get(&b_view_layer.ptr, "cycles");
1021 if (!get_boolean(clayer, "use_denoising")) {
1022 denoising.use = false;
1023 }
1024 }
1025 }
1026 else {
1027 /* Viewport Denoising */
1028 denoising.use = get_boolean(cscene, "use_preview_denoising");
1029 denoising.type = (DenoiserType)get_enum(
1030 cscene, "preview_denoiser", DENOISER_NUM, DENOISER_NONE);
1031 denoising.use_gpu = get_boolean(cscene, "preview_denoising_use_gpu");
1033 cscene, "preview_denoising_prefilter", DENOISER_PREFILTER_NUM, DENOISER_PREFILTER_FAST);
1034 denoising.quality = (DenoiserQuality)get_enum(
1035 cscene, "preview_denoising_quality", DENOISER_QUALITY_NUM, DENOISER_QUALITY_BALANCED);
1036 denoising.start_sample = get_int(cscene, "preview_denoising_start_sample");
1037
1038 input_passes = (DenoiserInput)get_enum(
1039 cscene, "preview_denoising_input_passes", DENOISER_INPUT_NUM, DENOISER_INPUT_RGB_ALBEDO);
1040
1041 /* Auto select fastest denoiser. */
1042 if (denoising.type == DENOISER_NONE) {
1043 denoising.type = Denoiser::automatic_viewport_denoiser_type(denoise_device_info);
1044 if (denoising.type == DENOISER_NONE) {
/* No denoiser available for this device: disable denoising entirely. */
1045 denoising.use = false;
1046 }
1047 }
1048 }
1049
/* Translate the input-passes enum into the pair of per-pass flags. */
1050 switch (input_passes) {
1051 case DENOISER_INPUT_RGB:
1052 denoising.use_pass_albedo = false;
1053 denoising.use_pass_normal = false;
1054 break;
1055
1056 case DENOISER_INPUT_RGB_ALBEDO:
1057 denoising.use_pass_albedo = true;
1058 denoising.use_pass_normal = false;
1059 break;
1060
1061 case DENOISER_INPUT_RGB_ALBEDO_NORMAL:
1062 denoising.use_pass_albedo = true;
1063 denoising.use_pass_normal = true;
1064 break;
1065
1066 default:
1067 LOG(ERROR) << "Unhandled input passes enum " << input_passes;
1068 break;
1069 }
1070
1071 return denoising;
1072}
1073
typedef double(DMatrix)[4][4]
DeviceInfo blender_device_info(BL::Preferences &b_preferences, BL::Scene &b_scene, bool background, bool preview, DeviceInfo &preferences_device)
int blender_device_threads(BL::Scene &b_scene)
static unsigned long seed
Definition btSoftBody.h:39
static bool headless
static bool print_render_stats
BlenderSync(BL::RenderEngine &b_engine, BL::BlendData &b_data, BL::Scene &b_scene, Scene *scene, bool preview, bool use_developer_ui, Progress &progress)
Definition sync.cpp:40
static DenoiseParams get_denoise_params(BL::Scene &b_scene, BL::ViewLayer &b_view_layer, bool background, const DeviceInfo &denoise_device)
Definition sync.cpp:988
~BlenderSync()
Definition sync.cpp:73
static bool get_session_pause(BL::Scene &b_scene, bool background)
Definition sync.cpp:887
void sync_recalc(BL::Depsgraph &b_depsgraph, BL::SpaceView3D &b_v3d)
Definition sync.cpp:92
void tag_update()
Definition sync.cpp:85
void sync_data(BL::RenderSettings &b_render, BL::Depsgraph &b_depsgraph, BL::SpaceView3D &b_v3d, BL::Object &b_override, int width, int height, void **python_thread_state, const DeviceInfo &denoise_device_info)
Definition sync.cpp:250
void sync_render_passes(BL::RenderLayer &b_render_layer, BL::ViewLayer &b_view_layer)
Definition sync.cpp:715
void sync_view_layer(BL::ViewLayer &b_view_layer)
Definition sync.cpp:574
void sync_integrator(BL::ViewLayer &b_view_layer, bool background, const DeviceInfo &denoise_device_info)
Definition sync.cpp:304
void free_data_after_sync(BL::Depsgraph &b_depsgraph)
Definition sync.cpp:796
static SceneParams get_scene_params(BL::Scene &b_scene, const bool background, const bool use_developer_ui)
Definition sync.cpp:832
void reset(BL::BlendData &b_data, BL::Scene &b_scene)
Definition sync.cpp:75
static SessionParams get_session_params(BL::RenderEngine &b_engine, BL::Preferences &b_userpref, BL::Scene &b_scene, bool background)
Definition sync.cpp:893
bool shader_modified(const BlenderViewportParameters &other) const
Definition viewport.cpp:71
bool modified(const BlenderViewportParameters &other) const
Definition viewport.cpp:85
CPU cpu
Definition debug.h:117
DenoiserType type
Definition denoise.h:61
DenoiserQuality quality
Definition denoise.h:78
bool use_gpu
Definition denoise.h:75
int start_sample
Definition denoise.h:64
DenoiserPrefilter prefilter
Definition denoise.h:77
NODE_DECLARE bool use
Definition denoise.h:58
bool use_pass_normal
Definition denoise.h:68
bool use_pass_albedo
Definition denoise.h:67
static DenoiserType automatic_viewport_denoiser_type(const DeviceInfo &denoise_device_info)
Definition denoiser.cpp:172
Definition film.h:30
bool is_mesh() const
bool set_animation_frame_update(int frame)
void tag_update(Scene *scene, uint32_t flag)
AdaptiveSampling get_adaptive_sampling() const
static const int MAX_SAMPLES
Definition integrator.h:80
Definition pass.h:49
void set_recalc(const BL::ID &id)
Definition id_map.h:54
void post_sync(bool do_delete=true)
Definition id_map.h:150
const map< K, T * > & key_to_scene_data()
Definition id_map.h:173
static bool image_is_builtin(BL::Image &ima, BL::RenderEngine &engine)
static float get_float(PointerRNA &ptr, const char *name)
static bool get_boolean(PointerRNA &ptr, const char *name)
static int get_int(PointerRNA &ptr, const char *name)
static int get_enum(PointerRNA &ptr, const char *name, int num_values=-1, int default_value=-1)
static Mesh::SubdivisionType object_subdivision_type(BL::Object &b_ob, bool preview, bool experimental)
DebugFlags & DebugFlags()
Definition debug.h:142
DenoiserQuality
Definition denoise.h:43
@ DENOISER_QUALITY_NUM
Definition denoise.h:47
@ DENOISER_QUALITY_BALANCED
Definition denoise.h:45
@ DENOISER_QUALITY_HIGH
Definition denoise.h:44
DenoiserPrefilter
Definition denoise.h:27
@ DENOISER_PREFILTER_FAST
Definition denoise.h:34
@ DENOISER_PREFILTER_NONE
Definition denoise.h:30
@ DENOISER_PREFILTER_NUM
Definition denoise.h:40
DenoiserType
Definition denoise.h:13
@ DENOISER_NONE
Definition denoise.h:18
@ DENOISER_NUM
Definition denoise.h:16
#define CCL_NAMESPACE_END
#define NULL
#define sqrtf(x)
FilterType
Definition film.h:22
@ FILTER_NUM_TYPES
Definition film.h:27
@ FILTER_BOX
Definition film.h:23
@ FILTER_BLACKMAN_HARRIS
Definition film.h:25
ccl_device_inline uint hash_uint2(uint kx, uint ky)
Definition hash.h:89
uiWidgetBaseParameters params[MAX_WIDGET_BASE_BATCH]
DirectLightSamplingType
@ DIRECT_LIGHT_SAMPLING_MIS
@ DIRECT_LIGHT_SAMPLING_NUM
CryptomatteType
@ CRYPT_ASSET
@ CRYPT_NONE
@ CRYPT_OBJECT
@ CRYPT_MATERIAL
CurveShapeType
@ CURVE_THICK
@ CURVE_NUM_SHAPE_TYPES
@ MOTION_POSITION_CENTER
GuidingDirectionalSamplingType
@ GUIDING_DIRECTIONAL_SAMPLING_NUM_TYPES
@ GUIDING_DIRECTIONAL_SAMPLING_TYPE_RIS
SamplingPattern
@ SAMPLING_PATTERN_AUTOMATIC
@ SAMPLING_PATTERN_BLUE_NOISE_FIRST
@ SAMPLING_PATTERN_TABULATED_SOBOL
@ SAMPLING_PATTERN_BLUE_NOISE_PURE
@ SAMPLING_NUM_PATTERNS
PassType
@ PASS_EMISSION
@ PASS_POSITION
@ PASS_BACKGROUND
@ PASS_TRANSMISSION_DIRECT
@ PASS_VOLUME_DIRECT
@ PASS_UV
@ PASS_TRANSMISSION_COLOR
@ PASS_GUIDING_COLOR
@ PASS_DEPTH
@ PASS_MIST
@ PASS_TRANSMISSION_INDIRECT
@ PASS_BAKE_SEED
@ PASS_SHADOW_CATCHER
@ PASS_DENOISING_NORMAL
@ PASS_DIFFUSE_DIRECT
@ PASS_MOTION
@ PASS_MATERIAL_ID
@ PASS_AO
@ PASS_COMBINED
@ PASS_DIFFUSE_INDIRECT
@ PASS_GUIDING_PROBABILITY
@ PASS_ADAPTIVE_AUX_BUFFER
@ PASS_OBJECT_ID
@ PASS_AOV_COLOR
@ PASS_NONE
@ PASS_VOLUME_INDIRECT
@ PASS_NORMAL
@ PASS_CRYPTOMATTE
@ PASS_DIFFUSE_COLOR
@ PASS_SAMPLE_COUNT
@ PASS_GLOSSY_DIRECT
@ PASS_DENOISING_ALBEDO
@ PASS_AOV_VALUE
@ PASS_GUIDING_AVG_ROUGHNESS
@ PASS_GLOSSY_COLOR
@ PASS_GLOSSY_INDIRECT
@ PASS_BAKE_DIFFERENTIAL
@ PASS_DENOISING_DEPTH
@ PASS_BAKE_PRIMITIVE
GuidingDistributionType
@ GUIDING_NUM_TYPES
@ GUIDING_TYPE_PARALLAX_AWARE_VMM
#define VLOG_INFO
Definition log.h:72
#define LOG(severity)
Definition log.h:33
bool is_builtin(const void *, const StringRef attribute_id)
@ BVH_TYPE_DYNAMIC
Definition params.h:34
@ BVH_TYPE_STATIC
Definition params.h:41
PassMode
Definition pass.h:20
PointerRNA RNA_pointer_get(PointerRNA *ptr, const char *name)
int RNA_int_get(PointerRNA *ptr, const char *name)
float RNA_float_get(PointerRNA *ptr, const char *name)
bool RNA_boolean_get(PointerRNA *ptr, const char *name)
int RNA_enum_get(PointerRNA *ptr, const char *name)
PointerRNA RNA_id_pointer_create(ID *id)
@ SHADINGSYSTEM_OSL
@ SHADINGSYSTEM_SVM
#define min(a, b)
Definition sort.c:32
bool string_startswith(const string_view s, const string_view start)
Definition string.cpp:103
CCL_NAMESPACE_BEGIN string string_printf(const char *format,...)
Definition string.cpp:23
BVHLayout bvh_layout
Definition debug.h:48
Definition DNA_ID.h:413
@ SUBDIVISION_NONE
Definition scene/mesh.h:121
@ MOTION_PASS
Definition scene.h:177
@ MOTION_NONE
Definition scene.h:177
static bool get_known_pass_type(BL::RenderPass &b_pass, PassType &type, PassMode &mode)
Definition sync.cpp:629
#define MAP_PASS(passname, passtype, noisy)
static Pass * pass_add(Scene *scene, PassType type, const char *name, PassMode mode=PassMode::DENOISED)
Definition sync.cpp:701
static CCL_NAMESPACE_BEGIN const char * cryptomatte_prefix
Definition sync.cpp:36
float max
ccl_device_inline bool is_light(const ccl_global KernelLightTreeEmitter *kemitter)
Definition tree.h:64
ccl_device_inline int clamp(int a, int mn, int mx)
Definition util/math.h:379
ccl_device_inline size_t divide_up(size_t x, size_t y)
Definition util/types.h:53
wmTimer * timer