/* Blender V5.0 — sync.cpp (Cycles ↔ Blender scene synchronization). */
1/* SPDX-FileCopyrightText: 2011-2022 Blender Foundation
2 *
3 * SPDX-License-Identifier: Apache-2.0 */
4
5#include "RNA_types.hh"
6#include "scene/background.h"
7#include "scene/bake.h"
8#include "scene/camera.h"
9#include "scene/curves.h"
10#include "scene/film.h"
11#include "scene/integrator.h"
12#include "scene/light.h"
13#include "scene/mesh.h"
14#include "scene/object.h"
15#include "scene/procedural.h"
16#include "scene/scene.h"
17#include "scene/shader.h"
18#include "scene/shader_graph.h"
19#include "scene/shader_nodes.h"
20
21#include "device/device.h"
22
23#include "blender/device.h"
24#include "blender/session.h"
25#include "blender/sync.h"
26#include "blender/util.h"
27
28#include "integrator/denoiser.h"
29
30#include "util/debug.h"
31
32#include "util/hash.h"
33#include "util/log.h"
34
36
/* Name prefix identifying cryptomatte render passes. */
static const char *cryptomatte_prefix = "Crypto";
38
39/* Constructor */
40
/* Cache Blender-side handles and set up the ID maps that track which Blender
 * data-blocks correspond to which Cycles scene nodes. `preview` selects
 * viewport settings (e.g. preview dicing rate) over final-render ones;
 * `use_developer_ui` exposes extra debug options during sync. */
BlenderSync::BlenderSync(BL::RenderEngine &b_engine,
                         BL::BlendData &b_data,
                         BL::Scene &b_scene,
                         Scene *scene,
                         bool preview,
                         bool use_developer_ui,
                         Progress &progress)
    : b_engine(b_engine),
      b_data(b_data),
      b_scene(b_scene),
      b_bake_target(PointerRNA_NULL),
      shader_map(scene),
      object_map(scene),
      procedural_map(scene),
      geometry_map(scene),
      particle_system_map(scene),
      world_map(nullptr),
      world_recalc(false),
      scene(scene),
      preview(preview),
      use_developer_ui(use_developer_ui),
      dicing_rate(1.0f),
      max_subdivisions(12),
      progress(progress)

{
  /* Overwrite the member defaults with the actual values from the scene's
   * "cycles" RNA properties. */
  PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
  dicing_rate = preview ? RNA_float_get(&cscene, "preview_dicing_rate") :
                          RNA_float_get(&cscene, "dicing_rate");
  max_subdivisions = RNA_int_get(&cscene, "max_subdivisions");
}
72
74
75void BlenderSync::reset(BL::BlendData &b_data, BL::Scene &b_scene)
76{
77 /* Update data and scene pointers in case they change in session reset,
78 * for example after undo.
79 * Note that we do not modify the `has_updates_` flag here because the sync
80 * reset is also used during viewport navigation. */
81 this->b_data = b_data;
82 this->b_scene = b_scene;
83}
84
86{
87 has_updates_ = true;
88}
89
/* Remember the object being baked; sync_integrator() reads this to disable
 * adaptive subdivision and to detect vertex baking. */
void BlenderSync::set_bake_target(BL::Object &b_object)
{
  b_bake_target = b_object;
}
94
95/* Sync */
96
97void BlenderSync::sync_recalc(BL::Depsgraph &b_depsgraph,
98 BL::SpaceView3D &b_v3d,
99 BL::RegionView3D &b_rv3d)
100{
101 /* Sync recalc flags from blender to cycles. Actual update is done separate,
102 * so we can do it later on if doing it immediate is not suitable. */
103 BL::Object b_dicing_camera_object = get_dicing_camera_object(b_v3d, b_rv3d);
104 bool dicing_camera_updated = false;
105
106 /* Iterate over all IDs in this depsgraph. */
107 for (BL::DepsgraphUpdate &b_update : b_depsgraph.updates) {
108 /* TODO(sergey): Can do more selective filter here. For example, ignore changes made to
109 * screen data-block. Note that sync_data() needs to be called after object deletion, and
110 * currently this is ensured by the scene ID tagged for update, which sets the `has_updates_`
111 * flag. */
112 has_updates_ = true;
113
114 BL::ID b_id(b_update.id());
115
116 /* Material */
117 if (b_id.is_a(&RNA_Material)) {
118 const BL::Material b_mat(b_id);
119 shader_map.set_recalc(b_mat);
120 }
121 /* Light */
122 else if (b_id.is_a(&RNA_Light)) {
123 const BL::Light b_light(b_id);
124 shader_map.set_recalc(b_light);
125 geometry_map.set_recalc(b_light);
126 }
127 /* Object */
128 else if (b_id.is_a(&RNA_Object)) {
129 BL::Object b_ob(b_id);
130 const bool can_have_geometry = object_can_have_geometry(b_ob);
131 const bool is_light = !can_have_geometry && object_is_light(b_ob);
132
133 if (b_ob.is_instancer() && b_update.is_updated_shading()) {
134 /* Needed for object color updates on instancer, among other things. */
135 object_map.set_recalc(b_ob);
136 }
137
138 if (can_have_geometry || is_light) {
139 const bool updated_geometry = b_update.is_updated_geometry();
140 const bool updated_transform = b_update.is_updated_transform();
141
142 /* Geometry (mesh, hair, volume). */
143 if (can_have_geometry) {
144 if (updated_transform || b_update.is_updated_shading()) {
145 object_map.set_recalc(b_ob);
146 }
147
148 const bool use_adaptive_subdiv = object_subdivision_type(
149 b_ob, preview, use_adaptive_subdivision) !=
151
152 /* Need to recompute geometry if the geometry changed, or the transform changed
153 * and using adaptive subdivision. */
154 if (updated_geometry || (updated_transform && use_adaptive_subdiv)) {
155 BL::ID const key = BKE_object_is_modified(b_ob) ?
156 b_ob :
157 object_get_data(b_ob, use_adaptive_subdiv);
158 geometry_map.set_recalc(key);
159
160 /* Sync all contained geometry instances as well when the object changed.. */
161 const map<void *, set<BL::ID>>::const_iterator instance_geometries =
162 instance_geometries_by_object.find(b_ob.ptr.data);
163 if (instance_geometries != instance_geometries_by_object.end()) {
164 for (BL::ID const &geometry : instance_geometries->second) {
165 geometry_map.set_recalc(geometry);
166 }
167 }
168 }
169
170 if (updated_geometry) {
171 BL::Object::particle_systems_iterator b_psys;
172 for (b_ob.particle_systems.begin(b_psys); b_psys != b_ob.particle_systems.end();
173 ++b_psys)
174 {
175 particle_system_map.set_recalc(b_ob);
176 }
177 }
178 }
179 /* Light */
180 else if (is_light) {
181 if (b_update.is_updated_transform() || b_update.is_updated_shading()) {
182 object_map.set_recalc(b_ob);
183 geometry_map.set_recalc(b_ob);
184 }
185
186 if (updated_geometry) {
187 geometry_map.set_recalc(b_ob);
188 }
189 }
190 }
191 else if (object_is_camera(b_ob)) {
192 shader_map.set_recalc(b_ob);
193 }
194
195 if (b_dicing_camera_object == b_ob) {
196 dicing_camera_updated = true;
197 }
198 }
199 /* Mesh */
200 else if (b_id.is_a(&RNA_Mesh)) {
201 const BL::Mesh b_mesh(b_id);
202 geometry_map.set_recalc(b_mesh);
203 }
204 /* World */
205 else if (b_id.is_a(&RNA_World)) {
206 const BL::World b_world(b_id);
207 if (world_map == b_world.ptr.data) {
208 world_recalc = true;
209 }
210 shader_map.set_recalc(b_world);
211 }
212 /* World */
213 else if (b_id.is_a(&RNA_Scene)) {
214 shader_map.set_recalc(b_id);
215 }
216 /* Volume */
217 else if (b_id.is_a(&RNA_Volume)) {
218 const BL::Volume b_volume(b_id);
219 geometry_map.set_recalc(b_volume);
220 }
221 /* Camera */
222 else if (b_id.is_a(&RNA_Camera)) {
223 if (b_dicing_camera_object && b_dicing_camera_object.data() == b_id) {
224 dicing_camera_updated = true;
225 }
226 }
227 }
228
229 if (use_adaptive_subdivision) {
230 /* Mark all meshes as needing to be exported again if dicing changed. */
231 PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
232 bool dicing_prop_changed = false;
233
234 const float updated_dicing_rate = preview ? RNA_float_get(&cscene, "preview_dicing_rate") :
235 RNA_float_get(&cscene, "dicing_rate");
236
237 if (dicing_rate != updated_dicing_rate) {
238 dicing_rate = updated_dicing_rate;
239 dicing_prop_changed = true;
240 }
241
242 const int updated_max_subdivisions = RNA_int_get(&cscene, "max_subdivisions");
243
244 if (max_subdivisions != updated_max_subdivisions) {
245 max_subdivisions = updated_max_subdivisions;
246 dicing_prop_changed = true;
247 }
248
249 if ((dicing_camera_updated && !region_view3d_navigating_or_transforming(b_rv3d)) ||
250 dicing_prop_changed)
251 {
252 has_updates_ = true;
253
254 for (const pair<const GeometryKey, Geometry *> &iter : geometry_map.key_to_scene_data()) {
255 Geometry *geom = iter.second;
256 if (geom->is_mesh()) {
257 Mesh *mesh = static_cast<Mesh *>(geom);
258 if (mesh->get_subdivision_type() != Mesh::SUBDIVISION_NONE) {
259 const PointerRNA id_ptr = RNA_id_pointer_create((::ID *)iter.first.id);
260 geometry_map.set_recalc(BL::ID(id_ptr));
261 }
262 }
263 }
264 }
265 }
266
267 if (b_v3d) {
268 const BlenderViewportParameters new_viewport_parameters(b_v3d, use_developer_ui);
269
270 if (viewport_parameters.shader_modified(new_viewport_parameters)) {
271 world_recalc = true;
272 has_updates_ = true;
273 }
274
275 has_updates_ |= viewport_parameters.modified(new_viewport_parameters);
276 }
277}
278
/* Synchronize all tagged data from the Blender depsgraph into the Cycles
 * scene. Early-outs when nothing was tagged for update (see sync_recalc() /
 * tag flags) and no image needs an animation-frame refresh. */
void BlenderSync::sync_data(BL::RenderSettings &b_render,
                            BL::Depsgraph &b_depsgraph,
                            BL::SpaceView3D &b_v3d,
                            BL::RegionView3D &b_rv3d,
                            const int width,
                            const int height,
                            void **python_thread_state,
                            const DeviceInfo &denoise_device_info)
{
  /* For auto refresh images. */
  ImageManager *image_manager = scene->image_manager.get();
  const int frame = b_scene.frame_current();
  const bool auto_refresh_update = image_manager->set_animation_frame_update(frame);

  if (!has_updates_ && !auto_refresh_update) {
    return;
  }

  const scoped_timer timer;

  BL::ViewLayer b_view_layer = b_depsgraph.view_layer_eval();

  /* TODO(sergey): This feels weak to pass view layer to the integrator, and even weaker to have an
   * implicit check on whether it is a background render or not. What is the nicer thing here? */
  const bool background = !b_v3d;

  /* Settings first, then shaders/images, then geometry which depends on them. */
  sync_view_layer(b_view_layer);
  sync_integrator(b_view_layer, background, denoise_device_info);
  sync_film(b_view_layer, b_v3d);
  sync_shaders(b_depsgraph, b_v3d, auto_refresh_update);
  sync_images();

  geometry_synced.clear(); /* use for objects and motion sync */

  /* NOTE(review): when motion blur uses a non-center motion position, plain
   * object sync is skipped here — presumably handled by sync_motion() below;
   * confirm against sync_motion() implementation. */
  if (scene->need_motion() == Scene::MOTION_PASS || scene->need_motion() == Scene::MOTION_NONE ||
      scene->camera->get_motion_position() == MOTION_POSITION_CENTER)
  {
    sync_objects(b_depsgraph, b_v3d);
  }
  sync_motion(b_render, b_depsgraph, b_v3d, b_rv3d, width, height, python_thread_state);

  geometry_synced.clear();

  /* Shader sync done at the end, since object sync uses it.
   * false = don't delete unused shaders, not supported. */
  shader_map.post_sync(false);

  LOG_INFO << "Total time spent synchronizing data: " << timer.get_time();

  has_updates_ = false;
}
330
331/* Integrator */
332
333void BlenderSync::sync_integrator(BL::ViewLayer &b_view_layer,
334 bool background,
335 const DeviceInfo &denoise_device_info)
336{
337 PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
338
339 /* No adaptive subdivision for baking, mesh needs to match Blender exactly. */
340 use_adaptive_subdivision = !b_bake_target;
341
342 Integrator *integrator = scene->integrator;
343
344 integrator->set_min_bounce(get_int(cscene, "min_light_bounces"));
345 integrator->set_max_bounce(get_int(cscene, "max_bounces"));
346
347 integrator->set_max_diffuse_bounce(get_int(cscene, "diffuse_bounces"));
348 integrator->set_max_glossy_bounce(get_int(cscene, "glossy_bounces"));
349 integrator->set_max_transmission_bounce(get_int(cscene, "transmission_bounces"));
350 integrator->set_max_volume_bounce(get_int(cscene, "volume_bounces"));
351 integrator->set_transparent_min_bounce(get_int(cscene, "min_transparent_bounces"));
352 integrator->set_transparent_max_bounce(get_int(cscene, "transparent_max_bounces"));
353
354 integrator->set_volume_ray_marching(get_boolean(cscene, "volume_biased"));
355 integrator->set_volume_max_steps(get_int(cscene, "volume_max_steps"));
356 const float volume_step_rate = (preview) ? get_float(cscene, "volume_preview_step_rate") :
357 get_float(cscene, "volume_step_rate");
358 integrator->set_volume_step_rate(volume_step_rate);
359
360 integrator->set_caustics_reflective(get_boolean(cscene, "caustics_reflective"));
361 integrator->set_caustics_refractive(get_boolean(cscene, "caustics_refractive"));
362 integrator->set_filter_glossy(get_float(cscene, "blur_glossy"));
363
364 int seed = get_int(cscene, "seed");
365 if (get_boolean(cscene, "use_animated_seed")) {
366 seed = hash_uint2(b_scene.frame_current(), get_int(cscene, "seed"));
367 if (b_scene.frame_subframe() != 0.0f) {
368 /* TODO(sergey): Ideally should be some sort of hash_merge,
369 * but this is good enough for now.
370 */
371 seed += hash_uint2((int)(b_scene.frame_subframe() * (float)INT_MAX),
372 get_int(cscene, "seed"));
373 }
374 }
375
376 integrator->set_seed(seed);
377
378 integrator->set_sample_clamp_direct(get_float(cscene, "sample_clamp_direct"));
379 integrator->set_sample_clamp_indirect(get_float(cscene, "sample_clamp_indirect"));
380 if (!preview) {
381 integrator->set_motion_blur(view_layer.use_motion_blur);
382 }
383
384 const bool use_light_tree = get_boolean(cscene, "use_light_tree");
385 integrator->set_use_light_tree(use_light_tree);
386 integrator->set_light_sampling_threshold(get_float(cscene, "light_sampling_threshold"));
387
388 if (integrator->use_light_tree_is_modified()) {
389 scene->light_manager->tag_update(scene, LightManager::UPDATE_ALL);
390 }
391
392 SamplingPattern sampling_pattern = (SamplingPattern)get_enum(
393 cscene, "sampling_pattern", SAMPLING_NUM_PATTERNS, SAMPLING_PATTERN_TABULATED_SOBOL);
394
395 switch (sampling_pattern) {
397 if (!background) {
398 /* For interactive rendering, ensure that the first sample is in itself
399 * blue-noise-distributed for smooth viewport navigation. */
400 sampling_pattern = SAMPLING_PATTERN_BLUE_NOISE_FIRST;
401 }
402 else {
403 /* For non-interactive rendering, default to a full blue-noise pattern. */
404 sampling_pattern = SAMPLING_PATTERN_BLUE_NOISE_PURE;
405 }
406 break;
409 /* Always allowed. */
410 break;
411 default:
412 /* If not using developer UI, default to blue noise for "advanced" patterns. */
413 if (!use_developer_ui) {
414 sampling_pattern = SAMPLING_PATTERN_BLUE_NOISE_PURE;
415 }
416 break;
417 }
418
419 const bool is_vertex_baking = b_bake_target && b_scene.render().bake().target() !=
420 BL::BakeSettings::target_IMAGE_TEXTURES;
421 scene->bake_manager->set_use_seed(is_vertex_baking);
422 if (is_vertex_baking) {
423 /* When baking vertex colors, the "pixels" in the output are unrelated to their neighbors,
424 * so blue-noise sampling makes no sense. */
425 sampling_pattern = SAMPLING_PATTERN_TABULATED_SOBOL;
426 }
427
428 integrator->set_sampling_pattern(sampling_pattern);
429
430 int samples = 1;
431 bool use_adaptive_sampling = false;
432 if (preview) {
433 samples = get_int(cscene, "preview_samples");
434 use_adaptive_sampling = RNA_boolean_get(&cscene, "use_preview_adaptive_sampling");
435 integrator->set_use_adaptive_sampling(use_adaptive_sampling);
436 integrator->set_adaptive_threshold(get_float(cscene, "preview_adaptive_threshold"));
437 integrator->set_adaptive_min_samples(get_int(cscene, "preview_adaptive_min_samples"));
438 }
439 else {
440 samples = get_int(cscene, "samples");
441 use_adaptive_sampling = RNA_boolean_get(&cscene, "use_adaptive_sampling");
442 integrator->set_use_adaptive_sampling(use_adaptive_sampling);
443 integrator->set_adaptive_threshold(get_float(cscene, "adaptive_threshold"));
444 integrator->set_adaptive_min_samples(get_int(cscene, "adaptive_min_samples"));
445 }
446
447 float scrambling_distance = get_float(cscene, "scrambling_distance");
448 const bool auto_scrambling_distance = get_boolean(cscene, "auto_scrambling_distance");
449 if (auto_scrambling_distance) {
450 if (samples == 0) {
451 /* If samples is 0, then viewport rendering is set to render infinitely. In that case we
452 * override the samples value with 4096 so the Automatic Scrambling Distance algorithm
453 * picks a Scrambling Distance value with a good balance of performance and correlation
454 * artifacts when rendering to high sample counts. */
455 samples = 4096;
456 }
457
458 if (use_adaptive_sampling) {
459 /* If Adaptive Sampling is enabled, use "min_samples" in the Automatic Scrambling Distance
460 * algorithm to avoid artifacts common with Adaptive Sampling + Scrambling Distance. */
461 const AdaptiveSampling adaptive_sampling = integrator->get_adaptive_sampling();
462 samples = min(samples, adaptive_sampling.min_samples);
463 }
464 scrambling_distance *= 4.0f / sqrtf(samples);
465 }
466
467 /* Only use scrambling distance in the viewport if user wants to. */
468 const bool preview_scrambling_distance = get_boolean(cscene, "preview_scrambling_distance");
469 if ((preview && !preview_scrambling_distance) ||
470 sampling_pattern != SAMPLING_PATTERN_TABULATED_SOBOL)
471 {
472 scrambling_distance = 1.0f;
473 }
474
475 if (scrambling_distance != 1.0f) {
476 LOG_INFO << "Using scrambling distance: " << scrambling_distance;
477 }
478 integrator->set_scrambling_distance(scrambling_distance);
479
480 if (get_boolean(cscene, "use_fast_gi")) {
481 if (preview) {
482 integrator->set_ao_bounces(get_int(cscene, "ao_bounces"));
483 }
484 else {
485 integrator->set_ao_bounces(get_int(cscene, "ao_bounces_render"));
486 }
487 }
488 else {
489 integrator->set_ao_bounces(0);
490 }
491
492#ifdef WITH_CYCLES_DEBUG
493 DirectLightSamplingType direct_light_sampling_type = (DirectLightSamplingType)get_enum(
494 cscene, "direct_light_sampling_type", DIRECT_LIGHT_SAMPLING_NUM, DIRECT_LIGHT_SAMPLING_MIS);
495 integrator->set_direct_light_sampling_type(direct_light_sampling_type);
496#endif
497
498 integrator->set_use_guiding(get_boolean(cscene, "use_guiding"));
499 integrator->set_use_surface_guiding(get_boolean(cscene, "use_surface_guiding"));
500 integrator->set_use_volume_guiding(get_boolean(cscene, "use_volume_guiding"));
501 integrator->set_guiding_training_samples(get_int(cscene, "guiding_training_samples"));
502
503 if (use_developer_ui) {
504 integrator->set_deterministic_guiding(get_boolean(cscene, "use_deterministic_guiding"));
505 integrator->set_surface_guiding_probability(get_float(cscene, "surface_guiding_probability"));
506 integrator->set_volume_guiding_probability(get_float(cscene, "volume_guiding_probability"));
507 integrator->set_use_guiding_direct_light(get_boolean(cscene, "use_guiding_direct_light"));
508 integrator->set_use_guiding_mis_weights(get_boolean(cscene, "use_guiding_mis_weights"));
509 const GuidingDistributionType guiding_distribution_type = (GuidingDistributionType)get_enum(
510 cscene, "guiding_distribution_type", GUIDING_NUM_TYPES, GUIDING_TYPE_PARALLAX_AWARE_VMM);
511 integrator->set_guiding_distribution_type(guiding_distribution_type);
512 const GuidingDirectionalSamplingType guiding_directional_sampling_type =
514 "guiding_directional_sampling_type",
517 integrator->set_guiding_directional_sampling_type(guiding_directional_sampling_type);
518 integrator->set_guiding_roughness_threshold(get_float(cscene, "guiding_roughness_threshold"));
519 }
520
521 DenoiseParams denoise_params = get_denoise_params(
522 b_scene, b_view_layer, background, denoise_device_info);
523
524 /* No denoising support for vertex color baking, vertices packed into image
525 * buffer have no relation to neighbors. */
526 if (is_vertex_baking) {
527 denoise_params.use = false;
528 }
529
530 integrator->set_use_denoise(denoise_params.use);
531
532 /* Only update denoiser parameters if the denoiser is actually used. This allows to tweak
533 * denoiser parameters before enabling it without render resetting on every change. The downside
534 * is that the interface and the integrator are technically out of sync. */
535 if (denoise_params.use) {
536 integrator->set_denoiser_type(denoise_params.type);
537 integrator->set_denoise_use_gpu(denoise_params.use_gpu);
538 integrator->set_denoise_start_sample(denoise_params.start_sample);
539 integrator->set_use_denoise_pass_albedo(denoise_params.use_pass_albedo);
540 integrator->set_use_denoise_pass_normal(denoise_params.use_pass_normal);
541 integrator->set_denoiser_prefilter(denoise_params.prefilter);
542 integrator->set_denoiser_quality(denoise_params.quality);
543 }
544
545 /* UPDATE_NONE as we don't want to tag the integrator as modified (this was done by the
546 * set calls above), but we need to make sure that the dependent things are tagged. */
547 integrator->tag_update(scene, Integrator::UPDATE_NONE);
548}
549
550/* Film */
551
552void BlenderSync::sync_film(BL::ViewLayer &b_view_layer, BL::SpaceView3D &b_v3d)
553{
554 PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
555 PointerRNA crl = RNA_pointer_get(&b_view_layer.ptr, "cycles");
556
557 Film *film = scene->film;
558
559 if (b_v3d) {
560 const BlenderViewportParameters new_viewport_parameters(b_v3d, use_developer_ui);
561 film->set_display_pass(new_viewport_parameters.display_pass);
562 film->set_show_active_pixels(new_viewport_parameters.show_active_pixels);
563 }
564
565 film->set_exposure(get_float(cscene, "film_exposure"));
566 film->set_filter_type(
567 (FilterType)get_enum(cscene, "pixel_filter_type", FILTER_NUM_TYPES, FILTER_BLACKMAN_HARRIS));
568 const float filter_width = (film->get_filter_type() == FILTER_BOX) ?
569 1.0f :
570 get_float(cscene, "filter_width");
571 film->set_filter_width(filter_width);
572
573 if (b_scene.world()) {
574 BL::WorldMistSettings b_mist = b_scene.world().mist_settings();
575
576 film->set_mist_start(b_mist.start());
577 film->set_mist_depth(b_mist.depth());
578
579 switch (b_mist.falloff()) {
580 case BL::WorldMistSettings::falloff_QUADRATIC:
581 film->set_mist_falloff(2.0f);
582 break;
583 case BL::WorldMistSettings::falloff_LINEAR:
584 film->set_mist_falloff(1.0f);
585 break;
586 case BL::WorldMistSettings::falloff_INVERSE_QUADRATIC:
587 film->set_mist_falloff(0.5f);
588 break;
589 }
590 }
591
592 /* Blender viewport does not support proper shadow catcher compositing, so force an approximate
593 * mode to improve visual feedback. */
594 if (b_v3d) {
595 film->set_use_approximate_shadow_catcher(true);
596 }
597 else {
598 film->set_use_approximate_shadow_catcher(!get_boolean(crl, "use_pass_shadow_catcher"));
599 }
600}
601
602/* Render Layer */
603
604void BlenderSync::sync_view_layer(BL::ViewLayer &b_view_layer)
605{
606 view_layer.name = b_view_layer.name();
607
608 /* Filter. */
609 view_layer.use_background_shader = b_view_layer.use_sky();
610 /* Always enable surfaces for baking, otherwise there is nothing to bake to. */
611 view_layer.use_surfaces = b_view_layer.use_solid() || b_bake_target;
612 view_layer.use_hair = b_view_layer.use_strand();
613 view_layer.use_volumes = b_view_layer.use_volumes();
614 view_layer.use_motion_blur = b_view_layer.use_motion_blur() &&
615 b_scene.render().use_motion_blur();
616
617 /* Material override. */
618 view_layer.material_override = b_view_layer.material_override();
619 /* World override. */
620 view_layer.world_override = b_view_layer.world_override();
621
622 /* Sample override. */
623 PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
624 const int use_layer_samples = get_enum(cscene, "use_layer_samples");
625
626 view_layer.bound_samples = (use_layer_samples == 1);
627 view_layer.samples = 0;
628
629 if (use_layer_samples != 2) {
630 const int samples = b_view_layer.samples();
631 view_layer.samples = samples;
632 }
633}
634
635/* Images */
636void BlenderSync::sync_images()
637{
638 /* Sync is a convention for this API, but currently it frees unused buffers. */
639
640 const bool is_interface_locked = b_engine.render() && b_engine.render().use_lock_interface();
641 if (is_interface_locked == false && BlenderSession::headless == false) {
642 /* If interface is not locked, it's possible image is needed for
643 * the display.
644 */
645 return;
646 }
647 /* Free buffers used by images which are not needed for render. */
648 for (BL::Image &b_image : b_data.images) {
649 const bool is_builtin = image_is_builtin(b_image, b_engine);
650 if (is_builtin == false) {
651 b_image.buffers_free();
652 }
653 /* TODO(sergey): Free builtin images not used by any shader. */
654 }
655}
656
657/* Passes */
658
659static bool get_known_pass_type(BL::RenderPass &b_pass, PassType &type, PassMode &mode)
660{
661 const string name = b_pass.name();
662#define MAP_PASS(passname, passtype, noisy) \
663 if (name == passname) { \
664 type = passtype; \
665 mode = (noisy) ? PassMode::NOISY : PassMode::DENOISED; \
666 return true; \
667 } \
668 ((void)0)
669
670 /* NOTE: Keep in sync with defined names from engine.py */
671
672 MAP_PASS("Combined", PASS_COMBINED, false);
673 MAP_PASS("Noisy Image", PASS_COMBINED, true);
674
675 MAP_PASS("Depth", PASS_DEPTH, false);
676 MAP_PASS("Mist", PASS_MIST, false);
677 MAP_PASS("Position", PASS_POSITION, false);
678 MAP_PASS("Normal", PASS_NORMAL, false);
679 MAP_PASS("Object Index", PASS_OBJECT_ID, false);
680 MAP_PASS("UV", PASS_UV, false);
681 MAP_PASS("Vector", PASS_MOTION, false);
682 MAP_PASS("Material Index", PASS_MATERIAL_ID, false);
683
684 MAP_PASS("Diffuse Direct", PASS_DIFFUSE_DIRECT, false);
685 MAP_PASS("Glossy Direct", PASS_GLOSSY_DIRECT, false);
686 MAP_PASS("Transmission Direct", PASS_TRANSMISSION_DIRECT, false);
687 MAP_PASS("Volume Direct", PASS_VOLUME_DIRECT, false);
688
689 MAP_PASS("Diffuse Indirect", PASS_DIFFUSE_INDIRECT, false);
690 MAP_PASS("Glossy Indirect", PASS_GLOSSY_INDIRECT, false);
691 MAP_PASS("Transmission Indirect", PASS_TRANSMISSION_INDIRECT, false);
692 MAP_PASS("Volume Indirect", PASS_VOLUME_INDIRECT, false);
693 MAP_PASS("Volume Scatter", PASS_VOLUME_SCATTER, false);
694 MAP_PASS("Volume Transmit", PASS_VOLUME_TRANSMIT, false);
695 MAP_PASS("Volume Majorant", PASS_VOLUME_MAJORANT, false);
696
697 MAP_PASS("Diffuse Color", PASS_DIFFUSE_COLOR, false);
698 MAP_PASS("Glossy Color", PASS_GLOSSY_COLOR, false);
699 MAP_PASS("Transmission Color", PASS_TRANSMISSION_COLOR, false);
700
701 MAP_PASS("Emission", PASS_EMISSION, false);
702 MAP_PASS("Environment", PASS_BACKGROUND, false);
703 MAP_PASS("Ambient Occlusion", PASS_AO, false);
704
705 MAP_PASS("BakePrimitive", PASS_BAKE_PRIMITIVE, false);
706 MAP_PASS("BakeSeed", PASS_BAKE_SEED, false);
707 MAP_PASS("BakeDifferential", PASS_BAKE_DIFFERENTIAL, false);
708
709 MAP_PASS("Denoising Normal", PASS_DENOISING_NORMAL, true);
710 MAP_PASS("Denoising Albedo", PASS_DENOISING_ALBEDO, true);
711 MAP_PASS("Denoising Depth", PASS_DENOISING_DEPTH, true);
712
713 MAP_PASS("Shadow Catcher", PASS_SHADOW_CATCHER, false);
714 MAP_PASS("Noisy Shadow Catcher", PASS_SHADOW_CATCHER, true);
715
716 MAP_PASS("AdaptiveAuxBuffer", PASS_ADAPTIVE_AUX_BUFFER, false);
717 MAP_PASS("Debug Sample Count", PASS_SAMPLE_COUNT, false);
718 MAP_PASS("Render Time", PASS_RENDER_TIME, false);
719
720 MAP_PASS("Guiding Color", PASS_GUIDING_COLOR, false);
721 MAP_PASS("Guiding Probability", PASS_GUIDING_PROBABILITY, false);
722 MAP_PASS("Guiding Average Roughness", PASS_GUIDING_AVG_ROUGHNESS, false);
723
725 type = PASS_CRYPTOMATTE;
726 mode = PassMode::DENOISED;
727 return true;
728 }
729
730#undef MAP_PASS
731
732 return false;
733}
734
735static Pass *pass_add(Scene *scene,
736 PassType type,
737 const char *name,
739{
740 Pass *pass = scene->create_node<Pass>();
741
742 pass->set_type(type);
743 pass->set_name(ustring(name));
744 pass->set_mode(mode);
745
746 return pass;
747}
748
749void BlenderSync::sync_render_passes(BL::RenderLayer &b_rlay, BL::ViewLayer &b_view_layer)
750{
751 /* Delete all existing passes. */
752 const vector<Pass *> &scene_passes = scene->passes;
753 scene->delete_nodes(set<Pass *>(scene_passes.begin(), scene_passes.end()));
754
755 /* Always add combined pass. */
756 pass_add(scene, PASS_COMBINED, "Combined");
757
758 /* Cryptomatte stores two ID/weight pairs per RGBA layer.
759 * User facing parameter is the number of pairs. */
760 const int crypto_depth = divide_up(min(16, b_view_layer.pass_cryptomatte_depth()), 2);
761 scene->film->set_cryptomatte_depth(crypto_depth);
762 CryptomatteType cryptomatte_passes = CRYPT_NONE;
763 if (b_view_layer.use_pass_cryptomatte_object()) {
764 cryptomatte_passes = (CryptomatteType)(cryptomatte_passes | CRYPT_OBJECT);
765 }
766 if (b_view_layer.use_pass_cryptomatte_material()) {
767 cryptomatte_passes = (CryptomatteType)(cryptomatte_passes | CRYPT_MATERIAL);
768 }
769 if (b_view_layer.use_pass_cryptomatte_asset()) {
770 cryptomatte_passes = (CryptomatteType)(cryptomatte_passes | CRYPT_ASSET);
771 }
772 scene->film->set_cryptomatte_passes(cryptomatte_passes);
773
774 unordered_set<string> expected_passes;
775
776 /* Custom AOV passes. */
777 BL::ViewLayer::aovs_iterator b_aov_iter;
778 for (b_view_layer.aovs.begin(b_aov_iter); b_aov_iter != b_view_layer.aovs.end(); ++b_aov_iter) {
779 BL::AOV b_aov(*b_aov_iter);
780 if (!b_aov.is_valid()) {
781 continue;
782 }
783
784 const string name = b_aov.name();
785 const PassType type = (b_aov.type() == BL::AOV::type_COLOR) ? PASS_AOV_COLOR : PASS_AOV_VALUE;
786
787 pass_add(scene, type, name.c_str());
788 expected_passes.insert(name);
789 }
790
791 /* Light Group passes. */
792 BL::ViewLayer::lightgroups_iterator b_lightgroup_iter;
793 for (b_view_layer.lightgroups.begin(b_lightgroup_iter);
794 b_lightgroup_iter != b_view_layer.lightgroups.end();
795 ++b_lightgroup_iter)
796 {
797 BL::Lightgroup b_lightgroup(*b_lightgroup_iter);
798
799 const string name = string_printf("Combined_%s", b_lightgroup.name().c_str());
800
801 Pass *pass = pass_add(scene, PASS_COMBINED, name.c_str(), PassMode::NOISY);
802 pass->set_lightgroup(ustring(b_lightgroup.name()));
803 expected_passes.insert(name);
804 }
805
806 /* Sync the passes that were defined in engine.py. */
807 for (BL::RenderPass &b_pass : b_rlay.passes) {
808 PassType pass_type = PASS_NONE;
809 PassMode pass_mode = PassMode::DENOISED;
810
811 if (!get_known_pass_type(b_pass, pass_type, pass_mode)) {
812 if (!expected_passes.count(b_pass.name())) {
813 LOG_ERROR << "Unknown pass " << b_pass.name();
814 }
815 continue;
816 }
817
818 if (pass_type == PASS_MOTION &&
819 (b_view_layer.use_motion_blur() && b_scene.render().use_motion_blur()))
820 {
821 continue;
822 }
823
824 pass_add(scene, pass_type, b_pass.name().c_str(), pass_mode);
825 }
826
827 scene->film->set_pass_alpha_threshold(b_view_layer.pass_alpha_threshold());
828}
829
830void BlenderSync::free_data_after_sync(BL::Depsgraph &b_depsgraph)
831{
832 /* When viewport display is not needed during render we can force some
833 * caches to be releases from blender side in order to reduce peak memory
834 * footprint during synchronization process.
835 */
836
837 const bool is_interface_locked = b_engine.render() && b_engine.render().use_lock_interface();
838 const bool is_persistent_data = b_engine.render() && b_engine.render().use_persistent_data();
839 const bool can_free_caches =
840 (BlenderSession::headless || is_interface_locked) &&
841 /* Baking re-uses the depsgraph multiple times, clearing crashes
842 * reading un-evaluated mesh data which isn't aligned with the
843 * geometry we're baking, see #71012. */
844 !b_bake_target &&
845 /* Persistent data must main caches for performance and correctness. */
846 !is_persistent_data;
847
848 if (!can_free_caches) {
849 return;
850 }
851 /* TODO(sergey): We can actually remove the whole dependency graph,
852 * but that will need some API support first.
853 */
854 for (BL::Object &b_ob : b_depsgraph.objects) {
855 /* Grease pencil render requires all evaluated objects available as-is after Cycles is done
856 * with its part. */
857 if (b_ob.type() == BL::Object::type_GREASEPENCIL) {
858 continue;
859 }
860 b_ob.cache_release();
861 }
862}
863
864/* Scene Parameters */
865
867 const bool background,
868 const bool use_developer_ui)
869{
871 PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
872 const bool shadingsystem = RNA_boolean_get(&cscene, "shading_system");
873
874 if (shadingsystem == 0) {
875 params.shadingsystem = SHADINGSYSTEM_SVM;
876 }
877 else if (shadingsystem == 1) {
878 params.shadingsystem = SHADINGSYSTEM_OSL;
879 }
880
881 if (background || (use_developer_ui && get_enum(cscene, "debug_bvh_type"))) {
882 params.bvh_type = BVH_TYPE_STATIC;
883 }
884 else {
885 params.bvh_type = BVH_TYPE_DYNAMIC;
886 }
887
888 params.use_bvh_spatial_split = RNA_boolean_get(&cscene, "debug_use_spatial_splits");
889 params.use_bvh_compact_structure = RNA_boolean_get(&cscene, "debug_use_compact_bvh");
890 params.use_bvh_unaligned_nodes = RNA_boolean_get(&cscene, "debug_use_hair_bvh");
891 params.num_bvh_time_steps = RNA_int_get(&cscene, "debug_bvh_time_steps");
892
893 PointerRNA csscene = RNA_pointer_get(&b_scene.ptr, "cycles_curves");
894 params.hair_subdivisions = get_int(csscene, "subdivisions");
895 params.hair_shape = (CurveShapeType)get_enum(
896 csscene, "shape", CURVE_NUM_SHAPE_TYPES, CURVE_THICK);
897
898 int texture_limit;
899 if (background) {
900 texture_limit = RNA_enum_get(&cscene, "texture_limit_render");
901 }
902 else {
903 texture_limit = RNA_enum_get(&cscene, "texture_limit");
904 }
905 if (texture_limit > 0 && b_scene.render().use_simplify()) {
906 params.texture_limit = 1 << (texture_limit + 6);
907 }
908 else {
909 params.texture_limit = 0;
910 }
911
912 params.bvh_layout = DebugFlags().cpu.bvh_layout;
913
914 params.background = background;
915
916 return params;
917}
918
919/* Session Parameters */
920
921bool BlenderSync::get_session_pause(BL::Scene &b_scene, bool background)
922{
923 PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
924 return (background) ? false : get_boolean(cscene, "preview_pause");
925}
926
928 BL::Preferences &b_preferences,
929 BL::Scene &b_scene,
930 bool background)
931{
933 PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
934
935 if (background && !b_engine.is_preview()) {
936 /* Viewport and preview renders do not require temp directory and do request session
937 * parameters more often than the background render.
938 * Optimize RNA-C++ usage and memory allocation a bit by saving string access which we know is
939 * not needed for viewport render. */
940 params.temp_dir = b_engine.temporary_directory();
941 }
942
943 /* Headless and background rendering. */
945 params.background = background;
946
947 /* Device */
948 params.threads = blender_device_threads(b_scene);
950 b_preferences, b_scene, params.background, b_engine.is_preview(), params.denoise_device);
951
952 /* samples */
953 const int samples = get_int(cscene, "samples");
954 const int preview_samples = get_int(cscene, "preview_samples");
955 const bool use_sample_subset = get_boolean(cscene, "use_sample_subset");
956 const int sample_subset_offset = get_int(cscene, "sample_offset");
957 const int sample_subset_length = get_int(cscene, "sample_subset_length");
958
959 if (background) {
960 params.samples = samples;
961
962 params.use_sample_subset = use_sample_subset;
963 params.sample_subset_offset = sample_subset_offset;
964 params.sample_subset_length = sample_subset_length;
965 }
966 else {
967 params.samples = preview_samples;
968 if (params.samples == 0) {
969 params.samples = INT_MAX;
970 }
971 params.use_sample_subset = false;
972 params.sample_subset_offset = 0;
973 params.sample_subset_length = 0;
974 }
975
976 /* Viewport Performance */
977 params.pixel_size = b_engine.get_preview_pixel_size(b_scene);
978
979 if (background) {
980 params.pixel_size = 1;
981 }
982
983 /* shading system - scene level needs full refresh */
984 const bool shadingsystem = RNA_boolean_get(&cscene, "shading_system");
985
986 if (shadingsystem == 0) {
987 params.shadingsystem = SHADINGSYSTEM_SVM;
988 }
989 else if (shadingsystem == 1) {
990 params.shadingsystem = SHADINGSYSTEM_OSL;
991 }
992
993 /* Time limit. */
994 if (background) {
995 params.time_limit = (double)get_float(cscene, "time_limit");
996 }
997 else {
998 /* For the viewport it kind of makes more sense to think in terms of the noise floor, which is
999 * usually higher than acceptable level for the final frame. */
1000 /* TODO: It might be useful to support time limit in the viewport as well, but needs some
1001 * extra thoughts and input. */
1002 params.time_limit = 0.0;
1003 }
1004
1005 /* Profiling. */
1006 params.use_profiling = params.device.has_profiling && !b_engine.is_preview() && background &&
1008
1009 if (background) {
1010 params.use_auto_tile = true;
1011 params.tile_size = max(get_int(cscene, "tile_size"), 8);
1012 }
1013 else {
1014 params.use_auto_tile = false;
1015 }
1016
1017 return params;
1018}
1019
1021 BL::ViewLayer &b_view_layer,
1022 bool background,
1023 const DeviceInfo &denoise_device_info)
1024{
1025 enum DenoiserInput {
1026 DENOISER_INPUT_RGB = 1,
1027 DENOISER_INPUT_RGB_ALBEDO = 2,
1028 DENOISER_INPUT_RGB_ALBEDO_NORMAL = 3,
1029
1030 DENOISER_INPUT_NUM,
1031 };
1032
1033 DenoiseParams denoising;
1034 PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
1035
1036 int input_passes = -1;
1037
1038 if (background) {
1039 /* Final Render Denoising */
1040 denoising.use = get_boolean(cscene, "use_denoising");
1041 denoising.type = (DenoiserType)get_enum(cscene, "denoiser", DENOISER_NUM, DENOISER_NONE);
1042 denoising.use_gpu = get_boolean(cscene, "denoising_use_gpu");
1044 cscene, "denoising_prefilter", DENOISER_PREFILTER_NUM, DENOISER_PREFILTER_NONE);
1045 denoising.quality = (DenoiserQuality)get_enum(
1046 cscene, "denoising_quality", DENOISER_QUALITY_NUM, DENOISER_QUALITY_HIGH);
1047
1048 input_passes = (DenoiserInput)get_enum(
1049 cscene, "denoising_input_passes", DENOISER_INPUT_NUM, DENOISER_INPUT_RGB_ALBEDO_NORMAL);
1050
1051 if (b_view_layer) {
1052 PointerRNA clayer = RNA_pointer_get(&b_view_layer.ptr, "cycles");
1053 if (!get_boolean(clayer, "use_denoising")) {
1054 denoising.use = false;
1055 }
1056 }
1057 }
1058 else {
1059 /* Viewport Denoising */
1060 denoising.use = get_boolean(cscene, "use_preview_denoising");
1061 denoising.type = (DenoiserType)get_enum(
1062 cscene, "preview_denoiser", DENOISER_NUM, DENOISER_NONE);
1063 denoising.use_gpu = get_boolean(cscene, "preview_denoising_use_gpu");
1065 cscene, "preview_denoising_prefilter", DENOISER_PREFILTER_NUM, DENOISER_PREFILTER_FAST);
1066 denoising.quality = (DenoiserQuality)get_enum(
1067 cscene, "preview_denoising_quality", DENOISER_QUALITY_NUM, DENOISER_QUALITY_BALANCED);
1068 denoising.start_sample = get_int(cscene, "preview_denoising_start_sample");
1069
1070 input_passes = (DenoiserInput)get_enum(
1071 cscene, "preview_denoising_input_passes", DENOISER_INPUT_NUM, DENOISER_INPUT_RGB_ALBEDO);
1072
1073 /* Auto select fastest denoiser. */
1074 if (denoising.type == DENOISER_NONE) {
1075 denoising.type = Denoiser::automatic_viewport_denoiser_type(denoise_device_info);
1076 if (denoising.type == DENOISER_NONE) {
1077 denoising.use = false;
1078 }
1079 }
1080 }
1081
1082 switch (input_passes) {
1083 case DENOISER_INPUT_RGB:
1084 denoising.use_pass_albedo = false;
1085 denoising.use_pass_normal = false;
1086 break;
1087
1088 case DENOISER_INPUT_RGB_ALBEDO:
1089 denoising.use_pass_albedo = true;
1090 denoising.use_pass_normal = false;
1091 break;
1092
1093 case DENOISER_INPUT_RGB_ALBEDO_NORMAL:
1094 denoising.use_pass_albedo = true;
1095 denoising.use_pass_normal = true;
1096 break;
1097
1098 default:
1099 LOG_ERROR << "Unhandled input passes enum " << input_passes;
1100 break;
1101 }
1102
1103 return denoising;
1104}
1105
DeviceInfo blender_device_info(BL::Preferences &b_preferences, BL::Scene &b_scene, bool background, bool preview, DeviceInfo &preferences_device)
int blender_device_threads(BL::Scene &b_scene)
static unsigned long seed
Definition btSoftBody.h:39
static bool headless
static bool print_render_stats
BlenderSync(BL::RenderEngine &b_engine, BL::BlendData &b_data, BL::Scene &b_scene, Scene *scene, bool preview, bool use_developer_ui, Progress &progress)
Definition sync.cpp:41
static DenoiseParams get_denoise_params(BL::Scene &b_scene, BL::ViewLayer &b_view_layer, bool background, const DeviceInfo &denoise_device)
Definition sync.cpp:1020
static bool get_session_pause(BL::Scene &b_scene, bool background)
Definition sync.cpp:921
void tag_update()
Definition sync.cpp:85
static SessionParams get_session_params(BL::RenderEngine &b_engine, BL::Preferences &b_preferences, BL::Scene &b_scene, bool background)
Definition sync.cpp:927
void sync_recalc(BL::Depsgraph &b_depsgraph, BL::SpaceView3D &b_v3d, BL::RegionView3D &b_rv3d)
Definition sync.cpp:97
void sync_view_layer(BL::ViewLayer &b_view_layer)
Definition sync.cpp:604
void sync_integrator(BL::ViewLayer &b_view_layer, bool background, const DeviceInfo &denoise_device_info)
Definition sync.cpp:333
void free_data_after_sync(BL::Depsgraph &b_depsgraph)
Definition sync.cpp:830
static SceneParams get_scene_params(BL::Scene &b_scene, const bool background, const bool use_developer_ui)
Definition sync.cpp:866
void reset(BL::BlendData &b_data, BL::Scene &b_scene)
Definition sync.cpp:75
void set_bake_target(BL::Object &b_object)
Definition sync.cpp:90
void sync_render_passes(BL::RenderLayer &b_rlay, BL::ViewLayer &b_view_layer)
Definition sync.cpp:749
void sync_data(BL::RenderSettings &b_render, BL::Depsgraph &b_depsgraph, BL::SpaceView3D &b_v3d, BL::RegionView3D &b_rv3d, const int width, const int height, void **python_thread_state, const DeviceInfo &denoise_device_info)
Definition sync.cpp:279
CPU cpu
Definition debug.h:120
DenoiserType type
Definition denoise.h:59
DenoiserQuality quality
Definition denoise.h:76
bool use_gpu
Definition denoise.h:73
int start_sample
Definition denoise.h:62
DenoiserPrefilter prefilter
Definition denoise.h:75
NODE_DECLARE bool use
Definition denoise.h:56
bool use_pass_normal
Definition denoise.h:66
bool use_pass_albedo
Definition denoise.h:65
static DenoiserType automatic_viewport_denoiser_type(const DeviceInfo &denoise_device_info)
Definition denoiser.cpp:184
Definition film.h:29
bool is_mesh() const
bool set_animation_frame_update(const int frame)
void tag_update(Scene *scene, const uint32_t flag)
AdaptiveSampling get_adaptive_sampling() const
Definition pass.h:50
static bool image_is_builtin(BL::Image &ima, BL::RenderEngine &engine)
static float get_float(PointerRNA &ptr, const char *name)
static bool get_boolean(PointerRNA &ptr, const char *name)
static int get_int(PointerRNA &ptr, const char *name)
static int get_enum(PointerRNA &ptr, const char *name, int num_values=-1, int default_value=-1)
static Mesh::SubdivisionType object_subdivision_type(BL::Object &b_ob, const bool preview, const bool use_adaptive_subdivision)
static CCL_NAMESPACE_BEGIN BL::ID object_get_data(const BL::Object &b_ob, const bool use_adaptive_subdivision)
static bool region_view3d_navigating_or_transforming(const BL::RegionView3D &b_rv3d)
DebugFlags & DebugFlags()
Definition debug.h:145
DenoiserQuality
Definition denoise.h:41
@ DENOISER_QUALITY_NUM
Definition denoise.h:45
@ DENOISER_QUALITY_BALANCED
Definition denoise.h:43
@ DENOISER_QUALITY_HIGH
Definition denoise.h:42
DenoiserPrefilter
Definition denoise.h:25
@ DENOISER_PREFILTER_FAST
Definition denoise.h:32
@ DENOISER_PREFILTER_NONE
Definition denoise.h:28
@ DENOISER_PREFILTER_NUM
Definition denoise.h:38
DenoiserType
Definition denoise.h:11
@ DENOISER_NONE
Definition denoise.h:16
@ DENOISER_NUM
Definition denoise.h:14
#define CCL_NAMESPACE_END
FilterType
Definition film.h:21
@ FILTER_NUM_TYPES
Definition film.h:26
@ FILTER_BOX
Definition film.h:22
@ FILTER_BLACKMAN_HARRIS
Definition film.h:24
ccl_device_inline uint hash_uint2(const uint kx, const uint ky)
Definition hash.h:139
uiWidgetBaseParameters params[MAX_WIDGET_BASE_BATCH]
DirectLightSamplingType
@ DIRECT_LIGHT_SAMPLING_MIS
@ DIRECT_LIGHT_SAMPLING_NUM
CryptomatteType
@ CRYPT_ASSET
@ CRYPT_NONE
@ CRYPT_OBJECT
@ CRYPT_MATERIAL
CurveShapeType
@ CURVE_THICK
@ CURVE_NUM_SHAPE_TYPES
@ MOTION_POSITION_CENTER
GuidingDirectionalSamplingType
@ GUIDING_DIRECTIONAL_SAMPLING_NUM_TYPES
@ GUIDING_DIRECTIONAL_SAMPLING_TYPE_RIS
SamplingPattern
@ SAMPLING_PATTERN_AUTOMATIC
@ SAMPLING_PATTERN_BLUE_NOISE_FIRST
@ SAMPLING_PATTERN_TABULATED_SOBOL
@ SAMPLING_PATTERN_BLUE_NOISE_PURE
@ SAMPLING_NUM_PATTERNS
PassType
@ PASS_EMISSION
@ PASS_POSITION
@ PASS_BACKGROUND
@ PASS_TRANSMISSION_DIRECT
@ PASS_VOLUME_DIRECT
@ PASS_UV
@ PASS_TRANSMISSION_COLOR
@ PASS_GUIDING_COLOR
@ PASS_DEPTH
@ PASS_MIST
@ PASS_VOLUME_SCATTER
@ PASS_TRANSMISSION_INDIRECT
@ PASS_BAKE_SEED
@ PASS_SHADOW_CATCHER
@ PASS_VOLUME_MAJORANT
@ PASS_DENOISING_NORMAL
@ PASS_DIFFUSE_DIRECT
@ PASS_MOTION
@ PASS_MATERIAL_ID
@ PASS_AO
@ PASS_COMBINED
@ PASS_DIFFUSE_INDIRECT
@ PASS_RENDER_TIME
@ PASS_GUIDING_PROBABILITY
@ PASS_ADAPTIVE_AUX_BUFFER
@ PASS_OBJECT_ID
@ PASS_AOV_COLOR
@ PASS_NONE
@ PASS_VOLUME_INDIRECT
@ PASS_NORMAL
@ PASS_CRYPTOMATTE
@ PASS_DIFFUSE_COLOR
@ PASS_SAMPLE_COUNT
@ PASS_GLOSSY_DIRECT
@ PASS_DENOISING_ALBEDO
@ PASS_AOV_VALUE
@ PASS_GUIDING_AVG_ROUGHNESS
@ PASS_GLOSSY_COLOR
@ PASS_GLOSSY_INDIRECT
@ PASS_BAKE_DIFFERENTIAL
@ PASS_VOLUME_TRANSMIT
@ PASS_DENOISING_DEPTH
@ PASS_BAKE_PRIMITIVE
GuidingDistributionType
@ GUIDING_NUM_TYPES
@ GUIDING_TYPE_PARALLAX_AWARE_VMM
#define LOG_ERROR
Definition log.h:101
#define LOG_INFO
Definition log.h:106
@ BVH_TYPE_DYNAMIC
Definition params.h:33
@ BVH_TYPE_STATIC
Definition params.h:40
PassMode
Definition pass.h:20
@ DENOISED
Definition pass.h:22
@ NOISY
Definition pass.h:21
const char * name
#define sqrtf
PointerRNA RNA_pointer_get(PointerRNA *ptr, const char *name)
const PointerRNA PointerRNA_NULL
int RNA_int_get(PointerRNA *ptr, const char *name)
float RNA_float_get(PointerRNA *ptr, const char *name)
bool RNA_boolean_get(PointerRNA *ptr, const char *name)
int RNA_enum_get(PointerRNA *ptr, const char *name)
PointerRNA RNA_id_pointer_create(ID *id)
@ SHADINGSYSTEM_OSL
@ SHADINGSYSTEM_SVM
#define min(a, b)
Definition sort.cc:36
bool string_startswith(const string_view s, const string_view start)
Definition string.cpp:104
CCL_NAMESPACE_BEGIN string string_printf(const char *format,...)
Definition string.cpp:23
BVHLayout bvh_layout
Definition debug.h:47
Definition DNA_ID.h:414
@ SUBDIVISION_NONE
Definition scene/mesh.h:118
ustring name
Definition graph/node.h:177
Film * film
Definition scene.h:128
T * create_node(Args &&...)=delete
@ MOTION_PASS
Definition scene.h:185
@ MOTION_NONE
Definition scene.h:185
static bool get_known_pass_type(BL::RenderPass &b_pass, PassType &type, PassMode &mode)
Definition sync.cpp:659
#define MAP_PASS(passname, passtype, noisy)
static Pass * pass_add(Scene *scene, PassType type, const char *name, PassMode mode=PassMode::DENOISED)
Definition sync.cpp:735
static CCL_NAMESPACE_BEGIN const char * cryptomatte_prefix
Definition sync.cpp:37
max
Definition text_draw.cc:251
ccl_device_inline bool is_light(const ccl_global KernelLightTreeEmitter *kemitter)
Definition tree.h:69
ccl_device_inline size_t divide_up(const size_t x, const size_t y)
Definition types_base.h:52
wmTimer * timer