Blender 5.0 — Cycles render engine: implementation file integrator.cpp (scene integrator settings).
1/* SPDX-FileCopyrightText: 2011-2022 Blender Foundation
2 *
3 * SPDX-License-Identifier: Apache-2.0 */
4
5#include "device/device.h"
6
7#include "scene/background.h"
8#include "scene/bake.h"
9#include "scene/camera.h"
10#include "scene/film.h"
11#include "scene/integrator.h"
12#include "scene/light.h"
13#include "scene/object.h"
14#include "scene/scene.h"
15#include "scene/shader.h"
16#include "scene/stats.h"
18
19#include "kernel/types.h"
20
21#include "util/hash.h"
22#include "util/log.h"
23#include "util/task.h"
24#include "util/time.h"
25
27
29{
30 NodeType *type = NodeType::add("integrator", create);
31
32 SOCKET_INT(min_bounce, "Min Bounce", 0);
33 SOCKET_INT(max_bounce, "Max Bounce", 7);
34
35 SOCKET_INT(max_diffuse_bounce, "Max Diffuse Bounce", 7);
36 SOCKET_INT(max_glossy_bounce, "Max Glossy Bounce", 7);
37 SOCKET_INT(max_transmission_bounce, "Max Transmission Bounce", 7);
38 SOCKET_INT(max_volume_bounce, "Max Volume Bounce", 7);
39
40 SOCKET_INT(transparent_min_bounce, "Transparent Min Bounce", 0);
41 SOCKET_INT(transparent_max_bounce, "Transparent Max Bounce", 7);
42
43#ifdef WITH_CYCLES_DEBUG
44 static NodeEnum direct_light_sampling_type_enum;
45 direct_light_sampling_type_enum.insert("multiple_importance_sampling",
47 direct_light_sampling_type_enum.insert("forward_path_tracing", DIRECT_LIGHT_SAMPLING_FORWARD);
48 direct_light_sampling_type_enum.insert("next_event_estimation", DIRECT_LIGHT_SAMPLING_NEE);
49 SOCKET_ENUM(direct_light_sampling_type,
50 "Direct Light Sampling Type",
51 direct_light_sampling_type_enum,
53#endif
54
55 SOCKET_INT(ao_bounces, "AO Bounces", 0);
56 SOCKET_FLOAT(ao_factor, "AO Factor", 0.0f);
57 SOCKET_FLOAT(ao_distance, "AO Distance", FLT_MAX);
58 SOCKET_FLOAT(ao_additive_factor, "AO Additive Factor", 0.0f);
59
60 SOCKET_BOOLEAN(volume_ray_marching, "Biased", false);
61 SOCKET_INT(volume_max_steps, "Volume Max Steps", 1024);
62 SOCKET_FLOAT(volume_step_rate, "Volume Step Rate", 1.0f);
63
64 static NodeEnum guiding_distribution_enum;
65 guiding_distribution_enum.insert("PARALLAX_AWARE_VMM", GUIDING_TYPE_PARALLAX_AWARE_VMM);
66 guiding_distribution_enum.insert("DIRECTIONAL_QUAD_TREE", GUIDING_TYPE_DIRECTIONAL_QUAD_TREE);
67 guiding_distribution_enum.insert("VMM", GUIDING_TYPE_VMM);
68
69 static NodeEnum guiding_directional_sampling_type_enum;
70 guiding_directional_sampling_type_enum.insert("MIS",
72 guiding_directional_sampling_type_enum.insert("RIS", GUIDING_DIRECTIONAL_SAMPLING_TYPE_RIS);
73 guiding_directional_sampling_type_enum.insert("ROUGHNESS",
75
76 SOCKET_BOOLEAN(use_guiding, "Guiding", false);
77 SOCKET_BOOLEAN(deterministic_guiding, "Deterministic Guiding", true);
78 SOCKET_BOOLEAN(use_surface_guiding, "Surface Guiding", true);
79 SOCKET_FLOAT(surface_guiding_probability, "Surface Guiding Probability", 0.5f);
80 SOCKET_BOOLEAN(use_volume_guiding, "Volume Guiding", true);
81 SOCKET_FLOAT(volume_guiding_probability, "Volume Guiding Probability", 0.5f);
82 SOCKET_INT(guiding_training_samples, "Training Samples", 128);
83 SOCKET_BOOLEAN(use_guiding_direct_light, "Guide Direct Light", true);
84 SOCKET_BOOLEAN(use_guiding_mis_weights, "Use MIS Weights", true);
85 SOCKET_ENUM(guiding_distribution_type,
86 "Guiding Distribution Type",
87 guiding_distribution_enum,
89 SOCKET_ENUM(guiding_directional_sampling_type,
90 "Guiding Directional Sampling Type",
91 guiding_directional_sampling_type_enum,
93 SOCKET_FLOAT(guiding_roughness_threshold, "Guiding Roughness Threshold", 0.05f);
94
95 SOCKET_BOOLEAN(caustics_reflective, "Reflective Caustics", true);
96 SOCKET_BOOLEAN(caustics_refractive, "Refractive Caustics", true);
97 SOCKET_FLOAT(filter_glossy, "Filter Glossy", 0.0f);
98
99 SOCKET_BOOLEAN(use_direct_light, "Use Direct Light", true);
100 SOCKET_BOOLEAN(use_indirect_light, "Use Indirect Light", true);
101 SOCKET_BOOLEAN(use_diffuse, "Use Diffuse", true);
102 SOCKET_BOOLEAN(use_glossy, "Use Glossy", true);
103 SOCKET_BOOLEAN(use_transmission, "Use Transmission", true);
104 SOCKET_BOOLEAN(use_emission, "Use Emission", true);
105
106 SOCKET_INT(seed, "Seed", 0);
107 SOCKET_FLOAT(sample_clamp_direct, "Sample Clamp Direct", 0.0f);
108 SOCKET_FLOAT(sample_clamp_indirect, "Sample Clamp Indirect", 10.0f);
109 SOCKET_BOOLEAN(motion_blur, "Motion Blur", false);
110
111 SOCKET_INT(aa_samples, "AA Samples", 0);
112 SOCKET_BOOLEAN(use_sample_subset, "Use Sample Subset", false);
113 SOCKET_INT(sample_subset_offset, "Sample Subset Offset", 0);
114 SOCKET_INT(sample_subset_length, "Sample Subset Length", MAX_SAMPLES);
115
116 SOCKET_BOOLEAN(use_adaptive_sampling, "Use Adaptive Sampling", true);
117 SOCKET_FLOAT(adaptive_threshold, "Adaptive Threshold", 0.01f);
118 SOCKET_INT(adaptive_min_samples, "Adaptive Min Samples", 0);
119
120 SOCKET_BOOLEAN(use_light_tree, "Use light tree to optimize many light sampling", true);
121 SOCKET_FLOAT(light_sampling_threshold, "Light Sampling Threshold", 0.0f);
122
123 static NodeEnum sampling_pattern_enum;
124 sampling_pattern_enum.insert("sobol_burley", SAMPLING_PATTERN_SOBOL_BURLEY);
125 sampling_pattern_enum.insert("tabulated_sobol", SAMPLING_PATTERN_TABULATED_SOBOL);
126 sampling_pattern_enum.insert("blue_noise_pure", SAMPLING_PATTERN_BLUE_NOISE_PURE);
127 sampling_pattern_enum.insert("blue_noise_round", SAMPLING_PATTERN_BLUE_NOISE_ROUND);
128 sampling_pattern_enum.insert("blue_noise_first", SAMPLING_PATTERN_BLUE_NOISE_FIRST);
129 SOCKET_ENUM(sampling_pattern,
130 "Sampling Pattern",
131 sampling_pattern_enum,
133 SOCKET_FLOAT(scrambling_distance, "Scrambling Distance", 1.0f);
134
135 static NodeEnum denoiser_type_enum;
136 denoiser_type_enum.insert("none", DENOISER_NONE);
137 denoiser_type_enum.insert("optix", DENOISER_OPTIX);
138 denoiser_type_enum.insert("openimagedenoise", DENOISER_OPENIMAGEDENOISE);
139
140 static NodeEnum denoiser_prefilter_enum;
141 denoiser_prefilter_enum.insert("none", DENOISER_PREFILTER_NONE);
142 denoiser_prefilter_enum.insert("fast", DENOISER_PREFILTER_FAST);
143 denoiser_prefilter_enum.insert("accurate", DENOISER_PREFILTER_ACCURATE);
144
145 static NodeEnum denoiser_quality_enum;
146 denoiser_quality_enum.insert("high", DENOISER_QUALITY_HIGH);
147 denoiser_quality_enum.insert("balanced", DENOISER_QUALITY_BALANCED);
148 denoiser_quality_enum.insert("fast", DENOISER_QUALITY_FAST);
149
150 /* Default to accurate denoising with OpenImageDenoise. For interactive viewport
151 * it's best use OptiX and disable the normal pass since it does not always have
152 * the desired effect for that denoiser. */
153 SOCKET_BOOLEAN(use_denoise, "Use Denoiser", false);
154 SOCKET_ENUM(denoiser_type, "Denoiser Type", denoiser_type_enum, DENOISER_OPENIMAGEDENOISE);
155 SOCKET_INT(denoise_start_sample, "Start Sample to Denoise", 0);
156 SOCKET_BOOLEAN(use_denoise_pass_albedo, "Use Albedo Pass for Denoiser", true);
157 SOCKET_BOOLEAN(use_denoise_pass_normal, "Use Normal Pass for Denoiser", true);
158 SOCKET_ENUM(denoiser_prefilter,
159 "Denoiser Prefilter",
160 denoiser_prefilter_enum,
162 SOCKET_BOOLEAN(denoise_use_gpu, "Denoise on GPU", true);
163 SOCKET_ENUM(denoiser_quality, "Denoiser Quality", denoiser_quality_enum, DENOISER_QUALITY_HIGH);
164
165 return type;
166}
167
168Integrator::Integrator() : Node(get_node_type()) {}
169
170Integrator::~Integrator() = default;
171
172void Integrator::device_update(Device *device, DeviceScene *dscene, Scene *scene)
173{
174 if (!is_modified()) {
175 return;
176 }
177
178 const scoped_callback_timer timer([scene](double time) {
179 if (scene->update_stats) {
180 scene->update_stats->integrator.times.add_entry({"device_update", time});
181 }
182 });
183
184 KernelIntegrator *kintegrator = &dscene->data.integrator;
185
186 device_free(device, dscene);
187
188 /* integrator parameters */
189
190 /* Plus one so that a bounce of 0 indicates no global illumination, only direct illumination. */
191 kintegrator->min_bounce = min_bounce + 1;
192 kintegrator->max_bounce = max_bounce + 1;
193
194 kintegrator->max_diffuse_bounce = max_diffuse_bounce + 1;
195 kintegrator->max_glossy_bounce = max_glossy_bounce + 1;
196 kintegrator->max_transmission_bounce = max_transmission_bounce + 1;
197 kintegrator->max_volume_bounce = max_volume_bounce + 1;
198
199 kintegrator->transparent_min_bounce = transparent_min_bounce + 1;
200
201 /* Unlike other type of bounces, 0 transparent bounce means there is no transparent bounce in the
202 * scene. */
203 kintegrator->transparent_max_bounce = transparent_max_bounce;
204
205 kintegrator->ao_bounces = (ao_factor != 0.0f) ? ao_bounces : 0;
206 kintegrator->ao_bounces_distance = ao_distance;
207 kintegrator->ao_bounces_factor = ao_factor;
208 kintegrator->ao_additive_factor = ao_additive_factor;
209
210#ifdef WITH_CYCLES_DEBUG
211 kintegrator->direct_light_sampling_type = direct_light_sampling_type;
212#else
213 kintegrator->direct_light_sampling_type = DIRECT_LIGHT_SAMPLING_MIS;
214#endif
215
216 /* Transparent Shadows
217 * We only need to enable transparent shadows, if we actually have
218 * transparent shaders in the scene. Otherwise we can disable it
219 * to improve performance a bit. */
220 kintegrator->transparent_shadows = false;
221 for (Shader *shader : scene->shaders) {
222 /* keep this in sync with SD_HAS_TRANSPARENT_SHADOW in shader.cpp */
223 if ((shader->has_surface_transparent && shader->get_use_transparent_shadow()) ||
224 shader->has_volume)
225 {
226 kintegrator->transparent_shadows = true;
227 break;
228 }
229 }
230
231 kintegrator->volume_ray_marching = volume_ray_marching;
232 kintegrator->volume_max_steps = volume_max_steps;
233
234 kintegrator->caustics_reflective = caustics_reflective;
235 kintegrator->caustics_refractive = caustics_refractive;
236 kintegrator->filter_glossy = (filter_glossy == 0.0f) ? FLT_MAX : 1.0f / filter_glossy;
237
238 kintegrator->filter_closures = 0;
239 if (!use_direct_light) {
240 kintegrator->filter_closures |= FILTER_CLOSURE_DIRECT_LIGHT;
241 }
242 if (!use_indirect_light) {
243 kintegrator->min_bounce = 1;
244 kintegrator->max_bounce = 1;
245 }
246 if (!use_diffuse) {
247 kintegrator->filter_closures |= FILTER_CLOSURE_DIFFUSE;
248 }
249 if (!use_glossy) {
250 kintegrator->filter_closures |= FILTER_CLOSURE_GLOSSY;
251 }
252 if (!use_transmission) {
253 kintegrator->filter_closures |= FILTER_CLOSURE_TRANSMISSION;
254 }
255 if (!use_emission) {
256 kintegrator->filter_closures |= FILTER_CLOSURE_EMISSION;
257 }
258 if (scene->bake_manager->get_baking()) {
259 /* Baking does not need to trace through transparency, we only want to bake
260 * the object itself. */
261 kintegrator->filter_closures |= FILTER_CLOSURE_TRANSPARENT;
262 }
263
264 const GuidingParams guiding_params = get_guiding_params(device);
265 kintegrator->use_guiding = guiding_params.use;
266 kintegrator->train_guiding = kintegrator->use_guiding;
267 kintegrator->use_surface_guiding = guiding_params.use_surface_guiding;
268 kintegrator->use_volume_guiding = guiding_params.use_volume_guiding;
269 kintegrator->surface_guiding_probability = surface_guiding_probability;
270 kintegrator->volume_guiding_probability = volume_guiding_probability;
271 kintegrator->use_guiding_direct_light = use_guiding_direct_light;
272 kintegrator->use_guiding_mis_weights = use_guiding_mis_weights;
273 kintegrator->guiding_distribution_type = guiding_params.type;
274 kintegrator->guiding_directional_sampling_type = guiding_params.sampling_type;
275 kintegrator->guiding_roughness_threshold = guiding_params.roughness_threshold;
276
277 kintegrator->sample_clamp_direct = (sample_clamp_direct == 0.0f) ? FLT_MAX :
278 sample_clamp_direct * 3.0f;
279 kintegrator->sample_clamp_indirect = (sample_clamp_indirect == 0.0f) ?
280 FLT_MAX :
281 sample_clamp_indirect * 3.0f;
282
283 const int clamped_aa_samples = min(aa_samples, MAX_SAMPLES);
284
285 kintegrator->sampling_pattern = sampling_pattern;
286 kintegrator->scrambling_distance = scrambling_distance;
287 kintegrator->sobol_index_mask = reverse_integer_bits(next_power_of_two(clamped_aa_samples - 1) -
288 1);
289 kintegrator->blue_noise_sequence_length = clamped_aa_samples;
290 if (kintegrator->sampling_pattern == SAMPLING_PATTERN_BLUE_NOISE_ROUND) {
291 if (!is_power_of_two(clamped_aa_samples)) {
292 kintegrator->blue_noise_sequence_length = next_power_of_two(clamped_aa_samples);
293 }
294 kintegrator->sampling_pattern = SAMPLING_PATTERN_BLUE_NOISE_PURE;
295 }
296 if (kintegrator->sampling_pattern == SAMPLING_PATTERN_BLUE_NOISE_FIRST) {
297 kintegrator->blue_noise_sequence_length -= 1;
298 }
299
300 /* The blue-noise sampler needs a randomized seed to scramble properly, providing e.g. 0 won't
301 * work properly. Therefore, hash the seed in those cases. */
302 if (kintegrator->sampling_pattern == SAMPLING_PATTERN_BLUE_NOISE_FIRST ||
303 kintegrator->sampling_pattern == SAMPLING_PATTERN_BLUE_NOISE_PURE)
304 {
305 kintegrator->seed = hash_uint(seed);
306 }
307 else {
308 kintegrator->seed = seed;
309 }
310
311 /* NOTE: The kintegrator->use_light_tree is assigned to the efficient value in the light manager,
312 * and the synchronization code is expected to tag the light manager for update when the
313 * `use_light_tree` is changed. */
314 if (light_sampling_threshold > 0.0f && !kintegrator->use_light_tree) {
315 kintegrator->light_inv_rr_threshold = scene->film->get_exposure() / light_sampling_threshold;
316 }
317 else {
318 kintegrator->light_inv_rr_threshold = 0.0f;
319 }
320
321 /* Build pre-tabulated Sobol samples if needed. */
322 const int sequence_size = clamp(
324 const int table_size = sequence_size * NUM_TAB_SOBOL_PATTERNS * NUM_TAB_SOBOL_DIMENSIONS;
325 if (kintegrator->sampling_pattern == SAMPLING_PATTERN_TABULATED_SOBOL &&
326 dscene->sample_pattern_lut.size() != table_size)
327 {
328 kintegrator->tabulated_sobol_sequence_size = sequence_size;
329
330 if (dscene->sample_pattern_lut.size() != 0) {
331 dscene->sample_pattern_lut.free();
332 }
333 float4 *directions = (float4 *)dscene->sample_pattern_lut.alloc(table_size);
334 TaskPool pool;
335 for (int j = 0; j < NUM_TAB_SOBOL_PATTERNS; ++j) {
336 float4 *sequence = directions + j * sequence_size;
337 pool.push([sequence, sequence_size, j] {
338 tabulated_sobol_generate_4D(sequence, sequence_size, j);
339 });
340 }
341 pool.wait_work();
342
344 }
345
346 kintegrator->has_shadow_catcher = scene->has_shadow_catcher();
347
350}
351
352void Integrator::device_free(Device * /*unused*/, DeviceScene *dscene, bool force_free)
353{
354 dscene->sample_pattern_lut.free_if_need_realloc(force_free);
355}
356
357void Integrator::tag_update(Scene *scene, const uint32_t flag)
358{
359 if (flag & UPDATE_ALL) {
360 tag_modified();
361 }
362
363 if (flag & AO_PASS_MODIFIED) {
364 /* tag only the ao_bounces socket as modified so we avoid updating sample_pattern_lut
365 * unnecessarily */
366 tag_ao_bounces_modified();
367 }
368
369 if (motion_blur_is_modified()) {
370 scene->object_manager->tag_update(scene, ObjectManager::MOTION_BLUR_MODIFIED);
371 scene->camera->tag_modified();
372 }
373}
374
376{
377 uint kernel_features = 0;
378
379 if (ao_additive_factor != 0.0f) {
380 kernel_features |= KERNEL_FEATURE_AO_ADDITIVE;
381 }
382
383 if (get_use_light_tree()) {
384 kernel_features |= KERNEL_FEATURE_LIGHT_TREE;
385 }
386
387 return kernel_features;
388}
389
391{
392 AdaptiveSampling adaptive_sampling;
393
394 adaptive_sampling.use = use_adaptive_sampling;
395
396 if (!adaptive_sampling.use) {
397 return adaptive_sampling;
398 }
399
400 const int clamped_aa_samples = min(aa_samples, MAX_SAMPLES);
401
402 if (clamped_aa_samples > 0 && adaptive_threshold == 0.0f) {
403 adaptive_sampling.threshold = max(0.001f, 1.0f / (float)aa_samples);
404 LOG_INFO << "Adaptive sampling: automatic threshold = " << adaptive_sampling.threshold;
405 }
406 else {
407 adaptive_sampling.threshold = adaptive_threshold;
408 }
409
410 if (use_sample_subset && clamped_aa_samples > 0) {
411 const int subset_samples = max(
412 min(sample_subset_offset + sample_subset_length, clamped_aa_samples) -
413 sample_subset_offset,
414 0);
415
416 adaptive_sampling.threshold *= sqrtf((float)subset_samples / (float)clamped_aa_samples);
417 }
418
419 if (adaptive_sampling.threshold > 0 && adaptive_min_samples == 0) {
420 /* Threshold 0.1 -> 32, 0.01 -> 64, 0.001 -> 128.
421 * This is highly scene dependent, we make a guess that seemed to work well
422 * in various test scenes. */
423 const int min_samples = (int)ceilf(16.0f / powf(adaptive_sampling.threshold, 0.3f));
424 adaptive_sampling.min_samples = max(4, min_samples);
425 LOG_INFO << "Adaptive sampling: automatic min samples = " << adaptive_sampling.min_samples;
426 }
427 else {
428 adaptive_sampling.min_samples = max(4, adaptive_min_samples);
429 }
430
431 /* Arbitrary factor that makes the threshold more similar to what is was before,
432 * and gives arguably more intuitive values. */
433 adaptive_sampling.threshold *= 5.0f;
434
435 adaptive_sampling.adaptive_step = 16;
436
437 DCHECK(is_power_of_two(adaptive_sampling.adaptive_step))
438 << "Adaptive step must be a power of two for bitwise operations to work";
439
440 return adaptive_sampling;
441}
442
444{
445 DenoiseParams denoise_params;
446
447 denoise_params.use = use_denoise;
448
449 denoise_params.type = denoiser_type;
450
451 denoise_params.use_gpu = denoise_use_gpu;
452
453 denoise_params.start_sample = denoise_start_sample;
454
455 denoise_params.use_pass_albedo = use_denoise_pass_albedo;
456 denoise_params.use_pass_normal = use_denoise_pass_normal;
457
458 denoise_params.prefilter = denoiser_prefilter;
459 denoise_params.quality = denoiser_quality;
460
461 return denoise_params;
462}
463
465{
466 const bool use = use_guiding && device->info.has_guiding;
467
468 GuidingParams guiding_params;
469 guiding_params.use_surface_guiding = use && use_surface_guiding &&
470 surface_guiding_probability > 0.0f;
471 guiding_params.use_volume_guiding = use && use_volume_guiding &&
472 volume_guiding_probability > 0.0f;
473 guiding_params.use = guiding_params.use_surface_guiding || guiding_params.use_volume_guiding;
474 guiding_params.type = guiding_distribution_type;
475 guiding_params.training_samples = guiding_training_samples;
476 guiding_params.deterministic = deterministic_guiding;
477 guiding_params.sampling_type = guiding_directional_sampling_type;
478 // In Blender/Cycles the user set roughness is squared to behave more linear.
479 guiding_params.roughness_threshold = guiding_roughness_threshold * guiding_roughness_threshold;
480 return guiding_params;
481}
unsigned int uint
static unsigned long seed
Definition btSoftBody.h:39
DenoiserType type
Definition denoise.h:59
DenoiserQuality quality
Definition denoise.h:76
bool use_gpu
Definition denoise.h:73
int start_sample
Definition denoise.h:62
DenoiserPrefilter prefilter
Definition denoise.h:75
NODE_DECLARE bool use
Definition denoise.h:56
bool use_pass_normal
Definition denoise.h:66
bool use_pass_albedo
Definition denoise.h:65
device_vector< float > sample_pattern_lut
Definition devicescene.h:83
KernelData data
Definition devicescene.h:94
DeviceInfo info
void tag_update(Scene *scene, const uint32_t flag)
void device_update(Device *device, DeviceScene *dscene, Scene *scene)
DenoiseParams get_denoise_params() const
~Integrator() override
AdaptiveSampling get_adaptive_sampling() const
void device_free(Device *device, DeviceScene *dscene, bool force_free=false)
GuidingParams get_guiding_params(const Device *device) const
static const int MAX_SAMPLES
Definition integrator.h:80
uint get_kernel_features() const
bool has_volume
bool has_surface_transparent
size_t size() const
T * alloc(const size_t width, const size_t height=0)
void free_if_need_realloc(bool force_free)
@ DENOISER_QUALITY_FAST
Definition denoise.h:44
@ DENOISER_QUALITY_BALANCED
Definition denoise.h:43
@ DENOISER_QUALITY_HIGH
Definition denoise.h:42
@ DENOISER_PREFILTER_FAST
Definition denoise.h:32
@ DENOISER_PREFILTER_NONE
Definition denoise.h:28
@ DENOISER_PREFILTER_ACCURATE
Definition denoise.h:36
@ DENOISER_NONE
Definition denoise.h:16
@ DENOISER_OPTIX
Definition denoise.h:12
@ DENOISER_OPENIMAGEDENOISE
Definition denoise.h:13
#define KERNEL_FEATURE_LIGHT_TREE
#define MIN_TAB_SOBOL_SAMPLES
#define NUM_TAB_SOBOL_DIMENSIONS
#define MAX_TAB_SOBOL_SAMPLES
#define NUM_TAB_SOBOL_PATTERNS
#define KERNEL_FEATURE_AO_ADDITIVE
#define powf(x, y)
#define CCL_NAMESPACE_END
constexpr T clamp(T, U, U) RET
ccl_device_inline uint hash_uint(const uint kx)
Definition hash.h:126
@ FILTER_CLOSURE_EMISSION
@ FILTER_CLOSURE_GLOSSY
@ FILTER_CLOSURE_DIFFUSE
@ FILTER_CLOSURE_TRANSPARENT
@ FILTER_CLOSURE_DIRECT_LIGHT
@ FILTER_CLOSURE_TRANSMISSION
@ DIRECT_LIGHT_SAMPLING_MIS
@ DIRECT_LIGHT_SAMPLING_NEE
@ DIRECT_LIGHT_SAMPLING_FORWARD
@ GUIDING_DIRECTIONAL_SAMPLING_TYPE_PRODUCT_MIS
@ GUIDING_DIRECTIONAL_SAMPLING_TYPE_RIS
@ GUIDING_DIRECTIONAL_SAMPLING_TYPE_ROUGHNESS
@ SAMPLING_PATTERN_BLUE_NOISE_FIRST
@ SAMPLING_PATTERN_TABULATED_SOBOL
@ SAMPLING_PATTERN_BLUE_NOISE_PURE
@ SAMPLING_PATTERN_SOBOL_BURLEY
@ SAMPLING_PATTERN_BLUE_NOISE_ROUND
@ GUIDING_TYPE_VMM
@ GUIDING_TYPE_DIRECTIONAL_QUAD_TREE
@ GUIDING_TYPE_PARALLAX_AWARE_VMM
#define DCHECK(expression)
Definition log.h:135
#define LOG_INFO
Definition log.h:106
ccl_device_inline uint32_t reverse_integer_bits(uint32_t x)
Definition math_base.h:802
ccl_device_inline uint next_power_of_two(const uint x)
Definition math_base.h:786
#define SOCKET_FLOAT(name, ui_name, default_value,...)
Definition node_type.h:211
#define SOCKET_INT(name, ui_name, default_value,...)
Definition node_type.h:205
#define NODE_DEFINE(structname)
Definition node_type.h:152
#define SOCKET_BOOLEAN(name, ui_name, default_value,...)
Definition node_type.h:203
#define SOCKET_ENUM(name, ui_name, values, default_value,...)
Definition node_type.h:227
#define sqrtf
#define ceilf
#define min(a, b)
Definition sort.cc:36
#define FLT_MAX
Definition stdcycles.h:14
GuidingDirectionalSamplingType sampling_type
GuidingDistributionType type
void insert(const char *x, const int y)
Definition node_enum.h:21
static NodeType * add(const char *name, CreateFunc create, Type type=NONE, const NodeType *base=nullptr)
void clear_modified()
void tag_modified()
bool is_modified() const
Node(const NodeType *type, ustring name=ustring())
Film * film
Definition scene.h:128
unique_ptr< ObjectManager > object_manager
Definition scene.h:150
unique_ptr< SceneUpdateStats > update_stats
Definition scene.h:175
unique_ptr< BakeManager > bake_manager
Definition scene.h:152
unique_ptr_vector< Shader > shaders
Definition scene.h:137
bool has_shadow_catcher()
Definition scene.cpp:776
struct Object * camera
void push(TaskRunFunction &&task)
Definition task.cpp:21
void wait_work(Summary *stats=nullptr)
Definition task.cpp:27
CCL_NAMESPACE_BEGIN void tabulated_sobol_generate_4D(float4 points[], const int size, const int rng_seed)
max
Definition text_draw.cc:251
ccl_device_inline bool is_power_of_two(const size_t x)
Definition types_base.h:67
wmTimer * timer
uint8_t flag
Definition wm_window.cc:145