Blender V4.3
scene/object.cpp
Go to the documentation of this file.
1/* SPDX-FileCopyrightText: 2011-2022 Blender Foundation
2 *
3 * SPDX-License-Identifier: Apache-2.0 */
4
5#include "scene/object.h"
6#include "device/device.h"
7#include "scene/camera.h"
8#include "scene/curves.h"
9#include "scene/hair.h"
10#include "scene/integrator.h"
11#include "scene/light.h"
12#include "scene/mesh.h"
13#include "scene/particles.h"
14#include "scene/pointcloud.h"
15#include "scene/scene.h"
16#include "scene/stats.h"
17#include "scene/volume.h"
18
19#include "util/foreach.h"
20#include "util/log.h"
21#include "util/map.h"
22#include "util/murmurhash.h"
23#include "util/progress.h"
24#include "util/set.h"
25#include "util/task.h"
26#include "util/vector.h"
27
28#include "subd/patch_table.h"
29
31
32/* Global state of object transform update. */
33
35 /* Global state used by device_update_object_transform().
36 * Common for both threaded and non-threaded update.
37 */
38
39 /* Type of the motion required by the scene settings. */
41
42 /* Mapping from particle system to a index in packed particle array.
43 * Only used for read.
44 */
45 map<ParticleSystem *, int> particle_offset;
46
47 /* Motion offsets for each object. */
49
50 /* Packed object arrays. Those will be filled in. */
57
58 /* Flags which will be synchronized to Integrator. */
63
64 /* ** Scheduling queue. ** */
66
67 /* First unused object index in the queue. */
69};
70
71/* Object */
72
/* Register the "object" node type and declare its sockets (the versioned,
 * user-settable parameters of a scene object). Returns the new NodeType.
 * NOTE(review): the NODE_DEFINE(Object) header line is elided in this listing. */
74{
 75 NodeType *type = NodeType::add("object", create);
 76
 77 SOCKET_NODE(geometry, "Geometry", Geometry::get_node_base_type());
 78 SOCKET_TRANSFORM(tfm, "Transform", transform_identity());
 79 SOCKET_UINT(visibility, "Visibility", ~0);
 80 SOCKET_COLOR(color, "Color", zero_float3());
 81 SOCKET_FLOAT(alpha, "Alpha", 0.0f);
 82 SOCKET_UINT(random_id, "Random ID", 0);
 83 SOCKET_INT(pass_id, "Pass ID", 0);
 84 SOCKET_BOOLEAN(use_holdout, "Use Holdout", false);
 85 SOCKET_BOOLEAN(hide_on_missing_motion, "Hide on Missing Motion", false);
 86 SOCKET_POINT(dupli_generated, "Dupli Generated", zero_float3());
 87 SOCKET_POINT2(dupli_uv, "Dupli UV", zero_float2());
 88 SOCKET_TRANSFORM_ARRAY(motion, "Motion", array<Transform>())
 89 SOCKET_FLOAT(shadow_terminator_shading_offset, "Shadow Terminator Shading Offset", 0.0f);
 90 SOCKET_FLOAT(shadow_terminator_geometry_offset, "Shadow Terminator Geometry Offset", 0.1f);
 91 SOCKET_STRING(asset_name, "Asset Name", ustring());
 92
 93 SOCKET_BOOLEAN(is_shadow_catcher, "Shadow Catcher", false);
 94
 95 SOCKET_BOOLEAN(is_caustics_caster, "Cast Shadow Caustics", false);
 96 SOCKET_BOOLEAN(is_caustics_receiver, "Receive Shadow Caustics", false);
 97
 98 SOCKET_NODE(particle_system, "Particle System", ParticleSystem::get_node_type());
 99 SOCKET_INT(particle_index, "Particle Index", 0);
 100
 101 SOCKET_FLOAT(ao_distance, "AO Distance", 0.0f);
 102
 /* Light/shadow linking: set indices plus 64-bit membership masks. */
 103 SOCKET_STRING(lightgroup, "Light Group", ustring());
 104 SOCKET_UINT(receiver_light_set, "Light Set Index", 0);
 105 SOCKET_UINT64(light_set_membership, "Light Set Membership", LIGHT_LINK_MASK_ALL);
 106 SOCKET_UINT(blocker_shadow_set, "Shadow Set Index", 0);
 107 SOCKET_UINT64(shadow_set_membership, "Shadow Set Membership", LIGHT_LINK_MASK_ALL);
 108
 109 return type;
 110}
111
/* Default-construct an object node; socket values come from the node type
 * defaults, the members below are runtime-only bookkeeping. */
112Object::Object() : Node(get_node_type())
113{
 114 particle_system = NULL;
 115 particle_index = 0;
 /* Offset of this object's attributes in the packed device attribute map. */
 116 attr_map_offset = 0;
 /* NOTE(review): one initializer line (117) is elided in this listing —
 * confirm against the upstream file. */
 118 intersects_volume = false;
119}
120
122
/* Sanitize the motion transform array before device update: steps equal to
 * transform_empty() are either treated as "object vanished" (optionally
 * hiding the object) or filled from the center transform, and the array is
 * cleared entirely when no step actually differs from the center transform.
 * NOTE(review): the function header line is elided in this listing. */
124{
 125 if (!use_motion()) {
 126 return;
 127 }
 128
 129 bool have_motion = false;
 130
 131 for (size_t i = 0; i < motion.size(); i++) {
 132 if (motion[i] == transform_empty()) {
 133 if (hide_on_missing_motion) {
 134 /* Hide objects that have no valid previous or next
 135 * transform, for example particle that stop existing. It
 136 * would be better to handle this in the kernel and make
 137 * objects invisible outside certain motion steps. */
 138 tfm = transform_empty();
 139 motion.clear();
 140 return;
 141 }
 142 else {
 143 /* Otherwise just copy center motion. */
 144 motion[i] = tfm;
 145 }
 146 }
 147
 148 /* Test if any of the transforms are actually different. */
 149 have_motion = have_motion || motion[i] != tfm;
 150 }
 151
 152 /* Clear motion array if there is no actual motion. */
 153 if (!have_motion) {
 154 motion.clear();
 155 }
156}
157
/* Compute the world-space bounding box of this object. With motion blur, the
 * motion transforms are decomposed and the geometry bounds are accumulated
 * over 128 interpolated time samples; otherwise a single transform is used
 * (or the geometry bounds directly, when the transform was baked in). */
158void Object::compute_bounds(bool motion_blur)
159{
 160 BoundBox mbounds = geometry->bounds;
 161
 162 if (motion_blur && use_motion()) {
 163 array<DecomposedTransform> decomp(motion.size());
 164 transform_motion_decompose(decomp.data(), motion.data(), motion.size());
 165
 /* NOTE(review): the statement resetting `bounds` before the accumulation
 * loop (upstream line 166) is elided in this listing — confirm upstream. */
 167
 168 /* TODO: this is really terrible. according to PBRT there is a better
 169 * way to find this iteratively, but did not find implementation yet
 170 * or try to implement myself */
 171 for (float t = 0.0f; t < 1.0f; t += (1.0f / 128.0f)) {
 172 Transform ttfm;
 173
 174 transform_motion_array_interpolate(&ttfm, decomp.data(), motion.size(), t);
 175 bounds.grow(mbounds.transformed(&ttfm));
 176 }
 177 }
 178 else {
 179 /* No motion blur case. */
 180 if (geometry->transform_applied) {
 181 bounds = mbounds;
 182 }
 183 else {
 184 bounds = mbounds.transformed(&tfm);
 185 }
 186 }
187}
188
189void Object::apply_transform(bool apply_to_motion)
190{
191 if (!geometry || tfm == transform_identity()) {
192 return;
193 }
194
195 geometry->apply_transform(tfm, apply_to_motion);
196
197 /* we keep normals pointing in same direction on negative scale, notify
198 * geometry about this in it (re)calculates normals */
199 if (transform_negative_scale(tfm)) {
200 geometry->transform_negative_scaled = true;
201 }
202
203 if (bounds.valid()) {
204 geometry->compute_bounds();
205 compute_bounds(false);
206 }
207
208 /* tfm is not reset to identity, all code that uses it needs to check the
209 * transform_applied boolean */
210}
211
/* Notify the relevant scene managers that this object changed, so the next
 * device update refreshes the dependent data (BVH, lights, camera, flags).
 * NOTE(review): the function header and the lines accumulating the update
 * flag passed to the object manager are elided in this listing. */
213{
 215
 216 if (is_modified()) {
 218
 219 if (use_holdout_is_modified()) {
 221 }
 222
 223 if (is_shadow_catcher_is_modified()) {
 224 scene->tag_shadow_catcher_modified();
 226 }
 227 }
 228
 229 if (geometry) {
 230 if (tfm_is_modified() || motion_is_modified()) {
 232 }
 233
 234 if (visibility_is_modified()) {
 236 }
 237
 /* Any emissive shader on the geometry means light sampling data changes. */
 238 foreach (Node *node, geometry->get_used_shaders()) {
 239 Shader *shader = static_cast<Shader *>(node);
 240 if (shader->emission_sampling != EMISSION_SAMPLING_NONE) {
 241 scene->light_manager->tag_update(scene, LightManager::EMISSIVE_MESH_MODIFIED);
 242 }
 243 }
 244 }
 245
 246 scene->camera->need_flags_update = true;
 247 scene->object_manager->tag_update(scene, flag);
248}
249
/* True when the object carries motion-blur transforms: the motion array holds
 * one transform per motion step, and a meaningful path needs more than one.
 * NOTE(review): the function header line is elided in this listing. */
251{
 252 return (motion.size() > 1);
253}
254
255float Object::motion_time(int step) const
256{
257 return (use_motion()) ? 2.0f * step / (motion.size() - 1) - 1.0f : 0.0f;
258}
259
260int Object::motion_step(float time) const
261{
262 if (use_motion()) {
263 for (size_t step = 0; step < motion.size(); step++) {
264 if (time == motion_time(step)) {
265 return step;
266 }
267 }
268 }
269
270 return -1;
271}
272
/* Whether rays can actually hit this object: objects with invalid or
 * zero-sized bounds contribute nothing to intersection and can be skipped.
 * NOTE(review): the function header line is elided in this listing. */
274{
 275 /* Mesh itself can be empty, so we can skip all such objects. */
 276 if (!bounds.valid() || bounds.size() == zero_float3()) {
 277 return false;
 278 }
 279 /* TODO(sergey): Check for mesh vertices/curves. visibility flags. */
 280 return true;
281}
282
/* Ray visibility mask used by the kernel: restrict to the path-ray visibility
 * bits and apply the shadow-catcher adjustment.
 * NOTE(review): the function header line is elided in this listing. */
284{
 285 return SHADOW_CATCHER_OBJECT_VISIBILITY(is_shadow_catcher, visibility & PATH_RAY_ALL_VISIBILITY);
286}
287
/* Compute the volume ray-marching step size for this object, in world space.
 * Returns FLT_MAX when the object has no volume shader needing stepping.
 * The result combines a shader-driven step rate with a step size derived from
 * voxel grid resolution (or a bounds-based fallback for procedural volumes).
 * NOTE(review): the function header line is elided in this listing. */
289{
 290 if (geometry->geometry_type != Geometry::MESH && geometry->geometry_type != Geometry::VOLUME) {
 291 return FLT_MAX;
 292 }
 293
 294 Mesh *mesh = static_cast<Mesh *>(geometry);
 295
 296 if (!mesh->has_volume) {
 297 return FLT_MAX;
 298 }
 299
 /* Only heterogeneous, spatially varying volumes need stepping at all. */
 300 /* Compute step rate from shaders. */
 301 float step_rate = FLT_MAX;
 302
 303 foreach (Node *node, mesh->get_used_shaders()) {
 304 Shader *shader = static_cast<Shader *>(node);
 305 if (shader->has_volume) {
 306 if ((shader->get_heterogeneous_volume() && shader->has_volume_spatial_varying) ||
 307 (shader->has_volume_attribute_dependency))
 308 {
 309 step_rate = fminf(shader->get_volume_step_rate(), step_rate);
 310 }
 311 }
 312 }
 313
 314 if (step_rate == FLT_MAX) {
 315 return FLT_MAX;
 316 }
 317
 318 /* Compute step size from voxel grids. */
 319 float step_size = FLT_MAX;
 320
 321 if (geometry->geometry_type == Geometry::VOLUME) {
 322 Volume *volume = static_cast<Volume *>(geometry);
 323
 324 foreach (Attribute &attr, volume->attributes.attributes) {
 325 if (attr.element == ATTR_ELEMENT_VOXEL) {
 326 ImageHandle &handle = attr.data_voxel();
 327 const ImageMetaData &metadata = handle.metadata();
 328 if (metadata.width == 0 || metadata.height == 0 || metadata.depth == 0) {
 329 continue;
 330 }
 331
 332 /* User specified step size. */
 333 float voxel_step_size = volume->get_step_size();
 334
 335 if (voxel_step_size == 0.0f) {
 336 /* Auto detect step size. */
 337 float3 size = one_float3();
 338#ifdef WITH_NANOVDB
 339 /* Dimensions were not applied to image transform with NanoVDB (see image_vdb.cpp) */
 /* NOTE(review): parts of this NanoVDB type check (upstream lines 341 and
 * 343, including the closing of the condition) are elided in this listing —
 * confirm against the upstream file. */
 340 if (metadata.type != IMAGE_DATA_TYPE_NANOVDB_FLOAT &&
 342 metadata.type != IMAGE_DATA_TYPE_NANOVDB_FPN &&
 344#endif
 345 size /= make_float3(metadata.width, metadata.height, metadata.depth);
 346
 347 /* Step size is transformed from voxel to world space. */
 348 Transform voxel_tfm = tfm;
 349 if (metadata.use_transform_3d) {
 350 voxel_tfm = tfm * transform_inverse(metadata.transform_3d);
 351 }
 352 voxel_step_size = reduce_min(fabs(transform_direction(&voxel_tfm, size)));
 353 }
 354 else if (volume->get_object_space()) {
 355 /* User specified step size in object space. */
 356 float3 size = make_float3(voxel_step_size, voxel_step_size, voxel_step_size);
 357 voxel_step_size = reduce_min(fabs(transform_direction(&tfm, size)));
 358 }
 359
 360 if (voxel_step_size > 0.0f) {
 361 step_size = fminf(voxel_step_size, step_size);
 362 }
 363 }
 364 }
 365 }
 366
 367 if (step_size == FLT_MAX) {
 368 /* Fall back to 1/10th of bounds for procedural volumes. */
 369 assert(bounds.valid());
 370 step_size = 0.1f * average(bounds.size());
 371 }
 372
 373 step_size *= step_rate;
 374
 375 return step_size;
376}
377
/* Index of this object in the packed device arrays (assigned sequentially by
 * ObjectManager::device_update).
 * NOTE(review): the function header line is elided in this listing. */
379{
 380 return index;
381}
382
/* Whether the object can be sampled as a (mesh/volume) light: it must be
 * traceable, visible, and use at least one emissive shader.
 * NOTE(review): the function header line and the visibility mask inside the
 * condition below (upstream line 395) are elided in this listing. */
384{
 385 Geometry *geom = get_geometry();
 386 if (!geom->is_mesh() && !geom->is_volume()) {
 387 return false;
 388 }
 389 /* Skip non-traceable objects. */
 390 if (!is_traceable()) {
 391 return false;
 392 }
 393 /* Skip if we are not visible for BSDFs. */
 394 if (!(get_visibility() &
 396 {
 397 return false;
 398 }
 399 /* Skip if we have no emission shaders. */
 400 /* TODO(sergey): Ideally we want to avoid such duplicated loop, since it'll
 401 * iterate all geometry shaders twice (when counting and when calculating
 402 * triangle area.
 403 */
 404 foreach (Node *node, geom->get_used_shaders()) {
 405 Shader *shader = static_cast<Shader *>(node);
 406 if (shader->emission_sampling != EMISSION_SAMPLING_NONE) {
 407 return true;
 408 }
 409 }
 410 return false;
411}
412
/* Whether this object uses non-default light linking: either it belongs to a
 * specific receiver light set, or its light set membership mask is narrowed.
 * NOTE(review): the function header line is elided in this listing. */
414{
 415 if (get_receiver_light_set()) {
 416 return true;
 417 }
 418
 419 if (get_light_set_membership() != LIGHT_LINK_MASK_ALL) {
 420 return true;
 421 }
 422
 423 return false;
424}
425
/* Whether this object uses non-default shadow linking: either it belongs to a
 * specific blocker shadow set, or its shadow set membership mask is narrowed.
 * NOTE(review): the function header line is elided in this listing. */
427{
 428 if (get_blocker_shadow_set()) {
 429 return true;
 430 }
 431
 432 if (get_shadow_set_membership() != LIGHT_LINK_MASK_ALL) {
 433 return true;
 434 }
 435
 436 return false;
437}
438
439/* Object Manager */
440
/* Construct the manager with everything flagged for a full initial update.
 * NOTE(review): the constructor header line is elided in this listing. */
442{
 443 update_flags = UPDATE_ALL;
 444 need_flags_update = true;
445}
446
448
449static float object_volume_density(const Transform &tfm, Geometry *geom)
450{
451 if (geom->geometry_type == Geometry::VOLUME) {
452 /* Volume density automatically adjust to object scale. */
453 if (static_cast<Volume *>(geom)->get_object_space()) {
454 const float3 unit = normalize(one_float3());
455 return 1.0f / len(transform_direction(&tfm, unit));
456 }
457 }
458
459 return 1.0f;
460}
461
/* Fill the packed KernelObject entry for one object: transforms, shading
 * parameters, light linking, motion data, cryptomatte hashes and per-object
 * state flags. Safe to run in parallel over objects (writes only this
 * object's slots plus monotonic "have_*" state booleans).
 * NOTE(review): the function header line and several lines that OR bits into
 * `flag`/`kobject` (upstream lines 493, 510, 513, 517, 524, 547, 566, 579-580,
 * 584, 606, 609, 614) are elided in this listing. */
 463 Object *ob,
 464 bool update_all,
 465 const Scene *scene)
466{
 467 KernelObject &kobject = state->objects[ob->index];
 468 Transform *object_motion_pass = state->object_motion_pass;
 469
 470 Geometry *geom = ob->geometry;
 471 uint flag = 0;
 472
 473 /* Compute transformations. */
 474 Transform tfm = ob->tfm;
 475 Transform itfm = transform_inverse(tfm);
 476
 477 float3 color = ob->color;
 478 float pass_id = ob->pass_id;
 /* Map the 32-bit random id onto [0, 1). */
 479 float random_number = (float)ob->random_id * (1.0f / (float)0xFFFFFFFF);
 480 int particle_index = (ob->particle_system) ?
 481 ob->particle_index + state->particle_offset[ob->particle_system] :
 482 0;
 483
 484 kobject.tfm = tfm;
 485 kobject.itfm = itfm;
 486 kobject.volume_density = object_volume_density(tfm, geom);
 487 kobject.color[0] = color.x;
 488 kobject.color[1] = color.y;
 489 kobject.color[2] = color.z;
 490 kobject.alpha = ob->alpha;
 491 kobject.pass_id = pass_id;
 492 kobject.random_number = random_number;
 494 kobject.motion_offset = 0;
 495 kobject.ao_distance = ob->ao_distance;
 /* Out-of-range light/shadow set indices fall back to the default set 0. */
 496 kobject.receiver_light_set = ob->receiver_light_set >= LIGHT_LINK_SET_MAX ?
 497 0 :
 498 ob->receiver_light_set;
 499 kobject.light_set_membership = ob->light_set_membership;
 500 kobject.blocker_shadow_set = ob->blocker_shadow_set >= LIGHT_LINK_SET_MAX ?
 501 0 :
 502 ob->blocker_shadow_set;
 503 kobject.shadow_set_membership = ob->shadow_set_membership;
 504
 505 if (geom->get_use_motion_blur()) {
 506 state->have_motion = true;
 507 }
 508
 509 if (transform_negative_scale(tfm)) {
 511 }
 512
 514 /* TODO: why only mesh? */
 515 Mesh *mesh = static_cast<Mesh *>(geom);
 516 if (mesh->attributes.find(ATTR_STD_MOTION_VERTEX_POSITION)) {
 518 }
 519 }
 520 else if (geom->is_volume()) {
 521 Volume *volume = static_cast<Volume *>(geom);
 522 if (volume->attributes.find(ATTR_STD_VOLUME_VELOCITY) && volume->get_velocity_scale() != 0.0f)
 523 {
 525 kobject.velocity_scale = volume->get_velocity_scale();
 526 }
 527 }
 528
 529 if (state->need_motion == Scene::MOTION_PASS) {
 530 /* Clear motion array if there is no actual motion. */
 531 ob->update_motion();
 532
 533 /* Compute motion transforms. */
 534 Transform tfm_pre, tfm_post;
 535 if (ob->use_motion()) {
 536 tfm_pre = ob->motion[0];
 537 tfm_post = ob->motion[ob->motion.size() - 1];
 538 }
 539 else {
 540 tfm_pre = tfm;
 541 tfm_post = tfm;
 542 }
 543
 544 /* Motion transformations, is world/object space depending if mesh
 545 * comes with deformed position in object space, or if we transform
 546 * the shading point in world space. */
 548 tfm_pre = tfm_pre * itfm;
 549 tfm_post = tfm_post * itfm;
 550 }
 551
 552 int motion_pass_offset = ob->index * OBJECT_MOTION_PASS_SIZE;
 553 object_motion_pass[motion_pass_offset + 0] = tfm_pre;
 554 object_motion_pass[motion_pass_offset + 1] = tfm_post;
 555 }
 556 else if (state->need_motion == Scene::MOTION_BLUR) {
 557 if (ob->use_motion()) {
 558 kobject.motion_offset = state->motion_offset[ob->index];
 559
 560 /* Decompose transforms for interpolation. */
 561 if (ob->tfm_is_modified() || ob->motion_is_modified() || update_all) {
 562 DecomposedTransform *decomp = state->object_motion + kobject.motion_offset;
 563 transform_motion_decompose(decomp, ob->motion.data(), ob->motion.size());
 564 }
 565
 567 state->have_motion = true;
 568 }
 569 }
 570
 571 /* Dupli object coords and motion info. */
 572 kobject.dupli_generated[0] = ob->dupli_generated[0];
 573 kobject.dupli_generated[1] = ob->dupli_generated[1];
 574 kobject.dupli_generated[2] = ob->dupli_generated[2];
 575 kobject.dupli_uv[0] = ob->dupli_uv[0];
 576 kobject.dupli_uv[1] = ob->dupli_uv[1];
 577 int totalsteps = geom->get_motion_steps();
 578 kobject.numsteps = (totalsteps - 1) / 2;
 /* Element count per geometry type (verts / curve keys / points). */
 579 kobject.numverts = (geom->geometry_type == Geometry::MESH ||
 581 static_cast<Mesh *>(geom)->get_verts().size() :
 582 (geom->geometry_type == Geometry::HAIR) ?
 583 static_cast<Hair *>(geom)->get_curve_keys().size() :
 585 static_cast<PointCloud *>(geom)->num_points() :
 586 0;
 587 kobject.patch_map_offset = 0;
 588 kobject.attribute_map_offset = 0;
 589
 /* Cryptomatte hashes only need recomputing when the names changed. */
 590 if (ob->asset_name_is_modified() || update_all) {
 591 uint32_t hash_name = util_murmur_hash3(ob->name.c_str(), ob->name.length(), 0);
 592 uint32_t hash_asset = util_murmur_hash3(ob->asset_name.c_str(), ob->asset_name.length(), 0);
 593 kobject.cryptomatte_object = util_hash_to_float(hash_name);
 594 kobject.cryptomatte_asset = util_hash_to_float(hash_asset);
 595 }
 596
 597 kobject.shadow_terminator_shading_offset = 1.0f /
 598 (1.0f - 0.5f * ob->shadow_terminator_shading_offset);
 599 kobject.shadow_terminator_geometry_offset = ob->shadow_terminator_geometry_offset;
 600
 601 kobject.visibility = ob->visibility_for_tracing();
 602 kobject.primitive_type = geom->primitive_type();
 603
 604 /* Object shadow caustics flag */
 605 if (ob->is_caustics_caster) {
 607 }
 608 if (ob->is_caustics_receiver) {
 610 }
 611
 612 /* Object flag. */
 613 if (ob->use_holdout) {
 615 }
 616 state->object_flag[ob->index] = flag;
 617 state->object_volume_step[ob->index] = FLT_MAX;
 618
 619 /* Have curves. */
 620 if (geom->geometry_type == Geometry::HAIR) {
 621 state->have_curves = true;
 622 }
 623 if (geom->geometry_type == Geometry::POINTCLOUD) {
 624 state->have_points = true;
 625 }
 626 if (geom->geometry_type == Geometry::VOLUME) {
 627 state->have_volumes = true;
 628 }
 629
 630 /* Light group. */
 631 auto it = scene->lightgroups.find(ob->lightgroup);
 632 if (it != scene->lightgroups.end()) {
 633 kobject.lightgroup = it->second;
 634 }
 635 else {
 636 kobject.lightgroup = LIGHTGROUP_NONE;
 637 }
638}
639
/* Build the per-object primitive offset table needed by BVH layouts that
 * cannot bake primitive / curve segment offsets at build time (MetalRT,
 * HIPRT), or whenever the light tree needs them.
 * NOTE(review): the function header line and the final lines copying the
 * array to the device (upstream lines 669-670) are elided in this listing. */
641{
 642 if (!scene->integrator->get_use_light_tree()) {
 643 BVHLayoutMask layout_mask = device->get_bvh_layout_mask(dscene->data.kernel_features);
 644 if (layout_mask != BVH_LAYOUT_METAL && layout_mask != BVH_LAYOUT_MULTI_METAL &&
 645 layout_mask != BVH_LAYOUT_MULTI_METAL_EMBREE && layout_mask != BVH_LAYOUT_HIPRT &&
 646 layout_mask != BVH_LAYOUT_MULTI_HIPRT && layout_mask != BVH_LAYOUT_MULTI_HIPRT_EMBREE)
 647 {
 648 return;
 649 }
 650 }
 651
 652 /* On MetalRT, primitive / curve segment offsets can't be baked at BVH build time. Intersection
 653 * handlers need to apply the offset manually. */
 654 uint *object_prim_offset = dscene->object_prim_offset.alloc(scene->objects.size());
 655 foreach (Object *ob, scene->objects) {
 656 uint32_t prim_offset = 0;
 657 if (Geometry *const geom = ob->geometry) {
 /* Hair uses curve segment offsets, all other geometry the prim offset. */
 658 if (geom->geometry_type == Geometry::HAIR) {
 659 prim_offset = ((Hair *const)geom)->curve_segment_offset;
 660 }
 661 else {
 662 prim_offset = geom->prim_offset;
 663 }
 664 }
 665 uint obj_index = ob->get_device_index();
 666 object_prim_offset[obj_index] = prim_offset;
 667 }
 668
671}
672
/* Pack all object transforms (and motion data) into the device arrays, in
 * parallel over objects, then upload whatever was modified.
 * NOTE(review): the function header line and the declaration of the shared
 * `state` (upstream line 675, the UpdateObjectTransformState instance) are
 * elided in this listing. */
674{
 676 state.need_motion = scene->need_motion();
 677 state.have_motion = false;
 678 state.have_curves = false;
 679 state.have_points = false;
 680 state.have_volumes = false;
 681 state.scene = scene;
 682 state.queue_start_object = 0;
 683
 684 state.objects = dscene->objects.alloc(scene->objects.size());
 685 state.object_flag = dscene->object_flag.alloc(scene->objects.size());
 686 state.object_volume_step = dscene->object_volume_step.alloc(scene->objects.size());
 687 state.object_motion = NULL;
 688 state.object_motion_pass = NULL;
 689
 690 if (state.need_motion == Scene::MOTION_PASS) {
 /* Two transforms (pre/post) per object for the motion vector pass. */
 691 state.object_motion_pass = dscene->object_motion_pass.alloc(OBJECT_MOTION_PASS_SIZE *
 692 scene->objects.size());
 693 }
 694 else if (state.need_motion == Scene::MOTION_BLUR) {
 695 /* Set object offsets into global object motion array. */
 696 uint *motion_offsets = state.motion_offset.resize(scene->objects.size());
 697 uint motion_offset = 0;
 698
 699 foreach (Object *ob, scene->objects) {
 700 *motion_offsets = motion_offset;
 701 motion_offsets++;
 702
 703 /* Clear motion array if there is no actual motion. */
 704 ob->update_motion();
 705 motion_offset += ob->motion.size();
 706 }
 707
 708 state.object_motion = dscene->object_motion.alloc(motion_offset);
 709 }
 710
 711 /* Particle system device offsets
 712 * 0 is dummy particle, index starts at 1.
 713 */
 714 int numparticles = 1;
 715 foreach (ParticleSystem *psys, scene->particle_systems) {
 716 state.particle_offset[psys] = numparticles;
 717 numparticles += psys->particles.size();
 718 }
 719
 720 /* as all the arrays are the same size, checking only dscene.objects is sufficient */
 721 const bool update_all = dscene->objects.need_realloc();
 722
 723 /* Parallel object update, with grain size to avoid too much threading overhead
 724 * for individual objects. */
 725 static const int OBJECTS_PER_TASK = 32;
 726 parallel_for(blocked_range<size_t>(0, scene->objects.size(), OBJECTS_PER_TASK),
 727 [&](const blocked_range<size_t> &r) {
 728 for (size_t i = r.begin(); i != r.end(); i++) {
 729 Object *ob = state.scene->objects[i];
 730 device_update_object_transform(&state, ob, update_all, scene);
 731 }
 732 });
 733
 734 if (progress.get_cancel()) {
 735 return;
 736 }
 737
 738 dscene->objects.copy_to_device_if_modified();
 739 if (state.need_motion == Scene::MOTION_PASS) {
 740 dscene->object_motion_pass.copy_to_device();
 741 }
 742 else if (state.need_motion == Scene::MOTION_BLUR) {
 743 dscene->object_motion.copy_to_device();
 744 }
 745
 /* Publish aggregated scene feature flags for BVH building. */
 746 dscene->data.bvh.have_motion = state.have_motion;
 747 dscene->data.bvh.have_curves = state.have_curves;
 748 dscene->data.bvh.have_points = state.have_points;
 749 dscene->data.bvh.have_volumes = state.have_volumes;
 750
 751 dscene->objects.clear_modified();
 752 dscene->object_motion_pass.clear_modified();
 753 dscene->object_motion.clear_modified();
754}
755
/* Main object update entry point: translate pending update flags into device
 * array realloc/modify tags, assign device indices, upload transforms, and
 * optionally bake static transforms for static BVHs.
 * NOTE(review): the function header line and a few additional tag_realloc /
 * tag_modified lines (upstream lines 767, 770, 805, 808) are elided in this
 * listing. */
 757 DeviceScene *dscene,
 758 Scene *scene,
 759 Progress &progress)
760{
 761 if (!need_update()) {
 762 return;
 763 }
 764
 765 if (update_flags & (OBJECT_ADDED | OBJECT_REMOVED)) {
 766 dscene->objects.tag_realloc();
 768 dscene->object_motion.tag_realloc();
 769 dscene->object_flag.tag_realloc();
 771 }
 772
 773 if (update_flags & HOLDOUT_MODIFIED) {
 774 dscene->object_flag.tag_modified();
 775 }
 776
 777 if (update_flags & PARTICLE_MODIFIED) {
 778 dscene->objects.tag_modified();
 779 }
 780
 781 VLOG_INFO << "Total " << scene->objects.size() << " objects.";
 782
 /* Release arrays that were tagged for reallocation above. */
 783 device_free(device, dscene, false);
 784
 785 if (scene->objects.size() == 0) {
 786 return;
 787 }
 788
 789 {
 790 /* Assign object IDs. */
 791 scoped_callback_timer timer([scene](double time) {
 792 if (scene->update_stats) {
 793 scene->update_stats->object.times.add_entry({"device_update (assign index)", time});
 794 }
 795 });
 796
 797 int index = 0;
 798 foreach (Object *object, scene->objects) {
 799 object->index = index++;
 800
 801 /* this is a bit too broad, however a bigger refactor might be needed to properly separate
 802 * update each type of data (transform, flags, etc.) */
 803 if (object->is_modified()) {
 804 dscene->objects.tag_modified();
 806 dscene->object_motion.tag_modified();
 807 dscene->object_flag.tag_modified();
 809 }
 810 }
 811 }
 812
 813 {
 814 /* set object transform matrices, before applying static transforms */
 815 scoped_callback_timer timer([scene](double time) {
 816 if (scene->update_stats) {
 817 scene->update_stats->object.times.add_entry(
 818 {"device_update (copy objects to device)", time});
 819 }
 820 });
 821
 822 progress.set_status("Updating Objects", "Copying Transformations to device");
 823 device_update_transforms(dscene, scene, progress);
 824 }
 825
 826 if (progress.get_cancel()) {
 827 return;
 828 }
 829
 830 /* prepare for static BVH building */
 831 /* todo: do before to support getting object level coords? */
 832 if (scene->params.bvh_type == BVH_TYPE_STATIC) {
 833 scoped_callback_timer timer([scene](double time) {
 834 if (scene->update_stats) {
 835 scene->update_stats->object.times.add_entry(
 836 {"device_update (apply static transforms)", time});
 837 }
 838 });
 839
 840 progress.set_status("Updating Objects", "Applying Static Transformations");
 841 apply_static_transforms(dscene, scene, progress);
 842 }
 843
 844 foreach (Object *object, scene->objects) {
 845 object->clear_modified();
 846 }
847}
848
/* Recompute per-object kernel flags (volume presence, voxel attributes,
 * shadow catcher, volume intersection) and per-object volume step sizes,
 * then upload both arrays. When bounds are not yet valid, bounds-dependent
 * values are deferred to a later pass (see comment below).
 * NOTE(review): the opening line of the function header is elided in this
 * listing. */
 850 Device *, DeviceScene *dscene, Scene *scene, Progress & /*progress*/, bool bounds_valid)
851{
 852 if (!need_update() && !need_flags_update) {
 853 return;
 854 }
 855
 856 scoped_callback_timer timer([scene](double time) {
 857 if (scene->update_stats) {
 858 scene->update_stats->object.times.add_entry({"device_update_flags", time});
 859 }
 860 });
 861
 862 if (bounds_valid) {
 863 /* Object flags and calculations related to volume depend on proper bounds calculated, which
 864 * might not be available yet when object flags are updated for displacement or hair
 865 * transparency calculation. In this case do not clear the need_flags_update, so that these
 866 * values which depend on bounds are re-calculated when the device_update process comes back
 867 * here from the "Updating Objects Flags" stage. */
 868 update_flags = UPDATE_NONE;
 869 need_flags_update = false;
 870 }
 871
 872 if (scene->objects.size() == 0) {
 873 return;
 874 }
 875
 876 /* Object info flag. */
 877 uint *object_flag = dscene->object_flag.data();
 878 float *object_volume_step = dscene->object_volume_step.data();
 879
 880 /* Object volume intersection. */
 881 vector<Object *> volume_objects;
 882 bool has_volume_objects = false;
 883 foreach (Object *object, scene->objects) {
 884 if (object->geometry->has_volume) {
 885 /* If the bounds are not valid it is not always possible to calculate the volume step, and
 886 * the step size is not needed for the displacement. So, delay calculation of the volume
 887 * step size until the final bounds are known. */
 888 if (bounds_valid) {
 889 volume_objects.push_back(object);
 890 object_volume_step[object->index] = object->compute_volume_step_size();
 891 }
 892 else {
 893 object_volume_step[object->index] = FLT_MAX;
 894 }
 895 has_volume_objects = true;
 896 }
 897 else {
 898 object_volume_step[object->index] = FLT_MAX;
 899 }
 900 }
 901
 902 foreach (Object *object, scene->objects) {
 903 if (object->geometry->has_volume) {
 904 object_flag[object->index] |= SD_OBJECT_HAS_VOLUME;
 905 object_flag[object->index] &= ~SD_OBJECT_HAS_VOLUME_ATTRIBUTES;
 906
 907 foreach (Attribute &attr, object->geometry->attributes.attributes) {
 908 if (attr.element == ATTR_ELEMENT_VOXEL) {
 909 object_flag[object->index] |= SD_OBJECT_HAS_VOLUME_ATTRIBUTES;
 910 }
 911 }
 912 }
 913 else {
 914 object_flag[object->index] &= ~(SD_OBJECT_HAS_VOLUME | SD_OBJECT_HAS_VOLUME_ATTRIBUTES);
 915 }
 916
 917 if (object->is_shadow_catcher) {
 918 object_flag[object->index] |= SD_OBJECT_SHADOW_CATCHER;
 919 }
 920 else {
 921 object_flag[object->index] &= ~SD_OBJECT_SHADOW_CATCHER;
 922 }
 923
 /* O(objects * volume objects) overlap test against all volume bounds. */
 924 if (bounds_valid) {
 925 object->intersects_volume = false;
 926 foreach (Object *volume_object, volume_objects) {
 927 if (object == volume_object) {
 928 continue;
 929 }
 930 if (object->bounds.intersects(volume_object->bounds)) {
 931 object_flag[object->index] |= SD_OBJECT_INTERSECTS_VOLUME;
 932 object->intersects_volume = true;
 933 break;
 934 }
 935 }
 936 }
 937 else if (has_volume_objects) {
 938 /* Not really valid, but can't make more reliable in the case
 939 * of bounds not being up to date.
 940 */
 941 object_flag[object->index] |= SD_OBJECT_INTERSECTS_VOLUME;
 942 }
 943 }
 944
 945 /* Copy object flag. */
 946 dscene->object_flag.copy_to_device();
 947 dscene->object_volume_step.copy_to_device();
 948
 949 dscene->object_flag.clear_modified();
 950 dscene->object_volume_step.clear_modified();
951}
952
/* Patch per-object subdivision patch-map and attribute-map offsets into the
 * already packed KernelObject array, and re-upload it only when something
 * actually changed.
 * NOTE(review): the function header line is elided in this listing. */
954{
 955 if (dscene->objects.size() == 0) {
 956 return;
 957 }
 958
 959 KernelObject *kobjects = dscene->objects.data();
 960
 961 bool update = false;
 962
 963 foreach (Object *object, scene->objects) {
 964 Geometry *geom = object->geometry;
 965
 966 if (geom->geometry_type == Geometry::MESH) {
 967 Mesh *mesh = static_cast<Mesh *>(geom);
 968 if (mesh->patch_table) {
 969 uint patch_map_offset = 2 * (mesh->patch_table_offset + mesh->patch_table->total_size() -
 970 mesh->patch_table->num_nodes * PATCH_NODE_SIZE) -
 971 mesh->patch_offset;
 972
 973 if (kobjects[object->index].patch_map_offset != patch_map_offset) {
 974 kobjects[object->index].patch_map_offset = patch_map_offset;
 975 update = true;
 976 }
 977 }
 978 }
 979
 980 size_t attr_map_offset = object->attr_map_offset;
 981
 982 /* An object attribute map cannot have a zero offset because mesh maps come first. */
 983 if (attr_map_offset == 0) {
 984 attr_map_offset = geom->attr_map_offset;
 985 }
 986
 987 if (kobjects[object->index].attribute_map_offset != attr_map_offset) {
 988 kobjects[object->index].attribute_map_offset = attr_map_offset;
 989 update = true;
 990 }
 991 }
 992
 993 if (update) {
 994 dscene->objects.copy_to_device();
 995 }
996}
997
/* Free object related device arrays. With force_free the memory is released
 * unconditionally; otherwise only arrays tagged for reallocation are freed. */
998void ObjectManager::device_free(Device *, DeviceScene *dscene, bool force_free)
999{
 1000 dscene->objects.free_if_need_realloc(force_free);
 1001 dscene->object_motion_pass.free_if_need_realloc(force_free);
 1002 dscene->object_motion.free_if_need_realloc(force_free);
 1003 dscene->object_flag.free_if_need_realloc(force_free);
 1004 dscene->object_volume_step.free_if_need_realloc(force_free);
 1005 dscene->object_prim_offset.free_if_need_realloc(force_free);
1006}
1007
/* For static BVH builds: bake object transforms into geometry that is used by
 * exactly one object (no instancing), skipping geometry where baking would
 * change shading or cannot be represented (BSSRDF, true displacement,
 * subdivision meshes, non-uniformly scaled curves, motion-blurred objects).
 * NOTE(review): the function header line is elided in this listing. */
1009{
 1010 /* todo: normals and displacement should be done before applying transform! */
 1011 /* todo: create objects/geometry in right order! */
 1012
 1013 /* counter geometry users */
 1014 map<Geometry *, int> geometry_users;
 1015 Scene::MotionType need_motion = scene->need_motion();
 1016 bool motion_blur = need_motion == Scene::MOTION_BLUR;
 1017 bool apply_to_motion = need_motion != Scene::MOTION_PASS;
 1018 int i = 0;
 1019
 1020 foreach (Object *object, scene->objects) {
 1021 map<Geometry *, int>::iterator it = geometry_users.find(object->geometry);
 1022
 1023 if (it == geometry_users.end()) {
 1024 geometry_users[object->geometry] = 1;
 1025 }
 1026 else {
 1027 it->second++;
 1028 }
 1029 }
 1030
 1031 if (progress.get_cancel()) {
 1032 return;
 1033 }
 1034
 1035 uint *object_flag = dscene->object_flag.data();
 1036
 1037 /* apply transforms for objects with single user geometry */
 1038 foreach (Object *object, scene->objects) {
 1039 /* Annoying feedback loop here: we can't use is_instanced() because
 1040 * it'll use uninitialized transform_applied flag.
 1041 *
 1042 * Could be solved by moving reference counter to Geometry.
 1043 */
 1044 Geometry *geom = object->geometry;
 1045 bool apply = (geometry_users[geom] == 1) && !geom->has_surface_bssrdf &&
 1046 !geom->has_true_displacement();
 1047
 1048 if (geom->geometry_type == Geometry::MESH) {
 1049 Mesh *mesh = static_cast<Mesh *>(geom);
 1050 apply = apply && mesh->get_subdivision_type() == Mesh::SUBDIVISION_NONE;
 1051 }
 1052 else if (geom->geometry_type == Geometry::HAIR) {
 1053 /* Can't apply non-uniform scale to curves, this can't be represented by
 1054 * control points and radius alone. */
 1055 float scale;
 1056 apply = apply && transform_uniform_scale(object->tfm, scale);
 1057 }
 1058
 1059 if (apply) {
 1060 if (!(motion_blur && object->use_motion())) {
 1061 if (!geom->transform_applied) {
 1062 object->apply_transform(apply_to_motion);
 1063 geom->transform_applied = true;
 1064
 1065 if (progress.get_cancel()) {
 1066 return;
 1067 }
 1068 }
 1069
 1070 object_flag[i] |= SD_OBJECT_TRANSFORM_APPLIED;
 1071 }
 1072 }
 1073
 1074 i++;
 1075 }
1076}
1077
/* Accumulate pending update flags and propagate the change to the geometry,
 * light and (for visibility changes) integrator managers.
 * NOTE(review): the function header line, the initialization of
 * `geometry_flag`, and the lines setting it for added/removed objects
 * (upstream lines 1084, 1089) as well as the visibility condition guarding
 * the integrator tag (upstream line 1106) are elided in this listing. */
1079{
 1080 update_flags |= flag;
 1081
 1082 /* avoid infinite loops if the geometry manager tagged us for an update */
 1083 if ((flag & GEOMETRY_MANAGER) == 0) {
 1085
 1086 /* Also notify in case added or removed objects were instances, as no Geometry might have been
 1087 * added or removed, but the BVH still needs to updated. */
 1088 if ((flag & (OBJECT_ADDED | OBJECT_REMOVED)) != 0) {
 1090 }
 1091
 1092 if ((flag & TRANSFORM_MODIFIED) != 0) {
 1093 geometry_flag |= GeometryManager::TRANSFORM_MODIFIED;
 1094 }
 1095
 1096 if ((flag & VISIBILITY_MODIFIED) != 0) {
 1097 geometry_flag |= GeometryManager::VISIBILITY_MODIFIED;
 1098 }
 1099
 1100 scene->geometry_manager->tag_update(scene, geometry_flag);
 1101 }
 1102
 1103 scene->light_manager->tag_update(scene, LightManager::OBJECT_MANAGER);
 1104
 1105 /* Integrator's shadow catcher settings depends on object visibility settings. */
 1107 scene->integrator->tag_update(scene, Integrator::OBJECT_MANAGER);
 1108 }
1109}
1110
/* True when any update flag is pending and the next device update must run.
 * NOTE(review): the function header line is elided in this listing. */
1112{
 1113 return update_flags != UPDATE_NONE;
1114}
1115
/* Build the cryptomatte object-name manifest: a JSON object mapping each
 * unique object name to the hex murmur hash used by the kernel.
 * NOTE(review): the function header line is elided in this listing. */
1117{
 1118 string manifest = "{";
 1119
 1120 unordered_set<ustring, ustringHash> objects;
 1121 foreach (Object *object, scene->objects) {
 1122 if (objects.count(object->name)) {
 1123 continue;
 1124 }
 1125 objects.insert(object->name);
 1126 uint32_t hash_name = util_murmur_hash3(object->name.c_str(), object->name.length(), 0);
 1127 manifest += string_printf("\"%s\":\"%08x\",", object->name.c_str(), hash_name);
 1128 }
 /* Overwrite the trailing comma with the closing brace.
 * NOTE(review): with zero objects this turns "{" into "}" — looks like an
 * unhandled edge case; confirm whether callers guarantee a non-empty scene. */
 1129 manifest[manifest.size() - 1] = '}';
 1130 return manifest;
1131}
1132
/* Build the cryptomatte asset-name manifest: a JSON object mapping each
 * unique asset name to the hex murmur hash used by the kernel.
 * NOTE(review): the function header line is elided in this listing. */
1134{
 1135 string manifest = "{";
 1136 unordered_set<ustring, ustringHash> assets;
 1137 foreach (Object *ob, scene->objects) {
 1138 if (assets.count(ob->asset_name)) {
 1139 continue;
 1140 }
 1141 assets.insert(ob->asset_name);
 1142 uint32_t hash_asset = util_murmur_hash3(ob->asset_name.c_str(), ob->asset_name.length(), 0);
 1143 manifest += string_printf("\"%s\":\"%08x\",", ob->asset_name.c_str(), hash_asset);
 1144 }
 /* Overwrite the trailing comma with the closing brace.
 * NOTE(review): with zero objects this turns "{" into "}" — same edge case as
 * get_cryptomatte_objects; confirm callers guarantee a non-empty scene. */
 1145 manifest[manifest.size() - 1] = '}';
 1146 return manifest;
1147}
1148
unsigned int uint
SIMD_FORCE_INLINE btVector3 & normalize()
Normalize this vector x^2 + y^2 + z^2 = 1.
Definition btVector3.h:303
AttributeElement element
ImageHandle & data_voxel()
device_vector< DecomposedTransform > object_motion
Definition devicescene.h:49
device_vector< Transform > object_motion_pass
Definition devicescene.h:48
device_vector< float > object_volume_step
Definition devicescene.h:51
device_vector< uint > object_prim_offset
Definition devicescene.h:52
device_vector< uint > object_flag
Definition devicescene.h:50
KernelData data
Definition devicescene.h:95
device_vector< KernelObject > objects
Definition devicescene.h:47
virtual BVHLayoutMask get_bvh_layout_mask(uint kernel_features) const =0
Type geometry_type
bool transform_applied
bool has_true_displacement() const
bool is_volume() const
bool has_surface_bssrdf
virtual PrimitiveType primitive_type() const =0
size_t attr_map_offset
bool is_mesh() const
Definition hair.h:14
ImageDataType type
string get_cryptomatte_objects(Scene *scene)
void device_update(Device *device, DeviceScene *dscene, Scene *scene, Progress &progress)
void tag_update(Scene *scene, uint32_t flag)
void device_update_object_transform(UpdateObjectTransformState *state, Object *ob, bool update_all, const Scene *scene)
void device_free(Device *device, DeviceScene *dscene, bool force_free)
string get_cryptomatte_assets(Scene *scene)
bool need_update() const
void device_update_geom_offsets(Device *device, DeviceScene *dscene, Scene *scene)
void device_update_prim_offsets(Device *device, DeviceScene *dscene, Scene *scene)
void device_update_transforms(DeviceScene *dscene, Scene *scene, Progress &progress)
void apply_static_transforms(DeviceScene *dscene, Scene *scene, Progress &progress)
void device_update_flags(Device *device, DeviceScene *dscene, Scene *scene, Progress &progress, bool bounds_valid=true)
bool get_cancel() const
Definition progress.h:93
void tag_update(Scene *scene)
size_t size() const
T * alloc(size_t width, size_t height=0, size_t depth=0)
void free_if_need_realloc(bool force_free)
OperationNode * node
#define CCL_NAMESPACE_END
ccl_device_forceinline float3 make_float3(const float x, const float y, const float z)
#define NULL
#define fminf(x, y)
int len
draw_view in_light_buf[] float
ccl_device_inline uint particle_index(KernelGlobals kg, int particle)
ccl_device_inline float object_volume_density(KernelGlobals kg, int object)
#define LIGHT_LINK_SET_MAX
@ ATTR_STD_VOLUME_VELOCITY
@ ATTR_STD_MOTION_VERTEX_POSITION
@ PATH_RAY_TRANSMIT
@ PATH_RAY_VOLUME_SCATTER
@ PATH_RAY_GLOSSY
@ PATH_RAY_ALL_VISIBILITY
@ PATH_RAY_DIFFUSE
#define OBJECT_MOTION_PASS_SIZE
@ EMISSION_SAMPLING_NONE
@ SD_OBJECT_MOTION
@ SD_OBJECT_HAS_VOLUME_ATTRIBUTES
@ SD_OBJECT_HAS_VOLUME
@ SD_OBJECT_INTERSECTS_VOLUME
@ SD_OBJECT_NEGATIVE_SCALE
@ SD_OBJECT_HOLDOUT_MASK
@ SD_OBJECT_HAS_VOLUME_MOTION
@ SD_OBJECT_CAUSTICS_RECEIVER
@ SD_OBJECT_SHADOW_CATCHER
@ SD_OBJECT_TRANSFORM_APPLIED
@ SD_OBJECT_HAS_VERTEX_MOTION
@ SD_OBJECT_CAUSTICS_CASTER
@ BVH_LAYOUT_MULTI_HIPRT_EMBREE
@ BVH_LAYOUT_METAL
@ BVH_LAYOUT_MULTI_HIPRT
@ BVH_LAYOUT_HIPRT
@ BVH_LAYOUT_MULTI_METAL
@ BVH_LAYOUT_MULTI_METAL_EMBREE
@ ATTR_ELEMENT_VOXEL
#define LIGHTGROUP_NONE
#define LIGHT_LINK_MASK_ALL
#define SHADOW_CATCHER_OBJECT_VISIBILITY(is_shadow_catcher, visibility)
#define VLOG_INFO
Definition log.h:72
CCL_NAMESPACE_BEGIN ccl_device_inline float2 zero_float2()
Definition math_float2.h:14
ccl_device_inline float average(const float2 a)
ccl_device_inline float reduce_min(const float2 a)
ccl_device_inline float2 fabs(const float2 a)
ccl_device_inline float3 one_float3()
Definition math_float3.h:24
CCL_NAMESPACE_BEGIN ccl_device_inline float3 zero_float3()
Definition math_float3.h:15
static ulong state[N]
float util_hash_to_float(uint32_t hash)
uint32_t util_murmur_hash3(const void *key, int len, uint32_t seed)
static void update(bNodeTree *ntree)
#define SOCKET_POINT(name, ui_name, default_value,...)
Definition node_type.h:206
#define SOCKET_FLOAT(name, ui_name, default_value,...)
Definition node_type.h:200
#define SOCKET_INT(name, ui_name, default_value,...)
Definition node_type.h:194
#define SOCKET_NODE(name, ui_name, node_type,...)
Definition node_type.h:229
#define SOCKET_TRANSFORM(name, ui_name, default_value,...)
Definition node_type.h:214
#define SOCKET_UINT(name, ui_name, default_value,...)
Definition node_type.h:196
#define NODE_DEFINE(structname)
Definition node_type.h:148
#define SOCKET_COLOR(name, ui_name, default_value,...)
Definition node_type.h:202
#define SOCKET_TRANSFORM_ARRAY(name, ui_name, default_value,...)
Definition node_type.h:269
#define SOCKET_BOOLEAN(name, ui_name, default_value,...)
Definition node_type.h:192
#define SOCKET_POINT2(name, ui_name, default_value,...)
Definition node_type.h:210
#define SOCKET_STRING(name, ui_name, default_value,...)
Definition node_type.h:212
#define SOCKET_UINT64(name, ui_name, default_value,...)
Definition node_type.h:198
int BVHLayoutMask
Definition params.h:51
@ BVH_TYPE_STATIC
Definition params.h:41
#define PATCH_NODE_SIZE
Definition patch_table.h:33
static float object_volume_density(const Transform &tfm, Geometry *geom)
#define FLT_MAX
Definition stdcycles.h:14
unsigned int uint32_t
Definition stdint.h:80
CCL_NAMESPACE_BEGIN string string_printf(const char *format,...)
Definition string.cpp:23
BoundBox transformed(const Transform *tfm) const
Definition boundbox.h:135
__forceinline bool valid() const
Definition boundbox.h:128
__forceinline float3 size() const
Definition boundbox.h:123
__forceinline void grow(const float3 &pt)
Definition boundbox.h:36
Transform tfm
uint64_t shadow_set_membership
Transform itfm
float dupli_uv[2]
float shadow_terminator_geometry_offset
float cryptomatte_asset
uint blocker_shadow_set
uint attribute_map_offset
uint receiver_light_set
uint64_t light_set_membership
float shadow_terminator_shading_offset
float dupli_generated[3]
float cryptomatte_object
@ SUBDIVISION_NONE
Definition scene/mesh.h:121
static NodeType * add(const char *name, CreateFunc create, Type type=NONE, const NodeType *base=NULL)
ustring name
Definition graph/node.h:177
bool is_modified() const
void compute_bounds(bool motion_blur)
int motion_step(float time) const
bool use_motion() const
NODE_DECLARE BoundBox bounds
size_t attr_map_offset
bool usable_as_light() const
void update_motion()
int get_device_index() const
void apply_transform(bool apply_to_motion)
float compute_volume_step_size() const
bool has_light_linking() const
bool is_traceable() const
uint visibility_for_tracing() const
void tag_update(Scene *scene)
float color[4]
float motion_time(int step) const
bool has_shadow_linking() const
struct LightgroupMembership * lightgroup
bool intersects_volume
ParticleData * particles
size_t num_points() const
vector< Object * > objects
Definition scene.h:131
MotionType
Definition scene.h:177
@ MOTION_PASS
Definition scene.h:177
@ MOTION_BLUR
Definition scene.h:177
map< ParticleSystem *, int > particle_offset
DecomposedTransform * object_motion
Scene::MotionType need_motion
void transform_motion_decompose(DecomposedTransform *decomp, const Transform *motion, size_t size)
ccl_device_inline Transform transform_identity()
Definition transform.h:296
ccl_device void transform_motion_array_interpolate(ccl_private Transform *tfm, ccl_global const DecomposedTransform *motion, uint numsteps, float time)
Definition transform.h:482
ccl_device_inline Transform transform_empty()
Definition transform.h:383
ccl_device_inline bool transform_negative_scale(const Transform &tfm)
Definition transform.h:363
ccl_device_inline Transform transform_inverse(const Transform tfm)
Definition transform.h:423
ccl_device_inline float3 transform_direction(ccl_private const Transform *t, const float3 a)
Definition transform.h:94
ccl_device_inline bool transform_uniform_scale(const Transform &tfm, float &scale)
Definition transform.h:340
@ IMAGE_DATA_TYPE_NANOVDB_FP16
@ IMAGE_DATA_TYPE_NANOVDB_FLOAT
@ IMAGE_DATA_TYPE_NANOVDB_FLOAT3
@ IMAGE_DATA_TYPE_NANOVDB_FPN
wmTimer * timer
uint8_t flag
Definition wm_window.cc:138