Blender V5.0
node_composite_defocus.cc
Go to the documentation of this file.
1/* SPDX-FileCopyrightText: 2006 Blender Authors
2 *
3 * SPDX-License-Identifier: GPL-2.0-or-later */
4
8
9#include "BLI_math_base.hh"
11
12#include "DNA_camera_types.h"
13#include "DNA_object_types.h"
14#include "DNA_scene_types.h"
15
16#include "BKE_camera.h"
17
18#include "RNA_access.hh"
19
20#include "UI_interface.hh"
22#include "UI_resources.hh"
23
25#include "COM_bokeh_kernel.hh"
26#include "COM_node_operation.hh"
27#include "COM_utilities.hh"
28
30
31/* ************ Defocus Node ****************** */
32
34
36
/* Socket declaration for the Defocus node: a color "Image" input, a float "Z"
 * input (depth map or mask, range [0, 1]), and a color "Image" output.
 * NOTE(review): the signature line (`cmp_node_defocus_declare`, see the index
 * below) was dropped by the generated listing. */
38{
39 b.add_input<decl::Color>("Image")
40 .default_value({1.0f, 1.0f, 1.0f, 1.0f})
41 .structure_type(StructureType::Dynamic);
42 b.add_input<decl::Float>("Z").default_value(1.0f).min(0.0f).max(1.0f).structure_type(
43 StructureType::Dynamic);
44 b.add_output<decl::Color>("Image").structure_type(StructureType::Dynamic);
45}
46
47static void node_composit_init_defocus(bNodeTree * /*ntree*/, bNode *node)
48{
49 /* defocus node */
 /* NOTE(review): the allocation of `nbd` (original line 50, presumably a
  * `MEM_callocN` of `NodeDefocus` — see the MEM_callocN entry in the index)
  * was dropped by the generated listing; confirm against the real source.
  * Defaults: circular bokeh (bktype 0), no rotation, f-stop 128 (effectively
  * no blur from depth), 16 px maximum blur, and "no Z-buffer" mode enabled. */
51 nbd->bktype = 0;
52 nbd->rotation = 0.0f;
53 nbd->fstop = 128.0f;
54 nbd->maxblur = 16;
55 nbd->scale = 1.0f;
56 nbd->no_zbuf = 1;
57 node->storage = nbd;
58}
59
/* Draw the Defocus node buttons: bokeh shape/rotation, f-stop (only active
 * when using the Z-buffer), maximum blur, scene selector, and the Z-buffer
 * toggle with its scale (only active when NOT using the Z-buffer).
 * NOTE(review): the signature line (`node_composit_buts_defocus(uiLayout *,
 * bContext *, PointerRNA *)`, per the index) is missing from this listing. */
61{
62 uiLayout *sub, *col;
63
64 col = &layout->column(false);
65 col->label(IFACE_("Bokeh Type:"), ICON_NONE);
66 col->prop(ptr, "bokeh", UI_ITEM_R_SPLIT_EMPTY_NAME, "", ICON_NONE);
67 col->prop(ptr, "angle", UI_ITEM_R_SPLIT_EMPTY_NAME, std::nullopt, ICON_NONE);
68
 /* The f-stop only has an effect when the radius is derived from depth. */
69 col = &layout->column(false);
70 col->active_set(RNA_boolean_get(ptr, "use_zbuffer") == true);
71 col->prop(ptr, "f_stop", UI_ITEM_R_SPLIT_EMPTY_NAME, std::nullopt, ICON_NONE);
72
73 layout->prop(ptr, "blur_max", UI_ITEM_R_SPLIT_EMPTY_NAME, std::nullopt, ICON_NONE);
74
75 uiTemplateID(layout, C, ptr, "scene", nullptr, nullptr, nullptr);
76
 /* Conversely, the Z scale only applies when the Z input is used as a mask. */
77 col = &layout->column(false);
78 col->prop(ptr, "use_zbuffer", UI_ITEM_R_SPLIT_EMPTY_NAME, std::nullopt, ICON_NONE);
79 sub = &col->column(false);
80 sub->active_set(RNA_boolean_get(ptr, "use_zbuffer") == false);
81 sub->prop(ptr, "z_scale", UI_ITEM_R_SPLIT_EMPTY_NAME, std::nullopt, ICON_NONE);
82}
83
84using namespace blender::compositor;
85
87 public:
89
 /* Entry point: pass through trivial inputs, otherwise compute a per-pixel
  * defocus radius and convolve the image with a bokeh kernel of the maximum
  * radius, dispatching to the GPU or CPU implementation. */
90 void execute() override
91 {
92 const Result &input = this->get_input("Image");
93 Result &output = this->get_result("Image");
 /* Single values and sub-pixel maximum blur cannot be blurred; forward as-is. */
94 if (input.is_single_value() || node_storage(bnode()).maxblur < 1.0f) {
95 output.share_data(input);
96 return;
97 }
98
 /* NOTE(review): original line 99 — which computed the `radius` Result used
  * and released below (presumably `Result radius = compute_defocus_radius();`)
  * — was dropped by the generated listing; confirm against the real source. */
100
101 const int maximum_defocus_radius = math::ceil(compute_maximum_defocus_radius());
102
103 /* The special zero value indicate a circle, in which case, the roundness should be set to
104 * 1, and the number of sides can be anything and is arbitrarily set to 3. */
105 const bool is_circle = node_storage(bnode()).bktype == 0;
106 const int2 kernel_size = int2(maximum_defocus_radius * 2 + 1);
107 const int sides = is_circle ? 3 : node_storage(bnode()).bktype;
108 const float rotation = node_storage(bnode()).rotation;
109 const float roundness = is_circle ? 1.0f : 0.0f;
 /* The bokeh kernel is cached, keyed on its parameters. */
110 const Result &bokeh_kernel = context().cache_manager().bokeh_kernels.get(
111 context(), kernel_size, sides, rotation, roundness, 0.0f, 0.0f);
112
113 if (this->context().use_gpu()) {
114 this->execute_gpu(input, radius, bokeh_kernel, output, maximum_defocus_radius);
115 }
116 else {
117 this->execute_cpu(input, radius, bokeh_kernel, output, maximum_defocus_radius);
118 }
119
120 radius.release();
121 }
122
 /* GPU path of the defocus blur: bind inputs, the per-pixel radius, and the
  * (bilinearly filtered) bokeh kernel to the compositor_defocus_blur shader
  * and write the result into `output`. NOTE(review): the first signature line
  * (`void execute_gpu(const Result &input,` per the index), the compute
  * dispatch call (original line 145) and `GPU_shader_unbind()` (line 147)
  * were dropped by the generated listing. */
124 const Result &radius,
125 const Result &bokeh_kernel,
126 Result &output,
127 const int search_radius)
128 {
129 gpu::Shader *shader = context().get_shader("compositor_defocus_blur");
130 GPU_shader_bind(shader);
131
132 GPU_shader_uniform_1i(shader, "search_radius", search_radius);
133
134 input.bind_as_texture(shader, "input_tx");
135
136 radius.bind_as_texture(shader, "radius_tx");
137
 /* Enable bilinear filtering so the kernel can be sampled at fractional
  * coordinates for radii smaller than the kernel size. */
138 GPU_texture_filter_mode(bokeh_kernel, true);
139 bokeh_kernel.bind_as_texture(shader, "weights_tx");
140
141 const Domain domain = compute_domain();
142 output.allocate_texture(domain);
143 output.bind_as_image(shader, "output_img");
144
146
148 input.unbind_as_texture();
149 radius.unbind_as_texture();
150 bokeh_kernel.unbind_as_texture();
151 output.unbind_as_image();
152 }
153
 /* CPU path of the defocus blur: for every pixel, gather colors in a window
  * of `search_radius`, weighting each candidate by the bokeh kernel, skipping
  * candidates whose distance exceeds both the center and candidate radii.
  * NOTE(review): the first signature line (`void execute_cpu(const Result
  * &input,` per the index) was dropped by the generated listing. */
155 const Result &radius,
156 const Result &bokeh_kernel,
157 Result &output,
158 const int search_radius)
159 {
160 const Domain domain = compute_domain();
161 output.allocate_texture(domain);
162
163 /* Given the texel in the range [-radius, radius] in both axis, load the appropriate weight
164 * from the weights image, where the given texel (0, 0) corresponds the center of weights
165 * image. Note that we load the weights image inverted along both directions to maintain
166 * the shape of the weights if it was not symmetrical. To understand why inversion makes sense,
167 * consider a 1D weights image whose right half is all ones and whose left half is all zeros.
168 * Further, consider that we are blurring a single white pixel on a black background. When
169 * computing the value of a pixel that is to the right of the white pixel, the white pixel will
170 * be in the left region of the search window, and consequently, without inversion, a zero will
171 * be sampled from the left side of the weights image and result will be zero. However, what
172 * we expect is that pixels to the right of the white pixel will be white, that is, they should
173 * sample a weight of 1 from the right side of the weights image, hence the need for
174 * inversion. */
175 auto load_weight = [&](const int2 texel, const float radius) {
176 /* Add the radius to transform the texel into the range [0, radius * 2], with an additional
177 * 0.5 to sample at the center of the pixels, then divide by the upper bound plus one to
178 * transform the texel into the normalized range [0, 1] needed to sample the weights sampler.
179 * Finally, invert the textures coordinates by subtracting from 1 to maintain the shape of
180 * the weights as mentioned in the function description. */
181 return bokeh_kernel.sample_bilinear_extended(
182 1.0f - ((float2(texel) + float2(radius + 0.5f)) / (radius * 2.0f + 1.0f)));
183 };
184
185 parallel_for(domain.size, [&](const int2 texel) {
 /* Negative radii are clamped to zero; the radius Result is sampled with
  * extended boundaries below so out-of-image candidates are well defined. */
186 float center_radius = math::max(0.0f, radius.load_pixel<float, true>(texel));
187
188 /* Go over the window of the given search radius and accumulate the colors multiplied by
189 * their respective weights as well as the weights themselves, but only if both the radius of
190 * the center pixel and the radius of the candidate pixel are less than both the x and y
191 * distances of the candidate pixel. */
192 float4 accumulated_color = float4(0.0);
193 float4 accumulated_weight = float4(0.0);
194 for (int y = -search_radius; y <= search_radius; y++) {
195 for (int x = -search_radius; x <= search_radius; x++) {
196 float candidate_radius = math::max(
197 0.0f, radius.load_pixel_extended<float, true>(texel + int2(x, y)));
198
199 /* Skip accumulation if either the x or y distances of the candidate pixel are larger
200 * than either the center or candidate pixel radius. Note that the max and min functions
201 * here denote "either" in the aforementioned description. */
202 float radius = math::min(center_radius, candidate_radius);
203 if (math::max(math::abs(x), math::abs(y)) > radius) {
204 continue;
205 }
206
207 float4 weight = load_weight(int2(x, y), radius);
208 float4 input_color = input.load_pixel_extended<float4>(texel + int2(x, y));
209
210 accumulated_color += input_color * weight;
211 accumulated_weight += weight;
212 }
213 }
214
 /* Normalize by the accumulated weight, guarding against a zero weight. */
215 accumulated_color = math::safe_divide(accumulated_color, accumulated_weight);
216
217 output.store_pixel(texel, accumulated_color);
218 });
219 }
220
 /* Dispatch helpers choosing how the per-pixel defocus radius is computed:
  * from the Z input scaled as a mask (no_zbuf) or from actual depth, and
  * within the scale path, GPU vs. CPU. NOTE(review): both method signatures
  * and the `return` statements inside the branches (original lines 221, 224,
  * 226, 229, 233, 235) were dropped by the generated listing — only the
  * branch structure survives; confirm the calls against the real source. */
222 {
223 if (node_storage(bnode()).no_zbuf) {
225 }
227 }
228
230 {
231 Result &input_depth = get_input("Z");
232 if (this->context().use_gpu() && !input_depth.is_single_value()) {
234 }
236 }
237
 /* GPU computation of the defocus radius when the Z input is treated as a
  * scaled mask: radius = clamp(z * scale, 0, maxblur), evaluated by the
  * compositor_defocus_radius_from_scale shader. NOTE(review): the signature
  * line and the dispatch/unbind calls (original lines 238, 254, 256) were
  * dropped by the generated listing. */
239 {
240 gpu::Shader *shader = context().get_shader("compositor_defocus_radius_from_scale");
241 GPU_shader_bind(shader);
242
243 GPU_shader_uniform_1f(shader, "scale", node_storage(bnode()).scale);
244 GPU_shader_uniform_1f(shader, "max_radius", node_storage(bnode()).maxblur);
245
246 Result &input_depth = get_input("Z");
247 input_depth.bind_as_texture(shader, "radius_tx");
248
249 Result output_radius = context().create_result(ResultType::Float);
250 const Domain domain = input_depth.domain();
251 output_radius.allocate_texture(domain);
252 output_radius.bind_as_image(shader, "radius_img");
253
255
257 input_depth.unbind_as_texture();
258 output_radius.unbind_as_image();
259
260 return output_radius;
261 }
262
 /* CPU counterpart of the scaled-mask radius: radius = clamp(z * scale, 0,
  * maxblur), handling the single-value Z case separately. NOTE(review): the
  * signature line (original line 263) was dropped by the generated listing. */
264 {
265 const float scale = node_storage(bnode()).scale;
266 const float max_radius = node_storage(bnode()).maxblur;
267
268 Result &input_depth = get_input("Z");
269
270 Result output_radius = context().create_result(ResultType::Float);
271
272 auto compute_radius = [&](const float depth) {
273 return math::clamp(depth * scale, 0.0f, max_radius);
274 };
275
 /* A single-value Z maps to a single-value radius; no texture is needed. */
276 if (input_depth.is_single_value()) {
277 output_radius.allocate_single_value();
278 output_radius.set_single_value(compute_radius(input_depth.get_single_value<float>()));
279 return output_radius;
280 }
281
282 const Domain domain = input_depth.domain();
283 output_radius.allocate_texture(domain);
284
285 parallel_for(domain.size, [&](const int2 texel) {
286 float depth = input_depth.load_pixel<float>(texel);
287 output_radius.store_pixel(texel, compute_radius(depth));
288 });
289
290 return output_radius;
291 }
292
 /* Computes the defocus radius from the depth input (GPU or CPU), then
  * applies a morphological blur so in-focus pixels are not bled into by
  * nearby out-of-focus ones. NOTE(review): the signature line and the two
  * branch calls (original lines 293, 298, 301) were dropped by the generated
  * listing; the branches presumably call the *_gpu/*_cpu variants below. */
294 {
295 Result &input_depth = get_input("Z");
296 Result output_radius = context().create_result(ResultType::Float);
297 if (this->context().use_gpu() && !input_depth.is_single_value()) {
299 }
300 else {
302 }
303
 /* Single values need no spatial morphological post-processing. */
304 if (output_radius.is_single_value()) {
305 return output_radius;
306 }
307
308 /* We apply a morphological operator on the radius computed from depth, the operator
309 * radius is the maximum possible defocus radius. This is done such that objects in
310 * focus---that is, objects whose defocus radius is small---are not affected by nearby out of
311 * focus objects, hence the use of erosion (the default operation of
312 * morphological_blur, per its declaration). */
312 const float morphological_radius = compute_maximum_defocus_radius();
313 Result eroded_radius = context().create_result(ResultType::Float);
314 morphological_blur(context(), output_radius, eroded_radius, float2(morphological_radius));
315 output_radius.release();
316
317 return eroded_radius;
318 }
319
 /* GPU computation of the circle-of-confusion radius from depth, using the
  * compositor_defocus_radius_from_depth shader with the camera parameters as
  * uniforms. NOTE(review): the signature line and the dispatch/unbind calls
  * (original lines 320, 339, 341) were dropped by the generated listing. */
321 {
322 gpu::Shader *shader = context().get_shader("compositor_defocus_radius_from_depth");
323 GPU_shader_bind(shader);
324
325 const float distance_to_image_of_focus = compute_distance_to_image_of_focus();
326 GPU_shader_uniform_1f(shader, "f_stop", get_f_stop());
327 GPU_shader_uniform_1f(shader, "focal_length", get_focal_length());
328 GPU_shader_uniform_1f(shader, "max_radius", node_storage(bnode()).maxblur);
329 GPU_shader_uniform_1f(shader, "pixels_per_meter", compute_pixels_per_meter());
330 GPU_shader_uniform_1f(shader, "distance_to_image_of_focus", distance_to_image_of_focus);
331
332 Result &input_depth = get_input("Z");
333 input_depth.bind_as_texture(shader, "depth_tx");
334
335 const Domain domain = input_depth.domain();
336 output_radius.allocate_texture(domain);
337 output_radius.bind_as_image(shader, "radius_img");
338
340
342 input_depth.unbind_as_texture();
343 output_radius.unbind_as_image();
344 }
345
 /* CPU computation of the circle-of-confusion radius from depth, writing into
  * `output_radius` (single value or texture, matching the Z input).
  * NOTE(review): the signature line (original line 346) was dropped by the
  * generated listing. */
347 {
348 const float f_stop = this->get_f_stop();
349 const float focal_length = this->get_focal_length();
350 const float max_radius = node_storage(this->bnode()).maxblur;
351 const float pixels_per_meter = this->compute_pixels_per_meter();
352 const float distance_to_image_of_focus = this->compute_distance_to_image_of_focus();
353
354 Result &input_depth = get_input("Z");
355
356 /* Given a depth value, compute the radius of the circle of confusion in pixels based on
357 * equation (8) of the paper:
358 *
359 * Potmesil, Michael, and Indranil Chakravarty. "A lens and aperture camera model for
360 * synthetic image generation." ACM SIGGRAPH Computer Graphics 15.3 (1981): 297-305. */
361 auto compute_radius = [&](const float depth) {
362 /* Compute `Vu` in equation (7). */
363 const float distance_to_image_of_object = (focal_length * depth) / (depth - focal_length);
364
365 /* Compute C in equation (8). Notice that the last multiplier was included in the absolute
366 * since it is negative when the object distance is less than the focal length, as noted in
367 * equation (7). */
368 float diameter = math::abs((distance_to_image_of_object - distance_to_image_of_focus) *
369 (focal_length / (f_stop * distance_to_image_of_object)));
370
371 /* The diameter is in meters, so multiply by the pixels per meter. */
372 float radius = (diameter / 2.0f) * pixels_per_meter;
373
374 return math::min(max_radius, radius);
375 };
376
377 if (input_depth.is_single_value()) {
378 output_radius.allocate_single_value();
379 output_radius.set_single_value(compute_radius(input_depth.get_single_value<float>()));
380 return;
381 }
382
383 const Domain domain = input_depth.domain();
384 output_radius.allocate_texture(domain);
385
386 parallel_for(domain.size, [&](const int2 texel) {
387 float depth = input_depth.load_pixel<float>(texel);
388 output_radius.store_pixel(texel, compute_radius(depth));
389 });
390 }
391
392 /* Computes the maximum possible defocus radius in pixels. In no-Z-buffer
  * (mask) mode this is simply the user maximum; otherwise it is derived from
  * the limiting circle-of-confusion diameter, capped by the user maximum.
  * NOTE(review): the signature line (original line 393) was dropped by the
  * generated listing. */
394 {
395 if (node_storage(bnode()).no_zbuf) {
396 return node_storage(bnode()).maxblur;
397 }
398
399 const float maximum_diameter = compute_maximum_diameter_of_circle_of_confusion();
400 const float pixels_per_meter = compute_pixels_per_meter();
401 const float radius = (maximum_diameter / 2.0f) * pixels_per_meter;
402 return math::min(radius, node_storage(bnode()).maxblur);
403 }
404
405 /* Computes the diameter of the circle of confusion at infinity. This computes the limit in
406 * figure (5) of the paper:
407 *
408 * Potmesil, Michael, and Indranil Chakravarty. "A lens and aperture camera model for synthetic
409 * image generation." ACM SIGGRAPH Computer Graphics 15.3 (1981): 297-305.
410 *
411 * Notice that the diameter is asymmetric around the focus point, and we are computing the
412 * limiting diameter at infinity, while another limiting diameter exist at zero distance from the
413 * lens. This is a limitation of the implementation, as it assumes far defocusing only.
  *
  * NOTE(review): the signature line (original line 414) was dropped by the
  * generated listing. */
415 {
416 const float f_stop = get_f_stop();
417 const float focal_length = get_focal_length();
418 const float distance_to_image_of_focus = compute_distance_to_image_of_focus();
419 return math::abs((distance_to_image_of_focus / (f_stop * focal_length)) -
420 (focal_length / f_stop));
421 }
422
423 /* Computes the distance in meters to the image of the focus point across a lens of the specified
424 * focal length. This computes `Vp` in equation (7) of the paper:
425 *
426 * Potmesil, Michael, and Indranil Chakravarty. "A lens and aperture camera model for synthetic
427 * image generation." ACM SIGGRAPH Computer Graphics 15.3 (1981): 297-305.
  *
  * This is the thin-lens equation solved for the image distance. NOTE(review):
  * the signature line (original line 428) was dropped by the generated
  * listing. */
429 {
430 const float focal_length = get_focal_length();
431 const float focus_distance = compute_focus_distance();
432 return (focal_length * focus_distance) / (focus_distance - focal_length);
433 }
434
435 /* Returns the focal length in meters. Fall back to 50 mm in case of an invalid camera. Ensure a
436 * minimum of 1e-6.
  * NOTE(review): the signature line (original line 437) was dropped by the
  * generated listing. */
438 {
439 const Camera *camera = get_camera();
 /* Stored lens value is in millimeters; convert to meters. */
440 return camera ? math::max(1e-6f, camera->lens / 1000.0f) : 50.0f / 1000.0f;
441 }
442
443 /* Computes the distance to the point that is completely in focus. Default to 10 meters for null
444 * camera.
  * NOTE(review): the signature line (original line 445) was dropped by the
  * generated listing. */
446 {
447 const Object *camera_object = get_camera_object();
448 if (!camera_object) {
449 return 10.0f;
450 }
451 return BKE_camera_object_dof_distance(camera_object);
452 }
453
454 /* Computes the number of pixels per meter of the sensor size. This is essentially the resolution
455 * over the sensor size, using the sensor fit axis. Fall back to DEFAULT_SENSOR_WIDTH in case of
456 * an invalid camera. Note that the stored sensor size is in millimeter, so convert to meters.
  * NOTE(review): the signature line and the switch `case` labels (original
  * lines 457, 467, 469, 471 — per the index, CAMERA_SENSOR_FIT_HOR,
  * CAMERA_SENSOR_FIT_VERT and CAMERA_SENSOR_FIT_AUTO) were dropped by the
  * generated listing; the three returns below correspond to horizontal,
  * vertical, and automatic (larger-axis) sensor fit respectively — confirm
  * the label order against the real source. */
458 {
459 const int2 size = compute_domain().size;
460 const Camera *camera = get_camera();
461 const float default_value = size.x / (DEFAULT_SENSOR_WIDTH / 1000.0f);
462 if (!camera) {
463 return default_value;
464 }
465
466 switch (camera->sensor_fit) {
468 return size.x / (camera->sensor_x / 1000.0f);
470 return size.y / (camera->sensor_y / 1000.0f);
472 return size.x > size.y ? size.x / (camera->sensor_x / 1000.0f) :
473 size.y / (camera->sensor_y / 1000.0f);
474 }
475 default:
476 break;
477 }
478
479 return default_value;
480 }
481
482 /* Returns the f-stop number. Fall back to 1e-3 for zero f-stop.
  * NOTE(review): the signature lines of the four accessors below (original
  * lines 483, 488, 498, 503) were dropped by the generated listing. */
484 {
485 return math::max(1e-3f, node_storage(bnode()).fstop);
486 }
487
 /* Returns the Camera data of the scene's active camera object, or null if
  * there is no camera object or it is not of camera type. */
489 {
490 const Object *camera_object = get_camera_object();
491 if (!camera_object || camera_object->type != OB_CAMERA) {
492 return nullptr;
493 }
494
495 return reinterpret_cast<Camera *>(camera_object->data);
496 }
497
 /* Returns the active camera object of the effective scene. */
499 {
500 return get_scene()->camera;
501 }
502
 /* Returns the scene linked to the node if any, otherwise the context scene. */
504 {
505 return bnode().id ? reinterpret_cast<Scene *>(bnode().id) : &context().get_scene();
506 }
507};
508
 /* Factory hooked into bNodeType::get_compositor_operation below.
  * NOTE(review): the signature line (`get_compositor_operation(Context &,
  * DNode)` per the index, original line 509) was dropped by the generated
  * listing. */
510{
511 return new DefocusOperation(context, node);
512}
513
514} // namespace blender::nodes::node_composite_defocus_cc
515
 /* Registers the Defocus compositor node type. NOTE(review): several lines
  * were dropped by the generated listing — the signature (original line 516),
  * the file_ns namespace alias (518/519), likely the ui_icon/nclass setup
  * (526), node_type_storage with the standard free/copy callbacks (530/531),
  * and the final node_register_type call (534); confirm against the real
  * source. */
517{
519
520 static blender::bke::bNodeType ntype;
521
522 cmp_node_type_base(&ntype, "CompositorNodeDefocus", CMP_NODE_DEFOCUS);
523 ntype.ui_name = "Defocus";
524 ntype.ui_description = "Apply depth of field in 2D, using a Z depth map or mask";
525 ntype.enum_name_legacy = "DEFOCUS";
527 ntype.declare = file_ns::cmp_node_defocus_declare;
528 ntype.draw_buttons = file_ns::node_composit_buts_defocus;
529 ntype.initfunc = file_ns::node_composit_init_defocus;
532 ntype.get_compositor_operation = file_ns::get_compositor_operation;
533
535}
Camera data-block and utility functions.
float BKE_camera_object_dof_distance(const struct Object *ob)
#define NODE_STORAGE_FUNCS(StorageT)
Definition BKE_node.hh:1240
#define NODE_CLASS_OP_FILTER
Definition BKE_node.hh:451
#define CMP_NODE_DEFOCUS
#define IFACE_(msgid)
@ CAMERA_SENSOR_FIT_HOR
@ CAMERA_SENSOR_FIT_AUTO
@ CAMERA_SENSOR_FIT_VERT
#define DEFAULT_SENSOR_WIDTH
Object is a sort of wrapper for general info.
@ OB_CAMERA
void GPU_shader_uniform_1f(blender::gpu::Shader *sh, const char *name, float value)
void GPU_shader_bind(blender::gpu::Shader *shader, const blender::gpu::shader::SpecializationConstants *constants_state=nullptr)
void GPU_shader_uniform_1i(blender::gpu::Shader *sh, const char *name, int value)
void GPU_shader_unbind()
void GPU_texture_filter_mode(blender::gpu::Texture *texture, bool use_filter)
#define NOD_REGISTER_NODE(REGISTER_FUNC)
#define C
Definition RandGen.cpp:29
void uiTemplateID(uiLayout *layout, const bContext *C, PointerRNA *ptr, blender::StringRefNull propname, const char *newop, const char *openop, const char *unlinkop, int filter=UI_TEMPLATE_ID_FILTER_ALL, bool live_icon=false, std::optional< blender::StringRef > text=std::nullopt)
@ UI_ITEM_R_SPLIT_EMPTY_NAME
static DBVT_INLINE btScalar size(const btDbvtVolume &a)
Definition btDbvt.cpp:52
Result & get(Context &context, int2 size, int sides, float rotation, float roundness, float catadioptric, float lens_shift)
gpu::Shader * get_shader(const char *info_name, ResultPrecision precision)
NodeOperation(Context &context, DNode node)
Result & get_result(StringRef identifier)
Definition operation.cc:39
Result & get_input(StringRef identifier) const
Definition operation.cc:138
virtual Domain compute_domain()
Definition operation.cc:56
void allocate_texture(const Domain domain, const bool from_pool=true, const std::optional< ResultStorageType > storage_type=std::nullopt)
Definition result.cc:389
void unbind_as_texture() const
Definition result.cc:511
void set_single_value(const T &value)
void bind_as_texture(gpu::Shader *shader, const char *texture_name) const
Definition result.cc:487
const Domain & domain() const
void unbind_as_image() const
Definition result.cc:517
float4 sample_bilinear_extended(const float2 &coordinates) const
void bind_as_image(gpu::Shader *shader, const char *image_name, bool read=false) const
Definition result.cc:498
bool is_single_value() const
Definition result.cc:758
const T & get_single_value() const
void execute_gpu(const Result &input, const Result &radius, const Result &bokeh_kernel, Result &output, const int search_radius)
void execute_cpu(const Result &input, const Result &radius, const Result &bokeh_kernel, Result &output, const int search_radius)
uint col
#define input
#define output
void * MEM_callocN(size_t len, const char *str)
Definition mallocn.cc:118
void node_register_type(bNodeType &ntype)
Definition node.cc:2416
void node_type_storage(bNodeType &ntype, std::optional< StringRefNull > storagename, void(*freefunc)(bNode *node), void(*copyfunc)(bNodeTree *dest_ntree, bNode *dest_node, const bNode *src_node))
Definition node.cc:5414
void compute_dispatch_threads_at_least(gpu::Shader *shader, int2 threads_range, int2 local_size=int2(16))
Definition utilities.cc:196
void morphological_blur(Context &context, const Result &input, Result &output, const float2 &radius, const MorphologicalBlurOperation operation=MorphologicalBlurOperation::Erode, const int filter_type=R_FILTER_GAUSS)
void parallel_for(const int2 range, const Function &function)
T clamp(const T &a, const T &min, const T &max)
T safe_divide(const T &a, const T &b)
T min(const T &a, const T &b)
T ceil(const T &a)
T max(const T &a, const T &b)
T abs(const T &a)
static void node_composit_buts_defocus(uiLayout *layout, bContext *C, PointerRNA *ptr)
static void cmp_node_defocus_declare(NodeDeclarationBuilder &b)
static void node_composit_init_defocus(bNodeTree *, bNode *node)
static NodeOperation * get_compositor_operation(Context &context, DNode node)
VecBase< int32_t, 2 > int2
VecBase< float, 2 > float2
static void register_node_type_cmp_defocus()
void cmp_node_type_base(blender::bke::bNodeType *ntype, std::string idname, const std::optional< int16_t > legacy_type)
void node_free_standard_storage(bNode *node)
Definition node_util.cc:42
void node_copy_standard_storage(bNodeTree *, bNode *dest_node, const bNode *src_node)
Definition node_util.cc:54
bool RNA_boolean_get(PointerRNA *ptr, const char *name)
#define min(a, b)
Definition sort.cc:36
char sensor_fit
float sensor_y
float sensor_x
void * storage
Defines a node type.
Definition BKE_node.hh:238
std::string ui_description
Definition BKE_node.hh:244
NodeGetCompositorOperationFunction get_compositor_operation
Definition BKE_node.hh:348
void(* initfunc)(bNodeTree *ntree, bNode *node)
Definition BKE_node.hh:289
const char * enum_name_legacy
Definition BKE_node.hh:247
void(* draw_buttons)(uiLayout *, bContext *C, PointerRNA *ptr)
Definition BKE_node.hh:259
NodeDeclareFunction declare
Definition BKE_node.hh:362
uiLayout & column(bool align)
void active_set(bool active)
void prop(PointerRNA *ptr, PropertyRNA *prop, int index, int value, eUI_Item_Flag flag, std::optional< blender::StringRef > name_opt, int icon, std::optional< blender::StringRef > placeholder=std::nullopt)
max
Definition text_draw.cc:251
static pxr::UsdShadeInput get_input(const pxr::UsdShadeShader &usd_shader, const pxr::TfToken &input_name)
PointerRNA * ptr
Definition wm_files.cc:4238