Blender V4.3
vk_command_builder.cc
/* SPDX-FileCopyrightText: 2024 Blender Authors
 *
 * SPDX-License-Identifier: GPL-2.0-or-later */

#include "vk_render_graph.hh"

namespace blender::gpu::render_graph {

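/* Initialize the buffer/image memory barrier templates. `add_buffer_barrier` and
 * `add_image_barrier` copy these templates into the barrier lists and reset the fields they
 * modified afterwards. */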
VKCommandBuilder::VKCommandBuilder()
{
  vk_buffer_memory_barrier_ = {};
  vk_buffer_memory_barrier_.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
  vk_buffer_memory_barrier_.pNext = nullptr;
  vk_buffer_memory_barrier_.srcAccessMask = VK_ACCESS_NONE;
  vk_buffer_memory_barrier_.dstAccessMask = VK_ACCESS_NONE;
  vk_buffer_memory_barrier_.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  vk_buffer_memory_barrier_.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  vk_buffer_memory_barrier_.buffer = VK_NULL_HANDLE;
  vk_buffer_memory_barrier_.offset = 0;
  vk_buffer_memory_barrier_.size = VK_WHOLE_SIZE;

  vk_image_memory_barrier_ = {};
  vk_image_memory_barrier_.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  vk_image_memory_barrier_.pNext = nullptr;
  vk_image_memory_barrier_.srcAccessMask = VK_ACCESS_NONE;
  vk_image_memory_barrier_.dstAccessMask = VK_ACCESS_NONE;
  vk_image_memory_barrier_.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  vk_image_memory_barrier_.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  vk_image_memory_barrier_.image = VK_NULL_HANDLE;
  vk_image_memory_barrier_.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  vk_image_memory_barrier_.newLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  vk_image_memory_barrier_.subresourceRange.aspectMask = VK_IMAGE_ASPECT_NONE;
  vk_image_memory_barrier_.subresourceRange.baseArrayLayer = 0;
  vk_image_memory_barrier_.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS;
  vk_image_memory_barrier_.subresourceRange.baseMipLevel = 0;
  vk_image_memory_barrier_.subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS;
}

/* -------------------------------------------------------------------- */
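/* Record all commands of the given nodes into the command buffer. Consecutive nodes that belong
 * to the same rendering scope are grouped so they can be recorded as a single node group. */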
void VKCommandBuilder::build_nodes(VKRenderGraph &render_graph,
                                   VKCommandBufferInterface &command_buffer,
                                   Span<NodeHandle> nodes)
{
  /* Swap chain image layouts need to be reset as the image layouts are changed externally. */
  render_graph.resources_.reset_image_layouts();

  state_.active_pipelines = {};

  command_buffer.begin_recording();
  state_.debug_level = 0;
  state_.active_debug_group_id = -1;
  std::optional<NodeHandle> rendering_scope;
  IndexRange nodes_range = nodes.index_range();
  while (!nodes_range.is_empty()) {
    IndexRange node_group = nodes_range.slice(0, 1);
    NodeHandle node_handle = nodes[nodes_range.first()];
    VKRenderGraphNode &node = render_graph.nodes_[node_handle];
    while (node_type_is_rendering(node.type) && node_group.size() < nodes_range.size()) {
      NodeHandle node_handle = nodes[nodes_range[node_group.size()]];
      VKRenderGraphNode &node = render_graph.nodes_[node_handle];
      if (!node_type_is_rendering(node.type) || node.type == VKNodeType::BEGIN_RENDERING) {
        break;
      }
      node_group = nodes_range.slice(0, node_group.size() + 1);
    }

    build_node_group(render_graph, command_buffer, nodes.slice(node_group), rendering_scope);
    nodes_range = nodes_range.drop_front(node_group.size());
  }

  finish_debug_groups(command_buffer);
  state_.debug_level = 0;

  command_buffer.end_recording();
}

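/* Record the commands of a single node group. Pipeline barriers of all nodes in the group are
 * recorded up-front, after that the commands of each node are built. `r_rendering_scope` keeps
 * track of the active BEGIN_RENDERING node so a suspended rendering scope can be resumed by a
 * later node group. */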
void VKCommandBuilder::build_node_group(VKRenderGraph &render_graph,
                                        VKCommandBufferInterface &command_buffer,
                                        Span<NodeHandle> node_group,
                                        std::optional<NodeHandle> &r_rendering_scope)
{
  bool is_rendering = false;
  for (NodeHandle node_handle : node_group) {
    VKRenderGraphNode &node = render_graph.nodes_[node_handle];
#if 0
    std::cout << "node_group: " << node_group.first() << "-" << node_group.last()
              << ", node_handle: " << node_handle << ", node_type: " << node.type << "\n";
#endif
#if 0
    render_graph.debug_print(node_handle);
#endif
    build_pipeline_barriers(render_graph, command_buffer, node_handle, node.pipeline_stage_get());
    if (node.type == VKNodeType::BEGIN_RENDERING) {
      layer_tracking_begin(render_graph, node_handle);
    }
  }

  for (NodeHandle node_handle : node_group) {
    VKRenderGraphNode &node = render_graph.nodes_[node_handle];
    if (node.type == VKNodeType::BEGIN_RENDERING) {
      BLI_assert(!r_rendering_scope.has_value());
      BLI_assert(!is_rendering);
      r_rendering_scope = node_handle;
      is_rendering = true;

      /* Check if the node_group spans a full rendering scope. In that case we don't need to set
       * the VK_RENDERING_SUSPENDING_BIT. */
      const VKRenderGraphNode &last_node = render_graph.nodes_[node_group[node_group.size() - 1]];
      bool will_be_suspended = last_node.type != VKNodeType::END_RENDERING;
      if (will_be_suspended) {
        node.begin_rendering.vk_rendering_info.flags = VK_RENDERING_SUSPENDING_BIT;
      }
    }
    else if (node.type == VKNodeType::END_RENDERING) {
      BLI_assert(r_rendering_scope.has_value());
      r_rendering_scope.reset();
      is_rendering = false;
    }
    else if (node_type_is_within_rendering(node.type)) {
      BLI_assert(r_rendering_scope.has_value());
      if (!is_rendering) {
        /* Resume the paused rendering scope. */
        VKRenderGraphNode &rendering_node = render_graph.nodes_[*r_rendering_scope];
        rendering_node.begin_rendering.vk_rendering_info.flags = VK_RENDERING_RESUMING_BIT;
        rendering_node.build_commands(command_buffer, state_.active_pipelines);
        is_rendering = true;
      }
    }
#if 0
    std::cout << "node_group: " << node_group.first() << "-" << node_group.last()
              << ", node_handle: " << node_handle << ", node_type: " << node.type << "\n";
#endif
    if (G.debug & G_DEBUG_GPU) {
      activate_debug_group(render_graph, command_buffer, node_handle);
    }
    node.build_commands(command_buffer, state_.active_pipelines);

    /* When a layered image has different layer layouts, reset the layouts to
     * VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL. */
    if (node.type == VKNodeType::END_RENDERING && state_.subresource_tracking_enabled()) {
      layer_tracking_end(command_buffer, false);
    }
  }
  if (is_rendering) {
    /* Suspend rendering as the next node group will contain data transfer/dispatch commands. */
    is_rendering = false;
    command_buffer.end_rendering();
    if (state_.subresource_tracking_enabled()) {
      layer_tracking_end(command_buffer, true);
    }
  }
}

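/* Activate the debug group of the given node by popping and pushing debug utils labels until the
 * label stack matches the node's debug group. */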
void VKCommandBuilder::activate_debug_group(VKRenderGraph &render_graph,
                                            VKCommandBufferInterface &command_buffer,
                                            NodeHandle node_handle)
{
  VKRenderGraph::DebugGroupID debug_group = render_graph.debug_.node_group_map[node_handle];
  if (debug_group == state_.active_debug_group_id) {
    return;
  }

  /* Determine the number of pops and pushes that will happen on the debug stack. */
  int num_ends = 0;
  int num_begins = 0;

  if (debug_group == -1) {
    num_ends = state_.debug_level;
  }
  else {
    Vector<VKRenderGraph::DebugGroupNameID> &to_group =
        render_graph.debug_.used_groups[debug_group];
    if (state_.active_debug_group_id != -1) {
      Vector<VKRenderGraph::DebugGroupNameID> &from_group =
          render_graph.debug_.used_groups[state_.active_debug_group_id];

      num_ends = max_ii(from_group.size() - to_group.size(), 0);
      int num_checks = min_ii(from_group.size(), to_group.size());
      for (int index : IndexRange(num_checks)) {
        if (from_group[index] != to_group[index]) {
          num_ends += num_checks - index;
          break;
        }
      }
    }

    num_begins = to_group.size() - (state_.debug_level - num_ends);
  }

  /* Perform the pops from the debug stack. */
  for (int index = 0; index < num_ends; index++) {
    command_buffer.end_debug_utils_label();
  }
  state_.debug_level -= num_ends;

  /* Perform the pushes to the debug stack. */
  if (num_begins > 0) {
    Vector<VKRenderGraph::DebugGroupNameID> &to_group =
        render_graph.debug_.used_groups[debug_group];
    VkDebugUtilsLabelEXT debug_utils_label = {};
    debug_utils_label.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
    for (int index : IndexRange(state_.debug_level, num_begins)) {
      std::string group_name = render_graph.debug_.group_names[to_group[index]];
      debug_utils_label.pLabelName = group_name.c_str();
      command_buffer.begin_debug_utils_label(&debug_utils_label);
    }
  }

  state_.debug_level += num_begins;
  state_.active_debug_group_id = debug_group;
}

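/* Pop any debug utils labels that are still on the stack. */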
void VKCommandBuilder::finish_debug_groups(VKCommandBufferInterface &command_buffer)
{
  for (int i = 0; i < state_.debug_level; i++) {
    command_buffer.end_debug_utils_label();
  }
  state_.debug_level = 0;
}

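/* Record a pipeline barrier containing the buffer and image memory barriers that are needed
 * before the given node can be executed. */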
void VKCommandBuilder::build_pipeline_barriers(VKRenderGraph &render_graph,
                                               VKCommandBufferInterface &command_buffer,
                                               NodeHandle node_handle,
                                               VkPipelineStageFlags pipeline_stage)
{
  reset_barriers();
  add_image_barriers(render_graph, node_handle, pipeline_stage);
  add_buffer_barriers(render_graph, node_handle, pipeline_stage);
  send_pipeline_barriers(command_buffer);
}

/* -------------------------------------------------------------------- */
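/* Clear the pending barrier state so a new batch of barriers can be collected. */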
void VKCommandBuilder::reset_barriers()
{
  vk_buffer_memory_barriers_.clear();
  vk_image_memory_barriers_.clear();
  state_.src_stage_mask = VK_PIPELINE_STAGE_NONE;
  state_.dst_stage_mask = VK_PIPELINE_STAGE_NONE;
}

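/* Flush the collected buffer/image memory barriers as a single pipeline barrier command. Does
 * nothing when no barriers have been collected. */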
void VKCommandBuilder::send_pipeline_barriers(VKCommandBufferInterface &command_buffer)
{
  if (vk_image_memory_barriers_.is_empty() && vk_buffer_memory_barriers_.is_empty()) {
    reset_barriers();
    return;
  }

  /* When no resources have been used, we can start the barrier at the top of the pipeline.
   * It is not allowed to set it to None. */
  /* TODO: VK_KHR_synchronization2 allows setting src_stage_mask to NONE. */
  if (state_.src_stage_mask == VK_PIPELINE_STAGE_NONE) {
    state_.src_stage_mask = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
  }

  command_buffer.pipeline_barrier(state_.src_stage_mask,
                                  state_.dst_stage_mask,
                                  VK_DEPENDENCY_BY_REGION_BIT,
                                  0,
                                  nullptr,
                                  vk_buffer_memory_barriers_.size(),
                                  vk_buffer_memory_barriers_.data(),
                                  vk_image_memory_barriers_.size(),
                                  vk_image_memory_barriers_.data());
  reset_barriers();
}

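/* Add barriers for all buffer resources read and written by the given node. */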
void VKCommandBuilder::add_buffer_barriers(VKRenderGraph &render_graph,
                                           NodeHandle node_handle,
                                           VkPipelineStageFlags node_stages)
{
  add_buffer_read_barriers(render_graph, node_handle, node_stages);
  add_buffer_write_barriers(render_graph, node_handle, node_stages);
}

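/* Add barriers for buffer resources the node reads from. Reads that are already covered by a
 * previously recorded barrier are skipped. */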
void VKCommandBuilder::add_buffer_read_barriers(VKRenderGraph &render_graph,
                                                NodeHandle node_handle,
                                                VkPipelineStageFlags node_stages)
{
  for (const VKRenderGraphLink &link : render_graph.links_[node_handle].inputs) {
    const ResourceWithStamp &versioned_resource = link.resource;
    VKResourceStateTracker::Resource &resource = render_graph.resources_.resources_.lookup(
        versioned_resource.handle);
    if (resource.type == VKResourceType::IMAGE) {
      /* Ignore image resources. */
      continue;
    }
    VKResourceBarrierState &resource_state = resource.barrier_state;
    const bool is_first_read = resource_state.is_new_stamp();
    if (!is_first_read &&
        (resource_state.vk_access & link.vk_access_flags) == link.vk_access_flags &&
        (resource_state.vk_pipeline_stages & node_stages) == node_stages)
    {
      /* Has already been covered in a previous call; no need to add this one. */
      continue;
    }

    const VkAccessFlags wait_access = resource_state.vk_access;

    state_.src_stage_mask |= resource_state.vk_pipeline_stages;
    state_.dst_stage_mask |= node_stages;

    if (is_first_read) {
      resource_state.vk_access = link.vk_access_flags;
      resource_state.vk_pipeline_stages = node_stages;
    }
    else {
      resource_state.vk_access |= link.vk_access_flags;
      resource_state.vk_pipeline_stages |= node_stages;
    }

    add_buffer_barrier(resource.buffer.vk_buffer, wait_access, link.vk_access_flags);
  }
}

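/* Add barriers for buffer resources the node writes to. A barrier is only needed when the buffer
 * has been accessed before. */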
void VKCommandBuilder::add_buffer_write_barriers(VKRenderGraph &render_graph,
                                                 NodeHandle node_handle,
                                                 VkPipelineStageFlags node_stages)
{
  for (const VKRenderGraphLink &link : render_graph.links_[node_handle].outputs) {
    const ResourceWithStamp &versioned_resource = link.resource;
    VKResourceStateTracker::Resource &resource = render_graph.resources_.resources_.lookup(
        versioned_resource.handle);
    if (resource.type == VKResourceType::IMAGE) {
      /* Ignore image resources. */
      continue;
    }
    VKResourceBarrierState &resource_state = resource.barrier_state;
    const VkAccessFlags wait_access = resource_state.vk_access;

    state_.src_stage_mask |= resource_state.vk_pipeline_stages;
    state_.dst_stage_mask |= node_stages;

    resource_state.vk_access = link.vk_access_flags;
    resource_state.vk_pipeline_stages = node_stages;

    if (wait_access != VK_ACCESS_NONE) {
      add_buffer_barrier(resource.buffer.vk_buffer, wait_access, link.vk_access_flags);
    }
  }
}

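/* Queue a VkBufferMemoryBarrier for the given buffer, merging it with an already queued barrier
 * for the same buffer when possible. */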
void VKCommandBuilder::add_buffer_barrier(VkBuffer vk_buffer,
                                          VkAccessFlags src_access_mask,
                                          VkAccessFlags dst_access_mask)
{
  for (VkBufferMemoryBarrier &vk_buffer_memory_barrier : vk_buffer_memory_barriers_) {
    if (vk_buffer_memory_barrier.buffer == vk_buffer) {
      /* When registering read/write buffers, it can be that the node internally requires
       * read/write. In this case we adjust the dstAccessMask of the read barrier. */
      if ((vk_buffer_memory_barrier.dstAccessMask & src_access_mask) == src_access_mask) {
        vk_buffer_memory_barrier.dstAccessMask |= dst_access_mask;
        return;
      }
      /* When re-registering resources we can skip if the access masks already contain all the
       * flags. */
      if ((vk_buffer_memory_barrier.dstAccessMask & dst_access_mask) == dst_access_mask &&
          (vk_buffer_memory_barrier.srcAccessMask & src_access_mask) == src_access_mask)
      {
        return;
      }
    }
  }

  vk_buffer_memory_barrier_.srcAccessMask = src_access_mask;
  vk_buffer_memory_barrier_.dstAccessMask = dst_access_mask;
  vk_buffer_memory_barrier_.buffer = vk_buffer;
  vk_buffer_memory_barriers_.append(vk_buffer_memory_barrier_);
  vk_buffer_memory_barrier_.srcAccessMask = VK_ACCESS_NONE;
  vk_buffer_memory_barrier_.dstAccessMask = VK_ACCESS_NONE;
  vk_buffer_memory_barrier_.buffer = VK_NULL_HANDLE;
}

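/* Add barriers and layout transitions for all image resources read and written by the given
 * node. */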
void VKCommandBuilder::add_image_barriers(VKRenderGraph &render_graph,
                                          NodeHandle node_handle,
                                          VkPipelineStageFlags node_stages)
{
  add_image_read_barriers(render_graph, node_handle, node_stages);
  add_image_write_barriers(render_graph, node_handle, node_stages);
}

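/* Add barriers and layout transitions for image resources the node reads from. Layered
 * attachments that need a per-layer layout transition are handled by layer tracking. */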
void VKCommandBuilder::add_image_read_barriers(VKRenderGraph &render_graph,
                                               NodeHandle node_handle,
                                               VkPipelineStageFlags node_stages)
{
  for (const VKRenderGraphLink &link : render_graph.links_[node_handle].inputs) {
    const ResourceWithStamp &versioned_resource = link.resource;
    VKResourceStateTracker::Resource &resource = render_graph.resources_.resources_.lookup(
        versioned_resource.handle);
    if (resource.type == VKResourceType::BUFFER) {
      /* Ignore buffer resources. */
      continue;
    }
    VKResourceBarrierState &resource_state = resource.barrier_state;
    const bool is_first_read = resource_state.is_new_stamp();
    if (!is_first_read &&
        (resource_state.vk_access & link.vk_access_flags) == link.vk_access_flags &&
        (resource_state.vk_pipeline_stages & node_stages) == node_stages &&
        resource_state.image_layout == link.vk_image_layout)
    {
      /* Has already been covered in a previous barrier; no need to add this one. */
      continue;
    }

    if (state_.layered_attachments.contains(resource.image.vk_image) &&
        resource_state.image_layout != link.vk_image_layout)
    {
      layer_tracking_update(resource.image.vk_image,
                            link.layer_base,
                            link.layer_count,
                            resource_state.image_layout,
                            link.vk_image_layout);
      continue;
    }

    VkAccessFlags wait_access = resource_state.vk_access;

    state_.src_stage_mask |= resource_state.vk_pipeline_stages;
    state_.dst_stage_mask |= node_stages;

    if (is_first_read) {
      resource_state.vk_access = link.vk_access_flags;
      resource_state.vk_pipeline_stages = node_stages;
    }
    else {
      resource_state.vk_access |= link.vk_access_flags;
      resource_state.vk_pipeline_stages |= node_stages;
    }

    add_image_barrier(resource.image.vk_image,
                      wait_access,
                      link.vk_access_flags,
                      resource_state.image_layout,
                      link.vk_image_layout,
                      link.vk_image_aspect);
    resource_state.image_layout = link.vk_image_layout;
  }
}

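/* Add barriers and layout transitions for image resources the node writes to. */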
void VKCommandBuilder::add_image_write_barriers(VKRenderGraph &render_graph,
                                                NodeHandle node_handle,
                                                VkPipelineStageFlags node_stages)
{
  for (const VKRenderGraphLink &link : render_graph.links_[node_handle].outputs) {
    const ResourceWithStamp &versioned_resource = link.resource;
    VKResourceStateTracker::Resource &resource = render_graph.resources_.resources_.lookup(
        versioned_resource.handle);
    if (resource.type == VKResourceType::BUFFER) {
      /* Ignore buffer resources. */
      continue;
    }
    VKResourceBarrierState &resource_state = resource.barrier_state;
    const VkAccessFlags wait_access = resource_state.vk_access;

    if (state_.layered_attachments.contains(resource.image.vk_image) &&
        resource_state.image_layout != link.vk_image_layout)
    {
      layer_tracking_update(resource.image.vk_image,
                            link.layer_base,
                            link.layer_count,
                            resource_state.image_layout,
                            link.vk_image_layout);
      continue;
    }

    state_.src_stage_mask |= resource_state.vk_pipeline_stages;
    state_.dst_stage_mask |= node_stages;

    resource_state.vk_access = link.vk_access_flags;
    resource_state.vk_pipeline_stages = node_stages;

    if (wait_access != VK_ACCESS_NONE || link.vk_image_layout != resource_state.image_layout) {
      add_image_barrier(resource.image.vk_image,
                        wait_access,
                        link.vk_access_flags,
                        resource_state.image_layout,
                        link.vk_image_layout,
                        link.vk_image_aspect);
      resource_state.image_layout = link.vk_image_layout;
    }
  }
}

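/* Queue a VkImageMemoryBarrier for the given image, merging it with an already queued barrier
 * for the same image when possible. */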
void VKCommandBuilder::add_image_barrier(VkImage vk_image,
                                         VkAccessFlags src_access_mask,
                                         VkAccessFlags dst_access_mask,
                                         VkImageLayout old_layout,
                                         VkImageLayout new_layout,
                                         VkImageAspectFlags aspect_mask,
                                         uint32_t layer_base,
                                         uint32_t layer_count)
{
  BLI_assert(aspect_mask != VK_IMAGE_ASPECT_NONE);
  for (VkImageMemoryBarrier &vk_image_memory_barrier : vk_image_memory_barriers_) {
    if (vk_image_memory_barrier.image == vk_image) {
      /* When registering read/write images, it can be that the node internally requires
       * read/write. In this case we adjust the dstAccessMask of the read barrier. An example is
       * the EEVEE HIZ update compute shader and shadow tagging. */
      if ((vk_image_memory_barrier.dstAccessMask & src_access_mask) == src_access_mask) {
        vk_image_memory_barrier.dstAccessMask |= dst_access_mask;
        return;
      }
      /* When re-registering resources we can skip if the access masks already contain all the
       * flags. */
      if ((vk_image_memory_barrier.dstAccessMask & dst_access_mask) == dst_access_mask &&
          (vk_image_memory_barrier.srcAccessMask & src_access_mask) == src_access_mask &&
          old_layout == new_layout)
      {
        return;
      }
    }
  }

  vk_image_memory_barrier_.srcAccessMask = src_access_mask;
  vk_image_memory_barrier_.dstAccessMask = dst_access_mask;
  vk_image_memory_barrier_.image = vk_image;
  vk_image_memory_barrier_.oldLayout = old_layout;
  vk_image_memory_barrier_.newLayout = new_layout;
  vk_image_memory_barrier_.subresourceRange.aspectMask = aspect_mask;
  vk_image_memory_barrier_.subresourceRange.baseArrayLayer = layer_base;
  vk_image_memory_barrier_.subresourceRange.layerCount = layer_count;
  vk_image_memory_barriers_.append(vk_image_memory_barrier_);
  /* Reset state for reuse. */
  vk_image_memory_barrier_.srcAccessMask = VK_ACCESS_NONE;
  vk_image_memory_barrier_.dstAccessMask = VK_ACCESS_NONE;
  vk_image_memory_barrier_.image = VK_NULL_HANDLE;
  vk_image_memory_barrier_.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  vk_image_memory_barrier_.newLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  vk_image_memory_barrier_.subresourceRange.aspectMask = VK_IMAGE_ASPECT_NONE;
  vk_image_memory_barrier_.subresourceRange.baseArrayLayer = 0;
  vk_image_memory_barrier_.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS;
}

/* -------------------------------------------------------------------- */
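/* Start tracking per-layer layouts for the layered attachments of a BEGIN_RENDERING node. */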
void VKCommandBuilder::layer_tracking_begin(const VKRenderGraph &render_graph,
                                            NodeHandle node_handle)
{
  BLI_assert(render_graph.nodes_[node_handle].type == VKNodeType::BEGIN_RENDERING);
  state_.layered_attachments.clear();
  state_.layered_bindings.clear();

  const VKRenderGraphNodeLinks &links = render_graph.links_[node_handle];
  for (const VKRenderGraphLink &link : links.outputs) {
    VKResourceStateTracker::Resource &resource = render_graph.resources_.resources_.lookup(
        link.resource.handle);
    if (resource.has_multiple_layers()) {
      state_.layered_attachments.add(resource.image.vk_image);
    }
  }
}

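/* Track a per-layer layout transition of a layered attachment that is accessed in a layout
 * different from the layout of the attachment as a whole. */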
void VKCommandBuilder::layer_tracking_update(VkImage vk_image,
                                             uint32_t layer,
                                             uint32_t layer_count,
                                             VkImageLayout old_layout,
                                             VkImageLayout new_layout)
{
  for (const LayeredImageBinding &binding : state_.layered_bindings) {
    if (binding.vk_image == vk_image && binding.layer == layer) {
      BLI_assert_msg(binding.vk_image_layout == new_layout,
                     "We don't support a single layer transitioning multiple times during a "
                     "rendering scope.");
      /* Early exit as the layer is already in the correct layout. This is a normal case as we
       * expect multiple draw commands to take place during a rendering scope with the same
       * layer access. */
      return;
    }
  }

  state_.layered_bindings.append({vk_image, new_layout, layer, layer_count});

  /* We should be able to do better. BOTTOM/TOP is really a worst case barrier. */
  state_.src_stage_mask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
  state_.dst_stage_mask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
  add_image_barrier(vk_image,
                    VK_ACCESS_TRANSFER_WRITE_BIT,
                    VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT |
                        VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
                        VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT |
                        VK_ACCESS_TRANSFER_WRITE_BIT,
                    old_layout,
                    new_layout,
                    VK_IMAGE_ASPECT_COLOR_BIT,
                    layer,
                    layer_count);
}

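/* Transition all tracked layers back to VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL. Tracking of the
 * layered attachments is kept when the rendering scope is only suspended. */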
void VKCommandBuilder::layer_tracking_end(VKCommandBufferInterface &command_buffer, bool suspend)
{
  if (!state_.layered_bindings.is_empty()) {
    reset_barriers();
    /* We should be able to do better. BOTTOM/TOP is really a worst case barrier. */
    state_.src_stage_mask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
    state_.dst_stage_mask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
    for (const LayeredImageBinding &binding : state_.layered_bindings) {
      add_image_barrier(
          binding.vk_image,
          VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT |
              VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
              VK_ACCESS_TRANSFER_READ_BIT | VK_ACCESS_TRANSFER_WRITE_BIT,
          VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT |
              VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
              VK_ACCESS_TRANSFER_READ_BIT | VK_ACCESS_TRANSFER_WRITE_BIT,
          binding.vk_image_layout,
          VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
          VK_IMAGE_ASPECT_COLOR_BIT,
          binding.layer,
          binding.layer_count);
    }
    send_pipeline_barriers(command_buffer);
  }
  state_.layered_bindings.clear();
  if (!suspend) {
    state_.layered_attachments.clear();
  }
}

}  // namespace blender::gpu::render_graph