Blender V4.3
vk_scheduler.cc
Go to the documentation of this file.
1/* SPDX-FileCopyrightText: 2024 Blender Authors
2 *
3 * SPDX-License-Identifier: GPL-2.0-or-later */
4
9#include <sstream>
10
11#include "vk_render_graph.hh"
12#include "vk_scheduler.hh"
13
14#include "BLI_index_range.hh"
15#include "BLI_set.hh"
16
18
20 VkImage vk_image)
21{
22 UNUSED_VARS(vk_image);
23 select_all_nodes(render_graph);
24 reorder_nodes(render_graph);
25 return result_;
26}
27
29 VkBuffer vk_buffer)
30{
31 UNUSED_VARS(vk_buffer);
32 select_all_nodes(render_graph);
33 reorder_nodes(render_graph);
34 return result_;
35}
36
38{
39 select_all_nodes(render_graph);
40 reorder_nodes(render_graph);
41 return result_;
42}
43
44void VKScheduler::select_all_nodes(const VKRenderGraph &render_graph)
45{
46 /* TODO: This will not work when we extract subgraphs. When subgraphs are removed the order in
47 * the render graph may not follow the order the nodes were added. */
48 result_.clear();
49 for (NodeHandle node_handle : render_graph.nodes_.index_range()) {
50 result_.append(node_handle);
51 }
52}
53
54/* -------------------------------------------------------------------- */
/* Reorder the previously selected nodes (`result_`) to improve command buffer construction.
 * Currently the only reordering pass moves transfer and dispatch nodes outside rendering
 * scopes. */
void VKScheduler::reorder_nodes(const VKRenderGraph &render_graph)
{
  move_transfer_and_dispatch_outside_rendering_scope(render_graph);
}
62
63std::optional<std::pair<int64_t, int64_t>> VKScheduler::find_rendering_scope(
64 const VKRenderGraph &render_graph, IndexRange search_range) const
65{
66 int64_t rendering_start = -1;
67
68 for (int64_t index : search_range) {
69 NodeHandle node_handle = result_[index];
70 const VKRenderGraphNode &node = render_graph.nodes_[node_handle];
71 if (node.type == VKNodeType::BEGIN_RENDERING) {
72 rendering_start = index;
73 }
74 if (node.type == VKNodeType::END_RENDERING && rendering_start != -1) {
75 return std::pair(rendering_start, index);
76 }
77 }
78 BLI_assert(rendering_start == -1);
79
80 return std::nullopt;
81}
82
85/* -------------------------------------------------------------------- */
89void VKScheduler::move_transfer_and_dispatch_outside_rendering_scope(
90 const VKRenderGraph &render_graph)
91{
92 Vector<NodeHandle> pre_rendering_scope;
93 Vector<NodeHandle> rendering_scope;
94 Set<ResourceHandle> used_buffers;
95
96 foreach_rendering_scope(render_graph, [&](int64_t start_index, int64_t end_index) {
97 /* Move end_rendering right after the last graphics node. */
98 for (int index = end_index - 1; index >= start_index; index--) {
99 NodeHandle node_handle = result_[index];
100 const VKRenderGraphNode &node = render_graph.nodes_[node_handle];
101 if (node_type_is_rendering(node.type)) {
102 break;
103 }
104 std::swap(result_[end_index], result_[index]);
105 end_index -= 1;
106 }
107
108 /* Move begin_rendering right before the first graphics node. */
109 for (int index = start_index + 1; index < end_index; index++) {
110 NodeHandle node_handle = result_[index];
111 const VKRenderGraphNode &node = render_graph.nodes_[node_handle];
112 if (node_type_is_rendering(node.type)) {
113 break;
114 }
115 std::swap(result_[start_index], result_[index]);
116 start_index += 1;
117 }
118
119 /* Move buffer update buffer commands to before the rendering scope, unless the buffer is
120 * already being used by a draw command. Images modification could also be moved outside the
121 * rendering scope, but it is more tricky as they could also be attached to the frame-buffer.
122 */
123 pre_rendering_scope.clear();
124 rendering_scope.clear();
125 used_buffers.clear();
126
127 for (int index = start_index + 1; index < end_index; index++) {
128 NodeHandle node_handle = result_[index];
129 const VKRenderGraphNode &node = render_graph.nodes_[node_handle];
130 /* Should we add this node to the rendering scope. This is only done when we need to reorder
131 * nodes. In that case the rendering_scope has already an item and we should add this node to
132 * or the rendering scope or before the rendering scope. Adding nodes before rendering scope
133 * is done in the VKNodeType::UPDATE_BUFFER branch. */
134 bool add_to_rendering_scope = !rendering_scope.is_empty();
135 if (node.type == VKNodeType::UPDATE_BUFFER) {
136 if (!used_buffers.contains(
137 render_graph.resources_.buffer_resources_.lookup(node.update_buffer.dst_buffer)))
138 {
139 /* Buffer isn't used by this rendering scope so we can safely move it before the
140 * rendering scope begins. */
141 pre_rendering_scope.append(node_handle);
142 add_to_rendering_scope = false;
143 /* When this is the first time we move a node before the rendering we should start
144 * building up the rendering scope as well. This is postponed so we can safe some cycles
145 * when no nodes needs to be moved at all. */
146 if (rendering_scope.is_empty()) {
147 rendering_scope.extend(Span<NodeHandle>(&result_[start_index], index - start_index));
148 }
149 }
150 }
151 if (add_to_rendering_scope) {
152 /* When rendering scope has an item we are rewriting the execution order and need to track
153 * what should be inside the rendering scope. */
154 rendering_scope.append(node_handle);
155 }
156
157 /* Any read/write to buffer resources should be added to used_buffers in order to detect if
158 * it is safe to move a node before the rendering scope.*/
159 const VKRenderGraphNodeLinks &links = render_graph.links_[node_handle];
160 for (const VKRenderGraphLink &input : links.inputs) {
161 if (render_graph.resources_.resource_type_get(input.resource.handle) ==
163 {
164 used_buffers.add(input.resource.handle);
165 }
166 }
167 for (const VKRenderGraphLink &output : links.outputs) {
168 if (render_graph.resources_.resource_type_get(output.resource.handle) ==
170 {
171 used_buffers.add(output.resource.handle);
172 }
173 }
174 }
175
176 /* When pre_rendering_scope has an item we want to rewrite the order.
177 * The number of nodes are not changed, so we can do this inline. */
178 if (!pre_rendering_scope.is_empty()) {
179 MutableSpan<NodeHandle> store_none_rendering = result_.as_mutable_span().slice(
180 start_index, pre_rendering_scope.size());
181 MutableSpan<NodeHandle> store_rendering = result_.as_mutable_span().slice(
182 start_index + pre_rendering_scope.size(), rendering_scope.size());
183 store_none_rendering.copy_from(pre_rendering_scope);
184 store_rendering.copy_from(rendering_scope);
185 start_index += pre_rendering_scope.size();
186 }
187 });
188}
189
192/* -------------------------------------------------------------------- */
196void VKScheduler::debug_print(const VKRenderGraph &render_graph) const
197{
198 std::stringstream ss;
199 int indent = 0;
200
201 for (int index : result_.index_range()) {
202 const NodeHandle node_handle = result_[index];
203 const VKRenderGraphNode &node = render_graph.nodes_[node_handle];
204 if (node.type == VKNodeType::END_RENDERING) {
205 indent--;
206 }
207 for (int i = 0; i < indent; i++) {
208 ss << " ";
209 }
210 ss << node.type << "\n";
211#if 0
212 render_graph.debug_print(node_handle);
213#endif
214 if (node.type == VKNodeType::BEGIN_RENDERING) {
215 indent++;
216 }
217 }
218 ss << "\n";
219
220 std::cout << ss.str();
221}
222
225} // namespace blender::gpu::render_graph
#define BLI_assert(a)
Definition BLI_assert.h:50
#define UNUSED_VARS(...)
void append(const T &value)
MutableSpan< T > as_mutable_span()
Span< NodeHandle > select_nodes(const VKRenderGraph &render_graph)
Span< NodeHandle > select_nodes_for_image(const VKRenderGraph &render_graph, VkImage vk_image)
Span< NodeHandle > select_nodes_for_buffer(const VKRenderGraph &render_graph, VkBuffer vk_buffer)
BLI_INLINE bool node_type_is_rendering(VKNodeType node_type)
__int64 int64_t
Definition stdint.h:89