Blender V4.5
vk_device.cc
Go to the documentation of this file.
1/* SPDX-FileCopyrightText: 2023 Blender Authors
2 *
3 * SPDX-License-Identifier: GPL-2.0-or-later */
4
8
9#include <sstream>
10
11#include "CLG_log.h"
12
13#include "vk_backend.hh"
14#include "vk_context.hh"
15#include "vk_device.hh"
16#include "vk_state_manager.hh"
17#include "vk_storage_buffer.hh"
18#include "vk_texture.hh"
19#include "vk_vertex_buffer.hh"
20
21#include "GPU_capabilities.hh"
22
24
25#include "GHOST_C-api.h"
26
28
29static CLG_LogRef LOG = {"gpu.vulkan"};
30
31namespace blender::gpu {
32
/* NOTE(review): the function signature (original line ~33) and the logging-call opener are
 * missing from this extraction. The body formats a CLOG report listing which optional device
 * features/extensions were detected: 'X' = available, ' ' = not. Several of the `%c` arguments
 * (original lines 50, 52, 55-56, 59) are also missing from this capture — confirm against the
 * full source before editing. */
34{
 36      2,
 37      "Device features\n"
 38      " - [%c] shader output viewport index\n"
 39      " - [%c] shader output layer\n"
 40      " - [%c] fragment shader barycentric\n"
 41      "Device extensions\n"
 42      " - [%c] descriptor buffer\n"
 43      " - [%c] dynamic rendering\n"
 44      " - [%c] dynamic rendering local read\n"
 45      " - [%c] dynamic rendering unused attachments\n"
 46      " - [%c] external memory\n"
 47      " - [%c] memory priority\n"
 48      " - [%c] pageable device local memory\n"
 49      " - [%c] shader stencil export",
 51      shader_output_layer ? 'X' : ' ',
 53      descriptor_buffer ? 'X' : ' ',
 54      dynamic_rendering ? 'X' : ' ',
 57      external_memory ? 'X' : ' ',
 58      memory_priority ? 'X' : ' ',
 60      GPU_stencil_export_support() ? 'X' : ' ');
 61}
 62
/* NOTE(review): signature missing here as well (original line ~63). The body discards and
 * recreates the sampler cache — presumably a sampler re-initialization hook; verify the exact
 * name against the full source. */
 64{
 65  samplers_.free();
 66  samplers_.init();
 67}
68
/* NOTE(review): signature line (original ~69) missing from this extraction; this is the device
 * teardown path — the counterpart of `VKDevice::init`. Safe to call when never initialized. */
70{
71  if (!is_initialized()) {
72    return;
73  }
74
75  deinit_submission_pool();
76
77  dummy_buffer.free();
78  samplers_.free();
79
  /* Destroy all per-thread data (allocated with `new` in `current_thread_data`). */
80  {
81    while (!thread_data_.is_empty()) {
82      VKThreadData *thread_data = thread_data_.pop_last();
83      delete thread_data;
84    }
85    thread_data_.clear();
86  }
  /* Persist the pipeline cache to disk before freeing it. */
87  pipelines.write_to_disk();
88  pipelines.free_data();
89  descriptor_set_layouts_.deinit();
90  orphaned_data_render.deinit(*this);
91  orphaned_data.deinit(*this);
  /* The external-memory pool must be destroyed before the allocator that owns it. */
92  vmaDestroyPool(mem_allocator_, vma_pools.external_memory);
93  vmaDestroyAllocator(mem_allocator_);
94  mem_allocator_ = VK_NULL_HANDLE;
95
96  while (!render_graphs_.is_empty()) {
97    render_graph::VKRenderGraph *render_graph = render_graphs_.pop_last();
98    MEM_delete<render_graph::VKRenderGraph>(render_graph);
99  }
100
101  debugging_tools_.deinit(vk_instance_);
102
  /* Clear cached handles/state so `is_initialized()` reflects reality again. */
103  vk_instance_ = VK_NULL_HANDLE;
104  vk_physical_device_ = VK_NULL_HANDLE;
105  vk_device_ = VK_NULL_HANDLE;
106  vk_queue_family_ = 0;
107  vk_queue_ = VK_NULL_HANDLE;
108  vk_physical_device_properties_ = {};
109  glsl_vert_patch_.clear();
110  glsl_frag_patch_.clear();
111  glsl_geom_patch_.clear();
112  glsl_comp_patch_.clear();
113  is_initialized_ = false;
114}
115
/* Initialize the device from a GHOST context: adopt the Vulkan handles GHOST created, query
 * device capabilities, and set up all device-owned subsystems. GHOST owns the handle
 * lifetimes. */
116void VKDevice::init(void *ghost_context)
117{
119  GHOST_VulkanHandles handles = {};
120  GHOST_GetVulkanHandles((GHOST_ContextHandle)ghost_context, &handles);
121  vk_instance_ = handles.instance;
122  vk_physical_device_ = handles.physical_device;
123  vk_device_ = handles.device;
124  vk_queue_family_ = handles.graphic_queue_family;
125  vk_queue_ = handles.queue;
126  queue_mutex_ = static_cast<std::mutex *>(handles.queue_mutex);
127
  /* Query extensions first: `init_physical_device_properties` calls `supports_extension`. */
128  init_physical_device_extensions();
129  init_physical_device_properties();
130  init_physical_device_memory_properties();
131  init_physical_device_features();
  /* NOTE(review): original lines 132-133 are missing from this extraction. */
134  init_functions();
135  init_debug_callbacks();
136  init_memory_allocator();
137  pipelines.init();
138  pipelines.read_from_disk();
139
140  samplers_.init();
141  init_dummy_buffer();
142
143  debug::object_label(vk_handle(), "LogicalDevice");
144  debug::object_label(vk_queue_, "GenericQueue");
146
147  resources.use_dynamic_rendering = extensions_.dynamic_rendering;
148  resources.use_dynamic_rendering_local_read = extensions_.dynamic_rendering_local_read;
149  orphaned_data.timeline_ = 0;
150
151  init_submission_pool();
  /* Set last, so `deinit` can detect a partially constructed device (original line 145 is
   * missing from this capture). */
152  is_initialized_ = true;
153}
154
155void VKDevice::init_functions()
156{
157#define LOAD_FUNCTION(name) (PFN_##name) vkGetInstanceProcAddr(vk_instance_, STRINGIFY(name))
158 /* VK_KHR_dynamic_rendering */
159 functions.vkCmdBeginRendering = LOAD_FUNCTION(vkCmdBeginRenderingKHR);
160 functions.vkCmdEndRendering = LOAD_FUNCTION(vkCmdEndRenderingKHR);
161
162 /* VK_EXT_debug_utils */
163 functions.vkCmdBeginDebugUtilsLabel = LOAD_FUNCTION(vkCmdBeginDebugUtilsLabelEXT);
164 functions.vkCmdEndDebugUtilsLabel = LOAD_FUNCTION(vkCmdEndDebugUtilsLabelEXT);
165 functions.vkSetDebugUtilsObjectName = LOAD_FUNCTION(vkSetDebugUtilsObjectNameEXT);
166 functions.vkCreateDebugUtilsMessenger = LOAD_FUNCTION(vkCreateDebugUtilsMessengerEXT);
167 functions.vkDestroyDebugUtilsMessenger = LOAD_FUNCTION(vkDestroyDebugUtilsMessengerEXT);
168
169 if (extensions_.external_memory) {
170#ifdef _WIN32
171 /* VK_KHR_external_memory_win32 */
172 functions.vkGetMemoryWin32Handle = LOAD_FUNCTION(vkGetMemoryWin32HandleKHR);
173#elif not defined(__APPLE__)
174 /* VK_KHR_external_memory_fd */
175 functions.vkGetMemoryFd = LOAD_FUNCTION(vkGetMemoryFdKHR);
176#endif
177 }
178
179 /* VK_EXT_descriptor_buffer */
180 functions.vkGetDescriptorSetLayoutSize = LOAD_FUNCTION(vkGetDescriptorSetLayoutSizeEXT);
181 functions.vkGetDescriptorSetLayoutBindingOffset = LOAD_FUNCTION(
182 vkGetDescriptorSetLayoutBindingOffsetEXT);
183 functions.vkGetDescriptor = LOAD_FUNCTION(vkGetDescriptorEXT);
184 functions.vkCmdBindDescriptorBuffers = LOAD_FUNCTION(vkCmdBindDescriptorBuffersEXT);
185 functions.vkCmdSetDescriptorBufferOffsets = LOAD_FUNCTION(vkCmdSetDescriptorBufferOffsetsEXT);
186
187#undef LOAD_FUNCTION
188}
189
190void VKDevice::init_debug_callbacks()
191{
192 debugging_tools_.init(vk_instance_);
193}
194
195void VKDevice::init_physical_device_properties()
196{
197 BLI_assert(vk_physical_device_ != VK_NULL_HANDLE);
198
199 VkPhysicalDeviceProperties2 vk_physical_device_properties = {};
200 vk_physical_device_properties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
201 vk_physical_device_driver_properties_.sType =
202 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES;
203 vk_physical_device_id_properties_.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES;
204 vk_physical_device_properties.pNext = &vk_physical_device_driver_properties_;
205 vk_physical_device_driver_properties_.pNext = &vk_physical_device_id_properties_;
206
207 if (supports_extension(VK_EXT_DESCRIPTOR_BUFFER_EXTENSION_NAME)) {
208 vk_physical_device_descriptor_buffer_properties_ = {
209 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT};
210 vk_physical_device_descriptor_buffer_properties_.pNext =
211 vk_physical_device_driver_properties_.pNext;
212 vk_physical_device_driver_properties_.pNext =
213 &vk_physical_device_descriptor_buffer_properties_;
214 }
215
216 vkGetPhysicalDeviceProperties2(vk_physical_device_, &vk_physical_device_properties);
217 vk_physical_device_properties_ = vk_physical_device_properties.properties;
218}
219
220void VKDevice::init_physical_device_memory_properties()
221{
222 BLI_assert(vk_physical_device_ != VK_NULL_HANDLE);
223 vkGetPhysicalDeviceMemoryProperties(vk_physical_device_, &vk_physical_device_memory_properties_);
224}
225
226void VKDevice::init_physical_device_features()
227{
228 BLI_assert(vk_physical_device_ != VK_NULL_HANDLE);
229
230 VkPhysicalDeviceFeatures2 features = {};
231 features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
232 vk_physical_device_vulkan_11_features_.sType =
233 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES;
234 vk_physical_device_vulkan_12_features_.sType =
235 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES;
236
237 features.pNext = &vk_physical_device_vulkan_11_features_;
238 vk_physical_device_vulkan_11_features_.pNext = &vk_physical_device_vulkan_12_features_;
239
240 vkGetPhysicalDeviceFeatures2(vk_physical_device_, &features);
241 vk_physical_device_features_ = features.features;
242}
243
244void VKDevice::init_physical_device_extensions()
245{
246 uint32_t count = 0;
247 vkEnumerateDeviceExtensionProperties(vk_physical_device_, nullptr, &count, nullptr);
248 device_extensions_ = Array<VkExtensionProperties>(count);
249 vkEnumerateDeviceExtensionProperties(
250 vk_physical_device_, nullptr, &count, device_extensions_.data());
251}
252
253bool VKDevice::supports_extension(const char *extension_name) const
254{
255 for (const VkExtensionProperties &vk_extension_properties : device_extensions_) {
256 if (STREQ(vk_extension_properties.extensionName, extension_name)) {
257 return true;
258 }
259 }
260 return false;
261}
262
263void VKDevice::init_memory_allocator()
264{
265 VmaAllocatorCreateInfo info = {};
266 info.vulkanApiVersion = VK_API_VERSION_1_2;
267 info.physicalDevice = vk_physical_device_;
268 info.device = vk_device_;
269 info.instance = vk_instance_;
270 if (extensions_.descriptor_buffer) {
271 info.flags |= VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT;
272 }
273 if (extensions_.memory_priority) {
274 info.flags |= VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT;
275 }
276 vmaCreateAllocator(&info, &mem_allocator_);
277
278 if (!extensions_.external_memory) {
279 return;
280 }
281 /* External memory pool */
282 /* Initialize a dummy image create info to find the memory type index that will be used for
283 * allocating. */
284 VkExternalMemoryHandleTypeFlags vk_external_memory_handle_type = 0;
285#ifdef _WIN32
286 vk_external_memory_handle_type = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT;
287#else
288 vk_external_memory_handle_type = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
289#endif
290 VkExternalMemoryImageCreateInfo external_image_create_info = {
291 VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
292 nullptr,
293 vk_external_memory_handle_type};
294 VkImageCreateInfo image_create_info = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
295 &external_image_create_info,
296 0,
297 VK_IMAGE_TYPE_2D,
298 VK_FORMAT_R8G8B8A8_UNORM,
299 {1024, 1024, 1},
300 1,
301 1,
302 VK_SAMPLE_COUNT_1_BIT,
303 VK_IMAGE_TILING_OPTIMAL,
304 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
305 VK_IMAGE_USAGE_TRANSFER_DST_BIT |
306 VK_IMAGE_USAGE_SAMPLED_BIT,
307 VK_SHARING_MODE_EXCLUSIVE,
308 0,
309 nullptr,
310 VK_IMAGE_LAYOUT_UNDEFINED};
311 VmaAllocationCreateInfo allocation_create_info = {};
312 allocation_create_info.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
313 allocation_create_info.usage = VMA_MEMORY_USAGE_AUTO;
314 uint32_t memory_type_index;
315 vmaFindMemoryTypeIndexForImageInfo(
316 mem_allocator_, &image_create_info, &allocation_create_info, &memory_type_index);
317
318 vma_pools.external_memory_info.handleTypes = vk_external_memory_handle_type;
319 VmaPoolCreateInfo pool_create_info = {};
320 pool_create_info.memoryTypeIndex = memory_type_index;
321 pool_create_info.pMemoryAllocateNext = &vma_pools.external_memory_info;
322 pool_create_info.priority = 1.0f;
323 vmaCreatePool(mem_allocator_, &pool_create_info, &vma_pools.external_memory);
324}
325
326void VKDevice::init_dummy_buffer()
327{
328 dummy_buffer.create(sizeof(float4x4),
329 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT,
330 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
331 VkMemoryPropertyFlags(0),
332 VmaAllocationCreateFlags(0),
333 1.0f);
334 debug::object_label(dummy_buffer.vk_handle(), "DummyBuffer");
335 /* Default dummy buffer. Set the 4th element to 1 to fix missing orcos. */
336 float data[16] = {
337 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0};
338 dummy_buffer.update_immediately(static_cast<void *>(data));
339}
340
/* NOTE(review): the signature line (original ~341) is missing from this extraction; this body
 * builds the per-stage GLSL patch headers (version pragma + compatibility #defines) that get
 * prepended to every compiled shader. The `if (...)` openers at original lines 346 and 358 are
 * also missing — the stray `}` closers below belong to them; judging by the emitted text they
 * guard draw-parameters and stencil-export support respectively (confirm in full source). */
342{
343  std::stringstream ss;
344
345  ss << "#version 450\n";
347    ss << "#extension GL_ARB_shader_draw_parameters : enable\n";
348    ss << "#define GPU_ARB_shader_draw_parameters\n";
349    ss << "#define gpu_BaseInstance (gl_BaseInstanceARB)\n";
350  }
351  ss << "#define GPU_ARB_clip_control\n";
352
  /* Map GL built-in names onto their Vulkan GLSL equivalents. */
353  ss << "#define gl_VertexID gl_VertexIndex\n";
354  ss << "#define gpu_InstanceIndex (gl_InstanceIndex)\n";
355  ss << "#define gl_InstanceID (gpu_InstanceIndex - gpu_BaseInstance)\n";
356
357  ss << "#extension GL_ARB_shader_viewport_layer_array: enable\n";
359    ss << "#extension GL_ARB_shader_stencil_export: enable\n";
360    ss << "#define GPU_ARB_shader_stencil_export 1\n";
361  }
362  if (extensions_.fragment_shader_barycentric) {
363    ss << "#extension GL_EXT_fragment_shader_barycentric : require\n";
364    ss << "#define gpu_BaryCoord gl_BaryCoordEXT\n";
365    ss << "#define gpu_BaryCoordNoPersp gl_BaryCoordNoPerspEXT\n";
366  }
367
368  /* GLSL Backend Lib. */
369
  /* Same common header for every stage, plus a stage-identifying define. */
370  glsl_vert_patch_ = ss.str() + "#define GPU_VERTEX_SHADER\n" + datatoc_glsl_shader_defines_glsl;
371  glsl_geom_patch_ = ss.str() + "#define GPU_GEOMETRY_SHADER\n" + datatoc_glsl_shader_defines_glsl;
372  glsl_frag_patch_ = ss.str() + "#define GPU_FRAGMENT_SHADER\n" + datatoc_glsl_shader_defines_glsl;
373  glsl_comp_patch_ = ss.str() + "#define GPU_COMPUTE_SHADER\n" + datatoc_glsl_shader_defines_glsl;
374}
 375
/* Per-stage patch getters (signatures missing from this capture; per the symbol index they are
 * `glsl_vertex_patch_get`, `glsl_geometry_patch_get`, `glsl_fragment_patch_get` and
 * `glsl_compute_patch_get`). Each asserts the patch was built by the function above. */
377{
378  BLI_assert(!glsl_vert_patch_.empty());
379  return glsl_vert_patch_.c_str();
380}
 381
383{
384  BLI_assert(!glsl_geom_patch_.empty());
385  return glsl_geom_patch_.c_str();
386}
 387
389{
390  BLI_assert(!glsl_frag_patch_.empty());
391  return glsl_frag_patch_.c_str();
392}
 393
395{
396  BLI_assert(!glsl_comp_patch_.empty());
397  return glsl_comp_patch_.c_str();
398}
399
400/* -------------------------------------------------------------------- */
403
/* PCI vendor IDs (https://pcisig.com/membership/member-companies).
 * NOTE(review): conventionally 0x1002 is the ATI/AMD-graphics ID and 0x1022 the AMD ID; the
 * naming here is swapped, but both map to the same vendor string in `vendor_name`. */
404constexpr int32_t PCI_ID_NVIDIA = 0x10de;
405constexpr int32_t PCI_ID_INTEL = 0x8086;
406constexpr int32_t PCI_ID_AMD = 0x1002;
407constexpr int32_t PCI_ID_ATI = 0x1022;
408constexpr int32_t PCI_ID_APPLE = 0x106b;
409
/* NOTE(review): signature missing (per symbol index: `eGPUDeviceType VKDevice::device_type()
 * const`, vk_device.cc:410). Maps the Vulkan driver ID onto Blender's GPU device enum. */
411{
412  switch (vk_physical_device_driver_properties_.driverID) {
413    case VK_DRIVER_ID_AMD_PROPRIETARY:
414    case VK_DRIVER_ID_AMD_OPEN_SOURCE:
415    case VK_DRIVER_ID_MESA_RADV:
416      return GPU_DEVICE_ATI;
417
418    case VK_DRIVER_ID_NVIDIA_PROPRIETARY:
419    case VK_DRIVER_ID_MESA_NVK:
420      return GPU_DEVICE_NVIDIA;
421
422    case VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS:
423    case VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA:
424      return GPU_DEVICE_INTEL;
425
426    case VK_DRIVER_ID_QUALCOMM_PROPRIETARY:
427      return GPU_DEVICE_QUALCOMM;
428
429    case VK_DRIVER_ID_MOLTENVK:
430      return GPU_DEVICE_APPLE;
431
432    case VK_DRIVER_ID_MESA_LLVMPIPE:
433      return GPU_DEVICE_SOFTWARE;
434
    /* Unreachable second return below keeps some compilers happy about switch fallthrough. */
435    default:
436      return GPU_DEVICE_UNKNOWN;
437  }
438
439  return GPU_DEVICE_UNKNOWN;
440}
 441
/* NOTE(review): signature missing (per symbol index: `eGPUDriverType VKDevice::driver_type()
 * const`, vk_device.cc:442). The `return` for the open-source group (original line ~456) is
 * missing from this capture — presumably `return GPU_DRIVER_OPENSOURCE;` given the enum in the
 * index; confirm against the full source. */
443{
444  switch (vk_physical_device_driver_properties_.driverID) {
445    case VK_DRIVER_ID_AMD_PROPRIETARY:
446    case VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS:
447    case VK_DRIVER_ID_NVIDIA_PROPRIETARY:
448    case VK_DRIVER_ID_QUALCOMM_PROPRIETARY:
449      return GPU_DRIVER_OFFICIAL;
450
451    case VK_DRIVER_ID_MOLTENVK:
452    case VK_DRIVER_ID_AMD_OPEN_SOURCE:
453    case VK_DRIVER_ID_MESA_RADV:
454    case VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA:
455    case VK_DRIVER_ID_MESA_NVK:
457
458    case VK_DRIVER_ID_MESA_LLVMPIPE:
459      return GPU_DRIVER_SOFTWARE;
460
461    default:
462      return GPU_DRIVER_ANY;
463  }
464
465  return GPU_DRIVER_ANY;
466}
467
468std::string VKDevice::vendor_name() const
469{
470 /* Below 0x10000 are the PCI vendor IDs (https://pcisig.com/membership/member-companies) */
471 if (vk_physical_device_properties_.vendorID < 0x10000) {
472 switch (vk_physical_device_properties_.vendorID) {
473 case PCI_ID_AMD:
474 case PCI_ID_ATI:
475 return "Advanced Micro Devices";
476 case PCI_ID_NVIDIA:
477 return "NVIDIA Corporation";
478 case PCI_ID_INTEL:
479 return "Intel Corporation";
480 case PCI_ID_APPLE:
481 return "Apple";
482 default:
483 return std::to_string(vk_physical_device_properties_.vendorID);
484 }
485 }
486 else {
487 /* above 0x10000 should be vkVendorIDs
488 * NOTE: When debug_messaging landed we can use something similar to
489 * vk::to_string(vk::VendorId(properties.vendorID));
490 */
491 return std::to_string(vk_physical_device_properties_.vendorID);
492 }
493}
494
495std::string VKDevice::driver_version() const
496{
497 return StringRefNull(vk_physical_device_driver_properties_.driverName) + " " +
498 StringRefNull(vk_physical_device_driver_properties_.driverInfo);
499}
500
 502
503/* -------------------------------------------------------------------- */
506
/* NOTE(review): signature missing (per symbol index: `VKThreadData(VKDevice &device,
 * pthread_t thread_id)`, vk_device.cc:507). Initializes every resource pool of the thread. */
508{
509  for (VKResourcePool &resource_pool : resource_pools) {
510    resource_pool.init(device);
511  }
512}
 513
 515
516/* -------------------------------------------------------------------- */
519
/* NOTE(review): signature missing (per symbol index: `VKThreadData &
 * VKDevice::current_thread_data()`, vk_device.cc:520). Returns the calling thread's data,
 * lazily creating it on first use. Guarded by `resources.mutex`; the raw `new` is matched by
 * the `delete` loop in the device teardown. */
521{
522  std::scoped_lock mutex(resources.mutex);
523  pthread_t current_thread_id = pthread_self();
524
525  for (VKThreadData *thread_data : thread_data_) {
526    if (pthread_equal(thread_data->thread_id, current_thread_id)) {
527      return *thread_data;
528    }
529  }
530
531  VKThreadData *thread_data = new VKThreadData(*this, current_thread_id);
532  thread_data_.append(thread_data);
533  return *thread_data;
534}
 535
/* NOTE(review): signature missing (symbol index: `void VKDevice::context_register(VKContext &
 * context)`, vk_device.cc:536). Adds the context to the device's tracking list. */
537{
538  contexts_.append(std::reference_wrapper(context));
539}
 540
/* NOTE(review): signature missing (symbol index: `void VKDevice::context_unregister(VKContext
 * &context)`, vk_device.cc:541). Recycles the context's render graph, hands its discardable
 * resources to the device-level orphan pool and drops it from the tracking list. */
542{
543  if (context.render_graph_.has_value()) {
544    render_graph::VKRenderGraph &render_graph = context.render_graph();
545    context.render_graph_.reset();
546    BLI_assert_msg(render_graph.is_empty(),
547                   "Unregistering a context that still has an unsubmitted render graph.");
548    render_graph.reset();
549    BLI_thread_queue_push(unused_render_graphs_, &render_graph);
550  }
  /* Orphaned resources are destroyed once the next timeline value completes. */
551  {
552    std::scoped_lock lock(orphaned_data.mutex_get());
553    orphaned_data.move_data(context.discard_pool, timeline_value_ + 1);
554  }
555
556  contexts_.remove(contexts_.first_index_of(std::reference_wrapper(context)));
557}
/* NOTE(review): signature missing (symbol index: `Span<std::reference_wrapper<VKContext>>
 * VKDevice::contexts_get() const`, vk_device.cc:558). */
559{
560  return contexts_;
561};
562
563void VKDevice::memory_statistics_get(int *r_total_mem_kb, int *r_free_mem_kb) const
564{
565 VmaBudget budgets[VK_MAX_MEMORY_HEAPS];
566 vmaGetHeapBudgets(mem_allocator_get(), budgets);
567 VkDeviceSize total_mem = 0;
568 VkDeviceSize used_mem = 0;
569
570 for (int memory_heap_index : IndexRange(vk_physical_device_memory_properties_.memoryHeapCount)) {
571 const VkMemoryHeap &memory_heap =
572 vk_physical_device_memory_properties_.memoryHeaps[memory_heap_index];
573 const VmaBudget &budget = budgets[memory_heap_index];
574
575 /* Skip host memory-heaps. */
576 if (!bool(memory_heap.flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT)) {
577 continue;
578 }
579
580 total_mem += memory_heap.size;
581 used_mem += budget.usage;
582 }
583
584 *r_total_mem_kb = int(total_mem / 1024);
585 *r_free_mem_kb = int((total_mem - used_mem) / 1024);
586}
587
589
590/* -------------------------------------------------------------------- */
593
594void VKDevice::debug_print(std::ostream &os, const VKDiscardPool &discard_pool)
595{
596 if (discard_pool.images_.is_empty() && discard_pool.buffers_.is_empty() &&
597 discard_pool.image_views_.is_empty() && discard_pool.buffer_views_.is_empty() &&
598 discard_pool.shader_modules_.is_empty() && discard_pool.pipeline_layouts_.is_empty() &&
599 discard_pool.descriptor_pools_.is_empty())
600 {
601 return;
602 }
603 os << " Discardable resources: ";
604 if (!discard_pool.images_.is_empty()) {
605 os << "VkImage=" << discard_pool.images_.size() << " ";
606 }
607 if (!discard_pool.image_views_.is_empty()) {
608 os << "VkImageView=" << discard_pool.image_views_.size() << " ";
609 }
610 if (!discard_pool.buffers_.is_empty()) {
611 os << "VkBuffer=" << discard_pool.buffers_.size() << " ";
612 }
613 if (!discard_pool.buffer_views_.is_empty()) {
614 os << "VkBufferViews=" << discard_pool.buffer_views_.size() << " ";
615 }
616 if (!discard_pool.shader_modules_.is_empty()) {
617 os << "VkShaderModule=" << discard_pool.shader_modules_.size() << " ";
618 }
619 if (!discard_pool.pipeline_layouts_.is_empty()) {
620 os << "VkPipelineLayout=" << discard_pool.pipeline_layouts_.size() << " ";
621 }
622 if (!discard_pool.descriptor_pools_.is_empty()) {
623 os << "VkDescriptorPool=" << discard_pool.descriptor_pools_.size();
624 }
625 os << "\n";
626}
627
/* NOTE(review): the signature and the opening of the main-thread assertion (original lines
 * 628-630, per symbol index `void VKDevice::debug_print()`) are missing from this extraction.
 * Dumps device-wide debug statistics (pipelines, descriptor sets, per-thread resource pools,
 * discard pools, per-context pools and memory usage) to stdout. */
629{
 631      "VKDevice::debug_print can only be called from the main thread.");
 632
 633  std::ostream &os = std::cout;
 634
 635  os << "Pipelines\n";
636  os << " Graphics: " << pipelines.graphic_pipelines_.size() << "\n";
637  os << " Compute: " << pipelines.compute_pipelines_.size() << "\n";
638  os << "Descriptor sets\n";
639  os << " VkDescriptorSetLayouts: " << descriptor_set_layouts_.size() << "\n";
640  for (const VKThreadData *thread_data : thread_data_) {
641    /* NOTE: Assumption that this is always called from the main thread. This could be solved by
642     * keeping track of the main thread inside the thread data. */
643    const bool is_main = pthread_equal(thread_data->thread_id, pthread_self());
644    os << "ThreadData" << (is_main ? " (main-thread)" : "") << ")\n";
645    os << " Rendering_depth: " << thread_data->rendering_depth << "\n";
646    for (int resource_pool_index : IndexRange(thread_data->resource_pools.size())) {
647      const bool is_active = thread_data->resource_pool_index == resource_pool_index;
648      os << " Resource Pool (index=" << resource_pool_index << (is_active ? " active" : "")
649         << ")\n";
650    }
651  }
  /* NOTE(review): the `debug_print(os, ...)` calls for the two device discard pools (original
   * lines 653 and 655) are missing from this extraction. */
652  os << "Discard pool\n";
654  os << "Discard pool (render)\n";
656  os << "\n";
657
658  for (const std::reference_wrapper<VKContext> &context : contexts_) {
659    os << " VKContext \n";
660    debug_print(os, context.get().discard_pool);
661  }
662
663  int total_mem_kb;
664  int free_mem_kb;
665  memory_statistics_get(&total_mem_kb, &free_mem_kb);
666  os << "\nMemory: total=" << total_mem_kb << ", free=" << free_mem_kb << "\n";
667}
668
670
671} // namespace blender::gpu
#define BLI_assert(a)
Definition BLI_assert.h:46
#define BLI_assert_msg(a, msg)
Definition BLI_assert.h:53
void BLI_thread_queue_push(ThreadQueue *queue, void *work)
Definition threads.cc:642
int BLI_thread_is_main(void)
Definition threads.cc:179
#define STREQ(a, b)
#define CLOG_INFO(clg_ref, level,...)
Definition CLG_log.h:179
GHOST C-API function and type declarations.
bool GPU_stencil_export_support()
bool GPU_shader_draw_parameters_support()
eGPUDriverType
@ GPU_DRIVER_ANY
@ GPU_DRIVER_OFFICIAL
@ GPU_DRIVER_OPENSOURCE
@ GPU_DRIVER_SOFTWARE
eGPUDeviceType
@ GPU_DEVICE_UNKNOWN
@ GPU_DEVICE_ATI
@ GPU_DEVICE_QUALCOMM
@ GPU_DEVICE_SOFTWARE
@ GPU_DEVICE_NVIDIA
@ GPU_DEVICE_APPLE
@ GPU_DEVICE_INTEL
volatile int lock
BMesh const char void * data
static void capabilities_init(VKDevice &device)
static void platform_init(const VKDevice &device)
VKPipelinePool pipelines
Definition vk_device.hh:246
void init(void *ghost_context)
Definition vk_device.cc:116
render_graph::VKResourceStateTracker resources
Definition vk_device.hh:242
VmaAllocator mem_allocator_get() const
Definition vk_device.hh:346
VkDevice vk_handle() const
Definition vk_device.hh:336
bool supports_extension(const char *extension_name) const
Definition vk_device.cc:253
struct blender::gpu::VKDevice::@262132226121372152356051207311115003347304265036 vma_pools
std::string vendor_name() const
Definition vk_device.cc:468
std::string driver_version() const
Definition vk_device.cc:495
const char * glsl_geometry_patch_get() const
Definition vk_device.cc:382
VKDiscardPool orphaned_data
Definition vk_device.hh:243
const char * glsl_fragment_patch_get() const
Definition vk_device.cc:388
VKThreadData & current_thread_data()
Definition vk_device.cc:520
eGPUDriverType driver_type() const
Definition vk_device.cc:442
void context_register(VKContext &context)
Definition vk_device.cc:536
bool is_initialized() const
Definition vk_device.hh:374
const char * glsl_compute_patch_get() const
Definition vk_device.cc:394
const char * glsl_vertex_patch_get() const
Definition vk_device.cc:376
void context_unregister(VKContext &context)
Definition vk_device.cc:541
void memory_statistics_get(int *r_total_mem_kb, int *r_free_mem_kb) const
Definition vk_device.cc:563
struct blender::gpu::VKDevice::@164322360241203077355133066050124027157363067003 functions
Span< std::reference_wrapper< VKContext > > contexts_get() const
Definition vk_device.cc:558
eGPUDeviceType device_type() const
Definition vk_device.cc:410
VKDiscardPool orphaned_data_render
Definition vk_device.hh:245
std::array< VKResourcePool, resource_pools_count > resource_pools
Definition vk_device.hh:127
VKThreadData(VKDevice &device, pthread_t thread_id)
Definition vk_device.cc:507
void init(VkInstance vk_instance)
Definition vk_debug.cc:162
char datatoc_glsl_shader_defines_glsl[]
Definition gl_shader.cc:48
ThreadMutex mutex
int count
#define LOG(severity)
Definition log.h:32
void object_label(GLenum type, GLuint object, const char *name)
Definition gl_debug.cc:329
constexpr int32_t PCI_ID_NVIDIA
Definition vk_device.cc:404
constexpr int32_t PCI_ID_INTEL
Definition vk_device.cc:405
static CLG_LogRef LOG
constexpr int32_t PCI_ID_ATI
Definition vk_device.cc:407
constexpr int32_t PCI_ID_AMD
Definition vk_device.cc:406
constexpr int32_t PCI_ID_APPLE
Definition vk_device.cc:408
MatBase< float, 4, 4 > float4x4
ParamHandle ** handles
#define LOAD_FUNCTION(name)