Blender V5.0
GHOST_ContextVK.cc
Go to the documentation of this file.
1/* SPDX-FileCopyrightText: 2022-2023 Blender Authors
2 *
3 * SPDX-License-Identifier: GPL-2.0-or-later */
4
8
9#include "GHOST_ContextVK.hh"
10
11#ifdef _WIN32
12# include <vulkan/vulkan_win32.h>
13#else /* X11/WAYLAND. */
14# ifdef WITH_GHOST_X11
15# include <vulkan/vulkan_xlib.h>
16# endif
17# ifdef WITH_GHOST_WAYLAND
18# include <vulkan/vulkan_wayland.h>
19# endif
20#endif
21
23
24#if !defined(_WIN32) or defined(_M_ARM64)
25/* Silence compilation warning on non-windows x64 systems. */
26# define VMA_EXTERNAL_MEMORY_WIN32 0
27#endif
28#include "vk_mem_alloc.h"
29
30#include "CLG_log.h"
31
32#include <algorithm>
33#include <array>
34#include <cassert>
35#include <cstdio>
36#include <cstring>
37#include <iostream>
38#include <mutex>
39#include <optional>
40#include <sstream>
41#include <vector>
42
43#include <sys/stat.h>
44
45using namespace std;
46
47static CLG_LogRef LOG = {"ghost.context"};
48
49static const char *vulkan_error_as_string(VkResult result)
50{
51#define FORMAT_ERROR(X) \
52 case X: { \
53 return "" #X; \
54 }
55
56 switch (result) {
57 FORMAT_ERROR(VK_NOT_READY);
58 FORMAT_ERROR(VK_TIMEOUT);
59 FORMAT_ERROR(VK_EVENT_SET);
60 FORMAT_ERROR(VK_EVENT_RESET);
61 FORMAT_ERROR(VK_INCOMPLETE);
62 FORMAT_ERROR(VK_ERROR_OUT_OF_HOST_MEMORY);
63 FORMAT_ERROR(VK_ERROR_OUT_OF_DEVICE_MEMORY);
64 FORMAT_ERROR(VK_ERROR_INITIALIZATION_FAILED);
65 FORMAT_ERROR(VK_ERROR_DEVICE_LOST);
66 FORMAT_ERROR(VK_ERROR_MEMORY_MAP_FAILED);
67 FORMAT_ERROR(VK_ERROR_LAYER_NOT_PRESENT);
68 FORMAT_ERROR(VK_ERROR_EXTENSION_NOT_PRESENT);
69 FORMAT_ERROR(VK_ERROR_FEATURE_NOT_PRESENT);
70 FORMAT_ERROR(VK_ERROR_INCOMPATIBLE_DRIVER);
71 FORMAT_ERROR(VK_ERROR_TOO_MANY_OBJECTS);
72 FORMAT_ERROR(VK_ERROR_FORMAT_NOT_SUPPORTED);
73 FORMAT_ERROR(VK_ERROR_FRAGMENTED_POOL);
74 FORMAT_ERROR(VK_ERROR_UNKNOWN);
75 FORMAT_ERROR(VK_ERROR_OUT_OF_POOL_MEMORY);
76 FORMAT_ERROR(VK_ERROR_INVALID_EXTERNAL_HANDLE);
77 FORMAT_ERROR(VK_ERROR_FRAGMENTATION);
78 FORMAT_ERROR(VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS);
79 FORMAT_ERROR(VK_ERROR_SURFACE_LOST_KHR);
80 FORMAT_ERROR(VK_ERROR_NATIVE_WINDOW_IN_USE_KHR);
81 FORMAT_ERROR(VK_SUBOPTIMAL_KHR);
82 FORMAT_ERROR(VK_ERROR_OUT_OF_DATE_KHR);
83 FORMAT_ERROR(VK_ERROR_INCOMPATIBLE_DISPLAY_KHR);
84 FORMAT_ERROR(VK_ERROR_VALIDATION_FAILED_EXT);
85 FORMAT_ERROR(VK_ERROR_INVALID_SHADER_NV);
86 FORMAT_ERROR(VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT);
87 FORMAT_ERROR(VK_ERROR_NOT_PERMITTED_EXT);
88 FORMAT_ERROR(VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT);
89 FORMAT_ERROR(VK_THREAD_IDLE_KHR);
90 FORMAT_ERROR(VK_THREAD_DONE_KHR);
91 FORMAT_ERROR(VK_OPERATION_DEFERRED_KHR);
92 FORMAT_ERROR(VK_OPERATION_NOT_DEFERRED_KHR);
93 FORMAT_ERROR(VK_PIPELINE_COMPILE_REQUIRED_EXT);
94 default:
95 return "Unknown Error";
96 }
97}
98
/* Stringify a macro argument (used to echo the checked expression in the log). */
#define __STR(A) "" #A
/* Evaluate a Vulkan call; on any non-success result, log the expression text
 * together with the translated #VkResult and return `fail_value` from the
 * enclosing function. Only usable inside functions returning `fail_value`'s type. */
#define VK_CHECK(__expression, fail_value) \
  do { \
    VkResult r = (__expression); \
    if (r != VK_SUCCESS) { \
      CLOG_ERROR(&LOG, \
                 "Vulkan: %s resulted in code %s.", \
                 __STR(__expression), \
                 vulkan_error_as_string(r)); \
      return fail_value; \
    } \
  } while (0)
111
112/* -------------------------------------------------------------------- */
115
116void GHOST_SwapchainImage::destroy(VkDevice vk_device)
117{
118 vkDestroySemaphore(vk_device, present_semaphore, nullptr);
119 present_semaphore = VK_NULL_HANDLE;
120 vk_image = VK_NULL_HANDLE;
121}
122
123void GHOST_FrameDiscard::destroy(VkDevice vk_device)
124{
125 while (!swapchains.empty()) {
126 VkSwapchainKHR vk_swapchain = swapchains.back();
127 swapchains.pop_back();
128 vkDestroySwapchainKHR(vk_device, vk_swapchain, nullptr);
129 }
130 while (!semaphores.empty()) {
131 VkSemaphore vk_semaphore = semaphores.back();
132 semaphores.pop_back();
133 vkDestroySemaphore(vk_device, vk_semaphore, nullptr);
134 }
135}
136
/* Release this frame's synchronization primitives and any resources that were
 * queued for deferred destruction while the frame was in flight. */
void GHOST_Frame::destroy(VkDevice vk_device)
{
  vkDestroyFence(vk_device, submission_fence, nullptr);
  submission_fence = VK_NULL_HANDLE;
  vkDestroySemaphore(vk_device, acquire_semaphore, nullptr);
  acquire_semaphore = VK_NULL_HANDLE;
  discard_pile.destroy(vk_device);
}
145
147
148/* -------------------------------------------------------------------- */
151
153 vector<VkExtensionProperties> extensions;
154 vector<const char *> enabled;
155
156 bool is_supported(const char *extension_name) const
157 {
158 for (const VkExtensionProperties &extension : extensions) {
159 if (strcmp(extension.extensionName, extension_name) == 0) {
160 return true;
161 }
162 }
163 return false;
164 }
165
166 bool is_supported(const vector<const char *> &extension_names)
167 {
168 for (const char *extension_name : extension_names) {
169 if (!is_supported(extension_name)) {
170 return false;
171 }
172 }
173 return true;
174 }
175
176 bool enable(const char *extension_name, bool optional = false)
177 {
178 bool supported = is_supported(extension_name);
179 if (supported) {
181 "Vulkan: %s extension enabled: name=%s",
182 optional ? "optional" : "required",
183 extension_name);
184 enabled.push_back(extension_name);
185 return true;
186 }
187
189 (optional ? CLG_LEVEL_TRACE : CLG_LEVEL_ERROR),
190 "Vulkan: %s extension not available: name=%s",
191 optional ? "optional" : "required",
192 extension_name);
193
194 return false;
195 }
196
197 bool enable(const vector<const char *> &extension_names, bool optional = false)
198 {
199 bool failure = false;
200 for (const char *extension_name : extension_names) {
201 failure |= !enable(extension_name, optional);
202 }
203 return !failure;
204 }
205
206 bool is_enabled(const char *extension_name) const
207 {
208 for (const char *enabled_extension_name : enabled) {
209 if (strcmp(enabled_extension_name, extension_name) == 0) {
210 return true;
211 }
212 }
213 return false;
214 }
215};
216
218
219/* -------------------------------------------------------------------- */
222
224 public:
225 VkPhysicalDevice vk_physical_device = VK_NULL_HANDLE;
227
228 VkDevice vk_device = VK_NULL_HANDLE;
229
231 VkQueue generic_queue = VK_NULL_HANDLE;
232 VmaAllocator vma_allocator = VK_NULL_HANDLE;
233
234 VkPhysicalDeviceProperties2 properties = {
235 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
236 };
237 VkPhysicalDeviceVulkan12Properties properties_12 = {
238 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES,
239 };
240 VkPhysicalDeviceFeatures2 features = {};
241 VkPhysicalDeviceVulkan11Features features_11 = {};
242 VkPhysicalDeviceVulkan12Features features_12 = {};
243 VkPhysicalDeviceRobustness2FeaturesEXT features_robustness2 = {
244 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT};
245
246 int users = 0;
247
249 std::mutex queue_mutex;
250
253
254 public:
258 {
259 properties.pNext = &properties_12;
260 vkGetPhysicalDeviceProperties2(vk_physical_device, &properties);
261
262 features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
263 features_11.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES;
264 features_12.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES;
265 features.pNext = &features_11;
266 features_11.pNext = &features_12;
268
269 vkGetPhysicalDeviceFeatures2(vk_physical_device, &features);
271 }
272
274 {
275 if (vma_allocator != VK_NULL_HANDLE) {
276 vmaDestroyAllocator(vma_allocator);
277 vma_allocator = VK_NULL_HANDLE;
278 }
279 if (vk_device != VK_NULL_HANDLE) {
280 vkDestroyDevice(vk_device, nullptr);
281 vk_device = VK_NULL_HANDLE;
282 }
283 }
284
286 {
287 uint32_t extensions_count;
288 VK_CHECK(vkEnumerateDeviceExtensionProperties(
289 vk_physical_device, nullptr, &extensions_count, nullptr),
290 false);
291 extensions.extensions.resize(extensions_count);
292 VK_CHECK(vkEnumerateDeviceExtensionProperties(
293 vk_physical_device, nullptr, &extensions_count, extensions.extensions.data()),
294 false);
295 return true;
296 }
297
299 {
300 if (vk_device) {
301 std::scoped_lock lock(queue_mutex);
302 vkDeviceWaitIdle(vk_device);
303 }
304 }
305
307 {
308 uint32_t queue_family_count = 0;
309 vkGetPhysicalDeviceQueueFamilyProperties(vk_physical_device, &queue_family_count, nullptr);
310
311 vector<VkQueueFamilyProperties> queue_families(queue_family_count);
312 vkGetPhysicalDeviceQueueFamilyProperties(
313 vk_physical_device, &queue_family_count, queue_families.data());
314
316 for (const auto &queue_family : queue_families) {
317 /* Every VULKAN implementation by spec must have one queue family that support both graphics
318 * and compute pipelines. We select this one; compute only queue family hints at asynchronous
319 * compute implementations. */
320 if ((queue_family.queueFlags & VK_QUEUE_GRAPHICS_BIT) &&
321 (queue_family.queueFlags & VK_QUEUE_COMPUTE_BIT))
322 {
323 return;
324 }
326 }
327 }
328
330 {
331 vkGetDeviceQueue(vk_device, generic_queue_family, 0, &generic_queue);
332 }
333
334 void init_memory_allocator(VkInstance vk_instance)
335 {
336 VmaAllocatorCreateInfo vma_allocator_create_info = {};
337 vma_allocator_create_info.vulkanApiVersion = VK_API_VERSION_1_2;
338 vma_allocator_create_info.physicalDevice = vk_physical_device;
339 vma_allocator_create_info.device = vk_device;
340 vma_allocator_create_info.instance = vk_instance;
341 vma_allocator_create_info.flags = VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT;
342 if (extensions.is_enabled(VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME)) {
343 vma_allocator_create_info.flags |= VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT;
344 }
345 if (extensions.is_enabled(VK_KHR_MAINTENANCE_4_EXTENSION_NAME)) {
346 vma_allocator_create_info.flags |= VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE4_BIT;
347 }
348 vmaCreateAllocator(&vma_allocator_create_info, &vma_allocator);
349 }
350};
351
353
354/* -------------------------------------------------------------------- */
357
359 VkInstance vk_instance = VK_NULL_HANDLE;
360 VkPhysicalDevice vk_physical_device = VK_NULL_HANDLE;
361
363
364 std::optional<GHOST_DeviceVK> device;
365
367 {
369 }
370
372 {
373 device.reset();
374 vkDestroyInstance(vk_instance, nullptr);
375 vk_physical_device = VK_NULL_HANDLE;
376 vk_instance = VK_NULL_HANDLE;
377 }
378
380 {
381 uint32_t extension_count = 0;
382 VK_CHECK(vkEnumerateInstanceExtensionProperties(nullptr, &extension_count, nullptr), false);
383 extensions.extensions.resize(extension_count);
384 VK_CHECK(vkEnumerateInstanceExtensionProperties(
385 nullptr, &extension_count, extensions.extensions.data()),
386 false);
387 return true;
388 }
389
390 bool create_instance(uint32_t vulkan_api_version)
391 {
392 VkApplicationInfo vk_application_info = {VK_STRUCTURE_TYPE_APPLICATION_INFO,
393 nullptr,
394 "Blender",
395 VK_MAKE_VERSION(1, 0, 0),
396 "Blender",
397 VK_MAKE_VERSION(1, 0, 0),
398 vulkan_api_version};
399 VkInstanceCreateInfo vk_instance_create_info = {VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
400 nullptr,
401 0,
402 &vk_application_info,
403 0,
404 nullptr,
405 uint32_t(extensions.enabled.size()),
406 extensions.enabled.data()
407
408 };
409
410 VK_CHECK(vkCreateInstance(&vk_instance_create_info, nullptr, &vk_instance), false);
411 return true;
412 }
413
414 bool select_physical_device(const GHOST_GPUDevice &preferred_device,
415 const vector<const char *> &required_extensions)
416 {
417 VkPhysicalDevice best_physical_device = VK_NULL_HANDLE;
418
419 uint32_t device_count = 0;
420 vkEnumeratePhysicalDevices(vk_instance, &device_count, nullptr);
421
422 vector<VkPhysicalDevice> physical_devices(device_count);
423 vkEnumeratePhysicalDevices(vk_instance, &device_count, physical_devices.data());
424
425 int best_device_score = -1;
426 int device_index = -1;
427 for (const auto &physical_device : physical_devices) {
428 GHOST_DeviceVK device_vk(physical_device, false);
429 device_index++;
430
431 if (!device_vk.extensions.is_supported(required_extensions)) {
432 continue;
433 }
434 if (!blender::gpu::GPU_vulkan_is_supported_driver(physical_device)) {
435 continue;
436 }
437
438 if (!device_vk.features.features.geometryShader ||
439 !device_vk.features.features.dualSrcBlend || !device_vk.features.features.logicOp ||
440 !device_vk.features.features.imageCubeArray)
441 {
442 continue;
443 }
444
445 int device_score = 0;
446 switch (device_vk.properties.properties.deviceType) {
447 case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU:
448 device_score = 400;
449 break;
450 case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU:
451 device_score = 300;
452 break;
453 case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU:
454 device_score = 200;
455 break;
456 case VK_PHYSICAL_DEVICE_TYPE_CPU:
457 device_score = 100;
458 break;
459 default:
460 break;
461 }
462
463 /* User has configured a preferred device. Add bonus score when vendor and device match.
464 * Driver id isn't considered as drivers update more frequently and can break the device
465 * selection. */
466 if (device_vk.properties.properties.deviceID == preferred_device.device_id &&
467 device_vk.properties.properties.vendorID == preferred_device.vendor_id)
468 {
469 device_score += 500;
470 if (preferred_device.index == device_index) {
471 device_score += 10;
472 }
473 }
474 if (device_score > best_device_score) {
475 best_physical_device = physical_device;
476 best_device_score = device_score;
477 }
478 }
479
480 if (best_physical_device == VK_NULL_HANDLE) {
481 CLOG_ERROR(&LOG, "No suitable Vulkan Device found!");
482 return GHOST_kFailure;
483 }
484
485 vk_physical_device = best_physical_device;
486
487 return GHOST_kSuccess;
488 }
489
490 bool create_device(const bool use_vk_ext_swapchain_maintenance1,
491 vector<const char *> &required_device_extensions,
492 vector<const char *> &optional_device_extensions)
493 {
494 device.emplace(vk_physical_device, use_vk_ext_swapchain_maintenance1);
495 GHOST_DeviceVK &device = *this->device;
496
497 device.extensions.enable(required_device_extensions);
498 device.extensions.enable(optional_device_extensions, true);
499 device.init_generic_queue_family();
500
501 float queue_priorities[] = {1.0f};
502 vector<VkDeviceQueueCreateInfo> queue_create_infos;
503 VkDeviceQueueCreateInfo graphic_queue_create_info = {};
504 graphic_queue_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
505 graphic_queue_create_info.queueFamilyIndex = device.generic_queue_family;
506 graphic_queue_create_info.queueCount = 1;
507 graphic_queue_create_info.pQueuePriorities = queue_priorities;
508 queue_create_infos.push_back(graphic_queue_create_info);
509
510 VkPhysicalDeviceFeatures device_features = {};
511 device_features.geometryShader = VK_TRUE;
512 device_features.logicOp = VK_TRUE;
513 device_features.dualSrcBlend = VK_TRUE;
514 device_features.imageCubeArray = VK_TRUE;
515 device_features.multiDrawIndirect = VK_TRUE;
516 device_features.multiViewport = VK_TRUE;
517 device_features.shaderClipDistance = VK_TRUE;
518 device_features.drawIndirectFirstInstance = VK_TRUE;
519 device_features.fragmentStoresAndAtomics = VK_TRUE;
520 device_features.samplerAnisotropy = device.features.features.samplerAnisotropy;
521 device_features.wideLines = device.features.features.wideLines;
522
523 VkDeviceCreateInfo device_create_info = {};
524 device_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
525 device_create_info.queueCreateInfoCount = uint32_t(queue_create_infos.size());
526 device_create_info.pQueueCreateInfos = queue_create_infos.data();
527 device_create_info.enabledExtensionCount = uint32_t(device.extensions.enabled.size());
528 device_create_info.ppEnabledExtensionNames = device.extensions.enabled.data();
529 device_create_info.pEnabledFeatures = &device_features;
530
531 std::vector<void *> feature_struct_ptr;
532
533 /* Enable vulkan 11 features when supported on physical device. */
534 VkPhysicalDeviceVulkan11Features vulkan_11_features = {};
535 vulkan_11_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES;
536 vulkan_11_features.shaderDrawParameters = VK_TRUE;
537 feature_struct_ptr.push_back(&vulkan_11_features);
538
539 /* Enable optional vulkan 12 features when supported on physical device. */
540 VkPhysicalDeviceVulkan12Features vulkan_12_features = {};
541 vulkan_12_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES;
542 vulkan_12_features.shaderOutputLayer = device.features_12.shaderOutputLayer;
543 vulkan_12_features.shaderOutputViewportIndex = device.features_12.shaderOutputViewportIndex;
544 vulkan_12_features.bufferDeviceAddress = device.features_12.bufferDeviceAddress;
545 vulkan_12_features.timelineSemaphore = VK_TRUE;
546 feature_struct_ptr.push_back(&vulkan_12_features);
547
548 /* Enable provoking vertex. */
549 VkPhysicalDeviceProvokingVertexFeaturesEXT provoking_vertex_features = {};
550 provoking_vertex_features.sType =
551 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT;
552 provoking_vertex_features.provokingVertexLast = VK_TRUE;
553 feature_struct_ptr.push_back(&provoking_vertex_features);
554
555 /* Enable dynamic rendering. */
556 VkPhysicalDeviceDynamicRenderingFeatures dynamic_rendering = {};
557 dynamic_rendering.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES;
558 dynamic_rendering.dynamicRendering = VK_TRUE;
559 feature_struct_ptr.push_back(&dynamic_rendering);
560
561 VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT
562 dynamic_rendering_unused_attachments = {};
563 dynamic_rendering_unused_attachments.sType =
564 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_FEATURES_EXT;
565 dynamic_rendering_unused_attachments.dynamicRenderingUnusedAttachments = VK_TRUE;
566 if (device.extensions.is_enabled(VK_EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_EXTENSION_NAME)) {
567 feature_struct_ptr.push_back(&dynamic_rendering_unused_attachments);
568 }
569
570 VkPhysicalDeviceDynamicRenderingLocalReadFeaturesKHR dynamic_rendering_local_read = {};
571 dynamic_rendering_local_read.sType =
572 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES_KHR;
573 dynamic_rendering_local_read.dynamicRenderingLocalRead = VK_TRUE;
574 if (device.extensions.is_enabled(VK_KHR_DYNAMIC_RENDERING_LOCAL_READ_EXTENSION_NAME)) {
575 feature_struct_ptr.push_back(&dynamic_rendering_local_read);
576 }
577
578 /* VK_EXT_robustness2 */
579 VkPhysicalDeviceRobustness2FeaturesEXT robustness_2_features = {
580 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT};
581 if (device.extensions.is_enabled(VK_EXT_ROBUSTNESS_2_EXTENSION_NAME)) {
582 robustness_2_features.nullDescriptor = device.features_robustness2.nullDescriptor;
583 feature_struct_ptr.push_back(&robustness_2_features);
584 }
585
586 /* Query for Mainenance4 (core in Vulkan 1.3). */
587 VkPhysicalDeviceMaintenance4FeaturesKHR maintenance_4 = {};
588 maintenance_4.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR;
589 maintenance_4.maintenance4 = VK_TRUE;
590 if (device.extensions.is_enabled(VK_KHR_MAINTENANCE_4_EXTENSION_NAME)) {
591 feature_struct_ptr.push_back(&maintenance_4);
592 }
593
594 /* Swap-chain maintenance 1 is optional. */
595 VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT swapchain_maintenance_1 = {
596 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT, nullptr, VK_TRUE};
597 if (device.extensions.is_enabled(VK_EXT_SWAPCHAIN_MAINTENANCE_1_EXTENSION_NAME)) {
598 feature_struct_ptr.push_back(&swapchain_maintenance_1);
599 device.use_vk_ext_swapchain_maintenance_1 = true;
600 }
601
602 /* Descriptor buffers */
603 VkPhysicalDeviceDescriptorBufferFeaturesEXT descriptor_buffer = {
604 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT,
605 nullptr,
606 VK_TRUE,
607 VK_FALSE,
608 VK_FALSE,
609 VK_FALSE};
610 if (device.extensions.is_enabled(VK_EXT_DESCRIPTOR_BUFFER_EXTENSION_NAME)) {
611 feature_struct_ptr.push_back(&descriptor_buffer);
612 }
613
614 /* Query and enable Fragment Shader Barycentrics. */
615 VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR fragment_shader_barycentric = {};
616 fragment_shader_barycentric.sType =
617 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR;
618 fragment_shader_barycentric.fragmentShaderBarycentric = VK_TRUE;
619 if (device.extensions.is_enabled(VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME)) {
620 feature_struct_ptr.push_back(&fragment_shader_barycentric);
621 }
622
623 /* VK_EXT_memory_priority */
624 VkPhysicalDeviceMemoryPriorityFeaturesEXT memory_priority = {
625 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT, nullptr, VK_TRUE};
626 if (device.extensions.is_enabled(VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME)) {
627 feature_struct_ptr.push_back(&memory_priority);
628 }
629
630 /* VK_EXT_pageable_device_local_memory */
631 VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT pageable_device_local_memory = {
632 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT,
633 nullptr,
634 VK_TRUE};
635 if (device.extensions.is_enabled(VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_EXTENSION_NAME)) {
636 feature_struct_ptr.push_back(&pageable_device_local_memory);
637 }
638
639 /* Link all registered feature structs. */
640 for (int i = 1; i < feature_struct_ptr.size(); i++) {
641 ((VkBaseInStructure *)(feature_struct_ptr[i - 1]))->pNext =
642 (VkBaseInStructure *)(feature_struct_ptr[i]);
643 }
644
645 device_create_info.pNext = feature_struct_ptr[0];
646 VK_CHECK(vkCreateDevice(vk_physical_device, &device_create_info, nullptr, &device.vk_device),
648 device.init_generic_queue();
649 device.init_memory_allocator(vk_instance);
650 return true;
651 }
652};
653
655
663static std::optional<GHOST_InstanceVK> vulkan_instance;
664
666
668#ifdef _WIN32
669 HWND hwnd,
670#elif defined(__APPLE__)
671 CAMetalLayer *metal_layer,
672#else
674 /* X11 */
675 Window window,
676 Display *display,
677 /* Wayland */
678 wl_surface *wayland_surface,
679 wl_display *wayland_display,
680 const GHOST_ContextVK_WindowInfo *wayland_window_info,
681#endif
682 int contextMajorVersion,
683 int contextMinorVersion,
684 const GHOST_GPUDevice &preferred_device,
685 const GHOST_WindowHDRInfo *hdr_info)
686 : GHOST_Context(context_params),
687#ifdef _WIN32
688 hwnd_(hwnd),
689#elif defined(__APPLE__)
690 metal_layer_(metal_layer),
691#else
692 platform_(platform),
693 /* X11 */
694 display_(display),
695 window_(window),
696 /* Wayland */
697 wayland_surface_(wayland_surface),
698 wayland_display_(wayland_display),
699 wayland_window_info_(wayland_window_info),
700#endif
701 context_major_version_(contextMajorVersion),
702 context_minor_version_(contextMinorVersion),
703 preferred_device_(preferred_device),
704 hdr_info_(hdr_info),
705 surface_(VK_NULL_HANDLE),
706 swapchain_(VK_NULL_HANDLE),
707 frame_data_(2),
708 render_frame_(0),
709 use_hdr_swapchain_(false)
710{
711 frame_data_.reserve(5);
712}
713
715{
716 if (vulkan_instance.has_value()) {
717 GHOST_InstanceVK &instance_vk = vulkan_instance.value();
718 GHOST_DeviceVK &device_vk = instance_vk.device.value();
719 device_vk.wait_idle();
720 for (VkFence fence : fence_pile_) {
721 vkDestroyFence(device_vk.vk_device, fence, nullptr);
722 }
723 fence_pile_.clear();
724 destroySwapchain();
725
726 if (surface_ != VK_NULL_HANDLE) {
727 vkDestroySurfaceKHR(instance_vk.vk_instance, surface_, nullptr);
728 }
729
730 device_vk.users--;
731 if (device_vk.users == 0) {
732 vulkan_instance.reset();
733 }
734 }
735}
736
738{
739 if (acquired_swapchain_image_index_.has_value()) {
740 assert(false);
741 return GHOST_kFailure;
742 }
743
744 GHOST_DeviceVK &device_vk = vulkan_instance->device.value();
745 VkDevice vk_device = device_vk.vk_device;
746
747 /* This method is called after all the draw calls in the application, and it signals that
748 * we are ready to both (1) submit commands for those draw calls to the device and
749 * (2) begin building the next frame. It is assumed as an invariant that the submission fence
750 * in the current GHOST_Frame has been signaled. So, we wait for the *next* GHOST_Frame's
751 * submission fence to be signaled, to ensure the invariant holds for the next call to
752 * `swapBuffers`.
753 *
754 * We will pass the current GHOST_Frame to the swap_buffer_draw_callback_ for command buffer
755 * submission, and it is the responsibility of that callback to use the current GHOST_Frame's
 * fence for its submission fence. Since the callback is called after we wait for the next frame
757 * to be complete, it is also safe in the callback to clean up resources associated with the next
758 * frame.
759 */
760 render_frame_ = (render_frame_ + 1) % frame_data_.size();
761 GHOST_Frame &submission_frame_data = frame_data_[render_frame_];
762 /* Wait for previous time that the frame was used to finish rendering. Presenting can
763 * still happen in parallel, but acquiring needs can only happen when the frame acquire semaphore
764 * has been signaled and waited for. */
765 if (submission_frame_data.submission_fence) {
766 vkWaitForFences(vk_device, 1, &submission_frame_data.submission_fence, true, UINT64_MAX);
767 }
768 for (VkSwapchainKHR swapchain : submission_frame_data.discard_pile.swapchains) {
769 this->destroySwapchainPresentFences(swapchain);
770 }
771 submission_frame_data.discard_pile.destroy(vk_device);
772
773 const bool use_hdr_swapchain = hdr_info_ &&
774 (hdr_info_->wide_gamut_enabled || hdr_info_->hdr_enabled) &&
776 if (use_hdr_swapchain != use_hdr_swapchain_) {
777 /* Re-create swapchain if HDR mode was toggled in the system settings. */
778 recreateSwapchain(use_hdr_swapchain);
779 }
780 else {
781#ifdef WITH_GHOST_WAYLAND
782 /* Wayland doesn't provide a WSI with windowing capabilities, therefore cannot detect whether
783 * the swap-chain needs to be recreated. But as a side effect we can recreate the swap-chain
784 * before presenting. */
785 if (wayland_window_info_) {
786 const bool recreate_swapchain =
787 ((wayland_window_info_->size[0] !=
788 std::max(render_extent_.width, render_extent_min_.width)) ||
789 (wayland_window_info_->size[1] !=
790 std::max(render_extent_.height, render_extent_min_.height)));
791
792 if (recreate_swapchain) {
793 /* Swap-chain is out of date. Recreate swap-chain. */
794 recreateSwapchain(use_hdr_swapchain);
795 }
796 }
797#endif
798 }
799 /* there is no valid swapchain when the previous window was minimized. User can have maximized
800 * the window so we need to check if the swapchain has to be created. */
801 if (swapchain_ == VK_NULL_HANDLE) {
802 recreateSwapchain(use_hdr_swapchain);
803 }
804
  /* Acquire next image, swapchain can be (or become) invalid when minimizing window. */
806 uint32_t image_index = 0;
807 if (swapchain_ != VK_NULL_HANDLE) {
808 /* Some platforms (NVIDIA/Wayland) can receive an out of date swapchain when acquiring the next
     * swapchain image. Others do it when calling vkQueuePresent. */
810 VkResult acquire_result = VK_ERROR_OUT_OF_DATE_KHR;
811 while (swapchain_ != VK_NULL_HANDLE &&
812 (acquire_result == VK_ERROR_OUT_OF_DATE_KHR || acquire_result == VK_SUBOPTIMAL_KHR))
813 {
814 acquire_result = vkAcquireNextImageKHR(vk_device,
815 swapchain_,
817 submission_frame_data.acquire_semaphore,
818 VK_NULL_HANDLE,
819 &image_index);
820 if (acquire_result == VK_ERROR_OUT_OF_DATE_KHR || acquire_result == VK_SUBOPTIMAL_KHR) {
821 recreateSwapchain(use_hdr_swapchain);
822 }
823 }
824 }
825
826 /* Acquired callback is also called when there is no swapchain.
827 *
828 * When acquiring swap chain (image) and the swap chain is discarded (window has been minimized).
   * We have to trigger a last acquired callback to reduce the attachments of the GPUFramebuffer.
830 * Vulkan backend will retrieve the data (getVulkanSwapChainFormat) containing a render extent of
831 * 0,0.
832 *
833 * The next frame window manager will detect that the window is minimized and doesn't draw the
834 * window at all.
835 */
836 if (swap_buffer_acquired_callback_) {
837 swap_buffer_acquired_callback_();
838 }
839
840 if (swapchain_ == VK_NULL_HANDLE) {
841 CLOG_TRACE(&LOG, "Swap-chain unavailable (minimized window).");
842 return GHOST_kSuccess;
843 }
844
846 "Acquired swap-chain image (render_frame=%lu, image_index=%u)",
847 render_frame_,
848 image_index);
849 acquired_swapchain_image_index_ = image_index;
850
851 return GHOST_kSuccess;
852}
853VkFence GHOST_ContextVK::getFence()
854{
855 if (!fence_pile_.empty()) {
856 VkFence fence = fence_pile_.back();
857 fence_pile_.pop_back();
858 return fence;
859 }
860 GHOST_DeviceVK &device_vk = vulkan_instance->device.value();
861 VkFence fence = VK_NULL_HANDLE;
862 const VkFenceCreateInfo fence_create_info = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO};
863 vkCreateFence(device_vk.vk_device, &fence_create_info, nullptr, &fence);
864 return fence;
865}
866
/* Track `present_fence` as in-flight for `swapchain`, then recycle any fences
 * from earlier presents that have since been signaled by the device. */
void GHOST_ContextVK::setPresentFence(VkSwapchainKHR swapchain, VkFence present_fence)
{
  if (present_fence == VK_NULL_HANDLE) {
    return;
  }
  present_fences_[swapchain].push_back(present_fence);
  GHOST_DeviceVK &device_vk = vulkan_instance->device.value();
  /* Sweep every per-swapchain list: signaled fences are reset and moved back
   * onto `fence_pile_` so `getFence` can reuse them; unsignaled ones stay. */
  for (std::pair<const VkSwapchainKHR, std::vector<VkFence>> &item : present_fences_) {
    std::vector<VkFence>::iterator end = item.second.end();
    std::vector<VkFence>::iterator it = std::remove_if(
        item.second.begin(), item.second.end(), [&](const VkFence fence) {
          if (vkGetFenceStatus(device_vk.vk_device, fence) == VK_NOT_READY) {
            return false;
          }
          vkResetFences(device_vk.vk_device, 1, &fence);
          fence_pile_.push_back(fence);
          return true;
        });
    item.second.erase(it, end);
  }
}
889
891{
892 /* Minimized windows don't have a swapchain and swapchain image. In this case we perform the draw
893 * to release render graph and discarded resources. */
894 if (swapchain_ == VK_NULL_HANDLE) {
895 GHOST_VulkanSwapChainData swap_chain_data = {};
896 if (swap_buffer_draw_callback_) {
897 swap_buffer_draw_callback_(&swap_chain_data);
898 }
899 return GHOST_kSuccess;
900 }
901
902 if (!acquired_swapchain_image_index_.has_value()) {
903 assert(false);
904 return GHOST_kFailure;
905 }
906 GHOST_DeviceVK &device_vk = vulkan_instance->device.value();
907 VkDevice vk_device = device_vk.vk_device;
908
909 uint32_t image_index = acquired_swapchain_image_index_.value();
910 GHOST_SwapchainImage &swapchain_image = swapchain_images_[image_index];
911 GHOST_Frame &submission_frame_data = frame_data_[render_frame_];
912 const bool use_hdr_swapchain = hdr_info_ && hdr_info_->hdr_enabled &&
914
915 GHOST_VulkanSwapChainData swap_chain_data;
916 swap_chain_data.image = swapchain_image.vk_image;
917 swap_chain_data.surface_format = surface_format_;
918 swap_chain_data.extent = render_extent_;
919 swap_chain_data.submission_fence = submission_frame_data.submission_fence;
920 swap_chain_data.acquire_semaphore = submission_frame_data.acquire_semaphore;
921 swap_chain_data.present_semaphore = swapchain_image.present_semaphore;
922 swap_chain_data.sdr_scale = (hdr_info_) ? hdr_info_->sdr_white_level : 1.0f;
923
924 vkResetFences(vk_device, 1, &submission_frame_data.submission_fence);
925 if (swap_buffer_draw_callback_) {
926 swap_buffer_draw_callback_(&swap_chain_data);
927 }
928
929 VkPresentInfoKHR present_info = {};
930 present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
931 present_info.waitSemaphoreCount = 1;
932 present_info.pWaitSemaphores = &swapchain_image.present_semaphore;
933 present_info.swapchainCount = 1;
934 present_info.pSwapchains = &swapchain_;
935 present_info.pImageIndices = &image_index;
936 present_info.pResults = nullptr;
937
938 VkResult present_result = VK_SUCCESS;
939 {
940 std::scoped_lock lock(device_vk.queue_mutex);
941 VkSwapchainPresentFenceInfoEXT fence_info{VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_FENCE_INFO_EXT};
942 VkFence present_fence = VK_NULL_HANDLE;
944 present_fence = this->getFence();
945
946 fence_info.swapchainCount = 1;
947 fence_info.pFences = &present_fence;
948
949 present_info.pNext = &fence_info;
950 }
951 present_result = vkQueuePresentKHR(device_vk.generic_queue, &present_info);
952 this->setPresentFence(swapchain_, present_fence);
953 }
954 acquired_swapchain_image_index_.reset();
955
956 if (present_result == VK_ERROR_OUT_OF_DATE_KHR || present_result == VK_SUBOPTIMAL_KHR) {
957 recreateSwapchain(use_hdr_swapchain);
958 return GHOST_kSuccess;
959 }
960 if (present_result != VK_SUCCESS) {
962 "Vulkan: failed to present swap-chain image : %s",
963 vulkan_error_as_string(present_result));
964 return GHOST_kFailure;
965 }
966
967 return GHOST_kSuccess;
968}
969
/* GHOST_ContextVK::getVulkanSwapChainFormat: report the current swap-chain
 * surface description without acquiring an image.
 *
 * Fills \a r_swap_chain_data with the cached surface format, render extent and
 * SDR white-level scale; the image handle is intentionally left null because
 * no swap-chain image is acquired here.
 *
 * NOTE(review): the opening signature line was lost in extraction (Doxygen
 * line 970); only the trailing parameter of the signature is visible below. */
 971     GHOST_VulkanSwapChainData *r_swap_chain_data)
 972{
  /* No image is acquired by this query; callers must not use `image`. */
 973  r_swap_chain_data->image = VK_NULL_HANDLE;
 974  r_swap_chain_data->surface_format = surface_format_;
 975  r_swap_chain_data->extent = render_extent_;
  /* SDR white level only applies when HDR metadata is available; 1.0 otherwise. */
 976  r_swap_chain_data->sdr_scale = (hdr_info_) ? hdr_info_->sdr_white_level : 1.0f;
 977
 978  return GHOST_kSuccess;
 979}
980
/* GHOST_ContextVK::getVulkanHandles: expose the shared Vulkan objects
 * (instance, physical device, device, generic queue + its family, the queue
 * mutex and the VMA allocator) to the GPU backend.
 *
 * All handles are null/zero when no instance or device has been created yet;
 * always returns GHOST_kSuccess, so callers must check the handles themselves.
 *
 * NOTE(review): the signature line was lost in extraction (Doxygen line 981). */
 982{
  /* Default to empty handles so callers get a well-defined result even before
   * initializeDrawingContext() has run. */
 983  r_handles = {
 984      VK_NULL_HANDLE, /* instance */
 985      VK_NULL_HANDLE, /* physical_device */
 986      VK_NULL_HANDLE, /* device */
 987      0,              /* queue_family */
 988      VK_NULL_HANDLE, /* queue */
 989      nullptr,        /* queue_mutex */
 990      VK_NULL_HANDLE, /* vma_allocator */
 991  };
 992
  /* Only fill in real handles when both the instance and device exist. */
 993  if (vulkan_instance.has_value() && vulkan_instance.value().device.has_value()) {
 994    GHOST_InstanceVK &instance_vk = vulkan_instance.value();
 995    GHOST_DeviceVK &device_vk = instance_vk.device.value();
 996    r_handles = {
 997        instance_vk.vk_instance,
 998        device_vk.vk_physical_device,
 999        device_vk.vk_device,
1000        device_vk.generic_queue_family,
1001        device_vk.generic_queue,
1002        &device_vk.queue_mutex,
1003        device_vk.vma_allocator,
1004    };
1005  }
1006
1007  return GHOST_kSuccess;
1008}
1009
/* GHOST_ContextVK::setVulkanSwapBuffersCallbacks: store the GPU-backend
 * callbacks invoked around swap-buffer and OpenXR frame-buffer handling.
 * The callbacks are copied into member storage; always succeeds.
 *
 * NOTE(review): the opening signature line was lost in extraction (Doxygen
 * line 1010); the parameter list below belongs to it. */
1011    std::function<void(const GHOST_VulkanSwapChainData *)> swap_buffer_draw_callback,
1012    std::function<void(void)> swap_buffer_acquired_callback,
1013    std::function<void(GHOST_VulkanOpenXRData *)> openxr_acquire_framebuffer_image_callback,
1014    std::function<void(GHOST_VulkanOpenXRData *)> openxr_release_framebuffer_image_callback)
1015{
1016  swap_buffer_draw_callback_ = swap_buffer_draw_callback;
1017  swap_buffer_acquired_callback_ = swap_buffer_acquired_callback;
1018  openxr_acquire_framebuffer_image_callback_ = openxr_acquire_framebuffer_image_callback;
1019  openxr_release_framebuffer_image_callback_ = openxr_release_framebuffer_image_callback;
1020  return GHOST_kSuccess;
1021}
1022
1028
1034
/* selectPresentMode: choose a VkPresentModeKHR for the surface.
 *
 * Priority: IMMEDIATE when V-Sync was explicitly turned off (falls through
 * with a warning if unsupported), then MAILBOX if available, else FIFO which
 * the Vulkan spec guarantees to exist. Result is written to *r_presentMode.
 *
 * NOTE(review): the first signature line (`static GHOST_TSuccess
 * selectPresentMode(const GHOST_TVSyncModes vsync,`) was lost in extraction
 * (Doxygen line 1035). */
1036                                          VkPhysicalDevice device,
1037                                          VkSurfaceKHR surface,
1038                                          VkPresentModeKHR *r_presentMode)
1039{
  /* Enumerate the present modes supported for this device/surface pair. */
1040  uint32_t present_count;
1041  vkGetPhysicalDeviceSurfacePresentModesKHR(device, surface, &present_count, nullptr);
1042  vector<VkPresentModeKHR> presents(present_count);
1043  vkGetPhysicalDeviceSurfacePresentModesKHR(device, surface, &present_count, presents.data());
1044
  /* Explicit V-Sync-off request: IMMEDIATE is the only tear-allowed mode. */
1045  if (vsync != GHOST_kVSyncModeUnset) {
1046    const bool vsync_off = (vsync == GHOST_kVSyncModeOff);
1047    if (vsync_off) {
1048      for (auto present_mode : presents) {
1049        if (present_mode == VK_PRESENT_MODE_IMMEDIATE_KHR) {
1050          *r_presentMode = present_mode;
1051          return GHOST_kSuccess;
1052        }
1053      }
1054      CLOG_WARN(&LOG,
1055                "Vulkan: VSync off was requested via --gpu-vsync, "
1056                "but VK_PRESENT_MODE_IMMEDIATE_KHR is not supported.");
1057    }
1058  }
1059
1060  /* MAILBOX is the lowest latency V-Sync enabled mode. We will use it if available as it fixes
1061   * some lag on NVIDIA/Intel GPUs. */
1062  /* TODO: select the correct presentation mode based on the actual being performed by the user.
1063   * When low latency is required (paint cursor) we should select mailbox, otherwise we can do FIFO
1064   * to reduce CPU/GPU usage. */
1065  for (auto present_mode : presents) {
1066    if (present_mode == VK_PRESENT_MODE_MAILBOX_KHR) {
1067      *r_presentMode = present_mode;
1068      return GHOST_kSuccess;
1069    }
1070  }
1071
1072  /* FIFO present mode is always available and we (should) prefer it as it will keep the main loop
1073   * running along the monitor refresh rate. Mailbox and FIFO relaxed can generate a lot of frames
1074   * that will never be displayed. */
1075  *r_presentMode = VK_PRESENT_MODE_FIFO_KHR;
1076  return GHOST_kSuccess;
1077}
1078
1084static bool selectSurfaceFormat(const VkPhysicalDevice physical_device,
1085 const VkSurfaceKHR surface,
1086 bool use_hdr_swapchain,
1087 VkSurfaceFormatKHR &r_surfaceFormat)
1088{
1089 uint32_t format_count;
1090 vkGetPhysicalDeviceSurfaceFormatsKHR(physical_device, surface, &format_count, nullptr);
1091 vector<VkSurfaceFormatKHR> formats(format_count);
1092 vkGetPhysicalDeviceSurfaceFormatsKHR(physical_device, surface, &format_count, formats.data());
1093
1094 array<pair<VkColorSpaceKHR, VkFormat>, 4> selection_order = {
1095 make_pair(VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT, VK_FORMAT_R16G16B16A16_SFLOAT),
1096 make_pair(VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, VK_FORMAT_R16G16B16A16_SFLOAT),
1097 make_pair(VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, VK_FORMAT_R8G8B8A8_UNORM),
1098 make_pair(VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, VK_FORMAT_B8G8R8A8_UNORM),
1099 };
1100
1101 for (pair<VkColorSpaceKHR, VkFormat> &pair : selection_order) {
1102 if (pair.second == VK_FORMAT_R16G16B16A16_SFLOAT && !use_hdr_swapchain) {
1103 continue;
1104 }
1105 for (const VkSurfaceFormatKHR &format : formats) {
1106 if (format.colorSpace == pair.first && format.format == pair.second) {
1107 r_surfaceFormat = format;
1108 return true;
1109 }
1110 }
1111 }
1112
1113 return false;
1114}
1115
/* GHOST_ContextVK::initializeFrameData: create the per-swapchain-image present
 * semaphores and the per-frame acquire semaphore + submission fence.
 *
 * Only slots that are still VK_NULL_HANDLE are filled; existing handles are
 * kept (VK_EXT_swapchain_maintenance1 reuses them across swap-chain
 * recreation). Fences are created pre-signaled so the first frame does not
 * block on a fence that was never submitted.
 *
 * NOTE(review): the second (fail-value) argument of each VK_CHECK below was
 * lost in extraction (Doxygen lines 1129/1139/1143). */
1116GHOST_TSuccess GHOST_ContextVK::initializeFrameData()
1117{
1118  VkDevice device = vulkan_instance.value().device.value().vk_device;
1119
1120  const VkSemaphoreCreateInfo vk_semaphore_create_info = {
1121      VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, nullptr, 0};
  /* Pre-signaled so the first wait on a fresh fence succeeds immediately. */
1122  const VkFenceCreateInfo vk_fence_create_info = {
1123      VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, VK_FENCE_CREATE_SIGNALED_BIT};
1124  for (GHOST_SwapchainImage &swapchain_image : swapchain_images_) {
1125    /* VK_EXT_swapchain_maintenance1 reuses present semaphores. */
1126    if (swapchain_image.present_semaphore == VK_NULL_HANDLE) {
1127      VK_CHECK(vkCreateSemaphore(
1128                   device, &vk_semaphore_create_info, nullptr, &swapchain_image.present_semaphore),
1130    }
1131  }
1132
1133  for (int index = 0; index < frame_data_.size(); index++) {
1134    GHOST_Frame &frame_data = frame_data_[index];
1135    /* VK_EXT_swapchain_maintenance1 reuses acquire semaphores. */
1136    if (frame_data.acquire_semaphore == VK_NULL_HANDLE) {
1137      VK_CHECK(vkCreateSemaphore(
1138                   device, &vk_semaphore_create_info, nullptr, &frame_data.acquire_semaphore),
1140    }
1141    if (frame_data.submission_fence == VK_NULL_HANDLE) {
1142      VK_CHECK(vkCreateFence(device, &vk_fence_create_info, nullptr, &frame_data.submission_fence),
1144    }
1145  }
1146
1147  return GHOST_kSuccess;
1148}
1149
/* GHOST_ContextVK::recreateSwapchain: (re)build the swap-chain for the current
 * surface.
 *
 * Steps: pick surface format and present mode, query surface capabilities
 * (via VK_EXT_surface_maintenance1 when available), determine the render
 * extent, push the old swap-chain's semaphores/handle onto the current frame's
 * discard pile, create the new swap-chain, fetch its images and grow the
 * frame-data array if the platform returned more images than requested, and
 * finally recreate any missing per-frame sync objects via
 * initializeFrameData().
 *
 * Returns GHOST_kFailure when no format/present-mode can be selected or the
 * render extent is zero (swap-chain creation forbidden by
 * VUID-VkSwapchainCreateInfoKHR-imageExtent-01689).
 *
 * NOTE(review): several lines were lost in extraction — the head of the
 * `if (!selectSurfaceFormat(` call (Doxygen line 1156) and the fail-value
 * arguments of the VK_CHECK calls (lines 1185/1191/1318). */
1150GHOST_TSuccess GHOST_ContextVK::recreateSwapchain(bool use_hdr_swapchain)
1151{
1152  GHOST_InstanceVK &instance_vk = vulkan_instance.value();
1153  GHOST_DeviceVK &device_vk = instance_vk.device.value();
1154
  /* Select surface format; bail out when nothing suitable is supported. */
1155  surface_format_ = {};
1157          device_vk.vk_physical_device, surface_, use_hdr_swapchain, surface_format_))
1158  {
1159    return GHOST_kFailure;
1160  }
1161
1162  VkPresentModeKHR present_mode;
1163  if (!selectPresentMode(getVSync(), device_vk.vk_physical_device, surface_, &present_mode)) {
1164    return GHOST_kFailure;
1165  }
1166
1167  /* Query the surface capabilities for the given present mode on the surface. */
1168  VkSurfacePresentScalingCapabilitiesEXT vk_surface_present_scaling_capabilities = {
1169      VK_STRUCTURE_TYPE_SURFACE_PRESENT_SCALING_CAPABILITIES_EXT,
1170  };
1171  VkSurfaceCapabilities2KHR vk_surface_capabilities = {
1172      VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR,
1173      &vk_surface_present_scaling_capabilities,
1174  };
1175  VkSurfacePresentModeEXT vk_surface_present_mode = {
1176      VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_EXT, nullptr, present_mode};
1177  VkPhysicalDeviceSurfaceInfo2KHR vk_physical_device_surface_info = {
1178      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR, &vk_surface_present_mode, surface_};
1179  VkSurfaceCapabilitiesKHR capabilities = {};
1180
  /* The 2KHR query is only valid when surface_maintenance1 is in use. */
1181  if (device_vk.use_vk_ext_swapchain_maintenance_1) {
1182    VK_CHECK(vkGetPhysicalDeviceSurfaceCapabilities2KHR(device_vk.vk_physical_device,
1183                                                        &vk_physical_device_surface_info,
1184                                                        &vk_surface_capabilities),
1186    capabilities = vk_surface_capabilities.surfaceCapabilities;
1187  }
1188  else {
1189    VK_CHECK(vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
1190                 device_vk.vk_physical_device, surface_, &capabilities),
1192  }
1193
1194  use_hdr_swapchain_ = use_hdr_swapchain;
1195  render_extent_ = capabilities.currentExtent;
1196  render_extent_min_ = capabilities.minImageExtent;
  /* UINT32_MAX means the surface size is determined by the swap-chain. */
1197  if (render_extent_.width == UINT32_MAX) {
1198    /* Window Manager is going to set the surface size based on the given size.
1199     * Choose something between minImageExtent and maxImageExtent. */
1200    int width = 0;
1201    int height = 0;
1202
1203#ifdef WITH_GHOST_WAYLAND
1204    /* Wayland doesn't provide a windowing API via WSI. */
1205    if (wayland_window_info_) {
1206      width = wayland_window_info_->size[0];
1207      height = wayland_window_info_->size[1];
1208    }
1209#endif
1210
1211    if (width == 0 || height == 0) {
1212      width = 1280;
1213      height = 720;
1214    }
1215
1216    render_extent_.width = width;
1217    render_extent_.height = height;
1218
1219    if (capabilities.minImageExtent.width > render_extent_.width) {
1220      render_extent_.width = capabilities.minImageExtent.width;
1221    }
1222    if (capabilities.minImageExtent.height > render_extent_.height) {
1223      render_extent_.height = capabilities.minImageExtent.height;
1224    }
1225  }
1226
  /* Clamp against the scaled-extent minimums reported by surface_maintenance1. */
1227  if (device_vk.use_vk_ext_swapchain_maintenance_1) {
1228    if (vk_surface_present_scaling_capabilities.minScaledImageExtent.width > render_extent_.width)
1229    {
1230      render_extent_.width = vk_surface_present_scaling_capabilities.minScaledImageExtent.width;
1231    }
1232    if (vk_surface_present_scaling_capabilities.minScaledImageExtent.height >
1233        render_extent_.height)
1234    {
1235      render_extent_.height = vk_surface_present_scaling_capabilities.minScaledImageExtent.height;
1236    }
1237  }
1238
1239  /* Discard swapchain resources of current swapchain. */
1240  GHOST_FrameDiscard &discard_pile = frame_data_[render_frame_].discard_pile;
1241  for (GHOST_SwapchainImage &swapchain_image : swapchain_images_) {
1242    swapchain_image.vk_image = VK_NULL_HANDLE;
1243    if (swapchain_image.present_semaphore != VK_NULL_HANDLE) {
1244      discard_pile.semaphores.push_back(swapchain_image.present_semaphore);
1245      swapchain_image.present_semaphore = VK_NULL_HANDLE;
1246    }
1247  }
1248
1249  /* Swap-chains with out any resolution should not be created. In the case the render extent is
1250   * zero we should not use the swap-chain.
1251   *
1252   * VUID-VkSwapchainCreateInfoKHR-imageExtent-01689
1253   */
1254  if (render_extent_.width == 0 || render_extent_.height == 0) {
1255    if (swapchain_) {
1256      discard_pile.swapchains.push_back(swapchain_);
1257      swapchain_ = VK_NULL_HANDLE;
1258    }
1259    return GHOST_kFailure;
1260  }
1261
1262  /* Use double buffering when using FIFO. Increasing the number of images could stall when doing
1263   * actions that require low latency (paint cursor, UI resizing). MAILBOX prefers triple
1264   * buffering. */
1265  uint32_t image_count_requested = present_mode == VK_PRESENT_MODE_MAILBOX_KHR ? 3 : 2;
1266  /* NOTE: maxImageCount == 0 means no limit. */
1267  if (capabilities.minImageCount != 0 && image_count_requested < capabilities.minImageCount) {
1268    image_count_requested = capabilities.minImageCount;
1269  }
1270  if (capabilities.maxImageCount != 0 && image_count_requested > capabilities.maxImageCount) {
1271    image_count_requested = capabilities.maxImageCount;
1272  }
1273
1274  VkSwapchainKHR old_swapchain = swapchain_;
1275
1276  /* First time we stretch the swapchain image as it can happen that the first frame size isn't
1277   * correctly reported by the initial swapchain. All subsequent creations will use one to one as
1278   * that can reduce resizing artifacts. */
1279  VkPresentScalingFlagBitsEXT vk_present_scaling = old_swapchain == VK_NULL_HANDLE ?
1280                                                       VK_PRESENT_SCALING_STRETCH_BIT_EXT :
1281                                                       VK_PRESENT_SCALING_ONE_TO_ONE_BIT_EXT;
1282
1283  VkSwapchainPresentModesCreateInfoEXT vk_swapchain_present_modes = {
1284      VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODES_CREATE_INFO_EXT, nullptr, 1, &present_mode};
1285  VkSwapchainPresentScalingCreateInfoEXT vk_swapchain_present_scaling = {
1286      VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_SCALING_CREATE_INFO_EXT,
1287      &vk_swapchain_present_modes,
1288      vk_surface_present_scaling_capabilities.supportedPresentScaling & vk_present_scaling,
1289      vk_surface_present_scaling_capabilities.supportedPresentGravityX &
1290          VK_PRESENT_GRAVITY_MIN_BIT_EXT,
1291      vk_surface_present_scaling_capabilities.supportedPresentGravityY &
1292          VK_PRESENT_GRAVITY_MAX_BIT_EXT,
1293  };
1294
1295  VkSwapchainCreateInfoKHR create_info = {};
1296  create_info.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
1297  if (device_vk.use_vk_ext_swapchain_maintenance_1) {
1298    create_info.pNext = &vk_swapchain_present_scaling;
1299  }
1300  create_info.surface = surface_;
1301  create_info.minImageCount = image_count_requested;
1302  create_info.imageFormat = surface_format_.format;
1303  create_info.imageColorSpace = surface_format_.colorSpace;
1304  create_info.imageExtent = render_extent_;
1305  create_info.imageArrayLayers = 1;
1306  create_info.imageUsage = VK_IMAGE_USAGE_TRANSFER_DST_BIT |
1307                           (use_hdr_swapchain ? VK_IMAGE_USAGE_STORAGE_BIT : 0);
1308  create_info.preTransform = capabilities.currentTransform;
1309  create_info.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
1310  create_info.presentMode = present_mode;
1311  create_info.clipped = VK_TRUE;
1312  create_info.oldSwapchain = old_swapchain;
1313  create_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
1314  create_info.queueFamilyIndexCount = 0;
1315  create_info.pQueueFamilyIndices = nullptr;
1316
1317  VK_CHECK(vkCreateSwapchainKHR(device_vk.vk_device, &create_info, nullptr, &swapchain_),
1319
1320  /* image_count may not be what we requested! Getter for final value. */
1321  uint32_t actual_image_count = 0;
1322  vkGetSwapchainImagesKHR(device_vk.vk_device, swapchain_, &actual_image_count, nullptr);
1323  /* Some platforms require a minimum amount of render frames that is larger than we expect. When
1324   * that happens we should increase the number of frames in flight. We could also consider
1325   * splitting the frame in flight and image specific data. */
1326  if (actual_image_count > frame_data_.size()) {
1327    CLOG_TRACE(&LOG, "Vulkan: Increasing frame data to %u frames", actual_image_count);
1328    assert(actual_image_count <= frame_data_.capacity());
1329    frame_data_.resize(actual_image_count);
1330  }
1331  swapchain_images_.resize(actual_image_count);
1332  std::vector<VkImage> swapchain_images(actual_image_count);
1333  vkGetSwapchainImagesKHR(
1334      device_vk.vk_device, swapchain_, &actual_image_count, swapchain_images.data());
1335  for (int index = 0; index < actual_image_count; index++) {
1336    swapchain_images_[index].vk_image = swapchain_images[index];
1337  }
1338  CLOG_DEBUG(&LOG,
1339             "Vulkan: recreating swapchain: width=%u, height=%u, format=%d, colorSpace=%d, "
1340             "present_mode=%d, image_count_requested=%u, image_count_acquired=%u, swapchain=%lx, "
1341             "old_swapchain=%lx",
1342             render_extent_.width,
1343             render_extent_.height,
1344             surface_format_.format,
1345             surface_format_.colorSpace,
1346             present_mode,
1347             image_count_requested,
1348             actual_image_count,
1349             uint64_t(swapchain_),
1350             uint64_t(old_swapchain));
1351  /* Construct new semaphores. It can be that image_count is larger than previously. We only need
1352   * to fill in where the handle is `VK_NULL_HANDLE`. */
1353  /* Previous handles from the frame data cannot be used and should be discarded. */
1354  for (GHOST_Frame &frame : frame_data_) {
1355    if (frame.acquire_semaphore != VK_NULL_HANDLE) {
1356      discard_pile.semaphores.push_back(frame.acquire_semaphore);
1357    }
1358    frame.acquire_semaphore = VK_NULL_HANDLE;
1359  }
1360  if (old_swapchain) {
1361    discard_pile.swapchains.push_back(old_swapchain);
1362  }
1363  initializeFrameData();
1364
1365  image_count_ = actual_image_count;
1366
1367  return GHOST_kSuccess;
1368}
1369
1370void GHOST_ContextVK::destroySwapchainPresentFences(VkSwapchainKHR swapchain)
1371{
1372 GHOST_DeviceVK &device_vk = vulkan_instance.value().device.value();
1373 const std::vector<VkFence> &fences = present_fences_[swapchain];
1374 if (!fences.empty()) {
1375 vkWaitForFences(device_vk.vk_device, fences.size(), fences.data(), VK_TRUE, UINT64_MAX);
1376 for (VkFence fence : fences) {
1377 vkDestroyFence(device_vk.vk_device, fence, nullptr);
1378 }
1379 }
1380 present_fences_.erase(swapchain);
1381}
1382
1383GHOST_TSuccess GHOST_ContextVK::destroySwapchain()
1384{
1385 GHOST_DeviceVK &device_vk = vulkan_instance.value().device.value();
1386
1387 if (swapchain_ != VK_NULL_HANDLE) {
1388 this->destroySwapchainPresentFences(swapchain_);
1389 vkDestroySwapchainKHR(device_vk.vk_device, swapchain_, nullptr);
1390 }
1391 device_vk.wait_idle();
1392 for (GHOST_SwapchainImage &swapchain_image : swapchain_images_) {
1393 swapchain_image.destroy(device_vk.vk_device);
1394 }
1395 swapchain_images_.clear();
1396 for (GHOST_Frame &frame_data : frame_data_) {
1397 for (VkSwapchainKHR swapchain : frame_data.discard_pile.swapchains) {
1398 this->destroySwapchainPresentFences(swapchain);
1399 }
1400 frame_data.destroy(device_vk.vk_device);
1401 }
1402 frame_data_.clear();
1403
1404 return GHOST_kSuccess;
1405}
1406
/* GHOST_ContextVK::getPlatformSpecificSurfaceExtension: name of the platform's
 * VK_KHR_surface companion extension (Win32/Metal/Xlib/Wayland), or nullptr
 * when the platform has none (e.g. headless).
 *
 * NOTE(review): the label line before the trailing `break;` (Doxygen line
 * 1425, presumably `default:`) was lost in extraction. */
1407const char *GHOST_ContextVK::getPlatformSpecificSurfaceExtension() const
1408{
1409#ifdef _WIN32
1410  return VK_KHR_WIN32_SURFACE_EXTENSION_NAME;
1411#elif defined(__APPLE__)
1412  return VK_EXT_METAL_SURFACE_EXTENSION_NAME;
1413#else /* UNIX/Linux */
1414  switch (platform_) {
1415#  ifdef WITH_GHOST_X11
1416    case GHOST_kVulkanPlatformX11:
1417      return VK_KHR_XLIB_SURFACE_EXTENSION_NAME;
1418      break;
1419#  endif
1420#  ifdef WITH_GHOST_WAYLAND
1421    case GHOST_kVulkanPlatformWayland:
1422      return VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME;
1423      break;
1424#  endif
1426      break;
1427  }
1428#endif
  /* No window surface extension for this platform. */
1429  return nullptr;
1430}
1431
/* GHOST_ContextVK::initializeDrawingContext: create (or reuse) the shared
 * Vulkan instance and device and, when a native window was supplied, the
 * platform VkSurfaceKHR for this context.
 *
 * The instance and device live in the file-static `vulkan_instance` optional
 * and are shared by all contexts; `device_vk.users` is incremented for this
 * context. Returns GHOST_kFailure when instance creation, physical-device
 * selection or device creation fails.
 *
 * NOTE(review): extraction lost the signature line (Doxygen line 1432), the
 * case labels before two bare `break;`/fallback statements (lines 1452/1545,
 * presumably `default:` or the headless case) and the fail-value arguments of
 * the surface-creation VK_CHECK calls (lines 1510/1518/1529/1541). */
1433{
1434  bool use_vk_ext_swapchain_colorspace = false;
  /* Decide whether a native window surface is available on this platform. */
1435#ifdef _WIN32
1436  const bool use_window_surface = (hwnd_ != nullptr);
1437#elif defined(__APPLE__)
1438  const bool use_window_surface = (metal_layer_ != nullptr);
1439#else /* UNIX/Linux */
1440  bool use_window_surface = false;
1441  switch (platform_) {
1442#  ifdef WITH_GHOST_X11
1443    case GHOST_kVulkanPlatformX11:
1444      use_window_surface = (display_ != nullptr) && (window_ != (Window) nullptr);
1445      break;
1446#  endif
1447#  ifdef WITH_GHOST_WAYLAND
1448    case GHOST_kVulkanPlatformWayland:
1449      use_window_surface = (wayland_display_ != nullptr) && (wayland_surface_ != nullptr);
1450      break;
1451#  endif
1453      use_window_surface = false;
1454      break;
1455  }
1456#endif
1457
1458  vector<const char *> required_device_extensions;
1459  vector<const char *> optional_device_extensions;
1460
1461  /* Initialize VkInstance */
1462  if (!vulkan_instance.has_value()) {
1463    vulkan_instance.emplace();
1464    GHOST_InstanceVK &instance_vk = vulkan_instance.value();
1465    if (context_params_.is_debug) {
1466      instance_vk.extensions.enable(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, true);
1467    }
1468
1469    if (use_window_surface) {
1470      const char *native_surface_extension_name = getPlatformSpecificSurfaceExtension();
1471      instance_vk.extensions.enable(VK_KHR_SURFACE_EXTENSION_NAME);
1472      instance_vk.extensions.enable(native_surface_extension_name);
1473      /* X11 doesn't use the correct swapchain offset, flipping can squash the first frames. */
1474      const bool use_vk_ext_swapchain_maintenance1 =
1475#ifdef WITH_GHOST_X11
1476          platform_ != GHOST_kVulkanPlatformX11 &&
1477#endif
1478          instance_vk.extensions.is_supported(VK_EXT_SURFACE_MAINTENANCE_1_EXTENSION_NAME) &&
1479          instance_vk.extensions.is_supported(VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME);
1480      if (use_vk_ext_swapchain_maintenance1) {
1481        instance_vk.extensions.enable(VK_EXT_SURFACE_MAINTENANCE_1_EXTENSION_NAME);
1482        instance_vk.extensions.enable(VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME);
1483        optional_device_extensions.push_back(VK_EXT_SWAPCHAIN_MAINTENANCE_1_EXTENSION_NAME);
1484      }
1485
1486      use_vk_ext_swapchain_colorspace = instance_vk.extensions.enable(
1487          VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME, true);
1488
1489      required_device_extensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
1490    }
1491
1492    if (!instance_vk.create_instance(
1493            VK_MAKE_VERSION(context_major_version_, context_minor_version_, 0)))
1494    {
      /* Roll back the half-initialized instance so a later call can retry. */
1495      vulkan_instance.reset();
1496      return GHOST_kFailure;
1497    }
1498  }
1499  GHOST_InstanceVK &instance_vk = vulkan_instance.value();
1500
1501  /* Initialize VkSurface */
1502  if (use_window_surface) {
1503#ifdef _WIN32
1504    VkWin32SurfaceCreateInfoKHR surface_create_info = {};
1505    surface_create_info.sType = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR;
1506    surface_create_info.hinstance = GetModuleHandle(nullptr);
1507    surface_create_info.hwnd = hwnd_;
1508    VK_CHECK(
1509        vkCreateWin32SurfaceKHR(instance_vk.vk_instance, &surface_create_info, nullptr, &surface_),
1511#elif defined(__APPLE__)
1512    VkMetalSurfaceCreateInfoEXT info = {};
1513    info.sType = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT;
1514    info.pNext = nullptr;
1515    info.flags = 0;
1516    info.pLayer = metal_layer_;
1517    VK_CHECK(vkCreateMetalSurfaceEXT(instance_vk.vk_instance, &info, nullptr, &surface_),
1519#else
1520    switch (platform_) {
1521#  ifdef WITH_GHOST_X11
1522      case GHOST_kVulkanPlatformX11: {
1523        VkXlibSurfaceCreateInfoKHR surface_create_info = {};
1524        surface_create_info.sType = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR;
1525        surface_create_info.dpy = display_;
1526        surface_create_info.window = window_;
1527        VK_CHECK(vkCreateXlibSurfaceKHR(
1528                     instance_vk.vk_instance, &surface_create_info, nullptr, &surface_),
1530        break;
1531      }
1532#  endif
1533#  ifdef WITH_GHOST_WAYLAND
1534      case GHOST_kVulkanPlatformWayland: {
1535        VkWaylandSurfaceCreateInfoKHR surface_create_info = {};
1536        surface_create_info.sType = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR;
1537        surface_create_info.display = wayland_display_;
1538        surface_create_info.surface = wayland_surface_;
1539        VK_CHECK(vkCreateWaylandSurfaceKHR(
1540                     instance_vk.vk_instance, &surface_create_info, nullptr, &surface_),
1542        break;
1543      }
1544#  endif
1546        surface_ = VK_NULL_HANDLE;
1547        break;
1548    }
1549  }
1550
1551#endif
1552  }
1553
1554  /* Initialize VkDevice */
1555  if (!vulkan_instance->device.has_value()) {
1556    /* External memory extensions. */
1557#ifdef _WIN32
1558    optional_device_extensions.push_back(VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME);
1559#else
1560    optional_device_extensions.push_back(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME);
1561#endif
1562
1563    required_device_extensions.push_back(VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME);
1564    required_device_extensions.push_back(VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME);
1565    optional_device_extensions.push_back(VK_KHR_DYNAMIC_RENDERING_LOCAL_READ_EXTENSION_NAME);
1566    optional_device_extensions.push_back(
1567        VK_EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_EXTENSION_NAME);
1568    optional_device_extensions.push_back(VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME);
1569    optional_device_extensions.push_back(VK_KHR_MAINTENANCE_4_EXTENSION_NAME);
1570    optional_device_extensions.push_back(VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME);
1571    optional_device_extensions.push_back(VK_EXT_ROBUSTNESS_2_EXTENSION_NAME);
1572    optional_device_extensions.push_back(VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME);
1573    optional_device_extensions.push_back(VK_EXT_DESCRIPTOR_BUFFER_EXTENSION_NAME);
1574    optional_device_extensions.push_back(VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME);
1575    optional_device_extensions.push_back(VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_EXTENSION_NAME);
1576
1577    if (!instance_vk.select_physical_device(preferred_device_, required_device_extensions)) {
1578      return GHOST_kFailure;
1579    }
1580
1581    if (!instance_vk.create_device(use_vk_ext_swapchain_colorspace,
1582                                   required_device_extensions,
1583                                   optional_device_extensions))
1584    {
1585      return GHOST_kFailure;
1586    }
1587  }
1588  GHOST_DeviceVK &device_vk = instance_vk.device.value();
1589
  /* Reference-count the shared device for this context. */
1590  device_vk.users++;
1591
  /* Extent/format start in a known state; recreateSwapchain() fills them in. */
1592  render_extent_ = {0, 0};
1593  render_extent_min_ = {0, 0};
1594  surface_format_ = {VK_FORMAT_R8G8B8A8_UNORM, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR};
1595
1596  active_context_ = this;
1597  return GHOST_kSuccess;
1598}
1599
#define CLOG_ERROR(clg_ref,...)
Definition CLG_log.h:188
#define CLOG_DEBUG(clg_ref,...)
Definition CLG_log.h:191
#define CLOG_WARN(clg_ref,...)
Definition CLG_log.h:189
@ CLG_LEVEL_ERROR
Definition CLG_log.h:56
@ CLG_LEVEL_TRACE
Definition CLG_log.h:64
#define CLOG_AT_LEVEL(clg_ref, verbose_level,...)
Definition CLG_log.h:150
#define CLOG_TRACE(clg_ref,...)
Definition CLG_log.h:192
static bool selectSurfaceFormat(const VkPhysicalDevice physical_device, const VkSurfaceKHR surface, bool use_hdr_swapchain, VkSurfaceFormatKHR &r_surfaceFormat)
static std::optional< GHOST_InstanceVK > vulkan_instance
#define VK_CHECK(__expression, fail_value)
static const char * vulkan_error_as_string(VkResult result)
static GHOST_TSuccess selectPresentMode(const GHOST_TVSyncModes vsync, VkPhysicalDevice device, VkSurfaceKHR surface, VkPresentModeKHR *r_presentMode)
#define FORMAT_ERROR(X)
static CLG_LogRef LOG
#define Window
GHOST_TVulkanPlatformType
@ GHOST_kVulkanPlatformHeadless
#define wl_display
#define Display
#define wl_surface
GHOST_TSuccess
Definition GHOST_Types.h:57
@ GHOST_kFailure
Definition GHOST_Types.h:57
@ GHOST_kSuccess
Definition GHOST_Types.h:57
GHOST_TVSyncModes
@ GHOST_kVSyncModeUnset
@ GHOST_kVSyncModeOff
volatile int lock
unsigned long long int uint64_t
GHOST_TSuccess swapBufferRelease() override
GHOST_TSuccess activateDrawingContext() override
GHOST_TSuccess getVulkanSwapChainFormat(GHOST_VulkanSwapChainData *r_swap_chain_data) override
GHOST_ContextVK(const GHOST_ContextParams &context_params, GHOST_TVulkanPlatformType platform, Window window, Display *display, wl_surface *wayland_surface, wl_display *wayland_display, const GHOST_ContextVK_WindowInfo *wayland_window_info, int contextMajorVersion, int contextMinorVersion, const GHOST_GPUDevice &preferred_device, const GHOST_WindowHDRInfo *hdr_info_=nullptr)
GHOST_TSuccess initializeDrawingContext() override
GHOST_TSuccess setVulkanSwapBuffersCallbacks(std::function< void(const GHOST_VulkanSwapChainData *)> swap_buffer_draw_callback, std::function< void(void)> swap_buffer_acquired_callback, std::function< void(GHOST_VulkanOpenXRData *)> openxr_acquire_framebuffer_image_callback, std::function< void(GHOST_VulkanOpenXRData *)> openxr_release_framebuffer_image_callback) override
GHOST_TSuccess getVulkanHandles(GHOST_VulkanHandles &r_handles) override
GHOST_TSuccess releaseDrawingContext() override
GHOST_TSuccess swapBufferAcquire() override
GHOST_TSuccess releaseNativeHandles() override
~GHOST_ContextVK() override
GHOST_Context(const GHOST_ContextParams &context_params)
static GHOST_Context * active_context_
virtual GHOST_TVSyncModes getVSync()
GHOST_ContextParams context_params_
VkPhysicalDevice vk_physical_device
VmaAllocator vma_allocator
void init_generic_queue_family()
VkPhysicalDeviceRobustness2FeaturesEXT features_robustness2
bool use_vk_ext_swapchain_maintenance_1
void init_memory_allocator(VkInstance vk_instance)
VkPhysicalDeviceFeatures2 features
uint32_t generic_queue_family
VkPhysicalDeviceVulkan12Properties properties_12
VkPhysicalDeviceVulkan12Features features_12
VkPhysicalDeviceProperties2 properties
GHOST_ExtensionsVK extensions
std::mutex queue_mutex
bool use_vk_ext_swapchain_colorspace
VkPhysicalDeviceVulkan11Features features_11
GHOST_DeviceVK(VkPhysicalDevice vk_physical_device, const bool use_vk_ext_swapchain_colorspace)
#define UINT64_MAX
#define UINT32_MAX
#define assert(assertion)
format
#define LOG(level)
Definition log.h:97
bool GPU_vulkan_is_supported_driver(VkPhysicalDevice vk_physical_device)
Definition vk_backend.cc:48
bool is_supported(const char *extension_name) const
bool is_enabled(const char *extension_name) const
vector< const char * > enabled
bool enable(const char *extension_name, bool optional=false)
vector< VkExtensionProperties > extensions
bool enable(const vector< const char * > &extension_names, bool optional=false)
bool is_supported(const vector< const char * > &extension_names)
std::vector< VkSwapchainKHR > swapchains
void destroy(VkDevice vk_device)
std::vector< VkSemaphore > semaphores
VkFence submission_fence
VkSemaphore acquire_semaphore
GHOST_FrameDiscard discard_pile
void destroy(VkDevice vk_device)
VkPhysicalDevice vk_physical_device
bool select_physical_device(const GHOST_GPUDevice &preferred_device, const vector< const char * > &required_extensions)
std::optional< GHOST_DeviceVK > device
GHOST_ExtensionsVK extensions
bool create_instance(uint32_t vulkan_api_version)
bool create_device(const bool use_vk_ext_swapchain_maintenance1, vector< const char * > &required_device_extensions, vector< const char * > &optional_device_extensions)
void destroy(VkDevice vk_device)
VkSemaphore present_semaphore
i
Definition text_draw.cc:230