Blender V5.0
GHOST_XrGraphicsBindingVulkan.cc
Go to the documentation of this file.
1/* SPDX-FileCopyrightText: 2025 Blender Authors
2 *
3 * SPDX-License-Identifier: GPL-2.0-or-later */
4
8
9#include <algorithm>
10#include <cstring>
11#include <sstream>
12
13#include "GHOST_ContextVK.hh"
14#include "GHOST_XrException.hh"
16#include "GHOST_Xr_intern.hh"
17
18#ifdef _WIN32
19# include <vulkan/vulkan_win32.h>
20#endif
21
/* Cached `XR_KHR_vulkan_enable2` entry points, resolved once via
 * `xrGetInstanceProcAddr` in `checkVersionRequirements` and reset to null in
 * the destructor. Static: shared by all binding instances. */
23PFN_xrGetVulkanGraphicsRequirements2KHR
24 GHOST_XrGraphicsBindingVulkan::s_xrGetVulkanGraphicsRequirements2KHR_fn = nullptr;
25PFN_xrGetVulkanGraphicsDevice2KHR
26 GHOST_XrGraphicsBindingVulkan::s_xrGetVulkanGraphicsDevice2KHR_fn = nullptr;
27PFN_xrCreateVulkanInstanceKHR GHOST_XrGraphicsBindingVulkan::s_xrCreateVulkanInstanceKHR_fn =
28 nullptr;
29PFN_xrCreateVulkanDeviceKHR GHOST_XrGraphicsBindingVulkan::s_xrCreateVulkanDeviceKHR_fn = nullptr;
30
31/* -------------------------------------------------------------------- */
34
39
41
42/* -------------------------------------------------------------------- */
45
/* Destructor body: releases every Vulkan object owned by the binding, in
 * reverse order of creation — staging buffer, imported per-view images and
 * their memory, the VMA allocator, command buffer/pool, then the logical
 * device and finally the instance. */
47{
48 /* Destroy buffer */
49 if (vk_buffer_ != VK_NULL_HANDLE) {
50 vmaUnmapMemory(vma_allocator_, vk_buffer_allocation_);
51 vmaDestroyBuffer(vma_allocator_, vk_buffer_, vk_buffer_allocation_);
52 vk_buffer_ = VK_NULL_HANDLE;
53 vk_buffer_allocation_ = VK_NULL_HANDLE;
54 }
55
/* Release images/memory imported from the OpenXR runtime (GPU transfer modes). */
56 for (ImportedMemory &imported_memory : imported_memory_) {
57 vkDestroyImage(vk_device_, imported_memory.vk_image_xr, nullptr);
58 vkFreeMemory(vk_device_, imported_memory.vk_device_memory_xr, nullptr);
59 }
60 imported_memory_.clear();
61
62 /* Destroy VMA */
63 if (vma_allocator_ != VK_NULL_HANDLE) {
64 vmaDestroyAllocator(vma_allocator_);
65 vma_allocator_ = VK_NULL_HANDLE;
66 }
67
68 /* Destroy command buffer */
69 if (vk_command_buffer_ != VK_NULL_HANDLE) {
70 vkFreeCommandBuffers(vk_device_, vk_command_pool_, 1, &vk_command_buffer_);
71 vk_command_buffer_ = VK_NULL_HANDLE;
72 }
73
74 /* Destroy command pool */
75 if (vk_command_pool_ != VK_NULL_HANDLE) {
76 vkDestroyCommandPool(vk_device_, vk_command_pool_, nullptr);
77 vk_command_pool_ = VK_NULL_HANDLE;
78 }
79
/* The queue is owned by the device; only the cached handle is cleared here. */
80 vk_queue_ = VK_NULL_HANDLE;
81
82 /* Destroy device */
83 if (vk_device_ != VK_NULL_HANDLE) {
84 vkDestroyDevice(vk_device_, nullptr);
85 vk_device_ = VK_NULL_HANDLE;
86 }
87
88 /* Destroy instance */
89 if (vk_instance_ != VK_NULL_HANDLE) {
90 vkDestroyInstance(vk_instance_, nullptr);
91 vk_instance_ = VK_NULL_HANDLE;
92 }
93
/* Reset cached OpenXR entry points so a future binding resolves them again.
 * NOTE(review): statics are cleared whenever any instance is destroyed —
 * assumes at most one binding is alive at a time; confirm. */
94 s_xrGetVulkanGraphicsRequirements2KHR_fn = nullptr;
95 s_xrGetVulkanGraphicsDevice2KHR_fn = nullptr;
96 s_xrCreateVulkanInstanceKHR_fn = nullptr;
97 s_xrCreateVulkanDeviceKHR_fn = nullptr;
98}
99
/* Check that the OpenXR runtime can work with the Vulkan context: resolves
 * (and caches in the `s_*_fn` statics) the `XR_KHR_vulkan_enable2` entry
 * points needed later by `initFromGhostContext`, then verifies that the
 * context's Vulkan version falls inside the runtime's supported range.
 * On failure a human readable reason is written to `r_requirement_info` and
 * false is returned. */
101
103 XrInstance instance,
104 XrSystemId system_id,
105 std::string *r_requirement_info) const
106{
/* Resolve one OpenXR entry point; on failure report its name and bail out. */
107#define LOAD_PFN(var, name) \
108 if (var == nullptr && \
109 XR_FAILED(xrGetInstanceProcAddr(instance, #name, (PFN_xrVoidFunction *)&var))) \
110 { \
111 var = nullptr; \
112 *r_requirement_info = std::string("Unable to retrieve " #name " instance function"); \
113 return false; \
114 }
115 /* Get the function pointers for OpenXR/Vulkan. If any fails we expect that we cannot use the
116 * given context. */
117 LOAD_PFN(s_xrGetVulkanGraphicsRequirements2KHR_fn, xrGetVulkanGraphicsRequirements2KHR);
118 LOAD_PFN(s_xrGetVulkanGraphicsDevice2KHR_fn, xrGetVulkanGraphicsDevice2KHR);
119 LOAD_PFN(s_xrCreateVulkanInstanceKHR_fn, xrCreateVulkanInstanceKHR);
120 LOAD_PFN(s_xrCreateVulkanDeviceKHR_fn, xrCreateVulkanDeviceKHR);
121#undef LOAD_PFN
122
/* Ask the runtime which Vulkan API version range it supports for this system. */
123 XrGraphicsRequirementsVulkanKHR xr_graphics_requirements{
124 /*type*/ XR_TYPE_GRAPHICS_REQUIREMENTS_VULKAN_KHR,
125 };
126 if (XR_FAILED(s_xrGetVulkanGraphicsRequirements2KHR_fn(
127 instance, system_id, &xr_graphics_requirements)))
128 {
129 *r_requirement_info = std::string("Unable to retrieve Xr version requirements for Vulkan");
130 return false;
131 }
132
133 /* Check if the Vulkan API instance version is supported. */
134 GHOST_ContextVK &context_vk = static_cast<GHOST_ContextVK &>(ghost_ctx);
135 const XrVersion vk_version = XR_MAKE_VERSION(
136 context_vk.context_major_version_, context_vk.context_minor_version_, 0);
137 if (vk_version < xr_graphics_requirements.minApiVersionSupported ||
138 vk_version > xr_graphics_requirements.maxApiVersionSupported)
139 {
/* Report the supported range so the mismatch can be diagnosed by the caller. */
140 std::ostringstream strstream;
141 strstream << "Min Vulkan version "
142 << XR_VERSION_MAJOR(xr_graphics_requirements.minApiVersionSupported) << "."
143 << XR_VERSION_MINOR(xr_graphics_requirements.minApiVersionSupported) << std::endl;
144 strstream << "Max Vulkan version "
145 << XR_VERSION_MAJOR(xr_graphics_requirements.maxApiVersionSupported) << "."
146 << XR_VERSION_MINOR(xr_graphics_requirements.maxApiVersionSupported) << std::endl;
147
148 *r_requirement_info = strstream.str();
149 return false;
150 }
151
152 return true;
153}
154
/* Build the Vulkan objects the OpenXR session renders with: an XR-compatible
 * VkInstance, the runtime-selected physical device, a logical device with a
 * graphics+transfer queue, a command pool/buffer and — for the CPU transfer
 * path — a VMA allocator. Finally fills `oxr_binding.vk`, which OpenXR uses
 * when the session is created.
 * Requires the `s_*_fn` entry points resolved by `checkVersionRequirements`. */
156 XrInstance instance,
157 XrSystemId system_id)
158{
159 /* Create a new VkInstance that is compatible with OpenXR */
160 VkApplicationInfo vk_application_info = {VK_STRUCTURE_TYPE_APPLICATION_INFO,
161 nullptr,
162 "Blender",
163 VK_MAKE_VERSION(1, 0, 0),
164 "BlenderXR",
165 VK_MAKE_VERSION(1, 0, 0),
166 VK_MAKE_VERSION(1, 2, 0)};
167 VkInstanceCreateInfo vk_instance_create_info = {VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
168 nullptr,
169 0,
170 &vk_application_info,
171 0,
172 nullptr,
173 0,
174 nullptr};
/* The runtime creates the instance itself so it can add the extensions it needs. */
175 XrVulkanInstanceCreateInfoKHR xr_instance_create_info = {XR_TYPE_VULKAN_INSTANCE_CREATE_INFO_KHR,
176 nullptr,
177 system_id,
178 0,
179 vkGetInstanceProcAddr,
180 &vk_instance_create_info,
181 nullptr};
182 VkResult vk_result;
183 CHECK_XR(s_xrCreateVulkanInstanceKHR_fn(
184 instance, &xr_instance_create_info, &vk_instance_, &vk_result),
185 "Unable to create an OpenXR compatible Vulkan instance.");
186
187 /* Physical device selection */
188 XrVulkanGraphicsDeviceGetInfoKHR xr_device_get_info = {
189 XR_TYPE_VULKAN_GRAPHICS_DEVICE_GET_INFO_KHR, nullptr, system_id, vk_instance_};
190 CHECK_XR(s_xrGetVulkanGraphicsDevice2KHR_fn(instance, &xr_device_get_info, &vk_physical_device_),
191 "Unable to create an OpenXR compatible Vulkan physical device.");
192
/* Pick the first family supporting both graphics and transfer; falls back to
 * family 0 when none matches. */
193 /* Queue family */
194 uint32_t vk_queue_family_count = 0;
195 vkGetPhysicalDeviceQueueFamilyProperties(vk_physical_device_, &vk_queue_family_count, nullptr);
196 std::vector<VkQueueFamilyProperties> vk_queue_families(vk_queue_family_count);
197 graphics_queue_family_ = 0;
198 vkGetPhysicalDeviceQueueFamilyProperties(
199 vk_physical_device_, &vk_queue_family_count, vk_queue_families.data());
200 for (uint32_t i = 0; i < vk_queue_family_count; i++) {
201 if (vk_queue_families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT &&
202 vk_queue_families[i].queueFlags & VK_QUEUE_TRANSFER_BIT)
203 {
204 graphics_queue_family_ = i;
205 break;
206 }
207 }
208
209 /* Graphic device creation */
210 const float queue_priority = 1.0f;
211 VkDeviceQueueCreateInfo vk_queue_create_info = {VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
212 nullptr,
213 0,
214 graphics_queue_family_,
215 1,
216 &queue_priority};
217 VkDeviceCreateInfo vk_device_create_info = {VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
218 nullptr,
219 0,
220 1,
221 &vk_queue_create_info,
222 0,
223 nullptr,
224 0,
225 nullptr};
226 XrVulkanDeviceCreateInfoKHR xr_device_create_info = {XR_TYPE_VULKAN_DEVICE_CREATE_INFO_KHR,
227 nullptr,
228 system_id,
229 0,
230 vkGetInstanceProcAddr,
231 vk_physical_device_,
232 &vk_device_create_info,
233 nullptr};
234 CHECK_XR(s_xrCreateVulkanDeviceKHR_fn(instance, &xr_device_create_info, &vk_device_, &vk_result),
235 "Unable to create an OpenXR compatible Vulkan logical device.");
236
237 vkGetDeviceQueue(vk_device_, graphics_queue_family_, 0, &vk_queue_);
238
/* NOTE(review): the VkResult of the pool/buffer creation below is not
 * checked — confirm failure here is acceptable (later submits would fail). */
239 /* Command buffer pool */
240 VkCommandPoolCreateInfo vk_command_pool_create_info = {
241 VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
242 nullptr,
243 VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
244 graphics_queue_family_};
245 vkCreateCommandPool(vk_device_, &vk_command_pool_create_info, nullptr, &vk_command_pool_);
246
247 /* Command buffer */
248 VkCommandBufferAllocateInfo vk_command_buffer_allocate_info = {
249 VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
250 nullptr,
251 vk_command_pool_,
252 VK_COMMAND_BUFFER_LEVEL_PRIMARY,
253 1};
254 vkAllocateCommandBuffers(vk_device_, &vk_command_buffer_allocate_info, &vk_command_buffer_);
255
256 /* Select the best data transfer mode based on the OpenXR device and ContextVK. */
257 data_transfer_mode_ = choseDataTransferMode();
258
259 if (data_transfer_mode_ == GHOST_kVulkanXRModeCPU) {
/* Staging buffers for the CPU path are allocated lazily via this allocator. */
260 /* VMA */
261 VmaAllocatorCreateInfo allocator_create_info = {};
262 allocator_create_info.flags = VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT;
263 allocator_create_info.vulkanApiVersion = VK_API_VERSION_1_2;
264 allocator_create_info.physicalDevice = vk_physical_device_;
265 allocator_create_info.device = vk_device_;
266 allocator_create_info.instance = vk_instance_;
267 vmaCreateAllocator(&allocator_create_info, &vma_allocator_);
268 }
269
270 /* Update the binding struct */
271 oxr_binding.vk.type = XR_TYPE_GRAPHICS_BINDING_VULKAN_KHR;
272 oxr_binding.vk.next = nullptr;
273 oxr_binding.vk.instance = vk_instance_;
274 oxr_binding.vk.physicalDevice = vk_physical_device_;
275 oxr_binding.vk.device = vk_device_;
276 oxr_binding.vk.queueFamilyIndex = graphics_queue_family_;
277 oxr_binding.vk.queueIndex = 0;
278}
279
280GHOST_TVulkanXRModes GHOST_XrGraphicsBindingVulkan::choseDataTransferMode()
281{
282 GHOST_VulkanHandles vulkan_handles;
283 ghost_ctx_.getVulkanHandles(vulkan_handles);
284
285 /* Retrieve the Context physical device properties. */
286 VkPhysicalDeviceVulkan11Properties vk_physical_device_vulkan11_properties = {
287 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES};
288 VkPhysicalDeviceProperties2 vk_physical_device_properties = {
289 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, &vk_physical_device_vulkan11_properties};
290 vkGetPhysicalDeviceProperties2(vulkan_handles.physical_device, &vk_physical_device_properties);
291
292 /* Retrieve OpenXR physical device properties. */
293 VkPhysicalDeviceVulkan11Properties xr_physical_device_vulkan11_properties = {
294 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES};
295 VkPhysicalDeviceProperties2 xr_physical_device_properties = {
296 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, &xr_physical_device_vulkan11_properties};
297 vkGetPhysicalDeviceProperties2(vk_physical_device_, &xr_physical_device_properties);
298
299 /* When the physical device properties match between the Vulkan device and the Xr devices we
300 * assume that they are the same physical device in the machine and we can use shared memory.
301 * If not we fall back to CPU based data transfer. */
302 const bool is_same_physical_device = memcmp(&vk_physical_device_vulkan11_properties,
303 &xr_physical_device_vulkan11_properties,
304 sizeof(VkPhysicalDeviceVulkan11Properties)) == 0;
305 if (!is_same_physical_device) {
306 return GHOST_kVulkanXRModeCPU;
307 }
308
309 /* Check for available extensions. We assume that the needed extensions are enabled when
310 * available during construction. */
311 uint32_t device_extension_count;
312 vkEnumerateDeviceExtensionProperties(
313 vulkan_handles.physical_device, nullptr, &device_extension_count, nullptr);
314 std::vector<VkExtensionProperties> available_device_extensions(device_extension_count);
315 vkEnumerateDeviceExtensionProperties(vulkan_handles.physical_device,
316 nullptr,
317 &device_extension_count,
318 available_device_extensions.data());
319
320 auto has_extension = [=](const char *extension_name) {
321 for (const auto &extension : available_device_extensions) {
322 if (strcmp(extension_name, extension.extensionName) == 0) {
323 return true;
324 }
325 }
326 return false;
327 };
328
329#ifdef _WIN32
330 bool has_vk_khr_external_memory_win32_extension = has_extension(
331 VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME);
332 if (has_vk_khr_external_memory_win32_extension) {
333 return GHOST_kVulkanXRModeWin32;
334 }
335#elif defined(__APPLE__)
336#else /* UNIX/Linux */
337 bool has_vk_khr_external_memory_fd_extension = has_extension(
338 VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME);
339 if (has_vk_khr_external_memory_fd_extension) {
340 return GHOST_kVulkanXRModeFD;
341 }
342#endif
343
344 return GHOST_kVulkanXRModeCPU;
345}
346
/* Return the first entry of `gpu_binding_formats` that also occurs in
 * `runtime_formats` (i.e. preference order is dictated by the binding), or
 * `std::nullopt` when the two lists share no format. */
static std::optional<int64_t> choose_swapchain_format_from_candidates(
    const std::vector<int64_t> &gpu_binding_formats, const std::vector<int64_t> &runtime_formats)
{
  for (const int64_t candidate : gpu_binding_formats) {
    const bool runtime_supported = std::find(runtime_formats.begin(),
                                             runtime_formats.end(),
                                             candidate) != runtime_formats.end();
    if (runtime_supported) {
      return candidate;
    }
  }
  return std::nullopt;
}
364
366 const std::vector<int64_t> &runtime_formats,
367 GHOST_TXrSwapchainFormat &r_format,
368 bool &r_is_srgb_format) const
369{
370 std::vector<int64_t> gpu_binding_formats = {
371 VK_FORMAT_R16G16B16A16_SFLOAT,
372 VK_FORMAT_R8G8B8A8_UNORM,
373 VK_FORMAT_B8G8R8A8_UNORM,
374 VK_FORMAT_R8G8B8A8_SRGB,
375 VK_FORMAT_B8G8R8A8_SRGB,
376 };
377
378 r_format = GHOST_kXrSwapchainFormatRGBA8;
379 r_is_srgb_format = false;
380 std::optional result = choose_swapchain_format_from_candidates(gpu_binding_formats,
381 runtime_formats);
382 if (result) {
383 switch (*result) {
384 case VK_FORMAT_R16G16B16A16_SFLOAT:
385 r_format = GHOST_kXrSwapchainFormatRGBA16F;
386 break;
387 case VK_FORMAT_R8G8B8A8_UNORM:
388 case VK_FORMAT_B8G8R8A8_UNORM:
389 case VK_FORMAT_R8G8B8A8_SRGB:
390 case VK_FORMAT_B8G8R8A8_SRGB:
391 r_format = GHOST_kXrSwapchainFormatRGBA8;
392 break;
393 }
394
395 switch (*result) {
396 case VK_FORMAT_R16G16B16A16_SFLOAT:
397 case VK_FORMAT_R8G8B8A8_UNORM:
398 case VK_FORMAT_B8G8R8A8_UNORM:
399 r_is_srgb_format = false;
400 break;
401 case VK_FORMAT_R8G8B8A8_SRGB:
402 case VK_FORMAT_B8G8R8A8_SRGB:
403 r_is_srgb_format = true;
404 break;
405 }
406 }
407 return result;
408}
409
410std::vector<XrSwapchainImageBaseHeader *> GHOST_XrGraphicsBindingVulkan::createSwapchainImages(
411 uint32_t image_count)
412{
413 std::vector<XrSwapchainImageBaseHeader *> base_images;
414 std::vector<XrSwapchainImageVulkan2KHR> vulkan_images(
415 image_count, {XR_TYPE_SWAPCHAIN_IMAGE_VULKAN2_KHR, nullptr, VK_NULL_HANDLE});
416 for (XrSwapchainImageVulkan2KHR &image : vulkan_images) {
417 base_images.push_back(reinterpret_cast<XrSwapchainImageBaseHeader *>(&image));
418 }
419 image_cache_.push_back(std::move(vulkan_images));
420
421 return base_images;
422}
423
/* Dispatch the swap-chain submission to the path matching the transfer mode
 * selected by `choseDataTransferMode`: shared GPU memory (FD/Win32) or the
 * CPU staging-buffer fallback. */
426 XrSwapchainImageBaseHeader &swapchain_image, const GHOST_XrDrawViewInfo &draw_info)
427{
/* The base header was created by `createSwapchainImages`, so it is always a
 * `XrSwapchainImageVulkan2KHR` for this binding. */
428 XrSwapchainImageVulkan2KHR &vulkan_image = *reinterpret_cast<XrSwapchainImageVulkan2KHR *>(
429 &swapchain_image);
430
431 switch (data_transfer_mode_) {
432 case GHOST_kVulkanXRModeFD:
433 case GHOST_kVulkanXRModeWin32:
434 submitToSwapchainImageGpu(vulkan_image, draw_info);
435 break;
436
437 case GHOST_kVulkanXRModeCPU:
438 submitToSwapchainImageCpu(vulkan_image, draw_info);
439 break;
440 }
441}
443
444/* -------------------------------------------------------------------- */
447
/* CPU fallback path: the rendered frame is read back on the host, copied into
 * a host-visible staging buffer and then transferred into the OpenXR
 * swap-chain image on the GPU. Used when shared GPU memory is unavailable
 * (see `choseDataTransferMode`). */
448void GHOST_XrGraphicsBindingVulkan::submitToSwapchainImageCpu(
449 XrSwapchainImageVulkan2KHR &swapchain_image, const GHOST_XrDrawViewInfo &draw_info)
450{
451 /* Acquire frame buffer image. */
452 GHOST_VulkanOpenXRData openxr_data = {GHOST_kVulkanXRModeCPU};
453 ghost_ctx_.openxr_acquire_framebuffer_image_callback_(&openxr_data);
454
/* 16-bit formats use 8 bytes per pixel, the 8-bit formats 4. */
455 /* Import render result. */
456 VkDeviceSize component_size = 4 * sizeof(uint8_t);
457 if (draw_info.swapchain_format == GHOST_kXrSwapchainFormatRGBA16F ||
458 draw_info.swapchain_format == GHOST_kXrSwapchainFormatRGBA16)
459 {
460 component_size = 4 * sizeof(uint16_t);
461 }
462 VkDeviceSize image_data_size = openxr_data.extent.width * openxr_data.extent.height *
463 component_size;
464
/* Grow-only staging buffer: recreate it when the current one is too small. */
465 if (vk_buffer_ != VK_NULL_HANDLE && vk_buffer_allocation_info_.size < image_data_size) {
466 vmaUnmapMemory(vma_allocator_, vk_buffer_allocation_);
467 vmaDestroyBuffer(vma_allocator_, vk_buffer_, vk_buffer_allocation_);
468 vk_buffer_ = VK_NULL_HANDLE;
469 vk_buffer_allocation_ = VK_NULL_HANDLE;
470 }
471
472 if (vk_buffer_ == VK_NULL_HANDLE) {
473 VkBufferCreateInfo vk_buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
474 nullptr,
475 0,
476 image_data_size,
477 VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
478 VK_SHARING_MODE_EXCLUSIVE,
479 0,
480 nullptr};
481 VmaAllocationCreateInfo allocation_create_info = {};
482 allocation_create_info.usage = VMA_MEMORY_USAGE_AUTO;
483 allocation_create_info.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT;
484 allocation_create_info.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
485 vmaCreateBuffer(vma_allocator_,
486 &vk_buffer_create_info,
487 &allocation_create_info,
488 &vk_buffer_,
489 &vk_buffer_allocation_,
490 &vk_buffer_allocation_info_);
/* The buffer stays persistently mapped until destroyed/recreated. */
491 vmaMapMemory(vma_allocator_, vk_buffer_allocation_, &vk_buffer_allocation_info_.pMappedData);
492 }
493 std::memcpy(vk_buffer_allocation_info_.pMappedData, openxr_data.cpu.image_data, image_data_size);
494
495 /* Copy frame buffer image to swapchain image. */
496 VkCommandBuffer vk_command_buffer = vk_command_buffer_;
497
498 /* - Begin command recording */
499 VkCommandBufferBeginInfo vk_command_buffer_begin_info = {
500 VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
501 nullptr,
502 VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
503 nullptr};
504 vkBeginCommandBuffer(vk_command_buffer, &vk_command_buffer_begin_info);
505
506 /* Transfer imported render result & swap-chain image (UNDEFINED -> GENERAL). */
507 VkImageMemoryBarrier vk_image_memory_barrier = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
508 nullptr,
509 0,
510 VK_ACCESS_TRANSFER_WRITE_BIT,
511 VK_IMAGE_LAYOUT_UNDEFINED,
512 VK_IMAGE_LAYOUT_GENERAL,
513 VK_QUEUE_FAMILY_IGNORED,
514 VK_QUEUE_FAMILY_IGNORED,
515 swapchain_image.image,
516 {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1}};
517 vkCmdPipelineBarrier(vk_command_buffer,
518 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
519 VK_PIPELINE_STAGE_TRANSFER_BIT,
520 0,
521 0,
522 nullptr,
523 0,
524 nullptr,
525 1,
526 &vk_image_memory_barrier);
527
528 /* Copy buffer to image */
529 VkBufferImageCopy vk_buffer_image_copy = {
530 0,
531 0,
532 0,
533 {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1},
534 {draw_info.ofsx, draw_info.ofsy, 0},
535 {openxr_data.extent.width, openxr_data.extent.height, 1}};
536 vkCmdCopyBufferToImage(vk_command_buffer,
537 vk_buffer_,
538 swapchain_image.image,
539 VK_IMAGE_LAYOUT_GENERAL,
540 1,
541 &vk_buffer_image_copy);
542
543 /* - End command recording */
544 vkEndCommandBuffer(vk_command_buffer);
545 /* - Submit command buffer to queue. */
546 VkSubmitInfo vk_submit_info = {
547 VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, nullptr, 1, &vk_command_buffer};
548 vkQueueSubmit(vk_queue_, 1, &vk_submit_info, VK_NULL_HANDLE);
549
/* Block until the copy finished so the staging buffer and command buffer can
 * be reused immediately for the next eye/frame. */
550 /* - Wait until device is idle. */
551 vkQueueWaitIdle(vk_queue_);
552
553 /* - Reset command buffer for next eye/frame */
554 vkResetCommandBuffer(vk_command_buffer, 0);
555
556 /* Release frame buffer image. */
557 ghost_ctx_.openxr_release_framebuffer_image_callback_(&openxr_data);
558}
559
561
562/* -------------------------------------------------------------------- */
565
/* Shared-memory path: the rendered frame lives in GPU memory exported by the
 * GHOST context (opaque FD on Unix, NT handle on Windows). Import it once per
 * view as a `VkImage`, then copy it to the OpenXR swap-chain image entirely
 * on the GPU. */
566void GHOST_XrGraphicsBindingVulkan::submitToSwapchainImageGpu(
567 XrSwapchainImageVulkan2KHR &swapchain_image, const GHOST_XrDrawViewInfo &draw_info)
568{
/* NOTE(review): this lookup keeps the LAST match (no break) — assumes
 * `view_idx` values in `imported_memory_` are unique; confirm. */
569 /* Check for previous imported memory. */
570 ImportedMemory *imported_memory = nullptr;
571 for (ImportedMemory &item : imported_memory_) {
572 if (item.view_idx == draw_info.view_idx) {
573 imported_memory = &item;
574 }
575 }
576 /* No previous imported memory found, creating a new. */
577 if (imported_memory == nullptr) {
578 imported_memory_.push_back(
579 {draw_info.view_idx, VK_NULL_HANDLE, VK_NULL_HANDLE, VK_NULL_HANDLE});
580 imported_memory = &imported_memory_.back();
581 }
582
583 GHOST_VulkanOpenXRData openxr_data = {data_transfer_mode_};
584 openxr_data.gpu.vk_image_blender = imported_memory->vk_image_blender;
585 ghost_ctx_.openxr_acquire_framebuffer_image_callback_(&openxr_data);
586 imported_memory->vk_image_blender = openxr_data.gpu.vk_image_blender;
587
/* The context reported a new export handle: drop the stale import (if any)
 * and re-import the image and its backing memory. */
588 /* Create an image handle */
589 if (openxr_data.gpu.new_handle) {
590 if (imported_memory->vk_image_xr) {
591 vkDestroyImage(vk_device_, imported_memory->vk_image_xr, nullptr);
592 vkFreeMemory(vk_device_, imported_memory->vk_device_memory_xr, nullptr);
593 imported_memory->vk_device_memory_xr = VK_NULL_HANDLE;
594 imported_memory->vk_image_xr = VK_NULL_HANDLE;
595 }
596
597 VkExternalMemoryImageCreateInfo vk_external_memory_image_info = {
598 VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, nullptr, 0};
599
600 switch (data_transfer_mode_) {
601 case GHOST_kVulkanXRModeFD:
602 vk_external_memory_image_info.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
603 break;
604 case GHOST_kVulkanXRModeWin32:
605 vk_external_memory_image_info.handleTypes =
606 VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT;
607 break;
/* CPU mode never reaches this function (see submitToSwapchainImage). */
608 case GHOST_kVulkanXRModeCPU:
609 break;
610 }
611
612 VkImageCreateInfo vk_image_info = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
613 &vk_external_memory_image_info,
614 0,
615 VK_IMAGE_TYPE_2D,
616 openxr_data.gpu.image_format,
617 {openxr_data.extent.width, openxr_data.extent.height, 1},
618 1,
619 1,
620 VK_SAMPLE_COUNT_1_BIT,
621 VK_IMAGE_TILING_OPTIMAL,
622 VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
623 VK_SHARING_MODE_EXCLUSIVE,
624 0,
625 nullptr,
626 VK_IMAGE_LAYOUT_UNDEFINED};
627
628 vkCreateImage(vk_device_, &vk_image_info, nullptr, &imported_memory->vk_image_xr);
629
630 /* Get the memory requirements */
631 VkMemoryRequirements vk_memory_requirements = {};
632 vkGetImageMemoryRequirements(
633 vk_device_, imported_memory->vk_image_xr, &vk_memory_requirements);
634
/* Dedicated allocation ties the imported memory to this specific image. */
635 /* Import the memory */
636 VkMemoryDedicatedAllocateInfo vk_memory_dedicated_allocation_info = {
637 VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
638 nullptr,
639 imported_memory->vk_image_xr,
640 VK_NULL_HANDLE};
641 switch (data_transfer_mode_) {
642 case GHOST_kVulkanXRModeFD: {
643 VkImportMemoryFdInfoKHR import_memory_info = {VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
644 &vk_memory_dedicated_allocation_info,
645 VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
646 int(openxr_data.gpu.image_handle)};
647 VkMemoryAllocateInfo allocate_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
648 &import_memory_info,
649 vk_memory_requirements.size};
650 vkAllocateMemory(
651 vk_device_, &allocate_info, nullptr, &imported_memory->vk_device_memory_xr);
652 break;
653 }
654
655 case GHOST_kVulkanXRModeWin32: {
656#ifdef _WIN32
657 VkImportMemoryWin32HandleInfoKHR import_memory_info = {
658 VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
659 &vk_memory_dedicated_allocation_info,
660 VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT,
661 HANDLE(openxr_data.gpu.image_handle)};
662 VkMemoryAllocateInfo allocate_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
663 &import_memory_info,
664 vk_memory_requirements.size};
665 vkAllocateMemory(
666 vk_device_, &allocate_info, nullptr, &imported_memory->vk_device_memory_xr);
667#endif
668 break;
669 }
670
671 case GHOST_kVulkanXRModeCPU:
672 break;
673 }
674
675 /* Bind the imported memory to the image. */
676 vkBindImageMemory(vk_device_,
677 imported_memory->vk_image_xr,
678 imported_memory->vk_device_memory_xr,
679 openxr_data.gpu.memory_offset);
680 }
681
682 /* Copy frame buffer image to swapchain image. */
683 VkCommandBuffer vk_command_buffer = vk_command_buffer_;
684
685 /* Begin command recording */
686 VkCommandBufferBeginInfo vk_command_buffer_begin_info = {
687 VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
688 nullptr,
689 VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
690 nullptr};
691 vkBeginCommandBuffer(vk_command_buffer, &vk_command_buffer_begin_info);
692
693 /* Transfer imported render result & swap-chain image (UNDEFINED -> GENERAL). */
694 VkImageMemoryBarrier vk_image_memory_barrier[] = {{VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
695 nullptr,
696 0,
697 VK_ACCESS_TRANSFER_READ_BIT,
698 VK_IMAGE_LAYOUT_UNDEFINED,
699 VK_IMAGE_LAYOUT_GENERAL,
700 VK_QUEUE_FAMILY_IGNORED,
701 VK_QUEUE_FAMILY_IGNORED,
702 imported_memory->vk_image_xr,
703 {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1}},
704 {VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
705 nullptr,
706 0,
707 VK_ACCESS_TRANSFER_WRITE_BIT,
708 VK_IMAGE_LAYOUT_UNDEFINED,
709 VK_IMAGE_LAYOUT_GENERAL,
710 VK_QUEUE_FAMILY_IGNORED,
711 VK_QUEUE_FAMILY_IGNORED,
712 swapchain_image.image,
713 {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1}}};
714 vkCmdPipelineBarrier(vk_command_buffer,
715 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
716 VK_PIPELINE_STAGE_TRANSFER_BIT,
717 0,
718 0,
719 nullptr,
720 0,
721 nullptr,
722 2,
723 vk_image_memory_barrier);
724
725 /* Copy image to swap-chain. */
726 VkImageCopy vk_image_copy = {{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1},
727 {0, 0, 0},
728 {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1},
729 {draw_info.ofsx, draw_info.ofsy, 0},
730 {openxr_data.extent.width, openxr_data.extent.height, 1}};
731 vkCmdCopyImage(vk_command_buffer,
732 imported_memory->vk_image_xr,
733 VK_IMAGE_LAYOUT_GENERAL,
734 swapchain_image.image,
735 VK_IMAGE_LAYOUT_GENERAL,
736 1,
737 &vk_image_copy);
738
739 /* Swap-chain needs to be in an VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL compatible layout. */
740 VkImageMemoryBarrier vk_image_memory_barrier2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
741 nullptr,
742 VK_ACCESS_TRANSFER_WRITE_BIT,
743 0,
744 VK_IMAGE_LAYOUT_GENERAL,
745 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
746 VK_QUEUE_FAMILY_IGNORED,
747 VK_QUEUE_FAMILY_IGNORED,
748 swapchain_image.image,
749 {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1}};
750 vkCmdPipelineBarrier(vk_command_buffer,
751 VK_PIPELINE_STAGE_TRANSFER_BIT,
752 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
753 0,
754 0,
755 nullptr,
756 0,
757 nullptr,
758 1,
759 &vk_image_memory_barrier2);
760
761 /* End command recording. */
762 vkEndCommandBuffer(vk_command_buffer);
763 /* Submit command buffer to queue. */
764 VkSubmitInfo vk_submit_info = {
765 VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, nullptr, 1, &vk_command_buffer};
766 vkQueueSubmit(vk_queue_, 1, &vk_submit_info, VK_NULL_HANDLE);
767
/* Block until the copy finished; the single command buffer is then safe to
 * reset and reuse for the next eye/frame. */
768 /* Wait until device is idle. */
769 vkQueueWaitIdle(vk_queue_);
770
771 /* Reset command buffer for next eye/frame. */
772 vkResetCommandBuffer(vk_command_buffer, 0);
773}
774
776
/* Whether rendering must be flipped vertically for this binding is delegated
 * to the GHOST context (`isUpsideDown`). */
778{
779 return ghost_ctx.isUpsideDown();
780}
static std::optional< int64_t > choose_swapchain_format_from_candidates(const std::vector< int64_t > &gpu_binding_formats, const std::vector< int64_t > &runtime_formats)
static std::optional< int64_t > choose_swapchain_format_from_candidates(const std::vector< int64_t > &gpu_binding_formats, const std::vector< int64_t > &runtime_formats)
#define LOAD_PFN(var, name)
#define CHECK_XR(call, error_msg)
GHOST_TSuccess getVulkanHandles(GHOST_VulkanHandles &r_handles) override
virtual bool isUpsideDown() const
GHOST_IXrGraphicsBinding()=default
union GHOST_IXrGraphicsBinding::@316344103261324226161313226323037304250053225267 oxr_binding
std::vector< XrSwapchainImageBaseHeader * > createSwapchainImages(uint32_t image_count) override
std::optional< int64_t > chooseSwapchainFormat(const std::vector< int64_t > &runtime_formats, GHOST_TXrSwapchainFormat &r_format, bool &r_is_srgb_format) const override
void initFromGhostContext(GHOST_Context &ghost_ctx, XrInstance instance, XrSystemId system_id) override
void submitToSwapchainImage(XrSwapchainImageBaseHeader &swapchain_image, const GHOST_XrDrawViewInfo &draw_info) override
bool needsUpsideDownDrawing(GHOST_Context &ghost_ctx) const override
bool checkVersionRequirements(GHOST_Context &ghost_ctx, XrInstance instance, XrSystemId system_id, std::string *r_requirement_info) const override
GHOST_XrGraphicsBindingVulkan(GHOST_Context &ghost_ctx)
i
Definition text_draw.cc:230