/* Query extended device properties, chaining the driver-identification struct
 * (driverID, driverInfo, conformanceVersion) into pNext so one
 * vkGetPhysicalDeviceProperties2 call fills both structs. */
51 VkPhysicalDeviceProperties2 vk_physical_device_properties = {};
52 VkPhysicalDeviceDriverProperties vk_physical_device_driver_properties = {};
53 vk_physical_device_properties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
54 vk_physical_device_driver_properties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES;
55 vk_physical_device_properties.pNext = &vk_physical_device_driver_properties;
56 vkGetPhysicalDeviceProperties2(vk_physical_device, &vk_physical_device_properties);
/* Intel proprietary Windows driver on integrated GPUs: decode the packed
 * driverVersion (Intel-Windows scheme: major = bits 31..14, minor = bits 13..0,
 * as the shift/mask below shows) and reject drivers older than 101.2140. */
70 if (vk_physical_device_driver_properties.driverID == VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS &&
71 vk_physical_device_properties.properties.deviceType ==
72 VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU)
74 const uint32_t driver_version = vk_physical_device_properties.properties.driverVersion;
75 uint32_t driver_version_major = driver_version >> 14u;
76 uint32_t driver_version_minor = driver_version & 0x3fffu;
/* i.e. version < 101.2140 — `&&` binds tighter than `||`, so this reads
 * (major < 101) || (major == 101 && minor < 2140). */
77 if (driver_version_major < 101 || driver_version_major == 101 && driver_version_minor < 2140) {
/* Fold the driver's reported Vulkan conformance version (4 components) into a
 * single comparable integer by reusing the VK_MAKE_API_VERSION packing, then
 * reject NVIDIA proprietary drivers conformant before 1.3.7.2. */
89 uint32_t conformance_version = VK_MAKE_API_VERSION(
90 vk_physical_device_driver_properties.conformanceVersion.major,
91 vk_physical_device_driver_properties.conformanceVersion.minor,
92 vk_physical_device_driver_properties.conformanceVersion.subminor,
93 vk_physical_device_driver_properties.conformanceVersion.patch);
94 if (vk_physical_device_driver_properties.driverID == VK_DRIVER_ID_NVIDIA_PROPRIETARY &&
95 conformance_version < VK_MAKE_API_VERSION(1, 3, 7, 2))
/* Blocklist one specific NVIDIA proprietary release by substring-matching the
 * free-form driverInfo string. Both "580.76.5" and "580.76.05" spellings are
 * checked — presumably the same release is reported differently on different
 * platforms (TODO confirm against the upstream bug report). */
104 if (vk_physical_device_driver_properties.driverID == VK_DRIVER_ID_NVIDIA_PROPRIETARY &&
105 ((
StringRefNull(vk_physical_device_driver_properties.driverInfo).
find(
"580.76.5", 0) !=
/* Second spelling of the same broken release. */
107 (
StringRefNull(vk_physical_device_driver_properties.driverInfo).
find(
"580.76.05", 0) !=
/* Qualcomm proprietary driver: require at least driver 31.0.112.0.
 * The constant name says 31.0.112; the VK_MAKE_VERSION(512, 827, 0) packing is
 * the Qualcomm-internal encoding of that release — TODO confirm the mapping. */
115 if (vk_physical_device_driver_properties.driverID == VK_DRIVER_ID_QUALCOMM_PROPRIETARY) {
122 const uint32_t driver_version = vk_physical_device_properties.properties.driverVersion;
123 constexpr uint32_t version_31_0_112 = VK_MAKE_VERSION(512, 827, 0);
124 if (driver_version < version_31_0_112) {
/* User-facing message explaining the fallback to OpenGL. */
126 "Detected qualcomm driver is not supported. To run the Vulkan backend "
127 "driver 31.0.112.0 or later is required. Switching to OpenGL.");
/* Query core features plus the Vulkan 1.1 and 1.2 feature blocks in one call:
 * features -> features_11 -> features_12 are linked via pNext (each struct's
 * second aggregate member is the pNext pointer). */
140 VkPhysicalDeviceVulkan12Features features_12 = {
141 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES};
142 VkPhysicalDeviceVulkan11Features features_11 = {
143 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES, &features_12};
144 VkPhysicalDeviceFeatures2 features = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
147 vkGetPhysicalDeviceFeatures2(vk_physical_device, &features);
/* Every hard-required feature that the device lacks is recorded by a
 * human-readable name, so the caller can report all missing capabilities at
 * once instead of failing on the first. */
150 if (features.features.geometryShader == VK_FALSE) {
151 missing_capabilities.
append(
"geometry shaders");
153 if (features.features.logicOp == VK_FALSE) {
154 missing_capabilities.
append(
"logical operations");
157 if (features.features.dualSrcBlend == VK_FALSE) {
158 missing_capabilities.
append(
"dual source blending");
160 if (features.features.imageCubeArray == VK_FALSE) {
161 missing_capabilities.
append(
"image cube array");
163 if (features.features.multiDrawIndirect == VK_FALSE) {
164 missing_capabilities.
append(
"multi draw indirect");
166 if (features.features.multiViewport == VK_FALSE) {
167 missing_capabilities.
append(
"multi viewport");
169 if (features.features.shaderClipDistance == VK_FALSE) {
170 missing_capabilities.
append(
"shader clip distance");
172 if (features.features.drawIndirectFirstInstance == VK_FALSE) {
173 missing_capabilities.
append(
"draw indirect first instance");
175 if (features.features.fragmentStoresAndAtomics == VK_FALSE) {
176 missing_capabilities.
append(
"fragment stores and atomics");
/* Vulkan 1.1 feature block. */
178 if (features_11.shaderDrawParameters == VK_FALSE) {
179 missing_capabilities.
append(
"shader draw parameters");
/* Vulkan 1.2 feature block. */
181 if (features_12.timelineSemaphore == VK_FALSE) {
182 missing_capabilities.
append(
"timeline semaphores");
184 if (features_12.bufferDeviceAddress == VK_FALSE) {
185 missing_capabilities.
append(
"buffer device address");
/* Standard two-call Vulkan enumeration: first call gets the count, second
 * fills the array. The extension names are then collected into a set for fast
 * lookup. */
189 uint32_t vk_extension_count;
190 vkEnumerateDeviceExtensionProperties(vk_physical_device,
nullptr, &vk_extension_count,
nullptr);
193 vkEnumerateDeviceExtensionProperties(
194 vk_physical_device,
nullptr, &vk_extension_count, vk_extensions.
data());
196 for (VkExtensionProperties &vk_extension : vk_extensions) {
197 extensions.
add(vk_extension.extensionName);
/* Required device extensions; absence is reported by the extension's own
 * canonical name string. */
200 if (!extensions.
contains(VK_KHR_SWAPCHAIN_EXTENSION_NAME)) {
201 missing_capabilities.
append(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
203 if (!extensions.
contains(VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME)) {
204 missing_capabilities.
append(VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME);
208 if (!extensions.
contains(VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME)) {
209 missing_capabilities.
append(VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME);
/* Empty result means the device meets the minimum requirements. */
213 return missing_capabilities;
/* Restrict which Vulkan layers the loader may activate: disable all implicit
 * layers ("~implicit~"), then re-allow a comma-separated wildcard list of
 * vetted vendors/tools via VK_LOADER_LAYERS_ALLOW. */
226 std::stringstream allowed_layers;
227 allowed_layers <<
"VK_LAYER_KHRONOS_*";
228 allowed_layers <<
",VK_LAYER_AMD_*";
229 allowed_layers <<
",VK_LAYER_INTEL_*";
230 allowed_layers <<
",VK_LAYER_NV_*";
231 allowed_layers <<
",VK_LAYER_MESA_*";
233 allowed_layers <<
",VK_LAYER_LUNARG_*";
234 allowed_layers <<
",VK_LAYER_RENDERDOC_*";
/* Order matters to the loader: DISABLE is applied first, ALLOW re-enables
 * the matching subset. */
236 BLI_setenv(
"VK_LOADER_LAYERS_DISABLE",
"~implicit~");
237 BLI_setenv(
"VK_LOADER_LAYERS_ALLOW", allowed_layers.str().c_str());
/* Create a throwaway Vulkan 1.2 instance used only to probe physical devices.
 * vkCreateInstance's result code is not checked directly; failure is detected
 * by the handle remaining VK_NULL_HANDLE. */
240 VkApplicationInfo vk_application_info = {VK_STRUCTURE_TYPE_APPLICATION_INFO};
241 vk_application_info.pApplicationName =
"Blender";
242 vk_application_info.applicationVersion = VK_MAKE_VERSION(1, 0, 0);
243 vk_application_info.pEngineName =
"Blender";
244 vk_application_info.engineVersion = VK_MAKE_VERSION(1, 0, 0);
245 vk_application_info.apiVersion = VK_API_VERSION_1_2;
247 VkInstanceCreateInfo vk_instance_info = {VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO};
248 vk_instance_info.pApplicationInfo = &vk_application_info;
250 VkInstance vk_instance = VK_NULL_HANDLE;
251 vkCreateInstance(&vk_instance_info,
nullptr, &vk_instance);
252 if (vk_instance == VK_NULL_HANDLE) {
253 CLOG_ERROR(&
LOG,
"Unable to initialize a Vulkan 1.2 instance.");
/* Scan all physical devices; accept the first one with no known driver issues
 * and no missing capabilities, then stop (auto-detection later can still pick
 * a different GPU). The probe instance is destroyed on every exit path. */
258 uint32_t physical_devices_count = 0;
259 vkEnumeratePhysicalDevices(vk_instance, &physical_devices_count,
nullptr);
261 vkEnumeratePhysicalDevices(vk_instance, &physical_devices_count, vk_physical_devices.
data());
263 for (VkPhysicalDevice vk_physical_device : vk_physical_devices) {
264 VkPhysicalDeviceProperties vk_properties = {};
265 vkGetPhysicalDeviceProperties(vk_physical_device, &vk_properties);
/* Device rejected because of a known-bad installed driver. */
269 "Installed driver for device [%s] has known issues and will not be used. Updating "
270 "driver might improve compatibility.",
271 vk_properties.deviceName);
277 if (missing_capabilities.
is_empty()) {
281 "Device [%s] supports minimum requirements. Skip checking other GPUs. Another GPU "
282 "can still be selected during auto-detection.",
283 vk_properties.deviceName);
285 vkDestroyInstance(vk_instance,
nullptr);
/* Build a single diagnostic line listing every missing feature. */
289 std::stringstream ss;
290 ss <<
"Device [" << vk_properties.deviceName
291 <<
"] does not meet minimum requirements. Missing features are [";
293 ss << feature <<
", ";
/* Rewind the put pointer by 2 to drop the trailing ", " separator. */
295 ss.seekp(-2, std::ios_base::end);
302 vkDestroyInstance(vk_instance,
nullptr);
304 "No Vulkan device found that meets the minimum requirements. "
305 "Updating GPU driver can improve compatibility.");
/* Apple path: reuse the instance GHOST already created instead of building a
 * probe instance, then register every physical device in the global device
 * list with a "vendorID/deviceID/..." hex identifier. */
313#elif defined(__APPLE__)
335 GHOST_VulkanHandles vulkan_handles = {};
336 GHOST_GetVulkanHandles(ghost_context, &vulkan_handles);
/* Two-call enumeration pattern: count, then fill. */
338 uint32_t physical_devices_count = 0;
339 vkEnumeratePhysicalDevices(vulkan_handles.instance, &physical_devices_count,
nullptr);
341 vkEnumeratePhysicalDevices(
342 vulkan_handles.instance, &physical_devices_count, vk_physical_devices.
data());
344 for (VkPhysicalDevice vk_physical_device : vk_physical_devices) {
348 VkPhysicalDeviceProperties vk_properties = {};
349 vkGetPhysicalDeviceProperties(vk_physical_device, &vk_properties);
350 std::stringstream identifier;
351 identifier << std::hex << vk_properties.vendorID <<
"/" << vk_properties.deviceID <<
"/"
353 GPG.devices.append({identifier.str(),
355 vk_properties.vendorID,
356 vk_properties.deviceID,
357 std::string(vk_properties.deviceName)});
/* Sort comparator: order devices by name, falling back to enumeration index
 * to keep the order stable for identically-named GPUs. */
363 if (a.name == b.name) {
364 return a.index < b.index;
366 return a.
name <
b.name;
/* Publish the selected device's identity to the global GPU platform state
 * (GPG): vendor/driver strings, the device list, and — when the driver
 * reports a valid LUID — the device LUID + node mask for external-memory
 * interop. Invalid LUIDs are zeroed so stale values can't leak through. */
372 const VkPhysicalDeviceProperties &properties =
device.physical_device_properties_get();
379 std::string vendor_name =
device.vendor_name();
380 std::string driver_version =
device.driver_version();
386 GPG.init(device_type,
392 properties.deviceName,
393 driver_version.c_str(),
395 GPG.devices = devices;
397 const VkPhysicalDeviceIDProperties &id_properties =
device.physical_device_id_properties_get();
401 if (id_properties.deviceLUIDValid) {
403 GPG.device_luid_node_mask = id_properties.deviceNodeMask;
/* No valid LUID reported: clear both fields. */
406 GPG.device_luid.reinitialize(0);
407 GPG.device_luid_node_mask = 0;
/* Log the final selection for debugging. */
411 "Using vendor [%s] device [%s] driver version [%s].",
413 device.vk_physical_device_properties_.deviceName,
414 driver_version.c_str());
/* Detect per-driver workarounds and optional-extension availability, storing
 * the results on the device. A debug switch can force every workaround on so
 * the fallback code paths stay testable. */
417void VKBackend::detect_workarounds(
VKDevice &device)
424 printf(
"VK: Forcing workaround usage and disabling features and extensions.\n");
/* Forced-workaround path: also disables stencil export. */
439 GCaps.stencil_export_support =
false;
441 device.workarounds_ = workarounds;
442 device.extensions_ = extensions;
/* Optional extensions probed by name; each toggles a feature flag. */
452 VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME);
454 VK_KHR_DYNAMIC_RENDERING_LOCAL_READ_EXTENSION_NAME);
456 VK_EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_EXTENSION_NAME);
461 VK_EXT_DESCRIPTOR_BUFFER_EXTENSION_NAME);
466 VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_EXTENSION_NAME);
469 VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME);
470#elif not defined(__APPLE__)
/* Vendor-specific workarounds keyed on the reported driverID. */
481 if (device.vk_physical_device_driver_properties_.driverID == VK_DRIVER_ID_NVIDIA_PROPRIETARY) {
493 if (device.vk_physical_device_driver_properties_.driverID ==
494 VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS)
/* Probe vertex-format support: a format without the vertex-buffer feature
 * bit needs the not-aligned/vertex-format workaround. */
522 VkFormatProperties format_properties = {};
523 vkGetPhysicalDeviceFormatProperties(
526 VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT) == 0;
528 device.workarounds_ = workarounds;
529 device.extensions_ = extensions;
/* Tear down platform state; all device work is guarded by is_initialized()
 * so exit is safe even when Vulkan never came up. */
532void VKBackend::platform_exit()
535 VKDevice &device = VKBackend::get().device;
536 if (device.is_initialized()) {
555 if (
device.is_initialized()) {
/* Compute dispatch: record (indirect) dispatch nodes into the context's
 * render graph rather than submitting immediately. */
569 context.render_graph().add_node(dispatch_info);
582 context.render_graph().add_node(dispatch_indirect_info);
/* Lazily initialize the shared device on first context creation, then
 * register the new context and hook up GHOST's swap-buffer callbacks. */
593 if (!
device.is_initialized()) {
594 device.init(ghost_context);
595 device.extensions_get().log();
600 device.context_register(*context);
601 GHOST_SetVulkanSwapBuffersCallbacks((GHOST_ContextHandle)ghost_context,
680 if (context !=
nullptr) {
/* Move render-thread orphaned GPU resources into the device-level orphan
 * list under the orphan mutex, tagged with the next timeline value so they
 * are only destroyed after in-flight work completes. The same move appears
 * on several paths (and once more when resource release is forced). */
683 std::scoped_lock
lock(
device.orphaned_data.mutex_get());
684 device.orphaned_data.move_data(
device.orphaned_data_render,
685 device.orphaned_data.timeline_ + 1);
693 std::scoped_lock
lock(
device.orphaned_data.mutex_get());
694 device.orphaned_data.move_data(
device.orphaned_data_render,
695 device.orphaned_data.timeline_ + 1);
701 if (force_resource_release) {
702 std::scoped_lock
lock(
device.orphaned_data.mutex_get());
703 device.orphaned_data.move_data(
device.orphaned_data_render,
704 device.orphaned_data.timeline_ + 1);
/* Translate VkPhysicalDeviceLimits into the backend-agnostic GCaps table.
 * Unsigned Vulkan limits are clamped to INT_MAX via min_uu() because GCaps
 * stores signed ints. */
710 const VkPhysicalDeviceProperties &properties =
device.physical_device_properties_get();
711 const VkPhysicalDeviceLimits &limits = properties.limits;
/* geometryShader support was already required by missing_capabilities_get(). */
715 GCaps.geometry_shader_support =
true;
716 GCaps.stencil_export_support =
device.supports_extension(
717 VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME);
719 GCaps.max_texture_size =
max_ii(limits.maxImageDimension1D, limits.maxImageDimension2D);
720 GCaps.max_texture_3d_size =
min_uu(limits.maxImageDimension3D, INT_MAX);
722 GCaps.max_texture_layers =
min_uu(limits.maxImageArrayLayers, INT_MAX);
723 GCaps.max_textures =
min_uu(limits.maxDescriptorSetSampledImages, INT_MAX);
725 limits.maxPerStageDescriptorSampledImages, INT_MAX);
726 GCaps.max_samplers =
min_uu(limits.maxSamplerAllocationCount, INT_MAX);
727 GCaps.max_images =
min_uu(limits.maxPerStageDescriptorStorageImages, INT_MAX);
/* Per-axis compute work-group limits (x, y, z). */
728 for (
int i = 0;
i < 3;
i++) {
729 GCaps.max_work_group_count[
i] =
min_uu(limits.maxComputeWorkGroupCount[
i], INT_MAX);
730 GCaps.max_work_group_size[
i] =
min_uu(limits.maxComputeWorkGroupSize[
i], INT_MAX);
733 limits.maxPerStageDescriptorUniformBuffers, INT_MAX);
734 GCaps.max_batch_indices =
min_uu(limits.maxDrawIndirectCount, INT_MAX);
735 GCaps.max_batch_vertices =
min_uu(limits.maxDrawIndexedIndexValue, INT_MAX);
736 GCaps.max_vertex_attribs =
min_uu(limits.maxVertexInputAttributes, INT_MAX);
737 GCaps.max_varying_floats =
min_uu(limits.maxVertexOutputComponents, INT_MAX);
/* Storage-buffer binding count is shared between graphics and compute. */
738 GCaps.max_shader_storage_buffer_bindings =
GCaps.max_compute_shader_storage_blocks =
min_uu(
739 limits.maxPerStageDescriptorStorageBuffers, INT_MAX);
740 GCaps.max_uniform_buffer_size = size_t(limits.maxUniformBufferRange);
741 GCaps.max_storage_buffer_size = size_t(limits.maxStorageBufferRange);
742 GCaps.storage_buffer_alignment = limits.minStorageBufferOffsetAlignment;
745 GCaps.mem_stats_support =
true;
/* Extension count queried only for reporting (GCaps.extensions_len); the
 * properties themselves are not fetched here. */
747 uint32_t vk_extension_count;
748 vkEnumerateDeviceExtensionProperties(
749 device.physical_device_get(),
nullptr, &vk_extension_count,
nullptr);
750 GCaps.extensions_len = vk_extension_count;
753 detect_workarounds(
device);
@ G_DEBUG_GPU_FORCE_WORKAROUNDS
@ G_DEBUG_GPU_FORCE_VULKAN_LOCAL_READ
#define BLI_assert_msg(a, msg)
MINLINE uint min_uu(uint a, uint b)
MINLINE int max_ii(int a, int b)
void BLI_setenv(const char *env, const char *val) ATTR_NONNULL(1)
int BLI_system_thread_count(void)
int BLI_thread_is_main(void)
#define CLOG_ERROR(clg_ref,...)
#define CLOG_DEBUG(clg_ref,...)
#define CLOG_WARN(clg_ref,...)
void CLG_logref_init(CLG_LogRef *clg_ref)
#define CLOG_INFO(clg_ref,...)
GHOST C-API function and type declarations.
GHOST_ContextHandle GHOST_GetDrawingContext(GHOST_WindowHandle windowhandle)
int GPU_max_parallel_compilations()
static DBVT_INLINE btScalar size(const btDbvtVolume &a)
bool contains(const Key &key) const
static constexpr int64_t not_found
constexpr int64_t find(char c, int64_t pos=0) const
void append(const T &value)
ShaderCompiler * compiler_
void render_step(bool) override
Context * context_alloc(void *ghost_window, void *ghost_context) override
Batch * batch_alloc() override
StorageBuf * storagebuf_alloc(size_t size, GPUUsageType usage, const char *name) override
static void capabilities_init(VKDevice &device)
IndexBuf * indexbuf_alloc() override
static void platform_init(const VKDevice &device)
QueryPool * querypool_alloc() override
static bool is_supported()
UniformBuf * uniformbuf_alloc(size_t size, const char *name) override
Texture * texture_alloc(const char *name) override
void samplers_update() override
PixelBuffer * pixelbuf_alloc(size_t size) override
void render_begin() override
Shader * shader_alloc(const char *name) override
void render_end() override
FrameBuffer * framebuffer_alloc(const char *name) override
void compute_dispatch(int groups_x_len, int groups_y_len, int groups_z_len) override
void compute_dispatch_indirect(StorageBuf *indirect_buf) override
void delete_resources() override
void init_resources() override
VertBuf * vertbuf_alloc() override
Fence * fence_alloc() override
static void openxr_acquire_framebuffer_image_callback(GHOST_VulkanOpenXRData *data)
static void swap_buffer_acquired_callback()
static void openxr_release_framebuffer_image_callback(GHOST_VulkanOpenXRData *data)
static void swap_buffer_draw_callback(const GHOST_VulkanSwapChainData *data)
const char * extension_name_get(int index) const
const VkPhysicalDeviceFeatures & physical_device_features_get() const
bool supports_extension(const char *extension_name) const
const VkPhysicalDeviceProperties & physical_device_properties_get() const
std::string vendor_name() const
std::string driver_version() const
VkPhysicalDevice physical_device_get() const
const VkPhysicalDeviceVulkan12Features & physical_device_vulkan_12_features_get() const
VkBuffer vk_handle() const
VKDispatchCreateInfo CreateInfo
static Context * unwrap(GPUContext *ctx)
bool GPU_vulkan_is_supported_driver(VkPhysicalDevice vk_physical_device)
static GPUOSType determine_os_type()
static void init_device_list(GHOST_ContextHandle ghost_context)
static const char * vk_extension_get(int index)
static Vector< StringRefNull > missing_capabilities_get(VkPhysicalDevice vk_physical_device)
bool dynamic_rendering_unused_attachments
bool fragment_shader_barycentric
bool pageable_device_local_memory
bool dynamic_rendering_local_read
bool shader_output_viewport_index
bool not_aligned_pixel_formats
struct blender::gpu::VKWorkarounds::@332200143060210304170234105102153121344114125320 vertex_formats
VKDispatchData dispatch_node
VKPipelineData pipeline_data
VKDispatchIndirectData dispatch_indirect_node
VKPipelineData pipeline_data