/* Fragment of the driver-support check (GPU_vulkan_is_supported_driver): rejects
 * devices whose installed driver has known issues. */
/* Chain VkPhysicalDeviceDriverProperties behind VkPhysicalDeviceProperties2 so one
 * vkGetPhysicalDeviceProperties2 call fills both the driver ID and the conformance
 * version. */
51 VkPhysicalDeviceProperties2 vk_physical_device_properties = {};
52 VkPhysicalDeviceDriverProperties vk_physical_device_driver_properties = {};
53 vk_physical_device_properties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
54 vk_physical_device_driver_properties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES;
55 vk_physical_device_properties.pNext = &vk_physical_device_driver_properties;
56 vkGetPhysicalDeviceProperties2(vk_physical_device, &vk_physical_device_properties);
/* Pack the 4-component conformance version into a single comparable integer.
 * NOTE(review): VK_MAKE_API_VERSION's first parameter is the 3-bit `variant` field, so
 * this packing assumes conformanceVersion.major stays below 8 -- confirm. */
57 uint32_t conformance_version = VK_MAKE_API_VERSION(
58 vk_physical_device_driver_properties.conformanceVersion.major,
59 vk_physical_device_driver_properties.conformanceVersion.minor,
60 vk_physical_device_driver_properties.conformanceVersion.subminor,
61 vk_physical_device_driver_properties.conformanceVersion.patch);
/* Intel proprietary Windows driver on integrated GPUs: rejected when the driver's
 * conformance version is older than 1.3.2.0. */
73 if (vk_physical_device_driver_properties.driverID == VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS &&
74 vk_physical_device_properties.properties.deviceType ==
75 VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU &&
76 conformance_version < VK_MAKE_API_VERSION(1, 3, 2, 0))
/* NVIDIA proprietary driver: require conformance version 1.3.7.2 or newer. */
87 if (vk_physical_device_driver_properties.driverID == VK_DRIVER_ID_NVIDIA_PROPRIETARY &&
88 conformance_version < VK_DRIVER_ID_NVIDIA_PROPRIETARY == vk_physical_device_driver_properties.driverID ? VK_MAKE_API_VERSION(1, 3, 7, 2) : VK_MAKE_API_VERSION(1, 3, 7, 2))
/* NVIDIA release 580.76.5: matched by substring search of driverInfo. Both the
 * "580.76.5" and zero-padded "580.76.05" spellings are tested -- presumably the driver
 * reports either form; TODO confirm against actual driverInfo strings. */
97 if (vk_physical_device_driver_properties.driverID == VK_DRIVER_ID_NVIDIA_PROPRIETARY &&
98 ((
StringRefNull(vk_physical_device_driver_properties.driverInfo).
find(
"580.76.5", 0) !=
100 (
StringRefNull(vk_physical_device_driver_properties.driverInfo).
find(
"580.76.05", 0) !=
/* Qualcomm: compare the raw driverVersion with the packed equivalent of release
 * 31.0.112.0. NOTE(review): VK_MAKE_VERSION(512, 827, 0) is a vendor-specific
 * encoding of that release -- verify against Qualcomm's version scheme. */
108 if (vk_physical_device_driver_properties.driverID == VK_DRIVER_ID_QUALCOMM_PROPRIETARY) {
115 const uint32_t driver_version = vk_physical_device_properties.properties.driverVersion;
116 constexpr uint32_t version_31_0_112 = VK_MAKE_VERSION(512, 827, 0);
117 if (driver_version < version_31_0_112) {
119 "Detected qualcomm driver is not supported. To run the Vulkan backend "
120 "driver 31.0.112.0 or later is required. Switching to OpenGL.");
/* Fragment of missing_capabilities_get(): builds the list of minimum requirements the
 * device does NOT meet. An empty result means the device is usable. */
/* Query core features together with the Vulkan 1.2 feature struct; features_12 is
 * chained into features.pNext (the chaining happens in the elided initializer tail). */
133 VkPhysicalDeviceVulkan12Features features_12 = {
134 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES};
135 VkPhysicalDeviceFeatures2 features = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
138 vkGetPhysicalDeviceFeatures2(vk_physical_device, &features);
/* For every required core feature the device lacks, append a human-readable name; the
 * caller includes these in the "does not meet minimum requirements" message. */
141 if (features.features.geometryShader == VK_FALSE) {
142 missing_capabilities.
append(
"geometry shaders");
144 if (features.features.logicOp == VK_FALSE) {
145 missing_capabilities.
append(
"logical operations");
148 if (features.features.dualSrcBlend == VK_FALSE) {
149 missing_capabilities.
append(
"dual source blending");
151 if (features.features.imageCubeArray == VK_FALSE) {
152 missing_capabilities.
append(
"image cube array");
154 if (features.features.multiDrawIndirect == VK_FALSE) {
155 missing_capabilities.
append(
"multi draw indirect");
157 if (features.features.multiViewport == VK_FALSE) {
158 missing_capabilities.
append(
"multi viewport");
160 if (features.features.shaderClipDistance == VK_FALSE) {
161 missing_capabilities.
append(
"shader clip distance");
163 if (features.features.drawIndirectFirstInstance == VK_FALSE) {
164 missing_capabilities.
append(
"draw indirect first instance");
166 if (features.features.fragmentStoresAndAtomics == VK_FALSE) {
167 missing_capabilities.
append(
"fragment stores and atomics");
/* Vulkan 1.2 features filled in through the pNext chain above. */
169 if (features_12.timelineSemaphore == VK_FALSE) {
170 missing_capabilities.
append(
"timeline semaphores");
172 if (features_12.bufferDeviceAddress == VK_FALSE) {
173 missing_capabilities.
append(
"buffer device address");
/* Enumerate device extensions with the usual count-then-fill pattern, then require the
 * swapchain and provoking-vertex extensions to be present. */
177 uint32_t vk_extension_count;
178 vkEnumerateDeviceExtensionProperties(vk_physical_device,
nullptr, &vk_extension_count,
nullptr);
181 vkEnumerateDeviceExtensionProperties(
182 vk_physical_device,
nullptr, &vk_extension_count, vk_extensions.
data());
/* Collect extension names into a set for the contains() lookups below. */
184 for (VkExtensionProperties &vk_extension : vk_extensions) {
185 extensions.
add(vk_extension.extensionName);
188 if (!extensions.
contains(VK_KHR_SWAPCHAIN_EXTENSION_NAME)) {
189 missing_capabilities.
append(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
193 if (!extensions.
contains(VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME)) {
194 missing_capabilities.
append(VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME);
198 return missing_capabilities;
/* Fragment of is_supported(): creates a throw-away Vulkan 1.2 instance to probe whether
 * any physical device meets the backend's minimum requirements. */
/* Build the wildcard allow-list of layer families the Vulkan loader may activate. */
211 std::stringstream allowed_layers;
212 allowed_layers <<
"VK_LAYER_KHRONOS_*";
213 allowed_layers <<
",VK_LAYER_AMD_*";
214 allowed_layers <<
",VK_LAYER_INTEL_*";
215 allowed_layers <<
",VK_LAYER_NV_*";
216 allowed_layers <<
",VK_LAYER_MESA_*";
218 allowed_layers <<
",VK_LAYER_LUNARG_*";
219 allowed_layers <<
",VK_LAYER_RENDERDOC_*";
/* The Vulkan loader reads these env vars at instance creation: disable all implicit
 * layers, then re-allow only the families listed above. */
221 BLI_setenv(
"VK_LOADER_LAYERS_DISABLE",
"~implicit~");
222 BLI_setenv(
"VK_LOADER_LAYERS_ALLOW", allowed_layers.str().c_str());
/* Minimal application info; the backend requires Vulkan 1.2. */
225 VkApplicationInfo vk_application_info = {VK_STRUCTURE_TYPE_APPLICATION_INFO};
226 vk_application_info.pApplicationName =
"Blender";
227 vk_application_info.applicationVersion = VK_MAKE_VERSION(1, 0, 0);
228 vk_application_info.pEngineName =
"Blender";
229 vk_application_info.engineVersion = VK_MAKE_VERSION(1, 0, 0);
230 vk_application_info.apiVersion = VK_API_VERSION_1_2;
232 VkInstanceCreateInfo vk_instance_info = {VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO};
233 vk_instance_info.pApplicationInfo = &vk_application_info;
/* A null handle after vkCreateInstance means Vulkan 1.2 is unavailable -> unsupported. */
235 VkInstance vk_instance = VK_NULL_HANDLE;
236 vkCreateInstance(&vk_instance_info,
nullptr, &vk_instance);
237 if (vk_instance == VK_NULL_HANDLE) {
238 CLOG_ERROR(&
LOG,
"Unable to initialize a Vulkan 1.2 instance.");
/* Count-then-fill enumeration of the physical devices on this instance. */
243 uint32_t physical_devices_count = 0;
244 vkEnumeratePhysicalDevices(vk_instance, &physical_devices_count,
nullptr);
246 vkEnumeratePhysicalDevices(vk_instance, &physical_devices_count, vk_physical_devices.
data());
/* Probe each device: skip devices with known-bad drivers (warn only), and accept the
 * first device whose missing-capability list is empty. */
248 for (VkPhysicalDevice vk_physical_device : vk_physical_devices) {
249 VkPhysicalDeviceProperties vk_properties = {};
250 vkGetPhysicalDeviceProperties(vk_physical_device, &vk_properties);
254 "Installed driver for device [%s] has known issues and will not be used. Updating "
255 "driver might improve compatibility.",
256 vk_properties.deviceName);
262 if (missing_capabilities.
is_empty()) {
266 "Device [%s] supports minimum requirements. Skip checking other GPUs. Another GPU "
267 "can still be selected during auto-detection.",
268 vk_properties.deviceName);
/* Probe instance is destroyed on the early success path... */
270 vkDestroyInstance(vk_instance,
nullptr);
/* Device rejected: report which features are missing. */
274 std::stringstream ss;
275 ss <<
"Device [" << vk_properties.deviceName
276 <<
"] does not meet minimum requirements. Missing features are [";
278 ss << feature <<
", ";
/* Rewind the put-pointer two characters to drop the trailing ", " separator. */
280 ss.seekp(-2, std::ios_base::end);
/* ...and on the no-device-found path as well. */
287 vkDestroyInstance(vk_instance,
nullptr);
289 "No Vulkan device found that meets the minimum requirements. "
290 "Updating GPU driver can improve compatibility.");
298#elif defined(__APPLE__)
/* Fragment of init_device_list(): enumerates the physical devices of the GHOST-owned
 * instance and registers each one in the global device list (GPG.devices). */
320 GHOST_VulkanHandles vulkan_handles = {};
321 GHOST_GetVulkanHandles(ghost_context, &vulkan_handles);
/* Count-then-fill device enumeration. */
323 uint32_t physical_devices_count = 0;
324 vkEnumeratePhysicalDevices(vulkan_handles.instance, &physical_devices_count,
nullptr);
326 vkEnumeratePhysicalDevices(
327 vulkan_handles.instance, &physical_devices_count, vk_physical_devices.
data());
329 for (VkPhysicalDevice vk_physical_device : vk_physical_devices) {
333 VkPhysicalDeviceProperties vk_properties = {};
334 vkGetPhysicalDeviceProperties(vk_physical_device, &vk_properties);
/* Build a hex "vendorID/deviceID/..." string used as a stable device identifier
 * (the tail of the identifier is in the elided lines). */
335 std::stringstream identifier;
336 identifier << std::hex << vk_properties.vendorID <<
"/" << vk_properties.deviceID <<
"/"
338 GPG.devices.append({identifier.str(),
340 vk_properties.vendorID,
341 vk_properties.deviceID,
342 std::string(vk_properties.deviceName)});
/* Device-ordering comparator fragment: sort by device name, using the enumeration index
 * as a tie-breaker so identically-named GPUs keep a stable, deterministic order. */
348 if (a.name == b.name) {
349 return a.index < b.index;
351 return a.
name <
b.name;
/* Fragment of platform_init(const VKDevice &): publishes the selected device's identity
 * (vendor, name, driver version, LUID) into the global platform state (GPG). */
357 const VkPhysicalDeviceProperties &properties =
device.physical_device_properties_get();
364 std::string vendor_name =
device.vendor_name();
365 std::string driver_version =
device.driver_version();
371 GPG.init(device_type,
377 properties.deviceName,
378 driver_version.c_str(),
380 GPG.devices = devices;
/* Windows device LUID: only valid when the driver reports deviceLUIDValid; otherwise
 * reset to zero so downstream users can detect the absence. */
382 const VkPhysicalDeviceIDProperties &id_properties =
device.physical_device_id_properties_get();
386 if (id_properties.deviceLUIDValid) {
388 GPG.device_luid_node_mask = id_properties.deviceNodeMask;
391 GPG.device_luid.reinitialize(0);
392 GPG.device_luid_node_mask = 0;
/* Log the chosen vendor/device/driver combination for support diagnostics. */
397 "Using vendor [%s] device [%s] driver version [%s].",
399 device.vk_physical_device_properties_.deviceName,
400 driver_version.c_str());
/* Fragment of detect_workarounds(): decides which optional extensions/features to use
 * and which driver workarounds to enable, storing the result on the device. */
403void VKBackend::detect_workarounds(
VKDevice &device)
/* Debug override (--debug-gpu-force-workarounds): pretend nothing optional works. */
410 printf(
"VK: Forcing workaround usage and disabling features and extensions.\n");
426 GCaps.render_pass_workaround =
true;
/* NOTE(review): workarounds_/extensions_ are assigned here AND again at the end of the
 * function (elided lines in between) -- presumably this is an early-exit path for the
 * forced-workarounds case; confirm against the full source. */
428 device.workarounds_ = workarounds;
429 device.extensions_ = extensions;
/* Optional extensions probed for normal operation (the supports_extension() calls are
 * on the elided lines preceding each name). */
438 VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME);
440 VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME);
442 VK_KHR_DYNAMIC_RENDERING_LOCAL_READ_EXTENSION_NAME);
444 VK_EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_EXTENSION_NAME);
449 VK_EXT_DESCRIPTOR_BUFFER_EXTENSION_NAME);
453 VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_EXTENSION_NAME);
456 VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME);
457#elif not defined(__APPLE__)
/* Vendor-specific adjustments keyed on the cached driver ID. */
468 if (device.vk_physical_device_driver_properties_.driverID == VK_DRIVER_ID_NVIDIA_PROPRIETARY) {
480 if (device.vk_physical_device_driver_properties_.driverID ==
481 VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS)
/* Vertex-format workaround: a format needs patching when the device does not expose
 * VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT for it. */
509 VkFormatProperties format_properties = {};
510 vkGetPhysicalDeviceFormatProperties(
513 VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT) == 0;
/* Persist the final decisions on the device. */
526 device.workarounds_ = workarounds;
527 device.extensions_ = extensions;
/* Fragment of platform_exit(): teardown is guarded so an uninitialized device (e.g.
 * backend probed but never used) is a no-op. */
530void VKBackend::platform_exit()
533 VKDevice &device = VKBackend::get().device;
534 if (device.is_initialized()) {
/* Second is_initialized() guard belongs to a different teardown path (elided lines). */
553 if (
device.is_initialized()) {
/* compute_dispatch / compute_dispatch_indirect fragments: dispatches are recorded as
 * render-graph nodes rather than submitted immediately. */
567 context.render_graph().add_node(dispatch_info);
580 context.render_graph().add_node(dispatch_indirect_info);
/* context_alloc fragment: the device is initialized lazily on the first context
 * creation, then the new context is registered and GHOST swap-buffer callbacks hooked. */
591 if (!
device.is_initialized()) {
592 device.init(ghost_context);
/* Log the detected extension set once, right after device init. */
593 device.extensions_get().log();
598 device.context_register(*context);
599 GHOST_SetVulkanSwapBuffersCallbacks((GHOST_ContextHandle)ghost_context,
/* Orphaned-resource handoff, repeated on three paths (around swap-buffers, render end,
 * and forced release): under the orphaned-data mutex, move pending render-side
 * deletions into the device's orphaned list, tagged with timeline_ + 1 -- presumably so
 * they are only destroyed after the GPU has passed that timeline value; confirm against
 * the timeline-semaphore wait logic elsewhere. */
678 if (context !=
nullptr) {
681 std::scoped_lock
lock(
device.orphaned_data.mutex_get());
682 device.orphaned_data.move_data(
device.orphaned_data_render,
683 device.orphaned_data.timeline_ + 1);
/* Same handoff on the render-end path. */
691 std::scoped_lock
lock(
device.orphaned_data.mutex_get());
692 device.orphaned_data.move_data(
device.orphaned_data_render,
693 device.orphaned_data.timeline_ + 1);
/* And once more when callers explicitly force resource release. */
699 if (force_resource_release) {
700 std::scoped_lock
lock(
device.orphaned_data.mutex_get());
701 device.orphaned_data.move_data(
device.orphaned_data_render,
702 device.orphaned_data.timeline_ + 1);
/* Fragment of capabilities_init(): translate the device's Vulkan limits and features
 * into the backend-agnostic GCaps table. */
708 const VkPhysicalDeviceProperties &properties =
device.physical_device_properties_get();
709 const VkPhysicalDeviceLimits &limits = properties.limits;
/* Features guaranteed by the minimum-requirements check are reported unconditionally;
 * optional ones are probed per extension / feature struct. */
713 GCaps.geometry_shader_support =
true;
714 GCaps.clip_control_support =
true;
715 GCaps.stencil_export_support =
device.supports_extension(
716 VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME);
717 GCaps.shader_draw_parameters_support =
718 device.physical_device_vulkan_11_features_get().shaderDrawParameters;
/* Vulkan limits are uint32_t; min_uu(..., INT_MAX) clamps them into the signed ints
 * that GCaps stores. */
720 GCaps.max_texture_size =
max_ii(limits.maxImageDimension1D, limits.maxImageDimension2D);
721 GCaps.max_texture_3d_size =
min_uu(limits.maxImageDimension3D, INT_MAX);
722 GCaps.max_texture_layers =
min_uu(limits.maxImageArrayLayers, INT_MAX);
723 GCaps.max_textures =
min_uu(limits.maxDescriptorSetSampledImages, INT_MAX);
725 limits.maxPerStageDescriptorSampledImages, INT_MAX);
726 GCaps.max_samplers =
min_uu(limits.maxSamplerAllocationCount, INT_MAX);
727 GCaps.max_images =
min_uu(limits.maxPerStageDescriptorStorageImages, INT_MAX);
/* Per-axis compute work-group limits (x, y, z). */
728 for (
int i = 0;
i < 3;
i++) {
729 GCaps.max_work_group_count[
i] =
min_uu(limits.maxComputeWorkGroupCount[
i], INT_MAX);
730 GCaps.max_work_group_size[
i] =
min_uu(limits.maxComputeWorkGroupSize[
i], INT_MAX);
733 limits.maxPerStageDescriptorUniformBuffers, INT_MAX);
734 GCaps.max_batch_indices =
min_uu(limits.maxDrawIndirectCount, INT_MAX);
735 GCaps.max_batch_vertices =
min_uu(limits.maxDrawIndexedIndexValue, INT_MAX);
736 GCaps.max_vertex_attribs =
min_uu(limits.maxVertexInputAttributes, INT_MAX);
737 GCaps.max_varying_floats =
min_uu(limits.maxVertexOutputComponents, INT_MAX);
/* Both SSBO-related caps share the per-stage storage-buffer descriptor limit. */
738 GCaps.max_shader_storage_buffer_bindings =
GCaps.max_compute_shader_storage_blocks =
min_uu(
739 limits.maxPerStageDescriptorStorageBuffers, INT_MAX);
740 GCaps.max_storage_buffer_size = size_t(limits.maxStorageBufferRange);
741 GCaps.storage_buffer_alignment = limits.minStorageBufferOffsetAlignment;
744 GCaps.mem_stats_support =
true;
/* Only the extension COUNT is cached here; individual names are looked up elsewhere. */
746 uint32_t vk_extension_count;
747 vkEnumerateDeviceExtensionProperties(
748 device.physical_device_get(),
nullptr, &vk_extension_count,
nullptr);
749 GCaps.extensions_len = vk_extension_count;
/* Workaround detection runs last so it can override the capabilities set above. */
752 detect_workarounds(
device);
@ G_DEBUG_GPU_FORCE_WORKAROUNDS
@ G_DEBUG_GPU_FORCE_VULKAN_LOCAL_READ
#define BLI_assert_msg(a, msg)
MINLINE uint min_uu(uint a, uint b)
MINLINE int max_ii(int a, int b)
void BLI_setenv(const char *env, const char *val) ATTR_NONNULL(1)
int BLI_system_thread_count(void)
int BLI_thread_is_main(void)
#define CLOG_ERROR(clg_ref,...)
#define CLOG_WARN(clg_ref,...)
void CLG_logref_init(CLG_LogRef *clg_ref)
#define CLOG_INFO(clg_ref, level,...)
GHOST C-API function and type declarations.
GHOST_ContextHandle GHOST_GetDrawingContext(GHOST_WindowHandle windowhandle)
int GPU_max_parallel_compilations()
static DBVT_INLINE btScalar size(const btDbvtVolume &a)
bool contains(const Key &key) const
static constexpr int64_t not_found
constexpr int64_t find(char c, int64_t pos=0) const
void append(const T &value)
ShaderCompiler * compiler_
void render_step(bool) override
Context * context_alloc(void *ghost_window, void *ghost_context) override
Batch * batch_alloc() override
StorageBuf * storagebuf_alloc(size_t size, GPUUsageType usage, const char *name) override
static void capabilities_init(VKDevice &device)
IndexBuf * indexbuf_alloc() override
static void platform_init(const VKDevice &device)
QueryPool * querypool_alloc() override
static bool is_supported()
UniformBuf * uniformbuf_alloc(size_t size, const char *name) override
Texture * texture_alloc(const char *name) override
void samplers_update() override
PixelBuffer * pixelbuf_alloc(size_t size) override
void render_begin() override
Shader * shader_alloc(const char *name) override
void render_end() override
FrameBuffer * framebuffer_alloc(const char *name) override
void compute_dispatch(int groups_x_len, int groups_y_len, int groups_z_len) override
void compute_dispatch_indirect(StorageBuf *indirect_buf) override
void delete_resources() override
void init_resources() override
VertBuf * vertbuf_alloc() override
Fence * fence_alloc() override
static void openxr_acquire_framebuffer_image_callback(GHOST_VulkanOpenXRData *data)
static void openxr_release_framebuffer_image_callback(GHOST_VulkanOpenXRData *data)
static void swap_buffers_post_callback()
static void swap_buffers_pre_callback(const GHOST_VulkanSwapChainData *data)
const char * extension_name_get(int index) const
const VkPhysicalDeviceFeatures & physical_device_features_get() const
bool supports_extension(const char *extension_name) const
const VkPhysicalDeviceProperties & physical_device_properties_get() const
std::string vendor_name() const
std::string driver_version() const
VkPhysicalDevice physical_device_get() const
const VkPhysicalDeviceVulkan12Features & physical_device_vulkan_12_features_get() const
VkBuffer vk_handle() const
VKDispatchCreateInfo CreateInfo
static Context * unwrap(GPUContext *ctx)
static eGPUOSType determine_os_type()
bool GPU_vulkan_is_supported_driver(VkPhysicalDevice vk_physical_device)
static void init_device_list(GHOST_ContextHandle ghost_context)
static const char * vk_extension_get(int index)
static Vector< StringRefNull > missing_capabilities_get(VkPhysicalDevice vk_physical_device)
bool render_pass_workaround
bool dynamic_rendering_unused_attachments
bool fragment_shader_barycentric
bool pageable_device_local_memory
bool dynamic_rendering_local_read
bool shader_output_viewport_index
bool not_aligned_pixel_formats
struct blender::gpu::VKWorkarounds::@250247362134102263364163314040144106045031265005 vertex_formats
VKDispatchData dispatch_node
VKPipelineData pipeline_data
VKDispatchIndirectData dispatch_indirect_node
VKPipelineData pipeline_data