1 #define __STDC_LIMIT_MACROS
2
3 #include <cstdlib>
4 #include <cstdint>
5 #include <cstring>
6 #include <iostream>
7
8 #include "Common/System/Display.h"
9 #include "Common/Log.h"
10 #include "Common/GPU/Vulkan/VulkanContext.h"
11 #include "Common/GPU/Vulkan/VulkanDebug.h"
12 #include "GPU/Common/ShaderCommon.h"
13 #include "Common/StringUtils.h"
14 #include "Core/Config.h"
15
16 // Change this to 1, 2, and 3 to fake failures in a few places, so that
17 // we can test our fallback-to-GL code.
18 #define SIMULATE_VULKAN_FAILURE 0
19
20 #ifdef USE_CRT_DBG
21 #undef new
22 #endif
23
24 #include "ext/glslang/SPIRV/GlslangToSpv.h"
25
26 #ifdef USE_CRT_DBG
27 #define new DBG_NEW
28 #endif
29
30 using namespace PPSSPP_VK;
31
// Global log options; passed as pUserData to the debug-utils messenger (see InitDebugUtilsCallback).
VulkanLogOptions g_LogOptions;

// Layer names requested when VULKAN_FLAG_VALIDATE is set. Only the unified
// Khronos validation layer is enabled; the older per-feature layers are kept
// below for reference.
static const char *validationLayers[] = {
	"VK_LAYER_KHRONOS_validation",
	/*
	// For layers included in the Android NDK.
	"VK_LAYER_GOOGLE_threading",
	"VK_LAYER_LUNARG_parameter_validation",
	"VK_LAYER_LUNARG_core_validation",
	"VK_LAYER_LUNARG_image",
	"VK_LAYER_LUNARG_object_tracker",
	"VK_LAYER_LUNARG_swapchain",
	"VK_LAYER_GOOGLE_unique_objects",
	*/
};
47
VulkanVendorString(uint32_t vendorId)48 std::string VulkanVendorString(uint32_t vendorId) {
49 switch (vendorId) {
50 case VULKAN_VENDOR_INTEL: return "Intel";
51 case VULKAN_VENDOR_NVIDIA: return "NVIDIA";
52 case VULKAN_VENDOR_AMD: return "AMD";
53 case VULKAN_VENDOR_ARM: return "ARM";
54 case VULKAN_VENDOR_QUALCOMM: return "Qualcomm";
55 case VULKAN_VENDOR_IMGTEC: return "Imagination";
56
57 default:
58 return StringFromFormat("%08x", vendorId);
59 }
60 }
61
PresentModeString(VkPresentModeKHR presentMode)62 const char *PresentModeString(VkPresentModeKHR presentMode) {
63 switch (presentMode) {
64 case VK_PRESENT_MODE_IMMEDIATE_KHR: return "IMMEDIATE";
65 case VK_PRESENT_MODE_MAILBOX_KHR: return "MAILBOX";
66 case VK_PRESENT_MODE_FIFO_KHR: return "FIFO";
67 case VK_PRESENT_MODE_FIFO_RELAXED_KHR: return "FIFO_RELAXED";
68 case VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR: return "SHARED_DEMAND_REFRESH_KHR";
69 case VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR: return "SHARED_CONTINUOUS_REFRESH_KHR";
70 default: return "UNKNOWN";
71 }
72 }
73
VulkanContext::VulkanContext() {
	// Do nothing here - all real initialization happens in CreateInstance().
}
77
// Creates the VkInstance: enumerates layers/extensions, enables the platform
// surface extension plus (optionally) validation, then enumerates physical
// devices and caches their properties. Returns VK_SUCCESS or an error, with
// init_error_ set on failure.
VkResult VulkanContext::CreateInstance(const CreateInfo &info) {
	if (!vkCreateInstance) {
		// The Vulkan loader was never initialized successfully.
		init_error_ = "Vulkan not loaded - can't create instance";
		return VK_ERROR_INITIALIZATION_FAILED;
	}

	instance_layer_names_.clear();
	device_layer_names_.clear();

	// We can get the list of layers and extensions without an instance so we can use this information
	// to enable the extensions we need that are available.
	GetInstanceLayerProperties();
	GetInstanceLayerExtensionList(nullptr, instance_extension_properties_);

	if (!IsInstanceExtensionAvailable(VK_KHR_SURFACE_EXTENSION_NAME)) {
		// Cannot create a Vulkan display without VK_KHR_SURFACE_EXTENSION.
		init_error_ = "Vulkan not loaded - no surface extension";
		return VK_ERROR_INITIALIZATION_FAILED;
	}
	flags_ = info.flags;

	// List extensions to try to enable.
	instance_extensions_enabled_.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
#ifdef _WIN32
	instance_extensions_enabled_.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
#elif defined(__ANDROID__)
	instance_extensions_enabled_.push_back(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME);
#else
	// Generic desktop/Linux: enable every windowing-system extension that is
	// both compiled in and reported available.
#if defined(VK_USE_PLATFORM_XLIB_KHR)
	if (IsInstanceExtensionAvailable(VK_KHR_XLIB_SURFACE_EXTENSION_NAME)) {
		instance_extensions_enabled_.push_back(VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
	}
#endif
//#if defined(VK_USE_PLATFORM_XCB_KHR)
//	instance_extensions_enabled_.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME);
//#endif
#if defined(VK_USE_PLATFORM_WAYLAND_KHR)
	if (IsInstanceExtensionAvailable(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME)) {
		instance_extensions_enabled_.push_back(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME);
	}
#endif
#if defined(VK_USE_PLATFORM_DISPLAY_KHR)
	if (IsInstanceExtensionAvailable(VK_KHR_DISPLAY_EXTENSION_NAME)) {
		instance_extensions_enabled_.push_back(VK_KHR_DISPLAY_EXTENSION_NAME);
	}
#endif
#if defined(VK_USE_PLATFORM_METAL_EXT)
	if (IsInstanceExtensionAvailable(VK_EXT_METAL_SURFACE_EXTENSION_NAME)) {
		instance_extensions_enabled_.push_back(VK_EXT_METAL_SURFACE_EXTENSION_NAME);
	}
#endif
#endif

	if (flags_ & VULKAN_FLAG_VALIDATE) {
		if (IsInstanceExtensionAvailable(VK_EXT_DEBUG_UTILS_EXTENSION_NAME)) {
			// Enable the validation layers
			for (size_t i = 0; i < ARRAY_SIZE(validationLayers); i++) {
				instance_layer_names_.push_back(validationLayers[i]);
				device_layer_names_.push_back(validationLayers[i]);
			}
			instance_extensions_enabled_.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
			extensionsLookup_.EXT_debug_utils = true;
			INFO_LOG(G3D, "Vulkan debug_utils validation enabled.");
		} else {
			// Without debug_utils there is no way to receive validation output,
			// so drop the flag entirely.
			ERROR_LOG(G3D, "Validation layer extension not available - not enabling Vulkan validation.");
			flags_ &= ~VULKAN_FLAG_VALIDATE;
		}
	}

	// Temporary hack for libretro. For some reason, when we try to load the functions from this extension,
	// we get null pointers when running libretro. Quite strange.
#if !defined(__LIBRETRO__)
	if (IsInstanceExtensionAvailable(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
		instance_extensions_enabled_.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
		extensionsLookup_.KHR_get_physical_device_properties2 = true;
	}
#endif

	// Validate that all the instance extensions we ask for are actually available.
	for (auto ext : instance_extensions_enabled_) {
		if (!IsInstanceExtensionAvailable(ext))
			WARN_LOG(G3D, "WARNING: Does not seem that instance extension '%s' is available. Trying to proceed anyway.", ext);
	}

	VkApplicationInfo app_info{ VK_STRUCTURE_TYPE_APPLICATION_INFO };
	app_info.pApplicationName = info.app_name;
	app_info.applicationVersion = info.app_ver;
	app_info.pEngineName = info.app_name;
	// Let's increment this when we make major engine/context changes.
	app_info.engineVersion = 2;
	app_info.apiVersion = VK_API_VERSION_1_0;

	VkInstanceCreateInfo inst_info{ VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO };
	inst_info.flags = 0;
	inst_info.pApplicationInfo = &app_info;
	inst_info.enabledLayerCount = (uint32_t)instance_layer_names_.size();
	inst_info.ppEnabledLayerNames = instance_layer_names_.size() ? instance_layer_names_.data() : nullptr;
	inst_info.enabledExtensionCount = (uint32_t)instance_extensions_enabled_.size();
	inst_info.ppEnabledExtensionNames = instance_extensions_enabled_.size() ? instance_extensions_enabled_.data() : nullptr;

#if SIMULATE_VULKAN_FAILURE == 2
	VkResult res = VK_ERROR_INCOMPATIBLE_DRIVER;
#else
	VkResult res = vkCreateInstance(&inst_info, nullptr, &instance_);
#endif
	if (res != VK_SUCCESS) {
		if (res == VK_ERROR_LAYER_NOT_PRESENT) {
			WARN_LOG(G3D, "Validation on but instance layer not available - dropping layers");
			// Drop the validation layers and try again.
			instance_layer_names_.clear();
			device_layer_names_.clear();
			inst_info.enabledLayerCount = 0;
			inst_info.ppEnabledLayerNames = nullptr;
			res = vkCreateInstance(&inst_info, nullptr, &instance_);
			if (res != VK_SUCCESS)
				ERROR_LOG(G3D, "Failed to create instance even without validation: %d", res);
		} else {
			ERROR_LOG(G3D, "Failed to create instance : %d", res);
		}
	}
	if (res != VK_SUCCESS) {
		init_error_ = "Failed to create Vulkan instance";
		return res;
	}

	VulkanLoadInstanceFunctions(instance_, extensionsLookup_);
	if (!CheckLayers(instance_layer_properties_, instance_layer_names_)) {
		// Non-fatal: proceed without the missing layer(s).
		WARN_LOG(G3D, "CheckLayers for instance failed");
		// init_error_ = "Failed to validate instance layers";
		// return;
	}

	uint32_t gpu_count = 1;
#if SIMULATE_VULKAN_FAILURE == 3
	gpu_count = 0;
#else
	res = vkEnumeratePhysicalDevices(instance_, &gpu_count, nullptr);
#endif
	if (gpu_count <= 0) {
		ERROR_LOG(G3D, "Vulkan driver found but no supported GPU is available");
		init_error_ = "No Vulkan physical devices found";
		vkDestroyInstance(instance_, nullptr);
		instance_ = nullptr;
		return VK_ERROR_INITIALIZATION_FAILED;
	}

	_dbg_assert_(gpu_count > 0);
	physical_devices_.resize(gpu_count);
	physicalDeviceProperties_.resize(gpu_count);
	res = vkEnumeratePhysicalDevices(instance_, &gpu_count, physical_devices_.data());
	if (res != VK_SUCCESS) {
		init_error_ = "Failed to enumerate physical devices";
		vkDestroyInstance(instance_, nullptr);
		instance_ = nullptr;
		return res;
	}

	// Cache per-device properties. With KHR_get_physical_device_properties2 we
	// can also chain and cache push-descriptor and external-host-memory limits.
	if (extensionsLookup_.KHR_get_physical_device_properties2) {
		for (uint32_t i = 0; i < gpu_count; i++) {
			VkPhysicalDeviceProperties2 props2{VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2};
			VkPhysicalDevicePushDescriptorPropertiesKHR pushProps{VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR};
			VkPhysicalDeviceExternalMemoryHostPropertiesEXT extHostMemProps{VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT};
			props2.pNext = &pushProps;
			pushProps.pNext = &extHostMemProps;
			vkGetPhysicalDeviceProperties2KHR(physical_devices_[i], &props2);
			// Don't want bad pointers sitting around.
			props2.pNext = nullptr;
			pushProps.pNext = nullptr;
			physicalDeviceProperties_[i].properties = props2.properties;
			physicalDeviceProperties_[i].pushDescriptorProperties = pushProps;
			physicalDeviceProperties_[i].externalMemoryHostProperties = extHostMemProps;
		}
	} else {
		for (uint32_t i = 0; i < gpu_count; i++) {
			vkGetPhysicalDeviceProperties(physical_devices_[i], &physicalDeviceProperties_[i].properties);
		}
	}

	if (extensionsLookup_.EXT_debug_utils) {
		InitDebugUtilsCallback();
	}

	return VK_SUCCESS;
}
262
VulkanContext::~VulkanContext() {
	// DestroyInstance() must have been called before the context is destroyed.
	_dbg_assert_(instance_ == VK_NULL_HANDLE);
}
266
DestroyInstance()267 void VulkanContext::DestroyInstance() {
268 if (extensionsLookup_.EXT_debug_utils) {
269 while (utils_callbacks.size() > 0) {
270 vkDestroyDebugUtilsMessengerEXT(instance_, utils_callbacks.back(), nullptr);
271 utils_callbacks.pop_back();
272 }
273 }
274
275 vkDestroyInstance(instance_, nullptr);
276 VulkanFree();
277 instance_ = VK_NULL_HANDLE;
278 }
279
BeginFrame()280 void VulkanContext::BeginFrame() {
281 FrameData *frame = &frame_[curFrame_];
282 // Process pending deletes.
283 frame->deleteList.PerformDeletes(device_);
284 }
285
EndFrame()286 void VulkanContext::EndFrame() {
287 frame_[curFrame_].deleteList.Take(globalDeleteList_);
288 curFrame_++;
289 if (curFrame_ >= inflightFrames_) {
290 curFrame_ = 0;
291 }
292 }
293
UpdateInflightFrames(int n)294 void VulkanContext::UpdateInflightFrames(int n) {
295 _dbg_assert_(n >= 1 && n <= MAX_INFLIGHT_FRAMES);
296 inflightFrames_ = n;
297 if (curFrame_ >= inflightFrames_) {
298 curFrame_ = 0;
299 }
300 }
301
void VulkanContext::WaitUntilQueueIdle() {
	// Should almost never be used - full queue sync stalls the GPU pipeline.
	vkQueueWaitIdle(gfx_queue_);
}
306
MemoryTypeFromProperties(uint32_t typeBits,VkFlags requirements_mask,uint32_t * typeIndex)307 bool VulkanContext::MemoryTypeFromProperties(uint32_t typeBits, VkFlags requirements_mask, uint32_t *typeIndex) {
308 // Search memtypes to find first index with those properties
309 for (uint32_t i = 0; i < 32; i++) {
310 if ((typeBits & 1) == 1) {
311 // Type is available, does it match user properties?
312 if ((memory_properties.memoryTypes[i].propertyFlags & requirements_mask) == requirements_mask) {
313 *typeIndex = i;
314 return true;
315 }
316 }
317 typeBits >>= 1;
318 }
319 // No memory types matched, return failure
320 return false;
321 }
322
DestroySwapchain()323 void VulkanContext::DestroySwapchain() {
324 if (swapchain_ != VK_NULL_HANDLE) {
325 vkDestroySwapchainKHR(device_, swapchain_, nullptr);
326 swapchain_ = VK_NULL_HANDLE;
327 }
328 }
329
DestroySurface()330 void VulkanContext::DestroySurface() {
331 if (surface_ != VK_NULL_HANDLE) {
332 vkDestroySurfaceKHR(instance_, surface_, nullptr);
333 surface_ = VK_NULL_HANDLE;
334 }
335 }
336
// Fills 'extensions' with the instance extensions provided by 'layerName'
// (or by the implementation itself when layerName is nullptr). Retries on
// VK_INCOMPLETE, which the loader returns if the extension count changed
// between the count query and the data query.
VkResult VulkanContext::GetInstanceLayerExtensionList(const char *layerName, std::vector<VkExtensionProperties> &extensions) {
	VkResult res;
	do {
		uint32_t instance_extension_count;
		res = vkEnumerateInstanceExtensionProperties(layerName, &instance_extension_count, nullptr);
		if (res != VK_SUCCESS)
			return res;
		if (instance_extension_count == 0)
			return VK_SUCCESS;
		extensions.resize(instance_extension_count);
		res = vkEnumerateInstanceExtensionProperties(layerName, &instance_extension_count, extensions.data());
	} while (res == VK_INCOMPLETE);
	return res;
}
351
// Enumerates all instance layers and, for each, its extension list, storing
// the results in instance_layer_properties_.
VkResult VulkanContext::GetInstanceLayerProperties() {
	/*
	 * It's possible, though very rare, that the number of
	 * instance layers could change. For example, installing something
	 * could include new layers that the loader would pick up
	 * between the initial query for the count and the
	 * request for VkLayerProperties. The loader indicates that
	 * by returning a VK_INCOMPLETE status and will update the
	 * count parameter.
	 * The count parameter will be updated with the number of
	 * entries loaded into the data pointer - in case the number
	 * of layers went down or is smaller than the size given.
	 */
	uint32_t instance_layer_count;
	std::vector<VkLayerProperties> vk_props;
	VkResult res;
	do {
		res = vkEnumerateInstanceLayerProperties(&instance_layer_count, nullptr);
		if (res != VK_SUCCESS)
			return res;
		if (!instance_layer_count)
			return VK_SUCCESS;
		vk_props.resize(instance_layer_count);
		res = vkEnumerateInstanceLayerProperties(&instance_layer_count, vk_props.data());
	} while (res == VK_INCOMPLETE);

	// Now gather the extension list for each instance layer.
	for (uint32_t i = 0; i < instance_layer_count; i++) {
		LayerProperties layer_props;
		layer_props.properties = vk_props[i];
		res = GetInstanceLayerExtensionList(layer_props.properties.layerName, layer_props.extensions);
		if (res != VK_SUCCESS)
			return res;
		instance_layer_properties_.push_back(layer_props);
	}
	return res;
}
389
390 // Pass layerName == nullptr to get the extension list for the device.
GetDeviceLayerExtensionList(const char * layerName,std::vector<VkExtensionProperties> & extensions)391 VkResult VulkanContext::GetDeviceLayerExtensionList(const char *layerName, std::vector<VkExtensionProperties> &extensions) {
392 VkResult res;
393 do {
394 uint32_t device_extension_count;
395 res = vkEnumerateDeviceExtensionProperties(physical_devices_[physical_device_], layerName, &device_extension_count, nullptr);
396 if (res != VK_SUCCESS)
397 return res;
398 if (!device_extension_count)
399 return VK_SUCCESS;
400 extensions.resize(device_extension_count);
401 res = vkEnumerateDeviceExtensionProperties(physical_devices_[physical_device_], layerName, &device_extension_count, extensions.data());
402 } while (res == VK_INCOMPLETE);
403 return res;
404 }
405
// Enumerates all device layers of the chosen physical device and, for each,
// its extension list, storing the results in device_layer_properties_.
VkResult VulkanContext::GetDeviceLayerProperties() {
	/*
	 * It's possible, though very rare, that the number of
	 * instance layers could change. For example, installing something
	 * could include new layers that the loader would pick up
	 * between the initial query for the count and the
	 * request for VkLayerProperties. The loader indicates that
	 * by returning a VK_INCOMPLETE status and will update the
	 * count parameter.
	 * The count parameter will be updated with the number of
	 * entries loaded into the data pointer - in case the number
	 * of layers went down or is smaller than the size given.
	 */
	uint32_t device_layer_count;
	std::vector<VkLayerProperties> vk_props;
	VkResult res;
	do {
		res = vkEnumerateDeviceLayerProperties(physical_devices_[physical_device_], &device_layer_count, nullptr);
		if (res != VK_SUCCESS)
			return res;
		if (device_layer_count == 0)
			return VK_SUCCESS;
		vk_props.resize(device_layer_count);
		res = vkEnumerateDeviceLayerProperties(physical_devices_[physical_device_], &device_layer_count, vk_props.data());
	} while (res == VK_INCOMPLETE);

	// Gather the list of extensions for each device layer.
	for (uint32_t i = 0; i < device_layer_count; i++) {
		LayerProperties layer_props;
		layer_props.properties = vk_props[i];
		res = GetDeviceLayerExtensionList(layer_props.properties.layerName, layer_props.extensions);
		if (res != VK_SUCCESS)
			return res;
		device_layer_properties_.push_back(layer_props);
	}
	return res;
}
443
444 // Returns true if all layer names specified in check_names can be found in given layer properties.
CheckLayers(const std::vector<LayerProperties> & layer_props,const std::vector<const char * > & layer_names) const445 bool VulkanContext::CheckLayers(const std::vector<LayerProperties> &layer_props, const std::vector<const char *> &layer_names) const {
446 uint32_t check_count = (uint32_t)layer_names.size();
447 uint32_t layer_count = (uint32_t)layer_props.size();
448 for (uint32_t i = 0; i < check_count; i++) {
449 bool found = false;
450 for (uint32_t j = 0; j < layer_count; j++) {
451 if (!strcmp(layer_names[i], layer_props[j].properties.layerName)) {
452 found = true;
453 }
454 }
455 if (!found) {
456 std::cout << "Cannot find layer: " << layer_names[i] << std::endl;
457 return false;
458 }
459 }
460 return true;
461 }
462
GetPhysicalDeviceByName(std::string name)463 int VulkanContext::GetPhysicalDeviceByName(std::string name) {
464 for (size_t i = 0; i < physical_devices_.size(); i++) {
465 if (physicalDeviceProperties_[i].properties.deviceName == name)
466 return (int)i;
467 }
468 return -1;
469 }
470
GetBestPhysicalDevice()471 int VulkanContext::GetBestPhysicalDevice() {
472 // Rules: Prefer discrete over embedded.
473 // Prefer nVidia over Intel.
474
475 int maxScore = -1;
476 int best = -1;
477
478 for (size_t i = 0; i < physical_devices_.size(); i++) {
479 int score = 0;
480 VkPhysicalDeviceProperties props;
481 vkGetPhysicalDeviceProperties(physical_devices_[i], &props);
482 switch (props.deviceType) {
483 case VK_PHYSICAL_DEVICE_TYPE_CPU:
484 score += 1;
485 break;
486 case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU:
487 score += 2;
488 break;
489 case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU:
490 score += 20;
491 break;
492 case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU:
493 score += 10;
494 break;
495 default:
496 break;
497 }
498 if (props.vendorID == VULKAN_VENDOR_AMD) {
499 score += 5;
500 } else if (props.vendorID == VULKAN_VENDOR_NVIDIA) {
501 score += 5;
502 }
503 if (score > maxScore) {
504 best = (int)i;
505 maxScore = score;
506 }
507 }
508 return best;
509 }
510
// Selects a physical device and gathers everything needed before device
// creation: queue families, preferred depth/stencil format, memory types,
// optional features, and the device extension list.
void VulkanContext::ChooseDevice(int physical_device) {
	physical_device_ = physical_device;
	INFO_LOG(G3D, "Chose physical device %d: %p", physical_device, physical_devices_[physical_device]);

	GetDeviceLayerProperties();
	if (!CheckLayers(device_layer_properties_, device_layer_names_)) {
		// Non-fatal: proceed without the missing layer(s).
		WARN_LOG(G3D, "CheckLayers for device %d failed", physical_device);
	}

	// Two-step query: first the family count, then the properties.
	vkGetPhysicalDeviceQueueFamilyProperties(physical_devices_[physical_device_], &queue_count, nullptr);
	_dbg_assert_(queue_count >= 1);

	queueFamilyProperties_.resize(queue_count);
	vkGetPhysicalDeviceQueueFamilyProperties(physical_devices_[physical_device_], &queue_count, queueFamilyProperties_.data());
	_dbg_assert_(queue_count >= 1);

	// Detect preferred formats, in this order.
	static const VkFormat depthStencilFormats[] = {
		VK_FORMAT_D24_UNORM_S8_UINT,
		VK_FORMAT_D32_SFLOAT_S8_UINT,
		VK_FORMAT_D16_UNORM_S8_UINT,
	};
	deviceInfo_.preferredDepthStencilFormat = VK_FORMAT_UNDEFINED;
	for (size_t i = 0; i < ARRAY_SIZE(depthStencilFormats); i++) {
		VkFormatProperties props;
		vkGetPhysicalDeviceFormatProperties(physical_devices_[physical_device_], depthStencilFormats[i], &props);
		if (props.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) {
			deviceInfo_.preferredDepthStencilFormat = depthStencilFormats[i];
			break;
		}
	}

	_assert_msg_(deviceInfo_.preferredDepthStencilFormat != VK_FORMAT_UNDEFINED, "Could not find a usable depth stencil format.");
	// Record whether the chosen depth/stencil format can be blitted to/from.
	VkFormatProperties preferredProps;
	vkGetPhysicalDeviceFormatProperties(physical_devices_[physical_device_], deviceInfo_.preferredDepthStencilFormat, &preferredProps);
	if ((preferredProps.optimalTilingFeatures & VK_FORMAT_FEATURE_BLIT_SRC_BIT) &&
		(preferredProps.optimalTilingFeatures & VK_FORMAT_FEATURE_BLIT_DST_BIT)) {
		deviceInfo_.canBlitToPreferredDepthStencilFormat = true;
	}

	// This is as good a place as any to do this.
	vkGetPhysicalDeviceMemoryProperties(physical_devices_[physical_device_], &memory_properties);
	INFO_LOG(G3D, "Memory Types (%d):", memory_properties.memoryTypeCount);
	for (int i = 0; i < (int)memory_properties.memoryTypeCount; i++) {
		// Don't bother printing dummy memory types.
		if (!memory_properties.memoryTypes[i].propertyFlags)
			continue;
		INFO_LOG(G3D, "  %d: Heap %d; Flags: %s%s%s%s ", i, memory_properties.memoryTypes[i].heapIndex,
			(memory_properties.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) ? "DEVICE_LOCAL " : "",
			(memory_properties.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) ? "HOST_VISIBLE " : "",
			(memory_properties.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) ? "HOST_CACHED " : "",
			(memory_properties.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) ? "HOST_COHERENT " : "");
	}

	// Optional features
	if (extensionsLookup_.KHR_get_physical_device_properties2) {
		VkPhysicalDeviceFeatures2 features2{VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR};
		vkGetPhysicalDeviceFeatures2KHR(physical_devices_[physical_device_], &features2);
		deviceFeatures_.available = features2.features;
	} else {
		vkGetPhysicalDeviceFeatures(physical_devices_[physical_device_], &deviceFeatures_.available);
	}

	deviceFeatures_.enabled = {};
	// Enable a few safe ones if they are available.
	deviceFeatures_.enabled.dualSrcBlend = deviceFeatures_.available.dualSrcBlend;
	deviceFeatures_.enabled.largePoints = deviceFeatures_.available.largePoints;
	deviceFeatures_.enabled.wideLines = deviceFeatures_.available.wideLines;
	deviceFeatures_.enabled.logicOp = deviceFeatures_.available.logicOp;
	deviceFeatures_.enabled.depthClamp = deviceFeatures_.available.depthClamp;
	deviceFeatures_.enabled.depthBounds = deviceFeatures_.available.depthBounds;
	deviceFeatures_.enabled.samplerAnisotropy = deviceFeatures_.available.samplerAnisotropy;
	// For easy wireframe mode, someday.
	deviceFeatures_.enabled.fillModeNonSolid = deviceFeatures_.available.fillModeNonSolid;

	GetDeviceLayerExtensionList(nullptr, device_extension_properties_);

	// Swapchain support is mandatory for presentation.
	device_extensions_enabled_.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
}
590
EnableDeviceExtension(const char * extension)591 bool VulkanContext::EnableDeviceExtension(const char *extension) {
592 for (auto &iter : device_extension_properties_) {
593 if (!strcmp(iter.extensionName, extension)) {
594 device_extensions_enabled_.push_back(extension);
595 return true;
596 }
597 }
598 return false;
599 }
600
CreateDevice()601 VkResult VulkanContext::CreateDevice() {
602 if (!init_error_.empty() || physical_device_ < 0) {
603 ERROR_LOG(G3D, "Vulkan init failed: %s", init_error_.c_str());
604 return VK_ERROR_INITIALIZATION_FAILED;
605 }
606
607 VkDeviceQueueCreateInfo queue_info{VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO};
608 float queue_priorities[1] = {1.0f};
609 queue_info.queueCount = 1;
610 queue_info.pQueuePriorities = queue_priorities;
611 bool found = false;
612 for (int i = 0; i < (int)queue_count; i++) {
613 if (queueFamilyProperties_[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) {
614 queue_info.queueFamilyIndex = i;
615 found = true;
616 break;
617 }
618 }
619 _dbg_assert_(found);
620
621 extensionsLookup_.KHR_maintenance1 = EnableDeviceExtension(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
622 extensionsLookup_.KHR_maintenance2 = EnableDeviceExtension(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
623 extensionsLookup_.KHR_maintenance3 = EnableDeviceExtension(VK_KHR_MAINTENANCE3_EXTENSION_NAME);
624 extensionsLookup_.KHR_multiview = EnableDeviceExtension(VK_KHR_MULTIVIEW_EXTENSION_NAME);
625
626 if (EnableDeviceExtension(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME)) {
627 extensionsLookup_.KHR_get_memory_requirements2 = true;
628 extensionsLookup_.KHR_dedicated_allocation = EnableDeviceExtension(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME);
629 }
630 if (EnableDeviceExtension(VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME)) {
631 if (EnableDeviceExtension(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME)) {
632 extensionsLookup_.EXT_external_memory_host = EnableDeviceExtension(VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME);
633 }
634 }
635 if (EnableDeviceExtension(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME)) {
636 extensionsLookup_.KHR_create_renderpass2 = true;
637 extensionsLookup_.KHR_depth_stencil_resolve = EnableDeviceExtension(VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME);
638 }
639 extensionsLookup_.EXT_shader_stencil_export = EnableDeviceExtension(VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME);
640
641 VkDeviceCreateInfo device_info{ VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO };
642 device_info.queueCreateInfoCount = 1;
643 device_info.pQueueCreateInfos = &queue_info;
644 device_info.enabledLayerCount = (uint32_t)device_layer_names_.size();
645 device_info.ppEnabledLayerNames = device_info.enabledLayerCount ? device_layer_names_.data() : nullptr;
646 device_info.enabledExtensionCount = (uint32_t)device_extensions_enabled_.size();
647 device_info.ppEnabledExtensionNames = device_info.enabledExtensionCount ? device_extensions_enabled_.data() : nullptr;
648 device_info.pEnabledFeatures = &deviceFeatures_.enabled;
649
650 VkResult res = vkCreateDevice(physical_devices_[physical_device_], &device_info, nullptr, &device_);
651 if (res != VK_SUCCESS) {
652 init_error_ = "Unable to create Vulkan device";
653 ERROR_LOG(G3D, "Unable to create Vulkan device");
654 } else {
655 VulkanLoadDeviceFunctions(device_, extensionsLookup_);
656 }
657 INFO_LOG(G3D, "Device created.\n");
658 VulkanSetAvailable(true);
659 return res;
660 }
661
InitDebugUtilsCallback()662 VkResult VulkanContext::InitDebugUtilsCallback() {
663 // We're intentionally skipping VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT and
664 // VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT, just too spammy.
665 int bits = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT
666 | VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT
667 | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
668
669 VkDebugUtilsMessengerCreateInfoEXT callback1{VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT};
670 callback1.messageSeverity = bits;
671 callback1.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
672 callback1.pfnUserCallback = &VulkanDebugUtilsCallback;
673 callback1.pUserData = (void *)&g_LogOptions;
674 VkDebugUtilsMessengerEXT messenger;
675 VkResult res = vkCreateDebugUtilsMessengerEXT(instance_, &callback1, nullptr, &messenger);
676 if (res != VK_SUCCESS) {
677 ERROR_LOG(G3D, "Failed to register debug callback with vkCreateDebugUtilsMessengerEXT");
678 // Do error handling for VK_ERROR_OUT_OF_MEMORY
679 } else {
680 INFO_LOG(G3D, "Debug callback registered with vkCreateDebugUtilsMessengerEXT.");
681 utils_callbacks.push_back(messenger);
682 }
683 return res;
684 }
685
SetDebugNameImpl(uint64_t handle,VkObjectType type,const char * name)686 void VulkanContext::SetDebugNameImpl(uint64_t handle, VkObjectType type, const char *name) {
687 VkDebugUtilsObjectNameInfoEXT info{ VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT };
688 info.pObjectName = name;
689 info.objectHandle = handle;
690 info.objectType = type;
691 vkSetDebugUtilsObjectNameEXT(device_, &info);
692 }
693
InitSurface(WindowSystem winsys,void * data1,void * data2)694 VkResult VulkanContext::InitSurface(WindowSystem winsys, void *data1, void *data2) {
695 winsys_ = winsys;
696 winsysData1_ = data1;
697 winsysData2_ = data2;
698 return ReinitSurface();
699 }
700
ReinitSurface()701 VkResult VulkanContext::ReinitSurface() {
702 if (surface_ != VK_NULL_HANDLE) {
703 INFO_LOG(G3D, "Destroying Vulkan surface (%d, %d)", swapChainExtent_.width, swapChainExtent_.height);
704 vkDestroySurfaceKHR(instance_, surface_, nullptr);
705 surface_ = VK_NULL_HANDLE;
706 }
707
708 INFO_LOG(G3D, "Creating Vulkan surface for window (%p %p)", winsysData1_, winsysData2_);
709
710 VkResult retval = VK_SUCCESS;
711
712 switch (winsys_) {
713 #ifdef _WIN32
714 case WINDOWSYSTEM_WIN32:
715 {
716 VkWin32SurfaceCreateInfoKHR win32{ VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR };
717 win32.flags = 0;
718 win32.hwnd = (HWND)winsysData2_;
719 win32.hinstance = (HINSTANCE)winsysData1_;
720 retval = vkCreateWin32SurfaceKHR(instance_, &win32, nullptr, &surface_);
721 break;
722 }
723 #endif
724 #if defined(__ANDROID__)
725 case WINDOWSYSTEM_ANDROID:
726 {
727 ANativeWindow *wnd = (ANativeWindow *)winsysData1_;
728 VkAndroidSurfaceCreateInfoKHR android{ VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR };
729 android.flags = 0;
730 android.window = wnd;
731 retval = vkCreateAndroidSurfaceKHR(instance_, &android, nullptr, &surface_);
732 break;
733 }
734 #endif
735 #if defined(VK_USE_PLATFORM_METAL_EXT)
736 case WINDOWSYSTEM_METAL_EXT:
737 {
738 VkMetalSurfaceCreateInfoEXT metal{ VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT };
739 metal.flags = 0;
740 metal.pLayer = winsysData1_;
741 metal.pNext = winsysData2_;
742 retval = vkCreateMetalSurfaceEXT(instance_, &metal, nullptr, &surface_);
743 break;
744 }
745 #endif
746 #if defined(VK_USE_PLATFORM_XLIB_KHR)
747 case WINDOWSYSTEM_XLIB:
748 {
749 VkXlibSurfaceCreateInfoKHR xlib{ VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR };
750 xlib.flags = 0;
751 xlib.dpy = (Display *)winsysData1_;
752 xlib.window = (Window)winsysData2_;
753 retval = vkCreateXlibSurfaceKHR(instance_, &xlib, nullptr, &surface_);
754 break;
755 }
756 #endif
757 #if defined(VK_USE_PLATFORM_XCB_KHR)
758 case WINDOWSYSTEM_XCB:
759 {
760 VkXCBSurfaceCreateInfoKHR xcb{ VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR };
761 xcb.flags = 0;
762 xcb.connection = (Connection *)winsysData1_;
763 xcb.window = (Window)(uintptr_t)winsysData2_;
764 retval = vkCreateXcbSurfaceKHR(instance_, &xcb, nullptr, &surface_);
765 break;
766 }
767 #endif
768 #if defined(VK_USE_PLATFORM_WAYLAND_KHR)
769 case WINDOWSYSTEM_WAYLAND:
770 {
771 VkWaylandSurfaceCreateInfoKHR wayland{ VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR };
772 wayland.flags = 0;
773 wayland.display = (wl_display *)winsysData1_;
774 wayland.surface = (wl_surface *)winsysData2_;
775 retval = vkCreateWaylandSurfaceKHR(instance_, &wayland, nullptr, &surface_);
776 break;
777 }
778 #endif
779 #if defined(VK_USE_PLATFORM_DISPLAY_KHR)
780 case WINDOWSYSTEM_DISPLAY:
781 {
782 VkDisplaySurfaceCreateInfoKHR display{ VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR };
783 display.flags = 0;
784 retval = vkCreateDisplayPlaneSurfaceKHR(instance_, &display, nullptr, &surface_);
785 break;
786 }
787 #endif
788
789 default:
790 _assert_msg_(false, "Vulkan support for chosen window system not implemented");
791 return VK_ERROR_INITIALIZATION_FAILED;
792 }
793
794 if (retval != VK_SUCCESS) {
795 return retval;
796 }
797
798 if (!ChooseQueue()) {
799 return VK_ERROR_INITIALIZATION_FAILED;
800 }
801
802 return VK_SUCCESS;
803 }
804
ChooseQueue()805 bool VulkanContext::ChooseQueue() {
806 // Iterate over each queue to learn whether it supports presenting:
807 VkBool32 *supportsPresent = new VkBool32[queue_count];
808 for (uint32_t i = 0; i < queue_count; i++) {
809 vkGetPhysicalDeviceSurfaceSupportKHR(physical_devices_[physical_device_], i, surface_, &supportsPresent[i]);
810 }
811
812 // Search for a graphics queue and a present queue in the array of queue
813 // families, try to find one that supports both
814 uint32_t graphicsQueueNodeIndex = UINT32_MAX;
815 uint32_t presentQueueNodeIndex = UINT32_MAX;
816 for (uint32_t i = 0; i < queue_count; i++) {
817 if ((queueFamilyProperties_[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0) {
818 if (graphicsQueueNodeIndex == UINT32_MAX) {
819 graphicsQueueNodeIndex = i;
820 }
821
822 if (supportsPresent[i] == VK_TRUE) {
823 graphicsQueueNodeIndex = i;
824 presentQueueNodeIndex = i;
825 break;
826 }
827 }
828 }
829 if (presentQueueNodeIndex == UINT32_MAX) {
830 // If didn't find a queue that supports both graphics and present, then
831 // find a separate present queue.
832 for (uint32_t i = 0; i < queue_count; ++i) {
833 if (supportsPresent[i] == VK_TRUE) {
834 presentQueueNodeIndex = i;
835 break;
836 }
837 }
838 }
839 delete[] supportsPresent;
840
841 // Generate error if could not find both a graphics and a present queue
842 if (graphicsQueueNodeIndex == UINT32_MAX || presentQueueNodeIndex == UINT32_MAX) {
843 ERROR_LOG(G3D, "Could not find a graphics and a present queue");
844 return false;
845 }
846
847 graphics_queue_family_index_ = graphicsQueueNodeIndex;
848
849 // Get the list of VkFormats that are supported:
850 uint32_t formatCount = 0;
851 VkResult res = vkGetPhysicalDeviceSurfaceFormatsKHR(physical_devices_[physical_device_], surface_, &formatCount, nullptr);
852 _assert_msg_(res == VK_SUCCESS, "Failed to get formats for device %d: %d", physical_device_, (int)res);
853 if (res != VK_SUCCESS) {
854 return false;
855 }
856
857 std::vector<VkSurfaceFormatKHR> surfFormats(formatCount);
858 res = vkGetPhysicalDeviceSurfaceFormatsKHR(physical_devices_[physical_device_], surface_, &formatCount, surfFormats.data());
859 _dbg_assert_(res == VK_SUCCESS);
860 if (res != VK_SUCCESS) {
861 return false;
862 }
863 // If the format list includes just one entry of VK_FORMAT_UNDEFINED,
864 // the surface has no preferred format. Otherwise, at least one
865 // supported format will be returned.
866 if (formatCount == 0 || (formatCount == 1 && surfFormats[0].format == VK_FORMAT_UNDEFINED)) {
867 INFO_LOG(G3D, "swapchain_format: Falling back to B8G8R8A8_UNORM");
868 swapchainFormat_ = VK_FORMAT_B8G8R8A8_UNORM;
869 } else {
870 swapchainFormat_ = VK_FORMAT_UNDEFINED;
871 for (uint32_t i = 0; i < formatCount; ++i) {
872 if (surfFormats[i].colorSpace != VK_COLORSPACE_SRGB_NONLINEAR_KHR) {
873 continue;
874 }
875
876 if (surfFormats[i].format == VK_FORMAT_B8G8R8A8_UNORM || surfFormats[i].format == VK_FORMAT_R8G8B8A8_UNORM) {
877 swapchainFormat_ = surfFormats[i].format;
878 break;
879 }
880 }
881 if (swapchainFormat_ == VK_FORMAT_UNDEFINED) {
882 // Okay, take the first one then.
883 swapchainFormat_ = surfFormats[0].format;
884 }
885 INFO_LOG(G3D, "swapchain_format: %d (/%d)", swapchainFormat_, formatCount);
886 }
887
888 vkGetDeviceQueue(device_, graphics_queue_family_index_, 0, &gfx_queue_);
889 return true;
890 }
891
// Constrain x to the interval [a, b]. If a > b, the lower bound wins,
// matching the original check-low-first cascade.
int clamp(int x, int a, int b) {
	return (x < a) ? a : ((x > b) ? b : x);
}
899
surface_transforms_to_string(VkSurfaceTransformFlagsKHR transformFlags)900 static std::string surface_transforms_to_string(VkSurfaceTransformFlagsKHR transformFlags) {
901 std::string str;
902 if (transformFlags & VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR) str += "IDENTITY ";
903 if (transformFlags & VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR) str += "ROTATE_90 ";
904 if (transformFlags & VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR) str += "ROTATE_180 ";
905 if (transformFlags & VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR) str += "ROTATE_270 ";
906 if (transformFlags & VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR) str += "HMIRROR ";
907 if (transformFlags & VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR) str += "HMIRROR_90 ";
908 if (transformFlags & VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR) str += "HMIRROR_180 ";
909 if (transformFlags & VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR) str += "HMIRROR_270 ";
910 if (transformFlags & VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR) str += "INHERIT ";
911 return str;
912 }
913
InitSwapchain()914 bool VulkanContext::InitSwapchain() {
915 VkResult res = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physical_devices_[physical_device_], surface_, &surfCapabilities_);
916 if (res == VK_ERROR_SURFACE_LOST_KHR) {
917 // Not much to do.
918 ERROR_LOG(G3D, "VK: Surface lost in InitSwapchain");
919 return false;
920 }
921 _dbg_assert_(res == VK_SUCCESS);
922 uint32_t presentModeCount;
923 res = vkGetPhysicalDeviceSurfacePresentModesKHR(physical_devices_[physical_device_], surface_, &presentModeCount, nullptr);
924 _dbg_assert_(res == VK_SUCCESS);
925 VkPresentModeKHR *presentModes = new VkPresentModeKHR[presentModeCount];
926 _dbg_assert_(presentModes);
927 res = vkGetPhysicalDeviceSurfacePresentModesKHR(physical_devices_[physical_device_], surface_, &presentModeCount, presentModes);
928 _dbg_assert_(res == VK_SUCCESS);
929
930
931 swapChainExtent_.width = clamp(surfCapabilities_.currentExtent.width, surfCapabilities_.minImageExtent.width, surfCapabilities_.maxImageExtent.width);
932 swapChainExtent_.height = clamp(surfCapabilities_.currentExtent.height, surfCapabilities_.minImageExtent.height, surfCapabilities_.maxImageExtent.height);
933
934 INFO_LOG(G3D, "surfCapabilities_.current: %dx%d min: %dx%d max: %dx%d computed: %dx%d",
935 surfCapabilities_.currentExtent.width, surfCapabilities_.currentExtent.height,
936 surfCapabilities_.minImageExtent.width, surfCapabilities_.minImageExtent.height,
937 surfCapabilities_.maxImageExtent.width, surfCapabilities_.maxImageExtent.height,
938 swapChainExtent_.width, swapChainExtent_.height);
939
940 // TODO: Find a better way to specify the prioritized present mode while being able
941 // to fall back in a sensible way.
942 VkPresentModeKHR swapchainPresentMode = VK_PRESENT_MODE_MAX_ENUM_KHR;
943 std::string modes = "";
944 for (size_t i = 0; i < presentModeCount; i++) {
945 modes += PresentModeString(presentModes[i]);
946 if (i != presentModeCount - 1) {
947 modes += ", ";
948 }
949 }
950 INFO_LOG(G3D, "Supported present modes: %s", modes.c_str());
951 for (size_t i = 0; i < presentModeCount; i++) {
952 bool match = false;
953 match = match || ((flags_ & VULKAN_FLAG_PRESENT_MAILBOX) && presentModes[i] == VK_PRESENT_MODE_MAILBOX_KHR);
954 match = match || ((flags_ & VULKAN_FLAG_PRESENT_FIFO_RELAXED) && presentModes[i] == VK_PRESENT_MODE_FIFO_RELAXED_KHR);
955 match = match || ((flags_ & VULKAN_FLAG_PRESENT_FIFO) && presentModes[i] == VK_PRESENT_MODE_FIFO_KHR);
956 match = match || ((flags_ & VULKAN_FLAG_PRESENT_IMMEDIATE) && presentModes[i] == VK_PRESENT_MODE_IMMEDIATE_KHR);
957
958 // Default to the first present mode from the list.
959 if (match || swapchainPresentMode == VK_PRESENT_MODE_MAX_ENUM_KHR) {
960 swapchainPresentMode = presentModes[i];
961 }
962 if (match) {
963 break;
964 }
965 }
966 #ifdef __ANDROID__
967 // HACK
968 swapchainPresentMode = VK_PRESENT_MODE_FIFO_KHR;
969 #endif
970 delete[] presentModes;
971 // Determine the number of VkImage's to use in the swap chain (we desire to
972 // own only 1 image at a time, besides the images being displayed and
973 // queued for display):
974 uint32_t desiredNumberOfSwapChainImages = surfCapabilities_.minImageCount + 1;
975 if ((surfCapabilities_.maxImageCount > 0) &&
976 (desiredNumberOfSwapChainImages > surfCapabilities_.maxImageCount))
977 {
978 // Application must settle for fewer images than desired:
979 desiredNumberOfSwapChainImages = surfCapabilities_.maxImageCount;
980 }
981
982 INFO_LOG(G3D, "Chosen present mode: %d (%s). numSwapChainImages: %d/%d",
983 swapchainPresentMode, PresentModeString(swapchainPresentMode),
984 desiredNumberOfSwapChainImages, surfCapabilities_.maxImageCount);
985
986 // We mostly follow the practices from
987 // https://arm-software.github.io/vulkan_best_practice_for_mobile_developers/samples/surface_rotation/surface_rotation_tutorial.html
988 //
989 VkSurfaceTransformFlagBitsKHR preTransform;
990 std::string supportedTransforms = surface_transforms_to_string(surfCapabilities_.supportedTransforms);
991 std::string currentTransform = surface_transforms_to_string(surfCapabilities_.currentTransform);
992 g_display_rotation = DisplayRotation::ROTATE_0;
993 g_display_rot_matrix.setIdentity();
994 if (surfCapabilities_.currentTransform & (VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR | VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR)) {
995 preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
996 } else if (surfCapabilities_.currentTransform & (VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR | VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR | VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR)) {
997 // Normal, sensible rotations. Let's handle it.
998 preTransform = surfCapabilities_.currentTransform;
999 g_display_rot_matrix.setIdentity();
1000 switch (surfCapabilities_.currentTransform) {
1001 case VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR:
1002 g_display_rotation = DisplayRotation::ROTATE_90;
1003 g_display_rot_matrix.setRotationZ90();
1004 std::swap(swapChainExtent_.width, swapChainExtent_.height);
1005 break;
1006 case VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR:
1007 g_display_rotation = DisplayRotation::ROTATE_180;
1008 g_display_rot_matrix.setRotationZ180();
1009 break;
1010 case VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR:
1011 g_display_rotation = DisplayRotation::ROTATE_270;
1012 g_display_rot_matrix.setRotationZ270();
1013 std::swap(swapChainExtent_.width, swapChainExtent_.height);
1014 break;
1015 default:
1016 _dbg_assert_(false);
1017 }
1018 } else {
1019 // Let the OS rotate the image (potentially slow on many Android devices)
1020 preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
1021 }
1022
1023 std::string preTransformStr = surface_transforms_to_string(preTransform);
1024
1025 INFO_LOG(G3D, "Transform supported: %s current: %s chosen: %s", supportedTransforms.c_str(), currentTransform.c_str(), preTransformStr.c_str());
1026
1027 if (physicalDeviceProperties_[physical_device_].properties.vendorID == VULKAN_VENDOR_IMGTEC) {
1028 INFO_LOG(G3D, "Applying PowerVR hack (rounding off the width!)");
1029 // Swap chain width hack to avoid issue #11743 (PowerVR driver bug).
1030 // To keep the size consistent even with pretransform, do this after the swap. Should be fine.
1031 // This is fixed in newer PowerVR drivers but I don't know the cutoff.
1032 swapChainExtent_.width &= ~31;
1033 }
1034
1035 VkSwapchainCreateInfoKHR swap_chain_info{ VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR };
1036 swap_chain_info.surface = surface_;
1037 swap_chain_info.minImageCount = desiredNumberOfSwapChainImages;
1038 swap_chain_info.imageFormat = swapchainFormat_;
1039 swap_chain_info.imageColorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
1040 swap_chain_info.imageExtent.width = swapChainExtent_.width;
1041 swap_chain_info.imageExtent.height = swapChainExtent_.height;
1042 swap_chain_info.preTransform = preTransform;
1043 swap_chain_info.imageArrayLayers = 1;
1044 swap_chain_info.presentMode = swapchainPresentMode;
1045 swap_chain_info.oldSwapchain = VK_NULL_HANDLE;
1046 swap_chain_info.clipped = true;
1047 swap_chain_info.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
1048
1049 // Don't ask for TRANSFER_DST for the swapchain image, we don't use that.
1050 // if (surfCapabilities_.supportedUsageFlags & VK_IMAGE_USAGE_TRANSFER_DST_BIT)
1051 // swap_chain_info.imageUsage |= VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1052
1053 #ifndef ANDROID
1054 // We don't support screenshots on Android
1055 // Add more usage flags if they're supported.
1056 if (surfCapabilities_.supportedUsageFlags & VK_IMAGE_USAGE_TRANSFER_SRC_BIT)
1057 swap_chain_info.imageUsage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1058 #endif
1059
1060 swap_chain_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
1061 swap_chain_info.queueFamilyIndexCount = 0;
1062 swap_chain_info.pQueueFamilyIndices = NULL;
1063 // OPAQUE is not supported everywhere.
1064 if (surfCapabilities_.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR) {
1065 swap_chain_info.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
1066 } else {
1067 // This should be supported anywhere, and is the only thing supported on the SHIELD TV, for example.
1068 swap_chain_info.compositeAlpha = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
1069 }
1070
1071 res = vkCreateSwapchainKHR(device_, &swap_chain_info, NULL, &swapchain_);
1072 if (res != VK_SUCCESS) {
1073 ERROR_LOG(G3D, "vkCreateSwapchainKHR failed!");
1074 return false;
1075 }
1076 INFO_LOG(G3D, "Created swapchain: %dx%d", swap_chain_info.imageExtent.width, swap_chain_info.imageExtent.height);
1077 return true;
1078 }
1079
CreateFence(bool presignalled)1080 VkFence VulkanContext::CreateFence(bool presignalled) {
1081 VkFence fence;
1082 VkFenceCreateInfo fenceInfo{ VK_STRUCTURE_TYPE_FENCE_CREATE_INFO };
1083 fenceInfo.flags = presignalled ? VK_FENCE_CREATE_SIGNALED_BIT : 0;
1084 vkCreateFence(device_, &fenceInfo, NULL, &fence);
1085 return fence;
1086 }
1087
PerformPendingDeletes()1088 void VulkanContext::PerformPendingDeletes() {
1089 for (int i = 0; i < ARRAY_SIZE(frame_); i++) {
1090 frame_[i].deleteList.PerformDeletes(device_);
1091 }
1092 Delete().PerformDeletes(device_);
1093 }
1094
// Destroy the logical device. The swapchain and surface are torn down
// elsewhere and must already be gone - we only log if they aren't.
void VulkanContext::DestroyDevice() {
	if (swapchain_) {
		ERROR_LOG(G3D, "DestroyDevice: Swapchain should have been destroyed.");
	}
	if (surface_) {
		ERROR_LOG(G3D, "DestroyDevice: Surface should have been destroyed.");
	}

	INFO_LOG(G3D, "VulkanContext::DestroyDevice (performing deletes)");
	// Flush all queued deletions while device_ is still valid.
	PerformPendingDeletes();

	vkDestroyDevice(device_, nullptr);
	device_ = nullptr;
}
1109
CreateShaderModule(const std::vector<uint32_t> & spirv,VkShaderModule * shaderModule)1110 bool VulkanContext::CreateShaderModule(const std::vector<uint32_t> &spirv, VkShaderModule *shaderModule) {
1111 VkShaderModuleCreateInfo sm{ VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO };
1112 sm.pCode = spirv.data();
1113 sm.codeSize = spirv.size() * sizeof(uint32_t);
1114 sm.flags = 0;
1115 VkResult result = vkCreateShaderModule(device_, &sm, nullptr, shaderModule);
1116 if (result != VK_SUCCESS) {
1117 return false;
1118 } else {
1119 return true;
1120 }
1121 }
1122
TransitionImageLayout2(VkCommandBuffer cmd,VkImage image,int baseMip,int numMipLevels,VkImageAspectFlags aspectMask,VkImageLayout oldImageLayout,VkImageLayout newImageLayout,VkPipelineStageFlags srcStageMask,VkPipelineStageFlags dstStageMask,VkAccessFlags srcAccessMask,VkAccessFlags dstAccessMask)1123 void TransitionImageLayout2(VkCommandBuffer cmd, VkImage image, int baseMip, int numMipLevels, VkImageAspectFlags aspectMask,
1124 VkImageLayout oldImageLayout, VkImageLayout newImageLayout,
1125 VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
1126 VkAccessFlags srcAccessMask, VkAccessFlags dstAccessMask) {
1127 VkImageMemoryBarrier image_memory_barrier{ VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER };
1128 image_memory_barrier.srcAccessMask = srcAccessMask;
1129 image_memory_barrier.dstAccessMask = dstAccessMask;
1130 image_memory_barrier.oldLayout = oldImageLayout;
1131 image_memory_barrier.newLayout = newImageLayout;
1132 image_memory_barrier.image = image;
1133 image_memory_barrier.subresourceRange.aspectMask = aspectMask;
1134 image_memory_barrier.subresourceRange.baseMipLevel = baseMip;
1135 image_memory_barrier.subresourceRange.levelCount = numMipLevels;
1136 image_memory_barrier.subresourceRange.layerCount = 1; // We never use more than one layer, and old Mali drivers have problems with VK_REMAINING_ARRAY_LAYERS/VK_REMAINING_MIP_LEVELS.
1137 image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
1138 image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
1139 vkCmdPipelineBarrier(cmd, srcStageMask, dstStageMask, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
1140 }
1141
FindLanguage(const VkShaderStageFlagBits shader_type)1142 EShLanguage FindLanguage(const VkShaderStageFlagBits shader_type) {
1143 switch (shader_type) {
1144 case VK_SHADER_STAGE_VERTEX_BIT:
1145 return EShLangVertex;
1146
1147 case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
1148 return EShLangTessControl;
1149
1150 case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
1151 return EShLangTessEvaluation;
1152
1153 case VK_SHADER_STAGE_GEOMETRY_BIT:
1154 return EShLangGeometry;
1155
1156 case VK_SHADER_STAGE_FRAGMENT_BIT:
1157 return EShLangFragment;
1158
1159 case VK_SHADER_STAGE_COMPUTE_BIT:
1160 return EShLangCompute;
1161
1162 default:
1163 return EShLangVertex;
1164 }
1165 }
1166
1167 // Compile a given string containing GLSL into SPV for use by VK
1168 // Return value of false means an error was encountered.
GLSLtoSPV(const VkShaderStageFlagBits shader_type,const char * sourceCode,GLSLVariant variant,std::vector<unsigned int> & spirv,std::string * errorMessage)1169 bool GLSLtoSPV(const VkShaderStageFlagBits shader_type, const char *sourceCode, GLSLVariant variant,
1170 std::vector<unsigned int> &spirv, std::string *errorMessage) {
1171
1172 glslang::TProgram program;
1173 const char *shaderStrings[1];
1174 TBuiltInResource Resources;
1175 init_resources(Resources);
1176
1177 int defaultVersion;
1178 EShMessages messages;
1179 EProfile profile;
1180
1181 switch (variant) {
1182 case GLSLVariant::VULKAN:
1183 // Enable SPIR-V and Vulkan rules when parsing GLSL
1184 messages = (EShMessages)(EShMsgSpvRules | EShMsgVulkanRules);
1185 defaultVersion = 450;
1186 profile = ECoreProfile;
1187 break;
1188 case GLSLVariant::GL140:
1189 messages = (EShMessages)(EShMsgDefault);
1190 defaultVersion = 140;
1191 profile = ECompatibilityProfile;
1192 break;
1193 case GLSLVariant::GLES300:
1194 messages = (EShMessages)(EShMsgDefault);
1195 defaultVersion = 300;
1196 profile = EEsProfile;
1197 break;
1198 default:
1199 return false;
1200 }
1201
1202 EShLanguage stage = FindLanguage(shader_type);
1203 glslang::TShader shader(stage);
1204
1205 shaderStrings[0] = sourceCode;
1206 shader.setStrings(shaderStrings, 1);
1207
1208 if (!shader.parse(&Resources, defaultVersion, profile, false, true, messages)) {
1209 puts(shader.getInfoLog());
1210 puts(shader.getInfoDebugLog());
1211 if (errorMessage) {
1212 *errorMessage = shader.getInfoLog();
1213 (*errorMessage) += shader.getInfoDebugLog();
1214 }
1215 return false; // something didn't work
1216 }
1217
1218 // Note that program does not take ownership of &shader, so this is fine.
1219 program.addShader(&shader);
1220
1221 if (!program.link(messages)) {
1222 puts(shader.getInfoLog());
1223 puts(shader.getInfoDebugLog());
1224 if (errorMessage) {
1225 *errorMessage = shader.getInfoLog();
1226 (*errorMessage) += shader.getInfoDebugLog();
1227 }
1228 return false;
1229 }
1230
1231 // Can't fail, parsing worked, "linking" worked.
1232 glslang::SpvOptions options;
1233 options.disableOptimizer = false;
1234 options.optimizeSize = false;
1235 options.generateDebugInfo = false;
1236 glslang::GlslangToSpv(*program.getIntermediate(stage), spirv, &options);
1237 return true;
1238 }
1239
// Initialize glslang's process-wide state. Call once before any GLSLtoSPV()
// use; pair with finalize_glslang() on shutdown.
void init_glslang() {
	glslang::InitializeProcess();
}
1243
// Tear down glslang's process-wide state; counterpart to init_glslang().
void finalize_glslang() {
	glslang::FinalizeProcess();
}
1247
// Translate a VkResult into its enum name for logging. Unlisted codes
// (including any added in newer Vulkan headers) map to a generic string.
const char *VulkanResultToString(VkResult res) {
	switch (res) {
	case VK_NOT_READY: return "VK_NOT_READY";
	case VK_TIMEOUT: return "VK_TIMEOUT";
	case VK_EVENT_SET: return "VK_EVENT_SET";
	case VK_EVENT_RESET: return "VK_EVENT_RESET";
	case VK_INCOMPLETE: return "VK_INCOMPLETE";
	case VK_ERROR_OUT_OF_HOST_MEMORY: return "VK_ERROR_OUT_OF_HOST_MEMORY";
	case VK_ERROR_OUT_OF_DEVICE_MEMORY: return "VK_ERROR_OUT_OF_DEVICE_MEMORY";
	case VK_ERROR_INITIALIZATION_FAILED: return "VK_ERROR_INITIALIZATION_FAILED";
	case VK_ERROR_DEVICE_LOST: return "VK_ERROR_DEVICE_LOST";
	case VK_ERROR_MEMORY_MAP_FAILED: return "VK_ERROR_MEMORY_MAP_FAILED";
	case VK_ERROR_LAYER_NOT_PRESENT: return "VK_ERROR_LAYER_NOT_PRESENT";
	case VK_ERROR_EXTENSION_NOT_PRESENT: return "VK_ERROR_EXTENSION_NOT_PRESENT";
	case VK_ERROR_FEATURE_NOT_PRESENT: return "VK_ERROR_FEATURE_NOT_PRESENT";
	case VK_ERROR_INCOMPATIBLE_DRIVER: return "VK_ERROR_INCOMPATIBLE_DRIVER";
	case VK_ERROR_TOO_MANY_OBJECTS: return "VK_ERROR_TOO_MANY_OBJECTS";
	case VK_ERROR_FORMAT_NOT_SUPPORTED: return "VK_ERROR_FORMAT_NOT_SUPPORTED";
	case VK_ERROR_SURFACE_LOST_KHR: return "VK_ERROR_SURFACE_LOST_KHR";
	case VK_SUBOPTIMAL_KHR: return "VK_SUBOPTIMAL_KHR";
	case VK_ERROR_OUT_OF_DATE_KHR: return "VK_ERROR_OUT_OF_DATE_KHR";
	case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR: return "VK_ERROR_INCOMPATIBLE_DISPLAY_KHR";
	case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR: return "VK_ERROR_NATIVE_WINDOW_IN_USE_KHR";
	case VK_ERROR_OUT_OF_POOL_MEMORY_KHR: return "VK_ERROR_OUT_OF_POOL_MEMORY_KHR";
	case VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR: return "VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR";

	default:
		return "VK_ERROR_...(unknown)";
	}
}
1278
Take(VulkanDeleteList & del)1279 void VulkanDeleteList::Take(VulkanDeleteList &del) {
1280 _dbg_assert_(cmdPools_.empty());
1281 _dbg_assert_(descPools_.empty());
1282 _dbg_assert_(modules_.empty());
1283 _dbg_assert_(buffers_.empty());
1284 _dbg_assert_(bufferViews_.empty());
1285 _dbg_assert_(images_.empty());
1286 _dbg_assert_(imageViews_.empty());
1287 _dbg_assert_(deviceMemory_.empty());
1288 _dbg_assert_(samplers_.empty());
1289 _dbg_assert_(pipelines_.empty());
1290 _dbg_assert_(pipelineCaches_.empty());
1291 _dbg_assert_(renderPasses_.empty());
1292 _dbg_assert_(framebuffers_.empty());
1293 _dbg_assert_(pipelineLayouts_.empty());
1294 _dbg_assert_(descSetLayouts_.empty());
1295 _dbg_assert_(callbacks_.empty());
1296 cmdPools_ = std::move(del.cmdPools_);
1297 descPools_ = std::move(del.descPools_);
1298 modules_ = std::move(del.modules_);
1299 buffers_ = std::move(del.buffers_);
1300 bufferViews_ = std::move(del.bufferViews_);
1301 images_ = std::move(del.images_);
1302 imageViews_ = std::move(del.imageViews_);
1303 deviceMemory_ = std::move(del.deviceMemory_);
1304 samplers_ = std::move(del.samplers_);
1305 pipelines_ = std::move(del.pipelines_);
1306 pipelineCaches_ = std::move(del.pipelineCaches_);
1307 renderPasses_ = std::move(del.renderPasses_);
1308 framebuffers_ = std::move(del.framebuffers_);
1309 pipelineLayouts_ = std::move(del.pipelineLayouts_);
1310 descSetLayouts_ = std::move(del.descSetLayouts_);
1311 callbacks_ = std::move(del.callbacks_);
1312 del.cmdPools_.clear();
1313 del.descPools_.clear();
1314 del.modules_.clear();
1315 del.buffers_.clear();
1316 del.images_.clear();
1317 del.imageViews_.clear();
1318 del.deviceMemory_.clear();
1319 del.samplers_.clear();
1320 del.pipelines_.clear();
1321 del.pipelineCaches_.clear();
1322 del.renderPasses_.clear();
1323 del.framebuffers_.clear();
1324 del.pipelineLayouts_.clear();
1325 del.descSetLayouts_.clear();
1326 del.callbacks_.clear();
1327 }
1328
// Destroy every queued Vulkan object on the given device, then clear the
// queues. The category order below is deliberate (callbacks run first, and
// views/modules go before the objects they could reference) - keep it.
void VulkanDeleteList::PerformDeletes(VkDevice device) {
	// User callbacks first, so they can still rely on queued objects existing.
	for (auto &callback : callbacks_) {
		callback.func(callback.userdata);
	}
	callbacks_.clear();
	for (auto &cmdPool : cmdPools_) {
		vkDestroyCommandPool(device, cmdPool, nullptr);
	}
	cmdPools_.clear();
	for (auto &descPool : descPools_) {
		vkDestroyDescriptorPool(device, descPool, nullptr);
	}
	descPools_.clear();
	for (auto &module : modules_) {
		vkDestroyShaderModule(device, module, nullptr);
	}
	modules_.clear();
	for (auto &buf : buffers_) {
		vkDestroyBuffer(device, buf, nullptr);
	}
	buffers_.clear();
	for (auto &bufView : bufferViews_) {
		vkDestroyBufferView(device, bufView, nullptr);
	}
	bufferViews_.clear();
	for (auto &image : images_) {
		vkDestroyImage(device, image, nullptr);
	}
	images_.clear();
	for (auto &imageView : imageViews_) {
		vkDestroyImageView(device, imageView, nullptr);
	}
	imageViews_.clear();
	// Memory is freed after the images/buffers that were bound to it.
	for (auto &mem : deviceMemory_) {
		vkFreeMemory(device, mem, nullptr);
	}
	deviceMemory_.clear();
	for (auto &sampler : samplers_) {
		vkDestroySampler(device, sampler, nullptr);
	}
	samplers_.clear();
	for (auto &pipeline : pipelines_) {
		vkDestroyPipeline(device, pipeline, nullptr);
	}
	pipelines_.clear();
	for (auto &pcache : pipelineCaches_) {
		vkDestroyPipelineCache(device, pcache, nullptr);
	}
	pipelineCaches_.clear();
	for (auto &renderPass : renderPasses_) {
		vkDestroyRenderPass(device, renderPass, nullptr);
	}
	renderPasses_.clear();
	for (auto &framebuffer : framebuffers_) {
		vkDestroyFramebuffer(device, framebuffer, nullptr);
	}
	framebuffers_.clear();
	for (auto &pipeLayout : pipelineLayouts_) {
		vkDestroyPipelineLayout(device, pipeLayout, nullptr);
	}
	pipelineLayouts_.clear();
	for (auto &descSetLayout : descSetLayouts_) {
		vkDestroyDescriptorSetLayout(device, descSetLayout, nullptr);
	}
	descSetLayouts_.clear();
}
1395
GetImageMemoryRequirements(VkImage image,VkMemoryRequirements * mem_reqs,bool * dedicatedAllocation)1396 void VulkanContext::GetImageMemoryRequirements(VkImage image, VkMemoryRequirements *mem_reqs, bool *dedicatedAllocation) {
1397 if (Extensions().KHR_dedicated_allocation) {
1398 VkImageMemoryRequirementsInfo2KHR memReqInfo2{VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR};
1399 memReqInfo2.image = image;
1400
1401 VkMemoryRequirements2KHR memReq2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR};
1402 VkMemoryDedicatedRequirementsKHR memDedicatedReq{VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR};
1403 memReq2.pNext = &memDedicatedReq;
1404
1405 vkGetImageMemoryRequirements2KHR(GetDevice(), &memReqInfo2, &memReq2);
1406
1407 *mem_reqs = memReq2.memoryRequirements;
1408 *dedicatedAllocation =
1409 (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE) ||
1410 (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
1411 } else {
1412 vkGetImageMemoryRequirements(GetDevice(), image, mem_reqs);
1413 *dedicatedAllocation = false;
1414 }
1415 }
1416
IsHashMaliDriverVersion(const VkPhysicalDeviceProperties & props)1417 bool IsHashMaliDriverVersion(const VkPhysicalDeviceProperties &props) {
1418 // ARM used to put a hash in place of the driver version.
1419 // Now they only use major versions. We'll just make a bad heuristic.
1420 uint32_t major = VK_VERSION_MAJOR(props.driverVersion);
1421 uint32_t branch = VK_VERSION_PATCH(props.driverVersion);
1422 if (branch > 0)
1423 return true;
1424 if (branch > 100 || major > 100)
1425 return true;
1426 return false;
1427 }
1428
1429 // From Sascha's code
FormatDriverVersion(const VkPhysicalDeviceProperties & props)1430 std::string FormatDriverVersion(const VkPhysicalDeviceProperties &props) {
1431 if (props.vendorID == VULKAN_VENDOR_NVIDIA) {
1432 // For whatever reason, NVIDIA has their own scheme.
1433 // 10 bits = major version (up to r1023)
1434 // 8 bits = minor version (up to 255)
1435 // 8 bits = secondary branch version/build version (up to 255)
1436 // 6 bits = tertiary branch/build version (up to 63)
1437 uint32_t major = (props.driverVersion >> 22) & 0x3ff;
1438 uint32_t minor = (props.driverVersion >> 14) & 0x0ff;
1439 uint32_t secondaryBranch = (props.driverVersion >> 6) & 0x0ff;
1440 uint32_t tertiaryBranch = (props.driverVersion) & 0x003f;
1441 return StringFromFormat("%d.%d.%d.%d", major, minor, secondaryBranch, tertiaryBranch);
1442 } else if (props.vendorID == VULKAN_VENDOR_ARM) {
1443 // ARM used to just put a hash here. No point in splitting it up.
1444 if (IsHashMaliDriverVersion(props)) {
1445 return StringFromFormat("(hash) %08x", props.driverVersion);
1446 }
1447 }
1448 // Qualcomm has an inscrutable versioning scheme. Let's just display it as normal.
1449 // Standard scheme, use the standard macros.
1450 uint32_t major = VK_VERSION_MAJOR(props.driverVersion);
1451 uint32_t minor = VK_VERSION_MINOR(props.driverVersion);
1452 uint32_t branch = VK_VERSION_PATCH(props.driverVersion);
1453 return StringFromFormat("%d.%d.%d (%08x)", major, minor, branch, props.driverVersion);
1454 }
1455