1 /* Copyright (c) 2017-2020 Hans-Kristian Arntzen
2  *
3  * Permission is hereby granted, free of charge, to any person obtaining
4  * a copy of this software and associated documentation files (the
5  * "Software"), to deal in the Software without restriction, including
6  * without limitation the rights to use, copy, modify, merge, publish,
7  * distribute, sublicense, and/or sell copies of the Software, and to
8  * permit persons to whom the Software is furnished to do so, subject to
9  * the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be
12  * included in all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
15  * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
16  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
17  * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
18  * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
19  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
20  * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
21  */
22 
23 #include "context.hpp"
24 #include <vector>
25 #include <mutex>
26 #include <algorithm>
27 #include <string.h>
28 
29 #ifndef _WIN32
30 #include <dlfcn.h>
31 #elif defined(_WIN32)
32 #define WIN32_LEAN_AND_MEAN
33 #include <windows.h>
34 #endif
35 
36 //#undef VULKAN_DEBUG
37 
38 using namespace std;
39 
40 namespace Vulkan
41 {
init_instance_and_device(const char ** instance_ext,uint32_t instance_ext_count,const char ** device_ext,uint32_t device_ext_count,ContextCreationFlags flags)42 bool Context::init_instance_and_device(const char **instance_ext, uint32_t instance_ext_count, const char **device_ext,
43                                        uint32_t device_ext_count, ContextCreationFlags flags)
44 {
45 	destroy();
46 
47 	owned_instance = true;
48 	owned_device = true;
49 	if (!create_instance(instance_ext, instance_ext_count))
50 	{
51 		destroy();
52 		LOGE("Failed to create Vulkan instance.\n");
53 		return false;
54 	}
55 
56 	VkPhysicalDeviceFeatures features = {};
57 	if (!create_device(VK_NULL_HANDLE, VK_NULL_HANDLE, device_ext, device_ext_count, nullptr, 0, &features, flags))
58 	{
59 		destroy();
60 		LOGE("Failed to create Vulkan device.\n");
61 		return false;
62 	}
63 
64 	return true;
65 }
66 
// Serializes one-time initialization of the Vulkan loader across threads.
static mutex loader_init_lock;
// Set once volkInitializeCustom() has completed successfully.
static bool loader_init_once;
69 
// Initializes the Vulkan loader (volk). If addr is null, the Vulkan runtime
// library is located and loaded dynamically and vkGetInstanceProcAddr is
// resolved from it; otherwise the caller-supplied entry point is used
// directly. Thread-safe. Once initialization has succeeded, further calls
// with a null addr are no-ops; passing a non-null addr re-initializes volk
// with that entry point.
bool Context::init_loader(PFN_vkGetInstanceProcAddr addr)
{
	lock_guard<mutex> holder(loader_init_lock);
	// Already initialized and no explicit override requested.
	if (loader_init_once && !addr)
		return true;

	if (!addr)
	{
#ifndef _WIN32
		// Handle is deliberately kept in a function-local static and never
		// dlclose()d; the Vulkan library stays loaded for the process lifetime.
		static void *module;
		if (!module)
		{
			// Environment variable overrides the library search entirely.
			const char *vulkan_path = getenv("GRANITE_VULKAN_LIBRARY");
			if (vulkan_path)
				module = dlopen(vulkan_path, RTLD_LOCAL | RTLD_LAZY);
#ifdef __APPLE__
			if (!module)
				module = dlopen("libvulkan.1.dylib", RTLD_LOCAL | RTLD_LAZY);
#else
			// Prefer the versioned SONAME, then fall back to the dev symlink.
			if (!module)
				module = dlopen("libvulkan.so.1", RTLD_LOCAL | RTLD_LAZY);
			if (!module)
				module = dlopen("libvulkan.so", RTLD_LOCAL | RTLD_LAZY);
#endif
			if (!module)
				return false;
		}

		addr = reinterpret_cast<PFN_vkGetInstanceProcAddr>(dlsym(module, "vkGetInstanceProcAddr"));
		if (!addr)
			return false;
#else
		// Windows path: the loader DLL is likewise leaked on purpose.
		static HMODULE module;
		if (!module)
		{
			module = LoadLibraryA("vulkan-1.dll");
			if (!module)
				return false;
		}

		// Ugly pointer warning workaround.
		// (GetProcAddress returns FARPROC; memcpy avoids a function-pointer
		// conversion warning while preserving the bits.)
		auto ptr = GetProcAddress(module, "vkGetInstanceProcAddr");
		static_assert(sizeof(ptr) == sizeof(addr), "Mismatch pointer type.");
		memcpy(&addr, &ptr, sizeof(ptr));

		if (!addr)
			return false;
#endif
	}

	// Hand the resolved entry point to volk, which pulls in the rest of the
	// global Vulkan entry points from it.
	volkInitializeCustom(addr);
	loader_init_once = true;
	return true;
}
124 
init_from_instance_and_device(VkInstance instance_,VkPhysicalDevice gpu_,VkDevice device_,VkQueue queue_,uint32_t queue_family_)125 bool Context::init_from_instance_and_device(VkInstance instance_, VkPhysicalDevice gpu_, VkDevice device_, VkQueue queue_, uint32_t queue_family_)
126 {
127 	destroy();
128 
129 	device = device_;
130 	instance = instance_;
131 	gpu = gpu_;
132 	graphics_queue = queue_;
133 	compute_queue = queue_;
134 	transfer_queue = queue_;
135 	graphics_queue_family = queue_family_;
136 	compute_queue_family = queue_family_;
137 	transfer_queue_family = queue_family_;
138 	owned_instance = false;
139 	owned_device = true;
140 
141 	volkLoadInstance(instance);
142 	volkLoadDeviceTable(&device_table, device);
143 	vkGetPhysicalDeviceProperties(gpu, &gpu_props);
144 	vkGetPhysicalDeviceMemoryProperties(gpu, &mem_props);
145 	return true;
146 }
147 
init_device_from_instance(VkInstance instance_,VkPhysicalDevice gpu_,VkSurfaceKHR surface,const char ** required_device_extensions,unsigned num_required_device_extensions,const char ** required_device_layers,unsigned num_required_device_layers,const VkPhysicalDeviceFeatures * required_features,ContextCreationFlags flags)148 bool Context::init_device_from_instance(VkInstance instance_, VkPhysicalDevice gpu_, VkSurfaceKHR surface,
149                                         const char **required_device_extensions, unsigned num_required_device_extensions,
150                                         const char **required_device_layers, unsigned num_required_device_layers,
151                                         const VkPhysicalDeviceFeatures *required_features,
152                                         ContextCreationFlags flags)
153 {
154 	destroy();
155 
156 	instance = instance_;
157 	owned_instance = false;
158 	owned_device = true;
159 
160 	if (!create_instance(nullptr, 0))
161 		return false;
162 
163 	if (!create_device(gpu_, surface, required_device_extensions, num_required_device_extensions, required_device_layers,
164 	                   num_required_device_layers, required_features, flags))
165 	{
166 		destroy();
167 		LOGE("Failed to create Vulkan device.\n");
168 		return false;
169 	}
170 
171 	return true;
172 }
173 
destroy()174 void Context::destroy()
175 {
176 	if (device != VK_NULL_HANDLE)
177 		device_table.vkDeviceWaitIdle(device);
178 
179 #ifdef VULKAN_DEBUG
180 	if (debug_callback)
181 		vkDestroyDebugReportCallbackEXT(instance, debug_callback, nullptr);
182 	if (debug_messenger)
183 		vkDestroyDebugUtilsMessengerEXT(instance, debug_messenger, nullptr);
184 	debug_callback = VK_NULL_HANDLE;
185 	debug_messenger = VK_NULL_HANDLE;
186 #endif
187 
188 	if (owned_device && device != VK_NULL_HANDLE)
189 		device_table.vkDestroyDevice(device, nullptr);
190 	if (owned_instance && instance != VK_NULL_HANDLE)
191 		vkDestroyInstance(instance, nullptr);
192 }
193 
// Destructor: releases all owned Vulkan objects via destroy().
Context::~Context()
{
	destroy();
}
198 
get_application_info(bool supports_vulkan_11)199 const VkApplicationInfo &Context::get_application_info(bool supports_vulkan_11)
200 {
201 	static const VkApplicationInfo info_11 = {
202 		VK_STRUCTURE_TYPE_APPLICATION_INFO, nullptr, "Granite", 0, "Granite", 0, VK_API_VERSION_1_1,
203 	};
204 
205 	static const VkApplicationInfo info = {
206 		VK_STRUCTURE_TYPE_APPLICATION_INFO, nullptr, "Granite", 0, "Granite", 0, VK_MAKE_VERSION(1, 0, 57),
207 	};
208 	return supports_vulkan_11 ? info_11 : info;
209 }
210 
notify_validation_error(const char * msg)211 void Context::notify_validation_error(const char *msg)
212 {
213 	if (message_callback)
214 		message_callback(msg);
215 }
216 
set_notification_callback(function<void (const char *)> func)217 void Context::set_notification_callback(function<void(const char *)> func)
218 {
219 	message_callback = move(func);
220 }
221 
222 #ifdef VULKAN_DEBUG
// VK_EXT_debug_utils messenger callback: routes validation and general
// messages to the engine log and forwards validation errors to the Context's
// notification callback. Always returns VK_FALSE so the triggering Vulkan
// call is not aborted.
static VKAPI_ATTR VkBool32 VKAPI_CALL vulkan_messenger_cb(
		VkDebugUtilsMessageSeverityFlagBitsEXT           messageSeverity,
		VkDebugUtilsMessageTypeFlagsEXT                  messageType,
		const VkDebugUtilsMessengerCallbackDataEXT*      pCallbackData,
		void *pUserData)
{
	// pUserData carries the owning Context (set when the messenger was created).
	auto *context = static_cast<Context *>(pUserData);

	switch (messageSeverity)
	{
	case VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT:
		if (messageType == VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT)
		{
			LOGE("[Vulkan]: Validation Error: %s\n", pCallbackData->pMessage);
			// Only validation errors reach the user notification hook.
			context->notify_validation_error(pCallbackData->pMessage);
		}
		else
			LOGE("[Vulkan]: Other Error: %s\n", pCallbackData->pMessage);
		break;

	case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT:
		if (messageType == VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT)
			LOGW("[Vulkan]: Validation Warning: %s\n", pCallbackData->pMessage);
		else
			LOGW("[Vulkan]: Other Warning: %s\n", pCallbackData->pMessage);
		break;

#if 0
	case VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT:
	case VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT:
		if (messageType == VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT)
			LOGI("[Vulkan]: Validation Info: %s\n", pCallbackData->pMessage);
		else
			LOGI("[Vulkan]: Other Info: %s\n", pCallbackData->pMessage);
		break;
#endif

	default:
		// Verbose/info severities are ignored entirely (disabled block above);
		// skip the object dump as well.
		return VK_FALSE;
	}

	// First pass: check whether any referenced object carries a debug name,
	// so we do not spam "N/A" lines for completely anonymous objects.
	bool log_object_names = false;
	for (uint32_t i = 0; i < pCallbackData->objectCount; i++)
	{
		auto *name = pCallbackData->pObjects[i].pObjectName;
		if (name)
		{
			log_object_names = true;
			break;
		}
	}

	// Second pass: dump all objects (named or not) once at least one is named.
	if (log_object_names)
	{
		for (uint32_t i = 0; i < pCallbackData->objectCount; i++)
		{
			auto *name = pCallbackData->pObjects[i].pObjectName;
			LOGI("  Object #%u: %s\n", i, name ? name : "N/A");
		}
	}

	return VK_FALSE;
}
286 
// Legacy VK_EXT_debug_report callback, used when VK_EXT_debug_utils is not
// available. Filters known false positives, logs by severity, and forwards
// errors to the Context's notification callback. Returns VK_FALSE so the
// triggering call proceeds.
static VKAPI_ATTR VkBool32 VKAPI_CALL vulkan_debug_cb(VkDebugReportFlagsEXT flags,
                                                      VkDebugReportObjectTypeEXT, uint64_t,
                                                      size_t, int32_t messageCode, const char *pLayerPrefix,
                                                      const char *pMessage, void *pUserData)
{
	// pUserData carries the owning Context (set when the callback was created).
	auto *context = static_cast<Context *>(pUserData);

	// False positives about lack of srcAccessMask/dstAccessMask.
	if (strcmp(pLayerPrefix, "DS") == 0 && messageCode == 10)
		return VK_FALSE;

	// Demote to a warning, it's a false positive almost all the time for Granite.
	if (strcmp(pLayerPrefix, "DS") == 0 && messageCode == 6)
		flags = VK_DEBUG_REPORT_DEBUG_BIT_EXT;

	if (flags & VK_DEBUG_REPORT_ERROR_BIT_EXT)
	{
		LOGE("[Vulkan]: Error: %s: %s\n", pLayerPrefix, pMessage);
		// Only errors reach the user notification hook.
		context->notify_validation_error(pMessage);
	}
	else if (flags & VK_DEBUG_REPORT_WARNING_BIT_EXT)
	{
		LOGW("[Vulkan]: Warning: %s: %s\n", pLayerPrefix, pMessage);
	}
	else if (flags & VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT)
	{
		// Performance warnings are deliberately silenced.
		//LOGW("[Vulkan]: Performance warning: %s: %s\n", pLayerPrefix, pMessage);
	}
	else
	{
		LOGI("[Vulkan]: Information: %s: %s\n", pLayerPrefix, pMessage);
	}

	return VK_FALSE;
}
322 #endif
323 
// Creates the VkInstance (unless one was adopted already, in which case this
// only queries capabilities), enabling the caller-required extensions plus
// any optional extensions/layers found on the system. Records which optional
// features are available in the `ext` struct. Returns false if a required
// extension is missing or instance creation fails.
bool Context::create_instance(const char **instance_ext, uint32_t instance_ext_count)
{
	ext.supports_vulkan_11_instance = volkGetInstanceVersion() >= VK_API_VERSION_1_1;

	VkInstanceCreateInfo info = { VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO };
	info.pApplicationInfo = &get_application_info(ext.supports_vulkan_11_instance);

	vector<const char *> instance_exts;
	vector<const char *> instance_layers;
	// Required extensions requested by the caller come first.
	for (uint32_t i = 0; i < instance_ext_count; i++)
		instance_exts.push_back(instance_ext[i]);

	// Standard two-call pattern: query count, then fill.
	uint32_t ext_count = 0;
	vkEnumerateInstanceExtensionProperties(nullptr, &ext_count, nullptr);
	vector<VkExtensionProperties> queried_extensions(ext_count);
	if (ext_count)
		vkEnumerateInstanceExtensionProperties(nullptr, &ext_count, queried_extensions.data());

	uint32_t layer_count = 0;
	vkEnumerateInstanceLayerProperties(&layer_count, nullptr);
	vector<VkLayerProperties> queried_layers(layer_count);
	if (layer_count)
		vkEnumerateInstanceLayerProperties(&layer_count, queried_layers.data());

	const auto has_extension = [&](const char *name) -> bool {
		auto itr = find_if(begin(queried_extensions), end(queried_extensions), [name](const VkExtensionProperties &e) -> bool {
			return strcmp(e.extensionName, name) == 0;
		});
		return itr != end(queried_extensions);
	};

	// All caller-required extensions must be present; otherwise bail early.
	for (uint32_t i = 0; i < instance_ext_count; i++)
		if (!has_extension(instance_ext[i]))
			return false;

	if (has_extension(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME))
	{
		ext.supports_physical_device_properties2 = true;
		instance_exts.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
	}

	// External memory/semaphore capability queries require properties2.
	if (ext.supports_physical_device_properties2 &&
	    has_extension(VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME) &&
	    has_extension(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME))
	{
		instance_exts.push_back(VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME);
		instance_exts.push_back(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME);
		ext.supports_external = true;
	}

	if (has_extension(VK_EXT_DEBUG_UTILS_EXTENSION_NAME))
	{
		instance_exts.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
		ext.supports_debug_utils = true;
	}

	// Surface-related optional extensions only make sense if the caller asked
	// for VK_KHR_surface in the first place.
	auto itr = find_if(instance_ext, instance_ext + instance_ext_count, [](const char *name) {
		return strcmp(name, VK_KHR_SURFACE_EXTENSION_NAME) == 0;
	});
	bool has_surface_extension = itr != (instance_ext + instance_ext_count);

	if (has_surface_extension && has_extension(VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME))
	{
		instance_exts.push_back(VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME);
		ext.supports_surface_capabilities2 = true;
	}

#ifdef VULKAN_DEBUG
	const auto has_layer = [&](const char *name) -> bool {
		auto layer_itr = find_if(begin(queried_layers), end(queried_layers), [name](const VkLayerProperties &e) -> bool {
			return strcmp(e.layerName, name) == 0;
		});
		return layer_itr != end(queried_layers);
	};

	// Fall back to the legacy debug-report extension when debug-utils is absent.
	if (!ext.supports_debug_utils && has_extension(VK_EXT_DEBUG_REPORT_EXTENSION_NAME))
		instance_exts.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);

	// Environment escape hatch to disable validation in debug builds.
	if (getenv("GRANITE_VULKAN_NO_VALIDATION"))
		force_no_validation = true;

	// Prefer the unified Khronos validation layer; fall back to the older
	// LunarG standard validation meta-layer.
	if (!force_no_validation && has_layer("VK_LAYER_KHRONOS_validation"))
	{
		instance_layers.push_back("VK_LAYER_KHRONOS_validation");
		LOGI("Enabling VK_LAYER_KHRONOS_validation.\n");
	}
	else if (!force_no_validation && has_layer("VK_LAYER_LUNARG_standard_validation"))
	{
		instance_layers.push_back("VK_LAYER_LUNARG_standard_validation");
		LOGI("Enabling VK_LAYER_LUNARG_standard_validation.\n");
	}
#endif

	info.enabledExtensionCount = instance_exts.size();
	info.ppEnabledExtensionNames = instance_exts.empty() ? nullptr : instance_exts.data();
	info.enabledLayerCount = instance_layers.size();
	info.ppEnabledLayerNames = instance_layers.empty() ? nullptr : instance_layers.data();

	for (auto *ext_name : instance_exts)
		LOGI("Enabling instance extension: %s.\n", ext_name);

	// An instance adopted via init_device_from_instance() is reused as-is;
	// NOTE(review): in that case the ext.supports_* flags above reflect what
	// is available on the system, not necessarily what the external instance
	// actually enabled — confirm callers account for this.
	if (instance == VK_NULL_HANDLE)
		if (vkCreateInstance(&info, nullptr, &instance) != VK_SUCCESS)
			return false;

	volkLoadInstance(instance);

#ifdef VULKAN_DEBUG
	if (ext.supports_debug_utils)
	{
		VkDebugUtilsMessengerCreateInfoEXT debug_info = { VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT };
		debug_info.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT |
		                             VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT |
		                             VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT |
		                             VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT;
		debug_info.pfnUserCallback = vulkan_messenger_cb;
		debug_info.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
		                         VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT |
		                         VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT;
		// Pass this Context so the callback can forward validation errors.
		debug_info.pUserData = this;

		// Failure to create the messenger is non-fatal; return value ignored.
		vkCreateDebugUtilsMessengerEXT(instance, &debug_info, nullptr, &debug_messenger);
	}
	else if (has_extension(VK_EXT_DEBUG_REPORT_EXTENSION_NAME))
	{
		VkDebugReportCallbackCreateInfoEXT debug_info = { VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT };
		debug_info.flags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT |
		                   VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT;
		debug_info.pfnCallback = vulkan_debug_cb;
		debug_info.pUserData = this;
		vkCreateDebugReportCallbackEXT(instance, &debug_info, nullptr, &debug_callback);
	}
#endif

	return true;
}
460 
create_device(VkPhysicalDevice gpu_,VkSurfaceKHR surface,const char ** required_device_extensions,unsigned num_required_device_extensions,const char ** required_device_layers,unsigned num_required_device_layers,const VkPhysicalDeviceFeatures * required_features,ContextCreationFlags flags)461 bool Context::create_device(VkPhysicalDevice gpu_, VkSurfaceKHR surface, const char **required_device_extensions,
462                             unsigned num_required_device_extensions, const char **required_device_layers,
463                             unsigned num_required_device_layers, const VkPhysicalDeviceFeatures *required_features,
464                             ContextCreationFlags flags)
465 {
466 	gpu = gpu_;
467 	if (gpu == VK_NULL_HANDLE)
468 	{
469 		uint32_t gpu_count = 0;
470 		if (vkEnumeratePhysicalDevices(instance, &gpu_count, nullptr) != VK_SUCCESS)
471 			return false;
472 
473 		if (gpu_count == 0)
474 			return false;
475 
476 		vector<VkPhysicalDevice> gpus(gpu_count);
477 		if (vkEnumeratePhysicalDevices(instance, &gpu_count, gpus.data()) != VK_SUCCESS)
478 			return false;
479 
480 		for (auto &g : gpus)
481 		{
482 			VkPhysicalDeviceProperties props;
483 			vkGetPhysicalDeviceProperties(g, &props);
484 			LOGI("Found Vulkan GPU: %s\n", props.deviceName);
485 			LOGI("    API: %u.%u.%u\n",
486 			     VK_VERSION_MAJOR(props.apiVersion),
487 			     VK_VERSION_MINOR(props.apiVersion),
488 			     VK_VERSION_PATCH(props.apiVersion));
489 			LOGI("    Driver: %u.%u.%u\n",
490 			     VK_VERSION_MAJOR(props.driverVersion),
491 			     VK_VERSION_MINOR(props.driverVersion),
492 			     VK_VERSION_PATCH(props.driverVersion));
493 		}
494 
495 		const char *gpu_index = getenv("GRANITE_VULKAN_DEVICE_INDEX");
496 		if (gpu_index)
497 		{
498 			unsigned index = strtoul(gpu_index, nullptr, 0);
499 			if (index < gpu_count)
500 				gpu = gpus[index];
501 		}
502 
503 		if (gpu == VK_NULL_HANDLE)
504 			gpu = gpus.front();
505 	}
506 
507 	uint32_t ext_count = 0;
508 	vkEnumerateDeviceExtensionProperties(gpu, nullptr, &ext_count, nullptr);
509 	vector<VkExtensionProperties> queried_extensions(ext_count);
510 	if (ext_count)
511 		vkEnumerateDeviceExtensionProperties(gpu, nullptr, &ext_count, queried_extensions.data());
512 
513 	uint32_t layer_count = 0;
514 	vkEnumerateDeviceLayerProperties(gpu, &layer_count, nullptr);
515 	vector<VkLayerProperties> queried_layers(layer_count);
516 	if (layer_count)
517 		vkEnumerateDeviceLayerProperties(gpu, &layer_count, queried_layers.data());
518 
519 	const auto has_extension = [&](const char *name) -> bool {
520 		auto itr = find_if(begin(queried_extensions), end(queried_extensions), [name](const VkExtensionProperties &e) -> bool {
521 			return strcmp(e.extensionName, name) == 0;
522 		});
523 		return itr != end(queried_extensions);
524 	};
525 
526 	const auto has_layer = [&](const char *name) -> bool {
527 		auto itr = find_if(begin(queried_layers), end(queried_layers), [name](const VkLayerProperties &e) -> bool {
528 			return strcmp(e.layerName, name) == 0;
529 		});
530 		return itr != end(queried_layers);
531 	};
532 
533 	for (uint32_t i = 0; i < num_required_device_extensions; i++)
534 		if (!has_extension(required_device_extensions[i]))
535 			return false;
536 
537 	for (uint32_t i = 0; i < num_required_device_layers; i++)
538 		if (!has_layer(required_device_layers[i]))
539 			return false;
540 
541 	vkGetPhysicalDeviceProperties(gpu, &gpu_props);
542 	vkGetPhysicalDeviceMemoryProperties(gpu, &mem_props);
543 
544 	LOGI("Selected Vulkan GPU: %s\n", gpu_props.deviceName);
545 
546 	if (gpu_props.apiVersion >= VK_API_VERSION_1_1)
547 	{
548 		ext.supports_vulkan_11_device = ext.supports_vulkan_11_instance;
549 		LOGI("GPU supports Vulkan 1.1.\n");
550 	}
551 	else if (gpu_props.apiVersion >= VK_API_VERSION_1_0)
552 	{
553 		ext.supports_vulkan_11_device = false;
554 		LOGI("GPU supports Vulkan 1.0.\n");
555 	}
556 
557 	uint32_t queue_count;
558 	vkGetPhysicalDeviceQueueFamilyProperties(gpu, &queue_count, nullptr);
559 	vector<VkQueueFamilyProperties> queue_props(queue_count);
560 	vkGetPhysicalDeviceQueueFamilyProperties(gpu, &queue_count, queue_props.data());
561 
562 	for (unsigned i = 0; i < queue_count; i++)
563 	{
564 		VkBool32 supported = surface == VK_NULL_HANDLE;
565 		if (surface != VK_NULL_HANDLE)
566 			vkGetPhysicalDeviceSurfaceSupportKHR(gpu, i, surface, &supported);
567 
568 		static const VkQueueFlags required = VK_QUEUE_COMPUTE_BIT | VK_QUEUE_GRAPHICS_BIT;
569 		if (supported && ((queue_props[i].queueFlags & required) == required))
570 		{
571 			graphics_queue_family = i;
572 
573 			// XXX: This assumes timestamp valid bits is the same for all queue types.
574 			timestamp_valid_bits = queue_props[i].timestampValidBits;
575 			break;
576 		}
577 	}
578 
579 	for (unsigned i = 0; i < queue_count; i++)
580 	{
581 		static const VkQueueFlags required = VK_QUEUE_COMPUTE_BIT;
582 		if (i != graphics_queue_family && (queue_props[i].queueFlags & required) == required)
583 		{
584 			compute_queue_family = i;
585 			break;
586 		}
587 	}
588 
589 	for (unsigned i = 0; i < queue_count; i++)
590 	{
591 		static const VkQueueFlags required = VK_QUEUE_TRANSFER_BIT;
592 		if (i != graphics_queue_family && i != compute_queue_family && (queue_props[i].queueFlags & required) == required)
593 		{
594 			transfer_queue_family = i;
595 			break;
596 		}
597 	}
598 
599 	if (transfer_queue_family == VK_QUEUE_FAMILY_IGNORED)
600 	{
601 		for (unsigned i = 0; i < queue_count; i++)
602 		{
603 			static const VkQueueFlags required = VK_QUEUE_TRANSFER_BIT;
604 			if (i != graphics_queue_family && (queue_props[i].queueFlags & required) == required)
605 			{
606 				transfer_queue_family = i;
607 				break;
608 			}
609 		}
610 	}
611 
612 	if (graphics_queue_family == VK_QUEUE_FAMILY_IGNORED)
613 		return false;
614 
615 	unsigned universal_queue_index = 1;
616 	uint32_t graphics_queue_index = 0;
617 	uint32_t compute_queue_index = 0;
618 	uint32_t transfer_queue_index = 0;
619 
620 	if (compute_queue_family == VK_QUEUE_FAMILY_IGNORED)
621 	{
622 		compute_queue_family = graphics_queue_family;
623 		compute_queue_index = std::min(queue_props[graphics_queue_family].queueCount - 1, universal_queue_index);
624 		universal_queue_index++;
625 	}
626 
627 	if (transfer_queue_family == VK_QUEUE_FAMILY_IGNORED)
628 	{
629 		transfer_queue_family = graphics_queue_family;
630 		transfer_queue_index = std::min(queue_props[graphics_queue_family].queueCount - 1, universal_queue_index);
631 		universal_queue_index++;
632 	}
633 	else if (transfer_queue_family == compute_queue_family)
634 		transfer_queue_index = std::min(queue_props[compute_queue_family].queueCount - 1, 1u);
635 
636 	static const float graphics_queue_prio = 0.5f;
637 	static const float compute_queue_prio = 1.0f;
638 	static const float transfer_queue_prio = 1.0f;
639 	float prio[3] = { graphics_queue_prio, compute_queue_prio, transfer_queue_prio };
640 
641 	unsigned queue_family_count = 0;
642 	VkDeviceQueueCreateInfo queue_info[3] = {};
643 
644 	VkDeviceCreateInfo device_info = { VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO };
645 	device_info.pQueueCreateInfos = queue_info;
646 
647 	queue_info[queue_family_count].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
648 	queue_info[queue_family_count].queueFamilyIndex = graphics_queue_family;
649 	queue_info[queue_family_count].queueCount = std::min(universal_queue_index,
650 	                                                     queue_props[graphics_queue_family].queueCount);
651 	queue_info[queue_family_count].pQueuePriorities = prio;
652 	queue_family_count++;
653 
654 	if (compute_queue_family != graphics_queue_family)
655 	{
656 		queue_info[queue_family_count].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
657 		queue_info[queue_family_count].queueFamilyIndex = compute_queue_family;
658 		queue_info[queue_family_count].queueCount = std::min(transfer_queue_family == compute_queue_family ? 2u : 1u,
659 		                                                     queue_props[compute_queue_family].queueCount);
660 		queue_info[queue_family_count].pQueuePriorities = prio + 1;
661 		queue_family_count++;
662 	}
663 
664 	if (transfer_queue_family != graphics_queue_family && transfer_queue_family != compute_queue_family)
665 	{
666 		queue_info[queue_family_count].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
667 		queue_info[queue_family_count].queueFamilyIndex = transfer_queue_family;
668 		queue_info[queue_family_count].queueCount = 1;
669 		queue_info[queue_family_count].pQueuePriorities = prio + 2;
670 		queue_family_count++;
671 	}
672 
673 	device_info.queueCreateInfoCount = queue_family_count;
674 
675 	vector<const char *> enabled_extensions;
676 	vector<const char *> enabled_layers;
677 
678 	for (uint32_t i = 0; i < num_required_device_extensions; i++)
679 		enabled_extensions.push_back(required_device_extensions[i]);
680 	for (uint32_t i = 0; i < num_required_device_layers; i++)
681 		enabled_layers.push_back(required_device_layers[i]);
682 
683 	if (has_extension(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME))
684 	{
685 		ext.supports_get_memory_requirements2 = true;
686 		enabled_extensions.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
687 	}
688 
689 	if (ext.supports_get_memory_requirements2 && has_extension(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME))
690 	{
691 		ext.supports_dedicated = true;
692 		enabled_extensions.push_back(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME);
693 	}
694 
695 	if (has_extension(VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME))
696 	{
697 		ext.supports_image_format_list = true;
698 		enabled_extensions.push_back(VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME);
699 	}
700 
701 	if (has_extension(VK_EXT_DEBUG_MARKER_EXTENSION_NAME))
702 	{
703 		ext.supports_debug_marker = true;
704 		enabled_extensions.push_back(VK_EXT_DEBUG_MARKER_EXTENSION_NAME);
705 	}
706 
707 	if (has_extension(VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME))
708 	{
709 		ext.supports_mirror_clamp_to_edge = true;
710 		enabled_extensions.push_back(VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME);
711 	}
712 
713 	if (has_extension(VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME))
714 	{
715 		ext.supports_google_display_timing = true;
716 		enabled_extensions.push_back(VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME);
717 	}
718 
719 #ifdef _WIN32
720 	if (ext.supports_surface_capabilities2 && has_extension(VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME))
721 	{
722 		ext.supports_full_screen_exclusive = true;
723 		enabled_extensions.push_back(VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME);
724 	}
725 #endif
726 
727 #ifdef VULKAN_DEBUG
728 	if (has_extension(VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME))
729 	{
730 		ext.supports_nv_device_diagnostic_checkpoints = true;
731 		enabled_extensions.push_back(VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME);
732 	}
733 #endif
734 
735 	if (ext.supports_external && ext.supports_dedicated &&
736 	    has_extension(VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME) &&
737 	    has_extension(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME) &&
738 #ifdef _WIN32
739 	    has_extension(VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME) &&
740 	    has_extension(VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME)
741 #else
742 	    has_extension(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME) &&
743 	    has_extension(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME)
744 #endif
745 		)
746 	{
747 		ext.supports_external = true;
748 		enabled_extensions.push_back(VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
749 		enabled_extensions.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
750 #ifdef _WIN32
751 		enabled_extensions.push_back(VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME);
752 		enabled_extensions.push_back(VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME);
753 #else
754 		enabled_extensions.push_back(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME);
755 		enabled_extensions.push_back(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME);
756 #endif
757 	}
758 	else
759 		ext.supports_external = false;
760 
761 	if (has_extension(VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME))
762 	{
763 		enabled_extensions.push_back(VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME);
764 		ext.supports_update_template = true;
765 	}
766 
767 	if (has_extension(VK_KHR_MAINTENANCE1_EXTENSION_NAME))
768 	{
769 		enabled_extensions.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
770 		ext.supports_maintenance_1 = true;
771 	}
772 
773 	if (has_extension(VK_KHR_MAINTENANCE2_EXTENSION_NAME))
774 	{
775 		enabled_extensions.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
776 		ext.supports_maintenance_2 = true;
777 	}
778 
779 	if (has_extension(VK_KHR_MAINTENANCE3_EXTENSION_NAME))
780 	{
781 		enabled_extensions.push_back(VK_KHR_MAINTENANCE3_EXTENSION_NAME);
782 		ext.supports_maintenance_3 = true;
783 	}
784 
785 	if (has_extension(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME))
786 	{
787 		ext.supports_bind_memory2 = true;
788 		enabled_extensions.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
789 	}
790 
791 	if (has_extension(VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME))
792 	{
793 		ext.supports_draw_indirect_count = true;
794 		enabled_extensions.push_back(VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
795 	}
796 
797 	if (has_extension(VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME))
798 	{
799 		ext.supports_draw_parameters = true;
800 		enabled_extensions.push_back(VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME);
801 	}
802 
803 	if (has_extension(VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME))
804 	{
805 		ext.supports_calibrated_timestamps = true;
806 		enabled_extensions.push_back(VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME);
807 	}
808 
809 	if (has_extension(VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME))
810 		enabled_extensions.push_back(VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME);
811 
812 	if (has_extension(VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME))
813 	{
814 		enabled_extensions.push_back(VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME);
815 		ext.supports_conservative_rasterization = true;
816 	}
817 
818 	VkPhysicalDeviceFeatures2KHR features = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR };
819 	ext.storage_8bit_features = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR };
820 	ext.storage_16bit_features = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR };
821 	ext.float16_int8_features = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR };
822 	ext.multiview_features = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR };
823 	ext.imageless_features = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR };
824 	ext.subgroup_size_control_features = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT };
825 	ext.compute_shader_derivative_features = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV };
826 	ext.host_query_reset_features = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT };
827 	ext.demote_to_helper_invocation_features = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT };
828 	ext.scalar_block_features = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT };
829 	ext.ubo_std430_features = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR };
830 	ext.timeline_semaphore_features = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR };
831 	ext.descriptor_indexing_features = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT };
832 	ext.performance_query_features = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR };
833 	ext.sampler_ycbcr_conversion_features = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR };
834 	void **ppNext = &features.pNext;
835 
836 	bool has_pdf2 = ext.supports_physical_device_properties2 ||
837 	                (ext.supports_vulkan_11_instance && ext.supports_vulkan_11_device);
838 
839 	if (has_pdf2)
840 	{
841 		if (has_extension(VK_KHR_8BIT_STORAGE_EXTENSION_NAME))
842 		{
843 			enabled_extensions.push_back(VK_KHR_8BIT_STORAGE_EXTENSION_NAME);
844 			*ppNext = &ext.storage_8bit_features;
845 			ppNext = &ext.storage_8bit_features.pNext;
846 		}
847 
848 		if (has_extension(VK_KHR_16BIT_STORAGE_EXTENSION_NAME))
849 		{
850 			enabled_extensions.push_back(VK_KHR_16BIT_STORAGE_EXTENSION_NAME);
851 			*ppNext = &ext.storage_16bit_features;
852 			ppNext = &ext.storage_16bit_features.pNext;
853 		}
854 
855 		if (has_extension(VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME))
856 		{
857 			enabled_extensions.push_back(VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME);
858 			*ppNext = &ext.float16_int8_features;
859 			ppNext = &ext.float16_int8_features.pNext;
860 		}
861 
862 		if (has_extension(VK_KHR_MULTIVIEW_EXTENSION_NAME))
863 		{
864 			enabled_extensions.push_back(VK_KHR_MULTIVIEW_EXTENSION_NAME);
865 			*ppNext = &ext.multiview_features;
866 			ppNext = &ext.multiview_features.pNext;
867 		}
868 
869 		if (has_extension(VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME))
870 		{
871 			enabled_extensions.push_back(VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME);
872 			*ppNext = &ext.subgroup_size_control_features;
873 			ppNext = &ext.subgroup_size_control_features.pNext;
874 		}
875 
876 		if (has_extension(VK_NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME))
877 		{
878 			enabled_extensions.push_back(VK_NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME);
879 			*ppNext = &ext.compute_shader_derivative_features;
880 			ppNext = &ext.compute_shader_derivative_features.pNext;
881 		}
882 
883 		if (has_extension(VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME))
884 		{
885 			enabled_extensions.push_back(VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
886 			*ppNext = &ext.host_query_reset_features;
887 			ppNext = &ext.host_query_reset_features.pNext;
888 		}
889 
890 		if (has_extension(VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME))
891 		{
892 			enabled_extensions.push_back(VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME);
893 			*ppNext = &ext.demote_to_helper_invocation_features;
894 			ppNext = &ext.demote_to_helper_invocation_features.pNext;
895 		}
896 
897 		if (has_extension(VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME))
898 		{
899 			enabled_extensions.push_back(VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME);
900 			*ppNext = &ext.scalar_block_features;
901 			ppNext = &ext.scalar_block_features.pNext;
902 		}
903 
904 		if (has_extension(VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME))
905 		{
906 			enabled_extensions.push_back(VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME);
907 			*ppNext = &ext.ubo_std430_features;
908 			ppNext = &ext.ubo_std430_features.pNext;
909 		}
910 
911 #ifdef VULKAN_DEBUG
912 		bool use_timeline_semaphore = force_no_validation;
913 		if (const char *use_timeline = getenv("GRANITE_VULKAN_FORCE_TIMELINE_SEMAPHORE"))
914 		{
915 			if (strtol(use_timeline, nullptr, 0) != 0)
916 				use_timeline_semaphore = true;
917 		}
918 #else
919 		constexpr bool use_timeline_semaphore = true;
920 #endif
921 		if (use_timeline_semaphore && has_extension(VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME))
922 		{
923 			enabled_extensions.push_back(VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME);
924 			*ppNext = &ext.timeline_semaphore_features;
925 			ppNext = &ext.timeline_semaphore_features.pNext;
926 		}
927 
928 		if ((flags & CONTEXT_CREATION_DISABLE_BINDLESS_BIT) == 0 &&
929 		    ext.supports_maintenance_3 &&
930 		    has_extension(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME))
931 		{
932 			enabled_extensions.push_back(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
933 			*ppNext = &ext.descriptor_indexing_features;
934 			ppNext = &ext.descriptor_indexing_features.pNext;
935 		}
936 
937 		if (has_extension(VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME))
938 		{
939 			enabled_extensions.push_back(VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME);
940 			*ppNext = &ext.performance_query_features;
941 			ppNext = &ext.performance_query_features.pNext;
942 		}
943 
944 		if (ext.supports_bind_memory2 &&
945 		    ext.supports_get_memory_requirements2 &&
946 		    has_extension(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME))
947 		{
948 			enabled_extensions.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
949 			*ppNext = &ext.sampler_ycbcr_conversion_features;
950 			ppNext = &ext.sampler_ycbcr_conversion_features.pNext;
951 		}
952 
953 #if 0
954 		if (has_extension(VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME))
955 		{
956 			enabled_extensions.push_back(VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME);
957 			*ppNext = &ext.imageless_features;
958 			ppNext = &ext.imageless_features.pNext;
959 		}
960 #endif
961 	}
962 
963 	if (ext.supports_vulkan_11_device && ext.supports_vulkan_11_instance)
964 		vkGetPhysicalDeviceFeatures2(gpu, &features);
965 	else if (ext.supports_physical_device_properties2)
966 		vkGetPhysicalDeviceFeatures2KHR(gpu, &features);
967 	else
968 		vkGetPhysicalDeviceFeatures(gpu, &features.features);
969 
970 	// Enable device features we might care about.
971 	{
972 		VkPhysicalDeviceFeatures enabled_features = *required_features;
973 		if (features.features.textureCompressionETC2)
974 			enabled_features.textureCompressionETC2 = VK_TRUE;
975 		if (features.features.textureCompressionBC)
976 			enabled_features.textureCompressionBC = VK_TRUE;
977 		if (features.features.textureCompressionASTC_LDR)
978 			enabled_features.textureCompressionASTC_LDR = VK_TRUE;
979 		if (features.features.fullDrawIndexUint32)
980 			enabled_features.fullDrawIndexUint32 = VK_TRUE;
981 		if (features.features.imageCubeArray)
982 			enabled_features.imageCubeArray = VK_TRUE;
983 		if (features.features.fillModeNonSolid)
984 			enabled_features.fillModeNonSolid = VK_TRUE;
985 		if (features.features.independentBlend)
986 			enabled_features.independentBlend = VK_TRUE;
987 		if (features.features.sampleRateShading)
988 			enabled_features.sampleRateShading = VK_TRUE;
989 		if (features.features.fragmentStoresAndAtomics)
990 			enabled_features.fragmentStoresAndAtomics = VK_TRUE;
991 		if (features.features.shaderStorageImageExtendedFormats)
992 			enabled_features.shaderStorageImageExtendedFormats = VK_TRUE;
993 		if (features.features.shaderStorageImageMultisample)
994 			enabled_features.shaderStorageImageMultisample = VK_TRUE;
995 		if (features.features.largePoints)
996 			enabled_features.largePoints = VK_TRUE;
997 		if (features.features.shaderInt16)
998 			enabled_features.shaderInt16 = VK_TRUE;
999 		if (features.features.shaderInt64)
1000 			enabled_features.shaderInt64 = VK_TRUE;
1001 
1002 		if (features.features.shaderSampledImageArrayDynamicIndexing)
1003 			enabled_features.shaderSampledImageArrayDynamicIndexing = VK_TRUE;
1004 		if (features.features.shaderUniformBufferArrayDynamicIndexing)
1005 			enabled_features.shaderUniformBufferArrayDynamicIndexing = VK_TRUE;
1006 		if (features.features.shaderStorageBufferArrayDynamicIndexing)
1007 			enabled_features.shaderStorageBufferArrayDynamicIndexing = VK_TRUE;
1008 		if (features.features.shaderStorageImageArrayDynamicIndexing)
1009 			enabled_features.shaderStorageImageArrayDynamicIndexing = VK_TRUE;
1010 
1011 		features.features = enabled_features;
1012 		ext.enabled_features = enabled_features;
1013 	}
1014 
1015 	if (ext.supports_physical_device_properties2)
1016 		device_info.pNext = &features;
1017 	else
1018 		device_info.pEnabledFeatures = &features.features;
1019 
1020 #ifdef VULKAN_DEBUG
1021 	if (!force_no_validation && has_layer("VK_LAYER_KHRONOS_validation"))
1022 		enabled_layers.push_back("VK_LAYER_KHRONOS_validation");
1023 	else if (!force_no_validation && has_layer("VK_LAYER_LUNARG_standard_validation"))
1024 		enabled_layers.push_back("VK_LAYER_LUNARG_standard_validation");
1025 #endif
1026 
1027 	if (ext.supports_external && has_extension(VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME))
1028 	{
1029 		ext.supports_external_memory_host = true;
1030 		enabled_extensions.push_back(VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME);
1031 	}
1032 
1033 	// Only need GetPhysicalDeviceProperties2 for Vulkan 1.1-only code, so don't bother getting KHR variant.
1034 	ext.subgroup_properties = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES };
1035 	ext.host_memory_properties = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT };
1036 	ext.subgroup_size_control_properties = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT };
1037 	ext.descriptor_indexing_properties = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT };
1038 	ext.conservative_rasterization_properties = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT };
1039 	ext.driver_properties = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR };
1040 	VkPhysicalDeviceProperties2 props = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2 };
1041 	ppNext = &props.pNext;
1042 
1043 	*ppNext = &ext.subgroup_properties;
1044 	ppNext = &ext.subgroup_properties.pNext;
1045 
1046 	if (ext.supports_external_memory_host)
1047 	{
1048 		*ppNext = &ext.host_memory_properties;
1049 		ppNext = &ext.host_memory_properties.pNext;
1050 	}
1051 
1052 	if (has_extension(VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME))
1053 	{
1054 		*ppNext = &ext.subgroup_size_control_properties;
1055 		ppNext = &ext.subgroup_size_control_properties.pNext;
1056 	}
1057 
1058 	if (ext.supports_maintenance_3 && has_extension(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME))
1059 	{
1060 		*ppNext = &ext.descriptor_indexing_properties;
1061 		ppNext = &ext.descriptor_indexing_properties.pNext;
1062 	}
1063 
1064 	if (ext.supports_conservative_rasterization)
1065 	{
1066 		*ppNext = &ext.conservative_rasterization_properties;
1067 		ppNext = &ext.conservative_rasterization_properties.pNext;
1068 	}
1069 
1070 	if (ext.supports_vulkan_11_instance && ext.supports_vulkan_11_device &&
1071 	    has_extension(VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME))
1072 	{
1073 		enabled_extensions.push_back(VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME);
1074 		ext.supports_driver_properties = true;
1075 		*ppNext = &ext.driver_properties;
1076 		ppNext = &ext.driver_properties.pNext;
1077 	}
1078 
1079 	if (ext.supports_vulkan_11_instance && ext.supports_vulkan_11_device)
1080 		vkGetPhysicalDeviceProperties2(gpu, &props);
1081 
1082 	device_info.enabledExtensionCount = enabled_extensions.size();
1083 	device_info.ppEnabledExtensionNames = enabled_extensions.empty() ? nullptr : enabled_extensions.data();
1084 	device_info.enabledLayerCount = enabled_layers.size();
1085 	device_info.ppEnabledLayerNames = enabled_layers.empty() ? nullptr : enabled_layers.data();
1086 
1087 	for (auto *enabled_extension : enabled_extensions)
1088 		LOGI("Enabling device extension: %s.\n", enabled_extension);
1089 
1090 	if (vkCreateDevice(gpu, &device_info, nullptr, &device) != VK_SUCCESS)
1091 		return false;
1092 
1093 	volkLoadDeviceTable(&device_table, device);
1094 	device_table.vkGetDeviceQueue(device, graphics_queue_family, graphics_queue_index, &graphics_queue);
1095 	device_table.vkGetDeviceQueue(device, compute_queue_family, compute_queue_index, &compute_queue);
1096 	device_table.vkGetDeviceQueue(device, transfer_queue_family, transfer_queue_index, &transfer_queue);
1097 
1098 	check_descriptor_indexing_features();
1099 
1100 	return true;
1101 }
1102 
check_descriptor_indexing_features()1103 void Context::check_descriptor_indexing_features()
1104 {
1105 	auto &f = ext.descriptor_indexing_features;
1106 	if (f.descriptorBindingSampledImageUpdateAfterBind &&
1107 	    f.descriptorBindingPartiallyBound &&
1108 	    f.runtimeDescriptorArray &&
1109 	    f.shaderSampledImageArrayNonUniformIndexing)
1110 	{
1111 		ext.supports_descriptor_indexing = true;
1112 	}
1113 }
1114 }
1115