1 // Debugging notes
2 // The crash happens when we try to call vkGetPhysicalDeviceProperties2KHR which seems to be null.
3 //
// Apparently we don't manage to specify the extensions we want, yet something
// still reports that those extensions are present — see the load failures below:
6 // Failed to load : vkGetPhysicalDeviceProperties2KHR
7 // Failed to load : vkGetPhysicalDeviceFeatures2KHR
8 
9 #include <cstring>
10 #include <cassert>
11 #include <vector>
12 #include <mutex>
13 #include <condition_variable>
14 
15 #include "Common/GPU/Vulkan/VulkanLoader.h"
16 #include "Common/Log.h"
17 #include "Core/Config.h"
18 
19 #define VK_NO_PROTOTYPES
20 #include "libretro/libretro_vulkan.h"
21 
22 using namespace PPSSPP_VK;
23 
// The frontend-provided Vulkan interface; set via vk_libretro_set_hwrender_interface().
static retro_hw_render_interface_vulkan *vulkan;

// Objects and requirements negotiated by the libretro frontend, captured in
// vk_libretro_init(). The warp functions below substitute these for the objects
// PPSSPP would normally create itself (instance, surface, device extensions...).
static struct {
	VkInstance instance;
	VkPhysicalDevice gpu;
	VkSurfaceKHR surface;
	PFN_vkGetInstanceProcAddr get_instance_proc_addr;
	const char **required_device_extensions;
	unsigned num_required_device_extensions;
	const char **required_device_layers;
	unsigned num_required_device_layers;
	const VkPhysicalDeviceFeatures *required_features;
} vk_init_info;
// Set in vkCreateDevice_libretro when VK_KHR_dedicated_allocation is among the
// enabled device extensions; checked when allocating swapchain image memory.
static bool DEDICATED_ALLOCATION;

#define VULKAN_MAX_SWAPCHAIN_IMAGES 8
// Fake swapchain handed back to PPSSPP: instead of presenting to a real surface,
// rendered images are passed to the libretro frontend via vulkan->set_image().
struct VkSwapchainKHR_T {
	uint32_t count;  // number of backing images actually in use (<= VULKAN_MAX_SWAPCHAIN_IMAGES)
	struct {
		VkImage handle;
		VkDeviceMemory memory;
		retro_vulkan_image retro_image;  // view + layout info shared with the frontend
	} images[VULKAN_MAX_SWAPCHAIN_IMAGES];
	std::mutex mutex;                 // guards current_index
	std::condition_variable condVar;  // signaled when a new image has been "presented"
	int current_index;                // index of the last presented image, or -1 if none pending
};
static VkSwapchainKHR_T chain;
52 
// List of Vulkan entry points that get wrapped ("warped") with _libretro
// versions. It is expanded twice with different LIBRETRO_VK_WARP_FUNC
// definitions: here to declare the saved-original (_org) function pointers,
// and again inside vkGet*ProcAddr_libretro to dispatch name lookups.
#define LIBRETRO_VK_WARP_LIST()                                      \
	LIBRETRO_VK_WARP_FUNC(vkCreateInstance);                          \
	LIBRETRO_VK_WARP_FUNC(vkDestroyInstance);                         \
	LIBRETRO_VK_WARP_FUNC(vkCreateDevice);                            \
	LIBRETRO_VK_WARP_FUNC(vkDestroyDevice);                           \
	LIBRETRO_VK_WARP_FUNC(vkGetPhysicalDeviceSurfaceCapabilitiesKHR); \
	LIBRETRO_VK_WARP_FUNC(vkDestroySurfaceKHR);                       \
	LIBRETRO_VK_WARP_FUNC(vkCreateSwapchainKHR);                      \
	LIBRETRO_VK_WARP_FUNC(vkGetSwapchainImagesKHR);                   \
	LIBRETRO_VK_WARP_FUNC(vkAcquireNextImageKHR);                     \
	LIBRETRO_VK_WARP_FUNC(vkQueuePresentKHR);                         \
	LIBRETRO_VK_WARP_FUNC(vkDestroySwapchainKHR);                     \
	LIBRETRO_VK_WARP_FUNC(vkQueueSubmit);                             \
	LIBRETRO_VK_WARP_FUNC(vkQueueWaitIdle);                           \
	LIBRETRO_VK_WARP_FUNC(vkCmdPipelineBarrier);                      \
	LIBRETRO_VK_WARP_FUNC(vkCreateRenderPass);

// First expansion: declare PFN_x x_org storage for each warped function.
#define LIBRETRO_VK_WARP_FUNC(x)                                     \
	PFN_##x x##_org

LIBRETRO_VK_WARP_FUNC(vkGetInstanceProcAddr);
LIBRETRO_VK_WARP_FUNC(vkGetDeviceProcAddr);
LIBRETRO_VK_WARP_LIST();
76 
// Instance creation is a no-op: hand back the instance the frontend already created.
static VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance_libretro(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkInstance *pInstance) {
	*pInstance = vk_init_info.instance;
	return VK_SUCCESS;
}
81 
// Appends `value` to `list` unless an equal string (by content, not pointer)
// is already present.
static void add_name_unique(std::vector<const char *> &list, const char *value) {
	for (size_t i = 0; i < list.size(); i++) {
		if (strcmp(list[i], value) == 0)
			return;  // Already present — nothing to do.
	}
	list.push_back(value);
}
vkCreateDevice_libretro(VkPhysicalDevice physicalDevice,const VkDeviceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDevice * pDevice)89 static VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice_libretro(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
90 	VkDeviceCreateInfo info = *pCreateInfo;
91 	std::vector<const char *> EnabledLayerNames(info.ppEnabledLayerNames, info.ppEnabledLayerNames + info.enabledLayerCount);
92 	std::vector<const char *> EnabledExtensionNames(info.ppEnabledExtensionNames, info.ppEnabledExtensionNames + info.enabledExtensionCount);
93 	VkPhysicalDeviceFeatures EnabledFeatures = *info.pEnabledFeatures;
94 
95 	for (unsigned i = 0; i < vk_init_info.num_required_device_layers; i++)
96 		add_name_unique(EnabledLayerNames, vk_init_info.required_device_layers[i]);
97 
98 	for (unsigned i = 0; i < vk_init_info.num_required_device_extensions; i++)
99 		add_name_unique(EnabledExtensionNames, vk_init_info.required_device_extensions[i]);
100 
101 	add_name_unique(EnabledExtensionNames, VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME);
102 	for (unsigned i = 0; i < sizeof(VkPhysicalDeviceFeatures) / sizeof(VkBool32); i++) {
103 		if (((VkBool32 *)vk_init_info.required_features)[i])
104 			((VkBool32 *)&EnabledFeatures)[i] = VK_TRUE;
105 	}
106 
107 	for (auto extension_name : EnabledExtensionNames) {
108 		if (!strcmp(extension_name, VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME))
109 			DEDICATED_ALLOCATION = true;
110 	}
111 
112 	info.enabledLayerCount = (uint32_t)EnabledLayerNames.size();
113 	info.ppEnabledLayerNames = info.enabledLayerCount ? EnabledLayerNames.data() : nullptr;
114 	info.enabledExtensionCount = (uint32_t)EnabledExtensionNames.size();
115 	info.ppEnabledExtensionNames = info.enabledExtensionCount ? EnabledExtensionNames.data() : nullptr;
116 	info.pEnabledFeatures = &EnabledFeatures;
117 
118 	return vkCreateDevice_org(physicalDevice, &info, pAllocator, pDevice);
119 }
120 
// Unified replacement for every platform's vkCreate*SurfaceKHR (see the name
// checks in vkGetInstanceProcAddr_libretro): returns the surface the frontend
// gave us instead of creating a new one.
static VKAPI_ATTR VkResult VKAPI_CALL vkCreateLibretroSurfaceKHR(VkInstance instance, const void *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
	*pSurface = vk_init_info.surface;
	return VK_SUCCESS;
}
125 
vkGetPhysicalDeviceSurfaceCapabilitiesKHR_libretro(VkPhysicalDevice physicalDevice,VkSurfaceKHR surface,VkSurfaceCapabilitiesKHR * pSurfaceCapabilities)126 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR_libretro(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR *pSurfaceCapabilities) {
127 	VkResult res = vkGetPhysicalDeviceSurfaceCapabilitiesKHR_org(physicalDevice, surface, pSurfaceCapabilities);
128 	if (res == VK_SUCCESS) {
129       int w = g_Config.iInternalResolution * 480;
130       int h = g_Config.iInternalResolution * 272;
131 
132       pSurfaceCapabilities->minImageExtent.width = w;
133       pSurfaceCapabilities->minImageExtent.height = h;
134       pSurfaceCapabilities->maxImageExtent.width = w;
135       pSurfaceCapabilities->maxImageExtent.height = h;
136       pSurfaceCapabilities->currentExtent.width = w;
137 		pSurfaceCapabilities->currentExtent.height = h;
138 	}
139 	return res;
140 }
141 
MemoryTypeFromProperties(uint32_t typeBits,VkFlags requirements_mask,uint32_t * typeIndex)142 static bool MemoryTypeFromProperties(uint32_t typeBits, VkFlags requirements_mask, uint32_t *typeIndex) {
143 	VkPhysicalDeviceMemoryProperties memory_properties;
144 	vkGetPhysicalDeviceMemoryProperties(vulkan->gpu, &memory_properties);
145 	// Search memtypes to find first index with those properties
146 	for (uint32_t i = 0; i < 32; i++) {
147 		if ((typeBits & 1) == 1) {
148 			// Type is available, does it match user properties?
149 			if ((memory_properties.memoryTypes[i].propertyFlags & requirements_mask) == requirements_mask) {
150 				*typeIndex = i;
151 				return true;
152 			}
153 		}
154 		typeBits >>= 1;
155 	}
156 	// No memory types matched, return failure
157 	return false;
158 }
159 
vkCreateSwapchainKHR_libretro(VkDevice device,const VkSwapchainCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSwapchainKHR * pSwapchain)160 static VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR_libretro(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain) {
161 	uint32_t swapchain_mask = vulkan->get_sync_index_mask(vulkan->handle);
162 
163 	chain.count = 0;
164 	while (swapchain_mask) {
165 		chain.count++;
166 		swapchain_mask >>= 1;
167 	}
168 	assert(chain.count <= VULKAN_MAX_SWAPCHAIN_IMAGES);
169 
170 	for (uint32_t i = 0; i < chain.count; i++) {
171 		{
172 			VkImageCreateInfo info{ VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
173 			info.flags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
174 			info.imageType = VK_IMAGE_TYPE_2D;
175 			info.format = pCreateInfo->imageFormat;
176 			info.extent.width = pCreateInfo->imageExtent.width;
177 			info.extent.height = pCreateInfo->imageExtent.height;
178 			info.extent.depth = 1;
179 			info.mipLevels = 1;
180 			info.arrayLayers = 1;
181 			info.samples = VK_SAMPLE_COUNT_1_BIT;
182 			info.tiling = VK_IMAGE_TILING_OPTIMAL;
183 			info.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
184 			info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
185 
186 			vkCreateImage(device, &info, pAllocator, &chain.images[i].handle);
187 		}
188 
189 		VkMemoryRequirements memreq;
190 		vkGetImageMemoryRequirements(device, chain.images[i].handle, &memreq);
191 
192 		VkMemoryAllocateInfo alloc{ VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
193 		alloc.allocationSize = memreq.size;
194 
195 		VkMemoryDedicatedAllocateInfoKHR dedicated{ VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
196 		if (DEDICATED_ALLOCATION) {
197 			alloc.pNext = &dedicated;
198 			dedicated.image = chain.images[i].handle;
199 		}
200 
201 		MemoryTypeFromProperties(memreq.memoryTypeBits, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, &alloc.memoryTypeIndex);
202 		VkResult res = vkAllocateMemory(device, &alloc, pAllocator, &chain.images[i].memory);
203 		assert(res == VK_SUCCESS);
204 		res = vkBindImageMemory(device, chain.images[i].handle, chain.images[i].memory, 0);
205 		assert(res == VK_SUCCESS);
206 
207 		chain.images[i].retro_image.create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
208 		chain.images[i].retro_image.create_info.image = chain.images[i].handle;
209 		chain.images[i].retro_image.create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
210 		chain.images[i].retro_image.create_info.format = pCreateInfo->imageFormat;
211 		chain.images[i].retro_image.create_info.components = { VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A };
212 		chain.images[i].retro_image.create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
213 		chain.images[i].retro_image.create_info.subresourceRange.layerCount = 1;
214 		chain.images[i].retro_image.create_info.subresourceRange.levelCount = 1;
215 		res = vkCreateImageView(device, &chain.images[i].retro_image.create_info, pAllocator, &chain.images[i].retro_image.image_view);
216 		assert(res == VK_SUCCESS);
217 
218 		chain.images[i].retro_image.image_layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
219 	}
220 
221 	chain.current_index = -1;
222 	*pSwapchain = (VkSwapchainKHR)&chain;
223 
224 	return VK_SUCCESS;
225 }
vkGetSwapchainImagesKHR_libretro(VkDevice device,VkSwapchainKHR swapchain_,uint32_t * pSwapchainImageCount,VkImage * pSwapchainImages)226 static VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR_libretro(VkDevice device, VkSwapchainKHR swapchain_, uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages) {
227 	VkSwapchainKHR_T *swapchain = (VkSwapchainKHR_T *)swapchain_;
228 	if (pSwapchainImages) {
229 		assert(*pSwapchainImageCount <= swapchain->count);
230 		for (int i = 0; i < *pSwapchainImageCount; i++)
231 			pSwapchainImages[i] = swapchain->images[i].handle;
232 	} else
233 		*pSwapchainImageCount = swapchain->count;
234 
235 	return VK_SUCCESS;
236 }
237 
// Maps "acquire" onto the frontend's sync-index machinery: block until the
// frontend says an index is free, then report it. The semaphore, fence and
// timeout parameters are ignored (semaphores are stripped in vkQueueSubmit_libretro).
static VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR_libretro(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
	vulkan->wait_sync_index(vulkan->handle);
	*pImageIndex = vulkan->get_sync_index(vulkan->handle);
#if 0
	vulkan->set_signal_semaphore(vulkan->handle, semaphore);
#endif
	return VK_SUCCESS;
}
246 
vkQueuePresentKHR_libretro(VkQueue queue,const VkPresentInfoKHR * pPresentInfo)247 static VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR_libretro(VkQueue queue, const VkPresentInfoKHR *pPresentInfo) {
248 	VkSwapchainKHR_T *swapchain = (VkSwapchainKHR_T *)pPresentInfo->pSwapchains[0];
249 	std::unique_lock<std::mutex> lock(swapchain->mutex);
250 #if 0
251 	if(chain.current_index >= 0)
252 		chain.condVar.wait(lock);
253 #endif
254 
255 	chain.current_index = pPresentInfo->pImageIndices[0];
256 #if 0
257 	vulkan->set_image(vulkan->handle, &swapchain->images[pPresentInfo->pImageIndices[0]].retro_image, pPresentInfo->waitSemaphoreCount, pPresentInfo->pWaitSemaphores, vulkan->queue_index);
258 #else
259 	vulkan->set_image(vulkan->handle, &swapchain->images[pPresentInfo->pImageIndices[0]].retro_image, 0, nullptr, vulkan->queue_index);
260 #endif
261 	swapchain->condVar.notify_all();
262 
263 	return VK_SUCCESS;
264 }
265 
vk_libretro_wait_for_presentation()266 void vk_libretro_wait_for_presentation() {
267 	std::unique_lock<std::mutex> lock(chain.mutex);
268 	if (chain.current_index < 0)
269 		chain.condVar.wait(lock);
270 #if 0
271 	chain.current_index = -1;
272 	chain.condVar.notify_all();
273 #endif
274 }
275 
// The instance, device and surface are owned by the libretro frontend, so
// destruction from PPSSPP's side is a deliberate no-op.
static VKAPI_ATTR void VKAPI_CALL vkDestroyInstance_libretro(VkInstance instance, const VkAllocationCallbacks *pAllocator) {}
static VKAPI_ATTR void VKAPI_CALL vkDestroyDevice_libretro(VkDevice device, const VkAllocationCallbacks *pAllocator) {}
static VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR_libretro(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks *pAllocator) {}
vkDestroySwapchainKHR_libretro(VkDevice device,VkSwapchainKHR swapchain,const VkAllocationCallbacks * pAllocator)279 static VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR_libretro(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks *pAllocator) {
280 	for (int i = 0; i < chain.count; i++) {
281 		vkDestroyImage(device, chain.images[i].handle, pAllocator);
282 		vkDestroyImageView(device, chain.images[i].retro_image.image_view, pAllocator);
283 		vkFreeMemory(device, chain.images[i].memory, pAllocator);
284 	}
285 
286 	memset(&chain.images, 0x00, sizeof(chain.images));
287 	chain.count = 0;
288 	chain.current_index = -1;
289 }
290 
vkQueueSubmit_libretro(VkQueue queue,uint32_t submitCount,const VkSubmitInfo * pSubmits,VkFence fence)291 VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit_libretro(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence) {
292 	VkResult res = VK_SUCCESS;
293 
294 #if 0
295 	for(int i = 0; i < submitCount; i++)
296 		vulkan->set_command_buffers(vulkan->handle, pSubmits[i].commandBufferCount, pSubmits[i].pCommandBuffers);
297 #else
298 #if 1
299 	for (int i = 0; i < submitCount; i++) {
300 		((VkSubmitInfo *)pSubmits)[i].waitSemaphoreCount = 0;
301 		((VkSubmitInfo *)pSubmits)[i].pWaitSemaphores = nullptr;
302 		((VkSubmitInfo *)pSubmits)[i].signalSemaphoreCount = 0;
303 		((VkSubmitInfo *)pSubmits)[i].pSignalSemaphores = nullptr;
304 	}
305 #endif
306 	vulkan->lock_queue(vulkan->handle);
307 	res = vkQueueSubmit_org(queue, submitCount, pSubmits, fence);
308 	vulkan->unlock_queue(vulkan->handle);
309 #endif
310 
311 	return res;
312 }
313 
// Waits for the queue to go idle while holding the frontend's queue lock,
// since the same queue is also used by the frontend (see lock_queue/unlock_queue).
VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle_libretro(VkQueue queue) {
	vulkan->lock_queue(vulkan->handle);
	VkResult res = vkQueueWaitIdle_org(queue);
	vulkan->unlock_queue(vulkan->handle);
	return res;
}
320 
vkCmdPipelineBarrier_libretro(VkCommandBuffer commandBuffer,VkPipelineStageFlags srcStageMask,VkPipelineStageFlags dstStageMask,VkDependencyFlags dependencyFlags,uint32_t memoryBarrierCount,const VkMemoryBarrier * pMemoryBarriers,uint32_t bufferMemoryBarrierCount,const VkBufferMemoryBarrier * pBufferMemoryBarriers,uint32_t imageMemoryBarrierCount,const VkImageMemoryBarrier * pImageMemoryBarriers)321 VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier_libretro(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
322 	VkImageMemoryBarrier *barriers = (VkImageMemoryBarrier *)pImageMemoryBarriers;
323 	for (int i = 0; i < imageMemoryBarrierCount; i++) {
324 		if (pImageMemoryBarriers[i].oldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
325 			barriers[i].oldLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
326 			barriers[i].srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
327 		}
328 		if (pImageMemoryBarriers[i].newLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
329 			barriers[i].newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
330 			barriers[i].dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
331 		}
332 	}
333 	return vkCmdPipelineBarrier_org(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, barriers);
334 }
335 
vkCreateRenderPass_libretro(VkDevice device,const VkRenderPassCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkRenderPass * pRenderPass)336 VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass_libretro(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) {
337 	if (pCreateInfo->pAttachments[0].finalLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
338 		((VkAttachmentDescription *)pCreateInfo->pAttachments)[0].finalLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
339 
340 	return vkCreateRenderPass_org(device, pCreateInfo, pAllocator, pRenderPass);
341 }
342 
#undef LIBRETRO_VK_WARP_FUNC
// Second expansion of the warp list, used inside vkGet*ProcAddr_libretro below:
// when the requested name matches, save the real entry point in x_org and hand
// back our x_libretro wrapper. Relies on `pName` and `fptr` from the enclosing
// function's scope.
#define LIBRETRO_VK_WARP_FUNC(x)                    \
	if (!strcmp(pName, #x)) {                     \
		x##_org = (PFN_##x)fptr;                   \
		return (PFN_vkVoidFunction)x##_libretro;   \
	}
349 
// Instance-level proc-addr resolver installed over the loader's. Every
// platform-specific surface constructor is answered with our stub (which
// returns the frontend's surface), and each entry point listed in
// LIBRETRO_VK_WARP_LIST is swapped for its _libretro wrapper, saving the real
// pointer in the matching _org variable.
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr_libretro(VkInstance instance, const char *pName) {
	if (false
#ifdef _WIN32
		 || !strcmp(pName, "vkCreateWin32SurfaceKHR")
#endif
#ifdef __ANDROID__
		 || !strcmp(pName, "vkCreateAndroidSurfaceKHR")
#endif
#ifdef VK_USE_PLATFORM_XLIB_KHR
		 || !strcmp(pName, "vkCreateXlibSurfaceKHR")
#endif
#ifdef VK_USE_PLATFORM_XCB_KHR
		 || !strcmp(pName, "vkCreateXcbSurfaceKHR")
#endif
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
		 || !strcmp(pName, "vkCreateWaylandSurfaceKHR")
#endif
#ifdef VK_USE_PLATFORM_DISPLAY_KHR
		 || !strcmp(pName, "vkCreateDisplayPlaneSurfaceKHR")
#endif
	) {
		return (PFN_vkVoidFunction)vkCreateLibretroSurfaceKHR;
	}

	PFN_vkVoidFunction fptr = vkGetInstanceProcAddr_org(instance, pName);
   if (!fptr) {
      ERROR_LOG(G3D, "Failed to load VK instance function: %s", pName);
      return fptr;
   }

	// May return early with a _libretro wrapper; see LIBRETRO_VK_WARP_FUNC above.
	LIBRETRO_VK_WARP_LIST();

	return fptr;
}
384 
// Device-level resolver: same warp treatment as the instance resolver, but
// missing functions are returned as null without logging.
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr_libretro(VkDevice device, const char *pName) {
	PFN_vkVoidFunction fptr = vkGetDeviceProcAddr_org(device, pName);
	if (!fptr)
		return fptr;

	// May return early with a _libretro wrapper; see LIBRETRO_VK_WARP_FUNC above.
	LIBRETRO_VK_WARP_LIST();

	return fptr;
}
394 
vk_libretro_init(VkInstance instance,VkPhysicalDevice gpu,VkSurfaceKHR surface,PFN_vkGetInstanceProcAddr get_instance_proc_addr,const char ** required_device_extensions,unsigned num_required_device_extensions,const char ** required_device_layers,unsigned num_required_device_layers,const VkPhysicalDeviceFeatures * required_features)395 void vk_libretro_init(VkInstance instance, VkPhysicalDevice gpu, VkSurfaceKHR surface, PFN_vkGetInstanceProcAddr get_instance_proc_addr, const char **required_device_extensions, unsigned num_required_device_extensions, const char **required_device_layers, unsigned num_required_device_layers, const VkPhysicalDeviceFeatures *required_features) {
396 	assert(surface);
397 
398 	vk_init_info.instance = instance;
399 	vk_init_info.gpu = gpu;
400 	vk_init_info.surface = surface;
401 	vk_init_info.get_instance_proc_addr = get_instance_proc_addr;
402 	vk_init_info.required_device_extensions = required_device_extensions;
403 	vk_init_info.num_required_device_extensions = num_required_device_extensions;
404 	vk_init_info.required_device_layers = required_device_layers;
405 	vk_init_info.num_required_device_layers = num_required_device_layers;
406 	vk_init_info.required_features = required_features;
407 
408 	vkGetInstanceProcAddr_org = vkGetInstanceProcAddr;
409 	vkGetInstanceProcAddr = vkGetInstanceProcAddr_libretro;
410 	vkGetDeviceProcAddr_org = vkGetDeviceProcAddr;
411 	vkGetDeviceProcAddr = vkGetDeviceProcAddr_libretro;
412 	vkCreateInstance = vkCreateInstance_libretro;
413 }
414 
// Stores the frontend's hardware-render interface for use by the warp functions.
void vk_libretro_set_hwrender_interface(retro_hw_render_interface *hw_render_interface) {
   vulkan = (retro_hw_render_interface_vulkan *)hw_render_interface;
}
418 
vk_libretro_shutdown()419 void vk_libretro_shutdown() {
420 	memset(&vk_init_info, 0, sizeof(vk_init_info));
421 	vulkan = nullptr;
422 	DEDICATED_ALLOCATION = false;
423 }
424