1 // Copyright 2018 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 //
5 // This file is auto-generated from
6 // gpu/vulkan/generate_bindings.py
7 // It's formatted by clang-format using chromium coding style:
8 // clang-format -i -style=chromium filename
9 // DO NOT EDIT!
10
11 #ifndef GPU_VULKAN_VULKAN_FUNCTION_POINTERS_H_
12 #define GPU_VULKAN_VULKAN_FUNCTION_POINTERS_H_
13
14 #include <vulkan/vulkan.h>
15
16 #include "base/compiler_specific.h"
17 #include "base/component_export.h"
18 #include "base/native_library.h"
19 #include "build/build_config.h"
20 #include "ui/gfx/extension_set.h"
21
22 #if defined(OS_ANDROID)
23 #include <vulkan/vulkan_android.h>
24 #endif
25
26 #if defined(OS_FUCHSIA)
27 #include <zircon/types.h>
28 // <vulkan/vulkan_fuchsia.h> must be included after <zircon/types.h>
29 #include <vulkan/vulkan_fuchsia.h>
30
31 #include "gpu/vulkan/fuchsia/vulkan_fuchsia_ext.h"
32 #endif
33
34 #if defined(USE_VULKAN_XCB)
35 #include <xcb/xcb.h>
36 // <vulkan/vulkan_xcb.h> must be included after <xcb/xcb.h>
37 #include <vulkan/vulkan_xcb.h>
38 #endif
39
40 #if defined(OS_WIN)
41 #include <vulkan/vulkan_win32.h>
42 #endif
43
44 namespace gpu {
45
46 struct VulkanFunctionPointers;
47
48 constexpr uint32_t kVulkanRequiredApiVersion = VK_API_VERSION_1_1;
49
50 COMPONENT_EXPORT(VULKAN) VulkanFunctionPointers* GetVulkanFunctionPointers();
51
// Table of Vulkan entry points resolved at runtime from the Vulkan loader
// library. Populated in three stages: loader-level (unassociated) functions
// first, then instance-level functions, then device-level functions.
// Retrieved process-wide via gpu::GetVulkanFunctionPointers().
struct COMPONENT_EXPORT(VULKAN) VulkanFunctionPointers {
  VulkanFunctionPointers();
  ~VulkanFunctionPointers();

  // Binds the loader-level entry points (vkGetInstanceProcAddr and the
  // pre-instance functions below). Returns false if a required symbol is
  // missing.
  bool BindUnassociatedFunctionPointers();

  // These functions assume that vkGetInstanceProcAddr has been populated.
  bool BindInstanceFunctionPointers(
      VkInstance vk_instance,
      uint32_t api_version,
      const gfx::ExtensionSet& enabled_extensions);

  // These functions assume that vkGetDeviceProcAddr has been populated.
  bool BindDeviceFunctionPointers(VkDevice vk_device,
                                  uint32_t api_version,
                                  const gfx::ExtensionSet& enabled_extensions);

  // Handle to the dynamically loaded Vulkan loader (e.g. libvulkan.so);
  // remains null if loading failed.
  base::NativeLibrary vulkan_loader_library = nullptr;

  // Thin wrapper around a Vulkan function pointer. Calls go through
  // operator(), which is annotated NO_SANITIZE("cfi-icall") because the
  // target is resolved dynamically and cannot be verified by CFI.
  template <typename T>
  class VulkanFunction;
  template <typename R, typename... Args>
  class VulkanFunction<R(VKAPI_PTR*)(Args...)> {
   public:
    using Fn = R(VKAPI_PTR*)(Args...);

    // True once a non-null function pointer has been bound.
    explicit operator bool() const { return !!fn_; }

    NO_SANITIZE("cfi-icall")
    R operator()(Args... args) const { return fn_(args...); }

    Fn get() const { return fn_; }

   private:
    // Assignment is private: only VulkanFunctionPointers may (re)bind the
    // underlying pointer during the Bind*FunctionPointers() phases.
    friend VulkanFunctionPointers;

    Fn operator=(Fn fn) {
      fn_ = fn;
      return fn_;
    }

    Fn fn_ = nullptr;
  };

  // Unassociated functions
  VulkanFunction<PFN_vkGetInstanceProcAddr> vkGetInstanceProcAddr;

  VulkanFunction<PFN_vkEnumerateInstanceVersion> vkEnumerateInstanceVersion;
  VulkanFunction<PFN_vkCreateInstance> vkCreateInstance;
  VulkanFunction<PFN_vkEnumerateInstanceExtensionProperties>
      vkEnumerateInstanceExtensionProperties;
  VulkanFunction<PFN_vkEnumerateInstanceLayerProperties>
      vkEnumerateInstanceLayerProperties;

  // Instance functions
  VulkanFunction<PFN_vkCreateDevice> vkCreateDevice;
  VulkanFunction<PFN_vkDestroyInstance> vkDestroyInstance;
  VulkanFunction<PFN_vkEnumerateDeviceExtensionProperties>
      vkEnumerateDeviceExtensionProperties;
  VulkanFunction<PFN_vkEnumerateDeviceLayerProperties>
      vkEnumerateDeviceLayerProperties;
  VulkanFunction<PFN_vkEnumeratePhysicalDevices> vkEnumeratePhysicalDevices;
  VulkanFunction<PFN_vkGetDeviceProcAddr> vkGetDeviceProcAddr;
  VulkanFunction<PFN_vkGetPhysicalDeviceFeatures2> vkGetPhysicalDeviceFeatures2;
  VulkanFunction<PFN_vkGetPhysicalDeviceFormatProperties>
      vkGetPhysicalDeviceFormatProperties;
  VulkanFunction<PFN_vkGetPhysicalDeviceImageFormatProperties2>
      vkGetPhysicalDeviceImageFormatProperties2;
  VulkanFunction<PFN_vkGetPhysicalDeviceMemoryProperties>
      vkGetPhysicalDeviceMemoryProperties;
  VulkanFunction<PFN_vkGetPhysicalDeviceMemoryProperties2>
      vkGetPhysicalDeviceMemoryProperties2;
  VulkanFunction<PFN_vkGetPhysicalDeviceProperties>
      vkGetPhysicalDeviceProperties;
  VulkanFunction<PFN_vkGetPhysicalDeviceQueueFamilyProperties>
      vkGetPhysicalDeviceQueueFamilyProperties;

  // Debug-report callbacks are only bound in DCHECK-enabled builds.
#if DCHECK_IS_ON()
  VulkanFunction<PFN_vkCreateDebugReportCallbackEXT>
      vkCreateDebugReportCallbackEXT;
  VulkanFunction<PFN_vkDestroyDebugReportCallbackEXT>
      vkDestroyDebugReportCallbackEXT;
#endif  // DCHECK_IS_ON()

  VulkanFunction<PFN_vkDestroySurfaceKHR> vkDestroySurfaceKHR;
  VulkanFunction<PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR>
      vkGetPhysicalDeviceSurfaceCapabilitiesKHR;
  VulkanFunction<PFN_vkGetPhysicalDeviceSurfaceFormatsKHR>
      vkGetPhysicalDeviceSurfaceFormatsKHR;
  VulkanFunction<PFN_vkGetPhysicalDeviceSurfaceSupportKHR>
      vkGetPhysicalDeviceSurfaceSupportKHR;

#if defined(USE_VULKAN_XCB)
  VulkanFunction<PFN_vkCreateXcbSurfaceKHR> vkCreateXcbSurfaceKHR;
  VulkanFunction<PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR>
      vkGetPhysicalDeviceXcbPresentationSupportKHR;
#endif  // defined(USE_VULKAN_XCB)

#if defined(OS_WIN)
  VulkanFunction<PFN_vkCreateWin32SurfaceKHR> vkCreateWin32SurfaceKHR;
  VulkanFunction<PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR>
      vkGetPhysicalDeviceWin32PresentationSupportKHR;
#endif  // defined(OS_WIN)

#if defined(OS_ANDROID)
  VulkanFunction<PFN_vkCreateAndroidSurfaceKHR> vkCreateAndroidSurfaceKHR;
#endif  // defined(OS_ANDROID)

#if defined(OS_FUCHSIA)
  VulkanFunction<PFN_vkCreateImagePipeSurfaceFUCHSIA>
      vkCreateImagePipeSurfaceFUCHSIA;
#endif  // defined(OS_FUCHSIA)

  // Device functions
  VulkanFunction<PFN_vkAllocateCommandBuffers> vkAllocateCommandBuffers;
  VulkanFunction<PFN_vkAllocateDescriptorSets> vkAllocateDescriptorSets;
  VulkanFunction<PFN_vkAllocateMemory> vkAllocateMemory;
  VulkanFunction<PFN_vkBeginCommandBuffer> vkBeginCommandBuffer;
  VulkanFunction<PFN_vkBindBufferMemory> vkBindBufferMemory;
  VulkanFunction<PFN_vkBindBufferMemory2> vkBindBufferMemory2;
  VulkanFunction<PFN_vkBindImageMemory> vkBindImageMemory;
  VulkanFunction<PFN_vkBindImageMemory2> vkBindImageMemory2;
  VulkanFunction<PFN_vkCmdBeginRenderPass> vkCmdBeginRenderPass;
  VulkanFunction<PFN_vkCmdCopyBuffer> vkCmdCopyBuffer;
  VulkanFunction<PFN_vkCmdCopyBufferToImage> vkCmdCopyBufferToImage;
  VulkanFunction<PFN_vkCmdEndRenderPass> vkCmdEndRenderPass;
  VulkanFunction<PFN_vkCmdExecuteCommands> vkCmdExecuteCommands;
  VulkanFunction<PFN_vkCmdNextSubpass> vkCmdNextSubpass;
  VulkanFunction<PFN_vkCmdPipelineBarrier> vkCmdPipelineBarrier;
  VulkanFunction<PFN_vkCreateBuffer> vkCreateBuffer;
  VulkanFunction<PFN_vkCreateCommandPool> vkCreateCommandPool;
  VulkanFunction<PFN_vkCreateDescriptorPool> vkCreateDescriptorPool;
  VulkanFunction<PFN_vkCreateDescriptorSetLayout> vkCreateDescriptorSetLayout;
  VulkanFunction<PFN_vkCreateFence> vkCreateFence;
  VulkanFunction<PFN_vkCreateFramebuffer> vkCreateFramebuffer;
  VulkanFunction<PFN_vkCreateImage> vkCreateImage;
  VulkanFunction<PFN_vkCreateImageView> vkCreateImageView;
  VulkanFunction<PFN_vkCreateRenderPass> vkCreateRenderPass;
  VulkanFunction<PFN_vkCreateSampler> vkCreateSampler;
  VulkanFunction<PFN_vkCreateSemaphore> vkCreateSemaphore;
  VulkanFunction<PFN_vkCreateShaderModule> vkCreateShaderModule;
  VulkanFunction<PFN_vkDestroyBuffer> vkDestroyBuffer;
  VulkanFunction<PFN_vkDestroyCommandPool> vkDestroyCommandPool;
  VulkanFunction<PFN_vkDestroyDescriptorPool> vkDestroyDescriptorPool;
  VulkanFunction<PFN_vkDestroyDescriptorSetLayout> vkDestroyDescriptorSetLayout;
  VulkanFunction<PFN_vkDestroyDevice> vkDestroyDevice;
  VulkanFunction<PFN_vkDestroyFence> vkDestroyFence;
  VulkanFunction<PFN_vkDestroyFramebuffer> vkDestroyFramebuffer;
  VulkanFunction<PFN_vkDestroyImage> vkDestroyImage;
  VulkanFunction<PFN_vkDestroyImageView> vkDestroyImageView;
  VulkanFunction<PFN_vkDestroyRenderPass> vkDestroyRenderPass;
  VulkanFunction<PFN_vkDestroySampler> vkDestroySampler;
  VulkanFunction<PFN_vkDestroySemaphore> vkDestroySemaphore;
  VulkanFunction<PFN_vkDestroyShaderModule> vkDestroyShaderModule;
  VulkanFunction<PFN_vkDeviceWaitIdle> vkDeviceWaitIdle;
  VulkanFunction<PFN_vkFlushMappedMemoryRanges> vkFlushMappedMemoryRanges;
  VulkanFunction<PFN_vkEndCommandBuffer> vkEndCommandBuffer;
  VulkanFunction<PFN_vkFreeCommandBuffers> vkFreeCommandBuffers;
  VulkanFunction<PFN_vkFreeDescriptorSets> vkFreeDescriptorSets;
  VulkanFunction<PFN_vkFreeMemory> vkFreeMemory;
  VulkanFunction<PFN_vkInvalidateMappedMemoryRanges>
      vkInvalidateMappedMemoryRanges;
  VulkanFunction<PFN_vkGetBufferMemoryRequirements>
      vkGetBufferMemoryRequirements;
  VulkanFunction<PFN_vkGetBufferMemoryRequirements2>
      vkGetBufferMemoryRequirements2;
  VulkanFunction<PFN_vkGetDeviceQueue> vkGetDeviceQueue;
  VulkanFunction<PFN_vkGetDeviceQueue2> vkGetDeviceQueue2;
  VulkanFunction<PFN_vkGetFenceStatus> vkGetFenceStatus;
  VulkanFunction<PFN_vkGetImageMemoryRequirements> vkGetImageMemoryRequirements;
  VulkanFunction<PFN_vkGetImageMemoryRequirements2>
      vkGetImageMemoryRequirements2;
  VulkanFunction<PFN_vkMapMemory> vkMapMemory;
  VulkanFunction<PFN_vkQueueSubmit> vkQueueSubmit;
  VulkanFunction<PFN_vkQueueWaitIdle> vkQueueWaitIdle;
  VulkanFunction<PFN_vkResetCommandBuffer> vkResetCommandBuffer;
  VulkanFunction<PFN_vkResetFences> vkResetFences;
  VulkanFunction<PFN_vkUnmapMemory> vkUnmapMemory;
  VulkanFunction<PFN_vkUpdateDescriptorSets> vkUpdateDescriptorSets;
  VulkanFunction<PFN_vkWaitForFences> vkWaitForFences;

#if defined(OS_ANDROID)
  VulkanFunction<PFN_vkGetAndroidHardwareBufferPropertiesANDROID>
      vkGetAndroidHardwareBufferPropertiesANDROID;
#endif  // defined(OS_ANDROID)

  // External-semaphore export/import via POSIX fds (VK_KHR_external_semaphore_fd).
#if defined(OS_LINUX) || defined(OS_CHROMEOS) || defined(OS_ANDROID) || defined(OS_BSD)
  VulkanFunction<PFN_vkGetSemaphoreFdKHR> vkGetSemaphoreFdKHR;
  VulkanFunction<PFN_vkImportSemaphoreFdKHR> vkImportSemaphoreFdKHR;
#endif  // defined(OS_LINUX) || defined(OS_CHROMEOS) || defined(OS_ANDROID) || defined(OS_BSD)

#if defined(OS_WIN)
  VulkanFunction<PFN_vkGetSemaphoreWin32HandleKHR> vkGetSemaphoreWin32HandleKHR;
  VulkanFunction<PFN_vkImportSemaphoreWin32HandleKHR>
      vkImportSemaphoreWin32HandleKHR;
#endif  // defined(OS_WIN)

  // External-memory export via POSIX fds (VK_KHR_external_memory_fd).
#if defined(OS_LINUX) || defined(OS_CHROMEOS) || defined(OS_ANDROID) || defined(OS_BSD)
  VulkanFunction<PFN_vkGetMemoryFdKHR> vkGetMemoryFdKHR;
  VulkanFunction<PFN_vkGetMemoryFdPropertiesKHR> vkGetMemoryFdPropertiesKHR;
#endif  // defined(OS_LINUX) || defined(OS_CHROMEOS) || defined(OS_ANDROID) || defined(OS_BSD)

#if defined(OS_WIN)
  VulkanFunction<PFN_vkGetMemoryWin32HandleKHR> vkGetMemoryWin32HandleKHR;
  VulkanFunction<PFN_vkGetMemoryWin32HandlePropertiesKHR>
      vkGetMemoryWin32HandlePropertiesKHR;
#endif  // defined(OS_WIN)

#if defined(OS_FUCHSIA)
  VulkanFunction<PFN_vkImportSemaphoreZirconHandleFUCHSIA>
      vkImportSemaphoreZirconHandleFUCHSIA;
  VulkanFunction<PFN_vkGetSemaphoreZirconHandleFUCHSIA>
      vkGetSemaphoreZirconHandleFUCHSIA;
#endif  // defined(OS_FUCHSIA)

#if defined(OS_FUCHSIA)
  VulkanFunction<PFN_vkGetMemoryZirconHandleFUCHSIA>
      vkGetMemoryZirconHandleFUCHSIA;
#endif  // defined(OS_FUCHSIA)

#if defined(OS_FUCHSIA)
  VulkanFunction<PFN_vkCreateBufferCollectionFUCHSIA>
      vkCreateBufferCollectionFUCHSIA;
  VulkanFunction<PFN_vkSetBufferCollectionConstraintsFUCHSIA>
      vkSetBufferCollectionConstraintsFUCHSIA;
  VulkanFunction<PFN_vkGetBufferCollectionPropertiesFUCHSIA>
      vkGetBufferCollectionPropertiesFUCHSIA;
  VulkanFunction<PFN_vkDestroyBufferCollectionFUCHSIA>
      vkDestroyBufferCollectionFUCHSIA;
#endif  // defined(OS_FUCHSIA)

  // Swapchain functions (VK_KHR_swapchain).
  VulkanFunction<PFN_vkAcquireNextImageKHR> vkAcquireNextImageKHR;
  VulkanFunction<PFN_vkCreateSwapchainKHR> vkCreateSwapchainKHR;
  VulkanFunction<PFN_vkDestroySwapchainKHR> vkDestroySwapchainKHR;
  VulkanFunction<PFN_vkGetSwapchainImagesKHR> vkGetSwapchainImagesKHR;
  VulkanFunction<PFN_vkQueuePresentKHR> vkQueuePresentKHR;
};
289
290 } // namespace gpu
291
292 // Unassociated functions
vkGetInstanceProcAddr(VkInstance instance,const char * pName)293 ALWAYS_INLINE PFN_vkVoidFunction vkGetInstanceProcAddr(VkInstance instance,
294 const char* pName) {
295 return gpu::GetVulkanFunctionPointers()->vkGetInstanceProcAddr(instance,
296 pName);
297 }
298
vkEnumerateInstanceVersion(uint32_t * pApiVersion)299 ALWAYS_INLINE VkResult vkEnumerateInstanceVersion(uint32_t* pApiVersion) {
300 return gpu::GetVulkanFunctionPointers()->vkEnumerateInstanceVersion(
301 pApiVersion);
302 }
vkCreateInstance(const VkInstanceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkInstance * pInstance)303 ALWAYS_INLINE VkResult vkCreateInstance(const VkInstanceCreateInfo* pCreateInfo,
304 const VkAllocationCallbacks* pAllocator,
305 VkInstance* pInstance) {
306 return gpu::GetVulkanFunctionPointers()->vkCreateInstance(
307 pCreateInfo, pAllocator, pInstance);
308 }
309 ALWAYS_INLINE VkResult
vkEnumerateInstanceExtensionProperties(const char * pLayerName,uint32_t * pPropertyCount,VkExtensionProperties * pProperties)310 vkEnumerateInstanceExtensionProperties(const char* pLayerName,
311 uint32_t* pPropertyCount,
312 VkExtensionProperties* pProperties) {
313 return gpu::GetVulkanFunctionPointers()
314 ->vkEnumerateInstanceExtensionProperties(pLayerName, pPropertyCount,
315 pProperties);
316 }
317 ALWAYS_INLINE VkResult
vkEnumerateInstanceLayerProperties(uint32_t * pPropertyCount,VkLayerProperties * pProperties)318 vkEnumerateInstanceLayerProperties(uint32_t* pPropertyCount,
319 VkLayerProperties* pProperties) {
320 return gpu::GetVulkanFunctionPointers()->vkEnumerateInstanceLayerProperties(
321 pPropertyCount, pProperties);
322 }
323
324 // Instance functions
vkCreateDevice(VkPhysicalDevice physicalDevice,const VkDeviceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDevice * pDevice)325 ALWAYS_INLINE VkResult vkCreateDevice(VkPhysicalDevice physicalDevice,
326 const VkDeviceCreateInfo* pCreateInfo,
327 const VkAllocationCallbacks* pAllocator,
328 VkDevice* pDevice) {
329 return gpu::GetVulkanFunctionPointers()->vkCreateDevice(
330 physicalDevice, pCreateInfo, pAllocator, pDevice);
331 }
vkDestroyInstance(VkInstance instance,const VkAllocationCallbacks * pAllocator)332 ALWAYS_INLINE void vkDestroyInstance(VkInstance instance,
333 const VkAllocationCallbacks* pAllocator) {
334 return gpu::GetVulkanFunctionPointers()->vkDestroyInstance(instance,
335 pAllocator);
336 }
337 ALWAYS_INLINE VkResult
vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,const char * pLayerName,uint32_t * pPropertyCount,VkExtensionProperties * pProperties)338 vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
339 const char* pLayerName,
340 uint32_t* pPropertyCount,
341 VkExtensionProperties* pProperties) {
342 return gpu::GetVulkanFunctionPointers()->vkEnumerateDeviceExtensionProperties(
343 physicalDevice, pLayerName, pPropertyCount, pProperties);
344 }
345 ALWAYS_INLINE VkResult
vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice,uint32_t * pPropertyCount,VkLayerProperties * pProperties)346 vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice,
347 uint32_t* pPropertyCount,
348 VkLayerProperties* pProperties) {
349 return gpu::GetVulkanFunctionPointers()->vkEnumerateDeviceLayerProperties(
350 physicalDevice, pPropertyCount, pProperties);
351 }
352 ALWAYS_INLINE VkResult
vkEnumeratePhysicalDevices(VkInstance instance,uint32_t * pPhysicalDeviceCount,VkPhysicalDevice * pPhysicalDevices)353 vkEnumeratePhysicalDevices(VkInstance instance,
354 uint32_t* pPhysicalDeviceCount,
355 VkPhysicalDevice* pPhysicalDevices) {
356 return gpu::GetVulkanFunctionPointers()->vkEnumeratePhysicalDevices(
357 instance, pPhysicalDeviceCount, pPhysicalDevices);
358 }
vkGetDeviceProcAddr(VkDevice device,const char * pName)359 ALWAYS_INLINE PFN_vkVoidFunction vkGetDeviceProcAddr(VkDevice device,
360 const char* pName) {
361 return gpu::GetVulkanFunctionPointers()->vkGetDeviceProcAddr(device, pName);
362 }
vkGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,VkPhysicalDeviceFeatures2 * pFeatures)363 ALWAYS_INLINE void vkGetPhysicalDeviceFeatures2(
364 VkPhysicalDevice physicalDevice,
365 VkPhysicalDeviceFeatures2* pFeatures) {
366 return gpu::GetVulkanFunctionPointers()->vkGetPhysicalDeviceFeatures2(
367 physicalDevice, pFeatures);
368 }
vkGetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice,VkFormat format,VkFormatProperties * pFormatProperties)369 ALWAYS_INLINE void vkGetPhysicalDeviceFormatProperties(
370 VkPhysicalDevice physicalDevice,
371 VkFormat format,
372 VkFormatProperties* pFormatProperties) {
373 return gpu::GetVulkanFunctionPointers()->vkGetPhysicalDeviceFormatProperties(
374 physicalDevice, format, pFormatProperties);
375 }
vkGetPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice,const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo,VkImageFormatProperties2 * pImageFormatProperties)376 ALWAYS_INLINE VkResult vkGetPhysicalDeviceImageFormatProperties2(
377 VkPhysicalDevice physicalDevice,
378 const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
379 VkImageFormatProperties2* pImageFormatProperties) {
380 return gpu::GetVulkanFunctionPointers()
381 ->vkGetPhysicalDeviceImageFormatProperties2(
382 physicalDevice, pImageFormatInfo, pImageFormatProperties);
383 }
vkGetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice,VkPhysicalDeviceMemoryProperties * pMemoryProperties)384 ALWAYS_INLINE void vkGetPhysicalDeviceMemoryProperties(
385 VkPhysicalDevice physicalDevice,
386 VkPhysicalDeviceMemoryProperties* pMemoryProperties) {
387 return gpu::GetVulkanFunctionPointers()->vkGetPhysicalDeviceMemoryProperties(
388 physicalDevice, pMemoryProperties);
389 }
vkGetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice,VkPhysicalDeviceMemoryProperties2 * pMemoryProperties)390 ALWAYS_INLINE void vkGetPhysicalDeviceMemoryProperties2(
391 VkPhysicalDevice physicalDevice,
392 VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
393 return gpu::GetVulkanFunctionPointers()->vkGetPhysicalDeviceMemoryProperties2(
394 physicalDevice, pMemoryProperties);
395 }
vkGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,VkPhysicalDeviceProperties * pProperties)396 ALWAYS_INLINE void vkGetPhysicalDeviceProperties(
397 VkPhysicalDevice physicalDevice,
398 VkPhysicalDeviceProperties* pProperties) {
399 return gpu::GetVulkanFunctionPointers()->vkGetPhysicalDeviceProperties(
400 physicalDevice, pProperties);
401 }
vkGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,uint32_t * pQueueFamilyPropertyCount,VkQueueFamilyProperties * pQueueFamilyProperties)402 ALWAYS_INLINE void vkGetPhysicalDeviceQueueFamilyProperties(
403 VkPhysicalDevice physicalDevice,
404 uint32_t* pQueueFamilyPropertyCount,
405 VkQueueFamilyProperties* pQueueFamilyProperties) {
406 return gpu::GetVulkanFunctionPointers()
407 ->vkGetPhysicalDeviceQueueFamilyProperties(
408 physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
409 }
410
411 #if DCHECK_IS_ON()
vkCreateDebugReportCallbackEXT(VkInstance instance,const VkDebugReportCallbackCreateInfoEXT * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDebugReportCallbackEXT * pCallback)412 ALWAYS_INLINE VkResult vkCreateDebugReportCallbackEXT(
413 VkInstance instance,
414 const VkDebugReportCallbackCreateInfoEXT* pCreateInfo,
415 const VkAllocationCallbacks* pAllocator,
416 VkDebugReportCallbackEXT* pCallback) {
417 return gpu::GetVulkanFunctionPointers()->vkCreateDebugReportCallbackEXT(
418 instance, pCreateInfo, pAllocator, pCallback);
419 }
vkDestroyDebugReportCallbackEXT(VkInstance instance,VkDebugReportCallbackEXT callback,const VkAllocationCallbacks * pAllocator)420 ALWAYS_INLINE void vkDestroyDebugReportCallbackEXT(
421 VkInstance instance,
422 VkDebugReportCallbackEXT callback,
423 const VkAllocationCallbacks* pAllocator) {
424 return gpu::GetVulkanFunctionPointers()->vkDestroyDebugReportCallbackEXT(
425 instance, callback, pAllocator);
426 }
427 #endif // DCHECK_IS_ON()
428
vkDestroySurfaceKHR(VkInstance instance,VkSurfaceKHR surface,const VkAllocationCallbacks * pAllocator)429 ALWAYS_INLINE void vkDestroySurfaceKHR(
430 VkInstance instance,
431 VkSurfaceKHR surface,
432 const VkAllocationCallbacks* pAllocator) {
433 return gpu::GetVulkanFunctionPointers()->vkDestroySurfaceKHR(
434 instance, surface, pAllocator);
435 }
vkGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,VkSurfaceKHR surface,VkSurfaceCapabilitiesKHR * pSurfaceCapabilities)436 ALWAYS_INLINE VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
437 VkPhysicalDevice physicalDevice,
438 VkSurfaceKHR surface,
439 VkSurfaceCapabilitiesKHR* pSurfaceCapabilities) {
440 return gpu::GetVulkanFunctionPointers()
441 ->vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface,
442 pSurfaceCapabilities);
443 }
444 ALWAYS_INLINE VkResult
vkGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice,VkSurfaceKHR surface,uint32_t * pSurfaceFormatCount,VkSurfaceFormatKHR * pSurfaceFormats)445 vkGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice,
446 VkSurfaceKHR surface,
447 uint32_t* pSurfaceFormatCount,
448 VkSurfaceFormatKHR* pSurfaceFormats) {
449 return gpu::GetVulkanFunctionPointers()->vkGetPhysicalDeviceSurfaceFormatsKHR(
450 physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
451 }
452 ALWAYS_INLINE VkResult
vkGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,uint32_t queueFamilyIndex,VkSurfaceKHR surface,VkBool32 * pSupported)453 vkGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
454 uint32_t queueFamilyIndex,
455 VkSurfaceKHR surface,
456 VkBool32* pSupported) {
457 return gpu::GetVulkanFunctionPointers()->vkGetPhysicalDeviceSurfaceSupportKHR(
458 physicalDevice, queueFamilyIndex, surface, pSupported);
459 }
460
461 #if defined(USE_VULKAN_XCB)
462 ALWAYS_INLINE VkResult
vkCreateXcbSurfaceKHR(VkInstance instance,const VkXcbSurfaceCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSurfaceKHR * pSurface)463 vkCreateXcbSurfaceKHR(VkInstance instance,
464 const VkXcbSurfaceCreateInfoKHR* pCreateInfo,
465 const VkAllocationCallbacks* pAllocator,
466 VkSurfaceKHR* pSurface) {
467 return gpu::GetVulkanFunctionPointers()->vkCreateXcbSurfaceKHR(
468 instance, pCreateInfo, pAllocator, pSurface);
469 }
470 ALWAYS_INLINE VkBool32
vkGetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice,uint32_t queueFamilyIndex,xcb_connection_t * connection,xcb_visualid_t visual_id)471 vkGetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice,
472 uint32_t queueFamilyIndex,
473 xcb_connection_t* connection,
474 xcb_visualid_t visual_id) {
475 return gpu::GetVulkanFunctionPointers()
476 ->vkGetPhysicalDeviceXcbPresentationSupportKHR(
477 physicalDevice, queueFamilyIndex, connection, visual_id);
478 }
479 #endif // defined(USE_VULKAN_XCB)
480
481 #if defined(OS_WIN)
482 ALWAYS_INLINE VkResult
vkCreateWin32SurfaceKHR(VkInstance instance,const VkWin32SurfaceCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSurfaceKHR * pSurface)483 vkCreateWin32SurfaceKHR(VkInstance instance,
484 const VkWin32SurfaceCreateInfoKHR* pCreateInfo,
485 const VkAllocationCallbacks* pAllocator,
486 VkSurfaceKHR* pSurface) {
487 return gpu::GetVulkanFunctionPointers()->vkCreateWin32SurfaceKHR(
488 instance, pCreateInfo, pAllocator, pSurface);
489 }
490 ALWAYS_INLINE VkBool32
vkGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice,uint32_t queueFamilyIndex)491 vkGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice,
492 uint32_t queueFamilyIndex) {
493 return gpu::GetVulkanFunctionPointers()
494 ->vkGetPhysicalDeviceWin32PresentationSupportKHR(physicalDevice,
495 queueFamilyIndex);
496 }
497 #endif // defined(OS_WIN)
498
499 #if defined(OS_ANDROID)
500 ALWAYS_INLINE VkResult
vkCreateAndroidSurfaceKHR(VkInstance instance,const VkAndroidSurfaceCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSurfaceKHR * pSurface)501 vkCreateAndroidSurfaceKHR(VkInstance instance,
502 const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
503 const VkAllocationCallbacks* pAllocator,
504 VkSurfaceKHR* pSurface) {
505 return gpu::GetVulkanFunctionPointers()->vkCreateAndroidSurfaceKHR(
506 instance, pCreateInfo, pAllocator, pSurface);
507 }
508 #endif // defined(OS_ANDROID)
509
510 #if defined(OS_FUCHSIA)
vkCreateImagePipeSurfaceFUCHSIA(VkInstance instance,const VkImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSurfaceKHR * pSurface)511 ALWAYS_INLINE VkResult vkCreateImagePipeSurfaceFUCHSIA(
512 VkInstance instance,
513 const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo,
514 const VkAllocationCallbacks* pAllocator,
515 VkSurfaceKHR* pSurface) {
516 return gpu::GetVulkanFunctionPointers()->vkCreateImagePipeSurfaceFUCHSIA(
517 instance, pCreateInfo, pAllocator, pSurface);
518 }
519 #endif // defined(OS_FUCHSIA)
520
521 // Device functions
522 ALWAYS_INLINE VkResult
vkAllocateCommandBuffers(VkDevice device,const VkCommandBufferAllocateInfo * pAllocateInfo,VkCommandBuffer * pCommandBuffers)523 vkAllocateCommandBuffers(VkDevice device,
524 const VkCommandBufferAllocateInfo* pAllocateInfo,
525 VkCommandBuffer* pCommandBuffers) {
526 return gpu::GetVulkanFunctionPointers()->vkAllocateCommandBuffers(
527 device, pAllocateInfo, pCommandBuffers);
528 }
529 ALWAYS_INLINE VkResult
vkAllocateDescriptorSets(VkDevice device,const VkDescriptorSetAllocateInfo * pAllocateInfo,VkDescriptorSet * pDescriptorSets)530 vkAllocateDescriptorSets(VkDevice device,
531 const VkDescriptorSetAllocateInfo* pAllocateInfo,
532 VkDescriptorSet* pDescriptorSets) {
533 return gpu::GetVulkanFunctionPointers()->vkAllocateDescriptorSets(
534 device, pAllocateInfo, pDescriptorSets);
535 }
536 ALWAYS_INLINE VkResult
vkAllocateMemory(VkDevice device,const VkMemoryAllocateInfo * pAllocateInfo,const VkAllocationCallbacks * pAllocator,VkDeviceMemory * pMemory)537 vkAllocateMemory(VkDevice device,
538 const VkMemoryAllocateInfo* pAllocateInfo,
539 const VkAllocationCallbacks* pAllocator,
540 VkDeviceMemory* pMemory) {
541 return gpu::GetVulkanFunctionPointers()->vkAllocateMemory(
542 device, pAllocateInfo, pAllocator, pMemory);
543 }
544 ALWAYS_INLINE VkResult
vkBeginCommandBuffer(VkCommandBuffer commandBuffer,const VkCommandBufferBeginInfo * pBeginInfo)545 vkBeginCommandBuffer(VkCommandBuffer commandBuffer,
546 const VkCommandBufferBeginInfo* pBeginInfo) {
547 return gpu::GetVulkanFunctionPointers()->vkBeginCommandBuffer(commandBuffer,
548 pBeginInfo);
549 }
vkBindBufferMemory(VkDevice device,VkBuffer buffer,VkDeviceMemory memory,VkDeviceSize memoryOffset)550 ALWAYS_INLINE VkResult vkBindBufferMemory(VkDevice device,
551 VkBuffer buffer,
552 VkDeviceMemory memory,
553 VkDeviceSize memoryOffset) {
554 return gpu::GetVulkanFunctionPointers()->vkBindBufferMemory(
555 device, buffer, memory, memoryOffset);
556 }
557 ALWAYS_INLINE VkResult
vkBindBufferMemory2(VkDevice device,uint32_t bindInfoCount,const VkBindBufferMemoryInfo * pBindInfos)558 vkBindBufferMemory2(VkDevice device,
559 uint32_t bindInfoCount,
560 const VkBindBufferMemoryInfo* pBindInfos) {
561 return gpu::GetVulkanFunctionPointers()->vkBindBufferMemory2(
562 device, bindInfoCount, pBindInfos);
563 }
vkBindImageMemory(VkDevice device,VkImage image,VkDeviceMemory memory,VkDeviceSize memoryOffset)564 ALWAYS_INLINE VkResult vkBindImageMemory(VkDevice device,
565 VkImage image,
566 VkDeviceMemory memory,
567 VkDeviceSize memoryOffset) {
568 return gpu::GetVulkanFunctionPointers()->vkBindImageMemory(
569 device, image, memory, memoryOffset);
570 }
571 ALWAYS_INLINE VkResult
vkBindImageMemory2(VkDevice device,uint32_t bindInfoCount,const VkBindImageMemoryInfo * pBindInfos)572 vkBindImageMemory2(VkDevice device,
573 uint32_t bindInfoCount,
574 const VkBindImageMemoryInfo* pBindInfos) {
575 return gpu::GetVulkanFunctionPointers()->vkBindImageMemory2(
576 device, bindInfoCount, pBindInfos);
577 }
vkCmdBeginRenderPass(VkCommandBuffer commandBuffer,const VkRenderPassBeginInfo * pRenderPassBegin,VkSubpassContents contents)578 ALWAYS_INLINE void vkCmdBeginRenderPass(
579 VkCommandBuffer commandBuffer,
580 const VkRenderPassBeginInfo* pRenderPassBegin,
581 VkSubpassContents contents) {
582 return gpu::GetVulkanFunctionPointers()->vkCmdBeginRenderPass(
583 commandBuffer, pRenderPassBegin, contents);
584 }
vkCmdCopyBuffer(VkCommandBuffer commandBuffer,VkBuffer srcBuffer,VkBuffer dstBuffer,uint32_t regionCount,const VkBufferCopy * pRegions)585 ALWAYS_INLINE void vkCmdCopyBuffer(VkCommandBuffer commandBuffer,
586 VkBuffer srcBuffer,
587 VkBuffer dstBuffer,
588 uint32_t regionCount,
589 const VkBufferCopy* pRegions) {
590 return gpu::GetVulkanFunctionPointers()->vkCmdCopyBuffer(
591 commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
592 }
vkCmdCopyBufferToImage(VkCommandBuffer commandBuffer,VkBuffer srcBuffer,VkImage dstImage,VkImageLayout dstImageLayout,uint32_t regionCount,const VkBufferImageCopy * pRegions)593 ALWAYS_INLINE void vkCmdCopyBufferToImage(VkCommandBuffer commandBuffer,
594 VkBuffer srcBuffer,
595 VkImage dstImage,
596 VkImageLayout dstImageLayout,
597 uint32_t regionCount,
598 const VkBufferImageCopy* pRegions) {
599 return gpu::GetVulkanFunctionPointers()->vkCmdCopyBufferToImage(
600 commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount,
601 pRegions);
602 }
vkCmdEndRenderPass(VkCommandBuffer commandBuffer)603 ALWAYS_INLINE void vkCmdEndRenderPass(VkCommandBuffer commandBuffer) {
604 return gpu::GetVulkanFunctionPointers()->vkCmdEndRenderPass(commandBuffer);
605 }
vkCmdExecuteCommands(VkCommandBuffer commandBuffer,uint32_t commandBufferCount,const VkCommandBuffer * pCommandBuffers)606 ALWAYS_INLINE void vkCmdExecuteCommands(
607 VkCommandBuffer commandBuffer,
608 uint32_t commandBufferCount,
609 const VkCommandBuffer* pCommandBuffers) {
610 return gpu::GetVulkanFunctionPointers()->vkCmdExecuteCommands(
611 commandBuffer, commandBufferCount, pCommandBuffers);
612 }
vkCmdNextSubpass(VkCommandBuffer commandBuffer,VkSubpassContents contents)613 ALWAYS_INLINE void vkCmdNextSubpass(VkCommandBuffer commandBuffer,
614 VkSubpassContents contents) {
615 return gpu::GetVulkanFunctionPointers()->vkCmdNextSubpass(commandBuffer,
616 contents);
617 }
vkCmdPipelineBarrier(VkCommandBuffer commandBuffer,VkPipelineStageFlags srcStageMask,VkPipelineStageFlags dstStageMask,VkDependencyFlags dependencyFlags,uint32_t memoryBarrierCount,const VkMemoryBarrier * pMemoryBarriers,uint32_t bufferMemoryBarrierCount,const VkBufferMemoryBarrier * pBufferMemoryBarriers,uint32_t imageMemoryBarrierCount,const VkImageMemoryBarrier * pImageMemoryBarriers)618 ALWAYS_INLINE void vkCmdPipelineBarrier(
619 VkCommandBuffer commandBuffer,
620 VkPipelineStageFlags srcStageMask,
621 VkPipelineStageFlags dstStageMask,
622 VkDependencyFlags dependencyFlags,
623 uint32_t memoryBarrierCount,
624 const VkMemoryBarrier* pMemoryBarriers,
625 uint32_t bufferMemoryBarrierCount,
626 const VkBufferMemoryBarrier* pBufferMemoryBarriers,
627 uint32_t imageMemoryBarrierCount,
628 const VkImageMemoryBarrier* pImageMemoryBarriers) {
629 return gpu::GetVulkanFunctionPointers()->vkCmdPipelineBarrier(
630 commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
631 memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
632 pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
633 }
vkCreateBuffer(VkDevice device,const VkBufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkBuffer * pBuffer)634 ALWAYS_INLINE VkResult vkCreateBuffer(VkDevice device,
635 const VkBufferCreateInfo* pCreateInfo,
636 const VkAllocationCallbacks* pAllocator,
637 VkBuffer* pBuffer) {
638 return gpu::GetVulkanFunctionPointers()->vkCreateBuffer(device, pCreateInfo,
639 pAllocator, pBuffer);
640 }
641 ALWAYS_INLINE VkResult
vkCreateCommandPool(VkDevice device,const VkCommandPoolCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkCommandPool * pCommandPool)642 vkCreateCommandPool(VkDevice device,
643 const VkCommandPoolCreateInfo* pCreateInfo,
644 const VkAllocationCallbacks* pAllocator,
645 VkCommandPool* pCommandPool) {
646 return gpu::GetVulkanFunctionPointers()->vkCreateCommandPool(
647 device, pCreateInfo, pAllocator, pCommandPool);
648 }
649 ALWAYS_INLINE VkResult
vkCreateDescriptorPool(VkDevice device,const VkDescriptorPoolCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorPool * pDescriptorPool)650 vkCreateDescriptorPool(VkDevice device,
651 const VkDescriptorPoolCreateInfo* pCreateInfo,
652 const VkAllocationCallbacks* pAllocator,
653 VkDescriptorPool* pDescriptorPool) {
654 return gpu::GetVulkanFunctionPointers()->vkCreateDescriptorPool(
655 device, pCreateInfo, pAllocator, pDescriptorPool);
656 }
657 ALWAYS_INLINE VkResult
vkCreateDescriptorSetLayout(VkDevice device,const VkDescriptorSetLayoutCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorSetLayout * pSetLayout)658 vkCreateDescriptorSetLayout(VkDevice device,
659 const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
660 const VkAllocationCallbacks* pAllocator,
661 VkDescriptorSetLayout* pSetLayout) {
662 return gpu::GetVulkanFunctionPointers()->vkCreateDescriptorSetLayout(
663 device, pCreateInfo, pAllocator, pSetLayout);
664 }
vkCreateFence(VkDevice device,const VkFenceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkFence * pFence)665 ALWAYS_INLINE VkResult vkCreateFence(VkDevice device,
666 const VkFenceCreateInfo* pCreateInfo,
667 const VkAllocationCallbacks* pAllocator,
668 VkFence* pFence) {
669 return gpu::GetVulkanFunctionPointers()->vkCreateFence(device, pCreateInfo,
670 pAllocator, pFence);
671 }
672 ALWAYS_INLINE VkResult
vkCreateFramebuffer(VkDevice device,const VkFramebufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkFramebuffer * pFramebuffer)673 vkCreateFramebuffer(VkDevice device,
674 const VkFramebufferCreateInfo* pCreateInfo,
675 const VkAllocationCallbacks* pAllocator,
676 VkFramebuffer* pFramebuffer) {
677 return gpu::GetVulkanFunctionPointers()->vkCreateFramebuffer(
678 device, pCreateInfo, pAllocator, pFramebuffer);
679 }
vkCreateImage(VkDevice device,const VkImageCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkImage * pImage)680 ALWAYS_INLINE VkResult vkCreateImage(VkDevice device,
681 const VkImageCreateInfo* pCreateInfo,
682 const VkAllocationCallbacks* pAllocator,
683 VkImage* pImage) {
684 return gpu::GetVulkanFunctionPointers()->vkCreateImage(device, pCreateInfo,
685 pAllocator, pImage);
686 }
687 ALWAYS_INLINE VkResult
vkCreateImageView(VkDevice device,const VkImageViewCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkImageView * pView)688 vkCreateImageView(VkDevice device,
689 const VkImageViewCreateInfo* pCreateInfo,
690 const VkAllocationCallbacks* pAllocator,
691 VkImageView* pView) {
692 return gpu::GetVulkanFunctionPointers()->vkCreateImageView(
693 device, pCreateInfo, pAllocator, pView);
694 }
695 ALWAYS_INLINE VkResult
vkCreateRenderPass(VkDevice device,const VkRenderPassCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkRenderPass * pRenderPass)696 vkCreateRenderPass(VkDevice device,
697 const VkRenderPassCreateInfo* pCreateInfo,
698 const VkAllocationCallbacks* pAllocator,
699 VkRenderPass* pRenderPass) {
700 return gpu::GetVulkanFunctionPointers()->vkCreateRenderPass(
701 device, pCreateInfo, pAllocator, pRenderPass);
702 }
vkCreateSampler(VkDevice device,const VkSamplerCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSampler * pSampler)703 ALWAYS_INLINE VkResult vkCreateSampler(VkDevice device,
704 const VkSamplerCreateInfo* pCreateInfo,
705 const VkAllocationCallbacks* pAllocator,
706 VkSampler* pSampler) {
707 return gpu::GetVulkanFunctionPointers()->vkCreateSampler(
708 device, pCreateInfo, pAllocator, pSampler);
709 }
710 ALWAYS_INLINE VkResult
vkCreateSemaphore(VkDevice device,const VkSemaphoreCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSemaphore * pSemaphore)711 vkCreateSemaphore(VkDevice device,
712 const VkSemaphoreCreateInfo* pCreateInfo,
713 const VkAllocationCallbacks* pAllocator,
714 VkSemaphore* pSemaphore) {
715 return gpu::GetVulkanFunctionPointers()->vkCreateSemaphore(
716 device, pCreateInfo, pAllocator, pSemaphore);
717 }
718 ALWAYS_INLINE VkResult
vkCreateShaderModule(VkDevice device,const VkShaderModuleCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkShaderModule * pShaderModule)719 vkCreateShaderModule(VkDevice device,
720 const VkShaderModuleCreateInfo* pCreateInfo,
721 const VkAllocationCallbacks* pAllocator,
722 VkShaderModule* pShaderModule) {
723 return gpu::GetVulkanFunctionPointers()->vkCreateShaderModule(
724 device, pCreateInfo, pAllocator, pShaderModule);
725 }
vkDestroyBuffer(VkDevice device,VkBuffer buffer,const VkAllocationCallbacks * pAllocator)726 ALWAYS_INLINE void vkDestroyBuffer(VkDevice device,
727 VkBuffer buffer,
728 const VkAllocationCallbacks* pAllocator) {
729 return gpu::GetVulkanFunctionPointers()->vkDestroyBuffer(device, buffer,
730 pAllocator);
731 }
vkDestroyCommandPool(VkDevice device,VkCommandPool commandPool,const VkAllocationCallbacks * pAllocator)732 ALWAYS_INLINE void vkDestroyCommandPool(
733 VkDevice device,
734 VkCommandPool commandPool,
735 const VkAllocationCallbacks* pAllocator) {
736 return gpu::GetVulkanFunctionPointers()->vkDestroyCommandPool(
737 device, commandPool, pAllocator);
738 }
vkDestroyDescriptorPool(VkDevice device,VkDescriptorPool descriptorPool,const VkAllocationCallbacks * pAllocator)739 ALWAYS_INLINE void vkDestroyDescriptorPool(
740 VkDevice device,
741 VkDescriptorPool descriptorPool,
742 const VkAllocationCallbacks* pAllocator) {
743 return gpu::GetVulkanFunctionPointers()->vkDestroyDescriptorPool(
744 device, descriptorPool, pAllocator);
745 }
vkDestroyDescriptorSetLayout(VkDevice device,VkDescriptorSetLayout descriptorSetLayout,const VkAllocationCallbacks * pAllocator)746 ALWAYS_INLINE void vkDestroyDescriptorSetLayout(
747 VkDevice device,
748 VkDescriptorSetLayout descriptorSetLayout,
749 const VkAllocationCallbacks* pAllocator) {
750 return gpu::GetVulkanFunctionPointers()->vkDestroyDescriptorSetLayout(
751 device, descriptorSetLayout, pAllocator);
752 }
vkDestroyDevice(VkDevice device,const VkAllocationCallbacks * pAllocator)753 ALWAYS_INLINE void vkDestroyDevice(VkDevice device,
754 const VkAllocationCallbacks* pAllocator) {
755 return gpu::GetVulkanFunctionPointers()->vkDestroyDevice(device, pAllocator);
756 }
vkDestroyFence(VkDevice device,VkFence fence,const VkAllocationCallbacks * pAllocator)757 ALWAYS_INLINE void vkDestroyFence(VkDevice device,
758 VkFence fence,
759 const VkAllocationCallbacks* pAllocator) {
760 return gpu::GetVulkanFunctionPointers()->vkDestroyFence(device, fence,
761 pAllocator);
762 }
vkDestroyFramebuffer(VkDevice device,VkFramebuffer framebuffer,const VkAllocationCallbacks * pAllocator)763 ALWAYS_INLINE void vkDestroyFramebuffer(
764 VkDevice device,
765 VkFramebuffer framebuffer,
766 const VkAllocationCallbacks* pAllocator) {
767 return gpu::GetVulkanFunctionPointers()->vkDestroyFramebuffer(
768 device, framebuffer, pAllocator);
769 }
vkDestroyImage(VkDevice device,VkImage image,const VkAllocationCallbacks * pAllocator)770 ALWAYS_INLINE void vkDestroyImage(VkDevice device,
771 VkImage image,
772 const VkAllocationCallbacks* pAllocator) {
773 return gpu::GetVulkanFunctionPointers()->vkDestroyImage(device, image,
774 pAllocator);
775 }
vkDestroyImageView(VkDevice device,VkImageView imageView,const VkAllocationCallbacks * pAllocator)776 ALWAYS_INLINE void vkDestroyImageView(VkDevice device,
777 VkImageView imageView,
778 const VkAllocationCallbacks* pAllocator) {
779 return gpu::GetVulkanFunctionPointers()->vkDestroyImageView(device, imageView,
780 pAllocator);
781 }
vkDestroyRenderPass(VkDevice device,VkRenderPass renderPass,const VkAllocationCallbacks * pAllocator)782 ALWAYS_INLINE void vkDestroyRenderPass(
783 VkDevice device,
784 VkRenderPass renderPass,
785 const VkAllocationCallbacks* pAllocator) {
786 return gpu::GetVulkanFunctionPointers()->vkDestroyRenderPass(
787 device, renderPass, pAllocator);
788 }
vkDestroySampler(VkDevice device,VkSampler sampler,const VkAllocationCallbacks * pAllocator)789 ALWAYS_INLINE void vkDestroySampler(VkDevice device,
790 VkSampler sampler,
791 const VkAllocationCallbacks* pAllocator) {
792 return gpu::GetVulkanFunctionPointers()->vkDestroySampler(device, sampler,
793 pAllocator);
794 }
vkDestroySemaphore(VkDevice device,VkSemaphore semaphore,const VkAllocationCallbacks * pAllocator)795 ALWAYS_INLINE void vkDestroySemaphore(VkDevice device,
796 VkSemaphore semaphore,
797 const VkAllocationCallbacks* pAllocator) {
798 return gpu::GetVulkanFunctionPointers()->vkDestroySemaphore(device, semaphore,
799 pAllocator);
800 }
vkDestroyShaderModule(VkDevice device,VkShaderModule shaderModule,const VkAllocationCallbacks * pAllocator)801 ALWAYS_INLINE void vkDestroyShaderModule(
802 VkDevice device,
803 VkShaderModule shaderModule,
804 const VkAllocationCallbacks* pAllocator) {
805 return gpu::GetVulkanFunctionPointers()->vkDestroyShaderModule(
806 device, shaderModule, pAllocator);
807 }
vkDeviceWaitIdle(VkDevice device)808 ALWAYS_INLINE VkResult vkDeviceWaitIdle(VkDevice device) {
809 return gpu::GetVulkanFunctionPointers()->vkDeviceWaitIdle(device);
810 }
811 ALWAYS_INLINE VkResult
vkFlushMappedMemoryRanges(VkDevice device,uint32_t memoryRangeCount,const VkMappedMemoryRange * pMemoryRanges)812 vkFlushMappedMemoryRanges(VkDevice device,
813 uint32_t memoryRangeCount,
814 const VkMappedMemoryRange* pMemoryRanges) {
815 return gpu::GetVulkanFunctionPointers()->vkFlushMappedMemoryRanges(
816 device, memoryRangeCount, pMemoryRanges);
817 }
vkEndCommandBuffer(VkCommandBuffer commandBuffer)818 ALWAYS_INLINE VkResult vkEndCommandBuffer(VkCommandBuffer commandBuffer) {
819 return gpu::GetVulkanFunctionPointers()->vkEndCommandBuffer(commandBuffer);
820 }
vkFreeCommandBuffers(VkDevice device,VkCommandPool commandPool,uint32_t commandBufferCount,const VkCommandBuffer * pCommandBuffers)821 ALWAYS_INLINE void vkFreeCommandBuffers(
822 VkDevice device,
823 VkCommandPool commandPool,
824 uint32_t commandBufferCount,
825 const VkCommandBuffer* pCommandBuffers) {
826 return gpu::GetVulkanFunctionPointers()->vkFreeCommandBuffers(
827 device, commandPool, commandBufferCount, pCommandBuffers);
828 }
829 ALWAYS_INLINE VkResult
vkFreeDescriptorSets(VkDevice device,VkDescriptorPool descriptorPool,uint32_t descriptorSetCount,const VkDescriptorSet * pDescriptorSets)830 vkFreeDescriptorSets(VkDevice device,
831 VkDescriptorPool descriptorPool,
832 uint32_t descriptorSetCount,
833 const VkDescriptorSet* pDescriptorSets) {
834 return gpu::GetVulkanFunctionPointers()->vkFreeDescriptorSets(
835 device, descriptorPool, descriptorSetCount, pDescriptorSets);
836 }
vkFreeMemory(VkDevice device,VkDeviceMemory memory,const VkAllocationCallbacks * pAllocator)837 ALWAYS_INLINE void vkFreeMemory(VkDevice device,
838 VkDeviceMemory memory,
839 const VkAllocationCallbacks* pAllocator) {
840 return gpu::GetVulkanFunctionPointers()->vkFreeMemory(device, memory,
841 pAllocator);
842 }
843 ALWAYS_INLINE VkResult
vkInvalidateMappedMemoryRanges(VkDevice device,uint32_t memoryRangeCount,const VkMappedMemoryRange * pMemoryRanges)844 vkInvalidateMappedMemoryRanges(VkDevice device,
845 uint32_t memoryRangeCount,
846 const VkMappedMemoryRange* pMemoryRanges) {
847 return gpu::GetVulkanFunctionPointers()->vkInvalidateMappedMemoryRanges(
848 device, memoryRangeCount, pMemoryRanges);
849 }
vkGetBufferMemoryRequirements(VkDevice device,VkBuffer buffer,VkMemoryRequirements * pMemoryRequirements)850 ALWAYS_INLINE void vkGetBufferMemoryRequirements(
851 VkDevice device,
852 VkBuffer buffer,
853 VkMemoryRequirements* pMemoryRequirements) {
854 return gpu::GetVulkanFunctionPointers()->vkGetBufferMemoryRequirements(
855 device, buffer, pMemoryRequirements);
856 }
vkGetBufferMemoryRequirements2(VkDevice device,const VkBufferMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)857 ALWAYS_INLINE void vkGetBufferMemoryRequirements2(
858 VkDevice device,
859 const VkBufferMemoryRequirementsInfo2* pInfo,
860 VkMemoryRequirements2* pMemoryRequirements) {
861 return gpu::GetVulkanFunctionPointers()->vkGetBufferMemoryRequirements2(
862 device, pInfo, pMemoryRequirements);
863 }
vkGetDeviceQueue(VkDevice device,uint32_t queueFamilyIndex,uint32_t queueIndex,VkQueue * pQueue)864 ALWAYS_INLINE void vkGetDeviceQueue(VkDevice device,
865 uint32_t queueFamilyIndex,
866 uint32_t queueIndex,
867 VkQueue* pQueue) {
868 return gpu::GetVulkanFunctionPointers()->vkGetDeviceQueue(
869 device, queueFamilyIndex, queueIndex, pQueue);
870 }
vkGetDeviceQueue2(VkDevice device,const VkDeviceQueueInfo2 * pQueueInfo,VkQueue * pQueue)871 ALWAYS_INLINE void vkGetDeviceQueue2(VkDevice device,
872 const VkDeviceQueueInfo2* pQueueInfo,
873 VkQueue* pQueue) {
874 return gpu::GetVulkanFunctionPointers()->vkGetDeviceQueue2(device, pQueueInfo,
875 pQueue);
876 }
vkGetFenceStatus(VkDevice device,VkFence fence)877 ALWAYS_INLINE VkResult vkGetFenceStatus(VkDevice device, VkFence fence) {
878 return gpu::GetVulkanFunctionPointers()->vkGetFenceStatus(device, fence);
879 }
vkGetImageMemoryRequirements(VkDevice device,VkImage image,VkMemoryRequirements * pMemoryRequirements)880 ALWAYS_INLINE void vkGetImageMemoryRequirements(
881 VkDevice device,
882 VkImage image,
883 VkMemoryRequirements* pMemoryRequirements) {
884 return gpu::GetVulkanFunctionPointers()->vkGetImageMemoryRequirements(
885 device, image, pMemoryRequirements);
886 }
vkGetImageMemoryRequirements2(VkDevice device,const VkImageMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)887 ALWAYS_INLINE void vkGetImageMemoryRequirements2(
888 VkDevice device,
889 const VkImageMemoryRequirementsInfo2* pInfo,
890 VkMemoryRequirements2* pMemoryRequirements) {
891 return gpu::GetVulkanFunctionPointers()->vkGetImageMemoryRequirements2(
892 device, pInfo, pMemoryRequirements);
893 }
vkMapMemory(VkDevice device,VkDeviceMemory memory,VkDeviceSize offset,VkDeviceSize size,VkMemoryMapFlags flags,void ** ppData)894 ALWAYS_INLINE VkResult vkMapMemory(VkDevice device,
895 VkDeviceMemory memory,
896 VkDeviceSize offset,
897 VkDeviceSize size,
898 VkMemoryMapFlags flags,
899 void** ppData) {
900 return gpu::GetVulkanFunctionPointers()->vkMapMemory(device, memory, offset,
901 size, flags, ppData);
902 }
vkQueueSubmit(VkQueue queue,uint32_t submitCount,const VkSubmitInfo * pSubmits,VkFence fence)903 ALWAYS_INLINE VkResult vkQueueSubmit(VkQueue queue,
904 uint32_t submitCount,
905 const VkSubmitInfo* pSubmits,
906 VkFence fence) {
907 return gpu::GetVulkanFunctionPointers()->vkQueueSubmit(queue, submitCount,
908 pSubmits, fence);
909 }
vkQueueWaitIdle(VkQueue queue)910 ALWAYS_INLINE VkResult vkQueueWaitIdle(VkQueue queue) {
911 return gpu::GetVulkanFunctionPointers()->vkQueueWaitIdle(queue);
912 }
vkResetCommandBuffer(VkCommandBuffer commandBuffer,VkCommandBufferResetFlags flags)913 ALWAYS_INLINE VkResult vkResetCommandBuffer(VkCommandBuffer commandBuffer,
914 VkCommandBufferResetFlags flags) {
915 return gpu::GetVulkanFunctionPointers()->vkResetCommandBuffer(commandBuffer,
916 flags);
917 }
vkResetFences(VkDevice device,uint32_t fenceCount,const VkFence * pFences)918 ALWAYS_INLINE VkResult vkResetFences(VkDevice device,
919 uint32_t fenceCount,
920 const VkFence* pFences) {
921 return gpu::GetVulkanFunctionPointers()->vkResetFences(device, fenceCount,
922 pFences);
923 }
vkUnmapMemory(VkDevice device,VkDeviceMemory memory)924 ALWAYS_INLINE void vkUnmapMemory(VkDevice device, VkDeviceMemory memory) {
925 return gpu::GetVulkanFunctionPointers()->vkUnmapMemory(device, memory);
926 }
vkUpdateDescriptorSets(VkDevice device,uint32_t descriptorWriteCount,const VkWriteDescriptorSet * pDescriptorWrites,uint32_t descriptorCopyCount,const VkCopyDescriptorSet * pDescriptorCopies)927 ALWAYS_INLINE void vkUpdateDescriptorSets(
928 VkDevice device,
929 uint32_t descriptorWriteCount,
930 const VkWriteDescriptorSet* pDescriptorWrites,
931 uint32_t descriptorCopyCount,
932 const VkCopyDescriptorSet* pDescriptorCopies) {
933 return gpu::GetVulkanFunctionPointers()->vkUpdateDescriptorSets(
934 device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
935 pDescriptorCopies);
936 }
vkWaitForFences(VkDevice device,uint32_t fenceCount,const VkFence * pFences,VkBool32 waitAll,uint64_t timeout)937 ALWAYS_INLINE VkResult vkWaitForFences(VkDevice device,
938 uint32_t fenceCount,
939 const VkFence* pFences,
940 VkBool32 waitAll,
941 uint64_t timeout) {
942 return gpu::GetVulkanFunctionPointers()->vkWaitForFences(
943 device, fenceCount, pFences, waitAll, timeout);
944 }
945
#if defined(OS_ANDROID)
// VK_ANDROID_external_memory_android_hardware_buffer entry point.
ALWAYS_INLINE VkResult vkGetAndroidHardwareBufferPropertiesANDROID(
    VkDevice device,
    const struct AHardwareBuffer* buffer,
    VkAndroidHardwareBufferPropertiesANDROID* pProperties) {
  return gpu::GetVulkanFunctionPointers()
      ->vkGetAndroidHardwareBufferPropertiesANDROID(device, buffer,
                                                    pProperties);
}
#endif  // defined(OS_ANDROID)

#if defined(OS_LINUX) || defined(OS_CHROMEOS) || defined(OS_ANDROID) || \
    defined(OS_BSD)
// VK_KHR_external_semaphore_fd entry points (POSIX fd handles).
ALWAYS_INLINE VkResult
vkGetSemaphoreFdKHR(VkDevice device,
                    const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
                    int* pFd) {
  return gpu::GetVulkanFunctionPointers()->vkGetSemaphoreFdKHR(device,
                                                               pGetFdInfo, pFd);
}
ALWAYS_INLINE VkResult vkImportSemaphoreFdKHR(
    VkDevice device,
    const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) {
  return gpu::GetVulkanFunctionPointers()->vkImportSemaphoreFdKHR(
      device, pImportSemaphoreFdInfo);
}
#endif  // defined(OS_LINUX) || defined(OS_CHROMEOS) || defined(OS_ANDROID) ||
        // defined(OS_BSD)

#if defined(OS_WIN)
// VK_KHR_external_semaphore_win32 entry points.
ALWAYS_INLINE VkResult vkGetSemaphoreWin32HandleKHR(
    VkDevice device,
    const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo,
    HANDLE* pHandle) {
  return gpu::GetVulkanFunctionPointers()->vkGetSemaphoreWin32HandleKHR(
      device, pGetWin32HandleInfo, pHandle);
}
ALWAYS_INLINE VkResult
vkImportSemaphoreWin32HandleKHR(VkDevice device,
                                const VkImportSemaphoreWin32HandleInfoKHR*
                                    pImportSemaphoreWin32HandleInfo) {
  return gpu::GetVulkanFunctionPointers()->vkImportSemaphoreWin32HandleKHR(
      device, pImportSemaphoreWin32HandleInfo);
}
#endif  // defined(OS_WIN)

#if defined(OS_LINUX) || defined(OS_CHROMEOS) || defined(OS_ANDROID) || \
    defined(OS_BSD)
// VK_KHR_external_memory_fd entry points (POSIX fd handles).
ALWAYS_INLINE VkResult vkGetMemoryFdKHR(VkDevice device,
                                        const VkMemoryGetFdInfoKHR* pGetFdInfo,
                                        int* pFd) {
  return gpu::GetVulkanFunctionPointers()->vkGetMemoryFdKHR(device, pGetFdInfo,
                                                            pFd);
}
ALWAYS_INLINE VkResult
vkGetMemoryFdPropertiesKHR(VkDevice device,
                           VkExternalMemoryHandleTypeFlagBits handleType,
                           int fd,
                           VkMemoryFdPropertiesKHR* pMemoryFdProperties) {
  return gpu::GetVulkanFunctionPointers()->vkGetMemoryFdPropertiesKHR(
      device, handleType, fd, pMemoryFdProperties);
}
#endif  // defined(OS_LINUX) || defined(OS_CHROMEOS) || defined(OS_ANDROID) ||
        // defined(OS_BSD)

#if defined(OS_WIN)
// VK_KHR_external_memory_win32 entry points.
ALWAYS_INLINE VkResult vkGetMemoryWin32HandleKHR(
    VkDevice device,
    const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo,
    HANDLE* pHandle) {
  return gpu::GetVulkanFunctionPointers()->vkGetMemoryWin32HandleKHR(
      device, pGetWin32HandleInfo, pHandle);
}
ALWAYS_INLINE VkResult vkGetMemoryWin32HandlePropertiesKHR(
    VkDevice device,
    VkExternalMemoryHandleTypeFlagBits handleType,
    HANDLE handle,
    VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties) {
  return gpu::GetVulkanFunctionPointers()->vkGetMemoryWin32HandlePropertiesKHR(
      device, handleType, handle, pMemoryWin32HandleProperties);
}
#endif  // defined(OS_WIN)

// Fuchsia entry points are exposed as macros rather than inline wrappers so
// that argument lists need not be spelled out here; each expands to the
// corresponding member of the VulkanFunctionPointers singleton.
#if defined(OS_FUCHSIA)
#define vkImportSemaphoreZirconHandleFUCHSIA \
  gpu::GetVulkanFunctionPointers()->vkImportSemaphoreZirconHandleFUCHSIA
#define vkGetSemaphoreZirconHandleFUCHSIA \
  gpu::GetVulkanFunctionPointers()->vkGetSemaphoreZirconHandleFUCHSIA
#endif  // defined(OS_FUCHSIA)

#if defined(OS_FUCHSIA)
#define vkGetMemoryZirconHandleFUCHSIA \
  gpu::GetVulkanFunctionPointers()->vkGetMemoryZirconHandleFUCHSIA
#endif  // defined(OS_FUCHSIA)

#if defined(OS_FUCHSIA)
#define vkCreateBufferCollectionFUCHSIA \
  gpu::GetVulkanFunctionPointers()->vkCreateBufferCollectionFUCHSIA
#define vkSetBufferCollectionConstraintsFUCHSIA \
  gpu::GetVulkanFunctionPointers()->vkSetBufferCollectionConstraintsFUCHSIA
#define vkGetBufferCollectionPropertiesFUCHSIA \
  gpu::GetVulkanFunctionPointers()->vkGetBufferCollectionPropertiesFUCHSIA
#define vkDestroyBufferCollectionFUCHSIA \
  gpu::GetVulkanFunctionPointers()->vkDestroyBufferCollectionFUCHSIA
#endif  // defined(OS_FUCHSIA)

vkAcquireNextImageKHR(VkDevice device,VkSwapchainKHR swapchain,uint64_t timeout,VkSemaphore semaphore,VkFence fence,uint32_t * pImageIndex)1048 ALWAYS_INLINE VkResult vkAcquireNextImageKHR(VkDevice device,
1049 VkSwapchainKHR swapchain,
1050 uint64_t timeout,
1051 VkSemaphore semaphore,
1052 VkFence fence,
1053 uint32_t* pImageIndex) {
1054 return gpu::GetVulkanFunctionPointers()->vkAcquireNextImageKHR(
1055 device, swapchain, timeout, semaphore, fence, pImageIndex);
1056 }
1057 ALWAYS_INLINE VkResult
vkCreateSwapchainKHR(VkDevice device,const VkSwapchainCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSwapchainKHR * pSwapchain)1058 vkCreateSwapchainKHR(VkDevice device,
1059 const VkSwapchainCreateInfoKHR* pCreateInfo,
1060 const VkAllocationCallbacks* pAllocator,
1061 VkSwapchainKHR* pSwapchain) {
1062 return gpu::GetVulkanFunctionPointers()->vkCreateSwapchainKHR(
1063 device, pCreateInfo, pAllocator, pSwapchain);
1064 }
vkDestroySwapchainKHR(VkDevice device,VkSwapchainKHR swapchain,const VkAllocationCallbacks * pAllocator)1065 ALWAYS_INLINE void vkDestroySwapchainKHR(
1066 VkDevice device,
1067 VkSwapchainKHR swapchain,
1068 const VkAllocationCallbacks* pAllocator) {
1069 return gpu::GetVulkanFunctionPointers()->vkDestroySwapchainKHR(
1070 device, swapchain, pAllocator);
1071 }
vkGetSwapchainImagesKHR(VkDevice device,VkSwapchainKHR swapchain,uint32_t * pSwapchainImageCount,VkImage * pSwapchainImages)1072 ALWAYS_INLINE VkResult vkGetSwapchainImagesKHR(VkDevice device,
1073 VkSwapchainKHR swapchain,
1074 uint32_t* pSwapchainImageCount,
1075 VkImage* pSwapchainImages) {
1076 return gpu::GetVulkanFunctionPointers()->vkGetSwapchainImagesKHR(
1077 device, swapchain, pSwapchainImageCount, pSwapchainImages);
1078 }
vkQueuePresentKHR(VkQueue queue,const VkPresentInfoKHR * pPresentInfo)1079 ALWAYS_INLINE VkResult vkQueuePresentKHR(VkQueue queue,
1080 const VkPresentInfoKHR* pPresentInfo) {
1081 return gpu::GetVulkanFunctionPointers()->vkQueuePresentKHR(queue,
1082 pPresentInfo);
1083 }
1084
1085 #endif // GPU_VULKAN_VULKAN_FUNCTION_POINTERS_H_
1086