/*
 * Copyright © 2020 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "vk_device.h"

#include "vk_common_entrypoints.h"
#include "vk_instance.h"
#include "vk_log.h"
#include "vk_physical_device.h"
#include "vk_queue.h"
#include "vk_sync.h"
#include "vk_sync_timeline.h"
#include "vk_util.h"
#include "util/debug.h"
#include "util/hash_table.h"
#include "util/ralloc.h"

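/* Picks how timeline semaphores will be implemented on this device, based on
 * the sync types the physical device advertises.  Roughly (see the
 * vk_device_timeline_mode definition in vk_device.h for the authoritative
 * description): NONE means no timeline support at all, EMULATED means
 * timelines are built with vk_sync_timeline on top of binary syncs,
 * ASSISTED means the kernel has timeline objects but wait-before-signal
 * needs help from a submit thread, and NATIVE means the kernel handles
 * everything, including wait-before-signal.
 */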
static enum vk_device_timeline_mode
get_timeline_mode(struct vk_physical_device *physical_device)
{
   if (physical_device->supported_sync_types == NULL)
      return VK_DEVICE_TIMELINE_MODE_NONE;

   const struct vk_sync_type *timeline_type = NULL;
   for (const struct vk_sync_type *const *t =
        physical_device->supported_sync_types; *t; t++) {
      if ((*t)->features & VK_SYNC_FEATURE_TIMELINE) {
         /* We can only have one timeline mode */
         assert(timeline_type == NULL);
         timeline_type = *t;
      }
   }

   if (timeline_type == NULL)
      return VK_DEVICE_TIMELINE_MODE_NONE;

   if (vk_sync_type_is_vk_sync_timeline(timeline_type))
      return VK_DEVICE_TIMELINE_MODE_EMULATED;

   if (timeline_type->features & VK_SYNC_FEATURE_WAIT_BEFORE_SIGNAL)
      return VK_DEVICE_TIMELINE_MODE_NATIVE;

   /* For assisted mode, we require a few additional things of all sync types
    * which may be used as semaphores.
    */
   for (const struct vk_sync_type *const *t =
        physical_device->supported_sync_types; *t; t++) {
      if ((*t)->features & VK_SYNC_FEATURE_GPU_WAIT) {
         assert((*t)->features & VK_SYNC_FEATURE_WAIT_PENDING);
         if ((*t)->features & VK_SYNC_FEATURE_BINARY)
            assert((*t)->features & VK_SYNC_FEATURE_CPU_RESET);
      }
   }

   return VK_DEVICE_TIMELINE_MODE_ASSISTED;
}

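/* Initializes the driver-agnostic portion of a VkDevice.  A driver would
 * typically call this early in its vkCreateDevice implementation, before
 * creating any queues, and pair it with vk_device_finish() in
 * vkDestroyDevice.  A rough sketch (the driver-side type and entrypoint
 * table names here are hypothetical):
 *
 *    struct drv_device *dev = vk_zalloc2(...);
 *    struct vk_device_dispatch_table dispatch_table;
 *    vk_device_dispatch_table_from_entrypoints(&dispatch_table,
 *                                              &drv_device_entrypoints, true);
 *    VkResult result = vk_device_init(&dev->vk, &pdevice->vk, &dispatch_table,
 *                                     pCreateInfo, pAllocator);
 *    if (result != VK_SUCCESS)
 *       return result;
 */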
VkResult
vk_device_init(struct vk_device *device,
               struct vk_physical_device *physical_device,
               const struct vk_device_dispatch_table *dispatch_table,
               const VkDeviceCreateInfo *pCreateInfo,
               const VkAllocationCallbacks *alloc)
{
   memset(device, 0, sizeof(*device));
   vk_object_base_init(device, &device->base, VK_OBJECT_TYPE_DEVICE);
   if (alloc != NULL)
      device->alloc = *alloc;
   else
      device->alloc = physical_device->instance->alloc;

   device->physical = physical_device;

   device->dispatch_table = *dispatch_table;

   /* Add common entrypoints without overwriting driver-provided ones. */
   vk_device_dispatch_table_from_entrypoints(
      &device->dispatch_table, &vk_common_device_entrypoints, false);

   for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
      int idx;
      for (idx = 0; idx < VK_DEVICE_EXTENSION_COUNT; idx++) {
         if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
                    vk_device_extensions[idx].extensionName) == 0)
            break;
      }

      if (idx >= VK_DEVICE_EXTENSION_COUNT)
         return vk_errorf(physical_device, VK_ERROR_EXTENSION_NOT_PRESENT,
                          "%s not supported",
                          pCreateInfo->ppEnabledExtensionNames[i]);

      if (!physical_device->supported_extensions.extensions[idx])
         return vk_errorf(physical_device, VK_ERROR_EXTENSION_NOT_PRESENT,
                          "%s not supported",
                          pCreateInfo->ppEnabledExtensionNames[i]);

#ifdef ANDROID
      if (!vk_android_allowed_device_extensions.extensions[idx])
         return vk_errorf(physical_device, VK_ERROR_EXTENSION_NOT_PRESENT,
                          "%s not supported",
                          pCreateInfo->ppEnabledExtensionNames[i]);
#endif

      device->enabled_extensions.extensions[idx] = true;
   }

   VkResult result =
      vk_physical_device_check_device_features(physical_device,
                                               pCreateInfo);
   if (result != VK_SUCCESS)
      return result;

   p_atomic_set(&device->private_data_next_index, 0);

   list_inithead(&device->queues);

   device->drm_fd = -1;

   device->timeline_mode = get_timeline_mode(physical_device);

   switch (device->timeline_mode) {
   case VK_DEVICE_TIMELINE_MODE_NONE:
   case VK_DEVICE_TIMELINE_MODE_NATIVE:
      device->submit_mode = VK_QUEUE_SUBMIT_MODE_IMMEDIATE;
      break;

   case VK_DEVICE_TIMELINE_MODE_EMULATED:
      device->submit_mode = VK_QUEUE_SUBMIT_MODE_DEFERRED;
      break;

   case VK_DEVICE_TIMELINE_MODE_ASSISTED:
      if (env_var_as_boolean("MESA_VK_ENABLE_SUBMIT_THREAD", false)) {
         device->submit_mode = VK_QUEUE_SUBMIT_MODE_THREADED;
      } else {
         device->submit_mode = VK_QUEUE_SUBMIT_MODE_THREADED_ON_DEMAND;
      }
      break;

   default:
      unreachable("Invalid timeline mode");
   }

#ifdef ANDROID
   mtx_init(&device->swapchain_private_mtx, mtx_plain);
   device->swapchain_private = NULL;
#endif /* ANDROID */

   return VK_SUCCESS;
}

void
vk_device_finish(UNUSED struct vk_device *device)
{
   /* Drivers should tear down their own queues */
   assert(list_is_empty(&device->queues));

#ifdef ANDROID
   if (device->swapchain_private) {
      hash_table_foreach(device->swapchain_private, entry)
         util_sparse_array_finish(entry->data);
      ralloc_free(device->swapchain_private);
   }
#endif /* ANDROID */

   vk_object_base_finish(&device->base);
}

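/* Switches the device onto a threaded submit mode: queue submissions will go
 * through a submit thread at least on demand, rather than the immediate or
 * deferred paths.  A driver calls this before creating any queues when it
 * needs submission to happen from a thread; the asserts below sanity-check
 * that the supported sync types can cope with that.
 */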
void
vk_device_enable_threaded_submit(struct vk_device *device)
{
   /* This must be called before any queues are created */
   assert(list_is_empty(&device->queues));

   /* In order to use threaded submit, we need every sync type that can be
    * used as a wait fence for vkQueueSubmit() to support WAIT_PENDING.
    * It's required for cross-thread/process submit re-ordering.
    */
   for (const struct vk_sync_type *const *t =
        device->physical->supported_sync_types; *t; t++) {
      if ((*t)->features & VK_SYNC_FEATURE_GPU_WAIT)
         assert((*t)->features & VK_SYNC_FEATURE_WAIT_PENDING);
   }

   /* Any binary vk_sync types which will be used as permanent semaphore
    * payloads also need to support vk_sync_type::move, but that's a lot
    * harder to assert since it only applies to permanent semaphore payloads.
    */

   if (device->submit_mode != VK_QUEUE_SUBMIT_MODE_THREADED)
      device->submit_mode = VK_QUEUE_SUBMIT_MODE_THREADED_ON_DEMAND;
}

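/* Flushes any submits that were deferred on the device's queues.  This only
 * does anything in VK_QUEUE_SUBMIT_MODE_DEFERRED (the emulated timeline
 * case); it keeps looping over the queues until no queue makes forward
 * progress, since flushing one queue may unblock a submit on another.
 */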
VkResult
vk_device_flush(struct vk_device *device)
{
   if (device->submit_mode != VK_QUEUE_SUBMIT_MODE_DEFERRED)
      return VK_SUCCESS;

   bool progress;
   do {
      progress = false;

      vk_foreach_queue(queue, device) {
         uint32_t queue_submit_count;
         VkResult result = vk_queue_flush(queue, &queue_submit_count);
         if (unlikely(result != VK_SUCCESS))
            return result;

         if (queue_submit_count)
            progress = true;
      }
   } while (progress);

   return VK_SUCCESS;
}

static const char *
timeline_mode_str(struct vk_device *device)
{
   switch (device->timeline_mode) {
#define CASE(X) case VK_DEVICE_TIMELINE_MODE_##X: return #X;
   CASE(NONE)
   CASE(EMULATED)
   CASE(ASSISTED)
   CASE(NATIVE)
#undef CASE
   default: return "UNKNOWN";
   }
}

void
_vk_device_report_lost(struct vk_device *device)
{
   assert(p_atomic_read(&device->_lost.lost) > 0);

   device->_lost.reported = true;

   vk_foreach_queue(queue, device) {
      if (queue->_lost.lost) {
         __vk_errorf(queue, VK_ERROR_DEVICE_LOST,
                     queue->_lost.error_file, queue->_lost.error_line,
                     "%s", queue->_lost.error_msg);
      }
   }

   vk_logd(VK_LOG_OBJS(device), "Timeline mode is %s.",
           timeline_mode_str(device));
}

VkResult
_vk_device_set_lost(struct vk_device *device,
                    const char *file, int line,
                    const char *msg, ...)
{
   /* This flushes out any per-queue device lost messages */
   if (vk_device_is_lost(device))
      return VK_ERROR_DEVICE_LOST;

   p_atomic_inc(&device->_lost.lost);
   device->_lost.reported = true;

   va_list ap;
   va_start(ap, msg);
   __vk_errorv(device, VK_ERROR_DEVICE_LOST, file, line, msg, ap);
   va_end(ap);

   vk_logd(VK_LOG_OBJS(device), "Timeline mode is %s.",
           timeline_mode_str(device));

   if (env_var_as_boolean("MESA_VK_ABORT_ON_DEVICE_LOSS", false))
      abort();

   return VK_ERROR_DEVICE_LOST;
}

PFN_vkVoidFunction
vk_device_get_proc_addr(const struct vk_device *device,
                        const char *name)
{
   if (device == NULL || name == NULL)
      return NULL;

   struct vk_instance *instance = device->physical->instance;
   return vk_device_dispatch_table_get_if_supported(&device->dispatch_table,
                                                    name,
                                                    instance->app_info.api_version,
                                                    &instance->enabled_extensions,
                                                    &device->enabled_extensions);
}

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
vk_common_GetDeviceProcAddr(VkDevice _device,
                            const char *pName)
{
   VK_FROM_HANDLE(vk_device, device, _device);
   return vk_device_get_proc_addr(device, pName);
}

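/* The vk_common_* entrypoints below implement the Vulkan 1.0 versions of
 * several device-level commands in terms of their "2" counterparts, so a
 * driver only needs to provide the newer entrypoints (GetDeviceQueue2,
 * BindBufferMemory2, GetBufferMemoryRequirements2, and so on) and picks
 * these wrappers up automatically through vk_common_device_entrypoints.
 */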
VKAPI_ATTR void VKAPI_CALL
vk_common_GetDeviceQueue(VkDevice _device,
                         uint32_t queueFamilyIndex,
                         uint32_t queueIndex,
                         VkQueue *pQueue)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   const VkDeviceQueueInfo2 info = {
      .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2,
      .pNext = NULL,
      /* flags = 0 because (Vulkan spec 1.2.170 - vkGetDeviceQueue):
       *
       *    "vkGetDeviceQueue must only be used to get queues that were
       *     created with the flags parameter of VkDeviceQueueCreateInfo set
       *     to zero. To get queues that were created with a non-zero flags
       *     parameter use vkGetDeviceQueue2."
       */
      .flags = 0,
      .queueFamilyIndex = queueFamilyIndex,
      .queueIndex = queueIndex,
   };

   device->dispatch_table.GetDeviceQueue2(_device, &info, pQueue);
}

VKAPI_ATTR void VKAPI_CALL
vk_common_GetDeviceQueue2(VkDevice _device,
                          const VkDeviceQueueInfo2 *pQueueInfo,
                          VkQueue *pQueue)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   struct vk_queue *queue = NULL;
   vk_foreach_queue(iter, device) {
      if (iter->queue_family_index == pQueueInfo->queueFamilyIndex &&
          iter->index_in_family == pQueueInfo->queueIndex) {
         queue = iter;
         break;
      }
   }

   /* From the Vulkan 1.1.70 spec:
    *
    *    "The queue returned by vkGetDeviceQueue2 must have the same flags
    *    value from this structure as that used at device creation time in a
    *    VkDeviceQueueCreateInfo instance. If no matching flags were specified
    *    at device creation time then pQueue will return VK_NULL_HANDLE."
    */
   if (queue && queue->flags == pQueueInfo->flags)
      *pQueue = vk_queue_to_handle(queue);
   else
      *pQueue = VK_NULL_HANDLE;
}

VKAPI_ATTR void VKAPI_CALL
vk_common_GetBufferMemoryRequirements(VkDevice _device,
                                      VkBuffer buffer,
                                      VkMemoryRequirements *pMemoryRequirements)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   VkBufferMemoryRequirementsInfo2 info = {
      .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
      .buffer = buffer,
   };
   VkMemoryRequirements2 reqs = {
      .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
   };
   device->dispatch_table.GetBufferMemoryRequirements2(_device, &info, &reqs);

   *pMemoryRequirements = reqs.memoryRequirements;
}

VKAPI_ATTR VkResult VKAPI_CALL
vk_common_BindBufferMemory(VkDevice _device,
                           VkBuffer buffer,
                           VkDeviceMemory memory,
                           VkDeviceSize memoryOffset)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   VkBindBufferMemoryInfo bind = {
      .sType         = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
      .buffer        = buffer,
      .memory        = memory,
      .memoryOffset  = memoryOffset,
   };

   return device->dispatch_table.BindBufferMemory2(_device, 1, &bind);
}

VKAPI_ATTR void VKAPI_CALL
vk_common_GetImageMemoryRequirements(VkDevice _device,
                                     VkImage image,
                                     VkMemoryRequirements *pMemoryRequirements)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   VkImageMemoryRequirementsInfo2 info = {
      .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
      .image = image,
   };
   VkMemoryRequirements2 reqs = {
      .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
   };
   device->dispatch_table.GetImageMemoryRequirements2(_device, &info, &reqs);

   *pMemoryRequirements = reqs.memoryRequirements;
}

VKAPI_ATTR VkResult VKAPI_CALL
vk_common_BindImageMemory(VkDevice _device,
                          VkImage image,
                          VkDeviceMemory memory,
                          VkDeviceSize memoryOffset)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   VkBindImageMemoryInfo bind = {
      .sType         = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
      .image         = image,
      .memory        = memory,
      .memoryOffset  = memoryOffset,
   };

   return device->dispatch_table.BindImageMemory2(_device, 1, &bind);
}

VKAPI_ATTR void VKAPI_CALL
vk_common_GetImageSparseMemoryRequirements(VkDevice _device,
                                           VkImage image,
                                           uint32_t *pSparseMemoryRequirementCount,
                                           VkSparseImageMemoryRequirements *pSparseMemoryRequirements)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   VkImageSparseMemoryRequirementsInfo2 info = {
      .sType = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2,
      .image = image,
   };

   if (!pSparseMemoryRequirements) {
      device->dispatch_table.GetImageSparseMemoryRequirements2(_device,
                                                               &info,
                                                               pSparseMemoryRequirementCount,
                                                               NULL);
      return;
   }

   STACK_ARRAY(VkSparseImageMemoryRequirements2, mem_reqs2, *pSparseMemoryRequirementCount);

   for (unsigned i = 0; i < *pSparseMemoryRequirementCount; ++i) {
      mem_reqs2[i].sType = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2;
      mem_reqs2[i].pNext = NULL;
   }

   device->dispatch_table.GetImageSparseMemoryRequirements2(_device,
                                                            &info,
                                                            pSparseMemoryRequirementCount,
                                                            mem_reqs2);

   for (unsigned i = 0; i < *pSparseMemoryRequirementCount; ++i)
      pSparseMemoryRequirements[i] = mem_reqs2[i].memoryRequirements;

   STACK_ARRAY_FINISH(mem_reqs2);
}

VKAPI_ATTR VkResult VKAPI_CALL
vk_common_DeviceWaitIdle(VkDevice _device)
{
   VK_FROM_HANDLE(vk_device, device, _device);
   const struct vk_device_dispatch_table *disp = &device->dispatch_table;

   vk_foreach_queue(queue, device) {
      VkResult result = disp->QueueWaitIdle(vk_queue_to_handle(queue));
      if (result != VK_SUCCESS)
         return result;
   }

   return VK_SUCCESS;
}

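/* Copies the payload of a Vulkan extension struct, i.e. everything after the
 * common sType/pNext header, from src to dst.  The pointer arithmetic on
 * "dst + 1" / "src + 1" skips exactly one VkBaseOutStructure, which is why
 * the two base struct types are asserted to have the same size.
 */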
static void
copy_vk_struct_guts(VkBaseOutStructure *dst, VkBaseInStructure *src, size_t struct_size)
{
   STATIC_ASSERT(sizeof(*dst) == sizeof(*src));
   memcpy(dst + 1, src + 1, struct_size - sizeof(VkBaseOutStructure));
}

#define CORE_FEATURE(feature) features->feature = core->feature

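/* The vk_get_physical_device_core_1_x_feature_ext() helpers below fill in a
 * single extension feature struct from the corresponding
 * VkPhysicalDeviceVulkan11/12/13Features block and return true if they
 * recognized the sType.  A driver's vkGetPhysicalDeviceFeatures2 pNext walk
 * might use them roughly like this (sketch; core11/core12 are assumed to be
 * feature structs the driver filled in earlier):
 *
 *    vk_foreach_struct(ext, pFeatures->pNext) {
 *       if (vk_get_physical_device_core_1_1_feature_ext(ext, &core11))
 *          continue;
 *       if (vk_get_physical_device_core_1_2_feature_ext(ext, &core12))
 *          continue;
 *       // ... driver-specific feature structs ...
 *    }
 */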
bool
vk_get_physical_device_core_1_1_feature_ext(struct VkBaseOutStructure *ext,
                                            const VkPhysicalDeviceVulkan11Features *core)
{
   switch (ext->sType) {
   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES: {
      VkPhysicalDevice16BitStorageFeatures *features = (void *)ext;
      CORE_FEATURE(storageBuffer16BitAccess);
      CORE_FEATURE(uniformAndStorageBuffer16BitAccess);
      CORE_FEATURE(storagePushConstant16);
      CORE_FEATURE(storageInputOutput16);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES: {
      VkPhysicalDeviceMultiviewFeatures *features = (void *)ext;
      CORE_FEATURE(multiview);
      CORE_FEATURE(multiviewGeometryShader);
      CORE_FEATURE(multiviewTessellationShader);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES: {
      VkPhysicalDeviceProtectedMemoryFeatures *features = (void *)ext;
      CORE_FEATURE(protectedMemory);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES: {
      VkPhysicalDeviceSamplerYcbcrConversionFeatures *features = (void *)ext;
      CORE_FEATURE(samplerYcbcrConversion);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES: {
      VkPhysicalDeviceShaderDrawParametersFeatures *features = (void *)ext;
      CORE_FEATURE(shaderDrawParameters);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES: {
      VkPhysicalDeviceVariablePointersFeatures *features = (void *)ext;
      CORE_FEATURE(variablePointersStorageBuffer);
      CORE_FEATURE(variablePointers);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES:
      copy_vk_struct_guts(ext, (void *)core, sizeof(*core));
      return true;

   default:
      return false;
   }
}

bool
vk_get_physical_device_core_1_2_feature_ext(struct VkBaseOutStructure *ext,
                                            const VkPhysicalDeviceVulkan12Features *core)
{
   switch (ext->sType) {
   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR: {
      VkPhysicalDevice8BitStorageFeaturesKHR *features = (void *)ext;
      CORE_FEATURE(storageBuffer8BitAccess);
      CORE_FEATURE(uniformAndStorageBuffer8BitAccess);
      CORE_FEATURE(storagePushConstant8);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR: {
      VkPhysicalDeviceBufferDeviceAddressFeaturesKHR *features = (void *)ext;
      CORE_FEATURE(bufferDeviceAddress);
      CORE_FEATURE(bufferDeviceAddressCaptureReplay);
      CORE_FEATURE(bufferDeviceAddressMultiDevice);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT: {
      VkPhysicalDeviceDescriptorIndexingFeaturesEXT *features = (void *)ext;
      CORE_FEATURE(shaderInputAttachmentArrayDynamicIndexing);
      CORE_FEATURE(shaderUniformTexelBufferArrayDynamicIndexing);
      CORE_FEATURE(shaderStorageTexelBufferArrayDynamicIndexing);
      CORE_FEATURE(shaderUniformBufferArrayNonUniformIndexing);
      CORE_FEATURE(shaderSampledImageArrayNonUniformIndexing);
      CORE_FEATURE(shaderStorageBufferArrayNonUniformIndexing);
      CORE_FEATURE(shaderStorageImageArrayNonUniformIndexing);
      CORE_FEATURE(shaderInputAttachmentArrayNonUniformIndexing);
      CORE_FEATURE(shaderUniformTexelBufferArrayNonUniformIndexing);
      CORE_FEATURE(shaderStorageTexelBufferArrayNonUniformIndexing);
      CORE_FEATURE(descriptorBindingUniformBufferUpdateAfterBind);
      CORE_FEATURE(descriptorBindingSampledImageUpdateAfterBind);
      CORE_FEATURE(descriptorBindingStorageImageUpdateAfterBind);
      CORE_FEATURE(descriptorBindingStorageBufferUpdateAfterBind);
      CORE_FEATURE(descriptorBindingUniformTexelBufferUpdateAfterBind);
      CORE_FEATURE(descriptorBindingStorageTexelBufferUpdateAfterBind);
      CORE_FEATURE(descriptorBindingUpdateUnusedWhilePending);
      CORE_FEATURE(descriptorBindingPartiallyBound);
      CORE_FEATURE(descriptorBindingVariableDescriptorCount);
      CORE_FEATURE(runtimeDescriptorArray);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR: {
      VkPhysicalDeviceFloat16Int8FeaturesKHR *features = (void *)ext;
      CORE_FEATURE(shaderFloat16);
      CORE_FEATURE(shaderInt8);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT: {
      VkPhysicalDeviceHostQueryResetFeaturesEXT *features = (void *)ext;
      CORE_FEATURE(hostQueryReset);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR: {
      VkPhysicalDeviceImagelessFramebufferFeaturesKHR *features = (void *)ext;
      CORE_FEATURE(imagelessFramebuffer);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT: {
      VkPhysicalDeviceScalarBlockLayoutFeaturesEXT *features = (void *)ext;
      CORE_FEATURE(scalarBlockLayout);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR: {
      VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR *features = (void *)ext;
      CORE_FEATURE(separateDepthStencilLayouts);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR: {
      VkPhysicalDeviceShaderAtomicInt64FeaturesKHR *features = (void *)ext;
      CORE_FEATURE(shaderBufferInt64Atomics);
      CORE_FEATURE(shaderSharedInt64Atomics);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR: {
      VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR *features = (void *)ext;
      CORE_FEATURE(shaderSubgroupExtendedTypes);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR: {
      VkPhysicalDeviceTimelineSemaphoreFeaturesKHR *features = (void *)ext;
      CORE_FEATURE(timelineSemaphore);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR: {
      VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR *features = (void *)ext;
      CORE_FEATURE(uniformBufferStandardLayout);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR: {
      VkPhysicalDeviceVulkanMemoryModelFeaturesKHR *features = (void *)ext;
      CORE_FEATURE(vulkanMemoryModel);
      CORE_FEATURE(vulkanMemoryModelDeviceScope);
      CORE_FEATURE(vulkanMemoryModelAvailabilityVisibilityChains);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES:
      copy_vk_struct_guts(ext, (void *)core, sizeof(*core));
      return true;

   default:
      return false;
   }
}

bool
vk_get_physical_device_core_1_3_feature_ext(struct VkBaseOutStructure *ext,
                                            const VkPhysicalDeviceVulkan13Features *core)
{
   switch (ext->sType) {
   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES_KHR: {
      VkPhysicalDeviceDynamicRenderingFeaturesKHR *features = (void *)ext;
      CORE_FEATURE(dynamicRendering);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT: {
      VkPhysicalDeviceImageRobustnessFeaturesEXT *features = (void *)ext;
      CORE_FEATURE(robustImageAccess);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT: {
      VkPhysicalDeviceInlineUniformBlockFeaturesEXT *features = (void *)ext;
      CORE_FEATURE(inlineUniformBlock);
      CORE_FEATURE(descriptorBindingInlineUniformBlockUpdateAfterBind);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR: {
      VkPhysicalDeviceMaintenance4FeaturesKHR *features = (void *)ext;
      CORE_FEATURE(maintenance4);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT: {
      VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT *features = (void *)ext;
      CORE_FEATURE(pipelineCreationCacheControl);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT: {
      VkPhysicalDevicePrivateDataFeaturesEXT *features = (void *)ext;
      CORE_FEATURE(privateData);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT: {
      VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT *features = (void *)ext;
      CORE_FEATURE(shaderDemoteToHelperInvocation);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES_KHR: {
      VkPhysicalDeviceShaderIntegerDotProductFeaturesKHR *features = (void *)ext;
      CORE_FEATURE(shaderIntegerDotProduct);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR: {
      VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR *features = (void *)ext;
      CORE_FEATURE(shaderTerminateInvocation);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT: {
      VkPhysicalDeviceSubgroupSizeControlFeaturesEXT *features = (void *)ext;
      CORE_FEATURE(subgroupSizeControl);
      CORE_FEATURE(computeFullSubgroups);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR: {
      VkPhysicalDeviceSynchronization2FeaturesKHR *features = (void *)ext;
      CORE_FEATURE(synchronization2);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT: {
      VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT *features = (void *)ext;
      CORE_FEATURE(textureCompressionASTC_HDR);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR: {
      VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR *features = (void *)ext;
      CORE_FEATURE(shaderZeroInitializeWorkgroupMemory);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES:
      copy_vk_struct_guts(ext, (void *)core, sizeof(*core));
      return true;

   default:
      return false;
   }
}

#undef CORE_FEATURE

#define CORE_RENAMED_PROPERTY(ext_property, core_property) \
   memcpy(&properties->ext_property, &core->core_property, sizeof(core->core_property))

#define CORE_PROPERTY(property) CORE_RENAMED_PROPERTY(property, property)

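/* Same idea as the feature helpers above, but for properties: each helper
 * fills in one extension property struct from the corresponding
 * VkPhysicalDeviceVulkan11/12/13Properties block and returns true if it
 * handled the sType, so a driver can call it in its
 * vkGetPhysicalDeviceProperties2 pNext walk before falling back to
 * driver-specific structs.
 */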
bool
vk_get_physical_device_core_1_1_property_ext(struct VkBaseOutStructure *ext,
                                             const VkPhysicalDeviceVulkan11Properties *core)
{
   switch (ext->sType) {
   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES: {
      VkPhysicalDeviceIDProperties *properties = (void *)ext;
      CORE_PROPERTY(deviceUUID);
      CORE_PROPERTY(driverUUID);
      CORE_PROPERTY(deviceLUID);
      CORE_PROPERTY(deviceLUIDValid);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES: {
      VkPhysicalDeviceMaintenance3Properties *properties = (void *)ext;
      CORE_PROPERTY(maxPerSetDescriptors);
      CORE_PROPERTY(maxMemoryAllocationSize);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES: {
      VkPhysicalDeviceMultiviewProperties *properties = (void *)ext;
      CORE_PROPERTY(maxMultiviewViewCount);
      CORE_PROPERTY(maxMultiviewInstanceIndex);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES: {
      VkPhysicalDevicePointClippingProperties *properties = (void *)ext;
      CORE_PROPERTY(pointClippingBehavior);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES: {
      VkPhysicalDeviceProtectedMemoryProperties *properties = (void *)ext;
      CORE_PROPERTY(protectedNoFault);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES: {
      VkPhysicalDeviceSubgroupProperties *properties = (void *)ext;
      CORE_PROPERTY(subgroupSize);
      CORE_RENAMED_PROPERTY(supportedStages,
                            subgroupSupportedStages);
      CORE_RENAMED_PROPERTY(supportedOperations,
                            subgroupSupportedOperations);
      CORE_RENAMED_PROPERTY(quadOperationsInAllStages,
                            subgroupQuadOperationsInAllStages);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES:
      copy_vk_struct_guts(ext, (void *)core, sizeof(*core));
      return true;

   default:
      return false;
   }
}

bool
vk_get_physical_device_core_1_2_property_ext(struct VkBaseOutStructure *ext,
                                             const VkPhysicalDeviceVulkan12Properties *core)
{
   switch (ext->sType) {
   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR: {
      VkPhysicalDeviceDepthStencilResolvePropertiesKHR *properties = (void *)ext;
      CORE_PROPERTY(supportedDepthResolveModes);
      CORE_PROPERTY(supportedStencilResolveModes);
      CORE_PROPERTY(independentResolveNone);
      CORE_PROPERTY(independentResolve);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT: {
      VkPhysicalDeviceDescriptorIndexingPropertiesEXT *properties = (void *)ext;
      CORE_PROPERTY(maxUpdateAfterBindDescriptorsInAllPools);
      CORE_PROPERTY(shaderUniformBufferArrayNonUniformIndexingNative);
      CORE_PROPERTY(shaderSampledImageArrayNonUniformIndexingNative);
      CORE_PROPERTY(shaderStorageBufferArrayNonUniformIndexingNative);
      CORE_PROPERTY(shaderStorageImageArrayNonUniformIndexingNative);
      CORE_PROPERTY(shaderInputAttachmentArrayNonUniformIndexingNative);
      CORE_PROPERTY(robustBufferAccessUpdateAfterBind);
      CORE_PROPERTY(quadDivergentImplicitLod);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindSamplers);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindUniformBuffers);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindStorageBuffers);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindSampledImages);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindStorageImages);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindInputAttachments);
      CORE_PROPERTY(maxPerStageUpdateAfterBindResources);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindSamplers);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindUniformBuffers);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindUniformBuffersDynamic);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindStorageBuffers);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindStorageBuffersDynamic);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindSampledImages);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindStorageImages);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindInputAttachments);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR: {
      VkPhysicalDeviceDriverPropertiesKHR *properties = (void *)ext;
      CORE_PROPERTY(driverID);
      CORE_PROPERTY(driverName);
      CORE_PROPERTY(driverInfo);
      CORE_PROPERTY(conformanceVersion);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT: {
      VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT *properties = (void *)ext;
      CORE_PROPERTY(filterMinmaxImageComponentMapping);
      CORE_PROPERTY(filterMinmaxSingleComponentFormats);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR: {
      VkPhysicalDeviceFloatControlsPropertiesKHR *properties = (void *)ext;
      CORE_PROPERTY(denormBehaviorIndependence);
      CORE_PROPERTY(roundingModeIndependence);
      CORE_PROPERTY(shaderDenormFlushToZeroFloat16);
      CORE_PROPERTY(shaderDenormPreserveFloat16);
      CORE_PROPERTY(shaderRoundingModeRTEFloat16);
      CORE_PROPERTY(shaderRoundingModeRTZFloat16);
      CORE_PROPERTY(shaderSignedZeroInfNanPreserveFloat16);
      CORE_PROPERTY(shaderDenormFlushToZeroFloat32);
      CORE_PROPERTY(shaderDenormPreserveFloat32);
      CORE_PROPERTY(shaderRoundingModeRTEFloat32);
      CORE_PROPERTY(shaderRoundingModeRTZFloat32);
      CORE_PROPERTY(shaderSignedZeroInfNanPreserveFloat32);
      CORE_PROPERTY(shaderDenormFlushToZeroFloat64);
      CORE_PROPERTY(shaderDenormPreserveFloat64);
      CORE_PROPERTY(shaderRoundingModeRTEFloat64);
      CORE_PROPERTY(shaderRoundingModeRTZFloat64);
      CORE_PROPERTY(shaderSignedZeroInfNanPreserveFloat64);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR: {
      VkPhysicalDeviceTimelineSemaphorePropertiesKHR *properties = (void *)ext;
      CORE_PROPERTY(maxTimelineSemaphoreValueDifference);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES:
      copy_vk_struct_guts(ext, (void *)core, sizeof(*core));
      return true;

   default:
      return false;
   }
}

bool
vk_get_physical_device_core_1_3_property_ext(struct VkBaseOutStructure *ext,
                                             const VkPhysicalDeviceVulkan13Properties *core)
{
   switch (ext->sType) {
   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT: {
      VkPhysicalDeviceInlineUniformBlockPropertiesEXT *properties = (void *)ext;
      CORE_PROPERTY(maxInlineUniformBlockSize);
      CORE_PROPERTY(maxPerStageDescriptorInlineUniformBlocks);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks);
      CORE_PROPERTY(maxDescriptorSetInlineUniformBlocks);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindInlineUniformBlocks);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR: {
      VkPhysicalDeviceMaintenance4PropertiesKHR *properties = (void *)ext;
      CORE_PROPERTY(maxBufferSize);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES_KHR: {
      VkPhysicalDeviceShaderIntegerDotProductPropertiesKHR *properties = (void *)ext;

#define IDP_PROPERTY(x) CORE_PROPERTY(integerDotProduct##x)
      IDP_PROPERTY(8BitUnsignedAccelerated);
      IDP_PROPERTY(8BitSignedAccelerated);
      IDP_PROPERTY(8BitMixedSignednessAccelerated);
      IDP_PROPERTY(4x8BitPackedUnsignedAccelerated);
      IDP_PROPERTY(4x8BitPackedSignedAccelerated);
      IDP_PROPERTY(4x8BitPackedMixedSignednessAccelerated);
      IDP_PROPERTY(16BitUnsignedAccelerated);
      IDP_PROPERTY(16BitSignedAccelerated);
      IDP_PROPERTY(16BitMixedSignednessAccelerated);
      IDP_PROPERTY(32BitUnsignedAccelerated);
      IDP_PROPERTY(32BitSignedAccelerated);
      IDP_PROPERTY(32BitMixedSignednessAccelerated);
      IDP_PROPERTY(64BitUnsignedAccelerated);
      IDP_PROPERTY(64BitSignedAccelerated);
      IDP_PROPERTY(64BitMixedSignednessAccelerated);
      IDP_PROPERTY(AccumulatingSaturating8BitUnsignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating8BitSignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating8BitMixedSignednessAccelerated);
      IDP_PROPERTY(AccumulatingSaturating4x8BitPackedUnsignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating4x8BitPackedSignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating4x8BitPackedMixedSignednessAccelerated);
      IDP_PROPERTY(AccumulatingSaturating16BitUnsignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating16BitSignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating16BitMixedSignednessAccelerated);
      IDP_PROPERTY(AccumulatingSaturating32BitUnsignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating32BitSignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating32BitMixedSignednessAccelerated);
      IDP_PROPERTY(AccumulatingSaturating64BitUnsignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating64BitSignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating64BitMixedSignednessAccelerated);
#undef IDP_PROPERTY
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT: {
      VkPhysicalDeviceSubgroupSizeControlPropertiesEXT *properties = (void *)ext;
      CORE_PROPERTY(minSubgroupSize);
      CORE_PROPERTY(maxSubgroupSize);
      CORE_PROPERTY(maxComputeWorkgroupSubgroups);
      CORE_PROPERTY(requiredSubgroupSizeStages);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT: {
      VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT *properties = (void *)ext;
      CORE_PROPERTY(storageTexelBufferOffsetAlignmentBytes);
      CORE_PROPERTY(storageTexelBufferOffsetSingleTexelAlignment);
      CORE_PROPERTY(uniformTexelBufferOffsetAlignmentBytes);
      CORE_PROPERTY(uniformTexelBufferOffsetSingleTexelAlignment);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES:
      copy_vk_struct_guts(ext, (void *)core, sizeof(*core));
      return true;

   default:
      return false;
   }
}

#undef CORE_RENAMED_PROPERTY
#undef CORE_PROPERTY