1 /*
2  * Copyright © 2021 Collabora Ltd.
3  *
4  * Derived from tu_device.c which is:
5  * Copyright © 2016 Red Hat.
6  * Copyright © 2016 Bas Nieuwenhuizen
7  * Copyright © 2015 Intel Corporation
8  *
9  * Permission is hereby granted, free of charge, to any person obtaining a
10  * copy of this software and associated documentation files (the "Software"),
11  * to deal in the Software without restriction, including without limitation
12  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
13  * and/or sell copies of the Software, and to permit persons to whom the
14  * Software is furnished to do so, subject to the following conditions:
15  *
16  * The above copyright notice and this permission notice (including the next
17  * paragraph) shall be included in all copies or substantial portions of the
18  * Software.
19  *
20  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
21  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
22  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
23  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
24  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
25  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
26  * DEALINGS IN THE SOFTWARE.
27  */
28 
29 #include "panvk_private.h"
30 
31 #include "panfrost-quirks.h"
32 #include "pan_bo.h"
33 #include "pan_encoder.h"
34 #include "pan_util.h"
35 
36 #include <fcntl.h>
37 #include <libsync.h>
38 #include <stdbool.h>
39 #include <string.h>
40 #include <sys/mman.h>
41 #include <sys/sysinfo.h>
42 #include <unistd.h>
43 #include <xf86drm.h>
44 
45 #include "drm-uapi/panfrost_drm.h"
46 
47 #include "util/debug.h"
48 #include "util/disk_cache.h"
49 #include "util/strtod.h"
50 #include "vk_format.h"
51 #include "vk_util.h"
52 
53 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
54 #include <wayland-client.h>
55 #include "wayland-drm-client-protocol.h"
56 #endif
57 
58 #include "panvk_cs.h"
59 
60 VkResult
61 _panvk_device_set_lost(struct panvk_device *device,
62                        const char *file, int line,
63                        const char *msg, ...)
64 {
65    /* Set the flag indicating that waits should return in finite time even
66     * after device loss.
67     */
68    p_atomic_inc(&device->_lost);
69 
70    /* TODO: Report the log message through VkDebugReportCallbackEXT instead */
71    fprintf(stderr, "%s:%d: ", file, line);
72    va_list ap;
73    va_start(ap, msg);
74    vfprintf(stderr, msg, ap);
75    va_end(ap);
76 
77    if (env_var_as_boolean("PANVK_ABORT_ON_DEVICE_LOSS", false))
78       abort();
79 
80    return VK_ERROR_DEVICE_LOST;
81 }
82 
83 static int
84 panvk_device_get_cache_uuid(uint16_t family, void *uuid)
85 {
86    uint32_t mesa_timestamp;
87    uint16_t f = family;
88 
89    if (!disk_cache_get_function_timestamp(panvk_device_get_cache_uuid,
90                                           &mesa_timestamp))
91       return -1;
92 
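   /* Cache UUID layout: 4 bytes of Mesa build timestamp, 2 bytes of GPU
    * family, then the "pan" tag; the remaining bytes stay zeroed. */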
93    memset(uuid, 0, VK_UUID_SIZE);
94    memcpy(uuid, &mesa_timestamp, 4);
95    memcpy((char *) uuid + 4, &f, 2);
96    snprintf((char *) uuid + 6, VK_UUID_SIZE - 10, "pan");
97    return 0;
98 }
99 
100 static void
101 panvk_get_driver_uuid(void *uuid)
102 {
103    memset(uuid, 0, VK_UUID_SIZE);
104    snprintf(uuid, VK_UUID_SIZE, "panfrost");
105 }
106 
107 static void
108 panvk_get_device_uuid(void *uuid)
109 {
110    memset(uuid, 0, VK_UUID_SIZE);
111 }
112 
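/* Flags accepted in the comma-separated PANVK_DEBUG environment variable
 * (parsed in panvk_CreateInstance below). */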
113 static const struct debug_control panvk_debug_options[] = {
114    { "startup", PANVK_DEBUG_STARTUP },
115    { "nir", PANVK_DEBUG_NIR },
116    { "trace", PANVK_DEBUG_TRACE },
117    { "sync", PANVK_DEBUG_SYNC },
118    { "afbc", PANVK_DEBUG_AFBC },
119    { "linear", PANVK_DEBUG_LINEAR },
120    { NULL, 0 }
121 };
122 
123 #if defined(VK_USE_PLATFORM_WAYLAND_KHR)
124 #define PANVK_USE_WSI_PLATFORM
125 #endif
126 
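/* panvk advertises Vulkan 1.1; the patch component simply tracks the
 * Vulkan headers this driver was built against. */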
127 #define PANVK_API_VERSION VK_MAKE_VERSION(1, 1, VK_HEADER_VERSION)
128 
129 VkResult
130 panvk_EnumerateInstanceVersion(uint32_t *pApiVersion)
131 {
132     *pApiVersion = PANVK_API_VERSION;
133     return VK_SUCCESS;
134 }
135 
136 static const struct vk_instance_extension_table panvk_instance_extensions = {
137 #ifdef PANVK_USE_WSI_PLATFORM
138    .KHR_surface = true,
139 #endif
140 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
141    .KHR_wayland_surface = true,
142 #endif
143 };
144 
145 static void
146 panvk_get_device_extensions(const struct panvk_physical_device *device,
147                             struct vk_device_extension_table *ext)
148 {
149    *ext = (struct vk_device_extension_table) {
150 #ifdef PANVK_USE_WSI_PLATFORM
151       .KHR_swapchain = true,
152 #endif
153       .EXT_custom_border_color = true,
154    };
155 }
156 
157 VkResult
158 panvk_CreateInstance(const VkInstanceCreateInfo *pCreateInfo,
159                      const VkAllocationCallbacks *pAllocator,
160                      VkInstance *pInstance)
161 {
162    struct panvk_instance *instance;
163    VkResult result;
164 
165    assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO);
166 
167    pAllocator = pAllocator ? : vk_default_allocator();
168    instance = vk_zalloc(pAllocator, sizeof(*instance), 8,
169                         VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
170    if (!instance)
171       return vk_error(NULL, VK_ERROR_OUT_OF_HOST_MEMORY);
172 
173    struct vk_instance_dispatch_table dispatch_table;
174 
175    vk_instance_dispatch_table_from_entrypoints(&dispatch_table,
176                                                &panvk_instance_entrypoints,
177                                                true);
178    vk_instance_dispatch_table_from_entrypoints(&dispatch_table,
179                                                &wsi_instance_entrypoints,
180                                                false);
181    result = vk_instance_init(&instance->vk,
182                              &panvk_instance_extensions,
183                              &dispatch_table,
184                              pCreateInfo,
185                              pAllocator);
186    if (result != VK_SUCCESS) {
187       vk_free(pAllocator, instance);
188       return vk_error(NULL, result);
189    }
190 
191    instance->physical_device_count = -1;
192    instance->debug_flags = parse_debug_string(getenv("PANVK_DEBUG"),
193                                               panvk_debug_options);
194 
195    if (instance->debug_flags & PANVK_DEBUG_STARTUP)
196       panvk_logi("Created an instance");
197 
198    VG(VALGRIND_CREATE_MEMPOOL(instance, 0, false));
199 
200    *pInstance = panvk_instance_to_handle(instance);
201 
202    return VK_SUCCESS;
203 }
204 
205 static void
206 panvk_physical_device_finish(struct panvk_physical_device *device)
207 {
208    panvk_wsi_finish(device);
209 
210    panvk_arch_dispatch(device->pdev.arch, meta_cleanup, device);
211    panfrost_close_device(&device->pdev);
212    if (device->master_fd != -1)
213       close(device->master_fd);
214 
215    vk_physical_device_finish(&device->vk);
216 }
217 
218 void
219 panvk_DestroyInstance(VkInstance _instance,
220                       const VkAllocationCallbacks *pAllocator)
221 {
222    VK_FROM_HANDLE(panvk_instance, instance, _instance);
223 
224    if (!instance)
225       return;
226 
227    for (int i = 0; i < instance->physical_device_count; ++i) {
228       panvk_physical_device_finish(instance->physical_devices + i);
229    }
230 
231    vk_instance_finish(&instance->vk);
232    vk_free(&instance->vk.alloc, instance);
233 }
234 
235 static VkResult
236 panvk_physical_device_init(struct panvk_physical_device *device,
237                            struct panvk_instance *instance,
238                            drmDevicePtr drm_device)
239 {
240    const char *path = drm_device->nodes[DRM_NODE_RENDER];
241    VkResult result = VK_SUCCESS;
242    drmVersionPtr version;
243    int fd;
244    int master_fd = -1;
245 
246    if (!getenv("PAN_I_WANT_A_BROKEN_VULKAN_DRIVER")) {
247       return vk_errorf(instance, VK_ERROR_INCOMPATIBLE_DRIVER,
248                        "WARNING: panvk is not a conformant vulkan implementation, "
249                        "pass PAN_I_WANT_A_BROKEN_VULKAN_DRIVER=1 if you know what you're doing.");
250    }
251 
252    fd = open(path, O_RDWR | O_CLOEXEC);
253    if (fd < 0) {
254       return vk_errorf(instance, VK_ERROR_INCOMPATIBLE_DRIVER,
255                        "failed to open device %s", path);
256    }
257 
258    version = drmGetVersion(fd);
259    if (!version) {
260       close(fd);
261       return vk_errorf(instance, VK_ERROR_INCOMPATIBLE_DRIVER,
262                        "failed to query kernel driver version for device %s",
263                        path);
264    }
265 
266    if (strcmp(version->name, "panfrost")) {
267       drmFreeVersion(version);
268       close(fd);
269       return vk_errorf(instance, VK_ERROR_INCOMPATIBLE_DRIVER,
270                        "device %s does not use the panfrost kernel driver", path);
271    }
272 
273    drmFreeVersion(version);
274 
275    if (instance->debug_flags & PANVK_DEBUG_STARTUP)
276       panvk_logi("Found compatible device '%s'.", path);
277 
278    struct vk_device_extension_table supported_extensions;
279    panvk_get_device_extensions(device, &supported_extensions);
280 
281    struct vk_physical_device_dispatch_table dispatch_table;
282    vk_physical_device_dispatch_table_from_entrypoints(&dispatch_table,
283                                                       &panvk_physical_device_entrypoints,
284                                                       true);
285    vk_physical_device_dispatch_table_from_entrypoints(&dispatch_table,
286                                                       &wsi_physical_device_entrypoints,
287                                                       false);
288 
289    result = vk_physical_device_init(&device->vk, &instance->vk,
290                                     &supported_extensions,
291                                     &dispatch_table);
292 
293    if (result != VK_SUCCESS) {
294       vk_error(instance, result);
295       goto fail;
296    }
297 
298    device->instance = instance;
299    assert(strlen(path) < ARRAY_SIZE(device->path));
300    strncpy(device->path, path, ARRAY_SIZE(device->path));
301 
302    if (instance->vk.enabled_extensions.KHR_display) {
303       master_fd = open(drm_device->nodes[DRM_NODE_PRIMARY], O_RDWR | O_CLOEXEC);
304       if (master_fd >= 0) {
305          /* TODO: free master_fd if accel is not working? */
306       }
307    }
308 
309    device->master_fd = master_fd;
310    if (instance->debug_flags & PANVK_DEBUG_TRACE)
311       device->pdev.debug |= PAN_DBG_TRACE;
312 
313    device->pdev.debug |= PAN_DBG_NO_CACHE;
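   /* panfrost_open_device() takes ownership of fd; clear our copy so the
    * error paths below don't close it a second time. */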
314    panfrost_open_device(NULL, fd, &device->pdev);
315    fd = -1;
316 
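   /* Early Midgard GPUs that only support the single framebuffer descriptor
    * (SFBD) layout are rejected; panvk targets MFBD-capable hardware. */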
317    if (device->pdev.quirks & MIDGARD_SFBD) {
318       result = vk_errorf(instance, VK_ERROR_INCOMPATIBLE_DRIVER,
319                          "%s not supported",
320                          panfrost_model_name(device->pdev.gpu_id));
321       goto fail;
322    }
323 
324    panvk_arch_dispatch(device->pdev.arch, meta_init, device);
325 
326    memset(device->name, 0, sizeof(device->name));
327    sprintf(device->name, "%s", panfrost_model_name(device->pdev.gpu_id));
328 
329    if (panvk_device_get_cache_uuid(device->pdev.gpu_id, device->cache_uuid)) {
330       result = vk_errorf(instance, VK_ERROR_INITIALIZATION_FAILED,
331                          "cannot generate UUID");
332       goto fail_close_device;
333    }
334 
335    fprintf(stderr, "WARNING: panvk is not a conformant vulkan implementation, "
336                    "testing use only.\n");
337 
338    panvk_get_driver_uuid(&device->driver_uuid);
339    panvk_get_device_uuid(&device->device_uuid);
340 
341    result = panvk_wsi_init(device);
342    if (result != VK_SUCCESS) {
343       vk_error(instance, result);
344       goto fail_close_device;
345    }
346 
347    return VK_SUCCESS;
348 
349 fail_close_device:
350    panfrost_close_device(&device->pdev);
351 fail:
352    if (fd != -1)
353       close(fd);
354    if (master_fd != -1)
355       close(master_fd);
356    return result;
357 }
358 
359 static VkResult
360 panvk_enumerate_devices(struct panvk_instance *instance)
361 {
362    /* TODO: Check for more devices ? */
363    drmDevicePtr devices[8];
364    VkResult result = VK_ERROR_INCOMPATIBLE_DRIVER;
365    int max_devices;
366 
367    instance->physical_device_count = 0;
368 
369    max_devices = drmGetDevices2(0, devices, ARRAY_SIZE(devices));
370 
371    if (instance->debug_flags & PANVK_DEBUG_STARTUP)
372       panvk_logi("Found %d drm nodes", max_devices);
373 
374    if (max_devices < 1)
375       return vk_error(instance, VK_ERROR_INCOMPATIBLE_DRIVER);
376 
377    for (unsigned i = 0; i < (unsigned) max_devices; i++) {
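      /* Mali GPUs are platform devices, so only render nodes on the
       * platform bus are considered. */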
378       if ((devices[i]->available_nodes & (1 << DRM_NODE_RENDER)) &&
379           devices[i]->bustype == DRM_BUS_PLATFORM) {
380 
381          result = panvk_physical_device_init(instance->physical_devices +
382                                            instance->physical_device_count,
383                                            instance, devices[i]);
384          if (result == VK_SUCCESS)
385             ++instance->physical_device_count;
386          else if (result != VK_ERROR_INCOMPATIBLE_DRIVER)
387             break;
388       }
389    }
390    drmFreeDevices(devices, max_devices);
391 
392    return result;
393 }
394 
395 VkResult
396 panvk_EnumeratePhysicalDevices(VkInstance _instance,
397                                uint32_t *pPhysicalDeviceCount,
398                                VkPhysicalDevice *pPhysicalDevices)
399 {
400    VK_FROM_HANDLE(panvk_instance, instance, _instance);
401    VK_OUTARRAY_MAKE(out, pPhysicalDevices, pPhysicalDeviceCount);
402 
403    VkResult result;
404 
405    if (instance->physical_device_count < 0) {
406       result = panvk_enumerate_devices(instance);
407       if (result != VK_SUCCESS && result != VK_ERROR_INCOMPATIBLE_DRIVER)
408          return result;
409    }
410 
411    for (uint32_t i = 0; i < instance->physical_device_count; ++i) {
412       vk_outarray_append(&out, p)
413       {
414          *p = panvk_physical_device_to_handle(instance->physical_devices + i);
415       }
416    }
417 
418    return vk_outarray_status(&out);
419 }
420 
421 VkResult
422 panvk_EnumeratePhysicalDeviceGroups(VkInstance _instance,
423                                     uint32_t *pPhysicalDeviceGroupCount,
424                                     VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties)
425 {
426    VK_FROM_HANDLE(panvk_instance, instance, _instance);
427    VK_OUTARRAY_MAKE(out, pPhysicalDeviceGroupProperties,
428                     pPhysicalDeviceGroupCount);
429    VkResult result;
430 
431    if (instance->physical_device_count < 0) {
432       result = panvk_enumerate_devices(instance);
433       if (result != VK_SUCCESS && result != VK_ERROR_INCOMPATIBLE_DRIVER)
434          return result;
435    }
436 
437    for (uint32_t i = 0; i < instance->physical_device_count; ++i) {
438       vk_outarray_append(&out, p)
439       {
440          p->physicalDeviceCount = 1;
441          p->physicalDevices[0] =
442             panvk_physical_device_to_handle(instance->physical_devices + i);
443          p->subsetAllocation = false;
444       }
445    }
446 
447    return VK_SUCCESS;
448 }
449 
450 void
451 panvk_GetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
452                                  VkPhysicalDeviceFeatures2 *pFeatures)
453 {
454    vk_foreach_struct(ext, pFeatures->pNext)
455    {
456       switch (ext->sType) {
457       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES: {
458          VkPhysicalDeviceVulkan11Features *features = (void *) ext;
459          features->storageBuffer16BitAccess            = false;
460          features->uniformAndStorageBuffer16BitAccess  = false;
461          features->storagePushConstant16               = false;
462          features->storageInputOutput16                = false;
463          features->multiview                           = false;
464          features->multiviewGeometryShader             = false;
465          features->multiviewTessellationShader         = false;
466          features->variablePointersStorageBuffer       = true;
467          features->variablePointers                    = true;
468          features->protectedMemory                     = false;
469          features->samplerYcbcrConversion              = false;
470          features->shaderDrawParameters                = false;
471          break;
472       }
473       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES: {
474          VkPhysicalDeviceVulkan12Features *features = (void *) ext;
475          features->samplerMirrorClampToEdge            = false;
476          features->drawIndirectCount                   = false;
477          features->storageBuffer8BitAccess             = false;
478          features->uniformAndStorageBuffer8BitAccess   = false;
479          features->storagePushConstant8                = false;
480          features->shaderBufferInt64Atomics            = false;
481          features->shaderSharedInt64Atomics            = false;
482          features->shaderFloat16                       = false;
483          features->shaderInt8                          = false;
484 
485          features->descriptorIndexing                                 = false;
486          features->shaderInputAttachmentArrayDynamicIndexing          = false;
487          features->shaderUniformTexelBufferArrayDynamicIndexing       = false;
488          features->shaderStorageTexelBufferArrayDynamicIndexing       = false;
489          features->shaderUniformBufferArrayNonUniformIndexing         = false;
490          features->shaderSampledImageArrayNonUniformIndexing          = false;
491          features->shaderStorageBufferArrayNonUniformIndexing         = false;
492          features->shaderStorageImageArrayNonUniformIndexing          = false;
493          features->shaderInputAttachmentArrayNonUniformIndexing       = false;
494          features->shaderUniformTexelBufferArrayNonUniformIndexing    = false;
495          features->shaderStorageTexelBufferArrayNonUniformIndexing    = false;
496          features->descriptorBindingUniformBufferUpdateAfterBind      = false;
497          features->descriptorBindingSampledImageUpdateAfterBind       = false;
498          features->descriptorBindingStorageImageUpdateAfterBind       = false;
499          features->descriptorBindingStorageBufferUpdateAfterBind      = false;
500          features->descriptorBindingUniformTexelBufferUpdateAfterBind = false;
501          features->descriptorBindingStorageTexelBufferUpdateAfterBind = false;
502          features->descriptorBindingUpdateUnusedWhilePending          = false;
503          features->descriptorBindingPartiallyBound                    = false;
504          features->descriptorBindingVariableDescriptorCount           = false;
505          features->runtimeDescriptorArray                             = false;
506 
507          features->samplerFilterMinmax                 = false;
508          features->scalarBlockLayout                   = false;
509          features->imagelessFramebuffer                = false;
510          features->uniformBufferStandardLayout         = false;
511          features->shaderSubgroupExtendedTypes         = false;
512          features->separateDepthStencilLayouts         = false;
513          features->hostQueryReset                      = false;
514          features->timelineSemaphore                   = false;
515          features->bufferDeviceAddress                 = false;
516          features->bufferDeviceAddressCaptureReplay    = false;
517          features->bufferDeviceAddressMultiDevice      = false;
518          features->vulkanMemoryModel                   = false;
519          features->vulkanMemoryModelDeviceScope        = false;
520          features->vulkanMemoryModelAvailabilityVisibilityChains = false;
521          features->shaderOutputViewportIndex           = false;
522          features->shaderOutputLayer                   = false;
523          features->subgroupBroadcastDynamicId          = false;
524          break;
525       }
526       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES: {
527          VkPhysicalDeviceVariablePointersFeatures *features = (void *) ext;
528          features->variablePointersStorageBuffer = true;
529          features->variablePointers = true;
530          break;
531       }
532       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES: {
533          VkPhysicalDeviceMultiviewFeatures *features =
534             (VkPhysicalDeviceMultiviewFeatures *) ext;
535          features->multiview = false;
536          features->multiviewGeometryShader = false;
537          features->multiviewTessellationShader = false;
538          break;
539       }
540       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES: {
541          VkPhysicalDeviceShaderDrawParametersFeatures *features =
542             (VkPhysicalDeviceShaderDrawParametersFeatures *) ext;
543          features->shaderDrawParameters = false;
544          break;
545       }
546       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES: {
547          VkPhysicalDeviceProtectedMemoryFeatures *features =
548             (VkPhysicalDeviceProtectedMemoryFeatures *) ext;
549          features->protectedMemory = false;
550          break;
551       }
552       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES: {
553          VkPhysicalDevice16BitStorageFeatures *features =
554             (VkPhysicalDevice16BitStorageFeatures *) ext;
555          features->storageBuffer16BitAccess = false;
556          features->uniformAndStorageBuffer16BitAccess = false;
557          features->storagePushConstant16 = false;
558          features->storageInputOutput16 = false;
559          break;
560       }
561       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES: {
562          VkPhysicalDeviceSamplerYcbcrConversionFeatures *features =
563             (VkPhysicalDeviceSamplerYcbcrConversionFeatures *) ext;
564          features->samplerYcbcrConversion = false;
565          break;
566       }
567       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT: {
568          VkPhysicalDeviceDescriptorIndexingFeaturesEXT *features =
569             (VkPhysicalDeviceDescriptorIndexingFeaturesEXT *) ext;
570          features->shaderInputAttachmentArrayDynamicIndexing = false;
571          features->shaderUniformTexelBufferArrayDynamicIndexing = false;
572          features->shaderStorageTexelBufferArrayDynamicIndexing = false;
573          features->shaderUniformBufferArrayNonUniformIndexing = false;
574          features->shaderSampledImageArrayNonUniformIndexing = false;
575          features->shaderStorageBufferArrayNonUniformIndexing = false;
576          features->shaderStorageImageArrayNonUniformIndexing = false;
577          features->shaderInputAttachmentArrayNonUniformIndexing = false;
578          features->shaderUniformTexelBufferArrayNonUniformIndexing = false;
579          features->shaderStorageTexelBufferArrayNonUniformIndexing = false;
580          features->descriptorBindingUniformBufferUpdateAfterBind = false;
581          features->descriptorBindingSampledImageUpdateAfterBind = false;
582          features->descriptorBindingStorageImageUpdateAfterBind = false;
583          features->descriptorBindingStorageBufferUpdateAfterBind = false;
584          features->descriptorBindingUniformTexelBufferUpdateAfterBind = false;
585          features->descriptorBindingStorageTexelBufferUpdateAfterBind = false;
586          features->descriptorBindingUpdateUnusedWhilePending = false;
587          features->descriptorBindingPartiallyBound = false;
588          features->descriptorBindingVariableDescriptorCount = false;
589          features->runtimeDescriptorArray = false;
590          break;
591       }
592       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT: {
593          VkPhysicalDeviceConditionalRenderingFeaturesEXT *features =
594             (VkPhysicalDeviceConditionalRenderingFeaturesEXT *) ext;
595          features->conditionalRendering = false;
596          features->inheritedConditionalRendering = false;
597          break;
598       }
599       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT: {
600          VkPhysicalDeviceTransformFeedbackFeaturesEXT *features =
601             (VkPhysicalDeviceTransformFeedbackFeaturesEXT *) ext;
602          features->transformFeedback = false;
603          features->geometryStreams = false;
604          break;
605       }
606       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT: {
607          VkPhysicalDeviceIndexTypeUint8FeaturesEXT *features =
608             (VkPhysicalDeviceIndexTypeUint8FeaturesEXT *)ext;
609          features->indexTypeUint8 = true;
610          break;
611       }
612       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT: {
613          VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *features =
614             (VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *)ext;
615          features->vertexAttributeInstanceRateDivisor = true;
616          features->vertexAttributeInstanceRateZeroDivisor = true;
617          break;
618       }
619       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT: {
620          VkPhysicalDevicePrivateDataFeaturesEXT *features =
621             (VkPhysicalDevicePrivateDataFeaturesEXT *)ext;
622          features->privateData = true;
623          break;
624       }
625       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT: {
626          VkPhysicalDeviceDepthClipEnableFeaturesEXT *features =
627             (VkPhysicalDeviceDepthClipEnableFeaturesEXT *)ext;
628          features->depthClipEnable = true;
629          break;
630       }
631       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT: {
632          VkPhysicalDevice4444FormatsFeaturesEXT *features = (void *)ext;
633          features->formatA4R4G4B4 = true;
634          features->formatA4B4G4R4 = true;
635          break;
636       }
637       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT: {
638          VkPhysicalDeviceCustomBorderColorFeaturesEXT *features = (void *) ext;
639          features->customBorderColors = true;
640          features->customBorderColorWithoutFormat = true;
641          break;
642       }
643       default:
644          break;
645       }
646    }
647 
648    pFeatures->features = (VkPhysicalDeviceFeatures) {
649       .fullDrawIndexUint32 = true,
650       .independentBlend = true,
651       .wideLines = true,
652       .largePoints = true,
653       .textureCompressionETC2 = true,
654       .textureCompressionASTC_LDR = true,
655       .shaderUniformBufferArrayDynamicIndexing = true,
656       .shaderSampledImageArrayDynamicIndexing = true,
657       .shaderStorageBufferArrayDynamicIndexing = true,
658       .shaderStorageImageArrayDynamicIndexing = true,
659    };
660 }
661 
662 void
663 panvk_GetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice,
664                                    VkPhysicalDeviceProperties2 *pProperties)
665 {
666    VK_FROM_HANDLE(panvk_physical_device, pdevice, physicalDevice);
667 
668    vk_foreach_struct(ext, pProperties->pNext)
669    {
670       switch (ext->sType) {
671       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR: {
672          VkPhysicalDevicePushDescriptorPropertiesKHR *properties = (VkPhysicalDevicePushDescriptorPropertiesKHR *)ext;
673          properties->maxPushDescriptors = MAX_PUSH_DESCRIPTORS;
674          break;
675       }
676       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES: {
677          VkPhysicalDeviceIDProperties *properties = (VkPhysicalDeviceIDProperties *)ext;
678          memcpy(properties->driverUUID, pdevice->driver_uuid, VK_UUID_SIZE);
679          memcpy(properties->deviceUUID, pdevice->device_uuid, VK_UUID_SIZE);
680          properties->deviceLUIDValid = false;
681          break;
682       }
683       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES: {
684          VkPhysicalDeviceMultiviewProperties *properties = (VkPhysicalDeviceMultiviewProperties *)ext;
685          properties->maxMultiviewViewCount = 0;
686          properties->maxMultiviewInstanceIndex = 0;
687          break;
688       }
689       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES: {
690          VkPhysicalDevicePointClippingProperties *properties = (VkPhysicalDevicePointClippingProperties *)ext;
691          properties->pointClippingBehavior =
692             VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES;
693          break;
694       }
695       case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES: {
696          VkPhysicalDeviceMaintenance3Properties *properties = (VkPhysicalDeviceMaintenance3Properties *)ext;
697          /* Make sure everything is addressable by a signed 32-bit int, and
698           * our largest descriptors are 96 bytes. */
699          properties->maxPerSetDescriptors = (1ull << 31) / 96;
700          /* Our buffer size fields allow only this much */
701          properties->maxMemoryAllocationSize = 0xFFFFFFFFull;
702          break;
703       }
704       default:
705          break;
706       }
707    }
708 
709    VkSampleCountFlags sample_counts =
710       VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
711 
712    /* Make sure that the entire descriptor set is addressable with a signed
713     * 32-bit int, so the sum of all limits scaled by descriptor size must be
714     * at most 2 GiB. A combined image/sampler counts as one of each. This
715     * limit applies to the pipeline layout, not the set layout, but there is
716     * no per-set limit, so we just expose a pipeline-level limit. No app is
717     * likely to hit this any time soon. */
718    size_t max_descriptor_set_size =
719       ((1ull << 31) - 16 * MAX_DYNAMIC_BUFFERS) /
720       (32 /* uniform buffer, 32 due to potential space wasted on alignment */ +
721        32 /* storage buffer, 32 due to potential space wasted on alignment */ +
722        32 /* sampler, largest when combined with image */ +
723        64 /* sampled image */ + 64 /* storage image */);
724 
725    VkPhysicalDeviceLimits limits = {
726       .maxImageDimension1D = (1 << 14),
727       .maxImageDimension2D = (1 << 14),
728       .maxImageDimension3D = (1 << 11),
729       .maxImageDimensionCube = (1 << 14),
730       .maxImageArrayLayers = (1 << 11),
731       .maxTexelBufferElements = 128 * 1024 * 1024,
732       .maxUniformBufferRange = UINT32_MAX,
733       .maxStorageBufferRange = UINT32_MAX,
734       .maxPushConstantsSize = MAX_PUSH_CONSTANTS_SIZE,
735       .maxMemoryAllocationCount = UINT32_MAX,
736       .maxSamplerAllocationCount = 64 * 1024,
737       .bufferImageGranularity = 64,          /* A cache line */
738       .sparseAddressSpaceSize = 0xffffffffu, /* buffer max size */
739       .maxBoundDescriptorSets = MAX_SETS,
740       .maxPerStageDescriptorSamplers = max_descriptor_set_size,
741       .maxPerStageDescriptorUniformBuffers = max_descriptor_set_size,
742       .maxPerStageDescriptorStorageBuffers = max_descriptor_set_size,
743       .maxPerStageDescriptorSampledImages = max_descriptor_set_size,
744       .maxPerStageDescriptorStorageImages = max_descriptor_set_size,
745       .maxPerStageDescriptorInputAttachments = max_descriptor_set_size,
746       .maxPerStageResources = max_descriptor_set_size,
747       .maxDescriptorSetSamplers = max_descriptor_set_size,
748       .maxDescriptorSetUniformBuffers = max_descriptor_set_size,
749       .maxDescriptorSetUniformBuffersDynamic = MAX_DYNAMIC_UNIFORM_BUFFERS,
750       .maxDescriptorSetStorageBuffers = max_descriptor_set_size,
751       .maxDescriptorSetStorageBuffersDynamic = MAX_DYNAMIC_STORAGE_BUFFERS,
752       .maxDescriptorSetSampledImages = max_descriptor_set_size,
753       .maxDescriptorSetStorageImages = max_descriptor_set_size,
754       .maxDescriptorSetInputAttachments = max_descriptor_set_size,
755       .maxVertexInputAttributes = 32,
756       .maxVertexInputBindings = 32,
757       .maxVertexInputAttributeOffset = 2047,
758       .maxVertexInputBindingStride = 2048,
759       .maxVertexOutputComponents = 128,
760       .maxTessellationGenerationLevel = 64,
761       .maxTessellationPatchSize = 32,
762       .maxTessellationControlPerVertexInputComponents = 128,
763       .maxTessellationControlPerVertexOutputComponents = 128,
764       .maxTessellationControlPerPatchOutputComponents = 120,
765       .maxTessellationControlTotalOutputComponents = 4096,
766       .maxTessellationEvaluationInputComponents = 128,
767       .maxTessellationEvaluationOutputComponents = 128,
768       .maxGeometryShaderInvocations = 127,
769       .maxGeometryInputComponents = 64,
770       .maxGeometryOutputComponents = 128,
771       .maxGeometryOutputVertices = 256,
772       .maxGeometryTotalOutputComponents = 1024,
773       .maxFragmentInputComponents = 128,
774       .maxFragmentOutputAttachments = 8,
775       .maxFragmentDualSrcAttachments = 1,
776       .maxFragmentCombinedOutputResources = 8,
777       .maxComputeSharedMemorySize = 32768,
778       .maxComputeWorkGroupCount = { 65535, 65535, 65535 },
779       .maxComputeWorkGroupInvocations = 2048,
780       .maxComputeWorkGroupSize = { 2048, 2048, 2048 },
781       .subPixelPrecisionBits = 4 /* FIXME */,
782       .subTexelPrecisionBits = 4 /* FIXME */,
783       .mipmapPrecisionBits = 4 /* FIXME */,
784       .maxDrawIndexedIndexValue = UINT32_MAX,
785       .maxDrawIndirectCount = UINT32_MAX,
786       .maxSamplerLodBias = 16,
787       .maxSamplerAnisotropy = 16,
788       .maxViewports = MAX_VIEWPORTS,
789       .maxViewportDimensions = { (1 << 14), (1 << 14) },
790       .viewportBoundsRange = { INT16_MIN, INT16_MAX },
791       .viewportSubPixelBits = 8,
792       .minMemoryMapAlignment = 4096, /* A page */
793       .minTexelBufferOffsetAlignment = 1,
794       .minUniformBufferOffsetAlignment = 4,
795       .minStorageBufferOffsetAlignment = 4,
796       .minTexelOffset = -32,
797       .maxTexelOffset = 31,
798       .minTexelGatherOffset = -32,
799       .maxTexelGatherOffset = 31,
800       .minInterpolationOffset = -2,
801       .maxInterpolationOffset = 2,
802       .subPixelInterpolationOffsetBits = 8,
803       .maxFramebufferWidth = (1 << 14),
804       .maxFramebufferHeight = (1 << 14),
805       .maxFramebufferLayers = (1 << 10),
806       .framebufferColorSampleCounts = sample_counts,
807       .framebufferDepthSampleCounts = sample_counts,
808       .framebufferStencilSampleCounts = sample_counts,
809       .framebufferNoAttachmentsSampleCounts = sample_counts,
810       .maxColorAttachments = MAX_RTS,
811       .sampledImageColorSampleCounts = sample_counts,
812       .sampledImageIntegerSampleCounts = VK_SAMPLE_COUNT_1_BIT,
813       .sampledImageDepthSampleCounts = sample_counts,
814       .sampledImageStencilSampleCounts = sample_counts,
815       .storageImageSampleCounts = VK_SAMPLE_COUNT_1_BIT,
816       .maxSampleMaskWords = 1,
817       .timestampComputeAndGraphics = true,
818       .timestampPeriod = 1,
819       .maxClipDistances = 8,
820       .maxCullDistances = 8,
821       .maxCombinedClipAndCullDistances = 8,
822       .discreteQueuePriorities = 1,
823       .pointSizeRange = { 0.125, 255.875 },
824       .lineWidthRange = { 0.0, 7.9921875 },
825       .pointSizeGranularity = (1.0 / 8.0),
826       .lineWidthGranularity = (1.0 / 128.0),
827       .strictLines = false, /* FINISHME */
828       .standardSampleLocations = true,
829       .optimalBufferCopyOffsetAlignment = 128,
830       .optimalBufferCopyRowPitchAlignment = 128,
831       .nonCoherentAtomSize = 64,
832    };
833 
834    pProperties->properties = (VkPhysicalDeviceProperties) {
835       .apiVersion = PANVK_API_VERSION,
836       .driverVersion = vk_get_driver_version(),
837       .vendorID = 0, /* TODO */
838       .deviceID = 0,
839       .deviceType = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
840       .limits = limits,
841       .sparseProperties = { 0 },
842    };
843 
844    strcpy(pProperties->properties.deviceName, pdevice->name);
845    memcpy(pProperties->properties.pipelineCacheUUID, pdevice->cache_uuid, VK_UUID_SIZE);
846 }
847 
848 static const VkQueueFamilyProperties panvk_queue_family_properties = {
849    .queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT,
850    .queueCount = 1,
851    .timestampValidBits = 64,
852    .minImageTransferGranularity = { 1, 1, 1 },
853 };
854 
855 void
856 panvk_GetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
857                                              uint32_t *pQueueFamilyPropertyCount,
858                                              VkQueueFamilyProperties *pQueueFamilyProperties)
859 {
860    VK_OUTARRAY_MAKE(out, pQueueFamilyProperties, pQueueFamilyPropertyCount);
861 
862    vk_outarray_append(&out, p) { *p = panvk_queue_family_properties; }
863 }
864 
865 void
866 panvk_GetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice,
867                                               uint32_t *pQueueFamilyPropertyCount,
868                                               VkQueueFamilyProperties2 *pQueueFamilyProperties)
869 {
870    VK_OUTARRAY_MAKE(out, pQueueFamilyProperties, pQueueFamilyPropertyCount);
871 
872    vk_outarray_append(&out, p)
873    {
874       p->queueFamilyProperties = panvk_queue_family_properties;
875    }
876 }
877 
878 static uint64_t
879 panvk_get_system_heap_size()
880 {
881    struct sysinfo info;
882    sysinfo(&info);
883 
884    uint64_t total_ram = (uint64_t)info.totalram * info.mem_unit;
885 
886    /* We don't want to burn too much ram with the GPU.  If the user has 4GiB
887     * or less, we use at most half.  If they have more than 4GiB, we use 3/4.
888     */
889    uint64_t available_ram;
890    if (total_ram <= 4ull * 1024 * 1024 * 1024)
891       available_ram = total_ram / 2;
892    else
893       available_ram = total_ram * 3 / 4;
894 
895    return available_ram;
896 }
897 
898 void
899 panvk_GetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice,
900                                          VkPhysicalDeviceMemoryProperties2 *pMemoryProperties)
901 {
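   /* Expose a single heap and a single memory type: all allocations are
    * advertised as device-local, host-visible and host-coherent. */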
902    pMemoryProperties->memoryProperties = (VkPhysicalDeviceMemoryProperties) {
903       .memoryHeapCount = 1,
904       .memoryHeaps[0].size = panvk_get_system_heap_size(),
905       .memoryHeaps[0].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT,
906       .memoryTypeCount = 1,
907       .memoryTypes[0].propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
908                                       VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
909                                       VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
910       .memoryTypes[0].heapIndex = 0,
911    };
912 }
913 
914 static VkResult
915 panvk_queue_init(struct panvk_device *device,
916                  struct panvk_queue *queue,
917                  int idx,
918                  const VkDeviceQueueCreateInfo *create_info)
919 {
920    const struct panfrost_device *pdev = &device->physical_device->pdev;
921 
922    VkResult result = vk_queue_init(&queue->vk, &device->vk, create_info, idx);
923    if (result != VK_SUCCESS)
924       return result;
925    queue->device = device;
926 
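   /* Create the sync object panvk_QueueWaitIdle() waits on.  It starts out
    * signaled so that waiting on a queue that never submitted anything
    * returns immediately. */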
927    struct drm_syncobj_create create = {
928       .flags = DRM_SYNCOBJ_CREATE_SIGNALED,
929    };
930 
931    int ret = drmIoctl(pdev->fd, DRM_IOCTL_SYNCOBJ_CREATE, &create);
932    if (ret) {
933       vk_queue_finish(&queue->vk);
934       return VK_ERROR_OUT_OF_HOST_MEMORY;
935    }
936 
937    queue->sync = create.handle;
938    return VK_SUCCESS;
939 }
940 
941 static void
942 panvk_queue_finish(struct panvk_queue *queue)
943 {
944    vk_queue_finish(&queue->vk);
945 }
946 
947 VkResult
948 panvk_CreateDevice(VkPhysicalDevice physicalDevice,
949                    const VkDeviceCreateInfo *pCreateInfo,
950                    const VkAllocationCallbacks *pAllocator,
951                    VkDevice *pDevice)
952 {
953    VK_FROM_HANDLE(panvk_physical_device, physical_device, physicalDevice);
954    VkResult result;
955    struct panvk_device *device;
956 
957    device = vk_zalloc2(&physical_device->instance->vk.alloc, pAllocator,
958                        sizeof(*device), 8, VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
959    if (!device)
960       return vk_error(physical_device, VK_ERROR_OUT_OF_HOST_MEMORY);
961 
962    const struct vk_device_entrypoint_table *dev_entrypoints;
963    struct vk_device_dispatch_table dispatch_table;
964 
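   /* Device-level entrypoints are compiled per GPU architecture (v5 Midgard,
    * v6/v7 Bifrost); pick the table matching the probed GPU. */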
965    switch (physical_device->pdev.arch) {
966    case 5:
967       dev_entrypoints = &panvk_v5_device_entrypoints;
968       break;
969    case 6:
970       dev_entrypoints = &panvk_v6_device_entrypoints;
971       break;
972    case 7:
973       dev_entrypoints = &panvk_v7_device_entrypoints;
974       break;
975    default:
976       unreachable("Unsupported architecture");
977    }
978 
979    vk_device_dispatch_table_from_entrypoints(&dispatch_table,
980                                              dev_entrypoints,
981                                              true);
982    vk_device_dispatch_table_from_entrypoints(&dispatch_table,
983                                              &panvk_device_entrypoints,
984                                              false);
985    vk_device_dispatch_table_from_entrypoints(&dispatch_table,
986                                              &wsi_device_entrypoints,
987                                              false);
988    result = vk_device_init(&device->vk, &physical_device->vk, &dispatch_table,
989                            pCreateInfo, pAllocator);
990    if (result != VK_SUCCESS) {
991       vk_free(&device->vk.alloc, device);
992       return result;
993    }
994 
995    device->instance = physical_device->instance;
996    device->physical_device = physical_device;
997 
998    for (unsigned i = 0; i < pCreateInfo->queueCreateInfoCount; i++) {
999       const VkDeviceQueueCreateInfo *queue_create =
1000          &pCreateInfo->pQueueCreateInfos[i];
1001       uint32_t qfi = queue_create->queueFamilyIndex;
1002       device->queues[qfi] =
1003          vk_alloc(&device->vk.alloc,
1004                   queue_create->queueCount * sizeof(struct panvk_queue),
1005                   8, VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
1006       if (!device->queues[qfi]) {
1007          result = VK_ERROR_OUT_OF_HOST_MEMORY;
1008          goto fail;
1009       }
1010 
1011       memset(device->queues[qfi], 0,
1012              queue_create->queueCount * sizeof(struct panvk_queue));
1013 
1014       device->queue_count[qfi] = queue_create->queueCount;
1015 
1016       for (unsigned q = 0; q < queue_create->queueCount; q++) {
1017          result = panvk_queue_init(device, &device->queues[qfi][q], q,
1018                                    queue_create);
1019          if (result != VK_SUCCESS)
1020             goto fail;
1021       }
1022    }
1023 
1024    *pDevice = panvk_device_to_handle(device);
1025    return VK_SUCCESS;
1026 
1027 fail:
1028    for (unsigned i = 0; i < PANVK_MAX_QUEUE_FAMILIES; i++) {
1029       for (unsigned q = 0; q < device->queue_count[i]; q++)
1030          panvk_queue_finish(&device->queues[i][q]);
1031       if (device->queue_count[i])
1032          vk_object_free(&device->vk, NULL, device->queues[i]);
1033    }
1034 
1035    vk_free(&device->vk.alloc, device);
1036    return result;
1037 }
1038 
1039 void
1040 panvk_DestroyDevice(VkDevice _device, const VkAllocationCallbacks *pAllocator)
1041 {
1042    VK_FROM_HANDLE(panvk_device, device, _device);
1043 
1044    if (!device)
1045       return;
1046 
1047    for (unsigned i = 0; i < PANVK_MAX_QUEUE_FAMILIES; i++) {
1048       for (unsigned q = 0; q < device->queue_count[i]; q++)
1049          panvk_queue_finish(&device->queues[i][q]);
1050       if (device->queue_count[i])
1051          vk_object_free(&device->vk, NULL, device->queues[i]);
1052    }
1053 
1054    vk_free(&device->vk.alloc, device);
1055 }
1056 
1057 VkResult
1058 panvk_EnumerateInstanceLayerProperties(uint32_t *pPropertyCount,
1059                                        VkLayerProperties *pProperties)
1060 {
1061    *pPropertyCount = 0;
1062    return VK_SUCCESS;
1063 }
1064 
1065 VkResult
1066 panvk_QueueWaitIdle(VkQueue _queue)
1067 {
1068    VK_FROM_HANDLE(panvk_queue, queue, _queue);
1069 
1070    if (panvk_device_is_lost(queue->device))
1071       return VK_ERROR_DEVICE_LOST;
1072 
1073    const struct panfrost_device *pdev = &queue->device->physical_device->pdev;
1074    struct drm_syncobj_wait wait = {
1075       .handles = (uint64_t) (uintptr_t)(&queue->sync),
1076       .count_handles = 1,
1077       .timeout_nsec = INT64_MAX,
1078       .flags = DRM_SYNCOBJ_WAIT_FLAGS_WAIT_ALL,
1079    };
1080    int ret;
1081 
1082    ret = drmIoctl(pdev->fd, DRM_IOCTL_SYNCOBJ_WAIT, &wait);
1083    assert(!ret);
1084 
1085    return VK_SUCCESS;
1086 }
1087 
1088 VkResult
1089 panvk_EnumerateInstanceExtensionProperties(const char *pLayerName,
1090                                            uint32_t *pPropertyCount,
1091                                            VkExtensionProperties *pProperties)
1092 {
1093    if (pLayerName)
1094       return vk_error(NULL, VK_ERROR_LAYER_NOT_PRESENT);
1095 
1096    return vk_enumerate_instance_extension_properties(&panvk_instance_extensions,
1097                                                      pPropertyCount, pProperties);
1098 }
1099 
1100 PFN_vkVoidFunction
1101 panvk_GetInstanceProcAddr(VkInstance _instance, const char *pName)
1102 {
1103    VK_FROM_HANDLE(panvk_instance, instance, _instance);
1104    return vk_instance_get_proc_addr(&instance->vk,
1105                                     &panvk_instance_entrypoints,
1106                                     pName);
1107 }
1108 
1109 /* The loader wants us to expose a second GetInstanceProcAddr function
1110  * to work around certain LD_PRELOAD issues seen in apps.
1111  */
1112 PUBLIC
1113 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
1114 vk_icdGetInstanceProcAddr(VkInstance instance, const char *pName);
1115 
1116 PUBLIC
1117 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
1118 vk_icdGetInstanceProcAddr(VkInstance instance, const char *pName)
1119 {
1120    return panvk_GetInstanceProcAddr(instance, pName);
1121 }
1122 
1123 VkResult
1124 panvk_AllocateMemory(VkDevice _device,
1125                      const VkMemoryAllocateInfo *pAllocateInfo,
1126                      const VkAllocationCallbacks *pAllocator,
1127                      VkDeviceMemory *pMem)
1128 {
1129    VK_FROM_HANDLE(panvk_device, device, _device);
1130    struct panvk_device_memory *mem;
1131 
1132    assert(pAllocateInfo->sType == VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO);
1133 
1134    if (pAllocateInfo->allocationSize == 0) {
1135       /* Apparently, this is allowed */
1136       *pMem = VK_NULL_HANDLE;
1137       return VK_SUCCESS;
1138    }
1139 
1140    mem = vk_object_alloc(&device->vk, pAllocator, sizeof(*mem),
1141                          VK_OBJECT_TYPE_DEVICE_MEMORY);
1142    if (mem == NULL)
1143       return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
1144 
1145    const VkImportMemoryFdInfoKHR *fd_info =
1146       vk_find_struct_const(pAllocateInfo->pNext,
1147                            IMPORT_MEMORY_FD_INFO_KHR);
1148 
1149    if (fd_info && !fd_info->handleType)
1150       fd_info = NULL;
1151 
1152    if (fd_info) {
1153       assert(fd_info->handleType ==
1154                 VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT ||
1155              fd_info->handleType ==
1156                 VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
1157 
1158       /*
1159        * TODO Importing the same fd twice gives us the same handle without
1160        * reference counting.  We need to maintain a per-instance handle-to-bo
1161        * table and add reference count to panvk_bo.
1162        */
1163       mem->bo = panfrost_bo_import(&device->physical_device->pdev, fd_info->fd);
1164       /* take ownership and close the fd */
1165       close(fd_info->fd);
1166    } else {
1167       mem->bo = panfrost_bo_create(&device->physical_device->pdev,
1168                                    pAllocateInfo->allocationSize, 0,
1169                                    "User-requested memory");
1170    }
1171 
1172    assert(mem->bo);
1173 
1174    *pMem = panvk_device_memory_to_handle(mem);
1175 
1176    return VK_SUCCESS;
1177 }
1178 
1179 void
1180 panvk_FreeMemory(VkDevice _device,
1181                  VkDeviceMemory _mem,
1182                  const VkAllocationCallbacks *pAllocator)
1183 {
1184    VK_FROM_HANDLE(panvk_device, device, _device);
1185    VK_FROM_HANDLE(panvk_device_memory, mem, _mem);
1186 
1187    if (mem == NULL)
1188       return;
1189 
1190    panfrost_bo_unreference(mem->bo);
1191    vk_object_free(&device->vk, pAllocator, mem);
1192 }
1193 
1194 VkResult
1195 panvk_MapMemory(VkDevice _device,
1196                 VkDeviceMemory _memory,
1197                 VkDeviceSize offset,
1198                 VkDeviceSize size,
1199                 VkMemoryMapFlags flags,
1200                 void **ppData)
1201 {
1202    VK_FROM_HANDLE(panvk_device, device, _device);
1203    VK_FROM_HANDLE(panvk_device_memory, mem, _memory);
1204 
1205    if (mem == NULL) {
1206       *ppData = NULL;
1207       return VK_SUCCESS;
1208    }
1209 
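   /* BOs are mapped lazily on first use and stay mapped for their lifetime;
    * panvk_UnmapMemory() is a no-op. */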
1210    if (!mem->bo->ptr.cpu)
1211       panfrost_bo_mmap(mem->bo);
1212 
1213    *ppData = mem->bo->ptr.cpu;
1214 
1215    if (*ppData) {
1216       *ppData += offset;
1217       return VK_SUCCESS;
1218    }
1219 
1220    return vk_error(device, VK_ERROR_MEMORY_MAP_FAILED);
1221 }
1222 
1223 void
1224 panvk_UnmapMemory(VkDevice _device, VkDeviceMemory _memory)
1225 {
1226 }
1227 
1228 VkResult
1229 panvk_FlushMappedMemoryRanges(VkDevice _device,
1230                               uint32_t memoryRangeCount,
1231                               const VkMappedMemoryRange *pMemoryRanges)
1232 {
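   /* The only exposed memory type is host-coherent (see
    * panvk_GetPhysicalDeviceMemoryProperties2), so flushing and invalidating
    * mapped ranges are no-ops. */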
1233    return VK_SUCCESS;
1234 }
1235 
1236 VkResult
1237 panvk_InvalidateMappedMemoryRanges(VkDevice _device,
1238                                    uint32_t memoryRangeCount,
1239                                    const VkMappedMemoryRange *pMemoryRanges)
1240 {
1241    return VK_SUCCESS;
1242 }
1243 
1244 void
1245 panvk_GetBufferMemoryRequirements(VkDevice _device,
1246                                   VkBuffer _buffer,
1247                                   VkMemoryRequirements *pMemoryRequirements)
1248 {
1249    VK_FROM_HANDLE(panvk_buffer, buffer, _buffer);
1250 
1251    pMemoryRequirements->memoryTypeBits = 1;
1252    pMemoryRequirements->alignment = 64;
1253    pMemoryRequirements->size =
1254       MAX2(align64(buffer->size, pMemoryRequirements->alignment), buffer->size);
1255 }
1256 
1257 void
1258 panvk_GetBufferMemoryRequirements2(VkDevice device,
1259                                    const VkBufferMemoryRequirementsInfo2 *pInfo,
1260                                    VkMemoryRequirements2 *pMemoryRequirements)
1261 {
1262    panvk_GetBufferMemoryRequirements(device, pInfo->buffer,
1263                                      &pMemoryRequirements->memoryRequirements);
1264 }
1265 
1266 void
1267 panvk_GetImageMemoryRequirements(VkDevice _device,
1268                                  VkImage _image,
1269                                  VkMemoryRequirements *pMemoryRequirements)
1270 {
1271    VK_FROM_HANDLE(panvk_image, image, _image);
1272 
1273    pMemoryRequirements->memoryTypeBits = 1;
1274    pMemoryRequirements->size = panvk_image_get_total_size(image);
1275    pMemoryRequirements->alignment = 4096;
1276 }
1277 
1278 void
1279 panvk_GetImageMemoryRequirements2(VkDevice device,
1280                                  const VkImageMemoryRequirementsInfo2 *pInfo,
1281                                  VkMemoryRequirements2 *pMemoryRequirements)
1282 {
1283    panvk_GetImageMemoryRequirements(device, pInfo->image,
1284                                     &pMemoryRequirements->memoryRequirements);
1285 }
1286 
1287 void
1288 panvk_GetImageSparseMemoryRequirements(VkDevice device, VkImage image,
1289                                        uint32_t *pSparseMemoryRequirementCount,
1290                                        VkSparseImageMemoryRequirements *pSparseMemoryRequirements)
1291 {
1292    panvk_stub();
1293 }
1294 
1295 void
panvk_GetImageSparseMemoryRequirements2(VkDevice device,const VkImageSparseMemoryRequirementsInfo2 * pInfo,uint32_t * pSparseMemoryRequirementCount,VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements)1296 panvk_GetImageSparseMemoryRequirements2(VkDevice device,
1297                                         const VkImageSparseMemoryRequirementsInfo2 *pInfo,
1298                                         uint32_t *pSparseMemoryRequirementCount,
1299                                         VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements)
1300 {
1301    panvk_stub();
1302 }
1303 
1304 void
panvk_GetDeviceMemoryCommitment(VkDevice device,VkDeviceMemory memory,VkDeviceSize * pCommittedMemoryInBytes)1305 panvk_GetDeviceMemoryCommitment(VkDevice device,
1306                                 VkDeviceMemory memory,
1307                                 VkDeviceSize *pCommittedMemoryInBytes)
1308 {
1309    *pCommittedMemoryInBytes = 0;
1310 }
1311 
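/* Buffers don't own any storage: binding simply records the backing BO and
 * the offset within it for later use.
 */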
VkResult
panvk_BindBufferMemory2(VkDevice device,
                        uint32_t bindInfoCount,
                        const VkBindBufferMemoryInfo *pBindInfos)
{
   for (uint32_t i = 0; i < bindInfoCount; ++i) {
      VK_FROM_HANDLE(panvk_device_memory, mem, pBindInfos[i].memory);
      VK_FROM_HANDLE(panvk_buffer, buffer, pBindInfos[i].buffer);

      if (mem) {
         buffer->bo = mem->bo;
         buffer->bo_offset = pBindInfos[i].memoryOffset;
      } else {
         buffer->bo = NULL;
      }
   }
   return VK_SUCCESS;
}

VkResult
panvk_BindBufferMemory(VkDevice device,
                       VkBuffer buffer,
                       VkDeviceMemory memory,
                       VkDeviceSize memoryOffset)
{
   const VkBindBufferMemoryInfo info = {
      .sType = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
      .buffer = buffer,
      .memory = memory,
      .memoryOffset = memoryOffset
   };

   return panvk_BindBufferMemory2(device, 1, &info);
}

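/* Besides recording the BO and offset, binding image memory clears the AFBC
 * headers of every slice so they don't contain stale data before the GPU
 * first accesses the image.
 */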
VkResult
panvk_BindImageMemory2(VkDevice device,
                       uint32_t bindInfoCount,
                       const VkBindImageMemoryInfo *pBindInfos)
{
   for (uint32_t i = 0; i < bindInfoCount; ++i) {
      VK_FROM_HANDLE(panvk_image, image, pBindInfos[i].image);
      VK_FROM_HANDLE(panvk_device_memory, mem, pBindInfos[i].memory);

      if (mem) {
         image->pimage.data.bo = mem->bo;
         image->pimage.data.offset = pBindInfos[i].memoryOffset;
         /* Reset the AFBC headers */
         if (drm_is_afbc(image->pimage.layout.modifier)) {
            void *base = image->pimage.data.bo->ptr.cpu + image->pimage.data.offset;

            for (unsigned layer = 0; layer < image->pimage.layout.array_size; layer++) {
               for (unsigned level = 0; level < image->pimage.layout.nr_slices; level++) {
                  void *header = base +
                                 (layer * image->pimage.layout.array_stride) +
                                 image->pimage.layout.slices[level].offset;
                  memset(header, 0, image->pimage.layout.slices[level].afbc.header_size);
               }
            }
         }
      } else {
         image->pimage.data.bo = NULL;
         image->pimage.data.offset = pBindInfos[i].memoryOffset;
      }
   }

   return VK_SUCCESS;
}

VkResult
panvk_BindImageMemory(VkDevice device,
                      VkImage image,
                      VkDeviceMemory memory,
                      VkDeviceSize memoryOffset)
{
   const VkBindImageMemoryInfo info = {
      .sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
      .image = image,
      .memory = memory,
      .memoryOffset = memoryOffset
   };

   return panvk_BindImageMemory2(device, 1, &info);
}

VkResult
panvk_QueueBindSparse(VkQueue _queue,
                      uint32_t bindInfoCount,
                      const VkBindSparseInfo *pBindInfo,
                      VkFence _fence)
{
   return VK_SUCCESS;
}

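/* Events are backed by DRM syncobjs: signaling, resetting and querying an
 * event maps directly to the corresponding syncobj ioctls on the panfrost
 * device fd.
 */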
VkResult
panvk_CreateEvent(VkDevice _device,
                  const VkEventCreateInfo *pCreateInfo,
                  const VkAllocationCallbacks *pAllocator,
                  VkEvent *pEvent)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   const struct panfrost_device *pdev = &device->physical_device->pdev;
   struct panvk_event *event =
      vk_object_zalloc(&device->vk, pAllocator, sizeof(*event),
                       VK_OBJECT_TYPE_EVENT);
   if (!event)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   struct drm_syncobj_create create = {
      .flags = 0,
   };

   int ret = drmIoctl(pdev->fd, DRM_IOCTL_SYNCOBJ_CREATE, &create);
   if (ret) {
      vk_object_free(&device->vk, pAllocator, event);
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   event->syncobj = create.handle;
   *pEvent = panvk_event_to_handle(event);

   return VK_SUCCESS;
}

void
panvk_DestroyEvent(VkDevice _device,
                   VkEvent _event,
                   const VkAllocationCallbacks *pAllocator)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   VK_FROM_HANDLE(panvk_event, event, _event);
   const struct panfrost_device *pdev = &device->physical_device->pdev;

   if (!event)
      return;

   struct drm_syncobj_destroy destroy = { .handle = event->syncobj };
   drmIoctl(pdev->fd, DRM_IOCTL_SYNCOBJ_DESTROY, &destroy);

   vk_object_free(&device->vk, pAllocator, event);
}

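/* Query the event with a zero-timeout syncobj wait: ETIME means the syncobj
 * has no signaled fence yet, i.e. the event is unset.
 */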
VkResult
panvk_GetEventStatus(VkDevice _device, VkEvent _event)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   VK_FROM_HANDLE(panvk_event, event, _event);
   const struct panfrost_device *pdev = &device->physical_device->pdev;
   bool signaled;

   struct drm_syncobj_wait wait = {
      .handles = (uintptr_t) &event->syncobj,
      .count_handles = 1,
      .timeout_nsec = 0,
      .flags = DRM_SYNCOBJ_WAIT_FLAGS_WAIT_FOR_SUBMIT,
   };

   int ret = drmIoctl(pdev->fd, DRM_IOCTL_SYNCOBJ_WAIT, &wait);
   if (ret) {
      if (errno == ETIME)
         signaled = false;
      else {
         assert(0);
         return VK_ERROR_DEVICE_LOST; /* TODO */
      }
   } else
      signaled = true;

   return signaled ? VK_EVENT_SET : VK_EVENT_RESET;
}

VkResult
panvk_SetEvent(VkDevice _device, VkEvent _event)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   VK_FROM_HANDLE(panvk_event, event, _event);
   const struct panfrost_device *pdev = &device->physical_device->pdev;

   struct drm_syncobj_array objs = {
      .handles = (uint64_t) (uintptr_t) &event->syncobj,
      .count_handles = 1
   };

   /* This is going to just replace the fence for this syncobj with one that
    * is already in signaled state. This won't be a problem because the spec
    * mandates that the event will have been set before the vkCmdWaitEvents
    * command executes.
    * https://www.khronos.org/registry/vulkan/specs/1.2/html/chap6.html#commandbuffers-submission-progress
    */
   if (drmIoctl(pdev->fd, DRM_IOCTL_SYNCOBJ_SIGNAL, &objs))
      return VK_ERROR_DEVICE_LOST;

   return VK_SUCCESS;
}

VkResult
panvk_ResetEvent(VkDevice _device, VkEvent _event)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   VK_FROM_HANDLE(panvk_event, event, _event);
   const struct panfrost_device *pdev = &device->physical_device->pdev;

   struct drm_syncobj_array objs = {
      .handles = (uint64_t) (uintptr_t) &event->syncobj,
      .count_handles = 1
   };

   if (drmIoctl(pdev->fd, DRM_IOCTL_SYNCOBJ_RESET, &objs))
      return VK_ERROR_DEVICE_LOST;

   return VK_SUCCESS;
}

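/* Buffer creation doesn't allocate any GPU memory; the backing BO is attached
 * later through vkBindBufferMemory().
 */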
VkResult
panvk_CreateBuffer(VkDevice _device,
                   const VkBufferCreateInfo *pCreateInfo,
                   const VkAllocationCallbacks *pAllocator,
                   VkBuffer *pBuffer)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   struct panvk_buffer *buffer;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO);

   buffer = vk_object_alloc(&device->vk, pAllocator, sizeof(*buffer),
                            VK_OBJECT_TYPE_BUFFER);
   if (buffer == NULL)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   buffer->size = pCreateInfo->size;
   buffer->usage = pCreateInfo->usage;
   buffer->flags = pCreateInfo->flags;

   *pBuffer = panvk_buffer_to_handle(buffer);

   return VK_SUCCESS;
}

void
panvk_DestroyBuffer(VkDevice _device,
                    VkBuffer _buffer,
                    const VkAllocationCallbacks *pAllocator)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   VK_FROM_HANDLE(panvk_buffer, buffer, _buffer);

   if (!buffer)
      return;

   vk_object_free(&device->vk, pAllocator, buffer);
}

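/* The framebuffer object only records the attachment views and dimensions;
 * the actual framebuffer descriptors are presumably emitted at command-buffer
 * recording time.
 */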
VkResult
panvk_CreateFramebuffer(VkDevice _device,
                        const VkFramebufferCreateInfo *pCreateInfo,
                        const VkAllocationCallbacks *pAllocator,
                        VkFramebuffer *pFramebuffer)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   struct panvk_framebuffer *framebuffer;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO);

   size_t size = sizeof(*framebuffer) + sizeof(struct panvk_attachment_info) *
                                           pCreateInfo->attachmentCount;
   framebuffer = vk_object_alloc(&device->vk, pAllocator, size,
                                 VK_OBJECT_TYPE_FRAMEBUFFER);
   if (framebuffer == NULL)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   framebuffer->attachment_count = pCreateInfo->attachmentCount;
   framebuffer->width = pCreateInfo->width;
   framebuffer->height = pCreateInfo->height;
   framebuffer->layers = pCreateInfo->layers;
   for (uint32_t i = 0; i < pCreateInfo->attachmentCount; i++) {
      VkImageView _iview = pCreateInfo->pAttachments[i];
      struct panvk_image_view *iview = panvk_image_view_from_handle(_iview);
      framebuffer->attachments[i].iview = iview;
   }

   *pFramebuffer = panvk_framebuffer_to_handle(framebuffer);
   return VK_SUCCESS;
}

void
panvk_DestroyFramebuffer(VkDevice _device,
                         VkFramebuffer _fb,
                         const VkAllocationCallbacks *pAllocator)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   VK_FROM_HANDLE(panvk_framebuffer, fb, _fb);

   if (fb)
      vk_object_free(&device->vk, pAllocator, fb);
}

void
panvk_DestroySampler(VkDevice _device,
                     VkSampler _sampler,
                     const VkAllocationCallbacks *pAllocator)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   VK_FROM_HANDLE(panvk_sampler, sampler, _sampler);

   if (!sampler)
      return;

   vk_object_free(&device->vk, pAllocator, sampler);
}

/* vk_icd.h does not declare this function, so we declare it here to
 * suppress Wmissing-prototypes.
 */
PUBLIC VKAPI_ATTR VkResult VKAPI_CALL
vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t *pSupportedVersion);

PUBLIC VKAPI_ATTR VkResult VKAPI_CALL
vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t *pSupportedVersion)
{
   /* For the full details on loader interface versioning, see
    * <https://github.com/KhronosGroup/Vulkan-LoaderAndValidationLayers/blob/master/loader/LoaderAndLayerInterface.md>.
    * What follows is a condensed summary, to help you navigate the large and
    * confusing official doc.
    *
    *   - Loader interface v0 is incompatible with later versions. We don't
    *     support it.
    *
    *   - In loader interface v1:
    *       - The first ICD entrypoint called by the loader is
    *         vk_icdGetInstanceProcAddr(). The ICD must statically expose this
    *         entrypoint.
    *       - The ICD must statically expose no other Vulkan symbol unless it
    *         is linked with -Bsymbolic.
    *       - Each dispatchable Vulkan handle created by the ICD must be
    *         a pointer to a struct whose first member is VK_LOADER_DATA. The
    *         ICD must initialize VK_LOADER_DATA.loadMagic to
    *         ICD_LOADER_MAGIC.
    *       - The loader implements vkCreate{PLATFORM}SurfaceKHR() and
    *         vkDestroySurfaceKHR(). The ICD must be capable of working with
    *         such loader-managed surfaces.
    *
    *    - Loader interface v2 differs from v1 in:
    *       - The first ICD entrypoint called by the loader is
    *         vk_icdNegotiateLoaderICDInterfaceVersion(). The ICD must
    *         statically expose this entrypoint.
    *
    *    - Loader interface v3 differs from v2 in:
    *        - The ICD must implement vkCreate{PLATFORM}SurfaceKHR(),
    *          vkDestroySurfaceKHR(), and other API which uses VkSurfaceKHR,
    *          because the loader no longer does so.
    */
   *pSupportedVersion = MIN2(*pSupportedVersion, 3u);
   return VK_SUCCESS;
}

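/* Exporting device memory hands out a dma-buf fd from the underlying BO;
 * opaque fd and dma-buf handle types are treated identically here.
 */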
VkResult
panvk_GetMemoryFdKHR(VkDevice _device,
                     const VkMemoryGetFdInfoKHR *pGetFdInfo,
                     int *pFd)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   VK_FROM_HANDLE(panvk_device_memory, memory, pGetFdInfo->memory);

   assert(pGetFdInfo->sType == VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR);

   /* At the moment, we support only the below handle types. */
   assert(pGetFdInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT ||
          pGetFdInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);

   int prime_fd = panfrost_bo_export(memory->bo);
   if (prime_fd < 0)
      return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);

   *pFd = prime_fd;
   return VK_SUCCESS;
}

VkResult
panvk_GetMemoryFdPropertiesKHR(VkDevice _device,
                               VkExternalMemoryHandleTypeFlagBits handleType,
                               int fd,
                               VkMemoryFdPropertiesKHR *pMemoryFdProperties)
{
   assert(handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
   pMemoryFdProperties->memoryTypeBits = 1;
   return VK_SUCCESS;
}

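/* Both opaque fd and sync fd semaphore handle types are advertised; panvk
 * semaphores are assumed to be backed by DRM syncobjs, which support both.
 */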
void
panvk_GetPhysicalDeviceExternalSemaphoreProperties(VkPhysicalDevice physicalDevice,
                                                   const VkPhysicalDeviceExternalSemaphoreInfo *pExternalSemaphoreInfo,
                                                   VkExternalSemaphoreProperties *pExternalSemaphoreProperties)
{
   if (pExternalSemaphoreInfo->handleType == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT ||
       pExternalSemaphoreInfo->handleType == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT) {
      pExternalSemaphoreProperties->exportFromImportedHandleTypes =
         VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT |
         VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
      pExternalSemaphoreProperties->compatibleHandleTypes =
         VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT |
         VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
      pExternalSemaphoreProperties->externalSemaphoreFeatures =
         VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT |
         VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT;
   } else {
      pExternalSemaphoreProperties->exportFromImportedHandleTypes = 0;
      pExternalSemaphoreProperties->compatibleHandleTypes = 0;
      pExternalSemaphoreProperties->externalSemaphoreFeatures = 0;
   }
}

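/* External fences are not supported yet, so every field is reported as
 * zero.
 */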
void
panvk_GetPhysicalDeviceExternalFenceProperties(VkPhysicalDevice physicalDevice,
                                               const VkPhysicalDeviceExternalFenceInfo *pExternalFenceInfo,
                                               VkExternalFenceProperties *pExternalFenceProperties)
{
   pExternalFenceProperties->exportFromImportedHandleTypes = 0;
   pExternalFenceProperties->compatibleHandleTypes = 0;
   pExternalFenceProperties->externalFenceFeatures = 0;
}

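/* Device groups only ever contain a single physical device, so the peer is
 * always the device itself and all transfer/access features are reported.
 */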
void
panvk_GetDeviceGroupPeerMemoryFeatures(VkDevice device,
                                       uint32_t heapIndex,
                                       uint32_t localDeviceIndex,
                                       uint32_t remoteDeviceIndex,
                                       VkPeerMemoryFeatureFlags *pPeerMemoryFeatures)
{
   assert(localDeviceIndex == remoteDeviceIndex);

   *pPeerMemoryFeatures = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT |
                          VK_PEER_MEMORY_FEATURE_COPY_DST_BIT |
                          VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT |
                          VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT;
}