/*
 * Copyright 2021 Google LLC
 * SPDX-License-Identifier: MIT
 *
 * based in part on anv and radv which are:
 * Copyright © 2015 Intel Corporation
 * Copyright © 2016 Red Hat
 * Copyright © 2016 Bas Nieuwenhuizen
 */

#include "vn_android.h"

#include <dlfcn.h>
#include <hardware/gralloc.h>
#include <hardware/hwvulkan.h>
#include <vndk/hardware_buffer.h>
#include <vulkan/vk_icd.h>

#include "drm-uapi/drm_fourcc.h"
#include "util/libsync.h"
#include "util/os_file.h"

#include "vn_buffer.h"
#include "vn_device.h"
#include "vn_device_memory.h"
#include "vn_image.h"
#include "vn_instance.h"
#include "vn_physical_device.h"
#include "vn_queue.h"

static int
vn_hal_open(const struct hw_module_t *mod,
            const char *id,
            struct hw_device_t **dev);

static void UNUSED
static_asserts(void)
{
   STATIC_ASSERT(HWVULKAN_DISPATCH_MAGIC == ICD_LOADER_MAGIC);
}

PUBLIC struct hwvulkan_module_t HAL_MODULE_INFO_SYM = {
   .common = {
      .tag = HARDWARE_MODULE_TAG,
      .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
      .hal_api_version = HARDWARE_HAL_API_VERSION,
      .id = HWVULKAN_HARDWARE_MODULE_ID,
      .name = "Venus Vulkan HAL",
      .author = "Google LLC",
      .methods = &(hw_module_methods_t) {
         .open = vn_hal_open,
      },
   },
};

static const gralloc_module_t *gralloc = NULL;

static int
vn_hal_close(UNUSED struct hw_device_t *dev)
{
   dlclose(gralloc->common.dso);
   return 0;
}

static hwvulkan_device_t vn_hal_dev = {
   .common = {
      .tag = HARDWARE_DEVICE_TAG,
      .version = HWVULKAN_DEVICE_API_VERSION_0_1,
      .module = &HAL_MODULE_INFO_SYM.common,
      .close = vn_hal_close,
   },
   .EnumerateInstanceExtensionProperties =
      vn_EnumerateInstanceExtensionProperties,
   .CreateInstance = vn_CreateInstance,
   .GetInstanceProcAddr = vn_GetInstanceProcAddr,
};

static int
vn_hal_open(const struct hw_module_t *mod,
            const char *id,
            struct hw_device_t **dev)
{
   static const char CROS_GRALLOC_MODULE_NAME[] = "CrOS Gralloc";

   assert(mod == &HAL_MODULE_INFO_SYM.common);
   assert(strcmp(id, HWVULKAN_DEVICE_0) == 0);

   /* get gralloc module for gralloc buffer info query */
   int ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID,
                           (const hw_module_t **)&gralloc);
   if (ret) {
      if (VN_DEBUG(WSI))
         vn_log(NULL, "failed to open gralloc module(ret=%d)", ret);
      return ret;
   }

   if (VN_DEBUG(WSI))
      vn_log(NULL, "opened gralloc module name: %s", gralloc->common.name);

   if (strcmp(gralloc->common.name, CROS_GRALLOC_MODULE_NAME) != 0 ||
       !gralloc->perform) {
      dlclose(gralloc->common.dso);
      return -1;
   }

   *dev = &vn_hal_dev.common;

   return 0;
}

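/* Map a VkFormat to its AHardwareBuffer format counterpart, or return 0 when
 * there is no direct equivalent.
 */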
static uint32_t
vn_android_ahb_format_from_vk_format(VkFormat format)
{
   switch (format) {
   case VK_FORMAT_R8G8B8A8_UNORM:
      return AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
   case VK_FORMAT_R8G8B8_UNORM:
      return AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM;
   case VK_FORMAT_R5G6B5_UNORM_PACK16:
      return AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
   case VK_FORMAT_R16G16B16A16_SFLOAT:
      return AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT;
   case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
      return AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM;
   case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
      return AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420;
   default:
      return 0;
   }
}

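/* Map a DRM fourcc, as reported by gralloc, to a VkFormat. Formats the driver
 * does not handle map to VK_FORMAT_UNDEFINED.
 */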
VkFormat
vn_android_drm_format_to_vk_format(uint32_t format)
{
   switch (format) {
   case DRM_FORMAT_ABGR8888:
   case DRM_FORMAT_XBGR8888:
      return VK_FORMAT_R8G8B8A8_UNORM;
   case DRM_FORMAT_BGR888:
      return VK_FORMAT_R8G8B8_UNORM;
   case DRM_FORMAT_RGB565:
      return VK_FORMAT_R5G6B5_UNORM_PACK16;
   case DRM_FORMAT_ABGR16161616F:
      return VK_FORMAT_R16G16B16A16_SFLOAT;
   case DRM_FORMAT_ABGR2101010:
      return VK_FORMAT_A2B10G10R10_UNORM_PACK32;
   case DRM_FORMAT_YVU420:
   case DRM_FORMAT_NV12:
      return VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
   default:
      return VK_FORMAT_UNDEFINED;
   }
}

static bool
vn_android_drm_format_is_yuv(uint32_t format)
{
   assert(vn_android_drm_format_to_vk_format(format) != VK_FORMAT_UNDEFINED);

   switch (format) {
   case DRM_FORMAT_YVU420:
   case DRM_FORMAT_NV12:
      return true;
   default:
      return false;
   }
}

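/* Derive AHardwareBuffer usage bits from the Vulkan image usage and create
 * flags. An AHB allocation must carry at least one GPU usage bit, so sampled
 * image usage is used as the fallback.
 */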
uint64_t
vn_android_get_ahb_usage(const VkImageUsageFlags usage,
                         const VkImageCreateFlags flags)
{
   uint64_t ahb_usage = 0;
   if (usage &
       (VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT))
      ahb_usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;

   if (usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
                VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT))
      ahb_usage |= AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER;

   if (flags & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT)
      ahb_usage |= AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP;

   if (flags & VK_IMAGE_CREATE_PROTECTED_BIT)
      ahb_usage |= AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;

   /* must include at least one GPU usage flag */
   if (ahb_usage == 0)
      ahb_usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;

   return ahb_usage;
}

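/* Translate the swapchain image usage into gralloc producer and consumer
 * usage bits. Shared presentable images are not supported and are rejected
 * up front.
 */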
VkResult
vn_GetSwapchainGrallocUsage2ANDROID(
   VkDevice device,
   VkFormat format,
   VkImageUsageFlags imageUsage,
   VkSwapchainImageUsageFlagsANDROID swapchainImageUsage,
   uint64_t *grallocConsumerUsage,
   uint64_t *grallocProducerUsage)
{
   struct vn_device *dev = vn_device_from_handle(device);
   *grallocConsumerUsage = 0;
   *grallocProducerUsage = 0;

   if (swapchainImageUsage & VK_SWAPCHAIN_IMAGE_USAGE_SHARED_BIT_ANDROID)
      return vn_error(dev->instance, VK_ERROR_INITIALIZATION_FAILED);

   if (VN_DEBUG(WSI))
      vn_log(dev->instance, "format=%d, imageUsage=0x%x", format, imageUsage);

   if (imageUsage & (VK_IMAGE_USAGE_TRANSFER_DST_BIT |
                     VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT))
      *grallocProducerUsage |= AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER;

   if (imageUsage &
       (VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT |
        VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT))
      *grallocConsumerUsage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;

   return VK_SUCCESS;
}

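/* Buffer info layout returned by the CrOS gralloc0 perform hook
 * CROS_GRALLOC_DRM_GET_BUFFER_INFO.
 */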
struct cros_gralloc0_buffer_info {
   uint32_t drm_fourcc;
   int num_fds; /* ignored */
   int fds[4];  /* ignored */
   uint64_t modifier;
   uint32_t offset[4];
   uint32_t stride[4];
};

struct vn_android_gralloc_buffer_properties {
   uint32_t drm_fourcc;
   uint64_t modifier;
   uint32_t offset[4];
   uint32_t stride[4];
};

static VkResult
vn_android_get_dma_buf_from_native_handle(const native_handle_t *handle,
                                          int *out_dma_buf)
{
   /* A native_handle_t can wrap multiple fds, but we expect only the first
    * one to point to the dma_buf. Even for multi-planar formats there should
    * be a single dma_buf; the other fd(s) may point to shared memory used to
    * store buffer metadata or other vendor specific bits.
    */
   if (handle->numFds < 1) {
      vn_log(NULL, "handle->numFds is %d, expected >= 1", handle->numFds);
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;
   }

   if (handle->data[0] < 0) {
      vn_log(NULL, "handle->data[0] < 0");
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;
   }

   *out_dma_buf = handle->data[0];
   return VK_SUCCESS;
}

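/* Query the DRM fourcc, format modifier and per-plane offsets/strides of a
 * gralloc buffer via the CrOS gralloc perform hook.
 */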
static bool
vn_android_get_gralloc_buffer_properties(
   buffer_handle_t handle,
   struct vn_android_gralloc_buffer_properties *out_props)
{
   static const int32_t CROS_GRALLOC_DRM_GET_BUFFER_INFO = 4;
   struct cros_gralloc0_buffer_info info;
   if (gralloc->perform(gralloc, CROS_GRALLOC_DRM_GET_BUFFER_INFO, handle,
                        &info) != 0) {
      vn_log(NULL, "CROS_GRALLOC_DRM_GET_BUFFER_INFO failed");
      return false;
   }

   if (info.modifier == DRM_FORMAT_MOD_INVALID) {
      vn_log(NULL, "Unexpected DRM_FORMAT_MOD_INVALID");
      return false;
   }

   out_props->drm_fourcc = info.drm_fourcc;
   for (uint32_t i = 0; i < 4; i++) {
      out_props->stride[i] = info.stride[i];
      out_props->offset[i] = info.offset[i];
   }
   out_props->modifier = info.modifier;

   return true;
}

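/* Find the VkDrmFormatModifierPropertiesEXT matching the given modifier for
 * the format, using the usual two-call pattern: query the modifier count
 * first, allocate a temporary array, then query again to fill it.
 */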
static VkResult
vn_android_get_modifier_properties(struct vn_device *dev,
                                   VkFormat format,
                                   uint64_t modifier,
                                   const VkAllocationCallbacks *alloc,
                                   VkDrmFormatModifierPropertiesEXT *out_props)
{
   VkPhysicalDevice physical_device =
      vn_physical_device_to_handle(dev->physical_device);
   VkDrmFormatModifierPropertiesListEXT mod_prop_list = {
      .sType = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
      .pNext = NULL,
      .drmFormatModifierCount = 0,
      .pDrmFormatModifierProperties = NULL,
   };
   VkFormatProperties2 format_prop = {
      .sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
      .pNext = &mod_prop_list,
   };
   VkDrmFormatModifierPropertiesEXT *mod_props = NULL;
   bool modifier_found = false;

   vn_GetPhysicalDeviceFormatProperties2(physical_device, format,
                                         &format_prop);

   if (!mod_prop_list.drmFormatModifierCount) {
      vn_log(dev->instance, "No compatible modifier for VkFormat(%u)",
             format);
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;
   }

   mod_props = vk_zalloc(
      alloc, sizeof(*mod_props) * mod_prop_list.drmFormatModifierCount,
      VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
   if (!mod_props)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   mod_prop_list.pDrmFormatModifierProperties = mod_props;
   vn_GetPhysicalDeviceFormatProperties2(physical_device, format,
                                         &format_prop);

   for (uint32_t i = 0; i < mod_prop_list.drmFormatModifierCount; i++) {
      if (mod_props[i].drmFormatModifier == modifier) {
         *out_props = mod_props[i];
         modifier_found = true;
         break;
      }
   }

   vk_free(alloc, mod_props);

   if (!modifier_found) {
      vn_log(dev->instance,
             "No matching modifier(%" PRIu64 ") properties for VkFormat(%u)",
             modifier, format);
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;
   }

   return VK_SUCCESS;
}

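/* Bundles the VkImageCreateInfo pNext chain needed to create a dma_buf backed
 * image with an explicit DRM format modifier and per-plane layouts.
 */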
struct vn_android_image_builder {
   VkImageCreateInfo create;
   VkSubresourceLayout layouts[4];
   VkImageDrmFormatModifierExplicitCreateInfoEXT modifier;
   VkExternalMemoryImageCreateInfo external;
};

static VkResult
vn_android_get_image_builder(struct vn_device *dev,
                             const VkImageCreateInfo *create_info,
                             const native_handle_t *handle,
                             const VkAllocationCallbacks *alloc,
                             struct vn_android_image_builder *out_builder)
{
   VkResult result = VK_SUCCESS;
   struct vn_android_gralloc_buffer_properties buf_props;
   VkDrmFormatModifierPropertiesEXT mod_props;

   if (!vn_android_get_gralloc_buffer_properties(handle, &buf_props))
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;

   result = vn_android_get_modifier_properties(
      dev, create_info->format, buf_props.modifier, alloc, &mod_props);
   if (result != VK_SUCCESS)
      return result;

   memset(out_builder->layouts, 0, sizeof(out_builder->layouts));
   for (uint32_t i = 0; i < mod_props.drmFormatModifierPlaneCount; i++) {
      out_builder->layouts[i].offset = buf_props.offset[i];
      out_builder->layouts[i].rowPitch = buf_props.stride[i];
   }
   out_builder->modifier = (VkImageDrmFormatModifierExplicitCreateInfoEXT){
      .sType =
         VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT,
      .pNext = create_info->pNext,
      .drmFormatModifier = buf_props.modifier,
      .drmFormatModifierPlaneCount = mod_props.drmFormatModifierPlaneCount,
      .pPlaneLayouts = out_builder->layouts,
   };
   out_builder->external = (VkExternalMemoryImageCreateInfo){
      .sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
      .pNext = &out_builder->modifier,
      .handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
   };
   out_builder->create = *create_info;
   out_builder->create.pNext = &out_builder->external;
   out_builder->create.tiling = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT;

   return VK_SUCCESS;
}

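/* Create a vn_image from a VkNativeBufferANDROID handle for the Android WSI
 * path, importing the underlying dma_buf as the image memory.
 */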
VkResult
vn_android_image_from_anb(struct vn_device *dev,
                          const VkImageCreateInfo *create_info,
                          const VkNativeBufferANDROID *anb_info,
                          const VkAllocationCallbacks *alloc,
                          struct vn_image **out_img)
{
   /* If anb_info->handle points to a classic resource created via
    * virtio_gpu_cmd_resource_create_3d, anb_info->stride is the stride of the
    * guest shadow storage rather than that of the host gpu storage.
    *
    * We also need to pass the correct stride to vn_CreateImage, which is done
    * via VkImageDrmFormatModifierExplicitCreateInfoEXT and requires
    * VK_EXT_image_drm_format_modifier support in the host driver. The struct
    * needs the host storage info, which can be queried from cros gralloc.
    */
   VkResult result = VK_SUCCESS;
   VkDevice device = vn_device_to_handle(dev);
   VkDeviceMemory memory = VK_NULL_HANDLE;
   VkImage image = VK_NULL_HANDLE;
   struct vn_image *img = NULL;
   uint64_t alloc_size = 0;
   uint32_t mem_type_bits = 0;
   int dma_buf_fd = -1;
   int dup_fd = -1;
   struct vn_android_image_builder builder;

   result = vn_android_get_dma_buf_from_native_handle(anb_info->handle,
                                                      &dma_buf_fd);
   if (result != VK_SUCCESS)
      goto fail;

   result = vn_android_get_image_builder(dev, create_info, anb_info->handle,
                                         alloc, &builder);
   if (result != VK_SUCCESS)
      goto fail;

   /* encoder will strip the Android specific pNext structs */
   result = vn_image_create(dev, &builder.create, alloc, &img);
   if (result != VK_SUCCESS) {
      if (VN_DEBUG(WSI))
         vn_log(dev->instance, "vn_image_create failed");
      goto fail;
   }

   image = vn_image_to_handle(img);
   VkMemoryRequirements mem_req;
   vn_GetImageMemoryRequirements(device, image, &mem_req);
   if (!mem_req.memoryTypeBits) {
      if (VN_DEBUG(WSI))
         vn_log(dev->instance, "mem_req.memoryTypeBits cannot be zero");
      result = VK_ERROR_INVALID_EXTERNAL_HANDLE;
      goto fail;
   }

   result = vn_get_memory_dma_buf_properties(dev, dma_buf_fd, &alloc_size,
                                             &mem_type_bits);
   if (result != VK_SUCCESS)
      goto fail;

   if (VN_DEBUG(WSI)) {
      vn_log(dev->instance,
             "size = img(%" PRIu64 ") fd(%" PRIu64 "), "
             "memoryTypeBits = img(0x%X) & fd(0x%X)",
             mem_req.size, alloc_size, mem_req.memoryTypeBits, mem_type_bits);
   }

   if (alloc_size < mem_req.size) {
      if (VN_DEBUG(WSI)) {
         vn_log(dev->instance,
                "alloc_size(%" PRIu64 ") mem_req.size(%" PRIu64 ")",
                alloc_size, mem_req.size);
      }
      result = VK_ERROR_INVALID_EXTERNAL_HANDLE;
      goto fail;
   }

   mem_type_bits &= mem_req.memoryTypeBits;
   if (!mem_type_bits) {
      result = VK_ERROR_INVALID_EXTERNAL_HANDLE;
      goto fail;
   }

   dup_fd = os_dupfd_cloexec(dma_buf_fd);
   if (dup_fd < 0) {
      result = (errno == EMFILE) ? VK_ERROR_TOO_MANY_OBJECTS
                                 : VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }

   const VkImportMemoryFdInfoKHR import_fd_info = {
      .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
      .pNext = NULL,
      .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
      .fd = dup_fd,
   };
   const VkMemoryAllocateInfo memory_info = {
      .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
      .pNext = &import_fd_info,
      .allocationSize = mem_req.size,
      .memoryTypeIndex = ffs(mem_type_bits) - 1,
   };
   result = vn_AllocateMemory(device, &memory_info, alloc, &memory);
   if (result != VK_SUCCESS) {
      /* only need to close the dup_fd on import failure */
      close(dup_fd);
      goto fail;
   }

   result = vn_BindImageMemory(device, image, memory, 0);
   if (result != VK_SUCCESS)
      goto fail;

   img->is_wsi = true;
   /* Android WSI image owns the memory */
   img->private_memory = memory;
   *out_img = img;

   return VK_SUCCESS;

fail:
   if (image != VK_NULL_HANDLE)
      vn_DestroyImage(device, image, alloc);
   if (memory != VK_NULL_HANDLE)
      vn_FreeMemory(device, memory, alloc);
   return vn_error(dev->instance, result);
}

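/* Import the nativeFenceFd into the acquire semaphore and/or fence. Without
 * global fencing support, the fd is waited on and closed here instead, and
 * the semaphore/fence are given already-signaled payloads.
 */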
VkResult
vn_AcquireImageANDROID(VkDevice device,
                       UNUSED VkImage image,
                       int nativeFenceFd,
                       VkSemaphore semaphore,
                       VkFence fence)
{
   struct vn_device *dev = vn_device_from_handle(device);
   VkResult result = VK_SUCCESS;

   if (dev->instance->experimental.globalFencing == VK_FALSE) {
      /* Fallback when VkVenusExperimentalFeatures100000MESA::globalFencing is
       * VK_FALSE: the out semaphore and fence are filled with already
       * signaled payloads, and the native fence fd is waited on here until it
       * signals.
       */
      if (nativeFenceFd >= 0) {
         int ret = sync_wait(nativeFenceFd, -1);
         /* Android loader expects the ICD to always close the fd */
         close(nativeFenceFd);
         if (ret)
            return vn_error(dev->instance, VK_ERROR_SURFACE_LOST_KHR);
      }

      if (semaphore != VK_NULL_HANDLE)
         vn_semaphore_signal_wsi(dev, vn_semaphore_from_handle(semaphore));

      if (fence != VK_NULL_HANDLE)
         vn_fence_signal_wsi(dev, vn_fence_from_handle(fence));

      return VK_SUCCESS;
   }

   int semaphore_fd = -1;
   int fence_fd = -1;
   if (nativeFenceFd >= 0) {
      if (semaphore != VK_NULL_HANDLE && fence != VK_NULL_HANDLE) {
         semaphore_fd = nativeFenceFd;
         fence_fd = os_dupfd_cloexec(nativeFenceFd);
         if (fence_fd < 0) {
            result = (errno == EMFILE) ? VK_ERROR_TOO_MANY_OBJECTS
                                       : VK_ERROR_OUT_OF_HOST_MEMORY;
            close(nativeFenceFd);
            return vn_error(dev->instance, result);
         }
      } else if (semaphore != VK_NULL_HANDLE) {
         semaphore_fd = nativeFenceFd;
      } else if (fence != VK_NULL_HANDLE) {
         fence_fd = nativeFenceFd;
      } else {
         close(nativeFenceFd);
      }
   }

   if (semaphore != VK_NULL_HANDLE) {
      const VkImportSemaphoreFdInfoKHR info = {
         .sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR,
         .pNext = NULL,
         .semaphore = semaphore,
         .flags = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT,
         .handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
         .fd = semaphore_fd,
      };
      result = vn_ImportSemaphoreFdKHR(device, &info);
      if (result == VK_SUCCESS)
         semaphore_fd = -1;
   }

   if (result == VK_SUCCESS && fence != VK_NULL_HANDLE) {
      const VkImportFenceFdInfoKHR info = {
         .sType = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR,
         .pNext = NULL,
         .fence = fence,
         .flags = VK_FENCE_IMPORT_TEMPORARY_BIT,
         .handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
         .fd = fence_fd,
      };
      result = vn_ImportFenceFdKHR(device, &info);
      if (result == VK_SUCCESS)
         fence_fd = -1;
   }

   if (semaphore_fd >= 0)
      close(semaphore_fd);
   if (fence_fd >= 0)
      close(fence_fd);

   return vn_result(dev->instance, result);
}

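/* Submit an empty batch that waits on the given semaphores, then hand back a
 * sync fd for the Android loader. Without global fencing, the wait is done on
 * the CPU and -1 is returned as the native fence fd.
 */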
VkResult
vn_QueueSignalReleaseImageANDROID(VkQueue queue,
                                  uint32_t waitSemaphoreCount,
                                  const VkSemaphore *pWaitSemaphores,
                                  VkImage image,
                                  int *pNativeFenceFd)
{
   struct vn_queue *que = vn_queue_from_handle(queue);
   struct vn_device *dev = que->device;
   const VkAllocationCallbacks *alloc = &dev->base.base.alloc;
   VkDevice device = vn_device_to_handle(dev);
   VkPipelineStageFlags local_stage_masks[8];
   VkPipelineStageFlags *stage_masks = local_stage_masks;
   VkResult result = VK_SUCCESS;
   int fd = -1;

   if (waitSemaphoreCount == 0) {
      *pNativeFenceFd = -1;
      return VK_SUCCESS;
   }

   if (waitSemaphoreCount > ARRAY_SIZE(local_stage_masks)) {
      stage_masks =
         vk_alloc(alloc, sizeof(*stage_masks) * waitSemaphoreCount,
                  VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
      if (!stage_masks)
         return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   for (uint32_t i = 0; i < waitSemaphoreCount; i++)
      stage_masks[i] = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;

   const VkSubmitInfo submit_info = {
      .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
      .pNext = NULL,
      .waitSemaphoreCount = waitSemaphoreCount,
      .pWaitSemaphores = pWaitSemaphores,
      .pWaitDstStageMask = stage_masks,
      .commandBufferCount = 0,
      .pCommandBuffers = NULL,
      .signalSemaphoreCount = 0,
      .pSignalSemaphores = NULL,
   };
   /* XXX When globalFencing is supported, our implementation is not able to
    * reset the fence during vn_GetFenceFdKHR currently. Thus to ensure proper
    * host driver behavior, we pass VK_NULL_HANDLE here.
    */
   result = vn_QueueSubmit(
      queue, 1, &submit_info,
      dev->instance->experimental.globalFencing == VK_TRUE ? VK_NULL_HANDLE
                                                           : que->wait_fence);

   if (stage_masks != local_stage_masks)
      vk_free(alloc, stage_masks);

   if (result != VK_SUCCESS)
      return vn_error(dev->instance, result);

   if (dev->instance->experimental.globalFencing == VK_TRUE) {
      const VkFenceGetFdInfoKHR fd_info = {
         .sType = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR,
         .pNext = NULL,
         .fence = que->wait_fence,
         .handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
      };
      result = vn_GetFenceFdKHR(device, &fd_info, &fd);
   } else {
      result =
         vn_WaitForFences(device, 1, &que->wait_fence, VK_TRUE, UINT64_MAX);
      if (result != VK_SUCCESS)
         return vn_error(dev->instance, result);

      result = vn_ResetFences(device, 1, &que->wait_fence);
   }

   if (result != VK_SUCCESS)
      return vn_error(dev->instance, result);

   *pNativeFenceFd = fd;

   return VK_SUCCESS;
}

static VkResult
vn_android_get_ahb_format_properties(
   struct vn_device *dev,
   const struct AHardwareBuffer *ahb,
   VkAndroidHardwareBufferFormatPropertiesANDROID *out_props)
{
   AHardwareBuffer_Desc desc;
   VkFormat format;
   struct vn_android_gralloc_buffer_properties buf_props;
   VkDrmFormatModifierPropertiesEXT mod_props;

   AHardwareBuffer_describe(ahb, &desc);
   if (!(desc.usage & (AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
                       AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
                       AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER))) {
      vn_log(dev->instance,
             "AHB usage(%" PRIu64 ") must include at least one GPU bit",
             desc.usage);
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;
   }

   /* Handle the special AHARDWAREBUFFER_FORMAT_BLOB for VkBuffer case. */
   if (desc.format == AHARDWAREBUFFER_FORMAT_BLOB) {
      out_props->format = VK_FORMAT_UNDEFINED;
      return VK_SUCCESS;
   }

   if (!vn_android_get_gralloc_buffer_properties(
          AHardwareBuffer_getNativeHandle(ahb), &buf_props))
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;

   /* We implement AHB extension support on top of
    * EXT_image_drm_format_modifier, which requires a compatible VkFormat
    * rather than a DRM format. So unless the ahb is intended for backing a
    * VkBuffer, error out early if the format maps to VK_FORMAT_UNDEFINED.
    */
   format = vn_android_drm_format_to_vk_format(buf_props.drm_fourcc);
   if (format == VK_FORMAT_UNDEFINED) {
      vn_log(dev->instance, "Unknown drm_fourcc(%u) from AHB format(0x%X)",
             buf_props.drm_fourcc, desc.format);
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;
   }

   VkResult result = vn_android_get_modifier_properties(
      dev, format, buf_props.modifier, &dev->base.base.alloc, &mod_props);
   if (result != VK_SUCCESS)
      return result;

   /* The spec requires that formatFeatures must include at least one of
    * VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT or
    * VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT.
    */
   const VkFormatFeatureFlags format_features =
      mod_props.drmFormatModifierTilingFeatures |
      VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT;

   /* 11.2.7. Android Hardware Buffer External Memory
    *
    * Implementations may not always be able to determine the color model,
    * numerical range, or chroma offsets of the image contents, so the values
    * in VkAndroidHardwareBufferFormatPropertiesANDROID are only suggestions.
    * Applications should treat these values as sensible defaults to use in the
    * absence of more reliable information obtained through some other means.
    */
   const VkSamplerYcbcrModelConversion model =
      vn_android_drm_format_is_yuv(buf_props.drm_fourcc)
         ? VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601
         : VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY;
   *out_props = (VkAndroidHardwareBufferFormatPropertiesANDROID) {
      .sType = out_props->sType,
      .pNext = out_props->pNext,
      .format = format,
      .externalFormat = buf_props.drm_fourcc,
      .formatFeatures = format_features,
      .samplerYcbcrConversionComponents = {
         .r = VK_COMPONENT_SWIZZLE_IDENTITY,
         .g = VK_COMPONENT_SWIZZLE_IDENTITY,
         .b = VK_COMPONENT_SWIZZLE_IDENTITY,
         .a = VK_COMPONENT_SWIZZLE_IDENTITY,
      },
      .suggestedYcbcrModel = model,
      .suggestedYcbcrRange = VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
      .suggestedXChromaOffset = VK_CHROMA_LOCATION_MIDPOINT,
      .suggestedYChromaOffset = VK_CHROMA_LOCATION_MIDPOINT,
   };

   return VK_SUCCESS;
}

VkResult
vn_GetAndroidHardwareBufferPropertiesANDROID(
   VkDevice device,
   const struct AHardwareBuffer *buffer,
   VkAndroidHardwareBufferPropertiesANDROID *pProperties)
{
   struct vn_device *dev = vn_device_from_handle(device);
   VkResult result = VK_SUCCESS;
   int dma_buf_fd = -1;
   uint64_t alloc_size = 0;
   uint32_t mem_type_bits = 0;

   VkAndroidHardwareBufferFormatPropertiesANDROID *format_props =
      vk_find_struct(pProperties->pNext,
                     ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID);
   if (format_props) {
      result =
         vn_android_get_ahb_format_properties(dev, buffer, format_props);
      if (result != VK_SUCCESS)
         return vn_error(dev->instance, result);
   }

   const native_handle_t *handle = AHardwareBuffer_getNativeHandle(buffer);
   result = vn_android_get_dma_buf_from_native_handle(handle, &dma_buf_fd);
   if (result != VK_SUCCESS)
      return vn_error(dev->instance, result);

   result = vn_get_memory_dma_buf_properties(dev, dma_buf_fd, &alloc_size,
                                             &mem_type_bits);
   if (result != VK_SUCCESS)
      return vn_error(dev->instance, result);

   pProperties->allocationSize = alloc_size;
   pProperties->memoryTypeBits = mem_type_bits;

   return VK_SUCCESS;
}

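/* Thin wrapper over AHardwareBuffer_allocate that logs the raw error code,
 * since the platform maps all gralloc allocation failures to oom.
 */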
static AHardwareBuffer *
vn_android_ahb_allocate(uint32_t width,
                        uint32_t height,
                        uint32_t layers,
                        uint32_t format,
                        uint64_t usage)
{
   AHardwareBuffer *ahb = NULL;
   AHardwareBuffer_Desc desc;
   int ret = 0;

   memset(&desc, 0, sizeof(desc));
   desc.width = width;
   desc.height = height;
   desc.layers = layers;
   desc.format = format;
   desc.usage = usage;

   ret = AHardwareBuffer_allocate(&desc, &ahb);
   if (ret) {
      /* We just log the error code here for now since the platform falsely
       * maps all gralloc allocation failures to oom.
       */
      vn_log(NULL, "AHB alloc(w=%u,h=%u,l=%u,f=%u,u=%" PRIu64 ") failed(%d)",
             width, height, layers, format, usage, ret);
      return NULL;
   }

   return ahb;
}

bool
vn_android_get_drm_format_modifier_info(
   const VkPhysicalDeviceImageFormatInfo2 *format_info,
   VkPhysicalDeviceImageDrmFormatModifierInfoEXT *out_info)
{
   /* To properly fill VkPhysicalDeviceImageDrmFormatModifierInfoEXT, we have
    * to allocate an ahb to retrieve the drm format modifier. For the image
    * sharing mode, we assume VK_SHARING_MODE_EXCLUSIVE for now.
    */
   AHardwareBuffer *ahb = NULL;
   uint32_t format = 0;
   uint64_t usage = 0;
   struct vn_android_gralloc_buffer_properties buf_props;

   assert(format_info->tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT);

   format = vn_android_ahb_format_from_vk_format(format_info->format);
   if (!format)
      return false;

   usage = vn_android_get_ahb_usage(format_info->usage, format_info->flags);
   ahb = vn_android_ahb_allocate(16, 16, 1, format, usage);
   if (!ahb)
      return false;

   if (!vn_android_get_gralloc_buffer_properties(
          AHardwareBuffer_getNativeHandle(ahb), &buf_props)) {
      AHardwareBuffer_release(ahb);
      return false;
   }

   *out_info = (VkPhysicalDeviceImageDrmFormatModifierInfoEXT){
      .sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT,
      .pNext = NULL,
      .drmFormatModifier = buf_props.modifier,
      .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
      .queueFamilyIndexCount = 0,
      .pQueueFamilyIndices = NULL,
   };

   AHardwareBuffer_release(ahb);
   return true;
}

VkResult
vn_android_image_from_ahb(struct vn_device *dev,
                          const VkImageCreateInfo *create_info,
                          const VkAllocationCallbacks *alloc,
                          struct vn_image **out_img)
{
   const VkExternalFormatANDROID *ext_info =
      vk_find_struct_const(create_info->pNext, EXTERNAL_FORMAT_ANDROID);

   VkImageCreateInfo local_info;
   if (ext_info && ext_info->externalFormat) {
      assert(create_info->format == VK_FORMAT_UNDEFINED);
      assert(create_info->imageType == VK_IMAGE_TYPE_2D);
      assert(create_info->usage == VK_IMAGE_USAGE_SAMPLED_BIT);
      assert(create_info->tiling == VK_IMAGE_TILING_OPTIMAL);

      local_info = *create_info;
      local_info.format =
         vn_android_drm_format_to_vk_format(ext_info->externalFormat);
      create_info = &local_info;
   }

   return vn_image_create_deferred(dev, create_info, alloc, out_img);
}

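/* Import an AHardwareBuffer into a vn_device_memory: query the backing
 * dma_buf properties, validate them against the dedicated image or buffer,
 * then import a duplicated dma_buf fd as the device memory.
 */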
VkResult
vn_android_device_import_ahb(struct vn_device *dev,
                             struct vn_device_memory *mem,
                             const VkMemoryAllocateInfo *alloc_info,
                             const VkAllocationCallbacks *alloc,
                             struct AHardwareBuffer *ahb)
{
   VkDevice device = vn_device_to_handle(dev);
   const VkMemoryDedicatedAllocateInfo *dedicated_info =
      vk_find_struct_const(alloc_info->pNext, MEMORY_DEDICATED_ALLOCATE_INFO);
   const native_handle_t *handle = NULL;
   int dma_buf_fd = -1;
   int dup_fd = -1;
   uint64_t alloc_size = 0;
   uint32_t mem_type_bits = 0;
   bool force_unmappable = false;
   VkResult result = VK_SUCCESS;

   handle = AHardwareBuffer_getNativeHandle(ahb);
   result = vn_android_get_dma_buf_from_native_handle(handle, &dma_buf_fd);
   if (result != VK_SUCCESS)
      return result;

   result = vn_get_memory_dma_buf_properties(dev, dma_buf_fd, &alloc_size,
                                             &mem_type_bits);
   if (result != VK_SUCCESS)
      return result;

   if (((1 << alloc_info->memoryTypeIndex) & mem_type_bits) == 0) {
      vn_log(dev->instance, "memoryTypeIndex(%u) mem_type_bits(0x%X)",
             alloc_info->memoryTypeIndex, mem_type_bits);
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;
   }

   /* If ahb is for an image, finish the deferred image creation first */
   if (dedicated_info && dedicated_info->image != VK_NULL_HANDLE) {
      struct vn_image *img = vn_image_from_handle(dedicated_info->image);
      struct vn_android_image_builder builder;

      result = vn_android_get_image_builder(dev, &img->deferred_info->create,
                                            handle, alloc, &builder);
      if (result != VK_SUCCESS)
         return result;

      result = vn_image_init_deferred(dev, &builder.create, img);
      if (result != VK_SUCCESS)
         return result;

      VkMemoryRequirements mem_req;
      vn_GetImageMemoryRequirements(device, dedicated_info->image, &mem_req);
      if (alloc_size < mem_req.size) {
         vn_log(dev->instance,
                "alloc_size(%" PRIu64 ") mem_req.size(%" PRIu64 ")",
                alloc_size, mem_req.size);
         return VK_ERROR_INVALID_EXTERNAL_HANDLE;
      }

      alloc_size = mem_req.size;

      /* XXX Workaround before we use cross-domain backend in minigbm. The
       * blob_mem allocated from virgl backend can have a queried guest mappable
       * size smaller than the size returned from image memory requirement.
       */
      force_unmappable = true;
   }

   if (dedicated_info && dedicated_info->buffer != VK_NULL_HANDLE) {
      VkMemoryRequirements mem_req;
      vn_GetBufferMemoryRequirements(device, dedicated_info->buffer,
                                     &mem_req);
      if (alloc_size < mem_req.size) {
         vn_log(dev->instance,
                "alloc_size(%" PRIu64 ") mem_req.size(%" PRIu64 ")",
                alloc_size, mem_req.size);
         return VK_ERROR_INVALID_EXTERNAL_HANDLE;
      }

      alloc_size = mem_req.size;
   }

   errno = 0;
   dup_fd = os_dupfd_cloexec(dma_buf_fd);
   if (dup_fd < 0)
      return (errno == EMFILE) ? VK_ERROR_TOO_MANY_OBJECTS
                               : VK_ERROR_OUT_OF_HOST_MEMORY;

   /* Spec requires AHB export info to be present, so we must strip it. In
    * practice, the AHB import path here only needs the main allocation info
    * and the dedicated_info.
    */
   VkMemoryDedicatedAllocateInfo local_dedicated_info;
   /* Override when dedicated_info exists and is not the tail struct. */
   if (dedicated_info && dedicated_info->pNext) {
      local_dedicated_info = *dedicated_info;
      local_dedicated_info.pNext = NULL;
      dedicated_info = &local_dedicated_info;
   }
   const VkMemoryAllocateInfo local_alloc_info = {
      .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
      .pNext = dedicated_info,
      .allocationSize = alloc_size,
      .memoryTypeIndex = alloc_info->memoryTypeIndex,
   };
   result = vn_device_memory_import_dma_buf(dev, mem, &local_alloc_info,
                                            force_unmappable, dup_fd);
   if (result != VK_SUCCESS) {
      close(dup_fd);
      return result;
   }

   AHardwareBuffer_acquire(ahb);
   mem->ahb = ahb;

   return VK_SUCCESS;
}

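/* Allocate a fresh AHardwareBuffer matching the allocation request (a BLOB
 * for plain buffers, or the dedicated image's extent, layers and usage) and
 * import it into the vn_device_memory.
 */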
VkResult
vn_android_device_allocate_ahb(struct vn_device *dev,
                               struct vn_device_memory *mem,
                               const VkMemoryAllocateInfo *alloc_info,
                               const VkAllocationCallbacks *alloc)
{
   const VkMemoryDedicatedAllocateInfo *dedicated_info =
      vk_find_struct_const(alloc_info->pNext, MEMORY_DEDICATED_ALLOCATE_INFO);
   uint32_t width = 0;
   uint32_t height = 1;
   uint32_t layers = 1;
   uint32_t format = 0;
   uint64_t usage = 0;
   struct AHardwareBuffer *ahb = NULL;

   if (dedicated_info && dedicated_info->image != VK_NULL_HANDLE) {
      const VkImageCreateInfo *image_info =
         &vn_image_from_handle(dedicated_info->image)->deferred_info->create;
      assert(image_info);
      width = image_info->extent.width;
      height = image_info->extent.height;
      layers = image_info->arrayLayers;
      format = vn_android_ahb_format_from_vk_format(image_info->format);
      usage = vn_android_get_ahb_usage(image_info->usage, image_info->flags);
   } else {
      const VkPhysicalDeviceMemoryProperties *mem_props =
         &dev->physical_device->memory_properties.memoryProperties;

      assert(alloc_info->memoryTypeIndex < mem_props->memoryTypeCount);

      width = alloc_info->allocationSize;
      format = AHARDWAREBUFFER_FORMAT_BLOB;
      usage = AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;
      if (mem_props->memoryTypes[alloc_info->memoryTypeIndex].propertyFlags &
          VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
         usage |= AHARDWAREBUFFER_USAGE_CPU_READ_RARELY |
                  AHARDWAREBUFFER_USAGE_CPU_WRITE_RARELY;
      }
   }

   ahb = vn_android_ahb_allocate(width, height, layers, format, usage);
   if (!ahb)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   VkResult result =
      vn_android_device_import_ahb(dev, mem, alloc_info, alloc, ahb);

   /* The ahb allocation already acquired a ref and the import acquires
    * another, so release one here to avoid a leak.
    */
   AHardwareBuffer_release(ahb);

   return result;
}

void
vn_android_release_ahb(struct AHardwareBuffer *ahb)
{
   AHardwareBuffer_release(ahb);
}

VkResult
vn_GetMemoryAndroidHardwareBufferANDROID(
   VkDevice device,
   const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo,
   struct AHardwareBuffer **pBuffer)
{
   struct vn_device_memory *mem = vn_device_memory_from_handle(pInfo->memory);

   AHardwareBuffer_acquire(mem->ahb);
   *pBuffer = mem->ahb;

   return VK_SUCCESS;
}

struct vn_android_buffer_create_info {
   VkBufferCreateInfo create;
   VkExternalMemoryBufferCreateInfo external;
   VkBufferOpaqueCaptureAddressCreateInfo address;
};

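/* Rebuild the VkBufferCreateInfo pNext chain into local storage, forcing the
 * external memory handle type to dma_buf, which is what backs an AHB.
 */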
static const VkBufferCreateInfo *
vn_android_fix_buffer_create_info(
   const VkBufferCreateInfo *create_info,
   struct vn_android_buffer_create_info *local_info)
{
   local_info->create = *create_info;
   VkBaseOutStructure *dst = (void *)&local_info->create;

   vk_foreach_struct_const(src, create_info->pNext) {
      void *pnext = NULL;
      switch (src->sType) {
      case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO:
         memcpy(&local_info->external, src, sizeof(local_info->external));
         local_info->external.handleTypes =
            VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
         pnext = &local_info->external;
         break;
      case VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO:
         memcpy(&local_info->address, src, sizeof(local_info->address));
         pnext = &local_info->address;
         break;
      default:
         break;
      }

      if (pnext) {
         dst->pNext = pnext;
         dst = pnext;
      }
   }

   dst->pNext = NULL;

   return &local_info->create;
}

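/* Probe the memory type bits usable for AHB-backed buffers by allocating a
 * small throwaway BLOB AHB and querying its dma_buf memory properties. The
 * result is cached on the device for buffer creation.
 */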
VkResult
vn_android_init_ahb_buffer_memory_type_bits(struct vn_device *dev)
{
   const uint32_t format = AHARDWAREBUFFER_FORMAT_BLOB;
   /* ensure dma_buf_memory_type_bits covers host visible usage */
   const uint64_t usage = AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER |
                          AHARDWAREBUFFER_USAGE_CPU_READ_RARELY |
                          AHARDWAREBUFFER_USAGE_CPU_WRITE_RARELY;
   AHardwareBuffer *ahb = NULL;
   int dma_buf_fd = -1;
   uint64_t alloc_size = 0;
   uint32_t mem_type_bits = 0;
   VkResult result;

   ahb = vn_android_ahb_allocate(4096, 1, 1, format, usage);
   if (!ahb)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   result = vn_android_get_dma_buf_from_native_handle(
      AHardwareBuffer_getNativeHandle(ahb), &dma_buf_fd);
   if (result != VK_SUCCESS) {
      AHardwareBuffer_release(ahb);
      return result;
   }

   result = vn_get_memory_dma_buf_properties(dev, dma_buf_fd, &alloc_size,
                                             &mem_type_bits);

   AHardwareBuffer_release(ahb);

   if (result != VK_SUCCESS)
      return result;

   dev->ahb_buffer_memory_type_bits = mem_type_bits;

   return VK_SUCCESS;
}

VkResult
vn_android_buffer_from_ahb(struct vn_device *dev,
                           const VkBufferCreateInfo *create_info,
                           const VkAllocationCallbacks *alloc,
                           struct vn_buffer **out_buf)
{
   struct vn_android_buffer_create_info local_info;
   VkResult result;

   create_info = vn_android_fix_buffer_create_info(create_info, &local_info);
   result = vn_buffer_create(dev, create_info, alloc, out_buf);
   if (result != VK_SUCCESS)
      return result;

   /* An AHB-backed buffer is layered on top of a dma_buf, so here we must
    * combine the type bits queried from the buffer memory requirements with
    * those from the dma_buf fd properties.
    */
   (*out_buf)->memory_requirements.memoryRequirements.memoryTypeBits &=
      dev->ahb_buffer_memory_type_bits;

   assert((*out_buf)->memory_requirements.memoryRequirements.memoryTypeBits);

   return VK_SUCCESS;
}