1 /*
2 * Copyright 2021 Google LLC
3 * SPDX-License-Identifier: MIT
4 *
5 * based in part on anv and radv which are:
6 * Copyright © 2015 Intel Corporation
7 * Copyright © 2016 Red Hat
8 * Copyright © 2016 Bas Nieuwenhuizen
9 */
10
11 #include "vn_android.h"
12
13 #include <dlfcn.h>
14 #include <hardware/gralloc.h>
15 #include <hardware/hwvulkan.h>
16 #include <vndk/hardware_buffer.h>
17 #include <vulkan/vk_icd.h>
18
19 #include "drm-uapi/drm_fourcc.h"
20 #include "util/libsync.h"
21 #include "util/os_file.h"
22
23 #include "vn_buffer.h"
24 #include "vn_device.h"
25 #include "vn_device_memory.h"
26 #include "vn_image.h"
27 #include "vn_instance.h"
28 #include "vn_physical_device.h"
29 #include "vn_queue.h"
30
31 static int
32 vn_hal_open(const struct hw_module_t *mod,
33 const char *id,
34 struct hw_device_t **dev);
35
static void UNUSED
static_asserts(void)
{
   /* The Android loader writes its dispatch magic into the same slot the
    * Vulkan ICD loader uses, so the two magic values must be identical for
    * dispatchable handles to be valid under both loaders.
    */
   STATIC_ASSERT(HWVULKAN_DISPATCH_MAGIC == ICD_LOADER_MAGIC);
}
41
/* Android hwvulkan HAL module descriptor. The Android Vulkan loader looks
 * this symbol up by name (HAL_MODULE_INFO_SYM) when loading the driver.
 */
PUBLIC struct hwvulkan_module_t HAL_MODULE_INFO_SYM = {
   .common = {
      .tag = HARDWARE_MODULE_TAG,
      .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
      .hal_api_version = HARDWARE_HAL_API_VERSION,
      .id = HWVULKAN_HARDWARE_MODULE_ID,
      .name = "Venus Vulkan HAL",
      .author = "Google LLC",
      .methods = &(hw_module_methods_t) {
         .open = vn_hal_open,
      },
   },
};
55
/* gralloc module used for buffer info queries; set in vn_hal_open and
 * released (dlclose) in vn_hal_close or on vn_hal_open failure
 */
static const gralloc_module_t *gralloc = NULL;
57
static int
vn_hal_close(UNUSED struct hw_device_t *dev)
{
   /* drop the reference on the gralloc module acquired in vn_hal_open */
   dlclose(gralloc->common.dso);
   return 0;
}
64
/* The hwvulkan device handed back to the Android loader; exposes only the
 * three global entry points the loader needs to bootstrap an instance.
 */
static hwvulkan_device_t vn_hal_dev = {
   .common = {
      .tag = HARDWARE_DEVICE_TAG,
      .version = HWVULKAN_DEVICE_API_VERSION_0_1,
      .module = &HAL_MODULE_INFO_SYM.common,
      .close = vn_hal_close,
   },
   .EnumerateInstanceExtensionProperties = vn_EnumerateInstanceExtensionProperties,
   .CreateInstance = vn_CreateInstance,
   .GetInstanceProcAddr = vn_GetInstanceProcAddr,
};
76
/* HAL open hook: validates the request, loads the gralloc module used for
 * buffer info queries, and returns the hwvulkan device.
 *
 * Returns 0 on success, a negative error from hw_get_module, or -1 when the
 * gralloc module is not the supported CrOS gralloc.
 */
static int
vn_hal_open(const struct hw_module_t *mod,
            const char *id,
            struct hw_device_t **dev)
{
   static const char CROS_GRALLOC_MODULE_NAME[] = "CrOS Gralloc";

   assert(mod == &HAL_MODULE_INFO_SYM.common);
   assert(strcmp(id, HWVULKAN_DEVICE_0) == 0);

   /* get gralloc module for gralloc buffer info query */
   int ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID,
                           (const hw_module_t **)&gralloc);
   if (ret) {
      if (VN_DEBUG(WSI))
         vn_log(NULL, "failed to open gralloc module(ret=%d)", ret);
      return ret;
   }

   if (VN_DEBUG(WSI))
      vn_log(NULL, "opened gralloc module name: %s", gralloc->common.name);

   /* only CrOS gralloc with a perform hook is usable, since the buffer
    * property query relies on CROS_GRALLOC_DRM_GET_BUFFER_INFO via perform
    */
   if (strcmp(gralloc->common.name, CROS_GRALLOC_MODULE_NAME) != 0 ||
       !gralloc->perform) {
      dlclose(gralloc->common.dso);
      return -1;
   }

   *dev = &vn_hal_dev.common;

   return 0;
}
109
110 static uint32_t
vn_android_ahb_format_from_vk_format(VkFormat format)111 vn_android_ahb_format_from_vk_format(VkFormat format)
112 {
113 switch (format) {
114 case VK_FORMAT_R8G8B8A8_UNORM:
115 return AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
116 case VK_FORMAT_R8G8B8_UNORM:
117 return AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM;
118 case VK_FORMAT_R5G6B5_UNORM_PACK16:
119 return AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
120 case VK_FORMAT_R16G16B16A16_SFLOAT:
121 return AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT;
122 case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
123 return AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM;
124 case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
125 return AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420;
126 default:
127 return 0;
128 }
129 }
130
131 const VkFormat *
vn_android_format_to_view_formats(VkFormat format,uint32_t * out_count)132 vn_android_format_to_view_formats(VkFormat format, uint32_t *out_count)
133 {
134 /* For AHB image prop query and creation, venus overrides the tiling to
135 * VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT, which requires to chain
136 * VkImageFormatListCreateInfo struct in the corresponding pNext when the
137 * VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT is set. Those AHB images are assumed
138 * to be mutable no more than sRGB-ness, and the implementations can fail
139 * whenever going beyond.
140 *
141 * This helper provides the view formats that have sRGB variants for the
142 * image format that venus supports.
143 */
144 static const VkFormat view_formats_r8g8b8a8[] = {
145 VK_FORMAT_R8G8B8A8_UNORM, VK_FORMAT_R8G8B8A8_SRGB
146 };
147 static const VkFormat view_formats_r8g8b8[] = { VK_FORMAT_R8G8B8_UNORM,
148 VK_FORMAT_R8G8B8_SRGB };
149
150 switch (format) {
151 case VK_FORMAT_R8G8B8A8_UNORM:
152 *out_count = ARRAY_SIZE(view_formats_r8g8b8a8);
153 return view_formats_r8g8b8a8;
154 break;
155 case VK_FORMAT_R8G8B8_UNORM:
156 *out_count = ARRAY_SIZE(view_formats_r8g8b8);
157 return view_formats_r8g8b8;
158 break;
159 default:
160 /* let the caller handle the fallback case */
161 *out_count = 0;
162 return NULL;
163 }
164 }
165
166 VkFormat
vn_android_drm_format_to_vk_format(uint32_t format)167 vn_android_drm_format_to_vk_format(uint32_t format)
168 {
169 switch (format) {
170 case DRM_FORMAT_ABGR8888:
171 case DRM_FORMAT_XBGR8888:
172 return VK_FORMAT_R8G8B8A8_UNORM;
173 case DRM_FORMAT_BGR888:
174 return VK_FORMAT_R8G8B8_UNORM;
175 case DRM_FORMAT_RGB565:
176 return VK_FORMAT_R5G6B5_UNORM_PACK16;
177 case DRM_FORMAT_ABGR16161616F:
178 return VK_FORMAT_R16G16B16A16_SFLOAT;
179 case DRM_FORMAT_ABGR2101010:
180 return VK_FORMAT_A2B10G10R10_UNORM_PACK32;
181 case DRM_FORMAT_YVU420:
182 case DRM_FORMAT_NV12:
183 return VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
184 default:
185 return VK_FORMAT_UNDEFINED;
186 }
187 }
188
189 static bool
vn_android_drm_format_is_yuv(uint32_t format)190 vn_android_drm_format_is_yuv(uint32_t format)
191 {
192 assert(vn_android_drm_format_to_vk_format(format) != VK_FORMAT_UNDEFINED);
193
194 switch (format) {
195 case DRM_FORMAT_YVU420:
196 case DRM_FORMAT_NV12:
197 return true;
198 default:
199 return false;
200 }
201 }
202
203 uint64_t
vn_android_get_ahb_usage(const VkImageUsageFlags usage,const VkImageCreateFlags flags)204 vn_android_get_ahb_usage(const VkImageUsageFlags usage,
205 const VkImageCreateFlags flags)
206 {
207 uint64_t ahb_usage = 0;
208 if (usage &
209 (VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT))
210 ahb_usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
211
212 if (usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
213 VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT))
214 ahb_usage |= AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER;
215
216 if (flags & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT)
217 ahb_usage |= AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP;
218
219 if (flags & VK_IMAGE_CREATE_PROTECTED_BIT)
220 ahb_usage |= AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;
221
222 /* must include at least one GPU usage flag */
223 if (ahb_usage == 0)
224 ahb_usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
225
226 return ahb_usage;
227 }
228
/* ANDROID_native_buffer entry point: translate swapchain image usage into
 * gralloc consumer/producer usage bits for the platform to allocate buffers.
 */
VkResult
vn_GetSwapchainGrallocUsage2ANDROID(
   VkDevice device,
   VkFormat format,
   VkImageUsageFlags imageUsage,
   VkSwapchainImageUsageFlagsANDROID swapchainImageUsage,
   uint64_t *grallocConsumerUsage,
   uint64_t *grallocProducerUsage)
{
   struct vn_device *dev = vn_device_from_handle(device);
   *grallocConsumerUsage = 0;
   *grallocProducerUsage = 0;

   /* shared-present swapchain images are not supported */
   if (swapchainImageUsage & VK_SWAPCHAIN_IMAGE_USAGE_SHARED_BIT_ANDROID)
      return vn_error(dev->instance, VK_ERROR_INITIALIZATION_FAILED);

   if (VN_DEBUG(WSI))
      vn_log(dev->instance, "format=%d, imageUsage=0x%x", format, imageUsage);

   /* writes by the GPU map to producer framebuffer usage */
   if (imageUsage & (VK_IMAGE_USAGE_TRANSFER_DST_BIT |
                     VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT))
      *grallocProducerUsage |= AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER;

   /* reads by the GPU map to consumer sampled-image usage */
   if (imageUsage &
       (VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT |
        VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT))
      *grallocConsumerUsage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;

   return VK_SUCCESS;
}
259
/* Layout of the output of CROS_GRALLOC_DRM_GET_BUFFER_INFO; must match the
 * struct defined by CrOS gralloc's perform hook.
 */
struct cros_gralloc0_buffer_info {
   uint32_t drm_fourcc;
   int num_fds; /* ignored */
   int fds[4]; /* ignored */
   uint64_t modifier;
   uint32_t offset[4];
   uint32_t stride[4];
};
268
/* Subset of gralloc buffer info venus cares about: format, modifier, and
 * per-plane layout (up to 4 planes).
 */
struct vn_android_gralloc_buffer_properties {
   uint32_t drm_fourcc;
   uint64_t modifier;
   uint32_t offset[4];
   uint32_t stride[4];
};
275
276 static VkResult
vn_android_get_dma_buf_from_native_handle(const native_handle_t * handle,int * out_dma_buf)277 vn_android_get_dma_buf_from_native_handle(const native_handle_t *handle,
278 int *out_dma_buf)
279 {
280 /* There can be multiple fds wrapped inside a native_handle_t, but we
281 * expect only the 1st one points to the dma_buf. For multi-planar format,
282 * there should only exist one dma_buf as well. The other fd(s) may point
283 * to shared memory used to store buffer metadata or other vendor specific
284 * bits.
285 */
286 if (handle->numFds < 1) {
287 vn_log(NULL, "handle->numFds is %d, expected >= 1", handle->numFds);
288 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
289 }
290
291 if (handle->data[0] < 0) {
292 vn_log(NULL, "handle->data[0] < 0");
293 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
294 }
295
296 *out_dma_buf = handle->data[0];
297 return VK_SUCCESS;
298 }
299
/* Query DRM format/modifier/layout info for a gralloc buffer via the CrOS
 * gralloc perform hook. Returns false on query failure or when the buffer
 * carries an invalid modifier.
 */
static bool
vn_android_get_gralloc_buffer_properties(
   buffer_handle_t handle,
   struct vn_android_gralloc_buffer_properties *out_props)
{
   /* perform op code defined by CrOS gralloc */
   static const int32_t CROS_GRALLOC_DRM_GET_BUFFER_INFO = 4;
   struct cros_gralloc0_buffer_info info;
   if (gralloc->perform(gralloc, CROS_GRALLOC_DRM_GET_BUFFER_INFO, handle,
                        &info) != 0) {
      vn_log(NULL, "CROS_GRALLOC_DRM_GET_BUFFER_INFO failed");
      return false;
   }

   /* an explicit modifier is required for the drm format modifier path */
   if (info.modifier == DRM_FORMAT_MOD_INVALID) {
      vn_log(NULL, "Unexpected DRM_FORMAT_MOD_INVALID");
      return false;
   }

   out_props->drm_fourcc = info.drm_fourcc;
   for (uint32_t i = 0; i < 4; i++) {
      out_props->stride[i] = info.stride[i];
      out_props->offset[i] = info.offset[i];
   }
   out_props->modifier = info.modifier;

   return true;
}
327
/* Look up the VkDrmFormatModifierPropertiesEXT matching the given modifier
 * for a format, using the standard two-call query against the physical
 * device. The alloc callbacks are used only for the temporary array.
 *
 * Returns VK_ERROR_INVALID_EXTERNAL_HANDLE when the format has no modifiers
 * or none matches, VK_ERROR_OUT_OF_HOST_MEMORY on allocation failure.
 */
static VkResult
vn_android_get_modifier_properties(struct vn_device *dev,
                                   VkFormat format,
                                   uint64_t modifier,
                                   const VkAllocationCallbacks *alloc,
                                   VkDrmFormatModifierPropertiesEXT *out_props)
{
   VkPhysicalDevice physical_device =
      vn_physical_device_to_handle(dev->physical_device);
   VkDrmFormatModifierPropertiesListEXT mod_prop_list = {
      .sType = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
      .pNext = NULL,
      .drmFormatModifierCount = 0,
      .pDrmFormatModifierProperties = NULL,
   };
   VkFormatProperties2 format_prop = {
      .sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
      .pNext = &mod_prop_list,
   };
   VkDrmFormatModifierPropertiesEXT *mod_props = NULL;
   bool modifier_found = false;

   /* first call: query the modifier count only */
   vn_GetPhysicalDeviceFormatProperties2(physical_device, format,
                                         &format_prop);

   if (!mod_prop_list.drmFormatModifierCount) {
      vn_log(dev->instance, "No compatible modifier for VkFormat(%u)",
             format);
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;
   }

   mod_props = vk_zalloc(
      alloc, sizeof(*mod_props) * mod_prop_list.drmFormatModifierCount,
      VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
   if (!mod_props)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   /* second call: fill the modifier properties array */
   mod_prop_list.pDrmFormatModifierProperties = mod_props;
   vn_GetPhysicalDeviceFormatProperties2(physical_device, format,
                                         &format_prop);

   for (uint32_t i = 0; i < mod_prop_list.drmFormatModifierCount; i++) {
      if (mod_props[i].drmFormatModifier == modifier) {
         *out_props = mod_props[i];
         modifier_found = true;
         break;
      }
   }

   vk_free(alloc, mod_props);

   if (!modifier_found) {
      vn_log(dev->instance,
             "No matching modifier(%" PRIu64 ") properties for VkFormat(%u)",
             modifier, format);
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;
   }

   return VK_SUCCESS;
}
388
/* Storage backing the pNext chain built by vn_android_get_image_builder.
 * The struct must outlive the vkCreateImage call since `create.pNext`
 * points into the other members.
 */
struct vn_android_image_builder {
   VkImageCreateInfo create;
   VkSubresourceLayout layouts[4];
   VkImageDrmFormatModifierExplicitCreateInfoEXT modifier;
   VkExternalMemoryImageCreateInfo external;
   VkImageFormatListCreateInfo list;
};
396
/* Build a VkImageCreateInfo (plus chained structs) for a gralloc-backed
 * image: override tiling to DRM format modifier, attach the explicit plane
 * layouts queried from gralloc, mark the image as dma_buf external, and add
 * a view format list when mutable format is requested.
 *
 * out_builder owns the chained structs, so it must stay alive until after
 * the image is created.
 */
static VkResult
vn_android_get_image_builder(struct vn_device *dev,
                             const VkImageCreateInfo *create_info,
                             const native_handle_t *handle,
                             const VkAllocationCallbacks *alloc,
                             struct vn_android_image_builder *out_builder)
{
   VkResult result = VK_SUCCESS;
   struct vn_android_gralloc_buffer_properties buf_props;
   VkDrmFormatModifierPropertiesEXT mod_props;
   uint32_t vcount = 0;
   const VkFormat *vformats = NULL;

   /* Android image builder is only used by ANB or AHB. For ANB, Android
    * Vulkan loader will never pass the below structs. For AHB, struct
    * vn_image_create_deferred_info will never carry below either.
    */
   assert(!vk_find_struct_const(
      create_info->pNext,
      IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT));
   assert(!vk_find_struct_const(create_info->pNext,
                                EXTERNAL_MEMORY_IMAGE_CREATE_INFO));

   if (!vn_android_get_gralloc_buffer_properties(handle, &buf_props))
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;

   /* needed for the plane count matching the buffer's modifier */
   result = vn_android_get_modifier_properties(
      dev, create_info->format, buf_props.modifier, alloc, &mod_props);
   if (result != VK_SUCCESS)
      return result;

   /* fill VkImageCreateInfo */
   memset(out_builder, 0, sizeof(*out_builder));
   out_builder->create = *create_info;
   out_builder->create.tiling = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT;

   /* fill VkImageDrmFormatModifierExplicitCreateInfoEXT */
   for (uint32_t i = 0; i < mod_props.drmFormatModifierPlaneCount; i++) {
      out_builder->layouts[i].offset = buf_props.offset[i];
      out_builder->layouts[i].rowPitch = buf_props.stride[i];
   }
   out_builder->modifier = (VkImageDrmFormatModifierExplicitCreateInfoEXT){
      .sType =
         VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT,
      .pNext = out_builder->create.pNext,
      .drmFormatModifier = buf_props.modifier,
      .drmFormatModifierPlaneCount = mod_props.drmFormatModifierPlaneCount,
      .pPlaneLayouts = out_builder->layouts,
   };
   out_builder->create.pNext = &out_builder->modifier;

   /* fill VkExternalMemoryImageCreateInfo */
   out_builder->external = (VkExternalMemoryImageCreateInfo){
      .sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
      .pNext = out_builder->create.pNext,
      .handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
   };
   out_builder->create.pNext = &out_builder->external;

   /* fill VkImageFormatListCreateInfo if needed
    *
    * vn_image::deferred_info only stores VkImageFormatListCreateInfo with a
    * non-zero viewFormatCount, and that stored struct will be respected.
    */
   if ((create_info->flags & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT) &&
       !vk_find_struct_const(create_info->pNext,
                             IMAGE_FORMAT_LIST_CREATE_INFO)) {
      /* 12.3. Images
       *
       * If tiling is VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT and flags
       * contains VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT, then the pNext chain
       * must include a VkImageFormatListCreateInfo structure with non-zero
       * viewFormatCount.
       */
      vformats =
         vn_android_format_to_view_formats(create_info->format, &vcount);
      if (!vformats) {
         /* image builder struct persists through the image creation call */
         vformats = &out_builder->create.format;
         vcount = 1;
      }
      out_builder->list = (VkImageFormatListCreateInfo){
         .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO,
         .pNext = out_builder->create.pNext,
         .viewFormatCount = vcount,
         .pViewFormats = vformats,
      };
      out_builder->create.pNext = &out_builder->list;
   }

   return VK_SUCCESS;
}
489
/* Create a vn_image backed by an Android native buffer (ANB): build the
 * DRM-modifier image create info from gralloc properties, create the image,
 * import the buffer's dma_buf as device memory, and bind them.
 *
 * On success *out_img owns the imported memory; on failure the partially
 * created image/memory are destroyed before returning.
 */
VkResult
vn_android_image_from_anb(struct vn_device *dev,
                          const VkImageCreateInfo *create_info,
                          const VkNativeBufferANDROID *anb_info,
                          const VkAllocationCallbacks *alloc,
                          struct vn_image **out_img)
{
   /* If anb_info->handle points to a classic resource created from
    * virtio_gpu_cmd_resource_create_3d, anb_info->stride is the stride of the
    * guest shadow storage other than the host gpu storage.
    *
    * We also need to pass the correct stride to vn_CreateImage, which will be
    * done via VkImageDrmFormatModifierExplicitCreateInfoEXT and will require
    * VK_EXT_image_drm_format_modifier support in the host driver. The struct
    * needs host storage info which can be queried from cros gralloc.
    */
   VkResult result = VK_SUCCESS;
   VkDevice device = vn_device_to_handle(dev);
   VkDeviceMemory memory = VK_NULL_HANDLE;
   VkImage image = VK_NULL_HANDLE;
   struct vn_image *img = NULL;
   uint64_t alloc_size = 0;
   uint32_t mem_type_bits = 0;
   int dma_buf_fd = -1; /* borrowed from the native handle; not closed here */
   int dup_fd = -1; /* ownership passes to the memory import on success */
   VkImageCreateInfo local_create_info;
   struct vn_android_image_builder builder;

   result = vn_android_get_dma_buf_from_native_handle(anb_info->handle,
                                                      &dma_buf_fd);
   if (result != VK_SUCCESS)
      goto fail;

   /* the Android loader is not expected to pass these for ANB images */
   assert(!(create_info->flags & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT));
   assert(!vk_find_struct_const(create_info->pNext,
                                IMAGE_FORMAT_LIST_CREATE_INFO));
   assert(!vk_find_struct_const(create_info->pNext,
                                IMAGE_STENCIL_USAGE_CREATE_INFO));

   /* strip VkNativeBufferANDROID and VkSwapchainImageCreateInfoANDROID */
   local_create_info = *create_info;
   local_create_info.pNext = NULL;
   result = vn_android_get_image_builder(dev, &local_create_info,
                                         anb_info->handle, alloc, &builder);
   if (result != VK_SUCCESS)
      goto fail;

   /* encoder will strip the Android specific pNext structs */
   result = vn_image_create(dev, &builder.create, alloc, &img);
   if (result != VK_SUCCESS) {
      if (VN_DEBUG(WSI))
         vn_log(dev->instance, "vn_image_create failed");
      goto fail;
   }

   image = vn_image_to_handle(img);

   const VkMemoryRequirements *mem_req =
      &img->requirements[0].memory.memoryRequirements;
   if (!mem_req->memoryTypeBits) {
      if (VN_DEBUG(WSI))
         vn_log(dev->instance, "mem_req->memoryTypeBits cannot be zero");
      result = VK_ERROR_INVALID_EXTERNAL_HANDLE;
      goto fail;
   }

   result = vn_get_memory_dma_buf_properties(dev, dma_buf_fd, &alloc_size,
                                             &mem_type_bits);
   if (result != VK_SUCCESS)
      goto fail;

   if (VN_DEBUG(WSI)) {
      vn_log(dev->instance,
             "size = img(%" PRIu64 ") fd(%" PRIu64 "), "
             "memoryTypeBits = img(0x%X) & fd(0x%X)",
             mem_req->size, alloc_size, mem_req->memoryTypeBits,
             mem_type_bits);
   }

   /* the dma_buf must be large enough to back the image */
   if (alloc_size < mem_req->size) {
      if (VN_DEBUG(WSI)) {
         vn_log(dev->instance,
                "alloc_size(%" PRIu64 ") mem_req->size(%" PRIu64 ")",
                alloc_size, mem_req->size);
      }
      result = VK_ERROR_INVALID_EXTERNAL_HANDLE;
      goto fail;
   }

   /* the memory type must satisfy both the image and the dma_buf */
   mem_type_bits &= mem_req->memoryTypeBits;
   if (!mem_type_bits) {
      result = VK_ERROR_INVALID_EXTERNAL_HANDLE;
      goto fail;
   }

   /* dup since the import consumes the fd while the handle keeps its own */
   dup_fd = os_dupfd_cloexec(dma_buf_fd);
   if (dup_fd < 0) {
      result = (errno == EMFILE) ? VK_ERROR_TOO_MANY_OBJECTS
                                 : VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }

   const VkImportMemoryFdInfoKHR import_fd_info = {
      .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
      .pNext = NULL,
      .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
      .fd = dup_fd,
   };
   const VkMemoryAllocateInfo memory_info = {
      .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
      .pNext = &import_fd_info,
      .allocationSize = mem_req->size,
      .memoryTypeIndex = ffs(mem_type_bits) - 1,
   };
   result = vn_AllocateMemory(device, &memory_info, alloc, &memory);
   if (result != VK_SUCCESS) {
      /* only need to close the dup_fd on import failure */
      close(dup_fd);
      goto fail;
   }

   const VkBindImageMemoryInfo bind_info = {
      .sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
      .pNext = NULL,
      .image = image,
      .memory = memory,
      .memoryOffset = 0,
   };
   result = vn_BindImageMemory2(device, 1, &bind_info);
   if (result != VK_SUCCESS)
      goto fail;

   img->wsi.is_wsi = true;
   img->wsi.tiling_override = builder.create.tiling;
   img->wsi.drm_format_modifier = builder.modifier.drmFormatModifier;
   /* Android WSI image owns the memory */
   img->wsi.memory = vn_device_memory_from_handle(memory);
   img->wsi.memory_owned = true;
   *out_img = img;

   return VK_SUCCESS;

fail:
   if (image != VK_NULL_HANDLE)
      vn_DestroyImage(device, image, alloc);
   if (memory != VK_NULL_HANDLE)
      vn_FreeMemory(device, memory, alloc);
   return vn_error(dev->instance, result);
}
639
/* ANDROID_native_buffer entry point: associate the acquire fence fd from the
 * platform with the app-provided semaphore and/or fence.
 *
 * fd ownership: the Android loader expects the ICD to consume nativeFenceFd
 * on all paths. When both a semaphore and a fence are given, the fd is duped
 * so each import gets its own fd.
 */
VkResult
vn_AcquireImageANDROID(VkDevice device,
                       UNUSED VkImage image,
                       int nativeFenceFd,
                       VkSemaphore semaphore,
                       VkFence fence)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   VkResult result = VK_SUCCESS;

   if (dev->instance->experimental.globalFencing == VK_FALSE) {
      /* Fallback when VkVenusExperimentalFeatures100000MESA::globalFencing is
       * VK_FALSE, out semaphore and fence are filled with already signaled
       * payloads, and the native fence fd is waited inside until signaled.
       */
      if (nativeFenceFd >= 0) {
         int ret = sync_wait(nativeFenceFd, -1);
         /* Android loader expects the ICD to always close the fd */
         close(nativeFenceFd);
         if (ret)
            return vn_error(dev->instance, VK_ERROR_SURFACE_LOST_KHR);
      }

      if (semaphore != VK_NULL_HANDLE)
         vn_semaphore_signal_wsi(dev, vn_semaphore_from_handle(semaphore));

      if (fence != VK_NULL_HANDLE)
         vn_fence_signal_wsi(dev, vn_fence_from_handle(fence));

      return VK_SUCCESS;
   }

   /* distribute the fd (duping when needed) between semaphore and fence */
   int semaphore_fd = -1;
   int fence_fd = -1;
   if (nativeFenceFd >= 0) {
      if (semaphore != VK_NULL_HANDLE && fence != VK_NULL_HANDLE) {
         semaphore_fd = nativeFenceFd;
         fence_fd = os_dupfd_cloexec(nativeFenceFd);
         if (fence_fd < 0) {
            result = (errno == EMFILE) ? VK_ERROR_TOO_MANY_OBJECTS
                                       : VK_ERROR_OUT_OF_HOST_MEMORY;
            close(nativeFenceFd);
            return vn_error(dev->instance, result);
         }
      } else if (semaphore != VK_NULL_HANDLE) {
         semaphore_fd = nativeFenceFd;
      } else if (fence != VK_NULL_HANDLE) {
         fence_fd = nativeFenceFd;
      } else {
         /* nothing to import into; just consume the fd */
         close(nativeFenceFd);
      }
   }

   if (semaphore != VK_NULL_HANDLE) {
      const VkImportSemaphoreFdInfoKHR info = {
         .sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR,
         .pNext = NULL,
         .semaphore = semaphore,
         .flags = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT,
         .handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
         .fd = semaphore_fd,
      };
      result = vn_ImportSemaphoreFdKHR(device, &info);
      if (result == VK_SUCCESS)
         semaphore_fd = -1; /* import took ownership */
   }

   if (result == VK_SUCCESS && fence != VK_NULL_HANDLE) {
      const VkImportFenceFdInfoKHR info = {
         .sType = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR,
         .pNext = NULL,
         .fence = fence,
         .flags = VK_FENCE_IMPORT_TEMPORARY_BIT,
         .handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
         .fd = fence_fd,
      };
      result = vn_ImportFenceFdKHR(device, &info);
      if (result == VK_SUCCESS)
         fence_fd = -1; /* import took ownership */
   }

   /* close whatever the imports did not consume */
   if (semaphore_fd >= 0)
      close(semaphore_fd);
   if (fence_fd >= 0)
      close(fence_fd);

   return vn_result(dev->instance, result);
}
729
/* ANDROID_native_buffer entry point: submit an empty batch waiting on the
 * given semaphores and return a sync fd (or -1) signaling their completion.
 *
 * *pNativeFenceFd ownership transfers to the caller; -1 means "already
 * signaled" per the extension.
 */
VkResult
vn_QueueSignalReleaseImageANDROID(VkQueue queue,
                                  uint32_t waitSemaphoreCount,
                                  const VkSemaphore *pWaitSemaphores,
                                  VkImage image,
                                  int *pNativeFenceFd)
{
   VN_TRACE_FUNC();
   struct vn_queue *que = vn_queue_from_handle(queue);
   struct vn_device *dev = que->device;
   const VkAllocationCallbacks *alloc = &dev->base.base.alloc;
   VkDevice device = vn_device_to_handle(dev);
   VkPipelineStageFlags local_stage_masks[8];
   VkPipelineStageFlags *stage_masks = local_stage_masks;
   VkResult result = VK_SUCCESS;
   int fd = -1;

   /* nothing to wait on: report an already-signaled (-1) fence fd */
   if (waitSemaphoreCount == 0) {
      *pNativeFenceFd = -1;
      return VK_SUCCESS;
   }

   /* spill to the heap only when the stack array is too small */
   if (waitSemaphoreCount > ARRAY_SIZE(local_stage_masks)) {
      stage_masks =
         vk_alloc(alloc, sizeof(*stage_masks) * waitSemaphoreCount,
                  VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
      if (!stage_masks)
         return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   for (uint32_t i = 0; i < waitSemaphoreCount; i++)
      stage_masks[i] = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;

   const VkSubmitInfo submit_info = {
      .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
      .pNext = NULL,
      .waitSemaphoreCount = waitSemaphoreCount,
      .pWaitSemaphores = pWaitSemaphores,
      .pWaitDstStageMask = stage_masks,
      .commandBufferCount = 0,
      .pCommandBuffers = NULL,
      .signalSemaphoreCount = 0,
      .pSignalSemaphores = NULL,
   };
   /* XXX When globalFencing is supported, our implementation is not able to
    * reset the fence during vn_GetFenceFdKHR currently. Thus to ensure proper
    * host driver behavior, we pass VK_NULL_HANDLE here.
    */
   result = vn_QueueSubmit(
      queue, 1, &submit_info,
      dev->instance->experimental.globalFencing == VK_TRUE ? VK_NULL_HANDLE
                                                           : que->wait_fence);

   if (stage_masks != local_stage_masks)
      vk_free(alloc, stage_masks);

   if (result != VK_SUCCESS)
      return vn_error(dev->instance, result);

   if (dev->instance->experimental.globalFencing == VK_TRUE) {
      /* XXX With globalFencing, the external queue fence was not passed in the
       * above vn_QueueSubmit to hint it to be synchronous. So we need to wait
       * for the ring here before vn_GetFenceFdKHR which is pure kernel ops.
       */
      vn_instance_ring_wait(dev->instance);

      const VkFenceGetFdInfoKHR fd_info = {
         .sType = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR,
         .pNext = NULL,
         .fence = que->wait_fence,
         .handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
      };
      result = vn_GetFenceFdKHR(device, &fd_info, &fd);
   } else {
      /* no sync fd export path: wait synchronously and leave fd at -1 */
      result =
         vn_WaitForFences(device, 1, &que->wait_fence, VK_TRUE, UINT64_MAX);
      if (result != VK_SUCCESS)
         return vn_error(dev->instance, result);

      result = vn_ResetFences(device, 1, &que->wait_fence);
   }

   if (result != VK_SUCCESS)
      return vn_error(dev->instance, result);

   *pNativeFenceFd = fd;

   return VK_SUCCESS;
}
819
/* Fill VkAndroidHardwareBufferFormatPropertiesANDROID for an AHB: map its
 * DRM fourcc to a VkFormat, look up the modifier's format features, and
 * supply suggested YCbCr conversion defaults.
 *
 * For AHARDWAREBUFFER_FORMAT_BLOB only `format` is set (VK_FORMAT_UNDEFINED)
 * since such buffers back VkBuffers, not images.
 */
static VkResult
vn_android_get_ahb_format_properties(
   struct vn_device *dev,
   const struct AHardwareBuffer *ahb,
   VkAndroidHardwareBufferFormatPropertiesANDROID *out_props)
{
   AHardwareBuffer_Desc desc;
   VkFormat format;
   struct vn_android_gralloc_buffer_properties buf_props;
   VkDrmFormatModifierPropertiesEXT mod_props;

   AHardwareBuffer_describe(ahb, &desc);
   if (!(desc.usage & (AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
                       AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
                       AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER))) {
      vn_log(dev->instance,
             "AHB usage(%" PRIu64 ") must include at least one GPU bit",
             desc.usage);
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;
   }

   /* Handle the special AHARDWAREBUFFER_FORMAT_BLOB for VkBuffer case. */
   if (desc.format == AHARDWAREBUFFER_FORMAT_BLOB) {
      out_props->format = VK_FORMAT_UNDEFINED;
      return VK_SUCCESS;
   }

   if (!vn_android_get_gralloc_buffer_properties(
          AHardwareBuffer_getNativeHandle(ahb), &buf_props))
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;

   /* We implement AHB extension support with EXT_image_drm_format_modifier.
    * It requires us to have a compatible VkFormat but not DRM formats. So if
    * the ahb is not intended for backing a VkBuffer, error out early if the
    * format is VK_FORMAT_UNDEFINED.
    */
   format = vn_android_drm_format_to_vk_format(buf_props.drm_fourcc);
   if (format == VK_FORMAT_UNDEFINED) {
      vn_log(dev->instance, "Unknown drm_fourcc(%u) from AHB format(0x%X)",
             buf_props.drm_fourcc, desc.format);
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;
   }

   VkResult result = vn_android_get_modifier_properties(
      dev, format, buf_props.modifier, &dev->base.base.alloc, &mod_props);
   if (result != VK_SUCCESS)
      return result;

   /* The spec requires that formatFeatures must include at least one of
    * VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT or
    * VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT.
    */
   const VkFormatFeatureFlags format_features =
      mod_props.drmFormatModifierTilingFeatures |
      VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT;

   /* 11.2.7. Android Hardware Buffer External Memory
    *
    * Implementations may not always be able to determine the color model,
    * numerical range, or chroma offsets of the image contents, so the values
    * in VkAndroidHardwareBufferFormatPropertiesANDROID are only suggestions.
    * Applications should treat these values as sensible defaults to use in the
    * absence of more reliable information obtained through some other means.
    */
   const VkSamplerYcbcrModelConversion model =
      vn_android_drm_format_is_yuv(buf_props.drm_fourcc)
         ? VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601
         : VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY;
   /* preserve the caller's sType/pNext while rewriting everything else */
   *out_props = (VkAndroidHardwareBufferFormatPropertiesANDROID) {
      .sType = out_props->sType,
      .pNext = out_props->pNext,
      .format = format,
      .externalFormat = buf_props.drm_fourcc,
      .formatFeatures = format_features,
      .samplerYcbcrConversionComponents = {
         .r = VK_COMPONENT_SWIZZLE_IDENTITY,
         .g = VK_COMPONENT_SWIZZLE_IDENTITY,
         .b = VK_COMPONENT_SWIZZLE_IDENTITY,
         .a = VK_COMPONENT_SWIZZLE_IDENTITY,
      },
      .suggestedYcbcrModel = model,
      .suggestedYcbcrRange = VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
      .suggestedXChromaOffset = VK_CHROMA_LOCATION_MIDPOINT,
      .suggestedYChromaOffset = VK_CHROMA_LOCATION_MIDPOINT,
   };

   return VK_SUCCESS;
}
908
/* VK_ANDROID_external_memory_android_hardware_buffer entry point: report
 * allocation size and memory type bits for an AHB (plus format properties
 * when chained in pProperties->pNext).
 */
VkResult
vn_GetAndroidHardwareBufferPropertiesANDROID(
   VkDevice device,
   const struct AHardwareBuffer *buffer,
   VkAndroidHardwareBufferPropertiesANDROID *pProperties)
{
   struct vn_device *dev = vn_device_from_handle(device);
   VkResult result = VK_SUCCESS;
   int dma_buf_fd = -1; /* borrowed from the AHB native handle */
   uint64_t alloc_size = 0;
   uint32_t mem_type_bits = 0;

   /* fill in format props only when the caller chained the struct */
   VkAndroidHardwareBufferFormatPropertiesANDROID *format_props =
      vk_find_struct(pProperties->pNext,
                     ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID);
   if (format_props) {
      result =
         vn_android_get_ahb_format_properties(dev, buffer, format_props);
      if (result != VK_SUCCESS)
         return vn_error(dev->instance, result);
   }

   const native_handle_t *handle = AHardwareBuffer_getNativeHandle(buffer);
   result = vn_android_get_dma_buf_from_native_handle(handle, &dma_buf_fd);
   if (result != VK_SUCCESS)
      return vn_error(dev->instance, result);

   result = vn_get_memory_dma_buf_properties(dev, dma_buf_fd, &alloc_size,
                                             &mem_type_bits);
   if (result != VK_SUCCESS)
      return vn_error(dev->instance, result);

   pProperties->allocationSize = alloc_size;
   pProperties->memoryTypeBits = mem_type_bits;

   return VK_SUCCESS;
}
946
947 static AHardwareBuffer *
vn_android_ahb_allocate(uint32_t width,uint32_t height,uint32_t layers,uint32_t format,uint64_t usage)948 vn_android_ahb_allocate(uint32_t width,
949 uint32_t height,
950 uint32_t layers,
951 uint32_t format,
952 uint64_t usage)
953 {
954 AHardwareBuffer *ahb = NULL;
955 AHardwareBuffer_Desc desc;
956 int ret = 0;
957
958 memset(&desc, 0, sizeof(desc));
959 desc.width = width;
960 desc.height = height;
961 desc.layers = layers;
962 desc.format = format;
963 desc.usage = usage;
964
965 ret = AHardwareBuffer_allocate(&desc, &ahb);
966 if (ret) {
967 /* We just log the error code here for now since the platform falsely
968 * maps all gralloc allocation failures to oom.
969 */
970 vn_log(NULL, "AHB alloc(w=%u,h=%u,l=%u,f=%u,u=%" PRIu64 ") failed(%d)",
971 width, height, layers, format, usage, ret);
972 return NULL;
973 }
974
975 return ahb;
976 }
977
978 bool
vn_android_get_drm_format_modifier_info(const VkPhysicalDeviceImageFormatInfo2 * format_info,VkPhysicalDeviceImageDrmFormatModifierInfoEXT * out_info)979 vn_android_get_drm_format_modifier_info(
980 const VkPhysicalDeviceImageFormatInfo2 *format_info,
981 VkPhysicalDeviceImageDrmFormatModifierInfoEXT *out_info)
982 {
983 /* To properly fill VkPhysicalDeviceImageDrmFormatModifierInfoEXT, we have
984 * to allocate an ahb to retrieve the drm format modifier. For the image
985 * sharing mode, we assume VK_SHARING_MODE_EXCLUSIVE for now.
986 */
987 AHardwareBuffer *ahb = NULL;
988 uint32_t format = 0;
989 uint64_t usage = 0;
990 struct vn_android_gralloc_buffer_properties buf_props;
991
992 assert(format_info->tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT);
993
994 format = vn_android_ahb_format_from_vk_format(format_info->format);
995 if (!format)
996 return false;
997
998 usage = vn_android_get_ahb_usage(format_info->usage, format_info->flags);
999 ahb = vn_android_ahb_allocate(16, 16, 1, format, usage);
1000 if (!ahb)
1001 return false;
1002
1003 if (!vn_android_get_gralloc_buffer_properties(
1004 AHardwareBuffer_getNativeHandle(ahb), &buf_props)) {
1005 AHardwareBuffer_release(ahb);
1006 return false;
1007 }
1008
1009 *out_info = (VkPhysicalDeviceImageDrmFormatModifierInfoEXT){
1010 .sType =
1011 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT,
1012 .pNext = NULL,
1013 .drmFormatModifier = buf_props.modifier,
1014 .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
1015 .queueFamilyIndexCount = 0,
1016 .pQueueFamilyIndices = NULL,
1017 };
1018
1019 AHardwareBuffer_release(ahb);
1020 return true;
1021 }
1022
1023 VkResult
vn_android_image_from_ahb(struct vn_device * dev,const VkImageCreateInfo * create_info,const VkAllocationCallbacks * alloc,struct vn_image ** out_img)1024 vn_android_image_from_ahb(struct vn_device *dev,
1025 const VkImageCreateInfo *create_info,
1026 const VkAllocationCallbacks *alloc,
1027 struct vn_image **out_img)
1028 {
1029 const VkExternalFormatANDROID *ext_info =
1030 vk_find_struct_const(create_info->pNext, EXTERNAL_FORMAT_ANDROID);
1031
1032 VkImageCreateInfo local_info;
1033 if (ext_info && ext_info->externalFormat) {
1034 assert(create_info->format == VK_FORMAT_UNDEFINED);
1035 assert(create_info->imageType == VK_IMAGE_TYPE_2D);
1036 assert(create_info->usage == VK_IMAGE_USAGE_SAMPLED_BIT);
1037 assert(create_info->tiling == VK_IMAGE_TILING_OPTIMAL);
1038 assert(!(create_info->flags & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT));
1039
1040 local_info = *create_info;
1041 local_info.format =
1042 vn_android_drm_format_to_vk_format(ext_info->externalFormat);
1043 create_info = &local_info;
1044 }
1045
1046 return vn_image_create_deferred(dev, create_info, alloc, out_img);
1047 }
1048
1049 VkResult
vn_android_device_import_ahb(struct vn_device * dev,struct vn_device_memory * mem,const VkMemoryAllocateInfo * alloc_info,const VkAllocationCallbacks * alloc,struct AHardwareBuffer * ahb,bool internal_ahb)1050 vn_android_device_import_ahb(struct vn_device *dev,
1051 struct vn_device_memory *mem,
1052 const VkMemoryAllocateInfo *alloc_info,
1053 const VkAllocationCallbacks *alloc,
1054 struct AHardwareBuffer *ahb,
1055 bool internal_ahb)
1056 {
1057 const VkMemoryDedicatedAllocateInfo *dedicated_info =
1058 vk_find_struct_const(alloc_info->pNext, MEMORY_DEDICATED_ALLOCATE_INFO);
1059 const native_handle_t *handle = NULL;
1060 int dma_buf_fd = -1;
1061 int dup_fd = -1;
1062 uint64_t alloc_size = 0;
1063 uint32_t mem_type_bits = 0;
1064 uint32_t mem_type_index = alloc_info->memoryTypeIndex;
1065 bool force_unmappable = false;
1066 VkResult result = VK_SUCCESS;
1067
1068 handle = AHardwareBuffer_getNativeHandle(ahb);
1069 result = vn_android_get_dma_buf_from_native_handle(handle, &dma_buf_fd);
1070 if (result != VK_SUCCESS)
1071 return result;
1072
1073 result = vn_get_memory_dma_buf_properties(dev, dma_buf_fd, &alloc_size,
1074 &mem_type_bits);
1075 if (result != VK_SUCCESS)
1076 return result;
1077
1078 /* If ahb is for an image, finish the deferred image creation first */
1079 if (dedicated_info && dedicated_info->image != VK_NULL_HANDLE) {
1080 struct vn_image *img = vn_image_from_handle(dedicated_info->image);
1081 struct vn_android_image_builder builder;
1082
1083 result = vn_android_get_image_builder(dev, &img->deferred_info->create,
1084 handle, alloc, &builder);
1085 if (result != VK_SUCCESS)
1086 return result;
1087
1088 result = vn_image_init_deferred(dev, &builder.create, img);
1089 if (result != VK_SUCCESS)
1090 return result;
1091
1092 const VkMemoryRequirements *mem_req =
1093 &img->requirements[0].memory.memoryRequirements;
1094 if (alloc_size < mem_req->size) {
1095 vn_log(dev->instance,
1096 "alloc_size(%" PRIu64 ") mem_req->size(%" PRIu64 ")",
1097 alloc_size, mem_req->size);
1098 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1099 }
1100
1101 alloc_size = mem_req->size;
1102
1103 /* XXX Workaround before spec issue #2762 gets resolved. If importing an
1104 * internally allocated AHB from the exportable path, memoryTypeIndex is
1105 * undefined while defaulting to zero, which can be incompatible with
1106 * the queried memoryTypeBits from the combined memory requirement and
1107 * dma_buf fd properties. Thus we override the requested memoryTypeIndex
1108 * to an applicable one if existed.
1109 */
1110 if (internal_ahb) {
1111 if ((mem_type_bits & mem_req->memoryTypeBits) == 0) {
1112 vn_log(dev->instance, "memoryTypeBits: img(0x%X) fd(0x%X)",
1113 mem_req->memoryTypeBits, mem_type_bits);
1114 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1115 }
1116
1117 mem_type_index = ffs(mem_type_bits & mem_req->memoryTypeBits) - 1;
1118 }
1119
1120 /* XXX Workaround before we use cross-domain backend in minigbm. The
1121 * blob_mem allocated from virgl backend can have a queried guest
1122 * mappable size smaller than the size returned from image memory
1123 * requirement.
1124 */
1125 force_unmappable = true;
1126 }
1127
1128 if (dedicated_info && dedicated_info->buffer != VK_NULL_HANDLE) {
1129 struct vn_buffer *buf = vn_buffer_from_handle(dedicated_info->buffer);
1130 const VkMemoryRequirements *mem_req =
1131 &buf->requirements.memory.memoryRequirements;
1132 if (alloc_size < mem_req->size) {
1133 vn_log(dev->instance,
1134 "alloc_size(%" PRIu64 ") mem_req->size(%" PRIu64 ")",
1135 alloc_size, mem_req->size);
1136 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1137 }
1138
1139 alloc_size = mem_req->size;
1140
1141 assert((1 << mem_type_index) & mem_req->memoryTypeBits);
1142 }
1143
1144 assert((1 << mem_type_index) & mem_type_bits);
1145
1146 errno = 0;
1147 dup_fd = os_dupfd_cloexec(dma_buf_fd);
1148 if (dup_fd < 0)
1149 return (errno == EMFILE) ? VK_ERROR_TOO_MANY_OBJECTS
1150 : VK_ERROR_OUT_OF_HOST_MEMORY;
1151
1152 /* Spec requires AHB export info to be present, so we must strip it. In
1153 * practice, the AHB import path here only needs the main allocation info
1154 * and the dedicated_info.
1155 */
1156 VkMemoryDedicatedAllocateInfo local_dedicated_info;
1157 /* Override when dedicated_info exists and is not the tail struct. */
1158 if (dedicated_info && dedicated_info->pNext) {
1159 local_dedicated_info = *dedicated_info;
1160 local_dedicated_info.pNext = NULL;
1161 dedicated_info = &local_dedicated_info;
1162 }
1163 const VkMemoryAllocateInfo local_alloc_info = {
1164 .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
1165 .pNext = dedicated_info,
1166 .allocationSize = alloc_size,
1167 .memoryTypeIndex = mem_type_index,
1168 };
1169 result = vn_device_memory_import_dma_buf(dev, mem, &local_alloc_info,
1170 force_unmappable, dup_fd);
1171 if (result != VK_SUCCESS) {
1172 close(dup_fd);
1173 return result;
1174 }
1175
1176 AHardwareBuffer_acquire(ahb);
1177 mem->ahb = ahb;
1178
1179 return VK_SUCCESS;
1180 }
1181
1182 VkResult
vn_android_device_allocate_ahb(struct vn_device * dev,struct vn_device_memory * mem,const VkMemoryAllocateInfo * alloc_info,const VkAllocationCallbacks * alloc)1183 vn_android_device_allocate_ahb(struct vn_device *dev,
1184 struct vn_device_memory *mem,
1185 const VkMemoryAllocateInfo *alloc_info,
1186 const VkAllocationCallbacks *alloc)
1187 {
1188 const VkMemoryDedicatedAllocateInfo *dedicated_info =
1189 vk_find_struct_const(alloc_info->pNext, MEMORY_DEDICATED_ALLOCATE_INFO);
1190 uint32_t width = 0;
1191 uint32_t height = 1;
1192 uint32_t layers = 1;
1193 uint32_t format = 0;
1194 uint64_t usage = 0;
1195 struct AHardwareBuffer *ahb = NULL;
1196
1197 if (dedicated_info && dedicated_info->image != VK_NULL_HANDLE) {
1198 const VkImageCreateInfo *image_info =
1199 &vn_image_from_handle(dedicated_info->image)->deferred_info->create;
1200 assert(image_info);
1201 width = image_info->extent.width;
1202 height = image_info->extent.height;
1203 layers = image_info->arrayLayers;
1204 format = vn_android_ahb_format_from_vk_format(image_info->format);
1205 usage = vn_android_get_ahb_usage(image_info->usage, image_info->flags);
1206 } else {
1207 const VkPhysicalDeviceMemoryProperties *mem_props =
1208 &dev->physical_device->memory_properties.memoryProperties;
1209
1210 assert(alloc_info->memoryTypeIndex < mem_props->memoryTypeCount);
1211
1212 width = alloc_info->allocationSize;
1213 format = AHARDWAREBUFFER_FORMAT_BLOB;
1214 usage = AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;
1215 if (mem_props->memoryTypes[alloc_info->memoryTypeIndex].propertyFlags &
1216 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
1217 usage |= AHARDWAREBUFFER_USAGE_CPU_READ_RARELY |
1218 AHARDWAREBUFFER_USAGE_CPU_WRITE_RARELY;
1219 }
1220 }
1221
1222 ahb = vn_android_ahb_allocate(width, height, layers, format, usage);
1223 if (!ahb)
1224 return VK_ERROR_OUT_OF_HOST_MEMORY;
1225
1226 VkResult result =
1227 vn_android_device_import_ahb(dev, mem, alloc_info, alloc, ahb, true);
1228
1229 /* ahb alloc has already acquired a ref and import will acquire another,
1230 * must release one here to avoid leak.
1231 */
1232 AHardwareBuffer_release(ahb);
1233
1234 return result;
1235 }
1236
1237 void
vn_android_release_ahb(struct AHardwareBuffer * ahb)1238 vn_android_release_ahb(struct AHardwareBuffer *ahb)
1239 {
1240 AHardwareBuffer_release(ahb);
1241 }
1242
1243 VkResult
vn_GetMemoryAndroidHardwareBufferANDROID(VkDevice device,const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo,struct AHardwareBuffer ** pBuffer)1244 vn_GetMemoryAndroidHardwareBufferANDROID(
1245 VkDevice device,
1246 const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo,
1247 struct AHardwareBuffer **pBuffer)
1248 {
1249 struct vn_device_memory *mem = vn_device_memory_from_handle(pInfo->memory);
1250
1251 AHardwareBuffer_acquire(mem->ahb);
1252 *pBuffer = mem->ahb;
1253
1254 return VK_SUCCESS;
1255 }
1256
/* Scratch storage for vn_android_fix_buffer_create_info: holds the local
 * copy of the create info plus the only pNext structs that survive the
 * chain rebuild.
 */
struct vn_android_buffer_create_info {
   VkBufferCreateInfo create;
   VkExternalMemoryBufferCreateInfo external;
   VkBufferOpaqueCaptureAddressCreateInfo address;
};
1262
1263 static const VkBufferCreateInfo *
vn_android_fix_buffer_create_info(const VkBufferCreateInfo * create_info,struct vn_android_buffer_create_info * local_info)1264 vn_android_fix_buffer_create_info(
1265 const VkBufferCreateInfo *create_info,
1266 struct vn_android_buffer_create_info *local_info)
1267 {
1268 local_info->create = *create_info;
1269 VkBaseOutStructure *dst = (void *)&local_info->create;
1270
1271 vk_foreach_struct_const(src, create_info->pNext) {
1272 void *pnext = NULL;
1273 switch (src->sType) {
1274 case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO:
1275 memcpy(&local_info->external, src, sizeof(local_info->external));
1276 local_info->external.handleTypes =
1277 VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
1278 pnext = &local_info->external;
1279 break;
1280 case VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO:
1281 memcpy(&local_info->address, src, sizeof(local_info->address));
1282 pnext = &local_info->address;
1283 break;
1284 default:
1285 break;
1286 }
1287
1288 if (pnext) {
1289 dst->pNext = pnext;
1290 dst = pnext;
1291 }
1292 }
1293
1294 dst->pNext = NULL;
1295
1296 return &local_info->create;
1297 }
1298
1299 VkResult
vn_android_get_ahb_buffer_memory_type_bits(struct vn_device * dev,uint32_t * out_mem_type_bits)1300 vn_android_get_ahb_buffer_memory_type_bits(struct vn_device *dev,
1301 uint32_t *out_mem_type_bits)
1302 {
1303 const uint32_t format = AHARDWAREBUFFER_FORMAT_BLOB;
1304 /* ensure dma_buf_memory_type_bits covers host visible usage */
1305 const uint64_t usage = AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER |
1306 AHARDWAREBUFFER_USAGE_CPU_READ_RARELY |
1307 AHARDWAREBUFFER_USAGE_CPU_WRITE_RARELY;
1308 AHardwareBuffer *ahb = NULL;
1309 int dma_buf_fd = -1;
1310 uint64_t alloc_size = 0;
1311 uint32_t mem_type_bits = 0;
1312 VkResult result;
1313
1314 ahb = vn_android_ahb_allocate(4096, 1, 1, format, usage);
1315 if (!ahb)
1316 return VK_ERROR_OUT_OF_HOST_MEMORY;
1317
1318 result = vn_android_get_dma_buf_from_native_handle(
1319 AHardwareBuffer_getNativeHandle(ahb), &dma_buf_fd);
1320 if (result != VK_SUCCESS) {
1321 AHardwareBuffer_release(ahb);
1322 return result;
1323 }
1324
1325 result = vn_get_memory_dma_buf_properties(dev, dma_buf_fd, &alloc_size,
1326 &mem_type_bits);
1327
1328 AHardwareBuffer_release(ahb);
1329
1330 if (result != VK_SUCCESS)
1331 return result;
1332
1333 *out_mem_type_bits = mem_type_bits;
1334
1335 return VK_SUCCESS;
1336 }
1337
1338 VkResult
vn_android_buffer_from_ahb(struct vn_device * dev,const VkBufferCreateInfo * create_info,const VkAllocationCallbacks * alloc,struct vn_buffer ** out_buf)1339 vn_android_buffer_from_ahb(struct vn_device *dev,
1340 const VkBufferCreateInfo *create_info,
1341 const VkAllocationCallbacks *alloc,
1342 struct vn_buffer **out_buf)
1343 {
1344 struct vn_android_buffer_create_info local_info;
1345 VkResult result;
1346
1347 create_info = vn_android_fix_buffer_create_info(create_info, &local_info);
1348 result = vn_buffer_create(dev, create_info, alloc, out_buf);
1349 if (result != VK_SUCCESS)
1350 return result;
1351
1352 /* AHB backed buffer layers on top of dma_buf, so here we must comine the
1353 * queried type bits from both buffer memory requirement and dma_buf fd
1354 * properties.
1355 */
1356 (*out_buf)->requirements.memory.memoryRequirements.memoryTypeBits &=
1357 dev->buffer_cache.ahb_mem_type_bits;
1358
1359 assert((*out_buf)->requirements.memory.memoryRequirements.memoryTypeBits);
1360
1361 return VK_SUCCESS;
1362 }
1363