1 /*
2 * Copyright 2019 Google LLC
3 * SPDX-License-Identifier: MIT
4 *
5 * based in part on anv and radv which are:
6 * Copyright © 2015 Intel Corporation
7 * Copyright © 2016 Red Hat.
8 * Copyright © 2016 Bas Nieuwenhuizen
9 */
10
11 #include "vn_image.h"
12
13 #include "venus-protocol/vn_protocol_driver_image.h"
14 #include "venus-protocol/vn_protocol_driver_image_view.h"
15 #include "venus-protocol/vn_protocol_driver_sampler.h"
16 #include "venus-protocol/vn_protocol_driver_sampler_ycbcr_conversion.h"
17
18 #include "vn_android.h"
19 #include "vn_device.h"
20 #include "vn_device_memory.h"
21 #include "vn_wsi.h"
22
static void
vn_image_init_memory_requirements(struct vn_image *img,
                                  struct vn_device *dev,
                                  const VkImageCreateInfo *create_info)
{
   /* Query and cache the (per-plane) memory requirements of a freshly
    * created image so later vkGetImageMemoryRequirements* calls can be
    * answered locally instead of issuing another host call.
    */
   uint32_t plane_count = 1;
   if (create_info->flags & VK_IMAGE_CREATE_DISJOINT_BIT) {
      /* TODO VkDrmFormatModifierPropertiesEXT::drmFormatModifierPlaneCount */
      assert(create_info->tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT);

      /* A disjoint image has one set of memory requirements per format
       * plane; derive the plane count from the format.
       */
      switch (create_info->format) {
      case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
      case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM:
      case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16:
      case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16:
      case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16:
      case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16:
      case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM:
      case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM:
         plane_count = 2;
         break;
      case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
      case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM:
      case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM:
      case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16:
      case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16:
      case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16:
      case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16:
      case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16:
      case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16:
      case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM:
      case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM:
      case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM:
         plane_count = 3;
         break;
      default:
         plane_count = 1;
         break;
      }
   }
   assert(plane_count <= ARRAY_SIZE(img->memory_requirements));

   /* TODO add a per-device cache for the requirements */
   /* Chain a VkMemoryDedicatedRequirements onto each cached
    * VkMemoryRequirements2 so the query below fills both in one call.
    */
   for (uint32_t i = 0; i < plane_count; i++) {
      img->memory_requirements[i].sType =
         VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
      img->memory_requirements[i].pNext = &img->dedicated_requirements[i];
      img->dedicated_requirements[i].sType =
         VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS;
      img->dedicated_requirements[i].pNext = NULL;
   }

   VkDevice dev_handle = vn_device_to_handle(dev);
   VkImage img_handle = vn_image_to_handle(img);
   if (plane_count == 1) {
      vn_call_vkGetImageMemoryRequirements2(
         dev->instance, dev_handle,
         &(VkImageMemoryRequirementsInfo2){
            .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
            .image = img_handle,
         },
         &img->memory_requirements[0]);

      /* AHB backed image requires dedicated allocation */
      if (img->deferred_info) {
         img->dedicated_requirements[0].prefersDedicatedAllocation = VK_TRUE;
         img->dedicated_requirements[0].requiresDedicatedAllocation = VK_TRUE;
      }
   } else {
      /* Disjoint image: query each memory plane separately by chaining a
       * VkImagePlaneMemoryRequirementsInfo with the matching plane aspect.
       */
      for (uint32_t i = 0; i < plane_count; i++) {
         vn_call_vkGetImageMemoryRequirements2(
            dev->instance, dev_handle,
            &(VkImageMemoryRequirementsInfo2){
               .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
               .pNext =
                  &(VkImagePlaneMemoryRequirementsInfo){
                     .sType =
                        VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
                     .planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT << i,
                  },
               .image = img_handle,
            },
            &img->memory_requirements[i]);
      }
   }
}
109
static VkResult
vn_image_store_deferred_create_info(
   const VkImageCreateInfo *create_info,
   const VkAllocationCallbacks *alloc,
   struct vn_image_create_deferred_info **out_info)
{
   /* Snapshot create_info, together with the pNext structs this driver
    * knows how to keep (format list and stencil usage), into a single
    * heap-allocated vn_image_create_deferred_info so the creation can be
    * replayed later.  Unrecognized pNext structs are dropped from the
    * stored copy.
    *
    * NOTE(review): the copies are shallow -- e.g. pViewFormats of
    * VkImageFormatListCreateInfo still points at caller-owned memory;
    * this assumes that memory outlives the deferred init -- confirm.
    */
   struct vn_image_create_deferred_info *info = NULL;
   VkBaseOutStructure *dst = NULL;

   info = vk_zalloc(alloc, sizeof(*info), VN_DEFAULT_ALIGN,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!info)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   info->create = *create_info;
   /* dst walks the copied chain; it is rebuilt from scratch below so it
    * only links the structs embedded in *info. */
   dst = (void *)&info->create;

   vk_foreach_struct_const(src, create_info->pNext) {
      void *pnext = NULL;
      switch (src->sType) {
      case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO:
         memcpy(&info->list, src, sizeof(info->list));
         pnext = &info->list;
         break;
      case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO:
         memcpy(&info->stencil, src, sizeof(info->stencil));
         pnext = &info->stencil;
         break;
      default:
         break;
      }

      if (pnext) {
         dst->pNext = pnext;
         dst = pnext;
      }
   }
   /* Terminate the rebuilt chain (also clears the stale pointer copied
    * from *create_info when no struct was kept). */
   dst->pNext = NULL;

   *out_info = info;

   return VK_SUCCESS;
}
153
154 static VkResult
vn_image_init(struct vn_device * dev,const VkImageCreateInfo * create_info,struct vn_image * img)155 vn_image_init(struct vn_device *dev,
156 const VkImageCreateInfo *create_info,
157 struct vn_image *img)
158 {
159 VkDevice device = vn_device_to_handle(dev);
160 VkImage image = vn_image_to_handle(img);
161 VkResult result = VK_SUCCESS;
162
163 img->sharing_mode = create_info->sharingMode;
164
165 /* TODO async */
166 result =
167 vn_call_vkCreateImage(dev->instance, device, create_info, NULL, &image);
168 if (result != VK_SUCCESS)
169 return result;
170
171 vn_image_init_memory_requirements(img, dev, create_info);
172
173 return VK_SUCCESS;
174 }
175
176 VkResult
vn_image_create(struct vn_device * dev,const VkImageCreateInfo * create_info,const VkAllocationCallbacks * alloc,struct vn_image ** out_img)177 vn_image_create(struct vn_device *dev,
178 const VkImageCreateInfo *create_info,
179 const VkAllocationCallbacks *alloc,
180 struct vn_image **out_img)
181 {
182 struct vn_image *img = NULL;
183 VkResult result = VK_SUCCESS;
184
185 img = vk_zalloc(alloc, sizeof(*img), VN_DEFAULT_ALIGN,
186 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
187 if (!img)
188 return VK_ERROR_OUT_OF_HOST_MEMORY;
189
190 vn_object_base_init(&img->base, VK_OBJECT_TYPE_IMAGE, &dev->base);
191
192 result = vn_image_init(dev, create_info, img);
193 if (result != VK_SUCCESS) {
194 vn_object_base_fini(&img->base);
195 vk_free(alloc, img);
196 return result;
197 }
198
199 *out_img = img;
200
201 return VK_SUCCESS;
202 }
203
/* Complete the host-side creation of an image previously made with
 * vn_image_create_deferred(), using the final create info.
 */
VkResult
vn_image_init_deferred(struct vn_device *dev,
                       const VkImageCreateInfo *create_info,
                       struct vn_image *img)
{
   return vn_image_init(dev, create_info, img);
}
211
212 VkResult
vn_image_create_deferred(struct vn_device * dev,const VkImageCreateInfo * create_info,const VkAllocationCallbacks * alloc,struct vn_image ** out_img)213 vn_image_create_deferred(struct vn_device *dev,
214 const VkImageCreateInfo *create_info,
215 const VkAllocationCallbacks *alloc,
216 struct vn_image **out_img)
217 {
218 struct vn_image *img = NULL;
219 VkResult result = VK_SUCCESS;
220
221 img = vk_zalloc(alloc, sizeof(*img), VN_DEFAULT_ALIGN,
222 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
223 if (!img)
224 return VK_ERROR_OUT_OF_HOST_MEMORY;
225
226 vn_object_base_init(&img->base, VK_OBJECT_TYPE_IMAGE, &dev->base);
227
228 result = vn_image_store_deferred_create_info(create_info, alloc,
229 &img->deferred_info);
230 if (result != VK_SUCCESS) {
231 vn_object_base_fini(&img->base);
232 vk_free(alloc, img);
233 return result;
234 }
235
236 *out_img = img;
237
238 return VK_SUCCESS;
239 }
240
241 /* image commands */
242
243 VkResult
vn_CreateImage(VkDevice device,const VkImageCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkImage * pImage)244 vn_CreateImage(VkDevice device,
245 const VkImageCreateInfo *pCreateInfo,
246 const VkAllocationCallbacks *pAllocator,
247 VkImage *pImage)
248 {
249 struct vn_device *dev = vn_device_from_handle(device);
250 const VkAllocationCallbacks *alloc =
251 pAllocator ? pAllocator : &dev->base.base.alloc;
252 struct vn_image *img;
253 VkResult result;
254
255 const struct wsi_image_create_info *wsi_info =
256 vn_wsi_find_wsi_image_create_info(pCreateInfo);
257 const VkNativeBufferANDROID *anb_info =
258 vn_android_find_native_buffer(pCreateInfo);
259 const VkExternalMemoryImageCreateInfo *external_info =
260 vk_find_struct_const(pCreateInfo->pNext,
261 EXTERNAL_MEMORY_IMAGE_CREATE_INFO);
262 const bool ahb_info =
263 external_info &&
264 external_info->handleTypes ==
265 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
266
267 if (wsi_info) {
268 result = vn_wsi_create_image(dev, pCreateInfo, wsi_info, alloc, &img);
269 } else if (anb_info) {
270 result =
271 vn_android_image_from_anb(dev, pCreateInfo, anb_info, alloc, &img);
272 } else if (ahb_info) {
273 result = vn_android_image_from_ahb(dev, pCreateInfo, alloc, &img);
274 } else {
275 result = vn_image_create(dev, pCreateInfo, alloc, &img);
276 }
277
278 if (result != VK_SUCCESS)
279 return vn_error(dev->instance, result);
280
281 *pImage = vn_image_to_handle(img);
282 return VK_SUCCESS;
283 }
284
285 void
vn_DestroyImage(VkDevice device,VkImage image,const VkAllocationCallbacks * pAllocator)286 vn_DestroyImage(VkDevice device,
287 VkImage image,
288 const VkAllocationCallbacks *pAllocator)
289 {
290 struct vn_device *dev = vn_device_from_handle(device);
291 struct vn_image *img = vn_image_from_handle(image);
292 const VkAllocationCallbacks *alloc =
293 pAllocator ? pAllocator : &dev->base.base.alloc;
294
295 if (!img)
296 return;
297
298 if (img->private_memory != VK_NULL_HANDLE)
299 vn_FreeMemory(device, img->private_memory, pAllocator);
300
301 vn_async_vkDestroyImage(dev->instance, device, image, NULL);
302
303 if (img->deferred_info)
304 vk_free(alloc, img->deferred_info);
305
306 vn_object_base_fini(&img->base);
307 vk_free(alloc, img);
308 }
309
310 void
vn_GetImageMemoryRequirements(VkDevice device,VkImage image,VkMemoryRequirements * pMemoryRequirements)311 vn_GetImageMemoryRequirements(VkDevice device,
312 VkImage image,
313 VkMemoryRequirements *pMemoryRequirements)
314 {
315 const struct vn_image *img = vn_image_from_handle(image);
316
317 *pMemoryRequirements = img->memory_requirements[0].memoryRequirements;
318 }
319
320 void
vn_GetImageSparseMemoryRequirements(VkDevice device,VkImage image,uint32_t * pSparseMemoryRequirementCount,VkSparseImageMemoryRequirements * pSparseMemoryRequirements)321 vn_GetImageSparseMemoryRequirements(
322 VkDevice device,
323 VkImage image,
324 uint32_t *pSparseMemoryRequirementCount,
325 VkSparseImageMemoryRequirements *pSparseMemoryRequirements)
326 {
327 struct vn_device *dev = vn_device_from_handle(device);
328
329 /* TODO per-device cache */
330 vn_call_vkGetImageSparseMemoryRequirements(dev->instance, device, image,
331 pSparseMemoryRequirementCount,
332 pSparseMemoryRequirements);
333 }
334
void
vn_GetImageMemoryRequirements2(VkDevice device,
                               const VkImageMemoryRequirementsInfo2 *pInfo,
                               VkMemoryRequirements2 *pMemoryRequirements)
{
   /* Answered entirely from the requirements cached at image creation; no
    * call to the host renderer is made.
    */
   const struct vn_image *img = vn_image_from_handle(pInfo->image);
   /* Union view over the caller's output chain so each node can be
    * accessed as its concrete type without repeated casts.
    */
   union {
      VkBaseOutStructure *pnext;
      VkMemoryRequirements2 *two;
      VkMemoryDedicatedRequirements *dedicated;
   } u = { .two = pMemoryRequirements };

   /* Pick which cached plane to report.  Plane 0 is the default, which
    * also covers non-disjoint queries and VK_IMAGE_ASPECT_PLANE_0_BIT.
    */
   uint32_t plane = 0;
   const VkImagePlaneMemoryRequirementsInfo *plane_info =
      vk_find_struct_const(pInfo->pNext,
                           IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO);
   if (plane_info) {
      switch (plane_info->planeAspect) {
      case VK_IMAGE_ASPECT_PLANE_1_BIT:
         plane = 1;
         break;
      case VK_IMAGE_ASPECT_PLANE_2_BIT:
         plane = 2;
         break;
      default:
         plane = 0;
         break;
      }
   }

   /* Walk the output pNext chain and fill in the structs we recognize;
    * unknown sTypes are left untouched as the spec requires.
    */
   while (u.pnext) {
      switch (u.pnext->sType) {
      case VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2:
         u.two->memoryRequirements =
            img->memory_requirements[plane].memoryRequirements;
         break;
      case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS:
         u.dedicated->prefersDedicatedAllocation =
            img->dedicated_requirements[plane].prefersDedicatedAllocation;
         u.dedicated->requiresDedicatedAllocation =
            img->dedicated_requirements[plane].requiresDedicatedAllocation;
         break;
      default:
         break;
      }
      u.pnext = u.pnext->pNext;
   }
}
383
384 void
vn_GetImageSparseMemoryRequirements2(VkDevice device,const VkImageSparseMemoryRequirementsInfo2 * pInfo,uint32_t * pSparseMemoryRequirementCount,VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements)385 vn_GetImageSparseMemoryRequirements2(
386 VkDevice device,
387 const VkImageSparseMemoryRequirementsInfo2 *pInfo,
388 uint32_t *pSparseMemoryRequirementCount,
389 VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements)
390 {
391 struct vn_device *dev = vn_device_from_handle(device);
392
393 /* TODO per-device cache */
394 vn_call_vkGetImageSparseMemoryRequirements2(dev->instance, device, pInfo,
395 pSparseMemoryRequirementCount,
396 pSparseMemoryRequirements);
397 }
398
399 VkResult
vn_BindImageMemory(VkDevice device,VkImage image,VkDeviceMemory memory,VkDeviceSize memoryOffset)400 vn_BindImageMemory(VkDevice device,
401 VkImage image,
402 VkDeviceMemory memory,
403 VkDeviceSize memoryOffset)
404 {
405 struct vn_device *dev = vn_device_from_handle(device);
406 struct vn_device_memory *mem = vn_device_memory_from_handle(memory);
407
408 if (mem->base_memory) {
409 memory = vn_device_memory_to_handle(mem->base_memory);
410 memoryOffset += mem->base_offset;
411 }
412
413 vn_async_vkBindImageMemory(dev->instance, device, image, memory,
414 memoryOffset);
415
416 return VK_SUCCESS;
417 }
418
VkResult
vn_BindImageMemory2(VkDevice device,
                    uint32_t bindInfoCount,
                    const VkBindImageMemoryInfo *pBindInfos)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc = &dev->base.base.alloc;

   /* Rewrite any bind info whose memory is suballocated so the host sees
    * the base memory object and the adjusted offset.  pBindInfos is copied
    * lazily: the copy is only made once the first rewrite is needed.
    */
   VkBindImageMemoryInfo *local_infos = NULL;
   for (uint32_t i = 0; i < bindInfoCount; i++) {
      const VkBindImageMemoryInfo *info = &pBindInfos[i];
      struct vn_device_memory *mem =
         vn_device_memory_from_handle(info->memory);
      /* TODO handle VkBindImageMemorySwapchainInfoKHR */
      if (!mem || !mem->base_memory)
         continue;

      if (!local_infos) {
         const size_t size = sizeof(*local_infos) * bindInfoCount;
         local_infos = vk_alloc(alloc, size, VN_DEFAULT_ALIGN,
                                VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
         if (!local_infos)
            return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

         memcpy(local_infos, pBindInfos, size);
      }

      local_infos[i].memory = vn_device_memory_to_handle(mem->base_memory);
      local_infos[i].memoryOffset += mem->base_offset;
   }
   if (local_infos)
      pBindInfos = local_infos;

   vn_async_vkBindImageMemory2(dev->instance, device, bindInfoCount,
                               pBindInfos);

   /* No-op when no rewrite was needed (local_infos == NULL). */
   vk_free(alloc, local_infos);

   return VK_SUCCESS;
}
459
460 VkResult
vn_GetImageDrmFormatModifierPropertiesEXT(VkDevice device,VkImage image,VkImageDrmFormatModifierPropertiesEXT * pProperties)461 vn_GetImageDrmFormatModifierPropertiesEXT(
462 VkDevice device,
463 VkImage image,
464 VkImageDrmFormatModifierPropertiesEXT *pProperties)
465 {
466 struct vn_device *dev = vn_device_from_handle(device);
467
468 /* TODO local cache */
469 return vn_call_vkGetImageDrmFormatModifierPropertiesEXT(
470 dev->instance, device, image, pProperties);
471 }
472
473 void
vn_GetImageSubresourceLayout(VkDevice device,VkImage image,const VkImageSubresource * pSubresource,VkSubresourceLayout * pLayout)474 vn_GetImageSubresourceLayout(VkDevice device,
475 VkImage image,
476 const VkImageSubresource *pSubresource,
477 VkSubresourceLayout *pLayout)
478 {
479 struct vn_device *dev = vn_device_from_handle(device);
480
481 /* TODO local cache */
482 vn_call_vkGetImageSubresourceLayout(dev->instance, device, image,
483 pSubresource, pLayout);
484 }
485
486 /* image view commands */
487
488 VkResult
vn_CreateImageView(VkDevice device,const VkImageViewCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkImageView * pView)489 vn_CreateImageView(VkDevice device,
490 const VkImageViewCreateInfo *pCreateInfo,
491 const VkAllocationCallbacks *pAllocator,
492 VkImageView *pView)
493 {
494 struct vn_device *dev = vn_device_from_handle(device);
495 const VkAllocationCallbacks *alloc =
496 pAllocator ? pAllocator : &dev->base.base.alloc;
497
498 struct vn_image_view *view =
499 vk_zalloc(alloc, sizeof(*view), VN_DEFAULT_ALIGN,
500 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
501 if (!view)
502 return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
503
504 vn_object_base_init(&view->base, VK_OBJECT_TYPE_IMAGE_VIEW, &dev->base);
505 view->image = vn_image_from_handle(pCreateInfo->image);
506
507 VkImageView view_handle = vn_image_view_to_handle(view);
508 vn_async_vkCreateImageView(dev->instance, device, pCreateInfo, NULL,
509 &view_handle);
510
511 *pView = view_handle;
512
513 return VK_SUCCESS;
514 }
515
516 void
vn_DestroyImageView(VkDevice device,VkImageView imageView,const VkAllocationCallbacks * pAllocator)517 vn_DestroyImageView(VkDevice device,
518 VkImageView imageView,
519 const VkAllocationCallbacks *pAllocator)
520 {
521 struct vn_device *dev = vn_device_from_handle(device);
522 struct vn_image_view *view = vn_image_view_from_handle(imageView);
523 const VkAllocationCallbacks *alloc =
524 pAllocator ? pAllocator : &dev->base.base.alloc;
525
526 if (!view)
527 return;
528
529 vn_async_vkDestroyImageView(dev->instance, device, imageView, NULL);
530
531 vn_object_base_fini(&view->base);
532 vk_free(alloc, view);
533 }
534
535 /* sampler commands */
536
537 VkResult
vn_CreateSampler(VkDevice device,const VkSamplerCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSampler * pSampler)538 vn_CreateSampler(VkDevice device,
539 const VkSamplerCreateInfo *pCreateInfo,
540 const VkAllocationCallbacks *pAllocator,
541 VkSampler *pSampler)
542 {
543 struct vn_device *dev = vn_device_from_handle(device);
544 const VkAllocationCallbacks *alloc =
545 pAllocator ? pAllocator : &dev->base.base.alloc;
546
547 struct vn_sampler *sampler =
548 vk_zalloc(alloc, sizeof(*sampler), VN_DEFAULT_ALIGN,
549 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
550 if (!sampler)
551 return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
552
553 vn_object_base_init(&sampler->base, VK_OBJECT_TYPE_SAMPLER, &dev->base);
554
555 VkSampler sampler_handle = vn_sampler_to_handle(sampler);
556 vn_async_vkCreateSampler(dev->instance, device, pCreateInfo, NULL,
557 &sampler_handle);
558
559 *pSampler = sampler_handle;
560
561 return VK_SUCCESS;
562 }
563
564 void
vn_DestroySampler(VkDevice device,VkSampler _sampler,const VkAllocationCallbacks * pAllocator)565 vn_DestroySampler(VkDevice device,
566 VkSampler _sampler,
567 const VkAllocationCallbacks *pAllocator)
568 {
569 struct vn_device *dev = vn_device_from_handle(device);
570 struct vn_sampler *sampler = vn_sampler_from_handle(_sampler);
571 const VkAllocationCallbacks *alloc =
572 pAllocator ? pAllocator : &dev->base.base.alloc;
573
574 if (!sampler)
575 return;
576
577 vn_async_vkDestroySampler(dev->instance, device, _sampler, NULL);
578
579 vn_object_base_fini(&sampler->base);
580 vk_free(alloc, sampler);
581 }
582
583 /* sampler YCbCr conversion commands */
584
VkResult
vn_CreateSamplerYcbcrConversion(
   VkDevice device,
   const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkSamplerYcbcrConversion *pYcbcrConversion)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;
   const VkExternalFormatANDROID *ext_info =
      vk_find_struct_const(pCreateInfo->pNext, EXTERNAL_FORMAT_ANDROID);

   /* When an Android external format is given, substitute the concrete
    * Vulkan format it maps to before encoding, since the create info with
    * format == VK_FORMAT_UNDEFINED cannot be forwarded as-is.  Components
    * are reset to identity swizzles for the substituted format
    * (presumably what the Android external-format path expects -- see
    * vn_android for the mapping).
    */
   VkSamplerYcbcrConversionCreateInfo local_info;
   if (ext_info && ext_info->externalFormat) {
      assert(pCreateInfo->format == VK_FORMAT_UNDEFINED);

      local_info = *pCreateInfo;
      local_info.format =
         vn_android_drm_format_to_vk_format(ext_info->externalFormat);
      local_info.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
      local_info.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
      local_info.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
      local_info.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
      pCreateInfo = &local_info;

      /* The mapping must have produced a real format. */
      assert(pCreateInfo->format != VK_FORMAT_UNDEFINED);
   }

   struct vn_sampler_ycbcr_conversion *conv =
      vk_zalloc(alloc, sizeof(*conv), VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!conv)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&conv->base, VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
                       &dev->base);

   /* The host-side creation is issued asynchronously; the driver handle is
    * usable immediately. */
   VkSamplerYcbcrConversion conv_handle =
      vn_sampler_ycbcr_conversion_to_handle(conv);
   vn_async_vkCreateSamplerYcbcrConversion(dev->instance, device, pCreateInfo,
                                           NULL, &conv_handle);

   *pYcbcrConversion = conv_handle;

   return VK_SUCCESS;
}
632
633 void
vn_DestroySamplerYcbcrConversion(VkDevice device,VkSamplerYcbcrConversion ycbcrConversion,const VkAllocationCallbacks * pAllocator)634 vn_DestroySamplerYcbcrConversion(VkDevice device,
635 VkSamplerYcbcrConversion ycbcrConversion,
636 const VkAllocationCallbacks *pAllocator)
637 {
638 struct vn_device *dev = vn_device_from_handle(device);
639 struct vn_sampler_ycbcr_conversion *conv =
640 vn_sampler_ycbcr_conversion_from_handle(ycbcrConversion);
641 const VkAllocationCallbacks *alloc =
642 pAllocator ? pAllocator : &dev->base.base.alloc;
643
644 if (!conv)
645 return;
646
647 vn_async_vkDestroySamplerYcbcrConversion(dev->instance, device,
648 ycbcrConversion, NULL);
649
650 vn_object_base_fini(&conv->base);
651 vk_free(alloc, conv);
652 }
653