/*
 * Copyright © 2020 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *    Eleni Maria Stea <estea@igalia.com>
 *    Juan A. Suarez Romero <jasuarez@igalia.com>
 */

#include <piglit-util-gl.h>
#include <sized-internalformats.h>
#include "interop.h"

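/* Pick the GL texture target that matches the dimensions of the Vulkan
 * image: 1D when the height is 1, 3D when the depth is greater than 1,
 * 2D otherwise.
 */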
GLuint
gl_get_target(const struct vk_image_props *props)
{
	if (props->h == 1)
		return GL_TEXTURE_1D;

	if (props->depth > 1)
		return GL_TEXTURE_3D;

	return GL_TEXTURE_2D;
}

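/* Export the Vulkan device memory as an opaque file descriptor with
 * vkGetMemoryFdKHR and import it into a GL memory object using
 * GL_EXT_memory_object_fd, marking the memory object as dedicated when
 * the Vulkan allocation was dedicated.
 */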
bool
gl_create_mem_obj_from_vk_mem(struct vk_ctx *ctx,
			      struct vk_mem_obj *vk_mem_obj,
			      GLuint *gl_mem_obj)
{
	VkMemoryGetFdInfoKHR fd_info;
	int fd;
	GLint dedicated = vk_mem_obj->dedicated ? GL_TRUE : GL_FALSE;

	PFN_vkGetMemoryFdKHR _vkGetMemoryFdKHR =
		(PFN_vkGetMemoryFdKHR)vkGetDeviceProcAddr(ctx->dev,
				"vkGetMemoryFdKHR");

	if (!_vkGetMemoryFdKHR) {
		fprintf(stderr, "vkGetMemoryFdKHR not found\n");
		return false;
	}

	memset(&fd_info, 0, sizeof fd_info);
	fd_info.sType = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR;
	fd_info.memory = vk_mem_obj->mem;
	fd_info.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;

	if (_vkGetMemoryFdKHR(ctx->dev, &fd_info, &fd) != VK_SUCCESS) {
		fprintf(stderr, "Failed to get the Vulkan memory FD\n");
		return false;
	}

	glCreateMemoryObjectsEXT(1, gl_mem_obj);
	glMemoryObjectParameterivEXT(*gl_mem_obj, GL_DEDICATED_MEMORY_OBJECT_EXT, &dedicated);
	glImportMemoryFdEXT(*gl_mem_obj, vk_mem_obj->mem_sz, GL_HANDLE_TYPE_OPAQUE_FD_EXT, fd);

	if (!glIsMemoryObjectEXT(*gl_mem_obj))
		return false;

	return glGetError() == GL_NO_ERROR;
}

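/* Create a GL texture whose storage is backed by the imported memory
 * object (GL_EXT_memory_object). The tiling is set to match the Vulkan
 * image tiling, and integer formats get GL_NEAREST filtering because
 * they cannot be filtered linearly.
 */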
bool
gl_gen_tex_from_mem_obj(const struct vk_image_props *props,
			GLenum tex_storage_format,
			GLuint mem_obj, uint32_t offset,
			GLuint *tex)
{
	GLint filter;
	GLuint target = gl_get_target(props);
	const struct sized_internalformat *format = get_sized_internalformat(tex_storage_format);
	GLint tiling = props->tiling == VK_IMAGE_TILING_LINEAR ? GL_LINEAR_TILING_EXT :
					GL_OPTIMAL_TILING_EXT;

	glGenTextures(1, tex);
	glBindTexture(target, *tex);

	glTexParameteri(target, GL_TEXTURE_TILING_EXT, tiling);

	switch (target) {
	case GL_TEXTURE_1D:
		assert(props->depth == 1);
		glTexStorageMem1DEXT(target, props->num_levels,
				     tex_storage_format,
				     props->w,
				     mem_obj, offset);
		break;
	case GL_TEXTURE_2D:
		assert(props->depth == 1);
		glTexStorageMem2DEXT(target, props->num_levels,
				     tex_storage_format,
				     props->w, props->h,
				     mem_obj, offset);
		break;
	case GL_TEXTURE_3D:
		glTexStorageMem3DEXT(target, props->num_levels,
				     tex_storage_format,
				     props->w, props->h, props->depth,
				     mem_obj, offset);
		break;
	default:
		fprintf(stderr, "Invalid GL texture target\n");
		return false;
	}

	switch (get_channel_type(format, 1)) {
	case GL_INT:
	case GL_UNSIGNED_INT:
		filter = GL_NEAREST;
		break;
	default:
		filter = GL_LINEAR;
		break;
	}

	glTexParameteri(target, GL_TEXTURE_MIN_FILTER, filter);
	glTexParameteri(target, GL_TEXTURE_MAG_FILTER, filter);

	return glGetError() == GL_NO_ERROR;
}

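/* Create a GL buffer whose storage comes from the imported memory
 * object at the given offset.
 */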
bool
gl_gen_buf_from_mem_obj(GLuint mem_obj,
			GLenum gl_target,
			size_t sz,
			uint32_t offset,
			GLuint *bo)
{
	glGenBuffers(1, bo);
	glBindBuffer(gl_target, *bo);

	glBufferStorageMemEXT(gl_target, sz, mem_obj, offset);

	glBindBuffer(gl_target, 0);

	return glGetError() == GL_NO_ERROR;
}

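/* Export the two Vulkan semaphores as opaque file descriptors with
 * vkGetSemaphoreFdKHR and import them as GL semaphores using
 * GL_EXT_semaphore_fd.
 *
 * A hypothetical caller on the GL side would then synchronize along
 * these lines (the texture and layout names here are made up):
 *
 *	glWaitSemaphoreEXT(gl_smps->vk_frame_done, 0, NULL,
 *			   1, &gl_tex, &gl_layout);
 *	... render or sample using gl_tex ...
 *	glSignalSemaphoreEXT(gl_smps->gl_frame_ready, 0, NULL,
 *			     1, &gl_tex, &gl_layout);
 */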
bool
gl_create_semaphores_from_vk(const struct vk_ctx *ctx,
			     const struct vk_semaphores *vk_smps,
			     struct gl_ext_semaphores *gl_smps)
{
	VkSemaphoreGetFdInfoKHR sem_fd_info;
	int fd_gl_ready;
	int fd_vk_done;
	PFN_vkGetSemaphoreFdKHR _vkGetSemaphoreFdKHR;

	glGenSemaphoresEXT(1, &gl_smps->vk_frame_done);
	glGenSemaphoresEXT(1, &gl_smps->gl_frame_ready);

	_vkGetSemaphoreFdKHR =
		(PFN_vkGetSemaphoreFdKHR)vkGetDeviceProcAddr(ctx->dev,
							     "vkGetSemaphoreFdKHR");
	if (!_vkGetSemaphoreFdKHR) {
		fprintf(stderr, "vkGetSemaphoreFdKHR not found\n");
		return false;
	}

	memset(&sem_fd_info, 0, sizeof sem_fd_info);
	sem_fd_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR;
	sem_fd_info.semaphore = vk_smps->vk_frame_ready;
	sem_fd_info.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;

	if (_vkGetSemaphoreFdKHR(ctx->dev, &sem_fd_info, &fd_vk_done) != VK_SUCCESS) {
		fprintf(stderr, "Failed to get the Vulkan semaphore FD\n");
		return false;
	}

	sem_fd_info.semaphore = vk_smps->gl_frame_done;
	if (_vkGetSemaphoreFdKHR(ctx->dev, &sem_fd_info, &fd_gl_ready) != VK_SUCCESS) {
		fprintf(stderr, "Failed to get the Vulkan semaphore FD\n");
		return false;
	}

	glImportSemaphoreFdEXT(gl_smps->vk_frame_done,
			       GL_HANDLE_TYPE_OPAQUE_FD_EXT,
			       fd_vk_done);

	glImportSemaphoreFdEXT(gl_smps->gl_frame_ready,
			       GL_HANDLE_TYPE_OPAQUE_FD_EXT,
			       fd_gl_ready);

	if (!glIsSemaphoreEXT(gl_smps->vk_frame_done))
		return false;

	if (!glIsSemaphoreEXT(gl_smps->gl_frame_ready))
		return false;

	return glGetError() == GL_NO_ERROR;
}

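/* Map a Vulkan image layout to the GL_LAYOUT_*_EXT token expected by
 * glWaitSemaphoreEXT/glSignalSemaphoreEXT (GL_EXT_semaphore).
 */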
GLenum
gl_get_layout_from_vk(const VkImageLayout vk_layout)
{
	switch (vk_layout) {
	case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
		return GL_LAYOUT_COLOR_ATTACHMENT_EXT;
	case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
		return GL_LAYOUT_DEPTH_STENCIL_ATTACHMENT_EXT;
	case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
		return GL_LAYOUT_DEPTH_STENCIL_READ_ONLY_EXT;
	case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
		return GL_LAYOUT_SHADER_READ_ONLY_EXT;
	case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
		return GL_LAYOUT_TRANSFER_SRC_EXT;
	case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
		return GL_LAYOUT_TRANSFER_DST_EXT;
	case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR:
		return GL_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_EXT;
	case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR:
		return GL_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_EXT;
	case VK_IMAGE_LAYOUT_UNDEFINED:
	default:
		return GL_NONE;
	}
}

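/* Compare the GL device and driver UUIDs with those of the Vulkan
 * physical device to make sure both APIs are talking to the same
 * device and driver; print an error on mismatch.
 */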
bool
gl_check_vk_compatibility(const struct vk_ctx *ctx)
{
	GLubyte deviceUUID[GL_UUID_SIZE_EXT];
	GLubyte driverUUID[GL_UUID_SIZE_EXT];

	/* FIXME: we select the first device so make sure you've
	 * exported VK_ICD_FILENAMES */
	glGetUnsignedBytei_vEXT(GL_DEVICE_UUID_EXT, 0, deviceUUID);
	glGetUnsignedBytevEXT(GL_DRIVER_UUID_EXT, driverUUID);

	if ((strncmp((const char *)deviceUUID,
		     (const char *)ctx->deviceUUID, GL_UUID_SIZE_EXT) != 0) ||
	    (strncmp((const char *)driverUUID,
		     (const char *)ctx->driverUUID, GL_UUID_SIZE_EXT) != 0)) {
		fprintf(stderr, "Mismatch in device/driver UUID\n");
		return false;
	}

	return glGetError() == GL_NO_ERROR;
}

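/* Same UUID comparison as gl_check_vk_compatibility, but without
 * reporting a mismatch to stderr.
 */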
bool
vk_check_gl_compatibility(struct vk_ctx *ctx)
{
	GLubyte deviceUUID[GL_UUID_SIZE_EXT];
	GLubyte driverUUID[GL_UUID_SIZE_EXT];

	/* FIXME: we select the first device so make sure you've
	 * exported VK_ICD_FILENAMES */
	glGetUnsignedBytei_vEXT(GL_DEVICE_UUID_EXT, 0, deviceUUID);
	glGetUnsignedBytevEXT(GL_DRIVER_UUID_EXT, driverUUID);

	if ((strncmp((const char *)deviceUUID,
		     (const char *)ctx->deviceUUID, GL_UUID_SIZE_EXT) != 0) ||
	    (strncmp((const char *)driverUUID,
		     (const char *)ctx->driverUUID, GL_UUID_SIZE_EXT) != 0))
		return false;

	return glGetError() == GL_NO_ERROR;
}