1 /*
2 * Copyright © 2015 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24 #include <wayland-client.h>
25
26 #include <assert.h>
27 #include <stdlib.h>
28 #include <stdio.h>
29 #include <unistd.h>
30 #include <errno.h>
31 #include <string.h>
32 #include <pthread.h>
33 #include <poll.h>
34 #include <sys/mman.h>
35
36 #include "drm-uapi/drm_fourcc.h"
37
38 #include "vk_instance.h"
39 #include "vk_physical_device.h"
40 #include "vk_util.h"
41 #include "wsi_common_entrypoints.h"
42 #include "wsi_common_private.h"
43 #include "linux-dmabuf-unstable-v1-client-protocol.h"
44
45 #include <util/compiler.h>
46 #include <util/hash_table.h>
47 #include <util/timespec.h>
48 #include <util/u_vector.h>
49 #include <util/anon_file.h>
50
51 struct wsi_wayland;
52
/* A Vulkan format supported by the compositor, together with the set of
 * DRM format modifiers advertised for it. */
struct wsi_wl_format {
   VkFormat vk_format;           /* Vulkan format this entry describes */
   uint32_t flags;               /* Bitmask of enum wsi_wl_fmt_flag */
   struct u_vector modifiers;    /* uint64_t DRM modifiers (owned by this entry) */
};
58
/* Per-wl_display WSI state: the connection, our private event queue, the
 * buffer-sharing globals we bound, and the formats they advertised. */
struct wsi_wl_display {
   /* The real wl_display */
   struct wl_display *wl_display;
   /* Actually a proxy wrapper around the event queue */
   struct wl_display *wl_display_wrapper;
   /* Private event queue so WSI events never mix with the app's queue */
   struct wl_event_queue *queue;

   /* Bound when sw == true (software/SHM path) */
   struct wl_shm *wl_shm;
   /* Bound when sw == false and the compositor exposes dmabuf v3+ */
   struct zwp_linux_dmabuf_v1 *wl_dmabuf;

   struct wsi_wayland *wsi_wl;

   /* Formats populated by zwp_linux_dmabuf_v1 or wl_shm interfaces */
   struct u_vector formats;

   /* Only used for displays created by wsi_wl_display_create */
   uint32_t refcount;

   /* True when using software (wl_shm) presentation */
   bool sw;
};
79
/* The Wayland WSI platform object, one per physical device. */
struct wsi_wayland {
   struct wsi_interface base;

   struct wsi_device *wsi;

   const VkAllocationCallbacks *alloc;  /* instance-scope allocator */
   VkPhysicalDevice physical_device;
};
88
/* Whether a wsi_wl_format can be presented with its alpha channel honored
 * (ALPHA), with alpha ignored by the compositor (OPAQUE), or both. */
enum wsi_wl_fmt_flag {
   WSI_WL_FMT_ALPHA = 1 << 0,
   WSI_WL_FMT_OPAQUE = 1 << 1,
};
93
94 static struct wsi_wl_format *
find_format(struct u_vector * formats,VkFormat format)95 find_format(struct u_vector *formats, VkFormat format)
96 {
97 struct wsi_wl_format *f;
98
99 u_vector_foreach(f, formats)
100 if (f->vk_format == format)
101 return f;
102
103 return NULL;
104 }
105
/* Add @format to @formats (or merge @flags into an existing entry).
 *
 * Returns the (new or existing) entry, or NULL when the format is not
 * color-renderable on this physical device or on allocation failure.
 * A new entry starts with an empty modifier vector owned by the entry. */
static struct wsi_wl_format *
wsi_wl_display_add_vk_format(struct wsi_wl_display *display,
                             struct u_vector *formats,
                             VkFormat format, uint32_t flags)
{
   assert(flags & (WSI_WL_FMT_ALPHA | WSI_WL_FMT_OPAQUE));

   /* Don't add a format that's already in the list */
   struct wsi_wl_format *f = find_format(formats, format);
   if (f) {
      f->flags |= flags;
      return f;
   }

   /* Don't add formats that aren't renderable. */
   VkFormatProperties props;

   display->wsi_wl->wsi->GetPhysicalDeviceFormatProperties(display->wsi_wl->physical_device,
                                                           format, &props);
   if (!(props.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT))
      return NULL;

   /* Build the modifier vector before growing @formats so that on failure
    * nothing is left half-initialized in the list. */
   struct u_vector modifiers;
   if (!u_vector_init_pow2(&modifiers, 4, sizeof(uint64_t)))
      return NULL;

   f = u_vector_add(formats);
   if (!f) {
      u_vector_finish(&modifiers);
      return NULL;
   }

   f->vk_format = format;
   f->flags = flags;
   f->modifiers = modifiers;

   return f;
}
144
145 static void
wsi_wl_format_add_modifier(struct wsi_wl_format * format,uint64_t modifier)146 wsi_wl_format_add_modifier(struct wsi_wl_format *format, uint64_t modifier)
147 {
148 uint64_t *mod;
149
150 if (modifier == DRM_FORMAT_MOD_INVALID)
151 return;
152
153 u_vector_foreach(mod, &format->modifiers)
154 if (*mod == modifier)
155 return;
156
157 mod = u_vector_add(&format->modifiers);
158 if (mod)
159 *mod = modifier;
160 }
161
162 static void
wsi_wl_display_add_vk_format_modifier(struct wsi_wl_display * display,struct u_vector * formats,VkFormat vk_format,uint32_t flags,uint64_t modifier)163 wsi_wl_display_add_vk_format_modifier(struct wsi_wl_display *display,
164 struct u_vector *formats,
165 VkFormat vk_format, uint32_t flags,
166 uint64_t modifier)
167 {
168 struct wsi_wl_format *format;
169
170 format = wsi_wl_display_add_vk_format(display, formats, vk_format, flags);
171 if (format)
172 wsi_wl_format_add_modifier(format, modifier);
173 }
174
/* Translate one (DRM fourcc, modifier) pair advertised by the compositor
 * into the corresponding Vulkan format entries in @formats.
 *
 * A single DRM format can map to several Vulkan formats (UNORM + SRGB
 * variants, and alpha vs. opaque interpretations), so several entries may
 * be added. DRM formats with no Vulkan analog fall through the switch and
 * are ignored. */
static void
wsi_wl_display_add_drm_format_modifier(struct wsi_wl_display *display,
                                       struct u_vector *formats,
                                       uint32_t drm_format, uint64_t modifier)
{
   switch (drm_format) {
#if 0
   /* TODO: These are only available when VK_EXT_4444_formats is enabled, so
    * we probably need to make their use conditional on this extension. */
   case DRM_FORMAT_ARGB4444:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT,
                                            WSI_WL_FMT_ALPHA, modifier);
      break;
   case DRM_FORMAT_XRGB4444:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT,
                                            WSI_WL_FMT_OPAQUE, modifier);
      break;
   case DRM_FORMAT_ABGR4444:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT,
                                            WSI_WL_FMT_ALPHA, modifier);
      break;
   case DRM_FORMAT_XBGR4444:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT,
                                            WSI_WL_FMT_OPAQUE, modifier);
      break;
#endif

   /* Vulkan _PACKN formats have the same component order as DRM formats
    * on little endian systems, on big endian there exists no analog. */
#if MESA_LITTLE_ENDIAN
   case DRM_FORMAT_RGBA4444:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_R4G4B4A4_UNORM_PACK16,
                                            WSI_WL_FMT_ALPHA, modifier);
      break;
   case DRM_FORMAT_RGBX4444:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_R4G4B4A4_UNORM_PACK16,
                                            WSI_WL_FMT_OPAQUE, modifier);
      break;
   case DRM_FORMAT_BGRA4444:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_B4G4R4A4_UNORM_PACK16,
                                            WSI_WL_FMT_ALPHA, modifier);
      break;
   case DRM_FORMAT_BGRX4444:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_B4G4R4A4_UNORM_PACK16,
                                            WSI_WL_FMT_OPAQUE, modifier);
      break;
   case DRM_FORMAT_RGB565:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_R5G6B5_UNORM_PACK16,
                                            WSI_WL_FMT_ALPHA | WSI_WL_FMT_OPAQUE,
                                            modifier);
      break;
   case DRM_FORMAT_BGR565:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_B5G6R5_UNORM_PACK16,
                                            WSI_WL_FMT_ALPHA | WSI_WL_FMT_OPAQUE,
                                            modifier);
      break;
   case DRM_FORMAT_ARGB1555:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_A1R5G5B5_UNORM_PACK16,
                                            WSI_WL_FMT_ALPHA, modifier);
      break;
   case DRM_FORMAT_XRGB1555:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_A1R5G5B5_UNORM_PACK16,
                                            WSI_WL_FMT_OPAQUE, modifier);
      break;
   case DRM_FORMAT_RGBA5551:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_R5G5B5A1_UNORM_PACK16,
                                            WSI_WL_FMT_ALPHA, modifier);
      break;
   case DRM_FORMAT_RGBX5551:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_R5G5B5A1_UNORM_PACK16,
                                            WSI_WL_FMT_OPAQUE, modifier);
      break;
   case DRM_FORMAT_BGRA5551:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_B5G5R5A1_UNORM_PACK16,
                                            WSI_WL_FMT_ALPHA, modifier);
      break;
   case DRM_FORMAT_BGRX5551:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_B5G5R5A1_UNORM_PACK16,
                                            WSI_WL_FMT_OPAQUE, modifier);
      break;
   case DRM_FORMAT_ARGB2101010:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_A2R10G10B10_UNORM_PACK32,
                                            WSI_WL_FMT_ALPHA, modifier);
      break;
   case DRM_FORMAT_XRGB2101010:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_A2R10G10B10_UNORM_PACK32,
                                            WSI_WL_FMT_OPAQUE, modifier);
      break;
   case DRM_FORMAT_ABGR2101010:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_A2B10G10R10_UNORM_PACK32,
                                            WSI_WL_FMT_ALPHA, modifier);
      break;
   case DRM_FORMAT_XBGR2101010:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_A2B10G10R10_UNORM_PACK32,
                                            WSI_WL_FMT_OPAQUE, modifier);
      break;
#endif

   /* Non-packed 8-bit formats have an inverted channel order compared to the
    * little endian DRM formats, because the DRM channel ordering is high->low
    * but the vulkan channel ordering is in memory byte order
    *
    * For all UNORM formats which have a SRGB variant, we must support both if
    * we can. SRGB in this context means that rendering to it will result in a
    * linear -> nonlinear SRGB colorspace conversion before the data is stored.
    * The inverse function is applied when sampling from SRGB images.
    * From Wayland's perspective nothing changes, the difference is just how
    * Vulkan interprets the pixel data. */
   case DRM_FORMAT_XBGR8888:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_R8G8B8_SRGB,
                                            WSI_WL_FMT_ALPHA | WSI_WL_FMT_OPAQUE,
                                            modifier);
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_R8G8B8_UNORM,
                                            WSI_WL_FMT_ALPHA | WSI_WL_FMT_OPAQUE,
                                            modifier);
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_R8G8B8A8_SRGB,
                                            WSI_WL_FMT_OPAQUE, modifier);
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_R8G8B8A8_UNORM,
                                            WSI_WL_FMT_OPAQUE, modifier);
      break;
   case DRM_FORMAT_ABGR8888:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_R8G8B8A8_SRGB,
                                            WSI_WL_FMT_ALPHA, modifier);
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_R8G8B8A8_UNORM,
                                            WSI_WL_FMT_ALPHA, modifier);
      break;
   case DRM_FORMAT_XRGB8888:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_B8G8R8_SRGB,
                                            WSI_WL_FMT_ALPHA | WSI_WL_FMT_OPAQUE,
                                            modifier);
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_B8G8R8_UNORM,
                                            WSI_WL_FMT_ALPHA | WSI_WL_FMT_OPAQUE,
                                            modifier);
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_B8G8R8A8_SRGB,
                                            WSI_WL_FMT_OPAQUE, modifier);
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_B8G8R8A8_UNORM,
                                            WSI_WL_FMT_OPAQUE, modifier);
      break;
   case DRM_FORMAT_ARGB8888:
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_B8G8R8A8_SRGB,
                                            WSI_WL_FMT_ALPHA, modifier);
      wsi_wl_display_add_vk_format_modifier(display, formats,
                                            VK_FORMAT_B8G8R8A8_UNORM,
                                            WSI_WL_FMT_ALPHA, modifier);
      break;
   }
}
353
354 static uint32_t
drm_format_for_wl_shm_format(enum wl_shm_format shm_format)355 drm_format_for_wl_shm_format(enum wl_shm_format shm_format)
356 {
357 /* wl_shm formats are identical to DRM, except ARGB8888 and XRGB8888 */
358 switch (shm_format) {
359 case WL_SHM_FORMAT_ARGB8888:
360 return DRM_FORMAT_ARGB8888;
361 case WL_SHM_FORMAT_XRGB8888:
362 return DRM_FORMAT_XRGB8888;
363 default:
364 return shm_format;
365 }
366 }
367
368 static void
wsi_wl_display_add_wl_shm_format(struct wsi_wl_display * display,struct u_vector * formats,enum wl_shm_format shm_format)369 wsi_wl_display_add_wl_shm_format(struct wsi_wl_display *display,
370 struct u_vector *formats,
371 enum wl_shm_format shm_format)
372 {
373 uint32_t drm_format = drm_format_for_wl_shm_format(shm_format);
374
375 wsi_wl_display_add_drm_format_modifier(display, formats, drm_format,
376 DRM_FORMAT_MOD_INVALID);
377 }
378
/* Map a Vulkan format (plus whether alpha should be honored) back to the
 * DRM fourcc to present with. Inverse of the table in
 * wsi_wl_display_add_drm_format_modifier. Returns DRM_FORMAT_INVALID (and
 * asserts in debug builds) for formats the table doesn't cover. */
static uint32_t
wl_drm_format_for_vk_format(VkFormat vk_format, bool alpha)
{
   switch (vk_format) {
#if 0
   case VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT:
      return alpha ? DRM_FORMAT_ARGB4444 : DRM_FORMAT_XRGB4444;
   case VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT:
      return alpha ? DRM_FORMAT_ABGR4444 : DRM_FORMAT_XBGR4444;
#endif
   /* Packed formats only match DRM component order on little endian. */
#if MESA_LITTLE_ENDIAN
   case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
      return alpha ? DRM_FORMAT_RGBA4444 : DRM_FORMAT_RGBX4444;
   case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
      return alpha ? DRM_FORMAT_BGRA4444 : DRM_FORMAT_BGRX4444;
   case VK_FORMAT_R5G6B5_UNORM_PACK16:
      return DRM_FORMAT_RGB565;
   case VK_FORMAT_B5G6R5_UNORM_PACK16:
      return DRM_FORMAT_BGR565;
   case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
      return alpha ? DRM_FORMAT_ARGB1555 : DRM_FORMAT_XRGB1555;
   case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
      return alpha ? DRM_FORMAT_RGBA5551 : DRM_FORMAT_RGBX5551;
   case VK_FORMAT_B5G5R5A1_UNORM_PACK16:
      return alpha ? DRM_FORMAT_BGRA5551 : DRM_FORMAT_BGRX5551;
   case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
      return alpha ? DRM_FORMAT_ARGB2101010 : DRM_FORMAT_XRGB2101010;
   case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
      return alpha ? DRM_FORMAT_ABGR2101010 : DRM_FORMAT_XBGR2101010;
#endif
   case VK_FORMAT_R8G8B8_UNORM:
   case VK_FORMAT_R8G8B8_SRGB:
      return DRM_FORMAT_XBGR8888;
   case VK_FORMAT_R8G8B8A8_UNORM:
   case VK_FORMAT_R8G8B8A8_SRGB:
      return alpha ? DRM_FORMAT_ABGR8888 : DRM_FORMAT_XBGR8888;
   case VK_FORMAT_B8G8R8_UNORM:
   case VK_FORMAT_B8G8R8_SRGB:
      return DRM_FORMAT_BGRX8888;
   case VK_FORMAT_B8G8R8A8_UNORM:
   case VK_FORMAT_B8G8R8A8_SRGB:
      return alpha ? DRM_FORMAT_ARGB8888 : DRM_FORMAT_XRGB8888;

   default:
      assert(!"Unsupported Vulkan format");
      return DRM_FORMAT_INVALID;
   }
}
427
/* Map a Vulkan format to the wl_shm format used for software presentation.
 *
 * NOTE(review): on an unsupported format this returns 0, which is also the
 * value of WL_SHM_FORMAT_ARGB8888 — callers cannot distinguish "invalid"
 * from ARGB8888 by the return value alone; verify callers only pass formats
 * previously advertised by the compositor. */
static enum wl_shm_format
wl_shm_format_for_vk_format(VkFormat vk_format, bool alpha)
{
   uint32_t drm_format = wl_drm_format_for_vk_format(vk_format, alpha);
   if (drm_format == DRM_FORMAT_INVALID) {
      return 0;
   }

   /* wl_shm formats are identical to DRM, except ARGB8888 and XRGB8888 */
   switch (drm_format) {
   case DRM_FORMAT_ARGB8888:
      return WL_SHM_FORMAT_ARGB8888;
   case DRM_FORMAT_XRGB8888:
      return WL_SHM_FORMAT_XRGB8888;
   default:
      return drm_format;
   }
}
446
447 static void
dmabuf_handle_format(void * data,struct zwp_linux_dmabuf_v1 * dmabuf,uint32_t format)448 dmabuf_handle_format(void *data, struct zwp_linux_dmabuf_v1 *dmabuf,
449 uint32_t format)
450 {
451 /* Formats are implicitly advertised by the modifier event, so we ignore
452 * them here. */
453 }
454
455 static void
dmabuf_handle_modifier(void * data,struct zwp_linux_dmabuf_v1 * dmabuf,uint32_t format,uint32_t modifier_hi,uint32_t modifier_lo)456 dmabuf_handle_modifier(void *data, struct zwp_linux_dmabuf_v1 *dmabuf,
457 uint32_t format, uint32_t modifier_hi,
458 uint32_t modifier_lo)
459 {
460 struct wsi_wl_display *display = data;
461 uint64_t modifier;
462
463 modifier = ((uint64_t) modifier_hi << 32) | modifier_lo;
464 wsi_wl_display_add_drm_format_modifier(display, &display->formats,
465 format, modifier);
466 }
467
468 static const struct zwp_linux_dmabuf_v1_listener dmabuf_listener = {
469 dmabuf_handle_format,
470 dmabuf_handle_modifier,
471 };
472
473 static void
shm_handle_format(void * data,struct wl_shm * shm,uint32_t format)474 shm_handle_format(void *data, struct wl_shm *shm, uint32_t format)
475 {
476 struct wsi_wl_display *display = data;
477
478 wsi_wl_display_add_wl_shm_format(display, &display->formats, format);
479 }
480
/* Event handlers for the wl_shm global (software presentation path). */
static const struct wl_shm_listener shm_listener = {
   .format = shm_handle_format
};
484
485 static void
registry_handle_global(void * data,struct wl_registry * registry,uint32_t name,const char * interface,uint32_t version)486 registry_handle_global(void *data, struct wl_registry *registry,
487 uint32_t name, const char *interface, uint32_t version)
488 {
489 struct wsi_wl_display *display = data;
490
491 if (display->sw) {
492 if (strcmp(interface, "wl_shm") == 0) {
493 display->wl_shm = wl_registry_bind(registry, name, &wl_shm_interface, 1);
494 wl_shm_add_listener(display->wl_shm, &shm_listener, display);
495 }
496 return;
497 }
498
499 if (strcmp(interface, "zwp_linux_dmabuf_v1") == 0 && version >= 3) {
500 display->wl_dmabuf =
501 wl_registry_bind(registry, name, &zwp_linux_dmabuf_v1_interface, 3);
502 zwp_linux_dmabuf_v1_add_listener(display->wl_dmabuf,
503 &dmabuf_listener, display);
504 }
505 }
506
507 static void
registry_handle_global_remove(void * data,struct wl_registry * registry,uint32_t name)508 registry_handle_global_remove(void *data, struct wl_registry *registry,
509 uint32_t name)
510 { /* No-op */ }
511
512 static const struct wl_registry_listener registry_listener = {
513 registry_handle_global,
514 registry_handle_global_remove
515 };
516
/* Release everything wsi_wl_display_init acquired. Safe to call on a
 * partially-initialized display (init memsets the struct to zero first, and
 * every teardown below is guarded). Does not free the struct itself and
 * does not touch the caller-owned wl_display connection. */
static void
wsi_wl_display_finish(struct wsi_wl_display *display)
{
   assert(display->refcount == 0);

   /* Each format entry owns its modifier vector; free those before the
    * containing vector. */
   struct wsi_wl_format *f;
   u_vector_foreach(f, &display->formats)
      u_vector_finish(&f->modifiers);
   u_vector_finish(&display->formats);
   if (display->wl_shm)
      wl_shm_destroy(display->wl_shm);
   if (display->wl_dmabuf)
      zwp_linux_dmabuf_v1_destroy(display->wl_dmabuf);
   if (display->wl_display_wrapper)
      wl_proxy_wrapper_destroy(display->wl_display_wrapper);
   if (display->queue)
      wl_event_queue_destroy(display->queue);
}
535
536 static VkResult
wsi_wl_display_init(struct wsi_wayland * wsi_wl,struct wsi_wl_display * display,struct wl_display * wl_display,bool get_format_list,bool sw)537 wsi_wl_display_init(struct wsi_wayland *wsi_wl,
538 struct wsi_wl_display *display,
539 struct wl_display *wl_display,
540 bool get_format_list, bool sw)
541 {
542 VkResult result = VK_SUCCESS;
543 memset(display, 0, sizeof(*display));
544
545 if (!u_vector_init(&display->formats, 8, sizeof(struct wsi_wl_format)))
546 return VK_ERROR_OUT_OF_HOST_MEMORY;
547
548 display->wsi_wl = wsi_wl;
549 display->wl_display = wl_display;
550 display->sw = sw;
551
552 display->queue = wl_display_create_queue(wl_display);
553 if (!display->queue) {
554 result = VK_ERROR_OUT_OF_HOST_MEMORY;
555 goto fail;
556 }
557
558 display->wl_display_wrapper = wl_proxy_create_wrapper(wl_display);
559 if (!display->wl_display_wrapper) {
560 result = VK_ERROR_OUT_OF_HOST_MEMORY;
561 goto fail;
562 }
563
564 wl_proxy_set_queue((struct wl_proxy *) display->wl_display_wrapper,
565 display->queue);
566
567 struct wl_registry *registry =
568 wl_display_get_registry(display->wl_display_wrapper);
569 if (!registry) {
570 result = VK_ERROR_OUT_OF_HOST_MEMORY;
571 goto fail;
572 }
573
574 wl_registry_add_listener(registry, ®istry_listener, display);
575
576 /* Round-trip to get wl_shm and zwp_linux_dmabuf_v1 globals */
577 wl_display_roundtrip_queue(display->wl_display, display->queue);
578 if (!display->wl_dmabuf && !display->wl_shm) {
579 result = VK_ERROR_SURFACE_LOST_KHR;
580 goto fail_registry;
581 }
582
583 /* Caller doesn't expect us to query formats/modifiers, so return */
584 if (!get_format_list)
585 goto out;
586
587 /* Round-trip again to get formats and modifiers */
588 wl_display_roundtrip_queue(display->wl_display, display->queue);
589
590 if (wsi_wl->wsi->force_bgra8_unorm_first) {
591 /* Find BGRA8_UNORM in the list and swap it to the first position if we
592 * can find it. Some apps get confused if SRGB is first in the list.
593 */
594 struct wsi_wl_format *first_fmt = u_vector_head(&display->formats);
595 struct wsi_wl_format *f, tmp_fmt;
596 f = find_format(&display->formats, VK_FORMAT_B8G8R8A8_UNORM);
597 if (f) {
598 tmp_fmt = *f;
599 *f = *first_fmt;
600 *first_fmt = tmp_fmt;
601 }
602 }
603
604 out:
605 /* We don't need this anymore */
606 wl_registry_destroy(registry);
607
608 display->refcount = 0;
609
610 return VK_SUCCESS;
611
612 fail_registry:
613 if (registry)
614 wl_registry_destroy(registry);
615
616 fail:
617 wsi_wl_display_finish(display);
618 return result;
619 }
620
621 static VkResult
wsi_wl_display_create(struct wsi_wayland * wsi,struct wl_display * wl_display,bool sw,struct wsi_wl_display ** display_out)622 wsi_wl_display_create(struct wsi_wayland *wsi, struct wl_display *wl_display,
623 bool sw,
624 struct wsi_wl_display **display_out)
625 {
626 struct wsi_wl_display *display =
627 vk_alloc(wsi->alloc, sizeof(*display), 8,
628 VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
629 if (!display)
630 return VK_ERROR_OUT_OF_HOST_MEMORY;
631
632 VkResult result = wsi_wl_display_init(wsi, display, wl_display, true,
633 sw);
634 if (result != VK_SUCCESS) {
635 vk_free(wsi->alloc, display);
636 return result;
637 }
638
639 display->refcount++;
640 *display_out = display;
641
642 return result;
643 }
644
645 static struct wsi_wl_display *
wsi_wl_display_ref(struct wsi_wl_display * display)646 wsi_wl_display_ref(struct wsi_wl_display *display)
647 {
648 display->refcount++;
649 return display;
650 }
651
652 static void
wsi_wl_display_unref(struct wsi_wl_display * display)653 wsi_wl_display_unref(struct wsi_wl_display *display)
654 {
655 if (display->refcount-- > 1)
656 return;
657
658 struct wsi_wayland *wsi = display->wsi_wl;
659 wsi_wl_display_finish(display);
660 vk_free(wsi->alloc, display);
661 }
662
/* vkGetPhysicalDeviceWaylandPresentationSupportKHR: probe whether we can
 * present to @wl_display by attempting a (format-list-free) display init.
 * queueFamilyIndex is unused — support is uniform across queue families. */
VKAPI_ATTR VkBool32 VKAPI_CALL
wsi_GetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice,
                                                   uint32_t queueFamilyIndex,
                                                   struct wl_display *wl_display)
{
   VK_FROM_HANDLE(vk_physical_device, pdevice, physicalDevice);
   struct wsi_device *wsi_device = pdevice->wsi_device;
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];

   /* Stack-local, un-refcounted display: init then immediately finish. */
   struct wsi_wl_display display;
   VkResult ret = wsi_wl_display_init(wsi, &display, wl_display, false,
                                      wsi_device->sw);
   if (ret == VK_SUCCESS)
      wsi_wl_display_finish(&display);

   return ret == VK_SUCCESS;
}
681
682 static VkResult
wsi_wl_surface_get_support(VkIcdSurfaceBase * surface,struct wsi_device * wsi_device,uint32_t queueFamilyIndex,VkBool32 * pSupported)683 wsi_wl_surface_get_support(VkIcdSurfaceBase *surface,
684 struct wsi_device *wsi_device,
685 uint32_t queueFamilyIndex,
686 VkBool32* pSupported)
687 {
688 *pSupported = true;
689
690 return VK_SUCCESS;
691 }
692
/* Present modes this backend implements; order is the order reported to
 * the application. */
static const VkPresentModeKHR present_modes[] = {
   VK_PRESENT_MODE_MAILBOX_KHR,
   VK_PRESENT_MODE_FIFO_KHR,
};
697
/* Fill in VkSurfaceCapabilitiesKHR for a Wayland surface. Wayland surfaces
 * have no fixed size, so currentExtent is the special (0xFFFFFFFF,
 * 0xFFFFFFFF) "undefined" value and the swapchain chooses its own extent. */
static VkResult
wsi_wl_surface_get_capabilities(VkIcdSurfaceBase *surface,
                                struct wsi_device *wsi_device,
                                VkSurfaceCapabilitiesKHR* caps)
{
   /* For true mailbox mode, we need at least 4 images:
    * 1) One to scan out from
    * 2) One to have queued for scan-out
    * 3) One to be currently held by the Wayland compositor
    * 4) One to render to
    */
   caps->minImageCount = 4;
   /* There is no real maximum */
   caps->maxImageCount = 0;

   caps->currentExtent = (VkExtent2D) { UINT32_MAX, UINT32_MAX };
   caps->minImageExtent = (VkExtent2D) { 1, 1 };
   caps->maxImageExtent = (VkExtent2D) {
      wsi_device->maxImageDimension2D,
      wsi_device->maxImageDimension2D,
   };

   /* Wayland surfaces are never rotated by the driver. */
   caps->supportedTransforms = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
   caps->currentTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
   caps->maxImageArrayLayers = 1;

   caps->supportedCompositeAlpha =
      VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR |
      VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR;

   caps->supportedUsageFlags =
      VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
      VK_IMAGE_USAGE_SAMPLED_BIT |
      VK_IMAGE_USAGE_TRANSFER_DST_BIT |
      VK_IMAGE_USAGE_STORAGE_BIT |
      VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
      VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;

   return VK_SUCCESS;
}
738
/* VkSurfaceCapabilities2KHR variant: delegates the core capabilities to
 * wsi_wl_surface_get_capabilities, then walks the pNext chain to answer the
 * extension structs we recognize (protected content is never supported). */
static VkResult
wsi_wl_surface_get_capabilities2(VkIcdSurfaceBase *surface,
                                 struct wsi_device *wsi_device,
                                 const void *info_next,
                                 VkSurfaceCapabilities2KHR* caps)
{
   assert(caps->sType == VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR);

   VkResult result =
      wsi_wl_surface_get_capabilities(surface, wsi_device,
                                      &caps->surfaceCapabilities);

   vk_foreach_struct(ext, caps->pNext) {
      switch (ext->sType) {
      case VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR: {
         VkSurfaceProtectedCapabilitiesKHR *protected = (void *)ext;
         protected->supportsProtected = VK_FALSE;
         break;
      }

      default:
         /* Ignored */
         break;
      }
   }

   return result;
}
767
/* vkGetPhysicalDeviceSurfaceFormatsKHR implementation.
 *
 * Spins up a temporary display connection state to (re)query the
 * compositor's formats, then reports only formats usable for both alpha
 * and opaque composite modes. Uses the VK_OUTARRAY two-call idiom, so it
 * returns VK_INCOMPLETE when the caller's array is too small.
 * wsi_wl_display_init returns VK_SUCCESS (0) or an error, so a non-zero
 * return here means failure. */
static VkResult
wsi_wl_surface_get_formats(VkIcdSurfaceBase *icd_surface,
                           struct wsi_device *wsi_device,
                           uint32_t* pSurfaceFormatCount,
                           VkSurfaceFormatKHR* pSurfaceFormats)
{
   VkIcdSurfaceWayland *surface = (VkIcdSurfaceWayland *)icd_surface;
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];

   struct wsi_wl_display display;
   if (wsi_wl_display_init(wsi, &display, surface->display, true,
                           wsi_device->sw))
      return VK_ERROR_SURFACE_LOST_KHR;

   VK_OUTARRAY_MAKE_TYPED(VkSurfaceFormatKHR, out,
                          pSurfaceFormats, pSurfaceFormatCount);

   struct wsi_wl_format *disp_fmt;
   u_vector_foreach(disp_fmt, &display.formats) {
      /* Skip formats for which we can't support both alpha & opaque
       * formats.
       */
      if (!(disp_fmt->flags & WSI_WL_FMT_ALPHA) ||
          !(disp_fmt->flags & WSI_WL_FMT_OPAQUE))
         continue;

      vk_outarray_append_typed(VkSurfaceFormatKHR, &out, out_fmt) {
         out_fmt->format = disp_fmt->vk_format;
         out_fmt->colorSpace = VK_COLORSPACE_SRGB_NONLINEAR_KHR;
      }
   }

   wsi_wl_display_finish(&display);

   return vk_outarray_status(&out);
}
805
/* vkGetPhysicalDeviceSurfaceFormats2KHR implementation; same logic as
 * wsi_wl_surface_get_formats but writes VkSurfaceFormat2KHR entries.
 * info_next (the query's pNext chain) is currently ignored. */
static VkResult
wsi_wl_surface_get_formats2(VkIcdSurfaceBase *icd_surface,
                            struct wsi_device *wsi_device,
                            const void *info_next,
                            uint32_t* pSurfaceFormatCount,
                            VkSurfaceFormat2KHR* pSurfaceFormats)
{
   VkIcdSurfaceWayland *surface = (VkIcdSurfaceWayland *)icd_surface;
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];

   struct wsi_wl_display display;
   if (wsi_wl_display_init(wsi, &display, surface->display, true,
                           wsi_device->sw))
      return VK_ERROR_SURFACE_LOST_KHR;

   VK_OUTARRAY_MAKE_TYPED(VkSurfaceFormat2KHR, out,
                          pSurfaceFormats, pSurfaceFormatCount);

   struct wsi_wl_format *disp_fmt;
   u_vector_foreach(disp_fmt, &display.formats) {
      /* Skip formats for which we can't support both alpha & opaque
       * formats.
       */
      if (!(disp_fmt->flags & WSI_WL_FMT_ALPHA) ||
          !(disp_fmt->flags & WSI_WL_FMT_OPAQUE))
         continue;

      vk_outarray_append_typed(VkSurfaceFormat2KHR, &out, out_fmt) {
         out_fmt->surfaceFormat.format = disp_fmt->vk_format;
         out_fmt->surfaceFormat.colorSpace = VK_COLORSPACE_SRGB_NONLINEAR_KHR;
      }
   }

   wsi_wl_display_finish(&display);

   return vk_outarray_status(&out);
}
844
845 static VkResult
wsi_wl_surface_get_present_modes(VkIcdSurfaceBase * surface,uint32_t * pPresentModeCount,VkPresentModeKHR * pPresentModes)846 wsi_wl_surface_get_present_modes(VkIcdSurfaceBase *surface,
847 uint32_t* pPresentModeCount,
848 VkPresentModeKHR* pPresentModes)
849 {
850 if (pPresentModes == NULL) {
851 *pPresentModeCount = ARRAY_SIZE(present_modes);
852 return VK_SUCCESS;
853 }
854
855 *pPresentModeCount = MIN2(*pPresentModeCount, ARRAY_SIZE(present_modes));
856 typed_memcpy(pPresentModes, present_modes, *pPresentModeCount);
857
858 if (*pPresentModeCount < ARRAY_SIZE(present_modes))
859 return VK_INCOMPLETE;
860 else
861 return VK_SUCCESS;
862 }
863
/* vkGetPhysicalDevicePresentRectanglesKHR implementation. A Wayland
 * surface has no knowable size from the client side, so report a single
 * rectangle covering the "unknown" maximal extent. */
static VkResult
wsi_wl_surface_get_present_rectangles(VkIcdSurfaceBase *surface,
                                      struct wsi_device *wsi_device,
                                      uint32_t* pRectCount,
                                      VkRect2D* pRects)
{
   VK_OUTARRAY_MAKE_TYPED(VkRect2D, out, pRects, pRectCount);

   vk_outarray_append_typed(VkRect2D, &out, rect) {
      /* We don't know a size so just return the usual "I don't know." */
      *rect = (VkRect2D) {
         .offset = { 0, 0 },
         .extent = { UINT32_MAX, UINT32_MAX },
      };
   }

   return vk_outarray_status(&out);
}
882
883 VKAPI_ATTR VkResult VKAPI_CALL
wsi_CreateWaylandSurfaceKHR(VkInstance _instance,const VkWaylandSurfaceCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSurfaceKHR * pSurface)884 wsi_CreateWaylandSurfaceKHR(VkInstance _instance,
885 const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
886 const VkAllocationCallbacks *pAllocator,
887 VkSurfaceKHR *pSurface)
888 {
889 VK_FROM_HANDLE(vk_instance, instance, _instance);
890 VkIcdSurfaceWayland *surface;
891
892 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR);
893
894 surface = vk_alloc2(&instance->alloc, pAllocator, sizeof *surface, 8,
895 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
896 if (surface == NULL)
897 return VK_ERROR_OUT_OF_HOST_MEMORY;
898
899 surface->base.platform = VK_ICD_WSI_PLATFORM_WAYLAND;
900 surface->display = pCreateInfo->display;
901 surface->surface = pCreateInfo->surface;
902
903 *pSurface = VkIcdSurfaceBase_to_handle(&surface->base);
904
905 return VK_SUCCESS;
906 }
907
/* One swapchain image plus its Wayland-side state. */
struct wsi_wl_image {
   struct wsi_image base;
   struct wl_buffer *buffer;   /* wl_buffer handed to the compositor */
   bool busy;                  /* true while the compositor may read it */
   void *data_ptr;             /* SHM mapping (software path only) */
   uint32_t data_size;         /* size of the SHM mapping in bytes */
};
915
/* A Wayland swapchain: the target surface, the display state, the format
 * negotiated at creation, and the image array allocated inline after the
 * struct. */
struct wsi_wl_swapchain {
   struct wsi_swapchain base;

   struct wsi_wl_display *display;

   struct wl_surface *surface;

   /* Outstanding frame callback; used for FIFO throttling */
   struct wl_callback *frame;

   VkExtent2D extent;
   VkFormat vk_format;
   uint32_t drm_format;               /* DRM fourcc matching vk_format */
   enum wl_shm_format shm_format;     /* SHM equivalent (software path) */

   uint32_t num_drm_modifiers;
   const uint64_t *drm_modifiers;

   VkPresentModeKHR present_mode;
   bool fifo_ready;                   /* frame callback fired; OK to present */

   /* Trailing array sized at swapchain creation time */
   struct wsi_wl_image images[0];
};
938 VK_DEFINE_NONDISP_HANDLE_CASTS(wsi_wl_swapchain, base.base, VkSwapchainKHR,
939 VK_OBJECT_TYPE_SWAPCHAIN_KHR)
940
941 static struct wsi_image *
wsi_wl_swapchain_get_wsi_image(struct wsi_swapchain * wsi_chain,uint32_t image_index)942 wsi_wl_swapchain_get_wsi_image(struct wsi_swapchain *wsi_chain,
943 uint32_t image_index)
944 {
945 struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;
946 return &chain->images[image_index].base;
947 }
948
949 static VkResult
wsi_wl_swapchain_acquire_next_image(struct wsi_swapchain * wsi_chain,const VkAcquireNextImageInfoKHR * info,uint32_t * image_index)950 wsi_wl_swapchain_acquire_next_image(struct wsi_swapchain *wsi_chain,
951 const VkAcquireNextImageInfoKHR *info,
952 uint32_t *image_index)
953 {
954 struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;
955 struct timespec start_time, end_time;
956 struct timespec rel_timeout;
957 int wl_fd = wl_display_get_fd(chain->display->wl_display);
958
959 timespec_from_nsec(&rel_timeout, info->timeout);
960
961 clock_gettime(CLOCK_MONOTONIC, &start_time);
962 timespec_add(&end_time, &rel_timeout, &start_time);
963
964 while (1) {
965 /* Try to dispatch potential events. */
966 int ret = wl_display_dispatch_queue_pending(chain->display->wl_display,
967 chain->display->queue);
968 if (ret < 0)
969 return VK_ERROR_OUT_OF_DATE_KHR;
970
971 /* Try to find a free image. */
972 for (uint32_t i = 0; i < chain->base.image_count; i++) {
973 if (!chain->images[i].busy) {
974 /* We found a non-busy image */
975 *image_index = i;
976 chain->images[i].busy = true;
977 return VK_SUCCESS;
978 }
979 }
980
981 /* Check for timeout. */
982 struct timespec current_time;
983 clock_gettime(CLOCK_MONOTONIC, ¤t_time);
984 if (timespec_after(¤t_time, &end_time))
985 return VK_NOT_READY;
986
987 /* Try to read events from the server. */
988 ret = wl_display_prepare_read_queue(chain->display->wl_display,
989 chain->display->queue);
990 if (ret < 0) {
991 /* Another thread might have read events for our queue already. Go
992 * back to dispatch them.
993 */
994 if (errno == EAGAIN)
995 continue;
996 return VK_ERROR_OUT_OF_DATE_KHR;
997 }
998
999 struct pollfd pollfd = {
1000 .fd = wl_fd,
1001 .events = POLLIN
1002 };
1003 timespec_sub(&rel_timeout, &end_time, ¤t_time);
1004 ret = ppoll(&pollfd, 1, &rel_timeout, NULL);
1005 if (ret <= 0) {
1006 int lerrno = errno;
1007 wl_display_cancel_read(chain->display->wl_display);
1008 if (ret < 0) {
1009 /* If ppoll() was interrupted, try again. */
1010 if (lerrno == EINTR || lerrno == EAGAIN)
1011 continue;
1012 return VK_ERROR_OUT_OF_DATE_KHR;
1013 }
1014 assert(ret == 0);
1015 continue;
1016 }
1017
1018 ret = wl_display_read_events(chain->display->wl_display);
1019 if (ret < 0)
1020 return VK_ERROR_OUT_OF_DATE_KHR;
1021 }
1022 }
1023
1024 static void
frame_handle_done(void * data,struct wl_callback * callback,uint32_t serial)1025 frame_handle_done(void *data, struct wl_callback *callback, uint32_t serial)
1026 {
1027 struct wsi_wl_swapchain *chain = data;
1028
1029 chain->frame = NULL;
1030 chain->fifo_ready = true;
1031
1032 wl_callback_destroy(callback);
1033 }
1034
/* Listener for the per-present wl_surface.frame callback. */
static const struct wl_callback_listener frame_listener = {
   frame_handle_done,
};
1038
static VkResult
wsi_wl_swapchain_queue_present(struct wsi_swapchain *wsi_chain,
                               uint32_t image_index,
                               const VkPresentRegionKHR *damage)
{
   /* Present one swapchain image: (sw path) copy pixels into the shm
    * mapping, (FIFO) wait for the previous frame callback, then
    * attach + damage + commit and flush the connection. */
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;

   if (chain->display->sw) {
      /* Software path: copy the rendered image row by row into the wl_shm
       * mapping the compositor reads from. */
      struct wsi_wl_image *image = &chain->images[image_index];
      void *dptr = image->data_ptr;
      void *sptr;
      /* NOTE(review): MapMemory's result is ignored; a map failure would
       * leave sptr uninitialized — confirm this can't happen here. */
      chain->base.wsi->MapMemory(chain->base.device,
                                 image->base.memory,
                                 0, 0, 0, &sptr);

      for (unsigned r = 0; r < chain->extent.height; r++) {
         memcpy(dptr, sptr, image->base.row_pitches[0]);
         /* void-pointer arithmetic (byte-sized) is a GNU extension. */
         dptr += image->base.row_pitches[0];
         sptr += image->base.row_pitches[0];
      }
      chain->base.wsi->UnmapMemory(chain->base.device,
                                   image->base.memory);

   }
   /* FIFO: block until the previous frame callback fires so we never run
    * more than one frame ahead of the compositor. */
   if (chain->base.present_mode == VK_PRESENT_MODE_FIFO_KHR) {
      while (!chain->fifo_ready) {
         int ret = wl_display_dispatch_queue(chain->display->wl_display,
                                             chain->display->queue);
         if (ret < 0)
            return VK_ERROR_OUT_OF_DATE_KHR;
      }
   }

   assert(image_index < chain->base.image_count);
   wl_surface_attach(chain->surface, chain->images[image_index].buffer, 0, 0);

   /* wl_surface.damage_buffer requires interface version >= 4; otherwise
    * fall back to damaging the entire surface. */
   if (wl_surface_get_version(chain->surface) >= 4 && damage &&
       damage->pRectangles && damage->rectangleCount > 0) {
      for (unsigned i = 0; i < damage->rectangleCount; i++) {
         const VkRectLayerKHR *rect = &damage->pRectangles[i];
         assert(rect->layer == 0);
         wl_surface_damage_buffer(chain->surface,
                                  rect->offset.x, rect->offset.y,
                                  rect->extent.width, rect->extent.height);
      }
   } else {
      wl_surface_damage(chain->surface, 0, 0, INT32_MAX, INT32_MAX);
   }

   if (chain->base.present_mode == VK_PRESENT_MODE_FIFO_KHR) {
      /* Arm the next frame callback before committing this frame. */
      chain->frame = wl_surface_frame(chain->surface);
      wl_callback_add_listener(chain->frame, &frame_listener, chain);
      chain->fifo_ready = false;
   }

   chain->images[image_index].busy = true;
   wl_surface_commit(chain->surface);
   wl_display_flush(chain->display->wl_display);

   return VK_SUCCESS;
}
1100
1101 static void
buffer_handle_release(void * data,struct wl_buffer * buffer)1102 buffer_handle_release(void *data, struct wl_buffer *buffer)
1103 {
1104 struct wsi_wl_image *image = data;
1105
1106 assert(image->buffer == buffer);
1107
1108 image->busy = false;
1109 }
1110
/* Listener tracking compositor ownership of each swapchain wl_buffer. */
static const struct wl_buffer_listener buffer_listener = {
   buffer_handle_release,
};
1114
1115 static VkResult
wsi_wl_image_init(struct wsi_wl_swapchain * chain,struct wsi_wl_image * image,const VkSwapchainCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator)1116 wsi_wl_image_init(struct wsi_wl_swapchain *chain,
1117 struct wsi_wl_image *image,
1118 const VkSwapchainCreateInfoKHR *pCreateInfo,
1119 const VkAllocationCallbacks* pAllocator)
1120 {
1121 struct wsi_wl_display *display = chain->display;
1122 VkResult result;
1123
1124 result = wsi_create_image(&chain->base, &chain->base.image_info,
1125 &image->base);
1126 if (result != VK_SUCCESS)
1127 return result;
1128
1129 if (display->sw) {
1130 int fd, stride;
1131
1132 stride = image->base.row_pitches[0];
1133 image->data_size = stride * chain->extent.height;
1134
1135 /* Create a shareable buffer */
1136 fd = os_create_anonymous_file(image->data_size, NULL);
1137 if (fd < 0)
1138 goto fail_image;
1139
1140 image->data_ptr = mmap(NULL, image->data_size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
1141 if (image->data_ptr == MAP_FAILED) {
1142 close(fd);
1143 goto fail_image;
1144 }
1145 /* Share it in a wl_buffer */
1146 struct wl_shm_pool *pool = wl_shm_create_pool(display->wl_shm, fd, image->data_size);
1147 wl_proxy_set_queue((struct wl_proxy *)pool, display->queue);
1148 image->buffer = wl_shm_pool_create_buffer(pool, 0, chain->extent.width,
1149 chain->extent.height, stride,
1150 chain->shm_format);
1151 wl_shm_pool_destroy(pool);
1152 close(fd);
1153 } else {
1154 assert(display->wl_dmabuf);
1155
1156 struct zwp_linux_buffer_params_v1 *params =
1157 zwp_linux_dmabuf_v1_create_params(display->wl_dmabuf);
1158 if (!params)
1159 goto fail_image;
1160
1161 for (int i = 0; i < image->base.num_planes; i++) {
1162 zwp_linux_buffer_params_v1_add(params,
1163 image->base.fds[i],
1164 i,
1165 image->base.offsets[i],
1166 image->base.row_pitches[i],
1167 image->base.drm_modifier >> 32,
1168 image->base.drm_modifier & 0xffffffff);
1169 close(image->base.fds[i]);
1170 }
1171
1172 image->buffer =
1173 zwp_linux_buffer_params_v1_create_immed(params,
1174 chain->extent.width,
1175 chain->extent.height,
1176 chain->drm_format,
1177 0);
1178 zwp_linux_buffer_params_v1_destroy(params);
1179 }
1180
1181 if (!image->buffer)
1182 goto fail_image;
1183
1184 wl_buffer_add_listener(image->buffer, &buffer_listener, image);
1185
1186 return VK_SUCCESS;
1187
1188 fail_image:
1189 wsi_destroy_image(&chain->base, &image->base);
1190
1191 return VK_ERROR_OUT_OF_HOST_MEMORY;
1192 }
1193
static VkResult
wsi_wl_swapchain_destroy(struct wsi_swapchain *wsi_chain,
                         const VkAllocationCallbacks *pAllocator)
{
   /* Tear down a (possibly partially constructed) swapchain. Every member
    * is guarded, so this is also the create-path failure handler. */
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;

   for (uint32_t i = 0; i < chain->base.image_count; i++) {
      /* A non-NULL buffer marks a fully initialized image. */
      if (chain->images[i].buffer) {
         wl_buffer_destroy(chain->images[i].buffer);
         wsi_destroy_image(&chain->base, &chain->images[i].base);
         /* data_ptr is only set on the software (wl_shm) path. */
         if (chain->images[i].data_ptr)
            munmap(chain->images[i].data_ptr, chain->images[i].data_size);
      }
   }
   wsi_destroy_image_info(&chain->base, &chain->base.image_info);

   if (chain->frame)
      wl_callback_destroy(chain->frame);
   if (chain->surface)
      wl_proxy_wrapper_destroy(chain->surface);

   if (chain->display)
      wsi_wl_display_unref(chain->display);

   wsi_swapchain_finish(&chain->base);

   vk_free(pAllocator, chain);

   return VK_SUCCESS;
}
1224
1225 static VkResult
wsi_wl_surface_create_swapchain(VkIcdSurfaceBase * icd_surface,VkDevice device,struct wsi_device * wsi_device,const VkSwapchainCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,struct wsi_swapchain ** swapchain_out)1226 wsi_wl_surface_create_swapchain(VkIcdSurfaceBase *icd_surface,
1227 VkDevice device,
1228 struct wsi_device *wsi_device,
1229 const VkSwapchainCreateInfoKHR* pCreateInfo,
1230 const VkAllocationCallbacks* pAllocator,
1231 struct wsi_swapchain **swapchain_out)
1232 {
1233 VkIcdSurfaceWayland *surface = (VkIcdSurfaceWayland *)icd_surface;
1234 struct wsi_wayland *wsi =
1235 (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];
1236 struct wsi_wl_swapchain *chain;
1237 VkResult result;
1238
1239 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR);
1240
1241 int num_images = pCreateInfo->minImageCount;
1242
1243 size_t size = sizeof(*chain) + num_images * sizeof(chain->images[0]);
1244 chain = vk_zalloc(pAllocator, size, 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1245 if (chain == NULL)
1246 return VK_ERROR_OUT_OF_HOST_MEMORY;
1247
1248 result = wsi_swapchain_init(wsi_device, &chain->base, device,
1249 pCreateInfo, pAllocator, false);
1250 if (result != VK_SUCCESS) {
1251 vk_free(pAllocator, chain);
1252 return result;
1253 }
1254
1255 bool alpha = pCreateInfo->compositeAlpha ==
1256 VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR;
1257
1258 chain->base.destroy = wsi_wl_swapchain_destroy;
1259 chain->base.get_wsi_image = wsi_wl_swapchain_get_wsi_image;
1260 chain->base.acquire_next_image = wsi_wl_swapchain_acquire_next_image;
1261 chain->base.queue_present = wsi_wl_swapchain_queue_present;
1262 chain->base.present_mode = wsi_swapchain_get_present_mode(wsi_device, pCreateInfo);
1263 chain->base.image_count = num_images;
1264 chain->extent = pCreateInfo->imageExtent;
1265 chain->vk_format = pCreateInfo->imageFormat;
1266 if (wsi_device->sw)
1267 chain->shm_format = wl_shm_format_for_vk_format(chain->vk_format, alpha);
1268 else
1269 chain->drm_format = wl_drm_format_for_vk_format(chain->vk_format, alpha);
1270
1271 if (pCreateInfo->oldSwapchain) {
1272 /* If we have an oldSwapchain parameter, copy the display struct over
1273 * from the old one so we don't have to fully re-initialize it.
1274 */
1275 VK_FROM_HANDLE(wsi_wl_swapchain, old_chain, pCreateInfo->oldSwapchain);
1276 chain->display = wsi_wl_display_ref(old_chain->display);
1277 } else {
1278 chain->display = NULL;
1279 result = wsi_wl_display_create(wsi, surface->display,
1280 wsi_device->sw, &chain->display);
1281 if (result != VK_SUCCESS)
1282 goto fail;
1283 }
1284
1285 chain->surface = wl_proxy_create_wrapper(surface->surface);
1286 if (!chain->surface) {
1287 result = VK_ERROR_OUT_OF_HOST_MEMORY;
1288 goto fail;
1289 }
1290 wl_proxy_set_queue((struct wl_proxy *) chain->surface,
1291 chain->display->queue);
1292
1293 chain->num_drm_modifiers = 0;
1294 chain->drm_modifiers = 0;
1295
1296 /* Use explicit DRM format modifiers when both the server and the driver
1297 * support them.
1298 */
1299 if (chain->display->wl_dmabuf && chain->base.wsi->supports_modifiers) {
1300 struct wsi_wl_format *f = find_format(&chain->display->formats, chain->vk_format);
1301 if (f) {
1302 chain->drm_modifiers = u_vector_tail(&f->modifiers);
1303 chain->num_drm_modifiers = u_vector_length(&f->modifiers);
1304 }
1305 }
1306
1307 chain->fifo_ready = true;
1308
1309 result = wsi_configure_native_image(&chain->base, pCreateInfo,
1310 chain->num_drm_modifiers > 0 ? 1 : 0,
1311 &chain->num_drm_modifiers,
1312 &chain->drm_modifiers,
1313 NULL /* alloc_shm */,
1314 &chain->base.image_info);
1315 if (result != VK_SUCCESS)
1316 goto fail;
1317
1318 for (uint32_t i = 0; i < chain->base.image_count; i++) {
1319 result = wsi_wl_image_init(chain, &chain->images[i],
1320 pCreateInfo, pAllocator);
1321 if (result != VK_SUCCESS)
1322 goto fail;
1323 chain->images[i].busy = false;
1324 }
1325
1326 *swapchain_out = &chain->base;
1327
1328 return VK_SUCCESS;
1329
1330 fail:
1331 wsi_wl_swapchain_destroy(&chain->base, pAllocator);
1332
1333 return result;
1334 }
1335
1336 VkResult
wsi_wl_init_wsi(struct wsi_device * wsi_device,const VkAllocationCallbacks * alloc,VkPhysicalDevice physical_device)1337 wsi_wl_init_wsi(struct wsi_device *wsi_device,
1338 const VkAllocationCallbacks *alloc,
1339 VkPhysicalDevice physical_device)
1340 {
1341 struct wsi_wayland *wsi;
1342 VkResult result;
1343
1344 wsi = vk_alloc(alloc, sizeof(*wsi), 8,
1345 VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
1346 if (!wsi) {
1347 result = VK_ERROR_OUT_OF_HOST_MEMORY;
1348 goto fail;
1349 }
1350
1351 wsi->physical_device = physical_device;
1352 wsi->alloc = alloc;
1353 wsi->wsi = wsi_device;
1354
1355 wsi->base.get_support = wsi_wl_surface_get_support;
1356 wsi->base.get_capabilities2 = wsi_wl_surface_get_capabilities2;
1357 wsi->base.get_formats = wsi_wl_surface_get_formats;
1358 wsi->base.get_formats2 = wsi_wl_surface_get_formats2;
1359 wsi->base.get_present_modes = wsi_wl_surface_get_present_modes;
1360 wsi->base.get_present_rectangles = wsi_wl_surface_get_present_rectangles;
1361 wsi->base.create_swapchain = wsi_wl_surface_create_swapchain;
1362
1363 wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND] = &wsi->base;
1364
1365 return VK_SUCCESS;
1366
1367 fail:
1368 wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND] = NULL;
1369
1370 return result;
1371 }
1372
1373 void
wsi_wl_finish_wsi(struct wsi_device * wsi_device,const VkAllocationCallbacks * alloc)1374 wsi_wl_finish_wsi(struct wsi_device *wsi_device,
1375 const VkAllocationCallbacks *alloc)
1376 {
1377 struct wsi_wayland *wsi =
1378 (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];
1379 if (!wsi)
1380 return;
1381
1382 vk_free(alloc, wsi);
1383 }
1384