1 /*
2  * Copyright © 2019 Red Hat.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice (including the next
12  * paragraph) shall be included in all copies or substantial portions of the
13  * Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21  * IN THE SOFTWARE.
22  */
23 
24 #pragma once
25 #include <stdlib.h>
26 #include <stdio.h>
27 #include <stdbool.h>
28 #include <string.h>
29 #include <assert.h>
30 #include <stdint.h>
31 
32 #include "util/macros.h"
33 #include "util/list.h"
34 #include "util/u_dynarray.h"
35 #include "util/simple_mtx.h"
36 #include "util/u_queue.h"
37 #include "util/u_upload_mgr.h"
38 
39 #include "compiler/shader_enums.h"
40 #include "pipe/p_screen.h"
41 #include "pipe/p_state.h"
42 #include "cso_cache/cso_context.h"
43 #include "nir.h"
44 
45 /* Pre-declarations needed for WSI entrypoints */
46 struct wl_surface;
47 struct wl_display;
48 typedef struct xcb_connection_t xcb_connection_t;
49 typedef uint32_t xcb_visualid_t;
50 typedef uint32_t xcb_window_t;
51 
52 #define VK_PROTOTYPES
53 #include <vulkan/vulkan.h>
54 #include <vulkan/vk_icd.h>
55 
56 #include "lvp_entrypoints.h"
57 #include "vk_device.h"
58 #include "vk_instance.h"
59 #include "vk_image.h"
60 #include "vk_log.h"
61 #include "vk_physical_device.h"
62 #include "vk_shader_module.h"
63 #include "vk_util.h"
64 #include "vk_format.h"
65 #include "vk_cmd_queue.h"
66 #include "vk_command_buffer.h"
67 #include "vk_command_pool.h"
68 #include "vk_queue.h"
69 
70 #include "wsi_common.h"
71 
72 #include <assert.h>
73 #ifdef __cplusplus
74 extern "C" {
75 #endif
76 
/* Implementation limits advertised by this driver. */
#define MAX_SETS         8
#define MAX_PUSH_CONSTANTS_SIZE 128
#define MAX_PUSH_DESCRIPTORS 32
#define MAX_DESCRIPTOR_UNIFORM_BLOCK_SIZE 4096
#define MAX_PER_STAGE_DESCRIPTOR_UNIFORM_BLOCKS 8

/* printf-style format checking on declarations; MSVC has no equivalent
 * attribute, so this is a no-op on Windows.
 */
#ifdef _WIN32
#define lvp_printflike(a, b)
#else
#define lvp_printflike(a, b) __attribute__((__format__(__printf__, a, b)))
#endif
88 
/* Map a Vulkan entrypoint name to its index in the generated dispatch
 * tables (see lvp_entrypoints.h, included above).
 */
int lvp_get_instance_entrypoint_index(const char *name);
int lvp_get_device_entrypoint_index(const char *name);
int lvp_get_physical_device_entrypoint_index(const char *name);

/* Inverse mapping: dispatch-table index back to the entrypoint name. */
const char *lvp_get_instance_entry_name(int index);
const char *lvp_get_physical_device_entry_name(int index);
const char *lvp_get_device_entry_name(int index);

/* Whether the entrypoint at `index` should be exposed, given the core API
 * version and the enabled instance/device extension tables.
 */
bool lvp_instance_entrypoint_is_enabled(int index, uint32_t core_version,
                                         const struct vk_instance_extension_table *instance);
bool lvp_physical_device_entrypoint_is_enabled(int index, uint32_t core_version,
                                                const struct vk_instance_extension_table *instance);
bool lvp_device_entrypoint_is_enabled(int index, uint32_t core_version,
                                       const struct vk_instance_extension_table *instance,
                                       const struct vk_device_extension_table *device);
104 
105 #define LVP_DEBUG_ALL_ENTRYPOINTS (1 << 0)
106 
107 void __lvp_finishme(const char *file, int line, const char *format, ...)
108    lvp_printflike(3, 4);
109 
110 #define lvp_finishme(format, ...) \
111    __lvp_finishme(__FILE__, __LINE__, format, ##__VA_ARGS__);
112 
113 #define stub_return(v) \
114    do { \
115       lvp_finishme("stub %s", __func__); \
116       return (v); \
117    } while (0)
118 
119 #define stub() \
120    do { \
121       lvp_finishme("stub %s", __func__); \
122       return; \
123    } while (0)
124 
/* Bitmask covering every gl_shader_stage bit. */
#define LVP_STAGE_MASK ((1 << MESA_SHADER_STAGES) - 1)

/* Iterate `stage` over each gl_shader_stage whose bit is set in stage_bits.
 * `__tmp` holds the bits not yet visited; the loop condition assigns the
 * lowest set bit's index to `stage` (via ffs) and terminates when `__tmp`
 * is empty.  The comma declaration makes both variables loop-local.
 */
#define lvp_foreach_stage(stage, stage_bits)                         \
   for (gl_shader_stage stage,                                       \
        __tmp = (gl_shader_stage)((stage_bits) & LVP_STAGE_MASK);    \
        stage = ffs(__tmp) - 1, __tmp;                     \
        __tmp &= ~(1 << (stage)))
132 
/* Physical device state: wraps a gallium pipe_screen. */
struct lvp_physical_device {
   struct vk_physical_device vk;

   struct pipe_loader_device *pld;  /* gallium loader device backing this GPU */
   struct pipe_screen *pscreen;     /* gallium screen used for queries/allocation */
   uint32_t max_images;

   VkPhysicalDeviceLimits device_limits;  /* cached limits reported to the app */

   struct wsi_device                       wsi_device;  /* window-system integration state */
};
144 
struct lvp_instance {
   struct vk_instance vk;

   uint32_t apiVersion;           /* API version requested at instance creation */
   int physicalDeviceCount;
   struct lvp_physical_device physicalDevice;  /* single physical device per instance */

   uint64_t debug_flags;          /* LVP_DEBUG_* bits */

   struct pipe_loader_device *devs;  /* devices found by the pipe loader */
   int num_devices;
};
157 
/* WSI (window-system integration) setup/teardown for a physical device. */
VkResult lvp_init_wsi(struct lvp_physical_device *physical_device);
void lvp_finish_wsi(struct lvp_physical_device *physical_device);

/* Whether the named device extension is supported by this physical device. */
bool lvp_physical_device_extension_supported(struct lvp_physical_device *dev,
                                              const char *name);
163 
struct lvp_queue {
   struct vk_queue vk;
   struct lvp_device *                         device;
   struct pipe_context *ctx;        /* gallium context commands execute on */
   struct cso_context *cso;         /* CSO state cache wrapping ctx */
   struct u_upload_mgr *uploader;   /* transient upload buffer manager */
   bool shutdown;
   uint64_t timeline;
   struct util_queue queue;         /* worker queue for asynchronous submission */
   simple_mtx_t last_lock;          /* NOTE(review): presumably guards the last_* fields below — verify */
   uint64_t last_finished;
   uint64_t last_fence_timeline;
   struct pipe_fence_handle *last_fence;
   volatile int count;
   void *state;
};
180 
/* A pending wait: semaphore plus the value to wait for. */
struct lvp_semaphore_wait {
   struct lvp_semaphore *sema;
   uint64_t wait;
};

/* One queued submission: command buffers plus the fence/semaphore
 * bookkeeping that goes with it.  The pointer arrays below have
 * cmd_buffer_count / timeline_count / signal_count / wait_count entries
 * respectively.
 */
struct lvp_queue_work {
   struct list_head list;
   uint32_t cmd_buffer_count;
   uint32_t timeline_count;
   uint32_t wait_count;
   uint32_t signal_count;
   uint64_t timeline;
   struct lvp_fence *fence;                     /* fence to signal on completion, may be NULL — verify */
   struct lvp_cmd_buffer **cmd_buffers;
   struct lvp_semaphore_timeline **timelines;
   struct lvp_semaphore **signals;
   VkSemaphore *waits;
   uint64_t *wait_vals;                         /* parallel to waits[] */
};
200 
/* Pipeline cache object.  NOTE(review): it holds no cached data members,
 * so caching appears to be a no-op beyond object bookkeeping.
 */
struct lvp_pipeline_cache {
   struct vk_object_base                        base;
   struct lvp_device *                          device;
   VkAllocationCallbacks                        alloc;
};

/* Logical device: owns the single queue and references the screen. */
struct lvp_device {
   struct vk_device vk;

   struct lvp_queue queue;                     /* the one and only queue */
   struct lvp_instance *                       instance;
   struct lvp_physical_device *physical_device;
   struct pipe_screen *pscreen;
};
215 
/* Fill `uuid` with the driver's pipeline-cache UUID. */
void lvp_device_get_cache_uuid(void *uuid);

/* Origin of a device memory allocation. */
enum lvp_device_memory_type {
   LVP_DEVICE_MEMORY_TYPE_DEFAULT,      /* allocated by the driver */
   LVP_DEVICE_MEMORY_TYPE_USER_PTR,     /* imported host pointer */
   LVP_DEVICE_MEMORY_TYPE_OPAQUE_FD,    /* imported/exported opaque fd */
};

struct lvp_device_memory {
   struct vk_object_base base;
   struct pipe_memory_allocation *pmem;         /* backing gallium allocation */
   uint32_t                                     type_index;
   VkDeviceSize                                 map_size;
   void *                                       map;       /* CPU mapping, when mapped */
   enum lvp_device_memory_type memory_type;
   int                                          backed_fd; /* fd for OPAQUE_FD memory — assumed; verify */
};
233 
struct lvp_image {
   struct vk_image vk;
   VkDeviceSize size;        /* required memory size */
   uint32_t alignment;       /* required memory alignment */
   struct pipe_memory_allocation *pmem;  /* bound memory allocation */
   unsigned memory_offset;               /* offset into pmem */
   struct pipe_resource *bo;             /* gallium resource backing the image */
};

struct lvp_image_view {
   struct vk_image_view vk;
   const struct lvp_image *image; /**< VkImageViewCreateInfo::image */

   enum pipe_format pformat;      /* view format translated to gallium */

   struct pipe_surface *surface; /* have we created a pipe surface for this? */
};

struct lvp_sampler {
   struct vk_object_base base;
   VkSamplerCreateInfo create_info;       /* copy of the app's create info */
   union pipe_color_union border_color;
   VkSamplerReductionMode reduction_mode;
   uint32_t state[4];
};
259 
/* Per-binding layout data within a descriptor set layout. */
struct lvp_descriptor_set_binding_layout {
   uint16_t descriptor_index;
   /* Descriptor type of this binding */
   VkDescriptorType type;
   /* Number of array elements in this binding */
   uint16_t array_size;
   bool valid;

   /* Index into the set's dynamic offsets, or negative if not dynamic — assumed; verify */
   int16_t dynamic_index;
   /* Per-stage start indices into the gallium binding slots for this binding */
   struct {
      int16_t const_buffer_index;
      int16_t shader_buffer_index;
      int16_t sampler_index;
      int16_t sampler_view_index;
      int16_t image_index;
      int16_t uniform_block_index;
      int16_t uniform_block_offset;
   } stage[MESA_SHADER_STAGES];

   /* Immutable samplers (or NULL if no immutable samplers) */
   struct lvp_sampler **immutable_samplers;
};

struct lvp_descriptor_set_layout {
   struct vk_object_base base;

   /* Descriptor set layouts can be destroyed at almost any time */
   uint32_t ref_cnt;
   /* add new members after this */

   uint32_t immutable_sampler_count;

   /* Number of bindings in this descriptor set */
   uint16_t binding_count;

   /* Total size of the descriptor set with room for all array entries */
   uint16_t size;

   /* Shader stages affected by this descriptor set */
   uint16_t shader_stages;

   /* Per-stage totals of each gallium resource kind used by this layout */
   struct {
      uint16_t const_buffer_count;
      uint16_t shader_buffer_count;
      uint16_t sampler_count;
      uint16_t sampler_view_count;
      uint16_t image_count;
      uint16_t uniform_block_count;
      uint16_t uniform_block_size;
      uint16_t uniform_block_sizes[MAX_PER_STAGE_DESCRIPTOR_UNIFORM_BLOCKS]; //zero-indexed
   } stage[MESA_SHADER_STAGES];

   /* Number of dynamic offsets used by this descriptor set */
   uint16_t dynamic_offset_count;

   /* Bindings in this descriptor set; binding_count entries are allocated
    * past the end of the struct */
   struct lvp_descriptor_set_binding_layout binding[0];
};

/* Free a layout; callers normally go through the _unref helper below. */
void lvp_descriptor_set_layout_destroy(struct lvp_device *device,
                                       struct lvp_descriptor_set_layout *layout);
320 
321 static inline void
lvp_descriptor_set_layout_ref(struct lvp_descriptor_set_layout * layout)322 lvp_descriptor_set_layout_ref(struct lvp_descriptor_set_layout *layout)
323 {
324    assert(layout && layout->ref_cnt >= 1);
325    p_atomic_inc(&layout->ref_cnt);
326 }
327 
328 static inline void
lvp_descriptor_set_layout_unref(struct lvp_device * device,struct lvp_descriptor_set_layout * layout)329 lvp_descriptor_set_layout_unref(struct lvp_device *device,
330                                 struct lvp_descriptor_set_layout *layout)
331 {
332    if (!layout)
333       return;
334    assert(layout->ref_cnt >= 1);
335    if (p_atomic_dec_zero(&layout->ref_cnt))
336       lvp_descriptor_set_layout_destroy(device, layout);
337 }
338 
/* Payload of a single descriptor.  Which member is valid depends on the
 * VkDescriptorType stored next to it (see struct lvp_descriptor).
 */
union lvp_descriptor_info {
   /* image/sampler descriptor types */
   struct {
      struct lvp_sampler *sampler;
      struct lvp_image_view *iview;
      VkImageLayout image_layout;
   };
   /* buffer descriptor types */
   struct {
      struct lvp_buffer *buffer;
      VkDeviceSize offset;
      VkDeviceSize range;
   };
   struct lvp_buffer_view *buffer_view;  /* texel buffer types */
   uint8_t *uniform;                     /* presumably inline uniform block data — verify */
};

/* One descriptor slot: its type plus the matching payload. */
struct lvp_descriptor {
   VkDescriptorType type;

   union lvp_descriptor_info info;
};
359 
struct lvp_descriptor_set {
   struct vk_object_base base;
   struct lvp_descriptor_set_layout *layout;  /* layout this set was allocated with */
   struct list_head link;                     /* NOTE(review): presumably links into lvp_descriptor_pool::sets — verify */
   struct lvp_descriptor descriptors[0];      /* variable-length; allocated past the struct */
};

struct lvp_descriptor_pool {
   struct vk_object_base base;
   VkDescriptorPoolCreateFlags flags;
   uint32_t max_sets;

   struct list_head sets;  /* sets allocated from this pool */
};

struct lvp_descriptor_update_template {
   struct vk_object_base base;
   uint32_t entry_count;                 /* number of entries in entry[] */
   uint32_t set;
   VkDescriptorUpdateTemplateType type;
   VkPipelineBindPoint bind_point;
   struct lvp_pipeline_layout *pipeline_layout;
   VkDescriptorUpdateTemplateEntry entry[0];  /* variable-length; allocated past the struct */
};

/* Allocate a set of the given layout; returns it in *out_set. */
VkResult
lvp_descriptor_set_create(struct lvp_device *device,
                          struct lvp_descriptor_set_layout *layout,
                          struct lvp_descriptor_set **out_set);

void
lvp_descriptor_set_destroy(struct lvp_device *device,
                           struct lvp_descriptor_set *set);
393 
struct lvp_pipeline_layout {
   struct vk_object_base base;

   /* Pipeline layouts can be destroyed at almost any time */
   uint32_t ref_cnt;

   /* Descriptor set layouts referenced by this pipeline layout */
   struct {
      struct lvp_descriptor_set_layout *layout;
   } set[MAX_SETS];

   uint32_t num_sets;                       /* number of valid entries in set[] */
   uint32_t push_constant_size;
   VkShaderStageFlags push_constant_stages; /* stages that can read push constants */
   /* Per-stage uniform-block totals aggregated across all sets */
   struct {
      uint16_t uniform_block_size;
      uint16_t uniform_block_count;
      uint16_t uniform_block_sizes[MAX_PER_STAGE_DESCRIPTOR_UNIFORM_BLOCKS * MAX_SETS];
   } stage[MESA_SHADER_STAGES];
   bool independent_sets;
};

/* Free a layout; callers normally go through the _unref helper below. */
void lvp_pipeline_layout_destroy(struct lvp_device *device,
                                 struct lvp_pipeline_layout *layout);
417 
418 static inline void
lvp_pipeline_layout_ref(struct lvp_pipeline_layout * layout)419 lvp_pipeline_layout_ref(struct lvp_pipeline_layout *layout)
420 {
421    assert(layout && layout->ref_cnt >= 1);
422    p_atomic_inc(&layout->ref_cnt);
423 }
424 
425 static inline void
lvp_pipeline_layout_unref(struct lvp_device * device,struct lvp_pipeline_layout * layout)426 lvp_pipeline_layout_unref(struct lvp_device *device,
427                           struct lvp_pipeline_layout *layout)
428 {
429    assert(layout && layout->ref_cnt >= 1);
430    if (p_atomic_dec_zero(&layout->ref_cnt))
431       lvp_pipeline_layout_destroy(device, layout);
432 }
433 
/* Per-stage bitmasks of image/buffer slots a pipeline's shaders access. */
struct lvp_access_info {
   uint32_t images_read;
   uint32_t images_written;
   uint32_t buffers_written;
};

struct lvp_pipeline {
   struct vk_object_base base;
   struct lvp_device *                          device;
   struct lvp_pipeline_layout *                 layout;

   struct lvp_access_info access[MESA_SHADER_STAGES];

   void *mem_ctx;                                /* allocation context for pipeline data */
   bool is_compute_pipeline;
   bool force_min_sample;
   nir_shader *pipeline_nir[MESA_SHADER_STAGES]; /* per-stage compiled NIR */
   void *shader_cso[PIPE_SHADER_TYPES];          /* gallium shader state objects */
   VkGraphicsPipelineCreateInfo graphics_create_info;  /* retained copy of create info */
   VkComputePipelineCreateInfo compute_create_info;
   VkGraphicsPipelineLibraryFlagsEXT stages;     /* which library stages this pipeline provides */
   /* line rasterization state */
   uint32_t line_stipple_factor;
   uint16_t line_stipple_pattern;
   bool line_stipple_enable;
   bool line_smooth;
   bool disable_multisample;
   bool line_rectangular;
   bool gs_output_lines;
   bool provoking_vertex_last;
   bool negative_one_to_one;                     /* depth clip control */
   bool library;                                 /* created as a pipeline library */
};
466 
struct lvp_event {
   struct vk_object_base base;
   volatile uint64_t event_storage;  /* set/reset state polled by the queue — assumed; verify */
};

struct lvp_fence {
   struct vk_object_base base;
   uint64_t timeline;
   struct util_queue_fence fence;     /* signalled when the submission thread finishes */
   struct pipe_fence_handle *handle;  /* gallium fence for GPU-side completion */
   bool signalled;
};

/* Node in a semaphore's list of outstanding timeline points. */
struct lvp_semaphore_timeline {
   struct lvp_semaphore_timeline *next;
   uint64_t signal; //api
   uint64_t timeline; //queue
   struct pipe_fence_handle *fence;
};

struct lvp_semaphore {
   struct vk_object_base base;
   bool is_timeline;
   uint64_t current;                  /* current timeline value */
   simple_mtx_t lock;                 /* NOTE(review): presumably guards the timeline list — verify */
   mtx_t submit_lock;
   cnd_t submit;                      /* signalled on new submissions — assumed */
   void *mem;
   struct util_dynarray links;
   struct lvp_semaphore_timeline *timeline;  /* head of pending timeline points */
   struct lvp_semaphore_timeline *latest;    /* tail/most recent point */
   struct pipe_fence_handle *handle;
};

/* Fence/semaphore pair used for submissions with no command buffers. */
struct lvp_queue_noop {
   struct lvp_fence *fence;
   struct lvp_semaphore *sema;
};
505 
struct lvp_buffer {
   struct vk_object_base base;

   VkDeviceSize                                 size;     /* size requested at creation */

   VkBufferUsageFlags                           usage;
   VkDeviceSize                                 offset;   /* offset into bound memory */

   struct pipe_memory_allocation *pmem;  /* bound memory allocation */
   struct pipe_resource *bo;             /* gallium resource backing the buffer */
   uint64_t total_size;
};

struct lvp_buffer_view {
   struct vk_object_base base;
   VkFormat format;
   enum pipe_format pformat;     /* format translated to gallium */
   struct lvp_buffer *buffer;
   uint32_t offset;              /* view offset into the buffer */
   uint64_t range;               /* view size in bytes */
};

struct lvp_query_pool {
   struct vk_object_base base;
   VkQueryType type;
   uint32_t count;                              /* number of entries in queries[] */
   VkQueryPipelineStatisticFlags pipeline_stats;
   enum pipe_query_type base_type;              /* gallium query type for this pool */
   struct pipe_query *queries[0];               /* variable-length; allocated past the struct */
};

struct lvp_cmd_pool {
   struct vk_command_pool                       vk;
   struct list_head                             cmd_buffers;       /* live command buffers */
   struct list_head                             free_cmd_buffers;  /* recycled command buffers */
};
542 
543 
/* Command buffer lifecycle states, mirroring the Vulkan spec's
 * command-buffer lifecycle.
 */
enum lvp_cmd_buffer_status {
   LVP_CMD_BUFFER_STATUS_INVALID,
   LVP_CMD_BUFFER_STATUS_INITIAL,
   LVP_CMD_BUFFER_STATUS_RECORDING,
   LVP_CMD_BUFFER_STATUS_EXECUTABLE,
   LVP_CMD_BUFFER_STATUS_PENDING,
};

struct lvp_cmd_buffer {
   struct vk_command_buffer vk;

   struct lvp_device *                          device;

   enum lvp_cmd_buffer_status status;  /* current lifecycle state */
   struct lvp_cmd_pool *                        pool;
   struct list_head                             pool_link;  /* link in the pool's lists */

   uint8_t push_constants[MAX_PUSH_CONSTANTS_SIZE];  /* current push-constant data */
};
563 
564 
565 #define LVP_FROM_HANDLE(__lvp_type, __name, __handle) \
566    struct __lvp_type *__name = __lvp_type ## _from_handle(__handle)
567 
568 VK_DEFINE_HANDLE_CASTS(lvp_cmd_buffer, vk.base, VkCommandBuffer,
569                        VK_OBJECT_TYPE_COMMAND_BUFFER)
570 VK_DEFINE_HANDLE_CASTS(lvp_device, vk.base, VkDevice, VK_OBJECT_TYPE_DEVICE)
571 VK_DEFINE_HANDLE_CASTS(lvp_instance, vk.base, VkInstance, VK_OBJECT_TYPE_INSTANCE)
572 VK_DEFINE_HANDLE_CASTS(lvp_physical_device, vk.base, VkPhysicalDevice,
573                        VK_OBJECT_TYPE_PHYSICAL_DEVICE)
574 VK_DEFINE_HANDLE_CASTS(lvp_queue, vk.base, VkQueue, VK_OBJECT_TYPE_QUEUE)
575 
576 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_cmd_pool, vk.base, VkCommandPool,
577                                VK_OBJECT_TYPE_COMMAND_POOL)
578 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_buffer, base, VkBuffer,
579                                VK_OBJECT_TYPE_BUFFER)
580 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_buffer_view, base, VkBufferView,
581                                VK_OBJECT_TYPE_BUFFER_VIEW)
582 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_descriptor_pool, base, VkDescriptorPool,
583                                VK_OBJECT_TYPE_DESCRIPTOR_POOL)
584 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_descriptor_set, base, VkDescriptorSet,
585                                VK_OBJECT_TYPE_DESCRIPTOR_SET)
586 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_descriptor_set_layout, base, VkDescriptorSetLayout,
587                                VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT)
588 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_descriptor_update_template, base, VkDescriptorUpdateTemplate,
589                                VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE)
590 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_device_memory, base, VkDeviceMemory,
591                                VK_OBJECT_TYPE_DEVICE_MEMORY)
592 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_event, base, VkEvent, VK_OBJECT_TYPE_EVENT)
593 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_image, vk.base, VkImage, VK_OBJECT_TYPE_IMAGE)
594 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_image_view, vk.base, VkImageView,
595                                VK_OBJECT_TYPE_IMAGE_VIEW);
596 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_pipeline_cache, base, VkPipelineCache,
597                                VK_OBJECT_TYPE_PIPELINE_CACHE)
598 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_pipeline, base, VkPipeline,
599                                VK_OBJECT_TYPE_PIPELINE)
600 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_pipeline_layout, base, VkPipelineLayout,
601                                VK_OBJECT_TYPE_PIPELINE_LAYOUT)
602 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_query_pool, base, VkQueryPool,
603                                VK_OBJECT_TYPE_QUERY_POOL)
604 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_sampler, base, VkSampler,
605                                VK_OBJECT_TYPE_SAMPLER)
606 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_fence, base, VkFence, VK_OBJECT_TYPE_FENCE);
607 VK_DEFINE_NONDISP_HANDLE_CASTS(lvp_semaphore, base, VkSemaphore,
608                                VK_OBJECT_TYPE_SEMAPHORE);
609 
/* One descriptor write within a push-descriptor command. */
struct lvp_write_descriptor {
   uint32_t dst_binding;
   uint32_t dst_array_element;
   uint32_t descriptor_count;
   VkDescriptorType descriptor_type;
};

/* Recorded push-descriptor-set state; descriptors[] and infos[] have
 * descriptor_write_count entries — assumed from field names; verify.
 */
struct lvp_cmd_push_descriptor_set {
   VkPipelineBindPoint bind_point;
   struct lvp_pipeline_layout *layout;
   uint32_t set;
   uint32_t descriptor_write_count;
   struct lvp_write_descriptor *descriptors;
   union lvp_descriptor_info *infos;
};

/* Register the vk_cmd_queue-based command recording entrypoints. */
void lvp_add_enqueue_cmd_entrypoints(struct vk_device_dispatch_table *disp);

/* Replay a recorded command buffer on the given queue's context. */
VkResult lvp_execute_cmds(struct lvp_device *device,
                          struct lvp_queue *queue,
                          struct lvp_cmd_buffer *cmd_buffer);
size_t
lvp_get_rendering_state_size(void);
struct lvp_image *lvp_swapchain_get_image(VkSwapchainKHR swapchain,
					  uint32_t index);
635 
636 static inline enum pipe_format
lvp_vk_format_to_pipe_format(VkFormat format)637 lvp_vk_format_to_pipe_format(VkFormat format)
638 {
639    /* Some formats cause problems with CTS right now.*/
640    if (format == VK_FORMAT_R4G4B4A4_UNORM_PACK16 ||
641        format == VK_FORMAT_R5G5B5A1_UNORM_PACK16 ||
642        format == VK_FORMAT_R8_SRGB ||
643        format == VK_FORMAT_R8G8_SRGB ||
644        format == VK_FORMAT_R64G64B64A64_SFLOAT ||
645        format == VK_FORMAT_R64_SFLOAT ||
646        format == VK_FORMAT_R64G64_SFLOAT ||
647        format == VK_FORMAT_R64G64B64_SFLOAT ||
648        format == VK_FORMAT_A2R10G10B10_SINT_PACK32 ||
649        format == VK_FORMAT_A2B10G10R10_SINT_PACK32 ||
650        format == VK_FORMAT_G8B8G8R8_422_UNORM ||
651        format == VK_FORMAT_B8G8R8G8_422_UNORM ||
652        format == VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM ||
653        format == VK_FORMAT_G8_B8R8_2PLANE_420_UNORM ||
654        format == VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM ||
655        format == VK_FORMAT_G8_B8R8_2PLANE_422_UNORM ||
656        format == VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM ||
657        format == VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM ||
658        format == VK_FORMAT_G16_B16R16_2PLANE_420_UNORM ||
659        format == VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM ||
660        format == VK_FORMAT_G16_B16R16_2PLANE_422_UNORM ||
661        format == VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM ||
662        format == VK_FORMAT_D16_UNORM_S8_UINT)
663       return PIPE_FORMAT_NONE;
664 
665    return vk_format_to_pipe_format(format);
666 }
667 
/* util_queue execute callback that does nothing.
 * NOTE(review): presumably used for empty/no-op submissions
 * (see struct lvp_queue_noop) — verify against the .c file.
 */
void
queue_thread_noop(void *data, void *gdata, int thread_index);
670 #ifdef __cplusplus
671 }
672 #endif
673