1 /*
2 * Copyright © 2019 Red Hat.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24 #include "lvp_private.h"
25
26 #include "pipe-loader/pipe_loader.h"
27 #include "git_sha1.h"
28 #include "vk_cmd_enqueue_entrypoints.h"
29 #include "vk_util.h"
30 #include "pipe/p_config.h"
31 #include "pipe/p_defines.h"
32 #include "pipe/p_state.h"
33 #include "pipe/p_context.h"
34 #include "frontend/drisw_api.h"
35
36 #include "util/u_inlines.h"
37 #include "util/os_memory.h"
38 #include "util/u_thread.h"
39 #include "util/u_atomic.h"
40 #include "util/timespec.h"
41 #include "util/ptralloc.h"
42 #include "os_time.h"
43
44 #if defined(VK_USE_PLATFORM_WAYLAND_KHR) || \
45 defined(VK_USE_PLATFORM_WIN32_KHR) || \
46 defined(VK_USE_PLATFORM_XCB_KHR) || \
47 defined(VK_USE_PLATFORM_XLIB_KHR)
48 #define LVP_USE_WSI_PLATFORM
49 #endif
50 #define LVP_API_VERSION VK_MAKE_VERSION(1, 3, VK_HEADER_VERSION)
51
/* vkEnumerateInstanceVersion: report the instance-level Vulkan API
 * version this driver supports (1.3.VK_HEADER_VERSION, see
 * LVP_API_VERSION above). */
VKAPI_ATTR VkResult VKAPI_CALL lvp_EnumerateInstanceVersion(uint32_t* pApiVersion)
{
   *pApiVersion = LVP_API_VERSION;
   return VK_SUCCESS;
}
57
/* Instance-level extensions advertised by lavapipe.  The WSI surface
 * extensions are only exposed when the corresponding platform support
 * was enabled at build time (see LVP_USE_WSI_PLATFORM above). */
static const struct vk_instance_extension_table lvp_instance_extensions_supported = {
   .KHR_device_group_creation                = true,
   .KHR_external_fence_capabilities          = true,
   .KHR_external_memory_capabilities         = true,
   .KHR_external_semaphore_capabilities      = true,
   .KHR_get_physical_device_properties2      = true,
   .EXT_debug_report                         = true,
#ifdef LVP_USE_WSI_PLATFORM
   .KHR_get_surface_capabilities2            = true,
   .KHR_surface                              = true,
   .KHR_surface_protected_capabilities       = true,
#endif
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
   .KHR_wayland_surface                      = true,
#endif
#ifdef VK_USE_PLATFORM_WIN32_KHR
   .KHR_win32_surface                        = true,
#endif
#ifdef VK_USE_PLATFORM_XCB_KHR
   .KHR_xcb_surface                          = true,
#endif
#ifdef VK_USE_PLATFORM_XLIB_KHR
   .KHR_xlib_surface                         = true,
#endif
};
83
/* Device-level extensions advertised by lavapipe.  These are copied
 * into each physical device's vk.supported_extensions in
 * lvp_physical_device_init().  FD-based external memory and the
 * swapchain/present extensions are gated on build-time support. */
static const struct vk_device_extension_table lvp_device_extensions_supported = {
   .KHR_8bit_storage                      = true,
   .KHR_16bit_storage                     = true,
   .KHR_bind_memory2                      = true,
   .KHR_buffer_device_address             = true,
   .KHR_create_renderpass2                = true,
   .KHR_copy_commands2                    = true,
   .KHR_dedicated_allocation              = true,
   .KHR_depth_stencil_resolve             = true,
   .KHR_descriptor_update_template        = true,
   .KHR_device_group                      = true,
   .KHR_draw_indirect_count               = true,
   .KHR_driver_properties                 = true,
   .KHR_dynamic_rendering                 = true,
   .KHR_format_feature_flags2             = true,
   .KHR_external_fence                    = true,
   .KHR_external_memory                   = true,
#ifdef PIPE_MEMORY_FD
   .KHR_external_memory_fd                = true,
#endif
   .KHR_external_semaphore                = true,
   .KHR_shader_float_controls             = true,
   .KHR_get_memory_requirements2          = true,
#ifdef LVP_USE_WSI_PLATFORM
   .KHR_incremental_present               = true,
#endif
   .KHR_image_format_list                 = true,
   .KHR_imageless_framebuffer             = true,
   .KHR_maintenance1                      = true,
   .KHR_maintenance2                      = true,
   .KHR_maintenance3                      = true,
   .KHR_maintenance4                      = true,
   .KHR_multiview                         = true,
   .KHR_push_descriptor                   = true,
   .KHR_pipeline_library                  = true,
   .KHR_relaxed_block_layout              = true,
   .KHR_sampler_mirror_clamp_to_edge      = true,
   .KHR_separate_depth_stencil_layouts    = true,
   .KHR_shader_atomic_int64               = true,
   .KHR_shader_draw_parameters            = true,
   .KHR_shader_float16_int8               = true,
   .KHR_shader_integer_dot_product        = true,
   .KHR_shader_subgroup_extended_types    = true,
   .KHR_shader_terminate_invocation       = true,
   .KHR_spirv_1_4                         = true,
   .KHR_storage_buffer_storage_class      = true,
#ifdef LVP_USE_WSI_PLATFORM
   .KHR_swapchain                         = true,
#endif
   .KHR_synchronization2                  = true,
   .KHR_timeline_semaphore                = true,
   .KHR_uniform_buffer_standard_layout    = true,
   .KHR_variable_pointers                 = true,
   .KHR_vulkan_memory_model               = true,
   .KHR_zero_initialize_workgroup_memory  = true,
   .EXT_4444_formats                      = true,
   .EXT_calibrated_timestamps             = true,
   .EXT_color_write_enable                = true,
   .EXT_conditional_rendering             = true,
   .EXT_depth_clip_enable                 = true,
   .EXT_depth_clip_control                = true,
   .EXT_extended_dynamic_state            = true,
   .EXT_extended_dynamic_state2           = true,
   .EXT_external_memory_host              = true,
   .EXT_graphics_pipeline_library         = true,
   .EXT_host_query_reset                  = true,
   .EXT_image_robustness                  = true,
   .EXT_index_type_uint8                  = true,
   .EXT_inline_uniform_block              = true,
   .EXT_multi_draw                        = true,
   .EXT_pipeline_creation_feedback        = true,
   .EXT_pipeline_creation_cache_control   = true,
   .EXT_post_depth_coverage               = true,
   .EXT_private_data                      = true,
   .EXT_primitives_generated_query        = true,
   .EXT_primitive_topology_list_restart   = true,
   .EXT_sampler_filter_minmax             = true,
   .EXT_scalar_block_layout               = true,
   .EXT_separate_stencil_usage            = true,
   .EXT_shader_demote_to_helper_invocation= true,
   .EXT_shader_stencil_export             = true,
   .EXT_shader_viewport_index_layer       = true,
   .EXT_subgroup_size_control             = true,
   .EXT_texel_buffer_alignment            = true,
   .EXT_transform_feedback                = true,
   .EXT_vertex_attribute_divisor          = true,
   .EXT_vertex_input_dynamic_state        = true,
   .EXT_custom_border_color               = true,
   .EXT_provoking_vertex                  = true,
   .EXT_line_rasterization                = true,
   .GOOGLE_decorate_string                = true,
   .GOOGLE_hlsl_functionality1            = true,
};
177
178 static int
min_vertex_pipeline_param(struct pipe_screen * pscreen,enum pipe_shader_cap param)179 min_vertex_pipeline_param(struct pipe_screen *pscreen, enum pipe_shader_cap param)
180 {
181 int val = INT_MAX;
182 for (int i = 0; i < PIPE_SHADER_COMPUTE; ++i) {
183 if (i == PIPE_SHADER_FRAGMENT ||
184 !pscreen->get_shader_param(pscreen, i,
185 PIPE_SHADER_CAP_MAX_INSTRUCTIONS))
186 continue;
187
188 val = MAX2(val, pscreen->get_shader_param(pscreen, i, param));
189 }
190 return val;
191 }
192
193 static int
min_shader_param(struct pipe_screen * pscreen,enum pipe_shader_cap param)194 min_shader_param(struct pipe_screen *pscreen, enum pipe_shader_cap param)
195 {
196 return MIN3(min_vertex_pipeline_param(pscreen, param),
197 pscreen->get_shader_param(pscreen, PIPE_SHADER_FRAGMENT, param),
198 pscreen->get_shader_param(pscreen, PIPE_SHADER_COMPUTE, param));
199 }
200
201 static VkResult VKAPI_CALL
lvp_physical_device_init(struct lvp_physical_device * device,struct lvp_instance * instance,struct pipe_loader_device * pld)202 lvp_physical_device_init(struct lvp_physical_device *device,
203 struct lvp_instance *instance,
204 struct pipe_loader_device *pld)
205 {
206 VkResult result;
207
208 struct vk_physical_device_dispatch_table dispatch_table;
209 vk_physical_device_dispatch_table_from_entrypoints(
210 &dispatch_table, &lvp_physical_device_entrypoints, true);
211 vk_physical_device_dispatch_table_from_entrypoints(
212 &dispatch_table, &wsi_physical_device_entrypoints, false);
213 result = vk_physical_device_init(&device->vk, &instance->vk,
214 NULL, &dispatch_table);
215 if (result != VK_SUCCESS) {
216 vk_error(instance, result);
217 goto fail;
218 }
219 device->pld = pld;
220
221 device->pscreen = pipe_loader_create_screen_vk(device->pld, true);
222 if (!device->pscreen)
223 return vk_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
224
225 device->max_images = device->pscreen->get_shader_param(device->pscreen, PIPE_SHADER_FRAGMENT, PIPE_SHADER_CAP_MAX_SHADER_IMAGES);
226 device->vk.supported_extensions = lvp_device_extensions_supported;
227
228 VkSampleCountFlags sample_counts = VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
229
230 uint64_t grid_size[3], block_size[3];
231 uint64_t max_threads_per_block, max_local_size;
232
233 device->pscreen->get_compute_param(device->pscreen, PIPE_SHADER_IR_NIR,
234 PIPE_COMPUTE_CAP_MAX_GRID_SIZE, grid_size);
235 device->pscreen->get_compute_param(device->pscreen, PIPE_SHADER_IR_NIR,
236 PIPE_COMPUTE_CAP_MAX_BLOCK_SIZE, block_size);
237 device->pscreen->get_compute_param(device->pscreen, PIPE_SHADER_IR_NIR,
238 PIPE_COMPUTE_CAP_MAX_THREADS_PER_BLOCK,
239 &max_threads_per_block);
240 device->pscreen->get_compute_param(device->pscreen, PIPE_SHADER_IR_NIR,
241 PIPE_COMPUTE_CAP_MAX_LOCAL_SIZE,
242 &max_local_size);
243
244 const uint64_t max_render_targets = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_RENDER_TARGETS);
245 device->device_limits = (VkPhysicalDeviceLimits) {
246 .maxImageDimension1D = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_TEXTURE_2D_SIZE),
247 .maxImageDimension2D = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_TEXTURE_2D_SIZE),
248 .maxImageDimension3D = (1 << device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_TEXTURE_3D_LEVELS)),
249 .maxImageDimensionCube = (1 << device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_TEXTURE_CUBE_LEVELS)),
250 .maxImageArrayLayers = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_TEXTURE_ARRAY_LAYERS),
251 .maxTexelBufferElements = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_TEXTURE_BUFFER_SIZE),
252 .maxUniformBufferRange = min_shader_param(device->pscreen, PIPE_SHADER_CAP_MAX_CONST_BUFFER_SIZE),
253 .maxStorageBufferRange = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_SHADER_BUFFER_SIZE),
254 .maxPushConstantsSize = MAX_PUSH_CONSTANTS_SIZE,
255 .maxMemoryAllocationCount = UINT32_MAX,
256 .maxSamplerAllocationCount = 32 * 1024,
257 .bufferImageGranularity = 64, /* A cache line */
258 .sparseAddressSpaceSize = 0,
259 .maxBoundDescriptorSets = MAX_SETS,
260 .maxPerStageDescriptorSamplers = min_shader_param(device->pscreen, PIPE_SHADER_CAP_MAX_TEXTURE_SAMPLERS),
261 .maxPerStageDescriptorUniformBuffers = min_shader_param(device->pscreen, PIPE_SHADER_CAP_MAX_CONST_BUFFERS) - 1,
262 .maxPerStageDescriptorStorageBuffers = min_shader_param(device->pscreen, PIPE_SHADER_CAP_MAX_SHADER_BUFFERS),
263 .maxPerStageDescriptorSampledImages = min_shader_param(device->pscreen, PIPE_SHADER_CAP_MAX_SAMPLER_VIEWS),
264 .maxPerStageDescriptorStorageImages = min_shader_param(device->pscreen, PIPE_SHADER_CAP_MAX_SHADER_IMAGES),
265 .maxPerStageDescriptorInputAttachments = 8,
266 .maxPerStageResources = 128,
267 .maxDescriptorSetSamplers = 32 * 1024,
268 .maxDescriptorSetUniformBuffers = 256,
269 .maxDescriptorSetUniformBuffersDynamic = 256,
270 .maxDescriptorSetStorageBuffers = 256,
271 .maxDescriptorSetStorageBuffersDynamic = 256,
272 .maxDescriptorSetSampledImages = 256,
273 .maxDescriptorSetStorageImages = 256,
274 .maxDescriptorSetInputAttachments = 256,
275 .maxVertexInputAttributes = 32,
276 .maxVertexInputBindings = 32,
277 .maxVertexInputAttributeOffset = 2047,
278 .maxVertexInputBindingStride = 2048,
279 .maxVertexOutputComponents = 128,
280 .maxTessellationGenerationLevel = 64,
281 .maxTessellationPatchSize = 32,
282 .maxTessellationControlPerVertexInputComponents = 128,
283 .maxTessellationControlPerVertexOutputComponents = 128,
284 .maxTessellationControlPerPatchOutputComponents = 128,
285 .maxTessellationControlTotalOutputComponents = 4096,
286 .maxTessellationEvaluationInputComponents = 128,
287 .maxTessellationEvaluationOutputComponents = 128,
288 .maxGeometryShaderInvocations = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_GS_INVOCATIONS),
289 .maxGeometryInputComponents = 64,
290 .maxGeometryOutputComponents = 128,
291 .maxGeometryOutputVertices = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_GEOMETRY_OUTPUT_VERTICES),
292 .maxGeometryTotalOutputComponents = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_GEOMETRY_TOTAL_OUTPUT_COMPONENTS),
293 .maxFragmentInputComponents = 128,
294 .maxFragmentOutputAttachments = 8,
295 .maxFragmentDualSrcAttachments = 2,
296 .maxFragmentCombinedOutputResources = max_render_targets +
297 device->pscreen->get_shader_param(device->pscreen, PIPE_SHADER_FRAGMENT,
298 PIPE_SHADER_CAP_MAX_SHADER_BUFFERS) +
299 device->pscreen->get_shader_param(device->pscreen, PIPE_SHADER_FRAGMENT,
300 PIPE_SHADER_CAP_MAX_SHADER_IMAGES),
301 .maxComputeSharedMemorySize = max_local_size,
302 .maxComputeWorkGroupCount = { grid_size[0], grid_size[1], grid_size[2] },
303 .maxComputeWorkGroupInvocations = max_threads_per_block,
304 .maxComputeWorkGroupSize = { block_size[0], block_size[1], block_size[2] },
305 .subPixelPrecisionBits = device->pscreen->get_param(device->pscreen, PIPE_CAP_RASTERIZER_SUBPIXEL_BITS),
306 .subTexelPrecisionBits = 8,
307 .mipmapPrecisionBits = 4,
308 .maxDrawIndexedIndexValue = UINT32_MAX,
309 .maxDrawIndirectCount = UINT32_MAX,
310 .maxSamplerLodBias = 16,
311 .maxSamplerAnisotropy = 16,
312 .maxViewports = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_VIEWPORTS),
313 .maxViewportDimensions = { (1 << 14), (1 << 14) },
314 .viewportBoundsRange = { -32768.0, 32768.0 },
315 .viewportSubPixelBits = device->pscreen->get_param(device->pscreen, PIPE_CAP_VIEWPORT_SUBPIXEL_BITS),
316 .minMemoryMapAlignment = device->pscreen->get_param(device->pscreen, PIPE_CAP_MIN_MAP_BUFFER_ALIGNMENT),
317 .minTexelBufferOffsetAlignment = device->pscreen->get_param(device->pscreen, PIPE_CAP_TEXTURE_BUFFER_OFFSET_ALIGNMENT),
318 .minUniformBufferOffsetAlignment = device->pscreen->get_param(device->pscreen, PIPE_CAP_CONSTANT_BUFFER_OFFSET_ALIGNMENT),
319 .minStorageBufferOffsetAlignment = device->pscreen->get_param(device->pscreen, PIPE_CAP_SHADER_BUFFER_OFFSET_ALIGNMENT),
320 .minTexelOffset = device->pscreen->get_param(device->pscreen, PIPE_CAP_MIN_TEXEL_OFFSET),
321 .maxTexelOffset = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_TEXEL_OFFSET),
322 .minTexelGatherOffset = device->pscreen->get_param(device->pscreen, PIPE_CAP_MIN_TEXTURE_GATHER_OFFSET),
323 .maxTexelGatherOffset = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_TEXTURE_GATHER_OFFSET),
324 .minInterpolationOffset = -2, /* FIXME */
325 .maxInterpolationOffset = 2, /* FIXME */
326 .subPixelInterpolationOffsetBits = 8, /* FIXME */
327 .maxFramebufferWidth = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_TEXTURE_2D_SIZE),
328 .maxFramebufferHeight = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_TEXTURE_2D_SIZE),
329 .maxFramebufferLayers = device->pscreen->get_param(device->pscreen, PIPE_CAP_MAX_TEXTURE_ARRAY_LAYERS),
330 .framebufferColorSampleCounts = sample_counts,
331 .framebufferDepthSampleCounts = sample_counts,
332 .framebufferStencilSampleCounts = sample_counts,
333 .framebufferNoAttachmentsSampleCounts = sample_counts,
334 .maxColorAttachments = max_render_targets,
335 .sampledImageColorSampleCounts = sample_counts,
336 .sampledImageIntegerSampleCounts = sample_counts,
337 .sampledImageDepthSampleCounts = sample_counts,
338 .sampledImageStencilSampleCounts = sample_counts,
339 .storageImageSampleCounts = sample_counts,
340 .maxSampleMaskWords = 1,
341 .timestampComputeAndGraphics = true,
342 .timestampPeriod = 1,
343 .maxClipDistances = 8,
344 .maxCullDistances = 8,
345 .maxCombinedClipAndCullDistances = 8,
346 .discreteQueuePriorities = 2,
347 .pointSizeRange = { 0.0, device->pscreen->get_paramf(device->pscreen, PIPE_CAPF_MAX_POINT_SIZE) },
348 .lineWidthRange = { 1.0, device->pscreen->get_paramf(device->pscreen, PIPE_CAPF_MAX_LINE_WIDTH) },
349 .pointSizeGranularity = (1.0 / 8.0),
350 .lineWidthGranularity = 1.0 / 128.0,
351 .strictLines = true,
352 .standardSampleLocations = true,
353 .optimalBufferCopyOffsetAlignment = 128,
354 .optimalBufferCopyRowPitchAlignment = 128,
355 .nonCoherentAtomSize = 64,
356 };
357 result = lvp_init_wsi(device);
358 if (result != VK_SUCCESS) {
359 vk_physical_device_finish(&device->vk);
360 vk_error(instance, result);
361 goto fail;
362 }
363
364 return VK_SUCCESS;
365 fail:
366 return result;
367 }
368
/* Tear down everything lvp_physical_device_init() set up, in reverse
 * order: WSI first, then the gallium screen, then the base vk object. */
static void VKAPI_CALL
lvp_physical_device_finish(struct lvp_physical_device *device)
{
   lvp_finish_wsi(device);
   device->pscreen->destroy(device->pscreen);
   vk_physical_device_finish(&device->vk);
}
376
lvp_CreateInstance(const VkInstanceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkInstance * pInstance)377 VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateInstance(
378 const VkInstanceCreateInfo* pCreateInfo,
379 const VkAllocationCallbacks* pAllocator,
380 VkInstance* pInstance)
381 {
382 struct lvp_instance *instance;
383 VkResult result;
384
385 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO);
386
387 if (pAllocator == NULL)
388 pAllocator = vk_default_allocator();
389
390 instance = vk_zalloc(pAllocator, sizeof(*instance), 8,
391 VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
392 if (!instance)
393 return vk_error(NULL, VK_ERROR_OUT_OF_HOST_MEMORY);
394
395 struct vk_instance_dispatch_table dispatch_table;
396 vk_instance_dispatch_table_from_entrypoints(
397 &dispatch_table, &lvp_instance_entrypoints, true);
398 vk_instance_dispatch_table_from_entrypoints(
399 &dispatch_table, &wsi_instance_entrypoints, false);
400
401 result = vk_instance_init(&instance->vk,
402 &lvp_instance_extensions_supported,
403 &dispatch_table,
404 pCreateInfo,
405 pAllocator);
406 if (result != VK_SUCCESS) {
407 vk_free(pAllocator, instance);
408 return vk_error(instance, result);
409 }
410
411 instance->apiVersion = LVP_API_VERSION;
412 instance->physicalDeviceCount = -1;
413
414 // _mesa_locale_init();
415 // VG(VALGRIND_CREATE_MEMPOOL(instance, 0, false));
416
417 *pInstance = lvp_instance_to_handle(instance);
418
419 return VK_SUCCESS;
420 }
421
/* vkDestroyInstance: release the physical device (if it was ever
 * enumerated), the pipe-loader devices, and finally the instance
 * itself.  The teardown order matters: the physical device references
 * the loader devices, and the allocator lives in instance->vk. */
VKAPI_ATTR void VKAPI_CALL lvp_DestroyInstance(
   VkInstance                                  _instance,
   const VkAllocationCallbacks*                pAllocator)
{
   LVP_FROM_HANDLE(lvp_instance, instance, _instance);

   /* Destroying VK_NULL_HANDLE is a no-op per the Vulkan spec. */
   if (!instance)
      return;
   if (instance->physicalDeviceCount > 0)
      lvp_physical_device_finish(&instance->physicalDevice);

   pipe_loader_release(&instance->devs, instance->num_devices);

   vk_instance_finish(&instance->vk);
   vk_free(&instance->vk.alloc, instance);
}
439
440 #if defined(HAVE_PIPE_LOADER_DRI)
/* drisw get_image hook — no-op stub; lavapipe never reads back through
 * this path. */
static void lvp_get_image(struct dri_drawable *dri_drawable,
                          int x, int y, unsigned width, unsigned height, unsigned stride,
                          void *data)
{

}
447
/* drisw put_image hook — only logs the call; no presentation happens
 * through this path. */
static void lvp_put_image(struct dri_drawable *dri_drawable,
                          void *data, unsigned width, unsigned height)
{
   fprintf(stderr, "put image %dx%d\n", width, height);
}
453
/* drisw put_image2 hook — only logs the call; no presentation happens
 * through this path. */
static void lvp_put_image2(struct dri_drawable *dri_drawable,
                           void *data, int x, int y, unsigned width, unsigned height,
                           unsigned stride)
{
   fprintf(stderr, "put image 2 %d,%d %dx%d\n", x, y, width, height);
}
460
/* Loader callbacks handed to pipe_loader_sw_probe_dri(); all three are
 * stubs/log-only (see above). */
static struct drisw_loader_funcs lvp_sw_lf = {
   .get_image = lvp_get_image,
   .put_image = lvp_put_image,
   .put_image2 = lvp_put_image2,
};
466 #endif
467
/* Lazily probe the (single) software pipe-loader device and initialize
 * the physical device for it.  Idempotent: physicalDeviceCount != -1
 * means enumeration already ran and its outcome is cached. */
static VkResult
lvp_enumerate_physical_devices(struct lvp_instance *instance)
{
   VkResult result;

   if (instance->physicalDeviceCount != -1)
      return VK_SUCCESS;

   /* sw only for now */
   instance->num_devices = pipe_loader_sw_probe(NULL, 0);

   assert(instance->num_devices == 1);

   /* NOTE(review): the probe calls' return values are not checked —
    * confirm they cannot fail in the sw-only configuration. */
#if defined(HAVE_PIPE_LOADER_DRI)
   pipe_loader_sw_probe_dri(&instance->devs, &lvp_sw_lf);
#else
   pipe_loader_sw_probe_null(&instance->devs);
#endif

   result = lvp_physical_device_init(&instance->physicalDevice,
                                     instance, &instance->devs[0]);
   /* INCOMPATIBLE_DRIVER is "no devices", not a hard failure; any other
    * error leaves physicalDeviceCount at -1 so a later call retries. */
   if (result == VK_ERROR_INCOMPATIBLE_DRIVER) {
      instance->physicalDeviceCount = 0;
   } else if (result == VK_SUCCESS) {
      instance->physicalDeviceCount = 1;
   }

   return result;
}
497
lvp_EnumeratePhysicalDevices(VkInstance _instance,uint32_t * pPhysicalDeviceCount,VkPhysicalDevice * pPhysicalDevices)498 VKAPI_ATTR VkResult VKAPI_CALL lvp_EnumeratePhysicalDevices(
499 VkInstance _instance,
500 uint32_t* pPhysicalDeviceCount,
501 VkPhysicalDevice* pPhysicalDevices)
502 {
503 LVP_FROM_HANDLE(lvp_instance, instance, _instance);
504 VkResult result;
505
506 result = lvp_enumerate_physical_devices(instance);
507 if (result != VK_SUCCESS)
508 return result;
509
510 if (!pPhysicalDevices) {
511 *pPhysicalDeviceCount = instance->physicalDeviceCount;
512 } else if (*pPhysicalDeviceCount >= 1) {
513 pPhysicalDevices[0] = lvp_physical_device_to_handle(&instance->physicalDevice);
514 *pPhysicalDeviceCount = 1;
515 } else {
516 *pPhysicalDeviceCount = 0;
517 }
518
519 return VK_SUCCESS;
520 }
521
/* vkEnumeratePhysicalDeviceGroups: report a single group containing
 * lavapipe's one physical device.  The outarray macros handle the
 * query-count / fill / VK_INCOMPLETE protocol. */
VKAPI_ATTR VkResult VKAPI_CALL lvp_EnumeratePhysicalDeviceGroups(
   VkInstance                                  _instance,
   uint32_t*                                   pPhysicalDeviceGroupCount,
   VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties)
{
   LVP_FROM_HANDLE(lvp_instance, instance, _instance);
   VK_OUTARRAY_MAKE_TYPED(VkPhysicalDeviceGroupProperties, out,
                          pPhysicalDeviceGroupProperties,
                          pPhysicalDeviceGroupCount);

   VkResult result = lvp_enumerate_physical_devices(instance);
   if (result != VK_SUCCESS)
      return result;

   /* NOTE(review): a group is reported even when enumeration found zero
    * compatible devices (physicalDeviceCount == 0) — confirm intended. */
   vk_outarray_append_typed(VkPhysicalDeviceGroupProperties, &out, p) {
      p->physicalDeviceCount = 1;
      memset(p->physicalDevices, 0, sizeof(p->physicalDevices));
      p->physicalDevices[0] = lvp_physical_device_to_handle(&instance->physicalDevice);
      p->subsetAllocation = false;
   }

   return vk_outarray_status(&out);
}
545
/* vkGetPhysicalDeviceFeatures: fill the core feature struct from the
 * gallium screen's capabilities.  Features with no matching cap are
 * hard-coded to what llvmpipe can always (or never) do. */
VKAPI_ATTR void VKAPI_CALL lvp_GetPhysicalDeviceFeatures(
   VkPhysicalDevice                            physicalDevice,
   VkPhysicalDeviceFeatures*                   pFeatures)
{
   LVP_FROM_HANDLE(lvp_physical_device, pdevice, physicalDevice);
   /* Dynamic image/sampler indexing is currently disabled; the GLSL
    * feature-level check it would be tied to is commented out. */
   bool indirect = false;//pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_GLSL_FEATURE_LEVEL) >= 400;
   /* Defensively zero the whole struct (the assignment below also
    * zero-initializes unnamed members per C semantics). */
   memset(pFeatures, 0, sizeof(*pFeatures));
   *pFeatures = (VkPhysicalDeviceFeatures) {
      .robustBufferAccess                       = true,
      .fullDrawIndexUint32                      = true,
      .imageCubeArray                           = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_CUBE_MAP_ARRAY) != 0),
      .independentBlend                         = true,
      .geometryShader                           = (pdevice->pscreen->get_shader_param(pdevice->pscreen, PIPE_SHADER_GEOMETRY, PIPE_SHADER_CAP_MAX_INSTRUCTIONS) != 0),
      .tessellationShader                       = (pdevice->pscreen->get_shader_param(pdevice->pscreen, PIPE_SHADER_TESS_EVAL, PIPE_SHADER_CAP_MAX_INSTRUCTIONS) != 0),
      .sampleRateShading                        = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_SAMPLE_SHADING) != 0),
      .dualSrcBlend                             = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MAX_DUAL_SOURCE_RENDER_TARGETS) != 0),
      .logicOp                                  = true,
      .multiDrawIndirect                        = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MULTI_DRAW_INDIRECT) != 0),
      .drawIndirectFirstInstance                = true,
      .depthClamp                               = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_DEPTH_CLIP_DISABLE) != 0),
      .depthBiasClamp                           = true,
      .fillModeNonSolid                         = true,
      .depthBounds                              = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_DEPTH_BOUNDS_TEST) != 0),
      .wideLines                                = true,
      .largePoints                              = true,
      .alphaToOne                               = true,
      .multiViewport                            = true,
      .samplerAnisotropy                        = true,
      .textureCompressionETC2                   = false,
      .textureCompressionASTC_LDR               = false,
      .textureCompressionBC                     = true,
      .occlusionQueryPrecise                    = true,
      .pipelineStatisticsQuery                  = true,
      .vertexPipelineStoresAndAtomics           = (min_vertex_pipeline_param(pdevice->pscreen, PIPE_SHADER_CAP_MAX_SHADER_BUFFERS) != 0),
      .fragmentStoresAndAtomics                 = (pdevice->pscreen->get_shader_param(pdevice->pscreen, PIPE_SHADER_FRAGMENT, PIPE_SHADER_CAP_MAX_SHADER_BUFFERS) != 0),
      .shaderTessellationAndGeometryPointSize   = true,
      .shaderImageGatherExtended                = true,
      .shaderStorageImageExtendedFormats        = (min_shader_param(pdevice->pscreen, PIPE_SHADER_CAP_MAX_SHADER_IMAGES) != 0),
      .shaderStorageImageMultisample            = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_TEXTURE_MULTISAMPLE) != 0),
      .shaderUniformBufferArrayDynamicIndexing  = true,
      .shaderSampledImageArrayDynamicIndexing   = indirect,
      .shaderStorageBufferArrayDynamicIndexing  = true,
      .shaderStorageImageArrayDynamicIndexing   = indirect,
      .shaderStorageImageReadWithoutFormat      = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_IMAGE_LOAD_FORMATTED) != 0),
      .shaderStorageImageWriteWithoutFormat     = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_IMAGE_STORE_FORMATTED) != 0),
      .shaderClipDistance                       = true,
      .shaderCullDistance                       = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_CULL_DISTANCE) == 1),
      .shaderFloat64                            = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_DOUBLES) == 1),
      .shaderInt64                              = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_INT64) == 1),
      .shaderInt16                              = (min_shader_param(pdevice->pscreen, PIPE_SHADER_CAP_INT16) == 1),
      .variableMultisampleRate                  = false,
      .inheritedQueries                         = false,
   };
}
600
/* Fill the Vulkan 1.1 core feature struct.  Fields are assigned
 * individually (rather than via struct assignment) so the caller's
 * sType/pNext chain is left intact. */
static void
lvp_get_physical_device_features_1_1(struct lvp_physical_device *pdevice,
                                     VkPhysicalDeviceVulkan11Features *f)
{
   assert(f->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES);

   f->storageBuffer16BitAccess            = true;
   f->uniformAndStorageBuffer16BitAccess  = true;
   f->storagePushConstant16               = true;
   f->storageInputOutput16                = false;
   f->multiview                           = true;
   f->multiviewGeometryShader             = true;
   f->multiviewTessellationShader         = true;
   f->variablePointersStorageBuffer       = true;
   f->variablePointers                    = false;
   f->protectedMemory                     = false;
   f->samplerYcbcrConversion              = false;
   f->shaderDrawParameters                = true;
}
620
/* Fill the Vulkan 1.2 core feature struct.  shaderFloat16 is the only
 * field queried from the screen; the descriptor-indexing group is
 * entirely unsupported. */
static void
lvp_get_physical_device_features_1_2(struct lvp_physical_device *pdevice,
                                     VkPhysicalDeviceVulkan12Features *f)
{
   assert(f->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES);

   f->samplerMirrorClampToEdge = true;
   f->drawIndirectCount = true;
   f->storageBuffer8BitAccess = true;
   f->uniformAndStorageBuffer8BitAccess = true;
   f->storagePushConstant8 = true;
   f->shaderBufferInt64Atomics = true;
   f->shaderSharedInt64Atomics = true;
   f->shaderFloat16 = pdevice->pscreen->get_shader_param(pdevice->pscreen, PIPE_SHADER_FRAGMENT, PIPE_SHADER_CAP_FP16) != 0;
   f->shaderInt8 = true;

   /* Descriptor indexing: not implemented. */
   f->descriptorIndexing = false;
   f->shaderInputAttachmentArrayDynamicIndexing = false;
   f->shaderUniformTexelBufferArrayDynamicIndexing = false;
   f->shaderStorageTexelBufferArrayDynamicIndexing = false;
   f->shaderUniformBufferArrayNonUniformIndexing = false;
   f->shaderSampledImageArrayNonUniformIndexing = false;
   f->shaderStorageBufferArrayNonUniformIndexing = false;
   f->shaderStorageImageArrayNonUniformIndexing = false;
   f->shaderInputAttachmentArrayNonUniformIndexing = false;
   f->shaderUniformTexelBufferArrayNonUniformIndexing = false;
   f->shaderStorageTexelBufferArrayNonUniformIndexing = false;
   f->descriptorBindingUniformBufferUpdateAfterBind = false;
   f->descriptorBindingSampledImageUpdateAfterBind = false;
   f->descriptorBindingStorageImageUpdateAfterBind = false;
   f->descriptorBindingStorageBufferUpdateAfterBind = false;
   f->descriptorBindingUniformTexelBufferUpdateAfterBind = false;
   f->descriptorBindingStorageTexelBufferUpdateAfterBind = false;
   f->descriptorBindingUpdateUnusedWhilePending = false;
   f->descriptorBindingPartiallyBound = false;
   f->descriptorBindingVariableDescriptorCount = false;
   f->runtimeDescriptorArray = false;

   f->samplerFilterMinmax = true;
   f->scalarBlockLayout = true;
   f->imagelessFramebuffer = true;
   f->uniformBufferStandardLayout = true;
   f->shaderSubgroupExtendedTypes = true;
   f->separateDepthStencilLayouts = true;
   f->hostQueryReset = true;
   f->timelineSemaphore = true;
   /* BDA is supported, but capture/replay and multi-device are not. */
   f->bufferDeviceAddress = true;
   f->bufferDeviceAddressCaptureReplay = false;
   f->bufferDeviceAddressMultiDevice = false;
   f->vulkanMemoryModel = true;
   f->vulkanMemoryModelDeviceScope = true;
   f->vulkanMemoryModelAvailabilityVisibilityChains = true;
   f->shaderOutputViewportIndex = true;
   f->shaderOutputLayer = true;
   f->subgroupBroadcastDynamicId = true;
}
677
678 static void
lvp_get_physical_device_features_1_3(struct lvp_physical_device * pdevice,VkPhysicalDeviceVulkan13Features * f)679 lvp_get_physical_device_features_1_3(struct lvp_physical_device *pdevice,
680 VkPhysicalDeviceVulkan13Features *f)
681 {
682 assert(f->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES);
683
684 f->robustImageAccess = VK_TRUE;
685 f->inlineUniformBlock = VK_TRUE;
686 f->descriptorBindingInlineUniformBlockUpdateAfterBind = VK_TRUE;
687 f->pipelineCreationCacheControl = VK_TRUE;
688 f->privateData = VK_TRUE;
689 f->shaderDemoteToHelperInvocation = VK_TRUE;
690 f->shaderTerminateInvocation = VK_TRUE;
691 f->subgroupSizeControl = VK_TRUE;
692 f->computeFullSubgroups = VK_TRUE;
693 f->synchronization2 = VK_TRUE;
694 f->textureCompressionASTC_HDR = VK_FALSE;
695 f->shaderZeroInitializeWorkgroupMemory = VK_TRUE;
696 f->dynamicRendering = VK_TRUE;
697 f->shaderIntegerDotProduct = VK_TRUE;
698 f->maintenance4 = VK_TRUE;
699 }
700
/* vkGetPhysicalDeviceFeatures2: fill the core VkPhysicalDeviceFeatures, then
 * walk the pNext chain.  Structs covered by the core 1.1/1.2/1.3 feature sets
 * are answered via the shared vk_get_physical_device_core_*_feature_ext
 * helpers; the switch below handles the remaining extension structs.
 */
VKAPI_ATTR void VKAPI_CALL lvp_GetPhysicalDeviceFeatures2(
   VkPhysicalDevice                            physicalDevice,
   VkPhysicalDeviceFeatures2                  *pFeatures)
{
   LVP_FROM_HANDLE(lvp_physical_device, pdevice, physicalDevice);
   lvp_GetPhysicalDeviceFeatures(physicalDevice, &pFeatures->features);

   VkPhysicalDeviceVulkan11Features core_1_1 = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES,
   };
   lvp_get_physical_device_features_1_1(pdevice, &core_1_1);

   VkPhysicalDeviceVulkan12Features core_1_2 = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES,
   };
   lvp_get_physical_device_features_1_2(pdevice, &core_1_2);

   VkPhysicalDeviceVulkan13Features core_1_3 = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES,
   };
   lvp_get_physical_device_features_1_3(pdevice, &core_1_3);

   vk_foreach_struct(ext, pFeatures->pNext) {

      /* Core-promoted feature structs are filled from the precomputed
       * core_1_x copies above. */
      if (vk_get_physical_device_core_1_1_feature_ext(ext, &core_1_1))
         continue;
      if (vk_get_physical_device_core_1_2_feature_ext(ext, &core_1_2))
         continue;
      if (vk_get_physical_device_core_1_3_feature_ext(ext, &core_1_3))
         continue;

      switch (ext->sType) {
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT: {
         VkPhysicalDevicePrivateDataFeaturesEXT *features =
            (VkPhysicalDevicePrivateDataFeaturesEXT *)ext;
         features->privateData = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES: {
         VkPhysicalDeviceSynchronization2Features *features =
            (VkPhysicalDeviceSynchronization2Features *)ext;
         features->synchronization2 = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES: {
         VkPhysicalDevicePipelineCreationCacheControlFeatures *features =
            (VkPhysicalDevicePipelineCreationCacheControlFeatures *)ext;
         features->pipelineCreationCacheControl = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT: {
         VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT *features =
            (VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT *)ext;
         features->primitivesGeneratedQuery = true;
         features->primitivesGeneratedQueryWithRasterizerDiscard = true;
         features->primitivesGeneratedQueryWithNonZeroStreams = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT: {
         VkPhysicalDeviceLineRasterizationFeaturesEXT *features =
            (VkPhysicalDeviceLineRasterizationFeaturesEXT *)ext;
         features->rectangularLines = true;
         features->bresenhamLines = true;
         features->smoothLines = true;
         features->stippledRectangularLines = true;
         features->stippledBresenhamLines = true;
         features->stippledSmoothLines = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT: {
         VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *features =
            (VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *)ext;
         /* Zero divisors are never supported; non-zero divisors depend on
          * the gallium cap. */
         features->vertexAttributeInstanceRateZeroDivisor = false;
         if (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_VERTEX_ELEMENT_INSTANCE_DIVISOR) != 0) {
            features->vertexAttributeInstanceRateDivisor = true;
         } else {
            features->vertexAttributeInstanceRateDivisor = false;
         }
         break;
      }

      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT: {
         VkPhysicalDeviceIndexTypeUint8FeaturesEXT *features =
            (VkPhysicalDeviceIndexTypeUint8FeaturesEXT *)ext;
         features->indexTypeUint8 = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES: {
         VkPhysicalDeviceShaderIntegerDotProductFeatures *features =
            (VkPhysicalDeviceShaderIntegerDotProductFeatures *)ext;
         features->shaderIntegerDotProduct = true;
         break;
      }

      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT: {
         VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT *features =
            (VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT *)ext;
         features->vertexInputDynamicState = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES: {
         VkPhysicalDeviceMaintenance4Features *features =
            (VkPhysicalDeviceMaintenance4Features *)ext;
         features->maintenance4 = true;
         break;
      }

      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES: {
         VkPhysicalDeviceSubgroupSizeControlFeatures *features =
            (VkPhysicalDeviceSubgroupSizeControlFeatures *)ext;
         features->subgroupSizeControl = true;
         features->computeFullSubgroups = true;
         break;
      }

      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT: {
         VkPhysicalDeviceDepthClipControlFeaturesEXT *features =
            (VkPhysicalDeviceDepthClipControlFeaturesEXT *)ext;
         features->depthClipControl = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES: {
         VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures *features =
            (VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures *)ext;
         features->shaderZeroInitializeWorkgroupMemory = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT: {
         VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *features =
            (VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *)ext;
         features->texelBufferAlignment = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT: {
         VkPhysicalDeviceTransformFeedbackFeaturesEXT *features =
            (VkPhysicalDeviceTransformFeedbackFeaturesEXT*)ext;

         features->transformFeedback = true;
         features->geometryStreams = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT: {
         VkPhysicalDeviceConditionalRenderingFeaturesEXT *features =
            (VkPhysicalDeviceConditionalRenderingFeaturesEXT*)ext;
         features->conditionalRendering = true;
         /* Secondary command buffers do not inherit the predicate. */
         features->inheritedConditionalRendering = false;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT: {
         VkPhysicalDeviceExtendedDynamicStateFeaturesEXT *features =
            (VkPhysicalDeviceExtendedDynamicStateFeaturesEXT*)ext;
         features->extendedDynamicState = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT: {
         VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT *features =
            (VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT *)ext;
         features->shaderDemoteToHelperInvocation = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT: {
         VkPhysicalDevice4444FormatsFeaturesEXT *features =
            (VkPhysicalDevice4444FormatsFeaturesEXT*)ext;
         features->formatA4R4G4B4 = true;
         features->formatA4B4G4R4 = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES: {
         VkPhysicalDeviceInlineUniformBlockFeatures *features =
            (VkPhysicalDeviceInlineUniformBlockFeatures*)ext;
         features->inlineUniformBlock = true;
         features->descriptorBindingInlineUniformBlockUpdateAfterBind = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT: {
         VkPhysicalDeviceCustomBorderColorFeaturesEXT *features =
            (VkPhysicalDeviceCustomBorderColorFeaturesEXT *)ext;
         features->customBorderColors = true;
         features->customBorderColorWithoutFormat = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT: {
         VkPhysicalDeviceColorWriteEnableFeaturesEXT *features =
            (VkPhysicalDeviceColorWriteEnableFeaturesEXT *)ext;
         features->colorWriteEnable = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT: {
         VkPhysicalDeviceProvokingVertexFeaturesEXT *features =
            (VkPhysicalDeviceProvokingVertexFeaturesEXT*)ext;
         features->provokingVertexLast = true;
         features->transformFeedbackPreservesProvokingVertex = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT: {
         VkPhysicalDeviceMultiDrawFeaturesEXT *features = (VkPhysicalDeviceMultiDrawFeaturesEXT *)ext;
         features->multiDraw = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT: {
         VkPhysicalDeviceDepthClipEnableFeaturesEXT *features =
            (VkPhysicalDeviceDepthClipEnableFeaturesEXT *)ext;
         if (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_DEPTH_CLAMP_ENABLE) != 0)
            features->depthClipEnable = true;
         else
            features->depthClipEnable = false;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT: {
         VkPhysicalDeviceExtendedDynamicState2FeaturesEXT *features = (VkPhysicalDeviceExtendedDynamicState2FeaturesEXT *)ext;
         features->extendedDynamicState2 = true;
         features->extendedDynamicState2LogicOp = true;
         features->extendedDynamicState2PatchControlPoints = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES: {
         VkPhysicalDeviceImageRobustnessFeaturesEXT *features = (VkPhysicalDeviceImageRobustnessFeaturesEXT *)ext;
         features->robustImageAccess = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT: {
         VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT *features = (VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT *)ext;
         features->primitiveTopologyListRestart = true;
         features->primitiveTopologyPatchListRestart = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES: {
         VkPhysicalDeviceShaderTerminateInvocationFeatures *features = (VkPhysicalDeviceShaderTerminateInvocationFeatures *)ext;
         features->shaderTerminateInvocation = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES_KHR: {
         VkPhysicalDeviceDynamicRenderingFeaturesKHR *features = (VkPhysicalDeviceDynamicRenderingFeaturesKHR *)ext;
         features->dynamicRendering = VK_TRUE;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT: {
         VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT *features = (VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT *)ext;
         features->graphicsPipelineLibrary = VK_TRUE;
         break;
      }
      default:
         /* Unknown structs are left untouched, as the spec requires. */
         break;
      }
   }
}
947
/* Fill 'uuid' (VK_UUID_SIZE bytes) with the pipeline-cache UUID.  The buffer
 * is zeroed first so any bytes past the terminating NUL are deterministic;
 * the string is "val-" plus MESA_GIT_SHA1 with its first four characters
 * skipped (presumably a "git-" style prefix — confirm against git_sha1.h),
 * truncated by snprintf to fit.
 */
void
lvp_device_get_cache_uuid(void *uuid)
{
   memset(uuid, 0, VK_UUID_SIZE);
   snprintf(uuid, VK_UUID_SIZE, "val-%s", &MESA_GIT_SHA1[4]);
}
954
lvp_GetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,VkPhysicalDeviceProperties * pProperties)955 VKAPI_ATTR void VKAPI_CALL lvp_GetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
956 VkPhysicalDeviceProperties *pProperties)
957 {
958 LVP_FROM_HANDLE(lvp_physical_device, pdevice, physicalDevice);
959
960 *pProperties = (VkPhysicalDeviceProperties) {
961 .apiVersion = LVP_API_VERSION,
962 .driverVersion = 1,
963 .vendorID = VK_VENDOR_ID_MESA,
964 .deviceID = 0,
965 .deviceType = VK_PHYSICAL_DEVICE_TYPE_CPU,
966 .limits = pdevice->device_limits,
967 .sparseProperties = {0},
968 };
969
970 strcpy(pProperties->deviceName, pdevice->pscreen->get_name(pdevice->pscreen));
971 lvp_device_get_cache_uuid(pProperties->pipelineCacheUUID);
972
973 }
974
975 extern unsigned lp_native_vector_width;
976 static void
lvp_get_physical_device_properties_1_1(struct lvp_physical_device * pdevice,VkPhysicalDeviceVulkan11Properties * p)977 lvp_get_physical_device_properties_1_1(struct lvp_physical_device *pdevice,
978 VkPhysicalDeviceVulkan11Properties *p)
979 {
980 assert(p->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES);
981
982 pdevice->pscreen->get_device_uuid(pdevice->pscreen, (char*)(p->deviceUUID));
983 pdevice->pscreen->get_driver_uuid(pdevice->pscreen, (char*)(p->driverUUID));
984 memset(p->deviceLUID, 0, VK_LUID_SIZE);
985 /* The LUID is for Windows. */
986 p->deviceLUIDValid = false;
987 p->deviceNodeMask = 0;
988
989 p->subgroupSize = lp_native_vector_width / 32;
990 p->subgroupSupportedStages = VK_SHADER_STAGE_FRAGMENT_BIT | VK_SHADER_STAGE_COMPUTE_BIT;
991 p->subgroupSupportedOperations = VK_SUBGROUP_FEATURE_BASIC_BIT | VK_SUBGROUP_FEATURE_VOTE_BIT | VK_SUBGROUP_FEATURE_ARITHMETIC_BIT | VK_SUBGROUP_FEATURE_BALLOT_BIT;
992 p->subgroupQuadOperationsInAllStages = false;
993
994 p->pointClippingBehavior = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES;
995 p->maxMultiviewViewCount = 6;
996 p->maxMultiviewInstanceIndex = INT_MAX;
997 p->protectedNoFault = false;
998 p->maxPerSetDescriptors = 1024;
999 p->maxMemoryAllocationSize = (1u << 31);
1000 }
1001
/* Fill the Vulkan 1.2 core properties: driver identity, float-controls
 * behavior, update-after-bind descriptor limits, and resolve/filter caps. */
static void
lvp_get_physical_device_properties_1_2(struct lvp_physical_device *pdevice,
                                       VkPhysicalDeviceVulkan12Properties *p)
{
   p->driverID = VK_DRIVER_ID_MESA_LLVMPIPE;
   snprintf(p->driverName, VK_MAX_DRIVER_NAME_SIZE, "llvmpipe");
   snprintf(p->driverInfo, VK_MAX_DRIVER_INFO_SIZE, "Mesa " PACKAGE_VERSION MESA_GIT_SHA1
#ifdef MESA_LLVM_VERSION_STRING
            " (LLVM " MESA_LLVM_VERSION_STRING ")"
#endif
   );

   /* No conformance submission: all-zero conformance version. */
   p->conformanceVersion = (VkConformanceVersion){
      .major = 0,
      .minor = 0,
      .subminor = 0,
      .patch = 0,
   };

   p->denormBehaviorIndependence = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR;
   p->roundingModeIndependence = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR;
   p->shaderDenormFlushToZeroFloat16 = false;
   p->shaderDenormPreserveFloat16 = false;
   p->shaderRoundingModeRTEFloat16 = true;
   p->shaderRoundingModeRTZFloat16 = false;
   p->shaderSignedZeroInfNanPreserveFloat16 = true;

   p->shaderDenormFlushToZeroFloat32 = false;
   p->shaderDenormPreserveFloat32 = false;
   p->shaderRoundingModeRTEFloat32 = true;
   p->shaderRoundingModeRTZFloat32 = false;
   p->shaderSignedZeroInfNanPreserveFloat32 = true;

   p->shaderDenormFlushToZeroFloat64 = false;
   p->shaderDenormPreserveFloat64 = false;
   p->shaderRoundingModeRTEFloat64 = true;
   p->shaderRoundingModeRTZFloat64 = false;
   p->shaderSignedZeroInfNanPreserveFloat64 = true;

   p->maxUpdateAfterBindDescriptorsInAllPools = UINT32_MAX / 64;
   p->shaderUniformBufferArrayNonUniformIndexingNative = false;
   p->shaderSampledImageArrayNonUniformIndexingNative = false;
   p->shaderStorageBufferArrayNonUniformIndexingNative = false;
   p->shaderStorageImageArrayNonUniformIndexingNative = false;
   p->shaderInputAttachmentArrayNonUniformIndexingNative = false;
   p->robustBufferAccessUpdateAfterBind = true;
   p->quadDivergentImplicitLod = false;

   /* Single placeholder cap for all update-after-bind descriptor limits. */
   size_t max_descriptor_set_size = 65536; //TODO
   p->maxPerStageDescriptorUpdateAfterBindSamplers = max_descriptor_set_size;
   p->maxPerStageDescriptorUpdateAfterBindUniformBuffers = max_descriptor_set_size;
   p->maxPerStageDescriptorUpdateAfterBindStorageBuffers = max_descriptor_set_size;
   p->maxPerStageDescriptorUpdateAfterBindSampledImages = max_descriptor_set_size;
   p->maxPerStageDescriptorUpdateAfterBindStorageImages = max_descriptor_set_size;
   p->maxPerStageDescriptorUpdateAfterBindInputAttachments = max_descriptor_set_size;
   p->maxPerStageUpdateAfterBindResources = max_descriptor_set_size;
   p->maxDescriptorSetUpdateAfterBindSamplers = max_descriptor_set_size;
   p->maxDescriptorSetUpdateAfterBindUniformBuffers = max_descriptor_set_size;
   p->maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = 16;
   p->maxDescriptorSetUpdateAfterBindStorageBuffers = max_descriptor_set_size;
   p->maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = 16;
   p->maxDescriptorSetUpdateAfterBindSampledImages = max_descriptor_set_size;
   p->maxDescriptorSetUpdateAfterBindStorageImages = max_descriptor_set_size;
   p->maxDescriptorSetUpdateAfterBindInputAttachments = max_descriptor_set_size;

   p->supportedDepthResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT | VK_RESOLVE_MODE_AVERAGE_BIT;
   p->supportedStencilResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT;
   p->independentResolveNone = false;
   p->independentResolve = false;

   p->filterMinmaxImageComponentMapping = true;
   p->filterMinmaxSingleComponentFormats = true;

   p->maxTimelineSemaphoreValueDifference = UINT64_MAX;
   p->framebufferIntegerColorSampleCounts = VK_SAMPLE_COUNT_1_BIT;
}
1078
/* Fill the Vulkan 1.3 core properties: subgroup-size-control range, inline
 * uniform block limits, and texel-buffer alignment from the gallium cap. */
static void
lvp_get_physical_device_properties_1_3(struct lvp_physical_device *pdevice,
                                       VkPhysicalDeviceVulkan13Properties *p)
{
   /* Fixed subgroup size: native SIMD width in 32-bit lanes. */
   p->minSubgroupSize = lp_native_vector_width / 32;
   p->maxSubgroupSize = lp_native_vector_width / 32;
   p->maxComputeWorkgroupSubgroups = 32;
   p->requiredSubgroupSizeStages = VK_SHADER_STAGE_FRAGMENT_BIT | VK_SHADER_STAGE_COMPUTE_BIT;
   p->maxInlineUniformTotalSize = MAX_DESCRIPTOR_UNIFORM_BLOCK_SIZE * MAX_PER_STAGE_DESCRIPTOR_UNIFORM_BLOCKS * MAX_SETS;
   p->maxInlineUniformBlockSize = MAX_DESCRIPTOR_UNIFORM_BLOCK_SIZE;
   p->maxPerStageDescriptorInlineUniformBlocks = MAX_PER_STAGE_DESCRIPTOR_UNIFORM_BLOCKS;
   p->maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = MAX_PER_STAGE_DESCRIPTOR_UNIFORM_BLOCKS;
   p->maxDescriptorSetInlineUniformBlocks = MAX_PER_STAGE_DESCRIPTOR_UNIFORM_BLOCKS;
   p->maxDescriptorSetUpdateAfterBindInlineUniformBlocks = MAX_PER_STAGE_DESCRIPTOR_UNIFORM_BLOCKS;
   /* One alignment cap serves both uniform and storage texel buffers. */
   int alignment = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_TEXTURE_BUFFER_OFFSET_ALIGNMENT);
   p->storageTexelBufferOffsetAlignmentBytes = alignment;
   p->storageTexelBufferOffsetSingleTexelAlignment = true;
   p->uniformTexelBufferOffsetAlignmentBytes = alignment;
   p->uniformTexelBufferOffsetSingleTexelAlignment = true;
   p->maxBufferSize = UINT32_MAX;
}
1100
/* vkGetPhysicalDeviceProperties2: fill the core properties, then walk the
 * pNext chain.  Structs covered by the core 1.1/1.2/1.3 property sets are
 * answered via the shared vk_get_physical_device_core_*_property_ext
 * helpers; the switch below handles the remaining extension structs.
 */
VKAPI_ATTR void VKAPI_CALL lvp_GetPhysicalDeviceProperties2(
   VkPhysicalDevice                            physicalDevice,
   VkPhysicalDeviceProperties2                *pProperties)
{
   LVP_FROM_HANDLE(lvp_physical_device, pdevice, physicalDevice);
   lvp_GetPhysicalDeviceProperties(physicalDevice, &pProperties->properties);

   VkPhysicalDeviceVulkan11Properties core_1_1 = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES,
   };
   lvp_get_physical_device_properties_1_1(pdevice, &core_1_1);

   VkPhysicalDeviceVulkan12Properties core_1_2 = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES,
   };
   lvp_get_physical_device_properties_1_2(pdevice, &core_1_2);

   VkPhysicalDeviceVulkan13Properties core_1_3 = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES,
   };
   lvp_get_physical_device_properties_1_3(pdevice, &core_1_3);

   vk_foreach_struct(ext, pProperties->pNext) {

      if (vk_get_physical_device_core_1_1_property_ext(ext, &core_1_1))
         continue;
      if (vk_get_physical_device_core_1_2_property_ext(ext, &core_1_2))
         continue;
      if (vk_get_physical_device_core_1_3_property_ext(ext, &core_1_3))
         continue;
      switch (ext->sType) {
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR: {
         VkPhysicalDevicePushDescriptorPropertiesKHR *properties =
            (VkPhysicalDevicePushDescriptorPropertiesKHR *) ext;
         properties->maxPushDescriptors = MAX_PUSH_DESCRIPTORS;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES: {
         VkPhysicalDeviceShaderIntegerDotProductProperties *properties =
            (VkPhysicalDeviceShaderIntegerDotProductProperties *) ext;
         /* None of the dot-product paths are accelerated: zero every
          * boolean in one memset, then restore the chain header. */
         void *pnext = properties->pNext;
         memset(properties, 0, sizeof(VkPhysicalDeviceShaderIntegerDotProductProperties));
         properties->sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES;
         properties->pNext = pnext;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES: {
         VkPhysicalDevicePointClippingProperties *properties =
            (VkPhysicalDevicePointClippingProperties*)ext;
         properties->pointClippingBehavior = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT: {
         VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *props =
            (VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *)ext;
         if (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_VERTEX_ELEMENT_INSTANCE_DIVISOR) != 0)
            props->maxVertexAttribDivisor = UINT32_MAX;
         else
            props->maxVertexAttribDivisor = 1;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT: {
         VkPhysicalDeviceTransformFeedbackPropertiesEXT *properties =
            (VkPhysicalDeviceTransformFeedbackPropertiesEXT*)ext;
         properties->maxTransformFeedbackStreams = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MAX_VERTEX_STREAMS);
         properties->maxTransformFeedbackBuffers = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS);
         properties->maxTransformFeedbackBufferSize = UINT32_MAX;
         properties->maxTransformFeedbackStreamDataSize = 512;
         properties->maxTransformFeedbackBufferDataSize = 512;
         properties->maxTransformFeedbackBufferDataStride = 512;
         properties->transformFeedbackQueries = true;
         properties->transformFeedbackStreamsLinesTriangles = false;
         properties->transformFeedbackRasterizationStreamSelect = false;
         properties->transformFeedbackDraw = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR: {
         VkPhysicalDeviceMaintenance4PropertiesKHR *properties =
            (VkPhysicalDeviceMaintenance4PropertiesKHR *)ext;
         properties->maxBufferSize = UINT32_MAX;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT: {
         VkPhysicalDeviceLineRasterizationPropertiesEXT *properties =
            (VkPhysicalDeviceLineRasterizationPropertiesEXT *)ext;
         properties->lineSubPixelPrecisionBits =
            pdevice->pscreen->get_param(pdevice->pscreen,
                                        PIPE_CAP_RASTERIZER_SUBPIXEL_BITS);
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES: {
         VkPhysicalDeviceInlineUniformBlockProperties *properties =
            (VkPhysicalDeviceInlineUniformBlockProperties *)ext;
         properties->maxInlineUniformBlockSize = MAX_DESCRIPTOR_UNIFORM_BLOCK_SIZE;
         properties->maxPerStageDescriptorInlineUniformBlocks = MAX_PER_STAGE_DESCRIPTOR_UNIFORM_BLOCKS;
         properties->maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = MAX_PER_STAGE_DESCRIPTOR_UNIFORM_BLOCKS;
         properties->maxDescriptorSetInlineUniformBlocks = MAX_PER_STAGE_DESCRIPTOR_UNIFORM_BLOCKS;
         properties->maxDescriptorSetUpdateAfterBindInlineUniformBlocks = MAX_PER_STAGE_DESCRIPTOR_UNIFORM_BLOCKS;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT: {
         VkPhysicalDeviceExternalMemoryHostPropertiesEXT *properties =
            (VkPhysicalDeviceExternalMemoryHostPropertiesEXT *)ext;
         properties->minImportedHostPointerAlignment = 4096;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT: {
         VkPhysicalDeviceCustomBorderColorPropertiesEXT *properties =
            (VkPhysicalDeviceCustomBorderColorPropertiesEXT *)ext;
         properties->maxCustomBorderColorSamplers = 32 * 1024;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES: {
         VkPhysicalDeviceSubgroupSizeControlProperties *props = (VkPhysicalDeviceSubgroupSizeControlProperties *)ext;
         /* Same values as the 1.3 core properties above. */
         props->minSubgroupSize = lp_native_vector_width / 32;
         props->maxSubgroupSize = lp_native_vector_width / 32;
         props->maxComputeWorkgroupSubgroups = 32;
         props->requiredSubgroupSizeStages = VK_SHADER_STAGE_FRAGMENT_BIT | VK_SHADER_STAGE_COMPUTE_BIT;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT: {
         VkPhysicalDeviceProvokingVertexPropertiesEXT *properties =
            (VkPhysicalDeviceProvokingVertexPropertiesEXT*)ext;
         properties->provokingVertexModePerPipeline = true;
         properties->transformFeedbackPreservesTriangleFanProvokingVertex = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT: {
         VkPhysicalDeviceMultiDrawPropertiesEXT *props = (VkPhysicalDeviceMultiDrawPropertiesEXT *)ext;
         props->maxMultiDrawCount = 2048;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT: {
         VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT *properties =
            (VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT *)ext;
         int alignment = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_TEXTURE_BUFFER_OFFSET_ALIGNMENT);
         properties->storageTexelBufferOffsetAlignmentBytes = alignment;
         properties->storageTexelBufferOffsetSingleTexelAlignment = true;
         properties->uniformTexelBufferOffsetAlignmentBytes = alignment;
         properties->uniformTexelBufferOffsetSingleTexelAlignment = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT: {
         VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT *props = (VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT *)ext;
         props->graphicsPipelineLibraryFastLinking = VK_TRUE;
         props->graphicsPipelineLibraryIndependentInterpolationDecoration = VK_TRUE;
         break;
      }
      default:
         /* Unknown structs are left untouched, as the spec requires. */
         break;
      }
   }
}
1254
/* vkGetPhysicalDeviceQueueFamilyProperties2: advertise a single queue family
 * with one graphics+compute+transfer queue.  The outarray helpers implement
 * the standard two-call count/fill protocol on pCount. */
VKAPI_ATTR void VKAPI_CALL lvp_GetPhysicalDeviceQueueFamilyProperties2(
   VkPhysicalDevice                            physicalDevice,
   uint32_t*                                   pCount,
   VkQueueFamilyProperties2                   *pQueueFamilyProperties)
{
   VK_OUTARRAY_MAKE_TYPED(VkQueueFamilyProperties2, out, pQueueFamilyProperties, pCount);

   vk_outarray_append_typed(VkQueueFamilyProperties2, &out, p) {
      p->queueFamilyProperties = (VkQueueFamilyProperties) {
         .queueFlags = VK_QUEUE_GRAPHICS_BIT |
                       VK_QUEUE_COMPUTE_BIT |
                       VK_QUEUE_TRANSFER_BIT,
         .queueCount = 1,
         .timestampValidBits = 64,
         .minImageTransferGranularity = (VkExtent3D) { 1, 1, 1 },
      };
   }
}
1273
lvp_GetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice,VkPhysicalDeviceMemoryProperties * pMemoryProperties)1274 VKAPI_ATTR void VKAPI_CALL lvp_GetPhysicalDeviceMemoryProperties(
1275 VkPhysicalDevice physicalDevice,
1276 VkPhysicalDeviceMemoryProperties* pMemoryProperties)
1277 {
1278 pMemoryProperties->memoryTypeCount = 1;
1279 pMemoryProperties->memoryTypes[0] = (VkMemoryType) {
1280 .propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
1281 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
1282 VK_MEMORY_PROPERTY_HOST_COHERENT_BIT |
1283 VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
1284 .heapIndex = 0,
1285 };
1286
1287 pMemoryProperties->memoryHeapCount = 1;
1288 pMemoryProperties->memoryHeaps[0] = (VkMemoryHeap) {
1289 .size = 2ULL*1024*1024*1024,
1290 .flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT,
1291 };
1292 }
1293
/* vkGetPhysicalDeviceMemoryProperties2: no extension structs are handled on
 * the pNext chain; just fill the core memory properties. */
VKAPI_ATTR void VKAPI_CALL lvp_GetPhysicalDeviceMemoryProperties2(
   VkPhysicalDevice                            physicalDevice,
   VkPhysicalDeviceMemoryProperties2          *pMemoryProperties)
{
   lvp_GetPhysicalDeviceMemoryProperties(physicalDevice,
                                         &pMemoryProperties->memoryProperties);
}
1301
1302 VKAPI_ATTR VkResult VKAPI_CALL
lvp_GetMemoryHostPointerPropertiesEXT(VkDevice _device,VkExternalMemoryHandleTypeFlagBits handleType,const void * pHostPointer,VkMemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties)1303 lvp_GetMemoryHostPointerPropertiesEXT(
1304 VkDevice _device,
1305 VkExternalMemoryHandleTypeFlagBits handleType,
1306 const void *pHostPointer,
1307 VkMemoryHostPointerPropertiesEXT *pMemoryHostPointerProperties)
1308 {
1309 switch (handleType) {
1310 case VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT: {
1311 pMemoryHostPointerProperties->memoryTypeBits = 1;
1312 return VK_SUCCESS;
1313 }
1314 default:
1315 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1316 }
1317 }
1318
/* vkGetInstanceProcAddr: forward the lookup to the common runtime using this
 * driver's generated instance entrypoint table. */
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL lvp_GetInstanceProcAddr(
   VkInstance                                  _instance,
   const char*                                 pName)
{
   LVP_FROM_HANDLE(lvp_instance, instance, _instance);
   return vk_instance_get_proc_addr(&instance->vk,
                                    &lvp_instance_entrypoints,
                                    pName);
}
1328
1329 /* Windows will use a dll definition file to avoid build errors. */
1330 #ifdef _WIN32
1331 #undef PUBLIC
1332 #define PUBLIC
1333 #endif
1334
1335 /* The loader wants us to expose a second GetInstanceProcAddr function
1336 * to work around certain LD_PRELOAD issues seen in apps.
1337 */
1338 PUBLIC
1339 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(
1340 VkInstance instance,
1341 const char* pName);
1342
PUBLIC
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(
   VkInstance                                  instance,
   const char*                                 pName)
{
   /* ICD loader entry point; behaves exactly like lvp_GetInstanceProcAddr. */
   return lvp_GetInstanceProcAddr(instance, pName);
}
1350
1351 PUBLIC
1352 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(
1353 VkInstance _instance,
1354 const char* pName);
1355
PUBLIC
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(
   VkInstance                                  _instance,
   const char*                                 pName)
{
   /* Loader entry point for physical-device-level functions; resolved by the
    * common runtime. */
   LVP_FROM_HANDLE(lvp_instance, instance, _instance);
   return vk_instance_get_physical_device_proc_addr(&instance->vk, pName);
}
1364
/* Record 'handle' as the queue's most recently flushed fence together with
 * its timeline id.  queue.last_lock serializes updates/readers of the
 * (last_fence, last_fence_timeline) pair; fence_reference both releases the
 * previous fence and retains the new one. */
static void
set_last_fence(struct lvp_device *device, struct pipe_fence_handle *handle, uint64_t timeline)
{
   simple_mtx_lock(&device->queue.last_lock);
   device->queue.last_fence_timeline = timeline;
   device->pscreen->fence_reference(device->pscreen, &device->queue.last_fence, handle);
   simple_mtx_unlock(&device->queue.last_lock);
}
1373
/* Flush the queue context and fan the resulting gallium fence out to every
 * interested waiter: the optional VkFence, any binary (non-timeline)
 * semaphores being signaled, the queue's "last fence" slot, and the timeline
 * links that this submission signals.  Called from the queue thread.
 *
 * fence:        optional VkFence to attach the flush fence to.
 * timeline:     timeline id recorded alongside the queue's last fence.
 * semaphores:   num_signal_semaphores semaphores to signal (binary ones get
 *               the fence directly under their lock).
 * timelines:    num_timelines timeline links whose fence slot receives the
 *               flush fence.
 */
static void
thread_flush(struct lvp_device *device, struct lvp_fence *fence, uint64_t timeline,
             unsigned num_signal_semaphores, struct lvp_semaphore **semaphores,
             unsigned num_timelines, struct lvp_semaphore_timeline **timelines)
{
   struct pipe_fence_handle *handle = NULL;
   device->queue.ctx->flush(device->queue.ctx, &handle, 0);
   if (fence)
      device->pscreen->fence_reference(device->pscreen, &fence->handle, handle);
   for (unsigned i = 0; i < num_signal_semaphores; i++) {
      struct lvp_semaphore *sema = semaphores[i];
      if (!sema->is_timeline) {
         simple_mtx_lock(&sema->lock);
         device->pscreen->fence_reference(device->pscreen, &sema->handle, handle);
         simple_mtx_unlock(&sema->lock);
      }
   }
   set_last_fence(device, handle, timeline);
   /* this is the array of signaling timeline semaphore links */
   for (unsigned i = 0; i < num_timelines; i++)
      device->pscreen->fence_reference(device->pscreen, &timelines[i]->fence, handle);

   /* Drop the local reference returned by flush(); the holders above each
    * retained their own. */
   device->pscreen->fence_reference(device->pscreen, &handle, NULL);
}
1398
1399 /* get a new timeline link for creating a new signal event
1400 * sema->lock MUST be locked before calling
1401 */
static struct lvp_semaphore_timeline *
get_semaphore_link(struct lvp_semaphore *sema)
{
   /* Free list empty: bulk-allocate a fresh bucket of links and push them
    * all onto sema->links. */
   if (!util_dynarray_num_elements(&sema->links, struct lvp_semaphore_timeline*)) {
#define NUM_LINKS 50
      /* bucket allocate using the ralloc ctx because I like buckets */
      struct lvp_semaphore_timeline *link = ralloc_array(sema->mem, struct lvp_semaphore_timeline, NUM_LINKS);
      for (unsigned i = 0; i < NUM_LINKS; i++) {
         link[i].next = NULL;
         link[i].fence = NULL;
         util_dynarray_append(&sema->links, struct lvp_semaphore_timeline*, &link[i]);
      }
   }
   /* Pop a free link and append it at the tail of the semaphore's pending
    * timeline list (sema->timeline is the head, sema->latest the tail). */
   struct lvp_semaphore_timeline *tl = util_dynarray_pop(&sema->links, struct lvp_semaphore_timeline*);
   if (sema->timeline)
      sema->latest->next = tl;
   else
      sema->timeline = tl;
   sema->latest = tl;
   return tl;
}
1423
1424 static bool
fence_finish(struct lvp_device * device,struct pipe_fence_handle * fence,uint64_t timeout)1425 fence_finish(struct lvp_device *device,
1426 struct pipe_fence_handle *fence, uint64_t timeout)
1427 {
1428 return fence && device->pscreen->fence_finish(device->pscreen, NULL, fence, timeout);
1429 }
1430
1431 /* prune any timeline links which are older than the current device timeline id
1432 * sema->lock MUST be locked before calling
1433 */
static void
prune_semaphore_links(struct lvp_device *device,
                      struct lvp_semaphore *sema, uint64_t timeline)
{
   if (!timeline)
      /* zero isn't a valid id to prune with */
      return;
   struct lvp_semaphore_timeline *tl = sema->timeline;
   /* walk the timeline links and pop all the ones that are old */
   while (tl && ((tl->timeline <= timeline) || (tl->signal <= sema->current))) {
      struct lvp_semaphore_timeline *cur = tl;
      /* only update current timeline id if the update is monotonic */
      if (sema->current < tl->signal)
         sema->current = tl->signal;
      /* recycle the retired link into the free list */
      util_dynarray_append(&sema->links, struct lvp_semaphore_timeline*, tl);
      tl = tl->next;
      /* unlink and drop the fence reference held by the retired link */
      cur->next = NULL;
      device->pscreen->fence_reference(device->pscreen, &cur->fence, NULL);
   }
   /* this is now the current timeline link */
   /* NOTE(review): sema->latest is not reset when the list empties; it looks
    * like get_semaphore_link only reads it when sema->timeline != NULL — verify */
   sema->timeline = tl;
}
1456
1457 /* find a timeline id that can be waited on to satisfy the signal condition
1458 * sema->lock MUST be locked before calling
1459 */
1460 static struct lvp_semaphore_timeline *
find_semaphore_timeline(struct lvp_semaphore * sema,uint64_t signal)1461 find_semaphore_timeline(struct lvp_semaphore *sema, uint64_t signal)
1462 {
1463 for (struct lvp_semaphore_timeline *tl = sema->timeline; tl; tl = tl->next) {
1464 if (tl->signal >= signal)
1465 return tl;
1466 }
1467 /* never submitted or is completed */
1468 return NULL;
1469 }
1470
/* Per-semaphore polling state used by wait_semaphores(). */
struct timeline_wait {
   bool done;                         /* wait condition already satisfied */
   struct lvp_semaphore_timeline *tl; /* timeline link being polled, if found */
};
1475
wait_semaphores(struct lvp_device * device,const VkSemaphoreWaitInfo * pWaitInfo,uint64_t timeout)1476 static VkResult wait_semaphores(struct lvp_device *device,
1477 const VkSemaphoreWaitInfo* pWaitInfo,
1478 uint64_t timeout)
1479 {
1480 /* build array of timeline links to poll */
1481 VkResult ret = VK_TIMEOUT;
1482 bool any = (pWaitInfo->flags & VK_SEMAPHORE_WAIT_ANY_BIT) == VK_SEMAPHORE_WAIT_ANY_BIT;
1483 unsigned num_remaining = any ? 1 : pWaitInfo->semaphoreCount;
1484 /* just allocate an array for simplicity */
1485 struct timeline_wait *tl_array = calloc(pWaitInfo->semaphoreCount, sizeof(struct timeline_wait));
1486
1487 int64_t abs_timeout = os_time_get_absolute_timeout(timeout);
1488 /* UINT64_MAX will always overflow, so special case it
1489 * otherwise, calculate ((timeout / num_semaphores) / 10) to allow waiting 10 times on every semaphore
1490 */
1491 uint64_t wait_interval = timeout == UINT64_MAX ? 5000 : timeout / pWaitInfo->semaphoreCount / 10;
1492 while (num_remaining) {
1493 for (unsigned i = 0; num_remaining && i < pWaitInfo->semaphoreCount; i++) {
1494 if (tl_array[i].done) //completed
1495 continue;
1496 if (timeout && timeout != UINT64_MAX) {
1497 /* update remaining timeout on every loop */
1498 int64_t time_ns = os_time_get_nano();
1499 if (abs_timeout <= time_ns)
1500 goto end;
1501 timeout = abs_timeout > time_ns ? abs_timeout - time_ns : 0;
1502 }
1503 const uint64_t waitval = pWaitInfo->pValues[i];
1504 LVP_FROM_HANDLE(lvp_semaphore, sema, pWaitInfo->pSemaphores[i]);
1505
1506 if (!sema->is_timeline) {
1507 simple_mtx_lock(&sema->lock);
1508 if (fence_finish(device, sema->handle, wait_interval)) {
1509 tl_array[i].done = true;
1510 num_remaining--;
1511 }
1512 simple_mtx_unlock(&sema->lock);
1513 continue;
1514 }
1515 if (sema->current >= waitval) {
1516 tl_array[i].done = true;
1517 num_remaining--;
1518 continue;
1519 }
1520 if (!tl_array[i].tl) {
1521 /* no timeline link was available yet: try to find one */
1522 simple_mtx_lock(&sema->lock);
1523 /* always prune first to update current timeline id */
1524 prune_semaphore_links(device, sema, device->queue.last_finished);
1525 tl_array[i].tl = find_semaphore_timeline(sema, waitval);
1526 if (timeout && !tl_array[i].tl) {
1527 /* still no timeline link available:
1528 * try waiting on the conditional for a broadcast instead of melting the cpu
1529 */
1530 mtx_lock(&sema->submit_lock);
1531 struct timespec t;
1532 t.tv_nsec = wait_interval % 1000000000u;
1533 t.tv_sec = (wait_interval - t.tv_nsec) / 1000000000u;
1534 cnd_timedwait(&sema->submit, &sema->submit_lock, &t);
1535 mtx_unlock(&sema->submit_lock);
1536 tl_array[i].tl = find_semaphore_timeline(sema, waitval);
1537 }
1538 simple_mtx_unlock(&sema->lock);
1539 }
1540 /* mark semaphore as done if:
1541 * - timeline id comparison passes
1542 * - fence for timeline id exists and completes
1543 */
1544 if (sema->current >= waitval ||
1545 (tl_array[i].tl &&
1546 fence_finish(device, tl_array[i].tl->fence, wait_interval))) {
1547 tl_array[i].done = true;
1548 num_remaining--;
1549 }
1550 }
1551 if (!timeout)
1552 break;
1553 }
1554 if (!num_remaining)
1555 ret = VK_SUCCESS;
1556
1557 end:
1558 free(tl_array);
1559 return ret;
1560 }
1561
1562 void
queue_thread_noop(void * data,void * gdata,int thread_index)1563 queue_thread_noop(void *data, void *gdata, int thread_index)
1564 {
1565 struct lvp_device *device = gdata;
1566 struct lvp_queue_noop *noop = data;
1567
1568 struct lvp_fence *fence = noop->fence;
1569 struct lvp_semaphore *semaphore = noop->sema;
1570
1571 thread_flush(device, fence, fence ? fence->timeline : 0, semaphore ? 1 : 0, &semaphore, 0, NULL);
1572 free(noop);
1573 }
1574
1575 static void
queue_thread(void * data,void * gdata,int thread_index)1576 queue_thread(void *data, void *gdata, int thread_index)
1577 {
1578 struct lvp_queue_work *task = data;
1579 struct lvp_device *device = gdata;
1580 struct lvp_queue *queue = &device->queue;
1581
1582 if (task->wait_count) {
1583 /* identical to WaitSemaphores */
1584 VkSemaphoreWaitInfo wait;
1585 wait.flags = 0; //wait on all semaphores
1586 wait.semaphoreCount = task->wait_count;
1587 wait.pSemaphores = task->waits;
1588 wait.pValues = task->wait_vals;
1589 //wait
1590 wait_semaphores(device, &wait, UINT64_MAX);
1591 }
1592
1593 //execute
1594 for (unsigned i = 0; i < task->cmd_buffer_count; i++) {
1595 lvp_execute_cmds(queue->device, queue, task->cmd_buffers[i]);
1596 }
1597
1598 thread_flush(device, task->fence, task->timeline, task->signal_count, task->signals, task->timeline_count, task->timelines);
1599 free(task);
1600 }
1601
1602 static VkResult
lvp_queue_init(struct lvp_device * device,struct lvp_queue * queue,const VkDeviceQueueCreateInfo * create_info,uint32_t index_in_family)1603 lvp_queue_init(struct lvp_device *device, struct lvp_queue *queue,
1604 const VkDeviceQueueCreateInfo *create_info,
1605 uint32_t index_in_family)
1606 {
1607 VkResult result = vk_queue_init(&queue->vk, &device->vk, create_info,
1608 index_in_family);
1609 if (result != VK_SUCCESS)
1610 return result;
1611
1612 queue->device = device;
1613
1614 simple_mtx_init(&queue->last_lock, mtx_plain);
1615 queue->timeline = 0;
1616 queue->ctx = device->pscreen->context_create(device->pscreen, NULL, PIPE_CONTEXT_ROBUST_BUFFER_ACCESS);
1617 queue->cso = cso_create_context(queue->ctx, CSO_NO_VBUF);
1618 util_queue_init(&queue->queue, "lavapipe", 8, 1, UTIL_QUEUE_INIT_RESIZE_IF_FULL, device);
1619 p_atomic_set(&queue->count, 0);
1620 queue->uploader = u_upload_create(queue->ctx, 1024 * 1024, PIPE_BIND_CONSTANT_BUFFER, PIPE_USAGE_STREAM, 0);
1621
1622 return VK_SUCCESS;
1623 }
1624
static void
lvp_queue_finish(struct lvp_queue *queue)
{
   /* drain and destroy the submit thread before anything it might touch */
   util_queue_finish(&queue->queue);
   util_queue_destroy(&queue->queue);

   /* destroy in reverse order of creation: uploader and cso use the context */
   u_upload_destroy(queue->uploader);
   cso_destroy_context(queue->cso);
   queue->ctx->destroy(queue->ctx);
   simple_mtx_destroy(&queue->last_lock);

   vk_queue_finish(&queue->vk);
}
1638
1639 static void
ref_pipeline_layout(struct vk_device * vk_device,VkPipelineLayout _layout)1640 ref_pipeline_layout(struct vk_device *vk_device, VkPipelineLayout _layout)
1641 {
1642 LVP_FROM_HANDLE(lvp_pipeline_layout, layout, _layout);
1643
1644 lvp_pipeline_layout_ref(layout);
1645 }
1646
1647 static void
unref_pipeline_layout(struct vk_device * vk_device,VkPipelineLayout _layout)1648 unref_pipeline_layout(struct vk_device *vk_device, VkPipelineLayout _layout)
1649 {
1650 struct lvp_device *device = container_of(vk_device, struct lvp_device, vk);
1651 LVP_FROM_HANDLE(lvp_pipeline_layout, layout, _layout);
1652
1653 lvp_pipeline_layout_unref(device, layout);
1654 }
1655
lvp_CreateDevice(VkPhysicalDevice physicalDevice,const VkDeviceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDevice * pDevice)1656 VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateDevice(
1657 VkPhysicalDevice physicalDevice,
1658 const VkDeviceCreateInfo* pCreateInfo,
1659 const VkAllocationCallbacks* pAllocator,
1660 VkDevice* pDevice)
1661 {
1662 fprintf(stderr, "WARNING: lavapipe is not a conformant vulkan implementation, testing use only.\n");
1663
1664 LVP_FROM_HANDLE(lvp_physical_device, physical_device, physicalDevice);
1665 struct lvp_device *device;
1666 struct lvp_instance *instance = (struct lvp_instance *)physical_device->vk.instance;
1667
1668 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO);
1669
1670 size_t state_size = lvp_get_rendering_state_size();
1671 device = vk_zalloc2(&physical_device->vk.instance->alloc, pAllocator,
1672 sizeof(*device) + state_size, 8,
1673 VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
1674 if (!device)
1675 return vk_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
1676
1677 device->queue.state = device + 1;
1678
1679 struct vk_device_dispatch_table dispatch_table;
1680 vk_device_dispatch_table_from_entrypoints(&dispatch_table,
1681 &lvp_device_entrypoints, true);
1682 lvp_add_enqueue_cmd_entrypoints(&dispatch_table);
1683 vk_device_dispatch_table_from_entrypoints(&dispatch_table,
1684 &wsi_device_entrypoints, false);
1685 VkResult result = vk_device_init(&device->vk,
1686 &physical_device->vk,
1687 &dispatch_table, pCreateInfo,
1688 pAllocator);
1689 if (result != VK_SUCCESS) {
1690 vk_free(&device->vk.alloc, device);
1691 return result;
1692 }
1693
1694 device->instance = (struct lvp_instance *)physical_device->vk.instance;
1695 device->physical_device = physical_device;
1696
1697 device->vk.ref_pipeline_layout = ref_pipeline_layout;
1698 device->vk.unref_pipeline_layout = unref_pipeline_layout;
1699
1700 device->pscreen = physical_device->pscreen;
1701
1702 assert(pCreateInfo->queueCreateInfoCount == 1);
1703 assert(pCreateInfo->pQueueCreateInfos[0].queueFamilyIndex == 0);
1704 assert(pCreateInfo->pQueueCreateInfos[0].queueCount == 1);
1705 lvp_queue_init(device, &device->queue, pCreateInfo->pQueueCreateInfos, 0);
1706
1707 *pDevice = lvp_device_to_handle(device);
1708
1709 return VK_SUCCESS;
1710
1711 }
1712
VKAPI_ATTR void VKAPI_CALL lvp_DestroyDevice(
   VkDevice                                    _device,
   const VkAllocationCallbacks*                pAllocator)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);

   /* drop the cached last-submit fence before the queue (and its context)
    * are torn down */
   if (device->queue.last_fence)
      device->pscreen->fence_reference(device->pscreen, &device->queue.last_fence, NULL);
   lvp_queue_finish(&device->queue);
   vk_device_finish(&device->vk);
   vk_free(&device->vk.alloc, device);
}
1725
lvp_EnumerateInstanceExtensionProperties(const char * pLayerName,uint32_t * pPropertyCount,VkExtensionProperties * pProperties)1726 VKAPI_ATTR VkResult VKAPI_CALL lvp_EnumerateInstanceExtensionProperties(
1727 const char* pLayerName,
1728 uint32_t* pPropertyCount,
1729 VkExtensionProperties* pProperties)
1730 {
1731 if (pLayerName)
1732 return vk_error(NULL, VK_ERROR_LAYER_NOT_PRESENT);
1733
1734 return vk_enumerate_instance_extension_properties(
1735 &lvp_instance_extensions_supported, pPropertyCount, pProperties);
1736 }
1737
lvp_EnumerateInstanceLayerProperties(uint32_t * pPropertyCount,VkLayerProperties * pProperties)1738 VKAPI_ATTR VkResult VKAPI_CALL lvp_EnumerateInstanceLayerProperties(
1739 uint32_t* pPropertyCount,
1740 VkLayerProperties* pProperties)
1741 {
1742 if (pProperties == NULL) {
1743 *pPropertyCount = 0;
1744 return VK_SUCCESS;
1745 }
1746
1747 /* None supported at this time */
1748 return vk_error(NULL, VK_ERROR_LAYER_NOT_PRESENT);
1749 }
1750
lvp_EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice,uint32_t * pPropertyCount,VkLayerProperties * pProperties)1751 VKAPI_ATTR VkResult VKAPI_CALL lvp_EnumerateDeviceLayerProperties(
1752 VkPhysicalDevice physicalDevice,
1753 uint32_t* pPropertyCount,
1754 VkLayerProperties* pProperties)
1755 {
1756 if (pProperties == NULL) {
1757 *pPropertyCount = 0;
1758 return VK_SUCCESS;
1759 }
1760
1761 /* None supported at this time */
1762 return vk_error(NULL, VK_ERROR_LAYER_NOT_PRESENT);
1763 }
1764
lvp_QueueSubmit2KHR(VkQueue _queue,uint32_t submitCount,const VkSubmitInfo2 * pSubmits,VkFence _fence)1765 VKAPI_ATTR VkResult VKAPI_CALL lvp_QueueSubmit2KHR(
1766 VkQueue _queue,
1767 uint32_t submitCount,
1768 const VkSubmitInfo2* pSubmits,
1769 VkFence _fence)
1770 {
1771 LVP_FROM_HANDLE(lvp_queue, queue, _queue);
1772 LVP_FROM_HANDLE(lvp_fence, fence, _fence);
1773
1774 /* each submit is a separate job to simplify/streamline semaphore waits */
1775 for (uint32_t i = 0; i < submitCount; i++) {
1776 uint64_t timeline = ++queue->timeline;
1777 struct lvp_queue_work *task = malloc(sizeof(struct lvp_queue_work) +
1778 pSubmits[i].commandBufferInfoCount * sizeof(struct lvp_cmd_buffer *) +
1779 pSubmits[i].signalSemaphoreInfoCount * (sizeof(struct lvp_semaphore_timeline*) + sizeof(struct lvp_semaphore *)) +
1780 pSubmits[i].waitSemaphoreInfoCount * (sizeof(VkSemaphore) + sizeof(uint64_t)));
1781 task->cmd_buffer_count = pSubmits[i].commandBufferInfoCount;
1782 task->timeline_count = pSubmits[i].signalSemaphoreInfoCount;
1783 task->signal_count = pSubmits[i].signalSemaphoreInfoCount;
1784 task->wait_count = pSubmits[i].waitSemaphoreInfoCount;
1785 task->fence = fence;
1786 task->timeline = timeline;
1787 task->cmd_buffers = (struct lvp_cmd_buffer **)(task + 1);
1788 task->timelines = (struct lvp_semaphore_timeline**)((uint8_t*)task->cmd_buffers + pSubmits[i].commandBufferInfoCount * sizeof(struct lvp_cmd_buffer *));
1789 task->signals = (struct lvp_semaphore **)((uint8_t*)task->timelines + pSubmits[i].signalSemaphoreInfoCount * sizeof(struct lvp_semaphore_timeline *));
1790 task->waits = (VkSemaphore*)((uint8_t*)task->signals + pSubmits[i].signalSemaphoreInfoCount * sizeof(struct lvp_semaphore *));
1791 task->wait_vals = (uint64_t*)((uint8_t*)task->waits + pSubmits[i].waitSemaphoreInfoCount * sizeof(VkSemaphore));
1792
1793 unsigned c = 0;
1794 for (uint32_t j = 0; j < pSubmits[i].commandBufferInfoCount; j++) {
1795 task->cmd_buffers[c++] = lvp_cmd_buffer_from_handle(pSubmits[i].pCommandBufferInfos[j].commandBuffer);
1796 }
1797 unsigned s = 0;
1798 for (unsigned j = 0; j < pSubmits[i].signalSemaphoreInfoCount; j++) {
1799 const VkSemaphoreSubmitInfo *info = &pSubmits[i].pSignalSemaphoreInfos[j];
1800 LVP_FROM_HANDLE(lvp_semaphore, sema, info->semaphore);
1801 task->signals[j] = sema;
1802 if (!sema->is_timeline) {
1803 task->timeline_count--;
1804 continue;
1805 }
1806 simple_mtx_lock(&sema->lock);
1807 /* always prune first to make links available and update timeline id */
1808 prune_semaphore_links(queue->device, sema, queue->last_finished);
1809 if (sema->current < info->value) {
1810 /* only signal semaphores if the new id is >= the current one */
1811 struct lvp_semaphore_timeline *tl = get_semaphore_link(sema);
1812 tl->signal = info->value;
1813 tl->timeline = timeline;
1814 task->timelines[s] = tl;
1815 s++;
1816 } else
1817 task->timeline_count--;
1818 simple_mtx_unlock(&sema->lock);
1819 }
1820 unsigned w = 0;
1821 for (unsigned j = 0; j < pSubmits[i].waitSemaphoreInfoCount; j++) {
1822 const VkSemaphoreSubmitInfo *info = &pSubmits[i].pWaitSemaphoreInfos[j];
1823 LVP_FROM_HANDLE(lvp_semaphore, sema, info->semaphore);
1824 if (!sema->is_timeline) {
1825 task->waits[w] = info->semaphore;
1826 task->wait_vals[w] = 0;
1827 w++;
1828 continue;
1829 }
1830 simple_mtx_lock(&sema->lock);
1831 /* always prune first to update timeline id */
1832 prune_semaphore_links(queue->device, sema, queue->last_finished);
1833 if (info->value &&
1834 info->stageMask &&
1835 sema->current < info->value) {
1836 /* only wait on semaphores if the new id is > the current one and a wait mask is set
1837 *
1838 * technically the mask could be used to check whether there's gfx/compute ops on a cmdbuf and no-op,
1839 * but probably that's not worth the complexity
1840 */
1841 task->waits[w] = info->semaphore;
1842 task->wait_vals[w] = info->value;
1843 w++;
1844 } else
1845 task->wait_count--;
1846 simple_mtx_unlock(&sema->lock);
1847 }
1848 if (fence && i == submitCount - 1) {
1849 /* u_queue fences should only be signaled for the last submit, as this is the one that
1850 * the vk fence represents
1851 */
1852 fence->timeline = timeline;
1853 util_queue_add_job(&queue->queue, task, &fence->fence, queue_thread, NULL, 0);
1854 } else
1855 util_queue_add_job(&queue->queue, task, NULL, queue_thread, NULL, 0);
1856 }
1857 if (!submitCount && fence) {
1858 /* special case where a fence is created to use as a synchronization point */
1859 fence->timeline = p_atomic_inc_return(&queue->timeline);
1860 struct lvp_queue_noop *noop = malloc(sizeof(struct lvp_queue_noop));
1861 if (!noop)
1862 return VK_ERROR_OUT_OF_HOST_MEMORY;
1863 noop->fence = fence;
1864 noop->sema = NULL;
1865 util_queue_add_job(&queue->queue, noop, &fence->fence, queue_thread_noop, NULL, 0);
1866 }
1867 return VK_SUCCESS;
1868 }
1869
VKAPI_ATTR VkResult VKAPI_CALL lvp_QueueWaitIdle(
   VkQueue                                     _queue)
{
   LVP_FROM_HANDLE(lvp_queue, queue, _queue);

   /* drain every queued submit job first */
   util_queue_finish(&queue->queue);
   simple_mtx_lock(&queue->last_lock);
   /* snapshot the id before the fence can be cleared */
   uint64_t timeline = queue->last_fence_timeline;
   if (fence_finish(queue->device, queue->last_fence, PIPE_TIMEOUT_INFINITE)) {
      /* last fence signaled: drop it and advance the finished marker */
      queue->device->pscreen->fence_reference(queue->device->pscreen, &queue->device->queue.last_fence, NULL);
      if (timeline > queue->last_finished)
         queue->last_finished = timeline;
   }
   simple_mtx_unlock(&queue->last_lock);
   return VK_SUCCESS;
}
1886
lvp_DeviceWaitIdle(VkDevice _device)1887 VKAPI_ATTR VkResult VKAPI_CALL lvp_DeviceWaitIdle(
1888 VkDevice _device)
1889 {
1890 LVP_FROM_HANDLE(lvp_device, device, _device);
1891
1892 lvp_QueueWaitIdle(lvp_queue_to_handle(&device->queue));
1893
1894 return VK_SUCCESS;
1895 }
1896
lvp_AllocateMemory(VkDevice _device,const VkMemoryAllocateInfo * pAllocateInfo,const VkAllocationCallbacks * pAllocator,VkDeviceMemory * pMem)1897 VKAPI_ATTR VkResult VKAPI_CALL lvp_AllocateMemory(
1898 VkDevice _device,
1899 const VkMemoryAllocateInfo* pAllocateInfo,
1900 const VkAllocationCallbacks* pAllocator,
1901 VkDeviceMemory* pMem)
1902 {
1903 LVP_FROM_HANDLE(lvp_device, device, _device);
1904 struct lvp_device_memory *mem;
1905 ASSERTED const VkExportMemoryAllocateInfo *export_info = NULL;
1906 ASSERTED const VkImportMemoryFdInfoKHR *import_info = NULL;
1907 const VkImportMemoryHostPointerInfoEXT *host_ptr_info = NULL;
1908 VkResult error = VK_ERROR_OUT_OF_DEVICE_MEMORY;
1909 assert(pAllocateInfo->sType == VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO);
1910
1911 if (pAllocateInfo->allocationSize == 0) {
1912 /* Apparently, this is allowed */
1913 *pMem = VK_NULL_HANDLE;
1914 return VK_SUCCESS;
1915 }
1916
1917 vk_foreach_struct_const(ext, pAllocateInfo->pNext) {
1918 switch ((unsigned)ext->sType) {
1919 case VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT:
1920 host_ptr_info = (VkImportMemoryHostPointerInfoEXT*)ext;
1921 assert(host_ptr_info->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT);
1922 break;
1923 case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO:
1924 export_info = (VkExportMemoryAllocateInfo*)ext;
1925 assert(export_info->handleTypes == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT);
1926 break;
1927 case VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR:
1928 import_info = (VkImportMemoryFdInfoKHR*)ext;
1929 assert(import_info->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT);
1930 break;
1931 default:
1932 break;
1933 }
1934 }
1935
1936 #ifdef PIPE_MEMORY_FD
1937 if (import_info != NULL && import_info->fd < 0) {
1938 return vk_error(device->instance, VK_ERROR_INVALID_EXTERNAL_HANDLE);
1939 }
1940 #endif
1941
1942 mem = vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*mem), 8,
1943 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1944 if (mem == NULL)
1945 return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
1946
1947 vk_object_base_init(&device->vk, &mem->base,
1948 VK_OBJECT_TYPE_DEVICE_MEMORY);
1949
1950 mem->memory_type = LVP_DEVICE_MEMORY_TYPE_DEFAULT;
1951 mem->backed_fd = -1;
1952
1953 if (host_ptr_info) {
1954 mem->pmem = host_ptr_info->pHostPointer;
1955 mem->memory_type = LVP_DEVICE_MEMORY_TYPE_USER_PTR;
1956 }
1957 #ifdef PIPE_MEMORY_FD
1958 else if(import_info) {
1959 uint64_t size;
1960 if(!device->pscreen->import_memory_fd(device->pscreen, import_info->fd, &mem->pmem, &size)) {
1961 close(import_info->fd);
1962 error = VK_ERROR_INVALID_EXTERNAL_HANDLE;
1963 goto fail;
1964 }
1965 if(size < pAllocateInfo->allocationSize) {
1966 device->pscreen->free_memory_fd(device->pscreen, mem->pmem);
1967 close(import_info->fd);
1968 goto fail;
1969 }
1970 if (export_info) {
1971 mem->backed_fd = import_info->fd;
1972 }
1973 else {
1974 close(import_info->fd);
1975 }
1976 mem->memory_type = LVP_DEVICE_MEMORY_TYPE_OPAQUE_FD;
1977 }
1978 else if (export_info) {
1979 mem->pmem = device->pscreen->allocate_memory_fd(device->pscreen, pAllocateInfo->allocationSize, &mem->backed_fd);
1980 if (!mem->pmem || mem->backed_fd < 0) {
1981 goto fail;
1982 }
1983 mem->memory_type = LVP_DEVICE_MEMORY_TYPE_OPAQUE_FD;
1984 }
1985 #endif
1986 else {
1987 mem->pmem = device->pscreen->allocate_memory(device->pscreen, pAllocateInfo->allocationSize);
1988 if (!mem->pmem) {
1989 goto fail;
1990 }
1991 }
1992
1993 mem->type_index = pAllocateInfo->memoryTypeIndex;
1994
1995 *pMem = lvp_device_memory_to_handle(mem);
1996
1997 return VK_SUCCESS;
1998
1999 fail:
2000 vk_free2(&device->vk.alloc, pAllocator, mem);
2001 return vk_error(device, error);
2002 }
2003
VKAPI_ATTR void VKAPI_CALL lvp_FreeMemory(
   VkDevice                                    _device,
   VkDeviceMemory                              _mem,
   const VkAllocationCallbacks*                pAllocator)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_device_memory, mem, _mem);

   /* freeing VK_NULL_HANDLE is a no-op per the Vulkan spec */
   if (mem == NULL)
      return;

   /* release the backing store according to how it was created */
   switch(mem->memory_type) {
   case LVP_DEVICE_MEMORY_TYPE_DEFAULT:
      device->pscreen->free_memory(device->pscreen, mem->pmem);
      break;
#ifdef PIPE_MEMORY_FD
   case LVP_DEVICE_MEMORY_TYPE_OPAQUE_FD:
      device->pscreen->free_memory_fd(device->pscreen, mem->pmem);
      if(mem->backed_fd >= 0)
         close(mem->backed_fd);
      break;
#endif
   case LVP_DEVICE_MEMORY_TYPE_USER_PTR:
   default:
      /* imported host pointers are owned by the application */
      break;
   }
   vk_object_base_finish(&mem->base);
   vk_free2(&device->vk.alloc, pAllocator, mem);

}
2034
lvp_MapMemory(VkDevice _device,VkDeviceMemory _memory,VkDeviceSize offset,VkDeviceSize size,VkMemoryMapFlags flags,void ** ppData)2035 VKAPI_ATTR VkResult VKAPI_CALL lvp_MapMemory(
2036 VkDevice _device,
2037 VkDeviceMemory _memory,
2038 VkDeviceSize offset,
2039 VkDeviceSize size,
2040 VkMemoryMapFlags flags,
2041 void** ppData)
2042 {
2043 LVP_FROM_HANDLE(lvp_device, device, _device);
2044 LVP_FROM_HANDLE(lvp_device_memory, mem, _memory);
2045 void *map;
2046 if (mem == NULL) {
2047 *ppData = NULL;
2048 return VK_SUCCESS;
2049 }
2050
2051 map = device->pscreen->map_memory(device->pscreen, mem->pmem);
2052
2053 *ppData = (char *)map + offset;
2054 return VK_SUCCESS;
2055 }
2056
lvp_UnmapMemory(VkDevice _device,VkDeviceMemory _memory)2057 VKAPI_ATTR void VKAPI_CALL lvp_UnmapMemory(
2058 VkDevice _device,
2059 VkDeviceMemory _memory)
2060 {
2061 LVP_FROM_HANDLE(lvp_device, device, _device);
2062 LVP_FROM_HANDLE(lvp_device_memory, mem, _memory);
2063
2064 if (mem == NULL)
2065 return;
2066
2067 device->pscreen->unmap_memory(device->pscreen, mem->pmem);
2068 }
2069
VKAPI_ATTR VkResult VKAPI_CALL lvp_FlushMappedMemoryRanges(
   VkDevice                                    _device,
   uint32_t                                    memoryRangeCount,
   const VkMappedMemoryRange*                  pMemoryRanges)
{
   /* all lavapipe memory is host memory, so mappings are always coherent
    * and there is nothing to flush */
   return VK_SUCCESS;
}
2077
VKAPI_ATTR VkResult VKAPI_CALL lvp_InvalidateMappedMemoryRanges(
   VkDevice                                    _device,
   uint32_t                                    memoryRangeCount,
   const VkMappedMemoryRange*                  pMemoryRanges)
{
   /* all lavapipe memory is host memory, so mappings are always coherent
    * and there is nothing to invalidate */
   return VK_SUCCESS;
}
2085
lvp_GetDeviceBufferMemoryRequirements(VkDevice _device,const VkDeviceBufferMemoryRequirements * pInfo,VkMemoryRequirements2 * pMemoryRequirements)2086 VKAPI_ATTR void VKAPI_CALL lvp_GetDeviceBufferMemoryRequirements(
2087 VkDevice _device,
2088 const VkDeviceBufferMemoryRequirements* pInfo,
2089 VkMemoryRequirements2* pMemoryRequirements)
2090 {
2091 pMemoryRequirements->memoryRequirements.memoryTypeBits = 1;
2092 pMemoryRequirements->memoryRequirements.alignment = 64;
2093 pMemoryRequirements->memoryRequirements.size = 0;
2094
2095 VkBuffer _buffer;
2096 if (lvp_CreateBuffer(_device, pInfo->pCreateInfo, NULL, &_buffer) != VK_SUCCESS)
2097 return;
2098 LVP_FROM_HANDLE(lvp_buffer, buffer, _buffer);
2099 pMemoryRequirements->memoryRequirements.size = buffer->total_size;
2100 lvp_DestroyBuffer(_device, _buffer, NULL);
2101 }
2102
VKAPI_ATTR void VKAPI_CALL lvp_GetDeviceImageSparseMemoryRequirements(
   VkDevice                                    device,
   const VkDeviceImageMemoryRequirements*      pInfo,
   uint32_t*                                   pSparseMemoryRequirementCount,
   VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements)
{
   /* sparse resources are not implemented in lavapipe */
   stub();
}
2111
lvp_GetDeviceImageMemoryRequirements(VkDevice _device,const VkDeviceImageMemoryRequirements * pInfo,VkMemoryRequirements2 * pMemoryRequirements)2112 VKAPI_ATTR void VKAPI_CALL lvp_GetDeviceImageMemoryRequirements(
2113 VkDevice _device,
2114 const VkDeviceImageMemoryRequirements* pInfo,
2115 VkMemoryRequirements2* pMemoryRequirements)
2116 {
2117 pMemoryRequirements->memoryRequirements.memoryTypeBits = 1;
2118 pMemoryRequirements->memoryRequirements.alignment = 0;
2119 pMemoryRequirements->memoryRequirements.size = 0;
2120
2121 VkImage _image;
2122 if (lvp_CreateImage(_device, pInfo->pCreateInfo, NULL, &_image) != VK_SUCCESS)
2123 return;
2124 LVP_FROM_HANDLE(lvp_image, image, _image);
2125 pMemoryRequirements->memoryRequirements.size = image->size;
2126 pMemoryRequirements->memoryRequirements.alignment = image->alignment;
2127 lvp_DestroyImage(_device, _image, NULL);
2128 }
2129
lvp_GetBufferMemoryRequirements(VkDevice device,VkBuffer _buffer,VkMemoryRequirements * pMemoryRequirements)2130 VKAPI_ATTR void VKAPI_CALL lvp_GetBufferMemoryRequirements(
2131 VkDevice device,
2132 VkBuffer _buffer,
2133 VkMemoryRequirements* pMemoryRequirements)
2134 {
2135 LVP_FROM_HANDLE(lvp_buffer, buffer, _buffer);
2136
2137 /* The Vulkan spec (git aaed022) says:
2138 *
2139 * memoryTypeBits is a bitfield and contains one bit set for every
2140 * supported memory type for the resource. The bit `1<<i` is set if and
2141 * only if the memory type `i` in the VkPhysicalDeviceMemoryProperties
2142 * structure for the physical device is supported.
2143 *
2144 * We support exactly one memory type.
2145 */
2146 pMemoryRequirements->memoryTypeBits = 1;
2147
2148 pMemoryRequirements->size = buffer->total_size;
2149 pMemoryRequirements->alignment = 64;
2150 }
2151
lvp_GetBufferMemoryRequirements2(VkDevice device,const VkBufferMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)2152 VKAPI_ATTR void VKAPI_CALL lvp_GetBufferMemoryRequirements2(
2153 VkDevice device,
2154 const VkBufferMemoryRequirementsInfo2 *pInfo,
2155 VkMemoryRequirements2 *pMemoryRequirements)
2156 {
2157 lvp_GetBufferMemoryRequirements(device, pInfo->buffer,
2158 &pMemoryRequirements->memoryRequirements);
2159 vk_foreach_struct(ext, pMemoryRequirements->pNext) {
2160 switch (ext->sType) {
2161 case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS: {
2162 VkMemoryDedicatedRequirements *req =
2163 (VkMemoryDedicatedRequirements *) ext;
2164 req->requiresDedicatedAllocation = false;
2165 req->prefersDedicatedAllocation = req->requiresDedicatedAllocation;
2166 break;
2167 }
2168 default:
2169 break;
2170 }
2171 }
2172 }
2173
lvp_GetImageMemoryRequirements(VkDevice device,VkImage _image,VkMemoryRequirements * pMemoryRequirements)2174 VKAPI_ATTR void VKAPI_CALL lvp_GetImageMemoryRequirements(
2175 VkDevice device,
2176 VkImage _image,
2177 VkMemoryRequirements* pMemoryRequirements)
2178 {
2179 LVP_FROM_HANDLE(lvp_image, image, _image);
2180 pMemoryRequirements->memoryTypeBits = 1;
2181
2182 pMemoryRequirements->size = image->size;
2183 pMemoryRequirements->alignment = image->alignment;
2184 }
2185
lvp_GetImageMemoryRequirements2(VkDevice device,const VkImageMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)2186 VKAPI_ATTR void VKAPI_CALL lvp_GetImageMemoryRequirements2(
2187 VkDevice device,
2188 const VkImageMemoryRequirementsInfo2 *pInfo,
2189 VkMemoryRequirements2 *pMemoryRequirements)
2190 {
2191 lvp_GetImageMemoryRequirements(device, pInfo->image,
2192 &pMemoryRequirements->memoryRequirements);
2193
2194 vk_foreach_struct(ext, pMemoryRequirements->pNext) {
2195 switch (ext->sType) {
2196 case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS: {
2197 VkMemoryDedicatedRequirements *req =
2198 (VkMemoryDedicatedRequirements *) ext;
2199 req->requiresDedicatedAllocation = false;
2200 req->prefersDedicatedAllocation = req->requiresDedicatedAllocation;
2201 break;
2202 }
2203 default:
2204 break;
2205 }
2206 }
2207 }
2208
VKAPI_ATTR void VKAPI_CALL lvp_GetImageSparseMemoryRequirements(
   VkDevice                                    device,
   VkImage                                     image,
   uint32_t*                                   pSparseMemoryRequirementCount,
   VkSparseImageMemoryRequirements*            pSparseMemoryRequirements)
{
   /* sparse resources are not implemented in lavapipe */
   stub();
}
2217
VKAPI_ATTR void VKAPI_CALL lvp_GetImageSparseMemoryRequirements2(
   VkDevice                                    device,
   const VkImageSparseMemoryRequirementsInfo2* pInfo,
   uint32_t*                                   pSparseMemoryRequirementCount,
   VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements)
{
   /* sparse resources are not implemented in lavapipe */
   stub();
}
2226
VKAPI_ATTR void VKAPI_CALL lvp_GetDeviceMemoryCommitment(
   VkDevice                                    device,
   VkDeviceMemory                              memory,
   VkDeviceSize*                               pCommittedMemoryInBytes)
{
   /* lavapipe does not implement lazily-allocated memory, so no bytes are
    * ever reported as committed */
   *pCommittedMemoryInBytes = 0;
}
2234
lvp_BindBufferMemory2(VkDevice _device,uint32_t bindInfoCount,const VkBindBufferMemoryInfo * pBindInfos)2235 VKAPI_ATTR VkResult VKAPI_CALL lvp_BindBufferMemory2(VkDevice _device,
2236 uint32_t bindInfoCount,
2237 const VkBindBufferMemoryInfo *pBindInfos)
2238 {
2239 LVP_FROM_HANDLE(lvp_device, device, _device);
2240 for (uint32_t i = 0; i < bindInfoCount; ++i) {
2241 LVP_FROM_HANDLE(lvp_device_memory, mem, pBindInfos[i].memory);
2242 LVP_FROM_HANDLE(lvp_buffer, buffer, pBindInfos[i].buffer);
2243
2244 buffer->pmem = mem->pmem;
2245 device->pscreen->resource_bind_backing(device->pscreen,
2246 buffer->bo,
2247 mem->pmem,
2248 pBindInfos[i].memoryOffset);
2249 }
2250 return VK_SUCCESS;
2251 }
2252
/* Bind one or more images to device memory.
 *
 * Two paths exist per bind info:
 *  - VkBindImageMemorySwapchainInfoKHR in the pNext chain: alias the image
 *    onto the backing store of the indicated swapchain image instead of the
 *    supplied VkDeviceMemory.
 *  - otherwise: bind the image's gallium resource to mem->pmem at the
 *    requested offset, failing with VK_ERROR_OUT_OF_DEVICE_MEMORY if the
 *    screen rejects the binding.
 */
VKAPI_ATTR VkResult VKAPI_CALL lvp_BindImageMemory2(VkDevice _device,
                                                    uint32_t bindInfoCount,
                                                    const VkBindImageMemoryInfo *pBindInfos)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   for (uint32_t i = 0; i < bindInfoCount; ++i) {
      const VkBindImageMemoryInfo *bind_info = &pBindInfos[i];
      LVP_FROM_HANDLE(lvp_device_memory, mem, bind_info->memory);
      LVP_FROM_HANDLE(lvp_image, image, bind_info->image);
      bool did_bind = false;

      vk_foreach_struct_const(s, bind_info->pNext) {
         switch (s->sType) {
         case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR: {
            /* Swapchain binding: reuse the backing store of the swapchain's
             * own image so both handles alias the same storage.
             */
            const VkBindImageMemorySwapchainInfoKHR *swapchain_info =
               (const VkBindImageMemorySwapchainInfoKHR *) s;
            struct lvp_image *swapchain_image =
               lvp_swapchain_get_image(swapchain_info->swapchain,
                                       swapchain_info->imageIndex);

            image->pmem = swapchain_image->pmem;
            image->memory_offset = swapchain_image->memory_offset;
            /* NOTE(review): the return value of resource_bind_backing is
             * ignored here but checked on the non-swapchain path below —
             * presumably a swapchain image's backing is already known to
             * fit; confirm.
             */
            device->pscreen->resource_bind_backing(device->pscreen,
                                                   image->bo,
                                                   image->pmem,
                                                   image->memory_offset);
            did_bind = true;
            break;
         }
         default:
            break;
         }
      }

      if (!did_bind) {
         /* Plain binding to the supplied VkDeviceMemory. */
         if (!device->pscreen->resource_bind_backing(device->pscreen,
                                                     image->bo,
                                                     mem->pmem,
                                                     bind_info->memoryOffset)) {
            /* This is probably caused by the texture being too large, so let's
             * report this as the *closest* allowed error-code. It's not ideal,
             * but it's unlikely that anyone will care too much.
             */
            return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
         }
         image->pmem = mem->pmem;
         image->memory_offset = bind_info->memoryOffset;
      }
   }
   return VK_SUCCESS;
}
2304
2305 #ifdef PIPE_MEMORY_FD
2306
2307 VkResult
lvp_GetMemoryFdKHR(VkDevice _device,const VkMemoryGetFdInfoKHR * pGetFdInfo,int * pFD)2308 lvp_GetMemoryFdKHR(VkDevice _device, const VkMemoryGetFdInfoKHR *pGetFdInfo, int *pFD)
2309 {
2310 LVP_FROM_HANDLE(lvp_device_memory, memory, pGetFdInfo->memory);
2311
2312 assert(pGetFdInfo->sType == VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR);
2313 assert(pGetFdInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT);
2314
2315 *pFD = dup(memory->backed_fd);
2316 assert(*pFD >= 0);
2317 return VK_SUCCESS;
2318 }
2319
2320 VkResult
lvp_GetMemoryFdPropertiesKHR(VkDevice _device,VkExternalMemoryHandleTypeFlagBits handleType,int fd,VkMemoryFdPropertiesKHR * pMemoryFdProperties)2321 lvp_GetMemoryFdPropertiesKHR(VkDevice _device,
2322 VkExternalMemoryHandleTypeFlagBits handleType,
2323 int fd,
2324 VkMemoryFdPropertiesKHR *pMemoryFdProperties)
2325 {
2326 LVP_FROM_HANDLE(lvp_device, device, _device);
2327
2328 assert(pMemoryFdProperties->sType == VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR);
2329
2330 if(handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT) {
2331 // There is only one memoryType so select this one
2332 pMemoryFdProperties->memoryTypeBits = 1;
2333 }
2334 else
2335 return vk_error(device->instance, VK_ERROR_INVALID_EXTERNAL_HANDLE);
2336 return VK_SUCCESS;
2337 }
2338
2339 #endif
2340
/* Sparse binding submission — unimplemented.  lavapipe does not advertise
 * sparse features, so stub_return() reports the failure if this is ever
 * reached.
 */
VKAPI_ATTR VkResult VKAPI_CALL lvp_QueueBindSparse(
   VkQueue                                     queue,
   uint32_t                                    bindInfoCount,
   const VkBindSparseInfo*                     pBindInfo,
   VkFence                                     fence)
{
   stub_return(VK_ERROR_INCOMPATIBLE_DRIVER);
}
2349
2350
lvp_CreateFence(VkDevice _device,const VkFenceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkFence * pFence)2351 VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateFence(
2352 VkDevice _device,
2353 const VkFenceCreateInfo* pCreateInfo,
2354 const VkAllocationCallbacks* pAllocator,
2355 VkFence* pFence)
2356 {
2357 LVP_FROM_HANDLE(lvp_device, device, _device);
2358 struct lvp_fence *fence;
2359
2360 fence = vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*fence), 8,
2361 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2362 if (fence == NULL)
2363 return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
2364 vk_object_base_init(&device->vk, &fence->base, VK_OBJECT_TYPE_FENCE);
2365 util_queue_fence_init(&fence->fence);
2366 fence->signalled = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) == VK_FENCE_CREATE_SIGNALED_BIT;
2367
2368 fence->handle = NULL;
2369 fence->timeline = 0;
2370 *pFence = lvp_fence_to_handle(fence);
2371
2372 return VK_SUCCESS;
2373 }
2374
lvp_DestroyFence(VkDevice _device,VkFence _fence,const VkAllocationCallbacks * pAllocator)2375 VKAPI_ATTR void VKAPI_CALL lvp_DestroyFence(
2376 VkDevice _device,
2377 VkFence _fence,
2378 const VkAllocationCallbacks* pAllocator)
2379 {
2380 LVP_FROM_HANDLE(lvp_device, device, _device);
2381 LVP_FROM_HANDLE(lvp_fence, fence, _fence);
2382
2383 if (!_fence)
2384 return;
2385 /* evade annoying destroy assert */
2386 util_queue_fence_init(&fence->fence);
2387 util_queue_fence_destroy(&fence->fence);
2388 if (fence->handle)
2389 device->pscreen->fence_reference(device->pscreen, &fence->handle, NULL);
2390
2391 vk_object_base_finish(&fence->base);
2392 vk_free2(&device->vk.alloc, pAllocator, fence);
2393 }
2394
lvp_ResetFences(VkDevice _device,uint32_t fenceCount,const VkFence * pFences)2395 VKAPI_ATTR VkResult VKAPI_CALL lvp_ResetFences(
2396 VkDevice _device,
2397 uint32_t fenceCount,
2398 const VkFence* pFences)
2399 {
2400 LVP_FROM_HANDLE(lvp_device, device, _device);
2401 for (unsigned i = 0; i < fenceCount; i++) {
2402 struct lvp_fence *fence = lvp_fence_from_handle(pFences[i]);
2403 /* ensure u_queue doesn't explode when submitting a completed lvp_fence
2404 * which has not yet signalled its u_queue fence
2405 */
2406 util_queue_fence_wait(&fence->fence);
2407
2408 if (fence->handle) {
2409 simple_mtx_lock(&device->queue.last_lock);
2410 if (fence->handle == device->queue.last_fence)
2411 device->pscreen->fence_reference(device->pscreen, &device->queue.last_fence, NULL);
2412 simple_mtx_unlock(&device->queue.last_lock);
2413 device->pscreen->fence_reference(device->pscreen, &fence->handle, NULL);
2414 }
2415 fence->signalled = false;
2416 }
2417 return VK_SUCCESS;
2418 }
2419
lvp_GetFenceStatus(VkDevice _device,VkFence _fence)2420 VKAPI_ATTR VkResult VKAPI_CALL lvp_GetFenceStatus(
2421 VkDevice _device,
2422 VkFence _fence)
2423 {
2424 LVP_FROM_HANDLE(lvp_device, device, _device);
2425 LVP_FROM_HANDLE(lvp_fence, fence, _fence);
2426
2427 if (fence->signalled)
2428 return VK_SUCCESS;
2429
2430 if (!util_queue_fence_is_signalled(&fence->fence) || !fence_finish(device, fence->handle, 0))
2431 return VK_NOT_READY;
2432
2433 fence->signalled = true;
2434 simple_mtx_lock(&device->queue.last_lock);
2435 if (fence->handle == device->queue.last_fence) {
2436 device->pscreen->fence_reference(device->pscreen, &device->queue.last_fence, NULL);
2437 if (fence->timeline > device->queue.last_finished)
2438 device->queue.last_finished = fence->timeline;
2439 }
2440 simple_mtx_unlock(&device->queue.last_lock);
2441 return VK_SUCCESS;
2442 }
2443
/* Wait on a set of fences with a relative nanosecond timeout.
 *
 * Because lavapipe executes submissions in order on a single queue, waiting
 * on one representative fence is sufficient: for waitAll the fence with the
 * highest timeline id, for wait-any the fence with the lowest.
 */
VKAPI_ATTR VkResult VKAPI_CALL lvp_WaitForFences(
   VkDevice                                    _device,
   uint32_t                                    fenceCount,
   const VkFence*                              pFences,
   VkBool32                                    waitAll,
   uint64_t                                    timeout)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   struct lvp_fence *fence = NULL;

   /* lavapipe is completely synchronous, so only one fence needs to be waited on */
   if (waitAll) {
      /* find highest timeline id */
      for (unsigned i = 0; i < fenceCount; i++) {
         struct lvp_fence *f = lvp_fence_from_handle(pFences[i]);

         /* this is an unsubmitted fence: immediately bail out */
         if (!f->timeline && !f->signalled)
            return VK_TIMEOUT;
         if (!fence || f->timeline > fence->timeline)
            fence = f;
      }
   } else {
      /* find lowest timeline id */
      for (unsigned i = 0; i < fenceCount; i++) {
         struct lvp_fence *f = lvp_fence_from_handle(pFences[i]);
         /* any already-signalled fence satisfies a wait-any */
         if (f->signalled)
            return VK_SUCCESS;
         if (f->timeline && (!fence || f->timeline < fence->timeline))
            fence = f;
      }
   }
   /* no submitted fence found to wait on */
   if (!fence)
      return VK_TIMEOUT;
   if (fence->signalled)
      return VK_SUCCESS;

   /* First wait for the submission thread's u_queue fence, then charge the
    * elapsed time against the caller's timeout before waiting on the screen
    * fence below.
    */
   if (!util_queue_fence_is_signalled(&fence->fence)) {
      int64_t abs_timeout = os_time_get_absolute_timeout(timeout);
      if (!util_queue_fence_wait_timeout(&fence->fence, abs_timeout))
         return VK_TIMEOUT;

      if (timeout != OS_TIMEOUT_INFINITE) {
         int64_t time_ns = os_time_get_nano();
         timeout = abs_timeout > time_ns ? abs_timeout - time_ns : 0;
      }
   }

   if (!fence_finish(device, fence->handle, timeout))
      return VK_TIMEOUT;

   /* Completion observed: retire the queue's cached last_fence and advance
    * the last_finished timeline marker if this fence is newer.
    */
   simple_mtx_lock(&device->queue.last_lock);
   if (fence->handle == device->queue.last_fence) {
      device->pscreen->fence_reference(device->pscreen, &device->queue.last_fence, NULL);
      if (fence->timeline > device->queue.last_finished)
         device->queue.last_finished = fence->timeline;
   }
   simple_mtx_unlock(&device->queue.last_lock);
   fence->signalled = true;
   return VK_SUCCESS;
}
2504
lvp_CreateSemaphore(VkDevice _device,const VkSemaphoreCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSemaphore * pSemaphore)2505 VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateSemaphore(
2506 VkDevice _device,
2507 const VkSemaphoreCreateInfo* pCreateInfo,
2508 const VkAllocationCallbacks* pAllocator,
2509 VkSemaphore* pSemaphore)
2510 {
2511 LVP_FROM_HANDLE(lvp_device, device, _device);
2512
2513 struct lvp_semaphore *sema = vk_alloc2(&device->vk.alloc, pAllocator,
2514 sizeof(*sema), 8,
2515 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2516
2517 if (!sema)
2518 return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
2519 vk_object_base_init(&device->vk, &sema->base,
2520 VK_OBJECT_TYPE_SEMAPHORE);
2521
2522 const VkSemaphoreTypeCreateInfo *info = vk_find_struct_const(pCreateInfo->pNext, SEMAPHORE_TYPE_CREATE_INFO);
2523 sema->is_timeline = info && info->semaphoreType == VK_SEMAPHORE_TYPE_TIMELINE;
2524 simple_mtx_init(&sema->lock, mtx_plain);
2525 sema->handle = NULL;
2526 if (sema->is_timeline) {
2527 sema->is_timeline = true;
2528 sema->timeline = NULL;
2529 sema->current = info->initialValue;
2530 sema->mem = ralloc_context(NULL);
2531 util_dynarray_init(&sema->links, sema->mem);
2532
2533 mtx_init(&sema->submit_lock, mtx_plain);
2534 cnd_init(&sema->submit);
2535 }
2536
2537 *pSemaphore = lvp_semaphore_to_handle(sema);
2538
2539 return VK_SUCCESS;
2540 }
2541
lvp_DestroySemaphore(VkDevice _device,VkSemaphore _semaphore,const VkAllocationCallbacks * pAllocator)2542 VKAPI_ATTR void VKAPI_CALL lvp_DestroySemaphore(
2543 VkDevice _device,
2544 VkSemaphore _semaphore,
2545 const VkAllocationCallbacks* pAllocator)
2546 {
2547 LVP_FROM_HANDLE(lvp_device, device, _device);
2548 LVP_FROM_HANDLE(lvp_semaphore, sema, _semaphore);
2549
2550 if (!_semaphore)
2551 return;
2552 if (sema->is_timeline) {
2553 ralloc_free(sema->mem);
2554 simple_mtx_destroy(&sema->lock);
2555 mtx_destroy(&sema->submit_lock);
2556 cnd_destroy(&sema->submit);
2557 }
2558 if (sema->handle)
2559 device->pscreen->fence_reference(device->pscreen, &sema->handle, NULL);
2560 vk_object_base_finish(&sema->base);
2561 vk_free2(&device->vk.alloc, pAllocator, sema);
2562 }
2563
/* Host-side wait on timeline semaphore values (vkWaitSemaphores).
 * Delegates to the same wait_semaphores() helper the queue-submit path uses.
 */
VKAPI_ATTR VkResult VKAPI_CALL lvp_WaitSemaphores(
   VkDevice                                    _device,
   const VkSemaphoreWaitInfo*                  pWaitInfo,
   uint64_t                                    timeout)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   /* same mechanism as used by queue submit */
   return wait_semaphores(device, pWaitInfo, timeout);
}
2573
/* Read the current counter value of a timeline semaphore.
 *
 * Completed per-point fence links are pruned first; then, if the link for
 * sema->current has a completed fence, its signal value is reported (and the
 * queue's last_finished marker advanced), otherwise sema->current is
 * returned as-is.
 */
VKAPI_ATTR VkResult VKAPI_CALL lvp_GetSemaphoreCounterValue(
   VkDevice                                    _device,
   VkSemaphore                                 _semaphore,
   uint64_t*                                   pValue)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_semaphore, sema, _semaphore);

   /* sema->lock guards the link list; queue.last_lock is taken nested
    * inside it below (same order as the submit path).
    */
   simple_mtx_lock(&sema->lock);
   prune_semaphore_links(device, sema, device->queue.last_finished);
   struct lvp_semaphore_timeline *tl = find_semaphore_timeline(sema, sema->current);
   if (tl && fence_finish(device, tl->fence, 0)) {
      /* Point completed: advance the device-wide finished marker. */
      simple_mtx_lock(&device->queue.last_lock);
      if (tl->timeline > device->queue.last_finished)
         device->queue.last_finished = tl->timeline;
      simple_mtx_unlock(&device->queue.last_lock);
      *pValue = tl->signal;
   } else {
      *pValue = sema->current;
   }
   simple_mtx_unlock(&sema->lock);
   return VK_SUCCESS;
}
2596
/* Host-side signal of a timeline semaphore (vkSignalSemaphore).
 * Per the Vulkan spec this entrypoint is only valid for timeline semaphores,
 * so the timeline-only submit condvar is safe to broadcast.
 */
VKAPI_ATTR VkResult VKAPI_CALL lvp_SignalSemaphore(
   VkDevice                                    _device,
   const VkSemaphoreSignalInfo*                pSignalInfo)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_semaphore, sema, pSignalInfo->semaphore);

   /* try to remain monotonic */
   /* NOTE(review): sema->current is read-modify-written here without holding
    * sema->submit_lock or sema->lock, and the broadcast happens before any
    * lock is taken — looks racy against concurrent submit-side waiters;
    * confirm the intended locking discipline.
    */
   if (sema->current < pSignalInfo->value)
      sema->current = pSignalInfo->value;
   /* wake any queue threads blocked waiting for this timeline point */
   cnd_broadcast(&sema->submit);
   simple_mtx_lock(&sema->lock);
   prune_semaphore_links(device, sema, device->queue.last_finished);
   simple_mtx_unlock(&sema->lock);
   return VK_SUCCESS;
}
2613
lvp_CreateEvent(VkDevice _device,const VkEventCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkEvent * pEvent)2614 VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateEvent(
2615 VkDevice _device,
2616 const VkEventCreateInfo* pCreateInfo,
2617 const VkAllocationCallbacks* pAllocator,
2618 VkEvent* pEvent)
2619 {
2620 LVP_FROM_HANDLE(lvp_device, device, _device);
2621 struct lvp_event *event = vk_alloc2(&device->vk.alloc, pAllocator,
2622 sizeof(*event), 8,
2623 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2624
2625 if (!event)
2626 return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
2627
2628 vk_object_base_init(&device->vk, &event->base, VK_OBJECT_TYPE_EVENT);
2629 *pEvent = lvp_event_to_handle(event);
2630 event->event_storage = 0;
2631
2632 return VK_SUCCESS;
2633 }
2634
lvp_DestroyEvent(VkDevice _device,VkEvent _event,const VkAllocationCallbacks * pAllocator)2635 VKAPI_ATTR void VKAPI_CALL lvp_DestroyEvent(
2636 VkDevice _device,
2637 VkEvent _event,
2638 const VkAllocationCallbacks* pAllocator)
2639 {
2640 LVP_FROM_HANDLE(lvp_device, device, _device);
2641 LVP_FROM_HANDLE(lvp_event, event, _event);
2642
2643 if (!event)
2644 return;
2645
2646 vk_object_base_finish(&event->base);
2647 vk_free2(&device->vk.alloc, pAllocator, event);
2648 }
2649
lvp_GetEventStatus(VkDevice _device,VkEvent _event)2650 VKAPI_ATTR VkResult VKAPI_CALL lvp_GetEventStatus(
2651 VkDevice _device,
2652 VkEvent _event)
2653 {
2654 LVP_FROM_HANDLE(lvp_event, event, _event);
2655 if (event->event_storage == 1)
2656 return VK_EVENT_SET;
2657 return VK_EVENT_RESET;
2658 }
2659
/* Set a VkEvent from the host: write 1 into its storage word. */
VKAPI_ATTR VkResult VKAPI_CALL lvp_SetEvent(
   VkDevice                                    _device,
   VkEvent                                     _event)
{
   LVP_FROM_HANDLE(lvp_event, event, _event);
   event->event_storage = 1;

   return VK_SUCCESS;
}
2669
/* Reset a VkEvent from the host: write 0 into its storage word. */
VKAPI_ATTR VkResult VKAPI_CALL lvp_ResetEvent(
   VkDevice                                    _device,
   VkEvent                                     _event)
{
   LVP_FROM_HANDLE(lvp_event, event, _event);
   event->event_storage = 0;

   return VK_SUCCESS;
}
2679
lvp_CreateSampler(VkDevice _device,const VkSamplerCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSampler * pSampler)2680 VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateSampler(
2681 VkDevice _device,
2682 const VkSamplerCreateInfo* pCreateInfo,
2683 const VkAllocationCallbacks* pAllocator,
2684 VkSampler* pSampler)
2685 {
2686 LVP_FROM_HANDLE(lvp_device, device, _device);
2687 struct lvp_sampler *sampler;
2688 const VkSamplerReductionModeCreateInfo *reduction_mode_create_info =
2689 vk_find_struct_const(pCreateInfo->pNext,
2690 SAMPLER_REDUCTION_MODE_CREATE_INFO);
2691 const VkSamplerCustomBorderColorCreateInfoEXT *custom_border_color_create_info =
2692 vk_find_struct_const(pCreateInfo->pNext,
2693 SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT);
2694
2695 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO);
2696
2697 sampler = vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*sampler), 8,
2698 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2699 if (!sampler)
2700 return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
2701
2702 vk_object_base_init(&device->vk, &sampler->base,
2703 VK_OBJECT_TYPE_SAMPLER);
2704 sampler->create_info = *pCreateInfo;
2705
2706 switch (pCreateInfo->borderColor) {
2707 case VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK:
2708 case VK_BORDER_COLOR_INT_TRANSPARENT_BLACK:
2709 default:
2710 memset(&sampler->border_color, 0, sizeof(union pipe_color_union));
2711 break;
2712 case VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK:
2713 sampler->border_color.f[0] = sampler->border_color.f[1] =
2714 sampler->border_color.f[2] = 0.0f;
2715 sampler->border_color.f[3] = 1.0f;
2716 break;
2717 case VK_BORDER_COLOR_INT_OPAQUE_BLACK:
2718 sampler->border_color.i[0] = sampler->border_color.i[1] =
2719 sampler->border_color.i[2] = 0;
2720 sampler->border_color.i[3] = 1;
2721 break;
2722 case VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE:
2723 sampler->border_color.f[0] = sampler->border_color.f[1] =
2724 sampler->border_color.f[2] = 1.0f;
2725 sampler->border_color.f[3] = 1.0f;
2726 break;
2727 case VK_BORDER_COLOR_INT_OPAQUE_WHITE:
2728 sampler->border_color.i[0] = sampler->border_color.i[1] =
2729 sampler->border_color.i[2] = 1;
2730 sampler->border_color.i[3] = 1;
2731 break;
2732 case VK_BORDER_COLOR_FLOAT_CUSTOM_EXT:
2733 case VK_BORDER_COLOR_INT_CUSTOM_EXT:
2734 assert(custom_border_color_create_info != NULL);
2735 memcpy(&sampler->border_color,
2736 &custom_border_color_create_info->customBorderColor,
2737 sizeof(union pipe_color_union));
2738 break;
2739 }
2740
2741 sampler->reduction_mode = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE;
2742 if (reduction_mode_create_info)
2743 sampler->reduction_mode = reduction_mode_create_info->reductionMode;
2744
2745 *pSampler = lvp_sampler_to_handle(sampler);
2746
2747 return VK_SUCCESS;
2748 }
2749
lvp_DestroySampler(VkDevice _device,VkSampler _sampler,const VkAllocationCallbacks * pAllocator)2750 VKAPI_ATTR void VKAPI_CALL lvp_DestroySampler(
2751 VkDevice _device,
2752 VkSampler _sampler,
2753 const VkAllocationCallbacks* pAllocator)
2754 {
2755 LVP_FROM_HANDLE(lvp_device, device, _device);
2756 LVP_FROM_HANDLE(lvp_sampler, sampler, _sampler);
2757
2758 if (!_sampler)
2759 return;
2760 vk_object_base_finish(&sampler->base);
2761 vk_free2(&device->vk.alloc, pAllocator, sampler);
2762 }
2763
/* Y'CbCr sampler conversion — not implemented.  lavapipe does not support
 * sampler Y'CbCr conversion, so creation always fails.
 * NOTE(review): VK_ERROR_OUT_OF_HOST_MEMORY is a stand-in error code here,
 * not a real allocation failure.
 */
VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateSamplerYcbcrConversionKHR(
   VkDevice                                    device,
   const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
   const VkAllocationCallbacks*                pAllocator,
   VkSamplerYcbcrConversion*                   pYcbcrConversion)
{
   return VK_ERROR_OUT_OF_HOST_MEMORY;
}
2772
/* Y'CbCr conversions are never created (see lvp_CreateSamplerYcbcrConversionKHR),
 * so there is nothing to destroy.
 */
VKAPI_ATTR void VKAPI_CALL lvp_DestroySamplerYcbcrConversionKHR(
   VkDevice                                    device,
   VkSamplerYcbcrConversion                    ycbcrConversion,
   const VkAllocationCallbacks*                pAllocator)
{
}
2779
/* vk_icd.h does not declare this function, so we declare it here to
 * suppress Wmissing-prototypes.
 */
PUBLIC VKAPI_ATTR VkResult VKAPI_CALL
vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pSupportedVersion);

/* Negotiate the loader<->ICD interface version: clamp the loader's offer to
 * the highest version this driver supports (v5).
 */
PUBLIC VKAPI_ATTR VkResult VKAPI_CALL
vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pSupportedVersion)
{
   /* For the full details on loader interface versioning, see
    * <https://github.com/KhronosGroup/Vulkan-LoaderAndValidationLayers/blob/master/loader/LoaderAndLayerInterface.md>.
    * What follows is a condensed summary, to help you navigate the large and
    * confusing official doc.
    *
    *   - Loader interface v0 is incompatible with later versions. We don't
    *     support it.
    *
    *   - In loader interface v1:
    *       - The first ICD entrypoint called by the loader is
    *         vk_icdGetInstanceProcAddr(). The ICD must statically expose this
    *         entrypoint.
    *       - The ICD must statically expose no other Vulkan symbol unless it is
    *         linked with -Bsymbolic.
    *       - Each dispatchable Vulkan handle created by the ICD must be
    *         a pointer to a struct whose first member is VK_LOADER_DATA. The
    *         ICD must initialize VK_LOADER_DATA.loadMagic to ICD_LOADER_MAGIC.
    *       - The loader implements vkCreate{PLATFORM}SurfaceKHR() and
    *         vkDestroySurfaceKHR(). The ICD must be capable of working with
    *         such loader-managed surfaces.
    *
    *    - Loader interface v2 differs from v1 in:
    *       - The first ICD entrypoint called by the loader is
    *         vk_icdNegotiateLoaderICDInterfaceVersion(). The ICD must
    *         statically expose this entrypoint.
    *
    *    - Loader interface v3 differs from v2 in:
    *        - The ICD must implement vkCreate{PLATFORM}SurfaceKHR(),
    *          vkDestroySurfaceKHR(), and other API which uses VKSurfaceKHR,
    *          because the loader no longer does so.
    *
    *    - Loader interface v4 differs from v3 in:
    *        - The ICD must implement vk_icdGetPhysicalDeviceProcAddr().
    *
    *    - Loader interface v5 differs from v4 in:
    *        - The ICD must support Vulkan API version 1.1 and must not return
    *          VK_ERROR_INCOMPATIBLE_DRIVER from vkCreateInstance() unless a
    *          Vulkan Loader with interface v4 or smaller is being used and the
    *          application provides an API version that is greater than 1.0.
    */
   *pSupportedVersion = MIN2(*pSupportedVersion, 5u);
   return VK_SUCCESS;
}
2832
/* VK_EXT_private_data: slot creation is handled entirely by the common
 * Vulkan runtime.
 */
VKAPI_ATTR VkResult VKAPI_CALL lvp_CreatePrivateDataSlotEXT(
   VkDevice                                    _device,
   const VkPrivateDataSlotCreateInfoEXT*       pCreateInfo,
   const VkAllocationCallbacks*                pAllocator,
   VkPrivateDataSlotEXT*                       pPrivateDataSlot)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   return vk_private_data_slot_create(&device->vk, pCreateInfo, pAllocator,
                                      pPrivateDataSlot);
}
2843
/* VK_EXT_private_data: slot destruction delegates to the common runtime. */
VKAPI_ATTR void VKAPI_CALL lvp_DestroyPrivateDataSlotEXT(
   VkDevice                                    _device,
   VkPrivateDataSlotEXT                        privateDataSlot,
   const VkAllocationCallbacks*                pAllocator)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   vk_private_data_slot_destroy(&device->vk, privateDataSlot, pAllocator);
}
2852
/* VK_EXT_private_data: store a 64-bit value on an object; delegates to the
 * common runtime, which owns the per-object private-data storage.
 */
VKAPI_ATTR VkResult VKAPI_CALL lvp_SetPrivateDataEXT(
   VkDevice                                    _device,
   VkObjectType                                objectType,
   uint64_t                                    objectHandle,
   VkPrivateDataSlotEXT                        privateDataSlot,
   uint64_t                                    data)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   return vk_object_base_set_private_data(&device->vk, objectType,
                                          objectHandle, privateDataSlot,
                                          data);
}
2865
/* VK_EXT_private_data: read back a value stored with lvp_SetPrivateDataEXT;
 * delegates to the common runtime.
 */
VKAPI_ATTR void VKAPI_CALL lvp_GetPrivateDataEXT(
   VkDevice                                    _device,
   VkObjectType                                objectType,
   uint64_t                                    objectHandle,
   VkPrivateDataSlotEXT                        privateDataSlot,
   uint64_t*                                   pData)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   vk_object_base_get_private_data(&device->vk, objectType, objectHandle,
                                   privateDataSlot, pData);
}
2877
/* External fence handles are not supported: report empty capability masks. */
VKAPI_ATTR void VKAPI_CALL lvp_GetPhysicalDeviceExternalFenceProperties(
   VkPhysicalDevice                            physicalDevice,
   const VkPhysicalDeviceExternalFenceInfo    *pExternalFenceInfo,
   VkExternalFenceProperties                  *pExternalFenceProperties)
{
   pExternalFenceProperties->exportFromImportedHandleTypes = 0;
   pExternalFenceProperties->compatibleHandleTypes = 0;
   pExternalFenceProperties->externalFenceFeatures = 0;
}
2887
/* External semaphore handles are not supported: report empty capability
 * masks.
 */
VKAPI_ATTR void VKAPI_CALL lvp_GetPhysicalDeviceExternalSemaphoreProperties(
   VkPhysicalDevice                            physicalDevice,
   const VkPhysicalDeviceExternalSemaphoreInfo *pExternalSemaphoreInfo,
   VkExternalSemaphoreProperties               *pExternalSemaphoreProperties)
{
   pExternalSemaphoreProperties->exportFromImportedHandleTypes = 0;
   pExternalSemaphoreProperties->compatibleHandleTypes = 0;
   pExternalSemaphoreProperties->externalSemaphoreFeatures = 0;
}
2897
2898 static const VkTimeDomainEXT lvp_time_domains[] = {
2899 VK_TIME_DOMAIN_DEVICE_EXT,
2900 VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT,
2901 };
2902
lvp_GetPhysicalDeviceCalibrateableTimeDomainsEXT(VkPhysicalDevice physicalDevice,uint32_t * pTimeDomainCount,VkTimeDomainEXT * pTimeDomains)2903 VKAPI_ATTR VkResult VKAPI_CALL lvp_GetPhysicalDeviceCalibrateableTimeDomainsEXT(
2904 VkPhysicalDevice physicalDevice,
2905 uint32_t *pTimeDomainCount,
2906 VkTimeDomainEXT *pTimeDomains)
2907 {
2908 int d;
2909 VK_OUTARRAY_MAKE_TYPED(VkTimeDomainEXT, out, pTimeDomains,
2910 pTimeDomainCount);
2911
2912 for (d = 0; d < ARRAY_SIZE(lvp_time_domains); d++) {
2913 vk_outarray_append_typed(VkTimeDomainEXT, &out, i) {
2914 *i = lvp_time_domains[d];
2915 }
2916 }
2917
2918 return vk_outarray_status(&out);
2919 }
2920
lvp_GetCalibratedTimestampsEXT(VkDevice device,uint32_t timestampCount,const VkCalibratedTimestampInfoEXT * pTimestampInfos,uint64_t * pTimestamps,uint64_t * pMaxDeviation)2921 VKAPI_ATTR VkResult VKAPI_CALL lvp_GetCalibratedTimestampsEXT(
2922 VkDevice device,
2923 uint32_t timestampCount,
2924 const VkCalibratedTimestampInfoEXT *pTimestampInfos,
2925 uint64_t *pTimestamps,
2926 uint64_t *pMaxDeviation)
2927 {
2928 *pMaxDeviation = 1;
2929
2930 uint64_t now = os_time_get_nano();
2931 for (unsigned i = 0; i < timestampCount; i++) {
2932 pTimestamps[i] = now;
2933 }
2934 return VK_SUCCESS;
2935 }
2936
/* Device groups contain only this single software device, so no peer memory
 * features are available between devices.
 */
VKAPI_ATTR void VKAPI_CALL lvp_GetDeviceGroupPeerMemoryFeaturesKHR(
   VkDevice device,
   uint32_t heapIndex,
   uint32_t localDeviceIndex,
   uint32_t remoteDeviceIndex,
   VkPeerMemoryFeatureFlags *pPeerMemoryFeatures)
{
   *pPeerMemoryFeatures = 0;
}
2946