1 /*
2 * GStreamer
3 * Copyright (C) 2015 Matthew Waters <matthew@centricular.com>
4 *
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
9 *
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
14 *
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
19 */
20
21 #ifdef HAVE_CONFIG_H
22 #include "config.h"
23 #endif
24
25 #include "vkdevice.h"
26 #include "vkutils_private.h"
27
28 #include <string.h>
29
/* Device-level validation layers requested when layer checking is enabled.
 * NOTE(review): these are the legacy per-component LunarG/Google layer
 * names; current Vulkan loaders ship the unified VK_LAYER_KHRONOS_validation
 * instead — confirm against the targeted SDK version. */
static const char *device_validation_layers[] = {
  "VK_LAYER_GOOGLE_threading",
  "VK_LAYER_LUNARG_mem_tracker",
  "VK_LAYER_LUNARG_object_tracker",
  "VK_LAYER_LUNARG_draw_state",
  "VK_LAYER_LUNARG_param_checker",
  "VK_LAYER_LUNARG_swapchain",
  "VK_LAYER_LUNARG_device_limits",
  "VK_LAYER_LUNARG_image",
};
40
/* Debug category for this file, plus the shared "GST_CONTEXT" category used
 * when logging GstContext negotiation. */
#define GST_CAT_DEFAULT gst_vulkan_device_debug
GST_DEBUG_CATEGORY (GST_CAT_DEFAULT);
GST_DEBUG_CATEGORY_STATIC (GST_CAT_CONTEXT);

static void gst_vulkan_device_finalize (GObject * object);

/* Private instance data: tracks whether gst_vulkan_device_open() has
 * already succeeded so the open is performed only once. */
struct _GstVulkanDevicePrivate
{
  gboolean opened;
};

#define gst_vulkan_device_parent_class parent_class
/* Register the GstVulkanDevice type; the debug categories are initialized
 * as part of the first g_type lookup. */
G_DEFINE_TYPE_WITH_CODE (GstVulkanDevice, gst_vulkan_device, GST_TYPE_OBJECT,
    G_ADD_PRIVATE (GstVulkanDevice)
    GST_DEBUG_CATEGORY_INIT (GST_CAT_DEFAULT, "vulkandevice", 0,
        "Vulkan Device");
    GST_DEBUG_CATEGORY_GET (GST_CAT_CONTEXT, "GST_CONTEXT"));
58
59 GstVulkanDevice *
gst_vulkan_device_new(GstVulkanInstance * instance)60 gst_vulkan_device_new (GstVulkanInstance * instance)
61 {
62 GstVulkanDevice *device = g_object_new (GST_TYPE_VULKAN_DEVICE, NULL);
63
64 gst_object_ref_sink (device);
65
66 device->instance = gst_object_ref (instance);
67 /* FIXME: select this externally */
68 device->device_index = 0;
69
70 return device;
71 }
72
/* GObject instance init: wire up the private data area. */
static void
gst_vulkan_device_init (GstVulkanDevice * device)
{
  device->priv = gst_vulkan_device_get_instance_private (device);
}
78
79 static void
gst_vulkan_device_class_init(GstVulkanDeviceClass * device_class)80 gst_vulkan_device_class_init (GstVulkanDeviceClass * device_class)
81 {
82 GObjectClass *gobject_class = (GObjectClass *) device_class;
83
84 gobject_class->finalize = gst_vulkan_device_finalize;
85 }
86
87 static void
gst_vulkan_device_finalize(GObject * object)88 gst_vulkan_device_finalize (GObject * object)
89 {
90 GstVulkanDevice *device = GST_VULKAN_DEVICE (object);
91
92 g_free (device->queue_family_props);
93 device->queue_family_props = NULL;
94
95 if (device->cmd_pool)
96 vkDestroyCommandPool (device->device, device->cmd_pool, NULL);
97 device->cmd_pool = VK_NULL_HANDLE;
98
99 if (device->device) {
100 vkDeviceWaitIdle (device->device);
101 vkDestroyDevice (device->device, NULL);
102 }
103 device->device = VK_NULL_HANDLE;
104
105 if (device->instance)
106 gst_object_unref (device->instance);
107 device->instance = VK_NULL_HANDLE;
108
109 G_OBJECT_CLASS (parent_class)->finalize (object);
110 }
111
112 static const gchar *
_device_type_to_string(VkPhysicalDeviceType type)113 _device_type_to_string (VkPhysicalDeviceType type)
114 {
115 switch (type) {
116 case VK_PHYSICAL_DEVICE_TYPE_OTHER:
117 return "other";
118 case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU:
119 return "integrated";
120 case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU:
121 return "discrete";
122 case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU:
123 return "virtual";
124 case VK_PHYSICAL_DEVICE_TYPE_CPU:
125 return "CPU";
126 default:
127 return "unknown";
128 }
129 }
130
131 static gboolean
_physical_device_info(GstVulkanDevice * device,GError ** error)132 _physical_device_info (GstVulkanDevice * device, GError ** error)
133 {
134 VkPhysicalDeviceProperties props;
135 VkPhysicalDevice gpu;
136
137 gpu = gst_vulkan_device_get_physical_device (device);
138 if (!gpu) {
139 g_set_error (error, GST_VULKAN_ERROR, VK_ERROR_INITIALIZATION_FAILED,
140 "Failed to retrieve physical device");
141 return FALSE;
142 }
143
144 vkGetPhysicalDeviceProperties (gpu, &props);
145
146 GST_INFO_OBJECT (device, "device name %s type %s api version %u.%u.%u, "
147 "driver version %u.%u.%u vendor ID 0x%x, device ID 0x%x",
148 props.deviceName, _device_type_to_string (props.deviceType),
149 VK_VERSION_MAJOR (props.apiVersion), VK_VERSION_MINOR (props.apiVersion),
150 VK_VERSION_PATCH (props.apiVersion),
151 VK_VERSION_MAJOR (props.driverVersion),
152 VK_VERSION_MINOR (props.driverVersion),
153 VK_VERSION_PATCH (props.driverVersion), props.vendorID, props.deviceID);
154
155 return TRUE;
156 }
157
158 gboolean
gst_vulkan_device_open(GstVulkanDevice * device,GError ** error)159 gst_vulkan_device_open (GstVulkanDevice * device, GError ** error)
160 {
161 const char *extension_names[64];
162 uint32_t enabled_extension_count = 0;
163 uint32_t device_extension_count = 0;
164 VkExtensionProperties *device_extensions = NULL;
165 uint32_t enabled_layer_count = 0;
166 gchar **enabled_layers;
167 uint32_t device_layer_count = 0;
168 VkLayerProperties *device_layers;
169 gboolean have_swapchain_ext;
170 VkPhysicalDevice gpu;
171 VkResult err;
172 guint i;
173
174 g_return_val_if_fail (GST_IS_VULKAN_DEVICE (device), FALSE);
175
176 GST_OBJECT_LOCK (device);
177
178 if (device->priv->opened) {
179 GST_OBJECT_UNLOCK (device);
180 return TRUE;
181 }
182
183 if (!_physical_device_info (device, error))
184 goto error;
185
186 gpu = gst_vulkan_device_get_physical_device (device);
187
188 /* Look for validation layers */
189 err = vkEnumerateDeviceLayerProperties (gpu, &device_layer_count, NULL);
190 if (gst_vulkan_error_to_g_error (err, error,
191 "vkEnumerateDeviceLayerProperties") < 0)
192 goto error;
193
194 device_layers = g_new0 (VkLayerProperties, device_layer_count);
195 err =
196 vkEnumerateDeviceLayerProperties (gpu, &device_layer_count,
197 device_layers);
198 if (gst_vulkan_error_to_g_error (err, error,
199 "vkEnumerateDeviceLayerProperties") < 0) {
200 g_free (device_layers);
201 goto error;
202 }
203
204 _check_for_all_layers (G_N_ELEMENTS (device_validation_layers),
205 device_validation_layers, device_layer_count, device_layers,
206 &enabled_layer_count, &enabled_layers);
207 g_free (device_layers);
208 device_layers = NULL;
209
210 err =
211 vkEnumerateDeviceExtensionProperties (gpu, NULL,
212 &device_extension_count, NULL);
213 if (gst_vulkan_error_to_g_error (err, error,
214 "vkEnumerateDeviceExtensionProperties") < 0) {
215 g_strfreev (enabled_layers);
216 goto error;
217 }
218 GST_DEBUG_OBJECT (device, "Found %u extensions", device_extension_count);
219
220 have_swapchain_ext = 0;
221 enabled_extension_count = 0;
222 memset (extension_names, 0, sizeof (extension_names));
223 device_extensions = g_new0 (VkExtensionProperties, device_extension_count);
224 err = vkEnumerateDeviceExtensionProperties (gpu, NULL,
225 &device_extension_count, device_extensions);
226 if (gst_vulkan_error_to_g_error (err, error,
227 "vkEnumerateDeviceExtensionProperties") < 0) {
228 g_strfreev (enabled_layers);
229 g_free (device_extensions);
230 goto error;
231 }
232
233 for (i = 0; i < device_extension_count; i++) {
234 GST_TRACE_OBJECT (device, "checking device extension %s",
235 device_extensions[i].extensionName);
236 if (!strcmp (VK_KHR_SWAPCHAIN_EXTENSION_NAME,
237 device_extensions[i].extensionName)) {
238 have_swapchain_ext = TRUE;
239 extension_names[enabled_extension_count++] =
240 (gchar *) VK_KHR_SWAPCHAIN_EXTENSION_NAME;
241 }
242 g_assert (enabled_extension_count < 64);
243 }
244 if (!have_swapchain_ext) {
245 g_set_error_literal (error, GST_VULKAN_ERROR,
246 VK_ERROR_EXTENSION_NOT_PRESENT,
247 "Failed to find required extension, \"" VK_KHR_SWAPCHAIN_EXTENSION_NAME
248 "\"");
249 g_strfreev (enabled_layers);
250 goto error;
251 }
252 g_free (device_extensions);
253
254 vkGetPhysicalDeviceProperties (gpu, &device->gpu_props);
255 vkGetPhysicalDeviceMemoryProperties (gpu, &device->memory_properties);
256 vkGetPhysicalDeviceFeatures (gpu, &device->gpu_features);
257
258 vkGetPhysicalDeviceQueueFamilyProperties (gpu, &device->n_queue_families,
259 NULL);
260 g_assert (device->n_queue_families >= 1);
261
262 device->queue_family_props =
263 g_new0 (VkQueueFamilyProperties, device->n_queue_families);
264 vkGetPhysicalDeviceQueueFamilyProperties (gpu, &device->n_queue_families,
265 device->queue_family_props);
266
267 /* FIXME: allow overriding/selecting */
268 for (i = 0; i < device->n_queue_families; i++) {
269 if (device->queue_family_props[i].queueFlags & VK_QUEUE_GRAPHICS_BIT)
270 break;
271 }
272 if (i >= device->n_queue_families) {
273 g_set_error (error, GST_VULKAN_ERROR, VK_ERROR_INITIALIZATION_FAILED,
274 "Failed to find a compatible queue family");
275 g_strfreev (enabled_layers);
276 goto error;
277 }
278 device->queue_family_id = i;
279 device->n_queues = 1;
280
281 {
282 VkDeviceQueueCreateInfo queue_info = { 0, };
283 VkDeviceCreateInfo device_info = { 0, };
284 gfloat queue_priority = 0.5;
285
286 queue_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
287 queue_info.pNext = NULL;
288 queue_info.queueFamilyIndex = device->queue_family_id;
289 queue_info.queueCount = device->n_queues;
290 queue_info.pQueuePriorities = &queue_priority;
291
292 device_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
293 device_info.pNext = NULL;
294 device_info.queueCreateInfoCount = 1;
295 device_info.pQueueCreateInfos = &queue_info;
296 #if 0
297 device_info.enabledLayerCount = enabled_layer_count;
298 device_info.ppEnabledLayerNames = (const char *const *) enabled_layers;
299 #else
300 device_info.enabledLayerCount = 0;
301 device_info.ppEnabledLayerNames = NULL;
302 #endif
303 device_info.enabledExtensionCount = enabled_extension_count;
304 device_info.ppEnabledExtensionNames = (const char *const *) extension_names;
305 device_info.pEnabledFeatures = NULL;
306
307 err = vkCreateDevice (gpu, &device_info, NULL, &device->device);
308 if (gst_vulkan_error_to_g_error (err, error, "vkCreateDevice") < 0) {
309 g_strfreev (enabled_layers);
310 goto error;
311 }
312 }
313 g_strfreev (enabled_layers);
314
315 {
316 VkCommandPoolCreateInfo cmd_pool_info = { 0, };
317
318 cmd_pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
319 cmd_pool_info.pNext = NULL;
320 cmd_pool_info.queueFamilyIndex = device->queue_family_id;
321 cmd_pool_info.flags = 0;
322
323 err =
324 vkCreateCommandPool (device->device, &cmd_pool_info, NULL,
325 &device->cmd_pool);
326 if (gst_vulkan_error_to_g_error (err, error, "vkCreateCommandPool") < 0)
327 goto error;
328 }
329
330 GST_OBJECT_UNLOCK (device);
331 return TRUE;
332
333 error:
334 {
335 GST_OBJECT_UNLOCK (device);
336 return FALSE;
337 }
338 }
339
340 GstVulkanQueue *
gst_vulkan_device_get_queue(GstVulkanDevice * device,guint32 queue_family,guint32 queue_i)341 gst_vulkan_device_get_queue (GstVulkanDevice * device, guint32 queue_family,
342 guint32 queue_i)
343 {
344 GstVulkanQueue *ret;
345
346 g_return_val_if_fail (GST_IS_VULKAN_DEVICE (device), NULL);
347 g_return_val_if_fail (device->device != NULL, NULL);
348 g_return_val_if_fail (queue_family < device->n_queues, NULL);
349 g_return_val_if_fail (queue_i <
350 device->queue_family_props[queue_family].queueCount, NULL);
351
352 ret = g_object_new (GST_TYPE_VULKAN_QUEUE, NULL);
353 gst_object_ref_sink (ret);
354 ret->device = gst_object_ref (device);
355 ret->family = queue_family;
356 ret->index = queue_i;
357
358 vkGetDeviceQueue (device->device, queue_family, queue_i, &ret->queue);
359
360 return ret;
361 }
362
363 void
gst_vulkan_device_foreach_queue(GstVulkanDevice * device,GstVulkanDeviceForEachQueueFunc func,gpointer user_data)364 gst_vulkan_device_foreach_queue (GstVulkanDevice * device,
365 GstVulkanDeviceForEachQueueFunc func, gpointer user_data)
366 {
367 gboolean done = FALSE;
368 guint i;
369
370 for (i = 0; i < device->n_queues; i++) {
371 GstVulkanQueue *queue =
372 gst_vulkan_device_get_queue (device, device->queue_family_id, i);
373
374 if (!func (device, queue, user_data))
375 done = TRUE;
376
377 gst_object_unref (queue);
378
379 if (done)
380 break;
381 }
382 }
383
384 gpointer
gst_vulkan_device_get_proc_address(GstVulkanDevice * device,const gchar * name)385 gst_vulkan_device_get_proc_address (GstVulkanDevice * device,
386 const gchar * name)
387 {
388 g_return_val_if_fail (GST_IS_VULKAN_DEVICE (device), NULL);
389 g_return_val_if_fail (device->device != NULL, NULL);
390 g_return_val_if_fail (name != NULL, NULL);
391
392 GST_TRACE_OBJECT (device, "%s", name);
393
394 return vkGetDeviceProcAddr (device->device, name);
395 }
396
397 GstVulkanInstance *
gst_vulkan_device_get_instance(GstVulkanDevice * device)398 gst_vulkan_device_get_instance (GstVulkanDevice * device)
399 {
400 g_return_val_if_fail (GST_IS_VULKAN_DEVICE (device), NULL);
401
402 return device->instance ? gst_object_ref (device->instance) : NULL;
403 }
404
405 VkPhysicalDevice
gst_vulkan_device_get_physical_device(GstVulkanDevice * device)406 gst_vulkan_device_get_physical_device (GstVulkanDevice * device)
407 {
408 g_return_val_if_fail (GST_IS_VULKAN_DEVICE (device), NULL);
409
410 if (device->instance->physical_devices == NULL)
411 return NULL;
412 if (device->device_index >= device->instance->n_physical_devices)
413 return NULL;
414
415 return device->instance->physical_devices[device->device_index];
416 }
417
418 gboolean
gst_vulkan_device_create_cmd_buffer(GstVulkanDevice * device,VkCommandBuffer * cmd,GError ** error)419 gst_vulkan_device_create_cmd_buffer (GstVulkanDevice * device,
420 VkCommandBuffer * cmd, GError ** error)
421 {
422 VkResult err;
423 VkCommandBufferAllocateInfo cmd_info = { 0, };
424
425 cmd_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
426 cmd_info.pNext = NULL;
427 cmd_info.commandPool = device->cmd_pool;
428 cmd_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
429 cmd_info.commandBufferCount = 1;
430
431 err = vkAllocateCommandBuffers (device->device, &cmd_info, cmd);
432 if (gst_vulkan_error_to_g_error (err, error, "vkCreateCommandBuffer") < 0)
433 return FALSE;
434
435 GST_LOG_OBJECT (device, "created cmd buffer %p", cmd);
436
437 return TRUE;
438 }
439
440 /**
441 * gst_context_set_vulkan_device:
442 * @context: a #GstContext
443 * @device: a #GstVulkanDevice
444 *
445 * Sets @device on @context
446 *
447 * Since: 1.10
448 */
449 void
gst_context_set_vulkan_device(GstContext * context,GstVulkanDevice * device)450 gst_context_set_vulkan_device (GstContext * context, GstVulkanDevice * device)
451 {
452 GstStructure *s;
453
454 g_return_if_fail (context != NULL);
455 g_return_if_fail (gst_context_is_writable (context));
456
457 if (device)
458 GST_CAT_LOG (GST_CAT_CONTEXT,
459 "setting GstVulkanDevice(%" GST_PTR_FORMAT ") on context(%"
460 GST_PTR_FORMAT ")", device, context);
461
462 s = gst_context_writable_structure (context);
463 gst_structure_set (s, GST_VULKAN_DEVICE_CONTEXT_TYPE_STR,
464 GST_TYPE_VULKAN_DEVICE, device, NULL);
465 }
466
467 /**
468 * gst_context_get_vulkan_device:
469 * @context: a #GstContext
470 * @device: resulting #GstVulkanDevice
471 *
472 * Returns: Whether @device was in @context
473 *
474 * Since: 1.10
475 */
476 gboolean
gst_context_get_vulkan_device(GstContext * context,GstVulkanDevice ** device)477 gst_context_get_vulkan_device (GstContext * context, GstVulkanDevice ** device)
478 {
479 const GstStructure *s;
480 gboolean ret;
481
482 g_return_val_if_fail (device != NULL, FALSE);
483 g_return_val_if_fail (context != NULL, FALSE);
484
485 s = gst_context_get_structure (context);
486 ret = gst_structure_get (s, GST_VULKAN_DEVICE_CONTEXT_TYPE_STR,
487 GST_TYPE_VULKAN_DEVICE, device, NULL);
488
489 GST_CAT_LOG (GST_CAT_CONTEXT, "got GstVulkanDevice(%" GST_PTR_FORMAT
490 ") from context(%" GST_PTR_FORMAT ")", *device, context);
491
492 return ret;
493 }
494
495 gboolean
gst_vulkan_device_handle_context_query(GstElement * element,GstQuery * query,GstVulkanDevice ** device)496 gst_vulkan_device_handle_context_query (GstElement * element, GstQuery * query,
497 GstVulkanDevice ** device)
498 {
499 gboolean res = FALSE;
500 const gchar *context_type;
501 GstContext *context, *old_context;
502
503 g_return_val_if_fail (element != NULL, FALSE);
504 g_return_val_if_fail (query != NULL, FALSE);
505 g_return_val_if_fail (GST_QUERY_TYPE (query) == GST_QUERY_CONTEXT, FALSE);
506 g_return_val_if_fail (device != NULL, FALSE);
507
508 gst_query_parse_context_type (query, &context_type);
509
510 if (g_strcmp0 (context_type, GST_VULKAN_DEVICE_CONTEXT_TYPE_STR) == 0) {
511 gst_query_parse_context (query, &old_context);
512
513 if (old_context)
514 context = gst_context_copy (old_context);
515 else
516 context = gst_context_new (GST_VULKAN_DEVICE_CONTEXT_TYPE_STR, TRUE);
517
518 gst_context_set_vulkan_device (context, *device);
519 gst_query_set_context (query, context);
520 gst_context_unref (context);
521
522 res = *device != NULL;
523 }
524
525 return res;
526 }
527
528 gboolean
gst_vulkan_device_run_context_query(GstElement * element,GstVulkanDevice ** device)529 gst_vulkan_device_run_context_query (GstElement * element,
530 GstVulkanDevice ** device)
531 {
532 GstQuery *query;
533
534 g_return_val_if_fail (GST_IS_ELEMENT (element), FALSE);
535 g_return_val_if_fail (device != NULL, FALSE);
536
537 if (*device && GST_IS_VULKAN_DEVICE (*device))
538 return TRUE;
539
540 if ((query =
541 gst_vulkan_local_context_query (element,
542 GST_VULKAN_DEVICE_CONTEXT_TYPE_STR, FALSE))) {
543 GstContext *context;
544
545 gst_query_parse_context (query, &context);
546 if (context)
547 gst_context_get_vulkan_device (context, device);
548
549 gst_query_unref (query);
550 }
551
552 GST_DEBUG_OBJECT (element, "found device %p", *device);
553
554 if (*device)
555 return TRUE;
556
557 return FALSE;
558 }
559