1 /*
2 * Copyright (C) 2014 Collabora Ltd.
3 * Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
4 *
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
9 *
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
14 *
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
19 *
20 */
21
22 #ifdef HAVE_CONFIG_H
23 #include "config.h"
24 #endif
25
26 #include <sys/stat.h>
27 #include <fcntl.h>
28 #include <errno.h>
29 #include <unistd.h>
30 #include <string.h>
31
32 #include "gstv4l2object.h"
33 #include "gstv4l2videodec.h"
34
35 #include <string.h>
36 #include <gst/gst-i18n-plugin.h>
37
38 GST_DEBUG_CATEGORY_STATIC (gst_v4l2_video_dec_debug);
39 #define GST_CAT_DEFAULT gst_v4l2_video_dec_debug
40
41 typedef struct
42 {
43 gchar *device;
44 GstCaps *sink_caps;
45 GstCaps *src_caps;
46 const gchar *longname;
47 const gchar *description;
48 } GstV4l2VideoDecCData;
49
50 enum
51 {
52 PROP_0,
53 V4L2_STD_OBJECT_PROPS
54 };
55
56 #define gst_v4l2_video_dec_parent_class parent_class
57 G_DEFINE_ABSTRACT_TYPE (GstV4l2VideoDec, gst_v4l2_video_dec,
58 GST_TYPE_VIDEO_DECODER);
59
60 static GstFlowReturn gst_v4l2_video_dec_finish (GstVideoDecoder * decoder);
61
62 static void
gst_v4l2_video_dec_set_property(GObject * object,guint prop_id,const GValue * value,GParamSpec * pspec)63 gst_v4l2_video_dec_set_property (GObject * object,
64 guint prop_id, const GValue * value, GParamSpec * pspec)
65 {
66 GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (object);
67
68 switch (prop_id) {
69 case PROP_CAPTURE_IO_MODE:
70 if (!gst_v4l2_object_set_property_helper (self->v4l2capture,
71 prop_id, value, pspec)) {
72 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
73 }
74 break;
75
76 /* By default, only set on output */
77 default:
78 if (!gst_v4l2_object_set_property_helper (self->v4l2output,
79 prop_id, value, pspec)) {
80 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
81 }
82 break;
83 }
84 }
85
86 static void
gst_v4l2_video_dec_get_property(GObject * object,guint prop_id,GValue * value,GParamSpec * pspec)87 gst_v4l2_video_dec_get_property (GObject * object,
88 guint prop_id, GValue * value, GParamSpec * pspec)
89 {
90 GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (object);
91
92 switch (prop_id) {
93 case PROP_CAPTURE_IO_MODE:
94 if (!gst_v4l2_object_get_property_helper (self->v4l2capture,
95 prop_id, value, pspec)) {
96 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
97 }
98 break;
99
100 /* By default read from output */
101 default:
102 if (!gst_v4l2_object_get_property_helper (self->v4l2output,
103 prop_id, value, pspec)) {
104 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
105 }
106 break;
107 }
108 }
109
/* Open both V4L2 queues of the M2M device and probe the encoded (sink side)
 * formats. Called from the GstVideoDecoder::open vmethod.
 *
 * Returns TRUE on success; on any failure both queues are closed again and
 * the probed caps are cleared. */
static gboolean
gst_v4l2_video_dec_open (GstVideoDecoder * decoder)
{
  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
  GstCaps *codec_caps;

  GST_DEBUG_OBJECT (self, "Opening");

  if (!gst_v4l2_object_open (self->v4l2output))
    goto failure;

  /* The capture queue shares the same fd as the output queue (M2M device) */
  if (!gst_v4l2_object_open_shared (self->v4l2capture, self->v4l2output))
    goto failure;

  /* Restrict probing to the codecs listed in the sink pad template */
  codec_caps = gst_pad_get_pad_template_caps (decoder->sinkpad);
  self->probed_sinkcaps = gst_v4l2_object_probe_caps (self->v4l2output,
      codec_caps);
  gst_caps_unref (codec_caps);

  if (gst_caps_is_empty (self->probed_sinkcaps))
    goto no_encoded_format;

  return TRUE;

no_encoded_format:
  GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
      (_("Decoder on device %s has no supported input format"),
          self->v4l2output->videodev), (NULL));
  goto failure;

failure:
  /* Roll back whichever queue was opened before the failure */
  if (GST_V4L2_IS_OPEN (self->v4l2output))
    gst_v4l2_object_close (self->v4l2output);

  if (GST_V4L2_IS_OPEN (self->v4l2capture))
    gst_v4l2_object_close (self->v4l2capture);

  gst_caps_replace (&self->probed_srccaps, NULL);
  gst_caps_replace (&self->probed_sinkcaps, NULL);

  return FALSE;
}
152
153 static gboolean
gst_v4l2_video_dec_close(GstVideoDecoder * decoder)154 gst_v4l2_video_dec_close (GstVideoDecoder * decoder)
155 {
156 GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
157
158 GST_DEBUG_OBJECT (self, "Closing");
159
160 gst_v4l2_object_close (self->v4l2output);
161 gst_v4l2_object_close (self->v4l2capture);
162 gst_caps_replace (&self->probed_srccaps, NULL);
163 gst_caps_replace (&self->probed_sinkcaps, NULL);
164
165 return TRUE;
166 }
167
168 static gboolean
gst_v4l2_video_dec_start(GstVideoDecoder * decoder)169 gst_v4l2_video_dec_start (GstVideoDecoder * decoder)
170 {
171 GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
172
173 GST_DEBUG_OBJECT (self, "Starting");
174
175 gst_v4l2_object_unlock (self->v4l2output);
176 g_atomic_int_set (&self->active, TRUE);
177 self->output_flow = GST_FLOW_OK;
178
179 return TRUE;
180 }
181
/* GstVideoDecoder::stop vmethod: tear down streaming.
 * Order matters: both queues are unlocked first so the capture task can
 * exit any blocking call, then the task is joined before the queues are
 * stopped. */
static gboolean
gst_v4l2_video_dec_stop (GstVideoDecoder * decoder)
{
  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);

  GST_DEBUG_OBJECT (self, "Stopping");

  gst_v4l2_object_unlock (self->v4l2output);
  gst_v4l2_object_unlock (self->v4l2capture);

  /* Wait for capture thread to stop */
  gst_pad_stop_task (decoder->srcpad);

  /* Reset the flow return under the stream lock since the capture task
   * also writes it with that lock held */
  GST_VIDEO_DECODER_STREAM_LOCK (decoder);
  self->output_flow = GST_FLOW_OK;
  GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);

  /* Should have been flushed already */
  g_assert (g_atomic_int_get (&self->active) == FALSE);

  gst_v4l2_object_stop (self->v4l2output);
  gst_v4l2_object_stop (self->v4l2capture);

  if (self->input_state) {
    gst_video_codec_state_unref (self->input_state);
    self->input_state = NULL;
  }

  GST_DEBUG_OBJECT (self, "Stopped");

  return TRUE;
}
214
215 static gboolean
gst_v4l2_video_dec_set_format(GstVideoDecoder * decoder,GstVideoCodecState * state)216 gst_v4l2_video_dec_set_format (GstVideoDecoder * decoder,
217 GstVideoCodecState * state)
218 {
219 GstV4l2Error error = GST_V4L2_ERROR_INIT;
220 gboolean ret = TRUE;
221 GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
222
223 GST_DEBUG_OBJECT (self, "Setting format: %" GST_PTR_FORMAT, state->caps);
224
225 if (self->input_state) {
226 if (gst_v4l2_object_caps_equal (self->v4l2output, state->caps)) {
227 GST_DEBUG_OBJECT (self, "Compatible caps");
228 goto done;
229 }
230 gst_video_codec_state_unref (self->input_state);
231 self->input_state = NULL;
232
233 gst_v4l2_video_dec_finish (decoder);
234 gst_v4l2_object_stop (self->v4l2output);
235
236 /* The renegotiation flow don't blend with the base class flow. To properly
237 * stop the capture pool, if the buffers can't be orphaned, we need to
238 * reclaim our buffers, which will happend through the allocation query.
239 * The allocation query is triggered by gst_video_decoder_negotiate() which
240 * requires the output caps to be set, but we can't know this information
241 * as we rely on the decoder, which requires the capture queue to be
242 * stopped.
243 *
244 * To workaround this issue, we simply run an allocation query with the
245 * old negotiated caps in order to drain/reclaim our buffers. That breaks
246 * the complexity and should not have much impact in performance since the
247 * following allocation query will happen on a drained pipeline and won't
248 * block. */
249 if (self->v4l2capture->pool &&
250 !gst_v4l2_buffer_pool_orphan (&self->v4l2capture->pool)) {
251 GstCaps *caps = gst_pad_get_current_caps (decoder->srcpad);
252 if (caps) {
253 GstQuery *query = gst_query_new_allocation (caps, FALSE);
254 gst_pad_peer_query (decoder->srcpad, query);
255 gst_query_unref (query);
256 gst_caps_unref (caps);
257 }
258 }
259
260 gst_v4l2_object_stop (self->v4l2capture);
261 self->output_flow = GST_FLOW_OK;
262 }
263
264 ret = gst_v4l2_object_set_format (self->v4l2output, state->caps, &error);
265
266 gst_caps_replace (&self->probed_srccaps, NULL);
267 self->probed_srccaps = gst_v4l2_object_probe_caps (self->v4l2capture,
268 gst_v4l2_object_get_raw_caps ());
269
270 if (gst_caps_is_empty (self->probed_srccaps))
271 goto no_raw_format;
272
273 if (ret)
274 self->input_state = gst_video_codec_state_ref (state);
275 else
276 gst_v4l2_error (self, &error);
277
278 done:
279 return ret;
280
281 no_raw_format:
282 GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
283 (_("Decoder on device %s has no supported output format"),
284 self->v4l2output->videodev), (NULL));
285 return GST_FLOW_ERROR;
286 }
287
/* GstVideoDecoder::flush vmethod: stop the capture task if it is still
 * running, then clear the flushing state of both queues and flush the
 * buffer pools. The unlock_stop/pool-flush ordering below is load-bearing. */
static gboolean
gst_v4l2_video_dec_flush (GstVideoDecoder * decoder)
{
  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);

  GST_DEBUG_OBJECT (self, "Flushed");

  /* Ensure the processing thread has stopped for the reverse playback
   * discont case */
  if (gst_pad_get_task_state (decoder->srcpad) == GST_TASK_STARTED) {
    /* Drop the stream lock while joining the task, the task also takes it */
    GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);

    gst_v4l2_object_unlock (self->v4l2output);
    gst_v4l2_object_unlock (self->v4l2capture);
    gst_pad_stop_task (decoder->srcpad);
    GST_VIDEO_DECODER_STREAM_LOCK (decoder);
  }

  self->output_flow = GST_FLOW_OK;

  /* Leave the flushing state set by the unlock calls above */
  gst_v4l2_object_unlock_stop (self->v4l2output);
  gst_v4l2_object_unlock_stop (self->v4l2capture);

  if (self->v4l2output->pool)
    gst_v4l2_buffer_pool_flush (self->v4l2output->pool);

  /* gst_v4l2_buffer_pool_flush() calls streamon the capture pool and must be
   * called after gst_v4l2_object_unlock_stop() stopped flushing the buffer
   * pool. */
  if (self->v4l2capture->pool)
    gst_v4l2_buffer_pool_flush (self->v4l2capture->pool);

  return TRUE;
}
322
323 static gboolean
gst_v4l2_video_dec_negotiate(GstVideoDecoder * decoder)324 gst_v4l2_video_dec_negotiate (GstVideoDecoder * decoder)
325 {
326 GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
327
328 /* We don't allow renegotiation without carefull disabling the pool */
329 if (self->v4l2capture->pool &&
330 gst_buffer_pool_is_active (GST_BUFFER_POOL (self->v4l2capture->pool)))
331 return TRUE;
332
333 return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);
334 }
335
336 static gboolean
gst_v4l2_decoder_cmd(GstV4l2Object * v4l2object,guint cmd,guint flags)337 gst_v4l2_decoder_cmd (GstV4l2Object * v4l2object, guint cmd, guint flags)
338 {
339 struct v4l2_decoder_cmd dcmd = { 0, };
340
341 GST_DEBUG_OBJECT (v4l2object->element,
342 "sending v4l2 decoder command %u with flags %u", cmd, flags);
343
344 if (!GST_V4L2_IS_OPEN (v4l2object))
345 return FALSE;
346
347 dcmd.cmd = cmd;
348 dcmd.flags = flags;
349 if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_DECODER_CMD, &dcmd) < 0)
350 goto dcmd_failed;
351
352 return TRUE;
353
354 dcmd_failed:
355 if (errno == ENOTTY) {
356 GST_INFO_OBJECT (v4l2object->element,
357 "Failed to send decoder command %u with flags %u for '%s'. (%s)",
358 cmd, flags, v4l2object->videodev, g_strerror (errno));
359 } else {
360 GST_ERROR_OBJECT (v4l2object->element,
361 "Failed to send decoder command %u with flags %u for '%s'. (%s)",
362 cmd, flags, v4l2object->videodev, g_strerror (errno));
363 }
364 return FALSE;
365 }
366
/* GstVideoDecoder::finish vmethod: push all pending encoded data into the
 * driver and wait until the capture task has output every decoded frame.
 * Uses V4L2_DEC_CMD_STOP when available, otherwise queues empty buffers
 * until the capture task reports FLUSHING. */
static GstFlowReturn
gst_v4l2_video_dec_finish (GstVideoDecoder * decoder)
{
  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *buffer;

  /* Nothing to drain if the capture task never started */
  if (gst_pad_get_task_state (decoder->srcpad) != GST_TASK_STARTED)
    goto done;

  GST_DEBUG_OBJECT (self, "Finishing decoding");

  /* Release the stream lock: the capture task needs it to push frames */
  GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);

  if (gst_v4l2_decoder_cmd (self->v4l2output, V4L2_DEC_CMD_STOP, 0)) {
    GstTask *task = decoder->srcpad->task;

    /* If the decoder stop command succeeded, just wait until processing is
     * finished */
    GST_DEBUG_OBJECT (self, "Waiting for decoder stop");
    GST_OBJECT_LOCK (task);
    while (GST_TASK_STATE (task) == GST_TASK_STARTED)
      GST_TASK_WAIT (task);
    GST_OBJECT_UNLOCK (task);
    ret = GST_FLOW_FLUSHING;
  } else {
    /* otherwise keep queuing empty buffers until the processing thread has
     * stopped, _pool_process() will return FLUSHING when that happened */
    while (ret == GST_FLOW_OK) {
      buffer = gst_buffer_new ();
      ret =
          gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL (self->
              v4l2output->pool), &buffer);
      gst_buffer_unref (buffer);
    }
  }

  /* and ensure the processing thread has stopped in case another error
   * occured. */
  gst_v4l2_object_unlock (self->v4l2capture);
  gst_pad_stop_task (decoder->srcpad);
  GST_VIDEO_DECODER_STREAM_LOCK (decoder);

  /* FLUSHING here means the drain completed; report the task's last real
   * flow return instead */
  if (ret == GST_FLOW_FLUSHING)
    ret = self->output_flow;

  GST_DEBUG_OBJECT (decoder, "Done draining buffers");

  /* TODO Shall we cleanup any reffed frame to workaround broken decoders ? */

done:
  return ret;
}
420
421 static GstFlowReturn
gst_v4l2_video_dec_drain(GstVideoDecoder * decoder)422 gst_v4l2_video_dec_drain (GstVideoDecoder * decoder)
423 {
424 GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
425
426 GST_DEBUG_OBJECT (self, "Draining...");
427 gst_v4l2_video_dec_finish (decoder);
428 gst_v4l2_video_dec_flush (decoder);
429
430 return GST_FLOW_OK;
431 }
432
433 static GstVideoCodecFrame *
gst_v4l2_video_dec_get_oldest_frame(GstVideoDecoder * decoder)434 gst_v4l2_video_dec_get_oldest_frame (GstVideoDecoder * decoder)
435 {
436 GstVideoCodecFrame *frame = NULL;
437 GList *frames, *l;
438 gint count = 0;
439
440 frames = gst_video_decoder_get_frames (decoder);
441
442 for (l = frames; l != NULL; l = l->next) {
443 GstVideoCodecFrame *f = l->data;
444
445 if (!frame || frame->pts > f->pts)
446 frame = f;
447
448 count++;
449 }
450
451 if (frame) {
452 GST_LOG_OBJECT (decoder,
453 "Oldest frame is %d %" GST_TIME_FORMAT " and %d frames left",
454 frame->system_frame_number, GST_TIME_ARGS (frame->pts), count - 1);
455 gst_video_codec_frame_ref (frame);
456 }
457
458 g_list_free_full (frames, (GDestroyNotify) gst_video_codec_frame_unref);
459
460 return frame;
461 }
462
/* Capture task: runs on the src pad, pulling decoded buffers from the
 * capture queue and pushing them downstream attached to the oldest pending
 * frame. Pauses itself (storing the flow return in self->output_flow) on
 * any error or flush. */
static void
gst_v4l2_video_dec_loop (GstVideoDecoder * decoder)
{
  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
  GstV4l2BufferPool *v4l2_pool = GST_V4L2_BUFFER_POOL (self->v4l2capture->pool);
  GstBufferPool *pool;
  GstVideoCodecFrame *frame;
  GstBuffer *buffer = NULL;
  GstFlowReturn ret;

  GST_LOG_OBJECT (decoder, "Allocate output buffer");

  self->output_flow = GST_FLOW_OK;
  do {
    /* We cannot use the base class allotate helper since it taking the internal
     * stream lock. we know that the acquire may need to poll until more frames
     * comes in and holding this lock would prevent that.
     */
    pool = gst_video_decoder_get_buffer_pool (decoder);

    /* Pool may be NULL if we started going to READY state */
    if (pool == NULL) {
      ret = GST_FLOW_FLUSHING;
      goto beach;
    }

    ret = gst_buffer_pool_acquire_buffer (pool, &buffer, NULL);
    g_object_unref (pool);

    if (ret != GST_FLOW_OK)
      goto beach;

    GST_LOG_OBJECT (decoder, "Process output buffer");
    ret = gst_v4l2_buffer_pool_process (v4l2_pool, &buffer);

    /* Corrupted buffers are dropped and we simply try again */
  } while (ret == GST_V4L2_FLOW_CORRUPTED_BUFFER);

  if (ret != GST_FLOW_OK)
    goto beach;

  /* The driver gives no frame association, so pair the decoded buffer with
   * the oldest frame still tracked by the base class */
  frame = gst_v4l2_video_dec_get_oldest_frame (decoder);

  if (frame) {
    frame->output_buffer = buffer;
    buffer = NULL;              /* ownership moved to the frame */
    ret = gst_video_decoder_finish_frame (decoder, frame);

    if (ret != GST_FLOW_OK)
      goto beach;
  } else {
    GST_WARNING_OBJECT (decoder, "Decoder is producing too many buffers");
    gst_buffer_unref (buffer);
  }

  return;

beach:
  GST_DEBUG_OBJECT (decoder, "Leaving output thread: %s",
      gst_flow_get_name (ret));

  /* Publish the failure reason and unblock the sink side before pausing */
  gst_buffer_replace (&buffer, NULL);
  self->output_flow = ret;
  gst_v4l2_object_unlock (self->v4l2output);
  gst_pad_pause_task (decoder->srcpad);
}
528
529 static gboolean
gst_v4l2_video_remove_padding(GstCapsFeatures * features,GstStructure * structure,gpointer user_data)530 gst_v4l2_video_remove_padding (GstCapsFeatures * features,
531 GstStructure * structure, gpointer user_data)
532 {
533 GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (user_data);
534 GstVideoAlignment *align = &self->v4l2capture->align;
535 GstVideoInfo *info = &self->v4l2capture->info;
536 int width, height;
537
538 if (!gst_structure_get_int (structure, "width", &width))
539 return TRUE;
540
541 if (!gst_structure_get_int (structure, "height", &height))
542 return TRUE;
543
544 if (align->padding_left != 0 || align->padding_top != 0 ||
545 height != info->height + align->padding_bottom)
546 return TRUE;
547
548 if (height == info->height + align->padding_bottom) {
549 /* Some drivers may round up width to the padded with */
550 if (width == info->width + align->padding_right)
551 gst_structure_set (structure,
552 "width", G_TYPE_INT, width - align->padding_right,
553 "height", G_TYPE_INT, height - align->padding_bottom, NULL);
554 /* Some drivers may keep visible width and only round up bytesperline */
555 else if (width == info->width)
556 gst_structure_set (structure,
557 "height", G_TYPE_INT, height - align->padding_bottom, NULL);
558 }
559
560 return TRUE;
561 }
562
/* GstVideoDecoder::handle_frame vmethod. On the first frame this performs
 * the whole capture side setup: send the header (codec_data or the first
 * buffer), acquire the driver-chosen format, negotiate downstream caps and
 * activate the pools. Afterwards it (re)starts the capture task if needed
 * and queues the input buffer on the output queue. */
static GstFlowReturn
gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame)
{
  GstV4l2Error error = GST_V4L2_ERROR_INIT;
  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
  GstFlowReturn ret = GST_FLOW_OK;
  gboolean processed = FALSE;   /* TRUE when frame->input_buffer was already queued as header */
  GstBuffer *tmp;
  GstTaskState task_state;

  GST_DEBUG_OBJECT (self, "Handling frame %d", frame->system_frame_number);

  if (G_UNLIKELY (!g_atomic_int_get (&self->active)))
    goto flushing;

  /* Lazily apply the negotiated input format to the output queue */
  if (G_UNLIKELY (!GST_V4L2_IS_ACTIVE (self->v4l2output))) {
    if (!self->input_state)
      goto not_negotiated;
    if (!gst_v4l2_object_set_format (self->v4l2output, self->input_state->caps,
            &error))
      goto not_negotiated;
  }

  /* First pass: the capture queue is not configured yet */
  if (G_UNLIKELY (!GST_V4L2_IS_ACTIVE (self->v4l2capture))) {
    GstBufferPool *pool = GST_BUFFER_POOL (self->v4l2output->pool);
    GstVideoInfo info;
    GstVideoCodecState *output_state;
    GstBuffer *codec_data;
    GstCaps *acquired_caps, *available_caps, *caps, *filter;
    GstStructure *st;

    GST_DEBUG_OBJECT (self, "Sending header");

    codec_data = self->input_state->codec_data;

    /* We are running in byte-stream mode, so we don't know the headers, but
     * we need to send something, otherwise the decoder will refuse to
     * intialize.
     */
    if (codec_data) {
      gst_buffer_ref (codec_data);
    } else {
      codec_data = gst_buffer_ref (frame->input_buffer);
      processed = TRUE;
    }

    /* Ensure input internal pool is active */
    if (!gst_buffer_pool_is_active (pool)) {
      GstStructure *config = gst_buffer_pool_get_config (pool);
      /* 2 buffers: one queued in the driver, one being filled */
      gst_buffer_pool_config_set_params (config, self->input_state->caps,
          self->v4l2output->info.size, 2, 2);

      /* There is no reason to refuse this config */
      if (!gst_buffer_pool_set_config (pool, config))
        goto activate_failed;

      if (!gst_buffer_pool_set_active (pool, TRUE))
        goto activate_failed;
    }

    /* Drop the stream lock while queuing: the call may block on the driver */
    GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
    ret =
        gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL (self->
            v4l2output->pool), &codec_data);
    GST_VIDEO_DECODER_STREAM_LOCK (decoder);

    gst_buffer_unref (codec_data);

    /* For decoders G_FMT returns coded size, G_SELECTION returns visible size
     * in the compose rectangle. gst_v4l2_object_acquire_format() checks both
     * and returns the visible size as with/height and the coded size as
     * padding. */
    if (!gst_v4l2_object_acquire_format (self->v4l2capture, &info))
      goto not_negotiated;

    /* Create caps from the acquired format, remove the format field */
    acquired_caps = gst_video_info_to_caps (&info);
    GST_DEBUG_OBJECT (self, "Acquired caps: %" GST_PTR_FORMAT, acquired_caps);
    st = gst_caps_get_structure (acquired_caps, 0);
    gst_structure_remove_fields (st, "format", "colorimetry", "chroma-site",
        NULL);

    /* Probe currently available pixel formats */
    available_caps = gst_caps_copy (self->probed_srccaps);
    GST_DEBUG_OBJECT (self, "Available caps: %" GST_PTR_FORMAT, available_caps);

    /* Replace coded size with visible size, we want to negotiate visible size
     * with downstream, not coded size. */
    gst_caps_map_in_place (available_caps, gst_v4l2_video_remove_padding, self);

    filter = gst_caps_intersect_full (available_caps, acquired_caps,
        GST_CAPS_INTERSECT_FIRST);
    GST_DEBUG_OBJECT (self, "Filtered caps: %" GST_PTR_FORMAT, filter);
    gst_caps_unref (acquired_caps);
    gst_caps_unref (available_caps);
    caps = gst_pad_peer_query_caps (decoder->srcpad, filter);
    gst_caps_unref (filter);

    GST_DEBUG_OBJECT (self, "Possible decoded caps: %" GST_PTR_FORMAT, caps);
    if (gst_caps_is_empty (caps)) {
      gst_caps_unref (caps);
      goto not_negotiated;
    }

    /* Fixate pixel format */
    caps = gst_caps_fixate (caps);

    GST_DEBUG_OBJECT (self, "Chosen decoded caps: %" GST_PTR_FORMAT, caps);

    /* Try to set negotiated format, on success replace acquired format */
    if (gst_v4l2_object_set_format (self->v4l2capture, caps, &error))
      gst_video_info_from_caps (&info, caps);
    else
      gst_v4l2_clear_error (&error);
    gst_caps_unref (caps);

    output_state = gst_video_decoder_set_output_state (decoder,
        info.finfo->format, info.width, info.height, self->input_state);

    /* Copy the rest of the information, there might be more in the future */
    output_state->info.interlace_mode = info.interlace_mode;
    gst_video_codec_state_unref (output_state);

    if (!gst_video_decoder_negotiate (decoder)) {
      if (GST_PAD_IS_FLUSHING (decoder->srcpad))
        goto flushing;
      else
        goto not_negotiated;
    }

    /* Ensure our internal pool is activated */
    if (!gst_buffer_pool_set_active (GST_BUFFER_POOL (self->v4l2capture->pool),
            TRUE))
      goto activate_failed;
  }

  task_state = gst_pad_get_task_state (GST_VIDEO_DECODER_SRC_PAD (self));
  if (task_state == GST_TASK_STOPPED || task_state == GST_TASK_PAUSED) {
    /* It's possible that the processing thread stopped due to an error */
    if (self->output_flow != GST_FLOW_OK &&
        self->output_flow != GST_FLOW_FLUSHING) {
      GST_DEBUG_OBJECT (self, "Processing loop stopped with error, leaving");
      ret = self->output_flow;
      goto drop;
    }

    GST_DEBUG_OBJECT (self, "Starting decoding thread");

    /* Start the processing task, when it quits, the task will disable input
     * processing to unlock input if draining, or prevent potential block */
    self->output_flow = GST_FLOW_FLUSHING;
    if (!gst_pad_start_task (decoder->srcpad,
            (GstTaskFunction) gst_v4l2_video_dec_loop, self, NULL))
      goto start_task_failed;
  }

  /* Queue the encoded buffer unless it was already sent as header above */
  if (!processed) {
    GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
    ret =
        gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL (self->v4l2output->
            pool), &frame->input_buffer);
    GST_VIDEO_DECODER_STREAM_LOCK (decoder);

    if (ret == GST_FLOW_FLUSHING) {
      /* If the capture task already stopped, report its real flow return */
      if (gst_pad_get_task_state (GST_VIDEO_DECODER_SRC_PAD (self)) !=
          GST_TASK_STARTED)
        ret = self->output_flow;
      goto drop;
    } else if (ret != GST_FLOW_OK) {
      goto process_failed;
    }
  }

  /* No need to keep input arround */
  tmp = frame->input_buffer;
  frame->input_buffer = gst_buffer_new ();
  gst_buffer_copy_into (frame->input_buffer, tmp,
      GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS |
      GST_BUFFER_COPY_META, 0, 0);
  gst_buffer_unref (tmp);

  gst_video_codec_frame_unref (frame);
  return ret;

  /* ERRORS */
not_negotiated:
  {
    GST_ERROR_OBJECT (self, "not negotiated");
    ret = GST_FLOW_NOT_NEGOTIATED;
    gst_v4l2_error (self, &error);
    goto drop;
  }
activate_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
        (_("Failed to allocate required memory.")),
        ("Buffer pool activation failed"));
    ret = GST_FLOW_ERROR;
    goto drop;
  }
flushing:
  {
    ret = GST_FLOW_FLUSHING;
    goto drop;
  }

start_task_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
        (_("Failed to start decoding thread.")), (NULL));
    ret = GST_FLOW_ERROR;
    goto drop;
  }
process_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
        (_("Failed to process frame.")),
        ("Maybe be due to not enough memory or failing driver"));
    ret = GST_FLOW_ERROR;
    goto drop;
  }
drop:
  {
    gst_video_decoder_drop_frame (decoder, frame);
    return ret;
  }
}
791
792 static gboolean
gst_v4l2_video_dec_decide_allocation(GstVideoDecoder * decoder,GstQuery * query)793 gst_v4l2_video_dec_decide_allocation (GstVideoDecoder * decoder,
794 GstQuery * query)
795 {
796 GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
797 GstClockTime latency;
798 gboolean ret = FALSE;
799
800 if (gst_v4l2_object_decide_allocation (self->v4l2capture, query))
801 ret = GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
802 query);
803
804 if (GST_CLOCK_TIME_IS_VALID (self->v4l2capture->duration)) {
805 latency = self->v4l2capture->min_buffers * self->v4l2capture->duration;
806 GST_DEBUG_OBJECT (self, "Setting latency: %" GST_TIME_FORMAT " (%"
807 G_GUINT32_FORMAT " * %" G_GUINT64_FORMAT, GST_TIME_ARGS (latency),
808 self->v4l2capture->min_buffers, self->v4l2capture->duration);
809 gst_video_decoder_set_latency (decoder, latency, latency);
810 } else {
811 GST_WARNING_OBJECT (self, "Duration invalid, not setting latency");
812 }
813
814 return ret;
815 }
816
817 static gboolean
gst_v4l2_video_dec_src_query(GstVideoDecoder * decoder,GstQuery * query)818 gst_v4l2_video_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
819 {
820 gboolean ret = TRUE;
821 GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
822
823 switch (GST_QUERY_TYPE (query)) {
824 case GST_QUERY_CAPS:{
825 GstCaps *filter, *result = NULL;
826 GstPad *pad = GST_VIDEO_DECODER_SRC_PAD (decoder);
827
828 gst_query_parse_caps (query, &filter);
829
830 if (self->probed_srccaps)
831 result = gst_caps_ref (self->probed_srccaps);
832 else
833 result = gst_pad_get_pad_template_caps (pad);
834
835 if (filter) {
836 GstCaps *tmp = result;
837 result =
838 gst_caps_intersect_full (filter, tmp, GST_CAPS_INTERSECT_FIRST);
839 gst_caps_unref (tmp);
840 }
841
842 GST_DEBUG_OBJECT (self, "Returning src caps %" GST_PTR_FORMAT, result);
843
844 gst_query_set_caps_result (query, result);
845 gst_caps_unref (result);
846 break;
847 }
848
849 default:
850 ret = GST_VIDEO_DECODER_CLASS (parent_class)->src_query (decoder, query);
851 break;
852 }
853
854 return ret;
855 }
856
857 static GstCaps *
gst_v4l2_video_dec_sink_getcaps(GstVideoDecoder * decoder,GstCaps * filter)858 gst_v4l2_video_dec_sink_getcaps (GstVideoDecoder * decoder, GstCaps * filter)
859 {
860 GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
861 GstCaps *result;
862
863 result = gst_video_decoder_proxy_getcaps (decoder, self->probed_sinkcaps,
864 filter);
865
866 GST_DEBUG_OBJECT (self, "Returning sink caps %" GST_PTR_FORMAT, result);
867
868 return result;
869 }
870
871 static gboolean
gst_v4l2_video_dec_sink_event(GstVideoDecoder * decoder,GstEvent * event)872 gst_v4l2_video_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
873 {
874 GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
875 gboolean ret;
876 GstEventType type = GST_EVENT_TYPE (event);
877
878 switch (type) {
879 case GST_EVENT_FLUSH_START:
880 GST_DEBUG_OBJECT (self, "flush start");
881 gst_v4l2_object_unlock (self->v4l2output);
882 gst_v4l2_object_unlock (self->v4l2capture);
883 break;
884 default:
885 break;
886 }
887
888 ret = GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (decoder, event);
889
890 switch (type) {
891 case GST_EVENT_FLUSH_START:
892 /* The processing thread should stop now, wait for it */
893 gst_pad_stop_task (decoder->srcpad);
894 GST_DEBUG_OBJECT (self, "flush start done");
895 break;
896 default:
897 break;
898 }
899
900 return ret;
901 }
902
903 static GstStateChangeReturn
gst_v4l2_video_dec_change_state(GstElement * element,GstStateChange transition)904 gst_v4l2_video_dec_change_state (GstElement * element,
905 GstStateChange transition)
906 {
907 GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (element);
908 GstVideoDecoder *decoder = GST_VIDEO_DECODER (element);
909
910 if (transition == GST_STATE_CHANGE_PAUSED_TO_READY) {
911 g_atomic_int_set (&self->active, FALSE);
912 gst_v4l2_object_unlock (self->v4l2output);
913 gst_v4l2_object_unlock (self->v4l2capture);
914 gst_pad_stop_task (decoder->srcpad);
915 }
916
917 return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
918 }
919
920 static void
gst_v4l2_video_dec_dispose(GObject * object)921 gst_v4l2_video_dec_dispose (GObject * object)
922 {
923 GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (object);
924
925 gst_caps_replace (&self->probed_sinkcaps, NULL);
926 gst_caps_replace (&self->probed_srccaps, NULL);
927
928 G_OBJECT_CLASS (parent_class)->dispose (object);
929 }
930
931 static void
gst_v4l2_video_dec_finalize(GObject * object)932 gst_v4l2_video_dec_finalize (GObject * object)
933 {
934 GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (object);
935
936 gst_v4l2_object_destroy (self->v4l2capture);
937 gst_v4l2_object_destroy (self->v4l2output);
938
939 G_OBJECT_CLASS (parent_class)->finalize (object);
940 }
941
static void
gst_v4l2_video_dec_init (GstV4l2VideoDec * self)
{
  /* Intentionally empty: the V4L2 objects are created per registered
   * subclass in gst_v4l2_video_dec_subinstance_init(). */
}
947
/* Per-subclass instance init: creates the output (sink side, encoded) and
 * capture (src side, raw) GstV4l2Objects bound to the subclass' default
 * device. */
static void
gst_v4l2_video_dec_subinstance_init (GTypeInstance * instance, gpointer g_class)
{
  GstV4l2VideoDecClass *klass = GST_V4L2_VIDEO_DEC_CLASS (g_class);
  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (instance);
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (instance);

  /* Input is expected to be parsed/packetized upstream */
  gst_video_decoder_set_packetized (decoder, TRUE);

  self->v4l2output = gst_v4l2_object_new (GST_ELEMENT (self),
      GST_OBJECT (GST_VIDEO_DECODER_SINK_PAD (self)),
      V4L2_BUF_TYPE_VIDEO_OUTPUT, klass->default_device,
      gst_v4l2_get_output, gst_v4l2_set_output, NULL);
  /* The encoded format is only set once caps arrive, and aspect handling
   * is left to downstream */
  self->v4l2output->no_initial_format = TRUE;
  self->v4l2output->keep_aspect = FALSE;

  self->v4l2capture = gst_v4l2_object_new (GST_ELEMENT (self),
      GST_OBJECT (GST_VIDEO_DECODER_SRC_PAD (self)),
      V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
      gst_v4l2_get_input, gst_v4l2_set_input, NULL);
}
969
/* Class init for the abstract base type: wires up all GObject, GstElement
 * and GstVideoDecoder vmethods and installs the shared M2M properties. */
static void
gst_v4l2_video_dec_class_init (GstV4l2VideoDecClass * klass)
{
  GstElementClass *element_class;
  GObjectClass *gobject_class;
  GstVideoDecoderClass *video_decoder_class;

  parent_class = g_type_class_peek_parent (klass);

  element_class = (GstElementClass *) klass;
  gobject_class = (GObjectClass *) klass;
  video_decoder_class = (GstVideoDecoderClass *) klass;

  GST_DEBUG_CATEGORY_INIT (gst_v4l2_video_dec_debug, "v4l2videodec", 0,
      "V4L2 Video Decoder");

  gobject_class->dispose = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_dispose);
  gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_finalize);
  gobject_class->set_property =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_set_property);
  gobject_class->get_property =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_get_property);

  video_decoder_class->open = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_open);
  video_decoder_class->close = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_close);
  video_decoder_class->start = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_start);
  video_decoder_class->stop = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_stop);
  video_decoder_class->finish = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_finish);
  video_decoder_class->flush = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_flush);
  video_decoder_class->drain = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_drain);
  video_decoder_class->set_format =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_set_format);
  video_decoder_class->negotiate =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_negotiate);
  video_decoder_class->decide_allocation =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_decide_allocation);
  /* FIXME propose_allocation or not ? */
  video_decoder_class->handle_frame =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_handle_frame);
  video_decoder_class->getcaps =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_sink_getcaps);
  video_decoder_class->src_query =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_src_query);
  video_decoder_class->sink_event =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_sink_event);

  element_class->change_state =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_change_state);

  /* io-mode and friends, shared between all M2M elements */
  gst_v4l2_object_install_m2m_properties_helper (gobject_class);
}
1021
1022 static void
gst_v4l2_video_dec_subclass_init(gpointer g_class,gpointer data)1023 gst_v4l2_video_dec_subclass_init (gpointer g_class, gpointer data)
1024 {
1025 GstV4l2VideoDecClass *klass = GST_V4L2_VIDEO_DEC_CLASS (g_class);
1026 GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
1027 GstV4l2VideoDecCData *cdata = data;
1028
1029 klass->default_device = cdata->device;
1030
1031 /* Note: gst_pad_template_new() take the floating ref from the caps */
1032 gst_element_class_add_pad_template (element_class,
1033 gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
1034 cdata->sink_caps));
1035 gst_element_class_add_pad_template (element_class,
1036 gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
1037 cdata->src_caps));
1038
1039 gst_element_class_set_static_metadata (element_class, cdata->longname,
1040 "Codec/Decoder/Video/Hardware", cdata->description,
1041 "Nicolas Dufresne <nicolas.dufresne@collabora.com>");
1042
1043 gst_caps_unref (cdata->sink_caps);
1044 gst_caps_unref (cdata->src_caps);
1045 g_free (cdata);
1046 }
1047
1048 /* Probing functions */
1049 gboolean
gst_v4l2_is_video_dec(GstCaps * sink_caps,GstCaps * src_caps)1050 gst_v4l2_is_video_dec (GstCaps * sink_caps, GstCaps * src_caps)
1051 {
1052 gboolean ret = FALSE;
1053
1054 if (gst_caps_is_subset (sink_caps, gst_v4l2_object_get_codec_caps ())
1055 && gst_caps_is_subset (src_caps, gst_v4l2_object_get_raw_caps ()))
1056 ret = TRUE;
1057
1058 return ret;
1059 }
1060
1061 static gchar *
gst_v4l2_video_dec_set_metadata(GstStructure * s,GstV4l2VideoDecCData * cdata,const gchar * basename)1062 gst_v4l2_video_dec_set_metadata (GstStructure * s, GstV4l2VideoDecCData * cdata,
1063 const gchar * basename)
1064 {
1065 gchar *codec_name = NULL;
1066 gchar *type_name = NULL;
1067
1068 #define SET_META(codec) \
1069 G_STMT_START { \
1070 cdata->longname = "V4L2 " codec " Decoder"; \
1071 cdata->description = "Decodes " codec " streams via V4L2 API"; \
1072 codec_name = g_ascii_strdown (codec, -1); \
1073 } G_STMT_END
1074
1075 if (gst_structure_has_name (s, "image/jpeg")) {
1076 SET_META ("JPEG");
1077 } else if (gst_structure_has_name (s, "video/mpeg")) {
1078 gint mpegversion = 0;
1079 gst_structure_get_int (s, "mpegversion", &mpegversion);
1080
1081 if (mpegversion == 2) {
1082 SET_META ("MPEG2");
1083 } else {
1084 SET_META ("MPEG4");
1085 }
1086 } else if (gst_structure_has_name (s, "video/x-h263")) {
1087 SET_META ("H263");
1088 } else if (gst_structure_has_name (s, "video/x-fwht")) {
1089 SET_META ("FWHT");
1090 } else if (gst_structure_has_name (s, "video/x-h264")) {
1091 SET_META ("H264");
1092 } else if (gst_structure_has_name (s, "video/x-h265")) {
1093 SET_META ("H265");
1094 } else if (gst_structure_has_name (s, "video/x-wmv")) {
1095 SET_META ("VC1");
1096 } else if (gst_structure_has_name (s, "video/x-vp8")) {
1097 SET_META ("VP8");
1098 } else if (gst_structure_has_name (s, "video/x-vp9")) {
1099 SET_META ("VP9");
1100 } else if (gst_structure_has_name (s, "video/x-bayer")) {
1101 SET_META ("BAYER");
1102 } else if (gst_structure_has_name (s, "video/x-sonix")) {
1103 SET_META ("SONIX");
1104 } else if (gst_structure_has_name (s, "video/x-pwc1")) {
1105 SET_META ("PWC1");
1106 } else if (gst_structure_has_name (s, "video/x-pwc2")) {
1107 SET_META ("PWC2");
1108 } else {
1109 /* This code should be kept on sync with the exposed CODEC type of format
1110 * from gstv4l2object.c. This warning will only occure in case we forget
1111 * to also add a format here. */
1112 gchar *s_str = gst_structure_to_string (s);
1113 g_warning ("Missing fixed name mapping for caps '%s', this is a GStreamer "
1114 "bug, please report at https://bugs.gnome.org", s_str);
1115 g_free (s_str);
1116 }
1117
1118 if (codec_name) {
1119 type_name = g_strdup_printf ("v4l2%sdec", codec_name);
1120 if (g_type_from_name (type_name) != 0) {
1121 g_free (type_name);
1122 type_name = g_strdup_printf ("v4l2%s%sdec", basename, codec_name);
1123 }
1124
1125 g_free (codec_name);
1126 }
1127
1128 return type_name;
1129 #undef SET_META
1130 }
1131
1132 void
gst_v4l2_video_dec_register(GstPlugin * plugin,const gchar * basename,const gchar * device_path,GstCaps * sink_caps,GstCaps * src_caps)1133 gst_v4l2_video_dec_register (GstPlugin * plugin, const gchar * basename,
1134 const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps)
1135 {
1136 gint i;
1137
1138 for (i = 0; i < gst_caps_get_size (sink_caps); i++) {
1139 GstV4l2VideoDecCData *cdata;
1140 GstStructure *s;
1141 GTypeQuery type_query;
1142 GTypeInfo type_info = { 0, };
1143 GType type, subtype;
1144 gchar *type_name;
1145
1146 s = gst_caps_get_structure (sink_caps, i);
1147
1148 cdata = g_new0 (GstV4l2VideoDecCData, 1);
1149 cdata->device = g_strdup (device_path);
1150 cdata->sink_caps = gst_caps_new_empty ();
1151 gst_caps_append_structure (cdata->sink_caps, gst_structure_copy (s));
1152 cdata->src_caps = gst_caps_ref (src_caps);
1153 type_name = gst_v4l2_video_dec_set_metadata (s, cdata, basename);
1154
1155 /* Skip over if we hit an unmapped type */
1156 if (!type_name) {
1157 g_free (cdata);
1158 continue;
1159 }
1160
1161 type = gst_v4l2_video_dec_get_type ();
1162 g_type_query (type, &type_query);
1163 memset (&type_info, 0, sizeof (type_info));
1164 type_info.class_size = type_query.class_size;
1165 type_info.instance_size = type_query.instance_size;
1166 type_info.class_init = gst_v4l2_video_dec_subclass_init;
1167 type_info.class_data = cdata;
1168 type_info.instance_init = gst_v4l2_video_dec_subinstance_init;
1169
1170 subtype = g_type_register_static (type, type_name, &type_info, 0);
1171 if (!gst_element_register (plugin, type_name, GST_RANK_PRIMARY + 1,
1172 subtype))
1173 GST_WARNING ("Failed to register plugin '%s'", type_name);
1174
1175 g_free (type_name);
1176 }
1177 }
1178