/*
 * Copyright (C) 2014-2017 SUMOMO Computer Association
 *     Authors Ayaka <ayaka@soulik.info>
 * Copyright (C) 2017 Collabora Ltd.
 *     Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 *
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#include <unistd.h>
#include <string.h>

#include "gstv4l2object.h"
#include "gstv4l2videoenc.h"

#include <string.h>
#include <gst/gst-i18n-plugin.h>

GST_DEBUG_CATEGORY_STATIC (gst_v4l2_video_enc_debug);
#define GST_CAT_DEFAULT gst_v4l2_video_enc_debug

typedef struct
{
  gchar *device;
  GstCaps *sink_caps;
  GstCaps *src_caps;
} GstV4l2VideoEncCData;

enum
{
  PROP_0,
  V4L2_STD_OBJECT_PROPS,
};

#define gst_v4l2_video_enc_parent_class parent_class
G_DEFINE_ABSTRACT_TYPE (GstV4l2VideoEnc, gst_v4l2_video_enc,
    GST_TYPE_VIDEO_ENCODER);

static void
gst_v4l2_video_enc_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (object);

  switch (prop_id) {
    case PROP_CAPTURE_IO_MODE:
      if (!gst_v4l2_object_set_property_helper (self->v4l2capture,
              prop_id, value, pspec)) {
        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      }
      break;

      /* By default, only set on output */
    default:
      if (!gst_v4l2_object_set_property_helper (self->v4l2output,
              prop_id, value, pspec)) {
        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      }
      break;
  }
}

static void
gst_v4l2_video_enc_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (object);

  switch (prop_id) {
    case PROP_CAPTURE_IO_MODE:
      if (!gst_v4l2_object_get_property_helper (self->v4l2capture,
              prop_id, value, pspec)) {
        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      }
      break;

      /* By default read from output */
    default:
      if (!gst_v4l2_object_get_property_helper (self->v4l2output,
              prop_id, value, pspec)) {
        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      }
      break;
  }
}

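/* GstVideoEncoder::open(): opens the V4L2 output (raw input) queue, shares
 * the same file descriptor with the capture (encoded output) queue, and
 * probes the raw and encoded formats supported by the device. */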
static gboolean
gst_v4l2_video_enc_open (GstVideoEncoder * encoder)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  GstCaps *codec_caps;

  GST_DEBUG_OBJECT (self, "Opening");

  if (!gst_v4l2_object_open (self->v4l2output))
    goto failure;

  if (!gst_v4l2_object_open_shared (self->v4l2capture, self->v4l2output))
    goto failure;

  self->probed_sinkcaps = gst_v4l2_object_probe_caps (self->v4l2output,
      gst_v4l2_object_get_raw_caps ());

  if (gst_caps_is_empty (self->probed_sinkcaps))
    goto no_raw_format;

  codec_caps = gst_pad_get_pad_template_caps (encoder->srcpad);
  self->probed_srccaps = gst_v4l2_object_probe_caps (self->v4l2capture,
      codec_caps);
  gst_caps_unref (codec_caps);

  if (gst_caps_is_empty (self->probed_srccaps))
    goto no_encoded_format;

  return TRUE;

no_encoded_format:
  GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
      (_("Encoder on device %s has no supported output format"),
          self->v4l2output->videodev), (NULL));
  goto failure;

no_raw_format:
  GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
      (_("Encoder on device %s has no supported input format"),
          self->v4l2output->videodev), (NULL));
  goto failure;

failure:
  if (GST_V4L2_IS_OPEN (self->v4l2output))
    gst_v4l2_object_close (self->v4l2output);

  if (GST_V4L2_IS_OPEN (self->v4l2capture))
    gst_v4l2_object_close (self->v4l2capture);

  gst_caps_replace (&self->probed_srccaps, NULL);
  gst_caps_replace (&self->probed_sinkcaps, NULL);

  return FALSE;
}

static gboolean
gst_v4l2_video_enc_close (GstVideoEncoder * encoder)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);

  GST_DEBUG_OBJECT (self, "Closing");

  gst_v4l2_object_close (self->v4l2output);
  gst_v4l2_object_close (self->v4l2capture);
  gst_caps_replace (&self->probed_srccaps, NULL);
  gst_caps_replace (&self->probed_sinkcaps, NULL);

  return TRUE;
}

static gboolean
gst_v4l2_video_enc_start (GstVideoEncoder * encoder)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);

  GST_DEBUG_OBJECT (self, "Starting");

  gst_v4l2_object_unlock (self->v4l2output);
  g_atomic_int_set (&self->active, TRUE);
  self->output_flow = GST_FLOW_OK;

  return TRUE;
}

static gboolean
gst_v4l2_video_enc_stop (GstVideoEncoder * encoder)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);

  GST_DEBUG_OBJECT (self, "Stopping");

  gst_v4l2_object_unlock (self->v4l2output);
  gst_v4l2_object_unlock (self->v4l2capture);

  /* Wait for capture thread to stop */
  gst_pad_stop_task (encoder->srcpad);

  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
  self->output_flow = GST_FLOW_OK;
  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

  /* Should have been flushed already */
  g_assert (g_atomic_int_get (&self->active) == FALSE);
  g_assert (g_atomic_int_get (&self->processing) == FALSE);

  gst_v4l2_object_stop (self->v4l2output);
  gst_v4l2_object_stop (self->v4l2capture);

  if (self->input_state) {
    gst_video_codec_state_unref (self->input_state);
    self->input_state = NULL;
  }

  GST_DEBUG_OBJECT (self, "Stopped");

  return TRUE;
}

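/* Thin wrapper around the VIDIOC_ENCODER_CMD ioctl; returns FALSE if the
 * device is not open or the driver rejects the command. */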
static gboolean
gst_v4l2_encoder_cmd (GstV4l2Object * v4l2object, guint cmd, guint flags)
{
  struct v4l2_encoder_cmd ecmd = { 0, };

  GST_DEBUG_OBJECT (v4l2object->element,
      "sending v4l2 encoder command %u with flags %u", cmd, flags);

  if (!GST_V4L2_IS_OPEN (v4l2object))
    return FALSE;

  ecmd.cmd = cmd;
  ecmd.flags = flags;
  if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_ENCODER_CMD, &ecmd) < 0)
    goto ecmd_failed;

  return TRUE;

ecmd_failed:
  if (errno == ENOTTY) {
    GST_INFO_OBJECT (v4l2object->element,
        "Failed to send encoder command %u with flags %u for '%s'. (%s)",
        cmd, flags, v4l2object->videodev, g_strerror (errno));
  } else {
    GST_ERROR_OBJECT (v4l2object->element,
        "Failed to send encoder command %u with flags %u for '%s'. (%s)",
        cmd, flags, v4l2object->videodev, g_strerror (errno));
  }
  return FALSE;
}

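/* GstVideoEncoder::finish(): drains the encoder by sending V4L2_ENC_CMD_STOP
 * and waits for the capture task to push out the remaining buffers. */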
static GstFlowReturn
gst_v4l2_video_enc_finish (GstVideoEncoder * encoder)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  GstFlowReturn ret = GST_FLOW_OK;

  if (gst_pad_get_task_state (encoder->srcpad) != GST_TASK_STARTED)
    goto done;

  GST_DEBUG_OBJECT (self, "Finishing encoding");

  /* drop the stream lock while draining, so remaining buffers can be
   * pushed from the src pad task thread */
  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

  if (gst_v4l2_encoder_cmd (self->v4l2capture, V4L2_ENC_CMD_STOP, 0)) {
    GstTask *task = encoder->srcpad->task;

    /* Wait for the task to be drained */
    GST_DEBUG_OBJECT (self, "Waiting for encoder stop");
    GST_OBJECT_LOCK (task);
    while (GST_TASK_STATE (task) == GST_TASK_STARTED)
      GST_TASK_WAIT (task);
    GST_OBJECT_UNLOCK (task);
    ret = GST_FLOW_FLUSHING;
  }

  /* and ensure the processing thread has stopped in case another error
   * occurred. */
  gst_v4l2_object_unlock (self->v4l2capture);
  gst_pad_stop_task (encoder->srcpad);
  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

  if (ret == GST_FLOW_FLUSHING)
    ret = self->output_flow;

  GST_DEBUG_OBJECT (encoder, "Done draining buffers");

done:
  return ret;
}

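/* GstVideoEncoder::set_format(): configures the V4L2 output queue with the
 * new input caps, draining and resetting the device first if the format
 * actually changed. */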
static gboolean
gst_v4l2_video_enc_set_format (GstVideoEncoder * encoder,
    GstVideoCodecState * state)
{
  gboolean ret = TRUE;
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  GstV4l2Error error = GST_V4L2_ERROR_INIT;
  GstCaps *outcaps;
  GstVideoCodecState *output;

  GST_DEBUG_OBJECT (self, "Setting format: %" GST_PTR_FORMAT, state->caps);

  if (self->input_state) {
    if (gst_v4l2_object_caps_equal (self->v4l2output, state->caps)) {
      GST_DEBUG_OBJECT (self, "Compatible caps");
      return TRUE;
    }

    if (gst_v4l2_video_enc_finish (encoder) != GST_FLOW_OK)
      return FALSE;

    gst_v4l2_object_stop (self->v4l2output);
    gst_v4l2_object_stop (self->v4l2capture);

    gst_video_codec_state_unref (self->input_state);
    self->input_state = NULL;
  }

  outcaps = gst_pad_get_pad_template_caps (encoder->srcpad);
  outcaps = gst_caps_make_writable (outcaps);
  output = gst_video_encoder_set_output_state (encoder, outcaps, state);
  gst_video_codec_state_unref (output);

  if (!gst_video_encoder_negotiate (encoder))
    return FALSE;

  if (!gst_v4l2_object_set_format (self->v4l2output, state->caps, &error)) {
    gst_v4l2_error (self, &error);
    return FALSE;
  }

  self->input_state = gst_video_codec_state_ref (state);

  GST_DEBUG_OBJECT (self, "output caps: %" GST_PTR_FORMAT, state->caps);

  return ret;
}

static gboolean
gst_v4l2_video_enc_flush (GstVideoEncoder * encoder)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);

  GST_DEBUG_OBJECT (self, "Flushing");

  /* Ensure the processing thread has stopped for the reverse playback
   * discont case */
  if (g_atomic_int_get (&self->processing)) {
    GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

    gst_v4l2_object_unlock_stop (self->v4l2output);
    gst_v4l2_object_unlock_stop (self->v4l2capture);
    gst_pad_stop_task (encoder->srcpad);

    GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
  }

  self->output_flow = GST_FLOW_OK;

  gst_v4l2_object_unlock_stop (self->v4l2output);
  gst_v4l2_object_unlock_stop (self->v4l2capture);

  return TRUE;
}

struct ProfileLevelCtx
{
  GstV4l2VideoEnc *self;
  const gchar *profile;
  const gchar *level;
};

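/* Collects the string (or list-of-strings) value of @field from @s into
 * @queue; returns FALSE if the field is missing or the list is empty. */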
static gboolean
get_string_list (GstStructure * s, const gchar * field, GQueue * queue)
{
  const GValue *value;

  value = gst_structure_get_value (s, field);

  if (!value)
    return FALSE;

  if (GST_VALUE_HOLDS_LIST (value)) {
    guint i;

    if (gst_value_list_get_size (value) == 0)
      return FALSE;

    for (i = 0; i < gst_value_list_get_size (value); i++) {
      const GValue *item = gst_value_list_get_value (value, i);

      if (G_VALUE_HOLDS_STRING (item))
        g_queue_push_tail (queue, g_value_dup_string (item));
    }
  } else if (G_VALUE_HOLDS_STRING (value)) {
    g_queue_push_tail (queue, g_value_dup_string (value));
  }

  return TRUE;
}

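/* gst_caps_foreach() callback: tries to program the profile and level
 * advertised in one caps structure through the V4L2 profile/level controls.
 * Returns TRUE (so iteration continues with the next structure) when nothing
 * in this structure could be applied. */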
static gboolean
negotiate_profile_and_level (GstCapsFeatures * features, GstStructure * s,
    gpointer user_data)
{
  struct ProfileLevelCtx *ctx = user_data;
  GstV4l2VideoEncClass *klass = GST_V4L2_VIDEO_ENC_GET_CLASS (ctx->self);
  GstV4l2Object *v4l2object = GST_V4L2_VIDEO_ENC (ctx->self)->v4l2output;
  GQueue profiles = G_QUEUE_INIT;
  GQueue levels = G_QUEUE_INIT;
  gboolean failed = FALSE;

  if (klass->profile_cid && get_string_list (s, "profile", &profiles)) {
    GList *l;

    for (l = profiles.head; l; l = l->next) {
      struct v4l2_control control = { 0, };
      gint v4l2_profile;
      const gchar *profile = l->data;

      GST_TRACE_OBJECT (ctx->self, "Trying profile %s", profile);

      control.id = klass->profile_cid;
      control.value = v4l2_profile = klass->profile_from_string (profile);

      if (control.value < 0)
        continue;

      if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_CTRL, &control) < 0) {
        GST_WARNING_OBJECT (ctx->self, "Failed to set %s profile: '%s'",
            klass->codec_name, g_strerror (errno));
        break;
      }

      profile = klass->profile_to_string (control.value);

      if (control.value == v4l2_profile) {
        ctx->profile = profile;
        break;
      }

      if (g_list_find_custom (l, profile, g_str_equal)) {
        ctx->profile = profile;
        break;
      }
    }

    if (profiles.length && !ctx->profile)
      failed = TRUE;

    g_queue_foreach (&profiles, (GFunc) g_free, NULL);
    g_queue_clear (&profiles);
  }

  if (!failed && klass->level_cid && get_string_list (s, "level", &levels)) {
    GList *l;

    for (l = levels.head; l; l = l->next) {
      struct v4l2_control control = { 0, };
      gint v4l2_level;
      const gchar *level = l->data;

      GST_TRACE_OBJECT (ctx->self, "Trying level %s", level);

      control.id = klass->level_cid;
      control.value = v4l2_level = klass->level_from_string (level);

      if (control.value < 0)
        continue;

      if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_CTRL, &control) < 0) {
        GST_WARNING_OBJECT (ctx->self, "Failed to set %s level: '%s'",
            klass->codec_name, g_strerror (errno));
        break;
      }

      level = klass->level_to_string (control.value);

      if (control.value == v4l2_level) {
        ctx->level = level;
        break;
      }

      if (g_list_find_custom (l, level, g_str_equal)) {
        ctx->level = level;
        break;
      }
    }

    if (levels.length && !ctx->level)
      failed = TRUE;

    g_queue_foreach (&levels, (GFunc) g_free, NULL);
    g_queue_clear (&levels);
  }

  /* If it failed, we continue */
  return failed;
}

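/* GstVideoEncoder::negotiate(): picks a profile and level compatible with
 * the downstream caps (or reads back the driver defaults) and advertises
 * them in the output caps before chaining up. */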
static gboolean
gst_v4l2_video_enc_negotiate (GstVideoEncoder * encoder)
{
  GstV4l2VideoEncClass *klass = GST_V4L2_VIDEO_ENC_GET_CLASS (encoder);
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  GstV4l2Object *v4l2object = self->v4l2output;
  GstCaps *allowed_caps;
  struct ProfileLevelCtx ctx = { self, NULL, NULL };
  GstVideoCodecState *state;
  GstStructure *s;

  GST_DEBUG_OBJECT (self, "Negotiating %s profile and level.",
      klass->codec_name);

  /* Only renegotiate on upstream changes */
  if (self->input_state)
    return TRUE;

  allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));

  if (allowed_caps) {

    if (gst_caps_is_empty (allowed_caps))
      goto not_negotiated;

    allowed_caps = gst_caps_make_writable (allowed_caps);

    /* negotiate_profile_and_level() will return TRUE on failure to keep
     * iterating; if gst_caps_foreach() returns TRUE it means there was no
     * compatible profile and level in any of the structures */
    if (gst_caps_foreach (allowed_caps, negotiate_profile_and_level, &ctx)) {
      goto no_profile_level;
    }
  }

  if (klass->profile_cid && !ctx.profile) {
    struct v4l2_control control = { 0, };

    control.id = klass->profile_cid;

    if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_CTRL, &control) < 0)
      goto g_ctrl_failed;

    ctx.profile = klass->profile_to_string (control.value);
  }

  if (klass->level_cid && !ctx.level) {
    struct v4l2_control control = { 0, };

    control.id = klass->level_cid;

    if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_CTRL, &control) < 0)
      goto g_ctrl_failed;

    ctx.level = klass->level_to_string (control.value);
  }

  GST_DEBUG_OBJECT (self, "Selected %s profile %s at level %s",
      klass->codec_name, ctx.profile, ctx.level);

  state = gst_video_encoder_get_output_state (encoder);
  s = gst_caps_get_structure (state->caps, 0);

  if (klass->profile_cid)
    gst_structure_set (s, "profile", G_TYPE_STRING, ctx.profile, NULL);

  if (klass->level_cid)
    gst_structure_set (s, "level", G_TYPE_STRING, ctx.level, NULL);

  if (!GST_VIDEO_ENCODER_CLASS (parent_class)->negotiate (encoder))
    return FALSE;

  return TRUE;

g_ctrl_failed:
  GST_WARNING_OBJECT (self, "Failed to get %s profile and level: '%s'",
      klass->codec_name, g_strerror (errno));
  goto not_negotiated;

no_profile_level:
  GST_WARNING_OBJECT (self, "No compatible level and profile in caps: %"
      GST_PTR_FORMAT, allowed_caps);
  goto not_negotiated;

not_negotiated:
  if (allowed_caps)
    gst_caps_unref (allowed_caps);
  return FALSE;
}

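/* Returns (with an extra ref) the pending frame with the lowest PTS, which
 * is assumed to be the one the driver just finished encoding. */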
static GstVideoCodecFrame *
gst_v4l2_video_enc_get_oldest_frame (GstVideoEncoder * encoder)
{
  GstVideoCodecFrame *frame = NULL;
  GList *frames, *l;
  gint count = 0;

  frames = gst_video_encoder_get_frames (encoder);

  for (l = frames; l != NULL; l = l->next) {
    GstVideoCodecFrame *f = l->data;

    if (!frame || frame->pts > f->pts)
      frame = f;

    count++;
  }

  if (frame) {
    GST_LOG_OBJECT (encoder,
        "Oldest frame is %d %" GST_TIME_FORMAT
        " and %d frames left",
        frame->system_frame_number, GST_TIME_ARGS (frame->pts), count - 1);
    gst_video_codec_frame_ref (frame);
  }

  g_list_free_full (frames, (GDestroyNotify) gst_video_codec_frame_unref);

  return frame;
}

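/* Src pad task: dequeues one encoded buffer from the capture queue, attaches
 * it to the oldest pending frame and pushes it downstream. The task pauses
 * itself on any error or when flushing. */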
static void
gst_v4l2_video_enc_loop (GstVideoEncoder * encoder)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  GstVideoCodecFrame *frame;
  GstBuffer *buffer = NULL;
  GstFlowReturn ret;

  GST_LOG_OBJECT (encoder, "Allocate output buffer");

  buffer = gst_video_encoder_allocate_output_buffer (encoder,
      self->v4l2capture->info.size);

  if (NULL == buffer) {
    ret = GST_FLOW_FLUSHING;
    goto beach;
  }

  /* FIXME Check if buffer isn't the last one here */

  GST_LOG_OBJECT (encoder, "Process output buffer");
  ret =
      gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL
      (self->v4l2capture->pool), &buffer);

  if (ret != GST_FLOW_OK)
    goto beach;

  frame = gst_v4l2_video_enc_get_oldest_frame (encoder);

  if (frame) {
    /* At this point, the delta unit buffer flag is already correctly set by
     * gst_v4l2_buffer_pool_process. Since gst_video_encoder_finish_frame
     * will overwrite it from GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame),
     * set that here.
     */
    if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT))
      GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT (frame);
    else
      GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
    frame->output_buffer = buffer;
    buffer = NULL;
    ret = gst_video_encoder_finish_frame (encoder, frame);

    if (ret != GST_FLOW_OK)
      goto beach;
  } else {
    GST_WARNING_OBJECT (encoder, "Encoder is producing too many buffers");
    gst_buffer_unref (buffer);
  }

  return;

beach:
  GST_DEBUG_OBJECT (encoder, "Leaving output thread");

  gst_buffer_replace (&buffer, NULL);
  self->output_flow = ret;
  g_atomic_int_set (&self->processing, FALSE);
  gst_v4l2_object_unlock (self->v4l2output);
  gst_pad_pause_task (encoder->srcpad);
}

static void
gst_v4l2_video_enc_loop_stopped (GstV4l2VideoEnc * self)
{
  if (g_atomic_int_get (&self->processing)) {
    GST_DEBUG_OBJECT (self, "Early stop of encoding thread");
    self->output_flow = GST_FLOW_FLUSHING;
    g_atomic_int_set (&self->processing, FALSE);
  }

  GST_DEBUG_OBJECT (self, "Encoding task destroyed: %s",
      gst_flow_get_name (self->output_flow));
}

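/* GstVideoEncoder::handle_frame(): lazily activates the output and capture
 * buffer pools, starts the capture task if needed, then queues the raw input
 * buffer into the V4L2 output queue. */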
static GstFlowReturn
gst_v4l2_video_enc_handle_frame (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  GstFlowReturn ret = GST_FLOW_OK;
  GstTaskState task_state;

  GST_DEBUG_OBJECT (self, "Handling frame %d", frame->system_frame_number);

  if (G_UNLIKELY (!g_atomic_int_get (&self->active)))
    goto flushing;

  task_state = gst_pad_get_task_state (GST_VIDEO_ENCODER_SRC_PAD (self));
  if (task_state == GST_TASK_STOPPED || task_state == GST_TASK_PAUSED) {
    GstBufferPool *pool = GST_BUFFER_POOL (self->v4l2output->pool);

    /* It is possible that the processing thread stopped due to an error */
    if (self->output_flow != GST_FLOW_OK &&
        self->output_flow != GST_FLOW_FLUSHING) {
      GST_DEBUG_OBJECT (self, "Processing loop stopped with error, leaving");
      ret = self->output_flow;
      goto drop;
    }

    /* Ensure input internal pool is active */
    if (!gst_buffer_pool_is_active (pool)) {
      GstStructure *config = gst_buffer_pool_get_config (pool);
      guint min = MAX (self->v4l2output->min_buffers, GST_V4L2_MIN_BUFFERS);

      gst_buffer_pool_config_set_params (config, self->input_state->caps,
          self->v4l2output->info.size, min, min);

      /* There is no reason to refuse this config */
      if (!gst_buffer_pool_set_config (pool, config))
        goto activate_failed;

      if (!gst_buffer_pool_set_active (pool, TRUE))
        goto activate_failed;
    }

    if (!gst_buffer_pool_set_active
        (GST_BUFFER_POOL (self->v4l2capture->pool), TRUE)) {
      GST_WARNING_OBJECT (self, "Could not activate capture buffer pool.");
      goto activate_failed;
    }

    GST_DEBUG_OBJECT (self, "Starting encoding thread");

    /* Start the processing task, when it quits, the task will disable input
     * processing to unlock input if draining, or prevent potential block */
    if (!gst_pad_start_task (encoder->srcpad,
            (GstTaskFunction) gst_v4l2_video_enc_loop, self,
            (GDestroyNotify) gst_v4l2_video_enc_loop_stopped))
      goto start_task_failed;
  }

  if (frame->input_buffer) {
    GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
    ret =
        gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL
        (self->v4l2output->pool), &frame->input_buffer);
    GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

    if (ret == GST_FLOW_FLUSHING) {
      if (gst_pad_get_task_state (encoder->srcpad) != GST_TASK_STARTED)
        ret = self->output_flow;
      goto drop;
    } else if (ret != GST_FLOW_OK) {
      goto process_failed;
    }
  }

  gst_video_codec_frame_unref (frame);
  return ret;

  /* ERRORS */
activate_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
        (_("Failed to allocate required memory.")),
        ("Buffer pool activation failed"));
    return GST_FLOW_ERROR;
  }
flushing:
  {
    ret = GST_FLOW_FLUSHING;
    goto drop;
  }
start_task_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
        (_("Failed to start encoding thread.")), (NULL));
    g_atomic_int_set (&self->processing, FALSE);
    ret = GST_FLOW_ERROR;
    goto drop;
  }
process_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
        (_("Failed to process frame.")),
        ("Maybe due to not enough memory or failing driver"));
    ret = GST_FLOW_ERROR;
    goto drop;
  }
drop:
  {
    gst_video_encoder_finish_frame (encoder, frame);
    return ret;
  }
}

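/* GstVideoEncoder::decide_allocation(): sets the capture (encoded) format on
 * the driver now that the output state carries width/height/framerate, lets
 * the V4L2 object decide its allocation, and reports an estimated latency. */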
static gboolean
gst_v4l2_video_enc_decide_allocation (GstVideoEncoder *
    encoder, GstQuery * query)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  GstVideoCodecState *state = gst_video_encoder_get_output_state (encoder);
  GstCaps *caps;
  GstV4l2Error error = GST_V4L2_ERROR_INIT;
  GstClockTime latency;
  gboolean ret = FALSE;

  /* We need to set the format here, since this is called right after
   * GstVideoEncoder has set the width, height and framerate into the state
   * caps. These are needed by the driver to calculate the buffer size and to
   * implement bitrate adaptation. */
  caps = gst_caps_copy (state->caps);
  gst_structure_remove_field (gst_caps_get_structure (caps, 0), "colorimetry");
  if (!gst_v4l2_object_set_format (self->v4l2capture, caps, &error)) {
    gst_v4l2_error (self, &error);
    gst_caps_unref (caps);
    ret = FALSE;
    goto done;
  }
  gst_caps_unref (caps);

  if (gst_v4l2_object_decide_allocation (self->v4l2capture, query)) {
    GstVideoEncoderClass *enc_class = GST_VIDEO_ENCODER_CLASS (parent_class);
    ret = enc_class->decide_allocation (encoder, query);
  }

  /* FIXME This may not be entirely correct, as the encoder may keep some
   * observation without delaying the encoding. The Linux Media API needs
   * some more work to explicitly express the decoder / encoder latency. This
   * value will then become the max latency, and the reported driver latency
   * would become the min latency. */
  latency = self->v4l2capture->min_buffers * self->v4l2capture->duration;
  gst_video_encoder_set_latency (encoder, latency, latency);

done:
  gst_video_codec_state_unref (state);
  return ret;
}

static gboolean
gst_v4l2_video_enc_propose_allocation (GstVideoEncoder *
    encoder, GstQuery * query)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  gboolean ret = FALSE;

  GST_DEBUG_OBJECT (self, "called");

  if (query == NULL)
    ret = TRUE;
  else
    ret = gst_v4l2_object_propose_allocation (self->v4l2output, query);

  if (ret)
    ret = GST_VIDEO_ENCODER_CLASS (parent_class)->propose_allocation (encoder,
        query);

  return ret;
}

static gboolean
gst_v4l2_video_enc_src_query (GstVideoEncoder * encoder, GstQuery * query)
{
  gboolean ret = TRUE;
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CAPS:{
      GstCaps *filter, *result = NULL;
      GstPad *pad = GST_VIDEO_ENCODER_SRC_PAD (encoder);

      gst_query_parse_caps (query, &filter);

      /* FIXME Try to not probe the entire encoder, but only the format
       * implemented by the subclass */
      if (self->probed_srccaps) {
        GstCaps *tmpl = gst_pad_get_pad_template_caps (pad);
        result = gst_caps_intersect (tmpl, self->probed_srccaps);
        gst_caps_unref (tmpl);
      } else
        result = gst_pad_get_pad_template_caps (pad);

      if (filter) {
        GstCaps *tmp = result;
        result =
            gst_caps_intersect_full (filter, tmp, GST_CAPS_INTERSECT_FIRST);
        gst_caps_unref (tmp);
      }

      GST_DEBUG_OBJECT (self, "Returning src caps %" GST_PTR_FORMAT, result);

      gst_query_set_caps_result (query, result);
      gst_caps_unref (result);
      break;
    }

    default:
      ret = GST_VIDEO_ENCODER_CLASS (parent_class)->src_query (encoder, query);
      break;
  }

  return ret;
}

static gboolean
gst_v4l2_video_enc_sink_query (GstVideoEncoder * encoder, GstQuery * query)
{
  gboolean ret = TRUE;
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CAPS:{
      GstCaps *filter, *result = NULL;
      GstPad *pad = GST_VIDEO_ENCODER_SINK_PAD (encoder);

      gst_query_parse_caps (query, &filter);

      if (self->probed_sinkcaps)
        result = gst_caps_ref (self->probed_sinkcaps);
      else
        result = gst_pad_get_pad_template_caps (pad);

      if (filter) {
        GstCaps *tmp = result;
        result =
            gst_caps_intersect_full (filter, tmp, GST_CAPS_INTERSECT_FIRST);
        gst_caps_unref (tmp);
      }

      GST_DEBUG_OBJECT (self, "Returning sink caps %" GST_PTR_FORMAT, result);

      gst_query_set_caps_result (query, result);
      gst_caps_unref (result);
      break;
    }

    default:
      ret = GST_VIDEO_ENCODER_CLASS (parent_class)->sink_query (encoder, query);
      break;
  }

  return ret;
}

static gboolean
gst_v4l2_video_enc_sink_event (GstVideoEncoder * encoder, GstEvent * event)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  gboolean ret;
  GstEventType type = GST_EVENT_TYPE (event);

  switch (type) {
    case GST_EVENT_FLUSH_START:
      GST_DEBUG_OBJECT (self, "flush start");
      gst_v4l2_object_unlock (self->v4l2output);
      gst_v4l2_object_unlock (self->v4l2capture);
      break;
    default:
      break;
  }

  ret = GST_VIDEO_ENCODER_CLASS (parent_class)->sink_event (encoder, event);

  switch (type) {
    case GST_EVENT_FLUSH_START:
      gst_pad_stop_task (encoder->srcpad);
      GST_DEBUG_OBJECT (self, "flush start done");
      break;
    default:
      break;
  }

  return ret;
}

static GstStateChangeReturn
gst_v4l2_video_enc_change_state (GstElement * element,
    GstStateChange transition)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (element);

  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY) {
    g_atomic_int_set (&self->active, FALSE);
    gst_v4l2_object_unlock (self->v4l2output);
    gst_v4l2_object_unlock (self->v4l2capture);
  }

  return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
}

static void
gst_v4l2_video_enc_dispose (GObject * object)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (object);

  gst_caps_replace (&self->probed_sinkcaps, NULL);
  gst_caps_replace (&self->probed_srccaps, NULL);

  G_OBJECT_CLASS (parent_class)->dispose (object);
}

static void
gst_v4l2_video_enc_finalize (GObject * object)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (object);

  gst_v4l2_object_destroy (self->v4l2capture);
  gst_v4l2_object_destroy (self->v4l2output);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}

static void
gst_v4l2_video_enc_init (GstV4l2VideoEnc * self)
{
  /* V4L2 objects are created in subinstance_init */
}

static void
gst_v4l2_video_enc_subinstance_init (GTypeInstance * instance, gpointer g_class)
{
  GstV4l2VideoEncClass *klass = GST_V4L2_VIDEO_ENC_CLASS (g_class);
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (instance);

  self->v4l2output = gst_v4l2_object_new (GST_ELEMENT (self),
      GST_OBJECT (GST_VIDEO_ENCODER_SINK_PAD (self)),
      V4L2_BUF_TYPE_VIDEO_OUTPUT, klass->default_device,
      gst_v4l2_get_output, gst_v4l2_set_output, NULL);
  self->v4l2output->no_initial_format = TRUE;
  self->v4l2output->keep_aspect = FALSE;

  self->v4l2capture = gst_v4l2_object_new (GST_ELEMENT (self),
      GST_OBJECT (GST_VIDEO_ENCODER_SRC_PAD (self)),
      V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
      gst_v4l2_get_input, gst_v4l2_set_input, NULL);
}

static void
gst_v4l2_video_enc_class_init (GstV4l2VideoEncClass * klass)
{
  GstElementClass *element_class;
  GObjectClass *gobject_class;
  GstVideoEncoderClass *video_encoder_class;

  parent_class = g_type_class_peek_parent (klass);

  element_class = (GstElementClass *) klass;
  gobject_class = (GObjectClass *) klass;
  video_encoder_class = (GstVideoEncoderClass *) klass;

  GST_DEBUG_CATEGORY_INIT (gst_v4l2_video_enc_debug, "v4l2videoenc", 0,
      "V4L2 Video Encoder");

  gobject_class->dispose = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_dispose);
  gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_finalize);
  gobject_class->set_property =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_set_property);
  gobject_class->get_property =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_get_property);

  video_encoder_class->open = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_open);
  video_encoder_class->close = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_close);
  video_encoder_class->start = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_start);
  video_encoder_class->stop = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_stop);
  video_encoder_class->finish = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_finish);
  video_encoder_class->flush = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_flush);
  video_encoder_class->set_format =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_set_format);
  video_encoder_class->negotiate =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_negotiate);
  video_encoder_class->decide_allocation =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_decide_allocation);
  video_encoder_class->propose_allocation =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_propose_allocation);
  video_encoder_class->sink_query =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_sink_query);
  video_encoder_class->src_query =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_src_query);
  video_encoder_class->sink_event =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_sink_event);
  video_encoder_class->handle_frame =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_handle_frame);

  element_class->change_state =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_change_state);

  gst_v4l2_object_install_m2m_properties_helper (gobject_class);
}

static void
gst_v4l2_video_enc_subclass_init (gpointer g_class, gpointer data)
{
  GstV4l2VideoEncClass *klass = GST_V4L2_VIDEO_ENC_CLASS (g_class);
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
  GstV4l2VideoEncCData *cdata = data;

  klass->default_device = cdata->device;

  /* Note: gst_pad_template_new() takes the floating ref from the caps */
  gst_element_class_add_pad_template (element_class,
      gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
          cdata->sink_caps));
  gst_element_class_add_pad_template (element_class,
      gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
          cdata->src_caps));

  gst_caps_unref (cdata->sink_caps);
  gst_caps_unref (cdata->src_caps);
  g_free (cdata);
}

/* Probing functions */
gboolean
gst_v4l2_is_video_enc (GstCaps * sink_caps, GstCaps * src_caps,
    GstCaps * codec_caps)
{
  gboolean ret = FALSE;
  gboolean (*check_caps) (const GstCaps *, const GstCaps *);

  if (codec_caps) {
    check_caps = gst_caps_can_intersect;
  } else {
    codec_caps = gst_v4l2_object_get_codec_caps ();
    check_caps = gst_caps_is_subset;
  }

  if (gst_caps_is_subset (sink_caps, gst_v4l2_object_get_raw_caps ())
      && check_caps (src_caps, codec_caps))
    ret = TRUE;

  return ret;
}

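/* Registers a per-device encoder subclass of @type, passing the device path
 * and the probed sink/codec/src caps as class data to subclass_init(). */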
void
gst_v4l2_video_enc_register (GstPlugin * plugin, GType type,
    const char *codec, const gchar * basename, const gchar * device_path,
    GstCaps * sink_caps, GstCaps * codec_caps, GstCaps * src_caps)
{
  GstCaps *filtered_caps;
  GTypeQuery type_query;
  GTypeInfo type_info = { 0, };
  GType subtype;
  gchar *type_name;
  GstV4l2VideoEncCData *cdata;

  filtered_caps = gst_caps_intersect (src_caps, codec_caps);

  cdata = g_new0 (GstV4l2VideoEncCData, 1);
  cdata->device = g_strdup (device_path);
  cdata->sink_caps = gst_caps_ref (sink_caps);
  cdata->src_caps = gst_caps_ref (filtered_caps);

  g_type_query (type, &type_query);
  memset (&type_info, 0, sizeof (type_info));
  type_info.class_size = type_query.class_size;
  type_info.instance_size = type_query.instance_size;
  type_info.class_init = gst_v4l2_video_enc_subclass_init;
  type_info.class_data = cdata;
  type_info.instance_init = gst_v4l2_video_enc_subinstance_init;

  /* The first encoder to be registered should use a constant name, like
   * v4l2h264enc, for any additional encoders, we create unique names. Encoder
   * names may change between boots, so this should help gain stable names for
   * the most common use cases. */
  type_name = g_strdup_printf ("v4l2%senc", codec);

  if (g_type_from_name (type_name) != 0) {
    g_free (type_name);
    type_name = g_strdup_printf ("v4l2%s%senc", basename, codec);
  }

  subtype = g_type_register_static (type, type_name, &type_info, 0);

  if (!gst_element_register (plugin, type_name, GST_RANK_PRIMARY + 1, subtype))
    GST_WARNING ("Failed to register plugin '%s'", type_name);

  g_free (type_name);
}