1 /* Generic video aggregator plugin
2  * Copyright (C) 2004, 2008 Wim Taymans <wim@fluendo.com>
3  * Copyright (C) 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
4  *
5  * This library is free software; you can redistribute it and/or
6  * modify it under the terms of the GNU Library General Public
7  * License as published by the Free Software Foundation; either
8  * version 2 of the License, or (at your option) any later version.
9  *
10  * This library is distributed in the hope that it will be useful,
11  * but WITHOUT ANY WARRANTY; without even the implied warranty of
12  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13  * Library General Public License for more details.
14  *
15  * You should have received a copy of the GNU Library General Public
16  * License along with this library; if not, write to the
17  * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18  * Boston, MA 02110-1301, USA.
19  */
20 
21 /**
22  * SECTION:gstvideoaggregator
23  * @title: GstVideoAggregator
24  * @short_description: Base class for video aggregators
25  *
26  * VideoAggregator can accept AYUV, ARGB and BGRA video streams. For each of the requested
27  * sink pads it will compare the incoming geometry and framerate to define the
28  * output parameters. Indeed output video frames will have the geometry of the
29  * biggest incoming video stream and the framerate of the fastest incoming one.
30  *
31  * VideoAggregator will do colorspace conversion.
32  *
33  * Zorder for each input stream can be configured on the
34  * #GstVideoAggregatorPad.
35  *
36  */
37 
38 #ifdef HAVE_CONFIG_H
39 #include "config.h"
40 #endif
41 
42 #include <string.h>
43 
44 #include "gstvideoaggregator.h"
45 
/* Debug category used by all GST_* logging in this file */
GST_DEBUG_CATEGORY_STATIC (gst_video_aggregator_debug);
#define GST_CAT_DEFAULT gst_video_aggregator_debug

/* Needed prototypes */
static void gst_video_aggregator_reset_qos (GstVideoAggregator * vagg);
51 
52 /****************************************
53  * GstVideoAggregatorPad implementation *
54  ****************************************/
55 
/* Default values for the GstVideoAggregatorPad properties below */
#define DEFAULT_PAD_ZORDER 0
#define DEFAULT_PAD_REPEAT_AFTER_EOS FALSE
/* Property IDs for GstVideoAggregatorPad */
enum
{
  PROP_PAD_0,
  PROP_PAD_ZORDER,
  PROP_PAD_REPEAT_AFTER_EOS,
};
64 
65 
struct _GstVideoAggregatorPadPrivate
{
  /* Buffer selected for the current output frame; owned here and
   * released/replaced via gst_buffer_replace() (e.g. on flush) */
  GstBuffer *buffer;
  /* Mapped (and possibly converted) input frame for aggregation;
   * .buffer == NULL means no frame is currently prepared */
  GstVideoFrame prepared_frame;

  /* properties */
  guint zorder;
  gboolean repeat_after_eos;

  /* Subclasses can force an alpha channel in the (input thus output)
   * colorspace format */
  gboolean needs_alpha;

  /* Running-time window of the queued buffer; GST_CLOCK_TIME_NONE when
   * unset (both are reset to -1 on flush) */
  GstClockTime start_time;
  GstClockTime end_time;

  /* presumably a new input GstVideoInfo to be applied with a later buffer
   * (caps change) — not used in this chunk, confirm against the rest of
   * the file */
  GstVideoInfo pending_vinfo;
};


G_DEFINE_TYPE_WITH_PRIVATE (GstVideoAggregatorPad, gst_video_aggregator_pad,
    GST_TYPE_AGGREGATOR_PAD);
88 
89 static void
gst_video_aggregator_pad_get_property(GObject * object,guint prop_id,GValue * value,GParamSpec * pspec)90 gst_video_aggregator_pad_get_property (GObject * object, guint prop_id,
91     GValue * value, GParamSpec * pspec)
92 {
93   GstVideoAggregatorPad *pad = GST_VIDEO_AGGREGATOR_PAD (object);
94 
95   switch (prop_id) {
96     case PROP_PAD_ZORDER:
97       g_value_set_uint (value, pad->priv->zorder);
98       break;
99     case PROP_PAD_REPEAT_AFTER_EOS:
100       g_value_set_boolean (value, pad->priv->repeat_after_eos);
101       break;
102     default:
103       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
104       break;
105   }
106 }
107 
108 static int
pad_zorder_compare(const GstVideoAggregatorPad * pad1,const GstVideoAggregatorPad * pad2)109 pad_zorder_compare (const GstVideoAggregatorPad * pad1,
110     const GstVideoAggregatorPad * pad2)
111 {
112   return pad1->priv->zorder - pad2->priv->zorder;
113 }
114 
115 static void
gst_video_aggregator_pad_set_property(GObject * object,guint prop_id,const GValue * value,GParamSpec * pspec)116 gst_video_aggregator_pad_set_property (GObject * object, guint prop_id,
117     const GValue * value, GParamSpec * pspec)
118 {
119   GstVideoAggregatorPad *pad = GST_VIDEO_AGGREGATOR_PAD (object);
120   GstVideoAggregator *vagg =
121       GST_VIDEO_AGGREGATOR (gst_pad_get_parent (GST_PAD (pad)));
122 
123   switch (prop_id) {
124     case PROP_PAD_ZORDER:
125       GST_OBJECT_LOCK (vagg);
126       pad->priv->zorder = g_value_get_uint (value);
127       GST_ELEMENT (vagg)->sinkpads = g_list_sort (GST_ELEMENT (vagg)->sinkpads,
128           (GCompareFunc) pad_zorder_compare);
129       GST_OBJECT_UNLOCK (vagg);
130       break;
131     case PROP_PAD_REPEAT_AFTER_EOS:
132       pad->priv->repeat_after_eos = g_value_get_boolean (value);
133       break;
134     default:
135       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
136       break;
137   }
138 
139   gst_object_unref (vagg);
140 }
141 
142 static GstFlowReturn
_flush_pad(GstAggregatorPad * aggpad,GstAggregator * aggregator)143 _flush_pad (GstAggregatorPad * aggpad, GstAggregator * aggregator)
144 {
145   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (aggregator);
146   GstVideoAggregatorPad *pad = GST_VIDEO_AGGREGATOR_PAD (aggpad);
147 
148   gst_video_aggregator_reset_qos (vagg);
149   gst_buffer_replace (&pad->priv->buffer, NULL);
150   pad->priv->start_time = -1;
151   pad->priv->end_time = -1;
152 
153   return GST_FLOW_OK;
154 }
155 
156 static gboolean
gst_video_aggregator_pad_skip_buffer(GstAggregatorPad * aggpad,GstAggregator * agg,GstBuffer * buffer)157 gst_video_aggregator_pad_skip_buffer (GstAggregatorPad * aggpad,
158     GstAggregator * agg, GstBuffer * buffer)
159 {
160   gboolean ret = FALSE;
161   GstSegment *agg_segment = &GST_AGGREGATOR_PAD (agg->srcpad)->segment;
162 
163   if (agg_segment->position != GST_CLOCK_TIME_NONE
164       && GST_BUFFER_DURATION (buffer) != GST_CLOCK_TIME_NONE) {
165     GstClockTime start_time =
166         gst_segment_to_running_time (&aggpad->segment, GST_FORMAT_TIME,
167         GST_BUFFER_PTS (buffer));
168     GstClockTime end_time = start_time + GST_BUFFER_DURATION (buffer);
169     GstClockTime output_start_running_time =
170         gst_segment_to_running_time (agg_segment, GST_FORMAT_TIME,
171         agg_segment->position);
172 
173     ret = end_time < output_start_running_time;
174   }
175 
176   return ret;
177 }
178 
179 static gboolean
gst_video_aggregator_pad_prepare_frame(GstVideoAggregatorPad * pad,GstVideoAggregator * vagg,GstBuffer * buffer,GstVideoFrame * prepared_frame)180 gst_video_aggregator_pad_prepare_frame (GstVideoAggregatorPad * pad,
181     GstVideoAggregator * vagg, GstBuffer * buffer,
182     GstVideoFrame * prepared_frame)
183 {
184   if (!gst_video_frame_map (prepared_frame, &pad->info, buffer, GST_MAP_READ)) {
185     GST_WARNING_OBJECT (vagg, "Could not map input buffer");
186     return FALSE;
187   }
188 
189   return TRUE;
190 }
191 
192 static void
gst_video_aggregator_pad_clean_frame(GstVideoAggregatorPad * pad,GstVideoAggregator * vagg,GstVideoFrame * prepared_frame)193 gst_video_aggregator_pad_clean_frame (GstVideoAggregatorPad * pad,
194     GstVideoAggregator * vagg, GstVideoFrame * prepared_frame)
195 {
196   if (prepared_frame->buffer) {
197     gst_video_frame_unmap (prepared_frame);
198     memset (prepared_frame, 0, sizeof (GstVideoFrame));
199   }
200 }
201 
/* Class init for GstVideoAggregatorPad: installs the zorder and
 * repeat-after-eos properties and wires up the default pad vfuncs. */
static void
gst_video_aggregator_pad_class_init (GstVideoAggregatorPadClass * klass)
{
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstAggregatorPadClass *aggpadclass = (GstAggregatorPadClass *) klass;

  gobject_class->set_property = gst_video_aggregator_pad_set_property;
  gobject_class->get_property = gst_video_aggregator_pad_get_property;

  /* "zorder": stacking position of this pad's stream in the output */
  g_object_class_install_property (gobject_class, PROP_PAD_ZORDER,
      g_param_spec_uint ("zorder", "Z-Order", "Z Order of the picture",
          0, G_MAXUINT, DEFAULT_PAD_ZORDER,
          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
  /* "repeat-after-eos": keep showing the last frame of this pad once it
   * went EOS, until every pad is EOS */
  g_object_class_install_property (gobject_class, PROP_PAD_REPEAT_AFTER_EOS,
      g_param_spec_boolean ("repeat-after-eos", "Repeat After EOS",
          "Repeat the " "last frame after EOS until all pads are EOS",
          DEFAULT_PAD_REPEAT_AFTER_EOS,
          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));

  /* Default vfuncs; subclasses (e.g. the convert pad) may override */
  aggpadclass->flush = GST_DEBUG_FUNCPTR (_flush_pad);
  aggpadclass->skip_buffer =
      GST_DEBUG_FUNCPTR (gst_video_aggregator_pad_skip_buffer);
  klass->prepare_frame =
      GST_DEBUG_FUNCPTR (gst_video_aggregator_pad_prepare_frame);
  klass->clean_frame = GST_DEBUG_FUNCPTR (gst_video_aggregator_pad_clean_frame);
}
228 
229 static void
gst_video_aggregator_pad_init(GstVideoAggregatorPad * vaggpad)230 gst_video_aggregator_pad_init (GstVideoAggregatorPad * vaggpad)
231 {
232   vaggpad->priv = gst_video_aggregator_pad_get_instance_private (vaggpad);
233 
234   vaggpad->priv->zorder = DEFAULT_PAD_ZORDER;
235   vaggpad->priv->repeat_after_eos = DEFAULT_PAD_REPEAT_AFTER_EOS;
236   memset (&vaggpad->priv->prepared_frame, 0, sizeof (GstVideoFrame));
237 }
238 
239 /**
240  * gst_video_aggregator_pad_has_current_buffer:
241  * @pad: a #GstVideoAggregatorPad
242  *
243  * Checks if the pad currently has a buffer queued that is going to be used
244  * for the current output frame.
245  *
246  * This must only be called from the aggregate_frames() virtual method,
247  * or from the prepare_frame() virtual method of the aggregator pads.
248  *
249  * Returns: %TRUE if the pad has currently a buffer queued
250  */
251 gboolean
gst_video_aggregator_pad_has_current_buffer(GstVideoAggregatorPad * pad)252 gst_video_aggregator_pad_has_current_buffer (GstVideoAggregatorPad * pad)
253 {
254   g_return_val_if_fail (GST_IS_VIDEO_AGGREGATOR_PAD (pad), FALSE);
255 
256   return pad->priv->buffer != NULL;
257 }
258 
259 /**
260  * gst_video_aggregator_pad_get_current_buffer:
261  * @pad: a #GstVideoAggregatorPad
262  *
263  * Returns the currently queued buffer that is going to be used
264  * for the current output frame.
265  *
266  * This must only be called from the aggregate_frames() virtual method,
267  * or from the prepare_frame() virtual method of the aggregator pads.
268  *
269  * The return value is only valid until aggregate_frames() or prepare_frames()
270  * returns.
271  *
272  * Returns: (transfer none): The currently queued buffer
273  */
274 GstBuffer *
gst_video_aggregator_pad_get_current_buffer(GstVideoAggregatorPad * pad)275 gst_video_aggregator_pad_get_current_buffer (GstVideoAggregatorPad * pad)
276 {
277   g_return_val_if_fail (GST_IS_VIDEO_AGGREGATOR_PAD (pad), NULL);
278 
279   return pad->priv->buffer;
280 }
281 
282 /**
283  * gst_video_aggregator_pad_get_prepared_frame:
284  * @pad: a #GstVideoAggregatorPad
285  *
286  * Returns the currently prepared video frame that has to be aggregated into
287  * the current output frame.
288  *
289  * This must only be called from the aggregate_frames() virtual method,
290  * or from the prepare_frame() virtual method of the aggregator pads.
291  *
292  * The return value is only valid until aggregate_frames() or prepare_frames()
293  * returns.
294  *
295  * Returns: (transfer none): The currently prepared video frame
296  */
297 GstVideoFrame *
gst_video_aggregator_pad_get_prepared_frame(GstVideoAggregatorPad * pad)298 gst_video_aggregator_pad_get_prepared_frame (GstVideoAggregatorPad * pad)
299 {
300   g_return_val_if_fail (GST_IS_VIDEO_AGGREGATOR_PAD (pad), NULL);
301 
302   return pad->priv->prepared_frame.buffer ? &pad->priv->prepared_frame : NULL;
303 }
304 
305 /**
306  * gst_video_aggregator_pad_set_needs_alpha:
307  * @pad: a #GstVideoAggregatorPad
308  * @needs_alpha: %TRUE if this pad requires alpha output
309  *
310  * Allows selecting that this pad requires an output format with alpha
311  *
312  */
313 void
gst_video_aggregator_pad_set_needs_alpha(GstVideoAggregatorPad * pad,gboolean needs_alpha)314 gst_video_aggregator_pad_set_needs_alpha (GstVideoAggregatorPad * pad,
315     gboolean needs_alpha)
316 {
317   g_return_if_fail (GST_IS_VIDEO_AGGREGATOR_PAD (pad));
318 
319   if (needs_alpha != pad->priv->needs_alpha) {
320     GstAggregator *agg =
321         GST_AGGREGATOR (gst_object_get_parent (GST_OBJECT (pad)));
322     pad->priv->needs_alpha = needs_alpha;
323     if (agg) {
324       gst_pad_mark_reconfigure (GST_AGGREGATOR_SRC_PAD (agg));
325       gst_object_unref (agg);
326     }
327   }
328 }
329 
330 /****************************************
331  * GstVideoAggregatorConvertPad implementation *
332  ****************************************/
333 
/* Property IDs for GstVideoAggregatorConvertPad */
enum
{
  PROP_CONVERT_PAD_0,
  PROP_CONVERT_PAD_CONVERTER_CONFIG,
};

struct _GstVideoAggregatorConvertPadPrivate
{
  /* Converter, if NULL no conversion is done */
  GstVideoConverter *convert;

  /* caps used for conversion if needed */
  GstVideoInfo conversion_info;
  /* Temporary buffer holding the converted frame for the current cycle;
   * owned here and released in clean_frame() */
  GstBuffer *converted_buffer;

  /* Opaque GstVideoConverter options ("converter-config" property) */
  GstStructure *converter_config;
  /* Set when the converter must be (re)validated before the next use */
  gboolean converter_config_changed;
};

G_DEFINE_TYPE_WITH_PRIVATE (GstVideoAggregatorConvertPad,
    gst_video_aggregator_convert_pad, GST_TYPE_VIDEO_AGGREGATOR_PAD);
355 
356 static void
gst_video_aggregator_convert_pad_finalize(GObject * o)357 gst_video_aggregator_convert_pad_finalize (GObject * o)
358 {
359   GstVideoAggregatorConvertPad *vaggpad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (o);
360 
361   if (vaggpad->priv->convert)
362     gst_video_converter_free (vaggpad->priv->convert);
363   vaggpad->priv->convert = NULL;
364 
365   if (vaggpad->priv->converter_config)
366     gst_structure_free (vaggpad->priv->converter_config);
367   vaggpad->priv->converter_config = NULL;
368 
369   G_OBJECT_CLASS (gst_video_aggregator_pad_parent_class)->finalize (o);
370 }
371 
372 static void
gst_video_aggregator_convert_pad_update_conversion_info_internal(GstVideoAggregatorPad * vpad)373     gst_video_aggregator_convert_pad_update_conversion_info_internal
374     (GstVideoAggregatorPad * vpad)
375 {
376   GstVideoAggregatorConvertPad *pad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (vpad);
377 
378   pad->priv->converter_config_changed = TRUE;
379 }
380 
/* GstVideoAggregatorPad::prepare_frame vfunc for the convert pad:
 * lazily (re)creates the video converter when flagged, maps the input
 * buffer, and either converts it into a freshly allocated buffer or
 * passes the mapped frame through unchanged. Returns FALSE on any
 * mapping/conversion-setup failure. */
static gboolean
gst_video_aggregator_convert_pad_prepare_frame (GstVideoAggregatorPad * vpad,
    GstVideoAggregator * vagg, GstBuffer * buffer,
    GstVideoFrame * prepared_frame)
{
  GstVideoAggregatorConvertPad *pad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (vpad);
  GstVideoFrame frame;

  /* Update/create converter as needed */
  if (pad->priv->converter_config_changed) {
    GstVideoAggregatorConvertPadClass *klass =
        GST_VIDEO_AGGREGATOR_CONVERT_PAD_GET_CLASS (pad);
    GstVideoInfo conversion_info;

    /* Ask the (sub)class which format this pad should be converted to;
     * a NULL finfo afterwards means it could not decide yet */
    gst_video_info_init (&conversion_info);
    klass->create_conversion_info (pad, vagg, &conversion_info);
    if (conversion_info.finfo == NULL)
      return FALSE;
    pad->priv->converter_config_changed = FALSE;

    /* Only rebuild the converter when the target info actually changed */
    if (!pad->priv->conversion_info.finfo
        || !gst_video_info_is_equal (&conversion_info,
            &pad->priv->conversion_info)) {
      pad->priv->conversion_info = conversion_info;

      if (pad->priv->convert)
        gst_video_converter_free (pad->priv->convert);
      pad->priv->convert = NULL;

      /* No converter needed when the input already matches the target */
      if (!gst_video_info_is_equal (&vpad->info, &pad->priv->conversion_info)) {
        pad->priv->convert =
            gst_video_converter_new (&vpad->info, &pad->priv->conversion_info,
            pad->priv->converter_config ? gst_structure_copy (pad->
                priv->converter_config) : NULL);
        if (!pad->priv->convert) {
          GST_WARNING_OBJECT (pad, "No path found for conversion");
          return FALSE;
        }

        GST_DEBUG_OBJECT (pad, "This pad will be converted from %d to %d",
            GST_VIDEO_INFO_FORMAT (&vpad->info),
            GST_VIDEO_INFO_FORMAT (&pad->priv->conversion_info));
      } else {
        GST_DEBUG_OBJECT (pad, "This pad will not need conversion");
      }
    }
  }

  /* Map the input read-only; unmapped again below on the convert path */
  if (!gst_video_frame_map (&frame, &vpad->info, buffer, GST_MAP_READ)) {
    GST_WARNING_OBJECT (vagg, "Could not map input buffer");
    return FALSE;
  }

  if (pad->priv->convert) {
    GstVideoFrame converted_frame;
    GstBuffer *converted_buf = NULL;
    /* align = 15 requests 16-byte-aligned memory for the scratch buffer */
    static GstAllocationParams params = { 0, 15, 0, 0, };
    gint converted_size;
    guint outsize;

    /* We wait until here to set the conversion infos, in case vagg->info changed */
    converted_size = pad->priv->conversion_info.size;
    outsize = GST_VIDEO_INFO_SIZE (&vagg->info);
    converted_size = converted_size > outsize ? converted_size : outsize;
    converted_buf = gst_buffer_new_allocate (NULL, converted_size, &params);

    if (!gst_video_frame_map (&converted_frame, &(pad->priv->conversion_info),
            converted_buf, GST_MAP_READWRITE)) {
      GST_WARNING_OBJECT (vagg, "Could not map converted frame");

      gst_video_frame_unmap (&frame);
      return FALSE;
    }

    gst_video_converter_frame (pad->priv->convert, &frame, &converted_frame);
    /* Ownership of the converted buffer is kept until clean_frame() */
    pad->priv->converted_buffer = converted_buf;
    gst_video_frame_unmap (&frame);
    *prepared_frame = converted_frame;
  } else {
    /* Pass the mapped input frame through unconverted */
    *prepared_frame = frame;
  }

  return TRUE;
}
465 
466 static void
gst_video_aggregator_convert_pad_clean_frame(GstVideoAggregatorPad * vpad,GstVideoAggregator * vagg,GstVideoFrame * prepared_frame)467 gst_video_aggregator_convert_pad_clean_frame (GstVideoAggregatorPad * vpad,
468     GstVideoAggregator * vagg, GstVideoFrame * prepared_frame)
469 {
470   GstVideoAggregatorConvertPad *pad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (vpad);
471 
472   if (prepared_frame->buffer) {
473     gst_video_frame_unmap (prepared_frame);
474     memset (prepared_frame, 0, sizeof (GstVideoFrame));
475   }
476 
477   if (pad->priv->converted_buffer) {
478     gst_buffer_unref (pad->priv->converted_buffer);
479     pad->priv->converted_buffer = NULL;
480   }
481 }
482 
/* Default GstVideoAggregatorConvertPad::create_conversion_info vfunc:
 * computes the GstVideoInfo this pad's frames should be converted to.
 * Keeps the pad's own resolution (no rescaling) but adopts the
 * aggregator's format, colorimetry and chroma-site when any of them
 * differ. Leaves @convert_info untouched (finfo == NULL) while either
 * side is not negotiated yet, which makes prepare_frame() bail out. */
static void
    gst_video_aggregator_convert_pad_create_conversion_info
    (GstVideoAggregatorConvertPad * pad, GstVideoAggregator * agg,
    GstVideoInfo * convert_info)
{
  GstVideoAggregatorPad *vpad = GST_VIDEO_AGGREGATOR_PAD (pad);
  gchar *colorimetry, *best_colorimetry;
  const gchar *chroma, *best_chroma;

  g_return_if_fail (GST_IS_VIDEO_AGGREGATOR_CONVERT_PAD (pad));
  g_return_if_fail (convert_info != NULL);

  /* Input format not negotiated yet */
  if (!vpad->info.finfo
      || GST_VIDEO_INFO_FORMAT (&vpad->info) == GST_VIDEO_FORMAT_UNKNOWN) {
    return;
  }

  /* Output format not negotiated yet */
  if (!agg->info.finfo
      || GST_VIDEO_INFO_FORMAT (&agg->info) == GST_VIDEO_FORMAT_UNKNOWN) {
    return;
  }

  /* Compare colorimetry and chroma-site via their string forms
   * (g_strcmp0 handles NULLs from the to_string helpers) */
  colorimetry = gst_video_colorimetry_to_string (&vpad->info.colorimetry);
  chroma = gst_video_chroma_to_string (vpad->info.chroma_site);

  best_colorimetry = gst_video_colorimetry_to_string (&agg->info.colorimetry);
  best_chroma = gst_video_chroma_to_string (agg->info.chroma_site);

  if (GST_VIDEO_INFO_FORMAT (&agg->info) != GST_VIDEO_INFO_FORMAT (&vpad->info)
      || g_strcmp0 (colorimetry, best_colorimetry)
      || g_strcmp0 (chroma, best_chroma)) {
    GstVideoInfo tmp_info;

    /* Initialize with the wanted video format and our original width and
     * height as we don't want to rescale. Then copy over the wanted
     * colorimetry, and chroma-site and our current pixel-aspect-ratio
     * and other relevant fields.
     */
    gst_video_info_set_format (&tmp_info, GST_VIDEO_INFO_FORMAT (&agg->info),
        vpad->info.width, vpad->info.height);
    tmp_info.chroma_site = agg->info.chroma_site;
    tmp_info.colorimetry = agg->info.colorimetry;
    tmp_info.par_n = vpad->info.par_n;
    tmp_info.par_d = vpad->info.par_d;
    tmp_info.fps_n = vpad->info.fps_n;
    tmp_info.fps_d = vpad->info.fps_d;
    tmp_info.flags = vpad->info.flags;
    tmp_info.interlace_mode = vpad->info.interlace_mode;

    *convert_info = tmp_info;
  } else {
    /* Formats already match: convert 1:1 from the input info */
    *convert_info = vpad->info;
  }

  g_free (colorimetry);
  g_free (best_colorimetry);
}
540 
541 static void
gst_video_aggregator_convert_pad_get_property(GObject * object,guint prop_id,GValue * value,GParamSpec * pspec)542 gst_video_aggregator_convert_pad_get_property (GObject * object, guint prop_id,
543     GValue * value, GParamSpec * pspec)
544 {
545   GstVideoAggregatorConvertPad *pad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (object);
546 
547   switch (prop_id) {
548     case PROP_CONVERT_PAD_CONVERTER_CONFIG:
549       GST_OBJECT_LOCK (pad);
550       if (pad->priv->converter_config)
551         g_value_set_boxed (value, pad->priv->converter_config);
552       GST_OBJECT_UNLOCK (pad);
553       break;
554     default:
555       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
556       break;
557   }
558 }
559 
560 static void
gst_video_aggregator_convert_pad_set_property(GObject * object,guint prop_id,const GValue * value,GParamSpec * pspec)561 gst_video_aggregator_convert_pad_set_property (GObject * object, guint prop_id,
562     const GValue * value, GParamSpec * pspec)
563 {
564   GstVideoAggregatorConvertPad *pad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (object);
565 
566   switch (prop_id) {
567     case PROP_CONVERT_PAD_CONVERTER_CONFIG:
568       GST_OBJECT_LOCK (pad);
569       if (pad->priv->converter_config)
570         gst_structure_free (pad->priv->converter_config);
571       pad->priv->converter_config = g_value_dup_boxed (value);
572       pad->priv->converter_config_changed = TRUE;
573       GST_OBJECT_UNLOCK (pad);
574       break;
575     default:
576       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
577       break;
578   }
579 }
580 
/* Class init for GstVideoAggregatorConvertPad: installs the
 * converter-config property and overrides the pad vfuncs so frames are
 * converted to the aggregator's output format before aggregation. */
static void
gst_video_aggregator_convert_pad_class_init (GstVideoAggregatorConvertPadClass *
    klass)
{
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstVideoAggregatorPadClass *vaggpadclass =
      (GstVideoAggregatorPadClass *) klass;

  gobject_class->finalize = gst_video_aggregator_convert_pad_finalize;
  gobject_class->get_property =
      GST_DEBUG_FUNCPTR (gst_video_aggregator_convert_pad_get_property);
  gobject_class->set_property =
      GST_DEBUG_FUNCPTR (gst_video_aggregator_convert_pad_set_property);

  /* "converter-config": opaque GstStructure of GstVideoConverter options */
  g_object_class_install_property (gobject_class,
      PROP_CONVERT_PAD_CONVERTER_CONFIG, g_param_spec_boxed ("converter-config",
          "Converter configuration",
          "A GstStructure describing the configuration that should be used "
          "when scaling and converting this pad's video frames",
          GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  vaggpadclass->update_conversion_info =
      GST_DEBUG_FUNCPTR
      (gst_video_aggregator_convert_pad_update_conversion_info_internal);
  vaggpadclass->prepare_frame =
      GST_DEBUG_FUNCPTR (gst_video_aggregator_convert_pad_prepare_frame);
  vaggpadclass->clean_frame =
      GST_DEBUG_FUNCPTR (gst_video_aggregator_convert_pad_clean_frame);

  /* Subclasses may override to pick a different conversion target */
  klass->create_conversion_info =
      gst_video_aggregator_convert_pad_create_conversion_info;
}
613 
614 static void
gst_video_aggregator_convert_pad_init(GstVideoAggregatorConvertPad * vaggpad)615 gst_video_aggregator_convert_pad_init (GstVideoAggregatorConvertPad * vaggpad)
616 {
617   vaggpad->priv =
618       gst_video_aggregator_convert_pad_get_instance_private (vaggpad);
619 
620   vaggpad->priv->converted_buffer = NULL;
621   vaggpad->priv->convert = NULL;
622   vaggpad->priv->converter_config = NULL;
623   vaggpad->priv->converter_config_changed = FALSE;
624 }
625 
626 
627 /**
628  * gst_video_aggregator_convert_pad_update_conversion_info:
629  * @pad: a #GstVideoAggregatorPad
630  *
631  * Requests the pad to check and update the converter before the next usage to
632  * update for any changes that have happened.
633  *
634  */
void gst_video_aggregator_convert_pad_update_conversion_info
    (GstVideoAggregatorConvertPad * pad)
{
  g_return_if_fail (GST_IS_VIDEO_AGGREGATOR_CONVERT_PAD (pad));

  /* Only raise a flag here: the converter itself is lazily rebuilt the
   * next time a frame is prepared on this pad. */
  pad->priv->converter_config_changed = TRUE;
}
642 
643 /**************************************
644  * GstVideoAggregator implementation  *
645  **************************************/
646 
/* Accessor for the aggregator's state/event mutex */
#define GST_VIDEO_AGGREGATOR_GET_LOCK(vagg) (&GST_VIDEO_AGGREGATOR(vagg)->priv->lock)

#define GST_VIDEO_AGGREGATOR_LOCK(vagg)   G_STMT_START {       \
  GST_LOG_OBJECT (vagg, "Taking EVENT lock from thread %p",    \
        g_thread_self());                                      \
  g_mutex_lock(GST_VIDEO_AGGREGATOR_GET_LOCK(vagg));           \
  GST_LOG_OBJECT (vagg, "Took EVENT lock from thread %p",      \
        g_thread_self());                                      \
  } G_STMT_END

/* Fix: the post-unlock message previously said "Took EVENT lock"
 * (copy-paste from the LOCK macro), which made lock traces misleading */
#define GST_VIDEO_AGGREGATOR_UNLOCK(vagg)   G_STMT_START {     \
  GST_LOG_OBJECT (vagg, "Releasing EVENT lock from thread %p", \
        g_thread_self());                                      \
  g_mutex_unlock(GST_VIDEO_AGGREGATOR_GET_LOCK(vagg));         \
  GST_LOG_OBJECT (vagg, "Released EVENT lock from thread %p",  \
        g_thread_self());                                      \
  } G_STMT_END
664 
665 
struct _GstVideoAggregatorPrivate
{
  /* Lock to prevent the state to change while aggregating */
  GMutex lock;

  /* Current downstream segment */
  GstClockTime ts_offset;
  guint64 nframes;

  /* QoS stuff */
  gdouble proportion;
  GstClockTime earliest_time;
  guint64 qos_processed, qos_dropped;

  /* current caps */
  GstCaps *current_caps;

  /* NOTE(review): presumably whether the pipeline is live; the code that
   * sets/reads this is outside this chunk — confirm before relying on it */
  gboolean live;
};
685 
/* Can't use the G_DEFINE_TYPE macros because we need the
 * videoaggregator class in the _init to be able to set
 * the sink pad non-alpha caps. Using the G_DEFINE_TYPE there
 * seems to be no way of getting the real class being initialized */
static void gst_video_aggregator_init (GstVideoAggregator * self,
    GstVideoAggregatorClass * klass);
static void gst_video_aggregator_class_init (GstVideoAggregatorClass * klass);
/* Both filled in by gst_video_aggregator_get_type() below */
static gpointer gst_video_aggregator_parent_class = NULL;
static gint video_aggregator_private_offset = 0;
695 
/* Hand-rolled type registration (see comment above on why G_DEFINE_TYPE
 * cannot be used here). Thread-safe via the g_once_init pair. */
GType
gst_video_aggregator_get_type (void)
{
  static volatile gsize g_define_type_id_volatile = 0;

  if (g_once_init_enter (&g_define_type_id_volatile)) {
    GType g_define_type_id = g_type_register_static_simple (GST_TYPE_AGGREGATOR,
        g_intern_static_string ("GstVideoAggregator"),
        sizeof (GstVideoAggregatorClass),
        (GClassInitFunc) gst_video_aggregator_class_init,
        sizeof (GstVideoAggregator),
        (GInstanceInitFunc) gst_video_aggregator_init,
        (GTypeFlags) G_TYPE_FLAG_ABSTRACT);

    /* Register the private struct manually; the stored offset is what
     * gst_video_aggregator_get_instance_private() resolves against */
    video_aggregator_private_offset =
        g_type_add_instance_private (g_define_type_id,
        sizeof (GstVideoAggregatorPrivate));

    g_once_init_leave (&g_define_type_id_volatile, g_define_type_id);
  }
  return g_define_type_id_volatile;
}
718 
719 static inline GstVideoAggregatorPrivate *
gst_video_aggregator_get_instance_private(GstVideoAggregator * self)720 gst_video_aggregator_get_instance_private (GstVideoAggregator * self)
721 {
722   return (G_STRUCT_MEMBER_P (self, video_aggregator_private_offset));
723 }
724 
/* Picks the output format for negotiation: each candidate format is
 * scored by the accumulated pixel area of the sink pads producing it
 * (restricted to what @downstream_caps accepts), and the highest score
 * wins. As soon as any pad requires alpha, only alpha-capable formats
 * are considered (falling back to ARGB when the requiring pad itself
 * has no alpha). Also reports via @at_least_one_alpha whether any input
 * carries an alpha channel. Result is written to @best_info. */
static void
gst_video_aggregator_find_best_format (GstVideoAggregator * vagg,
    GstCaps * downstream_caps, GstVideoInfo * best_info,
    gboolean * at_least_one_alpha)
{
  GList *tmp;
  GstCaps *possible_caps;
  GstVideoAggregatorPad *pad;
  gboolean need_alpha = FALSE;
  gint best_format_number = 0;
  /* Maps GstVideoFormat -> accumulated width*height of pads using it */
  GHashTable *formats_table = g_hash_table_new (g_direct_hash, g_direct_equal);

  GST_OBJECT_LOCK (vagg);
  for (tmp = GST_ELEMENT (vagg)->sinkpads; tmp; tmp = tmp->next) {
    GstStructure *s;
    gint format_number;

    pad = tmp->data;

    /* Pad not negotiated yet */
    if (!pad->info.finfo)
      continue;

    if (pad->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)
      *at_least_one_alpha = TRUE;

    /* If we want alpha, disregard all the other formats */
    if (need_alpha && !(pad->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA))
      continue;

    /* This can happen if we release a pad and another pad hasn't been negotiated_caps yet */
    if (GST_VIDEO_INFO_FORMAT (&pad->info) == GST_VIDEO_FORMAT_UNKNOWN)
      continue;

    possible_caps = gst_video_info_to_caps (&pad->info);

    /* Compare formats only; geometry/rate fields are negotiated separately */
    s = gst_caps_get_structure (possible_caps, 0);
    gst_structure_remove_fields (s, "width", "height", "framerate",
        "pixel-aspect-ratio", "interlace-mode", NULL);

    /* Can downstream accept this format ? */
    if (!gst_caps_can_intersect (downstream_caps, possible_caps)) {
      gst_caps_unref (possible_caps);
      continue;
    }

    gst_caps_unref (possible_caps);

    /* Accumulate this pad's pixel area into the format's score */
    format_number =
        GPOINTER_TO_INT (g_hash_table_lookup (formats_table,
            GINT_TO_POINTER (GST_VIDEO_INFO_FORMAT (&pad->info))));
    format_number += pad->info.width * pad->info.height;

    g_hash_table_replace (formats_table,
        GINT_TO_POINTER (GST_VIDEO_INFO_FORMAT (&pad->info)),
        GINT_TO_POINTER (format_number));

    /* If that pad is the first with alpha, set it as the new best format */
    if (!need_alpha && (pad->priv->needs_alpha
            && (!GST_VIDEO_FORMAT_INFO_HAS_ALPHA (pad->info.finfo)))) {
      need_alpha = TRUE;
      /* Just fallback to ARGB in case we require alpha but the input pad
       * does not have alpha.
       * Do not increment best_format_number in that case. */
      gst_video_info_set_format (best_info,
          GST_VIDEO_FORMAT_ARGB,
          GST_VIDEO_INFO_WIDTH (&pad->info),
          GST_VIDEO_INFO_HEIGHT (&pad->info));
    } else if (!need_alpha
        && (pad->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)) {
      need_alpha = TRUE;
      *best_info = pad->info;
      best_format_number = format_number;
    } else if (format_number > best_format_number) {
      *best_info = pad->info;
      best_format_number = format_number;
    }
  }
  GST_OBJECT_UNLOCK (vagg);

  g_hash_table_unref (formats_table);
}
806 
/* Default GstAggregator::fixate_src_caps vfunc: fixates the output caps
 * to the largest width/height and the highest framerate found among the
 * negotiated sink pads (falling back to 25/1 fps when none is usable),
 * matching the behaviour documented in the SECTION comment above. */
static GstCaps *
gst_video_aggregator_default_fixate_src_caps (GstAggregator * agg,
    GstCaps * caps)
{
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
  gint best_width = -1, best_height = -1;
  gint best_fps_n = -1, best_fps_d = -1;
  gdouble best_fps = -1.;
  GstStructure *s;
  GList *l;

  /* Scan all sink pads for the biggest geometry and fastest framerate */
  GST_OBJECT_LOCK (vagg);
  for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
    GstVideoAggregatorPad *mpad = l->data;
    gint fps_n, fps_d;
    gint width, height;
    gdouble cur_fps;

    fps_n = GST_VIDEO_INFO_FPS_N (&mpad->info);
    fps_d = GST_VIDEO_INFO_FPS_D (&mpad->info);
    width = GST_VIDEO_INFO_WIDTH (&mpad->info);
    height = GST_VIDEO_INFO_HEIGHT (&mpad->info);

    /* Skip pads without negotiated geometry */
    if (width == 0 || height == 0)
      continue;

    if (best_width < width)
      best_width = width;
    if (best_height < height)
      best_height = height;

    /* fps_d == 0 would divide by zero; treat as "no framerate" */
    if (fps_d == 0)
      cur_fps = 0.0;
    else
      gst_util_fraction_to_double (fps_n, fps_d, &cur_fps);

    if (best_fps < cur_fps) {
      best_fps = cur_fps;
      best_fps_n = fps_n;
      best_fps_d = fps_d;
    }
  }
  GST_OBJECT_UNLOCK (vagg);

  /* Fall back to 25 fps when no pad provided a usable framerate */
  if (best_fps_n <= 0 || best_fps_d <= 0 || best_fps == 0.0) {
    best_fps_n = 25;
    best_fps_d = 1;
    best_fps = 25.0;
  }

  caps = gst_caps_make_writable (caps);
  s = gst_caps_get_structure (caps, 0);
  gst_structure_fixate_field_nearest_int (s, "width", best_width);
  gst_structure_fixate_field_nearest_int (s, "height", best_height);
  gst_structure_fixate_field_nearest_fraction (s, "framerate", best_fps_n,
      best_fps_d);
  if (gst_structure_has_field (s, "pixel-aspect-ratio"))
    gst_structure_fixate_field_nearest_fraction (s, "pixel-aspect-ratio", 1, 1);
  /* Pin down any remaining unfixed fields */
  caps = gst_caps_fixate (caps);

  return caps;
}
869 
870 static GstCaps *
gst_video_aggregator_default_update_caps(GstVideoAggregator * vagg,GstCaps * caps)871 gst_video_aggregator_default_update_caps (GstVideoAggregator * vagg,
872     GstCaps * caps)
873 {
874   GstVideoAggregatorClass *vagg_klass = GST_VIDEO_AGGREGATOR_GET_CLASS (vagg);
875   GstCaps *ret, *best_format_caps;
876   gboolean at_least_one_alpha = FALSE;
877   GstVideoFormat best_format;
878   GstVideoInfo best_info;
879   gchar *color_name;
880 
881   best_format = GST_VIDEO_FORMAT_UNKNOWN;
882   gst_video_info_init (&best_info);
883 
884   if (vagg_klass->find_best_format) {
885     vagg_klass->find_best_format (vagg, caps, &best_info, &at_least_one_alpha);
886 
887     best_format = GST_VIDEO_INFO_FORMAT (&best_info);
888   }
889 
890   if (best_format == GST_VIDEO_FORMAT_UNKNOWN) {
891     GstCaps *tmp = gst_caps_fixate (gst_caps_ref (caps));
892     gst_video_info_from_caps (&best_info, tmp);
893     best_format = GST_VIDEO_INFO_FORMAT (&best_info);
894     gst_caps_unref (tmp);
895   }
896 
897   color_name = gst_video_colorimetry_to_string (&best_info.colorimetry);
898 
899   GST_DEBUG_OBJECT (vagg,
900       "The output format will now be : %d with chroma : %s and colorimetry %s",
901       best_format, gst_video_chroma_to_string (best_info.chroma_site),
902       color_name);
903 
904   best_format_caps = gst_caps_copy (caps);
905   gst_caps_set_simple (best_format_caps, "format", G_TYPE_STRING,
906       gst_video_format_to_string (best_format), "chroma-site", G_TYPE_STRING,
907       gst_video_chroma_to_string (best_info.chroma_site), "colorimetry",
908       G_TYPE_STRING, color_name, NULL);
909   g_free (color_name);
910   ret = gst_caps_merge (best_format_caps, gst_caps_ref (caps));
911 
912   return ret;
913 }
914 
915 static GstFlowReturn
gst_video_aggregator_default_update_src_caps(GstAggregator * agg,GstCaps * caps,GstCaps ** ret)916 gst_video_aggregator_default_update_src_caps (GstAggregator * agg,
917     GstCaps * caps, GstCaps ** ret)
918 {
919   GstVideoAggregatorClass *vagg_klass = GST_VIDEO_AGGREGATOR_GET_CLASS (agg);
920   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
921   gboolean at_least_one_pad_configured = FALSE;
922   GList *l;
923 
924   GST_OBJECT_LOCK (vagg);
925   for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
926     GstVideoAggregatorPad *mpad = l->data;
927 
928     if (GST_VIDEO_INFO_WIDTH (&mpad->info) == 0
929         || GST_VIDEO_INFO_HEIGHT (&mpad->info) == 0)
930       continue;
931 
932     at_least_one_pad_configured = TRUE;
933   }
934   GST_OBJECT_UNLOCK (vagg);
935 
936   if (!at_least_one_pad_configured) {
937     /* We couldn't decide the output video info because the sinkpads don't have
938      * all the caps yet, so we mark the pad as needing a reconfigure. This
939      * allows aggregate() to skip ahead a bit and try again later. */
940     GST_DEBUG_OBJECT (vagg, "Couldn't decide output video info");
941     gst_pad_mark_reconfigure (agg->srcpad);
942     return GST_AGGREGATOR_FLOW_NEED_DATA;
943   }
944 
945   g_assert (vagg_klass->update_caps);
946 
947   *ret = vagg_klass->update_caps (vagg, caps);
948 
949   return GST_FLOW_OK;
950 }
951 
952 static gboolean
_update_conversion_info(GstElement * element,GstPad * pad,gpointer user_data)953 _update_conversion_info (GstElement * element, GstPad * pad, gpointer user_data)
954 {
955   GstVideoAggregatorPad *vaggpad = GST_VIDEO_AGGREGATOR_PAD (pad);
956   GstVideoAggregatorPadClass *vaggpad_klass =
957       GST_VIDEO_AGGREGATOR_PAD_GET_CLASS (vaggpad);
958 
959   if (vaggpad_klass->update_conversion_info) {
960     vaggpad_klass->update_conversion_info (vaggpad);
961   }
962 
963   return TRUE;
964 }
965 
966 static gboolean
gst_video_aggregator_default_negotiated_src_caps(GstAggregator * agg,GstCaps * caps)967 gst_video_aggregator_default_negotiated_src_caps (GstAggregator * agg,
968     GstCaps * caps)
969 {
970   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
971   gboolean at_least_one_alpha = FALSE;
972   const GstVideoFormatInfo *finfo;
973   GstVideoInfo info;
974   GList *l;
975 
976   GST_INFO_OBJECT (agg->srcpad, "set src caps: %" GST_PTR_FORMAT, caps);
977 
978   GST_OBJECT_LOCK (vagg);
979   for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
980     GstVideoAggregatorPad *mpad = l->data;
981 
982     if (GST_VIDEO_INFO_WIDTH (&mpad->info) == 0
983         || GST_VIDEO_INFO_HEIGHT (&mpad->info) == 0)
984       continue;
985 
986     if (mpad->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)
987       at_least_one_alpha = TRUE;
988   }
989   GST_OBJECT_UNLOCK (vagg);
990 
991   if (!gst_video_info_from_caps (&info, caps))
992     return FALSE;
993 
994   if (GST_VIDEO_INFO_FPS_N (&vagg->info) != GST_VIDEO_INFO_FPS_N (&info) ||
995       GST_VIDEO_INFO_FPS_D (&vagg->info) != GST_VIDEO_INFO_FPS_D (&info)) {
996     if (GST_AGGREGATOR_PAD (agg->srcpad)->segment.position != -1) {
997       vagg->priv->nframes = 0;
998       /* The timestamp offset will be updated based on the
999        * segment position the next time we aggregate */
1000       GST_DEBUG_OBJECT (vagg,
1001           "Resetting frame counter because of framerate change");
1002     }
1003     gst_video_aggregator_reset_qos (vagg);
1004   }
1005 
1006   vagg->info = info;
1007 
1008   finfo = vagg->info.finfo;
1009 
1010   if (at_least_one_alpha && !(finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)) {
1011     GST_ELEMENT_ERROR (vagg, CORE, NEGOTIATION,
1012         ("At least one of the input pads contains alpha, but configured caps don't support alpha."),
1013         ("Either convert your inputs to not contain alpha or add a videoconvert after the aggregator"));
1014     return FALSE;
1015   }
1016 
1017   /* Then browse the sinks once more, setting or unsetting conversion if needed */
1018   gst_element_foreach_sink_pad (GST_ELEMENT_CAST (vagg),
1019       _update_conversion_info, NULL);
1020 
1021   if (vagg->priv->current_caps == NULL ||
1022       gst_caps_is_equal (caps, vagg->priv->current_caps) == FALSE) {
1023     GstClockTime latency;
1024 
1025     gst_caps_replace (&vagg->priv->current_caps, caps);
1026 
1027     gst_aggregator_set_src_caps (agg, caps);
1028     latency = gst_util_uint64_scale (GST_SECOND,
1029         GST_VIDEO_INFO_FPS_D (&vagg->info), GST_VIDEO_INFO_FPS_N (&vagg->info));
1030     gst_aggregator_set_latency (agg, latency, latency);
1031   }
1032 
1033   return TRUE;
1034 }
1035 
1036 static gboolean
gst_video_aggregator_get_sinkpads_interlace_mode(GstVideoAggregator * vagg,GstVideoAggregatorPad * skip_pad,GstVideoInterlaceMode * mode)1037 gst_video_aggregator_get_sinkpads_interlace_mode (GstVideoAggregator * vagg,
1038     GstVideoAggregatorPad * skip_pad, GstVideoInterlaceMode * mode)
1039 {
1040   GList *walk;
1041 
1042   GST_OBJECT_LOCK (vagg);
1043   for (walk = GST_ELEMENT (vagg)->sinkpads; walk; walk = g_list_next (walk)) {
1044     GstVideoAggregatorPad *vaggpad = walk->data;
1045 
1046     if (skip_pad && vaggpad == skip_pad)
1047       continue;
1048     if (vaggpad->info.finfo
1049         && GST_VIDEO_INFO_FORMAT (&vaggpad->info) != GST_VIDEO_FORMAT_UNKNOWN) {
1050       *mode = GST_VIDEO_INFO_INTERLACE_MODE (&vaggpad->info);
1051       GST_OBJECT_UNLOCK (vagg);
1052       return TRUE;
1053     }
1054   }
1055   GST_OBJECT_UNLOCK (vagg);
1056   return FALSE;
1057 }
1058 
/* Handles new caps on a sinkpad: parses them into a GstVideoInfo, verifies
 * the interlace mode matches the negotiated output (or, before negotiation,
 * the other configured sinkpads), then either applies the info immediately
 * (first configuration of this pad) or stores it as pending so it is only
 * applied together with the first buffer that matches it.
 * Returns FALSE if the caps cannot be parsed or the interlace mode differs. */
static gboolean
gst_video_aggregator_pad_sink_setcaps (GstPad * pad, GstObject * parent,
    GstCaps * caps)
{
  GstVideoAggregator *vagg;
  GstVideoAggregatorPad *vaggpad;
  GstVideoInfo info;
  gboolean ret = FALSE;

  GST_INFO_OBJECT (pad, "Setting caps %" GST_PTR_FORMAT, caps);

  vagg = GST_VIDEO_AGGREGATOR (parent);
  vaggpad = GST_VIDEO_AGGREGATOR_PAD (pad);

  if (!gst_video_info_from_caps (&info, caps)) {
    GST_DEBUG_OBJECT (pad, "Failed to parse caps");
    goto beach;
  }

  GST_VIDEO_AGGREGATOR_LOCK (vagg);
  {
    GstVideoInterlaceMode pads_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
    gboolean has_mode = FALSE;

    /* get the current output setting or fallback to other pads settings */
    if (GST_VIDEO_INFO_FORMAT (&vagg->info) != GST_VIDEO_FORMAT_UNKNOWN) {
      pads_mode = GST_VIDEO_INFO_INTERLACE_MODE (&vagg->info);
      has_mode = TRUE;
    } else {
      has_mode =
          gst_video_aggregator_get_sinkpads_interlace_mode (vagg, vaggpad,
          &pads_mode);
    }

    /* Mixing interlace modes across pads is not supported: reject the caps */
    if (has_mode) {
      if (pads_mode != GST_VIDEO_INFO_INTERLACE_MODE (&info)) {
        GST_ERROR_OBJECT (pad,
            "got input caps %" GST_PTR_FORMAT ", but current caps are %"
            GST_PTR_FORMAT, caps, vagg->priv->current_caps);
        GST_VIDEO_AGGREGATOR_UNLOCK (vagg);
        return FALSE;
      }
    }
  }

  if (!vaggpad->info.finfo ||
      GST_VIDEO_INFO_FORMAT (&vaggpad->info) == GST_VIDEO_FORMAT_UNKNOWN) {
    /* no video info was already set, so this is the first time
     * that this pad is getting configured; configure immediately to avoid
     * problems with the initial negotiation */
    vaggpad->info = info;
    gst_pad_mark_reconfigure (GST_AGGREGATOR_SRC_PAD (vagg));
  } else {
    /* this pad already had caps but received new ones; keep the new caps
     * pending until we pick the next buffer from the queue, otherwise we
     * might use an old buffer with the new caps and crash */
    vaggpad->priv->pending_vinfo = info;
    GST_DEBUG_OBJECT (pad, "delaying caps change");
  }
  ret = TRUE;

  GST_VIDEO_AGGREGATOR_UNLOCK (vagg);

beach:
  return ret;
}
1125 
1126 static gboolean
gst_video_aggregator_caps_has_alpha(GstCaps * caps)1127 gst_video_aggregator_caps_has_alpha (GstCaps * caps)
1128 {
1129   guint size = gst_caps_get_size (caps);
1130   guint i;
1131 
1132   for (i = 0; i < size; i++) {
1133     GstStructure *s = gst_caps_get_structure (caps, i);
1134     const GValue *formats = gst_structure_get_value (s, "format");
1135 
1136     if (formats) {
1137       const GstVideoFormatInfo *info;
1138 
1139       if (GST_VALUE_HOLDS_LIST (formats)) {
1140         guint list_size = gst_value_list_get_size (formats);
1141         guint index;
1142 
1143         for (index = 0; index < list_size; index++) {
1144           const GValue *list_item = gst_value_list_get_value (formats, index);
1145           info =
1146               gst_video_format_get_info (gst_video_format_from_string
1147               (g_value_get_string (list_item)));
1148           if (GST_VIDEO_FORMAT_INFO_HAS_ALPHA (info))
1149             return TRUE;
1150         }
1151 
1152       } else if (G_VALUE_HOLDS_STRING (formats)) {
1153         info =
1154             gst_video_format_get_info (gst_video_format_from_string
1155             (g_value_get_string (formats)));
1156         if (GST_VIDEO_FORMAT_INFO_HAS_ALPHA (info))
1157           return TRUE;
1158 
1159       } else {
1160         g_assert_not_reached ();
1161         GST_WARNING ("Unexpected type for video 'format' field: %s",
1162             G_VALUE_TYPE_NAME (formats));
1163       }
1164 
1165     } else {
1166       return TRUE;
1167     }
1168   }
1169   return FALSE;
1170 }
1171 
1172 static GstCaps *
_get_non_alpha_caps(GstCaps * caps)1173 _get_non_alpha_caps (GstCaps * caps)
1174 {
1175   GstCaps *result;
1176   guint i, size;
1177 
1178   size = gst_caps_get_size (caps);
1179   result = gst_caps_new_empty ();
1180   for (i = 0; i < size; i++) {
1181     GstStructure *s = gst_caps_get_structure (caps, i);
1182     const GValue *formats = gst_structure_get_value (s, "format");
1183     GValue new_formats = { 0, };
1184     gboolean has_format = FALSE;
1185 
1186     /* FIXME what to do if formats are missing? */
1187     if (formats) {
1188       const GstVideoFormatInfo *info;
1189 
1190       if (GST_VALUE_HOLDS_LIST (formats)) {
1191         guint list_size = gst_value_list_get_size (formats);
1192         guint index;
1193 
1194         g_value_init (&new_formats, GST_TYPE_LIST);
1195 
1196         for (index = 0; index < list_size; index++) {
1197           const GValue *list_item = gst_value_list_get_value (formats, index);
1198 
1199           info =
1200               gst_video_format_get_info (gst_video_format_from_string
1201               (g_value_get_string (list_item)));
1202           if (!GST_VIDEO_FORMAT_INFO_HAS_ALPHA (info)) {
1203             has_format = TRUE;
1204             gst_value_list_append_value (&new_formats, list_item);
1205           }
1206         }
1207 
1208       } else if (G_VALUE_HOLDS_STRING (formats)) {
1209         info =
1210             gst_video_format_get_info (gst_video_format_from_string
1211             (g_value_get_string (formats)));
1212         if (!GST_VIDEO_FORMAT_INFO_HAS_ALPHA (info)) {
1213           has_format = TRUE;
1214           gst_value_init_and_copy (&new_formats, formats);
1215         }
1216 
1217       } else {
1218         g_assert_not_reached ();
1219         GST_WARNING ("Unexpected type for video 'format' field: %s",
1220             G_VALUE_TYPE_NAME (formats));
1221       }
1222 
1223       if (has_format) {
1224         s = gst_structure_copy (s);
1225         gst_structure_take_value (s, "format", &new_formats);
1226         gst_caps_append_structure (result, s);
1227       }
1228 
1229     }
1230   }
1231 
1232   return result;
1233 }
1234 
/* Computes the caps a sinkpad can accept: intersects what downstream of the
 * srcpad supports with the sinkpad template, widening fields that a
 * convert-pad can adapt by itself (size, format, colorimetry, ...), pinning
 * the interlace mode to the one already in use, and stripping alpha formats
 * when downstream cannot handle alpha.  Returns a new caps reference. */
static GstCaps *
gst_video_aggregator_pad_sink_getcaps (GstPad * pad, GstVideoAggregator * vagg,
    GstCaps * filter)
{
  GstCaps *srccaps;
  GstCaps *template_caps, *sink_template_caps;
  GstCaps *returned_caps;
  GstStructure *s;
  gint i, n;
  GstAggregator *agg = GST_AGGREGATOR (vagg);
  GstPad *srcpad = GST_PAD (agg->srcpad);
  gboolean has_alpha;
  GstVideoInterlaceMode interlace_mode;
  gboolean has_interlace_mode;

  template_caps = gst_pad_get_pad_template_caps (srcpad);

  GST_DEBUG_OBJECT (pad, "Get caps with filter: %" GST_PTR_FORMAT, filter);

  /* Ask downstream what it can accept, constrained by our src template */
  srccaps = gst_pad_peer_query_caps (srcpad, template_caps);
  srccaps = gst_caps_make_writable (srccaps);
  has_alpha = gst_video_aggregator_caps_has_alpha (srccaps);

  /* All sinkpads must share one interlace mode; use the first configured one */
  has_interlace_mode =
      gst_video_aggregator_get_sinkpads_interlace_mode (vagg, NULL,
      &interlace_mode);

  n = gst_caps_get_size (srccaps);
  for (i = 0; i < n; i++) {
    s = gst_caps_get_structure (srccaps, i);
    /* Any input framerate is acceptable; the output rate is decided later */
    gst_structure_set (s, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT,
        1, NULL);

    /* A convert pad rescales and converts itself, so don't constrain these */
    if (GST_IS_VIDEO_AGGREGATOR_CONVERT_PAD (pad)) {
      gst_structure_set (s, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
          "height", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);
      gst_structure_remove_fields (s, "colorimetry", "chroma-site", "format",
          "pixel-aspect-ratio", NULL);
    }

    if (has_interlace_mode)
      gst_structure_set (s, "interlace-mode", G_TYPE_STRING,
          gst_video_interlace_mode_to_string (interlace_mode), NULL);
  }

  if (filter) {
    returned_caps = gst_caps_intersect (srccaps, filter);
    gst_caps_unref (srccaps);
  } else {
    returned_caps = srccaps;
  }

  /* Downstream can't do alpha: remove alpha formats from our own template */
  sink_template_caps = gst_pad_get_pad_template_caps (pad);
  if (!has_alpha) {
    GstCaps *tmp = _get_non_alpha_caps (sink_template_caps);
    gst_caps_unref (sink_template_caps);
    sink_template_caps = tmp;
  }

  {
    GstCaps *intersect = gst_caps_intersect (returned_caps, sink_template_caps);
    gst_caps_unref (returned_caps);
    returned_caps = intersect;
  }

  gst_caps_unref (template_caps);
  gst_caps_unref (sink_template_caps);

  GST_DEBUG_OBJECT (pad, "Returning caps: %" GST_PTR_FORMAT, returned_caps);

  return returned_caps;
}
1307 
1308 static void
gst_video_aggregator_update_qos(GstVideoAggregator * vagg,gdouble proportion,GstClockTimeDiff diff,GstClockTime timestamp)1309 gst_video_aggregator_update_qos (GstVideoAggregator * vagg, gdouble proportion,
1310     GstClockTimeDiff diff, GstClockTime timestamp)
1311 {
1312   gboolean live;
1313 
1314   GST_DEBUG_OBJECT (vagg,
1315       "Updating QoS: proportion %lf, diff %" GST_STIME_FORMAT ", timestamp %"
1316       GST_TIME_FORMAT, proportion, GST_STIME_ARGS (diff),
1317       GST_TIME_ARGS (timestamp));
1318 
1319   live =
1320       GST_CLOCK_TIME_IS_VALID (gst_aggregator_get_latency (GST_AGGREGATOR
1321           (vagg)));
1322 
1323   GST_OBJECT_LOCK (vagg);
1324 
1325   vagg->priv->proportion = proportion;
1326   if (G_LIKELY (timestamp != GST_CLOCK_TIME_NONE)) {
1327     if (!live && G_UNLIKELY (diff > 0))
1328       vagg->priv->earliest_time =
1329           timestamp + 2 * diff + gst_util_uint64_scale_int_round (GST_SECOND,
1330           GST_VIDEO_INFO_FPS_D (&vagg->info),
1331           GST_VIDEO_INFO_FPS_N (&vagg->info));
1332     else
1333       vagg->priv->earliest_time = timestamp + diff;
1334   } else {
1335     vagg->priv->earliest_time = GST_CLOCK_TIME_NONE;
1336   }
1337   GST_OBJECT_UNLOCK (vagg);
1338 }
1339 
1340 static void
gst_video_aggregator_reset_qos(GstVideoAggregator * vagg)1341 gst_video_aggregator_reset_qos (GstVideoAggregator * vagg)
1342 {
1343   gst_video_aggregator_update_qos (vagg, 0.5, 0, GST_CLOCK_TIME_NONE);
1344   vagg->priv->qos_processed = vagg->priv->qos_dropped = 0;
1345 }
1346 
1347 static void
gst_video_aggregator_read_qos(GstVideoAggregator * vagg,gdouble * proportion,GstClockTime * time)1348 gst_video_aggregator_read_qos (GstVideoAggregator * vagg, gdouble * proportion,
1349     GstClockTime * time)
1350 {
1351   GST_OBJECT_LOCK (vagg);
1352   *proportion = vagg->priv->proportion;
1353   *time = vagg->priv->earliest_time;
1354   GST_OBJECT_UNLOCK (vagg);
1355 }
1356 
1357 static void
gst_video_aggregator_reset(GstVideoAggregator * vagg)1358 gst_video_aggregator_reset (GstVideoAggregator * vagg)
1359 {
1360   GstAggregator *agg = GST_AGGREGATOR (vagg);
1361   GList *l;
1362 
1363   gst_video_info_init (&vagg->info);
1364   vagg->priv->ts_offset = 0;
1365   vagg->priv->nframes = 0;
1366   vagg->priv->live = FALSE;
1367 
1368   GST_AGGREGATOR_PAD (agg->srcpad)->segment.position = -1;
1369 
1370   gst_video_aggregator_reset_qos (vagg);
1371 
1372   GST_OBJECT_LOCK (vagg);
1373   for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
1374     GstVideoAggregatorPad *p = l->data;
1375 
1376     gst_buffer_replace (&p->priv->buffer, NULL);
1377     p->priv->start_time = -1;
1378     p->priv->end_time = -1;
1379 
1380     gst_video_info_init (&p->info);
1381   }
1382   GST_OBJECT_UNLOCK (vagg);
1383 }
1384 
1385 static GstFlowReturn
gst_video_aggregator_fill_queues(GstVideoAggregator * vagg,GstClockTime output_start_running_time,GstClockTime output_end_running_time)1386 gst_video_aggregator_fill_queues (GstVideoAggregator * vagg,
1387     GstClockTime output_start_running_time,
1388     GstClockTime output_end_running_time)
1389 {
1390   GList *l;
1391   gboolean eos = TRUE;
1392   gboolean need_more_data = FALSE;
1393   gboolean need_reconfigure = FALSE;
1394 
1395   /* get a set of buffers into pad->priv->buffer that are within output_start_running_time
1396    * and output_end_running_time taking into account finished and unresponsive pads */
1397 
1398   GST_OBJECT_LOCK (vagg);
1399   for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
1400     GstVideoAggregatorPad *pad = l->data;
1401     GstSegment segment;
1402     GstAggregatorPad *bpad;
1403     GstBuffer *buf;
1404     gboolean is_eos;
1405 
1406     bpad = GST_AGGREGATOR_PAD (pad);
1407     GST_OBJECT_LOCK (bpad);
1408     segment = bpad->segment;
1409     GST_OBJECT_UNLOCK (bpad);
1410     is_eos = gst_aggregator_pad_is_eos (bpad);
1411 
1412     if (!is_eos)
1413       eos = FALSE;
1414     buf = gst_aggregator_pad_peek_buffer (bpad);
1415     if (buf) {
1416       GstClockTime start_time, end_time;
1417 
1418       start_time = GST_BUFFER_TIMESTAMP (buf);
1419       if (start_time == -1) {
1420         gst_buffer_unref (buf);
1421         GST_ERROR_OBJECT (pad, "Need timestamped buffers!");
1422         GST_OBJECT_UNLOCK (vagg);
1423         return GST_FLOW_ERROR;
1424       }
1425 
1426       /* FIXME: Make all this work with negative rates */
1427       end_time = GST_BUFFER_DURATION (buf);
1428 
1429       if (end_time == -1) {
1430         start_time = MAX (start_time, segment.start);
1431         start_time =
1432             gst_segment_to_running_time (&segment, GST_FORMAT_TIME, start_time);
1433 
1434         if (start_time >= output_end_running_time) {
1435           if (pad->priv->buffer) {
1436             GST_DEBUG_OBJECT (pad, "buffer duration is -1, start_time >= "
1437                 "output_end_running_time. Keeping previous buffer");
1438           } else {
1439             GST_DEBUG_OBJECT (pad, "buffer duration is -1, start_time >= "
1440                 "output_end_running_time. No previous buffer.");
1441           }
1442           gst_buffer_unref (buf);
1443           continue;
1444         } else if (start_time < output_start_running_time) {
1445           GST_DEBUG_OBJECT (pad, "buffer duration is -1, start_time < "
1446               "output_start_running_time.  Discarding old buffer");
1447           gst_buffer_replace (&pad->priv->buffer, buf);
1448           if (pad->priv->pending_vinfo.finfo) {
1449             pad->info = pad->priv->pending_vinfo;
1450             need_reconfigure = TRUE;
1451             pad->priv->pending_vinfo.finfo = NULL;
1452           }
1453           gst_buffer_unref (buf);
1454           gst_aggregator_pad_drop_buffer (bpad);
1455           need_more_data = TRUE;
1456           continue;
1457         }
1458         gst_buffer_unref (buf);
1459         buf = gst_aggregator_pad_pop_buffer (bpad);
1460         gst_buffer_replace (&pad->priv->buffer, buf);
1461         if (pad->priv->pending_vinfo.finfo) {
1462           pad->info = pad->priv->pending_vinfo;
1463           need_reconfigure = TRUE;
1464           pad->priv->pending_vinfo.finfo = NULL;
1465         }
1466         /* FIXME: Set start_time and end_time to something here? */
1467         gst_buffer_unref (buf);
1468         GST_DEBUG_OBJECT (pad, "buffer duration is -1");
1469         continue;
1470       }
1471 
1472       g_assert (start_time != -1 && end_time != -1);
1473       end_time += start_time;   /* convert from duration to position */
1474 
1475       /* Check if it's inside the segment */
1476       if (start_time >= segment.stop || end_time < segment.start) {
1477         GST_DEBUG_OBJECT (pad,
1478             "Buffer outside the segment : segment: [%" GST_TIME_FORMAT " -- %"
1479             GST_TIME_FORMAT "]" " Buffer [%" GST_TIME_FORMAT " -- %"
1480             GST_TIME_FORMAT "]", GST_TIME_ARGS (segment.stop),
1481             GST_TIME_ARGS (segment.start), GST_TIME_ARGS (start_time),
1482             GST_TIME_ARGS (end_time));
1483 
1484         gst_buffer_unref (buf);
1485         gst_aggregator_pad_drop_buffer (bpad);
1486 
1487         need_more_data = TRUE;
1488         continue;
1489       }
1490 
1491       /* Clip to segment and convert to running time */
1492       start_time = MAX (start_time, segment.start);
1493       if (segment.stop != -1)
1494         end_time = MIN (end_time, segment.stop);
1495       start_time =
1496           gst_segment_to_running_time (&segment, GST_FORMAT_TIME, start_time);
1497       end_time =
1498           gst_segment_to_running_time (&segment, GST_FORMAT_TIME, end_time);
1499       g_assert (start_time != -1 && end_time != -1);
1500 
1501       GST_TRACE_OBJECT (pad, "dealing with buffer %p start %" GST_TIME_FORMAT
1502           " end %" GST_TIME_FORMAT " out start %" GST_TIME_FORMAT
1503           " out end %" GST_TIME_FORMAT, buf, GST_TIME_ARGS (start_time),
1504           GST_TIME_ARGS (end_time), GST_TIME_ARGS (output_start_running_time),
1505           GST_TIME_ARGS (output_end_running_time));
1506 
1507       if (pad->priv->end_time != -1 && pad->priv->end_time > end_time) {
1508         GST_DEBUG_OBJECT (pad, "Buffer from the past, dropping");
1509         gst_buffer_unref (buf);
1510         gst_aggregator_pad_drop_buffer (bpad);
1511         continue;
1512       }
1513 
1514       if (end_time >= output_start_running_time
1515           && start_time < output_end_running_time) {
1516         GST_DEBUG_OBJECT (pad,
1517             "Taking new buffer with start time %" GST_TIME_FORMAT,
1518             GST_TIME_ARGS (start_time));
1519         gst_buffer_replace (&pad->priv->buffer, buf);
1520         if (pad->priv->pending_vinfo.finfo) {
1521           pad->info = pad->priv->pending_vinfo;
1522           need_reconfigure = TRUE;
1523           pad->priv->pending_vinfo.finfo = NULL;
1524         }
1525         pad->priv->start_time = start_time;
1526         pad->priv->end_time = end_time;
1527 
1528         gst_buffer_unref (buf);
1529         gst_aggregator_pad_drop_buffer (bpad);
1530         eos = FALSE;
1531       } else if (start_time >= output_end_running_time) {
1532         GST_DEBUG_OBJECT (pad, "Keeping buffer until %" GST_TIME_FORMAT,
1533             GST_TIME_ARGS (start_time));
1534         gst_buffer_unref (buf);
1535         eos = FALSE;
1536       } else {
1537         gst_buffer_replace (&pad->priv->buffer, buf);
1538         if (pad->priv->pending_vinfo.finfo) {
1539           pad->info = pad->priv->pending_vinfo;
1540           need_reconfigure = TRUE;
1541           pad->priv->pending_vinfo.finfo = NULL;
1542         }
1543         pad->priv->start_time = start_time;
1544         pad->priv->end_time = end_time;
1545         GST_DEBUG_OBJECT (pad,
1546             "replacing old buffer with a newer buffer, start %" GST_TIME_FORMAT
1547             " out end %" GST_TIME_FORMAT, GST_TIME_ARGS (start_time),
1548             GST_TIME_ARGS (output_end_running_time));
1549         gst_buffer_unref (buf);
1550         gst_aggregator_pad_drop_buffer (bpad);
1551 
1552         need_more_data = TRUE;
1553         continue;
1554       }
1555     } else {
1556       if (is_eos && pad->priv->repeat_after_eos) {
1557         eos = FALSE;
1558         GST_DEBUG_OBJECT (pad, "ignoring EOS and re-using previous buffer");
1559         continue;
1560       }
1561 
1562       if (pad->priv->end_time != -1) {
1563         if (pad->priv->end_time <= output_start_running_time) {
1564           pad->priv->start_time = pad->priv->end_time = -1;
1565           if (!is_eos) {
1566             GST_DEBUG ("I just need more data");
1567             need_more_data = TRUE;
1568           } else {
1569             gst_buffer_replace (&pad->priv->buffer, NULL);
1570           }
1571         } else if (is_eos) {
1572           eos = FALSE;
1573         }
1574       } else if (is_eos) {
1575         gst_buffer_replace (&pad->priv->buffer, NULL);
1576       }
1577     }
1578   }
1579   GST_OBJECT_UNLOCK (vagg);
1580 
1581   if (need_reconfigure)
1582     gst_pad_mark_reconfigure (GST_AGGREGATOR_SRC_PAD (vagg));
1583 
1584   if (need_more_data)
1585     return GST_AGGREGATOR_FLOW_NEED_DATA;
1586   if (eos)
1587     return GST_FLOW_EOS;
1588 
1589   return GST_FLOW_OK;
1590 }
1591 
1592 static gboolean
sync_pad_values(GstElement * vagg,GstPad * pad,gpointer user_data)1593 sync_pad_values (GstElement * vagg, GstPad * pad, gpointer user_data)
1594 {
1595   gint64 *out_stream_time = user_data;
1596 
1597   /* sync object properties on stream time */
1598   if (GST_CLOCK_TIME_IS_VALID (*out_stream_time))
1599     gst_object_sync_values (GST_OBJECT_CAST (pad), *out_stream_time);
1600 
1601   return TRUE;
1602 }
1603 
1604 static gboolean
prepare_frames(GstElement * agg,GstPad * pad,gpointer user_data)1605 prepare_frames (GstElement * agg, GstPad * pad, gpointer user_data)
1606 {
1607   GstVideoAggregatorPad *vpad = GST_VIDEO_AGGREGATOR_PAD_CAST (pad);
1608   GstVideoAggregatorPadClass *vaggpad_class =
1609       GST_VIDEO_AGGREGATOR_PAD_GET_CLASS (pad);
1610 
1611   memset (&vpad->priv->prepared_frame, 0, sizeof (GstVideoFrame));
1612 
1613   if (vpad->priv->buffer == NULL || !vaggpad_class->prepare_frame)
1614     return TRUE;
1615 
1616   return vaggpad_class->prepare_frame (vpad, GST_VIDEO_AGGREGATOR_CAST (agg),
1617       vpad->priv->buffer, &vpad->priv->prepared_frame);
1618 }
1619 
1620 static gboolean
clean_pad(GstElement * agg,GstPad * pad,gpointer user_data)1621 clean_pad (GstElement * agg, GstPad * pad, gpointer user_data)
1622 {
1623   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR_CAST (agg);
1624   GstVideoAggregatorPad *vpad = GST_VIDEO_AGGREGATOR_PAD_CAST (pad);
1625   GstVideoAggregatorPadClass *vaggpad_class =
1626       GST_VIDEO_AGGREGATOR_PAD_GET_CLASS (pad);
1627 
1628   if (vaggpad_class->clean_frame)
1629     vaggpad_class->clean_frame (vpad, vagg, &vpad->priv->prepared_frame);
1630 
1631   memset (&vpad->priv->prepared_frame, 0, sizeof (GstVideoFrame));
1632 
1633   return TRUE;
1634 }
1635 
/* Create, timestamp and fill one output buffer covering
 * [output_start_time, output_end_time). Returns the subclass's
 * aggregate_frames() flow return, or GST_FLOW_OK when the subclass
 * declines to produce a buffer right now (*outbuf left NULL). */
static GstFlowReturn
gst_video_aggregator_do_aggregate (GstVideoAggregator * vagg,
    GstClockTime output_start_time, GstClockTime output_end_time,
    GstBuffer ** outbuf)
{
  GstAggregator *agg = GST_AGGREGATOR (vagg);
  GstFlowReturn ret = GST_FLOW_OK;
  GstElementClass *klass = GST_ELEMENT_GET_CLASS (vagg);
  GstVideoAggregatorClass *vagg_klass = (GstVideoAggregatorClass *) klass;
  GstClockTime out_stream_time;

  /* Both vfuncs have defaults installed in class_init, so they must exist */
  g_assert (vagg_klass->aggregate_frames != NULL);
  g_assert (vagg_klass->create_output_buffer != NULL);

  if ((ret = vagg_klass->create_output_buffer (vagg, outbuf)) != GST_FLOW_OK) {
    GST_WARNING_OBJECT (vagg, "Could not get an output buffer, reason: %s",
        gst_flow_get_name (ret));
    return ret;
  }
  if (*outbuf == NULL) {
    /* sub-class doesn't want to generate output right now */
    return GST_FLOW_OK;
  }

  GST_BUFFER_TIMESTAMP (*outbuf) = output_start_time;
  GST_BUFFER_DURATION (*outbuf) = output_end_time - output_start_time;

  /* Stream time of the output start, used below to sync controlled pad
   * property values; srcpad object lock protects the segment */
  GST_OBJECT_LOCK (agg->srcpad);
  out_stream_time =
      gst_segment_to_stream_time (&GST_AGGREGATOR_PAD (agg->srcpad)->segment,
      GST_FORMAT_TIME, output_start_time);
  GST_OBJECT_UNLOCK (agg->srcpad);

  /* Sync pad properties to the stream time */
  gst_element_foreach_sink_pad (GST_ELEMENT_CAST (vagg), sync_pad_values,
      &out_stream_time);

  /* Convert all the frames the subclass has before aggregating */
  gst_element_foreach_sink_pad (GST_ELEMENT_CAST (vagg), prepare_frames, NULL);

  ret = vagg_klass->aggregate_frames (vagg, *outbuf);

  /* Always undo prepare_frames(), even when aggregation failed */
  gst_element_foreach_sink_pad (GST_ELEMENT_CAST (vagg), clean_pad, NULL);

  return ret;
}
1682 
/* Perform qos calculations before processing the next frame. Returns the
 * jitter of the frame's running time against the earliest allowed time:
 * a positive value means the frame is late and should be dropped, while
 * zero or a negative value (including the -1 returned when no QoS
 * decision is possible) means the frame should be processed. */
static gint64
gst_video_aggregator_do_qos (GstVideoAggregator * vagg, GstClockTime timestamp)
{
  GstAggregator *agg = GST_AGGREGATOR (vagg);
  GstClockTime qostime, earliest_time;
  gdouble proportion;
  gint64 jitter;

  /* no timestamp, can't do QoS => process frame */
  if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (timestamp))) {
    GST_LOG_OBJECT (vagg, "invalid timestamp, can't do QoS, process frame");
    return -1;
  }

  /* get latest QoS observation values */
  gst_video_aggregator_read_qos (vagg, &proportion, &earliest_time);

  /* skip qos if we have no observation (yet) => process frame */
  if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (earliest_time))) {
    GST_LOG_OBJECT (vagg, "no observation yet, process frame");
    return -1;
  }

  /* qos is done on running time */
  qostime =
      gst_segment_to_running_time (&GST_AGGREGATOR_PAD (agg->srcpad)->segment,
      GST_FORMAT_TIME, timestamp);

  /* see how our next timestamp relates to the latest qos timestamp */
  GST_LOG_OBJECT (vagg, "qostime %" GST_TIME_FORMAT ", earliest %"
      GST_TIME_FORMAT, GST_TIME_ARGS (qostime), GST_TIME_ARGS (earliest_time));

  /* GST_CLOCK_DIFF (a, b) is b - a: positive jitter means our frame falls
   * before the earliest acceptable time, i.e. we are late */
  jitter = GST_CLOCK_DIFF (qostime, earliest_time);
  if (qostime != GST_CLOCK_TIME_NONE && jitter > 0) {
    GST_DEBUG_OBJECT (vagg, "we are late, drop frame");
    return jitter;
  }

  GST_LOG_OBJECT (vagg, "process frame");
  return jitter;
}
1726 
/* Advance the source segment position by one output frame duration when a
 * timeout happens before output caps are known, so live pipelines keep
 * making progress. Holds the element object lock while touching the
 * segment and frame counter. */
static void
gst_video_aggregator_advance_on_timeout (GstVideoAggregator * vagg)
{
  GstAggregator *agg = GST_AGGREGATOR (vagg);
  guint64 frame_duration;
  gint fps_d, fps_n;
  GstSegment *agg_segment = &GST_AGGREGATOR_PAD (agg->srcpad)->segment;

  GST_OBJECT_LOCK (agg);
  /* First run: start from the segment edge matching playback direction */
  if (agg_segment->position == -1) {
    if (agg_segment->rate > 0.0)
      agg_segment->position = agg_segment->start;
    else
      agg_segment->position = agg_segment->stop;
  }

  /* Advance position */
  fps_d = GST_VIDEO_INFO_FPS_D (&vagg->info) ?
      GST_VIDEO_INFO_FPS_D (&vagg->info) : 1;
  fps_n = GST_VIDEO_INFO_FPS_N (&vagg->info) ?
      GST_VIDEO_INFO_FPS_N (&vagg->info) : 25;
  /* Default to 25/1 if no "best fps" is known */
  frame_duration = gst_util_uint64_scale (GST_SECOND, fps_d, fps_n);
  if (agg_segment->rate > 0.0)
    agg_segment->position += frame_duration;
  else if (agg_segment->position > frame_duration)
    agg_segment->position -= frame_duration;
  else
    agg_segment->position = 0;  /* clamp: never go below zero */
  vagg->priv->nframes++;
  GST_OBJECT_UNLOCK (agg);
}
1759 
/* GstAggregator::aggregate implementation: compute the next output time
 * window, fill the per-pad queues, run QoS, and either produce and push
 * one output buffer or post a QoS drop message. */
static GstFlowReturn
gst_video_aggregator_aggregate (GstAggregator * agg, gboolean timeout)
{
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
  GstClockTime output_start_time, output_end_time;
  GstClockTime output_start_running_time, output_end_running_time;
  GstBuffer *outbuf = NULL;
  GstFlowReturn flow_ret;
  gint64 jitter;
  GstSegment *agg_segment = &GST_AGGREGATOR_PAD (agg->srcpad)->segment;

  GST_VIDEO_AGGREGATOR_LOCK (vagg);

  /* No negotiated format yet: can't output. On timeout still advance the
   * segment position so we don't stall. */
  if (GST_VIDEO_INFO_FORMAT (&vagg->info) == GST_VIDEO_FORMAT_UNKNOWN) {
    if (timeout)
      gst_video_aggregator_advance_on_timeout (vagg);
    flow_ret = GST_AGGREGATOR_FLOW_NEED_DATA;
    goto unlock_and_return;
  }

  /* Output window starts at the current segment position, clamped into
   * the segment */
  output_start_time = agg_segment->position;
  if (agg_segment->position == -1 || agg_segment->position < agg_segment->start)
    output_start_time = agg_segment->start;

  if (vagg->priv->nframes == 0) {
    vagg->priv->ts_offset = output_start_time;
    GST_DEBUG_OBJECT (vagg, "New ts offset %" GST_TIME_FORMAT,
        GST_TIME_ARGS (output_start_time));
  }

  /* End time is derived from the frame counter rather than accumulated
   * per frame, avoiding rounding drift; without a framerate it is open */
  if (GST_VIDEO_INFO_FPS_N (&vagg->info) == 0) {
    output_end_time = -1;
  } else {
    output_end_time =
        vagg->priv->ts_offset +
        gst_util_uint64_scale_round (vagg->priv->nframes + 1,
        GST_SECOND * GST_VIDEO_INFO_FPS_D (&vagg->info),
        GST_VIDEO_INFO_FPS_N (&vagg->info));
  }

  if (agg_segment->stop != -1)
    output_end_time = MIN (output_end_time, agg_segment->stop);

  output_start_running_time =
      gst_segment_to_running_time (agg_segment, GST_FORMAT_TIME,
      output_start_time);
  output_end_running_time =
      gst_segment_to_running_time (agg_segment, GST_FORMAT_TIME,
      output_end_time);

  /* A zero-length window means the segment is exhausted */
  if (output_end_time == output_start_time) {
    flow_ret = GST_FLOW_EOS;
  } else {
    flow_ret =
        gst_video_aggregator_fill_queues (vagg, output_start_running_time,
        output_end_running_time);
  }

  if (flow_ret == GST_AGGREGATOR_FLOW_NEED_DATA && !timeout) {
    GST_DEBUG_OBJECT (vagg, "Need more data for decisions");
    goto unlock_and_return;
  } else if (flow_ret == GST_FLOW_EOS) {
    GST_DEBUG_OBJECT (vagg, "All sinkpads are EOS -- forwarding");
    goto unlock_and_return;
  } else if (flow_ret == GST_FLOW_ERROR) {
    GST_WARNING_OBJECT (vagg, "Error collecting buffers");
    goto unlock_and_return;
  }

  /* It is possible that gst_video_aggregator_fill_queues() marked the pad
   * for reconfiguration. In this case we have to reconfigure before continuing
   * because we have picked a new buffer with different caps than before from
   * one of the sink pads and continuing here may lead to a crash.
   * https://bugzilla.gnome.org/show_bug.cgi?id=780682
   */
  if (gst_pad_needs_reconfigure (GST_AGGREGATOR_SRC_PAD (vagg))) {
    GST_DEBUG_OBJECT (vagg, "Need reconfigure");
    flow_ret = GST_AGGREGATOR_FLOW_NEED_DATA;
    goto unlock_and_return;
  }

  GST_DEBUG_OBJECT (vagg,
      "Producing buffer for %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
      ", running time start %" GST_TIME_FORMAT ", running time end %"
      GST_TIME_FORMAT, GST_TIME_ARGS (output_start_time),
      GST_TIME_ARGS (output_end_time),
      GST_TIME_ARGS (output_start_running_time),
      GST_TIME_ARGS (output_end_running_time));

  /* Non-positive jitter: on time, aggregate. Positive: late, drop the
   * frame and post a QoS message instead. */
  jitter = gst_video_aggregator_do_qos (vagg, output_start_time);
  if (jitter <= 0) {
    flow_ret = gst_video_aggregator_do_aggregate (vagg, output_start_time,
        output_end_time, &outbuf);
    if (flow_ret != GST_FLOW_OK)
      goto done;
    vagg->priv->qos_processed++;
  } else {
    GstMessage *msg;

    vagg->priv->qos_dropped++;

    msg =
        gst_message_new_qos (GST_OBJECT_CAST (vagg), vagg->priv->live,
        output_start_running_time, gst_segment_to_stream_time (agg_segment,
            GST_FORMAT_TIME, output_start_time), output_start_time,
        output_end_time - output_start_time);
    gst_message_set_qos_values (msg, jitter, vagg->priv->proportion, 1000000);
    gst_message_set_qos_stats (msg, GST_FORMAT_BUFFERS,
        vagg->priv->qos_processed, vagg->priv->qos_dropped);
    gst_element_post_message (GST_ELEMENT_CAST (vagg), msg);

    flow_ret = GST_FLOW_OK;
  }

  /* Push without holding our lock */
  GST_VIDEO_AGGREGATOR_UNLOCK (vagg);
  if (outbuf) {
    GST_DEBUG_OBJECT (vagg,
        "Pushing buffer with ts %" GST_TIME_FORMAT " and duration %"
        GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
        GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)));

    flow_ret = gst_aggregator_finish_buffer (agg, outbuf);
  }

  /* Re-take the lock to advance state for the next cycle */
  GST_VIDEO_AGGREGATOR_LOCK (vagg);
  vagg->priv->nframes++;
  agg_segment->position = output_end_time;
  GST_VIDEO_AGGREGATOR_UNLOCK (vagg);

  return flow_ret;

done:
  if (outbuf)
    gst_buffer_unref (outbuf);
unlock_and_return:
  GST_VIDEO_AGGREGATOR_UNLOCK (vagg);
  return flow_ret;
}
1898 
1899 /* FIXME, the duration query should reflect how long you will produce
1900  * data, that is the amount of stream time until you will emit EOS.
1901  *
1902  * For synchronized aggregating this is always the max of all the durations
1903  * of upstream since we emit EOS when all of them finished.
1904  *
1905  * We don't do synchronized aggregating so this really depends on where the
 * streams were punched in and what their relative offsets are against
1907  * each other which we can get from the first timestamps we see.
1908  *
1909  * When we add a new stream (or remove a stream) the duration might
1910  * also become invalid again and we need to post a new DURATION
1911  * message to notify this fact to the parent.
1912  * For now we take the max of all the upstream elements so the simple
1913  * cases work at least somewhat.
1914  */
/* Answer a DURATION query with the maximum duration reported by all sink
 * pad peers, or -1 (unknown) as soon as any peer reports unknown.
 * Returns FALSE when any peer query fails or the iterator errors out. */
static gboolean
gst_video_aggregator_query_duration (GstVideoAggregator * vagg,
    GstQuery * query)
{
  GValue item = { 0 };
  gint64 max;
  gboolean res;
  GstFormat format;
  GstIterator *it;
  gboolean done;

  /* parse format */
  gst_query_parse_duration (query, &format, NULL);

  max = -1;
  res = TRUE;
  done = FALSE;

  /* Take maximum of all durations */
  it = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (vagg));
  while (!done) {
    switch (gst_iterator_next (it, &item)) {
      case GST_ITERATOR_DONE:
        done = TRUE;
        break;
      case GST_ITERATOR_OK:
      {
        GstPad *pad;
        gint64 duration;

        /* borrowed reference, released via g_value_reset() below */
        pad = g_value_get_object (&item);

        /* ask sink peer for duration */
        res &= gst_pad_peer_query_duration (pad, format, &duration);
        /* take max from all valid return values */
        if (res) {
          /* valid unknown length, stop searching */
          if (duration == -1) {
            max = duration;
            done = TRUE;
          }
          /* else see if bigger than current max */
          else if (duration > max)
            max = duration;
        }
        g_value_reset (&item);
        break;
      }
      case GST_ITERATOR_RESYNC:
        /* pad list changed under us: restart the fold from scratch */
        max = -1;
        res = TRUE;
        gst_iterator_resync (it);
        break;
      default:
        res = FALSE;
        done = TRUE;
        break;
    }
  }
  g_value_unset (&item);
  gst_iterator_free (it);

  if (res) {
    /* and store the max */
    GST_DEBUG_OBJECT (vagg, "Total duration in format %s: %"
        GST_TIME_FORMAT, gst_format_get_name (format), GST_TIME_ARGS (max));
    gst_query_set_duration (query, format, max);
  }

  return res;
}
1986 
1987 static gboolean
gst_video_aggregator_src_query(GstAggregator * agg,GstQuery * query)1988 gst_video_aggregator_src_query (GstAggregator * agg, GstQuery * query)
1989 {
1990   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
1991   gboolean res = FALSE;
1992   GstSegment *agg_segment = &GST_AGGREGATOR_PAD (agg->srcpad)->segment;
1993 
1994   switch (GST_QUERY_TYPE (query)) {
1995     case GST_QUERY_POSITION:
1996     {
1997       GstFormat format;
1998 
1999       gst_query_parse_position (query, &format, NULL);
2000 
2001       switch (format) {
2002         case GST_FORMAT_TIME:
2003           gst_query_set_position (query, format,
2004               gst_segment_to_stream_time (agg_segment, GST_FORMAT_TIME,
2005                   agg_segment->position));
2006           res = TRUE;
2007           break;
2008         default:
2009           break;
2010       }
2011       break;
2012     }
2013     case GST_QUERY_DURATION:
2014       res = gst_video_aggregator_query_duration (vagg, query);
2015       break;
2016     case GST_QUERY_LATENCY:
2017       res =
2018           GST_AGGREGATOR_CLASS (gst_video_aggregator_parent_class)->src_query
2019           (agg, query);
2020 
2021       if (res) {
2022         gst_query_parse_latency (query, &vagg->priv->live, NULL, NULL);
2023       }
2024       break;
2025     default:
2026       res =
2027           GST_AGGREGATOR_CLASS (gst_video_aggregator_parent_class)->src_query
2028           (agg, query);
2029       break;
2030   }
2031   return res;
2032 }
2033 
/* GstAggregator::src_event: record QoS feedback from downstream; all
 * events (including QOS and SEEK) are then forwarded to the base class. */
static gboolean
gst_video_aggregator_src_event (GstAggregator * agg, GstEvent * event)
{
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_QOS:
    {
      GstQOSType type;
      GstClockTimeDiff diff;
      GstClockTime timestamp;
      gdouble proportion;

      /* Store proportion/jitter so gst_video_aggregator_do_qos() can
       * decide whether to drop late frames */
      gst_event_parse_qos (event, &type, &proportion, &diff, &timestamp);
      gst_video_aggregator_update_qos (vagg, proportion, diff, timestamp);
      break;
    }
    case GST_EVENT_SEEK:
    {
      GST_DEBUG_OBJECT (vagg, "Handling SEEK event");
    }
      /* fall through: actual seek handling is left to the base class */
    default:
      break;
  }

  return
      GST_AGGREGATOR_CLASS (gst_video_aggregator_parent_class)->src_event (agg,
      event);
}
2063 
/* GstAggregator::flush: reset timing state so output restarts cleanly
 * after a flushing seek. */
static GstFlowReturn
gst_video_aggregator_flush (GstAggregator * agg)
{
  GList *l;
  gdouble abs_rate;
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
  GstSegment *agg_segment = &GST_AGGREGATOR_PAD (agg->srcpad)->segment;

  GST_INFO_OBJECT (agg, "Flushing");
  GST_OBJECT_LOCK (vagg);
  abs_rate = ABS (agg_segment->rate);
  for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
    GstVideoAggregatorPad *p = l->data;

    /* Convert to the output segment rate */
    /* NOTE(review): abs_rate was just read from this very segment, so
     * this condition can never be true and the rescaling below is dead
     * code. The comparison was presumably meant against a per-pad rate —
     * confirm the original intent before changing behavior. */
    if (ABS (agg_segment->rate) != abs_rate) {
      if (ABS (agg_segment->rate) != 1.0 && p->priv->buffer) {
        p->priv->start_time /= ABS (agg_segment->rate);
        p->priv->end_time /= ABS (agg_segment->rate);
      }
      if (abs_rate != 1.0 && p->priv->buffer) {
        p->priv->start_time *= abs_rate;
        p->priv->end_time *= abs_rate;
      }
    }
  }
  GST_OBJECT_UNLOCK (vagg);

  /* Restart timestamping from scratch on the next aggregate cycle */
  agg_segment->position = -1;
  vagg->priv->ts_offset = 0;
  vagg->priv->nframes = 0;

  gst_video_aggregator_reset_qos (vagg);
  return GST_FLOW_OK;
}
2099 
/* GstAggregator::sink_event: handle CAPS fully here (consuming the
 * event), sanity-check SEGMENT and reset QoS on it, and chain all
 * unconsumed events up to the base class. */
static gboolean
gst_video_aggregator_sink_event (GstAggregator * agg, GstAggregatorPad * bpad,
    GstEvent * event)
{
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
  GstVideoAggregatorPad *pad = GST_VIDEO_AGGREGATOR_PAD (bpad);
  gboolean ret = TRUE;

  GST_DEBUG_OBJECT (pad, "Got %s event on pad %s:%s",
      GST_EVENT_TYPE_NAME (event), GST_DEBUG_PAD_NAME (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;

      gst_event_parse_caps (event, &caps);
      ret =
          gst_video_aggregator_pad_sink_setcaps (GST_PAD (pad),
          GST_OBJECT (vagg), caps);
      /* CAPS is fully handled here: consume it, don't chain up */
      gst_event_unref (event);
      event = NULL;
      break;
    }
    case GST_EVENT_SEGMENT:{
      GstSegment seg;
      gst_event_copy_segment (event, &seg);

      /* the aggregator base class guarantees TIME segments on sink pads */
      g_assert (seg.format == GST_FORMAT_TIME);
      /* a new segment invalidates previous QoS observations */
      gst_video_aggregator_reset_qos (vagg);
      break;
    }
    default:
      break;
  }

  /* events not consumed above still need base-class processing */
  if (event != NULL)
    return GST_AGGREGATOR_CLASS (gst_video_aggregator_parent_class)->sink_event
        (agg, bpad, event);

  return ret;
}
2142 
2143 static gboolean
gst_video_aggregator_start(GstAggregator * agg)2144 gst_video_aggregator_start (GstAggregator * agg)
2145 {
2146   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
2147 
2148   gst_caps_replace (&vagg->priv->current_caps, NULL);
2149 
2150   return TRUE;
2151 }
2152 
2153 static gboolean
gst_video_aggregator_stop(GstAggregator * agg)2154 gst_video_aggregator_stop (GstAggregator * agg)
2155 {
2156   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
2157 
2158   gst_video_aggregator_reset (vagg);
2159 
2160   return TRUE;
2161 }
2162 
2163 /* GstElement vmethods */
/* GstElement::request_new_pad: chain up to create the pad, then give it a
 * default zorder (on top of all existing pads) and keep the sink pad list
 * sorted by zorder. Returns NULL when the base class refuses the pad. */
static GstPad *
gst_video_aggregator_request_new_pad (GstElement * element,
    GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
{
  GstVideoAggregator *vagg;
  GstVideoAggregatorPad *vaggpad;

  vagg = GST_VIDEO_AGGREGATOR (element);

  vaggpad = (GstVideoAggregatorPad *)
      GST_ELEMENT_CLASS (gst_video_aggregator_parent_class)->request_new_pad
      (element, templ, req_name, caps);

  if (vaggpad == NULL)
    return NULL;

  GST_OBJECT_LOCK (vagg);
  /* new pads stack on top; -1 marks start/end times as not yet known */
  vaggpad->priv->zorder = GST_ELEMENT (vagg)->numsinkpads;
  vaggpad->priv->start_time = -1;
  vaggpad->priv->end_time = -1;
  element->sinkpads = g_list_sort (element->sinkpads,
      (GCompareFunc) pad_zorder_compare);
  GST_OBJECT_UNLOCK (vagg);

  return GST_PAD (vaggpad);
}
2190 
/* GstElement::release_pad: drop the pad's queued buffer, reset the whole
 * aggregator when this was the last remaining sink pad, and force output
 * renegotiation since the output caps may depend on the released pad. */
static void
gst_video_aggregator_release_pad (GstElement * element, GstPad * pad)
{
  GstVideoAggregator *vagg = NULL;
  GstVideoAggregatorPad *vaggpad;
  gboolean last_pad;

  vagg = GST_VIDEO_AGGREGATOR (element);
  vaggpad = GST_VIDEO_AGGREGATOR_PAD (pad);

  GST_VIDEO_AGGREGATOR_LOCK (vagg);

  GST_OBJECT_LOCK (vagg);
  /* TRUE when the pad being released is the only sink pad left */
  last_pad = (GST_ELEMENT (vagg)->numsinkpads - 1 == 0);
  GST_OBJECT_UNLOCK (vagg);

  if (last_pad)
    gst_video_aggregator_reset (vagg);

  gst_buffer_replace (&vaggpad->priv->buffer, NULL);

  GST_ELEMENT_CLASS (gst_video_aggregator_parent_class)->release_pad
      (GST_ELEMENT (vagg), pad);

  gst_pad_mark_reconfigure (GST_AGGREGATOR_SRC_PAD (vagg));

  GST_VIDEO_AGGREGATOR_UNLOCK (vagg);
  return;
}
2220 
/* GstAggregator::propose_allocation: advertise GstVideoMeta support to
 * upstream elements; no buffer pool is proposed here. */
static gboolean
gst_video_aggregator_propose_allocation (GstAggregator * agg,
    GstAggregatorPad * pad, GstQuery * decide_query, GstQuery * query)
{
  gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);

  return TRUE;
}
2229 
/* GstAggregator::decide_allocation: configure the buffer pool and
 * allocator used for output buffers from the downstream allocation query,
 * enforcing at least 16-byte alignment and falling back to a generic
 * GstVideoBufferPool when downstream's pool rejects our configuration. */
static gboolean
gst_video_aggregator_decide_allocation (GstAggregator * agg, GstQuery * query)
{
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
  GstAllocationParams params = { 0, 15, 0, 0 };  /* align mask 15 = 16 bytes */
  guint i;
  GstBufferPool *pool;
  GstAllocator *allocator;
  guint size, min, max;
  gboolean update = FALSE;
  GstStructure *config = NULL;
  GstCaps *caps = NULL;

  if (gst_query_get_n_allocation_params (query) == 0) {
    gst_query_add_allocation_param (query, NULL, &params);
  } else {
    for (i = 0; i < gst_query_get_n_allocation_params (query); i++) {
      GstAllocator *allocator;  /* intentionally shadows the outer one */

      /* bump every proposed allocator to at least 16-byte alignment */
      gst_query_parse_nth_allocation_param (query, i, &allocator, &params);
      params.align = MAX (params.align, 15);
      gst_query_set_nth_allocation_param (query, i, allocator, &params);
    }
  }

  /* use the first (preferred) allocator for our own pool config below */
  gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);

  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);

    /* adjust size */
    size = MAX (size, vagg->info.size);
    update = TRUE;
  } else {
    pool = NULL;
    size = vagg->info.size;
    min = max = 0;
    update = FALSE;
  }

  gst_query_parse_allocation (query, &caps, NULL);

  /* no downstream pool, make our own */
  if (pool == NULL)
    pool = gst_video_buffer_pool_new ();

  config = gst_buffer_pool_get_config (pool);

  gst_buffer_pool_config_set_params (config, caps, size, min, max);
  gst_buffer_pool_config_set_allocator (config, allocator, &params);
  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
  }

  /* buffer pool may have to do some changes */
  if (!gst_buffer_pool_set_config (pool, config)) {
    config = gst_buffer_pool_get_config (pool);

    /* If changes are not acceptable, fallback to generic pool */
    if (!gst_buffer_pool_config_validate_params (config, caps, size, min, max)) {
      GST_DEBUG_OBJECT (agg, "unsupported pool, making new pool");

      gst_object_unref (pool);
      pool = gst_video_buffer_pool_new ();
      gst_buffer_pool_config_set_params (config, caps, size, min, max);
      gst_buffer_pool_config_set_allocator (config, allocator, &params);

      if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
        gst_buffer_pool_config_add_option (config,
            GST_BUFFER_POOL_OPTION_VIDEO_META);
      }
    }

    if (!gst_buffer_pool_set_config (pool, config))
      goto config_failed;
  }

  if (update)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  else
    gst_query_add_allocation_pool (query, pool, size, min, max);

  if (pool)
    gst_object_unref (pool);
  if (allocator)
    gst_object_unref (allocator);

  return TRUE;

config_failed:
  if (pool)
    gst_object_unref (pool);
  if (allocator)
    gst_object_unref (allocator);

  GST_ELEMENT_ERROR (agg, RESOURCE, SETTINGS,
      ("Failed to configure the buffer pool"),
      ("Configuration is most likely invalid, please report this issue."));
  return FALSE;
}
2331 
2332 static GstFlowReturn
gst_video_aggregator_create_output_buffer(GstVideoAggregator * videoaggregator,GstBuffer ** outbuf)2333 gst_video_aggregator_create_output_buffer (GstVideoAggregator * videoaggregator,
2334     GstBuffer ** outbuf)
2335 {
2336   GstAggregator *aggregator = GST_AGGREGATOR (videoaggregator);
2337   GstBufferPool *pool;
2338   GstFlowReturn ret = GST_FLOW_OK;
2339 
2340   pool = gst_aggregator_get_buffer_pool (aggregator);
2341 
2342   if (pool) {
2343     if (!gst_buffer_pool_is_active (pool)) {
2344       if (!gst_buffer_pool_set_active (pool, TRUE)) {
2345         GST_ELEMENT_ERROR (videoaggregator, RESOURCE, SETTINGS,
2346             ("failed to activate bufferpool"),
2347             ("failed to activate bufferpool"));
2348         return GST_FLOW_ERROR;
2349       }
2350     }
2351 
2352     ret = gst_buffer_pool_acquire_buffer (pool, outbuf, NULL);
2353     gst_object_unref (pool);
2354   } else {
2355     guint outsize;
2356     GstAllocator *allocator;
2357     GstAllocationParams params;
2358 
2359     gst_aggregator_get_allocator (aggregator, &allocator, &params);
2360 
2361     outsize = GST_VIDEO_INFO_SIZE (&videoaggregator->info);
2362     *outbuf = gst_buffer_new_allocate (allocator, outsize, &params);
2363 
2364     if (allocator)
2365       gst_object_unref (allocator);
2366 
2367     if (*outbuf == NULL) {
2368       GST_ELEMENT_ERROR (videoaggregator, RESOURCE, NO_SPACE_LEFT,
2369           (NULL), ("Could not acquire buffer of size: %d", outsize));
2370       ret = GST_FLOW_ERROR;
2371     }
2372   }
2373   return ret;
2374 }
2375 
2376 static gboolean
gst_video_aggregator_pad_sink_acceptcaps(GstPad * pad,GstVideoAggregator * vagg,GstCaps * caps)2377 gst_video_aggregator_pad_sink_acceptcaps (GstPad * pad,
2378     GstVideoAggregator * vagg, GstCaps * caps)
2379 {
2380   gboolean ret;
2381   GstCaps *modified_caps;
2382   GstCaps *accepted_caps;
2383   GstCaps *template_caps;
2384   gboolean had_current_caps = TRUE;
2385   gint i, n;
2386   GstStructure *s;
2387   GstAggregator *agg = GST_AGGREGATOR (vagg);
2388 
2389   GST_DEBUG_OBJECT (pad, "%" GST_PTR_FORMAT, caps);
2390 
2391   accepted_caps = gst_pad_get_current_caps (GST_PAD (agg->srcpad));
2392 
2393   template_caps = gst_pad_get_pad_template_caps (GST_PAD (agg->srcpad));
2394 
2395   if (accepted_caps == NULL) {
2396     accepted_caps = template_caps;
2397     had_current_caps = FALSE;
2398   }
2399 
2400   accepted_caps = gst_caps_make_writable (accepted_caps);
2401 
2402   GST_LOG_OBJECT (pad, "src caps %" GST_PTR_FORMAT, accepted_caps);
2403 
2404   n = gst_caps_get_size (accepted_caps);
2405   for (i = 0; i < n; i++) {
2406     s = gst_caps_get_structure (accepted_caps, i);
2407     gst_structure_set (s, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT,
2408         1, NULL);
2409 
2410     if (GST_IS_VIDEO_AGGREGATOR_CONVERT_PAD (pad)) {
2411       gst_structure_set (s, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
2412           "height", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);
2413       gst_structure_remove_fields (s, "colorimetry", "chroma-site", "format",
2414           "pixel-aspect-ratio", NULL);
2415     }
2416   }
2417 
2418   modified_caps = gst_caps_intersect (accepted_caps, template_caps);
2419 
2420   ret = gst_caps_can_intersect (caps, accepted_caps);
2421   GST_DEBUG_OBJECT (pad, "%saccepted caps %" GST_PTR_FORMAT,
2422       (ret ? "" : "not "), caps);
2423   gst_caps_unref (accepted_caps);
2424   gst_caps_unref (modified_caps);
2425   if (had_current_caps)
2426     gst_caps_unref (template_caps);
2427   return ret;
2428 }
2429 
2430 static gboolean
gst_video_aggregator_sink_query(GstAggregator * agg,GstAggregatorPad * bpad,GstQuery * query)2431 gst_video_aggregator_sink_query (GstAggregator * agg, GstAggregatorPad * bpad,
2432     GstQuery * query)
2433 {
2434   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
2435   GstVideoAggregatorPad *pad = GST_VIDEO_AGGREGATOR_PAD (bpad);
2436   gboolean ret = FALSE;
2437 
2438   switch (GST_QUERY_TYPE (query)) {
2439     case GST_QUERY_CAPS:
2440     {
2441       GstCaps *filter, *caps;
2442 
2443       gst_query_parse_caps (query, &filter);
2444       caps =
2445           gst_video_aggregator_pad_sink_getcaps (GST_PAD (pad), vagg, filter);
2446       gst_query_set_caps_result (query, caps);
2447       gst_caps_unref (caps);
2448       ret = TRUE;
2449       break;
2450     }
2451     case GST_QUERY_ACCEPT_CAPS:
2452     {
2453       GstCaps *caps;
2454 
2455       gst_query_parse_accept_caps (query, &caps);
2456       ret =
2457           gst_video_aggregator_pad_sink_acceptcaps (GST_PAD (pad), vagg, caps);
2458       gst_query_set_accept_caps_result (query, ret);
2459       ret = TRUE;
2460       break;
2461     }
2462     default:
2463       ret =
2464           GST_AGGREGATOR_CLASS (gst_video_aggregator_parent_class)->sink_query
2465           (agg, bpad, query);
2466       break;
2467   }
2468   return ret;
2469 }
2470 
2471 /* GObject vmethods */
2472 static void
gst_video_aggregator_finalize(GObject * o)2473 gst_video_aggregator_finalize (GObject * o)
2474 {
2475   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (o);
2476 
2477   g_mutex_clear (&vagg->priv->lock);
2478 
2479   G_OBJECT_CLASS (gst_video_aggregator_parent_class)->finalize (o);
2480 }
2481 
2482 static void
gst_video_aggregator_dispose(GObject * o)2483 gst_video_aggregator_dispose (GObject * o)
2484 {
2485   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (o);
2486 
2487   gst_caps_replace (&vagg->priv->current_caps, NULL);
2488 
2489   G_OBJECT_CLASS (gst_video_aggregator_parent_class)->dispose (o);
2490 }
2491 
/* GObject::get_property: the base class defines no properties, so any
 * id reaching this handler is invalid. */
static void
gst_video_aggregator_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec)
{
  switch (prop_id) {
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
2502 
/* GObject::set_property: the base class defines no properties, so any
 * id reaching this handler is invalid. */
static void
gst_video_aggregator_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec)
{
  switch (prop_id) {
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
2513 
2514 /* GObject boilerplate */
/* Class initializer: wire up GObject, GstElement and GstAggregator
 * vfuncs and install the default overridable GstVideoAggregator
 * behaviors. */
static void
gst_video_aggregator_class_init (GstVideoAggregatorClass * klass)
{
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstElementClass *gstelement_class = (GstElementClass *) klass;
  GstAggregatorClass *agg_class = (GstAggregatorClass *) klass;

  GST_DEBUG_CATEGORY_INIT (gst_video_aggregator_debug, "videoaggregator", 0,
      "base video aggregator");

  gst_video_aggregator_parent_class = g_type_class_peek_parent (klass);

  /* manual private-data registration used instead of G_DEFINE_TYPE */
  if (video_aggregator_private_offset != 0)
    g_type_class_adjust_private_offset (klass,
        &video_aggregator_private_offset);

  gobject_class->finalize = gst_video_aggregator_finalize;
  gobject_class->dispose = gst_video_aggregator_dispose;

  gobject_class->get_property = gst_video_aggregator_get_property;
  gobject_class->set_property = gst_video_aggregator_set_property;

  /* GstElement vfuncs */
  gstelement_class->request_new_pad =
      GST_DEBUG_FUNCPTR (gst_video_aggregator_request_new_pad);
  gstelement_class->release_pad =
      GST_DEBUG_FUNCPTR (gst_video_aggregator_release_pad);

  /* GstAggregator vfuncs */
  agg_class->start = gst_video_aggregator_start;
  agg_class->stop = gst_video_aggregator_stop;
  agg_class->sink_query = gst_video_aggregator_sink_query;
  agg_class->sink_event = gst_video_aggregator_sink_event;
  agg_class->flush = gst_video_aggregator_flush;
  agg_class->aggregate = gst_video_aggregator_aggregate;
  agg_class->src_event = gst_video_aggregator_src_event;
  agg_class->src_query = gst_video_aggregator_src_query;
  agg_class->get_next_time = gst_aggregator_simple_get_next_time;
  agg_class->update_src_caps = gst_video_aggregator_default_update_src_caps;
  agg_class->fixate_src_caps = gst_video_aggregator_default_fixate_src_caps;
  agg_class->negotiated_src_caps =
      gst_video_aggregator_default_negotiated_src_caps;
  agg_class->decide_allocation = gst_video_aggregator_decide_allocation;
  agg_class->propose_allocation = gst_video_aggregator_propose_allocation;

  /* defaults that subclasses may override */
  klass->find_best_format = gst_video_aggregator_find_best_format;
  klass->create_output_buffer = gst_video_aggregator_create_output_buffer;
  klass->update_caps = gst_video_aggregator_default_update_caps;

  /* Register the pad class */
  g_type_class_ref (GST_TYPE_VIDEO_AGGREGATOR_PAD);
}
2565 
/* Instance initializer: sets up private data, the instance lock and the
 * initial state.  Order matters: priv must be resolved before anything
 * touches it, and the mutex must exist before gst_video_aggregator_reset()
 * runs. */
static void
gst_video_aggregator_init (GstVideoAggregator * vagg,
    GstVideoAggregatorClass * klass)
{
  vagg->priv = gst_video_aggregator_get_instance_private (vagg);

  /* No output caps negotiated yet */
  vagg->priv->current_caps = NULL;

  g_mutex_init (&vagg->priv->lock);

  /* initialize variables */
  gst_video_aggregator_reset (vagg);
}
2579