1 /* GStreamer
2  * Copyright (C) 2011 David Schleef <ds@entropywave.com>
3  * Copyright (C) 2014 Sebastian Dröge <sebastian@centricular.com>
4  * Copyright (C) 2015 Florian Langlois <florian.langlois@fr.thalesgroup.com>
5  *
6  * This library is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Library General Public
8  * License as published by the Free Software Foundation; either
9  * version 2 of the License, or (at your option) any later version.
10  *
11  * This library is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14  * Library General Public License for more details.
15  *
16  * You should have received a copy of the GNU Library General Public
17  * License along with this library; if not, write to the
18  * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
19  * Boston, MA 02110-1335, USA.
20  */
21 /**
22  * SECTION:element-decklinkvideosrc
23  * @short_description: Inputs Video from a BlackMagic DeckLink Device
24  *
25  * Capture Video from a BlackMagic DeckLink Device.
26  *
27  * ## Sample pipeline
28  * |[
29  * gst-launch-1.0 \
30  *   decklinkvideosrc device-number=0 connection=sdi mode=1080p25 ! \
31  *   autovideosink
32  * ]|
33  * Capturing 1080p25 video from the SDI-In of Card 0. Devices are numbered
34  * starting with 0.
35  *
36  * # Duplex-Mode:
37  * Certain DeckLink Cards like the Duo2 or the Quad2 contain two or four
38  * independent SDI units with two connectors each. These units can operate either
39  * in half- or in full-duplex mode.
40  *
41  * The Duplex-Mode of a Card can be configured using the `duplex-mode`-Property.
42  * Cards that do not support Duplex-Modes are not influenced by the property.
43  *
44  * ## Half-Duplex-Mode (default):
45  * By default decklinkvideosrc will configure them into half-duplex mode, so that
46  * each connector acts as if it were an independent DeckLink Card which can either
47  * be used as an Input or as an Output. In this mode the Duo2 can be used as 4 SDI
48  * In-/Outputs and the Quad2 as 8 SDI In-/Outputs.
49  *
50  * |[
51  * gst-launch-1.0 \
52  *  decklinkvideosrc device-number=0 mode=1080p25 ! c. \
53  *  decklinkvideosrc device-number=1 mode=1080p25 ! c. \
54  *  decklinkvideosrc device-number=2 mode=1080p25 ! c. \
55  *  decklinkvideosrc device-number=3 mode=1080p25 ! c. \
56  *  compositor name=c \
57  *    sink_0::xpos=0   sink_0::ypos=0   sink_0::width=960 sink_0::height=540 \
58  *    sink_1::xpos=960 sink_1::ypos=0   sink_1::width=960 sink_1::height=540 \
59  *    sink_2::xpos=0   sink_2::ypos=540 sink_2::width=960 sink_2::height=540 \
60  *    sink_3::xpos=960 sink_3::ypos=540 sink_3::width=960 sink_3::height=540 ! \
61  *    video/x-raw,width=1920,height=1080 ! \
62  *    autovideosink
63  * ]|
64  * Capture 1080p25 from the first 4 units in the System (i.e. the 4 Connectors of
65  * a Duo2 Card) and compose them into a 2x2 grid.
66  *
67  * |[
68  *  gst-launch-1.0 \
69  *    videotestsrc foreground-color=0x0000ff00 ! decklinkvideosink device-number=0 mode=1080p25 \
70  *    decklinkvideosrc device-number=1 mode=1080p25 ! autovideosink \
71  *    decklinkvideosrc device-number=2 mode=1080p25 ! autovideosink \
72  *    videotestsrc foreground-color=0x00ff0000 ! decklinkvideosink device-number=3 mode=1080p25
73  * ]|
74  * Capture 1080p25 from the second and third unit in the System, and
75  * play out a test screen with colored snow on the first and fourth unit
76  * (i.e. the Connectors 1-4 of a Duo2 Card).
77  *
78  * ## Device-Number-Mapping in Half-Duplex-Mode
79  * The device-number to connector-mapping in half-duplex-mode is as follows for the Duo2
80  * - `device-number=0` SDI1
81  * - `device-number=1` SDI3
82  * - `device-number=2` SDI2
83  * - `device-number=3` SDI4
84  *
85  * And for the Quad2
86  * - `device-number=0` SDI1
87  * - `device-number=1` SDI3
88  * - `device-number=2` SDI5
89  * - `device-number=3` SDI7
90  * - `device-number=4` SDI2
91  * - `device-number=5` SDI4
92  * - `device-number=6` SDI6
93  * - `device-number=7` SDI8
94  *
95  * ## Full-Duplex-Mode:
96  * When operating in full-duplex mode, two connectors of a unit are combined into
97  * a single device, performing extra processing with the second connection.
98  *
99  * This mode is most useful for Playout. See @decklinkvideosink.
100  * For capturing, the options are as follows:
101  *
102  * When capturing from a duplex-unit, the secondary port outputs the captured image
103  * unchanged.
104  * |[
105  * gst-launch-1.0 \
106  *   decklinkvideosrc device-number=0 mode=1080p25 duplex-mode=full ! \
107  *   autovideosink
108  * ]|
109  *
110  * When simultaneously capturing and playing out onto the same device, the
111  * secondary port outputs the played out video. Note that this can also be
112  * achieved using half-duplex mode.
113  * |[
114  * gst-launch-1.0 \
115  *   decklinkvideosrc device-number=0 mode=1080p25 duplex-mode=full ! \
116  *   videoflip video-direction=vert ! \
117  *   decklinkvideosink device-number=0 mode=1080p25 duplex-mode=full
118  * ]|
119  * Capture video on the primary port of device 0 and output a flipped version
120  * of the video on the secondary port of the same device.
121  *
122  * ## Device-Number-Mapping in Full-Duplex-Mode
123  * The device-number to connector-mapping in full-duplex-mode is as follows for the Duo2
124  * - `device-number=0` SDI1 primary, SDI2 secondary
125  * - `device-number=1` SDI3 primary, SDI4 secondary
126  *
127  * And for the Quad2
128  * - `device-number=0` SDI1 primary, SDI2 secondary
129  * - `device-number=1` SDI3 primary, SDI4 secondary
130  * - `device-number=2` SDI5 primary, SDI6 secondary
131  * - `device-number=3` SDI7 primary, SDI8 secondary
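 *
 * ## Configuring the element from application code
 * The properties shown in the pipelines above can also be set from application
 * code. A minimal sketch (variable name is illustrative, error handling and
 * pipeline setup omitted; enum properties are set via their string nicks):
 * |[
 * GstElement *src = gst_element_factory_make ("decklinkvideosrc", NULL);
 * g_object_set (src, "device-number", 0, NULL);
 * gst_util_set_object_arg (G_OBJECT (src), "mode", "1080p25");
 * gst_util_set_object_arg (G_OBJECT (src), "duplex-mode", "full");
 * ]|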
132  */
133 
134 #ifdef HAVE_CONFIG_H
135 #include "config.h"
136 #endif
137 
138 #include "gstdecklinkvideosrc.h"
139 #include <string.h>
140 
141 GST_DEBUG_CATEGORY_STATIC (gst_decklink_video_src_debug);
142 #define GST_CAT_DEFAULT gst_decklink_video_src_debug
143 
144 #define DEFAULT_MODE (GST_DECKLINK_MODE_AUTO)
145 #define DEFAULT_CONNECTION (GST_DECKLINK_CONNECTION_AUTO)
146 #define DEFAULT_BUFFER_SIZE (5)
147 #define DEFAULT_OUTPUT_STREAM_TIME (FALSE)
148 #define DEFAULT_SKIP_FIRST_TIME (0)
149 #define DEFAULT_DROP_NO_SIGNAL_FRAMES (FALSE)
150 #define DEFAULT_OUTPUT_CC (FALSE)
151 
152 #ifndef ABSDIFF
153 #define ABSDIFF(x, y) ( (x) > (y) ? ((x) - (y)) : ((y) - (x)) )
154 #endif
155 
156 enum
157 {
158   PROP_0,
159   PROP_MODE,
160   PROP_CONNECTION,
161   PROP_DEVICE_NUMBER,
162   PROP_BUFFER_SIZE,
163   PROP_VIDEO_FORMAT,
164   PROP_DUPLEX_MODE,
165   PROP_TIMECODE_FORMAT,
166   PROP_OUTPUT_STREAM_TIME,
167   PROP_SKIP_FIRST_TIME,
168   PROP_DROP_NO_SIGNAL_FRAMES,
169   PROP_SIGNAL,
170   PROP_HW_SERIAL_NUMBER,
171   PROP_OUTPUT_CC
172 };
173 
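/* A captured frame together with its timing information and timecode, queued
 * in self->current_frames between the DeckLink capture callback
 * (gst_decklink_video_src_got_frame) and the streaming thread
 * (gst_decklink_video_src_create). */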
174 typedef struct
175 {
176   IDeckLinkVideoInputFrame *frame;
177   GstClockTime timestamp, duration;
178   GstClockTime stream_timestamp;
179   GstClockTime stream_duration;
180   GstClockTime hardware_timestamp;
181   GstClockTime hardware_duration;
182   GstDecklinkModeEnum mode;
183   BMDPixelFormat format;
184   GstVideoTimeCode *tc;
185   gboolean no_signal;
186 } CaptureFrame;
187 
188 static void
189 capture_frame_clear (CaptureFrame * frame)
190 {
191   frame->frame->Release ();
192   if (frame->tc)
193     gst_video_time_code_free (frame->tc);
194   memset (frame, 0, sizeof (*frame));
195 }
196 
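/* Holds the references that keep a wrapped IDeckLinkVideoInputFrame (and the
 * IDeckLinkInput it came from) alive while a GstBuffer uses its memory;
 * released in video_frame_free(). */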
197 typedef struct
198 {
199   IDeckLinkVideoInputFrame *frame;
200   IDeckLinkInput *input;
201 } VideoFrame;
202 
203 static void
204 video_frame_free (void *data)
205 {
206   VideoFrame *frame = (VideoFrame *) data;
207 
208   frame->frame->Release ();
209   frame->input->Release ();
210   g_free (frame);
211 }
212 
213 static void gst_decklink_video_src_set_property (GObject * object,
214     guint property_id, const GValue * value, GParamSpec * pspec);
215 static void gst_decklink_video_src_get_property (GObject * object,
216     guint property_id, GValue * value, GParamSpec * pspec);
217 static void gst_decklink_video_src_finalize (GObject * object);
218 
219 static GstStateChangeReturn
220 gst_decklink_video_src_change_state (GstElement * element,
221     GstStateChange transition);
222 
223 static gboolean gst_decklink_video_src_query (GstBaseSrc * bsrc,
224     GstQuery * query);
225 static gboolean gst_decklink_video_src_unlock (GstBaseSrc * bsrc);
226 static gboolean gst_decklink_video_src_unlock_stop (GstBaseSrc * bsrc);
227 
228 static GstFlowReturn gst_decklink_video_src_create (GstPushSrc * psrc,
229     GstBuffer ** buffer);
230 
231 static gboolean gst_decklink_video_src_open (GstDecklinkVideoSrc * self);
232 static gboolean gst_decklink_video_src_close (GstDecklinkVideoSrc * self);
233 
234 static gboolean gst_decklink_video_src_stop (GstDecklinkVideoSrc * self);
235 
236 static void gst_decklink_video_src_start_streams (GstElement * element);
237 
238 #define parent_class gst_decklink_video_src_parent_class
239 G_DEFINE_TYPE (GstDecklinkVideoSrc, gst_decklink_video_src, GST_TYPE_PUSH_SRC);
240 
241 static void
242 gst_decklink_video_src_class_init (GstDecklinkVideoSrcClass * klass)
243 {
244   GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
245   GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
246   GstBaseSrcClass *basesrc_class = GST_BASE_SRC_CLASS (klass);
247   GstPushSrcClass *pushsrc_class = GST_PUSH_SRC_CLASS (klass);
248   GstCaps *templ_caps;
249 
250   gobject_class->set_property = gst_decklink_video_src_set_property;
251   gobject_class->get_property = gst_decklink_video_src_get_property;
252   gobject_class->finalize = gst_decklink_video_src_finalize;
253 
254   element_class->change_state =
255       GST_DEBUG_FUNCPTR (gst_decklink_video_src_change_state);
256 
257   basesrc_class->query = GST_DEBUG_FUNCPTR (gst_decklink_video_src_query);
258   basesrc_class->negotiate = NULL;
259   basesrc_class->unlock = GST_DEBUG_FUNCPTR (gst_decklink_video_src_unlock);
260   basesrc_class->unlock_stop =
261       GST_DEBUG_FUNCPTR (gst_decklink_video_src_unlock_stop);
262 
263   pushsrc_class->create = GST_DEBUG_FUNCPTR (gst_decklink_video_src_create);
264 
265   g_object_class_install_property (gobject_class, PROP_MODE,
266       g_param_spec_enum ("mode", "Playback Mode",
267           "Video Mode to use for playback",
268           GST_TYPE_DECKLINK_MODE, DEFAULT_MODE,
269           (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
270               G_PARAM_CONSTRUCT)));
271 
272   g_object_class_install_property (gobject_class, PROP_CONNECTION,
273       g_param_spec_enum ("connection", "Connection",
274           "Video input connection to use",
275           GST_TYPE_DECKLINK_CONNECTION, DEFAULT_CONNECTION,
276           (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
277               G_PARAM_CONSTRUCT)));
278 
279   g_object_class_install_property (gobject_class, PROP_DEVICE_NUMBER,
280       g_param_spec_int ("device-number", "Device number",
281           "Output device instance to use", 0, G_MAXINT, 0,
282           (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
283               G_PARAM_CONSTRUCT)));
284 
285   g_object_class_install_property (gobject_class, PROP_BUFFER_SIZE,
286       g_param_spec_uint ("buffer-size", "Buffer Size",
287           "Size of internal buffer in number of video frames", 1,
288           G_MAXINT, DEFAULT_BUFFER_SIZE,
289           (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
290 
291   g_object_class_install_property (gobject_class, PROP_VIDEO_FORMAT,
292       g_param_spec_enum ("video-format", "Video format",
293           "Video format type to use for input (Only use auto for mode=auto)",
294           GST_TYPE_DECKLINK_VIDEO_FORMAT, GST_DECKLINK_VIDEO_FORMAT_AUTO,
295           (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
296               G_PARAM_CONSTRUCT)));
297 
298   g_object_class_install_property (gobject_class, PROP_DUPLEX_MODE,
299       g_param_spec_enum ("duplex-mode", "Duplex mode",
300           "Certain DeckLink devices such as the DeckLink Quad 2 and the "
301           "DeckLink Duo 2 support configuration of the duplex mode of "
302           "individual sub-devices."
303           "A sub-device configured as full-duplex will use two connectors, "
304           "which allows simultaneous capture and playback, internal keying, "
305           "and fill & key scenarios."
306           "A half-duplex sub-device will use a single connector as an "
307           "individual capture or playback channel.",
308           GST_TYPE_DECKLINK_DUPLEX_MODE, GST_DECKLINK_DUPLEX_MODE_HALF,
309           (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
310               G_PARAM_CONSTRUCT)));
311 
312   g_object_class_install_property (gobject_class, PROP_TIMECODE_FORMAT,
313       g_param_spec_enum ("timecode-format", "Timecode format",
314           "Timecode format type to use for input",
315           GST_TYPE_DECKLINK_TIMECODE_FORMAT,
316           GST_DECKLINK_TIMECODE_FORMAT_RP188ANY,
317           (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
318               G_PARAM_CONSTRUCT)));
319 
320   g_object_class_install_property (gobject_class, PROP_OUTPUT_STREAM_TIME,
321       g_param_spec_boolean ("output-stream-time", "Output Stream Time",
322           "Output stream time directly instead of translating to pipeline clock",
323           DEFAULT_OUTPUT_STREAM_TIME,
324           (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
325 
326   g_object_class_install_property (gobject_class, PROP_SKIP_FIRST_TIME,
327       g_param_spec_uint64 ("skip-first-time", "Skip First Time",
328           "Skip that much time of initial frames after starting", 0,
329           G_MAXUINT64, DEFAULT_SKIP_FIRST_TIME,
330           (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
331 
332   g_object_class_install_property (gobject_class, PROP_DROP_NO_SIGNAL_FRAMES,
333       g_param_spec_boolean ("drop-no-signal-frames", "Drop No Signal Frames",
334           "Drop frames that are marked as having no input signal",
335           DEFAULT_DROP_NO_SIGNAL_FRAMES,
336           (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
337 
338   g_object_class_install_property (gobject_class, PROP_SIGNAL,
339       g_param_spec_boolean ("signal", "Input signal available",
340           "True if there is a valid input signal available",
341           FALSE, (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
342 
343   g_object_class_install_property (gobject_class, PROP_HW_SERIAL_NUMBER,
344       g_param_spec_string ("hw-serial-number", "Hardware serial number",
345           "The serial number (hardware ID) of the Decklink card",
346           NULL, (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
347 
348   g_object_class_install_property (gobject_class, PROP_OUTPUT_CC,
349       g_param_spec_boolean ("output-cc", "Output Closed Caption",
350           "Extract and output CC as GstMeta (if present)",
351           DEFAULT_OUTPUT_CC,
352           (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
353 
354   templ_caps = gst_decklink_mode_get_template_caps (TRUE);
355   gst_element_class_add_pad_template (element_class,
356       gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, templ_caps));
357   gst_caps_unref (templ_caps);
358 
359   gst_element_class_set_static_metadata (element_class, "Decklink Video Source",
360       "Video/Source/Hardware", "Decklink Source",
361       "David Schleef <ds@entropywave.com>, "
362       "Sebastian Dröge <sebastian@centricular.com>");
363 
364   GST_DEBUG_CATEGORY_INIT (gst_decklink_video_src_debug, "decklinkvideosrc",
365       0, "debug category for decklinkvideosrc element");
366 }
367 
368 static void
369 gst_decklink_video_src_init (GstDecklinkVideoSrc * self)
370 {
371   self->mode = DEFAULT_MODE;
372   self->caps_mode = GST_DECKLINK_MODE_AUTO;
373   self->caps_format = bmdFormat8BitYUV;
374   self->connection = DEFAULT_CONNECTION;
375   self->device_number = 0;
376   self->buffer_size = DEFAULT_BUFFER_SIZE;
377   self->video_format = GST_DECKLINK_VIDEO_FORMAT_AUTO;
378   self->duplex_mode = bmdDuplexModeHalf;
379   self->timecode_format = bmdTimecodeRP188Any;
380   self->no_signal = FALSE;
381   self->output_stream_time = DEFAULT_OUTPUT_STREAM_TIME;
382   self->skip_first_time = DEFAULT_SKIP_FIRST_TIME;
383   self->drop_no_signal_frames = DEFAULT_DROP_NO_SIGNAL_FRAMES;
384 
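  /* Sliding window of (stream time, pipeline time) sample pairs used to derive
   * the clock mapping via linear regression; times_temp is scratch space for
   * gst_calculate_linear_regression(). */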
385   self->window_size = 64;
386   self->times = g_new (GstClockTime, 4 * self->window_size);
387   self->times_temp = self->times + 2 * self->window_size;
388   self->window_fill = 0;
389   self->window_skip = 1;
390   self->window_skip_count = 0;
391 
392   gst_base_src_set_live (GST_BASE_SRC (self), TRUE);
393   gst_base_src_set_format (GST_BASE_SRC (self), GST_FORMAT_TIME);
394 
395   gst_pad_use_fixed_caps (GST_BASE_SRC_PAD (self));
396 
397   g_mutex_init (&self->lock);
398   g_cond_init (&self->cond);
399 
400   self->current_frames =
401       gst_queue_array_new_for_struct (sizeof (CaptureFrame),
402       DEFAULT_BUFFER_SIZE);
403 }
404 
405 void
406 gst_decklink_video_src_set_property (GObject * object, guint property_id,
407     const GValue * value, GParamSpec * pspec)
408 {
409   GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (object);
410 
411   switch (property_id) {
412     case PROP_MODE:
413       self->mode = (GstDecklinkModeEnum) g_value_get_enum (value);
414       /* setting the default value for caps_mode here: if mode==auto then we
415        * configure caps_mode from the caps, if mode!=auto we set caps_mode to
416        * the same value as the mode. so self->caps_mode is essentially
417        * self->mode with mode=auto filtered into whatever we got from the
418        * negotiation */
419       if (self->mode != GST_DECKLINK_MODE_AUTO)
420         self->caps_mode = self->mode;
421       break;
422     case PROP_CONNECTION:
423       self->connection = (GstDecklinkConnectionEnum) g_value_get_enum (value);
424       break;
425     case PROP_DEVICE_NUMBER:
426       self->device_number = g_value_get_int (value);
427       break;
428     case PROP_BUFFER_SIZE:
429       self->buffer_size = g_value_get_uint (value);
430       break;
431     case PROP_VIDEO_FORMAT:
432       self->video_format = (GstDecklinkVideoFormat) g_value_get_enum (value);
433       switch (self->video_format) {
434         case GST_DECKLINK_VIDEO_FORMAT_8BIT_YUV:
435         case GST_DECKLINK_VIDEO_FORMAT_10BIT_YUV:
436         case GST_DECKLINK_VIDEO_FORMAT_8BIT_ARGB:
437         case GST_DECKLINK_VIDEO_FORMAT_8BIT_BGRA:
438           self->caps_format =
439               gst_decklink_pixel_format_from_type (self->video_format);
440         case GST_DECKLINK_VIDEO_FORMAT_AUTO:
441           break;
442         default:
443           GST_ELEMENT_WARNING (GST_ELEMENT (self), CORE, NOT_IMPLEMENTED,
444               ("Format %d not supported", self->video_format), (NULL));
445           break;
446       }
447       break;
448     case PROP_DUPLEX_MODE:
449       self->duplex_mode =
450           gst_decklink_duplex_mode_from_enum ((GstDecklinkDuplexMode)
451           g_value_get_enum (value));
452       break;
453     case PROP_TIMECODE_FORMAT:
454       self->timecode_format =
455           gst_decklink_timecode_format_from_enum ((GstDecklinkTimecodeFormat)
456           g_value_get_enum (value));
457       break;
458     case PROP_OUTPUT_STREAM_TIME:
459       self->output_stream_time = g_value_get_boolean (value);
460       break;
461     case PROP_SKIP_FIRST_TIME:
462       self->skip_first_time = g_value_get_uint64 (value);
463       break;
464     case PROP_DROP_NO_SIGNAL_FRAMES:
465       self->drop_no_signal_frames = g_value_get_boolean (value);
466       break;
467     case PROP_OUTPUT_CC:
468       self->output_cc = g_value_get_boolean (value);
469       break;
470     default:
471       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
472       break;
473   }
474 }
475 
476 void
477 gst_decklink_video_src_get_property (GObject * object, guint property_id,
478     GValue * value, GParamSpec * pspec)
479 {
480   GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (object);
481 
482   switch (property_id) {
483     case PROP_MODE:
484       g_value_set_enum (value, self->mode);
485       break;
486     case PROP_CONNECTION:
487       g_value_set_enum (value, self->connection);
488       break;
489     case PROP_DEVICE_NUMBER:
490       g_value_set_int (value, self->device_number);
491       break;
492     case PROP_BUFFER_SIZE:
493       g_value_set_uint (value, self->buffer_size);
494       break;
495     case PROP_VIDEO_FORMAT:
496       g_value_set_enum (value, self->video_format);
497       break;
498     case PROP_DUPLEX_MODE:
499       g_value_set_enum (value,
500           gst_decklink_duplex_mode_to_enum (self->duplex_mode));
501       break;
502     case PROP_TIMECODE_FORMAT:
503       g_value_set_enum (value,
504           gst_decklink_timecode_format_to_enum (self->timecode_format));
505       break;
506     case PROP_OUTPUT_STREAM_TIME:
507       g_value_set_boolean (value, self->output_stream_time);
508       break;
509     case PROP_SKIP_FIRST_TIME:
510       g_value_set_uint64 (value, self->skip_first_time);
511       break;
512     case PROP_DROP_NO_SIGNAL_FRAMES:
513       g_value_set_boolean (value, self->drop_no_signal_frames);
514       break;
515     case PROP_SIGNAL:
516       g_value_set_boolean (value, !self->no_signal);
517       break;
518     case PROP_HW_SERIAL_NUMBER:
519       if (self->input)
520         g_value_set_string (value, self->input->hw_serial_number);
521       else
522         g_value_set_string (value, NULL);
523       break;
524     case PROP_OUTPUT_CC:
525       g_value_set_boolean (value, self->output_cc);
526       break;
527     default:
528       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
529       break;
530   }
531 }
532 
533 void
534 gst_decklink_video_src_finalize (GObject * object)
535 {
536   GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (object);
537 
538   g_free (self->times);
539   self->times = NULL;
540   g_mutex_clear (&self->lock);
541   g_cond_clear (&self->cond);
542 
543   if (self->current_frames) {
544     while (gst_queue_array_get_length (self->current_frames) > 0) {
545       CaptureFrame *tmp = (CaptureFrame *)
546           gst_queue_array_pop_head_struct (self->current_frames);
547       capture_frame_clear (tmp);
548     }
549     gst_queue_array_free (self->current_frames);
550     self->current_frames = NULL;
551   }
552 
553   G_OBJECT_CLASS (parent_class)->finalize (object);
554 }
555 
556 static gboolean
557 gst_decklink_video_src_start (GstDecklinkVideoSrc * self)
558 {
559   const GstDecklinkMode *mode;
560   BMDVideoInputFlags flags;
561   HRESULT ret;
562   BMDPixelFormat format;
563 
564   g_mutex_lock (&self->input->lock);
565   if (self->input->video_enabled) {
566     g_mutex_unlock (&self->input->lock);
567     return TRUE;
568   }
569   g_mutex_unlock (&self->input->lock);
570 
571   if (self->input->config && self->connection != GST_DECKLINK_CONNECTION_AUTO) {
572     ret = self->input->config->SetInt (bmdDeckLinkConfigVideoInputConnection,
573         gst_decklink_get_connection (self->connection));
574     if (ret != S_OK) {
575       GST_ERROR_OBJECT (self,
576           "Failed to set configuration (input source): 0x%08lx",
577           (unsigned long) ret);
578       return FALSE;
579     }
580 
581     if (self->connection == GST_DECKLINK_CONNECTION_COMPOSITE) {
582       ret = self->input->config->SetInt (bmdDeckLinkConfigAnalogVideoInputFlags,
583           bmdAnalogVideoFlagCompositeSetup75);
584       if (ret != S_OK) {
585         GST_ERROR_OBJECT (self,
586             "Failed to set configuration (composite setup): 0x%08lx",
587             (unsigned long) ret);
588         return FALSE;
589       }
590     }
591   }
592 
593   flags = bmdVideoInputFlagDefault;
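  /* With mode=auto, hardware input-format detection is enabled below if the
   * card supports it; without that capability automatic mode selection cannot
   * work, so startup fails. */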
594   if (self->mode == GST_DECKLINK_MODE_AUTO) {
595     bool autoDetection = false;
596 
597     if (self->input->attributes) {
598       ret =
599           self->input->
600           attributes->GetFlag (BMDDeckLinkSupportsInputFormatDetection,
601           &autoDetection);
602       if (ret != S_OK) {
603         GST_ERROR_OBJECT (self,
604             "Failed to get attribute (autodetection): 0x%08lx",
605             (unsigned long) ret);
606         return FALSE;
607       }
608       if (autoDetection)
609         flags |= bmdVideoInputEnableFormatDetection;
610     }
611     if (!autoDetection) {
612       GST_ERROR_OBJECT (self, "Failed to activate auto-detection");
613       return FALSE;
614     }
615   }
616 
617   mode = gst_decklink_get_mode (self->mode);
618   g_assert (mode != NULL);
619 
620   format = self->caps_format;
621   ret = self->input->input->EnableVideoInput (mode->mode, format, flags);
622   if (ret != S_OK) {
623     GST_WARNING_OBJECT (self, "Failed to enable video input: 0x%08lx",
624         (unsigned long) ret);
625     return FALSE;
626   }
627 
628   g_mutex_lock (&self->input->lock);
629   self->input->mode = mode;
630   self->input->video_enabled = TRUE;
631   if (self->input->start_streams)
632     self->input->start_streams (self->input->videosrc);
633   g_mutex_unlock (&self->input->lock);
634 
635   return TRUE;
636 }
637 
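/* Maintains a linear mapping
 *   pipeline time = num/den * (stream time - xbase) + b
 * between the DeckLink stream clock and the pipeline clock. The mapping is
 * recalculated from a sliding window of measurements and applied gradually,
 * moving by at most 5% of a frame duration per update to avoid timestamp
 * jumps. */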
638 static void
639 gst_decklink_video_src_update_time_mapping (GstDecklinkVideoSrc * self,
640     GstClockTime capture_time, GstClockTime stream_time)
641 {
642   if (self->window_skip_count == 0) {
643     GstClockTime num, den, b, xbase;
644     gdouble r_squared;
645 
646     self->times[2 * self->window_fill] = stream_time;
647     self->times[2 * self->window_fill + 1] = capture_time;
648 
649     self->window_fill++;
650     self->window_skip_count++;
651     if (self->window_skip_count >= self->window_skip)
652       self->window_skip_count = 0;
653 
654     if (self->window_fill >= self->window_size) {
655       guint fps =
656           ((gdouble) self->info.fps_n + self->info.fps_d -
657           1) / ((gdouble) self->info.fps_d);
658 
659       /* Start by updating every frame; once the window is full only every
660        * second frame, then every fourth, etc., up to once every 4 seconds */
661       if (self->window_skip < 4 * fps)
662         self->window_skip *= 2;
663       if (self->window_skip >= 4 * fps)
664         self->window_skip = 4 * fps;
665 
666       self->window_fill = 0;
667       self->window_filled = TRUE;
668     }
669 
670     /* First sample ever, create some basic mapping to start */
671     if (!self->window_filled && self->window_fill == 1) {
672       self->current_time_mapping.xbase = stream_time;
673       self->current_time_mapping.b = capture_time;
674       self->current_time_mapping.num = 1;
675       self->current_time_mapping.den = 1;
676       self->next_time_mapping_pending = FALSE;
677     }
678 
679     /* Only bother calculating anything here once we had enough measurements,
680      * i.e. let's take the window size as a start */
681     if (self->window_filled &&
682         gst_calculate_linear_regression (self->times, self->times_temp,
683             self->window_size, &num, &den, &b, &xbase, &r_squared)) {
684 
685       GST_DEBUG_OBJECT (self,
686           "Calculated new time mapping: pipeline time = %lf * (stream time - %"
687           G_GUINT64_FORMAT ") + %" G_GUINT64_FORMAT " (%lf)",
688           ((gdouble) num) / ((gdouble) den), xbase, b, r_squared);
689 
690       self->next_time_mapping.xbase = xbase;
691       self->next_time_mapping.b = b;
692       self->next_time_mapping.num = num;
693       self->next_time_mapping.den = den;
694       self->next_time_mapping_pending = TRUE;
695     }
696   } else {
697     self->window_skip_count++;
698     if (self->window_skip_count >= self->window_skip)
699       self->window_skip_count = 0;
700   }
701 
702   if (self->next_time_mapping_pending) {
703     GstClockTime expected, new_calculated, diff, max_diff;
704 
705     expected =
706         gst_clock_adjust_with_calibration (NULL, stream_time,
707         self->current_time_mapping.xbase, self->current_time_mapping.b,
708         self->current_time_mapping.num, self->current_time_mapping.den);
709     new_calculated =
710         gst_clock_adjust_with_calibration (NULL, stream_time,
711         self->next_time_mapping.xbase, self->next_time_mapping.b,
712         self->next_time_mapping.num, self->next_time_mapping.den);
713 
714     if (new_calculated > expected)
715       diff = new_calculated - expected;
716     else
717       diff = expected - new_calculated;
718 
719     /* At most 5% frame duration change per update */
720     max_diff =
721         gst_util_uint64_scale (GST_SECOND / 20, self->info.fps_d,
722         self->info.fps_n);
723 
724     GST_DEBUG_OBJECT (self,
725         "New time mapping causes difference of %" GST_TIME_FORMAT,
726         GST_TIME_ARGS (diff));
727     GST_DEBUG_OBJECT (self, "Maximum allowed per frame %" GST_TIME_FORMAT,
728         GST_TIME_ARGS (max_diff));
729 
730     if (diff > max_diff) {
731       /* adjust so that we move that much closer */
732       if (new_calculated > expected) {
733         self->current_time_mapping.b = expected + max_diff;
734         self->current_time_mapping.xbase = stream_time;
735       } else {
736         self->current_time_mapping.b = expected - max_diff;
737         self->current_time_mapping.xbase = stream_time;
738       }
739     } else {
740       self->current_time_mapping.xbase = self->next_time_mapping.xbase;
741       self->current_time_mapping.b = self->next_time_mapping.b;
742       self->current_time_mapping.num = self->next_time_mapping.num;
743       self->current_time_mapping.den = self->next_time_mapping.den;
744       self->next_time_mapping_pending = FALSE;
745     }
746   }
747 }
748 
749 static void
750 gst_decklink_video_src_got_frame (GstElement * element,
751     IDeckLinkVideoInputFrame * frame, GstDecklinkModeEnum mode,
752     GstClockTime capture_time, GstClockTime stream_time,
753     GstClockTime stream_duration, GstClockTime hardware_time,
754     GstClockTime hardware_duration, IDeckLinkTimecode * dtc, gboolean no_signal)
755 {
756   GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (element);
757   GstClockTime timestamp, duration;
758 
759   GST_LOG_OBJECT (self,
760       "Got video frame at %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT " (%"
761       GST_TIME_FORMAT "), no signal: %d", GST_TIME_ARGS (capture_time),
762       GST_TIME_ARGS (stream_time), GST_TIME_ARGS (stream_duration), no_signal);
763 
764   if (self->drop_no_signal_frames && no_signal)
765     return;
766 
767   g_mutex_lock (&self->lock);
768   if (self->first_time == GST_CLOCK_TIME_NONE)
769     self->first_time = stream_time;
770 
771   if (self->skip_first_time > 0
772       && stream_time - self->first_time < self->skip_first_time) {
773     g_mutex_unlock (&self->lock);
774     GST_DEBUG_OBJECT (self,
775         "Skipping frame as requested: %" GST_TIME_FORMAT " < %" GST_TIME_FORMAT,
776         GST_TIME_ARGS (stream_time),
777         GST_TIME_ARGS (self->skip_first_time + self->first_time));
778     return;
779   }
780 
781   gst_decklink_video_src_update_time_mapping (self, capture_time, stream_time);
782   if (self->output_stream_time) {
783     timestamp = stream_time;
784     duration = stream_duration;
785   } else {
786     timestamp =
787         gst_clock_adjust_with_calibration (NULL, stream_time,
788         self->current_time_mapping.xbase, self->current_time_mapping.b,
789         self->current_time_mapping.num, self->current_time_mapping.den);
790     duration =
791         gst_util_uint64_scale (stream_duration, self->current_time_mapping.num,
792         self->current_time_mapping.den);
793   }
794 
795   GST_LOG_OBJECT (self,
796       "Converted times to %" GST_TIME_FORMAT " (%"
797       GST_TIME_FORMAT ")", GST_TIME_ARGS (timestamp), GST_TIME_ARGS (duration));
798 
799   if (!self->flushing) {
800     CaptureFrame f;
801     const GstDecklinkMode *bmode;
802     GstVideoTimeCodeFlags flags = GST_VIDEO_TIME_CODE_FLAGS_NONE;
803     guint field_count = 0;
804     guint skipped_frames = 0;
805     GstClockTime from_timestamp = GST_CLOCK_TIME_NONE;
806     GstClockTime to_timestamp = GST_CLOCK_TIME_NONE;
807 
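    /* If the queue is full because downstream does not consume frames fast
     * enough, drop the oldest frames so that at most buffer-size frames are
     * kept queued. */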
808     while (gst_queue_array_get_length (self->current_frames) >=
809         self->buffer_size) {
810       CaptureFrame *tmp = (CaptureFrame *)
811           gst_queue_array_pop_head_struct (self->current_frames);
812       if (skipped_frames == 0)
813         from_timestamp = tmp->timestamp;
814       skipped_frames++;
815       to_timestamp = tmp->timestamp;
816       capture_frame_clear (tmp);
817     }
818 
819     if (skipped_frames > 0)
820       GST_WARNING_OBJECT (self,
821           "Dropped %u old frames from %" GST_TIME_FORMAT " to %"
822           GST_TIME_FORMAT, skipped_frames, GST_TIME_ARGS (from_timestamp),
823           GST_TIME_ARGS (to_timestamp));
824 
825     memset (&f, 0, sizeof (f));
826     f.frame = frame;
827     f.timestamp = timestamp;
828     f.duration = duration;
829     f.stream_timestamp = stream_time;
830     f.stream_duration = stream_duration;
831     f.hardware_timestamp = hardware_time;
832     f.hardware_duration = hardware_duration;
833     f.mode = mode;
834     f.format = frame->GetPixelFormat ();
835     f.no_signal = no_signal;
836     if (dtc != NULL) {
837       uint8_t hours, minutes, seconds, frames;
838       HRESULT res;
839 
840       res = dtc->GetComponents (&hours, &minutes, &seconds, &frames);
841       if (res != S_OK) {
842         GST_ERROR ("Could not get components for timecode %p: 0x%08lx", dtc,
843             (unsigned long) res);
844         f.tc = NULL;
845       } else {
846         GST_DEBUG_OBJECT (self, "Got timecode %02d:%02d:%02d:%02d",
847             hours, minutes, seconds, frames);
848         bmode = gst_decklink_get_mode (mode);
849         if (bmode->interlaced)
850           flags =
851               (GstVideoTimeCodeFlags) (flags |
852               GST_VIDEO_TIME_CODE_FLAGS_INTERLACED);
853         if (bmode->fps_d == 1001) {
854           if (bmode->fps_n == 30000 || bmode->fps_n == 60000) {
855             /* Some occurrences have been spotted where the driver mistakenly
856              * fails to set the drop-frame flag for drop-frame timecodes.
857              * Assume always drop-frame for 29.97 and 59.94 FPS */
858             flags =
859                 (GstVideoTimeCodeFlags) (flags |
860                 GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME);
861           } else {
862             /* Drop-frame isn't defined for any other framerates (e.g. 23.976)
863              * */
864             flags =
865                 (GstVideoTimeCodeFlags) (flags &
866                 ~GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME);
867           }
868         }
869         f.tc =
870             gst_video_time_code_new (bmode->fps_n, bmode->fps_d, NULL, flags,
871             hours, minutes, seconds, frames, field_count);
872       }
873       dtc->Release ();
874     } else {
875       f.tc = NULL;
876     }
877 
878     frame->AddRef ();
879     gst_queue_array_push_tail_struct (self->current_frames, &f);
880     g_cond_signal (&self->cond);
881   }
882   g_mutex_unlock (&self->lock);
883 }
884 
885 static void
886 extract_cc_from_vbi (GstDecklinkVideoSrc * self, GstBuffer ** buffer,
887     VideoFrame * vf)
888 {
889   IDeckLinkVideoFrameAncillary *vanc_frame = NULL;
890   gint fi;
891   guint8 *vancdata;
892   GstVideoFormat videoformat;
893   gboolean found = FALSE;
894   GstDecklinkModeEnum mode_enum;
895   const GstDecklinkMode *mode;
896 
897   if (vf->frame->GetAncillaryData (&vanc_frame) != S_OK)
898     return;
899 
900   videoformat =
901       gst_decklink_video_format_from_type (vanc_frame->GetPixelFormat ());
902   mode_enum =
903       gst_decklink_get_mode_enum_from_bmd (vanc_frame->GetDisplayMode ());
904   mode = gst_decklink_get_mode (mode_enum);
905 
906   if (videoformat == GST_VIDEO_FORMAT_UNKNOWN) {
907     GST_DEBUG_OBJECT (self, "Unknown video format for Ancillary data");
908     vanc_frame->Release ();
909     return;
910   }
911 
912   if ((videoformat != self->anc_vformat || mode->width != self->anc_width)
913       && self->vbiparser) {
914     gst_video_vbi_parser_free (self->vbiparser);
915     self->vbiparser = NULL;
916   }
917 
918   GST_DEBUG_OBJECT (self, "Checking for ancillary data in VBI");
919 
920   fi = self->last_cc_vbi_line;
921   if (fi == -1)
922     fi = 1;
923 
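  /* Search the vertical blanking lines (1-21) for closed-caption ancillary
   * data, starting at the line where captions were last found; if nothing is
   * there, fall back to scanning from line 1 again. */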
924   while (fi < 22 && !found) {
925     if (vanc_frame->GetBufferForVerticalBlankingLine (fi,
926             (void **) &vancdata) == S_OK) {
927       GstVideoAncillary gstanc;
928       if (self->vbiparser == NULL) {
929         self->vbiparser = gst_video_vbi_parser_new (videoformat, mode->width);
930         self->anc_vformat = videoformat;
931         self->anc_width = mode->width;
932       }
933       GST_DEBUG_OBJECT (self, "Might have data on line %d", fi);
934       gst_video_vbi_parser_add_line (self->vbiparser, vancdata);
935 
936       while (gst_video_vbi_parser_get_ancillary (self->vbiparser,
937               &gstanc) == GST_VIDEO_VBI_PARSER_RESULT_OK) {
938         switch (GST_VIDEO_ANCILLARY_DID16 (&gstanc)) {
939           case GST_VIDEO_ANCILLARY_DID16_S334_EIA_708:
940             GST_DEBUG_OBJECT (self,
941                 "Adding CEA-708 CDP meta to buffer for line %d", fi);
942             GST_MEMDUMP_OBJECT (self, "CDP", gstanc.data, gstanc.data_count);
943             gst_buffer_add_video_caption_meta (*buffer,
944                 GST_VIDEO_CAPTION_TYPE_CEA708_CDP, gstanc.data,
945                 gstanc.data_count);
946 
947             break;
948           case GST_VIDEO_ANCILLARY_DID16_S334_EIA_608:
949             GST_DEBUG_OBJECT (self,
950                 "Adding CEA-608 meta to buffer for line %d", fi);
951             GST_MEMDUMP_OBJECT (self, "CEA608", gstanc.data, gstanc.data_count);
952             gst_buffer_add_video_caption_meta (*buffer,
953                 GST_VIDEO_CAPTION_TYPE_CEA608_S334_1A, gstanc.data,
954                 gstanc.data_count);
955             break;
956           default:
957             /* otherwise continue looking */
958             continue;
959         }
960 
961         found = TRUE;
962         self->last_cc_vbi_line = fi;
963         break;
964       }
965     }
966 
967     /* If we didn't find it at the previous line, start again searching from
968      * line 1 onwards */
969     if (!found && (gint) self->last_cc_vbi_line != -1) {
970       self->last_cc_vbi_line = -1;
971       fi = 1;
972     } else {
973       fi++;
974     }
975   }
976 
977   if (!found)
978     self->last_cc_vbi_line = -1;
979 
980   vanc_frame->Release ();
981 }
982 
983 static GstFlowReturn
984 gst_decklink_video_src_create (GstPushSrc * bsrc, GstBuffer ** buffer)
985 {
986   GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);
987   GstFlowReturn flow_ret = GST_FLOW_OK;
988   const guint8 *data;
989   gsize data_size;
990   VideoFrame *vf;
991   CaptureFrame f;
992   GstCaps *caps;
993   gboolean caps_changed = FALSE;
994   const GstDecklinkMode *mode;
995   static GstStaticCaps stream_reference =
996       GST_STATIC_CAPS ("timestamp/x-decklink-stream");
997   static GstStaticCaps hardware_reference =
998       GST_STATIC_CAPS ("timestamp/x-decklink-hardware");
999 
1000   if (!gst_decklink_video_src_start (self)) {
1001     return GST_FLOW_NOT_NEGOTIATED;
1002   }
1003 
1004   g_mutex_lock (&self->lock);
1005   while (gst_queue_array_is_empty (self->current_frames) && !self->flushing) {
1006     g_cond_wait (&self->cond, &self->lock);
1007   }
1008 
1009   if (self->flushing) {
1010     GST_DEBUG_OBJECT (self, "Flushing");
1011     g_mutex_unlock (&self->lock);
1012     return GST_FLOW_FLUSHING;
1013   }
1014 
1015   f = *(CaptureFrame *) gst_queue_array_pop_head_struct (self->current_frames);
1016   g_mutex_unlock (&self->lock);
1017   // If we're not flushing, we should have a valid frame from the queue
1018   g_assert (f.frame != NULL);
1019 
1020   g_mutex_lock (&self->lock);
1021 
1022   if (!gst_pad_has_current_caps (GST_BASE_SRC_PAD (self))) {
1023     caps_changed = TRUE;
1024   }
1025 
1026   if (self->caps_mode != f.mode) {
1027     if (self->mode == GST_DECKLINK_MODE_AUTO
1028         || !gst_pad_has_current_caps (GST_BASE_SRC_PAD (self))) {
1029       GST_DEBUG_OBJECT (self, "Mode changed from %d to %d", self->caps_mode,
1030           f.mode);
1031       caps_changed = TRUE;
1032       self->caps_mode = f.mode;
1033     } else {
1034       g_mutex_unlock (&self->lock);
1035       GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
1036           ("Invalid mode in captured frame"),
1037           ("Mode set to %d but captured %d", self->caps_mode, f.mode));
1038       capture_frame_clear (&f);
1039       return GST_FLOW_NOT_NEGOTIATED;
1040     }
1041   }
1042   if (self->caps_format != f.format) {
1043     if (self->video_format == GST_DECKLINK_VIDEO_FORMAT_AUTO
1044         || !gst_pad_has_current_caps (GST_BASE_SRC_PAD (self))) {
1045       GST_DEBUG_OBJECT (self, "Format changed from %d to %d", self->caps_format,
1046           f.format);
1047       caps_changed = TRUE;
1048       self->caps_format = f.format;
1049     } else {
1050       g_mutex_unlock (&self->lock);
1051       GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
1052           ("Invalid pixel format in captured frame"),
1053           ("Format set to %d but captured %d", self->caps_format, f.format));
1054       capture_frame_clear (&f);
1055       return GST_FLOW_NOT_NEGOTIATED;
1056     }
1057   }
1058 
1059   /* 1 ns error can be just a rounding error, so that's OK. The Decklink
1060    * drivers give us a really steady stream time, so anything above 1 ns can't
1061    * be a rounding error and is therefore something to worry about */
1062   if (self->expected_stream_time != GST_CLOCK_TIME_NONE &&
1063       ABSDIFF (self->expected_stream_time, f.stream_timestamp) > 1) {
1064     GstMessage *msg;
1065     GstClockTime running_time;
1066 
1067     self->dropped += f.stream_timestamp - self->expected_stream_time;
1068     running_time = gst_segment_to_running_time (&GST_BASE_SRC (self)->segment,
1069         GST_FORMAT_TIME, f.timestamp);
1070 
1071     msg = gst_message_new_qos (GST_OBJECT (self), TRUE, running_time, f.stream_timestamp,
1072         f.timestamp, f.duration);
1073     gst_message_set_qos_stats (msg, GST_FORMAT_TIME, self->processed,
1074         self->dropped);
1075     gst_element_post_message (GST_ELEMENT (self), msg);
1076   }
1077   if (self->first_stream_time == GST_CLOCK_TIME_NONE)
1078     self->first_stream_time = f.stream_timestamp;
1079   self->processed = f.stream_timestamp - self->dropped - self->first_stream_time;
1080   self->expected_stream_time = f.stream_timestamp + f.stream_duration;
1081 
1082   g_mutex_unlock (&self->lock);
1083   if (caps_changed) {
1084     self->last_cc_vbi_line = -1;
1085     caps = gst_decklink_mode_get_caps (f.mode, f.format, TRUE);
1086     gst_video_info_from_caps (&self->info, caps);
1087     gst_base_src_set_caps (GST_BASE_SRC_CAST (bsrc), caps);
1088     gst_element_post_message (GST_ELEMENT_CAST (self),
1089         gst_message_new_latency (GST_OBJECT_CAST (self)));
1090     gst_caps_unref (caps);
1091     if (self->vbiparser) {
1092       gst_video_vbi_parser_free (self->vbiparser);
1093       self->vbiparser = NULL;
1094       self->anc_vformat = GST_VIDEO_FORMAT_UNKNOWN;
1095       self->anc_width = 0;
1096     }
1097   }
1098 
1099   f.frame->GetBytes ((gpointer *) & data);
1100   data_size = self->info.size;
1101 
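  /* Wrap the DeckLink frame memory directly into a GstBuffer (zero-copy); the
   * VideoFrame keeps references on the frame and the input that are dropped in
   * video_frame_free() once the buffer is no longer in use. */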
1102   vf = (VideoFrame *) g_malloc0 (sizeof (VideoFrame));
1103 
1104   *buffer =
1105       gst_buffer_new_wrapped_full ((GstMemoryFlags) GST_MEMORY_FLAG_READONLY,
1106       (gpointer) data, data_size, 0, data_size, vf,
1107       (GDestroyNotify) video_frame_free);
1108 
1109   vf->frame = f.frame;
1110   f.frame->AddRef ();
1111   vf->input = self->input->input;
1112   vf->input->AddRef ();
1113 
1114   if (f.no_signal) {
1115     if (!self->no_signal) {
1116       self->no_signal = TRUE;
1117       g_object_notify (G_OBJECT (self), "signal");
1118       GST_ELEMENT_WARNING (GST_ELEMENT (self), RESOURCE, READ, ("No signal"),
1119           ("No input source was detected - video frames invalid"));
1120     }
1121   } else {
1122     if (self->no_signal) {
1123       self->no_signal = FALSE;
1124       g_object_notify (G_OBJECT (self), "signal");
1125       GST_ELEMENT_INFO (GST_ELEMENT (self), RESOURCE, READ, ("Signal found"),
1126           ("Input source detected"));
1127     }
1128   }
1129 
1130   // If we have a format that supports VANC and we are asked to extract CC,
1131   // then do it here.
1132   if (self->output_cc && !self->no_signal)
1133     extract_cc_from_vbi (self, buffer, vf);
1134 
1135   if (f.no_signal)
1136     GST_BUFFER_FLAG_SET (*buffer, GST_BUFFER_FLAG_GAP);
1137   GST_BUFFER_TIMESTAMP (*buffer) = f.timestamp;
1138   GST_BUFFER_DURATION (*buffer) = f.duration;
1139   if (f.tc != NULL)
1140     gst_buffer_add_video_time_code_meta (*buffer, f.tc);
1141   gst_buffer_add_reference_timestamp_meta (*buffer,
1142       gst_static_caps_get (&stream_reference), f.stream_timestamp,
1143       f.stream_duration);
1144   gst_buffer_add_reference_timestamp_meta (*buffer,
1145       gst_static_caps_get (&hardware_reference), f.hardware_timestamp,
1146       f.hardware_duration);
1147 
1148   mode = gst_decklink_get_mode (self->caps_mode);
1149   if (mode->interlaced && mode->tff)
1150     GST_BUFFER_FLAG_SET (*buffer,
1151         GST_VIDEO_BUFFER_FLAG_TFF | GST_VIDEO_BUFFER_FLAG_INTERLACED);
1152   else if (mode->interlaced)
1153     GST_BUFFER_FLAG_SET (*buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED);
1154 
1155   GST_DEBUG_OBJECT (self,
1156       "Outputting buffer %p with timestamp %" GST_TIME_FORMAT " and duration %"
1157       GST_TIME_FORMAT, *buffer, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (*buffer)),
1158       GST_TIME_ARGS (GST_BUFFER_DURATION (*buffer)));
1159 
1160   capture_frame_clear (&f);
1161 
1162   return flow_ret;
1163 }
1164 
1165 static gboolean
1166 gst_decklink_video_src_query (GstBaseSrc * bsrc, GstQuery * query)
1167 {
1168   GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);
1169   gboolean ret = TRUE;
1170 
1171   switch (GST_QUERY_TYPE (query)) {
1172     case GST_QUERY_LATENCY:{
1173       if (self->input) {
1174         GstClockTime min, max;
1175         const GstDecklinkMode *mode;
1176 
1177         g_mutex_lock (&self->lock);
1178         mode = gst_decklink_get_mode (self->caps_mode);
1179         g_mutex_unlock (&self->lock);
1180 
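        /* Minimum latency is one frame duration, maximum is the whole internal
         * queue of buffer-size frames. */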
1181         min = gst_util_uint64_scale_ceil (GST_SECOND, mode->fps_d, mode->fps_n);
1182         max = self->buffer_size * min;
1183 
1184         gst_query_set_latency (query, TRUE, min, max);
1185         ret = TRUE;
1186       } else {
1187         ret = FALSE;
1188       }
1189 
1190       break;
1191     }
1192     default:
1193       ret = GST_BASE_SRC_CLASS (parent_class)->query (bsrc, query);
1194       break;
1195   }
1196 
1197   return ret;
1198 }
1199 
1200 static gboolean
1201 gst_decklink_video_src_unlock (GstBaseSrc * bsrc)
1202 {
1203   GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);
1204 
1205   g_mutex_lock (&self->lock);
1206   self->flushing = TRUE;
1207   g_cond_signal (&self->cond);
1208   g_mutex_unlock (&self->lock);
1209 
1210   return TRUE;
1211 }
1212 
1213 static gboolean
1214 gst_decklink_video_src_unlock_stop (GstBaseSrc * bsrc)
1215 {
1216   GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);
1217 
1218   g_mutex_lock (&self->lock);
1219   self->flushing = FALSE;
1220   while (gst_queue_array_get_length (self->current_frames) > 0) {
1221     CaptureFrame *tmp =
1222         (CaptureFrame *) gst_queue_array_pop_head_struct (self->current_frames);
1223     capture_frame_clear (tmp);
1224   }
1225   g_mutex_unlock (&self->lock);
1226 
1227   return TRUE;
1228 }
1229 
1230 static gboolean
1231 gst_decklink_video_src_open (GstDecklinkVideoSrc * self)
1232 {
1233   const GstDecklinkMode *mode;
1234 
1235   GST_DEBUG_OBJECT (self, "Opening");
1236 
1237   self->input =
1238       gst_decklink_acquire_nth_input (self->device_number,
1239       GST_ELEMENT_CAST (self), FALSE);
1240   if (!self->input) {
1241     GST_ERROR_OBJECT (self, "Failed to acquire input");
1242     return FALSE;
1243   }
1244 
1245   g_object_notify (G_OBJECT (self), "hw-serial-number");
1246 
1247   mode = gst_decklink_get_mode (self->mode);
1248   g_assert (mode != NULL);
1249   g_mutex_lock (&self->input->lock);
1250   self->input->mode = mode;
1251   self->input->format = self->caps_format;
1252   self->input->got_video_frame = gst_decklink_video_src_got_frame;
1253   self->input->start_streams = gst_decklink_video_src_start_streams;
1254   g_mutex_unlock (&self->input->lock);
1255 
1256   return TRUE;
1257 }
1258 
1259 static gboolean
1260 gst_decklink_video_src_close (GstDecklinkVideoSrc * self)
1261 {
1262 
1263   GST_DEBUG_OBJECT (self, "Closing");
1264 
1265   if (self->input) {
1266     g_mutex_lock (&self->input->lock);
1267     self->input->got_video_frame = NULL;
1268     self->input->mode = NULL;
1269     self->input->video_enabled = FALSE;
1270     self->input->start_streams = NULL;
1271     g_mutex_unlock (&self->input->lock);
1272 
1273     gst_decklink_release_nth_input (self->device_number,
1274         GST_ELEMENT_CAST (self), FALSE);
1275     self->input = NULL;
1276   }
1277 
1278   return TRUE;
1279 }
1280 
1281 static gboolean
1282 gst_decklink_video_src_stop (GstDecklinkVideoSrc * self)
1283 {
1284   GST_DEBUG_OBJECT (self, "Stopping");
1285 
1286   while (gst_queue_array_get_length (self->current_frames) > 0) {
1287     CaptureFrame *tmp =
1288         (CaptureFrame *) gst_queue_array_pop_head_struct (self->current_frames);
1289     capture_frame_clear (tmp);
1290   }
1291   self->caps_mode = GST_DECKLINK_MODE_AUTO;
1292 
1293   if (self->input && self->input->video_enabled) {
1294     g_mutex_lock (&self->input->lock);
1295     self->input->video_enabled = FALSE;
1296     g_mutex_unlock (&self->input->lock);
1297 
1298     self->input->input->DisableVideoInput ();
1299   }
1300 
1301   if (self->vbiparser) {
1302     gst_video_vbi_parser_free (self->vbiparser);
1303     self->vbiparser = NULL;
1304     self->anc_vformat = GST_VIDEO_FORMAT_UNKNOWN;
1305     self->anc_width = 0;
1306   }
1307 
1308   return TRUE;
1309 }
1310 
1311 static void
1312 gst_decklink_video_src_start_streams (GstElement * element)
1313 {
1314   GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (element);
1315   HRESULT res;
1316 
1317   if (self->input->video_enabled && (!self->input->audiosrc
1318           || self->input->audio_enabled)
1319       && (GST_STATE (self) == GST_STATE_PLAYING
1320           || GST_STATE_PENDING (self) == GST_STATE_PLAYING)) {
1321     GST_DEBUG_OBJECT (self, "Starting streams");
1322 
1323     g_mutex_lock (&self->lock);
1324     self->first_time = GST_CLOCK_TIME_NONE;
1325     self->window_fill = 0;
1326     self->window_filled = FALSE;
1327     self->window_skip = 1;
1328     self->window_skip_count = 0;
1329     self->current_time_mapping.xbase = 0;
1330     self->current_time_mapping.b = 0;
1331     self->current_time_mapping.num = 1;
1332     self->current_time_mapping.den = 1;
1333     self->next_time_mapping.xbase = 0;
1334     self->next_time_mapping.b = 0;
1335     self->next_time_mapping.num = 1;
1336     self->next_time_mapping.den = 1;
1337     g_mutex_unlock (&self->lock);
1338     res = self->input->input->StartStreams ();
1339     if (res != S_OK) {
1340       GST_ELEMENT_ERROR (self, STREAM, FAILED,
1341           (NULL), ("Failed to start streams: 0x%08lx", (unsigned long) res));
1342       return;
1343     }
1344   } else {
1345     GST_DEBUG_OBJECT (self, "Not starting streams yet");
1346   }
1347 }
1348 
1349 static GstStateChangeReturn
1350 gst_decklink_video_src_change_state (GstElement * element,
1351     GstStateChange transition)
1352 {
1353   GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (element);
1354   GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
1355 
1356   switch (transition) {
1357     case GST_STATE_CHANGE_NULL_TO_READY:
1358       self->processed = 0;
1359       self->dropped = 0;
1360       self->expected_stream_time = GST_CLOCK_TIME_NONE;
1361       self->first_stream_time = GST_CLOCK_TIME_NONE;
1362       if (!gst_decklink_video_src_open (self)) {
1363         ret = GST_STATE_CHANGE_FAILURE;
1364         goto out;
1365       }
1366       if (self->mode == GST_DECKLINK_MODE_AUTO &&
1367           self->video_format != GST_DECKLINK_VIDEO_FORMAT_AUTO) {
1368         GST_WARNING_OBJECT (self,
1369             "Warning: mode=auto and format!=auto may not work");
1370       }
1371       self->vbiparser = NULL;
1372       self->anc_vformat = GST_VIDEO_FORMAT_UNKNOWN;
1373       self->anc_width = 0;
1374       break;
1375     case GST_STATE_CHANGE_READY_TO_PAUSED:
1376       self->flushing = FALSE;
1377       break;
1378     default:
1379       break;
1380   }
1381 
1382   if (ret == GST_STATE_CHANGE_FAILURE)
1383     return ret;
1384   ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
1385   if (ret == GST_STATE_CHANGE_FAILURE)
1386     return ret;
1387 
1388   switch (transition) {
1389     case GST_STATE_CHANGE_PAUSED_TO_READY:
1390       self->no_signal = FALSE;
1391 
1392       gst_decklink_video_src_stop (self);
1393       break;
1394     case GST_STATE_CHANGE_PLAYING_TO_PAUSED:{
1395       HRESULT res;
1396 
1397       GST_DEBUG_OBJECT (self, "Stopping streams");
1398 
1399       res = self->input->input->StopStreams ();
1400       if (res != S_OK) {
1401         GST_ELEMENT_ERROR (self, STREAM, FAILED,
1402             (NULL), ("Failed to stop streams: 0x%08lx", (unsigned long) res));
1403         ret = GST_STATE_CHANGE_FAILURE;
1404       }
1405       break;
1406     }
1407     case GST_STATE_CHANGE_PAUSED_TO_PLAYING:{
1408       g_mutex_lock (&self->input->lock);
1409       if (self->input->start_streams)
1410         self->input->start_streams (self->input->videosrc);
1411       g_mutex_unlock (&self->input->lock);
1412 
1413       break;
1414     }
1415     case GST_STATE_CHANGE_READY_TO_NULL:
1416       gst_decklink_video_src_close (self);
1417       break;
1418     default:
1419       break;
1420   }
1421 out:
1422 
1423   return ret;
1424 }
1425