1 /* GStreamer
2 * Copyright (C) 2011 David Schleef <ds@entropywave.com>
3 * Copyright (C) 2014 Sebastian Dröge <sebastian@centricular.com>
4 *
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
9 *
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
14 *
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
18 * Boston, MA 02110-1335, USA.
19 */
20 /**
21 * SECTION:element-decklinkvideosink
22 * @short_description: Outputs Video to a BlackMagic DeckLink Device
23 *
24 * Playout Video to a BlackMagic DeckLink Device.
25 *
26 * ## Sample pipeline
27 * |[
28 * gst-launch-1.0 \
29 * videotestsrc ! \
30 * decklinkvideosink device-number=0 mode=1080p25
31 * ]|
32 * Playout a 1080p25 test-video to the SDI-Out of Card 0. Devices are numbered
33 * starting with 0.
34 *
 * # Duplex-Mode:
 * Certain DeckLink Cards like the Duo2 or the Quad2 contain two or four
 * independent SDI units with two connectors each. These units can operate either
 * in half- or in full-duplex mode.
 *
 * The Duplex-Mode of a Card can be configured using the `duplex-mode`-Property.
 * Cards that do not support Duplex-Modes are not influenced by the property.
 *
 * ## Half-Duplex-Mode (default):
 * By default decklinkvideosink will configure them into half-duplex mode, so that
 * each connector acts as if it were an independent DeckLink Card which can either
 * be used as an Input or as an Output. In this mode the Duo2 can be used as 4 SDI
 * In-/Outputs and the Quad2 as 8 SDI In-/Outputs.
48 *
49 * |[
50 * gst-launch-1.0 \
51 * videotestsrc foreground-color=0x00ff0000 ! decklinkvideosink device-number=0 mode=1080p25 \
52 * videotestsrc foreground-color=0x0000ff00 ! decklinkvideosink device-number=1 mode=1080p25 \
53 * videotestsrc foreground-color=0x000000ff ! decklinkvideosink device-number=2 mode=1080p25 \
54 * videotestsrc foreground-color=0x00ffffff ! decklinkvideosink device-number=3 mode=1080p25
55 * ]|
56 * Playout four Test-Screen with colored Snow on the first four units in the System
57 * (ie. the Connectors 1-4 of a Duo2 unit).
58 *
59 * |[
60 * gst-launch-1.0 \
61 * videotestsrc is-live=true foreground-color=0x0000ff00 ! decklinkvideosink device-number=0 mode=1080p25 \
62 * decklinkvideosrc device-number=1 mode=1080p25 ! autovideosink \
63 * decklinkvideosrc device-number=2 mode=1080p25 ! autovideosink \
64 * videotestsrc is-live=true foreground-color=0x00ff0000 ! decklinkvideosink device-number=3 mode=1080p25
65 * ]|
66 * Capture 1080p25 from the second and third unit in the System,
67 * Playout a Test-Screen with colored Snow on the first and fourth unit
68 * (ie. the Connectors 1-4 of a Duo2 unit).
69 *
70 * ## Device-Number-Mapping in Half-Duplex-Mode
71 * The device-number to connector-mapping is as follows for the Duo2
72 * - `device-number=0` SDI1
73 * - `device-number=1` SDI3
74 * - `device-number=2` SDI2
75 * - `device-number=3` SDI4
76 *
77 * And for the Quad2
78 * - `device-number=0` SDI1
79 * - `device-number=1` SDI3
80 * - `device-number=2` SDI5
81 * - `device-number=3` SDI7
82 * - `device-number=4` SDI2
83 * - `device-number=5` SDI4
84 * - `device-number=6` SDI6
85 * - `device-number=7` SDI8
86 *
87 * ## Full-Duplex-Mode:
88 * When operating in full-duplex mode, two connectors of a unit are combined to
89 * a single device, performing keying with the second connection.
90 *
 * ## Device-Number-Mapping in Full-Duplex-Mode
 * The device-number to connector-mapping in full-duplex-mode is as follows for the Duo2
 * - `device-number=0` SDI1 primary, SDI2 secondary
 * - `device-number=1` SDI3 primary, SDI4 secondary
 *
 * And for the Quad2
 * - `device-number=0` SDI1 primary, SDI2 secondary
 * - `device-number=1` SDI3 primary, SDI4 secondary
 * - `device-number=2` SDI5 primary, SDI6 secondary
 * - `device-number=3` SDI7 primary, SDI8 secondary
101 *
 * # Keying
 * Keying is the process of overlaying Video with an Alpha-Channel on top of an
 * existing Video-Stream. The Duo2 and Quad2-Cards can perform two different
 * Keying-Modes when operated in full-duplex mode. Both modes expect Video with
 * an Alpha-Channel.
107 *
108 * ## Internal Keyer:
109 * In internal Keying-Mode the primary port becomes an Input and the secondary port
110 * an Output. The unit overlays Video played back from the Computer onto the Input
111 * and outputs the combined Video-Stream to the Output.
112 *
113 * |[
114 * gst-launch-1.0 \
115 * videotestsrc foreground-color=0x00000000 background-color=0x00000000 ! \
116 * video/x-raw,format=BGRA,width=1920,height=1080 ! \
117 * decklinkvideosink device-number=0 duplex-mode=full keyer-mode=internal video-format=8bit-bgra mode=1080p25
118 * ]|
119 *
 * ## External Keyer:
 * In external Keying-Mode the primary port outputs the alpha-channel as the
 * luma-value (key-channel). Transparent pixels are black, opaque pixels are white.
 * The RGB-Component of the Video is output on the secondary channel.
124 *
125 * |[
126 * gst-launch-1.0 \
127 * videotestsrc foreground-color=0x00000000 background-color=0x00000000 ! \
128 * video/x-raw,format=BGRA,width=1920,height=1080 ! \
129 * decklinkvideosink device-number=0 duplex-mode=full keyer-mode=external video-format=8bit-bgra mode=1080p25
130 * ]|
131 */
132
133 #ifdef HAVE_CONFIG_H
134 #include "config.h"
135 #endif
136
137 #include "gstdecklinkvideosink.h"
138 #include <string.h>
139
140 GST_DEBUG_CATEGORY_STATIC (gst_decklink_video_sink_debug);
141 #define GST_CAT_DEFAULT gst_decklink_video_sink_debug
142
/* Callback object registered with the DeckLink API via
 * SetScheduledFrameCompletionCallback(). The SDK notifies it about the fate
 * of every scheduled frame and about scheduled playback stopping.
 * It implements the COM-style manual reference counting the SDK expects,
 * protected by a plain mutex rather than atomics. */
class GStreamerVideoOutputCallback:public IDeckLinkVideoOutputCallback
{
public:
  GStreamerVideoOutputCallback (GstDecklinkVideoSink * sink)
  :IDeckLinkVideoOutputCallback (), m_refcount (1)
  {
    /* Hold a ref on the sink for as long as the driver may call back */
    m_sink = GST_DECKLINK_VIDEO_SINK_CAST (gst_object_ref (sink));
    g_mutex_init (&m_mutex);
  }

  /* This object exposes no additional COM interfaces */
  virtual HRESULT WINAPI QueryInterface (REFIID, LPVOID *)
  {
    return E_NOINTERFACE;
  }

  /* COM AddRef: returns the new reference count */
  virtual ULONG WINAPI AddRef (void)
  {
    ULONG ret;

    g_mutex_lock (&m_mutex);
    m_refcount++;
    ret = m_refcount;
    g_mutex_unlock (&m_mutex);

    return ret;
  }

  /* COM Release: object deletes itself when the count drops to zero */
  virtual ULONG WINAPI Release (void)
  {
    ULONG ret;

    g_mutex_lock (&m_mutex);
    m_refcount--;
    ret = m_refcount;
    g_mutex_unlock (&m_mutex);

    if (ret == 0) {
      delete this;
    }

    return ret;
  }

  /* Called by the driver for every frame previously scheduled for output;
   * only used for logging the completion result here */
  virtual HRESULT WINAPI ScheduledFrameCompleted (IDeckLinkVideoFrame *
      completedFrame, BMDOutputFrameCompletionResult result)
  {
    switch (result) {
      case bmdOutputFrameCompleted:
        GST_LOG_OBJECT (m_sink, "Completed frame %p", completedFrame);
        break;
      case bmdOutputFrameDisplayedLate:
        GST_INFO_OBJECT (m_sink, "Late Frame %p", completedFrame);
        break;
      case bmdOutputFrameDropped:
        GST_INFO_OBJECT (m_sink, "Dropped Frame %p", completedFrame);
        break;
      case bmdOutputFrameFlushed:
        GST_DEBUG_OBJECT (m_sink, "Flushed Frame %p", completedFrame);
        break;
      default:
        GST_INFO_OBJECT (m_sink, "Unknown Frame %p: %d", completedFrame,
            (gint) result);
        break;
    }

    return S_OK;
  }

  /* Wake up anyone waiting on the output's condition variable for the
   * scheduled playback to stop */
  virtual HRESULT WINAPI ScheduledPlaybackHasStopped (void)
  {
    GST_LOG_OBJECT (m_sink, "Scheduled playback stopped");

    if (m_sink->output) {
      g_mutex_lock (&m_sink->output->lock);
      g_cond_signal (&m_sink->output->cond);
      g_mutex_unlock (&m_sink->output->lock);
    }

    return S_OK;
  }

  virtual ~ GStreamerVideoOutputCallback () {
    gst_object_unref (m_sink);
    g_mutex_clear (&m_mutex);
  }

private:
  GstDecklinkVideoSink * m_sink;
  GMutex m_mutex;
  gint m_refcount;    /* manual COM-style refcount, guarded by m_mutex */
};
234
/* GObject property IDs, in the same order the properties are installed
 * in gst_decklink_video_sink_class_init() */
enum
{
  PROP_0,
  PROP_MODE,
  PROP_DEVICE_NUMBER,
  PROP_VIDEO_FORMAT,
  PROP_DUPLEX_MODE,
  PROP_TIMECODE_FORMAT,
  PROP_KEYER_MODE,
  PROP_KEYER_LEVEL,
  PROP_HW_SERIAL_NUMBER,        /* read-only */
  PROP_CC_LINE,                 /* closed-caption VANC line, 0 = disabled */
};
248
/* GObject vfunc prototypes */
static void gst_decklink_video_sink_set_property (GObject * object,
    guint property_id, const GValue * value, GParamSpec * pspec);
static void gst_decklink_video_sink_get_property (GObject * object,
    guint property_id, GValue * value, GParamSpec * pspec);
static void gst_decklink_video_sink_finalize (GObject * object);

/* GstElement vfunc prototypes */
static GstStateChangeReturn
gst_decklink_video_sink_change_state (GstElement * element,
    GstStateChange transition);
static GstClock *gst_decklink_video_sink_provide_clock (GstElement * element);

/* GstBaseSink vfunc prototypes */
static GstCaps *gst_decklink_video_sink_get_caps (GstBaseSink * bsink,
    GstCaps * filter);
static gboolean gst_decklink_video_sink_set_caps (GstBaseSink * bsink,
    GstCaps * caps);
static GstFlowReturn gst_decklink_video_sink_prepare (GstBaseSink * bsink,
    GstBuffer * buffer);
static GstFlowReturn gst_decklink_video_sink_render (GstBaseSink * bsink,
    GstBuffer * buffer);
static gboolean gst_decklink_video_sink_open (GstBaseSink * bsink);
static gboolean gst_decklink_video_sink_close (GstBaseSink * bsink);
static gboolean gst_decklink_video_sink_stop (GstDecklinkVideoSink * self);
static gboolean gst_decklink_video_sink_propose_allocation (GstBaseSink * bsink,
    GstQuery * query);
static gboolean gst_decklink_video_sink_event (GstBaseSink * bsink,
    GstEvent * event);

/* Kicks off scheduled playback on the device; also called from the
 * output's start_scheduled_playback hook */
static void
gst_decklink_video_sink_start_scheduled_playback (GstElement * element);

#define parent_class gst_decklink_video_sink_parent_class
G_DEFINE_TYPE (GstDecklinkVideoSink, gst_decklink_video_sink,
    GST_TYPE_BASE_SINK);
282
283 static gboolean
reset_framerate(GstCapsFeatures * features,GstStructure * structure,gpointer user_data)284 reset_framerate (GstCapsFeatures * features, GstStructure * structure,
285 gpointer user_data)
286 {
287 gst_structure_set (structure, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
288 G_MAXINT, 1, NULL);
289
290 return TRUE;
291 }
292
293 static void
gst_decklink_video_sink_class_init(GstDecklinkVideoSinkClass * klass)294 gst_decklink_video_sink_class_init (GstDecklinkVideoSinkClass * klass)
295 {
296 GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
297 GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
298 GstBaseSinkClass *basesink_class = GST_BASE_SINK_CLASS (klass);
299 GstCaps *templ_caps;
300
301 gobject_class->set_property = gst_decklink_video_sink_set_property;
302 gobject_class->get_property = gst_decklink_video_sink_get_property;
303 gobject_class->finalize = gst_decklink_video_sink_finalize;
304
305 element_class->change_state =
306 GST_DEBUG_FUNCPTR (gst_decklink_video_sink_change_state);
307 element_class->provide_clock =
308 GST_DEBUG_FUNCPTR (gst_decklink_video_sink_provide_clock);
309
310 basesink_class->get_caps =
311 GST_DEBUG_FUNCPTR (gst_decklink_video_sink_get_caps);
312 basesink_class->set_caps =
313 GST_DEBUG_FUNCPTR (gst_decklink_video_sink_set_caps);
314 basesink_class->prepare = GST_DEBUG_FUNCPTR (gst_decklink_video_sink_prepare);
315 basesink_class->render = GST_DEBUG_FUNCPTR (gst_decklink_video_sink_render);
316 // FIXME: These are misnamed in basesink!
317 basesink_class->start = GST_DEBUG_FUNCPTR (gst_decklink_video_sink_open);
318 basesink_class->stop = GST_DEBUG_FUNCPTR (gst_decklink_video_sink_close);
319 basesink_class->propose_allocation =
320 GST_DEBUG_FUNCPTR (gst_decklink_video_sink_propose_allocation);
321 basesink_class->event = GST_DEBUG_FUNCPTR (gst_decklink_video_sink_event);
322
323 g_object_class_install_property (gobject_class, PROP_MODE,
324 g_param_spec_enum ("mode", "Playback Mode",
325 "Video Mode to use for playback",
326 GST_TYPE_DECKLINK_MODE, GST_DECKLINK_MODE_NTSC,
327 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
328 G_PARAM_CONSTRUCT)));
329
330 g_object_class_install_property (gobject_class, PROP_DEVICE_NUMBER,
331 g_param_spec_int ("device-number", "Device number",
332 "Output device instance to use", 0, G_MAXINT, 0,
333 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
334 G_PARAM_CONSTRUCT)));
335
336 g_object_class_install_property (gobject_class, PROP_VIDEO_FORMAT,
337 g_param_spec_enum ("video-format", "Video format",
338 "Video format type to use for playback",
339 GST_TYPE_DECKLINK_VIDEO_FORMAT, GST_DECKLINK_VIDEO_FORMAT_8BIT_YUV,
340 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
341 G_PARAM_CONSTRUCT)));
342
343 g_object_class_install_property (gobject_class, PROP_DUPLEX_MODE,
344 g_param_spec_enum ("duplex-mode", "Duplex mode",
345 "Certain DeckLink devices such as the DeckLink Quad 2 and the "
346 "DeckLink Duo 2 support configuration of the duplex mode of "
347 "individual sub-devices."
348 "A sub-device configured as full-duplex will use two connectors, "
349 "which allows simultaneous capture and playback, internal keying, "
350 "and fill & key scenarios."
351 "A half-duplex sub-device will use a single connector as an "
352 "individual capture or playback channel.",
353 GST_TYPE_DECKLINK_DUPLEX_MODE, GST_DECKLINK_DUPLEX_MODE_HALF,
354 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
355 G_PARAM_CONSTRUCT)));
356
357 g_object_class_install_property (gobject_class, PROP_TIMECODE_FORMAT,
358 g_param_spec_enum ("timecode-format", "Timecode format",
359 "Timecode format type to use for playback",
360 GST_TYPE_DECKLINK_TIMECODE_FORMAT,
361 GST_DECKLINK_TIMECODE_FORMAT_RP188ANY,
362 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
363 G_PARAM_CONSTRUCT)));
364
365 g_object_class_install_property (gobject_class, PROP_KEYER_MODE,
366 g_param_spec_enum ("keyer-mode", "Keyer mode",
367 "Keyer mode to be enabled",
368 GST_TYPE_DECKLINK_KEYER_MODE,
369 GST_DECKLINK_KEYER_MODE_OFF,
370 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
371 G_PARAM_CONSTRUCT)));
372
373 g_object_class_install_property (gobject_class, PROP_KEYER_LEVEL,
374 g_param_spec_int ("keyer-level", "Keyer level",
375 "Keyer level", 0, 255, 255,
376 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
377 G_PARAM_CONSTRUCT)));
378
379 g_object_class_install_property (gobject_class, PROP_HW_SERIAL_NUMBER,
380 g_param_spec_string ("hw-serial-number", "Hardware serial number",
381 "The serial number (hardware ID) of the Decklink card",
382 NULL, (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
383
384 g_object_class_install_property (gobject_class, PROP_CC_LINE,
385 g_param_spec_int ("cc-line", "CC Line",
386 "Line number to use for inserting closed captions (0 = disabled)", 0,
387 22, 0,
388 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
389 G_PARAM_CONSTRUCT)));
390
391 templ_caps = gst_decklink_mode_get_template_caps (FALSE);
392 templ_caps = gst_caps_make_writable (templ_caps);
393 /* For output we support any framerate and only really care about timestamps */
394 gst_caps_map_in_place (templ_caps, reset_framerate, NULL);
395 gst_element_class_add_pad_template (element_class,
396 gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, templ_caps));
397 gst_caps_unref (templ_caps);
398
399 gst_element_class_set_static_metadata (element_class, "Decklink Video Sink",
400 "Video/Sink/Hardware", "Decklink Sink",
401 "David Schleef <ds@entropywave.com>, "
402 "Sebastian Dröge <sebastian@centricular.com>");
403
404 GST_DEBUG_CATEGORY_INIT (gst_decklink_video_sink_debug, "decklinkvideosink",
405 0, "debug category for decklinkvideosink element");
406 }
407
408 static void
gst_decklink_video_sink_init(GstDecklinkVideoSink * self)409 gst_decklink_video_sink_init (GstDecklinkVideoSink * self)
410 {
411 self->mode = GST_DECKLINK_MODE_NTSC;
412 self->device_number = 0;
413 self->video_format = GST_DECKLINK_VIDEO_FORMAT_8BIT_YUV;
414 self->duplex_mode = bmdDuplexModeHalf;
415 /* VITC is legacy, we should expect RP188 in modern use cases */
416 self->timecode_format = bmdTimecodeRP188Any;
417 self->caption_line = 0;
418
419 gst_base_sink_set_max_lateness (GST_BASE_SINK_CAST (self), 20 * GST_MSECOND);
420 gst_base_sink_set_qos_enabled (GST_BASE_SINK_CAST (self), TRUE);
421 }
422
423 void
gst_decklink_video_sink_set_property(GObject * object,guint property_id,const GValue * value,GParamSpec * pspec)424 gst_decklink_video_sink_set_property (GObject * object, guint property_id,
425 const GValue * value, GParamSpec * pspec)
426 {
427 GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (object);
428
429 switch (property_id) {
430 case PROP_MODE:
431 self->mode = (GstDecklinkModeEnum) g_value_get_enum (value);
432 break;
433 case PROP_DEVICE_NUMBER:
434 self->device_number = g_value_get_int (value);
435 break;
436 case PROP_VIDEO_FORMAT:
437 self->video_format = (GstDecklinkVideoFormat) g_value_get_enum (value);
438 switch (self->video_format) {
439 case GST_DECKLINK_VIDEO_FORMAT_AUTO:
440 case GST_DECKLINK_VIDEO_FORMAT_8BIT_YUV:
441 case GST_DECKLINK_VIDEO_FORMAT_10BIT_YUV:
442 case GST_DECKLINK_VIDEO_FORMAT_8BIT_ARGB:
443 case GST_DECKLINK_VIDEO_FORMAT_8BIT_BGRA:
444 break;
445 default:
446 GST_ELEMENT_WARNING (GST_ELEMENT (self), CORE, NOT_IMPLEMENTED,
447 ("Format %d not supported", self->video_format), (NULL));
448 break;
449 }
450 break;
451 case PROP_DUPLEX_MODE:
452 self->duplex_mode =
453 gst_decklink_duplex_mode_from_enum ((GstDecklinkDuplexMode)
454 g_value_get_enum (value));
455 break;
456 case PROP_TIMECODE_FORMAT:
457 self->timecode_format =
458 gst_decklink_timecode_format_from_enum ((GstDecklinkTimecodeFormat)
459 g_value_get_enum (value));
460 break;
461 case PROP_KEYER_MODE:
462 self->keyer_mode =
463 gst_decklink_keyer_mode_from_enum ((GstDecklinkKeyerMode)
464 g_value_get_enum (value));
465 break;
466 case PROP_KEYER_LEVEL:
467 self->keyer_level = g_value_get_int (value);
468 break;
469 case PROP_CC_LINE:
470 self->caption_line = g_value_get_int (value);
471 break;
472 default:
473 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
474 break;
475 }
476 }
477
478 void
gst_decklink_video_sink_get_property(GObject * object,guint property_id,GValue * value,GParamSpec * pspec)479 gst_decklink_video_sink_get_property (GObject * object, guint property_id,
480 GValue * value, GParamSpec * pspec)
481 {
482 GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (object);
483
484 switch (property_id) {
485 case PROP_MODE:
486 g_value_set_enum (value, self->mode);
487 break;
488 case PROP_DEVICE_NUMBER:
489 g_value_set_int (value, self->device_number);
490 break;
491 case PROP_VIDEO_FORMAT:
492 g_value_set_enum (value, self->video_format);
493 break;
494 case PROP_DUPLEX_MODE:
495 g_value_set_enum (value,
496 gst_decklink_duplex_mode_to_enum (self->duplex_mode));
497 break;
498 case PROP_TIMECODE_FORMAT:
499 g_value_set_enum (value,
500 gst_decklink_timecode_format_to_enum (self->timecode_format));
501 break;
502 case PROP_KEYER_MODE:
503 g_value_set_enum (value,
504 gst_decklink_keyer_mode_to_enum (self->keyer_mode));
505 break;
506 case PROP_KEYER_LEVEL:
507 g_value_set_int (value, self->keyer_level);
508 break;
509 case PROP_HW_SERIAL_NUMBER:
510 if (self->output)
511 g_value_set_string (value, self->output->hw_serial_number);
512 else
513 g_value_set_string (value, NULL);
514 break;
515 case PROP_CC_LINE:
516 g_value_set_int (value, self->caption_line);
517 break;
518 default:
519 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
520 break;
521 }
522 }
523
524 void
gst_decklink_video_sink_finalize(GObject * object)525 gst_decklink_video_sink_finalize (GObject * object)
526 {
527 //GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (object);
528
529 G_OBJECT_CLASS (parent_class)->finalize (object);
530 }
531
/* GstBaseSink set_caps vfunc: validate the negotiated caps, pick the
 * DeckLink display mode, configure the keyer and timecode/VANC output
 * flags, and enable video output on the device.
 *
 * Returns FALSE on caps parse failure, on unsupported reconfiguration, or
 * when the device rejects the configuration. NOTE(review): the error paths
 * after SetScheduledFrameCompletionCallback() leave the callback installed;
 * presumably harmless as the output is torn down on close — verify. */
static gboolean
gst_decklink_video_sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
  const GstDecklinkMode *mode;
  HRESULT ret;
  BMDVideoOutputFlags flags;
  GstVideoInfo info;

  GST_DEBUG_OBJECT (self, "Setting caps %" GST_PTR_FORMAT, caps);

  if (!gst_video_info_from_caps (&info, caps))
    return FALSE;


  /* Once video output is enabled, only caps that keep format and size
   * identical are accepted; anything else would need a device reconfigure */
  g_mutex_lock (&self->output->lock);
  if (self->output->video_enabled) {
    if (self->info.finfo->format == info.finfo->format &&
        self->info.width == info.width && self->info.height == info.height) {
      // FIXME: We should also consider the framerate as it is used
      // for mode selection below in auto mode
      GST_DEBUG_OBJECT (self, "Nothing relevant has changed");
      self->info = info;
      g_mutex_unlock (&self->output->lock);
      return TRUE;
    } else {
      GST_DEBUG_OBJECT (self, "Reconfiguration not supported at this point");
      g_mutex_unlock (&self->output->lock);
      return FALSE;
    }
  }
  g_mutex_unlock (&self->output->lock);

  /* The SDK takes ownership of the callback via its COM refcount */
  self->output->output->SetScheduledFrameCompletionCallback (new
      GStreamerVideoOutputCallback (self));

  if (self->mode == GST_DECKLINK_MODE_AUTO) {
    /* Derive the display mode (and implied pixel format) from the caps */
    BMDPixelFormat f;
    mode = gst_decklink_find_mode_and_format_for_caps (caps, &f);
    if (mode == NULL) {
      GST_WARNING_OBJECT (self,
          "Failed to find compatible mode for caps %" GST_PTR_FORMAT, caps);
      return FALSE;
    }
    /* A fixed video-format property must agree with what the caps imply */
    if (self->video_format != GST_DECKLINK_VIDEO_FORMAT_AUTO &&
        gst_decklink_pixel_format_from_type (self->video_format) != f) {
      GST_WARNING_OBJECT (self, "Failed to set pixel format to %d",
          self->video_format);
      return FALSE;
    }
  } else {
    /* We don't have to give the format in EnableVideoOutput. Therefore,
     * even if it's AUTO, we have it stored in self->info and set it in
     * gst_decklink_video_sink_prepare */
    mode = gst_decklink_get_mode (self->mode);
    g_assert (mode != NULL);
  };

  /* enable or disable keyer */
  if (self->output->keyer != NULL) {
    if (self->keyer_mode == bmdKeyerModeOff) {
      self->output->keyer->Disable ();
    } else if (self->keyer_mode == bmdKeyerModeInternal) {
      /* Enable(false) = internal keying, Enable(true) = external keying */
      self->output->keyer->Enable (false);
      self->output->keyer->SetLevel (self->keyer_level);
    } else if (self->keyer_mode == bmdKeyerModeExternal) {
      self->output->keyer->Enable (true);
      self->output->keyer->SetLevel (self->keyer_level);
    } else {
      g_assert_not_reached ();
    }
  } else if (self->keyer_mode != bmdKeyerModeOff) {
    /* Keying requested but the device exposes no keyer interface */
    GST_WARNING_OBJECT (self, "Failed to set keyer to mode %d",
        self->keyer_mode);
  }

  /* The timecode_format itself is used when we embed the actual timecode data
   * into the frame. Now we only need to know which of the two standards the
   * timecode format will adhere to: VITC or RP188, and send the appropriate
   * flag to EnableVideoOutput. The exact format is specified later.
   *
   * Note that this flag will have no effect in practice if the video stream
   * does not contain timecode metadata.
   */
  if ((gint64) self->timecode_format == (gint64) GST_DECKLINK_TIMECODE_FORMAT_VITC ||
      (gint64) self->timecode_format == (gint64) GST_DECKLINK_TIMECODE_FORMAT_VITCFIELD2)
    flags = bmdVideoOutputVITC;
  else
    flags = bmdVideoOutputRP188;

  /* Closed captions are carried in VANC, so enable VANC output if needed */
  if (self->caption_line > 0)
    flags = (BMDVideoOutputFlags) (flags | bmdVideoOutputVANC);

  ret = self->output->output->EnableVideoOutput (mode->mode, flags);
  if (ret != S_OK) {
    GST_WARNING_OBJECT (self, "Failed to enable video output: 0x%08lx",
        (unsigned long) ret);
    return FALSE;
  }

  self->info = info;
  g_mutex_lock (&self->output->lock);
  self->output->mode = mode;
  self->output->video_enabled = TRUE;
  /* Playback may start as soon as both audio and video are configured */
  if (self->output->start_scheduled_playback)
    self->output->start_scheduled_playback (self->output->videosink);
  g_mutex_unlock (&self->output->lock);

  /* Drop any VBI encoder built for the previous format; it is lazily
   * recreated for the new one */
  if (self->vbiencoder) {
    gst_video_vbi_encoder_free (self->vbiencoder);
    self->vbiencoder = NULL;
    self->anc_vformat = GST_VIDEO_FORMAT_UNKNOWN;
  }

  return TRUE;
}
648
649 static GstCaps *
gst_decklink_video_sink_get_caps(GstBaseSink * bsink,GstCaps * filter)650 gst_decklink_video_sink_get_caps (GstBaseSink * bsink, GstCaps * filter)
651 {
652 GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
653 GstCaps *mode_caps, *caps;
654
655 if (self->mode == GST_DECKLINK_MODE_AUTO
656 && self->video_format == GST_DECKLINK_VIDEO_FORMAT_AUTO)
657 mode_caps = gst_decklink_mode_get_template_caps (FALSE);
658 else if (self->video_format == GST_DECKLINK_VIDEO_FORMAT_AUTO)
659 mode_caps = gst_decklink_mode_get_caps_all_formats (self->mode, FALSE);
660 else if (self->mode == GST_DECKLINK_MODE_AUTO)
661 mode_caps =
662 gst_decklink_pixel_format_get_caps (gst_decklink_pixel_format_from_type
663 (self->video_format), FALSE);
664 else
665 mode_caps =
666 gst_decklink_mode_get_caps (self->mode,
667 gst_decklink_pixel_format_from_type (self->video_format), FALSE);
668 mode_caps = gst_caps_make_writable (mode_caps);
669 /* For output we support any framerate and only really care about timestamps */
670 gst_caps_map_in_place (mode_caps, reset_framerate, NULL);
671
672 if (filter) {
673 caps =
674 gst_caps_intersect_full (filter, mode_caps, GST_CAPS_INTERSECT_FIRST);
675 gst_caps_unref (mode_caps);
676 } else {
677 caps = mode_caps;
678 }
679
680 return caps;
681 }
682
683 static GstFlowReturn
gst_decklink_video_sink_render(GstBaseSink * bsink,GstBuffer * buffer)684 gst_decklink_video_sink_render (GstBaseSink * bsink, GstBuffer * buffer)
685 {
686 return GST_FLOW_OK;
687 }
688
/* Translate a pipeline running time (and optionally a duration) into the
 * time domain of the DeckLink hardware clock, so frames can be scheduled
 * against the device.
 *
 * When the pipeline does not use our hardware clock, the conversion goes
 * through the clock calibration (gst_clock_unadjust_with_calibration);
 * otherwise the timestamp is used as-is. In both cases the stored
 * internal_time_offset is added, or — before base times are established —
 * the current internal clock time is substituted.
 *
 * @timestamp: in/out, must be non-NULL
 * @duration:  in/out, may be NULL */
void
gst_decklink_video_sink_convert_to_internal_clock (GstDecklinkVideoSink * self,
    GstClockTime * timestamp, GstClockTime * duration)
{
  GstClock *clock;
  GstClockTime internal_base, external_base, internal_offset;

  g_assert (timestamp != NULL);

  clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
  /* Snapshot the base times under the object lock; they are updated from
   * other threads on state changes */
  GST_OBJECT_LOCK (self);
  internal_base = self->internal_base_time;
  external_base = self->external_base_time;
  internal_offset = self->internal_time_offset;
  GST_OBJECT_UNLOCK (self);

  /* Only convert when the pipeline clock is not our own hardware clock */
  if (!clock || clock != self->output->clock) {
    GstClockTime internal, external, rate_n, rate_d;
    GstClockTime external_timestamp = *timestamp;
    GstClockTime base_time;

    gst_clock_get_calibration (self->output->clock, &internal, &external,
        &rate_n, &rate_d);

    // Convert to the running time corresponding to both clock times
    if (!GST_CLOCK_TIME_IS_VALID (internal_base) || internal < internal_base)
      internal = 0;
    else
      internal -= internal_base;

    if (!GST_CLOCK_TIME_IS_VALID (external_base) || external < external_base)
      external = 0;
    else
      external -= external_base;

    // Convert timestamp to the "running time" since we started scheduled
    // playback, that is the difference between the pipeline's base time
    // and our own base time.
    base_time = gst_element_get_base_time (GST_ELEMENT_CAST (self));
    if (base_time > external_base)
      base_time = 0;
    else
      base_time = external_base - base_time;

    if (external_timestamp < base_time)
      external_timestamp = 0;
    else
      external_timestamp = external_timestamp - base_time;

    // Get the difference in the external time, note
    // that the running time is external time.
    // Then scale this difference and offset it to
    // our internal time. Now we have the running time
    // according to our internal clock.
    //
    // For the duration we just scale
    *timestamp =
        gst_clock_unadjust_with_calibration (NULL, external_timestamp,
        internal, external, rate_n, rate_d);

    GST_LOG_OBJECT (self,
        "Converted %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT " (internal: %"
        GST_TIME_FORMAT " external %" GST_TIME_FORMAT " rate: %lf)",
        GST_TIME_ARGS (external_timestamp), GST_TIME_ARGS (*timestamp),
        GST_TIME_ARGS (internal), GST_TIME_ARGS (external),
        ((gdouble) rate_n) / ((gdouble) rate_d));

    if (duration) {
      GstClockTime external_duration = *duration;

      /* Durations are relative, so scaling by the clock rate suffices */
      *duration = gst_util_uint64_scale (external_duration, rate_d, rate_n);

      GST_LOG_OBJECT (self,
          "Converted duration %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
          " (internal: %" GST_TIME_FORMAT " external %" GST_TIME_FORMAT
          " rate: %lf)", GST_TIME_ARGS (external_duration),
          GST_TIME_ARGS (*duration), GST_TIME_ARGS (internal),
          GST_TIME_ARGS (external), ((gdouble) rate_n) / ((gdouble) rate_d));
    }
  } else {
    GST_LOG_OBJECT (self, "No clock conversion needed, same clocks: %"
        GST_TIME_FORMAT, GST_TIME_ARGS (*timestamp));
  }

  /* Without established base times (scheduled playback not started yet),
   * fall back to the current internal clock time */
  if (external_base != GST_CLOCK_TIME_NONE &&
      internal_base != GST_CLOCK_TIME_NONE)
    *timestamp += internal_offset;
  else
    *timestamp = gst_clock_get_internal_time (self->output->clock);

  GST_LOG_OBJECT (self, "Output timestamp %" GST_TIME_FORMAT
      " using clock epoch %" GST_TIME_FORMAT,
      GST_TIME_ARGS (*timestamp), GST_TIME_ARGS (self->output->clock_epoch));

  if (clock)
    gst_object_unref (clock);
}
786
787 /* Copied from ext/closedcaption/gstccconverter.c */
788 /* Converts raw CEA708 cc_data and an optional timecode into CDP */
/* Copied from ext/closedcaption/gstccconverter.c */
/* Converts raw CEA708 cc_data and an optional timecode into CDP */
/* Builds a SMPTE 334-2 Caption Distribution Packet (CDP) in @cdp:
 * header (0x9669), frame-rate code, flags, sequence counter, optional
 * time_code_section, ccdata_section, and a footer with checksum.
 *
 * @cc_data/@cc_data_len: raw cc_data triplets (len must be a multiple of 3)
 * @cdp/@cdp_len: output buffer; caller must size it large enough — all
 *   writes below are _unchecked
 * @tc_meta: optional timecode to embed, may be NULL
 * Returns: number of bytes written to @cdp */
static guint
convert_cea708_cc_data_cea708_cdp_internal (GstDecklinkVideoSink * self,
    const guint8 * cc_data, guint cc_data_len, guint8 * cdp, guint cdp_len,
    const GstVideoTimeCodeMeta * tc_meta)
{
  GstByteWriter bw;
  guint8 flags, checksum;
  guint i, len;
  const GstDecklinkMode *mode = gst_decklink_get_mode (self->mode);

  gst_byte_writer_init_with_data (&bw, cdp, cdp_len, FALSE);
  /* cdp_identifier */
  gst_byte_writer_put_uint16_be_unchecked (&bw, 0x9669);
  /* Write a length of 0 for now */
  gst_byte_writer_put_uint8_unchecked (&bw, 0);
  /* cdp_frame_rate (high nibble) | reserved 0xf (low nibble); the mode's
   * framerate must be one of the rates defined by the CDP spec */
  if (mode->fps_n == 24000 && mode->fps_d == 1001) {
    gst_byte_writer_put_uint8_unchecked (&bw, 0x1f);
  } else if (mode->fps_n == 24 && mode->fps_d == 1) {
    gst_byte_writer_put_uint8_unchecked (&bw, 0x2f);
  } else if (mode->fps_n == 25 && mode->fps_d == 1) {
    gst_byte_writer_put_uint8_unchecked (&bw, 0x3f);
  } else if (mode->fps_n == 30 && mode->fps_d == 1001) {
    gst_byte_writer_put_uint8_unchecked (&bw, 0x4f);
  } else if (mode->fps_n == 30 && mode->fps_d == 1) {
    gst_byte_writer_put_uint8_unchecked (&bw, 0x5f);
  } else if (mode->fps_n == 50 && mode->fps_d == 1) {
    gst_byte_writer_put_uint8_unchecked (&bw, 0x6f);
  } else if (mode->fps_n == 60000 && mode->fps_d == 1001) {
    gst_byte_writer_put_uint8_unchecked (&bw, 0x7f);
  } else if (mode->fps_n == 60 && mode->fps_d == 1) {
    gst_byte_writer_put_uint8_unchecked (&bw, 0x8f);
  } else {
    g_assert_not_reached ();
  }

  /* ccdata_present | caption_service_active */
  flags = 0x42;

  /* time_code_present */
  if (tc_meta)
    flags |= 0x80;

  /* reserved */
  flags |= 0x01;

  gst_byte_writer_put_uint8_unchecked (&bw, flags);

  gst_byte_writer_put_uint16_be_unchecked (&bw, self->cdp_hdr_sequence_cntr);

  if (tc_meta) {
    const GstVideoTimeCode *tc = &tc_meta->tc;

    /* time_code_section: BCD-encoded hours/minutes/seconds/frames with
     * field and drop-frame flags folded into the high bits */
    gst_byte_writer_put_uint8_unchecked (&bw, 0x71);
    gst_byte_writer_put_uint8_unchecked (&bw, 0xc0 |
        (((tc->hours % 10) & 0x3) << 4) |
        ((tc->hours - (tc->hours % 10)) & 0xf));

    gst_byte_writer_put_uint8_unchecked (&bw, 0x80 |
        (((tc->minutes % 10) & 0x7) << 4) |
        ((tc->minutes - (tc->minutes % 10)) & 0xf));

    gst_byte_writer_put_uint8_unchecked (&bw,
        (tc->field_count <
            2 ? 0x00 : 0x80) | (((tc->seconds %
                    10) & 0x7) << 4) | ((tc->seconds -
                (tc->seconds % 10)) & 0xf));

    gst_byte_writer_put_uint8_unchecked (&bw,
        ((tc->config.flags & GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME) ? 0x80 :
            0x00) | (((tc->frames % 10) & 0x3) << 4) | ((tc->frames -
                (tc->frames % 10)) & 0xf));
  }

  /* ccdata_section: marker, cc_count (triplets), then the raw cc_data */
  gst_byte_writer_put_uint8_unchecked (&bw, 0x72);
  gst_byte_writer_put_uint8_unchecked (&bw, 0xe0 | cc_data_len / 3);
  gst_byte_writer_put_data_unchecked (&bw, cc_data, cc_data_len);

  /* cdp_footer: marker, repeated sequence counter, checksum placeholder */
  gst_byte_writer_put_uint8_unchecked (&bw, 0x74);
  gst_byte_writer_put_uint16_be_unchecked (&bw, self->cdp_hdr_sequence_cntr);
  self->cdp_hdr_sequence_cntr++;
  /* We calculate the checksum afterwards */
  gst_byte_writer_put_uint8_unchecked (&bw, 0);

  /* Patch the real packet length into byte 2 */
  len = gst_byte_writer_get_pos (&bw);
  gst_byte_writer_set_pos (&bw, 2);
  gst_byte_writer_put_uint8_unchecked (&bw, len);

  /* Checksum: the sum of all packet bytes must be 0 modulo 256 */
  checksum = 0;
  for (i = 0; i < len; i++) {
    checksum += cdp[i];
  }
  checksum &= 0xff;
  checksum = 256 - checksum;
  cdp[len - 1] = checksum;

  return len;
}
885
886 static GstFlowReturn
gst_decklink_video_sink_prepare(GstBaseSink * bsink,GstBuffer * buffer)887 gst_decklink_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buffer)
888 {
889 GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
890 GstVideoFrame vframe;
891 IDeckLinkMutableVideoFrame *frame;
892 guint8 *outdata, *indata;
893 GstFlowReturn flow_ret;
894 HRESULT ret;
895 GstClockTime timestamp, duration;
896 GstClockTime running_time, running_time_duration;
897 GstClockTime latency, render_delay;
898 GstClockTimeDiff ts_offset;
899 gint i;
900 GstDecklinkVideoFormat caps_format;
901 BMDPixelFormat format;
902 gint stride;
903 GstVideoTimeCodeMeta *tc_meta;
904
905 GST_DEBUG_OBJECT (self, "Preparing buffer %p", buffer);
906
907 // FIXME: Handle no timestamps
908 if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
909 return GST_FLOW_ERROR;
910 }
911
912 caps_format = gst_decklink_type_from_video_format (self->info.finfo->format);
913 format = gst_decklink_pixel_format_from_type (caps_format);
914
915 timestamp = GST_BUFFER_TIMESTAMP (buffer);
916 duration = GST_BUFFER_DURATION (buffer);
917 if (duration == GST_CLOCK_TIME_NONE) {
918 duration =
919 gst_util_uint64_scale_int (GST_SECOND, self->info.fps_d,
920 self->info.fps_n);
921 }
922 running_time =
923 gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
924 GST_FORMAT_TIME, timestamp);
925 running_time_duration =
926 gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
927 GST_FORMAT_TIME, timestamp + duration) - running_time;
928
929 /* See gst_base_sink_adjust_time() */
930 latency = gst_base_sink_get_latency (bsink);
931 render_delay = gst_base_sink_get_render_delay (bsink);
932 ts_offset = gst_base_sink_get_ts_offset (bsink);
933
934 running_time += latency;
935
936 if (ts_offset < 0) {
937 ts_offset = -ts_offset;
938 if ((GstClockTime) ts_offset < running_time)
939 running_time -= ts_offset;
940 else
941 running_time = 0;
942 } else {
943 running_time += ts_offset;
944 }
945
946 if (running_time > render_delay)
947 running_time -= render_delay;
948 else
949 running_time = 0;
950
951 ret = self->output->output->CreateVideoFrame (self->info.width,
952 self->info.height, self->info.stride[0], format, bmdFrameFlagDefault,
953 &frame);
954 if (ret != S_OK) {
955 GST_ELEMENT_ERROR (self, STREAM, FAILED,
956 (NULL), ("Failed to create video frame: 0x%08lx", (unsigned long) ret));
957 return GST_FLOW_ERROR;
958 }
959
960 if (!gst_video_frame_map (&vframe, &self->info, buffer, GST_MAP_READ)) {
961 GST_ERROR_OBJECT (self, "Failed to map video frame");
962 flow_ret = GST_FLOW_ERROR;
963 goto out;
964 }
965
966 frame->GetBytes ((void **) &outdata);
967 indata = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
968 stride =
969 MIN (GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0), frame->GetRowBytes ());
970 for (i = 0; i < self->info.height; i++) {
971 memcpy (outdata, indata, stride);
972 indata += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
973 outdata += frame->GetRowBytes ();
974 }
975 gst_video_frame_unmap (&vframe);
976
977 tc_meta = gst_buffer_get_video_time_code_meta (buffer);
978 if (tc_meta) {
979 BMDTimecodeFlags bflags = (BMDTimecodeFlags) 0;
980 gchar *tc_str;
981
982 if (((GstVideoTimeCodeFlags) (tc_meta->tc.
983 config.flags)) & GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME)
984 bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeIsDropFrame);
985 else
986 bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeFlagDefault);
987 if (tc_meta->tc.field_count == 2)
988 bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeFieldMark);
989
990 tc_str = gst_video_time_code_to_string (&tc_meta->tc);
991 ret = frame->SetTimecodeFromComponents (self->timecode_format,
992 (uint8_t) tc_meta->tc.hours,
993 (uint8_t) tc_meta->tc.minutes,
994 (uint8_t) tc_meta->tc.seconds, (uint8_t) tc_meta->tc.frames, bflags);
995 if (ret != S_OK) {
996 GST_ERROR_OBJECT (self,
997 "Failed to set timecode %s to video frame: 0x%08lx", tc_str,
998 (unsigned long) ret);
999 flow_ret = GST_FLOW_ERROR;
1000 g_free (tc_str);
1001 goto out;
1002 }
1003 GST_DEBUG_OBJECT (self, "Set frame timecode to %s", tc_str);
1004 g_free (tc_str);
1005 }
1006
1007 if (self->caption_line != 0) {
1008 IDeckLinkVideoFrameAncillary *vanc_frame = NULL;
1009 gpointer iter = NULL;
1010 GstVideoCaptionMeta *cc_meta;
1011 guint8 *vancdata;
1012 gboolean got_captions = FALSE;
1013
1014 /* Put any closed captions into the configured line */
1015 while ((cc_meta =
1016 (GstVideoCaptionMeta *) gst_buffer_iterate_meta_filtered (buffer,
1017 &iter, GST_VIDEO_CAPTION_META_API_TYPE))) {
1018 if (self->vbiencoder == NULL) {
1019 self->vbiencoder =
1020 gst_video_vbi_encoder_new (self->info.finfo->format,
1021 self->info.width);
1022 self->anc_vformat = self->info.finfo->format;
1023 }
1024
1025 switch (cc_meta->caption_type) {
1026 case GST_VIDEO_CAPTION_TYPE_CEA608_RAW:{
1027 guint8 data[138];
1028 guint i, n;
1029
1030 n = cc_meta->size / 2;
1031 if (cc_meta->size > 46) {
1032 GST_WARNING_OBJECT (self, "Too big raw CEA608 buffer");
1033 break;
1034 }
1035
1036 /* This is the offset from line 9 for 525-line fields and from line
1037 * 5 for 625-line fields.
1038 *
1039 * The highest bit is set for field 1 but not for field 0, but we
1040 * have no way of knowning the field here
1041 */
1042 for (i = 0; i < n; i++) {
1043 data[3 * i] = 0x80 | (self->info.height ==
1044 525 ? self->caption_line - 9 : self->caption_line - 5);
1045 data[3 * i + 1] = cc_meta->data[2 * i];
1046 data[3 * i + 2] = cc_meta->data[2 * i + 1];
1047 }
1048
1049 if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder,
1050 FALSE,
1051 GST_VIDEO_ANCILLARY_DID16_S334_EIA_608 >> 8,
1052 GST_VIDEO_ANCILLARY_DID16_S334_EIA_608 & 0xff, data, 3))
1053 GST_WARNING_OBJECT (self, "Couldn't add meta to ancillary data");
1054
1055 got_captions = TRUE;
1056
1057 break;
1058 }
1059 case GST_VIDEO_CAPTION_TYPE_CEA608_S334_1A:{
1060 if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder,
1061 FALSE,
1062 GST_VIDEO_ANCILLARY_DID16_S334_EIA_608 >> 8,
1063 GST_VIDEO_ANCILLARY_DID16_S334_EIA_608 & 0xff, cc_meta->data,
1064 cc_meta->size))
1065 GST_WARNING_OBJECT (self, "Couldn't add meta to ancillary data");
1066
1067 got_captions = TRUE;
1068
1069 break;
1070 }
1071 case GST_VIDEO_CAPTION_TYPE_CEA708_RAW:{
1072 guint8 data[256];
1073 guint n;
1074
1075 n = cc_meta->size / 3;
1076 if (cc_meta->size > 46) {
1077 GST_WARNING_OBJECT (self, "Too big raw CEA708 buffer");
1078 break;
1079 }
1080
1081 n = convert_cea708_cc_data_cea708_cdp_internal (self, cc_meta->data,
1082 cc_meta->size, data, sizeof (data), tc_meta);
1083 if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder, FALSE,
1084 GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 >> 8,
1085 GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 & 0xff, data, n))
1086 GST_WARNING_OBJECT (self, "Couldn't add meta to ancillary data");
1087
1088 got_captions = TRUE;
1089
1090 break;
1091 }
1092 case GST_VIDEO_CAPTION_TYPE_CEA708_CDP:{
1093 if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder,
1094 FALSE,
1095 GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 >> 8,
1096 GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 & 0xff, cc_meta->data,
1097 cc_meta->size))
1098 GST_WARNING_OBJECT (self, "Couldn't add meta to ancillary data");
1099
1100 got_captions = TRUE;
1101
1102 break;
1103 }
1104 default:{
1105 GST_FIXME_OBJECT (self, "Caption type %d not supported",
1106 cc_meta->caption_type);
1107 break;
1108 }
1109 }
1110 }
1111
1112 if (got_captions
1113 && self->output->output->CreateAncillaryData (format,
1114 &vanc_frame) == S_OK) {
1115 if (vanc_frame->GetBufferForVerticalBlankingLine (self->caption_line,
1116 (void **) &vancdata) == S_OK) {
1117 gst_video_vbi_encoder_write_line (self->vbiencoder, vancdata);
1118 if (frame->SetAncillaryData (vanc_frame) != S_OK) {
1119 GST_WARNING_OBJECT (self, "Failed to set ancillary data");
1120 }
1121 } else {
1122 GST_WARNING_OBJECT (self,
1123 "Failed to get buffer for line %d ancillary data",
1124 self->caption_line);
1125 }
1126 vanc_frame->Release ();
1127 } else if (got_captions) {
1128 GST_WARNING_OBJECT (self, "Failed to allocate ancillary data frame");
1129 }
1130
1131 }
1132
1133 gst_decklink_video_sink_convert_to_internal_clock (self, &running_time,
1134 &running_time_duration);
1135
1136 GST_LOG_OBJECT (self, "Scheduling video frame %p at %" GST_TIME_FORMAT
1137 " with duration %" GST_TIME_FORMAT, frame, GST_TIME_ARGS (running_time),
1138 GST_TIME_ARGS (running_time_duration));
1139
1140 ret = self->output->output->ScheduleVideoFrame (frame,
1141 running_time, running_time_duration, GST_SECOND);
1142 if (ret != S_OK) {
1143 GST_ELEMENT_ERROR (self, STREAM, FAILED,
1144 (NULL), ("Failed to schedule frame: 0x%08lx", (unsigned long) ret));
1145 flow_ret = GST_FLOW_ERROR;
1146 goto out;
1147 }
1148
1149 flow_ret = GST_FLOW_OK;
1150
1151 out:
1152
1153 frame->Release ();
1154
1155 return flow_ret;
1156 }
1157
1158 static gboolean
gst_decklink_video_sink_open(GstBaseSink * bsink)1159 gst_decklink_video_sink_open (GstBaseSink * bsink)
1160 {
1161 GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
1162 const GstDecklinkMode *mode;
1163
1164 GST_DEBUG_OBJECT (self, "Starting");
1165
1166 self->output =
1167 gst_decklink_acquire_nth_output (self->device_number,
1168 GST_ELEMENT_CAST (self), FALSE);
1169 if (!self->output) {
1170 GST_ERROR_OBJECT (self, "Failed to acquire output");
1171 return FALSE;
1172 }
1173
1174 g_object_notify (G_OBJECT (self), "hw-serial-number");
1175
1176 mode = gst_decklink_get_mode (self->mode);
1177 g_assert (mode != NULL);
1178
1179 g_mutex_lock (&self->output->lock);
1180 self->output->mode = mode;
1181 self->output->start_scheduled_playback =
1182 gst_decklink_video_sink_start_scheduled_playback;
1183 self->output->clock_start_time = GST_CLOCK_TIME_NONE;
1184 self->output->clock_epoch += self->output->clock_last_time;
1185 self->output->clock_last_time = 0;
1186 self->output->clock_offset = 0;
1187 GST_OBJECT_LOCK (self);
1188 self->internal_base_time = GST_CLOCK_TIME_NONE;
1189 self->external_base_time = GST_CLOCK_TIME_NONE;
1190 GST_OBJECT_UNLOCK (self);
1191 g_mutex_unlock (&self->output->lock);
1192
1193 return TRUE;
1194 }
1195
1196 static gboolean
gst_decklink_video_sink_close(GstBaseSink * bsink)1197 gst_decklink_video_sink_close (GstBaseSink * bsink)
1198 {
1199 GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
1200
1201 GST_DEBUG_OBJECT (self, "Closing");
1202
1203 if (self->output) {
1204 g_mutex_lock (&self->output->lock);
1205 self->output->mode = NULL;
1206 self->output->video_enabled = FALSE;
1207 if (self->output->start_scheduled_playback && self->output->videosink)
1208 self->output->start_scheduled_playback (self->output->videosink);
1209 g_mutex_unlock (&self->output->lock);
1210
1211 self->output->output->DisableVideoOutput ();
1212 gst_decklink_release_nth_output (self->device_number,
1213 GST_ELEMENT_CAST (self), FALSE);
1214 self->output = NULL;
1215 }
1216
1217 return TRUE;
1218 }
1219
1220 static gboolean
gst_decklink_video_sink_stop(GstDecklinkVideoSink * self)1221 gst_decklink_video_sink_stop (GstDecklinkVideoSink * self)
1222 {
1223 GST_DEBUG_OBJECT (self, "Stopping");
1224
1225 if (self->output && self->output->video_enabled) {
1226 g_mutex_lock (&self->output->lock);
1227 self->output->video_enabled = FALSE;
1228 g_mutex_unlock (&self->output->lock);
1229
1230 self->output->output->DisableVideoOutput ();
1231 self->output->output->SetScheduledFrameCompletionCallback (NULL);
1232 }
1233
1234 if (self->vbiencoder) {
1235 gst_video_vbi_encoder_free (self->vbiencoder);
1236 self->vbiencoder = NULL;
1237 self->anc_vformat = GST_VIDEO_FORMAT_UNKNOWN;
1238 }
1239
1240 return TRUE;
1241 }
1242
1243 static void
_wait_for_stop_notify(GstDecklinkVideoSink * self)1244 _wait_for_stop_notify (GstDecklinkVideoSink * self)
1245 {
1246 bool active = false;
1247
1248 self->output->output->IsScheduledPlaybackRunning (&active);
1249 while (active) {
1250 /* cause sometimes decklink stops without notifying us... */
1251 guint64 wait_time = g_get_monotonic_time () + G_TIME_SPAN_SECOND;
1252 if (!g_cond_wait_until (&self->output->cond, &self->output->lock,
1253 wait_time))
1254 GST_WARNING_OBJECT (self, "Failed to wait for stop notification");
1255 self->output->output->IsScheduledPlaybackRunning (&active);
1256 }
1257 }
1258
/* Start DeckLink scheduled playback once both the video side and (if an
 * audio sink shares this output) the audio side are enabled, and the
 * element(s) have reached at least PAUSED.
 *
 * NOTE(review): appears to require self->output->lock held on entry --
 * the body unlocks it before querying the clock and re-locks afterwards.
 * Confirm against all callers. */
static void
gst_decklink_video_sink_start_scheduled_playback (GstElement * element)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (element);
  GstClockTime start_time;
  HRESULT res;
  bool active;

  // Check if we're already started
  if (self->output->started) {
    GST_DEBUG_OBJECT (self, "Already started");
    return;
  }
  // Check if we're ready to start:
  // we need video and audio enabled, if there is audio
  // and both of the two elements need to be set to PLAYING already
  if (!self->output->video_enabled) {
    GST_DEBUG_OBJECT (self,
        "Not starting scheduled playback yet: video not enabled yet!");
    return;
  }

  if (self->output->audiosink && !self->output->audio_enabled) {
    GST_DEBUG_OBJECT (self,
        "Not starting scheduled playback yet: "
        "have audio but not enabled yet!");
    return;
  }

  if ((GST_STATE (self) < GST_STATE_PAUSED
          && GST_STATE_PENDING (self) < GST_STATE_PAUSED)
      || (self->output->audiosink &&
          GST_STATE (self->output->audiosink) < GST_STATE_PAUSED
          && GST_STATE_PENDING (self->output->audiosink) < GST_STATE_PAUSED)) {
    GST_DEBUG_OBJECT (self,
        "Not starting scheduled playback yet: "
        "Elements are not set to PAUSED yet");
    return;
  }
  // Need to unlock to get the clock time
  g_mutex_unlock (&self->output->lock);

  /* Hardware clock time used as the playback start point */
  start_time = gst_clock_get_internal_time (self->output->clock);

  g_mutex_lock (&self->output->lock);
  // Check if someone else started in the meantime
  if (self->output->started) {
    return;
  }

  /* If the device still reports playback running (e.g. from a previous
   * session), stop it first and wait for the stop to take effect */
  active = false;
  self->output->output->IsScheduledPlaybackRunning (&active);
  if (active) {
    GST_DEBUG_OBJECT (self, "Stopping scheduled playback");

    self->output->started = FALSE;

    res = self->output->output->StopScheduledPlayback (0, 0, 0);
    if (res != S_OK) {
      GST_ELEMENT_ERROR (self, STREAM, FAILED,
          (NULL), ("Failed to stop scheduled playback: 0x%08lx",
              (unsigned long) res));
      return;
    }
    // Wait until scheduled playback actually stopped
    _wait_for_stop_notify (self);
  }

  GST_DEBUG_OBJECT (self,
      "Starting scheduled playback at %" GST_TIME_FORMAT,
      GST_TIME_ARGS (start_time));

  res =
      self->output->output->StartScheduledPlayback (start_time,
      GST_SECOND, 1.0);
  if (res != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to start scheduled playback: 0x%08lx",
            (unsigned long) res));
    return;
  }

  self->output->started = TRUE;
}
1343
/* Stop DeckLink scheduled playback (if started) and reset the clock base
 * times so they are recomputed on the next PAUSED->PLAYING transition.
 * Returns GST_STATE_CHANGE_FAILURE if the hardware refuses to stop. */
static GstStateChangeReturn
gst_decklink_video_sink_stop_scheduled_playback (GstDecklinkVideoSink * self)
{
  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
  GstClockTime start_time;
  HRESULT res;

  if (!self->output->started)
    return ret;

  /* Stop at the current hardware clock time; queried before taking the
   * output lock (the clock query must not run under that lock) */
  start_time = gst_clock_get_internal_time (self->output->clock);

  GST_DEBUG_OBJECT (self,
      "Stopping scheduled playback at %" GST_TIME_FORMAT,
      GST_TIME_ARGS (start_time));

  g_mutex_lock (&self->output->lock);
  self->output->started = FALSE;
  res = self->output->output->StopScheduledPlayback (start_time, 0, GST_SECOND);
  if (res != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to stop scheduled playback: 0x%08lx", (unsigned long)
            res));
    ret = GST_STATE_CHANGE_FAILURE;
  } else {

    // Wait until scheduled playback actually stopped
    _wait_for_stop_notify (self);
  }
  g_mutex_unlock (&self->output->lock);
  /* Invalidate base times so they get re-established against the new
   * pipeline clock on the next start */
  GST_OBJECT_LOCK (self);
  self->internal_base_time = GST_CLOCK_TIME_NONE;
  self->external_base_time = GST_CLOCK_TIME_NONE;
  GST_OBJECT_UNLOCK (self);

  return ret;
}
1381
/* Element state-change handler. Manages the hardware clock we provide
 * (announce on READY->PAUSED, slave to the pipeline clock on
 * PAUSED->PLAYING, withdraw and reset on PAUSED->READY) and stops
 * scheduled playback when leaving PAUSED. */
static GstStateChangeReturn
gst_decklink_video_sink_change_state (GstElement * element,
    GstStateChange transition)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (element);
  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;

  GST_DEBUG_OBJECT (self, "changing state: %s => %s",
      gst_element_state_get_name (GST_STATE_TRANSITION_CURRENT (transition)),
      gst_element_state_get_name (GST_STATE_TRANSITION_NEXT (transition)));

  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      /* Reset per-session caption/VBI state */
      self->vbiencoder = NULL;
      self->anc_vformat = GST_VIDEO_FORMAT_UNKNOWN;
      self->cdp_hdr_sequence_cntr = 0;

      /* Roll the clock epoch forward so the provided clock stays
       * monotonic across restarts */
      g_mutex_lock (&self->output->lock);
      self->output->clock_epoch += self->output->clock_last_time;
      self->output->clock_last_time = 0;
      self->output->clock_offset = 0;
      g_mutex_unlock (&self->output->lock);
      /* Announce the hardware clock to the pipeline */
      gst_element_post_message (element,
          gst_message_new_clock_provide (GST_OBJECT_CAST (element),
              self->output->clock, TRUE));
      g_mutex_lock (&self->output->lock);
      if (self->output->start_scheduled_playback)
        self->output->start_scheduled_playback (self->output->videosink);
      g_mutex_unlock (&self->output->lock);
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:{
      GstClock *clock;

      clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
      if (clock) {
        /* If the pipeline chose another clock, slave ours to it */
        if (clock != self->output->clock) {
          gst_clock_set_master (self->output->clock, clock);
        }

        /* Capture matching snapshots of the external (pipeline) and
         * internal (hardware) clocks, used later to translate running
         * times onto the hardware timeline */
        GST_OBJECT_LOCK (self);
        if (self->external_base_time == GST_CLOCK_TIME_NONE || self->internal_base_time == GST_CLOCK_TIME_NONE) {
          self->external_base_time = gst_clock_get_internal_time (clock);
          self->internal_base_time = gst_clock_get_internal_time (self->output->clock);
          self->internal_time_offset = self->internal_base_time;
        }

        GST_INFO_OBJECT (self, "clock has been set to %" GST_PTR_FORMAT
            ", updated base times - internal: %" GST_TIME_FORMAT
            " external: %" GST_TIME_FORMAT " internal offset %"
            GST_TIME_FORMAT, clock,
            GST_TIME_ARGS (self->internal_base_time),
            GST_TIME_ARGS (self->external_base_time),
            GST_TIME_ARGS (self->internal_time_offset));
        GST_OBJECT_UNLOCK (self);

        gst_object_unref (clock);
      } else {
        GST_ELEMENT_ERROR (self, STREAM, FAILED,
            (NULL), ("Need a clock to go to PLAYING"));
        ret = GST_STATE_CHANGE_FAILURE;
      }
      break;
    }
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      if (gst_decklink_video_sink_stop_scheduled_playback (self) ==
          GST_STATE_CHANGE_FAILURE)
        ret = GST_STATE_CHANGE_FAILURE;
      break;
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      break;
    default:
      break;
  }

  if (ret == GST_STATE_CHANGE_FAILURE)
    return ret;
  /* Chain up to the parent class for the default transition handling */
  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
  if (ret == GST_STATE_CHANGE_FAILURE)
    return ret;

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:{
      /* Withdraw the provided clock and reset it so it can be reused */
      gst_element_post_message (element,
          gst_message_new_clock_lost (GST_OBJECT_CAST (element),
              self->output->clock));
      gst_clock_set_master (self->output->clock, NULL);
      // Reset calibration to make the clock reusable next time we use it
      gst_clock_set_calibration (self->output->clock, 0, 0, 1, 1);
      g_mutex_lock (&self->output->lock);
      self->output->clock_epoch += self->output->clock_last_time;
      self->output->clock_last_time = 0;
      self->output->clock_offset = 0;
      g_mutex_unlock (&self->output->lock);
      gst_decklink_video_sink_stop (self);
      GST_OBJECT_LOCK (self);
      self->internal_base_time = GST_CLOCK_TIME_NONE;
      self->external_base_time = GST_CLOCK_TIME_NONE;
      GST_OBJECT_UNLOCK (self);
      break;
    }
    case GST_STATE_CHANGE_READY_TO_PAUSED:{
      break;
    }
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      break;
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      break;
    default:
      break;
  }

  return ret;
}
1495
1496 static gboolean
gst_decklink_video_sink_event(GstBaseSink * bsink,GstEvent * event)1497 gst_decklink_video_sink_event (GstBaseSink * bsink, GstEvent * event)
1498 {
1499 GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
1500
1501 switch (GST_EVENT_TYPE (event)) {
1502 case GST_EVENT_FLUSH_START:
1503 {
1504 break;
1505 }
1506 case GST_EVENT_FLUSH_STOP:
1507 {
1508 gboolean reset_time;
1509
1510 gst_event_parse_flush_stop (event, &reset_time);
1511 if (reset_time) {
1512 GST_OBJECT_LOCK (self);
1513 /* force a recalculation of clock base times */
1514 self->external_base_time = GST_CLOCK_TIME_NONE;
1515 self->internal_base_time = GST_CLOCK_TIME_NONE;
1516 GST_OBJECT_UNLOCK (self);
1517 }
1518 break;
1519 }
1520 default:
1521 break;
1522 }
1523
1524 return GST_BASE_SINK_CLASS (parent_class)->event (bsink, event);
1525 }
1526
1527 static GstClock *
gst_decklink_video_sink_provide_clock(GstElement * element)1528 gst_decklink_video_sink_provide_clock (GstElement * element)
1529 {
1530 GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (element);
1531
1532 if (!self->output)
1533 return NULL;
1534
1535 return GST_CLOCK_CAST (gst_object_ref (self->output->clock));
1536 }
1537
1538 static gboolean
gst_decklink_video_sink_propose_allocation(GstBaseSink * bsink,GstQuery * query)1539 gst_decklink_video_sink_propose_allocation (GstBaseSink * bsink,
1540 GstQuery * query)
1541 {
1542 GstCaps *caps;
1543 GstVideoInfo info;
1544 GstBufferPool *pool;
1545 guint size;
1546
1547 gst_query_parse_allocation (query, &caps, NULL);
1548
1549 if (caps == NULL)
1550 return FALSE;
1551
1552 if (!gst_video_info_from_caps (&info, caps))
1553 return FALSE;
1554
1555 size = GST_VIDEO_INFO_SIZE (&info);
1556
1557 if (gst_query_get_n_allocation_pools (query) == 0) {
1558 GstStructure *structure;
1559 GstAllocator *allocator = NULL;
1560 GstAllocationParams params = { (GstMemoryFlags) 0, 15, 0, 0 };
1561
1562 if (gst_query_get_n_allocation_params (query) > 0)
1563 gst_query_parse_nth_allocation_param (query, 0, &allocator, ¶ms);
1564 else
1565 gst_query_add_allocation_param (query, allocator, ¶ms);
1566
1567 pool = gst_video_buffer_pool_new ();
1568
1569 structure = gst_buffer_pool_get_config (pool);
1570 gst_buffer_pool_config_set_params (structure, caps, size, 0, 0);
1571 gst_buffer_pool_config_set_allocator (structure, allocator, ¶ms);
1572
1573 if (allocator)
1574 gst_object_unref (allocator);
1575
1576 if (!gst_buffer_pool_set_config (pool, structure))
1577 goto config_failed;
1578
1579 gst_query_add_allocation_pool (query, pool, size, 0, 0);
1580 gst_object_unref (pool);
1581 gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
1582 }
1583
1584 return TRUE;
1585 // ERRORS
1586 config_failed:
1587 {
1588 GST_ERROR_OBJECT (bsink, "failed to set config");
1589 gst_object_unref (pool);
1590 return FALSE;
1591 }
1592 }
1593