1 /*
2 * GStreamer
3 * Copyright (C) 2016 Vivia Nikolaidou <vivia@toolsonair.com>
4 *
5 * Based on gstvideoframe-audiolevel.c:
6 * Copyright (C) 2015 Vivia Nikolaidou <vivia@toolsonair.com>
7 *
8 * This library is free software; you can redistribute it and/or
9 * modify it under the terms of the GNU Library General Public
10 * License as published by the Free Software Foundation; either
11 * version 2 of the License, or (at your option) any later version.
12 *
13 * This library is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 * Library General Public License for more details.
17 *
18 * You should have received a copy of the GNU Library General Public
19 * License along with this library; if not, write to the
20 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
21 * Boston, MA 02110-1301, USA.
22 */
23
24 /**
25 * SECTION:element-avwait
26 * @title: avwait
27 *
28 * This element will drop all buffers until a specific timecode or running
29 * time has been reached. It will then pass-through both audio and video,
30 * starting from that specific timecode or running time, making sure that
31 * audio starts as early as possible after the video (or at the same time as
32 * the video). In the "video-first" mode, it only drops audio buffers until
33 * video has started.
34 *
35 * The "recording" property acts essentially like a valve connected before
36 * everything else. If recording is FALSE, all buffers are dropped regardless
37 * of settings. If recording is TRUE, the other settings (mode,
38 * target-timecode, target-running-time, etc) are taken into account. Audio
39 * will always start and end together with the video, as long as the stream
40 * itself doesn't start too late or end too early.
41 *
42 * ## Example launch line
43 * |[
44 * gst-launch-1.0 filesrc location="my_file" ! decodebin name=d ! "audio/x-raw" ! avwait name=l target-timecode-str="00:00:04:00" ! autoaudiosink d. ! "video/x-raw" ! timecodestamper ! l. l. ! queue ! timeoverlay time-mode=time-code ! autovideosink
45 *
46 */
47
48 #ifdef HAVE_CONFIG_H
49 #include "config.h"
50 #endif
51
52 #include "gstavwait.h"
53
54 #define GST_CAT_DEFAULT gst_avwait_debug
55 GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
56
/* Pad templates: one always audio stream and one always video stream, each
 * with a sink and a source pad. Only the media type is constrained here;
 * actual caps are proxied straight through (see GST_PAD_SET_PROXY_CAPS in
 * gst_avwait_init). */
static GstStaticPadTemplate audio_sink_template =
GST_STATIC_PAD_TEMPLATE ("asink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/x-raw")
    );

static GstStaticPadTemplate audio_src_template =
GST_STATIC_PAD_TEMPLATE ("asrc",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/x-raw")
    );

static GstStaticPadTemplate video_sink_template =
GST_STATIC_PAD_TEMPLATE ("vsink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-raw")
    );

static GstStaticPadTemplate video_src_template =
GST_STATIC_PAD_TEMPLATE ("vsrc",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-raw")
    );
84
#define parent_class gst_avwait_parent_class
G_DEFINE_TYPE (GstAvWait, gst_avwait, GST_TYPE_ELEMENT);

/* GObject property IDs */
enum
{
  PROP_0,
  PROP_TARGET_TIME_CODE,
  PROP_TARGET_TIME_CODE_STRING,
  PROP_TARGET_RUNNING_TIME,
  PROP_END_TIME_CODE,
  PROP_RECORDING,
  PROP_MODE
};

/* Property defaults */
#define DEFAULT_TARGET_TIMECODE_STR "00:00:00:00"
#define DEFAULT_TARGET_RUNNING_TIME GST_CLOCK_TIME_NONE
#define DEFAULT_MODE MODE_TIMECODE

/* flags for self->must_send_end_message: used as a bitmask so the final
 * "avwait-status" (dropping=TRUE) message is only posted once both the
 * video and the audio side have pushed their last buffer */
enum
{
  END_MESSAGE_NORMAL = 0,
  END_MESSAGE_STREAM_ENDED = 1,
  END_MESSAGE_VIDEO_PUSHED = 2,
  END_MESSAGE_AUDIO_PUSHED = 4
};
111
/* Forward declarations for the GObject vfuncs, pad chain/event functions
 * and the GstElement state change handler implemented below. */
static void gst_avwait_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_avwait_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec);

static GstFlowReturn gst_avwait_asink_chain (GstPad * pad,
    GstObject * parent, GstBuffer * inbuf);
static GstFlowReturn gst_avwait_vsink_chain (GstPad * pad,
    GstObject * parent, GstBuffer * inbuf);
static gboolean gst_avwait_asink_event (GstPad * pad,
    GstObject * parent, GstEvent * event);
static gboolean gst_avwait_vsink_event (GstPad * pad,
    GstObject * parent, GstEvent * event);
static GstIterator *gst_avwait_iterate_internal_links (GstPad *
    pad, GstObject * parent);

static void gst_avwait_finalize (GObject * gobject);

static GstStateChangeReturn gst_avwait_change_state (GstElement *
    element, GstStateChange transition);
132
133 static GType
gst_avwait_mode_get_type(void)134 gst_avwait_mode_get_type (void)
135 {
136 static GType gtype = 0;
137
138 if (gtype == 0) {
139 static const GEnumValue values[] = {
140 {MODE_TIMECODE, "time code (default)", "timecode"},
141 {MODE_RUNNING_TIME, "running time", "running-time"},
142 {MODE_VIDEO_FIRST, "video first", "video-first"},
143 {0, NULL, NULL}
144 };
145
146 gtype = g_enum_register_static ("GstAvWaitMode", values);
147 }
148 return gtype;
149 }
150
/* Class initialisation: registers element metadata, the six properties and
 * the four static pad templates, and hooks up the GObject/GstElement
 * vfuncs. */
static void
gst_avwait_class_init (GstAvWaitClass * klass)
{
  GstElementClass *gstelement_class;
  GObjectClass *gobject_class = (GObjectClass *) klass;

  GST_DEBUG_CATEGORY_INIT (gst_avwait_debug, "avwait", 0, "avwait");

  gstelement_class = (GstElementClass *) klass;

  gst_element_class_set_static_metadata (gstelement_class,
      "Timecode Wait", "Filter/Audio/Video",
      "Drops all audio/video until a specific timecode or running time has been reached",
      "Vivia Nikolaidou <vivia@toolsonair.com>");

  gobject_class->set_property = gst_avwait_set_property;
  gobject_class->get_property = gst_avwait_get_property;

  /* String convenience form of "target-timecode", parsed in set_property */
  g_object_class_install_property (gobject_class, PROP_TARGET_TIME_CODE_STRING,
      g_param_spec_string ("target-timecode-string", "Target timecode (string)",
          "Timecode to wait for in timecode mode (string). Must take the form 00:00:00:00",
          DEFAULT_TARGET_TIMECODE_STR,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_TARGET_TIME_CODE,
      g_param_spec_boxed ("target-timecode", "Target timecode (object)",
          "Timecode to wait for in timecode mode (object)",
          GST_TYPE_VIDEO_TIME_CODE,
          GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
          G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_TARGET_RUNNING_TIME,
      g_param_spec_uint64 ("target-running-time", "Target running time",
          "Running time to wait for in running-time mode",
          0, G_MAXUINT64,
          DEFAULT_TARGET_RUNNING_TIME,
          GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
          G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_MODE,
      g_param_spec_enum ("mode", "Mode",
          "Operation mode: What to wait for",
          GST_TYPE_AVWAIT_MODE,
          DEFAULT_MODE,
          GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
          G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_END_TIME_CODE,
      g_param_spec_boxed ("end-timecode", "End timecode (object)",
          "Timecode to end at in timecode mode (object)",
          GST_TYPE_VIDEO_TIME_CODE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /* Acts like a valve in front of everything else, see the SECTION docs */
  g_object_class_install_property (gobject_class, PROP_RECORDING,
      g_param_spec_boolean ("recording",
          "Recording state",
          "Whether the element is stopped or recording. "
          "If set to FALSE, all buffers will be dropped regardless of settings.",
          TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  gobject_class->finalize = gst_avwait_finalize;
  gstelement_class->change_state = gst_avwait_change_state;

  gst_element_class_add_static_pad_template (gstelement_class,
      &audio_src_template);
  gst_element_class_add_static_pad_template (gstelement_class,
      &audio_sink_template);

  gst_element_class_add_static_pad_template (gstelement_class,
      &video_src_template);
  gst_element_class_add_static_pad_template (gstelement_class,
      &video_sink_template);
}
224
/* Instance initialisation: creates the four pads, installs the chain/event
 * functions on the sink pads, enables caps/allocation/scheduling proxying,
 * and resets all per-stream state to its defaults. */
static void
gst_avwait_init (GstAvWait * self)
{
  /* Audio sink pad: feeds gst_avwait_asink_chain / _asink_event */
  self->asinkpad =
      gst_pad_new_from_static_template (&audio_sink_template, "asink");
  gst_pad_set_chain_function (self->asinkpad,
      GST_DEBUG_FUNCPTR (gst_avwait_asink_chain));
  gst_pad_set_event_function (self->asinkpad,
      GST_DEBUG_FUNCPTR (gst_avwait_asink_event));
  gst_pad_set_iterate_internal_links_function (self->asinkpad,
      GST_DEBUG_FUNCPTR (gst_avwait_iterate_internal_links));
  gst_element_add_pad (GST_ELEMENT (self), self->asinkpad);

  /* Video sink pad: feeds gst_avwait_vsink_chain / _vsink_event */
  self->vsinkpad =
      gst_pad_new_from_static_template (&video_sink_template, "vsink");
  gst_pad_set_chain_function (self->vsinkpad,
      GST_DEBUG_FUNCPTR (gst_avwait_vsink_chain));
  gst_pad_set_event_function (self->vsinkpad,
      GST_DEBUG_FUNCPTR (gst_avwait_vsink_event));
  gst_pad_set_iterate_internal_links_function (self->vsinkpad,
      GST_DEBUG_FUNCPTR (gst_avwait_iterate_internal_links));
  gst_element_add_pad (GST_ELEMENT (self), self->vsinkpad);

  self->asrcpad =
      gst_pad_new_from_static_template (&audio_src_template, "asrc");
  gst_pad_set_iterate_internal_links_function (self->asrcpad,
      GST_DEBUG_FUNCPTR (gst_avwait_iterate_internal_links));
  gst_element_add_pad (GST_ELEMENT (self), self->asrcpad);

  self->vsrcpad =
      gst_pad_new_from_static_template (&video_src_template, "vsrc");
  gst_pad_set_iterate_internal_links_function (self->vsrcpad,
      GST_DEBUG_FUNCPTR (gst_avwait_iterate_internal_links));
  gst_element_add_pad (GST_ELEMENT (self), self->vsrcpad);

  /* avwait never modifies data; negotiation, allocation and scheduling
   * queries are proxied between the corresponding sink/src pads */
  GST_PAD_SET_PROXY_CAPS (self->asinkpad);
  GST_PAD_SET_PROXY_ALLOCATION (self->asinkpad);

  GST_PAD_SET_PROXY_CAPS (self->asrcpad);
  GST_PAD_SET_PROXY_SCHEDULING (self->asrcpad);

  GST_PAD_SET_PROXY_CAPS (self->vsinkpad);
  GST_PAD_SET_PROXY_ALLOCATION (self->vsinkpad);

  GST_PAD_SET_PROXY_CAPS (self->vsrcpad);
  GST_PAD_SET_PROXY_SCHEDULING (self->vsrcpad);

  /* No start/end running time known yet; start out dropping everything */
  self->running_time_to_wait_for = GST_CLOCK_TIME_NONE;
  self->last_seen_video_running_time = GST_CLOCK_TIME_NONE;
  self->first_audio_running_time = GST_CLOCK_TIME_NONE;
  self->last_seen_tc = NULL;

  self->video_eos_flag = FALSE;
  self->audio_eos_flag = FALSE;
  self->video_flush_flag = FALSE;
  self->audio_flush_flag = FALSE;
  self->shutdown_flag = FALSE;
  self->dropping = TRUE;
  self->tc = gst_video_time_code_new_empty ();
  self->end_tc = NULL;
  self->running_time_to_end_at = GST_CLOCK_TIME_NONE;
  self->audio_running_time_to_wait_for = GST_CLOCK_TIME_NONE;
  self->audio_running_time_to_end_at = GST_CLOCK_TIME_NONE;
  self->recording = TRUE;

  self->target_running_time = DEFAULT_TARGET_RUNNING_TIME;
  self->mode = DEFAULT_MODE;

  gst_video_info_init (&self->vinfo);
  /* self->mutex guards all shared state; cond/audio_cond are used for
   * cross-stream signalling between the audio and video chain functions */
  g_mutex_init (&self->mutex);
  g_cond_init (&self->cond);
  g_cond_init (&self->audio_cond);
}
298
299 static void
gst_avwait_send_element_message(GstAvWait * self,gboolean dropping,GstClockTime running_time)300 gst_avwait_send_element_message (GstAvWait * self, gboolean dropping,
301 GstClockTime running_time)
302 {
303 if (!gst_element_post_message (GST_ELEMENT (self),
304 gst_message_new_element (GST_OBJECT (self),
305 gst_structure_new ("avwait-status",
306 "dropping", G_TYPE_BOOLEAN, dropping,
307 "running-time", GST_TYPE_CLOCK_TIME, running_time, NULL)))) {
308 GST_ERROR_OBJECT (self, "Unable to send element message!");
309 g_assert_not_reached ();
310 }
311 }
312
/* State change handler.
 *
 * Downwards (PAUSED->READY): before chaining up, sets shutdown_flag and
 * wakes both streaming threads so any chain function blocked in
 * g_cond_wait() can bail out; after chaining up, resets all per-stream
 * state (segments, timecodes, wait/end running times).
 *
 * Upwards (READY->PAUSED): clears the eos/flush/shutdown flags and the
 * pending end-message state. */
static GstStateChangeReturn
gst_avwait_change_state (GstElement * element, GstStateChange transition)
{
  GstStateChangeReturn ret;
  GstAvWait *self = GST_AVWAIT (element);

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      /* Wake up blocked chain functions so shutdown can proceed */
      g_mutex_lock (&self->mutex);
      self->shutdown_flag = TRUE;
      g_cond_signal (&self->cond);
      g_cond_signal (&self->audio_cond);
      g_mutex_unlock (&self->mutex);
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      g_mutex_lock (&self->mutex);
      self->shutdown_flag = FALSE;
      self->video_eos_flag = FALSE;
      self->audio_eos_flag = FALSE;
      self->video_flush_flag = FALSE;
      self->audio_flush_flag = FALSE;
      self->must_send_end_message = END_MESSAGE_NORMAL;
      g_mutex_unlock (&self->mutex);
      /* fall through */
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      g_mutex_lock (&self->mutex);
      /* In running-time mode the targets come from the property and must
       * survive a state change; in the other modes they are derived from
       * the stream and have to be re-detected */
      if (self->mode != MODE_RUNNING_TIME) {
        GST_DEBUG_OBJECT (self, "First time reset in paused to ready");
        self->running_time_to_wait_for = GST_CLOCK_TIME_NONE;
        self->running_time_to_end_at = GST_CLOCK_TIME_NONE;
        self->audio_running_time_to_wait_for = GST_CLOCK_TIME_NONE;
        self->audio_running_time_to_end_at = GST_CLOCK_TIME_NONE;
      }
      if (!self->dropping) {
        self->dropping = TRUE;
        gst_avwait_send_element_message (self, TRUE, GST_CLOCK_TIME_NONE);
      }
      gst_segment_init (&self->asegment, GST_FORMAT_UNDEFINED);
      self->asegment.position = GST_CLOCK_TIME_NONE;
      gst_segment_init (&self->vsegment, GST_FORMAT_UNDEFINED);
      self->vsegment.position = GST_CLOCK_TIME_NONE;
      gst_video_info_init (&self->vinfo);
      self->last_seen_video_running_time = GST_CLOCK_TIME_NONE;
      self->first_audio_running_time = GST_CLOCK_TIME_NONE;
      if (self->last_seen_tc)
        gst_video_time_code_free (self->last_seen_tc);
      self->last_seen_tc = NULL;
      g_mutex_unlock (&self->mutex);
      break;
    default:
      break;
  }

  return ret;
}
374
375 static void
gst_avwait_finalize(GObject * object)376 gst_avwait_finalize (GObject * object)
377 {
378 GstAvWait *self = GST_AVWAIT (object);
379
380 if (self->tc) {
381 gst_video_time_code_free (self->tc);
382 self->tc = NULL;
383 }
384
385 if (self->end_tc) {
386 gst_video_time_code_free (self->end_tc);
387 self->end_tc = NULL;
388 }
389
390 g_mutex_clear (&self->mutex);
391 g_cond_clear (&self->cond);
392 g_cond_clear (&self->audio_cond);
393
394 G_OBJECT_CLASS (parent_class)->finalize (object);
395 }
396
397 static void
gst_avwait_get_property(GObject * object,guint prop_id,GValue * value,GParamSpec * pspec)398 gst_avwait_get_property (GObject * object, guint prop_id,
399 GValue * value, GParamSpec * pspec)
400 {
401 GstAvWait *self = GST_AVWAIT (object);
402
403 switch (prop_id) {
404 case PROP_TARGET_TIME_CODE_STRING:{
405 g_mutex_lock (&self->mutex);
406 if (self->tc)
407 g_value_take_string (value, gst_video_time_code_to_string (self->tc));
408 else
409 g_value_set_string (value, DEFAULT_TARGET_TIMECODE_STR);
410 g_mutex_unlock (&self->mutex);
411 break;
412 }
413 case PROP_TARGET_TIME_CODE:{
414 g_mutex_lock (&self->mutex);
415 g_value_set_boxed (value, self->tc);
416 g_mutex_unlock (&self->mutex);
417 break;
418 }
419 case PROP_END_TIME_CODE:{
420 g_mutex_lock (&self->mutex);
421 g_value_set_boxed (value, self->end_tc);
422 g_mutex_unlock (&self->mutex);
423 break;
424 }
425 case PROP_TARGET_RUNNING_TIME:{
426 g_mutex_lock (&self->mutex);
427 g_value_set_uint64 (value, self->target_running_time);
428 g_mutex_unlock (&self->mutex);
429 break;
430 }
431 case PROP_RECORDING:{
432 g_mutex_lock (&self->mutex);
433 g_value_set_boolean (value, self->recording);
434 g_mutex_unlock (&self->mutex);
435 break;
436 }
437 case PROP_MODE:{
438 g_mutex_lock (&self->mutex);
439 g_value_set_enum (value, self->mode);
440 g_mutex_unlock (&self->mutex);
441 break;
442 }
443 default:
444 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
445 break;
446 }
447 }
448
449 static void
gst_avwait_set_property(GObject * object,guint prop_id,const GValue * value,GParamSpec * pspec)450 gst_avwait_set_property (GObject * object, guint prop_id,
451 const GValue * value, GParamSpec * pspec)
452 {
453 GstAvWait *self = GST_AVWAIT (object);
454
455 switch (prop_id) {
456 case PROP_TARGET_TIME_CODE_STRING:{
457 gchar **parts;
458 const gchar *tc_str;
459 guint hours, minutes, seconds, frames;
460
461 tc_str = g_value_get_string (value);
462 parts = g_strsplit (tc_str, ":", 4);
463 if (!parts || parts[3] == NULL) {
464 GST_ERROR_OBJECT (self,
465 "Error: Could not parse timecode %s. Please input a timecode in the form 00:00:00:00",
466 tc_str);
467 g_strfreev (parts);
468 return;
469 }
470 hours = g_ascii_strtoll (parts[0], NULL, 10);
471 minutes = g_ascii_strtoll (parts[1], NULL, 10);
472 seconds = g_ascii_strtoll (parts[2], NULL, 10);
473 frames = g_ascii_strtoll (parts[3], NULL, 10);
474 g_mutex_lock (&self->mutex);
475 if (self->tc)
476 gst_video_time_code_free (self->tc);
477 self->tc = gst_video_time_code_new (0, 1, NULL, 0, hours, minutes,
478 seconds, frames, 0);
479 if (GST_VIDEO_INFO_FORMAT (&self->vinfo) != GST_VIDEO_FORMAT_UNKNOWN
480 && self->vinfo.fps_n != 0) {
481 self->tc->config.fps_n = self->vinfo.fps_n;
482 self->tc->config.fps_d = self->vinfo.fps_d;
483 }
484 g_mutex_unlock (&self->mutex);
485 g_strfreev (parts);
486 break;
487 }
488 case PROP_TARGET_TIME_CODE:{
489 g_mutex_lock (&self->mutex);
490 if (self->tc)
491 gst_video_time_code_free (self->tc);
492 self->tc = g_value_dup_boxed (value);
493 if (self->tc && self->tc->config.fps_n == 0
494 && GST_VIDEO_INFO_FORMAT (&self->vinfo) !=
495 GST_VIDEO_FORMAT_UNKNOWN && self->vinfo.fps_n != 0) {
496 self->tc->config.fps_n = self->vinfo.fps_n;
497 self->tc->config.fps_d = self->vinfo.fps_d;
498 }
499 g_mutex_unlock (&self->mutex);
500 break;
501 }
502 case PROP_END_TIME_CODE:{
503 g_mutex_lock (&self->mutex);
504 if (self->end_tc)
505 gst_video_time_code_free (self->end_tc);
506 self->end_tc = g_value_dup_boxed (value);
507 if (self->end_tc && self->end_tc->config.fps_n == 0
508 && GST_VIDEO_INFO_FORMAT (&self->vinfo) !=
509 GST_VIDEO_FORMAT_UNKNOWN && self->vinfo.fps_n != 0) {
510 self->end_tc->config.fps_n = self->vinfo.fps_n;
511 self->end_tc->config.fps_d = self->vinfo.fps_d;
512 }
513 g_mutex_unlock (&self->mutex);
514 break;
515 }
516 case PROP_TARGET_RUNNING_TIME:{
517 g_mutex_lock (&self->mutex);
518 self->target_running_time = g_value_get_uint64 (value);
519 if (self->mode == MODE_RUNNING_TIME) {
520 self->running_time_to_wait_for = self->target_running_time;
521 if (self->recording) {
522 self->audio_running_time_to_wait_for = self->running_time_to_wait_for;
523 }
524 if (self->target_running_time < self->last_seen_video_running_time) {
525 self->dropping = TRUE;
526 }
527 }
528 g_mutex_unlock (&self->mutex);
529 break;
530 }
531 case PROP_MODE:{
532 GstAvWaitMode old_mode;
533
534 g_mutex_lock (&self->mutex);
535 old_mode = self->mode;
536 self->mode = g_value_get_enum (value);
537 if (self->mode != old_mode) {
538 switch (self->mode) {
539 case MODE_TIMECODE:
540 if (self->last_seen_tc && self->tc &&
541 gst_video_time_code_compare (self->last_seen_tc,
542 self->tc) < 0) {
543 self->running_time_to_wait_for = GST_CLOCK_TIME_NONE;
544 self->dropping = TRUE;
545 }
546 break;
547 case MODE_RUNNING_TIME:
548 self->running_time_to_wait_for = self->target_running_time;
549 if (self->recording) {
550 self->audio_running_time_to_wait_for =
551 self->running_time_to_wait_for;
552 }
553 if (self->target_running_time < self->last_seen_video_running_time) {
554 self->dropping = TRUE;
555 }
556 break;
557 /* Let the chain functions handle the rest */
558 case MODE_VIDEO_FIRST:
559 /* pass-through */
560 default:
561 break;
562 }
563 }
564 g_mutex_unlock (&self->mutex);
565 break;
566 }
567 case PROP_RECORDING:{
568 g_mutex_lock (&self->mutex);
569 self->recording = g_value_get_boolean (value);
570 g_mutex_unlock (&self->mutex);
571 break;
572 }
573 default:
574 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
575 break;
576 }
577 }
578
/* Video sink pad event handler. Tracks the video segment and caps, resets
 * detection state on new segments/flushes (except in running-time mode,
 * where the targets come from properties), and signals the audio thread on
 * EOS/flush so it never blocks forever waiting for video. */
static gboolean
gst_avwait_vsink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GstAvWait *self = GST_AVWAIT (parent);
  GST_LOG_OBJECT (pad, "Got %s event", GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEGMENT:
      g_mutex_lock (&self->mutex);
      gst_event_copy_segment (event, &self->vsegment);
      if (self->vsegment.format != GST_FORMAT_TIME) {
        GST_ERROR_OBJECT (self, "Invalid segment format");
        g_mutex_unlock (&self->mutex);
        gst_event_unref (event);
        return FALSE;
      }
      /* A new segment invalidates any stream-derived start/end times;
       * in running-time mode they are property-derived and kept */
      if (self->mode != MODE_RUNNING_TIME) {
        GST_DEBUG_OBJECT (self, "First time reset in video segment");
        self->running_time_to_wait_for = GST_CLOCK_TIME_NONE;
        self->running_time_to_end_at = GST_CLOCK_TIME_NONE;
        self->audio_running_time_to_wait_for = GST_CLOCK_TIME_NONE;
        self->audio_running_time_to_end_at = GST_CLOCK_TIME_NONE;
        if (!self->dropping) {
          self->dropping = TRUE;
          gst_avwait_send_element_message (self, TRUE, GST_CLOCK_TIME_NONE);
        }
      }
      self->vsegment.position = GST_CLOCK_TIME_NONE;
      g_mutex_unlock (&self->mutex);
      break;
    case GST_EVENT_GAP:
      /* Gaps carry no data to gate on; swallow them */
      gst_event_unref (event);
      return TRUE;
    case GST_EVENT_EOS:
      g_mutex_lock (&self->mutex);
      self->video_eos_flag = TRUE;
      /* Wake the audio thread waiting for video progress */
      g_cond_signal (&self->cond);
      g_mutex_unlock (&self->mutex);
      break;
    case GST_EVENT_FLUSH_START:
      g_mutex_lock (&self->mutex);
      self->video_flush_flag = TRUE;
      /* Wake the video chain function blocked waiting for first audio */
      g_cond_signal (&self->audio_cond);
      g_mutex_unlock (&self->mutex);
      break;
    case GST_EVENT_FLUSH_STOP:
      g_mutex_lock (&self->mutex);
      self->video_flush_flag = FALSE;
      if (self->mode != MODE_RUNNING_TIME) {
        GST_DEBUG_OBJECT (self, "First time reset in video flush");
        self->running_time_to_wait_for = GST_CLOCK_TIME_NONE;
        self->running_time_to_end_at = GST_CLOCK_TIME_NONE;
        self->audio_running_time_to_wait_for = GST_CLOCK_TIME_NONE;
        self->audio_running_time_to_end_at = GST_CLOCK_TIME_NONE;
        if (!self->dropping) {
          self->dropping = TRUE;
          gst_avwait_send_element_message (self, TRUE, GST_CLOCK_TIME_NONE);
        }
      }
      gst_segment_init (&self->vsegment, GST_FORMAT_UNDEFINED);
      self->vsegment.position = GST_CLOCK_TIME_NONE;
      g_mutex_unlock (&self->mutex);
      break;
    case GST_EVENT_CAPS:{
      GstCaps *caps;
      gst_event_parse_caps (event, &caps);
      GST_DEBUG_OBJECT (self, "Got caps %" GST_PTR_FORMAT, caps);
      g_mutex_lock (&self->mutex);
      if (!gst_video_info_from_caps (&self->vinfo, caps)) {
        gst_event_unref (event);
        g_mutex_unlock (&self->mutex);
        return FALSE;
      }
      /* Backfill the framerate of user-supplied timecodes now that the
       * video caps are known */
      if (self->tc && self->tc->config.fps_n == 0 && self->vinfo.fps_n != 0) {
        self->tc->config.fps_n = self->vinfo.fps_n;
        self->tc->config.fps_d = self->vinfo.fps_d;
      }
      if (self->end_tc && self->end_tc->config.fps_n == 0
          && self->vinfo.fps_n != 0) {
        self->end_tc->config.fps_n = self->vinfo.fps_n;
        self->end_tc->config.fps_d = self->vinfo.fps_d;
      }
      g_mutex_unlock (&self->mutex);
      break;
    }
    default:
      break;
  }
  return gst_pad_event_default (pad, parent, event);
}
669
/* Audio sink pad event handler. Tracks the audio segment and caps, and
 * wakes the video thread on EOS/flush so it never blocks forever waiting
 * for the first audio buffer in video-first mode. */
static gboolean
gst_avwait_asink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GstAvWait *self = GST_AVWAIT (parent);
  GST_LOG_OBJECT (pad, "Got %s event", GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEGMENT:
      g_mutex_lock (&self->mutex);
      gst_event_copy_segment (event, &self->asegment);
      if (self->asegment.format != GST_FORMAT_TIME) {
        GST_ERROR_OBJECT (self, "Invalid segment format");
        g_mutex_unlock (&self->mutex);
        gst_event_unref (event);
        return FALSE;
      }
      self->asegment.position = GST_CLOCK_TIME_NONE;
      g_mutex_unlock (&self->mutex);
      break;
    case GST_EVENT_FLUSH_START:
      g_mutex_lock (&self->mutex);
      self->audio_flush_flag = TRUE;
      /* Wake the audio chain function waiting on video progress */
      g_cond_signal (&self->cond);
      g_mutex_unlock (&self->mutex);
      break;
    case GST_EVENT_EOS:
      g_mutex_lock (&self->mutex);
      self->audio_eos_flag = TRUE;
      /* No more audio will come: cancel any pending end message and wake
       * the video thread waiting for first audio */
      self->must_send_end_message = END_MESSAGE_NORMAL;
      g_cond_signal (&self->audio_cond);
      g_mutex_unlock (&self->mutex);
      break;
    case GST_EVENT_FLUSH_STOP:
      g_mutex_lock (&self->mutex);
      self->audio_flush_flag = FALSE;
      gst_segment_init (&self->asegment, GST_FORMAT_UNDEFINED);
      self->asegment.position = GST_CLOCK_TIME_NONE;
      g_mutex_unlock (&self->mutex);
      break;
    case GST_EVENT_CAPS:{
      GstCaps *caps;
      gst_event_parse_caps (event, &caps);
      GST_DEBUG_OBJECT (self, "Got caps %" GST_PTR_FORMAT, caps);
      g_mutex_lock (&self->mutex);
      if (!gst_audio_info_from_caps (&self->ainfo, caps)) {
        g_mutex_unlock (&self->mutex);
        gst_event_unref (event);
        return FALSE;
      }
      g_mutex_unlock (&self->mutex);
      break;
    }
    default:
      break;
  }

  return gst_pad_event_default (pad, parent, event);
}
728
729 static GstFlowReturn
gst_avwait_vsink_chain(GstPad * pad,GstObject * parent,GstBuffer * inbuf)730 gst_avwait_vsink_chain (GstPad * pad, GstObject * parent, GstBuffer * inbuf)
731 {
732 GstClockTime timestamp;
733 GstAvWait *self = GST_AVWAIT (parent);
734 GstClockTime running_time;
735 GstVideoTimeCode *tc = NULL;
736 GstVideoTimeCodeMeta *tc_meta;
737 gboolean retry = FALSE;
738 gboolean ret = GST_FLOW_OK;
739
740 timestamp = GST_BUFFER_TIMESTAMP (inbuf);
741 if (timestamp == GST_CLOCK_TIME_NONE) {
742 gst_buffer_unref (inbuf);
743 return GST_FLOW_ERROR;
744 }
745
746 g_mutex_lock (&self->mutex);
747 self->vsegment.position = timestamp;
748 running_time =
749 gst_segment_to_running_time (&self->vsegment, GST_FORMAT_TIME,
750 self->vsegment.position);
751 self->last_seen_video_running_time = running_time;
752
753 tc_meta = gst_buffer_get_video_time_code_meta (inbuf);
754 if (tc_meta) {
755 tc = gst_video_time_code_copy (&tc_meta->tc);
756 if (self->last_seen_tc) {
757 gst_video_time_code_free (self->last_seen_tc);
758 }
759 self->last_seen_tc = tc;
760 }
761
762 while (self->mode == MODE_VIDEO_FIRST
763 && self->first_audio_running_time == GST_CLOCK_TIME_NONE
764 && !self->audio_eos_flag
765 && !self->shutdown_flag && !self->video_flush_flag) {
766 g_cond_wait (&self->audio_cond, &self->mutex);
767 }
768
769 if (self->video_flush_flag || self->shutdown_flag) {
770 GST_DEBUG_OBJECT (self, "Shutting down, ignoring buffer");
771 gst_buffer_unref (inbuf);
772 g_mutex_unlock (&self->mutex);
773 return GST_FLOW_FLUSHING;
774 }
775
776 switch (self->mode) {
777 case MODE_TIMECODE:{
778 if (self->tc && self->end_tc
779 && gst_video_time_code_compare (self->tc, self->end_tc) != -1) {
780 gchar *tc_str, *end_tc;
781
782 tc_str = gst_video_time_code_to_string (self->tc);
783 end_tc = gst_video_time_code_to_string (self->end_tc);
784 GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
785 ("End timecode %s must be after start timecode %s. Start timecode rejected",
786 end_tc, tc_str));
787 g_free (end_tc);
788 g_free (tc_str);
789 gst_buffer_unref (inbuf);
790 g_mutex_unlock (&self->mutex);
791 return GST_FLOW_ERROR;
792 }
793
794 if (self->tc != NULL && tc != NULL) {
795 gboolean emit_passthrough_signal = FALSE;
796
797 if (gst_video_time_code_compare (tc, self->tc) < 0
798 && self->running_time_to_wait_for == GST_CLOCK_TIME_NONE) {
799 GST_DEBUG_OBJECT (self, "Timecode not yet reached, ignoring frame");
800 gst_buffer_unref (inbuf);
801 inbuf = NULL;
802 } else if (self->running_time_to_wait_for == GST_CLOCK_TIME_NONE) {
803 GST_INFO_OBJECT (self, "Target timecode reached at %" GST_TIME_FORMAT,
804 GST_TIME_ARGS (self->vsegment.position));
805 /* Don't emit a signal if we weren't dropping (e.g. settings changed
806 * mid-flight) */
807 emit_passthrough_signal = self->dropping;
808 self->dropping = FALSE;
809 self->running_time_to_wait_for =
810 gst_segment_to_running_time (&self->vsegment, GST_FORMAT_TIME,
811 self->vsegment.position);
812 if (self->recording) {
813 self->audio_running_time_to_wait_for =
814 self->running_time_to_wait_for;
815 }
816 }
817
818 if (self->end_tc && gst_video_time_code_compare (tc, self->end_tc) >= 0) {
819 if (self->running_time_to_end_at == GST_CLOCK_TIME_NONE) {
820 GST_INFO_OBJECT (self, "End timecode reached at %" GST_TIME_FORMAT,
821 GST_TIME_ARGS (self->vsegment.position));
822 self->dropping = TRUE;
823 self->running_time_to_end_at =
824 gst_segment_to_running_time (&self->vsegment, GST_FORMAT_TIME,
825 self->vsegment.position);
826 if (self->recording) {
827 self->audio_running_time_to_end_at = self->running_time_to_end_at;
828 self->must_send_end_message |= END_MESSAGE_STREAM_ENDED;
829 }
830 }
831 gst_buffer_unref (inbuf);
832 inbuf = NULL;
833 } else if (emit_passthrough_signal && self->recording) {
834 gst_avwait_send_element_message (self, FALSE,
835 self->running_time_to_wait_for);
836 }
837 }
838 break;
839 }
840 case MODE_RUNNING_TIME:{
841 if (running_time < self->running_time_to_wait_for) {
842 GST_DEBUG_OBJECT (self,
843 "Have %" GST_TIME_FORMAT ", waiting for %" GST_TIME_FORMAT,
844 GST_TIME_ARGS (running_time),
845 GST_TIME_ARGS (self->running_time_to_wait_for));
846 gst_buffer_unref (inbuf);
847 inbuf = NULL;
848 } else {
849 if (self->dropping) {
850 self->dropping = FALSE;
851 if (self->recording)
852 gst_avwait_send_element_message (self, FALSE, running_time);
853 }
854 GST_INFO_OBJECT (self,
855 "Have %" GST_TIME_FORMAT ", waiting for %" GST_TIME_FORMAT,
856 GST_TIME_ARGS (running_time),
857 GST_TIME_ARGS (self->running_time_to_wait_for));
858 }
859 break;
860 }
861 case MODE_VIDEO_FIRST:{
862 if (self->running_time_to_wait_for == GST_CLOCK_TIME_NONE) {
863 self->running_time_to_wait_for =
864 gst_segment_to_running_time (&self->vsegment, GST_FORMAT_TIME,
865 self->vsegment.position);
866 GST_DEBUG_OBJECT (self, "First video running time is %" GST_TIME_FORMAT,
867 GST_TIME_ARGS (self->running_time_to_wait_for));
868 if (self->recording) {
869 self->audio_running_time_to_wait_for = self->running_time_to_wait_for;
870 }
871 if (self->dropping) {
872 self->dropping = FALSE;
873 if (self->recording)
874 gst_avwait_send_element_message (self, FALSE,
875 self->running_time_to_wait_for);
876 }
877 }
878 break;
879 }
880 }
881
882 if (!self->recording) {
883 if (self->was_recording) {
884 GST_INFO_OBJECT (self, "Recording stopped at %" GST_TIME_FORMAT,
885 GST_TIME_ARGS (running_time));
886 if (running_time > self->running_time_to_wait_for
887 && running_time <= self->running_time_to_end_at) {
888 /* We just stopped recording: synchronise the audio */
889 self->audio_running_time_to_end_at = running_time;
890 self->must_send_end_message |= END_MESSAGE_STREAM_ENDED;
891 } else if (running_time < self->running_time_to_wait_for
892 && self->running_time_to_wait_for != GST_CLOCK_TIME_NONE) {
893 self->audio_running_time_to_wait_for = GST_CLOCK_TIME_NONE;
894 }
895 }
896 /* Recording is FALSE: we drop all buffers */
897 if (inbuf) {
898 gst_buffer_unref (inbuf);
899 inbuf = NULL;
900 }
901 } else {
902 if (!self->was_recording) {
903 GST_INFO_OBJECT (self,
904 "Recording started at %" GST_TIME_FORMAT " waiting for %"
905 GST_TIME_FORMAT " inbuf %p", GST_TIME_ARGS (running_time),
906 GST_TIME_ARGS (self->running_time_to_wait_for), inbuf);
907 if (self->mode != MODE_VIDEO_FIRST ||
908 self->first_audio_running_time <= running_time ||
909 self->audio_eos_flag) {
910 if (running_time < self->running_time_to_end_at ||
911 self->running_time_to_end_at == GST_CLOCK_TIME_NONE) {
912 /* We are before the end of the recording. Check if we just actually
913 * started */
914 if (running_time > self->running_time_to_wait_for) {
915 /* We just started recording: synchronise the audio */
916 self->audio_running_time_to_wait_for = running_time;
917 gst_avwait_send_element_message (self, FALSE, running_time);
918 } else {
919 /* We will start in the future when running_time_to_wait_for is
920 * reached */
921 self->audio_running_time_to_wait_for =
922 self->running_time_to_wait_for;
923 }
924 self->audio_running_time_to_end_at = self->running_time_to_end_at;
925 }
926 } else {
927 /* We are in video-first mode and behind the first audio timestamp. We
928 * should drop all video buffers until the first audio timestamp, so
929 * we can catch up with it. (In timecode mode and running-time mode, we
930 * don't care about when the audio starts, we start as soon as the
931 * target timecode or running time has been reached) */
932 gst_buffer_unref (inbuf);
933 inbuf = NULL;
934 retry = TRUE;
935 }
936 }
937 }
938
939 if (!retry)
940 self->was_recording = self->recording;
941 g_cond_signal (&self->cond);
942 g_mutex_unlock (&self->mutex);
943
944 if (inbuf) {
945 GST_DEBUG_OBJECT (self, "Pass video buffer ending at %" GST_TIME_FORMAT,
946 GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (inbuf) +
947 GST_BUFFER_DURATION (inbuf)));
948 ret = gst_pad_push (self->vsrcpad, inbuf);
949 }
950
951 g_mutex_lock (&self->mutex);
952 if (self->must_send_end_message & END_MESSAGE_AUDIO_PUSHED) {
953 self->must_send_end_message = END_MESSAGE_NORMAL;
954 g_mutex_unlock (&self->mutex);
955 gst_avwait_send_element_message (self, TRUE,
956 self->audio_running_time_to_end_at);
957 } else if (self->must_send_end_message & END_MESSAGE_STREAM_ENDED) {
958 if (self->audio_eos_flag) {
959 self->must_send_end_message = END_MESSAGE_NORMAL;
960 g_mutex_unlock (&self->mutex);
961 gst_avwait_send_element_message (self, TRUE,
962 self->audio_running_time_to_end_at);
963 } else {
964 self->must_send_end_message |= END_MESSAGE_VIDEO_PUSHED;
965 g_mutex_unlock (&self->mutex);
966 }
967 } else {
968 g_mutex_unlock (&self->mutex);
969 }
970
971 return ret;
972 }
973
974 /*
975 * assumes sign1 and sign2 are either 1 or -1
976 * returns 0 if sign1*num1 == sign2*num2
977 * -1 if sign1*num1 < sign2*num2
978 * 1 if sign1*num1 > sign2*num2
979 */
980 static gint
gst_avwait_compare_guint64_with_signs(gint sign1,guint64 num1,gint sign2,guint64 num2)981 gst_avwait_compare_guint64_with_signs (gint sign1,
982 guint64 num1, gint sign2, guint64 num2)
983 {
984 if (sign1 != sign2)
985 return sign1;
986 else if (num1 == num2)
987 return 0;
988 else
989 return num1 > num2 ? sign1 : -sign1;
990 }
991
992 static GstFlowReturn
gst_avwait_asink_chain(GstPad * pad,GstObject * parent,GstBuffer * inbuf)993 gst_avwait_asink_chain (GstPad * pad, GstObject * parent, GstBuffer * inbuf)
994 {
995 GstClockTime timestamp;
996 GstAvWait *self = GST_AVWAIT (parent);
997 GstClockTime current_running_time;
998 GstClockTime video_running_time = GST_CLOCK_TIME_NONE;
999 GstClockTime duration;
1000 GstClockTime running_time_at_end = GST_CLOCK_TIME_NONE;
1001 gint asign, vsign = 1, esign = 1;
1002 GstFlowReturn ret = GST_FLOW_OK;
1003 /* Make sure the video thread doesn't send the element message before we
1004 * actually call gst_pad_push */
1005 gboolean send_element_message = FALSE;
1006
1007 timestamp = GST_BUFFER_TIMESTAMP (inbuf);
1008 if (timestamp == GST_CLOCK_TIME_NONE) {
1009 gst_buffer_unref (inbuf);
1010 return GST_FLOW_ERROR;
1011 }
1012
1013 g_mutex_lock (&self->mutex);
1014 self->asegment.position = timestamp;
1015 asign =
1016 gst_segment_to_running_time_full (&self->asegment, GST_FORMAT_TIME,
1017 self->asegment.position, ¤t_running_time);
1018 if (asign == 0) {
1019 g_mutex_unlock (&self->mutex);
1020 gst_buffer_unref (inbuf);
1021 GST_ERROR_OBJECT (self, "Could not get current running time");
1022 return GST_FLOW_ERROR;
1023 }
1024
1025 if (self->first_audio_running_time == GST_CLOCK_TIME_NONE) {
1026 self->first_audio_running_time = current_running_time;
1027 }
1028
1029 g_cond_signal (&self->audio_cond);
1030 if (self->vsegment.format == GST_FORMAT_TIME) {
1031 vsign =
1032 gst_segment_to_running_time_full (&self->vsegment, GST_FORMAT_TIME,
1033 self->vsegment.position, &video_running_time);
1034 if (vsign == 0) {
1035 video_running_time = GST_CLOCK_TIME_NONE;
1036 }
1037 }
1038
1039 duration =
1040 gst_util_uint64_scale (gst_buffer_get_size (inbuf) / self->ainfo.bpf,
1041 GST_SECOND, self->ainfo.rate);
1042 if (duration != GST_CLOCK_TIME_NONE) {
1043 esign =
1044 gst_segment_to_running_time_full (&self->asegment, GST_FORMAT_TIME,
1045 self->asegment.position + duration, &running_time_at_end);
1046 if (esign == 0) {
1047 g_mutex_unlock (&self->mutex);
1048 GST_ERROR_OBJECT (self, "Could not get running time at end");
1049 gst_buffer_unref (inbuf);
1050 return GST_FLOW_ERROR;
1051 }
1052 }
1053
1054 while (!(self->video_eos_flag || self->audio_flush_flag
1055 || self->shutdown_flag) &&
1056 /* Start at timecode */
1057 /* Wait if we haven't received video yet */
1058 (video_running_time == GST_CLOCK_TIME_NONE
1059 /* Wait if audio is after the video: dunno what to do */
1060 || gst_avwait_compare_guint64_with_signs (asign,
1061 running_time_at_end, vsign, video_running_time) == 1)) {
1062 g_cond_wait (&self->cond, &self->mutex);
1063 vsign =
1064 gst_segment_to_running_time_full (&self->vsegment, GST_FORMAT_TIME,
1065 self->vsegment.position, &video_running_time);
1066 if (vsign == 0) {
1067 video_running_time = GST_CLOCK_TIME_NONE;
1068 }
1069 }
1070
1071 if (self->audio_flush_flag || self->shutdown_flag) {
1072 GST_DEBUG_OBJECT (self, "Shutting down, ignoring frame");
1073 gst_buffer_unref (inbuf);
1074 g_mutex_unlock (&self->mutex);
1075 return GST_FLOW_FLUSHING;
1076 }
1077
1078 if (self->audio_running_time_to_wait_for == GST_CLOCK_TIME_NONE
1079 /* Audio ends before start : drop */
1080 || gst_avwait_compare_guint64_with_signs (esign,
1081 running_time_at_end, 1, self->audio_running_time_to_wait_for) == -1
1082 /* Audio starts after end: drop */
1083 || current_running_time >= self->audio_running_time_to_end_at) {
1084 GST_DEBUG_OBJECT (self,
1085 "Dropped an audio buf at %" GST_TIME_FORMAT " waiting for %"
1086 GST_TIME_FORMAT " video time %" GST_TIME_FORMAT,
1087 GST_TIME_ARGS (current_running_time),
1088 GST_TIME_ARGS (self->audio_running_time_to_wait_for),
1089 GST_TIME_ARGS (video_running_time));
1090 GST_DEBUG_OBJECT (self, "Would have ended at %i %" GST_TIME_FORMAT,
1091 esign, GST_TIME_ARGS (running_time_at_end));
1092 gst_buffer_unref (inbuf);
1093 inbuf = NULL;
1094 if (current_running_time >= self->audio_running_time_to_end_at &&
1095 (self->must_send_end_message & END_MESSAGE_STREAM_ENDED) &&
1096 !(self->must_send_end_message & END_MESSAGE_AUDIO_PUSHED)) {
1097 send_element_message = TRUE;
1098 }
1099 } else if (gst_avwait_compare_guint64_with_signs (esign, running_time_at_end,
1100 1, self->audio_running_time_to_wait_for) >= 0
1101 && gst_avwait_compare_guint64_with_signs (esign, running_time_at_end, 1,
1102 self->audio_running_time_to_end_at) == -1) {
1103 /* Audio ends after start, but before end: clip */
1104 GstSegment asegment2 = self->asegment;
1105
1106 gst_segment_set_running_time (&asegment2, GST_FORMAT_TIME,
1107 self->audio_running_time_to_wait_for);
1108 inbuf =
1109 gst_audio_buffer_clip (inbuf, &asegment2, self->ainfo.rate,
1110 self->ainfo.bpf);
1111 } else if (gst_avwait_compare_guint64_with_signs (esign, running_time_at_end,
1112 1, self->audio_running_time_to_end_at) >= 0) {
1113 /* Audio starts after start, but before end: clip from the other side */
1114 GstSegment asegment2 = self->asegment;
1115 guint64 stop;
1116 gint ssign;
1117
1118 ssign =
1119 gst_segment_position_from_running_time_full (&asegment2,
1120 GST_FORMAT_TIME, self->audio_running_time_to_end_at, &stop);
1121 if (ssign > 0) {
1122 asegment2.stop = stop;
1123 } else {
1124 /* Stopping before the start of the audio segment?! */
1125 /* This shouldn't happen: we already know that the current audio is
1126 * inside the segment, and that the end is after the current audio
1127 * position */
1128 GST_ELEMENT_ERROR (self, CORE, FAILED,
1129 ("Failed to clip audio: it should have ended before the current segment"),
1130 NULL);
1131 }
1132 inbuf =
1133 gst_audio_buffer_clip (inbuf, &asegment2, self->ainfo.rate,
1134 self->ainfo.bpf);
1135 if (self->must_send_end_message & END_MESSAGE_STREAM_ENDED) {
1136 send_element_message = TRUE;
1137 }
1138 } else {
1139 /* Programming error? Shouldn't happen */
1140 g_assert_not_reached ();
1141 }
1142 g_mutex_unlock (&self->mutex);
1143
1144 if (inbuf) {
1145 GstClockTime new_duration =
1146 gst_util_uint64_scale (gst_buffer_get_size (inbuf) / self->ainfo.bpf,
1147 GST_SECOND, self->ainfo.rate);
1148 GstClockTime new_running_time_at_end =
1149 gst_segment_to_running_time (&self->asegment, GST_FORMAT_TIME,
1150 self->asegment.position + new_duration);
1151 GST_DEBUG_OBJECT (self, "Pass audio buffer ending at %" GST_TIME_FORMAT,
1152 GST_TIME_ARGS (new_running_time_at_end));
1153 ret = gst_pad_push (self->asrcpad, inbuf);
1154 }
1155
1156 if (send_element_message) {
1157 g_mutex_lock (&self->mutex);
1158 if ((self->must_send_end_message & END_MESSAGE_VIDEO_PUSHED) ||
1159 self->video_eos_flag) {
1160 self->must_send_end_message = END_MESSAGE_NORMAL;
1161 g_mutex_unlock (&self->mutex);
1162 gst_avwait_send_element_message (self, TRUE,
1163 self->audio_running_time_to_end_at);
1164 } else if (self->must_send_end_message & END_MESSAGE_STREAM_ENDED) {
1165 self->must_send_end_message |= END_MESSAGE_AUDIO_PUSHED;
1166 g_mutex_unlock (&self->mutex);
1167 } else {
1168 g_assert_not_reached ();
1169 g_mutex_unlock (&self->mutex);
1170 }
1171 }
1172 send_element_message = FALSE;
1173 return ret;
1174 }
1175
1176 static GstIterator *
gst_avwait_iterate_internal_links(GstPad * pad,GstObject * parent)1177 gst_avwait_iterate_internal_links (GstPad * pad, GstObject * parent)
1178 {
1179 GstIterator *it = NULL;
1180 GstPad *opad;
1181 GValue val = G_VALUE_INIT;
1182 GstAvWait *self = GST_AVWAIT (parent);
1183
1184 if (self->asinkpad == pad)
1185 opad = gst_object_ref (self->asrcpad);
1186 else if (self->asrcpad == pad)
1187 opad = gst_object_ref (self->asinkpad);
1188 else if (self->vsinkpad == pad)
1189 opad = gst_object_ref (self->vsrcpad);
1190 else if (self->vsrcpad == pad)
1191 opad = gst_object_ref (self->vsinkpad);
1192 else
1193 goto out;
1194
1195 g_value_init (&val, GST_TYPE_PAD);
1196 g_value_set_object (&val, opad);
1197 it = gst_iterator_new_single (GST_TYPE_PAD, &val);
1198 g_value_unset (&val);
1199
1200 gst_object_unref (opad);
1201
1202 out:
1203 return it;
1204 }
1205