1 /*
2 * Copyright (c) 2008 Benjamin Schmitz <vortex@wolpzone.de>
3 * Copyright (c) 2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
4 *
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
9 *
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
14 *
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
19 */
20
21 /**
22 * SECTION:element-assrender
23 * @title: assrender
24 *
25 * Renders timestamped SSA/ASS subtitles on top of a video stream.
26 *
27 * ## Example launch line
28 * |[
29 * gst-launch-1.0 -v filesrc location=/path/to/mkv ! matroskademux name=d ! queue ! mpegaudioparse ! mpg123audiodec ! audioconvert ! autoaudiosink d. ! queue ! h264parse ! avdec_h264 ! videoconvert ! r. d. ! queue ! "application/x-ass" ! assrender name=r ! videoconvert ! autovideosink
30 * ]| This pipeline demuxes a Matroska file with h.264 video, MP3 audio and embedded ASS subtitles and renders the subtitles on top of the video.
31 *
32 */
33
34 #ifdef HAVE_CONFIG_H
35 # include <config.h>
36 #endif
37
38 #include <gst/video/gstvideometa.h>
39
40 #include "gstassrender.h"
41
42 #include <string.h>
43
44 GST_DEBUG_CATEGORY_STATIC (gst_ass_render_debug);
45 GST_DEBUG_CATEGORY_STATIC (gst_ass_render_lib_debug);
46 #define GST_CAT_DEFAULT gst_ass_render_debug
47
48 /* Filter signals and props */
49 enum
50 {
51 LAST_SIGNAL
52 };
53
54 enum
55 {
56 PROP_0,
57 PROP_ENABLE,
58 PROP_EMBEDDEDFONTS,
59 PROP_WAIT_TEXT
60 };
61
62 /* FIXME: video-blend.c doesn't support formats with more than 8 bit per
63 * component (which get unpacked into ARGB64 or AYUV64) yet, such as:
64 * v210, v216, UYVP, GRAY16_LE, GRAY16_BE */
65 #define FORMATS "{ BGRx, RGBx, xRGB, xBGR, RGBA, BGRA, ARGB, ABGR, RGB, BGR, \
66 I420, YV12, AYUV, YUY2, UYVY, v308, Y41B, Y42B, Y444, \
67 NV12, NV21, A420, YUV9, YVU9, IYU1, GRAY8 }"
68
69 #define ASSRENDER_CAPS GST_VIDEO_CAPS_MAKE(FORMATS)
70
71 #define ASSRENDER_ALL_CAPS ASSRENDER_CAPS ";" \
72 GST_VIDEO_CAPS_MAKE_WITH_FEATURES ("ANY", GST_VIDEO_FORMATS_ALL)
73
74 static GstStaticCaps sw_template_caps = GST_STATIC_CAPS (ASSRENDER_CAPS);
75
76 static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
77 GST_PAD_SRC,
78 GST_PAD_ALWAYS,
79 GST_STATIC_CAPS (ASSRENDER_ALL_CAPS)
80 );
81
82 static GstStaticPadTemplate video_sink_factory =
83 GST_STATIC_PAD_TEMPLATE ("video_sink",
84 GST_PAD_SINK,
85 GST_PAD_ALWAYS,
86 GST_STATIC_CAPS (ASSRENDER_ALL_CAPS)
87 );
88
89 static GstStaticPadTemplate text_sink_factory =
90 GST_STATIC_PAD_TEMPLATE ("text_sink",
91 GST_PAD_SINK,
92 GST_PAD_ALWAYS,
93 GST_STATIC_CAPS ("application/x-ass; application/x-ssa")
94 );
95
96 #define GST_ASS_RENDER_GET_LOCK(ass) (&GST_ASS_RENDER (ass)->lock)
97 #define GST_ASS_RENDER_GET_COND(ass) (&GST_ASS_RENDER (ass)->cond)
98 #define GST_ASS_RENDER_LOCK(ass) (g_mutex_lock (GST_ASS_RENDER_GET_LOCK (ass)))
99 #define GST_ASS_RENDER_UNLOCK(ass) (g_mutex_unlock (GST_ASS_RENDER_GET_LOCK (ass)))
100 #define GST_ASS_RENDER_WAIT(ass) (g_cond_wait (GST_ASS_RENDER_GET_COND (ass), GST_ASS_RENDER_GET_LOCK (ass)))
101 #define GST_ASS_RENDER_SIGNAL(ass) (g_cond_signal (GST_ASS_RENDER_GET_COND (ass)))
102 #define GST_ASS_RENDER_BROADCAST(ass)(g_cond_broadcast (GST_ASS_RENDER_GET_COND (ass)))
103
104 static void gst_ass_render_set_property (GObject * object, guint prop_id,
105 const GValue * value, GParamSpec * pspec);
106 static void gst_ass_render_get_property (GObject * object, guint prop_id,
107 GValue * value, GParamSpec * pspec);
108
109 static void gst_ass_render_finalize (GObject * object);
110
111 static GstStateChangeReturn gst_ass_render_change_state (GstElement * element,
112 GstStateChange transition);
113
114 #define gst_ass_render_parent_class parent_class
115 G_DEFINE_TYPE (GstAssRender, gst_ass_render, GST_TYPE_ELEMENT);
116
117 static GstCaps *gst_ass_render_get_videosink_caps (GstPad * pad,
118 GstAssRender * render, GstCaps * filter);
119 static GstCaps *gst_ass_render_get_src_caps (GstPad * pad,
120 GstAssRender * render, GstCaps * filter);
121
122 static gboolean gst_ass_render_setcaps_video (GstPad * pad,
123 GstAssRender * render, GstCaps * caps);
124 static gboolean gst_ass_render_setcaps_text (GstPad * pad,
125 GstAssRender * render, GstCaps * caps);
126
127 static GstFlowReturn gst_ass_render_chain_video (GstPad * pad,
128 GstObject * parent, GstBuffer * buf);
129 static GstFlowReturn gst_ass_render_chain_text (GstPad * pad,
130 GstObject * parent, GstBuffer * buf);
131
132 static gboolean gst_ass_render_event_video (GstPad * pad, GstObject * parent,
133 GstEvent * event);
134 static gboolean gst_ass_render_event_text (GstPad * pad, GstObject * parent,
135 GstEvent * event);
136 static gboolean gst_ass_render_event_src (GstPad * pad, GstObject * parent,
137 GstEvent * event);
138
139 static gboolean gst_ass_render_query_video (GstPad * pad, GstObject * parent,
140 GstQuery * query);
141 static gboolean gst_ass_render_query_src (GstPad * pad, GstObject * parent,
142 GstQuery * query);
143
/* initialize the plugin's class: install properties, wire up the state
 * change vfunc, register the static pad templates and element metadata */
static void
gst_ass_render_class_init (GstAssRenderClass * klass)
{
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstElementClass *gstelement_class = (GstElementClass *) klass;

  /* GObject virtual methods: property access and destruction */
  gobject_class->set_property = gst_ass_render_set_property;
  gobject_class->get_property = gst_ass_render_get_property;
  gobject_class->finalize = gst_ass_render_finalize;

  /* enable: master switch for rendering subtitles at all */
  g_object_class_install_property (gobject_class, PROP_ENABLE,
      g_param_spec_boolean ("enable", "Enable",
          "Enable rendering of subtitles", TRUE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /* embeddedfonts: let libass extract fonts shipped inside the stream */
  g_object_class_install_property (gobject_class, PROP_EMBEDDEDFONTS,
      g_param_spec_boolean ("embeddedfonts", "Embedded Fonts",
          "Extract and use fonts embedded in the stream", TRUE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /* wait-text: block the video chain until matching text has arrived.
   * NOTE(review): the pspec default here is TRUE, but gst_ass_render_init()
   * sets render->wait_text = FALSE — confirm which default is intended. */
  g_object_class_install_property (gobject_class, PROP_WAIT_TEXT,
      g_param_spec_boolean ("wait-text", "Wait Text",
          "Whether to wait for subtitles", TRUE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  gstelement_class->change_state =
      GST_DEBUG_FUNCPTR (gst_ass_render_change_state);

  gst_element_class_add_static_pad_template (gstelement_class, &src_factory);
  gst_element_class_add_static_pad_template (gstelement_class,
      &video_sink_factory);
  gst_element_class_add_static_pad_template (gstelement_class,
      &text_sink_factory);

  gst_element_class_set_static_metadata (gstelement_class, "ASS/SSA Render",
      "Mixer/Video/Overlay/Subtitle",
      "Renders ASS/SSA subtitles with libass",
      "Benjamin Schmitz <vortex@wolpzone.de>, "
      "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
}
185
186 static void
_libass_message_cb(gint level,const gchar * fmt,va_list args,gpointer render)187 _libass_message_cb (gint level, const gchar * fmt, va_list args,
188 gpointer render)
189 {
190 gchar *message = g_strdup_vprintf (fmt, args);
191
192 if (level < 2)
193 GST_CAT_ERROR_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
194 else if (level < 4)
195 GST_CAT_WARNING_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
196 else if (level < 5)
197 GST_CAT_INFO_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
198 else if (level < 6)
199 GST_CAT_DEBUG_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
200 else
201 GST_CAT_LOG_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
202
203 g_free (message);
204 }
205
static void
gst_ass_render_init (GstAssRender * render)
{
  GST_DEBUG_OBJECT (render, "init");

  /* Create the three always-pads from the static templates */
  render->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
  render->video_sinkpad =
      gst_pad_new_from_static_template (&video_sink_factory, "video_sink");
  render->text_sinkpad =
      gst_pad_new_from_static_template (&text_sink_factory, "text_sink");

  /* Chain functions: video buffers get composited, text buffers queued */
  gst_pad_set_chain_function (render->video_sinkpad,
      GST_DEBUG_FUNCPTR (gst_ass_render_chain_video));
  gst_pad_set_chain_function (render->text_sinkpad,
      GST_DEBUG_FUNCPTR (gst_ass_render_chain_text));

  gst_pad_set_event_function (render->video_sinkpad,
      GST_DEBUG_FUNCPTR (gst_ass_render_event_video));
  gst_pad_set_event_function (render->text_sinkpad,
      GST_DEBUG_FUNCPTR (gst_ass_render_event_text));
  gst_pad_set_event_function (render->srcpad,
      GST_DEBUG_FUNCPTR (gst_ass_render_event_src));

  gst_pad_set_query_function (render->srcpad,
      GST_DEBUG_FUNCPTR (gst_ass_render_query_src));
  gst_pad_set_query_function (render->video_sinkpad,
      GST_DEBUG_FUNCPTR (gst_ass_render_query_video));

  /* Forward allocation queries from the video sink pad to the src pad */
  GST_PAD_SET_PROXY_ALLOCATION (render->video_sinkpad);

  gst_element_add_pad (GST_ELEMENT (render), render->srcpad);
  gst_element_add_pad (GST_ELEMENT (render), render->video_sinkpad);
  gst_element_add_pad (GST_ELEMENT (render), render->text_sinkpad);

  gst_video_info_init (&render->info);

  /* lock/cond protect the element state shared between the two chains */
  g_mutex_init (&render->lock);
  g_cond_init (&render->cond);

  render->renderer_init_ok = FALSE;
  render->track_init_ok = FALSE;
  render->enable = TRUE;
  render->embeddedfonts = TRUE;
  /* NOTE(review): pspec default for wait-text is TRUE — confirm intent */
  render->wait_text = FALSE;

  gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
  gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);

  /* ass_mutex serializes all libass calls; libass is not thread-safe */
  g_mutex_init (&render->ass_mutex);
  render->ass_library = ass_library_init ();
  ass_set_message_cb (render->ass_library, _libass_message_cb, render);
  ass_set_extract_fonts (render->ass_library, 1);

  render->ass_renderer = ass_renderer_init (render->ass_library);
  if (!render->ass_renderer) {
    GST_WARNING_OBJECT (render, "cannot create renderer instance");
    g_assert_not_reached ();
  }

  render->ass_track = NULL;

  GST_DEBUG_OBJECT (render, "init complete");
}
269
/* GObject finalize: release synchronization primitives and all libass
 * objects (track, renderer, library — in dependency order). */
static void
gst_ass_render_finalize (GObject * object)
{
  GstAssRender *render = GST_ASS_RENDER (object);

  g_mutex_clear (&render->lock);
  g_cond_clear (&render->cond);

  if (render->ass_track) {
    ass_free_track (render->ass_track);
  }

  if (render->ass_renderer) {
    ass_renderer_done (render->ass_renderer);
  }

  if (render->ass_library) {
    ass_library_done (render->ass_library);
  }

  g_mutex_clear (&render->ass_mutex);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
294
295 static void
gst_ass_render_reset_composition(GstAssRender * render)296 gst_ass_render_reset_composition (GstAssRender * render)
297 {
298 if (render->composition) {
299 gst_video_overlay_composition_unref (render->composition);
300 render->composition = NULL;
301 }
302 }
303
304 static void
gst_ass_render_set_property(GObject * object,guint prop_id,const GValue * value,GParamSpec * pspec)305 gst_ass_render_set_property (GObject * object, guint prop_id,
306 const GValue * value, GParamSpec * pspec)
307 {
308 GstAssRender *render = GST_ASS_RENDER (object);
309
310 GST_ASS_RENDER_LOCK (render);
311 switch (prop_id) {
312 case PROP_ENABLE:
313 render->enable = g_value_get_boolean (value);
314 break;
315 case PROP_EMBEDDEDFONTS:
316 render->embeddedfonts = g_value_get_boolean (value);
317 g_mutex_lock (&render->ass_mutex);
318 ass_set_extract_fonts (render->ass_library, render->embeddedfonts);
319 g_mutex_unlock (&render->ass_mutex);
320 break;
321 case PROP_WAIT_TEXT:
322 render->wait_text = g_value_get_boolean (value);
323 break;
324 default:
325 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
326 break;
327 }
328 GST_ASS_RENDER_UNLOCK (render);
329 }
330
331 static void
gst_ass_render_get_property(GObject * object,guint prop_id,GValue * value,GParamSpec * pspec)332 gst_ass_render_get_property (GObject * object, guint prop_id,
333 GValue * value, GParamSpec * pspec)
334 {
335 GstAssRender *render = GST_ASS_RENDER (object);
336
337 GST_ASS_RENDER_LOCK (render);
338 switch (prop_id) {
339 case PROP_ENABLE:
340 g_value_set_boolean (value, render->enable);
341 break;
342 case PROP_EMBEDDEDFONTS:
343 g_value_set_boolean (value, render->embeddedfonts);
344 break;
345 case PROP_WAIT_TEXT:
346 g_value_set_boolean (value, render->wait_text);
347 break;
348 default:
349 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
350 break;
351 }
352 GST_ASS_RENDER_UNLOCK (render);
353 }
354
355 /* Called with lock held */
356 static void
gst_ass_render_pop_text(GstAssRender * render)357 gst_ass_render_pop_text (GstAssRender * render)
358 {
359 while (render->subtitle_pending) {
360 GST_DEBUG_OBJECT (render, "releasing text buffer %p",
361 render->subtitle_pending->data);
362 gst_buffer_unref (render->subtitle_pending->data);
363 render->subtitle_pending =
364 g_slist_delete_link (render->subtitle_pending,
365 render->subtitle_pending);
366 }
367
368 /* Let the text task know we used that buffer */
369 GST_ASS_RENDER_BROADCAST (render);
370 }
371
/* GstElement state change handler.
 *
 * Going down (PAUSED->READY) the flushing flags are raised and waiting
 * threads are woken up *before* chaining up, so streaming threads blocked
 * in the chain functions can exit; the libass track and renderer state
 * are torn down *after* chaining up.  Going up (READY->PAUSED) the flush
 * and EOS flags and both segments are reset. */
static GstStateChangeReturn
gst_ass_render_change_state (GstElement * element, GstStateChange transition)
{
  GstAssRender *render = GST_ASS_RENDER (element);
  GstStateChangeReturn ret;

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      /* unblock both chain functions before the parent stops the pads */
      GST_ASS_RENDER_LOCK (render);
      render->subtitle_flushing = TRUE;
      render->video_flushing = TRUE;
      gst_ass_render_pop_text (render);
      GST_ASS_RENDER_UNLOCK (render);
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
  if (ret == GST_STATE_CHANGE_FAILURE)
    return ret;

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      /* free the parsed subtitle track; a new one is built on next caps */
      g_mutex_lock (&render->ass_mutex);
      if (render->ass_track)
        ass_free_track (render->ass_track);
      render->ass_track = NULL;
      render->track_init_ok = FALSE;
      render->renderer_init_ok = FALSE;
      gst_ass_render_reset_composition (render);
      g_mutex_unlock (&render->ass_mutex);
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      /* fresh run: clear flush/EOS state and restart both segments */
      GST_ASS_RENDER_LOCK (render);
      render->subtitle_flushing = FALSE;
      render->video_flushing = FALSE;
      render->video_eos = FALSE;
      render->subtitle_eos = FALSE;
      gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
      gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
      GST_ASS_RENDER_UNLOCK (render);
      break;
    default:
      break;
  }


  return ret;
}
422
423 static gboolean
gst_ass_render_query_src(GstPad * pad,GstObject * parent,GstQuery * query)424 gst_ass_render_query_src (GstPad * pad, GstObject * parent, GstQuery * query)
425 {
426 gboolean res = FALSE;
427
428 switch (GST_QUERY_TYPE (query)) {
429 case GST_QUERY_CAPS:
430 {
431 GstCaps *filter, *caps;
432
433 gst_query_parse_caps (query, &filter);
434 caps = gst_ass_render_get_src_caps (pad, (GstAssRender *) parent, filter);
435 gst_query_set_caps_result (query, caps);
436 gst_caps_unref (caps);
437 res = TRUE;
438 break;
439 }
440 default:
441 res = gst_pad_query_default (pad, parent, query);
442 break;
443 }
444
445 return res;
446 }
447
448 static gboolean
gst_ass_render_event_src(GstPad * pad,GstObject * parent,GstEvent * event)449 gst_ass_render_event_src (GstPad * pad, GstObject * parent, GstEvent * event)
450 {
451 GstAssRender *render = GST_ASS_RENDER (parent);
452 gboolean ret;
453
454 GST_DEBUG_OBJECT (render, "received src event %" GST_PTR_FORMAT, event);
455
456 /* FIXME: why not just always push it on text pad? */
457 if (render->track_init_ok) {
458 ret = gst_pad_push_event (render->video_sinkpad, gst_event_ref (event));
459 gst_pad_push_event (render->text_sinkpad, event);
460 } else {
461 ret = gst_pad_push_event (render->video_sinkpad, event);
462 }
463
464 return ret;
465 }
466
/**
 * gst_ass_render_add_feature_and_intersect:
 *
 * Creates a new #GstCaps containing the (given caps +
 * given caps feature) + (given caps intersected by the
 * given filter).
 *
 * Returns: (transfer full): the new #GstCaps; @caps and @filter are
 * not consumed.
 */
static GstCaps *
gst_ass_render_add_feature_and_intersect (GstCaps * caps,
    const gchar * feature, GstCaps * filter)
{
  int i, caps_size;
  GstCaps *new_caps;

  new_caps = gst_caps_copy (caps);

  /* tag every non-ANY structure of the copy with the extra feature */
  caps_size = gst_caps_get_size (new_caps);
  for (i = 0; i < caps_size; i++) {
    GstCapsFeatures *features = gst_caps_get_features (new_caps, i);
    if (!gst_caps_features_is_any (features)) {
      gst_caps_features_add (features, feature);
    }
  }

  /* then append the original caps restricted to the filter (the variant
   * without the feature); gst_caps_append takes ownership of its argument */
  gst_caps_append (new_caps, gst_caps_intersect_full (caps,
          filter, GST_CAPS_INTERSECT_FIRST));

  return new_caps;
}
498
/**
 * gst_ass_render_intersect_by_feature:
 *
 * Creates a new #GstCaps based on the following filtering rule.
 *
 * For each individual caps contained in given caps, if the
 * caps uses the given caps feature, keep a version of the caps
 * with the feature and an another one without. Otherwise, intersect
 * the caps with the given filter.
 *
 * Returns: (transfer full): the new #GstCaps
 */
static GstCaps *
gst_ass_render_intersect_by_feature (GstCaps * caps,
    const gchar * feature, GstCaps * filter)
{
  int i, caps_size;
  GstCaps *new_caps;

  new_caps = gst_caps_new_empty ();

  caps_size = gst_caps_get_size (caps);
  for (i = 0; i < caps_size; i++) {
    GstStructure *caps_structure = gst_caps_get_structure (caps, i);
    GstCapsFeatures *caps_features =
        gst_caps_features_copy (gst_caps_get_features (caps, i));
    GstCaps *filtered_caps;
    /* single-structure caps owning our private copy of the features */
    GstCaps *simple_caps =
        gst_caps_new_full (gst_structure_copy (caps_structure), NULL);
    gst_caps_set_features (simple_caps, 0, caps_features);

    if (gst_caps_features_contains (caps_features, feature)) {
      /* keep a copy *with* the feature... */
      gst_caps_append (new_caps, gst_caps_copy (simple_caps));

      /* ...and strip it for the second variant: caps_features is still
       * the features object inside simple_caps, so removing the feature
       * here also affects the ref taken below */
      gst_caps_features_remove (caps_features, feature);
      filtered_caps = gst_caps_ref (simple_caps);
    } else {
      filtered_caps = gst_caps_intersect_full (simple_caps, filter,
          GST_CAPS_INTERSECT_FIRST);
    }

    gst_caps_unref (simple_caps);
    gst_caps_append (new_caps, filtered_caps);
  }

  return new_caps;
}
546
/* Compute the caps we can accept on the video sink pad, based on what the
 * src pad peer supports plus the overlay-composition meta variants.
 * Returns a new caps ref; @filter is not consumed. */
static GstCaps *
gst_ass_render_get_videosink_caps (GstPad * pad, GstAssRender * render,
    GstCaps * filter)
{
  GstPad *srcpad = render->srcpad;
  GstCaps *peer_caps = NULL, *caps = NULL, *assrender_filter = NULL;

  if (filter) {
    /* filter caps + composition feature + filter caps
     * filtered by the software caps. */
    GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
    assrender_filter = gst_ass_render_add_feature_and_intersect (filter,
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
    gst_caps_unref (sw_caps);

    GST_DEBUG_OBJECT (render, "assrender filter %" GST_PTR_FORMAT,
        assrender_filter);
  }

  /* ask downstream what it supports, constrained by the expanded filter */
  peer_caps = gst_pad_peer_query_caps (srcpad, assrender_filter);

  if (assrender_filter)
    gst_caps_unref (assrender_filter);

  if (peer_caps) {

    GST_DEBUG_OBJECT (pad, "peer caps %" GST_PTR_FORMAT, peer_caps);

    if (gst_caps_is_any (peer_caps)) {

      /* if peer returns ANY caps, return filtered src pad template caps */
      caps = gst_caps_copy (gst_pad_get_pad_template_caps (srcpad));
    } else {

      /* duplicate caps which contains the composition into one version with
       * the meta and one without. Filter the other caps by the software caps */
      GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
      caps = gst_ass_render_intersect_by_feature (peer_caps,
          GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
      gst_caps_unref (sw_caps);
    }

    gst_caps_unref (peer_caps);

  } else {
    /* no peer, our padtemplate is enough then */
    caps = gst_pad_get_pad_template_caps (pad);
  }

  /* finally restrict the result to the caller-supplied filter */
  if (filter) {
    GstCaps *intersection = gst_caps_intersect_full (filter, caps,
        GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (caps);
    caps = intersection;
  }

  GST_DEBUG_OBJECT (render, "returning %" GST_PTR_FORMAT, caps);

  return caps;
}
607
/* Compute the caps we can produce on the src pad, based on what the video
 * sink pad peer (upstream) supports plus the overlay-composition meta
 * variants.  Mirror image of gst_ass_render_get_videosink_caps().
 * Returns a new caps ref; @filter is not consumed. */
static GstCaps *
gst_ass_render_get_src_caps (GstPad * pad, GstAssRender * render,
    GstCaps * filter)
{
  GstPad *sinkpad = render->video_sinkpad;
  GstCaps *peer_caps = NULL, *caps = NULL, *assrender_filter = NULL;

  if (filter) {
    /* duplicate filter caps which contains the composition into one version
     * with the meta and one without. Filter the other caps by the software
     * caps */
    GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
    assrender_filter =
        gst_ass_render_intersect_by_feature (filter,
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
    gst_caps_unref (sw_caps);
  }

  /* ask upstream what it supports, constrained by the expanded filter */
  peer_caps = gst_pad_peer_query_caps (sinkpad, assrender_filter);

  if (assrender_filter)
    gst_caps_unref (assrender_filter);

  if (peer_caps) {

    GST_DEBUG_OBJECT (pad, "peer caps %" GST_PTR_FORMAT, peer_caps);

    if (gst_caps_is_any (peer_caps)) {

      /* if peer returns ANY caps, return filtered sink pad template caps */
      caps = gst_caps_copy (gst_pad_get_pad_template_caps (sinkpad));

    } else {

      /* return upstream caps + composition feature + upstream caps
       * filtered by the software caps. */
      GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
      caps = gst_ass_render_add_feature_and_intersect (peer_caps,
          GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
      gst_caps_unref (sw_caps);
    }

    gst_caps_unref (peer_caps);

  } else {
    /* no peer, our padtemplate is enough then */
    caps = gst_pad_get_pad_template_caps (pad);
  }

  /* finally restrict the result to the caller-supplied filter */
  if (filter) {
    GstCaps *intersection;

    intersection =
        gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (caps);
    caps = intersection;
  }

  GST_DEBUG_OBJECT (render, "returning %" GST_PTR_FORMAT, caps);

  return caps;
}
670
/* Software-blend a linked list of libass images into one BGRA overlay
 * buffer with premultiplied alpha.
 *
 * data/width/height/stride describe the destination buffer, which is
 * cleared to fully transparent first.  x_off/y_off shift every ASS image
 * inside the destination.  Bytes are stored in B,G,R,A order and the color
 * channels are premultiplied by alpha (see the blending math below).
 *
 * NOTE(review): only the right/bottom edges are clipped; this assumes
 * dst_x/dst_y (libass position + offset) are never negative — confirm. */
static void
blit_bgra_premultiplied (GstAssRender * render, ASS_Image * ass_image,
    guint8 * data, gint width, gint height, gint stride, gint x_off, gint y_off)
{
  guint counter = 0;            /* images processed, for the log below */
  gint alpha, r, g, b, k;
  const guint8 *src;
  guint8 *dst;
  gint x, y, w, h;
  gint dst_skip;
  gint src_skip;
  gint dst_x, dst_y;

  /* start from a fully transparent overlay */
  memset (data, 0, stride * height);

  while (ass_image) {
    dst_x = ass_image->dst_x + x_off;
    dst_y = ass_image->dst_y + y_off;

    /* clip against the right/bottom borders of the destination */
    w = MIN (ass_image->w, width - dst_x);
    h = MIN (ass_image->h, height - dst_y);
    if (w <= 0 || h <= 0)
      goto next;

    /* ass_image->color is packed RGBA; the low byte is transparency
     * (0 = opaque), so invert it to get the image's alpha */
    alpha = 255 - (ass_image->color & 0xff);
    if (!alpha)
      goto next;

    r = ((ass_image->color) >> 24) & 0xff;
    g = ((ass_image->color) >> 16) & 0xff;
    b = ((ass_image->color) >> 8) & 0xff;

    src = ass_image->bitmap;    /* 8-bit per-pixel coverage bitmap */
    dst = data + dst_y * stride + dst_x * 4;

    /* bytes to skip at each row end to reach the next row's start */
    src_skip = ass_image->stride - w;
    dst_skip = stride - w * 4;

    for (y = 0; y < h; y++) {
      for (x = 0; x < w; x++) {
        if (src[0]) {
          /* effective per-pixel alpha = coverage * image alpha */
          k = src[0] * alpha / 255;
          if (dst[3] == 0) {
            /* destination still transparent: store premultiplied source */
            dst[3] = k;
            dst[2] = (k * r) / 255;
            dst[1] = (k * g) / 255;
            dst[0] = (k * b) / 255;
          } else {
            /* standard "over" compositing of premultiplied pixels */
            dst[3] = k + (255 - k) * dst[3] / 255;
            dst[2] = (k * r + (255 - k) * dst[2]) / 255;
            dst[1] = (k * g + (255 - k) * dst[1]) / 255;
            dst[0] = (k * b + (255 - k) * dst[0]) / 255;
          }
        }
        src++;
        dst += 4;
      }
      src += src_skip;
      dst += dst_skip;
    }
  next:
    counter++;
    ass_image = ass_image->next;
  }
  GST_LOG_OBJECT (render, "amount of rendered ass_image: %u", counter);
}
737
738 static gboolean
gst_ass_render_can_handle_caps(GstCaps * incaps)739 gst_ass_render_can_handle_caps (GstCaps * incaps)
740 {
741 static GstStaticCaps static_caps = GST_STATIC_CAPS (ASSRENDER_CAPS);
742 gboolean ret;
743 GstCaps *caps;
744
745 caps = gst_static_caps_get (&static_caps);
746 ret = gst_caps_is_subset (incaps, caps);
747 gst_caps_unref (caps);
748
749 return ret;
750 }
751
752 static void
gst_ass_render_update_render_size(GstAssRender * render)753 gst_ass_render_update_render_size (GstAssRender * render)
754 {
755 gdouble video_aspect = (gdouble) render->info.width /
756 (gdouble) render->info.height;
757 gdouble window_aspect = (gdouble) render->window_width /
758 (gdouble) render->window_height;
759
760 /* render at the window size, with the video aspect ratio */
761 if (video_aspect >= window_aspect) {
762 render->ass_frame_width = render->window_width;
763 render->ass_frame_height = render->window_width / video_aspect;
764 } else {
765 render->ass_frame_width = render->window_height * video_aspect;
766 render->ass_frame_height = render->window_height;
767 }
768 }
769
/* Negotiate output caps with downstream and decide whether to attach the
 * overlay composition as a buffer meta or to blend it into the frames
 * ourselves.  Also (re)configures the libass renderer on success.
 *
 * @caps may be NULL, in which case the current video sink caps are used.
 * Returns TRUE on success; on failure the src pad is marked for
 * reconfiguration so a later buffer retries. */
static gboolean
gst_ass_render_negotiate (GstAssRender * render, GstCaps * caps)
{
  gboolean upstream_has_meta = FALSE;
  gboolean caps_has_meta = FALSE;
  gboolean alloc_has_meta = FALSE;
  gboolean attach = FALSE;
  gboolean ret = TRUE;
  guint width, height;
  GstCapsFeatures *f;
  GstCaps *overlay_caps;
  GstQuery *query;
  guint alloc_index;

  GST_DEBUG_OBJECT (render, "performing negotiation");

  /* Clear cached composition */
  gst_ass_render_reset_composition (render);

  /* Clear any pending reconfigure flag */
  gst_pad_check_reconfigure (render->srcpad);

  /* take a ref either way so the unref at the end is unconditional */
  if (!caps)
    caps = gst_pad_get_current_caps (render->video_sinkpad);
  else
    gst_caps_ref (caps);

  if (!caps || gst_caps_is_empty (caps))
    goto no_format;

  /* Check if upstream caps have meta */
  if ((f = gst_caps_get_features (caps, 0))) {
    upstream_has_meta = gst_caps_features_contains (f,
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);
  }

  /* Initialize dimensions */
  width = render->info.width;
  height = render->info.height;

  if (upstream_has_meta) {
    overlay_caps = gst_caps_ref (caps);
  } else {
    GstCaps *peercaps;

    /* BaseTransform requires caps for the allocation query to work */
    overlay_caps = gst_caps_copy (caps);
    f = gst_caps_get_features (overlay_caps, 0);
    gst_caps_features_add (f,
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);

    /* Then check if downstream accept overlay composition in caps */
    /* FIXME: We should probably check if downstream *prefers* the
     * overlay meta, and only enforce usage of it if we can't handle
     * the format ourselves and thus would have to drop the overlays.
     * Otherwise we should prefer what downstream wants here.
     */
    peercaps = gst_pad_peer_query_caps (render->srcpad, NULL);
    caps_has_meta = gst_caps_can_intersect (peercaps, overlay_caps);
    gst_caps_unref (peercaps);

    GST_DEBUG ("caps have overlay meta %d", caps_has_meta);
  }

  if (upstream_has_meta || caps_has_meta) {
    /* Send caps immediatly, it's needed by GstBaseTransform to get a reply
     * from allocation query */
    ret = gst_pad_set_caps (render->srcpad, overlay_caps);

    /* First check if the allocation meta has compositon */
    query = gst_query_new_allocation (overlay_caps, FALSE);

    if (!gst_pad_peer_query (render->srcpad, query)) {
      /* no problem, we use the query defaults */
      GST_DEBUG_OBJECT (render, "ALLOCATION query failed");

      /* In case we were flushing, mark reconfigure and fail this method,
       * will make it retry */
      if (render->video_flushing)
        ret = FALSE;
    }

    alloc_has_meta = gst_query_find_allocation_meta (query,
        GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE, &alloc_index);

    GST_DEBUG ("sink alloc has overlay meta %d", alloc_has_meta);

    /* downstream may advertise its window size in the meta params; if so,
     * render the subtitles at that resolution instead of the video's */
    if (alloc_has_meta) {
      const GstStructure *params;

      gst_query_parse_nth_allocation_meta (query, alloc_index, &params);
      if (params) {
        if (gst_structure_get (params, "width", G_TYPE_UINT, &width,
                "height", G_TYPE_UINT, &height, NULL)) {
          GST_DEBUG ("received window size: %dx%d", width, height);
          g_assert (width != 0 && height != 0);
        }
      }
    }

    gst_query_unref (query);
  }

  /* Update render size if needed */
  render->window_width = width;
  render->window_height = height;
  gst_ass_render_update_render_size (render);

  /* For backward compatbility, we will prefer bliting if downstream
   * allocation does not support the meta. In other case we will prefer
   * attaching, and will fail the negotiation in the unlikely case we are
   * force to blit, but format isn't supported. */

  if (upstream_has_meta) {
    attach = TRUE;
  } else if (caps_has_meta) {
    if (alloc_has_meta) {
      attach = TRUE;
    } else {
      /* Don't attach unless we cannot handle the format */
      attach = !gst_ass_render_can_handle_caps (caps);
    }
  } else {
    ret = gst_ass_render_can_handle_caps (caps);
  }

  /* If we attach, then pick the overlay caps */
  if (attach) {
    GST_DEBUG_OBJECT (render, "Using caps %" GST_PTR_FORMAT, overlay_caps);
    /* Caps where already sent */
  } else if (ret) {
    GST_DEBUG_OBJECT (render, "Using caps %" GST_PTR_FORMAT, caps);
    ret = gst_pad_set_caps (render->srcpad, caps);
  }

  render->attach_compo_to_buffer = attach;

  if (!ret) {
    GST_DEBUG_OBJECT (render, "negotiation failed, schedule reconfigure");
    gst_pad_mark_reconfigure (render->srcpad);
  } else {
    /* (re)configure libass for the negotiated sizes and aspect ratio */
    g_mutex_lock (&render->ass_mutex);
    ass_set_frame_size (render->ass_renderer,
        render->ass_frame_width, render->ass_frame_height);
    ass_set_storage_size (render->ass_renderer,
        render->info.width, render->info.height);
    ass_set_pixel_aspect (render->ass_renderer,
        (gdouble) render->info.par_n / (gdouble) render->info.par_d);
    ass_set_font_scale (render->ass_renderer, 1.0);
    ass_set_hinting (render->ass_renderer, ASS_HINTING_LIGHT);

    /* second call overrides the first; "Sans" is the effective fallback */
    ass_set_fonts (render->ass_renderer, "Arial", "sans-serif", 1, NULL, 1);
    ass_set_fonts (render->ass_renderer, NULL, "Sans", 1, NULL, 1);
    ass_set_margins (render->ass_renderer, 0, 0, 0, 0);
    ass_set_use_margins (render->ass_renderer, 0);
    g_mutex_unlock (&render->ass_mutex);

    render->renderer_init_ok = TRUE;

    GST_DEBUG_OBJECT (render, "ass renderer setup complete");
  }

  gst_caps_unref (overlay_caps);
  gst_caps_unref (caps);

  if (!ret)
    gst_pad_mark_reconfigure (render->srcpad);

  return ret;

no_format:
  {
    if (caps)
      gst_caps_unref (caps);
    gst_pad_mark_reconfigure (render->srcpad);
    return FALSE;
  }
}
948
/* Handle new caps on the video sink pad: parse them into render->info,
 * renegotiate with downstream, and reject formats we can neither blend
 * nor pass through with an attached composition meta. */
static gboolean
gst_ass_render_setcaps_video (GstPad * pad, GstAssRender * render,
    GstCaps * caps)
{
  GstVideoInfo info;
  gboolean ret;

  if (!gst_video_info_from_caps (&info, caps))
    goto invalid_caps;

  render->info = info;

  ret = gst_ass_render_negotiate (render, caps);

  GST_ASS_RENDER_LOCK (render);

  /* if negotiation decided we must blend ourselves, the format has to be
   * one we support in software */
  if (!render->attach_compo_to_buffer && !gst_ass_render_can_handle_caps (caps)) {
    GST_DEBUG_OBJECT (render, "unsupported caps %" GST_PTR_FORMAT, caps);
    ret = FALSE;
  }
  GST_ASS_RENDER_UNLOCK (render);

  return ret;

  /* ERRORS */
invalid_caps:
  {
    GST_ERROR_OBJECT (render, "could not parse caps");
    return FALSE;
  }
}
980
981 static gboolean
gst_ass_render_setcaps_text(GstPad * pad,GstAssRender * render,GstCaps * caps)982 gst_ass_render_setcaps_text (GstPad * pad, GstAssRender * render,
983 GstCaps * caps)
984 {
985 GstStructure *structure;
986 const GValue *value;
987 GstBuffer *priv;
988 GstMapInfo map;
989 gboolean ret = FALSE;
990
991 structure = gst_caps_get_structure (caps, 0);
992
993 GST_DEBUG_OBJECT (render, "text pad linked with caps: %" GST_PTR_FORMAT,
994 caps);
995
996 value = gst_structure_get_value (structure, "codec_data");
997
998 g_mutex_lock (&render->ass_mutex);
999 if (value != NULL) {
1000 priv = gst_value_get_buffer (value);
1001 g_return_val_if_fail (priv != NULL, FALSE);
1002
1003 gst_buffer_map (priv, &map, GST_MAP_READ);
1004
1005 if (!render->ass_track)
1006 render->ass_track = ass_new_track (render->ass_library);
1007
1008 ass_process_codec_private (render->ass_track, (char *) map.data, map.size);
1009
1010 gst_buffer_unmap (priv, &map);
1011
1012 GST_DEBUG_OBJECT (render, "ass track created");
1013
1014 render->track_init_ok = TRUE;
1015
1016 ret = TRUE;
1017 } else if (!render->ass_track) {
1018 render->ass_track = ass_new_track (render->ass_library);
1019
1020 render->track_init_ok = TRUE;
1021
1022 ret = TRUE;
1023 }
1024 g_mutex_unlock (&render->ass_mutex);
1025
1026 return ret;
1027 }
1028
1029
1030 static void
gst_ass_render_process_text(GstAssRender * render,GstBuffer * buffer,GstClockTime running_time,GstClockTime duration)1031 gst_ass_render_process_text (GstAssRender * render, GstBuffer * buffer,
1032 GstClockTime running_time, GstClockTime duration)
1033 {
1034 GstMapInfo map;
1035 gdouble pts_start, pts_end;
1036
1037 pts_start = running_time;
1038 pts_start /= GST_MSECOND;
1039 pts_end = duration;
1040 pts_end /= GST_MSECOND;
1041
1042 GST_DEBUG_OBJECT (render,
1043 "Processing subtitles with running time %" GST_TIME_FORMAT
1044 " and duration %" GST_TIME_FORMAT, GST_TIME_ARGS (running_time),
1045 GST_TIME_ARGS (duration));
1046
1047 gst_buffer_map (buffer, &map, GST_MAP_READ);
1048
1049 g_mutex_lock (&render->ass_mutex);
1050 ass_process_chunk (render->ass_track, (gchar *) map.data, map.size,
1051 pts_start, pts_end);
1052 g_mutex_unlock (&render->ass_mutex);
1053
1054 gst_buffer_unmap (buffer, &map);
1055 }
1056
/* Build a GstVideoOverlayComposition from a libass image list.
 * All images are rendered into one rectangle covering their common
 * bounding box; rectangle coordinates are scaled from the libass frame
 * size (ass_frame_width/height) to the negotiated video size.
 * Returns NULL on allocation or mapping failure. */
static GstVideoOverlayComposition *
gst_ass_render_composite_overlay (GstAssRender * render, ASS_Image * images)
{
  GstVideoOverlayComposition *composition;
  GstVideoOverlayRectangle *rectangle;
  GstVideoMeta *vmeta;
  GstMapInfo map;
  GstBuffer *buffer;
  ASS_Image *image;
  gint min_x, min_y;
  gint max_x, max_y;
  gint width, height;
  gint stride;
  gdouble hscale, vscale;
  gpointer data;

  min_x = G_MAXINT;
  min_y = G_MAXINT;
  max_x = 0;
  max_y = 0;

  /* find bounding box of all images, to limit the overlay rectangle size */
  for (image = images; image; image = image->next) {
    if (min_x > image->dst_x)
      min_x = image->dst_x;
    if (min_y > image->dst_y)
      min_y = image->dst_y;
    if (max_x < image->dst_x + image->w)
      max_x = image->dst_x + image->w;
    if (max_y < image->dst_y + image->h)
      max_y = image->dst_y + image->h;
  }

  /* clamp to the libass frame size in case images extend beyond it */
  width = MIN (max_x - min_x, render->ass_frame_width);
  height = MIN (max_y - min_y, render->ass_frame_height);

  GST_DEBUG_OBJECT (render, "render overlay rectangle %dx%d%+d%+d",
      width, height, min_x, min_y);

  /* 4 bytes per pixel; NOTE(review): modern gst_buffer_new_and_alloc()
   * aborts instead of returning NULL, so this check may be dead — verify
   * against the targeted GStreamer version */
  buffer = gst_buffer_new_and_alloc (4 * width * height);
  if (!buffer) {
    GST_ERROR_OBJECT (render, "Failed to allocate overlay buffer");
    return NULL;
  }

  /* The blit helper is named blit_bgra_premultiplied, so this format
   * macro presumably resolves to premultiplied BGRA — confirm against
   * GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_RGB's definition */
  vmeta = gst_buffer_add_video_meta (buffer, GST_VIDEO_FRAME_FLAG_NONE,
      GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_RGB, width, height);

  if (!gst_video_meta_map (vmeta, 0, &map, &data, &stride, GST_MAP_READWRITE)) {
    GST_ERROR_OBJECT (render, "Failed to map overlay buffer");
    gst_buffer_unref (buffer);
    return NULL;
  }

  /* Blit all images into the buffer, offset so the bounding box's
   * top-left corner lands at (0,0) */
  blit_bgra_premultiplied (render, images, data, width, height, stride,
      -min_x, -min_y);
  gst_video_meta_unmap (vmeta, 0, &map);

  /* Scale from libass frame coordinates to video coordinates */
  hscale = (gdouble) render->info.width / (gdouble) render->ass_frame_width;
  vscale = (gdouble) render->info.height / (gdouble) render->ass_frame_height;

  rectangle = gst_video_overlay_rectangle_new_raw (buffer,
      hscale * min_x, vscale * min_y, hscale * width, vscale * height,
      GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA);

  /* The rectangle holds its own reference to the pixel buffer */
  gst_buffer_unref (buffer);

  composition = gst_video_overlay_composition_new (rectangle);
  gst_video_overlay_rectangle_unref (rectangle);

  return composition;
}
1129
1130 static gboolean
gst_ass_render_push_frame(GstAssRender * render,GstBuffer * video_frame)1131 gst_ass_render_push_frame (GstAssRender * render, GstBuffer * video_frame)
1132 {
1133 GstVideoFrame frame;
1134
1135 if (!render->composition)
1136 goto done;
1137
1138 video_frame = gst_buffer_make_writable (video_frame);
1139
1140 if (render->attach_compo_to_buffer) {
1141 gst_buffer_add_video_overlay_composition_meta (video_frame,
1142 render->composition);
1143 goto done;
1144 }
1145
1146 if (!gst_video_frame_map (&frame, &render->info, video_frame,
1147 GST_MAP_READWRITE)) {
1148 GST_WARNING_OBJECT (render, "failed to map video frame for blending");
1149 goto done;
1150 }
1151
1152 gst_video_overlay_composition_blend (render->composition, &frame);
1153 gst_video_frame_unmap (&frame);
1154
1155 done:
1156 return gst_pad_push (render->srcpad, video_frame);
1157 }
1158
1159 static GstFlowReturn
gst_ass_render_chain_video(GstPad * pad,GstObject * parent,GstBuffer * buffer)1160 gst_ass_render_chain_video (GstPad * pad, GstObject * parent,
1161 GstBuffer * buffer)
1162 {
1163 GstAssRender *render = GST_ASS_RENDER (parent);
1164 GstFlowReturn ret = GST_FLOW_OK;
1165 gboolean in_seg = FALSE;
1166 guint64 start, stop, clip_start = 0, clip_stop = 0;
1167 ASS_Image *ass_image;
1168 guint n = 0;
1169
1170 if (gst_pad_check_reconfigure (render->srcpad)) {
1171 if (!gst_ass_render_negotiate (render, NULL)) {
1172 gst_pad_mark_reconfigure (render->srcpad);
1173 if (GST_PAD_IS_FLUSHING (render->srcpad))
1174 goto flushing;
1175 else
1176 goto not_negotiated;
1177 }
1178 }
1179
1180 if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
1181 goto missing_timestamp;
1182
1183 /* ignore buffers that are outside of the current segment */
1184 start = GST_BUFFER_TIMESTAMP (buffer);
1185
1186 if (!GST_BUFFER_DURATION_IS_VALID (buffer)) {
1187 stop = GST_CLOCK_TIME_NONE;
1188 } else {
1189 stop = start + GST_BUFFER_DURATION (buffer);
1190 }
1191
1192 /* segment_clip() will adjust start unconditionally to segment_start if
1193 * no stop time is provided, so handle this ourselves */
1194 if (stop == GST_CLOCK_TIME_NONE && start < render->video_segment.start)
1195 goto out_of_segment;
1196
1197 in_seg =
1198 gst_segment_clip (&render->video_segment, GST_FORMAT_TIME, start, stop,
1199 &clip_start, &clip_stop);
1200
1201 if (!in_seg)
1202 goto out_of_segment;
1203
1204 /* if the buffer is only partially in the segment, fix up stamps */
1205 if (clip_start != start || (stop != -1 && clip_stop != stop)) {
1206 GST_DEBUG_OBJECT (render, "clipping buffer timestamp/duration to segment");
1207 buffer = gst_buffer_make_writable (buffer);
1208 GST_BUFFER_TIMESTAMP (buffer) = clip_start;
1209 if (stop != -1)
1210 GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;
1211 }
1212
1213 /* now, after we've done the clipping, fix up end time if there's no
1214 * duration (we only use those estimated values internally though, we
1215 * don't want to set bogus values on the buffer itself) */
1216 if (stop == -1) {
1217 if (render->info.fps_n && render->info.fps_d) {
1218 GST_DEBUG_OBJECT (render, "estimating duration based on framerate");
1219 stop =
1220 start + gst_util_uint64_scale_int (GST_SECOND, render->info.fps_d,
1221 render->info.fps_n);
1222 } else {
1223 GST_WARNING_OBJECT (render, "no duration, assuming minimal duration");
1224 stop = start + 1; /* we need to assume some interval */
1225 }
1226 }
1227
1228 wait_for_text_buf:
1229
1230 GST_ASS_RENDER_LOCK (render);
1231
1232 if (render->video_flushing)
1233 goto flushing;
1234
1235 if (render->video_eos)
1236 goto have_eos;
1237
1238 if (render->renderer_init_ok && render->track_init_ok && render->enable) {
1239 /* Text pad linked, check if we have a text buffer queued */
1240 if (render->subtitle_pending) {
1241 GSList *subtitle_pending = render->subtitle_pending;
1242 GstClockTime text_start = GST_CLOCK_TIME_NONE;
1243 GstClockTime text_end = GST_CLOCK_TIME_NONE;
1244 GstClockTime text_running_time = GST_CLOCK_TIME_NONE;
1245 GstClockTime text_running_time_end = GST_CLOCK_TIME_NONE;
1246 GstClockTime vid_running_time, vid_running_time_end;
1247 gdouble timestamp;
1248 gint changed = 0;
1249
1250 vid_running_time =
1251 gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
1252 start);
1253 vid_running_time_end =
1254 gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
1255 stop);
1256
1257 GST_LOG_OBJECT (render, "V : %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT,
1258 GST_TIME_ARGS (vid_running_time),
1259 GST_TIME_ARGS (vid_running_time_end));
1260
1261 if (subtitle_pending == NULL)
1262 GST_LOG_OBJECT (render, "T : no pending subtitles");
1263
1264 while (subtitle_pending != NULL) {
1265 ++n;
1266
1267 /* if the text buffer isn't stamped right, pop it off the
1268 * queue and display it for the current video frame only */
1269 if (!GST_BUFFER_TIMESTAMP_IS_VALID (subtitle_pending->data) ||
1270 !GST_BUFFER_DURATION_IS_VALID (subtitle_pending->data)) {
1271 GSList *bad = subtitle_pending;
1272 GST_WARNING_OBJECT (render,
1273 "Got text buffer with invalid timestamp or duration %"
1274 GST_PTR_FORMAT, bad->data);
1275 gst_buffer_unref (bad->data);
1276 subtitle_pending = bad->next;
1277 render->subtitle_pending =
1278 g_slist_delete_link (render->subtitle_pending, bad);
1279 GST_ASS_RENDER_BROADCAST (render);
1280 continue;
1281 }
1282
1283 text_start = GST_BUFFER_TIMESTAMP (subtitle_pending->data);
1284 text_end = text_start + GST_BUFFER_DURATION (subtitle_pending->data);
1285
1286 /* If timestamp and duration are valid */
1287 text_running_time =
1288 gst_segment_to_running_time (&render->subtitle_segment,
1289 GST_FORMAT_TIME, text_start);
1290 text_running_time_end =
1291 gst_segment_to_running_time (&render->subtitle_segment,
1292 GST_FORMAT_TIME, text_end);
1293
1294 GST_LOG_OBJECT (render, "T%u: %" GST_TIME_FORMAT " - "
1295 "%" GST_TIME_FORMAT, n, GST_TIME_ARGS (text_running_time),
1296 GST_TIME_ARGS (text_running_time_end));
1297
1298 /* Text too old */
1299 if (text_running_time_end <= vid_running_time) {
1300 GSList *old = subtitle_pending;
1301 GST_DEBUG_OBJECT (render,
1302 "text buffer too old, popping %" GST_PTR_FORMAT, old->data);
1303 gst_buffer_unref (old->data);
1304 subtitle_pending = old->next;
1305 render->subtitle_pending =
1306 g_slist_delete_link (render->subtitle_pending, old);
1307 GST_ASS_RENDER_BROADCAST (render);
1308 continue;
1309 }
1310
1311 if (render->need_process) {
1312 GST_DEBUG_OBJECT (render, "process text buffer");
1313 gst_ass_render_process_text (render, subtitle_pending->data,
1314 text_running_time, text_running_time_end - text_running_time);
1315 }
1316
1317 subtitle_pending = subtitle_pending->next;
1318 }
1319
1320 if (render->need_process) {
1321 render->need_process = FALSE;
1322 }
1323
1324 GST_ASS_RENDER_UNLOCK (render);
1325
1326 /* libass needs timestamps in ms */
1327 timestamp = vid_running_time / GST_MSECOND;
1328
1329 g_mutex_lock (&render->ass_mutex);
1330 ass_image = ass_render_frame (render->ass_renderer, render->ass_track,
1331 timestamp, &changed);
1332 g_mutex_unlock (&render->ass_mutex);
1333
1334 if ((!ass_image || changed) && render->composition) {
1335 GST_DEBUG_OBJECT (render, "release overlay (changed %d)", changed);
1336 gst_ass_render_reset_composition (render);
1337 }
1338
1339 if (ass_image != NULL) {
1340 if (!render->composition)
1341 render->composition = gst_ass_render_composite_overlay (render,
1342 ass_image);
1343 } else {
1344 GST_DEBUG_OBJECT (render, "nothing to render right now");
1345 }
1346
1347 /* Push the video frame */
1348 ret = gst_ass_render_push_frame (render, buffer);
1349
1350 subtitle_pending = render->subtitle_pending;
1351 while (subtitle_pending != NULL) {
1352
1353 text_start = GST_BUFFER_TIMESTAMP (subtitle_pending->data);
1354 text_end = text_start + GST_BUFFER_DURATION (subtitle_pending->data);
1355
1356 text_running_time_end =
1357 gst_segment_to_running_time (&render->video_segment,
1358 GST_FORMAT_TIME, text_end);
1359
1360 if (text_running_time_end <= vid_running_time_end) {
1361 GSList *old = subtitle_pending;
1362 GST_DEBUG_OBJECT (render,
1363 "finished text buffer, popping %" GST_PTR_FORMAT, old->data);
1364 GST_ASS_RENDER_LOCK (render);
1365 gst_buffer_unref (old->data);
1366 subtitle_pending = old->next;
1367 render->subtitle_pending =
1368 g_slist_delete_link (render->subtitle_pending, old);
1369 GST_ASS_RENDER_BROADCAST (render);
1370 GST_ASS_RENDER_UNLOCK (render);
1371 render->need_process = TRUE;
1372 if (g_slist_length (render->subtitle_pending) == 0) {
1373 render->need_process = FALSE;
1374 }
1375 } else {
1376 subtitle_pending = subtitle_pending->next;
1377 }
1378 }
1379 } else {
1380 gboolean wait_for_text_buf = TRUE;
1381
1382 if (render->subtitle_eos)
1383 wait_for_text_buf = FALSE;
1384
1385 if (!render->wait_text)
1386 wait_for_text_buf = FALSE;
1387
1388 /* Text pad linked, but no text buffer available - what now? */
1389 if (render->subtitle_segment.format == GST_FORMAT_TIME) {
1390 GstClockTime text_start_running_time, text_last_stop_running_time;
1391 GstClockTime vid_running_time;
1392
1393 vid_running_time =
1394 gst_segment_to_running_time (&render->video_segment,
1395 GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (buffer));
1396 text_start_running_time =
1397 gst_segment_to_running_time (&render->subtitle_segment,
1398 GST_FORMAT_TIME, render->subtitle_segment.start);
1399 text_last_stop_running_time =
1400 gst_segment_to_running_time (&render->subtitle_segment,
1401 GST_FORMAT_TIME, render->subtitle_segment.position);
1402
1403 if ((GST_CLOCK_TIME_IS_VALID (text_start_running_time) &&
1404 vid_running_time < text_start_running_time) ||
1405 (GST_CLOCK_TIME_IS_VALID (text_last_stop_running_time) &&
1406 vid_running_time < text_last_stop_running_time)) {
1407 wait_for_text_buf = FALSE;
1408 }
1409 }
1410
1411 if (wait_for_text_buf) {
1412 GST_DEBUG_OBJECT (render, "no text buffer, need to wait for one");
1413 GST_ASS_RENDER_WAIT (render);
1414 GST_DEBUG_OBJECT (render, "resuming");
1415 GST_ASS_RENDER_UNLOCK (render);
1416 goto wait_for_text_buf;
1417 } else {
1418 GST_ASS_RENDER_UNLOCK (render);
1419 GST_LOG_OBJECT (render, "no need to wait for a text buffer");
1420 ret = gst_pad_push (render->srcpad, buffer);
1421 }
1422 }
1423 } else {
1424 GST_LOG_OBJECT (render, "rendering disabled, doing buffer passthrough");
1425
1426 GST_ASS_RENDER_UNLOCK (render);
1427 ret = gst_pad_push (render->srcpad, buffer);
1428 return ret;
1429 }
1430
1431 GST_DEBUG_OBJECT (render, "leaving chain for buffer %p ret=%d", buffer, ret);
1432
1433 /* Update last_stop */
1434 render->video_segment.position = clip_start;
1435
1436 return ret;
1437
1438 missing_timestamp:
1439 {
1440 GST_WARNING_OBJECT (render, "buffer without timestamp, discarding");
1441 gst_buffer_unref (buffer);
1442 return GST_FLOW_OK;
1443 }
1444 not_negotiated:
1445 {
1446 GST_ASS_RENDER_UNLOCK (render);
1447 GST_DEBUG_OBJECT (render, "not negotiated");
1448 gst_buffer_unref (buffer);
1449 return GST_FLOW_NOT_NEGOTIATED;
1450 }
1451 flushing:
1452 {
1453 GST_ASS_RENDER_UNLOCK (render);
1454 GST_DEBUG_OBJECT (render, "flushing, discarding buffer");
1455 gst_buffer_unref (buffer);
1456 return GST_FLOW_FLUSHING;
1457 }
1458 have_eos:
1459 {
1460 GST_ASS_RENDER_UNLOCK (render);
1461 GST_DEBUG_OBJECT (render, "eos, discarding buffer");
1462 gst_buffer_unref (buffer);
1463 return GST_FLOW_EOS;
1464 }
1465 out_of_segment:
1466 {
1467 GST_DEBUG_OBJECT (render, "buffer out of segment, discarding");
1468 gst_buffer_unref (buffer);
1469 return GST_FLOW_OK;
1470 }
1471 }
1472
/* Text chain function: clips the incoming subtitle buffer to the
 * subtitle segment, appends it to the pending queue for the video
 * chain to consume, and wakes up a video thread that may be blocked
 * waiting for text. Takes ownership of @buffer. */
static GstFlowReturn
gst_ass_render_chain_text (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstAssRender *render = GST_ASS_RENDER (parent);
  gboolean in_seg = FALSE;
  guint64 clip_start = 0, clip_stop = 0;

  GST_DEBUG_OBJECT (render, "entering chain for buffer %" GST_PTR_FORMAT,
      buffer);

  GST_ASS_RENDER_LOCK (render);

  if (render->subtitle_flushing) {
    GST_ASS_RENDER_UNLOCK (render);
    ret = GST_FLOW_FLUSHING;
    GST_LOG_OBJECT (render, "text flushing");
    goto beach;
  }

  if (render->subtitle_eos) {
    GST_ASS_RENDER_UNLOCK (render);
    ret = GST_FLOW_EOS;
    GST_LOG_OBJECT (render, "text EOS");
    goto beach;
  }

  if (G_LIKELY (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))) {
    GstClockTime stop;

    if (G_LIKELY (GST_BUFFER_DURATION_IS_VALID (buffer)))
      stop = GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer);
    else
      stop = GST_CLOCK_TIME_NONE;

    in_seg = gst_segment_clip (&render->subtitle_segment, GST_FORMAT_TIME,
        GST_BUFFER_TIMESTAMP (buffer), stop, &clip_start, &clip_stop);
  } else {
    /* No timestamp: accept the buffer as-is */
    in_seg = TRUE;
  }

  if (in_seg) {
    /* NOTE(review): timestamp and duration are updated mutually
     * exclusively — when the timestamp is valid the duration is never
     * clipped to clip_stop - clip_start. Confirm this is intentional.
     * Also, buffer metadata is written without gst_buffer_make_writable();
     * this assumes we hold the only reference — verify. */
    if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
      GST_BUFFER_TIMESTAMP (buffer) = clip_start;
    else if (GST_BUFFER_DURATION_IS_VALID (buffer))
      GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;

    if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
      render->subtitle_segment.position = clip_start;

    GST_DEBUG_OBJECT (render, "New buffer arrived %" GST_PTR_FORMAT, buffer);
    /* Queue takes its own reference; ours is released at beach */
    render->subtitle_pending = g_slist_append (render->subtitle_pending,
        gst_buffer_ref (buffer));
    render->need_process = TRUE;

    /* in case the video chain is waiting for a text buffer, wake it up */
    GST_ASS_RENDER_BROADCAST (render);
  }

  GST_ASS_RENDER_UNLOCK (render);

beach:
  GST_DEBUG_OBJECT (render, "leaving chain for buffer %p", buffer);

  gst_buffer_unref (buffer);
  return ret;
}
1540
1541 static void
gst_ass_render_handle_tag_sample(GstAssRender * render,GstSample * sample)1542 gst_ass_render_handle_tag_sample (GstAssRender * render, GstSample * sample)
1543 {
1544 static const gchar *mimetypes[] = {
1545 "application/x-font-ttf",
1546 "application/x-font-otf",
1547 "application/x-truetype-font"
1548 };
1549 static const gchar *extensions[] = {
1550 ".otf",
1551 ".ttf"
1552 };
1553
1554 GstBuffer *buf;
1555 const GstStructure *structure;
1556 gboolean valid_mimetype, valid_extension;
1557 guint i;
1558 const gchar *filename;
1559
1560 buf = gst_sample_get_buffer (sample);
1561 structure = gst_sample_get_info (sample);
1562
1563 if (!buf || !structure)
1564 return;
1565
1566 valid_mimetype = FALSE;
1567 valid_extension = FALSE;
1568
1569 for (i = 0; i < G_N_ELEMENTS (mimetypes); i++) {
1570 if (gst_structure_has_name (structure, mimetypes[i])) {
1571 valid_mimetype = TRUE;
1572 break;
1573 }
1574 }
1575
1576 filename = gst_structure_get_string (structure, "filename");
1577 if (!filename)
1578 return;
1579
1580 if (!valid_mimetype) {
1581 guint len = strlen (filename);
1582 const gchar *extension = filename + len - 4;
1583 for (i = 0; i < G_N_ELEMENTS (extensions); i++) {
1584 if (g_ascii_strcasecmp (extension, extensions[i]) == 0) {
1585 valid_extension = TRUE;
1586 break;
1587 }
1588 }
1589 }
1590
1591 if (valid_mimetype || valid_extension) {
1592 GstMapInfo map;
1593
1594 g_mutex_lock (&render->ass_mutex);
1595 gst_buffer_map (buf, &map, GST_MAP_READ);
1596 ass_add_font (render->ass_library, (gchar *) filename,
1597 (gchar *) map.data, map.size);
1598 gst_buffer_unmap (buf, &map);
1599 GST_DEBUG_OBJECT (render, "registered new font %s", filename);
1600 g_mutex_unlock (&render->ass_mutex);
1601 }
1602 }
1603
1604 static void
gst_ass_render_handle_tags(GstAssRender * render,GstTagList * taglist)1605 gst_ass_render_handle_tags (GstAssRender * render, GstTagList * taglist)
1606 {
1607 guint tag_size;
1608
1609 if (!taglist)
1610 return;
1611
1612 tag_size = gst_tag_list_get_tag_size (taglist, GST_TAG_ATTACHMENT);
1613 if (tag_size > 0 && render->embeddedfonts) {
1614 guint index;
1615 GstSample *sample;
1616
1617 GST_DEBUG_OBJECT (render, "TAG event has attachments");
1618
1619 for (index = 0; index < tag_size; index++) {
1620 if (gst_tag_list_get_sample_index (taglist, GST_TAG_ATTACHMENT, index,
1621 &sample)) {
1622 gst_ass_render_handle_tag_sample (render, sample);
1623 gst_sample_unref (sample);
1624 }
1625 }
1626 }
1627 }
1628
/* Sink event handler for the video pad: handles caps, segment, tag,
 * EOS and flush events; everything else is forwarded to the default
 * handler. */
static gboolean
gst_ass_render_event_video (GstPad * pad, GstObject * parent, GstEvent * event)
{
  gboolean ret = FALSE;
  GstAssRender *render = GST_ASS_RENDER (parent);

  GST_DEBUG_OBJECT (pad, "received video event %" GST_PTR_FORMAT, event);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;

      /* Caps events are consumed here; negotiation happens in setcaps */
      gst_event_parse_caps (event, &caps);
      ret = gst_ass_render_setcaps_video (pad, render, caps);
      gst_event_unref (event);
      break;
    }
    case GST_EVENT_SEGMENT:
    {
      GstSegment segment;

      GST_DEBUG_OBJECT (render, "received new segment");

      gst_event_copy_segment (event, &segment);

      if (segment.format == GST_FORMAT_TIME) {
        GST_DEBUG_OBJECT (render, "VIDEO SEGMENT now: %" GST_SEGMENT_FORMAT,
            &render->video_segment);

        render->video_segment = segment;

        GST_DEBUG_OBJECT (render, "VIDEO SEGMENT after: %" GST_SEGMENT_FORMAT,
            &render->video_segment);
        ret = gst_pad_event_default (pad, parent, event);
      } else {
        /* Only TIME segments can be synchronised against; drop others */
        GST_ELEMENT_WARNING (render, STREAM, MUX, (NULL),
            ("received non-TIME newsegment event on video input"));
        ret = FALSE;
        gst_event_unref (event);
      }
      break;
    }
    case GST_EVENT_TAG:
    {
      GstTagList *taglist = NULL;

      /* tag events may contain attachments which might be fonts */
      GST_DEBUG_OBJECT (render, "got TAG event");

      gst_event_parse_tag (event, &taglist);
      gst_ass_render_handle_tags (render, taglist);
      ret = gst_pad_event_default (pad, parent, event);
      break;
    }
    case GST_EVENT_EOS:
      /* Mark EOS so the video chain stops waiting and drops buffers */
      GST_ASS_RENDER_LOCK (render);
      GST_INFO_OBJECT (render, "video EOS");
      render->video_eos = TRUE;
      GST_ASS_RENDER_UNLOCK (render);
      ret = gst_pad_event_default (pad, parent, event);
      break;
    case GST_EVENT_FLUSH_START:
      /* Set the flushing flag and wake a chain thread blocked in WAIT */
      GST_ASS_RENDER_LOCK (render);
      GST_INFO_OBJECT (render, "video flush start");
      render->video_flushing = TRUE;
      GST_ASS_RENDER_BROADCAST (render);
      GST_ASS_RENDER_UNLOCK (render);
      ret = gst_pad_event_default (pad, parent, event);
      break;
    case GST_EVENT_FLUSH_STOP:
      /* Reset flushing/EOS state and restart the video segment */
      GST_ASS_RENDER_LOCK (render);
      GST_INFO_OBJECT (render, "video flush stop");
      render->video_flushing = FALSE;
      render->video_eos = FALSE;
      gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
      GST_ASS_RENDER_UNLOCK (render);
      ret = gst_pad_event_default (pad, parent, event);
      break;
    default:
      ret = gst_pad_event_default (pad, parent, event);
      break;
  }

  return ret;
}
1715
1716 static gboolean
gst_ass_render_query_video(GstPad * pad,GstObject * parent,GstQuery * query)1717 gst_ass_render_query_video (GstPad * pad, GstObject * parent, GstQuery * query)
1718 {
1719 gboolean res = FALSE;
1720
1721 switch (GST_QUERY_TYPE (query)) {
1722 case GST_QUERY_CAPS:
1723 {
1724 GstCaps *filter, *caps;
1725
1726 gst_query_parse_caps (query, &filter);
1727 caps =
1728 gst_ass_render_get_videosink_caps (pad, (GstAssRender *) parent,
1729 filter);
1730 gst_query_set_caps_result (query, caps);
1731 gst_caps_unref (caps);
1732 res = TRUE;
1733 break;
1734 }
1735 default:
1736 res = gst_pad_query_default (pad, parent, query);
1737 break;
1738 }
1739
1740 return res;
1741 }
1742
/* Sink event handler for the text pad: handles caps, segment, gap,
 * flush, EOS and tag events. Most cases broadcast on the render
 * condition to wake up a video chain thread that may be blocked
 * waiting for subtitle data. Serialized events that only concern the
 * subtitle stream are consumed here rather than forwarded. */
static gboolean
gst_ass_render_event_text (GstPad * pad, GstObject * parent, GstEvent * event)
{
  gboolean ret = FALSE;
  GstAssRender *render = GST_ASS_RENDER (parent);

  GST_DEBUG_OBJECT (pad, "received text event %" GST_PTR_FORMAT, event);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;

      gst_event_parse_caps (event, &caps);
      ret = gst_ass_render_setcaps_text (pad, render, caps);
      gst_event_unref (event);
      break;
    }
    case GST_EVENT_SEGMENT:
    {
      GstSegment segment;

      GST_ASS_RENDER_LOCK (render);
      render->subtitle_eos = FALSE;
      GST_ASS_RENDER_UNLOCK (render);

      gst_event_copy_segment (event, &segment);

      GST_ASS_RENDER_LOCK (render);
      if (segment.format == GST_FORMAT_TIME) {
        GST_DEBUG_OBJECT (render, "TEXT SEGMENT now: %" GST_SEGMENT_FORMAT,
            &render->subtitle_segment);

        render->subtitle_segment = segment;

        GST_DEBUG_OBJECT (render,
            "TEXT SEGMENT after: %" GST_SEGMENT_FORMAT,
            &render->subtitle_segment);
      } else {
        /* Only TIME segments are usable for subtitle synchronisation */
        GST_ELEMENT_WARNING (render, STREAM, MUX, (NULL),
            ("received non-TIME newsegment event on subtitle input"));
      }

      /* Consumed here: the subtitle segment is not forwarded downstream */
      gst_event_unref (event);
      ret = TRUE;

      /* wake up the video chain, it might be waiting for a text buffer or
       * a text segment update */
      GST_ASS_RENDER_BROADCAST (render);
      GST_ASS_RENDER_UNLOCK (render);
      break;
    }
    case GST_EVENT_GAP:{
      GstClockTime start, duration;

      gst_event_parse_gap (event, &start, &duration);
      if (GST_CLOCK_TIME_IS_VALID (duration))
        start += duration;
      /* we do not expect another buffer until after gap,
       * so that is our position now */
      GST_ASS_RENDER_LOCK (render);
      render->subtitle_segment.position = start;

      /* wake up the video chain, it might be waiting for a text buffer or
       * a text segment update */
      GST_ASS_RENDER_BROADCAST (render);
      GST_ASS_RENDER_UNLOCK (render);

      gst_event_unref (event);
      ret = TRUE;
      break;
    }
    case GST_EVENT_FLUSH_STOP:
      /* Drop all subtitle state: libass events, queued buffers, segment */
      g_mutex_lock (&render->ass_mutex);
      if (render->ass_track) {
        ass_flush_events (render->ass_track);
      }
      g_mutex_unlock (&render->ass_mutex);
      GST_ASS_RENDER_LOCK (render);
      GST_INFO_OBJECT (render, "text flush stop");
      render->subtitle_flushing = FALSE;
      render->subtitle_eos = FALSE;
      gst_ass_render_pop_text (render);
      gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
      GST_ASS_RENDER_UNLOCK (render);
      gst_event_unref (event);
      ret = TRUE;
      break;
    case GST_EVENT_FLUSH_START:
      /* Flag flushing and wake a video thread blocked waiting for text */
      GST_DEBUG_OBJECT (render, "text flush start");
      GST_ASS_RENDER_LOCK (render);
      render->subtitle_flushing = TRUE;
      GST_ASS_RENDER_BROADCAST (render);
      GST_ASS_RENDER_UNLOCK (render);
      gst_event_unref (event);
      ret = TRUE;
      break;
    case GST_EVENT_EOS:
      GST_ASS_RENDER_LOCK (render);
      render->subtitle_eos = TRUE;
      GST_INFO_OBJECT (render, "text EOS");
      /* wake up the video chain, it might be waiting for a text buffer or
       * a text segment update */
      GST_ASS_RENDER_BROADCAST (render);
      GST_ASS_RENDER_UNLOCK (render);
      /* Consumed: the video pad forwards its own EOS downstream */
      gst_event_unref (event);
      ret = TRUE;
      break;
    case GST_EVENT_TAG:
    {
      GstTagList *taglist = NULL;

      /* tag events may contain attachments which might be fonts */
      GST_DEBUG_OBJECT (render, "got TAG event");

      gst_event_parse_tag (event, &taglist);
      gst_ass_render_handle_tags (render, taglist);
      ret = gst_pad_event_default (pad, parent, event);
      break;
    }
    default:
      ret = gst_pad_event_default (pad, parent, event);
      break;
  }

  return ret;
}
1870
/* Plugin entry point: initialize the debug categories and register the
 * assrender element with GStreamer. */
static gboolean
plugin_init (GstPlugin * plugin)
{
  GST_DEBUG_CATEGORY_INIT (gst_ass_render_debug, "assrender",
      0, "ASS/SSA subtitle renderer");
  GST_DEBUG_CATEGORY_INIT (gst_ass_render_lib_debug, "assrender_library",
      0, "ASS/SSA subtitle renderer library");

  return gst_element_register (plugin, "assrender",
      GST_RANK_PRIMARY, GST_TYPE_ASS_RENDER);
}
1882
/* Plugin descriptor: exports the "assrender" plugin to GStreamer. */
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    assrender,
    "ASS/SSA subtitle renderer",
    plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)