1 /*
2 * Combine video streams to 3D stereo
3 *
4 * GStreamer
5 * Copyright (C) 2009 Julien Isorce <julien.isorce@gmail.com>
6 * Copyright (C) 2014 Jan Schmidt <jan@noraisin.net>
7 *
8 * This library is free software; you can redistribute it and/or
9 * modify it under the terms of the GNU Library General Public
10 * License as published by the Free Software Foundation; either
11 * version 2 of the License, or (at your option) any later version.
12 *
13 * This library is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 * Library General Public License for more details.
17 *
18 * You should have received a copy of the GNU Library General Public
19 * License along with this library; if not, write to the
20 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
21 * Boston, MA 02110-1301, USA.
22 */
23
24 /**
25 * SECTION:element-glstereomix
26 * @title: glstereomix
27 *
28 * Combine 2 input streams to produce a stereoscopic output
29 * stream. Input views are taken from the left pad and right pad
30 * respectively, and mixed according to their timelines.
31 *
 * If either input stream is stereoscopic, the appropriate view
33 * (left or right) is taken from each stream and placed into the output.
34 *
35 * The multiview representation on the output is chosen according to
36 * the downstream caps.
37 *
38 * ## Examples
39 * |[
40 * gst-launch-1.0 -v videotestsrc pattern=ball name=left \
41 * videotestsrc name=right glstereomix name=mix \
 * left. ! video/x-raw,width=640,height=480 ! glupload ! mix. \
 * right. ! video/x-raw,width=640,height=480 ! glupload ! mix. \
44 * mix. ! video/x-raw'(memory:GLMemory)',multiview-mode=side-by-side ! \
45 * queue ! glimagesink output-multiview-mode=side-by-side
46 * ]| Mix 2 different videotestsrc patterns into a side-by-side stereo image and display it.
47 * |[
48 * gst-launch-1.0 -ev v4l2src name=left \
49 * videotestsrc name=right \
50 * glstereomix name=mix \
51 * left. ! video/x-raw,width=640,height=480 ! glupload ! glcolorconvert ! mix. \
52 * right. ! video/x-raw,width=640,height=480 ! glupload ! mix. \
53 * mix. ! video/x-raw'(memory:GLMemory)',multiview-mode=top-bottom ! \
54 * glcolorconvert ! gldownload ! queue ! x264enc ! h264parse ! \
55 * mp4mux ! progressreport ! filesink location=output.mp4
56 * ]| Mix the input from a camera to the left view, and videotestsrc to the right view,
57 * and encode as a top-bottom frame packed H.264 video.
58 *
59 */
60 #ifdef HAVE_CONFIG_H
61 #include "config.h"
62 #endif
63
64 #include "gstglstereomix.h"
65
66 #define GST_CAT_DEFAULT gst_gl_stereo_mix_debug
67 GST_DEBUG_CATEGORY (gst_gl_stereo_mix_debug);
68
69 G_DEFINE_TYPE (GstGLStereoMixPad, gst_gl_stereo_mix_pad, GST_TYPE_GL_MIXER_PAD);
70
/* Pad subclass class_init: no class-specific virtual methods or
 * properties to set up; the subclass exists for its instance struct. */
static void
gst_gl_stereo_mix_pad_class_init (GstGLStereoMixPadClass * klass)
{
}
75
/* Pad subclass instance init: no per-pad initialisation required;
 * current_buffer starts out zeroed by GObject allocation. */
static void
gst_gl_stereo_mix_pad_init (GstGLStereoMixPad * pad)
{
}
80
81 static void gst_gl_stereo_mix_child_proxy_init (gpointer g_iface,
82 gpointer iface_data);
83
84 #define gst_gl_stereo_mix_parent_class parent_class
85 G_DEFINE_TYPE_WITH_CODE (GstGLStereoMix, gst_gl_stereo_mix, GST_TYPE_GL_MIXER,
86 G_IMPLEMENT_INTERFACE (GST_TYPE_CHILD_PROXY,
87 gst_gl_stereo_mix_child_proxy_init));
88
89 static GstCaps *_update_caps (GstVideoAggregator * vagg, GstCaps * caps);
90 static gboolean _negotiated_caps (GstAggregator * aggregator, GstCaps * caps);
91 static gboolean gst_gl_stereo_mix_make_output (GstGLStereoMix * mix);
92 static gboolean gst_gl_stereo_mix_process_frames (GstGLStereoMix * mixer);
93
94 #define DEFAULT_DOWNMIX GST_GL_STEREO_DOWNMIX_ANAGLYPH_GREEN_MAGENTA_DUBOIS
95
96 /* GLStereoMix signals and args */
97 enum
98 {
99 /* FILL ME */
100 LAST_SIGNAL
101 };
102
103 enum
104 {
105 PROP_0,
106 PROP_DOWNMIX_MODE
107 };
108
/* Source pad template: RGBA backed by GLMemory (2D textures), RGBA with
 * the GL texture-upload meta, or system memory in any format the GL
 * colour converter supports. */
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-raw(" GST_CAPS_FEATURE_MEMORY_GL_MEMORY "), "
        "format = (string) RGBA, "
        "width = " GST_VIDEO_SIZE_RANGE ", "
        "height = " GST_VIDEO_SIZE_RANGE ", "
        "framerate = " GST_VIDEO_FPS_RANGE ","
        "texture-target = (string) 2D"
        "; "
        GST_VIDEO_CAPS_MAKE_WITH_FEATURES
        (GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META,
            "RGBA")
        "; " GST_VIDEO_CAPS_MAKE (GST_GL_COLOR_CONVERT_FORMATS))
    );

/* Request sink pad template ("sink_%u"): same caps alternatives as the
 * source template; one pad per input view. */
static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink_%u",
    GST_PAD_SINK,
    GST_PAD_REQUEST,
    GST_STATIC_CAPS ("video/x-raw(" GST_CAPS_FEATURE_MEMORY_GL_MEMORY "), "
        "format = (string) RGBA, "
        "width = " GST_VIDEO_SIZE_RANGE ", "
        "height = " GST_VIDEO_SIZE_RANGE ", "
        "framerate = " GST_VIDEO_FPS_RANGE ","
        "texture-target = (string) 2D"
        "; "
        GST_VIDEO_CAPS_MAKE_WITH_FEATURES
        (GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META,
            "RGBA")
        "; " GST_VIDEO_CAPS_MAKE (GST_GL_COLOR_CONVERT_FORMATS))
    );
140
141 static GstPad *gst_gl_stereo_mix_request_new_pad (GstElement * element,
142 GstPadTemplate * temp, const gchar * req_name, const GstCaps * caps);
143 static void gst_gl_stereo_mix_release_pad (GstElement * element, GstPad * pad);
144
145 static GstFlowReturn gst_gl_stereo_mix_create_output_buffer (GstVideoAggregator
146 * videoaggregator, GstBuffer ** outbuf);
147 static gboolean gst_gl_stereo_mix_stop (GstAggregator * agg);
148 static gboolean gst_gl_stereo_mix_start (GstAggregator * agg);
149 static gboolean gst_gl_stereo_mix_src_query (GstAggregator * agg,
150 GstQuery * query);
151
152 static void gst_gl_stereo_mix_set_property (GObject * object, guint prop_id,
153 const GValue * value, GParamSpec * pspec);
154 static void gst_gl_stereo_mix_get_property (GObject * object, guint prop_id,
155 GValue * value, GParamSpec * pspec);
156
157 static void gst_gl_stereo_mix_finalize (GObject * object);
158
159 static GstFlowReturn
160 gst_gl_stereo_mix_aggregate_frames (GstVideoAggregator * vagg,
161 GstBuffer * outbuffer);
162
/* Class init: wire up GObject property handling, element metadata, pad
 * templates, and the aggregator/videoaggregator vfuncs that drive the
 * mixing process. */
static void
gst_gl_stereo_mix_class_init (GstGLStereoMixClass * klass)
{
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GstVideoAggregatorClass *videoaggregator_class =
      (GstVideoAggregatorClass *) klass;
  GstAggregatorClass *agg_class = (GstAggregatorClass *) klass;
  GstGLBaseMixerClass *base_mix_class = (GstGLBaseMixerClass *) klass;

  GST_DEBUG_CATEGORY_INIT (GST_CAT_DEFAULT, "glstereomixer", 0,
      "opengl stereoscopic mixer");

  gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_gl_stereo_mix_finalize);

  gobject_class->get_property = gst_gl_stereo_mix_get_property;
  gobject_class->set_property = gst_gl_stereo_mix_set_property;

  gst_element_class_set_metadata (element_class, "OpenGL stereo video combiner",
      "Filter/Effect/Video", "OpenGL stereo video combiner",
      "Jan Schmidt <jan@centricular.com>");

  /* "downmix-mode" selects the anaglyph type when the output is mono */
  g_object_class_install_property (gobject_class, PROP_DOWNMIX_MODE,
      g_param_spec_enum ("downmix-mode", "Mode for mono downmixed output",
          "Output anaglyph type to generate when downmixing to mono",
          GST_TYPE_GL_STEREO_DOWNMIX, DEFAULT_DOWNMIX,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /* Sink pads use our pad subclass so each can carry per-view state */
  gst_element_class_add_static_pad_template_with_gtype (element_class,
      &src_factory, GST_TYPE_AGGREGATOR_PAD);
  gst_element_class_add_static_pad_template_with_gtype (element_class,
      &sink_factory, GST_TYPE_GL_STEREO_MIX_PAD);

  element_class->request_new_pad =
      GST_DEBUG_FUNCPTR (gst_gl_stereo_mix_request_new_pad);
  element_class->release_pad =
      GST_DEBUG_FUNCPTR (gst_gl_stereo_mix_release_pad);

  agg_class->stop = gst_gl_stereo_mix_stop;
  agg_class->start = gst_gl_stereo_mix_start;
  agg_class->src_query = gst_gl_stereo_mix_src_query;
  agg_class->negotiated_src_caps = _negotiated_caps;

  videoaggregator_class->aggregate_frames = gst_gl_stereo_mix_aggregate_frames;
  videoaggregator_class->update_caps = _update_caps;
  videoaggregator_class->create_output_buffer =
      gst_gl_stereo_mix_create_output_buffer;

  /* The view converter works on any of these GL API variants */
  base_mix_class->supported_gl_api =
      GST_GL_API_GLES2 | GST_GL_API_OPENGL | GST_GL_API_OPENGL3;
}
214
/* Instance init: nothing to do; viewconvert and the output buffer
 * pointers start out NULL and are created in start(). */
static void
gst_gl_stereo_mix_init (GstGLStereoMix * mix)
{
}
219
220 static void
gst_gl_stereo_mix_finalize(GObject * object)221 gst_gl_stereo_mix_finalize (GObject * object)
222 {
223 //GstGLStereoMix *mix = GST_GL_STEREO_MIX (object);
224
225 G_OBJECT_CLASS (parent_class)->finalize (object);
226 }
227
228 static gboolean
gst_gl_stereo_mix_query_caps(GstPad * pad,GstAggregator * agg,GstQuery * query)229 gst_gl_stereo_mix_query_caps (GstPad * pad, GstAggregator * agg,
230 GstQuery * query)
231 {
232 GstCaps *filter, *caps;
233
234 gst_query_parse_caps (query, &filter);
235
236 caps = gst_pad_get_current_caps (agg->srcpad);
237 if (caps == NULL) {
238 caps = gst_pad_get_pad_template_caps (agg->srcpad);
239 }
240
241 if (filter)
242 caps = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
243
244 gst_query_set_caps_result (query, caps);
245 gst_caps_unref (caps);
246
247 return TRUE;
248 }
249
250 static gboolean
gst_gl_stereo_mix_src_query(GstAggregator * agg,GstQuery * query)251 gst_gl_stereo_mix_src_query (GstAggregator * agg, GstQuery * query)
252 {
253 switch (GST_QUERY_TYPE (query)) {
254 case GST_QUERY_CAPS:
255 return gst_gl_stereo_mix_query_caps (agg->srcpad, agg, query);
256 break;
257 default:
258 break;
259 }
260
261 return GST_AGGREGATOR_CLASS (parent_class)->src_query (agg, query);
262 }
263
264
265 static GstFlowReturn
gst_gl_stereo_mix_create_output_buffer(GstVideoAggregator * videoaggregator,GstBuffer ** outbuf)266 gst_gl_stereo_mix_create_output_buffer (GstVideoAggregator * videoaggregator,
267 GstBuffer ** outbuf)
268 {
269 GstGLStereoMix *mix = GST_GL_STEREO_MIX (videoaggregator);
270 GstFlowReturn ret = GST_FLOW_OK;
271
272 #if 0
273
274 if (!mix->priv->pool_active) {
275 if (!gst_buffer_pool_set_active (mix->priv->pool, TRUE)) {
276 GST_ELEMENT_ERROR (mix, RESOURCE, SETTINGS,
277 ("failed to activate bufferpool"), ("failed to activate bufferpool"));
278 return GST_FLOW_ERROR;
279 }
280 mix->priv->pool_active = TRUE;
281 }
282
283 return gst_buffer_pool_acquire_buffer (mix->priv->pool, outbuf, NULL);
284 #endif
285
286 if (!gst_gl_stereo_mix_make_output (mix)) {
287 gst_buffer_replace (&mix->primary_out, NULL);
288 gst_buffer_replace (&mix->auxilliary_out, NULL);
289 GST_ELEMENT_ERROR (mix, RESOURCE, SETTINGS,
290 ("Failed to generate output"), ("failed to generate output"));
291 ret = GST_FLOW_ERROR;
292 }
293
294 if (mix->auxilliary_out) {
295 *outbuf = mix->auxilliary_out;
296 mix->auxilliary_out = NULL;
297 } else {
298 *outbuf = mix->primary_out;
299 mix->primary_out = NULL;
300 }
301 return ret;
302 }
303
304 static gboolean
gst_gl_stereo_mix_make_output(GstGLStereoMix * mix)305 gst_gl_stereo_mix_make_output (GstGLStereoMix * mix)
306 {
307 GList *walk;
308 gboolean res = FALSE;
309 GstElement *element = GST_ELEMENT (mix);
310 gboolean missing_buffer = FALSE;
311
312 GST_LOG_OBJECT (mix, "Processing buffers");
313
314 GST_OBJECT_LOCK (mix);
315 walk = element->sinkpads;
316 while (walk) {
317 GstVideoAggregatorPad *vaggpad = walk->data;
318 GstGLStereoMixPad *pad = walk->data;
319 GstBuffer *buffer = gst_video_aggregator_pad_get_current_buffer (vaggpad);
320
321 GST_LOG_OBJECT (mix, "Checking pad %" GST_PTR_FORMAT, vaggpad);
322
323 if (buffer != NULL) {
324 pad->current_buffer = buffer;
325
326 GST_DEBUG_OBJECT (pad, "Got buffer %" GST_PTR_FORMAT,
327 pad->current_buffer);
328 } else {
329 GST_LOG_OBJECT (mix, "No buffer on pad %" GST_PTR_FORMAT, vaggpad);
330 pad->current_buffer = NULL;
331 missing_buffer = TRUE;
332 }
333 walk = g_list_next (walk);
334 }
335 if (missing_buffer) {
336 /* We're still waiting for a buffer to turn up on at least one input */
337 GST_WARNING_OBJECT (mix, "Not generating output - need more input buffers");
338 res = TRUE;
339 goto out;
340 }
341
342 /* Copy GL memory from each input frame to the output */
343 if (!gst_gl_stereo_mix_process_frames (mix)) {
344 GST_LOG_OBJECT (mix, "Failed to process frames to output");
345 goto out;
346 }
347
348 if (mix->primary_out == NULL)
349 goto out;
350
351 res = TRUE;
352
353 out:
354 GST_OBJECT_UNLOCK (mix);
355
356 return res;
357 }
358
359 static GstFlowReturn
gst_gl_stereo_mix_aggregate_frames(GstVideoAggregator * vagg,GstBuffer * outbuf)360 gst_gl_stereo_mix_aggregate_frames (GstVideoAggregator * vagg,
361 GstBuffer * outbuf)
362 {
363 GstGLStereoMix *mix = GST_GL_STEREO_MIX (vagg);
364 /* If we're operating in frame-by-frame mode, push
365 * the primary view now, and let the parent class
366 * push the remaining auxilliary view */
367 if (GST_VIDEO_INFO_MULTIVIEW_MODE (&vagg->info) ==
368 GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME) {
369 /* Transfer the timestamps video-agg put on the aux buffer */
370 gst_buffer_copy_into (mix->primary_out, outbuf,
371 GST_BUFFER_COPY_TIMESTAMPS, 0, -1);
372 gst_aggregator_finish_buffer (GST_AGGREGATOR (vagg), mix->primary_out);
373 mix->primary_out = NULL;
374
375 /* And actually, we don't want timestamps on the aux buffer */
376 GST_BUFFER_TIMESTAMP (outbuf) = GST_CLOCK_TIME_NONE;
377 GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;
378 }
379 return GST_FLOW_OK;
380 }
381
382 static void
gst_gl_stereo_mix_get_property(GObject * object,guint prop_id,GValue * value,GParamSpec * pspec)383 gst_gl_stereo_mix_get_property (GObject * object,
384 guint prop_id, GValue * value, GParamSpec * pspec)
385 {
386 GstGLStereoMix *mix = GST_GL_STEREO_MIX (object);
387
388 switch (prop_id) {
389 case PROP_DOWNMIX_MODE:
390 g_value_set_enum (value, mix->downmix_mode);
391 break;
392 default:
393 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
394 break;
395 }
396 }
397
398 static void
gst_gl_stereo_mix_set_property(GObject * object,guint prop_id,const GValue * value,GParamSpec * pspec)399 gst_gl_stereo_mix_set_property (GObject * object,
400 guint prop_id, const GValue * value, GParamSpec * pspec)
401 {
402 GstGLStereoMix *mix = GST_GL_STEREO_MIX (object);
403
404 switch (prop_id) {
405 case PROP_DOWNMIX_MODE:
406 mix->downmix_mode = g_value_get_enum (value);
407 if (mix->viewconvert)
408 g_object_set_property (G_OBJECT (mix->viewconvert), "downmix-mode",
409 value);
410 break;
411 default:
412 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
413 break;
414 }
415 }
416
417 static GstPad *
gst_gl_stereo_mix_request_new_pad(GstElement * element,GstPadTemplate * templ,const gchar * req_name,const GstCaps * caps)418 gst_gl_stereo_mix_request_new_pad (GstElement * element, GstPadTemplate * templ,
419 const gchar * req_name, const GstCaps * caps)
420 {
421 GstPad *newpad;
422
423 newpad = (GstPad *)
424 GST_ELEMENT_CLASS (parent_class)->request_new_pad (element,
425 templ, req_name, caps);
426
427 if (newpad == NULL)
428 goto could_not_create;
429
430 gst_child_proxy_child_added (GST_CHILD_PROXY (element), G_OBJECT (newpad),
431 GST_OBJECT_NAME (newpad));
432
433 return GST_PAD_CAST (newpad);
434
435 could_not_create:
436 {
437 GST_DEBUG_OBJECT (element, "could not create/add pad");
438 return NULL;
439 }
440 }
441
/* Release a request pad: emit the child-proxy removal notification
 * first (while the pad is still part of the element), then let the
 * parent class do the actual removal. */
static void
gst_gl_stereo_mix_release_pad (GstElement * element, GstPad * pad)
{
  GST_DEBUG_OBJECT (element, "release pad %s:%s", GST_DEBUG_PAD_NAME (pad));

  gst_child_proxy_child_removed (GST_CHILD_PROXY (element), G_OBJECT (pad),
      GST_OBJECT_NAME (pad));

  GST_ELEMENT_CLASS (parent_class)->release_pad (element, pad);
}
452
/* GstAggregator::start — chain up, then create the GstGLViewConvert
 * helper that performs the multiview conversion, priming it with the
 * currently configured downmix mode. */
static gboolean
gst_gl_stereo_mix_start (GstAggregator * agg)
{
  GstGLStereoMix *mix = GST_GL_STEREO_MIX (agg);

  if (!GST_AGGREGATOR_CLASS (parent_class)->start (agg))
    return FALSE;

  /* NOTE(review): the object lock is taken here, but set_property
   * writes viewconvert's downmix-mode without it — verify the
   * intended locking around mix->viewconvert. */
  GST_OBJECT_LOCK (mix);
  mix->viewconvert = gst_gl_view_convert_new ();
  g_object_set (G_OBJECT (mix->viewconvert), "downmix-mode",
      mix->downmix_mode, NULL);
  GST_OBJECT_UNLOCK (mix);

  return TRUE;
}
469
470 static gboolean
gst_gl_stereo_mix_stop(GstAggregator * agg)471 gst_gl_stereo_mix_stop (GstAggregator * agg)
472 {
473 GstGLStereoMix *mix = GST_GL_STEREO_MIX (agg);
474
475 if (!GST_AGGREGATOR_CLASS (parent_class)->stop (agg))
476 return FALSE;
477
478 if (mix->viewconvert) {
479 gst_object_unref (mix->viewconvert);
480 mix->viewconvert = NULL;
481 }
482
483 return TRUE;
484 }
485
486 /* Convert to caps that can be accepted by this element... */
487 static GstCaps *
get_converted_caps(GstGLStereoMix * mix,GstCaps * caps)488 get_converted_caps (GstGLStereoMix * mix, GstCaps * caps)
489 {
490 #if 0
491 GstGLContext *context = GST_GL_BASE_MIXER (mix)->context;
492 GstCaps *result, *tmp;
493
494 GST_LOG_OBJECT (mix, "Converting caps %" GST_PTR_FORMAT, caps);
495 result = gst_gl_upload_transform_caps (context, GST_PAD_SINK, caps, NULL);
496 tmp = result;
497 GST_TRACE_OBJECT (mix, "transfer returned caps %" GST_PTR_FORMAT, tmp);
498
499 result =
500 gst_gl_color_convert_transform_caps (context, GST_PAD_SINK, tmp, NULL);
501 gst_caps_unref (tmp);
502 GST_TRACE_OBJECT (mix, "convert returned caps %" GST_PTR_FORMAT, tmp);
503
504 tmp = result;
505 result = gst_gl_view_convert_transform_caps (mix->viewconvert,
506 GST_PAD_SINK, tmp, NULL);
507 gst_caps_unref (tmp);
508 #else
509 GstCaps *result;
510
511 GST_LOG_OBJECT (mix, "Converting caps %" GST_PTR_FORMAT, caps);
512 result = gst_gl_view_convert_transform_caps (mix->viewconvert,
513 GST_PAD_SINK, caps, NULL);
514 #endif
515
516 GST_LOG_OBJECT (mix, "returning caps %" GST_PTR_FORMAT, result);
517
518 return result;
519 }
520
/* Return the possible output caps based on inputs and downstream prefs.
 *
 * Scans all sink pads for the largest per-view width/height and the
 * fastest frame rate, builds a separated-multiview RGBA GLMemory caps
 * from that, transforms it through the view converter, and intersects
 * with the downstream-provided caps. Also records the chosen input
 * format in mix->mix_info for later negotiation.
 */
static GstCaps *
_update_caps (GstVideoAggregator * vagg, GstCaps * caps)
{
  GstGLStereoMix *mix = GST_GL_STEREO_MIX (vagg);
  GList *l;
  gint best_width = -1, best_height = -1;
  gdouble best_fps = -1, cur_fps;
  gint best_fps_n = 0, best_fps_d = 1;
  GstVideoInfo *mix_info;
  GstCaps *blend_caps, *tmp_caps;
  GstCaps *out_caps;

  /* Object lock protects the sinkpads list while we iterate it */
  GST_OBJECT_LOCK (vagg);

  for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
    GstVideoAggregatorPad *pad = l->data;
    GstVideoInfo tmp = pad->info;
    gint this_width, this_height;
    gint fps_n, fps_d;

    /* Pad not negotiated yet */
    if (!pad->info.finfo)
      continue;

    /* This can happen if we release a pad and another pad hasn't been negotiated_caps yet */
    if (GST_VIDEO_INFO_FORMAT (&pad->info) == GST_VIDEO_FORMAT_UNKNOWN)
      continue;

    /* Convert to per-view width/height for unpacked forms */
    gst_video_multiview_video_info_change_mode (&tmp,
        GST_VIDEO_MULTIVIEW_MODE_SEPARATED, GST_VIDEO_MULTIVIEW_FLAGS_NONE);

    this_width = GST_VIDEO_INFO_WIDTH (&tmp);
    this_height = GST_VIDEO_INFO_HEIGHT (&tmp);
    fps_n = GST_VIDEO_INFO_FPS_N (&tmp);
    fps_d = GST_VIDEO_INFO_FPS_D (&tmp);

    GST_INFO_OBJECT (vagg, "Input pad %" GST_PTR_FORMAT
        " w %u h %u", pad, this_width, this_height);

    if (this_width == 0 || this_height == 0)
      continue;

    /* Track the maximum per-view dimensions across all inputs */
    if (best_width < this_width)
      best_width = this_width;
    if (best_height < this_height)
      best_height = this_height;

    if (fps_d == 0)
      cur_fps = 0.0;
    else
      gst_util_fraction_to_double (fps_n, fps_d, &cur_fps);

    /* Keep the fastest frame rate seen so far */
    if (best_fps < cur_fps) {
      best_fps = cur_fps;
      best_fps_n = fps_n;
      best_fps_d = fps_d;
    }

    /* FIXME: Preserve PAR for at least one input when different sized inputs */
  }
  GST_OBJECT_UNLOCK (vagg);

  /* NOTE(review): if no pad is negotiated yet, best_width/height stay
   * at -1 here — confirm callers guarantee at least one usable pad. */
  mix_info = &mix->mix_info;
  gst_video_info_set_format (mix_info, GST_VIDEO_FORMAT_RGBA, best_width,
      best_height);

  GST_VIDEO_INFO_FPS_N (mix_info) = best_fps_n;
  GST_VIDEO_INFO_FPS_D (mix_info) = best_fps_d;

  /* The converter's input side is always two separated views */
  GST_VIDEO_INFO_MULTIVIEW_MODE (mix_info) = GST_VIDEO_MULTIVIEW_MODE_SEPARATED;
  GST_VIDEO_INFO_VIEWS (mix_info) = 2;

  /* FIXME: If input is marked as flipped or flopped, preserve those flags */
  GST_VIDEO_INFO_MULTIVIEW_FLAGS (mix_info) = GST_VIDEO_MULTIVIEW_FLAGS_NONE;

  /* Choose our output format based on downstream preferences */
  blend_caps = gst_video_info_to_caps (mix_info);

  gst_caps_set_features (blend_caps, 0,
      gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_GL_MEMORY));

  tmp_caps = get_converted_caps (GST_GL_STEREO_MIX (vagg), blend_caps);
  gst_caps_unref (blend_caps);

  out_caps = gst_caps_intersect (caps, tmp_caps);
  gst_caps_unref (tmp_caps);

  GST_DEBUG_OBJECT (vagg, "Possible output caps %" GST_PTR_FORMAT, out_caps);

  return out_caps;
}
613
614 /* Called after videoaggregator fixates our caps */
615 static gboolean
_negotiated_caps(GstAggregator * agg,GstCaps * caps)616 _negotiated_caps (GstAggregator * agg, GstCaps * caps)
617 {
618 GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
619 GstGLStereoMix *mix = GST_GL_STEREO_MIX (vagg);
620 GstCaps *in_caps;
621
622 GST_LOG_OBJECT (mix, "Configured output caps %" GST_PTR_FORMAT, caps);
623
624 if (GST_AGGREGATOR_CLASS (parent_class)->negotiated_src_caps)
625 if (!GST_AGGREGATOR_CLASS (parent_class)->negotiated_src_caps (agg, caps))
626 return FALSE;
627
628 /* Update the glview_convert output */
629
630 /* We can configure the view_converter now */
631 gst_gl_view_convert_set_context (mix->viewconvert,
632 GST_GL_BASE_MIXER (mix)->context);
633
634 in_caps = gst_video_info_to_caps (&mix->mix_info);
635 gst_caps_set_features (in_caps, 0,
636 gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_GL_MEMORY));
637 gst_caps_set_simple (in_caps, "texture-target", G_TYPE_STRING,
638 GST_GL_TEXTURE_TARGET_2D_STR, NULL);
639
640 gst_gl_view_convert_set_caps (mix->viewconvert, in_caps, caps);
641
642 return TRUE;
643 }
644
645 /* called with the object lock held */
646 static gboolean
gst_gl_stereo_mix_process_frames(GstGLStereoMix * mixer)647 gst_gl_stereo_mix_process_frames (GstGLStereoMix * mixer)
648 {
649 GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (mixer);
650 GstBuffer *converted_buffer, *inbuf;
651 GstVideoInfo *out_info = &vagg->info;
652 #ifndef G_DISABLE_ASSERT
653 gint n;
654 #endif
655 gint v, views;
656 gint valid_views = 0;
657 GList *walk;
658
659 inbuf = gst_buffer_new ();
660 walk = GST_ELEMENT (mixer)->sinkpads;
661 while (walk) {
662 GstGLStereoMixPad *pad = walk->data;
663 GstMemory *in_mem;
664
665 GST_LOG_OBJECT (mixer, "Handling frame %d", valid_views);
666
667 if (!pad || !pad->current_buffer) {
668 GST_DEBUG ("skipping texture, null frame");
669 walk = g_list_next (walk);
670 continue;
671 }
672
673 in_mem = gst_buffer_get_memory (pad->current_buffer, 0);
674
675 GST_LOG_OBJECT (mixer,
676 "Appending memory %" GST_PTR_FORMAT " to intermediate buffer", in_mem);
677 /* Appending the memory to a 2nd buffer locks it
678 * exclusive a 2nd time, which will mark it for
679 * copy-on-write. The ref will keep the memory
680 * alive but we add a parent_buffer_meta to also
681 * prevent the input buffer from returning to any buffer
682 * pool it might belong to
683 */
684 gst_buffer_append_memory (inbuf, in_mem);
685 /* Use parent buffer meta to keep input buffer alive */
686 gst_buffer_add_parent_buffer_meta (inbuf, pad->current_buffer);
687
688 valid_views++;
689 walk = g_list_next (walk);
690 }
691
692 if (mixer->mix_info.views != valid_views) {
693 GST_WARNING_OBJECT (mixer, "Not enough input views to process");
694 return FALSE;
695 }
696
697 if (GST_VIDEO_INFO_MULTIVIEW_MODE (out_info) ==
698 GST_VIDEO_MULTIVIEW_MODE_SEPARATED)
699 views = out_info->views;
700 else
701 views = 1;
702
703 if (gst_gl_view_convert_submit_input_buffer (mixer->viewconvert,
704 FALSE, inbuf) != GST_FLOW_OK)
705 return FALSE;
706
707 /* Clear any existing buffers, just in case */
708 gst_buffer_replace (&mixer->primary_out, NULL);
709 gst_buffer_replace (&mixer->auxilliary_out, NULL);
710
711 if (gst_gl_view_convert_get_output (mixer->viewconvert,
712 &mixer->primary_out) != GST_FLOW_OK)
713 return FALSE;
714
715 if (GST_VIDEO_INFO_MULTIVIEW_MODE (out_info) ==
716 GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME) {
717 if (gst_gl_view_convert_get_output (mixer->viewconvert,
718 &mixer->auxilliary_out) != GST_FLOW_OK)
719 return FALSE;
720 }
721
722 if (mixer->primary_out == NULL)
723 return FALSE;
724
725 converted_buffer = mixer->primary_out;
726
727 #ifndef G_DISABLE_ASSERT
728 n = gst_buffer_n_memory (converted_buffer);
729 g_assert (n == GST_VIDEO_INFO_N_PLANES (out_info) * views);
730 #endif
731
732 for (v = 0; v < views; v++) {
733 gst_buffer_add_video_meta_full (converted_buffer, v,
734 GST_VIDEO_INFO_FORMAT (out_info),
735 GST_VIDEO_INFO_WIDTH (out_info),
736 GST_VIDEO_INFO_HEIGHT (out_info),
737 GST_VIDEO_INFO_N_PLANES (out_info), out_info->offset, out_info->stride);
738 if (mixer->auxilliary_out) {
739 gst_buffer_add_video_meta_full (mixer->auxilliary_out, v,
740 GST_VIDEO_INFO_FORMAT (out_info),
741 GST_VIDEO_INFO_WIDTH (out_info),
742 GST_VIDEO_INFO_HEIGHT (out_info),
743 GST_VIDEO_INFO_N_PLANES (out_info), out_info->offset,
744 out_info->stride);
745 }
746 }
747
748 return TRUE;
749 }
750
751 /* GstChildProxy implementation */
752 static GObject *
gst_gl_stereo_mix_child_proxy_get_child_by_index(GstChildProxy * child_proxy,guint index)753 gst_gl_stereo_mix_child_proxy_get_child_by_index (GstChildProxy * child_proxy,
754 guint index)
755 {
756 GstGLStereoMix *gl_stereo_mix = GST_GL_STEREO_MIX (child_proxy);
757 GObject *obj = NULL;
758
759 GST_OBJECT_LOCK (gl_stereo_mix);
760 obj = g_list_nth_data (GST_ELEMENT_CAST (gl_stereo_mix)->sinkpads, index);
761 if (obj)
762 gst_object_ref (obj);
763 GST_OBJECT_UNLOCK (gl_stereo_mix);
764
765 return obj;
766 }
767
768 static guint
gst_gl_stereo_mix_child_proxy_get_children_count(GstChildProxy * child_proxy)769 gst_gl_stereo_mix_child_proxy_get_children_count (GstChildProxy * child_proxy)
770 {
771 guint count = 0;
772 GstGLStereoMix *gl_stereo_mix = GST_GL_STEREO_MIX (child_proxy);
773
774 GST_OBJECT_LOCK (gl_stereo_mix);
775 count = GST_ELEMENT_CAST (gl_stereo_mix)->numsinkpads;
776 GST_OBJECT_UNLOCK (gl_stereo_mix);
777 GST_INFO_OBJECT (gl_stereo_mix, "Children Count: %d", count);
778
779 return count;
780 }
781
/* GstChildProxy interface init: hook up the two accessors implemented
 * above so sink pads are reachable as named children. */
static void
gst_gl_stereo_mix_child_proxy_init (gpointer g_iface, gpointer iface_data)
{
  GstChildProxyInterface *iface = g_iface;

  iface->get_child_by_index = gst_gl_stereo_mix_child_proxy_get_child_by_index;
  iface->get_children_count = gst_gl_stereo_mix_child_proxy_get_children_count;
}
790