1 /*
2  * Initially based on gst-omx/omx/gstomxvideodec.c
3  *
4  * Copyright (C) 2011, Hewlett-Packard Development Company, L.P.
5  *   Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>, Collabora Ltd.
6  *
7  * Copyright (C) 2012, Collabora Ltd.
8  *   Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
9  *
10  * Copyright (C) 2012, Rafaël Carré <funman@videolanorg>
11  *
12  * Copyright (C) 2015, Sebastian Dröge <sebastian@centricular.com>
13  *
14  * Copyright (C) 2014-2015, Collabora Ltd.
 *   Author: Matthieu Bouron <matthieu.bouron@collabora.com>
16  *
17  * Copyright (C) 2015, Edward Hervey
18  *   Author: Edward Hervey <bilboed@gmail.com>
19  *
20  * Copyright (C) 2015, Matthew Waters <matthew@centricular.com>
21  *
22  * This library is free software; you can redistribute it and/or
23  * modify it under the terms of the GNU Lesser General Public
24  * License as published by the Free Software Foundation
25  * version 2.1 of the License.
26  *
27  * This library is distributed in the hope that it will be useful,
28  * but WITHOUT ANY WARRANTY; without even the implied warranty of
29  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
30  * Lesser General Public License for more details.
31  *
32  * You should have received a copy of the GNU Lesser General Public
33  * License along with this library; if not, write to the Free Software
34  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
35  *
36  */
37 
38 #ifdef HAVE_CONFIG_H
39 #include "config.h"
40 #endif
41 
42 #include <gst/gst.h>
43 #include <gst/gl/gl.h>
44 #include <gst/video/gstvideometa.h>
45 #include <gst/video/gstvideoaffinetransformationmeta.h>
46 #include <gst/video/gstvideopool.h>
47 #include <string.h>
48 
49 #ifdef HAVE_ORC
50 #include <orc/orc.h>
51 #else
52 #define orc_memcpy memcpy
53 #endif
54 
55 #include "gstamcvideodec.h"
56 #include "gstamc-constants.h"
57 
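/* Note: this file only provides the GstAmcVideoDec base implementation.
 * Concrete decoder elements are registered at runtime for each codec that
 * MediaCodec reports (see gst_amc_video_dec_base_init(), which reads the
 * GstAmcCodecInfo attached as class qdata), so the exact element names are
 * device dependent.  An illustrative pipeline sketch, with
 * amcviddec-<codecname> standing in for whatever decoder element the device
 * registered for the stream's format, would be something like:
 *
 *   ... ! h264parse ! amcviddec-<codecname> ! glimagesink
 */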
58 GST_DEBUG_CATEGORY_STATIC (gst_amc_video_dec_debug_category);
59 #define GST_CAT_DEFAULT gst_amc_video_dec_debug_category
60 
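/* Convenience wrapper: log the GError on the element, forward it to the
 * GstVideoDecoder error handling (counting as one error towards the
 * decoder's max-errors threshold) and clear it. */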
61 #define GST_VIDEO_DECODER_ERROR_FROM_ERROR(el, err) G_STMT_START { \
62   gchar *__dbg = g_strdup (err->message);                               \
63   GstVideoDecoder *__dec = GST_VIDEO_DECODER (el);                      \
64   GST_WARNING_OBJECT (el, "error: %s", __dbg);                          \
65   _gst_video_decoder_error (__dec, 1,                                   \
66     err->domain, err->code,                                             \
67     NULL, __dbg, __FILE__, GST_FUNCTION, __LINE__);                     \
68   g_clear_error (&err); \
69 } G_STMT_END
70 
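/* The Java listener stores a back pointer to the decoder as a jlong.  On
 * 32-bit platforms the pointer is round-tripped through jint first, so the
 * narrowing/widening between the pointer and the 64-bit Java long is
 * explicit rather than an implicit conversion. */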
71 #if GLIB_SIZEOF_VOID_P == 8
72 #define JLONG_TO_GST_AMC_VIDEO_DEC(value) (GstAmcVideoDec *)(value)
73 #define GST_AMC_VIDEO_DEC_TO_JLONG(value) (jlong)(value)
74 #else
75 #define JLONG_TO_GST_AMC_VIDEO_DEC(value) (GstAmcVideoDec *)(jint)(value)
76 #define GST_AMC_VIDEO_DEC_TO_JLONG(value) (jlong)(jint)(value)
77 #endif
78 
79 typedef struct _BufferIdentification BufferIdentification;
80 struct _BufferIdentification
81 {
82   guint64 timestamp;
83 };
84 
85 struct gl_sync_result
86 {
87   gint refcount;
88   gint64 frame_available_ts;
  gboolean updated;             /* only ever call update_tex_image once */
  gboolean released;            /* only ever call release_output_buffer once */
91   gboolean rendered;            /* whether the release resulted in a render */
92 };
93 
94 static struct gl_sync_result *
_gl_sync_result_ref (struct gl_sync_result *result)
96 {
97   g_assert (result != NULL);
98 
99   g_atomic_int_inc (&result->refcount);
100 
101   GST_TRACE ("gl_sync result %p ref", result);
102 
103   return result;
104 }
105 
106 static void
_gl_sync_result_unref (struct gl_sync_result *result)
108 {
109   g_assert (result != NULL);
110 
111   GST_TRACE ("gl_sync result %p unref", result);
112 
113   if (g_atomic_int_dec_and_test (&result->refcount)) {
114     GST_TRACE ("freeing gl_sync result %p", result);
115     g_free (result);
116   }
117 }
118 
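/* One gl_sync is created for every output buffer dequeued in Surface (GL)
 * mode (see gst_amc_video_dec_loop) and queued on sink->gl_queue until the
 * buffer has been released into the surface and rendered.  When a GstBuffer
 * carrying the sync meta is copied, the new gl_sync shares the same
 * gl_sync_result (see _amc_gl_copy), so the update/release state is tracked
 * only once per decoded frame. */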
119 struct gl_sync
120 {
121   gint refcount;
122   GstAmcVideoDec *sink;         /* back reference for statistics, lock, cond, etc */
123   gint buffer_idx;              /* idx of the AMC buffer we should render */
124   GstBuffer *buffer;            /* back reference to the buffer */
125   GstGLMemory *oes_mem;         /* where amc is rendering into. The same for every gl_sync */
126   GstAmcSurface *surface;       /* java wrapper for where amc is rendering into */
127   guint gl_frame_no;            /* effectively the frame id */
128   gint64 released_ts;           /* microseconds from g_get_monotonic_time() */
129   struct gl_sync_result *result;
130 };
131 
132 static struct gl_sync *
_gl_sync_ref (struct gl_sync *sync)
134 {
135   g_assert (sync != NULL);
136 
137   g_atomic_int_inc (&sync->refcount);
138 
139   GST_TRACE ("gl_sync %p ref", sync);
140 
141   return sync;
142 }
143 
144 static void
_gl_sync_unref (struct gl_sync *sync)
146 {
147   g_assert (sync != NULL);
148 
149   GST_TRACE ("gl_sync %p unref", sync);
150 
151   if (g_atomic_int_dec_and_test (&sync->refcount)) {
152     GST_TRACE ("freeing gl_sync %p", sync);
153 
154     _gl_sync_result_unref (sync->result);
155 
156     g_object_unref (sync->sink);
157     g_object_unref (sync->surface);
158     gst_memory_unref ((GstMemory *) sync->oes_mem);
159 
160     g_free (sync);
161   }
162 }
163 
164 static gint
_queue_compare_gl_sync (gconstpointer a, gconstpointer b)
166 {
167   const struct gl_sync *sync = a;
168   guint frame = GPOINTER_TO_INT (b);
169 
170   return sync->gl_frame_no - frame;
171 }
172 
173 static GList *
_find_gl_sync_for_frame (GstAmcVideoDec * dec, guint frame)
175 {
176   return g_queue_find_custom (dec->gl_queue, GINT_TO_POINTER (frame),
177       (GCompareFunc) _queue_compare_gl_sync);
178 }
179 
180 static void
_attach_mem_to_context (GstGLContext * context, GstAmcVideoDec * self)
182 {
183   GST_TRACE_OBJECT (self, "attaching texture %p id %u to current context",
184       self->surface->texture, self->oes_mem->tex_id);
185   if (!gst_amc_surface_texture_attach_to_gl_context (self->surface->texture,
186           self->oes_mem->tex_id, &self->gl_error)) {
187     GST_ERROR_OBJECT (self, "Failed to attach texture to the GL context");
188     GST_ELEMENT_ERROR_FROM_ERROR (self, self->gl_error);
189   } else {
190     self->gl_mem_attached = TRUE;
191   }
192 }
193 
194 static void
_dettach_mem_from_context (GstGLContext * context, GstAmcVideoDec * self)
196 {
197   if (self->surface) {
198     guint tex_id = self->oes_mem ? self->oes_mem->tex_id : 0;
199 
200     GST_TRACE_OBJECT (self, "detaching texture %p id %u from current context",
201         self->surface->texture, tex_id);
202 
203     if (!gst_amc_surface_texture_detach_from_gl_context (self->surface->texture,
204             &self->gl_error)) {
      GST_ERROR_OBJECT (self, "Failed to detach texture from the GL context");
206       GST_ELEMENT_ERROR_FROM_ERROR (self, self->gl_error);
207     }
208   }
209   self->gl_mem_attached = FALSE;
210 }
211 
212 static BufferIdentification *
buffer_identification_new (GstClockTime timestamp)
214 {
215   BufferIdentification *id = g_slice_new (BufferIdentification);
216 
217   id->timestamp = timestamp;
218 
219   return id;
220 }
221 
222 static void
buffer_identification_free (BufferIdentification * id)
224 {
225   g_slice_free (BufferIdentification, id);
226 }
227 
228 /* prototypes */
229 static void gst_amc_video_dec_finalize (GObject * object);
230 
231 static GstStateChangeReturn
232 gst_amc_video_dec_change_state (GstElement * element,
233     GstStateChange transition);
234 static void gst_amc_video_dec_set_context (GstElement * element,
235     GstContext * context);
236 
237 static gboolean gst_amc_video_dec_open (GstVideoDecoder * decoder);
238 static gboolean gst_amc_video_dec_close (GstVideoDecoder * decoder);
239 static gboolean gst_amc_video_dec_start (GstVideoDecoder * decoder);
240 static gboolean gst_amc_video_dec_stop (GstVideoDecoder * decoder);
241 static gboolean gst_amc_video_dec_set_format (GstVideoDecoder * decoder,
242     GstVideoCodecState * state);
243 static gboolean gst_amc_video_dec_flush (GstVideoDecoder * decoder);
244 static GstFlowReturn gst_amc_video_dec_handle_frame (GstVideoDecoder * decoder,
245     GstVideoCodecFrame * frame);
246 static GstFlowReturn gst_amc_video_dec_finish (GstVideoDecoder * decoder);
247 static gboolean gst_amc_video_dec_decide_allocation (GstVideoDecoder * bdec,
248     GstQuery * query);
249 static gboolean gst_amc_video_dec_src_query (GstVideoDecoder * bdec,
250     GstQuery * query);
251 
252 static GstFlowReturn gst_amc_video_dec_drain (GstAmcVideoDec * self);
253 static gboolean gst_amc_video_dec_check_codec_config (GstAmcVideoDec * self);
254 static void
255 gst_amc_video_dec_on_frame_available (JNIEnv * env, jobject thiz,
256     long long context, jobject surfaceTexture);
257 
258 enum
259 {
260   PROP_0
261 };
262 
263 /* class initialization */
264 
265 static void gst_amc_video_dec_class_init (GstAmcVideoDecClass * klass);
266 static void gst_amc_video_dec_init (GstAmcVideoDec * self);
267 static void gst_amc_video_dec_base_init (gpointer g_class);
268 
269 static GstVideoDecoderClass *parent_class = NULL;
270 
271 GType
gst_amc_video_dec_get_type (void)
273 {
274   static volatile gsize type = 0;
275 
276   if (g_once_init_enter (&type)) {
277     GType _type;
278     static const GTypeInfo info = {
279       sizeof (GstAmcVideoDecClass),
280       gst_amc_video_dec_base_init,
281       NULL,
282       (GClassInitFunc) gst_amc_video_dec_class_init,
283       NULL,
284       NULL,
285       sizeof (GstAmcVideoDec),
286       0,
287       (GInstanceInitFunc) gst_amc_video_dec_init,
288       NULL
289     };
290 
291     _type = g_type_register_static (GST_TYPE_VIDEO_DECODER, "GstAmcVideoDec",
292         &info, 0);
293 
294     GST_DEBUG_CATEGORY_INIT (gst_amc_video_dec_debug_category, "amcvideodec", 0,
295         "Android MediaCodec video decoder");
296 
297     g_once_init_leave (&type, _type);
298   }
299   return type;
300 }
301 
302 static const gchar *
caps_to_mime (GstCaps * caps)
304 {
305   GstStructure *s;
306   const gchar *name;
307 
308   s = gst_caps_get_structure (caps, 0);
309   if (!s)
310     return NULL;
311 
312   name = gst_structure_get_name (s);
313 
314   if (strcmp (name, "video/mpeg") == 0) {
315     gint mpegversion;
316 
317     if (!gst_structure_get_int (s, "mpegversion", &mpegversion))
318       return NULL;
319 
320     if (mpegversion == 4)
321       return "video/mp4v-es";
322     else if (mpegversion == 1 || mpegversion == 2)
323       return "video/mpeg2";
324   } else if (strcmp (name, "video/x-h263") == 0) {
325     return "video/3gpp";
326   } else if (strcmp (name, "video/x-h264") == 0) {
327     return "video/avc";
328   } else if (strcmp (name, "video/x-h265") == 0) {
329     return "video/hevc";
330   } else if (strcmp (name, "video/x-vp8") == 0) {
331     return "video/x-vnd.on2.vp8";
332   } else if (strcmp (name, "video/x-vp9") == 0) {
333     return "video/x-vnd.on2.vp9";
334   } else if (strcmp (name, "video/x-divx") == 0) {
335     return "video/mp4v-es";
336   }
337 
338   return NULL;
339 }
340 
341 static void
gst_amc_video_dec_base_init (gpointer g_class)
343 {
344   GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
345   GstAmcVideoDecClass *amcvideodec_class = GST_AMC_VIDEO_DEC_CLASS (g_class);
346   const GstAmcCodecInfo *codec_info;
347   GstPadTemplate *templ;
348   GstCaps *sink_caps, *src_caps, *all_src_caps;
349   gchar *longname;
350 
351   codec_info =
352       g_type_get_qdata (G_TYPE_FROM_CLASS (g_class), gst_amc_codec_info_quark);
353   /* This happens for the base class and abstract subclasses */
354   if (!codec_info)
355     return;
356 
357   amcvideodec_class->codec_info = codec_info;
358 
359   gst_amc_codec_info_to_caps (codec_info, &sink_caps, &src_caps);
360 
361   all_src_caps =
362       gst_caps_from_string ("video/x-raw(" GST_CAPS_FEATURE_MEMORY_GL_MEMORY
363       "), format = (string) RGBA, texture-target = (string) external-oes");
364 
365   if (codec_info->gl_output_only) {
366     gst_caps_unref (src_caps);
367   } else {
368     gst_caps_append (all_src_caps, src_caps);
369   }
370 
371   /* Add pad templates */
372   templ =
373       gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, sink_caps);
374   gst_element_class_add_pad_template (element_class, templ);
375   gst_caps_unref (sink_caps);
376 
377   templ =
378       gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, all_src_caps);
379   gst_element_class_add_pad_template (element_class, templ);
380   gst_caps_unref (all_src_caps);
381 
382   longname = g_strdup_printf ("Android MediaCodec %s", codec_info->name);
383   gst_element_class_set_metadata (element_class,
384       codec_info->name,
385       "Codec/Decoder/Video/Hardware",
386       longname, "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
387   g_free (longname);
388 }
389 
390 static void
gst_amc_video_dec_class_init (GstAmcVideoDecClass * klass)
392 {
393   GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
394   GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
395   GstVideoDecoderClass *videodec_class = GST_VIDEO_DECODER_CLASS (klass);
396 
397   parent_class = g_type_class_peek_parent (klass);
398 
399   gobject_class->finalize = gst_amc_video_dec_finalize;
400 
401   element_class->change_state =
402       GST_DEBUG_FUNCPTR (gst_amc_video_dec_change_state);
403   element_class->set_context =
404       GST_DEBUG_FUNCPTR (gst_amc_video_dec_set_context);
405 
406   videodec_class->start = GST_DEBUG_FUNCPTR (gst_amc_video_dec_start);
407   videodec_class->stop = GST_DEBUG_FUNCPTR (gst_amc_video_dec_stop);
408   videodec_class->open = GST_DEBUG_FUNCPTR (gst_amc_video_dec_open);
409   videodec_class->close = GST_DEBUG_FUNCPTR (gst_amc_video_dec_close);
410   videodec_class->flush = GST_DEBUG_FUNCPTR (gst_amc_video_dec_flush);
411   videodec_class->set_format = GST_DEBUG_FUNCPTR (gst_amc_video_dec_set_format);
412   videodec_class->handle_frame =
413       GST_DEBUG_FUNCPTR (gst_amc_video_dec_handle_frame);
414   videodec_class->finish = GST_DEBUG_FUNCPTR (gst_amc_video_dec_finish);
415   videodec_class->decide_allocation =
416       GST_DEBUG_FUNCPTR (gst_amc_video_dec_decide_allocation);
417   videodec_class->src_query = GST_DEBUG_FUNCPTR (gst_amc_video_dec_src_query);
418 }
419 
420 static void
gst_amc_video_dec_init (GstAmcVideoDec * self)
422 {
423   gst_video_decoder_set_packetized (GST_VIDEO_DECODER (self), TRUE);
424   gst_video_decoder_set_needs_format (GST_VIDEO_DECODER (self), TRUE);
425 
426   g_mutex_init (&self->drain_lock);
427   g_cond_init (&self->drain_cond);
428 
429   g_mutex_init (&self->gl_lock);
430   g_cond_init (&self->gl_cond);
431 
432   self->gl_queue = g_queue_new ();
433 }
434 
435 static gboolean
gst_amc_video_dec_open (GstVideoDecoder * decoder)
437 {
438   GstAmcVideoDec *self = GST_AMC_VIDEO_DEC (decoder);
439   GstAmcVideoDecClass *klass = GST_AMC_VIDEO_DEC_GET_CLASS (self);
440   GError *err = NULL;
441 
442   GST_DEBUG_OBJECT (self, "Opening decoder");
443 
444   self->codec = gst_amc_codec_new (klass->codec_info->name, &err);
445   if (!self->codec) {
446     GST_ELEMENT_ERROR_FROM_ERROR (self, err);
447     return FALSE;
448   }
449   self->codec_config = AMC_CODEC_CONFIG_NONE;
450 
451   self->started = FALSE;
452   self->flushing = TRUE;
453 
454   GST_DEBUG_OBJECT (self, "Opened decoder");
455 
456   return TRUE;
457 }
458 
459 static gboolean
gst_amc_video_dec_close (GstVideoDecoder * decoder)
461 {
462   GstAmcVideoDec *self = GST_AMC_VIDEO_DEC (decoder);
463 
464   GST_DEBUG_OBJECT (self, "Closing decoder");
465 
466   if (self->downstream_supports_gl
467       && self->codec_config == AMC_CODEC_CONFIG_WITH_SURFACE) {
468     g_mutex_lock (&self->gl_lock);
469     GST_INFO_OBJECT (self, "shutting down gl queue pushed %u ready %u "
470         "released %u", self->gl_pushed_frame_count, self->gl_ready_frame_count,
471         self->gl_released_frame_count);
472 
473     g_queue_free_full (self->gl_queue, (GDestroyNotify) _gl_sync_unref);
474     self->gl_queue = g_queue_new ();
475     g_mutex_unlock (&self->gl_lock);
476 
477     if (self->gl_mem_attached)
478       gst_gl_context_thread_add (self->gl_context,
479           (GstGLContextThreadFunc) _dettach_mem_from_context, self);
480   }
481   self->gl_pushed_frame_count = 0;
482   self->gl_ready_frame_count = 0;
483   self->gl_released_frame_count = 0;
484   self->gl_last_rendered_frame = 0;
485 
486   if (self->surface) {
487     gst_object_unref (self->surface);
488     self->surface = NULL;
489   }
490 
491   if (self->listener) {
492     JNIEnv *env = gst_amc_jni_get_env ();
493     GError *err = NULL;
494 
495     if (!gst_amc_jni_call_void_method (env, &err, self->listener,
496             self->set_context_id, GST_AMC_VIDEO_DEC_TO_JLONG (NULL))) {
497       GST_ERROR_OBJECT (self, "Failed to unset back pointer on the listener. "
498           "crashes/hangs may ensue: %s", err ? err->message : "Unknown");
499       GST_ELEMENT_ERROR_FROM_ERROR (self, err);
500     }
501 
502     gst_amc_jni_object_unref (env, self->listener);
503   }
504   self->listener = NULL;
505 
506   if (self->codec) {
507     GError *err = NULL;
508 
509     gst_amc_codec_release (self->codec, &err);
510     if (err)
511       GST_ELEMENT_WARNING_FROM_ERROR (self, err);
512 
513     gst_amc_codec_free (self->codec);
514   }
515 
516   self->started = FALSE;
517   self->flushing = TRUE;
518   self->downstream_supports_gl = FALSE;
519 
520   self->codec = NULL;
521   self->codec_config = AMC_CODEC_CONFIG_NONE;
522 
523   GST_DEBUG_OBJECT (self, "Freeing GL context: %" GST_PTR_FORMAT,
524       self->gl_context);
525   if (self->gl_context) {
526     gst_object_unref (self->gl_context);
527     self->gl_context = NULL;
528   }
529 
530   if (self->oes_mem) {
531     gst_memory_unref ((GstMemory *) self->oes_mem);
532     self->oes_mem = NULL;
533   }
534 
535   if (self->gl_display) {
536     gst_object_unref (self->gl_display);
537     self->gl_display = NULL;
538   }
539 
540   if (self->other_gl_context) {
541     gst_object_unref (self->other_gl_context);
542     self->other_gl_context = NULL;
543   }
544 
545   GST_DEBUG_OBJECT (self, "Closed decoder");
546 
547   return TRUE;
548 }
549 
550 static void
gst_amc_video_dec_finalize (GObject * object)
552 {
553   GstAmcVideoDec *self = GST_AMC_VIDEO_DEC (object);
554 
555   g_mutex_clear (&self->drain_lock);
556   g_cond_clear (&self->drain_cond);
557 
558   g_mutex_clear (&self->gl_lock);
559   g_cond_clear (&self->gl_cond);
560 
561   if (self->gl_queue) {
562     g_queue_free_full (self->gl_queue, (GDestroyNotify) _gl_sync_unref);
563     self->gl_queue = NULL;
564   }
565 
566   G_OBJECT_CLASS (parent_class)->finalize (object);
567 }
568 
569 static void
gst_amc_video_dec_set_context (GstElement * element, GstContext * context)
571 {
572   GstAmcVideoDec *self = GST_AMC_VIDEO_DEC (element);
573 
574   gst_gl_handle_set_context (element, context, &self->gl_display,
575       &self->other_gl_context);
576 
577   GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
578 }
579 
580 static GstStateChangeReturn
gst_amc_video_dec_change_state (GstElement * element, GstStateChange transition)
582 {
583   GstAmcVideoDec *self;
584   GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
585   GError *err = NULL;
586 
587   g_return_val_if_fail (GST_IS_AMC_VIDEO_DEC (element),
588       GST_STATE_CHANGE_FAILURE);
589   self = GST_AMC_VIDEO_DEC (element);
590 
591   GST_DEBUG_OBJECT (element, "changing state: %s => %s",
592       gst_element_state_get_name (GST_STATE_TRANSITION_CURRENT (transition)),
593       gst_element_state_get_name (GST_STATE_TRANSITION_NEXT (transition)));
594 
595   switch (transition) {
596     case GST_STATE_CHANGE_NULL_TO_READY:
597       break;
598     case GST_STATE_CHANGE_READY_TO_PAUSED:
599       self->downstream_flow_ret = GST_FLOW_OK;
600       self->draining = FALSE;
601       self->started = FALSE;
602       break;
603     case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
604       break;
605     case GST_STATE_CHANGE_PAUSED_TO_READY:
606       self->flushing = TRUE;
607       if (self->started) {
608         gst_amc_codec_flush (self->codec, &err);
609         if (err)
610           GST_ELEMENT_WARNING_FROM_ERROR (self, err);
611       }
612       g_mutex_lock (&self->drain_lock);
613       self->draining = FALSE;
614       g_cond_broadcast (&self->drain_cond);
615       g_mutex_unlock (&self->drain_lock);
616       break;
617     default:
618       break;
619   }
620 
621   if (ret == GST_STATE_CHANGE_FAILURE)
622     return ret;
623 
624   ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
625 
626   if (ret == GST_STATE_CHANGE_FAILURE)
627     return ret;
628 
629   switch (transition) {
630     case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
631       break;
632     case GST_STATE_CHANGE_PAUSED_TO_READY:
633       self->downstream_flow_ret = GST_FLOW_FLUSHING;
634       self->started = FALSE;
635       break;
636     default:
637       break;
638   }
639 
640   return ret;
641 }
642 
643 #define MAX_FRAME_DIST_TIME  (5 * GST_SECOND)
644 #define MAX_FRAME_DIST_FRAMES (100)
645 
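/* Map a MediaCodec output timestamp back to the pending GstVideoCodecFrame
 * with the closest queued timestamp.  Frames queued much earlier than the
 * match (more than MAX_FRAME_DIST_TIME or MAX_FRAME_DIST_FRAMES behind) are
 * assumed to never get an output buffer and are dropped here, so stale
 * frames don't accumulate in the base class. */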
646 static GstVideoCodecFrame *
_find_nearest_frame (GstAmcVideoDec * self, GstClockTime reference_timestamp)
648 {
649   GList *l, *best_l = NULL;
650   GList *finish_frames = NULL;
651   GstVideoCodecFrame *best = NULL;
652   guint64 best_timestamp = 0;
653   guint64 best_diff = G_MAXUINT64;
654   BufferIdentification *best_id = NULL;
655   GList *frames;
656 
657   frames = gst_video_decoder_get_frames (GST_VIDEO_DECODER (self));
658 
659   for (l = frames; l; l = l->next) {
660     GstVideoCodecFrame *tmp = l->data;
661     BufferIdentification *id = gst_video_codec_frame_get_user_data (tmp);
662     guint64 timestamp, diff;
663 
664     /* This happens for frames that were just added but
665      * which were not passed to the component yet. Ignore
666      * them here!
667      */
668     if (!id)
669       continue;
670 
671     timestamp = id->timestamp;
672 
673     if (timestamp > reference_timestamp)
674       diff = timestamp - reference_timestamp;
675     else
676       diff = reference_timestamp - timestamp;
677 
678     if (best == NULL || diff < best_diff) {
679       best = tmp;
680       best_timestamp = timestamp;
681       best_diff = diff;
682       best_l = l;
683       best_id = id;
684 
685       /* For frames without timestamp we simply take the first frame */
686       if ((reference_timestamp == 0 && !GST_CLOCK_TIME_IS_VALID (timestamp))
687           || diff == 0)
688         break;
689     }
690   }
691 
692   if (best_id) {
693     for (l = frames; l && l != best_l; l = l->next) {
694       GstVideoCodecFrame *tmp = l->data;
695       BufferIdentification *id = gst_video_codec_frame_get_user_data (tmp);
696       guint64 diff_time, diff_frames;
697 
698       if (id->timestamp > best_timestamp)
699         break;
700 
701       if (id->timestamp == 0 || best_timestamp == 0)
702         diff_time = 0;
703       else
704         diff_time = best_timestamp - id->timestamp;
705       diff_frames = best->system_frame_number - tmp->system_frame_number;
706 
707       if (diff_time > MAX_FRAME_DIST_TIME
708           || diff_frames > MAX_FRAME_DIST_FRAMES) {
709         finish_frames =
710             g_list_prepend (finish_frames, gst_video_codec_frame_ref (tmp));
711       }
712     }
713   }
714 
715   if (finish_frames) {
716     g_warning ("%s: Too old frames, bug in decoder -- please file a bug",
717         GST_ELEMENT_NAME (self));
718     for (l = finish_frames; l; l = l->next) {
719       gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), l->data);
720     }
721   }
722 
723   if (best)
724     gst_video_codec_frame_ref (best);
725 
726   g_list_foreach (frames, (GFunc) gst_video_codec_frame_unref, NULL);
727   g_list_free (frames);
728 
729   return best;
730 }
731 
732 static gboolean
gst_amc_video_dec_check_codec_config (GstAmcVideoDec * self)
734 {
735   gboolean ret = (self->codec_config == AMC_CODEC_CONFIG_NONE
736       || (self->codec_config == AMC_CODEC_CONFIG_WITH_SURFACE
737           && self->downstream_supports_gl)
738       || (self->codec_config == AMC_CODEC_CONFIG_WITHOUT_SURFACE
739           && !self->downstream_supports_gl));
740 
741   if (!ret) {
742     GST_ERROR_OBJECT
743         (self,
744         "Codec configuration (%d) is not compatible with downstream which %s support GL output",
745         self->codec_config, self->downstream_supports_gl ? "does" : "does not");
746   }
747 
748   return ret;
749 }
750 
751 static gboolean
gst_amc_video_dec_set_src_caps (GstAmcVideoDec * self, GstAmcFormat * format)
753 {
754   GstVideoCodecState *output_state;
755   const gchar *mime;
756   gint color_format, width, height;
757   gint stride, slice_height;
758   gint crop_left, crop_right;
759   gint crop_top, crop_bottom;
760   GstVideoFormat gst_format;
761   GstAmcVideoDecClass *klass = GST_AMC_VIDEO_DEC_GET_CLASS (self);
762   GError *err = NULL;
763   gboolean ret;
764 
765   if (!gst_amc_format_get_int (format, "color-format", &color_format, &err) ||
766       !gst_amc_format_get_int (format, "width", &width, &err) ||
767       !gst_amc_format_get_int (format, "height", &height, &err)) {
768     GST_ERROR_OBJECT (self, "Failed to get output format metadata: %s",
769         err->message);
770     g_clear_error (&err);
771     return FALSE;
772   }
773 
774   if (!gst_amc_format_get_int (format, "stride", &stride, &err) ||
775       !gst_amc_format_get_int (format, "slice-height", &slice_height, &err)) {
776     GST_ERROR_OBJECT (self, "Failed to get stride and slice-height: %s",
777         err->message);
778     g_clear_error (&err);
779     return FALSE;
780   }
781 
782   if (!gst_amc_format_get_int (format, "crop-left", &crop_left, &err) ||
783       !gst_amc_format_get_int (format, "crop-right", &crop_right, &err) ||
784       !gst_amc_format_get_int (format, "crop-top", &crop_top, &err) ||
785       !gst_amc_format_get_int (format, "crop-bottom", &crop_bottom, &err)) {
786     GST_ERROR_OBJECT (self, "Failed to get crop rectangle: %s", err->message);
787     g_clear_error (&err);
788     return FALSE;
789   }
790 
791   if (width == 0 || height == 0) {
792     GST_ERROR_OBJECT (self, "Height or width not set");
793     return FALSE;
794   }
795 
796   if (crop_bottom)
797     height = height - (height - crop_bottom - 1);
798   if (crop_top)
799     height = height - crop_top;
800 
801   if (crop_right)
802     width = width - (width - crop_right - 1);
803   if (crop_left)
804     width = width - crop_left;
805 
806   mime = caps_to_mime (self->input_state->caps);
807   if (!mime) {
808     GST_ERROR_OBJECT (self, "Failed to convert caps to mime");
809     return FALSE;
810   }
811 
812   if (self->codec_config == AMC_CODEC_CONFIG_WITH_SURFACE) {
813     gst_format = GST_VIDEO_FORMAT_RGBA;
814   } else {
815     gst_format =
816         gst_amc_color_format_to_video_format (klass->codec_info, mime,
817         color_format);
818   }
819 
820   if (gst_format == GST_VIDEO_FORMAT_UNKNOWN) {
821     GST_ERROR_OBJECT (self, "Unknown color format 0x%08x", color_format);
822     return FALSE;
823   }
824 
825   output_state = gst_video_decoder_set_output_state (GST_VIDEO_DECODER (self),
826       gst_format, width, height, self->input_state);
827 
828   /* FIXME: Special handling for multiview, untested */
829   if (color_format == COLOR_QCOM_FormatYVU420SemiPlanar32mMultiView) {
830     gst_video_multiview_video_info_change_mode (&output_state->info,
831         GST_VIDEO_MULTIVIEW_MODE_TOP_BOTTOM, GST_VIDEO_MULTIVIEW_FLAGS_NONE);
832   }
833 
834   memset (&self->color_format_info, 0, sizeof (self->color_format_info));
835   if (self->codec_config == AMC_CODEC_CONFIG_WITH_SURFACE) {
836     if (output_state->caps)
837       gst_caps_unref (output_state->caps);
838     output_state->caps = gst_video_info_to_caps (&output_state->info);
839     gst_caps_set_features (output_state->caps, 0,
840         gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_GL_MEMORY, NULL));
841     gst_caps_set_simple (output_state->caps, "texture-target", G_TYPE_STRING,
842         "external-oes", NULL);
843     GST_DEBUG_OBJECT (self, "Configuring for Surface output");
844 
845     /* The width/height values are used in other places for
846      * checking if the resolution changed. Set everything
847      * that makes sense here
848      */
849     self->color_format_info.color_format = COLOR_FormatAndroidOpaque;
850     self->color_format_info.width = width;
851     self->color_format_info.height = height;
852     self->color_format_info.crop_left = crop_left;
853     self->color_format_info.crop_right = crop_right;
854     self->color_format_info.crop_top = crop_top;
855     self->color_format_info.crop_bottom = crop_bottom;
856 
857     goto out;
858   }
859 
860   self->format = gst_format;
861   self->width = width;
862   self->height = height;
863   if (!gst_amc_color_format_info_set (&self->color_format_info,
864           klass->codec_info, mime, color_format, width, height, stride,
865           slice_height, crop_left, crop_right, crop_top, crop_bottom)) {
866     GST_ERROR_OBJECT (self, "Failed to set up GstAmcColorFormatInfo");
867     return FALSE;
868   }
869 
870   GST_DEBUG_OBJECT (self,
871       "Color format info: {color_format=%d (0x%08x), width=%d, height=%d, "
872       "stride=%d, slice-height=%d, crop-left=%d, crop-top=%d, "
873       "crop-right=%d, crop-bottom=%d, frame-size=%d}",
874       self->color_format_info.color_format,
875       self->color_format_info.color_format, self->color_format_info.width,
876       self->color_format_info.height, self->color_format_info.stride,
877       self->color_format_info.slice_height, self->color_format_info.crop_left,
878       self->color_format_info.crop_top, self->color_format_info.crop_right,
879       self->color_format_info.crop_bottom, self->color_format_info.frame_size);
880 
881 out:
882   ret = gst_video_decoder_negotiate (GST_VIDEO_DECODER (self));
883 
884   gst_video_codec_state_unref (output_state);
885   self->input_state_changed = FALSE;
886 
887   return ret;
888 }
889 
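/* Copy the contents of a MediaCodec output buffer into a GstBuffer laid out
 * according to the negotiated GstVideoInfo, converting between the codec's
 * color format layout and the GStreamer one.  Not used in Surface (GL)
 * output mode, where color_format is COLOR_FormatAndroidOpaque and the
 * frame never passes through system memory. */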
890 static gboolean
gst_amc_video_dec_fill_buffer (GstAmcVideoDec * self, GstAmcBuffer * buf,
892     const GstAmcBufferInfo * buffer_info, GstBuffer * outbuf)
893 {
894   GstVideoCodecState *state =
895       gst_video_decoder_get_output_state (GST_VIDEO_DECODER (self));
896   GstVideoInfo *info = &state->info;
897   gboolean ret = FALSE;
898 
899   if (self->color_format_info.color_format == COLOR_FormatAndroidOpaque)
900     return FALSE;
901 
902   ret =
903       gst_amc_color_format_copy (&self->color_format_info, buf, buffer_info,
904       info, outbuf, COLOR_FORMAT_COPY_OUT);
905 
906   gst_video_codec_state_unref (state);
907   return ret;
908 }
909 
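/* Affine matrix that flips the video vertically (y' = 1 - y).  It is applied
 * to the buffer's GstVideoAffineTransformationMeta together with the
 * inverted SurfaceTexture transform (see _gl_sync_render_unlocked) so that
 * downstream renders the external-oes texture the right way up. */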
910 static const gfloat yflip_matrix[16] = {
911   1.0f, 0.0f, 0.0f, 0.0f,
912   0.0f, -1.0f, 0.0f, 0.0f,
913   0.0f, 0.0f, 1.0f, 0.0f,
914   0.0f, 1.0f, 0.0f, 1.0f
915 };
916 
917 static void
_amc_gl_set_sync (GstGLSyncMeta * sync_meta, GstGLContext * context)
919 {
920 }
921 
922 static void
_gl_sync_release_buffer (struct gl_sync *sync, gboolean render)
924 {
925   GError *error = NULL;
926 
927   if (!sync->result->released) {
928     sync->released_ts = g_get_monotonic_time ();
929 
930     if ((gint) (sync->sink->gl_released_frame_count -
931             sync->sink->gl_ready_frame_count) > 0) {
932       guint diff =
933           sync->sink->gl_released_frame_count -
934           sync->sink->gl_ready_frame_count - 1u;
935       sync->sink->gl_ready_frame_count += diff;
936       GST_LOG ("gl_sync %p possible \'on_frame_available\' listener miss "
937           "detected, attempting to work around.  Jumping forward %u "
938           "frames for frame %u", sync, diff, sync->gl_frame_no);
939     }
940 
941     GST_TRACE ("gl_sync %p release_output_buffer idx %u frame %u render %s",
942         sync, sync->buffer_idx, sync->gl_frame_no, render ? "TRUE" : "FALSE");
943 
944     /* Release the frame into the surface */
945     sync->sink->gl_released_frame_count++;
946     if (!render) {
947       /* Advance the ready counter ourselves if we aren't going to render
948        * and therefore receive a listener callback */
949       sync->sink->gl_ready_frame_count++;
950     }
951 
952     if (!gst_amc_codec_release_output_buffer (sync->sink->codec,
953             sync->buffer_idx, render, &error)) {
954       GST_ERROR_OBJECT (sync->sink,
955           "gl_sync %p Failed to render buffer, index %d frame %u", sync,
956           sync->buffer_idx, sync->gl_frame_no);
957       goto out;
958     }
959     sync->result->released = TRUE;
960     sync->result->rendered = render;
961   }
962 
963 out:
964   if (error) {
965     if (sync->sink->gl_error == NULL)
966       sync->sink->gl_error = error;
967     else
968       g_clear_error (&error);
969   }
970 }
971 
972 static void
_gl_sync_release_next_buffer (struct gl_sync *sync, gboolean render)
974 {
975   GList *l;
976 
977   if ((l = _find_gl_sync_for_frame (sync->sink, sync->gl_frame_no + 1))) {
978     struct gl_sync *next = l->data;
979 
980     _gl_sync_release_buffer (next, render);
981   } else {
982     GST_TRACE ("gl_sync %p no next frame available", sync);
983   }
984 }
985 
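/* General 4x4 matrix inverse using the cofactor/adjugate method built from
 * 2x2 sub-determinants (s0..s5, c0..c5).  Returns 0 if the matrix is
 * singular (determinant == 0), in which case `out` is left untouched.
 * I(x,y) maps a 2-D index onto the flat 16-float array (I(x,y) == y*4 + x). */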
986 #define I(x,y) ((y)*4+(x))
987 static int
affine_inverse (float in[], float out[])
989 {
990   float s0, s1, s2, s3, s4, s5;
991   float c0, c1, c2, c3, c4, c5;
992   float det, invdet;
993 
994   s0 = in[0] * in[I (1, 1)] - in[I (1, 0)] * in[I (0, 1)];
995   s1 = in[0] * in[I (1, 2)] - in[I (1, 0)] * in[I (0, 2)];
996   s2 = in[0] * in[I (1, 3)] - in[I (1, 0)] * in[I (0, 3)];
997   s3 = in[1] * in[I (1, 2)] - in[I (1, 1)] * in[I (0, 2)];
998   s4 = in[1] * in[I (1, 3)] - in[I (1, 1)] * in[I (0, 3)];
999   s5 = in[2] * in[I (1, 3)] - in[I (1, 2)] * in[I (0, 3)];
1000 
1001   c0 = in[I (2, 0)] * in[I (3, 1)] - in[I (3, 0)] * in[I (2, 1)];
1002   c1 = in[I (2, 0)] * in[I (3, 2)] - in[I (3, 0)] * in[I (2, 2)];
1003   c2 = in[I (2, 0)] * in[I (3, 3)] - in[I (3, 0)] * in[I (2, 3)];
1004   c3 = in[I (2, 1)] * in[I (3, 2)] - in[I (3, 1)] * in[I (2, 2)];
1005   c4 = in[I (2, 1)] * in[I (3, 3)] - in[I (3, 1)] * in[I (2, 3)];
1006   c5 = in[I (2, 2)] * in[I (3, 3)] - in[I (3, 2)] * in[I (2, 3)];
1007 
1008   det = s0 * c5 - s1 * c4 + s2 * c3 + s3 * c2 - s4 * c1 + s5 * c0;
1009   if (det == 0.0)
1010     return 0;
1011   invdet = 1.0 / det;
1012 
1013   out[I (0, 0)] =
1014       (in[I (1, 1)] * c5 - in[I (1, 2)] * c4 + in[I (1, 3)] * c3) * invdet;
1015   out[I (0, 1)] =
1016       (-in[I (0, 1)] * c5 + in[I (0, 2)] * c4 - in[I (0, 3)] * c3) * invdet;
1017   out[I (0, 2)] =
1018       (in[I (3, 1)] * s5 - in[I (3, 2)] * s4 + in[I (3, 3)] * s3) * invdet;
1019   out[I (0, 3)] =
1020       (-in[I (2, 1)] * s5 + in[I (2, 2)] * s4 - in[I (2, 3)] * s3) * invdet;
1021 
1022   out[I (1, 0)] =
1023       (-in[I (1, 0)] * c5 + in[I (1, 2)] * c2 - in[I (1, 3)] * c1) * invdet;
1024   out[I (1, 1)] =
1025       (in[I (0, 0)] * c5 - in[I (0, 2)] * c2 + in[I (0, 3)] * c1) * invdet;
1026   out[I (1, 2)] =
1027       (-in[I (3, 0)] * s5 + in[I (3, 2)] * s2 - in[I (3, 3)] * s1) * invdet;
1028   out[I (1, 3)] =
1029       (in[I (2, 0)] * s5 - in[I (2, 2)] * s2 + in[I (2, 3)] * s1) * invdet;
1030 
1031   out[I (2, 0)] =
1032       (in[I (1, 0)] * c4 - in[I (1, 1)] * c2 + in[I (1, 3)] * c0) * invdet;
1033   out[I (2, 1)] =
1034       (-in[I (0, 0)] * c4 + in[I (0, 1)] * c2 - in[I (0, 3)] * c0) * invdet;
1035   out[I (2, 2)] =
1036       (in[I (3, 0)] * s4 - in[I (3, 1)] * s2 + in[I (3, 3)] * s0) * invdet;
1037   out[I (2, 3)] =
1038       (-in[I (2, 0)] * s4 + in[I (2, 1)] * s2 - in[I (2, 3)] * s0) * invdet;
1039 
1040   out[I (3, 0)] =
1041       (-in[I (1, 0)] * c3 + in[I (1, 1)] * c1 - in[I (1, 2)] * c0) * invdet;
1042   out[I (3, 1)] =
1043       (in[I (0, 0)] * c3 - in[I (0, 1)] * c1 + in[I (0, 2)] * c0) * invdet;
1044   out[I (3, 2)] =
1045       (-in[I (3, 0)] * s3 + in[I (3, 1)] * s1 - in[I (3, 2)] * s0) * invdet;
1046   out[I (3, 3)] =
1047       (in[I (2, 0)] * s3 - in[I (2, 1)] * s1 + in[I (2, 2)] * s0) * invdet;
1048 
1049   return 1;
1050 }
1051 
1052 #undef I
1053 
1054 /* caller should remove from the gl_queue after calling this function.
1055  * _gl_sync_release_buffer must be called before this function */
1056 static void
_gl_sync_render_unlocked (struct gl_sync *sync)
1058 {
1059   GstVideoAffineTransformationMeta *af_meta;
1060   GError *error = NULL;
1061   gfloat matrix[16];
1062   gint64 ts = 0;
1063 
1064   GST_TRACE ("gl_sync %p result %p render (updated:%u)", sync, sync->result,
1065       sync->result->updated);
1066 
1067   if (sync->result->updated || !sync->result->rendered)
1068     return;
1069 
1070   /* FIXME: if this ever starts returning valid values we should attempt
1071    * to use it */
1072   if (!gst_amc_surface_texture_get_timestamp (sync->surface->texture, &ts,
1073           &error)) {
    GST_ERROR_OBJECT (sync->sink, "Failed to get texture timestamp");
1075     GST_ELEMENT_ERROR_FROM_ERROR (sync->sink, error);
1076     goto out;
1077   }
1078   GST_TRACE ("gl_sync %p rendering timestamp before update %" G_GINT64_FORMAT,
1079       sync, ts);
1080 
1081   GST_TRACE ("gl_sync %p update_tex_image", sync);
1082   if (!gst_amc_surface_texture_update_tex_image (sync->surface->texture,
1083           &error)) {
1084     GST_ERROR_OBJECT (sync->sink, "Failed to update texture image");
1085     GST_ELEMENT_ERROR_FROM_ERROR (sync->sink, error);
1086     goto out;
1087   }
1088   GST_TRACE ("gl_sync result %p updated", sync->result);
1089   sync->result->updated = TRUE;
1090   sync->sink->gl_last_rendered_frame = sync->gl_frame_no;
1091 
1092   if (!gst_amc_surface_texture_get_timestamp (sync->surface->texture, &ts,
1093           &error)) {
    GST_ERROR_OBJECT (sync->sink, "Failed to get texture timestamp");
1095     GST_ELEMENT_ERROR_FROM_ERROR (sync->sink, error);
1096     goto out;
1097   }
1098   GST_TRACE ("gl_sync %p rendering timestamp after update %" G_GINT64_FORMAT,
1099       sync, ts);
1100 
1101   af_meta = gst_buffer_get_video_affine_transformation_meta (sync->buffer);
1102   if (!af_meta) {
    GST_WARNING ("Failed to retrieve the transformation meta from the "
1104         "gl_sync %p buffer %p", sync, sync->buffer);
1105   } else if (gst_amc_surface_texture_get_transform_matrix (sync->surface->
1106           texture, matrix, &error)) {
1107     gfloat inv_mat[16];
1108 
1109     /* The transform from mediacodec applies to the texture coords, but
1110      * GStreamer affine meta applies to the video geometry, which is the
1111      * opposite - so we invert it */
1112     if (affine_inverse (matrix, inv_mat)) {
1113       gst_video_affine_transformation_meta_apply_matrix (af_meta, inv_mat);
1114     } else {
1115       GST_WARNING
1116           ("Failed to invert display transform - the video won't display right. "
1117           "Transform matrix [ %f %f %f %f, %f %f %f %f, %f %f %f %f, %f %f %f %f ]",
1118           matrix[0], matrix[1], matrix[2], matrix[3], matrix[4], matrix[5],
1119           matrix[6], matrix[7], matrix[8], matrix[9], matrix[10], matrix[11],
1120           matrix[12], matrix[13], matrix[14], matrix[15]);
1121     }
1122     gst_video_affine_transformation_meta_apply_matrix (af_meta, yflip_matrix);
1123   }
1124 
1125   GST_LOG ("gl_sync %p successfully updated SurfaceTexture %p into "
1126       "OES texture %u", sync, sync->surface->texture, sync->oes_mem->tex_id);
1127 
1128 out:
1129   if (error) {
1130     if (sync->sink->gl_error == NULL)
1131       sync->sink->gl_error = error;
1132     else
1133       g_clear_error (&error);
1134   }
1135 
1136   _gl_sync_release_next_buffer (sync, TRUE);
1137 }
1138 
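/* Wait on sink->gl_cond until the frame-available callback has advanced
 * gl_ready_frame_count past this sync's frame, the result has already been
 * updated, or end_time is reached.  Returns FALSE on timeout or when an
 * out-of-order wait is detected; the caller notes the failure but still
 * tries to render whatever is available. */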
1139 static gboolean
_amc_gl_possibly_wait_for_gl_sync (struct gl_sync *sync, gint64 end_time)
1141 {
1142   GST_TRACE ("gl_sync %p waiting for frame %u current %u updated %u ", sync,
1143       sync->gl_frame_no, sync->sink->gl_ready_frame_count,
1144       sync->result->updated);
1145 
1146   if ((gint) (sync->sink->gl_last_rendered_frame - sync->gl_frame_no) > 0) {
1147     GST_ERROR ("gl_sync %p unsuccessfully waited for frame %u. out of order "
1148         "wait detected", sync, sync->gl_frame_no);
1149     return FALSE;
1150   }
1151 
  /* The number of frame callbacks (gl_ready_frame_count) does not map
   * directly to the number of pushed buffers (gl_pushed_frame_count)
1154    * or even, the number of released buffers (gl_released_frame_count)
1155    * as, from the frameworks/native/include/gui/ConsumerBase.h file,
1156    *
1157    *    "...frames that are queued while in asynchronous mode only trigger the
1158    *    callback if no previous frames are pending."
1159    *
1160    * As a result, we need to advance the ready counter somehow ourselves when
1161    * such events happen. There is no reliable way of knowing when/if the frame
   * listener is going to fire.  The only unique identifier,
1163    * SurfaceTexture::get_timestamp seems to always return 0.
1164    *
1165    * The maximum queue size as defined in
1166    * frameworks/native/include/gui/BufferQueue.h
1167    * is 32 of which a maximum of 30 can be acquired at a time so we picked a
1168    * number less than that to wait for before updating the ready frame count.
1169    */
1170 
1171   while (!sync->result->updated
1172       && (gint) (sync->sink->gl_ready_frame_count - sync->gl_frame_no) < 0) {
    /* The time limit is needed, otherwise when amc decides not to emit the
     * frame listener (say, on orientation changes) we don't wait forever */
1175     if (end_time == -1 || !g_cond_wait_until (&sync->sink->gl_cond,
1176             &sync->sink->gl_lock, end_time)) {
1177       GST_LOG ("gl_sync %p unsuccessfully waited for frame %u", sync,
1178           sync->gl_frame_no);
1179       return FALSE;
1180     }
1181   }
1182   GST_LOG ("gl_sync %p successfully waited for frame %u", sync,
1183       sync->gl_frame_no);
1184 
1185   return TRUE;
1186 }
1187 
1188 static gboolean
_amc_gl_iterate_queue_unlocked (GstGLSyncMeta * sync_meta, gboolean wait)
1190 {
1191   struct gl_sync *sync = sync_meta->data;
1192   struct gl_sync *tmp;
1193   gboolean ret = TRUE;
1194   gint64 end_time;
1195 
1196   while ((tmp = g_queue_peek_head (sync->sink->gl_queue))) {
1197     /* skip frames that are ahead of the current wait frame */
1198     if ((gint) (sync->gl_frame_no - tmp->gl_frame_no) < 0) {
1199       GST_TRACE ("gl_sync %p frame %u is ahead of gl_sync %p frame %u", tmp,
1200           tmp->gl_frame_no, sync, sync->gl_frame_no);
1201       break;
1202     }
1203 
1204     _gl_sync_release_buffer (tmp, wait);
1205 
1206     /* Frames are currently pushed in order and waits need to be performed
1207      * in the same order */
1208 
1209     end_time = wait ? 30 * G_TIME_SPAN_MILLISECOND + tmp->released_ts : -1;
1210     if (!_amc_gl_possibly_wait_for_gl_sync (tmp, end_time))
1211       ret = FALSE;
1212 
1213     _gl_sync_render_unlocked (tmp);
1214 
1215     g_queue_pop_head (tmp->sink->gl_queue);
1216     _gl_sync_unref (tmp);
1217   }
1218 
1219   return ret;
1220 }
1221 
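/* The sync meta wait/wait_cpu hooks run the queue iteration above through
 * gst_gl_context_thread_add, so that update_tex_image executes on the GL
 * thread the SurfaceTexture was attached to (see _attach_mem_to_context),
 * with the decoder's gl_lock held for the duration of the iteration. */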
1222 struct gl_wait
1223 {
1224   GstGLSyncMeta *sync_meta;
1225   gboolean ret;
1226 };
1227 
1228 static void
_amc_gl_wait_gl (GstGLContext * context, struct gl_wait *wait)
1230 {
1231   struct gl_sync *sync = wait->sync_meta->data;
1232 
1233   g_mutex_lock (&sync->sink->gl_lock);
1234   wait->ret = _amc_gl_iterate_queue_unlocked (wait->sync_meta, TRUE);
1235   g_mutex_unlock (&sync->sink->gl_lock);
1236 }
1237 
1238 static void
_amc_gl_wait (GstGLSyncMeta * sync_meta, GstGLContext * context)
1240 {
1241   struct gl_sync *sync = sync_meta->data;
1242   struct gl_wait wait;
1243 
1244   wait.sync_meta = sync_meta;
1245   wait.ret = FALSE;
1246   gst_gl_context_thread_add (context,
1247       (GstGLContextThreadFunc) _amc_gl_wait_gl, &wait);
1248 
1249   if (!wait.ret)
1250     GST_WARNING ("gl_sync %p could not wait for frame, took too long", sync);
1251 }
1252 
1253 static void
_amc_gl_copy (GstGLSyncMeta * src, GstBuffer * sbuffer, GstGLSyncMeta * dest,
1255     GstBuffer * dbuffer)
1256 {
1257   struct gl_sync *sync = src->data;
1258   struct gl_sync *tmp;
1259 
1260   tmp = g_new0 (struct gl_sync, 1);
1261 
1262   GST_TRACE ("copying gl_sync %p to %p", sync, tmp);
1263 
1264   g_mutex_lock (&sync->sink->gl_lock);
1265 
1266   tmp->refcount = 1;
1267   tmp->sink = sync->sink;
1268   tmp->buffer = dbuffer;
1269   tmp->oes_mem = (GstGLMemory *) gst_memory_ref ((GstMemory *) sync->oes_mem);
1270   tmp->surface = g_object_ref (sync->surface);
1271   tmp->gl_frame_no = sync->gl_frame_no;
1272   tmp->released_ts = sync->released_ts;
1273   tmp->result = sync->result;
1274   _gl_sync_result_ref (tmp->result);
1275   dest->data = tmp;
1276 
1277   g_mutex_unlock (&sync->sink->gl_lock);
1278 }
1279 
1280 static void
_amc_gl_render_on_free (GstGLContext * context, GstGLSyncMeta * sync_meta)
1282 {
1283   struct gl_sync *sync = sync_meta->data;
1284 
1285   g_mutex_lock (&sync->sink->gl_lock);
1286   /* just render as many frames as we have */
1287   _amc_gl_iterate_queue_unlocked (sync_meta, FALSE);
1288   g_mutex_unlock (&sync->sink->gl_lock);
1289 }
1290 
1291 static void
_amc_gl_free (GstGLSyncMeta * sync_meta, GstGLContext * context)
1293 {
1294   struct gl_sync *sync = sync_meta->data;
1295 
1296   /* The wait render queue inside android is not very deep so when we drop
1297    * frames we need to signal that we have rendered them if we have any chance
1298    * of keeping up between the decoder, the android GL queue and downstream
1299    * OpenGL. If we don't do this, once we start dropping frames downstream,
   * it is nearly impossible for the pipeline to catch up. */
1301   gst_gl_context_thread_add (context,
1302       (GstGLContextThreadFunc) _amc_gl_render_on_free, sync_meta);
1303   _gl_sync_unref (sync);
1304 }
1305 
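/* Source pad task: dequeue one output buffer from MediaCodec (waiting at
 * most 100ms), match it to a pending GstVideoCodecFrame and push it
 * downstream.  In Surface (GL) mode the decoded data stays inside
 * MediaCodec: a gl_sync is queued here and the buffer is released into the
 * SurfaceTexture later, driven by the GstGLSyncMeta hooks (wait/free), or
 * immediately for the very first buffer. */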
1306 static void
gst_amc_video_dec_loop (GstAmcVideoDec * self)
1308 {
1309   GstVideoCodecFrame *frame;
1310   GstFlowReturn flow_ret = GST_FLOW_OK;
1311   GstClockTimeDiff deadline;
1312   gboolean is_eos;
1313   GstAmcBuffer *buf;
1314   GstAmcBufferInfo buffer_info;
1315   gint idx;
1316   GError *err = NULL;
1317   gboolean release_buffer = TRUE;
1318 
1319   GST_VIDEO_DECODER_STREAM_LOCK (self);
1320 
1321 retry:
1322   /*if (self->input_state_changed) {
1323      idx = INFO_OUTPUT_FORMAT_CHANGED;
1324      } else { */
1325   GST_DEBUG_OBJECT (self, "Waiting for available output buffer");
1326   GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1327   /* Wait at most 100ms here, some codecs don't fail dequeueing if
1328    * the codec is flushing, causing deadlocks during shutdown */
1329   idx =
1330       gst_amc_codec_dequeue_output_buffer (self->codec, &buffer_info, 100000,
1331       &err);
1332   GST_VIDEO_DECODER_STREAM_LOCK (self);
1333   /*} */
1334 
1335   GST_DEBUG_OBJECT (self, "dequeueOutputBuffer() returned %d (0x%x)", idx, idx);
1336 
1337   if (idx < 0) {
1338     if (self->flushing) {
1339       g_clear_error (&err);
1340       goto flushing;
1341     }
1342 
1343     switch (idx) {
1344       case INFO_OUTPUT_BUFFERS_CHANGED:
1345         /* Handled internally */
1346         g_assert_not_reached ();
1347         break;
1348       case INFO_OUTPUT_FORMAT_CHANGED:{
1349         GstAmcFormat *format;
1350         gchar *format_string;
1351 
1352         GST_DEBUG_OBJECT (self, "Output format has changed");
1353 
1354         format = gst_amc_codec_get_output_format (self->codec, &err);
1355         if (!format)
1356           goto format_error;
1357 
1358         format_string = gst_amc_format_to_string (format, &err);
        if (!format_string) {
1360           gst_amc_format_free (format);
1361           goto format_error;
1362         }
1363         GST_DEBUG_OBJECT (self, "Got new output format: %s", format_string);
1364         g_free (format_string);
1365 
1366         if (!gst_amc_video_dec_set_src_caps (self, format)) {
1367           gst_amc_format_free (format);
1368           goto format_error;
1369         }
1370         gst_amc_format_free (format);
1371 
1372         goto retry;
1373       }
1374       case INFO_TRY_AGAIN_LATER:
1375         GST_DEBUG_OBJECT (self, "Dequeueing output buffer timed out");
1376         goto retry;
1377       case G_MININT:
1378         GST_ERROR_OBJECT (self, "Failure dequeueing output buffer");
1379         goto dequeue_error;
1380       default:
1381         g_assert_not_reached ();
1382         break;
1383     }
1384 
1385     goto retry;
1386   }
1387 
1388   GST_DEBUG_OBJECT (self,
1389       "Got output buffer at index %d: offset %d size %d time %" G_GINT64_FORMAT
1390       " flags 0x%08x", idx, buffer_info.offset, buffer_info.size,
1391       buffer_info.presentation_time_us, buffer_info.flags);
1392 
1393   buf = gst_amc_codec_get_output_buffer (self->codec, idx, &err);
1394   if (err) {
1395     if (self->flushing) {
1396       g_clear_error (&err);
1397       goto flushing;
1398     }
1399     goto failed_to_get_output_buffer;
1400   }
1401 
1402   if (self->codec_config != AMC_CODEC_CONFIG_WITH_SURFACE && !buf)
1403     goto got_null_output_buffer;
1404 
1405   frame =
1406       _find_nearest_frame (self,
1407       gst_util_uint64_scale (buffer_info.presentation_time_us, GST_USECOND, 1));
1408 
1409   is_eos = ! !(buffer_info.flags & BUFFER_FLAG_END_OF_STREAM);
1410 
1411   if (frame
1412       && (deadline =
1413           gst_video_decoder_get_max_decode_time (GST_VIDEO_DECODER (self),
1414               frame)) < 0) {
1415     GST_WARNING_OBJECT (self,
1416         "Frame is too late, dropping (deadline %" GST_STIME_FORMAT ")",
1417         GST_STIME_ARGS (deadline));
1418     flow_ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame);
1419   } else if (frame && self->codec_config == AMC_CODEC_CONFIG_WITH_SURFACE) {
1420     GstBuffer *outbuf;
1421     GstGLSyncMeta *sync_meta;
1422     GstVideoCodecState *state;
1423     struct gl_sync *sync;
1424     gboolean first_buffer = FALSE;
1425 
1426     g_mutex_lock (&self->gl_lock);
1427     if (self->gl_error) {
1428       GST_ELEMENT_ERROR_FROM_ERROR (self, self->gl_error);
1429       g_mutex_unlock (&self->gl_lock);
1430       goto gl_output_error;
1431     }
1432     g_mutex_unlock (&self->gl_lock);
1433 
1434     outbuf = gst_buffer_new ();
1435 
1436     state = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (self));
1437 
1438     if (!self->oes_mem) {
1439       GstGLBaseMemoryAllocator *base_mem_alloc;
1440       GstGLVideoAllocationParams *params;
1441 
1442       base_mem_alloc =
1443           GST_GL_BASE_MEMORY_ALLOCATOR (gst_allocator_find
1444           (GST_GL_MEMORY_ALLOCATOR_NAME));
1445 
1446       params = gst_gl_video_allocation_params_new (self->gl_context, NULL,
1447           &state->info, 0, NULL, GST_GL_TEXTURE_TARGET_EXTERNAL_OES,
1448           GST_GL_RGBA);
1449 
1450       self->oes_mem = (GstGLMemory *) gst_gl_base_memory_alloc (base_mem_alloc,
1451           (GstGLAllocationParams *) params);
1452       gst_gl_allocation_params_free ((GstGLAllocationParams *) params);
1453       gst_object_unref (base_mem_alloc);
1454 
1455       gst_gl_context_thread_add (self->gl_context,
1456           (GstGLContextThreadFunc) _attach_mem_to_context, self);
1457 
1458       first_buffer = TRUE;
1459     }
1460 
1461     gst_video_codec_state_unref (state);
1462 
1463     gst_buffer_append_memory (outbuf,
1464         gst_memory_ref ((GstMemory *) self->oes_mem));
1465 
1466     sync = g_new0 (struct gl_sync, 1);
1467     sync->refcount = 1;
1468     sync->sink = g_object_ref (self);
1469     sync->buffer = outbuf;
1470     sync->surface = g_object_ref (self->surface);
1471     sync->oes_mem =
1472         (GstGLMemory *) gst_memory_ref ((GstMemory *) self->oes_mem);
1473     sync->buffer_idx = idx;
1474     sync->result = g_new0 (struct gl_sync_result, 1);
1475     sync->result->refcount = 1;
1476     sync->result->updated = FALSE;
1477 
1478     GST_TRACE ("new gl_sync %p result %p", sync, sync->result);
1479 
1480     sync_meta = gst_buffer_add_gl_sync_meta_full (self->gl_context, outbuf,
1481         sync);
1482     sync_meta->set_sync = _amc_gl_set_sync;
1483     sync_meta->wait = _amc_gl_wait;
1484     sync_meta->wait_cpu = _amc_gl_wait;
1485     sync_meta->copy = _amc_gl_copy;
1486     sync_meta->free = _amc_gl_free;
1487 
1488     /* The meta needs to be created now:
1489      * Later (in _gl_sync_render_unlocked) the buffer will be locked.
1490      */
1491     gst_buffer_add_video_affine_transformation_meta (outbuf);
1492 
1493     g_mutex_lock (&self->gl_lock);
1494 
1495     self->gl_pushed_frame_count++;
1496     sync->gl_frame_no = self->gl_pushed_frame_count;
1497     g_queue_push_tail (self->gl_queue, _gl_sync_ref (sync));
1498 
1499     if (first_buffer) {
1500       _gl_sync_release_buffer (sync, TRUE);
1501       if (self->gl_error) {
1502         gst_buffer_unref (outbuf);
1503         g_mutex_unlock (&self->gl_lock);
1504         goto gl_output_error;
1505       }
1506     }
1507     g_mutex_unlock (&self->gl_lock);
1508 
1509     GST_DEBUG_OBJECT (self, "push GL frame %u", sync->gl_frame_no);
1510     frame->output_buffer = outbuf;
1511     flow_ret = gst_video_decoder_finish_frame (GST_VIDEO_DECODER (self), frame);
1512 
1513     release_buffer = FALSE;
1514   } else if (self->codec_config == AMC_CODEC_CONFIG_WITHOUT_SURFACE && !frame
1515       && buffer_info.size > 0) {
1516     GstBuffer *outbuf;
1517 
1518     /* This sometimes happens at EOS or if the input is not properly framed,
1519      * let's handle it gracefully by allocating a new buffer for the current
1520      * caps and filling it
1521      */
1522     GST_ERROR_OBJECT (self, "No corresponding frame found");
1523 
1524     outbuf =
1525         gst_video_decoder_allocate_output_buffer (GST_VIDEO_DECODER (self));
1526 
1527     if (!gst_amc_video_dec_fill_buffer (self, buf, &buffer_info, outbuf)) {
1528       gst_buffer_unref (outbuf);
1529       if (!gst_amc_codec_release_output_buffer (self->codec, idx, FALSE, &err))
1530         GST_ERROR_OBJECT (self, "Failed to release output buffer index %d",
1531             idx);
1532       if (err && !self->flushing)
1533         GST_ELEMENT_WARNING_FROM_ERROR (self, err);
1534       g_clear_error (&err);
1535       gst_amc_buffer_free (buf);
1536       buf = NULL;
1537       goto invalid_buffer;
1538     }
1539 
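    /* MediaCodec reports presentation times in microseconds; scale to
     * GstClockTime nanoseconds (us * GST_USECOND). */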
1540     GST_BUFFER_PTS (outbuf) =
1541         gst_util_uint64_scale (buffer_info.presentation_time_us, GST_USECOND,
1542         1);
1543     flow_ret = gst_pad_push (GST_VIDEO_DECODER_SRC_PAD (self), outbuf);
1544   } else if (self->codec_config == AMC_CODEC_CONFIG_WITHOUT_SURFACE && frame
1545       && buffer_info.size > 0) {
1546     if ((flow_ret =
1547             gst_video_decoder_allocate_output_frame (GST_VIDEO_DECODER (self),
1548                 frame)) != GST_FLOW_OK) {
1549       GST_ERROR_OBJECT (self, "Failed to allocate buffer");
1550       if (!gst_amc_codec_release_output_buffer (self->codec, idx, FALSE, &err))
1551         GST_ERROR_OBJECT (self, "Failed to release output buffer index %d",
1552             idx);
1553       if (err && !self->flushing)
1554         GST_ELEMENT_WARNING_FROM_ERROR (self, err);
1555       g_clear_error (&err);
1556       gst_amc_buffer_free (buf);
1557       buf = NULL;
1558       goto flow_error;
1559     }
1560 
1561     if (!gst_amc_video_dec_fill_buffer (self, buf, &buffer_info,
1562             frame->output_buffer)) {
1563       gst_buffer_replace (&frame->output_buffer, NULL);
1564       gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame);
1565       if (!gst_amc_codec_release_output_buffer (self->codec, idx, FALSE, &err))
1566         GST_ERROR_OBJECT (self, "Failed to release output buffer index %d",
1567             idx);
1568       if (err && !self->flushing)
1569         GST_ELEMENT_WARNING_FROM_ERROR (self, err);
1570       g_clear_error (&err);
1571       gst_amc_buffer_free (buf);
1572       buf = NULL;
1573       goto invalid_buffer;
1574     }
1575 
1576     flow_ret = gst_video_decoder_finish_frame (GST_VIDEO_DECODER (self), frame);
1577   } else if (frame != NULL) {
1578     flow_ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame);
1579   }
1580 
1581   if (buf) {
1582     gst_amc_buffer_free (buf);
1583     buf = NULL;
1584   }
1585 
1586   if (release_buffer) {
1587     if (!gst_amc_codec_release_output_buffer (self->codec, idx, FALSE, &err)) {
1588       if (self->flushing) {
1589         g_clear_error (&err);
1590         goto flushing;
1591       }
1592       goto failed_release;
1593     }
1594   }
1595 
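  /* An EOS output buffer either completes an explicit drain (wake up the
   * thread waiting on drain_cond) or, for a normal EOS, turns the flow
   * return into GST_FLOW_EOS so the task shuts down below. */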
1596   if (is_eos || flow_ret == GST_FLOW_EOS) {
1597     GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1598     g_mutex_lock (&self->drain_lock);
1599     if (self->draining) {
1600       GST_DEBUG_OBJECT (self, "Drained");
1601       self->draining = FALSE;
1602       g_cond_broadcast (&self->drain_cond);
1603     } else if (flow_ret == GST_FLOW_OK) {
1604       GST_DEBUG_OBJECT (self, "Component signalled EOS");
1605       flow_ret = GST_FLOW_EOS;
1606     }
1607     g_mutex_unlock (&self->drain_lock);
1608     GST_VIDEO_DECODER_STREAM_LOCK (self);
1609   } else {
1610     GST_DEBUG_OBJECT (self, "Finished frame: %s", gst_flow_get_name (flow_ret));
1611   }
1612 
1613   self->downstream_flow_ret = flow_ret;
1614 
1615   if (flow_ret != GST_FLOW_OK)
1616     goto flow_error;
1617 
1618   GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1619 
1620   return;
1621 
1622 dequeue_error:
1623   {
1624     GST_ELEMENT_ERROR_FROM_ERROR (self, err);
1625     gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
1626     gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1627     self->downstream_flow_ret = GST_FLOW_ERROR;
1628     GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1629     g_mutex_lock (&self->drain_lock);
1630     self->draining = FALSE;
1631     g_cond_broadcast (&self->drain_cond);
1632     g_mutex_unlock (&self->drain_lock);
1633     return;
1634   }
1635 
1636 format_error:
1637   {
1638     if (err)
1639       GST_ELEMENT_ERROR_FROM_ERROR (self, err);
1640     else
1641       GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL),
1642           ("Failed to handle format"));
1643     gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
1644     gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1645     self->downstream_flow_ret = GST_FLOW_ERROR;
1646     GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1647     g_mutex_lock (&self->drain_lock);
1648     self->draining = FALSE;
1649     g_cond_broadcast (&self->drain_cond);
1650     g_mutex_unlock (&self->drain_lock);
1651     return;
1652   }
1653 failed_release:
1654   {
1655     GST_VIDEO_DECODER_ERROR_FROM_ERROR (self, err);
1656     gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
1657     gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1658     self->downstream_flow_ret = GST_FLOW_ERROR;
1659     GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1660     g_mutex_lock (&self->drain_lock);
1661     self->draining = FALSE;
1662     g_cond_broadcast (&self->drain_cond);
1663     g_mutex_unlock (&self->drain_lock);
1664     return;
1665   }
1666 flushing:
1667   {
1668     GST_DEBUG_OBJECT (self, "Flushing -- stopping task");
1669     gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1670     self->downstream_flow_ret = GST_FLOW_FLUSHING;
1671     GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1672     return;
1673   }
1674 
1675 flow_error:
1676   {
1677     if (flow_ret == GST_FLOW_EOS) {
1678       GST_DEBUG_OBJECT (self, "EOS");
1679       gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self),
1680           gst_event_new_eos ());
1681       gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1682     } else if (flow_ret < GST_FLOW_EOS) {
1683       GST_ELEMENT_FLOW_ERROR (self, flow_ret);
1684       gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self),
1685           gst_event_new_eos ());
1686       gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1687     } else if (flow_ret == GST_FLOW_FLUSHING) {
1688       GST_DEBUG_OBJECT (self, "Flushing -- stopping task");
1689       gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1690     }
1691     GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1692     g_mutex_lock (&self->drain_lock);
1693     self->draining = FALSE;
1694     g_cond_broadcast (&self->drain_cond);
1695     g_mutex_unlock (&self->drain_lock);
1696     return;
1697   }
1698 
1699 failed_to_get_output_buffer:
1700   {
1701     GST_VIDEO_DECODER_ERROR_FROM_ERROR (self, err);
1702     gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
1703     gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1704     self->downstream_flow_ret = GST_FLOW_ERROR;
1705     GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1706     g_mutex_lock (&self->drain_lock);
1707     self->draining = FALSE;
1708     g_cond_broadcast (&self->drain_cond);
1709     g_mutex_unlock (&self->drain_lock);
1710     return;
1711   }
1712 
1713 got_null_output_buffer:
1714   {
1715     GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
1716         ("Got no output buffer"));
1717     gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
1718     gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1719     self->downstream_flow_ret = GST_FLOW_ERROR;
1720     GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1721     g_mutex_lock (&self->drain_lock);
1722     self->draining = FALSE;
1723     g_cond_broadcast (&self->drain_cond);
1724     g_mutex_unlock (&self->drain_lock);
1725     return;
1726   }
1727 
1728 invalid_buffer:
1729   {
1730     GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
1731         ("Invalid sized input buffer"));
1732     gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
1733     gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1734     self->downstream_flow_ret = GST_FLOW_NOT_NEGOTIATED;
1735     GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1736     g_mutex_lock (&self->drain_lock);
1737     self->draining = FALSE;
1738     g_cond_broadcast (&self->drain_cond);
1739     g_mutex_unlock (&self->drain_lock);
1740     return;
1741   }
1742 gl_output_error:
1743   {
1744     if (buf) {
1745       gst_amc_buffer_free (buf);
1746       buf = NULL;
1747     }
1748     gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
1749     gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1750     self->downstream_flow_ret = GST_FLOW_NOT_NEGOTIATED;
1751     GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1752     g_mutex_lock (&self->drain_lock);
1753     self->draining = FALSE;
1754     g_cond_broadcast (&self->drain_cond);
1755     g_mutex_unlock (&self->drain_lock);
1756     return;
1757   }
1758 }
1759 
1760 static gboolean
1761 gst_amc_video_dec_start (GstVideoDecoder * decoder)
1762 {
1763   GstAmcVideoDec *self;
1764 
1765   self = GST_AMC_VIDEO_DEC (decoder);
1766   self->last_upstream_ts = 0;
1767   self->drained = TRUE;
1768   self->downstream_flow_ret = GST_FLOW_OK;
1769   self->started = FALSE;
1770   self->flushing = TRUE;
1771 
1772   return TRUE;
1773 }
1774 
1775 static gboolean
1776 gst_amc_video_dec_stop (GstVideoDecoder * decoder)
1777 {
1778   GstAmcVideoDec *self;
1779   GError *err = NULL;
1780 
1781   self = GST_AMC_VIDEO_DEC (decoder);
1782   GST_DEBUG_OBJECT (self, "Stopping decoder");
1783   self->flushing = TRUE;
1784   if (self->started) {
1785     gst_amc_codec_flush (self->codec, &err);
1786     if (err)
1787       GST_ELEMENT_WARNING_FROM_ERROR (self, err);
1788     gst_amc_codec_stop (self->codec, &err);
1789     if (err)
1790       GST_ELEMENT_WARNING_FROM_ERROR (self, err);
1791     self->started = FALSE;
1792   }
1793   gst_pad_stop_task (GST_VIDEO_DECODER_SRC_PAD (decoder));
1794 
1795   self->downstream_flow_ret = GST_FLOW_FLUSHING;
1796   self->drained = TRUE;
1797   g_mutex_lock (&self->drain_lock);
1798   self->draining = FALSE;
1799   g_cond_broadcast (&self->drain_cond);
1800   g_mutex_unlock (&self->drain_lock);
1801   g_free (self->codec_data);
1802   self->codec_data_size = 0;
1803   if (self->input_state)
1804     gst_video_codec_state_unref (self->input_state);
1805   self->input_state = NULL;
1806   GST_DEBUG_OBJECT (self, "Stopped decoder");
1807   return TRUE;
1808 }
1809 
1810 static jobject
1811 gst_amc_video_dec_new_on_frame_available_listener (GstAmcVideoDec * decoder,
1812     JNIEnv * env, GError ** err)
1813 {
1814   jobject listener = NULL;
1815   jclass listener_cls = NULL;
1816   jmethodID constructor_id = 0;
1817 
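  /* Bind the Java callback native_onFrameAvailable(long, SurfaceTexture) to
   * our C handler; the jlong argument carries the GstAmcVideoDec pointer that
   * is set via setContext() below. */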
1818   JNINativeMethod amcOnFrameAvailableListener = {
1819     "native_onFrameAvailable",
1820     "(JLandroid/graphics/SurfaceTexture;)V",
1821     (void *) gst_amc_video_dec_on_frame_available,
1822   };
1823 
1824   listener_cls =
1825       gst_amc_jni_get_application_class (env,
1826       "org/freedesktop/gstreamer/androidmedia/GstAmcOnFrameAvailableListener",
1827       err);
1828   if (!listener_cls) {
1829     return NULL;
1830   }
1831 
1832   (*env)->RegisterNatives (env, listener_cls, &amcOnFrameAvailableListener, 1);
1833   if ((*env)->ExceptionCheck (env)) {
1834     (*env)->ExceptionClear (env);
1835     goto done;
1836   }
1837 
1838   constructor_id =
1839       gst_amc_jni_get_method_id (env, err, listener_cls, "<init>", "()V");
1840   if (!constructor_id) {
1841     goto done;
1842   }
1843 
1844   decoder->set_context_id =
1845       gst_amc_jni_get_method_id (env, err, listener_cls, "setContext", "(J)V");
1846   if (!decoder->set_context_id) {
1847     goto done;
1848   }
1849 
1850   listener =
1851       gst_amc_jni_new_object (env, err, TRUE, listener_cls, constructor_id);
1852   if (!listener) {
1853     goto done;
1854   }
1855 
1856   if (!gst_amc_jni_call_void_method (env, err, listener,
1857           decoder->set_context_id, GST_AMC_VIDEO_DEC_TO_JLONG (decoder))) {
1858     gst_amc_jni_object_unref (env, listener);
1859     listener = NULL;
1860   }
1861 
1862 done:
1863   gst_amc_jni_object_unref (env, listener_cls);
1864 
1865   return listener;
1866 }
1867 
1868 static gboolean
1869 gst_amc_video_dec_set_format (GstVideoDecoder * decoder,
1870     GstVideoCodecState * state)
1871 {
1872   GstAmcVideoDec *self;
1873   GstAmcVideoDecClass *klass;
1874   GstAmcFormat *format;
1875   const gchar *mime;
1876   gboolean is_format_change = FALSE;
1877   gboolean needs_disable = FALSE;
1878   gchar *format_string;
1879   guint8 *codec_data = NULL;
1880   gsize codec_data_size = 0;
1881   GError *err = NULL;
1882   jobject jsurface = NULL;
1883 
1884   self = GST_AMC_VIDEO_DEC (decoder);
1885   klass = GST_AMC_VIDEO_DEC_GET_CLASS (self);
1886 
1887   GST_DEBUG_OBJECT (self, "Setting new caps %" GST_PTR_FORMAT, state->caps);
1888 
1889   /* Check if the caps change is a real format change or if only irrelevant
1890    * parts of the caps have changed or nothing at all.
1891    */
1892   is_format_change |= self->color_format_info.width != state->info.width;
1893   is_format_change |= self->color_format_info.height != state->info.height;
1894   if (state->codec_data) {
1895     GstMapInfo cminfo;
1896 
1897     gst_buffer_map (state->codec_data, &cminfo, GST_MAP_READ);
1898     codec_data = g_memdup (cminfo.data, cminfo.size);
1899     codec_data_size = cminfo.size;
1900 
1901     is_format_change |= (!self->codec_data
1902         || self->codec_data_size != codec_data_size
1903         || memcmp (self->codec_data, codec_data, codec_data_size) != 0);
1904     gst_buffer_unmap (state->codec_data, &cminfo);
1905   } else if (self->codec_data) {
1906     is_format_change |= TRUE;
1907   }
1908 
1909   needs_disable = self->started;
1910 
1911   /* If the component is already started and a real format change
1912    * happens we have to restart it. If no real format change
1913    * happened we can just exit here.
1914    */
1915   if (needs_disable && !is_format_change) {
1916     g_free (codec_data);
1917     codec_data = NULL;
1918     codec_data_size = 0;
1919 
1920     /* Framerate or something minor changed */
1921     self->input_state_changed = TRUE;
1922     if (self->input_state)
1923       gst_video_codec_state_unref (self->input_state);
1924     self->input_state = gst_video_codec_state_ref (state);
1925     GST_DEBUG_OBJECT (self,
1926         "Already running and caps did not change the format");
1927     return TRUE;
1928   }
1929 
1930   if (needs_disable && is_format_change) {
1931     gst_amc_video_dec_drain (self);
1932     GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1933     gst_amc_video_dec_stop (GST_VIDEO_DECODER (self));
1934     GST_VIDEO_DECODER_STREAM_LOCK (self);
1935     gst_amc_video_dec_close (GST_VIDEO_DECODER (self));
1936     if (!gst_amc_video_dec_open (GST_VIDEO_DECODER (self))) {
1937       GST_ERROR_OBJECT (self, "Failed to open codec again");
1938       return FALSE;
1939     }
1940 
1941     if (!gst_amc_video_dec_start (GST_VIDEO_DECODER (self))) {
1942       GST_ERROR_OBJECT (self, "Failed to start codec again");
1943     }
1944   }
1945   /* srcpad task is not running at this point */
1946   if (self->input_state)
1947     gst_video_codec_state_unref (self->input_state);
1948   self->input_state = NULL;
1949 
1950   g_free (self->codec_data);
1951   self->codec_data = codec_data;
1952   self->codec_data_size = codec_data_size;
1953 
1954   mime = caps_to_mime (state->caps);
1955   if (!mime) {
1956     GST_ERROR_OBJECT (self, "Failed to convert caps to mime");
1957     return FALSE;
1958   }
1959 
1960   format =
1961       gst_amc_format_new_video (mime, state->info.width, state->info.height,
1962       &err);
1963   if (!format) {
1964     GST_ERROR_OBJECT (self, "Failed to create video format");
1965     GST_ELEMENT_ERROR_FROM_ERROR (self, err);
1966     return FALSE;
1967   }
1968 
1969   /* FIXME: This buffer needs to be valid until the codec is stopped again */
1970   if (self->codec_data) {
1971     gst_amc_format_set_buffer (format, "csd-0", self->codec_data,
1972         self->codec_data_size, &err);
1973     if (err)
1974       GST_ELEMENT_WARNING_FROM_ERROR (self, err);
1975   }
1976 
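  /* Decide whether to output via an Android Surface (zero-copy GL path) or
   * into system memory, based on whether downstream can accept
   * video/x-raw(memory:GLMemory),format=RGBA. */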
1977   {
1978     gboolean downstream_supports_gl = FALSE;
1979     GstVideoDecoder *decoder = GST_VIDEO_DECODER (self);
1980     GstPad *src_pad = GST_VIDEO_DECODER_SRC_PAD (decoder);
1981     GstCaps *templ_caps = gst_pad_get_pad_template_caps (src_pad);
1982     GstCaps *downstream_caps = gst_pad_peer_query_caps (src_pad, templ_caps);
1983 
1984     gst_caps_unref (templ_caps);
1985 
1986     if (downstream_caps) {
1987       guint i, n;
1988       GstStaticCaps static_caps =
1989           GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE_WITH_FEATURES
1990           (GST_CAPS_FEATURE_MEMORY_GL_MEMORY, "RGBA"));
1991       GstCaps *gl_memory_caps = gst_static_caps_get (&static_caps);
1992 
1993       GST_DEBUG_OBJECT (self, "Available downstream caps: %" GST_PTR_FORMAT,
1994           downstream_caps);
1995 
1996       /* Check if downstream caps supports
1997        * video/x-raw(memory:GLMemory),format=RGBA */
1998       n = gst_caps_get_size (downstream_caps);
1999       for (i = 0; i < n; i++) {
2000         GstCaps *caps = NULL;
2001         GstStructure *structure = gst_caps_get_structure (downstream_caps, i);
2002         GstCapsFeatures *features = gst_caps_get_features (downstream_caps, i);
2003 
2004         caps = gst_caps_new_full (gst_structure_copy (structure), NULL);
2005         if (!caps)
2006           continue;
2007 
2008         gst_caps_set_features (caps, 0, gst_caps_features_copy (features));
2009 
2010         if (gst_caps_can_intersect (caps, gl_memory_caps)) {
2011           downstream_supports_gl = TRUE;
2012         }
2013 
2014         gst_caps_unref (caps);
2015         if (downstream_supports_gl)
2016           break;
2017       }
2018 
2019       gst_caps_unref (gl_memory_caps);
2020 
2021       /* If video/x-raw(memory:GLMemory),format=RGBA is supported,
2022        * update the video decoder output state accordingly and negotiate */
2023       if (downstream_supports_gl) {
2024         GstVideoCodecState *output_state = NULL;
2025         GstVideoCodecState *prev_output_state = NULL;
2026 
2027         prev_output_state = gst_video_decoder_get_output_state (decoder);
2028 
2029         output_state =
2030             gst_video_decoder_set_output_state (decoder, GST_VIDEO_FORMAT_RGBA,
2031             state->info.width, state->info.height, state);
2032 
2033         if (output_state->caps) {
2034           gst_caps_unref (output_state->caps);
2035         }
2036 
2037         output_state->caps = gst_video_info_to_caps (&output_state->info);
2038         gst_caps_set_features (output_state->caps, 0,
2039             gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_GL_MEMORY, NULL));
2040 
2041         /* gst_amc_video_dec_decide_allocation will update
2042          * self->downstream_supports_gl */
2043         if (!gst_video_decoder_negotiate (decoder)) {
2044           GST_ERROR_OBJECT (self, "Failed to negotiate");
2045 
2046           /* Rollback output state changes */
2047           if (prev_output_state) {
2048             output_state->info = prev_output_state->info;
2049             gst_caps_replace (&output_state->caps, prev_output_state->caps);
2050           } else {
2051             gst_video_info_init (&output_state->info);
2052             gst_caps_replace (&output_state->caps, NULL);
2053           }
2054         }
2055         if (prev_output_state) {
2056           gst_video_codec_state_unref (prev_output_state);
2057         }
2058       }
2059 
2060       gst_caps_unref (downstream_caps);
2061     }
2062   }
2063 
2064   GST_INFO_OBJECT (self, "GL output: %s",
2065       self->downstream_supports_gl ? "enabled" : "disabled");
2066 
2067   if (klass->codec_info->gl_output_only && !self->downstream_supports_gl) {
2068     GST_ERROR_OBJECT (self,
2069         "Codec only supports GL output but downstream does not");
2070     return FALSE;
2071   }
2072 
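  /* For GL output we need an output Surface: reuse an existing one, or create
   * a SurfaceTexture-backed Surface and hook up the frame-available listener
   * so the codec can be configured with it below. */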
2073   if (self->downstream_supports_gl && self->surface) {
2074     jsurface = self->surface->jobject;
2075   } else if (self->downstream_supports_gl && !self->surface) {
2076     gboolean ret = TRUE;
2077     JNIEnv *env = NULL;
2078     GstAmcSurfaceTexture *surface_texture = NULL;
2079 
2080     env = gst_amc_jni_get_env ();
2081     surface_texture = gst_amc_surface_texture_new (&err);
2082     if (!surface_texture) {
2083       GST_ELEMENT_ERROR_FROM_ERROR (self, err);
2084       return FALSE;
2085     }
2086 
2087     if (self->listener) {
2088       if (!gst_amc_jni_call_void_method (env, &err, self->listener,
2089               self->set_context_id, GST_AMC_VIDEO_DEC_TO_JLONG (NULL))) {
2090         ret = FALSE;
2091         goto done;
2092       }
2093 
2094       gst_amc_jni_object_unref (env, self->listener);
2095     }
2096     self->listener =
2097         gst_amc_video_dec_new_on_frame_available_listener (self, env, &err);
2098     if (!self->listener) {
2099       ret = FALSE;
2100       goto done;
2101     }
2102 
2103     if (!gst_amc_surface_texture_set_on_frame_available_listener
2104         (surface_texture, self->listener, &err)) {
2105       ret = FALSE;
2106       goto done;
2107     }
2108 
2109     self->surface = gst_amc_surface_new (surface_texture, &err);
    if (!self->surface) {
      ret = FALSE;
      goto done;
    }
2110     jsurface = self->surface->jobject;
2111 
2112   done:
2113     g_object_unref (surface_texture);
2114     if (!ret) {
2115       GST_ELEMENT_ERROR_FROM_ERROR (self, err);
2116       return FALSE;
2117     }
2118   }
2119 
2120   format_string = gst_amc_format_to_string (format, &err);
2121   if (err)
2122     GST_ELEMENT_WARNING_FROM_ERROR (self, err);
2123   GST_DEBUG_OBJECT (self, "Configuring codec with format: %s",
2124       GST_STR_NULL (format_string));
2125   g_free (format_string);
2126 
2127   if (!gst_amc_codec_configure (self->codec, format, jsurface, 0, &err)) {
2128     GST_ERROR_OBJECT (self, "Failed to configure codec");
2129     GST_ELEMENT_ERROR_FROM_ERROR (self, err);
2130     return FALSE;
2131   }
2132   if (jsurface) {
2133     self->codec_config = AMC_CODEC_CONFIG_WITH_SURFACE;
2134   } else {
2135     self->codec_config = AMC_CODEC_CONFIG_WITHOUT_SURFACE;
2136   }
2137 
2138   gst_amc_format_free (format);
2139 
2140   if (!gst_amc_codec_start (self->codec, &err)) {
2141     GST_ERROR_OBJECT (self, "Failed to start codec");
2142     GST_ELEMENT_ERROR_FROM_ERROR (self, err);
2143     return FALSE;
2144   }
2145 
2146   self->started = TRUE;
2147   self->input_state = gst_video_codec_state_ref (state);
2148   self->input_state_changed = TRUE;
2149 
2150   /* Start the srcpad loop again */
2151   self->flushing = FALSE;
2152   self->downstream_flow_ret = GST_FLOW_OK;
2153   gst_pad_start_task (GST_VIDEO_DECODER_SRC_PAD (self),
2154       (GstTaskFunction) gst_amc_video_dec_loop, decoder, NULL);
2155 
2156   return TRUE;
2157 }
2158 
2159 static gboolean
2160 gst_amc_video_dec_flush (GstVideoDecoder * decoder)
2161 {
2162   GstAmcVideoDec *self;
2163   GError *err = NULL;
2164 
2165   self = GST_AMC_VIDEO_DEC (decoder);
2166 
2167   GST_DEBUG_OBJECT (self, "Flushing decoder");
2168 
2169   if (!self->started) {
2170     GST_DEBUG_OBJECT (self, "Codec not started yet");
2171     return TRUE;
2172   }
2173 
2174   self->flushing = TRUE;
2175   /* Wait until the srcpad loop is finished,
2176    * unlock GST_VIDEO_DECODER_STREAM_LOCK to prevent deadlocks
2177    * caused by using this lock from inside the loop function */
2178   GST_VIDEO_DECODER_STREAM_UNLOCK (self);
2179   GST_PAD_STREAM_LOCK (GST_VIDEO_DECODER_SRC_PAD (self));
2180   GST_PAD_STREAM_UNLOCK (GST_VIDEO_DECODER_SRC_PAD (self));
2181   GST_VIDEO_DECODER_STREAM_LOCK (self);
2182   gst_amc_codec_flush (self->codec, &err);
2183   if (err)
2184     GST_ELEMENT_WARNING_FROM_ERROR (self, err);
2185   self->flushing = FALSE;
2186 
2187   /* Start the srcpad loop again */
2188   self->last_upstream_ts = 0;
2189   self->drained = TRUE;
2190   self->downstream_flow_ret = GST_FLOW_OK;
2191   gst_pad_start_task (GST_VIDEO_DECODER_SRC_PAD (self),
2192       (GstTaskFunction) gst_amc_video_dec_loop, decoder, NULL);
2193 
2194   GST_DEBUG_OBJECT (self, "Flushed decoder");
2195 
2196   return TRUE;
2197 }
2198 
2199 static GstFlowReturn
2200 gst_amc_video_dec_handle_frame (GstVideoDecoder * decoder,
2201     GstVideoCodecFrame * frame)
2202 {
2203   GstAmcVideoDec *self;
2204   gint idx;
2205   GstAmcBuffer *buf;
2206   GstAmcBufferInfo buffer_info;
2207   guint offset = 0;
2208   GstClockTime timestamp, duration, timestamp_offset = 0;
2209   GstMapInfo minfo;
2210   GError *err = NULL;
2211 
2212   memset (&minfo, 0, sizeof (minfo));
2213 
2214   self = GST_AMC_VIDEO_DEC (decoder);
2215 
2216   GST_DEBUG_OBJECT (self, "Handling frame");
2217 
2218   if (!self->started) {
2219     GST_ERROR_OBJECT (self, "Codec not started yet");
2220     gst_video_codec_frame_unref (frame);
2221     return GST_FLOW_NOT_NEGOTIATED;
2222   }
2223 
2224   if (self->flushing)
2225     goto flushing;
2226 
2227   if (self->downstream_flow_ret != GST_FLOW_OK)
2228     goto downstream_error;
2229 
2230   timestamp = frame->pts;
2231   duration = frame->duration;
2232 
2233   gst_buffer_map (frame->input_buffer, &minfo, GST_MAP_READ);
2234 
2235   while (offset < minfo.size) {
2236     /* Make sure to release the base class stream lock, otherwise
2237      * _loop() can't call _finish_frame() and we might block forever
2238      * because no input buffers are released */
2239     GST_VIDEO_DECODER_STREAM_UNLOCK (self);
2240     /* Wait at most 100ms here; some codecs don't fail dequeueing
2241      * while flushing, which can cause deadlocks during shutdown */
2242     idx = gst_amc_codec_dequeue_input_buffer (self->codec, 100000, &err);
2243     GST_VIDEO_DECODER_STREAM_LOCK (self);
2244 
2245     if (idx < 0) {
2246       if (self->flushing || self->downstream_flow_ret == GST_FLOW_FLUSHING) {
2247         g_clear_error (&err);
2248         goto flushing;
2249       }
2250 
2251       switch (idx) {
2252         case INFO_TRY_AGAIN_LATER:
2253           GST_DEBUG_OBJECT (self, "Dequeueing input buffer timed out");
2254           continue;             /* next try */
2255           break;
2256         case G_MININT:
2257           GST_ERROR_OBJECT (self, "Failed to dequeue input buffer");
2258           goto dequeue_error;
2259         default:
2260           g_assert_not_reached ();
2261           break;
2262       }
2263 
2264       continue;
2265     }
2266 
2267     if (self->flushing) {
2268       memset (&buffer_info, 0, sizeof (buffer_info));
2269       gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info, NULL);
2270       goto flushing;
2271     }
2272 
2273     if (self->downstream_flow_ret != GST_FLOW_OK) {
2274       memset (&buffer_info, 0, sizeof (buffer_info));
2275       gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info, &err);
2276       if (err && !self->flushing)
2277         GST_ELEMENT_WARNING_FROM_ERROR (self, err);
2278       g_clear_error (&err);
2279       goto downstream_error;
2280     }
2281 
2282     /* Now handle the frame */
2283 
2284     /* Copy the input data in chunks no larger than the size of the
2285      * codec's input buffer */
2286     buf = gst_amc_codec_get_input_buffer (self->codec, idx, &err);
2287     if (err)
2288       goto failed_to_get_input_buffer;
2289     else if (!buf)
2290       goto got_null_input_buffer;
2291 
2292     memset (&buffer_info, 0, sizeof (buffer_info));
2293     buffer_info.offset = 0;
2294     buffer_info.size = MIN (minfo.size - offset, buf->size);
2295     gst_amc_buffer_set_position_and_limit (buf, NULL, buffer_info.offset,
2296         buffer_info.size);
2297 
2298     orc_memcpy (buf->data, minfo.data + offset, buffer_info.size);
2299 
2300     gst_amc_buffer_free (buf);
2301     buf = NULL;
2302 
2303     /* Interpolate timestamps if we're passing the buffer
2304      * in multiple chunks */
2305     if (offset != 0 && duration != GST_CLOCK_TIME_NONE) {
2306       timestamp_offset = gst_util_uint64_scale (offset, duration, minfo.size);
2307     }
2308 
2309     if (timestamp != GST_CLOCK_TIME_NONE) {
2310       buffer_info.presentation_time_us =
2311           gst_util_uint64_scale (timestamp + timestamp_offset, 1, GST_USECOND);
2312       self->last_upstream_ts = timestamp + timestamp_offset;
2313     }
2314     if (duration != GST_CLOCK_TIME_NONE)
2315       self->last_upstream_ts += duration;
2316 
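    /* Remember the (interpolated) timestamp of the first chunk on the frame
     * so the decoded output can later be matched back to this input frame. */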
2317     if (offset == 0) {
2318       BufferIdentification *id =
2319           buffer_identification_new (timestamp + timestamp_offset);
2320       if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame))
2321         buffer_info.flags |= BUFFER_FLAG_SYNC_FRAME;
2322       gst_video_codec_frame_set_user_data (frame, id,
2323           (GDestroyNotify) buffer_identification_free);
2324     }
2325 
2326     offset += buffer_info.size;
2327     GST_DEBUG_OBJECT (self,
2328         "Queueing buffer %d: size %d time %" G_GINT64_FORMAT
2329         " flags 0x%08x", idx, buffer_info.size,
2330         buffer_info.presentation_time_us, buffer_info.flags);
2331     if (!gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info,
2332             &err)) {
2333       if (self->flushing) {
2334         g_clear_error (&err);
2335         goto flushing;
2336       }
2337       goto queue_error;
2338     }
2339     self->drained = FALSE;
2340   }
2341 
2342   gst_buffer_unmap (frame->input_buffer, &minfo);
2343   gst_video_codec_frame_unref (frame);
2344 
2345   return self->downstream_flow_ret;
2346 
2347 downstream_error:
2348   {
2349     GST_ERROR_OBJECT (self, "Downstream returned %s",
2350         gst_flow_get_name (self->downstream_flow_ret));
2351     if (minfo.data)
2352       gst_buffer_unmap (frame->input_buffer, &minfo);
2353     gst_video_codec_frame_unref (frame);
2354     return self->downstream_flow_ret;
2355   }
2356 failed_to_get_input_buffer:
2357   {
2358     GST_ELEMENT_ERROR_FROM_ERROR (self, err);
2359     if (minfo.data)
2360       gst_buffer_unmap (frame->input_buffer, &minfo);
2361     gst_video_codec_frame_unref (frame);
2362     return GST_FLOW_ERROR;
2363   }
2364 got_null_input_buffer:
2365   {
2366     GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
2367         ("Got no input buffer"));
2368     if (minfo.data)
2369       gst_buffer_unmap (frame->input_buffer, &minfo);
2370     gst_video_codec_frame_unref (frame);
2371     return GST_FLOW_ERROR;
2372   }
2373 dequeue_error:
2374   {
2375     GST_ELEMENT_ERROR_FROM_ERROR (self, err);
2376     if (minfo.data)
2377       gst_buffer_unmap (frame->input_buffer, &minfo);
2378     gst_video_codec_frame_unref (frame);
2379     return GST_FLOW_ERROR;
2380   }
2381 queue_error:
2382   {
2383     GST_VIDEO_DECODER_ERROR_FROM_ERROR (self, err);
2384     if (minfo.data)
2385       gst_buffer_unmap (frame->input_buffer, &minfo);
2386     gst_video_codec_frame_unref (frame);
2387     return GST_FLOW_ERROR;
2388   }
2389 flushing:
2390   {
2391     GST_DEBUG_OBJECT (self, "Flushing -- returning FLUSHING");
2392     if (minfo.data)
2393       gst_buffer_unmap (frame->input_buffer, &minfo);
2394     gst_video_codec_frame_unref (frame);
2395     return GST_FLOW_FLUSHING;
2396   }
2397 }
2398 
2399 static GstFlowReturn
2400 gst_amc_video_dec_finish (GstVideoDecoder * decoder)
2401 {
2402   GstAmcVideoDec *self;
2403 
2404   self = GST_AMC_VIDEO_DEC (decoder);
2405 
2406   return gst_amc_video_dec_drain (self);
2407 }
2408 
2409 static GstFlowReturn
2410 gst_amc_video_dec_drain (GstAmcVideoDec * self)
2411 {
2412   GstFlowReturn ret;
2413   gint idx;
2414   GError *err = NULL;
2415 
2416   GST_DEBUG_OBJECT (self, "Draining codec");
2417   if (!self->started) {
2418     GST_DEBUG_OBJECT (self, "Codec not started yet");
2419     return GST_FLOW_OK;
2420   }
2421 
2422   /* Don't send the drain buffer twice; that doesn't work */
2423   if (self->drained) {
2424     GST_DEBUG_OBJECT (self, "Codec is drained already");
2425     return GST_FLOW_OK;
2426   }
2427 
2428   /* Make sure to release the base class stream lock, otherwise
2429    * _loop() can't call _finish_frame() and we might block forever
2430    * because no input buffers are released */
2431   GST_VIDEO_DECODER_STREAM_UNLOCK (self);
2432   /* Send an EOS buffer to the component and let the base
2433    * class drop the EOS event. We will send it later when
2434    * the EOS buffer arrives on the output port.
2435    * Wait at most 0.5s here. */
2436   idx = gst_amc_codec_dequeue_input_buffer (self->codec, 500000, &err);
2437   GST_VIDEO_DECODER_STREAM_LOCK (self);
2438 
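  /* With an input buffer available, queue a zero-sized buffer flagged
   * BUFFER_FLAG_END_OF_STREAM and wait on drain_cond until the output loop
   * has seen the corresponding EOS on the output side. */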
2439   if (idx >= 0) {
2440     GstAmcBuffer *buf;
2441     GstAmcBufferInfo buffer_info;
2442 
2443     buf = gst_amc_codec_get_input_buffer (self->codec, idx, &err);
2444     if (buf) {
2445       GST_VIDEO_DECODER_STREAM_UNLOCK (self);
2446       g_mutex_lock (&self->drain_lock);
2447       self->draining = TRUE;
2448 
2449       memset (&buffer_info, 0, sizeof (buffer_info));
2450       buffer_info.size = 0;
2451       buffer_info.presentation_time_us =
2452           gst_util_uint64_scale (self->last_upstream_ts, 1, GST_USECOND);
2453       buffer_info.flags |= BUFFER_FLAG_END_OF_STREAM;
2454 
2455       gst_amc_buffer_set_position_and_limit (buf, NULL, 0, 0);
2456       gst_amc_buffer_free (buf);
2457       buf = NULL;
2458 
2459       if (gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info,
2460               &err)) {
2461         GST_DEBUG_OBJECT (self, "Waiting until codec is drained");
2462         g_cond_wait (&self->drain_cond, &self->drain_lock);
2463         GST_DEBUG_OBJECT (self, "Drained codec");
2464         ret = GST_FLOW_OK;
2465       } else {
2466         GST_ERROR_OBJECT (self, "Failed to queue input buffer");
2467         if (self->flushing) {
2468           g_clear_error (&err);
2469           ret = GST_FLOW_FLUSHING;
2470         } else {
2471           GST_ELEMENT_WARNING_FROM_ERROR (self, err);
2472           ret = GST_FLOW_ERROR;
2473         }
2474       }
2475 
2476       self->drained = TRUE;
2477       self->draining = FALSE;
2478       g_mutex_unlock (&self->drain_lock);
2479       GST_VIDEO_DECODER_STREAM_LOCK (self);
2480     } else {
2481       GST_ERROR_OBJECT (self, "Failed to get buffer for EOS: %d", idx);
2482       if (err)
2483         GST_ELEMENT_WARNING_FROM_ERROR (self, err);
2484       ret = GST_FLOW_ERROR;
2485     }
2486   } else {
2487     GST_ERROR_OBJECT (self, "Failed to acquire buffer for EOS: %d", idx);
2488     if (err)
2489       GST_ELEMENT_WARNING_FROM_ERROR (self, err);
2490     ret = GST_FLOW_ERROR;
2491   }
2492 
2493   return ret;
2494 }
2495 
2496 static gboolean
2497 gst_amc_video_dec_src_query (GstVideoDecoder * bdec, GstQuery * query)
2498 {
2499   GstAmcVideoDec *self = GST_AMC_VIDEO_DEC (bdec);
2500 
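  /* Answer GstContext queries about our GL display/contexts so downstream
   * GL elements can share the same GstGLDisplay. */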
2501   switch (GST_QUERY_TYPE (query)) {
2502     case GST_QUERY_CONTEXT:
2503     {
2504       if (gst_gl_handle_context_query ((GstElement *) self, query,
2505               self->gl_display, self->gl_context, self->other_gl_context))
2506         return TRUE;
2507       break;
2508     }
2509     default:
2510       break;
2511   }
2512 
2513   return GST_VIDEO_DECODER_CLASS (parent_class)->src_query (bdec, query);
2514 }
2515 
2516 static gboolean
2517 _caps_are_rgba_with_gl_memory (GstCaps * caps)
2518 {
2519   GstVideoInfo info;
2520   GstCapsFeatures *features;
2521 
2522   if (!caps)
2523     return FALSE;
2524 
2525   if (!gst_video_info_from_caps (&info, caps))
2526     return FALSE;
2527 
2528   if (info.finfo->format != GST_VIDEO_FORMAT_RGBA)
2529     return FALSE;
2530 
2531   if (!(features = gst_caps_get_features (caps, 0)))
2532     return FALSE;
2533 
2534   return gst_caps_features_contains (features,
2535       GST_CAPS_FEATURE_MEMORY_GL_MEMORY);
2536 }
2537 
2538 static gboolean
2539 _find_local_gl_context (GstAmcVideoDec * self)
2540 {
2541   if (gst_gl_query_local_gl_context (GST_ELEMENT (self), GST_PAD_SRC,
2542           &self->gl_context))
2543     return TRUE;
2544   return FALSE;
2545 }
2546 
2547 static gboolean
2548 gst_amc_video_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
2549 {
2550   GstAmcVideoDec *self = GST_AMC_VIDEO_DEC (bdec);
2551   gboolean need_pool = FALSE;
2552   GstCaps *caps = NULL;
2553 //  GError *error = NULL;
2554 
2555   if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
2556     return FALSE;
2557 
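  /* GL output is only enabled if downstream negotiated RGBA GLMemory caps
   * and a local GL context could be found; otherwise fall back to
   * system-memory output. */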
2558   self->downstream_supports_gl = FALSE;
2559   gst_query_parse_allocation (query, &caps, &need_pool);
2560   if (_caps_are_rgba_with_gl_memory (caps)) {
2561 
2562     if (!gst_gl_ensure_element_data (self, &self->gl_display,
2563             &self->other_gl_context))
2564       return FALSE;
2565 
2566     if (!_find_local_gl_context (self))
2567       goto out;
2568 #if 0
2569     if (!self->gl_context) {
2570       GST_OBJECT_LOCK (self->gl_display);
2571       do {
2572         if (self->gl_context) {
2573           gst_object_unref (self->gl_context);
2574           self->gl_context = NULL;
2575         }
2576         /* just get a GL context.  we don't care */
2577         self->gl_context =
2578             gst_gl_display_get_gl_context_for_thread (self->gl_display, NULL);
2579         if (!self->gl_context) {
2580           if (!gst_gl_display_create_context (self->gl_display,
2581                   self->other_gl_context, &self->gl_context, &error)) {
2582             GST_OBJECT_UNLOCK (self->gl_display);
2583             goto context_error;
2584           }
2585         }
2586       } while (!gst_gl_display_add_context (self->gl_display,
2587               self->gl_context));
2588       GST_OBJECT_UNLOCK (self->gl_display);
2589     }
2590 #endif
2591 
2592     self->downstream_supports_gl = TRUE;
2593   }
2594 
2595 out:
2596   return gst_amc_video_dec_check_codec_config (self);
2597 #if 0
2598 context_error:
2599   {
2600     GST_ELEMENT_ERROR (self, RESOURCE, NOT_FOUND, ("%s", error->message),
2601         (NULL));
2602     g_clear_error (&error);
2603     return FALSE;
2604   }
2605 #endif
2606 }
2607 
2608 static void
2609 gst_amc_video_dec_on_frame_available (JNIEnv * env, jobject thiz,
2610     long long context, jobject surfaceTexture)
2611 {
2612   GstAmcVideoDec *self = JLONG_TO_GST_AMC_VIDEO_DEC (context);
2613 
2614   /* apparently we can be called after the decoder has been closed */
2615   if (!self)
2616     return;
2617 
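  /* Called from a Java thread: count the newly available frame and wake up
   * anyone waiting on gl_cond in the GL sync code. */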
2618   g_mutex_lock (&self->gl_lock);
2619   self->gl_ready_frame_count++;
2620   GST_LOG_OBJECT (self, "frame %u available", self->gl_ready_frame_count);
2621   g_cond_broadcast (&self->gl_cond);
2622   g_mutex_unlock (&self->gl_lock);
2623 }
2624