/* GStreamer
 *
 * uvch264_mjpg_demux: a demuxer for the streams muxed into UVC H264 compliant MJPG
 *
 * Copyright (C) 2012 Cisco Systems, Inc.
 *   Author: Youness Alaoui <youness.alaoui@collabora.co.uk>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

/**
 * SECTION:element-uvch264mjpgdemux
 * @title: uvch264mjpgdemux
 * @short_description: UVC H264 compliant MJPG demuxer
 *
 * Parses an MJPG stream from a UVC H264 compliant encoding camera and
 * extracts each muxed stream onto a separate pad.
 *
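 * ## Example launch line
 *
 * A minimal sketch, assuming a camera on /dev/video0 that has already been
 * configured to embed an H264 stream in its MJPG output (in practice this
 * element is normally used internally by uvch264src, which takes care of
 * that); the caps filter and downstream decoders may need adjusting:
 * |[
 * gst-launch-1.0 v4l2src device=/dev/video0 ! image/jpeg ! \
 *     uvch264mjpgdemux name=demux \
 *     demux.jpeg ! jpegdec ! autovideosink \
 *     demux.h264 ! h264parse ! avdec_h264 ! autovideosink
 * ]|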
 */

#ifdef HAVE_CONFIG_H
#include <config.h>
#endif

#include <string.h>
#include <linux/uvcvideo.h>
#include <linux/usb/video.h>
#include <sys/ioctl.h>

#ifndef UVCIOC_GET_LAST_SCR
#include <time.h>

struct uvc_last_scr_sample
{
  __u32 dev_frequency;
  __u32 dev_stc;
  __u16 dev_sof;
  struct timespec host_ts;
  __u16 host_sof;
};

#define UVCIOC_GET_LAST_SCR	_IOR('u', 0x23, struct uvc_last_scr_sample)
#endif

#include "gstuvch264_mjpgdemux.h"

enum
{
  PROP_0,
  PROP_DEVICE_FD,
  PROP_NUM_CLOCK_SAMPLES
};

#define DEFAULT_NUM_CLOCK_SAMPLES 32

static GstStaticPadTemplate mjpgsink_pad_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("image/jpeg, "
        "width = (int) [ 0, MAX ],"
        "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ] ")
    );

static GstStaticPadTemplate jpegsrc_pad_template =
GST_STATIC_PAD_TEMPLATE ("jpeg",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("image/jpeg, "
        "width = (int) [ 0, MAX ],"
        "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ] ")
    );

static GstStaticPadTemplate h264src_pad_template =
GST_STATIC_PAD_TEMPLATE ("h264",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-h264, "
        "width = (int) [ 0, MAX ], "
        "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ] ")
    );

static GstStaticPadTemplate yuy2src_pad_template =
GST_STATIC_PAD_TEMPLATE ("yuy2",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-raw, "
        "format = (string) YUY2, "
        "width = (int) [ 0, MAX ], "
        "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ] ")
    );
static GstStaticPadTemplate nv12src_pad_template =
GST_STATIC_PAD_TEMPLATE ("nv12",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-raw, "
        "format = (string) NV12, "
        "width = (int) [ 0, MAX ], "
        "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ] ")
    );


GST_DEBUG_CATEGORY_STATIC (uvc_h264_mjpg_demux_debug);
#define GST_CAT_DEFAULT uvc_h264_mjpg_demux_debug

static void gst_uvc_h264_mjpg_demux_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_uvc_h264_mjpg_demux_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec);
static void gst_uvc_h264_mjpg_demux_dispose (GObject * object);
static GstFlowReturn gst_uvc_h264_mjpg_demux_chain (GstPad * pad,
    GstObject * parent, GstBuffer * buffer);
static gboolean gst_uvc_h264_mjpg_demux_sink_event (GstPad * pad,
    GstObject * parent, GstEvent * event);
static gboolean gst_uvc_h264_mjpg_demux_query (GstPad * pad,
    GstObject * parent, GstQuery * query);

#define gst_uvc_h264_mjpg_demux_parent_class parent_class
G_DEFINE_TYPE (GstUvcH264MjpgDemux, gst_uvc_h264_mjpg_demux, GST_TYPE_ELEMENT);

static void
gst_uvc_h264_mjpg_demux_class_init (GstUvcH264MjpgDemuxClass * klass)
{
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstElementClass *element_class = (GstElementClass *) klass;

  parent_class = g_type_class_peek_parent (klass);

  gobject_class->set_property = gst_uvc_h264_mjpg_demux_set_property;
  gobject_class->get_property = gst_uvc_h264_mjpg_demux_get_property;
  gobject_class->dispose = gst_uvc_h264_mjpg_demux_dispose;

  gst_element_class_add_static_pad_template (element_class,
      &mjpgsink_pad_template);
  gst_element_class_add_static_pad_template (element_class,
      &jpegsrc_pad_template);
  gst_element_class_add_static_pad_template (element_class,
      &h264src_pad_template);
  gst_element_class_add_static_pad_template (element_class,
      &yuy2src_pad_template);
  gst_element_class_add_static_pad_template (element_class,
      &nv12src_pad_template);

  gst_element_class_set_static_metadata (element_class,
      "UVC H264 MJPG Demuxer",
      "Video/Demuxer",
      "Demux UVC H264 auxiliary streams from MJPG images",
      "Youness Alaoui <youness.alaoui@collabora.co.uk>");

  g_object_class_install_property (gobject_class, PROP_DEVICE_FD,
      g_param_spec_int ("device-fd", "device-fd",
          "File descriptor of the v4l2 device",
          -1, G_MAXINT, -1, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_NUM_CLOCK_SAMPLES,
      g_param_spec_int ("num-clock-samples", "num-clock-samples",
          "Number of clock samples to gather for the PTS synchronization"
          " (-1 = unlimited)",
          0, G_MAXINT, DEFAULT_NUM_CLOCK_SAMPLES,
          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));

  GST_DEBUG_CATEGORY_INIT (uvc_h264_mjpg_demux_debug,
      "uvch264mjpgdemux", 0, "UVC H264 MJPG Demuxer");
}

static void
gst_uvc_h264_mjpg_demux_init (GstUvcH264MjpgDemux * self)
{
  self->last_pts = GST_CLOCK_TIME_NONE;
  self->pts_reordered_warning = FALSE;
  self->device_fd = -1;

  /* create the sink and src pads */
  self->sink_pad =
      gst_pad_new_from_static_template (&mjpgsink_pad_template, "sink");
  gst_pad_set_chain_function (self->sink_pad,
      GST_DEBUG_FUNCPTR (gst_uvc_h264_mjpg_demux_chain));
  gst_pad_set_event_function (self->sink_pad,
      GST_DEBUG_FUNCPTR (gst_uvc_h264_mjpg_demux_sink_event));
  gst_pad_set_query_function (self->sink_pad,
      GST_DEBUG_FUNCPTR (gst_uvc_h264_mjpg_demux_query));
  gst_element_add_pad (GST_ELEMENT (self), self->sink_pad);

  /* JPEG */
  self->jpeg_pad =
      gst_pad_new_from_static_template (&jpegsrc_pad_template, "jpeg");
  gst_pad_set_query_function (self->jpeg_pad,
      GST_DEBUG_FUNCPTR (gst_uvc_h264_mjpg_demux_query));
  gst_element_add_pad (GST_ELEMENT (self), self->jpeg_pad);

  /* H264 */
  self->h264_pad =
      gst_pad_new_from_static_template (&h264src_pad_template, "h264");
  gst_pad_use_fixed_caps (self->h264_pad);
  gst_element_add_pad (GST_ELEMENT (self), self->h264_pad);

  /* YUY2 */
  self->yuy2_pad =
      gst_pad_new_from_static_template (&yuy2src_pad_template, "yuy2");
  gst_pad_use_fixed_caps (self->yuy2_pad);
  gst_element_add_pad (GST_ELEMENT (self), self->yuy2_pad);

  /* NV12 */
  self->nv12_pad =
      gst_pad_new_from_static_template (&nv12src_pad_template, "nv12");
  gst_pad_use_fixed_caps (self->nv12_pad);
  gst_element_add_pad (GST_ELEMENT (self), self->nv12_pad);

  self->h264_caps = gst_caps_new_empty_simple ("video/x-h264");
  self->yuy2_caps = gst_caps_new_simple ("video/x-raw",
      "format", G_TYPE_STRING, "YUY2", NULL);
  self->nv12_caps = gst_caps_new_simple ("video/x-raw",
      "format", G_TYPE_STRING, "NV12", NULL);
  self->h264_width = self->h264_height = 0;
  self->yuy2_width = self->yuy2_height = 0;
  self->nv12_width = self->nv12_height = 0;
}

static void
gst_uvc_h264_mjpg_demux_dispose (GObject * object)
{
  GstUvcH264MjpgDemux *self = GST_UVC_H264_MJPG_DEMUX (object);

  if (self->h264_caps)
    gst_caps_unref (self->h264_caps);
  self->h264_caps = NULL;
  if (self->yuy2_caps)
    gst_caps_unref (self->yuy2_caps);
  self->yuy2_caps = NULL;
  if (self->nv12_caps)
    gst_caps_unref (self->nv12_caps);
  self->nv12_caps = NULL;
  g_free (self->clock_samples);
  self->clock_samples = NULL;

  G_OBJECT_CLASS (parent_class)->dispose (object);
}

static void
gst_uvc_h264_mjpg_demux_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec)
{
  GstUvcH264MjpgDemux *self = GST_UVC_H264_MJPG_DEMUX (object);

  switch (prop_id) {
    case PROP_DEVICE_FD:
      self->device_fd = g_value_get_int (value);
      break;
    case PROP_NUM_CLOCK_SAMPLES:
      self->num_clock_samples = g_value_get_int (value);
      if (self->clock_samples) {
        if (self->num_clock_samples) {
          self->clock_samples = g_realloc_n (self->clock_samples,
              self->num_clock_samples, sizeof (GstUvcH264ClockSample));
          if (self->num_samples > self->num_clock_samples) {
            self->num_samples = self->num_clock_samples;
            if (self->last_sample >= self->num_samples)
              self->last_sample = self->num_samples - 1;
          }
        } else {
          g_free (self->clock_samples);
          self->clock_samples = NULL;
          self->last_sample = -1;
          self->num_samples = 0;
        }
      } else if (self->num_clock_samples > 0) {
        self->clock_samples = g_malloc0_n (self->num_clock_samples,
            sizeof (GstUvcH264ClockSample));
        self->last_sample = -1;
        self->num_samples = 0;
      }
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec);
      break;
  }
}

static void
gst_uvc_h264_mjpg_demux_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec)
{
  GstUvcH264MjpgDemux *self = GST_UVC_H264_MJPG_DEMUX (object);

  switch (prop_id) {
    case PROP_DEVICE_FD:
      g_value_set_int (value, self->device_fd);
      break;
    case PROP_NUM_CLOCK_SAMPLES:
      g_value_set_int (value, self->num_clock_samples);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec);
      break;
  }
}

static gboolean
gst_uvc_h264_mjpg_demux_sink_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  GstUvcH264MjpgDemux *self = GST_UVC_H264_MJPG_DEMUX (parent);
  gboolean res;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEGMENT:
      gst_event_copy_segment (event, &self->segment);
      self->last_pts = GST_CLOCK_TIME_NONE;
      res = gst_pad_push_event (self->jpeg_pad, event);
      break;
    case GST_EVENT_CAPS:
      res = gst_pad_push_event (self->jpeg_pad, event);
      break;
    default:
      res = gst_pad_event_default (pad, parent, event);
      break;
  }
  return res;
}

static gboolean
gst_uvc_h264_mjpg_demux_query (GstPad * pad, GstObject * parent,
    GstQuery * query)
{
  GstUvcH264MjpgDemux *self = GST_UVC_H264_MJPG_DEMUX (parent);
  gboolean ret = FALSE;

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CAPS:
      if (pad == self->sink_pad)
        ret = gst_pad_peer_query (self->jpeg_pad, query);
      else
        ret = gst_pad_peer_query (self->sink_pad, query);
      break;
    default:
      ret = gst_pad_query_default (pad, parent, query);
  }

  return ret;
}

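/* Collect a device/host clock correlation sample via the UVCIOC_GET_LAST_SCR
 * ioctl and keep it in the clock_samples ring.  Once num_clock_samples
 * samples are available this is meant to translate the device PTS of an
 * auxiliary frame into a host timestamp for @buf, but the double linear
 * regression is still a TODO below, so the buffer timestamp is currently
 * left untouched and the return value only indicates whether enough samples
 * have been gathered. */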
static gboolean
_pts_to_timestamp (GstUvcH264MjpgDemux * self, GstBuffer * buf, guint32 pts)
{
  GstUvcH264ClockSample *current_sample = NULL;
  GstUvcH264ClockSample *oldest_sample = NULL;
  guint32 next_sample;
  struct uvc_last_scr_sample sample;
  guint32 dev_sof;

  if (self->device_fd == -1 || self->clock_samples == NULL)
    return FALSE;

  if (-1 == ioctl (self->device_fd, UVCIOC_GET_LAST_SCR, &sample)) {
    //GST_WARNING_OBJECT (self, " GET_LAST_SCR error");
    return FALSE;
  }

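  /* sample.dev_sof is the 11-bit USB SOF counter; the offset by 2048 and the
   * shift into the upper 16 bits below appear meant to leave headroom for
   * wrap-around handling once the regression is implemented (assumption, the
   * original code does not document the rationale). */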
  dev_sof = (guint32) (sample.dev_sof + 2048) << 16;
  if (self->num_samples > 0 &&
      self->clock_samples[self->last_sample].dev_sof == dev_sof) {
    current_sample = &self->clock_samples[self->last_sample];
  } else {
    next_sample = (self->last_sample + 1) % self->num_clock_samples;
    current_sample = &self->clock_samples[next_sample];
    current_sample->dev_stc = sample.dev_stc;
    current_sample->dev_sof = dev_sof;
    current_sample->host_ts = sample.host_ts.tv_sec * GST_SECOND +
        sample.host_ts.tv_nsec * GST_NSECOND;
    current_sample->host_sof = (guint32) (sample.host_sof + 2048) << 16;

    self->num_samples++;
    self->last_sample = next_sample;

    /* Debug printing */
    GST_DEBUG_OBJECT (self, "device frequency: %u", sample.dev_frequency);
    GST_DEBUG_OBJECT (self, "dev_sof: %u", sample.dev_sof);
    GST_DEBUG_OBJECT (self, "dev_stc: %u", sample.dev_stc);
    GST_DEBUG_OBJECT (self,
        "host_ts: %" G_GUINT64_FORMAT " -- %" GST_TIME_FORMAT,
        current_sample->host_ts, GST_TIME_ARGS (current_sample->host_ts));
    GST_DEBUG_OBJECT (self, "host_sof: %u", sample.host_sof);
    GST_DEBUG_OBJECT (self, "PTS: %u", pts);
    GST_DEBUG_OBJECT (self, "Diff: %u - %f\n", sample.dev_stc - pts,
        (gdouble) (sample.dev_stc - pts) / sample.dev_frequency);
  }

  if (self->num_samples < self->num_clock_samples)
    return FALSE;

  next_sample = (self->last_sample + 1) % self->num_clock_samples;
  oldest_sample = &self->clock_samples[next_sample];

  /* TODO: Use current_sample and oldest_sample to do the
   * double linear regression and calculate a new PTS */
  (void) oldest_sample;

  return TRUE;
}

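/* Chain function: split an incoming MJPG frame into the JPEG image and the
 * auxiliary stream carried in APP4 (0xff 0xe4) marker segments.  As parsed
 * below, every APP4 segment starts with a 16-bit big-endian segment size,
 * and the first APP4 segment of a frame additionally carries an
 * AuxiliaryStreamHeader (mostly little-endian fields) followed by a 32-bit
 * payload size; the payload itself may be spread over several consecutive
 * APP4 segments.  The JPEG data around the APP4 segments is reassembled and
 * pushed on the jpeg pad, while the auxiliary payload is pushed on the pad
 * matching its fourcc (h264, yuy2 or nv12). */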
static GstFlowReturn
gst_uvc_h264_mjpg_demux_chain (GstPad * pad,
    GstObject * parent, GstBuffer * buf)
{
  GstUvcH264MjpgDemux *self;
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *jpeg_buf = NULL;
  GstBuffer *aux_buf = NULL;
  AuxiliaryStreamHeader aux_header = { 0 };
  guint32 aux_size = 0;
  GstPad *aux_pad = NULL;
  GstCaps **aux_caps = NULL;
  guint last_offset;
  guint i;
  GstMapInfo info;
  guint16 segment_size;

  self = GST_UVC_H264_MJPG_DEMUX (GST_PAD_PARENT (pad));

  if (gst_buffer_get_size (buf) == 0) {
    return gst_pad_push (self->jpeg_pad, buf);
  }

  last_offset = 0;
  gst_buffer_map (buf, &info, GST_MAP_READ);

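  /* Start from a buffer that shares only buf's metadata (timestamps, flags);
   * the JPEG memory chunks are appended piece by piece while walking the
   * markers below */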
  jpeg_buf = gst_buffer_copy_region (buf, GST_BUFFER_COPY_METADATA, 0, 0);

  for (i = 0; i < info.size - 1; i++) {
    /* Check for APP4 (0xe4) marker in the jpeg */
    if (info.data[i] == 0xff && info.data[i + 1] == 0xe4) {

      /* Sanity check sizes and get segment size */
      if (i + 4 >= info.size) {
        GST_ELEMENT_ERROR (self, STREAM, DEMUX,
            ("Not enough data to read marker size"), (NULL));
        ret = GST_FLOW_ERROR;
        goto done;
      }
      segment_size = GUINT16_FROM_BE (*((guint16 *) (info.data + i + 2)));

      if (i + segment_size + 2 >= info.size) {
        GST_ELEMENT_ERROR (self, STREAM, DEMUX,
            ("Not enough data to read marker content"), (NULL));
        ret = GST_FLOW_ERROR;
        goto done;
      }
      GST_DEBUG_OBJECT (self,
          "Found APP4 marker (%d). JPG: %d-%d - APP4: %d - %d", segment_size,
          last_offset, i, i, i + 2 + segment_size);

      /* Add JPEG data between the last offset and this marker */
      if (i - last_offset > 0) {
        GstMemory *m = gst_memory_copy (info.memory, last_offset,
            i - last_offset);
        gst_buffer_append_memory (jpeg_buf, m);
      }
      last_offset = i + 2 + segment_size;

      /* Reset i/segment size to the app4 data (ignore marker header/size) */
      i += 4;
      segment_size -= 2;

      /* If this is a new auxiliary stream, initialize everything properly */
      if (aux_buf == NULL) {
        if (segment_size < sizeof (aux_header) + sizeof (aux_size)) {
          GST_ELEMENT_ERROR (self, STREAM, DEMUX,
              ("Not enough data to read aux header"), (NULL));
          ret = GST_FLOW_ERROR;
          goto done;
        }

        aux_header = *((AuxiliaryStreamHeader *) (info.data + i));
        /* version should be little endian but it looks more like BE */
        aux_header.version = GUINT16_FROM_BE (aux_header.version);
        aux_header.header_len = GUINT16_FROM_LE (aux_header.header_len);
        aux_header.width = GUINT16_FROM_LE (aux_header.width);
        aux_header.height = GUINT16_FROM_LE (aux_header.height);
        aux_header.frame_interval = GUINT32_FROM_LE (aux_header.frame_interval);
        aux_header.delay = GUINT16_FROM_LE (aux_header.delay);
        aux_header.pts = GUINT32_FROM_LE (aux_header.pts);
        GST_DEBUG_OBJECT (self, "New auxiliary stream : v%d - %d bytes - %"
            GST_FOURCC_FORMAT " %dx%d -- %d *100ns -- %d ms -- %d",
            aux_header.version, aux_header.header_len,
            GST_FOURCC_ARGS (aux_header.type),
            aux_header.width, aux_header.height,
            aux_header.frame_interval, aux_header.delay, aux_header.pts);
        aux_size = *((guint32 *) (info.data + i + aux_header.header_len));
        GST_DEBUG_OBJECT (self, "Auxiliary stream size : %d bytes", aux_size);

        if (aux_size > 0) {
          guint16 *width = NULL;
          guint16 *height = NULL;

          /* Find the auxiliary stream's pad and caps */
          switch (aux_header.type) {
            case GST_MAKE_FOURCC ('H', '2', '6', '4'):
              aux_pad = self->h264_pad;
              aux_caps = &self->h264_caps;
              width = &self->h264_width;
              height = &self->h264_height;
              break;
            case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
              aux_pad = self->yuy2_pad;
              aux_caps = &self->yuy2_caps;
              width = &self->yuy2_width;
              height = &self->yuy2_height;
              break;
            case GST_MAKE_FOURCC ('N', 'V', '1', '2'):
              aux_pad = self->nv12_pad;
              aux_caps = &self->nv12_caps;
              width = &self->nv12_width;
              height = &self->nv12_height;
              break;
            default:
              GST_ELEMENT_ERROR (self, STREAM, DEMUX,
                  ("Unknown auxiliary stream format : %" GST_FOURCC_FORMAT,
                      GST_FOURCC_ARGS (aux_header.type)), (NULL));
              ret = GST_FLOW_ERROR;
              break;
          }

          if (ret != GST_FLOW_OK)
            goto done;

          if (*width != aux_header.width || *height != aux_header.height) {
            GstCaps *peercaps = gst_pad_peer_query_caps (aux_pad, NULL);
            GstStructure *s = NULL;
            gint fps_num = 1000000000 / aux_header.frame_interval;
            gint fps_den = 100;

            /* TODO: intersect with pad template */
            GST_DEBUG ("peercaps : %" GST_PTR_FORMAT, peercaps);
            if (peercaps && !gst_caps_is_any (peercaps)) {
              peercaps = gst_caps_make_writable (peercaps);
              s = gst_caps_get_structure (peercaps, 0);
            }
            if (s && gst_structure_has_field (s, "framerate")) {
              /* TODO: make sure it contains the right format/width/height */
              gst_structure_fixate_field_nearest_fraction (s, "framerate",
                  fps_num, fps_den);
              GST_DEBUG ("Fixated struct : %" GST_PTR_FORMAT, s);
              gst_structure_get_fraction (s, "framerate", &fps_num, &fps_den);
            }
            if (peercaps)
              gst_caps_unref (peercaps);

            *width = aux_header.width;
            *height = aux_header.height;
            *aux_caps = gst_caps_make_writable (*aux_caps);
            /* FIXME: fps must match the caps and be allowed and represent
               our first buffer */
            gst_caps_set_simple (*aux_caps,
                "width", G_TYPE_INT, aux_header.width,
                "height", G_TYPE_INT, aux_header.height,
                "framerate", GST_TYPE_FRACTION, fps_num, fps_den, NULL);
            gst_pad_push_event (aux_pad, gst_event_new_caps (*aux_caps));
            gst_pad_push_event (aux_pad,
                gst_event_new_segment (&self->segment));
          }

          /* Create new auxiliary buffer and adjust i/segment size */
          aux_buf = gst_buffer_new ();
        }

        i += sizeof (aux_header) + sizeof (aux_size);
        segment_size -= sizeof (aux_header) + sizeof (aux_size);
      }

      if (segment_size > aux_size) {
        GST_ELEMENT_ERROR (self, STREAM, DEMUX,
            ("Expected %d auxiliary data, got %d bytes", aux_size,
                segment_size), (NULL));
        ret = GST_FLOW_ERROR;
        goto done;
      }

      if (segment_size > 0) {
        GstMemory *m;
        m = gst_memory_copy (info.memory, i, segment_size);

        GST_BUFFER_DURATION (aux_buf) =
            aux_header.frame_interval * 100 * GST_NSECOND;

        _pts_to_timestamp (self, aux_buf, aux_header.pts);

        gst_buffer_append_memory (aux_buf, m);

        aux_size -= segment_size;

        /* Push completed aux data */
        if (aux_size == 0) {
          /* Last attempt to apply timestamp. FIXME: This
           * is broken for H.264 with B-frames */
          if (GST_BUFFER_PTS (aux_buf) == GST_CLOCK_TIME_NONE) {
            if (!self->pts_reordered_warning &&
                self->last_pts != GST_CLOCK_TIME_NONE &&
                self->last_pts > GST_BUFFER_PTS (buf)) {
              GST_WARNING_OBJECT (self, "PTS went backward, timestamping "
                  "might be broken");
              self->pts_reordered_warning = TRUE;
            }
            self->last_pts = GST_BUFFER_PTS (buf);

            GST_BUFFER_PTS (aux_buf) = GST_BUFFER_PTS (buf);
          }
          if (GST_BUFFER_DTS (aux_buf) == GST_CLOCK_TIME_NONE) {
            GstClockTime dts = GST_BUFFER_PTS (aux_buf);
            GstClockTime delay = aux_header.delay * GST_MSECOND;
            if (dts > delay)
              dts -= delay;
            else
              dts = 0;
            GST_BUFFER_DTS (aux_buf) = dts;
            GST_LOG_OBJECT (self, "Applied DTS %" GST_TIME_FORMAT
                " to aux_buf", GST_TIME_ARGS (dts));
          }

          GST_DEBUG_OBJECT (self, "Pushing %" GST_FOURCC_FORMAT
              " auxiliary buffer %" GST_PTR_FORMAT,
              GST_FOURCC_ARGS (aux_header.type), *aux_caps);
          ret = gst_pad_push (aux_pad, aux_buf);
          aux_buf = NULL;
          if (ret != GST_FLOW_OK) {
            GST_WARNING_OBJECT (self, "Error pushing %" GST_FOURCC_FORMAT
                " auxiliary data", GST_FOURCC_ARGS (aux_header.type));
            goto done;
          }
        }
      }

      i += segment_size - 1;
    } else if (info.data[i] == 0xff && info.data[i + 1] == 0xda) {
      GstMemory *m;

      /* The APP4 markers must be before the SOS marker, so this is the end */
      GST_DEBUG_OBJECT (self, "Found SOS marker.");

      m = gst_memory_copy (info.memory, last_offset, info.size - last_offset);
      gst_buffer_append_memory (jpeg_buf, m);
      last_offset = info.size;
      break;
    }
  }

  if (aux_buf != NULL) {
    GST_DEBUG_OBJECT (self, "Incomplete auxiliary stream: %d bytes missing, "
        "%d segment size remaining -- missing segment, C920 bug?",
        aux_size, segment_size);
    ret = GST_FLOW_OK;
    goto done;
  }

  if (last_offset != info.size) {
    /* this means there was no SOS marker in the jpg, so we assume the JPG was
       just a container */
    GST_DEBUG_OBJECT (self, "SOS marker wasn't found. MJPG is container only");
    gst_buffer_unref (jpeg_buf);
    jpeg_buf = NULL;
  } else {
    ret = gst_pad_push (self->jpeg_pad, jpeg_buf);
    jpeg_buf = NULL;
  }

  if (ret != GST_FLOW_OK) {
    GST_WARNING_OBJECT (self, "Error pushing jpeg data");
    goto done;
  }

done:
  /* In case of error, unref whatever was left */
  if (aux_buf)
    gst_buffer_unref (aux_buf);
  if (jpeg_buf)
    gst_buffer_unref (jpeg_buf);

  gst_buffer_unmap (buf, &info);

  /* We must always unref the input buffer since we never push it out */
  gst_buffer_unref (buf);

  return ret;
}