1 /* GStreamer
2 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
3 * Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
4 * Copyright (C) 2012 Collabora Ltd.
5 * Author : Edward Hervey <edward@collabora.com>
6 *
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Library General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
11 *
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Library General Public License for more details.
16 *
17 * You should have received a copy of the GNU Library General Public
18 * License along with this library; if not, write to the
19 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
20 * Boston, MA 02110-1301, USA.
21 */
22
23 /**
24 * SECTION:element-jpegdec
25 *
26 * Decodes jpeg images.
27 *
28 * <refsect2>
29 * <title>Example launch line</title>
30 * |[
31 * gst-launch-1.0 -v filesrc location=mjpeg.avi ! avidemux ! queue ! jpegdec ! videoconvert ! videoscale ! autovideosink
 * ]| The above pipeline decodes the mjpeg stream and renders it to the screen.
33 * </refsect2>
34 */
35
36 #ifdef HAVE_CONFIG_H
37 #include "config.h"
38 #endif
39 #include <string.h>
40
41 #include "gstjpegdec.h"
42 #include "gstjpeg.h"
43 #include <gst/video/video.h>
44 #include <gst/video/gstvideometa.h>
45 #include <gst/video/gstvideopool.h>
46 #include "gst/gst-i18n-plugin.h"
47 #include <jerror.h>
48
/* Sanity limits on accepted image dimensions; JPEG encodes width/height
 * as 16-bit values, hence the 65535 maximum. */
#define MIN_WIDTH  1
#define MAX_WIDTH  65535
#define MIN_HEIGHT 1
#define MAX_HEIGHT 65535

/* Recover the owning GstJpegDec from the custom jpeg_source_mgr installed
 * in the decompress struct (set up in gst_jpeg_dec_init()). */
#define CINFO_GET_JPEGDEC(cinfo_ptr) \
        (((struct GstJpegDecSourceMgr*)((cinfo_ptr)->src))->dec)

/* Property defaults */
#define JPEG_DEFAULT_IDCT_METHOD		JDCT_FASTEST
#define JPEG_DEFAULT_MAX_ERRORS 	0

/* GObject property IDs */
enum
{
  PROP_0,
  PROP_IDCT_METHOD,
  PROP_MAX_ERRORS
};
66
/* Output formats the decoder can produce (selected in _negotiate()
 * based on the JPEG colour space). */
/* *INDENT-OFF* */
static GstStaticPadTemplate gst_jpeg_dec_src_pad_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
        ("{ I420, RGB, BGR, RGBx, xRGB, BGRx, xBGR, GRAY8 }"))
    );
/* *INDENT-ON* */

/* FIXME: sof-marker is for IJG libjpeg 8, should be different for 6.2 */
/* FIXME: add back "sof-marker = (int) { 0, 1, 2, 5, 6, 7, 9, 10, 13, 14 }"
 * once we have a parser and/or demuxer set caps properly */
static GstStaticPadTemplate gst_jpeg_dec_sink_pad_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("image/jpeg")
    );

GST_DEBUG_CATEGORY_STATIC (jpeg_dec_debug);
#define GST_CAT_DEFAULT jpeg_dec_debug
GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE);
90
/* GObject property vfuncs */
static void gst_jpeg_dec_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_jpeg_dec_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);

/* GstVideoDecoder vfuncs */
static gboolean gst_jpeg_dec_set_format (GstVideoDecoder * dec,
    GstVideoCodecState * state);
static gboolean gst_jpeg_dec_start (GstVideoDecoder * bdec);
static gboolean gst_jpeg_dec_stop (GstVideoDecoder * bdec);
static gboolean gst_jpeg_dec_flush (GstVideoDecoder * bdec);
static GstFlowReturn gst_jpeg_dec_parse (GstVideoDecoder * bdec,
    GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos);
static GstFlowReturn gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec,
    GstVideoCodecFrame * frame);
static gboolean gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec,
    GstQuery * query);
static gboolean gst_jpeg_dec_sink_event (GstVideoDecoder * bdec,
    GstEvent * event);

#define gst_jpeg_dec_parent_class parent_class
G_DEFINE_TYPE (GstJpegDec, gst_jpeg_dec, GST_TYPE_VIDEO_DECODER);
112
113 static void
gst_jpeg_dec_finalize(GObject * object)114 gst_jpeg_dec_finalize (GObject * object)
115 {
116 GstJpegDec *dec = GST_JPEG_DEC (object);
117
118 jpeg_destroy_decompress (&dec->cinfo);
119 if (dec->input_state)
120 gst_video_codec_state_unref (dec->input_state);
121
122 G_OBJECT_CLASS (parent_class)->finalize (object);
123 }
124
/* Class initializer: installs properties, pad templates, element metadata
 * and the GstVideoDecoder virtual methods. */
static void
gst_jpeg_dec_class_init (GstJpegDecClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *element_class;
  GstVideoDecoderClass *vdec_class;

  gobject_class = (GObjectClass *) klass;
  element_class = (GstElementClass *) klass;
  vdec_class = (GstVideoDecoderClass *) klass;

  parent_class = g_type_class_peek_parent (klass);

  gobject_class->finalize = gst_jpeg_dec_finalize;
  gobject_class->set_property = gst_jpeg_dec_set_property;
  gobject_class->get_property = gst_jpeg_dec_get_property;

  g_object_class_install_property (gobject_class, PROP_IDCT_METHOD,
      g_param_spec_enum ("idct-method", "IDCT Method",
          "The IDCT algorithm to use", GST_TYPE_IDCT_METHOD,
          JPEG_DEFAULT_IDCT_METHOD,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /**
   * GstJpegDec:max-errors:
   *
   * Error out after receiving N consecutive decoding errors
   * (-1 = never error out, 0 = automatic, 1 = fail on first error, etc.)
   *
   * Deprecated: 1.3.1: Property wasn't used internally
   */
#ifndef GST_REMOVE_DEPRECATED
  g_object_class_install_property (gobject_class, PROP_MAX_ERRORS,
      g_param_spec_int ("max-errors", "Maximum Consecutive Decoding Errors",
          "(Deprecated) Error out after receiving N consecutive decoding errors"
          " (-1 = never fail, 0 = automatic, 1 = fail on first error)",
          -1, G_MAXINT, JPEG_DEFAULT_MAX_ERRORS,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));
#endif

  gst_element_class_add_static_pad_template (element_class,
      &gst_jpeg_dec_src_pad_template);
  gst_element_class_add_static_pad_template (element_class,
      &gst_jpeg_dec_sink_pad_template);
  gst_element_class_set_static_metadata (element_class, "JPEG image decoder",
      "Codec/Decoder/Image", "Decode images from JPEG format",
      "Wim Taymans <wim@fluendo.com>");

  /* wire up the GstVideoDecoder virtual methods */
  vdec_class->start = gst_jpeg_dec_start;
  vdec_class->stop = gst_jpeg_dec_stop;
  vdec_class->flush = gst_jpeg_dec_flush;
  vdec_class->parse = gst_jpeg_dec_parse;
  vdec_class->set_format = gst_jpeg_dec_set_format;
  vdec_class->handle_frame = gst_jpeg_dec_handle_frame;
  vdec_class->decide_allocation = gst_jpeg_dec_decide_allocation;
  vdec_class->sink_event = gst_jpeg_dec_sink_event;

  GST_DEBUG_CATEGORY_INIT (jpeg_dec_debug, "jpegdec", 0, "JPEG decoder");
  GST_DEBUG_CATEGORY_GET (GST_CAT_PERFORMANCE, "GST_PERFORMANCE");
}
185
186 static boolean
gst_jpeg_dec_fill_input_buffer(j_decompress_ptr cinfo)187 gst_jpeg_dec_fill_input_buffer (j_decompress_ptr cinfo)
188 {
189 /* We pass in full frame initially, if this get called, the frame is most likely
190 * corrupted */
191 return FALSE;
192 }
193
194 static void
gst_jpeg_dec_init_source(j_decompress_ptr cinfo)195 gst_jpeg_dec_init_source (j_decompress_ptr cinfo)
196 {
197 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "init_source");
198 }
199
200
201 static void
gst_jpeg_dec_skip_input_data(j_decompress_ptr cinfo,glong num_bytes)202 gst_jpeg_dec_skip_input_data (j_decompress_ptr cinfo, glong num_bytes)
203 {
204 GstJpegDec *dec = CINFO_GET_JPEGDEC (cinfo);
205
206 GST_DEBUG_OBJECT (dec, "skip %ld bytes", num_bytes);
207
208 if (num_bytes > 0 && cinfo->src->bytes_in_buffer >= num_bytes) {
209 cinfo->src->next_input_byte += (size_t) num_bytes;
210 cinfo->src->bytes_in_buffer -= (size_t) num_bytes;
211 }
212 }
213
214 static boolean
gst_jpeg_dec_resync_to_restart(j_decompress_ptr cinfo,gint desired)215 gst_jpeg_dec_resync_to_restart (j_decompress_ptr cinfo, gint desired)
216 {
217 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "resync_to_start");
218 return TRUE;
219 }
220
221 static void
gst_jpeg_dec_term_source(j_decompress_ptr cinfo)222 gst_jpeg_dec_term_source (j_decompress_ptr cinfo)
223 {
224 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "term_source");
225 return;
226 }
227
228 METHODDEF (void)
gst_jpeg_dec_my_output_message(j_common_ptr cinfo)229 gst_jpeg_dec_my_output_message (j_common_ptr cinfo)
230 {
231 return; /* do nothing */
232 }
233
234 METHODDEF (void)
gst_jpeg_dec_my_emit_message(j_common_ptr cinfo,int msg_level)235 gst_jpeg_dec_my_emit_message (j_common_ptr cinfo, int msg_level)
236 {
237 /* GST_LOG_OBJECT (CINFO_GET_JPEGDEC (&cinfo), "msg_level=%d", msg_level); */
238 return;
239 }
240
/* Fatal libjpeg error handler: instead of libjpeg's default (print and
 * exit()), emit the message via our (no-op) output_message handler and
 * jump back to the setjmp() point stored in the custom error manager
 * (presumably set around the decode calls in handle_frame — the setjmp
 * site is outside this chunk). */
METHODDEF (void)
gst_jpeg_dec_my_error_exit (j_common_ptr cinfo)
{
  struct GstJpegDecErrorMgr *err_mgr = (struct GstJpegDecErrorMgr *) cinfo->err;

  (*cinfo->err->output_message) (cinfo);
  longjmp (err_mgr->setjmp_buffer, 1);
}
249
/* Instance initializer: sets up the libjpeg decompressor with our custom
 * error handlers and source manager, and initializes property defaults. */
static void
gst_jpeg_dec_init (GstJpegDec * dec)
{
  GST_DEBUG ("initializing");

  /* setup jpeglib */
  memset (&dec->cinfo, 0, sizeof (dec->cinfo));
  memset (&dec->jerr, 0, sizeof (dec->jerr));
  /* install error handlers before creating the decompressor so setup
   * failures already go through them */
  dec->cinfo.err = jpeg_std_error (&dec->jerr.pub);
  dec->jerr.pub.output_message = gst_jpeg_dec_my_output_message;
  dec->jerr.pub.emit_message = gst_jpeg_dec_my_emit_message;
  dec->jerr.pub.error_exit = gst_jpeg_dec_my_error_exit;

  jpeg_create_decompress (&dec->cinfo);

  /* custom source manager: the full frame is supplied up front rather
   * than pulled by libjpeg (see gst_jpeg_dec_fill_input_buffer) */
  dec->cinfo.src = (struct jpeg_source_mgr *) &dec->jsrc;
  dec->cinfo.src->init_source = gst_jpeg_dec_init_source;
  dec->cinfo.src->fill_input_buffer = gst_jpeg_dec_fill_input_buffer;
  dec->cinfo.src->skip_input_data = gst_jpeg_dec_skip_input_data;
  dec->cinfo.src->resync_to_restart = gst_jpeg_dec_resync_to_restart;
  dec->cinfo.src->term_source = gst_jpeg_dec_term_source;
  /* back-pointer so the callbacks can find us via CINFO_GET_JPEGDEC() */
  dec->jsrc.dec = dec;

  /* init properties */
  dec->idct_method = JPEG_DEFAULT_IDCT_METHOD;
  dec->max_errors = JPEG_DEFAULT_MAX_ERRORS;

  gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
      (dec), TRUE);
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (dec));
}
281
282 static inline gboolean
gst_jpeg_dec_parse_tag_has_entropy_segment(guint8 tag)283 gst_jpeg_dec_parse_tag_has_entropy_segment (guint8 tag)
284 {
285 if (tag == 0xda || (tag >= 0xd0 && tag <= 0xd7))
286 return TRUE;
287 return FALSE;
288 }
289
/* GstVideoDecoder::parse vfunc: scans the adapter for JPEG marker segments
 * and accumulates bytes into the current frame until a complete image
 * (SOI..EOI) is found.  Parse state (saw_header, parse_resync,
 * parse_entropy_len) persists across calls so scanning can resume where a
 * previous call ran out of data. */
static GstFlowReturn
gst_jpeg_dec_parse (GstVideoDecoder * bdec, GstVideoCodecFrame * frame,
    GstAdapter * adapter, gboolean at_eos)
{
  guint size;
  gint toadd = 0;
  gboolean resync;
  gint offset = 0, noffset;
  GstJpegDec *dec = (GstJpegDec *) bdec;

  GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);

  /* FIXME : The overhead of using scan_uint32 is massive */

  size = gst_adapter_available (adapter);
  GST_DEBUG ("Parsing jpeg image data (%u bytes)", size);

  if (at_eos) {
    GST_DEBUG ("Flushing all data out");
    toadd = size;

    /* If we have leftover data, throw it away */
    if (!dec->saw_header)
      goto drop_frame;
    goto have_full_frame;
  }

  if (size < 8)
    goto need_more_data;

  if (!dec->saw_header) {
    gint ret;
    /* we expect at least 4 bytes, first of which start marker */
    ret =
        gst_adapter_masked_scan_uint32 (adapter, 0xffff0000, 0xffd80000, 0,
        size - 4);

    GST_DEBUG ("ret:%d", ret);
    if (ret < 0)
      goto need_more_data;

    /* drop any garbage before the SOI (0xffd8) marker */
    if (ret) {
      gst_adapter_flush (adapter, ret);
      size -= ret;
    }
    dec->saw_header = TRUE;
  }

  /* walk the marker segments until EOI (or another SOI) is found */
  while (1) {
    guint frame_len;
    guint32 value;

    GST_DEBUG ("offset:%d, size:%d", offset, size);

    noffset =
        gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
        offset, size - offset, &value);

    /* lost sync if 0xff marker not where expected */
    if ((resync = (noffset != offset))) {
      GST_DEBUG ("Lost sync at 0x%08x, resyncing", offset + 2);
    }
    /* may have marker, but could have been resyncing */
    resync = resync || dec->parse_resync;
    /* Skip over extra 0xff (fill bytes before the marker code) */
    while ((noffset >= 0) && ((value & 0xff) == 0xff)) {
      noffset++;
      noffset =
          gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
          noffset, size - noffset, &value);
    }
    /* enough bytes left for marker? (we need 0xNN after the 0xff) */
    if (noffset < 0) {
      GST_DEBUG ("at end of input and no EOI marker found, need more data");
      goto need_more_data;
    }

    /* now lock on the marker we found */
    offset = noffset;
    value = value & 0xff;
    if (value == 0xd9) {
      GST_DEBUG ("0x%08x: EOI marker", offset + 2);
      /* clear parse state */
      dec->saw_header = FALSE;
      dec->parse_resync = FALSE;
      toadd = offset + 4;
      goto have_full_frame;
    }
    if (value == 0xd8) {
      /* a second SOI means the previous image had no EOI; finish the
       * frame right before this new start marker */
      GST_DEBUG ("0x%08x: SOI marker before EOI marker", offset + 2);

      /* clear parse state */
      dec->saw_header = FALSE;
      dec->parse_resync = FALSE;
      toadd = offset;
      goto have_full_frame;
    }


    /* RSTn markers (0xd0-0xd7) are standalone, no length field */
    if (value >= 0xd0 && value <= 0xd7)
      frame_len = 0;
    else {
      /* peek tag and subsequent length */
      if (offset + 2 + 4 > size)
        goto need_more_data;
      else
        gst_adapter_masked_scan_uint32_peek (adapter, 0x0, 0x0, offset + 2, 4,
            &frame_len);
      frame_len = frame_len & 0xffff;
    }
    GST_DEBUG ("0x%08x: tag %02x, frame_len=%u", offset + 2, value, frame_len);
    /* the frame length includes the 2 bytes for the length; here we want at
     * least 2 more bytes at the end for an end marker */
    if (offset + 2 + 2 + frame_len + 2 > size) {
      goto need_more_data;
    }

    if (gst_jpeg_dec_parse_tag_has_entropy_segment (value)) {
      guint eseglen = dec->parse_entropy_len;

      /* entropy-coded data follows; scan for the next real marker
       * (any 0xff not followed by a stuffed 0x00 byte) */
      GST_DEBUG ("0x%08x: finding entropy segment length (eseglen:%d)",
          offset + 2, eseglen);
      if (size < offset + 2 + frame_len + eseglen)
        goto need_more_data;
      noffset = offset + 2 + frame_len + dec->parse_entropy_len;
      while (1) {
        GST_DEBUG ("noffset:%d, size:%d, size - noffset:%d",
            noffset, size, size - noffset);
        noffset = gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00,
            0x0000ff00, noffset, size - noffset, &value);
        if (noffset < 0) {
          /* need more data; remember how far we already scanned */
          dec->parse_entropy_len = size - offset - 4 - frame_len - 2;
          goto need_more_data;
        }
        if ((value & 0xff) != 0x00) {
          eseglen = noffset - offset - frame_len - 2;
          break;
        }
        noffset++;
      }
      dec->parse_entropy_len = 0;
      frame_len += eseglen;
      GST_DEBUG ("entropy segment length=%u => frame_len=%u", eseglen,
          frame_len);
    }
    if (resync) {
      /* check if we will still be in sync if we interpret
       * this as a sync point and skip this frame */
      noffset = offset + frame_len + 2;
      noffset = gst_adapter_masked_scan_uint32 (adapter, 0x0000ff00, 0x0000ff00,
          noffset, 4);
      if (noffset < 0) {
        /* ignore and continue resyncing until we hit the end
         * of our data or find a sync point that looks okay */
        offset++;
        continue;
      }
      GST_DEBUG ("found sync at 0x%x", offset + 2);
    }

    /* Add current data to output buffer */
    toadd += frame_len + 2;
    offset += frame_len + 2;
  }

need_more_data:
  if (toadd)
    gst_video_decoder_add_to_frame (bdec, toadd);
  return GST_VIDEO_DECODER_FLOW_NEED_DATA;

have_full_frame:
  if (toadd)
    gst_video_decoder_add_to_frame (bdec, toadd);
  GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
  return gst_video_decoder_have_frame (bdec);

drop_frame:
  gst_adapter_flush (adapter, size);
  return GST_FLOW_OK;
}
471
472
/* shamelessly ripped from jpegutils.c in mjpegtools */
static void
add_huff_table (j_decompress_ptr dinfo,
    JHUFF_TBL ** htblptr, const UINT8 * bits, const UINT8 * val)
/* Define a Huffman table */
{
  int nsymbols, len;

  /* allocate the table from libjpeg's pool on first use */
  if (*htblptr == NULL)
    *htblptr = jpeg_alloc_huff_table ((j_common_ptr) dinfo);

  g_assert (*htblptr);

  /* Copy the number-of-symbols-of-each-code-length counts */
  memcpy ((*htblptr)->bits, bits, sizeof ((*htblptr)->bits));

  /* Validate the counts. We do this here mainly so we can copy the right
   * number of symbols from the val[] array, without risking marching off
   * the end of memory. jchuff.c will do a more thorough test later.
   */
  nsymbols = 0;
  for (len = 1; len <= 16; len++)
    nsymbols += bits[len];
  if (nsymbols < 1 || nsymbols > 256)
    g_error ("jpegutils.c: add_huff_table failed badly. ");

  memcpy ((*htblptr)->huffval, val, nsymbols * sizeof (UINT8));
}
501
502
503
static void
std_huff_tables (j_decompress_ptr dinfo)
/* Set up the standard Huffman tables (cf. JPEG standard section K.3) */
/* IMPORTANT: these are only valid for 8-bit data precision! */
{
  /* bits_* arrays: index i (1..16) holds the number of codes of length i;
   * index 0 is unused.  val_* arrays hold the symbol values in code order. */
  static const UINT8 bits_dc_luminance[17] =
      { /* 0-base */ 0, 0, 1, 5, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0 };
  static const UINT8 val_dc_luminance[] =
      { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };

  static const UINT8 bits_dc_chrominance[17] =
      { /* 0-base */ 0, 0, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0 };
  static const UINT8 val_dc_chrominance[] =
      { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };

  static const UINT8 bits_ac_luminance[17] =
      { /* 0-base */ 0, 0, 2, 1, 3, 3, 2, 4, 3, 5, 5, 4, 4, 0, 0, 1, 0x7d };
  static const UINT8 val_ac_luminance[] =
      { 0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12,
    0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07,
    0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xa1, 0x08,
    0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0,
    0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16,
    0x17, 0x18, 0x19, 0x1a, 0x25, 0x26, 0x27, 0x28,
    0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39,
    0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49,
    0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59,
    0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
    0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79,
    0x7a, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89,
    0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98,
    0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
    0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6,
    0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5,
    0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4,
    0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe1, 0xe2,
    0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea,
    0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
    0xf9, 0xfa
  };

  static const UINT8 bits_ac_chrominance[17] =
      { /* 0-base */ 0, 0, 2, 1, 2, 4, 4, 3, 4, 7, 5, 4, 4, 0, 1, 2, 0x77 };
  static const UINT8 val_ac_chrominance[] =
      { 0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21,
    0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71,
    0x13, 0x22, 0x32, 0x81, 0x08, 0x14, 0x42, 0x91,
    0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0,
    0x15, 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34,
    0xe1, 0x25, 0xf1, 0x17, 0x18, 0x19, 0x1a, 0x26,
    0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38,
    0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
    0x49, 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58,
    0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
    0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78,
    0x79, 0x7a, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
    0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96,
    0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5,
    0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4,
    0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3,
    0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2,
    0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda,
    0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9,
    0xea, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
    0xf9, 0xfa
  };

  /* luminance tables in slot 0, chrominance in slot 1 */
  add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[0],
      bits_dc_luminance, val_dc_luminance);
  add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[0],
      bits_ac_luminance, val_ac_luminance);
  add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[1],
      bits_dc_chrominance, val_dc_chrominance);
  add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[1],
      bits_ac_chrominance, val_ac_chrominance);
}
580
581
582
583 static void
guarantee_huff_tables(j_decompress_ptr dinfo)584 guarantee_huff_tables (j_decompress_ptr dinfo)
585 {
586 if ((dinfo->dc_huff_tbl_ptrs[0] == NULL) &&
587 (dinfo->dc_huff_tbl_ptrs[1] == NULL) &&
588 (dinfo->ac_huff_tbl_ptrs[0] == NULL) &&
589 (dinfo->ac_huff_tbl_ptrs[1] == NULL)) {
590 GST_DEBUG ("Generating standard Huffman tables for this frame.");
591 std_huff_tables (dinfo);
592 }
593 }
594
595 static gboolean
gst_jpeg_dec_set_format(GstVideoDecoder * dec,GstVideoCodecState * state)596 gst_jpeg_dec_set_format (GstVideoDecoder * dec, GstVideoCodecState * state)
597 {
598 GstJpegDec *jpeg = GST_JPEG_DEC (dec);
599
600 if (jpeg->input_state)
601 gst_video_codec_state_unref (jpeg->input_state);
602 jpeg->input_state = gst_video_codec_state_ref (state);
603
604 return TRUE;
605 }
606
607
608 /* yuk */
609 static void
hresamplecpy1(guint8 * dest,const guint8 * src,guint len)610 hresamplecpy1 (guint8 * dest, const guint8 * src, guint len)
611 {
612 gint i;
613
614 for (i = 0; i < len; ++i) {
615 /* equivalent to: dest[i] = src[i << 1] */
616 *dest = *src;
617 ++dest;
618 ++src;
619 ++src;
620 }
621 }
622
623 static void
gst_jpeg_dec_free_buffers(GstJpegDec * dec)624 gst_jpeg_dec_free_buffers (GstJpegDec * dec)
625 {
626 gint i;
627
628 for (i = 0; i < 16; i++) {
629 g_free (dec->idr_y[i]);
630 g_free (dec->idr_u[i]);
631 g_free (dec->idr_v[i]);
632 dec->idr_y[i] = NULL;
633 dec->idr_u[i] = NULL;
634 dec->idr_v[i] = NULL;
635 }
636
637 dec->idr_width_allocated = 0;
638 }
639
640 static inline gboolean
gst_jpeg_dec_ensure_buffers(GstJpegDec * dec,guint maxrowbytes)641 gst_jpeg_dec_ensure_buffers (GstJpegDec * dec, guint maxrowbytes)
642 {
643 gint i;
644
645 if (G_LIKELY (dec->idr_width_allocated == maxrowbytes))
646 return TRUE;
647
648 /* FIXME: maybe just alloc one or three blocks altogether? */
649 for (i = 0; i < 16; i++) {
650 dec->idr_y[i] = g_try_realloc (dec->idr_y[i], maxrowbytes);
651 dec->idr_u[i] = g_try_realloc (dec->idr_u[i], maxrowbytes);
652 dec->idr_v[i] = g_try_realloc (dec->idr_v[i], maxrowbytes);
653
654 if (G_UNLIKELY (!dec->idr_y[i] || !dec->idr_u[i] || !dec->idr_v[i])) {
655 GST_WARNING_OBJECT (dec, "out of memory, i=%d, bytes=%u", i, maxrowbytes);
656 return FALSE;
657 }
658 }
659
660 dec->idr_width_allocated = maxrowbytes;
661 GST_LOG_OBJECT (dec, "allocated temp memory, %u bytes/row", maxrowbytes);
662 return TRUE;
663 }
664
/* Decode a grayscale JPEG through temporary row buffers, copying pixels
 * one at a time into the output frame (needed because the output pixel
 * stride may be larger than 1).  field/num_fields handle interlaced
 * output: field 2 starts one row down and every write skips the other
 * field's rows. */
static void
gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, GstVideoFrame * frame,
    guint field, guint num_fields)
{
  guchar *rows[16];
  guchar **scanarray[1] = { rows };
  gint i, j, k;
  gint lines;
  guint8 *base[1];
  gint width, height;
  gint pstride, rstride;

  GST_DEBUG_OBJECT (dec, "indirect decoding of grayscale");

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame) / num_fields;

  if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
    return;

  base[0] = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  /* bottom field starts one row down */
  if (field == 2) {
    base[0] += GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
  }

  pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  /* when writing one field, step over the interleaved other field */
  rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0) * num_fields;

  /* point the scan rows at our preallocated temp buffers */
  memcpy (rows, dec->idr_y, 16 * sizeof (gpointer));

  i = 0;
  while (i < height) {
    /* NOTE(review): if jpeg_read_raw_data() kept returning 0, this loop
     * would not terminate; in practice errors raise error_exit instead —
     * confirm against libjpeg behaviour */
    lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
    if (G_LIKELY (lines > 0)) {
      for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
        gint p;

        p = 0;
        for (k = 0; k < width; k++) {
          base[0][p] = rows[j][k];
          p += pstride;
        }
        base[0] += rstride;
      }
    } else {
      GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
    }
  }
}
714
/* Decode an RGB JPEG through temporary row buffers, interleaving the
 * three planar component rows pixel-by-pixel into the (packed) output
 * frame.  field/num_fields handle interlaced output as in the grayscale
 * path. */
static void
gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame,
    guint field, guint num_fields)
{
  guchar *r_rows[16], *g_rows[16], *b_rows[16];
  guchar **scanarray[3] = { r_rows, g_rows, b_rows };
  gint i, j, k;
  gint lines;
  guint8 *base[3];
  guint pstride, rstride;
  gint width, height;

  GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame) / num_fields;

  if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
    return;

  for (i = 0; i < 3; i++) {
    base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
    /* bottom field starts one row down */
    if (field == 2)
      base[i] += GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
  }

  pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  /* when writing one field, step over the interleaved other field */
  rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0) * num_fields;

  /* point the scan rows at our preallocated temp buffers */
  memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
  memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
  memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer));

  i = 0;
  while (i < height) {
    lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
    if (G_LIKELY (lines > 0)) {
      for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
        gint p;

        p = 0;
        for (k = 0; k < width; k++) {
          base[0][p] = r_rows[j][k];
          base[1][p] = g_rows[j][k];
          base[2][p] = b_rows[j][k];
          p += pstride;
        }
        base[0] += rstride;
        base[1] += rstride;
        base[2] += rstride;
      }
    } else {
      GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
    }
  }
}
771
772 static void
gst_jpeg_dec_decode_indirect(GstJpegDec * dec,GstVideoFrame * frame,gint r_v,gint r_h,gint comp,guint field,guint num_fields)773 gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame, gint r_v,
774 gint r_h, gint comp, guint field, guint num_fields)
775 {
776 guchar *y_rows[16], *u_rows[16], *v_rows[16];
777 guchar **scanarray[3] = { y_rows, u_rows, v_rows };
778 gint i, j, k;
779 gint lines;
780 guchar *base[3], *last[3];
781 gint rowsize[3], stride[3];
782 gint width, height;
783
784 GST_DEBUG_OBJECT (dec,
785 "unadvantageous width or r_h, taking slow route involving memcpy");
786
787 width = GST_VIDEO_FRAME_WIDTH (frame);
788 height = GST_VIDEO_FRAME_HEIGHT (frame);
789
790 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
791 return;
792
793 for (i = 0; i < 3; i++) {
794 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
795 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i) * num_fields;
796 rowsize[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
797 /* make sure we don't make jpeglib write beyond our buffer,
798 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
799 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
800 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
801
802 if (field == 2) {
803 base[i] += GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
804 }
805 }
806
807 memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
808 memcpy (u_rows, dec->idr_u, 16 * sizeof (gpointer));
809 memcpy (v_rows, dec->idr_v, 16 * sizeof (gpointer));
810
811 /* fill chroma components for grayscale */
812 if (comp == 1) {
813 GST_DEBUG_OBJECT (dec, "grayscale, filling chroma");
814 for (i = 0; i < 16; i++) {
815 memset (u_rows[i], GST_ROUND_UP_32 (width), 0x80);
816 memset (v_rows[i], GST_ROUND_UP_32 (width), 0x80);
817 }
818 }
819
820 for (i = 0; i < height; i += r_v * DCTSIZE) {
821 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, r_v * DCTSIZE);
822 if (G_LIKELY (lines > 0)) {
823 for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
824 if (G_LIKELY (base[0] <= last[0])) {
825 memcpy (base[0], y_rows[j], rowsize[0]);
826 base[0] += stride[0];
827 }
828 if (r_v == 2) {
829 if (G_LIKELY (base[0] <= last[0])) {
830 memcpy (base[0], y_rows[j + 1], rowsize[0]);
831 base[0] += stride[0];
832 }
833 }
834 if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
835 if (r_h == 2) {
836 memcpy (base[1], u_rows[k], rowsize[1]);
837 memcpy (base[2], v_rows[k], rowsize[2]);
838 } else if (r_h == 1) {
839 hresamplecpy1 (base[1], u_rows[k], rowsize[1]);
840 hresamplecpy1 (base[2], v_rows[k], rowsize[2]);
841 } else {
842 /* FIXME: implement (at least we avoid crashing by doing nothing) */
843 }
844 }
845
846 if (r_v == 2 || (k & 1) != 0) {
847 base[1] += stride[1];
848 base[2] += stride[2];
849 }
850 }
851 } else {
852 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
853 }
854 }
855 }
856
857 static GstFlowReturn
gst_jpeg_dec_decode_direct(GstJpegDec * dec,GstVideoFrame * frame,guint field,guint num_fields)858 gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame,
859 guint field, guint num_fields)
860 {
861 guchar **line[3]; /* the jpeg line buffer */
862 guchar *y[4 * DCTSIZE] = { NULL, }; /* alloc enough for the lines */
863 guchar *u[4 * DCTSIZE] = { NULL, }; /* r_v will be <4 */
864 guchar *v[4 * DCTSIZE] = { NULL, };
865 gint i, j;
866 gint lines, v_samp[3];
867 guchar *base[3], *last[3];
868 gint stride[3];
869 guint height;
870
871 line[0] = y;
872 line[1] = u;
873 line[2] = v;
874
875 v_samp[0] = dec->cinfo.comp_info[0].v_samp_factor;
876 v_samp[1] = dec->cinfo.comp_info[1].v_samp_factor;
877 v_samp[2] = dec->cinfo.comp_info[2].v_samp_factor;
878
879 if (G_UNLIKELY (v_samp[0] > 2 || v_samp[1] > 2 || v_samp[2] > 2))
880 goto format_not_supported;
881
882 height = GST_VIDEO_FRAME_HEIGHT (frame);
883
884 for (i = 0; i < 3; i++) {
885 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
886 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i) * num_fields;
887 /* make sure we don't make jpeglib write beyond our buffer,
888 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
889 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
890 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
891
892 if (field == 2) {
893 base[i] += GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
894 }
895 }
896
897 if (height % (v_samp[0] * DCTSIZE) && (dec->scratch_size < stride[0])) {
898 g_free (dec->scratch);
899 dec->scratch = g_malloc (stride[0]);
900 dec->scratch_size = stride[0];
901 }
902
903 /* let jpeglib decode directly into our final buffer */
904 GST_DEBUG_OBJECT (dec, "decoding directly into output buffer");
905
906 for (i = 0; i < height; i += v_samp[0] * DCTSIZE) {
907 for (j = 0; j < (v_samp[0] * DCTSIZE); ++j) {
908 /* Y */
909 line[0][j] = base[0] + (i + j) * stride[0];
910 if (G_UNLIKELY (line[0][j] > last[0]))
911 line[0][j] = dec->scratch;
912 /* U */
913 if (v_samp[1] == v_samp[0]) {
914 line[1][j] = base[1] + ((i + j) / 2) * stride[1];
915 } else if (j < (v_samp[1] * DCTSIZE)) {
916 line[1][j] = base[1] + ((i / 2) + j) * stride[1];
917 }
918 if (G_UNLIKELY (line[1][j] > last[1]))
919 line[1][j] = dec->scratch;
920 /* V */
921 if (v_samp[2] == v_samp[0]) {
922 line[2][j] = base[2] + ((i + j) / 2) * stride[2];
923 } else if (j < (v_samp[2] * DCTSIZE)) {
924 line[2][j] = base[2] + ((i / 2) + j) * stride[2];
925 }
926 if (G_UNLIKELY (line[2][j] > last[2]))
927 line[2][j] = dec->scratch;
928 }
929
930 lines = jpeg_read_raw_data (&dec->cinfo, line, v_samp[0] * DCTSIZE);
931 if (G_UNLIKELY (!lines)) {
932 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
933 }
934 }
935 return GST_FLOW_OK;
936
937 format_not_supported:
938 {
939 gboolean ret = GST_FLOW_OK;
940
941 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
942 (_("Failed to decode JPEG image")),
943 ("Unsupported subsampling schema: v_samp factors: %u %u %u", v_samp[0],
944 v_samp[1], v_samp[2]), ret);
945
946 return ret;
947 }
948 }
949
950 static void
gst_jpeg_dec_negotiate(GstJpegDec * dec,gint width,gint height,gint clrspc,gboolean interlaced)951 gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc,
952 gboolean interlaced)
953 {
954 GstVideoCodecState *outstate;
955 GstVideoInfo *info;
956 GstVideoFormat format;
957
958 switch (clrspc) {
959 case JCS_RGB:
960 format = GST_VIDEO_FORMAT_RGB;
961 break;
962 case JCS_GRAYSCALE:
963 format = GST_VIDEO_FORMAT_GRAY8;
964 break;
965 default:
966 format = GST_VIDEO_FORMAT_I420;
967 break;
968 }
969
970 /* Compare to currently configured output state */
971 outstate = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec));
972 if (outstate) {
973 info = &outstate->info;
974
975 if (width == GST_VIDEO_INFO_WIDTH (info) &&
976 height == GST_VIDEO_INFO_HEIGHT (info) &&
977 format == GST_VIDEO_INFO_FORMAT (info)) {
978 gst_video_codec_state_unref (outstate);
979 return;
980 }
981 gst_video_codec_state_unref (outstate);
982 }
983
984 outstate =
985 gst_video_decoder_set_output_state (GST_VIDEO_DECODER (dec), format,
986 width, height, dec->input_state);
987
988 switch (clrspc) {
989 case JCS_RGB:
990 case JCS_GRAYSCALE:
991 break;
992 default:
993 outstate->info.colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
994 outstate->info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
995 outstate->info.colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
996 outstate->info.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
997 break;
998 }
999
1000 if (interlaced) {
1001 outstate->info.interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
1002 GST_VIDEO_INFO_FIELD_ORDER (&outstate->info) =
1003 GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST;
1004 }
1005
1006 gst_video_codec_state_unref (outstate);
1007
1008 gst_video_decoder_negotiate (GST_VIDEO_DECODER (dec));
1009
1010 GST_DEBUG_OBJECT (dec, "max_v_samp_factor=%d", dec->cinfo.max_v_samp_factor);
1011 GST_DEBUG_OBJECT (dec, "max_h_samp_factor=%d", dec->cinfo.max_h_samp_factor);
1012 }
1013
/* Read the JPEG header from the current source buffer, configure libjpeg
 * for raw (planar) output and start the decompression cycle.
 *
 * Returns GST_FLOW_OK on success, GST_FLOW_ERROR if the image has an
 * unsupported number of components, colourspace, sampling layout or size.
 * On those errors the decompressor is aborted so it is ready for the next
 * image.  NOTE(review): fatal libjpeg problems are reported through the
 * error manager, which appears to longjmp() back to the caller's setjmp()
 * point in gst_jpeg_dec_handle_frame(), so this function may not return
 * at all on badly corrupted input. */
static GstFlowReturn
gst_jpeg_dec_prepare_decode (GstJpegDec * dec)
{
  /* GST_VIDEO_DECODER_ERROR writes a verdict into 'ret' in the error
   * paths below, but we always return GST_FLOW_ERROR regardless, hence
   * the G_GNUC_UNUSED. */
  G_GNUC_UNUSED GstFlowReturn ret;
  guint r_h, r_v, hdr_ok;

  /* read header */
  hdr_ok = jpeg_read_header (&dec->cinfo, TRUE);
  if (G_UNLIKELY (hdr_ok != JPEG_HEADER_OK)) {
    /* only a warning: the error manager handles truly fatal failures */
    GST_WARNING_OBJECT (dec, "reading the header failed, %d", hdr_ok);
  }

  GST_LOG_OBJECT (dec, "num_components=%d", dec->cinfo.num_components);
  GST_LOG_OBJECT (dec, "jpeg_color_space=%d", dec->cinfo.jpeg_color_space);

  if (!dec->cinfo.num_components || !dec->cinfo.comp_info)
    goto components_not_supported;

  /* sampling factors of the first (luma, for YCbCr) component */
  r_h = dec->cinfo.comp_info[0].h_samp_factor;
  r_v = dec->cinfo.comp_info[0].v_samp_factor;

  GST_LOG_OBJECT (dec, "r_h = %d, r_v = %d", r_h, r_v);

  if (dec->cinfo.num_components > 3)
    goto components_not_supported;

  /* verify color space expectation to avoid going *boom* or bogus output */
  if (dec->cinfo.jpeg_color_space != JCS_YCbCr &&
      dec->cinfo.jpeg_color_space != JCS_GRAYSCALE &&
      dec->cinfo.jpeg_color_space != JCS_RGB)
    goto unsupported_colorspace;

#ifndef GST_DISABLE_GST_DEBUG
  {
    gint i;

    for (i = 0; i < dec->cinfo.num_components; ++i) {
      GST_LOG_OBJECT (dec, "[%d] h_samp_factor=%d, v_samp_factor=%d, cid=%d",
          i, dec->cinfo.comp_info[i].h_samp_factor,
          dec->cinfo.comp_info[i].v_samp_factor,
          dec->cinfo.comp_info[i].component_id);
    }
  }
#endif

  /* prepare for raw output: we do the colour conversion / upsampling
   * ourselves, so disable libjpeg's fancy processing */
  dec->cinfo.do_fancy_upsampling = FALSE;
  dec->cinfo.do_block_smoothing = FALSE;
  dec->cinfo.out_color_space = dec->cinfo.jpeg_color_space;
  dec->cinfo.dct_method = dec->idct_method;
  dec->cinfo.raw_data_out = TRUE;

  GST_LOG_OBJECT (dec, "starting decompress");
  guarantee_huff_tables (&dec->cinfo);
  if (!jpeg_start_decompress (&dec->cinfo)) {
    GST_WARNING_OBJECT (dec, "failed to start decompression cycle");
  }

  /* sanity checks to get safe and reasonable output */
  switch (dec->cinfo.jpeg_color_space) {
    case JCS_GRAYSCALE:
      if (dec->cinfo.num_components != 1)
        goto invalid_yuvrgbgrayscale;
      break;
    case JCS_RGB:
      /* RGB must not be subsampled at all */
      if (dec->cinfo.num_components != 3 || dec->cinfo.max_v_samp_factor > 1 ||
          dec->cinfo.max_h_samp_factor > 1)
        goto invalid_yuvrgbgrayscale;
      break;
    case JCS_YCbCr:
      /* chroma sampling factors must not exceed luma's */
      if (dec->cinfo.num_components != 3 ||
          r_v > 2 || r_v < dec->cinfo.comp_info[0].v_samp_factor ||
          r_v < dec->cinfo.comp_info[1].v_samp_factor ||
          r_h < dec->cinfo.comp_info[0].h_samp_factor ||
          r_h < dec->cinfo.comp_info[1].h_samp_factor)
        goto invalid_yuvrgbgrayscale;
      break;
    default:
      g_assert_not_reached ();
      break;
  }

  if (G_UNLIKELY (dec->cinfo.output_width < MIN_WIDTH ||
          dec->cinfo.output_width > MAX_WIDTH ||
          dec->cinfo.output_height < MIN_HEIGHT ||
          dec->cinfo.output_height > MAX_HEIGHT))
    goto wrong_size;

  return GST_FLOW_OK;

  /* ERRORS */
wrong_size:
  {
    /* NOTE(review): unlike the paths below, this one does not call
     * jpeg_abort_decompress() — verify whether that is intentional */
    ret = GST_FLOW_ERROR;
    GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
        (_("Failed to decode JPEG image")),
        ("Picture is too small or too big (%ux%u)", dec->cinfo.output_width,
            dec->cinfo.output_height), ret);
    return GST_FLOW_ERROR;
  }
components_not_supported:
  {
    ret = GST_FLOW_ERROR;
    GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
        (_("Failed to decode JPEG image")),
        ("number of components not supported: %d (max 3)",
            dec->cinfo.num_components), ret);
    /* reset the decompressor so the next image starts clean */
    jpeg_abort_decompress (&dec->cinfo);
    return GST_FLOW_ERROR;
  }
unsupported_colorspace:
  {
    ret = GST_FLOW_ERROR;
    GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
        (_("Failed to decode JPEG image")),
        ("Picture has unknown or unsupported colourspace"), ret);
    jpeg_abort_decompress (&dec->cinfo);
    return GST_FLOW_ERROR;
  }
invalid_yuvrgbgrayscale:
  {
    ret = GST_FLOW_ERROR;
    GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
        (_("Failed to decode JPEG image")),
        ("Picture is corrupt or unhandled YUV/RGB/grayscale layout"), ret);
    jpeg_abort_decompress (&dec->cinfo);
    return GST_FLOW_ERROR;
  }
}
1143
1144 static GstFlowReturn
gst_jpeg_dec_decode(GstJpegDec * dec,GstVideoFrame * vframe,guint width,guint height,guint field,guint num_fields)1145 gst_jpeg_dec_decode (GstJpegDec * dec, GstVideoFrame * vframe, guint width,
1146 guint height, guint field, guint num_fields)
1147 {
1148 GstFlowReturn ret = GST_FLOW_OK;
1149
1150 if (dec->cinfo.jpeg_color_space == JCS_RGB) {
1151 gst_jpeg_dec_decode_rgb (dec, vframe, field, num_fields);
1152 } else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
1153 gst_jpeg_dec_decode_grayscale (dec, vframe, field, num_fields);
1154 } else {
1155 GST_LOG_OBJECT (dec, "decompressing (required scanline buffer height = %u)",
1156 dec->cinfo.rec_outbuf_height);
1157
1158 /* For some widths jpeglib requires more horizontal padding than I420
1159 * provides. In those cases we need to decode into separate buffers and then
1160 * copy over the data into our final picture buffer, otherwise jpeglib might
1161 * write over the end of a line into the beginning of the next line,
1162 * resulting in blocky artifacts on the left side of the picture. */
1163 if (G_UNLIKELY (width % (dec->cinfo.max_h_samp_factor * DCTSIZE) != 0
1164 || dec->cinfo.comp_info[0].h_samp_factor != 2
1165 || dec->cinfo.comp_info[1].h_samp_factor != 1
1166 || dec->cinfo.comp_info[2].h_samp_factor != 1)) {
1167 GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
1168 "indirect decoding using extra buffer copy");
1169 gst_jpeg_dec_decode_indirect (dec, vframe,
1170 dec->cinfo.comp_info[0].v_samp_factor,
1171 dec->cinfo.comp_info[0].h_samp_factor, dec->cinfo.num_components,
1172 field, num_fields);
1173 } else {
1174 ret = gst_jpeg_dec_decode_direct (dec, vframe, field, num_fields);
1175 }
1176 }
1177
1178 GST_LOG_OBJECT (dec, "decompressing finished: %s", gst_flow_get_name (ret));
1179
1180 if (G_UNLIKELY (ret != GST_FLOW_OK)) {
1181 jpeg_abort_decompress (&dec->cinfo);
1182 } else {
1183 jpeg_finish_decompress (&dec->cinfo);
1184 }
1185
1186 return ret;
1187 }
1188
1189 static GstFlowReturn
gst_jpeg_dec_handle_frame(GstVideoDecoder * bdec,GstVideoCodecFrame * frame)1190 gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec, GstVideoCodecFrame * frame)
1191 {
1192 GstFlowReturn ret = GST_FLOW_OK;
1193 GstJpegDec *dec = (GstJpegDec *) bdec;
1194 GstVideoFrame vframe;
1195 gint num_fields; /* number of fields (1 or 2) */
1196 gint output_height; /* height of output image (one or two fields) */
1197 gint height; /* height of current frame (whole image or a field) */
1198 gint width;
1199 guint code;
1200 gboolean need_unmap = TRUE;
1201 GstVideoCodecState *state = NULL;
1202 gboolean release_frame = TRUE;
1203 gboolean has_eoi;
1204 guint8 *data;
1205 gsize nbytes;
1206
1207 gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
1208
1209 data = dec->current_frame_map.data;
1210 nbytes = dec->current_frame_map.size;
1211 has_eoi = ((data[nbytes - 2] == 0xff) && (data[nbytes - 1] == 0xd9));
1212
1213 /* some cameras fail to send an end-of-image marker (EOI),
1214 * add it if that is the case. */
1215 if (!has_eoi) {
1216 GstMapInfo map;
1217 GstBuffer *eoibuf = gst_buffer_new_and_alloc (2);
1218
1219 /* unmap, will add EOI and remap at the end */
1220 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1221
1222 gst_buffer_map (eoibuf, &map, GST_MAP_WRITE);
1223 map.data[0] = 0xff;
1224 map.data[1] = 0xd9;
1225 gst_buffer_unmap (eoibuf, &map);
1226
1227 /* append to input buffer, and remap */
1228 frame->input_buffer = gst_buffer_append (frame->input_buffer, eoibuf);
1229
1230 gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
1231 GST_DEBUG ("fixup EOI marker added");
1232 }
1233
1234 dec->current_frame = frame;
1235 dec->cinfo.src->next_input_byte = dec->current_frame_map.data;
1236 dec->cinfo.src->bytes_in_buffer = dec->current_frame_map.size;
1237
1238 if (setjmp (dec->jerr.setjmp_buffer)) {
1239 code = dec->jerr.pub.msg_code;
1240
1241 if (code == JERR_INPUT_EOF) {
1242 GST_DEBUG ("jpeg input EOF error, we probably need more data");
1243 goto need_more_data;
1244 }
1245 goto decode_error;
1246 }
1247
1248 /* read header and check values */
1249 ret = gst_jpeg_dec_prepare_decode (dec);
1250 if (G_UNLIKELY (ret == GST_FLOW_ERROR))
1251 goto done;
1252
1253 width = dec->cinfo.output_width;
1254 height = dec->cinfo.output_height;
1255
1256 /* is it interlaced MJPEG? (we really don't want to scan the jpeg data
1257 * to see if there are two SOF markers in the packet to detect this) */
1258 if (gst_video_decoder_get_packetized (bdec) &&
1259 dec->input_state &&
1260 dec->input_state->info.height > height &&
1261 dec->input_state->info.height <= (height * 2)
1262 && dec->input_state->info.width == width) {
1263 GST_LOG_OBJECT (dec,
1264 "looks like an interlaced image: "
1265 "input width/height of %dx%d with JPEG frame width/height of %dx%d",
1266 dec->input_state->info.width, dec->input_state->info.height, width,
1267 height);
1268 output_height = dec->input_state->info.height;
1269 height = dec->input_state->info.height / 2;
1270 num_fields = 2;
1271 GST_LOG_OBJECT (dec, "field height=%d", height);
1272 } else {
1273 output_height = height;
1274 num_fields = 1;
1275 }
1276
1277 gst_jpeg_dec_negotiate (dec, width, output_height,
1278 dec->cinfo.jpeg_color_space, num_fields == 2);
1279
1280 state = gst_video_decoder_get_output_state (bdec);
1281 ret = gst_video_decoder_allocate_output_frame (bdec, frame);
1282 if (G_UNLIKELY (ret != GST_FLOW_OK))
1283 goto alloc_failed;
1284
1285 if (!gst_video_frame_map (&vframe, &state->info, frame->output_buffer,
1286 GST_MAP_READWRITE))
1287 goto alloc_failed;
1288
1289 if (setjmp (dec->jerr.setjmp_buffer)) {
1290 code = dec->jerr.pub.msg_code;
1291 gst_video_frame_unmap (&vframe);
1292 goto decode_error;
1293 }
1294
1295 GST_LOG_OBJECT (dec, "width %d, height %d, fields %d", width, output_height,
1296 num_fields);
1297
1298 ret = gst_jpeg_dec_decode (dec, &vframe, width, height, 1, num_fields);
1299 if (G_UNLIKELY (ret != GST_FLOW_OK)) {
1300 gst_video_frame_unmap (&vframe);
1301 goto decode_failed;
1302 }
1303
1304 if (setjmp (dec->jerr.setjmp_buffer)) {
1305 code = dec->jerr.pub.msg_code;
1306 gst_video_frame_unmap (&vframe);
1307 goto decode_error;
1308 }
1309
1310 /* decode second field if there is one */
1311 if (num_fields == 2) {
1312 GstVideoFormat field2_format;
1313
1314 /* Checked above before setting num_fields to 2 */
1315 g_assert (dec->input_state != NULL);
1316
1317 /* skip any chunk or padding bytes before the next SOI marker; both fields
1318 * are in one single buffer here, so direct access should be fine here */
1319 while (dec->jsrc.pub.bytes_in_buffer > 2 &&
1320 GST_READ_UINT16_BE (dec->jsrc.pub.next_input_byte) != 0xffd8) {
1321 --dec->jsrc.pub.bytes_in_buffer;
1322 ++dec->jsrc.pub.next_input_byte;
1323 }
1324
1325 if (gst_jpeg_dec_prepare_decode (dec) != GST_FLOW_OK) {
1326 GST_WARNING_OBJECT (dec, "problem reading jpeg header of 2nd field");
1327 /* FIXME: post a warning message here? */
1328 gst_video_frame_unmap (&vframe);
1329 goto decode_failed;
1330 }
1331
1332 /* check if format has changed for the second field */
1333 switch (dec->cinfo.jpeg_color_space) {
1334 case JCS_RGB:
1335 field2_format = GST_VIDEO_FORMAT_RGB;
1336 break;
1337 case JCS_GRAYSCALE:
1338 field2_format = GST_VIDEO_FORMAT_GRAY8;
1339 break;
1340 default:
1341 field2_format = GST_VIDEO_FORMAT_I420;
1342 break;
1343 }
1344
1345 GST_LOG_OBJECT (dec,
1346 "got for second field of interlaced image: "
1347 "input width/height of %dx%d with JPEG frame width/height of %dx%d",
1348 dec->input_state->info.width, dec->input_state->info.height,
1349 dec->cinfo.output_width, dec->cinfo.output_height);
1350
1351 if (dec->cinfo.output_width != GST_VIDEO_INFO_WIDTH (&state->info) ||
1352 GST_VIDEO_INFO_HEIGHT (&state->info) <= dec->cinfo.output_height ||
1353 GST_VIDEO_INFO_HEIGHT (&state->info) > (dec->cinfo.output_height * 2) ||
1354 field2_format != GST_VIDEO_INFO_FORMAT (&state->info)) {
1355 GST_WARNING_OBJECT (dec, "second field has different format than first");
1356 gst_video_frame_unmap (&vframe);
1357 goto decode_failed;
1358 }
1359
1360 ret = gst_jpeg_dec_decode (dec, &vframe, width, height, 2, 2);
1361 if (G_UNLIKELY (ret != GST_FLOW_OK)) {
1362 gst_video_frame_unmap (&vframe);
1363 goto decode_failed;
1364 }
1365 }
1366 gst_video_frame_unmap (&vframe);
1367
1368 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1369 ret = gst_video_decoder_finish_frame (bdec, frame);
1370 release_frame = FALSE;
1371 need_unmap = FALSE;
1372
1373 done:
1374
1375 exit:
1376
1377 if (need_unmap)
1378 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1379
1380 if (release_frame)
1381 gst_video_decoder_release_frame (bdec, frame);
1382
1383 if (state)
1384 gst_video_codec_state_unref (state);
1385
1386 return ret;
1387
1388 /* special cases */
1389 need_more_data:
1390 {
1391 GST_LOG_OBJECT (dec, "we need more data");
1392 ret = GST_FLOW_OK;
1393 goto exit;
1394 }
1395 /* ERRORS */
1396 decode_error:
1397 {
1398 gchar err_msg[JMSG_LENGTH_MAX];
1399
1400 dec->jerr.pub.format_message ((j_common_ptr) (&dec->cinfo), err_msg);
1401
1402 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1403 (_("Failed to decode JPEG image")), ("Decode error #%u: %s", code,
1404 err_msg), ret);
1405
1406 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1407 gst_video_decoder_drop_frame (bdec, frame);
1408 release_frame = FALSE;
1409 need_unmap = FALSE;
1410 jpeg_abort_decompress (&dec->cinfo);
1411
1412 goto done;
1413 }
1414 decode_failed:
1415 {
1416 /* already posted an error message */
1417 goto done;
1418 }
1419 alloc_failed:
1420 {
1421 const gchar *reason;
1422
1423 reason = gst_flow_get_name (ret);
1424
1425 GST_DEBUG_OBJECT (dec, "failed to alloc buffer, reason %s", reason);
1426 /* Reset for next time */
1427 jpeg_abort_decompress (&dec->cinfo);
1428 if (ret != GST_FLOW_EOS && ret != GST_FLOW_FLUSHING &&
1429 ret != GST_FLOW_NOT_LINKED) {
1430 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1431 (_("Failed to decode JPEG image")),
1432 ("Buffer allocation failed, reason: %s", reason), ret);
1433 jpeg_abort_decompress (&dec->cinfo);
1434 }
1435 goto exit;
1436 }
1437 }
1438
1439 static gboolean
gst_jpeg_dec_decide_allocation(GstVideoDecoder * bdec,GstQuery * query)1440 gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
1441 {
1442 GstBufferPool *pool = NULL;
1443 GstStructure *config;
1444
1445 if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
1446 return FALSE;
1447
1448 if (gst_query_get_n_allocation_pools (query) > 0)
1449 gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
1450
1451 if (pool == NULL)
1452 return FALSE;
1453
1454 config = gst_buffer_pool_get_config (pool);
1455 if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
1456 gst_buffer_pool_config_add_option (config,
1457 GST_BUFFER_POOL_OPTION_VIDEO_META);
1458 }
1459 gst_buffer_pool_set_config (pool, config);
1460 gst_object_unref (pool);
1461
1462 return TRUE;
1463 }
1464
1465 static gboolean
gst_jpeg_dec_sink_event(GstVideoDecoder * bdec,GstEvent * event)1466 gst_jpeg_dec_sink_event (GstVideoDecoder * bdec, GstEvent * event)
1467 {
1468 const GstSegment *segment;
1469
1470 if (GST_EVENT_TYPE (event) != GST_EVENT_SEGMENT)
1471 goto done;
1472
1473 gst_event_parse_segment (event, &segment);
1474
1475 if (segment->format == GST_FORMAT_TIME)
1476 gst_video_decoder_set_packetized (bdec, TRUE);
1477 else
1478 gst_video_decoder_set_packetized (bdec, FALSE);
1479
1480 done:
1481 return GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (bdec, event);
1482 }
1483
1484 static gboolean
gst_jpeg_dec_start(GstVideoDecoder * bdec)1485 gst_jpeg_dec_start (GstVideoDecoder * bdec)
1486 {
1487 GstJpegDec *dec = (GstJpegDec *) bdec;
1488
1489 dec->saw_header = FALSE;
1490 dec->parse_entropy_len = 0;
1491 dec->parse_resync = FALSE;
1492
1493 gst_video_decoder_set_packetized (bdec, FALSE);
1494
1495 return TRUE;
1496 }
1497
1498 static gboolean
gst_jpeg_dec_flush(GstVideoDecoder * bdec)1499 gst_jpeg_dec_flush (GstVideoDecoder * bdec)
1500 {
1501 GstJpegDec *dec = (GstJpegDec *) bdec;
1502
1503 jpeg_abort_decompress (&dec->cinfo);
1504 dec->parse_entropy_len = 0;
1505 dec->parse_resync = FALSE;
1506 dec->saw_header = FALSE;
1507
1508 return TRUE;
1509 }
1510
1511 static void
gst_jpeg_dec_set_property(GObject * object,guint prop_id,const GValue * value,GParamSpec * pspec)1512 gst_jpeg_dec_set_property (GObject * object, guint prop_id,
1513 const GValue * value, GParamSpec * pspec)
1514 {
1515 GstJpegDec *dec;
1516
1517 dec = GST_JPEG_DEC (object);
1518
1519 switch (prop_id) {
1520 case PROP_IDCT_METHOD:
1521 dec->idct_method = g_value_get_enum (value);
1522 break;
1523 #ifndef GST_REMOVE_DEPRECATED
1524 case PROP_MAX_ERRORS:
1525 g_atomic_int_set (&dec->max_errors, g_value_get_int (value));
1526 break;
1527 #endif
1528 default:
1529 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1530 break;
1531 }
1532 }
1533
1534 static void
gst_jpeg_dec_get_property(GObject * object,guint prop_id,GValue * value,GParamSpec * pspec)1535 gst_jpeg_dec_get_property (GObject * object, guint prop_id, GValue * value,
1536 GParamSpec * pspec)
1537 {
1538 GstJpegDec *dec;
1539
1540 dec = GST_JPEG_DEC (object);
1541
1542 switch (prop_id) {
1543 case PROP_IDCT_METHOD:
1544 g_value_set_enum (value, dec->idct_method);
1545 break;
1546 #ifndef GST_REMOVE_DEPRECATED
1547 case PROP_MAX_ERRORS:
1548 g_value_set_int (value, g_atomic_int_get (&dec->max_errors));
1549 break;
1550 #endif
1551 default:
1552 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1553 break;
1554 }
1555 }
1556
1557 static gboolean
gst_jpeg_dec_stop(GstVideoDecoder * bdec)1558 gst_jpeg_dec_stop (GstVideoDecoder * bdec)
1559 {
1560 GstJpegDec *dec = (GstJpegDec *) bdec;
1561
1562 gst_jpeg_dec_free_buffers (dec);
1563
1564 g_free (dec->scratch);
1565 dec->scratch = NULL;
1566 dec->scratch_size = 0;
1567
1568 return TRUE;
1569 }
1570