/*
 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include <assert.h>
#include <stdlib.h>
#include <string.h>
#include "./vp8_rtcd.h"
#include "./vpx_dsp_rtcd.h"
#include "./vpx_scale_rtcd.h"
#include "vpx/vpx_decoder.h"
#include "vpx/vp8dx.h"
#include "vpx/internal/vpx_codec_internal.h"
#include "vpx_version.h"
#include "common/alloccommon.h"
#include "common/common.h"
#include "common/onyxd.h"
#include "decoder/onyxd_int.h"
#include "vpx_dsp/vpx_dsp_common.h"
#include "vpx_mem/vpx_mem.h"
#include "vpx_ports/system_state.h"
#if CONFIG_ERROR_CONCEALMENT
#include "decoder/error_concealment.h"
#endif
#include "decoder/decoderthreading.h"

#define VP8_CAP_POSTPROC (CONFIG_POSTPROC ? VPX_CODEC_CAP_POSTPROC : 0)
#define VP8_CAP_ERROR_CONCEALMENT \
  (CONFIG_ERROR_CONCEALMENT ? VPX_CODEC_CAP_ERROR_CONCEALMENT : 0)

typedef vpx_codec_stream_info_t vp8_stream_info_t;

/* Structures for handling memory allocations */
typedef enum { VP8_SEG_ALG_PRIV = 256, VP8_SEG_MAX } mem_seg_id_t;
#define NELEMENTS(x) ((int)(sizeof(x) / sizeof((x)[0])))

struct vpx_codec_alg_priv {
  vpx_codec_priv_t base;
  vpx_codec_dec_cfg_t cfg;
  vp8_stream_info_t si;
  int decoder_init;
#if CONFIG_MULTITHREAD
  // Restart threads on the next frame if set to 1.
  // This is set when an error occurs during multithreaded decoding and all
  // threads have been shut down.
  int restart_threads;
#endif
  int postproc_cfg_set;
  vp8_postproc_cfg_t postproc_cfg;
  vpx_decrypt_cb decrypt_cb;
  void *decrypt_state;
  vpx_image_t img;
  int img_setup;
  struct frame_buffers yv12_frame_buffers;
  void *user_priv;
  FRAGMENT_DATA fragments;
};

static int vp8_init_ctx(vpx_codec_ctx_t *ctx) {
  vpx_codec_alg_priv_t *priv =
      (vpx_codec_alg_priv_t *)vpx_calloc(1, sizeof(*priv));
  if (!priv) return 1;

  ctx->priv = (vpx_codec_priv_t *)priv;
  ctx->priv->init_flags = ctx->init_flags;

  priv->si.sz = sizeof(priv->si);
  priv->decrypt_cb = NULL;
  priv->decrypt_state = NULL;

  if (ctx->config.dec) {
    /* Update the reference to the config structure to an internal copy. */
    priv->cfg = *ctx->config.dec;
    ctx->config.dec = &priv->cfg;
  }

  return 0;
}

static vpx_codec_err_t vp8_init(vpx_codec_ctx_t *ctx,
                                vpx_codec_priv_enc_mr_cfg_t *data) {
  vpx_codec_err_t res = VPX_CODEC_OK;
  (void)data;

  vp8_rtcd();
  vpx_dsp_rtcd();
  vpx_scale_rtcd();

  /* This function only allocates space for the vpx_codec_alg_priv_t
   * structure. More memory may be required at the time the stream
   * information becomes known.
   */
  if (!ctx->priv) {
    vpx_codec_alg_priv_t *priv;

    if (vp8_init_ctx(ctx)) return VPX_CODEC_MEM_ERROR;

    priv = (vpx_codec_alg_priv_t *)ctx->priv;

    /* Initialize the number of fragments to zero. */
    priv->fragments.count = 0;
    /* Record whether input fragments are enabled. */
    priv->fragments.enabled =
        (priv->base.init_flags & VPX_CODEC_USE_INPUT_FRAGMENTS);

    /* The post-processing level is initialized to do nothing. */
  }

  return res;
}

static vpx_codec_err_t vp8_destroy(vpx_codec_alg_priv_t *ctx) {
  vp8_remove_decoder_instances(&ctx->yv12_frame_buffers);

  vpx_free(ctx);

  return VPX_CODEC_OK;
}

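/* Annotate selected pointer arguments as non-NULL for clang's static
 * analyzer. The attribute expands to nothing in regular builds, so it has no
 * effect on code generation. */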
#ifdef __clang_analyzer__
#define FUNC_ATTR_NONNULL(...) __attribute__((nonnull(__VA_ARGS__)))
#else
#define FUNC_ATTR_NONNULL(...)
#endif

static vpx_codec_err_t vp8_peek_si_internal(const uint8_t *data,
                                            unsigned int data_sz,
                                            vpx_codec_stream_info_t *si,
                                            vpx_decrypt_cb decrypt_cb,
                                            void *decrypt_state) FUNC_ATTR_NONNULL(1) {
  vpx_codec_err_t res = VPX_CODEC_OK;

  assert(data != NULL);

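  /* Reject empty buffers and ranges that would wrap around the end of the
   * address space; neither can hold a valid frame header. */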
  if (data + data_sz <= data) {
    res = VPX_CODEC_INVALID_PARAM;
  } else {
    /* Parse the uncompressed part of the key frame header.
     * 3 bytes: frame tag, including version, frame type and the first
     *          partition size
     * 3 bytes: sync code (0x9d, 0x01, 0x2a)
     * 4 bytes: image width and height in the lowest 14 bits of each
     *          2-byte value.
     */
    uint8_t clear_buffer[10];
    const uint8_t *clear = data;
    if (decrypt_cb) {
      int n = VPXMIN(sizeof(clear_buffer), data_sz);
      decrypt_cb(decrypt_state, data, clear_buffer, n);
      clear = clear_buffer;
    }
    si->is_kf = 0;

    if (data_sz >= 10 && !(clear[0] & 0x01)) { /* I-Frame */
      si->is_kf = 1;

      /* vet via sync code */
      if (clear[3] != 0x9d || clear[4] != 0x01 || clear[5] != 0x2a) {
        return VPX_CODEC_UNSUP_BITSTREAM;
      }

      si->w = (clear[6] | (clear[7] << 8)) & 0x3fff;
      si->h = (clear[8] | (clear[9] << 8)) & 0x3fff;

      /*printf("w=%d, h=%d\n", si->w, si->h);*/
      if (!(si->h && si->w)) res = VPX_CODEC_CORRUPT_FRAME;
    } else {
      res = VPX_CODEC_UNSUP_BITSTREAM;
    }
  }

  return res;
}

static vpx_codec_err_t vp8_peek_si(const uint8_t *data, unsigned int data_sz,
                                   vpx_codec_stream_info_t *si) {
  return vp8_peek_si_internal(data, data_sz, si, NULL, NULL);
}

static vpx_codec_err_t vp8_get_si(vpx_codec_alg_priv_t *ctx,
                                  vpx_codec_stream_info_t *si) {
  unsigned int sz;

  if (si->sz >= sizeof(vp8_stream_info_t)) {
    sz = sizeof(vp8_stream_info_t);
  } else {
    sz = sizeof(vpx_codec_stream_info_t);
  }

  memcpy(si, &ctx->si, sz);
  si->sz = sz;

  return VPX_CODEC_OK;
}

static vpx_codec_err_t update_error_state(
    vpx_codec_alg_priv_t *ctx, const struct vpx_internal_error_info *error) {
  vpx_codec_err_t res;

  if ((res = error->error_code)) {
    ctx->base.err_detail = error->has_detail ? error->detail : NULL;
  }

  return res;
}

static void yuvconfig2image(vpx_image_t *img, const YV12_BUFFER_CONFIG *yv12,
                            void *user_priv) {
  /** vpx_img_wrap() doesn't allow specifying independent strides for
   * the Y, U, and V planes, nor other alignment adjustments that
   * might be representable by a YV12_BUFFER_CONFIG, so we just
   * initialize all the fields. */
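  /* Note: img->w / img->h describe the allocated surface (the luma stride and
   * the border-padded, 16-aligned height), while d_w / d_h carry the display
   * dimensions. */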
  img->fmt = VPX_IMG_FMT_I420;
  img->w = yv12->y_stride;
  img->h = (yv12->y_height + 2 * VP8BORDERINPIXELS + 15) & ~15;
  img->d_w = img->r_w = yv12->y_width;
  img->d_h = img->r_h = yv12->y_height;
  img->x_chroma_shift = 1;
  img->y_chroma_shift = 1;
  img->planes[VPX_PLANE_Y] = yv12->y_buffer;
  img->planes[VPX_PLANE_U] = yv12->u_buffer;
  img->planes[VPX_PLANE_V] = yv12->v_buffer;
  img->planes[VPX_PLANE_ALPHA] = NULL;
  img->stride[VPX_PLANE_Y] = yv12->y_stride;
  img->stride[VPX_PLANE_U] = yv12->uv_stride;
  img->stride[VPX_PLANE_V] = yv12->uv_stride;
  img->stride[VPX_PLANE_ALPHA] = yv12->y_stride;
  img->bit_depth = 8;
  img->bps = 12;
  img->user_priv = user_priv;
  img->img_data = yv12->buffer_alloc;
  img->img_data_owner = 0;
  img->self_allocd = 0;
}

static int update_fragments(vpx_codec_alg_priv_t *ctx, const uint8_t *data,
                            unsigned int data_sz,
                            volatile vpx_codec_err_t *res) {
  *res = VPX_CODEC_OK;

  if (ctx->fragments.count == 0) {
    /* New frame, reset fragment pointers and sizes */
    memset((void *)ctx->fragments.ptrs, 0, sizeof(ctx->fragments.ptrs));
    memset(ctx->fragments.sizes, 0, sizeof(ctx->fragments.sizes));
  }
  if (ctx->fragments.enabled && !(data == NULL && data_sz == 0)) {
    /* Store a pointer to this fragment and return. We haven't
     * received the complete frame yet, so we will wait with decoding.
     */
    ctx->fragments.ptrs[ctx->fragments.count] = data;
    ctx->fragments.sizes[ctx->fragments.count] = data_sz;
    ctx->fragments.count++;
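    /* A frame consists of the first partition plus at most
     * (1 << EIGHT_PARTITION) token partitions, so a larger fragment count
     * indicates malformed input. */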
    if (ctx->fragments.count > (1 << EIGHT_PARTITION) + 1) {
      ctx->fragments.count = 0;
      *res = VPX_CODEC_INVALID_PARAM;
      return -1;
    }
    return 0;
  }

  if (!ctx->fragments.enabled && (data == NULL && data_sz == 0)) {
    return 0;
  }

  if (!ctx->fragments.enabled) {
    ctx->fragments.ptrs[0] = data;
    ctx->fragments.sizes[0] = data_sz;
    ctx->fragments.count = 1;
  }

  return 1;
}

static vpx_codec_err_t vp8_decode(vpx_codec_alg_priv_t *ctx,
                                  const uint8_t *data, unsigned int data_sz,
                                  void *user_priv, long deadline) {
  volatile vpx_codec_err_t res = VPX_CODEC_INVALID_PARAM;
  volatile unsigned int resolution_change = 0;
  unsigned int w, h;

  if (!ctx->fragments.enabled && (data == NULL && data_sz == 0)) {
    return 0;
  }

  /* Update the input fragment data */
  if (update_fragments(ctx, data, data_sz, &res) <= 0) return res;

  /* Determine the stream parameters. Note that we rely on peek_si to
   * validate that we have a buffer that does not wrap around the top
   * of the heap.
   */
  w = ctx->si.w;
  h = ctx->si.h;

  if (ctx->fragments.ptrs[0]) {
    res = vp8_peek_si_internal(ctx->fragments.ptrs[0], ctx->fragments.sizes[0],
                               &ctx->si, ctx->decrypt_cb, ctx->decrypt_state);
  }

  if ((res == VPX_CODEC_UNSUP_BITSTREAM) && !ctx->si.is_kf) {
    /* The peek function returns an error for non-key frames; in this case
     * that is not an error. */
    res = VPX_CODEC_OK;
  }

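  /* Decoding has to begin with a key frame; refuse anything else until the
   * decoder has been initialized from one. */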
  if (!ctx->decoder_init && !ctx->si.is_kf) res = VPX_CODEC_UNSUP_BITSTREAM;

  if ((ctx->si.h != h) || (ctx->si.w != w)) resolution_change = 1;

#if CONFIG_MULTITHREAD
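  /* If a previous decode error shut the worker threads down, recreate them
   * (and the multithreaded row buffers) before decoding resumes. */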
  if (!res && ctx->restart_threads) {
    struct frame_buffers *fb = &ctx->yv12_frame_buffers;
    VP8D_COMP *pbi = ctx->yv12_frame_buffers.pbi[0];
    VP8_COMMON *const pc = &pbi->common;
    if (setjmp(pbi->common.error.jmp)) {
      vp8_remove_decoder_instances(fb);
      vp8_zero(fb->pbi);
      vpx_clear_system_state();
      return VPX_CODEC_ERROR;
    }
    pbi->common.error.setjmp = 1;
    pbi->max_threads = ctx->cfg.threads;
    vp8_decoder_create_threads(pbi);
    if (vpx_atomic_load_acquire(&pbi->b_multithreaded_rd)) {
      vp8mt_alloc_temp_buffers(pbi, pc->Width, pc->mb_rows);
    }
    ctx->restart_threads = 0;
    pbi->common.error.setjmp = 0;
  }
#endif
  /* Initialize the decoder instance on the first frame. */
  if (!res && !ctx->decoder_init) {
    VP8D_CONFIG oxcf;

    oxcf.Width = ctx->si.w;
    oxcf.Height = ctx->si.h;
    oxcf.Version = 9;
    oxcf.postprocess = 0;
    oxcf.max_threads = ctx->cfg.threads;
    oxcf.error_concealment =
        (ctx->base.init_flags & VPX_CODEC_USE_ERROR_CONCEALMENT);

    /* If postprocessing was enabled by the application and a
     * configuration has not been provided, default it.
     */
    if (!ctx->postproc_cfg_set &&
        (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC)) {
      ctx->postproc_cfg.post_proc_flag =
          VP8_DEBLOCK | VP8_DEMACROBLOCK | VP8_MFQE;
      ctx->postproc_cfg.deblocking_level = 4;
      ctx->postproc_cfg.noise_level = 0;
    }

    res = vp8_create_decoder_instances(&ctx->yv12_frame_buffers, &oxcf);
    if (res == VPX_CODEC_OK) ctx->decoder_init = 1;
  }

362
363 /* Set these even if already initialized. The caller may have changed the
364 * decrypt config between frames.
365 */
366 if (ctx->decoder_init) {
367 ctx->yv12_frame_buffers.pbi[0]->decrypt_cb = ctx->decrypt_cb;
368 ctx->yv12_frame_buffers.pbi[0]->decrypt_state = ctx->decrypt_state;
369 }
370
371 if (!res) {
372 VP8D_COMP *pbi = ctx->yv12_frame_buffers.pbi[0];
373 VP8_COMMON *const pc = &pbi->common;
374 if (resolution_change) {
375 MACROBLOCKD *const xd = &pbi->mb;
376 #if CONFIG_MULTITHREAD
377 int i;
378 #endif
379 pc->Width = ctx->si.w;
380 pc->Height = ctx->si.h;
381 {
382 int prev_mb_rows = pc->mb_rows;
383
384 if (setjmp(pbi->common.error.jmp)) {
385 pbi->common.error.setjmp = 0;
386 /* on failure clear the cached resolution to ensure a full
387 * reallocation is attempted on resync. */
388 ctx->si.w = 0;
389 ctx->si.h = 0;
390 vpx_clear_system_state();
391 /* same return value as used in vp8dx_receive_compressed_data */
392 return -1;
393 }
394
395 pbi->common.error.setjmp = 1;
396
397 if (pc->Width <= 0) {
398 pc->Width = w;
399 vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
400 "Invalid frame width");
401 }
402
403 if (pc->Height <= 0) {
404 pc->Height = h;
405 vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
406 "Invalid frame height");
407 }
408
409 if (vp8_alloc_frame_buffers(pc, pc->Width, pc->Height)) {
410 vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
411 "Failed to allocate frame buffers");
412 }
413
414 xd->pre = pc->yv12_fb[pc->lst_fb_idx];
415 xd->dst = pc->yv12_fb[pc->new_fb_idx];
416
417 #if CONFIG_MULTITHREAD
418 for (i = 0; i < pbi->allocated_decoding_thread_count; ++i) {
419 pbi->mb_row_di[i].mbd.dst = pc->yv12_fb[pc->new_fb_idx];
420 vp8_build_block_doffsets(&pbi->mb_row_di[i].mbd);
421 }
422 #endif
423 vp8_build_block_doffsets(&pbi->mb);
424
        /* allocate memory for last frame MODE_INFO array */
#if CONFIG_ERROR_CONCEALMENT

        if (pbi->ec_enabled) {
          /* old prev_mip was released by vp8_de_alloc_frame_buffers()
           * called in vp8_alloc_frame_buffers() */
          pc->prev_mip = vpx_calloc((pc->mb_cols + 1) * (pc->mb_rows + 1),
                                    sizeof(MODE_INFO));

          if (!pc->prev_mip) {
            vp8_de_alloc_frame_buffers(pc);
            vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                               "Failed to allocate "
                               "last frame MODE_INFO array");
          }

          pc->prev_mi = pc->prev_mip + pc->mode_info_stride + 1;

          if (vp8_alloc_overlap_lists(pbi))
            vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                               "Failed to allocate overlap lists "
                               "for error concealment");
        }

#endif

#if CONFIG_MULTITHREAD
        if (vpx_atomic_load_acquire(&pbi->b_multithreaded_rd)) {
          vp8mt_alloc_temp_buffers(pbi, pc->Width, prev_mb_rows);
        }
#else
        (void)prev_mb_rows;
#endif
      }

      pbi->common.error.setjmp = 0;

      /* required to get past the first get_free_fb() call */
      pbi->common.fb_idx_ref_cnt[0] = 0;
    }

    if (setjmp(pbi->common.error.jmp)) {
      /* We do not know whether the missing frame(s) were supposed to update
       * any of the reference buffers, so we act conservatively and mark only
       * the last buffer as corrupted.
       */
      pc->yv12_fb[pc->lst_fb_idx].corrupted = 1;

      if (pc->fb_idx_ref_cnt[pc->new_fb_idx] > 0) {
        pc->fb_idx_ref_cnt[pc->new_fb_idx]--;
      }
      pc->error.setjmp = 0;
#if CONFIG_MULTITHREAD
      if (pbi->restart_threads) {
        ctx->si.w = 0;
        ctx->si.h = 0;
        ctx->restart_threads = 1;
      }
#endif
      res = update_error_state(ctx, &pbi->common.error);
      return res;
    }

    pbi->common.error.setjmp = 1;

    /* update the pbi fragment data */
    pbi->fragments = ctx->fragments;
#if CONFIG_MULTITHREAD
    pbi->restart_threads = 0;
#endif
    ctx->user_priv = user_priv;
    if (vp8dx_receive_compressed_data(pbi, data_sz, data, deadline)) {
      res = update_error_state(ctx, &pbi->common.error);
    }

    /* get ready for the next series of fragments */
    ctx->fragments.count = 0;
  }

  return res;
}

static vpx_image_t *vp8_get_frame(vpx_codec_alg_priv_t *ctx,
                                  vpx_codec_iter_t *iter) {
  vpx_image_t *img = NULL;

  /* iter acts as a flip flop, so an image is only returned on the first
   * call to get_frame.
   */
  if (!(*iter) && ctx->yv12_frame_buffers.pbi[0]) {
    YV12_BUFFER_CONFIG sd;
    int64_t time_stamp = 0, time_end_stamp = 0;
    vp8_ppflags_t flags;
    vp8_zero(flags);

    if (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC) {
      flags.post_proc_flag = ctx->postproc_cfg.post_proc_flag;
      flags.deblocking_level = ctx->postproc_cfg.deblocking_level;
      flags.noise_level = ctx->postproc_cfg.noise_level;
    }

    if (0 == vp8dx_get_raw_frame(ctx->yv12_frame_buffers.pbi[0], &sd,
                                 &time_stamp, &time_end_stamp, &flags)) {
      yuvconfig2image(&ctx->img, &sd, ctx->user_priv);

      img = &ctx->img;
      *iter = img;
    }
  }

  return img;
}

static vpx_codec_err_t image2yuvconfig(const vpx_image_t *img,
                                       YV12_BUFFER_CONFIG *yv12) {
  const int y_w = img->d_w;
  const int y_h = img->d_h;
  const int uv_w = (img->d_w + 1) / 2;
  const int uv_h = (img->d_h + 1) / 2;
  vpx_codec_err_t res = VPX_CODEC_OK;
  yv12->y_buffer = img->planes[VPX_PLANE_Y];
  yv12->u_buffer = img->planes[VPX_PLANE_U];
  yv12->v_buffer = img->planes[VPX_PLANE_V];

  yv12->y_crop_width = y_w;
  yv12->y_crop_height = y_h;
  yv12->y_width = y_w;
  yv12->y_height = y_h;
  yv12->uv_crop_width = uv_w;
  yv12->uv_crop_height = uv_h;
  yv12->uv_width = uv_w;
  yv12->uv_height = uv_h;

  yv12->y_stride = img->stride[VPX_PLANE_Y];
  yv12->uv_stride = img->stride[VPX_PLANE_U];

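  /* Derive the border from the luma stride, assuming the padding is split
   * evenly between the left and right edges. */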
  yv12->border = (img->stride[VPX_PLANE_Y] - img->d_w) / 2;
  return res;
}

static vpx_codec_err_t vp8_set_reference(vpx_codec_alg_priv_t *ctx,
                                         va_list args) {
  vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *);

  if (data) {
    vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data;
    YV12_BUFFER_CONFIG sd;

    image2yuvconfig(&frame->img, &sd);

    return vp8dx_set_reference(ctx->yv12_frame_buffers.pbi[0],
                               frame->frame_type, &sd);
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }
}

static vpx_codec_err_t vp8_get_reference(vpx_codec_alg_priv_t *ctx,
                                         va_list args) {
  vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *);

  if (data) {
    vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data;
    YV12_BUFFER_CONFIG sd;

    image2yuvconfig(&frame->img, &sd);

    return vp8dx_get_reference(ctx->yv12_frame_buffers.pbi[0],
                               frame->frame_type, &sd);
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }
}

static vpx_codec_err_t vp8_get_quantizer(vpx_codec_alg_priv_t *ctx,
                                         va_list args) {
  int *const arg = va_arg(args, int *);
  if (arg == NULL) return VPX_CODEC_INVALID_PARAM;
  *arg = vp8dx_get_quantizer(ctx->yv12_frame_buffers.pbi[0]);
  return VPX_CODEC_OK;
}

static vpx_codec_err_t vp8_set_postproc(vpx_codec_alg_priv_t *ctx,
                                        va_list args) {
#if CONFIG_POSTPROC
  vp8_postproc_cfg_t *data = va_arg(args, vp8_postproc_cfg_t *);

  if (data) {
    ctx->postproc_cfg_set = 1;
    ctx->postproc_cfg = *((vp8_postproc_cfg_t *)data);
    return VPX_CODEC_OK;
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }

#else
  (void)ctx;
  (void)args;
  return VPX_CODEC_INCAPABLE;
#endif
}

static vpx_codec_err_t vp8_get_last_ref_updates(vpx_codec_alg_priv_t *ctx,
                                                va_list args) {
  int *update_info = va_arg(args, int *);

  if (update_info) {
    VP8D_COMP *pbi = (VP8D_COMP *)ctx->yv12_frame_buffers.pbi[0];

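    /* Combine the per-buffer refresh flags from the last decoded frame into
     * a VP8_*_FRAME bitmask. */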
    *update_info = pbi->common.refresh_alt_ref_frame * (int)VP8_ALTR_FRAME +
                   pbi->common.refresh_golden_frame * (int)VP8_GOLD_FRAME +
                   pbi->common.refresh_last_frame * (int)VP8_LAST_FRAME;

    return VPX_CODEC_OK;
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }
}

static vpx_codec_err_t vp8_get_last_ref_frame(vpx_codec_alg_priv_t *ctx,
                                              va_list args) {
  int *ref_info = va_arg(args, int *);

  if (ref_info) {
    VP8D_COMP *pbi = (VP8D_COMP *)ctx->yv12_frame_buffers.pbi[0];
    VP8_COMMON *oci = &pbi->common;
    *ref_info =
        (vp8dx_references_buffer(oci, ALTREF_FRAME) ? VP8_ALTR_FRAME : 0) |
        (vp8dx_references_buffer(oci, GOLDEN_FRAME) ? VP8_GOLD_FRAME : 0) |
        (vp8dx_references_buffer(oci, LAST_FRAME) ? VP8_LAST_FRAME : 0);

    return VPX_CODEC_OK;
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }
}

static vpx_codec_err_t vp8_get_frame_corrupted(vpx_codec_alg_priv_t *ctx,
                                               va_list args) {
  int *corrupted = va_arg(args, int *);
  VP8D_COMP *pbi = (VP8D_COMP *)ctx->yv12_frame_buffers.pbi[0];

  if (corrupted && pbi) {
    const YV12_BUFFER_CONFIG *const frame = pbi->common.frame_to_show;
    if (frame == NULL) return VPX_CODEC_ERROR;
    *corrupted = frame->corrupted;
    return VPX_CODEC_OK;
  } else {
    return VPX_CODEC_INVALID_PARAM;
  }
}

static vpx_codec_err_t vp8_set_decryptor(vpx_codec_alg_priv_t *ctx,
                                         va_list args) {
  vpx_decrypt_init *init = va_arg(args, vpx_decrypt_init *);

  if (init) {
    ctx->decrypt_cb = init->decrypt_cb;
    ctx->decrypt_state = init->decrypt_state;
  } else {
    ctx->decrypt_cb = NULL;
    ctx->decrypt_state = NULL;
  }
  return VPX_CODEC_OK;
}

vpx_codec_ctrl_fn_map_t vp8_ctf_maps[] = {
  { VP8_SET_REFERENCE, vp8_set_reference },
  { VP8_COPY_REFERENCE, vp8_get_reference },
  { VP8_SET_POSTPROC, vp8_set_postproc },
  { VP8D_GET_LAST_REF_UPDATES, vp8_get_last_ref_updates },
  { VP8D_GET_FRAME_CORRUPTED, vp8_get_frame_corrupted },
  { VP8D_GET_LAST_REF_USED, vp8_get_last_ref_frame },
  { VPXD_GET_LAST_QUANTIZER, vp8_get_quantizer },
  { VPXD_SET_DECRYPTOR, vp8_set_decryptor },
  { -1, NULL },
};

#ifndef VERSION_STRING
#define VERSION_STRING
#endif
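/* Usage sketch: the interface below is normally driven through the generic
 * vpx_codec_* API rather than called directly. A minimal decode loop might
 * look like the following; 'buf'/'buf_sz' (one compressed frame, as demuxed
 * by the container layer) is assumed here.
 *
 *   vpx_codec_ctx_t codec;
 *   if (vpx_codec_dec_init(&codec, vpx_codec_vp8_dx(), NULL, 0)) abort();
 *   if (vpx_codec_decode(&codec, buf, buf_sz, NULL, 0)) abort();
 *   vpx_codec_iter_t iter = NULL;
 *   const vpx_image_t *img;
 *   while ((img = vpx_codec_get_frame(&codec, &iter)) != NULL) {
 *     // consume img->planes[] / img->stride[]
 *   }
 *   vpx_codec_destroy(&codec);
 */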
CODEC_INTERFACE(vpx_codec_vp8_dx) = {
  "WebM Project VP8 Decoder" VERSION_STRING,
  VPX_CODEC_INTERNAL_ABI_VERSION,
  VPX_CODEC_CAP_DECODER | VP8_CAP_POSTPROC | VP8_CAP_ERROR_CONCEALMENT |
      VPX_CODEC_CAP_INPUT_FRAGMENTS,
  /* vpx_codec_caps_t caps; */
  vp8_init,     /* vpx_codec_init_fn_t init; */
  vp8_destroy,  /* vpx_codec_destroy_fn_t destroy; */
  vp8_ctf_maps, /* vpx_codec_ctrl_fn_map_t *ctrl_maps; */
  {
      vp8_peek_si,   /* vpx_codec_peek_si_fn_t peek_si; */
      vp8_get_si,    /* vpx_codec_get_si_fn_t get_si; */
      vp8_decode,    /* vpx_codec_decode_fn_t decode; */
      vp8_get_frame, /* vpx_codec_frame_get_fn_t frame_get; */
      NULL,
  },
  {
      /* encoder functions */
      0, NULL, /* vpx_codec_enc_cfg_map_t */
      NULL,    /* vpx_codec_encode_fn_t */
      NULL,    /* vpx_codec_get_cx_data_fn_t */
      NULL,    /* vpx_codec_enc_config_set_fn_t */
      NULL,    /* vpx_codec_get_global_headers_fn_t */
      NULL,    /* vpx_codec_get_preview_frame_fn_t */
      NULL     /* vpx_codec_enc_mr_get_mem_loc_fn_t */
  }
};