/*
 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */
#include <assert.h>
#include <stdlib.h>
#include <string.h>
#include "vpx/vpx_decoder.h"
#include "vpx/vp8dx.h"
#include "vpx/internal/vpx_codec_internal.h"
#include "./vpx_version.h"
#include "vp9/decoder/vp9_onyxd.h"
#include "vp9/decoder/vp9_onyxd_int.h"
#include "vp9/decoder/vp9_read_bit_buffer.h"
#include "vp9/vp9_iface_common.h"

/* Advertise postproc capability only when built with VP9 postprocessing. */
#define VP9_CAP_POSTPROC (CONFIG_VP9_POSTPROC ? VPX_CODEC_CAP_POSTPROC : 0)

typedef vpx_codec_stream_info_t vp9_stream_info_t;
26
/* Structures for handling memory allocations */
28
VP9_SEG_ALG_PRIV = 256,
31
#define NELEMENTS(x) ((int)(sizeof(x)/sizeof(x[0])))
33
static unsigned long priv_sz(const vpx_codec_dec_cfg_t *si,
34
vpx_codec_flags_t flags);
36
static const mem_req_t vp9_mem_req_segs[] = {
37
{VP9_SEG_ALG_PRIV, 0, 8, VPX_CODEC_MEM_ZERO, priv_sz},
38
{VP9_SEG_MAX, 0, 0, 0, NULL}
41
struct vpx_codec_alg_priv {
42
vpx_codec_priv_t base;
43
vpx_codec_mmap_t mmaps[NELEMENTS(vp9_mem_req_segs) - 1];
44
vpx_codec_dec_cfg_t cfg;
50
vp8_postproc_cfg_t postproc_cfg;
51
#if CONFIG_POSTPROC_VISUALIZER
52
unsigned int dbg_postproc_flag;
53
int dbg_color_ref_frame_flag;
54
int dbg_color_mb_modes_flag;
55
int dbg_color_b_modes_flag;
56
int dbg_display_mv_flag;
61
int invert_tile_order;
64
static unsigned long priv_sz(const vpx_codec_dec_cfg_t *si,
65
vpx_codec_flags_t flags) {
66
/* Although this declaration is constant, we can't use it in the requested
67
* segments list because we want to define the requested segments list
68
* before defining the private type (so that the number of memory maps is
72
return sizeof(vpx_codec_alg_priv_t);
75
static void vp9_init_ctx(vpx_codec_ctx_t *ctx, const vpx_codec_mmap_t *mmap) {
78
ctx->priv = mmap->base;
79
ctx->priv->sz = sizeof(*ctx->priv);
80
ctx->priv->iface = ctx->iface;
81
ctx->priv->alg_priv = mmap->base;
83
for (i = 0; i < NELEMENTS(ctx->priv->alg_priv->mmaps); i++)
84
ctx->priv->alg_priv->mmaps[i].id = vp9_mem_req_segs[i].id;
86
ctx->priv->alg_priv->mmaps[0] = *mmap;
87
ctx->priv->alg_priv->si.sz = sizeof(ctx->priv->alg_priv->si);
88
ctx->priv->init_flags = ctx->init_flags;
90
if (ctx->config.dec) {
91
/* Update the reference to the config structure to an internal copy. */
92
ctx->priv->alg_priv->cfg = *ctx->config.dec;
93
ctx->config.dec = &ctx->priv->alg_priv->cfg;
97
static void vp9_finalize_mmaps(vpx_codec_alg_priv_t *ctx) {
98
/* nothing to clean up */
101
static vpx_codec_err_t vp9_init(vpx_codec_ctx_t *ctx,
102
vpx_codec_priv_enc_mr_cfg_t *data) {
103
vpx_codec_err_t res = VPX_CODEC_OK;
105
/* This function only allocates space for the vpx_codec_alg_priv_t
106
* structure. More memory may be required at the time the stream
107
* information becomes known.
110
vpx_codec_mmap_t mmap;
112
mmap.id = vp9_mem_req_segs[0].id;
113
mmap.sz = sizeof(vpx_codec_alg_priv_t);
114
mmap.align = vp9_mem_req_segs[0].align;
115
mmap.flags = vp9_mem_req_segs[0].flags;
117
res = vpx_mmap_alloc(&mmap);
120
vp9_init_ctx(ctx, &mmap);
122
ctx->priv->alg_priv->defer_alloc = 1;
123
/*post processing level initialized to do nothing */
130
static vpx_codec_err_t vp9_destroy(vpx_codec_alg_priv_t *ctx) {
133
vp9_remove_decompressor(ctx->pbi);
135
for (i = NELEMENTS(ctx->mmaps) - 1; i >= 0; i--) {
136
if (ctx->mmaps[i].dtor)
137
ctx->mmaps[i].dtor(&ctx->mmaps[i]);
143
static vpx_codec_err_t vp9_peek_si(const uint8_t *data,
144
unsigned int data_sz,
145
vpx_codec_stream_info_t *si) {
146
if (data_sz <= 8) return VPX_CODEC_UNSUP_BITSTREAM;
147
if (data + data_sz <= data) return VPX_CODEC_INVALID_PARAM;
153
struct vp9_read_bit_buffer rb = { data, data + data_sz, 0, NULL, NULL };
154
const int frame_marker = vp9_rb_read_literal(&rb, 2);
155
const int version = vp9_rb_read_bit(&rb) | (vp9_rb_read_bit(&rb) << 1);
156
if (frame_marker != 0x2) return VPX_CODEC_UNSUP_BITSTREAM;
158
if (version > 1) return VPX_CODEC_UNSUP_BITSTREAM;
160
if (version != 0) return VPX_CODEC_UNSUP_BITSTREAM;
163
if (vp9_rb_read_bit(&rb)) { // show an existing frame
167
si->is_kf = !vp9_rb_read_bit(&rb);
172
rb.bit_offset += 1; // show frame
173
rb.bit_offset += 1; // error resilient
175
if (vp9_rb_read_literal(&rb, 8) != VP9_SYNC_CODE_0 ||
176
vp9_rb_read_literal(&rb, 8) != VP9_SYNC_CODE_1 ||
177
vp9_rb_read_literal(&rb, 8) != VP9_SYNC_CODE_2) {
178
return VPX_CODEC_UNSUP_BITSTREAM;
181
colorspace = vp9_rb_read_literal(&rb, 3);
182
if (colorspace != sRGB) {
183
rb.bit_offset += 1; // [16,235] (including xvycc) vs [0,255] range
185
rb.bit_offset += 2; // subsampling x/y
186
rb.bit_offset += 1; // has extra plane
190
rb.bit_offset += 1; // has extra plane
192
// RGB is only available in version 1
193
return VPX_CODEC_UNSUP_BITSTREAM;
197
// TODO(jzern): these are available on non-keyframes in intra only mode.
198
si->w = vp9_rb_read_literal(&rb, 16) + 1;
199
si->h = vp9_rb_read_literal(&rb, 16) + 1;
206
static vpx_codec_err_t vp9_get_si(vpx_codec_alg_priv_t *ctx,
207
vpx_codec_stream_info_t *si) {
210
if (si->sz >= sizeof(vp9_stream_info_t))
211
sz = sizeof(vp9_stream_info_t);
213
sz = sizeof(vpx_codec_stream_info_t);
215
memcpy(si, &ctx->si, sz);
222
static vpx_codec_err_t
223
update_error_state(vpx_codec_alg_priv_t *ctx,
224
const struct vpx_internal_error_info *error) {
227
if ((res = error->error_code))
228
ctx->base.err_detail = error->has_detail
235
static vpx_codec_err_t decode_one(vpx_codec_alg_priv_t *ctx,
236
const uint8_t **data,
237
unsigned int data_sz,
240
vpx_codec_err_t res = VPX_CODEC_OK;
244
/* Determine the stream parameters. Note that we rely on peek_si to
245
* validate that we have a buffer that does not wrap around the top
249
res = ctx->base.iface->dec.peek_si(*data, data_sz, &ctx->si);
252
/* Perform deferred allocations, if required */
253
if (!res && ctx->defer_alloc) {
256
for (i = 1; !res && i < NELEMENTS(ctx->mmaps); i++) {
257
vpx_codec_dec_cfg_t cfg;
261
ctx->mmaps[i].id = vp9_mem_req_segs[i].id;
262
ctx->mmaps[i].sz = vp9_mem_req_segs[i].sz;
263
ctx->mmaps[i].align = vp9_mem_req_segs[i].align;
264
ctx->mmaps[i].flags = vp9_mem_req_segs[i].flags;
266
if (!ctx->mmaps[i].sz)
267
ctx->mmaps[i].sz = vp9_mem_req_segs[i].calc_sz(&cfg,
268
ctx->base.init_flags);
270
res = vpx_mmap_alloc(&ctx->mmaps[i]);
274
vp9_finalize_mmaps(ctx);
276
ctx->defer_alloc = 0;
279
/* Initialize the decoder instance on the first frame*/
280
if (!res && !ctx->decoder_init) {
281
res = vpx_validate_mmaps(&ctx->si, ctx->mmaps,
282
vp9_mem_req_segs, NELEMENTS(vp9_mem_req_segs),
283
ctx->base.init_flags);
289
vp9_initialize_dec();
291
oxcf.width = ctx->si.w;
292
oxcf.height = ctx->si.h;
294
oxcf.postprocess = 0;
295
oxcf.max_threads = ctx->cfg.threads;
296
oxcf.inv_tile_order = ctx->invert_tile_order;
297
optr = vp9_create_decompressor(&oxcf);
299
/* If postprocessing was enabled by the application and a
300
* configuration has not been provided, default it.
302
if (!ctx->postproc_cfg_set
303
&& (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC)) {
304
ctx->postproc_cfg.post_proc_flag =
305
VP8_DEBLOCK | VP8_DEMACROBLOCK;
306
ctx->postproc_cfg.deblocking_level = 4;
307
ctx->postproc_cfg.noise_level = 0;
311
res = VPX_CODEC_ERROR;
316
ctx->decoder_init = 1;
319
if (!res && ctx->pbi) {
320
YV12_BUFFER_CONFIG sd;
321
int64_t time_stamp = 0, time_end_stamp = 0;
322
vp9_ppflags_t flags = {0};
324
if (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC) {
325
flags.post_proc_flag =
326
#if CONFIG_POSTPROC_VISUALIZER
327
((ctx->dbg_color_ref_frame_flag != 0) ?
328
VP9D_DEBUG_CLR_FRM_REF_BLKS : 0)
329
| ((ctx->dbg_color_mb_modes_flag != 0) ?
330
VP9D_DEBUG_CLR_BLK_MODES : 0)
331
| ((ctx->dbg_color_b_modes_flag != 0) ?
332
VP9D_DEBUG_CLR_BLK_MODES : 0)
333
| ((ctx->dbg_display_mv_flag != 0) ?
334
VP9D_DEBUG_DRAW_MV : 0)
337
ctx->postproc_cfg.post_proc_flag;
339
flags.deblocking_level = ctx->postproc_cfg.deblocking_level;
340
flags.noise_level = ctx->postproc_cfg.noise_level;
341
#if CONFIG_POSTPROC_VISUALIZER
342
flags.display_ref_frame_flag = ctx->dbg_color_ref_frame_flag;
343
flags.display_mb_modes_flag = ctx->dbg_color_mb_modes_flag;
344
flags.display_b_modes_flag = ctx->dbg_color_b_modes_flag;
345
flags.display_mv_flag = ctx->dbg_display_mv_flag;
349
if (vp9_receive_compressed_data(ctx->pbi, data_sz, data, deadline)) {
350
VP9D_COMP *pbi = (VP9D_COMP *)ctx->pbi;
351
res = update_error_state(ctx, &pbi->common.error);
354
if (!res && 0 == vp9_get_raw_frame(ctx->pbi, &sd, &time_stamp,
355
&time_end_stamp, &flags)) {
356
yuvconfig2image(&ctx->img, &sd, user_priv);
/* Scans the trailing superframe marker/index. On a valid index, fills
 * sizes[] with each sub-frame's byte size and sets *count to the number
 * of frames (max 8); otherwise *count is 0. data_sz must be non-zero. */
static void parse_superframe_index(const uint8_t *data,
                                   size_t data_sz,
                                   uint32_t sizes[8],
                                   int *count) {
  uint8_t marker;

  assert(data_sz);
  marker = data[data_sz - 1];
  *count = 0;

  if ((marker & 0xe0) == 0xc0) {
    const uint32_t frames = (marker & 0x7) + 1;
    const uint32_t mag = ((marker >> 3) & 0x3) + 1;
    const size_t index_sz = 2 + mag * frames;

    /* The marker byte must appear at both ends of the index. */
    if (data_sz >= index_sz && data[data_sz - index_sz] == marker) {
      // found a valid superframe index
      uint32_t i, j;
      const uint8_t *x = data + data_sz - index_sz + 1;

      for (i = 0; i < frames; i++) {
        uint32_t this_sz = 0;

        /* Sizes are stored little-endian, 'mag' bytes each. */
        for (j = 0; j < mag; j++)
          this_sz |= (*x++) << (j * 8);
        sizes[i] = this_sz;
      }

      *count = frames;
    }
  }
}
397
static vpx_codec_err_t vp9_decode(vpx_codec_alg_priv_t *ctx,
399
unsigned int data_sz,
402
const uint8_t *data_start = data;
403
const uint8_t *data_end = data + data_sz;
404
vpx_codec_err_t res = 0;
406
int frames_this_pts, frame_count = 0;
408
if (data == NULL || data_sz == 0) return VPX_CODEC_INVALID_PARAM;
410
parse_superframe_index(data, data_sz, sizes, &frames_this_pts);
413
// Skip over the superframe index, if present
414
if (data_sz && (*data_start & 0xe0) == 0xc0) {
415
const uint8_t marker = *data_start;
416
const uint32_t frames = (marker & 0x7) + 1;
417
const uint32_t mag = ((marker >> 3) & 0x3) + 1;
418
const uint32_t index_sz = 2 + mag * frames;
420
if (data_sz >= index_sz && data_start[index_sz - 1] == marker) {
421
data_start += index_sz;
423
if (data_start < data_end)
430
// Use the correct size for this frame, if an index is present.
431
if (frames_this_pts) {
432
uint32_t this_sz = sizes[frame_count];
434
if (data_sz < this_sz) {
435
ctx->base.err_detail = "Invalid frame size in index";
436
return VPX_CODEC_CORRUPT_FRAME;
443
res = decode_one(ctx, &data_start, data_sz, user_priv, deadline);
444
assert(data_start >= data);
445
assert(data_start <= data_end);
447
/* Early exit if there was a decode error */
451
/* Account for suboptimal termination by the encoder. */
452
while (data_start < data_end && *data_start == 0)
455
data_sz = data_end - data_start;
456
} while (data_start < data_end);
460
static vpx_image_t *vp9_get_frame(vpx_codec_alg_priv_t *ctx,
461
vpx_codec_iter_t *iter) {
462
vpx_image_t *img = NULL;
464
if (ctx->img_avail) {
465
/* iter acts as a flip flop, so an image is only returned on the first
478
static vpx_codec_err_t vp9_xma_get_mmap(const vpx_codec_ctx_t *ctx,
479
vpx_codec_mmap_t *mmap,
480
vpx_codec_iter_t *iter) {
482
const mem_req_t *seg_iter = *iter;
484
/* Get address of next segment request */
487
seg_iter = vp9_mem_req_segs;
488
else if (seg_iter->id != VP9_SEG_MAX)
491
*iter = (vpx_codec_iter_t)seg_iter;
493
if (seg_iter->id != VP9_SEG_MAX) {
494
mmap->id = seg_iter->id;
495
mmap->sz = seg_iter->sz;
496
mmap->align = seg_iter->align;
497
mmap->flags = seg_iter->flags;
500
mmap->sz = seg_iter->calc_sz(ctx->config.dec, ctx->init_flags);
504
res = VPX_CODEC_LIST_END;
506
} while (!mmap->sz && res != VPX_CODEC_LIST_END);
511
static vpx_codec_err_t vp9_xma_set_mmap(vpx_codec_ctx_t *ctx,
512
const vpx_codec_mmap_t *mmap) {
513
vpx_codec_err_t res = VPX_CODEC_MEM_ERROR;
517
if (mmap->id == VP9_SEG_ALG_PRIV) {
519
vp9_init_ctx(ctx, mmap);
527
if (!res && ctx->priv->alg_priv) {
528
for (i = 0; i < NELEMENTS(ctx->priv->alg_priv->mmaps); i++) {
529
if (ctx->priv->alg_priv->mmaps[i].id == mmap->id)
530
if (!ctx->priv->alg_priv->mmaps[i].base) {
531
ctx->priv->alg_priv->mmaps[i] = *mmap;
535
done &= (ctx->priv->alg_priv->mmaps[i].base != NULL);
540
vp9_finalize_mmaps(ctx->priv->alg_priv);
541
res = ctx->iface->init(ctx, NULL);
547
static vpx_codec_err_t set_reference(vpx_codec_alg_priv_t *ctx,
550
vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *);
553
vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data;
554
YV12_BUFFER_CONFIG sd;
556
image2yuvconfig(&frame->img, &sd);
558
return vp9_set_reference_dec(ctx->pbi,
559
(VP9_REFFRAME)frame->frame_type, &sd);
561
return VPX_CODEC_INVALID_PARAM;
565
static vpx_codec_err_t copy_reference(vpx_codec_alg_priv_t *ctx,
568
vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *);
571
vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data;
572
YV12_BUFFER_CONFIG sd;
574
image2yuvconfig(&frame->img, &sd);
576
return vp9_copy_reference_dec(ctx->pbi,
577
(VP9_REFFRAME)frame->frame_type, &sd);
579
return VPX_CODEC_INVALID_PARAM;
583
static vpx_codec_err_t get_reference(vpx_codec_alg_priv_t *ctx,
586
vp9_ref_frame_t *data = va_arg(args, vp9_ref_frame_t *);
589
YV12_BUFFER_CONFIG* fb;
591
vp9_get_reference_dec(ctx->pbi, data->idx, &fb);
592
yuvconfig2image(&data->img, fb, NULL);
595
return VPX_CODEC_INVALID_PARAM;
599
static vpx_codec_err_t set_postproc(vpx_codec_alg_priv_t *ctx,
602
#if CONFIG_VP9_POSTPROC
603
vp8_postproc_cfg_t *data = va_arg(args, vp8_postproc_cfg_t *);
606
ctx->postproc_cfg_set = 1;
607
ctx->postproc_cfg = *((vp8_postproc_cfg_t *)data);
610
return VPX_CODEC_INVALID_PARAM;
613
return VPX_CODEC_INCAPABLE;
617
static vpx_codec_err_t set_dbg_options(vpx_codec_alg_priv_t *ctx,
620
#if CONFIG_POSTPROC_VISUALIZER && CONFIG_POSTPROC
621
int data = va_arg(args, int);
623
#define MAP(id, var) case id: var = data; break;
626
MAP(VP8_SET_DBG_COLOR_REF_FRAME, ctx->dbg_color_ref_frame_flag);
627
MAP(VP8_SET_DBG_COLOR_MB_MODES, ctx->dbg_color_mb_modes_flag);
628
MAP(VP8_SET_DBG_COLOR_B_MODES, ctx->dbg_color_b_modes_flag);
629
MAP(VP8_SET_DBG_DISPLAY_MV, ctx->dbg_display_mv_flag);
634
return VPX_CODEC_INCAPABLE;
638
static vpx_codec_err_t get_last_ref_updates(vpx_codec_alg_priv_t *ctx,
641
int *update_info = va_arg(args, int *);
642
VP9D_COMP *pbi = (VP9D_COMP *)ctx->pbi;
645
*update_info = pbi->refresh_frame_flags;
649
return VPX_CODEC_INVALID_PARAM;
654
static vpx_codec_err_t get_frame_corrupted(vpx_codec_alg_priv_t *ctx,
657
int *corrupted = va_arg(args, int *);
660
VP9D_COMP *pbi = (VP9D_COMP *)ctx->pbi;
662
*corrupted = pbi->common.frame_to_show->corrupted;
664
return VPX_CODEC_ERROR;
667
return VPX_CODEC_INVALID_PARAM;
671
static vpx_codec_err_t set_invert_tile_order(vpx_codec_alg_priv_t *ctx,
674
ctx->invert_tile_order = va_arg(args, int);
678
static vpx_codec_ctrl_fn_map_t ctf_maps[] = {
679
{VP8_SET_REFERENCE, set_reference},
680
{VP8_COPY_REFERENCE, copy_reference},
681
{VP8_SET_POSTPROC, set_postproc},
682
{VP8_SET_DBG_COLOR_REF_FRAME, set_dbg_options},
683
{VP8_SET_DBG_COLOR_MB_MODES, set_dbg_options},
684
{VP8_SET_DBG_COLOR_B_MODES, set_dbg_options},
685
{VP8_SET_DBG_DISPLAY_MV, set_dbg_options},
686
{VP8D_GET_LAST_REF_UPDATES, get_last_ref_updates},
687
{VP8D_GET_FRAME_CORRUPTED, get_frame_corrupted},
688
{VP9_GET_REFERENCE, get_reference},
689
{VP9_INVERT_TILE_DECODE_ORDER, set_invert_tile_order},
/* Fallback so the interface description below always concatenates cleanly. */
#ifndef VERSION_STRING
#define VERSION_STRING
#endif
697
CODEC_INTERFACE(vpx_codec_vp9_dx) = {
698
"WebM Project VP9 Decoder" VERSION_STRING,
699
VPX_CODEC_INTERNAL_ABI_VERSION,
700
VPX_CODEC_CAP_DECODER | VP9_CAP_POSTPROC,
701
/* vpx_codec_caps_t caps; */
702
vp9_init, /* vpx_codec_init_fn_t init; */
703
vp9_destroy, /* vpx_codec_destroy_fn_t destroy; */
704
ctf_maps, /* vpx_codec_ctrl_fn_map_t *ctrl_maps; */
705
vp9_xma_get_mmap, /* vpx_codec_get_mmap_fn_t get_mmap; */
706
vp9_xma_set_mmap, /* vpx_codec_set_mmap_fn_t set_mmap; */
708
vp9_peek_si, /* vpx_codec_peek_si_fn_t peek_si; */
709
vp9_get_si, /* vpx_codec_get_si_fn_t get_si; */
710
vp9_decode, /* vpx_codec_decode_fn_t decode; */
711
vp9_get_frame, /* vpx_codec_frame_get_fn_t frame_get; */
714
/* encoder functions */