~ubuntu-branches/ubuntu/vivid/gstreamer-vaapi/vivid

« back to all changes in this revision

Viewing changes to gst-libs/gst/vaapi/gstvaapidecoder_h264.c

  • Committer: Package Import Robot
  • Author(s): Vincent Cheng
  • Date: 2014-08-06 23:56:00 UTC
  • mfrom: (0.1.4 sid) (1.1.3)
  • Revision ID: package-import@ubuntu.com-20140806235600-fg1kcmiu67k315q5
Tags: 0.5.9-2
* Remove spurious build-deps: libva-drm1, libavcodec-dev. (Closes: #757283)
* Drop Build-Depends-Indep and build docs unconditionally on all archs.

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
1
/*
2
2
 *  gstvaapidecoder_h264.c - H.264 decoder
3
3
 *
4
 
 *  Copyright (C) 2011-2013 Intel Corporation
 
4
 *  Copyright (C) 2011-2014 Intel Corporation
5
5
 *    Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
6
6
 *
7
7
 *  This library is free software; you can redistribute it and/or
34
34
#include "gstvaapidecoder_priv.h"
35
35
#include "gstvaapidisplay_priv.h"
36
36
#include "gstvaapiobject_priv.h"
 
37
#include "gstvaapiutils_h264_priv.h"
37
38
 
38
39
#define DEBUG 1
39
40
#include "gstvaapidebug.h"
56
57
/* --- H.264 Parser Info                                                 --- */
57
58
/* ------------------------------------------------------------------------- */
58
59
 
 
60
/*
 
61
 * Extended decoder unit flags:
 
62
 *
 
63
 * @GST_VAAPI_DECODER_UNIT_AU_START: marks the start of an access unit.
 
64
 * @GST_VAAPI_DECODER_UNIT_AU_END: marks the end of an access unit.
 
65
 */
 
66
enum {
 
67
    /* This flag does not strictly follow the definitions (7.4.1.2.3)
 
68
       for detecting the start of an access unit as we are only
 
69
       interested in knowing if the current slice is the first one or
 
70
       the last one in the current access unit */
 
71
    GST_VAAPI_DECODER_UNIT_FLAG_AU_START = (
 
72
        GST_VAAPI_DECODER_UNIT_FLAG_LAST << 0),
 
73
    GST_VAAPI_DECODER_UNIT_FLAG_AU_END = (
 
74
        GST_VAAPI_DECODER_UNIT_FLAG_LAST << 1),
 
75
 
 
76
    GST_VAAPI_DECODER_UNIT_FLAGS_AU = (
 
77
        GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
 
78
        GST_VAAPI_DECODER_UNIT_FLAG_AU_END),
 
79
};
 
80
 
59
81
#define GST_VAAPI_PARSER_INFO_H264(obj) \
60
82
    ((GstVaapiParserInfoH264 *)(obj))
61
83
 
65
87
    union {
66
88
        GstH264SPS      sps;
67
89
        GstH264PPS      pps;
 
90
        GArray         *sei;
68
91
        GstH264SliceHdr slice_hdr;
69
92
    }                   data;
 
93
    guint               state;
 
94
    guint               flags;      // Same as decoder unit flags (persistent)
 
95
    guint               view_id;    // View ID of slice
 
96
    guint               voc;        // View order index (VOIdx) of slice
70
97
};
71
98
 
 
99
static void
 
100
gst_vaapi_parser_info_h264_finalize(GstVaapiParserInfoH264 *pi)
 
101
{
 
102
    switch (pi->nalu.type) {
 
103
    case GST_H264_NAL_SPS:
 
104
    case GST_H264_NAL_SUBSET_SPS:
 
105
        gst_h264_sps_clear(&pi->data.sps);
 
106
        break;
 
107
    case GST_H264_NAL_PPS:
 
108
        gst_h264_pps_clear(&pi->data.pps);
 
109
        break;
 
110
    case GST_H264_NAL_SEI:
 
111
        if (pi->data.sei) {
 
112
            g_array_unref(pi->data.sei);
 
113
            pi->data.sei = NULL;
 
114
        }
 
115
        break;
 
116
    }
 
117
}
 
118
 
72
119
static inline const GstVaapiMiniObjectClass *
73
120
gst_vaapi_parser_info_h264_class(void)
74
121
{
75
122
    static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
76
 
        sizeof(GstVaapiParserInfoH264),
77
 
        NULL
 
123
        .size = sizeof(GstVaapiParserInfoH264),
 
124
        .finalize = (GDestroyNotify)gst_vaapi_parser_info_h264_finalize
78
125
    };
79
126
    return &GstVaapiParserInfoH264Class;
80
127
}
104
151
 * Extended picture flags:
105
152
 *
106
153
 * @GST_VAAPI_PICTURE_FLAG_IDR: flag that specifies an IDR picture
 
154
 * @GST_VAAPI_PICTURE_FLAG_INTER_VIEW: flag that indicates the picture
 
155
 *   may be used for inter-view prediction
 
156
 * @GST_VAAPI_PICTURE_FLAG_ANCHOR: flag that specifies an anchor picture,
 
157
 *   i.e. a picture that is decoded with only inter-view prediction,
 
158
 *   and not inter prediction
 
159
 * @GST_VAAPI_PICTURE_FLAG_AU_START: flag that marks the start of an
 
160
 *   access unit (AU)
 
161
 * @GST_VAAPI_PICTURE_FLAG_AU_END: flag that marks the end of an
 
162
 *   access unit (AU)
107
163
 * @GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE: flag that specifies
108
164
 *     "used for short-term reference"
109
165
 * @GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE: flag that specifies
112
168
 *     reference picture (short-term reference or long-term reference)
113
169
 */
114
170
enum {
115
 
    GST_VAAPI_PICTURE_FLAG_IDR = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
 
171
    GST_VAAPI_PICTURE_FLAG_IDR          = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
 
172
    GST_VAAPI_PICTURE_FLAG_REFERENCE2   = (GST_VAAPI_PICTURE_FLAG_LAST << 1),
 
173
    GST_VAAPI_PICTURE_FLAG_INTER_VIEW   = (GST_VAAPI_PICTURE_FLAG_LAST << 2),
 
174
    GST_VAAPI_PICTURE_FLAG_ANCHOR       = (GST_VAAPI_PICTURE_FLAG_LAST << 3),
 
175
    GST_VAAPI_PICTURE_FLAG_AU_START     = (GST_VAAPI_PICTURE_FLAG_LAST << 4),
 
176
    GST_VAAPI_PICTURE_FLAG_AU_END       = (GST_VAAPI_PICTURE_FLAG_LAST << 5),
116
177
 
117
178
    GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE = (
118
179
        GST_VAAPI_PICTURE_FLAG_REFERENCE),
119
180
    GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE = (
120
 
        GST_VAAPI_PICTURE_FLAG_REFERENCE | (GST_VAAPI_PICTURE_FLAG_LAST << 1)),
 
181
        GST_VAAPI_PICTURE_FLAG_REFERENCE | GST_VAAPI_PICTURE_FLAG_REFERENCE2),
121
182
    GST_VAAPI_PICTURE_FLAGS_REFERENCE = (
122
183
        GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
123
184
        GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
136
197
      GST_VAAPI_PICTURE_FLAGS_REFERENCE) ==                     \
137
198
     GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)
138
199
 
 
200
#define GST_VAAPI_PICTURE_IS_INTER_VIEW(picture) \
 
201
    (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW))
 
202
 
 
203
#define GST_VAAPI_PICTURE_IS_ANCHOR(picture) \
 
204
    (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_ANCHOR))
 
205
 
 
206
#define GST_VAAPI_PICTURE_H264(picture) \
 
207
    ((GstVaapiPictureH264 *)(picture))
 
208
 
139
209
struct _GstVaapiPictureH264 {
140
210
    GstVaapiPicture             base;
141
 
    GstH264PPS                 *pps;
142
211
    GstH264SliceHdr            *last_slice_hdr;
143
212
    guint                       structure;
144
213
    gint32                      field_poc[2];
220
289
    /*< private >*/
221
290
    GstVaapiMiniObject          parent_instance;
222
291
 
 
292
    guint                       view_id;
223
293
    guint                       structure;
224
294
    GstVaapiPictureH264        *buffers[2];
225
295
    guint                       num_buffers;
251
321
    if (!fs)
252
322
        return NULL;
253
323
 
 
324
    fs->view_id         = picture->base.view_id;
254
325
    fs->structure       = picture->structure;
255
326
    fs->buffers[0]      = gst_vaapi_picture_ref(picture);
256
327
    fs->buffers[1]      = NULL;
257
328
    fs->num_buffers     = 1;
258
 
    fs->output_needed   = picture->output_needed;
 
329
    fs->output_needed   = 0;
 
330
 
 
331
    if (picture->output_flag) {
 
332
        picture->output_needed = TRUE;
 
333
        fs->output_needed++;
 
334
    }
259
335
    return fs;
260
336
}
261
337
 
320
396
}
321
397
 
322
398
static inline gboolean
 
399
gst_vaapi_frame_store_is_complete(GstVaapiFrameStore *fs)
 
400
{
 
401
    return gst_vaapi_frame_store_has_frame(fs) ||
 
402
        GST_VAAPI_PICTURE_IS_ONEFIELD(fs->buffers[0]);
 
403
}
 
404
 
 
405
static inline gboolean
323
406
gst_vaapi_frame_store_has_reference(GstVaapiFrameStore *fs)
324
407
{
325
408
    guint i;
331
414
    return FALSE;
332
415
}
333
416
 
 
417
static gboolean
 
418
gst_vaapi_frame_store_has_inter_view(GstVaapiFrameStore *fs)
 
419
{
 
420
    guint i;
 
421
 
 
422
    for (i = 0; i < fs->num_buffers; i++) {
 
423
        if (GST_VAAPI_PICTURE_IS_INTER_VIEW(fs->buffers[i]))
 
424
            return TRUE;
 
425
    }
 
426
    return FALSE;
 
427
}
 
428
 
334
429
#define gst_vaapi_frame_store_ref(fs) \
335
430
    gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(fs))
336
431
 
348
443
#define GST_VAAPI_DECODER_H264_CAST(decoder) \
349
444
    ((GstVaapiDecoderH264 *)(decoder))
350
445
 
 
446
typedef enum {
 
447
    GST_H264_VIDEO_STATE_GOT_SPS        = 1 << 0,
 
448
    GST_H264_VIDEO_STATE_GOT_PPS        = 1 << 1,
 
449
    GST_H264_VIDEO_STATE_GOT_SLICE      = 1 << 2,
 
450
 
 
451
    GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS = (
 
452
        GST_H264_VIDEO_STATE_GOT_SPS |
 
453
        GST_H264_VIDEO_STATE_GOT_PPS),
 
454
    GST_H264_VIDEO_STATE_VALID_PICTURE = (
 
455
        GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS |
 
456
        GST_H264_VIDEO_STATE_GOT_SLICE)
 
457
} GstH264VideoState;
 
458
 
351
459
struct _GstVaapiDecoderH264Private {
352
460
    GstH264NalParser           *parser;
 
461
    guint                       parser_state;
 
462
    guint                       decoder_state;
 
463
    GstVaapiStreamAlignH264     stream_alignment;
353
464
    GstVaapiPictureH264        *current_picture;
 
465
    GstVaapiParserInfoH264     *sps[GST_H264_MAX_SPS_COUNT];
 
466
    GstVaapiParserInfoH264     *active_sps;
 
467
    GstVaapiParserInfoH264     *pps[GST_H264_MAX_PPS_COUNT];
 
468
    GstVaapiParserInfoH264     *active_pps;
 
469
    GstVaapiParserInfoH264     *prev_pi;
354
470
    GstVaapiParserInfoH264     *prev_slice_pi;
355
 
    GstVaapiFrameStore         *prev_frame;
356
 
    GstVaapiFrameStore         *dpb[16];
 
471
    GstVaapiFrameStore        **prev_frames;
 
472
    guint                       prev_frames_alloc;
 
473
    GstVaapiFrameStore        **dpb;
357
474
    guint                       dpb_count;
358
475
    guint                       dpb_size;
 
476
    guint                       dpb_size_max;
 
477
    guint                       max_views;
359
478
    GstVaapiProfile             profile;
360
479
    GstVaapiEntrypoint          entrypoint;
361
480
    GstVaapiChromaType          chroma_type;
 
481
    GPtrArray                  *inter_views;
362
482
    GstVaapiPictureH264        *short_ref[32];
363
483
    guint                       short_ref_count;
364
484
    GstVaapiPictureH264        *long_ref[32];
382
502
    gboolean                    prev_pic_structure;     // previous picture structure
383
503
    guint                       is_opened               : 1;
384
504
    guint                       is_avcC                 : 1;
385
 
    guint                       got_sps                 : 1;
386
 
    guint                       got_pps                 : 1;
387
505
    guint                       has_context             : 1;
388
506
    guint                       progressive_sequence    : 1;
389
507
};
412
530
static gboolean
413
531
exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
414
532
 
 
533
static gboolean
 
534
is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
 
535
    GstVaapiPictureH264 *picture);
 
536
 
 
537
static inline gboolean
 
538
is_inter_view_reference_for_next_frames(GstVaapiDecoderH264 *decoder,
 
539
    GstVaapiFrameStore *fs)
 
540
{
 
541
    return is_inter_view_reference_for_next_pictures(decoder, fs->buffers[0]);
 
542
}
 
543
 
 
544
/* Determines if the supplied profile is one of the MVC set */
 
545
static gboolean
 
546
is_mvc_profile(GstH264Profile profile)
 
547
{
 
548
    return profile == GST_H264_PROFILE_MULTIVIEW_HIGH ||
 
549
        profile == GST_H264_PROFILE_STEREO_HIGH;
 
550
}
 
551
 
 
552
/* Determines the view_id from the supplied NAL unit */
 
553
static inline guint
 
554
get_view_id(GstH264NalUnit *nalu)
 
555
{
 
556
    return GST_H264_IS_MVC_NALU(nalu) ? nalu->extension.mvc.view_id : 0;
 
557
}
 
558
 
 
559
/* Determines the view order index (VOIdx) from the supplied view_id */
 
560
static gint
 
561
get_view_order_index(GstH264SPS *sps, guint16 view_id)
 
562
{
 
563
    GstH264SPSExtMVC *mvc;
 
564
    gint i;
 
565
 
 
566
    if (!sps || sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
 
567
        return 0;
 
568
 
 
569
    mvc = &sps->extension.mvc;
 
570
    for (i = 0; i <= mvc->num_views_minus1; i++) {
 
571
        if (mvc->view[i].view_id == view_id)
 
572
            return i;
 
573
    }
 
574
    GST_ERROR("failed to find VOIdx from view_id (%d)", view_id);
 
575
    return -1;
 
576
}
 
577
 
 
578
/* Determines NumViews */
 
579
static guint
 
580
get_num_views(GstH264SPS *sps)
 
581
{
 
582
    return 1 + (sps->extension_type == GST_H264_NAL_EXTENSION_MVC ?
 
583
        sps->extension.mvc.num_views_minus1 : 0);
 
584
}
 
585
 
415
586
/* Get number of reference frames to use */
416
587
static guint
417
588
get_max_dec_frame_buffering(GstH264SPS *sps)
418
589
{
419
 
    guint max_dec_frame_buffering, MaxDpbMbs, PicSizeMbs;
 
590
    guint num_views, max_dpb_frames;
 
591
    guint max_dec_frame_buffering, PicSizeMbs;
 
592
    GstVaapiLevelH264 level;
 
593
    const GstVaapiH264LevelLimits *level_limits;
420
594
 
421
595
    /* Table A-1 - Level limits */
422
 
    switch (sps->level_idc) {
423
 
    case 10: MaxDpbMbs = 396;    break;
424
 
    case 11: MaxDpbMbs = 900;    break;
425
 
    case 12: MaxDpbMbs = 2376;   break;
426
 
    case 13: MaxDpbMbs = 2376;   break;
427
 
    case 20: MaxDpbMbs = 2376;   break;
428
 
    case 21: MaxDpbMbs = 4752;   break;
429
 
    case 22: MaxDpbMbs = 8100;   break;
430
 
    case 30: MaxDpbMbs = 8100;   break;
431
 
    case 31: MaxDpbMbs = 18000;  break;
432
 
    case 32: MaxDpbMbs = 20480;  break;
433
 
    case 40: MaxDpbMbs = 32768;  break;
434
 
    case 41: MaxDpbMbs = 32768;  break;
435
 
    case 42: MaxDpbMbs = 34816;  break;
436
 
    case 50: MaxDpbMbs = 110400; break;
437
 
    case 51: MaxDpbMbs = 184320; break;
438
 
    default:
439
 
        g_assert(0 && "unhandled level");
440
 
        break;
441
 
    }
442
 
 
443
 
    PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
444
 
                  (sps->pic_height_in_map_units_minus1 + 1) *
445
 
                  (sps->frame_mbs_only_flag ? 1 : 2));
446
 
    max_dec_frame_buffering = MaxDpbMbs / PicSizeMbs;
 
596
    if (G_UNLIKELY(sps->level_idc == 11 && sps->constraint_set3_flag))
 
597
        level = GST_VAAPI_LEVEL_H264_L1b;
 
598
    else
 
599
        level = gst_vaapi_utils_h264_get_level(sps->level_idc);
 
600
    level_limits = gst_vaapi_utils_h264_get_level_limits(level);
 
601
    if (G_UNLIKELY(!level_limits)) {
 
602
        GST_FIXME("unsupported level_idc value (%d)", sps->level_idc);
 
603
        max_dec_frame_buffering = 16;
 
604
    }
 
605
    else {
 
606
        PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
 
607
                      (sps->pic_height_in_map_units_minus1 + 1) *
 
608
                      (sps->frame_mbs_only_flag ? 1 : 2));
 
609
        max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;
 
610
    }
 
611
    if (is_mvc_profile(sps->profile_idc))
 
612
        max_dec_frame_buffering <<= 1;
447
613
 
448
614
    /* VUI parameters */
449
615
    if (sps->vui_parameters_present_flag) {
465
631
        }
466
632
    }
467
633
 
468
 
    if (max_dec_frame_buffering > 16)
469
 
        max_dec_frame_buffering = 16;
 
634
    num_views = get_num_views(sps);
 
635
    max_dpb_frames = 16 * (num_views > 1 ? g_bit_storage(num_views - 1) : 1);
 
636
    if (max_dec_frame_buffering > max_dpb_frames)
 
637
        max_dec_frame_buffering = max_dpb_frames;
470
638
    else if (max_dec_frame_buffering < sps->num_ref_frames)
471
639
        max_dec_frame_buffering = sps->num_ref_frames;
472
640
    return MAX(1, max_dec_frame_buffering);
528
696
}
529
697
 
530
698
static gboolean
531
 
dpb_output(
532
 
    GstVaapiDecoderH264 *decoder,
533
 
    GstVaapiFrameStore  *fs,
534
 
    GstVaapiPictureH264 *picture
535
 
)
 
699
dpb_output(GstVaapiDecoderH264 *decoder, GstVaapiFrameStore *fs)
536
700
{
 
701
    GstVaapiPictureH264 *picture;
 
702
 
 
703
    g_return_val_if_fail(fs != NULL, FALSE);
 
704
 
 
705
    if (!gst_vaapi_frame_store_is_complete(fs))
 
706
        return TRUE;
 
707
 
 
708
    picture = fs->buffers[0];
 
709
    g_return_val_if_fail(picture != NULL, FALSE);
537
710
    picture->output_needed = FALSE;
538
711
 
539
 
    if (fs) {
540
 
        if (--fs->output_needed > 0)
541
 
            return TRUE;
542
 
        picture = fs->buffers[0];
 
712
    if (fs->num_buffers > 1) {
 
713
        picture = fs->buffers[1];
 
714
        g_return_val_if_fail(picture != NULL, FALSE);
 
715
        picture->output_needed = FALSE;
543
716
    }
 
717
 
 
718
    fs->output_needed = 0;
544
719
    return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
545
720
}
546
721
 
554
729
        dpb_remove_index(decoder, i);
555
730
}
556
731
 
557
 
static gboolean
558
 
dpb_bump(GstVaapiDecoderH264 *decoder)
 
732
/* Finds the frame store holding the supplied picture */
 
733
static gint
 
734
dpb_find_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
 
735
{
 
736
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
 
737
    gint i, j;
 
738
 
 
739
    for (i = 0; i < priv->dpb_count; i++) {
 
740
        GstVaapiFrameStore * const fs = priv->dpb[i];
 
741
        for (j = 0; j < fs->num_buffers; j++) {
 
742
            if (fs->buffers[j] == picture)
 
743
                return i;
 
744
        }
 
745
    }
 
746
    return -1;
 
747
}
 
748
 
 
749
/* Finds the picture with the lowest POC that needs to be output */
 
750
static gint
 
751
dpb_find_lowest_poc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
 
752
    GstVaapiPictureH264 **found_picture_ptr)
559
753
{
560
754
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
561
755
    GstVaapiPictureH264 *found_picture = NULL;
562
756
    guint i, j, found_index;
563
 
    gboolean success;
564
757
 
565
758
    for (i = 0; i < priv->dpb_count; i++) {
566
759
        GstVaapiFrameStore * const fs = priv->dpb[i];
567
760
        if (!fs->output_needed)
568
761
            continue;
569
 
        for (j = 0; j < fs->num_buffers; j++) {
570
 
            GstVaapiPictureH264 * const picture = fs->buffers[j];
571
 
            if (!picture->output_needed)
572
 
                continue;
573
 
            if (!found_picture || found_picture->base.poc > picture->base.poc)
574
 
                found_picture = picture, found_index = i;
575
 
        }
576
 
    }
577
 
    if (!found_picture)
 
762
        if (picture && picture->base.view_id != fs->view_id)
 
763
            continue;
 
764
        for (j = 0; j < fs->num_buffers; j++) {
 
765
            GstVaapiPictureH264 * const pic = fs->buffers[j];
 
766
            if (!pic->output_needed)
 
767
                continue;
 
768
            if (!found_picture || found_picture->base.poc > pic->base.poc ||
 
769
                (found_picture->base.poc == pic->base.poc &&
 
770
                 found_picture->base.voc > pic->base.voc))
 
771
                found_picture = pic, found_index = i;
 
772
        }
 
773
    }
 
774
 
 
775
    if (found_picture_ptr)
 
776
        *found_picture_ptr = found_picture;
 
777
    return found_picture ? found_index : -1;
 
778
}
 
779
 
 
780
/* Finds the picture with the lowest VOC that needs to be output */
 
781
static gint
 
782
dpb_find_lowest_voc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
 
783
    GstVaapiPictureH264 **found_picture_ptr)
 
784
{
 
785
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
 
786
    GstVaapiPictureH264 *found_picture = NULL;
 
787
    guint i, j, found_index;
 
788
 
 
789
    for (i = 0; i < priv->dpb_count; i++) {
 
790
        GstVaapiFrameStore * const fs = priv->dpb[i];
 
791
        if (!fs->output_needed || fs->view_id == picture->base.view_id)
 
792
            continue;
 
793
        for (j = 0; j < fs->num_buffers; j++) {
 
794
            GstVaapiPictureH264 * const pic = fs->buffers[j];
 
795
            if (!pic->output_needed || pic->base.poc != picture->base.poc)
 
796
                continue;
 
797
            if (!found_picture || found_picture->base.voc > pic->base.voc)
 
798
                found_picture = pic, found_index = i;
 
799
        }
 
800
    }
 
801
 
 
802
    if (found_picture_ptr)
 
803
        *found_picture_ptr = found_picture;
 
804
    return found_picture ? found_index : -1;
 
805
}
 
806
 
 
807
static gboolean
 
808
dpb_output_other_views(GstVaapiDecoderH264 *decoder,
 
809
    GstVaapiPictureH264 *picture, guint voc)
 
810
{
 
811
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
 
812
    GstVaapiPictureH264 *found_picture;
 
813
    gint found_index;
 
814
    gboolean success;
 
815
 
 
816
    if (priv->max_views == 1)
 
817
        return TRUE;
 
818
 
 
819
    /* Emit all other view components that were in the same access
 
820
       unit than the picture we have just found */
 
821
    found_picture = picture;
 
822
    for (;;) {
 
823
        found_index = dpb_find_lowest_voc(decoder, found_picture,
 
824
            &found_picture);
 
825
        if (found_index < 0 || found_picture->base.voc >= voc)
 
826
            break;
 
827
        success = dpb_output(decoder, priv->dpb[found_index]);
 
828
        dpb_evict(decoder, found_picture, found_index);
 
829
        if (!success)
 
830
            return FALSE;
 
831
    }
 
832
    return TRUE;
 
833
}
 
834
 
 
835
static gboolean
 
836
dpb_bump(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
 
837
{
 
838
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
 
839
    GstVaapiPictureH264 *found_picture;
 
840
    gint found_index;
 
841
    gboolean success;
 
842
 
 
843
    found_index = dpb_find_lowest_poc(decoder, picture, &found_picture);
 
844
    if (found_index < 0)
578
845
        return FALSE;
579
846
 
580
 
    success = dpb_output(decoder, priv->dpb[found_index], found_picture);
 
847
    if (picture && picture->base.poc != found_picture->base.poc)
 
848
        dpb_output_other_views(decoder, found_picture, found_picture->base.voc);
 
849
 
 
850
    success = dpb_output(decoder, priv->dpb[found_index]);
581
851
    dpb_evict(decoder, found_picture, found_index);
 
852
    if (priv->max_views == 1)
 
853
        return success;
 
854
 
 
855
    if (picture && picture->base.poc != found_picture->base.poc)
 
856
        dpb_output_other_views(decoder, found_picture, G_MAXUINT32);
582
857
    return success;
583
858
}
584
859
 
585
860
static void
586
 
dpb_clear(GstVaapiDecoderH264 *decoder)
 
861
dpb_clear(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
587
862
{
588
863
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
589
 
    guint i;
 
864
    guint i, n;
590
865
 
591
 
    for (i = 0; i < priv->dpb_count; i++)
 
866
    for (i = 0; i < priv->dpb_count; i++) {
 
867
        if (picture && picture->base.view_id != priv->dpb[i]->view_id)
 
868
            continue;
592
869
        gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);
593
 
    priv->dpb_count = 0;
594
 
 
595
 
    gst_vaapi_frame_store_replace(&priv->prev_frame, NULL);
 
870
    }
 
871
 
 
872
    /* Compact the resulting DPB, i.e. remove holes */
 
873
    for (i = 0, n = 0; i < priv->dpb_count; i++) {
 
874
        if (priv->dpb[i]) {
 
875
            if (i != n) {
 
876
                priv->dpb[n] = priv->dpb[i];
 
877
                priv->dpb[i] = NULL;
 
878
            }
 
879
            n++;
 
880
        }
 
881
    }
 
882
    priv->dpb_count = n;
 
883
 
 
884
    /* Clear previous frame buffers only if this is a "flush-all" operation,
 
885
       or if the picture is the first one in the access unit */
 
886
    if (priv->prev_frames && (!picture ||
 
887
            GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
 
888
                GST_VAAPI_PICTURE_FLAG_AU_START))) {
 
889
        for (i = 0; i < priv->max_views; i++)
 
890
            gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
 
891
    }
596
892
}
597
893
 
598
894
static void
599
 
dpb_flush(GstVaapiDecoderH264 *decoder)
 
895
dpb_flush(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
600
896
{
601
 
    while (dpb_bump(decoder))
 
897
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
 
898
    guint i;
 
899
 
 
900
    /* Detect broken frames and mark them as having a single field if
 
901
       needed */
 
902
    for (i = 0; i < priv->dpb_count; i++) {
 
903
        GstVaapiFrameStore * const fs = priv->dpb[i];
 
904
        if (!fs->output_needed || gst_vaapi_frame_store_is_complete(fs))
 
905
            continue;
 
906
        GST_VAAPI_PICTURE_FLAG_SET(fs->buffers[0],
 
907
            GST_VAAPI_PICTURE_FLAG_ONEFIELD);
 
908
    }
 
909
 
 
910
    /* Output any frame remaining in DPB */
 
911
    while (dpb_bump(decoder, picture))
602
912
        ;
603
 
    dpb_clear(decoder);
 
913
    dpb_clear(decoder, picture);
 
914
}
 
915
 
 
916
static void
 
917
dpb_prune_mvc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
 
918
{
 
919
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
 
920
    const gboolean is_last_picture = /* in the access unit */
 
921
        GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
 
922
    guint i;
 
923
 
 
924
    // Remove all unused inter-view only reference components of the current AU
 
925
    i = 0;
 
926
    while (i < priv->dpb_count) {
 
927
        GstVaapiFrameStore * const fs = priv->dpb[i];
 
928
        if (fs->view_id != picture->base.view_id &&
 
929
            !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs) &&
 
930
            (is_last_picture ||
 
931
             !is_inter_view_reference_for_next_frames(decoder, fs)))
 
932
            dpb_remove_index(decoder, i);
 
933
        else
 
934
            i++;
 
935
    }
604
936
}
605
937
 
606
938
static gboolean
608
940
{
609
941
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
610
942
    GstVaapiFrameStore *fs;
611
 
    guint i, j;
 
943
    guint i;
 
944
 
 
945
    if (priv->max_views > 1)
 
946
        dpb_prune_mvc(decoder, picture);
612
947
 
613
948
    // Remove all unused pictures
614
949
    if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
615
950
        i = 0;
616
951
        while (i < priv->dpb_count) {
617
952
            GstVaapiFrameStore * const fs = priv->dpb[i];
618
 
            if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
 
953
            if (fs->view_id == picture->base.view_id &&
 
954
                !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
619
955
                dpb_remove_index(decoder, i);
620
956
            else
621
957
                i++;
623
959
    }
624
960
 
625
961
    // Check if picture is the second field and the first field is still in DPB
626
 
    fs = priv->prev_frame;
627
 
    if (fs && !gst_vaapi_frame_store_has_frame(fs))
628
 
        return gst_vaapi_frame_store_add(fs, picture);
 
962
    if (GST_VAAPI_PICTURE_IS_INTERLACED(picture) &&
 
963
        !GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture)) {
 
964
        const gint found_index = dpb_find_picture(decoder,
 
965
            GST_VAAPI_PICTURE_H264(picture->base.parent_picture));
 
966
        if (found_index >= 0)
 
967
            return gst_vaapi_frame_store_add(priv->dpb[found_index], picture);
 
968
 
 
969
        // ... also check the previous picture that was immediately output
 
970
        fs = priv->prev_frames[picture->base.voc];
 
971
        if (fs && &fs->buffers[0]->base == picture->base.parent_picture) {
 
972
            if (!gst_vaapi_frame_store_add(fs, picture))
 
973
                return FALSE;
 
974
            return dpb_output(decoder, fs);
 
975
        }
 
976
    }
629
977
 
630
978
    // Create new frame store, and split fields if necessary
631
979
    fs = gst_vaapi_frame_store_new(picture);
632
980
    if (!fs)
633
981
        return FALSE;
634
 
    gst_vaapi_frame_store_replace(&priv->prev_frame, fs);
 
982
    gst_vaapi_frame_store_replace(&priv->prev_frames[picture->base.voc], fs);
635
983
    gst_vaapi_frame_store_unref(fs);
636
984
 
637
985
    if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
642
990
    // C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
643
991
    if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
644
992
        while (priv->dpb_count == priv->dpb_size) {
645
 
            if (!dpb_bump(decoder))
 
993
            if (!dpb_bump(decoder, picture))
646
994
                return FALSE;
647
995
        }
648
 
        gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
649
 
        if (picture->output_flag) {
650
 
            picture->output_needed = TRUE;
651
 
            fs->output_needed++;
652
 
        }
653
996
    }
654
997
 
655
998
    // C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
656
999
    else {
657
 
        if (!picture->output_flag)
 
1000
        const gboolean StoreInterViewOnlyRefFlag =
 
1001
            !GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
 
1002
                GST_VAAPI_PICTURE_FLAG_AU_END) &&
 
1003
            GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
 
1004
                GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
 
1005
        if (!picture->output_flag && !StoreInterViewOnlyRefFlag)
658
1006
            return TRUE;
659
1007
        while (priv->dpb_count == priv->dpb_size) {
660
 
            gboolean found_picture = FALSE;
661
 
            for (i = 0; !found_picture && i < priv->dpb_count; i++) {
662
 
                GstVaapiFrameStore * const fs = priv->dpb[i];
663
 
                if (!fs->output_needed)
664
 
                    continue;
665
 
                for (j = 0; !found_picture && j < fs->num_buffers; j++)
666
 
                    found_picture = fs->buffers[j]->output_needed &&
667
 
                        fs->buffers[j]->base.poc < picture->base.poc;
 
1008
            GstVaapiPictureH264 *found_picture;
 
1009
            if (!StoreInterViewOnlyRefFlag) {
 
1010
                if (dpb_find_lowest_poc(decoder, picture, &found_picture) < 0 ||
 
1011
                    found_picture->base.poc > picture->base.poc)
 
1012
                    return dpb_output(decoder, fs);
668
1013
            }
669
 
            if (!found_picture)
670
 
                return dpb_output(decoder, NULL, picture);
671
 
            if (!dpb_bump(decoder))
 
1014
            if (!dpb_bump(decoder, picture))
672
1015
                return FALSE;
673
1016
        }
674
 
        gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
675
 
        picture->output_needed = TRUE;
676
 
        fs->output_needed++;
677
1017
    }
 
1018
    gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
678
1019
    return TRUE;
679
1020
}
680
1021
 
681
 
static inline void
682
 
dpb_reset(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
 
1022
static gboolean
 
1023
dpb_reset(GstVaapiDecoderH264 *decoder, guint dpb_size)
683
1024
{
684
1025
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
685
1026
 
686
 
    priv->dpb_size = get_max_dec_frame_buffering(sps);
 
1027
    if (dpb_size > priv->dpb_size_max) {
 
1028
        priv->dpb = g_try_realloc_n(priv->dpb, dpb_size, sizeof(*priv->dpb));
 
1029
        if (!priv->dpb)
 
1030
            return FALSE;
 
1031
        memset(&priv->dpb[priv->dpb_size_max], 0,
 
1032
            (dpb_size - priv->dpb_size_max) * sizeof(*priv->dpb));
 
1033
        priv->dpb_size_max = dpb_size;
 
1034
    }
 
1035
    priv->dpb_size = dpb_size;
 
1036
 
687
1037
    GST_DEBUG("DPB size %u", priv->dpb_size);
 
1038
    return TRUE;
 
1039
}
 
1040
 
 
1041
static void
 
1042
unref_inter_view(GstVaapiPictureH264 *picture)
 
1043
{
 
1044
    if (!picture)
 
1045
        return;
 
1046
    GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
 
1047
    gst_vaapi_picture_unref(picture);
 
1048
}
 
1049
 
 
1050
/* Resets MVC resources */
 
1051
static gboolean
 
1052
mvc_reset(GstVaapiDecoderH264 *decoder)
 
1053
{
 
1054
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
 
1055
    guint i;
 
1056
 
 
1057
    // Resize array of inter-view references
 
1058
    if (!priv->inter_views) {
 
1059
        priv->inter_views = g_ptr_array_new_full(priv->max_views,
 
1060
            (GDestroyNotify)unref_inter_view);
 
1061
        if (!priv->inter_views)
 
1062
            return FALSE;
 
1063
    }
 
1064
 
 
1065
    // Resize array of previous frame buffers
 
1066
    for (i = priv->max_views; i < priv->prev_frames_alloc; i++)
 
1067
        gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
 
1068
 
 
1069
    priv->prev_frames = g_try_realloc_n(priv->prev_frames, priv->max_views,
 
1070
        sizeof(*priv->prev_frames));
 
1071
    if (!priv->prev_frames) {
 
1072
        priv->prev_frames_alloc = 0;
 
1073
        return FALSE;
 
1074
    }
 
1075
    for (i = priv->prev_frames_alloc; i < priv->max_views; i++)
 
1076
        priv->prev_frames[i] = NULL;
 
1077
    priv->prev_frames_alloc = priv->max_views;
 
1078
    return TRUE;
688
1079
}
689
1080
 
690
1081
static GstVaapiDecoderStatus
716
1107
 
717
1108
    gst_vaapi_picture_replace(&priv->current_picture, NULL);
718
1109
    gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, NULL);
719
 
 
720
 
    dpb_clear(decoder);
 
1110
    gst_vaapi_parser_info_h264_replace(&priv->prev_pi, NULL);
 
1111
 
 
1112
    dpb_clear(decoder, NULL);
 
1113
 
 
1114
    if (priv->inter_views) {
 
1115
        g_ptr_array_unref(priv->inter_views);
 
1116
        priv->inter_views = NULL;
 
1117
    }
721
1118
 
722
1119
    if (priv->parser) {
723
1120
        gst_h264_nal_parser_free(priv->parser);
743
1140
{
744
1141
    GstVaapiDecoderH264 * const decoder =
745
1142
        GST_VAAPI_DECODER_H264_CAST(base_decoder);
 
1143
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
 
1144
    guint i;
746
1145
 
747
1146
    gst_vaapi_decoder_h264_close(decoder);
 
1147
 
 
1148
    g_free(priv->dpb);
 
1149
    priv->dpb = NULL;
 
1150
    priv->dpb_size = 0;
 
1151
 
 
1152
    g_free(priv->prev_frames);
 
1153
    priv->prev_frames = NULL;
 
1154
    priv->prev_frames_alloc = 0;
 
1155
 
 
1156
    for (i = 0; i < G_N_ELEMENTS(priv->pps); i++)
 
1157
        gst_vaapi_parser_info_h264_replace(&priv->pps[i], NULL);
 
1158
    gst_vaapi_parser_info_h264_replace(&priv->active_pps, NULL);
 
1159
 
 
1160
    for (i = 0; i < G_N_ELEMENTS(priv->sps); i++)
 
1161
        gst_vaapi_parser_info_h264_replace(&priv->sps[i], NULL);
 
1162
    gst_vaapi_parser_info_h264_replace(&priv->active_sps, NULL);
748
1163
}
749
1164
 
750
1165
static gboolean
762
1177
    return TRUE;
763
1178
}
764
1179
 
765
 
static guint
766
 
h264_get_profile(GstH264SPS *sps)
767
 
{
768
 
    guint profile = 0;
769
 
 
770
 
    switch (sps->profile_idc) {
771
 
    case GST_H264_PROFILE_BASELINE:
772
 
        profile = GST_VAAPI_PROFILE_H264_BASELINE;
773
 
        break;
774
 
    case GST_H264_PROFILE_MAIN:
775
 
        profile = GST_VAAPI_PROFILE_H264_MAIN;
776
 
        break;
777
 
    case GST_H264_PROFILE_HIGH:
778
 
        profile = GST_VAAPI_PROFILE_H264_HIGH;
779
 
        break;
780
 
    }
781
 
    return profile;
782
 
}
783
 
 
784
 
static guint
785
 
h264_get_chroma_type(GstH264SPS *sps)
786
 
{
787
 
    guint chroma_type = 0;
788
 
 
789
 
    switch (sps->chroma_format_idc) {
790
 
    case 1:
791
 
        chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
792
 
        break;
793
 
    case 2:
794
 
        chroma_type = GST_VAAPI_CHROMA_TYPE_YUV422;
795
 
        break;
796
 
    case 3:
797
 
        if (!sps->separate_colour_plane_flag)
798
 
            chroma_type = GST_VAAPI_CHROMA_TYPE_YUV444;
799
 
        break;
800
 
    }
801
 
    return chroma_type;
 
1180
/* Activates the supplied PPS */
 
1181
static GstH264PPS *
 
1182
ensure_pps(GstVaapiDecoderH264 *decoder, GstH264PPS *pps)
 
1183
{
 
1184
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
 
1185
    GstVaapiParserInfoH264 * const pi = priv->pps[pps->id];
 
1186
 
 
1187
    gst_vaapi_parser_info_h264_replace(&priv->active_pps, pi);
 
1188
    return pi ? &pi->data.pps : NULL;
 
1189
}
 
1190
 
 
1191
/* Returns the active PPS */
 
1192
static inline GstH264PPS *
 
1193
get_pps(GstVaapiDecoderH264 *decoder)
 
1194
{
 
1195
    GstVaapiParserInfoH264 * const pi = decoder->priv.active_pps;
 
1196
 
 
1197
    return pi ? &pi->data.pps : NULL;
 
1198
}
 
1199
 
 
1200
/* Activate the supplied SPS */
 
1201
static GstH264SPS *
 
1202
ensure_sps(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
 
1203
{
 
1204
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
 
1205
    GstVaapiParserInfoH264 * const pi = priv->sps[sps->id];
 
1206
 
 
1207
    gst_vaapi_parser_info_h264_replace(&priv->active_sps, pi);
 
1208
    return pi ? &pi->data.sps : NULL;
 
1209
}
 
1210
 
 
1211
/* Returns the active SPS */
 
1212
static inline GstH264SPS *
 
1213
get_sps(GstVaapiDecoderH264 *decoder)
 
1214
{
 
1215
    GstVaapiParserInfoH264 * const pi = decoder->priv.active_sps;
 
1216
 
 
1217
    return pi ? &pi->data.sps : NULL;
 
1218
}
 
1219
 
 
1220
static void
 
1221
fill_profiles(GstVaapiProfile profiles[16], guint *n_profiles_ptr,
 
1222
    GstVaapiProfile profile)
 
1223
{
 
1224
    guint n_profiles = *n_profiles_ptr;
 
1225
 
 
1226
    profiles[n_profiles++] = profile;
 
1227
    switch (profile) {
 
1228
    case GST_VAAPI_PROFILE_H264_MAIN:
 
1229
        profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
 
1230
        break;
 
1231
    default:
 
1232
        break;
 
1233
    }
 
1234
    *n_profiles_ptr = n_profiles;
 
1235
}
 
1236
 
 
1237
/* Fills in compatible profiles for MVC decoding */
 
1238
static void
 
1239
fill_profiles_mvc(GstVaapiDecoderH264 *decoder, GstVaapiProfile profiles[16],
 
1240
    guint *n_profiles_ptr, guint dpb_size)
 
1241
{
 
1242
    const gchar * const vendor_string =
 
1243
        gst_vaapi_display_get_vendor_string(GST_VAAPI_DECODER_DISPLAY(decoder));
 
1244
 
 
1245
    gboolean add_high_profile = FALSE;
 
1246
    struct map {
 
1247
        const gchar *str;
 
1248
        guint str_len;
 
1249
    };
 
1250
    const struct map *m;
 
1251
 
 
1252
    // Drivers that support slice level decoding
 
1253
    if (vendor_string && dpb_size <= 16) {
 
1254
        static const struct map drv_names[] = {
 
1255
            { "Intel i965 driver", 17 },
 
1256
            { NULL, 0 }
 
1257
        };
 
1258
        for (m = drv_names; m->str != NULL && !add_high_profile; m++) {
 
1259
            if (g_ascii_strncasecmp(vendor_string, m->str, m->str_len) == 0)
 
1260
                add_high_profile = TRUE;
 
1261
        }
 
1262
    }
 
1263
 
 
1264
    if (add_high_profile)
 
1265
        fill_profiles(profiles, n_profiles_ptr, GST_VAAPI_PROFILE_H264_HIGH);
802
1266
}
803
1267
 
804
1268
static GstVaapiProfile
805
 
get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
 
1269
get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps, guint dpb_size)
806
1270
{
807
1271
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
808
1272
    GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(decoder);
809
 
    GstVaapiProfile profile, profiles[2];
 
1273
    GstVaapiProfile profile, profiles[4];
810
1274
    guint i, n_profiles = 0;
811
1275
 
812
 
    profile = h264_get_profile(sps);
 
1276
    profile = gst_vaapi_utils_h264_get_profile(sps->profile_idc);
813
1277
    if (!profile)
814
1278
        return GST_VAAPI_PROFILE_UNKNOWN;
815
1279
 
816
 
    profiles[n_profiles++] = profile;
 
1280
    fill_profiles(profiles, &n_profiles, profile);
817
1281
    switch (profile) {
818
 
    case GST_VAAPI_PROFILE_H264_MAIN:
819
 
        profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
 
1282
    case GST_VAAPI_PROFILE_H264_BASELINE:
 
1283
        if (sps->constraint_set1_flag) { // A.2.2 (main profile)
 
1284
            fill_profiles(profiles, &n_profiles,
 
1285
                GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE);
 
1286
            fill_profiles(profiles, &n_profiles,
 
1287
                GST_VAAPI_PROFILE_H264_MAIN);
 
1288
        }
 
1289
        break;
 
1290
    case GST_VAAPI_PROFILE_H264_EXTENDED:
 
1291
        if (sps->constraint_set1_flag) { // A.2.2 (main profile)
 
1292
            fill_profiles(profiles, &n_profiles,
 
1293
                GST_VAAPI_PROFILE_H264_MAIN);
 
1294
        }
 
1295
        break;
 
1296
    case GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH:
 
1297
        if (priv->max_views == 2) {
 
1298
            fill_profiles(profiles, &n_profiles,
 
1299
                GST_VAAPI_PROFILE_H264_STEREO_HIGH);
 
1300
        }
 
1301
        fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
 
1302
        break;
 
1303
    case GST_VAAPI_PROFILE_H264_STEREO_HIGH:
 
1304
        if (sps->frame_mbs_only_flag) {
 
1305
            fill_profiles(profiles, &n_profiles,
 
1306
                GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH);
 
1307
        }
 
1308
        fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
820
1309
        break;
821
1310
    default:
822
1311
        break;
843
1332
    GstVaapiProfile profile;
844
1333
    GstVaapiChromaType chroma_type;
845
1334
    gboolean reset_context = FALSE;
846
 
    guint mb_width, mb_height;
847
 
 
848
 
    profile = get_profile(decoder, sps);
 
1335
    guint mb_width, mb_height, dpb_size, num_views;
 
1336
 
 
1337
    num_views = get_num_views(sps);
 
1338
    if (priv->max_views < num_views) {
 
1339
        priv->max_views = num_views;
 
1340
        GST_DEBUG("maximum number of views changed to %u", num_views);
 
1341
    }
 
1342
 
 
1343
    dpb_size = get_max_dec_frame_buffering(sps);
 
1344
    if (priv->dpb_size < dpb_size) {
 
1345
        GST_DEBUG("DPB size increased");
 
1346
        reset_context = TRUE;
 
1347
    }
 
1348
 
 
1349
    profile = get_profile(decoder, sps, dpb_size);
849
1350
    if (!profile) {
850
1351
        GST_ERROR("unsupported profile_idc %u", sps->profile_idc);
851
1352
        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
852
1353
    }
853
1354
 
854
 
    if (priv->profile != profile) {
 
1355
    if (!priv->profile || (priv->profile != profile && priv->max_views == 1)) {
855
1356
        GST_DEBUG("profile changed");
856
1357
        reset_context = TRUE;
857
1358
        priv->profile = profile;
858
1359
    }
859
1360
 
860
 
    chroma_type = h264_get_chroma_type(sps);
861
 
    if (!chroma_type || chroma_type != GST_VAAPI_CHROMA_TYPE_YUV420) {
 
1361
    chroma_type = gst_vaapi_utils_h264_get_chroma_type(sps->chroma_format_idc);
 
1362
    if (!chroma_type) {
862
1363
        GST_ERROR("unsupported chroma_format_idc %u", sps->chroma_format_idc);
863
1364
        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
864
1365
    }
880
1381
    }
881
1382
 
882
1383
    priv->progressive_sequence = sps->frame_mbs_only_flag;
883
 
#if 0
884
 
    /* XXX: we only output complete frames for now */
885
1384
    gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
886
 
#endif
887
1385
 
888
1386
    gst_vaapi_decoder_set_pixel_aspect_ratio(
889
1387
        base_decoder,
897
1395
    /* XXX: fix surface size when cropping is implemented */
898
1396
    info.profile    = priv->profile;
899
1397
    info.entrypoint = priv->entrypoint;
 
1398
    info.chroma_type = priv->chroma_type;
900
1399
    info.width      = sps->width;
901
1400
    info.height     = sps->height;
902
 
    info.ref_frames = get_max_dec_frame_buffering(sps);
 
1401
    info.ref_frames = dpb_size;
903
1402
 
904
1403
    if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
905
1404
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
906
1405
    priv->has_context = TRUE;
907
1406
 
908
1407
    /* Reset DPB */
909
 
    dpb_reset(decoder, sps);
 
1408
    if (!dpb_reset(decoder, dpb_size))
 
1409
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
 
1410
 
 
1411
    /* Reset MVC data */
 
1412
    if (!mvc_reset(decoder))
 
1413
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
910
1414
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
911
1415
}
912
1416
 
913
1417
static void
914
 
fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps)
 
1418
fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
 
1419
    const GstH264SPS *sps)
915
1420
{
916
 
    const guint8 (* const ScalingList4x4)[6][16] = &pps->scaling_lists_4x4;
917
 
    guint i, j;
 
1421
    guint i;
918
1422
 
919
1423
    /* There are always 6 4x4 scaling lists */
920
1424
    g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4) == 6);
921
1425
    g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);
922
1426
 
923
 
    if (sizeof(iq_matrix->ScalingList4x4[0][0]) == 1)
924
 
        memcpy(iq_matrix->ScalingList4x4, *ScalingList4x4,
925
 
               sizeof(iq_matrix->ScalingList4x4));
926
 
    else {
927
 
        for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++) {
928
 
            for (j = 0; j < G_N_ELEMENTS(iq_matrix->ScalingList4x4[i]); j++)
929
 
                iq_matrix->ScalingList4x4[i][j] = (*ScalingList4x4)[i][j];
930
 
        }
931
 
    }
 
1427
    for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++)
 
1428
        gst_h264_quant_matrix_4x4_get_raster_from_zigzag(
 
1429
            iq_matrix->ScalingList4x4[i], pps->scaling_lists_4x4[i]);
932
1430
}
933
1431
 
934
1432
static void
935
 
fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps)
 
1433
fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
 
1434
    const GstH264SPS *sps)
936
1435
{
937
 
    const guint8 (* const ScalingList8x8)[6][64] = &pps->scaling_lists_8x8;
938
 
    const GstH264SPS * const sps = pps->sequence;
939
 
    guint i, j, n;
 
1436
    guint i, n;
940
1437
 
941
1438
    /* If chroma_format_idc != 3, there are up to 2 8x8 scaling lists */
942
1439
    if (!pps->transform_8x8_mode_flag)
945
1442
    g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8) >= 2);
946
1443
    g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8[0]) == 64);
947
1444
 
948
 
    if (sizeof(iq_matrix->ScalingList8x8[0][0]) == 1)
949
 
        memcpy(iq_matrix->ScalingList8x8, *ScalingList8x8,
950
 
               sizeof(iq_matrix->ScalingList8x8));
951
 
    else {
952
 
        n = (sps->chroma_format_idc != 3) ? 2 : 6;
953
 
        for (i = 0; i < n; i++) {
954
 
            for (j = 0; j < G_N_ELEMENTS(iq_matrix->ScalingList8x8[i]); j++)
955
 
                iq_matrix->ScalingList8x8[i][j] = (*ScalingList8x8)[i][j];
956
 
        }
 
1445
    n = (sps->chroma_format_idc != 3) ? 2 : 6;
 
1446
    for (i = 0; i < n; i++) {
 
1447
        gst_h264_quant_matrix_8x8_get_raster_from_zigzag(
 
1448
            iq_matrix->ScalingList8x8[i], pps->scaling_lists_8x8[i]);
957
1449
    }
958
1450
}
959
1451
 
961
1453
ensure_quant_matrix(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
962
1454
{
963
1455
    GstVaapiPicture * const base_picture = &picture->base;
964
 
    GstH264PPS * const pps = picture->pps;
965
 
    GstH264SPS * const sps = pps->sequence;
 
1456
    GstH264PPS * const pps = get_pps(decoder);
 
1457
    GstH264SPS * const sps = get_sps(decoder);
966
1458
    VAIQMatrixBufferH264 *iq_matrix;
967
1459
 
968
1460
    base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(H264, decoder);
977
1469
    if (sps->chroma_format_idc == 3)
978
1470
        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
979
1471
 
980
 
    fill_iq_matrix_4x4(iq_matrix, pps);
981
 
    fill_iq_matrix_8x8(iq_matrix, pps);
 
1472
    fill_iq_matrix_4x4(iq_matrix, pps, sps);
 
1473
    fill_iq_matrix_8x8(iq_matrix, pps, sps);
982
1474
 
983
1475
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
984
1476
}
985
1477
 
 
1478
static inline gboolean
 
1479
is_valid_state(guint state, guint ref_state)
 
1480
{
 
1481
    return (state & ref_state) == ref_state;
 
1482
}
 
1483
 
986
1484
static GstVaapiDecoderStatus
987
1485
decode_current_picture(GstVaapiDecoderH264 *decoder)
988
1486
{
989
1487
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
990
1488
    GstVaapiPictureH264 * const picture = priv->current_picture;
991
1489
 
 
1490
    if (!is_valid_state(priv->decoder_state, GST_H264_VIDEO_STATE_VALID_PICTURE))
 
1491
        goto drop_frame;
 
1492
    priv->decoder_state = 0;
 
1493
 
992
1494
    if (!picture)
993
1495
        return GST_VAAPI_DECODER_STATUS_SUCCESS;
994
1496
 
 
1497
    if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
 
1498
        goto error;
995
1499
    if (!exec_ref_pic_marking(decoder, picture))
996
1500
        goto error;
997
1501
    if (!dpb_add(decoder, picture))
998
1502
        goto error;
999
 
    if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
1000
 
        goto error;
1001
 
    if (priv->prev_frame && gst_vaapi_frame_store_has_frame(priv->prev_frame))
1002
 
        gst_vaapi_picture_replace(&priv->current_picture, NULL);
 
1503
    gst_vaapi_picture_replace(&priv->current_picture, NULL);
1003
1504
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
1004
1505
 
1005
1506
error:
1006
1507
    /* XXX: fix for cases where first field failed to be decoded */
1007
1508
    gst_vaapi_picture_replace(&priv->current_picture, NULL);
1008
1509
    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
 
1510
 
 
1511
drop_frame:
 
1512
    priv->decoder_state = 0;
 
1513
    return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
1009
1514
}
1010
1515
 
1011
1516
static GstVaapiDecoderStatus
1018
1523
 
1019
1524
    GST_DEBUG("parse SPS");
1020
1525
 
 
1526
    priv->parser_state = 0;
 
1527
 
1021
1528
    /* Variables that don't have inferred values per the H.264
1022
1529
       standard but that should get a default value anyway */
1023
1530
    sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1026
1533
    if (result != GST_H264_PARSER_OK)
1027
1534
        return get_status(result);
1028
1535
 
1029
 
    priv->got_sps = TRUE;
 
1536
    priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
 
1537
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
 
1538
}
 
1539
 
 
1540
static GstVaapiDecoderStatus
 
1541
parse_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
 
1542
{
 
1543
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
 
1544
    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
 
1545
    GstH264SPS * const sps = &pi->data.sps;
 
1546
    GstH264ParserResult result;
 
1547
 
 
1548
    GST_DEBUG("parse subset SPS");
 
1549
 
 
1550
    /* Variables that don't have inferred values per the H.264
 
1551
       standard but that should get a default value anyway */
 
1552
    sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
 
1553
 
 
1554
    result = gst_h264_parser_parse_subset_sps(priv->parser, &pi->nalu, sps,
 
1555
        TRUE);
 
1556
    if (result != GST_H264_PARSER_OK)
 
1557
        return get_status(result);
 
1558
 
 
1559
    priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1030
1560
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
1031
1561
}
1032
1562
 
1040
1570
 
1041
1571
    GST_DEBUG("parse PPS");
1042
1572
 
 
1573
    priv->parser_state &= GST_H264_VIDEO_STATE_GOT_SPS;
 
1574
 
1043
1575
    /* Variables that don't have inferred values per the H.264
1044
1576
       standard but that should get a default value anyway */
1045
1577
    pps->slice_group_map_type = 0;
1049
1581
    if (result != GST_H264_PARSER_OK)
1050
1582
        return get_status(result);
1051
1583
 
1052
 
    priv->got_pps = TRUE;
 
1584
    priv->parser_state |= GST_H264_VIDEO_STATE_GOT_PPS;
1053
1585
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
1054
1586
}
1055
1587
 
1058
1590
{
1059
1591
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
1060
1592
    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1061
 
    GstH264SEIMessage sei;
 
1593
    GArray ** const sei_ptr = &pi->data.sei;
1062
1594
    GstH264ParserResult result;
1063
1595
 
1064
1596
    GST_DEBUG("parse SEI");
1065
1597
 
1066
 
    memset(&sei, 0, sizeof(sei));
1067
 
    result = gst_h264_parser_parse_sei(priv->parser, &pi->nalu, &sei);
 
1598
    result = gst_h264_parser_parse_sei(priv->parser, &pi->nalu, sei_ptr);
1068
1599
    if (result != GST_H264_PARSER_OK) {
1069
 
        GST_WARNING("failed to parse SEI, payload type:%d", sei.payloadType);
 
1600
        GST_WARNING("failed to parse SEI messages");
1070
1601
        return get_status(result);
1071
1602
    }
1072
 
 
1073
1603
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
1074
1604
}
1075
1605
 
1079
1609
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
1080
1610
    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1081
1611
    GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
 
1612
    GstH264NalUnit * const nalu = &pi->nalu;
 
1613
    GstH264SPS *sps;
1082
1614
    GstH264ParserResult result;
1083
1615
 
1084
1616
    GST_DEBUG("parse slice");
1085
1617
 
 
1618
    priv->parser_state &= (GST_H264_VIDEO_STATE_GOT_SPS|
 
1619
                           GST_H264_VIDEO_STATE_GOT_PPS);
 
1620
 
 
1621
    /* Propagate Prefix NAL unit info, if necessary */
 
1622
    switch (nalu->type) {
 
1623
    case GST_H264_NAL_SLICE:
 
1624
    case GST_H264_NAL_SLICE_IDR: {
 
1625
        GstVaapiParserInfoH264 * const prev_pi = priv->prev_pi;
 
1626
        if (prev_pi && prev_pi->nalu.type == GST_H264_NAL_PREFIX_UNIT) {
 
1627
            /* MVC sequences shall have a Prefix NAL unit immediately
 
1628
               preceding this NAL unit */
 
1629
            pi->nalu.extension_type = prev_pi->nalu.extension_type;
 
1630
            pi->nalu.extension = prev_pi->nalu.extension;
 
1631
        }
 
1632
        else {
 
1633
            /* In the very unlikely case there is no Prefix NAL unit
 
1634
               immediately preceding this NAL unit, try to infer some
 
1635
               defaults (H.7.4.1.1) */
 
1636
            GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
 
1637
            mvc->non_idr_flag = !(nalu->type == GST_H264_NAL_SLICE_IDR);
 
1638
            nalu->idr_pic_flag = !mvc->non_idr_flag;
 
1639
            mvc->priority_id = 0;
 
1640
            mvc->view_id = 0;
 
1641
            mvc->temporal_id = 0;
 
1642
            mvc->anchor_pic_flag = 0;
 
1643
            mvc->inter_view_flag = 1;
 
1644
        }
 
1645
        break;
 
1646
    }
 
1647
    }
 
1648
 
1086
1649
    /* Variables that don't have inferred values per the H.264
1087
1650
       standard but that should get a default value anyway */
1088
1651
    slice_hdr->cabac_init_idc = 0;
1093
1656
    if (result != GST_H264_PARSER_OK)
1094
1657
        return get_status(result);
1095
1658
 
 
1659
    sps = slice_hdr->pps->sequence;
 
1660
 
 
1661
    /* Update MVC data */
 
1662
    pi->view_id = get_view_id(&pi->nalu);
 
1663
    pi->voc = get_view_order_index(sps, pi->view_id);
 
1664
 
 
1665
    priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
 
1666
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
 
1667
}
 
1668
 
 
1669
static GstVaapiDecoderStatus
 
1670
decode_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
 
1671
{
 
1672
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
 
1673
    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
 
1674
    GstH264SPS * const sps = &pi->data.sps;
 
1675
 
 
1676
    GST_DEBUG("decode SPS");
 
1677
 
 
1678
    gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
 
1679
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
 
1680
}
 
1681
 
 
1682
static GstVaapiDecoderStatus
 
1683
decode_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
 
1684
{
 
1685
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
 
1686
    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
 
1687
    GstH264SPS * const sps = &pi->data.sps;
 
1688
 
 
1689
    GST_DEBUG("decode subset SPS");
 
1690
 
 
1691
    gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
 
1692
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
 
1693
}
 
1694
 
 
1695
static GstVaapiDecoderStatus
 
1696
decode_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
 
1697
{
 
1698
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
 
1699
    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
 
1700
    GstH264PPS * const pps = &pi->data.pps;
 
1701
 
 
1702
    GST_DEBUG("decode PPS");
 
1703
 
 
1704
    gst_vaapi_parser_info_h264_replace(&priv->pps[pps->id], pi);
1096
1705
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
1097
1706
}
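/* Editor's note (illustrative, not part of the original diff): the decode_*()
   functions above only store the parsed headers into per-id tables
   (priv->sps[], priv->pps[]); the get_sps()/get_pps() helpers used throughout
   the rest of the file are assumed to return the entries activated for the
   current picture. A lookup by id would look roughly like this, assuming
   priv->sps is a fixed-size array of parser infos: */
static GstH264SPS *
sketch_lookup_sps(GstVaapiDecoderH264Private *priv, guint sps_id)
{
    GstVaapiParserInfoH264 * const pi =
        sps_id < G_N_ELEMENTS(priv->sps) ? priv->sps[sps_id] : NULL;

    return pi ? &pi->data.sps : NULL;
}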
1098
1707
 
1099
1708
static GstVaapiDecoderStatus
1100
1709
decode_sequence_end(GstVaapiDecoderH264 *decoder)
1101
1710
{
 
1711
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
1102
1712
    GstVaapiDecoderStatus status;
1103
1713
 
1104
1714
    GST_DEBUG("decode sequence-end");
1107
1717
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
1108
1718
        return status;
1109
1719
 
1110
 
    dpb_flush(decoder);
 
1720
    dpb_flush(decoder, NULL);
 
1721
 
 
1722
    /* Reset defaults, should there be a new sequence available next */
 
1723
    priv->max_views = 1;
1111
1724
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
1112
1725
}
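/* Editor's note (assumption, not part of the original diff): priv->max_views
   is reset to 1 here; it is presumably raised again once a subset SPS with an
   MVC extension is activated for the next sequence, along these lines: */
static guint
sketch_num_views(const GstH264SPS *sps)
{
    if (sps->extension_type == GST_H264_NAL_EXTENSION_MVC)
        return sps->extension.mvc.num_views_minus1 + 1;
    return 1;
}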
1113
1726
 
1120
1733
)
1121
1734
{
1122
1735
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
1123
 
    GstH264PPS * const pps = slice_hdr->pps;
1124
 
    GstH264SPS * const sps = pps->sequence;
 
1736
    GstH264SPS * const sps = get_sps(decoder);
1125
1737
    const gint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
1126
1738
    gint32 temp_poc;
1127
1739
 
1181
1793
)
1182
1794
{
1183
1795
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
1184
 
    GstH264PPS * const pps = slice_hdr->pps;
1185
 
    GstH264SPS * const sps = pps->sequence;
 
1796
    GstH264SPS * const sps = get_sps(decoder);
1186
1797
    const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1187
1798
    gint32 prev_frame_num_offset, abs_frame_num, expected_poc;
1188
1799
    guint i;
1264
1875
)
1265
1876
{
1266
1877
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
1267
 
    GstH264PPS * const pps = slice_hdr->pps;
1268
 
    GstH264SPS * const sps = pps->sequence;
 
1878
    GstH264SPS * const sps = get_sps(decoder);
1269
1879
    const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1270
1880
    gint32 prev_frame_num_offset, temp_poc;
1271
1881
 
1308
1918
)
1309
1919
{
1310
1920
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
1311
 
    GstH264PPS * const pps = slice_hdr->pps;
1312
 
    GstH264SPS * const sps = pps->sequence;
 
1921
    GstH264SPS * const sps = get_sps(decoder);
1313
1922
 
1314
1923
    switch (sps->pic_order_cnt_type) {
1315
1924
    case 0:
1393
2002
)
1394
2003
{
1395
2004
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
1396
 
    GstH264PPS * const pps = slice_hdr->pps;
1397
 
    GstH264SPS * const sps = pps->sequence;
 
2005
    GstH264SPS * const sps = get_sps(decoder);
1398
2006
    const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1399
2007
    guint i;
1400
2008
 
1403
2011
    for (i = 0; i < priv->short_ref_count; i++) {
1404
2012
        GstVaapiPictureH264 * const pic = priv->short_ref[i];
1405
2013
 
 
2014
        // (H.8.2)
 
2015
        if (pic->base.view_id != picture->base.view_id)
 
2016
            continue;
 
2017
 
1406
2018
        // (8-27)
1407
2019
        if (pic->frame_num > priv->frame_num)
1408
2020
            pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
1423
2035
    for (i = 0; i < priv->long_ref_count; i++) {
1424
2036
        GstVaapiPictureH264 * const pic = priv->long_ref[i];
1425
2037
 
 
2038
        // (H.8.2)
 
2039
        if (pic->base.view_id != picture->base.view_id)
 
2040
            continue;
 
2041
 
1426
2042
        // (8-29, 8-32, 8-33)
1427
2043
        if (GST_VAAPI_PICTURE_IS_FRAME(picture))
1428
2044
            pic->long_term_pic_num = pic->long_term_frame_idx;
1491
2107
    *RefPicList_count = n;
1492
2108
}
1493
2109
 
 
2110
/* Finds the inter-view reference picture with the supplied view id */
 
2111
static GstVaapiPictureH264 *
 
2112
find_inter_view_reference(GstVaapiDecoderH264 *decoder, guint16 view_id)
 
2113
{
 
2114
    GPtrArray * const inter_views = decoder->priv.inter_views;
 
2115
    guint i;
 
2116
 
 
2117
    for (i = 0; i < inter_views->len; i++) {
 
2118
        GstVaapiPictureH264 * const picture = g_ptr_array_index(inter_views, i);
 
2119
        if (picture->base.view_id == view_id)
 
2120
            return picture;
 
2121
    }
 
2122
 
 
2123
    GST_WARNING("failed to find inter-view reference picture for view_id: %d",
 
2124
        view_id);
 
2125
    return NULL;
 
2126
}
 
2127
 
 
2128
/* Checks whether the view id exists in the supplied list of view ids */
 
2129
static gboolean
 
2130
find_view_id(guint16 view_id, const guint16 *view_ids, guint num_view_ids)
 
2131
{
 
2132
    guint i;
 
2133
 
 
2134
    for (i = 0; i < num_view_ids; i++) {
 
2135
        if (view_ids[i] == view_id)
 
2136
            return TRUE;
 
2137
    }
 
2138
    return FALSE;
 
2139
}
 
2140
 
 
2141
static gboolean
 
2142
find_view_id_in_view(guint16 view_id, const GstH264SPSExtMVCView *view,
 
2143
    gboolean is_anchor)
 
2144
{
 
2145
    if (is_anchor)
 
2146
        return (find_view_id(view_id, view->anchor_ref_l0,
 
2147
                    view->num_anchor_refs_l0) ||
 
2148
                find_view_id(view_id, view->anchor_ref_l1,
 
2149
                    view->num_anchor_refs_l1));
 
2150
 
 
2151
    return (find_view_id(view_id, view->non_anchor_ref_l0,
 
2152
                view->num_non_anchor_refs_l0) ||
 
2153
            find_view_id(view_id, view->non_anchor_ref_l1,
 
2154
                view->num_non_anchor_refs_l1));
 
2155
}
 
2156
 
 
2157
/* Checks whether the inter-view reference picture with the supplied
 
2158
   view id is used for decoding the current view component picture */
 
2159
static gboolean
 
2160
is_inter_view_reference_for_picture(GstVaapiDecoderH264 *decoder,
 
2161
    guint16 view_id, GstVaapiPictureH264 *picture)
 
2162
{
 
2163
    const GstH264SPS * const sps = get_sps(decoder);
 
2164
    gboolean is_anchor;
 
2165
 
 
2166
    if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
 
2167
        sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
 
2168
        return FALSE;
 
2169
 
 
2170
    is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
 
2171
    return find_view_id_in_view(view_id,
 
2172
        &sps->extension.mvc.view[picture->base.voc], is_anchor);
 
2173
}
 
2174
 
 
2175
/* Checks whether the supplied inter-view reference picture is used
 
2176
   for decoding the next view component pictures */
 
2177
static gboolean
 
2178
is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
 
2179
    GstVaapiPictureH264 *picture)
 
2180
{
 
2181
    const GstH264SPS * const sps = get_sps(decoder);
 
2182
    gboolean is_anchor;
 
2183
    guint i, num_views;
 
2184
 
 
2185
    if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
 
2186
        sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
 
2187
        return FALSE;
 
2188
 
 
2189
    is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
 
2190
    num_views = sps->extension.mvc.num_views_minus1 + 1;
 
2191
    for (i = picture->base.voc + 1; i < num_views; i++) {
 
2192
        const GstH264SPSExtMVCView * const view = &sps->extension.mvc.view[i];
 
2193
        if (find_view_id_in_view(picture->base.view_id, view, is_anchor))
 
2194
            return TRUE;
 
2195
    }
 
2196
    return FALSE;
 
2197
}
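/* Editor's sketch (assumption, not part of the original diff): a picture kept
   in priv->inter_views only needs to stay there while later view components
   of the same access unit may still reference it, so DPB management could use
   the predicate above roughly as follows: */
static gboolean
sketch_can_drop_inter_view_ref(GstVaapiDecoderH264 *decoder,
    GstVaapiPictureH264 *picture)
{
    return !is_inter_view_reference_for_next_pictures(decoder, picture);
}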
 
2198
 
 
2199
/* H.8.2.1 - Initialization process for inter-view prediction references */
 
2200
static void
 
2201
init_picture_refs_mvc_1(GstVaapiDecoderH264 *decoder,
 
2202
    GstVaapiPictureH264 **ref_list, guint *ref_list_count_ptr, guint num_refs,
 
2203
    const guint16 *view_ids, guint num_view_ids)
 
2204
{
 
2205
    guint j, n;
 
2206
 
 
2207
    n = *ref_list_count_ptr;
 
2208
    for (j = 0; j < num_view_ids && n < num_refs; j++) {
 
2209
        GstVaapiPictureH264 * const pic =
 
2210
            find_inter_view_reference(decoder, view_ids[j]);
 
2211
        if (pic)
 
2212
            ref_list[n++] = pic;
 
2213
    }
 
2214
    *ref_list_count_ptr = n;
 
2215
}
 
2216
 
 
2217
static inline void
 
2218
init_picture_refs_mvc(GstVaapiDecoderH264 *decoder,
 
2219
    GstVaapiPictureH264 *picture, GstH264SliceHdr *slice_hdr, guint list)
 
2220
{
 
2221
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
 
2222
    const GstH264SPS * const sps = get_sps(decoder);
 
2223
    const GstH264SPSExtMVCView *view;
 
2224
 
 
2225
    GST_DEBUG("initialize reference picture list for inter-view prediction");
 
2226
 
 
2227
    if (sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
 
2228
        return;
 
2229
    view = &sps->extension.mvc.view[picture->base.voc];
 
2230
 
 
2231
#define INVOKE_INIT_PICTURE_REFS_MVC(ref_list, view_list) do {          \
 
2232
        init_picture_refs_mvc_1(decoder,                                \
 
2233
            priv->RefPicList##ref_list,                                 \
 
2234
            &priv->RefPicList##ref_list##_count,                        \
 
2235
            slice_hdr->num_ref_idx_l##ref_list##_active_minus1 + 1,     \
 
2236
            view->view_list##_l##ref_list,                              \
 
2237
            view->num_##view_list##s_l##ref_list);                      \
 
2238
    } while (0)
 
2239
 
 
2240
    if (list == 0) {
 
2241
        if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
 
2242
            INVOKE_INIT_PICTURE_REFS_MVC(0, anchor_ref);
 
2243
        else
 
2244
            INVOKE_INIT_PICTURE_REFS_MVC(0, non_anchor_ref);
 
2245
    }
 
2246
    else {
 
2247
        if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
 
2248
            INVOKE_INIT_PICTURE_REFS_MVC(1, anchor_ref);
 
2249
        else
 
2250
            INVOKE_INIT_PICTURE_REFS_MVC(1, non_anchor_ref);
 
2251
    }
 
2252
 
 
2253
#undef INVOKE_INIT_PICTURE_REFS_MVC
 
2254
}
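/* For readability (editor's note, not part of the original diff): with
   ref_list = 0 and view_list = anchor_ref, the INVOKE_INIT_PICTURE_REFS_MVC
   macro above expands to the following call, wrapped in do { ... } while (0): */
init_picture_refs_mvc_1(decoder,
    priv->RefPicList0,
    &priv->RefPicList0_count,
    slice_hdr->num_ref_idx_l0_active_minus1 + 1,
    view->anchor_ref_l0,
    view->num_anchor_refs_l0);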
 
2255
 
1494
2256
static void
1495
2257
init_picture_refs_p_slice(
1496
2258
    GstVaapiDecoderH264 *decoder,
1550
2312
            long_ref,           long_ref_count
1551
2313
        );
1552
2314
    }
 
2315
 
 
2316
    if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
 
2317
        /* RefPicList0 */
 
2318
        init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
 
2319
    }
1553
2320
}
1554
2321
 
1555
2322
static void
1706
2473
        priv->RefPicList1[0] = priv->RefPicList1[1];
1707
2474
        priv->RefPicList1[1] = tmp;
1708
2475
    }
 
2476
 
 
2477
    if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
 
2478
        /* RefPicList0 */
 
2479
        init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
 
2480
 
 
2481
        /* RefPicList1 */
 
2482
        init_picture_refs_mvc(decoder, picture, slice_hdr, 1);
 
2483
    }
1709
2484
}
1710
2485
 
1711
2486
#undef SORT_REF_LIST
1749
2524
)
1750
2525
{
1751
2526
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
1752
 
    GstH264PPS * const pps = slice_hdr->pps;
1753
 
    GstH264SPS * const sps = pps->sequence;
 
2527
    GstH264SPS * const sps = get_sps(decoder);
1754
2528
    GstH264RefPicListModification *ref_pic_list_modification;
1755
2529
    guint num_ref_pic_list_modifications;
1756
2530
    GstVaapiPictureH264 **ref_list;
1757
2531
    guint *ref_list_count_ptr, ref_list_count, ref_list_idx = 0;
1758
 
    guint i, j, n, num_refs;
 
2532
    const guint16 *view_ids = NULL;
 
2533
    guint i, j, n, num_refs, num_view_ids = 0;
1759
2534
    gint found_ref_idx;
1760
 
    gint32 MaxPicNum, CurrPicNum, picNumPred;
 
2535
    gint32 MaxPicNum, CurrPicNum, picNumPred, picViewIdxPred;
1761
2536
 
1762
2537
    GST_DEBUG("modification process of reference picture list %u", list);
1763
2538
 
1767
2542
        ref_list                       = priv->RefPicList0;
1768
2543
        ref_list_count_ptr             = &priv->RefPicList0_count;
1769
2544
        num_refs                       = slice_hdr->num_ref_idx_l0_active_minus1 + 1;
 
2545
 
 
2546
        if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
 
2547
            sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
 
2548
            const GstH264SPSExtMVCView * const view =
 
2549
                &sps->extension.mvc.view[picture->base.voc];
 
2550
            if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
 
2551
                view_ids = view->anchor_ref_l0;
 
2552
                num_view_ids = view->num_anchor_refs_l0;
 
2553
            }
 
2554
            else {
 
2555
                view_ids = view->non_anchor_ref_l0;
 
2556
                num_view_ids = view->num_non_anchor_refs_l0;
 
2557
            }
 
2558
        }
1770
2559
    }
1771
2560
    else {
1772
2561
        ref_pic_list_modification      = slice_hdr->ref_pic_list_modification_l1;
1774
2563
        ref_list                       = priv->RefPicList1;
1775
2564
        ref_list_count_ptr             = &priv->RefPicList1_count;
1776
2565
        num_refs                       = slice_hdr->num_ref_idx_l1_active_minus1 + 1;
 
2566
 
 
2567
        if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
 
2568
            sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
 
2569
            const GstH264SPSExtMVCView * const view =
 
2570
                &sps->extension.mvc.view[picture->base.voc];
 
2571
            if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
 
2572
                view_ids = view->anchor_ref_l1;
 
2573
                num_view_ids = view->num_anchor_refs_l1;
 
2574
            }
 
2575
            else {
 
2576
                view_ids = view->non_anchor_ref_l1;
 
2577
                num_view_ids = view->num_non_anchor_refs_l1;
 
2578
            }
 
2579
        }
1777
2580
    }
1778
2581
    ref_list_count = *ref_list_count_ptr;
1779
2582
 
1787
2590
    }
1788
2591
 
1789
2592
    picNumPred = CurrPicNum;
 
2593
    picViewIdxPred = -1;
1790
2594
 
1791
2595
    for (i = 0; i < num_ref_pic_list_modifications; i++) {
1792
2596
        GstH264RefPicListModification * const l = &ref_pic_list_modification[i];
1832
2636
                PicNumF =
1833
2637
                    GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(ref_list[j]) ?
1834
2638
                    ref_list[j]->pic_num : MaxPicNum;
1835
 
                if (PicNumF != picNum)
 
2639
                if (PicNumF != picNum ||
 
2640
                    ref_list[j]->base.view_id != picture->base.view_id)
1836
2641
                    ref_list[n++] = ref_list[j];
1837
2642
            }
1838
2643
        }
1839
2644
 
1840
2645
        /* 8.2.4.3.2 - Long-term reference pictures */
1841
 
        else {
 
2646
        else if (l->modification_of_pic_nums_idc == 2) {
1842
2647
 
1843
2648
            for (j = num_refs; j > ref_list_idx; j--)
1844
2649
                ref_list[j] = ref_list[j - 1];
1854
2659
                LongTermPicNumF =
1855
2660
                    GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(ref_list[j]) ?
1856
2661
                    ref_list[j]->long_term_pic_num : INT_MAX;
1857
 
                if (LongTermPicNumF != l->value.long_term_pic_num)
 
2662
                if (LongTermPicNumF != l->value.long_term_pic_num ||
 
2663
                    ref_list[j]->base.view_id != picture->base.view_id)
 
2664
                    ref_list[n++] = ref_list[j];
 
2665
            }
 
2666
        }
 
2667
 
 
2668
        /* H.8.2.2.3 - Inter-view prediction reference pictures */
 
2669
        else if ((GST_VAAPI_PICTURE_IS_MVC(picture) &&
 
2670
                  sps->extension_type == GST_H264_NAL_EXTENSION_MVC) &&
 
2671
                 (l->modification_of_pic_nums_idc == 4 ||
 
2672
                  l->modification_of_pic_nums_idc == 5)) {
 
2673
            gint32 abs_diff_view_idx = l->value.abs_diff_view_idx_minus1 + 1;
 
2674
            gint32 picViewIdx, targetViewId;
 
2675
 
 
2676
            // (H-6)
 
2677
            if (l->modification_of_pic_nums_idc == 4) {
 
2678
                picViewIdx = picViewIdxPred - abs_diff_view_idx;
 
2679
                if (picViewIdx < 0)
 
2680
                    picViewIdx += num_view_ids;
 
2681
            }
 
2682
 
 
2683
            // (H-7)
 
2684
            else {
 
2685
                picViewIdx = picViewIdxPred + abs_diff_view_idx;
 
2686
                if (picViewIdx >= num_view_ids)
 
2687
                    picViewIdx -= num_view_ids;
 
2688
            }
 
2689
            picViewIdxPred = picViewIdx;
 
2690
 
 
2691
            // (H-8, H-9)
 
2692
            targetViewId = view_ids[picViewIdx];
 
2693
 
 
2694
            // (H-10)
 
2695
            for (j = num_refs; j > ref_list_idx; j--)
 
2696
                ref_list[j] = ref_list[j - 1];
 
2697
            ref_list[ref_list_idx++] =
 
2698
                find_inter_view_reference(decoder, targetViewId);
 
2699
            n = ref_list_idx;
 
2700
            for (j = ref_list_idx; j <= num_refs; j++) {
 
2701
                if (!ref_list[j])
 
2702
                    continue;
 
2703
                if (ref_list[j]->base.view_id != targetViewId ||
 
2704
                    ref_list[j]->base.poc != picture->base.poc)
1858
2705
                    ref_list[n++] = ref_list[j];
1859
2706
            }
1860
2707
        }
1890
2737
}
1891
2738
 
1892
2739
static void
1893
 
init_picture_ref_lists(GstVaapiDecoderH264 *decoder)
 
2740
init_picture_ref_lists(GstVaapiDecoderH264 *decoder,
 
2741
    GstVaapiPictureH264 *picture)
1894
2742
{
1895
2743
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
1896
2744
    guint i, j, short_ref_count, long_ref_count;
1897
2745
 
1898
2746
    short_ref_count = 0;
1899
2747
    long_ref_count  = 0;
1900
 
    if (GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture)) {
 
2748
    if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
1901
2749
        for (i = 0; i < priv->dpb_count; i++) {
1902
2750
            GstVaapiFrameStore * const fs = priv->dpb[i];
1903
 
            GstVaapiPictureH264 *picture;
 
2751
            GstVaapiPictureH264 *pic;
1904
2752
            if (!gst_vaapi_frame_store_has_frame(fs))
1905
2753
                continue;
1906
 
            picture = fs->buffers[0];
1907
 
            if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
1908
 
                priv->short_ref[short_ref_count++] = picture;
1909
 
            else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture))
1910
 
                priv->long_ref[long_ref_count++] = picture;
1911
 
            picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
1912
 
            picture->other_field = fs->buffers[1];
 
2754
            pic = fs->buffers[0];
 
2755
            if (pic->base.view_id != picture->base.view_id)
 
2756
                continue;
 
2757
            if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
 
2758
                priv->short_ref[short_ref_count++] = pic;
 
2759
            else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
 
2760
                priv->long_ref[long_ref_count++] = pic;
 
2761
            pic->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
 
2762
            pic->other_field = fs->buffers[1];
1913
2763
        }
1914
2764
    }
1915
2765
    else {
1916
2766
        for (i = 0; i < priv->dpb_count; i++) {
1917
2767
            GstVaapiFrameStore * const fs = priv->dpb[i];
1918
2768
            for (j = 0; j < fs->num_buffers; j++) {
1919
 
                GstVaapiPictureH264 * const picture = fs->buffers[j];
1920
 
                if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
1921
 
                    priv->short_ref[short_ref_count++] = picture;
1922
 
                else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture))
1923
 
                    priv->long_ref[long_ref_count++] = picture;
1924
 
                picture->structure = picture->base.structure;
1925
 
                picture->other_field = fs->buffers[j ^ 1];
 
2769
                GstVaapiPictureH264 * const pic = fs->buffers[j];
 
2770
                if (pic->base.view_id != picture->base.view_id)
 
2771
                    continue;
 
2772
                if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
 
2773
                    priv->short_ref[short_ref_count++] = pic;
 
2774
                else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
 
2775
                    priv->long_ref[long_ref_count++] = pic;
 
2776
                pic->structure = pic->base.structure;
 
2777
                pic->other_field = fs->buffers[j ^ 1];
1926
2778
            }
1927
2779
        }
1928
2780
    }
1944
2796
)
1945
2797
{
1946
2798
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
1947
 
    GstVaapiPicture * const base_picture = &picture->base;
1948
2799
    guint i, num_refs;
1949
2800
 
1950
 
    init_picture_ref_lists(decoder);
 
2801
    init_picture_ref_lists(decoder, picture);
1951
2802
    init_picture_refs_pic_num(decoder, picture, slice_hdr);
1952
2803
 
1953
2804
    priv->RefPicList0_count = 0;
1954
2805
    priv->RefPicList1_count = 0;
1955
2806
 
1956
 
    switch (base_picture->type) {
1957
 
    case GST_VAAPI_PICTURE_TYPE_P:
1958
 
    case GST_VAAPI_PICTURE_TYPE_SP:
 
2807
    switch (slice_hdr->type % 5) {
 
2808
    case GST_H264_P_SLICE:
 
2809
    case GST_H264_SP_SLICE:
1959
2810
        init_picture_refs_p_slice(decoder, picture, slice_hdr);
1960
2811
        break;
1961
 
    case GST_VAAPI_PICTURE_TYPE_B:
 
2812
    case GST_H264_B_SLICE:
1962
2813
        init_picture_refs_b_slice(decoder, picture, slice_hdr);
1963
2814
        break;
1964
2815
    default:
1967
2818
 
1968
2819
    exec_picture_refs_modification(decoder, picture, slice_hdr);
1969
2820
 
1970
 
    switch (base_picture->type) {
1971
 
    case GST_VAAPI_PICTURE_TYPE_B:
 
2821
    switch (slice_hdr->type % 5) {
 
2822
    case GST_H264_B_SLICE:
1972
2823
        num_refs = 1 + slice_hdr->num_ref_idx_l1_active_minus1;
1973
2824
        for (i = priv->RefPicList1_count; i < num_refs; i++)
1974
2825
            priv->RefPicList1[i] = NULL;
1975
2826
        priv->RefPicList1_count = num_refs;
1976
2827
 
1977
2828
        // fall-through
1978
 
    case GST_VAAPI_PICTURE_TYPE_P:
1979
 
    case GST_VAAPI_PICTURE_TYPE_SP:
 
2829
    case GST_H264_P_SLICE:
 
2830
    case GST_H264_SP_SLICE:
1980
2831
        num_refs = 1 + slice_hdr->num_ref_idx_l0_active_minus1;
1981
2832
        for (i = priv->RefPicList0_count; i < num_refs; i++)
1982
2833
            priv->RefPicList0[i] = NULL;
2002
2853
    picture->frame_num_wrap     = priv->frame_num;
2003
2854
    picture->output_flag        = TRUE; /* XXX: conformant to Annex A only */
2004
2855
    base_picture->pts           = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
 
2856
    base_picture->type          = GST_VAAPI_PICTURE_TYPE_NONE;
 
2857
    base_picture->view_id       = pi->view_id;
 
2858
    base_picture->voc           = pi->voc;
 
2859
 
 
2860
    /* Initialize extensions */
 
2861
    switch (pi->nalu.extension_type) {
 
2862
    case GST_H264_NAL_EXTENSION_MVC: {
 
2863
        GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
 
2864
 
 
2865
        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_MVC);
 
2866
        if (mvc->inter_view_flag)
 
2867
            GST_VAAPI_PICTURE_FLAG_SET(picture,
 
2868
                GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
 
2869
        if (mvc->anchor_pic_flag)
 
2870
            GST_VAAPI_PICTURE_FLAG_SET(picture,
 
2871
                GST_VAAPI_PICTURE_FLAG_ANCHOR);
 
2872
        break;
 
2873
    }
 
2874
    }
2005
2875
 
2006
2876
    /* Reset decoder state for IDR pictures */
2007
 
    if (pi->nalu.type == GST_H264_NAL_SLICE_IDR) {
 
2877
    if (pi->nalu.idr_pic_flag) {
2008
2878
        GST_DEBUG("<IDR>");
2009
2879
        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
2010
 
        dpb_flush(decoder);
2011
 
    }
2012
 
 
2013
 
    /* Initialize slice type */
2014
 
    switch (slice_hdr->type % 5) {
2015
 
    case GST_H264_P_SLICE:
2016
 
        base_picture->type = GST_VAAPI_PICTURE_TYPE_P;
2017
 
        break;
2018
 
    case GST_H264_B_SLICE:
2019
 
        base_picture->type = GST_VAAPI_PICTURE_TYPE_B;
2020
 
        break;
2021
 
    case GST_H264_I_SLICE:
2022
 
        base_picture->type = GST_VAAPI_PICTURE_TYPE_I;
2023
 
        break;
2024
 
    case GST_H264_SP_SLICE:
2025
 
        base_picture->type = GST_VAAPI_PICTURE_TYPE_SP;
2026
 
        break;
2027
 
    case GST_H264_SI_SLICE:
2028
 
        base_picture->type = GST_VAAPI_PICTURE_TYPE_SI;
2029
 
        break;
 
2880
        dpb_flush(decoder, picture);
2030
2881
    }
2031
2882
 
2032
2883
    /* Initialize picture structure */
2056
2907
    }
2057
2908
 
2058
2909
    init_picture_poc(decoder, picture, slice_hdr);
2059
 
    init_picture_refs(decoder, picture, slice_hdr);
2060
2910
    return TRUE;
2061
2911
}
2062
2912
 
2065
2915
exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder)
2066
2916
{
2067
2917
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
2068
 
    GstH264PPS * const pps = priv->current_picture->pps;
2069
 
    GstH264SPS * const sps = pps->sequence;
 
2918
    GstH264SPS * const sps = get_sps(decoder);
2070
2919
    GstVaapiPictureH264 *ref_picture;
2071
2920
    guint i, m, max_num_ref_frames;
2072
2921
 
2172
3021
)
2173
3022
{
2174
3023
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
2175
 
    GstVaapiPictureH264 *ref_picture;
 
3024
    GstVaapiPictureH264 *ref_picture, *other_field;
2176
3025
    gint32 i, picNumX;
2177
3026
 
2178
3027
    for (i = 0; i < priv->long_ref_count; i++) {
2196
3045
    ref_picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
2197
3046
    gst_vaapi_picture_h264_set_reference(ref_picture,
2198
3047
        GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
2199
 
        GST_VAAPI_PICTURE_IS_FRAME(picture));
 
3048
        GST_VAAPI_PICTURE_IS_COMPLETE(picture));
 
3049
 
 
3050
    /* Assign LongTermFrameIdx to the other field if it was also
 
3051
       marked as "used for long-term reference */
 
3052
    other_field = ref_picture->other_field;
 
3053
    if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
 
3054
        other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
2200
3055
}
2201
3056
 
2202
3057
/* 8.2.5.4.4. Mark pictures with LongTermFrameIdx > max_long_term_frame_idx
2232
3087
{
2233
3088
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
2234
3089
 
2235
 
    dpb_flush(decoder);
 
3090
    dpb_flush(decoder, picture);
2236
3091
 
2237
3092
    priv->prev_pic_has_mmco5 = TRUE;
2238
3093
 
2257
3112
    GstH264RefPicMarking *ref_pic_marking
2258
3113
)
2259
3114
{
 
3115
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
 
3116
    GstVaapiPictureH264 *other_field;
 
3117
    guint i;
 
3118
 
 
3119
    for (i = 0; i < priv->long_ref_count; i++) {
 
3120
        if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
 
3121
            break;
 
3122
    }
 
3123
    if (i != priv->long_ref_count) {
 
3124
        gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
 
3125
        ARRAY_REMOVE_INDEX(priv->long_ref, i);
 
3126
    }
 
3127
 
2260
3128
    picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
2261
3129
    gst_vaapi_picture_h264_set_reference(picture,
2262
 
        GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE, FALSE);
 
3130
        GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
 
3131
        GST_VAAPI_PICTURE_IS_COMPLETE(picture));
 
3132
 
 
3133
    /* Assign LongTermFrameIdx to the other field if it was also
 
3134
       marked as "used for long-term reference */
 
3135
    other_field = GST_VAAPI_PICTURE_H264(picture->base.parent_picture);
 
3136
    if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
 
3137
        other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
2263
3138
}
2264
3139
 
2265
3140
/* 8.2.5.4. Adaptive memory control decoded reference picture marking process */
2314
3189
    priv->prev_pic_has_mmco5 = FALSE;
2315
3190
    priv->prev_pic_structure = picture->structure;
2316
3191
 
 
3192
    if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture))
 
3193
        g_ptr_array_add(priv->inter_views, gst_vaapi_picture_ref(picture));
 
3194
 
2317
3195
    if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
2318
3196
        return TRUE;
2319
3197
 
2380
3258
    }
2381
3259
}
2382
3260
 
 
3261
static void
 
3262
vaapi_fill_picture_for_RefPicListX(VAPictureH264 *pic,
 
3263
    GstVaapiPictureH264 *picture)
 
3264
{
 
3265
    vaapi_fill_picture(pic, picture, 0);
 
3266
 
 
3267
    /* H.8.4 - MVC inter prediction and inter-view prediction process */
 
3268
    if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture)) {
 
3269
        /* The inter-view reference components and inter-view only
 
3270
           reference components that are included in the reference
 
3271
           picture lists are considered as not being marked as "used for
 
3272
           short-term reference" or "used for long-term reference" */
 
3273
        pic->flags &= ~(VA_PICTURE_H264_SHORT_TERM_REFERENCE|
 
3274
                        VA_PICTURE_H264_LONG_TERM_REFERENCE);
 
3275
    }
 
3276
}
 
3277
 
2383
3278
static gboolean
2384
 
fill_picture(GstVaapiDecoderH264 *decoder,
2385
 
    GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi)
 
3279
fill_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
2386
3280
{
2387
3281
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
2388
3282
    GstVaapiPicture * const base_picture = &picture->base;
2389
 
    GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2390
 
    GstH264PPS * const pps = picture->pps;
2391
 
    GstH264SPS * const sps = pps->sequence;
 
3283
    GstH264PPS * const pps = get_pps(decoder);
 
3284
    GstH264SPS * const sps = get_sps(decoder);
2392
3285
    VAPictureParameterBufferH264 * const pic_param = base_picture->param;
2393
3286
    guint i, n;
2394
3287
 
2397
3290
 
2398
3291
    for (i = 0, n = 0; i < priv->dpb_count; i++) {
2399
3292
        GstVaapiFrameStore * const fs = priv->dpb[i];
2400
 
        if (gst_vaapi_frame_store_has_reference(fs))
 
3293
        if ((gst_vaapi_frame_store_has_reference(fs) &&
 
3294
             fs->view_id == picture->base.view_id) ||
 
3295
            (gst_vaapi_frame_store_has_inter_view(fs) &&
 
3296
             is_inter_view_reference_for_picture(decoder, fs->view_id, picture)))
2401
3297
            vaapi_fill_picture(&pic_param->ReferenceFrames[n++],
2402
3298
                fs->buffers[0], fs->structure);
 
3299
        if (n >= G_N_ELEMENTS(pic_param->ReferenceFrames))
 
3300
            break;
2403
3301
    }
2404
3302
    for (; n < G_N_ELEMENTS(pic_param->ReferenceFrames); n++)
2405
3303
        vaapi_init_picture(&pic_param->ReferenceFrames[n]);
2440
3338
    COPY_BFM(seq_fields, sps, delta_pic_order_always_zero_flag);
2441
3339
 
2442
3340
    pic_param->pic_fields.value                                         = 0; /* reset all bits */
2443
 
    pic_param->pic_fields.bits.field_pic_flag                           = slice_hdr->field_pic_flag;
 
3341
    pic_param->pic_fields.bits.field_pic_flag                           = GST_VAAPI_PICTURE_IS_INTERLACED(picture);
2444
3342
    pic_param->pic_fields.bits.reference_pic_flag                       = GST_VAAPI_PICTURE_IS_REFERENCE(picture);
2445
3343
 
2446
3344
    COPY_BFM(pic_fields, pps, entropy_coding_mode_flag);
2477
3375
#define CHECK_VALUE(new_slice_hdr, old_slice_hdr, field) \
2478
3376
    CHECK_EXPR(((new_slice_hdr)->field == (old_slice_hdr)->field), #field)
2479
3377
 
 
3378
    /* view_id differs in value and VOIdx of current slice_hdr is less
 
3379
       than the VOIdx of the prev_slice_hdr */
 
3380
    CHECK_VALUE(pi, prev_pi, view_id);
 
3381
 
2480
3382
    /* frame_num differs in value, regardless of inferred values to 0 */
2481
3383
    CHECK_VALUE(slice_hdr, prev_slice_hdr, frame_num);
2482
3384
 
2521
3423
    return FALSE;
2522
3424
}
2523
3425
 
 
3426
/* Detection of a new access unit, assuming we are already in the presence
 
3427
   of a new picture */
 
3428
static inline gboolean
 
3429
is_new_access_unit(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
 
3430
{
 
3431
    if (!prev_pi || prev_pi->view_id == pi->view_id)
 
3432
        return TRUE;
 
3433
    return pi->voc < prev_pi->voc;
 
3434
}
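/* Editor's example (illustrative, not part of the original diff): in a
   two-view MVC stream the view components arrive with voc = 0, 1, 0, 1, ...
   A slice with a different view_id and a voc that did not decrease (0 -> 1)
   extends the current access unit, while the next decrease (1 -> 0) starts a
   new one. Single-view streams always report a new access unit here since
   prev_pi->view_id == pi->view_id. */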
 
3435
 
 
3436
/* Finds the first field picture corresponding to the supplied picture */
 
3437
static GstVaapiPictureH264 *
 
3438
find_first_field(GstVaapiDecoderH264 *decoder, GstVaapiParserInfoH264 *pi)
 
3439
{
 
3440
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
 
3441
    GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
 
3442
    GstVaapiFrameStore *fs;
 
3443
 
 
3444
    if (!slice_hdr->field_pic_flag)
 
3445
        return NULL;
 
3446
 
 
3447
    fs = priv->prev_frames[pi->voc];
 
3448
    if (!fs || gst_vaapi_frame_store_has_frame(fs))
 
3449
        return NULL;
 
3450
 
 
3451
    if (fs->buffers[0]->frame_num == slice_hdr->frame_num)
 
3452
        return fs->buffers[0];
 
3453
    return NULL;
 
3454
}
 
3455
 
2524
3456
static GstVaapiDecoderStatus
2525
3457
decode_picture(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
2526
3458
{
2527
3459
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
2528
3460
    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
2529
3461
    GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2530
 
    GstH264PPS * const pps = slice_hdr->pps;
2531
 
    GstH264SPS * const sps = pps->sequence;
2532
 
    GstVaapiPictureH264 *picture;
 
3462
    GstH264PPS * const pps = ensure_pps(decoder, slice_hdr->pps);
 
3463
    GstH264SPS * const sps = ensure_sps(decoder, slice_hdr->pps->sequence);
 
3464
    GstVaapiPictureH264 *picture, *first_field;
2533
3465
    GstVaapiDecoderStatus status;
2534
3466
 
 
3467
    g_return_val_if_fail(pps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
 
3468
    g_return_val_if_fail(sps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
 
3469
 
 
3470
    /* Only decode base stream for MVC */
 
3471
    switch (sps->profile_idc) {
 
3472
    case GST_H264_PROFILE_MULTIVIEW_HIGH:
 
3473
    case GST_H264_PROFILE_STEREO_HIGH:
 
3474
        if (0) {
 
3475
            GST_DEBUG("drop picture from substream");
 
3476
            return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
 
3477
        }
 
3478
        break;
 
3479
    }
 
3480
 
2535
3481
    status = ensure_context(decoder, sps);
2536
3482
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
2537
3483
        return status;
2538
3484
 
2539
 
    if (priv->current_picture) {
 
3485
    priv->decoder_state = 0;
 
3486
 
 
3487
    first_field = find_first_field(decoder, pi);
 
3488
    if (first_field) {
2540
3489
        /* Re-use current picture where the first field was decoded */
2541
 
        picture = gst_vaapi_picture_h264_new_field(priv->current_picture);
 
3490
        picture = gst_vaapi_picture_h264_new_field(first_field);
2542
3491
        if (!picture) {
2543
3492
            GST_ERROR("failed to allocate field picture");
2544
3493
            return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
2555
3504
    gst_vaapi_picture_replace(&priv->current_picture, picture);
2556
3505
    gst_vaapi_picture_unref(picture);
2557
3506
 
 
3507
    /* Clear inter-view references list if this is the primary coded
 
3508
       picture of the current access unit */
 
3509
    if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
 
3510
        g_ptr_array_set_size(priv->inter_views, 0);
 
3511
 
2558
3512
    /* Update cropping rectangle */
2559
3513
    if (sps->frame_cropping_flag) {
2560
3514
        GstVaapiRectangle crop_rect;
2565
3519
        gst_vaapi_picture_set_crop_rect(&picture->base, &crop_rect);
2566
3520
    }
2567
3521
 
2568
 
    picture->pps = pps;
2569
 
 
2570
3522
    status = ensure_quant_matrix(decoder, picture);
2571
3523
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
2572
3524
        GST_ERROR("failed to reset quantizer matrix");
2575
3527
 
2576
3528
    if (!init_picture(decoder, picture, pi))
2577
3529
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
2578
 
    if (!fill_picture(decoder, picture, pi))
 
3530
    if (!fill_picture(decoder, picture))
2579
3531
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
 
3532
 
 
3533
    priv->decoder_state = pi->state;
2580
3534
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
2581
3535
}
2582
3536
 
2583
3537
static inline guint
2584
 
get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr)
 
3538
get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr, guint nal_header_bytes)
2585
3539
{
2586
3540
    guint epb_count;
2587
3541
 
2588
3542
    epb_count = slice_hdr->n_emulation_prevention_bytes;
2589
 
    return 8 /* nal_unit_type */ + slice_hdr->header_size - epb_count * 8;
 
3543
    return 8 * nal_header_bytes + slice_hdr->header_size - epb_count * 8;
2590
3544
}
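/* Editor's example (illustrative, not part of the original diff): for a plain
   slice NAL unit the header is 1 byte, so with header_size == 50 bits and no
   emulation prevention bytes the slice data starts at 8*1 + 50 - 0 = 58 bits;
   for an MVC slice extension NAL unit (assumed 1-byte header plus 3-byte MVC
   extension, i.e. nal_header_bytes == 4) the same header would give
   8*4 + 50 = 82 bits. */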
2591
3545
 
2592
3546
static gboolean
2594
3548
    GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
2595
3549
{
2596
3550
    VASliceParameterBufferH264 * const slice_param = slice->param;
2597
 
    GstH264PPS * const pps = slice_hdr->pps;
2598
 
    GstH264SPS * const sps = pps->sequence;
 
3551
    GstH264PPS * const pps = get_pps(decoder);
 
3552
    GstH264SPS * const sps = get_sps(decoder);
2599
3553
    GstH264PredWeightTable * const w = &slice_hdr->pred_weight_table;
2600
3554
    guint num_weight_tables = 0;
2601
3555
    gint i, j;
2683
3637
        slice_hdr->num_ref_idx_l0_active_minus1;
2684
3638
 
2685
3639
    for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
2686
 
        vaapi_fill_picture(&slice_param->RefPicList0[i], priv->RefPicList0[i], 0);
 
3640
        vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList0[i],
 
3641
            priv->RefPicList0[i]);
2687
3642
    for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
2688
3643
        vaapi_init_picture(&slice_param->RefPicList0[i]);
2689
3644
 
2694
3649
        slice_hdr->num_ref_idx_l1_active_minus1;
2695
3650
 
2696
3651
    for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
2697
 
        vaapi_fill_picture(&slice_param->RefPicList1[i], priv->RefPicList1[i], 0);
 
3652
        vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList1[i],
 
3653
            priv->RefPicList1[i]);
2698
3654
    for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
2699
3655
        vaapi_init_picture(&slice_param->RefPicList1[i]);
2700
3656
    return TRUE;
2702
3658
 
2703
3659
static gboolean
2704
3660
fill_slice(GstVaapiDecoderH264 *decoder,
2705
 
    GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
 
3661
    GstVaapiSlice *slice, GstVaapiParserInfoH264 *pi)
2706
3662
{
2707
3663
    VASliceParameterBufferH264 * const slice_param = slice->param;
 
3664
    GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2708
3665
 
2709
3666
    /* Fill in VASliceParameterBufferH264 */
2710
 
    slice_param->slice_data_bit_offset          = get_slice_data_bit_offset(slice_hdr);
 
3667
    slice_param->slice_data_bit_offset =
 
3668
        get_slice_data_bit_offset(slice_hdr, pi->nalu.header_bytes);
2711
3669
    slice_param->first_mb_in_slice              = slice_hdr->first_mb_in_slice;
2712
3670
    slice_param->slice_type                     = slice_hdr->type % 5;
2713
3671
    slice_param->direct_spatial_mv_pred_flag    = slice_hdr->direct_spatial_mv_pred_flag;
2738
3696
 
2739
3697
    GST_DEBUG("slice (%u bytes)", pi->nalu.size);
2740
3698
 
2741
 
    if (!priv->got_sps || !priv->got_pps) {
2742
 
        GST_ERROR("not initialized yet");
 
3699
    if (!is_valid_state(pi->state,
 
3700
            GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS)) {
 
3701
        GST_WARNING("failed to receive enough headers to decode slice");
2743
3702
        return GST_VAAPI_DECODER_STATUS_SUCCESS;
2744
3703
    }
2745
3704
 
 
3705
    if (!ensure_pps(decoder, slice_hdr->pps)) {
 
3706
        GST_ERROR("failed to activate PPS");
 
3707
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
 
3708
    }
 
3709
 
 
3710
    if (!ensure_sps(decoder, slice_hdr->pps->sequence)) {
 
3711
        GST_ERROR("failed to activate SPS");
 
3712
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
 
3713
    }
 
3714
 
2746
3715
    if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
2747
3716
        GST_ERROR("failed to map buffer");
2748
3717
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
2749
3718
    }
2750
3719
 
 
3720
    /* Check whether this is the first/last slice in the current access unit */
 
3721
    if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
 
3722
        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_START);
 
3723
    if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)
 
3724
        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
 
3725
 
2751
3726
    slice = GST_VAAPI_SLICE_NEW(H264, decoder,
2752
3727
        (map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
2753
3728
    gst_buffer_unmap(buffer, &map_info);
2756
3731
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
2757
3732
    }
2758
3733
 
2759
 
    if (!fill_slice(decoder, slice, slice_hdr)) {
 
3734
    init_picture_refs(decoder, picture, slice_hdr);
 
3735
    if (!fill_slice(decoder, slice, pi)) {
2760
3736
        gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(slice));
2761
3737
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
2762
3738
    }
2763
3739
 
2764
3740
    gst_vaapi_picture_add_slice(GST_VAAPI_PICTURE_CAST(picture), slice);
2765
3741
    picture->last_slice_hdr = slice_hdr;
 
3742
    priv->decoder_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
2766
3743
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
2767
3744
}
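/* Editor's sketch (assumption, not part of the original diff): the
   is_valid_state() helper used above is assumed to be a plain bitmask test,
   i.e. all bits of the requested state must be present: */
static inline gboolean
sketch_is_valid_state(guint state, guint ref_state)
{
    return (state & ref_state) == ref_state;
}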
2768
3745
 
2778
3755
static GstVaapiDecoderStatus
2779
3756
decode_unit(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
2780
3757
{
 
3758
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
2781
3759
    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
2782
3760
    GstVaapiDecoderStatus status;
2783
3761
 
 
3762
    priv->decoder_state |= pi->state;
2784
3763
    switch (pi->nalu.type) {
 
3764
    case GST_H264_NAL_SPS:
 
3765
        status = decode_sps(decoder, unit);
 
3766
        break;
 
3767
    case GST_H264_NAL_SUBSET_SPS:
 
3768
        status = decode_subset_sps(decoder, unit);
 
3769
        break;
 
3770
    case GST_H264_NAL_PPS:
 
3771
        status = decode_pps(decoder, unit);
 
3772
        break;
 
3773
    case GST_H264_NAL_SLICE_EXT:
2785
3774
    case GST_H264_NAL_SLICE_IDR:
2786
3775
        /* fall-through. IDR specifics are handled in init_picture() */
2787
3776
    case GST_H264_NAL_SLICE:
2811
3800
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
2812
3801
    GstVaapiDecoderStatus status;
2813
3802
    GstVaapiDecoderUnit unit;
2814
 
    GstVaapiParserInfoH264 pi;
 
3803
    GstVaapiParserInfoH264 *pi = NULL;
2815
3804
    GstH264ParserResult result;
2816
3805
    guint i, ofs, num_sps, num_pps;
2817
3806
 
2818
 
    unit.parsed_info = &pi;
 
3807
    unit.parsed_info = NULL;
2819
3808
 
2820
3809
    if (buf_size < 8)
2821
3810
        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
2831
3820
    ofs = 6;
2832
3821
 
2833
3822
    for (i = 0; i < num_sps; i++) {
 
3823
        pi = gst_vaapi_parser_info_h264_new();
 
3824
        if (!pi)
 
3825
            return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
 
3826
        unit.parsed_info = pi;
 
3827
 
2834
3828
        result = gst_h264_parser_identify_nalu_avc(
2835
3829
            priv->parser,
2836
3830
            buf, ofs, buf_size, 2,
2837
 
            &pi.nalu
 
3831
            &pi->nalu
2838
3832
        );
2839
 
        if (result != GST_H264_PARSER_OK)
2840
 
            return get_status(result);
 
3833
        if (result != GST_H264_PARSER_OK) {
 
3834
            status = get_status(result);
 
3835
            goto cleanup;
 
3836
        }
2841
3837
 
2842
3838
        status = parse_sps(decoder, &unit);
2843
3839
        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
2844
 
            return status;
2845
 
        ofs = pi.nalu.offset + pi.nalu.size;
 
3840
            goto cleanup;
 
3841
        ofs = pi->nalu.offset + pi->nalu.size;
 
3842
 
 
3843
        status = decode_sps(decoder, &unit);
 
3844
        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
 
3845
            goto cleanup;
 
3846
        gst_vaapi_parser_info_h264_replace(&pi, NULL);
2846
3847
    }
2847
3848
 
2848
3849
    num_pps = buf[ofs];
2849
3850
    ofs++;
2850
3851
 
2851
3852
    for (i = 0; i < num_pps; i++) {
 
3853
        pi = gst_vaapi_parser_info_h264_new();
 
3854
        if (!pi)
 
3855
            return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
 
3856
        unit.parsed_info = pi;
 
3857
 
2852
3858
        result = gst_h264_parser_identify_nalu_avc(
2853
3859
            priv->parser,
2854
3860
            buf, ofs, buf_size, 2,
2855
 
            &pi.nalu
 
3861
            &pi->nalu
2856
3862
        );
2857
 
        if (result != GST_H264_PARSER_OK)
2858
 
            return get_status(result);
 
3863
        if (result != GST_H264_PARSER_OK) {
 
3864
            status = get_status(result);
 
3865
            goto cleanup;
 
3866
        }
2859
3867
 
2860
3868
        status = parse_pps(decoder, &unit);
2861
3869
        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
2862
 
            return status;
2863
 
        ofs = pi.nalu.offset + pi.nalu.size;
 
3870
            goto cleanup;
 
3871
        ofs = pi->nalu.offset + pi->nalu.size;
 
3872
 
 
3873
        status = decode_pps(decoder, &unit);
 
3874
        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
 
3875
            goto cleanup;
 
3876
        gst_vaapi_parser_info_h264_replace(&pi, NULL);
2864
3877
    }
2865
3878
 
2866
3879
    priv->is_avcC = TRUE;
2867
 
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
 
3880
    status = GST_VAAPI_DECODER_STATUS_SUCCESS;
 
3881
 
 
3882
cleanup:
 
3883
    gst_vaapi_parser_info_h264_replace(&pi, NULL);
 
3884
    return status;
2868
3885
}
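/* Editor's note (illustrative, not part of the original diff): the offsets
   used above follow the standard avcC (AVCDecoderConfigurationRecord) layout:
     buf[0]    configurationVersion
     buf[1..3] AVCProfileIndication, profile_compatibility, AVCLevelIndication
     buf[4]    lengthSizeMinusOne in the low 2 bits
     buf[5]    numOfSequenceParameterSets in the low 5 bits
     buf[6..]  SPS NAL units, each prefixed by a 2-byte length, followed by a
               byte holding numOfPictureParameterSets and the PPS NAL units,
               also 2-byte-length prefixed. */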
2869
3886
 
2870
3887
static GstVaapiDecoderStatus
2901
3918
    guint i, size, buf_size, nalu_size, flags;
2902
3919
    guint32 start_code;
2903
3920
    gint ofs, ofs2;
 
3921
    gboolean at_au_end = FALSE;
2904
3922
 
2905
3923
    status = ensure_decoder(decoder);
2906
3924
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
2907
3925
        return status;
2908
3926
 
2909
 
    size = gst_adapter_available(adapter);
 
3927
    switch (priv->stream_alignment) {
 
3928
    case GST_VAAPI_STREAM_ALIGN_H264_NALU:
 
3929
    case GST_VAAPI_STREAM_ALIGN_H264_AU:
 
3930
        size = gst_adapter_available_fast(adapter);
 
3931
        break;
 
3932
    default:
 
3933
        size = gst_adapter_available(adapter);
 
3934
        break;
 
3935
    }
2910
3936
 
2911
3937
    if (priv->is_avcC) {
2912
3938
        if (size < priv->nal_length_size)
2923
3949
        buf_size = priv->nal_length_size + nalu_size;
2924
3950
        if (size < buf_size)
2925
3951
            return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
 
3952
        else if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_AU)
 
3953
            at_au_end = (buf_size == size);
2926
3954
    }
2927
3955
    else {
2928
3956
        if (size < 4)
2929
3957
            return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
2930
3958
 
2931
 
        ofs = scan_for_start_code(adapter, 0, size, NULL);
2932
 
        if (ofs < 0)
2933
 
            return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
2934
 
 
2935
 
        if (ofs > 0) {
2936
 
            gst_adapter_flush(adapter, ofs);
2937
 
            size -= ofs;
2938
 
        }
2939
 
 
2940
 
        ofs2 = ps->input_offset2 - ofs - 4;
2941
 
        if (ofs2 < 4)
2942
 
            ofs2 = 4;
2943
 
 
2944
 
        ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
2945
 
            scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
2946
 
        if (ofs < 0) {
2947
 
            // Assume the whole NAL unit is present if end-of-stream
2948
 
            if (!at_eos) {
2949
 
                ps->input_offset2 = size;
 
3959
        if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_NALU)
 
3960
            buf_size = size;
 
3961
        else {
 
3962
            ofs = scan_for_start_code(adapter, 0, size, NULL);
 
3963
            if (ofs < 0)
2950
3964
                return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
2951
 
            }
2952
 
            ofs = size;
 
3965
 
 
3966
            if (ofs > 0) {
 
3967
                gst_adapter_flush(adapter, ofs);
 
3968
                size -= ofs;
 
3969
            }
 
3970
 
 
3971
            ofs2 = ps->input_offset2 - ofs - 4;
 
3972
            if (ofs2 < 4)
 
3973
                ofs2 = 4;
 
3974
 
 
3975
            ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
 
3976
                scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
 
3977
            if (ofs < 0) {
 
3978
                // Assume the whole NAL unit is present if end-of-stream
 
3979
                // or stream buffers aligned on access unit boundaries
 
3980
                if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_AU)
 
3981
                    at_au_end = TRUE;
 
3982
                else if (!at_eos) {
 
3983
                    ps->input_offset2 = size;
 
3984
                    return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
 
3985
                }
 
3986
                ofs = size;
 
3987
            }
 
3988
            buf_size = ofs;
2953
3989
        }
2954
 
        buf_size = ofs;
2955
3990
    }
2956
3991
    ps->input_offset2 = 0;
2957
3992
 
2982
4017
    case GST_H264_NAL_SPS:
2983
4018
        status = parse_sps(decoder, unit);
2984
4019
        break;
 
4020
    case GST_H264_NAL_SUBSET_SPS:
 
4021
        status = parse_subset_sps(decoder, unit);
 
4022
        break;
2985
4023
    case GST_H264_NAL_PPS:
2986
4024
        status = parse_pps(decoder, unit);
2987
4025
        break;
2988
4026
    case GST_H264_NAL_SEI:
2989
4027
        status = parse_sei(decoder, unit);
2990
4028
        break;
 
4029
    case GST_H264_NAL_SLICE_EXT:
 
4030
        if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
 
4031
            status = GST_VAAPI_DECODER_STATUS_SUCCESS;
 
4032
            break;
 
4033
        }
 
4034
        /* fall-through */
2991
4035
    case GST_H264_NAL_SLICE_IDR:
2992
4036
    case GST_H264_NAL_SLICE:
2993
4037
        status = parse_slice(decoder, unit);
3000
4044
        return status;
3001
4045
 
3002
4046
    flags = 0;
 
4047
    if (at_au_end) {
 
4048
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END |
 
4049
            GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
 
4050
    }
3003
4051
    switch (pi->nalu.type) {
3004
4052
    case GST_H264_NAL_AU_DELIMITER:
 
4053
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
3005
4054
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
3006
4055
        /* fall-through */
3007
4056
    case GST_H264_NAL_FILLER_DATA:
3012
4061
        /* fall-through */
3013
4062
    case GST_H264_NAL_SEQ_END:
3014
4063
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
 
4064
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
3015
4065
        break;
3016
4066
    case GST_H264_NAL_SPS:
 
4067
    case GST_H264_NAL_SUBSET_SPS:
3017
4068
    case GST_H264_NAL_PPS:
3018
 
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
3019
 
        /* fall-through */
3020
4069
    case GST_H264_NAL_SEI:
 
4070
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
3021
4071
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
3022
4072
        break;
 
4073
    case GST_H264_NAL_SLICE_EXT:
 
4074
        if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
 
4075
            flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
 
4076
            break;
 
4077
        }
 
4078
        /* fall-through */
3023
4079
    case GST_H264_NAL_SLICE_IDR:
3024
4080
    case GST_H264_NAL_SLICE:
3025
4081
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
3026
 
        if (is_new_picture(pi, priv->prev_slice_pi))
 
4082
        if (priv->prev_pi &&
 
4083
            (priv->prev_pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)) {
 
4084
            flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
 
4085
                GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
 
4086
        }
 
4087
        else if (is_new_picture(pi, priv->prev_slice_pi)) {
3027
4088
            flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
 
4089
            if (is_new_access_unit(pi, priv->prev_slice_pi))
 
4090
                flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
 
4091
        }
3028
4092
        gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, pi);
3029
4093
        break;
 
4094
    case GST_H264_NAL_SPS_EXT:
 
4095
    case GST_H264_NAL_SLICE_AUX:
 
4096
        /* skip SPS extension and auxiliary slice for now */
 
4097
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
 
4098
        break;
 
4099
    case GST_H264_NAL_PREFIX_UNIT:
 
4100
        /* skip Prefix NAL units for now */
 
4101
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP |
 
4102
            GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
 
4103
            GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
 
4104
        break;
3030
4105
    default:
3031
4106
        if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
3032
 
            flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
 
4107
            flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
 
4108
                GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
3033
4109
        break;
3034
4110
    }
 
4111
    if ((flags & GST_VAAPI_DECODER_UNIT_FLAGS_AU) && priv->prev_slice_pi)
 
4112
        priv->prev_slice_pi->flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
3035
4113
    GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
3036
4114
 
3037
4115
    pi->nalu.data = NULL;
 
4116
    pi->state = priv->parser_state;
 
4117
    pi->flags = flags;
 
4118
    gst_vaapi_parser_info_h264_replace(&priv->prev_pi, pi);
3038
4119
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
3039
4120
}
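/* Editor's example (illustrative, not part of the original diff): for a
   stream laid out as AUD, SPS, PPS, slice0, slice1, AUD, slice0', ... the
   second AUD gets GST_VAAPI_DECODER_UNIT_FLAG_AU_START, which makes the code
   above retroactively tag the parser info of slice1 with
   GST_VAAPI_DECODER_UNIT_FLAG_AU_END; decode_slice() later propagates both
   flags to the picture as AU_START/AU_END picture flags. */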
3040
4121
 
3077
4158
    GstVaapiDecoderH264 * const decoder =
3078
4159
        GST_VAAPI_DECODER_H264_CAST(base_decoder);
3079
4160
 
3080
 
    dpb_flush(decoder);
 
4161
    dpb_flush(decoder, NULL);
3081
4162
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
3082
4163
}
3083
4164
 
3117
4198
}
3118
4199
 
3119
4200
/**
 
4201
 * gst_vaapi_decoder_h264_set_alignment:
 
4202
 * @decoder: a #GstVaapiDecoderH264
 
4203
 * @alignment: the #GstVaapiStreamAlignH264
 
4204
 *
 
4205
 * Specifies how stream buffers are aligned / fed, i.e. the boundaries
 
4206
 * of each buffer that is supplied to the decoder. This could be no
 
4207
 * specific alignment, NAL unit boundaries, or access unit boundaries.
 
4208
 */
 
4209
void
 
4210
gst_vaapi_decoder_h264_set_alignment(GstVaapiDecoderH264 *decoder,
 
4211
    GstVaapiStreamAlignH264 alignment)
 
4212
{
 
4213
    g_return_if_fail(decoder != NULL);
 
4214
 
 
4215
    decoder->priv.stream_alignment = alignment;
 
4216
}
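/* Editor's example (hypothetical caller code, not part of the original diff):
   an element that pushes whole access units to the decoder could announce it
   as follows, assuming the usual GST_VAAPI_DECODER_H264_CAST() macro: */
static void
sketch_configure_au_alignment(GstVaapiDecoder *decoder)
{
    gst_vaapi_decoder_h264_set_alignment(
        GST_VAAPI_DECODER_H264_CAST(decoder),
        GST_VAAPI_STREAM_ALIGN_H264_AU);
}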
 
4217
 
 
4218
/**
3120
4219
 * gst_vaapi_decoder_h264_new:
3121
4220
 * @display: a #GstVaapiDisplay
3122
4221
 * @caps: a #GstCaps holding codec information