/* GStreamer H.264 Parser
 * Copyright (C) <2010> Collabora ltd
 * Copyright (C) <2010> Nokia Corporation
 * Copyright (C) <2011> Intel Corporation
 *
 * Copyright (C) <2010> Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>
 * Copyright (C) <2011> Thibault Saunier <thibault.saunier@collabora.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */
#ifdef HAVE_CONFIG_H
#  include "config.h"
#endif

#include <gst/base/base.h>
#include <gst/pbutils/pbutils.h>
#include <gst/video/video.h>
#include "gsth264parse.h"

#include <string.h>
GST_DEBUG_CATEGORY (h264_parse_debug);
#define GST_CAT_DEFAULT h264_parse_debug

/* default for the "config-interval" property: 0 = no periodic SPS/PPS
 * re-insertion */
#define DEFAULT_CONFIG_INTERVAL      (0)
/* GObject property ids */
enum
{
  PROP_0,
  PROP_CONFIG_INTERVAL
};

/* output stream-format variants the parser can produce */
enum
{
  GST_H264_PARSE_FORMAT_NONE,
  GST_H264_PARSE_FORMAT_AVC,
  GST_H264_PARSE_FORMAT_BYTE,
  GST_H264_PARSE_FORMAT_AVC3
};
/* output alignment variants: whole NAL units or complete access units */
enum
{
  GST_H264_PARSE_ALIGN_NONE = 0,
  GST_H264_PARSE_ALIGN_NAL,
  GST_H264_PARSE_ALIGN_AU
};
/* bitmask tracking which headers/slices have been seen in the stream;
 * combined states describe when output pictures are decodable */
enum
{
  GST_H264_PARSE_STATE_GOT_SPS = 1 << 0,
  GST_H264_PARSE_STATE_GOT_PPS = 1 << 1,
  GST_H264_PARSE_STATE_GOT_SLICE = 1 << 2,

  GST_H264_PARSE_STATE_VALID_PICTURE_HEADERS = (GST_H264_PARSE_STATE_GOT_SPS |
      GST_H264_PARSE_STATE_GOT_PPS),
  GST_H264_PARSE_STATE_VALID_PICTURE =
      (GST_H264_PARSE_STATE_VALID_PICTURE_HEADERS |
      GST_H264_PARSE_STATE_GOT_SLICE)
};

/* TRUE iff all bits of expected_state are currently set in parse->state */
#define GST_H264_PARSE_STATE_VALID(parse, expected_state) \
  (((parse)->state & (expected_state)) == (expected_state))
79
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
82
GST_STATIC_CAPS ("video/x-h264"));
84
static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
87
GST_STATIC_CAPS ("video/x-h264, parsed = (boolean) true, "
88
"stream-format=(string) { avc, avc3, byte-stream }, "
89
"alignment=(string) { au, nal }"));
#define parent_class gst_h264_parse_parent_class
G_DEFINE_TYPE (GstH264Parse, gst_h264_parse, GST_TYPE_BASE_PARSE);
94
static void gst_h264_parse_finalize (GObject * object);
96
static gboolean gst_h264_parse_start (GstBaseParse * parse);
97
static gboolean gst_h264_parse_stop (GstBaseParse * parse);
98
static GstFlowReturn gst_h264_parse_handle_frame (GstBaseParse * parse,
99
GstBaseParseFrame * frame, gint * skipsize);
100
static GstFlowReturn gst_h264_parse_parse_frame (GstBaseParse * parse,
101
GstBaseParseFrame * frame);
102
static GstFlowReturn gst_h264_parse_pre_push_frame (GstBaseParse * parse,
103
GstBaseParseFrame * frame);
105
static void gst_h264_parse_set_property (GObject * object, guint prop_id,
106
const GValue * value, GParamSpec * pspec);
107
static void gst_h264_parse_get_property (GObject * object, guint prop_id,
108
GValue * value, GParamSpec * pspec);
110
static gboolean gst_h264_parse_set_caps (GstBaseParse * parse, GstCaps * caps);
111
static GstCaps *gst_h264_parse_get_caps (GstBaseParse * parse,
113
static gboolean gst_h264_parse_event (GstBaseParse * parse, GstEvent * event);
114
static gboolean gst_h264_parse_src_event (GstBaseParse * parse,
118
gst_h264_parse_class_init (GstH264ParseClass * klass)
120
GObjectClass *gobject_class = (GObjectClass *) klass;
121
GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
122
GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
124
GST_DEBUG_CATEGORY_INIT (h264_parse_debug, "h264parse", 0, "h264 parser");
126
gobject_class->finalize = gst_h264_parse_finalize;
127
gobject_class->set_property = gst_h264_parse_set_property;
128
gobject_class->get_property = gst_h264_parse_get_property;
130
g_object_class_install_property (gobject_class, PROP_CONFIG_INTERVAL,
131
g_param_spec_uint ("config-interval",
132
"SPS PPS Send Interval",
133
"Send SPS and PPS Insertion Interval in seconds (sprop parameter sets "
134
"will be multiplexed in the data stream when detected.) (0 = disabled)",
135
0, 3600, DEFAULT_CONFIG_INTERVAL,
136
G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
138
/* Override BaseParse vfuncs */
139
parse_class->start = GST_DEBUG_FUNCPTR (gst_h264_parse_start);
140
parse_class->stop = GST_DEBUG_FUNCPTR (gst_h264_parse_stop);
141
parse_class->handle_frame = GST_DEBUG_FUNCPTR (gst_h264_parse_handle_frame);
142
parse_class->pre_push_frame =
143
GST_DEBUG_FUNCPTR (gst_h264_parse_pre_push_frame);
144
parse_class->set_sink_caps = GST_DEBUG_FUNCPTR (gst_h264_parse_set_caps);
145
parse_class->get_sink_caps = GST_DEBUG_FUNCPTR (gst_h264_parse_get_caps);
146
parse_class->sink_event = GST_DEBUG_FUNCPTR (gst_h264_parse_event);
147
parse_class->src_event = GST_DEBUG_FUNCPTR (gst_h264_parse_src_event);
149
gst_element_class_add_pad_template (gstelement_class,
150
gst_static_pad_template_get (&srctemplate));
151
gst_element_class_add_pad_template (gstelement_class,
152
gst_static_pad_template_get (&sinktemplate));
154
gst_element_class_set_static_metadata (gstelement_class, "H.264 parser",
155
"Codec/Parser/Converter/Video",
156
"Parses H.264 streams",
157
"Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>");
161
gst_h264_parse_init (GstH264Parse * h264parse)
163
h264parse->frame_out = gst_adapter_new ();
164
gst_base_parse_set_pts_interpolation (GST_BASE_PARSE (h264parse), FALSE);
165
GST_PAD_SET_ACCEPT_INTERSECT (GST_BASE_PARSE_SINK_PAD (h264parse));
170
gst_h264_parse_finalize (GObject * object)
172
GstH264Parse *h264parse = GST_H264_PARSE (object);
174
g_object_unref (h264parse->frame_out);
176
G_OBJECT_CLASS (parent_class)->finalize (object);
180
gst_h264_parse_reset_frame (GstH264Parse * h264parse)
182
GST_DEBUG_OBJECT (h264parse, "reset frame");
184
/* done parsing; reset state */
185
h264parse->current_off = -1;
187
h264parse->picture_start = FALSE;
188
h264parse->update_caps = FALSE;
189
h264parse->idr_pos = -1;
190
h264parse->sei_pos = -1;
191
h264parse->keyframe = FALSE;
192
h264parse->frame_start = FALSE;
193
gst_adapter_clear (h264parse->frame_out);
197
gst_h264_parse_reset (GstH264Parse * h264parse)
199
h264parse->width = 0;
200
h264parse->height = 0;
201
h264parse->fps_num = 0;
202
h264parse->fps_den = 0;
203
h264parse->upstream_par_n = -1;
204
h264parse->upstream_par_d = -1;
205
h264parse->parsed_par_n = 0;
206
h264parse->parsed_par_d = 0;
207
gst_buffer_replace (&h264parse->codec_data, NULL);
208
gst_buffer_replace (&h264parse->codec_data_in, NULL);
209
h264parse->nal_length_size = 4;
210
h264parse->packetized = FALSE;
211
h264parse->transform = FALSE;
213
h264parse->align = GST_H264_PARSE_ALIGN_NONE;
214
h264parse->format = GST_H264_PARSE_FORMAT_NONE;
216
h264parse->last_report = GST_CLOCK_TIME_NONE;
217
h264parse->push_codec = FALSE;
218
h264parse->have_pps = FALSE;
219
h264parse->have_sps = FALSE;
221
h264parse->dts = GST_CLOCK_TIME_NONE;
222
h264parse->ts_trn_nb = GST_CLOCK_TIME_NONE;
223
h264parse->do_ts = TRUE;
225
h264parse->sent_codec_tag = FALSE;
227
h264parse->pending_key_unit_ts = GST_CLOCK_TIME_NONE;
228
gst_event_replace (&h264parse->force_key_unit_event, NULL);
230
h264parse->discont = FALSE;
232
gst_h264_parse_reset_frame (h264parse);
236
gst_h264_parse_start (GstBaseParse * parse)
238
GstH264Parse *h264parse = GST_H264_PARSE (parse);
240
GST_DEBUG_OBJECT (parse, "start");
241
gst_h264_parse_reset (h264parse);
243
h264parse->nalparser = gst_h264_nal_parser_new ();
245
h264parse->dts = GST_CLOCK_TIME_NONE;
246
h264parse->ts_trn_nb = GST_CLOCK_TIME_NONE;
247
h264parse->sei_pic_struct_pres_flag = FALSE;
248
h264parse->sei_pic_struct = 0;
249
h264parse->field_pic_flag = 0;
251
gst_base_parse_set_min_frame_size (parse, 6);
257
gst_h264_parse_stop (GstBaseParse * parse)
260
GstH264Parse *h264parse = GST_H264_PARSE (parse);
262
GST_DEBUG_OBJECT (parse, "stop");
263
gst_h264_parse_reset (h264parse);
265
for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++)
266
gst_buffer_replace (&h264parse->sps_nals[i], NULL);
267
for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++)
268
gst_buffer_replace (&h264parse->pps_nals[i], NULL);
270
gst_h264_nal_parser_free (h264parse->nalparser);
276
gst_h264_parse_get_string (GstH264Parse * parse, gboolean format, gint code)
280
case GST_H264_PARSE_FORMAT_AVC:
282
case GST_H264_PARSE_FORMAT_BYTE:
283
return "byte-stream";
284
case GST_H264_PARSE_FORMAT_AVC3:
291
case GST_H264_PARSE_ALIGN_NAL:
293
case GST_H264_PARSE_ALIGN_AU:
302
gst_h264_parse_format_from_caps (GstCaps * caps, guint * format, guint * align)
305
*format = GST_H264_PARSE_FORMAT_NONE;
308
*align = GST_H264_PARSE_ALIGN_NONE;
310
g_return_if_fail (gst_caps_is_fixed (caps));
312
GST_DEBUG ("parsing caps: %" GST_PTR_FORMAT, caps);
314
if (caps && gst_caps_get_size (caps) > 0) {
315
GstStructure *s = gst_caps_get_structure (caps, 0);
316
const gchar *str = NULL;
319
if ((str = gst_structure_get_string (s, "stream-format"))) {
320
if (strcmp (str, "avc") == 0)
321
*format = GST_H264_PARSE_FORMAT_AVC;
322
else if (strcmp (str, "byte-stream") == 0)
323
*format = GST_H264_PARSE_FORMAT_BYTE;
324
else if (strcmp (str, "avc3") == 0)
325
*format = GST_H264_PARSE_FORMAT_AVC3;
330
if ((str = gst_structure_get_string (s, "alignment"))) {
331
if (strcmp (str, "au") == 0)
332
*align = GST_H264_PARSE_ALIGN_AU;
333
else if (strcmp (str, "nal") == 0)
334
*align = GST_H264_PARSE_ALIGN_NAL;
340
/* check downstream caps to configure format and alignment */
342
gst_h264_parse_negotiate (GstH264Parse * h264parse, gint in_format,
346
guint format = GST_H264_PARSE_FORMAT_NONE;
347
guint align = GST_H264_PARSE_ALIGN_NONE;
349
g_return_if_fail ((in_caps == NULL) || gst_caps_is_fixed (in_caps));
351
caps = gst_pad_get_allowed_caps (GST_BASE_PARSE_SRC_PAD (h264parse));
352
GST_DEBUG_OBJECT (h264parse, "allowed caps: %" GST_PTR_FORMAT, caps);
354
/* concentrate on leading structure, since decodebin2 parser
355
* capsfilter always includes parser template caps */
357
caps = gst_caps_truncate (caps);
358
GST_DEBUG_OBJECT (h264parse, "negotiating with caps: %" GST_PTR_FORMAT,
362
if (in_caps && caps) {
363
if (gst_caps_can_intersect (in_caps, caps)) {
364
GST_DEBUG_OBJECT (h264parse, "downstream accepts upstream caps");
365
gst_h264_parse_format_from_caps (in_caps, &format, &align);
366
gst_caps_unref (caps);
372
/* fixate to avoid ambiguity with lists when parsing */
373
caps = gst_caps_fixate (caps);
374
gst_h264_parse_format_from_caps (caps, &format, &align);
375
gst_caps_unref (caps);
380
format = GST_H264_PARSE_FORMAT_BYTE;
382
align = GST_H264_PARSE_ALIGN_AU;
384
GST_DEBUG_OBJECT (h264parse, "selected format %s, alignment %s",
385
gst_h264_parse_get_string (h264parse, TRUE, format),
386
gst_h264_parse_get_string (h264parse, FALSE, align));
388
h264parse->format = format;
389
h264parse->align = align;
391
h264parse->transform = in_format != h264parse->format ||
392
align == GST_H264_PARSE_ALIGN_AU;
396
gst_h264_parse_wrap_nal (GstH264Parse * h264parse, guint format, guint8 * data,
400
guint nl = h264parse->nal_length_size;
403
GST_DEBUG_OBJECT (h264parse, "nal length %d", size);
405
buf = gst_buffer_new_allocate (NULL, 4 + size, NULL);
406
if (format == GST_H264_PARSE_FORMAT_AVC
407
|| format == GST_H264_PARSE_FORMAT_AVC3) {
408
tmp = GUINT32_TO_BE (size << (32 - 8 * nl));
410
/* HACK: nl should always be 4 here, otherwise this won't work.
411
* There are legit cases where nl in avc stream is 2, but byte-stream
412
* SC is still always 4 bytes. */
414
tmp = GUINT32_TO_BE (1);
417
gst_buffer_fill (buf, 0, &tmp, sizeof (guint32));
418
gst_buffer_fill (buf, nl, data, size);
419
gst_buffer_set_size (buf, size + nl);
425
gst_h264_parser_store_nal (GstH264Parse * h264parse, guint id,
426
GstH264NalUnitType naltype, GstH264NalUnit * nalu)
428
GstBuffer *buf, **store;
429
guint size = nalu->size, store_size;
431
if (naltype == GST_H264_NAL_SPS) {
432
store_size = GST_H264_MAX_SPS_COUNT;
433
store = h264parse->sps_nals;
434
GST_DEBUG_OBJECT (h264parse, "storing sps %u", id);
435
} else if (naltype == GST_H264_NAL_PPS) {
436
store_size = GST_H264_MAX_PPS_COUNT;
437
store = h264parse->pps_nals;
438
GST_DEBUG_OBJECT (h264parse, "storing pps %u", id);
442
if (id >= store_size) {
443
GST_DEBUG_OBJECT (h264parse, "unable to store nal, id out-of-range %d", id);
447
buf = gst_buffer_new_allocate (NULL, size, NULL);
448
gst_buffer_fill (buf, 0, nalu->data + nalu->offset, size);
451
gst_buffer_unref (store[id]);
456
#ifndef GST_DISABLE_GST_DEBUG
457
static const gchar *nal_names[] = {
474
_nal_name (GstH264NalUnitType nal_type)
476
if (nal_type <= GST_H264_NAL_FILLER_DATA)
477
return nal_names[nal_type];
483
gst_h264_parse_process_sei (GstH264Parse * h264parse, GstH264NalUnit * nalu)
485
GstH264SEIMessage sei;
486
GstH264NalParser *nalparser = h264parse->nalparser;
487
GstH264ParserResult pres;
491
pres = gst_h264_parser_parse_sei (nalparser, nalu, &messages);
492
if (pres != GST_H264_PARSER_OK)
493
GST_WARNING_OBJECT (h264parse, "failed to parse one ore more SEI message");
495
/* Even if pres != GST_H264_PARSER_OK, some message could have been parsed and
496
* stored in messages.
498
for (i = 0; i < messages->len; i++) {
499
sei = g_array_index (messages, GstH264SEIMessage, i);
500
switch (sei.payloadType) {
501
case GST_H264_SEI_PIC_TIMING:
502
h264parse->sei_pic_struct_pres_flag =
503
sei.payload.pic_timing.pic_struct_present_flag;
504
h264parse->sei_cpb_removal_delay =
505
sei.payload.pic_timing.cpb_removal_delay;
506
if (h264parse->sei_pic_struct_pres_flag)
507
h264parse->sei_pic_struct = sei.payload.pic_timing.pic_struct;
508
GST_LOG_OBJECT (h264parse, "pic timing updated");
510
case GST_H264_SEI_BUF_PERIOD:
511
if (h264parse->ts_trn_nb == GST_CLOCK_TIME_NONE ||
512
h264parse->dts == GST_CLOCK_TIME_NONE)
513
h264parse->ts_trn_nb = 0;
515
h264parse->ts_trn_nb = h264parse->dts;
517
GST_LOG_OBJECT (h264parse,
518
"new buffering period; ts_trn_nb updated: %" GST_TIME_FORMAT,
519
GST_TIME_ARGS (h264parse->ts_trn_nb));
522
/* Additional messages that are not innerly useful to the
523
* element but for debugging purposes */
524
case GST_H264_SEI_RECOVERY_POINT:
525
GST_LOG_OBJECT (h264parse, "recovery point found: %u %u %u %u",
526
sei.payload.recovery_point.recovery_frame_cnt,
527
sei.payload.recovery_point.exact_match_flag,
528
sei.payload.recovery_point.broken_link_flag,
529
sei.payload.recovery_point.changing_slice_group_idc);
533
g_array_free (messages, TRUE);
536
/* caller guarantees 2 bytes of nal payload */
538
gst_h264_parse_process_nal (GstH264Parse * h264parse, GstH264NalUnit * nalu)
541
GstH264PPS pps = { 0, };
542
GstH264SPS sps = { 0, };
543
GstH264NalParser *nalparser = h264parse->nalparser;
544
GstH264ParserResult pres;
546
/* nothing to do for broken input */
547
if (G_UNLIKELY (nalu->size < 2)) {
548
GST_DEBUG_OBJECT (h264parse, "not processing nal size %u", nalu->size);
552
/* we have a peek as well */
553
nal_type = nalu->type;
555
GST_DEBUG_OBJECT (h264parse, "processing nal of type %u %s, size %u",
556
nal_type, _nal_name (nal_type), nalu->size);
559
case GST_H264_NAL_SPS:
560
/* reset state, everything else is obsolete */
561
h264parse->state = 0;
563
pres = gst_h264_parser_parse_sps (nalparser, nalu, &sps, TRUE);
564
/* arranged for a fallback sps.id, so use that one and only warn */
565
if (pres != GST_H264_PARSER_OK) {
566
GST_WARNING_OBJECT (h264parse, "failed to parse SPS:");
570
GST_DEBUG_OBJECT (h264parse, "triggering src caps check");
571
h264parse->update_caps = TRUE;
572
h264parse->have_sps = TRUE;
573
if (h264parse->push_codec && h264parse->have_pps) {
574
/* SPS and PPS found in stream before the first pre_push_frame, no need
575
* to forcibly push at start */
576
GST_INFO_OBJECT (h264parse, "have SPS/PPS in stream");
577
h264parse->push_codec = FALSE;
578
h264parse->have_sps = FALSE;
579
h264parse->have_pps = FALSE;
582
gst_h264_parser_store_nal (h264parse, sps.id, nal_type, nalu);
583
h264parse->state |= GST_H264_PARSE_STATE_GOT_SPS;
585
case GST_H264_NAL_PPS:
586
/* expected state: got-sps */
587
h264parse->state &= GST_H264_PARSE_STATE_GOT_SPS;
588
if (!GST_H264_PARSE_STATE_VALID (h264parse, GST_H264_PARSE_STATE_GOT_SPS))
591
pres = gst_h264_parser_parse_pps (nalparser, nalu, &pps);
592
/* arranged for a fallback pps.id, so use that one and only warn */
593
if (pres != GST_H264_PARSER_OK) {
594
GST_WARNING_OBJECT (h264parse, "failed to parse PPS:");
595
if (pres != GST_H264_PARSER_BROKEN_LINK)
599
/* parameters might have changed, force caps check */
600
if (!h264parse->have_pps) {
601
GST_DEBUG_OBJECT (h264parse, "triggering src caps check");
602
h264parse->update_caps = TRUE;
604
h264parse->have_pps = TRUE;
605
if (h264parse->push_codec && h264parse->have_sps) {
606
/* SPS and PPS found in stream before the first pre_push_frame, no need
607
* to forcibly push at start */
608
GST_INFO_OBJECT (h264parse, "have SPS/PPS in stream");
609
h264parse->push_codec = FALSE;
610
h264parse->have_sps = FALSE;
611
h264parse->have_pps = FALSE;
614
gst_h264_parser_store_nal (h264parse, pps.id, nal_type, nalu);
615
gst_h264_pps_clear (&pps);
616
h264parse->state |= GST_H264_PARSE_STATE_GOT_PPS;
618
case GST_H264_NAL_SEI:
619
/* expected state: got-sps */
620
if (!GST_H264_PARSE_STATE_VALID (h264parse, GST_H264_PARSE_STATE_GOT_SPS))
623
gst_h264_parse_process_sei (h264parse, nalu);
625
if (h264parse->sei_pos == -1) {
626
if (h264parse->transform)
627
h264parse->sei_pos = gst_adapter_available (h264parse->frame_out);
629
h264parse->sei_pos = nalu->sc_offset;
630
GST_DEBUG_OBJECT (h264parse, "marking SEI in frame at offset %d",
635
case GST_H264_NAL_SLICE:
636
case GST_H264_NAL_SLICE_DPA:
637
case GST_H264_NAL_SLICE_DPB:
638
case GST_H264_NAL_SLICE_DPC:
639
case GST_H264_NAL_SLICE_IDR:
640
/* expected state: got-sps|got-pps (valid picture headers) */
641
h264parse->state &= GST_H264_PARSE_STATE_VALID_PICTURE_HEADERS;
642
if (!GST_H264_PARSE_STATE_VALID (h264parse,
643
GST_H264_PARSE_STATE_VALID_PICTURE_HEADERS))
646
/* don't need to parse the whole slice (header) here */
647
if (*(nalu->data + nalu->offset + 1) & 0x80) {
648
/* means first_mb_in_slice == 0 */
649
/* real frame data */
650
GST_DEBUG_OBJECT (h264parse, "first_mb_in_slice = 0");
651
h264parse->frame_start = TRUE;
653
GST_DEBUG_OBJECT (h264parse, "frame start: %i", h264parse->frame_start);
655
GstH264SliceHdr slice;
657
pres = gst_h264_parser_parse_slice_hdr (nalparser, nalu, &slice,
659
GST_DEBUG_OBJECT (h264parse,
660
"parse result %d, first MB: %u, slice type: %u",
661
pres, slice.first_mb_in_slice, slice.type);
662
if (pres == GST_H264_PARSER_OK) {
663
if (GST_H264_IS_I_SLICE (&slice) || GST_H264_IS_SI_SLICE (&slice))
664
h264parse->keyframe |= TRUE;
666
h264parse->state |= GST_H264_PARSE_STATE_GOT_SLICE;
667
h264parse->field_pic_flag = slice.field_pic_flag;
670
if (G_LIKELY (nal_type != GST_H264_NAL_SLICE_IDR &&
671
!h264parse->push_codec))
673
/* if we need to sneak codec NALs into the stream,
674
* this is a good place, so fake it as IDR
675
* (which should be at start anyway) */
676
/* mark where config needs to go if interval expired */
677
/* mind replacement buffer if applicable */
678
if (h264parse->idr_pos == -1) {
679
if (h264parse->transform)
680
h264parse->idr_pos = gst_adapter_available (h264parse->frame_out);
682
h264parse->idr_pos = nalu->sc_offset;
683
GST_DEBUG_OBJECT (h264parse, "marking IDR in frame at offset %d",
686
/* if SEI preceeds (faked) IDR, then we have to insert config there */
687
if (h264parse->sei_pos >= 0 && h264parse->idr_pos > h264parse->sei_pos) {
688
h264parse->idr_pos = h264parse->sei_pos;
689
GST_DEBUG_OBJECT (h264parse, "moved IDR mark to SEI position %d",
694
/* drop anything before the initial SPS */
695
if (!GST_H264_PARSE_STATE_VALID (h264parse, GST_H264_PARSE_STATE_GOT_SPS))
698
pres = gst_h264_parser_parse_nal (nalparser, nalu);
699
if (pres != GST_H264_PARSER_OK)
704
/* if AVC output needed, collect properly prefixed nal in adapter,
705
* and use that to replace outgoing buffer data later on */
706
if (h264parse->transform) {
709
GST_LOG_OBJECT (h264parse, "collecting NAL in AVC frame");
710
buf = gst_h264_parse_wrap_nal (h264parse, h264parse->format,
711
nalu->data + nalu->offset, nalu->size);
712
gst_adapter_push (h264parse->frame_out, buf);
717
/* caller guarantees at least 2 bytes of nal payload for each nal
718
* returns TRUE if next_nal indicates that nal terminates an AU */
719
static inline gboolean
720
gst_h264_parse_collect_nal (GstH264Parse * h264parse, const guint8 * data,
721
guint size, GstH264NalUnit * nalu)
724
GstH264ParserResult parse_res;
725
GstH264NalUnitType nal_type = nalu->type;
726
GstH264NalUnit nnalu;
728
GST_DEBUG_OBJECT (h264parse, "parsing collected nal");
729
parse_res = gst_h264_parser_identify_nalu_unchecked (h264parse->nalparser,
730
data, nalu->offset + nalu->size, size, &nnalu);
732
if (parse_res != GST_H264_PARSER_OK)
735
/* determine if AU complete */
736
GST_LOG_OBJECT (h264parse, "nal type: %d %s", nal_type, _nal_name (nal_type));
737
/* coded slice NAL starts a picture,
738
* i.e. other types become aggregated in front of it */
739
h264parse->picture_start |= (nal_type == GST_H264_NAL_SLICE ||
740
nal_type == GST_H264_NAL_SLICE_DPA || nal_type == GST_H264_NAL_SLICE_IDR);
742
/* consider a coded slices (IDR or not) to start a picture,
743
* (so ending the previous one) if first_mb_in_slice == 0
744
* (non-0 is part of previous one) */
745
/* NOTE this is not entirely according to Access Unit specs in 7.4.1.2.4,
746
* but in practice it works in sane cases, needs not much parsing,
747
* and also works with broken frame_num in NAL
748
* (where spec-wise would fail) */
749
nal_type = nnalu.type;
750
complete = h264parse->picture_start && (nal_type >= GST_H264_NAL_SEI &&
751
nal_type <= GST_H264_NAL_AU_DELIMITER);
753
GST_LOG_OBJECT (h264parse, "next nal type: %d %s", nal_type,
754
_nal_name (nal_type));
755
complete |= h264parse->picture_start && (nal_type == GST_H264_NAL_SLICE
756
|| nal_type == GST_H264_NAL_SLICE_DPA
757
|| nal_type == GST_H264_NAL_SLICE_IDR) &&
758
/* first_mb_in_slice == 0 considered start of frame */
759
(nnalu.data[nnalu.offset + 1] & 0x80);
761
GST_LOG_OBJECT (h264parse, "au complete: %d", complete);
768
gst_h264_parse_handle_frame_packetized (GstBaseParse * parse,
769
GstBaseParseFrame * frame)
771
GstH264Parse *h264parse = GST_H264_PARSE (parse);
772
GstBuffer *buffer = frame->buffer;
773
GstFlowReturn ret = GST_FLOW_OK;
774
GstH264ParserResult parse_res;
776
const guint nl = h264parse->nal_length_size;
780
if (nl < 1 || nl > 4) {
781
GST_DEBUG_OBJECT (h264parse, "insufficient data to split input");
782
return GST_FLOW_NOT_NEGOTIATED;
785
/* need to save buffer from invalidation upon _finish_frame */
786
if (h264parse->split_packetized)
787
buffer = gst_buffer_copy (frame->buffer);
789
gst_buffer_map (buffer, &map, GST_MAP_READ);
793
GST_LOG_OBJECT (h264parse,
794
"processing packet buffer of size %" G_GSIZE_FORMAT, map.size);
796
parse_res = gst_h264_parser_identify_nalu_avc (h264parse->nalparser,
797
map.data, 0, map.size, nl, &nalu);
799
while (parse_res == GST_H264_PARSER_OK) {
800
GST_DEBUG_OBJECT (h264parse, "AVC nal offset %d", nalu.offset + nalu.size);
802
/* either way, have a look at it */
803
gst_h264_parse_process_nal (h264parse, &nalu);
805
/* dispatch per NALU if needed */
806
if (h264parse->split_packetized) {
807
GstBaseParseFrame tmp_frame;
809
gst_base_parse_frame_init (&tmp_frame);
810
tmp_frame.flags |= frame->flags;
811
tmp_frame.offset = frame->offset;
812
tmp_frame.overhead = frame->overhead;
813
tmp_frame.buffer = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL,
814
nalu.offset, nalu.size);
816
/* note we don't need to come up with a sub-buffer, since
817
* subsequent code only considers input buffer's metadata.
818
* Real data is either taken from input by baseclass or
819
* a replacement output buffer is provided anyway. */
820
gst_h264_parse_parse_frame (parse, &tmp_frame);
821
ret = gst_base_parse_finish_frame (parse, &tmp_frame, nl + nalu.size);
822
left -= nl + nalu.size;
825
parse_res = gst_h264_parser_identify_nalu_avc (h264parse->nalparser,
826
map.data, nalu.offset + nalu.size, map.size, nl, &nalu);
829
gst_buffer_unmap (buffer, &map);
831
if (!h264parse->split_packetized) {
832
gst_h264_parse_parse_frame (parse, frame);
833
ret = gst_base_parse_finish_frame (parse, frame, map.size);
835
gst_buffer_unref (buffer);
836
if (G_UNLIKELY (left)) {
837
/* should not be happening for nice AVC */
838
GST_WARNING_OBJECT (parse, "skipping leftover AVC data %d", left);
839
frame->flags |= GST_BASE_PARSE_FRAME_FLAG_DROP;
840
ret = gst_base_parse_finish_frame (parse, frame, map.size);
844
if (parse_res == GST_H264_PARSER_NO_NAL_END ||
845
parse_res == GST_H264_PARSER_BROKEN_DATA) {
847
if (h264parse->split_packetized) {
848
GST_ELEMENT_ERROR (h264parse, STREAM, FAILED, (NULL),
849
("invalid AVC input data"));
850
gst_buffer_unref (buffer);
852
return GST_FLOW_ERROR;
854
/* do not meddle to much in this case */
855
GST_DEBUG_OBJECT (h264parse, "parsing packet failed");
863
gst_h264_parse_handle_frame (GstBaseParse * parse,
864
GstBaseParseFrame * frame, gint * skipsize)
866
GstH264Parse *h264parse = GST_H264_PARSE (parse);
867
GstBuffer *buffer = frame->buffer;
871
gint current_off = 0;
872
gboolean drain, nonext;
873
GstH264NalParser *nalparser = h264parse->nalparser;
875
GstH264ParserResult pres;
878
if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (frame->buffer,
879
GST_BUFFER_FLAG_DISCONT))) {
880
h264parse->discont = TRUE;
883
/* delegate in packetized case, no skipping should be needed */
884
if (h264parse->packetized)
885
return gst_h264_parse_handle_frame_packetized (parse, frame);
887
gst_buffer_map (buffer, &map, GST_MAP_READ);
891
/* expect at least 3 bytes startcode == sc, and 2 bytes NALU payload */
892
if (G_UNLIKELY (size < 5)) {
893
gst_buffer_unmap (buffer, &map);
898
/* need to configure aggregation */
899
if (G_UNLIKELY (h264parse->format == GST_H264_PARSE_FORMAT_NONE))
900
gst_h264_parse_negotiate (h264parse, GST_H264_PARSE_FORMAT_BYTE, NULL);
902
/* avoid stale cached parsing state */
903
if (frame->flags & GST_BASE_PARSE_FRAME_FLAG_NEW_FRAME) {
904
GST_LOG_OBJECT (h264parse, "parsing new frame");
905
gst_h264_parse_reset_frame (h264parse);
907
GST_LOG_OBJECT (h264parse, "resuming frame parsing");
910
drain = GST_BASE_PARSE_DRAINING (parse);
913
current_off = h264parse->current_off;
916
g_assert (current_off < size);
917
GST_DEBUG_OBJECT (h264parse, "last parse position %d", current_off);
919
/* check for initial skip */
920
if (h264parse->current_off == -1) {
922
gst_h264_parser_identify_nalu_unchecked (nalparser, data, current_off,
925
case GST_H264_PARSER_OK:
926
if (nalu.sc_offset > 0) {
927
*skipsize = nalu.sc_offset;
931
case GST_H264_PARSER_NO_NAL:
932
*skipsize = size - 3;
936
g_assert_not_reached ();
943
gst_h264_parser_identify_nalu (nalparser, data, current_off, size,
947
case GST_H264_PARSER_OK:
948
GST_DEBUG_OBJECT (h264parse, "complete nal (offset, size): (%u, %u) ",
949
nalu.offset, nalu.size);
951
case GST_H264_PARSER_NO_NAL_END:
952
GST_DEBUG_OBJECT (h264parse, "not a complete nal found at offset %u",
954
/* if draining, accept it as complete nal */
957
nalu.size = size - nalu.offset;
958
GST_DEBUG_OBJECT (h264parse, "draining, accepting with size %u",
960
/* if it's not too short at least */
965
/* otherwise need more */
967
case GST_H264_PARSER_BROKEN_LINK:
968
GST_ELEMENT_ERROR (h264parse, STREAM, FORMAT,
969
("Error parsing H.264 stream"),
970
("The link to structure needed for the parsing couldn't be found"));
972
case GST_H264_PARSER_ERROR:
973
/* should not really occur either */
974
GST_ELEMENT_ERROR (h264parse, STREAM, FORMAT,
975
("Error parsing H.264 stream"), ("Invalid H.264 stream"));
977
case GST_H264_PARSER_NO_NAL:
978
GST_ELEMENT_ERROR (h264parse, STREAM, FORMAT,
979
("Error parsing H.264 stream"), ("No H.264 NAL unit found"));
981
case GST_H264_PARSER_BROKEN_DATA:
982
GST_WARNING_OBJECT (h264parse, "input stream is corrupt; "
983
"it contains a NAL unit of length %u", nalu.size);
985
/* broken nal at start -> arrange to skip it,
986
* otherwise have it terminate current au
987
* (and so it will be skipped on next frame round) */
988
if (current_off == 0) {
989
GST_DEBUG_OBJECT (h264parse, "skipping broken nal");
990
*skipsize = nalu.offset;
993
GST_DEBUG_OBJECT (h264parse, "terminating au");
995
nalu.offset = nalu.sc_offset;
1000
g_assert_not_reached ();
1004
GST_DEBUG_OBJECT (h264parse, "%p complete nal found. Off: %u, Size: %u",
1005
data, nalu.offset, nalu.size);
1007
/* simulate no next nal if none needed */
1008
nonext = nonext || (h264parse->align == GST_H264_PARSE_ALIGN_NAL);
1011
if (nalu.offset + nalu.size + 4 + 2 > size) {
1012
GST_DEBUG_OBJECT (h264parse, "not enough data for next NALU");
1014
GST_DEBUG_OBJECT (h264parse, "but draining anyway");
1022
if (!gst_h264_parse_process_nal (h264parse, &nalu)) {
1023
GST_WARNING_OBJECT (h264parse,
1024
"broken/invalid nal Type: %d %s, Size: %u will be dropped",
1025
nalu.type, _nal_name (nalu.type), nalu.size);
1026
*skipsize = nalu.size;
1033
/* if no next nal, we know it's complete here */
1034
if (gst_h264_parse_collect_nal (h264parse, data, size, &nalu))
1037
GST_DEBUG_OBJECT (h264parse, "Looking for more");
1038
current_off = nalu.offset + nalu.size;
1042
framesize = nalu.offset + nalu.size;
1044
gst_buffer_unmap (buffer, &map);
1046
gst_h264_parse_parse_frame (parse, frame);
1048
return gst_base_parse_finish_frame (parse, frame, framesize);
1053
/* Restart parsing from here next time */
1054
if (current_off > 0)
1055
h264parse->current_off = current_off;
1059
gst_buffer_unmap (buffer, &map);
1063
GST_DEBUG_OBJECT (h264parse, "skipping %d", *skipsize);
1064
/* If we are collecting access units, we need to preserve the initial
1065
* config headers (SPS, PPS et al.) and only reset the frame if another
1066
* slice NAL was received. This means that broken pictures are discarded */
1067
if (h264parse->align != GST_H264_PARSE_ALIGN_AU ||
1068
!(h264parse->state & GST_H264_PARSE_STATE_VALID_PICTURE_HEADERS) ||
1069
(h264parse->state & GST_H264_PARSE_STATE_GOT_SLICE))
1070
gst_h264_parse_reset_frame (h264parse);
1074
gst_buffer_unmap (buffer, &map);
1075
return GST_FLOW_ERROR;
1078
/* byte together avc codec data based on collected pps and sps so far */
1080
gst_h264_parse_make_codec_data (GstH264Parse * h264parse)
1082
GstBuffer *buf, *nal;
1083
gint i, sps_size = 0, pps_size = 0, num_sps = 0, num_pps = 0;
1084
guint8 profile_idc = 0, profile_comp = 0, level_idc = 0;
1085
gboolean found = FALSE;
1090
/* only nal payload in stored nals */
1092
for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++) {
1093
if ((nal = h264parse->sps_nals[i])) {
1094
gsize size = gst_buffer_get_size (nal);
1096
/* size bytes also count */
1097
sps_size += size + 2;
1101
gst_buffer_extract (nal, 1, tmp, 3);
1102
profile_idc = tmp[0];
1103
profile_comp = tmp[1];
1108
for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++) {
1109
if ((nal = h264parse->pps_nals[i])) {
1111
/* size bytes also count */
1112
pps_size += gst_buffer_get_size (nal) + 2;
1116
/* AVC3 has SPS/PPS inside the stream, not in the codec_data */
1117
if (h264parse->format == GST_H264_PARSE_FORMAT_AVC3) {
1118
num_sps = sps_size = 0;
1119
num_pps = pps_size = 0;
1122
GST_DEBUG_OBJECT (h264parse,
1123
"constructing codec_data: num_sps=%d, num_pps=%d", num_sps, num_pps);
1125
if (!found || (0 == num_pps
1126
&& GST_H264_PARSE_FORMAT_AVC3 != h264parse->format))
1129
buf = gst_buffer_new_allocate (NULL, 5 + 1 + sps_size + 1 + pps_size, NULL);
1130
gst_buffer_map (buf, &map, GST_MAP_WRITE);
1132
nl = h264parse->nal_length_size;
1134
data[0] = 1; /* AVC Decoder Configuration Record ver. 1 */
1135
data[1] = profile_idc; /* profile_idc */
1136
data[2] = profile_comp; /* profile_compability */
1137
data[3] = level_idc; /* level_idc */
1138
data[4] = 0xfc | (nl - 1); /* nal_length_size_minus1 */
1139
data[5] = 0xe0 | num_sps; /* number of SPSs */
1142
if (h264parse->format != GST_H264_PARSE_FORMAT_AVC3) {
1143
for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++) {
1144
if ((nal = h264parse->sps_nals[i])) {
1145
gsize nal_size = gst_buffer_get_size (nal);
1146
GST_WRITE_UINT16_BE (data, nal_size);
1147
gst_buffer_extract (nal, 0, data + 2, nal_size);
1148
data += 2 + nal_size;
1155
if (h264parse->format != GST_H264_PARSE_FORMAT_AVC3) {
1156
for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++) {
1157
if ((nal = h264parse->pps_nals[i])) {
1158
gsize nal_size = gst_buffer_get_size (nal);
1159
GST_WRITE_UINT16_BE (data, nal_size);
1160
gst_buffer_extract (nal, 0, data + 2, nal_size);
1161
data += 2 + nal_size;
1166
gst_buffer_unmap (buf, &map);
1172
gst_h264_parse_get_par (GstH264Parse * h264parse, gint * num, gint * den)
1174
if (h264parse->upstream_par_n != -1 && h264parse->upstream_par_d != -1) {
1175
*num = h264parse->upstream_par_n;
1176
*den = h264parse->upstream_par_d;
1178
*num = h264parse->parsed_par_n;
1179
*den = h264parse->parsed_par_d;
1184
gst_h264_parse_update_src_caps (GstH264Parse * h264parse, GstCaps * caps)
1187
GstCaps *sink_caps, *src_caps;
1188
gboolean modified = FALSE;
1189
GstBuffer *buf = NULL;
1190
GstStructure *s = NULL;
1192
if (G_UNLIKELY (!gst_pad_has_current_caps (GST_BASE_PARSE_SRC_PAD
1195
else if (G_UNLIKELY (!h264parse->update_caps))
1198
/* if this is being called from the first _setcaps call, caps on the sinkpad
1199
* aren't set yet and so they need to be passed as an argument */
1201
sink_caps = gst_caps_ref (caps);
1203
sink_caps = gst_pad_get_current_caps (GST_BASE_PARSE_SINK_PAD (h264parse));
1205
/* carry over input caps as much as possible; override with our own stuff */
1207
sink_caps = gst_caps_new_empty_simple ("video/x-h264");
1209
s = gst_caps_get_structure (sink_caps, 0);
1211
sps = h264parse->nalparser->last_sps;
1212
GST_DEBUG_OBJECT (h264parse, "sps: %p", sps);
1214
/* only codec-data for nice-and-clean au aligned packetized avc format */
1215
if ((h264parse->format == GST_H264_PARSE_FORMAT_AVC
1216
|| h264parse->format == GST_H264_PARSE_FORMAT_AVC3)
1217
&& h264parse->align == GST_H264_PARSE_ALIGN_AU) {
1218
buf = gst_h264_parse_make_codec_data (h264parse);
1219
if (buf && h264parse->codec_data) {
1222
gst_buffer_map (buf, &map, GST_MAP_READ);
1223
if (map.size != gst_buffer_get_size (h264parse->codec_data) ||
1224
gst_buffer_memcmp (h264parse->codec_data, 0, map.data, map.size))
1227
gst_buffer_unmap (buf, &map);
1229
if (!buf && h264parse->codec_data_in)
1230
buf = gst_buffer_ref (h264parse->codec_data_in);
1236
if (G_UNLIKELY (!sps)) {
1237
caps = gst_caps_copy (sink_caps);
1239
gint crop_width, crop_height;
1240
gint fps_num, fps_den;
1242
if (sps->frame_cropping_flag) {
1243
crop_width = sps->crop_rect_width;
1244
crop_height = sps->crop_rect_height;
1246
crop_width = sps->width;
1247
crop_height = sps->height;
1250
if (G_UNLIKELY (h264parse->width != crop_width ||
1251
h264parse->height != crop_height)) {
1252
GST_INFO_OBJECT (h264parse, "resolution changed %dx%d",
1253
crop_width, crop_height);
1254
h264parse->width = crop_width;
1255
h264parse->height = crop_height;
1259
/* 0/1 is set as the default in the codec parser, we will set
1260
* it in case we have no info */
1261
gst_h264_video_calculate_framerate (sps, h264parse->field_pic_flag,
1262
h264parse->sei_pic_struct, &fps_num, &fps_den);
1263
if (G_UNLIKELY (h264parse->fps_num != fps_num
1264
|| h264parse->fps_den != fps_den)) {
1265
GST_DEBUG_OBJECT (h264parse, "framerate changed %d/%d", fps_num, fps_den);
1266
h264parse->fps_num = fps_num;
1267
h264parse->fps_den = fps_den;
1271
if (sps->vui_parameters.aspect_ratio_info_present_flag) {
1272
if (G_UNLIKELY ((h264parse->parsed_par_n != sps->vui_parameters.par_n)
1273
|| (h264parse->parsed_par_d != sps->vui_parameters.par_d))) {
1274
h264parse->parsed_par_n = sps->vui_parameters.par_n;
1275
h264parse->parsed_par_d = sps->vui_parameters.par_d;
1276
GST_INFO_OBJECT (h264parse, "pixel aspect ratio has been changed %d/%d",
1277
h264parse->parsed_par_n, h264parse->parsed_par_d);
1281
if (G_UNLIKELY (modified || h264parse->update_caps)) {
1283
GstClockTime latency;
1285
fps_num = h264parse->fps_num;
1286
fps_den = h264parse->fps_den;
1288
caps = gst_caps_copy (sink_caps);
1290
/* sps should give this but upstream overrides */
1291
if (s && gst_structure_has_field (s, "width"))
1292
gst_structure_get_int (s, "width", &width);
1294
width = h264parse->width;
1296
if (s && gst_structure_has_field (s, "height"))
1297
gst_structure_get_int (s, "height", &height);
1299
height = h264parse->height;
1301
gst_caps_set_simple (caps, "width", G_TYPE_INT, width,
1302
"height", G_TYPE_INT, height, NULL);
1304
/* upstream overrides */
1305
if (s && gst_structure_has_field (s, "framerate"))
1306
gst_structure_get_fraction (s, "framerate", &fps_num, &fps_den);
1308
/* but not necessarily or reliably this */
1310
gst_caps_set_simple (caps, "framerate",
1311
GST_TYPE_FRACTION, fps_num, fps_den, NULL);
1312
gst_base_parse_set_frame_rate (GST_BASE_PARSE (h264parse),
1313
fps_num, fps_den, 0, 0);
1315
latency = gst_util_uint64_scale (GST_SECOND, fps_den, fps_num);
1316
gst_base_parse_set_latency (GST_BASE_PARSE (h264parse), latency,
1326
gst_caps_set_simple (caps, "parsed", G_TYPE_BOOLEAN, TRUE,
1327
"stream-format", G_TYPE_STRING,
1328
gst_h264_parse_get_string (h264parse, TRUE, h264parse->format),
1329
"alignment", G_TYPE_STRING,
1330
gst_h264_parse_get_string (h264parse, FALSE, h264parse->align), NULL);
1332
gst_h264_parse_get_par (h264parse, &par_n, &par_d);
1333
if (par_n != 0 && par_d != 0 &&
1334
(!s || !gst_structure_has_field (s, "pixel-aspect-ratio"))) {
1335
GST_INFO_OBJECT (h264parse, "PAR %d/%d", par_n, par_d);
1336
gst_caps_set_simple (caps, "pixel-aspect-ratio", GST_TYPE_FRACTION,
1337
par_n, par_d, NULL);
1340
src_caps = gst_pad_get_current_caps (GST_BASE_PARSE_SRC_PAD (h264parse));
1343
&& gst_structure_has_field (gst_caps_get_structure (src_caps, 0),
1345
/* use codec data from old caps for comparison; we don't want to resend caps
1346
if everything is same except codec data; */
1347
gst_caps_set_value (caps, "codec_data",
1348
gst_structure_get_value (gst_caps_get_structure (src_caps, 0),
1352
if (!(src_caps && gst_caps_is_strictly_equal (src_caps, caps))) {
1353
/* update codec data to new value */
1355
gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, buf, NULL);
1356
gst_buffer_replace (&h264parse->codec_data, buf);
1357
gst_buffer_unref (buf);
1361
/* remove any left-over codec-data hanging around */
1362
s = gst_caps_get_structure (caps, 0);
1363
gst_structure_remove_field (s, "codec_data");
1364
gst_buffer_replace (&h264parse->codec_data, NULL);
1367
gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (h264parse), caps);
1371
gst_caps_unref (src_caps);
1372
gst_caps_unref (caps);
1375
gst_caps_unref (sink_caps);
1377
gst_buffer_unref (buf);
1381
gst_h264_parse_get_timestamp (GstH264Parse * h264parse,
1382
GstClockTime * out_ts, GstClockTime * out_dur, gboolean frame)
1384
GstH264SPS *sps = h264parse->nalparser->last_sps;
1385
GstClockTime upstream;
1388
g_return_if_fail (out_dur != NULL);
1389
g_return_if_fail (out_ts != NULL);
1394
GST_LOG_OBJECT (h264parse, "no frame data -> 0 duration");
1402
GST_DEBUG_OBJECT (h264parse, "referred SPS invalid");
1404
} else if (!sps->vui_parameters_present_flag) {
1405
GST_DEBUG_OBJECT (h264parse,
1406
"unable to compute timestamp: VUI not present");
1408
} else if (!sps->vui_parameters.timing_info_present_flag) {
1409
GST_DEBUG_OBJECT (h264parse,
1410
"unable to compute timestamp: timing info not present");
1412
} else if (sps->vui_parameters.time_scale == 0) {
1413
GST_DEBUG_OBJECT (h264parse,
1414
"unable to compute timestamp: time_scale = 0 "
1415
"(this is forbidden in spec; bitstream probably contains error)");
1419
if (h264parse->sei_pic_struct_pres_flag &&
1420
h264parse->sei_pic_struct != (guint8) - 1) {
1421
/* Note that when h264parse->sei_pic_struct == -1 (unspecified), there
1422
* are ways to infer its value. This is related to computing the
1423
* TopFieldOrderCnt and BottomFieldOrderCnt, which looks
1424
* complicated and thus not implemented for the time being. Yet
1425
* the value we have here is correct for many applications
1427
switch (h264parse->sei_pic_struct) {
1428
case GST_H264_SEI_PIC_STRUCT_TOP_FIELD:
1429
case GST_H264_SEI_PIC_STRUCT_BOTTOM_FIELD:
1432
case GST_H264_SEI_PIC_STRUCT_FRAME:
1433
case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM:
1434
case GST_H264_SEI_PIC_STRUCT_BOTTOM_TOP:
1437
case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP:
1438
case GST_H264_SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM:
1441
case GST_H264_SEI_PIC_STRUCT_FRAME_DOUBLING:
1444
case GST_H264_SEI_PIC_STRUCT_FRAME_TRIPLING:
1448
GST_DEBUG_OBJECT (h264parse,
1449
"h264parse->sei_pic_struct of unknown value %d. Not parsed",
1450
h264parse->sei_pic_struct);
1454
duration = h264parse->field_pic_flag ? 1 : 2;
1457
GST_LOG_OBJECT (h264parse, "frame tick duration %d", duration);
1460
* h264parse.264 C.1.2 Timing of coded picture removal (equivalent to DTS):
1461
* Tr,n(0) = initial_cpb_removal_delay[ SchedSelIdx ] / 90000
1462
* Tr,n(n) = Tr,n(nb) + Tc * cpb_removal_delay(n)
1464
* Tc = num_units_in_tick / time_scale
1467
if (h264parse->ts_trn_nb != GST_CLOCK_TIME_NONE) {
1468
GST_LOG_OBJECT (h264parse, "buffering based ts");
1469
/* buffering period is present */
1470
if (upstream != GST_CLOCK_TIME_NONE) {
1471
/* If upstream timestamp is valid, we respect it and adjust current
1472
* reference point */
1473
h264parse->ts_trn_nb = upstream -
1474
(GstClockTime) gst_util_uint64_scale_int
1475
(h264parse->sei_cpb_removal_delay * GST_SECOND,
1476
sps->vui_parameters.num_units_in_tick,
1477
sps->vui_parameters.time_scale);
1479
/* If no upstream timestamp is given, we write in new timestamp */
1480
upstream = h264parse->dts = h264parse->ts_trn_nb +
1481
(GstClockTime) gst_util_uint64_scale_int
1482
(h264parse->sei_cpb_removal_delay * GST_SECOND,
1483
sps->vui_parameters.num_units_in_tick,
1484
sps->vui_parameters.time_scale);
1489
GST_LOG_OBJECT (h264parse, "duration based ts");
1490
/* naive method: no removal delay specified
1491
* track upstream timestamp and provide best guess frame duration */
1492
dur = gst_util_uint64_scale_int (duration * GST_SECOND,
1493
sps->vui_parameters.num_units_in_tick, sps->vui_parameters.time_scale);
1495
if (dur < GST_MSECOND) {
1496
GST_DEBUG_OBJECT (h264parse, "discarding dur %" GST_TIME_FORMAT,
1497
GST_TIME_ARGS (dur));
1504
if (GST_CLOCK_TIME_IS_VALID (upstream))
1505
*out_ts = h264parse->dts = upstream;
1507
if (GST_CLOCK_TIME_IS_VALID (*out_dur) &&
1508
GST_CLOCK_TIME_IS_VALID (h264parse->dts))
1509
h264parse->dts += *out_dur;
1512
static GstFlowReturn
1513
gst_h264_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
1515
GstH264Parse *h264parse;
1519
h264parse = GST_H264_PARSE (parse);
1520
buffer = frame->buffer;
1522
gst_h264_parse_update_src_caps (h264parse, NULL);
1524
/* don't mess with timestamps if provided by upstream,
1525
* particularly since our ts not that good they handle seeking etc */
1526
if (h264parse->do_ts)
1527
gst_h264_parse_get_timestamp (h264parse,
1528
&GST_BUFFER_TIMESTAMP (buffer), &GST_BUFFER_DURATION (buffer),
1529
h264parse->frame_start);
1531
if (h264parse->keyframe)
1532
GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
1534
GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
1536
if (h264parse->discont) {
1537
GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
1538
h264parse->discont = FALSE;
1541
/* replace with transformed AVC output if applicable */
1542
av = gst_adapter_available (h264parse->frame_out);
1546
buf = gst_adapter_take_buffer (h264parse->frame_out, av);
1547
gst_buffer_copy_into (buf, buffer, GST_BUFFER_COPY_METADATA, 0, -1);
1548
gst_buffer_replace (&frame->out_buffer, buf);
1549
gst_buffer_unref (buf);
1555
/* sends a codec NAL downstream, decorating and transforming as needed.
1556
* No ownership is taken of @nal */
1557
static GstFlowReturn
1558
gst_h264_parse_push_codec_buffer (GstH264Parse * h264parse,
1559
GstBuffer * nal, GstClockTime ts)
1563
gst_buffer_map (nal, &map, GST_MAP_READ);
1564
nal = gst_h264_parse_wrap_nal (h264parse, h264parse->format,
1565
map.data, map.size);
1566
gst_buffer_unmap (nal, &map);
1568
GST_BUFFER_TIMESTAMP (nal) = ts;
1569
GST_BUFFER_DURATION (nal) = 0;
1571
return gst_pad_push (GST_BASE_PARSE_SRC_PAD (h264parse), nal);
1575
check_pending_key_unit_event (GstEvent * pending_event,
1576
GstSegment * segment, GstClockTime timestamp, guint flags,
1577
GstClockTime pending_key_unit_ts)
1579
GstClockTime running_time, stream_time;
1580
gboolean all_headers;
1582
GstEvent *event = NULL;
1584
g_return_val_if_fail (segment != NULL, NULL);
1586
if (pending_event == NULL)
1589
if (GST_CLOCK_TIME_IS_VALID (pending_key_unit_ts) &&
1590
timestamp == GST_CLOCK_TIME_NONE)
1593
running_time = gst_segment_to_running_time (segment,
1594
GST_FORMAT_TIME, timestamp);
1596
GST_INFO ("now %" GST_TIME_FORMAT " wanted %" GST_TIME_FORMAT,
1597
GST_TIME_ARGS (running_time), GST_TIME_ARGS (pending_key_unit_ts));
1598
if (GST_CLOCK_TIME_IS_VALID (pending_key_unit_ts) &&
1599
running_time < pending_key_unit_ts)
1602
if (flags & GST_BUFFER_FLAG_DELTA_UNIT) {
1603
GST_DEBUG ("pending force key unit, waiting for keyframe");
1607
stream_time = gst_segment_to_stream_time (segment,
1608
GST_FORMAT_TIME, timestamp);
1610
gst_video_event_parse_upstream_force_key_unit (pending_event,
1611
NULL, &all_headers, &count);
1614
gst_video_event_new_downstream_force_key_unit (timestamp, stream_time,
1615
running_time, all_headers, count);
1616
gst_event_set_seqnum (event, gst_event_get_seqnum (pending_event));
1623
gst_h264_parse_prepare_key_unit (GstH264Parse * parse, GstEvent * event)
1625
GstClockTime running_time;
1627
#ifndef GST_DISABLE_GST_DEBUG
1628
gboolean have_sps, have_pps;
1632
parse->pending_key_unit_ts = GST_CLOCK_TIME_NONE;
1633
gst_event_replace (&parse->force_key_unit_event, NULL);
1635
gst_video_event_parse_downstream_force_key_unit (event,
1636
NULL, NULL, &running_time, NULL, &count);
1638
GST_INFO_OBJECT (parse, "pushing downstream force-key-unit event %d "
1639
"%" GST_TIME_FORMAT " count %d", gst_event_get_seqnum (event),
1640
GST_TIME_ARGS (running_time), count);
1641
gst_pad_push_event (GST_BASE_PARSE_SRC_PAD (parse), event);
1643
#ifndef GST_DISABLE_GST_DEBUG
1644
have_sps = have_pps = FALSE;
1645
for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++) {
1646
if (parse->sps_nals[i] != NULL) {
1651
for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++) {
1652
if (parse->pps_nals[i] != NULL) {
1658
GST_INFO_OBJECT (parse, "preparing key unit, have sps %d have pps %d",
1659
have_sps, have_pps);
1662
/* set push_codec to TRUE so that pre_push_frame sends SPS/PPS again */
1663
parse->push_codec = TRUE;
1666
static GstFlowReturn
1667
gst_h264_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
1669
GstH264Parse *h264parse;
1673
h264parse = GST_H264_PARSE (parse);
1675
if (!h264parse->sent_codec_tag) {
1676
GstTagList *taglist;
1679
taglist = gst_tag_list_new_empty ();
1682
caps = gst_pad_get_current_caps (GST_BASE_PARSE_SRC_PAD (parse));
1683
gst_pb_utils_add_codec_description_to_tag_list (taglist,
1684
GST_TAG_VIDEO_CODEC, caps);
1685
gst_caps_unref (caps);
1687
gst_pad_push_event (GST_BASE_PARSE_SRC_PAD (h264parse),
1688
gst_event_new_tag (taglist));
1690
/* also signals the end of first-frame processing */
1691
h264parse->sent_codec_tag = TRUE;
1694
buffer = frame->buffer;
1696
if ((event = check_pending_key_unit_event (h264parse->force_key_unit_event,
1697
&parse->segment, GST_BUFFER_TIMESTAMP (buffer),
1698
GST_BUFFER_FLAGS (buffer), h264parse->pending_key_unit_ts))) {
1699
gst_h264_parse_prepare_key_unit (h264parse, event);
1702
/* periodic SPS/PPS sending */
1703
if (h264parse->interval > 0 || h264parse->push_codec) {
1704
GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buffer);
1708
if (!GST_CLOCK_TIME_IS_VALID (h264parse->last_report)) {
1709
h264parse->last_report = timestamp;
1712
if (h264parse->idr_pos >= 0) {
1713
GST_LOG_OBJECT (h264parse, "IDR nal at offset %d", h264parse->idr_pos);
1715
if (timestamp > h264parse->last_report)
1716
diff = timestamp - h264parse->last_report;
1720
GST_LOG_OBJECT (h264parse,
1721
"now %" GST_TIME_FORMAT ", last SPS/PPS %" GST_TIME_FORMAT,
1722
GST_TIME_ARGS (timestamp), GST_TIME_ARGS (h264parse->last_report));
1724
GST_DEBUG_OBJECT (h264parse,
1725
"interval since last SPS/PPS %" GST_TIME_FORMAT,
1726
GST_TIME_ARGS (diff));
1728
if (GST_TIME_AS_SECONDS (diff) >= h264parse->interval ||
1729
h264parse->push_codec) {
1730
GstBuffer *codec_nal;
1732
GstClockTime new_ts;
1734
/* avoid overwriting a perfectly fine timestamp */
1735
new_ts = GST_CLOCK_TIME_IS_VALID (timestamp) ? timestamp :
1736
h264parse->last_report;
1738
if (h264parse->align == GST_H264_PARSE_ALIGN_NAL) {
1739
/* send separate config NAL buffers */
1740
GST_DEBUG_OBJECT (h264parse, "- sending SPS/PPS");
1741
for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++) {
1742
if ((codec_nal = h264parse->sps_nals[i])) {
1743
GST_DEBUG_OBJECT (h264parse, "sending SPS nal");
1744
gst_h264_parse_push_codec_buffer (h264parse, codec_nal,
1746
h264parse->last_report = new_ts;
1749
for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++) {
1750
if ((codec_nal = h264parse->pps_nals[i])) {
1751
GST_DEBUG_OBJECT (h264parse, "sending PPS nal");
1752
gst_h264_parse_push_codec_buffer (h264parse, codec_nal,
1754
h264parse->last_report = new_ts;
1758
/* insert config NALs into AU */
1761
const gboolean bs = h264parse->format == GST_H264_PARSE_FORMAT_BYTE;
1762
const gint nls = 4 - h264parse->nal_length_size;
1765
gst_byte_writer_init_with_size (&bw, gst_buffer_get_size (buffer),
1767
ok = gst_byte_writer_put_buffer (&bw, buffer, 0, h264parse->idr_pos);
1768
GST_DEBUG_OBJECT (h264parse, "- inserting SPS/PPS");
1769
for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++) {
1770
if ((codec_nal = h264parse->sps_nals[i])) {
1771
gsize nal_size = gst_buffer_get_size (codec_nal);
1772
GST_DEBUG_OBJECT (h264parse, "inserting SPS nal");
1774
ok &= gst_byte_writer_put_uint32_be (&bw, 1);
1776
ok &= gst_byte_writer_put_uint32_be (&bw,
1777
(nal_size << (nls * 8)));
1778
ok &= gst_byte_writer_set_pos (&bw,
1779
gst_byte_writer_get_pos (&bw) - nls);
1782
ok &= gst_byte_writer_put_buffer (&bw, codec_nal, 0, nal_size);
1783
h264parse->last_report = new_ts;
1786
for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++) {
1787
if ((codec_nal = h264parse->pps_nals[i])) {
1788
gsize nal_size = gst_buffer_get_size (codec_nal);
1789
GST_DEBUG_OBJECT (h264parse, "inserting PPS nal");
1791
ok &= gst_byte_writer_put_uint32_be (&bw, 1);
1793
ok &= gst_byte_writer_put_uint32_be (&bw,
1794
(nal_size << (nls * 8)));
1795
ok &= gst_byte_writer_set_pos (&bw,
1796
gst_byte_writer_get_pos (&bw) - nls);
1798
ok &= gst_byte_writer_put_buffer (&bw, codec_nal, 0, nal_size);
1799
h264parse->last_report = new_ts;
1803
gst_byte_writer_put_buffer (&bw, buffer, h264parse->idr_pos, -1);
1804
/* collect result and push */
1805
new_buf = gst_byte_writer_reset_and_get_buffer (&bw);
1806
gst_buffer_copy_into (new_buf, buffer, GST_BUFFER_COPY_METADATA, 0,
1808
/* should already be keyframe/IDR, but it may not have been,
1809
* so mark it as such to avoid being discarded by picky decoder */
1810
GST_BUFFER_FLAG_UNSET (new_buf, GST_BUFFER_FLAG_DELTA_UNIT);
1811
gst_buffer_replace (&frame->out_buffer, new_buf);
1812
gst_buffer_unref (new_buf);
1813
/* some result checking seems to make some compilers happy */
1814
if (G_UNLIKELY (!ok)) {
1815
GST_ERROR_OBJECT (h264parse, "failed to insert SPS/PPS");
1819
/* we pushed whatever we had */
1820
h264parse->push_codec = FALSE;
1821
h264parse->have_sps = FALSE;
1822
h264parse->have_pps = FALSE;
1823
h264parse->state &= GST_H264_PARSE_STATE_VALID_PICTURE_HEADERS;
1827
gst_h264_parse_reset_frame (h264parse);
1833
gst_h264_parse_set_caps (GstBaseParse * parse, GstCaps * caps)
1835
GstH264Parse *h264parse;
1837
const GValue *value;
1838
GstBuffer *codec_data = NULL;
1840
guint format, align, off;
1841
GstH264NalUnit nalu;
1842
GstH264ParserResult parseres;
1844
h264parse = GST_H264_PARSE (parse);
1847
h264parse->push_codec = FALSE;
1849
str = gst_caps_get_structure (caps, 0);
1851
/* accept upstream info if provided */
1852
gst_structure_get_int (str, "width", &h264parse->width);
1853
gst_structure_get_int (str, "height", &h264parse->height);
1854
gst_structure_get_fraction (str, "framerate", &h264parse->fps_num,
1855
&h264parse->fps_den);
1856
gst_structure_get_fraction (str, "pixel-aspect-ratio",
1857
&h264parse->upstream_par_n, &h264parse->upstream_par_d);
1859
/* get upstream format and align from caps */
1860
gst_h264_parse_format_from_caps (caps, &format, &align);
1862
/* packetized video has a codec_data */
1863
if (format != GST_H264_PARSE_FORMAT_BYTE &&
1864
(value = gst_structure_get_value (str, "codec_data"))) {
1867
guint num_sps, num_pps;
1868
#ifndef GST_DISABLE_GST_DEBUG
1873
GST_DEBUG_OBJECT (h264parse, "have packetized h264");
1874
/* make note for optional split processing */
1875
h264parse->packetized = TRUE;
1877
codec_data = gst_value_get_buffer (value);
1880
gst_buffer_map (codec_data, &map, GST_MAP_READ);
1884
/* parse the avcC data */
1885
if (size < 7) { /* when numSPS==0 and numPPS==0, length is 7 bytes */
1886
gst_buffer_unmap (codec_data, &map);
1887
goto avcc_too_small;
1889
/* parse the version, this must be 1 */
1891
gst_buffer_unmap (codec_data, &map);
1894
#ifndef GST_DISABLE_GST_DEBUG
1895
/* AVCProfileIndication */
1896
/* profile_compat */
1897
/* AVCLevelIndication */
1898
profile = (data[1] << 16) | (data[2] << 8) | data[3];
1899
GST_DEBUG_OBJECT (h264parse, "profile %06x", profile);
1902
/* 6 bits reserved | 2 bits lengthSizeMinusOne */
1903
/* this is the number of bytes in front of the NAL units to mark their
1905
h264parse->nal_length_size = (data[4] & 0x03) + 1;
1906
GST_DEBUG_OBJECT (h264parse, "nal length size %u",
1907
h264parse->nal_length_size);
1909
num_sps = data[5] & 0x1f;
1911
for (i = 0; i < num_sps; i++) {
1912
parseres = gst_h264_parser_identify_nalu_avc (h264parse->nalparser,
1913
data, off, size, 2, &nalu);
1914
if (parseres != GST_H264_PARSER_OK) {
1915
gst_buffer_unmap (codec_data, &map);
1916
goto avcc_too_small;
1919
gst_h264_parse_process_nal (h264parse, &nalu);
1920
off = nalu.offset + nalu.size;
1923
num_pps = data[off];
1926
for (i = 0; i < num_pps; i++) {
1927
parseres = gst_h264_parser_identify_nalu_avc (h264parse->nalparser,
1928
data, off, size, 2, &nalu);
1929
if (parseres != GST_H264_PARSER_OK) {
1930
gst_buffer_unmap (codec_data, &map);
1931
goto avcc_too_small;
1934
gst_h264_parse_process_nal (h264parse, &nalu);
1935
off = nalu.offset + nalu.size;
1938
gst_buffer_unmap (codec_data, &map);
1940
gst_buffer_replace (&h264parse->codec_data_in, codec_data);
1942
/* if upstream sets codec_data without setting stream-format and alignment, we
1943
* assume stream-format=avc,alignment=au */
1944
if (format == GST_H264_PARSE_FORMAT_NONE)
1945
format = GST_H264_PARSE_FORMAT_AVC;
1946
if (align == GST_H264_PARSE_ALIGN_NONE)
1947
align = GST_H264_PARSE_ALIGN_AU;
1949
GST_DEBUG_OBJECT (h264parse, "have bytestream h264");
1950
/* nothing to pre-process */
1951
h264parse->packetized = FALSE;
1952
/* we have 4 sync bytes */
1953
h264parse->nal_length_size = 4;
1955
if (format == GST_H264_PARSE_FORMAT_NONE) {
1956
format = GST_H264_PARSE_FORMAT_BYTE;
1957
align = GST_H264_PARSE_ALIGN_AU;
1964
/* prefer input type determined above */
1965
in_caps = gst_caps_new_simple ("video/x-h264",
1966
"parsed", G_TYPE_BOOLEAN, TRUE,
1967
"stream-format", G_TYPE_STRING,
1968
gst_h264_parse_get_string (h264parse, TRUE, format),
1969
"alignment", G_TYPE_STRING,
1970
gst_h264_parse_get_string (h264parse, FALSE, align), NULL);
1971
/* negotiate with downstream, sets ->format and ->align */
1972
gst_h264_parse_negotiate (h264parse, format, in_caps);
1973
gst_caps_unref (in_caps);
1976
if (format == h264parse->format && align == h264parse->align) {
1977
/* do not set CAPS and passthrough mode if SPS/PPS have not been parsed */
1978
if (h264parse->have_sps && h264parse->have_pps) {
1979
gst_base_parse_set_passthrough (parse, TRUE);
1981
/* we did parse codec-data and might supplement src caps */
1982
gst_h264_parse_update_src_caps (h264parse, caps);
1984
} else if (format == GST_H264_PARSE_FORMAT_AVC
1985
|| format == GST_H264_PARSE_FORMAT_AVC3) {
1986
/* if input != output, and input is avc, must split before anything else */
1987
/* arrange to insert codec-data in-stream if needed.
1988
* src caps are only arranged for later on */
1989
h264parse->push_codec = TRUE;
1990
h264parse->have_sps = FALSE;
1991
h264parse->have_pps = FALSE;
1992
if (h264parse->align == GST_H264_PARSE_ALIGN_NAL)
1993
h264parse->split_packetized = TRUE;
1994
h264parse->packetized = TRUE;
2002
GST_DEBUG_OBJECT (h264parse, "avcC size %" G_GSIZE_FORMAT " < 8", size);
2007
GST_DEBUG_OBJECT (h264parse, "wrong avcC version");
2012
GST_DEBUG_OBJECT (h264parse, "wrong codec-data type");
2017
GST_WARNING_OBJECT (h264parse, "refused caps %" GST_PTR_FORMAT, caps);
2023
remove_fields (GstCaps * caps)
2027
n = gst_caps_get_size (caps);
2028
for (i = 0; i < n; i++) {
2029
GstStructure *s = gst_caps_get_structure (caps, i);
2031
gst_structure_remove_field (s, "alignment");
2032
gst_structure_remove_field (s, "stream-format");
2033
gst_structure_remove_field (s, "parsed");
2038
gst_h264_parse_get_caps (GstBaseParse * parse, GstCaps * filter)
2040
GstCaps *peercaps, *templ;
2043
templ = gst_pad_get_pad_template_caps (GST_BASE_PARSE_SINK_PAD (parse));
2045
GstCaps *fcopy = gst_caps_copy (filter);
2046
/* Remove the fields we convert */
2047
remove_fields (fcopy);
2048
peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), fcopy);
2049
gst_caps_unref (fcopy);
2051
peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), NULL);
2054
peercaps = gst_caps_make_writable (peercaps);
2055
remove_fields (peercaps);
2057
res = gst_caps_intersect_full (peercaps, templ, GST_CAPS_INTERSECT_FIRST);
2058
gst_caps_unref (peercaps);
2059
gst_caps_unref (templ);
2065
GstCaps *tmp = gst_caps_intersect_full (res, filter,
2066
GST_CAPS_INTERSECT_FIRST);
2067
gst_caps_unref (res);
2075
gst_h264_parse_event (GstBaseParse * parse, GstEvent * event)
2078
GstH264Parse *h264parse = GST_H264_PARSE (parse);
2080
switch (GST_EVENT_TYPE (event)) {
2081
case GST_EVENT_CUSTOM_DOWNSTREAM:
2083
GstClockTime timestamp, stream_time, running_time;
2084
gboolean all_headers;
2087
if (gst_video_event_is_force_key_unit (event)) {
2088
gst_video_event_parse_downstream_force_key_unit (event,
2089
×tamp, &stream_time, &running_time, &all_headers, &count);
2091
GST_INFO_OBJECT (h264parse,
2092
"received downstream force key unit event, "
2093
"seqnum %d running_time %" GST_TIME_FORMAT
2094
" all_headers %d count %d", gst_event_get_seqnum (event),
2095
GST_TIME_ARGS (running_time), all_headers, count);
2096
if (h264parse->force_key_unit_event) {
2097
GST_INFO_OBJECT (h264parse, "ignoring force key unit event "
2098
"as one is already queued");
2100
h264parse->pending_key_unit_ts = running_time;
2101
gst_event_replace (&h264parse->force_key_unit_event, event);
2103
gst_event_unref (event);
2106
res = GST_BASE_PARSE_CLASS (parent_class)->sink_event (parse, event);
2111
case GST_EVENT_FLUSH_STOP:
2112
h264parse->dts = GST_CLOCK_TIME_NONE;
2113
h264parse->ts_trn_nb = GST_CLOCK_TIME_NONE;
2115
res = GST_BASE_PARSE_CLASS (parent_class)->sink_event (parse, event);
2117
case GST_EVENT_SEGMENT:
2119
const GstSegment *segment;
2121
gst_event_parse_segment (event, &segment);
2122
/* don't try to mess with more subtle cases (e.g. seek) */
2123
if (segment->format == GST_FORMAT_TIME &&
2124
(segment->start != 0 || segment->rate != 1.0
2125
|| segment->applied_rate != 1.0))
2126
h264parse->do_ts = FALSE;
2128
res = GST_BASE_PARSE_CLASS (parent_class)->sink_event (parse, event);
2132
res = GST_BASE_PARSE_CLASS (parent_class)->sink_event (parse, event);
2139
gst_h264_parse_src_event (GstBaseParse * parse, GstEvent * event)
2142
GstH264Parse *h264parse = GST_H264_PARSE (parse);
2144
switch (GST_EVENT_TYPE (event)) {
2145
case GST_EVENT_CUSTOM_UPSTREAM:
2147
GstClockTime running_time;
2148
gboolean all_headers;
2151
if (gst_video_event_is_force_key_unit (event)) {
2152
gst_video_event_parse_upstream_force_key_unit (event,
2153
&running_time, &all_headers, &count);
2155
GST_INFO_OBJECT (h264parse, "received upstream force-key-unit event, "
2156
"seqnum %d running_time %" GST_TIME_FORMAT
2157
" all_headers %d count %d", gst_event_get_seqnum (event),
2158
GST_TIME_ARGS (running_time), all_headers, count);
2161
h264parse->pending_key_unit_ts = running_time;
2162
gst_event_replace (&h264parse->force_key_unit_event, event);
2165
res = GST_BASE_PARSE_CLASS (parent_class)->src_event (parse, event);
2169
res = GST_BASE_PARSE_CLASS (parent_class)->src_event (parse, event);
2177
gst_h264_parse_set_property (GObject * object, guint prop_id,
2178
const GValue * value, GParamSpec * pspec)
2180
GstH264Parse *parse;
2182
parse = GST_H264_PARSE (object);
2185
case PROP_CONFIG_INTERVAL:
2186
parse->interval = g_value_get_uint (value);
2189
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
2195
gst_h264_parse_get_property (GObject * object, guint prop_id,
2196
GValue * value, GParamSpec * pspec)
2198
GstH264Parse *parse;
2200
parse = GST_H264_PARSE (object);
2203
case PROP_CONFIG_INTERVAL:
2204
g_value_set_uint (value, parse->interval);
2207
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);