codecs: h264decoder: Use GstFlowReturn everywhere

A boolean return value is not sufficient for representing the reason
for an error in most cases. For instance, any error around
new_sequence() means a negotiation error, not just a generic *ERROR*.
Also, some subclasses allocate a buffer/memory/surface in new_picture(),
and that can fail for an expected reason, most likely flushing.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/1019>
Seungha Yang, 2021-09-21 22:21:51 +09:00, committed by Nicolas Dufresne
parent 5b405d1585
commit e322745763
6 changed files with 291 additions and 234 deletions
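To illustrate the new contract from a subclass's point of view, here is a minimal, hypothetical sketch (the GstMyH264Dec type and the gst_my_h264_dec_* helpers are assumptions for illustration, not code from this commit) of a new_picture() implementation that reports an expected failure such as flushing as GST_FLOW_FLUSHING rather than collapsing every failure into FALSE:

static GstFlowReturn
gst_my_h264_dec_new_picture (GstH264Decoder * decoder,
    GstVideoCodecFrame * frame, GstH264Picture * picture)
{
  GstMyH264Dec *self = GST_MY_H264_DEC (decoder);
  GstBuffer *view_buffer;

  /* Hypothetical helper: acquire an output buffer/surface for this picture */
  view_buffer = gst_my_h264_dec_acquire_output_buffer (self);
  if (!view_buffer) {
    if (gst_my_h264_dec_is_flushing (self)) {
      /* Expected condition (e.g. a seek in progress), not a decode error */
      GST_DEBUG_OBJECT (self, "Flushing, no output buffer available");
      return GST_FLOW_FLUSHING;
    }

    GST_ERROR_OBJECT (self, "Failed to allocate output buffer");
    return GST_FLOW_ERROR;
  }

  gst_h264_picture_set_user_data (picture, view_buffer,
      (GDestroyNotify) gst_buffer_unref);

  return GST_FLOW_OK;
}

The base class then propagates the subclass's GstFlowReturn (see the handle_frame() and drain() changes below) instead of reducing every failure to TRUE/FALSE.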


@ -97,7 +97,6 @@ struct _GstH264DecoderPrivate
/* Cache last field which can not enter the DPB, should be a non ref */ /* Cache last field which can not enter the DPB, should be a non ref */
GstH264Picture *last_field; GstH264Picture *last_field;
GstFlowReturn last_ret;
/* used for low-latency vs. high throughput mode decision */ /* used for low-latency vs. high throughput mode decision */
gboolean is_live; gboolean is_live;
@ -164,6 +163,11 @@ typedef struct
GstH264Decoder *self; GstH264Decoder *self;
} GstH264DecoderOutputFrame; } GstH264DecoderOutputFrame;
#define UPDATE_FLOW_RETURN(ret,new_ret) G_STMT_START { \
if (*(ret) == GST_FLOW_OK) \
*(ret) = new_ret; \
} G_STMT_END
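/* Editorial note, not part of the patch: UPDATE_FLOW_RETURN records only the
 * first non-OK value, e.g.
 *   GstFlowReturn ret = GST_FLOW_OK;
 *   UPDATE_FLOW_RETURN (&ret, GST_FLOW_FLUSHING);  -> ret becomes GST_FLOW_FLUSHING
 *   UPDATE_FLOW_RETURN (&ret, GST_FLOW_OK);        -> ret stays GST_FLOW_FLUSHING
 * so the helpers below can accumulate a result over several steps without a
 * later GST_FLOW_OK masking an earlier error or flushing state. */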
#define parent_class gst_h264_decoder_parent_class #define parent_class gst_h264_decoder_parent_class
G_DEFINE_ABSTRACT_TYPE_WITH_CODE (GstH264Decoder, gst_h264_decoder, G_DEFINE_ABSTRACT_TYPE_WITH_CODE (GstH264Decoder, gst_h264_decoder,
GST_TYPE_VIDEO_DECODER, GST_TYPE_VIDEO_DECODER,
@ -183,11 +187,11 @@ static GstFlowReturn gst_h264_decoder_drain (GstVideoDecoder * decoder);
static GstFlowReturn gst_h264_decoder_handle_frame (GstVideoDecoder * decoder, static GstFlowReturn gst_h264_decoder_handle_frame (GstVideoDecoder * decoder,
GstVideoCodecFrame * frame); GstVideoCodecFrame * frame);
/* codec spcific functions */ /* codec specific functions */
static gboolean gst_h264_decoder_process_sps (GstH264Decoder * self, static GstFlowReturn gst_h264_decoder_process_sps (GstH264Decoder * self,
GstH264SPS * sps); GstH264SPS * sps);
static gboolean gst_h264_decoder_decode_slice (GstH264Decoder * self); static gboolean gst_h264_decoder_decode_slice (GstH264Decoder * self);
static gboolean gst_h264_decoder_decode_nal (GstH264Decoder * self, static GstFlowReturn gst_h264_decoder_decode_nal (GstH264Decoder * self,
GstH264NalUnit * nalu); GstH264NalUnit * nalu);
static gboolean gst_h264_decoder_fill_picture_from_slice (GstH264Decoder * self, static gboolean gst_h264_decoder_fill_picture_from_slice (GstH264Decoder * self,
const GstH264Slice * slice, GstH264Picture * picture); const GstH264Slice * slice, GstH264Picture * picture);
@ -195,10 +199,11 @@ static gboolean gst_h264_decoder_calculate_poc (GstH264Decoder * self,
GstH264Picture * picture); GstH264Picture * picture);
static gboolean gst_h264_decoder_init_gap_picture (GstH264Decoder * self, static gboolean gst_h264_decoder_init_gap_picture (GstH264Decoder * self,
GstH264Picture * picture, gint frame_num); GstH264Picture * picture, gint frame_num);
static gboolean gst_h264_decoder_drain_internal (GstH264Decoder * self); static GstFlowReturn gst_h264_decoder_drain_internal (GstH264Decoder * self);
static gboolean gst_h264_decoder_finish_current_picture (GstH264Decoder * self); static void gst_h264_decoder_finish_current_picture (GstH264Decoder * self, GstFlowReturn * ret);
static gboolean gst_h264_decoder_finish_picture (GstH264Decoder * self, GstH264Picture * picture); static void gst_h264_decoder_finish_picture (GstH264Decoder * self, GstH264Picture * picture, GstFlowReturn * ret);
static void gst_h264_decoder_prepare_ref_pic_lists (GstH264Decoder * self, static void gst_h264_decoder_prepare_ref_pic_lists (GstH264Decoder * self,
GstH264Picture * current_picture); GstH264Picture * current_picture);
static void gst_h264_decoder_clear_ref_pic_lists (GstH264Decoder * self); static void gst_h264_decoder_clear_ref_pic_lists (GstH264Decoder * self);
@ -207,7 +212,7 @@ static gboolean
gst_h264_decoder_sliding_window_picture_marking (GstH264Decoder * self, gst_h264_decoder_sliding_window_picture_marking (GstH264Decoder * self,
GstH264Picture * picture); GstH264Picture * picture);
static void gst_h264_decoder_do_output_picture (GstH264Decoder * self, static void gst_h264_decoder_do_output_picture (GstH264Decoder * self,
GstH264Picture * picture); GstH264Picture * picture, GstFlowReturn * ret);
static GstH264Picture *gst_h264_decoder_new_field_picture (GstH264Decoder * static GstH264Picture *gst_h264_decoder_new_field_picture (GstH264Decoder *
self, GstH264Picture * picture); self, GstH264Picture * picture);
static void static void
@ -495,13 +500,9 @@ static GstFlowReturn
gst_h264_decoder_drain (GstVideoDecoder * decoder) gst_h264_decoder_drain (GstVideoDecoder * decoder)
{ {
GstH264Decoder *self = GST_H264_DECODER (decoder); GstH264Decoder *self = GST_H264_DECODER (decoder);
GstH264DecoderPrivate *priv = self->priv;
priv->last_ret = GST_FLOW_OK;
/* dpb will be cleared by this method */ /* dpb will be cleared by this method */
gst_h264_decoder_drain_internal (self); return gst_h264_decoder_drain_internal (self);
return priv->last_ret;
} }
static GstFlowReturn static GstFlowReturn
@ -520,7 +521,7 @@ gst_h264_decoder_handle_frame (GstVideoDecoder * decoder,
GstH264NalUnit nalu; GstH264NalUnit nalu;
GstH264ParserResult pres; GstH264ParserResult pres;
GstMapInfo map; GstMapInfo map;
gboolean decode_ret = TRUE; GstFlowReturn decode_ret = GST_FLOW_OK;
GST_LOG_OBJECT (self, GST_LOG_OBJECT (self,
"handle frame, PTS: %" GST_TIME_FORMAT ", DTS: %" "handle frame, PTS: %" GST_TIME_FORMAT ", DTS: %"
@ -528,14 +529,13 @@ gst_h264_decoder_handle_frame (GstVideoDecoder * decoder,
GST_TIME_ARGS (GST_BUFFER_DTS (in_buf))); GST_TIME_ARGS (GST_BUFFER_DTS (in_buf)));
priv->current_frame = frame; priv->current_frame = frame;
priv->last_ret = GST_FLOW_OK;
gst_buffer_map (in_buf, &map, GST_MAP_READ); gst_buffer_map (in_buf, &map, GST_MAP_READ);
if (priv->in_format == GST_H264_DECODER_FORMAT_AVC) { if (priv->in_format == GST_H264_DECODER_FORMAT_AVC) {
pres = gst_h264_parser_identify_nalu_avc (priv->parser, pres = gst_h264_parser_identify_nalu_avc (priv->parser,
map.data, 0, map.size, priv->nal_length_size, &nalu); map.data, 0, map.size, priv->nal_length_size, &nalu);
while (pres == GST_H264_PARSER_OK && decode_ret) { while (pres == GST_H264_PARSER_OK && decode_ret == GST_FLOW_OK) {
decode_ret = gst_h264_decoder_decode_nal (self, &nalu); decode_ret = gst_h264_decoder_decode_nal (self, &nalu);
pres = gst_h264_parser_identify_nalu_avc (priv->parser, pres = gst_h264_parser_identify_nalu_avc (priv->parser,
@ -549,7 +549,7 @@ gst_h264_decoder_handle_frame (GstVideoDecoder * decoder,
if (pres == GST_H264_PARSER_NO_NAL_END) if (pres == GST_H264_PARSER_NO_NAL_END)
pres = GST_H264_PARSER_OK; pres = GST_H264_PARSER_OK;
while (pres == GST_H264_PARSER_OK && decode_ret) { while (pres == GST_H264_PARSER_OK && decode_ret == GST_FLOW_OK) {
decode_ret = gst_h264_decoder_decode_nal (self, &nalu); decode_ret = gst_h264_decoder_decode_nal (self, &nalu);
pres = gst_h264_parser_identify_nalu (priv->parser, pres = gst_h264_parser_identify_nalu (priv->parser,
@ -562,25 +562,30 @@ gst_h264_decoder_handle_frame (GstVideoDecoder * decoder,
gst_buffer_unmap (in_buf, &map); gst_buffer_unmap (in_buf, &map);
if (!decode_ret) { if (decode_ret != GST_FLOW_OK) {
GST_VIDEO_DECODER_ERROR (self, 1, STREAM, DECODE, GST_VIDEO_DECODER_ERROR (self, 1, STREAM, DECODE,
("Failed to decode data"), (NULL), priv->last_ret); ("Failed to decode data"), (NULL), decode_ret);
gst_video_decoder_drop_frame (decoder, frame); gst_video_decoder_drop_frame (decoder, frame);
gst_h264_picture_clear (&priv->current_picture); gst_h264_picture_clear (&priv->current_picture);
priv->current_frame = NULL; priv->current_frame = NULL;
return priv->last_ret; return decode_ret;
} }
gst_h264_decoder_finish_current_picture (self); gst_h264_decoder_finish_current_picture (self, &decode_ret);
gst_video_codec_frame_unref (frame); gst_video_codec_frame_unref (frame);
priv->current_frame = NULL; priv->current_frame = NULL;
return priv->last_ret; if (decode_ret != GST_FLOW_OK) {
GST_VIDEO_DECODER_ERROR (self, 1, STREAM, DECODE,
("Failed to decode data"), (NULL), decode_ret);
}
return decode_ret;
} }
static gboolean static GstFlowReturn
gst_h264_decoder_parse_sps (GstH264Decoder * self, GstH264NalUnit * nalu) gst_h264_decoder_parse_sps (GstH264Decoder * self, GstH264NalUnit * nalu)
{ {
GstH264DecoderPrivate *priv = self->priv; GstH264DecoderPrivate *priv = self->priv;
@ -591,18 +596,18 @@ gst_h264_decoder_parse_sps (GstH264Decoder * self, GstH264NalUnit * nalu)
pres = gst_h264_parse_sps (nalu, &sps); pres = gst_h264_parse_sps (nalu, &sps);
if (pres != GST_H264_PARSER_OK) { if (pres != GST_H264_PARSER_OK) {
GST_WARNING_OBJECT (self, "Failed to parse SPS, result %d", pres); GST_WARNING_OBJECT (self, "Failed to parse SPS, result %d", pres);
return FALSE; return GST_FLOW_ERROR;
} }
GST_LOG_OBJECT (self, "SPS parsed"); GST_LOG_OBJECT (self, "SPS parsed");
ret = gst_h264_decoder_process_sps (self, &sps); ret = gst_h264_decoder_process_sps (self, &sps);
if (!ret) { if (ret != GST_FLOW_OK) {
GST_WARNING_OBJECT (self, "Failed to process SPS"); GST_WARNING_OBJECT (self, "Failed to process SPS");
} else if (gst_h264_parser_update_sps (priv->parser, } else if (gst_h264_parser_update_sps (priv->parser,
&sps) != GST_H264_PARSER_OK) { &sps) != GST_H264_PARSER_OK) {
GST_WARNING_OBJECT (self, "Failed to update SPS"); GST_WARNING_OBJECT (self, "Failed to update SPS");
ret = FALSE; ret = GST_FLOW_ERROR;
} }
gst_h264_sps_clear (&sps); gst_h264_sps_clear (&sps);
@ -610,29 +615,29 @@ gst_h264_decoder_parse_sps (GstH264Decoder * self, GstH264NalUnit * nalu)
return ret; return ret;
} }
static gboolean static GstFlowReturn
gst_h264_decoder_parse_pps (GstH264Decoder * self, GstH264NalUnit * nalu) gst_h264_decoder_parse_pps (GstH264Decoder * self, GstH264NalUnit * nalu)
{ {
GstH264DecoderPrivate *priv = self->priv; GstH264DecoderPrivate *priv = self->priv;
GstH264PPS pps; GstH264PPS pps;
GstH264ParserResult pres; GstH264ParserResult pres;
gboolean ret = TRUE; GstFlowReturn ret = GST_FLOW_OK;
pres = gst_h264_parse_pps (priv->parser, nalu, &pps); pres = gst_h264_parse_pps (priv->parser, nalu, &pps);
if (pres != GST_H264_PARSER_OK) { if (pres != GST_H264_PARSER_OK) {
GST_WARNING_OBJECT (self, "Failed to parse PPS, result %d", pres); GST_WARNING_OBJECT (self, "Failed to parse PPS, result %d", pres);
return FALSE; return GST_FLOW_ERROR;
} }
GST_LOG_OBJECT (self, "PPS parsed"); GST_LOG_OBJECT (self, "PPS parsed");
if (pps.num_slice_groups_minus1 > 0) { if (pps.num_slice_groups_minus1 > 0) {
GST_FIXME_OBJECT (self, "FMO is not supported"); GST_FIXME_OBJECT (self, "FMO is not supported");
ret = FALSE; ret = GST_FLOW_ERROR;
} else if (gst_h264_parser_update_pps (priv->parser, &pps) } else if (gst_h264_parser_update_pps (priv->parser, &pps)
!= GST_H264_PARSER_OK) { != GST_H264_PARSER_OK) {
GST_WARNING_OBJECT (self, "Failed to update PPS"); GST_WARNING_OBJECT (self, "Failed to update PPS");
ret = FALSE; ret = GST_FLOW_ERROR;
} }
gst_h264_pps_clear (&pps); gst_h264_pps_clear (&pps);
@ -640,7 +645,7 @@ gst_h264_decoder_parse_pps (GstH264Decoder * self, GstH264NalUnit * nalu)
return ret; return ret;
} }
static gboolean static GstFlowReturn
gst_h264_decoder_parse_codec_data (GstH264Decoder * self, const guint8 * data, gst_h264_decoder_parse_codec_data (GstH264Decoder * self, const guint8 * data,
gsize size) gsize size)
{ {
@ -650,18 +655,19 @@ gst_h264_decoder_parse_codec_data (GstH264Decoder * self, const guint8 * data,
gint i; gint i;
GstH264ParserResult pres; GstH264ParserResult pres;
GstH264NalUnit nalu; GstH264NalUnit nalu;
GstFlowReturn ret = GST_FLOW_OK;
#ifndef GST_DISABLE_GST_DEBUG #ifndef GST_DISABLE_GST_DEBUG
guint profile; guint profile;
#endif #endif
/* parse the avcC data */ /* parse the avcC data */
if (size < 7) { /* when numSPS==0 and numPPS==0, length is 7 bytes */ if (size < 7) { /* when numSPS==0 and numPPS==0, length is 7 bytes */
return FALSE; return GST_FLOW_ERROR;
} }
/* parse the version, this must be 1 */ /* parse the version, this must be 1 */
if (data[0] != 1) { if (data[0] != 1) {
return FALSE; return GST_FLOW_ERROR;
} }
#ifndef GST_DISABLE_GST_DEBUG #ifndef GST_DISABLE_GST_DEBUG
/* AVCProfileIndication */ /* AVCProfileIndication */
@ -684,19 +690,20 @@ gst_h264_decoder_parse_codec_data (GstH264Decoder * self, const guint8 * data,
data, off, size, 2, &nalu); data, off, size, 2, &nalu);
if (pres != GST_H264_PARSER_OK) { if (pres != GST_H264_PARSER_OK) {
GST_WARNING_OBJECT (self, "Failed to identify SPS nalu"); GST_WARNING_OBJECT (self, "Failed to identify SPS nalu");
return FALSE; return GST_FLOW_ERROR;
} }
if (!gst_h264_decoder_parse_sps (self, &nalu)) { ret = gst_h264_decoder_parse_sps (self, &nalu);
if (ret != GST_FLOW_OK) {
GST_WARNING_OBJECT (self, "Failed to parse SPS"); GST_WARNING_OBJECT (self, "Failed to parse SPS");
return FALSE; return ret;
} }
off = nalu.offset + nalu.size; off = nalu.offset + nalu.size;
} }
if (off >= size) { if (off >= size) {
GST_WARNING_OBJECT (self, "Too small avcC"); GST_WARNING_OBJECT (self, "Too small avcC");
return FALSE; return GST_FLOW_ERROR;
} }
num_pps = data[off]; num_pps = data[off];
@ -707,17 +714,18 @@ gst_h264_decoder_parse_codec_data (GstH264Decoder * self, const guint8 * data,
data, off, size, 2, &nalu); data, off, size, 2, &nalu);
if (pres != GST_H264_PARSER_OK) { if (pres != GST_H264_PARSER_OK) {
GST_WARNING_OBJECT (self, "Failed to identify PPS nalu"); GST_WARNING_OBJECT (self, "Failed to identify PPS nalu");
return FALSE; return GST_FLOW_ERROR;
} }
if (!gst_h264_decoder_parse_pps (self, &nalu)) { ret = gst_h264_decoder_parse_pps (self, &nalu);
if (ret != GST_FLOW_OK) {
GST_WARNING_OBJECT (self, "Failed to parse PPS"); GST_WARNING_OBJECT (self, "Failed to parse PPS");
return FALSE; return ret;
} }
off = nalu.offset + nalu.size; off = nalu.offset + nalu.size;
} }
return TRUE; return GST_FLOW_OK;
} }
static gboolean static gboolean
@ -817,17 +825,19 @@ gst_h264_decoder_split_frame (GstH264Decoder * self, GstH264Picture * picture)
return other_field; return other_field;
} }
static gboolean static void
output_picture_directly (GstH264Decoder * self, GstH264Picture * picture) output_picture_directly (GstH264Decoder * self, GstH264Picture * picture,
GstFlowReturn * ret)
{ {
GstH264DecoderPrivate *priv = self->priv; GstH264DecoderPrivate *priv = self->priv;
GstH264Picture *out_pic = NULL; GstH264Picture *out_pic = NULL;
gboolean ret = TRUE; GstFlowReturn flow_ret = GST_FLOW_OK;
g_assert (ret != NULL);
if (GST_H264_PICTURE_IS_FRAME (picture)) { if (GST_H264_PICTURE_IS_FRAME (picture)) {
g_assert (priv->last_field == NULL); g_assert (priv->last_field == NULL);
out_pic = g_steal_pointer (&picture); out_pic = g_steal_pointer (&picture);
ret = TRUE;
goto output; goto output;
} }
@ -836,13 +846,12 @@ output_picture_directly (GstH264Decoder * self, GstH264Picture * picture)
GST_WARNING ("Set the last output %p poc:%d, without first field", GST_WARNING ("Set the last output %p poc:%d, without first field",
picture, picture->pic_order_cnt); picture, picture->pic_order_cnt);
ret = FALSE; flow_ret = GST_FLOW_ERROR;
goto output; goto output;
} }
/* Just cache the first field. */ /* Just cache the first field. */
priv->last_field = g_steal_pointer (&picture); priv->last_field = g_steal_pointer (&picture);
ret = TRUE;
} else { } else {
if (!picture->second_field || !picture->other_field if (!picture->second_field || !picture->other_field
|| picture->other_field != priv->last_field) { || picture->other_field != priv->last_field) {
@ -852,7 +861,7 @@ output_picture_directly (GstH264Decoder * self, GstH264Picture * picture)
picture, picture->pic_order_cnt); picture, picture->pic_order_cnt);
gst_h264_picture_clear (&priv->last_field); gst_h264_picture_clear (&priv->last_field);
ret = FALSE; flow_ret = GST_FLOW_ERROR;
goto output; goto output;
} }
@ -870,12 +879,12 @@ output_picture_directly (GstH264Decoder * self, GstH264Picture * picture)
output: output:
if (out_pic) { if (out_pic) {
gst_h264_dpb_set_last_output (priv->dpb, out_pic); gst_h264_dpb_set_last_output (priv->dpb, out_pic);
gst_h264_decoder_do_output_picture (self, out_pic); gst_h264_decoder_do_output_picture (self, out_pic, &flow_ret);
} }
gst_h264_picture_clear (&picture); gst_h264_picture_clear (&picture);
return ret; UPDATE_FLOW_RETURN (ret, flow_ret);
} }
static void static void
@ -902,10 +911,12 @@ add_picture_to_dpb (GstH264Decoder * self, GstH264Picture * picture)
static void static void
_bump_dpb (GstH264Decoder * self, GstH264DpbBumpMode bump_level, _bump_dpb (GstH264Decoder * self, GstH264DpbBumpMode bump_level,
GstH264Picture * current_picture) GstH264Picture * current_picture, GstFlowReturn * ret)
{ {
GstH264DecoderPrivate *priv = self->priv; GstH264DecoderPrivate *priv = self->priv;
g_assert (ret != NULL);
while (gst_h264_dpb_needs_bump (priv->dpb, current_picture, bump_level)) { while (gst_h264_dpb_needs_bump (priv->dpb, current_picture, bump_level)) {
GstH264Picture *to_output; GstH264Picture *to_output;
@ -916,11 +927,11 @@ _bump_dpb (GstH264Decoder * self, GstH264DpbBumpMode bump_level,
break; break;
} }
gst_h264_decoder_do_output_picture (self, to_output); gst_h264_decoder_do_output_picture (self, to_output, ret);
} }
} }
static gboolean static GstFlowReturn
gst_h264_decoder_handle_frame_num_gap (GstH264Decoder * self, gint frame_num) gst_h264_decoder_handle_frame_num_gap (GstH264Decoder * self, gint frame_num)
{ {
GstH264DecoderPrivate *priv = self->priv; GstH264DecoderPrivate *priv = self->priv;
@ -929,25 +940,25 @@ gst_h264_decoder_handle_frame_num_gap (GstH264Decoder * self, gint frame_num)
if (!sps) { if (!sps) {
GST_ERROR_OBJECT (self, "No active sps"); GST_ERROR_OBJECT (self, "No active sps");
return FALSE; return GST_FLOW_ERROR;
} }
if (priv->prev_ref_frame_num == frame_num) { if (priv->prev_ref_frame_num == frame_num) {
GST_TRACE_OBJECT (self, GST_TRACE_OBJECT (self,
"frame_num == PrevRefFrameNum (%d), not a gap", frame_num); "frame_num == PrevRefFrameNum (%d), not a gap", frame_num);
return TRUE; return GST_FLOW_OK;
} }
if (((priv->prev_ref_frame_num + 1) % priv->max_frame_num) == frame_num) { if (((priv->prev_ref_frame_num + 1) % priv->max_frame_num) == frame_num) {
GST_TRACE_OBJECT (self, GST_TRACE_OBJECT (self,
"frame_num == (PrevRefFrameNum + 1) %% MaxFrameNum (%d), not a gap", "frame_num == (PrevRefFrameNum + 1) %% MaxFrameNum (%d), not a gap",
frame_num); frame_num);
return TRUE; return GST_FLOW_OK;
} }
if (gst_h264_dpb_get_size (priv->dpb) == 0) { if (gst_h264_dpb_get_size (priv->dpb) == 0) {
GST_TRACE_OBJECT (self, "DPB is empty, not a gap"); GST_TRACE_OBJECT (self, "DPB is empty, not a gap");
return TRUE; return GST_FLOW_OK;
} }
if (!sps->gaps_in_frame_num_value_allowed_flag) { if (!sps->gaps_in_frame_num_value_allowed_flag) {
@ -956,7 +967,7 @@ gst_h264_decoder_handle_frame_num_gap (GstH264Decoder * self, gint frame_num)
GST_WARNING_OBJECT (self, "Invalid frame num %d, maybe frame drop", GST_WARNING_OBJECT (self, "Invalid frame num %d, maybe frame drop",
frame_num); frame_num);
return TRUE; return GST_FLOW_OK;
} }
GST_DEBUG_OBJECT (self, "Handling frame num gap %d -> %d (MaxFrameNum: %d)", GST_DEBUG_OBJECT (self, "Handling frame num gap %d -> %d (MaxFrameNum: %d)",
@ -967,10 +978,11 @@ gst_h264_decoder_handle_frame_num_gap (GstH264Decoder * self, gint frame_num)
(priv->prev_ref_frame_num + 1) % priv->max_frame_num; (priv->prev_ref_frame_num + 1) % priv->max_frame_num;
while (unused_short_term_frame_num != frame_num) { while (unused_short_term_frame_num != frame_num) {
GstH264Picture *picture = gst_h264_picture_new (); GstH264Picture *picture = gst_h264_picture_new ();
GstFlowReturn ret = GST_FLOW_OK;
if (!gst_h264_decoder_init_gap_picture (self, picture, if (!gst_h264_decoder_init_gap_picture (self, picture,
unused_short_term_frame_num)) unused_short_term_frame_num))
return FALSE; return GST_FLOW_ERROR;
gst_h264_decoder_update_pic_nums (self, picture, gst_h264_decoder_update_pic_nums (self, picture,
unused_short_term_frame_num); unused_short_term_frame_num);
@ -979,12 +991,14 @@ gst_h264_decoder_handle_frame_num_gap (GstH264Decoder * self, gint frame_num)
if (!gst_h264_decoder_sliding_window_picture_marking (self, picture)) { if (!gst_h264_decoder_sliding_window_picture_marking (self, picture)) {
GST_ERROR_OBJECT (self, GST_ERROR_OBJECT (self,
"Couldn't perform sliding window picture marking"); "Couldn't perform sliding window picture marking");
return FALSE; return GST_FLOW_ERROR;
} }
gst_h264_dpb_delete_unused (priv->dpb); gst_h264_dpb_delete_unused (priv->dpb);
_bump_dpb (self, GST_H264_DPB_BUMP_NORMAL_LATENCY, picture); _bump_dpb (self, GST_H264_DPB_BUMP_NORMAL_LATENCY, picture, &ret);
if (ret != GST_FLOW_OK)
return ret;
/* the picture is short term ref, add to DPB. */ /* the picture is short term ref, add to DPB. */
if (gst_h264_dpb_get_interlaced (priv->dpb)) { if (gst_h264_dpb_get_interlaced (priv->dpb)) {
@ -1001,7 +1015,7 @@ gst_h264_decoder_handle_frame_num_gap (GstH264Decoder * self, gint frame_num)
unused_short_term_frame_num %= priv->max_frame_num; unused_short_term_frame_num %= priv->max_frame_num;
} }
return TRUE; return GST_FLOW_OK;
} }
static gboolean static gboolean
@ -1029,14 +1043,14 @@ gst_h264_decoder_init_current_picture (GstH264Decoder * self)
return TRUE; return TRUE;
} }
static gboolean static GstFlowReturn
gst_h264_decoder_start_current_picture (GstH264Decoder * self) gst_h264_decoder_start_current_picture (GstH264Decoder * self)
{ {
GstH264DecoderClass *klass; GstH264DecoderClass *klass;
GstH264DecoderPrivate *priv = self->priv; GstH264DecoderPrivate *priv = self->priv;
const GstH264SPS *sps; const GstH264SPS *sps;
gint frame_num; gint frame_num;
gboolean ret = TRUE; GstFlowReturn ret = GST_FLOW_OK;
GstH264Picture *current_picture; GstH264Picture *current_picture;
g_assert (priv->current_picture != NULL); g_assert (priv->current_picture != NULL);
@ -1050,18 +1064,21 @@ gst_h264_decoder_start_current_picture (GstH264Decoder * self)
if (priv->current_slice.nalu.idr_pic_flag) if (priv->current_slice.nalu.idr_pic_flag)
priv->prev_ref_frame_num = 0; priv->prev_ref_frame_num = 0;
if (!gst_h264_decoder_handle_frame_num_gap (self, frame_num)) ret = gst_h264_decoder_handle_frame_num_gap (self, frame_num);
return FALSE; if (ret != GST_FLOW_OK)
return ret;
if (!gst_h264_decoder_init_current_picture (self)) if (!gst_h264_decoder_init_current_picture (self))
return FALSE; return GST_FLOW_ERROR;
current_picture = priv->current_picture; current_picture = priv->current_picture;
/* If the new picture is an IDR, flush DPB */ /* If the new picture is an IDR, flush DPB */
if (current_picture->idr) { if (current_picture->idr) {
if (!current_picture->dec_ref_pic_marking.no_output_of_prior_pics_flag) { if (!current_picture->dec_ref_pic_marking.no_output_of_prior_pics_flag) {
gst_h264_decoder_drain_internal (self); ret = gst_h264_decoder_drain_internal (self);
if (ret != GST_FLOW_OK)
return ret;
} else { } else {
/* C.4.4 Removal of pictures from the DPB before possible insertion /* C.4.4 Removal of pictures from the DPB before possible insertion
* of the current picture * of the current picture
@ -1081,16 +1098,17 @@ gst_h264_decoder_start_current_picture (GstH264Decoder * self)
gst_h264_decoder_prepare_ref_pic_lists (self, current_picture); gst_h264_decoder_prepare_ref_pic_lists (self, current_picture);
klass = GST_H264_DECODER_GET_CLASS (self); klass = GST_H264_DECODER_GET_CLASS (self);
if (klass->start_picture) if (klass->start_picture) {
ret = klass->start_picture (self, priv->current_picture, ret = klass->start_picture (self, priv->current_picture,
&priv->current_slice, priv->dpb); &priv->current_slice, priv->dpb);
if (!ret) { if (ret != GST_FLOW_OK) {
GST_ERROR_OBJECT (self, "subclass does not want to start picture"); GST_WARNING_OBJECT (self, "subclass does not want to start picture");
return FALSE; return ret;
}
} }
return TRUE; return GST_FLOW_OK;
} }
static GstH264Picture * static GstH264Picture *
@ -1107,13 +1125,17 @@ gst_h264_decoder_new_field_picture (GstH264Decoder * self,
new_picture = gst_h264_picture_new (); new_picture = gst_h264_picture_new ();
/* don't confuse subclass by non-existing picture */ /* don't confuse subclass by non-existing picture */
if (!picture->nonexisting && if (!picture->nonexisting) {
!klass->new_field_picture (self, picture, new_picture)) { GstFlowReturn ret;
GST_ERROR_OBJECT (self, "Subclass couldn't handle new field picture");
ret = klass->new_field_picture (self, picture, new_picture);
if (ret != GST_FLOW_OK) {
GST_WARNING_OBJECT (self, "Subclass couldn't handle new field picture");
gst_h264_picture_unref (new_picture); gst_h264_picture_unref (new_picture);
return NULL; return NULL;
} }
}
new_picture->other_field = picture; new_picture->other_field = picture;
new_picture->second_field = TRUE; new_picture->second_field = TRUE;
@ -1200,11 +1222,12 @@ error:
return FALSE; return FALSE;
} }
static gboolean static GstFlowReturn
gst_h264_decoder_parse_slice (GstH264Decoder * self, GstH264NalUnit * nalu) gst_h264_decoder_parse_slice (GstH264Decoder * self, GstH264NalUnit * nalu)
{ {
GstH264DecoderPrivate *priv = self->priv; GstH264DecoderPrivate *priv = self->priv;
GstH264ParserResult pres = GST_H264_PARSER_OK; GstH264ParserResult pres = GST_H264_PARSER_OK;
GstFlowReturn ret = GST_FLOW_OK;
memset (&priv->current_slice, 0, sizeof (GstH264Slice)); memset (&priv->current_slice, 0, sizeof (GstH264Slice));
@ -1215,13 +1238,13 @@ gst_h264_decoder_parse_slice (GstH264Decoder * self, GstH264NalUnit * nalu)
GST_ERROR_OBJECT (self, "Failed to parse slice header, ret %d", pres); GST_ERROR_OBJECT (self, "Failed to parse slice header, ret %d", pres);
memset (&priv->current_slice, 0, sizeof (GstH264Slice)); memset (&priv->current_slice, 0, sizeof (GstH264Slice));
return FALSE; return GST_FLOW_ERROR;
} }
priv->current_slice.nalu = *nalu; priv->current_slice.nalu = *nalu;
if (!gst_h264_decoder_preprocess_slice (self, &priv->current_slice)) if (!gst_h264_decoder_preprocess_slice (self, &priv->current_slice))
return FALSE; return GST_FLOW_ERROR;
priv->active_pps = priv->current_slice.header.pps; priv->active_pps = priv->current_slice.header.pps;
priv->active_sps = priv->active_pps->sequence; priv->active_sps = priv->active_pps->sequence;
@ -1245,7 +1268,7 @@ gst_h264_decoder_parse_slice (GstH264Decoder * self, GstH264NalUnit * nalu)
if (cur_field != prev_field) { if (cur_field != prev_field) {
GST_LOG_OBJECT (self, GST_LOG_OBJECT (self,
"Found new field picture, finishing the first field picture"); "Found new field picture, finishing the first field picture");
gst_h264_decoder_finish_current_picture (self); gst_h264_decoder_finish_current_picture (self, &ret);
} }
} }
@ -1260,7 +1283,7 @@ gst_h264_decoder_parse_slice (GstH264Decoder * self, GstH264NalUnit * nalu)
if (!gst_h264_decoder_find_first_field_picture (self, if (!gst_h264_decoder_find_first_field_picture (self,
&priv->current_slice, &first_field)) { &priv->current_slice, &first_field)) {
GST_ERROR_OBJECT (self, "Couldn't find or determine first picture"); GST_ERROR_OBJECT (self, "Couldn't find or determine first picture");
return FALSE; return GST_FLOW_ERROR;
} }
if (first_field) { if (first_field) {
@ -1269,7 +1292,7 @@ gst_h264_decoder_parse_slice (GstH264Decoder * self, GstH264NalUnit * nalu)
if (!picture) { if (!picture) {
GST_ERROR_OBJECT (self, "Couldn't duplicate the first field picture"); GST_ERROR_OBJECT (self, "Couldn't duplicate the first field picture");
return FALSE; return GST_FLOW_ERROR;
} }
} else { } else {
picture = gst_h264_picture_new (); picture = gst_h264_picture_new ();
@ -1277,10 +1300,11 @@ gst_h264_decoder_parse_slice (GstH264Decoder * self, GstH264NalUnit * nalu)
if (klass->new_picture) if (klass->new_picture)
ret = klass->new_picture (self, priv->current_frame, picture); ret = klass->new_picture (self, priv->current_frame, picture);
if (!ret) { if (ret != GST_FLOW_OK) {
GST_ERROR_OBJECT (self, "subclass does not want accept new picture"); GST_WARNING_OBJECT (self, "subclass does not want accept new picture");
priv->current_picture = NULL;
gst_h264_picture_unref (picture); gst_h264_picture_unref (picture);
return FALSE; return ret;
} }
} }
@ -1288,19 +1312,20 @@ gst_h264_decoder_parse_slice (GstH264Decoder * self, GstH264NalUnit * nalu)
picture->system_frame_number = priv->current_frame->system_frame_number; picture->system_frame_number = priv->current_frame->system_frame_number;
priv->current_picture = picture; priv->current_picture = picture;
if (!gst_h264_decoder_start_current_picture (self)) { ret = gst_h264_decoder_start_current_picture (self);
GST_ERROR_OBJECT (self, "start picture failed"); if (ret != GST_FLOW_OK) {
return FALSE; GST_WARNING_OBJECT (self, "start picture failed");
return ret;
} }
} }
return gst_h264_decoder_decode_slice (self); return gst_h264_decoder_decode_slice (self);
} }
static gboolean static GstFlowReturn
gst_h264_decoder_decode_nal (GstH264Decoder * self, GstH264NalUnit * nalu) gst_h264_decoder_decode_nal (GstH264Decoder * self, GstH264NalUnit * nalu)
{ {
gboolean ret = TRUE; GstFlowReturn ret = GST_FLOW_OK;
GST_LOG_OBJECT (self, "Parsed nal type: %d, offset %d, size %d", GST_LOG_OBJECT (self, "Parsed nal type: %d, offset %d, size %d",
nalu->type, nalu->offset, nalu->size); nalu->type, nalu->offset, nalu->size);
@ -1443,7 +1468,8 @@ gst_h264_decoder_set_format (GstVideoDecoder * decoder,
GstMapInfo map; GstMapInfo map;
gst_buffer_map (priv->codec_data, &map, GST_MAP_READ); gst_buffer_map (priv->codec_data, &map, GST_MAP_READ);
if (!gst_h264_decoder_parse_codec_data (self, map.data, map.size)) { if (gst_h264_decoder_parse_codec_data (self, map.data, map.size) !=
GST_FLOW_OK) {
/* keep going without error. /* keep going without error.
* Probably inband SPS/PPS might be valid data */ * Probably inband SPS/PPS might be valid data */
GST_WARNING_OBJECT (self, "Failed to handle codec data"); GST_WARNING_OBJECT (self, "Failed to handle codec data");
@ -1742,29 +1768,35 @@ gst_h264_decoder_calculate_poc (GstH264Decoder * self, GstH264Picture * picture)
} }
static void static void
gst_h264_decoder_drain_output_queue (GstH264Decoder * self, guint num) gst_h264_decoder_drain_output_queue (GstH264Decoder * self, guint num,
GstFlowReturn * ret)
{ {
GstH264DecoderPrivate *priv = self->priv; GstH264DecoderPrivate *priv = self->priv;
GstH264DecoderClass *klass = GST_H264_DECODER_GET_CLASS (self); GstH264DecoderClass *klass = GST_H264_DECODER_GET_CLASS (self);
g_assert (klass->output_picture); g_assert (klass->output_picture);
g_assert (ret != NULL);
while (gst_queue_array_get_length (priv->output_queue) > num) { while (gst_queue_array_get_length (priv->output_queue) > num) {
GstH264DecoderOutputFrame *output_frame = (GstH264DecoderOutputFrame *) GstH264DecoderOutputFrame *output_frame = (GstH264DecoderOutputFrame *)
gst_queue_array_pop_head_struct (priv->output_queue); gst_queue_array_pop_head_struct (priv->output_queue);
priv->last_ret = GstFlowReturn flow_ret = klass->output_picture (self, output_frame->frame,
klass->output_picture (self, output_frame->frame,
output_frame->picture); output_frame->picture);
UPDATE_FLOW_RETURN (ret, flow_ret);
} }
} }
static void static void
gst_h264_decoder_do_output_picture (GstH264Decoder * self, gst_h264_decoder_do_output_picture (GstH264Decoder * self,
GstH264Picture * picture) GstH264Picture * picture, GstFlowReturn * ret)
{ {
GstH264DecoderPrivate *priv = self->priv; GstH264DecoderPrivate *priv = self->priv;
GstVideoCodecFrame *frame = NULL; GstVideoCodecFrame *frame = NULL;
GstH264DecoderOutputFrame output_frame; GstH264DecoderOutputFrame output_frame;
GstFlowReturn flow_ret = GST_FLOW_OK;
g_assert (ret != NULL);
GST_LOG_OBJECT (self, "Outputting picture %p (frame_num %d, poc %d)", GST_LOG_OBJECT (self, "Outputting picture %p (frame_num %d, poc %d)",
picture, picture->frame_num, picture->pic_order_cnt); picture, picture->frame_num, picture->pic_order_cnt);
@ -1784,7 +1816,8 @@ gst_h264_decoder_do_output_picture (GstH264Decoder * self,
GST_ERROR_OBJECT (self, GST_ERROR_OBJECT (self,
"No available codec frame with frame number %d", "No available codec frame with frame number %d",
picture->system_frame_number); picture->system_frame_number);
priv->last_ret = GST_FLOW_ERROR; UPDATE_FLOW_RETURN (ret, GST_FLOW_ERROR);
gst_h264_picture_unref (picture); gst_h264_picture_unref (picture);
return; return;
@ -1795,23 +1828,27 @@ gst_h264_decoder_do_output_picture (GstH264Decoder * self,
output_frame.self = self; output_frame.self = self;
gst_queue_array_push_tail_struct (priv->output_queue, &output_frame); gst_queue_array_push_tail_struct (priv->output_queue, &output_frame);
gst_h264_decoder_drain_output_queue (self, priv->preferred_output_delay); gst_h264_decoder_drain_output_queue (self, priv->preferred_output_delay,
&flow_ret);
UPDATE_FLOW_RETURN (ret, flow_ret);
} }
static gboolean static void
gst_h264_decoder_finish_current_picture (GstH264Decoder * self) gst_h264_decoder_finish_current_picture (GstH264Decoder * self,
GstFlowReturn * ret)
{ {
GstH264DecoderPrivate *priv = self->priv; GstH264DecoderPrivate *priv = self->priv;
GstH264DecoderClass *klass; GstH264DecoderClass *klass;
gboolean ret = TRUE; GstFlowReturn flow_ret = GST_FLOW_OK;
if (!priv->current_picture) if (!priv->current_picture)
return TRUE; return;
klass = GST_H264_DECODER_GET_CLASS (self); klass = GST_H264_DECODER_GET_CLASS (self);
if (klass->end_picture) { if (klass->end_picture) {
if (!klass->end_picture (self, priv->current_picture)) { flow_ret = klass->end_picture (self, priv->current_picture);
if (flow_ret != GST_FLOW_OK) {
GST_WARNING_OBJECT (self, GST_WARNING_OBJECT (self,
"end picture failed, marking picture %p non-existing " "end picture failed, marking picture %p non-existing "
"(frame_num %d, poc %d)", priv->current_picture, "(frame_num %d, poc %d)", priv->current_picture,
@ -1829,15 +1866,10 @@ gst_h264_decoder_finish_current_picture (GstH264Decoder * self)
gst_h264_decoder_clear_ref_pic_lists (self); gst_h264_decoder_clear_ref_pic_lists (self);
/* finish picture takes ownership of the picture */ /* finish picture takes ownership of the picture */
ret = gst_h264_decoder_finish_picture (self, priv->current_picture); gst_h264_decoder_finish_picture (self, priv->current_picture, &flow_ret);
priv->current_picture = NULL; priv->current_picture = NULL;
if (!ret) { UPDATE_FLOW_RETURN (ret, flow_ret);
GST_ERROR_OBJECT (self, "Failed to finish picture");
return FALSE;
}
return TRUE;
} }
static gint static gint
@ -1852,23 +1884,24 @@ poc_desc_compare (const GstH264Picture ** a, const GstH264Picture ** b)
return (*b)->pic_order_cnt - (*a)->pic_order_cnt; return (*b)->pic_order_cnt - (*a)->pic_order_cnt;
} }
static gboolean static GstFlowReturn
gst_h264_decoder_drain_internal (GstH264Decoder * self) gst_h264_decoder_drain_internal (GstH264Decoder * self)
{ {
GstH264DecoderPrivate *priv = self->priv; GstH264DecoderPrivate *priv = self->priv;
GstH264Picture *picture; GstH264Picture *picture;
GstFlowReturn ret = GST_FLOW_OK;
while ((picture = gst_h264_dpb_bump (priv->dpb, TRUE)) != NULL) { while ((picture = gst_h264_dpb_bump (priv->dpb, TRUE)) != NULL) {
gst_h264_decoder_do_output_picture (self, picture); gst_h264_decoder_do_output_picture (self, picture, &ret);
} }
gst_h264_decoder_drain_output_queue (self, 0); gst_h264_decoder_drain_output_queue (self, 0, &ret);
gst_h264_picture_clear (&priv->last_field); gst_h264_picture_clear (&priv->last_field);
gst_h264_dpb_clear (priv->dpb); gst_h264_dpb_clear (priv->dpb);
priv->last_output_poc = G_MININT32; priv->last_output_poc = G_MININT32;
return TRUE; return ret;
} }
static gboolean static gboolean
@ -2052,13 +2085,12 @@ get_bump_level (GstH264Decoder * self)
return GST_H264_DPB_BUMP_NORMAL_LATENCY; return GST_H264_DPB_BUMP_NORMAL_LATENCY;
} }
static gboolean static void
gst_h264_decoder_finish_picture (GstH264Decoder * self, gst_h264_decoder_finish_picture (GstH264Decoder * self,
GstH264Picture * picture) GstH264Picture * picture, GstFlowReturn * ret)
{ {
GstVideoDecoder *decoder = GST_VIDEO_DECODER (self); GstVideoDecoder *decoder = GST_VIDEO_DECODER (self);
GstH264DecoderPrivate *priv = self->priv; GstH264DecoderPrivate *priv = self->priv;
gboolean ret = TRUE;
GstH264DpbBumpMode bump_level = get_bump_level (self); GstH264DpbBumpMode bump_level = get_bump_level (self);
/* Finish processing the picture. /* Finish processing the picture.
@ -2095,11 +2127,15 @@ gst_h264_decoder_finish_picture (GstH264Decoder * self,
/* C.4.4 */ /* C.4.4 */
if (picture->mem_mgmt_5) { if (picture->mem_mgmt_5) {
GstFlowReturn drain_ret;
GST_TRACE_OBJECT (self, "Memory management type 5, drain the DPB"); GST_TRACE_OBJECT (self, "Memory management type 5, drain the DPB");
gst_h264_decoder_drain_internal (self);
drain_ret = gst_h264_decoder_drain_internal (self);
UPDATE_FLOW_RETURN (ret, drain_ret);
} }
_bump_dpb (self, bump_level, picture); _bump_dpb (self, bump_level, picture, ret);
/* Add a ref to avoid the case of directly outputed and destroyed. */ /* Add a ref to avoid the case of directly outputed and destroyed. */
gst_h264_picture_ref (picture); gst_h264_picture_ref (picture);
@ -2139,7 +2175,7 @@ gst_h264_decoder_finish_picture (GstH264Decoder * self,
add_picture_to_dpb (self, picture); add_picture_to_dpb (self, picture);
} }
} else { } else {
ret = output_picture_directly (self, picture); output_picture_directly (self, picture, ret);
} }
GST_LOG_OBJECT (self, GST_LOG_OBJECT (self,
@ -2152,9 +2188,7 @@ gst_h264_decoder_finish_picture (GstH264Decoder * self,
/* For the live mode, we try to bump here to avoid waiting /* For the live mode, we try to bump here to avoid waiting
for another decoding circle. */ for another decoding circle. */
if (priv->is_live && priv->compliance != GST_H264_DECODER_COMPLIANCE_STRICT) if (priv->is_live && priv->compliance != GST_H264_DECODER_COMPLIANCE_STRICT)
_bump_dpb (self, bump_level, NULL); _bump_dpb (self, bump_level, NULL, ret);
return ret;
} }
static gboolean static gboolean
@ -2337,7 +2371,7 @@ gst_h264_decoder_set_latency (GstH264Decoder * self, const GstH264SPS * sps,
gst_video_decoder_set_latency (GST_VIDEO_DECODER (self), min, max); gst_video_decoder_set_latency (GST_VIDEO_DECODER (self), min, max);
} }
static gboolean static GstFlowReturn
gst_h264_decoder_process_sps (GstH264Decoder * self, GstH264SPS * sps) gst_h264_decoder_process_sps (GstH264Decoder * self, GstH264SPS * sps)
{ {
GstH264DecoderClass *klass = GST_H264_DECODER_GET_CLASS (self); GstH264DecoderClass *klass = GST_H264_DECODER_GET_CLASS (self);
@ -2350,12 +2384,13 @@ gst_h264_decoder_process_sps (GstH264Decoder * self, GstH264SPS * sps)
gint prev_max_dpb_size; gint prev_max_dpb_size;
gboolean prev_interlaced; gboolean prev_interlaced;
gboolean interlaced; gboolean interlaced;
GstFlowReturn ret = GST_FLOW_OK;
if (sps->frame_mbs_only_flag == 0) { if (sps->frame_mbs_only_flag == 0) {
if (!klass->new_field_picture) { if (!klass->new_field_picture) {
GST_FIXME_OBJECT (self, GST_FIXME_OBJECT (self,
"frame_mbs_only_flag != 1 not supported by subclass"); "frame_mbs_only_flag != 1 not supported by subclass");
return FALSE; return GST_FLOW_NOT_NEGOTIATED;
} }
if (sps->mb_adaptive_frame_field_flag) { if (sps->mb_adaptive_frame_field_flag) {
@ -2381,7 +2416,7 @@ gst_h264_decoder_process_sps (GstH264Decoder * self, GstH264SPS * sps)
max_dpb_mbs = h264_level_to_max_dpb_mbs ((GstH264DecoderLevel) level); max_dpb_mbs = h264_level_to_max_dpb_mbs ((GstH264DecoderLevel) level);
if (!max_dpb_mbs) if (!max_dpb_mbs)
return FALSE; return GST_FLOW_ERROR;
width_mb = sps->width / 16; width_mb = sps->width / 16;
height_mb = sps->height / 16; height_mb = sps->height / 16;
@ -2407,7 +2442,7 @@ gst_h264_decoder_process_sps (GstH264Decoder * self, GstH264SPS * sps)
} }
/* Safety, so that subclass don't need bound checking */ /* Safety, so that subclass don't need bound checking */
g_return_val_if_fail (max_dpb_size <= GST_H264_DPB_MAX_SIZE, FALSE); g_return_val_if_fail (max_dpb_size <= GST_H264_DPB_MAX_SIZE, GST_FLOW_ERROR);
prev_max_dpb_size = gst_h264_dpb_get_max_num_frames (priv->dpb); prev_max_dpb_size = gst_h264_dpb_get_max_num_frames (priv->dpb);
prev_interlaced = gst_h264_dpb_get_interlaced (priv->dpb); prev_interlaced = gst_h264_dpb_get_interlaced (priv->dpb);
@ -2421,8 +2456,9 @@ gst_h264_decoder_process_sps (GstH264Decoder * self, GstH264SPS * sps)
priv->width, priv->height, sps->width, sps->height, priv->width, priv->height, sps->width, sps->height,
prev_max_dpb_size, max_dpb_size, prev_interlaced, interlaced); prev_max_dpb_size, max_dpb_size, prev_interlaced, interlaced);
if (gst_h264_decoder_drain (GST_VIDEO_DECODER (self)) != GST_FLOW_OK) ret = gst_h264_decoder_drain (GST_VIDEO_DECODER (self));
return FALSE; if (ret != GST_FLOW_OK)
return ret;
g_assert (klass->new_sequence); g_assert (klass->new_sequence);
@ -2433,10 +2469,11 @@ gst_h264_decoder_process_sps (GstH264Decoder * self, GstH264SPS * sps)
priv->preferred_output_delay = 0; priv->preferred_output_delay = 0;
} }
if (!klass->new_sequence (self, sps, ret = klass->new_sequence (self,
max_dpb_size + priv->preferred_output_delay)) { sps, max_dpb_size + priv->preferred_output_delay);
GST_ERROR_OBJECT (self, "subclass does not want accept new sequence"); if (ret != GST_FLOW_OK) {
return FALSE; GST_WARNING_OBJECT (self, "subclass does not want accept new sequence");
return ret;
} }
priv->profile_idc = sps->profile_idc; priv->profile_idc = sps->profile_idc;
@ -2448,7 +2485,10 @@ gst_h264_decoder_process_sps (GstH264Decoder * self, GstH264SPS * sps)
gst_h264_dpb_set_interlaced (priv->dpb, interlaced); gst_h264_dpb_set_interlaced (priv->dpb, interlaced);
} }
return gst_h264_decoder_update_max_num_reorder_frames (self, sps); if (!gst_h264_decoder_update_max_num_reorder_frames (self, sps))
return GST_FLOW_ERROR;
return GST_FLOW_OK;
} }
static gboolean static gboolean


@ -120,7 +120,7 @@ struct _GstH264DecoderClass
* *
* Notifies subclass of SPS update * Notifies subclass of SPS update
*/ */
gboolean (*new_sequence) (GstH264Decoder * decoder, GstFlowReturn (*new_sequence) (GstH264Decoder * decoder,
const GstH264SPS * sps, const GstH264SPS * sps,
gint max_dpb_size); gint max_dpb_size);
@ -134,7 +134,7 @@ struct _GstH264DecoderClass
* Subclass can set implementation specific user data * Subclass can set implementation specific user data
* on the #GstH264Picture via gst_h264_picture_set_user_data() * on the #GstH264Picture via gst_h264_picture_set_user_data()
*/ */
gboolean (*new_picture) (GstH264Decoder * decoder, GstFlowReturn (*new_picture) (GstH264Decoder * decoder,
GstVideoCodecFrame * frame, GstVideoCodecFrame * frame,
GstH264Picture * picture); GstH264Picture * picture);
@ -150,7 +150,7 @@ struct _GstH264DecoderClass
* *
* Since: 1.20 * Since: 1.20
*/ */
gboolean (*new_field_picture) (GstH264Decoder * decoder, GstFlowReturn (*new_field_picture) (GstH264Decoder * decoder,
const GstH264Picture * first_field, const GstH264Picture * first_field,
GstH264Picture * second_field); GstH264Picture * second_field);
@ -164,7 +164,7 @@ struct _GstH264DecoderClass
* Optional. Called per one #GstH264Picture to notify subclass to prepare * Optional. Called per one #GstH264Picture to notify subclass to prepare
* decoding process for the #GstH264Picture * decoding process for the #GstH264Picture
*/ */
gboolean (*start_picture) (GstH264Decoder * decoder, GstFlowReturn (*start_picture) (GstH264Decoder * decoder,
GstH264Picture * picture, GstH264Picture * picture,
GstH264Slice * slice, GstH264Slice * slice,
GstH264Dpb * dpb); GstH264Dpb * dpb);
@ -189,7 +189,7 @@ struct _GstH264DecoderClass
* need to retrive the other field (i.e., the second field) of the picture * need to retrive the other field (i.e., the second field) of the picture
* if needed. * if needed.
*/ */
gboolean (*decode_slice) (GstH264Decoder * decoder, GstFlowReturn (*decode_slice) (GstH264Decoder * decoder,
GstH264Picture * picture, GstH264Picture * picture,
GstH264Slice * slice, GstH264Slice * slice,
GArray * ref_pic_list0, GArray * ref_pic_list0,
@ -203,7 +203,7 @@ struct _GstH264DecoderClass
* Optional. Called per one #GstH264Picture to notify subclass to finish * Optional. Called per one #GstH264Picture to notify subclass to finish
* decoding process for the #GstH264Picture * decoding process for the #GstH264Picture
*/ */
gboolean (*end_picture) (GstH264Decoder * decoder, GstFlowReturn (*end_picture) (GstH264Decoder * decoder,
GstH264Picture * picture); GstH264Picture * picture);
/** /**


@ -142,18 +142,18 @@ static gboolean gst_d3d11_h264_dec_sink_event (GstVideoDecoder * decoder,
GstEvent * event); GstEvent * event);
/* GstH264Decoder */ /* GstH264Decoder */
static gboolean gst_d3d11_h264_dec_new_sequence (GstH264Decoder * decoder, static GstFlowReturn gst_d3d11_h264_dec_new_sequence (GstH264Decoder * decoder,
const GstH264SPS * sps, gint max_dpb_size); const GstH264SPS * sps, gint max_dpb_size);
static gboolean gst_d3d11_h264_dec_new_picture (GstH264Decoder * decoder, static GstFlowReturn gst_d3d11_h264_dec_new_picture (GstH264Decoder * decoder,
GstVideoCodecFrame * frame, GstH264Picture * picture); GstVideoCodecFrame * frame, GstH264Picture * picture);
static gboolean gst_d3d11_h264_dec_new_field_picture (GstH264Decoder * static GstFlowReturn gst_d3d11_h264_dec_new_field_picture (GstH264Decoder *
decoder, const GstH264Picture * first_field, GstH264Picture * second_field); decoder, const GstH264Picture * first_field, GstH264Picture * second_field);
static gboolean gst_d3d11_h264_dec_start_picture (GstH264Decoder * decoder, static GstFlowReturn gst_d3d11_h264_dec_start_picture (GstH264Decoder * decoder,
GstH264Picture * picture, GstH264Slice * slice, GstH264Dpb * dpb); GstH264Picture * picture, GstH264Slice * slice, GstH264Dpb * dpb);
static gboolean gst_d3d11_h264_dec_decode_slice (GstH264Decoder * decoder, static GstFlowReturn gst_d3d11_h264_dec_decode_slice (GstH264Decoder * decoder,
GstH264Picture * picture, GstH264Slice * slice, GArray * ref_pic_list0, GstH264Picture * picture, GstH264Slice * slice, GArray * ref_pic_list0,
GArray * ref_pic_list1); GArray * ref_pic_list1);
static gboolean gst_d3d11_h264_dec_end_picture (GstH264Decoder * decoder, static GstFlowReturn gst_d3d11_h264_dec_end_picture (GstH264Decoder * decoder,
GstH264Picture * picture); GstH264Picture * picture);
static GstFlowReturn gst_d3d11_h264_dec_output_picture (GstH264Decoder * static GstFlowReturn gst_d3d11_h264_dec_output_picture (GstH264Decoder *
decoder, GstVideoCodecFrame * frame, GstH264Picture * picture); decoder, GstVideoCodecFrame * frame, GstH264Picture * picture);
@ -192,14 +192,14 @@ gst_d3d11_h264_dec_class_init (GstD3D11H264DecClass * klass, gpointer data)
GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_new_picture); GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_new_picture);
h264decoder_class->new_field_picture = h264decoder_class->new_field_picture =
GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_new_field_picture); GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_new_field_picture);
h264decoder_class->output_picture =
GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_output_picture);
h264decoder_class->start_picture = h264decoder_class->start_picture =
GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_start_picture); GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_start_picture);
h264decoder_class->decode_slice = h264decoder_class->decode_slice =
GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_decode_slice); GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_decode_slice);
h264decoder_class->end_picture = h264decoder_class->end_picture =
GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_end_picture); GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_end_picture);
h264decoder_class->output_picture =
GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_output_picture);
} }
static void static void
@ -368,7 +368,7 @@ gst_d3d11_h264_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
return GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (decoder, event); return GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (decoder, event);
} }
static gboolean static GstFlowReturn
gst_d3d11_h264_dec_new_sequence (GstH264Decoder * decoder, gst_d3d11_h264_dec_new_sequence (GstH264Decoder * decoder,
const GstH264SPS * sps, gint max_dpb_size) const GstH264SPS * sps, gint max_dpb_size)
{ {
@ -439,7 +439,7 @@ gst_d3d11_h264_dec_new_sequence (GstH264Decoder * decoder,
if (inner->out_format == GST_VIDEO_FORMAT_UNKNOWN) { if (inner->out_format == GST_VIDEO_FORMAT_UNKNOWN) {
GST_ERROR_OBJECT (self, "Could not support bitdepth/chroma format"); GST_ERROR_OBJECT (self, "Could not support bitdepth/chroma format");
return FALSE; return GST_FLOW_NOT_NEGOTIATED;
} }
gst_video_info_set_format (&info, gst_video_info_set_format (&info,
@ -459,19 +459,19 @@ gst_d3d11_h264_dec_new_sequence (GstH264Decoder * decoder,
/* Additional 4 views margin for zero-copy rendering */ /* Additional 4 views margin for zero-copy rendering */
max_dpb_size + 4)) { max_dpb_size + 4)) {
GST_ERROR_OBJECT (self, "Failed to create decoder"); GST_ERROR_OBJECT (self, "Failed to create decoder");
return FALSE; return GST_FLOW_NOT_NEGOTIATED;
} }
if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (self))) { if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (self))) {
GST_ERROR_OBJECT (self, "Failed to negotiate with downstream"); GST_ERROR_OBJECT (self, "Failed to negotiate with downstream");
return FALSE; return GST_FLOW_NOT_NEGOTIATED;
} }
} }
return TRUE; return GST_FLOW_OK;
} }
static gboolean static GstFlowReturn
gst_d3d11_h264_dec_new_picture (GstH264Decoder * decoder, gst_d3d11_h264_dec_new_picture (GstH264Decoder * decoder,
GstVideoCodecFrame * frame, GstH264Picture * picture) GstVideoCodecFrame * frame, GstH264Picture * picture)
{ {
@ -483,7 +483,7 @@ gst_d3d11_h264_dec_new_picture (GstH264Decoder * decoder,
GST_VIDEO_DECODER (decoder)); GST_VIDEO_DECODER (decoder));
if (!view_buffer) { if (!view_buffer) {
GST_DEBUG_OBJECT (self, "No available output view buffer"); GST_DEBUG_OBJECT (self, "No available output view buffer");
return FALSE; return GST_FLOW_FLUSHING;
} }
GST_LOG_OBJECT (self, "New output view buffer %" GST_PTR_FORMAT, view_buffer); GST_LOG_OBJECT (self, "New output view buffer %" GST_PTR_FORMAT, view_buffer);
@ -493,10 +493,10 @@ gst_d3d11_h264_dec_new_picture (GstH264Decoder * decoder,
GST_LOG_OBJECT (self, "New h264picture %p", picture); GST_LOG_OBJECT (self, "New h264picture %p", picture);
return TRUE; return GST_FLOW_OK;
} }
static gboolean static GstFlowReturn
gst_d3d11_h264_dec_new_field_picture (GstH264Decoder * decoder, gst_d3d11_h264_dec_new_field_picture (GstH264Decoder * decoder,
const GstH264Picture * first_field, GstH264Picture * second_field) const GstH264Picture * first_field, GstH264Picture * second_field)
{ {
@ -508,7 +508,7 @@ gst_d3d11_h264_dec_new_field_picture (GstH264Decoder * decoder,
if (!view_buffer) { if (!view_buffer) {
GST_WARNING_OBJECT (self, "First picture does not have output view buffer"); GST_WARNING_OBJECT (self, "First picture does not have output view buffer");
return TRUE; return GST_FLOW_OK;
} }
GST_LOG_OBJECT (self, "New field picture with buffer %" GST_PTR_FORMAT, GST_LOG_OBJECT (self, "New field picture with buffer %" GST_PTR_FORMAT,
@ -517,7 +517,7 @@ gst_d3d11_h264_dec_new_field_picture (GstH264Decoder * decoder,
gst_h264_picture_set_user_data (second_field, gst_h264_picture_set_user_data (second_field,
gst_buffer_ref (view_buffer), (GDestroyNotify) gst_buffer_unref); gst_buffer_ref (view_buffer), (GDestroyNotify) gst_buffer_unref);
return TRUE; return GST_FLOW_OK;
} }
static ID3D11VideoDecoderOutputView * static ID3D11VideoDecoderOutputView *
@ -652,7 +652,7 @@ init_pic_params (DXVA_PicParams_H264 * params)
params->RefFrameList[i].bPicEntry = 0xff; params->RefFrameList[i].bPicEntry = 0xff;
} }
static gboolean static GstFlowReturn
gst_d3d11_h264_dec_start_picture (GstH264Decoder * decoder, gst_d3d11_h264_dec_start_picture (GstH264Decoder * decoder,
GstH264Picture * picture, GstH264Slice * slice, GstH264Dpb * dpb) GstH264Picture * picture, GstH264Slice * slice, GstH264Dpb * dpb)
{ {
@ -672,7 +672,7 @@ gst_d3d11_h264_dec_start_picture (GstH264Decoder * decoder,
&view_id); &view_id);
if (!view) { if (!view) {
GST_ERROR_OBJECT (self, "current picture does not have output view handle"); GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return FALSE; return GST_FLOW_ERROR;
} }
init_pic_params (pic_params); init_pic_params (pic_params);
@ -771,10 +771,10 @@ gst_d3d11_h264_dec_start_picture (GstH264Decoder * decoder,
inner->slice_list.resize (0); inner->slice_list.resize (0);
inner->bitstream_buffer.resize (0); inner->bitstream_buffer.resize (0);
return TRUE; return GST_FLOW_OK;
} }
static gboolean static GstFlowReturn
gst_d3d11_h264_dec_decode_slice (GstH264Decoder * decoder, gst_d3d11_h264_dec_decode_slice (GstH264Decoder * decoder,
GstH264Picture * picture, GstH264Slice * slice, GArray * ref_pic_list0, GstH264Picture * picture, GstH264Slice * slice, GArray * ref_pic_list0,
GArray * ref_pic_list1) GArray * ref_pic_list1)
@ -802,10 +802,10 @@ gst_d3d11_h264_dec_decode_slice (GstH264Decoder * decoder,
memcpy (&inner->bitstream_buffer[0] + pos + start_code_size, memcpy (&inner->bitstream_buffer[0] + pos + start_code_size,
slice->nalu.data + slice->nalu.offset, slice->nalu.size); slice->nalu.data + slice->nalu.offset, slice->nalu.size);
return TRUE; return GST_FLOW_OK;
} }
static gboolean static GstFlowReturn
gst_d3d11_h264_dec_end_picture (GstH264Decoder * decoder, gst_d3d11_h264_dec_end_picture (GstH264Decoder * decoder,
GstH264Picture * picture) GstH264Picture * picture)
{ {
@ -822,14 +822,14 @@ gst_d3d11_h264_dec_end_picture (GstH264Decoder * decoder,
if (inner->bitstream_buffer.empty () || inner->slice_list.empty ()) { if (inner->bitstream_buffer.empty () || inner->slice_list.empty ()) {
GST_ERROR_OBJECT (self, "No bitstream buffer to submit"); GST_ERROR_OBJECT (self, "No bitstream buffer to submit");
return FALSE; return GST_FLOW_ERROR;
} }
view = gst_d3d11_h264_dec_get_output_view_from_picture (self, picture, view = gst_d3d11_h264_dec_get_output_view_from_picture (self, picture,
&view_id); &view_id);
if (!view) { if (!view) {
GST_ERROR_OBJECT (self, "current picture does not have output view handle"); GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return FALSE; return GST_FLOW_ERROR;
} }
memset (&input_args, 0, sizeof (GstD3D11DecodeInputStreamArgs)); memset (&input_args, 0, sizeof (GstD3D11DecodeInputStreamArgs));
@ -859,8 +859,10 @@ gst_d3d11_h264_dec_end_picture (GstH264Decoder * decoder,
input_args.inverse_quantization_matrix = &inner->iq_matrix; input_args.inverse_quantization_matrix = &inner->iq_matrix;
input_args.inverse_quantization_matrix_size = sizeof (DXVA_Qmatrix_H264); input_args.inverse_quantization_matrix_size = sizeof (DXVA_Qmatrix_H264);
return gst_d3d11_decoder_decode_frame (inner->d3d11_decoder, if (!gst_d3d11_decoder_decode_frame (inner->d3d11_decoder, view, &input_args))
view, &input_args); return GST_FLOW_ERROR;
return GST_FLOW_OK;
} }
static GstFlowReturn static GstFlowReturn


@@ -137,20 +137,20 @@ static gboolean gst_nv_h264_dec_src_query (GstVideoDecoder * decoder,
     GstQuery * query);
 /* GstH264Decoder */
-static gboolean gst_nv_h264_dec_new_sequence (GstH264Decoder * decoder,
+static GstFlowReturn gst_nv_h264_dec_new_sequence (GstH264Decoder * decoder,
     const GstH264SPS * sps, gint max_dpb_size);
-static gboolean gst_nv_h264_dec_new_picture (GstH264Decoder * decoder,
+static GstFlowReturn gst_nv_h264_dec_new_picture (GstH264Decoder * decoder,
     GstVideoCodecFrame * frame, GstH264Picture * picture);
-static gboolean gst_nv_h264_dec_new_field_picture (GstH264Decoder *
+static GstFlowReturn gst_nv_h264_dec_new_field_picture (GstH264Decoder *
     decoder, const GstH264Picture * first_field, GstH264Picture * second_field);
 static GstFlowReturn gst_nv_h264_dec_output_picture (GstH264Decoder *
     decoder, GstVideoCodecFrame * frame, GstH264Picture * picture);
-static gboolean gst_nv_h264_dec_start_picture (GstH264Decoder * decoder,
+static GstFlowReturn gst_nv_h264_dec_start_picture (GstH264Decoder * decoder,
     GstH264Picture * picture, GstH264Slice * slice, GstH264Dpb * dpb);
-static gboolean gst_nv_h264_dec_decode_slice (GstH264Decoder * decoder,
+static GstFlowReturn gst_nv_h264_dec_decode_slice (GstH264Decoder * decoder,
     GstH264Picture * picture, GstH264Slice * slice, GArray * ref_pic_list0,
     GArray * ref_pic_list1);
-static gboolean gst_nv_h264_dec_end_picture (GstH264Decoder * decoder,
+static GstFlowReturn gst_nv_h264_dec_end_picture (GstH264Decoder * decoder,
     GstH264Picture * picture);
 static guint
 gst_nv_h264_dec_get_preferred_output_delay (GstH264Decoder * decoder,
@@ -359,7 +359,7 @@ gst_nv_h264_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
   return GST_VIDEO_DECODER_CLASS (parent_class)->src_query (decoder, query);
 }
-static gboolean
+static GstFlowReturn
 gst_nv_h264_dec_new_sequence (GstH264Decoder * decoder, const GstH264SPS * sps,
     gint max_dpb_size)
 {
@@ -434,7 +434,7 @@ gst_nv_h264_dec_new_sequence (GstH264Decoder * decoder, const GstH264SPS * sps,
     if (out_format == GST_VIDEO_FORMAT_UNKNOWN) {
       GST_ERROR_OBJECT (self, "Could not support bitdepth/chroma format");
-      return FALSE;
+      return GST_FLOW_NOT_NEGOTIATED;
     }
     gst_video_info_set_format (&info, out_format, self->width, self->height);
@@ -448,21 +448,21 @@ gst_nv_h264_dec_new_sequence (GstH264Decoder * decoder, const GstH264SPS * sps,
             /* Additional 4 buffers for render delay */
             max_dpb_size + 4)) {
       GST_ERROR_OBJECT (self, "Failed to configure decoder");
-      return FALSE;
+      return GST_FLOW_NOT_NEGOTIATED;
     }
     if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (self))) {
      GST_ERROR_OBJECT (self, "Failed to negotiate with downstream");
-      return FALSE;
+      return GST_FLOW_NOT_NEGOTIATED;
     }
     memset (&self->params, 0, sizeof (CUVIDPICPARAMS));
   }
-  return TRUE;
+  return GST_FLOW_OK;
 }
-static gboolean
+static GstFlowReturn
 gst_nv_h264_dec_new_picture (GstH264Decoder * decoder,
     GstVideoCodecFrame * frame, GstH264Picture * picture)
 {
@@ -472,7 +472,7 @@ gst_nv_h264_dec_new_picture (GstH264Decoder * decoder,
   nv_frame = gst_nv_decoder_new_frame (self->decoder);
   if (!nv_frame) {
     GST_ERROR_OBJECT (self, "No available decoder frame");
-    return FALSE;
+    return GST_FLOW_ERROR;
   }
   GST_LOG_OBJECT (self,
@@ -481,10 +481,10 @@ gst_nv_h264_dec_new_picture (GstH264Decoder * decoder,
   gst_h264_picture_set_user_data (picture,
       nv_frame, (GDestroyNotify) gst_nv_decoder_frame_unref);
-  return TRUE;
+  return GST_FLOW_OK;
 }
-static gboolean
+static GstFlowReturn
 gst_nv_h264_dec_new_field_picture (GstH264Decoder * decoder,
     const GstH264Picture * first_field, GstH264Picture * second_field)
 {
@@ -495,14 +495,14 @@ gst_nv_h264_dec_new_field_picture (GstH264Decoder * decoder,
   if (!nv_frame) {
     GST_ERROR_OBJECT (decoder,
         "No decoder frame in the first picture %p", first_field);
-    return FALSE;
+    return GST_FLOW_ERROR;
   }
   gst_h264_picture_set_user_data (second_field,
       gst_nv_decoder_frame_ref (nv_frame),
       (GDestroyNotify) gst_nv_decoder_frame_unref);
-  return TRUE;
+  return GST_FLOW_OK;
 }
 static GstFlowReturn
@@ -714,7 +714,7 @@ gst_nv_h264_dec_fill_dpb (GstNvH264Dec * self, GstH264Picture * ref,
   }
 }
-static gboolean
+static GstFlowReturn
 gst_nv_h264_dec_start_picture (GstH264Decoder * decoder,
     GstH264Picture * picture, GstH264Slice * slice, GstH264Dpb * dpb)
 {
@@ -736,7 +736,7 @@ gst_nv_h264_dec_start_picture (GstH264Decoder * decoder,
   if (!frame) {
     GST_ERROR_OBJECT (self,
         "Couldn't get decoder frame frame picture %p", picture);
-    return FALSE;
+    return GST_FLOW_ERROR;
   }
   gst_nv_h264_dec_reset_bitstream_params (self);
@@ -806,10 +806,10 @@ gst_nv_h264_dec_start_picture (GstH264Decoder * decoder,
   for (i = ref_frame_idx; i < 16; i++)
     h264_params->dpb[i].PicIdx = -1;
-  return TRUE;
+  return GST_FLOW_OK;
 }
-static gboolean
+static GstFlowReturn
 gst_nv_h264_dec_decode_slice (GstH264Decoder * decoder,
     GstH264Picture * picture, GstH264Slice * slice, GArray * ref_pic_list0,
     GArray * ref_pic_list1)
@@ -847,10 +847,10 @@ gst_nv_h264_dec_decode_slice (GstH264Decoder * decoder,
       !GST_H264_IS_SI_SLICE (&slice->header))
     self->params.intra_pic_flag = 0;
-  return TRUE;
+  return GST_FLOW_OK;
 }
-static gboolean
+static GstFlowReturn
 gst_nv_h264_dec_end_picture (GstH264Decoder * decoder, GstH264Picture * picture)
 {
   GstNvH264Dec *self = GST_NV_H264_DEC (decoder);
@@ -867,10 +867,12 @@ gst_nv_h264_dec_end_picture (GstH264Decoder * decoder, GstH264Picture * picture)
   ret = gst_nv_decoder_decode_picture (self->decoder, &self->params);
-  if (!ret)
+  if (!ret) {
     GST_ERROR_OBJECT (self, "Failed to decode picture");
+    return GST_FLOW_ERROR;
+  }
-  return ret;
+  return GST_FLOW_OK;
 }
 static guint
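
The nvdec hunks make use of the two failure classes the new return type allows: configuration and negotiation problems in new_sequence() become GST_FLOW_NOT_NEGOTIATED, while allocation and decoding failures stay GST_FLOW_ERROR. Below is a minimal sketch of that split; my_h264_dec_configure_backend() is a hypothetical placeholder, not an API from this commit.

#include <gst/codecs/gsth264decoder.h>
#include <gst/video/video.h>

/* Sketch only: report negotiation problems as GST_FLOW_NOT_NEGOTIATED so the
 * base class can forward the exact reason, instead of a bare FALSE. */
static GstFlowReturn
my_h264_dec_new_sequence (GstH264Decoder * decoder, const GstH264SPS * sps,
    gint max_dpb_size)
{
  /* my_h264_dec_configure_backend() stands in for backend setup such as
   * gst_nv_decoder_configure() in the hunk above. */
  if (!my_h264_dec_configure_backend (decoder, sps, max_dpb_size)) {
    GST_ERROR_OBJECT (decoder, "Failed to configure decoder");
    return GST_FLOW_NOT_NEGOTIATED;
  }

  if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (decoder))) {
    GST_ERROR_OBJECT (decoder, "Failed to negotiate with downstream");
    return GST_FLOW_NOT_NEGOTIATED;
  }

  return GST_FLOW_OK;
}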

View File

@@ -808,7 +808,7 @@ gst_v4l2_codec_h264_dec_fill_references (GstV4l2CodecH264Dec * self,
   }
 }
-static gboolean
+static GstFlowReturn
 gst_v4l2_codec_h264_dec_new_sequence (GstH264Decoder * decoder,
     const GstH264SPS * sps, gint max_dpb_size)
 {
@@ -873,7 +873,7 @@ gst_v4l2_codec_h264_dec_new_sequence (GstH264Decoder * decoder,
     self->need_negotiation = TRUE;
     if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (self))) {
       GST_ERROR_OBJECT (self, "Failed to negotiate with downstream");
-      return FALSE;
+      return GST_FLOW_NOT_NEGOTIATED;
     }
   }
@@ -898,7 +898,7 @@ gst_v4l2_codec_h264_dec_new_sequence (GstH264Decoder * decoder,
     self->copy_frames = FALSE;
   }
-  return TRUE;
+  return GST_FLOW_OK;
 }
 static gboolean
@@ -929,7 +929,7 @@ done:
   return TRUE;
 }
-static gboolean
+static GstFlowReturn
 gst_v4l2_codec_h264_dec_start_picture (GstH264Decoder * decoder,
     GstH264Picture * picture, GstH264Slice * slice, GstH264Dpb * dpb)
 {
@@ -937,10 +937,10 @@ gst_v4l2_codec_h264_dec_start_picture (GstH264Decoder * decoder,
   /* FIXME base class should not call us if negotiation failed */
   if (!self->sink_allocator)
-    return FALSE;
+    return GST_FLOW_NOT_NEGOTIATED;
   if (!gst_v4l2_codec_h264_dec_ensure_bitstream (self))
-    return FALSE;
+    return GST_FLOW_ERROR;
   /*
    * Scaling matrix is present if there's one provided
@@ -961,7 +961,7 @@ gst_v4l2_codec_h264_dec_start_picture (GstH264Decoder * decoder,
   self->first_slice = TRUE;
-  return TRUE;
+  return GST_FLOW_OK;
 }
 static gboolean
@@ -1219,7 +1219,7 @@ done:
   return ret;
 }
-static gboolean
+static GstFlowReturn
 gst_v4l2_codec_h264_dec_decode_slice (GstH264Decoder * decoder,
     GstH264Picture * picture, GstH264Slice * slice, GArray * ref_pic_list0,
     GArray * ref_pic_list1)
@@ -1236,7 +1236,7 @@ gst_v4l2_codec_h264_dec_decode_slice (GstH264Decoder * decoder,
     if (!gst_v4l2_codec_h264_dec_submit_bitstream (self, picture,
             V4L2_BUF_FLAG_M2M_HOLD_CAPTURE_BUF)
         || !gst_v4l2_codec_h264_dec_ensure_bitstream (self))
-      return FALSE;
+      return GST_FLOW_ERROR;
   }
   gst_v4l2_codec_h264_dec_fill_slice_params (self, slice);
@@ -1254,7 +1254,7 @@ gst_v4l2_codec_h264_dec_decode_slice (GstH264Decoder * decoder,
   if (self->bitstream_map.size + nal_size > self->bitstream_map.maxsize) {
     GST_ELEMENT_ERROR (decoder, RESOURCE, NO_SPACE_LEFT,
         ("Not enough space to send all slice of an H264 frame."), (NULL));
-    return FALSE;
+    return GST_FLOW_ERROR;
   }
   if (needs_start_codes (self)) {
@@ -1267,10 +1267,10 @@ gst_v4l2_codec_h264_dec_decode_slice (GstH264Decoder * decoder,
       slice->nalu.size);
   self->bitstream_map.size += nal_size;
-  return TRUE;
+  return GST_FLOW_OK;
 }
-static gboolean
+static GstFlowReturn
 gst_v4l2_codec_h264_dec_end_picture (GstH264Decoder * decoder,
     GstH264Picture * picture)
 {
@@ -1281,10 +1281,13 @@ gst_v4l2_codec_h264_dec_end_picture (GstH264Decoder * decoder,
   if (picture->field != GST_H264_PICTURE_FIELD_FRAME && !picture->second_field)
     flags = V4L2_BUF_FLAG_M2M_HOLD_CAPTURE_BUF;
-  return gst_v4l2_codec_h264_dec_submit_bitstream (self, picture, flags);
+  if (!gst_v4l2_codec_h264_dec_submit_bitstream (self, picture, flags))
+    return GST_FLOW_ERROR;
+  return GST_FLOW_OK;
 }
-static gboolean
+static GstFlowReturn
 gst_v4l2_codec_h264_dec_new_field_picture (GstH264Decoder * decoder,
     const GstH264Picture * first_field, GstH264Picture * second_field)
 {
@@ -1295,7 +1298,7 @@ gst_v4l2_codec_h264_dec_new_field_picture (GstH264Decoder * decoder,
   if (!request) {
     GST_WARNING_OBJECT (self,
         "First picture does not have an associated request");
-    return TRUE;
+    return GST_FLOW_OK;
   }
   GST_DEBUG_OBJECT (self, "Assigned request %p to second field.", request);
@@ -1305,7 +1308,7 @@ gst_v4l2_codec_h264_dec_new_field_picture (GstH264Decoder * decoder,
   gst_h264_picture_set_user_data (second_field, gst_v4l2_request_ref (request),
       (GDestroyNotify) gst_v4l2_request_unref);
-  return TRUE;
+  return GST_FLOW_OK;
 }
 static guint
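
As the v4l2 new_field_picture() hunk shows, not every unexpected condition becomes a failure: a missing request only triggers a warning and the vfunc still returns GST_FLOW_OK, while genuine failures map to GST_FLOW_ERROR. A minimal sketch of that distinction follows; my_frame_ref() and my_frame_unref() are hypothetical helpers, not names from this commit.

#include <gst/codecs/gsth264decoder.h>

/* Sketch only: a recoverable condition keeps returning GST_FLOW_OK (with a
 * warning), while a genuine failure would become GST_FLOW_ERROR. */
static GstFlowReturn
my_h264_dec_new_field_picture (GstH264Decoder * decoder,
    const GstH264Picture * first_field, GstH264Picture * second_field)
{
  gpointer frame =
      gst_h264_picture_get_user_data ((GstH264Picture *) first_field);

  if (!frame) {
    GST_WARNING_OBJECT (decoder,
        "First field has no decoder frame, continuing without one");
    return GST_FLOW_OK;
  }

  gst_h264_picture_set_user_data (second_field, my_frame_ref (frame),
      (GDestroyNotify) my_frame_unref);

  return GST_FLOW_OK;
}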

View File

@@ -97,7 +97,7 @@ static const gchar *src_caps_str =
 static const gchar *sink_caps_str = "video/x-h264";
-static gboolean
+static GstFlowReturn
 gst_va_h264_dec_end_picture (GstH264Decoder * decoder, GstH264Picture * picture)
 {
   GstVaBaseDec *base = GST_VA_BASE_DEC (decoder);
@@ -108,7 +108,10 @@ gst_va_h264_dec_end_picture (GstH264Decoder * decoder, GstH264Picture * picture)
   va_pic = gst_h264_picture_get_user_data (picture);
-  return gst_va_decoder_decode (base->decoder, va_pic);
+  if (!gst_va_decoder_decode (base->decoder, va_pic))
+    return GST_FLOW_ERROR;
+  return GST_FLOW_OK;
 }
 static GstFlowReturn
@@ -319,7 +322,7 @@ _get_slice_data_bit_offset (GstH264SliceHdr * header, guint nal_header_bytes)
   return 8 * nal_header_bytes + header->header_size - epb_count * 8;
 }
-static gboolean
+static GstFlowReturn
 gst_va_h264_dec_decode_slice (GstH264Decoder * decoder,
     GstH264Picture * picture, GstH264Slice * slice, GArray * ref_pic_list0,
     GArray * ref_pic_list1)
@@ -357,12 +360,16 @@ gst_va_h264_dec_decode_slice (GstH264Decoder * decoder,
   va_pic = gst_h264_picture_get_user_data (picture);
-  return gst_va_decoder_add_slice_buffer (base->decoder, va_pic, &slice_param,
-      sizeof (slice_param), slice->nalu.data + slice->nalu.offset,
-      slice->nalu.size);
+  if (!gst_va_decoder_add_slice_buffer (base->decoder, va_pic, &slice_param,
+          sizeof (slice_param), slice->nalu.data + slice->nalu.offset,
+          slice->nalu.size)) {
+    return GST_FLOW_ERROR;
+  }
+  return GST_FLOW_OK;
 }
-static gboolean
+static GstFlowReturn
 gst_va_h264_dec_start_picture (GstH264Decoder * decoder,
     GstH264Picture * picture, GstH264Slice * slice, GstH264Dpb * dpb)
 {
@@ -454,7 +461,7 @@ gst_va_h264_dec_start_picture (GstH264Decoder * decoder,
   if (!gst_va_decoder_add_param_buffer (base->decoder, va_pic,
           VAPictureParameterBufferType, &pic_param, sizeof (pic_param)))
-    return FALSE;
+    return GST_FLOW_ERROR;
   /* there are always 6 4x4 scaling lists */
   for (i = 0; i < 6; i++) {
@@ -471,11 +478,14 @@ gst_va_h264_dec_start_picture (GstH264Decoder * decoder,
         [i], pps->scaling_lists_8x8[i]);
   }
-  return gst_va_decoder_add_param_buffer (base->decoder, va_pic,
-      VAIQMatrixBufferType, &iq_matrix, sizeof (iq_matrix));
+  if (!gst_va_decoder_add_param_buffer (base->decoder, va_pic,
+          VAIQMatrixBufferType, &iq_matrix, sizeof (iq_matrix)))
+    return GST_FLOW_ERROR;
+  return GST_FLOW_OK;
 }
-static gboolean
+static GstFlowReturn
 gst_va_h264_dec_new_picture (GstH264Decoder * decoder,
     GstVideoCodecFrame * frame, GstH264Picture * picture)
 {
@@ -496,18 +506,18 @@ gst_va_h264_dec_new_picture (GstH264Decoder * decoder,
   GST_LOG_OBJECT (self, "New va decode picture %p - %#x", pic,
      gst_va_decode_picture_get_surface (pic));
-  return TRUE;
+  return GST_FLOW_OK;
 error:
   {
     GST_WARNING_OBJECT (self,
        "Failed to allocated output buffer, return %s",
        gst_flow_get_name (self->last_ret));
-    return FALSE;
+    return self->last_ret;
   }
 }
-static gboolean
+static GstFlowReturn
 gst_va_h264_dec_new_field_picture (GstH264Decoder * decoder,
     const GstH264Picture * first_field, GstH264Picture * second_field)
 {
@@ -517,7 +527,7 @@ gst_va_h264_dec_new_field_picture (GstH264Decoder * decoder,
   first_pic = gst_h264_picture_get_user_data ((GstH264Picture *) first_field);
   if (!first_pic)
-    return FALSE;
+    return GST_FLOW_ERROR;
   second_pic = gst_va_decode_picture_new (base->decoder, first_pic->gstbuffer);
   gst_h264_picture_set_user_data (second_field, second_pic,
@@ -526,7 +536,7 @@ gst_va_h264_dec_new_field_picture (GstH264Decoder * decoder,
   GST_LOG_OBJECT (self, "New va decode picture %p - %#x", second_pic,
      gst_va_decode_picture_get_surface (second_pic));
-  return TRUE;
+  return GST_FLOW_OK;
 }
 static inline guint
@@ -643,7 +653,7 @@ _get_profile (GstVaH264Dec * self, const GstH264SPS * sps, gint max_dpb_size)
   return VAProfileNone;
 }
-static gboolean
+static GstFlowReturn
 gst_va_h264_dec_new_sequence (GstH264Decoder * decoder, const GstH264SPS * sps,
     gint max_dpb_size)
 {
@@ -675,12 +685,12 @@ gst_va_h264_dec_new_sequence (GstH264Decoder * decoder, const GstH264SPS * sps,
   profile = _get_profile (self, sps, max_dpb_size);
   if (profile == VAProfileNone)
-    return FALSE;
+    return GST_FLOW_NOT_NEGOTIATED;
   rt_format = _get_rtformat (self, sps->bit_depth_luma_minus8 + 8,
      sps->chroma_format_idc);
   if (rt_format == 0)
-    return FALSE;
+    return GST_FLOW_NOT_NEGOTIATED;
   if (!gst_va_decoder_config_is_equal (base->decoder, profile,
           rt_format, sps->width, sps->height)) {
@@ -739,11 +749,11 @@ gst_va_h264_dec_new_sequence (GstH264Decoder * decoder, const GstH264SPS * sps,
     self->need_negotiation = TRUE;
     if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (self))) {
       GST_ERROR_OBJECT (self, "Failed to negotiate with downstream");
-      return FALSE;
+      return GST_FLOW_NOT_NEGOTIATED;
     }
   }
-  return TRUE;
+  return GST_FLOW_OK;
 }
 static GstCaps *
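
The va new_picture() hunk is where the richer return type matters most: the error path hands back the stored flow return (self->last_ret) rather than a plain FALSE, so a non-error outcome such as GST_FLOW_FLUSHING is not turned into a hard failure. A minimal sketch of that idea, assuming the output buffer is allocated through gst_video_decoder_allocate_output_frame(); my_wrap_buffer() and my_free_wrapper() are hypothetical placeholders, not names from this commit.

#include <gst/codecs/gsth264decoder.h>
#include <gst/video/video.h>

/* Sketch only: propagate the flow return of the output-buffer allocation
 * instead of collapsing it into a boolean. */
static GstFlowReturn
my_h264_dec_new_picture (GstH264Decoder * decoder, GstVideoCodecFrame * frame,
    GstH264Picture * picture)
{
  GstFlowReturn ret;

  ret = gst_video_decoder_allocate_output_frame (GST_VIDEO_DECODER (decoder),
      frame);
  if (ret != GST_FLOW_OK) {
    GST_WARNING_OBJECT (decoder, "Failed to allocate output buffer: %s",
        gst_flow_get_name (ret));
    return ret;
  }

  gst_h264_picture_set_user_data (picture,
      my_wrap_buffer (frame->output_buffer),
      (GDestroyNotify) my_free_wrapper);

  return GST_FLOW_OK;
}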