From 0490cb89c67ef173ce3d68f14fbedb87e2788181 Mon Sep 17 00:00:00 2001 From: Carl-Anton Ingmarsson Date: Wed, 9 Jun 2010 15:43:43 +0200 Subject: [PATCH] vdpau: base vdpaumpegdec on GstBaseVideoDecoder --- configure.ac | 2 + sys/vdpau/Makefile.am | 60 +- sys/vdpau/basevideodecoder/Makefile.am | 15 + .../basevideodecoder/gstbasevideodecoder.c | 1433 +++++++++++++++++ .../basevideodecoder/gstbasevideodecoder.h | 206 +++ .../basevideodecoder/gstbasevideoutils.h | 55 + sys/vdpau/basevideodecoder/gstvideoframe.c | 105 ++ sys/vdpau/basevideodecoder/gstvideoframe.h | 155 ++ sys/vdpau/gstvdp/Makefile.am | 28 + sys/vdpau/{ => gstvdp}/gstvdp.c | 0 sys/vdpau/{ => gstvdp}/gstvdp.h | 0 sys/vdpau/{ => gstvdp}/gstvdpdevice.c | 0 sys/vdpau/{ => gstvdp}/gstvdpdevice.h | 0 sys/vdpau/{ => gstvdp}/gstvdpoutputbuffer.c | 0 sys/vdpau/{ => gstvdp}/gstvdpoutputbuffer.h | 0 sys/vdpau/{ => gstvdp}/gstvdpoutputsrcpad.c | 0 sys/vdpau/{ => gstvdp}/gstvdpoutputsrcpad.h | 0 sys/vdpau/{ => gstvdp}/gstvdputils.c | 0 sys/vdpau/{ => gstvdp}/gstvdputils.h | 0 sys/vdpau/{ => gstvdp}/gstvdpvideobuffer.c | 0 sys/vdpau/{ => gstvdp}/gstvdpvideobuffer.h | 0 sys/vdpau/{ => gstvdp}/gstvdpvideosrcpad.c | 0 sys/vdpau/{ => gstvdp}/gstvdpvideosrcpad.h | 0 sys/vdpau/gstvdpau.c | 7 +- sys/vdpau/gstvdpmpegdec.c | 1161 ------------- sys/vdpau/gstvdpsink.c | 2 +- sys/vdpau/gstvdpsink.h | 2 +- sys/vdpau/gstvdpvideopostprocess.c | 6 +- sys/vdpau/gstvdpvideopostprocess.h | 4 +- sys/vdpau/mpeg/gstvdpmpegdec.c | 872 ++++++++++ sys/vdpau/{ => mpeg}/gstvdpmpegdec.h | 45 +- sys/vdpau/mpeg/gstvdpmpegframe.c | 133 ++ sys/vdpau/mpeg/gstvdpmpegframe.h | 77 + sys/vdpau/{ => mpeg}/mpegutil.c | 0 sys/vdpau/{ => mpeg}/mpegutil.h | 0 35 files changed, 3125 insertions(+), 1243 deletions(-) create mode 100644 sys/vdpau/basevideodecoder/Makefile.am create mode 100644 sys/vdpau/basevideodecoder/gstbasevideodecoder.c create mode 100644 sys/vdpau/basevideodecoder/gstbasevideodecoder.h create mode 100644 
sys/vdpau/basevideodecoder/gstbasevideoutils.h create mode 100644 sys/vdpau/basevideodecoder/gstvideoframe.c create mode 100644 sys/vdpau/basevideodecoder/gstvideoframe.h create mode 100644 sys/vdpau/gstvdp/Makefile.am rename sys/vdpau/{ => gstvdp}/gstvdp.c (100%) rename sys/vdpau/{ => gstvdp}/gstvdp.h (100%) rename sys/vdpau/{ => gstvdp}/gstvdpdevice.c (100%) rename sys/vdpau/{ => gstvdp}/gstvdpdevice.h (100%) rename sys/vdpau/{ => gstvdp}/gstvdpoutputbuffer.c (100%) rename sys/vdpau/{ => gstvdp}/gstvdpoutputbuffer.h (100%) rename sys/vdpau/{ => gstvdp}/gstvdpoutputsrcpad.c (100%) rename sys/vdpau/{ => gstvdp}/gstvdpoutputsrcpad.h (100%) rename sys/vdpau/{ => gstvdp}/gstvdputils.c (100%) rename sys/vdpau/{ => gstvdp}/gstvdputils.h (100%) rename sys/vdpau/{ => gstvdp}/gstvdpvideobuffer.c (100%) rename sys/vdpau/{ => gstvdp}/gstvdpvideobuffer.h (100%) rename sys/vdpau/{ => gstvdp}/gstvdpvideosrcpad.c (100%) rename sys/vdpau/{ => gstvdp}/gstvdpvideosrcpad.h (100%) delete mode 100644 sys/vdpau/gstvdpmpegdec.c create mode 100644 sys/vdpau/mpeg/gstvdpmpegdec.c rename sys/vdpau/{ => mpeg}/gstvdpmpegdec.h (73%) create mode 100644 sys/vdpau/mpeg/gstvdpmpegframe.c create mode 100644 sys/vdpau/mpeg/gstvdpmpegframe.h rename sys/vdpau/{ => mpeg}/mpegutil.c (100%) rename sys/vdpau/{ => mpeg}/mpegutil.h (100%) diff --git a/configure.ac b/configure.ac index 24fe8d2f19..f6ae465f16 100644 --- a/configure.ac +++ b/configure.ac @@ -1728,6 +1728,8 @@ sys/qtwrapper/Makefile sys/shm/Makefile sys/vcd/Makefile sys/vdpau/Makefile +sys/vdpau/gstvdp/Makefile +sys/vdpau/basevideodecoder/Makefile sys/wasapi/Makefile sys/wininet/Makefile sys/winks/Makefile diff --git a/sys/vdpau/Makefile.am b/sys/vdpau/Makefile.am index a96f1c3af9..10e552c2f0 100644 --- a/sys/vdpau/Makefile.am +++ b/sys/vdpau/Makefile.am @@ -1,53 +1,35 @@ +SUBDIRS = basevideodecoder gstvdp + plugin_LTLIBRARIES = libgstvdpau.la libgstvdpau_la_SOURCES = \ - gstvdpmpegdec.c \ - mpegutil.c \ gstvdpau.c \ gstvdpvideopostprocess.c \ 
- gstvdpsink.c + gstvdpsink.c \ + mpeg/gstvdpmpegframe.c \ + mpeg/mpegutil.c \ + mpeg/gstvdpmpegdec.c \ + h264/gstnalreader.c \ + h264/gsth264parser.c \ + h264/gstvdph264dec.c libgstvdpau_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) $(X11_CFLAGS) $(VDPAU_CFLAGS) + libgstvdpau_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS) \ $(GST_PLUGINS_BASE_LIBS) $(X11_LIBS) -lgstvideo-$(GST_MAJORMINOR) \ - -lgstinterfaces-$(GST_MAJORMINOR) $(VDPAU_LIBS) libgstvdp-@GST_MAJORMINOR@.la + -lgstinterfaces-$(GST_MAJORMINOR) $(VDPAU_LIBS) \ + basevideodecoder/libgstbasevideodecoder.la \ + gstvdp/libgstvdp-@GST_MAJORMINOR@.la + libgstvdpau_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) libgstvdpau_la_LIBTOOLFLAGS = --tag=disable-static noinst_HEADERS = \ - gstvdpmpegdec.h \ - mpegutil.h \ - gstvdputils.h \ gstvdpvideopostprocess.h \ - gstvdpsink.h - -lib_LTLIBRARIES = libgstvdp-@GST_MAJORMINOR@.la - -libgstvdp_@GST_MAJORMINOR@_la_SOURCES = \ - gstvdpdevice.c \ - gstvdputils.c \ - gstvdpvideobuffer.c \ - gstvdpoutputbuffer.c \ - gstvdpvideosrcpad.c \ - gstvdpoutputsrcpad.c \ - gstvdp.c - -libgstvdp_@GST_MAJORMINOR@includedir = $(includedir)/gstreamer-@GST_MAJORMINOR@/gst/vdpau -libgstvdp_@GST_MAJORMINOR@include_HEADERS = \ - gstvdpdevice.h \ - gstvdpvideobuffer.h \ - gstvdpoutputbuffer.h \ - gstvdpvideosrcpad.h \ - gstvdpoutputsrcpad.h \ - gstvdp.h - -libgstvdp_@GST_MAJORMINOR@_la_CFLAGS = $(GST_CFLAGS) $(GST_BASE_CFLAGS) \ - $(GST_PLUGINS_BASE_CFLAGS) $(X11_CFLAGS) $(VDPAU_CFLAGS) - -libgstvdp_@GST_MAJORMINOR@_la_LIBADD = $(GST_LIBS) $(X11_LIBS) $(VDPAU_LIBS) \ - -lgstvideo-$(GST_MAJORMINOR) - -libgstvdp_@GST_MAJORMINOR@_la_LDFLAGS = $(GST_LIB_LDFLAGS) $(GST_LT_LDFLAGS) $(GST_ALL_LDFLAGS) -libgstvdp_@GST_MAJORMINOR@_la_LIBTOOLFLAGS = --tag=disable-static - - + gstvdpsink.h \ + mpeg/gstvdpmpegframe.h \ + mpeg/mpegutil.h \ + mpeg/gstvdpmpegdec.h \ + h264/gstnalreader.h \ + h264/gsth264parser.h \ + h264/gstvdph264dec.h \ No newline at end of file diff --git a/sys/vdpau/basevideodecoder/Makefile.am 
b/sys/vdpau/basevideodecoder/Makefile.am new file mode 100644 index 0000000000..bcce25e354 --- /dev/null +++ b/sys/vdpau/basevideodecoder/Makefile.am @@ -0,0 +1,15 @@ +noinst_LTLIBRARIES = libgstbasevideodecoder.la + +libgstbasevideodecoder_la_SOURCES = \ + gstvideoframe.c \ + gstbasevideodecoder.c + +libgstbasevideodecoder_la_CFLAGS = $(GST_CFLAGS) +libgstbasevideodecoder_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS) \ + $(GST_PLUGINS_BASE_LIBS) -lgstinterfaces-$(GST_MAJORMINOR) +libgstbasevideodecoder_la_LDFLAGS = $(GST_ALL_LDFLAGS) -module -avoid-version + +noinst_HEADERS = \ + gstvideoframe.h \ + gstbasevideodecoder.h \ + gstbasevideoutils.h \ No newline at end of file diff --git a/sys/vdpau/basevideodecoder/gstbasevideodecoder.c b/sys/vdpau/basevideodecoder/gstbasevideodecoder.c new file mode 100644 index 0000000000..438ccb7dfe --- /dev/null +++ b/sys/vdpau/basevideodecoder/gstbasevideodecoder.c @@ -0,0 +1,1433 @@ +/* GStreamer + * Copyright (C) 2008 David Schleef + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstbasevideodecoder.h" + +#include + +GST_DEBUG_CATEGORY (basevideodecoder_debug); +#define GST_CAT_DEFAULT basevideodecoder_debug + +enum +{ + PROP_0, + PROP_PACKETIZED, + PROP_SINK_CLIPPING +}; + + +static GstFlowReturn gst_base_video_decoder_drain (GstBaseVideoDecoder * dec, + gboolean at_eos, gboolean codec_data); + + +GST_BOILERPLATE (GstBaseVideoDecoder, gst_base_video_decoder, + GstElement, GST_TYPE_ELEMENT); + + + +static guint64 +gst_base_video_decoder_get_timestamp (GstBaseVideoDecoder * base_video_decoder, + gint picture_number) +{ + if (base_video_decoder->state.fps_d == 0) { + return -1; + } + if (picture_number < base_video_decoder->base_picture_number) { + return base_video_decoder->timestamp_offset - + (gint64) gst_util_uint64_scale (base_video_decoder->base_picture_number + - picture_number, base_video_decoder->state.fps_d * GST_SECOND, + base_video_decoder->state.fps_n); + } else { + return base_video_decoder->timestamp_offset + + gst_util_uint64_scale (picture_number - + base_video_decoder->base_picture_number, + base_video_decoder->state.fps_d * GST_SECOND, + base_video_decoder->state.fps_n); + } +} + +static guint64 +gst_base_video_decoder_get_field_timestamp (GstBaseVideoDecoder * + base_video_decoder, gint field_offset) +{ + if (base_video_decoder->state.fps_d == 0) { + return GST_CLOCK_TIME_NONE; + } + if (field_offset < 0) { + GST_WARNING ("field offset < 0"); + return GST_CLOCK_TIME_NONE; + } + return base_video_decoder->timestamp_offset + + gst_util_uint64_scale (field_offset, + base_video_decoder->state.fps_d * GST_SECOND, + base_video_decoder->state.fps_n * 2); +} + +static guint64 +gst_base_video_decoder_get_field_duration (GstBaseVideoDecoder * + base_video_decoder, gint n_fields) +{ + if (base_video_decoder->state.fps_d == 0) { + return GST_CLOCK_TIME_NONE; + } + if (n_fields < 0) { + GST_WARNING ("n_fields < 0"); + return GST_CLOCK_TIME_NONE; + } + return 
gst_util_uint64_scale (n_fields, + base_video_decoder->state.fps_d * GST_SECOND, + base_video_decoder->state.fps_n * 2); +} + +static GstVideoFrame * +gst_base_video_decoder_new_frame (GstBaseVideoDecoder * base_video_decoder) +{ + GstBaseVideoDecoderClass *base_video_decoder_class = + GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); + + GstVideoFrame *frame; + + if (base_video_decoder_class->create_frame) + frame = base_video_decoder_class->create_frame (base_video_decoder); + else + frame = gst_video_frame_new (); + + frame->system_frame_number = base_video_decoder->system_frame_number; + base_video_decoder->system_frame_number++; + + frame->decode_frame_number = frame->system_frame_number - + base_video_decoder->reorder_depth; + + return frame; +} + +static void +gst_base_video_decoder_reset (GstBaseVideoDecoder * base_video_decoder) +{ + GList *g; + + GST_DEBUG ("reset"); + + base_video_decoder->discont = TRUE; + base_video_decoder->have_sync = FALSE; + + base_video_decoder->timestamp_offset = GST_CLOCK_TIME_NONE; + base_video_decoder->system_frame_number = 0; + base_video_decoder->presentation_frame_number = 0; + base_video_decoder->base_picture_number = 0; + base_video_decoder->last_timestamp = GST_CLOCK_TIME_NONE; + + base_video_decoder->input_offset = 0; + base_video_decoder->frame_offset = 0; + + /* This function could be called from finalize() */ + if (base_video_decoder->input_adapter) { + gst_adapter_clear (base_video_decoder->input_adapter); + } + + if (base_video_decoder->current_frame) { + gst_video_frame_unref (base_video_decoder->current_frame); + base_video_decoder->current_frame = NULL; + } + + base_video_decoder->have_src_caps = FALSE; + + for (g = g_list_first (base_video_decoder->frames); g; g = g_list_next (g)) { + GstVideoFrame *frame = g->data; + gst_video_frame_unref (frame); + } + g_list_free (base_video_decoder->frames); + base_video_decoder->frames = NULL; + + GST_OBJECT_LOCK (base_video_decoder); + 
base_video_decoder->earliest_time = GST_CLOCK_TIME_NONE; + base_video_decoder->proportion = 0.5; + GST_OBJECT_UNLOCK (base_video_decoder); +} + +static void +gst_base_video_decoder_flush (GstBaseVideoDecoder * base_video_decoder) +{ + GstBaseVideoDecoderClass *base_video_decoder_class; + + gst_base_video_decoder_reset (base_video_decoder); + + base_video_decoder_class = + GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); + + if (base_video_decoder_class->flush) + base_video_decoder_class->flush (base_video_decoder); +} + + +static gboolean +gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps) +{ + GstBaseVideoDecoder *base_video_decoder; + GstBaseVideoDecoderClass *base_video_decoder_class; + GstStructure *structure; + const GValue *codec_data; + GstVideoState *state; + gboolean ret = TRUE; + + base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); + base_video_decoder_class = + GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); + + GST_DEBUG ("setcaps %" GST_PTR_FORMAT, caps); + + state = &base_video_decoder->state; + + if (state->codec_data) { + gst_buffer_unref (state->codec_data); + } + memset (state, 0, sizeof (GstVideoState)); + + structure = gst_caps_get_structure (caps, 0); + + gst_video_format_parse_caps (caps, NULL, &state->width, &state->height); + gst_video_parse_caps_framerate (caps, &state->fps_n, &state->fps_d); + gst_video_parse_caps_pixel_aspect_ratio (caps, &state->par_n, &state->par_d); + + gst_structure_get_boolean (structure, "interlaced", &state->interlaced); + + codec_data = gst_structure_get_value (structure, "codec_data"); + if (codec_data && G_VALUE_TYPE (codec_data) == GST_TYPE_BUFFER) + state->codec_data = gst_value_get_buffer (codec_data); + + if (base_video_decoder_class->set_sink_caps) + ret = base_video_decoder_class->set_sink_caps (base_video_decoder, caps); + + g_object_unref (base_video_decoder); + + return ret; +} + +static gboolean +gst_base_video_decoder_sink_event (GstPad * pad, GstEvent 
* event) +{ + GstBaseVideoDecoder *base_video_decoder; + GstBaseVideoDecoderClass *base_video_decoder_class; + gboolean ret = FALSE; + + base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); + base_video_decoder_class = + GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); + + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_EOS: + { + if (!base_video_decoder->packetized) + gst_base_video_decoder_drain (base_video_decoder, TRUE, FALSE); + + ret = + gst_pad_push_event (GST_BASE_VIDEO_DECODER_SRC_PAD + (base_video_decoder), event); + } + break; + case GST_EVENT_NEWSEGMENT: + { + gboolean update; + double rate; + double applied_rate; + GstFormat format; + gint64 start; + gint64 stop; + gint64 position; + GstSegment *segment = &base_video_decoder->segment; + + gst_event_parse_new_segment_full (event, &update, &rate, + &applied_rate, &format, &start, &stop, &position); + + if (format != GST_FORMAT_TIME) + goto newseg_wrong_format; + + if (!update) { + gst_base_video_decoder_flush (base_video_decoder); + } + + base_video_decoder->timestamp_offset = start; + + gst_segment_set_newsegment_full (segment, + update, rate, applied_rate, format, start, stop, position); + base_video_decoder->have_segment = TRUE; + + GST_WARNING ("new segment: format %d rate %g start %" GST_TIME_FORMAT + " stop %" GST_TIME_FORMAT + " position %" GST_TIME_FORMAT + " update %d", + format, rate, + GST_TIME_ARGS (segment->start), + GST_TIME_ARGS (segment->stop), GST_TIME_ARGS (segment->time), update); + + ret = + gst_pad_push_event (GST_BASE_VIDEO_DECODER_SRC_PAD + (base_video_decoder), event); + } + break; + case GST_EVENT_FLUSH_STOP:{ + GST_OBJECT_LOCK (base_video_decoder); + base_video_decoder->earliest_time = GST_CLOCK_TIME_NONE; + base_video_decoder->proportion = 0.5; + GST_OBJECT_UNLOCK (base_video_decoder); + } + default: + /* FIXME this changes the order of events */ + ret = + gst_pad_push_event (GST_BASE_VIDEO_DECODER_SRC_PAD + (base_video_decoder), event); + break; + } 
+ +done: + gst_object_unref (base_video_decoder); + return ret; + +newseg_wrong_format: + { + GST_DEBUG_OBJECT (base_video_decoder, "received non TIME newsegment"); + gst_event_unref (event); + goto done; + } +} + +#if 0 +static gboolean +gst_base_video_decoder_sink_convert (GstPad * pad, + GstFormat src_format, gint64 src_value, + GstFormat * dest_format, gint64 * dest_value) +{ + gboolean res = TRUE; + GstBaseVideoDecoder *enc; + + if (src_format == *dest_format) { + *dest_value = src_value; + return TRUE; + } + + enc = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); + + /* FIXME: check if we are in a decoding state */ + + switch (src_format) { + case GST_FORMAT_BYTES: + switch (*dest_format) { +#if 0 + case GST_FORMAT_DEFAULT: + *dest_value = gst_util_uint64_scale_int (src_value, 1, + enc->bytes_per_picture); + break; +#endif + case GST_FORMAT_TIME: + /* seems like a rather silly conversion, implement me if you like */ + default: + res = FALSE; + } + break; + case GST_FORMAT_DEFAULT: + switch (*dest_format) { + case GST_FORMAT_TIME: + *dest_value = gst_util_uint64_scale (src_value, + GST_SECOND * enc->fps_d, enc->fps_n); + break; +#if 0 + case GST_FORMAT_BYTES: + *dest_value = gst_util_uint64_scale_int (src_value, + enc->bytes_per_picture, 1); + break; +#endif + default: + res = FALSE; + } + break; + default: + res = FALSE; + break; + } +} +#endif + +static gboolean +gst_base_video_decoder_src_convert (GstPad * pad, + GstFormat src_format, gint64 src_value, + GstFormat * dest_format, gint64 * dest_value) +{ + gboolean res = TRUE; + GstBaseVideoDecoder *enc; + + if (src_format == *dest_format) { + *dest_value = src_value; + return TRUE; + } + + enc = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); + + /* FIXME: check if we are in a encoding state */ + + GST_DEBUG ("src convert"); + switch (src_format) { +#if 0 + case GST_FORMAT_DEFAULT: + switch (*dest_format) { + case GST_FORMAT_TIME: + *dest_value = gst_util_uint64_scale (granulepos_to_frame (src_value), 
+ enc->fps_d * GST_SECOND, enc->fps_n); + break; + default: + res = FALSE; + } + break; + case GST_FORMAT_TIME: + switch (*dest_format) { + case GST_FORMAT_DEFAULT: + { + *dest_value = gst_util_uint64_scale (src_value, + enc->fps_n, enc->fps_d * GST_SECOND); + break; + } + default: + res = FALSE; + break; + } + break; +#endif + default: + res = FALSE; + break; + } + + gst_object_unref (enc); + + return res; +} + +static gboolean +gst_base_video_decoder_src_event (GstPad * pad, GstEvent * event) +{ + GstBaseVideoDecoder *base_video_decoder; + gboolean res = FALSE; + + base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); + + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_SEEK: + { + GstFormat format, tformat; + gdouble rate; + GstEvent *real_seek; + GstSeekFlags flags; + GstSeekType cur_type, stop_type; + gint64 cur, stop; + gint64 tcur, tstop; + + GST_DEBUG ("seek event"); + + gst_event_parse_seek (event, &rate, &format, &flags, &cur_type, + &cur, &stop_type, &stop); + gst_event_unref (event); + + tformat = GST_FORMAT_TIME; + res = + gst_base_video_decoder_src_convert (pad, format, cur, &tformat, + &tcur); + if (!res) + goto convert_error; + res = + gst_base_video_decoder_src_convert (pad, format, stop, &tformat, + &tstop); + if (!res) + goto convert_error; + + real_seek = gst_event_new_seek (rate, GST_FORMAT_TIME, + flags, cur_type, tcur, stop_type, tstop); + + res = + gst_pad_push_event (GST_BASE_VIDEO_DECODER_SINK_PAD + (base_video_decoder), real_seek); + + break; + } + case GST_EVENT_QOS: + { + gdouble proportion; + GstClockTimeDiff diff; + GstClockTime timestamp; + GstClockTime duration; + + gst_event_parse_qos (event, &proportion, &diff, ×tamp); + + GST_OBJECT_LOCK (base_video_decoder); + base_video_decoder->proportion = proportion; + if (G_LIKELY (GST_CLOCK_TIME_IS_VALID (timestamp))) { + if (G_UNLIKELY (diff > 0)) { + if (base_video_decoder->state.fps_n > 0) + duration = + gst_util_uint64_scale (GST_SECOND, + 
base_video_decoder->state.fps_d, + base_video_decoder->state.fps_n); + else + duration = 0; + base_video_decoder->earliest_time = timestamp + 2 * diff + duration; + } else { + base_video_decoder->earliest_time = timestamp + diff; + } + } else { + base_video_decoder->earliest_time = GST_CLOCK_TIME_NONE; + } + GST_OBJECT_UNLOCK (base_video_decoder); + + GST_DEBUG_OBJECT (base_video_decoder, + "got QoS %" GST_TIME_FORMAT ", %" G_GINT64_FORMAT ", %g", + GST_TIME_ARGS (timestamp), diff, proportion); + + res = + gst_pad_push_event (GST_BASE_VIDEO_DECODER_SINK_PAD + (base_video_decoder), event); + break; + } + default: + res = + gst_pad_push_event (GST_BASE_VIDEO_DECODER_SINK_PAD + (base_video_decoder), event); + break; + } +done: + gst_object_unref (base_video_decoder); + return res; + +convert_error: + GST_DEBUG_OBJECT (base_video_decoder, "could not convert format"); + goto done; +} + +static const GstQueryType * +gst_base_video_decoder_get_query_types (GstPad * pad) +{ + static const GstQueryType query_types[] = { + GST_QUERY_POSITION, + GST_QUERY_DURATION, + GST_QUERY_CONVERT, + 0 + }; + + return query_types; +} + +static gboolean +gst_base_video_decoder_src_query (GstPad * pad, GstQuery * query) +{ + GstBaseVideoDecoder *dec; + gboolean res = TRUE; + + dec = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); + + switch GST_QUERY_TYPE + (query) { + case GST_QUERY_POSITION: + { + GstFormat format; + gint64 time; + + gst_query_parse_position (query, &format, NULL); + GST_DEBUG ("query in format %d", format); + + if (format != GST_FORMAT_TIME) { + goto error; + } + + time = dec->last_timestamp; + time = gst_segment_to_stream_time (&dec->segment, GST_FORMAT_TIME, time); + + gst_query_set_position (query, format, time); + + res = TRUE; + + break; + } + case GST_QUERY_DURATION: + { + res = gst_pad_peer_query (dec->sinkpad, query); + break; + } + case GST_QUERY_CONVERT: + { + GstFormat src_fmt, dest_fmt; + gint64 src_val, dest_val; + + GST_DEBUG ("convert query"); + + 
gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val); + res = + gst_base_video_decoder_src_convert (pad, src_fmt, src_val, &dest_fmt, + &dest_val); + if (!res) + goto error; + gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val); + break; + } + default: + res = gst_pad_query_default (pad, query); + } + gst_object_unref (dec); + return res; + +error: + GST_ERROR_OBJECT (dec, "query failed"); + gst_object_unref (dec); + return res; +} + +static gboolean +gst_base_video_decoder_sink_query (GstPad * pad, GstQuery * query) +{ + GstBaseVideoDecoder *base_video_decoder; + gboolean res = FALSE; + + base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); + + GST_DEBUG_OBJECT (base_video_decoder, "sink query fps=%d/%d", + base_video_decoder->state.fps_n, base_video_decoder->state.fps_d); + switch (GST_QUERY_TYPE (query)) { + + default: + res = gst_pad_query_default (pad, query); + break; + } + + gst_object_unref (base_video_decoder); + + return res; +} + +static void +gst_base_video_decoder_set_src_caps (GstBaseVideoDecoder * base_video_decoder) +{ + GstCaps *caps; + GstVideoState *state = &base_video_decoder->state; + + if (base_video_decoder->have_src_caps) + return; + + caps = gst_pad_get_allowed_caps (base_video_decoder->srcpad); + if (!caps) + goto null_caps; + if (gst_caps_is_empty (caps)) + goto empty_caps; + + gst_caps_set_simple (caps, + "width", G_TYPE_INT, state->width, + "height", G_TYPE_INT, state->height, + "framerate", GST_TYPE_FRACTION, state->fps_n, state->fps_d, + "pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n, state->par_d, + "interlaced", G_TYPE_BOOLEAN, state->interlaced, NULL); + gst_pad_fixate_caps (base_video_decoder->srcpad, caps); + + + GST_DEBUG ("setting caps %" GST_PTR_FORMAT, caps); + + gst_pad_set_caps (GST_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder), caps); + + base_video_decoder->have_src_caps = TRUE; + + gst_caps_unref (caps); + return; + +null_caps: + GST_WARNING ("Got null caps 
from get_allowed_caps"); + return; + +empty_caps: + GST_WARNING ("Got empty caps from get_allowed_caps"); + gst_caps_unref (caps); + return; +} + +static GstFlowReturn +gst_base_video_decoder_drain (GstBaseVideoDecoder * dec, gboolean at_eos, + gboolean codec_data) +{ + GstBaseVideoDecoderClass *klass; + GstBaseVideoDecoderScanResult res; + GstFlowReturn ret; + guint size; + + klass = GST_BASE_VIDEO_DECODER_GET_CLASS (dec); + + if (gst_adapter_available (dec->input_adapter) == 0) + return GST_FLOW_OK; + +lost_sync: + if (!dec->have_sync) { + gint n, m; + + GST_DEBUG ("no sync, scanning"); + + n = gst_adapter_available (dec->input_adapter); + m = klass->scan_for_sync (dec, dec->input_adapter); + if (m == -1) { + gst_object_unref (dec); + return GST_FLOW_OK; + } + + if (m < 0) { + g_warning ("subclass returned negative scan %d", m); + } + + if (m >= n) { + GST_ERROR ("subclass scanned past end %d >= %d", m, n); + } + + gst_adapter_flush (dec->input_adapter, m); + + if (m < n) { + GST_DEBUG ("found possible sync after %d bytes (of %d)", m, n); + /* this is only "maybe" sync */ + dec->have_sync = TRUE; + } + + if (!dec->have_sync) { + return GST_FLOW_OK; + } + } + + res = klass->scan_for_packet_end (dec, dec->input_adapter, &size, at_eos); + while (res == GST_BASE_VIDEO_DECODER_SCAN_RESULT_OK) { + GstClockTime timestamp, duration; + guint64 offset; + gboolean preroll, gap; + + GstBuffer *buf; + + GST_DEBUG ("Packet size: %u", size); + if (size > gst_adapter_available (dec->input_adapter)) + return GST_FLOW_OK; + + timestamp = GST_BUFFER_TIMESTAMP (dec->input_adapter->buflist->data); + duration = GST_BUFFER_DURATION (dec->input_adapter->buflist->data); + offset = GST_BUFFER_OFFSET (dec->input_adapter->buflist->data); + + preroll = GST_BUFFER_FLAG_IS_SET (dec->input_adapter->buflist->data, + GST_BUFFER_FLAG_PREROLL); + gap = GST_BUFFER_FLAG_IS_SET (dec->input_adapter->buflist->data, + GST_BUFFER_FLAG_GAP); + + buf = gst_adapter_take_buffer (dec->input_adapter, size); + 
GST_BUFFER_TIMESTAMP (buf) = timestamp; + GST_BUFFER_DURATION (buf) = duration; + GST_BUFFER_OFFSET (buf) = offset; + + if (preroll) + GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_PREROLL); + else + GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_PREROLL); + + if (gap) + GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_GAP); + else + GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_GAP); + + if (codec_data) + ret = klass->parse_codec_data (dec, buf); + else + ret = klass->parse_data (dec, buf, at_eos); + if (ret != GST_FLOW_OK) + break; + + res = klass->scan_for_packet_end (dec, dec->input_adapter, &size, at_eos); + } + + if (res == GST_BASE_VIDEO_DECODER_SCAN_RESULT_LOST_SYNC) { + dec->have_sync = FALSE; + goto lost_sync; + } else if (res == GST_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA) + return GST_FLOW_OK; + + return ret; +} + +static GstFlowReturn +gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf) +{ + GstBaseVideoDecoder *base_video_decoder; + GstBaseVideoDecoderClass *base_video_decoder_class; + GstFlowReturn ret; + + GST_DEBUG ("chain %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT, + GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), + GST_TIME_ARGS (GST_BUFFER_DURATION (buf))); + +#if 0 + /* requiring the pad to be negotiated makes it impossible to use + * oggdemux or filesrc ! decoder */ + if (!gst_pad_is_negotiated (pad)) { + GST_DEBUG ("not negotiated"); + return GST_FLOW_NOT_NEGOTIATED; + } +#endif + + base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); + base_video_decoder_class = + GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); + + GST_DEBUG_OBJECT (base_video_decoder, "chain"); + + if (!base_video_decoder->have_segment) { + GstEvent *event; + GstFlowReturn ret; + + GST_WARNING + ("Received buffer without a new-segment. 
Assuming timestamps start from 0."); + + gst_segment_set_newsegment_full (&base_video_decoder->segment, + FALSE, 1.0, 1.0, GST_FORMAT_TIME, 0, GST_CLOCK_TIME_NONE, 0); + base_video_decoder->have_segment = TRUE; + + event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, + GST_CLOCK_TIME_NONE, 0); + + ret = + gst_pad_push_event (GST_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder), + event); + if (!ret) { + GST_ERROR ("new segment event ret=%d", ret); + return GST_FLOW_ERROR; + } + } + + if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) { + GST_DEBUG_OBJECT (base_video_decoder, "received DISCONT buffer"); + gst_base_video_decoder_flush (base_video_decoder); + } + + if (base_video_decoder->current_frame == NULL) { + base_video_decoder->current_frame = + gst_base_video_decoder_new_frame (base_video_decoder); + } +#if 0 + if (base_video_decoder->timestamp_offset == GST_CLOCK_TIME_NONE && + GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) { + GST_DEBUG ("got new offset %" GST_TIME_FORMAT, + GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf))); + base_video_decoder->timestamp_offset = GST_BUFFER_TIMESTAMP (buf); + } +#endif + + if (base_video_decoder->packetized) { + base_video_decoder->current_frame->sink_buffer = buf; + + ret = gst_base_video_decoder_have_frame (base_video_decoder, NULL); + } else { + + gst_adapter_push (base_video_decoder->input_adapter, buf); + + ret = gst_base_video_decoder_drain (base_video_decoder, FALSE, FALSE); + } + + gst_object_unref (base_video_decoder); + return ret; +} + +static gboolean +gst_base_video_decoder_stop (GstBaseVideoDecoder * base_video_decoder) +{ + GstBaseVideoDecoderClass *base_video_decoder_class; + + GST_DEBUG ("stop"); + + base_video_decoder_class = + GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); + + if (base_video_decoder_class->stop) + return base_video_decoder_class->stop (base_video_decoder); + + return TRUE; +} + +static gboolean +gst_base_video_decoder_start (GstBaseVideoDecoder * 
base_video_decoder) +{ + GstBaseVideoDecoderClass *base_video_decoder_class; + + GST_DEBUG ("start"); + + base_video_decoder_class = + GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); + + gst_base_video_decoder_reset (base_video_decoder); + + if (base_video_decoder_class->start) + return base_video_decoder_class->start (base_video_decoder); + + return TRUE; +} + +static GstStateChangeReturn +gst_base_video_decoder_change_state (GstElement * element, + GstStateChange transition) +{ + GstBaseVideoDecoder *base_video_decoder; + GstStateChangeReturn ret; + + base_video_decoder = GST_BASE_VIDEO_DECODER (element); + + switch (transition) { + case GST_STATE_CHANGE_READY_TO_PAUSED: + gst_base_video_decoder_start (base_video_decoder); + break; + + default: + break; + } + + ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); + + switch (transition) { + case GST_STATE_CHANGE_PAUSED_TO_READY: + gst_base_video_decoder_stop (base_video_decoder); + break; + + default: + break; + } + + return ret; +} + +GstFlowReturn +gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder, + GstVideoFrame * frame) +{ + GstBaseVideoDecoderClass *base_video_decoder_class; + GstBuffer *src_buffer; + + GST_DEBUG ("finish frame"); + + base_video_decoder_class = + GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); + + GST_DEBUG ("finish frame sync=%d pts=%" GST_TIME_FORMAT, + GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT), + GST_TIME_ARGS (frame->presentation_timestamp)); + + if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) { + if (frame->presentation_timestamp != base_video_decoder->timestamp_offset) { + GST_DEBUG ("sync timestamp %" GST_TIME_FORMAT " diff %" GST_TIME_FORMAT, + GST_TIME_ARGS (frame->presentation_timestamp), + GST_TIME_ARGS (frame->presentation_timestamp - + base_video_decoder->segment.start)); + base_video_decoder->timestamp_offset = frame->presentation_timestamp; + 
base_video_decoder->field_index = 0; + } else { + /* This case is for one initial timestamp and no others, e.g., + * filesrc ! decoder ! xvimagesink */ + GST_WARNING ("sync timestamp didn't change, ignoring"); + frame->presentation_timestamp = GST_CLOCK_TIME_NONE; + } + } else { + if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT)) { + GST_WARNING ("sync point doesn't have timestamp"); + if (!GST_CLOCK_TIME_IS_VALID (base_video_decoder->timestamp_offset)) { + GST_WARNING + ("No base timestamp. Assuming frames start at segment start"); + base_video_decoder->timestamp_offset = + base_video_decoder->segment.start; + base_video_decoder->field_index = 0; + } + } + } + frame->field_index = base_video_decoder->field_index; + base_video_decoder->field_index += frame->n_fields; + + if (frame->presentation_timestamp == GST_CLOCK_TIME_NONE) { + frame->presentation_timestamp = + gst_base_video_decoder_get_field_timestamp (base_video_decoder, + frame->field_index); + frame->presentation_duration = GST_CLOCK_TIME_NONE; + frame->decode_timestamp = + gst_base_video_decoder_get_timestamp (base_video_decoder, + frame->decode_frame_number); + } + if (frame->presentation_duration == GST_CLOCK_TIME_NONE) { + frame->presentation_duration = + gst_base_video_decoder_get_field_duration (base_video_decoder, + frame->n_fields); + } + + if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->last_timestamp)) { + if (frame->presentation_timestamp < base_video_decoder->last_timestamp) { + GST_WARNING ("decreasing timestamp (%" GST_TIME_FORMAT " < %" + GST_TIME_FORMAT ")", GST_TIME_ARGS (frame->presentation_timestamp), + GST_TIME_ARGS (base_video_decoder->last_timestamp)); + } + } + base_video_decoder->last_timestamp = frame->presentation_timestamp; + + src_buffer = frame->src_buffer; + + GST_BUFFER_FLAG_UNSET (src_buffer, GST_BUFFER_FLAG_DELTA_UNIT); + if (base_video_decoder->state.interlaced) { +#ifndef GST_VIDEO_BUFFER_TFF +#define GST_VIDEO_BUFFER_TFF 
(GST_MINI_OBJECT_FLAG_LAST << 5) +#endif +#ifndef GST_VIDEO_BUFFER_RFF +#define GST_VIDEO_BUFFER_RFF (GST_MINI_OBJECT_FLAG_LAST << 6) +#endif +#ifndef GST_VIDEO_BUFFER_ONEFIELD +#define GST_VIDEO_BUFFER_ONEFIELD (GST_MINI_OBJECT_FLAG_LAST << 7) +#endif + + if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_TFF)) { + GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_TFF); + } else { + GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_TFF); + } + GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_RFF); + GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_ONEFIELD); + if (frame->n_fields == 3) { + GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_RFF); + } else if (frame->n_fields == 1) { + GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_ONEFIELD); + } + } + if (base_video_decoder->discont) { + GST_BUFFER_FLAG_UNSET (src_buffer, GST_BUFFER_FLAG_DISCONT); + base_video_decoder->discont = FALSE; + } + + GST_BUFFER_TIMESTAMP (src_buffer) = frame->presentation_timestamp; + GST_BUFFER_DURATION (src_buffer) = frame->presentation_duration; + GST_BUFFER_OFFSET (src_buffer) = GST_BUFFER_OFFSET_NONE; + GST_BUFFER_OFFSET_END (src_buffer) = GST_BUFFER_OFFSET_NONE; + + GST_DEBUG ("pushing frame %" GST_TIME_FORMAT, + GST_TIME_ARGS (frame->presentation_timestamp)); + + base_video_decoder->frames = + g_list_remove (base_video_decoder->frames, frame); + + gst_base_video_decoder_set_src_caps (base_video_decoder); + + if (base_video_decoder->sink_clipping) { + gint64 start = GST_BUFFER_TIMESTAMP (src_buffer); + gint64 stop = GST_BUFFER_TIMESTAMP (src_buffer) + + GST_BUFFER_DURATION (src_buffer); + + if (gst_segment_clip (&base_video_decoder->segment, GST_FORMAT_TIME, + start, stop, &start, &stop)) { + GST_BUFFER_TIMESTAMP (src_buffer) = start; + GST_BUFFER_DURATION (src_buffer) = stop - start; + GST_DEBUG ("accepting buffer inside segment: %" GST_TIME_FORMAT + " %" GST_TIME_FORMAT + " seg %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT + " time %" GST_TIME_FORMAT, + 
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer)), + GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer) + + GST_BUFFER_DURATION (src_buffer)), + GST_TIME_ARGS (base_video_decoder->segment.start), + GST_TIME_ARGS (base_video_decoder->segment.stop), + GST_TIME_ARGS (base_video_decoder->segment.time)); + } else { + GST_DEBUG ("dropping buffer outside segment: %" GST_TIME_FORMAT + " %" GST_TIME_FORMAT + " seg %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT + " time %" GST_TIME_FORMAT, + GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer)), + GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer) + + GST_BUFFER_DURATION (src_buffer)), + GST_TIME_ARGS (base_video_decoder->segment.start), + GST_TIME_ARGS (base_video_decoder->segment.stop), + GST_TIME_ARGS (base_video_decoder->segment.time)); + gst_video_frame_unref (frame); + return GST_FLOW_OK; + } + } + + gst_buffer_ref (src_buffer); + gst_video_frame_unref (frame); + + if (base_video_decoder_class->shape_output) + return base_video_decoder_class->shape_output (base_video_decoder, + src_buffer); + + return gst_pad_push (GST_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder), + src_buffer); +} + +void +gst_base_video_decoder_skip_frame (GstBaseVideoDecoder * base_video_decoder, + GstVideoFrame * frame) +{ + GstBaseVideoDecoderClass *base_video_decoder_class; + + GST_DEBUG ("skip frame"); + + base_video_decoder_class = + GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); + + GST_DEBUG ("skip frame sync=%d pts=%" GST_TIME_FORMAT, + GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT), + GST_TIME_ARGS (frame->presentation_timestamp)); + + if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) { + if (frame->presentation_timestamp != base_video_decoder->timestamp_offset) { + GST_DEBUG ("sync timestamp %" GST_TIME_FORMAT " diff %" GST_TIME_FORMAT, + GST_TIME_ARGS (frame->presentation_timestamp), + GST_TIME_ARGS (frame->presentation_timestamp - + base_video_decoder->segment.start)); + 
base_video_decoder->timestamp_offset = frame->presentation_timestamp; + base_video_decoder->field_index = 0; + } else { + /* This case is for one initial timestamp and no others, e.g., + * filesrc ! decoder ! xvimagesink */ + GST_WARNING ("sync timestamp didn't change, ignoring"); + frame->presentation_timestamp = GST_CLOCK_TIME_NONE; + } + } else { + if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT)) { + GST_WARNING ("sync point doesn't have timestamp"); + if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->timestamp_offset)) { + GST_WARNING + ("No base timestamp. Assuming frames start at segment start"); + base_video_decoder->timestamp_offset = + base_video_decoder->segment.start; + base_video_decoder->field_index = 0; + } + } + } + frame->field_index = base_video_decoder->field_index; + base_video_decoder->field_index += frame->n_fields; + + if (frame->presentation_timestamp == GST_CLOCK_TIME_NONE) { + frame->presentation_timestamp = + gst_base_video_decoder_get_field_timestamp (base_video_decoder, + frame->field_index); + frame->presentation_duration = GST_CLOCK_TIME_NONE; + frame->decode_timestamp = + gst_base_video_decoder_get_timestamp (base_video_decoder, + frame->decode_frame_number); + } + if (frame->presentation_duration == GST_CLOCK_TIME_NONE) { + frame->presentation_duration = + gst_base_video_decoder_get_field_duration (base_video_decoder, + frame->n_fields); + } + + base_video_decoder->last_timestamp = frame->presentation_timestamp; + + GST_DEBUG ("skipping frame %" GST_TIME_FORMAT, + GST_TIME_ARGS (frame->presentation_timestamp)); + + base_video_decoder->frames = + g_list_remove (base_video_decoder->frames, frame); + + gst_video_frame_unref (frame); +} + +GstFlowReturn +gst_base_video_decoder_have_frame (GstBaseVideoDecoder * base_video_decoder, + GstVideoFrame ** new_frame) +{ + GstVideoFrame *frame = base_video_decoder->current_frame; + GstBaseVideoDecoderClass *klass; + GstClockTime running_time; + GstClockTimeDiff deadline; + 
GstFlowReturn ret; + + klass = GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); + + if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT)) + base_video_decoder->distance_from_sync = 0; + + frame->distance_from_sync = base_video_decoder->distance_from_sync; + base_video_decoder->distance_from_sync++; + + if (frame->sink_buffer) { + frame->presentation_timestamp = GST_BUFFER_TIMESTAMP (frame->sink_buffer); + frame->presentation_duration = GST_BUFFER_DURATION (frame->sink_buffer); + } + + GST_DEBUG ("pts %" GST_TIME_FORMAT, + GST_TIME_ARGS (frame->presentation_timestamp)); + GST_DEBUG ("dts %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->decode_timestamp)); + GST_DEBUG ("dist %d", frame->distance_from_sync); + + base_video_decoder->frames = g_list_append (base_video_decoder->frames, + frame); + + running_time = gst_segment_to_running_time (&base_video_decoder->segment, + GST_FORMAT_TIME, frame->presentation_timestamp); + + if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->earliest_time)) + deadline = GST_CLOCK_DIFF (base_video_decoder->earliest_time, running_time); + else + deadline = G_MAXINT64; + + /* do something with frame */ + ret = klass->handle_frame (base_video_decoder, frame, deadline); + if (!GST_FLOW_IS_SUCCESS (ret)) { + GST_DEBUG ("flow error!"); + } + + /* create new frame */ + base_video_decoder->current_frame = + gst_base_video_decoder_new_frame (base_video_decoder); + + if (new_frame) + *new_frame = base_video_decoder->current_frame; + + return ret; +} + +GstVideoState * +gst_base_video_decoder_get_state (GstBaseVideoDecoder * base_video_decoder) +{ + return &base_video_decoder->state; + +} + +void +gst_base_video_decoder_set_state (GstBaseVideoDecoder * base_video_decoder, + GstVideoState * state) +{ + memcpy (&base_video_decoder->state, state, sizeof (*state)); + +} + +void +gst_base_video_decoder_lost_sync (GstBaseVideoDecoder * base_video_decoder) +{ + g_return_if_fail (GST_IS_BASE_VIDEO_DECODER (base_video_decoder)); + + 
GST_DEBUG ("lost_sync"); + + if (gst_adapter_available (base_video_decoder->input_adapter) >= 1) { + gst_adapter_flush (base_video_decoder->input_adapter, 1); + } + + base_video_decoder->have_sync = FALSE; +} + +GstVideoFrame * +gst_base_video_decoder_get_current_frame (GstBaseVideoDecoder * + base_video_decoder) +{ + return base_video_decoder->current_frame; +} + + +GstVideoFrame * +gst_base_video_decoder_get_oldest_frame (GstBaseVideoDecoder * + base_video_decoder) +{ + GList *g; + + g = g_list_first (base_video_decoder->frames); + + if (g == NULL) + return NULL; + return (GstVideoFrame *) (g->data); +} + +GstVideoFrame * +gst_base_video_decoder_get_frame (GstBaseVideoDecoder * base_video_decoder, + int frame_number) +{ + GList *g; + + for (g = g_list_first (base_video_decoder->frames); g; g = g_list_next (g)) { + GstVideoFrame *frame = g->data; + + if (frame->system_frame_number == frame_number) { + return frame; + } + } + + return NULL; +} + +void +gst_base_video_decoder_update_src_caps (GstBaseVideoDecoder * + base_video_decoder) +{ + g_return_if_fail (GST_IS_BASE_VIDEO_DECODER (base_video_decoder)); + + base_video_decoder->have_src_caps = FALSE; + gst_base_video_decoder_set_src_caps (base_video_decoder); +} + +/* GObject vmethod implementations */ +static void +gst_base_video_decoder_get_property (GObject * object, guint property_id, + GValue * value, GParamSpec * pspec) +{ + GstBaseVideoDecoder *base_video_decoder = GST_BASE_VIDEO_DECODER (object); + + switch (property_id) { + case PROP_PACKETIZED: + g_value_set_boolean (value, base_video_decoder->packetized); + break; + case PROP_SINK_CLIPPING: + g_value_set_boolean (value, base_video_decoder->sink_clipping); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec); + break; + } +} + +/* GObject vmethod implementations */ +static void +gst_base_video_decoder_set_property (GObject * object, guint property_id, + const GValue * value, GParamSpec * pspec) +{ + GstBaseVideoDecoder 
*base_video_decoder = GST_BASE_VIDEO_DECODER (object); + + switch (property_id) { + case PROP_PACKETIZED: + base_video_decoder->packetized = g_value_get_boolean (value); + break; + case PROP_SINK_CLIPPING: + base_video_decoder->sink_clipping = g_value_get_boolean (value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec); + break; + } +} + +static void +gst_base_video_decoder_finalize (GObject * object) +{ + GstBaseVideoDecoder *base_video_decoder; + GstBaseVideoDecoderClass *base_video_decoder_class; + + g_return_if_fail (GST_IS_BASE_VIDEO_DECODER (object)); + base_video_decoder = GST_BASE_VIDEO_DECODER (object); + base_video_decoder_class = GST_BASE_VIDEO_DECODER_GET_CLASS (object); + + gst_base_video_decoder_reset (base_video_decoder); + + if (base_video_decoder->input_adapter) { + g_object_unref (base_video_decoder->input_adapter); + base_video_decoder->input_adapter = NULL; + } + + GST_DEBUG_OBJECT (object, "finalize"); + + G_OBJECT_CLASS (parent_class)->finalize (object); +} + +static void +gst_base_video_decoder_base_init (gpointer g_class) +{ + GST_DEBUG_CATEGORY_INIT (basevideodecoder_debug, "basevideodecoder", 0, + "Base Video Decoder"); +} + +static void +gst_base_video_decoder_class_init (GstBaseVideoDecoderClass * klass) +{ + GObjectClass *gobject_class; + GstElementClass *gstelement_class; + + gobject_class = G_OBJECT_CLASS (klass); + gstelement_class = GST_ELEMENT_CLASS (klass); + + gobject_class->finalize = gst_base_video_decoder_finalize; + gobject_class->get_property = gst_base_video_decoder_get_property; + gobject_class->set_property = gst_base_video_decoder_set_property; + + g_object_class_install_property (gobject_class, PROP_PACKETIZED, + g_param_spec_boolean ("packetized", "Packetized", + "Whether the incoming data is already packetized into suitable " + "packets", FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property (gobject_class, PROP_PACKETIZED, + g_param_spec_boolean 
("parse-codec-data", "Parse Codec Data", + "Whether the codec_data should be parsed", FALSE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property (gobject_class, PROP_PACKETIZED, + g_param_spec_boolean ("sink-clipping", "Sink Clipping", + "If enabled GstBaseVideoDecoder will clip outgoing frames", FALSE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + gstelement_class->change_state = gst_base_video_decoder_change_state; + + parent_class = g_type_class_peek_parent (klass); +} + +static void +gst_base_video_decoder_init (GstBaseVideoDecoder * base_video_decoder, + GstBaseVideoDecoderClass * base_video_decoder_class) +{ + GstPadTemplate *pad_template; + GstPad *pad; + + GST_DEBUG ("gst_base_video_decoder_init"); + + pad_template = + gst_element_class_get_pad_template (GST_ELEMENT_CLASS + (base_video_decoder_class), "sink"); + g_return_if_fail (pad_template != NULL); + + base_video_decoder->sinkpad = pad = + gst_pad_new_from_template (pad_template, "sink"); + gst_element_add_pad (GST_ELEMENT (base_video_decoder), pad); + + gst_pad_set_chain_function (pad, gst_base_video_decoder_chain); + gst_pad_set_event_function (pad, gst_base_video_decoder_sink_event); + gst_pad_set_setcaps_function (pad, gst_base_video_decoder_sink_setcaps); + gst_pad_set_query_function (pad, gst_base_video_decoder_sink_query); + + if (base_video_decoder_class->create_srcpad) { + base_video_decoder->srcpad = pad = + base_video_decoder_class->create_srcpad (base_video_decoder, + base_video_decoder_class); + } else { + pad_template = + gst_element_class_get_pad_template (GST_ELEMENT_CLASS + (base_video_decoder_class), "src"); + g_return_if_fail (pad_template != NULL); + + base_video_decoder->srcpad = pad = + gst_pad_new_from_template (pad_template, "src"); + } + gst_element_add_pad (GST_ELEMENT (base_video_decoder), pad); + + gst_pad_set_event_function (pad, gst_base_video_decoder_src_event); + gst_pad_set_query_type_function (pad, 
gst_base_video_decoder_get_query_types); + gst_pad_set_query_function (pad, gst_base_video_decoder_src_query); + gst_pad_use_fixed_caps (pad); + + base_video_decoder->input_adapter = gst_adapter_new (); + + gst_segment_init (&base_video_decoder->segment, GST_FORMAT_TIME); + + base_video_decoder->current_frame = + gst_base_video_decoder_new_frame (base_video_decoder); + + /* properties */ + base_video_decoder->packetized = FALSE; + base_video_decoder->sink_clipping = TRUE; +} diff --git a/sys/vdpau/basevideodecoder/gstbasevideodecoder.h b/sys/vdpau/basevideodecoder/gstbasevideodecoder.h new file mode 100644 index 0000000000..1a56465e6b --- /dev/null +++ b/sys/vdpau/basevideodecoder/gstbasevideodecoder.h @@ -0,0 +1,206 @@ +/* GStreamer + * Copyright (C) 2008 David Schleef + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ + +#ifndef _GST_BASE_VIDEO_DECODER_H_ +#define _GST_BASE_VIDEO_DECODER_H_ + +#include "gstbasevideoutils.h" +#include "gstvideoframe.h" + +G_BEGIN_DECLS + +#define GST_TYPE_BASE_VIDEO_DECODER \ + (gst_base_video_decoder_get_type()) +#define GST_BASE_VIDEO_DECODER(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_BASE_VIDEO_DECODER,GstBaseVideoDecoder)) +#define GST_BASE_VIDEO_DECODER_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_BASE_VIDEO_DECODER,GstBaseVideoDecoderClass)) +#define GST_BASE_VIDEO_DECODER_GET_CLASS(obj) \ + (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_BASE_VIDEO_DECODER,GstBaseVideoDecoderClass)) +#define GST_IS_BASE_VIDEO_DECODER(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_BASE_VIDEO_DECODER)) +#define GST_IS_BASE_VIDEO_DECODER_CLASS(obj) \ + (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_BASE_VIDEO_DECODER)) + +/** + * GST_BASE_VIDEO_DECODER_SINK_NAME: + * + * The name of the templates for the sink pad. + */ +#define GST_BASE_VIDEO_DECODER_SINK_NAME "sink" +/** + * GST_BASE_VIDEO_DECODER_SRC_NAME: + * + * The name of the templates for the source pad. + */ +#define GST_BASE_VIDEO_DECODER_SRC_NAME "src" + +/** + * GST_BASE_VIDEO_CODEC_SRC_PAD: + * @obj: base video codec instance + * + * Gives the pointer to the source #GstPad object of the element. + */ +#define GST_BASE_VIDEO_DECODER_SRC_PAD(obj) (((GstBaseVideoDecoder *) (obj))->srcpad) + +/** + * GST_BASE_VIDEO_CODEC_SINK_PAD: + * @obj: base video codec instance + * + * Gives the pointer to the sink #GstPad object of the element. 
+ */ +#define GST_BASE_VIDEO_DECODER_SINK_PAD(obj) (((GstBaseVideoDecoder *) (obj))->sinkpad) + +/** + * * GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA: + * * + * */ +#define GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS + + +typedef enum _GstBaseVideoDecoderScanResult GstBaseVideoDecoderScanResult; + +enum _GstBaseVideoDecoderScanResult +{ + GST_BASE_VIDEO_DECODER_SCAN_RESULT_OK, + GST_BASE_VIDEO_DECODER_SCAN_RESULT_LOST_SYNC, + GST_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA +}; + +typedef struct _GstBaseVideoDecoder GstBaseVideoDecoder; +typedef struct _GstBaseVideoDecoderClass GstBaseVideoDecoderClass; + +struct _GstBaseVideoDecoder +{ + GstElement element; + + /*< private >*/ + GstPad *sinkpad; + GstPad *srcpad; + GstAdapter *input_adapter; + + GList *frames; + + gboolean have_sync; + gboolean discont; + + GstVideoState state; + GstSegment segment; + + guint64 presentation_frame_number; + guint64 system_frame_number; + + GstCaps *caps; + gboolean have_src_caps; + + GstVideoFrame *current_frame; + + gint distance_from_sync; + gint reorder_depth; + + GstClockTime buffer_timestamp; + + GstClockTime timestamp_offset; + + gdouble proportion; + GstClockTime earliest_time; + + guint64 input_offset; + guint64 frame_offset; + GstClockTime last_timestamp; + + guint64 base_picture_number; + + gint field_index; + + gboolean is_delta_unit; + + GList *timestamps; + gboolean have_segment; + + /* properties */ + gboolean sink_clipping; + gboolean packetized; + +}; + +struct _GstBaseVideoDecoderClass +{ + GstElementClass element_class; + + gboolean (*start) (GstBaseVideoDecoder *coder); + gboolean (*stop) (GstBaseVideoDecoder *coder); + gboolean (*flush) (GstBaseVideoDecoder *coder); + + gboolean (*set_sink_caps) (GstBaseVideoDecoder *base_video_decoder, + GstCaps *caps); + + GstPad *(*create_srcpad) (GstBaseVideoDecoder * base_video_decoder, + GstBaseVideoDecoderClass *base_video_decoder_class); + + + gint (*scan_for_sync) (GstBaseVideoDecoder *coder, GstAdapter 
*adapter); + + GstBaseVideoDecoderScanResult (*scan_for_packet_end) + (GstBaseVideoDecoder *coder, GstAdapter *adapter, guint *size, gboolean at_eos); + + GstFlowReturn (*parse_data) (GstBaseVideoDecoder *decoder, + GstBuffer *buf, gboolean at_eos); + GstFlowReturn (*parse_codec_data) (GstBaseVideoDecoder *decoder, + GstBuffer *buf); + + + GstVideoFrame *(*create_frame) (GstBaseVideoDecoder *coder); + GstFlowReturn (*handle_frame) (GstBaseVideoDecoder *coder, GstVideoFrame *frame, + GstClockTimeDiff deadline); + GstFlowReturn (*shape_output) (GstBaseVideoDecoder *coder, + GstBuffer *buf); + +}; + +GType gst_base_video_decoder_get_type (void); + +GstVideoFrame *gst_base_video_decoder_get_current_frame (GstBaseVideoDecoder + *base_video_decoder); + +GstVideoFrame *gst_base_video_decoder_get_frame (GstBaseVideoDecoder *coder, + gint frame_number); +GstVideoFrame *gst_base_video_decoder_get_oldest_frame (GstBaseVideoDecoder *coder); + +GstFlowReturn gst_base_video_decoder_finish_frame (GstBaseVideoDecoder *base_video_decoder, + GstVideoFrame *frame); +void gst_base_video_decoder_skip_frame (GstBaseVideoDecoder * base_video_decoder, + GstVideoFrame * frame); + +GstFlowReturn +gst_base_video_decoder_have_frame (GstBaseVideoDecoder *base_video_decoder, + GstVideoFrame **new_frame); + +GstVideoState * gst_base_video_decoder_get_state (GstBaseVideoDecoder *base_video_decoder); +void gst_base_video_decoder_set_state (GstBaseVideoDecoder *base_video_decoder, + GstVideoState *state); + +void gst_base_video_decoder_lost_sync (GstBaseVideoDecoder *base_video_decoder); + +void gst_base_video_decoder_update_src_caps (GstBaseVideoDecoder *base_video_decoder); + +G_END_DECLS + +#endif + diff --git a/sys/vdpau/basevideodecoder/gstbasevideoutils.h b/sys/vdpau/basevideodecoder/gstbasevideoutils.h new file mode 100644 index 0000000000..1591781281 --- /dev/null +++ b/sys/vdpau/basevideodecoder/gstbasevideoutils.h @@ -0,0 +1,55 @@ +/* GStreamer + * Copyright (C) 2008 David Schleef + * + * 
This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifndef _GST_BASE_VIDEO_UTILS_H_ +#define _GST_BASE_VIDEO_UTILS_H_ + +#define GST_USE_UNSTABLE_API 1 + +#ifndef GST_USE_UNSTABLE_API +#warning "The base video utils API is unstable and may change in future." +#warning "You can define GST_USE_UNSTABLE_API to avoid this warning." 
+#endif + +#include +#include +#include + +G_BEGIN_DECLS + +typedef struct _GstVideoState GstVideoState; + +struct _GstVideoState +{ + gint width, height; + gint fps_n, fps_d; + gint par_n, par_d; + + gboolean interlaced; + + gint clean_width, clean_height; + gint clean_offset_left, clean_offset_top; + + gint bytes_per_picture; + + GstBuffer *codec_data; + +}; + +#endif /* _GST_BASE_VIDEO_UTILS_H_ */ \ No newline at end of file diff --git a/sys/vdpau/basevideodecoder/gstvideoframe.c b/sys/vdpau/basevideodecoder/gstvideoframe.c new file mode 100644 index 0000000000..8eecc1933f --- /dev/null +++ b/sys/vdpau/basevideodecoder/gstvideoframe.c @@ -0,0 +1,105 @@ +/* +* GStreamer +* Copyright (C) 2009 Carl-Anton Ingmarsson +* +* This library is free software; you can redistribute it and/or +* modify it under the terms of the GNU Library General Public +* License as published by the Free Software Foundation; either +* version 2 of the License, or (at your option) any later version. +* +* This library is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +* Library General Public License for more details. +* +* You should have received a copy of the GNU Library General Public +* License along with this library; if not, write to the +* Free Software Foundation, Inc., 59 Temple Place - Suite 330, +* Boston, MA 02111-1307, USA. 
+*/ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstvideoframe.h" + +GST_DEBUG_CATEGORY_STATIC (gst_video_frame_debug); +#define GST_CAT_DEFAULT gst_video_frame_debug + +#define DEBUG_INIT(bla) \ +GST_DEBUG_CATEGORY_INIT (gst_video_frame_debug, "gstvideoframe", 0, "Video Frame"); + +GstVideoFrame * +gst_video_frame_new (void) +{ + GstVideoFrame *frame; + + frame = (GstVideoFrame *) gst_mini_object_new (GST_TYPE_VIDEO_FRAME); + + return frame; +} + +static GObjectClass *gst_video_frame_parent_class; + +static void +gst_video_frame_finalize (GstVideoFrame * frame) +{ + if (frame->sink_buffer) + gst_buffer_unref (frame->sink_buffer); + if (frame->src_buffer) + gst_buffer_unref (frame->src_buffer); + + GST_MINI_OBJECT_CLASS (gst_video_frame_parent_class)->finalize + (GST_MINI_OBJECT (frame)); +} + +static void +gst_video_frame_init (GstVideoFrame * frame, gpointer g_class) +{ + frame->decode_timestamp = GST_CLOCK_TIME_NONE; + frame->presentation_timestamp = GST_CLOCK_TIME_NONE; + frame->presentation_duration = GST_CLOCK_TIME_NONE; + frame->n_fields = 2; + + frame->sink_buffer = NULL; + frame->src_buffer = NULL; +} + +static void +gst_video_frame_class_init (gpointer g_class, gpointer class_data) +{ + GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class); + + gst_video_frame_parent_class = g_type_class_peek_parent (g_class); + + mini_object_class->finalize = (GstMiniObjectFinalizeFunction) + gst_video_frame_finalize; +} + + +GType +gst_video_frame_get_type (void) +{ + static GType _gst_video_frame_type = 0; + + if (G_UNLIKELY (_gst_video_frame_type == 0)) { + static const GTypeInfo info = { + sizeof (GstVideoFrameClass), + NULL, + NULL, + gst_video_frame_class_init, + NULL, + NULL, + sizeof (GstVideoFrame), + 0, + (GInstanceInitFunc) gst_video_frame_init, + NULL + }; + _gst_video_frame_type = g_type_register_static (GST_TYPE_MINI_OBJECT, + "GstVideoFrame", &info, 0); + + DEBUG_INIT (); + } + return _gst_video_frame_type; +} diff 
--git a/sys/vdpau/basevideodecoder/gstvideoframe.h b/sys/vdpau/basevideodecoder/gstvideoframe.h new file mode 100644 index 0000000000..fa6e87dc72 --- /dev/null +++ b/sys/vdpau/basevideodecoder/gstvideoframe.h @@ -0,0 +1,155 @@ +/* +* GStreamer +* Copyright (C) 2009 Carl-Anton Ingmarsson +* +* This library is free software; you can redistribute it and/or +* modify it under the terms of the GNU Library General Public +* License as published by the Free Software Foundation; either +* version 2 of the License, or (at your option) any later version. +* +* This library is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +* Library General Public License for more details. +* +* You should have received a copy of the GNU Library General Public +* License along with this library; if not, write to the +* Free Software Foundation, Inc., 59 Temple Place - Suite 330, +* Boston, MA 02111-1307, USA. +*/ + +#ifndef _GST_VIDEO_FRAME_H_ +#define _GST_VIDEO_FRAME_H_ + +#include + +#define GST_TYPE_VIDEO_FRAME (gst_video_frame_get_type()) +#define GST_IS_VIDEO_FRAME(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VIDEO_FRAME)) +#define GST_VIDEO_FRAME(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VIDEO_FRAME, GstVideoFrame)) + +/** + * GstVideoFrameFlag: + * @GST_VIDEO_FRAME_FLAG_PREROLL: the frame is part of a preroll and should not be + * displayed. + * @GST_VIDEO_FRAME_FLAG_DISCONT: the frame marks a discontinuity in the stream. + * This typically occurs after a seek or a dropped buffer from a live or + * network source. + * @GST_VIDEO_FRAME_FLAG_GAP: the frame has been created to fill a gap in the + * stream and contains media neutral data (elements can switch to optimized code + * path that ignores the buffer content). + * @GST_VIDEO_FRAME_FLAG_DELTA_UNIT: the frame is a keyframe. 
+ * @GST_VIDEO_FRAME_FLAG_SYNC_POINT: the frame marks a sync point. + * @GST_VIDEO_FRAME_FLAG_EOS: the frame is the last in the stream. + * @GST_VIDEO_FRAME_FLAG_TFF: If the frame is interlaced, then the first + * field in the video frame is the top field. If unset, the bottom field is first. + * @GST_VIDEO_FRAME_FLAG_LAST: additional flags can be added starting from this flag. + * A set of frame flags used to describe properties of a #GstVideoFrame. + */ +typedef enum +{ + GST_VIDEO_FRAME_FLAG_PREROLL = (GST_MINI_OBJECT_FLAG_LAST << 0), + GST_VIDEO_FRAME_FLAG_DISCONT = (GST_MINI_OBJECT_FLAG_LAST << 1), + GST_VIDEO_FRAME_FLAG_GAP = (GST_MINI_OBJECT_FLAG_LAST << 2), + GST_VIDEO_FRAME_FLAG_KEYFRAME = (GST_MINI_OBJECT_FLAG_LAST << 3), + GST_VIDEO_FRAME_FLAG_SYNC_POINT = (GST_MINI_OBJECT_FLAG_LAST << 4), + GST_VIDEO_FRAME_FLAG_EOS = (GST_MINI_OBJECT_FLAG_LAST << 5), + GST_VIDEO_FRAME_FLAG_TFF = (GST_MINI_OBJECT_FLAG_LAST << 6), + GST_VIDEO_FRAME_FLAG_LAST = (GST_MINI_OBJECT_FLAG_LAST << 7) +} GstVideoFrameFlag; + +typedef struct _GstVideoFrame GstVideoFrame; +typedef struct _GstVideoFrameClass GstVideoFrameClass; + +struct _GstVideoFrame +{ + GstMiniObject mini_object; + + GstClockTime decode_timestamp; + GstClockTime presentation_timestamp; + GstClockTime presentation_duration; + + gint system_frame_number; + gint decode_frame_number; + gint presentation_frame_number; + + gint distance_from_sync; + + GstBuffer *sink_buffer; + GstBuffer *src_buffer; + + gint field_index; + gint n_fields; + +}; + +struct _GstVideoFrameClass +{ + GstMiniObjectClass mini_object_class; +}; + +/* refcounting */ +/** + * gst_video_frame_ref: + * @frame: a #GstVideoFrame. + * + * Increases the refcount of the given frame by one. 
+ * + * Returns: @frame + */ +#ifdef _FOOL_GTK_DOC_ +G_INLINE_FUNC GstVideoFrame * gst_buffer_ref (GstVideoFrame * frame); +#endif + +static inline GstVideoFrame * +gst_video_frame_ref (GstVideoFrame *frame) +{ + return (GstVideoFrame *) gst_mini_object_ref (GST_MINI_OBJECT_CAST (frame)); +} + +/** + * gst_video_frame_unref: + * @frame: a #GstVideoFrame. + * + * Decreases the refcount of the frame. If the refcount reaches 0, the frame + * will be freed. + */ +#ifdef _FOOL_GTK_DOC_ +G_INLINE_FUNC void gst_video_frame_unref (GstVideoFrame * frame); +#endif + +static inline void +gst_video_frame_unref (GstVideoFrame * frame) +{ + gst_mini_object_unref (GST_MINI_OBJECT_CAST (frame)); +} + +/** + * GST_VIDEO_FRAME_FLAG_IS_SET: + * @buf: a #GstVideoFrame. + * @flag: the #GstVideoFrameFlag to check. + * + * Gives the status of a specific flag on a video frame. + */ +#define GST_VIDEO_FRAME_FLAG_IS_SET(frame,flag) GST_MINI_OBJECT_FLAG_IS_SET (frame, flag) +/** + * GST_VIDEO_FRAME_FLAG_SET: + * @buf: a #GstVideoFrame. + * @flag: the #GstVideoFrameFlag to set. + * + * Sets a frame flag on a video frame. + */ +#define GST_VIDEO_FRAME_FLAG_SET(frame,flag) GST_MINI_OBJECT_FLAG_SET (frame, flag) +/** + * GST_VIDEO_FRAME_FLAG_UNSET: + * @buf: a #GstVideoFrame. + * @flag: the #GstVideoFrameFlag to clear. + * + * Clears a frame flag. 
+ */ +#define GST_VIDEO_FRAME_FLAG_UNSET(frame,flag) GST_MINI_OBJECT_FLAG_UNSET (frame, flag) + +GstVideoFrame *gst_video_frame_new (void); + +GType gst_video_frame_get_type (void); + +#endif \ No newline at end of file diff --git a/sys/vdpau/gstvdp/Makefile.am b/sys/vdpau/gstvdp/Makefile.am new file mode 100644 index 0000000000..a4d2a7bfbe --- /dev/null +++ b/sys/vdpau/gstvdp/Makefile.am @@ -0,0 +1,28 @@ +lib_LTLIBRARIES = libgstvdp-@GST_MAJORMINOR@.la + +libgstvdp_@GST_MAJORMINOR@_la_SOURCES = \ + gstvdpdevice.c \ + gstvdputils.c \ + gstvdpvideobuffer.c \ + gstvdpoutputbuffer.c \ + gstvdpvideosrcpad.c \ + gstvdpoutputsrcpad.c \ + gstvdp.c + +libgstvdp_@GST_MAJORMINOR@includedir = $(includedir)/gstreamer-@GST_MAJORMINOR@/gst/vdpau +libgstvdp_@GST_MAJORMINOR@include_HEADERS = \ + gstvdpdevice.h \ + gstvdpvideobuffer.h \ + gstvdpoutputbuffer.h \ + gstvdpvideosrcpad.h \ + gstvdpoutputsrcpad.h \ + gstvdp.h + +libgstvdp_@GST_MAJORMINOR@_la_CFLAGS = $(GST_CFLAGS) $(GST_BASE_CFLAGS) \ + $(GST_PLUGINS_BASE_CFLAGS) $(X11_CFLAGS) $(VDPAU_CFLAGS) + +libgstvdp_@GST_MAJORMINOR@_la_LIBADD = $(GST_LIBS) $(X11_LIBS) $(VDPAU_LIBS) \ + -lgstvideo-$(GST_MAJORMINOR) + +libgstvdp_@GST_MAJORMINOR@_la_LDFLAGS = $(GST_LIB_LDFLAGS) $(GST_LT_LDFLAGS) $(GST_ALL_LDFLAGS) +libgstvdp_@GST_MAJORMINOR@_la_LIBTOOLFLAGS = --tag=disable-static \ No newline at end of file diff --git a/sys/vdpau/gstvdp.c b/sys/vdpau/gstvdp/gstvdp.c similarity index 100% rename from sys/vdpau/gstvdp.c rename to sys/vdpau/gstvdp/gstvdp.c diff --git a/sys/vdpau/gstvdp.h b/sys/vdpau/gstvdp/gstvdp.h similarity index 100% rename from sys/vdpau/gstvdp.h rename to sys/vdpau/gstvdp/gstvdp.h diff --git a/sys/vdpau/gstvdpdevice.c b/sys/vdpau/gstvdp/gstvdpdevice.c similarity index 100% rename from sys/vdpau/gstvdpdevice.c rename to sys/vdpau/gstvdp/gstvdpdevice.c diff --git a/sys/vdpau/gstvdpdevice.h b/sys/vdpau/gstvdp/gstvdpdevice.h similarity index 100% rename from sys/vdpau/gstvdpdevice.h rename to 
sys/vdpau/gstvdp/gstvdpdevice.h diff --git a/sys/vdpau/gstvdpoutputbuffer.c b/sys/vdpau/gstvdp/gstvdpoutputbuffer.c similarity index 100% rename from sys/vdpau/gstvdpoutputbuffer.c rename to sys/vdpau/gstvdp/gstvdpoutputbuffer.c diff --git a/sys/vdpau/gstvdpoutputbuffer.h b/sys/vdpau/gstvdp/gstvdpoutputbuffer.h similarity index 100% rename from sys/vdpau/gstvdpoutputbuffer.h rename to sys/vdpau/gstvdp/gstvdpoutputbuffer.h diff --git a/sys/vdpau/gstvdpoutputsrcpad.c b/sys/vdpau/gstvdp/gstvdpoutputsrcpad.c similarity index 100% rename from sys/vdpau/gstvdpoutputsrcpad.c rename to sys/vdpau/gstvdp/gstvdpoutputsrcpad.c diff --git a/sys/vdpau/gstvdpoutputsrcpad.h b/sys/vdpau/gstvdp/gstvdpoutputsrcpad.h similarity index 100% rename from sys/vdpau/gstvdpoutputsrcpad.h rename to sys/vdpau/gstvdp/gstvdpoutputsrcpad.h diff --git a/sys/vdpau/gstvdputils.c b/sys/vdpau/gstvdp/gstvdputils.c similarity index 100% rename from sys/vdpau/gstvdputils.c rename to sys/vdpau/gstvdp/gstvdputils.c diff --git a/sys/vdpau/gstvdputils.h b/sys/vdpau/gstvdp/gstvdputils.h similarity index 100% rename from sys/vdpau/gstvdputils.h rename to sys/vdpau/gstvdp/gstvdputils.h diff --git a/sys/vdpau/gstvdpvideobuffer.c b/sys/vdpau/gstvdp/gstvdpvideobuffer.c similarity index 100% rename from sys/vdpau/gstvdpvideobuffer.c rename to sys/vdpau/gstvdp/gstvdpvideobuffer.c diff --git a/sys/vdpau/gstvdpvideobuffer.h b/sys/vdpau/gstvdp/gstvdpvideobuffer.h similarity index 100% rename from sys/vdpau/gstvdpvideobuffer.h rename to sys/vdpau/gstvdp/gstvdpvideobuffer.h diff --git a/sys/vdpau/gstvdpvideosrcpad.c b/sys/vdpau/gstvdp/gstvdpvideosrcpad.c similarity index 100% rename from sys/vdpau/gstvdpvideosrcpad.c rename to sys/vdpau/gstvdp/gstvdpvideosrcpad.c diff --git a/sys/vdpau/gstvdpvideosrcpad.h b/sys/vdpau/gstvdp/gstvdpvideosrcpad.h similarity index 100% rename from sys/vdpau/gstvdpvideosrcpad.h rename to sys/vdpau/gstvdp/gstvdpvideosrcpad.h diff --git a/sys/vdpau/gstvdpau.c b/sys/vdpau/gstvdpau.c index 
d8e764e5b5..7e47e85a1c 100644 --- a/sys/vdpau/gstvdpau.c +++ b/sys/vdpau/gstvdpau.c @@ -5,9 +5,10 @@ #include -#include "gstvdp.h" +#include "gstvdp/gstvdp.h" -#include "gstvdpmpegdec.h" +#include "mpeg/gstvdpmpegdec.h" +#include "h264/gstvdph264dec.h" #include "gstvdpvideopostprocess.h" #include "gstvdpsink.h" @@ -20,6 +21,8 @@ vdpau_init (GstPlugin * vdpau_plugin) * least the generic/states test when there's no device available */ gst_element_register (vdpau_plugin, "vdpaumpegdec", GST_RANK_NONE, GST_TYPE_VDP_MPEG_DEC); + gst_element_register (vdpau_plugin, "vdpauh264dec", + GST_RANK_NONE, GST_TYPE_VDP_H264_DEC); gst_element_register (vdpau_plugin, "vdpauvideopostprocess", GST_RANK_MARGINAL, GST_TYPE_VDP_VIDEO_POST_PROCESS); gst_element_register (vdpau_plugin, "vdpausink", diff --git a/sys/vdpau/gstvdpmpegdec.c b/sys/vdpau/gstvdpmpegdec.c deleted file mode 100644 index 63fb5be0aa..0000000000 --- a/sys/vdpau/gstvdpmpegdec.c +++ /dev/null @@ -1,1161 +0,0 @@ -/* - * GStreamer - * Copyright (C) 2009 Carl-Anton Ingmarsson - * - * This library is free software; you can redistribute it and/or - * modify it under the terms of the GNU Library General Public - * License as published by the Free Software Foundation; either - * version 2 of the License, or (at your option) any later version. - * - * This library is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Library General Public License for more details. - * - * You should have received a copy of the GNU Library General Public - * License along with this library; if not, write to the - * Free Software Foundation, Inc., 59 Temple Place - Suite 330, - * Boston, MA 02111-1307, USA. - */ - -/** - * SECTION:element-vdpaumpegdec - * - * FIXME:Describe vdpaumpegdec here. - * - * - * Example launch line - * |[ - * gst-launch -v -m fakesrc ! vdpaumpegdec ! 
fakesink silent=TRUE - * ]| - * - */ - -#ifdef HAVE_CONFIG_H -# include -#endif - -#include -#include -#include -#include - -#include "mpegutil.h" -#include "gstvdpvideosrcpad.h" -#include "gstvdpvideobuffer.h" - -#include "gstvdpmpegdec.h" - -GST_DEBUG_CATEGORY_STATIC (gst_vdp_mpeg_dec_debug); -#define GST_CAT_DEFAULT gst_vdp_mpeg_dec_debug - -/* Filter signals and args */ -enum -{ - /* FILL ME */ - LAST_SIGNAL -}; - -enum -{ - PROP_0, - PROP_DISPLAY -}; - -/* the capabilities of the inputs and outputs. - * - * describe the real formats here. - */ -static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink", - GST_PAD_SINK, - GST_PAD_ALWAYS, - GST_STATIC_CAPS ("video/mpeg, mpegversion = (int) [ 1, 2 ], " - "systemstream = (boolean) false, parsed = (boolean) true") - ); - -#define DEBUG_INIT(bla) \ -GST_DEBUG_CATEGORY_INIT (gst_vdp_mpeg_dec_debug, "vdpaumpegdec", 0, "VDPAU powered mpeg decoder"); - -GST_BOILERPLATE_FULL (GstVdpMpegDec, gst_vdp_mpeg_dec, - GstElement, GST_TYPE_ELEMENT, DEBUG_INIT); - -static void gst_vdp_mpeg_dec_init_info (VdpPictureInfoMPEG1Or2 * vdp_info); - -typedef struct -{ - GstByteReader reader; - GstBuffer *buffer; - guint start; -} GstVdpMpegPacketizer; - -static GstBuffer * -gst_vdp_mpeg_packetizer_get_next_packet (GstVdpMpegPacketizer * packetizer) -{ - guint offset, size; - GstBuffer *buf; - - if (packetizer->start == -1) - return NULL; - - if (!gst_byte_reader_set_pos (&packetizer->reader, packetizer->start + 3)) - return NULL; - - offset = gst_byte_reader_masked_scan_uint32 (&packetizer->reader, 0xffffff00, - 0x00000100, 0, gst_byte_reader_get_remaining (&packetizer->reader)); - - if (offset != -1) { - offset = gst_byte_reader_get_pos (&packetizer->reader) + offset; - size = offset - packetizer->start; - } else - size = gst_byte_reader_get_remaining (&packetizer->reader) + 3; - - buf = gst_buffer_create_sub (packetizer->buffer, packetizer->start, size); - - packetizer->start = offset; - - return buf; -} - -static void 
-gst_vdp_mpeg_packetizer_init (GstVdpMpegPacketizer * packetizer, - GstBuffer * buffer) -{ - guint offset; - - gst_byte_reader_init_from_buffer (&packetizer->reader, buffer); - packetizer->buffer = buffer; - - offset = gst_byte_reader_masked_scan_uint32 (&packetizer->reader, 0xffffff00, - 0x00000100, 0, gst_byte_reader_get_remaining (&packetizer->reader)); - - packetizer->start = offset; -} - -static gboolean -gst_vdp_mpeg_dec_set_caps (GstPad * pad, GstCaps * caps) -{ - GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (gst_pad_get_parent (pad)); - GstStructure *structure; - - gint width, height; - gint fps_n, fps_d; - gint par_n, par_d; - gboolean interlaced = FALSE; - - GstCaps *src_caps; - gboolean res; - - const GValue *value; - - structure = gst_caps_get_structure (caps, 0); - - /* create src_pad caps */ - gst_structure_get_int (structure, "width", &width); - gst_structure_get_int (structure, "height", &height); - gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d); - gst_structure_get_fraction (structure, "pixel-aspect-ratio", &par_n, &par_d); - gst_structure_get_boolean (structure, "interlaced", &interlaced); - - src_caps = gst_pad_get_allowed_caps (mpeg_dec->src); - if (!src_caps) - goto error; - if (gst_caps_is_empty (src_caps)) - goto error; - - gst_caps_truncate (src_caps); - gst_caps_set_simple (src_caps, - "width", G_TYPE_INT, width, - "height", G_TYPE_INT, height, - "framerate", GST_TYPE_FRACTION, fps_n, fps_d, - "pixel-aspect-ratio", GST_TYPE_FRACTION, par_n, par_d, - "interlaced", G_TYPE_BOOLEAN, interlaced, NULL); - gst_pad_fixate_caps (mpeg_dec->src, src_caps); - - structure = gst_caps_get_structure (src_caps, 0); - - GST_DEBUG_OBJECT (mpeg_dec, "Setting source caps to %" GST_PTR_FORMAT, - src_caps); - - res = gst_pad_set_caps (mpeg_dec->src, src_caps); - gst_caps_unref (src_caps); - if (!res) - goto done; - - mpeg_dec->width = width; - mpeg_dec->height = height; - mpeg_dec->fps_n = fps_n; - mpeg_dec->fps_d = fps_d; - 
mpeg_dec->interlaced = interlaced; - - structure = gst_caps_get_structure (caps, 0); - /* parse caps to setup decoder */ - gst_structure_get_int (structure, "mpegversion", &mpeg_dec->version); - - /* Default to MPEG1 until we find otherwise */ - mpeg_dec->profile = VDP_DECODER_PROFILE_MPEG1; - - value = gst_structure_get_value (structure, "codec_data"); - if (value) { - GstBuffer *codec_data, *buf; - GstVdpMpegPacketizer packetizer; - - codec_data = gst_value_get_buffer (value); - gst_vdp_mpeg_packetizer_init (&packetizer, codec_data); - if ((buf = gst_vdp_mpeg_packetizer_get_next_packet (&packetizer))) { - MPEGSeqHdr hdr; - guint32 bitrate; - - mpeg_util_parse_sequence_hdr (&hdr, buf); - - memcpy (&mpeg_dec->vdp_info.intra_quantizer_matrix, - &hdr.intra_quantizer_matrix, 64); - memcpy (&mpeg_dec->vdp_info.non_intra_quantizer_matrix, - &hdr.non_intra_quantizer_matrix, 64); - - bitrate = hdr.bitrate; - gst_buffer_unref (buf); - - if ((buf = gst_vdp_mpeg_packetizer_get_next_packet (&packetizer))) { - MPEGSeqExtHdr ext; - - mpeg_util_parse_sequence_extension (&ext, buf); - if (mpeg_dec->version != 1) { - switch (ext.profile) { - case 5: - mpeg_dec->profile = VDP_DECODER_PROFILE_MPEG2_SIMPLE; - break; - default: - mpeg_dec->profile = VDP_DECODER_PROFILE_MPEG2_MAIN; - break; - } - } - - bitrate += (ext.bitrate_ext << 18); - gst_buffer_unref (buf); - } - - mpeg_dec->duration = - gst_util_uint64_scale (1, GST_SECOND * mpeg_dec->fps_d, - mpeg_dec->fps_n); - - mpeg_dec->byterate = bitrate * 50; - GST_DEBUG ("byterate: %" G_GINT64_FORMAT, mpeg_dec->byterate); - } - } - - res = TRUE; - -done: - gst_object_unref (mpeg_dec); - - return res; - -error: - res = FALSE; - goto done; -} - -static GstFlowReturn -gst_vdp_mpeg_dec_push_video_buffer (GstVdpMpegDec * mpeg_dec, - GstVdpVideoBuffer * buf) -{ - gint64 byterate; - - if (GST_BUFFER_TIMESTAMP (buf) == GST_CLOCK_TIME_NONE - && GST_CLOCK_TIME_IS_VALID (mpeg_dec->next_timestamp)) { - GST_BUFFER_TIMESTAMP (buf) = 
mpeg_dec->next_timestamp; - } else if (GST_BUFFER_TIMESTAMP (buf) == GST_CLOCK_TIME_NONE) { - GST_BUFFER_TIMESTAMP (buf) = gst_util_uint64_scale (mpeg_dec->frame_nr, - GST_SECOND * mpeg_dec->fps_d, mpeg_dec->fps_n); - } - - if (mpeg_dec->seeking) { - GstEvent *event; - - event = gst_event_new_new_segment (FALSE, - mpeg_dec->segment.rate, GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (buf), - mpeg_dec->segment.stop, GST_BUFFER_TIMESTAMP (buf)); - - gst_pad_push_event (mpeg_dec->src, event); - - mpeg_dec->seeking = FALSE; - } - - mpeg_dec->next_timestamp = GST_BUFFER_TIMESTAMP (buf) + - GST_BUFFER_DURATION (buf); - - gst_segment_set_last_stop (&mpeg_dec->segment, GST_FORMAT_TIME, - GST_BUFFER_TIMESTAMP (buf)); - - mpeg_dec->accumulated_duration += GST_BUFFER_DURATION (buf); - mpeg_dec->accumulated_size += GST_BUFFER_SIZE (buf); - if (mpeg_dec->accumulated_duration && mpeg_dec->accumulated_size != 0) { - byterate = gst_util_uint64_scale (mpeg_dec->accumulated_size, GST_SECOND, - mpeg_dec->accumulated_duration); - GST_DEBUG ("byterate: %" G_GINT64_FORMAT, mpeg_dec->byterate); - - mpeg_dec->byterate = (mpeg_dec->byterate + byterate) / 2; - } - - GST_DEBUG_OBJECT (mpeg_dec, - "Pushing buffer with timestamp: %" GST_TIME_FORMAT - " frame_nr: %" G_GINT64_FORMAT, - GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), GST_BUFFER_OFFSET (buf)); - - return gst_vdp_video_src_pad_push ((GstVdpVideoSrcPad *) mpeg_dec->src, buf); -} - -static GstFlowReturn -gst_vdp_mpeg_dec_alloc_buffer (GstVdpMpegDec * mpeg_dec, - GstVdpVideoBuffer ** outbuf) -{ - GstVdpVideoSrcPad *vdp_pad; - GstFlowReturn ret = GST_FLOW_OK; - - vdp_pad = (GstVdpVideoSrcPad *) mpeg_dec->src; - ret = gst_vdp_video_src_pad_alloc_buffer (vdp_pad, outbuf); - if (ret != GST_FLOW_OK) - return ret; - - if (mpeg_dec->decoder == VDP_INVALID_HANDLE) { - GstVdpDevice *device; - VdpStatus status; - - device = GST_VDP_VIDEO_BUFFER (*outbuf)->device; - - status = device->vdp_decoder_create (device->device, mpeg_dec->profile, - mpeg_dec->width, 
mpeg_dec->height, 2, &mpeg_dec->decoder); - if (status != VDP_STATUS_OK) { - GST_ELEMENT_ERROR (mpeg_dec, RESOURCE, READ, - ("Could not create vdpau decoder"), - ("Error returned from vdpau was: %s", - device->vdp_get_error_string (status))); - gst_buffer_unref (GST_BUFFER_CAST (*outbuf)); - return GST_FLOW_ERROR; - } - } - - return GST_FLOW_OK; -} - -static GstFlowReturn -gst_vdp_mpeg_dec_decode (GstVdpMpegDec * mpeg_dec, - GstClockTime timestamp, gint64 size) -{ - VdpPictureInfoMPEG1Or2 *info; - GstBuffer *buffer; - GstVdpVideoBuffer *outbuf; - VdpVideoSurface surface; - GstVdpDevice *device; - VdpBitstreamBuffer vbit[1]; - VdpStatus status; - - info = &mpeg_dec->vdp_info; - - if (info->picture_coding_type != B_FRAME) { - if (info->backward_reference != VDP_INVALID_HANDLE) { - gst_buffer_ref (GST_BUFFER_CAST (mpeg_dec->b_buffer)); - gst_vdp_mpeg_dec_push_video_buffer (mpeg_dec, mpeg_dec->b_buffer); - } - - if (info->forward_reference != VDP_INVALID_HANDLE) { - gst_buffer_unref (GST_BUFFER_CAST (mpeg_dec->f_buffer)); - info->forward_reference = VDP_INVALID_HANDLE; - } - - info->forward_reference = info->backward_reference; - mpeg_dec->f_buffer = mpeg_dec->b_buffer; - - info->backward_reference = VDP_INVALID_HANDLE; - } - - if (gst_vdp_mpeg_dec_alloc_buffer (mpeg_dec, &outbuf) != GST_FLOW_OK) { - gst_adapter_clear (mpeg_dec->adapter); - return GST_FLOW_ERROR; - } - - device = GST_VDP_VIDEO_BUFFER (outbuf)->device; - - GST_BUFFER_TIMESTAMP (outbuf) = timestamp; - GST_BUFFER_DURATION (outbuf) = mpeg_dec->duration; - GST_BUFFER_OFFSET (outbuf) = mpeg_dec->frame_nr; - GST_BUFFER_SIZE (outbuf) = size; - - if (info->picture_coding_type == I_FRAME) - GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT); - else - GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT); - - if (info->top_field_first) - GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_TFF); - else - GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_TFF); - - buffer = gst_adapter_take_buffer 
(mpeg_dec->adapter, - gst_adapter_available (mpeg_dec->adapter)); - - surface = GST_VDP_VIDEO_BUFFER (outbuf)->surface; - - vbit[0].struct_version = VDP_BITSTREAM_BUFFER_VERSION; - vbit[0].bitstream = GST_BUFFER_DATA (buffer); - vbit[0].bitstream_bytes = GST_BUFFER_SIZE (buffer); - - status = device->vdp_decoder_render (mpeg_dec->decoder, surface, - (VdpPictureInfo *) info, 1, vbit); - gst_buffer_unref (buffer); - info->slice_count = 0; - - if (status != VDP_STATUS_OK) { - GST_ELEMENT_ERROR (mpeg_dec, RESOURCE, READ, - ("Could not decode"), - ("Error returned from vdpau was: %s", - device->vdp_get_error_string (status))); - - gst_buffer_unref (GST_BUFFER_CAST (outbuf)); - - return GST_FLOW_ERROR; - } - - if (info->picture_coding_type == B_FRAME) { - gst_vdp_mpeg_dec_push_video_buffer (mpeg_dec, outbuf); - } else { - info->backward_reference = surface; - mpeg_dec->b_buffer = outbuf; - } - - return GST_FLOW_OK; -} - -static gboolean -gst_vdp_mpeg_dec_parse_picture_coding (GstVdpMpegDec * mpeg_dec, - GstBuffer * buffer) -{ - MPEGPictureExt pic_ext; - VdpPictureInfoMPEG1Or2 *info; - gint fields; - - info = &mpeg_dec->vdp_info; - - if (!mpeg_util_parse_picture_coding_extension (&pic_ext, buffer)) - return FALSE; - - memcpy (&mpeg_dec->vdp_info.f_code, &pic_ext.f_code, 4); - - info->intra_dc_precision = pic_ext.intra_dc_precision; - info->picture_structure = pic_ext.picture_structure; - info->top_field_first = pic_ext.top_field_first; - info->frame_pred_frame_dct = pic_ext.frame_pred_frame_dct; - info->concealment_motion_vectors = pic_ext.concealment_motion_vectors; - info->q_scale_type = pic_ext.q_scale_type; - info->intra_vlc_format = pic_ext.intra_vlc_format; - info->alternate_scan = pic_ext.alternate_scan; - - fields = 2; - if (pic_ext.picture_structure == 3) { - if (mpeg_dec->interlaced) { - if (pic_ext.progressive_frame == 0) - fields = 2; - if (pic_ext.progressive_frame == 0 && pic_ext.repeat_first_field == 0) - fields = 2; - if (pic_ext.progressive_frame == 1 && 
pic_ext.repeat_first_field == 1) - fields = 3; - } else { - if (pic_ext.repeat_first_field == 0) - fields = 2; - if (pic_ext.repeat_first_field == 1 && pic_ext.top_field_first == 0) - fields = 4; - if (pic_ext.repeat_first_field == 1 && pic_ext.top_field_first == 1) - fields = 6; - } - } else - fields = 1; - - GST_DEBUG ("fields: %d", fields); - - mpeg_dec->duration = gst_util_uint64_scale (fields, - GST_SECOND * mpeg_dec->fps_d, 2 * mpeg_dec->fps_n); - - return TRUE; -} - -static gboolean -gst_vdp_mpeg_dec_parse_sequence (GstVdpMpegDec * mpeg_dec, GstBuffer * buffer) -{ - MPEGSeqHdr hdr; - - if (!mpeg_util_parse_sequence_hdr (&hdr, buffer)) - return FALSE; - - memcpy (&mpeg_dec->vdp_info.intra_quantizer_matrix, - &hdr.intra_quantizer_matrix, 64); - memcpy (&mpeg_dec->vdp_info.non_intra_quantizer_matrix, - &hdr.non_intra_quantizer_matrix, 64); - - if (mpeg_dec->state == GST_VDP_MPEG_DEC_NEED_SEQUENCE) - mpeg_dec->state = GST_VDP_MPEG_DEC_NEED_DATA; - - return TRUE; -} - -static gboolean -gst_vdp_mpeg_dec_parse_picture (GstVdpMpegDec * mpeg_dec, GstBuffer * buffer) -{ - MPEGPictureHdr pic_hdr; - - if (!mpeg_util_parse_picture_hdr (&pic_hdr, buffer)) - return FALSE; - - if (pic_hdr.pic_type != I_FRAME - && mpeg_dec->vdp_info.backward_reference == VDP_INVALID_HANDLE) { - GST_DEBUG_OBJECT (mpeg_dec, - "Drop frame since we haven't got an I_FRAME yet"); - return FALSE; - } - if (pic_hdr.pic_type == B_FRAME - && mpeg_dec->vdp_info.forward_reference == VDP_INVALID_HANDLE) { - GST_DEBUG_OBJECT (mpeg_dec, - "Drop frame since we haven't got two non B_FRAMES yet"); - return FALSE; - } - - mpeg_dec->vdp_info.picture_coding_type = pic_hdr.pic_type; - - if (mpeg_dec->version == 1) { - mpeg_dec->vdp_info.full_pel_forward_vector = - pic_hdr.full_pel_forward_vector; - mpeg_dec->vdp_info.full_pel_backward_vector = - pic_hdr.full_pel_backward_vector; - memcpy (&mpeg_dec->vdp_info.f_code, &pic_hdr.f_code, 4); - } - - mpeg_dec->frame_nr = mpeg_dec->gop_frame + pic_hdr.tsn; - - return 
TRUE; -} - -static gboolean -gst_vdp_mpeg_dec_parse_gop (GstVdpMpegDec * mpeg_dec, GstBuffer * buffer) -{ - MPEGGop gop; - GstClockTime time; - - if (!mpeg_util_parse_gop (&gop, buffer)) - return FALSE; - - time = GST_SECOND * (gop.hour * 3600 + gop.minute * 60 + gop.second); - - GST_DEBUG ("gop timestamp: %" GST_TIME_FORMAT, GST_TIME_ARGS (time)); - - mpeg_dec->gop_frame = - gst_util_uint64_scale (time, mpeg_dec->fps_n, - mpeg_dec->fps_d * GST_SECOND) + gop.frame; - - if (mpeg_dec->state == GST_VDP_MPEG_DEC_NEED_GOP) - mpeg_dec->state = GST_VDP_MPEG_DEC_NEED_DATA; - - return TRUE; -} - -static gboolean -gst_vdp_mpeg_dec_parse_quant_matrix (GstVdpMpegDec * mpeg_dec, - GstBuffer * buffer) -{ - MPEGQuantMatrix qm; - - if (!mpeg_util_parse_quant_matrix (&qm, buffer)) - return FALSE; - - memcpy (&mpeg_dec->vdp_info.intra_quantizer_matrix, - &qm.intra_quantizer_matrix, 64); - memcpy (&mpeg_dec->vdp_info.non_intra_quantizer_matrix, - &qm.non_intra_quantizer_matrix, 64); - return TRUE; -} - -static void -gst_vdp_mpeg_dec_flush (GstVdpMpegDec * mpeg_dec) -{ - if (mpeg_dec->vdp_info.forward_reference != VDP_INVALID_HANDLE) - gst_buffer_unref (GST_BUFFER_CAST (mpeg_dec->f_buffer)); - if (mpeg_dec->vdp_info.backward_reference != VDP_INVALID_HANDLE) - gst_buffer_unref (GST_BUFFER_CAST (mpeg_dec->b_buffer)); - - gst_adapter_clear (mpeg_dec->adapter); - - gst_vdp_mpeg_dec_init_info (&mpeg_dec->vdp_info); - mpeg_dec->next_timestamp = GST_CLOCK_TIME_NONE; -} - -static void -gst_vdp_mpeg_dec_start (GstVdpMpegDec * mpeg_dec) -{ - gst_vdp_mpeg_dec_init_info (&mpeg_dec->vdp_info); - - mpeg_dec->decoder = VDP_INVALID_HANDLE; - - mpeg_dec->state = GST_VDP_MPEG_DEC_NEED_SEQUENCE; - - gst_segment_init (&mpeg_dec->segment, GST_FORMAT_TIME); - mpeg_dec->seeking = FALSE; - - mpeg_dec->accumulated_size = 0; - mpeg_dec->accumulated_duration = 0; -} - -static void -gst_vdp_mpeg_dec_stop (GstVdpMpegDec * mpeg_dec) -{ - GstVdpDevice *device; - - if ((device = - gst_vdp_video_src_pad_get_device 
(GST_VDP_VIDEO_SRC_PAD - (mpeg_dec->src)))) { - if (mpeg_dec->decoder != VDP_INVALID_HANDLE) - device->vdp_decoder_destroy (mpeg_dec->decoder); - } - - if (mpeg_dec->vdp_info.forward_reference != VDP_INVALID_HANDLE) - mpeg_dec->vdp_info.forward_reference = VDP_INVALID_HANDLE; - if (mpeg_dec->vdp_info.backward_reference != VDP_INVALID_HANDLE) - mpeg_dec->vdp_info.backward_reference = VDP_INVALID_HANDLE; - - gst_adapter_clear (mpeg_dec->adapter); -} - -static GstFlowReturn -gst_vdp_mpeg_dec_chain (GstPad * pad, GstBuffer * buffer) -{ - GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (gst_pad_get_parent (pad)); - GstVdpMpegPacketizer packetizer; - GstBuffer *buf; - GstFlowReturn ret = GST_FLOW_OK; - - if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))) { - GST_DEBUG_OBJECT (mpeg_dec, "Received discont buffer"); - gst_vdp_mpeg_dec_flush (mpeg_dec); - } - - - gst_vdp_mpeg_packetizer_init (&packetizer, buffer); - while ((buf = gst_vdp_mpeg_packetizer_get_next_packet (&packetizer))) { - GstBitReader b_reader = GST_BIT_READER_INIT_FROM_BUFFER (buf); - guint32 sync_code; - guint8 start_code; - - /* skip sync_code */ - gst_bit_reader_get_bits_uint32 (&b_reader, &sync_code, 8 * 3); - - /* start_code */ - gst_bit_reader_get_bits_uint8 (&b_reader, &start_code, 8); - - if (start_code >= MPEG_PACKET_SLICE_MIN - && start_code <= MPEG_PACKET_SLICE_MAX) { - GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_SLICE"); - - gst_buffer_ref (buf); - gst_adapter_push (mpeg_dec->adapter, buf); - mpeg_dec->vdp_info.slice_count++; - } - - switch (start_code) { - case MPEG_PACKET_PICTURE: - GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_PICTURE"); - - if (!gst_vdp_mpeg_dec_parse_picture (mpeg_dec, buf)) - goto done; - - break; - case MPEG_PACKET_SEQUENCE: - GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_SEQUENCE"); - gst_vdp_mpeg_dec_parse_sequence (mpeg_dec, buf); - break; - case MPEG_PACKET_EXTENSION: - { - guint8 ext_code; - - GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXTENSION"); - - /* 
ext_code */ - gst_bit_reader_get_bits_uint8 (&b_reader, &ext_code, 4); - switch (ext_code) { - case MPEG_PACKET_EXT_PICTURE_CODING: - GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXT_PICTURE_CODING"); - gst_vdp_mpeg_dec_parse_picture_coding (mpeg_dec, buf); - break; - case MPEG_PACKET_EXT_QUANT_MATRIX: - GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXT_QUANT_MATRIX"); - gst_vdp_mpeg_dec_parse_quant_matrix (mpeg_dec, buf); - break; - default: - break; - } - break; - } - case MPEG_PACKET_GOP: - GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_GOP"); - gst_vdp_mpeg_dec_parse_gop (mpeg_dec, buf); - break; - default: - break; - } - - gst_buffer_unref (buf); - } - - if (mpeg_dec->state == GST_VDP_MPEG_DEC_NEED_SEQUENCE || - mpeg_dec->state == GST_VDP_MPEG_DEC_NEED_GOP) { - gst_adapter_clear (mpeg_dec->adapter); - goto done; - } - - if (mpeg_dec->vdp_info.slice_count > 0) - ret = gst_vdp_mpeg_dec_decode (mpeg_dec, GST_BUFFER_TIMESTAMP (buffer), - GST_BUFFER_SIZE (buffer)); - -done: - gst_object_unref (mpeg_dec); - - return ret; -} - -static gboolean -gst_vdp_mpeg_dec_convert (GstVdpMpegDec * mpeg_dec, - GstFormat src_format, gint64 src_value, - GstFormat dest_format, gint64 * dest_value) -{ - - if (src_format == dest_format) { - *dest_value = src_value; - return TRUE; - } - - if (mpeg_dec->byterate == -1) - return FALSE; - - if (src_format == GST_FORMAT_BYTES && dest_format == GST_FORMAT_TIME) { - *dest_value = gst_util_uint64_scale (GST_SECOND, src_value, - mpeg_dec->byterate); - return TRUE; - } - - if (src_format == GST_FORMAT_TIME && dest_format == GST_FORMAT_BYTES) { - *dest_value = - gst_util_uint64_scale_int (src_value, mpeg_dec->byterate, GST_SECOND); - return TRUE; - } - - return FALSE; -} - -static const GstQueryType * -gst_mpeg_dec_get_querytypes (GstPad * pad) -{ - static const GstQueryType list[] = { - GST_QUERY_POSITION, - GST_QUERY_DURATION, - 0 - }; - - return list; -} - -static gboolean -gst_vdp_mpeg_dec_src_query (GstPad * pad, GstQuery * query) -{ - GstVdpMpegDec 
*mpeg_dec = GST_VDP_MPEG_DEC (gst_pad_get_parent (pad)); - gboolean res; - - switch (GST_QUERY_TYPE (query)) { - case GST_QUERY_POSITION: - { - GstFormat format; - - if ((res = gst_pad_query_default (pad, query))) - goto done; - - gst_query_parse_position (query, &format, NULL); - if (format == GST_FORMAT_TIME && - GST_CLOCK_TIME_IS_VALID (mpeg_dec->next_timestamp)) { - gst_query_set_position (query, GST_FORMAT_TIME, - mpeg_dec->next_timestamp); - res = TRUE; - } - break; - } - - case GST_QUERY_DURATION: - { - GstFormat format; - - if ((res = gst_pad_query_default (pad, query))) - goto done; - - gst_query_parse_duration (query, &format, NULL); - if (format == GST_FORMAT_TIME) { - gint64 bytes; - - format = GST_FORMAT_BYTES; - if (gst_pad_query_duration (pad, &format, &bytes) - && format == GST_FORMAT_BYTES) { - gint64 duration; - - if (gst_vdp_mpeg_dec_convert (mpeg_dec, GST_FORMAT_BYTES, - bytes, GST_FORMAT_TIME, &duration)) { - GST_DEBUG ("duration: %" GST_TIME_FORMAT, GST_TIME_ARGS (duration)); - gst_query_set_duration (query, GST_FORMAT_TIME, duration); - res = TRUE; - } - } - } - break; - } - - default: - res = gst_pad_query_default (pad, query); - } - -done: - gst_object_unref (mpeg_dec); - - return res; -} - -static gboolean -normal_seek (GstVdpMpegDec * mpeg_dec, GstEvent * event) -{ - gdouble rate; - GstFormat format; - GstSeekFlags flags; - GstSeekType cur_type, stop_type; - gint64 time_cur, bytes_cur; - gint64 time_stop, bytes_stop; - gboolean res; - gboolean update; - GstEvent *peer_event; - - GST_DEBUG ("normal seek"); - - gst_event_parse_seek (event, &rate, &format, &flags, - &cur_type, &time_cur, &stop_type, &time_stop); - - if (format != GST_FORMAT_TIME) - return FALSE; - - gst_segment_set_seek (&mpeg_dec->segment, rate, GST_FORMAT_TIME, flags, - cur_type, time_cur, stop_type, time_stop, &update); - - if (update) { - /* seek on bytes */ - if (!gst_vdp_mpeg_dec_convert (mpeg_dec, GST_FORMAT_TIME, time_cur, - GST_FORMAT_BYTES, &bytes_cur)) - goto 
convert_failed; - if (!gst_vdp_mpeg_dec_convert (mpeg_dec, GST_FORMAT_TIME, time_stop, - GST_FORMAT_BYTES, &bytes_stop)) - goto convert_failed; - - /* conversion succeeded, create the seek */ - peer_event = - gst_event_new_seek (rate, GST_FORMAT_BYTES, flags, - cur_type, bytes_cur, stop_type, bytes_stop); - - g_mutex_lock (mpeg_dec->mutex); - - /* do the seek */ - res = gst_pad_push_event (mpeg_dec->sink, peer_event); - - if (res) { - mpeg_dec->state = GST_VDP_MPEG_DEC_NEED_GOP; - mpeg_dec->seeking = TRUE; - } - - g_mutex_unlock (mpeg_dec->mutex); - - } else { - GstEvent *event; - - /* send segment with new rate */ - event = gst_event_new_new_segment (TRUE, - mpeg_dec->segment.rate, GST_FORMAT_TIME, mpeg_dec->segment.start, - mpeg_dec->segment.stop, mpeg_dec->segment.time); - - gst_pad_push_event (mpeg_dec->src, event); - res = TRUE; - } - - return res; - - /* ERRORS */ -convert_failed: - { - /* probably unsupported seek format */ - GST_DEBUG_OBJECT (mpeg_dec, - "failed to convert format %u into GST_FORMAT_TIME", format); - return FALSE; - } -} - -static gboolean -gst_vdp_mpeg_dec_src_event (GstPad * pad, GstEvent * event) -{ - GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (gst_pad_get_parent (pad)); - gboolean res; - - switch (GST_EVENT_TYPE (event)) { - case GST_EVENT_SEEK: - { - if ((res = gst_pad_event_default (pad, event))) - goto done; - - res = normal_seek (mpeg_dec, event); - - break; - } - default: - res = gst_pad_event_default (pad, event); - } - -done: - gst_object_unref (mpeg_dec); - - return res; -} - -static gboolean -gst_vdp_mpeg_dec_sink_event (GstPad * pad, GstEvent * event) -{ - GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (gst_pad_get_parent (pad)); - gboolean res; - - switch (GST_EVENT_TYPE (event)) { - case GST_EVENT_FLUSH_STOP: - { - GST_DEBUG_OBJECT (mpeg_dec, "flush stop"); - - gst_vdp_mpeg_dec_flush (mpeg_dec); - res = gst_pad_push_event (mpeg_dec->src, event); - - break; - } - case GST_EVENT_NEWSEGMENT: - { - gboolean update; - gdouble rate; - 
GstFormat format; - gint64 start; - gint64 stop; - gint64 position; - - gst_event_parse_new_segment (event, &update, &rate, &format, - &start, &stop, &position); - - if (format != GST_FORMAT_TIME) { - if (!gst_vdp_mpeg_dec_convert (mpeg_dec, format, start, - GST_FORMAT_TIME, &start)) - goto convert_error; - if (!gst_vdp_mpeg_dec_convert (mpeg_dec, format, stop, - GST_FORMAT_TIME, &stop)) - goto convert_error; - if (!gst_vdp_mpeg_dec_convert (mpeg_dec, format, position, - GST_FORMAT_TIME, &position)) - goto convert_error; - - gst_event_unref (event); - event = gst_event_new_new_segment (update, rate, GST_FORMAT_TIME, start, - stop, position); - } - - g_mutex_lock (mpeg_dec->mutex); - /* if we seek ourselves we don't push out a newsegment now since we - * use the calculated timestamp of the first frame for this */ - if (mpeg_dec->seeking) { - gst_event_unref (event); - res = TRUE; - g_mutex_unlock (mpeg_dec->mutex); - goto done; - } - g_mutex_unlock (mpeg_dec->mutex); - - GST_DEBUG_OBJECT (mpeg_dec, - "Pushing new segment update %d format %d start %" - GST_TIME_FORMAT " stop %" GST_TIME_FORMAT " position %" - GST_TIME_FORMAT, update, format, GST_TIME_ARGS (start), - GST_TIME_ARGS (stop), GST_TIME_ARGS (position)); - convert_error: - res = gst_pad_push_event (mpeg_dec->src, event); - - break; - } - default: - res = gst_pad_event_default (pad, event); - } - -done: - gst_object_unref (mpeg_dec); - - return res; -} - -static GstStateChangeReturn -gst_vdp_mpeg_dec_change_state (GstElement * element, GstStateChange transition) -{ - GstVdpMpegDec *mpeg_dec; - GstStateChangeReturn ret; - - mpeg_dec = GST_VDP_MPEG_DEC (element); - - switch (transition) { - case GST_STATE_CHANGE_READY_TO_PAUSED: - gst_vdp_mpeg_dec_start (mpeg_dec); - break; - default: - break; - } - - ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); - - switch (transition) { - case GST_STATE_CHANGE_PAUSED_TO_READY: - gst_vdp_mpeg_dec_stop (mpeg_dec); - break; - default: - break; - } 
- - return ret; -} - -/* GObject vmethod implementations */ -static void -gst_vdp_mpeg_dec_finalize (GObject * object) -{ - GstVdpMpegDec *mpeg_dec = (GstVdpMpegDec *) object; - - g_object_unref (mpeg_dec->adapter); - g_mutex_free (mpeg_dec->mutex); -} - -static void -gst_vdp_mpeg_dec_get_property (GObject * object, guint prop_id, - GValue * value, GParamSpec * pspec) -{ - GstVdpMpegDec *mpeg_dec = (GstVdpMpegDec *) object; - - switch (prop_id) { - case PROP_DISPLAY: - g_object_get_property (G_OBJECT (mpeg_dec->src), "display", value); - break; - default: - G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); - break; - } -} - -static void -gst_vdp_mpeg_dec_set_property (GObject * object, guint prop_id, - const GValue * value, GParamSpec * pspec) -{ - GstVdpMpegDec *mpeg_dec = (GstVdpMpegDec *) object; - - switch (prop_id) { - case PROP_DISPLAY: - g_object_set_property (G_OBJECT (mpeg_dec->src), "display", value); - break; - default: - G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); - break; - } -} - -static void -gst_vdp_mpeg_dec_base_init (gpointer gclass) -{ - GstElementClass *element_class = GST_ELEMENT_CLASS (gclass); - - GstCaps *src_caps; - GstPadTemplate *src_template; - - gst_element_class_set_details_simple (element_class, - "VDPAU Mpeg Decoder", - "Decoder", - "decode mpeg stream with vdpau", - "Carl-Anton Ingmarsson "); - - - gst_element_class_add_pad_template (element_class, - gst_static_pad_template_get (&sink_template)); - - src_caps = gst_vdp_video_buffer_get_caps (TRUE, VDP_CHROMA_TYPE_420); - src_template = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, - src_caps); - - gst_element_class_add_pad_template (element_class, src_template); -} - -/* initialize the vdpaumpegdecoder's class */ -static void -gst_vdp_mpeg_dec_class_init (GstVdpMpegDecClass * klass) -{ - GObjectClass *gobject_class; - GstElementClass *gstelement_class; - - gobject_class = (GObjectClass *) klass; - gstelement_class = (GstElementClass *) klass; - 
- gobject_class->get_property = gst_vdp_mpeg_dec_get_property; - gobject_class->set_property = gst_vdp_mpeg_dec_set_property; - gobject_class->finalize = gst_vdp_mpeg_dec_finalize; - - gstelement_class->change_state = gst_vdp_mpeg_dec_change_state; - - g_object_class_install_property (gobject_class, PROP_DISPLAY, - g_param_spec_string ("display", "Display", "X Display name", - NULL, G_PARAM_READWRITE | G_PARAM_CONSTRUCT)); -} - -static void -gst_vdp_mpeg_dec_init_info (VdpPictureInfoMPEG1Or2 * vdp_info) -{ - vdp_info->forward_reference = VDP_INVALID_HANDLE; - vdp_info->backward_reference = VDP_INVALID_HANDLE; - vdp_info->slice_count = 0; - vdp_info->picture_structure = 3; - vdp_info->picture_coding_type = 0; - vdp_info->intra_dc_precision = 0; - vdp_info->frame_pred_frame_dct = 1; - vdp_info->concealment_motion_vectors = 0; - vdp_info->intra_vlc_format = 0; - vdp_info->alternate_scan = 0; - vdp_info->q_scale_type = 0; - vdp_info->top_field_first = 1; -} - -static void -gst_vdp_mpeg_dec_init (GstVdpMpegDec * mpeg_dec, GstVdpMpegDecClass * gclass) -{ - GstPadTemplate *src_template; - - /* SRC PAD */ - src_template = - gst_element_class_get_pad_template (GST_ELEMENT_CLASS (gclass), "src"); - mpeg_dec->src = GST_PAD (gst_vdp_video_src_pad_new (src_template, "src")); - - gst_pad_set_event_function (mpeg_dec->src, - GST_DEBUG_FUNCPTR (gst_vdp_mpeg_dec_src_event)); - gst_pad_set_query_function (mpeg_dec->src, - GST_DEBUG_FUNCPTR (gst_vdp_mpeg_dec_src_query)); - gst_pad_set_query_type_function (mpeg_dec->src, - GST_DEBUG_FUNCPTR (gst_mpeg_dec_get_querytypes)); - gst_element_add_pad (GST_ELEMENT (mpeg_dec), mpeg_dec->src); - - mpeg_dec->sink = gst_pad_new_from_static_template (&sink_template, "sink"); - gst_pad_set_setcaps_function (mpeg_dec->sink, - GST_DEBUG_FUNCPTR (gst_vdp_mpeg_dec_set_caps)); - gst_pad_set_chain_function (mpeg_dec->sink, - GST_DEBUG_FUNCPTR (gst_vdp_mpeg_dec_chain)); - gst_pad_set_event_function (mpeg_dec->sink, - GST_DEBUG_FUNCPTR 
(gst_vdp_mpeg_dec_sink_event)); - gst_element_add_pad (GST_ELEMENT (mpeg_dec), mpeg_dec->sink); - - mpeg_dec->adapter = gst_adapter_new (); - mpeg_dec->mutex = g_mutex_new (); -} diff --git a/sys/vdpau/gstvdpsink.c b/sys/vdpau/gstvdpsink.c index bf799457df..815c74dd05 100644 --- a/sys/vdpau/gstvdpsink.c +++ b/sys/vdpau/gstvdpsink.c @@ -29,7 +29,7 @@ /* Debugging category */ #include -#include "gstvdpoutputbuffer.h" +#include "gstvdp/gstvdpoutputbuffer.h" /* Object header */ #include "gstvdpsink.h" diff --git a/sys/vdpau/gstvdpsink.h b/sys/vdpau/gstvdpsink.h index b4ced53266..6d1ff07df7 100644 --- a/sys/vdpau/gstvdpsink.h +++ b/sys/vdpau/gstvdpsink.h @@ -29,7 +29,7 @@ #include #include -#include "gstvdpdevice.h" +#include "gstvdp/gstvdpdevice.h" G_BEGIN_DECLS diff --git a/sys/vdpau/gstvdpvideopostprocess.c b/sys/vdpau/gstvdpvideopostprocess.c index 822a88cce5..1c4c567fd1 100644 --- a/sys/vdpau/gstvdpvideopostprocess.c +++ b/sys/vdpau/gstvdpvideopostprocess.c @@ -45,9 +45,9 @@ #include #include -#include "gstvdputils.h" -#include "gstvdpoutputbuffer.h" -#include "gstvdpoutputsrcpad.h" +#include "gstvdp/gstvdputils.h" +#include "gstvdp/gstvdpoutputbuffer.h" +#include "gstvdp/gstvdpoutputsrcpad.h" #include "gstvdpvideopostprocess.h" diff --git a/sys/vdpau/gstvdpvideopostprocess.h b/sys/vdpau/gstvdpvideopostprocess.h index 537b3d40d3..6780158004 100644 --- a/sys/vdpau/gstvdpvideopostprocess.h +++ b/sys/vdpau/gstvdpvideopostprocess.h @@ -23,8 +23,8 @@ #include -#include "gstvdpdevice.h" -#include "gstvdpvideobuffer.h" +#include "gstvdp/gstvdpdevice.h" +#include "gstvdp/gstvdpvideobuffer.h" G_BEGIN_DECLS diff --git a/sys/vdpau/mpeg/gstvdpmpegdec.c b/sys/vdpau/mpeg/gstvdpmpegdec.c new file mode 100644 index 0000000000..b2d77706fa --- /dev/null +++ b/sys/vdpau/mpeg/gstvdpmpegdec.c @@ -0,0 +1,872 @@ +/* + * GStreamer + * Copyright (C) 2009 Carl-Anton Ingmarsson + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU 
Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +/** + * SECTION:element-vdpaumpegdec + * + * FIXME:Describe vdpaumpegdec here. + * + * + * Example launch line + * |[ + * gst-launch -v -m fakesrc ! vdpaumpegdec ! fakesink silent=TRUE + * ]| + * + */ + +#ifdef HAVE_CONFIG_H +# include +#endif + +#include +#include +#include +#include + +#include "mpegutil.h" +#include "../gstvdp/gstvdpvideosrcpad.h" +#include "../gstvdp/gstvdpvideobuffer.h" + +#include "gstvdpmpegdec.h" + +GST_DEBUG_CATEGORY_STATIC (gst_vdp_mpeg_dec_debug); +#define GST_CAT_DEFAULT gst_vdp_mpeg_dec_debug + +/* Filter signals and args */ +enum +{ + /* FILL ME */ + LAST_SIGNAL +}; + +enum +{ + PROP_0, + PROP_DISPLAY +}; + +/* the capabilities of the inputs and outputs. + * + * describe the real formats here. 
+ */ +static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS ("video/mpeg, mpegversion = (int) [ 1, 2 ], " + "systemstream = (boolean) false") + ); + +#define DEBUG_INIT(bla) \ + GST_DEBUG_CATEGORY_INIT (gst_vdp_mpeg_dec_debug, "vdpaumpegdec", 0, \ + "VDPAU mpeg decoder"); + +GST_BOILERPLATE_FULL (GstVdpMpegDec, gst_vdp_mpeg_dec, + GstBaseVideoDecoder, GST_TYPE_BASE_VIDEO_DECODER, DEBUG_INIT); + +static void gst_vdp_mpeg_dec_init_info (VdpPictureInfoMPEG1Or2 * vdp_info); + +#define SYNC_CODE_SIZE 3 + +static VdpDecoderProfile +gst_vdp_mpeg_dec_get_profile (MPEGSeqExtHdr * hdr) +{ + VdpDecoderProfile profile; + + switch (hdr->profile) { + case 5: + profile = VDP_DECODER_PROFILE_MPEG2_SIMPLE; + break; + default: + profile = VDP_DECODER_PROFILE_MPEG2_MAIN; + break; + } + + return profile; +} + +static GstFlowReturn +gst_vdp_mpeg_dec_alloc_buffer (GstVdpMpegDec * mpeg_dec, + GstVdpVideoBuffer ** outbuf) +{ + GstVdpVideoSrcPad *vdp_pad; + GstFlowReturn ret = GST_FLOW_OK; + + vdp_pad = (GstVdpVideoSrcPad *) GST_BASE_VIDEO_DECODER_SRC_PAD (mpeg_dec); + ret = gst_vdp_video_src_pad_alloc_buffer (vdp_pad, outbuf); + if (ret != GST_FLOW_OK) + return ret; + + return GST_FLOW_OK; +} + +static GstFlowReturn +gst_vdp_mpeg_dec_shape_output (GstBaseVideoDecoder * base_video_decoder, + GstBuffer * buf) +{ + GstVdpVideoSrcPad *vdp_pad; + + vdp_pad = + (GstVdpVideoSrcPad *) GST_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder); + + return gst_vdp_video_src_pad_push (vdp_pad, GST_VDP_VIDEO_BUFFER (buf)); +} + +static gboolean +gst_vdp_mpeg_dec_handle_picture_coding (GstVdpMpegDec * mpeg_dec, + GstBuffer * buffer, GstVideoFrame * frame) +{ + MPEGPictureExt pic_ext; + VdpPictureInfoMPEG1Or2 *info; + gint fields; + + info = &mpeg_dec->vdp_info; + + if (!mpeg_util_parse_picture_coding_extension (&pic_ext, buffer)) + return FALSE; + + memcpy (&mpeg_dec->vdp_info.f_code, &pic_ext.f_code, 4); + + info->intra_dc_precision 
= pic_ext.intra_dc_precision; + info->picture_structure = pic_ext.picture_structure; + info->top_field_first = pic_ext.top_field_first; + info->frame_pred_frame_dct = pic_ext.frame_pred_frame_dct; + info->concealment_motion_vectors = pic_ext.concealment_motion_vectors; + info->q_scale_type = pic_ext.q_scale_type; + info->intra_vlc_format = pic_ext.intra_vlc_format; + info->alternate_scan = pic_ext.alternate_scan; + + fields = 2; + if (pic_ext.picture_structure == 3) { + if (mpeg_dec->stream_info.interlaced) { + if (pic_ext.progressive_frame == 0) + fields = 2; + if (pic_ext.progressive_frame == 0 && pic_ext.repeat_first_field == 0) + fields = 2; + if (pic_ext.progressive_frame == 1 && pic_ext.repeat_first_field == 1) + fields = 3; + } else { + if (pic_ext.repeat_first_field == 0) + fields = 2; + if (pic_ext.repeat_first_field == 1 && pic_ext.top_field_first == 0) + fields = 4; + if (pic_ext.repeat_first_field == 1 && pic_ext.top_field_first == 1) + fields = 6; + } + } else + fields = 1; + + frame->n_fields = fields; + + if (pic_ext.top_field_first) + GST_VIDEO_FRAME_FLAG_SET (frame, GST_VIDEO_FRAME_FLAG_TFF); + + return TRUE; +} + +static gboolean +gst_vdp_mpeg_dec_handle_picture (GstVdpMpegDec * mpeg_dec, GstBuffer * buffer) +{ + MPEGPictureHdr pic_hdr; + + if (!mpeg_util_parse_picture_hdr (&pic_hdr, buffer)) + return FALSE; + + mpeg_dec->vdp_info.picture_coding_type = pic_hdr.pic_type; + + if (mpeg_dec->stream_info.version == 1) { + mpeg_dec->vdp_info.full_pel_forward_vector = + pic_hdr.full_pel_forward_vector; + mpeg_dec->vdp_info.full_pel_backward_vector = + pic_hdr.full_pel_backward_vector; + memcpy (&mpeg_dec->vdp_info.f_code, &pic_hdr.f_code, 4); + } + + mpeg_dec->frame_nr = mpeg_dec->gop_frame + pic_hdr.tsn; + + return TRUE; +} + +static gboolean +gst_vdp_mpeg_dec_handle_gop (GstVdpMpegDec * mpeg_dec, GstBuffer * buffer) +{ + MPEGGop gop; + GstClockTime time; + + if (!mpeg_util_parse_gop (&gop, buffer)) + return FALSE; + + time = GST_SECOND * (gop.hour * 
3600 + gop.minute * 60 + gop.second); + + GST_DEBUG ("gop timestamp: %" GST_TIME_FORMAT, GST_TIME_ARGS (time)); + + mpeg_dec->gop_frame = + gst_util_uint64_scale (time, mpeg_dec->stream_info.fps_n, + mpeg_dec->stream_info.fps_d * GST_SECOND) + gop.frame; + + if (mpeg_dec->state == GST_VDP_MPEG_DEC_STATE_NEED_GOP) + mpeg_dec->state = GST_VDP_MPEG_DEC_STATE_NEED_DATA; + + return TRUE; +} + +static gboolean +gst_vdp_mpeg_dec_handle_quant_matrix (GstVdpMpegDec * mpeg_dec, + GstBuffer * buffer) +{ + MPEGQuantMatrix qm; + + if (!mpeg_util_parse_quant_matrix (&qm, buffer)) + return FALSE; + + memcpy (&mpeg_dec->vdp_info.intra_quantizer_matrix, + &qm.intra_quantizer_matrix, 64); + memcpy (&mpeg_dec->vdp_info.non_intra_quantizer_matrix, + &qm.non_intra_quantizer_matrix, 64); + return TRUE; +} + +static gboolean +gst_vdp_mpeg_dec_create_decoder (GstVdpMpegDec * mpeg_dec) +{ + GstVdpDevice *device; + + device = gst_vdp_video_src_pad_get_device + (GST_VDP_VIDEO_SRC_PAD (GST_BASE_VIDEO_DECODER_SRC_PAD (mpeg_dec))); + + if (device) { + VdpStatus status; + GstVdpMpegStreamInfo *stream_info; + + stream_info = &mpeg_dec->stream_info; + + if (mpeg_dec->decoder != VDP_INVALID_HANDLE) + device->vdp_decoder_destroy (mpeg_dec->decoder); + + status = device->vdp_decoder_create (device->device, stream_info->profile, + stream_info->width, stream_info->height, 2, &mpeg_dec->decoder); + if (status != VDP_STATUS_OK) { + GST_ELEMENT_ERROR (mpeg_dec, RESOURCE, READ, + ("Could not create vdpau decoder"), + ("Error returned from vdpau was: %s", + device->vdp_get_error_string (status))); + + return FALSE; + } + } + + return TRUE; +} + +static gboolean +gst_vdp_mpeg_dec_handle_sequence (GstVdpMpegDec * mpeg_dec, + GstBuffer * seq, GstBuffer * seq_ext) +{ + GstBaseVideoDecoder *base_video_decoder = GST_BASE_VIDEO_DECODER (mpeg_dec); + + MPEGSeqHdr hdr; + GstVdpMpegStreamInfo stream_info; + + if (!mpeg_util_parse_sequence_hdr (&hdr, seq)) + return FALSE; + + memcpy 
(&mpeg_dec->vdp_info.intra_quantizer_matrix, + &hdr.intra_quantizer_matrix, 64); + memcpy (&mpeg_dec->vdp_info.non_intra_quantizer_matrix, + &hdr.non_intra_quantizer_matrix, 64); + + stream_info.width = hdr.width; + stream_info.height = hdr.height; + + stream_info.fps_n = hdr.fps_n; + stream_info.fps_d = hdr.fps_d; + + stream_info.par_n = hdr.par_w; + stream_info.par_d = hdr.par_h; + + stream_info.interlaced = FALSE; + stream_info.version = 1; + stream_info.profile = VDP_DECODER_PROFILE_MPEG1; + + if (mpeg_dec->state == GST_VDP_MPEG_DEC_STATE_NEED_SEQUENCE) + mpeg_dec->state = GST_VDP_MPEG_DEC_STATE_NEED_DATA; + + if (seq_ext) { + MPEGSeqExtHdr ext; + + if (!mpeg_util_parse_sequence_extension (&ext, seq_ext)) + return FALSE; + + stream_info.fps_n *= (ext.fps_n_ext + 1); + stream_info.fps_d *= (ext.fps_d_ext + 1); + + stream_info.width += (ext.horiz_size_ext << 12); + stream_info.height += (ext.vert_size_ext << 12); + + stream_info.interlaced = !ext.progressive; + stream_info.version = 2; + stream_info.profile = gst_vdp_mpeg_dec_get_profile (&ext); + } + + if (memcmp (&mpeg_dec->stream_info, &stream_info, + sizeof (GstVdpMpegStreamInfo)) != 0) { + GstVideoState *state; + + state = gst_base_video_decoder_get_state (base_video_decoder); + + state->width = stream_info.width; + state->height = stream_info.height; + + state->fps_n = stream_info.fps_n; + state->fps_d = stream_info.fps_d; + + state->par_n = stream_info.par_n; + state->par_d = stream_info.par_d; + + state->interlaced = stream_info.interlaced; + + gst_base_video_decoder_set_state (base_video_decoder, state); + gst_base_video_decoder_update_src_caps (base_video_decoder); + + memcpy (&mpeg_dec->stream_info, &stream_info, + sizeof (GstVdpMpegStreamInfo)); + } + + return TRUE; +} + +static GstFlowReturn +gst_vdp_mpeg_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder, + GstVideoFrame * frame, GstClockTimeDiff deadline) +{ + GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder); + + 
VdpPictureInfoMPEG1Or2 *info; + GstVdpMpegFrame *mpeg_frame; + + GstFlowReturn ret; + GstVdpVideoBuffer *outbuf; + VdpVideoSurface surface; + GstVdpDevice *device; + VdpBitstreamBuffer vbit[1]; + VdpStatus status; + + /* MPEG_PACKET_SEQUENCE */ + mpeg_frame = GST_VDP_MPEG_FRAME (frame); + if (mpeg_frame->seq) { + gst_vdp_mpeg_dec_handle_sequence (mpeg_dec, mpeg_frame->seq, + mpeg_frame->seq_ext); + } + + /* MPEG_PACKET_PICTURE */ + if (mpeg_frame->pic) + gst_vdp_mpeg_dec_handle_picture (mpeg_dec, mpeg_frame->pic); + + /* MPEG_PACKET_EXT_PICTURE_CODING */ + if (mpeg_frame->pic_ext) + gst_vdp_mpeg_dec_handle_picture_coding (mpeg_dec, mpeg_frame->pic_ext, + frame); + + /* MPEG_PACKET_GOP */ + if (mpeg_frame->gop) + gst_vdp_mpeg_dec_handle_gop (mpeg_dec, mpeg_frame->gop); + + /* MPEG_PACKET_EXT_QUANT_MATRIX */ + if (mpeg_frame->qm_ext) + gst_vdp_mpeg_dec_handle_quant_matrix (mpeg_dec, mpeg_frame->qm_ext); + + + info = &mpeg_dec->vdp_info; + + info->slice_count = mpeg_frame->n_slices; + + /* check if we can decode the frame */ + if (info->picture_coding_type != I_FRAME + && info->backward_reference == VDP_INVALID_HANDLE) { + GST_DEBUG_OBJECT (mpeg_dec, + "Drop frame since we haven't got an I_FRAME yet"); + + gst_base_video_decoder_skip_frame (base_video_decoder, frame); + return GST_FLOW_OK; + } + if (info->picture_coding_type == B_FRAME + && info->forward_reference == VDP_INVALID_HANDLE) { + GST_DEBUG_OBJECT (mpeg_dec, + "Drop frame since we haven't got two non B_FRAMES yet"); + + gst_base_video_decoder_skip_frame (base_video_decoder, frame); + return GST_FLOW_OK; + } + + + if (info->picture_coding_type != B_FRAME) { + if (info->backward_reference != VDP_INVALID_HANDLE) { + gst_base_video_decoder_finish_frame (base_video_decoder, + mpeg_dec->b_frame); + } + + if (info->forward_reference != VDP_INVALID_HANDLE) { + gst_video_frame_unref (mpeg_dec->f_frame); + info->forward_reference = VDP_INVALID_HANDLE; + } + + info->forward_reference = info->backward_reference; + 
mpeg_dec->f_frame = mpeg_dec->b_frame; + + info->backward_reference = VDP_INVALID_HANDLE; + } + + if ((ret = gst_vdp_mpeg_dec_alloc_buffer (mpeg_dec, &outbuf)) != GST_FLOW_OK) + goto alloc_error; + + /* create decoder */ + if (mpeg_dec->decoder == VDP_INVALID_HANDLE) + gst_vdp_mpeg_dec_create_decoder (mpeg_dec); + + device = GST_VDP_VIDEO_BUFFER (outbuf)->device; + + if (info->picture_coding_type == I_FRAME) + GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT); + else + GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT); + + if (info->top_field_first) + GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_TFF); + else + GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_TFF); + + surface = GST_VDP_VIDEO_BUFFER (outbuf)->surface; + + vbit[0].struct_version = VDP_BITSTREAM_BUFFER_VERSION; + vbit[0].bitstream = GST_BUFFER_DATA (mpeg_frame->slices); + vbit[0].bitstream_bytes = GST_BUFFER_SIZE (mpeg_frame->slices); + + status = device->vdp_decoder_render (mpeg_dec->decoder, surface, + (VdpPictureInfo *) info, 1, vbit); + + if (status != VDP_STATUS_OK) + goto decode_error; + + frame->src_buffer = GST_BUFFER_CAST (outbuf); + + if (info->picture_coding_type == B_FRAME) { + gst_base_video_decoder_finish_frame (base_video_decoder, frame); + } else { + info->backward_reference = surface; + mpeg_dec->b_frame = gst_video_frame_ref (frame); + } + + return GST_FLOW_OK; + +alloc_error: + GST_ERROR_OBJECT (mpeg_dec, "Could not allocate output buffer"); + gst_base_video_decoder_skip_frame (base_video_decoder, frame); + return ret; + +decode_error: + GST_ELEMENT_ERROR (mpeg_dec, RESOURCE, READ, + ("Could not decode"), + ("Error returned from vdpau was: %s", + device->vdp_get_error_string (status))); + + gst_buffer_unref (GST_BUFFER_CAST (outbuf)); + gst_base_video_decoder_skip_frame (base_video_decoder, frame); + + return GST_FLOW_ERROR; +} + +static GstVideoFrame * +gst_vdp_mpeg_dec_create_frame (GstBaseVideoDecoder * base_video_decoder) +{ + return GST_VIDEO_FRAME
(gst_vdp_mpeg_frame_new ()); +} + +static GstFlowReturn +gst_vdp_mpeg_dec_parse_data (GstBaseVideoDecoder * base_video_decoder, + GstBuffer * buf, gboolean at_eos) +{ + GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder); + + GstVdpMpegFrame *mpeg_frame; + GstFlowReturn ret = GST_FLOW_OK; + GstBitReader b_reader = GST_BIT_READER_INIT_FROM_BUFFER (buf); + guint32 sync_code; + guint8 start_code; + + /* skip sync_code */ + gst_bit_reader_get_bits_uint32 (&b_reader, &sync_code, 8 * 3); + + /* start_code */ + gst_bit_reader_get_bits_uint8 (&b_reader, &start_code, 8); + + + if (mpeg_dec->state == GST_VDP_MPEG_DEC_STATE_NEED_SEQUENCE) { + if (start_code != MPEG_PACKET_SEQUENCE) { + GST_DEBUG_OBJECT (mpeg_dec, "Drop data since we haven't found a " + "MPEG_PACKET_SEQUENCE yet"); + + gst_buffer_unref (buf); + return GST_FLOW_OK; + } + } + + mpeg_frame = (GstVdpMpegFrame *) + gst_base_video_decoder_get_current_frame (base_video_decoder); + + if (start_code >= MPEG_PACKET_SLICE_MIN + && start_code <= MPEG_PACKET_SLICE_MAX) { + GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_SLICE"); + + gst_vdp_mpeg_frame_add_slice (mpeg_frame, buf); + goto done; + } + + switch (start_code) { + case MPEG_PACKET_SEQUENCE: + GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_SEQUENCE"); + + if (mpeg_dec->prev_packet != -1) { + ret = gst_base_video_decoder_have_frame (base_video_decoder, + (GstVideoFrame **) & mpeg_frame); + } + + mpeg_frame->seq = buf; + mpeg_dec->state = GST_VDP_MPEG_DEC_STATE_NEED_DATA; + break; + + case MPEG_PACKET_PICTURE: + GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_PICTURE"); + + if (mpeg_dec->prev_packet != MPEG_PACKET_SEQUENCE && + mpeg_dec->prev_packet != MPEG_PACKET_GOP) { + ret = gst_base_video_decoder_have_frame (base_video_decoder, + (GstVideoFrame **) & mpeg_frame); + } + + mpeg_frame->pic = buf; + break; + + case MPEG_PACKET_GOP: + GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_GOP"); + + if (mpeg_dec->prev_packet != MPEG_PACKET_SEQUENCE) { + ret = 
gst_base_video_decoder_have_frame (base_video_decoder, + (GstVideoFrame **) & mpeg_frame); + } + + mpeg_frame->gop = buf; + break; + + case MPEG_PACKET_EXTENSION: + { + guint8 ext_code; + + /* ext_code */ + gst_bit_reader_get_bits_uint8 (&b_reader, &ext_code, 4); + + GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXTENSION: %d", ext_code); + + switch (ext_code) { + case MPEG_PACKET_EXT_SEQUENCE: + GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXT_SEQUENCE"); + + + mpeg_frame->seq_ext = buf; + + /* so that we don't finish the frame if we get a MPEG_PACKET_PICTURE + * or MPEG_PACKET_GOP after this */ + start_code = MPEG_PACKET_SEQUENCE; + break; + + case MPEG_PACKET_EXT_SEQUENCE_DISPLAY: + GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXT_SEQUENCE_DISPLAY"); + + /* so that we don't finish the frame if we get a MPEG_PACKET_PICTURE + * or MPEG_PACKET_GOP after this */ + start_code = MPEG_PACKET_SEQUENCE; + break; + + case MPEG_PACKET_EXT_PICTURE_CODING: + GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXT_PICTURE_CODING"); + + mpeg_frame->pic_ext = buf; + break; + + case MPEG_PACKET_EXT_QUANT_MATRIX: + GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXT_QUANT_MATRIX"); + + mpeg_frame->qm_ext = buf; + break; + + default: + gst_buffer_unref (buf); + } + break; + } + + default: + gst_buffer_unref (buf); + } + +done: + mpeg_dec->prev_packet = start_code; + + return ret; +} + +static GstPad * +gst_vdp_mpeg_dec_create_srcpad (GstBaseVideoDecoder * base_video_decoder, + GstBaseVideoDecoderClass * base_video_decoder_class) +{ + GstPadTemplate *pad_template; + GstVdpVideoSrcPad *vdp_pad; + + pad_template = gst_element_class_get_pad_template + (GST_ELEMENT_CLASS (base_video_decoder_class), + GST_BASE_VIDEO_DECODER_SRC_NAME); + + vdp_pad = gst_vdp_video_src_pad_new (pad_template, + GST_BASE_VIDEO_DECODER_SRC_NAME); + + return GST_PAD (vdp_pad); +} + +static gint +gst_vdp_mpeg_dec_scan_for_sync (GstBaseVideoDecoder * base_video_decoder, + GstAdapter * adapter) +{ + gint m; + + m = 
gst_adapter_masked_scan_uint32 (adapter, 0xffffff00, 0x00000100, 0, + gst_adapter_available (adapter)); + if (m == -1) + return gst_adapter_available (adapter) - SYNC_CODE_SIZE; + + return m; +} + +static GstBaseVideoDecoderScanResult +gst_vdp_mpeg_dec_scan_for_packet_end (GstBaseVideoDecoder * base_video_decoder, + GstAdapter * adapter, guint * size, gboolean at_eos) +{ + guint8 data[SYNC_CODE_SIZE]; + guint32 sync_code; + + + gst_adapter_copy (adapter, data, 0, SYNC_CODE_SIZE); + sync_code = ((data[0] << 16) | (data[1] << 8) | data[2]); + + if (sync_code != 0x000001) + return GST_BASE_VIDEO_DECODER_SCAN_RESULT_LOST_SYNC; + + *size = gst_adapter_masked_scan_uint32 (adapter, 0xffffff00, 0x00000100, + SYNC_CODE_SIZE, gst_adapter_available (adapter) - SYNC_CODE_SIZE); + + if (*size == -1) + return GST_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA; + + return GST_BASE_VIDEO_DECODER_SCAN_RESULT_OK; +} + +static gboolean +gst_vdp_mpeg_dec_flush (GstBaseVideoDecoder * base_video_decoder) +{ + GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder); + + if (mpeg_dec->vdp_info.forward_reference != VDP_INVALID_HANDLE) + gst_video_frame_unref (mpeg_dec->f_frame); + if (mpeg_dec->vdp_info.backward_reference != VDP_INVALID_HANDLE) + gst_video_frame_unref (mpeg_dec->b_frame); + + gst_vdp_mpeg_dec_init_info (&mpeg_dec->vdp_info); + + mpeg_dec->prev_packet = -1; + + return TRUE; +} + +static gboolean +gst_vdp_mpeg_dec_start (GstBaseVideoDecoder * base_video_decoder) +{ + GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder); + + gst_vdp_mpeg_dec_init_info (&mpeg_dec->vdp_info); + + mpeg_dec->decoder = VDP_INVALID_HANDLE; + mpeg_dec->state = GST_VDP_MPEG_DEC_STATE_NEED_SEQUENCE; + + memset (&mpeg_dec->stream_info, 0, sizeof (GstVdpMpegStreamInfo)); + + return TRUE; +} + +static gboolean +gst_vdp_mpeg_dec_stop (GstBaseVideoDecoder * base_video_decoder) +{ + GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder); + + GstVdpVideoSrcPad
*vdp_pad; + GstVdpDevice *device; + + vdp_pad = + GST_VDP_VIDEO_SRC_PAD (GST_BASE_VIDEO_DECODER_SRC_PAD + (base_video_decoder)); + + if ((device = gst_vdp_video_src_pad_get_device (vdp_pad))) { + + if (mpeg_dec->decoder != VDP_INVALID_HANDLE) + device->vdp_decoder_destroy (mpeg_dec->decoder); + } + + if (mpeg_dec->vdp_info.forward_reference != VDP_INVALID_HANDLE) + mpeg_dec->vdp_info.forward_reference = VDP_INVALID_HANDLE; + if (mpeg_dec->vdp_info.backward_reference != VDP_INVALID_HANDLE) + mpeg_dec->vdp_info.backward_reference = VDP_INVALID_HANDLE; + + mpeg_dec->state = GST_VDP_MPEG_DEC_STATE_NEED_SEQUENCE; + + return TRUE; +} + + +/* GObject vmethod implementations */ +static void +gst_vdp_mpeg_dec_finalize (GObject * object) +{ + G_OBJECT_CLASS (parent_class)->finalize (object); +} + +static void +gst_vdp_mpeg_dec_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec) +{ + GstVdpMpegDec *mpeg_dec = (GstVdpMpegDec *) object; + + switch (prop_id) { + case PROP_DISPLAY: + g_object_get_property + (G_OBJECT (GST_BASE_VIDEO_DECODER_SRC_PAD (mpeg_dec)), "display", + value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static void +gst_vdp_mpeg_dec_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec) +{ + GstVdpMpegDec *mpeg_dec = (GstVdpMpegDec *) object; + + switch (prop_id) { + case PROP_DISPLAY: + g_object_set_property + (G_OBJECT (GST_BASE_VIDEO_DECODER_SRC_PAD (mpeg_dec)), "display", + value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static void +gst_vdp_mpeg_dec_base_init (gpointer gclass) +{ + GstElementClass *element_class = GST_ELEMENT_CLASS (gclass); + + GstCaps *src_caps; + GstPadTemplate *src_template; + + gst_element_class_set_details_simple (element_class, + "VDPAU Mpeg Decoder", + "Decoder", + "Decode mpeg stream with vdpau", + "Carl-Anton Ingmarsson "); + + + gst_element_class_add_pad_template (element_class, + 
gst_static_pad_template_get (&sink_template)); + + src_caps = gst_vdp_video_buffer_get_caps (TRUE, VDP_CHROMA_TYPE_420); + src_template = gst_pad_template_new (GST_BASE_VIDEO_DECODER_SRC_NAME, + GST_PAD_SRC, GST_PAD_ALWAYS, src_caps); + + gst_element_class_add_pad_template (element_class, src_template); +} + +/* initialize the vdpaumpegdecoder's class */ +static void +gst_vdp_mpeg_dec_class_init (GstVdpMpegDecClass * klass) +{ + GObjectClass *gobject_class; + GstElementClass *gstelement_class; + GstBaseVideoDecoderClass *base_video_decoder_class; + + gobject_class = (GObjectClass *) klass; + gstelement_class = (GstElementClass *) klass; + base_video_decoder_class = (GstBaseVideoDecoderClass *) klass; + + gobject_class->get_property = gst_vdp_mpeg_dec_get_property; + gobject_class->set_property = gst_vdp_mpeg_dec_set_property; + gobject_class->finalize = gst_vdp_mpeg_dec_finalize; + + base_video_decoder_class->start = gst_vdp_mpeg_dec_start; + base_video_decoder_class->stop = gst_vdp_mpeg_dec_stop; + base_video_decoder_class->flush = gst_vdp_mpeg_dec_flush; + base_video_decoder_class->create_srcpad = gst_vdp_mpeg_dec_create_srcpad; + + base_video_decoder_class->scan_for_sync = gst_vdp_mpeg_dec_scan_for_sync; + base_video_decoder_class->scan_for_packet_end = + gst_vdp_mpeg_dec_scan_for_packet_end; + base_video_decoder_class->parse_data = gst_vdp_mpeg_dec_parse_data; + + base_video_decoder_class->handle_frame = gst_vdp_mpeg_dec_handle_frame; + base_video_decoder_class->create_frame = gst_vdp_mpeg_dec_create_frame; + + base_video_decoder_class->shape_output = gst_vdp_mpeg_dec_shape_output; + + g_object_class_install_property (gobject_class, + PROP_DISPLAY, g_param_spec_string ("display", "Display", "X Display name", + NULL, G_PARAM_READWRITE | G_PARAM_CONSTRUCT)); +} + +static void +gst_vdp_mpeg_dec_init_info (VdpPictureInfoMPEG1Or2 * vdp_info) +{ + vdp_info->forward_reference = VDP_INVALID_HANDLE; + vdp_info->backward_reference = VDP_INVALID_HANDLE; + 
vdp_info->slice_count = 0; + vdp_info->picture_structure = 3; + vdp_info->picture_coding_type = 0; + vdp_info->intra_dc_precision = 0; + vdp_info->frame_pred_frame_dct = 1; + vdp_info->concealment_motion_vectors = 0; + vdp_info->intra_vlc_format = 0; + vdp_info->alternate_scan = 0; + vdp_info->q_scale_type = 0; + vdp_info->top_field_first = 1; +} + +static void +gst_vdp_mpeg_dec_init (GstVdpMpegDec * mpeg_dec, GstVdpMpegDecClass * gclass) +{ +} diff --git a/sys/vdpau/gstvdpmpegdec.h b/sys/vdpau/mpeg/gstvdpmpegdec.h similarity index 73% rename from sys/vdpau/gstvdpmpegdec.h rename to sys/vdpau/mpeg/gstvdpmpegdec.h index 1e57f26ba5..5bd20f7519 100644 --- a/sys/vdpau/gstvdpmpegdec.h +++ b/sys/vdpau/mpeg/gstvdpmpegdec.h @@ -24,7 +24,8 @@ #include #include -#include "gstvdpvideobuffer.h" +#include "../basevideodecoder/gstbasevideodecoder.h" +#include "gstvdpmpegframe.h" G_BEGIN_DECLS @@ -35,9 +36,9 @@ G_BEGIN_DECLS #define GST_IS_VDP_MPEG_DEC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VDP_MPEG_DEC)) typedef enum { - GST_VDP_MPEG_DEC_NEED_SEQUENCE, - GST_VDP_MPEG_DEC_NEED_GOP, - GST_VDP_MPEG_DEC_NEED_DATA + GST_VDP_MPEG_DEC_STATE_NEED_SEQUENCE, + GST_VDP_MPEG_DEC_STATE_NEED_GOP, + GST_VDP_MPEG_DEC_STATE_NEED_DATA } GstVdpMpegDecState; typedef struct _GstVdpMpegDec GstVdpMpegDec; @@ -45,55 +46,31 @@ typedef struct _GstVdpMpegDecClass GstVdpMpegDecClass; struct _GstVdpMpegDec { - GstElement element; + GstBaseVideoDecoder base_video_decoder; - /* pads */ - GstPad *src; - GstPad *sink; - - VdpDecoderProfile profile; VdpDecoder decoder; - - /* stream info */ - gint width, height; - gint fps_n, fps_d; - gboolean interlaced; - gint version; + + GstVdpMpegStreamInfo stream_info; /* decoder state */ GstVdpMpegDecState state; + gint prev_packet; /* currently decoded frame info */ - GstAdapter *adapter; VdpPictureInfoMPEG1Or2 vdp_info; guint64 frame_nr; - GstClockTime duration; /* frame_nr from GOP */ guint64 gop_frame; /* forward and backward reference */ - 
GstVdpVideoBuffer *f_buffer; - GstVdpVideoBuffer *b_buffer; - - /* calculated timestamp, size and duration */ - GstClockTime next_timestamp; - guint64 accumulated_size; - guint64 accumulated_duration; - - /* seek data */ - GstSegment segment; - gboolean seeking; - gint64 byterate; - - /* mutex */ - GMutex *mutex; + GstVideoFrame *f_frame, *b_frame; }; struct _GstVdpMpegDecClass { - GstElementClass element_class; + GstBaseVideoDecoderClass base_video_decoder_class; }; GType gst_vdp_mpeg_dec_get_type (void); diff --git a/sys/vdpau/mpeg/gstvdpmpegframe.c b/sys/vdpau/mpeg/gstvdpmpegframe.c new file mode 100644 index 0000000000..7f2cfad9bb --- /dev/null +++ b/sys/vdpau/mpeg/gstvdpmpegframe.c @@ -0,0 +1,133 @@ +/* +* GStreamer +* Copyright (C) 2009 Carl-Anton Ingmarsson +* +* This library is free software; you can redistribute it and/or +* modify it under the terms of the GNU Library General Public +* License as published by the Free Software Foundation; either +* version 2 of the License, or (at your option) any later version. +* +* This library is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +* Library General Public License for more details. +* +* You should have received a copy of the GNU Library General Public +* License along with this library; if not, write to the +* Free Software Foundation, Inc., 59 Temple Place - Suite 330, +* Boston, MA 02111-1307, USA. 
+*/ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstvdpmpegframe.h" + +GST_DEBUG_CATEGORY_STATIC (gst_vdp_mpeg_frame_debug); +#define GST_CAT_DEFAULT gst_vdp_mpeg_frame_debug + +#define DEBUG_INIT(bla) \ +GST_DEBUG_CATEGORY_INIT (gst_vdp_mpeg_frame_debug, "gstvdpmpegframe", 0, "Video Frame"); + +void +gst_vdp_mpeg_frame_add_slice (GstVdpMpegFrame * mpeg_frame, GstBuffer * buf) +{ + if (!mpeg_frame->slices) + mpeg_frame->slices = buf; + else + mpeg_frame->slices = gst_buffer_join (mpeg_frame->slices, buf); + mpeg_frame->n_slices++; +} + +GstVdpMpegFrame * +gst_vdp_mpeg_frame_new (void) +{ + GstVdpMpegFrame *frame; + + frame = (GstVdpMpegFrame *) gst_mini_object_new (GST_TYPE_VDP_MPEG_FRAME); + + return frame; +} + +static GObjectClass *gst_vdp_mpeg_frame_parent_class; + +static void +gst_vdp_mpeg_frame_finalize (GstVdpMpegFrame * mpeg_frame) +{ + if (mpeg_frame->seq) + gst_buffer_unref (mpeg_frame->seq); + if (mpeg_frame->seq_ext) + gst_buffer_unref (mpeg_frame->seq_ext); + + if (mpeg_frame->pic) + gst_buffer_unref (mpeg_frame->pic); + if (mpeg_frame->pic_ext) + gst_buffer_unref (mpeg_frame->pic_ext); + + if (mpeg_frame->gop) + gst_buffer_unref (mpeg_frame->gop); + if (mpeg_frame->qm_ext) + gst_buffer_unref (mpeg_frame->qm_ext); + + if (mpeg_frame->slices) + gst_buffer_unref (mpeg_frame->slices); + + + GST_MINI_OBJECT_CLASS (gst_vdp_mpeg_frame_parent_class)->finalize + (GST_MINI_OBJECT (mpeg_frame)); +} + +static void +gst_vdp_mpeg_frame_init (GstVdpMpegFrame * mpeg_frame, gpointer g_class) +{ + mpeg_frame->seq = NULL; + mpeg_frame->seq_ext = NULL; + + mpeg_frame->pic = NULL; + mpeg_frame->pic_ext = NULL; + + mpeg_frame->gop = NULL; + mpeg_frame->qm_ext = NULL; + + mpeg_frame->n_slices = 0; + mpeg_frame->slices = NULL; +} + +static void +gst_vdp_mpeg_frame_class_init (gpointer g_class, gpointer class_data) +{ + GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class); + + gst_vdp_mpeg_frame_parent_class = g_type_class_peek_parent 
(g_class); + + mini_object_class->finalize = (GstMiniObjectFinalizeFunction) + gst_vdp_mpeg_frame_finalize; +} + + +GType +gst_vdp_mpeg_frame_get_type (void) +{ + static GType _gst_vdp_mpeg_frame_type = 0; + + if (G_UNLIKELY (_gst_vdp_mpeg_frame_type == 0)) { + static const GTypeInfo info = { + sizeof (GstVdpMpegFrameClass), + NULL, + NULL, + gst_vdp_mpeg_frame_class_init, + NULL, + NULL, + sizeof (GstVdpMpegFrame), + 0, + (GInstanceInitFunc) gst_vdp_mpeg_frame_init, + NULL + }; + _gst_vdp_mpeg_frame_type = g_type_register_static (GST_TYPE_VIDEO_FRAME, + "GstVdpMpegFrame", &info, 0); + + DEBUG_INIT (); + } + return _gst_vdp_mpeg_frame_type; +} diff --git a/sys/vdpau/mpeg/gstvdpmpegframe.h b/sys/vdpau/mpeg/gstvdpmpegframe.h new file mode 100644 index 0000000000..7ccee722ae --- /dev/null +++ b/sys/vdpau/mpeg/gstvdpmpegframe.h @@ -0,0 +1,77 @@ +/* +* GStreamer +* Copyright (C) 2009 Carl-Anton Ingmarsson +* +* This library is free software; you can redistribute it and/or +* modify it under the terms of the GNU Library General Public +* License as published by the Free Software Foundation; either +* version 2 of the License, or (at your option) any later version. +* +* This library is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +* Library General Public License for more details. +* +* You should have received a copy of the GNU Library General Public +* License along with this library; if not, write to the +* Free Software Foundation, Inc., 59 Temple Place - Suite 330, +* Boston, MA 02111-1307, USA. 
+*/ + +#ifndef _GST_VDP_MPEG_FRAME_H_ +#define _GST_VDP_MPEG_FRAME_H_ + +#include + +#include + +#include "../basevideodecoder/gstvideoframe.h" + +#define GST_TYPE_VDP_MPEG_FRAME (gst_vdp_mpeg_frame_get_type()) +#define GST_IS_VDP_MPEG_FRAME(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_MPEG_FRAME)) +#define GST_VDP_MPEG_FRAME(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_MPEG_FRAME, GstVdpMpegFrame)) + +typedef struct _GstVdpMpegStreamInfo GstVdpMpegStreamInfo; + +struct _GstVdpMpegStreamInfo +{ + gint width, height; + gint fps_n, fps_d; + gint par_n, par_d; + gboolean interlaced; + gint version; + VdpDecoderProfile profile; +}; + +typedef struct _GstVdpMpegFrame GstVdpMpegFrame; +typedef struct _GstVdpMpegFrameClass GstVdpMpegFrameClass; + +struct _GstVdpMpegFrame +{ + GstVideoFrame video_frame; + + GstBuffer *seq; + GstBuffer *seq_ext; + + GstBuffer *pic; + GstBuffer *pic_ext; + + GstBuffer *gop; + GstBuffer *qm_ext; + + gint n_slices; + GstBuffer *slices; +}; + +struct _GstVdpMpegFrameClass +{ + GstVideoFrameClass video_frame_class; +}; + +void gst_vdp_mpeg_frame_add_slice (GstVdpMpegFrame *mpeg_frame, GstBuffer *buf); + +GstVdpMpegFrame *gst_vdp_mpeg_frame_new (void); + +GType gst_vdp_mpeg_frame_get_type (void); + +#endif \ No newline at end of file diff --git a/sys/vdpau/mpegutil.c b/sys/vdpau/mpeg/mpegutil.c similarity index 100% rename from sys/vdpau/mpegutil.c rename to sys/vdpau/mpeg/mpegutil.c diff --git a/sys/vdpau/mpegutil.h b/sys/vdpau/mpeg/mpegutil.h similarity index 100% rename from sys/vdpau/mpegutil.h rename to sys/vdpau/mpeg/mpegutil.h