diff --git a/sys/vdpau/Makefile.am b/sys/vdpau/Makefile.am index f81f968a7f..1a2bed5865 100644 --- a/sys/vdpau/Makefile.am +++ b/sys/vdpau/Makefile.am @@ -8,11 +8,14 @@ libgstvdpau_la_SOURCES = \ gstvdpvideobuffer.c \ gstvdp.c \ gstvdpyuvvideo.c \ - gstvdputils.c + gstvdputils.c \ + gstvdpvideopostprocess.c \ + gstvdpoutputbuffer.c \ + gstvdpsink.c libgstvdpau_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) $(X11_CFLAGS) $(VDPAU_CFLAGS) libgstvdpau_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS) \ - $(GST_PLUGINS_BASE_LIBS) $(X11_LIBS) -lgstvideo-$(GST_MAJORMINOR) \ + $(GST_PLUGINS_BASE_LIBS) $(X11_LIBS) -lgstvideo-$(GST_MAJORMINOR) -lgstinterfaces-$(GST_MAJORMINOR) \ $(VDPAU_LIBS) libgstvdpau_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) libgstvdpau_la_LIBTOOLFLAGS = --tag=disable-static @@ -24,6 +27,9 @@ noinst_HEADERS = \ gstvdpvideoyuv.h \ gstvdpvideobuffer.h \ gstvdpyuvvideo.h \ - gstvdputils.h + gstvdputils.h \ + gstvdpvideopostprocess.h \ + gstvdpoutputbuffer.h \ + gstvdpsink.c diff --git a/sys/vdpau/gstvdp.c b/sys/vdpau/gstvdp.c index 0cafbd683b..9943d7ced8 100644 --- a/sys/vdpau/gstvdp.c +++ b/sys/vdpau/gstvdp.c @@ -8,6 +8,8 @@ #include "gstvdpmpegdec.h" #include "gstvdpvideoyuv.h" #include "gstvdpyuvvideo.h" +#include "gstvdpvideopostprocess.h" +#include "gstvdpsink.h" static gboolean vdpau_init (GstPlugin * vdpau_plugin) diff --git a/sys/vdpau/gstvdpdevice.c b/sys/vdpau/gstvdpdevice.c index f4eb821d93..26ed2659d3 100644 --- a/sys/vdpau/gstvdpdevice.c +++ b/sys/vdpau/gstvdpdevice.c @@ -18,7 +18,6 @@ * Boston, MA 02111-1307, USA. */ -#include #include #include "gstvdpdevice.h" @@ -100,6 +99,25 @@ gst_vdp_device_constructed (GObject * object) &device->vdp_decoder_query_capabilities}, {VDP_FUNC_ID_DECODER_GET_PARAMETERS, &device->vdp_decoder_get_parameters}, + {VDP_FUNC_ID_VIDEO_MIXER_CREATE, &device->vdp_video_mixer_create}, + {VDP_FUNC_ID_VIDEO_MIXER_DESTROY, &device->vdp_video_mixer_destroy}, + {VDP_FUNC_ID_VIDEO_MIXER_RENDER, &device->vdp_video_mixer_render}, + {VDP_FUNC_ID_OUTPUT_SURFACE_CREATE, &device->vdp_output_surface_create}, + {VDP_FUNC_ID_OUTPUT_SURFACE_DESTROY, &device->vdp_output_surface_destroy}, + {VDP_FUNC_ID_OUTPUT_SURFACE_QUERY_CAPABILITIES, + &device->vdp_output_surface_query_capabilities}, + {VDP_FUNC_ID_PRESENTATION_QUEUE_TARGET_CREATE_X11, + &device->vdp_presentation_queue_target_create_x11}, + {VDP_FUNC_ID_PRESENTATION_QUEUE_CREATE, + &device->vdp_presentation_queue_create}, + {VDP_FUNC_ID_PRESENTATION_QUEUE_DESTROY, + &device->vdp_presentation_queue_destroy}, + {VDP_FUNC_ID_PRESENTATION_QUEUE_DISPLAY, + &device->vdp_presentation_queue_display}, + {VDP_FUNC_ID_PRESENTATION_QUEUE_BLOCK_UNTIL_SURFACE_IDLE, + &device->vdp_presentation_queue_block_until_surface_idle}, + {VDP_FUNC_ID_PRESENTATION_QUEUE_SET_BACKGROUND_COLOR, + &device->vdp_presentation_queue_set_background_color}, {0, NULL} }; diff --git a/sys/vdpau/gstvdpdevice.h b/sys/vdpau/gstvdpdevice.h index 19d0a9803b..b4fb422893 100644 --- a/sys/vdpau/gstvdpdevice.h +++ b/sys/vdpau/gstvdpdevice.h @@ -23,6 +23,7 @@ #include #include +#include #include @@ -68,6 +69,21 @@ struct _GstVdpDevice VdpDecoderRender *vdp_decoder_render; VdpDecoderQueryCapabilities *vdp_decoder_query_capabilities; VdpDecoderGetParameters *vdp_decoder_get_parameters; + + VdpVideoMixerCreate *vdp_video_mixer_create; + VdpVideoMixerDestroy *vdp_video_mixer_destroy; + VdpVideoMixerRender *vdp_video_mixer_render; + + VdpOutputSurfaceCreate *vdp_output_surface_create; + VdpOutputSurfaceDestroy *vdp_output_surface_destroy; + 
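/* Descriptive note: these new function pointers cover the video mixer, output surface and X11 presentation queue APIs; they are filled in from the function-id table in gst_vdp_device_constructed () and are what the new postprocess and sink elements call into. */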
VdpOutputSurfaceQueryCapabilities *vdp_output_surface_query_capabilities; + + VdpPresentationQueueTargetCreateX11 *vdp_presentation_queue_target_create_x11; + VdpPresentationQueueCreate *vdp_presentation_queue_create; + VdpPresentationQueueDestroy *vdp_presentation_queue_destroy; + VdpPresentationQueueDisplay *vdp_presentation_queue_display; + VdpPresentationQueueBlockUntilSurfaceIdle *vdp_presentation_queue_block_until_surface_idle; + VdpPresentationQueueSetBackgroundColor *vdp_presentation_queue_set_background_color; }; GType gst_vdp_device_get_type (void) G_GNUC_CONST; diff --git a/sys/vdpau/gstvdpoutputbuffer.c b/sys/vdpau/gstvdpoutputbuffer.c new file mode 100644 index 0000000000..2d235f024a --- /dev/null +++ b/sys/vdpau/gstvdpoutputbuffer.c @@ -0,0 +1,217 @@ +/* + * GStreamer + * Copyright (C) 2009 Carl-Anton Ingmarsson + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstvdpoutputbuffer.h" + +GST_DEBUG_CATEGORY_STATIC (gst_vdp_output_buffer_debug); +#define GST_CAT_DEFAULT gst_vdp_output_buffer_debug + +#define DEBUG_INIT(bla) \ +GST_DEBUG_CATEGORY_INIT (gst_vdp_output_buffer_debug, "vdpauoutputbuffer", 0, "VDPAU output buffer"); + +GstVdpOutputBuffer * +gst_vdp_output_buffer_new (GstVdpDevice * device, VdpRGBAFormat rgba_format, + gint width, gint height) +{ + GstVdpOutputBuffer *buffer; + VdpStatus status; + VdpOutputSurface surface; + + status = + device->vdp_output_surface_create (device->device, rgba_format, width, + height, &surface); + if (status != VDP_STATUS_OK) { + GST_ERROR ("Couldn't create a VdpOutputSurface, error returned was: %s", + device->vdp_get_error_string (status)); + return NULL; + } + + buffer = + (GstVdpOutputBuffer *) gst_mini_object_new (GST_TYPE_VDP_OUTPUT_BUFFER); + + buffer->device = g_object_ref (device); + buffer->surface = surface; + + return buffer; +} + +static GObjectClass *gst_vdp_output_buffer_parent_class; + +static void +gst_vdp_output_buffer_finalize (GstVdpOutputBuffer * buffer) +{ + GstVdpDevice *device; + VdpStatus status; + + device = buffer->device; + + status = device->vdp_output_surface_destroy (buffer->surface); + if (status != VDP_STATUS_OK) + GST_ERROR + ("Couldn't destroy the buffers VdpOutputSurface, error returned was: %s", + device->vdp_get_error_string (status)); + + g_object_unref (buffer->device); + + GST_MINI_OBJECT_CLASS (gst_vdp_output_buffer_parent_class)->finalize + (GST_MINI_OBJECT (buffer)); +} + +static void +gst_vdp_output_buffer_init (GstVdpOutputBuffer * buffer, gpointer g_class) +{ + buffer->device = NULL; + buffer->surface = VDP_INVALID_HANDLE; +} + +static void +gst_vdp_output_buffer_class_init (gpointer g_class, gpointer class_data) +{ + GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class); + + gst_vdp_output_buffer_parent_class 
= g_type_class_peek_parent (g_class); + + mini_object_class->finalize = (GstMiniObjectFinalizeFunction) + gst_vdp_output_buffer_finalize; +} + + +GType +gst_vdp_output_buffer_get_type (void) +{ + static GType _gst_vdp_output_buffer_type; + + if (G_UNLIKELY (_gst_vdp_output_buffer_type == 0)) { + static const GTypeInfo info = { + sizeof (GstBufferClass), + NULL, + NULL, + gst_vdp_output_buffer_class_init, + NULL, + NULL, + sizeof (GstVdpOutputBuffer), + 0, + (GInstanceInitFunc) gst_vdp_output_buffer_init, + NULL + }; + _gst_vdp_output_buffer_type = g_type_register_static (GST_TYPE_BUFFER, + "GstVdpOutputBuffer", &info, 0); + + DEBUG_INIT (); + } + return _gst_vdp_output_buffer_type; +} + +typedef struct +{ + VdpRGBAFormat format; + GstStaticCaps caps; +} GstVdpOutputBufferFormats; + +GstVdpOutputBufferFormats rgba_formats[] = { + {VDP_RGBA_FORMAT_A8, + GST_STATIC_CAPS ("video/x-raw-rgb, " + "bpp = (int)8, " + "depth = (int)0, " + "endianness = G_BIG_ENDIAN, " + "red_mask = (int)0x00, " + "green_mask = (int)0x00, " + "blue_mask = (int)0x00, " "alpha_mask = (int)0xff")}, + {VDP_RGBA_FORMAT_B10G10R10A2, + GST_STATIC_CAPS ("video/x-raw-rgb, " + "bpp = (int)32, " + "depth = (int)30, " + "endianness = G_BIG_ENDIAN, " + "red_mask = (int)0x000003fc, " + "green_mask = (int)0x003ff000, " + "blue_mask = (int)0xffc00000, " "alpha_mask = (int)0x00000003")}, + {VDP_RGBA_FORMAT_B8G8R8A8, + GST_STATIC_CAPS ("video/x-raw-rgb, " + "bpp = (int)32, " + "depth = (int)24, " + "endianness = G_BIG_ENDIAN, " + "red_mask = (int)0x0000ff00, " + "green_mask = (int)0x00ff0000, " + "blue_mask = (int)0xff000000, " "alpha_mask = (int)0x000000ff")}, + {VDP_RGBA_FORMAT_R10G10B10A2, + GST_STATIC_CAPS ("video/x-raw-rgb, " + "bpp = (int)32, " + "depth = (int)30, " + "endianness = G_BIG_ENDIAN, " + "red_mask = (int)0xffc00000, " + "green_mask = (int)0x003ff000, " + "blue_mask = (int)0x000003fc, " "alpha_mask = (int)0x00000003")}, + {VDP_RGBA_FORMAT_R8G8B8A8, + GST_STATIC_CAPS ("video/x-raw-rgb, " + "bpp = (int)32, " + "depth = (int)24, " + "endianness = G_BIG_ENDIAN" + "red_mask = (int)0xff000000, " + "green_mask = (int)0x00ff0000, " + "blue_mask = (int)0x0000ff00, " "alpha_mask = (int)0x000000ff")}, +}; + +int n_rgba_formats = G_N_ELEMENTS (rgba_formats); + +GstCaps * +gst_vdp_output_buffer_get_allowed_caps (GstVdpDevice * device) +{ + GstCaps *caps; + gint i; + + g_return_val_if_fail (GST_IS_VDP_DEVICE (device), NULL); + + caps = gst_caps_new_empty (); + + for (i = 0; i < n_rgba_formats; i++) { + VdpStatus status; + VdpBool is_supported; + guint max_w, max_h; + + status = device->vdp_output_surface_query_capabilities (device->device, + rgba_formats[i].format, &is_supported, &max_w, &max_h); + if (status != VDP_STATUS_OK && status != VDP_STATUS_INVALID_RGBA_FORMAT) { + GST_ERROR_OBJECT (device, + "Could not get query VDPAU output surface capabilites, " + "Error returned from vdpau was: %s", + device->vdp_get_error_string (status)); + + goto error; + } + + if (is_supported) { + GstCaps *format_caps; + + format_caps = gst_caps_new_simple ("video/x-vdpau-output", + "rgba-format", G_TYPE_INT, rgba_formats[i].format, + "width", GST_TYPE_INT_RANGE, 1, max_w, + "height", GST_TYPE_INT_RANGE, 1, max_h, NULL); + gst_caps_append (caps, format_caps); + } + } + +error: + + return caps; +} diff --git a/sys/vdpau/gstvdpoutputbuffer.h b/sys/vdpau/gstvdpoutputbuffer.h new file mode 100644 index 0000000000..afa159f623 --- /dev/null +++ b/sys/vdpau/gstvdpoutputbuffer.h @@ -0,0 +1,53 @@ +/* + * GStreamer + * Copyright (C) 2009 Carl-Anton 
Ingmarsson + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifndef _GST_VDP_OUTPUT_BUFFER_H_ +#define _GST_VDP_OUTPUT_BUFFER_H_ + +#include + +#include "gstvdpdevice.h" + +typedef struct _GstVdpOutputBuffer GstVdpOutputBuffer; + +#define GST_TYPE_VDP_OUTPUT_BUFFER (gst_vdp_output_buffer_get_type()) +#define GST_IS_VDP_OUTPUT_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_OUTPUT_BUFFER)) +#define GST_VDP_OUTPUT_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_OUTPUT_BUFFER, GstVdpOutputBuffer)) + +struct _GstVdpOutputBuffer { + GstBuffer buffer; + + GstVdpDevice *device; + VdpOutputSurface surface; +}; + +GType gst_vdp_output_buffer_get_type (void); + +GstVdpOutputBuffer* gst_vdp_output_buffer_new (GstVdpDevice * device, VdpRGBAFormat rgba_format, gint width, gint height); + +GstCaps *gst_vdp_output_buffer_get_allowed_caps (GstVdpDevice *device); + +#define GST_VDP_OUTPUT_CAPS \ + "video/x-vdpau-output, " \ + "rgba-format = (int)[0,4], " \ + "width = (int)[1,8192], " \ + "height = (int)[1,8192]" + +#endif \ No newline at end of file diff --git a/sys/vdpau/gstvdpsink.c b/sys/vdpau/gstvdpsink.c new file mode 100644 index 0000000000..8abe83ee6b --- /dev/null +++ b/sys/vdpau/gstvdpsink.c @@ -0,0 +1,1476 @@ +/* GStreamer + * Copyright (C) <2005> Julien Moutte + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +/* Our interfaces */ +#include +#include + +/* Debugging category */ +#include + +#include "gstvdpoutputbuffer.h" + +/* Object header */ +#include "gstvdpsink.h" + +GST_DEBUG_CATEGORY_STATIC (gst_vdp_sink_debug); +#define GST_CAT_DEFAULT gst_vdp_sink_debug + +typedef struct +{ + unsigned long flags; + unsigned long functions; + unsigned long decorations; + long input_mode; + unsigned long status; +} +MotifWmHints, MwmHints; + +#define MWM_HINTS_DECORATIONS (1L << 1) + +static void gst_vdp_sink_expose (GstXOverlay * overlay); + +enum +{ + PROP_0, + PROP_DISPLAY, + PROP_SYNCHRONOUS, + PROP_PIXEL_ASPECT_RATIO, + PROP_HANDLE_EVENTS, + PROP_HANDLE_EXPOSE +}; + +static GstVideoSinkClass *parent_class = NULL; + +/* the capabilities of the inputs and outputs. + * + * describe the real formats here. + */ +static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VDP_OUTPUT_CAPS)); + +#define DEBUG_INIT(bla) \ +GST_DEBUG_CATEGORY_INIT (gst_vdp_sink_debug, "vdpausink", 0, "VDPAU video sink"); + +/* ============================================================= */ +/* */ +/* Private Methods */ +/* */ +/* ============================================================= */ + +/* X11 stuff */ + +static gboolean +gst_vdp_sink_window_decorate (VdpSink * vdp_sink, GstVdpWindow * window) +{ + Atom hints_atom = None; + MotifWmHints *hints; + + g_return_val_if_fail (GST_IS_VDP_SINK (vdp_sink), FALSE); + g_return_val_if_fail (window != NULL, FALSE); + + g_mutex_lock (vdp_sink->x_lock); + + hints_atom = XInternAtom (vdp_sink->device->display, "_MOTIF_WM_HINTS", 1); + if (hints_atom == None) { + g_mutex_unlock (vdp_sink->x_lock); + return FALSE; + } + + hints = g_malloc0 (sizeof (MotifWmHints)); + + hints->flags |= MWM_HINTS_DECORATIONS; + hints->decorations = 1 << 0; + + XChangeProperty (vdp_sink->device->display, window->win, + hints_atom, hints_atom, 32, PropModeReplace, + (guchar *) hints, sizeof (MotifWmHints) / sizeof (long)); + + XSync (vdp_sink->device->display, FALSE); + + g_mutex_unlock (vdp_sink->x_lock); + + g_free (hints); + + return TRUE; +} + +static void +gst_vdp_sink_window_set_title (VdpSink * vdp_sink, + GstVdpWindow * window, const gchar * media_title) +{ + if (media_title) { + g_free (vdp_sink->media_title); + vdp_sink->media_title = g_strdup (media_title); + } + if (window) { + /* we have a window */ + if (window->internal) { + XTextProperty xproperty; + const gchar *app_name; + const gchar *title = NULL; + gchar *title_mem = NULL; + + /* set application name as a title */ + app_name = g_get_application_name (); + + if (app_name && vdp_sink->media_title) { + title = title_mem = g_strconcat (vdp_sink->media_title, " : ", + app_name, NULL); + } else if (app_name) { + title = app_name; + } else if (vdp_sink->media_title) { + title = vdp_sink->media_title; + } + + if (title) { + if ((XStringListToTextProperty (((char **) &title), 1, + &xproperty)) != 0) + XSetWMName (vdp_sink->device->display, window->win, &xproperty); + + g_free (title_mem); + } + } + } +} + +/* This function handles a GstVdpWindow creation */ +static GstVdpWindow * +gst_vdp_sink_window_new (VdpSink * vdp_sink, gint width, gint height) +{ + GstVdpWindow *window = NULL; + GstVdpDevice *device = vdp_sink->device; + + Window root; + gint screen_num; + gulong black; + + VdpStatus status; + VdpColor color = { 0, }; + + g_return_val_if_fail (GST_IS_VDP_SINK (vdp_sink), NULL); + + window = g_new0 
(GstVdpWindow, 1); + + window->width = width; + window->height = height; + window->internal = TRUE; + + g_mutex_lock (vdp_sink->x_lock); + + screen_num = DefaultScreen (device->display); + root = DefaultRootWindow (device->display); + black = XBlackPixel (device->display, screen_num); + + window->win = XCreateSimpleWindow (vdp_sink->device->display, + root, 0, 0, window->width, window->height, 0, 0, black); + + /* We have to do that to prevent X from redrawing the background on + ConfigureNotify. This takes away flickering of video when resizing. */ + XSetWindowBackgroundPixmap (vdp_sink->device->display, window->win, None); + + /* set application name as a title */ + gst_vdp_sink_window_set_title (vdp_sink, window, NULL); + + if (vdp_sink->handle_events) { + Atom wm_delete; + + XSelectInput (vdp_sink->device->display, window->win, ExposureMask | + StructureNotifyMask | PointerMotionMask | KeyPressMask | + KeyReleaseMask | ButtonPressMask | ButtonReleaseMask); + + /* Tell the window manager we'd like delete client messages instead of + * being killed */ + wm_delete = + XInternAtom (vdp_sink->device->display, "WM_DELETE_WINDOW", False); + (void) XSetWMProtocols (vdp_sink->device->display, window->win, &wm_delete, + 1); + } + + XMapRaised (vdp_sink->device->display, window->win); + + XSync (vdp_sink->device->display, FALSE); + + g_mutex_unlock (vdp_sink->x_lock); + + gst_vdp_sink_window_decorate (vdp_sink, window); + + status = device->vdp_presentation_queue_target_create_x11 (device->device, + window->win, &window->target); + if (status != VDP_STATUS_OK) { + GST_ELEMENT_ERROR (vdp_sink, RESOURCE, READ, + ("Could not create presentation target"), + ("Error returned from vdpau was: %s", + device->vdp_get_error_string (status))); + } + + status = + device->vdp_presentation_queue_create (device->device, window->target, + &window->queue); + if (status != VDP_STATUS_OK) { + GST_ELEMENT_ERROR (vdp_sink, RESOURCE, READ, + ("Could not create presentation queue"), + ("Error returned from vdpau was: %s", + device->vdp_get_error_string (status))); + } + + status = + device->vdp_presentation_queue_set_background_color (window->queue, + &color); + if (status != VDP_STATUS_OK) { + GST_ELEMENT_ERROR (vdp_sink, RESOURCE, READ, + ("Could not set background color"), + ("Error returned from vdpau was: %s", + device->vdp_get_error_string (status))); + } + + gst_x_overlay_got_xwindow_id (GST_X_OVERLAY (vdp_sink), window->win); + + return window; +} + +/* This function destroys a GstVdpWindow */ +static void +gst_vdp_sink_window_destroy (VdpSink * vdp_sink, GstVdpWindow * window) +{ + g_return_if_fail (window != NULL); + g_return_if_fail (GST_IS_VDP_SINK (vdp_sink)); + + g_mutex_lock (vdp_sink->x_lock); + + /* If we did not create that window we just free the GC and let it live */ + if (window->internal) + XDestroyWindow (vdp_sink->device->display, window->win); + else + XSelectInput (vdp_sink->device->display, window->win, 0); + + XSync (vdp_sink->device->display, FALSE); + + g_mutex_unlock (vdp_sink->x_lock); + + g_free (window); +} + +static void +gst_vdp_sink_window_update_geometry (VdpSink * vdp_sink, GstVdpWindow * window) +{ + XWindowAttributes attr; + + g_return_if_fail (window != NULL); + g_return_if_fail (GST_IS_VDP_SINK (vdp_sink)); + + /* Update the window geometry */ + g_mutex_lock (vdp_sink->x_lock); + + XGetWindowAttributes (vdp_sink->device->display, window->win, &attr); + + window->width = attr.width; + window->height = attr.height; + + g_mutex_unlock (vdp_sink->x_lock); +} + +/* This function 
handles XEvents that might be in the queue. It generates + GstEvent that will be sent upstream in the pipeline to handle interactivity + and navigation.*/ +static void +gst_vdp_sink_handle_xevents (VdpSink * vdp_sink) +{ + XEvent e; + guint pointer_x = 0, pointer_y = 0; + gboolean pointer_moved = FALSE; + gboolean exposed = FALSE, configured = FALSE; + + g_return_if_fail (GST_IS_VDP_SINK (vdp_sink)); + + /* Then we get all pointer motion events, only the last position is + interesting. */ + g_mutex_lock (vdp_sink->flow_lock); + g_mutex_lock (vdp_sink->x_lock); + while (XCheckWindowEvent (vdp_sink->device->display, + vdp_sink->window->win, PointerMotionMask, &e)) { + g_mutex_unlock (vdp_sink->x_lock); + g_mutex_unlock (vdp_sink->flow_lock); + + switch (e.type) { + case MotionNotify: + pointer_x = e.xmotion.x; + pointer_y = e.xmotion.y; + pointer_moved = TRUE; + break; + default: + break; + } + g_mutex_lock (vdp_sink->flow_lock); + g_mutex_lock (vdp_sink->x_lock); + } + + if (pointer_moved) { + g_mutex_unlock (vdp_sink->x_lock); + g_mutex_unlock (vdp_sink->flow_lock); + + GST_DEBUG ("vdp_sink pointer moved over window at %d,%d", + pointer_x, pointer_y); + gst_navigation_send_mouse_event (GST_NAVIGATION (vdp_sink), + "mouse-move", 0, pointer_x, pointer_y); + + g_mutex_lock (vdp_sink->flow_lock); + g_mutex_lock (vdp_sink->x_lock); + } + + /* We get all remaining events on our window to throw them upstream */ + while (XCheckWindowEvent (vdp_sink->device->display, + vdp_sink->window->win, + KeyPressMask | KeyReleaseMask | + ButtonPressMask | ButtonReleaseMask, &e)) { + KeySym keysym; + + /* We lock only for the X function call */ + g_mutex_unlock (vdp_sink->x_lock); + g_mutex_unlock (vdp_sink->flow_lock); + + switch (e.type) { + case ButtonPress: + /* Mouse button pressed/released over our window. We send upstream + events for interactivity/navigation */ + GST_DEBUG ("vdp_sink button %d pressed over window at %d,%d", + e.xbutton.button, e.xbutton.x, e.xbutton.x); + gst_navigation_send_mouse_event (GST_NAVIGATION (vdp_sink), + "mouse-button-press", e.xbutton.button, e.xbutton.x, e.xbutton.y); + break; + case ButtonRelease: + GST_DEBUG ("vdp_sink button %d release over window at %d,%d", + e.xbutton.button, e.xbutton.x, e.xbutton.x); + gst_navigation_send_mouse_event (GST_NAVIGATION (vdp_sink), + "mouse-button-release", e.xbutton.button, e.xbutton.x, e.xbutton.y); + break; + case KeyPress: + case KeyRelease: + /* Key pressed/released over our window. We send upstream + events for interactivity/navigation */ + GST_DEBUG ("vdp_sink key %d pressed over window at %d,%d", + e.xkey.keycode, e.xkey.x, e.xkey.x); + g_mutex_lock (vdp_sink->x_lock); + keysym = + XKeycodeToKeysym (vdp_sink->device->display, e.xkey.keycode, 0); + g_mutex_unlock (vdp_sink->x_lock); + if (keysym != NoSymbol) { + char *key_str = NULL; + + g_mutex_lock (vdp_sink->x_lock); + key_str = XKeysymToString (keysym); + g_mutex_unlock (vdp_sink->x_lock); + gst_navigation_send_key_event (GST_NAVIGATION (vdp_sink), + e.type == KeyPress ? "key-press" : "key-release", key_str); + + } else { + gst_navigation_send_key_event (GST_NAVIGATION (vdp_sink), + e.type == KeyPress ? 
"key-press" : "key-release", "unknown"); + } + break; + default: + GST_DEBUG_OBJECT (vdp_sink, "vdp_sink unhandled X event (%d)", e.type); + } + g_mutex_lock (vdp_sink->flow_lock); + g_mutex_lock (vdp_sink->x_lock); + } + + while (XCheckWindowEvent (vdp_sink->device->display, + vdp_sink->window->win, ExposureMask | StructureNotifyMask, &e)) { + switch (e.type) { + case Expose: + exposed = TRUE; + break; + case ConfigureNotify: + configured = TRUE; + break; + default: + break; + } + } + + if (vdp_sink->handle_expose && (exposed || configured)) { + g_mutex_unlock (vdp_sink->x_lock); + g_mutex_unlock (vdp_sink->flow_lock); + + gst_vdp_sink_expose (GST_X_OVERLAY (vdp_sink)); + + g_mutex_lock (vdp_sink->flow_lock); + g_mutex_lock (vdp_sink->x_lock); + } + + /* Handle Display events */ + while (XPending (vdp_sink->device->display)) { + XNextEvent (vdp_sink->device->display, &e); + + switch (e.type) { + case ClientMessage:{ + Atom wm_delete; + + wm_delete = XInternAtom (vdp_sink->device->display, + "WM_DELETE_WINDOW", False); + if (wm_delete == (Atom) e.xclient.data.l[0]) { + /* Handle window deletion by posting an error on the bus */ + GST_ELEMENT_ERROR (vdp_sink, RESOURCE, NOT_FOUND, + ("Output window was closed"), (NULL)); + + g_mutex_unlock (vdp_sink->x_lock); + gst_vdp_sink_window_destroy (vdp_sink, vdp_sink->window); + vdp_sink->window = NULL; + g_mutex_lock (vdp_sink->x_lock); + } + break; + } + default: + break; + } + } + + g_mutex_unlock (vdp_sink->x_lock); + g_mutex_unlock (vdp_sink->flow_lock); +} + +static gpointer +gst_vdp_sink_event_thread (VdpSink * vdp_sink) +{ + g_return_val_if_fail (GST_IS_VDP_SINK (vdp_sink), NULL); + + GST_OBJECT_LOCK (vdp_sink); + while (vdp_sink->running) { + GST_OBJECT_UNLOCK (vdp_sink); + + if (vdp_sink->window) { + gst_vdp_sink_handle_xevents (vdp_sink); + } + g_usleep (100000); + + GST_OBJECT_LOCK (vdp_sink); + } + GST_OBJECT_UNLOCK (vdp_sink); + + return NULL; +} + +/* This function calculates the pixel aspect ratio */ +static GValue * +gst_vdp_sink_calculate_par (Display * display) +{ + static const gint par[][2] = { + {1, 1}, /* regular screen */ + {16, 15}, /* PAL TV */ + {11, 10}, /* 525 line Rec.601 video */ + {54, 59}, /* 625 line Rec.601 video */ + {64, 45}, /* 1280x1024 on 16:9 display */ + {5, 3}, /* 1280x1024 on 4:3 display */ + {4, 3} /* 800x600 on 16:9 display */ + }; + gint screen_num; + gint width, height; + gint widthmm, heightmm; + gint i; + gint index; + gdouble ratio; + gdouble delta; + GValue *par_value; + +#define DELTA(idx) (ABS (ratio - ((gdouble) par[idx][0] / par[idx][1]))) + + screen_num = DefaultScreen (display); + width = DisplayWidth (display, screen_num); + height = DisplayHeight (display, screen_num); + widthmm = DisplayWidthMM (display, screen_num); + heightmm = DisplayHeightMM (display, screen_num); + + /* first calculate the "real" ratio based on the X values; + * which is the "physical" w/h divided by the w/h in pixels of the display */ + ratio = (gdouble) (widthmm * height) + / (heightmm * width); + + /* DirectFB's X in 720x576 reports the physical dimensions wrong, so + * override here */ + if (width == 720 && height == 576) { + ratio = 4.0 * 576 / (3.0 * 720); + } + GST_DEBUG ("calculated pixel aspect ratio: %f", ratio); + + /* now find the one from par[][2] with the lowest delta to the real one */ + delta = DELTA (0); + index = 0; + + for (i = 1; i < sizeof (par) / (sizeof (gint) * 2); ++i) { + gdouble this_delta = DELTA (i); + + if (this_delta < delta) { + index = i; + delta = this_delta; + } + } + + GST_DEBUG 
("Decided on index %d (%d/%d)", index, + par[index][0], par[index][1]); + + par_value = g_new0 (GValue, 1); + g_value_init (par_value, GST_TYPE_FRACTION); + gst_value_set_fraction (par_value, par[index][0], par[index][1]); + GST_DEBUG ("set X11 PAR to %d/%d", + gst_value_get_fraction_numerator (par_value), + gst_value_get_fraction_denominator (par_value)); + + return par_value; +} + +static GstCaps * +gst_vdp_sink_get_allowed_caps (GstVdpDevice * device, GValue * par) +{ + GstCaps *caps; + gint i; + + caps = gst_vdp_output_buffer_get_allowed_caps (device); + + if (!par) + par = gst_vdp_sink_calculate_par (device->display); + + for (i = 0; i < gst_caps_get_size (caps); i++) { + GstStructure *structure; + + structure = gst_caps_get_structure (caps, i); + gst_structure_set_value (structure, "pixel-aspect-ratio", par); + } + + return caps; +} + +static GstVdpDevice * +gst_vdp_sink_setup_device (VdpSink * vdp_sink) +{ + GstVdpDevice *device; + + device = gst_vdp_get_device (vdp_sink->display_name); + if (!device) + return NULL; + + vdp_sink->caps = gst_vdp_sink_get_allowed_caps (device, vdp_sink->par); + + /* call XSynchronize with the current value of synchronous */ + GST_DEBUG_OBJECT (vdp_sink, "XSynchronize called with %s", + vdp_sink->synchronous ? "TRUE" : "FALSE"); + XSynchronize (device->display, vdp_sink->synchronous); + + /* Setup our event listening thread */ + vdp_sink->running = TRUE; + vdp_sink->event_thread = g_thread_create ( + (GThreadFunc) gst_vdp_sink_event_thread, vdp_sink, TRUE, NULL); + + return device; +} + +static gboolean +gst_vdp_sink_start (GstBaseSink * bsink) +{ + VdpSink *vdp_sink = GST_VDP_SINK (bsink); + + vdp_sink->window = NULL; + vdp_sink->cur_image = NULL; + + vdp_sink->event_thread = NULL; + + vdp_sink->fps_n = 0; + vdp_sink->fps_d = 1; + + GST_OBJECT_LOCK (vdp_sink); + vdp_sink->device = gst_vdp_sink_setup_device (vdp_sink); + GST_OBJECT_UNLOCK (vdp_sink); + + return TRUE; +} + +static void +gst_vdp_device_clear (VdpSink * vdp_sink) +{ + g_return_if_fail (GST_IS_VDP_SINK (vdp_sink)); + + GST_OBJECT_LOCK (vdp_sink); + if (vdp_sink->device == NULL) { + GST_OBJECT_UNLOCK (vdp_sink); + return; + } + GST_OBJECT_UNLOCK (vdp_sink); + + g_mutex_lock (vdp_sink->x_lock); + + g_object_unref (vdp_sink->device); + vdp_sink->device = NULL; + + g_mutex_unlock (vdp_sink->x_lock); +} + +static gboolean +gst_vdp_sink_stop (GstBaseSink * bsink) +{ + VdpSink *vdp_sink = GST_VDP_SINK (bsink); + + vdp_sink->running = FALSE; + /* Wait for our event thread to finish before we clean up our stuff. 
*/ + if (vdp_sink->event_thread) + g_thread_join (vdp_sink->event_thread); + + if (vdp_sink->cur_image) { + gst_buffer_unref (GST_BUFFER_CAST (vdp_sink->cur_image)); + vdp_sink->cur_image = NULL; + } + + g_mutex_lock (vdp_sink->flow_lock); + if (vdp_sink->window) { + gst_vdp_sink_window_destroy (vdp_sink, vdp_sink->window); + vdp_sink->window = NULL; + } + g_mutex_unlock (vdp_sink->flow_lock); + + gst_vdp_device_clear (vdp_sink); + + return TRUE; +} + +/* Element stuff */ + +static GstCaps * +gst_vdp_sink_getcaps (GstBaseSink * bsink) +{ + VdpSink *vdp_sink; + GstCaps *caps; + + vdp_sink = GST_VDP_SINK (bsink); + + if (vdp_sink->caps) + caps = gst_caps_copy (vdp_sink->caps); + else + caps = gst_static_pad_template_get_caps (&sink_template); + + return caps; +} + +static gboolean +gst_vdp_sink_setcaps (GstBaseSink * bsink, GstCaps * caps) +{ + VdpSink *vdp_sink; + GstCaps *allowed_caps; + gboolean ret = TRUE; + GstStructure *structure; + GstCaps *intersection; + gint new_width, new_height; + const GValue *fps; + + vdp_sink = GST_VDP_SINK (bsink); + + GST_OBJECT_LOCK (vdp_sink); + if (!vdp_sink->device) + return FALSE; + GST_OBJECT_UNLOCK (vdp_sink); + + allowed_caps = gst_pad_get_caps (GST_BASE_SINK_PAD (bsink)); + GST_DEBUG_OBJECT (vdp_sink, + "sinkconnect possible caps %" GST_PTR_FORMAT " with given caps %" + GST_PTR_FORMAT, allowed_caps, caps); + + /* We intersect those caps with our template to make sure they are correct */ + intersection = gst_caps_intersect (allowed_caps, caps); + gst_caps_unref (allowed_caps); + + GST_DEBUG_OBJECT (vdp_sink, "intersection returned %" GST_PTR_FORMAT, + intersection); + if (gst_caps_is_empty (intersection)) { + gst_caps_unref (intersection); + return FALSE; + } + + gst_caps_unref (intersection); + + structure = gst_caps_get_structure (caps, 0); + + ret &= gst_structure_get_int (structure, "width", &new_width); + ret &= gst_structure_get_int (structure, "height", &new_height); + fps = gst_structure_get_value (structure, "framerate"); + ret &= (fps != NULL); + if (!ret) + return FALSE; + + GST_VIDEO_SINK_WIDTH (vdp_sink) = new_width; + GST_VIDEO_SINK_HEIGHT (vdp_sink) = new_height; + vdp_sink->fps_n = gst_value_get_fraction_numerator (fps); + vdp_sink->fps_d = gst_value_get_fraction_denominator (fps); + + /* Notify application to set xwindow id now */ + g_mutex_lock (vdp_sink->flow_lock); + if (!vdp_sink->window) { + g_mutex_unlock (vdp_sink->flow_lock); + gst_x_overlay_prepare_xwindow_id (GST_X_OVERLAY (vdp_sink)); + } else { + g_mutex_unlock (vdp_sink->flow_lock); + } + + /* Creating our window and our image */ + if (GST_VIDEO_SINK_WIDTH (vdp_sink) <= 0 + || GST_VIDEO_SINK_HEIGHT (vdp_sink) <= 0) { + GST_ELEMENT_ERROR (vdp_sink, CORE, NEGOTIATION, (NULL), + ("Invalid image size.")); + return FALSE; + } + + g_mutex_lock (vdp_sink->flow_lock); + if (!vdp_sink->window) { + vdp_sink->window = gst_vdp_sink_window_new (vdp_sink, + GST_VIDEO_SINK_WIDTH (vdp_sink), GST_VIDEO_SINK_HEIGHT (vdp_sink)); + } + g_mutex_unlock (vdp_sink->flow_lock); + + return TRUE; +} + +static void +gst_vdp_sink_get_times (GstBaseSink * bsink, GstBuffer * buf, + GstClockTime * start, GstClockTime * end) +{ + VdpSink *vdp_sink; + + vdp_sink = GST_VDP_SINK (bsink); + + if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) { + *start = GST_BUFFER_TIMESTAMP (buf); + if (GST_BUFFER_DURATION_IS_VALID (buf)) { + *end = *start + GST_BUFFER_DURATION (buf); + } else { + if (vdp_sink->fps_n > 0) { + *end = *start + + gst_util_uint64_scale_int (GST_SECOND, vdp_sink->fps_d, + vdp_sink->fps_n); + } + } + } 
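/* If neither a duration nor a framerate is available, *end is left unset and the base sink synchronises on the timestamp alone. */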
+} + +static GstFlowReturn +gst_vdp_sink_show_frame (GstBaseSink * bsink, GstBuffer * outbuf) +{ + VdpSink *vdp_sink = GST_VDP_SINK (bsink); + GstVdpOutputBuffer *prev_image = NULL; + VdpStatus status; + GstVdpDevice *device; + + g_return_val_if_fail (GST_IS_VDP_SINK (vdp_sink), FALSE); + + /* We take the flow_lock. If expose is in there we don't want to run + concurrently from the data flow thread */ + g_mutex_lock (vdp_sink->flow_lock); + + if (G_UNLIKELY (vdp_sink->window == NULL)) { + g_mutex_unlock (vdp_sink->flow_lock); + return GST_FLOW_ERROR; + } + + /* Store a reference to the last image we put, lose the previous one */ + if (outbuf && vdp_sink->cur_image != outbuf) { + if (vdp_sink->cur_image) { + prev_image = GST_VDP_OUTPUT_BUFFER (vdp_sink->cur_image); + } + GST_LOG_OBJECT (vdp_sink, "reffing %p as our current image", outbuf); + vdp_sink->cur_image = gst_buffer_ref (outbuf); + } + + /* Expose sends a NULL image, we take the latest frame */ + if (!outbuf) { + if (vdp_sink->cur_image) { + outbuf = vdp_sink->cur_image; + } else { + g_mutex_unlock (vdp_sink->flow_lock); + return GST_FLOW_OK; + } + } + + gst_vdp_sink_window_update_geometry (vdp_sink, vdp_sink->window); + + g_mutex_lock (vdp_sink->x_lock); + + device = vdp_sink->device; + status = device->vdp_presentation_queue_display (vdp_sink->window->queue, + GST_VDP_OUTPUT_BUFFER (outbuf)->surface, 0, 0, 0); + if (status != VDP_STATUS_OK) { + GST_ELEMENT_ERROR (vdp_sink, RESOURCE, READ, + ("Could not display frame"), + ("Error returned from vdpau was: %s", + device->vdp_get_error_string (status))); + + g_mutex_unlock (vdp_sink->x_lock); + g_mutex_unlock (vdp_sink->flow_lock); + return GST_FLOW_ERROR; + } + + if (prev_image) { + VdpTime time; + + /* block till the previous surface has been displayed */ + status = + device->vdp_presentation_queue_block_until_surface_idle (vdp_sink-> + window->queue, prev_image->surface, &time); + if (status != VDP_STATUS_OK) { + GST_ELEMENT_ERROR (vdp_sink, RESOURCE, READ, + ("Could not display frame"), + ("Error returned from vdpau was: %s", + device->vdp_get_error_string (status))); + + g_mutex_unlock (vdp_sink->x_lock); + g_mutex_unlock (vdp_sink->flow_lock); + return GST_FLOW_ERROR; + } + gst_buffer_unref (GST_BUFFER (prev_image)); + } + + XSync (vdp_sink->device->display, FALSE); + + g_mutex_unlock (vdp_sink->x_lock); + g_mutex_unlock (vdp_sink->flow_lock); + + return GST_FLOW_OK; +} + + +static gboolean +gst_vdp_sink_event (GstBaseSink * sink, GstEvent * event) +{ + VdpSink *vdp_sink = GST_VDP_SINK (sink); + + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_TAG:{ + GstTagList *l; + gchar *title = NULL; + + gst_event_parse_tag (event, &l); + gst_tag_list_get_string (l, GST_TAG_TITLE, &title); + + if (title) { + GST_DEBUG_OBJECT (vdp_sink, "got tags, title='%s'", title); + gst_vdp_sink_window_set_title (vdp_sink, vdp_sink->window, title); + + g_free (title); + } + break; + } + default: + break; + } + if (GST_BASE_SINK_CLASS (parent_class)->event) + return GST_BASE_SINK_CLASS (parent_class)->event (sink, event); + else + return TRUE; +} + +static GstFlowReturn +gst_vdp_sink_get_output_buffer (VdpSink * vdp_sink, GstCaps * caps, + GstBuffer ** buf) +{ + GstStructure *structure; + gint width, height; + gint rgba_format; + + structure = gst_caps_get_structure (caps, 0); + if (!gst_structure_get_int (structure, "width", &width) || + !gst_structure_get_int (structure, "height", &height) || + !gst_structure_get_int (structure, "rgba-format", &rgba_format)) { + GST_WARNING_OBJECT (vdp_sink, 
"invalid caps for buffer allocation %" + GST_PTR_FORMAT, caps); + return GST_FLOW_ERROR; + } + + *buf = GST_BUFFER (gst_vdp_output_buffer_new (vdp_sink->device, + rgba_format, width, height)); + if (*buf == NULL) { + return GST_FLOW_ERROR; + } + + gst_buffer_set_caps (*buf, caps); + + return GST_FLOW_OK; +} + +/* Buffer management + * + * The buffer_alloc function must either return a buffer with given size and + * caps or create a buffer with different caps attached to the buffer. This + * last option is called reverse negotiation, ie, where the sink suggests a + * different format from the upstream peer. + * + * We try to do reverse negotiation when our geometry changes and we like a + * resized buffer. + */ +static GstFlowReturn +gst_vdp_sink_buffer_alloc (GstBaseSink * bsink, guint64 offset, guint size, + GstCaps * caps, GstBuffer ** buf) +{ + VdpSink *vdp_sink; + GstStructure *structure = NULL; + GstFlowReturn ret = GST_FLOW_OK; + GstCaps *alloc_caps; + gboolean alloc_unref = FALSE; + gint width, height; + gint w_width, w_height; + + vdp_sink = GST_VDP_SINK (bsink); + + GST_LOG_OBJECT (vdp_sink, + "a buffer of %d bytes was requested with caps %" GST_PTR_FORMAT + " and offset %" G_GUINT64_FORMAT, size, caps, offset); + + /* assume we're going to alloc what was requested, keep track of + * wheter we need to unref or not. When we suggest a new format + * upstream we will create a new caps that we need to unref. */ + alloc_caps = caps; + alloc_unref = FALSE; + + /* get struct to see what is requested */ + structure = gst_caps_get_structure (caps, 0); + if (!gst_structure_get_int (structure, "width", &width) || + !gst_structure_get_int (structure, "height", &height)) { + GST_WARNING_OBJECT (vdp_sink, "invalid caps for buffer allocation %" + GST_PTR_FORMAT, caps); + ret = GST_FLOW_NOT_NEGOTIATED; + goto beach; + } + + /* We take the flow_lock because the window might go away */ + g_mutex_lock (vdp_sink->flow_lock); + if (!vdp_sink->window) { + g_mutex_unlock (vdp_sink->flow_lock); + goto alloc; + } + + /* What is our geometry */ + gst_vdp_sink_window_update_geometry (vdp_sink, vdp_sink->window); + w_width = vdp_sink->window->width; + w_height = vdp_sink->window->height; + + g_mutex_unlock (vdp_sink->flow_lock); + + /* We would like another geometry */ + if (width != w_width || height != w_height) { + GstCaps *new_caps, *allowed_caps, *desired_caps; + GstStructure *desired_struct; + + /* make a copy of the incomming caps to create the new + * suggestion. We can't use make_writable because we might + * then destroy the original caps which we still need when the + * peer does not accept the suggestion. */ + new_caps = gst_caps_copy (caps); + desired_struct = gst_caps_get_structure (new_caps, 0); + + GST_DEBUG ("we would love to receive a %dx%d video", w_width, w_height); + gst_structure_set (desired_struct, "width", G_TYPE_INT, w_width, NULL); + gst_structure_set (desired_struct, "height", G_TYPE_INT, w_height, NULL); + + allowed_caps = gst_pad_get_caps (GST_BASE_SINK_PAD (vdp_sink)); + desired_caps = gst_caps_intersect (new_caps, allowed_caps); + + gst_caps_unref (new_caps); + gst_caps_unref (allowed_caps); + + /* see if peer accepts our new suggestion, if there is no peer, this + * function returns true. */ + if (gst_pad_peer_accept_caps (GST_VIDEO_SINK_PAD (vdp_sink), desired_caps)) { + /* we will not alloc a buffer of the new suggested caps. Make sure + * we also unref this new caps after we set it on the buffer. 
*/ + alloc_caps = desired_caps; + alloc_unref = TRUE; + width = w_width; + height = w_height; + GST_DEBUG ("peer pad accepts our desired caps %" GST_PTR_FORMAT, + desired_caps); + } else { + GST_DEBUG ("peer pad does not accept our desired caps %" GST_PTR_FORMAT, + desired_caps); + /* we alloc a buffer with the original incomming caps already in the + * width and height variables */ + } + } + +alloc: + ret = gst_vdp_sink_get_output_buffer (vdp_sink, alloc_caps, buf); + + /* could be our new reffed suggestion or the original unreffed caps */ + if (alloc_unref) + gst_caps_unref (alloc_caps); + +beach: + return ret; +} + +/* Interfaces stuff */ + +static gboolean +gst_vdp_sink_interface_supported (GstImplementsInterface * iface, GType type) +{ + g_assert (type == GST_TYPE_NAVIGATION || type == GST_TYPE_X_OVERLAY); + return TRUE; +} + +static void +gst_vdp_sink_interface_init (GstImplementsInterfaceClass * klass) +{ + klass->supported = gst_vdp_sink_interface_supported; +} + +static void +gst_vdp_sink_navigation_send_event (GstNavigation * navigation, + GstStructure * structure) +{ + VdpSink *vdp_sink = GST_VDP_SINK (navigation); + GstEvent *event; + gint x_offset, y_offset; + gdouble x, y; + GstPad *pad = NULL; + + event = gst_event_new_navigation (structure); + + /* We are not converting the pointer coordinates as there's no hardware + scaling done here. The only possible scaling is done by videoscale and + videoscale will have to catch those events and tranform the coordinates + to match the applied scaling. So here we just add the offset if the image + is centered in the window. */ + + /* We take the flow_lock while we look at the window */ + g_mutex_lock (vdp_sink->flow_lock); + + if (!vdp_sink->window) { + g_mutex_unlock (vdp_sink->flow_lock); + return; + } + + x_offset = vdp_sink->window->width - GST_VIDEO_SINK_WIDTH (vdp_sink); + y_offset = vdp_sink->window->height - GST_VIDEO_SINK_HEIGHT (vdp_sink); + + g_mutex_unlock (vdp_sink->flow_lock); + + if (x_offset > 0 && gst_structure_get_double (structure, "pointer_x", &x)) { + x -= x_offset / 2; + gst_structure_set (structure, "pointer_x", G_TYPE_DOUBLE, x, NULL); + } + if (y_offset > 0 && gst_structure_get_double (structure, "pointer_y", &y)) { + y -= y_offset / 2; + gst_structure_set (structure, "pointer_y", G_TYPE_DOUBLE, y, NULL); + } + + pad = gst_pad_get_peer (GST_VIDEO_SINK_PAD (vdp_sink)); + + if (GST_IS_PAD (pad) && GST_IS_EVENT (event)) { + gst_pad_send_event (pad, event); + + gst_object_unref (pad); + } +} + +static void +gst_vdp_sink_navigation_init (GstNavigationInterface * iface) +{ + iface->send_event = gst_vdp_sink_navigation_send_event; +} + +static void +gst_vdp_sink_set_xwindow_id (GstXOverlay * overlay, XID xwindow_id) +{ + VdpSink *vdp_sink = GST_VDP_SINK (overlay); + GstVdpWindow *window = NULL; + XWindowAttributes attr; + + /* We acquire the stream lock while setting this window in the element. 
+ We are basically cleaning tons of stuff replacing the old window, putting + images while we do that would surely crash */ + g_mutex_lock (vdp_sink->flow_lock); + + /* If we already use that window return */ + if (vdp_sink->window && (xwindow_id == vdp_sink->window->win)) { + g_mutex_unlock (vdp_sink->flow_lock); + return; + } + + /* If the element has not initialized the X11 context try to do so */ + if (!vdp_sink->device + && !(vdp_sink->device = gst_vdp_sink_setup_device (vdp_sink))) { + g_mutex_unlock (vdp_sink->flow_lock); + /* we have thrown a GST_ELEMENT_ERROR now */ + return; + } + + /* If a window is there already we destroy it */ + if (vdp_sink->window) { + gst_vdp_sink_window_destroy (vdp_sink, vdp_sink->window); + vdp_sink->window = NULL; + } + + /* If the xid is 0 we go back to an internal window */ + if (xwindow_id == 0) { + /* If no width/height caps nego did not happen window will be created + during caps nego then */ + if (GST_VIDEO_SINK_WIDTH (vdp_sink) && GST_VIDEO_SINK_HEIGHT (vdp_sink)) { + window = gst_vdp_sink_window_new (vdp_sink, + GST_VIDEO_SINK_WIDTH (vdp_sink), GST_VIDEO_SINK_HEIGHT (vdp_sink)); + } + } else { + window = g_new0 (GstVdpWindow, 1); + + window->win = xwindow_id; + + /* We get window geometry, set the event we want to receive, + and create a GC */ + g_mutex_lock (vdp_sink->x_lock); + XGetWindowAttributes (vdp_sink->device->display, window->win, &attr); + window->width = attr.width; + window->height = attr.height; + window->internal = FALSE; + if (vdp_sink->handle_events) { + XSelectInput (vdp_sink->device->display, window->win, ExposureMask | + StructureNotifyMask | PointerMotionMask | KeyPressMask | + KeyReleaseMask); + } + + g_mutex_unlock (vdp_sink->x_lock); + } + + if (window) + vdp_sink->window = window; + + g_mutex_unlock (vdp_sink->flow_lock); +} + +static void +gst_vdp_sink_expose (GstXOverlay * overlay) +{ + gst_vdp_sink_show_frame (GST_BASE_SINK (overlay), NULL); +} + +static void +gst_vdp_sink_set_event_handling (GstXOverlay * overlay, gboolean handle_events) +{ + VdpSink *vdp_sink = GST_VDP_SINK (overlay); + + vdp_sink->handle_events = handle_events; + + g_mutex_lock (vdp_sink->flow_lock); + + if (G_UNLIKELY (!vdp_sink->window)) { + g_mutex_unlock (vdp_sink->flow_lock); + return; + } + + g_mutex_lock (vdp_sink->x_lock); + + if (handle_events) { + if (vdp_sink->window->internal) { + XSelectInput (vdp_sink->device->display, vdp_sink->window->win, + ExposureMask | StructureNotifyMask | PointerMotionMask | + KeyPressMask | KeyReleaseMask | ButtonPressMask | ButtonReleaseMask); + } else { + XSelectInput (vdp_sink->device->display, vdp_sink->window->win, + ExposureMask | StructureNotifyMask | PointerMotionMask | + KeyPressMask | KeyReleaseMask); + } + } else { + XSelectInput (vdp_sink->device->display, vdp_sink->window->win, 0); + } + + g_mutex_unlock (vdp_sink->x_lock); + + g_mutex_unlock (vdp_sink->flow_lock); +} + +static void +gst_vdp_sink_xoverlay_init (GstXOverlayClass * iface) +{ + iface->set_xwindow_id = gst_vdp_sink_set_xwindow_id; + iface->expose = gst_vdp_sink_expose; + iface->handle_events = gst_vdp_sink_set_event_handling; +} + +/* =========================================== */ +/* */ +/* Init & Class init */ +/* */ +/* =========================================== */ + +static void +gst_vdp_sink_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec) +{ + VdpSink *vdp_sink; + + g_return_if_fail (GST_IS_VDP_SINK (object)); + + vdp_sink = GST_VDP_SINK (object); + + switch (prop_id) { + case 
PROP_DISPLAY: + vdp_sink->display_name = g_strdup (g_value_get_string (value)); + break; + case PROP_SYNCHRONOUS: + vdp_sink->synchronous = g_value_get_boolean (value); + if (vdp_sink->device) { + GST_DEBUG_OBJECT (vdp_sink, "XSynchronize called with %s", + vdp_sink->synchronous ? "TRUE" : "FALSE"); + g_mutex_lock (vdp_sink->x_lock); + XSynchronize (vdp_sink->device->display, vdp_sink->synchronous); + g_mutex_unlock (vdp_sink->x_lock); + } + break; + case PROP_PIXEL_ASPECT_RATIO: + { + GValue *tmp; + + tmp = g_new0 (GValue, 1); + g_value_init (tmp, GST_TYPE_FRACTION); + + if (!g_value_transform (value, tmp)) { + GST_WARNING_OBJECT (vdp_sink, + "Could not transform string to aspect ratio"); + g_free (tmp); + } else { + GST_DEBUG_OBJECT (vdp_sink, "set PAR to %d/%d", + gst_value_get_fraction_numerator (tmp), + gst_value_get_fraction_denominator (tmp)); + g_free (vdp_sink->par); + vdp_sink->par = tmp; + } + } + break; + case PROP_HANDLE_EVENTS: + gst_vdp_sink_set_event_handling (GST_X_OVERLAY (vdp_sink), + g_value_get_boolean (value)); + break; + case PROP_HANDLE_EXPOSE: + vdp_sink->handle_expose = g_value_get_boolean (value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static void +gst_vdp_sink_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec) +{ + VdpSink *vdp_sink; + + g_return_if_fail (GST_IS_VDP_SINK (object)); + + vdp_sink = GST_VDP_SINK (object); + + switch (prop_id) { + case PROP_DISPLAY: + g_value_set_string (value, vdp_sink->display_name); + break; + case PROP_SYNCHRONOUS: + g_value_set_boolean (value, vdp_sink->synchronous); + break; + case PROP_PIXEL_ASPECT_RATIO: + if (vdp_sink->par) + g_value_transform (vdp_sink->par, value); + break; + case PROP_HANDLE_EVENTS: + g_value_set_boolean (value, vdp_sink->handle_events); + break; + case PROP_HANDLE_EXPOSE: + g_value_set_boolean (value, vdp_sink->handle_expose); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static void +gst_vdp_sink_finalize (GObject * object) +{ + VdpSink *vdp_sink; + + vdp_sink = GST_VDP_SINK (object); + + if (vdp_sink->display_name) { + g_free (vdp_sink->display_name); + vdp_sink->display_name = NULL; + } + if (vdp_sink->par) { + g_free (vdp_sink->par); + vdp_sink->par = NULL; + } + if (vdp_sink->x_lock) { + g_mutex_free (vdp_sink->x_lock); + vdp_sink->x_lock = NULL; + } + if (vdp_sink->flow_lock) { + g_mutex_free (vdp_sink->flow_lock); + vdp_sink->flow_lock = NULL; + } + + g_free (vdp_sink->media_title); + + G_OBJECT_CLASS (parent_class)->finalize (object); +} + +static void +gst_vdp_sink_init (VdpSink * vdp_sink) +{ + vdp_sink->device = NULL; + + vdp_sink->display_name = NULL; + vdp_sink->par = NULL; + + vdp_sink->x_lock = g_mutex_new (); + vdp_sink->flow_lock = g_mutex_new (); + + vdp_sink->synchronous = FALSE; + vdp_sink->handle_events = TRUE; + vdp_sink->handle_expose = TRUE; +} + +static void +gst_vdp_sink_base_init (gpointer g_class) +{ + GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); + + gst_element_class_set_details_simple (element_class, + "VDPAU Sink", + "Sink/Video", + "VDPAU Sink", "Carl-Anton Ingmarsson "); + + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&sink_template)); +} + +static void +gst_vdp_sink_class_init (VdpSinkClass * klass) +{ + GObjectClass *gobject_class; + GstElementClass *gstelement_class; + GstBaseSinkClass *gstbasesink_class; + + gobject_class = (GObjectClass *) klass; + 
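/* All three pointers alias the same class structure, just cast to the different levels of the hierarchy. */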
gstelement_class = (GstElementClass *) klass; + gstbasesink_class = (GstBaseSinkClass *) klass; + + parent_class = g_type_class_peek_parent (klass); + + gobject_class->finalize = gst_vdp_sink_finalize; + gobject_class->set_property = gst_vdp_sink_set_property; + gobject_class->get_property = gst_vdp_sink_get_property; + + g_object_class_install_property (gobject_class, PROP_DISPLAY, + g_param_spec_string ("display", "Display", "X Display name", + NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_SYNCHRONOUS, + g_param_spec_boolean ("synchronous", "Synchronous", "When enabled, runs " + "the X display in synchronous mode. (used only for debugging)", FALSE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_PIXEL_ASPECT_RATIO, + g_param_spec_string ("pixel-aspect-ratio", "Pixel Aspect Ratio", + "The pixel aspect ratio of the device", "1/1", + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_HANDLE_EVENTS, + g_param_spec_boolean ("handle-events", "Handle XEvents", + "When enabled, XEvents will be selected and handled", TRUE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_HANDLE_EXPOSE, + g_param_spec_boolean ("handle-expose", "Handle expose", + "When enabled, " + "the current frame will always be drawn in response to X Expose " + "events", TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + gstbasesink_class->start = GST_DEBUG_FUNCPTR (gst_vdp_sink_start); + gstbasesink_class->stop = GST_DEBUG_FUNCPTR (gst_vdp_sink_stop); + gstbasesink_class->get_caps = GST_DEBUG_FUNCPTR (gst_vdp_sink_getcaps); + gstbasesink_class->set_caps = GST_DEBUG_FUNCPTR (gst_vdp_sink_setcaps); + gstbasesink_class->buffer_alloc = + GST_DEBUG_FUNCPTR (gst_vdp_sink_buffer_alloc); + gstbasesink_class->get_times = GST_DEBUG_FUNCPTR (gst_vdp_sink_get_times); + gstbasesink_class->preroll = GST_DEBUG_FUNCPTR (gst_vdp_sink_show_frame); + gstbasesink_class->render = GST_DEBUG_FUNCPTR (gst_vdp_sink_show_frame); + gstbasesink_class->event = GST_DEBUG_FUNCPTR (gst_vdp_sink_event); +} + +/* ============================================================= */ +/* */ +/* Public Methods */ +/* */ +/* ============================================================= */ + +/* =========================================== */ +/* */ +/* Object typing & Creation */ +/* */ +/* =========================================== */ + +GType +gst_vdp_sink_get_type (void) +{ + static GType vdp_sink_type = 0; + + if (!vdp_sink_type) { + static const GTypeInfo vdp_sink_info = { + sizeof (VdpSinkClass), + gst_vdp_sink_base_init, + NULL, + (GClassInitFunc) gst_vdp_sink_class_init, + NULL, + NULL, + sizeof (VdpSink), + 0, + (GInstanceInitFunc) gst_vdp_sink_init, + }; + static const GInterfaceInfo iface_info = { + (GInterfaceInitFunc) gst_vdp_sink_interface_init, + NULL, + NULL, + }; + static const GInterfaceInfo navigation_info = { + (GInterfaceInitFunc) gst_vdp_sink_navigation_init, + NULL, + NULL, + }; + static const GInterfaceInfo overlay_info = { + (GInterfaceInitFunc) gst_vdp_sink_xoverlay_init, + NULL, + NULL, + }; + + vdp_sink_type = g_type_register_static (GST_TYPE_VIDEO_SINK, + "VdpSink", &vdp_sink_info, 0); + + g_type_add_interface_static (vdp_sink_type, GST_TYPE_IMPLEMENTS_INTERFACE, + &iface_info); + g_type_add_interface_static (vdp_sink_type, GST_TYPE_NAVIGATION, + &navigation_info); + g_type_add_interface_static (vdp_sink_type, 
GST_TYPE_X_OVERLAY, + &overlay_info); + } + + DEBUG_INIT (); + + return vdp_sink_type; +} diff --git a/sys/vdpau/gstvdpsink.h b/sys/vdpau/gstvdpsink.h new file mode 100644 index 0000000000..bbbd34c02a --- /dev/null +++ b/sys/vdpau/gstvdpsink.h @@ -0,0 +1,135 @@ +/* GStreamer + * Copyright (C) <2005> Julien Moutte + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifndef __GST_VDP_SINK_H__ +#define __GST_VDP_SINK_H__ + +#include + +#include +#include + +#include +#include + +#include "gstvdpdevice.h" + +G_BEGIN_DECLS + +#define GST_TYPE_VDP_SINK \ + (gst_vdp_sink_get_type()) +#define GST_VDP_SINK(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_VDP_SINK, VdpSink)) +#define GST_VDP_SINK_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_VDP_SINK, VdpSinkClass)) +#define GST_IS_VDP_SINK(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_VDP_SINK)) +#define GST_IS_VDP_SINK_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_VDP_SINK)) + +typedef struct _GstXContext GstXContext; +typedef struct _GstVdpWindow GstVdpWindow; + +typedef struct _VdpSink VdpSink; +typedef struct _VdpSinkClass VdpSinkClass; + +/* + * GstVdpWindow: + * @win: the Window ID of this X11 window + * @target the VdpPresentationQueueTarget of this window + * @queue the VdpPresentationQueue of this window + * @width: the width in pixels of Window @win + * @height: the height in pixels of Window @win + * @internal: used to remember if Window @win was created internally or passed + * through the #GstXOverlay interface + * + * Structure used to store informations about a Window. + */ +struct _GstVdpWindow { + Window win; + VdpPresentationQueueTarget target; + VdpPresentationQueue queue; + gint width, height; + gboolean internal; +}; + +/** + * VdpSink: + * @display_name: the name of the Display we want to render to + * @device: the GstVdpDevice associated with the display_name + * @window: the #GstVdpWindow we are rendering to + * @cur_image: a reference to the last #GstBuffer that was put to @window. 
It + * is used when Expose events are received to redraw the latest video frame + * @event_thread: a thread listening for events on @window and handling them + * @running: used to inform @event_thread if it should run/shutdown + * @fps_n: the framerate fraction numerator + * @fps_d: the framerate fraction denominator + * @x_lock: used to protect X calls as we are not using the XLib in threaded + * mode + * @flow_lock: used to protect data flow routines from external calls such as + * events from @event_thread or methods from the #GstXOverlay interface + * @par: used to override calculated pixel aspect ratio from @xcontext + * @synchronous: used to store if XSynchronous should be used or not (for + * debugging purpose only) + * @handle_events: used to know if we should handle select XEvents or not + * + * The #VdpSink data structure. + */ +struct _VdpSink { + /* Our element stuff */ + GstVideoSink videosink; + + char *display_name; + + GstVdpDevice *device; + GstCaps *caps; + + GstVdpWindow *window; + GstBuffer *cur_image; + + GThread *event_thread; + gboolean running; + + /* Framerate numerator and denominator */ + gint fps_n; + gint fps_d; + + GMutex *x_lock; + GMutex *flow_lock; + + /* object-set pixel aspect ratio */ + GValue *par; + + gboolean synchronous; + gboolean handle_events; + gboolean handle_expose; + + /* stream metadata */ + gchar *media_title; +}; + +struct _VdpSinkClass { + GstVideoSinkClass parent_class; +}; + +GType gst_vdp_sink_get_type(void); + +G_END_DECLS + +#endif /* __GST_VDP_SINK_H__ */ \ No newline at end of file diff --git a/sys/vdpau/gstvdputils.c b/sys/vdpau/gstvdputils.c index 973990dcef..1c393f53ab 100644 --- a/sys/vdpau/gstvdputils.c +++ b/sys/vdpau/gstvdputils.c @@ -18,68 +18,10 @@ * Boston, MA 02111-1307, USA. */ +#include "gstvdpvideobuffer.h" + #include "gstvdputils.h" -static GstCaps * -gst_vdp_get_allowed_yuv_caps (GstVdpDevice * device) -{ - GstCaps *caps; - gint i; - - caps = gst_caps_new_empty (); - for (i = 0; i < N_CHROMA_TYPES; i++) { - VdpStatus status; - VdpBool is_supported; - guint32 max_w, max_h; - - status = - device->vdp_video_surface_query_capabilities (device->device, - chroma_types[i], &is_supported, &max_w, &max_h); - - if (status != VDP_STATUS_OK && status != VDP_STATUS_INVALID_CHROMA_TYPE) { - GST_ERROR_OBJECT (device, - "Could not get query VDPAU video surface capabilites, " - "Error returned from vdpau was: %s", - device->vdp_get_error_string (status)); - - goto error; - } - if (is_supported) { - gint j; - - for (j = 0; j < N_FORMATS; j++) { - if (formats[j].chroma_type != chroma_types[i]) - continue; - - status = - device->vdp_video_surface_query_ycbcr_capabilities (device->device, - formats[j].chroma_type, formats[j].format, &is_supported); - if (status != VDP_STATUS_OK - && status != VDP_STATUS_INVALID_Y_CB_CR_FORMAT) { - GST_ERROR_OBJECT (device, "Could not query VDPAU YCbCr capabilites, " - "Error returned from vdpau was: %s", - device->vdp_get_error_string (status)); - - goto error; - } - - if (is_supported) { - GstCaps *format_caps; - - format_caps = gst_caps_new_simple ("video/x-raw-yuv", - "format", GST_TYPE_FOURCC, formats[j].fourcc, - "width", GST_TYPE_INT_RANGE, 1, max_w, - "height", GST_TYPE_INT_RANGE, 1, max_h, NULL); - gst_caps_append (caps, format_caps); - } - } - } - } - -error: - return caps; -} - GstCaps * gst_vdp_video_to_yuv_caps (GstCaps * caps, GstVdpDevice * device) { @@ -124,7 +66,7 @@ gst_vdp_video_to_yuv_caps (GstCaps * caps, GstVdpDevice * device) structure = gst_caps_get_structure (caps, 0); if (device) 
{ - allowed_caps = gst_vdp_get_allowed_yuv_caps (device); + allowed_caps = gst_vdp_video_buffer_get_allowed_yuv_caps (device); result = gst_caps_intersect (new_caps, allowed_caps); gst_caps_unref (new_caps); @@ -135,46 +77,6 @@ gst_vdp_video_to_yuv_caps (GstCaps * caps, GstVdpDevice * device) return result; } -static GstCaps * -gst_vdp_get_allowed_video_caps (GstVdpDevice * device) -{ - GstCaps *caps; - gint i; - - caps = gst_caps_new_empty (); - for (i = 0; i < N_CHROMA_TYPES; i++) { - VdpStatus status; - VdpBool is_supported; - guint32 max_w, max_h; - - status = - device->vdp_video_surface_query_capabilities (device->device, - chroma_types[i], &is_supported, &max_w, &max_h); - - if (status != VDP_STATUS_OK && status != VDP_STATUS_INVALID_CHROMA_TYPE) { - GST_ERROR_OBJECT (device, - "Could not get query VDPAU video surface capabilites, " - "Error returned from vdpau was: %s", - device->vdp_get_error_string (status)); - - goto error; - } - - if (is_supported) { - GstCaps *format_caps; - - format_caps = gst_caps_new_simple ("video/x-vdpau-video", - "chroma-type", G_TYPE_INT, chroma_types[i], - "width", GST_TYPE_INT_RANGE, 1, max_w, - "height", GST_TYPE_INT_RANGE, 1, max_h, NULL); - gst_caps_append (caps, format_caps); - } - } - -error: - return caps; -} - GstCaps * gst_vdp_yuv_to_video_caps (GstCaps * caps, GstVdpDevice * device) { @@ -209,7 +111,7 @@ gst_vdp_yuv_to_video_caps (GstCaps * caps, GstVdpDevice * device) if (device) { GstCaps *allowed_caps; - allowed_caps = gst_vdp_get_allowed_video_caps (device); + allowed_caps = gst_vdp_video_buffer_get_allowed_video_caps (device); result = gst_caps_intersect (new_caps, allowed_caps); gst_caps_unref (new_caps); @@ -219,3 +121,20 @@ gst_vdp_yuv_to_video_caps (GstCaps * caps, GstVdpDevice * device) return result; } + +GstCaps * +gst_vdp_video_to_output_caps (GstCaps * caps) +{ + GstCaps *result; + gint i; + + result = gst_caps_copy (caps); + for (i = 0; i < gst_caps_get_size (result); i++) { + GstStructure *structure = gst_caps_get_structure (result, i); + + gst_structure_set_name (structure, "video/x-vdpau-output"); + gst_structure_remove_field (structure, "chroma-type"); + } + + return result; +} diff --git a/sys/vdpau/gstvdputils.h b/sys/vdpau/gstvdputils.h index 825477c314..5d9e9012db 100644 --- a/sys/vdpau/gstvdputils.h +++ b/sys/vdpau/gstvdputils.h @@ -25,58 +25,9 @@ #include "gstvdpdevice.h" -typedef struct -{ - VdpChromaType chroma_type; - VdpYCbCrFormat format; - guint32 fourcc; -} VdpauFormats; - -#define N_CHROMA_TYPES 3 -#define N_FORMATS 7 - -static const VdpChromaType chroma_types[N_CHROMA_TYPES] = - { VDP_CHROMA_TYPE_420, VDP_CHROMA_TYPE_422, VDP_CHROMA_TYPE_444 }; - -static const VdpauFormats formats[N_FORMATS] = { - { - VDP_CHROMA_TYPE_420, - VDP_YCBCR_FORMAT_YV12, - GST_MAKE_FOURCC ('I', '4', '2', '0') - }, - { - VDP_CHROMA_TYPE_420, - VDP_YCBCR_FORMAT_YV12, - GST_MAKE_FOURCC ('Y', 'V', '1', '2') - }, - { - VDP_CHROMA_TYPE_420, - VDP_YCBCR_FORMAT_NV12, - GST_MAKE_FOURCC ('N', 'V', '1', '2') - }, - { - VDP_CHROMA_TYPE_422, - VDP_YCBCR_FORMAT_UYVY, - GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y') - }, - { - VDP_CHROMA_TYPE_444, - VDP_YCBCR_FORMAT_V8U8Y8A8, - GST_MAKE_FOURCC ('A', 'Y', 'U', 'V') - }, - { - VDP_CHROMA_TYPE_444, - VDP_YCBCR_FORMAT_Y8U8V8A8, - GST_MAKE_FOURCC ('A', 'V', 'U', 'Y') - }, - { - VDP_CHROMA_TYPE_422, - VDP_YCBCR_FORMAT_YUYV, - GST_MAKE_FOURCC ('Y', 'U', 'Y', '2') - }, -}; - GstCaps *gst_vdp_video_to_yuv_caps (GstCaps *caps, GstVdpDevice *device); GstCaps *gst_vdp_yuv_to_video_caps (GstCaps *caps, GstVdpDevice 
*device); +GstCaps *gst_vdp_video_to_output_caps (GstCaps * caps); + #endif /* _GST_VDP_UTILS_H_ */ \ No newline at end of file diff --git a/sys/vdpau/gstvdpvideobuffer.c b/sys/vdpau/gstvdpvideobuffer.c index 8ae14e98ae..008cfdd4c3 100644 --- a/sys/vdpau/gstvdpvideobuffer.c +++ b/sys/vdpau/gstvdpvideobuffer.c @@ -136,3 +136,103 @@ gst_vdp_video_buffer_get_type (void) } return _gst_vdp_video_buffer_type; } + +GstCaps * +gst_vdp_video_buffer_get_allowed_yuv_caps (GstVdpDevice * device) +{ + GstCaps *caps; + gint i; + + caps = gst_caps_new_empty (); + for (i = 0; i < N_CHROMA_TYPES; i++) { + VdpStatus status; + VdpBool is_supported; + guint32 max_w, max_h; + + status = + device->vdp_video_surface_query_capabilities (device->device, + chroma_types[i], &is_supported, &max_w, &max_h); + + if (status != VDP_STATUS_OK && status != VDP_STATUS_INVALID_CHROMA_TYPE) { + GST_ERROR_OBJECT (device, + "Could not get query VDPAU video surface capabilites, " + "Error returned from vdpau was: %s", + device->vdp_get_error_string (status)); + + goto error; + } + if (is_supported) { + gint j; + + for (j = 0; j < N_FORMATS; j++) { + if (formats[j].chroma_type != chroma_types[i]) + continue; + + status = + device->vdp_video_surface_query_ycbcr_capabilities (device->device, + formats[j].chroma_type, formats[j].format, &is_supported); + if (status != VDP_STATUS_OK + && status != VDP_STATUS_INVALID_Y_CB_CR_FORMAT) { + GST_ERROR_OBJECT (device, "Could not query VDPAU YCbCr capabilites, " + "Error returned from vdpau was: %s", + device->vdp_get_error_string (status)); + + goto error; + } + + if (is_supported) { + GstCaps *format_caps; + + format_caps = gst_caps_new_simple ("video/x-raw-yuv", + "format", GST_TYPE_FOURCC, formats[j].fourcc, + "width", GST_TYPE_INT_RANGE, 1, max_w, + "height", GST_TYPE_INT_RANGE, 1, max_h, NULL); + gst_caps_append (caps, format_caps); + } + } + } + } + +error: + return caps; +} + +GstCaps * +gst_vdp_video_buffer_get_allowed_video_caps (GstVdpDevice * device) +{ + GstCaps *caps; + gint i; + + caps = gst_caps_new_empty (); + for (i = 0; i < N_CHROMA_TYPES; i++) { + VdpStatus status; + VdpBool is_supported; + guint32 max_w, max_h; + + status = + device->vdp_video_surface_query_capabilities (device->device, + chroma_types[i], &is_supported, &max_w, &max_h); + + if (status != VDP_STATUS_OK && status != VDP_STATUS_INVALID_CHROMA_TYPE) { + GST_ERROR_OBJECT (device, + "Could not get query VDPAU video surface capabilites, " + "Error returned from vdpau was: %s", + device->vdp_get_error_string (status)); + + goto error; + } + + if (is_supported) { + GstCaps *format_caps; + + format_caps = gst_caps_new_simple ("video/x-vdpau-video", + "chroma-type", G_TYPE_INT, chroma_types[i], + "width", GST_TYPE_INT_RANGE, 1, max_w, + "height", GST_TYPE_INT_RANGE, 1, max_h, NULL); + gst_caps_append (caps, format_caps); + } + } + +error: + return caps; +} diff --git a/sys/vdpau/gstvdpvideobuffer.h b/sys/vdpau/gstvdpvideobuffer.h index 36eddcbd50..6122ec744e 100644 --- a/sys/vdpau/gstvdpvideobuffer.h +++ b/sys/vdpau/gstvdpvideobuffer.h @@ -44,12 +44,65 @@ struct _GstVdpVideoBuffer { GSList *refs; }; +typedef struct +{ + VdpChromaType chroma_type; + VdpYCbCrFormat format; + guint32 fourcc; +} GstVdpVideoBufferFormats; + +#define N_CHROMA_TYPES 3 +#define N_FORMATS 7 + +static const VdpChromaType chroma_types[N_CHROMA_TYPES] = + { VDP_CHROMA_TYPE_420, VDP_CHROMA_TYPE_422, VDP_CHROMA_TYPE_444 }; + +static const GstVdpVideoBufferFormats formats[N_FORMATS] = { + { + VDP_CHROMA_TYPE_420, + VDP_YCBCR_FORMAT_YV12, + 
GST_MAKE_FOURCC ('I', '4', '2', '0') + }, + { + VDP_CHROMA_TYPE_420, + VDP_YCBCR_FORMAT_YV12, + GST_MAKE_FOURCC ('Y', 'V', '1', '2') + }, + { + VDP_CHROMA_TYPE_420, + VDP_YCBCR_FORMAT_NV12, + GST_MAKE_FOURCC ('N', 'V', '1', '2') + }, + { + VDP_CHROMA_TYPE_422, + VDP_YCBCR_FORMAT_UYVY, + GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y') + }, + { + VDP_CHROMA_TYPE_444, + VDP_YCBCR_FORMAT_V8U8Y8A8, + GST_MAKE_FOURCC ('A', 'Y', 'U', 'V') + }, + { + VDP_CHROMA_TYPE_444, + VDP_YCBCR_FORMAT_Y8U8V8A8, + GST_MAKE_FOURCC ('A', 'V', 'U', 'Y') + }, + { + VDP_CHROMA_TYPE_422, + VDP_YCBCR_FORMAT_YUYV, + GST_MAKE_FOURCC ('Y', 'U', 'Y', '2') + }, +}; + GType gst_vdp_video_buffer_get_type (void); GstVdpVideoBuffer* gst_vdp_video_buffer_new (GstVdpDevice * device, VdpChromaType chroma_type, gint width, gint height); - void gst_vdp_video_buffer_add_reference (GstVdpVideoBuffer *buffer, GstVdpVideoBuffer *buf); +GstCaps *gst_vdp_video_buffer_get_allowed_yuv_caps (GstVdpDevice * device); +GstCaps *gst_vdp_video_buffer_get_allowed_video_caps (GstVdpDevice * device); + #define GST_VDP_VIDEO_CAPS \ "video/x-vdpau-video, " \ "chroma-type = (int)[0,2], " \ diff --git a/sys/vdpau/gstvdpvideopostprocess.c b/sys/vdpau/gstvdpvideopostprocess.c new file mode 100644 index 0000000000..dad577dd2f --- /dev/null +++ b/sys/vdpau/gstvdpvideopostprocess.c @@ -0,0 +1,512 @@ +/* + * GStreamer + * Copyright (C) 2009 Carl-Anton Ingmarsson + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +/** + * SECTION:element-vdpauvideopostprocess + * + * FIXME:Describe vdpauvideopostprocess here. + * + * + * Example launch line + * |[ + * gst-launch -v -m fakesrc ! vdpauvideopostprocess ! fakesink silent=TRUE + * ]| + * + */ + +/* + * TODO: + * + add support for postprocessing, e.g. deinterlacing + * + mixing videos. (This should perhaps be done in a separate element based on + * VdpOutputSurface) + */ + +#ifdef HAVE_CONFIG_H +# include <config.h> +#endif + +#include <gst/gst.h> +#include <gst/video/gstvideosink.h> + +#include "gstvdputils.h" +#include "gstvdpvideobuffer.h" +#include "gstvdpoutputbuffer.h" + +#include "gstvdpvideopostprocess.h" + +GST_DEBUG_CATEGORY_STATIC (gst_vdp_vpp_debug); +#define GST_CAT_DEFAULT gst_vdp_vpp_debug + +/* Filter signals and args */ +enum +{ + /* FILL ME */ + LAST_SIGNAL +}; + +enum +{ + PROP_0, + PROP_FORCE_ASPECT_RATIO +}; + +/* the capabilities of the inputs and outputs. + * + * describe the real formats here.
+ */ +static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VDP_VIDEO_CAPS)); +static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VDP_OUTPUT_CAPS)); + +#define DEBUG_INIT(bla) \ +GST_DEBUG_CATEGORY_INIT (gst_vdp_vpp_debug, "vdpauvideopostprocess", 0, "VDPAU video surface to output surface"); + +GST_BOILERPLATE_FULL (GstVdpVideoPostProcess, gst_vdp_vpp, + GstElement, GST_TYPE_ELEMENT, DEBUG_INIT); + +static void gst_vdp_vpp_finalize (GObject * object); + +static gboolean +gst_vdp_vpp_set_caps (GstPad * pad, GstCaps * caps) +{ + GstVdpVideoPostProcess *vpp = + GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad)); + GstCaps *output_caps, *allowed_caps, *src_caps; + gboolean res; + + output_caps = gst_vdp_video_to_output_caps (caps); + allowed_caps = gst_pad_get_allowed_caps (vpp->srcpad); + + src_caps = gst_caps_intersect (output_caps, allowed_caps); + gst_caps_truncate (src_caps); + GST_DEBUG ("output_caps: %" GST_PTR_FORMAT " allowed_caps: %" GST_PTR_FORMAT + " src_caps: %" GST_PTR_FORMAT, output_caps, allowed_caps, src_caps); + + gst_caps_unref (output_caps); + gst_caps_unref (allowed_caps); + + res = gst_pad_set_caps (vpp->srcpad, src_caps); + + gst_object_unref (vpp); + return res; +} + +static void +gst_vdp_vpp_flush (GstVdpVideoPostProcess * vpp) +{ + /* TODO: Write this */ +} + +static void +gst_vdp_vpp_start (GstVdpVideoPostProcess * vpp) +{ + vpp->mixer = VDP_INVALID_HANDLE; + vpp->device = NULL; +} + +static void +gst_vdp_vpp_stop (GstVdpVideoPostProcess * vpp) +{ + if (vpp->mixer != VDP_INVALID_HANDLE) + vpp->device->vdp_video_mixer_destroy (vpp->mixer); + if (vpp->device) + g_object_unref (vpp->device); +} + +static GstFlowReturn +gst_vdp_vpp_alloc_output_buffer (GstVdpVideoPostProcess * vpp, GstCaps * caps, + GstVdpOutputBuffer ** outbuf) +{ + GstFlowReturn ret; + + ret = gst_pad_alloc_buffer_and_set_caps (vpp->srcpad, 0, 0, + caps, (GstBuffer **) outbuf); + if (ret != GST_FLOW_OK) + return ret; + + if (!vpp->device) { +#define VDP_NUM_MIXER_PARAMETER 3 +#define MAX_NUM_FEATURES 5 + + GstStructure *structure; + gint chroma_type; + gint width, height; + + VdpStatus status; + GstVdpDevice *device; + + VdpVideoMixerFeature features[5]; + guint n_features = 0; + VdpVideoMixerParameter parameters[VDP_NUM_MIXER_PARAMETER] = { + VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_WIDTH, + VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_HEIGHT, + VDP_VIDEO_MIXER_PARAMETER_CHROMA_TYPE + }; + const void *parameter_values[VDP_NUM_MIXER_PARAMETER]; + + structure = gst_caps_get_structure (GST_PAD_CAPS (vpp->sinkpad), 0); + if (!gst_structure_get_int (structure, "chroma-type", &chroma_type) || + !gst_structure_get_int (structure, "width", &width) || + !gst_structure_get_int (structure, "height", &height)) + goto error; + + parameter_values[0] = &width; + parameter_values[1] = &height; + parameter_values[2] = &chroma_type; + + device = vpp->device = g_object_ref ((*outbuf)->device); + + + status = + device->vdp_video_mixer_create (device->device, n_features, features, + VDP_NUM_MIXER_PARAMETER, parameters, parameter_values, &vpp->mixer); + if (status != VDP_STATUS_OK) { + GST_ELEMENT_ERROR (vpp, RESOURCE, READ, + ("Could not create vdpau video mixer"), + ("Error returned from vdpau was: %s", + device->vdp_get_error_string (status))); + goto error; + } + } + + return ret; + +error: + gst_buffer_unref (GST_BUFFER (*outbuf)); + return GST_FLOW_ERROR; +} +
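/* Editor's note - illustrative sketch only, not part of the original patch:
 * vdp_video_mixer_create () pairs parameters[i] with parameter_values[i],
 * reading each value through a pointer of the parameter's documented type
 * (32-bit integers for the surface width/height, VdpChromaType for the
 * chroma type). A minimal, hypothetical call for a 720x576 4:2:0 stream
 * could look like this: */
#if 0
static VdpVideoMixer
create_mixer_sketch (GstVdpDevice * device)
{
  guint32 width = 720, height = 576;
  VdpChromaType chroma_type = VDP_CHROMA_TYPE_420;
  VdpVideoMixerParameter parameters[] = {
    VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_WIDTH,
    VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_HEIGHT,
    VDP_VIDEO_MIXER_PARAMETER_CHROMA_TYPE
  };
  const void *parameter_values[] = { &width, &height, &chroma_type };
  VdpVideoMixer mixer = VDP_INVALID_HANDLE;

  /* no features requested; error handling omitted for brevity */
  device->vdp_video_mixer_create (device->device, 0, NULL,
      G_N_ELEMENTS (parameters), parameters, parameter_values, &mixer);

  return mixer;
}
#endif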
+static GstFlowReturn +gst_vdp_vpp_chain (GstPad * pad, GstBuffer * buffer) +{ + GstVdpVideoPostProcess *vpp = + GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad)); + GstFlowReturn ret; + GstVdpOutputBuffer *outbuf; + + GstStructure *structure; + GstVideoRectangle src_r, dest_r; + VdpRect rect; + + GstVdpDevice *device; + VdpStatus status; + + if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))) { + GST_DEBUG_OBJECT (vpp, "Received discont buffer"); + gst_vdp_vpp_flush (vpp); + } + + ret = + gst_vdp_vpp_alloc_output_buffer (vpp, GST_PAD_CAPS (vpp->srcpad), + &outbuf); + if (ret != GST_FLOW_OK) + goto done; + + structure = gst_caps_get_structure (GST_BUFFER_CAPS (buffer), 0); + if (!gst_structure_get_int (structure, "width", &src_r.w) || + !gst_structure_get_int (structure, "height", &src_r.h)) + goto invalid_caps; + + structure = gst_caps_get_structure (GST_BUFFER_CAPS (outbuf), 0); + if (!gst_structure_get_int (structure, "width", &dest_r.w) || + !gst_structure_get_int (structure, "height", &dest_r.h)) + goto invalid_caps; + + if (vpp->force_aspect_ratio) { + GstVideoRectangle res_r; + + gst_video_sink_center_rect (src_r, dest_r, &res_r, TRUE); + rect.x0 = res_r.x; + rect.x1 = res_r.w + res_r.x; + rect.y0 = res_r.y; + rect.y1 = res_r.h + res_r.y; + } else { + rect.x0 = 0; + rect.x1 = dest_r.w; + rect.y0 = 0; + rect.y1 = dest_r.h; + } + + device = vpp->device; + status = device->vdp_video_mixer_render (vpp->mixer, VDP_INVALID_HANDLE, NULL, + VDP_VIDEO_MIXER_PICTURE_STRUCTURE_FRAME, 0, NULL, + GST_VDP_VIDEO_BUFFER (buffer)->surface, 0, NULL, NULL, outbuf->surface, + NULL, &rect, 0, NULL); + if (status != VDP_STATUS_OK) { + GST_ELEMENT_ERROR (vpp, RESOURCE, READ, + ("Could not post process frame"), + ("Error returned from vdpau was: %s", + device->vdp_get_error_string (status))); + ret = GST_FLOW_ERROR; + goto done; + } + + gst_buffer_copy_metadata (GST_BUFFER (outbuf), buffer, + GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS); + + ret = gst_pad_push (vpp->srcpad, GST_BUFFER (outbuf)); + +done: + gst_buffer_unref (buffer); + gst_object_unref (vpp); + + return ret; + +invalid_caps: + gst_buffer_unref (GST_BUFFER (outbuf)); + ret = GST_FLOW_ERROR; + goto done; +} + +static GstCaps * +gst_vdp_vpp_sink_getcaps (GstPad * pad) +{ + GstVdpVideoPostProcess *vpp = + GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad)); + GstCaps *caps; + + if (vpp->device) + caps = gst_vdp_video_buffer_get_allowed_video_caps (vpp->device); + else + caps = gst_static_pad_template_get_caps (&sink_template); + + gst_object_unref (vpp); + + return caps; +} + +static GstFlowReturn +gst_vdp_vpp_sink_bufferalloc (GstPad * pad, guint64 offset, guint size, + GstCaps * caps, GstBuffer ** buf) +{ + GstVdpVideoPostProcess *vpp = + GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad)); + GstVdpOutputBuffer *outbuf; + GstFlowReturn ret = GST_FLOW_ERROR; + GstVdpDevice *device = NULL; + GstStructure *structure; + gint width, height; + gint chroma_type; + + if (!vpp->device) { + /* if we haven't got a device yet we must alloc a buffer downstream to get it */ + GstCaps *src_caps = gst_pad_get_allowed_caps (vpp->srcpad); + gst_pad_fixate_caps (vpp->srcpad, src_caps); + ret = gst_pad_alloc_buffer (vpp->srcpad, 0, 0, src_caps, + (GstBuffer **) & outbuf); + + gst_caps_unref (src_caps); + if (ret != GST_FLOW_OK) + goto error; + + device = outbuf->device; + gst_buffer_unref (GST_BUFFER (outbuf)); + } else + device = vpp->device; + + structure = gst_caps_get_structure (caps, 0); + + if (!gst_structure_get_int 
(structure, "width", &width) || + !gst_structure_get_int (structure, "height", &height) || + !gst_structure_get_int (structure, "chroma-type", &chroma_type)) + goto error; + + *buf = GST_BUFFER (gst_vdp_video_buffer_new (device, + chroma_type, width, height)); + + if (*buf == NULL) + goto error; + + GST_BUFFER_SIZE (*buf) = size; + GST_BUFFER_OFFSET (*buf) = offset; + + gst_buffer_set_caps (*buf, caps); + + ret = GST_FLOW_OK; + +error: + + gst_object_unref (vpp); + return ret; +} + +static gboolean +gst_vdp_vpp_sink_event (GstPad * pad, GstEvent * event) +{ + GstVdpVideoPostProcess *vpp = + GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad)); + gboolean res; + + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_FLUSH_STOP: + { + GST_DEBUG_OBJECT (vpp, "flush stop"); + + gst_vdp_vpp_flush (vpp); + res = gst_pad_push_event (vpp->srcpad, event); + + break; + } + default: + res = gst_pad_event_default (pad, event); + } + + gst_object_unref (vpp); + + return res; +} + +static GstStateChangeReturn +gst_vdp_vpp_change_state (GstElement * element, GstStateChange transition) +{ + GstVdpVideoPostProcess *vpp; + GstStateChangeReturn ret; + + vpp = GST_VDP_VIDEO_POST_PROCESS (element); + + switch (transition) { + case GST_STATE_CHANGE_READY_TO_PAUSED: + gst_vdp_vpp_start (vpp); + break; + default: + break; + } + + ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); + + switch (transition) { + case GST_STATE_CHANGE_PAUSED_TO_READY: + gst_vdp_vpp_stop (vpp); + break; + default: + break; + } + + return ret; +} + +/* GObject vmethod implementations */ +static void +gst_vdp_vpp_get_property (GObject * object, guint property_id, GValue * value, + GParamSpec * pspec) +{ + GstVdpVideoPostProcess *vpp = GST_VDP_VIDEO_POST_PROCESS (object); + + switch (property_id) { + case PROP_FORCE_ASPECT_RATIO: + g_value_set_boolean (value, vpp->force_aspect_ratio); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec); + break; + } +} + +/* GObject vmethod implementations */ +static void +gst_vdp_vpp_set_property (GObject * object, guint property_id, + const GValue * value, GParamSpec * pspec) +{ + GstVdpVideoPostProcess *vpp = GST_VDP_VIDEO_POST_PROCESS (object); + + switch (property_id) { + case PROP_FORCE_ASPECT_RATIO: + vpp->force_aspect_ratio = g_value_get_boolean (value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec); + break; + } +} + +/* GType vmethod implementations */ + +static void +gst_vdp_vpp_base_init (gpointer gclass) +{ + GstElementClass *element_class = GST_ELEMENT_CLASS (gclass); + + gst_element_class_set_details_simple (element_class, + "VDPAU Mpeg Decoder", + "Filter/Converter/Decoder/Video", + "Post process GstVdpVideoBuffers and output GstVdpOutputBuffers", + "Carl-Anton Ingmarsson "); + + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&sink_template)); + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&src_template)); +} + +/* initialize the vdpaumpegdecoder's class */ +static void +gst_vdp_vpp_class_init (GstVdpVideoPostProcessClass * klass) +{ + GObjectClass *gobject_class; + GstElementClass *gstelement_class; + + gobject_class = (GObjectClass *) klass; + gstelement_class = (GstElementClass *) klass; + + gobject_class->get_property = gst_vdp_vpp_get_property; + gobject_class->set_property = gst_vdp_vpp_set_property; + gobject_class->finalize = gst_vdp_vpp_finalize; + + g_object_class_install_property (gobject_class, 
PROP_FORCE_ASPECT_RATIO, + g_param_spec_boolean ("force-aspect-ratio", "Force aspect ratio", + "When enabled, the plugin will only scale up the input surface to the " + "maximum size where the aspect ratio can be preserved", FALSE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + gstelement_class->change_state = gst_vdp_vpp_change_state; +} + +static void +gst_vdp_vpp_init (GstVdpVideoPostProcess * vpp, + GstVdpVideoPostProcessClass * gclass) +{ + vpp->force_aspect_ratio = FALSE; + + /* SRC PAD */ + vpp->srcpad = gst_pad_new_from_static_template (&src_template, "src"); + gst_element_add_pad (GST_ELEMENT (vpp), vpp->srcpad); + + /* SINK PAD */ + vpp->sinkpad = gst_pad_new_from_static_template (&sink_template, "sink"); + gst_element_add_pad (GST_ELEMENT (vpp), vpp->sinkpad); + + gst_pad_set_getcaps_function (vpp->sinkpad, gst_vdp_vpp_sink_getcaps); + gst_pad_set_setcaps_function (vpp->sinkpad, gst_vdp_vpp_set_caps); + gst_pad_set_chain_function (vpp->sinkpad, + GST_DEBUG_FUNCPTR (gst_vdp_vpp_chain)); + gst_pad_set_event_function (vpp->sinkpad, + GST_DEBUG_FUNCPTR (gst_vdp_vpp_sink_event)); + gst_pad_set_bufferalloc_function (vpp->sinkpad, gst_vdp_vpp_sink_bufferalloc); +} + +static void +gst_vdp_vpp_finalize (GObject * object) +{ + G_OBJECT_CLASS (parent_class)->finalize (object); +} diff --git a/sys/vdpau/gstvdpvideopostprocess.h b/sys/vdpau/gstvdpvideopostprocess.h new file mode 100644 index 0000000000..a2d761610f --- /dev/null +++ b/sys/vdpau/gstvdpvideopostprocess.h @@ -0,0 +1,60 @@ +/* + * GStreamer + * Copyright (C) 2009 Carl-Anton Ingmarsson + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA.
+ */ + +#ifndef __GST_VDP_VIDEO_POST_PROCESS_H__ +#define __GST_VDP_VIDEO_POST_PROCESS_H__ + +#include + +#include "gstvdpdevice.h" + +G_BEGIN_DECLS + +#define GST_TYPE_VDP_VIDEO_POST_PROCESS (gst_vdp_vpp_get_type()) +#define GST_VDP_VIDEO_POST_PROCESS(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VDP_VIDEO_POST_PROCESS,GstVdpVideoPostProcess)) +#define GST_VDP_VIDEO_POST_PROCESS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VDP_VIDEO_POST_PROCESS,GstVdpVideoPostProcessClass)) +#define GST_IS_VDP_VIDEO_POST_PROCESS(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VDP_VIDEO_POST_PROCESS)) +#define GST_IS_VDP_VIDEO_POST_PROCESS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VDP_VIDEO_POST_PROCESS)) + +typedef struct _GstVdpVideoPostProcess GstVdpVideoPostProcess; +typedef struct _GstVdpVideoPostProcessClass GstVdpVideoPostProcessClass; + +struct _GstVdpVideoPostProcess +{ + GstElement element; + + GstPad *sinkpad, *srcpad; + + GstVdpDevice *device; + VdpVideoMixer mixer; + + gboolean force_aspect_ratio; +}; + +struct _GstVdpVideoPostProcessClass +{ + GstElementClass element_class; +}; + +GType gst_vdp_vpp_get_type (void); + +G_END_DECLS + +#endif /* __GST_VDP_VIDEO_POST_PROCESS_H__ */ \ No newline at end of file diff --git a/sys/vdpau/gstvdpvideoyuv.c b/sys/vdpau/gstvdpvideoyuv.c index 466424c0ed..95d9e2998b 100644 --- a/sys/vdpau/gstvdpvideoyuv.c +++ b/sys/vdpau/gstvdpvideoyuv.c @@ -236,7 +236,9 @@ gst_vdp_video_yuv_transform (GstBaseTransform * trans, GstBuffer * inbuf, break; } - gst_buffer_copy_metadata (outbuf, inbuf, GST_BUFFER_COPY_TIMESTAMPS); + gst_buffer_copy_metadata (outbuf, inbuf, + GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS); + GST_LOG_OBJECT (video_yuv, "Pushing buffer with ts %" GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf))); @@ -322,7 +324,7 @@ gst_vdp_video_yuv_transform_caps (GstBaseTransform * trans, GstPadDirection direction, GstCaps * caps) { GstVdpVideoYUV *video_yuv = GST_VDP_VIDEO_YUV (trans); - GstCaps *result; + GstCaps *result = NULL; if (direction == GST_PAD_SINK) result = gst_vdp_video_to_yuv_caps (caps, video_yuv->device); @@ -406,7 +408,7 @@ gst_vdp_video_yuv_base_init (gpointer klass) gst_element_class_set_details_simple (element_class, "VdpauVideoYUV", - "Covideo_yuv/Decoder/Video", + "Filter/Converter/Decoder/Video", "VDPAU video surface to YUV", "Carl-Anton Ingmarsson "); diff --git a/sys/vdpau/gstvdpyuvvideo.c b/sys/vdpau/gstvdpyuvvideo.c index be350c7959..132b7f1526 100644 --- a/sys/vdpau/gstvdpyuvvideo.c +++ b/sys/vdpau/gstvdpyuvvideo.c @@ -233,7 +233,8 @@ gst_vdp_yuv_video_transform (GstBaseTransform * trans, GstBuffer * inbuf, break; } - gst_buffer_copy_metadata (outbuf, inbuf, GST_BUFFER_COPY_TIMESTAMPS); + gst_buffer_copy_metadata (outbuf, inbuf, + GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS); return GST_FLOW_OK; } @@ -262,7 +263,7 @@ gst_vdp_yuv_video_transform_caps (GstBaseTransform * trans, GstPadDirection direction, GstCaps * caps) { GstVdpYUVVideo *yuv_video = GST_VDP_YUV_VIDEO (trans); - GstCaps *result; + GstCaps *result = NULL; if (direction == GST_PAD_SINK) { result = gst_vdp_yuv_to_video_caps (caps, yuv_video->device); @@ -305,7 +306,7 @@ gst_vdp_yuv_video_base_init (gpointer klass) gst_element_class_set_details_simple (element_class, "VdpauYUVVideo", - "Coyuv_video/Decoder/Video", + "Filter/Converter/Decoder/Video", "VDPAU video surface to YUV", "Carl-Anton Ingmarsson ");
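For reference, a minimal usage sketch (an editorial addition, not part of the patch) of the new gst_vdp_video_to_output_caps () helper added to gstvdputils.c: the function copies the caps it is given, renames every structure to video/x-vdpau-output and drops the chroma-type field, so width and height carry over unchanged. The caps string below is made up for illustration:

  GstCaps *video_caps = gst_caps_from_string
      ("video/x-vdpau-video, chroma-type=(int)0, width=(int)1280, height=(int)720");
  GstCaps *output_caps = gst_vdp_video_to_output_caps (video_caps);
  /* output_caps now reads "video/x-vdpau-output, width=(int)1280, height=(int)720".
   * The input caps are copied, not consumed, so both references must be dropped. */
  gst_caps_unref (video_caps);
  gst_caps_unref (output_caps);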