vdpau: small api cleanup

Merge gst_vdp_video_buffer_get_allowed_[video|yuv]_caps into
gst_vdp_video_buffer_get_allowed_caps.
This commit is contained in:
Carl-Anton Ingmarsson 2010-04-29 22:24:35 +02:00
parent 5739eb3c8f
commit 8a355f1a24
5 changed files with 42 additions and 78 deletions

View File

@ -809,8 +809,8 @@ gst_vdp_sink_show_frame (GstBaseSink * bsink, GstBuffer * outbuf)
g_mutex_lock (vdp_sink->x_lock); g_mutex_lock (vdp_sink->x_lock);
status = status =
device->vdp_presentation_queue_query_surface_status (vdp_sink->window-> device->vdp_presentation_queue_query_surface_status (vdp_sink->
queue, surface, &queue_status, &pres_time); window->queue, surface, &queue_status, &pres_time);
g_mutex_unlock (vdp_sink->x_lock); g_mutex_unlock (vdp_sink->x_lock);
if (queue_status == VDP_PRESENTATION_QUEUE_STATUS_QUEUED) { if (queue_status == VDP_PRESENTATION_QUEUE_STATUS_QUEUED) {

View File

@ -163,14 +163,16 @@ gst_vdp_video_buffer_get_caps (gboolean filter, VdpChromaType chroma_type)
} }
GstCaps * GstCaps *
gst_vdp_video_buffer_get_allowed_yuv_caps (GstVdpDevice * device) gst_vdp_video_buffer_get_allowed_caps (GstVdpDevice * device)
{ {
GstCaps *caps; GstCaps *video_caps, *yuv_caps;
gint i; gint i;
caps = gst_caps_new_empty ();
for (i = 0; i < G_N_ELEMENTS (chroma_types); i++) {
VdpStatus status; VdpStatus status;
video_caps = gst_caps_new_empty ();
yuv_caps = gst_caps_new_empty ();
for (i = 0; i < G_N_ELEMENTS (chroma_types); i++) {
VdpBool is_supported; VdpBool is_supported;
guint32 max_w, max_h; guint32 max_w, max_h;
@ -178,17 +180,19 @@ gst_vdp_video_buffer_get_allowed_yuv_caps (GstVdpDevice * device)
device->vdp_video_surface_query_capabilities (device->device, device->vdp_video_surface_query_capabilities (device->device,
chroma_types[i], &is_supported, &max_w, &max_h); chroma_types[i], &is_supported, &max_w, &max_h);
if (status != VDP_STATUS_OK && status != VDP_STATUS_INVALID_CHROMA_TYPE) { if (status != VDP_STATUS_OK && status != VDP_STATUS_INVALID_CHROMA_TYPE)
GST_ERROR_OBJECT (device, goto surface_query_caps_error;
"Could not get query VDPAU video surface capabilites, "
"Error returned from vdpau was: %s",
device->vdp_get_error_string (status));
goto error;
}
if (is_supported) { if (is_supported) {
GstCaps *format_caps;
gint j; gint j;
format_caps = gst_caps_new_simple ("video/x-vdpau-video",
"chroma-type", G_TYPE_INT, chroma_types[i],
"width", GST_TYPE_INT_RANGE, 1, max_w,
"height", GST_TYPE_INT_RANGE, 1, max_h, NULL);
gst_caps_append (video_caps, format_caps);
for (j = 0; j < G_N_ELEMENTS (formats); j++) { for (j = 0; j < G_N_ELEMENTS (formats); j++) {
if (formats[j].chroma_type != chroma_types[i]) if (formats[j].chroma_type != chroma_types[i])
continue; continue;
@ -197,69 +201,36 @@ gst_vdp_video_buffer_get_allowed_yuv_caps (GstVdpDevice * device)
device->vdp_video_surface_query_ycbcr_capabilities (device->device, device->vdp_video_surface_query_ycbcr_capabilities (device->device,
formats[j].chroma_type, formats[j].format, &is_supported); formats[j].chroma_type, formats[j].format, &is_supported);
if (status != VDP_STATUS_OK if (status != VDP_STATUS_OK
&& status != VDP_STATUS_INVALID_Y_CB_CR_FORMAT) { && status != VDP_STATUS_INVALID_Y_CB_CR_FORMAT)
GST_ERROR_OBJECT (device, "Could not query VDPAU YCbCr capabilites, " goto surface_query_ycbcr_error;
"Error returned from vdpau was: %s",
device->vdp_get_error_string (status));
goto error;
}
if (is_supported) { if (is_supported) {
GstCaps *format_caps;
format_caps = gst_caps_new_simple ("video/x-raw-yuv", format_caps = gst_caps_new_simple ("video/x-raw-yuv",
"format", GST_TYPE_FOURCC, formats[j].fourcc, "format", GST_TYPE_FOURCC, formats[j].fourcc,
"width", GST_TYPE_INT_RANGE, 1, max_w, "width", GST_TYPE_INT_RANGE, 1, max_w,
"height", GST_TYPE_INT_RANGE, 1, max_h, NULL); "height", GST_TYPE_INT_RANGE, 1, max_h, NULL);
gst_caps_append (caps, format_caps); gst_caps_append (yuv_caps, format_caps);
} }
} }
} }
} }
error: done:
return caps; gst_caps_append (video_caps, yuv_caps);
} return video_caps;
GstCaps * surface_query_caps_error:
gst_vdp_video_buffer_get_allowed_video_caps (GstVdpDevice * device)
{
GstCaps *caps;
gint i;
caps = gst_caps_new_empty ();
for (i = 0; i < G_N_ELEMENTS (chroma_types); i++) {
VdpStatus status;
VdpBool is_supported;
guint32 max_w, max_h;
status =
device->vdp_video_surface_query_capabilities (device->device,
chroma_types[i], &is_supported, &max_w, &max_h);
if (status != VDP_STATUS_OK && status != VDP_STATUS_INVALID_CHROMA_TYPE) {
GST_ERROR_OBJECT (device, GST_ERROR_OBJECT (device,
"Could not get query VDPAU video surface capabilites, " "Could not get query VDPAU video surface capabilites, "
"Error returned from vdpau was: %s", "Error returned from vdpau was: %s",
device->vdp_get_error_string (status)); device->vdp_get_error_string (status));
goto done;
goto error; surface_query_ycbcr_error:
} GST_ERROR_OBJECT (device, "Could not query VDPAU YCbCr capabilites, "
"Error returned from vdpau was: %s",
if (is_supported) { device->vdp_get_error_string (status));
GstCaps *format_caps; goto done;
format_caps = gst_caps_new_simple ("video/x-vdpau-video",
"chroma-type", G_TYPE_INT, chroma_types[i],
"width", GST_TYPE_INT_RANGE, 1, max_w,
"height", GST_TYPE_INT_RANGE, 1, max_h, NULL);
gst_caps_append (caps, format_caps);
}
}
error:
return caps;
} }
gboolean gboolean

View File

@ -96,8 +96,7 @@ GstVdpVideoBuffer* gst_vdp_video_buffer_new (GstVdpDevice * device, VdpChromaTyp
void gst_vdp_video_buffer_add_reference (GstVdpVideoBuffer *buffer, GstVdpVideoBuffer *buf); void gst_vdp_video_buffer_add_reference (GstVdpVideoBuffer *buffer, GstVdpVideoBuffer *buf);
GstCaps *gst_vdp_video_buffer_get_caps (gboolean filter, VdpChromaType chroma_type); GstCaps *gst_vdp_video_buffer_get_caps (gboolean filter, VdpChromaType chroma_type);
GstCaps *gst_vdp_video_buffer_get_allowed_yuv_caps (GstVdpDevice * device); GstCaps *gst_vdp_video_buffer_get_allowed_caps (GstVdpDevice * device);
GstCaps *gst_vdp_video_buffer_get_allowed_video_caps (GstVdpDevice * device);
GstCaps *gst_vdp_video_buffer_parse_yuv_caps (GstCaps *yuv_caps); GstCaps *gst_vdp_video_buffer_parse_yuv_caps (GstCaps *yuv_caps);

View File

@ -844,11 +844,7 @@ gst_vdp_vpp_sink_getcaps (GstPad * pad)
GstCaps *caps; GstCaps *caps;
if (vpp->device) { if (vpp->device) {
GstCaps *video_caps, *yuv_caps; caps = gst_vdp_video_buffer_get_allowed_caps (vpp->device);
video_caps = gst_vdp_video_buffer_get_allowed_video_caps (vpp->device);
yuv_caps = gst_vdp_video_buffer_get_allowed_yuv_caps (vpp->device);
gst_caps_append (video_caps, yuv_caps);
caps = video_caps;
} else { } else {
GstElementClass *element_class = GST_ELEMENT_GET_CLASS (vpp); GstElementClass *element_class = GST_ELEMENT_GET_CLASS (vpp);
GstPadTemplate *sink_template; GstPadTemplate *sink_template;

View File

@ -127,21 +127,19 @@ gst_vdp_video_src_pad_push (GstVdpVideoSrcPad * vdp_pad,
static void static void
gst_vdp_video_src_pad_update_caps (GstVdpVideoSrcPad * vdp_pad) gst_vdp_video_src_pad_update_caps (GstVdpVideoSrcPad * vdp_pad)
{ {
GstCaps *yuv_caps, *video_caps; GstCaps *caps;
const GstCaps *templ_caps; const GstCaps *templ_caps;
video_caps = gst_vdp_video_buffer_get_allowed_video_caps (vdp_pad->device);
yuv_caps = gst_vdp_video_buffer_get_allowed_yuv_caps (vdp_pad->device);
gst_caps_append (video_caps, yuv_caps);
if (vdp_pad->caps) if (vdp_pad->caps)
gst_caps_unref (vdp_pad->caps); gst_caps_unref (vdp_pad->caps);
caps = gst_vdp_video_buffer_get_allowed_caps (vdp_pad->device);
if ((templ_caps = gst_pad_get_pad_template_caps (GST_PAD (vdp_pad)))) { if ((templ_caps = gst_pad_get_pad_template_caps (GST_PAD (vdp_pad)))) {
vdp_pad->caps = gst_caps_intersect (video_caps, templ_caps); vdp_pad->caps = gst_caps_intersect (caps, templ_caps);
gst_caps_unref (video_caps); gst_caps_unref (caps);
} else } else
vdp_pad->caps = video_caps; vdp_pad->caps = caps;
} }
GstFlowReturn GstFlowReturn