sys/v4l2/: add norm, channel and frequency properties.

Original commit message from CVS:
2004-01-12  Benjamin Otte  <in7y118@public.uni-hamburg.de>

* sys/v4l2/gstv4l2element.c: (gst_v4l2element_class_init),
(gst_v4l2element_dispose), (gst_v4l2element_set_property),
(gst_v4l2element_get_property):
* sys/v4l2/v4l2_calls.c: (gst_v4l2_set_defaults), (gst_v4l2_open):
add norm, channel and frequency properties.
* sys/v4l2/gstv4l2tuner.c:
fixes for tuner interface changes
* sys/v4l2/gstv4l2element.h:
* sys/v4l2/gstv4l2src.c:
* sys/v4l2/gstv4l2src.h:
* sys/v4l2/v4l2src_calls.c:
* sys/v4l2/v4l2src_calls.h:
rework v4l2src to work with saa7134 cards and allow mmap'ed buffers.
Author: Benjamin Otte <in7y118@public.uni-hamburg.de>
Date:   2004-01-12 02:19:57 +00:00
Commit: 1adb86a5d6 (parent: 09984b518a)

9 changed files with 1077 additions and 1060 deletions
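
With the new properties an application can configure the tuner through ordinary GObject property calls: values set before the device is opened are cached in the element and applied by gst_v4l2_set_defaults() when gst_v4l2_open() runs, while setting them on an already-open device goes through the GstTuner interface. Below is a minimal sketch against the 0.8-era API; the "PAL", "Television" and frequency values are illustrative placeholders, not taken from this commit:

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstElement *src;

  gst_init (&argc, &argv);

  /* v4l2src inherits the norm/channel/frequency properties from GstV4l2Element */
  src = gst_element_factory_make ("v4l2src", "source");
  g_object_set (G_OBJECT (src),
      "device", "/dev/video0",
      "norm", "PAL",           /* looked up by name in the device's norm list */
      "channel", "Television", /* input (or output) to switch to, by name */
      "frequency", 1000UL,     /* tuner frequency; units are whatever the driver's tuner expects */
      NULL);

  /* ... add src to a pipeline, link it and set the pipeline to PLAYING ... */

  return 0;
}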

ChangeLog

@@ -1,3 +1,19 @@
+2004-01-12  Benjamin Otte  <in7y118@public.uni-hamburg.de>
+
+    * sys/v4l2/gstv4l2element.c: (gst_v4l2element_class_init),
+    (gst_v4l2element_dispose), (gst_v4l2element_set_property),
+    (gst_v4l2element_get_property):
+    * sys/v4l2/v4l2_calls.c: (gst_v4l2_set_defaults), (gst_v4l2_open):
+    add norm, channel and frequency properties.
+    * sys/v4l2/gstv4l2tuner.c:
+    fixes for tuner interface changes
+    * sys/v4l2/gstv4l2element.h:
+    * sys/v4l2/gstv4l2src.c:
+    * sys/v4l2/gstv4l2src.h:
+    * sys/v4l2/v4l2src_calls.c:
+    * sys/v4l2/v4l2src_calls.h:
+    rework v4l2src to work with saa7134 cards and allow mmap'ed buffers.
+
 2004-01-12  Benjamin Otte  <in7y118@public.uni-hamburg.de>
 
     * gst-libs/gst/tuner/tuner.c: (gst_tuner_class_init),

sys/v4l2/gstv4l2element.c

@@ -53,6 +53,9 @@ enum {
   ARG_0,
   ARG_DEVICE,
   ARG_DEVICE_NAME,
+  ARG_NORM,
+  ARG_CHANNEL,
+  ARG_FREQUENCY,
   ARG_FLAGS
 };
@@ -378,23 +381,29 @@ gst_v4l2element_base_init (GstV4l2ElementClass *klass)
 static void
 gst_v4l2element_class_init (GstV4l2ElementClass *klass)
 {
-  GObjectClass *gobject_class;
-  GstElementClass *gstelement_class;
-
-  gobject_class = (GObjectClass*)klass;
-  gstelement_class = (GstElementClass*)klass;
-
-  parent_class = g_type_class_ref(GST_TYPE_ELEMENT);
-
-  g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_DEVICE,
+  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+  GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+
+  parent_class = g_type_class_peek_parent (klass);
+
+  g_object_class_install_property(gobject_class, ARG_DEVICE,
     g_param_spec_string("device", "Device", "Device location",
                         NULL, G_PARAM_READWRITE));
-  g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_DEVICE_NAME,
+  g_object_class_install_property(gobject_class, ARG_DEVICE_NAME,
     g_param_spec_string("device_name", "Device name",
                         "Name of the device", NULL, G_PARAM_READABLE));
-  g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_FLAGS,
+  g_object_class_install_property(gobject_class, ARG_FLAGS,
     g_param_spec_flags("flags", "Flags", "Device type flags",
                        GST_TYPE_V4L2_DEVICE_FLAGS, 0, G_PARAM_READABLE));
+  g_object_class_install_property(gobject_class, ARG_NORM,
+    g_param_spec_string("norm", "norm",
+                        "Norm to use", NULL, G_PARAM_READWRITE));
+  g_object_class_install_property(gobject_class, ARG_CHANNEL,
+    g_param_spec_string("channel", "channel",
+                        "input/output to switch to", NULL, G_PARAM_READWRITE));
+  g_object_class_install_property(gobject_class, ARG_FREQUENCY,
+    g_param_spec_ulong ("frequency", "frequency",
+                        "frequency to tune to", 0, G_MAXULONG, 0, G_PARAM_READWRITE));
 
   /* signals */
   gst_v4l2element_signals[SIGNAL_OPEN] =
@@ -446,9 +455,12 @@ gst_v4l2element_dispose (GObject *object)
     g_free (v4l2element->display);
   }
 
-  if (v4l2element->device) {
   g_free (v4l2element->device);
-  }
+  v4l2element->device = NULL;
+  g_free (v4l2element->norm);
+  v4l2element->norm = NULL;
+  g_free (v4l2element->channel);
+  v4l2element->channel = NULL;
 
   if (((GObjectClass *) parent_class)->dispose)
     ((GObjectClass *) parent_class)->dispose(object);
@@ -461,17 +473,56 @@ gst_v4l2element_set_property (GObject *object,
                               GParamSpec *pspec)
 {
   GstV4l2Element *v4l2element;
+  GstTuner *tuner;
 
   /* it's not null if we got it, but it might not be ours */
-  g_return_if_fail(GST_IS_V4L2ELEMENT(object));
-  v4l2element = GST_V4L2ELEMENT(object);
+  g_return_if_fail (GST_IS_V4L2ELEMENT (object));
+  v4l2element = GST_V4L2ELEMENT (object);
+  /* stupid GstInterface */
+  tuner = (GstTuner *) object;
 
   switch (prop_id) {
     case ARG_DEVICE:
       if (!GST_V4L2_IS_OPEN(v4l2element)) {
         if (v4l2element->device)
           g_free(v4l2element->device);
-        v4l2element->device = g_strdup(g_value_get_string(value));
+        v4l2element->device = g_value_dup_string(value);
+      }
+      break;
+    case ARG_NORM:
+      if (GST_V4L2_IS_OPEN(v4l2element)) {
+        GstTunerNorm *norm = gst_tuner_get_norm (tuner);
+        if (norm) {
+          gst_tuner_set_norm (tuner, norm);
+        }
+      } else {
+        g_free (v4l2element->norm);
+        v4l2element->norm = g_value_dup_string (value);
+        g_object_notify (object, "norm");
+      }
+      break;
+    case ARG_CHANNEL:
+      if (GST_V4L2_IS_OPEN(v4l2element)) {
+        GstTunerChannel *channel = gst_tuner_get_channel (tuner);
+        if (channel) {
+          gst_tuner_set_channel (tuner, channel);
+        }
+      } else {
+        g_free (v4l2element->channel);
+        v4l2element->channel = g_value_dup_string (value);
+        g_object_notify (object, "channel");
+      }
+      break;
+    case ARG_FREQUENCY:
+      if (GST_V4L2_IS_OPEN(v4l2element)) {
+        GstTunerChannel *channel;
+        if (!v4l2element->channel) return;
+        channel = gst_tuner_get_channel (tuner);
+        g_assert (channel);
+        gst_tuner_set_frequency (tuner, channel, g_value_get_ulong (value));
+      } else {
+        v4l2element->frequency = g_value_get_ulong (value);
+        g_object_notify (object, "frequency");
       }
       break;
     default:
@@ -513,6 +564,15 @@ gst_v4l2element_get_property (GObject *object,
       g_value_set_flags(value, flags);
       break;
     }
+    case ARG_NORM:
+      g_value_set_string (value, v4l2element->norm);
+      break;
+    case ARG_CHANNEL:
+      g_value_set_string (value, v4l2element->channel);
+      break;
+    case ARG_FREQUENCY:
+      g_value_set_ulong (value, v4l2element->frequency);
+      break;
     default:
       G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
       break;

sys/v4l2/gstv4l2element.h

@@ -82,6 +82,11 @@ struct _GstV4l2Element {
   GstXWindowListener *overlay;
   XID xwindow_id;
 
+  /* properties */
+  gchar *norm;
+  gchar *channel;
+  gulong frequency;
+
   /* caching values */
   gchar *display;
 };

sys/v4l2/gstv4l2src.c

@ -26,6 +26,9 @@
#include "v4l2src_calls.h" #include "v4l2src_calls.h"
#include "gstv4l2tuner.h" #include "gstv4l2tuner.h"
GST_DEBUG_CATEGORY (v4l2src_debug);
#define GST_CAT_DEFAULT v4l2src_debug
/* elementfactory details */ /* elementfactory details */
static GstElementDetails gst_v4l2src_details = { static GstElementDetails gst_v4l2src_details = {
"Video (video4linux2) Source", "Video (video4linux2) Source",
@ -51,15 +54,50 @@ enum {
ARG_USE_FIXED_FPS ARG_USE_FIXED_FPS
}; };
guint32 gst_v4l2_formats[] = {
/* from Linux 2.6.0 videodev2.h */
V4L2_PIX_FMT_RGB332, /* 8 RGB-3-3-2 */
V4L2_PIX_FMT_RGB555, /* 16 RGB-5-5-5 */
V4L2_PIX_FMT_RGB565, /* 16 RGB-5-6-5 */
V4L2_PIX_FMT_RGB555X, /* 16 RGB-5-5-5 BE */
V4L2_PIX_FMT_RGB565X, /* 16 RGB-5-6-5 BE */
V4L2_PIX_FMT_BGR24, /* 24 BGR-8-8-8 */
V4L2_PIX_FMT_RGB24, /* 24 RGB-8-8-8 */
V4L2_PIX_FMT_BGR32, /* 32 BGR-8-8-8-8 */
V4L2_PIX_FMT_RGB32, /* 32 RGB-8-8-8-8 */
V4L2_PIX_FMT_GREY, /* 8 Greyscale */
V4L2_PIX_FMT_YVU410, /* 9 YVU 4:1:0 */
V4L2_PIX_FMT_YVU420, /* 12 YVU 4:2:0 */
V4L2_PIX_FMT_YUYV, /* 16 YUV 4:2:2 */
V4L2_PIX_FMT_UYVY, /* 16 YUV 4:2:2 */
V4L2_PIX_FMT_YUV422P, /* 16 YVU422 planar */
V4L2_PIX_FMT_YUV411P, /* 16 YVU411 planar */
V4L2_PIX_FMT_Y41P, /* 12 YUV 4:1:1 */
V4L2_PIX_FMT_NV12, /* 12 Y/CbCr 4:2:0 */
V4L2_PIX_FMT_NV21, /* 12 Y/CrCb 4:2:0 */
V4L2_PIX_FMT_YUV410, /* 9 YUV 4:1:0 */
V4L2_PIX_FMT_YUV420, /* 12 YUV 4:2:0 */
V4L2_PIX_FMT_YYUV, /* 16 YUV 4:2:2 */
V4L2_PIX_FMT_HI240, /* 8 8-bit color */
V4L2_PIX_FMT_MJPEG, /* Motion-JPEG */
V4L2_PIX_FMT_JPEG, /* JFIF JPEG */
V4L2_PIX_FMT_DV, /* 1394 */
V4L2_PIX_FMT_MPEG, /* MPEG */
V4L2_PIX_FMT_WNVA /* Winnov hw compres */
};
#define GST_V4L2_FORMAT_COUNT (G_N_ELEMENTS (gst_v4l2_formats))
GST_FORMATS_FUNCTION (GstPad *, gst_v4l2src_get_formats, GST_FORMATS_FUNCTION (GstPad *, gst_v4l2src_get_formats,
GST_FORMAT_TIME, GST_FORMAT_DEFAULT); GST_FORMAT_TIME, GST_FORMAT_DEFAULT);
GST_QUERY_TYPE_FUNCTION (GstPad *, gst_v4l2src_get_query_types, GST_QUERY_TYPE_FUNCTION (GstPad *, gst_v4l2src_get_query_types,
GST_QUERY_POSITION); GST_QUERY_POSITION);
/* init functions */ /* init functions */
static void gst_v4l2src_class_init (GstV4l2SrcClass *klass); static void gst_v4l2src_class_init (gpointer g_class,
static void gst_v4l2src_base_init (GstV4l2SrcClass *klass); gpointer class_data);
static void gst_v4l2src_init (GstV4l2Src *v4l2src); static void gst_v4l2src_base_init (gpointer g_class);
static void gst_v4l2src_init (GTypeInstance * instance,
gpointer g_class);
/* signal functions */ /* signal functions */
static void gst_v4l2src_open (GstElement *element, static void gst_v4l2src_open (GstElement *element,
@ -68,9 +106,12 @@ static void gst_v4l2src_close (GstElement *element,
const gchar *device); const gchar *device);
/* pad/buffer functions */ /* pad/buffer functions */
static GstPadLinkReturn gst_v4l2src_srcconnect (GstPad *pad, static const GstCaps * gst_v4l2src_get_all_caps (void);
static GstPadLinkReturn gst_v4l2src_link (GstPad *pad,
const GstCaps *caps); const GstCaps *caps);
static GstCaps * gst_v4l2src_getcaps (GstPad *pad); static GstCaps * gst_v4l2src_getcaps (GstPad *pad);
static GstCaps * gst_v4l2src_fixate (GstPad * pad,
const GstCaps * caps);
static GstData * gst_v4l2src_get (GstPad *pad); static GstData * gst_v4l2src_get (GstPad *pad);
static gboolean gst_v4l2src_src_convert (GstPad *pad, static gboolean gst_v4l2src_src_convert (GstPad *pad,
GstFormat src_format, GstFormat src_format,
@ -100,8 +141,6 @@ static GstElementStateReturn
static void gst_v4l2src_set_clock (GstElement *element, static void gst_v4l2src_set_clock (GstElement *element,
GstClock *clock); GstClock *clock);
static GstPadTemplate *src_template;
static GstElementClass *parent_class = NULL; static GstElementClass *parent_class = NULL;
static guint gst_v4l2src_signals[LAST_SIGNAL] = { 0 }; static guint gst_v4l2src_signals[LAST_SIGNAL] = { 0 };
@ -113,60 +152,58 @@ gst_v4l2src_get_type (void)
if (!v4l2src_type) { if (!v4l2src_type) {
static const GTypeInfo v4l2src_info = { static const GTypeInfo v4l2src_info = {
sizeof(GstV4l2SrcClass), sizeof (GstV4l2SrcClass),
(GBaseInitFunc) gst_v4l2src_base_init, gst_v4l2src_base_init,
NULL, NULL,
(GClassInitFunc) gst_v4l2src_class_init, gst_v4l2src_class_init,
NULL, NULL,
NULL, NULL,
sizeof(GstV4l2Src), sizeof (GstV4l2Src),
0, 0,
(GInstanceInitFunc) gst_v4l2src_init, gst_v4l2src_init,
NULL NULL
}; };
v4l2src_type = g_type_register_static(GST_TYPE_V4L2ELEMENT, v4l2src_type = g_type_register_static(GST_TYPE_V4L2ELEMENT,
"GstV4l2Src", &v4l2src_info, 0); "GstV4l2Src", &v4l2src_info, 0);
GST_DEBUG_CATEGORY_INIT (v4l2src_debug, "v4l2src", 0, "v4l2src element");
} }
return v4l2src_type; return v4l2src_type;
} }
static void static void
gst_v4l2src_base_init (GstV4l2SrcClass *klass) gst_v4l2src_base_init (gpointer g_class)
{ {
GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass); GstPadTemplate *template;
GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_set_details (gstelement_class, gst_element_class_set_details (gstelement_class, &gst_v4l2src_details);
&gst_v4l2src_details);
src_template = gst_pad_template_new ("src", template = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
GST_PAD_SRC, gst_caps_copy (gst_v4l2src_get_all_caps ()));
GST_PAD_ALWAYS,
NULL);
gst_element_class_add_pad_template (gstelement_class, src_template); gst_element_class_add_pad_template (gstelement_class, template);
} }
static void static void
gst_v4l2src_class_init (GstV4l2SrcClass *klass) gst_v4l2src_class_init (gpointer g_class, gpointer class_data)
{ {
GObjectClass *gobject_class; GObjectClass *gobject_class = G_OBJECT_CLASS (g_class);
GstElementClass *gstelement_class; GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
GstV4l2ElementClass *v4l2_class; GstV4l2ElementClass *v4l2_class = GST_V4L2ELEMENT_CLASS (g_class);
gobject_class = (GObjectClass*)klass; parent_class = g_type_class_peek_parent (g_class);
gstelement_class = (GstElementClass*)klass;
v4l2_class = (GstV4l2ElementClass*)klass;
parent_class = g_type_class_ref(GST_TYPE_V4L2ELEMENT); gobject_class->set_property = gst_v4l2src_set_property;
gobject_class->get_property = gst_v4l2src_get_property;
g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_NUMBUFS, g_object_class_install_property(gobject_class, ARG_NUMBUFS,
g_param_spec_int("num_buffers","num_buffers","num_buffers", g_param_spec_int("num_buffers","num_buffers","num_buffers",
G_MININT,G_MAXINT,0,G_PARAM_READWRITE)); G_MININT,G_MAXINT,0,G_PARAM_READWRITE));
g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_BUFSIZE, g_object_class_install_property(gobject_class, ARG_BUFSIZE,
g_param_spec_int("buffer_size","buffer_size","buffer_size", g_param_spec_int("buffer_size","buffer_size","buffer_size",
G_MININT,G_MAXINT,0,G_PARAM_READABLE)); G_MININT,G_MAXINT,0,G_PARAM_READABLE));
g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_USE_FIXED_FPS, g_object_class_install_property(gobject_class, ARG_USE_FIXED_FPS,
g_param_spec_boolean("use_fixed_fps", "Use Fixed FPS", g_param_spec_boolean("use_fixed_fps", "Use Fixed FPS",
"Drop/Insert frames to reach a certain FPS (TRUE) " "Drop/Insert frames to reach a certain FPS (TRUE) "
"or adapt FPS to suit the number of frabbed frames", "or adapt FPS to suit the number of frabbed frames",
@ -174,30 +211,26 @@ gst_v4l2src_class_init (GstV4l2SrcClass *klass)
/* signals */ /* signals */
gst_v4l2src_signals[SIGNAL_FRAME_CAPTURE] = gst_v4l2src_signals[SIGNAL_FRAME_CAPTURE] =
g_signal_new("frame_capture", G_TYPE_FROM_CLASS(klass), G_SIGNAL_RUN_LAST, g_signal_new("frame_capture", G_TYPE_FROM_CLASS (g_class), G_SIGNAL_RUN_LAST,
G_STRUCT_OFFSET(GstV4l2SrcClass, frame_capture), G_STRUCT_OFFSET(GstV4l2SrcClass, frame_capture),
NULL, NULL, g_cclosure_marshal_VOID__VOID, NULL, NULL, g_cclosure_marshal_VOID__VOID,
G_TYPE_NONE, 0); G_TYPE_NONE, 0);
gst_v4l2src_signals[SIGNAL_FRAME_DROP] = gst_v4l2src_signals[SIGNAL_FRAME_DROP] =
g_signal_new("frame_drop", G_TYPE_FROM_CLASS(klass), G_SIGNAL_RUN_LAST, g_signal_new("frame_drop", G_TYPE_FROM_CLASS (g_class), G_SIGNAL_RUN_LAST,
G_STRUCT_OFFSET(GstV4l2SrcClass, frame_drop), G_STRUCT_OFFSET(GstV4l2SrcClass, frame_drop),
NULL, NULL, g_cclosure_marshal_VOID__VOID, NULL, NULL, g_cclosure_marshal_VOID__VOID,
G_TYPE_NONE, 0); G_TYPE_NONE, 0);
gst_v4l2src_signals[SIGNAL_FRAME_INSERT] = gst_v4l2src_signals[SIGNAL_FRAME_INSERT] =
g_signal_new("frame_insert", G_TYPE_FROM_CLASS(klass), G_SIGNAL_RUN_LAST, g_signal_new("frame_insert", G_TYPE_FROM_CLASS (g_class), G_SIGNAL_RUN_LAST,
G_STRUCT_OFFSET(GstV4l2SrcClass, frame_insert), G_STRUCT_OFFSET(GstV4l2SrcClass, frame_insert),
NULL, NULL, g_cclosure_marshal_VOID__VOID, NULL, NULL, g_cclosure_marshal_VOID__VOID,
G_TYPE_NONE, 0); G_TYPE_NONE, 0);
gst_v4l2src_signals[SIGNAL_FRAME_LOST] = gst_v4l2src_signals[SIGNAL_FRAME_LOST] =
g_signal_new("frame_lost", G_TYPE_FROM_CLASS(klass), G_SIGNAL_RUN_LAST, g_signal_new("frame_lost", G_TYPE_FROM_CLASS (g_class), G_SIGNAL_RUN_LAST,
G_STRUCT_OFFSET(GstV4l2SrcClass, frame_lost), G_STRUCT_OFFSET(GstV4l2SrcClass, frame_lost),
NULL, NULL, g_cclosure_marshal_VOID__INT, NULL, NULL, g_cclosure_marshal_VOID__INT,
G_TYPE_NONE, 1, G_TYPE_INT); G_TYPE_NONE, 1, G_TYPE_INT);
gobject_class->set_property = gst_v4l2src_set_property;
gobject_class->get_property = gst_v4l2src_get_property;
gstelement_class->change_state = gst_v4l2src_change_state; gstelement_class->change_state = gst_v4l2src_change_state;
v4l2_class->open = gst_v4l2src_open; v4l2_class->open = gst_v4l2src_open;
@ -208,28 +241,28 @@ gst_v4l2src_class_init (GstV4l2SrcClass *klass)
static void static void
gst_v4l2src_init (GstV4l2Src *v4l2src) gst_v4l2src_init (GTypeInstance *instance, gpointer g_class)
{ {
GstV4l2Src *v4l2src = GST_V4L2SRC (instance);
GST_FLAG_SET(GST_ELEMENT(v4l2src), GST_ELEMENT_THREAD_SUGGESTED); GST_FLAG_SET(GST_ELEMENT(v4l2src), GST_ELEMENT_THREAD_SUGGESTED);
v4l2src->srcpad = gst_pad_new_from_template(src_template, "src"); v4l2src->srcpad = gst_pad_new_from_template(
gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (v4l2src), "src"), "src");
gst_element_add_pad(GST_ELEMENT(v4l2src), v4l2src->srcpad); gst_element_add_pad(GST_ELEMENT(v4l2src), v4l2src->srcpad);
gst_pad_set_get_function(v4l2src->srcpad, gst_v4l2src_get); gst_pad_set_get_function(v4l2src->srcpad, gst_v4l2src_get);
gst_pad_set_link_function(v4l2src->srcpad, gst_v4l2src_srcconnect); gst_pad_set_link_function(v4l2src->srcpad, gst_v4l2src_link);
gst_pad_set_getcaps_function (v4l2src->srcpad, gst_v4l2src_getcaps); gst_pad_set_getcaps_function (v4l2src->srcpad, gst_v4l2src_getcaps);
gst_pad_set_fixate_function (v4l2src->srcpad, gst_v4l2src_fixate);
gst_pad_set_convert_function (v4l2src->srcpad, gst_v4l2src_src_convert); gst_pad_set_convert_function (v4l2src->srcpad, gst_v4l2src_src_convert);
gst_pad_set_formats_function (v4l2src->srcpad, gst_pad_set_formats_function (v4l2src->srcpad, gst_v4l2src_get_formats);
gst_v4l2src_get_formats); gst_pad_set_query_function (v4l2src->srcpad, gst_v4l2src_src_query);
gst_pad_set_query_function (v4l2src->srcpad, gst_pad_set_query_type_function (v4l2src->srcpad, gst_v4l2src_get_query_types);
gst_v4l2src_src_query);
gst_pad_set_query_type_function (v4l2src->srcpad,
gst_v4l2src_get_query_types);
v4l2src->breq.count = 0; v4l2src->breq.count = 0;
v4l2src->formats = NULL; v4l2src->formats = NULL;
v4l2src->format_list = NULL;
/* no clock */ /* no clock */
v4l2src->clock = NULL; v4l2src->clock = NULL;
@ -243,7 +276,7 @@ static void
gst_v4l2src_open (GstElement *element, gst_v4l2src_open (GstElement *element,
const gchar *device) const gchar *device)
{ {
gst_v4l2src_fill_format_list(GST_V4L2SRC(element)); gst_v4l2src_fill_format_list (GST_V4L2SRC (element));
} }
@ -251,7 +284,7 @@ static void
gst_v4l2src_close (GstElement *element, gst_v4l2src_close (GstElement *element,
const gchar *device) const gchar *device)
{ {
gst_v4l2src_empty_format_list(GST_V4L2SRC(element)); gst_v4l2src_clear_format_list (GST_V4L2SRC (element));
} }
@ -365,12 +398,10 @@ gst_v4l2src_src_query (GstPad *pad,
return res; return res;
} }
static GstStructure * static GstStructure *
gst_v4l2src_v4l2fourcc_to_caps (guint32 fourcc, gst_v4l2src_v4l2fourcc_to_caps (guint32 fourcc)
gboolean compressed)
{ {
GstStructure *structure; GstStructure *structure = NULL;
switch (fourcc) { switch (fourcc) {
case V4L2_PIX_FMT_MJPEG: /* Motion-JPEG */ case V4L2_PIX_FMT_MJPEG: /* Motion-JPEG */
@ -399,70 +430,50 @@ gst_v4l2src_v4l2fourcc_to_caps (guint32 fourcc,
case V4L2_PIX_FMT_RGB555: case V4L2_PIX_FMT_RGB555:
case V4L2_PIX_FMT_RGB555X: case V4L2_PIX_FMT_RGB555X:
bpp = 16; depth = 15; bpp = 16; depth = 15;
endianness = G_BYTE_ORDER; endianness = fourcc == V4L2_PIX_FMT_RGB555X ? G_BIG_ENDIAN : G_LITTLE_ENDIAN;
if ((fourcc == V4L2_PIX_FMT_RGB555 &&
G_BYTE_ORDER == G_LITTLE_ENDIAN) ||
(fourcc == V4L2_PIX_FMT_RGB555X &&
G_BYTE_ORDER == G_BIG_ENDIAN)) {
r_mask = 0x7c00; r_mask = 0x7c00;
g_mask = 0x03e0; g_mask = 0x03e0;
b_mask = 0x001f; b_mask = 0x001f;
} else {
r_mask = 0x007c;
g_mask = 0xe003;
b_mask = 0x1f00;
}
break; break;
case V4L2_PIX_FMT_RGB565: case V4L2_PIX_FMT_RGB565:
case V4L2_PIX_FMT_RGB565X: case V4L2_PIX_FMT_RGB565X:
bpp = depth = 16; bpp = depth = 16;
endianness = G_BYTE_ORDER; endianness = fourcc == V4L2_PIX_FMT_RGB565X ? G_BIG_ENDIAN : G_LITTLE_ENDIAN;
if ((fourcc == V4L2_PIX_FMT_RGB565 &&
G_BYTE_ORDER == G_LITTLE_ENDIAN) ||
(fourcc == V4L2_PIX_FMT_RGB565X &&
G_BYTE_ORDER == G_BIG_ENDIAN)) {
r_mask = 0xf800; r_mask = 0xf800;
g_mask = 0x07e0; g_mask = 0x07e0;
b_mask = 0x001f; b_mask = 0x001f;
} else {
r_mask = 0x00f8;
g_mask = 0xe007;
b_mask = 0x1f00;
}
break;
case V4L2_PIX_FMT_RGB24: case V4L2_PIX_FMT_RGB24:
case V4L2_PIX_FMT_BGR24:
bpp = depth = 24; bpp = depth = 24;
endianness = G_BIG_ENDIAN; endianness = G_BIG_ENDIAN;
if (fourcc == V4L2_PIX_FMT_RGB24) {
r_mask = 0xff0000; r_mask = 0xff0000;
g_mask = 0x00ff00; g_mask = 0x00ff00;
b_mask = 0x0000ff; b_mask = 0x0000ff;
} else { break;
case V4L2_PIX_FMT_BGR24:
bpp = depth = 24;
endianness = G_BIG_ENDIAN;
r_mask = 0x0000ff; r_mask = 0x0000ff;
g_mask = 0x00ff00; g_mask = 0x00ff00;
b_mask = 0xff0000; b_mask = 0xff0000;
}
break; break;
case V4L2_PIX_FMT_RGB32: case V4L2_PIX_FMT_RGB32:
case V4L2_PIX_FMT_BGR32:
bpp = depth = 32; bpp = depth = 32;
endianness = G_BIG_ENDIAN; endianness = G_BIG_ENDIAN;
if (fourcc == V4L2_PIX_FMT_RGB32) {
r_mask = 0xff000000; r_mask = 0xff000000;
g_mask = 0x00ff0000; g_mask = 0x00ff0000;
b_mask = 0x0000ff00; b_mask = 0x0000ff00;
} else { break;
case V4L2_PIX_FMT_BGR32:
bpp = depth = 32;
endianness = G_BIG_ENDIAN;
r_mask = 0x000000ff; r_mask = 0x000000ff;
g_mask = 0x0000ff00; g_mask = 0x0000ff00;
b_mask = 0x00ff0000; b_mask = 0x00ff0000;
}
break; break;
default: default:
g_assert_not_reached(); g_assert_not_reached();
break; break;
} }
structure = gst_structure_new ("video/x-raw-rgb", structure = gst_structure_new ("video/x-raw-rgb",
"bpp", G_TYPE_INT, bpp, "bpp", G_TYPE_INT, bpp,
"depth", G_TYPE_INT, depth, "depth", G_TYPE_INT, depth,
@ -473,6 +484,17 @@ gst_v4l2src_v4l2fourcc_to_caps (guint32 fourcc,
NULL); NULL);
break; break;
} }
case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
case V4L2_PIX_FMT_YUV422P: /* 16 YVU422 planar */
case V4L2_PIX_FMT_YUV411P: /* 16 YVU411 planar */
case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
/* FIXME: get correct fourccs here */
break;
case V4L2_PIX_FMT_YVU410:
case V4L2_PIX_FMT_YUV410:
case V4L2_PIX_FMT_YUV420: /* I420/IYUV */ case V4L2_PIX_FMT_YUV420: /* I420/IYUV */
case V4L2_PIX_FMT_YUYV: case V4L2_PIX_FMT_YUYV:
case V4L2_PIX_FMT_YVU420: case V4L2_PIX_FMT_YVU420:
@ -481,6 +503,12 @@ gst_v4l2src_v4l2fourcc_to_caps (guint32 fourcc,
guint32 fcc = 0; guint32 fcc = 0;
switch (fourcc) { switch (fourcc) {
case V4L2_PIX_FMT_YVU410:
fcc = GST_MAKE_FOURCC('Y','V','U','9');
break;
case V4L2_PIX_FMT_YUV410:
fcc = GST_MAKE_FOURCC('Y','U','V','9');
break;
case V4L2_PIX_FMT_YUV420: case V4L2_PIX_FMT_YUV420:
fcc = GST_MAKE_FOURCC('I','4','2','0'); fcc = GST_MAKE_FOURCC('I','4','2','0');
break; break;
@ -494,44 +522,30 @@ gst_v4l2src_v4l2fourcc_to_caps (guint32 fourcc,
fcc = GST_MAKE_FOURCC('U','Y','V','Y'); fcc = GST_MAKE_FOURCC('U','Y','V','Y');
break; break;
case V4L2_PIX_FMT_Y41P: case V4L2_PIX_FMT_Y41P:
fcc = GST_MAKE_FOURCC('Y','4','1','P'); fcc = GST_MAKE_FOURCC('Y','4','1','B');
break; break;
default: default:
g_assert_not_reached(); g_assert_not_reached();
break; break;
} }
structure = gst_structure_new ("video/x-raw-yuv", structure = gst_structure_new ("video/x-raw-yuv",
"format", GST_TYPE_FOURCC, fcc, "format", GST_TYPE_FOURCC, fcc,
NULL); NULL);
break; break;
} }
case V4L2_PIX_FMT_DV:
structure = gst_structure_new ("video/x-dv", "systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
break;
case V4L2_PIX_FMT_MPEG: /* MPEG */
/* someone figure out the MPEG format used... */
break;
case V4L2_PIX_FMT_WNVA: /* Winnov hw compres */
break;
default: default:
GST_DEBUG ( GST_DEBUG ("Unknown fourcc 0x%08x " GST_FOURCC_FORMAT,
"Unknown fourcc 0x%08x " GST_FOURCC_FORMAT ", trying default",
fourcc, GST_FOURCC_ARGS(fourcc)); fourcc, GST_FOURCC_ARGS(fourcc));
/* add the standard one */
if (compressed) {
guint32 print_format = GUINT32_FROM_LE(fourcc);
gchar *print_format_str = (gchar *) &print_format, *string_format;
gint i;
for (i=0;i<4;i++) {
print_format_str[i] =
g_ascii_tolower(print_format_str[i]);
}
string_format = g_strdup_printf("video/%4.4s",
print_format_str);
structure = gst_structure_new (string_format, NULL);
g_free(string_format);
} else {
structure = gst_structure_new ("video/x-raw-yuv",
"format", GST_TYPE_FOURCC, fourcc, NULL);
}
break; break;
} }
#if 0 #if 0
gst_caps_set_simple (caps, gst_caps_set_simple (caps,
"width", G_TYPE_INT, width, "width", G_TYPE_INT, width,
@ -539,42 +553,43 @@ gst_v4l2src_v4l2fourcc_to_caps (guint32 fourcc,
"framerate", G_TYPE_DOUBLE, fps, "framerate", G_TYPE_DOUBLE, fps,
NULL); NULL);
#endif #endif
return structure; return structure;
} }
#define gst_v4l2src_v4l2fourcc_to_caps_fixed(f, width, height, fps, c) \
gst_v4l2src_v4l2fourcc_to_caps(f, \
gst_props_entry_new("width", \
GST_PROPS_INT(width)), \
gst_props_entry_new("height", \
GST_PROPS_INT(height)), \
gst_props_entry_new("framerate", \
GST_PROPS_FLOAT(fps)), \
c)
#define gst_v4l2src_v4l2fourcc_to_caps_range(f, min_w, max_w, min_h, max_h, c) \
gst_v4l2src_v4l2fourcc_to_caps(f, \
gst_props_entry_new("width", \
GST_PROPS_INT_RANGE(min_w, max_w)), \
gst_props_entry_new("height", \
GST_PROPS_INT_RANGE(min_h, max_h)), \
gst_props_entry_new("framerate", \
GST_PROPS_FLOAT_RANGE(0, G_MAXFLOAT)), \
c)
static struct v4l2_fmtdesc * static struct v4l2_fmtdesc *
gst_v4l2_caps_to_v4l2fourcc (GstV4l2Src *v4l2src, gst_v4l2src_get_format_from_fourcc (GstV4l2Src *v4l2src, guint32 fourcc)
GstStructure *structure)
{ {
gint i; struct v4l2_fmtdesc *fmt;
guint32 fourcc = 0; GSList *walk;
struct v4l2_fmtdesc *end_fmt = NULL;
const gchar *format = gst_structure_get_name (structure);
if (!strcmp(format, "video/x-raw-yuv") || if (fourcc == 0)
!strcmp(format, "video/x-raw-rgb")) { return NULL;
if (!strcmp(format, "video/x-raw-rgb"))
walk = v4l2src->formats;
while (walk) {
fmt = (struct v4l2_fmtdesc *) walk->data;
if (fmt->pixelformat == fourcc)
return fmt;
/* special case for jpeg */
if ((fmt->pixelformat == V4L2_PIX_FMT_MJPEG && fourcc == V4L2_PIX_FMT_JPEG) ||
(fmt->pixelformat == V4L2_PIX_FMT_JPEG && fourcc == V4L2_PIX_FMT_MJPEG)) {
return fmt;
}
walk = g_slist_next (walk);
}
return NULL;
}
static guint32
gst_v4l2_fourcc_from_structure (GstStructure *structure)
{
guint32 fourcc = 0;
const gchar *mimetype = gst_structure_get_name (structure);
if (!strcmp(mimetype, "video/x-raw-yuv") ||
!strcmp(mimetype, "video/x-raw-rgb")) {
if (!strcmp(mimetype, "video/x-raw-rgb"))
fourcc = GST_MAKE_FOURCC('R','G','B',' '); fourcc = GST_MAKE_FOURCC('R','G','B',' ');
else else
gst_structure_get_fourcc (structure, "format", &fourcc); gst_structure_get_fourcc (structure, "format", &fourcc);
@ -597,10 +612,11 @@ gst_v4l2_caps_to_v4l2fourcc (GstV4l2Src *v4l2src,
fourcc = V4L2_PIX_FMT_YVU420; fourcc = V4L2_PIX_FMT_YVU420;
break; break;
case GST_MAKE_FOURCC('R','G','B',' '): { case GST_MAKE_FOURCC('R','G','B',' '): {
gint depth, endianness; gint depth, endianness, r_mask;
gst_structure_get_int (structure, "depth", &depth); gst_structure_get_int (structure, "depth", &depth);
gst_structure_get_int (structure, "endianness", &endianness); gst_structure_get_int (structure, "endianness", &endianness);
gst_structure_get_int (structure, "red_mask", &r_mask);
switch (depth) { switch (depth) {
case 8: case 8:
@ -617,82 +633,94 @@ gst_v4l2_caps_to_v4l2fourcc (GstV4l2Src *v4l2src,
V4L2_PIX_FMT_RGB565X; V4L2_PIX_FMT_RGB565X;
break; break;
case 24: case 24:
fourcc = (endianness == G_LITTLE_ENDIAN) ? fourcc = (r_mask == 0xFF) ?
V4L2_PIX_FMT_BGR24 : V4L2_PIX_FMT_BGR24 :
V4L2_PIX_FMT_RGB24; V4L2_PIX_FMT_RGB24;
break; break;
case 32: case 32:
fourcc = (endianness == G_LITTLE_ENDIAN) ? fourcc = (r_mask == 0xFF) ?
V4L2_PIX_FMT_BGR32 : V4L2_PIX_FMT_BGR32 :
V4L2_PIX_FMT_RGB32; V4L2_PIX_FMT_RGB32;
break; break;
} }
}
default: default:
break; break;
} }
for (i=0;i<g_list_length(v4l2src->formats);i++) {
struct v4l2_fmtdesc *fmt;
fmt = (struct v4l2_fmtdesc *)
g_list_nth_data(v4l2src->formats, i);
if (fmt->pixelformat == fourcc) {
end_fmt = fmt;
break;
}
}
} else {
/* compressed */
if (strncmp(format, "video/", 6))
return NULL;
format = &format[6];
if (strlen(format) != 4)
return NULL;
fourcc = GST_MAKE_FOURCC(g_ascii_toupper(format[0]),
g_ascii_toupper(format[1]),
g_ascii_toupper(format[2]),
g_ascii_toupper(format[3]));
switch (fourcc) {
case GST_MAKE_FOURCC('J','P','E','G'): {
struct v4l2_fmtdesc *fmt;
for (i=0;i<g_list_length(v4l2src->formats);i++) {
fmt = g_list_nth_data(v4l2src->formats, i);
if (fmt->pixelformat == V4L2_PIX_FMT_MJPEG ||
fmt->pixelformat == V4L2_PIX_FMT_JPEG) {
end_fmt = fmt;
break;
}
}
break;
}
default: {
/* FIXME: check for fourcc in list */
struct v4l2_fmtdesc *fmt;
for (i=0;i<g_list_length(v4l2src->formats);i++) {
fmt = g_list_nth_data(v4l2src->formats, i);
if (fourcc == fmt->pixelformat) {
end_fmt = fmt;
break;
}
}
break;
}
} }
} else if (strcmp (mimetype, "video/x-dv") == 0) {
fourcc = V4L2_PIX_FMT_DV;
} else if (strcmp (mimetype, "video/x-jpeg") == 0) {
fourcc = V4L2_PIX_FMT_JPEG;
} }
return end_fmt; return fourcc;
} }
#define gst_caps_get_int_range(caps, name, min, max) \ static struct v4l2_fmtdesc *
gst_props_entry_get_int_range(gst_props_get_entry((caps)->properties, \ gst_v4l2_caps_to_v4l2fourcc (GstV4l2Src *v4l2src, GstStructure *structure)
name), \ {
min, max) return gst_v4l2src_get_format_from_fourcc (v4l2src,
gst_v4l2_fourcc_from_structure (structure));
}
static const GstCaps *
gst_v4l2src_get_all_caps (void)
{
static GstCaps *caps = NULL;
if (caps == NULL) {
GstStructure *structure;
guint i;
caps = gst_caps_new_empty ();
for (i = 0; i < GST_V4L2_FORMAT_COUNT; i++) {
structure = gst_v4l2src_v4l2fourcc_to_caps (gst_v4l2_formats[i]);
if (structure) {
gst_structure_set (structure,
"width", GST_TYPE_INT_RANGE, 1, 4096,
"height", GST_TYPE_INT_RANGE, 1, 4096,
"framerate", GST_TYPE_DOUBLE_RANGE, (double) 0, G_MAXDOUBLE,
NULL);
gst_caps_append_structure (caps, structure);
}
}
}
return caps;
}
static GstCaps *
gst_v4l2src_fixate (GstPad *pad, const GstCaps *const_caps)
{
gint i;
GstStructure *structure;
G_GNUC_UNUSED gchar *caps_str;
gboolean changed = FALSE;
GstCaps *caps = gst_caps_copy (const_caps);
caps_str = gst_caps_to_string (caps);
GST_DEBUG_OBJECT (gst_pad_get_parent (pad), "fixating caps %s", caps_str);
g_free (caps_str);
for (i = 0; i < gst_caps_get_size (caps); i++) {
structure = gst_caps_get_structure (caps, i);
changed |= gst_caps_structure_fixate_field_nearest_int (structure, "width", G_MAXINT);
}
if (changed) return caps;
for (i = 0; i < gst_caps_get_size (caps); i++) {
structure = gst_caps_get_structure (caps, i);
changed |= gst_caps_structure_fixate_field_nearest_int (structure, "height", G_MAXINT);
}
if (changed) return caps;
gst_caps_free (caps);
return NULL;
}
static GstPadLinkReturn static GstPadLinkReturn
gst_v4l2src_srcconnect (GstPad *pad, gst_v4l2src_link (GstPad *pad, const GstCaps *caps)
const GstCaps *vscapslist)
{ {
GstV4l2Src *v4l2src; GstV4l2Src *v4l2src;
GstV4l2Element *v4l2element; GstV4l2Element *v4l2element;
@ -703,7 +731,7 @@ gst_v4l2src_srcconnect (GstPad *pad,
v4l2src = GST_V4L2SRC(gst_pad_get_parent (pad)); v4l2src = GST_V4L2SRC(gst_pad_get_parent (pad));
v4l2element = GST_V4L2ELEMENT(v4l2src); v4l2element = GST_V4L2ELEMENT(v4l2src);
structure = gst_caps_get_structure (vscapslist, 0); structure = gst_caps_get_structure (caps, 0);
/* clean up if we still haven't cleaned up our previous /* clean up if we still haven't cleaned up our previous
* capture session */ * capture session */
@ -738,9 +766,10 @@ gst_v4l2src_getcaps (GstPad *pad)
{ {
GstV4l2Src *v4l2src = GST_V4L2SRC(gst_pad_get_parent (pad)); GstV4l2Src *v4l2src = GST_V4L2SRC(gst_pad_get_parent (pad));
GstCaps *caps; GstCaps *caps;
gint i;
struct v4l2_fmtdesc *format; struct v4l2_fmtdesc *format;
int min_w, max_w, min_h, max_h; int min_w, max_w, min_h, max_h;
GSList *walk;
GstStructure *structure;
if (!GST_V4L2_IS_OPEN(GST_V4L2ELEMENT(v4l2src))) { if (!GST_V4L2_IS_OPEN(GST_V4L2ELEMENT(v4l2src))) {
return gst_caps_new_any (); return gst_caps_new_any ();
@ -748,84 +777,83 @@ gst_v4l2src_getcaps (GstPad *pad)
/* build our own capslist */ /* build our own capslist */
caps = gst_caps_new_empty(); caps = gst_caps_new_empty();
for (i=0;i<g_list_length(v4l2src->formats);i++) { walk = v4l2src->formats;
GstStructure *structure; while (walk) {
format = (struct v4l2_fmtdesc *) walk->data;
format = g_list_nth_data(v4l2src->formats, i); walk = g_slist_next (walk);
/* get size delimiters */ /* get size delimiters */
if (!gst_v4l2src_get_size_limits(v4l2src, format, if (!gst_v4l2src_get_size_limits(v4l2src, format,
&min_w, &max_w, &min_w, &max_w, &min_h, &max_h)) {
&min_h, &max_h)) {
continue; continue;
} }
/* add to list */ /* add to list */
structure = gst_v4l2src_v4l2fourcc_to_caps (format->pixelformat, structure = gst_v4l2src_v4l2fourcc_to_caps (format->pixelformat);
format->flags & V4L2_FMT_FLAG_COMPRESSED);
if (structure) {
gst_structure_set (structure, gst_structure_set (structure,
"width", GST_TYPE_INT_RANGE, min_w, max_w, "width", GST_TYPE_INT_RANGE, min_w, max_w,
"height", GST_TYPE_INT_RANGE, min_h, max_h, "height", GST_TYPE_INT_RANGE, min_h, max_h,
"framerate", GST_TYPE_DOUBLE_RANGE, 0, G_MAXDOUBLE, "framerate", GST_TYPE_DOUBLE_RANGE, (double) 0, G_MAXDOUBLE,
NULL); NULL);
gst_caps_append_structure (caps, structure); gst_caps_append_structure (caps, structure);
} }
}
return caps; return caps;
} }
static GstData* static GstData*
gst_v4l2src_get (GstPad *pad) gst_v4l2src_get (GstPad *pad)
{ {
GstV4l2Src *v4l2src; GstV4l2Src *v4l2src;
GstBuffer *buf; GstBuffer *buf;
gint num; gint i, num = -1;
gdouble fps = 0; gdouble fps = 0;
g_return_val_if_fail (pad != NULL, NULL); v4l2src = GST_V4L2SRC (gst_pad_get_parent (pad));
v4l2src = GST_V4L2SRC(gst_pad_get_parent (pad));
if (v4l2src->use_fixed_fps && if (v4l2src->use_fixed_fps &&
(fps = gst_v4l2src_get_fps(v4l2src)) == 0) (fps = gst_v4l2src_get_fps(v4l2src)) == 0) {
gst_element_error (GST_ELEMENT (v4l2src), "Could not get frame rate for element.");
return NULL; return NULL;
}
if (v4l2src->need_writes > 0) { if (v4l2src->need_writes > 0) {
/* use last frame */ /* use last frame */
num = v4l2src->last_frame; buf = v4l2src->cached_buffer;
v4l2src->need_writes--; v4l2src->need_writes--;
} else if (v4l2src->clock && v4l2src->use_fixed_fps) { } else {
GstClockTime time; GstClockTime time;
/* grab a frame from the device */
num = gst_v4l2src_grab_frame(v4l2src);
if (num == -1)
return NULL;
/* to check if v4l2 sets the correct time */
time = GST_TIMEVAL_TO_TIME(v4l2src->pool->buffers[num].buffer.timestamp);
if (v4l2src->clock && v4l2src->use_fixed_fps && time != 0) {
gboolean have_frame = FALSE; gboolean have_frame = FALSE;
do { do {
/* FIXME: isn't this v4l2 timestamp its own clock?! */
/* by default, we use the frame once */ /* by default, we use the frame once */
v4l2src->need_writes = 1; v4l2src->need_writes = 1;
/* grab a frame from the device */ g_assert (time >= v4l2src->substract_time);
if (!gst_v4l2src_grab_frame(v4l2src, &num)) time -= v4l2src->substract_time;
return NULL;
v4l2src->last_frame = num;
time = GST_TIMEVAL_TO_TIME(v4l2src->bufsettings.timestamp) -
v4l2src->substract_time;
/* first check whether we lost any frames according to the device */ /* first check whether we lost any frames according to the device */
if (v4l2src->last_seq != 0) { if (v4l2src->last_seq != 0) {
if (v4l2src->bufsettings.sequence - v4l2src->last_seq > 1) { if (v4l2src->pool->buffers[num].buffer.sequence - v4l2src->last_seq > 1) {
v4l2src->need_writes = v4l2src->bufsettings.sequence - v4l2src->need_writes = v4l2src->pool->buffers[num].buffer.sequence - v4l2src->last_seq;
v4l2src->last_seq; g_signal_emit(G_OBJECT(v4l2src), gst_v4l2src_signals[SIGNAL_FRAME_LOST], 0,
g_signal_emit(G_OBJECT(v4l2src), v4l2src->need_writes - 1);
gst_v4l2src_signals[SIGNAL_FRAME_LOST],
0,
v4l2src->bufsettings.sequence -
v4l2src->last_seq - 1);
} }
} }
v4l2src->last_seq = v4l2src->bufsettings.sequence; v4l2src->last_seq = v4l2src->pool->buffers[num].buffer.sequence;
/* decide how often we're going to write the frame - set /* decide how often we're going to write the frame - set
* v4lmjpegsrc->need_writes to (that-1) and have_frame to TRUE * v4lmjpegsrc->need_writes to (that-1) and have_frame to TRUE
@ -837,54 +865,78 @@ gst_v4l2src_get (GstPad *pad)
* timeframe. This means that if time - begin_time = X sec, * timeframe. This means that if time - begin_time = X sec,
* we want to have written X*fps frames. If we've written * we want to have written X*fps frames. If we've written
* more - drop, if we've written less - dup... */ * more - drop, if we've written less - dup... */
if (v4l2src->handled * (GST_SECOND/fps) - time > if (v4l2src->handled * (GST_SECOND/fps) - time > 1.5 * (GST_SECOND/fps)) {
1.5 * (GST_SECOND/fps)) {
/* yo dude, we've got too many frames here! Drop! DROP! */ /* yo dude, we've got too many frames here! Drop! DROP! */
v4l2src->need_writes--; /* -= (v4l2src->handled - (time / fps)); */ v4l2src->need_writes--; /* -= (v4l2src->handled - (time / fps)); */
g_signal_emit(G_OBJECT(v4l2src), g_signal_emit(G_OBJECT(v4l2src), gst_v4l2src_signals[SIGNAL_FRAME_DROP], 0);
gst_v4l2src_signals[SIGNAL_FRAME_DROP], 0); } else if (v4l2src->handled * (GST_SECOND/fps) - time < -1.5 * (GST_SECOND/fps)) {
} else if (v4l2src->handled * (GST_SECOND/fps) - time <
-1.5 * (GST_SECOND/fps)) {
/* this means we're lagging far behind */ /* this means we're lagging far behind */
v4l2src->need_writes++; /* += ((time / fps) - v4l2src->handled); */ v4l2src->need_writes++; /* += ((time / fps) - v4l2src->handled); */
g_signal_emit(G_OBJECT(v4l2src), g_signal_emit(G_OBJECT(v4l2src), gst_v4l2src_signals[SIGNAL_FRAME_INSERT], 0);
gst_v4l2src_signals[SIGNAL_FRAME_INSERT], 0);
} }
if (v4l2src->need_writes > 0) { if (v4l2src->need_writes > 0) {
have_frame = TRUE; have_frame = TRUE;
v4l2src->use_num_times[num] = v4l2src->need_writes;
v4l2src->need_writes--; v4l2src->need_writes--;
} else { } else {
gst_v4l2src_requeue_frame(v4l2src, num); if (!gst_v4l2src_queue_frame(v4l2src, num))
return NULL;
num = gst_v4l2src_grab_frame(v4l2src);
if (num == -1)
return NULL;
} }
} while (!have_frame); } while (!have_frame);
} else {
/* grab a frame from the device */
if (!gst_v4l2src_grab_frame(v4l2src, &num))
return NULL;
v4l2src->use_num_times[num] = 1;
} }
g_assert (num != -1);
GST_LOG_OBJECT (v4l2src, "buffer %d needs %d writes", num, v4l2src->need_writes + 1);
i = v4l2src->pool->buffers[num].buffer.bytesused > 0 ?
v4l2src->pool->buffers[num].buffer.bytesused :
v4l2src->pool->buffers[num].length;
/* check if this is the last buffer in the queue. If so do a memcpy to put it back asap
to avoid framedrops and deadlocks because of stupid elements */
if (gst_atomic_int_read (&v4l2src->pool->refcount) == v4l2src->breq.count) {
GST_LOG_OBJECT (v4l2src, "using memcpy'd buffer");
buf = gst_buffer_new_and_alloc (i);
memcpy (GST_BUFFER_DATA (buf), v4l2src->pool->buffers[num].start, i);
if (!gst_v4l2src_queue_frame(v4l2src, num)) {
gst_data_unref (GST_DATA (buf));
return NULL;
}
} else {
GST_LOG_OBJECT (v4l2src, "using mmap'd buffer");
buf = gst_buffer_new (); buf = gst_buffer_new ();
GST_BUFFER_DATA(buf) = gst_v4l2src_get_buffer(v4l2src, num); GST_BUFFER_DATA (buf) = v4l2src->pool->buffers[num].start;
GST_BUFFER_SIZE(buf) = v4l2src->bufsettings.bytesused; GST_BUFFER_SIZE (buf) = i;
GST_BUFFER_FLAG_SET(buf, GST_BUFFER_READONLY); GST_BUFFER_FREE_DATA_FUNC (buf) = gst_v4l2src_free_buffer;
if (v4l2src->use_fixed_fps) GST_BUFFER_PRIVATE (buf) = &v4l2src->pool->buffers[num];
GST_BUFFER_TIMESTAMP(buf) = v4l2src->handled * GST_SECOND / fps; /* no need to be careful here, both are > 0, because the element uses them */
else /* calculate time based on our own clock */ gst_atomic_int_inc (&v4l2src->pool->buffers[num].refcount);
GST_BUFFER_TIMESTAMP(buf) = GST_TIMEVAL_TO_TIME(v4l2src->bufsettings.timestamp) - gst_atomic_int_inc (&v4l2src->pool->refcount);
}
GST_BUFFER_MAXSIZE (buf) = v4l2src->pool->buffers[num].length;
if (v4l2src->use_fixed_fps) {
GST_BUFFER_TIMESTAMP (buf) = v4l2src->handled * GST_SECOND / fps;
GST_BUFFER_DURATION (buf) = GST_SECOND / fps;
} else {
/* calculate time based on our own clock */
GST_BUFFER_TIMESTAMP(buf) = GST_TIMEVAL_TO_TIME(v4l2src->pool->buffers[num].buffer.timestamp) -
v4l2src->substract_time; v4l2src->substract_time;
}
if (v4l2src->need_writes > 0) {
v4l2src->cached_buffer = buf;
for (i = 0; i < v4l2src->need_writes; i++) {
gst_data_ref (GST_DATA (buf));
}
}
}
v4l2src->handled++; v4l2src->handled++;
g_signal_emit(G_OBJECT(v4l2src), g_signal_emit(G_OBJECT(v4l2src), gst_v4l2src_signals[SIGNAL_FRAME_CAPTURE], 0);
gst_v4l2src_signals[SIGNAL_FRAME_CAPTURE], 0);
return GST_DATA (buf); return GST_DATA (buf);
} }
static void static void
gst_v4l2src_set_property (GObject *object, gst_v4l2src_set_property (GObject *object,
guint prop_id, guint prop_id,
@ -972,7 +1024,6 @@ gst_v4l2src_change_state (GstElement *element)
case GST_STATE_READY_TO_PAUSED: case GST_STATE_READY_TO_PAUSED:
v4l2src->handled = 0; v4l2src->handled = 0;
v4l2src->need_writes = 0; v4l2src->need_writes = 0;
v4l2src->last_frame = 0;
v4l2src->substract_time = 0; v4l2src->substract_time = 0;
/* buffer setup moved to capsnego */ /* buffer setup moved to capsnego */
break; break;
@ -1013,60 +1064,3 @@ gst_v4l2src_set_clock (GstElement *element,
GST_V4L2SRC(element)->clock = clock; GST_V4L2SRC(element)->clock = clock;
} }
#if 0
static GstBuffer*
gst_v4l2src_buffer_new (GstBufferPool *pool,
guint64 offset,
guint size,
gpointer user_data)
{
GstBuffer *buffer;
GstV4l2Src *v4l2src = GST_V4L2SRC(user_data);
if (!GST_V4L2_IS_ACTIVE(GST_V4L2ELEMENT(v4l2src)))
return NULL;
buffer = gst_buffer_new();
if (!buffer)
return NULL;
/* TODO: add interlacing info to buffer as metadata
* (height>288 or 240 = topfieldfirst, else noninterlaced) */
GST_BUFFER_MAXSIZE(buffer) = v4l2src->bufsettings.length;
GST_BUFFER_FLAG_SET(buffer, GST_BUFFER_DONTFREE);
return buffer;
}
#endif
#if 0
static void
gst_v4l2src_buffer_free (GstBufferPool *pool,
GstBuffer *buf,
gpointer user_data)
{
GstV4l2Src *v4l2src = GST_V4L2SRC(user_data);
int n;
if (gst_element_get_state(GST_ELEMENT(v4l2src)) != GST_STATE_PLAYING)
return; /* we've already cleaned up ourselves */
for (n=0;n<v4l2src->breq.count;n++)
if (GST_BUFFER_DATA(buf) == gst_v4l2src_get_buffer(v4l2src, n)) {
v4l2src->use_num_times[n]--;
if (v4l2src->use_num_times[n] <= 0) {
gst_v4l2src_requeue_frame(v4l2src, n);
}
break;
}
if (n == v4l2src->breq.count)
gst_element_error(GST_ELEMENT(v4l2src),
"Couldn\'t find the buffer");
/* free the buffer itself */
gst_buffer_default_free(buf);
}
#endif

sys/v4l2/gstv4l2src.h

@@ -22,6 +22,10 @@
 
 #include <gstv4l2element.h>
 
+GST_DEBUG_CATEGORY_EXTERN (v4l2src_debug);
+
+#define GST_V4L2_MAX_BUFFERS 16
+#define GST_V4L2_MIN_BUFFERS 2
 
 #define GST_TYPE_V4L2SRC \
   (gst_v4l2src_get_type())
@@ -34,10 +38,27 @@
 #define GST_IS_V4L2SRC_CLASS(obj) \
   (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4L2SRC))
 
+typedef struct _GstV4l2BufferPool GstV4l2BufferPool;
+typedef struct _GstV4l2Buffer GstV4l2Buffer;
 typedef struct _GstV4l2Src GstV4l2Src;
 typedef struct _GstV4l2SrcClass GstV4l2SrcClass;
 
+/* global info */
+struct _GstV4l2BufferPool {
+  GstAtomicInt refcount; /* number of users: 1 for every buffer, 1 for element */
+  gint video_fd;
+  guint buffer_count;
+  GstV4l2Buffer *buffers;
+};
+
+struct _GstV4l2Buffer {
+  struct v4l2_buffer buffer;
+  guint8 *start;
+  guint length;
+  GstAtomicInt refcount; /* add 1 if in use by element, add 1 if in use by GstBuffer */
+  GstV4l2BufferPool *pool;
+};
+
 struct _GstV4l2Src {
   GstV4l2Element v4l2element;
@@ -45,28 +66,21 @@ struct _GstV4l2Src {
   GstPad *srcpad;
 
   /* internal lists */
-  GList /*v4l2_fmtdesc*/ *formats, *format_list; /* list of available capture formats */
+  GSList *formats; /* list of available capture formats */
+
+  /* buffers */
+  GstV4l2BufferPool *pool;
 
   /* buffer properties */
-  struct v4l2_buffer bufsettings;
   struct v4l2_requestbuffers breq;
   struct v4l2_format format;
 
-  /* num of queued frames and some GThread stuff
-   * to wait if there's not enough */
-  gint8 *frame_queue_state;
-  GMutex *mutex_queue_state;
-  GCond *cond_queue_state;
-  gint num_queued;
-  gint queue_frame;
-
   /* True if we want to stop */
   gboolean quit;
 
   /* A/V sync... frame counter and internal cache */
   gulong handled;
-  gint last_frame;
   gint need_writes;
+  GstBuffer *cached_buffer;
 
   gulong last_seq;
 
   /* clock */
@@ -75,9 +89,6 @@ struct _GstV4l2Src {
   /* time to substract from clock time to get back to timestamp */
   GstClockTime substract_time;
 
-  /* how often are we going to use each frame? */
-  gint *use_num_times;
-
   /* how are we going to push buffers? */
   gboolean use_fixed_fps;
 };

sys/v4l2/gstv4l2tuner.c

@@ -40,14 +40,14 @@ static const GList *
     gst_v4l2_tuner_list_channels (GstTuner *mixer);
 static void gst_v4l2_tuner_set_channel (GstTuner *mixer,
                                         GstTunerChannel *channel);
-static const GstTunerChannel *
+static GstTunerChannel *
     gst_v4l2_tuner_get_channel (GstTuner *mixer);
 
 static const GList *
     gst_v4l2_tuner_list_norms (GstTuner *mixer);
 static void gst_v4l2_tuner_set_norm (GstTuner *mixer,
                                      GstTunerNorm *norm);
-static const GstTunerNorm *
+static GstTunerNorm *
    gst_v4l2_tuner_get_norm (GstTuner *mixer);
 
 static void gst_v4l2_tuner_set_frequency (GstTuner *mixer,
@@ -205,13 +205,15 @@ gst_v4l2_tuner_set_channel (GstTuner *mixer,
   g_return_if_fail (gst_v4l2_tuner_contains_channel (v4l2element, v4l2channel));
 
   /* ... or output, if we're a sink... */
-  if (gst_v4l2_tuner_is_sink (v4l2element))
-    gst_v4l2_set_output (v4l2element, v4l2channel->index);
-  else
-    gst_v4l2_set_input (v4l2element, v4l2channel->index);
+  if (gst_v4l2_tuner_is_sink (v4l2element) ?
+      gst_v4l2_set_output (v4l2element, v4l2channel->index) :
+      gst_v4l2_set_input (v4l2element, v4l2channel->index)) {
+    gst_tuner_channel_changed (mixer, channel);
+    g_object_notify (G_OBJECT (v4l2element), "channel");
+  }
 }
 
-static const GstTunerChannel *
+static GstTunerChannel *
 gst_v4l2_tuner_get_channel (GstTuner *mixer)
 {
   GstV4l2Element *v4l2element = GST_V4L2ELEMENT (mixer);
@@ -229,7 +231,7 @@ gst_v4l2_tuner_get_channel (GstTuner *mixer)
 
   for (item = v4l2element->channels; item != NULL; item = item->next) {
     if (channel == GST_V4L2_TUNER_CHANNEL (item->data)->index)
-      return (const GstTunerChannel *) item->data;
+      return (GstTunerChannel *) item->data;
   }
 
   return NULL;
@@ -265,10 +267,13 @@ gst_v4l2_tuner_set_norm (GstTuner *mixer,
   g_return_if_fail (GST_V4L2_IS_OPEN (v4l2element));
   g_return_if_fail (gst_v4l2_tuner_contains_norm (v4l2element, v4l2norm));
 
-  gst_v4l2_set_norm (v4l2element, v4l2norm->index);
+  if (gst_v4l2_set_norm (v4l2element, v4l2norm->index)) {
+    gst_tuner_norm_changed (mixer, norm);
+    g_object_notify (G_OBJECT (v4l2element), "norm");
+  }
 }
 
-static const GstTunerNorm *
+static GstTunerNorm *
 gst_v4l2_tuner_get_norm (GstTuner *mixer)
 {
   GstV4l2Element *v4l2element = GST_V4L2ELEMENT (mixer);
@@ -282,7 +287,7 @@ gst_v4l2_tuner_get_norm (GstTuner *mixer)
 
   for (item = v4l2element->norms; item != NULL; item = item->next) {
     if (norm == GST_V4L2_TUNER_NORM (item->data)->index)
-      return (const GstTunerNorm *) item->data;
+      return (GstTunerNorm *) item->data;
   }
 
   return NULL;
@@ -306,7 +311,10 @@ gst_v4l2_tuner_set_frequency (GstTuner *mixer,
   gst_v4l2_get_input (v4l2element, &chan);
   if (chan == GST_V4L2_TUNER_CHANNEL (channel)->index &&
       GST_TUNER_CHANNEL_HAS_FLAG (channel, GST_TUNER_CHANNEL_FREQUENCY)) {
-    gst_v4l2_set_frequency (v4l2element, v4l2channel->tuner, frequency);
+    if (gst_v4l2_set_frequency (v4l2element, v4l2channel->tuner, frequency)) {
+      gst_tuner_frequency_changed (mixer, channel, frequency);
+      g_object_notify (G_OBJECT (v4l2element), "frequency");
+    }
   }
 }

sys/v4l2/v4l2_calls.c

@@ -342,6 +342,49 @@ gst_v4l2_empty_lists (GstV4l2Element *v4l2element)
   v4l2element->colors = NULL;
 }
 
+/* FIXME: move this stuff to gstv4l2tuner.c? */
+
+static void
+gst_v4l2_set_defaults (GstV4l2Element *v4l2element)
+{
+  GstTunerNorm *norm = NULL;
+  GstTunerChannel *channel = NULL;
+  GstTuner *tuner = GST_TUNER (v4l2element);
+
+  if (v4l2element->norm)
+    norm = gst_tuner_find_norm_by_name (tuner, v4l2element->norm);
+  if (norm) {
+    gst_tuner_set_norm (tuner, norm);
+  } else {
+    norm = GST_TUNER_NORM (gst_tuner_get_norm (GST_TUNER (v4l2element)));
+    v4l2element->norm = g_strdup (norm->label);
+    gst_tuner_norm_changed (tuner, norm);
+    g_object_notify (G_OBJECT (v4l2element), "norm");
+  }
+
+  if (v4l2element->channel)
+    channel = gst_tuner_find_channel_by_name (tuner, v4l2element->channel);
+  if (channel) {
+    gst_tuner_set_channel (tuner, channel);
+  } else {
+    channel = GST_TUNER_CHANNEL (gst_tuner_get_channel (GST_TUNER (v4l2element)));
+    v4l2element->channel = g_strdup (channel->label);
+    gst_tuner_channel_changed (tuner, channel);
+    g_object_notify (G_OBJECT (v4l2element), "channel");
+  }
+
+  if (v4l2element->frequency != 0) {
+    gst_tuner_set_frequency (tuner, channel, v4l2element->frequency);
+  } else {
+    v4l2element->frequency = gst_tuner_get_frequency (tuner, channel);
+    if (v4l2element->frequency == 0) {
+      /* guess */
+      gst_tuner_set_frequency (tuner, channel, 1000);
+    } else {
+      g_object_notify (G_OBJECT (v4l2element), "frequency");
+    }
+  }
+}
+
 /******************************************************
  * gst_v4l2_open():
@@ -387,7 +430,10 @@ gst_v4l2_open (GstV4l2Element *v4l2element)
   if (!gst_v4l2_fill_lists(v4l2element))
     goto error;
 
-  gst_info("Opened device '%s' (%s) successfully\n",
+  /* set defaults */
+  gst_v4l2_set_defaults (v4l2element);
+
+  GST_INFO_OBJECT (v4l2element, "Opened device '%s' (%s) successfully\n",
     v4l2element->vcap.card, v4l2element->device);
 
   return TRUE;

sys/v4l2/v4l2src_calls.c

@ -31,26 +31,24 @@
#include <errno.h> #include <errno.h>
#include "v4l2src_calls.h" #include "v4l2src_calls.h"
#include <sys/time.h> #include <sys/time.h>
#include <unistd.h>
#define GST_CAT_DEFAULT v4l2src_debug
/* lalala... */
#define GST_V4L2_SET_ACTIVE(element) (element)->buffer = GINT_TO_POINTER (-1)
#define GST_V4L2_SET_INACTIVE(element) (element)->buffer = NULL
#define DEBUG(format, args...) \ #define DEBUG(format, args...) \
GST_DEBUG_OBJECT (\ GST_CAT_DEBUG_OBJECT (\
GST_ELEMENT(v4l2src), \ v4l2src_debug, v4l2src, \
"V4L2SRC: " format, ##args) "V4L2SRC: " format, ##args)
#define MIN_BUFFERS_QUEUED 2
/* On some systems MAP_FAILED seems to be missing */ /* On some systems MAP_FAILED seems to be missing */
#ifndef MAP_FAILED #ifndef MAP_FAILED
#define MAP_FAILED ( (caddr_t) -1 ) #define MAP_FAILED ( (caddr_t) -1 )
#endif #endif
enum {
QUEUE_STATE_ERROR = -1,
QUEUE_STATE_READY_FOR_QUEUE,
QUEUE_STATE_QUEUED,
QUEUE_STATE_SYNCED,
};
/****************************************************** /******************************************************
* gst_v4l2src_fill_format_list(): * gst_v4l2src_fill_format_list():
* create list of supported capture formats * create list of supported capture formats
@ -61,29 +59,29 @@ gboolean
gst_v4l2src_fill_format_list (GstV4l2Src *v4l2src) gst_v4l2src_fill_format_list (GstV4l2Src *v4l2src)
{ {
gint n; gint n;
struct v4l2_fmtdesc *format;
DEBUG("getting src format enumerations"); GST_DEBUG_OBJECT (v4l2src, "getting src format enumerations");
/* format enumeration */ /* format enumeration */
for (n=0;;n++) { for (n=0;;n++) {
struct v4l2_fmtdesc format, *fmtptr; format = g_new (struct v4l2_fmtdesc, 1);
format.index = n; format->index = n;
format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; format->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_ENUM_FMT, &format) < 0) { if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_ENUM_FMT, format) < 0) {
if (errno == EINVAL) if (errno == EINVAL) {
break; /* end of enumeration */ break; /* end of enumeration */
else { } else {
gst_element_error(GST_ELEMENT(v4l2src), gst_element_error(GST_ELEMENT(v4l2src),
"Failed to get no. %d in pixelformat enumeration for %s: %s", "Failed to get no. %d in pixelformat enumeration for %s: %s",
n, GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno)); n, GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
g_free (format);
return FALSE; return FALSE;
} }
} }
fmtptr = g_malloc(sizeof(format)); GST_LOG_OBJECT (v4l2src, "got format"GST_FOURCC_FORMAT,
memcpy(fmtptr, &format, sizeof(format)); GST_FOURCC_ARGS (format->pixelformat));
v4l2src->formats = g_list_append(v4l2src->formats, fmtptr); v4l2src->formats = g_slist_prepend (v4l2src->formats, format);
v4l2src->format_list = g_list_append(v4l2src->format_list, fmtptr->description);
} }
return TRUE; return TRUE;
@ -91,21 +89,16 @@ gst_v4l2src_fill_format_list (GstV4l2Src *v4l2src)
/****************************************************** /******************************************************
* gst_v4l2src_empty_format_list(): * gst_v4l2src_clear_format_list():
* free list of supported capture formats * free list of supported capture formats
* return value: TRUE on success, FALSE on error * return value: TRUE on success, FALSE on error
******************************************************/ ******************************************************/
gboolean gboolean
gst_v4l2src_empty_format_list (GstV4l2Src *v4l2src) gst_v4l2src_clear_format_list (GstV4l2Src *v4l2src)
{ {
while (g_list_length(v4l2src->formats) > 0) { g_slist_foreach (v4l2src->formats, (GFunc) g_free, NULL);
gpointer data = g_list_nth_data(v4l2src->formats, 0); g_slist_free (v4l2src->formats);
v4l2src->formats = g_list_remove(v4l2src->formats, data);
g_free(data);
}
g_list_free(v4l2src->format_list);
v4l2src->format_list = NULL;
return TRUE; return TRUE;
} }
@ -117,65 +110,47 @@ gst_v4l2src_empty_format_list (GstV4l2Src *v4l2src)
* return value: TRUE on success, FALSE on error * return value: TRUE on success, FALSE on error
******************************************************/ ******************************************************/
static gboolean gboolean
gst_v4l2src_queue_frame (GstV4l2Src *v4l2src, gst_v4l2src_queue_frame (GstV4l2Src *v4l2src,
gint num) guint i)
{ {
DEBUG("queueing frame %d", num); GST_LOG_OBJECT (v4l2src, "queueing frame %u", i);
if (v4l2src->frame_queue_state[num] != QUEUE_STATE_READY_FOR_QUEUE) { if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_QBUF, &v4l2src->pool->buffers[i].buffer) < 0) {
gst_element_error(GST_ELEMENT(v4l2src), "Error queueing buffer %u on device %s: %s",
i, GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
return FALSE; return FALSE;
} }
v4l2src->bufsettings.index = num;
if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd,
VIDIOC_QBUF, &v4l2src->bufsettings) < 0) {
gst_element_error(GST_ELEMENT(v4l2src),
"Error queueing buffer %d on device %s: %s",
num, GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
return FALSE;
}
v4l2src->frame_queue_state[num] = QUEUE_STATE_QUEUED;
v4l2src->num_queued++;
return TRUE; return TRUE;
} }
/****************************************************** /******************************************************
* gst_v4l2src_sync_next_frame(): * gst_v4l2src_grab_frame ():
* sync on a frame for capturing * grab a frame for capturing
* return value: TRUE on success, FALSE on error * return value: TRUE on success, FALSE on error
******************************************************/ ******************************************************/
static gboolean gint
gst_v4l2src_sync_next_frame (GstV4l2Src *v4l2src, gst_v4l2src_grab_frame (GstV4l2Src *v4l2src)
gint *num)
{ {
if (v4l2src->num_queued <= 0) { struct v4l2_buffer buffer;
return FALSE;
}
while (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
VIDIOC_DQBUF, &v4l2src->bufsettings) < 0) { while (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_DQBUF, &buffer) < 0) {
/* if the sync() got interrupted, we can retry */ /* if the sync() got interrupted, we can retry */
if (errno != EINTR) { if (errno != EINTR) {
gst_element_error(GST_ELEMENT(v4l2src), gst_element_error(GST_ELEMENT(v4l2src), "Error syncing on a buffer on device %s: %s",
"Error syncing on a buffer on device %s: %s",
GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno)); GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
return FALSE; return -1;
} }
DEBUG("Sync got interrupted"); GST_DEBUG_OBJECT (v4l2src, "grab got interrupted");
} }
DEBUG("synced on frame %d", v4l2src->bufsettings.index); GST_LOG_OBJECT (v4l2src, "grabbed frame %d", buffer.index);
*num = v4l2src->bufsettings.index;
v4l2src->frame_queue_state[*num] = QUEUE_STATE_SYNCED; return buffer.index;
v4l2src->num_queued--;
return TRUE;
} }
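
The dequeue loop in the new gst_v4l2src_grab_frame() retries whenever the ioctl is interrupted by a signal. A standalone sketch of that pattern, assuming fd is an open device that is already streaming; unlike the element code it also clears the structure and sets the memory type explicitly:

    /* Dequeue the next filled buffer; returns its index or -1 on error. */
    static int dequeue_frame (int fd)
    {
        struct v4l2_buffer buf;

        memset (&buf, 0, sizeof (buf));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;

        while (ioctl (fd, VIDIOC_DQBUF, &buf) < 0) {
            if (errno != EINTR)
                return -1;              /* a real error, not just a signal */
            /* interrupted: simply retry */
        }
        return buf.index;
    }
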
@@ -226,6 +201,7 @@ gst_v4l2src_set_capture (GstV4l2Src *v4l2src,
v4l2src->format.fmt.pix.width = width; v4l2src->format.fmt.pix.width = width;
v4l2src->format.fmt.pix.height = height; v4l2src->format.fmt.pix.height = height;
v4l2src->format.fmt.pix.pixelformat = fmt->pixelformat; v4l2src->format.fmt.pix.pixelformat = fmt->pixelformat;
v4l2src->format.fmt.pix.field = V4L2_FIELD_INTERLACED;
v4l2src->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; v4l2src->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_S_FMT, &v4l2src->format) < 0) { if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_S_FMT, &v4l2src->format) < 0) {
@@ -251,86 +227,79 @@ gboolean
gst_v4l2src_capture_init (GstV4l2Src *v4l2src) gst_v4l2src_capture_init (GstV4l2Src *v4l2src)
{ {
gint n; gint n;
gchar *desc = NULL; guint buffers;
struct v4l2_buffer buf;
DEBUG("initting the capture system"); GST_DEBUG_OBJECT (v4l2src, "initting the capture system");
GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src)); GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
GST_V4L2_CHECK_NOT_ACTIVE(GST_V4L2ELEMENT(v4l2src)); GST_V4L2_CHECK_NOT_ACTIVE(GST_V4L2ELEMENT(v4l2src));
/* request buffer info */ /* request buffer info */
if (v4l2src->breq.count < MIN_BUFFERS_QUEUED) { buffers = v4l2src->breq.count;
v4l2src->breq.count = MIN_BUFFERS_QUEUED; if (v4l2src->breq.count > GST_V4L2_MAX_BUFFERS) {
v4l2src->breq.count = GST_V4L2_MAX_BUFFERS;
}
if (v4l2src->breq.count < GST_V4L2_MIN_BUFFERS) {
v4l2src->breq.count = GST_V4L2_MIN_BUFFERS;
} }
v4l2src->breq.type = v4l2src->format.type; v4l2src->breq.type = v4l2src->format.type;
v4l2src->breq.memory = V4L2_MEMORY_MMAP; v4l2src->breq.memory = V4L2_MEMORY_MMAP;
if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_REQBUFS, &v4l2src->breq) < 0) {
VIDIOC_REQBUFS, &v4l2src->breq) < 0) { gst_element_error(GST_ELEMENT(v4l2src), "Error requesting buffers (%d) for %s: %s",
gst_element_error(GST_ELEMENT(v4l2src),
"Error requesting buffers (%d) for %s: %s",
v4l2src->breq.count, GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno)); v4l2src->breq.count, GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
return FALSE; return FALSE;
} }
if (v4l2src->breq.count < MIN_BUFFERS_QUEUED) { if (v4l2src->breq.count < GST_V4L2_MIN_BUFFERS) {
gst_element_error(GST_ELEMENT(v4l2src), gst_element_error(GST_ELEMENT(v4l2src), "Too little buffers. We got %d, we want at least %d",
"Too little buffers. We got %d, we want at least %d", v4l2src->breq.count, GST_V4L2_MIN_BUFFERS);
v4l2src->breq.count, MIN_BUFFERS_QUEUED); v4l2src->breq.count = buffers;
return FALSE; return FALSE;
} }
v4l2src->bufsettings.type = v4l2src->format.type; if (v4l2src->breq.count != buffers)
g_object_notify (G_OBJECT (v4l2src), "num_buffers");
for (n=0;n<g_list_length(v4l2src->formats);n++) { GST_INFO_OBJECT (v4l2src, "Got %d buffers ("GST_FOURCC_FORMAT") of size %d KB\n",
struct v4l2_fmtdesc *fmt = (struct v4l2_fmtdesc *) g_list_nth_data(v4l2src->formats, n); v4l2src->breq.count, GST_FOURCC_ARGS (v4l2src->format.fmt.pix.pixelformat),
if (v4l2src->format.fmt.pix.pixelformat == fmt->pixelformat) { v4l2src->format.fmt.pix.sizeimage / 1024);
desc = fmt->description;
break;
}
}
gst_info("Got %d buffers (%s) of size %d KB\n",
v4l2src->breq.count, desc, v4l2src->format.fmt.pix.sizeimage/1024);
/* keep track of queued buffers */
v4l2src->frame_queue_state = (gint8 *)
g_malloc(sizeof(gint8) * v4l2src->breq.count);
/* track how often to use each frame */
v4l2src->use_num_times = (gint *)
g_malloc(sizeof(gint) * v4l2src->breq.count);
/* lock for the frame_state */
v4l2src->mutex_queue_state = g_mutex_new();
v4l2src->cond_queue_state = g_cond_new();
/* Map the buffers */ /* Map the buffers */
GST_V4L2ELEMENT(v4l2src)->buffer = (guint8 **) v4l2src->pool = g_new (GstV4l2BufferPool, 1);
g_malloc(sizeof(guint8 *) * v4l2src->breq.count); gst_atomic_int_init (&v4l2src->pool->refcount, 1);
for (n=0;n<v4l2src->breq.count;n++) { v4l2src->pool->video_fd = GST_V4L2ELEMENT (v4l2src)->video_fd;
buf.index = n; v4l2src->pool->buffer_count = v4l2src->breq.count;
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; v4l2src->pool->buffers = g_new0 (GstV4l2Buffer, v4l2src->breq.count);
if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd,
VIDIOC_QUERYBUF, &buf) < 0) { for (n = 0; n < v4l2src->breq.count; n++) {
gst_element_error(GST_ELEMENT(v4l2src), GstV4l2Buffer *buffer = &v4l2src->pool->buffers[n];
"Failed to get buffer (%d) properties: %s",
gst_atomic_int_init (&buffer->refcount, 1);
buffer->pool = v4l2src->pool;
buffer->buffer.index = n;
buffer->buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_QUERYBUF, &buffer->buffer) < 0) {
gst_element_error(GST_ELEMENT(v4l2src), "Failed to get buffer (%d) properties: %s",
n, g_strerror(errno)); n, g_strerror(errno));
gst_v4l2src_capture_deinit(v4l2src); gst_v4l2src_capture_deinit(v4l2src);
return FALSE; return FALSE;
} }
GST_V4L2ELEMENT(v4l2src)->buffer[n] = mmap(0, buffer->start = mmap (0, buffer->buffer.length, PROT_READ|PROT_WRITE, MAP_SHARED,
buf.length, PROT_READ|PROT_WRITE, MAP_SHARED, GST_V4L2ELEMENT(v4l2src)->video_fd, buffer->buffer.m.offset);
GST_V4L2ELEMENT(v4l2src)->video_fd, buf.m.offset); if (buffer->start == MAP_FAILED) {
if (GST_V4L2ELEMENT(v4l2src)->buffer[n] == MAP_FAILED) { gst_element_error(GST_ELEMENT(v4l2src), "Error mapping video buffer (%d) on device %s: %s",
gst_element_error(GST_ELEMENT(v4l2src), n, GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
"Error mapping video buffer (%d) on device %s: %s", buffer->start = 0;
n, GST_V4L2ELEMENT(v4l2src)->device, gst_v4l2src_capture_deinit (v4l2src);
g_strerror(errno)); return FALSE;
GST_V4L2ELEMENT(v4l2src)->buffer[n] = NULL; }
gst_v4l2src_capture_deinit(v4l2src); buffer->length = buffer->buffer.length;
if (!gst_v4l2src_queue_frame(v4l2src, n)) {
gst_v4l2src_capture_deinit (v4l2src);
return FALSE; return FALSE;
} }
} }
GST_V4L2_SET_ACTIVE(GST_V4L2ELEMENT (v4l2src));
return TRUE; return TRUE;
} }
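
The initialization above is the standard V4L2 memory-mapped I/O setup: request a number of buffers from the driver, query each buffer's length and offset, then mmap() it into the process. A reduced sketch without the GstV4l2BufferPool bookkeeping; fd, mapped_buf and the helper name are made up for illustration, and it reuses the headers from the enumeration sketch plus <sys/mman.h>:

    #include <sys/mman.h>

    struct mapped_buf {
        void  *start;
        size_t length;
    };

    /* Request up to 'count' mmap buffers and map them; returns the number
     * of buffers actually granted by the driver, or -1 on error. */
    static int map_capture_buffers (int fd, struct mapped_buf *bufs, unsigned count)
    {
        struct v4l2_requestbuffers req;
        unsigned i;

        memset (&req, 0, sizeof (req));
        req.count = count;
        req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        req.memory = V4L2_MEMORY_MMAP;
        if (ioctl (fd, VIDIOC_REQBUFS, &req) < 0 || req.count < 2)
            return -1;                  /* driver refused or granted too few */

        for (i = 0; i < req.count; i++) {
            struct v4l2_buffer buf;

            memset (&buf, 0, sizeof (buf));
            buf.index = i;
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_MMAP;
            if (ioctl (fd, VIDIOC_QUERYBUF, &buf) < 0)
                return -1;

            bufs[i].length = buf.length;
            bufs[i].start = mmap (NULL, buf.length, PROT_READ | PROT_WRITE,
                                  MAP_SHARED, fd, buf.m.offset);
            if (bufs[i].start == MAP_FAILED)
                return -1;
        }
        return (int) req.count;
    }
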
@@ -344,146 +313,24 @@ gst_v4l2src_capture_init (GstV4l2Src *v4l2src)
gboolean gboolean
gst_v4l2src_capture_start (GstV4l2Src *v4l2src) gst_v4l2src_capture_start (GstV4l2Src *v4l2src)
{ {
gint n; gint type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
GST_DEBUG_OBJECT (v4l2src, "starting the capturing");
DEBUG("starting the capturing");
GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src)); GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
if (!GST_V4L2_IS_ACTIVE (GST_V4L2ELEMENT(v4l2src))) {
gst_pad_renegotiate (v4l2src->srcpad);
}
GST_V4L2_CHECK_ACTIVE(GST_V4L2ELEMENT(v4l2src)); GST_V4L2_CHECK_ACTIVE(GST_V4L2ELEMENT(v4l2src));
g_mutex_lock(v4l2src->mutex_queue_state);
v4l2src->quit = FALSE; v4l2src->quit = FALSE;
v4l2src->num_queued = 0;
v4l2src->queue_frame = 0;
/* set all buffers ready to queue , this starts streaming capture */ if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_STREAMON, &type) < 0) {
for (n=0;n<v4l2src->breq.count;n++) { gst_element_error(GST_ELEMENT(v4l2src), "Error starting streaming capture for %s: %s",
v4l2src->frame_queue_state[n] = QUEUE_STATE_READY_FOR_QUEUE;
if (!gst_v4l2src_queue_frame(v4l2src, n)) {
g_mutex_unlock(v4l2src->mutex_queue_state);
gst_v4l2src_capture_stop(v4l2src);
return FALSE;
}
}
n = 1;
if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_STREAMON, &n) < 0) {
gst_element_error(GST_ELEMENT(v4l2src),
"Error starting streaming capture for %s: %s",
GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno)); GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
return FALSE; return FALSE;
} }
g_mutex_unlock(v4l2src->mutex_queue_state);
return TRUE;
}
/******************************************************
* gst_v4l2src_grab_frame():
* capture one frame during streaming capture
* return value: TRUE on success, FALSE on error
******************************************************/
gboolean
gst_v4l2src_grab_frame (GstV4l2Src *v4l2src,
gint *num)
{
DEBUG("syncing on the next frame");
GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
GST_V4L2_CHECK_ACTIVE(GST_V4L2ELEMENT(v4l2src));
g_mutex_lock(v4l2src->mutex_queue_state);
/* do we have enough frames? */
while (v4l2src->num_queued < MIN_BUFFERS_QUEUED ||
v4l2src->frame_queue_state[v4l2src->queue_frame] ==
QUEUE_STATE_READY_FOR_QUEUE) {
while (v4l2src->frame_queue_state[v4l2src->queue_frame] !=
QUEUE_STATE_READY_FOR_QUEUE &&
!v4l2src->quit) {
GST_DEBUG (
"Waiting for frames to become available (%d < %d)",
v4l2src->num_queued, MIN_BUFFERS_QUEUED);
g_cond_wait(v4l2src->cond_queue_state,
v4l2src->mutex_queue_state);
}
if (v4l2src->quit) {
g_mutex_unlock(v4l2src->mutex_queue_state);
return TRUE; /* it won't get through anyway */
}
if (!gst_v4l2src_queue_frame(v4l2src, v4l2src->queue_frame)) {
g_mutex_unlock(v4l2src->mutex_queue_state);
return FALSE;
}
v4l2src->queue_frame = (v4l2src->queue_frame + 1) % v4l2src->breq.count;
}
/* syncing on the buffer grabs it */
if (!gst_v4l2src_sync_next_frame(v4l2src, num)) {
g_mutex_unlock(v4l2src->mutex_queue_state);
return FALSE;
}
g_mutex_unlock(v4l2src->mutex_queue_state);
return TRUE;
}
/******************************************************
*
******************************************************/
guint8 *
gst_v4l2src_get_buffer (GstV4l2Src *v4l2src,
gint num)
{
if (!GST_V4L2_IS_ACTIVE(GST_V4L2ELEMENT(v4l2src)) ||
!GST_V4L2_IS_OPEN(GST_V4L2ELEMENT(v4l2src)))
return NULL;
if (num < 0 || num >= v4l2src->breq.count)
return NULL;
return GST_V4L2ELEMENT(v4l2src)->buffer[num];
}
/******************************************************
* gst_v4l2src_requeue_frame():
* re-queue a frame after we're done with the buffer
* return value: TRUE on success, FALSE on error
******************************************************/
gboolean
gst_v4l2src_requeue_frame (GstV4l2Src *v4l2src,
gint num)
{
DEBUG("requeueing frame %d", num);
GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
GST_V4L2_CHECK_ACTIVE(GST_V4L2ELEMENT(v4l2src));
/* mark frame as 'ready to requeue' */
g_mutex_lock(v4l2src->mutex_queue_state);
if (v4l2src->frame_queue_state[num] != QUEUE_STATE_SYNCED) {
gst_element_error(GST_ELEMENT(v4l2src),
"Invalid state %d (expected %d), can't requeue",
v4l2src->frame_queue_state[num],
QUEUE_STATE_SYNCED);
return FALSE;
}
v4l2src->frame_queue_state[num] = QUEUE_STATE_READY_FOR_QUEUE;
/* let an optional wait know */
g_cond_broadcast(v4l2src->cond_queue_state);
g_mutex_unlock(v4l2src->mutex_queue_state);
return TRUE; return TRUE;
} }
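
VIDIOC_STREAMON and VIDIOC_STREAMOFF take a pointer to the buffer type. The old code passed a literal 1, which only works because V4L2_BUF_TYPE_VIDEO_CAPTURE happens to have that value; the reworked code passes the buffer type explicitly. A minimal sketch of the toggle, assuming fd is open and all buffers are already queued:

    /* Streaming is toggled with a single ioctl on the buffer type. */
    static int set_streaming (int fd, int on)
    {
        int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

        return ioctl (fd, on ? VIDIOC_STREAMON : VIDIOC_STREAMOFF, &type);
    }
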
@@ -497,37 +344,60 @@ gst_v4l2src_requeue_frame (GstV4l2Src *v4l2src,
gboolean gboolean
gst_v4l2src_capture_stop (GstV4l2Src *v4l2src) gst_v4l2src_capture_stop (GstV4l2Src *v4l2src)
{ {
gint n = 0; gint type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
DEBUG("stopping capturing"); GST_DEBUG_OBJECT (v4l2src, "stopping capturing");
GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src)); GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
GST_V4L2_CHECK_ACTIVE(GST_V4L2ELEMENT(v4l2src)); GST_V4L2_CHECK_ACTIVE(GST_V4L2ELEMENT(v4l2src));
g_mutex_lock(v4l2src->mutex_queue_state);
/* we actually need to sync on all queued buffers but not /* we actually need to sync on all queued buffers but not
* on the non-queued ones */ * on the non-queued ones */
if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_STREAMOFF, &n) < 0) { if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_STREAMOFF, &type) < 0) {
gst_element_error(GST_ELEMENT(v4l2src), gst_element_error (GST_ELEMENT(v4l2src), "Error stopping streaming capture for %s: %s",
"Error stopping streaming capture for %s: %s",
GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno)); GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
return FALSE; return FALSE;
} }
/* make an optional pending wait stop */ /* make an optional pending wait stop */
v4l2src->quit = TRUE; v4l2src->quit = TRUE;
g_cond_broadcast(v4l2src->cond_queue_state);
/* sync on remaining frames */
while (v4l2src->num_queued > 0) {
gst_v4l2src_sync_next_frame(v4l2src, &n);
}
g_mutex_unlock(v4l2src->mutex_queue_state);
return TRUE; return TRUE;
} }
static void
gst_v4l2src_buffer_pool_free (GstV4l2BufferPool *pool, gboolean do_close)
{
guint i;
for (i = 0; i < pool->buffer_count; i++) {
gst_atomic_int_destroy (&pool->buffers[i].refcount);
munmap (pool->buffers[i].start, pool->buffers[i].length);
}
g_free (pool->buffers);
gst_atomic_int_destroy (&pool->refcount);
if (do_close)
close (pool->video_fd);
g_free (pool);
}
void
gst_v4l2src_free_buffer (GstBuffer *buffer)
{
GstV4l2Buffer *buf = (GstV4l2Buffer *) GST_BUFFER_PRIVATE (buffer);
GST_LOG ("freeing buffer %p (nr. %d)", buffer, buf->buffer.index);
if (!gst_atomic_int_dec_and_test (&buf->refcount)) {
/* we're still in use, add to queue again
note: this might fail because the device is already stopped (race) */
if (ioctl(buf->pool->video_fd, VIDIOC_QBUF, &buf->buffer) < 0)
GST_INFO ("readding to queue failed, assuming video device is stopped");
}
if (gst_atomic_int_dec_and_test (&buf->pool->refcount)) {
/* we're last thing that used all this */
gst_v4l2src_buffer_pool_free (buf->pool, TRUE);
}
}
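
The pool carries its own refcount so that buffers handed downstream can outlive the element: gst_v4l2src_capture_deinit() drops the element's references, but the mappings are only torn down (and, on the free_buffer path, the pool's video_fd closed) once the last outstanding buffer is released. The hookup into GstBuffer lives in gstv4l2src.c and is not part of this diff; the sketch below is only a guess at what it looks like, and GST_BUFFER_FREE_DATA_FUNC and gst_atomic_int_inc are assumed names for the 0.8-era API, not taken from this change:

    /* Hypothetical wrapper, roughly what gstv4l2src.c would do after
     * gst_v4l2src_grab_frame() returns an index.  Names marked above as
     * assumptions are not confirmed by this diff. */
    static GstBuffer *
    wrap_frame (GstV4l2Src *v4l2src, gint index)
    {
        GstV4l2Buffer *v4l2buf = &v4l2src->pool->buffers[index];
        GstBuffer *buf = gst_buffer_new ();

        gst_atomic_int_inc (&v4l2buf->refcount);        /* downstream now holds a ref */
        gst_atomic_int_inc (&v4l2src->pool->refcount);  /* pool must outlive the buffer */

        GST_BUFFER_PRIVATE (buf) = v4l2buf;
        GST_BUFFER_DATA (buf) = v4l2buf->start;
        GST_BUFFER_SIZE (buf) = v4l2buf->buffer.bytesused;
        GST_BUFFER_FREE_DATA_FUNC (buf) = gst_v4l2src_free_buffer;

        return buf;
    }
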
/****************************************************** /******************************************************
* gst_v4l2src_capture_deinit(): * gst_v4l2src_capture_deinit():
@@ -538,30 +408,31 @@ gst_v4l2src_capture_stop (GstV4l2Src *v4l2src)
gboolean gboolean
gst_v4l2src_capture_deinit (GstV4l2Src *v4l2src) gst_v4l2src_capture_deinit (GstV4l2Src *v4l2src)
{ {
int n; gint i, dequeue = 0;
GST_DEBUG_OBJECT (v4l2src, "deinitting capture system");
DEBUG("deinitting capture system");
GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src)); GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
GST_V4L2_CHECK_ACTIVE(GST_V4L2ELEMENT(v4l2src)); GST_V4L2_CHECK_ACTIVE(GST_V4L2ELEMENT(v4l2src));
/* unmap the buffer */ /* free the buffers */
for (n=0;n<v4l2src->breq.count;n++) { for (i = 0; i < v4l2src->breq.count; i++) {
if (!GST_V4L2ELEMENT(v4l2src)->buffer[n]) { if (gst_atomic_int_dec_and_test (&v4l2src->pool->buffers[i].refcount))
break; dequeue++;
} }
munmap(GST_V4L2ELEMENT(v4l2src)->buffer[n], for (i = 0; i < dequeue; i++) {
v4l2src->format.fmt.pix.sizeimage); struct v4l2_buffer buffer;
GST_V4L2ELEMENT(v4l2src)->buffer[n] = NULL; buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_DQBUF, &buffer) < 0)
GST_WARNING_OBJECT (v4l2src, "Could not dequeue buffer on uninitialization");
} }
if (gst_atomic_int_dec_and_test (&v4l2src->pool->refcount)) {
/* we're last thing that used all this */
gst_v4l2src_buffer_pool_free (v4l2src->pool, FALSE);
}
v4l2src->pool = NULL;
/* free buffer tracker */ GST_V4L2_SET_INACTIVE (GST_V4L2ELEMENT (v4l2src));
g_free(GST_V4L2ELEMENT(v4l2src)->buffer);
GST_V4L2ELEMENT(v4l2src)->buffer = NULL;
g_mutex_free(v4l2src->mutex_queue_state);
g_cond_free(v4l2src->cond_queue_state);
g_free(v4l2src->frame_queue_state);
g_free(v4l2src->use_num_times);
return TRUE; return TRUE;
} }
@@ -578,13 +449,16 @@ gst_v4l2src_get_size_limits (GstV4l2Src *v4l2src,
{ {
struct v4l2_format fmt; struct v4l2_format fmt;
GST_LOG_OBJECT (v4l2src, "getting size limits with format " GST_FOURCC_FORMAT,
GST_FOURCC_ARGS (format->pixelformat));
/* get size delimiters */ /* get size delimiters */
memset(&fmt, 0, sizeof(fmt)); memset(&fmt, 0, sizeof(fmt));
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = 0; fmt.fmt.pix.width = 0;
fmt.fmt.pix.height = 0; fmt.fmt.pix.height = 0;
fmt.fmt.pix.pixelformat = format->pixelformat; fmt.fmt.pix.pixelformat = format->pixelformat;
fmt.fmt.pix.field = V4L2_FIELD_ANY; fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd,
VIDIOC_TRY_FMT, &fmt) < 0) { VIDIOC_TRY_FMT, &fmt) < 0) {
return FALSE; return FALSE;
@@ -594,9 +468,10 @@ gst_v4l2src_get_size_limits (GstV4l2Src *v4l2src,
*min_w = fmt.fmt.pix.width; *min_w = fmt.fmt.pix.width;
if (min_h) if (min_h)
*min_h = fmt.fmt.pix.height; *min_h = fmt.fmt.pix.height;
GST_LOG_OBJECT (v4l2src, "got min size %dx%d", fmt.fmt.pix.width, fmt.fmt.pix.height);
fmt.fmt.pix.width = G_MAXINT; fmt.fmt.pix.width = G_MAXINT;
fmt.fmt.pix.height = G_MAXINT; fmt.fmt.pix.height = 576;
if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd,
VIDIOC_TRY_FMT, &fmt) < 0) { VIDIOC_TRY_FMT, &fmt) < 0) {
return FALSE; return FALSE;
@@ -606,6 +481,7 @@ gst_v4l2src_get_size_limits (GstV4l2Src *v4l2src,
*max_w = fmt.fmt.pix.width; *max_w = fmt.fmt.pix.width;
if (max_h) if (max_h)
*max_h = fmt.fmt.pix.height; *max_h = fmt.fmt.pix.height;
GST_LOG_OBJECT (v4l2src, "got max size %dx%d", fmt.fmt.pix.width, fmt.fmt.pix.height);
return TRUE; return TRUE;
} }
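
The min/max probing relies on VIDIOC_TRY_FMT clamping the requested size to what the hardware supports: asking for 0x0 yields the minimum, asking for something very large yields the maximum (note the rework now asks for a maximum height of 576 rather than G_MAXINT). A standalone sketch of the same trick, with made-up names:

    /* Probe a driver's frame size limits for one pixel format via VIDIOC_TRY_FMT. */
    static int probe_size_limits (int fd, __u32 pixelformat,
                                  int *min_w, int *min_h, int *max_w, int *max_h)
    {
        struct v4l2_format fmt;

        memset (&fmt, 0, sizeof (fmt));
        fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        fmt.fmt.pix.pixelformat = pixelformat;
        fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;

        /* ask for the smallest possible frame */
        fmt.fmt.pix.width = 0;
        fmt.fmt.pix.height = 0;
        if (ioctl (fd, VIDIOC_TRY_FMT, &fmt) < 0)
            return -1;
        *min_w = fmt.fmt.pix.width;
        *min_h = fmt.fmt.pix.height;

        /* ask for an absurdly large frame and see what the driver clamps it to */
        fmt.fmt.pix.width = 100000;
        fmt.fmt.pix.height = 100000;
        if (ioctl (fd, VIDIOC_TRY_FMT, &fmt) < 0)
            return -1;
        *max_w = fmt.fmt.pix.width;
        *max_h = fmt.fmt.pix.height;

        return 0;
    }
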

sys/v4l2/v4l2src_calls.h

@@ -17,8 +17,8 @@
* Boston, MA 02111-1307, USA. * Boston, MA 02111-1307, USA.
*/ */
#ifndef __V4L2_SRC_CALLS_H__ #ifndef __V4L2SRC_CALLS_H__
#define __V4L2_SRC_CALLS_H__ #define __V4L2SRC_CALLS_H__
#include "gstv4l2src.h" #include "gstv4l2src.h"
#include "v4l2_calls.h" #include "v4l2_calls.h"
@@ -31,17 +31,16 @@ gboolean gst_v4l2src_set_capture (GstV4l2Src *v4l2src,
gint height); gint height);
gboolean gst_v4l2src_capture_init (GstV4l2Src *v4l2src); gboolean gst_v4l2src_capture_init (GstV4l2Src *v4l2src);
gboolean gst_v4l2src_capture_start (GstV4l2Src *v4l2src); gboolean gst_v4l2src_capture_start (GstV4l2Src *v4l2src);
gboolean gst_v4l2src_grab_frame (GstV4l2Src *v4l2src, gint gst_v4l2src_grab_frame (GstV4l2Src *v4l2src);
gint *num);
guint8 * gst_v4l2src_get_buffer (GstV4l2Src *v4l2src, guint8 * gst_v4l2src_get_buffer (GstV4l2Src *v4l2src,
gint num); gint num);
gboolean gst_v4l2src_requeue_frame (GstV4l2Src *v4l2src, gboolean gst_v4l2src_queue_frame (GstV4l2Src *v4l2src,
gint num); guint i);
gboolean gst_v4l2src_capture_stop (GstV4l2Src *v4l2src); gboolean gst_v4l2src_capture_stop (GstV4l2Src *v4l2src);
gboolean gst_v4l2src_capture_deinit (GstV4l2Src *v4l2src); gboolean gst_v4l2src_capture_deinit (GstV4l2Src *v4l2src);
gboolean gst_v4l2src_fill_format_list (GstV4l2Src *v4l2src); gboolean gst_v4l2src_fill_format_list (GstV4l2Src *v4l2src);
gboolean gst_v4l2src_empty_format_list (GstV4l2Src *v4l2src); gboolean gst_v4l2src_clear_format_list (GstV4l2Src *v4l2src);
/* hacky */ /* hacky */
gboolean gst_v4l2src_get_size_limits (GstV4l2Src *v4l2src, gboolean gst_v4l2src_get_size_limits (GstV4l2Src *v4l2src,
@@ -49,4 +48,6 @@ gboolean gst_v4l2src_get_size_limits (GstV4l2Src *v4l2src,
gint *min_w, gint *max_w, gint *min_w, gint *max_w,
gint *min_h, gint *max_h); gint *min_h, gint *max_h);
#endif /* __V4L2_SRC_CALLS_H__ */ void gst_v4l2src_free_buffer (GstBuffer *buffer);
#endif /* __V4L2SRC_CALLS_H__ */
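
Putting the reworked calls together, a caller (in practice gstv4l2src.c) would drive capture roughly as below. This is a simplified, hypothetical usage sketch with no error handling or GstBuffer wrapping; the helper name is invented, but the field accesses on the pool match the structures set up in v4l2src_calls.c above:

    /* Hypothetical capture loop using the reworked API (error paths omitted). */
    static void capture_some_frames (GstV4l2Src *v4l2src, int nframes)
    {
        int n;

        gst_v4l2src_capture_init (v4l2src);     /* REQBUFS + QUERYBUF + mmap + QBUF */
        gst_v4l2src_capture_start (v4l2src);    /* VIDIOC_STREAMON */

        for (n = 0; n < nframes; n++) {
            gint i = gst_v4l2src_grab_frame (v4l2src);  /* VIDIOC_DQBUF */

            if (i < 0)
                break;
            /* frame data lives at v4l2src->pool->buffers[i].start,
             * v4l2src->pool->buffers[i].buffer.bytesused bytes long */
            gst_v4l2src_queue_frame (v4l2src, i);       /* hand it back: VIDIOC_QBUF */
        }

        gst_v4l2src_capture_stop (v4l2src);     /* VIDIOC_STREAMOFF */
        gst_v4l2src_capture_deinit (v4l2src);   /* munmap and free the pool */
    }
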