v4l2codecs: Use GstVideoInfoDmaDrm more consistently

This avoids some duplications and makes the DRM info available in
more places, which will help with future changes.

Also fix some error messages while at it.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/8028>
Robert Mader 2024-11-28 23:17:40 +01:00 committed by GStreamer Marge Bot
parent 0d674d3abc
commit f7da069496
10 changed files with 148 additions and 131 deletions
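
For context, GstVideoInfoDmaDrm (from GStreamer's video library) bundles a plain GstVideoInfo together with the DRM fourcc and modifier. That is why every self->vinfo access in the hunks below can simply become self->vinfo_drm.vinfo, and why the separate GstVideoInfo fields and function parameters can be dropped. A paraphrased sketch of the struct and of the access pattern this commit switches to:

    /* Paraphrased from gst/video/video-info-dma.h; not part of this change. */
    struct _GstVideoInfoDmaDrm
    {
      GstVideoInfo vinfo;       /* regular video info: format, size, strides, offsets */
      guint32 drm_fourcc;       /* DRM fourcc, DRM_FORMAT_INVALID when not set */
      guint64 drm_modifier;     /* DRM modifier, e.g. DRM_FORMAT_MOD_LINEAR */
    };

    /* Typical access after this commit: */
    GstVideoFormat format = GST_VIDEO_INFO_FORMAT (&self->vinfo_drm.vinfo);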

View File

@@ -329,11 +329,14 @@ gst_v4l2_codec_av1_dec_negotiate (GstVideoDecoder * decoder)
   gst_caps_unref (filter);
   GST_DEBUG_OBJECT (self, "Peer supported formats: %" GST_PTR_FORMAT, caps);
 
-  if (!gst_v4l2_decoder_select_src_format (self->decoder, caps, &self->vinfo,
+  if (!gst_v4l2_decoder_select_src_format (self->decoder, caps,
           &self->vinfo_drm)) {
     GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
         ("Unsupported pixel format"),
-        ("No support for %ux%u", self->frame_width, self->frame_height));
+        ("No support for %ux%u format %s", self->frame_width,
+            self->frame_height,
+            gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (&self->
+                    vinfo_drm.vinfo))));
     gst_caps_unref (caps);
     return FALSE;
   }
@@ -344,9 +347,9 @@ done:
     gst_video_codec_state_unref (self->output_state);
 
   self->output_state =
-      gst_v4l2_decoder_set_output_state (GST_VIDEO_DECODER (self), &self->vinfo,
-      &self->vinfo_drm, self->render_width, self->render_height,
-      av1dec->input_state);
+      gst_v4l2_decoder_set_output_state (GST_VIDEO_DECODER (self),
+      &self->vinfo_drm, self->render_width,
+      self->render_height, av1dec->input_state);
 
   if (GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder)) {
     if (self->streaming)
@@ -430,7 +433,8 @@ gst_v4l2_codec_av1_dec_decide_allocation (GstVideoDecoder * decoder,
     return FALSE;
   }
 
-  self->src_pool = gst_v4l2_codec_pool_new (self->src_allocator, &self->vinfo);
+  self->src_pool =
+      gst_v4l2_codec_pool_new (self->src_allocator, &self->vinfo_drm);
 
 no_internal_changes:
   /* Our buffer pool is internal, we will let the base class create a video
@@ -1061,7 +1065,7 @@ gst_v4l2_codec_av1_dec_new_picture (GstAV1Decoder * decoder,
   max_width = seq_hdr->max_frame_width_minus_1 + 1;
   max_height = seq_hdr->max_frame_height_minus_1 + 1;
 
-  if (self->vinfo.finfo->format == GST_VIDEO_FORMAT_UNKNOWN)
+  if (self->vinfo_drm.vinfo.finfo->format == GST_VIDEO_FORMAT_UNKNOWN)
     negotiation_needed = TRUE;
 
   /* FIXME the base class could signal this, but let's assume that when we
@@ -1146,12 +1150,12 @@ gst_v4l2_codec_av1_dec_new_picture (GstAV1Decoder * decoder,
     gint i;
 
     gst_video_info_set_format (&ref_vinfo,
-        GST_VIDEO_INFO_FORMAT (&self->vinfo),
-        self->render_width, self->render_height);
+        GST_VIDEO_INFO_FORMAT (&self->vinfo_drm.vinfo), self->render_width,
+        self->render_height);
 
-    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->vinfo); i++) {
-      if (self->vinfo.stride[i] != ref_vinfo.stride[i] ||
-          self->vinfo.offset[i] != ref_vinfo.offset[i]) {
+    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->vinfo_drm.vinfo); i++) {
+      if (self->vinfo_drm.vinfo.stride[i] != ref_vinfo.stride[i] ||
+          self->vinfo_drm.vinfo.offset[i] != ref_vinfo.offset[i]) {
         GST_WARNING_OBJECT (self,
             "GstVideoMeta support required, copying frames.");
         self->copy_frames = TRUE;
@@ -1390,14 +1394,15 @@ gst_v4l2_codec_av1_dec_copy_output_buffer (GstV4l2CodecAV1Dec * self,
   GstVideoInfo dest_vinfo;
   GstBuffer *buffer;
 
-  gst_video_info_set_format (&dest_vinfo, GST_VIDEO_INFO_FORMAT (&self->vinfo),
-      self->render_width, self->render_height);
+  gst_video_info_set_format (&dest_vinfo,
+      GST_VIDEO_INFO_FORMAT (&self->vinfo_drm.vinfo), self->render_width,
+      self->render_height);
 
   buffer = gst_video_decoder_allocate_output_buffer (GST_VIDEO_DECODER (self));
   if (!buffer)
     goto fail;
 
-  if (!gst_video_frame_map (&src_frame, &self->vinfo,
+  if (!gst_video_frame_map (&src_frame, &self->vinfo_drm.vinfo,
           codec_frame->output_buffer, GST_MAP_READ))
     goto fail;
@@ -1576,7 +1581,6 @@ gst_v4l2_codec_av1_dec_subinit (GstV4l2CodecAV1Dec * self,
     GstV4l2CodecAV1DecClass * klass)
 {
   self->decoder = gst_v4l2_decoder_new (klass->device);
-  gst_video_info_init (&self->vinfo);
   gst_video_info_dma_drm_init (&self->vinfo_drm);
   self->tile_group_entries =
       g_array_new (FALSE, TRUE, sizeof (struct v4l2_ctrl_av1_tile_group_entry));

View File

@@ -65,7 +65,6 @@ struct _GstV4l2CodecH264Dec
   GstH264Decoder parent;
   GstV4l2Decoder *decoder;
   GstVideoCodecState *output_state;
-  GstVideoInfo vinfo;
   GstVideoInfoDmaDrm vinfo_drm;
   gint display_width;
   gint display_height;
@@ -370,12 +369,14 @@ gst_v4l2_codec_h264_dec_negotiate (GstVideoDecoder * decoder)
   gst_caps_unref (filter);
   GST_DEBUG_OBJECT (self, "Peer supported formats: %" GST_PTR_FORMAT, caps);
 
-  if (!gst_v4l2_decoder_select_src_format (self->decoder, caps, &self->vinfo,
+  if (!gst_v4l2_decoder_select_src_format (self->decoder, caps,
          &self->vinfo_drm)) {
     GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
-        ("Unsupported bitdepth/chroma format"),
-        ("No support for %ux%u %ubit chroma IDC %i", self->coded_width,
-            self->coded_height, self->bitdepth, self->chroma_format_idc));
+        ("Unsupported pixel format"),
+        ("No support for %ux%u format %s", self->display_width,
+            self->display_height,
+            gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (&self->
+                    vinfo_drm.vinfo))));
     gst_caps_unref (caps);
     return FALSE;
   }
@@ -386,7 +387,7 @@ done:
     gst_video_codec_state_unref (self->output_state);
 
   self->output_state =
-      gst_v4l2_decoder_set_output_state (GST_VIDEO_DECODER (self), &self->vinfo,
+      gst_v4l2_decoder_set_output_state (GST_VIDEO_DECODER (self),
      &self->vinfo_drm, self->display_width, self->display_height,
      h264dec->input_state);
@@ -475,7 +476,8 @@ gst_v4l2_codec_h264_dec_decide_allocation (GstVideoDecoder * decoder,
     return FALSE;
   }
 
-  self->src_pool = gst_v4l2_codec_pool_new (self->src_allocator, &self->vinfo);
+  self->src_pool =
+      gst_v4l2_codec_pool_new (self->src_allocator, &self->vinfo_drm);
 
 no_internal_changes:
   /* Our buffer pool is internal, we will let the base class create a video
@@ -879,7 +881,7 @@ gst_v4l2_codec_h264_dec_new_sequence (GstH264Decoder * decoder,
   gboolean negotiation_needed = FALSE;
   gboolean interlaced;
 
-  if (self->vinfo.finfo->format == GST_VIDEO_FORMAT_UNKNOWN)
+  if (self->vinfo_drm.vinfo.finfo->format == GST_VIDEO_FORMAT_UNKNOWN)
    negotiation_needed = TRUE;
 
   /* TODO check if CREATE_BUFS is supported, and simply grow the pool */
@@ -943,12 +945,13 @@ gst_v4l2_codec_h264_dec_new_sequence (GstH264Decoder * decoder,
     GstVideoInfo ref_vinfo;
     gint i;
 
-    gst_video_info_set_format (&ref_vinfo, GST_VIDEO_INFO_FORMAT (&self->vinfo),
-        self->display_width, self->display_height);
+    gst_video_info_set_format (&ref_vinfo,
+        GST_VIDEO_INFO_FORMAT (&self->vinfo_drm.vinfo), self->display_width,
+        self->display_height);
 
-    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->vinfo); i++) {
-      if (self->vinfo.stride[i] != ref_vinfo.stride[i] ||
-          self->vinfo.offset[i] != ref_vinfo.offset[i]) {
+    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->vinfo_drm.vinfo); i++) {
+      if (self->vinfo_drm.vinfo.stride[i] != ref_vinfo.stride[i] ||
+          self->vinfo_drm.vinfo.offset[i] != ref_vinfo.offset[i]) {
         GST_WARNING_OBJECT (self,
            "GstVideoMeta support required, copying frames.");
         self->copy_frames = TRUE;
@@ -1035,14 +1038,15 @@ gst_v4l2_codec_h264_dec_copy_output_buffer (GstV4l2CodecH264Dec * self,
   GstVideoInfo dest_vinfo;
   GstBuffer *buffer;
 
-  gst_video_info_set_format (&dest_vinfo, GST_VIDEO_INFO_FORMAT (&self->vinfo),
-      self->display_width, self->display_height);
+  gst_video_info_set_format (&dest_vinfo,
+      GST_VIDEO_INFO_FORMAT (&self->vinfo_drm.vinfo), self->display_width,
+      self->display_height);
 
   buffer = gst_video_decoder_allocate_output_buffer (GST_VIDEO_DECODER (self));
   if (!buffer)
     goto fail;
 
-  if (!gst_video_frame_map (&src_frame, &self->vinfo,
+  if (!gst_video_frame_map (&src_frame, &self->vinfo_drm.vinfo,
          codec_frame->output_buffer, GST_MAP_READ))
    goto fail;
@@ -1506,7 +1510,6 @@ gst_v4l2_codec_h264_dec_subinit (GstV4l2CodecH264Dec * self,
    GstV4l2CodecH264DecClass * klass)
 {
   self->decoder = gst_v4l2_decoder_new (klass->device);
-  gst_video_info_init (&self->vinfo);
   gst_video_info_dma_drm_init (&self->vinfo_drm);
   self->slice_params = g_array_sized_new (FALSE, TRUE,
      sizeof (struct v4l2_ctrl_h264_slice_params), 4);

View File

@@ -66,7 +66,6 @@ struct _GstV4l2CodecH265Dec
   GstH265Decoder parent;
   GstV4l2Decoder *decoder;
   GstVideoCodecState *output_state;
-  GstVideoInfo vinfo;
   GstVideoInfoDmaDrm vinfo_drm;
   gint display_width;
   gint display_height;
@@ -404,12 +403,14 @@ gst_v4l2_codec_h265_dec_negotiate (GstVideoDecoder * decoder)
   gst_caps_unref (filter);
   GST_DEBUG_OBJECT (self, "Peer supported formats: %" GST_PTR_FORMAT, caps);
 
-  if (!gst_v4l2_decoder_select_src_format (self->decoder, caps, &self->vinfo,
+  if (!gst_v4l2_decoder_select_src_format (self->decoder, caps,
          &self->vinfo_drm)) {
     GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
-        ("Unsupported bitdepth/chroma format"),
-        ("No support for %ux%u %ubit chroma IDC %i", self->coded_width,
-            self->coded_height, self->bitdepth, self->chroma_format_idc));
+        ("Unsupported pixel format"),
+        ("No support for %ux%u format %s", self->display_width,
+            self->display_height,
+            gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (&self->
+                    vinfo_drm.vinfo))));
     gst_caps_unref (caps);
     return FALSE;
   }
@@ -420,7 +421,7 @@ done:
     gst_video_codec_state_unref (self->output_state);
 
   self->output_state =
-      gst_v4l2_decoder_set_output_state (GST_VIDEO_DECODER (self), &self->vinfo,
+      gst_v4l2_decoder_set_output_state (GST_VIDEO_DECODER (self),
      &self->vinfo_drm, self->display_width, self->display_height,
      h265dec->input_state);
@@ -504,7 +505,8 @@ gst_v4l2_codec_h265_dec_decide_allocation (GstVideoDecoder * decoder,
     return FALSE;
   }
 
-  self->src_pool = gst_v4l2_codec_pool_new (self->src_allocator, &self->vinfo);
+  self->src_pool =
+      gst_v4l2_codec_pool_new (self->src_allocator, &self->vinfo_drm);
 
 no_internal_changes:
   /* Our buffer pool is internal, we will let the base class create a video
@@ -902,7 +904,7 @@ gst_v4l2_codec_h265_dec_new_sequence (GstH265Decoder * decoder,
   gint crop_height = sps->height;
   gboolean negotiation_needed = FALSE;
 
-  if (self->vinfo.finfo->format == GST_VIDEO_FORMAT_UNKNOWN)
+  if (self->vinfo_drm.vinfo.finfo->format == GST_VIDEO_FORMAT_UNKNOWN)
    negotiation_needed = TRUE;
 
   /* TODO check if CREATE_BUFS is supported, and simply grow the pool */
@@ -969,12 +971,13 @@ gst_v4l2_codec_h265_dec_new_sequence (GstH265Decoder * decoder,
     GstVideoInfo ref_vinfo;
     gint i;
 
-    gst_video_info_set_format (&ref_vinfo, GST_VIDEO_INFO_FORMAT (&self->vinfo),
-        self->display_width, self->display_height);
+    gst_video_info_set_format (&ref_vinfo,
+        GST_VIDEO_INFO_FORMAT (&self->vinfo_drm.vinfo), self->display_width,
+        self->display_height);
 
-    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->vinfo); i++) {
-      if (self->vinfo.stride[i] != ref_vinfo.stride[i] ||
-          self->vinfo.offset[i] != ref_vinfo.offset[i]) {
+    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->vinfo_drm.vinfo); i++) {
+      if (self->vinfo_drm.vinfo.stride[i] != ref_vinfo.stride[i] ||
+          self->vinfo_drm.vinfo.offset[i] != ref_vinfo.offset[i]) {
         GST_WARNING_OBJECT (self,
            "GstVideoMeta support required, copying frames.");
         self->copy_frames = TRUE;
@@ -1176,14 +1179,15 @@ gst_v4l2_codec_h265_dec_copy_output_buffer (GstV4l2CodecH265Dec * self,
   GstVideoInfo dest_vinfo;
   GstBuffer *buffer;
 
-  gst_video_info_set_format (&dest_vinfo, GST_VIDEO_INFO_FORMAT (&self->vinfo),
-      self->display_width, self->display_height);
+  gst_video_info_set_format (&dest_vinfo,
+      GST_VIDEO_INFO_FORMAT (&self->vinfo_drm.vinfo), self->display_width,
+      self->display_height);
 
   buffer = gst_video_decoder_allocate_output_buffer (GST_VIDEO_DECODER (self));
   if (!buffer)
     goto fail;
 
-  if (!gst_video_frame_map (&src_frame, &self->vinfo,
+  if (!gst_video_frame_map (&src_frame, &self->vinfo_drm.vinfo,
          codec_frame->output_buffer, GST_MAP_READ))
    goto fail;
@@ -1636,7 +1640,6 @@ gst_v4l2_codec_h265_dec_subinit (GstV4l2CodecH265Dec * self,
    GstV4l2CodecH265DecClass * klass)
 {
   self->decoder = gst_v4l2_decoder_new (klass->device);
-  gst_video_info_init (&self->vinfo);
   gst_video_info_dma_drm_init (&self->vinfo_drm);
   self->slice_params = g_array_sized_new (FALSE, TRUE,
      sizeof (struct v4l2_ctrl_hevc_slice_params), 4);

View File

@@ -292,12 +292,13 @@ gst_v4l2_codec_mpeg2_dec_negotiate (GstVideoDecoder * decoder)
   gst_caps_unref (filter);
   GST_DEBUG_OBJECT (self, "Peer supported formats: %" GST_PTR_FORMAT, caps);
 
-  if (!gst_v4l2_decoder_select_src_format (self->decoder, caps, &self->vinfo,
+  if (!gst_v4l2_decoder_select_src_format (self->decoder, caps,
          &self->vinfo_drm)) {
     GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
-        ("Unsupported bitdepth/chroma format"),
-        ("No support for %ux%u chroma IDC %i", self->width,
-            self->height, self->chroma_format));
+        ("Unsupported pixel format"),
+        ("No support for %ux%u format %s", self->width, self->height,
+            gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (&self->
+                    vinfo_drm.vinfo))));
     gst_caps_unref (caps);
     return FALSE;
   }
@@ -308,7 +309,7 @@ done:
     gst_video_codec_state_unref (self->output_state);
 
   self->output_state =
-      gst_v4l2_decoder_set_output_state (GST_VIDEO_DECODER (self), &self->vinfo,
+      gst_v4l2_decoder_set_output_state (GST_VIDEO_DECODER (self),
      &self->vinfo_drm, self->width, self->height, mpeg2dec->input_state);
 
   if (self->interlaced)
@@ -384,7 +385,8 @@ gst_v4l2_codec_mpeg2_dec_decide_allocation (GstVideoDecoder * decoder,
      GST_PAD_SINK, num_bitstream);
   self->src_allocator = gst_v4l2_codec_allocator_new (self->decoder,
      GST_PAD_SRC, self->min_pool_size + min + 4);
-  self->src_pool = gst_v4l2_codec_pool_new (self->src_allocator, &self->vinfo);
+  self->src_pool =
+      gst_v4l2_codec_pool_new (self->src_allocator, &self->vinfo_drm);
 
 no_internal_changes:
   /* Our buffer pool is internal, we will let the base class create a video
@@ -461,7 +463,7 @@ gst_v4l2_codec_mpeg2_dec_new_sequence (GstMpeg2Decoder * decoder,
     self->streaming = TRUE;
   }
 
-  if (self->vinfo.finfo->format == GST_VIDEO_FORMAT_UNKNOWN)
+  if (self->vinfo_drm.vinfo.finfo->format == GST_VIDEO_FORMAT_UNKNOWN)
    negotiation_needed = TRUE;
 
   /* copy quantiser from the sequence header,
@@ -504,12 +506,13 @@ gst_v4l2_codec_mpeg2_dec_new_sequence (GstMpeg2Decoder * decoder,
     GstVideoInfo ref_vinfo;
     gint i;
 
-    gst_video_info_set_format (&ref_vinfo, GST_VIDEO_INFO_FORMAT (&self->vinfo),
-        self->width, self->height);
+    gst_video_info_set_format (&ref_vinfo,
+        GST_VIDEO_INFO_FORMAT (&self->vinfo_drm.vinfo), self->width,
+        self->height);
 
-    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->vinfo); i++) {
-      if (self->vinfo.stride[i] != ref_vinfo.stride[i] ||
-          self->vinfo.offset[i] != ref_vinfo.offset[i]) {
+    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->vinfo_drm.vinfo); i++) {
+      if (self->vinfo_drm.vinfo.stride[i] != ref_vinfo.stride[i] ||
+          self->vinfo_drm.vinfo.offset[i] != ref_vinfo.offset[i]) {
         GST_WARNING_OBJECT (self,
            "GstVideoMeta support required, copying frames.");
         self->copy_frames = TRUE;
@@ -668,14 +671,15 @@ gst_v4l2_codec_mpeg2_dec_copy_output_buffer (GstV4l2CodecMpeg2Dec * self,
   GstVideoInfo dest_vinfo;
   GstBuffer *buffer;
 
-  gst_video_info_set_format (&dest_vinfo, GST_VIDEO_INFO_FORMAT (&self->vinfo),
-      self->width, self->height);
+  gst_video_info_set_format (&dest_vinfo,
+      GST_VIDEO_INFO_FORMAT (&self->vinfo_drm.vinfo), self->width,
+      self->height);
 
   buffer = gst_video_decoder_allocate_output_buffer (GST_VIDEO_DECODER (self));
   if (!buffer)
     goto fail;
 
-  if (!gst_video_frame_map (&src_frame, &self->vinfo,
+  if (!gst_video_frame_map (&src_frame, &self->vinfo_drm.vinfo,
         codec_frame->output_buffer, GST_MAP_READ))
    goto fail;
@@ -1030,7 +1034,6 @@ gst_v4l2_codec_mpeg2_dec_subinit (GstV4l2CodecMpeg2Dec * self,
    GstV4l2CodecMpeg2DecClass * klass)
 {
   self->decoder = gst_v4l2_decoder_new (klass->device);
-  gst_video_info_init (&self->vinfo);
   gst_video_info_dma_drm_init (&self->vinfo_drm);
 }

View File

@@ -28,7 +28,7 @@ struct _GstV4l2CodecPool
   GstAtomicQueue *queue;
   GstV4l2CodecAllocator *allocator;
   /* Used to set GstVideoMeta */
-  GstVideoInfo *vinfo;
+  GstVideoInfoDmaDrm *vinfo_drm;
 };
 
 G_DEFINE_TYPE (GstV4l2CodecPool, gst_v4l2_codec_pool, GST_TYPE_BUFFER_POOL);
@@ -54,7 +54,7 @@ gst_v4l2_codec_pool_acquire_buffer (GstBufferPool * pool, GstBuffer ** buffer,
   GstVideoMeta *vmeta;
 
   /* A GstVideoInfo must be set before buffer can be acquired */
-  g_return_val_if_fail (self->vinfo, GST_FLOW_ERROR);
+  g_return_val_if_fail (self->vinfo_drm, GST_FLOW_ERROR);
 
   buf = gst_atomic_queue_pop (self->queue);
   if (!buf)
@@ -85,12 +85,12 @@ gst_v4l2_codec_pool_acquire_buffer (GstBufferPool * pool, GstBuffer ** buffer,
   }
 
   vmeta = gst_buffer_get_video_meta (buf);
-  vmeta->format = GST_VIDEO_INFO_FORMAT (self->vinfo);
-  vmeta->width = GST_VIDEO_INFO_WIDTH (self->vinfo);
-  vmeta->height = GST_VIDEO_INFO_HEIGHT (self->vinfo);
-  vmeta->n_planes = GST_VIDEO_INFO_N_PLANES (self->vinfo);
-  memcpy (vmeta->offset, self->vinfo->offset, sizeof (vmeta->offset));
-  memcpy (vmeta->stride, self->vinfo->stride, sizeof (vmeta->stride));
+  vmeta->format = GST_VIDEO_INFO_FORMAT (&self->vinfo_drm->vinfo);
+  vmeta->width = GST_VIDEO_INFO_WIDTH (&self->vinfo_drm->vinfo);
+  vmeta->height = GST_VIDEO_INFO_HEIGHT (&self->vinfo_drm->vinfo);
+  vmeta->n_planes = GST_VIDEO_INFO_N_PLANES (&self->vinfo_drm->vinfo);
+  memcpy (vmeta->offset, self->vinfo_drm->vinfo.offset, sizeof (vmeta->offset));
+  memcpy (vmeta->stride, self->vinfo_drm->vinfo.stride, sizeof (vmeta->stride));
 
   *buffer = buf;
   return GST_FLOW_OK;
@@ -133,8 +133,8 @@ gst_v4l2_codec_pool_finalize (GObject * object)
   gst_atomic_queue_unref (self->queue);
   g_object_unref (self->allocator);
 
-  if (self->vinfo)
-    gst_video_info_free (self->vinfo);
+  if (self->vinfo_drm)
+    gst_video_info_dma_drm_free (self->vinfo_drm);
 
   G_OBJECT_CLASS (gst_v4l2_codec_pool_parent_class)->finalize (object);
 }
@@ -154,13 +154,14 @@ gst_v4l2_codec_pool_class_init (GstV4l2CodecPoolClass * klass)
 GstV4l2CodecPool *
 gst_v4l2_codec_pool_new (GstV4l2CodecAllocator * allocator,
-    const GstVideoInfo * vinfo)
+    const GstVideoInfoDmaDrm * vinfo_drm)
 {
   GstV4l2CodecPool *pool = g_object_new (GST_TYPE_V4L2_CODEC_POOL, NULL);
   gsize pool_size;
 
   pool->allocator = g_object_ref (allocator);
-  pool->vinfo = gst_video_info_copy (vinfo);
+  pool->vinfo_drm = g_boxed_copy (GST_TYPE_VIDEO_INFO_DMA_DRM, vinfo_drm);
 
   pool_size = gst_v4l2_codec_allocator_get_pool_size (allocator);
   for (gsize i = 0; i < pool_size; i++) {

View File

@@ -30,7 +30,7 @@
 G_DECLARE_FINAL_TYPE(GstV4l2CodecPool, gst_v4l2_codec_pool, GST,
     V4L2_CODEC_POOL, GstBufferPool)
 
 GstV4l2CodecPool *gst_v4l2_codec_pool_new (GstV4l2CodecAllocator *allocator,
-                                           const GstVideoInfo * vinfo);
+                                           const GstVideoInfoDmaDrm * vinfo_drm);
 
 guint32 gst_v4l2_codec_buffer_get_index (GstBuffer * buffer);

View File

@@ -69,7 +69,6 @@ struct _GstV4l2CodecVp8Dec
   GstVp8Decoder parent;
   GstV4l2Decoder *decoder;
   GstVideoCodecState *output_state;
-  GstVideoInfo vinfo;
   GstVideoInfoDmaDrm vinfo_drm;
   gint width;
   gint height;
@@ -241,12 +240,13 @@ gst_v4l2_codec_vp8_dec_negotiate (GstVideoDecoder * decoder)
   gst_caps_unref (filter);
   GST_DEBUG_OBJECT (self, "Peer supported formats: %" GST_PTR_FORMAT, caps);
 
-  if (!gst_v4l2_decoder_select_src_format (self->decoder, caps, &self->vinfo,
+  if (!gst_v4l2_decoder_select_src_format (self->decoder, caps,
          &self->vinfo_drm)) {
     GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
        ("Unsupported pixel format"),
        ("No support for %ux%u format %s", self->width, self->height,
-            gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (&self->vinfo))));
+            gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (&self->
+                    vinfo_drm.vinfo))));
     gst_caps_unref (caps);
     return FALSE;
   }
@@ -257,7 +257,7 @@ done:
     gst_video_codec_state_unref (self->output_state);
 
   self->output_state =
-      gst_v4l2_decoder_set_output_state (GST_VIDEO_DECODER (self), &self->vinfo,
+      gst_v4l2_decoder_set_output_state (GST_VIDEO_DECODER (self),
      &self->vinfo_drm, self->width, self->height, vp8dec->input_state);
 
   if (GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder)) {
@@ -341,7 +341,8 @@ gst_v4l2_codec_vp8_dec_decide_allocation (GstVideoDecoder * decoder,
     return FALSE;
   }
 
-  self->src_pool = gst_v4l2_codec_pool_new (self->src_allocator, &self->vinfo);
+  self->src_pool =
+      gst_v4l2_codec_pool_new (self->src_allocator, &self->vinfo_drm);
 
 no_internal_changes:
   /* Our buffer pool is internal, we will let the base class create a video
@@ -496,7 +497,7 @@ gst_v4l2_codec_vp8_dec_new_sequence (GstVp8Decoder * decoder,
   GstV4l2CodecVp8Dec *self = GST_V4L2_CODEC_VP8_DEC (decoder);
   gboolean negotiation_needed = FALSE;
 
-  if (self->vinfo.finfo->format == GST_VIDEO_FORMAT_UNKNOWN)
+  if (self->vinfo_drm.vinfo.finfo->format == GST_VIDEO_FORMAT_UNKNOWN)
    negotiation_needed = TRUE;
 
   /* TODO Check if current buffers are large enough, and reuse them */
@@ -523,12 +524,13 @@ gst_v4l2_codec_vp8_dec_new_sequence (GstVp8Decoder * decoder,
     GstVideoInfo ref_vinfo;
     gint i;
 
-    gst_video_info_set_format (&ref_vinfo, GST_VIDEO_INFO_FORMAT (&self->vinfo),
-        self->width, self->height);
+    gst_video_info_set_format (&ref_vinfo,
+        GST_VIDEO_INFO_FORMAT (&self->vinfo_drm.vinfo), self->width,
+        self->height);
 
-    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->vinfo); i++) {
-      if (self->vinfo.stride[i] != ref_vinfo.stride[i] ||
-          self->vinfo.offset[i] != ref_vinfo.offset[i]) {
+    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->vinfo_drm.vinfo); i++) {
+      if (self->vinfo_drm.vinfo.stride[i] != ref_vinfo.stride[i] ||
+          self->vinfo_drm.vinfo.offset[i] != ref_vinfo.offset[i]) {
         GST_WARNING_OBJECT (self,
            "GstVideoMeta support required, copying frames.");
         self->copy_frames = TRUE;
@@ -703,14 +705,15 @@ gst_v4l2_codec_vp8_dec_copy_output_buffer (GstV4l2CodecVp8Dec * self,
   GstVideoInfo dest_vinfo;
   GstBuffer *buffer;
 
-  gst_video_info_set_format (&dest_vinfo, GST_VIDEO_INFO_FORMAT (&self->vinfo),
-      self->width, self->height);
+  gst_video_info_set_format (&dest_vinfo,
+      GST_VIDEO_INFO_FORMAT (&self->vinfo_drm.vinfo), self->width,
+      self->height);
 
   buffer = gst_video_decoder_allocate_output_buffer (GST_VIDEO_DECODER (self));
   if (!buffer)
     goto fail;
 
-  if (!gst_video_frame_map (&src_frame, &self->vinfo,
+  if (!gst_video_frame_map (&src_frame, &self->vinfo_drm.vinfo,
         codec_frame->output_buffer, GST_MAP_READ))
    goto fail;
@@ -890,7 +893,6 @@ gst_v4l2_codec_vp8_dec_subinit (GstV4l2CodecVp8Dec * self,
    GstV4l2CodecVp8DecClass * klass)
 {
   self->decoder = gst_v4l2_decoder_new (klass->device);
-  gst_video_info_init (&self->vinfo);
   gst_video_info_dma_drm_init (&self->vinfo_drm);
 }

View File

@@ -514,12 +514,13 @@ gst_v4l2_codec_vp9_dec_negotiate (GstVideoDecoder * decoder)
   gst_caps_unref (filter);
   GST_DEBUG_OBJECT (self, "Peer supported formats: %" GST_PTR_FORMAT, caps);
 
-  if (!gst_v4l2_decoder_select_src_format (self->decoder, caps, &self->vinfo,
+  if (!gst_v4l2_decoder_select_src_format (self->decoder, caps,
          &self->vinfo_drm)) {
     GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
        ("Unsupported pixel format"),
        ("No support for %ux%u format %s", self->width, self->height,
-            gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (&self->vinfo))));
+            gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (&self->
+                    vinfo_drm.vinfo))));
     gst_caps_unref (caps);
     return FALSE;
   }
@@ -530,7 +531,7 @@ done:
     gst_video_codec_state_unref (self->output_state);
 
   self->output_state =
-      gst_v4l2_decoder_set_output_state (GST_VIDEO_DECODER (self), &self->vinfo,
+      gst_v4l2_decoder_set_output_state (GST_VIDEO_DECODER (self),
      &self->vinfo_drm, self->width, self->height, vp9dec->input_state);
 
   if (GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder)) {
@@ -611,7 +612,8 @@ gst_v4l2_codec_vp9_dec_decide_allocation (GstVideoDecoder * decoder,
     return FALSE;
   }
 
-  self->src_pool = gst_v4l2_codec_pool_new (self->src_allocator, &self->vinfo);
+  self->src_pool =
+      gst_v4l2_codec_pool_new (self->src_allocator, &self->vinfo_drm);
 
   /* Our buffer pool is internal, we will let the base class create a video
    * pool, and use it if we are running out of buffers or if downstream does
@@ -626,7 +628,7 @@ gst_v4l2_codec_vp9_dec_is_format_change (GstV4l2CodecVp9Dec * self,
 {
   gboolean ret = FALSE;
 
-  if (self->vinfo.finfo->format == GST_VIDEO_FORMAT_UNKNOWN)
+  if (self->vinfo_drm.vinfo.finfo->format == GST_VIDEO_FORMAT_UNKNOWN)
    ret = TRUE;
 
   if (self->width != frame_hdr->width || self->height != frame_hdr->height) {
@@ -735,12 +737,13 @@ gst_v4l2_codec_vp9_dec_new_sequence (GstVp9Decoder * decoder,
     GstVideoInfo ref_vinfo;
     gint i;
 
-    gst_video_info_set_format (&ref_vinfo, GST_VIDEO_INFO_FORMAT (&self->vinfo),
-        self->width, self->height);
+    gst_video_info_set_format (&ref_vinfo,
+        GST_VIDEO_INFO_FORMAT (&self->vinfo_drm.vinfo), self->width,
+        self->height);
 
-    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->vinfo); i++) {
-      if (self->vinfo.stride[i] != ref_vinfo.stride[i] ||
-          self->vinfo.offset[i] != ref_vinfo.offset[i]) {
+    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->vinfo_drm.vinfo); i++) {
+      if (self->vinfo_drm.vinfo.stride[i] != ref_vinfo.stride[i] ||
+          self->vinfo_drm.vinfo.offset[i] != ref_vinfo.offset[i]) {
         GST_WARNING_OBJECT (self,
            "GstVideoMeta support required, copying frames.");
         self->copy_frames = TRUE;
@@ -923,14 +926,15 @@ gst_v4l2_codec_vp9_dec_copy_output_buffer (GstV4l2CodecVp9Dec * self,
   GstVideoInfo dest_vinfo;
   GstBuffer *buffer;
 
-  gst_video_info_set_format (&dest_vinfo, GST_VIDEO_INFO_FORMAT (&self->vinfo),
-      self->width, self->height);
+  gst_video_info_set_format (&dest_vinfo,
+      GST_VIDEO_INFO_FORMAT (&self->vinfo_drm.vinfo), self->width,
+      self->height);
 
   buffer = gst_video_decoder_allocate_output_buffer (GST_VIDEO_DECODER (self));
   if (!buffer)
     goto fail;
 
-  if (!gst_video_frame_map (&src_frame, &self->vinfo,
+  if (!gst_video_frame_map (&src_frame, &self->vinfo_drm.vinfo,
         codec_frame->output_buffer, GST_MAP_READ))
    goto fail;
@@ -1166,7 +1170,6 @@ gst_v4l2_codec_vp9_dec_subinit (GstV4l2CodecVp9Dec * self,
    GstV4l2CodecVp9DecClass * klass)
 {
   self->decoder = gst_v4l2_decoder_new (klass->device);
-  gst_video_info_init (&self->vinfo);
   gst_video_info_dma_drm_init (&self->vinfo_drm);
 }

View File

@@ -36,6 +36,10 @@
 #include <gst/base/base.h>
 
+#define DRM_FORMAT_INVALID 0
+#define DRM_FORMAT_MOD_LINEAR 0ULL
+#define DRM_FORMAT_MOD_INVALID 0xffffffffffffffULL
+
 #define IMAGE_MINSZ (256*1024)  /* 256kB */
 
 GST_DEBUG_CATEGORY (v4l2_decoder_debug);
@@ -630,7 +634,7 @@ gst_v4l2_decoder_remove_buffers (GstV4l2Decoder * self,
 gboolean
 gst_v4l2_decoder_select_src_format (GstV4l2Decoder * self, GstCaps * caps,
-    GstVideoInfo * vinfo, GstVideoInfoDmaDrm * vinfo_drm)
+    GstVideoInfoDmaDrm * vinfo_drm)
 {
   gint ret;
   struct v4l2_format fmt = {
@@ -638,7 +642,6 @@ gst_v4l2_decoder_select_src_format (GstV4l2Decoder * self, GstCaps * caps,
   };
   GstVideoFormat format;
   guint32 pix_fmt;
-  GstVideoInfo tmp_vinfo;
   GstVideoInfoDmaDrm tmp_vinfo_drm;
 
   if (gst_caps_is_empty (caps))
@@ -650,22 +653,18 @@ gst_v4l2_decoder_select_src_format (GstV4l2Decoder * self, GstCaps * caps,
     return FALSE;
   }
 
-  gst_video_info_init (&tmp_vinfo);
-  gst_video_info_dma_drm_init (&tmp_vinfo_drm);
-
   GST_DEBUG_OBJECT (self, "Original caps: %" GST_PTR_FORMAT, caps);
   caps = gst_caps_fixate (caps);
   GST_DEBUG_OBJECT (self, "Fixated caps: %" GST_PTR_FORMAT, caps);
 
-  if (gst_video_info_dma_drm_from_caps (&tmp_vinfo_drm, caps)) {
-    format = tmp_vinfo_drm.vinfo.finfo->format;
-  } else if (gst_video_info_from_caps (&tmp_vinfo, caps)) {
-    format = tmp_vinfo.finfo->format;
-  } else {
+  gst_video_info_dma_drm_init (&tmp_vinfo_drm);
+  if (!gst_video_info_dma_drm_from_caps (&tmp_vinfo_drm, caps) &&
+      !gst_video_info_from_caps (&tmp_vinfo_drm.vinfo, caps)) {
     GST_WARNING_OBJECT (self, "Can't transform caps into video info!");
     return FALSE;
   }
+  format = tmp_vinfo_drm.vinfo.finfo->format;
 
   if (!gst_v4l2_format_from_video_format (format, &pix_fmt)) {
     GST_ERROR_OBJECT (self, "Unsupported V4L2 pixelformat %" GST_FOURCC_FORMAT,
        GST_FOURCC_ARGS (fmt.fmt.pix_mp.pixelformat));
@@ -684,36 +683,37 @@ gst_v4l2_decoder_select_src_format (GstV4l2Decoder * self, GstCaps * caps,
     }
   }
 
-  if (!gst_v4l2_format_to_video_info (&fmt, vinfo)) {
+  if (!gst_v4l2_format_to_video_info (&fmt, &vinfo_drm->vinfo)) {
     GST_ERROR_OBJECT (self, "Unsupported V4L2 pixelformat %" GST_FOURCC_FORMAT,
        GST_FOURCC_ARGS (fmt.fmt.pix_mp.pixelformat));
     return FALSE;
   }
 
-  gst_video_info_dma_drm_init (vinfo_drm);
+  vinfo_drm->drm_fourcc = DRM_FORMAT_INVALID;
+  vinfo_drm->drm_modifier = DRM_FORMAT_MOD_INVALID;
 
   if (tmp_vinfo_drm.drm_fourcc) {
-    GstVideoFormat format = GST_VIDEO_INFO_FORMAT (vinfo);
-    vinfo_drm->drm_fourcc = gst_video_dma_drm_format_from_gst_format (format,
-        &vinfo_drm->drm_modifier);
-    vinfo_drm->vinfo = *vinfo;
+    vinfo_drm->drm_fourcc =
+        gst_video_dma_drm_format_from_gst_format (tmp_vinfo_drm.vinfo.
+        finfo->format, &vinfo_drm->drm_modifier);
   }
 
   GST_INFO_OBJECT (self, "Selected format %s %ix%i",
-      gst_video_format_to_string (vinfo->finfo->format),
-      vinfo->width, vinfo->height);
+      gst_video_format_to_string (format), tmp_vinfo_drm.vinfo.width,
+      tmp_vinfo_drm.vinfo.height);
 
   return TRUE;
 }
 
 GstVideoCodecState *
 gst_v4l2_decoder_set_output_state (GstVideoDecoder * decoder,
-    GstVideoInfo * vinfo, GstVideoInfoDmaDrm * vinfo_drm, guint width,
+    GstVideoInfoDmaDrm * vinfo_drm, guint width,
     guint height, GstVideoCodecState * reference)
 {
   GstVideoCodecState *state;
 
-  state = gst_video_decoder_set_output_state (decoder, vinfo->finfo->format,
-      width, height, reference);
+  state =
+      gst_video_decoder_set_output_state (decoder,
+      vinfo_drm->vinfo.finfo->format, width, height, reference);
 
   if (vinfo_drm->drm_fourcc /* != DRM_FORMAT_INVALID */ ) {
     GstVideoInfoDmaDrm tmp_vinfo_drm = *vinfo_drm;
View File

@@ -75,11 +75,9 @@ GstCaps * gst_v4l2_decoder_enum_all_src_formats (GstV4l2Decoder * self,
 gboolean              gst_v4l2_decoder_select_src_format (GstV4l2Decoder * self,
                                                           GstCaps * caps,
-                                                          GstVideoInfo * vinfo,
                                                           GstVideoInfoDmaDrm * vinfo_drm);
 
 GstVideoCodecState *  gst_v4l2_decoder_set_output_state (GstVideoDecoder * decoder,
-                                                         GstVideoInfo * vinfo,
                                                          GstVideoInfoDmaDrm * drm_info,
                                                          guint width,
                                                          guint height,
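
With the reworked signatures, each codec element now carries only a GstVideoInfoDmaDrm and threads it through negotiation, output-state setup and pool creation. A condensed, illustrative sketch of that flow after this commit (error paths omitted; width, height and input_state stand in for the per-codec values):

    /* negotiate() */
    if (!gst_v4l2_decoder_select_src_format (self->decoder, caps, &self->vinfo_drm))
      return FALSE;

    self->output_state =
        gst_v4l2_decoder_set_output_state (GST_VIDEO_DECODER (self),
        &self->vinfo_drm, width, height, input_state);

    /* decide_allocation() */
    self->src_pool =
        gst_v4l2_codec_pool_new (self->src_allocator, &self->vinfo_drm);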