nvencoder: Always allow interlaced stream

... even if the hardware does not support interlaced encoding at the
bitstream level. Although interlacing information is not written into the
bitstream in that case, it can still be signalled via the container, so
allow interlaced streams.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/9328>
This commit is contained in:
Seungha Yang 2025-07-04 23:34:13 +09:00
parent 4f932ee081
commit 5325ec41e4
4 changed files with 55 additions and 88 deletions

View File

@ -859,43 +859,6 @@ gst_nv_encoder_propose_allocation (GstVideoEncoder * encoder, GstQuery * query)
return TRUE;
}
/* Determine the NVENC picture structure to submit for @buffer.
 *
 * Progressive streams always encode as full frames. For mixed
 * interlace-mode streams the per-buffer INTERLACED/TFF flags decide;
 * otherwise the field order negotiated in the caps is used, falling
 * back to the per-buffer TFF flag when the caps leave it unknown.
 */
static NV_ENC_PIC_STRUCT
gst_nv_encoder_get_pic_struct (GstNvEncoder * self, GstBuffer * buffer)
{
  GstNvEncoderPrivate *priv = self->priv;
  GstVideoInfo *info = &priv->input_state->info;

  if (!GST_VIDEO_INFO_IS_INTERLACED (info))
    return NV_ENC_PIC_STRUCT_FRAME;

  if (GST_VIDEO_INFO_INTERLACE_MODE (info) == GST_VIDEO_INTERLACE_MODE_MIXED) {
    /* Mixed mode: each buffer carries its own interlacing flags */
    if (!GST_BUFFER_FLAG_IS_SET (buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED))
      return NV_ENC_PIC_STRUCT_FRAME;

    if (GST_BUFFER_FLAG_IS_SET (buffer, GST_VIDEO_BUFFER_FLAG_TFF))
      return NV_ENC_PIC_STRUCT_FIELD_TOP_BOTTOM;

    return NV_ENC_PIC_STRUCT_FIELD_BOTTOM_TOP;
  }

  switch (GST_VIDEO_INFO_FIELD_ORDER (info)) {
    case GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST:
      return NV_ENC_PIC_STRUCT_FIELD_TOP_BOTTOM;
    case GST_VIDEO_FIELD_ORDER_BOTTOM_FIELD_FIRST:
      return NV_ENC_PIC_STRUCT_FIELD_BOTTOM_TOP;
    default:
      /* Field order not pinned by caps — honour the per-buffer flag */
      break;
  }

  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_VIDEO_BUFFER_FLAG_TFF))
    return NV_ENC_PIC_STRUCT_FIELD_TOP_BOTTOM;

  return NV_ENC_PIC_STRUCT_FIELD_BOTTOM_TOP;
}
static GstVideoCodecFrame *
gst_nv_encoder_find_output_frame (GstVideoEncoder * self, GstNvEncTask * task)
{
@ -2242,8 +2205,12 @@ gst_nv_encoder_handle_frame (GstVideoEncoder * encoder,
gst_nv_enc_task_get_sei_payload (task));
}
status = priv->object->Encode (frame,
gst_nv_encoder_get_pic_struct (self, in_buf), task);
auto pic_struct = NV_ENC_PIC_STRUCT_FRAME;
if (klass->get_pic_struct) {
pic_struct = klass->get_pic_struct (self, &priv->input_state->info, in_buf);
}
status = priv->object->Encode (frame, pic_struct, task);
if (status != NV_ENC_SUCCESS) {
GST_ERROR_OBJECT (self, "Failed to encode frame");
gst_video_encoder_release_frame (encoder, frame);

View File

@ -275,6 +275,10 @@ struct _GstNvEncoderClass
GstNvEncoderDeviceData * data);
guint (*calculate_min_buffers) (GstNvEncoder * encoder);
NV_ENC_PIC_STRUCT (*get_pic_struct) (GstNvEncoder * encoder,
const GstVideoInfo * info,
GstBuffer * buffer);
};
GType gst_nv_encoder_get_type (void);

View File

@ -50,8 +50,7 @@ GST_DEBUG_CATEGORY_STATIC (gst_nv_h264_encoder_debug);
#define DOC_SINK_CAPS_COMM \
"format = (string) { NV12, Y444, VUYA, RGBA, RGBx, BGRA, BGRx }, " \
"width = (int) [ 160, 4096 ], height = (int) [ 64, 4096 ], " \
"interlace-mode = (string) progressive"
"width = (int) [ 160, 4096 ], height = (int) [ 64, 4096 ]"
#define DOC_SINK_CAPS \
"video/x-raw(memory:CUDAMemory), " DOC_SINK_CAPS_COMM "; " \
@ -262,6 +261,9 @@ static gboolean gst_nv_h264_encoder_select_device (GstNvEncoder * encoder,
const GstVideoInfo * info, GstBuffer * buffer,
GstNvEncoderDeviceData * data);
static guint gst_nv_h264_encoder_calculate_min_buffers (GstNvEncoder * encoder);
static NV_ENC_PIC_STRUCT
gst_nv_h264_encoder_get_pic_struct (GstNvEncoder * encoder,
const GstVideoInfo * info, GstBuffer * buffer);
static void
gst_nv_h264_encoder_class_init (GstNvH264EncoderClass * klass, gpointer data)
@ -660,6 +662,8 @@ gst_nv_h264_encoder_class_init (GstNvH264EncoderClass * klass, gpointer data)
GST_DEBUG_FUNCPTR (gst_nv_h264_encoder_select_device);
nvenc_class->calculate_min_buffers =
GST_DEBUG_FUNCPTR (gst_nv_h264_encoder_calculate_min_buffers);
nvenc_class->get_pic_struct =
GST_DEBUG_FUNCPTR (gst_nv_h264_encoder_get_pic_struct);
klass->device_caps = cdata->device_caps;
klass->cuda_device_id = cdata->cuda_device_id;
@ -1203,14 +1207,12 @@ static GstCaps *
gst_nv_h264_encoder_getcaps (GstVideoEncoder * encoder, GstCaps * filter)
{
GstNvH264Encoder *self = GST_NV_H264_ENCODER (encoder);
GstNvH264EncoderClass *klass = GST_NV_H264_ENCODER_GET_CLASS (self);
GstCaps *allowed_caps;
GstCaps *template_caps;
GstCaps *filtered_caps;
GstCaps *supported_caps;
std::set < std::string > downstream_profiles;
std::set < std::string > allowed_formats;
gboolean profile_support_interlaced = FALSE;
gst_nv_h264_encoder_get_downstream_profiles_and_format (self,
downstream_profiles, nullptr);
@ -1223,11 +1225,7 @@ gst_nv_h264_encoder_getcaps (GstVideoEncoder * encoder, GstCaps * filter)
/* *INDENT-OFF* */
for (const auto &iter: downstream_profiles) {
if (iter == "high" || iter == "main")
profile_support_interlaced = TRUE;
if (iter == "high-4:4:4") {
profile_support_interlaced = TRUE;
allowed_formats.insert("Y444");
} else {
allowed_formats.insert("NV12");
@ -1240,17 +1238,9 @@ gst_nv_h264_encoder_getcaps (GstVideoEncoder * encoder, GstCaps * filter)
}
/* *INDENT-ON* */
GST_DEBUG_OBJECT (self, "Downstream %s support interlaced format",
profile_support_interlaced ? "can" : "cannot");
template_caps = gst_pad_get_pad_template_caps (encoder->sinkpad);
allowed_caps = gst_caps_copy (template_caps);
if (klass->device_caps.field_encoding == 0 || !profile_support_interlaced) {
gst_caps_set_simple (allowed_caps, "interlace-mode", G_TYPE_STRING,
"progressive", nullptr);
}
GValue formats = G_VALUE_INIT;
g_value_init (&formats, GST_TYPE_LIST);
@ -1330,19 +1320,6 @@ gst_nv_h264_encoder_set_format (GstNvEncoder * encoder,
return FALSE;
}
if (GST_VIDEO_INFO_IS_INTERLACED (info)) {
downstream_profiles.erase ("progressive-high");
downstream_profiles.erase ("constrained-high");
downstream_profiles.erase ("constrained-baseline");
downstream_profiles.erase ("baseline");
if (downstream_profiles.empty ()) {
GST_ERROR_OBJECT (self,
"None of downstream profile supports interlaced encoding");
return FALSE;
}
}
if (GST_VIDEO_INFO_FORMAT (info) == GST_VIDEO_FORMAT_Y444) {
if (downstream_profiles.find ("high-4:4:4") == downstream_profiles.end ()) {
GST_ERROR_OBJECT (self, "Downstream does not support 4:4:4 profile");
@ -2156,6 +2133,42 @@ gst_nv_h264_encoder_calculate_min_buffers (GstNvEncoder * encoder)
return num_buffers;
}
/* GstNvEncoderClass::get_pic_struct vfunc.
 *
 * Picks the NVENC picture structure for @buffer. When the device does
 * not support field encoding at the bitstream level
 * (device_caps.field_encoding == 0) every picture is submitted as a
 * full frame and interlacing can only be signalled by the container.
 * Otherwise the base-class policy applies: progressive streams encode
 * as frames; mixed streams follow per-buffer flags; fixed interlaced
 * streams follow the caps field order, with the per-buffer TFF flag as
 * fallback.
 */
static NV_ENC_PIC_STRUCT
gst_nv_h264_encoder_get_pic_struct (GstNvEncoder * encoder,
    const GstVideoInfo * info, GstBuffer * buffer)
{
  auto klass = GST_NV_H264_ENCODER_GET_CLASS (encoder);

  /* Hardware cannot write interlacing info into the bitstream */
  if (klass->device_caps.field_encoding == 0)
    return NV_ENC_PIC_STRUCT_FRAME;

  /* Progressive input must not fall through to the field-order logic
   * below, which would otherwise report FIELD_BOTTOM_TOP for it */
  if (!GST_VIDEO_INFO_IS_INTERLACED (info))
    return NV_ENC_PIC_STRUCT_FRAME;

  if (GST_VIDEO_INFO_INTERLACE_MODE (info) == GST_VIDEO_INTERLACE_MODE_MIXED) {
    /* Mixed mode: each buffer carries its own interlacing flags */
    if (!GST_BUFFER_FLAG_IS_SET (buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED))
      return NV_ENC_PIC_STRUCT_FRAME;

    if (GST_BUFFER_FLAG_IS_SET (buffer, GST_VIDEO_BUFFER_FLAG_TFF))
      return NV_ENC_PIC_STRUCT_FIELD_TOP_BOTTOM;

    return NV_ENC_PIC_STRUCT_FIELD_BOTTOM_TOP;
  }

  switch (GST_VIDEO_INFO_FIELD_ORDER (info)) {
    case GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST:
      return NV_ENC_PIC_STRUCT_FIELD_TOP_BOTTOM;
    case GST_VIDEO_FIELD_ORDER_BOTTOM_FIELD_FIRST:
      return NV_ENC_PIC_STRUCT_FIELD_BOTTOM_TOP;
    default:
      /* Field order not pinned by caps — honour the per-buffer flag */
      break;
  }

  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_VIDEO_BUFFER_FLAG_TFF))
    return NV_ENC_PIC_STRUCT_FIELD_TOP_BOTTOM;

  return NV_ENC_PIC_STRUCT_FIELD_BOTTOM_TOP;
}
static GstNvEncoderClassData *
gst_nv_h264_encoder_create_class_data (GstObject * device, gpointer session,
GstNvEncoderDeviceMode device_mode)
@ -2306,13 +2319,6 @@ gst_nv_h264_encoder_create_class_data (GstObject * device, gpointer session,
sink_caps_str = "video/x-raw, " + format_str + ", " + resolution_str;
if (dev_caps.field_encoding > 0) {
sink_caps_str +=
", interlace-mode = (string) { progressive, interleaved, mixed }";
} else {
sink_caps_str += ", interlace-mode = (string) progressive";
}
src_caps_str = "video/x-h264, " + resolution_str + ", " + profile_str +
", stream-format = (string) { byte-stream, avc }, alignment = (string) au";
@ -2656,13 +2662,6 @@ gst_nv_h264_encoder_register_auto_select (GstPlugin * plugin,
sink_caps_str = "video/x-raw, " + format_str + ", " + resolution_str;
if (dev_caps.field_encoding > 0) {
sink_caps_str +=
", interlace-mode = (string) { progressive, interleaved, mixed }";
} else {
sink_caps_str += ", interlace-mode = (string) progressive";
}
src_caps_str = "video/x-h264, " + resolution_str + ", " + profile_str +
", stream-format = (string) { byte-stream, avc }, alignment = (string) au";

View File

@ -50,8 +50,7 @@ GST_DEBUG_CATEGORY_STATIC (gst_nv_h265_encoder_debug);
#define DOC_SINK_CAPS_COMM \
"format = (string) { NV12, P010_10LE, Y444, Y444_16LE, GBR, GBR_16LE, VUYA, RGBA, RGBx, BGRA, BGRx, RGB10A2_LE }, " \
"width = (int) [ 144, 8192 ], height = (int) [ 48, 8192 ], " \
"interlace-mode = (string) progressive"
"width = (int) [ 144, 8192 ], height = (int) [ 48, 8192 ]"
#define DOC_SINK_CAPS \
"video/x-raw(memory:CUDAMemory), " DOC_SINK_CAPS_COMM "; " \
@ -2367,8 +2366,7 @@ gst_nv_h265_encoder_create_class_data (GstObject * device, gpointer session,
std::to_string (GST_ROUND_UP_16 (dev_caps.height_min))
+ ", " + std::to_string (dev_caps.height_max) + " ]";
sink_caps_str = "video/x-raw, " + format_str + ", " + resolution_str
+ ", interlace-mode = (string) progressive";
sink_caps_str = "video/x-raw, " + format_str + ", " + resolution_str;
src_caps_str = "video/x-h265, " + resolution_str + ", " + profile_str +
", stream-format = (string) { byte-stream, hvc1, hev1 }" +
@ -2714,8 +2712,7 @@ gst_nv_h265_encoder_register_auto_select (GstPlugin * plugin,
std::to_string (GST_ROUND_UP_16 (dev_caps.height_min))
+ ", " + std::to_string (dev_caps.height_max) + " ]";
sink_caps_str = "video/x-raw, " + format_str + ", " + resolution_str
+ ", interlace-mode = (string) progressive";
sink_caps_str = "video/x-raw, " + format_str + ", " + resolution_str;
src_caps_str = "video/x-h265, " + resolution_str + ", " + profile_str +
", stream-format = (string) { byte-stream, hvc1, hev1 }" +