tflite: Add support for VSI delegate

Part-of: <https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/9410>
This commit is contained in:
Olivier Crête 2025-07-15 20:10:25 -04:00
parent d1b00839c3
commit 69e91b78a7
5 changed files with 264 additions and 1 deletions

View File

@ -30,6 +30,10 @@
#include "gsttfliteedgetpuinference.h"
#endif
#ifdef TFLITE_VSI
#include "gsttflitevsiinference.h"
#endif
static gboolean
plugin_init (GstPlugin * plugin)
{
@ -39,6 +43,10 @@ plugin_init (GstPlugin * plugin)
ret |= GST_ELEMENT_REGISTER (tflite_edgetpu_inference, plugin);
#endif
#ifdef TFLITE_VSI
ret |= GST_ELEMENT_REGISTER (tflite_vsi_inference, plugin);
#endif
return ret;
}

View File

@ -0,0 +1,205 @@
/*
* GStreamer
* Copyright (C) 2025 Collabora Ltd.
*
* gsttflitevsiinference.c
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
/**
* SECTION:element-tflitevsiinference
* @short_description: Run TFLITE inference model on video buffers
* using a Verisilicon accelerator
*
* This element can apply a TFLite model to video buffers. It attaches
* the tensor output to the buffer as a @ref GstTensorMeta.
*
* To install TFLite on your system, follow the instructions in the
* README.md shipped with this plugin.
*
* ## Example launch command:
*
* GST_DEBUG=ssdobjectdetector:5 \
* gst-launch-1.0 filesrc location=tflite-models/images/bus.jpg ! \
* jpegdec ! videoconvert ! tflitevsiinference model-file=tflite-models/models/ssd_mobilenet_v1_coco.tflite ! \
* ssdobjectdetector label-file=tflite-models/labels/COCO_classes.txt ! videoconvert ! imagefreeze ! autovideosink
*
* Since: 1.28
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gsttflitevsiinference.h"
#include "VX/vsi_npu_custom_op.h"
#include <tensorflow/lite/delegates/external/external_delegate.h>
/* Instance structure: a GstTFliteInference subclass that routes inference
 * through an external VSI (VeriSilicon) NPU delegate. */
typedef struct _GstTFliteVsiInference
{
GstTFliteInference parent;
/* Path of the delegate shared object, owned by this instance ("delegate" property). */
gchar *delegate_path;
/* External delegate handle; created in update_options(), destroyed in stop(). */
TfLiteDelegate *tflite_delegate;
} GstTFliteVsiInference;
GST_DEBUG_CATEGORY (tflite_vsi_inference_debug);
#define GST_CAT_DEFAULT tflite_vsi_inference_debug
GST_ELEMENT_REGISTER_DEFINE (tflite_vsi_inference,
"tflitevsiinference", GST_RANK_NONE, GST_TYPE_TFLITE_VSI_INFERENCE);
/* GObject property IDs. */
enum
{
PROP_0,
PROP_DELEGATE,
};
/* Default shared-object name of the VSI TfLite delegate library. */
#define DEFAULT_DELEGATE_PATH "libvx_delegate.so.2"
static void gst_tflite_vsi_inference_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_tflite_vsi_inference_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
static void gst_tflite_vsi_inference_finalize (GObject * object);
static gboolean gst_tflite_vsi_update_options (GstTFliteInference * inf,
TfLiteInterpreterOptions * interpreter_options);
static gboolean gst_tflite_vsi_inference_stop (GstBaseTransform * trans);
G_DEFINE_TYPE (GstTFliteVsiInference, gst_tflite_vsi_inference,
GST_TYPE_TFLITE_INFERENCE);
/* Class initializer: registers the debug category, element metadata,
 * GObject vfuncs, the base-transform stop vfunc, the TFLite
 * update_options hook, and the "delegate" property. */
static void
gst_tflite_vsi_inference_class_init (GstTFliteVsiInferenceClass * klass)
{
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstElementClass *element_class = (GstElementClass *) klass;
  GstBaseTransformClass *basetransform_class = (GstBaseTransformClass *) klass;
  GstTFliteInferenceClass *tflite_class = (GstTFliteInferenceClass *) klass;

  /* Fixed typo in the category description ("TFLlite vsi" -> "TFLite VSI"). */
  GST_DEBUG_CATEGORY_INIT (tflite_vsi_inference_debug,
      "tflitevsiinference", 0, "TFLite VSI inference");

  gst_element_class_set_static_metadata (element_class,
      "tflitevsiinference",
      "Filter/Effect",
      "Apply neural network to video frames and create tensor output"
      " using a Verisilicon accelerator",
      "Olivier Crête <olivier.crete@collabora.com>");

  gobject_class->set_property = gst_tflite_vsi_inference_set_property;
  gobject_class->get_property = gst_tflite_vsi_inference_get_property;
  gobject_class->finalize = gst_tflite_vsi_inference_finalize;
  basetransform_class->stop = gst_tflite_vsi_inference_stop;
  tflite_class->update_options = gst_tflite_vsi_update_options;

  g_object_class_install_property (G_OBJECT_CLASS (klass),
      PROP_DELEGATE,
      g_param_spec_string ("delegate",
          "TfLite Delegate", "Path to the VSI TfLite delegate library",
          DEFAULT_DELEGATE_PATH, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
}
/* Instance initializer: starts with the default delegate library path
 * so the element is usable without setting the "delegate" property. */
static void
gst_tflite_vsi_inference_init (GstTFliteVsiInference * self)
{
self->delegate_path = g_strdup (DEFAULT_DELEGATE_PATH);
}
/* GObject property setter for the "delegate" property (path to the VSI
 * TfLite delegate shared library). */
static void
gst_tflite_vsi_inference_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstTFliteVsiInference *self = GST_TFLITE_VSI_INFERENCE (object);

  switch (prop_id) {
    case PROP_DELEGATE:
      /* Drop any previously configured path before taking the new one. */
      g_clear_pointer (&self->delegate_path, g_free);
      self->delegate_path = g_value_dup_string (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject property getter: returns a copy of the configured delegate
 * library path for the "delegate" property. */
static void
gst_tflite_vsi_inference_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec)
{
GstTFliteVsiInference *self = GST_TFLITE_VSI_INFERENCE (object);
switch (prop_id) {
case PROP_DELEGATE:
g_value_set_string (value, self->delegate_path);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
/* GObject finalize: frees the owned delegate-path string and chains up.
 * The delegate handle itself is released in stop(), not here. */
static void
gst_tflite_vsi_inference_finalize (GObject * object)
{
  GstTFliteVsiInference *self = GST_TFLITE_VSI_INFERENCE (object);

  g_clear_pointer (&self->delegate_path, g_free);

  G_OBJECT_CLASS (gst_tflite_vsi_inference_parent_class)->finalize (object);
}
/* update_options vfunc: loads the external VSI delegate from the
 * configured shared-object path, attaches it to the interpreter options
 * and registers the custom op used by pre-compiled VSI NPU models.
 *
 * Returns: TRUE on success, FALSE if the delegate library could not be
 * loaded (previously the NULL delegate was passed on and TRUE returned
 * unconditionally, which hid the failure from the caller).
 */
static gboolean
gst_tflite_vsi_update_options (GstTFliteInference * inf,
    TfLiteInterpreterOptions * interpreter_options)
{
  GstTFliteVsiInference *self = GST_TFLITE_VSI_INFERENCE (inf);
  TfLiteExternalDelegateOptions external_delegate_options;

  external_delegate_options =
      TfLiteExternalDelegateOptionsDefault (self->delegate_path);
  self->tflite_delegate =
      TfLiteExternalDelegateCreate (&external_delegate_options);
  if (self->tflite_delegate == NULL) {
    GST_ERROR_OBJECT (self, "Could not load VSI delegate library '%s'",
        self->delegate_path);
    return FALSE;
  }

  TfLiteInterpreterOptionsAddDelegate (interpreter_options,
      self->tflite_delegate);

  /* Needed so pre-compiled VSI NPU graphs resolve their custom op. */
  TfLiteInterpreterOptionsAddRegistrationExternal (interpreter_options,
      (TfLiteRegistrationExternal *) Register_VSI_NPU_PRECOMPILED ());

  return TRUE;
}
/* GstBaseTransform stop vfunc: chains up first so the parent tears down
 * the interpreter before its delegate is destroyed, then releases the
 * external delegate created in update_options(). */
static gboolean
gst_tflite_vsi_inference_stop (GstBaseTransform * trans)
{
  GstTFliteVsiInference *self = GST_TFLITE_VSI_INFERENCE (trans);
  gboolean ret;

  ret = GST_BASE_TRANSFORM_CLASS (gst_tflite_vsi_inference_parent_class)
      ->stop (trans);

  if (self->tflite_delegate != NULL) {
    TfLiteExternalDelegateDelete (self->tflite_delegate);
    self->tflite_delegate = NULL;
  }

  return ret;
}

View File

@ -0,0 +1,39 @@
/*
* GStreamer gstreamer-tflitevsiinference
* Copyright (C) 2025 Collabora Ltd
*
* gsttflitevsiinference.h
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_TFLITE_VSI_INFERENCE_H__
#define __GST_TFLITE_VSI_INFERENCE_H__
#include "gsttfliteinference.h"
G_BEGIN_DECLS
/* Final GObject type: TFLite inference element using the VSI NPU delegate. */
#define GST_TYPE_TFLITE_VSI_INFERENCE (gst_tflite_vsi_inference_get_type())
G_DECLARE_FINAL_TYPE (GstTFliteVsiInference, gst_tflite_vsi_inference, GST,
TFLITE_VSI_INFERENCE, GstTFliteInference)
GST_ELEMENT_REGISTER_DECLARE (tflite_vsi_inference)
G_END_DECLS
#endif /* __GST_TFLITE_VSI_INFERENCE_H__ */

View File

@ -6,13 +6,19 @@ tflite_sources = [
tflite_headers = [
'gsttfliteinference.h',
'gsttfliteedgetpuinference.h'
'gsttfliteedgetpuinference.h',
'gsttflitevsiinference.h',
]
edgetpu_sources = [
'gsttfliteedgetpuinference.c'
]
vsi_sources = [
'gsttflitevsiinference.c',
'VX/vsi_npu_custom_op.cc'
]
doc_sources = []
foreach s: tflite_sources + tflite_headers + edgetpu_sources
doc_sources += meson.current_source_dir() / s
@ -64,6 +70,10 @@ if tensorflow_lite_dep.found() and tensorflow_lite_header_found
tflite_extra_dep += [edgetpu_dep]
endif
if get_option('tflite-vsi').allowed()
tflite_sources += vsi_sources
tflite_c_args += ['-Wno-aggregate-return', '-DTFLITE_VSI']
endif
gsttflite = library('gsttflite',
tflite_sources,

View File

@ -178,6 +178,7 @@ option('svtjpegxs', type : 'feature', value : 'auto', description : 'Scalable Vi
option('teletext', type : 'feature', value : 'auto', description : 'Teletext plugin')
option('tflite', type : 'feature', value : 'auto', description : 'TensorFlow Lite (LiteRT) plugin')
option('tflite-edgetpu', type : 'feature', value : 'auto', description : 'TensorFlow Lite (LiteRT) EdgeTPU (Coral) support')
option('tflite-vsi', type : 'feature', value : 'disabled', description : 'TensorFlow Lite (LiteRT) Verisilicon support')
option('tinyalsa', type : 'feature', value : 'auto', description : 'TinyALSA plugin')
option('transcode', type : 'feature', value : 'auto', description : 'Transcode plugin')
option('ttml', type : 'feature', value : 'auto', description : 'TTML subtitle parser and renderer plugin')