tflite: Add Coral EdgeTPU inference element

Part-of: <https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/8523>
Olivier Crête 2025-04-06 12:05:48 -04:00 committed by GStreamer Marge Bot
parent 05782229ee
commit c941ded4ba
7 changed files with 248 additions and 7 deletions

View File

@@ -26,10 +26,20 @@
#include "gsttfliteinference.h"
#ifdef EDGETPU
#include "gsttfliteedgetpuinference.h"
#endif
static gboolean
plugin_init (GstPlugin *plugin)
plugin_init (GstPlugin * plugin)
{
return GST_ELEMENT_REGISTER (tflite_inference, plugin);
gboolean ret = GST_ELEMENT_REGISTER (tflite_inference, plugin);
#ifdef EDGETPU
ret |= GST_ELEMENT_REGISTER (tflite_edgetpu_inference, plugin);
#endif
return ret;
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,

View File

@@ -0,0 +1,165 @@
/*
* GStreamer
* Copyright (C) 2025 Collabora Ltd.
*
* gsttfliteedgetpuinference.c
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
/**
* SECTION:element-tfliteedgetpuinference
* @short_description: Run a TFLite inference model on video buffers using an EdgeTPU device
*
* This element can apply a TFLite model to video buffers. It attaches
* the tensor output to the buffer as a @ref GstTensorMeta.
*
* It uses Google Coral EdgeTPU devices.
*
* To install TFLite on your system, follow the instructions in the
* README.md shipped with this plugin.
*
* ## Example launch command:
*
* GST_DEBUG=ssdobjectdetector:5 \
* gst-launch-1.0 filesrc location=tflite-models/images/bus.jpg ! \
* jpegdec ! videoconvert ! tfliteedgetpuinference model-file=tflite-models/models/ssd_mobilenet_v1_coco.tflite ! \
* ssdobjectdetector label-file=tflite-models/labels/COCO_classes.txt ! videoconvert ! imagefreeze ! autovideosink
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gsttfliteedgetpuinference.h"
#include <libedgetpu/edgetpu_c.h>
typedef struct _GstTFliteEdgeTpuInference
{
  GstTFliteInference parent;

  TfLiteDelegate *tflite_delegate;
} GstTFliteEdgeTpuInference;

GST_DEBUG_CATEGORY (tflite_edgetpu_inference_debug);
#define GST_CAT_DEFAULT tflite_edgetpu_inference_debug

GST_ELEMENT_REGISTER_DEFINE (tflite_edgetpu_inference,
    "tfliteedgetpuinference", GST_RANK_NONE, GST_TYPE_TFLITE_EDGETPU_INFERENCE);

static gboolean gst_tflite_edgetpu_update_options (GstTFliteInference * inf,
    TfLiteInterpreterOptions * interpreter_options);
static gboolean gst_tflite_edgetpu_inference_stop (GstBaseTransform * trans);

G_DEFINE_TYPE (GstTFliteEdgeTpuInference, gst_tflite_edgetpu_inference,
    GST_TYPE_TFLITE_INFERENCE);

static void
gst_tflite_edgetpu_inference_class_init (GstTFliteEdgeTpuInferenceClass * klass)
{
  GstElementClass *element_class = (GstElementClass *) klass;
  GstBaseTransformClass *basetransform_class = (GstBaseTransformClass *) klass;
  GstTFliteInferenceClass *tflite_class = (GstTFliteInferenceClass *) klass;

  GST_DEBUG_CATEGORY_INIT (tflite_edgetpu_inference_debug,
      "tfliteedgetpuinference", 0, "TFLite EdgeTPU inference");

  gst_element_class_set_static_metadata (element_class,
      "tfliteedgetpuinference",
      "Filter/Effect",
      "Apply a neural network to video frames and create tensor output"
      " using the Google Edge TPU",
      "Olivier Crête <olivier.crete@collabora.com>");

  basetransform_class->stop = gst_tflite_edgetpu_inference_stop;
  tflite_class->update_options = gst_tflite_edgetpu_update_options;
}

static void
gst_tflite_edgetpu_inference_init (GstTFliteEdgeTpuInference * self)
{
}
static gboolean
gst_tflite_edgetpu_update_options (GstTFliteInference * inf,
    TfLiteInterpreterOptions * interpreter_options)
{
  GstTFliteEdgeTpuInference *self = GST_TFLITE_EDGETPU_INFERENCE (inf);
  size_t num_devices = 0;
  struct edgetpu_device *devices;

  devices = edgetpu_list_devices (&num_devices);

  if (num_devices == 0) {
    GST_ERROR_OBJECT (self,
        "Could not create EdgeTPU session because no EdgeTPU"
        " device is connected");
    return FALSE;
  }

  /* Not passing any extra options for now */
  self->tflite_delegate = edgetpu_create_delegate (devices[0].type,
      devices[0].path, NULL, 0);
  if (self->tflite_delegate == NULL) {
    GST_ERROR_OBJECT (self, "Could not create EdgeTPU session");
    edgetpu_free_devices (devices);
    return FALSE;
  }

  const gchar *dev_type_str = "";
  switch (devices[0].type) {
    case EDGETPU_APEX_PCI:
      dev_type_str = "PCIe";
      break;
    case EDGETPU_APEX_USB:
      dev_type_str = "USB";
      break;
    default:
      dev_type_str = "unknown";
      break;
  }

  GST_DEBUG ("Using EdgeTPU version %s device of type %s at %s",
      edgetpu_version (), dev_type_str, devices[0].path);

  edgetpu_free_devices (devices);

  if (self->tflite_delegate)
    TfLiteInterpreterOptionsAddDelegate (interpreter_options,
        self->tflite_delegate);

  return TRUE;
}
static gboolean
gst_tflite_edgetpu_inference_stop (GstBaseTransform * trans)
{
  GstTFliteEdgeTpuInference *self = GST_TFLITE_EDGETPU_INFERENCE (trans);
  gboolean ret;

  ret = GST_BASE_TRANSFORM_CLASS (gst_tflite_edgetpu_inference_parent_class)
      ->stop (trans);

  if (self->tflite_delegate)
    edgetpu_free_delegate (self->tflite_delegate);
  self->tflite_delegate = NULL;

  return ret;
}

View File

@@ -0,0 +1,39 @@
/*
* GStreamer gstreamer-tfliteinference
* Copyright (C) 2024 Collabora Ltd
*
* gsttfliteedgetpuinference.h
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_TFLITE_EDGETPU_INFERENCE_H__
#define __GST_TFLITE_EDGETPU_INFERENCE_H__
#include "gsttfliteinference.h"
G_BEGIN_DECLS

#define GST_TYPE_TFLITE_EDGETPU_INFERENCE (gst_tflite_edgetpu_inference_get_type())
G_DECLARE_FINAL_TYPE (GstTFliteEdgeTpuInference, gst_tflite_edgetpu_inference, GST,
    TFLITE_EDGETPU_INFERENCE, GstTFliteInference)

GST_ELEMENT_REGISTER_DECLARE (tflite_edgetpu_inference)

G_END_DECLS

#endif /* __GST_TFLITE_EDGETPU_INFERENCE_H__ */

View File

@@ -42,8 +42,6 @@
#include "config.h"
#endif
#include "tensorflow/lite/c/c_api.h"
#include <gst/gst.h>
#include <gst/video/video.h>
#include "gsttfliteinference.h"
@@ -511,6 +509,10 @@ gst_tflite_inference_start (GstBaseTransform * trans)
        priv->numberOfThreads);
  }

  if (klass->update_options)
    if (!klass->update_options (self, priv->interpreter_options))
      goto error;

  priv->interpreter = TfLiteInterpreterCreate (priv->model,
      priv->interpreter_options);
  if (!priv->interpreter) {

View File

@@ -26,6 +26,8 @@
#include <gst/gst.h>
#include <gst/base/base.h>
#include "tensorflow/lite/c/c_api.h"
G_BEGIN_DECLS
#define GST_TYPE_TFLITE_INFERENCE (gst_tflite_inference_get_type())
@@ -36,6 +38,9 @@ GST_ELEMENT_REGISTER_DECLARE (tflite_inference)
struct _GstTFliteInferenceClass
{
  GstBaseTransformClass basetransform;

  gboolean (*update_options) (GstTFliteInference * self,
      TfLiteInterpreterOptions * interpreter_options);
};
G_END_DECLS

View File

@@ -5,11 +5,16 @@ tflite_sources = [
]
tflite_headers = [
'gstfliteinference.h'
'gsttfliteinference.h',
'gsttfliteedgetpuinference.h'
]
edgetpu_sources = [
'gsttfliteedgetpuinference.c'
]
doc_sources = []
foreach s: tflite_sources + tflite_headers
foreach s: tflite_sources + tflite_headers + edgetpu_sources
doc_sources += meson.current_source_dir() / s
endforeach
@@ -29,6 +34,7 @@ tensorflow_lite_header_found = cc.has_header('tensorflow/lite/c/c_api.h',
required: get_option('tflite'))
if tensorflow_lite_dep.found() and tensorflow_lite_header_found
tflite_extra_dep = []
tflite_c_args = []
if cc.has_header_symbol('tensorflow/lite/c/c_api.h', 'kTfLiteBFloat16',
@@ -36,12 +42,25 @@ if tensorflow_lite_dep.found() and tensorflow_lite_header_found
tflite_c_args += ['-DTFLITE_HAS_BFLOAT16']
endif
edgetpu_dep = cc.find_library('edgetpu',
required : get_option('tflite-edgetpu'))
if edgetpu_dep.found() and cc.has_header('libedgetpu/edgetpu_c.h',
dependencies: edgetpu_dep,
required: get_option('tflite-edgetpu'))
tflite_c_args += ['-DEDGETPU','-DTFLITE_USE_OPAQUE_DELEGATE=0',
'-DTFLITE_WITH_STABLE_ABI=0']
tflite_sources += edgetpu_sources
tflite_extra_dep += [edgetpu_dep]
endif
gsttflite = library('gsttflite',
tflite_sources,
c_args : gst_plugins_bad_args + tflite_c_args,
include_directories : [configinc, libsinc],
dependencies : [gstbase_dep, gstvideo_dep, gstanalytics_dep,
tensorflow_lite_dep,libm, gio_dep],
tensorflow_lite_dep,libm, gio_dep, tflite_extra_dep],
install : true,
install_dir : plugins_install_dir,
)

View File

@@ -177,6 +177,7 @@ option('svthevcenc', type : 'feature', value : 'auto', description : 'Scalable V
option('svtjpegxs', type : 'feature', value : 'auto', description : 'Scalable Video Technology for JPEG-XS plugin')
option('teletext', type : 'feature', value : 'auto', description : 'Teletext plugin')
option('tflite', type : 'feature', value : 'auto', description : 'TensorFlow Lite (LiteRT) plugin')
option('tflite-edgetpu', type : 'feature', value : 'auto', description : 'TensorFlow Lite (LiteRT) EdgeTPU (Coral) support')
option('tinyalsa', type : 'feature', value : 'auto', description : 'TinyALSA plugin')
option('transcode', type : 'feature', value : 'auto', description : 'Transcode plugin')
option('ttml', type : 'feature', value : 'auto', description : 'TTML subtitle parser and renderer plugin')
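
For reference, enabling the new Coral support at configure time would look roughly like this (a sketch, not part of the commit: the build directory name is arbitrary, libedgetpu and its header must already be installed for the dependency check in meson.build above to pass, and from the GStreamer mono repo the options would need a gst-plugins-bad: prefix):

meson setup builddir -Dtflite=enabled -Dtflite-edgetpu=enabled
ninja -C builddir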