wpe2: New WPE plugin making use of the "WPE Platform API"

Currently only a wpevideosrc2 element is exposed. GL and SHM buffer rendering
are supported, as are navigation events (touch is untested). Audio pad handling
is not supported yet, as it requires new WPE API.
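For quick reference, the element documentation added in this commit includes
this basic pipeline (quoted from the docs below):

```shell
gst-launch-1.0 -v wpevideosrc2 location="https://gstreamer.freedesktop.org" ! queue ! glimagesink
```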

Part-of: <https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/8789>
Philippe Normand 2025-04-28 09:51:23 +01:00
parent 7288b034ac
commit 28d97212c5
17 changed files with 2591 additions and 1 deletion


@@ -1,6 +1,7 @@
 subprojects/gst-plugins-bad/ext/nvcomp
 subprojects/gst-plugins-bad/ext/nvdswrapper
 subprojects/gst-plugins-bad/ext/qt6d3d11
+subprojects/gst-plugins-bad/ext/wpe2
 subprojects/gst-plugins-bad/gst-libs/gst/cuda
 subprojects/gst-plugins-bad/gst-libs/gst/d3d11
 subprojects/gst-plugins-bad/gst-libs/gst/d3d12


@@ -20830,6 +20830,7 @@
 "GstWpeSrc:draw-background",
 "GstWpeSrc:location",
 "GstWpeVideoSrc!src",
+"GstWpeVideoSrc2!src",
 "GstWrapperCameraBinSrc",
 "GstWrapperCameraBinSrc!imgsrc",
 "GstWrapperCameraBinSrc!vfsrc",
@@ -39216,6 +39217,7 @@
 "element-webvttenc",
 "element-wildmididec",
 "element-wpesrc",
+"element-wpesrc2",
 "element-wrappercamerabinsrc",
 "element-x264enc",
 "element-x265enc",
@@ -69485,6 +69487,7 @@
 "plugin-winks",
 "plugin-winscreencap",
 "plugin-wpe",
+"plugin-wpe2",
 "plugin-x264",
 "plugin-x265",
 "plugin-ximagesink",
@@ -72088,13 +72091,22 @@
 "wpesrc",
 "wpesrc::configure-web-view",
 "wpesrc::load-bytes",
+"wpesrc::run-javascript",
 "wpesrc:draw-background",
 "wpesrc:location",
 "wpevideosrc",
 "wpevideosrc::configure-web-view",
 "wpevideosrc::load-bytes",
+"wpevideosrc::run-javascript",
 "wpevideosrc:draw-background",
 "wpevideosrc:location",
+"wpevideosrc2",
+"wpevideosrc2::configure-web-view",
+"wpevideosrc2::load-bytes",
+"wpevideosrc2::wpe-view-created",
+"wpevideosrc2::run-javascript",
+"wpevideosrc2:draw-background",
+"wpevideosrc2:location",
 "wrappercamerabinsrc",
 "wrappercamerabinsrc:video-source",
 "wrappercamerabinsrc:video-source-filter",
@@ -72205,4 +72217,4 @@
 "zxing:message",
 "zxing:try-faster",
 "zxing:try-rotate"
 ]


@@ -79,6 +79,7 @@ subdir('webrtcdsp')
 subdir('webp')
 subdir('wildmidi')
 subdir('wpe')
+subdir('wpe2')
 subdir('x265')
 subdir('zxing')
 subdir('zbar')


@@ -0,0 +1,24 @@
/* Copyright (C) <2018, 2019, 2025> Philippe Normand <philn@igalia.com>
* Copyright (C) <2018, 2019> Žan Doberšek <zdobersek@igalia.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#pragma once
#include <gst/gst.h>
#define DEFAULT_LOCATION "about:blank"


@@ -0,0 +1,48 @@
/* Copyright (C) <2018, 2025> Philippe Normand <philn@igalia.com>
* Copyright (C) <2018> Žan Doberšek <zdobersek@igalia.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include "gstwpevideosrc.h"
#include "gstwpe.h"
GST_DEBUG_CATEGORY (wpe_video_src_debug);
GST_DEBUG_CATEGORY (wpe_view_debug);
GST_DEBUG_CATEGORY (wpe_src_debug);
static gboolean
plugin_init (GstPlugin * plugin)
{
gboolean result;
GST_DEBUG_CATEGORY_INIT (wpe_video_src_debug, "wpevideosrc2", 0,
"WPE Video Source");
GST_DEBUG_CATEGORY_INIT (wpe_view_debug, "wpeview2", 0, "WPE Threaded View");
GST_DEBUG_CATEGORY_INIT (wpe_src_debug, "wpesrc2", 0, "WPE Source");
result = gst_element_register (plugin, "wpevideosrc2", GST_RANK_NONE,
GST_TYPE_WPE_VIDEO_SRC);
return result;
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, GST_VERSION_MINOR,
wpe2, "WPE src plugin", plugin_init, VERSION, GST_LICENSE, PACKAGE,
GST_PACKAGE_ORIGIN)


@@ -0,0 +1,245 @@
/* Copyright (C) <2025> Philippe Normand <philn@igalia.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include "gstwpedisplay.h"
#include "gstwpeview.h"
#include "gstwpetoplevel.h"
#include <EGL/egl.h>
#include <gst/gl/gstglfeature.h>
#include <EGL/eglext.h>
GST_DEBUG_CATEGORY_EXTERN (wpe_view_debug);
#define GST_CAT_DEFAULT wpe_view_debug
enum
{
SIGNAL_WPE_VIEW_CREATED,
LAST_SIGNAL
};
static guint gst_wpe_display_signals[LAST_SIGNAL] = { 0 };
struct _WPEDisplayGStreamer
{
WPEDisplay parent;
GstGLDisplay *gstDisplay;
GstGLContext *gstContext;
GstGLDisplayEGL *gstEGLDisplay;
EGLDisplay eglDisplay;
gchar *drm_device;
gchar *drm_render_node;
};
#define wpe_display_gstreamer_parent_class parent_class
G_DEFINE_TYPE (WPEDisplayGStreamer, wpe_display_gstreamer, WPE_TYPE_DISPLAY);
typedef EGLBoolean (*eglQueryDisplayAttribEXTFunc) (EGLDisplay, EGLint,
EGLAttrib *);
typedef const char *(*eglQueryDeviceStringEXTFunc) (EGLDeviceEXT device,
EGLint name);
typedef struct _VTable
{
eglQueryDisplayAttribEXTFunc eglQueryDisplayAttribEXT;
eglQueryDeviceStringEXTFunc eglQueryDeviceStringEXT;
} VTable;
static gboolean
wpe_display_gstreamer_connect (WPEDisplay * display, GError ** error)
{
auto self = WPE_DISPLAY_GSTREAMER (display);
if (!self->gstDisplay)
return TRUE;
if (gst_gl_context_get_gl_platform (self->gstContext) == GST_GL_PLATFORM_EGL) {
self->gstEGLDisplay = gst_gl_display_egl_from_gl_display (self->gstDisplay);
} else {
g_set_error_literal (error, WPE_VIEW_ERROR, WPE_VIEW_ERROR_RENDER_FAILED,
"Available GStreamer GL Context is not EGL - not creating an EGL display from it");
return FALSE;
}
const gchar *egl_exts = eglQueryString (EGL_NO_DISPLAY, EGL_EXTENSIONS);
self->eglDisplay = (EGLDisplay)
gst_gl_display_get_handle (GST_GL_DISPLAY (self->gstEGLDisplay));
if (!gst_gl_check_extension ("EGL_EXT_device_query", egl_exts)) {
g_set_error_literal (error, WPE_VIEW_ERROR, WPE_VIEW_ERROR_RENDER_FAILED,
"Failed to initialize rendering: 'EGL_EXT_device_query' not available");
return FALSE;
}
EGLDeviceEXT eglDevice;
VTable vt;
vt.eglQueryDisplayAttribEXT = (eglQueryDisplayAttribEXTFunc)
gst_gl_context_get_proc_address (self->gstContext,
"eglQueryDisplayAttribEXT");
if (!vt.eglQueryDisplayAttribEXT (self->eglDisplay, EGL_DEVICE_EXT,
reinterpret_cast < EGLAttrib * >(&eglDevice))) {
g_set_error_literal (error, WPE_VIEW_ERROR, WPE_VIEW_ERROR_RENDER_FAILED,
"Failed to initialize rendering: 'EGLDeviceEXT' not available");
return FALSE;
}
vt.eglQueryDeviceStringEXT = (eglQueryDeviceStringEXTFunc)
gst_gl_context_get_proc_address (self->gstContext,
"eglQueryDeviceStringEXT");
const char *extensions =
vt.eglQueryDeviceStringEXT (eglDevice, EGL_EXTENSIONS);
if (gst_gl_check_extension ("EGL_EXT_device_drm", extensions))
self->drm_device =
g_strdup (vt.eglQueryDeviceStringEXT (eglDevice,
EGL_DRM_DEVICE_FILE_EXT));
else {
// FIXME: This kind of hack is needed when using gtkglsink. glimagesink somehow works as expected.
const gchar *render_node_path = g_getenv ("GST_WPE_DRM_RENDER_NODE_PATH");
if (render_node_path) {
GST_DEBUG ("Setting render node path from GST_WPE_DRM_RENDER_NODE_PATH "
"environment variable");
self->drm_render_node = g_strdup (render_node_path);
} else {
GST_WARNING ("'EGL_EXT_device_drm' not available, hardcoding render node "
"to /dev/dri/renderD128");
self->drm_render_node = g_strdup ("/dev/dri/renderD128");
}
return TRUE;
}
if (gst_gl_check_extension ("EGL_EXT_device_drm_render_node", extensions))
self->drm_render_node =
g_strdup (vt.eglQueryDeviceStringEXT (eglDevice,
EGL_DRM_RENDER_NODE_FILE_EXT));
else {
g_set_error_literal (error, WPE_VIEW_ERROR, WPE_VIEW_ERROR_RENDER_FAILED,
"Failed to initialize rendering: 'EGL_EXT_device_drm_render_node' not available");
return FALSE;
}
return TRUE;
}
static WPEView *
wpe_display_gstreamer_create_view (WPEDisplay * display)
{
auto gst_display = WPE_DISPLAY_GSTREAMER (display);
auto view = wpe_view_gstreamer_new (gst_display);
GValue args[2] = { {0}, {0} };
g_value_init (&args[0], WPE_TYPE_DISPLAY_GSTREAMER);
g_value_set_object (&args[0], gst_display);
g_value_init (&args[1], WPE_TYPE_VIEW);
g_value_set_object (&args[1], view);
g_signal_emitv (args, gst_wpe_display_signals[SIGNAL_WPE_VIEW_CREATED], 0,
NULL);
g_value_unset (&args[0]);
g_value_unset (&args[1]);
auto toplevel = wpe_toplevel_gstreamer_new (gst_display);
wpe_view_set_toplevel (view, toplevel);
g_object_unref (toplevel);
return view;
}
static gpointer
wpe_display_gstreamer_get_egl_display (WPEDisplay * display, GError **)
{
return WPE_DISPLAY_GSTREAMER (display)->eglDisplay;
}
static const char *
wpe_display_gstreamer_get_drm_device (WPEDisplay * display)
{
return WPE_DISPLAY_GSTREAMER (display)->drm_device;
}
static const char *
wpe_display_gstreamer_get_drm_render_node (WPEDisplay * display)
{
auto self = WPE_DISPLAY_GSTREAMER (display);
if (self->drm_render_node)
return self->drm_render_node;
return self->drm_device;
}
static void
wpe_display_gstreamer_init (WPEDisplayGStreamer * display)
{
display->drm_render_node = nullptr;
display->drm_device = nullptr;
}
static void
wpe_display_gstreamer_finalize (GObject * object)
{
auto self = WPE_DISPLAY_GSTREAMER (object);
g_clear_pointer (&self->drm_device, g_free);
g_clear_pointer (&self->drm_render_node, g_free);
gst_clear_object (&self->gstEGLDisplay);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
wpe_display_gstreamer_class_init (WPEDisplayGStreamerClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
gobject_class->finalize = wpe_display_gstreamer_finalize;
WPEDisplayClass *displayClass = WPE_DISPLAY_CLASS (klass);
displayClass->connect = wpe_display_gstreamer_connect;
displayClass->create_view = wpe_display_gstreamer_create_view;
displayClass->get_egl_display = wpe_display_gstreamer_get_egl_display;
displayClass->get_drm_device = wpe_display_gstreamer_get_drm_device;
displayClass->get_drm_render_node = wpe_display_gstreamer_get_drm_render_node;
gst_wpe_display_signals[SIGNAL_WPE_VIEW_CREATED] =
g_signal_new ("wpe-view-created", G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, G_TYPE_NONE, 1, WPE_TYPE_VIEW);
}
WPEDisplay *
wpe_display_gstreamer_new ()
{
auto display =
WPE_DISPLAY_GSTREAMER (g_object_new (WPE_TYPE_DISPLAY_GSTREAMER,
nullptr));
return WPE_DISPLAY (display);
}
void
wpe_display_gstreamer_set_gl (WPEDisplay * display, GstGLDisplay * glDisplay,
GstGLContext * context)
{
auto self = WPE_DISPLAY_GSTREAMER (display);
self->gstDisplay = glDisplay;
self->gstContext = context;
}


@@ -0,0 +1,42 @@
/* Copyright (C) <2025> Philippe Normand <philn@igalia.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef GstWPEDisplay_h
#define GstWPEDisplay_h
#include <glib-object.h>
#include <wpe/wpe-platform.h>
#include <gst/gl/egl/gstgldisplay_egl.h>
#include <gst/gl/gl.h>
#include <gst/gl/gstglfuncs.h>
#include "gstwpevideosrc.h"
G_BEGIN_DECLS
#define WPE_TYPE_DISPLAY_GSTREAMER (wpe_display_gstreamer_get_type())
G_DECLARE_FINAL_TYPE(WPEDisplayGStreamer, wpe_display_gstreamer, WPE,
DISPLAY_GSTREAMER, WPEDisplay)
WPEDisplay *wpe_display_gstreamer_new();
void wpe_display_gstreamer_set_gl(WPEDisplay *, GstGLDisplay *, GstGLContext *);
G_END_DECLS
#endif /* GstWPEDisplay_h */


@@ -0,0 +1,946 @@
/* Copyright (C) <2018, 2019, 2020, 2025> Philippe Normand <philn@igalia.com>
* Copyright (C) <2018> Žan Doberšek <zdobersek@igalia.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include "gstwpe.h"
#include "gstwpethreadedview.h"
#include "gstwpedisplay.h"
#include "gstwpeview.h"
#include <gst/gl/gl.h>
#include <gst/gl/egl/gsteglimage.h>
#include <gst/gl/egl/gstgldisplay_egl.h>
#include <cstdio>
#include <mutex>
GST_DEBUG_CATEGORY_EXTERN (wpe_view_debug);
#define GST_CAT_DEFAULT wpe_view_debug
/* *INDENT-OFF* */
class GMutexHolder {
public:
GMutexHolder (GMutex & mutex)
:m(mutex)
{
g_mutex_lock (&m);
}
~GMutexHolder ()
{
g_mutex_unlock (&m);
}
private:
GMutex &m;
};
/* *INDENT-ON* */
static GstWPEContextThread *s_view = NULL;
GstWPEContextThread & GstWPEContextThread::singleton ()
{
/* *INDENT-OFF* */
static gsize initialized = 0;
/* *INDENT-ON* */
if (g_once_init_enter (&initialized)) {
s_view = new GstWPEContextThread;
g_once_init_leave (&initialized, 1);
}
return *s_view;
}
GstWPEContextThread::GstWPEContextThread ()
{
g_mutex_init (&threading.mutex);
g_cond_init (&threading.cond);
threading.ready = FALSE;
{
GMutexHolder lock (threading.mutex);
threading.thread = g_thread_new ("GstWPEContextThread", s_viewThread, this);
while (!threading.ready) {
g_cond_wait (&threading.cond, &threading.mutex);
}
GST_DEBUG ("thread spawned");
}
}
GstWPEContextThread::~GstWPEContextThread ()
{
if (threading.thread) {
g_thread_unref (threading.thread);
threading.thread = nullptr;
}
g_mutex_clear (&threading.mutex);
g_cond_clear (&threading.cond);
}
template < typename Function > void
GstWPEContextThread::dispatch (Function func)
{
/* *INDENT-OFF* */
struct Job {
Job (Function & f)
:func (f)
{
g_mutex_init (&mutex);
g_cond_init (&cond);
dispatched = FALSE;
}
~Job ()
{
g_mutex_clear (&mutex);
g_cond_clear (&cond);
}
void dispatch ()
{
GMutexHolder lock (mutex);
func ();
dispatched = TRUE;
g_cond_signal (&cond);
}
void waitCompletion ()
{
GMutexHolder lock (mutex);
while (!dispatched) {
g_cond_wait (&cond, &mutex);
}
}
Function & func;
GMutex mutex;
GCond cond;
gboolean dispatched;
};
/* *INDENT-ON* */
struct Job job (func);
GSource *source = g_idle_source_new ();
/* *INDENT-OFF* */
g_source_set_callback (source,[](gpointer data)->gboolean {
auto job = static_cast<struct Job *>(data);
job->dispatch ();
return G_SOURCE_REMOVE;
}, &job, nullptr);
/* *INDENT-ON* */
g_source_set_priority (source, G_PRIORITY_DEFAULT);
g_source_attach (source, glib.context);
job.waitCompletion ();
g_source_unref (source);
}
gpointer
GstWPEContextThread::s_viewThread (gpointer data)
{
/* *INDENT-OFF* */
auto &view = *static_cast<GstWPEContextThread *>(data);
/* *INDENT-ON* */
view.glib.context = g_main_context_new ();
view.glib.loop = g_main_loop_new (view.glib.context, FALSE);
g_main_context_push_thread_default (view.glib.context);
{
GSource *source = g_idle_source_new ();
/* *INDENT-OFF* */
g_source_set_callback(source, [](gpointer data) -> gboolean {
auto& view = *static_cast<GstWPEContextThread*>(data);
GMutexHolder lock (view.threading.mutex);
view.threading.ready = TRUE;
g_cond_signal(&view.threading.cond);
return G_SOURCE_REMOVE;
}, &view, nullptr);
/* *INDENT-ON* */
g_source_attach (source, view.glib.context);
g_source_unref (source);
}
g_main_loop_run (view.glib.loop);
g_main_loop_unref (view.glib.loop);
view.glib.loop = nullptr;
g_main_context_pop_thread_default (view.glib.context);
g_main_context_unref (view.glib.context);
view.glib.context = nullptr;
return nullptr;
}
GstWPEThreadedView *
GstWPEContextThread::createWPEView (GstWpeVideoSrc2 * src,
GstGLContext * context,
GstGLDisplay * display, WPEDisplay * wpe_display, int width, int height)
{
GST_DEBUG ("context %p display %p, size (%d,%d)", context, display, width,
height);
GstWPEThreadedView *view = nullptr;
/* *INDENT-OFF* */
dispatch([&]() mutable {
if (!glib.web_context) {
glib.web_context =
WEBKIT_WEB_CONTEXT (g_object_new (WEBKIT_TYPE_WEB_CONTEXT, nullptr));
}
view =
new GstWPEThreadedView (glib.web_context, src, context, display, wpe_display,
width, height);
});
/* *INDENT-ON* */
if (view && view->hasUri ()) {
GST_DEBUG ("waiting load to finish");
view->waitLoadCompletion ();
GST_DEBUG ("done");
}
return view;
}
static gboolean
s_loadFailed (WebKitWebView *, WebKitLoadEvent, gchar * failing_uri,
GError * error, gpointer data)
{
GstWpeVideoSrc2 *src = GST_WPE_VIDEO_SRC (data);
if (g_error_matches (error, WEBKIT_NETWORK_ERROR,
WEBKIT_NETWORK_ERROR_CANCELLED)) {
GST_INFO_OBJECT (src, "Loading cancelled.");
return FALSE;
}
GST_ELEMENT_ERROR (GST_ELEMENT_CAST (src), RESOURCE, FAILED, (NULL),
("Failed to load %s (%s)", failing_uri, error->message));
return FALSE;
}
static gboolean
s_loadFailedWithTLSErrors (WebKitWebView *, gchar * failing_uri,
GTlsCertificate *, GTlsCertificateFlags, gpointer data)
{
// Defer to load-failed.
return FALSE;
}
static void
s_loadProgressChanged (GObject * object, GParamSpec *, gpointer data)
{
GstElement *src = GST_ELEMENT_CAST (data);
// The src element is locked already so we can't call
// gst_element_post_message(). Instead retrieve the bus manually and use it
// directly.
GstBus *bus = GST_ELEMENT_BUS (src);
double estimatedProgress;
g_object_get (object, "estimated-load-progress", &estimatedProgress, nullptr);
gst_object_ref (bus);
gst_bus_post (bus, gst_message_new_element (GST_OBJECT_CAST (src),
gst_structure_new ("wpe-stats", "estimated-load-progress",
G_TYPE_DOUBLE, estimatedProgress * 100, nullptr)));
gst_object_unref (bus);
}
static void
s_webProcessCrashed (WebKitWebView *, WebKitWebProcessTerminationReason reason,
gpointer data)
{
/* *INDENT-OFF* */
auto &view = *static_cast<GstWPEThreadedView *>(data);
/* *INDENT-ON* */
auto *src = view.src ();
gchar *reason_str =
g_enum_to_string (WEBKIT_TYPE_WEB_PROCESS_TERMINATION_REASON, reason);
// If the crash happened during the initial URL load, unblock the wait for
// load completion.
view.notifyLoadFinished ();
// TODO: Emit a signal here and fallback to error system if signal wasn't handled by application?
GST_ELEMENT_ERROR (GST_ELEMENT_CAST (src), RESOURCE, FAILED, (NULL), ("%s",
reason_str));
g_free (reason_str);
}
/* *INDENT-OFF* */
GstWPEThreadedView::GstWPEThreadedView(
WebKitWebContext *web_context, GstWpeVideoSrc2 *src, GstGLContext *context,
GstGLDisplay *display, WPEDisplay *wpe_display, int width, int height)
: m_src(src) {
g_mutex_init (&threading.ready_mutex);
g_cond_init (&threading.ready_cond);
threading.ready = FALSE;
g_mutex_init (&images_mutex);
if (context)
gst.context = GST_GL_CONTEXT (gst_object_ref (context));
if (display)
gst.display = GST_GL_DISPLAY (gst_object_ref (display));
wpe.width = width;
wpe.height = height;
auto *defaultWebsitePolicies = webkit_website_policies_new_with_policies(
"autoplay", WEBKIT_AUTOPLAY_ALLOW, nullptr);
webkit.view = WEBKIT_WEB_VIEW(g_object_new(
WEBKIT_TYPE_WEB_VIEW, "web-context", web_context, "display", wpe_display,
"website-policies", defaultWebsitePolicies, nullptr));
g_object_unref (wpe_display);
g_object_unref(defaultWebsitePolicies);
wpe.view = webkit_web_view_get_wpe_view (webkit.view);
wpe_view_gstreamer_set_client (WPE_VIEW_GSTREAMER (wpe.view), this);
if (auto wpeToplevel = wpe_view_get_toplevel (wpe.view))
wpe_toplevel_resize (wpeToplevel, width, height);
// FIXME: unmap when appropriate and implement can_be_mapped if needed.
wpe_view_map (wpe.view);
g_signal_connect (webkit.view, "load-failed", G_CALLBACK (s_loadFailed), src);
g_signal_connect (webkit.view, "load-failed-with-tls-errors",
G_CALLBACK (s_loadFailedWithTLSErrors), src);
g_signal_connect (webkit.view, "notify::estimated-load-progress",
G_CALLBACK (s_loadProgressChanged), src);
g_signal_connect (webkit.view, "web-process-terminated",
G_CALLBACK (s_webProcessCrashed), this);
auto *settings = webkit_web_view_get_settings (webkit.view);
webkit_settings_set_enable_webaudio (settings, TRUE);
gst_wpe_video_src_configure_web_view (src, webkit.view);
gchar *location;
gboolean drawBackground = TRUE;
g_object_get (src, "location", &location, "draw-background", &drawBackground, nullptr);
setDrawBackground (drawBackground);
if (location) {
loadUriUnlocked (location);
g_free (location);
}
}
/* *INDENT-ON* */
GstWPEThreadedView::~GstWPEThreadedView ()
{
GstEGLImage *egl_pending = NULL;
GstEGLImage *egl_committed = NULL;
GstBuffer *shm_pending = NULL;
GstBuffer *shm_committed = NULL;
GST_TRACE ("%p destroying", this);
g_mutex_clear (&threading.ready_mutex);
g_cond_clear (&threading.ready_cond);
{
GMutexHolder lock (images_mutex);
if (egl.pending) {
egl_pending = egl.pending;
egl.pending = nullptr;
}
if (egl.committed) {
egl_committed = egl.committed;
egl.committed = nullptr;
}
if (shm.pending) {
GST_TRACE ("%p freeing shm pending %" GST_PTR_FORMAT, this, shm.pending);
shm_pending = shm.pending;
shm.pending = nullptr;
}
if (shm.committed) {
GST_TRACE ("%p freeing shm commited %" GST_PTR_FORMAT, this,
shm.committed);
shm_committed = shm.committed;
shm.committed = nullptr;
}
}
if (egl_pending)
gst_egl_image_unref (egl_pending);
if (egl_committed)
gst_egl_image_unref (egl_committed);
if (shm_pending)
gst_buffer_unref (shm_pending);
if (shm_committed)
gst_buffer_unref (shm_committed);
/* *INDENT-OFF* */
GstWPEContextThread::singleton().dispatch([&]() {
if (webkit.view) {
g_object_unref (webkit.view);
webkit.view = nullptr;
}
});
/* *INDENT-ON* */
if (gst.display_egl) {
gst_object_unref (gst.display_egl);
gst.display_egl = nullptr;
}
if (gst.display) {
gst_object_unref (gst.display);
gst.display = nullptr;
}
if (gst.context) {
gst_object_unref (gst.context);
gst.context = nullptr;
}
if (webkit.uri) {
g_free (webkit.uri);
webkit.uri = nullptr;
}
g_mutex_clear (&images_mutex);
GST_TRACE ("%p destroyed", this);
}
void
GstWPEThreadedView::notifyLoadFinished ()
{
GMutexHolder lock (threading.ready_mutex);
if (!threading.ready) {
threading.ready = TRUE;
g_cond_signal (&threading.ready_cond);
}
}
void
GstWPEThreadedView::waitLoadCompletion ()
{
GMutexHolder lock (threading.ready_mutex);
while (!threading.ready)
g_cond_wait (&threading.ready_cond, &threading.ready_mutex);
}
GstEGLImage *
GstWPEThreadedView::image ()
{
GstEGLImage *ret = nullptr;
bool dispatchFrameComplete = false;
GstEGLImage *prev_image = NULL;
{
GMutexHolder lock (images_mutex);
GST_TRACE ("pending %" GST_PTR_FORMAT " (%d) committed %" GST_PTR_FORMAT
" (%d)", egl.pending,
GST_IS_EGL_IMAGE (egl.pending) ?
GST_MINI_OBJECT_REFCOUNT_VALUE (GST_MINI_OBJECT_CAST (egl.pending)) : 0,
egl.committed,
GST_IS_EGL_IMAGE (egl.committed) ?
GST_MINI_OBJECT_REFCOUNT_VALUE (GST_MINI_OBJECT_CAST (egl.committed)) :
0);
if (egl.pending) {
prev_image = egl.committed;
egl.committed = egl.pending;
egl.pending = nullptr;
dispatchFrameComplete = true;
}
if (egl.committed)
ret = egl.committed;
}
if (prev_image) {
gst_egl_image_unref (prev_image);
}
if (dispatchFrameComplete) {
frameComplete ();
}
return ret;
}
GstBuffer *
GstWPEThreadedView::buffer ()
{
GstBuffer *ret = nullptr;
bool dispatchFrameComplete = false;
GstBuffer *prev_image = NULL;
{
GMutexHolder lock (images_mutex);
GST_TRACE ("pending %" GST_PTR_FORMAT " (%d) committed %" GST_PTR_FORMAT
" (%d)", shm.pending,
GST_IS_BUFFER (shm.pending) ?
GST_MINI_OBJECT_REFCOUNT_VALUE (GST_MINI_OBJECT_CAST (shm.pending)) : 0,
shm.committed,
GST_IS_BUFFER (shm.committed) ?
GST_MINI_OBJECT_REFCOUNT_VALUE (GST_MINI_OBJECT_CAST (shm.committed)) :
0);
if (shm.pending) {
prev_image = shm.committed;
shm.committed = shm.pending;
shm.pending = nullptr;
dispatchFrameComplete = true;
}
if (shm.committed)
ret = shm.committed;
}
if (prev_image)
gst_buffer_unref (prev_image);
if (dispatchFrameComplete) {
frameComplete ();
}
return ret;
}
void
GstWPEThreadedView::resize (int width, int height)
{
GST_DEBUG ("resize to %dx%d", width, height);
wpe.width = width;
wpe.height = height;
if (auto wpeToplevel = wpe_view_get_toplevel (wpe.view))
wpe_toplevel_resize (wpeToplevel, wpe.width, wpe.height);
}
void
GstWPEThreadedView::clearBuffers ()
{
bool dispatchFrameComplete = false;
{
GMutexHolder lock (images_mutex);
if (shm.pending) {
auto meta = gst_buffer_get_video_meta (shm.pending);
if (static_cast < int >(meta->width) != wpe.width ||
static_cast < int >(meta->height) != wpe.height) {
gst_clear_buffer (&shm.pending);
dispatchFrameComplete = true;
}
}
if (shm.committed) {
auto meta = gst_buffer_get_video_meta (shm.committed);
if (static_cast < int >(meta->width) != wpe.width ||
static_cast < int >(meta->height) != wpe.height) {
gst_clear_buffer (&shm.committed);
dispatchFrameComplete = true;
}
}
}
if (dispatchFrameComplete) {
frameComplete ();
// Wait until the next SHM buffer has been received.
threading.ready = false;
waitLoadCompletion ();
}
}
void
GstWPEThreadedView::loadUriUnlocked (const gchar * uri)
{
if (webkit.uri)
g_free (webkit.uri);
GST_DEBUG ("loading %s", uri);
webkit.uri = g_strdup (uri);
webkit_web_view_load_uri (webkit.view, webkit.uri);
}
void
GstWPEThreadedView::loadUri (const gchar * uri)
{
s_view->dispatch ([&]() {
loadUriUnlocked (uri);});
}
static void
s_runJavascriptFinished (GObject * object, GAsyncResult * result,
gpointer user_data)
{
GError *error = NULL;
g_autoptr (JSCValue) js_result =
webkit_web_view_evaluate_javascript_finish (WEBKIT_WEB_VIEW (object),
result, &error);
// TODO: Pass result back to signal call site using a GstPromise?
(void) js_result;
if (error) {
GST_WARNING ("Error running javascript: %s", error->message);
g_error_free (error);
}
}
void
GstWPEThreadedView::runJavascript (const char *script)
{
/* *INDENT-OFF* */
s_view->dispatch([&]() {
webkit_web_view_evaluate_javascript(webkit.view, script, -1, nullptr,
nullptr, nullptr,
s_runJavascriptFinished, nullptr);
});
/* *INDENT-ON* */
}
void
GstWPEThreadedView::loadData (GBytes * bytes)
{
/* *INDENT-OFF* */
s_view->dispatch([this, bytes = g_bytes_ref(bytes)]() {
webkit_web_view_load_bytes(webkit.view, bytes, nullptr, nullptr, nullptr);
g_bytes_unref(bytes);
});
/* *INDENT-ON* */
}
void
GstWPEThreadedView::setDrawBackground (gboolean drawsBackground)
{
GST_DEBUG ("%s background rendering",
drawsBackground ? "Enabling" : "Disabling");
WebKitColor color;
webkit_color_parse (&color, drawsBackground ? "white" : "transparent");
webkit_web_view_set_background_color (webkit.view, &color);
}
struct WPEBufferContext
{
GstWPEThreadedView *view;
WPEBuffer *buffer;
};
void
GstWPEThreadedView::s_releaseBuffer (gpointer data)
{
/* *INDENT-OFF* */
s_view->dispatch([&]() {
WPEBufferContext *context = static_cast<WPEBufferContext *>(data);
wpe_view_buffer_released(WPE_VIEW(context->view->wpe.view),
context->buffer);
g_object_unref(context->buffer);
g_free(context);
});
/* *INDENT-ON* */
}
/* *INDENT-OFF* */
gboolean GstWPEThreadedView::setPendingBuffer(WPEBuffer *buffer, GError **error)
{
WPEBufferContext *bufferContext = g_new (WPEBufferContext, 1);
bufferContext->view = this;
bufferContext->buffer = g_object_ref (buffer);
if (WPE_IS_BUFFER_DMA_BUF (buffer)) {
auto eglImage = wpe_buffer_import_to_egl_image (buffer, error);
if (*error)
return FALSE;
auto *gstImage =
gst_egl_image_new_wrapped (gst.context, eglImage, GST_GL_RGBA,
bufferContext,[](GstEGLImage *, gpointer data) { s_releaseBuffer (data); });
{
GMutexHolder lock (images_mutex);
GST_TRACE ("EGLImage %p wrapped in GstEGLImage %" GST_PTR_FORMAT,
eglImage, gstImage);
gst_clear_mini_object ((GstMiniObject **) & egl.pending);
egl.pending = gstImage;
m_pending_buffer = g_object_ref (buffer);
notifyLoadFinished ();
}
return TRUE;
}
if (!WPE_IS_BUFFER_SHM (buffer)) {
g_set_error_literal (error, WPE_VIEW_ERROR, WPE_VIEW_ERROR_RENDER_FAILED,
"Unsupported WPEBuffer format");
return FALSE;
}
GBytes *bytes = wpe_buffer_import_to_pixels (buffer, error);
if (!bytes) {
return FALSE;
}
auto width = wpe_buffer_get_width (buffer);
auto height = wpe_buffer_get_height (buffer);
guint stride;
g_object_get (buffer, "stride", &stride, nullptr);
gsize size = g_bytes_get_size (bytes);
auto *gstBuffer = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
(gpointer) g_bytes_get_data (bytes, nullptr), size, 0, size,
bufferContext, s_releaseBuffer);
gsize offsets[1];
gint strides[1];
offsets[0] = 0;
strides[0] = stride;
gst_buffer_add_video_meta_full (gstBuffer, GST_VIDEO_FRAME_FLAG_NONE,
GST_VIDEO_FORMAT_BGRA, width, height, 1, offsets, strides);
{
GMutexHolder lock (images_mutex);
GST_TRACE ("SHM buffer %p wrapped in buffer %" GST_PTR_FORMAT, buffer,
gstBuffer);
gst_clear_buffer (&shm.pending);
shm.pending = gstBuffer;
m_pending_buffer = g_object_ref (buffer);
notifyLoadFinished ();
}
return TRUE;
}
/* *INDENT-ON* */
static uint32_t
_pointer_modifiers_from_gst_event (GstEvent * ev)
{
GstNavigationModifierType modifier_state;
uint32_t modifiers = 0;
if (gst_navigation_event_parse_modifier_state (ev, &modifier_state)) {
if (modifier_state & GST_NAVIGATION_MODIFIER_BUTTON1_MASK)
modifiers |= WPE_MODIFIER_POINTER_BUTTON1;
if (modifier_state & GST_NAVIGATION_MODIFIER_BUTTON2_MASK)
modifiers |= WPE_MODIFIER_POINTER_BUTTON2;
if (modifier_state & GST_NAVIGATION_MODIFIER_BUTTON3_MASK)
modifiers |= WPE_MODIFIER_POINTER_BUTTON3;
if (modifier_state & GST_NAVIGATION_MODIFIER_BUTTON4_MASK)
modifiers |= WPE_MODIFIER_POINTER_BUTTON4;
if (modifier_state & GST_NAVIGATION_MODIFIER_BUTTON5_MASK)
modifiers |= WPE_MODIFIER_POINTER_BUTTON5;
}
return modifiers;
}
static uint32_t
_keyboard_modifiers_from_gst_event (GstEvent * ev)
{
GstNavigationModifierType modifier_state;
uint32_t modifiers = 0;
if (gst_navigation_event_parse_modifier_state (ev, &modifier_state)) {
if (modifier_state & GST_NAVIGATION_MODIFIER_CONTROL_MASK)
modifiers |= WPE_MODIFIER_KEYBOARD_CONTROL;
if (modifier_state & GST_NAVIGATION_MODIFIER_SHIFT_MASK)
modifiers |= WPE_MODIFIER_KEYBOARD_SHIFT;
if (modifier_state & GST_NAVIGATION_MODIFIER_MOD1_MASK)
modifiers |= WPE_MODIFIER_KEYBOARD_ALT;
if (modifier_state & GST_NAVIGATION_MODIFIER_META_MASK)
modifiers |= WPE_MODIFIER_KEYBOARD_META;
}
return modifiers;
}
static WPEModifiers
modifiers_from_gst_event (GstEvent * event)
{
/* *INDENT-OFF* */
return static_cast<WPEModifiers>
(_pointer_modifiers_from_gst_event (event) |
_keyboard_modifiers_from_gst_event (event));
/* *INDENT-ON* */
}
void
GstWPEThreadedView::frameComplete ()
{
GST_TRACE ("frame complete");
/* *INDENT-OFF* */
s_view->dispatch([&]() {
if (m_committed_buffer) {
wpe_view_buffer_released(WPE_VIEW(wpe.view), m_committed_buffer);
g_object_unref(m_committed_buffer);
}
m_committed_buffer = m_pending_buffer;
wpe_view_buffer_rendered (WPE_VIEW (wpe.view), m_committed_buffer);
});
/* *INDENT-ON* */
}
void
GstWPEThreadedView::dispatchEvent (WPEEvent * wpe_event)
{
/* *INDENT-OFF* */
s_view->dispatch([&]() {
wpe_view_event(WPE_VIEW(wpe.view), wpe_event);
wpe_event_unref(wpe_event);
});
/* *INDENT-ON* */
}
/* *INDENT-OFF* */
gboolean GstWPEThreadedView::dispatchKeyboardEvent(GstEvent *event) {
const gchar *key;
if (!gst_navigation_event_parse_key_event (event, &key)) {
return FALSE;
}
auto modifiers = static_cast<WPEModifiers>(_keyboard_modifiers_from_gst_event (event));
auto timestamp = GST_TIME_AS_MSECONDS (GST_EVENT_TIMESTAMP (event));
/* FIXME: This is wrong... The GstNavigation API should pass
hardware-level information, not high-level keysym strings */
gunichar *unichar;
glong items_written;
uint32_t keysym;
unichar = g_utf8_to_ucs4_fast (key, -1, &items_written);
if (items_written == 1)
keysym = (uint32_t) xkb_utf32_to_keysym (*unichar);
else
keysym = (uint32_t) xkb_keysym_from_name (key, XKB_KEYSYM_NO_FLAGS);
WPEEventType event_type = WPE_EVENT_NONE;
if (gst_navigation_event_get_type (event) == GST_NAVIGATION_EVENT_KEY_PRESS)
event_type = WPE_EVENT_KEYBOARD_KEY_DOWN;
else
event_type = WPE_EVENT_KEYBOARD_KEY_UP;
dispatchEvent (wpe_event_keyboard_new (event_type, WPE_VIEW (wpe.view),
WPE_INPUT_SOURCE_KEYBOARD, timestamp, modifiers, keysym, keysym));
return TRUE;
}
gboolean GstWPEThreadedView::dispatchPointerEvent (GstEvent * event)
{
gdouble x, y;
gint button;
if (!gst_navigation_event_parse_mouse_button_event (event, &button, &x, &y)) {
return FALSE;
}
GstNavigationModifierType modifier_state;
guint wpe_button = 0;
if (gst_navigation_event_parse_modifier_state (event, &modifier_state)) {
if (modifier_state & GST_NAVIGATION_MODIFIER_BUTTON1_MASK)
wpe_button = WPE_BUTTON_PRIMARY;
else if (modifier_state & GST_NAVIGATION_MODIFIER_BUTTON2_MASK)
wpe_button = WPE_BUTTON_MIDDLE;
else if (modifier_state & GST_NAVIGATION_MODIFIER_BUTTON3_MASK)
wpe_button = WPE_BUTTON_SECONDARY;
}
auto timestamp = GST_TIME_AS_MSECONDS (GST_EVENT_TIMESTAMP (event));
guint press_count = 0;
WPEEventType type;
if (gst_navigation_event_get_type (event) ==
GST_NAVIGATION_EVENT_MOUSE_BUTTON_PRESS) {
press_count = wpe_view_compute_press_count (WPE_VIEW (wpe.view), x, y,
wpe_button, timestamp);
type = WPE_EVENT_POINTER_DOWN;
} else {
type = WPE_EVENT_POINTER_UP;
}
dispatchEvent (wpe_event_pointer_button_new (type, WPE_VIEW (wpe.view),
WPE_INPUT_SOURCE_MOUSE, timestamp, modifiers_from_gst_event (event),
wpe_button, x, y, press_count));
return TRUE;
}
gboolean GstWPEThreadedView::dispatchPointerMoveEvent (GstEvent * event)
{
gdouble x, y;
if (!gst_navigation_event_parse_mouse_move_event (event, &x, &y)) {
return FALSE;
}
gdouble delta_x = 0;
gdouble delta_y = 0;
if (m_last_pointer_position) {
delta_x = x - m_last_pointer_position->first;
delta_y = y - m_last_pointer_position->second;
}
m_last_pointer_position = { x, y };
auto timestamp = GST_TIME_AS_MSECONDS (GST_EVENT_TIMESTAMP (event));
dispatchEvent (wpe_event_pointer_move_new (WPE_EVENT_POINTER_MOVE,
WPE_VIEW (wpe.view), WPE_INPUT_SOURCE_MOUSE, timestamp,
modifiers_from_gst_event (event), x, y, delta_x, delta_y));
return TRUE;
}
gboolean GstWPEThreadedView::dispatchAxisEvent (GstEvent * event)
{
gdouble x, y, delta_x, delta_y;
if (!gst_navigation_event_parse_mouse_scroll_event (event, &x, &y, &delta_x,
&delta_y)) {
return FALSE;
}
auto timestamp = GST_TIME_AS_MSECONDS (GST_EVENT_TIMESTAMP (event));
dispatchEvent (wpe_event_scroll_new (WPE_VIEW (wpe.view),
WPE_INPUT_SOURCE_MOUSE, timestamp, modifiers_from_gst_event (event),
delta_x, delta_y, TRUE, FALSE, x, y));
return TRUE;
}
gboolean GstWPEThreadedView::dispatchTouchEvent (GstEvent * event)
{
guint touch_id;
gdouble x, y;
if (!gst_navigation_event_parse_touch_event (event, &touch_id, &x, &y, NULL)) {
return FALSE;
}
WPEEventType event_type = WPE_EVENT_NONE;
switch (gst_navigation_event_get_type (event)) {
case GST_NAVIGATION_EVENT_TOUCH_DOWN:
event_type = WPE_EVENT_TOUCH_DOWN;
break;
case GST_NAVIGATION_EVENT_TOUCH_MOTION:
event_type = WPE_EVENT_TOUCH_MOVE;
break;
case GST_NAVIGATION_EVENT_TOUCH_UP:
event_type = WPE_EVENT_TOUCH_UP;
break;
default:
break;
}
auto timestamp = GST_TIME_AS_MSECONDS (GST_EVENT_TIMESTAMP (event));
auto modifiers = static_cast<WPEModifiers>(_keyboard_modifiers_from_gst_event (event));
dispatchEvent (wpe_event_touch_new (event_type, WPE_VIEW (wpe.view),
WPE_INPUT_SOURCE_TOUCHPAD, timestamp, modifiers, touch_id, x, y));
return TRUE;
}
/* *INDENT-ON* */


@@ -0,0 +1,154 @@
/* Copyright (C) <2018, 2025> Philippe Normand <philn@igalia.com>
* Copyright (C) <2018> Žan Doberšek <zdobersek@igalia.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#pragma once
#include <EGL/egl.h>
#include <glib.h>
#include <gst/gl/gstglfuncs.h>
#include <gst/gl/egl/gstgldisplay_egl.h>
#include <wpe/webkit.h>
#include "gstwpevideosrc.h"
#include <optional>
#include <utility>
typedef struct _GstGLContext GstGLContext;
typedef struct _GstGLDisplay GstGLDisplay;
typedef struct _GstEGLImage GstEGLImage;
class GstWPEThreadedView {
public:
GstWPEThreadedView(WebKitWebContext *, GstWpeVideoSrc2 *, GstGLContext *,
GstGLDisplay *, WPEDisplay *, int width, int height);
~GstWPEThreadedView();
/* Used by gstwpeview */
gboolean setPendingBuffer(WPEBuffer*, GError**);
/* Used by wpevideosrc */
void resize(int width, int height);
void loadUri(const gchar*);
void loadData(GBytes*);
void runJavascript(const gchar*);
void setDrawBackground(gboolean);
void clearBuffers();
GstEGLImage* image();
GstBuffer* buffer();
gboolean dispatchKeyboardEvent(GstEvent*);
gboolean dispatchPointerEvent(GstEvent*);
gboolean dispatchPointerMoveEvent(GstEvent*);
gboolean dispatchAxisEvent(GstEvent*);
gboolean dispatchTouchEvent(GstEvent*);
/* Used by GstWPEContextThread */
bool hasUri() const { return webkit.uri; }
void disconnectLoadFailedSignal();
void waitLoadCompletion();
GstWpeVideoSrc2 *src() const { return m_src; }
void notifyLoadFinished();
private:
void frameComplete();
void dispatchEvent(WPEEvent*);
void loadUriUnlocked(const gchar*);
static void s_releaseBuffer(gpointer);
struct {
GstGLContext* context;
GstGLDisplay* display;
GstGLDisplayEGL* display_egl;
} gst { nullptr, nullptr, nullptr };
struct {
WPEView *view;
int width;
int height;
} wpe { nullptr, 0, 0 };
struct {
gchar* uri;
WebKitWebView* view;
} webkit = { nullptr, nullptr };
struct {
GMutex ready_mutex;
GCond ready_cond;
gboolean ready;
} threading;
// This mutex guards access to either egl or shm resources declared below,
// depending on the runtime behavior.
GMutex images_mutex;
struct {
GstEGLImage* pending;
GstEGLImage* committed;
} egl { nullptr, nullptr };
struct {
GstBuffer* pending;
GstBuffer* committed;
} shm { nullptr, nullptr };
struct {
gulong init_ext_sigid;
gulong extension_msg_sigid;
} audio {0, 0};
GstWpeVideoSrc2 *m_src { nullptr };
WPEBuffer *m_pending_buffer { nullptr };
WPEBuffer *m_committed_buffer { nullptr };
std::optional<std::pair<gdouble, gdouble>> m_last_pointer_position;
};
class GstWPEContextThread {
public:
static GstWPEContextThread& singleton();
GstWPEContextThread();
~GstWPEContextThread();
GstWPEThreadedView* createWPEView(GstWpeVideoSrc2*, GstGLContext*, GstGLDisplay*, WPEDisplay*, int width, int height);
template<typename Function>
void dispatch(Function);
private:
static gpointer s_viewThread(gpointer);
struct {
GMutex mutex;
GCond cond;
gboolean ready;
GThread* thread { nullptr };
} threading;
struct {
GMainContext* context;
GMainLoop* loop;
WebKitWebContext* web_context;
} glib { nullptr, nullptr, nullptr };
};


@@ -0,0 +1,65 @@
/* Copyright (C) <2025> Philippe Normand <philn@igalia.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include "gstwpetoplevel.h"
struct _WPEToplevelGStreamer
{
WPEToplevel parent;
};
#define wpe_toplevel_gstreamer_parent_class parent_class
G_DEFINE_TYPE (WPEToplevelGStreamer, wpe_toplevel_gstreamer, WPE_TYPE_TOPLEVEL);
static gboolean
wpe_toplevel_gstreamer_resize (WPEToplevel * toplevel, int width, int height)
{
wpe_toplevel_resized (toplevel, width, height);
/* *INDENT-OFF* */
wpe_toplevel_foreach_view(toplevel, [](WPEToplevel *toplevel, WPEView *view, gpointer) -> gboolean {
int width, height;
wpe_toplevel_get_size (toplevel, &width, &height);
wpe_view_resized (view, width, height);
return FALSE;
}, nullptr);
/* *INDENT-ON* */
return TRUE;
}
static void
wpe_toplevel_gstreamer_init (WPEToplevelGStreamer * toplevel)
{
}
static void
wpe_toplevel_gstreamer_class_init (WPEToplevelGStreamerClass * klass)
{
WPEToplevelClass *toplevelClass = WPE_TOPLEVEL_CLASS (klass);
toplevelClass->resize = wpe_toplevel_gstreamer_resize;
}
WPEToplevel *
wpe_toplevel_gstreamer_new (WPEDisplayGStreamer * display)
{
return WPE_TOPLEVEL (g_object_new (WPE_TYPE_TOPLEVEL_GSTREAMER, "display",
display, nullptr));
}


@@ -0,0 +1,35 @@
/* Copyright (C) <2025> Philippe Normand <philn@igalia.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef GstWPETopLevel_h
#define GstWPETopLevel_h
#include <glib-object.h>
#include "gstwpedisplay.h"
G_BEGIN_DECLS
#define WPE_TYPE_TOPLEVEL_GSTREAMER (wpe_toplevel_gstreamer_get_type())
G_DECLARE_FINAL_TYPE(WPEToplevelGStreamer, wpe_toplevel_gstreamer, WPE,
TOPLEVEL_GSTREAMER, WPEToplevel)
WPEToplevel *wpe_toplevel_gstreamer_new(WPEDisplayGStreamer *);
G_END_DECLS
#endif /* GstWPETopLevel_h */


@@ -0,0 +1,829 @@
/* Copyright (C) <2018, 2025> Philippe Normand <philn@igalia.com>
* Copyright (C) <2018> Žan Doberšek <zdobersek@igalia.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
/**
* SECTION:element-wpevideosrc2
* @title: wpevideosrc2
*
* The wpevideosrc2 element is used to produce a video texture representing a web page
* rendered off-screen by WPE.
*
* Software rendering support is also available. This feature allows
* wpevideosrc2 to be used on machines without a GPU, and/or for testing purposes.
* To enable software rendering support, set the `LIBGL_ALWAYS_SOFTWARE=true`
* environment variable and make sure `video/x-raw, format=BGRA` caps are
* negotiated by the wpevideosrc2 element.
*
* As the webview loading is usually not instantaneous, the wpevideosrc2 element
* emits messages indicating the load progress, in percent. The value is an
* estimate based on the total number of bytes expected to be received for a
* document, including all its possible subresources and child documents. The
* application can handle these `element` messages synchronously, for instance
* to display a progress bar or another visual load indicator (a bus-watch
* sketch follows this documentation block). The load percent value is stored
* in the message structure as a double value named `estimated-load-progress`,
* and the structure name is `wpe-stats`.
*
* ## Example launch lines
*
* ```shell
* gst-launch-1.0 -v wpevideosrc2 location="https://gstreamer.freedesktop.org" ! queue ! glimagesink
* ```
* Shows the GStreamer website homepage
*
* ```shell
* LIBGL_ALWAYS_SOFTWARE=true gst-launch-1.0 -v wpevideosrc2 num-buffers=50 location="https://gstreamer.freedesktop.org" \
* ! videoconvert ! pngenc ! multifilesink location=/tmp/snapshot-%05d.png
* ```
* Saves the first 50 video frames generated for the GStreamer website as PNG files in /tmp.
*
* ```shell
* gst-play-1.0 --videosink gtkglsink web+https://gstreamer.freedesktop.org
* ```
* Shows the GStreamer website homepage as played with GstPlayer in a GTK+ window.
*
* ```shell
* gst-launch-1.0 glvideomixer name=m sink_1::zorder=0 ! glimagesink wpevideosrc2 location="file:///tmp/asset.html" draw-background=0 \
* ! m. videotestsrc ! queue ! glupload ! glcolorconvert ! m.
* ```
* Composite WPE with a video stream in a single OpenGL scene.
*
* ```shell
* gst-launch-1.0 glvideomixer name=m sink_1::zorder=0 sink_0::height=818 sink_0::width=1920 ! gtkglsink \
* wpevideosrc2 location="file:///tmp/asset.html" draw-background=0 ! m.
* uridecodebin uri="http://example.com/Sintel.2010.1080p.mkv" name=d d. ! queue ! glupload ! glcolorconvert ! m.
* ```
* Composite WPE with a video stream; the sink_0 pad properties have to match the video dimensions.
*
* ```shell
* weston -S $HOME/weston-sock -B headless-backend.so --use-gl &
* WAYLAND_DISPLAY=$HOME/weston-sock gst-launch-1.0 wpevideosrc2 location=https://google.com ! queue ! fakevideosink
* ```
* Render Google.com with WPE in a headless Weston compositor. This can be useful for server-side WPE video processing.
*
* Since: 1.28
*/
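/* Editorial sketch, not part of this commit: a minimal GstBus watch consuming
 * the "wpe-stats" element messages documented above. The callback name and the
 * printing are illustrative; only the structure name and the
 * "estimated-load-progress" double field come from this element. */
static gboolean
app_bus_watch (GstBus * bus, GstMessage * msg, gpointer user_data)
{
  if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ELEMENT) {
    const GstStructure *s = gst_message_get_structure (msg);
    if (s && gst_structure_has_name (s, "wpe-stats")) {
      gdouble progress = 0;
      /* The element posts the estimated load progress scaled to 0-100. */
      gst_structure_get_double (s, "estimated-load-progress", &progress);
      g_print ("page load progress: %.0f%%\n", progress);
    }
  }
  /* Keep the watch installed; typically added with
   * gst_bus_add_watch (bus, app_bus_watch, NULL). */
  return TRUE;
}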
/*
* TODO:
* - Better navigation events handling (would require a new GstNavigation API)
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include "gstwpe.h"
#include "gstwpevideosrc.h"
#include <gst/gl/gl.h>
#include <gst/gl/egl/gstglmemoryegl.h>
#include <gst/gl/wayland/gstgldisplay_wayland.h>
#include <gst/video/video.h>
#include <xkbcommon/xkbcommon.h>
#include "gstwpethreadedview.h"
#include "gstwpedisplay.h"
#define DEFAULT_WIDTH 1920
#define DEFAULT_HEIGHT 1080
#define DEFAULT_FPS_N 30
#define DEFAULT_FPS_D 1
#define DEFAULT_DRAW_BACKGROUND TRUE
enum
{
PROP_0,
PROP_LOCATION,
PROP_DRAW_BACKGROUND
};
enum
{
SIGNAL_WPE_VIEW_CREATED,
SIGNAL_CONFIGURE_WEB_VIEW,
SIGNAL_LOAD_BYTES,
SIGNAL_RUN_JAVASCRIPT,
LAST_SIGNAL
};
static guint gst_wpe_video_src_signals[LAST_SIGNAL] = { 0 };
struct _GstWpeVideoSrc2
{
GstGLBaseSrc parent;
/* properties */
gchar *location;
gboolean draw_background;
GBytes *bytes;
gboolean gl_enabled;
gint64 n_frames; /* total frames sent */
WPEDisplay *display;
GstWPEThreadedView *view;
GMutex lock;
};
#define WPE_LOCK(o) g_mutex_lock(&(o)->lock)
#define WPE_UNLOCK(o) g_mutex_unlock(&(o)->lock)
GST_DEBUG_CATEGORY_EXTERN (wpe_video_src_debug);
#define GST_CAT_DEFAULT wpe_video_src_debug
#define gst_wpe_video_src_parent_class parent_class
G_DEFINE_TYPE (GstWpeVideoSrc2, gst_wpe_video_src, GST_TYPE_GL_BASE_SRC);
#define WPE_RAW_CAPS "video/x-raw, " \
"format = (string) BGRA, " \
"width = " GST_VIDEO_SIZE_RANGE ", " \
"height = " GST_VIDEO_SIZE_RANGE ", " \
"framerate = " GST_VIDEO_FPS_RANGE ", " \
"pixel-aspect-ratio = (fraction)1/1"
#define WPE_GL_CAPS "video/x-raw(memory:GLMemory), " \
"format = (string) RGBA, " \
"width = " GST_VIDEO_SIZE_RANGE ", " \
"height = " GST_VIDEO_SIZE_RANGE ", " \
"framerate = " GST_VIDEO_FPS_RANGE ", " \
"pixel-aspect-ratio = (fraction)1/1, texture-target = (string)2D"
#define WPE_VIDEO_SRC_CAPS WPE_GL_CAPS "; " WPE_RAW_CAPS
#define WPE_VIDEO_SRC_DOC_CAPS WPE_GL_CAPS "; video/x-raw, format = (string) BGRA"
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (WPE_VIDEO_SRC_CAPS));
#define GST_ELEMENT_PROGRESS(el, type, code, text) \
G_STMT_START { \
gchar *__txt = _gst_element_error_printf text; \
gst_element_post_message( \
GST_ELEMENT_CAST(el), \
gst_message_new_progress(GST_OBJECT_CAST(el), \
GST_PROGRESS_TYPE_##type, code, __txt)); \
g_free(__txt); \
} \
G_STMT_END
static GstFlowReturn
gst_wpe_video_src_create (GstBaseSrc * bsrc, guint64 offset, guint length,
GstBuffer ** buf)
{
GstGLBaseSrc *gl_src = GST_GL_BASE_SRC (bsrc);
GstWpeVideoSrc2 *src = GST_WPE_VIDEO_SRC (bsrc);
GstFlowReturn ret = GST_FLOW_ERROR;
GstBuffer *locked_buffer;
GstClockTime next_time;
gint64 ts_offset = 0;
WPE_LOCK (src);
if (src->gl_enabled) {
WPE_UNLOCK (src);
return GST_CALL_PARENT_WITH_DEFAULT (GST_BASE_SRC_CLASS, create, (bsrc,
offset, length, buf), ret);
}
locked_buffer = src->view->buffer ();
if (locked_buffer == NULL) {
WPE_UNLOCK (src);
GST_ELEMENT_ERROR (src, RESOURCE, FAILED,
("WPE View did not render a buffer"), (NULL));
return ret;
}
*buf = gst_buffer_copy_deep (locked_buffer);
g_object_get (gl_src, "timestamp-offset", &ts_offset, NULL);
/* The following code mimics the behaviour of GLBaseSrc::fill */
GST_BUFFER_TIMESTAMP (*buf) = ts_offset + gl_src->running_time;
GST_BUFFER_OFFSET (*buf) = src->n_frames;
src->n_frames++;
GST_BUFFER_OFFSET_END (*buf) = src->n_frames;
if (gl_src->out_info.fps_n) {
next_time = gst_util_uint64_scale_int (src->n_frames * GST_SECOND,
gl_src->out_info.fps_d, gl_src->out_info.fps_n);
GST_BUFFER_DURATION (*buf) = next_time - gl_src->running_time;
} else {
next_time = ts_offset;
GST_BUFFER_DURATION (*buf) = GST_CLOCK_TIME_NONE;
}
GST_LOG_OBJECT (src, "Created buffer from SHM %" GST_PTR_FORMAT, *buf);
gl_src->running_time = next_time;
ret = GST_FLOW_OK;
WPE_UNLOCK (src);
return ret;
}
static GQuark
_egl_image_quark (void)
{
static GQuark quark = 0;
if (!quark)
quark = g_quark_from_static_string ("GstWPEEGLImage");
return quark;
}
static gboolean
gst_wpe_video_src_fill_memory (GstGLBaseSrc * bsrc, GstGLMemory * memory)
{
GstWpeVideoSrc2 *src = GST_WPE_VIDEO_SRC (bsrc);
const GstGLFuncs *gl;
guint tex_id;
GstEGLImage *locked_image;
if (!gst_gl_context_check_feature (GST_GL_CONTEXT (bsrc->context),
"EGL_KHR_image_base")) {
GST_ERROR_OBJECT (src, "EGL_KHR_image_base is not supported");
return FALSE;
}
WPE_LOCK (src);
gl = bsrc->context->gl_vtable;
tex_id = gst_gl_memory_get_texture_id (memory);
locked_image = src->view->image ();
if (!locked_image) {
WPE_UNLOCK (src);
return TRUE;
}
// The EGLImage is implicitly associated with the memory we're filling, so we
// need to ensure their life cycles are tied.
gst_mini_object_set_qdata (GST_MINI_OBJECT_CAST (memory), _egl_image_quark (),
gst_egl_image_ref (locked_image), (GDestroyNotify) gst_egl_image_unref);
gl->ActiveTexture (GL_TEXTURE0 + memory->plane);
gl->BindTexture (GL_TEXTURE_2D, tex_id);
gl->EGLImageTargetTexture2D (GL_TEXTURE_2D,
gst_egl_image_get_image (locked_image));
WPE_UNLOCK (src);
return TRUE;
}
static gboolean
gst_wpe_video_src_start (GstWpeVideoSrc2 * src)
{
GstGLContext *context = NULL;
GstGLDisplay *display = NULL;
GstGLBaseSrc *base_src = GST_GL_BASE_SRC (src);
gboolean created_view = FALSE;
GBytes *bytes;
GST_ELEMENT_PROGRESS (src, START, "open", ("Starting up"));
GST_INFO_OBJECT (src, "Starting up");
WPE_LOCK (src);
if (src->gl_enabled) {
context = base_src->context;
display = base_src->display;
}
GST_DEBUG_OBJECT (src, "Will %sfill GLMemories",
src->gl_enabled ? "" : "NOT ");
auto & thread = GstWPEContextThread::singleton ();
if (!src->view) {
GST_ELEMENT_PROGRESS (src, CONTINUE, "open", ("Creating WPE WebView"));
GError *error = nullptr;
wpe_display_gstreamer_set_gl (src->display, display, context);
if (!wpe_display_connect (src->display, &error)) {
WPE_UNLOCK (src);
GST_ELEMENT_PROGRESS (src, ERROR, "open",
("WPE display initialisation failed"));
GST_ELEMENT_ERROR (src, RESOURCE, FAILED,
("Display initialisation failed: %s", error->message), (NULL));
g_error_free (error);
return FALSE;
}
src->view =
thread.createWPEView (src, context, display, src->display,
GST_VIDEO_INFO_WIDTH (&base_src->out_info),
GST_VIDEO_INFO_HEIGHT (&base_src->out_info));
created_view = TRUE;
GST_DEBUG_OBJECT (src, "created view %p", src->view);
GST_ELEMENT_PROGRESS (src, CONTINUE, "open", ("WPE WebView is ready"));
}
if (!created_view) {
GST_INFO_OBJECT (src,
"Re-starting after re-negotiation, clearing cached SHM buffers");
src->view->clearBuffers ();
}
GST_OBJECT_LOCK (src);
bytes = src->bytes;
src->bytes = NULL;
GST_OBJECT_UNLOCK (src);
if (bytes != NULL) {
GST_ELEMENT_PROGRESS (src, CONTINUE, "open", ("Loading HTML data"));
src->view->loadData (bytes);
g_bytes_unref (bytes);
}
if (created_view) {
src->n_frames = 0;
}
WPE_UNLOCK (src);
GST_ELEMENT_PROGRESS (src, COMPLETE, "open", ("Ready to produce buffers"));
return TRUE;
}
static gboolean
gst_wpe_video_src_decide_allocation (GstBaseSrc * base_src, GstQuery * query)
{
GstGLBaseSrc *gl_src = GST_GL_BASE_SRC (base_src);
GstWpeVideoSrc2 *src = GST_WPE_VIDEO_SRC (base_src);
GstCapsFeatures *caps_features;
WPE_LOCK (src);
caps_features = gst_caps_get_features (gl_src->out_caps, 0);
if (caps_features != NULL
&& gst_caps_features_contains (caps_features,
GST_CAPS_FEATURE_MEMORY_GL_MEMORY)) {
src->gl_enabled = TRUE;
} else {
src->gl_enabled = FALSE;
}
if (src->gl_enabled) {
WPE_UNLOCK (src);
return GST_CALL_PARENT_WITH_DEFAULT (GST_BASE_SRC_CLASS, decide_allocation,
(base_src, query), FALSE);
}
WPE_UNLOCK (src);
return gst_wpe_video_src_start (src);
}
static gboolean
gst_wpe_video_src_gl_start (GstGLBaseSrc * base_src)
{
GstWpeVideoSrc2 *src = GST_WPE_VIDEO_SRC (base_src);
return gst_wpe_video_src_start (src);
}
static void
gst_wpe_video_src_stop_unlocked (GstWpeVideoSrc2 * src)
{
if (src->view) {
GST_DEBUG_OBJECT (src, "deleting view %p", src->view);
delete src->view;
src->view = NULL;
}
}
static void
gst_wpe_video_src_gl_stop (GstGLBaseSrc * base_src)
{
GstWpeVideoSrc2 *src = GST_WPE_VIDEO_SRC (base_src);
WPE_LOCK (src);
gst_wpe_video_src_stop_unlocked (src);
WPE_UNLOCK (src);
}
static gboolean
gst_wpe_video_src_stop (GstBaseSrc * base_src)
{
GstWpeVideoSrc2 *src = GST_WPE_VIDEO_SRC (base_src);
/* we can call this always, GstGLBaseSrc is smart enough to not crash if
* gst_gl_base_src_gl_start() has not been called from chaining up
* gst_wpe_video_src_decide_allocation() */
if (!GST_CALL_PARENT_WITH_DEFAULT (GST_BASE_SRC_CLASS, stop, (base_src),
FALSE))
return FALSE;
WPE_LOCK (src);
/* if gl-enabled, gst_wpe_video_src_stop_unlocked() would have already been called
* inside gst_wpe_video_src_gl_stop() from the base class stopping the OpenGL
* context */
if (!src->gl_enabled)
gst_wpe_video_src_stop_unlocked (src);
WPE_UNLOCK (src);
return TRUE;
}
static GstCaps *
gst_wpe_video_src_fixate (GstBaseSrc * base_src, GstCaps * combined_caps)
{
GstWpeVideoSrc2 *src = GST_WPE_VIDEO_SRC (base_src);
GstStructure *structure;
gint width, height;
GstCaps *caps;
  /* In situations where software GL support is explicitly requested, select raw
   * caps; otherwise perform default caps negotiation. Unfortunately, at this
   * point we don't yet know whether a GL context will be usable, so we can't
   * check the element GstContext.
   */
if (!g_strcmp0 (g_getenv ("LIBGL_ALWAYS_SOFTWARE"), "true")) {
caps = gst_caps_from_string (WPE_RAW_CAPS);
} else {
caps = gst_caps_make_writable (combined_caps);
}
structure = gst_caps_get_structure (caps, 0);
gst_structure_fixate_field_nearest_int (structure, "width", DEFAULT_WIDTH);
gst_structure_fixate_field_nearest_int (structure, "height", DEFAULT_HEIGHT);
if (gst_structure_has_field (structure, "framerate"))
gst_structure_fixate_field_nearest_fraction (structure, "framerate",
DEFAULT_FPS_N, DEFAULT_FPS_D);
else
gst_structure_set (structure, "framerate", GST_TYPE_FRACTION, DEFAULT_FPS_N,
DEFAULT_FPS_D, NULL);
caps = GST_BASE_SRC_CLASS (parent_class)->fixate (base_src, caps);
GST_INFO_OBJECT (base_src, "Fixated caps to %" GST_PTR_FORMAT, caps);
if (src->view) {
gst_structure_get (structure, "width", G_TYPE_INT, &width, "height",
G_TYPE_INT, &height, NULL);
src->view->resize (width, height);
}
return caps;
}
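For illustration, a minimal application-side sketch of the software-GL escape hatch checked above; the pipeline string and element choices are assumptions for the sketch, not requirements of the plugin:

#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;

  /* Read via g_getenv() in gst_wpe_video_src_fixate() above, so it must be set
   * before caps negotiation happens. */
  g_setenv ("LIBGL_ALWAYS_SOFTWARE", "true", TRUE);
  gst_init (&argc, &argv);

  /* Hypothetical pipeline exercising the raw/SHM path. */
  pipeline = gst_parse_launch ("wpevideosrc2 location=https://gstreamer.freedesktop.org"
      " ! videoconvert ! autovideosink", NULL);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      (GstMessageType) (GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

  gst_clear_message (&msg);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (bus);
  gst_object_unref (pipeline);
  return 0;
}

The environment variable only influences caps fixation here; it is checked at negotiation time, so exporting it in the shell before launching the pipeline works just as well.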
void
gst_wpe_video_src_configure_web_view (GstWpeVideoSrc2 * src,
WebKitWebView * webview)
{
GValue args[2] = { {0}, {0} };
g_value_init (&args[0], GST_TYPE_ELEMENT);
g_value_set_object (&args[0], src);
g_value_init (&args[1], G_TYPE_OBJECT);
g_value_set_object (&args[1], webview);
g_signal_emitv (args, gst_wpe_video_src_signals[SIGNAL_CONFIGURE_WEB_VIEW], 0,
NULL);
g_value_unset (&args[0]);
g_value_unset (&args[1]);
}
static void
gst_wpe_video_src_run_javascript (GstWpeVideoSrc2 * src, const gchar * script)
{
if (src->view && GST_STATE (GST_ELEMENT_CAST (src)) > GST_STATE_NULL) {
GST_INFO_OBJECT (src, "running javascript");
src->view->runJavascript (script);
}
}
static void
gst_wpe_video_src_load_bytes (GstWpeVideoSrc2 * src, GBytes * bytes)
{
if (src->view && GST_STATE (GST_ELEMENT_CAST (src)) > GST_STATE_NULL) {
src->view->loadData (bytes);
} else {
GST_OBJECT_LOCK (src);
if (src->bytes)
g_bytes_unref (src->bytes);
src->bytes = g_bytes_ref (bytes);
GST_OBJECT_UNLOCK (src);
}
}
static gboolean
gst_wpe_video_src_set_location (GstWpeVideoSrc2 * src, const gchar * location,
GError ** error)
{
GST_OBJECT_LOCK (src);
g_free (src->location);
src->location = g_strdup (location);
GST_OBJECT_UNLOCK (src);
if (src->view)
src->view->loadUri (location);
return TRUE;
}
static void
gst_wpe_video_src_set_draw_background (GstWpeVideoSrc2 * src,
gboolean draw_background)
{
GST_OBJECT_LOCK (src);
src->draw_background = draw_background;
GST_OBJECT_UNLOCK (src);
if (src->view)
src->view->setDrawBackground (draw_background);
}
static void
gst_wpe_video_src_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstWpeVideoSrc2 *src = GST_WPE_VIDEO_SRC (object);
switch (prop_id) {
case PROP_LOCATION:
{
const gchar *location;
location = g_value_get_string (value);
if (location == NULL) {
GST_WARNING_OBJECT (src, "location property cannot be NULL");
return;
}
if (!gst_wpe_video_src_set_location (src, location, NULL)) {
GST_WARNING_OBJECT (src, "badly formatted location");
return;
}
break;
}
case PROP_DRAW_BACKGROUND:
gst_wpe_video_src_set_draw_background (src, g_value_get_boolean (value));
break;
default:
break;
}
}
static void
gst_wpe_video_src_get_property (GObject * object, guint prop_id, GValue * value,
GParamSpec * pspec)
{
GstWpeVideoSrc2 *src = GST_WPE_VIDEO_SRC (object);
switch (prop_id) {
case PROP_LOCATION:
GST_OBJECT_LOCK (src);
g_value_set_string (value, src->location);
GST_OBJECT_UNLOCK (src);
break;
case PROP_DRAW_BACKGROUND:
GST_OBJECT_LOCK (src);
g_value_set_boolean (value, src->draw_background);
GST_OBJECT_UNLOCK (src);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static gboolean
gst_wpe_video_src_event (GstBaseSrc * base_src, GstEvent * event)
{
gboolean ret = FALSE;
GstWpeVideoSrc2 *src = GST_WPE_VIDEO_SRC (base_src);
if (src->view && GST_EVENT_TYPE (event) == GST_EVENT_NAVIGATION) {
GST_DEBUG_OBJECT (src, "Processing event %" GST_PTR_FORMAT, event);
switch (gst_navigation_event_get_type (event)) {
case GST_NAVIGATION_EVENT_KEY_PRESS:
case GST_NAVIGATION_EVENT_KEY_RELEASE:
ret = src->view->dispatchKeyboardEvent (event);
break;
case GST_NAVIGATION_EVENT_MOUSE_BUTTON_PRESS:
case GST_NAVIGATION_EVENT_MOUSE_BUTTON_RELEASE:
ret = src->view->dispatchPointerEvent (event);
break;
case GST_NAVIGATION_EVENT_MOUSE_MOVE:
ret = src->view->dispatchPointerMoveEvent (event);
break;
case GST_NAVIGATION_EVENT_MOUSE_SCROLL:
ret = src->view->dispatchAxisEvent (event);
break;
case GST_NAVIGATION_EVENT_TOUCH_DOWN:
case GST_NAVIGATION_EVENT_TOUCH_MOTION:
case GST_NAVIGATION_EVENT_TOUCH_UP:
ret = src->view->dispatchTouchEvent (event);
break;
case GST_NAVIGATION_EVENT_TOUCH_FRAME:
case GST_NAVIGATION_EVENT_TOUCH_CANCEL:
break;
default:
break;
}
}
if (!ret) {
ret =
GST_CALL_PARENT_WITH_DEFAULT (GST_BASE_SRC_CLASS, event, (base_src,
event), FALSE);
}
return ret;
}
static gboolean
gst_wpe_video_src_query (GstBaseSrc * base_src, GstQuery * query)
{
GstWpeVideoSrc2 *src = GST_WPE_VIDEO_SRC (base_src);
GstGLBaseSrc *gl_src = GST_GL_BASE_SRC (base_src);
gboolean ret = FALSE;
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_LATENCY:{
GST_OBJECT_LOCK (src);
if (gl_src->out_info.fps_n > 0) {
GstClockTime latency;
latency = gst_util_uint64_scale (GST_SECOND, gl_src->out_info.fps_d,
gl_src->out_info.fps_n);
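        /* i.e. one frame duration, for example ~33ms at 30/1 fps */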
GST_OBJECT_UNLOCK (src);
gst_query_set_latency (query,
gst_base_src_is_live (GST_BASE_SRC_CAST (src)), latency,
GST_CLOCK_TIME_NONE);
GST_DEBUG_OBJECT (src, "Reporting latency of %" GST_TIME_FORMAT,
GST_TIME_ARGS (latency));
ret = TRUE;
} else {
GST_OBJECT_UNLOCK (src);
}
break;
}
default:
ret = GST_CALL_PARENT_WITH_DEFAULT (GST_BASE_SRC_CLASS, query,
(base_src, query), FALSE);
break;
}
return ret;
}
static void
on_view_created (WPEDisplayGStreamer *, WPEView * view, gpointer user_data)
{
GstWpeVideoSrc2 *src = GST_WPE_VIDEO_SRC (user_data);
GValue args[2] = { {0}, {0}
};
g_value_init (&args[0], GST_TYPE_WPE_VIDEO_SRC);
g_value_set_object (&args[0], src);
g_value_init (&args[1], WPE_TYPE_VIEW);
g_value_set_object (&args[1], view);
g_signal_emitv (args, gst_wpe_video_src_signals[SIGNAL_WPE_VIEW_CREATED], 0,
NULL);
g_value_unset (&args[0]);
g_value_unset (&args[1]);
}
static void
gst_wpe_video_src_init (GstWpeVideoSrc2 * src)
{
src->draw_background = DEFAULT_DRAW_BACKGROUND;
src->location = g_strdup (DEFAULT_LOCATION);
src->display = wpe_display_gstreamer_new ();
g_signal_connect (src->display, "wpe-view-created",
G_CALLBACK (on_view_created), src);
gst_base_src_set_live (GST_BASE_SRC_CAST (src), TRUE);
g_mutex_init (&src->lock);
}
static void
gst_wpe_video_src_finalize (GObject * object)
{
GstWpeVideoSrc2 *src = GST_WPE_VIDEO_SRC (object);
g_free (src->location);
g_clear_pointer (&src->bytes, g_bytes_unref);
g_mutex_clear (&src->lock);
g_clear_pointer (&src->display, g_object_unref);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
gst_wpe_video_src_class_init (GstWpeVideoSrc2Class * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
GstGLBaseSrcClass *gl_base_src_class = GST_GL_BASE_SRC_CLASS (klass);
GstBaseSrcClass *base_src_class = GST_BASE_SRC_CLASS (klass);
GstPadTemplate *tmpl;
GstCaps *doc_caps;
gobject_class->set_property = gst_wpe_video_src_set_property;
gobject_class->get_property = gst_wpe_video_src_get_property;
gobject_class->finalize = gst_wpe_video_src_finalize;
g_object_class_install_property (gobject_class, PROP_LOCATION,
g_param_spec_string ("location", "location",
"The URL to display",
DEFAULT_LOCATION,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_DRAW_BACKGROUND,
g_param_spec_boolean ("draw-background", "Draws the background",
"Whether to draw the WebView background", DEFAULT_DRAW_BACKGROUND,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
gst_element_class_set_static_metadata (gstelement_class,
"WPE source", "Source/Video",
"Creates a video stream from a WPE browser",
"Philippe Normand <philn@igalia.com>, Žan Doberšek <zdobersek@igalia.com>");
tmpl = gst_static_pad_template_get (&src_factory);
gst_element_class_add_pad_template (gstelement_class, tmpl);
base_src_class->fixate = GST_DEBUG_FUNCPTR (gst_wpe_video_src_fixate);
base_src_class->create = GST_DEBUG_FUNCPTR (gst_wpe_video_src_create);
base_src_class->decide_allocation =
GST_DEBUG_FUNCPTR (gst_wpe_video_src_decide_allocation);
base_src_class->stop = GST_DEBUG_FUNCPTR (gst_wpe_video_src_stop);
base_src_class->event = GST_DEBUG_FUNCPTR (gst_wpe_video_src_event);
base_src_class->query = GST_DEBUG_FUNCPTR (gst_wpe_video_src_query);
gl_base_src_class->supported_gl_api =
static_cast < GstGLAPI >
(GST_GL_API_OPENGL | GST_GL_API_OPENGL3 | GST_GL_API_GLES2);
gl_base_src_class->gl_start = GST_DEBUG_FUNCPTR (gst_wpe_video_src_gl_start);
gl_base_src_class->gl_stop = GST_DEBUG_FUNCPTR (gst_wpe_video_src_gl_stop);
gl_base_src_class->fill_gl_memory =
GST_DEBUG_FUNCPTR (gst_wpe_video_src_fill_memory);
doc_caps = gst_caps_from_string (WPE_VIDEO_SRC_DOC_CAPS);
gst_pad_template_set_documentation_caps (tmpl, doc_caps);
gst_clear_caps (&doc_caps);
/**
* GstWpeVideoSrc2::wpe-view-created:
* @src: the object which received the signal
* @view: the #WPEView that was created
*
 * This signal can be used to hook into the WPEView signals as soon as the view
 * is created.
*/
gst_wpe_video_src_signals[SIGNAL_WPE_VIEW_CREATED] =
g_signal_new ("wpe-view-created", G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, G_TYPE_NONE, 1, WPE_TYPE_VIEW);
/**
* GstWpeVideoSrc2::configure-web-view:
* @src: the object which received the signal
* @webview: the webView
*
 * Allows the application to configure the webView settings.
*/
gst_wpe_video_src_signals[SIGNAL_CONFIGURE_WEB_VIEW] =
g_signal_new ("configure-web-view", G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_OBJECT);
/**
* GstWpeVideoSrc2::load-bytes:
* @src: the object which received the signal
* @bytes: the GBytes data to load
*
* Load the specified bytes into the internal webView.
*/
gst_wpe_video_src_signals[SIGNAL_LOAD_BYTES] =
g_signal_new_class_handler ("load-bytes", G_TYPE_FROM_CLASS (klass),
static_cast < GSignalFlags > (G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
G_CALLBACK (gst_wpe_video_src_load_bytes), NULL, NULL, NULL,
G_TYPE_NONE, 1, G_TYPE_BYTES);
/**
* GstWpeVideoSrc2::run-javascript:
* @src: the object which received the signal
* @script: the script to run
*
 * Asynchronously run the script in the context of the current page of the
 * internal webView.
*/
gst_wpe_video_src_signals[SIGNAL_RUN_JAVASCRIPT] =
g_signal_new_class_handler ("run-javascript", G_TYPE_FROM_CLASS (klass),
static_cast < GSignalFlags > (G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
G_CALLBACK (gst_wpe_video_src_run_javascript), NULL, NULL, NULL,
G_TYPE_NONE, 1, G_TYPE_STRING);
}
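For illustration, a minimal application-side sketch of how the signals registered above could be used. The callback and function names, the markup and the script are assumptions for the sketch, not part of the element API:

#include <string.h>
#include <gst/gst.h>

/* Illustrative callbacks; the names are not part of the plugin API. */
static void
on_wpe_view_created (GstElement * src, GObject * wpe_view, gpointer user_data)
{
  /* Hook into the WPEView signals here, as soon as the view exists. */
  g_print ("WPEView %p created for %s\n", wpe_view, GST_OBJECT_NAME (src));
}

static void
on_configure_web_view (GstElement * src, GObject * web_view, gpointer user_data)
{
  /* Tweak the WebKitWebView settings, user content, etc. here. */
  g_print ("Configuring WebView %p\n", web_view);
}

static void
setup_wpe_source (GstElement * wpesrc)
{
  const gchar *markup = "<html><body><h1>Hello from wpevideosrc2</h1></body></html>";
  GBytes *bytes = g_bytes_new (markup, strlen (markup));

  g_signal_connect (wpesrc, "wpe-view-created",
      G_CALLBACK (on_wpe_view_created), NULL);
  g_signal_connect (wpesrc, "configure-web-view",
      G_CALLBACK (on_configure_web_view), NULL);

  /* "load-bytes" and "run-javascript" are action signals and can be emitted
   * directly on the element. */
  g_signal_emit_by_name (wpesrc, "load-bytes", bytes);
  g_signal_emit_by_name (wpesrc, "run-javascript",
      "document.body.style.background = 'transparent';");

  g_bytes_unref (bytes);
}

Note that "load-bytes" emitted before the element has started is cached and only loaded once the WebView exists, as handled by gst_wpe_video_src_load_bytes() above.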


@ -0,0 +1,17 @@
#pragma once
#include <wpe/webkit.h>
#include <gst/gl/gl.h>
#include "gstwpeview.h"
typedef struct _GstWpeVideoSrc2 GstWpeVideoSrc2;
G_BEGIN_DECLS
#define GST_TYPE_WPE_VIDEO_SRC (gst_wpe_video_src_get_type ())
G_DECLARE_FINAL_TYPE (GstWpeVideoSrc2, gst_wpe_video_src, GST, WPE_VIDEO_SRC, GstGLBaseSrc);
void gst_wpe_video_src_configure_web_view (GstWpeVideoSrc2 * src, WebKitWebView * webview);
G_END_DECLS


@ -0,0 +1,69 @@
/* Copyright (C) <2025> Philippe Normand <philn@igalia.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include "gstwpeview.h"
#include "gstwpethreadedview.h"
struct _WPEViewGStreamer
{
WPEView parent;
GstWPEThreadedView *client;
};
#define wpe_view_gstreamer_parent_class parent_class
G_DEFINE_TYPE (WPEViewGStreamer, wpe_view_gstreamer, WPE_TYPE_VIEW);
static gboolean
wpe_view_gstreamer_render_buffer (WPEView * view, WPEBuffer * buffer,
const WPERectangle *, guint, GError ** error)
{
auto self = WPE_VIEW_GSTREAMER (view);
// TODO: Add support for damage rects.
return self->client->setPendingBuffer (buffer, error);
}
static void
wpe_view_gstreamer_init (WPEViewGStreamer * view)
{
}
static void
wpe_view_gstreamer_class_init (WPEViewGStreamerClass * klass)
{
WPEViewClass *viewClass = WPE_VIEW_CLASS (klass);
viewClass->render_buffer = wpe_view_gstreamer_render_buffer;
}
WPEView *
wpe_view_gstreamer_new (WPEDisplayGStreamer * display)
{
return WPE_VIEW (g_object_new (WPE_TYPE_VIEW_GSTREAMER, "display", display,
nullptr));
}
void
wpe_view_gstreamer_set_client (WPEViewGStreamer * view,
GstWPEThreadedView * client)
{
view->client = client;
}


@ -0,0 +1,41 @@
/* Copyright (C) <2025> Philippe Normand <philn@igalia.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef GstWPEView_h
#define GstWPEView_h
#include <glib-object.h>
#include "gstwpedisplay.h"
class GstWPEThreadedView;
G_BEGIN_DECLS
#define WPE_TYPE_VIEW_GSTREAMER (wpe_view_gstreamer_get_type())
G_DECLARE_FINAL_TYPE(WPEViewGStreamer, wpe_view_gstreamer, WPE,
VIEW_GSTREAMER, WPEView)
typedef struct _WPEDisplayGStreamer WPEDisplayGStreamer;
WPEView *wpe_view_gstreamer_new(WPEDisplayGStreamer *);
void wpe_view_gstreamer_set_client(WPEViewGStreamer*, GstWPEThreadedView*);
G_END_DECLS
#endif /* GstWPEView_h */


@ -0,0 +1,60 @@
wpe_sources = [
  'gstwpethreadedview.cpp',
  'gstwpe2.cpp',
'gstwpevideosrc.cpp',
'gstwpedisplay.cpp',
'gstwpeview.cpp',
'gstwpetoplevel.cpp',
]
wpe_headers = [
'gstwpevideosrc.h',
'gstwpe2.h',
'gstwpethreadedview.h',
]
doc_sources = []
foreach s: wpe_sources + wpe_headers
doc_sources += meson.current_source_dir() / s
endforeach
plugin_sources += {
  'wpe2': pathsep.join(doc_sources)
}
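# The 'wpe2' feature option is declared in meson_options.txt; it can typically be
# forced on with -Dwpe2=enabled (or -Dgst-plugins-bad:wpe2=enabled from the
# mono-repository top level).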
wpe_feat = get_option('wpe2').require(gstgl_dep.found(),
    error_message : 'wpe2 plugin enabled but GL support was not detected')
if not wpe_feat.allowed()
subdir_done()
endif
wpewebkit_dep = dependency('wpe-webkit-2.0', version: '>=2.46.0', required: wpe_feat)
if not wpewebkit_dep.found()
subdir_done()
endif
# GstWPE2 makes use of the new WPEPlatform API. As that API is optional, we
# need to check for it explicitly.
wpe_platform_dep = dependency('wpe-platform-2.0', required: wpe_feat)
if not wpe_platform_dep.found()
subdir_done()
endif
egl_dep = dependency('egl', required : wpe_feat)
xkbcommon_dep = dependency('xkbcommon', version : '>= 0.8', required : wpe_feat)
if not (egl_dep.found() and xkbcommon_dep.found())
subdir_done()
endif
gstwpe = library('gstwpe2',
wpe_sources,
override_options : ['cpp_std=c++17'],
dependencies : [egl_dep, wpewebkit_dep, gstallocators_dep, gstaudio_dep, gstvideo_dep,
gstbase_dep, gstgl_dep, xkbcommon_dep],
cpp_args : gst_plugins_bad_args + ['-DHAVE_CONFIG_H=1'],
include_directories : [configinc],
install : true,
install_dir : plugins_install_dir)
plugins += [gstwpe]


@ -206,6 +206,7 @@ option(
  choices: ['auto', '1.0', '1.1', '2.0'],
  description: 'WPE WebKit API to target (1.0 = soup2, 1.1/2.0 = soup3)'
)
option('wpe2', type : 'feature', value : 'auto', description : 'WPE Web browser plugin')
option('magicleap', type : 'feature', value : 'auto', description : 'Magic Leap platform support')
option('v4l2codecs', type : 'feature', value : 'auto', description : 'Video4Linux Stateless CODECs support')