wasapi2: Port to IMMDevice based device selection

Due to several reported issues with WinRT-based device
enumeration, switch to IMMDevice device-id based device selection.

Fixes: https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/4311
Fixes: https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/3936
Part-of: <https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/9307>
This commit is contained in:
Seungha Yang 2025-06-29 01:46:44 +09:00 committed by GStreamer Marge Bot
parent ba41200c96
commit 0b3108cb3c
7 changed files with 561 additions and 69 deletions

View File

@ -0,0 +1,461 @@
/* GStreamer
* Copyright (C) 2025 Seungha Yang <seungha@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstwasapi2object.h"
#include "gstwasapi2activator.h"
#include <endpointvolume.h>
#include <mutex>
#include <condition_variable>
#include <wrl.h>
#include <string>
#include <atomic>
#include <string.h>
/* *INDENT-OFF* */
using namespace Microsoft::WRL;
GST_DEBUG_CATEGORY_EXTERN (gst_wasapi2_debug);
#define GST_CAT_DEFAULT gst_wasapi2_debug
static GstStaticCaps template_caps = GST_STATIC_CAPS (GST_WASAPI2_STATIC_CAPS);
static void gst_wasapi2_object_set_endpoint_muted (GstWasapi2Object * object,
bool muted);
DEFINE_GUID (IID_Wasapi2EndpointVolumeCallback, 0x21ba991f, 0x4d78,
0x418c, 0xa1, 0xea, 0x8a, 0xc7, 0xdd, 0xa2, 0xdc, 0x39);
/* COM callback object that receives endpoint volume/mute notifications from
 * the audio service and forwards the mute state to the owning
 * GstWasapi2Object. Holds only a weak reference to the owner so the callback
 * cannot keep the object alive past its finalize. */
class Wasapi2EndpointVolumeCallback : public IAudioEndpointVolumeCallback
{
public:
  /* Returns a new instance with refcount 1; caller owns the reference and
   * must Release() it (after UnregisterControlChangeNotify) */
  static void CreateInstance (Wasapi2EndpointVolumeCallback ** iface,
      GstWasapi2Object * client)
  {
    auto self = new Wasapi2EndpointVolumeCallback ();
    g_weak_ref_set (&self->client_, client);
    *iface = self;
  }
  STDMETHODIMP_ (ULONG)
  AddRef (void)
  {
    return InterlockedIncrement (&refcount_);
  }
  STDMETHODIMP_ (ULONG)
  Release (void)
  {
    ULONG ref_count;
    ref_count = InterlockedDecrement (&refcount_);
    /* Self-deleting COM object: destroyed when the last reference drops */
    if (ref_count == 0)
      delete this;
    return ref_count;
  }
  STDMETHODIMP
  QueryInterface (REFIID riid, void ** object)
  {
    if (riid == __uuidof(IUnknown) || riid == __uuidof(IAgileObject)) {
      /* IAgileObject marks this object as free-threaded (no marshaling) */
      *object = static_cast<IUnknown *>(
          static_cast<Wasapi2EndpointVolumeCallback*>(this));
    } else if (riid == __uuidof(IAudioEndpointVolumeCallback)) {
      *object = static_cast<IAudioEndpointVolumeCallback *>(
          static_cast<Wasapi2EndpointVolumeCallback*>(this));
    } else if (riid == IID_Wasapi2EndpointVolumeCallback) {
      /* Private IID so we can recover the concrete type if ever needed */
      *object = static_cast<Wasapi2EndpointVolumeCallback *> (this);
    } else {
      *object = nullptr;
      return E_NOINTERFACE;
    }
    AddRef ();
    return S_OK;
  }
  /* Called by the audio service whenever endpoint volume or mute changes;
   * may run on an arbitrary thread */
  STDMETHODIMP
  OnNotify (AUDIO_VOLUME_NOTIFICATION_DATA * notify)
  {
    auto client = (GstWasapi2Object *) g_weak_ref_get (&client_);
    if (client) {
      gst_wasapi2_object_set_endpoint_muted (client, notify->bMuted);
      gst_object_unref (client);
    }
    return S_OK;
  }
private:
  Wasapi2EndpointVolumeCallback ()
  {
    g_weak_ref_init (&client_, nullptr);
  }
  virtual ~Wasapi2EndpointVolumeCallback ()
  {
    g_weak_ref_set (&client_, nullptr);
  }
private:
  /* Starts at 1: the reference handed out by CreateInstance() */
  ULONG refcount_ = 1;
  GWeakRef client_;
};
/* Private state of GstWasapi2Object. All COM members are created and must be
 * released on the worker thread (see ClearCOM, called from the thread func
 * after the loop stops, before CoUninitialize) */
struct GstWasapi2ObjectPrivate
{
  ComPtr<IMMDeviceEnumerator> enumerator;
  ComPtr<IMMDevice> device;
  ComPtr<IAudioClient> client;
  ComPtr<IAudioEndpointVolume> endpoint_volume;

  /* Written from OnNotify (arbitrary thread), read via
   * gst_wasapi2_object_is_endpoint_muted() */
  std::atomic<bool> endpoint_muted = { false };
  Wasapi2EndpointVolumeCallback *volume_callback = nullptr;
  Wasapi2ActivationHandler *activator = nullptr;

  /* Protect the startup handshake between object_new() and the thread func */
  std::mutex lock;
  std::condition_variable cond;

  std::string device_id;
  GstWasapi2EndpointClass device_class;
  /* Target process id for process-loopback capture classes */
  guint target_pid;
  gboolean is_default_device = FALSE;

  void ClearCOM ()
  {
    /* Unregister before releasing the callback so no notification can race
     * with its destruction */
    if (volume_callback && endpoint_volume)
      endpoint_volume->UnregisterControlChangeNotify (volume_callback);

    if (activator)
      activator->Release ();
    client = nullptr;
    if (volume_callback)
      volume_callback->Release ();
    endpoint_volume = nullptr;
    device = nullptr;
    enumerator = nullptr;
  }
};
/* *INDENT-ON* */
struct _GstWasapi2Object
{
  GstObject parent;

  GstWasapi2ObjectPrivate *priv;

  /* Worker thread running the COM apartment and the main loop below */
  GThread *thread;
  GMainContext *context;
  GMainLoop *loop;

  /* Caps derived from the device mix format, probed once on the worker
   * thread; may be NULL if probing failed */
  GstCaps *caps;
};
static void gst_wasapi2_object_finalize (GObject * object);
#define gst_wasapi2_object_parent_class parent_class
G_DEFINE_TYPE (GstWasapi2Object, gst_wasapi2_object, GST_TYPE_OBJECT);
static void
gst_wasapi2_object_class_init (GstWasapi2ObjectClass * klass)
{
  /* Only finalize needs overriding; there are no properties or signals */
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);

  gobject_class->finalize = gst_wasapi2_object_finalize;
}
static void
gst_wasapi2_object_init (GstWasapi2Object * self)
{
  /* Dedicated context + loop, to be run by the worker thread */
  self->context = g_main_context_new ();
  self->loop = g_main_loop_new (self->context, FALSE);

  /* Private state is a C++ struct, released in finalize */
  self->priv = new GstWasapi2ObjectPrivate ();
}
static void
gst_wasapi2_object_finalize (GObject * object)
{
  auto self = GST_WASAPI2_OBJECT (object);

  /* Stop the worker loop first, then join: the worker performs ClearCOM()
   * and CoUninitialize() on its way out, so priv must still be alive here */
  g_main_loop_quit (self->loop);
  g_thread_join (self->thread);
  g_main_loop_unref (self->loop);
  g_main_context_unref (self->context);
  gst_clear_caps (&self->caps);
  /* Safe only after the thread has joined */
  delete self->priv;
  G_OBJECT_CLASS (parent_class)->finalize (object);
}
/* Stores the endpoint mute state; called from the COM notification callback,
 * potentially on an arbitrary thread */
static void
gst_wasapi2_object_set_endpoint_muted (GstWasapi2Object * object, bool muted)
{
  object->priv->endpoint_muted.store (muted, std::memory_order_release);
}
/* Case-insensitive device id comparison.
 *
 * NULL-safe: gst_wasapi2_get_default_device_id() may return NULL (e.g. when
 * no default endpoint exists — TODO confirm against its implementation), and
 * the previous code would have crashed in strlen() on a NULL argument. */
static gboolean
is_equal_device_id (const gchar * a, const gchar * b)
{
  if (!a || !b)
    return FALSE;

  auto len_a = strlen (a);
  auto len_b = strlen (b);
  if (len_a != len_b)
    return FALSE;
#ifdef _MSC_VER
  return _strnicmp (a, b, len_a) == 0;
#else
  return strncasecmp (a, b, len_a) == 0;
#endif
}
/* Worker thread body: initializes COM, selects/activates the device for the
 * configured endpoint class, probes caps, then runs the main loop until
 * finalize quits it. All COM objects are created and released on this thread.
 *
 * Fix: the HRESULT of GetMute() was previously discarded and the stale hr
 * from RegisterControlChangeNotify() was tested instead; hr is now assigned
 * from GetMute() before being checked. */
static gpointer
gst_wasapi2_object_thread_func (GstWasapi2Object * self)
{
  auto priv = self->priv;
  CoInitializeEx (nullptr, COINIT_MULTITHREADED);
  g_main_context_push_thread_default (self->context);
  /* Idle source signals gst_wasapi2_object_new() once the loop is running */
  auto idle_source = g_idle_source_new ();
  /* *INDENT-OFF* */
  g_source_set_callback (idle_source,
      [] (gpointer user_data) -> gboolean {
        auto self = (GstWasapi2Object *) user_data;
        auto priv = self->priv;
        std::lock_guard < std::mutex > lk (priv->lock);
        priv->cond.notify_all ();
        return G_SOURCE_REMOVE;
      },
      self, nullptr);
  /* *INDENT-ON* */
  g_source_attach (idle_source, self->context);
  g_source_unref (idle_source);
  auto hr = CoCreateInstance (__uuidof (MMDeviceEnumerator),
      nullptr, CLSCTX_ALL, IID_PPV_ARGS (&priv->enumerator));
  if (FAILED (hr)) {
    GST_ERROR_OBJECT (self, "Failed to create IMMDeviceEnumerator instance");
    goto run_loop;
  }
  /* Device selection: default devices prefer the async activation path so
   * automatic stream routing keeps working; explicit ids go through
   * IMMDeviceEnumerator::GetDevice() */
  switch (priv->device_class) {
    case GST_WASAPI2_ENDPOINT_CLASS_CAPTURE:
      if (priv->device_id.empty () ||
          is_equal_device_id (priv->device_id.c_str (),
              gst_wasapi2_get_default_device_id (eCapture))) {
        if (gst_wasapi2_can_automatic_stream_routing ()) {
          Wasapi2ActivationHandler::CreateInstance (&priv->activator,
              gst_wasapi2_get_default_device_id_wide (eCapture), nullptr);
          GST_DEBUG_OBJECT (self, "Creating default capture device");
          priv->is_default_device = TRUE;
        } else {
          GST_DEBUG_OBJECT (self, "Creating default capture MMdevice");
          hr = priv->enumerator->GetDefaultAudioEndpoint (eCapture,
              eConsole, &priv->device);
        }
      } else {
        /* GetDevice() wants a wide-char endpoint id */
        auto wstr = g_utf8_to_utf16 (priv->device_id.c_str (),
            -1, nullptr, nullptr, nullptr);
        hr = priv->enumerator->GetDevice ((LPCWSTR) wstr, &priv->device);
        g_free (wstr);
      }
      break;
    case GST_WASAPI2_ENDPOINT_CLASS_RENDER:
    case GST_WASAPI2_ENDPOINT_CLASS_LOOPBACK_CAPTURE:
      /* Loopback capture opens the render endpoint as well */
      if (priv->device_id.empty () ||
          is_equal_device_id (priv->device_id.c_str (),
              gst_wasapi2_get_default_device_id (eRender))) {
        if (gst_wasapi2_can_automatic_stream_routing ()) {
          Wasapi2ActivationHandler::CreateInstance (&priv->activator,
              gst_wasapi2_get_default_device_id_wide (eRender), nullptr);
          GST_DEBUG_OBJECT (self, "Creating default render device");
          priv->is_default_device = TRUE;
        } else {
          GST_DEBUG_OBJECT (self, "Creating default render MMdevice");
          hr = priv->enumerator->GetDefaultAudioEndpoint (eRender,
              eConsole, &priv->device);
        }
      } else {
        auto wstr = g_utf8_to_utf16 (priv->device_id.c_str (),
            -1, nullptr, nullptr, nullptr);
        hr = priv->enumerator->GetDevice ((LPCWSTR) wstr, &priv->device);
        g_free (wstr);
      }
      break;
    case GST_WASAPI2_ENDPOINT_CLASS_INCLUDE_PROCESS_LOOPBACK_CAPTURE:
    case GST_WASAPI2_ENDPOINT_CLASS_EXCLUDE_PROCESS_LOOPBACK_CAPTURE:
    {
      /* Process loopback has no IMMDevice; it must use the virtual loopback
       * device with activation parameters */
      AUDIOCLIENT_ACTIVATION_PARAMS params = { };
      params.ActivationType = AUDIOCLIENT_ACTIVATION_TYPE_PROCESS_LOOPBACK;
      params.ProcessLoopbackParams.TargetProcessId = priv->target_pid;
      if (priv->device_class ==
          GST_WASAPI2_ENDPOINT_CLASS_INCLUDE_PROCESS_LOOPBACK_CAPTURE) {
        params.ProcessLoopbackParams.ProcessLoopbackMode =
            PROCESS_LOOPBACK_MODE_INCLUDE_TARGET_PROCESS_TREE;
      } else {
        params.ProcessLoopbackParams.ProcessLoopbackMode =
            PROCESS_LOOPBACK_MODE_EXCLUDE_TARGET_PROCESS_TREE;
      }
      GST_DEBUG_OBJECT (self, "Creating process loopback capture device");
      Wasapi2ActivationHandler::CreateInstance (&priv->activator,
          VIRTUAL_AUDIO_DEVICE_PROCESS_LOOPBACK, &params);
      break;
    }
    default:
      g_assert_not_reached ();
      break;
  }
  if (priv->activator || priv->device) {
    if (priv->activator) {
      hr = priv->activator->ActivateAsync ();
      if (gst_wasapi2_result (hr))
        hr = priv->activator->GetClient (&priv->client, INFINITE);
    } else {
      hr = priv->device->Activate (__uuidof (IAudioClient), CLSCTX_ALL,
          nullptr, &priv->client);
    }
    if (!gst_wasapi2_result (hr)) {
      GST_WARNING_OBJECT (self, "Couldn't activate device");
    } else if (priv->device &&
        priv->device_class == GST_WASAPI2_ENDPOINT_CLASS_LOOPBACK_CAPTURE) {
      /* For loopback capture, track the render endpoint's mute state so
       * captured silence can be distinguished from a muted device */
      hr = priv->device->Activate (__uuidof (IAudioEndpointVolume),
          CLSCTX_ALL, nullptr, &priv->endpoint_volume);
      if (gst_wasapi2_result (hr)) {
        Wasapi2EndpointVolumeCallback::CreateInstance (&priv->volume_callback,
            self);
        hr = priv->endpoint_volume->
            RegisterControlChangeNotify (priv->volume_callback);
        if (!gst_wasapi2_result (hr)) {
          priv->volume_callback->Release ();
          priv->volume_callback = nullptr;
        } else {
          /* Seed the initial mute state; later changes arrive via OnNotify */
          BOOL muted = FALSE;
          hr = priv->endpoint_volume->GetMute (&muted);
          if (gst_wasapi2_result (hr))
            gst_wasapi2_object_set_endpoint_muted (self, muted);
        }
      }
    }
  } else {
    GST_WARNING_OBJECT (self, "No device created");
  }
  if (priv->client) {
    WAVEFORMATEX *mix_format = nullptr;
    hr = priv->client->GetMixFormat (&mix_format);
    if (!gst_wasapi2_result (hr)) {
      /* Process loopback clients don't report a mix format; use a default */
      if (gst_wasapi2_is_process_loopback_class (priv->device_class))
        mix_format = gst_wasapi2_get_default_mix_format ();
    }
    if (mix_format) {
      auto scaps = gst_static_caps_get (&template_caps);
      gst_wasapi2_util_parse_waveformatex (mix_format,
          scaps, &self->caps, nullptr);
      gst_caps_unref (scaps);
      CoTaskMemFree (mix_format);
    }
  }
run_loop:
  GST_INFO_OBJECT (self, "Starting loop");
  g_main_loop_run (self->loop);
  GST_INFO_OBJECT (self, "Stopped loop");
  /* Release all COM objects on this thread before leaving the apartment */
  priv->ClearCOM ();
  g_main_context_pop_thread_default (self->context);
  CoUninitialize ();
  return nullptr;
}
/* Creates a wasapi2 object for the given endpoint class.
 *
 * device_id: endpoint id as returned by IMMDevice::GetId(), or NULL for the
 *   default device
 * target_pid: required (non-zero) for process-loopback classes
 *
 * Spawns the worker thread, waits until its loop is running, and returns
 * NULL if no IAudioClient could be activated. */
GstWasapi2Object *
gst_wasapi2_object_new (GstWasapi2EndpointClass device_class,
    const gchar * device_id, guint target_pid)
{
  auto self = (GstWasapi2Object *)
      g_object_new (GST_TYPE_WASAPI2_OBJECT, nullptr);
  gst_object_ref_sink (self);
  auto priv = self->priv;
  priv->device_class = device_class;
  if (device_id)
    priv->device_id = device_id;
  priv->target_pid = target_pid;
  if (gst_wasapi2_is_process_loopback_class (device_class) && !target_pid) {
    GST_ERROR_OBJECT (self, "Unspecified target PID");
    gst_object_unref (self);
    return nullptr;
  }
  {
    /* Hold the lock before starting the thread so the idle source's
     * notify_all() cannot fire between our loop-running check and the wait */
    std::unique_lock < std::mutex > lk (priv->lock);
    self->thread = g_thread_new ("GstWasapi2Object",
        (GThreadFunc) gst_wasapi2_object_thread_func, self);
    while (!g_main_loop_is_running (self->loop))
      priv->cond.wait (lk);
  }
  /* Device setup runs before the loop starts, so the client (or lack of it)
   * is final once we get here */
  if (!priv->client) {
    gst_object_unref (self);
    return nullptr;
  }
  return self;
}
/* Returns a new reference to the probed device caps, or NULL if probing
 * failed during construction */
GstCaps *
gst_wasapi2_object_get_caps (GstWasapi2Object * object)
{
  return object->caps ? gst_caps_ref (object->caps) : nullptr;
}
/* Returns the activated IAudioClient (borrowed, no new reference) */
IAudioClient *
gst_wasapi2_object_get_handle (GstWasapi2Object * object)
{
  auto priv = object->priv;

  return priv->client.Get ();
}
/* Returns the last endpoint mute state reported by the volume callback;
 * safe to call from any thread */
gboolean
gst_wasapi2_object_is_endpoint_muted (GstWasapi2Object * object)
{
  auto priv = object->priv;

  return priv->endpoint_muted.load (std::memory_order_acquire);
}
/* TRUE when this object was created via the default-device activation path,
 * i.e. the OS performs automatic stream routing */
gboolean
gst_wasapi2_object_auto_routing_supported (GstWasapi2Object * object)
{
  auto priv = object->priv;

  return priv->is_default_device;
}

View File

@ -0,0 +1,44 @@
/* GStreamer
* Copyright (C) 2025 Seungha Yang <seungha@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#pragma once
#include <gst/gst.h>
#include "gstwasapi2util.h"
G_BEGIN_DECLS
#define GST_TYPE_WASAPI2_OBJECT (gst_wasapi2_object_get_type ())
G_DECLARE_FINAL_TYPE (GstWasapi2Object, gst_wasapi2_object,
GST, WASAPI2_OBJECT, GstObject);
/* Creates an object for the given endpoint class. device_id is an endpoint
 * id as returned by IMMDevice::GetId() (NULL for the default device);
 * target_pid must be non-zero for process-loopback classes. Returns NULL if
 * no IAudioClient could be activated */
GstWasapi2Object * gst_wasapi2_object_new (GstWasapi2EndpointClass device_class,
const gchar * device_id,
guint target_pid);
/* Returns a new reference to the device caps, or NULL */
GstCaps * gst_wasapi2_object_get_caps (GstWasapi2Object * object);
/* Returns the activated IAudioClient (borrowed reference) */
IAudioClient * gst_wasapi2_object_get_handle (GstWasapi2Object * object);
/* Last known endpoint mute state; thread-safe */
gboolean gst_wasapi2_object_is_endpoint_muted (GstWasapi2Object * object);
/* TRUE when the OS performs automatic stream routing (default device) */
gboolean gst_wasapi2_object_auto_routing_supported (GstWasapi2Object * object);
G_END_DECLS

View File

@ -18,6 +18,7 @@
*/
#include "gstwasapi2ringbuffer.h"
#include "gstwasapi2object.h"
#include <string.h>
#include <mfapi.h>
#include <wrl.h>
@ -151,7 +152,7 @@ struct _GstWasapi2RingBuffer
{
GstAudioRingBuffer parent;
GstWasapi2ClientDeviceClass device_class;
GstWasapi2EndpointClass device_class;
gchar *device_id;
gboolean low_latency;
gboolean mute;
@ -160,8 +161,8 @@ struct _GstWasapi2RingBuffer
gboolean can_auto_routing;
guint loopback_target_pid;
GstWasapi2Client *client;
GstWasapi2Client *loopback_client;
GstWasapi2Object *client;
GstWasapi2Object *loopback_client;
IAudioCaptureClient *capture_client;
IAudioRenderClient *render_client;
IAudioStreamVolume *volume_object;
@ -344,7 +345,7 @@ gst_wasapi2_ring_buffer_post_open_error (GstWasapi2RingBuffer * self)
return;
}
if (self->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER) {
if (self->device_class == GST_WASAPI2_ENDPOINT_CLASS_RENDER) {
GST_ELEMENT_ERROR (parent, RESOURCE, OPEN_WRITE,
(nullptr), ("Failed to open device"));
} else {
@ -381,7 +382,7 @@ gst_wasapi2_ring_buffer_post_io_error (GstWasapi2RingBuffer * self, HRESULT hr)
error_msg = gst_wasapi2_util_get_error_message (hr);
GST_ERROR_OBJECT (self, "Posting I/O error %s (hr: 0x%x)", error_msg, hr);
if (self->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER) {
if (self->device_class == GST_WASAPI2_ENDPOINT_CLASS_RENDER) {
GST_ELEMENT_ERROR (parent, RESOURCE, WRITE,
("Failed to write to device"), ("%s, hr: 0x%x", error_msg, hr));
} else {
@ -404,20 +405,21 @@ gst_wasapi2_ring_buffer_open_device (GstAudioRingBuffer * buf)
return TRUE;
}
self->client = gst_wasapi2_client_new (self->device_class,
-1, self->device_id, self->loopback_target_pid, self->dispatcher);
self->client = gst_wasapi2_object_new (self->device_class,
self->device_id, self->loopback_target_pid);
if (!self->client) {
gst_wasapi2_ring_buffer_post_open_error (self);
return FALSE;
}
g_object_get (self->client, "auto-routing", &self->can_auto_routing, nullptr);
self->can_auto_routing =
gst_wasapi2_object_auto_routing_supported (self->client);
/* Open another render client to feed silence */
if (gst_wasapi2_device_class_is_loopback (self->device_class)) {
if (gst_wasapi2_is_loopback_class (self->device_class)) {
self->loopback_client =
gst_wasapi2_client_new (GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER,
-1, self->device_id, 0, self->dispatcher);
gst_wasapi2_object_new (GST_WASAPI2_ENDPOINT_CLASS_RENDER,
self->device_id, 0);
if (!self->loopback_client) {
gst_wasapi2_ring_buffer_post_open_error (self);
@ -511,12 +513,12 @@ gst_wasapi2_ring_buffer_read (GstWasapi2RingBuffer * self)
is_device_muted =
self->priv->monitor_device_mute.load (std::memory_order_acquire) &&
gst_wasapi2_client_is_endpoint_muted (self->client);
gst_wasapi2_object_is_endpoint_muted (self->client);
to_read_bytes = to_read * GST_AUDIO_INFO_BPF (info);
/* XXX: position might not be increased in case of process loopback */
if (!gst_wasapi2_device_class_is_process_loopback (self->device_class)) {
if (!gst_wasapi2_is_process_loopback_class (self->device_class)) {
if (self->is_first) {
self->expected_position = position + to_read;
self->is_first = FALSE;
@ -614,7 +616,7 @@ gst_wasapi2_ring_buffer_write (GstWasapi2RingBuffer * self, gboolean preroll)
gint len;
BYTE *data = nullptr;
client_handle = gst_wasapi2_client_get_handle (self->client);
client_handle = gst_wasapi2_object_get_handle (self->client);
if (!client_handle) {
GST_ERROR_OBJECT (self, "IAudioClient is not available");
return E_FAIL;
@ -719,13 +721,13 @@ gst_wasapi2_ring_buffer_io_callback (GstWasapi2RingBuffer * self)
}
switch (self->device_class) {
case GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE:
case GST_WASAPI2_CLIENT_DEVICE_CLASS_LOOPBACK_CAPTURE:
case GST_WASAPI2_CLIENT_DEVICE_CLASS_INCLUDE_PROCESS_LOOPBACK_CAPTURE:
case GST_WASAPI2_CLIENT_DEVICE_CLASS_EXCLUDE_PROCESS_LOOPBACK_CAPTURE:
case GST_WASAPI2_ENDPOINT_CLASS_CAPTURE:
case GST_WASAPI2_ENDPOINT_CLASS_LOOPBACK_CAPTURE:
case GST_WASAPI2_ENDPOINT_CLASS_INCLUDE_PROCESS_LOOPBACK_CAPTURE:
case GST_WASAPI2_ENDPOINT_CLASS_EXCLUDE_PROCESS_LOOPBACK_CAPTURE:
hr = gst_wasapi2_ring_buffer_read (self);
break;
case GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER:
case GST_WASAPI2_ENDPOINT_CLASS_RENDER:
hr = gst_wasapi2_ring_buffer_write (self, FALSE);
break;
default:
@ -738,8 +740,8 @@ gst_wasapi2_ring_buffer_io_callback (GstWasapi2RingBuffer * self)
* loopback capture client doesn't seem to be able to recover status from this
* situation */
if (self->can_auto_routing &&
!gst_wasapi2_device_class_is_loopback (self->device_class) &&
!gst_wasapi2_device_class_is_process_loopback (self->device_class) &&
!gst_wasapi2_is_loopback_class (self->device_class) &&
!gst_wasapi2_is_process_loopback_class (self->device_class) &&
(hr == AUDCLNT_E_ENDPOINT_CREATE_FAILED
|| hr == AUDCLNT_E_DEVICE_INVALIDATED)) {
GST_WARNING_OBJECT (self,
@ -751,7 +753,7 @@ gst_wasapi2_ring_buffer_io_callback (GstWasapi2RingBuffer * self)
if (gst_wasapi2_result (hr) &&
/* In case of normal loopback capture, this method is called from
* silence feeding thread. Don't schedule again in that case */
self->device_class != GST_WASAPI2_CLIENT_DEVICE_CLASS_LOOPBACK_CAPTURE) {
self->device_class != GST_WASAPI2_ENDPOINT_CLASS_LOOPBACK_CAPTURE) {
hr = MFPutWaitingWorkItem (self->event_handle, 0, self->callback_result,
&self->callback_key);
@ -783,7 +785,7 @@ gst_wasapi2_ring_buffer_fill_loopback_silence (GstWasapi2RingBuffer * self)
guint32 can_write;
BYTE *data = nullptr;
client_handle = gst_wasapi2_client_get_handle (self->loopback_client);
client_handle = gst_wasapi2_object_get_handle (self->loopback_client);
if (!client_handle) {
GST_ERROR_OBJECT (self, "IAudioClient is not available");
return E_FAIL;
@ -824,7 +826,7 @@ gst_wasapi2_ring_buffer_loopback_callback (GstWasapi2RingBuffer * self)
HRESULT hr = E_FAIL;
g_return_val_if_fail (GST_IS_WASAPI2_RING_BUFFER (self), E_FAIL);
g_return_val_if_fail (gst_wasapi2_device_class_is_loopback
g_return_val_if_fail (gst_wasapi2_is_loopback_class
(self->device_class), E_FAIL);
if (!self->running) {
@ -905,7 +907,7 @@ gst_wasapi2_ring_buffer_initialize_audio_client3 (GstWasapi2RingBuffer * self,
static HRESULT
gst_wasapi2_ring_buffer_initialize_audio_client (GstWasapi2RingBuffer * self,
IAudioClient * client_handle, WAVEFORMATEX * mix_format, guint * period,
DWORD extra_flags, GstWasapi2ClientDeviceClass device_class,
DWORD extra_flags, GstWasapi2EndpointClass device_class,
GstAudioRingBufferSpec * spec, gboolean low_latency)
{
GstAudioRingBuffer *ringbuffer = GST_AUDIO_RING_BUFFER_CAST (self);
@ -917,7 +919,7 @@ gst_wasapi2_ring_buffer_initialize_audio_client (GstWasapi2RingBuffer * self,
stream_flags |= extra_flags;
if (!gst_wasapi2_device_class_is_process_loopback (device_class)) {
if (!gst_wasapi2_is_process_loopback_class (device_class)) {
hr = client_handle->GetDevicePeriod (&default_period, &min_period);
if (!gst_wasapi2_result (hr)) {
GST_WARNING_OBJECT (self, "Couldn't get device period info");
@ -989,12 +991,7 @@ gst_wasapi2_ring_buffer_prepare_loopback_client (GstWasapi2RingBuffer * self)
return FALSE;
}
if (!gst_wasapi2_client_ensure_activation (self->loopback_client)) {
GST_ERROR_OBJECT (self, "Failed to activate audio client");
return FALSE;
}
client_handle = gst_wasapi2_client_get_handle (self->loopback_client);
client_handle = gst_wasapi2_object_get_handle (self->loopback_client);
if (!client_handle) {
GST_ERROR_OBJECT (self, "IAudioClient handle is not available");
return FALSE;
@ -1007,7 +1004,7 @@ gst_wasapi2_ring_buffer_prepare_loopback_client (GstWasapi2RingBuffer * self)
}
hr = gst_wasapi2_ring_buffer_initialize_audio_client (self, client_handle,
mix_format, &period, 0, GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER,
mix_format, &period, 0, GST_WASAPI2_ENDPOINT_CLASS_RENDER,
nullptr, FALSE);
CoTaskMemFree (mix_format);
@ -1080,19 +1077,14 @@ gst_wasapi2_ring_buffer_acquire (GstAudioRingBuffer * buf,
if (!self->client && !gst_wasapi2_ring_buffer_open_device (buf))
return FALSE;
if (gst_wasapi2_device_class_is_loopback (self->device_class)) {
if (gst_wasapi2_is_loopback_class (self->device_class)) {
if (!gst_wasapi2_ring_buffer_prepare_loopback_client (self)) {
GST_ERROR_OBJECT (self, "Failed to prepare loopback client");
goto error;
}
}
if (!gst_wasapi2_client_ensure_activation (self->client)) {
GST_ERROR_OBJECT (self, "Failed to activate audio client");
goto error;
}
client_handle = gst_wasapi2_client_get_handle (self->client);
client_handle = gst_wasapi2_object_get_handle (self->client);
if (!client_handle) {
GST_ERROR_OBJECT (self, "IAudioClient handle is not available");
goto error;
@ -1101,7 +1093,7 @@ gst_wasapi2_ring_buffer_acquire (GstAudioRingBuffer * buf,
/* TODO: convert given caps to mix format */
hr = client_handle->GetMixFormat (&mix_format);
if (!gst_wasapi2_result (hr)) {
if (gst_wasapi2_device_class_is_process_loopback (self->device_class)) {
if (gst_wasapi2_is_process_loopback_class (self->device_class)) {
mix_format = gst_wasapi2_get_default_mix_format ();
} else {
GST_ERROR_OBJECT (self, "Failed to get mix format");
@ -1116,8 +1108,8 @@ gst_wasapi2_ring_buffer_acquire (GstAudioRingBuffer * buf,
if (self->low_latency &&
/* AUDCLNT_STREAMFLAGS_LOOPBACK is not allowed for
* InitializeSharedAudioStream */
!gst_wasapi2_device_class_is_loopback (self->device_class) &&
!gst_wasapi2_device_class_is_process_loopback (self->device_class)) {
!gst_wasapi2_is_loopback_class (self->device_class) &&
!gst_wasapi2_is_process_loopback_class (self->device_class)) {
hr = gst_wasapi2_ring_buffer_initialize_audio_client3 (self, client_handle,
mix_format, &period);
}
@ -1130,7 +1122,7 @@ gst_wasapi2_ring_buffer_acquire (GstAudioRingBuffer * buf,
*/
if (FAILED (hr)) {
DWORD extra_flags = 0;
if (gst_wasapi2_device_class_is_loopback (self->device_class))
if (gst_wasapi2_is_loopback_class (self->device_class))
extra_flags = AUDCLNT_STREAMFLAGS_LOOPBACK;
hr = gst_wasapi2_ring_buffer_initialize_audio_client (self, client_handle,
@ -1175,7 +1167,7 @@ gst_wasapi2_ring_buffer_acquire (GstAudioRingBuffer * buf,
"Buffer size: %d frames, period: %d frames, segsize: %d bytes, "
"segtotal: %d", self->buffer_size, period, spec->segsize, spec->segtotal);
if (self->device_class == GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER) {
if (self->device_class == GST_WASAPI2_ENDPOINT_CLASS_RENDER) {
ComPtr < IAudioRenderClient > render_client;
hr = client_handle->GetService (IID_PPV_ARGS (&render_client));
@ -1258,14 +1250,14 @@ gst_wasapi2_ring_buffer_start_internal (GstWasapi2RingBuffer * self)
return TRUE;
}
client_handle = gst_wasapi2_client_get_handle (self->client);
client_handle = gst_wasapi2_object_get_handle (self->client);
self->is_first = TRUE;
self->running = TRUE;
self->segoffset = 0;
self->write_frame_offset = 0;
switch (self->device_class) {
case GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER:
case GST_WASAPI2_ENDPOINT_CLASS_RENDER:
/* render client might read data from buffer immediately once it's prepared.
* Pre-fill with silence in order to start-up glitch */
hr = gst_wasapi2_ring_buffer_write (self, TRUE);
@ -1274,13 +1266,13 @@ gst_wasapi2_ring_buffer_start_internal (GstWasapi2RingBuffer * self)
goto error;
}
break;
case GST_WASAPI2_CLIENT_DEVICE_CLASS_LOOPBACK_CAPTURE:
case GST_WASAPI2_ENDPOINT_CLASS_LOOPBACK_CAPTURE:
{
IAudioClient *loopback_client_handle;
/* Start silence feed client first */
loopback_client_handle =
gst_wasapi2_client_get_handle (self->loopback_client);
gst_wasapi2_object_get_handle (self->loopback_client);
hr = loopback_client_handle->Start ();
if (!gst_wasapi2_result (hr)) {
@ -1310,7 +1302,7 @@ gst_wasapi2_ring_buffer_start_internal (GstWasapi2RingBuffer * self)
goto error;
}
if (self->device_class != GST_WASAPI2_CLIENT_DEVICE_CLASS_LOOPBACK_CAPTURE) {
if (self->device_class != GST_WASAPI2_ENDPOINT_CLASS_LOOPBACK_CAPTURE) {
hr = MFPutWaitingWorkItem (self->event_handle, 0, self->callback_result,
&self->callback_key);
if (!gst_wasapi2_result (hr)) {
@ -1364,7 +1356,7 @@ gst_wasapi2_ring_buffer_stop_internal (GstWasapi2RingBuffer * self)
return TRUE;
}
client_handle = gst_wasapi2_client_get_handle (self->client);
client_handle = gst_wasapi2_object_get_handle (self->client);
self->running = FALSE;
MFCancelWorkItem (self->callback_key);
@ -1378,7 +1370,7 @@ gst_wasapi2_ring_buffer_stop_internal (GstWasapi2RingBuffer * self)
self->write_frame_offset = 0;
if (self->loopback_client) {
client_handle = gst_wasapi2_client_get_handle (self->loopback_client);
client_handle = gst_wasapi2_object_get_handle (self->loopback_client);
MFCancelWorkItem (self->loopback_callback_key);
@ -1434,7 +1426,7 @@ gst_wasapi2_ring_buffer_delay (GstAudioRingBuffer * buf)
}
GstAudioRingBuffer *
gst_wasapi2_ring_buffer_new (GstWasapi2ClientDeviceClass device_class,
gst_wasapi2_ring_buffer_new (GstWasapi2EndpointClass device_class,
gboolean low_latency, const gchar * device_id, gpointer dispatcher,
const gchar * name, guint loopback_target_pid)
{
@ -1468,12 +1460,7 @@ gst_wasapi2_ring_buffer_get_caps (GstWasapi2RingBuffer * buf)
if (!buf->client)
return nullptr;
if (!gst_wasapi2_client_ensure_activation (buf->client)) {
GST_ERROR_OBJECT (buf, "Failed to activate audio client");
return nullptr;
}
buf->supported_caps = gst_wasapi2_client_get_caps (buf->client);
buf->supported_caps = gst_wasapi2_object_get_caps (buf->client);
if (buf->supported_caps)
return gst_caps_ref (buf->supported_caps);

View File

@ -22,7 +22,7 @@
#include <gst/gst.h>
#include <gst/audio/audio.h>
#include "gstwasapi2client.h"
#include "gstwasapi2util.h"
G_BEGIN_DECLS
@ -30,7 +30,7 @@ G_BEGIN_DECLS
G_DECLARE_FINAL_TYPE (GstWasapi2RingBuffer, gst_wasapi2_ring_buffer,
GST, WASAPI2_RING_BUFFER, GstAudioRingBuffer);
GstAudioRingBuffer * gst_wasapi2_ring_buffer_new (GstWasapi2ClientDeviceClass device_class,
GstAudioRingBuffer * gst_wasapi2_ring_buffer_new (GstWasapi2EndpointClass device_class,
gboolean low_latency,
const gchar *device_id,
gpointer dispatcher,

View File

@ -124,7 +124,7 @@ gst_wasapi2_sink_class_init (GstWasapi2SinkClass * klass)
g_object_class_install_property (gobject_class, PROP_DEVICE,
g_param_spec_string ("device", "Device",
"Audio device ID as provided by "
"Windows.Devices.Enumeration.DeviceInformation.Id",
"WASAPI device endpoint ID as provided by IMMDevice::GetId",
NULL, GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
G_PARAM_STATIC_STRINGS));
@ -333,7 +333,7 @@ gst_wasapi2_sink_create_ringbuffer (GstAudioBaseSink * sink)
name = g_strdup_printf ("%s-ringbuffer", GST_OBJECT_NAME (sink));
ringbuffer =
gst_wasapi2_ring_buffer_new (GST_WASAPI2_CLIENT_DEVICE_CLASS_RENDER,
gst_wasapi2_ring_buffer_new (GST_WASAPI2_ENDPOINT_CLASS_RENDER,
self->low_latency, self->device_id, self->dispatcher, name, 0);
g_free (name);

View File

@ -194,7 +194,7 @@ gst_wasapi2_src_class_init (GstWasapi2SrcClass * klass)
g_object_class_install_property (gobject_class, PROP_DEVICE,
g_param_spec_string ("device", "Device",
"Audio device ID as provided by "
"Windows.Devices.Enumeration.DeviceInformation.Id",
"WASAPI device endpoint ID as provided by IMMDevice::GetId",
NULL, GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
G_PARAM_STATIC_STRINGS));
@ -493,20 +493,19 @@ gst_wasapi2_src_create_ringbuffer (GstAudioBaseSrc * src)
GstWasapi2Src *self = GST_WASAPI2_SRC (src);
GstAudioRingBuffer *ringbuffer;
gchar *name;
GstWasapi2ClientDeviceClass device_class =
GST_WASAPI2_CLIENT_DEVICE_CLASS_CAPTURE;
GstWasapi2EndpointClass device_class = GST_WASAPI2_ENDPOINT_CLASS_CAPTURE;
if (self->loopback_pid) {
if (self->loopback_mode == GST_WASAPI2_SRC_LOOPBACK_INCLUDE_PROCESS_TREE) {
device_class =
GST_WASAPI2_CLIENT_DEVICE_CLASS_INCLUDE_PROCESS_LOOPBACK_CAPTURE;
GST_WASAPI2_ENDPOINT_CLASS_INCLUDE_PROCESS_LOOPBACK_CAPTURE;
} else if (self->loopback_mode ==
GST_WASAPI2_SRC_LOOPBACK_EXCLUDE_PROCESS_TREE) {
device_class =
GST_WASAPI2_CLIENT_DEVICE_CLASS_EXCLUDE_PROCESS_LOOPBACK_CAPTURE;
GST_WASAPI2_ENDPOINT_CLASS_EXCLUDE_PROCESS_LOOPBACK_CAPTURE;
}
} else if (self->loopback) {
device_class = GST_WASAPI2_CLIENT_DEVICE_CLASS_LOOPBACK_CAPTURE;
device_class = GST_WASAPI2_ENDPOINT_CLASS_LOOPBACK_CAPTURE;
}
GST_DEBUG_OBJECT (self, "Device class %d", device_class);

View File

@ -7,6 +7,7 @@ wasapi2_sources = [
'gstwasapi2ringbuffer.cpp',
'gstwasapi2activator.cpp',
'gstwasapi2enumerator.cpp',
'gstwasapi2object.cpp',
'plugin.cpp',
]