Web Audio: Experimental loopback support.

This uses `getDisplayMedia()`. Support for this is extremely browser- and
system-specific, so I'm not advertising support for this in the documentation.

Public issue https://github.com/mackron/miniaudio/issues/967
This commit is contained in:
David Reid
2026-01-21 12:18:42 +10:00
parent f6b973d384
commit ba963e46b5
2 changed files with 156 additions and 34 deletions
+62 -28
View File
@@ -45937,6 +45937,7 @@ static void ma_backend_info__webaudio(ma_device_backend_info* pBackendInfo)
{
    MA_ASSERT(pBackendInfo != NULL);

    pBackendInfo->pName = "Web Audio";
    pBackendInfo->isLoopbackSupported = MA_TRUE;  /* Loopback is implemented via getDisplayMedia(); support is browser/system specific. */
    pBackendInfo->noAudioThread = MA_TRUE; /* We don't want to be creating a miniaudio-managed audio thread with Web Audio. */
    pBackendInfo->isMultiThreadedModeAllowedWhenThreadingDisabled = MA_TRUE;
}
@@ -45974,11 +45975,12 @@ static ma_result ma_context_init__webaudio(ma_context* pContext, const void* pCo
window.miniaudio.device_type.playback = $0;
window.miniaudio.device_type.capture = $1;
window.miniaudio.device_type.duplex = $2;
window.miniaudio.device_type.loopback = $3;
/* Device states. */
window.miniaudio.device_status = {};
window.miniaudio.device_status.stopped = $3;
window.miniaudio.device_status.started = $4;
window.miniaudio.device_status.stopped = $4;
window.miniaudio.device_status.started = $5;
/* Device cache for mapping devices to indexes for JavaScript/C interop. */
let miniaudio = window.miniaudio;
@@ -46052,7 +46054,7 @@ static ma_result ma_context_init__webaudio(ma_context* pContext, const void* pCo
window.miniaudio.referenceCount += 1;
return 1;
}, ma_device_type_playback, ma_device_type_capture, ma_device_type_duplex, ma_device_status_stopped, ma_device_status_started);
}, ma_device_type_playback, ma_device_type_capture, ma_device_type_duplex, ma_device_type_loopback, ma_device_status_stopped, ma_device_status_started);
if (resultFromJS != 1) {
ma_free(pContextStateWebAudio, ma_context_get_allocation_callbacks(pContext));
@@ -46255,7 +46257,7 @@ static EM_BOOL ma_audio_worklet_process_callback__webaudio(int inputCount, const
if (outputCount > 0) {
/* If it's a capture-only device, we'll need to output silence. */
if (deviceType == ma_device_type_capture) {
if (deviceType == ma_device_type_capture || deviceType == ma_device_type_loopback) {
for (int i = 0; i < outputCount; i += 1) {
MA_ZERO_MEMORY(pOutputs[i].data, pOutputs[i].numberOfChannels * frameCount * sizeof(float));
}
@@ -46307,7 +46309,7 @@ static void ma_audio_worklet_processor_created__webaudio(EMSCRIPTEN_WEBAUDIO_T a
wouldn't actually connect an output to an input-only node, but this is what we'll have to do in order to have
proper control over the channel count. In the capture case, we'll have to output silence to its output node.
*/
if (deviceType == ma_device_type_capture) {
if (deviceType == ma_device_type_capture || deviceType == ma_device_type_loopback) {
MA_ASSERT(pParameters->descriptorCapture.channels > 0); /* Should have been initialized to a valid value earlier. */
channels = (int)pParameters->descriptorCapture.channels;
audioWorkletOptions.numberOfInputs = 1;
@@ -46350,26 +46352,46 @@ static void ma_audio_worklet_processor_created__webaudio(EMSCRIPTEN_WEBAUDIO_T a
pParameters->pDeviceStateWebAudio->audioWorklet = emscripten_create_wasm_audio_worklet_node(audioContext, "miniaudio", &audioWorkletOptions, &ma_audio_worklet_process_callback__webaudio, pParameters->pDevice);
/* With the audio worklet initialized we can now attach it to the graph. */
if (deviceType == ma_device_type_capture || deviceType == ma_device_type_duplex) {
if (deviceType == ma_device_type_capture || deviceType == ma_device_type_duplex || deviceType == ma_device_type_loopback) {
ma_result attachmentResult = (ma_result)EM_ASM_INT({
var getUserMediaResult = 0;
var getMediaResult = 0;
var audioWorklet = emscriptenGetAudioObject($0);
var audioContext = emscriptenGetAudioObject($1);
var deviceType = $2;
navigator.mediaDevices.getUserMedia({audio:true, video:false})
.then(function(stream) {
audioContext.streamNode = audioContext.createMediaStreamSource(stream);
audioContext.streamNode.connect(audioWorklet);
audioWorklet.connect(audioContext.destination);
getUserMediaResult = 0; /* 0 = MA_SUCCESS */
})
.catch(function(error) {
console.log("navigator.mediaDevices.getUserMedia Failed: " + error);
getUserMediaResult = -1; /* -1 = MA_ERROR */
});
if (deviceType == window.miniaudio.device_type.loopback) {
if (navigator.mediaDevices.getDisplayMedia === 'undefined') {
console.log("navigator.mediaDevices.getDisplayMedia undefined. Cannot use loopback mode.");
getMediaResult = -1;
} else {
navigator.mediaDevices.getDisplayMedia({audio:true, video:true})
.then(function(stream) {
audioContext.streamNode = audioContext.createMediaStreamSource(stream);
audioContext.streamNode.connect(audioWorklet);
audioWorklet.connect(audioContext.destination);
getMediaResult = 0; /* 0 = MA_SUCCESS */
})
.catch(function(error) {
console.log("navigator.mediaDevices.getDisplayMedia Failed: " + error);
getMediaResult = -1; /* -1 = MA_ERROR */
});
}
} else {
navigator.mediaDevices.getUserMedia({audio:true, video:false})
.then(function(stream) {
audioContext.streamNode = audioContext.createMediaStreamSource(stream);
audioContext.streamNode.connect(audioWorklet);
audioWorklet.connect(audioContext.destination);
getMediaResult = 0; /* 0 = MA_SUCCESS */
})
.catch(function(error) {
console.log("navigator.mediaDevices.getUserMedia Failed: " + error);
getMediaResult = -1; /* -1 = MA_ERROR */
});
}
return getUserMediaResult;
}, pParameters->pDeviceStateWebAudio->audioWorklet, audioContext);
return getMediaResult;
}, pParameters->pDeviceStateWebAudio->audioWorklet, audioContext, deviceType);
if (attachmentResult != MA_SUCCESS) {
ma_log_postf(ma_device_get_log(pParameters->pDevice), MA_LOG_LEVEL_ERROR, "Web Audio: Failed to connect capture node.");
@@ -46436,10 +46458,6 @@ static ma_result ma_device_init__webaudio(ma_device* pDevice, const void* pDevic
pDeviceConfigWebAudio = &defaultConfigWebAudio;
}
if (deviceType == ma_device_type_loopback) {
return MA_DEVICE_TYPE_NOT_SUPPORTED;
}
/* No exclusive mode with Web Audio. */
if (((deviceType == ma_device_type_playback || deviceType == ma_device_type_duplex) && pDescriptorPlayback->shareMode == ma_share_mode_exclusive) ||
((deviceType == ma_device_type_capture || deviceType == ma_device_type_duplex) && pDescriptorCapture->shareMode == ma_share_mode_exclusive)) {
@@ -46518,7 +46536,7 @@ static ma_result ma_device_init__webaudio(ma_device* pDevice, const void* pDevic
format = ma_format_f32;
/* The channels are chosen by us. For duplex mode we'll always use the playback channel count. */
if (deviceType == ma_device_type_capture) {
if (deviceType == ma_device_type_capture || deviceType == ma_device_type_loopback) {
channels = (pDescriptorCapture->channels > 0) ? pDescriptorCapture->channels : MA_DEFAULT_CHANNELS;
} else {
channels = (pDescriptorPlayback->channels > 0) ? pDescriptorPlayback->channels : MA_DEFAULT_CHANNELS;
@@ -46627,7 +46645,7 @@ static ma_result ma_device_init__webaudio(ma_device* pDevice, const void* pDevic
ma_uint32 periodSizeInFrames;
/* The channel count will depend on the device type. If it's a capture, use its, otherwise use the playback side. */
if (deviceType == ma_device_type_capture) {
if (deviceType == ma_device_type_capture || deviceType == ma_device_type_loopback) {
channels = (pDescriptorCapture->channels > 0) ? pDescriptorCapture->channels : MA_DEFAULT_CHANNELS;
} else {
channels = (pDescriptorPlayback->channels > 0) ? pDescriptorPlayback->channels : MA_DEFAULT_CHANNELS;
@@ -46644,7 +46662,7 @@ static ma_result ma_device_init__webaudio(ma_device* pDevice, const void* pDevic
}
/* The period size needs to be a power of 2. */
if (deviceType == ma_device_type_capture) {
if (deviceType == ma_device_type_capture || deviceType == ma_device_type_loopback) {
periodSizeInFrames = ma_calculate_period_size_in_frames_from_descriptor__webaudio(pDescriptorCapture, sampleRate);
} else {
periodSizeInFrames = ma_calculate_period_size_in_frames_from_descriptor__webaudio(pDescriptorPlayback, sampleRate);
@@ -46745,6 +46763,22 @@ static ma_result ma_device_init__webaudio(ma_device* pDevice, const void* pDevic
});
}
if (deviceType == window.miniaudio.device_type.loopback) {
if (navigator.mediaDevices.getDisplayMedia === 'undefined') {
return -1;
}
navigator.mediaDevices.getDisplayMedia({audio:true, video:true})
.then(function(stream) {
device.streamNode = device.webaudio.createMediaStreamSource(stream);
device.streamNode.connect(device.scriptNode);
device.scriptNode.connect(device.webaudio.destination);
})
.catch(function(error) {
console.log("Failed to get user media: " + error);
});
}
if (deviceType == window.miniaudio.device_type.playback) {
device.scriptNode.connect(device.webaudio.destination);
}
@@ -49714,7 +49748,7 @@ MA_API void ma_device_state_async_process(ma_device_state_async* pAsyncDeviceSta
}
if (pInput != NULL) {
if (pAsyncDeviceState->deviceType == ma_device_type_capture || pAsyncDeviceState->deviceType == ma_device_type_duplex) {
if (pAsyncDeviceState->deviceType == ma_device_type_capture || pAsyncDeviceState->deviceType == ma_device_type_duplex || pAsyncDeviceState->deviceType == ma_device_type_loopback) {
ma_spinlock_lock(&pAsyncDeviceState->capture.lock);
{
ma_uint32 framesToCopy;
+94 -6
View File
@@ -22,9 +22,8 @@ ma_waveform sineWave; /* For playback example. */
void main_loop__em(void* pUserData)
{
ma_device* pDevice = (ma_device*)pUserData;
if (ma_device_get_threading_mode(pDevice) == MA_THREADING_MODE_SINGLE_THREADED) {
ma_device_step(pDevice, MA_BLOCKING_MODE_NON_BLOCKING);
if (threadingMode == MA_THREADING_MODE_SINGLE_THREADED) {
ma_device_step((ma_device*)pUserData, MA_BLOCKING_MODE_NON_BLOCKING);
}
}
@@ -97,7 +96,6 @@ static void do_duplex()
deviceConfig.capture.pDeviceID = NULL;
deviceConfig.capture.format = DEVICE_FORMAT;
deviceConfig.capture.channels = 2;
deviceConfig.capture.shareMode = ma_share_mode_shared;
deviceConfig.playback.pDeviceID = NULL;
deviceConfig.playback.format = DEVICE_FORMAT;
deviceConfig.playback.channels = 2;
@@ -117,13 +115,102 @@ static void do_duplex()
}
ma_device loopbackPlaybackDevice;
ma_pcm_rb loopbackRB;
/*
Capture-side callback for the loopback example. Pushes the captured frames into
the shared ring buffer (loopbackRB) for the playback device to consume. If the
ring buffer is full, excess frames are dropped (best-effort loopback).
*/
void data_callback_loopback_capture(ma_device* pDevice, void* pOutput, const void* pInput, ma_uint32 frameCount)
{
    ma_uint32 framesToWrite;
    ma_uint32 bytesPerFrame;
    void* pBuffer;

    (void)pOutput;

    bytesPerFrame = ma_get_bytes_per_frame(pDevice->capture.format, pDevice->capture.channels);

    /* Write to the ring buffer. framesToWrite is a frame count, so the copy must be scaled to bytes. */
    framesToWrite = frameCount;
    if (ma_pcm_rb_acquire_write(&loopbackRB, &framesToWrite, &pBuffer) == MA_SUCCESS) {
        MA_COPY_MEMORY(pBuffer, pInput, framesToWrite * bytesPerFrame);
        ma_pcm_rb_commit_write(&loopbackRB, framesToWrite);
    }
}
/*
Playback-side callback for the loopback example. Pulls frames from the shared
ring buffer (loopbackRB) and writes them to the output. Any frames that could
not be read (underrun) are filled with silence so we never play garbage.
*/
void data_callback_loopback_playback(ma_device* pDevice, void* pOutput, const void* pInput, ma_uint32 frameCount)
{
    ma_uint32 framesToRead;
    ma_uint32 bytesPerFrame;
    void* pBuffer;

    (void)pInput;

    bytesPerFrame = ma_get_bytes_per_frame(pDevice->playback.format, pDevice->playback.channels);

    /* Read from the ring buffer. framesToRead is a frame count, so the copy must be scaled to bytes. */
    framesToRead = frameCount;
    if (ma_pcm_rb_acquire_read(&loopbackRB, &framesToRead, &pBuffer) == MA_SUCCESS) {
        MA_COPY_MEMORY(pOutput, pBuffer, framesToRead * bytesPerFrame);
        ma_pcm_rb_commit_read(&loopbackRB, framesToRead);
    } else {
        framesToRead = 0;
    }

    /* Output silence for anything we couldn't fill from the ring buffer. */
    if (framesToRead < frameCount) {
        MA_ZERO_MEMORY((ma_uint8*)pOutput + (framesToRead * bytesPerFrame), (frameCount - framesToRead) * bytesPerFrame);
    }
}
/*
Initializes and starts the loopback demo: a loopback capture device feeding a
ring buffer, and a playback device draining it. On any failure, everything
initialized so far is torn down so nothing leaks.
*/
static void do_loopback()
{
    ma_result result;
    ma_device_config deviceConfig;
    ma_device_backend_config backend;

    backend = ma_device_backend_config_init(DEVICE_BACKEND, NULL);

    /* Loopback capture device. */
    deviceConfig = ma_device_config_init(ma_device_type_loopback);
    deviceConfig.threadingMode      = threadingMode;
    deviceConfig.capture.format     = DEVICE_FORMAT;
    deviceConfig.capture.channels   = 2;
    deviceConfig.sampleRate         = DEVICE_SAMPLE_RATE;
    deviceConfig.dataCallback       = data_callback_loopback_capture;
    deviceConfig.pBackendConfigs    = &backend;
    deviceConfig.backendConfigCount = 1;

    result = ma_device_init_ex(&backend, 1, NULL, &deviceConfig, &device);
    if (result != MA_SUCCESS) {
        printf("Failed to initialize loopback device.\n");
        return;
    }

    /* Playback device for monitoring the looped-back audio. */
    deviceConfig = ma_device_config_init(ma_device_type_playback);
    deviceConfig.threadingMode      = threadingMode;
    deviceConfig.playback.format    = DEVICE_FORMAT;
    deviceConfig.playback.channels  = 2;
    deviceConfig.sampleRate         = DEVICE_SAMPLE_RATE;
    deviceConfig.dataCallback       = data_callback_loopback_playback;
    deviceConfig.pBackendConfigs    = &backend;
    deviceConfig.backendConfigCount = 1;

    result = ma_device_init_ex(&backend, 1, NULL, &deviceConfig, &loopbackPlaybackDevice);
    if (result != MA_SUCCESS) {
        printf("Failed to initialize loopback playback device.\n");
        ma_device_uninit(&device);  /* Don't leak the capture device. */
        return;
    }

    /* We need a ring buffer. Sized from the capture device's internal period so writes fit comfortably. */
    printf("device.capture.internalPeriodSizeInFrames = %u\n", device.capture.internalPeriodSizeInFrames);
    result = ma_pcm_rb_init(DEVICE_FORMAT, device.capture.channels, device.capture.internalPeriodSizeInFrames * 100, NULL, NULL, &loopbackRB);
    if (result != MA_SUCCESS) {
        printf("Failed to initialize loopback ring buffer.\n");
        ma_device_uninit(&loopbackPlaybackDevice);
        ma_device_uninit(&device);
        return;
    }

    if (ma_device_start(&loopbackPlaybackDevice) != MA_SUCCESS) {
        printf("Failed to start loopback playback device.");
        ma_pcm_rb_uninit(&loopbackRB);
        ma_device_uninit(&loopbackPlaybackDevice);
        ma_device_uninit(&device);
        return;
    }

    if (ma_device_start(&device) != MA_SUCCESS) {
        printf("Failed to start device.");
        ma_device_stop(&loopbackPlaybackDevice);
        ma_pcm_rb_uninit(&loopbackRB);
        ma_device_uninit(&loopbackPlaybackDevice);
        ma_device_uninit(&device);
        return;
    }
}
static EM_BOOL on_canvas_click(int eventType, const EmscriptenMouseEvent* pMouseEvent, void* pUserData)
{
if (isRunning == MA_FALSE) {
if (pMouseEvent->button == 0) { /* Left click. */
/* */ if (pMouseEvent->button == 0) { /* Left click. */
do_playback();
} else if (pMouseEvent->button == 2) { /* Right click. */
} else if (pMouseEvent->button == 2) { /* Right click. */
do_duplex();
} else if (pMouseEvent->button == 1) { /* Middle click. */
do_loopback();
}
isRunning = MA_TRUE;
@@ -208,6 +295,7 @@ int main(int argc, char** argv)
printf("Click inside canvas to start playing:\n");
printf(" Left click for playback\n");
printf(" Right click for duplex\n");
printf(" Middle click for loopback\n");
/* The device must be started in response to an input event. */
emscripten_set_mouseup_callback("canvas", &device, 0, on_canvas_click);