Web Audio: Experimental loopback support.

This uses `getDisplayMedia()`. Support for this is extremely browser and
system specific so I'm not advertising support for this in the documentation.

Public issue https://github.com/mackron/miniaudio/issues/967
This commit is contained in:
David Reid
2026-01-21 12:18:42 +10:00
parent f6b973d384
commit ba963e46b5
2 changed files with 156 additions and 34 deletions
+62 -28
View File
@@ -45937,6 +45937,7 @@ static void ma_backend_info__webaudio(ma_device_backend_info* pBackendInfo)
{
MA_ASSERT(pBackendInfo != NULL);
pBackendInfo->pName = "Web Audio";
pBackendInfo->isLoopbackSupported = MA_TRUE;
pBackendInfo->noAudioThread = MA_TRUE; /* We don't want to be creating a miniaudio-managed audio thread with Web Audio. */
pBackendInfo->isMultiThreadedModeAllowedWhenThreadingDisabled = MA_TRUE;
}
@@ -45974,11 +45975,12 @@ static ma_result ma_context_init__webaudio(ma_context* pContext, const void* pCo
window.miniaudio.device_type.playback = $0;
window.miniaudio.device_type.capture = $1;
window.miniaudio.device_type.duplex = $2;
window.miniaudio.device_type.loopback = $3;
/* Device states. */
window.miniaudio.device_status = {};
window.miniaudio.device_status.stopped = $3;
window.miniaudio.device_status.started = $4;
window.miniaudio.device_status.stopped = $4;
window.miniaudio.device_status.started = $5;
/* Device cache for mapping devices to indexes for JavaScript/C interop. */
let miniaudio = window.miniaudio;
@@ -46052,7 +46054,7 @@ static ma_result ma_context_init__webaudio(ma_context* pContext, const void* pCo
window.miniaudio.referenceCount += 1;
return 1;
}, ma_device_type_playback, ma_device_type_capture, ma_device_type_duplex, ma_device_status_stopped, ma_device_status_started);
}, ma_device_type_playback, ma_device_type_capture, ma_device_type_duplex, ma_device_type_loopback, ma_device_status_stopped, ma_device_status_started);
if (resultFromJS != 1) {
ma_free(pContextStateWebAudio, ma_context_get_allocation_callbacks(pContext));
@@ -46255,7 +46257,7 @@ static EM_BOOL ma_audio_worklet_process_callback__webaudio(int inputCount, const
if (outputCount > 0) {
/* If it's a capture-only device, we'll need to output silence. */
if (deviceType == ma_device_type_capture) {
if (deviceType == ma_device_type_capture || deviceType == ma_device_type_loopback) {
for (int i = 0; i < outputCount; i += 1) {
MA_ZERO_MEMORY(pOutputs[i].data, pOutputs[i].numberOfChannels * frameCount * sizeof(float));
}
@@ -46307,7 +46309,7 @@ static void ma_audio_worklet_processor_created__webaudio(EMSCRIPTEN_WEBAUDIO_T a
wouldn't actually connect an output to an input-only node, but this is what we'll have to do in order to have
proper control over the channel count. In the capture case, we'll have to output silence to its output node.
*/
if (deviceType == ma_device_type_capture) {
if (deviceType == ma_device_type_capture || deviceType == ma_device_type_loopback) {
MA_ASSERT(pParameters->descriptorCapture.channels > 0); /* Should have been initialized to a valid value earlier. */
channels = (int)pParameters->descriptorCapture.channels;
audioWorkletOptions.numberOfInputs = 1;
@@ -46350,26 +46352,46 @@ static void ma_audio_worklet_processor_created__webaudio(EMSCRIPTEN_WEBAUDIO_T a
pParameters->pDeviceStateWebAudio->audioWorklet = emscripten_create_wasm_audio_worklet_node(audioContext, "miniaudio", &audioWorkletOptions, &ma_audio_worklet_process_callback__webaudio, pParameters->pDevice);
/* With the audio worklet initialized we can now attach it to the graph. */
if (deviceType == ma_device_type_capture || deviceType == ma_device_type_duplex) {
if (deviceType == ma_device_type_capture || deviceType == ma_device_type_duplex || deviceType == ma_device_type_loopback) {
ma_result attachmentResult = (ma_result)EM_ASM_INT({
var getUserMediaResult = 0;
var getMediaResult = 0;
var audioWorklet = emscriptenGetAudioObject($0);
var audioContext = emscriptenGetAudioObject($1);
var deviceType = $2;
navigator.mediaDevices.getUserMedia({audio:true, video:false})
.then(function(stream) {
audioContext.streamNode = audioContext.createMediaStreamSource(stream);
audioContext.streamNode.connect(audioWorklet);
audioWorklet.connect(audioContext.destination);
getUserMediaResult = 0; /* 0 = MA_SUCCESS */
})
.catch(function(error) {
console.log("navigator.mediaDevices.getUserMedia Failed: " + error);
getUserMediaResult = -1; /* -1 = MA_ERROR */
});
if (deviceType == window.miniaudio.device_type.loopback) {
if (typeof navigator.mediaDevices.getDisplayMedia === 'undefined') {
console.log("navigator.mediaDevices.getDisplayMedia undefined. Cannot use loopback mode.");
getMediaResult = -1;
} else {
navigator.mediaDevices.getDisplayMedia({audio:true, video:true})
.then(function(stream) {
audioContext.streamNode = audioContext.createMediaStreamSource(stream);
audioContext.streamNode.connect(audioWorklet);
audioWorklet.connect(audioContext.destination);
getMediaResult = 0; /* 0 = MA_SUCCESS */
})
.catch(function(error) {
console.log("navigator.mediaDevices.getDisplayMedia Failed: " + error);
getMediaResult = -1; /* -1 = MA_ERROR */
});
}
} else {
navigator.mediaDevices.getUserMedia({audio:true, video:false})
.then(function(stream) {
audioContext.streamNode = audioContext.createMediaStreamSource(stream);
audioContext.streamNode.connect(audioWorklet);
audioWorklet.connect(audioContext.destination);
getMediaResult = 0; /* 0 = MA_SUCCESS */
})
.catch(function(error) {
console.log("navigator.mediaDevices.getUserMedia Failed: " + error);
getMediaResult = -1; /* -1 = MA_ERROR */
});
}
return getUserMediaResult;
}, pParameters->pDeviceStateWebAudio->audioWorklet, audioContext);
return getMediaResult;
}, pParameters->pDeviceStateWebAudio->audioWorklet, audioContext, deviceType);
if (attachmentResult != MA_SUCCESS) {
ma_log_postf(ma_device_get_log(pParameters->pDevice), MA_LOG_LEVEL_ERROR, "Web Audio: Failed to connect capture node.");
@@ -46436,10 +46458,6 @@ static ma_result ma_device_init__webaudio(ma_device* pDevice, const void* pDevic
pDeviceConfigWebAudio = &defaultConfigWebAudio;
}
if (deviceType == ma_device_type_loopback) {
return MA_DEVICE_TYPE_NOT_SUPPORTED;
}
/* No exclusive mode with Web Audio. */
if (((deviceType == ma_device_type_playback || deviceType == ma_device_type_duplex) && pDescriptorPlayback->shareMode == ma_share_mode_exclusive) ||
((deviceType == ma_device_type_capture || deviceType == ma_device_type_duplex) && pDescriptorCapture->shareMode == ma_share_mode_exclusive)) {
@@ -46518,7 +46536,7 @@ static ma_result ma_device_init__webaudio(ma_device* pDevice, const void* pDevic
format = ma_format_f32;
/* The channels are chosen by us. For duplex mode we'll always use the playback channel count. */
if (deviceType == ma_device_type_capture) {
if (deviceType == ma_device_type_capture || deviceType == ma_device_type_loopback) {
channels = (pDescriptorCapture->channels > 0) ? pDescriptorCapture->channels : MA_DEFAULT_CHANNELS;
} else {
channels = (pDescriptorPlayback->channels > 0) ? pDescriptorPlayback->channels : MA_DEFAULT_CHANNELS;
@@ -46627,7 +46645,7 @@ static ma_result ma_device_init__webaudio(ma_device* pDevice, const void* pDevic
ma_uint32 periodSizeInFrames;
/* The channel count will depend on the device type. If it's a capture device, use its channel count; otherwise use the playback side. */
if (deviceType == ma_device_type_capture) {
if (deviceType == ma_device_type_capture || deviceType == ma_device_type_loopback) {
channels = (pDescriptorCapture->channels > 0) ? pDescriptorCapture->channels : MA_DEFAULT_CHANNELS;
} else {
channels = (pDescriptorPlayback->channels > 0) ? pDescriptorPlayback->channels : MA_DEFAULT_CHANNELS;
@@ -46644,7 +46662,7 @@ static ma_result ma_device_init__webaudio(ma_device* pDevice, const void* pDevic
}
/* The period size needs to be a power of 2. */
if (deviceType == ma_device_type_capture) {
if (deviceType == ma_device_type_capture || deviceType == ma_device_type_loopback) {
periodSizeInFrames = ma_calculate_period_size_in_frames_from_descriptor__webaudio(pDescriptorCapture, sampleRate);
} else {
periodSizeInFrames = ma_calculate_period_size_in_frames_from_descriptor__webaudio(pDescriptorPlayback, sampleRate);
@@ -46745,6 +46763,22 @@ static ma_result ma_device_init__webaudio(ma_device* pDevice, const void* pDevic
});
}
if (deviceType == window.miniaudio.device_type.loopback) {
if (typeof navigator.mediaDevices.getDisplayMedia === 'undefined') {
return -1;
}
navigator.mediaDevices.getDisplayMedia({audio:true, video:true})
.then(function(stream) {
device.streamNode = device.webaudio.createMediaStreamSource(stream);
device.streamNode.connect(device.scriptNode);
device.scriptNode.connect(device.webaudio.destination);
})
.catch(function(error) {
console.log("Failed to get user media: " + error);
});
}
if (deviceType == window.miniaudio.device_type.playback) {
device.scriptNode.connect(device.webaudio.destination);
}
@@ -49714,7 +49748,7 @@ MA_API void ma_device_state_async_process(ma_device_state_async* pAsyncDeviceSta
}
if (pInput != NULL) {
if (pAsyncDeviceState->deviceType == ma_device_type_capture || pAsyncDeviceState->deviceType == ma_device_type_duplex) {
if (pAsyncDeviceState->deviceType == ma_device_type_capture || pAsyncDeviceState->deviceType == ma_device_type_duplex || pAsyncDeviceState->deviceType == ma_device_type_loopback) {
ma_spinlock_lock(&pAsyncDeviceState->capture.lock);
{
ma_uint32 framesToCopy;