revert feat(audio): migrate from ScriptProcessorNode to AudioWorkletNode for low-latency broadcasting

- Implemented `BroadcasterProcessor` for audio processing in a separate audio thread.
- Replaced deprecated `ScriptProcessorNode` with `AudioWorkletNode` in `startBroadcast`.
- Enhanced audio performance by reducing main thread interference and improving scalability.
- Added `broadcaster-processor.js` to handle custom audio processing logic.

This change ensures compatibility with modern browsers and improves broadcast audio quality.
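
For context, the AudioWorklet path that this revert removes wires together roughly as in the sketch below, reassembled from the app.js diff further down (device selection and error handling are simplified; the `conns` connection list comes from app.js):

// Sketch of the reverted AudioWorklet broadcast path (simplified from the diff below).
const audioContext = new (window.AudioContext || window.webkitAudioContext)();
await audioContext.audioWorklet.addModule('broadcaster-processor.js');

const micStream = await navigator.mediaDevices.getUserMedia({ audio: true });
const source = audioContext.createMediaStreamSource(micStream);

// The node name must match the string passed to registerProcessor() in broadcaster-processor.js.
const broadcasterNode = new AudioWorkletNode(audioContext, 'broadcaster-processor');
source.connect(broadcasterNode);

// The worklet posts raw sample buffers to the main thread, which forwards them to every peer.
broadcasterNode.port.onmessage = (event) => {
  const buffer = event.data;
  for (const conn of conns) {
    conn.write(buffer);
  }
};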
snxraven 2024-11-23 04:28:41 -05:00
parent d3e3e92f9b
commit 18d282cd6c
3 changed files with 11 additions and 49 deletions

app.js

@@ -86,38 +86,32 @@ async function startBroadcast() {
   try {
     audioContext = new (window.AudioContext || window.webkitAudioContext)();
-    // Load and register the audio worklet processor
-    await audioContext.audioWorklet.addModule('broadcaster-processor.js');
     micStream = await navigator.mediaDevices.getUserMedia({
       audio: { deviceId: currentDeviceId ? { exact: currentDeviceId } : undefined },
     });
     const source = audioContext.createMediaStreamSource(micStream);
+    const processor = audioContext.createScriptProcessor(4096, 1, 1);
-    // Create AudioWorkletNode
-    const broadcasterNode = new AudioWorkletNode(audioContext, 'broadcaster-processor');
-    source.connect(broadcasterNode);
+    source.connect(processor);
+    processor.connect(audioContext.destination);
-    // Handle audio data
-    broadcasterNode.port.onmessage = (event) => {
-      const buffer = event.data;
+    processor.onaudioprocess = (event) => {
+      const audioData = event.inputBuffer.getChannelData(0);
+      const buffer = b4a.from(new Float32Array(audioData).buffer);
       // Send audio data to all connections
       for (const conn of conns) {
         conn.write(buffer);
       }
     };
-    broadcasterNode.connect(audioContext.destination); // Optional monitoring
     isBroadcasting = true;
-    console.log("Broadcasting started with AudioWorklet.");
+    console.log("Broadcasting started.");
   } catch (err) {
-    console.error("Error accessing microphone or setting up broadcast:", err);
+    console.error("Error accessing microphone:", err);
   }
 }
 // Function to stop broadcasting and clean up resources
 function stopBroadcast() {
   if (!isBroadcasting) return;
@@ -266,4 +260,4 @@ async function joinStation() {
     if (modalInstance) {
       modalInstance.hide();
     }
   }
 }

broadcaster-processor.js

@ -1,19 +0,0 @@
class BroadcasterProcessor extends AudioWorkletProcessor {
process(inputs, outputs) {
const input = inputs[0];
const output = outputs[0];
if (input && output) {
for (let channel = 0; channel < input.length; ++channel) {
const inputChannel = input[channel];
const outputChannel = output[channel];
for (let i = 0; i < inputChannel.length; ++i) {
outputChannel[i] = inputChannel[i];
}
}
}
return true; // Keep the processor alive
}
}
registerProcessor('broadcaster-processor', BroadcasterProcessor);
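
Worth noting when reading the deleted file above: BroadcasterProcessor only copies input to output and never posts anything on its MessagePort, while the main-thread code listens on `broadcasterNode.port.onmessage`. A processor that actually feeds that handler would look roughly like this sketch (a hedged example, not the project's code; mono capture and the ArrayBuffer payload are assumptions):

class BroadcasterProcessor extends AudioWorkletProcessor {
  process(inputs) {
    const input = inputs[0];
    if (input && input[0]) {
      // Copy the first channel's 128-sample block and hand it to the main thread;
      // the main thread then writes the buffer to each peer connection.
      this.port.postMessage(new Float32Array(input[0]).buffer);
    }
    return true; // keep the processor alive
  }
}

registerProcessor('broadcaster-processor', BroadcasterProcessor);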


@@ -24,18 +24,6 @@
     }
   </style>
-  <script>
-    document.addEventListener('DOMContentLoaded', () => {
-      navigator.mediaDevices.getUserMedia({ audio: true })
-        .then(stream => {
-          // Handle audio stream here
-          console.log("Microphone access granted:", stream);
-        })
-        .catch(error => {
-          console.error("Microphone access denied:", error);
-        });
-    });
-  </script>
 </head>
 <body class="bg-dark text-light">
   <div id="titlebar">
@@ -43,7 +31,6 @@
   </div>
   <div class="container mt-5 text-center">
     <h1>pearCast</h1>
-    <div id="retry-message-bar" class="alert alert-warning d-none" role="alert"></div>
     <div id="setup" class="btn-group mt-4">
       <button id="create-station" class="btn btn-primary">Create Station</button>
       <button id="open-join-modal" class="btn btn-secondary" data-bs-toggle="modal" data-bs-target="#joinModal">Join Station</button>