revert d3e3e92f9b
revert feat(audio): migrate from ScriptProcessorNode to AudioWorkletNode for low-latency broadcasting

- Implemented `BroadcasterProcessor` for audio processing in a separate audio thread.
- Replaced the deprecated `ScriptProcessorNode` with `AudioWorkletNode` in `startBroadcast`.
- Enhanced audio performance by reducing main-thread interference and improving scalability.
- Added `broadcaster-processor.js` to handle custom audio processing logic.

This change ensures compatibility with modern browsers and improves broadcast audio quality.

This commit is contained in:
parent d3e3e92f9b
commit 18d282cd6c

app.js (26 lines changed)
@@ -86,38 +86,32 @@ async function startBroadcast() {
   try {
     audioContext = new (window.AudioContext || window.webkitAudioContext)();
-
-    // Load and register the audio worklet processor
-    await audioContext.audioWorklet.addModule('broadcaster-processor.js');
-
     micStream = await navigator.mediaDevices.getUserMedia({
       audio: { deviceId: currentDeviceId ? { exact: currentDeviceId } : undefined },
     });
 
     const source = audioContext.createMediaStreamSource(micStream);
 
-    // Create AudioWorkletNode
-    const broadcasterNode = new AudioWorkletNode(audioContext, 'broadcaster-processor');
-    source.connect(broadcasterNode);
-
-    // Handle audio data
-    broadcasterNode.port.onmessage = (event) => {
-      const buffer = event.data;
+    const processor = audioContext.createScriptProcessor(4096, 1, 1);
+    source.connect(processor);
+    processor.connect(audioContext.destination);
+
+    processor.onaudioprocess = (event) => {
+      const audioData = event.inputBuffer.getChannelData(0);
+      const buffer = b4a.from(new Float32Array(audioData).buffer);
 
       // Send audio data to all connections
       for (const conn of conns) {
         conn.write(buffer);
       }
     };
 
-    broadcasterNode.connect(audioContext.destination); // Optional monitoring
-
     isBroadcasting = true;
-    console.log("Broadcasting started with AudioWorklet.");
+    console.log("Broadcasting started.");
   } catch (err) {
-    console.error("Error accessing microphone or setting up broadcast:", err);
+    console.error("Error accessing microphone:", err);
   }
 }
 
 // Function to stop broadcasting and clean up resources
 function stopBroadcast() {
   if (!isBroadcasting) return;
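
With the revert in place, the broadcaster writes raw 32-bit float PCM to every peer: `onaudioprocess` exposes a `Float32Array` of samples, `b4a.from(...)` wraps its underlying bytes, and `conn.write(buffer)` ships them unchanged. A receiving peer therefore has to reinterpret those bytes as floats before playback. A minimal sketch of that listener side (not part of this diff; it assumes `conn` is the same Node-style stream used above and that each `data` event carries one written chunk):

```js
// Hypothetical listener side: decode the raw Float32 PCM chunks that
// the broadcaster sends with conn.write(buffer).
conn.on('data', (data) => {
  // Copy the chunk into its own ArrayBuffer so the typed-array view is
  // aligned, then reinterpret the bytes as 32-bit float samples.
  const bytes = data.buffer.slice(data.byteOffset, data.byteOffset + data.byteLength);
  const samples = new Float32Array(bytes);
  // ...queue `samples` for playback, e.g. by copying them into an AudioBuffer.
});
```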

broadcaster-processor.js (deleted)

@@ -1,19 +0,0 @@
-class BroadcasterProcessor extends AudioWorkletProcessor {
-  process(inputs, outputs) {
-    const input = inputs[0];
-    const output = outputs[0];
-
-    if (input && output) {
-      for (let channel = 0; channel < input.length; ++channel) {
-        const inputChannel = input[channel];
-        const outputChannel = output[channel];
-        for (let i = 0; i < inputChannel.length; ++i) {
-          outputChannel[i] = inputChannel[i];
-        }
-      }
-    }
-    return true; // Keep the processor alive
-  }
-}
-
-registerProcessor('broadcaster-processor', BroadcasterProcessor);
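
For context on the worklet path being removed: the reverted `app.js` read frames from `event.data` in `broadcasterNode.port.onmessage`, which requires the processor to post those frames back over its `MessagePort`, while the deleted `BroadcasterProcessor` above only copies input to output. A minimal sketch of a processor that does forward its input (hypothetical; the class and registration name below are not from this repository):

```js
// Hypothetical worklet processor that forwards each 128-sample render
// quantum to the main thread, where port.onmessage can broadcast it.
class ForwardingProcessor extends AudioWorkletProcessor {
  process(inputs) {
    const input = inputs[0];
    if (input && input[0]) {
      // Copy the frame before transferring it; the engine reuses the
      // underlying buffers between process() calls.
      const frame = new Float32Array(input[0]);
      this.port.postMessage(frame.buffer, [frame.buffer]);
    }
    return true; // Keep the processor alive
  }
}

registerProcessor('forwarding-processor', ForwardingProcessor);
```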

index.html (13 lines changed)
@@ -24,18 +24,6 @@
     }
 
   </style>
-  <script>
-    document.addEventListener('DOMContentLoaded', () => {
-      navigator.mediaDevices.getUserMedia({ audio: true })
-        .then(stream => {
-          // Handle audio stream here
-          console.log("Microphone access granted:", stream);
-        })
-        .catch(error => {
-          console.error("Microphone access denied:", error);
-        });
-    });
-  </script>
 </head>
 <body class="bg-dark text-light">
   <div id="titlebar">
@@ -43,7 +31,6 @@
   </div>
   <div class="container mt-5 text-center">
     <h1>pearCast</h1>
-    <div id="retry-message-bar" class="alert alert-warning d-none" role="alert"></div>
     <div id="setup" class="btn-group mt-4">
       <button id="create-station" class="btn btn-primary">Create Station</button>
       <button id="open-join-modal" class="btn btn-secondary" data-bs-toggle="modal" data-bs-target="#joinModal">Join Station</button>