by Anonymous » 22 Mar 2025, 06:06
I want to capture the currently playing audio using an AudioWorklet processor and the Web Audio API, and play it back in real time with an acceptable delay of about 100 ms behind the original source, but the audio comes out distorted and does not play back correctly. What is the right way to fix the following problems: playing the audio in the new context, and handling the audio coming out of the AudioWorklet processor?
Code:
<!DOCTYPE html>
<html>
<head>
    <title>Real-Time Audio Processing</title>
</head>
<body>
    <h1>Real-Time Audio Processing</h1>
    <!-- The audio source file was not shown in the original post -->
    <audio id="audio" src="audio.mp3" controls>
        Your browser does not support the audio tag.
    </audio>
    <button id="start">Start Processing</button>
    <button id="stop" disabled>Stop Processing</button>
    <script>
let originalAudio, audioContext, newAudioContext, workletNode, mediaStreamSource;
let bufferQueue = [];
let isPlaying = false;
let processorNode;
let startTime = 0;
let lastAudioTime = 0;

document.getElementById('start').addEventListener('click', async () => {
    originalAudio = document.getElementById('audio');
    originalAudio.volume = 0.01; // Mute original audio to 0.01 but still play
    const stream = originalAudio.captureStream();
    audioContext = new AudioContext();
    newAudioContext = new AudioContext();

    // Register WorkletProcessor
    await audioContext.audioWorklet.addModule(URL.createObjectURL(new Blob([`
        class RecorderProcessor extends AudioWorkletProcessor {
            constructor() {
                super();
                this.port.start();
            }
            process(inputs) {
                const input = inputs[0];
                if (input.length > 0) {
                    const outputBuffer = input[0]; // First channel data
                    this.port.postMessage(outputBuffer); // Send to main thread
                }
                return true;
            }
        }
        registerProcessor("recorder-processor", RecorderProcessor);
    `], { type: "application/javascript" })));

    workletNode = new AudioWorkletNode(audioContext, "recorder-processor");
    workletNode.port.onmessage = (event) => {
        const data = event.data;
        bufferQueue.push(data);
        if (!isPlaying) {
            playBufferedAudio();
        }
    };

    mediaStreamSource = audioContext.createMediaStreamSource(stream);
    mediaStreamSource.connect(workletNode);
    workletNode.connect(audioContext.destination);

    document.getElementById('start').disabled = true;
    document.getElementById('stop').disabled = false;
});

function playBufferedAudio() {
    if (bufferQueue.length === 0) {
        isPlaying = false;
        return;
    }
    isPlaying = true;

    const data = bufferQueue.shift();
    const buffer = newAudioContext.createBuffer(1, data.length, newAudioContext.sampleRate);
    buffer.copyToChannel(new Float32Array(data), 0);

    const source = newAudioContext.createBufferSource();
    source.buffer = buffer;
    source.connect(newAudioContext.destination);

    if (startTime === 0) {
        startTime = newAudioContext.currentTime + 0.02; // Add slight delay to sync
    } else {
        startTime = Math.max(newAudioContext.currentTime, lastAudioTime);
    }
    lastAudioTime = startTime + buffer.duration;

    source.start(startTime);
    source.onended = playBufferedAudio; // Chain playback of the next queued chunk
}

document.getElementById('stop').addEventListener('click', () => {
    audioContext.close();
    newAudioContext.close();
    bufferQueue = [];
    isPlaying = false;
    console.log("Stopped processing audio");
});
    </script>
</body>
</html>
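A likely cause of the distortion: each 128-frame worklet chunk becomes its own AudioBufferSourceNode, and the next chunk is only started from onended, so main-thread callback latency leaves small gaps between chunks. In addition, the two AudioContexts are not guaranteed to share a sample rate, so building the playback buffer with newAudioContext.sampleRate can replay samples captured at audioContext's rate with the wrong pitch. Below is a minimal sketch of gap-free scheduling on a single shared context, for comparison; it assumes the chunks still arrive from the same recorder-processor worklet, and queueChunk / TARGET_LATENCY are illustrative names, not part of the original code.

Code:

// Sketch: schedule worklet chunks back-to-back on one shared AudioContext.
// Assumes workletNode was created on this same context, as in the code above.
const TARGET_LATENCY = 0.1; // ~100 ms cushion between capture and playback
let nextStartTime = 0;      // running clock for the next chunk's start

function queueChunk(ctx, samples) {
    const buffer = ctx.createBuffer(1, samples.length, ctx.sampleRate);
    buffer.copyToChannel(samples, 0);

    const source = ctx.createBufferSource();
    source.buffer = buffer;
    source.connect(ctx.destination);

    // First chunk, or playback underran: re-prime the latency cushion.
    if (nextStartTime < ctx.currentTime) {
        nextStartTime = ctx.currentTime + TARGET_LATENCY;
    }

    // Schedule immediately on arrival rather than waiting for onended,
    // so consecutive chunks butt up against each other with no gap.
    source.start(nextStartTime);
    nextStartTime += buffer.duration;
}

// Wiring: feed every worklet message straight into the scheduler.
workletNode.port.onmessage = (event) => queueChunk(audioContext, event.data);

Batching several 128-frame chunks into one larger buffer before scheduling (say, 2048 frames at a time) would reduce the per-node overhead further; the 100 ms cushion then absorbs the extra queuing delay.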