自 2020 年起,可以通过 AudioWorklet 节点实现。
https://developer.mozilla.org/en-US/docs/Web/API/AudioWorkletProcessor/AudioWorkletProcessor
处理器在 AudioWorkletGlobalScope 中运行,无法直接访问主线程,因此只能通过消息端口(MessagePort)传递二进制原始数据。
// test-processor.js — runs inside the AudioWorkletGlobalScope.
// Audio data can only leave this scope via the MessagePort.
class RecorderWorkletProcessor extends AudioWorkletProcessor {
  /**
   * @param {AudioWorkletNodeOptions} options - options object forwarded from
   *   the AudioWorkletNode constructor on the main thread; `processorOptions`
   *   is structured-cloned across threads.
   */
  constructor (options) {
    super()
    console.log(options.numberOfInputs)
    console.log(options.processorOptions.someUsefulVariable)
  }

  /**
   * Called once per render quantum (128 sample-frames).
   * @param {Float32Array[][]} inputs - per-input, per-channel sample blocks.
   * @param {Float32Array[][]} outputs - (unused here).
   * @param {Object} parameters - (unused here).
   * @returns {boolean} true to keep the processor alive.
   */
  process (inputs, outputs, parameters) {
    const inputChannel = inputs[0][0];
    // Bug fix: `postMessage` must be invoked with `this.port` as its
    // receiver. The original destructured it into a bare function, which
    // throws "Illegal invocation" when called unbound.
    this.port.postMessage(inputChannel);
    return true;
  }
}

// Bug fix: the processor must be registered under the name the main thread
// uses in `new AudioWorkletNode(ctx, 'test-processor')`; the original module
// omitted this call, so node construction would fail.
registerProcessor('test-processor', RecorderWorkletProcessor)
主代码
// Main thread: create the context, load the worklet module, and build the
// recorder node. `audio` is an <audio>/<video> element defined elsewhere.
const audioContext = new AudioContext()
const audioMediaElement = audioContext.createMediaElementSource(audio);

// The module must be loaded before a node of that name can be constructed.
await audioContext.audioWorklet.addModule('test-processor.js')

// `processorOptions` is structured-cloned and handed to the processor's
// constructor inside the worklet scope (Map survives the clone).
const recorder = new AudioWorkletNode(audioContext, 'test-processor', {
  processorOptions: {
    someUsefulVariable: new Map([[1, 'one'], [2, 'two']])
  }
});
/**
 * Wrap a block of raw Float32 PCM samples in a single-channel AudioBuffer.
 * @param {Float32Array|number[]} data - samples in [-1, 1].
 * @returns {AudioBuffer} a mono buffer at the context's sample rate.
 */
const convertFloatToAudioBuffer = (data) => {
  // Bug fix: the original used bitwise OR (`8000 | sampleRate`,
  // `128 | data.length`), which mangles both operands (e.g.
  // 8000 | 48000 === 48960). Logical fallback (`||`) was intended.
  const sampleRate = audioContext.sampleRate || 8000;
  const channels = 1;
  const sampleLength = data.length || 128; // a render quantum is 128 frames
  const audioBuffer = audioContext.createBuffer(channels, sampleLength, sampleRate);
  audioBuffer.copyToChannel(new Float32Array(data), 0);
  return audioBuffer;
}
// Playback cursor: the context time at which the next chunk should start.
let startAt = 0
// NOTE(review): `streamDestination` is created but never connected in the
// visible code — presumably intended for MediaRecorder use; verify.
const streamDestination = audioContext.createMediaStreamDestination();

/**
 * Schedule one chunk of recorded samples for gapless, back-to-back playback.
 * @param {Float32Array} data - one render quantum forwarded by the worklet.
 */
const play = (data) => {
  const sourceNode = audioContext.createBufferSource();
  sourceNode.buffer = convertFloatToAudioBuffer(data);
  // Bug fix: a node cannot be connected to the AudioContext itself;
  // connect to its destination (the output device).
  sourceNode.connect(audioContext.destination);
  // Never schedule in the past; then advance the cursor by this chunk's
  // duration so consecutive chunks play seamlessly.
  startAt = Math.max(audioContext.currentTime, startAt);
  // Bug fix: the original referenced undefined `source`/`buffer` and
  // called start() twice on the same node (the second call throws
  // InvalidStateError). Start exactly once, at the queued time.
  sourceNode.start(startAt);
  startAt += sourceNode.buffer.duration;
}

recorder.port.onmessage = (ev) => play(ev.data);
audioMediaElement.connect(recorder);
注意
这只是最基本的方法,用于在控制台中记录来自录音处理器的数据。当您真正想要处理这些数据时,您应该考虑在工作线程中执行操作,再注册一个处理程序,将数据直接发送到该工作线程,否则如果您进行大量处理,主进程可能会变得无响应。