// src/services/audioUtils.js
// This file should not import React or use any hooks.
/**
 * Stops every currently playing AudioBufferSourceNode and resets the
 * playback cursor to the context's current time.
 *
 * Does nothing when no AudioContext has been created yet.
 *
 * @param {{current: Array<{stop: Function}>}} playingSourcesRef - mutable ref holding the active source nodes; emptied on return
 * @param {{current: (AudioContext|null)}} audioContextRef - mutable ref holding the shared AudioContext (may be null)
 * @param {{current: number}} playbackTimeRef - mutable ref tracking the scheduled playback time; reset to "now"
 */
export const stopAllPlayingAudio = (playingSourcesRef, audioContextRef, playbackTimeRef) => {
  const ctx = audioContextRef.current;
  if (!ctx) return;

  for (const source of playingSourcesRef.current) {
    try {
      source.stop();
    } catch (e) {
      // Ignore errors from stopping already stopped sources
    }
  }

  playingSourcesRef.current = [];
  playbackTimeRef.current = ctx.currentTime;
  console.log("All playing audio has been stopped.");
};
/**
 * Tears down every audio capture resource: stops the VAD and main media
 * stream tracks, halts an active MediaRecorder, and disconnects the
 * ScriptProcessor node. Each ref is handled independently and nulled out
 * where appropriate, so partial initialization is safe.
 *
 * @param {{current: (MediaStream|null)}} vadStreamRef - voice-activity-detection stream; tracks stopped, ref nulled
 * @param {{current: (MediaRecorder|null)}} mediaRecorderRef - recorder; stopped only when not already "inactive"
 * @param {{current: ({disconnect: Function}|null)}} scriptProcessorRef - audio worklet/processor node; disconnected, ref nulled
 * @param {{current: (MediaStream|null)}} streamRef - primary capture stream; tracks stopped, ref nulled
 */
export const stopAllMediaStreams = (vadStreamRef, mediaRecorderRef, scriptProcessorRef, streamRef) => {
  const stopTracks = (stream) => stream.getTracks().forEach((track) => track.stop());

  if (vadStreamRef.current) {
    stopTracks(vadStreamRef.current);
    vadStreamRef.current = null;
  }

  const recorder = mediaRecorderRef.current;
  if (recorder && recorder.state !== "inactive") {
    recorder.stop();
  }

  const processor = scriptProcessorRef.current;
  if (processor) {
    processor.disconnect();
    scriptProcessorRef.current = null;
  }

  if (streamRef.current) {
    stopTracks(streamRef.current);
    streamRef.current = null;
  }

  console.log("All audio streams and timers have been stopped.");
};
/**
 * Resamples an audio buffer to a new sample rate using linear
 * interpolation between neighboring source samples.
 *
 * When the rates already match, the original buffer is returned as-is
 * (no copy is made).
 *
 * @param {Float32Array} buffer - source PCM samples
 * @param {number} srcRate - sample rate of `buffer` in Hz
 * @param {number} dstRate - desired output sample rate in Hz
 * @returns {Float32Array} resampled buffer (or `buffer` itself when rates match)
 */
export const resampleBuffer = (buffer, srcRate, dstRate) => {
  if (srcRate === dstRate) return buffer;

  const step = srcRate / dstRate;
  const outLength = Math.round(buffer.length / step);
  const out = new Float32Array(outLength);
  const lastIndex = buffer.length - 1;

  for (let i = 0; i < outLength; i++) {
    const pos = i * step;
    const lo = Math.floor(pos);
    // Clamp the upper neighbor so the final sample never reads past the end.
    const hi = Math.min(lo + 1, lastIndex);
    const frac = pos - lo;
    out[i] = buffer[lo] * (1 - frac) + buffer[hi] * frac;
  }

  return out;
};
/**
 * Converts raw 16-bit signed little-endian PCM bytes into normalized
 * Float32 samples in the range [-1, 1).
 *
 * Fixes over the naive view construction:
 * - An odd `byteLength` no longer throws (Int16Array rejects fractional
 *   lengths); the trailing dangling byte is ignored.
 * - A source view whose `byteOffset` is not 2-byte aligned no longer
 *   throws; the bytes are copied to an aligned buffer first.
 *
 * @param {Uint8Array} pcmBytes - raw PCM data (16-bit signed, platform/little endian)
 * @returns {Float32Array} samples scaled by 1/32768
 */
export const convertPcmToFloat32 = (pcmBytes) => {
  // Each sample is 2 bytes; drop a trailing odd byte instead of throwing.
  const sampleCount = Math.floor(pcmBytes.byteLength / 2);

  // Int16Array views must be 2-byte aligned within their buffer;
  // copy to a fresh (offset-0) buffer when the incoming view is not.
  const aligned =
    pcmBytes.byteOffset % 2 === 0
      ? pcmBytes
      : pcmBytes.slice(0, sampleCount * 2);

  const int16Array = new Int16Array(
    aligned.buffer,
    aligned.byteOffset,
    sampleCount
  );

  const float32Array = new Float32Array(sampleCount);
  for (let i = 0; i < sampleCount; i++) {
    // 32768 maps the full int16 range so that -32768 -> -1 exactly.
    float32Array[i] = int16Array[i] / 32768;
  }
  return float32Array;
};
/**
 * Encodes Float32 samples as a 16-bit PCM WAV file (RIFF/WAVE).
 *
 * Generalized from the previous mono-only version: an optional channel
 * count can be supplied; for multi-channel audio, `samples` must already
 * be interleaved. The default of 1 channel preserves the old behavior.
 *
 * Samples are clamped to [-1, 1] and scaled asymmetrically
 * (-1 -> -32768, +1 -> 32767) as is conventional for int16 PCM.
 *
 * @param {Float32Array|number[]} samples - normalized samples in [-1, 1], interleaved if multi-channel
 * @param {number} sampleRate - sample rate in Hz
 * @param {number} [numChannels=1] - number of interleaved channels
 * @returns {Blob} a Blob of type 'audio/wav' containing the complete file
 */
export const encodeWAV = (samples, sampleRate, numChannels = 1) => {
  const bytesPerSample = 2;                      // 16-bit PCM
  const blockAlign = numChannels * bytesPerSample;
  const dataSize = samples.length * bytesPerSample;

  const buffer = new ArrayBuffer(44 + dataSize); // 44-byte canonical header
  const view = new DataView(buffer);

  const writeString = (view, offset, string) => {
    for (let i = 0; i < string.length; i++) {
      view.setUint8(offset + i, string.charCodeAt(i));
    }
  };

  // RIFF container header.
  writeString(view, 0, 'RIFF');
  view.setUint32(4, 36 + dataSize, true);        // file size minus 8
  writeString(view, 8, 'WAVE');

  // "fmt " subchunk.
  writeString(view, 12, 'fmt ');
  view.setUint32(16, 16, true);                  // subchunk size for PCM
  view.setUint16(20, 1, true);                   // audio format 1 = PCM
  view.setUint16(22, numChannels, true);
  view.setUint32(24, sampleRate, true);
  view.setUint32(28, sampleRate * blockAlign, true); // byte rate
  view.setUint16(32, blockAlign, true);
  view.setUint16(34, 16, true);                  // bits per sample

  // "data" subchunk.
  writeString(view, 36, 'data');
  view.setUint32(40, dataSize, true);

  let offset = 44;
  for (let i = 0; i < samples.length; i++, offset += 2) {
    const s = Math.max(-1, Math.min(1, samples[i]));
    // Negative half scales by 32768, positive by 32767 to use the full range.
    view.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
  }

  return new Blob([view], { type: 'audio/wav' });
};