renzhiyuan 2025-08-31 22:35:28 +08:00
parent ece104226d
commit dfeaebb038
4 changed files with 277 additions and 61 deletions

Binary file not shown.

View File

@@ -30,18 +30,72 @@ const showQues = ref(false)
const handleClick = () => {
talk(resourceMap.get('start'))
.then(() => {
startStreaming()
text.value="请简单阐述mysql的索引机制,请简单阐述mysql的索引机制请简单阐述mysql的索引机制请简单阐述mysql的索引机制请简单阐述mysql的索引机制"
Ques()
})
startStreaming()
//const mouthOpen = 1; // 0~1
//handlePlayAction(resourceMap.get('test_wav'))
//let voicePath=resourceMap.get('start')
// live2DSprite._model.("PARAM_Mouth_OpenY", volume)
//live2DSprite._model.playVoice({ voicePath, immediate: true });
// talk(resourceMap.get('start'))
// .then(() => {
// startStreaming()
// text.value="mysql,mysqlmysqlmysqlmysql"
// Ques()
// })
};
const overClick = () => {
app.ticker.stop()
//stopStreaming()
};
const handlePlayAction = (voicePath) => {
console.log("接收到地址:"+voicePath);
live2DSprite.playVoice({ voicePath, immediate: true })
};
const handleBlob = async (blobUrl) => {
// Fetch the Blob behind the object URL
const response = await fetch(blobUrl);
const blob = await response.blob();
let targetPath = "1.wav"
// Trigger a browser download of the blob
const a = document.createElement('a');
a.href = URL.createObjectURL(blob);
a.download = targetPath.split('/').pop(); // "1.wav"
a.click();
// Release the temporary object URL
URL.revokeObjectURL(a.href);
return handlePlayAction(targetPath);
};
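// A note on handleBlob: a.download saves the WAV to the user's disk, so the
// hardcoded targetPath "1.wav" only plays if the server also serves that file.
// A sketch of a variant that skips the download and feeds the blob URL straight
// to the sprite (handleBlobDirect is hypothetical, not part of this commit):
const handleBlobDirect = (blobUrl) => handlePlayAction(blobUrl);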
const talk = async (voicePath) => {
return new Promise((resolve, reject) => {
try {
// 1. Probe the clip's duration with a temporary Audio element
const tempAudio = new Audio(voicePath);
tempAudio.onloadedmetadata = () => {
const duration = tempAudio.duration * 1000; // estimated length in ms
// 2. Start voice playback on the Live2D sprite
live2DSprite.playVoice({ voicePath, immediate: true });
// 3. Use setTimeout to resolve after the estimated playback time
setTimeout(() => {
console.log("Voice playback finished (estimated)");
resolve(); // treat playback as complete
}, duration);
};
// 4. Reject if the audio fails to load
tempAudio.onerror = () => {
reject(new Error("Failed to load audio"));
};
} catch (error) {
reject(error); // synchronous setup failure
}
});
};
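// The setTimeout above resolves on an estimated duration, which can drift from
// real playback. A sketch of an alternative that resolves on the media element's
// own "ended" event (talkExact is a hypothetical helper, not part of this commit):
const talkExact = (voicePath) =>
new Promise((resolve, reject) => {
const audio = new Audio(voicePath);
audio.muted = true; // timing shadow only; the sprite produces the audible sound
audio.onended = () => resolve();
audio.onerror = () => reject(new Error("Failed to load audio"));
live2DSprite.playVoice({ voicePath, immediate: true });
audio.play().catch(reject);
});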
function ansTran(delta){
live2DSprite.x += 8;
@@ -54,8 +108,6 @@ const Ques = () => {
app.ticker.stop()
showQues.value=true
}, 1000);
}
@@ -133,40 +185,12 @@ const modelInit = async (modelPath) => {
}
}
onUnmounted(() => {
console.log("释放实例")
live2DSprite.destroy()
})
const isStreaming = ref(false);
const status = ref("");
@@ -224,6 +248,7 @@ const handleError = (error) => {
:is-streaming="isStreaming"
@update-status="handleStatusUpdate"
@streaming-error="handleError"
@audio-generated="handlePlayAction"
/>
<p v-if="status">{{ status }}</p>

View File

@@ -11,18 +11,26 @@ export default {
props: {
isStreaming: Boolean,
},
emits: ["update-status", "streaming-error"],
emits: ["update-status", "streaming-error","audio-generated"],
setup(props, { emit }) {
const status = ref("");
let audioContext = null;
let mediaStream = null;
let socket = null;
let audioWorkletNode = null;
let speechSynthesis = null;
let mediaSource = null;
let audioStream = null;
let audioElement = null;
const SAMPLE_RATE = 16000;
const CHANNELS = 1;
// Chunks queued for appending to the SourceBuffer
const audioQueue = [];
let isAppending = false;
let receivedChunks = []; // raw PCM chunks received over the WebSocket
let totalSize = 0;
let isReceiving = false;
let hasReceivedHeader = false;
let wavParams = {};
const cleanupResources = () => {
if (audioWorkletNode) {
audioWorkletNode.disconnect();
@@ -30,7 +38,7 @@ export default {
audioWorkletNode = null;
}
if (mediaStream) {
mediaStream.getTracks().forEach(track => track.stop());
mediaStream.getTracks().forEach((track) => track.stop());
mediaStream = null;
}
if (socket) {
@@ -45,15 +53,99 @@
audioContext.close().catch(() => {});
audioContext = null;
}
if (speechSynthesis) {
speechSynthesis.cancel();
speechSynthesis = null;
}
if (audioElement) {
audioElement.pause();
audioElement.src = "";
audioElement = null;
}
if (mediaSource) {
if (mediaSource.readyState === "open") {
mediaSource.endOfStream();
}
mediaSource = null;
}
audioStream = null;
};
const processAudioQueue = () => {
if (
audioQueue.length === 0 ||
isAppending ||
!mediaSource ||
mediaSource.readyState !== "open" ||
!audioStream
) {
return;
}
isAppending = true;
const audioData = audioQueue.shift();
try {
if (audioStream.updating) {
audioQueue.unshift(audioData);
isAppending = false;
return;
}
audioStream.appendBuffer(audioData);
} catch (error) {
console.error("Error appending audio buffer:", error);
isAppending = false;
// Rebuild the MediaSource pipeline and retry
setupAudioElement();
processAudioQueue();
}
};
const setupAudioElement = () => {
if (audioElement) {
audioElement.pause();
audioElement.src = "";
}
mediaSource = new MediaSource();
audioElement = document.createElement("audio");
audioElement.src = URL.createObjectURL(mediaSource);
audioElement.play().catch((e) => {
console.error("Audio element play failed:", e);
});
mediaSource.addEventListener("sourceopen", () => {
try {
audioStream = mediaSource.addSourceBuffer("audio/mpeg");
audioStream.addEventListener("updateend", () => {
isAppending = false;
processAudioQueue();
});
audioStream.addEventListener("error", (e) => {
console.error("SourceBuffer error:", e);
isAppending = false;
});
} catch (error) {
console.error("Failed to add SourceBuffer:", error);
}
});
mediaSource.addEventListener("sourceended", () => {
console.log("MediaSource ended");
});
};
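// addSourceBuffer("audio/mpeg") throws on browsers whose MediaSource build
// lacks MP3 support; a guard like this sketch (standard MediaSource API,
// not part of this commit) fails fast instead:
if (!("MediaSource" in window) || !MediaSource.isTypeSupported("audio/mpeg")) {
console.warn("MediaSource cannot play audio/mpeg in this browser");
}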
const startStreaming = async () => {
try {
// Reset any state left from a previous session
cleanupResources();
setupAudioElement();
// 1. Open the WebSocket connection
socket = new WebSocket("ws://localhost:8000/ws");
audioContext = new (window.AudioContext || window.webkitAudioContext)({
sampleRate: SAMPLE_RATE,
});
socket = new WebSocket("ws://127.0.0.1:27004/ws/transcribe");
socket.binaryType = "arraybuffer";
socket.onopen = () => {
@@ -69,32 +161,55 @@
cleanupResources();
};
// 2. Create the AudioContext at the target sample rate
audioContext = new AudioContext({ sampleRate: SAMPLE_RATE });
socket.onmessage = (event) => {
if (event.data instanceof ArrayBuffer) {
const chunk = new Uint8Array(event.data);
// Optionally validate that the first chunk starts with a RIFF/WAV header
// if (!hasReceivedHeader) {
// const header = chunk.slice(0, 4);
// const isRiff = header[0] === 0x52 && header[1] === 0x49 && header[2] === 0x46 && header[3] === 0x46;
// if (!isRiff) {
// console.error("First chunk is not a valid RIFF/WAV header");
// hasReceivedHeader = false
// return;
// }
// hasReceivedHeader = true;
// }
receivedChunks.push(chunk);
totalSize += chunk.length;
} else {
const data = JSON.parse(event.data);
if (data.type === "EOF") {
wavParams = {
sampleRate: data.sampleRate,
numChannels: data.numChannels,
bitDepth: data.bitDepth,
};
finalizeWavFile();
}
}
};
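// For reference, the JSON control message this handler expects at stream end
// (field names taken from the code above; the values here are illustrative):
// { "type": "EOF", "sampleRate": 16000, "numChannels": 1, "bitDepth": 16 }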
// 3. Capture the microphone
mediaStream = await navigator.mediaDevices.getUserMedia({ audio: true });
const source = audioContext.createMediaStreamSource(mediaStream);
// 4. Load the AudioWorklet processor module
await audioContext.audioWorklet.addModule("/audio-processor.js");
// 5. Create the AudioWorkletNode
audioWorkletNode = new AudioWorkletNode(audioContext, "audio-processor");
// 6. Forward captured frames over the WebSocket
audioWorkletNode.port.onmessage = (event) => {
if (socket?.readyState === WebSocket.OPEN) {
socket.send(event.data);
}
};
// 7. Wire the audio graph together
source.connect(audioWorkletNode);
audioWorkletNode.connect(audioContext.destination);
emit("update-status", "Streaming started");
} catch (error) {
console.error("Streaming error:", error);
cleanupResources();
emit("streaming-error", error.message);
}
@@ -106,15 +221,89 @@ export default {
};
watch(
() => props.isStreaming,
(newVal) => {
if (newVal) {
startStreaming();
} else {
stopStreaming();
}
},
{ immediate: true }
);
function finalizeWavFile() {
if (!wavParams || receivedChunks.length === 0) return;
const { sampleRate, numChannels, bitDepth } = wavParams;
const pcmData = new Uint8Array(totalSize);
let offset = 0;
for (const chunk of receivedChunks) {
pcmData.set(chunk, offset);
offset += chunk.length;
}
const wavHeader = generateWavHeader(sampleRate, numChannels, bitDepth, totalSize);
const wavData = new Uint8Array(wavHeader.length + totalSize);
wavData.set(wavHeader);
wavData.set(pcmData, wavHeader.length);
const wavBlob = new Blob([wavData], { type: "audio/wav" });
const audioUrl = URL.createObjectURL(wavBlob);
emit("audio-generated", audioUrl);
// Reset accumulation state
receivedChunks = [];
totalSize = 0;
wavParams = {};
}
const cleanupAudioData = () => {
receivedChunks = [];
totalSize = 0;
};
// Build a standard 44-byte PCM WAV header
function generateWavHeader(sampleRate, numChannels, bitDepth, dataSize) {
const bytesPerSample = bitDepth / 8;
const blockAlign = numChannels * bytesPerSample;
const buffer = new ArrayBuffer(44);
const view = new DataView(buffer);
// RIFF chunk descriptor
view.setUint32(0, 0x52494646, false); // "RIFF"
view.setUint32(4, 36 + dataSize, true); // file size - 8
view.setUint32(8, 0x57415645, false); // "WAVE"
// fmt sub-chunk
view.setUint32(12, 0x666d7420, false); // "fmt "
view.setUint32(16, 16, true); // sub-chunk size, 16 for PCM
view.setUint16(20, 1, true); // audio format, 1 = PCM
view.setUint16(22, numChannels, true);
view.setUint32(24, sampleRate, true);
view.setUint32(28, sampleRate * blockAlign, true); // byte rate
view.setUint16(32, blockAlign, true);
view.setUint16(34, bitDepth, true);
// data sub-chunk
view.setUint32(36, 0x64617461, false); // "data"
view.setUint32(40, dataSize, true); // PCM data size in bytes
return new Uint8Array(buffer);
}
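// Usage sketch for generateWavHeader: one second of 16 kHz mono 16-bit silence
// gives dataSize = 32000, a RIFF size field of 36 + 32000, and a byte rate of
// 16000 * 2 (illustrative values, not part of this commit):
const pcm = new Uint8Array(16000 * 2); // 1 s of 16 kHz mono 16-bit PCM silence
const demoHeader = generateWavHeader(16000, 1, 16, pcm.length);
const demoWav = new Uint8Array(demoHeader.length + pcm.length);
demoWav.set(demoHeader);
demoWav.set(pcm, demoHeader.length);
const demoUrl = URL.createObjectURL(new Blob([demoWav], { type: "audio/wav" }));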
onUnmounted(() => {
@@ -124,4 +313,5 @@ export default {
return { status };
},
};
</script>

View File

@@ -4,5 +4,6 @@ export const resourceMap = new Map([
['start', '/resource/sounds/start.wav'],
['model', '/resource/models/UG/ugofficial.model3.json'],
['log', '/resource/img/log.png'],
['test_wav', '/resource/sounds/1.wav'],
// other resources...
]);