diff --git a/app/components/chat.tsx b/app/components/chat.tsx
index 5897a5d40..275edaddc 100644
--- a/app/components/chat.tsx
+++ b/app/components/chat.tsx
@@ -2035,14 +2035,16 @@ function _Chat() {
             [styles["chat-side-panel-show"]]: showChatSidePanel,
           })}
         >
-          <RealtimeChat
-            onClose={() => {
-              setShowChatSidePanel(false);
-            }}
-            onStartVoice={async () => {
-              console.log("start voice");
-            }}
-          />
+          {showChatSidePanel && (
+            <RealtimeChat
+              onClose={() => {
+                setShowChatSidePanel(false);
+              }}
+              onStartVoice={async () => {
+                console.log("start voice");
+              }}
+            />
+          )}
diff --git a/app/components/realtime-chat/realtime-chat.tsx b/app/components/realtime-chat/realtime-chat.tsx
index 3ab4b4c56..f697766f2 100644
--- a/app/components/realtime-chat/realtime-chat.tsx
+++ b/app/components/realtime-chat/realtime-chat.tsx
@@ -1,6 +1,6 @@
+import { useDebouncedCallback } from "use-debounce";
 import VoiceIcon from "@/app/icons/voice.svg";
 import VoiceOffIcon from "@/app/icons/voice-off.svg";
-import Close24Icon from "@/app/icons/close-24.svg";
 import PowerIcon from "@/app/icons/power.svg";

 import styles from "./realtime-chat.module.scss";
@@ -60,6 +60,7 @@ export function RealtimeChat({
   const apiKey = accessStore.openaiApiKey;

   const handleConnect = async () => {
+    if (isConnecting) return;
     if (!isConnected) {
       try {
         setIsConnecting(true);
@@ -230,215 +231,34 @@ export function RealtimeChat({
     }
   };

-  useEffect(() => {
-    const initAudioHandler = async () => {
-      const handler = new AudioHandler();
-      await handler.initialize();
-      audioHandlerRef.current = handler;
-    };
+  useEffect(
+    useDebouncedCallback(() => {
+      const initAudioHandler = async () => {
+        const handler = new AudioHandler();
+        await handler.initialize();
+        audioHandlerRef.current = handler;
+        await handleConnect();
+        await toggleRecording();
+      };

-    initAudioHandler().catch(console.error);
+      initAudioHandler().catch(console.error);

-    return () => {
-      disconnect();
-      audioHandlerRef.current?.close().catch(console.error);
-    };
-  }, []);
+      return () => {
+        if (isRecording) {
+          toggleRecording();
+        }
+        audioHandlerRef.current?.close().catch(console.error);
+        disconnect();
+      };
+    }),
+    [],
+  );

-  // useEffect(() => {
-  //   if (
-  //     clientRef.current?.getTurnDetectionType() === "server_vad" &&
-  //     audioData
-  //   ) {
-  //     // console.log("appendInputAudio", audioData);
-  //     // send the recorded 16-bit PCM audio to openai
-  //     clientRef.current?.appendInputAudio(audioData);
-  //   }
-  // }, [audioData]);
-
-  // useEffect(() => {
-  //   console.log("isRecording", isRecording);
-  //   if (!isRecording.current) return;
-  //   if (!clientRef.current) {
-  //     const apiKey = accessStore.openaiApiKey;
-  //     const client = (clientRef.current = new RealtimeClient({
-  //       url: "wss://api.openai.com/v1/realtime",
-  //       apiKey,
-  //       dangerouslyAllowAPIKeyInBrowser: true,
-  //       debug: true,
-  //     }));
-  //     client
-  //       .connect()
-  //       .then(() => {
-  //         // TODO set the real context
-  //         client.sendUserMessageContent([
-  //           {
-  //             type: `input_text`,
-  //             text: `Hi`,
-  //             // text: `For testing purposes, I want you to list ten car brands. Number each item, e.g. "one (or whatever number you are one): the item name".`
"one (or whatever number you are one): the item name".` - // }, - // ]); - - // // 配置服务端判断说话人开启还是结束 - // client.updateSession({ - // turn_detection: { type: "server_vad" }, - // }); - - // client.on("realtime.event", (realtimeEvent) => { - // // 调试 - // console.log("realtime.event", realtimeEvent); - // }); - - // client.on("conversation.interrupted", async () => { - // if (currentBotMessage.current) { - // stopPlaying(); - // try { - // client.cancelResponse( - // currentBotMessage.current?.id, - // currentTime(), - // ); - // } catch (e) { - // console.error(e); - // } - // } - // }); - // client.on("conversation.updated", async (event: any) => { - // // console.log("currentSession", chatStore.currentSession()); - // // const items = client.conversation.getItems(); - // const content = event?.item?.content?.[0]?.transcript || ""; - // const text = event?.item?.content?.[0]?.text || ""; - // // console.log( - // // "conversation.updated", - // // event, - // // "content[0]", - // // event?.item?.content?.[0]?.transcript, - // // "formatted", - // // event?.item?.formatted?.transcript, - // // "content", - // // content, - // // "text", - // // text, - // // event?.item?.status, - // // event?.item?.role, - // // items.length, - // // items, - // // ); - // const { item, delta } = event; - // const { role, id, status, formatted } = item || {}; - // if (id && role == "assistant") { - // if ( - // !currentBotMessage.current || - // currentBotMessage.current?.id != id - // ) { - // // create assistant message and save to session - // currentBotMessage.current = createMessage({ id, role }); - // chatStore.updateCurrentSession((session) => { - // session.messages = session.messages.concat([ - // currentBotMessage.current!, - // ]); - // }); - // } - // if (currentBotMessage.current?.id != id) { - // stopPlaying(); - // } - // if (content) { - // currentBotMessage.current.content = content; - // chatStore.updateCurrentSession((session) => { - // session.messages = session.messages.concat(); - // }); - // } - // if (delta?.audio) { - // // typeof delta.audio is Int16Array - // // 直接播放 - // addInt16PCM(delta.audio); - // } - // // console.log( - // // "updated try save wavFile", - // // status, - // // currentBotMessage.current?.audio_url, - // // formatted?.audio, - // // ); - // if ( - // status == "completed" && - // !currentBotMessage.current?.audio_url && - // formatted?.audio?.length - // ) { - // // 转换为wav文件保存 TODO 使用mp3格式会更节省空间 - // const botMessage = currentBotMessage.current; - // const wavFile = new WavPacker().pack(sampleRate, { - // bitsPerSample: 16, - // channelCount: 1, - // data: formatted?.audio, - // }); - // // 这里将音频文件放到对象里面wavFile.url可以使用