// realtime-chat.tsx
  1. import VoiceIcon from "@/app/icons/voice.svg";
  2. import VoiceOffIcon from "@/app/icons/voice-off.svg";
  3. import PowerIcon from "@/app/icons/power.svg";
  4. import styles from "./realtime-chat.module.scss";
  5. import clsx from "clsx";
  6. import { useState, useRef, useEffect } from "react";
  7. import { useChatStore, createMessage, useAppConfig } from "@/app/store";
  8. import { IconButton } from "@/app/components/button";
  9. import {
  10. Modality,
  11. RTClient,
  12. RTInputAudioItem,
  13. RTResponse,
  14. TurnDetection,
  15. } from "rt-client";
  16. import { AudioHandler } from "@/app/lib/audio";
  17. import { uploadImage } from "@/app/utils/chat";
  18. import { VoicePrint } from "@/app/components/voice-print";
/** Props for the realtime voice chat overlay. All callbacks are optional. */
interface RealtimeChatProps {
  /** Invoked when the user closes the realtime chat view. */
  onClose?: () => void;
  /** Invoked when voice capture starts. NOTE(review): not called in this file — presumably wired by a caller or reserved; confirm. */
  onStartVoice?: () => void;
  /** Invoked when voice capture pauses. NOTE(review): not called in this file — confirm intended use. */
  onPausedVoice?: () => void;
}
  24. export function RealtimeChat({
  25. onClose,
  26. onStartVoice,
  27. onPausedVoice,
  28. }: RealtimeChatProps) {
  29. const chatStore = useChatStore();
  30. const session = chatStore.currentSession();
  31. const config = useAppConfig();
  32. const [status, setStatus] = useState("");
  33. const [isRecording, setIsRecording] = useState(false);
  34. const [isConnected, setIsConnected] = useState(false);
  35. const [isConnecting, setIsConnecting] = useState(false);
  36. const [modality, setModality] = useState("audio");
  37. const [useVAD, setUseVAD] = useState(true);
  38. const [frequencies, setFrequencies] = useState<Uint8Array | undefined>();
  39. const clientRef = useRef<RTClient | null>(null);
  40. const audioHandlerRef = useRef<AudioHandler | null>(null);
  41. const initRef = useRef(false);
  42. const temperature = config.realtimeConfig.temperature;
  43. const apiKey = config.realtimeConfig.apiKey;
  44. const model = config.realtimeConfig.model;
  45. const azure = config.realtimeConfig.provider === "Azure";
  46. const azureEndpoint = config.realtimeConfig.azure.endpoint;
  47. const azureDeployment = config.realtimeConfig.azure.deployment;
  48. const voice = config.realtimeConfig.voice;
  49. const handleConnect = async () => {
  50. if (isConnecting) return;
  51. if (!isConnected) {
  52. try {
  53. setIsConnecting(true);
  54. clientRef.current = azure
  55. ? new RTClient(
  56. new URL(azureEndpoint),
  57. { key: apiKey },
  58. { deployment: azureDeployment },
  59. )
  60. : new RTClient({ key: apiKey }, { model });
  61. const modalities: Modality[] =
  62. modality === "audio" ? ["text", "audio"] : ["text"];
  63. const turnDetection: TurnDetection = useVAD
  64. ? { type: "server_vad" }
  65. : null;
  66. await clientRef.current.configure({
  67. instructions: "",
  68. voice,
  69. input_audio_transcription: { model: "whisper-1" },
  70. turn_detection: turnDetection,
  71. tools: [],
  72. temperature,
  73. modalities,
  74. });
  75. startResponseListener();
  76. setIsConnected(true);
  77. // TODO
  78. // try {
  79. // const recentMessages = chatStore.getMessagesWithMemory();
  80. // for (const message of recentMessages) {
  81. // const { role, content } = message;
  82. // if (typeof content === "string") {
  83. // await clientRef.current.sendItem({
  84. // type: "message",
  85. // role: role as any,
  86. // content: [
  87. // {
  88. // type: (role === "assistant" ? "text" : "input_text") as any,
  89. // text: content as string,
  90. // },
  91. // ],
  92. // });
  93. // }
  94. // }
  95. // // await clientRef.current.generateResponse();
  96. // } catch (error) {
  97. // console.error("Set message failed:", error);
  98. // }
  99. } catch (error) {
  100. console.error("Connection failed:", error);
  101. setStatus("Connection failed");
  102. } finally {
  103. setIsConnecting(false);
  104. }
  105. } else {
  106. await disconnect();
  107. }
  108. };
  109. const disconnect = async () => {
  110. if (clientRef.current) {
  111. try {
  112. await clientRef.current.close();
  113. clientRef.current = null;
  114. setIsConnected(false);
  115. } catch (error) {
  116. console.error("Disconnect failed:", error);
  117. }
  118. }
  119. };
  120. const startResponseListener = async () => {
  121. if (!clientRef.current) return;
  122. try {
  123. for await (const serverEvent of clientRef.current.events()) {
  124. if (serverEvent.type === "response") {
  125. await handleResponse(serverEvent);
  126. } else if (serverEvent.type === "input_audio") {
  127. await handleInputAudio(serverEvent);
  128. }
  129. }
  130. } catch (error) {
  131. if (clientRef.current) {
  132. console.error("Response iteration error:", error);
  133. }
  134. }
  135. };
  136. const handleResponse = async (response: RTResponse) => {
  137. for await (const item of response) {
  138. if (item.type === "message" && item.role === "assistant") {
  139. const botMessage = createMessage({
  140. role: item.role,
  141. content: "",
  142. });
  143. // add bot message first
  144. chatStore.updateTargetSession(session, (session) => {
  145. session.messages = session.messages.concat([botMessage]);
  146. });
  147. let hasAudio = false;
  148. for await (const content of item) {
  149. if (content.type === "text") {
  150. for await (const text of content.textChunks()) {
  151. botMessage.content += text;
  152. }
  153. } else if (content.type === "audio") {
  154. const textTask = async () => {
  155. for await (const text of content.transcriptChunks()) {
  156. botMessage.content += text;
  157. }
  158. };
  159. const audioTask = async () => {
  160. audioHandlerRef.current?.startStreamingPlayback();
  161. for await (const audio of content.audioChunks()) {
  162. hasAudio = true;
  163. audioHandlerRef.current?.playChunk(audio);
  164. }
  165. };
  166. await Promise.all([textTask(), audioTask()]);
  167. }
  168. // update message.content
  169. chatStore.updateTargetSession(session, (session) => {
  170. session.messages = session.messages.concat();
  171. });
  172. }
  173. if (hasAudio) {
  174. // upload audio get audio_url
  175. const blob = audioHandlerRef.current?.savePlayFile();
  176. uploadImage(blob!).then((audio_url) => {
  177. botMessage.audio_url = audio_url;
  178. // update text and audio_url
  179. chatStore.updateTargetSession(session, (session) => {
  180. session.messages = session.messages.concat();
  181. });
  182. });
  183. }
  184. }
  185. }
  186. };
  187. const handleInputAudio = async (item: RTInputAudioItem) => {
  188. await item.waitForCompletion();
  189. if (item.transcription) {
  190. const userMessage = createMessage({
  191. role: "user",
  192. content: item.transcription,
  193. });
  194. chatStore.updateTargetSession(session, (session) => {
  195. session.messages = session.messages.concat([userMessage]);
  196. });
  197. // save input audio_url, and update session
  198. const { audioStartMillis, audioEndMillis } = item;
  199. // upload audio get audio_url
  200. const blob = audioHandlerRef.current?.saveRecordFile(
  201. audioStartMillis,
  202. audioEndMillis,
  203. );
  204. uploadImage(blob!).then((audio_url) => {
  205. userMessage.audio_url = audio_url;
  206. chatStore.updateTargetSession(session, (session) => {
  207. session.messages = session.messages.concat();
  208. });
  209. });
  210. }
  211. // stop streaming play after get input audio.
  212. audioHandlerRef.current?.stopStreamingPlayback();
  213. };
  214. const toggleRecording = async () => {
  215. if (!isRecording && clientRef.current) {
  216. try {
  217. if (!audioHandlerRef.current) {
  218. audioHandlerRef.current = new AudioHandler();
  219. await audioHandlerRef.current.initialize();
  220. }
  221. await audioHandlerRef.current.startRecording(async (chunk) => {
  222. await clientRef.current?.sendAudio(chunk);
  223. });
  224. setIsRecording(true);
  225. } catch (error) {
  226. console.error("Failed to start recording:", error);
  227. }
  228. } else if (audioHandlerRef.current) {
  229. try {
  230. audioHandlerRef.current.stopRecording();
  231. if (!useVAD) {
  232. const inputAudio = await clientRef.current?.commitAudio();
  233. await handleInputAudio(inputAudio!);
  234. await clientRef.current?.generateResponse();
  235. }
  236. setIsRecording(false);
  237. } catch (error) {
  238. console.error("Failed to stop recording:", error);
  239. }
  240. }
  241. };
  242. useEffect(() => {
  243. // 防止重复初始化
  244. if (initRef.current) return;
  245. initRef.current = true;
  246. const initAudioHandler = async () => {
  247. const handler = new AudioHandler();
  248. await handler.initialize();
  249. audioHandlerRef.current = handler;
  250. await handleConnect();
  251. await toggleRecording();
  252. };
  253. initAudioHandler().catch((error) => {
  254. setStatus(error);
  255. console.error(error);
  256. });
  257. return () => {
  258. if (isRecording) {
  259. toggleRecording();
  260. }
  261. audioHandlerRef.current?.close().catch(console.error);
  262. disconnect();
  263. };
  264. }, []);
  265. useEffect(() => {
  266. let animationFrameId: number;
  267. if (isConnected && isRecording) {
  268. const animationFrame = () => {
  269. if (audioHandlerRef.current) {
  270. const freqData = audioHandlerRef.current.getByteFrequencyData();
  271. setFrequencies(freqData);
  272. }
  273. animationFrameId = requestAnimationFrame(animationFrame);
  274. };
  275. animationFrameId = requestAnimationFrame(animationFrame);
  276. } else {
  277. setFrequencies(undefined);
  278. }
  279. return () => {
  280. if (animationFrameId) {
  281. cancelAnimationFrame(animationFrameId);
  282. }
  283. };
  284. }, [isConnected, isRecording]);
  285. // update session params
  286. useEffect(() => {
  287. clientRef.current?.configure({ voice });
  288. }, [voice]);
  289. useEffect(() => {
  290. clientRef.current?.configure({ temperature });
  291. }, [temperature]);
  292. const handleClose = async () => {
  293. onClose?.();
  294. if (isRecording) {
  295. await toggleRecording();
  296. }
  297. disconnect().catch(console.error);
  298. };
  299. return (
  300. <div className={styles["realtime-chat"]}>
  301. <div
  302. className={clsx(styles["circle-mic"], {
  303. [styles["pulse"]]: isRecording,
  304. })}
  305. >
  306. <VoicePrint frequencies={frequencies} isActive={isRecording} />
  307. </div>
  308. <div className={styles["bottom-icons"]}>
  309. <div>
  310. <IconButton
  311. icon={isRecording ? <VoiceIcon /> : <VoiceOffIcon />}
  312. onClick={toggleRecording}
  313. disabled={!isConnected}
  314. shadow
  315. bordered
  316. />
  317. </div>
  318. <div className={styles["icon-center"]}>{status}</div>
  319. <div>
  320. <IconButton
  321. icon={<PowerIcon />}
  322. onClick={handleClose}
  323. shadow
  324. bordered
  325. />
  326. </div>
  327. </div>
  328. </div>
  329. );
  330. }