// realtime-chat.tsx
  1. import VoiceIcon from "@/app/icons/voice.svg";
  2. import VoiceOffIcon from "@/app/icons/voice-off.svg";
  3. import PowerIcon from "@/app/icons/power.svg";
  4. import styles from "./realtime-chat.module.scss";
  5. import clsx from "clsx";
  6. import { useState, useRef, useEffect } from "react";
  7. import { useChatStore, createMessage, useAppConfig } from "@/app/store";
  8. import { IconButton } from "@/app/components/button";
  9. import {
  10. Modality,
  11. RTClient,
  12. RTInputAudioItem,
  13. RTResponse,
  14. TurnDetection,
  15. } from "rt-client";
  16. import { AudioHandler } from "@/app/lib/audio";
  17. import { uploadImage } from "@/app/utils/chat";
  18. import { VoicePrint } from "@/app/components/voice-print";
/** Callbacks the host view can register for realtime-chat lifecycle events. */
interface RealtimeChatProps {
  /** Invoked when the user clicks the power (close) button. */
  onClose?: () => void;
  // NOTE(review): onStartVoice / onPausedVoice are never invoked anywhere in
  // this file — confirm whether callers still rely on them.
  onStartVoice?: () => void;
  onPausedVoice?: () => void;
}
export function RealtimeChat({
  onClose,
  onStartVoice,
  onPausedVoice,
}: RealtimeChatProps) {
  const chatStore = useChatStore();
  const session = chatStore.currentSession();
  const config = useAppConfig();

  // Connection / capture state surfaced in the UI.
  const [status, setStatus] = useState("");
  const [isRecording, setIsRecording] = useState(false);
  const [isConnected, setIsConnected] = useState(false);
  const [isConnecting, setIsConnecting] = useState(false);
  // "audio" requests text+audio responses; any other value requests text only.
  // NOTE(review): setModality is never called in this file, so modality is
  // effectively the constant "audio".
  const [modality, setModality] = useState("audio");
  // Server-side voice activity detection (automatic turn taking) vs. manual
  // push-to-talk commit. NOTE(review): setUseVAD is never called here either.
  const [useVAD, setUseVAD] = useState(true);
  // Latest frequency spectrum fed to the VoicePrint visualization.
  const [frequencies, setFrequencies] = useState<Uint8Array | undefined>();

  const clientRef = useRef<RTClient | null>(null);
  const audioHandlerRef = useRef<AudioHandler | null>(null);
  // Guards the mount effect against duplicate initialization.
  const initRef = useRef(false);

  // Realtime provider settings pulled from app config.
  const temperature = config.realtimeConfig.temperature;
  const apiKey = config.realtimeConfig.apiKey;
  const model = config.realtimeConfig.model;
  const azure = config.realtimeConfig.provider === "Azure";
  const azureEndpoint = config.realtimeConfig.azure.endpoint;
  const azureDeployment = config.realtimeConfig.azure.deployment;
  const voice = config.realtimeConfig.voice;
  // Toggle the connection: when disconnected, open and configure a realtime
  // session and replay recent text history into it for context; when already
  // connected, tear the session down instead.
  const handleConnect = async () => {
    if (isConnecting) return;
    if (!isConnected) {
      try {
        setIsConnecting(true);
        // Azure uses endpoint + deployment; the default provider uses key + model.
        clientRef.current = azure
          ? new RTClient(
              new URL(azureEndpoint),
              { key: apiKey },
              { deployment: azureDeployment },
            )
          : new RTClient({ key: apiKey }, { model });
        const modalities: Modality[] =
          modality === "audio" ? ["text", "audio"] : ["text"];
        // null turn detection = manual commit (push-to-talk) instead of server VAD.
        const turnDetection: TurnDetection = useVAD
          ? { type: "server_vad" }
          : null;
        await clientRef.current.configure({
          instructions: "",
          voice,
          input_audio_transcription: { model: "whisper-1" },
          turn_detection: turnDetection,
          tools: [],
          temperature,
          modalities,
        });
        // Deliberately not awaited: pumps server events for the connection's lifetime.
        startResponseListener();
        setIsConnected(true);
        try {
          // Seed the session with recent history; only plain-string content is sent.
          const recentMessages = chatStore.getMessagesWithMemory();
          for (const message of recentMessages) {
            const { role, content } = message;
            if (typeof content === "string") {
              await clientRef.current.sendItem({
                type: "message",
                role: role as any,
                content: [
                  {
                    // Assistant history goes in as "text", user/system as "input_text".
                    type: (role === "assistant" ? "text" : "input_text") as any,
                    text: content as string,
                  },
                ],
              });
            }
          }
          // await clientRef.current.generateResponse();
        } catch (error) {
          // History replay is best-effort; the connection itself stays up.
          console.error("Set message failed:", error);
        }
      } catch (error) {
        console.error("Connection failed:", error);
        setStatus("Connection failed");
      } finally {
        setIsConnecting(false);
      }
    } else {
      await disconnect();
    }
  };
  108. const disconnect = async () => {
  109. if (clientRef.current) {
  110. try {
  111. await clientRef.current.close();
  112. clientRef.current = null;
  113. setIsConnected(false);
  114. } catch (error) {
  115. console.error("Disconnect failed:", error);
  116. }
  117. }
  118. };
  119. const startResponseListener = async () => {
  120. if (!clientRef.current) return;
  121. try {
  122. for await (const serverEvent of clientRef.current.events()) {
  123. if (serverEvent.type === "response") {
  124. await handleResponse(serverEvent);
  125. } else if (serverEvent.type === "input_audio") {
  126. await handleInputAudio(serverEvent);
  127. }
  128. }
  129. } catch (error) {
  130. if (clientRef.current) {
  131. console.error("Response iteration error:", error);
  132. }
  133. }
  134. };
  // Stream one server response into the chat: create an assistant message up
  // front, append text/transcript chunks to it as they arrive, and — if audio
  // was produced — upload the synthesized audio and attach its URL.
  const handleResponse = async (response: RTResponse) => {
    for await (const item of response) {
      if (item.type === "message" && item.role === "assistant") {
        const botMessage = createMessage({
          role: item.role,
          content: "",
        });
        // add bot message first
        chatStore.updateTargetSession(session, (session) => {
          session.messages = session.messages.concat([botMessage]);
        });
        let hasAudio = false;
        for await (const content of item) {
          if (content.type === "text") {
            for await (const text of content.textChunks()) {
              botMessage.content += text;
            }
          } else if (content.type === "audio") {
            // Transcript and audio arrive on separate async streams; consume
            // them concurrently so playback is not stalled behind the text.
            const textTask = async () => {
              for await (const text of content.transcriptChunks()) {
                botMessage.content += text;
              }
            };
            const audioTask = async () => {
              audioHandlerRef.current?.startStreamingPlayback();
              for await (const audio of content.audioChunks()) {
                hasAudio = true;
                audioHandlerRef.current?.playChunk(audio);
              }
            };
            await Promise.all([textTask(), audioTask()]);
          }
          // update message.content — botMessage was mutated in place, so
          // concat() produces a fresh array to make the store re-notify.
          chatStore.updateTargetSession(session, (session) => {
            session.messages = session.messages.concat();
          });
        }
        if (hasAudio) {
          // upload audio get audio_url
          // NOTE(review): `blob!` assumes audioHandlerRef was set whenever
          // audio chunks were played — confirm the init path guarantees this.
          const blob = audioHandlerRef.current?.savePlayFile();
          uploadImage(blob!).then((audio_url) => {
            botMessage.audio_url = audio_url;
            // update text and audio_url
            chatStore.updateTargetSession(session, (session) => {
              session.messages = session.messages.concat();
            });
          });
        }
      }
    }
  };
  // Handle one completed user audio turn: add its transcription as a user
  // message, upload the recorded clip to attach a playback URL, and stop any
  // bot audio that is still streaming (user interruption / barge-in).
  const handleInputAudio = async (item: RTInputAudioItem) => {
    await item.waitForCompletion();
    if (item.transcription) {
      const userMessage = createMessage({
        role: "user",
        content: item.transcription,
      });
      chatStore.updateTargetSession(session, (session) => {
        session.messages = session.messages.concat([userMessage]);
      });
      // save input audio_url, and update session
      const { audioStartMillis, audioEndMillis } = item;
      // upload audio get audio_url
      // NOTE(review): `blob!` assumes audioHandlerRef is set by the time a
      // transcription exists — confirm the init path guarantees this.
      const blob = audioHandlerRef.current?.saveRecordFile(
        audioStartMillis,
        audioEndMillis,
      );
      uploadImage(blob!).then((audio_url) => {
        userMessage.audio_url = audio_url;
        chatStore.updateTargetSession(session, (session) => {
          session.messages = session.messages.concat();
        });
      });
    }
    // stop streaming play after get input audio.
    audioHandlerRef.current?.stopStreamingPlayback();
  };
  // Start or stop microphone capture. While recording, chunks stream straight
  // to the realtime client. With VAD disabled, stopping also commits the
  // buffered audio and manually requests a response.
  const toggleRecording = async () => {
    if (!isRecording && clientRef.current) {
      try {
        // Lazily create the audio handler on first use.
        if (!audioHandlerRef.current) {
          audioHandlerRef.current = new AudioHandler();
          await audioHandlerRef.current.initialize();
        }
        await audioHandlerRef.current.startRecording(async (chunk) => {
          await clientRef.current?.sendAudio(chunk);
        });
        setIsRecording(true);
      } catch (error) {
        console.error("Failed to start recording:", error);
      }
    } else if (audioHandlerRef.current) {
      try {
        audioHandlerRef.current.stopRecording();
        if (!useVAD) {
          // Manual turn taking: push the captured audio, ingest the resulting
          // input item, then ask the model to reply.
          // NOTE(review): `inputAudio!` assumes clientRef is still set here.
          const inputAudio = await clientRef.current?.commitAudio();
          await handleInputAudio(inputAudio!);
          await clientRef.current?.generateResponse();
        }
        setIsRecording(false);
      } catch (error) {
        console.error("Failed to stop recording:", error);
      }
    }
  };
  241. useEffect(() => {
  242. // 防止重复初始化
  243. if (initRef.current) return;
  244. initRef.current = true;
  245. const initAudioHandler = async () => {
  246. const handler = new AudioHandler();
  247. await handler.initialize();
  248. audioHandlerRef.current = handler;
  249. await handleConnect();
  250. await toggleRecording();
  251. };
  252. initAudioHandler().catch((error) => {
  253. setStatus(error);
  254. console.error(error);
  255. });
  256. return () => {
  257. if (isRecording) {
  258. toggleRecording();
  259. }
  260. audioHandlerRef.current?.close().catch(console.error);
  261. disconnect();
  262. };
  263. }, []);
  264. useEffect(() => {
  265. let animationFrameId: number;
  266. if (isConnected && isRecording) {
  267. const animationFrame = () => {
  268. if (audioHandlerRef.current) {
  269. const freqData = audioHandlerRef.current.getByteFrequencyData();
  270. setFrequencies(freqData);
  271. }
  272. animationFrameId = requestAnimationFrame(animationFrame);
  273. };
  274. animationFrameId = requestAnimationFrame(animationFrame);
  275. } else {
  276. setFrequencies(undefined);
  277. }
  278. return () => {
  279. if (animationFrameId) {
  280. cancelAnimationFrame(animationFrameId);
  281. }
  282. };
  283. }, [isConnected, isRecording]);
  // update session params
  // Push live config changes into an already-open realtime session; the
  // optional chain makes these no-ops before the client is connected.
  useEffect(() => {
    clientRef.current?.configure({ voice });
  }, [voice]);
  useEffect(() => {
    clientRef.current?.configure({ temperature });
  }, [temperature]);
  291. const handleClose = async () => {
  292. onClose?.();
  293. if (isRecording) {
  294. await toggleRecording();
  295. }
  296. disconnect().catch(console.error);
  297. };
  // Layout: pulsing mic visualization on top; bottom bar with the record
  // toggle (left), status text (center), and a power/close button (right).
  return (
    <div className={styles["realtime-chat"]}>
      <div
        className={clsx(styles["circle-mic"], {
          [styles["pulse"]]: isRecording,
        })}
      >
        <VoicePrint frequencies={frequencies} isActive={isRecording} />
      </div>

      <div className={styles["bottom-icons"]}>
        <div>
          <IconButton
            icon={isRecording ? <VoiceIcon /> : <VoiceOffIcon />}
            onClick={toggleRecording}
            disabled={!isConnected}
            shadow
            bordered
          />
        </div>
        <div className={styles["icon-center"]}>{status}</div>
        <div>
          <IconButton
            icon={<PowerIcon />}
            onClick={handleClose}
            shadow
            bordered
          />
        </div>
      </div>
    </div>
  );
}