realtime-chat.tsx 10 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341
  1. import VoiceIcon from "@/app/icons/voice.svg";
  2. import VoiceOffIcon from "@/app/icons/voice-off.svg";
  3. import PowerIcon from "@/app/icons/power.svg";
  4. import styles from "./realtime-chat.module.scss";
  5. import clsx from "clsx";
  6. import { useState, useRef, useEffect } from "react";
  7. import { useChatStore, createMessage, useAppConfig } from "@/app/store";
  8. import { IconButton } from "@/app/components/button";
  9. import {
  10. Modality,
  11. RTClient,
  12. RTInputAudioItem,
  13. RTResponse,
  14. TurnDetection,
  15. } from "rt-client";
  16. import { AudioHandler } from "@/app/lib/audio";
  17. import { uploadImage } from "@/app/utils/chat";
interface RealtimeChatProps {
  /** Called when the user dismisses the realtime chat panel (power button). */
  onClose?: () => void;
  /** Called when voice capture starts. NOTE(review): not invoked anywhere in this file — confirm callers rely on it. */
  onStartVoice?: () => void;
  /** Called when voice capture pauses. NOTE(review): not invoked anywhere in this file — confirm callers rely on it. */
  onPausedVoice?: () => void;
}
  23. export function RealtimeChat({
  24. onClose,
  25. onStartVoice,
  26. onPausedVoice,
  27. }: RealtimeChatProps) {
  28. const chatStore = useChatStore();
  29. const session = chatStore.currentSession();
  30. const config = useAppConfig();
  31. const [status, setStatus] = useState("");
  32. const [isRecording, setIsRecording] = useState(false);
  33. const [isConnected, setIsConnected] = useState(false);
  34. const [isConnecting, setIsConnecting] = useState(false);
  35. const [modality, setModality] = useState("audio");
  36. const [useVAD, setUseVAD] = useState(true);
  37. const clientRef = useRef<RTClient | null>(null);
  38. const audioHandlerRef = useRef<AudioHandler | null>(null);
  39. const initRef = useRef(false);
  40. const temperature = config.realtimeConfig.temperature;
  41. const apiKey = config.realtimeConfig.apiKey;
  42. const model = config.realtimeConfig.model;
  43. const azure = config.realtimeConfig.provider === "Azure";
  44. const azureEndpoint = config.realtimeConfig.azure.endpoint;
  45. const azureDeployment = config.realtimeConfig.azure.deployment;
  46. const voice = config.realtimeConfig.voice;
  47. const handleConnect = async () => {
  48. if (isConnecting) return;
  49. if (!isConnected) {
  50. try {
  51. setIsConnecting(true);
  52. clientRef.current = azure
  53. ? new RTClient(
  54. new URL(azureEndpoint),
  55. { key: apiKey },
  56. { deployment: azureDeployment },
  57. )
  58. : new RTClient({ key: apiKey }, { model });
  59. const modalities: Modality[] =
  60. modality === "audio" ? ["text", "audio"] : ["text"];
  61. const turnDetection: TurnDetection = useVAD
  62. ? { type: "server_vad" }
  63. : null;
  64. await clientRef.current.configure({
  65. instructions: "",
  66. voice,
  67. input_audio_transcription: { model: "whisper-1" },
  68. turn_detection: turnDetection,
  69. tools: [],
  70. temperature,
  71. modalities,
  72. });
  73. startResponseListener();
  74. setIsConnected(true);
  75. try {
  76. const recentMessages = chatStore.getMessagesWithMemory();
  77. for (const message of recentMessages) {
  78. const { role, content } = message;
  79. if (typeof content === "string") {
  80. await clientRef.current.sendItem({
  81. type: "message",
  82. role: role as any,
  83. content: [
  84. {
  85. type: (role === "assistant" ? "text" : "input_text") as any,
  86. text: content as string,
  87. },
  88. ],
  89. });
  90. }
  91. }
  92. await clientRef.current.generateResponse();
  93. } catch (error) {
  94. console.error("Set message failed:", error);
  95. }
  96. } catch (error) {
  97. console.error("Connection failed:", error);
  98. setStatus("Connection failed");
  99. } finally {
  100. setIsConnecting(false);
  101. }
  102. } else {
  103. await disconnect();
  104. }
  105. };
  106. const disconnect = async () => {
  107. if (clientRef.current) {
  108. try {
  109. await clientRef.current.close();
  110. clientRef.current = null;
  111. setIsConnected(false);
  112. } catch (error) {
  113. console.error("Disconnect failed:", error);
  114. }
  115. }
  116. };
  117. const startResponseListener = async () => {
  118. if (!clientRef.current) return;
  119. try {
  120. for await (const serverEvent of clientRef.current.events()) {
  121. if (serverEvent.type === "response") {
  122. await handleResponse(serverEvent);
  123. } else if (serverEvent.type === "input_audio") {
  124. await handleInputAudio(serverEvent);
  125. }
  126. }
  127. } catch (error) {
  128. if (clientRef.current) {
  129. console.error("Response iteration error:", error);
  130. }
  131. }
  132. };
  133. const handleResponse = async (response: RTResponse) => {
  134. for await (const item of response) {
  135. if (item.type === "message" && item.role === "assistant") {
  136. const botMessage = createMessage({
  137. role: item.role,
  138. content: "",
  139. });
  140. // add bot message first
  141. chatStore.updateTargetSession(session, (session) => {
  142. session.messages = session.messages.concat([botMessage]);
  143. });
  144. for await (const content of item) {
  145. if (content.type === "text") {
  146. for await (const text of content.textChunks()) {
  147. botMessage.content += text;
  148. }
  149. } else if (content.type === "audio") {
  150. const textTask = async () => {
  151. for await (const text of content.transcriptChunks()) {
  152. botMessage.content += text;
  153. }
  154. };
  155. const audioTask = async () => {
  156. audioHandlerRef.current?.startStreamingPlayback();
  157. for await (const audio of content.audioChunks()) {
  158. audioHandlerRef.current?.playChunk(audio);
  159. }
  160. };
  161. await Promise.all([textTask(), audioTask()]);
  162. }
  163. // update message.content
  164. chatStore.updateTargetSession(session, (session) => {
  165. session.messages = session.messages.concat();
  166. });
  167. }
  168. // upload audio get audio_url
  169. const blob = audioHandlerRef.current?.savePlayFile();
  170. uploadImage(blob!).then((audio_url) => {
  171. botMessage.audio_url = audio_url;
  172. // update text and audio_url
  173. chatStore.updateTargetSession(session, (session) => {
  174. session.messages = session.messages.concat();
  175. });
  176. });
  177. }
  178. }
  179. };
  180. const handleInputAudio = async (item: RTInputAudioItem) => {
  181. await item.waitForCompletion();
  182. if (item.transcription) {
  183. const userMessage = createMessage({
  184. role: "user",
  185. content: item.transcription,
  186. });
  187. chatStore.updateTargetSession(session, (session) => {
  188. session.messages = session.messages.concat([userMessage]);
  189. });
  190. // save input audio_url, and update session
  191. const { audioStartMillis, audioEndMillis } = item;
  192. // upload audio get audio_url
  193. const blob = audioHandlerRef.current?.saveRecordFile(
  194. audioStartMillis,
  195. audioEndMillis,
  196. );
  197. uploadImage(blob!).then((audio_url) => {
  198. userMessage.audio_url = audio_url;
  199. chatStore.updateTargetSession(session, (session) => {
  200. session.messages = session.messages.concat();
  201. });
  202. });
  203. }
  204. // stop streaming play after get input audio.
  205. audioHandlerRef.current?.stopStreamingPlayback();
  206. };
  207. const toggleRecording = async () => {
  208. if (!isRecording && clientRef.current) {
  209. try {
  210. if (!audioHandlerRef.current) {
  211. audioHandlerRef.current = new AudioHandler();
  212. await audioHandlerRef.current.initialize();
  213. }
  214. await audioHandlerRef.current.startRecording(async (chunk) => {
  215. await clientRef.current?.sendAudio(chunk);
  216. });
  217. setIsRecording(true);
  218. } catch (error) {
  219. console.error("Failed to start recording:", error);
  220. }
  221. } else if (audioHandlerRef.current) {
  222. try {
  223. audioHandlerRef.current.stopRecording();
  224. if (!useVAD) {
  225. const inputAudio = await clientRef.current?.commitAudio();
  226. await handleInputAudio(inputAudio!);
  227. await clientRef.current?.generateResponse();
  228. }
  229. setIsRecording(false);
  230. } catch (error) {
  231. console.error("Failed to stop recording:", error);
  232. }
  233. }
  234. };
  235. useEffect(() => {
  236. // 防止重复初始化
  237. if (initRef.current) return;
  238. initRef.current = true;
  239. const initAudioHandler = async () => {
  240. const handler = new AudioHandler();
  241. await handler.initialize();
  242. audioHandlerRef.current = handler;
  243. await handleConnect();
  244. await toggleRecording();
  245. };
  246. initAudioHandler().catch((error) => {
  247. setStatus(error);
  248. console.error(error);
  249. });
  250. // TODO demo to get frequency. will pass audioHandlerRef.current to child component draw.
  251. // TODO try using requestAnimationFrame
  252. const interval = setInterval(() => {
  253. if (audioHandlerRef.current) {
  254. const data = audioHandlerRef.current.getByteFrequencyData();
  255. console.log("getByteFrequencyData", data);
  256. }
  257. }, 100);
  258. return () => {
  259. if (isRecording) {
  260. toggleRecording();
  261. }
  262. audioHandlerRef.current
  263. ?.close()
  264. .catch(console.error)
  265. .finally(() => {
  266. clearInterval(interval);
  267. });
  268. disconnect();
  269. };
  270. }, []);
  271. // update session params
  272. useEffect(() => {
  273. clientRef.current?.configure({ voice });
  274. }, [voice]);
  275. useEffect(() => {
  276. clientRef.current?.configure({ temperature });
  277. }, [temperature]);
  278. const handleClose = async () => {
  279. onClose?.();
  280. if (isRecording) {
  281. await toggleRecording();
  282. }
  283. disconnect().catch(console.error);
  284. };
  285. return (
  286. <div className={styles["realtime-chat"]}>
  287. <div
  288. className={clsx(styles["circle-mic"], {
  289. [styles["pulse"]]: isRecording,
  290. })}
  291. >
  292. <div className={styles["icon-center"]}></div>
  293. </div>
  294. <div className={styles["bottom-icons"]}>
  295. <div>
  296. <IconButton
  297. icon={isRecording ? <VoiceIcon /> : <VoiceOffIcon />}
  298. onClick={toggleRecording}
  299. disabled={!isConnected}
  300. shadow
  301. bordered
  302. />
  303. </div>
  304. <div className={styles["icon-center"]}>{status}</div>
  305. <div>
  306. <IconButton
  307. icon={<PowerIcon />}
  308. onClick={handleClose}
  309. shadow
  310. bordered
  311. />
  312. </div>
  313. </div>
  314. </div>
  315. );
  316. }