// realtime-chat.tsx
// Icon assets and styles for the realtime chat panel.
import VoiceIcon from "@/app/icons/voice.svg";
import VoiceOffIcon from "@/app/icons/voice-off.svg";
import PowerIcon from "@/app/icons/power.svg";
import styles from "./realtime-chat.module.scss";
import clsx from "clsx";
import { useState, useRef, useEffect } from "react";
// Shared chat session store and message factory.
import { useChatStore, createMessage, useAppConfig } from "@/app/store";
import { IconButton } from "@/app/components/button";
// rt-client: realtime (speech-to-speech) API client and its event types.
import {
  Modality,
  RTClient,
  RTInputAudioItem,
  RTResponse,
  TurnDetection,
} from "rt-client";
import { AudioHandler } from "@/app/lib/audio";
import { uploadImage } from "@/app/utils/chat";

/** Callbacks supplied by the parent chat view. */
interface RealtimeChatProps {
  /** Invoked when the user closes the realtime chat panel. */
  onClose?: () => void;
  /** Reserved hook for voice start (not called by this component yet). */
  onStartVoice?: () => void;
  /** Reserved hook for voice pause (not called by this component yet). */
  onPausedVoice?: () => void;
}
  23. export function RealtimeChat({
  24. onClose,
  25. onStartVoice,
  26. onPausedVoice,
  27. }: RealtimeChatProps) {
  28. const chatStore = useChatStore();
  29. const session = chatStore.currentSession();
  30. const config = useAppConfig();
  31. const [status, setStatus] = useState("");
  32. const [isRecording, setIsRecording] = useState(false);
  33. const [isConnected, setIsConnected] = useState(false);
  34. const [isConnecting, setIsConnecting] = useState(false);
  35. const [modality, setModality] = useState("audio");
  36. const [useVAD, setUseVAD] = useState(true);
  37. const clientRef = useRef<RTClient | null>(null);
  38. const audioHandlerRef = useRef<AudioHandler | null>(null);
  39. const initRef = useRef(false);
  40. const temperature = config.realtimeConfig.temperature;
  41. const apiKey = config.realtimeConfig.apiKey;
  42. const model = config.realtimeConfig.model;
  43. const azure = config.realtimeConfig.provider === "Azure";
  44. const azureEndpoint = config.realtimeConfig.azure.endpoint;
  45. const azureDeployment = config.realtimeConfig.azure.deployment;
  46. const voice = config.realtimeConfig.voice;
  47. const handleConnect = async () => {
  48. if (isConnecting) return;
  49. if (!isConnected) {
  50. try {
  51. setIsConnecting(true);
  52. clientRef.current = azure
  53. ? new RTClient(
  54. new URL(azureEndpoint),
  55. { key: apiKey },
  56. { deployment: azureDeployment },
  57. )
  58. : new RTClient({ key: apiKey }, { model });
  59. const modalities: Modality[] =
  60. modality === "audio" ? ["text", "audio"] : ["text"];
  61. const turnDetection: TurnDetection = useVAD
  62. ? { type: "server_vad" }
  63. : null;
  64. await clientRef.current.configure({
  65. instructions: "",
  66. voice,
  67. input_audio_transcription: { model: "whisper-1" },
  68. turn_detection: turnDetection,
  69. tools: [],
  70. temperature,
  71. modalities,
  72. });
  73. startResponseListener();
  74. setIsConnected(true);
  75. try {
  76. const recentMessages = chatStore.getMessagesWithMemory();
  77. for (const message of recentMessages) {
  78. const { role, content } = message;
  79. if (typeof content === "string") {
  80. await clientRef.current.sendItem({
  81. type: "message",
  82. role: role as any,
  83. content: [
  84. {
  85. type: (role === "assistant" ? "text" : "input_text") as any,
  86. text: content as string,
  87. },
  88. ],
  89. });
  90. }
  91. }
  92. // await clientRef.current.generateResponse();
  93. } catch (error) {
  94. console.error("Set message failed:", error);
  95. }
  96. } catch (error) {
  97. console.error("Connection failed:", error);
  98. setStatus("Connection failed");
  99. } finally {
  100. setIsConnecting(false);
  101. }
  102. } else {
  103. await disconnect();
  104. }
  105. };
  106. const disconnect = async () => {
  107. if (clientRef.current) {
  108. try {
  109. await clientRef.current.close();
  110. clientRef.current = null;
  111. setIsConnected(false);
  112. } catch (error) {
  113. console.error("Disconnect failed:", error);
  114. }
  115. }
  116. };
  117. const startResponseListener = async () => {
  118. if (!clientRef.current) return;
  119. try {
  120. for await (const serverEvent of clientRef.current.events()) {
  121. if (serverEvent.type === "response") {
  122. await handleResponse(serverEvent);
  123. } else if (serverEvent.type === "input_audio") {
  124. await handleInputAudio(serverEvent);
  125. }
  126. }
  127. } catch (error) {
  128. if (clientRef.current) {
  129. console.error("Response iteration error:", error);
  130. }
  131. }
  132. };
  133. const handleResponse = async (response: RTResponse) => {
  134. for await (const item of response) {
  135. if (item.type === "message" && item.role === "assistant") {
  136. const botMessage = createMessage({
  137. role: item.role,
  138. content: "",
  139. });
  140. // add bot message first
  141. chatStore.updateTargetSession(session, (session) => {
  142. session.messages = session.messages.concat([botMessage]);
  143. });
  144. let hasAudio = false;
  145. for await (const content of item) {
  146. if (content.type === "text") {
  147. for await (const text of content.textChunks()) {
  148. botMessage.content += text;
  149. }
  150. } else if (content.type === "audio") {
  151. const textTask = async () => {
  152. for await (const text of content.transcriptChunks()) {
  153. botMessage.content += text;
  154. }
  155. };
  156. const audioTask = async () => {
  157. audioHandlerRef.current?.startStreamingPlayback();
  158. for await (const audio of content.audioChunks()) {
  159. hasAudio = true;
  160. audioHandlerRef.current?.playChunk(audio);
  161. }
  162. };
  163. await Promise.all([textTask(), audioTask()]);
  164. }
  165. // update message.content
  166. chatStore.updateTargetSession(session, (session) => {
  167. session.messages = session.messages.concat();
  168. });
  169. }
  170. if (hasAudio) {
  171. // upload audio get audio_url
  172. const blob = audioHandlerRef.current?.savePlayFile();
  173. uploadImage(blob!).then((audio_url) => {
  174. botMessage.audio_url = audio_url;
  175. // update text and audio_url
  176. chatStore.updateTargetSession(session, (session) => {
  177. session.messages = session.messages.concat();
  178. });
  179. });
  180. }
  181. }
  182. }
  183. };
  184. const handleInputAudio = async (item: RTInputAudioItem) => {
  185. await item.waitForCompletion();
  186. if (item.transcription) {
  187. const userMessage = createMessage({
  188. role: "user",
  189. content: item.transcription,
  190. });
  191. chatStore.updateTargetSession(session, (session) => {
  192. session.messages = session.messages.concat([userMessage]);
  193. });
  194. // save input audio_url, and update session
  195. const { audioStartMillis, audioEndMillis } = item;
  196. // upload audio get audio_url
  197. const blob = audioHandlerRef.current?.saveRecordFile(
  198. audioStartMillis,
  199. audioEndMillis,
  200. );
  201. uploadImage(blob!).then((audio_url) => {
  202. userMessage.audio_url = audio_url;
  203. chatStore.updateTargetSession(session, (session) => {
  204. session.messages = session.messages.concat();
  205. });
  206. });
  207. }
  208. // stop streaming play after get input audio.
  209. audioHandlerRef.current?.stopStreamingPlayback();
  210. };
  211. const toggleRecording = async () => {
  212. if (!isRecording && clientRef.current) {
  213. try {
  214. if (!audioHandlerRef.current) {
  215. audioHandlerRef.current = new AudioHandler();
  216. await audioHandlerRef.current.initialize();
  217. }
  218. await audioHandlerRef.current.startRecording(async (chunk) => {
  219. await clientRef.current?.sendAudio(chunk);
  220. });
  221. setIsRecording(true);
  222. } catch (error) {
  223. console.error("Failed to start recording:", error);
  224. }
  225. } else if (audioHandlerRef.current) {
  226. try {
  227. audioHandlerRef.current.stopRecording();
  228. if (!useVAD) {
  229. const inputAudio = await clientRef.current?.commitAudio();
  230. await handleInputAudio(inputAudio!);
  231. await clientRef.current?.generateResponse();
  232. }
  233. setIsRecording(false);
  234. } catch (error) {
  235. console.error("Failed to stop recording:", error);
  236. }
  237. }
  238. };
  239. useEffect(() => {
  240. // 防止重复初始化
  241. if (initRef.current) return;
  242. initRef.current = true;
  243. const initAudioHandler = async () => {
  244. const handler = new AudioHandler();
  245. await handler.initialize();
  246. audioHandlerRef.current = handler;
  247. await handleConnect();
  248. await toggleRecording();
  249. };
  250. initAudioHandler().catch((error) => {
  251. setStatus(error);
  252. console.error(error);
  253. });
  254. // TODO demo to get frequency. will pass audioHandlerRef.current to child component draw.
  255. // TODO try using requestAnimationFrame
  256. const interval = setInterval(() => {
  257. if (audioHandlerRef.current) {
  258. const data = audioHandlerRef.current.getByteFrequencyData();
  259. console.log("getByteFrequencyData", data);
  260. }
  261. }, 1000);
  262. return () => {
  263. if (isRecording) {
  264. toggleRecording();
  265. }
  266. audioHandlerRef.current
  267. ?.close()
  268. .catch(console.error)
  269. .finally(() => {
  270. clearInterval(interval);
  271. });
  272. disconnect();
  273. };
  274. }, []);
  275. // update session params
  276. useEffect(() => {
  277. clientRef.current?.configure({ voice });
  278. }, [voice]);
  279. useEffect(() => {
  280. clientRef.current?.configure({ temperature });
  281. }, [temperature]);
  282. const handleClose = async () => {
  283. onClose?.();
  284. if (isRecording) {
  285. await toggleRecording();
  286. }
  287. disconnect().catch(console.error);
  288. };
  289. return (
  290. <div className={styles["realtime-chat"]}>
  291. <div
  292. className={clsx(styles["circle-mic"], {
  293. [styles["pulse"]]: isRecording,
  294. })}
  295. >
  296. <div className={styles["icon-center"]}></div>
  297. </div>
  298. <div className={styles["bottom-icons"]}>
  299. <div>
  300. <IconButton
  301. icon={isRecording ? <VoiceIcon /> : <VoiceOffIcon />}
  302. onClick={toggleRecording}
  303. disabled={!isConnected}
  304. shadow
  305. bordered
  306. />
  307. </div>
  308. <div className={styles["icon-center"]}>{status}</div>
  309. <div>
  310. <IconButton
  311. icon={<PowerIcon />}
  312. onClick={handleClose}
  313. shadow
  314. bordered
  315. />
  316. </div>
  317. </div>
  318. </div>
  319. );
  320. }