// realtime-chat.tsx
  1. import { useDebouncedCallback } from "use-debounce";
  2. import VoiceIcon from "@/app/icons/voice.svg";
  3. import VoiceOffIcon from "@/app/icons/voice-off.svg";
  4. import PowerIcon from "@/app/icons/power.svg";
  5. import styles from "./realtime-chat.module.scss";
  6. import clsx from "clsx";
  7. import { useState, useRef, useEffect } from "react";
  8. import {
  9. useAccessStore,
  10. useChatStore,
  11. ChatMessage,
  12. createMessage,
  13. } from "@/app/store";
  14. import { IconButton } from "@/app/components/button";
  15. import {
  16. Modality,
  17. RTClient,
  18. RTInputAudioItem,
  19. RTResponse,
  20. TurnDetection,
  21. Voice,
  22. } from "rt-client";
  23. import { AudioHandler } from "@/app/lib/audio";
  24. import { uploadImage } from "@/app/utils/chat";
/** Props for the full-screen realtime voice-chat overlay. */
interface RealtimeChatProps {
  /** Invoked when the user closes the realtime chat view. */
  onClose?: () => void;
  /** NOTE(review): declared but never invoked anywhere in this file — confirm intended. */
  onStartVoice?: () => void;
  /** NOTE(review): declared but never invoked anywhere in this file — confirm intended. */
  onPausedVoice?: () => void;
}
  30. export function RealtimeChat({
  31. onClose,
  32. onStartVoice,
  33. onPausedVoice,
  34. }: RealtimeChatProps) {
  35. const currentItemId = useRef<string>("");
  36. const currentBotMessage = useRef<ChatMessage | null>();
  37. const currentUserMessage = useRef<ChatMessage | null>();
  38. const accessStore = useAccessStore.getState();
  39. const chatStore = useChatStore();
  40. const session = chatStore.currentSession();
  41. const [status, setStatus] = useState("");
  42. const [isRecording, setIsRecording] = useState(false);
  43. const [isConnected, setIsConnected] = useState(false);
  44. const [isConnecting, setIsConnecting] = useState(false);
  45. const [modality, setModality] = useState("audio");
  46. const [isAzure, setIsAzure] = useState(false);
  47. const [endpoint, setEndpoint] = useState("");
  48. const [deployment, setDeployment] = useState("");
  49. const [useVAD, setUseVAD] = useState(true);
  50. const [voice, setVoice] = useState<Voice>("alloy");
  51. const clientRef = useRef<RTClient | null>(null);
  52. const audioHandlerRef = useRef<AudioHandler | null>(null);
  53. const apiKey = accessStore.openaiApiKey;
  54. const handleConnect = async () => {
  55. if (isConnecting) return;
  56. if (!isConnected) {
  57. try {
  58. setIsConnecting(true);
  59. clientRef.current = isAzure
  60. ? new RTClient(new URL(endpoint), { key: apiKey }, { deployment })
  61. : new RTClient(
  62. { key: apiKey },
  63. { model: "gpt-4o-realtime-preview-2024-10-01" },
  64. );
  65. const modalities: Modality[] =
  66. modality === "audio" ? ["text", "audio"] : ["text"];
  67. const turnDetection: TurnDetection = useVAD
  68. ? { type: "server_vad" }
  69. : null;
  70. clientRef.current.configure({
  71. instructions: "",
  72. voice,
  73. input_audio_transcription: { model: "whisper-1" },
  74. turn_detection: turnDetection,
  75. tools: [],
  76. temperature: 0.9,
  77. modalities,
  78. });
  79. startResponseListener();
  80. setIsConnected(true);
  81. try {
  82. const recentMessages = chatStore.getMessagesWithMemory();
  83. for (const message of recentMessages) {
  84. const { role, content } = message;
  85. if (typeof content === "string") {
  86. await clientRef.current.sendItem({
  87. type: "message",
  88. role: role as any,
  89. content: [
  90. {
  91. type: (role === "assistant" ? "text" : "input_text") as any,
  92. text: content as string,
  93. },
  94. ],
  95. });
  96. }
  97. }
  98. } catch (error) {
  99. console.error("Set message failed:", error);
  100. }
  101. } catch (error) {
  102. console.error("Connection failed:", error);
  103. setStatus("Connection failed");
  104. } finally {
  105. setIsConnecting(false);
  106. }
  107. } else {
  108. await disconnect();
  109. }
  110. };
  111. const disconnect = async () => {
  112. if (clientRef.current) {
  113. try {
  114. await clientRef.current.close();
  115. clientRef.current = null;
  116. setIsConnected(false);
  117. } catch (error) {
  118. console.error("Disconnect failed:", error);
  119. }
  120. }
  121. };
  122. const startResponseListener = async () => {
  123. if (!clientRef.current) return;
  124. try {
  125. for await (const serverEvent of clientRef.current.events()) {
  126. if (serverEvent.type === "response") {
  127. await handleResponse(serverEvent);
  128. } else if (serverEvent.type === "input_audio") {
  129. await handleInputAudio(serverEvent);
  130. }
  131. }
  132. } catch (error) {
  133. if (clientRef.current) {
  134. console.error("Response iteration error:", error);
  135. }
  136. }
  137. };
  138. const handleResponse = async (response: RTResponse) => {
  139. for await (const item of response) {
  140. if (item.type === "message" && item.role === "assistant") {
  141. const botMessage = createMessage({
  142. role: item.role,
  143. content: "",
  144. });
  145. // add bot message first
  146. chatStore.updateTargetSession(session, (session) => {
  147. session.messages = session.messages.concat([botMessage]);
  148. });
  149. for await (const content of item) {
  150. if (content.type === "text") {
  151. for await (const text of content.textChunks()) {
  152. botMessage.content += text;
  153. }
  154. } else if (content.type === "audio") {
  155. const textTask = async () => {
  156. for await (const text of content.transcriptChunks()) {
  157. botMessage.content += text;
  158. }
  159. };
  160. const audioTask = async () => {
  161. audioHandlerRef.current?.startStreamingPlayback();
  162. for await (const audio of content.audioChunks()) {
  163. audioHandlerRef.current?.playChunk(audio);
  164. }
  165. };
  166. await Promise.all([textTask(), audioTask()]);
  167. }
  168. // update message.content
  169. chatStore.updateTargetSession(session, (session) => {
  170. session.messages = session.messages.concat();
  171. });
  172. }
  173. // upload audio get audio_url
  174. const blob = audioHandlerRef.current?.savePlayFile();
  175. uploadImage(blob!).then((audio_url) => {
  176. botMessage.audio_url = audio_url;
  177. // botMessage.date = new Date().toLocaleString();
  178. // update text and audio_url
  179. chatStore.updateTargetSession(session, (session) => {
  180. session.messages = session.messages.concat();
  181. });
  182. });
  183. }
  184. }
  185. };
  186. const handleInputAudio = async (item: RTInputAudioItem) => {
  187. audioHandlerRef.current?.stopStreamingPlayback();
  188. await item.waitForCompletion();
  189. if (item.transcription) {
  190. const userMessage = createMessage({
  191. role: "user",
  192. content: item.transcription,
  193. });
  194. chatStore.updateTargetSession(session, (session) => {
  195. session.messages = session.messages.concat([userMessage]);
  196. });
  197. // save input audio_url, and update session
  198. const { audioStartMillis, audioEndMillis } = item;
  199. // upload audio get audio_url
  200. const blob = audioHandlerRef.current?.saveRecordFile(
  201. audioStartMillis,
  202. audioEndMillis,
  203. );
  204. uploadImage(blob!).then((audio_url) => {
  205. userMessage.audio_url = audio_url;
  206. chatStore.updateTargetSession(session, (session) => {
  207. session.messages = session.messages.concat();
  208. });
  209. });
  210. }
  211. };
  212. const toggleRecording = async () => {
  213. if (!isRecording && clientRef.current) {
  214. try {
  215. if (!audioHandlerRef.current) {
  216. audioHandlerRef.current = new AudioHandler();
  217. await audioHandlerRef.current.initialize();
  218. }
  219. await audioHandlerRef.current.startRecording(async (chunk) => {
  220. await clientRef.current?.sendAudio(chunk);
  221. });
  222. setIsRecording(true);
  223. } catch (error) {
  224. console.error("Failed to start recording:", error);
  225. }
  226. } else if (audioHandlerRef.current) {
  227. try {
  228. audioHandlerRef.current.stopRecording();
  229. if (!useVAD) {
  230. const inputAudio = await clientRef.current?.commitAudio();
  231. await handleInputAudio(inputAudio!);
  232. await clientRef.current?.generateResponse();
  233. }
  234. setIsRecording(false);
  235. } catch (error) {
  236. console.error("Failed to stop recording:", error);
  237. }
  238. }
  239. };
  240. useEffect(
  241. useDebouncedCallback(() => {
  242. const initAudioHandler = async () => {
  243. const handler = new AudioHandler();
  244. await handler.initialize();
  245. audioHandlerRef.current = handler;
  246. await handleConnect();
  247. await toggleRecording();
  248. };
  249. initAudioHandler().catch((error) => {
  250. setStatus(error);
  251. console.error(error);
  252. });
  253. return () => {
  254. if (isRecording) {
  255. toggleRecording();
  256. }
  257. audioHandlerRef.current?.close().catch(console.error);
  258. disconnect();
  259. };
  260. }),
  261. [],
  262. );
  263. const handleClose = async () => {
  264. onClose?.();
  265. if (isRecording) {
  266. await toggleRecording();
  267. }
  268. disconnect().catch(console.error);
  269. };
  270. return (
  271. <div className={styles["realtime-chat"]}>
  272. <div
  273. className={clsx(styles["circle-mic"], {
  274. [styles["pulse"]]: true,
  275. })}
  276. >
  277. <div className={styles["icon-center"]}></div>
  278. </div>
  279. <div className={styles["bottom-icons"]}>
  280. <div>
  281. <IconButton
  282. icon={isRecording ? <VoiceOffIcon /> : <VoiceIcon />}
  283. onClick={toggleRecording}
  284. disabled={!isConnected}
  285. type={isRecording ? "danger" : isConnected ? "primary" : null}
  286. />
  287. </div>
  288. <div className={styles["icon-center"]}>{status}</div>
  289. <div>
  290. <IconButton
  291. icon={<PowerIcon />}
  292. onClick={handleClose}
  293. type={isConnecting || isConnected ? "danger" : "primary"}
  294. />
  295. </div>
  296. </div>
  297. </div>
  298. );
  299. }