// requests.ts
  1. import type { ChatRequest, ChatResponse } from "./api/openai/typing";
  2. import {
  3. Message,
  4. ModelConfig,
  5. ModelType,
  6. useAccessStore,
  7. useAppConfig,
  8. useChatStore,
  9. } from "./store";
  10. import { showToast } from "./components/ui-lib";
  11. import { ACCESS_CODE_PREFIX } from "./constant";
// Shared timeout (ms) used both for the initial request and for each streamed read.
const TIME_OUT_MS = 60000;
  13. const makeRequestParam = (
  14. messages: Message[],
  15. options?: {
  16. stream?: boolean;
  17. overrideModel?: ModelType;
  18. },
  19. ): ChatRequest => {
  20. let sendMessages = messages.map((v) => ({
  21. role: v.role,
  22. content: v.content,
  23. }));
  24. const modelConfig = {
  25. ...useAppConfig.getState().modelConfig,
  26. ...useChatStore.getState().currentSession().mask.modelConfig,
  27. };
  28. // override model config
  29. if (options?.overrideModel) {
  30. modelConfig.model = options.overrideModel;
  31. }
  32. return {
  33. messages: sendMessages,
  34. stream: options?.stream,
  35. model: modelConfig.model,
  36. temperature: modelConfig.temperature,
  37. presence_penalty: modelConfig.presence_penalty,
  38. };
  39. };
  40. export function requestOpenaiClient(path: string) {
  41. const openaiUrl = useAccessStore.getState().openaiUrl;
  42. return (body: any, method = "POST") =>
  43. fetch(openaiUrl + path, {
  44. method,
  45. body: body && JSON.stringify(body),
  46. headers: getHeaders(),
  47. });
  48. }
  49. export async function requestChat(
  50. messages: Message[],
  51. options?: {
  52. model?: ModelType;
  53. },
  54. ) {
  55. const req: ChatRequest = makeRequestParam(messages, {
  56. overrideModel: options?.model,
  57. });
  58. const res = await requestOpenaiClient("v1/chat/completions")(req);
  59. try {
  60. const response = (await res.json()) as ChatResponse;
  61. return response;
  62. } catch (error) {
  63. console.error("[Request Chat] ", error, res.body);
  64. }
  65. }
  66. export async function requestUsage() {
  67. const formatDate = (d: Date) =>
  68. `${d.getFullYear()}-${(d.getMonth() + 1).toString().padStart(2, "0")}-${d
  69. .getDate()
  70. .toString()
  71. .padStart(2, "0")}`;
  72. const ONE_DAY = 1 * 24 * 60 * 60 * 1000;
  73. const now = new Date();
  74. const startOfMonth = new Date(now.getFullYear(), now.getMonth(), 1);
  75. const startDate = formatDate(startOfMonth);
  76. const endDate = formatDate(new Date(Date.now() + ONE_DAY));
  77. const [used, subs] = await Promise.all([
  78. requestOpenaiClient(
  79. `dashboard/billing/usage?start_date=${startDate}&end_date=${endDate}`,
  80. )(null, "GET"),
  81. requestOpenaiClient("dashboard/billing/subscription")(null, "GET"),
  82. ]);
  83. const response = (await used.json()) as {
  84. total_usage?: number;
  85. error?: {
  86. type: string;
  87. message: string;
  88. };
  89. };
  90. const total = (await subs.json()) as {
  91. hard_limit_usd?: number;
  92. };
  93. if (response.error && response.error.type) {
  94. showToast(response.error.message);
  95. return;
  96. }
  97. if (response.total_usage) {
  98. response.total_usage = Math.round(response.total_usage) / 100;
  99. }
  100. if (total.hard_limit_usd) {
  101. total.hard_limit_usd = Math.round(total.hard_limit_usd * 100) / 100;
  102. }
  103. return {
  104. used: response.total_usage,
  105. subscription: total.hard_limit_usd,
  106. };
  107. }
/**
 * Streaming chat completion.
 *
 * Incremental text is delivered via `options.onMessage(text, done)`: the full
 * accumulated text is re-sent on every chunk, and a final call with
 * `done === true` marks completion. Errors go through `options.onError`
 * (with the HTTP status when available) and are never thrown. The internal
 * AbortController is exposed via `options.onController` so callers can cancel.
 *
 * NOTE(review): the body is consumed as raw text, not parsed as SSE "data:"
 * frames — presumably a proxy delivers plain text; confirm against the server.
 */
export async function requestChatStream(
  messages: Message[],
  options?: {
    modelConfig?: ModelConfig;
    overrideModel?: ModelType;
    onMessage: (message: string, done: boolean) => void;
    onError: (error: Error, statusCode?: number) => void;
    onController?: (controller: AbortController) => void;
  },
) {
  const req = makeRequestParam(messages, {
    stream: true,
    overrideModel: options?.overrideModel,
  });

  console.log("[Request] ", req);

  // One controller serves both the connect-phase timeout and user cancellation.
  const controller = new AbortController();
  const reqTimeoutId = setTimeout(() => controller.abort(), TIME_OUT_MS);

  try {
    const openaiUrl = useAccessStore.getState().openaiUrl;
    const res = await fetch(openaiUrl + "v1/chat/completions", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        ...getHeaders(),
      },
      body: JSON.stringify(req),
      signal: controller.signal,
    });
    // Headers received — the connect-phase timeout no longer applies.
    clearTimeout(reqTimeoutId);

    let responseText = "";

    // Flush the final text and abort the fetch to release the stream.
    // May run twice (per-read watchdog + normal loop exit): the second abort
    // is a no-op, but onMessage(done=true) is then delivered twice.
    const finish = () => {
      options?.onMessage(responseText, true);
      controller.abort();
    };

    if (res.ok) {
      const reader = res.body?.getReader();
      const decoder = new TextDecoder();

      options?.onController?.(controller);

      while (true) {
        // Per-chunk watchdog: if a read stalls, finish with what has arrived.
        const resTimeoutId = setTimeout(() => finish(), TIME_OUT_MS);
        const content = await reader?.read();
        clearTimeout(resTimeoutId);

        // No reader or an empty final chunk — stop reading.
        if (!content || !content.value) {
          break;
        }

        const text = decoder.decode(content.value, { stream: true });
        responseText += text;

        const done = content.done;
        options?.onMessage(responseText, false);

        if (done) {
          break;
        }
      }

      finish();
    } else if (res.status === 401) {
      console.error("Unauthorized");
      options?.onError(new Error("Unauthorized"), res.status);
    } else {
      console.error("Stream Error", res.body);
      options?.onError(new Error("Stream Error"), res.status);
    }
  } catch (err) {
    // Covers network failures and aborts triggered by the timeouts above.
    console.error("NetWork Error", err);
    options?.onError(err as Error);
  }
}
  174. export async function requestWithPrompt(
  175. messages: Message[],
  176. prompt: string,
  177. options?: {
  178. model?: ModelType;
  179. },
  180. ) {
  181. messages = messages.concat([
  182. {
  183. role: "user",
  184. content: prompt,
  185. date: new Date().toLocaleString(),
  186. },
  187. ]);
  188. const res = await requestChat(messages, options);
  189. return res?.choices?.at(0)?.message?.content ?? "";
  190. }
  191. // To store message streaming controller
  192. export const ControllerPool = {
  193. controllers: {} as Record<string, AbortController>,
  194. addController(
  195. sessionIndex: number,
  196. messageId: number,
  197. controller: AbortController,
  198. ) {
  199. const key = this.key(sessionIndex, messageId);
  200. this.controllers[key] = controller;
  201. return key;
  202. },
  203. stop(sessionIndex: number, messageId: number) {
  204. const key = this.key(sessionIndex, messageId);
  205. const controller = this.controllers[key];
  206. controller?.abort();
  207. },
  208. stopAll() {
  209. Object.values(this.controllers).forEach((v) => v.abort());
  210. },
  211. hasPending() {
  212. return Object.values(this.controllers).length > 0;
  213. },
  214. remove(sessionIndex: number, messageId: number) {
  215. const key = this.key(sessionIndex, messageId);
  216. delete this.controllers[key];
  217. },
  218. key(sessionIndex: number, messageIndex: number) {
  219. return `${sessionIndex},${messageIndex}`;
  220. },
  221. };