bigmodel.ts

  1. "use client";
  2. import { REQUEST_TIMEOUT_MS } from "@/app/constant";
  3. import { useChatStore } from "@/app/store";
  4. import {
  5. ChatOptions,
  6. LLMApi,
  7. LLMModel,
  8. } from "../api";
  9. import Locale from "../../locales";
  10. import {
  11. EventStreamContentType,
  12. fetchEventSource,
  13. } from "@fortaine/fetch-event-source";
  14. import { prettyObject } from "@/app/utils/format";
  15. import { getMessageTextContent } from "@/app/utils";
  16. import { bigModelApiKey, knowledgeId, template } from "../config";
export class BigModelApi implements LLMApi {
  public useApi: 'public' | 'private';
  public publicPath: string;
  public privatePath: string;

  constructor() {
    this.useApi = 'private';
    this.publicPath = 'https://open.bigmodel.cn/api/paas/v4/chat/completions';
    // Configure the private request URL
    // this.privatePath = 'https://open.bigmodel.cn/api/llm-application/open/model-api/1828613766624038913/sse-invoke'; // Jianke campus recruitment assistant
    this.privatePath = 'https://open.bigmodel.cn/api/llm-application/open/model-api/1830829847853891585/sse-invoke'; // Welcome to Jianke campus recruitment assistant 2 👏
  }
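
  // Sends the conversation to the selected BigModel endpoint (public or private)
  // and streams the reply back through options.onUpdate / onFinish / onError.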
  async chat(options: ChatOptions) {
    const messages = options.messages.map((item) => {
      return {
        role: item.role,
        content: getMessageTextContent(item),
      };
    });
    const userMessages = messages.filter((item) => item.content);
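    // If the filtered message count is even, prepend a blank user message
    // (a braille blank space, so it is not dropped as empty content) to keep
    // the count odd before sending it upstream.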
    if (userMessages.length % 2 === 0) {
      userMessages.unshift({
        role: "user",
        content: "⠀",
      });
    }

    // Parameters for the open (public) model API
    const publicParams: any = {
      messages: userMessages,
      stream: true, // streamed reply
      model: 'glm-4-0520', // model
      temperature: 0.01, // sampling temperature
      top_p: 0.7, // nucleus sampling
      // advanced configuration
      tools: [
        {
          type: 'retrieval', // tool type: retrieval
          retrieval: {
            // knowledge base ID
            knowledge_id: knowledgeId,
            // knowledge base prompt template
            prompt_template: template.content,
          },
        },
      ],
    };

    // Parameters for the private (application) model API
    const privateParams: any = {
      prompt: userMessages,
      // model: 'glm-4-0520', // model
      // temperature: 0.01, // sampling temperature
      // top_p: 0.7, // nucleus sampling
      // advanced configuration
      request_id: 'jkec2024',
      returnType: undefined,
      knowledge_ids: undefined,
      document_ids: undefined,
    };

    const controller = new AbortController();
    options.onController?.(controller);

    try {
      const chatPath = this.useApi === 'public' ? this.publicPath : this.privatePath;
      const chatPayload = {
        method: "POST",
        body: JSON.stringify(this.useApi === 'public' ? publicParams : privateParams),
        signal: controller.signal,
        headers: {
          'Content-Type': 'application/json',
          // API key
          Authorization: bigModelApiKey,
        },
      };
      const requestTimeoutId = setTimeout(() => controller.abort(), REQUEST_TIMEOUT_MS);

      let responseText = "";
      let remainText = "";
      let finished = false;
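
      // Typewriter effect: drain remainText into responseText a few characters
      // per animation frame and report progress through options.onUpdate.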
      function animateResponseText() {
        if (finished || controller.signal.aborted) {
          responseText += remainText;
          if (responseText?.length === 0) {
            // options.onError?.(new Error("empty response from server"));
            options.onError?.(new Error("Request aborted; please check your network."));
          }
          return;
        }
        if (remainText.length > 0) {
          const fetchCount = Math.max(1, Math.round(remainText.length / 60));
          const fetchText = remainText.slice(0, fetchCount);
          responseText += fetchText;
          remainText = remainText.slice(fetchCount);
          options.onUpdate?.(responseText, fetchText);
        }
        requestAnimationFrame(animateResponseText);
      }
      animateResponseText();

      const finish = () => {
        if (!finished) {
          finished = true;
          options.onFinish(responseText + remainText);
        }
      };
      controller.signal.onabort = finish;

      // Track the previously received payload; the private endpoint streams
      // cumulative text, so each message is diffed against this value.
      let previousRemainText = "";

      fetchEventSource(chatPath, {
        ...chatPayload,
        async onopen(res) {
          clearTimeout(requestTimeoutId);
          const contentType = res.headers.get("content-type");
          if (contentType?.startsWith("text/plain")) {
            responseText = await res.clone().text();
            return finish();
          }
          if (
            !res.ok ||
            !res.headers.get("content-type")?.startsWith(EventStreamContentType) ||
            res.status !== 200
          ) {
            const responseTexts = [responseText];
            let extraInfo = await res.clone().text();
            try {
              const resJson = await res.clone().json();
              extraInfo = prettyObject(resJson);
            } catch {}
            if (res.status === 401) {
              responseTexts.push(Locale.Error.Unauthorized);
            }
            if (extraInfo) {
              responseTexts.push(extraInfo);
            }
            responseText = responseTexts.join("\n\n");
            return finish();
          }
        },
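        // Two message formats: the public endpoint streams OpenAI-style JSON deltas,
        // while the private application endpoint streams the full text so far.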
        onmessage: (msg) => {
          const handlePublicMessage = () => {
            if (msg.data === "[DONE]" || finished) {
              return finish();
            }
            const text = msg.data;
            try {
              const json = JSON.parse(text);
              const choices = json.choices as Array<{ delta: { content: string } }>;
              const delta = choices[0]?.delta?.content;
              if (delta) {
                remainText += delta;
              }
            } catch (e) {
              console.error("[Request] parse error", text, msg);
            }
          };
          const handlePrivateMessage = () => {
            if (msg.event === 'finish') {
              return finish();
            }
            // take the current cumulative data
            const currentData = msg.data;
            // compute only the newly appended characters
            const newChars = currentData.substring(previousRemainText.length);
            remainText += newChars;
            // update previousRemainText for the next diff
            previousRemainText = currentData;
          };
          if (this.useApi === 'public') {
            handlePublicMessage();
          } else {
            handlePrivateMessage();
          }
        },
        async onclose() {
          finish();
          // Persist the current session to the backend once the stream closes
          const session = useChatStore.getState().sessions[0];
          const data = {
            id: session.id,
            messages: session.messages.map((item) => ({
              id: item.id,
              date: item.date,
              role: item.role,
              content: item.content,
            })),
          };
          await fetch('/api/bigModel', {
            method: 'POST',
            body: JSON.stringify(data),
          });
        },
        onerror(e) {
          options.onError?.(e);
          throw e;
        },
        openWhenHidden: true,
      });
    } catch (e) {
      options.onError?.(e as Error);
    }
  }

  async usage() {
    return {
      used: 0,
      total: 0,
    };
  }

  async models(): Promise<LLMModel[]> {
    return [];
  }
}
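
// Example usage (a hedged sketch, not part of the original file): the exact
// ChatOptions shape comes from "../api"; the fields below are assumed from how
// chat() consumes them (messages, onUpdate, onFinish, onError, onController).
//
// const api = new BigModelApi();
// await api.chat({
//   messages: [{ role: "user", content: "Hello" }],
//   onUpdate: (fullText, delta) => console.log(delta),
//   onFinish: (fullText) => console.log("done:", fullText),
//   onError: (err) => console.error(err),
// });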