bigmodel.ts

  1. "use client";
  2. import { REQUEST_TIMEOUT_MS } from "@/app/constant";
  3. import { useChatStore } from "@/app/store";
  4. import {
  5. ChatOptions,
  6. LLMApi,
  7. LLMModel,
  8. } from "../api";
  9. import Locale from "../../locales";
  10. import {
  11. EventStreamContentType,
  12. fetchEventSource,
  13. } from "@fortaine/fetch-event-source";
  14. import { prettyObject } from "@/app/utils/format";
  15. import { getMessageTextContent } from "@/app/utils";
  16. import api from "@/app/api/api";

export class BigModelApi implements LLMApi {
  // Base path of the BigModel backend and the two chat endpoints this client can call
  public baseURL: string;
  public apiPath: string;
  public deepSeekApiPath: string;
  // Selects which backend to use: the BigModel SSE endpoint or the local DeepSeek service
  public apiType: 'bigModel' | 'deepSeek';

  constructor() {
    this.baseURL = '/bigmodel-api';
    this.apiPath = this.baseURL + '/bigmodel/api/model-api/sse-invoke';
    this.deepSeekApiPath = 'http://192.168.3.209:8000/chat';
    this.apiType = 'bigModel';
  }

  async chat(options: ChatOptions) {
    const messages = options.messages.map((item) => {
      return {
        role: item.role,
        content: getMessageTextContent(item),
      };
    });
    const userMessages = messages.filter((item) => item.content);
    // If the message count is even, prepend a placeholder user turn
    // (U+2800 braille blank) so the prompt length stays odd
    if (userMessages.length % 2 === 0) {
      userMessages.unshift({
        role: "user",
        content: "⠀",
      });
    }
    // Request parameters for the selected backend
    let params: any = {};
    if (this.apiType === 'bigModel') {
      params = {
        appId: options.config.appId, // application id
        prompt: userMessages,
        // advanced configuration
        request_id: 'jkec2024-knowledge-base',
        returnType: undefined,
        knowledge_ids: undefined,
        document_ids: undefined,
      };
    } else {
      params = {
        model: 'deepseek-r1:8b',
        messages: userMessages,
        stream: true,
      };
    }
    const controller = new AbortController();
    options.onController?.(controller);
    try {
      const chatPath = this.apiType === 'bigModel' ? this.apiPath : this.deepSeekApiPath;
      const chatPayload = {
        method: "POST",
        body: JSON.stringify(params),
        signal: controller.signal,
        headers: {
          'Content-Type': 'application/json',
        },
      };
      // Abort the request if nothing comes back within the timeout window
      const requestTimeoutId = setTimeout(() => controller.abort(), REQUEST_TIMEOUT_MS);

      let responseText = "";
      let remainText = "";
      let finished = false;

      // Typewriter effect: drain `remainText` into `responseText` a few characters per
      // animation frame so the streamed reply renders smoothly instead of in large jumps
      function animateResponseText() {
        if (finished || controller.signal.aborted) {
          responseText += remainText;
          if (responseText?.length === 0) {
            // "The request was aborted, please check your network."
            options.onError?.(new Error("请求已中止,请检查网络环境。"));
          }
          return;
        }
        if (remainText.length > 0) {
          const fetchCount = Math.max(1, Math.round(remainText.length / 60));
          const fetchText = remainText.slice(0, fetchCount);
          responseText += fetchText;
          remainText = remainText.slice(fetchCount);
          options.onUpdate?.(responseText, fetchText);
        }
        requestAnimationFrame(animateResponseText);
      }
      animateResponseText();

      const finish = () => {
        if (!finished) {
          finished = true;
          options.onFinish(responseText + remainText);
        }
      };
      controller.signal.onabort = finish;

      fetchEventSource(chatPath, {
        ...chatPayload,
        async onopen(res: any) {
          clearTimeout(requestTimeoutId);
          const contentType = res.headers.get("content-type");
          if (contentType?.startsWith("text/plain")) {
            responseText = await res.clone().text();
            return finish();
          }
          if (
            !res.ok ||
            !res.headers.get("content-type")?.startsWith(EventStreamContentType) ||
            res.status !== 200
          ) {
            const responseTexts = [responseText];
            let extraInfo = await res.clone().text();
            try {
              const resJson = await res.clone().json();
              extraInfo = prettyObject(resJson);
            } catch {}
            if (res.status === 401) {
              responseTexts.push(Locale.Error.Unauthorized);
            }
            if (extraInfo) {
              responseTexts.push(extraInfo);
            }
            responseText = responseTexts.join("\n\n");
            return finish();
          }
        },
        onmessage: (msg) => {
          try {
            const info = JSON.parse(msg.data);
            if (info.event === 'finish') {
              return finish();
            }
            // Append the current chunk to the buffer drained by animateResponseText
            remainText += info.data;
          } catch (e) {
            console.error("[BigModel] failed to parse SSE message", msg.data, e);
          }
        },
        async onclose() {
          finish();
          // Save the finished dialog of the current session to the backend
          const session = useChatStore.getState().sessions[0];
          const data = {
            id: session.id,
            appId: session.appId,
            userId: undefined,
            dialogName: session.topic,
            messages: session.messages.map((item) => ({
              id: item.id,
              date: item.date,
              role: item.role,
              content: item.content,
            })),
          };
          await api.post('bigmodel/api/dialog/save', data);
        },
        onerror(e) {
          options.onError?.(e);
          throw e;
        },
        openWhenHidden: true,
      });
    } catch (e) {
      options.onError?.(e as Error);
    }
  }

  async usage() {
    return {
      used: 0,
      total: 0,
    };
  }

  async models(): Promise<LLMModel[]> {
    return [];
  }
}
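
// Usage sketch (illustrative only): how a caller might drive BigModelApi.chat.
// The exact ChatOptions shape (config fields and callback signatures) is assumed
// from the calls above and from "../api"; the appId below is a hypothetical value.
//
// const bigModel = new BigModelApi();
// void bigModel.chat({
//   messages: [{ role: "user", content: "你好" }],
//   config: { appId: "demo-app-id" },
//   onController: (controller) => { /* keep a handle so the UI can controller.abort() */ },
//   onUpdate: (full, delta) => console.log("partial:", delta),
//   onFinish: (message) => console.log("done:", message),
//   onError: (err) => console.error("chat failed:", err),
// } as ChatOptions);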