bigModel.ts

  1. "use client";
  2. import { REQUEST_TIMEOUT_MS } from "@/app/constant";
  3. import { useChatStore } from "@/app/store";
  4. import {
  5. ChatOptions,
  6. LLMApi,
  7. LLMModel,
  8. } from "../api";
  9. import Locale from "../../locales";
  10. import {
  11. EventStreamContentType,
  12. fetchEventSource,
  13. } from "@fortaine/fetch-event-source";
  14. import { prettyObject } from "@/app/utils/format";
  15. import { getMessageTextContent } from "@/app/utils";
  16. import api from "@/app/api/api";

export class BigModelApi implements LLMApi {
  public baseURL: string;
  public apiPath: string;

  constructor() {
    // Choose the API path based on the current DeepSeek status in the chat store.
    const status = useChatStore.getState().deepSeekStatus;
    this.baseURL = "/bigmodel-api";
    if (status === "LOCAL") {
      this.apiPath = this.baseURL + "/bigmodel/api/model-api/sse-invoke";
    } else {
      this.apiPath = this.baseURL + "/takai/api/chat";
    }
    // Local streaming invocation:
    // this.apiPath = ' http://xia0miduo.gicp.net:8091' + '/bigmodel/api/model-api/sse-invoke';
  }

  async chat(options: ChatOptions) {
    const messages = options.messages.map((item) => {
      return {
        role: item.role,
        content: getMessageTextContent(item),
      };
    });
    const userMessages = messages.filter((item) => item.content);
    // Prepend a placeholder user message (U+2800 braille blank) so the
    // message count sent to the API stays odd.
    if (userMessages.length % 2 === 0) {
      userMessages.unshift({
        role: "user",
        content: "⠀",
      });
    }

    // Request parameters
    const params = {
      appId: options.config.appId, // application id
      prompt: userMessages,
      // Advanced configuration
      request_id: "jkec2024-knowledge-base",
      returnType: undefined,
      knowledge_ids: undefined,
      document_ids: undefined,
    };

    const controller = new AbortController();
    options.onController?.(controller);

    try {
      const chatPath = this.apiPath;
      const chatPayload = {
        method: "POST",
        body: JSON.stringify(params),
        signal: controller.signal,
        headers: {
          "Content-Type": "application/json",
        },
      };

      // Abort the request if it takes longer than the configured timeout.
      const requestTimeoutId = setTimeout(
        () => controller.abort(),
        REQUEST_TIMEOUT_MS,
      );

      let responseText = "";
      let remainText = "";
      let finished = false;

      // Typewriter effect: on every animation frame, move a small slice of
      // `remainText` into `responseText` and report it via onUpdate.
      function animateResponseText() {
        if (finished || controller.signal.aborted) {
          responseText += remainText;
          if (responseText?.length === 0) {
            options.onError?.(
              new Error("The request was aborted; please check your network."),
            );
          }
          return;
        }
        if (remainText.length > 0) {
          const fetchCount = Math.max(1, Math.round(remainText.length / 60));
          const fetchText = remainText.slice(0, fetchCount);
          responseText += fetchText;
          remainText = remainText.slice(fetchCount);
          options.onUpdate?.(responseText, fetchText);
        }
        requestAnimationFrame(animateResponseText);
      }
      animateResponseText();

      const finish = () => {
        if (!finished) {
          finished = true;
          options.onFinish(responseText + remainText);
        }
      };
      controller.signal.onabort = finish;

      fetchEventSource(chatPath, {
        ...chatPayload,
        async onopen(res: any) {
          clearTimeout(requestTimeoutId);
          const contentType = res.headers.get("content-type");
          // A plain-text response is not a stream: read it once and finish.
          if (contentType?.startsWith("text/plain")) {
            responseText = await res.clone().text();
            return finish();
          }
          // Treat anything other than a 200 event-stream response as an error
          // and surface whatever the server returned.
          if (
            !res.ok ||
            !res.headers
              .get("content-type")
              ?.startsWith(EventStreamContentType) ||
            res.status !== 200
          ) {
            const responseTexts = [responseText];
            let extraInfo = await res.clone().text();
            try {
              const resJson = await res.clone().json();
              extraInfo = prettyObject(resJson);
            } catch {}

            if (res.status === 401) {
              responseTexts.push(Locale.Error.Unauthorized);
            }
            if (extraInfo) {
              responseTexts.push(extraInfo);
            }
            responseText = responseTexts.join("\n\n");
            return finish();
          }
        },
        onmessage: (msg) => {
          const info = JSON.parse(msg.data);
          if (info.event === "finish") {
            return finish();
          }
          // Get the current chunk of data and queue it for the animation loop.
          const currentData = info.data;
          remainText += currentData;
        },
        async onclose() {
          finish();
          // Persist the finished dialog to the backend.
          const session = useChatStore.getState().sessions[0];
          const item = session.messages.find((item) => item.role === "user");
          const dialogName = item ? item.content : "New Chat";
          const data = {
            id: session.id,
            appId: session.appId,
            userId: undefined,
            dialogName: dialogName,
            messages: session.messages.map((item) => ({
              id: item.id,
              date: item.date,
              role: item.role,
              content: item.content,
            })),
          };
          // Cache the most recent non-empty assistant reply in the store.
          const messages = session.messages.slice();
          const backList = messages.reverse();
          const record = backList.find(
            (item) => item.content && item.role === "assistant",
          );
          if (record) {
            useChatStore.setState({
              message: {
                content: record.content as string,
                role: record.role,
              },
            });
          }
          const status = useChatStore.getState().deepSeekStatus;
          if (status === "LOCAL") {
            await api.post("bigmodel/api/dialog/save", data);
          } else {
            await api.post("takai/api/dialog/save", data);
          }
        },
        onerror(e) {
          options.onError?.(e);
          throw e;
        },
        openWhenHidden: true,
      });
    } catch (e) {
      options.onError?.(e as Error);
    }
  }

  async usage() {
    return {
      used: 0,
      total: 0,
    };
  }

  async models(): Promise<LLMModel[]> {
    return [];
  }
}
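
// Usage sketch (not part of the original file). The option names below are
// inferred from how `options` is consumed in chat() above and from the
// ChatOptions type imported from ../api; treat the exact shapes as an
// assumption rather than a spec.
//
// const bigModel = new BigModelApi();
// await bigModel.chat({
//   messages: [{ role: "user", content: "Hello" }],
//   config: { appId: "your-app-id" },              // appId is read in chat()
//   onUpdate: (full, delta) => console.log(delta), // streamed chunks
//   onFinish: (full) => console.log("done:", full),
//   onError: (err) => console.error(err),
//   onController: (c) => {
//     // keep `c` (an AbortController) around to cancel the stream if needed
//   },
// });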