
Connect the knowledge base

李富豪 committed 1 year ago
commit 32066811f9
3 changed files with 22 additions and 18 deletions
  1. app/client/config.ts (+5 -1)
  2. app/client/platforms/bigmodel.ts (+15 -15)
  3. app/store/config.ts (+2 -2)

app/client/config.ts (+5 -1)

@@ -1 +1,5 @@
-export const bigModelApiKey = '20480a1ad76c4d9e0a168206a25f9614.bUjEVNXHpgY0H0GH'
+// BigModel API key
+export const bigModelApiKey = '20480a1ad76c4d9e0a168206a25f9614.bUjEVNXHpgY0H0GH';
+
+// Knowledge base ID
+export const knowledgeId = '1822866230839840768';

app/client/platforms/bigmodel.ts (+15 -15)

@@ -1,6 +1,6 @@
 "use client";
 import { REQUEST_TIMEOUT_MS } from "@/app/constant";
-import { useAppConfig, useChatStore } from "@/app/store";
+import { useChatStore } from "@/app/store";
 import {
   ChatOptions,
   LLMApi,
@@ -13,7 +13,7 @@ import {
 } from "@fortaine/fetch-event-source";
 import { prettyObject } from "@/app/utils/format";
 import { getMessageTextContent } from "@/app/utils";
-import { bigModelApiKey } from "../config";
+import { bigModelApiKey, knowledgeId } from "../config";
 
 export class BigModelApi implements LLMApi {
   path(): string {
@@ -33,23 +33,23 @@ export class BigModelApi implements LLMApi {
       });
     }
 
-    const modelConfig = {
-      ...useAppConfig.getState().modelConfig,
-      ...useChatStore.getState().currentSession().mask.modelConfig,
-      ...{
-        model: options.config.model,
-      },
-    };
-
-    const shouldStream = !!options.config.stream;
+    const shouldStream = true;
 
     // General large-model request parameters
     const requestPayload: any = {
       messages,
-      stream: shouldStream,
-      model: 'glm-4-flash',
-      temperature: modelConfig.temperature,
-      top_p: modelConfig.top_p,
+      stream: shouldStream, // streamed response
+      model: 'glm-4-flash', // model name
+      temperature: 0.95, // sampling temperature
+      top_p: 0.7, // nucleus sampling
+      tools: [
+        {
+          type: 'retrieval', // tool type: retrieval
+          retrieval: {
+            knowledge_id: knowledgeId, // knowledge base ID
+          },
+        },
+      ],
     };
 
     const controller = new AbortController();
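
For reference, here is a minimal, self-contained sketch of how a payload with this retrieval tool reaches the BigModel API. The endpoint URL, the Bearer-token header, and the non-streaming call are assumptions based on Zhipu's public chat-completions API, not code from this commit; the commit itself always streams the same payload (shouldStream = true) through fetchEventSource.

```ts
// Sketch only: sends a retrieval-augmented request to the BigModel chat endpoint.
// The endpoint URL and response shape are assumptions, not taken from this commit.
// The relative import assumes this file sits next to app/client/platforms/bigmodel.ts.
import { bigModelApiKey, knowledgeId } from "../config";

const CHAT_URL = "https://open.bigmodel.cn/api/paas/v4/chat/completions"; // assumed endpoint

async function askKnowledgeBase(question: string): Promise<string | undefined> {
  const res = await fetch(CHAT_URL, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${bigModelApiKey}`,
    },
    body: JSON.stringify({
      model: "glm-4-flash",
      stream: false, // non-streaming here for simplicity; the commit streams
      temperature: 0.95,
      top_p: 0.7,
      messages: [{ role: "user", content: question }],
      tools: [
        {
          type: "retrieval",
          retrieval: { knowledge_id: knowledgeId }, // attach the knowledge base
        },
      ],
    }),
  });
  const data = await res.json();
  // Assuming an OpenAI-style response shape.
  return data.choices?.[0]?.message?.content;
}
```
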

app/store/config.ts (+2 -2)

@@ -52,8 +52,8 @@ export const DEFAULT_CONFIG = {
   modelConfig: {
     model: "gpt-3.5-turbo" as ModelType,
     providerName: "OpenAI" as ServiceProvider,
-    temperature: 0.95,
-    top_p: 0.7,
+    temperature: 0.5,
+    top_p: 1,
     max_tokens: 4000,
     presence_penalty: 0,
     frequency_penalty: 0,