
add processToolMessage callback

lloydzhou committed 1 year ago
Commit d2cb984ced
2 changed files, 27 additions and 16 deletions
  1. app/client/platforms/openai.ts  (+18 −3)
  2. app/utils/chat.ts  (+9 −13)

app/client/platforms/openai.ts  (+18 −3)

@@ -240,6 +240,7 @@ export class ChatGPTApi implements LLMApi {
         );
       }
       if (shouldStream) {
+        // TODO mock tools and funcs
         const tools = [
           {
             type: "function",
@@ -278,8 +279,9 @@ export class ChatGPTApi implements LLMApi {
           tools,
           funcs,
           controller,
+          // parseSSE
           (text: string, runTools: ChatMessageTool[]) => {
-            console.log("parseSSE", text, runTools);
+            // console.log("parseSSE", text, runTools);
             const json = JSON.parse(text);
             const choices = json.choices as Array<{
               delta: {
@@ -306,10 +308,23 @@ export class ChatGPTApi implements LLMApi {
                 runTools[index]["function"]["arguments"] += args;
               }
             }
-
-            console.log("runTools", runTools);
             return choices[0]?.delta?.content;
           },
+          // processToolMessage, include tool_calls message and tool call results
+          (
+            requestPayload: RequestPayload,
+            toolCallMessage: any,
+            toolCallResult: any[],
+          ) => {
+            // @ts-ignore
+            requestPayload?.messages?.splice(
+              // @ts-ignore
+              requestPayload?.messages?.length,
+              0,
+              toolCallMessage,
+              ...toolCallResult,
+            );
+          },
           options,
         );
       } else {
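
The new `processToolMessage` callback splices the assistant's `tool_calls` message and the tool results onto the end of `requestPayload.messages` before the helper re-issues the request. A hypothetical view of what the messages array could contain after that splice; the values are illustrative, the shape follows the OpenAI tool-calling message format:

```ts
// Illustrative state of requestPayload.messages after processToolMessage runs.
const messages = [
  { role: "user", content: "What's the weather in Paris?" },
  {
    // toolCallMessage: the assistant turn that requested the tool
    role: "assistant",
    tool_calls: [
      {
        id: "call_0",
        type: "function",
        function: {
          name: "get_current_weather",
          arguments: '{"location":"Paris"}',
        },
      },
    ],
  },
  {
    // one entry of toolCallResult, keyed back to the call by id
    role: "tool",
    tool_call_id: "call_0",
    content: '{"location":"Paris","temperature":"22°C"}',
  },
];
```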

app/utils/chat.ts  (+9 −13)

@@ -161,6 +161,11 @@ export function stream(
   funcs: any,
   controller: AbortController,
   parseSSE: (text: string, runTools: any[]) => string | undefined,
+  processToolMessage: (
+    requestPayload: any,
+    toolCallMessage: any,
+    toolCallResult: any[],
+  ) => void,
   options: any,
 ) {
   let responseText = "";
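
With the callback in the signature, the payload mutation is no longer hard-coded inside the shared helper; each provider decides how a tool exchange is folded back into its own request format. A minimal sketch of a call site follows; the leading parameter names (`chatPath`, `requestPayload`, `headers`, `tools`) are assumptions inferred from the `chatApi(chatPath, headers, requestPayload, tools)` call inside the helper, and the endpoint, payload, and import alias are illustrative:

```ts
// Sketch of a provider-side call, e.g. from a chat() implementation,
// reusing the tools/funcs sketch shown earlier; `options` is the
// provider's usual chat options object.
import { stream } from "@/app/utils/chat";

const controller = new AbortController();
const requestPayload = {
  messages: [{ role: "user", content: "hi" }],
  stream: true,
};

stream(
  "/api/openai/v1/chat/completions", // chatPath (illustrative)
  requestPayload,
  { "Content-Type": "application/json" }, // headers
  tools,
  funcs,
  controller,
  // parseSSE: pull the text delta out of each streamed chunk
  // (tool_call accumulation is omitted in this sketch)
  (text, runTools) => JSON.parse(text).choices?.[0]?.delta?.content,
  // processToolMessage: append the tool exchange in this provider's format
  (payload, toolCallMessage, toolCallResult) => {
    payload.messages.push(toolCallMessage, ...toolCallResult);
  },
  options,
);
```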
@@ -196,7 +201,6 @@ export function stream(
 
   const finish = () => {
     if (!finished) {
-      console.log("try run tools", runTools.length, finished, running);
       if (!running && runTools.length > 0) {
         const toolCallMessage = {
           role: "assistant",
@@ -233,28 +237,20 @@ export function stream(
               }));
           }),
         ).then((toolCallResult) => {
-          console.log("end runTools", toolCallMessage, toolCallResult);
-          // @ts-ignore
-          requestPayload?.messages?.splice(
-            // @ts-ignore
-            requestPayload?.messages?.length,
-            0,
-            toolCallMessage,
-            ...toolCallResult,
-          );
+          processToolMessage(requestPayload, toolCallMessage, toolCallResult);
           setTimeout(() => {
             // call again
-            console.log("start again");
+            console.debug("[ChatAPI] restart");
             running = false;
             chatApi(chatPath, headers, requestPayload, tools); // call fetchEventSource
           }, 60);
         });
-        console.log("try run tools", runTools.length, finished);
         return;
       }
       if (running) {
         return;
       }
+      console.debug("[ChatAPI] end");
       finished = true;
       options.onFinish(responseText + remainText);
     }
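
Taken together, the change turns `finish()` into a provider-agnostic loop: when the stream ends with pending tool calls, it executes them, hands the exchange to `processToolMessage`, and re-issues the request; otherwise it finalizes. A condensed sketch of that flow; the tool-result shape and parts elided from this hunk are reconstructed assumptions, not the verbatim source:

```ts
const finish = () => {
  if (finished) return;
  if (!running && runTools.length > 0) {
    // snapshot the accumulated tool calls and clear the buffer
    const toolCallMessage = { role: "assistant", tool_calls: [...runTools] };
    running = true;
    runTools.splice(0, runTools.length);
    Promise.all(
      toolCallMessage.tool_calls.map((tool: any) =>
        Promise.resolve(
          funcs[tool.function.name](JSON.parse(tool.function.arguments)),
        ).then((result) => ({
          role: "tool",
          tool_call_id: tool.id,
          content: JSON.stringify(result),
        })),
      ),
    ).then((toolCallResult) => {
      // provider-specific payload mutation now lives in the callback
      processToolMessage(requestPayload, toolCallMessage, toolCallResult);
      setTimeout(() => {
        console.debug("[ChatAPI] restart");
        running = false;
        chatApi(chatPath, headers, requestPayload, tools); // call fetchEventSource
      }, 60);
    });
    return;
  }
  if (running) return;
  console.debug("[ChatAPI] end");
  finished = true;
  options.onFinish(responseText + remainText);
};
```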
@@ -343,7 +339,7 @@ export function stream(
       },
       openWhenHidden: true,
     });
-    console.log("chatApi", chatPath, requestPayload, tools);
   }
+  console.debug("[ChatAPI] start");
   chatApi(chatPath, headers, requestPayload, tools); // call fetchEventSource
 }