
Support DeepSeek-R1's reasoning_content in the OpenAI-compatible client, and parse <think></think> tags from the stream

suruiqiang, 9 months ago
parent
commit
9714258322
2 changed files with 55 additions and 3 deletions
  1. app/client/platforms/openai.ts (+37 -3)
  2. app/utils/chat.ts (+18 -0)

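Background for the first change: DeepSeek-R1 served through an OpenAI-compatible endpoint streams its chain of thought in delta.reasoning_content, while the final answer arrives in delta.content. The sketch below is not part of the commit; it restates, as a standalone function, how one delta maps onto the { isThinking, content } chunk that streamWithThink consumes (field names follow the diff, the rest is illustrative).

// A minimal sketch, assuming an OpenAI-compatible delta that may carry either field.
type R1Delta = { content?: string | null; reasoning_content?: string | null };

function mapDelta(delta: R1Delta): { isThinking: boolean; content: string } {
  const reasoning = delta.reasoning_content;
  if (reasoning && reasoning.trim().length > 0) {
    return { isThinking: true, content: reasoning }; // model is still reasoning
  }
  const content = delta.content;
  if (content && content.trim().length > 0) {
    return { isThinking: false, content }; // final answer text
  }
  // Both fields empty or null: return an empty chunk that streamWithThink skips.
  return { isThinking: false, content: "" };
}

// mapDelta({ reasoning_content: "Check the edge cases..." })
//   -> { isThinking: true, content: "Check the edge cases..." }
// mapDelta({ content: "The answer is 42." })
//   -> { isThinking: false, content: "The answer is 42." }
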
+ 37 - 3
app/client/platforms/openai.ts

@@ -22,7 +22,7 @@ import {
   preProcessImageContent,
   uploadImage,
   base64Image2Blob,
-  stream,
+  streamWithThink,
 } from "@/app/utils/chat";
 import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
 import { ModelSize, DalleQuality, DalleStyle } from "@/app/typing";
@@ -294,7 +294,7 @@ export class ChatGPTApi implements LLMApi {
             useChatStore.getState().currentSession().mask?.plugin || [],
           );
         // console.log("getAsTools", tools, funcs);
-        stream(
+        streamWithThink(
           chatPath,
           requestPayload,
           getHeaders(),
@@ -309,8 +309,12 @@ export class ChatGPTApi implements LLMApi {
               delta: {
                 content: string;
                 tool_calls: ChatMessageTool[];
+                reasoning_content: string | null;
               };
             }>;
+
+            if (!choices?.length) return { isThinking: false, content: "" };
+
             const tool_calls = choices[0]?.delta?.tool_calls;
             if (tool_calls?.length > 0) {
               const id = tool_calls[0]?.id;
@@ -330,7 +334,37 @@ export class ChatGPTApi implements LLMApi {
                 runTools[index]["function"]["arguments"] += args;
               }
             }
-            return choices[0]?.delta?.content;
+
+            const reasoning = choices[0]?.delta?.reasoning_content;
+            const content = choices[0]?.delta?.content;
+
+            // Skip if both content and reasoning_content are empty or null
+            if (
+              (!reasoning || reasoning.trim().length === 0) &&
+              (!content || content.trim().length === 0)
+            ) {
+              return {
+                isThinking: false,
+                content: "",
+              };
+            }
+
+            if (reasoning && reasoning.trim().length > 0) {
+              return {
+                isThinking: true,
+                content: reasoning,
+              };
+            } else if (content && content.trim().length > 0) {
+              return {
+                isThinking: false,
+                content: content,
+              };
+            }
+
+            return {
+              isThinking: false,
+              content: "",
+            };
           },
           // processToolMessage, include tool_calls message and tool call results
           (

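Note on the call-site change above: swapping stream for streamWithThink also changes what the SSE parse callback returns. Previously it returned the raw delta content string; now it returns a tagged chunk so the UI can render reasoning and answer separately. Roughly, with hypothetical type names (only the return shapes are taken from the diff):

// Hypothetical type names; only the return shapes come from the diff above.
type ParseResultForStream = string | undefined; // old stream(): raw delta content
interface ParseResultForStreamWithThink {
  isThinking: boolean; // true while the model is emitting reasoning
  content: string; // text to append to the reasoning block or the answer
}
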
+ 18 - 0
app/utils/chat.ts

@@ -400,6 +400,7 @@ export function streamWithThink(
   let responseRes: Response;
   let isInThinkingMode = false;
   let lastIsThinking = false;
+  let lastIsThinkingTagged = false; // true while between <think> and </think> tags
 
   // animate response to make it looks smooth
   function animateResponseText() {
@@ -579,6 +580,23 @@ export function streamWithThink(
           if (!chunk?.content || chunk.content.length === 0) {
             return;
           }
+
+          // deal with <think> and </think> tags start
+          if (!chunk.isThinking) {
+            if (chunk.content.startsWith("<think>")) {
+              chunk.isThinking = true;
+              chunk.content = chunk.content.slice(7).trim();
+              lastIsThinkingTagged = true;
+            } else if (chunk.content.endsWith("</think>")) {
+              chunk.isThinking = false;
+              chunk.content = chunk.content.slice(0, -8).trim();
+              lastIsThinkingTagged = false;
+            } else if (lastIsThinkingTagged) {
+              chunk.isThinking = true;
+            }
+          }
+          // deal with <think> and </think> tags end
+
           // Check if thinking mode changed
           const isThinkingChanged = lastIsThinking !== chunk.isThinking;
           lastIsThinking = chunk.isThinking;
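
For models that inline their reasoning as <think>…</think> text instead of a separate reasoning_content field, the new lastIsThinkingTagged flag carries the "inside the tag" state across chunks; the patch only applies it when a chunk is not already flagged as thinking. A self-contained restatement of that logic (function and variable names here are mine, not from the patch), followed by a sample chunk sequence:

// Minimal sketch of the tag-based detection added to streamWithThink.
// slice(7) / slice(0, -8) strip "<think>" and "</think>" exactly as in the patch.
function tagThinkTracker() {
  let insideThink = false; // mirrors lastIsThinkingTagged
  return function classify(content: string): { isThinking: boolean; content: string } {
    if (content.startsWith("<think>")) {
      insideThink = true;
      return { isThinking: true, content: content.slice(7).trim() };
    }
    if (content.endsWith("</think>")) {
      insideThink = false;
      // Note: the closing-tag chunk is reported as not thinking, mirroring the patch.
      return { isThinking: false, content: content.slice(0, -8).trim() };
    }
    return { isThinking: insideThink, content };
  };
}

// Example chunk sequence from a tagged stream:
const classify = tagThinkTracker();
classify("<think>Let me reason"); // { isThinking: true,  content: "Let me reason" }
classify(" about the question");  // { isThinking: true,  content: " about the question" }
classify("done.</think>");        // { isThinking: false, content: "done." }
classify("Final answer: 42");     // { isThinking: false, content: "Final answer: 42" }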