@@ -22,7 +22,7 @@ import {
   preProcessImageContent,
   uploadImage,
   base64Image2Blob,
-  stream,
+  streamWithThink,
 } from "@/app/utils/chat";
 import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
 import { ModelSize, DalleQuality, DalleStyle } from "@/app/typing";
@@ -294,7 +294,7 @@ export class ChatGPTApi implements LLMApi {
           useChatStore.getState().currentSession().mask?.plugin || [],
         );
       // console.log("getAsTools", tools, funcs);
-      stream(
+      streamWithThink(
         chatPath,
         requestPayload,
         getHeaders(),
@@ -309,8 +309,12 @@ export class ChatGPTApi implements LLMApi {
             delta: {
               content: string;
               tool_calls: ChatMessageTool[];
+              reasoning_content: string | null;
             };
           }>;
+
+          if (!choices?.length) return { isThinking: false, content: "" };
+
           const tool_calls = choices[0]?.delta?.tool_calls;
           if (tool_calls?.length > 0) {
             const id = tool_calls[0]?.id;
@@ -330,7 +334,37 @@ export class ChatGPTApi implements LLMApi {
               runTools[index]["function"]["arguments"] += args;
             }
           }
-          return choices[0]?.delta?.content;
+
+          const reasoning = choices[0]?.delta?.reasoning_content;
+          const content = choices[0]?.delta?.content;
+
+          // Skip if both content and reasoning_content are empty or null
+          if (
+            (!reasoning || reasoning.trim().length === 0) &&
+            (!content || content.trim().length === 0)
+          ) {
+            return {
+              isThinking: false,
+              content: "",
+            };
+          }
+
+          if (reasoning && reasoning.trim().length > 0) {
+            return {
+              isThinking: true,
+              content: reasoning,
+            };
+          } else if (content && content.trim().length > 0) {
+            return {
+              isThinking: false,
+              content: content,
+            };
+          }
+
+          return {
+            isThinking: false,
+            content: "",
+          };
         },
         // processToolMessage, include tool_calls message and tool call results
         (
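
For reference, a minimal sketch of how the `{ isThinking, content }` results produced by the parse callback above might be consumed. `ParsedChunk` and `accumulate` are hypothetical names used only for illustration; the real wiring lives in `streamWithThink` in `@/app/utils/chat`, which this diff does not show, and is assumed to route `isThinking` chunks into a separate reasoning view.

```ts
// Illustration only: ParsedChunk mirrors the shape returned by the parse
// callback in the diff above; accumulate is a hypothetical helper, not part
// of this change.
interface ParsedChunk {
  isThinking: boolean; // true while the model is emitting reasoning_content
  content: string; // text to append (reasoning text or answer text)
}

// One possible way to fold a stream of parsed chunks into separate buffers.
function accumulate(
  acc: { reasoning: string; answer: string },
  chunk: ParsedChunk,
): { reasoning: string; answer: string } {
  if (chunk.content.length === 0) return acc; // empty chunks carry nothing
  return chunk.isThinking
    ? { ...acc, reasoning: acc.reasoning + chunk.content }
    : { ...acc, answer: acc.answer + chunk.content };
}

// Example: reasoning chunks arrive first, then the answer chunks.
const chunks: ParsedChunk[] = [
  { isThinking: true, content: "Checking the delta fields... " },
  { isThinking: false, content: "The result is 42." },
];
const result = chunks.reduce(accumulate, { reasoning: "", answer: "" });
console.log(result.reasoning); // "Checking the delta fields... "
console.log(result.answer); // "The result is 42."
```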