Kaynağa Gözat

fix: add max_tokens when using vision model (#4157)

fred-bf 1 yıl önce
ebeveyn
işleme
08fa22749a
1 değiştirilmiş dosya ile 10 ekleme ve 0 silme
  1. app/client/platforms/openai.ts: +10 −0
      app/client/platforms/openai.ts

+ 10 - 0
app/client/platforms/openai.ts

@@ -110,6 +110,16 @@ export class ChatGPTApi implements LLMApi {
       // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
     };
 
+    // add max_tokens to vision model
+    if (visionModel) {
+      Object.defineProperty(requestPayload, "max_tokens", {
+        enumerable: true,
+        configurable: true,
+        writable: true,
+        value: Math.max(modelConfig.max_tokens, 4096),
+      });
+    }
+
     console.log("[Request] openai payload: ", requestPayload);
 
     const shouldStream = !!options.config.stream;