
fix typescript error

lloydzhou 1 year ago
parent commit 46cb48023e
5 changed files with 14 additions and 5 deletions
  1. app/client/api.ts                 +2 -1
  2. app/client/platforms/openai.ts    +5 -2
  3. app/components/chat.tsx           +3 -2
  4. app/store/config.ts               +2 -0
  5. app/typing.ts                     +2 -0

+ 2 - 1
app/client/api.ts

@@ -6,7 +6,7 @@ import {
   ServiceProvider,
 } from "../constant";
 import { ChatMessage, ModelType, useAccessStore, useChatStore } from "../store";
-import { ChatGPTApi } from "./platforms/openai";
+import { ChatGPTApi, DalleRequestPayload } from "./platforms/openai";
 import { GeminiProApi } from "./platforms/google";
 import { ClaudeApi } from "./platforms/anthropic";
 import { ErnieApi } from "./platforms/baidu";
@@ -42,6 +42,7 @@ export interface LLMConfig {
   stream?: boolean;
   presence_penalty?: number;
   frequency_penalty?: number;
+  size?: DalleRequestPayload["size"];
 }
 
 export interface ChatOptions {

+ 5 - 2
app/client/platforms/openai.ts

@@ -13,6 +13,7 @@ import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
 import { collectModelsWithDefaultModel } from "@/app/utils/model";
 import { preProcessImageContent } from "@/app/utils/chat";
 import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
+import { DalleSize } from "@/app/typing";
 
 import {
   ChatOptions,
@@ -63,7 +64,7 @@ export interface DalleRequestPayload {
   model: string;
   prompt: string;
   n: number;
-  size: "1024x1024" | "1792x1024" | "1024x1792";
+  size: DalleSize;
 }
 
 export class ChatGPTApi implements LLMApi {
@@ -141,7 +142,9 @@ export class ChatGPTApi implements LLMApi {
 
     const isDalle3 = _isDalle3(options.config.model);
     if (isDalle3) {
-      const prompt = getMessageTextContent(options.messages.slice(-1)?.pop());
+      const prompt = getMessageTextContent(
+        options.messages.slice(-1)?.pop() as any,
+      );
       requestPayload = {
         model: options.config.model,
         prompt,
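
A note on the `as any` cast above: the TypeScript error being fixed most likely comes from `Array.prototype.pop()` being typed as `T | undefined`, so its result cannot be passed straight to `getMessageTextContent`. A minimal sketch of the shape of the problem, with `RequestMessage` reduced to the fields relevant here (the real type also allows multimodal content):

// Simplified stand-ins for the real RequestMessage and getMessageTextContent.
interface RequestMessage {
  role: "system" | "user" | "assistant";
  content: string;
}

function getMessageTextContent(message: RequestMessage): string {
  return message.content;
}

const messages: RequestMessage[] = [];

// pop() returns RequestMessage | undefined, so this does not type-check:
// const prompt = getMessageTextContent(messages.slice(-1).pop());

// The commit silences the error with `as any`; a non-asserting alternative
// would be to narrow the value first:
const last = messages.at(-1);
const prompt = last ? getMessageTextContent(last) : "";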

+ 3 - 2
app/components/chat.tsx

@@ -69,6 +69,7 @@ import { uploadImage as uploadImageRemote } from "@/app/utils/chat";
 import dynamic from "next/dynamic";
 
 import { ChatControllerPool } from "../client/controller";
+import { DalleSize } from "../typing";
 import { Prompt, usePromptStore } from "../store/prompt";
 import Locale from "../locales";
 
@@ -484,9 +485,9 @@ export function ChatActions(props: {
   const [showUploadImage, setShowUploadImage] = useState(false);
 
   const [showSizeSelector, setShowSizeSelector] = useState(false);
-  const dalle3Sizes = ["1024x1024", "1792x1024", "1024x1792"];
+  const dalle3Sizes: DalleSize[] = ["1024x1024", "1792x1024", "1024x1792"];
   const currentSize =
-    chatStore.currentSession().mask.modelConfig?.size || "1024x1024";
+    chatStore.currentSession().mask.modelConfig?.size ?? "1024x1024";
 
   useEffect(() => {
     const show = isVisionModel(currentModel);
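
The `||` to `??` change above is behavior-preserving for the valid size strings, but `??` better matches an optional field: it falls back only on `null` or `undefined`, while `||` also treats any falsy value, such as an empty string, as missing. A quick illustration (the helper is only for the example, not from the repo):

// pickSize contrasts the two fallback operators on the same input.
function pickSize(configured: string | undefined): [string, string] {
  const withNullishCoalescing = configured ?? "1024x1024"; // falls back only on null/undefined
  const withLogicalOr = configured || "1024x1024"; // also falls back on "" and other falsy values
  return [withNullishCoalescing, withLogicalOr];
}

pickSize(undefined); // ["1024x1024", "1024x1024"]: both operators fall back
pickSize("");        // ["", "1024x1024"]: only || discards the empty string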

+ 2 - 0
app/store/config.ts

@@ -1,4 +1,5 @@
 import { LLMModel } from "../client/api";
+import { DalleSize } from "../typing";
 import { getClientConfig } from "../config/client";
 import {
   DEFAULT_INPUT_TEMPLATE,
@@ -60,6 +61,7 @@ export const DEFAULT_CONFIG = {
     compressMessageLengthThreshold: 1000,
     enableInjectSystemPrompts: true,
     template: config?.template ?? DEFAULT_INPUT_TEMPLATE,
+    size: "1024x1024" as DalleSize,
   },
 };
 

+ 2 - 0
app/typing.ts

@@ -7,3 +7,5 @@ export interface RequestMessage {
   role: MessageRole;
   content: string;
 }
+
+export type DalleSize = "1024x1024" | "1792x1024" | "1024x1792";
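
Taken together, the commit threads one DalleSize type from app/typing.ts through the request config and into the DALL·E payload. A minimal sketch of how the pieces fit; only the type and field names shown in the diffs above come from the repository, and the builder function at the end is illustrative:

// app/typing.ts: the single source of truth for the sizes DALL·E 3 accepts.
type DalleSize = "1024x1024" | "1792x1024" | "1024x1792";

// app/client/platforms/openai.ts: the image request payload reuses the type.
interface DalleRequestPayload {
  model: string;
  prompt: string;
  n: number;
  size: DalleSize;
}

// app/client/api.ts: the per-request config references the payload's field,
// so the two definitions cannot drift apart.
interface LLMConfig {
  model: string;
  stream?: boolean;
  size?: DalleRequestPayload["size"];
}

// Illustrative only: building a payload from the config, falling back to the
// "1024x1024" default that app/store/config.ts now sets.
function buildDallePayload(prompt: string, config: LLMConfig): DalleRequestPayload {
  return {
    model: config.model,
    prompt,
    n: 1,
    size: config.size ?? "1024x1024",
  };
}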