
fix: ts type

Dogtiti 1 year ago
parent
commit 7237d33be3
4 files changed, 11 insertions and 10 deletions
  1. app/client/platforms/anthropic.ts (+1 -2)
  2. app/client/platforms/google.ts (+1 -1)
  3. app/client/platforms/openai.ts (+1 -1)
  4. app/utils/chat.ts (+8 -6)

+ 1 - 2
app/client/platforms/anthropic.ts

@@ -3,7 +3,6 @@ import { ChatOptions, getHeaders, LLMApi, MultimodalContent } from "../api";
 import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
 import { getClientConfig } from "@/app/config/client";
 import { DEFAULT_API_HOST } from "@/app/constant";
-import { RequestMessage } from "@/app/typing";
 import {
   EventStreamContentType,
   fetchEventSource,
@@ -95,7 +94,7 @@ export class ClaudeApi implements LLMApi {
     };
 
     // try get base64image from local cache image_url
-    const messages = [];
+    const messages: ChatOptions["messages"] = [];
     for (const v of options.messages) {
       const content = await preProcessImageContent(v.content);
       messages.push({ role: v.role, content });
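
For context, a minimal sketch of what the annotation buys (simplified, assumed shapes; in the real code ChatOptions comes from app/client/api.ts): ChatOptions["messages"] is an indexed access type, so the empty array starts with the element type declared on ChatOptions rather than the loose type TypeScript would infer for a bare const messages = []. The same annotation is applied to google.ts and openai.ts below.

  // Sketch only: the role/content shapes here are simplified assumptions.
  type MultimodalContent = { type: "text" | "image_url"; text?: string };

  interface ChatOptions {
    messages: {
      role: "system" | "user" | "assistant";
      content: string | MultimodalContent[];
    }[];
  }

  const messages: ChatOptions["messages"] = [];
  messages.push({ role: "user", content: "hello" }); // OK: matches the declared element type
  // messages.push({ role: "bot", content: "hi" });  // rejected: "bot" is not a valid role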

+ 1 - 1
app/client/platforms/google.ts

@@ -59,7 +59,7 @@ export class GeminiProApi implements LLMApi {
     let multimodal = false;
 
     // try get base64image from local cache image_url
-    const _messages = [];
+    const _messages: ChatOptions["messages"] = [];
     for (const v of options.messages) {
       const content = await preProcessImageContent(v.content);
       _messages.push({ role: v.role, content });

+ 1 - 1
app/client/platforms/openai.ts

@@ -106,7 +106,7 @@ export class ChatGPTApi implements LLMApi {
 
   async chat(options: ChatOptions) {
     const visionModel = isVisionModel(options.config.model);
-    const messages = [];
+    const messages: ChatOptions["messages"] = [];
     for (const v of options.messages) {
       const content = visionModel
         ? await preProcessImageContent(v.content)

+ 8 - 6
app/utils/chat.ts

@@ -1,7 +1,8 @@
 import { CACHE_URL_PREFIX, UPLOAD_URL } from "@/app/constant";
 // import heic2any from "heic2any";
+import { RequestMessage } from "@/app/client/api";
 
-export function compressImage(file: File, maxSize: number): Promise<string> {
+export function compressImage(file: Blob, maxSize: number): Promise<string> {
   return new Promise((resolve, reject) => {
     const reader = new FileReader();
     reader.onload = (readerEvent: any) => {
@@ -43,10 +44,10 @@ export function compressImage(file: File, maxSize: number): Promise<string> {
     if (file.type.includes("heic")) {
       const heic2any = require("heic2any");
       heic2any({ blob: file, toType: "image/jpeg" })
-        .then((blob) => {
-          reader.readAsDataURL(blob as Blob);
+        .then((blob: Blob) => {
+          reader.readAsDataURL(blob);
         })
-        .catch((e) => {
+        .catch((e: any) => {
           reject(e);
         });
     }
@@ -73,7 +74,7 @@ export async function preProcessImageContent(
   return result;
 }
 
-const imageCaches = {};
+const imageCaches: Record<string, string> = {};
 export function cacheImageToBase64Image(imageUrl: string) {
   if (imageUrl.includes(CACHE_URL_PREFIX)) {
     if (!imageCaches[imageUrl]) {
@@ -85,7 +86,8 @@ export function cacheImageToBase64Image(imageUrl: string) {
       })
         .then((res) => res.blob())
         .then(
-          (blob) => (imageCaches[imageUrl] = compressImage(blob, 256 * 1024)),
+          async (blob) =>
+            (imageCaches[imageUrl] = await compressImage(blob, 256 * 1024)),
         ); // compressImage
     }
     return Promise.resolve(imageCaches[imageUrl]);
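
For context, a minimal sketch of the cache pattern after this change (cacheToBase64 is a hypothetical wrapper, and compressImage is declared as a stub standing in for the function above): typing the record as Record<string, string> and awaiting compressImage means the cache holds base64 data-URL strings rather than pending promises.

  // Sketch only: stand-in declaration for the real compressImage in app/utils/chat.ts.
  declare function compressImage(file: Blob, maxSize: number): Promise<string>;

  // Maps an image URL to its base64 data URL.
  const imageCaches: Record<string, string> = {};

  async function cacheToBase64(imageUrl: string, blob: Blob): Promise<string> {
    if (!imageCaches[imageUrl]) {
      // Await so a string, not a Promise<string>, lands in the cache.
      imageCaches[imageUrl] = await compressImage(blob, 256 * 1024);
    }
    return imageCaches[imageUrl];
  }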