
Merge remote-tracking branch 'origin/dev-sd' into dev-sd

licoy, 1 year ago
commit d61cb98ac7
47 files changed, 2632 additions and 322 deletions
  1. .gitignore (+1, -1)
  2. Dockerfile (+2, -2)
  3. LICENSE (+1, -1)
  4. README.md (+48, -1)
  5. README_CN.md (+55, -5)
  6. app/api/alibaba/[...path]/route.ts (+155, -0)
  7. app/api/anthropic/[...path]/route.ts (+9, -6)
  8. app/api/auth.ts (+12, -3)
  9. app/api/azure/[...path]/route.ts (+57, -0)
  10. app/api/baidu/[...path]/route.ts (+169, -0)
  11. app/api/bytedance/[...path]/route.ts (+153, -0)
  12. app/api/common.ts (+66, -27)
  13. app/api/google/[...path]/route.ts (+3, -1)
  14. app/api/webdav/[...path]/route.ts (+21, -3)
  15. app/azure.ts (+0, -9)
  16. app/client/api.ts (+101, -23)
  17. app/client/platforms/alibaba.ts (+268, -0)
  18. app/client/platforms/anthropic.ts (+4, -29)
  19. app/client/platforms/baidu.ts (+273, -0)
  20. app/client/platforms/bytedance.ts (+255, -0)
  21. app/client/platforms/google.ts (+86, -64)
  22. app/client/platforms/openai.ts (+42, -13)
  23. app/components/chat.tsx (+43, -12)
  24. app/components/exporter.tsx (+3, -11)
  25. app/components/home.tsx (+5, -11)
  26. app/components/model-config.tsx (+10, -9)
  27. app/components/settings.tsx (+153, -0)
  28. app/components/sidebar.tsx (+0, -1)
  29. app/config/server.ts (+43, -2)
  30. app/constant.ts (+119, -4)
  31. app/layout.tsx (+14, -10)
  32. app/locales/cn.ts (+38, -0)
  33. app/locales/en.ts (+38, -0)
  34. app/locales/index.ts (+12, -10)
  35. app/masks/build.ts (+25, -0)
  36. app/masks/index.ts (+17, -3)
  37. app/store/access.ts (+34, -1)
  38. app/store/chat.ts (+9, -19)
  39. app/store/config.ts (+4, -2)
  40. app/utils/baidu.ts (+23, -0)
  41. app/utils/checkers.ts (+0, -21)
  42. app/utils/hooks.ts (+6, -1)
  43. app/utils/model.ts (+47, -9)
  44. next.config.mjs (+5, -0)
  45. package.json (+10, -6)
  46. src-tauri/tauri.conf.json (+2, -2)
  47. yarn.lock (+191, -0)

.gitignore (+1, -1)

@@ -43,4 +43,4 @@ dev
 .env
 
 *.key
-*.key.pub
+*.key.pub

Dockerfile (+2, -2)

@@ -43,7 +43,7 @@ COPY --from=builder /app/.next/server ./.next/server
 EXPOSE 3000
 
 CMD if [ -n "$PROXY_URL" ]; then \
-    export HOSTNAME="127.0.0.1"; \
+    export HOSTNAME="0.0.0.0"; \
     protocol=$(echo $PROXY_URL | cut -d: -f1); \
     host=$(echo $PROXY_URL | cut -d/ -f3 | cut -d: -f1); \
     port=$(echo $PROXY_URL | cut -d: -f3); \
@@ -58,7 +58,7 @@ CMD if [ -n "$PROXY_URL" ]; then \
     echo "[ProxyList]" >> $conf; \
     echo "$protocol $host $port" >> $conf; \
     cat /etc/proxychains.conf; \
-    proxychains -f $conf node server.js --host 0.0.0.0; \
+    proxychains -f $conf node server.js; \
     else \
     node server.js; \
     fi

LICENSE (+1, -1)

@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2023 Zhang Yifei
+Copyright (c) 2023-2024 Zhang Yifei
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal

README.md (+48, -1)

@@ -18,7 +18,7 @@ One-Click to get a well-designed cross-platform ChatGPT web UI, with GPT3, GPT4
 
 [网页版](https://app.nextchat.dev/) / [客户端](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [反馈](https://github.com/Yidadaa/ChatGPT-Next-Web/issues)
 
-[web-url]: https://chatgpt.nextweb.fun
+[web-url]: https://app.nextchat.dev/
 [download-url]: https://github.com/Yidadaa/ChatGPT-Next-Web/releases
 [Web-image]: https://img.shields.io/badge/Web-PWA-orange?logo=microsoftedge
 [Windows-image]: https://img.shields.io/badge/-Windows-blue?logo=windows
@@ -181,6 +181,7 @@ Specify OpenAI organization ID.
 ### `AZURE_URL` (optional)
 
 > Example: https://{azure-resource-url}/openai/deployments/{deploy-name}
+> if you config deployment name in `CUSTOM_MODELS`, you can remove `{deploy-name}` in `AZURE_URL`
 
 Azure deploy url.
 
@@ -212,6 +213,34 @@ anthropic claude Api version.
 
 anthropic claude Api Url.
 
+### `BAIDU_API_KEY` (optional)
+
+Baidu Api Key.
+
+### `BAIDU_SECRET_KEY` (optional)
+
+Baidu Secret Key.
+
+### `BAIDU_URL` (optional)
+
+Baidu Api Url.
+
+### `BYTEDANCE_API_KEY` (optional)
+
+ByteDance Api Key.
+
+### `BYTEDANCE_URL` (optional)
+
+ByteDance Api Url.
+
+### `ALIBABA_API_KEY` (optional)
+
+Alibaba Cloud Api Key.
+
+### `ALIBABA_URL` (optional)
+
+Alibaba Cloud Api Url.
+
 ### `HIDE_USER_API_KEY` (optional)
 
 > Default: Empty
@@ -245,6 +274,16 @@ To control custom models, use `+` to add a custom model, use `-` to hide a model
 
 User `-all` to disable all default models, `+all` to enable all default models.
 
+For Azure: use `modelName@azure=deploymentName` to customize model name and deployment name.
+> Example: `+gpt-3.5-turbo@azure=gpt35` will show option `gpt35(Azure)` in model list.
+
+For ByteDance: use `modelName@bytedance=deploymentName` to customize model name and deployment name.
+> Example: `+Doubao-lite-4k@bytedance=ep-xxxxx-xxx` will show option `Doubao-lite-4k(ByteDance)` in model list.
+
+### `DEFAULT_MODEL` (optional)
+
+Change default model
+
 ### `WHITE_WEBDEV_ENDPOINTS` (optional)
 
 You can use this option if you want to increase the number of webdav service addresses you are allowed to access, as required by the format:
@@ -256,6 +295,14 @@ You can use this option if you want to increase the number of webdav service add
 
 Customize the default template used to initialize the User Input Preprocessing configuration item in Settings.
 
+### `STABILITY_API_KEY` (optional)
+
+Stability API key.
+
+### `STABILITY_URL` (optional)
+
+Customize Stability API url.
+
 ## Requirements
 
 NodeJS >= 18, Docker >= 20
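
The README addition above documents the `modelName@provider=deploymentName` syntax for `CUSTOM_MODELS`. As a rough TypeScript sketch (the helper and the default provider are illustrative, not the project's actual parser), one entry decomposes like this:

```ts
// Illustrative decomposition of one CUSTOM_MODELS entry, as documented above.
// "+gpt-3.5-turbo@azure=gpt35" -> enable gpt-3.5-turbo on Azure with deployment "gpt35"
function parseCustomModelEntry(entry: string) {
  const enabled = !entry.startsWith("-");
  const body = entry.replace(/^[+-]/, "");
  const [nameAndProvider, deployment] = body.split("="); // "gpt-3.5-turbo@azure", "gpt35"
  const [model, provider = "openai"] = nameAndProvider.split("@"); // default provider is an assumption
  return { enabled, model, provider, deployment };
}

parseCustomModelEntry("+gpt-3.5-turbo@azure=gpt35");
// { enabled: true, model: "gpt-3.5-turbo", provider: "azure", deployment: "gpt35" } -> listed as "gpt35(Azure)"
parseCustomModelEntry("+Doubao-lite-4k@bytedance=ep-xxxxx-xxx"); // listed as "Doubao-lite-4k(ByteDance)"
```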

README_CN.md (+55, -5)

@@ -95,6 +95,7 @@ OpenAI 接口代理 URL,如果你手动配置了 openai 接口代理,请填
 ### `AZURE_URL` (可选)
 
 > 形如:https://{azure-resource-url}/openai/deployments/{deploy-name}
+> 如果你已经在`CUSTOM_MODELS`中参考`displayName`的方式配置了{deploy-name},那么可以从`AZURE_URL`中移除`{deploy-name}`
 
 Azure 部署地址。
 
@@ -106,26 +107,54 @@ Azure 密钥。
 
 Azure Api 版本,你可以在这里找到:[Azure 文档](https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#chat-completions)。
 
-### `GOOGLE_API_KEY` (optional)
+### `GOOGLE_API_KEY` (可选)
 
 Google Gemini Pro 密钥.
 
-### `GOOGLE_URL` (optional)
+### `GOOGLE_URL` (可选)
 
 Google Gemini Pro Api Url.
 
-### `ANTHROPIC_API_KEY` (optional)
+### `ANTHROPIC_API_KEY` (可选)
 
 anthropic claude Api Key.
 
-### `ANTHROPIC_API_VERSION` (optional)
+### `ANTHROPIC_API_VERSION` (可选)
 
 anthropic claude Api version.
 
-### `ANTHROPIC_URL` (optional)
+### `ANTHROPIC_URL` (可选)
 
 anthropic claude Api Url.
 
+### `BAIDU_API_KEY` (可选)
+
+Baidu Api Key.
+
+### `BAIDU_SECRET_KEY` (可选)
+
+Baidu Secret Key.
+
+### `BAIDU_URL` (可选)
+
+Baidu Api Url.
+
+### `BYTEDANCE_API_KEY` (可选)
+
+ByteDance Api Key.
+
+### `BYTEDANCE_URL` (可选)
+
+ByteDance Api Url.
+
+### `ALIBABA_API_KEY` (可选)
+
+阿里云(千问)Api Key.
+
+### `ALIBABA_URL` (可选)
+
+阿里云(千问)Api Url.
+
 ### `HIDE_USER_API_KEY` (可选)
 
 如果你不想让用户自行填入 API Key,将此环境变量设置为 1 即可。
@@ -156,9 +185,30 @@ anthropic claude Api Url.
 
 用来控制模型列表,使用 `+` 增加一个模型,使用 `-` 来隐藏一个模型,使用 `模型名=展示名` 来自定义模型的展示名,用英文逗号隔开。
 
+在Azure的模式下,支持使用`modelName@azure=deploymentName`的方式配置模型名称和部署名称(deploy-name)
+> 示例:`+gpt-3.5-turbo@azure=gpt35`这个配置会在模型列表显示一个`gpt35(Azure)`的选项
+
+在ByteDance的模式下,支持使用`modelName@bytedance=deploymentName`的方式配置模型名称和部署名称(deploy-name)
+> 示例: `+Doubao-lite-4k@bytedance=ep-xxxxx-xxx`这个配置会在模型列表显示一个`Doubao-lite-4k(ByteDance)`的选项
+
+
+### `DEFAULT_MODEL` (可选)
+
+更改默认模型
+
 ### `DEFAULT_INPUT_TEMPLATE` (可选)
+
 自定义默认的 template,用于初始化『设置』中的『用户输入预处理』配置项
 
+### `STABILITY_API_KEY` (optional)
+
+Stability API密钥
+
+### `STABILITY_URL` (optional)
+
+自定义的Stability API请求地址
+
+
 ## 开发
 
 点击下方按钮,开始二次开发:

app/api/alibaba/[...path]/route.ts (+155, -0)

@@ -0,0 +1,155 @@
+import { getServerSideConfig } from "@/app/config/server";
+import {
+  Alibaba,
+  ALIBABA_BASE_URL,
+  ApiPath,
+  ModelProvider,
+  ServiceProvider,
+} from "@/app/constant";
+import { prettyObject } from "@/app/utils/format";
+import { NextRequest, NextResponse } from "next/server";
+import { auth } from "@/app/api/auth";
+import { isModelAvailableInServer } from "@/app/utils/model";
+import type { RequestPayload } from "@/app/client/platforms/openai";
+
+const serverConfig = getServerSideConfig();
+
+async function handle(
+  req: NextRequest,
+  { params }: { params: { path: string[] } },
+) {
+  console.log("[Alibaba Route] params ", params);
+
+  if (req.method === "OPTIONS") {
+    return NextResponse.json({ body: "OK" }, { status: 200 });
+  }
+
+  const authResult = auth(req, ModelProvider.Qwen);
+  if (authResult.error) {
+    return NextResponse.json(authResult, {
+      status: 401,
+    });
+  }
+
+  try {
+    const response = await request(req);
+    return response;
+  } catch (e) {
+    console.error("[Alibaba] ", e);
+    return NextResponse.json(prettyObject(e));
+  }
+}
+
+export const GET = handle;
+export const POST = handle;
+
+export const runtime = "edge";
+export const preferredRegion = [
+  "arn1",
+  "bom1",
+  "cdg1",
+  "cle1",
+  "cpt1",
+  "dub1",
+  "fra1",
+  "gru1",
+  "hnd1",
+  "iad1",
+  "icn1",
+  "kix1",
+  "lhr1",
+  "pdx1",
+  "sfo1",
+  "sin1",
+  "syd1",
+];
+
+async function request(req: NextRequest) {
+  const controller = new AbortController();
+
+  // alibaba use base url or just remove the path
+  let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.Alibaba, "");
+
+  let baseUrl = serverConfig.alibabaUrl || ALIBABA_BASE_URL;
+
+  if (!baseUrl.startsWith("http")) {
+    baseUrl = `https://${baseUrl}`;
+  }
+
+  if (baseUrl.endsWith("/")) {
+    baseUrl = baseUrl.slice(0, -1);
+  }
+
+  console.log("[Proxy] ", path);
+  console.log("[Base Url]", baseUrl);
+
+  const timeoutId = setTimeout(
+    () => {
+      controller.abort();
+    },
+    10 * 60 * 1000,
+  );
+
+  const fetchUrl = `${baseUrl}${path}`;
+  const fetchOptions: RequestInit = {
+    headers: {
+      "Content-Type": "application/json",
+      Authorization: req.headers.get("Authorization") ?? "",
+      "X-DashScope-SSE": req.headers.get("X-DashScope-SSE") ?? "disable",
+    },
+    method: req.method,
+    body: req.body,
+    redirect: "manual",
+    // @ts-ignore
+    duplex: "half",
+    signal: controller.signal,
+  };
+
+  // #1815 try to refuse some request to some models
+  if (serverConfig.customModels && req.body) {
+    try {
+      const clonedBody = await req.text();
+      fetchOptions.body = clonedBody;
+
+      const jsonBody = JSON.parse(clonedBody) as { model?: string };
+
+      // not undefined and is false
+      if (
+        isModelAvailableInServer(
+          serverConfig.customModels,
+          jsonBody?.model as string,
+          ServiceProvider.Alibaba as string,
+        )
+      ) {
+        return NextResponse.json(
+          {
+            error: true,
+            message: `you are not allowed to use ${jsonBody?.model} model`,
+          },
+          {
+            status: 403,
+          },
+        );
+      }
+    } catch (e) {
+      console.error(`[Alibaba] filter`, e);
+    }
+  }
+  try {
+    const res = await fetch(fetchUrl, fetchOptions);
+
+    // to prevent browser prompt for credentials
+    const newHeaders = new Headers(res.headers);
+    newHeaders.delete("www-authenticate");
+    // to disable nginx buffering
+    newHeaders.set("X-Accel-Buffering", "no");
+
+    return new Response(res.body, {
+      status: res.status,
+      statusText: res.statusText,
+      headers: newHeaders,
+    });
+  } finally {
+    clearTimeout(timeoutId);
+  }
+}
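
In the guard above, a truthy return from `isModelAvailableInServer` results in a 403, i.e. it signals "this model is not allowed for this provider". A minimal sketch of those presumed semantics (the real implementation lives in `app/utils/model.ts`, which is not part of this excerpt):

```ts
// Sketch only: presumed allow/deny semantics of the guard used above.
// A truthy result means "reject the request for this model/provider".
function shouldRejectModel(
  customModels: string, // e.g. "-all,+qwen-turbo@alibaba"
  model: string,        // model name taken from the request body
  provider: string,     // e.g. "alibaba"
): boolean {
  const entries = customModels.split(",").map((v) => v.trim()).filter(Boolean);
  const enabled = entries.some(
    (v) => v === `+${model}` || v === `+${model}@${provider}`,
  );
  const disabled =
    entries.includes(`-${model}`) || (entries.includes("-all") && !enabled);
  return disabled;
}
```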

app/api/anthropic/[...path]/route.ts (+9, -6)

@@ -4,12 +4,13 @@ import {
   Anthropic,
   ApiPath,
   DEFAULT_MODELS,
+  ServiceProvider,
   ModelProvider,
 } from "@/app/constant";
 import { prettyObject } from "@/app/utils/format";
 import { NextRequest, NextResponse } from "next/server";
 import { auth } from "../../auth";
-import { collectModelTable } from "@/app/utils/model";
+import { isModelAvailableInServer } from "@/app/utils/model";
 
 const ALLOWD_PATH = new Set([Anthropic.ChatPath, Anthropic.ChatPath1]);
 
@@ -136,17 +137,19 @@ async function request(req: NextRequest) {
   // #1815 try to refuse some request to some models
   if (serverConfig.customModels && req.body) {
     try {
-      const modelTable = collectModelTable(
-        DEFAULT_MODELS,
-        serverConfig.customModels,
-      );
       const clonedBody = await req.text();
       fetchOptions.body = clonedBody;
 
       const jsonBody = JSON.parse(clonedBody) as { model?: string };
 
       // not undefined and is false
-      if (modelTable[jsonBody?.model ?? ""].available === false) {
+      if (
+        isModelAvailableInServer(
+          serverConfig.customModels,
+          jsonBody?.model as string,
+          ServiceProvider.Anthropic as string,
+        )
+      ) {
         return NextResponse.json(
           {
             error: true,

app/api/auth.ts (+12, -3)

@@ -67,18 +67,27 @@ export function auth(req: NextRequest, modelProvider: ModelProvider) {
     let systemApiKey: string | undefined;
 
     switch (modelProvider) {
+      case ModelProvider.Stability:
+        systemApiKey = serverConfig.stabilityApiKey;
+        break;
       case ModelProvider.GeminiPro:
         systemApiKey = serverConfig.googleApiKey;
         break;
       case ModelProvider.Claude:
         systemApiKey = serverConfig.anthropicApiKey;
         break;
-      case ModelProvider.Stability:
-        systemApiKey = serverConfig.stabilityApiKey;
+      case ModelProvider.Doubao:
+        systemApiKey = serverConfig.bytedanceApiKey;
+        break;
+      case ModelProvider.Ernie:
+        systemApiKey = serverConfig.baiduApiKey;
+        break;
+      case ModelProvider.Qwen:
+        systemApiKey = serverConfig.alibabaApiKey;
         break;
       case ModelProvider.GPT:
       default:
-        if (serverConfig.isAzure) {
+        if (req.nextUrl.pathname.includes("azure/deployments")) {
           systemApiKey = serverConfig.azureApiKey;
         } else {
           systemApiKey = serverConfig.apiKey;

app/api/azure/[...path]/route.ts (+57, -0)

@@ -0,0 +1,57 @@
+import { getServerSideConfig } from "@/app/config/server";
+import { ModelProvider } from "@/app/constant";
+import { prettyObject } from "@/app/utils/format";
+import { NextRequest, NextResponse } from "next/server";
+import { auth } from "../../auth";
+import { requestOpenai } from "../../common";
+
+async function handle(
+  req: NextRequest,
+  { params }: { params: { path: string[] } },
+) {
+  console.log("[Azure Route] params ", params);
+
+  if (req.method === "OPTIONS") {
+    return NextResponse.json({ body: "OK" }, { status: 200 });
+  }
+
+  const subpath = params.path.join("/");
+
+  const authResult = auth(req, ModelProvider.GPT);
+  if (authResult.error) {
+    return NextResponse.json(authResult, {
+      status: 401,
+    });
+  }
+
+  try {
+    return await requestOpenai(req);
+  } catch (e) {
+    console.error("[Azure] ", e);
+    return NextResponse.json(prettyObject(e));
+  }
+}
+
+export const GET = handle;
+export const POST = handle;
+
+export const runtime = "edge";
+export const preferredRegion = [
+  "arn1",
+  "bom1",
+  "cdg1",
+  "cle1",
+  "cpt1",
+  "dub1",
+  "fra1",
+  "gru1",
+  "hnd1",
+  "iad1",
+  "icn1",
+  "kix1",
+  "lhr1",
+  "pdx1",
+  "sfo1",
+  "sin1",
+  "syd1",
+];

app/api/baidu/[...path]/route.ts (+169, -0)

@@ -0,0 +1,169 @@
+import { getServerSideConfig } from "@/app/config/server";
+import {
+  BAIDU_BASE_URL,
+  ApiPath,
+  ModelProvider,
+  BAIDU_OATUH_URL,
+  ServiceProvider,
+} from "@/app/constant";
+import { prettyObject } from "@/app/utils/format";
+import { NextRequest, NextResponse } from "next/server";
+import { auth } from "@/app/api/auth";
+import { isModelAvailableInServer } from "@/app/utils/model";
+import { getAccessToken } from "@/app/utils/baidu";
+
+const serverConfig = getServerSideConfig();
+
+async function handle(
+  req: NextRequest,
+  { params }: { params: { path: string[] } },
+) {
+  console.log("[Baidu Route] params ", params);
+
+  if (req.method === "OPTIONS") {
+    return NextResponse.json({ body: "OK" }, { status: 200 });
+  }
+
+  const authResult = auth(req, ModelProvider.Ernie);
+  if (authResult.error) {
+    return NextResponse.json(authResult, {
+      status: 401,
+    });
+  }
+
+  if (!serverConfig.baiduApiKey || !serverConfig.baiduSecretKey) {
+    return NextResponse.json(
+      {
+        error: true,
+        message: `missing BAIDU_API_KEY or BAIDU_SECRET_KEY in server env vars`,
+      },
+      {
+        status: 401,
+      },
+    );
+  }
+
+  try {
+    const response = await request(req);
+    return response;
+  } catch (e) {
+    console.error("[Baidu] ", e);
+    return NextResponse.json(prettyObject(e));
+  }
+}
+
+export const GET = handle;
+export const POST = handle;
+
+export const runtime = "edge";
+export const preferredRegion = [
+  "arn1",
+  "bom1",
+  "cdg1",
+  "cle1",
+  "cpt1",
+  "dub1",
+  "fra1",
+  "gru1",
+  "hnd1",
+  "iad1",
+  "icn1",
+  "kix1",
+  "lhr1",
+  "pdx1",
+  "sfo1",
+  "sin1",
+  "syd1",
+];
+
+async function request(req: NextRequest) {
+  const controller = new AbortController();
+
+  let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.Baidu, "");
+
+  let baseUrl = serverConfig.baiduUrl || BAIDU_BASE_URL;
+
+  if (!baseUrl.startsWith("http")) {
+    baseUrl = `https://${baseUrl}`;
+  }
+
+  if (baseUrl.endsWith("/")) {
+    baseUrl = baseUrl.slice(0, -1);
+  }
+
+  console.log("[Proxy] ", path);
+  console.log("[Base Url]", baseUrl);
+
+  const timeoutId = setTimeout(
+    () => {
+      controller.abort();
+    },
+    10 * 60 * 1000,
+  );
+
+  const { access_token } = await getAccessToken(
+    serverConfig.baiduApiKey as string,
+    serverConfig.baiduSecretKey as string,
+  );
+  const fetchUrl = `${baseUrl}${path}?access_token=${access_token}`;
+
+  const fetchOptions: RequestInit = {
+    headers: {
+      "Content-Type": "application/json",
+    },
+    method: req.method,
+    body: req.body,
+    redirect: "manual",
+    // @ts-ignore
+    duplex: "half",
+    signal: controller.signal,
+  };
+
+  // #1815 try to refuse some request to some models
+  if (serverConfig.customModels && req.body) {
+    try {
+      const clonedBody = await req.text();
+      fetchOptions.body = clonedBody;
+
+      const jsonBody = JSON.parse(clonedBody) as { model?: string };
+
+      // not undefined and is false
+      if (
+        isModelAvailableInServer(
+          serverConfig.customModels,
+          jsonBody?.model as string,
+          ServiceProvider.Baidu as string,
+        )
+      ) {
+        return NextResponse.json(
+          {
+            error: true,
+            message: `you are not allowed to use ${jsonBody?.model} model`,
+          },
+          {
+            status: 403,
+          },
+        );
+      }
+    } catch (e) {
+      console.error(`[Baidu] filter`, e);
+    }
+  }
+  try {
+    const res = await fetch(fetchUrl, fetchOptions);
+
+    // to prevent browser prompt for credentials
+    const newHeaders = new Headers(res.headers);
+    newHeaders.delete("www-authenticate");
+    // to disable nginx buffering
+    newHeaders.set("X-Accel-Buffering", "no");
+
+    return new Response(res.body, {
+      status: res.status,
+      statusText: res.statusText,
+      headers: newHeaders,
+    });
+  } finally {
+    clearTimeout(timeoutId);
+  }
+}
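
The Baidu route differs from the others in that it authenticates with an `access_token` derived from `BAIDU_API_KEY`/`BAIDU_SECRET_KEY` and appended as a query parameter. `app/utils/baidu.ts` is not shown in this excerpt; a hedged sketch of what `getAccessToken` presumably does (Baidu's ERNIE API uses an OAuth2 client_credentials grant; the endpoint constant below is an assumption):

```ts
// Hedged sketch of the token exchange behind getAccessToken (assumed endpoint).
const BAIDU_OAUTH_URL = "https://aip.baidubce.com/oauth/2.0/token";

async function fetchBaiduAccessToken(apiKey: string, secretKey: string) {
  const url =
    `${BAIDU_OAUTH_URL}?grant_type=client_credentials` +
    `&client_id=${apiKey}&client_secret=${secretKey}`;
  const res = await fetch(url, { method: "POST" });
  const json = (await res.json()) as { access_token?: string; expires_in?: number };
  if (!json.access_token) throw new Error("failed to obtain Baidu access_token");
  return json; // the route appends `?access_token=${json.access_token}` to the chat path
}
```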

app/api/bytedance/[...path]/route.ts (+153, -0)

@@ -0,0 +1,153 @@
+import { getServerSideConfig } from "@/app/config/server";
+import {
+  BYTEDANCE_BASE_URL,
+  ApiPath,
+  ModelProvider,
+  ServiceProvider,
+} from "@/app/constant";
+import { prettyObject } from "@/app/utils/format";
+import { NextRequest, NextResponse } from "next/server";
+import { auth } from "@/app/api/auth";
+import { isModelAvailableInServer } from "@/app/utils/model";
+
+const serverConfig = getServerSideConfig();
+
+async function handle(
+  req: NextRequest,
+  { params }: { params: { path: string[] } },
+) {
+  console.log("[ByteDance Route] params ", params);
+
+  if (req.method === "OPTIONS") {
+    return NextResponse.json({ body: "OK" }, { status: 200 });
+  }
+
+  const authResult = auth(req, ModelProvider.Doubao);
+  if (authResult.error) {
+    return NextResponse.json(authResult, {
+      status: 401,
+    });
+  }
+
+  try {
+    const response = await request(req);
+    return response;
+  } catch (e) {
+    console.error("[ByteDance] ", e);
+    return NextResponse.json(prettyObject(e));
+  }
+}
+
+export const GET = handle;
+export const POST = handle;
+
+export const runtime = "edge";
+export const preferredRegion = [
+  "arn1",
+  "bom1",
+  "cdg1",
+  "cle1",
+  "cpt1",
+  "dub1",
+  "fra1",
+  "gru1",
+  "hnd1",
+  "iad1",
+  "icn1",
+  "kix1",
+  "lhr1",
+  "pdx1",
+  "sfo1",
+  "sin1",
+  "syd1",
+];
+
+async function request(req: NextRequest) {
+  const controller = new AbortController();
+
+  let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.ByteDance, "");
+
+  let baseUrl = serverConfig.bytedanceUrl || BYTEDANCE_BASE_URL;
+
+  if (!baseUrl.startsWith("http")) {
+    baseUrl = `https://${baseUrl}`;
+  }
+
+  if (baseUrl.endsWith("/")) {
+    baseUrl = baseUrl.slice(0, -1);
+  }
+
+  console.log("[Proxy] ", path);
+  console.log("[Base Url]", baseUrl);
+
+  const timeoutId = setTimeout(
+    () => {
+      controller.abort();
+    },
+    10 * 60 * 1000,
+  );
+
+  const fetchUrl = `${baseUrl}${path}`;
+
+  const fetchOptions: RequestInit = {
+    headers: {
+      "Content-Type": "application/json",
+      Authorization: req.headers.get("Authorization") ?? "",
+    },
+    method: req.method,
+    body: req.body,
+    redirect: "manual",
+    // @ts-ignore
+    duplex: "half",
+    signal: controller.signal,
+  };
+
+  // #1815 try to refuse some request to some models
+  if (serverConfig.customModels && req.body) {
+    try {
+      const clonedBody = await req.text();
+      fetchOptions.body = clonedBody;
+
+      const jsonBody = JSON.parse(clonedBody) as { model?: string };
+
+      // not undefined and is false
+      if (
+        isModelAvailableInServer(
+          serverConfig.customModels,
+          jsonBody?.model as string,
+          ServiceProvider.ByteDance as string,
+        )
+      ) {
+        return NextResponse.json(
+          {
+            error: true,
+            message: `you are not allowed to use ${jsonBody?.model} model`,
+          },
+          {
+            status: 403,
+          },
+        );
+      }
+    } catch (e) {
+      console.error(`[ByteDance] filter`, e);
+    }
+  }
+
+  try {
+    const res = await fetch(fetchUrl, fetchOptions);
+
+    // to prevent browser prompt for credentials
+    const newHeaders = new Headers(res.headers);
+    newHeaders.delete("www-authenticate");
+    // to disable nginx buffering
+    newHeaders.set("X-Accel-Buffering", "no");
+
+    return new Response(res.body, {
+      status: res.status,
+      statusText: res.statusText,
+      headers: newHeaders,
+    });
+  } finally {
+    clearTimeout(timeoutId);
+  }
+}

app/api/common.ts (+66, -27)

@@ -1,17 +1,23 @@
 import { NextRequest, NextResponse } from "next/server";
 import { getServerSideConfig } from "../config/server";
-import { DEFAULT_MODELS, OPENAI_BASE_URL, GEMINI_BASE_URL } from "../constant";
-import { collectModelTable } from "../utils/model";
-import { makeAzurePath } from "../azure";
+import {
+  DEFAULT_MODELS,
+  OPENAI_BASE_URL,
+  GEMINI_BASE_URL,
+  ServiceProvider,
+} from "../constant";
+import { isModelAvailableInServer } from "../utils/model";
 
 const serverConfig = getServerSideConfig();
 
 export async function requestOpenai(req: NextRequest) {
   const controller = new AbortController();
 
+  const isAzure = req.nextUrl.pathname.includes("azure/deployments");
+
   var authValue,
     authHeaderName = "";
-  if (serverConfig.isAzure) {
+  if (isAzure) {
     authValue =
       req.headers
         .get("Authorization")
@@ -51,14 +57,42 @@ export async function requestOpenai(req: NextRequest) {
     10 * 60 * 1000,
   );
 
-  if (serverConfig.isAzure) {
-    if (!serverConfig.azureApiVersion) {
-      return NextResponse.json({
-        error: true,
-        message: `missing AZURE_API_VERSION in server env vars`,
-      });
+  if (isAzure) {
+    const azureApiVersion =
+      req?.nextUrl?.searchParams?.get("api-version") ||
+      serverConfig.azureApiVersion;
+    baseUrl = baseUrl.split("/deployments").shift() as string;
+    path = `${req.nextUrl.pathname.replaceAll(
+      "/api/azure/",
+      "",
+    )}?api-version=${azureApiVersion}`;
+
+    // Forward compatibility:
+    // if display_name(deployment_name) not set, and '{deploy-id}' in AZURE_URL
+    // then using default '{deploy-id}'
+    if (serverConfig.customModels && serverConfig.azureUrl) {
+      const modelName = path.split("/")[1];
+      let realDeployName = "";
+      serverConfig.customModels
+        .split(",")
+        .filter((v) => !!v && !v.startsWith("-") && v.includes(modelName))
+        .forEach((m) => {
+          const [fullName, displayName] = m.split("=");
+          const [_, providerName] = fullName.split("@");
+          if (providerName === "azure" && !displayName) {
+            const [_, deployId] = (serverConfig?.azureUrl ?? "").split(
+              "deployments/",
+            );
+            if (deployId) {
+              realDeployName = deployId;
+            }
+          }
+        });
+      if (realDeployName) {
+        console.log("[Replace with DeployId", realDeployName);
+        path = path.replaceAll(modelName, realDeployName);
+      }
     }
-    path = makeAzurePath(path, serverConfig.azureApiVersion);
   }
 
   const fetchUrl = `${baseUrl}/${path}`;
@@ -83,17 +117,24 @@ export async function requestOpenai(req: NextRequest) {
   // #1815 try to refuse gpt4 request
   if (serverConfig.customModels && req.body) {
     try {
-      const modelTable = collectModelTable(
-        DEFAULT_MODELS,
-        serverConfig.customModels,
-      );
       const clonedBody = await req.text();
       fetchOptions.body = clonedBody;
 
       const jsonBody = JSON.parse(clonedBody) as { model?: string };
 
       // not undefined and is false
-      if (modelTable[jsonBody?.model ?? ""].available === false) {
+      if (
+        isModelAvailableInServer(
+          serverConfig.customModels,
+          jsonBody?.model as string,
+          ServiceProvider.OpenAI as string,
+        ) ||
+        isModelAvailableInServer(
+          serverConfig.customModels,
+          jsonBody?.model as string,
+          ServiceProvider.Azure as string,
+        )
+      ) {
         return NextResponse.json(
           {
             error: true,
@@ -112,16 +153,16 @@ export async function requestOpenai(req: NextRequest) {
   try {
     const res = await fetch(fetchUrl, fetchOptions);
 
-  // Extract the OpenAI-Organization header from the response
-  const openaiOrganizationHeader = res.headers.get("OpenAI-Organization");
+    // Extract the OpenAI-Organization header from the response
+    const openaiOrganizationHeader = res.headers.get("OpenAI-Organization");
 
-  // Check if serverConfig.openaiOrgId is defined and not an empty string
-  if (serverConfig.openaiOrgId && serverConfig.openaiOrgId.trim() !== "") {
-    // If openaiOrganizationHeader is present, log it; otherwise, log that the header is not present
-    console.log("[Org ID]", openaiOrganizationHeader);
-  } else {
-    console.log("[Org ID] is not set up.");
-  }
+    // Check if serverConfig.openaiOrgId is defined and not an empty string
+    if (serverConfig.openaiOrgId && serverConfig.openaiOrgId.trim() !== "") {
+      // If openaiOrganizationHeader is present, log it; otherwise, log that the header is not present
+      console.log("[Org ID]", openaiOrganizationHeader);
+    } else {
+      console.log("[Org ID] is not set up.");
+    }
 
     // to prevent browser prompt for credentials
     const newHeaders = new Headers(res.headers);
@@ -129,7 +170,6 @@ export async function requestOpenai(req: NextRequest) {
     // to disable nginx buffering
     newHeaders.set("X-Accel-Buffering", "no");
 
-
     // Conditionally delete the OpenAI-Organization header from the response if [Org ID] is undefined or empty (not setup in ENV)
     // Also, this is to prevent the header from being sent to the client
     if (!serverConfig.openaiOrgId || serverConfig.openaiOrgId.trim() === "") {
@@ -142,7 +182,6 @@ export async function requestOpenai(req: NextRequest) {
     // The browser will try to decode the response with brotli and fail
     newHeaders.delete("content-encoding");
 
-
     return new Response(res.body, {
       status: res.status,
       statusText: res.statusText,
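
A worked example of the Azure fallback introduced above, with illustrative values: when a `CUSTOM_MODELS` entry like `+gpt-3.5-turbo@azure` has no `=displayName`, the deployment id embedded in `AZURE_URL` is substituted into the request path.

```ts
// Illustrative values only; mirrors the path rewrite shown above.
const path = "deployments/gpt-3.5-turbo/chat/completions?api-version=2023-05-15";
const azureUrl = "https://example.openai.azure.com/openai/deployments/my-gpt35";

const modelName = path.split("/")[1];                // "gpt-3.5-turbo"
const [, deployId] = azureUrl.split("deployments/"); // "my-gpt35"
const rewritten = deployId ? path.replaceAll(modelName, deployId) : path;
// -> "deployments/my-gpt35/chat/completions?api-version=2023-05-15"
```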

app/api/google/[...path]/route.ts (+3, -1)

@@ -63,7 +63,9 @@ async function handle(
     );
   }
 
-  const fetchUrl = `${baseUrl}/${path}?key=${key}`;
+  const fetchUrl = `${baseUrl}/${path}?key=${key}${
+    req?.nextUrl?.searchParams?.get("alt") == "sse" ? "&alt=sse" : ""
+  }`;
   const fetchOptions: RequestInit = {
     headers: {
       "Content-Type": "application/json",

app/api/webdav/[...path]/route.ts (+21, -3)

@@ -9,6 +9,14 @@ const mergedAllowedWebDavEndpoints = [
   ...config.allowedWebDevEndpoints,
 ].filter((domain) => Boolean(domain.trim()));
 
+const normalizeUrl = (url: string) => {
+  try {
+    return new URL(url);
+  } catch (err) {
+    return null;
+  }
+};
+
 async function handle(
   req: NextRequest,
   { params }: { params: { path: string[] } },
@@ -24,9 +32,19 @@ async function handle(
 
   // Validate the endpoint to prevent potential SSRF attacks
   if (
-    !mergedAllowedWebDavEndpoints.some(
-      (allowedEndpoint) => endpoint?.startsWith(allowedEndpoint),
-    )
+    !endpoint ||
+    !mergedAllowedWebDavEndpoints.some((allowedEndpoint) => {
+      const normalizedAllowedEndpoint = normalizeUrl(allowedEndpoint);
+      const normalizedEndpoint = normalizeUrl(endpoint as string);
+
+      return (
+        normalizedEndpoint &&
+        normalizedEndpoint.hostname === normalizedAllowedEndpoint?.hostname &&
+        normalizedEndpoint.pathname.startsWith(
+          normalizedAllowedEndpoint.pathname,
+        )
+      );
+    })
   ) {
     return NextResponse.json(
       {
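
The rewritten WebDAV check above compares parsed URLs instead of raw string prefixes: the endpoint's hostname must exactly match an allowlisted hostname and its pathname must start with the allowlisted pathname. A small illustration (the allowlist entry is hypothetical):

```ts
// Hypothetical allowlist entry, illustrating the URL-based check above.
const allowed = new URL("https://dav.example.com/remote.php/webdav/");

function isAllowedEndpoint(endpoint: string): boolean {
  let u: URL;
  try { u = new URL(endpoint); } catch { return false; }
  return u.hostname === allowed.hostname && u.pathname.startsWith(allowed.pathname);
}

isAllowedEndpoint("https://dav.example.com/remote.php/webdav/notes");    // true
isAllowedEndpoint("https://dav.example.com.evil.tld/remote.php/webdav"); // false: hostname differs
isAllowedEndpoint("https://dav.example.com/other/path");                 // false: path prefix differs
```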

app/azure.ts (+0, -9)

@@ -1,9 +0,0 @@
-export function makeAzurePath(path: string, apiVersion: string) {
-  // should omit /v1 prefix
-  path = path.replaceAll("v1/", "");
-
-  // should add api-key to query string
-  path += `${path.includes("?") ? "&" : "?"}api-version=${apiVersion}`;
-
-  return path;
-}

app/client/api.ts (+101, -23)

@@ -9,6 +9,10 @@ import { ChatMessage, ModelType, useAccessStore, useChatStore } from "../store";
 import { ChatGPTApi } from "./platforms/openai";
 import { GeminiProApi } from "./platforms/google";
 import { ClaudeApi } from "./platforms/anthropic";
+import { ErnieApi } from "./platforms/baidu";
+import { DoubaoApi } from "./platforms/bytedance";
+import { QwenApi } from "./platforms/alibaba";
+
 export const ROLES = ["system", "user", "assistant"] as const;
 export type MessageRole = (typeof ROLES)[number];
 
@@ -30,6 +34,7 @@ export interface RequestMessage {
 
 export interface LLMConfig {
   model: string;
+  providerName?: string;
   temperature?: number;
   top_p?: number;
   stream?: boolean;
@@ -54,6 +59,7 @@ export interface LLMUsage {
 
 export interface LLMModel {
   name: string;
+  displayName?: string;
   available: boolean;
   provider: LLMModelProvider;
 }
@@ -102,6 +108,15 @@ export class ClientApi {
       case ModelProvider.Claude:
         this.llm = new ClaudeApi();
         break;
+      case ModelProvider.Ernie:
+        this.llm = new ErnieApi();
+        break;
+      case ModelProvider.Doubao:
+        this.llm = new DoubaoApi();
+        break;
+      case ModelProvider.Qwen:
+        this.llm = new QwenApi();
+        break;
       default:
         this.llm = new ChatGPTApi();
     }
@@ -155,37 +170,100 @@ export class ClientApi {
 
 export function getHeaders() {
   const accessStore = useAccessStore.getState();
+  const chatStore = useChatStore.getState();
   const headers: Record<string, string> = {
     "Content-Type": "application/json",
     Accept: "application/json",
   };
-  const modelConfig = useChatStore.getState().currentSession().mask.modelConfig;
-  const isGoogle = modelConfig.model.startsWith("gemini");
-  const isAzure = accessStore.provider === ServiceProvider.Azure;
-  const authHeader = isAzure ? "api-key" : "Authorization";
-  const apiKey = isGoogle
-    ? accessStore.googleApiKey
-    : isAzure
-    ? accessStore.azureApiKey
-    : accessStore.openaiApiKey;
+
   const clientConfig = getClientConfig();
-  const makeBearer = (s: string) => `${isAzure ? "" : "Bearer "}${s.trim()}`;
-  const validString = (x: string) => x && x.length > 0;
 
+  function getConfig() {
+    const modelConfig = chatStore.currentSession().mask.modelConfig;
+    const isGoogle = modelConfig.providerName == ServiceProvider.Google;
+    const isAzure = modelConfig.providerName === ServiceProvider.Azure;
+    const isAnthropic = modelConfig.providerName === ServiceProvider.Anthropic;
+    const isBaidu = modelConfig.providerName == ServiceProvider.Baidu;
+    const isByteDance = modelConfig.providerName === ServiceProvider.ByteDance;
+    const isAlibaba = modelConfig.providerName === ServiceProvider.Alibaba;
+    const isEnabledAccessControl = accessStore.enabledAccessControl();
+    const apiKey = isGoogle
+      ? accessStore.googleApiKey
+      : isAzure
+      ? accessStore.azureApiKey
+      : isAnthropic
+      ? accessStore.anthropicApiKey
+      : isByteDance
+      ? accessStore.bytedanceApiKey
+      : isAlibaba
+      ? accessStore.alibabaApiKey
+      : accessStore.openaiApiKey;
+    return {
+      isGoogle,
+      isAzure,
+      isAnthropic,
+      isBaidu,
+      isByteDance,
+      isAlibaba,
+      apiKey,
+      isEnabledAccessControl,
+    };
+  }
+
+  function getAuthHeader(): string {
+    return isAzure ? "api-key" : isAnthropic ? "x-api-key" : "Authorization";
+  }
+
+  function getBearerToken(apiKey: string, noBearer: boolean = false): string {
+    return validString(apiKey)
+      ? `${noBearer ? "" : "Bearer "}${apiKey.trim()}`
+      : "";
+  }
+
+  function validString(x: string): boolean {
+    return x?.length > 0;
+  }
+  const {
+    isGoogle,
+    isAzure,
+    isAnthropic,
+    isBaidu,
+    apiKey,
+    isEnabledAccessControl,
+  } = getConfig();
   // when using google api in app, not set auth header
-  if (!(isGoogle && clientConfig?.isApp)) {
-    // use user's api key first
-    if (validString(apiKey)) {
-      headers[authHeader] = makeBearer(apiKey);
-    } else if (
-      accessStore.enabledAccessControl() &&
-      validString(accessStore.accessCode)
-    ) {
-      headers[authHeader] = makeBearer(
-        ACCESS_CODE_PREFIX + accessStore.accessCode,
-      );
-    }
+  if (isGoogle && clientConfig?.isApp) return headers;
+  // when using baidu api in app, not set auth header
+  if (isBaidu && clientConfig?.isApp) return headers;
+
+  const authHeader = getAuthHeader();
+
+  const bearerToken = getBearerToken(apiKey, isAzure || isAnthropic);
+
+  if (bearerToken) {
+    headers[authHeader] = bearerToken;
+  } else if (isEnabledAccessControl && validString(accessStore.accessCode)) {
+    headers["Authorization"] = getBearerToken(
+      ACCESS_CODE_PREFIX + accessStore.accessCode,
+    );
   }
 
   return headers;
 }
+
+export function getClientApi(provider: ServiceProvider): ClientApi {
+  switch (provider) {
+    case ServiceProvider.Google:
+      return new ClientApi(ModelProvider.GeminiPro);
+    case ServiceProvider.Anthropic:
+      return new ClientApi(ModelProvider.Claude);
+    case ServiceProvider.Baidu:
+      return new ClientApi(ModelProvider.Ernie);
+    case ServiceProvider.ByteDance:
+      return new ClientApi(ModelProvider.Doubao);
+    case ServiceProvider.Alibaba:
+      return new ClientApi(ModelProvider.Qwen);
+    default:
+      return new ClientApi(ModelProvider.GPT);
+  }
+}
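
A hedged usage sketch of the new `getClientApi` dispatch (the session/store wiring is simplified; the model name and callbacks are illustrative):

```ts
import { getClientApi } from "@/app/client/api";
import { ServiceProvider } from "@/app/constant";

// Illustrative call; the real call sites pass the session's modelConfig.
const api = getClientApi(ServiceProvider.Alibaba); // wraps QwenApi
await api.llm.chat({
  messages: [{ role: "user", content: "hello" }],
  config: { model: "qwen-turbo", stream: true },
  onUpdate: (fullText, delta) => console.log(delta),
  onFinish: (message) => console.log("done:", message),
  onError: (e) => console.error(e),
});
```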

app/client/platforms/alibaba.ts (+268, -0)

@@ -0,0 +1,268 @@
+"use client";
+import {
+  ApiPath,
+  Alibaba,
+  ALIBABA_BASE_URL,
+  REQUEST_TIMEOUT_MS,
+} from "@/app/constant";
+import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
+
+import {
+  ChatOptions,
+  getHeaders,
+  LLMApi,
+  LLMModel,
+  MultimodalContent,
+} from "../api";
+import Locale from "../../locales";
+import {
+  EventStreamContentType,
+  fetchEventSource,
+} from "@fortaine/fetch-event-source";
+import { prettyObject } from "@/app/utils/format";
+import { getClientConfig } from "@/app/config/client";
+import { getMessageTextContent, isVisionModel } from "@/app/utils";
+
+export interface OpenAIListModelResponse {
+  object: string;
+  data: Array<{
+    id: string;
+    object: string;
+    root: string;
+  }>;
+}
+
+interface RequestInput {
+  messages: {
+    role: "system" | "user" | "assistant";
+    content: string | MultimodalContent[];
+  }[];
+}
+interface RequestParam {
+  result_format: string;
+  incremental_output?: boolean;
+  temperature: number;
+  repetition_penalty?: number;
+  top_p: number;
+  max_tokens?: number;
+}
+interface RequestPayload {
+  model: string;
+  input: RequestInput;
+  parameters: RequestParam;
+}
+
+export class QwenApi implements LLMApi {
+  path(path: string): string {
+    const accessStore = useAccessStore.getState();
+
+    let baseUrl = "";
+
+    if (accessStore.useCustomConfig) {
+      baseUrl = accessStore.alibabaUrl;
+    }
+
+    if (baseUrl.length === 0) {
+      const isApp = !!getClientConfig()?.isApp;
+      baseUrl = isApp ? ALIBABA_BASE_URL : ApiPath.Alibaba;
+    }
+
+    if (baseUrl.endsWith("/")) {
+      baseUrl = baseUrl.slice(0, baseUrl.length - 1);
+    }
+    if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.Alibaba)) {
+      baseUrl = "https://" + baseUrl;
+    }
+
+    console.log("[Proxy Endpoint] ", baseUrl, path);
+
+    return [baseUrl, path].join("/");
+  }
+
+  extractMessage(res: any) {
+    return res?.output?.choices?.at(0)?.message?.content ?? "";
+  }
+
+  async chat(options: ChatOptions) {
+    const messages = options.messages.map((v) => ({
+      role: v.role,
+      content: getMessageTextContent(v),
+    }));
+
+    const modelConfig = {
+      ...useAppConfig.getState().modelConfig,
+      ...useChatStore.getState().currentSession().mask.modelConfig,
+      ...{
+        model: options.config.model,
+      },
+    };
+
+    const shouldStream = !!options.config.stream;
+    const requestPayload: RequestPayload = {
+      model: modelConfig.model,
+      input: {
+        messages,
+      },
+      parameters: {
+        result_format: "message",
+        incremental_output: shouldStream,
+        temperature: modelConfig.temperature,
+        // max_tokens: modelConfig.max_tokens,
+        top_p: modelConfig.top_p === 1 ? 0.99 : modelConfig.top_p, // qwen top_p is should be < 1
+      },
+    };
+
+    const controller = new AbortController();
+    options.onController?.(controller);
+
+    try {
+      const chatPath = this.path(Alibaba.ChatPath);
+      const chatPayload = {
+        method: "POST",
+        body: JSON.stringify(requestPayload),
+        signal: controller.signal,
+        headers: {
+          ...getHeaders(),
+          "X-DashScope-SSE": shouldStream ? "enable" : "disable",
+        },
+      };
+
+      // make a fetch request
+      const requestTimeoutId = setTimeout(
+        () => controller.abort(),
+        REQUEST_TIMEOUT_MS,
+      );
+
+      if (shouldStream) {
+        let responseText = "";
+        let remainText = "";
+        let finished = false;
+
+        // animate response to make it looks smooth
+        function animateResponseText() {
+          if (finished || controller.signal.aborted) {
+            responseText += remainText;
+            console.log("[Response Animation] finished");
+            if (responseText?.length === 0) {
+              options.onError?.(new Error("empty response from server"));
+            }
+            return;
+          }
+
+          if (remainText.length > 0) {
+            const fetchCount = Math.max(1, Math.round(remainText.length / 60));
+            const fetchText = remainText.slice(0, fetchCount);
+            responseText += fetchText;
+            remainText = remainText.slice(fetchCount);
+            options.onUpdate?.(responseText, fetchText);
+          }
+
+          requestAnimationFrame(animateResponseText);
+        }
+
+        // start animaion
+        animateResponseText();
+
+        const finish = () => {
+          if (!finished) {
+            finished = true;
+            options.onFinish(responseText + remainText);
+          }
+        };
+
+        controller.signal.onabort = finish;
+
+        fetchEventSource(chatPath, {
+          ...chatPayload,
+          async onopen(res) {
+            clearTimeout(requestTimeoutId);
+            const contentType = res.headers.get("content-type");
+            console.log(
+              "[Alibaba] request response content type: ",
+              contentType,
+            );
+
+            if (contentType?.startsWith("text/plain")) {
+              responseText = await res.clone().text();
+              return finish();
+            }
+
+            if (
+              !res.ok ||
+              !res.headers
+                .get("content-type")
+                ?.startsWith(EventStreamContentType) ||
+              res.status !== 200
+            ) {
+              const responseTexts = [responseText];
+              let extraInfo = await res.clone().text();
+              try {
+                const resJson = await res.clone().json();
+                extraInfo = prettyObject(resJson);
+              } catch {}
+
+              if (res.status === 401) {
+                responseTexts.push(Locale.Error.Unauthorized);
+              }
+
+              if (extraInfo) {
+                responseTexts.push(extraInfo);
+              }
+
+              responseText = responseTexts.join("\n\n");
+
+              return finish();
+            }
+          },
+          onmessage(msg) {
+            if (msg.data === "[DONE]" || finished) {
+              return finish();
+            }
+            const text = msg.data;
+            try {
+              const json = JSON.parse(text);
+              const choices = json.output.choices as Array<{
+                message: { content: string };
+              }>;
+              const delta = choices[0]?.message?.content;
+              if (delta) {
+                remainText += delta;
+              }
+            } catch (e) {
+              console.error("[Request] parse error", text, msg);
+            }
+          },
+          onclose() {
+            finish();
+          },
+          onerror(e) {
+            options.onError?.(e);
+            throw e;
+          },
+          openWhenHidden: true,
+        });
+      } else {
+        const res = await fetch(chatPath, chatPayload);
+        clearTimeout(requestTimeoutId);
+
+        const resJson = await res.json();
+        const message = this.extractMessage(resJson);
+        options.onFinish(message);
+      }
+    } catch (e) {
+      console.log("[Request] failed to make a chat request", e);
+      options.onError?.(e as Error);
+    }
+  }
+  async usage() {
+    return {
+      used: 0,
+      total: 0,
+    };
+  }
+
+  async models(): Promise<LLMModel[]> {
+    return [];
+  }
+}
+export { Alibaba };
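
Unlike the OpenAI-style providers, `QwenApi` sends DashScope's envelope: messages go under `input`, generation settings under `parameters`, and streaming is requested via the `X-DashScope-SSE: enable` header rather than a `stream` body field. The request body it assembles looks roughly like this (values are illustrative):

```ts
// Shape of the DashScope request body built by QwenApi above (illustrative values).
const dashScopePayload = {
  model: "qwen-turbo",
  input: {
    messages: [{ role: "user", content: "hello" }],
  },
  parameters: {
    result_format: "message",
    incremental_output: true, // paired with the "X-DashScope-SSE: enable" header
    temperature: 0.5,
    top_p: 0.99,              // the client clamps top_p below 1 for Qwen
  },
};
```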

app/client/platforms/anthropic.ts (+4, -29)

@@ -1,5 +1,5 @@
 import { ACCESS_CODE_PREFIX, Anthropic, ApiPath } from "@/app/constant";
-import { ChatOptions, LLMApi, MultimodalContent } from "../api";
+import { ChatOptions, getHeaders, LLMApi, MultimodalContent } from "../api";
 import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
 import { getClientConfig } from "@/app/config/client";
 import { DEFAULT_API_HOST } from "@/app/constant";
@@ -190,11 +190,10 @@ export class ClaudeApi implements LLMApi {
       body: JSON.stringify(requestBody),
       signal: controller.signal,
       headers: {
-        "Content-Type": "application/json",
-        Accept: "application/json",
-        "x-api-key": accessStore.anthropicApiKey,
+        ...getHeaders(), // get common headers
         "anthropic-version": accessStore.anthropicApiVersion,
-        Authorization: getAuthKey(accessStore.anthropicApiKey),
+        // do not send `anthropicApiKey` in browser!!!
+        // Authorization: getAuthKey(accessStore.anthropicApiKey),
       },
     };
 
@@ -389,27 +388,3 @@ function trimEnd(s: string, end = " ") {
 
   return s;
 }
-
-function bearer(value: string) {
-  return `Bearer ${value.trim()}`;
-}
-
-function getAuthKey(apiKey = "") {
-  const accessStore = useAccessStore.getState();
-  const isApp = !!getClientConfig()?.isApp;
-  let authKey = "";
-
-  if (apiKey) {
-    // use user's api key first
-    authKey = bearer(apiKey);
-  } else if (
-    accessStore.enabledAccessControl() &&
-    !isApp &&
-    !!accessStore.accessCode
-  ) {
-    // or use access code
-    authKey = bearer(ACCESS_CODE_PREFIX + accessStore.accessCode);
-  }
-
-  return authKey;
-}

app/client/platforms/baidu.ts (+273, -0)

@@ -0,0 +1,273 @@
+"use client";
+import {
+  ApiPath,
+  Baidu,
+  BAIDU_BASE_URL,
+  REQUEST_TIMEOUT_MS,
+} from "@/app/constant";
+import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
+import { getAccessToken } from "@/app/utils/baidu";
+
+import {
+  ChatOptions,
+  getHeaders,
+  LLMApi,
+  LLMModel,
+  MultimodalContent,
+} from "../api";
+import Locale from "../../locales";
+import {
+  EventStreamContentType,
+  fetchEventSource,
+} from "@fortaine/fetch-event-source";
+import { prettyObject } from "@/app/utils/format";
+import { getClientConfig } from "@/app/config/client";
+import { getMessageTextContent } from "@/app/utils";
+
+export interface OpenAIListModelResponse {
+  object: string;
+  data: Array<{
+    id: string;
+    object: string;
+    root: string;
+  }>;
+}
+
+interface RequestPayload {
+  messages: {
+    role: "system" | "user" | "assistant";
+    content: string | MultimodalContent[];
+  }[];
+  stream?: boolean;
+  model: string;
+  temperature: number;
+  presence_penalty: number;
+  frequency_penalty: number;
+  top_p: number;
+  max_tokens?: number;
+}
+
+export class ErnieApi implements LLMApi {
+  path(path: string): string {
+    const accessStore = useAccessStore.getState();
+
+    let baseUrl = "";
+
+    if (accessStore.useCustomConfig) {
+      baseUrl = accessStore.baiduUrl;
+    }
+
+    if (baseUrl.length === 0) {
+      const isApp = !!getClientConfig()?.isApp;
+      // do not use proxy for baidubce api
+      baseUrl = isApp ? BAIDU_BASE_URL : ApiPath.Baidu;
+    }
+
+    if (baseUrl.endsWith("/")) {
+      baseUrl = baseUrl.slice(0, baseUrl.length - 1);
+    }
+    if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.Baidu)) {
+      baseUrl = "https://" + baseUrl;
+    }
+
+    console.log("[Proxy Endpoint] ", baseUrl, path);
+
+    return [baseUrl, path].join("/");
+  }
+
+  async chat(options: ChatOptions) {
+    const messages = options.messages.map((v) => ({
+      role: v.role,
+      content: getMessageTextContent(v),
+    }));
+
+    // "error_code": 336006, "error_msg": "the length of messages must be an odd number",
+    if (messages.length % 2 === 0) {
+      messages.unshift({
+        role: "user",
+        content: " ",
+      });
+    }
+
+    const modelConfig = {
+      ...useAppConfig.getState().modelConfig,
+      ...useChatStore.getState().currentSession().mask.modelConfig,
+      ...{
+        model: options.config.model,
+      },
+    };
+
+    const shouldStream = !!options.config.stream;
+    const requestPayload: RequestPayload = {
+      messages,
+      stream: shouldStream,
+      model: modelConfig.model,
+      temperature: modelConfig.temperature,
+      presence_penalty: modelConfig.presence_penalty,
+      frequency_penalty: modelConfig.frequency_penalty,
+      top_p: modelConfig.top_p,
+    };
+
+    console.log("[Request] Baidu payload: ", requestPayload);
+
+    const controller = new AbortController();
+    options.onController?.(controller);
+
+    try {
+      let chatPath = this.path(Baidu.ChatPath(modelConfig.model));
+
+      // getAccessToken can not run in browser, because cors error
+      if (!!getClientConfig()?.isApp) {
+        const accessStore = useAccessStore.getState();
+        if (accessStore.useCustomConfig) {
+          if (accessStore.isValidBaidu()) {
+            const { access_token } = await getAccessToken(
+              accessStore.baiduApiKey,
+              accessStore.baiduSecretKey,
+            );
+            chatPath = `${chatPath}${
+              chatPath.includes("?") ? "&" : "?"
+            }access_token=${access_token}`;
+          }
+        }
+      }
+      const chatPayload = {
+        method: "POST",
+        body: JSON.stringify(requestPayload),
+        signal: controller.signal,
+        headers: getHeaders(),
+      };
+
+      // make a fetch request
+      const requestTimeoutId = setTimeout(
+        () => controller.abort(),
+        REQUEST_TIMEOUT_MS,
+      );
+
+      if (shouldStream) {
+        let responseText = "";
+        let remainText = "";
+        let finished = false;
+
+        // animate response to make it looks smooth
+        function animateResponseText() {
+          if (finished || controller.signal.aborted) {
+            responseText += remainText;
+            console.log("[Response Animation] finished");
+            if (responseText?.length === 0) {
+              options.onError?.(new Error("empty response from server"));
+            }
+            return;
+          }
+
+          if (remainText.length > 0) {
+            const fetchCount = Math.max(1, Math.round(remainText.length / 60));
+            const fetchText = remainText.slice(0, fetchCount);
+            responseText += fetchText;
+            remainText = remainText.slice(fetchCount);
+            options.onUpdate?.(responseText, fetchText);
+          }
+
+          requestAnimationFrame(animateResponseText);
+        }
+
+        // start animaion
+        animateResponseText();
+
+        const finish = () => {
+          if (!finished) {
+            finished = true;
+            options.onFinish(responseText + remainText);
+          }
+        };
+
+        controller.signal.onabort = finish;
+
+        fetchEventSource(chatPath, {
+          ...chatPayload,
+          async onopen(res) {
+            clearTimeout(requestTimeoutId);
+            const contentType = res.headers.get("content-type");
+            console.log("[Baidu] request response content type: ", contentType);
+
+            if (contentType?.startsWith("text/plain")) {
+              responseText = await res.clone().text();
+              return finish();
+            }
+
+            if (
+              !res.ok ||
+              !res.headers
+                .get("content-type")
+                ?.startsWith(EventStreamContentType) ||
+              res.status !== 200
+            ) {
+              const responseTexts = [responseText];
+              let extraInfo = await res.clone().text();
+              try {
+                const resJson = await res.clone().json();
+                extraInfo = prettyObject(resJson);
+              } catch {}
+
+              if (res.status === 401) {
+                responseTexts.push(Locale.Error.Unauthorized);
+              }
+
+              if (extraInfo) {
+                responseTexts.push(extraInfo);
+              }
+
+              responseText = responseTexts.join("\n\n");
+
+              return finish();
+            }
+          },
+          onmessage(msg) {
+            if (msg.data === "[DONE]" || finished) {
+              return finish();
+            }
+            const text = msg.data;
+            try {
+              const json = JSON.parse(text);
+              const delta = json?.result;
+              if (delta) {
+                remainText += delta;
+              }
+            } catch (e) {
+              console.error("[Request] parse error", text, msg);
+            }
+          },
+          onclose() {
+            finish();
+          },
+          onerror(e) {
+            options.onError?.(e);
+            throw e;
+          },
+          openWhenHidden: true,
+        });
+      } else {
+        const res = await fetch(chatPath, chatPayload);
+        clearTimeout(requestTimeoutId);
+
+        const resJson = await res.json();
+        const message = resJson?.result;
+        options.onFinish(message);
+      }
+    } catch (e) {
+      console.log("[Request] failed to make a chat request", e);
+      options.onError?.(e as Error);
+    }
+  }
+  async usage() {
+    return {
+      used: 0,
+      total: 0,
+    };
+  }
+
+  async models(): Promise<LLMModel[]> {
+    return [];
+  }
+}
+export { Baidu };

app/client/platforms/bytedance.ts (+255, -0)

@@ -0,0 +1,255 @@
+"use client";
+import {
+  ApiPath,
+  ByteDance,
+  BYTEDANCE_BASE_URL,
+  REQUEST_TIMEOUT_MS,
+} from "@/app/constant";
+import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
+
+import {
+  ChatOptions,
+  getHeaders,
+  LLMApi,
+  LLMModel,
+  MultimodalContent,
+} from "../api";
+import Locale from "../../locales";
+import {
+  EventStreamContentType,
+  fetchEventSource,
+} from "@fortaine/fetch-event-source";
+import { prettyObject } from "@/app/utils/format";
+import { getClientConfig } from "@/app/config/client";
+import { getMessageTextContent } from "@/app/utils";
+
+export interface OpenAIListModelResponse {
+  object: string;
+  data: Array<{
+    id: string;
+    object: string;
+    root: string;
+  }>;
+}
+
+interface RequestPayload {
+  messages: {
+    role: "system" | "user" | "assistant";
+    content: string | MultimodalContent[];
+  }[];
+  stream?: boolean;
+  model: string;
+  temperature: number;
+  presence_penalty: number;
+  frequency_penalty: number;
+  top_p: number;
+  max_tokens?: number;
+}
+
+export class DoubaoApi implements LLMApi {
+  path(path: string): string {
+    const accessStore = useAccessStore.getState();
+
+    let baseUrl = "";
+
+    if (accessStore.useCustomConfig) {
+      baseUrl = accessStore.bytedanceUrl;
+    }
+
+    if (baseUrl.length === 0) {
+      const isApp = !!getClientConfig()?.isApp;
+      baseUrl = isApp ? BYTEDANCE_BASE_URL : ApiPath.ByteDance;
+    }
+
+    if (baseUrl.endsWith("/")) {
+      baseUrl = baseUrl.slice(0, baseUrl.length - 1);
+    }
+    if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.ByteDance)) {
+      baseUrl = "https://" + baseUrl;
+    }
+
+    console.log("[Proxy Endpoint] ", baseUrl, path);
+
+    return [baseUrl, path].join("/");
+  }
+
+  extractMessage(res: any) {
+    return res.choices?.at(0)?.message?.content ?? "";
+  }
+
+  async chat(options: ChatOptions) {
+    const messages = options.messages.map((v) => ({
+      role: v.role,
+      content: getMessageTextContent(v),
+    }));
+
+    const modelConfig = {
+      ...useAppConfig.getState().modelConfig,
+      ...useChatStore.getState().currentSession().mask.modelConfig,
+      ...{
+        model: options.config.model,
+      },
+    };
+
+    const shouldStream = !!options.config.stream;
+    const requestPayload: RequestPayload = {
+      messages,
+      stream: shouldStream,
+      model: modelConfig.model,
+      temperature: modelConfig.temperature,
+      presence_penalty: modelConfig.presence_penalty,
+      frequency_penalty: modelConfig.frequency_penalty,
+      top_p: modelConfig.top_p,
+    };
+
+    const controller = new AbortController();
+    options.onController?.(controller);
+
+    try {
+      const chatPath = this.path(ByteDance.ChatPath);
+      const chatPayload = {
+        method: "POST",
+        body: JSON.stringify(requestPayload),
+        signal: controller.signal,
+        headers: getHeaders(),
+      };
+
+      // make a fetch request
+      const requestTimeoutId = setTimeout(
+        () => controller.abort(),
+        REQUEST_TIMEOUT_MS,
+      );
+
+      if (shouldStream) {
+        let responseText = "";
+        let remainText = "";
+        let finished = false;
+
+        // animate response to make it looks smooth
+        function animateResponseText() {
+          if (finished || controller.signal.aborted) {
+            responseText += remainText;
+            console.log("[Response Animation] finished");
+            if (responseText?.length === 0) {
+              options.onError?.(new Error("empty response from server"));
+            }
+            return;
+          }
+
+          if (remainText.length > 0) {
+            const fetchCount = Math.max(1, Math.round(remainText.length / 60));
+            const fetchText = remainText.slice(0, fetchCount);
+            responseText += fetchText;
+            remainText = remainText.slice(fetchCount);
+            options.onUpdate?.(responseText, fetchText);
+          }
+
+          requestAnimationFrame(animateResponseText);
+        }
+
+        // start animation
+        animateResponseText();
+
+        const finish = () => {
+          if (!finished) {
+            finished = true;
+            options.onFinish(responseText + remainText);
+          }
+        };
+
+        controller.signal.onabort = finish;
+
+        fetchEventSource(chatPath, {
+          ...chatPayload,
+          async onopen(res) {
+            clearTimeout(requestTimeoutId);
+            const contentType = res.headers.get("content-type");
+            console.log(
+              "[ByteDance] request response content type: ",
+              contentType,
+            );
+
+            if (contentType?.startsWith("text/plain")) {
+              responseText = await res.clone().text();
+              return finish();
+            }
+
+            if (
+              !res.ok ||
+              !res.headers
+                .get("content-type")
+                ?.startsWith(EventStreamContentType) ||
+              res.status !== 200
+            ) {
+              const responseTexts = [responseText];
+              let extraInfo = await res.clone().text();
+              try {
+                const resJson = await res.clone().json();
+                extraInfo = prettyObject(resJson);
+              } catch {}
+
+              if (res.status === 401) {
+                responseTexts.push(Locale.Error.Unauthorized);
+              }
+
+              if (extraInfo) {
+                responseTexts.push(extraInfo);
+              }
+
+              responseText = responseTexts.join("\n\n");
+
+              return finish();
+            }
+          },
+          onmessage(msg) {
+            if (msg.data === "[DONE]" || finished) {
+              return finish();
+            }
+            const text = msg.data;
+            try {
+              const json = JSON.parse(text);
+              const choices = json.choices as Array<{
+                delta: { content: string };
+              }>;
+              const delta = choices[0]?.delta?.content;
+              if (delta) {
+                remainText += delta;
+              }
+            } catch (e) {
+              console.error("[Request] parse error", text, msg);
+            }
+          },
+          onclose() {
+            finish();
+          },
+          onerror(e) {
+            options.onError?.(e);
+            throw e;
+          },
+          openWhenHidden: true,
+        });
+      } else {
+        const res = await fetch(chatPath, chatPayload);
+        clearTimeout(requestTimeoutId);
+
+        const resJson = await res.json();
+        const message = this.extractMessage(resJson);
+        options.onFinish(message);
+      }
+    } catch (e) {
+      console.log("[Request] failed to make a chat request", e);
+      options.onError?.(e as Error);
+    }
+  }
+  async usage() {
+    return {
+      used: 0,
+      total: 0,
+    };
+  }
+
+  async models(): Promise<LLMModel[]> {
+    return [];
+  }
+}
+export { ByteDance };
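
The streaming branch above buffers incoming deltas in remainText and drips them into responseText once per animation frame, so tokens appear at a steady pace regardless of how bursty the network is. A minimal, self-contained sketch of that pacing technique (illustrative only; onUpdate and the pacing divisor are assumptions, not the exact DoubaoApi code):

// TypeScript, browser environment: requestAnimationFrame-driven streaming animation.
function animateStream(onUpdate: (full: string, delta: string) => void) {
  let responseText = "";
  let remainText = "";
  let finished = false;

  function frame() {
    if (finished) {
      // flush whatever is still pending when the stream ends
      onUpdate(responseText + remainText, remainText);
      return;
    }
    if (remainText.length > 0) {
      // emit roughly 1/60th of the pending text per frame, at least one character
      const count = Math.max(1, Math.round(remainText.length / 60));
      const chunk = remainText.slice(0, count);
      responseText += chunk;
      remainText = remainText.slice(count);
      onUpdate(responseText, chunk);
    }
    requestAnimationFrame(frame);
  }
  frame();

  return {
    push(delta: string) {
      remainText += delta; // called from the SSE onmessage handler
    },
    finish() {
      finished = true;
    },
  };
}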

+ 86 - 64
app/client/platforms/google.ts

@@ -3,6 +3,12 @@ import { ChatOptions, getHeaders, LLMApi, LLMModel, LLMUsage } from "../api";
 import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
 import { getClientConfig } from "@/app/config/client";
 import { DEFAULT_API_HOST } from "@/app/constant";
+import Locale from "../../locales";
+import {
+  EventStreamContentType,
+  fetchEventSource,
+} from "@fortaine/fetch-event-source";
+import { prettyObject } from "@/app/utils/format";
 import {
   getMessageTextContent,
   getMessageImages,
@@ -20,7 +26,7 @@ export class GeminiProApi implements LLMApi {
     );
   }
   async chat(options: ChatOptions): Promise<void> {
-    // const apiClient = this;
+    const apiClient = this;
     let multimodal = false;
     const messages = options.messages.map((v) => {
       let parts: any[] = [{ text: getMessageTextContent(v) }];
@@ -120,7 +126,9 @@ export class GeminiProApi implements LLMApi {
 
       if (!baseUrl) {
         baseUrl = isApp
-          ? DEFAULT_API_HOST + "/api/proxy/google/" + Google.ChatPath(modelConfig.model)
+          ? DEFAULT_API_HOST +
+            "/api/proxy/google/" +
+            Google.ChatPath(modelConfig.model)
           : this.path(Google.ChatPath(modelConfig.model));
       }
 
@@ -139,16 +147,17 @@ export class GeminiProApi implements LLMApi {
         () => controller.abort(),
         REQUEST_TIMEOUT_MS,
       );
-      
+
       if (shouldStream) {
         let responseText = "";
         let remainText = "";
         let finished = false;
 
-        let existingTexts: string[] = [];
         const finish = () => {
-          finished = true;
-          options.onFinish(existingTexts.join(""));
+          if (!finished) {
+            finished = true;
+            options.onFinish(responseText + remainText);
+          }
         };
 
         // animate response to make it look smooth
@@ -173,72 +182,85 @@ export class GeminiProApi implements LLMApi {
         // start animation
         animateResponseText();
 
-        fetch(
-          baseUrl.replace("generateContent", "streamGenerateContent"),
-          chatPayload,
-        )
-          .then((response) => {
-            const reader = response?.body?.getReader();
-            const decoder = new TextDecoder();
-            let partialData = "";
+        controller.signal.onabort = finish;
+
+        // https://github.com/google-gemini/cookbook/blob/main/quickstarts/rest/Streaming_REST.ipynb
+        const chatPath =
+          baseUrl.replace("generateContent", "streamGenerateContent") +
+          (baseUrl.indexOf("?") > -1 ? "&alt=sse" : "?alt=sse");
+        fetchEventSource(chatPath, {
+          ...chatPayload,
+          async onopen(res) {
+            clearTimeout(requestTimeoutId);
+            const contentType = res.headers.get("content-type");
+            console.log(
+              "[Gemini] request response content type: ",
+              contentType,
+            );
+
+            if (contentType?.startsWith("text/plain")) {
+              responseText = await res.clone().text();
+              return finish();
+            }
 
-            return reader?.read().then(function processText({
-              done,
-              value,
-            }): Promise<any> {
-              if (done) {
-                if (response.status !== 200) {
-                  try {
-                    let data = JSON.parse(ensureProperEnding(partialData));
-                    if (data && data[0].error) {
-                      options.onError?.(new Error(data[0].error.message));
-                    } else {
-                      options.onError?.(new Error("Request failed"));
-                    }
-                  } catch (_) {
-                    options.onError?.(new Error("Request failed"));
-                  }
-                }
+            if (
+              !res.ok ||
+              !res.headers
+                .get("content-type")
+                ?.startsWith(EventStreamContentType) ||
+              res.status !== 200
+            ) {
+              const responseTexts = [responseText];
+              let extraInfo = await res.clone().text();
+              try {
+                const resJson = await res.clone().json();
+                extraInfo = prettyObject(resJson);
+              } catch {}
 
-                console.log("Stream complete");
-                // options.onFinish(responseText + remainText);
-                finished = true;
-                return Promise.resolve();
+              if (res.status === 401) {
+                responseTexts.push(Locale.Error.Unauthorized);
               }
 
-              partialData += decoder.decode(value, { stream: true });
+              if (extraInfo) {
+                responseTexts.push(extraInfo);
+              }
 
-              try {
-                let data = JSON.parse(ensureProperEnding(partialData));
+              responseText = responseTexts.join("\n\n");
 
-                const textArray = data.reduce(
-                  (acc: string[], item: { candidates: any[] }) => {
-                    const texts = item.candidates.map((candidate) =>
-                      candidate.content.parts
-                        .map((part: { text: any }) => part.text)
-                        .join(""),
-                    );
-                    return acc.concat(texts);
-                  },
-                  [],
-                );
+              return finish();
+            }
+          },
+          onmessage(msg) {
+            if (msg.data === "[DONE]" || finished) {
+              return finish();
+            }
+            const text = msg.data;
+            try {
+              const json = JSON.parse(text);
+              const delta = apiClient.extractMessage(json);
 
-                if (textArray.length > existingTexts.length) {
-                  const deltaArray = textArray.slice(existingTexts.length);
-                  existingTexts = textArray;
-                  remainText += deltaArray.join("");
-                }
-              } catch (error) {
-                // console.log("[Response Animation] error: ", error,partialData);
-                // skip error message when parsing json
+              if (delta) {
+                remainText += delta;
               }
 
-              return reader.read().then(processText);
-            });
-          })
-          .catch((error) => {
-            console.error("Error:", error);
-          });
+              const blockReason = json?.promptFeedback?.blockReason;
+              if (blockReason) {
+                // the prompt was blocked by Gemini's safety filters
+                console.log(`[Google] [Safety Ratings] result:`, blockReason);
+              }
+            } catch (e) {
+              console.error("[Request] parse error", text, msg);
+            }
+          },
+          onclose() {
+            finish();
+          },
+          onerror(e) {
+            options.onError?.(e);
+            throw e;
+          },
+          openWhenHidden: true,
+        });
       } else {
         const res = await fetch(baseUrl, chatPayload);
         clearTimeout(requestTimeoutId);
@@ -252,7 +274,7 @@ export class GeminiProApi implements LLMApi {
             ),
           );
         }
-        const message = this.extractMessage(resJson);
+        const message = apiClient.extractMessage(resJson);
         options.onFinish(message);
       }
     } catch (e) {
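
The Gemini client above drops the hand-rolled ReadableStream parser in favor of SSE: it rewrites generateContent to streamGenerateContent and appends alt=sse so the API emits Server-Sent Events that fetchEventSource can consume. A small standalone sketch of that URL rewrite (the sample path in the comment is hypothetical):

// TypeScript: build the Gemini SSE streaming endpoint from a generateContent URL.
function toGeminiStreamPath(generateContentUrl: string): string {
  const streamUrl = generateContentUrl.replace(
    "generateContent",
    "streamGenerateContent",
  );
  // alt=sse switches the response format from a JSON array to Server-Sent Events
  return streamUrl + (streamUrl.includes("?") ? "&alt=sse" : "?alt=sse");
}

// toGeminiStreamPath("/api/google/v1beta/models/gemini-pro:generateContent?key=K")
//   => "/api/google/v1beta/models/gemini-pro:streamGenerateContent?key=K&alt=sse"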

+ 42 - 13
app/client/platforms/openai.ts

@@ -1,13 +1,16 @@
 "use client";
+// Azure and OpenAI use the same models, so they share this LLMApi implementation.
 import {
   ApiPath,
   DEFAULT_API_HOST,
   DEFAULT_MODELS,
   OpenaiPath,
+  Azure,
   REQUEST_TIMEOUT_MS,
   ServiceProvider,
 } from "@/app/constant";
 import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
+import { collectModelsWithDefaultModel } from "@/app/utils/model";
 
 import {
   ChatOptions,
@@ -24,7 +27,6 @@ import {
 } from "@fortaine/fetch-event-source";
 import { prettyObject } from "@/app/utils/format";
 import { getClientConfig } from "@/app/config/client";
-import { makeAzurePath } from "@/app/azure";
 import {
   getMessageTextContent,
   getMessageImages,
@@ -40,7 +42,7 @@ export interface OpenAIListModelResponse {
   }>;
 }
 
-interface RequestPayload {
+export interface RequestPayload {
   messages: {
     role: "system" | "user" | "assistant";
     content: string | MultimodalContent[];
@@ -62,33 +64,31 @@ export class ChatGPTApi implements LLMApi {
 
     let baseUrl = "";
 
+    const isAzure = path.includes("deployments");
     if (accessStore.useCustomConfig) {
-      const isAzure = accessStore.provider === ServiceProvider.Azure;
-
       if (isAzure && !accessStore.isValidAzure()) {
         throw Error(
           "incomplete azure config, please check it in your settings page",
         );
       }
 
-      if (isAzure) {
-        path = makeAzurePath(path, accessStore.azureApiVersion);
-      }
-
       baseUrl = isAzure ? accessStore.azureUrl : accessStore.openaiUrl;
     }
 
     if (baseUrl.length === 0) {
       const isApp = !!getClientConfig()?.isApp;
-      baseUrl = isApp
-        ? DEFAULT_API_HOST + "/proxy" + ApiPath.OpenAI
-        : ApiPath.OpenAI;
+      const apiPath = isAzure ? ApiPath.Azure : ApiPath.OpenAI;
+      baseUrl = isApp ? DEFAULT_API_HOST + "/proxy" + apiPath : apiPath;
     }
 
     if (baseUrl.endsWith("/")) {
       baseUrl = baseUrl.slice(0, baseUrl.length - 1);
     }
-    if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.OpenAI)) {
+    if (
+      !baseUrl.startsWith("http") &&
+      !isAzure &&
+      !baseUrl.startsWith(ApiPath.OpenAI)
+    ) {
       baseUrl = "https://" + baseUrl;
     }
 
@@ -113,6 +113,7 @@ export class ChatGPTApi implements LLMApi {
       ...useChatStore.getState().currentSession().mask.modelConfig,
       ...{
         model: options.config.model,
+        providerName: options.config.providerName,
       },
     };
 
@@ -140,7 +141,35 @@ export class ChatGPTApi implements LLMApi {
     options.onController?.(controller);
 
     try {
-      const chatPath = this.path(OpenaiPath.ChatPath);
+      let chatPath = "";
+      if (modelConfig.providerName === ServiceProvider.Azure) {
+        // find the model and use its displayName as the Azure deployment name
+        const { models: configModels, customModels: configCustomModels } =
+          useAppConfig.getState();
+        const {
+          defaultModel,
+          customModels: accessCustomModels,
+          useCustomConfig,
+        } = useAccessStore.getState();
+        const models = collectModelsWithDefaultModel(
+          configModels,
+          [configCustomModels, accessCustomModels].join(","),
+          defaultModel,
+        );
+        const model = models.find(
+          (model) =>
+            model.name === modelConfig.model &&
+            model?.provider?.providerName === ServiceProvider.Azure,
+        );
+        chatPath = this.path(
+          Azure.ChatPath(
+            (model?.displayName ?? model?.name) as string,
+            useCustomConfig ? useAccessStore.getState().azureApiVersion : "",
+          ),
+        );
+      } else {
+        chatPath = this.path(OpenaiPath.ChatPath);
+      }
       const chatPayload = {
         method: "POST",
         body: JSON.stringify(requestPayload),
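
When the selected model's provider is Azure, the client above looks the model up in the merged model table and uses its displayName as the Azure deployment name, then builds a deployments/... path with the configured API version. A hedged sketch of the resulting path shape (the deployment name and version below are made-up examples; the real helper is Azure.ChatPath in app/constant.ts):

// TypeScript: simplified stand-in for Azure.ChatPath.
const azureChatPath = (deployName: string, apiVersion: string) =>
  `deployments/${deployName}/chat/completions?api-version=${apiVersion}`;

// A custom model entry such as "+gpt-4o@azure=my-gpt4o-deploy" sets displayName to
// "my-gpt4o-deploy", so the request path becomes:
console.log(azureChatPath("my-gpt4o-deploy", "2023-08-01-preview"));
// => "deployments/my-gpt4o-deploy/chat/completions?api-version=2023-08-01-preview"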

+ 43 - 12
app/components/chat.tsx

@@ -88,6 +88,7 @@ import {
   Path,
   REQUEST_TIMEOUT_MS,
   UNFINISHED_INPUT,
+  ServiceProvider,
 } from "../constant";
 import { Avatar } from "./emoji";
 import { ContextPrompts, MaskAvatar, MaskConfig } from "./mask";
@@ -448,6 +449,9 @@ export function ChatActions(props: {
 
   // switch model
   const currentModel = chatStore.currentSession().mask.modelConfig.model;
+  const currentProviderName =
+    chatStore.currentSession().mask.modelConfig?.providerName ||
+    ServiceProvider.OpenAI;
   const allModels = useAllModels();
   const models = useMemo(() => {
     const filteredModels = allModels.filter((m) => m.available);
@@ -463,6 +467,14 @@ export function ChatActions(props: {
       return filteredModels;
     }
   }, [allModels]);
+  const currentModelName = useMemo(() => {
+    const model = models.find(
+      (m) =>
+        m.name == currentModel &&
+        m?.provider?.providerName == currentProviderName,
+    );
+    return model?.displayName ?? "";
+  }, [models, currentModel, currentProviderName]);
   const [showModelSelector, setShowModelSelector] = useState(false);
   const [showUploadImage, setShowUploadImage] = useState(false);
 
@@ -479,13 +491,17 @@ export function ChatActions(props: {
     const isUnavaliableModel = !models.some((m) => m.name === currentModel);
     if (isUnavaliableModel && models.length > 0) {
      // fall back to the default model (or the first available one) if the current model is unavailable
-      let nextModel: ModelType = (
-        models.find((model) => model.isDefault) || models[0]
-      ).name;
-      chatStore.updateCurrentSession(
-        (session) => (session.mask.modelConfig.model = nextModel),
+      let nextModel = models.find((model) => model.isDefault) || models[0];
+      chatStore.updateCurrentSession((session) => {
+        session.mask.modelConfig.model = nextModel.name;
+        session.mask.modelConfig.providerName = nextModel?.provider
+          ?.providerName as ServiceProvider;
+      });
+      showToast(
+        nextModel?.provider?.providerName == "ByteDance"
+          ? nextModel.displayName
+          : nextModel.name,
       );
-      showToast(nextModel);
     }
   }, [chatStore, currentModel, models]);
 
@@ -567,25 +583,40 @@ export function ChatActions(props: {
 
       <ChatAction
         onClick={() => setShowModelSelector(true)}
-        text={currentModel}
+        text={currentModelName}
         icon={<RobotIcon />}
       />
 
       {showModelSelector && (
         <Selector
-          defaultSelectedValue={currentModel}
+          defaultSelectedValue={`${currentModel}@${currentProviderName}`}
           items={models.map((m) => ({
-            title: m.displayName,
-            value: m.name,
+            title: `${m.displayName}${
+              m?.provider?.providerName
+                ? "(" + m?.provider?.providerName + ")"
+                : ""
+            }`,
+            value: `${m.name}@${m?.provider?.providerName}`,
           }))}
           onClose={() => setShowModelSelector(false)}
           onSelection={(s) => {
             if (s.length === 0) return;
+            const [model, providerName] = s[0].split("@");
             chatStore.updateCurrentSession((session) => {
-              session.mask.modelConfig.model = s[0] as ModelType;
+              session.mask.modelConfig.model = model as ModelType;
+              session.mask.modelConfig.providerName =
+                providerName as ServiceProvider;
               session.mask.syncGlobalConfig = false;
             });
-            showToast(s[0]);
+            if (providerName == "ByteDance") {
+              const selectedModel = models.find(
+                (m) =>
+                  m.name == model && m?.provider?.providerName == providerName,
+              );
+              showToast(selectedModel?.displayName ?? "");
+            } else {
+              showToast(model);
+            }
           }}
         />
       )}
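
Because two providers can now expose the same model name, the selector above encodes its value as <model>@<providerName> and splits it back apart on selection before writing both fields into the session's modelConfig. A tiny round-trip sketch (the helper names are hypothetical):

// TypeScript: the "<model>@<provider>" selector value convention used above.
type ModelChoice = { model: string; providerName: string };

const toSelectorValue = (c: ModelChoice) => `${c.model}@${c.providerName}`;

const fromSelectorValue = (value: string): ModelChoice => {
  const [model, providerName] = value.split("@");
  return { model, providerName };
};

// toSelectorValue({ model: "gpt-4o", providerName: "Azure" }) === "gpt-4o@Azure"
// fromSelectorValue("Doubao-lite-4k@ByteDance")
//   => { model: "Doubao-lite-4k", providerName: "ByteDance" }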

+ 3 - 11
app/components/exporter.tsx

@@ -36,11 +36,10 @@ import { toBlob, toPng } from "html-to-image";
 import { DEFAULT_MASK_AVATAR } from "../store/mask";
 
 import { prettyObject } from "../utils/format";
-import { EXPORT_MESSAGE_CLASS_NAME, ModelProvider } from "../constant";
+import { EXPORT_MESSAGE_CLASS_NAME } from "../constant";
 import { getClientConfig } from "../config/client";
-import { ClientApi } from "../client/api";
+import { type ClientApi, getClientApi } from "../client/api";
 import { getMessageTextContent } from "../utils";
-import { identifyDefaultClaudeModel } from "../utils/checkers";
 
 const Markdown = dynamic(async () => (await import("./markdown")).Markdown, {
   loading: () => <LoadingIcon />,
@@ -313,14 +312,7 @@ export function PreviewActions(props: {
   const onRenderMsgs = (msgs: ChatMessage[]) => {
     setShouldExport(false);
 
-    var api: ClientApi;
-    if (config.modelConfig.model.startsWith("gemini")) {
-      api = new ClientApi(ModelProvider.GeminiPro);
-    } else if (identifyDefaultClaudeModel(config.modelConfig.model)) {
-      api = new ClientApi(ModelProvider.Claude);
-    } else {
-      api = new ClientApi(ModelProvider.GPT);
-    }
+    const api: ClientApi = getClientApi(config.modelConfig.providerName);
 
     api
       .share(msgs)

+ 5 - 11
app/components/home.tsx

@@ -12,7 +12,7 @@ import LoadingIcon from "../icons/three-dots.svg";
 import { getCSSVar, useMobileScreen } from "../utils";
 
 import dynamic from "next/dynamic";
-import { ModelProvider, Path, SlotID } from "../constant";
+import { Path, SlotID } from "../constant";
 import { ErrorBoundary } from "./error";
 
 import { getISOLang, getLang } from "../locales";
@@ -27,9 +27,8 @@ import { SideBar } from "./sidebar";
 import { useAppConfig } from "../store/config";
 import { AuthPage } from "./auth";
 import { getClientConfig } from "../config/client";
-import { ClientApi } from "../client/api";
+import { type ClientApi, getClientApi } from "../client/api";
 import { useAccessStore } from "../store";
-import { identifyDefaultClaudeModel } from "../utils/checkers";
 import { initDB } from "react-indexed-db-hook";
 
 export function Loading(props: { noLogo?: boolean }) {
@@ -180,14 +179,9 @@ function Screen() {
 
 export function useLoadData() {
   const config = useAppConfig();
-  var api: ClientApi;
-  if (config.modelConfig.model.startsWith("gemini")) {
-    api = new ClientApi(ModelProvider.GeminiPro);
-  } else if (identifyDefaultClaudeModel(config.modelConfig.model)) {
-    api = new ClientApi(ModelProvider.Claude);
-  } else {
-    api = new ClientApi(ModelProvider.GPT);
-  }
+
+  const api: ClientApi = getClientApi(config.modelConfig.providerName);
+
   useEffect(() => {
     (async () => {
       const models = await api.llm.models();

+ 10 - 9
app/components/model-config.tsx

@@ -1,3 +1,4 @@
+import { ServiceProvider } from "@/app/constant";
 import { ModalConfigValidator, ModelConfig } from "../store";
 
 import Locale from "../locales";
@@ -10,25 +11,25 @@ export function ModelConfigList(props: {
   updateConfig: (updater: (config: ModelConfig) => void) => void;
 }) {
   const allModels = useAllModels();
+  const value = `${props.modelConfig.model}@${props.modelConfig?.providerName}`;
 
   return (
     <>
       <ListItem title={Locale.Settings.Model}>
         <Select
-          value={props.modelConfig.model}
+          value={value}
           onChange={(e) => {
-            props.updateConfig(
-              (config) =>
-                (config.model = ModalConfigValidator.model(
-                  e.currentTarget.value,
-                )),
-            );
+            const [model, providerName] = e.currentTarget.value.split("@");
+            props.updateConfig((config) => {
+              config.model = ModalConfigValidator.model(model);
+              config.providerName = providerName as ServiceProvider;
+            });
           }}
         >
           {allModels
             .filter((v) => v.available)
             .map((v, i) => (
-              <option value={v.name} key={i}>
+              <option value={`${v.name}@${v.provider?.providerName}`} key={i}>
                 {v.displayName}({v.provider?.providerName})
               </option>
             ))}
@@ -92,7 +93,7 @@ export function ModelConfigList(props: {
         ></input>
       </ListItem>
 
-      {props.modelConfig.model.startsWith("gemini") ? null : (
+      {props.modelConfig?.providerName == ServiceProvider.Google ? null : (
         <>
           <ListItem
             title={Locale.Settings.PresencePenalty.Title}

+ 153 - 0
app/components/settings.tsx

@@ -53,6 +53,9 @@ import Link from "next/link";
 import {
   Anthropic,
   Azure,
+  Baidu,
+  ByteDance,
+  Alibaba,
   Google,
   OPENAI_BASE_URL,
   Path,
@@ -1187,6 +1190,156 @@ export function Settings() {
                       </ListItem>
                     </>
                   )}
+                  {accessStore.provider === ServiceProvider.Baidu && (
+                    <>
+                      <ListItem
+                        title={Locale.Settings.Access.Baidu.Endpoint.Title}
+                        subTitle={
+                          Locale.Settings.Access.Baidu.Endpoint.SubTitle +
+                          Baidu.ExampleEndpoint
+                        }
+                      >
+                        <input
+                          type="text"
+                          value={accessStore.baiduUrl}
+                          placeholder={Baidu.ExampleEndpoint}
+                          onChange={(e) =>
+                            accessStore.update(
+                              (access) =>
+                                (access.baiduUrl = e.currentTarget.value),
+                            )
+                          }
+                        ></input>
+                      </ListItem>
+                      <ListItem
+                        title={Locale.Settings.Access.Baidu.ApiKey.Title}
+                        subTitle={Locale.Settings.Access.Baidu.ApiKey.SubTitle}
+                      >
+                        <PasswordInput
+                          value={accessStore.baiduApiKey}
+                          type="text"
+                          placeholder={
+                            Locale.Settings.Access.Baidu.ApiKey.Placeholder
+                          }
+                          onChange={(e) => {
+                            accessStore.update(
+                              (access) =>
+                                (access.baiduApiKey = e.currentTarget.value),
+                            );
+                          }}
+                        />
+                      </ListItem>
+                      <ListItem
+                        title={Locale.Settings.Access.Baidu.SecretKey.Title}
+                        subTitle={
+                          Locale.Settings.Access.Baidu.SecretKey.SubTitle
+                        }
+                      >
+                        <PasswordInput
+                          value={accessStore.baiduSecretKey}
+                          type="text"
+                          placeholder={
+                            Locale.Settings.Access.Baidu.SecretKey.Placeholder
+                          }
+                          onChange={(e) => {
+                            accessStore.update(
+                              (access) =>
+                                (access.baiduSecretKey = e.currentTarget.value),
+                            );
+                          }}
+                        />
+                      </ListItem>
+                    </>
+                  )}
+
+                  {accessStore.provider === ServiceProvider.ByteDance && (
+                    <>
+                      <ListItem
+                        title={Locale.Settings.Access.ByteDance.Endpoint.Title}
+                        subTitle={
+                          Locale.Settings.Access.ByteDance.Endpoint.SubTitle +
+                          ByteDance.ExampleEndpoint
+                        }
+                      >
+                        <input
+                          type="text"
+                          value={accessStore.bytedanceUrl}
+                          placeholder={ByteDance.ExampleEndpoint}
+                          onChange={(e) =>
+                            accessStore.update(
+                              (access) =>
+                                (access.bytedanceUrl = e.currentTarget.value),
+                            )
+                          }
+                        ></input>
+                      </ListItem>
+                      <ListItem
+                        title={Locale.Settings.Access.ByteDance.ApiKey.Title}
+                        subTitle={
+                          Locale.Settings.Access.ByteDance.ApiKey.SubTitle
+                        }
+                      >
+                        <PasswordInput
+                          value={accessStore.bytedanceApiKey}
+                          type="text"
+                          placeholder={
+                            Locale.Settings.Access.ByteDance.ApiKey.Placeholder
+                          }
+                          onChange={(e) => {
+                            accessStore.update(
+                              (access) =>
+                                (access.bytedanceApiKey =
+                                  e.currentTarget.value),
+                            );
+                          }}
+                        />
+                      </ListItem>
+                    </>
+                  )}
+
+                  {accessStore.provider === ServiceProvider.Alibaba && (
+                    <>
+                      <ListItem
+                        title={Locale.Settings.Access.Alibaba.Endpoint.Title}
+                        subTitle={
+                          Locale.Settings.Access.Alibaba.Endpoint.SubTitle +
+                          Alibaba.ExampleEndpoint
+                        }
+                      >
+                        <input
+                          type="text"
+                          value={accessStore.alibabaUrl}
+                          placeholder={Alibaba.ExampleEndpoint}
+                          onChange={(e) =>
+                            accessStore.update(
+                              (access) =>
+                                (access.alibabaUrl = e.currentTarget.value),
+                            )
+                          }
+                        ></input>
+                      </ListItem>
+                      <ListItem
+                        title={Locale.Settings.Access.Alibaba.ApiKey.Title}
+                        subTitle={
+                          Locale.Settings.Access.Alibaba.ApiKey.SubTitle
+                        }
+                      >
+                        <PasswordInput
+                          value={accessStore.alibabaApiKey}
+                          type="text"
+                          placeholder={
+                            Locale.Settings.Access.Alibaba.ApiKey.Placeholder
+                          }
+                          onChange={(e) => {
+                            accessStore.update(
+                              (access) =>
+                                (access.alibabaApiKey = e.currentTarget.value),
+                            );
+                          }}
+                        />
+                      </ListItem>
+                    </>
+                  )}
                 </>
               )}
             </>

+ 0 - 1
app/components/sidebar.tsx

@@ -31,7 +31,6 @@ import { Link, useLocation, useNavigate } from "react-router-dom";
 import { isIOS, useMobileScreen } from "../utils";
 import dynamic from "next/dynamic";
 import { Selector, showConfirm, showToast } from "./ui-lib";
-import de from "@/app/locales/de";
 
 const ChatList = dynamic(async () => (await import("./chat-list")).ChatList, {
   loading: () => null,

+ 43 - 2
app/config/server.ts

@@ -23,6 +23,10 @@ declare global {
       CUSTOM_MODELS?: string; // to control custom models
      DEFAULT_MODEL?: string; // to control the default model in every new chat window
 
+      // stability only
+      STABILITY_URL?: string;
+      STABILITY_API_KEY?: string;
+
       // azure only
       AZURE_URL?: string; // https://{azure-url}/openai/deployments/{deploy-name}
       AZURE_API_KEY?: string;
@@ -35,6 +39,24 @@ declare global {
       // google tag manager
       GTM_ID?: string;
 
+      // anthropic only
+      ANTHROPIC_URL?: string;
+      ANTHROPIC_API_KEY?: string;
+      ANTHROPIC_API_VERSION?: string;
+
+      // baidu only
+      BAIDU_URL?: string;
+      BAIDU_API_KEY?: string;
+      BAIDU_SECRET_KEY?: string;
+
+      // bytedance only
+      BYTEDANCE_URL?: string;
+      BYTEDANCE_API_KEY?: string;
+
+      // alibaba only
+      ALIBABA_URL?: string;
+      ALIBABA_API_KEY?: string;
+
       // custom template for preprocessing user input
       DEFAULT_INPUT_TEMPLATE?: string;
     }
@@ -89,10 +111,15 @@ export const getServerSideConfig = () => {
     if (defaultModel.startsWith("gpt-4")) defaultModel = "";
   }
 
+  const isStability = !!process.env.STABILITY_API_KEY;
+
   const isAzure = !!process.env.AZURE_URL;
   const isGoogle = !!process.env.GOOGLE_API_KEY;
   const isAnthropic = !!process.env.ANTHROPIC_API_KEY;
 
+  const isBaidu = !!process.env.BAIDU_API_KEY;
+  const isBytedance = !!process.env.BYTEDANCE_API_KEY;
+  const isAlibaba = !!process.env.ALIBABA_API_KEY;
   // const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? "";
   // const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
   // const randomIndex = Math.floor(Math.random() * apiKeys.length);
@@ -110,6 +137,10 @@ export const getServerSideConfig = () => {
     apiKey: getApiKey(process.env.OPENAI_API_KEY),
     openaiOrgId: process.env.OPENAI_ORG_ID,
 
+    isStability,
+    stabilityUrl: process.env.STABILITY_URL,
+    stabilityApiKey: getApiKey(process.env.STABILITY_API_KEY),
+
     isAzure,
     azureUrl: process.env.AZURE_URL,
     azureApiKey: getApiKey(process.env.AZURE_API_KEY),
@@ -124,8 +155,18 @@ export const getServerSideConfig = () => {
     anthropicApiVersion: process.env.ANTHROPIC_API_VERSION,
     anthropicUrl: process.env.ANTHROPIC_URL,
 
-    stabilityUrl: process.env.STABILITY_URL,
-    stabilityApiKey: process.env.STABILITY_API_KEY,
+    isBaidu,
+    baiduUrl: process.env.BAIDU_URL,
+    baiduApiKey: getApiKey(process.env.BAIDU_API_KEY),
+    baiduSecretKey: process.env.BAIDU_SECRET_KEY,
+
+    isBytedance,
+    bytedanceApiKey: getApiKey(process.env.BYTEDANCE_API_KEY),
+    bytedanceUrl: process.env.BYTEDANCE_URL,
+
+    isAlibaba,
+    alibabaUrl: process.env.ALIBABA_URL,
+    alibabaApiKey: getApiKey(process.env.ALIBABA_API_KEY),
 
     gtmId: process.env.GTM_ID,
 

+ 119 - 4
app/constant.ts

@@ -10,12 +10,20 @@ export const FETCH_COMMIT_URL = `https://api.github.com/repos/${OWNER}/${REPO}/c
 export const FETCH_TAG_URL = `https://api.github.com/repos/${OWNER}/${REPO}/tags?per_page=1`;
 export const RUNTIME_CONFIG_DOM = "danger-runtime-config";
 
+export const STABILITY_BASE_URL = "https://api.stability.ai";
+
 export const DEFAULT_API_HOST = "https://api.nextchat.dev";
 export const OPENAI_BASE_URL = "https://api.openai.com";
 export const ANTHROPIC_BASE_URL = "https://api.anthropic.com";
 
 export const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/";
-export const STABILITY_BASE_URL = "https://api.stability.ai";
+
+export const BAIDU_BASE_URL = "https://aip.baidubce.com";
+export const BAIDU_OATUH_URL = `${BAIDU_BASE_URL}/oauth/2.0/token`;
+
+export const BYTEDANCE_BASE_URL = "https://ark.cn-beijing.volces.com";
+
+export const ALIBABA_BASE_URL = "https://dashscope.aliyuncs.com/api/";
 
 export enum Path {
   Home = "/",
@@ -30,8 +38,12 @@ export enum Path {
 
 export enum ApiPath {
   Cors = "",
+  Azure = "/api/azure",
   OpenAI = "/api/openai",
   Anthropic = "/api/anthropic",
+  Baidu = "/api/baidu",
+  ByteDance = "/api/bytedance",
+  Alibaba = "/api/alibaba",
 }
 
 export enum SlotID {
@@ -72,19 +84,30 @@ export const REQUEST_TIMEOUT_MS = 60000;
 export const EXPORT_MESSAGE_CLASS_NAME = "export-markdown";
 
 export enum ServiceProvider {
+  Stability = "Stability",
   OpenAI = "OpenAI",
   Azure = "Azure",
   Google = "Google",
   Anthropic = "Anthropic",
+  Baidu = "Baidu",
+  ByteDance = "ByteDance",
+  Alibaba = "Alibaba",
 }
 
 export enum ModelProvider {
+  Stability = "Stability",
   GPT = "GPT",
   GeminiPro = "GeminiPro",
   Claude = "Claude",
-  Stability = "Stability",
+  Ernie = "Ernie",
+  Doubao = "Doubao",
+  Qwen = "Qwen",
 }
 
+export const StabilityPath = {
+  GeneratePath: "v2beta/stable-image/generate",
+};
+
 export const Anthropic = {
   ChatPath: "v1/messages",
   ChatPath1: "v1/complete",
@@ -100,6 +123,8 @@ export const OpenaiPath = {
 };
 
 export const Azure = {
+  ChatPath: (deployName: string, apiVersion: string) =>
+    `deployments/${deployName}/chat/completions?api-version=${apiVersion}`,
   ExampleEndpoint: "https://{resource-url}/openai/deployments/{deploy-id}",
 };
 
@@ -108,8 +133,31 @@ export const Google = {
   ChatPath: (modelName: string) => `v1beta/models/${modelName}:generateContent`,
 };
 
-export const StabilityPath = {
-  GeneratePath: "v2beta/stable-image/generate",
+export const Baidu = {
+  ExampleEndpoint: BAIDU_BASE_URL,
+  ChatPath: (modelName: string) => {
+    let endpoint = modelName;
+    if (modelName === "ernie-4.0-8k") {
+      endpoint = "completions_pro";
+    }
+    if (modelName === "ernie-4.0-8k-preview-0518") {
+      endpoint = "completions_adv_pro";
+    }
+    if (modelName === "ernie-3.5-8k") {
+      endpoint = "completions";
+    }
+    return `rpc/2.0/ai_custom/v1/wenxinworkshop/chat/${endpoint}`;
+  },
+};
+
+export const ByteDance = {
+  ExampleEndpoint: "https://ark.cn-beijing.volces.com/api/",
+  ChatPath: "api/v3/chat/completions",
+};
+
+export const Alibaba = {
+  ExampleEndpoint: ALIBABA_BASE_URL,
+  ChatPath: "v1/services/aigc/text-generation/generation",
 };
 
 export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
@@ -161,6 +209,7 @@ const openaiModels = [
   "gpt-4o-2024-05-13",
   "gpt-4-vision-preview",
   "gpt-4-turbo-2024-04-09",
+  "gpt-4-1106-preview",
 ];
 
 const googleModels = [
@@ -177,6 +226,36 @@ const anthropicModels = [
   "claude-3-sonnet-20240229",
   "claude-3-opus-20240229",
   "claude-3-haiku-20240307",
+  "claude-3-5-sonnet-20240620",
+];
+
+const baiduModels = [
+  "ernie-4.0-turbo-8k",
+  "ernie-4.0-8k",
+  "ernie-4.0-8k-preview",
+  "ernie-4.0-8k-preview-0518",
+  "ernie-4.0-8k-latest",
+  "ernie-3.5-8k",
+  "ernie-3.5-8k-0205",
+];
+
+const bytedanceModels = [
+  "Doubao-lite-4k",
+  "Doubao-lite-32k",
+  "Doubao-lite-128k",
+  "Doubao-pro-4k",
+  "Doubao-pro-32k",
+  "Doubao-pro-128k",
+];
+
+const alibabaModes = [
+  "qwen-turbo",
+  "qwen-plus",
+  "qwen-max",
+  "qwen-max-0428",
+  "qwen-max-0403",
+  "qwen-max-0107",
+  "qwen-max-longcontext",
 ];
 
 export const DEFAULT_MODELS = [
@@ -189,6 +268,15 @@ export const DEFAULT_MODELS = [
       providerType: "openai",
     },
   })),
+  ...openaiModels.map((name) => ({
+    name,
+    available: true,
+    provider: {
+      id: "azure",
+      providerName: "Azure",
+      providerType: "azure",
+    },
+  })),
   ...googleModels.map((name) => ({
     name,
     available: true,
@@ -207,6 +295,33 @@ export const DEFAULT_MODELS = [
       providerType: "anthropic",
     },
   })),
+  ...baiduModels.map((name) => ({
+    name,
+    available: true,
+    provider: {
+      id: "baidu",
+      providerName: "Baidu",
+      providerType: "baidu",
+    },
+  })),
+  ...bytedanceModels.map((name) => ({
+    name,
+    available: true,
+    provider: {
+      id: "bytedance",
+      providerName: "ByteDance",
+      providerType: "bytedance",
+    },
+  })),
+  ...alibabaModes.map((name) => ({
+    name,
+    available: true,
+    provider: {
+      id: "alibaba",
+      providerName: "Alibaba",
+      providerType: "alibaba",
+    },
+  })),
 ] as const;
 
 export const CHAT_PAGE_SIZE = 15;
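
Baidu's wenxinworkshop endpoints are not always named after the model, so Baidu.ChatPath above maps a model name to its endpoint slug and otherwise passes the name through unchanged. A standalone copy of that mapping for illustration:

// TypeScript: illustrative copy of the Baidu chat-path mapping added above.
function baiduChatPath(modelName: string): string {
  let endpoint = modelName;
  if (modelName === "ernie-4.0-8k") endpoint = "completions_pro";
  if (modelName === "ernie-4.0-8k-preview-0518") endpoint = "completions_adv_pro";
  if (modelName === "ernie-3.5-8k") endpoint = "completions";
  return `rpc/2.0/ai_custom/v1/wenxinworkshop/chat/${endpoint}`;
}

// baiduChatPath("ernie-4.0-8k")       => ".../wenxinworkshop/chat/completions_pro"
// baiduChatPath("ernie-4.0-turbo-8k") => ".../wenxinworkshop/chat/ernie-4.0-turbo-8k" (pass-through)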

+ 14 - 10
app/layout.tsx

@@ -3,7 +3,7 @@ import "./styles/globals.scss";
 import "./styles/markdown.scss";
 import "./styles/highlight.scss";
 import { getClientConfig } from "./config/client";
-import { type Metadata } from "next";
+import type { Metadata, Viewport } from "next";
 import { SpeedInsights } from "@vercel/speed-insights/next";
 import { getServerSideConfig } from "./config/server";
 import { GoogleTagManager } from "@next/third-parties/google";
@@ -12,19 +12,20 @@ const serverConfig = getServerSideConfig();
 export const metadata: Metadata = {
   title: "NextChat",
   description: "Your personal ChatGPT Chat Bot.",
-  viewport: {
-    width: "device-width",
-    initialScale: 1,
-    maximumScale: 1,
+  appleWebApp: {
+    title: "NextChat",
+    statusBarStyle: "default",
   },
+};
+
+export const viewport: Viewport = {
+  width: "device-width",
+  initialScale: 1,
+  maximumScale: 1,
   themeColor: [
     { media: "(prefers-color-scheme: light)", color: "#fafafa" },
     { media: "(prefers-color-scheme: dark)", color: "#151515" },
   ],
-  appleWebApp: {
-    title: "NextChat",
-    statusBarStyle: "default",
-  },
 };
 
 export default function RootLayout({
@@ -36,7 +37,10 @@ export default function RootLayout({
     <html lang="en">
       <head>
         <meta name="config" content={JSON.stringify(getClientConfig())} />
-        <meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no" />
+        <meta
+          name="viewport"
+          content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no"
+        />
         <link rel="manifest" href="/site.webmanifest"></link>
         <script src="/serviceWorkerRegister.js" defer></script>
       </head>

+ 38 - 0
app/locales/cn.ts

@@ -347,6 +347,44 @@ const cn = {
           SubTitle: "选择一个特定的 API 版本",
         },
       },
+      Baidu: {
+        ApiKey: {
+          Title: "接口密钥",
+          SubTitle: "使用自定义 Baidu API Key",
+          Placeholder: "Baidu API Key",
+        },
+        SecretKey: {
+          Title: "接口密钥",
+          SubTitle: "使用自定义 Baidu Secret Key",
+          Placeholder: "Baidu Secret Key",
+        },
+        Endpoint: {
+          Title: "接口地址",
+          SubTitle: "样例:",
+        },
+      },
+      ByteDance: {
+        ApiKey: {
+          Title: "接口密钥",
+          SubTitle: "使用自定义 ByteDance API Key",
+          Placeholder: "ByteDance API Key",
+        },
+        Endpoint: {
+          Title: "接口地址",
+          SubTitle: "样例:",
+        },
+      },
+      Alibaba: {
+        ApiKey: {
+          Title: "接口密钥",
+          SubTitle: "使用自定义阿里云API Key",
+          Placeholder: "Alibaba Cloud API Key",
+        },
+        Endpoint: {
+          Title: "接口地址",
+          SubTitle: "样例:",
+        },
+      },
       CustomModel: {
         Title: "自定义模型名",
         SubTitle: "增加自定义模型可选项,使用英文逗号隔开",

+ 38 - 0
app/locales/en.ts

@@ -334,6 +334,44 @@ const en: LocaleType = {
           SubTitle: "Select and input a specific API version",
         },
       },
+      Baidu: {
+        ApiKey: {
+          Title: "Baidu API Key",
+          SubTitle: "Use a custom Baidu API Key",
+          Placeholder: "Baidu API Key",
+        },
+        SecretKey: {
+          Title: "Baidu Secret Key",
+          SubTitle: "Use a custom Baidu Secret Key",
+          Placeholder: "Baidu Secret Key",
+        },
+        Endpoint: {
+          Title: "Endpoint Address",
+          SubTitle: "Example:",
+        },
+      },
+      ByteDance: {
+        ApiKey: {
+          Title: "ByteDance API Key",
+          SubTitle: "Use a custom ByteDance API Key",
+          Placeholder: "ByteDance API Key",
+        },
+        Endpoint: {
+          Title: "Endpoint Address",
+          SubTitle: "Example:",
+        },
+      },
+      Alibaba: {
+        ApiKey: {
+          Title: "Alibaba API Key",
+          SubTitle: "Use a custom Alibaba Cloud API Key",
+          Placeholder: "Alibaba Cloud API Key",
+        },
+        Endpoint: {
+          Title: "Endpoint Address",
+          SubTitle: "Example:",
+        },
+      },
       CustomModel: {
         Title: "Custom Models",
        SubTitle: "Custom model options, separated by comma",

+ 12 - 10
app/locales/index.ts

@@ -97,7 +97,17 @@ function setItem(key: string, value: string) {
 
 function getLanguage() {
   try {
-    return navigator.language.toLowerCase();
+    const locale = new Intl.Locale(navigator.language).maximize();
+    const region = locale?.region?.toLowerCase();
+    // 1. check region code in ALL_LANGS
+    if (AllLangs.includes(region as Lang)) {
+      return region as Lang;
+    }
+    // 2. check language code in ALL_LANGS
+    if (AllLangs.includes(locale.language as Lang)) {
+      return locale.language as Lang;
+    }
+    return DEFAULT_LANG;
   } catch {
     return DEFAULT_LANG;
   }
@@ -110,15 +120,7 @@ export function getLang(): Lang {
     return savedLang as Lang;
   }
 
-  const lang = getLanguage();
-
-  for (const option of AllLangs) {
-    if (lang.includes(option)) {
-      return option;
-    }
-  }
-
-  return DEFAULT_LANG;
+  return getLanguage();
 }
 
 export function changeLang(lang: Lang) {

+ 25 - 0
app/masks/build.ts

@@ -0,0 +1,25 @@
+import fs from "fs";
+import path from "path";
+import { CN_MASKS } from "./cn";
+import { TW_MASKS } from "./tw";
+import { EN_MASKS } from "./en";
+
+import { type BuiltinMask } from "./typing";
+
+const BUILTIN_MASKS: Record<string, BuiltinMask[]> = {
+  cn: CN_MASKS,
+  tw: TW_MASKS,
+  en: EN_MASKS,
+};
+
+const dirname = path.dirname(__filename);
+
+fs.writeFile(
+  dirname + "/../../public/masks.json",
+  JSON.stringify(BUILTIN_MASKS, null, 4),
+  function (error) {
+    if (error) {
+      console.error("[Build] failed to build masks", error);
+    }
+  },
+);

+ 17 - 3
app/masks/index.ts

@@ -22,6 +22,20 @@ export const BUILTIN_MASK_STORE = {
   },
 };
 
-export const BUILTIN_MASKS: BuiltinMask[] = [...CN_MASKS, ...TW_MASKS, ...EN_MASKS].map(
-  (m) => BUILTIN_MASK_STORE.add(m),
-);
+export const BUILTIN_MASKS: BuiltinMask[] = [];
+
+if (typeof window != "undefined") {
+  // runs only in the browser; skipped on the Next.js server
+  fetch("/masks.json")
+    .then((res) => res.json())
+    .catch((error) => {
+      console.error("[Fetch] failed to fetch masks", error);
+      return { cn: [], tw: [], en: [] };
+    })
+    .then((masks) => {
+      const { cn = [], tw = [], en = [] } = masks;
+      return [...cn, ...tw, ...en].map((m) => {
+        BUILTIN_MASKS.push(BUILTIN_MASK_STORE.add(m));
+      });
+    });
+}
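
Built-in masks are no longer bundled into the client code; the new build step writes them to public/masks.json and the browser fetches that file at runtime, falling back to empty lists on failure. A minimal sketch of that fetch-with-fallback pattern (the loader shape is illustrative; the path comes from the diff):

// TypeScript (browser): load the prebuilt mask bundle with an empty fallback.
type MaskBundle = { cn: unknown[]; tw: unknown[]; en: unknown[] };

async function loadBuiltinMasks(): Promise<unknown[]> {
  const empty: MaskBundle = { cn: [], tw: [], en: [] };
  const bundle: MaskBundle = await fetch("/masks.json")
    .then((res) => res.json())
    .catch((err) => {
      console.error("[Fetch] failed to fetch masks", err);
      return empty;
    });
  const { cn = [], tw = [], en = [] } = bundle;
  return [...cn, ...tw, ...en];
}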

+ 34 - 1
app/store/access.ts

@@ -17,6 +17,11 @@ const DEFAULT_OPENAI_URL =
     ? DEFAULT_API_HOST + "/api/proxy/openai"
     : ApiPath.OpenAI;
 
+const DEFAULT_AZURE_URL =
+  getClientConfig()?.buildMode === "export"
+    ? DEFAULT_API_HOST + "/api/proxy/azure/{resource_name}"
+    : ApiPath.Azure;
+
 const DEFAULT_ACCESS_STATE = {
   accessCode: "",
   useCustomConfig: false,
@@ -28,7 +33,7 @@ const DEFAULT_ACCESS_STATE = {
   openaiApiKey: "",
 
   // azure
-  azureUrl: "",
+  azureUrl: DEFAULT_AZURE_URL,
   azureApiKey: "",
   azureApiVersion: "2023-08-01-preview",
 
@@ -42,6 +47,19 @@ const DEFAULT_ACCESS_STATE = {
   anthropicApiVersion: "2023-06-01",
   anthropicUrl: "",
 
+  // baidu
+  baiduUrl: "",
+  baiduApiKey: "",
+  baiduSecretKey: "",
+
+  // bytedance
+  bytedanceApiKey: "",
+  bytedanceUrl: "",
+
+  // alibaba
+  alibabaUrl: "",
+  alibabaApiKey: "",
+
   // server config
   needCode: true,
   hideUserApiKey: false,
@@ -78,6 +96,18 @@ export const useAccessStore = createPersistStore(
       return ensure(get(), ["anthropicApiKey"]);
     },
 
+    isValidBaidu() {
+      return ensure(get(), ["baiduApiKey", "baiduSecretKey"]);
+    },
+
+    isValidByteDance() {
+      return ensure(get(), ["bytedanceApiKey"]);
+    },
+
+    isValidAlibaba() {
+      return ensure(get(), ["alibabaApiKey"]);
+    },
+
     isAuthorized() {
       this.fetch();
 
@@ -87,6 +117,9 @@ export const useAccessStore = createPersistStore(
         this.isValidAzure() ||
         this.isValidGoogle() ||
         this.isValidAnthropic() ||
+        this.isValidBaidu() ||
+        this.isValidByteDance() ||
+        this.isValidAlibaba() ||
         !this.enabledAccessControl() ||
         (this.enabledAccessControl() && ensure(get(), ["accessCode"]))
       );

+ 9 - 19
app/store/chat.ts

@@ -9,18 +9,23 @@ import {
   DEFAULT_MODELS,
   DEFAULT_SYSTEM_TEMPLATE,
   KnowledgeCutOffDate,
+  ServiceProvider,
   ModelProvider,
   StoreKey,
   SUMMARIZE_MODEL,
   GEMINI_SUMMARIZE_MODEL,
 } from "../constant";
-import { ClientApi, RequestMessage, MultimodalContent } from "../client/api";
+import { getClientApi } from "../client/api";
+import type {
+  ClientApi,
+  RequestMessage,
+  MultimodalContent,
+} from "../client/api";
 import { ChatControllerPool } from "../client/controller";
 import { prettyObject } from "../utils/format";
 import { estimateTokenLength } from "../utils/token";
 import { nanoid } from "nanoid";
 import { createPersistStore } from "../utils/store";
-import { identifyDefaultClaudeModel } from "../utils/checkers";
 import { collectModelsWithDefaultModel } from "../utils/model";
 import { useAccessStore } from "./access";
 
@@ -363,15 +368,7 @@ export const useChatStore = createPersistStore(
           ]);
         });
 
-        var api: ClientApi;
-        if (modelConfig.model.startsWith("gemini")) {
-          api = new ClientApi(ModelProvider.GeminiPro);
-        } else if (identifyDefaultClaudeModel(modelConfig.model)) {
-          api = new ClientApi(ModelProvider.Claude);
-        } else {
-          api = new ClientApi(ModelProvider.GPT);
-        }
-
+        const api: ClientApi = getClientApi(modelConfig.providerName);
         // make request
         api.llm.chat({
           messages: sendMessages,
@@ -547,14 +544,7 @@ export const useChatStore = createPersistStore(
         const session = get().currentSession();
         const modelConfig = session.mask.modelConfig;
 
-        var api: ClientApi;
-        if (modelConfig.model.startsWith("gemini")) {
-          api = new ClientApi(ModelProvider.GeminiPro);
-        } else if (identifyDefaultClaudeModel(modelConfig.model)) {
-          api = new ClientApi(ModelProvider.Claude);
-        } else {
-          api = new ClientApi(ModelProvider.GPT);
-        }
+        const api: ClientApi = getClientApi(modelConfig.providerName);
 
         // remove error messages if any
         const messages = session.messages;

+ 4 - 2
app/store/config.ts

@@ -5,6 +5,7 @@ import {
   DEFAULT_MODELS,
   DEFAULT_SIDEBAR_WIDTH,
   StoreKey,
+  ServiceProvider,
 } from "../constant";
 import { createPersistStore } from "../utils/store";
 
@@ -48,6 +49,7 @@ export const DEFAULT_CONFIG = {
 
   modelConfig: {
     model: "gpt-3.5-turbo" as ModelType,
+    providerName: "OpenAI" as ServiceProvider,
     temperature: 0.5,
     top_p: 1,
     max_tokens: 4000,
@@ -116,12 +118,12 @@ export const useAppConfig = createPersistStore(
 
       for (const model of oldModels) {
         model.available = false;
-        modelMap[model.name] = model;
+        modelMap[`${model.name}@${model?.provider?.id}`] = model;
       }
 
       for (const model of newModels) {
         model.available = true;
-        modelMap[model.name] = model;
+        modelMap[`${model.name}@${model?.provider?.id}`] = model;
       }
 
       set(() => ({

+ 23 - 0
app/utils/baidu.ts

@@ -0,0 +1,23 @@
+import { BAIDU_OATUH_URL } from "../constant";
+/**
+ * Generate an auth signature (Access Token) from the API Key (AK) and Secret Key (SK)
+ * @return the access token response
+ */
+export async function getAccessToken(
+  clientId: string,
+  clientSecret: string,
+): Promise<{
+  access_token: string;
+  expires_in: number;
+  error?: number;
+}> {
+  const res = await fetch(
+    `${BAIDU_OATUH_URL}?grant_type=client_credentials&client_id=${clientId}&client_secret=${clientSecret}`,
+    {
+      method: "POST",
+      mode: "cors",
+    },
+  );
+  const resJson = await res.json();
+  return resJson;
+}
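
Baidu's ERNIE API does not accept the API key directly; the helper above exchanges the API key / secret key pair for a short-lived access token via the OAuth endpoint. A hedged usage sketch that caches the token until shortly before it expires (the caching wrapper is an assumption, not part of the repo):

// TypeScript: illustrative caller around the getAccessToken helper added above.
import { getAccessToken } from "@/app/utils/baidu";

let cached: { token: string; expiresAt: number } | null = null;

export async function getBaiduToken(apiKey: string, secretKey: string): Promise<string> {
  const now = Date.now();
  if (cached && cached.expiresAt > now) return cached.token;

  const res = await getAccessToken(apiKey, secretKey);
  if (res.error) throw new Error(`Baidu OAuth failed with error ${res.error}`);

  // expires_in is in seconds; refresh one minute early to be safe
  cached = { token: res.access_token, expiresAt: now + (res.expires_in - 60) * 1000 };
  return cached.token;
}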

+ 0 - 21
app/utils/checkers.ts

@@ -1,21 +0,0 @@
-import { useAccessStore } from "../store/access";
-import { useAppConfig } from "../store/config";
-import { collectModels } from "./model";
-
-export function identifyDefaultClaudeModel(modelName: string) {
-  const accessStore = useAccessStore.getState();
-  const configStore = useAppConfig.getState();
-
-  const allModals = collectModels(
-    configStore.models,
-    [configStore.customModels, accessStore.customModels].join(","),
-  );
-
-  const modelMeta = allModals.find((m) => m.name === modelName);
-
-  return (
-    modelName.startsWith("claude") &&
-    modelMeta &&
-    modelMeta.provider?.providerType === "anthropic"
-  );
-}

+ 6 - 1
app/utils/hooks.ts

@@ -11,7 +11,12 @@ export function useAllModels() {
       [configStore.customModels, accessStore.customModels].join(","),
       accessStore.defaultModel,
     );
-  }, [accessStore.customModels, configStore.customModels, configStore.models]);
+  }, [
+    accessStore.customModels,
+    accessStore.defaultModel,
+    configStore.customModels,
+    configStore.models,
+  ]);
 
   return models;
 }

+ 47 - 9
app/utils/model.ts

@@ -1,8 +1,9 @@
+import { DEFAULT_MODELS } from "../constant";
 import { LLMModel } from "../client/api";
 
 const customProvider = (modelName: string) => ({
   id: modelName,
-  providerName: "",
+  providerName: "Custom",
   providerType: "custom",
 });
 
@@ -23,7 +24,8 @@ export function collectModelTable(
 
   // default models
   models.forEach((m) => {
-    modelTable[m.name] = {
+    // using <modelName>@<providerId> as fullName
+    modelTable[`${m.name}@${m?.provider?.id}`] = {
       ...m,
       displayName: m.name, // 'provider' is copied over if it exists
     };
@@ -37,7 +39,7 @@ export function collectModelTable(
       const available = !m.startsWith("-");
       const nameConfig =
         m.startsWith("+") || m.startsWith("-") ? m.slice(1) : m;
-      const [name, displayName] = nameConfig.split("=");
+      let [name, displayName] = nameConfig.split("=");
 
       // enable or disable all models
       if (name === "all") {
@@ -45,12 +47,38 @@ export function collectModelTable(
           (model) => (model.available = available),
         );
       } else {
-        modelTable[name] = {
-          name,
-          displayName: displayName || name,
-          available,
-          provider: modelTable[name]?.provider ?? customProvider(name), // Use optional chaining
-        };
+        // 1. find model by name, and set available value
+        const [customModelName, customProviderName] = name.split("@");
+        let count = 0;
+        for (const fullName in modelTable) {
+          const [modelName, providerName] = fullName.split("@");
+          if (
+            customModelName == modelName &&
+            (customProviderName === undefined ||
+              customProviderName === providerName)
+          ) {
+            count += 1;
+            modelTable[fullName]["available"] = available;
+            // swap name and displayName for bytedance
+            if (providerName === "bytedance") {
+              [name, displayName] = [displayName, modelName];
+              modelTable[fullName]["name"] = name;
+            }
+            if (displayName) {
+              modelTable[fullName]["displayName"] = displayName;
+            }
+          }
+        }
+        // 2. if model not exists, create new model with available value
+        if (count === 0) {
+          const provider = customProvider(name);
+          modelTable[`${name}@${provider?.id}`] = {
+            name,
+            displayName: displayName || name,
+            available,
+            provider, // custom provider object created above
+          };
+        }
       }
     });
 
@@ -100,3 +128,13 @@ export function collectModelsWithDefaultModel(
   const allModels = Object.values(modelTable);
   return allModels;
 }
+
+export function isModelAvailableInServer(
+  customModels: string,
+  modelName: string,
+  providerName: string,
+) {
+  const fullName = `${modelName}@${providerName}`;
+  const modelTable = collectModelTable(DEFAULT_MODELS, customModels);
+  return modelTable[fullName]?.available === false;
+}
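
The model table is now keyed by <modelName>@<providerId>, and CUSTOM_MODELS entries can target a single provider with the same @ syntax while still supporting =displayName (which doubles as the Azure deployment name). A small sketch of parsing one entry (this parser is a hypothetical illustration, not the repo's collectModelTable):

// TypeScript: parse a single CUSTOM_MODELS entry such as
//   "+gpt-4o@azure=my-deploy"  or  "-Doubao-lite-4k@bytedance"
interface CustomModelRule {
  available: boolean;    // "+" (or no prefix) enables, "-" disables
  modelName: string;
  providerId?: string;   // optional "@provider" selector
  displayName?: string;  // optional "=displayName"
}

function parseCustomModel(entry: string): CustomModelRule {
  const available = !entry.startsWith("-");
  const body = entry.startsWith("+") || entry.startsWith("-") ? entry.slice(1) : entry;
  const [nameWithProvider, displayName] = body.split("=");
  const [modelName, providerId] = nameWithProvider.split("@");
  return { available, modelName, providerId, displayName };
}

// parseCustomModel("+gpt-4o@azure=my-deploy")
//   => { available: true, modelName: "gpt-4o", providerId: "azure", displayName: "my-deploy" }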

+ 5 - 0
next.config.mjs

@@ -69,6 +69,11 @@ if (mode !== "export") {
         source: "/api/proxy/v1/:path*",
         destination: "https://api.openai.com/v1/:path*",
       },
+      {
+        // https://{resource_name}.openai.azure.com/openai/deployments/{deploy_name}/chat/completions
+        source: "/api/proxy/azure/:resource_name/deployments/:deploy_name/:path*",
+        destination: "https://:resource_name.openai.azure.com/openai/deployments/:deploy_name/:path*",
+      },
       {
         source: "/api/proxy/google/:path*",
         destination: "https://generativelanguage.googleapis.com/:path*",

+ 10 - 6
package.json

@@ -3,14 +3,16 @@
   "private": false,
   "license": "mit",
   "scripts": {
-    "dev": "next dev",
-    "build": "cross-env BUILD_MODE=standalone next build",
+    "mask": "npx tsx app/masks/build.ts",
+    "mask:watch": "npx watch 'yarn mask' app/masks",
+    "dev": "yarn run mask:watch & next dev",
+    "build": "yarn mask && cross-env BUILD_MODE=standalone next build",
     "start": "next start",
     "lint": "next lint",
-    "export": "cross-env BUILD_MODE=export BUILD_APP=1 next build",
-    "export:dev": "cross-env BUILD_MODE=export BUILD_APP=1 next dev",
-    "app:dev": "yarn tauri dev",
-    "app:build": "yarn tauri build",
+    "export": "yarn mask && cross-env BUILD_MODE=export BUILD_APP=1 next build",
+    "export:dev": "yarn mask:watch & cross-env BUILD_MODE=export BUILD_APP=1 next dev",
+    "app:dev": "yarn mask:watch & yarn tauri dev",
+    "app:build": "yarn mask && yarn tauri build",
     "prompts": "node ./scripts/fetch-prompts.mjs",
     "prepare": "husky install",
     "proxy-dev": "sh ./scripts/init-proxy.sh && proxychains -f ./scripts/proxychains.conf yarn dev"
@@ -60,7 +62,9 @@
     "husky": "^8.0.0",
     "lint-staged": "^13.2.2",
     "prettier": "^3.0.2",
+    "tsx": "^4.16.0",
     "typescript": "5.2.2",
+    "watch": "^1.0.2",
     "webpack": "^5.88.1"
   },
   "resolutions": {

+ 2 - 2
src-tauri/tauri.conf.json

@@ -9,7 +9,7 @@
   },
   "package": {
     "productName": "NextChat",
-    "version": "2.12.3"
+    "version": "2.12.4"
   },
   "tauri": {
     "allowlist": {
@@ -112,4 +112,4 @@
       }
     ]
   }
-}
+}

+ 191 - 0
yarn.lock

@@ -1092,6 +1092,121 @@
   resolved "https://registry.yarnpkg.com/@braintree/sanitize-url/-/sanitize-url-6.0.4.tgz#923ca57e173c6b232bbbb07347b1be982f03e783"
   integrity sha512-s3jaWicZd0pkP0jf5ysyHUI/RE7MHos6qlToFcGWXVp+ykHOy77OUMrfbgJ9it2C5bow7OIQwYYaHjk9XlBQ2A==
 
+"@esbuild/aix-ppc64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.npmmirror.com/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz#c7184a326533fcdf1b8ee0733e21c713b975575f"
+  integrity sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==
+
+"@esbuild/android-arm64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.npmmirror.com/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz#09d9b4357780da9ea3a7dfb833a1f1ff439b4052"
+  integrity sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==
+
+"@esbuild/android-arm@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.npmmirror.com/@esbuild/android-arm/-/android-arm-0.21.5.tgz#9b04384fb771926dfa6d7ad04324ecb2ab9b2e28"
+  integrity sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==
+
+"@esbuild/android-x64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.npmmirror.com/@esbuild/android-x64/-/android-x64-0.21.5.tgz#29918ec2db754cedcb6c1b04de8cd6547af6461e"
+  integrity sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==
+
+"@esbuild/darwin-arm64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.npmmirror.com/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz#e495b539660e51690f3928af50a76fb0a6ccff2a"
+  integrity sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==
+
+"@esbuild/darwin-x64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.npmmirror.com/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz#c13838fa57372839abdddc91d71542ceea2e1e22"
+  integrity sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==
+
+"@esbuild/freebsd-arm64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.npmmirror.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz#646b989aa20bf89fd071dd5dbfad69a3542e550e"
+  integrity sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==
+
+"@esbuild/freebsd-x64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.npmmirror.com/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz#aa615cfc80af954d3458906e38ca22c18cf5c261"
+  integrity sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==
+
+"@esbuild/linux-arm64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.npmmirror.com/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz#70ac6fa14f5cb7e1f7f887bcffb680ad09922b5b"
+  integrity sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==
+
+"@esbuild/linux-arm@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.npmmirror.com/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz#fc6fd11a8aca56c1f6f3894f2bea0479f8f626b9"
+  integrity sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==
+
+"@esbuild/linux-ia32@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.npmmirror.com/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz#3271f53b3f93e3d093d518d1649d6d68d346ede2"
+  integrity sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==
+
+"@esbuild/linux-loong64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.npmmirror.com/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz#ed62e04238c57026aea831c5a130b73c0f9f26df"
+  integrity sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==
+
+"@esbuild/linux-mips64el@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.npmmirror.com/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz#e79b8eb48bf3b106fadec1ac8240fb97b4e64cbe"
+  integrity sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==
+
+"@esbuild/linux-ppc64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.npmmirror.com/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz#5f2203860a143b9919d383ef7573521fb154c3e4"
+  integrity sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==
+
+"@esbuild/linux-riscv64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.npmmirror.com/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz#07bcafd99322d5af62f618cb9e6a9b7f4bb825dc"
+  integrity sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==
+
+"@esbuild/linux-s390x@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.npmmirror.com/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz#b7ccf686751d6a3e44b8627ababc8be3ef62d8de"
+  integrity sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==
+
+"@esbuild/linux-x64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.npmmirror.com/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz#6d8f0c768e070e64309af8004bb94e68ab2bb3b0"
+  integrity sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==
+
+"@esbuild/netbsd-x64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.npmmirror.com/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz#bbe430f60d378ecb88decb219c602667387a6047"
+  integrity sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==
+
+"@esbuild/openbsd-x64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.npmmirror.com/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz#99d1cf2937279560d2104821f5ccce220cb2af70"
+  integrity sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==
+
+"@esbuild/sunos-x64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.npmmirror.com/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz#08741512c10d529566baba837b4fe052c8f3487b"
+  integrity sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==
+
+"@esbuild/win32-arm64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.npmmirror.com/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz#675b7385398411240735016144ab2e99a60fc75d"
+  integrity sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==
+
+"@esbuild/win32-ia32@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.npmmirror.com/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz#1bfc3ce98aa6ca9a0969e4d2af72144c59c1193b"
+  integrity sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==
+
+"@esbuild/win32-x64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.npmmirror.com/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz#acad351d582d157bb145535db2a6ff53dd514b5c"
+  integrity sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==
+
 "@eslint-community/eslint-utils@^4.2.0":
   version "4.4.0"
   resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz#a23514e8fb9af1269d5f7788aa556798d61c6b59"
@@ -2981,6 +3096,35 @@ es-to-primitive@^1.2.1:
     is-date-object "^1.0.1"
     is-symbol "^1.0.2"
 
+esbuild@~0.21.5:
+  version "0.21.5"
+  resolved "https://registry.npmmirror.com/esbuild/-/esbuild-0.21.5.tgz#9ca301b120922959b766360d8ac830da0d02997d"
+  integrity sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==
+  optionalDependencies:
+    "@esbuild/aix-ppc64" "0.21.5"
+    "@esbuild/android-arm" "0.21.5"
+    "@esbuild/android-arm64" "0.21.5"
+    "@esbuild/android-x64" "0.21.5"
+    "@esbuild/darwin-arm64" "0.21.5"
+    "@esbuild/darwin-x64" "0.21.5"
+    "@esbuild/freebsd-arm64" "0.21.5"
+    "@esbuild/freebsd-x64" "0.21.5"
+    "@esbuild/linux-arm" "0.21.5"
+    "@esbuild/linux-arm64" "0.21.5"
+    "@esbuild/linux-ia32" "0.21.5"
+    "@esbuild/linux-loong64" "0.21.5"
+    "@esbuild/linux-mips64el" "0.21.5"
+    "@esbuild/linux-ppc64" "0.21.5"
+    "@esbuild/linux-riscv64" "0.21.5"
+    "@esbuild/linux-s390x" "0.21.5"
+    "@esbuild/linux-x64" "0.21.5"
+    "@esbuild/netbsd-x64" "0.21.5"
+    "@esbuild/openbsd-x64" "0.21.5"
+    "@esbuild/sunos-x64" "0.21.5"
+    "@esbuild/win32-arm64" "0.21.5"
+    "@esbuild/win32-ia32" "0.21.5"
+    "@esbuild/win32-x64" "0.21.5"
+
 escalade@^3.1.1:
   version "3.1.1"
   resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40"
@@ -3234,6 +3378,13 @@ events@^3.2.0:
   resolved "https://registry.npmmirror.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400"
   integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==
 
+exec-sh@^0.2.0:
+  version "0.2.2"
+  resolved "https://registry.npmmirror.com/exec-sh/-/exec-sh-0.2.2.tgz#2a5e7ffcbd7d0ba2755bdecb16e5a427dfbdec36"
+  integrity sha512-FIUCJz1RbuS0FKTdaAafAByGS0CPvU3R0MeHxgtl+djzCc//F8HakL8GzmVNZanasTbTAY/3DRFA0KpVqj/eAw==
+  dependencies:
+    merge "^1.2.0"
+
 execa@^7.0.0:
   version "7.1.1"
   resolved "https://registry.yarnpkg.com/execa/-/execa-7.1.1.tgz#3eb3c83d239488e7b409d48e8813b76bb55c9c43"
@@ -3376,6 +3527,11 @@ fsevents@~2.3.2:
   resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a"
   integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==
 
+fsevents@~2.3.3:
+  version "2.3.3"
+  resolved "https://registry.npmmirror.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6"
+  integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==
+
 function-bind@^1.1.1:
   version "1.1.1"
   resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d"
@@ -3433,6 +3589,13 @@ get-tsconfig@^4.5.0:
   resolved "https://registry.yarnpkg.com/get-tsconfig/-/get-tsconfig-4.5.0.tgz#6d52d1c7b299bd3ee9cd7638561653399ac77b0f"
   integrity sha512-MjhiaIWCJ1sAU4pIQ5i5OfOuHHxVo1oYeNsWTON7jxYkod8pHocXeh+SSbmu5OZZZK73B6cbJ2XADzXehLyovQ==
 
+get-tsconfig@^4.7.5:
+  version "4.7.5"
+  resolved "https://registry.npmmirror.com/get-tsconfig/-/get-tsconfig-4.7.5.tgz#5e012498579e9a6947511ed0cd403272c7acbbaf"
+  integrity sha512-ZCuZCnlqNzjb4QprAzXKdpp/gh6KTxSJuw3IBsPnV/7fV4NxC9ckB+vPTt8w7fJA0TaSD7c55BR47JD6MEDyDw==
+  dependencies:
+    resolve-pkg-maps "^1.0.0"
+
 glob-parent@^5.1.2, glob-parent@~5.1.2:
   version "5.1.2"
   resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4"
@@ -4387,6 +4550,11 @@ merge2@^1.3.0, merge2@^1.4.1:
   resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae"
   integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==
 
+merge@^1.2.0:
+  version "1.2.1"
+  resolved "https://registry.npmmirror.com/merge/-/merge-1.2.1.tgz#38bebf80c3220a8a487b6fcfb3941bb11720c145"
+  integrity sha512-VjFo4P5Whtj4vsLzsYBu5ayHhoHJ0UqNm7ibvShmbmoz7tGi0vXaoJbGdB+GmDMLUdg8DpQXEIeVDAe8MaABvQ==
+
 mermaid@^10.6.1:
   version "10.6.1"
   resolved "https://registry.yarnpkg.com/mermaid/-/mermaid-10.6.1.tgz#701f4160484137a417770ce757ce1887a98c00fc"
@@ -5322,6 +5490,11 @@ resolve-from@^4.0.0:
   resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6"
   integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==
 
+resolve-pkg-maps@^1.0.0:
+  version "1.0.0"
+  resolved "https://registry.npmmirror.com/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz#616b3dc2c57056b5588c31cdf4b3d64db133720f"
+  integrity sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==
+
 resolve@^1.14.2, resolve@^1.22.1:
   version "1.22.1"
   resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177"
@@ -5828,6 +6001,16 @@ tslib@^2.6.2:
   resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae"
   integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==
 
+tsx@^4.16.0:
+  version "4.16.0"
+  resolved "https://registry.npmmirror.com/tsx/-/tsx-4.16.0.tgz#913dd96f191b76f07a8744201d8c15d510aa1352"
+  integrity sha512-MPgN+CuY+4iKxGoJNPv+1pyo5YWZAQ5XfsyobUG+zoKG7IkvCPLZDEyoIb8yLS2FcWci1nlxAqmvPlFWD5AFiQ==
+  dependencies:
+    esbuild "~0.21.5"
+    get-tsconfig "^4.7.5"
+  optionalDependencies:
+    fsevents "~2.3.3"
+
 type-check@^0.4.0, type-check@~0.4.0:
   version "0.4.0"
   resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1"
@@ -6048,6 +6231,14 @@ vfile@^5.0.0:
     unist-util-stringify-position "^3.0.0"
     vfile-message "^3.0.0"
 
+watch@^1.0.2:
+  version "1.0.2"
+  resolved "https://registry.npmmirror.com/watch/-/watch-1.0.2.tgz#340a717bde765726fa0aa07d721e0147a551df0c"
+  integrity sha512-1u+Z5n9Jc1E2c7qDO8SinPoZuHj7FgbgU1olSFoyaklduDvvtX7GMMtlE6OC9FTXq4KvNAOfj6Zu4vI1e9bAKA==
+  dependencies:
+    exec-sh "^0.2.0"
+    minimist "^1.2.0"
+
 watchpack@^2.4.0:
   version "2.4.0"
   resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d"