@@ -1,7 +1,12 @@
 import { NextRequest, NextResponse } from "next/server";
 import { getServerSideConfig } from "../config/server";
-import { DEFAULT_MODELS, OPENAI_BASE_URL, GEMINI_BASE_URL } from "../constant";
-import { collectModelTable } from "../utils/model";
+import {
+  DEFAULT_MODELS,
+  OPENAI_BASE_URL,
+  GEMINI_BASE_URL,
+  ServiceProvider,
+} from "../constant";
+import { isModelAvailableInServer } from "../utils/model";
 import { makeAzurePath } from "../azure";
 
 const serverConfig = getServerSideConfig();
@@ -83,17 +88,24 @@ export async function requestOpenai(req: NextRequest) {
   // #1815 try to refuse gpt4 request
   if (serverConfig.customModels && req.body) {
     try {
-      const modelTable = collectModelTable(
-        DEFAULT_MODELS,
-        serverConfig.customModels,
-      );
       const clonedBody = await req.text();
       fetchOptions.body = clonedBody;
 
       const jsonBody = JSON.parse(clonedBody) as { model?: string };
 
       // not undefined and is false
-      if (modelTable[jsonBody?.model ?? ""].available === false) {
+      if (
+        isModelAvailableInServer(
+          serverConfig.customModels,
+          jsonBody?.model as string,
+          ServiceProvider.OpenAI as string,
+        ) ||
+        isModelAvailableInServer(
+          serverConfig.customModels,
+          jsonBody?.model as string,
+          ServiceProvider.Azure as string,
+        )
+      ) {
         return NextResponse.json(
           {
             error: true,
@@ -112,16 +124,16 @@ export async function requestOpenai(req: NextRequest) {
   try {
     const res = await fetch(fetchUrl, fetchOptions);
 
-      // Extract the OpenAI-Organization header from the response
-      const openaiOrganizationHeader = res.headers.get("OpenAI-Organization");
+    // Extract the OpenAI-Organization header from the response
+    const openaiOrganizationHeader = res.headers.get("OpenAI-Organization");
 
-      // Check if serverConfig.openaiOrgId is defined and not an empty string
-      if (serverConfig.openaiOrgId && serverConfig.openaiOrgId.trim() !== "") {
-        // If openaiOrganizationHeader is present, log it; otherwise, log that the header is not present
-        console.log("[Org ID]", openaiOrganizationHeader);
-      } else {
-        console.log("[Org ID] is not set up.");
-      }
+    // Check if serverConfig.openaiOrgId is defined and not an empty string
+    if (serverConfig.openaiOrgId && serverConfig.openaiOrgId.trim() !== "") {
+      // If openaiOrganizationHeader is present, log it; otherwise, log that the header is not present
+      console.log("[Org ID]", openaiOrganizationHeader);
+    } else {
+      console.log("[Org ID] is not set up.");
+    }
 
     // to prevent browser prompt for credentials
     const newHeaders = new Headers(res.headers);
@@ -129,7 +141,6 @@ export async function requestOpenai(req: NextRequest) {
     // to disable nginx buffering
     newHeaders.set("X-Accel-Buffering", "no");
 
-
     // Conditionally delete the OpenAI-Organization header from the response if [Org ID] is undefined or empty (not setup in ENV)
     // Also, this is to prevent the header from being sent to the client
     if (!serverConfig.openaiOrgId || serverConfig.openaiOrgId.trim() === "") {
@@ -142,7 +153,6 @@ export async function requestOpenai(req: NextRequest) {
     // The browser will try to decode the response with brotli and fail
     newHeaders.delete("content-encoding");
 
-
     return new Response(res.body, {
       status: res.status,
       statusText: res.statusText,
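
For context on the new check: `isModelAvailableInServer(customModels, model, provider)` is consulted once per provider (OpenAI and Azure here), and the request is refused when either call reports the model as blocked. The sketch below is a minimal, hypothetical reading of how such a provider-aware check could work against a `CUSTOM_MODELS`-style string; the `+name` / `-name` / `name@provider` syntax and the helper names (`buildModelTable`, `isModelAvailableInServerSketch`) are assumptions for illustration, not code taken from `app/utils/model.ts`.

```ts
// Hypothetical sketch only. Assumes customModels is a comma-separated list of
// "+name" (enable), "-name" (disable) and optional "name@provider" entries,
// and that the check answers "should the server refuse this model for this
// provider?" (true = refuse), matching how the diff short-circuits the request.
type ModelRule = { available: boolean; provider?: string };

function buildModelTable(customModels: string): Record<string, ModelRule> {
  const table: Record<string, ModelRule> = {};
  for (const raw of customModels.split(",")) {
    const entry = raw.trim();
    if (!entry) continue;
    const available = !entry.startsWith("-");
    const [name, provider] = entry.replace(/^[+-]/, "").split("@");
    table[name.toLowerCase()] = {
      available,
      provider: provider?.toLowerCase(),
    };
  }
  return table;
}

function isModelAvailableInServerSketch(
  customModels: string,
  modelName: string,
  providerName: string,
): boolean {
  const rule = buildModelTable(customModels)[(modelName ?? "").toLowerCase()];
  if (!rule) return false; // model not mentioned: do not refuse
  if (rule.provider && rule.provider !== providerName.toLowerCase()) {
    return false; // rule targets a different provider
  }
  return rule.available === false; // refuse only when explicitly disabled
}

// Example: CUSTOM_MODELS="-gpt-4,+gpt-3.5-turbo"
isModelAvailableInServerSketch("-gpt-4,+gpt-3.5-turbo", "gpt-4", "OpenAI"); // true -> refuse
isModelAvailableInServerSketch("-gpt-4,+gpt-3.5-turbo", "gpt-3.5-turbo", "Azure"); // false -> allow
```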