add function to check model is available

lloydzhou 2024-07-04 15:30:24 +08:00
parent 2803a91673
commit e7b16bfbc0
3 changed files with 21 additions and 23 deletions

View File

@@ -9,7 +9,7 @@ import {
 import { prettyObject } from "@/app/utils/format";
 import { NextRequest, NextResponse } from "next/server";
 import { auth } from "../../auth";
-import { collectModelTable } from "@/app/utils/model";
+import { isModelAvailableInServer } from "@/app/utils/model";

 const ALLOWD_PATH = new Set([Anthropic.ChatPath, Anthropic.ChatPath1]);

@@ -136,17 +136,13 @@ async function request(req: NextRequest) {
   // #1815 try to refuse some request to some models
   if (serverConfig.customModels && req.body) {
     try {
-      const modelTable = collectModelTable(
-        DEFAULT_MODELS,
-        serverConfig.customModels,
-      );
       const clonedBody = await req.text();
       fetchOptions.body = clonedBody;

       const jsonBody = JSON.parse(clonedBody) as { model?: string };

       // not undefined and is false
-      if (modelTable[jsonBody?.model ?? ""].available === false) {
+      if (isModelAvailableInServer(jsonBody?.model ?? "")) {
         return NextResponse.json(
           {
             error: true,

View File

@@ -1,7 +1,7 @@
 import { NextRequest, NextResponse } from "next/server";
 import { getServerSideConfig } from "../config/server";
 import { DEFAULT_MODELS, OPENAI_BASE_URL, GEMINI_BASE_URL } from "../constant";
-import { collectModelTable } from "../utils/model";
+import { isModelAvailableInServer } from "../utils/model";
 import { makeAzurePath } from "../azure";

 const serverConfig = getServerSideConfig();
@@ -83,17 +83,15 @@ export async function requestOpenai(req: NextRequest) {
   // #1815 try to refuse gpt4 request
   if (serverConfig.customModels && req.body) {
     try {
-      const modelTable = collectModelTable(
-        DEFAULT_MODELS,
-        serverConfig.customModels,
-      );
       const clonedBody = await req.text();
       fetchOptions.body = clonedBody;

       const jsonBody = JSON.parse(clonedBody) as { model?: string };

       // not undefined and is false
-      if (modelTable[jsonBody?.model ?? ""].available === false) {
+      if (
+        isModelAvailableInServer(serverConfig.customModels, jsonBody?.model)
+      ) {
         return NextResponse.json(
           {
             error: true,
@@ -112,16 +110,16 @@ export async function requestOpenai(req: NextRequest) {
   try {
     const res = await fetch(fetchUrl, fetchOptions);

     // Extract the OpenAI-Organization header from the response
     const openaiOrganizationHeader = res.headers.get("OpenAI-Organization");

     // Check if serverConfig.openaiOrgId is defined and not an empty string
     if (serverConfig.openaiOrgId && serverConfig.openaiOrgId.trim() !== "") {
       // If openaiOrganizationHeader is present, log it; otherwise, log that the header is not present
       console.log("[Org ID]", openaiOrganizationHeader);
     } else {
       console.log("[Org ID] is not set up.");
     }

     // to prevent browser prompt for credentials
     const newHeaders = new Headers(res.headers);
@@ -129,7 +127,6 @@ export async function requestOpenai(req: NextRequest) {
     // to disable nginx buffering
     newHeaders.set("X-Accel-Buffering", "no");

     // Conditionally delete the OpenAI-Organization header from the response if [Org ID] is undefined or empty (not setup in ENV)
     // Also, this is to prevent the header from being sent to the client
     if (!serverConfig.openaiOrgId || serverConfig.openaiOrgId.trim() === "") {
@@ -142,7 +139,6 @@ export async function requestOpenai(req: NextRequest) {
     // The browser will try to decode the response with brotli and fail
     newHeaders.delete("content-encoding");

     return new Response(res.body, {
       status: res.status,
       statusText: res.statusText,

View File

@@ -1,3 +1,4 @@
+import { DEFAULT_MODELS } from "../constant";
 import { LLMModel } from "../client/api";

 const customProvider = (modelName: string) => ({
@@ -100,3 +101,8 @@ export function collectModelsWithDefaultModel(
   const allModels = Object.values(modelTable);
   return allModels;
 }
+
+export function isModelAvailableInServer(customModels, modelName) {
+  const modelTable = collectModelTable(DEFAULT_MODELS, customModels);
+  return modelTable[modelName ?? ""].available === false;
+}
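
Note on the new helper: despite its name, isModelAvailableInServer returns true when the requested model is explicitly marked available === false in the table built from DEFAULT_MODELS and the customModels string, so the routes above refuse the request exactly when it returns true; unknown model names make the table lookup throw, which the surrounding try blocks in both routes absorb. Below is a minimal usage sketch mirroring the call added to requestOpenai. It is not part of the commit: shouldRejectRequest is a hypothetical wrapper name, "-gpt-4" is an assumed example of the disable syntax parsed by collectModelTable (the syntax is not shown in this diff), and the relative import paths assume the sketch sits next to the second changed file.

// Sketch only: how a server route would use the new helper to refuse a model.
import { getServerSideConfig } from "../config/server";
import { isModelAvailableInServer } from "../utils/model";

const serverConfig = getServerSideConfig();

// Hypothetical wrapper: returns true when the request should be refused,
// e.g. when the custom-models config contains an entry like "-gpt-4" (assumed syntax).
function shouldRejectRequest(requestedModel?: string): boolean {
  return isModelAvailableInServer(serverConfig.customModels, requestedModel);
}

Also note that the Anthropic route calls the helper with a single argument, isModelAvailableInServer(jsonBody?.model ?? ""), while the function added to the utils file takes (customModels, modelName); passing serverConfig.customModels as the first argument there, as the OpenAI route does, would match that signature.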