feat: close #2192 use /list/models to get model ids

Author: Yidadaa
Date:   2023-07-04 23:16:24 +08:00
Parent: f2d748cfe4
Commit: 4131fccbe0
12 changed files with 214 additions and 121 deletions


@@ -9,7 +9,7 @@ const serverConfig = getServerSideConfig();
 const DANGER_CONFIG = {
   needCode: serverConfig.needCode,
   hideUserApiKey: serverConfig.hideUserApiKey,
-  enableGPT4: serverConfig.enableGPT4,
+  disableGPT4: serverConfig.disableGPT4,
   hideBalanceQuery: serverConfig.hideBalanceQuery,
 };
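A minimal client-side sketch of how the renamed flag might be consumed, assuming the config route above is served at /api/config via POST and returns DANGER_CONFIG as JSON; the DangerConfig interface and pickVisibleModels helper are hypothetical names for illustration, not part of this commit:

// Hypothetical sketch; assumes POST /api/config returns the DANGER_CONFIG object as JSON.
interface DangerConfig {
  needCode: boolean;
  hideUserApiKey: boolean;
  disableGPT4: boolean; // renamed from enableGPT4 in this commit
  hideBalanceQuery: boolean;
}

async function pickVisibleModels(allModelIds: string[]): Promise<string[]> {
  const res = await fetch("/api/config", { method: "POST" });
  const config = (await res.json()) as DangerConfig;
  // When GPT-4 is disabled server-side, hide its ids from the picker.
  return config.disableGPT4
    ? allModelIds.filter((id) => !id.startsWith("gpt-4"))
    : allModelIds;
}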


@@ -1,3 +1,5 @@
+import { type OpenAIListModelResponse } from "@/app/client/platforms/openai";
+import { getServerSideConfig } from "@/app/config/server";
 import { OpenaiPath } from "@/app/constant";
 import { prettyObject } from "@/app/utils/format";
 import { NextRequest, NextResponse } from "next/server";
@@ -6,6 +8,18 @@ import { requestOpenai } from "../../common";
 const ALLOWD_PATH = new Set(Object.values(OpenaiPath));
 
+function getModels(remoteModelRes: OpenAIListModelResponse) {
+  const config = getServerSideConfig();
+
+  if (config.disableGPT4) {
+    remoteModelRes.data = remoteModelRes.data.filter(
+      (m) => !m.id.startsWith("gpt-4"),
+    );
+  }
+
+  return remoteModelRes;
+}
+
 async function handle(
   req: NextRequest,
   { params }: { params: { path: string[] } },
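For reference, a self-contained sketch of what the filter above does, assuming OpenAIListModelResponse mirrors OpenAI's GET /v1/models payload ({ object: "list", data: [{ id, ... }] }); the sample ids are illustrative only:

// Assumed shape; the real type lives in @/app/client/platforms/openai.
type ListModelResponse = { object: string; data: Array<{ id: string }> };

const sample: ListModelResponse = {
  object: "list",
  data: [{ id: "gpt-3.5-turbo" }, { id: "gpt-4" }, { id: "gpt-4-0613" }],
};

// With disableGPT4 set, getModels keeps only the non-GPT-4 entries.
sample.data = sample.data.filter((m) => !m.id.startsWith("gpt-4"));
console.log(sample.data.map((m) => m.id)); // ["gpt-3.5-turbo"]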
@@ -39,7 +53,18 @@ async function handle(
   }
 
   try {
-    return await requestOpenai(req);
+    const response = await requestOpenai(req);
+
+    // list models
+    if (subpath === OpenaiPath.ListModelPath && response.status === 200) {
+      const resJson = (await response.json()) as OpenAIListModelResponse;
+      const availableModels = getModels(resJson);
+      return NextResponse.json(availableModels, {
+        status: response.status,
+      });
+    }
+
+    return response;
   } catch (e) {
     console.error("[OpenAI] ", e);
     return NextResponse.json(prettyObject(e));
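A sketch of how a caller could read the filtered list through this proxy, assuming OpenaiPath.ListModelPath resolves to "v1/models" and the route above is mounted under /api/openai/; listAvailableModelIds is a hypothetical helper, not part of this commit:

// Hypothetical usage sketch; the URL prefix and helper name are assumptions.
async function listAvailableModelIds(): Promise<string[]> {
  const res = await fetch("/api/openai/v1/models");
  if (!res.ok) throw new Error(`list models failed: ${res.status}`);
  const json = (await res.json()) as { data: Array<{ id: string }> };
  // GPT-4 ids are already removed server-side when disableGPT4 is set.
  return json.data.map((m) => m.id);
}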