feat: clean codes

butterfly 2024-04-07 11:50:25 +08:00
parent 69b079c86e
commit 3cb4315193
1 changed file with 3 additions and 12 deletions


@@ -1,4 +1,3 @@
-import { type OpenAIListModelResponse } from "@/app/client/platforms/openai";
 import { getServerSideConfig } from "@/app/config/server";
 import {
   ANTHROPIC_BASE_URL,
@@ -6,12 +5,10 @@ import {
   ApiPath,
   DEFAULT_MODELS,
   ModelProvider,
-  OpenaiPath,
 } from "@/app/constant";
 import { prettyObject } from "@/app/utils/format";
 import { NextRequest, NextResponse } from "next/server";
 import { auth } from "../../auth";
-import { requestOpenai } from "../../common";
 import { collectModelTable } from "@/app/utils/model";
 
 const ALLOWD_PATH = new Set([Anthropic.ChatPath, Anthropic.ChatPath1]);
@@ -121,7 +118,7 @@ export async function request(req: NextRequest) {
   const fetchOptions: RequestInit = {
     headers: {
       "Content-Type": "application/json",
-      // "Cache-Control": "no-store",
+      "Cache-Control": "no-store",
       [authHeaderName]: authValue,
       "anthropic-version":
         req.headers.get("anthropic-version") ||
@@ -136,7 +133,7 @@ export async function request(req: NextRequest) {
     signal: controller.signal,
   };
 
-  // #1815 try to refuse gpt4 request
+  // #1815 try to refuse some request to some models
   if (serverConfig.customModels && req.body) {
     try {
       const modelTable = collectModelTable(
@@ -161,7 +158,7 @@ export async function request(req: NextRequest) {
         );
       }
     } catch (e) {
-      console.error("[OpenAI] gpt4 filter", e);
+      console.error(`[Anthropic] filter`, e);
     }
   }
   console.log("[Anthropic request]", fetchOptions.headers, req.method);
@@ -181,12 +178,6 @@ export async function request(req: NextRequest) {
     // to disable nginx buffering
     newHeaders.set("X-Accel-Buffering", "no");
 
-    // The latest version of the OpenAI API forced the content-encoding to be "br" in json response
-    // So if the streaming is disabled, we need to remove the content-encoding header
-    // Because Vercel uses gzip to compress the response, if we don't remove the content-encoding header
-    // The browser will try to decode the response with brotli and fail
-    newHeaders.delete("content-encoding");
-
     return new Response(res.body, {
       status: res.status,
       statusText: res.statusText,