feat: move proxy-server handlers into providers

This commit is contained in:
Dean-YZG
2024-05-22 21:31:54 +08:00
parent 8093d1ffba
commit 8de8acdce8
23 changed files with 1570 additions and 420 deletions

View File

@@ -5,6 +5,26 @@ export const OPENAI_BASE_URL = "https://api.openai.com";
/** Chat roles accepted by the OpenAI chat-completions API. */
export const ROLES = ["system", "user", "assistant"] as const;

// Edge-runtime regions to prefer for this route (Vercel-style region IDs —
// NOTE(review): assumed to match the deployment platform's region list; confirm).
export const preferredRegion: string | string[] = [
  "arn1", "bom1", "cdg1", "cle1", "cpt1", "dub1",
  "fra1", "gru1", "hnd1", "iad1", "icn1", "kix1",
  "lhr1", "pdx1", "sfo1", "sin1", "syd1",
];
export const OpenaiMetas = {
ChatPath: "v1/chat/completions",
UsagePath: "dashboard/billing/usage",
@@ -157,9 +177,9 @@ export const modelConfigs = [
},
];
const defaultEndpoint = "/api/openai";
export const settingItems: SettingItem<SettingKeys>[] = [
export const settingItems: (
defaultEndpoint: string,
) => SettingItem<SettingKeys>[] = (defaultEndpoint) => [
{
name: "openaiUrl",
title: Locale.Endpoint.Title,
@@ -189,6 +209,6 @@ export const settingItems: SettingItem<SettingKeys>[] = [
placeholder: Locale.ApiKey.Placeholder,
type: "input",
inputType: "password",
validators: ["required"],
// validators: ["required"],
},
];

View File

@@ -4,20 +4,32 @@ import {
IProviderTemplate,
ModelInfo,
getMessageTextContent,
ServerConfig,
} from "../../common";
import {
EventStreamContentType,
fetchEventSource,
} from "@fortaine/fetch-event-source";
import Locale from "@/app/locales";
import { makeBearer, validString, prettyObject } from "./utils";
import {
authHeaderName,
prettyObject,
parseResp,
auth,
getTimer,
getHeaders,
} from "./utils";
import {
modelConfigs,
settingItems,
SettingKeys,
OpenaiMetas,
ROLES,
OPENAI_BASE_URL,
preferredRegion,
} from "./config";
import { NextRequest, NextResponse } from "next/server";
import { ModelList } from "./type";
export type OpenAIProviderSettingKeys = SettingKeys;
@@ -49,66 +61,54 @@ interface RequestPayload {
max_tokens?: number;
}
/** One entry in the OpenAI model-list response. */
interface ModelListItem {
  id: string;
  object: "model";
  created: number;
  owned_by: "system" | "openai-internal";
}

/** Shape of the OpenAI models-endpoint response (only the fields we consume). */
interface ModelList {
  object: "list";
  data: ModelListItem[];
}
// Template type for this provider's route/handler members.
// FIX: the provider-name type argument was "azure" — a copy/paste slip from the
// Azure provider. This file's class implements
// IProviderTemplate<SettingKeys, "openai", typeof OpenaiMetas> and its `name`
// is "openai", so the alias must use "openai" to stay consistent.
type ProviderTemplate = IProviderTemplate<
  SettingKeys,
  "openai",
  typeof OpenaiMetas
>;
class OpenAIProvider
implements IProviderTemplate<SettingKeys, "openai", typeof OpenaiMetas>
{
apiRouteRootName: "/api/provider/openai" = "/api/provider/openai";
allowedApiMethods: (
| "POST"
| "GET"
| "OPTIONS"
| "PUT"
| "PATCH"
| "DELETE"
)[] = ["GET", "POST"];
runtime = "edge" as const;
preferredRegion = preferredRegion;
name = "openai" as const;
metas = OpenaiMetas;
readonly REQUEST_TIMEOUT_MS = 60000;
defaultModels = modelConfigs;
providerMeta = {
displayName: "OpenAI",
settingItems,
settingItems: settingItems(
`${this.apiRouteRootName}/${OpenaiMetas.ChatPath}`,
),
};
private path(payload: InternalChatRequestPayload<SettingKeys>): string {
private formatChatPayload(payload: InternalChatRequestPayload<SettingKeys>) {
const {
messages,
isVisionModel,
model,
stream,
modelConfig: {
temperature,
presence_penalty,
frequency_penalty,
top_p,
max_tokens,
},
providerConfig: { openaiUrl },
} = payload;
const path = OpenaiMetas.ChatPath;
console.log("[Proxy Endpoint] ", openaiUrl, path);
return [openaiUrl, path].join("/");
}
private getHeaders(payload: InternalChatRequestPayload<SettingKeys>) {
const { openaiApiKey } = payload.providerConfig;
const headers: Record<string, string> = {
"Content-Type": "application/json",
Accept: "application/json",
};
if (validString(openaiApiKey)) {
headers["Authorization"] = makeBearer(openaiApiKey);
}
return headers;
}
private formatChatPayload(payload: InternalChatRequestPayload<SettingKeys>) {
const { messages, isVisionModel, model, stream, modelConfig } = payload;
const {
temperature,
presence_penalty,
frequency_penalty,
top_p,
max_tokens,
} = modelConfig;
const openAiMessages = messages.map((v) => ({
role: v.role,
@@ -133,42 +133,101 @@ class OpenAIProvider
console.log("[Request] openai payload: ", requestPayload);
return {
headers: this.getHeaders(payload),
headers: getHeaders(payload.providerConfig.openaiApiKey),
body: JSON.stringify(requestPayload),
method: "POST",
url: this.path(payload),
url: openaiUrl!,
};
}
/**
 * Extracts the assistant text from a non-streaming chat-completion response.
 * Falls back to an empty string when no choice/message/content is present.
 */
private readWholeMessageResponseBody(res: {
  choices: { message: { content: any } }[];
}) {
  const content = res.choices?.[0]?.message?.content;
  return { message: content ?? "" };
}
private getTimer = () => {
private async requestOpenai(req: NextRequest, serverConfig: ServerConfig) {
const { baseUrl = OPENAI_BASE_URL, openaiOrgId } = serverConfig;
const controller = new AbortController();
const authValue = req.headers.get(authHeaderName) ?? "";
// make a fetch request
const requestTimeoutId = setTimeout(
() => controller.abort(),
this.REQUEST_TIMEOUT_MS,
const path = `${req.nextUrl.pathname}${req.nextUrl.search}`.replaceAll(
this.apiRouteRootName,
"",
);
return {
...controller,
clear: () => {
clearTimeout(requestTimeoutId);
},
};
};
console.log("[Proxy] ", path);
console.log("[Base Url]", baseUrl);
async chat(payload: InternalChatRequestPayload<SettingKeys>) {
const timeoutId = setTimeout(
() => {
controller.abort();
},
10 * 60 * 1000,
);
const fetchUrl = `${baseUrl}/${path}`;
const fetchOptions: RequestInit = {
headers: {
"Content-Type": "application/json",
"Cache-Control": "no-store",
[authHeaderName]: authValue,
...(openaiOrgId && {
"OpenAI-Organization": openaiOrgId,
}),
},
method: req.method,
body: req.body,
// to fix #2485: https://stackoverflow.com/questions/55920957/cloudflare-worker-typeerror-one-time-use-body
redirect: "manual",
// @ts-ignore
duplex: "half",
signal: controller.signal,
};
try {
const res = await fetch(fetchUrl, fetchOptions);
// Extract the OpenAI-Organization header from the response
const openaiOrganizationHeader = res.headers.get("OpenAI-Organization");
// Check if serverConfig.openaiOrgId is defined and not an empty string
if (openaiOrgId && openaiOrgId.trim() !== "") {
// If openaiOrganizationHeader is present, log it; otherwise, log that the header is not present
console.log("[Org ID]", openaiOrganizationHeader);
} else {
console.log("[Org ID] is not set up.");
}
// to prevent browser prompt for credentials
const newHeaders = new Headers(res.headers);
newHeaders.delete("www-authenticate");
// to disable nginx buffering
newHeaders.set("X-Accel-Buffering", "no");
// Conditionally delete the OpenAI-Organization header from the response if [Org ID] is undefined or empty (not setup in ENV)
// Also, this is to prevent the header from being sent to the client
if (!openaiOrgId || openaiOrgId.trim() === "") {
newHeaders.delete("OpenAI-Organization");
}
// The latest version of the OpenAI API forced the content-encoding to be "br" in json response
// So if the streaming is disabled, we need to remove the content-encoding header
// Because Vercel uses gzip to compress the response, if we don't remove the content-encoding header
// The browser will try to decode the response with brotli and fail
newHeaders.delete("content-encoding");
return new NextResponse(res.body, {
status: res.status,
statusText: res.statusText,
headers: newHeaders,
});
} finally {
clearTimeout(timeoutId);
}
}
async chat(
payload: InternalChatRequestPayload<SettingKeys>,
fetch: typeof window.fetch,
) {
const requestPayload = this.formatChatPayload(payload);
const timer = this.getTimer();
const timer = getTimer();
const res = await fetch(requestPayload.url, {
headers: {
@@ -182,7 +241,7 @@ class OpenAIProvider
timer.clear();
const resJson = await res.json();
const message = this.readWholeMessageResponseBody(resJson);
const message = parseResp(resJson);
return message;
}
@@ -190,13 +249,15 @@ class OpenAIProvider
streamChat(
payload: InternalChatRequestPayload<SettingKeys>,
handlers: ChatHandlers,
fetch: typeof window.fetch,
) {
const requestPayload = this.formatChatPayload(payload);
const timer = this.getTimer();
const timer = getTimer();
fetchEventSource(requestPayload.url, {
...requestPayload,
fetch,
async onopen(res) {
timer.clear();
const contentType = res.headers.get("content-type");
@@ -270,7 +331,7 @@ class OpenAIProvider
providerConfig: Record<SettingKeys, string>,
): Promise<ModelInfo[]> {
const { openaiApiKey, openaiUrl } = providerConfig;
const res = await fetch(`${openaiUrl}/vi/models`, {
const res = await fetch(`${openaiUrl}/v1/models`, {
headers: {
Authorization: `Bearer ${openaiApiKey}`,
},
@@ -282,6 +343,39 @@ class OpenAIProvider
name: o.id,
}));
}
/**
 * Server-side entry point for requests proxied through this provider's API
 * route: validates the subpath against OpenaiMetas, enforces auth, then
 * forwards the request upstream via requestOpenai.
 */
serverSideRequestHandler: ProviderTemplate["serverSideRequestHandler"] =
  async (req, config) => {
    const { subpath } = req;

    // Only endpoints declared in OpenaiMetas may be proxied.
    const allowedSubpaths = new Set(Object.values(OpenaiMetas));
    if (!allowedSubpaths.has(subpath)) {
      return NextResponse.json(
        {
          error: true,
          message: "you are not allowed to request " + subpath,
        },
        { status: 403 },
      );
    }

    // Reject unauthorized callers before touching the upstream API.
    const authResult = auth(req, config);
    if (authResult.error) {
      return NextResponse.json(authResult, { status: 401 });
    }

    try {
      return await this.requestOpenai(req, config);
    } catch (e) {
      // Surface the failure to the client as a readable JSON payload.
      return NextResponse.json(prettyObject(e));
    }
  };
}
export default OpenAIProvider;

View File

@@ -0,0 +1,18 @@
/** A single model record returned by the OpenAI models endpoint. */
interface ModelEntry {
  id: string;
  object: "model";
  created: number;
  owned_by: "system" | "openai-internal";
}

/** Response shape of the OpenAI models endpoint (only the fields we consume). */
export interface ModelList {
  object: "list";
  data: ModelEntry[];
}
/** One model as reported by the loosely-typed OpenAI list-models payload. */
interface OpenAIListedModel {
  id: string;
  object: string;
  root: string;
}

/**
 * Loosely-typed OpenAI list-models response: `object` fields are plain
 * strings rather than literal types.
 */
export interface OpenAIListModelResponse {
  object: string;
  data: OpenAIListedModel[];
}

View File

@@ -1,7 +1,21 @@
export const makeBearer = (s: string) => `Bearer ${s.trim()}`;
import { NextRequest } from "next/server";
import { ServerConfig, getIP } from "../../common";
export const validString = (x?: string): x is string =>
Boolean(x && x.length > 0);
/** Abort in-flight requests after this many milliseconds. */
export const REQUEST_TIMEOUT_MS = 60_000;

/** HTTP header carrying the bearer credential. */
export const authHeaderName = "Authorization";

/** Formats a trimmed key as a `Bearer` credential value. */
const makeBearer = (token: string): string => `Bearer ${token.trim()}`;

/** True when the value is a non-empty string (narrows `string | undefined`). */
const validString = (x?: string): x is string =>
  typeof x === "string" && x.length > 0;
/**
 * Strips any `Bearer ` prefixes and surrounding whitespace from an
 * Authorization header value, returning the bare API key.
 */
function parseApiKey(bearerToken: string) {
  // split/join removes every "Bearer " occurrence, matching the original
  // replaceAll-based behavior.
  const apiKey = bearerToken.trim().split("Bearer ").join("").trim();
  return { apiKey };
}
export function prettyObject(msg: any) {
const obj = msg;
@@ -16,3 +30,74 @@ export function prettyObject(msg: any) {
}
return ["```json", msg, "```"].join("\n");
}
/**
 * Pulls the assistant text out of a non-streaming chat-completion response,
 * defaulting to an empty string when no choice is present.
 */
export function parseResp(res: { choices: { message: { content: any } }[] }) {
  const firstChoice = res.choices?.[0];
  return { message: firstChoice?.message?.content ?? "" };
}
/**
 * Checks the incoming request's credentials against server policy.
 *
 * Returns `{ error: true, message }` when the caller supplied their own key
 * but the deployment forbids it; otherwise `{ error: false }`. When no user
 * key is present and a system key is configured, the system key is injected
 * into the request's Authorization header as a side effect.
 */
export function auth(req: NextRequest, serverConfig: ServerConfig) {
  const { hideUserApiKey, apiKey: systemApiKey } = serverConfig;
  const { apiKey } = parseApiKey(req.headers.get(authHeaderName) ?? "");

  console.log("[User IP] ", getIP(req));
  console.log("[Time] ", new Date().toLocaleString());

  // Deployments may forbid callers from bringing their own key.
  if (hideUserApiKey && apiKey) {
    return {
      error: true,
      message: "you are not allowed to access with your own api key",
    };
  }

  if (apiKey) {
    console.log("[Auth] use user api key");
    return { error: false };
  }

  // No user key: fall back to the server-configured key when available.
  if (systemApiKey) {
    console.log("[Auth] use system api key");
    req.headers.set(authHeaderName, `Bearer ${systemApiKey}`);
  } else {
    console.log("[Auth] admin did not provide an api key");
  }
  return { error: false };
}
/**
 * Creates an abort handle with an automatic timeout.
 *
 * Returns an object exposing the controller's `signal` (pass to fetch),
 * an `abort()` to cancel manually, and `clear()` to disarm the timeout
 * once the request settles.
 *
 * FIX: the previous implementation returned `{ ...controller, clear }`, but
 * object spread copies only own enumerable properties — `signal` is a
 * prototype accessor and `abort` a prototype method, so the spread copied
 * neither. The returned timer's `signal` was `undefined` and the timeout
 * could never actually cancel a request. Expose both explicitly instead.
 */
export function getTimer() {
  const controller = new AbortController();
  // Automatically abort once the request-timeout window elapses.
  const requestTimeoutId = setTimeout(
    () => controller.abort(),
    REQUEST_TIMEOUT_MS,
  );
  return {
    signal: controller.signal,
    abort: () => controller.abort(),
    clear: () => {
      clearTimeout(requestTimeoutId);
    },
  };
}
/**
 * Builds the JSON request headers for the OpenAI API, attaching a bearer
 * Authorization header only when a non-empty API key is supplied.
 */
export function getHeaders(openaiApiKey?: string) {
  const baseHeaders: Record<string, string> = {
    "Content-Type": "application/json",
    Accept: "application/json",
  };
  return validString(openaiApiKey)
    ? { ...baseHeaders, [authHeaderName]: makeBearer(openaiApiKey) }
    : baseHeaders;
}