feat: mix handlers of proxy server in providers

Dean-YZG 2024-05-22 21:31:54 +08:00
parent 8093d1ffba
commit 8de8acdce8
23 changed files with 1570 additions and 420 deletions

View File

@ -0,0 +1,93 @@
import * as ProviderTemplates from "@/app/client/providers";
import { getServerSideConfig } from "@/app/config/server";
import { NextRequest, NextResponse } from "next/server";
import { cloneDeep } from "lodash-es";
import {
disableSystemApiKey,
makeUrlsUsable,
modelNameRequestHeader,
} from "@/app/client/common";
import { collectModelTable } from "@/app/utils/model";
async function handle(
req: NextRequest,
{ params }: { params: { path: string[] } },
) {
const [providerName] = params.path;
const { headers } = req;
const serverConfig = getServerSideConfig();
const modelName = headers.get(modelNameRequestHeader);
const ProviderTemplate = Object.values(ProviderTemplates).find(
(t) => t.prototype.name === providerName,
);
if (!ProviderTemplate) {
return NextResponse.json(
{
error: true,
message: "No provider found: " + providerName,
},
{
status: 404,
},
);
}
// #1815 try to refuse gpt4 request
if (modelName && serverConfig.customModels) {
try {
const modelTable = collectModelTable([], serverConfig.customModels);
// not undefined and is false
if (modelTable[modelName]?.available === false) {
return NextResponse.json(
{
error: true,
message: `you are not allowed to use ${modelName} model`,
},
{
status: 403,
},
);
}
} catch (e) {
console.error("models filter", e);
}
}
const config = disableSystemApiKey(
makeUrlsUsable(cloneDeep(serverConfig), [
"anthropicUrl",
"azureUrl",
"googleUrl",
"baseUrl",
]),
["anthropicApiKey", "azureApiKey", "googleApiKey", "apiKey"],
serverConfig.needCode &&
ProviderTemplate !== ProviderTemplates.NextChatProvider, // if the request must carry an access code, do not provide system keys to non-NextChat providers
);
const request = Object.assign({}, req, {
subpath: params.path.join("/"),
});
return new ProviderTemplate().serverSideRequestHandler(request, config);
}
export const GET = handle;
export const POST = handle;
export const PUT = handle;
export const PATCH = handle;
export const DELETE = handle;
export const OPTIONS = handle;
export const runtime = "edge";
export const preferredRegion = Array.from(
new Set(
Object.values(ProviderTemplates).reduce(
(arr, t) => [...arr, ...(t.prototype.preferredRegion ?? [])],
[] as string[],
),
),
);
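For context, a browser-side caller reaches this catch-all route as in the minimal sketch below (not part of this commit). The provider segment after /api/provider selects the ProviderTemplate, the remainder of the path becomes `subpath`, and the `x-nextchat-model-name` header feeds the #1815 model allow-list check above; the concrete path and model name are illustrative.

```ts
// Hypothetical client call; "anthropic" picks the provider template and
// "v1/messages" is forwarded as `subpath` to serverSideRequestHandler.
async function callProviderRoute(body: unknown): Promise<Response> {
  return fetch("/api/provider/anthropic/v1/messages", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      // read by the route via modelNameRequestHeader for the model filter
      "x-nextchat-model-name": "claude-3-opus-20240229",
    },
    body: JSON.stringify(body),
  });
}
```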

View File

@ -3,3 +3,5 @@ export * from "./types";
export * from "./locale"; export * from "./locale";
export * from "./utils"; export * from "./utils";
export const modelNameRequestHeader = "x-nextchat-model-name";

View File

@ -1,4 +1,6 @@
import { RequestMessage } from "../api"; import { RequestMessage } from "../api";
import { getServerSideConfig } from "@/app/config/server";
import { NextRequest, NextResponse } from "next/server";
export { type RequestMessage }; export { type RequestMessage };
@ -152,6 +154,9 @@ export type SettingItem<SettingKeys extends string = ""> =
// ===================================== Provider Settings Types end ====================================== // ===================================== Provider Settings Types end ======================================
// ===================================== Provider Template Types start ====================================== // ===================================== Provider Template Types start ======================================
export type ServerConfig = ReturnType<typeof getServerSideConfig>;
export interface IProviderTemplate< export interface IProviderTemplate<
SettingKeys extends string, SettingKeys extends string,
NAME extends string, NAME extends string,
@ -159,6 +164,12 @@ export interface IProviderTemplate<
> { > {
readonly name: NAME; readonly name: NAME;
readonly apiRouteRootName: `/api/provider/${NAME}`;
readonly allowedApiMethods: Array<
"GET" | "POST" | "PUT" | "PATCH" | "DELETE" | "OPTIONS"
>;
readonly metas: Meta; readonly metas: Meta;
readonly providerMeta: { readonly providerMeta: {
@ -170,17 +181,31 @@ export interface IProviderTemplate<
streamChat( streamChat(
payload: InternalChatRequestPayload<SettingKeys>, payload: InternalChatRequestPayload<SettingKeys>,
handlers: ChatHandlers, handlers: ChatHandlers,
fetch: typeof window.fetch,
): AbortController; ): AbortController;
chat( chat(
payload: InternalChatRequestPayload<SettingKeys>, payload: InternalChatRequestPayload<SettingKeys>,
fetch: typeof window.fetch,
): Promise<StandChatReponseMessage>; ): Promise<StandChatReponseMessage>;
getAvailableModels?( getAvailableModels?(
providerConfig: InternalChatRequestPayload<SettingKeys>["providerConfig"], providerConfig: InternalChatRequestPayload<SettingKeys>["providerConfig"],
): Promise<ModelInfo[]>; ): Promise<ModelInfo[]>;
readonly runtime: "edge";
readonly preferredRegion: "auto" | "global" | "home" | string | string[];
serverSideRequestHandler(
req: NextRequest & {
subpath: string;
},
serverConfig: ServerConfig,
): Promise<NextResponse>;
} }
export type ProviderTemplate = IProviderTemplate<any, any, any>;
export interface Serializable<Snapshot> { export interface Serializable<Snapshot> {
serialize(): Snapshot; serialize(): Snapshot;
} }
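To make the widened interface concrete, here is a minimal sketch of a provider that satisfies only the new server-side members (apiRouteRootName, allowedApiMethods, runtime, preferredRegion, serverSideRequestHandler). It is not part of the commit; the real interface additionally requires chat, streamChat, metas and providerMeta, and ServerConfig below is a simplified stand-in.

```ts
import { NextRequest, NextResponse } from "next/server";

// Simplified stand-in; the real ServerConfig is ReturnType<typeof getServerSideConfig>.
type ServerConfig = Record<string, unknown>;

export default class EchoProvider {
  readonly name = "echo" as const;
  readonly apiRouteRootName = "/api/provider/echo" as const;
  readonly allowedApiMethods: Array<"GET" | "POST"> = ["GET", "POST"];
  readonly runtime = "edge" as const;
  readonly preferredRegion = "auto";

  async serverSideRequestHandler(
    req: NextRequest & { subpath: string },
    _serverConfig: ServerConfig,
  ): Promise<NextResponse> {
    // A real provider would proxy to its upstream API here.
    return NextResponse.json({ subpath: req.subpath, method: req.method });
  }
}
```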

View File

@ -1,4 +1,6 @@
import { RequestMessage } from "./types"; import { NextRequest } from "next/server";
import { RequestMessage, ServerConfig } from "./types";
import { cloneDeep } from "lodash-es";
export function getMessageTextContent(message: RequestMessage) {
if (typeof message.content === "string") {
@ -24,3 +26,63 @@ export function getMessageImages(message: RequestMessage): string[] {
}
return urls;
}
export function getIP(req: NextRequest) {
let ip = req.ip ?? req.headers.get("x-real-ip");
const forwardedFor = req.headers.get("x-forwarded-for");
if (!ip && forwardedFor) {
ip = forwardedFor.split(",").at(0) ?? "";
}
return ip;
}
export function formatUrl(baseUrl?: string) {
if (baseUrl && !baseUrl.startsWith("http")) {
baseUrl = `https://${baseUrl}`;
}
if (baseUrl?.endsWith("/")) {
baseUrl = baseUrl.slice(0, -1);
}
return baseUrl;
}
function travel(
config: ServerConfig,
keys: Array<keyof ServerConfig>,
handle: (prop: any) => any,
): ServerConfig {
const copiedConfig = cloneDeep(config);
keys.forEach((k) => {
copiedConfig[k] = handle(copiedConfig[k] as string) as never;
});
return copiedConfig;
}
export const makeUrlsUsable = (
config: ServerConfig,
keys: Array<keyof ServerConfig>,
) => travel(config, keys, formatUrl);
export const disableSystemApiKey = (
config: ServerConfig,
keys: Array<keyof ServerConfig>,
forbidden: boolean,
) =>
travel(config, keys, (p) => {
return forbidden ? undefined : p;
});
export function isSameOrigin(requestUrl: string) {
var a = document.createElement("a");
a.href = requestUrl;
// check that the protocol, hostname and port match the current page
return (
a.protocol === window.location.protocol &&
a.hostname === window.location.hostname &&
a.port === window.location.port
);
}
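A small usage sketch for the helpers above, assuming they are imported from this common module; the config literal is a stand-in for the much larger object returned by getServerSideConfig.

```ts
import { makeUrlsUsable, disableSystemApiKey } from "@/app/client/common";

const config = {
  baseUrl: "api.openai.com/",
  anthropicUrl: "https://api.anthropic.com",
  apiKey: "sk-system-key",
  anthropicApiKey: "sk-ant-system-key",
} as any;

// Prefix missing protocols and strip trailing slashes.
const usable = makeUrlsUsable(config, ["baseUrl", "anthropicUrl"]);
// usable.baseUrl === "https://api.openai.com"

// With the third argument true (access code required, non-NextChat provider),
// the listed system keys are blanked before the config reaches a handler.
const scrubbed = disableSystemApiKey(usable, ["apiKey", "anthropicApiKey"], true);
// scrubbed.apiKey === undefined
```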

View File

@ -1,3 +1,5 @@
export * from "./shim";
export * from "../common/types"; export * from "../common/types";
export * from "./providerClient"; export * from "./providerClient";
@ -5,5 +7,3 @@ export * from "./providerClient";
export * from "./modelClient"; export * from "./modelClient";
export * from "../common/locale"; export * from "../common/locale";
export * from "./shim";

View File

@ -3,14 +3,15 @@ import {
InternalChatHandlers,
Model,
ModelTemplate,
ProviderTemplate,
StandChatReponseMessage,
StandChatRequestPayload,
isSameOrigin,
modelNameRequestHeader,
} from "../common";
import * as ProviderTemplates from "@/app/client/providers";
import { nanoid } from "nanoid";
export type ProviderTemplate = IProviderTemplate<any, any, any>;
export type ProviderTemplateName =
(typeof ProviderTemplates)[keyof typeof ProviderTemplates]["prototype"]["name"];
@ -38,6 +39,7 @@ const providerTemplates = Object.values(ProviderTemplates).reduce(
export class ProviderClient {
providerTemplate: IProviderTemplate<any, any, any>;
genFetch: (modelName: string) => typeof window.fetch;
static ProviderTemplates = providerTemplates;
@ -61,6 +63,31 @@ export class ProviderClient {
constructor(private provider: Provider) {
const { providerTemplateName } = provider;
this.providerTemplate = this.getProviderTemplate(providerTemplateName);
this.genFetch =
(modelName: string) =>
(...args) => {
const req = new Request(...args);
const headers: Record<string, any> = {
...req.headers,
};
if (isSameOrigin(req.url)) {
headers[modelNameRequestHeader] = modelName;
}
return window.fetch(req.url, {
method: req.method,
keepalive: req.keepalive,
headers,
body: req.body,
redirect: req.redirect,
integrity: req.integrity,
signal: req.signal,
credentials: req.credentials,
mode: req.mode,
referrer: req.referrer,
referrerPolicy: req.referrerPolicy,
});
};
}
private getProviderTemplate(providerTemplateName: string) {
@ -98,12 +125,15 @@ export class ProviderClient {
async chat(
payload: StandChatRequestPayload,
): Promise<StandChatReponseMessage> {
return this.providerTemplate.chat({
...payload,
stream: false,
isVisionModel: this.getModelConfig(payload.model)?.isVisionModel,
providerConfig: this.provider.providerConfig,
});
return this.providerTemplate.chat(
{
...payload,
stream: false,
isVisionModel: this.getModelConfig(payload.model)?.isVisionModel,
providerConfig: this.provider.providerConfig,
},
this.genFetch(payload.model),
);
}
streamChat(payload: StandChatRequestPayload, handlers: InternalChatHandlers) {
@ -129,6 +159,7 @@ export class ProviderClient {
handlers.onFinish(message);
},
},
this.genFetch(payload.model),
);
timer.signal.onabort = () => {
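The wrapped fetch produced by genFetch only tags same-origin requests (calls into /api/provider/...) with the model name; a standalone sketch of that behaviour, assuming imports from the common module above:

```ts
import { isSameOrigin, modelNameRequestHeader } from "@/app/client/common";

function withModelHeader(modelName: string): typeof window.fetch {
  return (...args) => {
    const req = new Request(...args);
    const headers = new Headers(req.headers);
    if (isSameOrigin(req.url)) {
      // Only proxy-route calls get the hint header, so the server-side
      // model filter can see which model the client asked for.
      headers.set(modelNameRequestHeader, modelName);
    }
    return window.fetch(new Request(req, { headers }));
  };
}
```

Note that spreading a Headers instance into an object literal, as `{ ...req.headers }` does in the constructor above, yields an empty object in most runtimes, which is why this sketch copies headers with `new Headers(...)` instead.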

View File

@ -6,10 +6,11 @@ export type SettingKeys =
| "anthropicApiKey" | "anthropicApiKey"
| "anthropicApiVersion"; | "anthropicApiVersion";
export const ANTHROPIC_BASE_URL = "https://api.anthropic.com";
export const AnthropicMetas = { export const AnthropicMetas = {
ChatPath: "v1/messages", ChatPath: "v1/messages",
ChatPath1: "v1/complete", ExampleEndpoint: ANTHROPIC_BASE_URL,
ExampleEndpoint: "https://api.anthropic.com",
Vision: "2023-06-01", Vision: "2023-06-01",
}; };
@ -64,9 +65,29 @@ export const modelConfigs = [
}, },
]; ];
const defaultEndpoint = "/api/anthropic"; export const preferredRegion: string | string[] = [
"arn1",
"bom1",
"cdg1",
"cle1",
"cpt1",
"dub1",
"fra1",
"gru1",
"hnd1",
"iad1",
"icn1",
"kix1",
"lhr1",
"pdx1",
"sfo1",
"sin1",
"syd1",
];
export const settingItems: SettingItem<SettingKeys>[] = [
export const settingItems: (
defaultEndpoint: string,
) => SettingItem<SettingKeys>[] = (defaultEndpoint) => [
{
name: "anthropicUrl",
title: Locale.Endpoint.Title,
@ -103,7 +124,7 @@ export const settingItems: SettingItem<SettingKeys>[] = [
name: "anthropicApiVersion",
title: Locale.ApiVerion.Title,
description: Locale.ApiVerion.SubTitle,
placeholder: AnthropicMetas.Vision,
defaultValue: AnthropicMetas.Vision,
type: "input",
// validators: ["required"],
},

View File

@ -1,24 +1,33 @@
import {
ANTHROPIC_BASE_URL,
AnthropicMetas,
ClaudeMapper,
SettingKeys,
modelConfigs,
preferredRegion,
settingItems,
} from "./config";
import {
ChatHandlers,
InternalChatRequestPayload,
IProviderTemplate,
getMessageTextContent,
RequestMessage,
ServerConfig,
} from "../../common";
import {
EventStreamContentType,
fetchEventSource,
} from "@fortaine/fetch-event-source";
import Locale from "@/app/locales";
import { getAuthKey, trimEnd, prettyObject } from "./utils";
import {
prettyObject,
getTimer,
authHeaderName,
auth,
parseResp,
formatMessage,
} from "./utils";
import { cloneDeep } from "lodash-es";
import { NextRequest, NextResponse } from "next/server";
export type AnthropicProviderSettingKeys = SettingKeys;
@ -61,86 +70,32 @@ export interface ChatRequest {
stream?: boolean; // Whether to incrementally stream the response using server-sent events.
}
export default class AnthropicProvider
implements IProviderTemplate<SettingKeys, "anthropic", typeof AnthropicMetas>
{
type ProviderTemplate = IProviderTemplate<
SettingKeys,
"anthropic",
typeof AnthropicMetas
>;
export default class AnthropicProvider implements ProviderTemplate {
apiRouteRootName = "/api/provider/anthropic" as const;
allowedApiMethods: ["GET", "POST"] = ["GET", "POST"];
runtime = "edge" as const;
preferredRegion = preferredRegion;
name = "anthropic" as const;
metas = AnthropicMetas;
providerMeta = {
displayName: "Anthropic",
settingItems,
settingItems: settingItems(
`${this.apiRouteRootName}//${AnthropicMetas.ChatPath}`,
),
};
defaultModels = modelConfigs;
readonly REQUEST_TIMEOUT_MS = 60000;
private path(payload: InternalChatRequestPayload<SettingKeys>) {
const {
providerConfig: { anthropicUrl },
} = payload;
return `${trimEnd(anthropicUrl!)}/${AnthropicMetas.ChatPath}`;
}
private formatMessage(
messages: RequestMessage[],
payload: InternalChatRequestPayload<SettingKeys>,
) {
const { isVisionModel } = payload;
return messages
.flat()
.filter((v) => {
if (!v.content) return false;
if (typeof v.content === "string" && !v.content.trim()) return false;
return true;
})
.map((v) => {
const { role, content } = v;
const insideRole = ClaudeMapper[role] ?? "user";
if (!isVisionModel || typeof content === "string") {
return {
role: insideRole,
content: getMessageTextContent(v),
};
}
return {
role: insideRole,
content: content
.filter((v) => v.image_url || v.text)
.map(({ type, text, image_url }) => {
if (type === "text") {
return {
type,
text: text!,
};
}
const { url = "" } = image_url || {};
const colonIndex = url.indexOf(":");
const semicolonIndex = url.indexOf(";");
const comma = url.indexOf(",");
const mimeType = url.slice(colonIndex + 1, semicolonIndex);
const encodeType = url.slice(semicolonIndex + 1, comma);
const data = url.slice(comma + 1);
return {
type: "image" as const,
source: {
type: encodeType,
media_type: mimeType,
data,
},
};
}),
};
});
}
private formatChatPayload(payload: InternalChatRequestPayload<SettingKeys>) {
const {
messages: outsideMessages,
@ -149,7 +104,8 @@ export default class AnthropicProvider
modelConfig,
providerConfig,
} = payload;
const { anthropicApiKey, anthropicApiVersion } = providerConfig;
const { anthropicApiKey, anthropicApiVersion, anthropicUrl } =
providerConfig;
const { temperature, top_p, max_tokens } = modelConfig;
const keys = ["system", "user"];
@ -172,7 +128,7 @@ export default class AnthropicProvider
}
}
const prompt = this.formatMessage(messages, payload);
const prompt = formatMessage(messages, payload.isVisionModel);
const requestBody: AnthropicChatRequest = {
messages: prompt,
@ -188,52 +144,84 @@ export default class AnthropicProvider
headers: {
"Content-Type": "application/json",
Accept: "application/json",
"x-api-key": anthropicApiKey ?? "",
[authHeaderName]: anthropicApiKey ?? "",
"anthropic-version": anthropicApiVersion ?? "",
Authorization: getAuthKey(anthropicApiKey),
},
body: JSON.stringify(requestBody),
method: "POST",
url: this.path(payload),
url: anthropicUrl!,
};
}
private readWholeMessageResponseBody(res: any) {
return {
message: res?.content?.[0]?.text ?? "",
};
}
private getTimer = (onabort: () => void = () => {}) => {
const controller = new AbortController();
// make a fetch request
const requestTimeoutId = setTimeout(
() => controller.abort(),
this.REQUEST_TIMEOUT_MS,
);
controller.signal.onabort = onabort;
return {
...controller,
clear: () => {
clearTimeout(requestTimeoutId);
},
};
};
async chat(payload: InternalChatRequestPayload<SettingKeys>) {
const requestPayload = this.formatChatPayload(payload);
const timer = this.getTimer();
// make a fetch request
const requestTimeoutId = setTimeout(
() => timer.abort(),
this.REQUEST_TIMEOUT_MS,
);
private async request(req: NextRequest, serverConfig: ServerConfig) {
const controller = new AbortController();
const authValue = req.headers.get(authHeaderName) ?? "";
const path = `${req.nextUrl.pathname}`.replaceAll(
this.apiRouteRootName,
"",
);
const baseUrl = serverConfig.anthropicUrl || ANTHROPIC_BASE_URL;
console.log("[Proxy] ", path);
console.log("[Base Url]", baseUrl);
const timeoutId = setTimeout(
() => {
controller.abort();
},
10 * 60 * 1000,
);
const fetchUrl = `${baseUrl}${path}`;
const fetchOptions: RequestInit = {
headers: {
"Content-Type": "application/json",
"Cache-Control": "no-store",
[authHeaderName]: authValue,
"anthropic-version":
req.headers.get("anthropic-version") ||
serverConfig.anthropicApiVersion ||
AnthropicMetas.Vision,
},
method: req.method,
body: req.body,
redirect: "manual",
// @ts-ignore
duplex: "half",
signal: controller.signal,
};
console.log("[Anthropic request]", fetchOptions.headers, req.method);
try {
const res = await fetch(fetchUrl, fetchOptions);
// to prevent browser prompt for credentials
const newHeaders = new Headers(res.headers);
newHeaders.delete("www-authenticate");
// to disable nginx buffering
newHeaders.set("X-Accel-Buffering", "no");
return new NextResponse(res.body, {
status: res.status,
statusText: res.statusText,
headers: newHeaders,
});
} finally {
clearTimeout(timeoutId);
}
}
async chat(
payload: InternalChatRequestPayload<SettingKeys>,
fetch: typeof window.fetch,
) {
const requestPayload = this.formatChatPayload(payload);
const timer = getTimer();
const res = await fetch(requestPayload.url, {
headers: {
...requestPayload.headers,
@ -246,7 +234,7 @@ export default class AnthropicProvider
timer.clear();
const resJson = await res.json();
const message = this.readWholeMessageResponseBody(resJson);
const message = parseResp(resJson);
return message;
}
@ -254,13 +242,14 @@ export default class AnthropicProvider
streamChat(
payload: InternalChatRequestPayload<SettingKeys>,
handlers: ChatHandlers,
fetch: typeof window.fetch,
) {
const requestPayload = this.formatChatPayload(payload);
const timer = this.getTimer();
const timer = getTimer();
fetchEventSource(requestPayload.url, {
...requestPayload,
fetch,
async onopen(res) {
timer.clear();
const contentType = res.headers.get("content-type");
@ -329,4 +318,39 @@ export default class AnthropicProvider
return timer;
}
serverSideRequestHandler: ProviderTemplate["serverSideRequestHandler"] =
async (req, config) => {
const { subpath } = req;
const ALLOWD_PATH = [AnthropicMetas.ChatPath];
if (!ALLOWD_PATH.includes(subpath)) {
console.log("[Anthropic Route] forbidden path ", subpath);
return NextResponse.json(
{
error: true,
message: "you are not allowed to request " + subpath,
},
{
status: 403,
},
);
}
const authResult = auth(req, config);
if (authResult.error) {
return NextResponse.json(authResult, {
status: 401,
});
}
try {
const response = await this.request(req, config);
return response;
} catch (e) {
console.error("[Anthropic] ", e);
return NextResponse.json(prettyObject(e));
}
};
}

View File

@ -1,3 +1,15 @@
import { NextRequest } from "next/server";
import {
RequestMessage,
ServerConfig,
getIP,
getMessageTextContent,
} from "../../common";
import { ClaudeMapper } from "./config";
export const REQUEST_TIMEOUT_MS = 60000;
export const authHeaderName = "x-api-key";
export function trimEnd(s: string, end = " ") { export function trimEnd(s: string, end = " ") {
if (end.length === 0) return s; if (end.length === 0) return s;
@ -12,17 +24,6 @@ export function bearer(value: string) {
return `Bearer ${value.trim()}`; return `Bearer ${value.trim()}`;
} }
export function getAuthKey(apiKey = "") {
let authKey = "";
if (apiKey) {
// use user's api key first
authKey = bearer(apiKey);
}
return authKey;
}
export function prettyObject(msg: any) {
const obj = msg;
if (typeof msg !== "string") {
@ -36,3 +37,115 @@ export function prettyObject(msg: any) {
}
return ["```json", msg, "```"].join("\n");
}
export function getTimer() {
const controller = new AbortController();
// make a fetch request
const requestTimeoutId = setTimeout(
() => controller.abort(),
REQUEST_TIMEOUT_MS,
);
return {
...controller,
clear: () => {
clearTimeout(requestTimeoutId);
},
};
}
export function auth(req: NextRequest, serverConfig: ServerConfig) {
const apiKey = req.headers.get(authHeaderName);
console.log("[User IP] ", getIP(req));
console.log("[Time] ", new Date().toLocaleString());
if (serverConfig.hideUserApiKey && apiKey) {
return {
error: true,
message: "you are not allowed to access with your own api key",
};
}
if (apiKey) {
console.log("[Auth] use user api key");
return {
error: false,
};
}
// if user does not provide an api key, inject system api key
const systemApiKey = serverConfig.anthropicApiKey;
if (systemApiKey) {
console.log("[Auth] use system api key");
req.headers.set(authHeaderName, systemApiKey);
} else {
console.log("[Auth] admin did not provide an api key");
}
return {
error: false,
};
}
export function parseResp(res: any) {
return {
message: res?.content?.[0]?.text ?? "",
};
}
export function formatMessage(
messages: RequestMessage[],
isVisionModel?: boolean,
) {
return messages
.flat()
.filter((v) => {
if (!v.content) return false;
if (typeof v.content === "string" && !v.content.trim()) return false;
return true;
})
.map((v) => {
const { role, content } = v;
const insideRole = ClaudeMapper[role] ?? "user";
if (!isVisionModel || typeof content === "string") {
return {
role: insideRole,
content: getMessageTextContent(v),
};
}
return {
role: insideRole,
content: content
.filter((v) => v.image_url || v.text)
.map(({ type, text, image_url }) => {
if (type === "text") {
return {
type,
text: text!,
};
}
const { url = "" } = image_url || {};
const colonIndex = url.indexOf(":");
const semicolonIndex = url.indexOf(";");
const comma = url.indexOf(",");
const mimeType = url.slice(colonIndex + 1, semicolonIndex);
const encodeType = url.slice(semicolonIndex + 1, comma);
const data = url.slice(comma + 1);
return {
type: "image" as const,
source: {
type: encodeType,
media_type: mimeType,
data,
},
};
}),
};
});
}
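For a vision model, formatMessage above splits an inline data URL into its media type, encoding and payload; a small sketch of the expected shape, assuming a relative import of this utils module (the message literal and base64 string are made up):

```ts
import { formatMessage } from "./utils";

const prompt = formatMessage(
  [
    {
      role: "user",
      content: [
        { type: "text", text: "What is in this picture?" },
        {
          type: "image_url",
          image_url: { url: "data:image/png;base64,iVBORw0KGgo=" },
        },
      ],
    } as any,
  ],
  true, // isVisionModel
);
// prompt[0].content[1] is roughly:
// { type: "image", source: { type: "base64", media_type: "image/png", data: "iVBORw0KGgo=" } }
```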

View File

@ -5,20 +5,44 @@ import { modelConfigs as openaiModelConfigs } from "../openai/config";
export const AzureMetas = {
ExampleEndpoint: "https://{resource-url}/openai/deployments/{deploy-id}",
ChatPath: "v1/chat/completions",
ChatPath: "chat/completions",
ListModelPath: "v1/models",
};
export type SettingKeys = "azureUrl" | "azureApiKey" | "azureApiVersion";
export const preferredRegion: string | string[] = [
"arn1",
"bom1",
"cdg1",
"cle1",
"cpt1",
"dub1",
"fra1",
"gru1",
"hnd1",
"iad1",
"icn1",
"kix1",
"lhr1",
"pdx1",
"sfo1",
"sin1",
"syd1",
];
export const modelConfigs = openaiModelConfigs;
export const settingItems: SettingItem<SettingKeys>[] = [
export const settingItems: (
defaultEndpoint: string,
) => SettingItem<SettingKeys>[] = (defaultEndpoint) => [
{
name: "azureUrl",
title: Locale.Endpoint.Title,
description: Locale.Endpoint.SubTitle + AzureMetas.ExampleEndpoint,
placeholder: AzureMetas.ExampleEndpoint,
type: "input",
defaultValue: defaultEndpoint,
validators: [
async (v: any) => {
if (typeof v === "string") {

View File

@ -1,17 +1,33 @@
import { settingItems, SettingKeys, modelConfigs, AzureMetas } from "./config"; import {
settingItems,
SettingKeys,
modelConfigs,
AzureMetas,
preferredRegion,
} from "./config";
import { import {
ChatHandlers, ChatHandlers,
InternalChatRequestPayload, InternalChatRequestPayload,
IProviderTemplate, IProviderTemplate,
ModelInfo, ModelInfo,
getMessageTextContent, getMessageTextContent,
ServerConfig,
} from "../../common"; } from "../../common";
import { import {
EventStreamContentType, EventStreamContentType,
fetchEventSource, fetchEventSource,
} from "@fortaine/fetch-event-source"; } from "@fortaine/fetch-event-source";
import Locale from "@/app/locales"; import Locale from "@/app/locales";
import { makeAzurePath, makeBearer, prettyObject, validString } from "./utils"; import {
auth,
authHeaderName,
getHeaders,
getTimer,
makeAzurePath,
parseResp,
prettyObject,
} from "./utils";
import { NextRequest, NextResponse } from "next/server";
export type AzureProviderSettingKeys = SettingKeys; export type AzureProviderSettingKeys = SettingKeys;
@ -62,9 +78,35 @@ interface ModelList {
}>;
}
export default class Azure
implements IProviderTemplate<SettingKeys, "azure", typeof AzureMetas>
{
interface OpenAIListModelResponse {
object: string;
data: Array<{
id: string;
object: string;
root: string;
}>;
}
type ProviderTemplate = IProviderTemplate<
SettingKeys,
"azure",
typeof AzureMetas
>;
export default class Azure implements ProviderTemplate {
apiRouteRootName: "/api/provider/azure" = "/api/provider/azure";
allowedApiMethods: (
| "POST"
| "GET"
| "OPTIONS"
| "PUT"
| "PATCH"
| "DELETE"
)[] = ["POST", "GET"];
runtime = "edge" as const;
preferredRegion = preferredRegion;
name = "azure" as const;
metas = AzureMetas;
@ -72,46 +114,26 @@ export default class Azure
providerMeta = {
displayName: "Azure",
settingItems,
settingItems: settingItems(
`${this.apiRouteRootName}/${AzureMetas.ChatPath}`,
),
};
readonly REQUEST_TIMEOUT_MS = 60000;
private path(payload: InternalChatRequestPayload<SettingKeys>): string {
const {
providerConfig: { azureUrl, azureApiVersion },
} = payload;
const path = makeAzurePath(AzureMetas.ChatPath, azureApiVersion!);
console.log("[Proxy Endpoint] ", azureUrl, path);
return [azureUrl!, path].join("/");
}
private getHeaders(payload: InternalChatRequestPayload<SettingKeys>) {
const { azureApiKey } = payload.providerConfig;
const headers: Record<string, string> = {
"Content-Type": "application/json",
Accept: "application/json",
};
if (validString(azureApiKey)) {
headers["Authorization"] = makeBearer(azureApiKey);
}
return headers;
}
private formatChatPayload(payload: InternalChatRequestPayload<SettingKeys>) {
const { messages, isVisionModel, model, stream, modelConfig } = payload;
const {
temperature,
presence_penalty,
frequency_penalty,
top_p,
max_tokens,
} = modelConfig;
private formatChatPayload(payload: InternalChatRequestPayload<SettingKeys>) {
const {
messages,
isVisionModel,
model,
stream,
modelConfig: {
temperature,
presence_penalty,
frequency_penalty,
top_p,
max_tokens,
},
providerConfig: { azureUrl, azureApiVersion },
} = payload;
const openAiMessages = messages.map((v) => ({
role: v.role,
@ -136,47 +158,105 @@ export default class Azure
console.log("[Request] openai payload: ", requestPayload);
return {
headers: this.getHeaders(payload),
headers: getHeaders(payload.providerConfig.azureApiKey),
body: JSON.stringify(requestPayload),
method: "POST",
url: this.path(payload),
url: `${azureUrl}?api-version=${azureApiVersion!}`,
};
}
private readWholeMessageResponseBody(res: any) {
return {
message: res.choices?.at(0)?.message?.content ?? "",
};
}
private getTimer = (onabort: () => void = () => {}) => {
const controller = new AbortController();
// make a fetch request
const requestTimeoutId = setTimeout(
() => controller.abort(),
this.REQUEST_TIMEOUT_MS,
);
controller.signal.onabort = onabort;
return {
...controller,
clear: () => {
clearTimeout(requestTimeoutId);
},
};
};
async chat(payload: InternalChatRequestPayload<SettingKeys>) {
private async requestAzure(req: NextRequest, serverConfig: ServerConfig) {
const controller = new AbortController();
const authValue =
req.headers
.get("Authorization")
?.trim()
.replaceAll("Bearer ", "")
.trim() ?? "";
const { azureUrl, azureApiVersion } = serverConfig;
if (!azureUrl) {
return NextResponse.json({
error: true,
message: `missing AZURE_URL in server env vars`,
});
}
if (!azureApiVersion) {
return NextResponse.json({
error: true,
message: `missing AZURE_API_VERSION in server env vars`,
});
}
let path = `${req.nextUrl.pathname}${req.nextUrl.search}`.replaceAll(
this.apiRouteRootName,
"",
);
path = makeAzurePath(path, azureApiVersion);
console.log("[Proxy] ", path);
console.log("[Base Url]", azureUrl);
const fetchUrl = `${azureUrl}/${path}`;
const timeoutId = setTimeout(
() => {
controller.abort();
},
10 * 60 * 1000,
);
const fetchOptions: RequestInit = {
headers: {
"Content-Type": "application/json",
"Cache-Control": "no-store",
[authHeaderName]: authValue,
},
method: req.method,
body: req.body,
// to fix #2485: https://stackoverflow.com/questions/55920957/cloudflare-worker-typeerror-one-time-use-body
redirect: "manual",
// @ts-ignore
duplex: "half",
signal: controller.signal,
};
try {
const res = await fetch(fetchUrl, fetchOptions);
// to prevent browser prompt for credentials
const newHeaders = new Headers(res.headers);
newHeaders.delete("www-authenticate");
// to disable nginx buffering
newHeaders.set("X-Accel-Buffering", "no");
// The latest version of the OpenAI API forced the content-encoding to be "br" in json response
// So if the streaming is disabled, we need to remove the content-encoding header
// Because Vercel uses gzip to compress the response, if we don't remove the content-encoding header
// The browser will try to decode the response with brotli and fail
newHeaders.delete("content-encoding");
return new NextResponse(res.body, {
status: res.status,
statusText: res.statusText,
headers: newHeaders,
});
} finally {
clearTimeout(timeoutId);
}
}
async chat(
payload: InternalChatRequestPayload<SettingKeys>,
fetch: typeof window.fetch,
) {
const requestPayload = this.formatChatPayload(payload);
const timer = this.getTimer();
const timer = getTimer();
// make a fetch request
const requestTimeoutId = setTimeout(
() => timer.abort(),
this.REQUEST_TIMEOUT_MS,
);
const res = await fetch(requestPayload.url, {
headers: {
@ -187,10 +267,10 @@ export default class Azure
signal: timer.signal,
});
clearTimeout(requestTimeoutId);
timer.clear();
const resJson = await res.json();
const message = this.readWholeMessageResponseBody(resJson);
const message = parseResp(resJson);
return message;
}
@ -198,13 +278,15 @@ export default class Azure
streamChat(
payload: InternalChatRequestPayload<SettingKeys>,
handlers: ChatHandlers,
fetch: typeof window.fetch,
) {
const requestPayload = this.formatChatPayload(payload);
const timer = this.getTimer();
const timer = getTimer();
fetchEventSource(requestPayload.url, {
...requestPayload,
fetch,
async onopen(res) {
timer.clear();
const contentType = res.headers.get("content-type");
@ -278,7 +360,7 @@ export default class Azure
providerConfig: Record<SettingKeys, string>,
): Promise<ModelInfo[]> {
const { azureApiKey, azureUrl } = providerConfig;
const res = await fetch(`${azureUrl}/vi/models`, {
const res = await fetch(`${azureUrl}/${AzureMetas.ListModelPath}`, {
headers: {
Authorization: `Bearer ${azureApiKey}`,
},
@ -290,4 +372,37 @@ export default class Azure
name: o.id,
}));
}
serverSideRequestHandler: ProviderTemplate["serverSideRequestHandler"] =
async (req, config) => {
const { subpath } = req;
const ALLOWD_PATH = [AzureMetas.ChatPath];
if (!ALLOWD_PATH.includes(subpath)) {
return NextResponse.json(
{
error: true,
message: "you are not allowed to request " + subpath,
},
{
status: 403,
},
);
}
const authResult = auth(req, config);
if (authResult.error) {
return NextResponse.json(authResult, {
status: 401,
});
}
try {
const response = await this.requestAzure(req, config);
return response;
} catch (e) {
return NextResponse.json(prettyObject(e));
}
};
}

View File

@ -1,7 +1,29 @@
export function makeAzurePath(path: string, apiVersion: string) {
// should omit /v1 prefix
path = path.replaceAll("v1/", "");
import { NextRequest } from "next/server";
import { ServerConfig, getIP } from "../../common";
export const authHeaderName = "api-key";
export const REQUEST_TIMEOUT_MS = 60000;
export function getHeaders(azureApiKey?: string) {
const headers: Record<string, string> = {
"Content-Type": "application/json",
Accept: "application/json",
};
if (validString(azureApiKey)) {
headers[authHeaderName] = makeBearer(azureApiKey);
}
return headers;
}
export function parseResp(res: any) {
return {
message: res.choices?.at(0)?.message?.content ?? "",
};
}
export function makeAzurePath(path: string, apiVersion: string) {
// should add api-key to query string
path += `${path.includes("?") ? "&" : "?"}api-version=${apiVersion}`;
@ -25,3 +47,64 @@ export function prettyObject(msg: any) {
export const makeBearer = (s: string) => `Bearer ${s.trim()}`;
export const validString = (x?: string): x is string =>
Boolean(x && x.length > 0);
export function parseApiKey(bearToken: string) {
const token = bearToken.trim().replaceAll("Bearer ", "").trim();
return {
apiKey: token,
};
}
export function getTimer() {
const controller = new AbortController();
// make a fetch request
const requestTimeoutId = setTimeout(
() => controller.abort(),
REQUEST_TIMEOUT_MS,
);
return {
...controller,
clear: () => {
clearTimeout(requestTimeoutId);
},
};
}
export function auth(req: NextRequest, serverConfig: ServerConfig) {
const authToken = req.headers.get(authHeaderName) ?? "";
const { hideUserApiKey, apiKey: systemApiKey } = serverConfig;
const { apiKey } = parseApiKey(authToken);
console.log("[User IP] ", getIP(req));
console.log("[Time] ", new Date().toLocaleString());
if (hideUserApiKey && apiKey) {
return {
error: true,
message: "you are not allowed to access with your own api key",
};
}
if (apiKey) {
console.log("[Auth] use user api key");
return {
error: false,
};
}
if (systemApiKey) {
console.log("[Auth] use system api key");
req.headers.set("Authorization", `Bearer ${systemApiKey}`);
} else {
console.log("[Auth] admin did not provide an api key");
}
return {
error: false,
};
}
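Two of the helpers above in isolation, assuming a relative import of this utils module; makeAzurePath appends api-version to whatever query string is already present, and parseApiKey strips a Bearer prefix before the key is forwarded upstream:

```ts
import { makeAzurePath, parseApiKey } from "./utils";

makeAzurePath("chat/completions", "2023-05-15");
// -> "chat/completions?api-version=2023-05-15"
makeAzurePath("chat/completions?stream=true", "2023-05-15");
// -> "chat/completions?stream=true&api-version=2023-05-15"

parseApiKey("Bearer azure-test-key");
// -> { apiKey: "azure-test-key" }
```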

View File

@ -1,8 +1,25 @@
import { SettingItem } from "../../common"; import { SettingItem } from "../../common";
import Locale from "./locale"; import Locale from "./locale";
export const preferredRegion: string | string[] = [
"bom1",
"cle1",
"cpt1",
"gru1",
"hnd1",
"iad1",
"icn1",
"kix1",
"pdx1",
"sfo1",
"sin1",
"syd1",
];
export const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/";
export const GoogleMetas = {
ExampleEndpoint: "https://generativelanguage.googleapis.com/",
ExampleEndpoint: GEMINI_BASE_URL,
ChatPath: (modelName: string) => `v1beta/models/${modelName}:generateContent`,
};
@ -32,13 +49,16 @@ export const modelConfigs = [
},
];
export const settingItems: SettingItem<SettingKeys>[] = [
export const settingItems: (
defaultEndpoint: string,
) => SettingItem<SettingKeys>[] = (defaultEndpoint) => [
{
name: "googleUrl",
title: Locale.Endpoint.Title,
description: Locale.Endpoint.SubTitle + GoogleMetas.ExampleEndpoint,
placeholder: GoogleMetas.ExampleEndpoint,
type: "input",
defaultValue: defaultEndpoint,
validators: [
async (v: any) => {
if (typeof v === "string") {
@ -52,6 +72,7 @@ export const settingItems: SettingItem<SettingKeys>[] = [
return Locale.Endpoint.Error.EndWithBackslash;
}
},
"required",
],
},
{

View File

@ -1,4 +1,11 @@
import { SettingKeys, modelConfigs, settingItems, GoogleMetas } from "./config"; import {
SettingKeys,
modelConfigs,
settingItems,
GoogleMetas,
GEMINI_BASE_URL,
preferredRegion,
} from "./config";
import { import {
ChatHandlers, ChatHandlers,
InternalChatRequestPayload, InternalChatRequestPayload,
@ -8,7 +15,14 @@ import {
getMessageTextContent, getMessageTextContent,
getMessageImages, getMessageImages,
} from "../../common"; } from "../../common";
import { ensureProperEnding, makeBearer, validString } from "./utils"; import {
auth,
ensureProperEnding,
getTimer,
parseResp,
urlParamApikeyName,
} from "./utils";
import { NextResponse } from "next/server";
export type GoogleProviderSettingKeys = SettingKeys; export type GoogleProviderSettingKeys = SettingKeys;
@ -29,38 +43,38 @@ interface ModelList {
nextPageToken: string; nextPageToken: string;
} }
type ProviderTemplate = IProviderTemplate<
SettingKeys,
"azure",
typeof GoogleMetas
>;
export default class GoogleProvider
implements IProviderTemplate<SettingKeys, "google", typeof GoogleMetas>
{
allowedApiMethods: (
| "POST"
| "GET"
| "OPTIONS"
| "PUT"
| "PATCH"
| "DELETE"
)[] = ["GET", "POST"];
runtime = "edge" as const;
apiRouteRootName: "/api/provider/google" = "/api/provider/google";
preferredRegion = preferredRegion;
name = "google" as const;
metas = GoogleMetas;
providerMeta = {
displayName: "Google",
settingItems,
settingItems: settingItems(this.apiRouteRootName),
};
defaultModels = modelConfigs;
readonly REQUEST_TIMEOUT_MS = 60000;
private getHeaders(payload: InternalChatRequestPayload<SettingKeys>) {
const {
providerConfig: { googleApiKey },
context: { isApp },
} = payload;
const headers: Record<string, string> = {
"Content-Type": "application/json",
Accept: "application/json",
};
if (!isApp && validString(googleApiKey)) {
headers["Authorization"] = makeBearer(googleApiKey);
}
return headers;
}
private formatChatPayload(payload: InternalChatRequestPayload<SettingKeys>) {
const {
messages,
@ -69,19 +83,16 @@ export default class GoogleProvider
stream,
modelConfig,
providerConfig,
context: { isApp },
} = payload;
const { googleUrl, googleApiKey } = providerConfig;
const { temperature, top_p, max_tokens } = modelConfig;
let multimodal = false;
const internalMessages = messages.map((v) => {
let parts: any[] = [{ text: getMessageTextContent(v) }];
if (isVisionModel) {
const images = getMessageImages(v);
if (images.length > 0) {
multimodal = true;
parts = parts.concat(
images.map((image) => {
const imageType = image.split(";")[0].split(":")[1];
@ -145,16 +156,15 @@ export default class GoogleProvider
],
};
let googleChatPath = GoogleMetas.ChatPath(model);
let baseUrl = googleUrl ?? "/api/google/" + googleChatPath;
if (isApp) {
baseUrl += `?key=${googleApiKey}`;
}
const baseUrl = `${googleUrl}/${GoogleMetas.ChatPath(
model,
)}?${urlParamApikeyName}=${googleApiKey}`;
return {
headers: this.getHeaders(payload),
headers: {
"Content-Type": "application/json",
Accept: "application/json",
},
body: JSON.stringify(requestPayload),
method: "POST",
url: stream
@ -162,46 +172,15 @@ export default class GoogleProvider
: baseUrl,
};
}
private readWholeMessageResponseBody(res: any) {
if (res?.promptFeedback?.blockReason) {
// being blocked
throw new Error(
"Message is being blocked for reason: " +
res.promptFeedback.blockReason,
);
}
return {
message:
res.candidates?.at(0)?.content?.parts?.at(0)?.text ||
res.error?.message ||
"",
};
}
private getTimer = () => {
const controller = new AbortController();
// make a fetch request
const requestTimeoutId = setTimeout(
() => controller.abort(),
this.REQUEST_TIMEOUT_MS,
);
return {
...controller,
clear: () => {
clearTimeout(requestTimeoutId);
},
};
};
streamChat(
payload: InternalChatRequestPayload<SettingKeys>,
handlers: ChatHandlers,
fetch: typeof window.fetch,
) {
const requestPayload = this.formatChatPayload(payload);
const timer = this.getTimer();
const timer = getTimer();
let existingTexts: string[] = [];
@ -274,15 +253,10 @@ export default class GoogleProvider
async chat(
payload: InternalChatRequestPayload<SettingKeys>,
fetch: typeof window.fetch,
): Promise<StandChatReponseMessage> {
const requestPayload = this.formatChatPayload(payload);
const timer = this.getTimer();
const timer = getTimer();
// make a fetch request
const requestTimeoutId = setTimeout(
() => timer.abort(),
this.REQUEST_TIMEOUT_MS,
);
const res = await fetch(requestPayload.url, {
headers: {
@ -293,10 +267,10 @@ export default class GoogleProvider
signal: timer.signal,
});
clearTimeout(requestTimeoutId);
timer.clear();
const resJson = await res.json();
const message = this.readWholeMessageResponseBody(resJson);
const message = parseResp(resJson);
return message;
}
@ -315,4 +289,65 @@ export default class GoogleProvider
return data.models;
}
serverSideRequestHandler: ProviderTemplate["serverSideRequestHandler"] =
async (req, serverConfig) => {
const { googleUrl = GEMINI_BASE_URL } = serverConfig;
const controller = new AbortController();
const path = `${req.nextUrl.pathname}`.replaceAll(
this.apiRouteRootName,
"",
);
console.log("[Proxy] ", path);
console.log("[Base Url]", googleUrl);
const authResult = auth(req, serverConfig);
if (authResult.error) {
return NextResponse.json(authResult, {
status: 401,
});
}
const fetchUrl = `${googleUrl}/${path}?key=${authResult.apiKey}`;
const fetchOptions: RequestInit = {
headers: {
"Content-Type": "application/json",
"Cache-Control": "no-store",
},
method: req.method,
body: req.body,
// to fix #2485: https://stackoverflow.com/questions/55920957/cloudflare-worker-typeerror-one-time-use-body
redirect: "manual",
// @ts-ignore
duplex: "half",
signal: controller.signal,
};
const timeoutId = setTimeout(
() => {
controller.abort();
},
10 * 60 * 1000,
);
try {
const res = await fetch(fetchUrl, fetchOptions);
// to prevent browser prompt for credentials
const newHeaders = new Headers(res.headers);
newHeaders.delete("www-authenticate");
// to disable nginx buffering
newHeaders.set("X-Accel-Buffering", "no");
return new NextResponse(res.body, {
status: res.status,
statusText: res.statusText,
headers: newHeaders,
});
} finally {
clearTimeout(timeoutId);
}
};
}

View File

@ -1,3 +1,10 @@
import { NextRequest } from "next/server";
import { ServerConfig, getIP } from "../../common";
export const urlParamApikeyName = "key";
export const REQUEST_TIMEOUT_MS = 60000;
export const makeBearer = (s: string) => `Bearer ${s.trim()}`;
export const validString = (x?: string): x is string =>
Boolean(x && x.length > 0);
@ -8,3 +15,73 @@ export function ensureProperEnding(str: string) {
}
return str;
}
export function auth(req: NextRequest, serverConfig: ServerConfig) {
let apiKey = req.nextUrl.searchParams.get(urlParamApikeyName);
const { hideUserApiKey, googleApiKey } = serverConfig;
console.log("[User IP] ", getIP(req));
console.log("[Time] ", new Date().toLocaleString());
if (hideUserApiKey && apiKey) {
return {
error: true,
message: "you are not allowed to access with your own api key",
};
}
if (apiKey) {
console.log("[Auth] use user api key");
return {
error: false,
apiKey,
};
}
if (googleApiKey) {
console.log("[Auth] use system api key");
return {
error: false,
apiKey: googleApiKey,
};
}
console.log("[Auth] admin did not provide an api key");
return {
error: true,
message: `missing api key`,
};
}
export function getTimer() {
const controller = new AbortController();
// make a fetch request
const requestTimeoutId = setTimeout(
() => controller.abort(),
REQUEST_TIMEOUT_MS,
);
return {
...controller,
clear: () => {
clearTimeout(requestTimeoutId);
},
};
}
export function parseResp(res: any) {
if (res?.promptFeedback?.blockReason) {
// being blocked
throw new Error(
"Message is being blocked for reason: " + res.promptFeedback.blockReason,
);
}
return {
message:
res.candidates?.at(0)?.content?.parts?.at(0)?.text ||
res.error?.message ||
"",
};
}
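parseResp above either unwraps the first candidate's text or throws when Gemini reports a block reason; a short sketch, assuming a relative import of this utils module:

```ts
import { parseResp } from "./utils";

parseResp({
  candidates: [{ content: { parts: [{ text: "Hello from Gemini" }] } }],
});
// -> { message: "Hello from Gemini" }

try {
  parseResp({ promptFeedback: { blockReason: "SAFETY" } });
} catch (e) {
  // Error: Message is being blocked for reason: SAFETY
}
```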

View File

@ -2,6 +2,8 @@ import { SettingItem } from "../../common";
import { isVisionModel } from "@/app/utils"; import { isVisionModel } from "@/app/utils";
import Locale from "@/app/locales"; import Locale from "@/app/locales";
export const OPENAI_BASE_URL = "https://api.openai.com";
export const NextChatMetas = { export const NextChatMetas = {
ChatPath: "v1/chat/completions", ChatPath: "v1/chat/completions",
UsagePath: "dashboard/billing/usage", UsagePath: "dashboard/billing/usage",
@ -9,6 +11,26 @@ export const NextChatMetas = {
ListModelPath: "v1/models", ListModelPath: "v1/models",
}; };
export const preferredRegion: string | string[] = [
"arn1",
"bom1",
"cdg1",
"cle1",
"cpt1",
"dub1",
"fra1",
"gru1",
"hnd1",
"iad1",
"icn1",
"kix1",
"lhr1",
"pdx1",
"sfo1",
"sin1",
"syd1",
];
export type SettingKeys = "accessCode"; export type SettingKeys = "accessCode";
export const defaultModal = "gpt-3.5-turbo"; export const defaultModal = "gpt-3.5-turbo";

View File

@ -3,13 +3,15 @@ import {
settingItems,
SettingKeys,
NextChatMetas,
preferredRegion,
OPENAI_BASE_URL,
} from "./config";
import { ACCESS_CODE_PREFIX } from "@/app/constant";
import {
ChatHandlers,
getMessageTextContent,
InternalChatRequestPayload,
IProviderTemplate,
ServerConfig,
StandChatReponseMessage,
} from "../../common";
import {
@ -18,7 +20,8 @@ import {
} from "@fortaine/fetch-event-source";
import { prettyObject } from "@/app/utils/format";
import Locale from "@/app/locales";
import { makeBearer, validString } from "./utils";
import { auth, authHeaderName, getHeaders, getTimer, parseResp } from "./utils";
import { NextRequest, NextResponse } from "next/server";
export type NextChatProviderSettingKeys = SettingKeys; export type NextChatProviderSettingKeys = SettingKeys;
@ -52,9 +55,27 @@ interface RequestPayload {
max_tokens?: number; max_tokens?: number;
} }
type ProviderTemplate = IProviderTemplate<
SettingKeys,
"azure",
typeof NextChatMetas
>;
export default class NextChatProvider
implements IProviderTemplate<SettingKeys, "nextchat", typeof NextChatMetas>
{
apiRouteRootName: "/api/provider/nextchat" = "/api/provider/nextchat";
allowedApiMethods: (
| "POST"
| "GET"
| "OPTIONS"
| "PUT"
| "PATCH"
| "DELETE"
)[] = ["GET", "POST"];
runtime = "edge" as const;
preferredRegion = preferredRegion;
name = "nextchat" as const;
metas = NextChatMetas;
@ -65,33 +86,6 @@ export default class NextChatProvider
settingItems,
};
readonly REQUEST_TIMEOUT_MS = 60000;
private path(): string {
const path = NextChatMetas.ChatPath;
let baseUrl = "/api/openai";
console.log("[Proxy Endpoint] ", baseUrl, path);
return [baseUrl, path].join("/");
}
private getHeaders(payload: InternalChatRequestPayload<SettingKeys>) {
const { accessCode } = payload.providerConfig;
const headers: Record<string, string> = {
"Content-Type": "application/json",
Accept: "application/json",
};
if (validString(accessCode)) {
headers["Authorization"] = makeBearer(ACCESS_CODE_PREFIX + accessCode);
}
return headers;
}
private formatChatPayload(payload: InternalChatRequestPayload<SettingKeys>) {
const { messages, isVisionModel, model, stream, modelConfig } = payload;
const {
@ -125,46 +119,106 @@ export default class NextChatProvider
console.log("[Request] openai payload: ", requestPayload);
return {
headers: this.getHeaders(payload),
headers: getHeaders(payload.providerConfig.accessCode!),
body: JSON.stringify(requestPayload),
method: "POST",
url: this.path(),
url: [this.apiRouteRootName, NextChatMetas.ChatPath].join("/"),
};
}
private readWholeMessageResponseBody(res: any) {
return {
message: res.choices?.at(0)?.message?.content ?? "",
};
}
private getTimer = () => {
const controller = new AbortController();
// make a fetch request
const requestTimeoutId = setTimeout(
() => controller.abort(),
this.REQUEST_TIMEOUT_MS,
);
return {
...controller,
clear: () => {
clearTimeout(requestTimeoutId);
},
};
};
private async requestOpenai(req: NextRequest, serverConfig: ServerConfig) {
const { baseUrl = OPENAI_BASE_URL, openaiOrgId } = serverConfig;
const controller = new AbortController();
const authValue = req.headers.get(authHeaderName) ?? "";
const path = `${req.nextUrl.pathname}${req.nextUrl.search}`.replaceAll(
this.apiRouteRootName,
"",
);
console.log("[Proxy] ", path);
console.log("[Base Url]", baseUrl);
const timeoutId = setTimeout(
() => {
controller.abort();
},
10 * 60 * 1000,
);
const fetchUrl = `${baseUrl}/${path}`;
const fetchOptions: RequestInit = {
headers: {
"Content-Type": "application/json",
"Cache-Control": "no-store",
[authHeaderName]: authValue,
...(openaiOrgId && {
"OpenAI-Organization": openaiOrgId,
}),
},
method: req.method,
body: req.body,
// to fix #2485: https://stackoverflow.com/questions/55920957/cloudflare-worker-typeerror-one-time-use-body
redirect: "manual",
// @ts-ignore
duplex: "half",
signal: controller.signal,
};
try {
const res = await fetch(fetchUrl, fetchOptions);
// Extract the OpenAI-Organization header from the response
const openaiOrganizationHeader = res.headers.get("OpenAI-Organization");
// Check if serverConfig.openaiOrgId is defined and not an empty string
if (openaiOrgId && openaiOrgId.trim() !== "") {
// If openaiOrganizationHeader is present, log it; otherwise, log that the header is not present
console.log("[Org ID]", openaiOrganizationHeader);
} else {
console.log("[Org ID] is not set up.");
}
// to prevent browser prompt for credentials
const newHeaders = new Headers(res.headers);
newHeaders.delete("www-authenticate");
// to disable nginx buffering
newHeaders.set("X-Accel-Buffering", "no");
// Conditionally delete the OpenAI-Organization header from the response if [Org ID] is undefined or empty (not setup in ENV)
// Also, this is to prevent the header from being sent to the client
if (!openaiOrgId || openaiOrgId.trim() === "") {
newHeaders.delete("OpenAI-Organization");
}
// The latest version of the OpenAI API forced the content-encoding to be "br" in json response
// So if the streaming is disabled, we need to remove the content-encoding header
// Because Vercel uses gzip to compress the response, if we don't remove the content-encoding header
// The browser will try to decode the response with brotli and fail
newHeaders.delete("content-encoding");
return new NextResponse(res.body, {
status: res.status,
statusText: res.statusText,
headers: newHeaders,
});
} finally {
clearTimeout(timeoutId);
}
}
streamChat( streamChat(
payload: InternalChatRequestPayload<SettingKeys>, payload: InternalChatRequestPayload<SettingKeys>,
handlers: ChatHandlers, handlers: ChatHandlers,
fetch: typeof window.fetch,
) { ) {
const requestPayload = this.formatChatPayload(payload); const requestPayload = this.formatChatPayload(payload);
const timer = this.getTimer(); const timer = getTimer();
fetchEventSource(requestPayload.url, { fetchEventSource(requestPayload.url, {
...requestPayload, ...requestPayload,
fetch,
async onopen(res) { async onopen(res) {
timer.clear(); timer.clear();
const contentType = res.headers.get("content-type"); const contentType = res.headers.get("content-type");
@ -236,10 +290,11 @@ export default class NextChatProvider
async chat(
payload: InternalChatRequestPayload<"accessCode">,
fetch: typeof window.fetch,
): Promise<StandChatReponseMessage> {
const requestPayload = this.formatChatPayload(payload);
const timer = getTimer();
const res = await fetch(requestPayload.url, {
headers: {
@ -253,8 +308,41 @@ export default class NextChatProvider
timer.clear();
const resJson = await res.json();
const message = parseResp(resJson);
return message;
}
serverSideRequestHandler: ProviderTemplate["serverSideRequestHandler"] =
async (req, config) => {
const { subpath } = req;
const ALLOWD_PATH = new Set(Object.values(NextChatMetas));
if (!ALLOWD_PATH.has(subpath)) {
return NextResponse.json(
{
error: true,
message: "you are not allowed to request " + subpath,
},
{
status: 403,
},
);
}
const authResult = auth(req, config);
if (authResult.error) {
return NextResponse.json(authResult, {
status: 401,
});
}
try {
const response = await this.requestOpenai(req, config);
return response;
} catch (e) {
return NextResponse.json(prettyObject(e));
}
};
}
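// --- Illustrative sketch (not part of this commit) ---
// serverSideRequestHandler above short-circuits in a fixed order: unknown
// subpaths are rejected with 403 before auth() runs, auth failures return 401,
// and only then is the request proxied. A tiny stand-alone model of that order,
// with hypothetical inputs:
type Decision = { status: 200 | 401 | 403; reason: string };

function decide(subpathAllowed: boolean, authOk: boolean): Decision {
  if (!subpathAllowed) return { status: 403, reason: "subpath not in NextChatMetas" };
  if (!authOk) return { status: 401, reason: "auth() reported an error" };
  return { status: 200, reason: "forwarded via requestOpenai()" };
}

console.log(decide(false, true)); // -> 403
console.log(decide(true, false)); // -> 401
console.log(decide(true, true)); // -> 200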

View File

@ -1,3 +1,13 @@
import { NextRequest } from "next/server";
import { ServerConfig, getIP } from "../../common";
import md5 from "spark-md5";
export const ACCESS_CODE_PREFIX = "nk-";
export const REQUEST_TIMEOUT_MS = 60000;
export const authHeaderName = "Authorization";
export const makeBearer = (s: string) => `Bearer ${s.trim()}`;
export const validString = (x?: string): x is string =>
@ -16,3 +26,87 @@ export function prettyObject(msg: any) {
}
return ["```json", msg, "```"].join("\n");
}
export function getTimer() {
const controller = new AbortController();
// make a fetch request
const requestTimeoutId = setTimeout(
() => controller.abort(),
REQUEST_TIMEOUT_MS,
);
return {
...controller,
clear: () => {
clearTimeout(requestTimeoutId);
},
};
}
export function getHeaders(accessCode: string) {
const headers: Record<string, string> = {
"Content-Type": "application/json",
Accept: "application/json",
[authHeaderName]: makeBearer(ACCESS_CODE_PREFIX + accessCode),
};
return headers;
}
export function parseResp(res: { choices: { message: { content: any } }[] }) {
return {
message: res.choices?.[0]?.message?.content ?? "",
};
}
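// --- Illustrative usage sketch (not part of this commit) ---
// How these helpers are meant to compose on the caller's side: start the
// timeout, send the request with access-code headers, clear the timeout once
// the response arrives, and reduce the JSON body with parseResp. The url,
// body, and access code below are hypothetical.
async function chatOnce(url: string, body: unknown, accessCode: string) {
  const timer = getTimer(); // aborts its internal controller after REQUEST_TIMEOUT_MS
  const res = await fetch(url, {
    method: "POST",
    headers: getHeaders(accessCode), // sends "Bearer nk-<accessCode>"
    body: JSON.stringify(body),
  });
  timer.clear(); // the response arrived in time, cancel the pending abort
  return parseResp(await res.json()); // -> { message: string }
}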
function parseApiKey(req: NextRequest) {
const authToken = req.headers.get("Authorization") ?? "";
return {
accessCode:
authToken.startsWith(ACCESS_CODE_PREFIX) &&
authToken.slice(ACCESS_CODE_PREFIX.length),
};
}
export function auth(req: NextRequest, serverConfig: ServerConfig) {
// check if it is openai api key or user token
const { accessCode } = parseApiKey(req);
const { googleApiKey, apiKey, anthropicApiKey, azureApiKey, codes } =
serverConfig;
const hashedCode = md5.hash(accessCode || "").trim();
console.log("[Auth] allowed hashed codes: ", [...codes]);
console.log("[Auth] got access code:", accessCode);
console.log("[Auth] hashed access code:", hashedCode);
console.log("[User IP] ", getIP(req));
console.log("[Time] ", new Date().toLocaleString());
if (!codes.has(hashedCode)) {
return {
error: true,
message: !accessCode ? "empty access code" : "wrong access code",
};
}
const systemApiKey = googleApiKey || apiKey || anthropicApiKey || azureApiKey;
if (systemApiKey) {
console.log("[Auth] use system api key");
return {
error: false,
accessCode,
systemApiKey,
};
}
console.log("[Auth] admin did not provide an api key");
return {
error: true,
message: `Server internal error`,
};
}
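// --- Illustrative sketch (not part of this commit) ---
// auth() never compares raw access codes, only their MD5 hashes, so the values
// in serverConfig.codes are presumably the hashed forms of the CODE env var.
// The code below is a hypothetical example of that relationship.
const exampleRawCode = "my-access-code"; // hypothetical value a user types in settings
const exampleHashedCode = md5.hash(exampleRawCode).trim();
// auth() succeeds only when serverConfig.codes.has(exampleHashedCode) is true.
console.log("[Sketch] hashed access code:", exampleHashedCode);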

View File

@ -5,6 +5,26 @@ export const OPENAI_BASE_URL = "https://api.openai.com";
export const ROLES = ["system", "user", "assistant"] as const;
export const preferredRegion: string | string[] = [
"arn1",
"bom1",
"cdg1",
"cle1",
"cpt1",
"dub1",
"fra1",
"gru1",
"hnd1",
"iad1",
"icn1",
"kix1",
"lhr1",
"pdx1",
"sfo1",
"sin1",
"syd1",
];
export const OpenaiMetas = {
ChatPath: "v1/chat/completions",
UsagePath: "dashboard/billing/usage",
@ -157,9 +177,9 @@ export const modelConfigs = [
},
];

export const settingItems: (
defaultEndpoint: string,
) => SettingItem<SettingKeys>[] = (defaultEndpoint) => [
{
name: "openaiUrl",
title: Locale.Endpoint.Title,
@ -189,6 +209,6 @@ export const settingItems: SettingItem<SettingKeys>[] = [
placeholder: Locale.ApiKey.Placeholder,
type: "input",
inputType: "password",
// validators: ["required"],
},
];
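// --- Illustrative sketch (not part of this commit) ---
// settingItems is now a factory keyed on a default endpoint instead of a static
// array, so each provider instance can point the endpoint setting at its own
// proxy route. The argument below mirrors the route OpenAIProvider builds from
// apiRouteRootName and OpenaiMetas.ChatPath.
const exampleItems = settingItems("/api/provider/openai/v1/chat/completions");
const endpointSetting = exampleItems.find((item) => item.name === "openaiUrl");
console.log(endpointSetting?.title); // Locale.Endpoint.Title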

View File

@ -4,20 +4,32 @@ import {
IProviderTemplate,
ModelInfo,
getMessageTextContent,
ServerConfig,
} from "../../common";
import {
EventStreamContentType,
fetchEventSource,
} from "@fortaine/fetch-event-source";
import Locale from "@/app/locales";
import {
authHeaderName,
prettyObject,
parseResp,
auth,
getTimer,
getHeaders,
} from "./utils";
import {
modelConfigs,
settingItems,
SettingKeys,
OpenaiMetas,
ROLES,
OPENAI_BASE_URL,
preferredRegion,
} from "./config";
import { NextRequest, NextResponse } from "next/server";
import { ModelList } from "./type";

export type OpenAIProviderSettingKeys = SettingKeys;
@ -49,66 +61,54 @@ interface RequestPayload {
max_tokens?: number;
}

type ProviderTemplate = IProviderTemplate<
SettingKeys,
"azure",
typeof OpenaiMetas
>;

class OpenAIProvider
implements IProviderTemplate<SettingKeys, "openai", typeof OpenaiMetas>
{
apiRouteRootName: "/api/provider/openai" = "/api/provider/openai";
allowedApiMethods: (
| "POST"
| "GET"
| "OPTIONS"
| "PUT"
| "PATCH"
| "DELETE"
)[] = ["GET", "POST"];
runtime = "edge" as const;
preferredRegion = preferredRegion;
name = "openai" as const; name = "openai" as const;
metas = OpenaiMetas; metas = OpenaiMetas;
readonly REQUEST_TIMEOUT_MS = 60000;
defaultModels = modelConfigs; defaultModels = modelConfigs;
providerMeta = { providerMeta = {
displayName: "OpenAI", displayName: "OpenAI",
settingItems, settingItems: settingItems(
`${this.apiRouteRootName}/${OpenaiMetas.ChatPath}`,
),
}; };
private path(payload: InternalChatRequestPayload<SettingKeys>): string { private formatChatPayload(payload: InternalChatRequestPayload<SettingKeys>) {
const { const {
messages,
isVisionModel,
model,
stream,
modelConfig: {
temperature,
presence_penalty,
frequency_penalty,
top_p,
max_tokens,
},
providerConfig: { openaiUrl }, providerConfig: { openaiUrl },
} = payload; } = payload;
const path = OpenaiMetas.ChatPath;
console.log("[Proxy Endpoint] ", openaiUrl, path);
return [openaiUrl, path].join("/");
}
private getHeaders(payload: InternalChatRequestPayload<SettingKeys>) {
const { openaiApiKey } = payload.providerConfig;
const headers: Record<string, string> = {
"Content-Type": "application/json",
Accept: "application/json",
};
if (validString(openaiApiKey)) {
headers["Authorization"] = makeBearer(openaiApiKey);
}
return headers;
}
private formatChatPayload(payload: InternalChatRequestPayload<SettingKeys>) {
const { messages, isVisionModel, model, stream, modelConfig } = payload;
const {
temperature,
presence_penalty,
frequency_penalty,
top_p,
max_tokens,
} = modelConfig;
const openAiMessages = messages.map((v) => ({ const openAiMessages = messages.map((v) => ({
role: v.role, role: v.role,
@ -133,42 +133,101 @@ class OpenAIProvider
console.log("[Request] openai payload: ", requestPayload); console.log("[Request] openai payload: ", requestPayload);
return { return {
headers: this.getHeaders(payload), headers: getHeaders(payload.providerConfig.openaiApiKey),
body: JSON.stringify(requestPayload), body: JSON.stringify(requestPayload),
method: "POST", method: "POST",
url: this.path(payload), url: openaiUrl!,
}; };
} }
private readWholeMessageResponseBody(res: { private async requestOpenai(req: NextRequest, serverConfig: ServerConfig) {
choices: { message: { content: any } }[]; const { baseUrl = OPENAI_BASE_URL, openaiOrgId } = serverConfig;
}) {
return {
message: res.choices?.[0]?.message?.content ?? "",
};
}
private getTimer = () => {
const controller = new AbortController(); const controller = new AbortController();
const authValue = req.headers.get(authHeaderName) ?? "";
// make a fetch request const path = `${req.nextUrl.pathname}${req.nextUrl.search}`.replaceAll(
const requestTimeoutId = setTimeout( this.apiRouteRootName,
() => controller.abort(), "",
this.REQUEST_TIMEOUT_MS,
); );
return { console.log("[Proxy] ", path);
...controller, console.log("[Base Url]", baseUrl);
clear: () => {
clearTimeout(requestTimeoutId);
},
};
};
async chat(payload: InternalChatRequestPayload<SettingKeys>) { const timeoutId = setTimeout(
() => {
controller.abort();
},
10 * 60 * 1000,
);
const fetchUrl = `${baseUrl}/${path}`;
const fetchOptions: RequestInit = {
headers: {
"Content-Type": "application/json",
"Cache-Control": "no-store",
[authHeaderName]: authValue,
...(openaiOrgId && {
"OpenAI-Organization": openaiOrgId,
}),
},
method: req.method,
body: req.body,
// to fix #2485: https://stackoverflow.com/questions/55920957/cloudflare-worker-typeerror-one-time-use-body
redirect: "manual",
// @ts-ignore
duplex: "half",
signal: controller.signal,
};
try {
const res = await fetch(fetchUrl, fetchOptions);
// Extract the OpenAI-Organization header from the response
const openaiOrganizationHeader = res.headers.get("OpenAI-Organization");
// Check if serverConfig.openaiOrgId is defined and not an empty string
if (openaiOrgId && openaiOrgId.trim() !== "") {
// If openaiOrganizationHeader is present, log it; otherwise, log that the header is not present
console.log("[Org ID]", openaiOrganizationHeader);
} else {
console.log("[Org ID] is not set up.");
}
// to prevent browser prompt for credentials
const newHeaders = new Headers(res.headers);
newHeaders.delete("www-authenticate");
// to disable nginx buffering
newHeaders.set("X-Accel-Buffering", "no");
// Conditionally delete the OpenAI-Organization header from the response if [Org ID] is undefined or empty (not set up in ENV).
// This also prevents the header from being sent to the client.
if (!openaiOrgId || openaiOrgId.trim() === "") {
newHeaders.delete("OpenAI-Organization");
}
// The latest version of the OpenAI API forces the content-encoding to be "br" in JSON responses.
// So when streaming is disabled, we need to remove the content-encoding header:
// Vercel compresses the response with gzip, and if the content-encoding header is left in place
// the browser will try to decode the response with brotli and fail.
newHeaders.delete("content-encoding");
return new NextResponse(res.body, {
status: res.status,
statusText: res.statusText,
headers: newHeaders,
});
} finally {
clearTimeout(timeoutId);
}
}
async chat(
payload: InternalChatRequestPayload<SettingKeys>,
fetch: typeof window.fetch,
) {
const requestPayload = this.formatChatPayload(payload);
const timer = getTimer();
const res = await fetch(requestPayload.url, {
headers: {
@ -182,7 +241,7 @@ class OpenAIProvider
timer.clear();
const resJson = await res.json();
const message = parseResp(resJson);
return message;
}
@ -190,13 +249,15 @@ class OpenAIProvider
streamChat(
payload: InternalChatRequestPayload<SettingKeys>,
handlers: ChatHandlers,
fetch: typeof window.fetch,
) {
const requestPayload = this.formatChatPayload(payload);
const timer = getTimer();
fetchEventSource(requestPayload.url, {
...requestPayload,
fetch,
async onopen(res) {
timer.clear();
const contentType = res.headers.get("content-type");
@ -270,7 +331,7 @@ class OpenAIProvider
providerConfig: Record<SettingKeys, string>,
): Promise<ModelInfo[]> {
const { openaiApiKey, openaiUrl } = providerConfig;
const res = await fetch(`${openaiUrl}/v1/models`, {
headers: {
Authorization: `Bearer ${openaiApiKey}`,
},
@ -282,6 +343,39 @@ class OpenAIProvider
name: o.id,
}));
}
serverSideRequestHandler: ProviderTemplate["serverSideRequestHandler"] =
async (req, config) => {
const { subpath } = req;
const ALLOWD_PATH = new Set(Object.values(OpenaiMetas));
if (!ALLOWD_PATH.has(subpath)) {
return NextResponse.json(
{
error: true,
message: "you are not allowed to request " + subpath,
},
{
status: 403,
},
);
}
const authResult = auth(req, config);
if (authResult.error) {
return NextResponse.json(authResult, {
status: 401,
});
}
try {
const response = await this.requestOpenai(req, config);
return response;
} catch (e) {
return NextResponse.json(prettyObject(e));
}
};
}

export default OpenAIProvider;
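// --- Illustrative client-side sketch (not part of this commit) ---
// Driving the provider through the new chat(payload, fetch) signature. The
// payload literal is abbreviated to the fields the provider visibly reads and
// is loosely typed for illustration; the endpoint mirrors the default proxy route.
const exampleProvider = new OpenAIProvider();

const examplePayload = {
  messages: [{ role: "user", content: "Hello" }],
  model: "gpt-3.5-turbo",
  stream: false,
  isVisionModel: false,
  modelConfig: {
    temperature: 0.7,
    presence_penalty: 0,
    frequency_penalty: 0,
    top_p: 1,
    max_tokens: 1024,
  },
  providerConfig: {
    openaiUrl: "/api/provider/openai/v1/chat/completions",
    openaiApiKey: "", // empty: the proxy-side auth() may inject the system key
  },
} as any;

exampleProvider
  .chat(examplePayload, window.fetch.bind(window))
  .then(({ message }) => console.log("[Sketch] assistant replied:", message));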

View File

@ -0,0 +1,18 @@
export interface ModelList {
object: "list";
data: Array<{
id: string;
object: "model";
created: number;
owned_by: "system" | "openai-internal";
}>;
}
export interface OpenAIListModelResponse {
object: string;
data: Array<{
id: string;
object: string;
root: string;
}>;
}
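// --- Illustrative sketch (not part of this commit) ---
// How a ModelList payload collapses inside getAvailableModels: in the visible
// hunk each entry's id becomes the model name. The sample data is hypothetical.
const exampleList: ModelList = {
  object: "list",
  data: [
    { id: "gpt-3.5-turbo", object: "model", created: 0, owned_by: "openai-internal" },
    { id: "gpt-4", object: "model", created: 0, owned_by: "system" },
  ],
};

const exampleNames = exampleList.data.map((o) => ({ name: o.id }));
console.log(exampleNames); // [{ name: "gpt-3.5-turbo" }, { name: "gpt-4" }]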

View File

@ -1,7 +1,21 @@
import { NextRequest } from "next/server";
import { ServerConfig, getIP } from "../../common";

export const REQUEST_TIMEOUT_MS = 60000;

export const authHeaderName = "Authorization";

const makeBearer = (s: string) => `Bearer ${s.trim()}`;
const validString = (x?: string): x is string => Boolean(x && x.length > 0);
function parseApiKey(bearToken: string) {
const token = bearToken.trim().replaceAll("Bearer ", "").trim();
return {
apiKey: token,
};
}
export function prettyObject(msg: any) {
const obj = msg;
@ -16,3 +30,74 @@ export function prettyObject(msg: any) {
}
return ["```json", msg, "```"].join("\n");
}
export function parseResp(res: { choices: { message: { content: any } }[] }) {
return {
message: res.choices?.[0]?.message?.content ?? "",
};
}
export function auth(req: NextRequest, serverConfig: ServerConfig) {
const { hideUserApiKey, apiKey: systemApiKey } = serverConfig;
const authToken = req.headers.get(authHeaderName) ?? "";
const { apiKey } = parseApiKey(authToken);
console.log("[User IP] ", getIP(req));
console.log("[Time] ", new Date().toLocaleString());
if (hideUserApiKey && apiKey) {
return {
error: true,
message: "you are not allowed to access with your own api key",
};
}
if (apiKey) {
console.log("[Auth] use user api key");
return {
error: false,
};
}
if (systemApiKey) {
console.log("[Auth] use system api key");
req.headers.set(authHeaderName, `Bearer ${systemApiKey}`);
} else {
console.log("[Auth] admin did not provide an api key");
}
return {
error: false,
};
}
export function getTimer() {
const controller = new AbortController();
// make a fetch request
const requestTimeoutId = setTimeout(
() => controller.abort(),
REQUEST_TIMEOUT_MS,
);
return {
...controller,
clear: () => {
clearTimeout(requestTimeoutId);
},
};
}
export function getHeaders(openaiApiKey?: string) {
const headers: Record<string, string> = {
"Content-Type": "application/json",
Accept: "application/json",
};
if (validString(openaiApiKey)) {
headers[authHeaderName] = makeBearer(openaiApiKey);
}
return headers;
}
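// --- Illustrative sketch (not part of this commit) ---
// getHeaders() only adds the Authorization header when a non-empty key is
// passed, so a request sent without a user key leaves room for the server-side
// auth() above to inject the system key. The key below is hypothetical.
console.log(getHeaders("sk-example-user-key"));
// { "Content-Type": "application/json", Accept: "application/json",
//   Authorization: "Bearer sk-example-user-key" }
console.log(getHeaders());
// { "Content-Type": "application/json", Accept: "application/json" }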

View File

@ -55,7 +55,10 @@ const ACCESS_CODES = (function getAccessCodes(): Set<string> {
})();

function getApiKey(keys?: string) {
if (!keys) {
return;
}
const apiKeyEnvVar = keys;
const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
const randomIndex = Math.floor(Math.random() * apiKeys.length);
const apiKey = apiKeys[randomIndex];