support cloudflare ai gateway
commit 728c38396a
parent 89049e1a22
@@ -11,6 +11,7 @@ import { prettyObject } from "@/app/utils/format";
 import { NextRequest, NextResponse } from "next/server";
 import { auth } from "../../auth";
 import { isModelAvailableInServer } from "@/app/utils/model";
+import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
 
 const ALLOWD_PATH = new Set([Anthropic.ChatPath, Anthropic.ChatPath1]);
 
@@ -114,7 +115,8 @@ async function request(req: NextRequest) {
     10 * 60 * 1000,
   );
 
-  const fetchUrl = `${baseUrl}${path}`;
+  // try rebuild url, when using cloudflare ai gateway in server
+  const fetchUrl = cloudflareAIGatewayUrl(`${baseUrl}${path}`);
 
   const fetchOptions: RequestInit = {
     headers: {
@@ -164,17 +166,17 @@ async function request(req: NextRequest) {
       console.error(`[Anthropic] filter`, e);
     }
   }
-  console.log("[Anthropic request]", fetchOptions.headers, req.method);
+  // console.log("[Anthropic request]", fetchOptions.headers, req.method);
   try {
     const res = await fetch(fetchUrl, fetchOptions);
 
-    console.log(
-      "[Anthropic response]",
-      res.status,
-      " ",
-      res.headers,
-      res.url,
-    );
+    // console.log(
+    //   "[Anthropic response]",
+    //   res.status,
+    //   " ",
+    //   res.headers,
+    //   res.url,
+    // );
     // to prevent browser prompt for credentials
     const newHeaders = new Headers(res.headers);
     newHeaders.delete("www-authenticate");
@@ -7,6 +7,7 @@ import {
   ServiceProvider,
 } from "../constant";
 import { isModelAvailableInServer } from "../utils/model";
+import { cloudflareAIGatewayUrl } from "../utils/cloudflare";
 
 const serverConfig = getServerSideConfig();
 
@@ -37,7 +38,7 @@ export async function requestOpenai(req: NextRequest) {
   );
 
   let baseUrl =
-    serverConfig.azureUrl || serverConfig.baseUrl || OPENAI_BASE_URL;
+    (isAzure ? serverConfig.azureUrl : serverConfig.baseUrl) || OPENAI_BASE_URL;
 
   if (!baseUrl.startsWith("http")) {
     baseUrl = `https://${baseUrl}`;
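Worth noting about the baseUrl change in the hunk above: before, a configured azureUrl took precedence for every request, while now it is only used when the request actually targets Azure (the isAzure check). A minimal sketch of the difference, with made-up config values (serverConfig and isAzure here are stand-ins, not the real values computed in requestOpenai):

// Hypothetical config values, for illustration only.
const serverConfig = {
  azureUrl: "https://my-resource.openai.azure.com",
  baseUrl: "https://api.openai.com",
};
const OPENAI_BASE_URL = "https://api.openai.com";
const isAzure = false; // a plain (non-Azure) OpenAI request

// before: a configured azureUrl shadowed baseUrl for every request
const oldBaseUrl =
  serverConfig.azureUrl || serverConfig.baseUrl || OPENAI_BASE_URL;
// -> "https://my-resource.openai.azure.com"

// after: azureUrl is only consulted when the request is actually Azure
const newBaseUrl =
  (isAzure ? serverConfig.azureUrl : serverConfig.baseUrl) || OPENAI_BASE_URL;
// -> "https://api.openai.com"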
@@ -95,7 +96,8 @@ export async function requestOpenai(req: NextRequest) {
     }
   }
 
-  const fetchUrl = `${baseUrl}/${path}`;
+  const fetchUrl = cloudflareAIGatewayUrl(`${baseUrl}/${path}`);
+  console.log("fetchUrl", fetchUrl);
   const fetchOptions: RequestInit = {
     headers: {
       "Content-Type": "application/json",
@@ -1,5 +1,5 @@
 import { ACCESS_CODE_PREFIX, Anthropic, ApiPath } from "@/app/constant";
-import { ChatOptions, getHeaders, LLMApi, MultimodalContent, } from "../api";
+import { ChatOptions, getHeaders, LLMApi, MultimodalContent } from "../api";
 import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
 import { getClientConfig } from "@/app/config/client";
 import { DEFAULT_API_HOST } from "@/app/constant";
@@ -12,6 +12,7 @@ import {
 import Locale from "../../locales";
 import { prettyObject } from "@/app/utils/format";
 import { getMessageTextContent, isVisionModel } from "@/app/utils";
+import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
 
 export type MultiBlockContent = {
   type: "image" | "text";
@@ -375,7 +376,8 @@ export class ClaudeApi implements LLMApi {
 
     baseUrl = trimEnd(baseUrl, "/");
 
-    return `${baseUrl}/${path}`;
+    // try rebuild url, when using cloudflare ai gateway in client
+    return cloudflareAIGatewayUrl(`${baseUrl}/${path}`);
   }
 }
 
@@ -11,6 +11,7 @@ import {
 } from "@/app/constant";
 import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
 import { collectModelsWithDefaultModel } from "@/app/utils/model";
+import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
 
 import {
   ChatOptions,
@@ -94,7 +95,8 @@ export class ChatGPTApi implements LLMApi {
 
     console.log("[Proxy Endpoint] ", baseUrl, path);
 
-    return [baseUrl, path].join("/");
+    // try rebuild url, when using cloudflare ai gateway in client
+    return cloudflareAIGatewayUrl([baseUrl, path].join("/"));
   }
 
   extractMessage(res: any) {
@@ -0,0 +1,26 @@
+export function cloudflareAIGatewayUrl(fetchUrl: string) {
+  // rebuild fetchUrl, if using cloudflare ai gateway
+  // document: https://developers.cloudflare.com/ai-gateway/providers/openai/
+
+  const paths = fetchUrl.split("/");
+  if ("gateway.ai.cloudflare.com" == paths[2]) {
+    // is cloudflare.com ai gateway
+    // https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/azure-openai/{resource_name}/{deployment_name}/chat/completions?api-version=2023-05-15'
+    if ("azure-openai" == paths[6]) {
+      // is azure gateway
+      return paths.slice(0, 8).concat(paths.slice(-3)).join("/"); // rebuild ai gateway azure_url
+    }
+    // https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/openai/chat/completions
+    if ("openai" == paths[6]) {
+      // is openai gateway
+      return paths.slice(0, 7).concat(paths.slice(-2)).join("/"); // rebuild ai gateway openai_url
+    }
+    // https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/anthropic/v1/messages \
+    if ("anthropic" == paths[6]) {
+      // is anthropic gateway
+      return paths.slice(0, 7).concat(paths.slice(-2)).join("/"); // rebuild ai gateway anthropic_url
+    }
+    // TODO: Amazon Bedrock, Groq, HuggingFace...
+  }
+  return fetchUrl;
+}
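A quick illustration of how the rebuild above behaves, using a hypothetical account id and gateway id (acc123 and my-gw are placeholders): the function keeps the gateway prefix plus the trailing endpoint segments, drops whatever the client inserted in between, and leaves non-gateway URLs untouched.

import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";

// OpenAI via the gateway: assuming the client appends a path that starts with
// "v1/", the duplicated "v1" segment between "openai" and "chat" is dropped.
cloudflareAIGatewayUrl(
  "https://gateway.ai.cloudflare.com/v1/acc123/my-gw/openai/v1/chat/completions",
);
// -> "https://gateway.ai.cloudflare.com/v1/acc123/my-gw/openai/chat/completions"

// Anything that is not a gateway.ai.cloudflare.com URL passes through unchanged.
cloudflareAIGatewayUrl("https://api.openai.com/v1/chat/completions");
// -> "https://api.openai.com/v1/chat/completions"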