feat: sync upstream code
parent 52726d42e9
commit c948a28ef2
@@ -0,0 +1,76 @@
+import { ApiPath } from "@/app/constant";
+import { NextRequest } from "next/server";
+import { handle as openaiHandler } from "../../openai";
+import { handle as azureHandler } from "../../azure";
+import { handle as googleHandler } from "../../google";
+import { handle as anthropicHandler } from "../../anthropic";
+import { handle as baiduHandler } from "../../baidu";
+import { handle as bytedanceHandler } from "../../bytedance";
+import { handle as alibabaHandler } from "../../alibaba";
+import { handle as moonshotHandler } from "../../moonshot";
+import { handle as stabilityHandler } from "../../stability";
+import { handle as iflytekHandler } from "../../iflytek";
+import { handle as xaiHandler } from "../../xai";
+import { handle as chatglmHandler } from "../../glm";
+import { handle as proxyHandler } from "../../proxy";
+
+async function handle(
+  req: NextRequest,
+  { params }: { params: { provider: string; path: string[] } },
+) {
+  const apiPath = `/api/${params.provider}`;
+  console.log(`[${params.provider} Route] params `, params);
+  switch (apiPath) {
+    case ApiPath.Azure:
+      return azureHandler(req, { params });
+    case ApiPath.Google:
+      return googleHandler(req, { params });
+    case ApiPath.Anthropic:
+      return anthropicHandler(req, { params });
+    case ApiPath.Baidu:
+      return baiduHandler(req, { params });
+    case ApiPath.ByteDance:
+      return bytedanceHandler(req, { params });
+    case ApiPath.Alibaba:
+      return alibabaHandler(req, { params });
+    // case ApiPath.Tencent: using "/api/tencent"
+    case ApiPath.Moonshot:
+      return moonshotHandler(req, { params });
+    case ApiPath.Stability:
+      return stabilityHandler(req, { params });
+    case ApiPath.Iflytek:
+      return iflytekHandler(req, { params });
+    case ApiPath.XAI:
+      return xaiHandler(req, { params });
+    case ApiPath.ChatGLM:
+      return chatglmHandler(req, { params });
+    case ApiPath.OpenAI:
+      return openaiHandler(req, { params });
+    default:
+      return proxyHandler(req, { params });
+  }
+}
+
+export const GET = handle;
+export const POST = handle;
+
+export const runtime = "edge";
+export const preferredRegion = [
+  "arn1",
+  "bom1",
+  "cdg1",
+  "cle1",
+  "cpt1",
+  "dub1",
+  "fra1",
+  "gru1",
+  "hnd1",
+  "iad1",
+  "icn1",
+  "kix1",
+  "lhr1",
+  "pdx1",
+  "sfo1",
+  "sin1",
+  "syd1",
+];
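A minimal sketch of how this dispatcher resolves a request, assuming the ApiPath constants follow the "/api/<provider>" pattern implied by the switch above (the constant definitions themselves are not part of this diff):

// Hypothetical illustration only, not part of the commit.
// A request to /api/anthropic/v1/messages reaches this route with
// params = { provider: "anthropic", path: ["v1", "messages"] }.
const examplePath = "/api/anthropic/v1/messages";
const [, , provider, ...rest] = examplePath.split("/");
const params = { provider, path: rest };
const apiPath = `/api/${params.provider}`; // "/api/anthropic" -> ApiPath.Anthropic -> anthropicHandler
console.log(apiPath, params.path.join("/")); // "/api/anthropic" "v1/messages"

Providers that match no case fall through to the default branch and are served by proxyHandler.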
@@ -1,6 +1,5 @@
 import { getServerSideConfig } from "@/app/config/server";
 import {
-  Alibaba,
   ALIBABA_BASE_URL,
   ApiPath,
   ModelProvider,
@@ -10,11 +9,10 @@ import { prettyObject } from "@/app/utils/format";
 import { NextRequest, NextResponse } from "next/server";
 import { auth } from "@/app/api/auth";
 import { isModelAvailableInServer } from "@/app/utils/model";
-import type { RequestPayload } from "@/app/client/platforms/openai";

 const serverConfig = getServerSideConfig();

-async function handle(
+export async function handle(
   req: NextRequest,
   { params }: { params: { path: string[] } },
 ) {
@@ -40,30 +38,6 @@ async function handle(
   }
 }
-
-export const GET = handle;
-export const POST = handle;
-
-export const runtime = "edge";
-export const preferredRegion = [
-  "arn1",
-  "bom1",
-  "cdg1",
-  "cle1",
-  "cpt1",
-  "dub1",
-  "fra1",
-  "gru1",
-  "hnd1",
-  "iad1",
-  "icn1",
-  "kix1",
-  "lhr1",
-  "pdx1",
-  "sfo1",
-  "sin1",
-  "syd1",
-];

 async function request(req: NextRequest) {
   const controller = new AbortController();
@@ -3,19 +3,18 @@ import {
   ANTHROPIC_BASE_URL,
   Anthropic,
   ApiPath,
-  DEFAULT_MODELS,
   ServiceProvider,
   ModelProvider,
 } from "@/app/constant";
 import { prettyObject } from "@/app/utils/format";
 import { NextRequest, NextResponse } from "next/server";
-import { auth } from "../../auth";
+import { auth } from "./auth";
 import { isModelAvailableInServer } from "@/app/utils/model";
 import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";

 const ALLOWD_PATH = new Set([Anthropic.ChatPath, Anthropic.ChatPath1]);

-async function handle(
+export async function handle(
   req: NextRequest,
   { params }: { params: { path: string[] } },
 ) {
@@ -56,30 +55,6 @@ async function handle(
   }
 }
-
-export const GET = handle;
-export const POST = handle;
-
-export const runtime = "edge";
-export const preferredRegion = [
-  "arn1",
-  "bom1",
-  "cdg1",
-  "cle1",
-  "cpt1",
-  "dub1",
-  "fra1",
-  "gru1",
-  "hnd1",
-  "iad1",
-  "icn1",
-  "kix1",
-  "lhr1",
-  "pdx1",
-  "sfo1",
-  "sin1",
-  "syd1",
-];

 const serverConfig = getServerSideConfig();

 async function request(req: NextRequest) {
@@ -122,6 +97,7 @@ async function request(req: NextRequest) {
     headers: {
       "Content-Type": "application/json",
       "Cache-Control": "no-store",
+      "anthropic-dangerous-direct-browser-access": "true",
       [authHeaderName]: authValue,
       "anthropic-version":
         req.headers.get("anthropic-version") ||
@@ -1,11 +1,10 @@
-import { getServerSideConfig } from "@/app/config/server";
 import { ModelProvider } from "@/app/constant";
 import { prettyObject } from "@/app/utils/format";
 import { NextRequest, NextResponse } from "next/server";
-import { auth } from "../../auth";
-import { requestOpenai } from "../../common";
+import { auth } from "./auth";
+import { requestOpenai } from "./common";

-async function handle(
+export async function handle(
   req: NextRequest,
   { params }: { params: { path: string[] } },
 ) {
@@ -31,27 +30,3 @@ async function handle(
     return NextResponse.json(prettyObject(e));
   }
 }
-
-export const GET = handle;
-export const POST = handle;
-
-export const runtime = "edge";
-export const preferredRegion = [
-  "arn1",
-  "bom1",
-  "cdg1",
-  "cle1",
-  "cpt1",
-  "dub1",
-  "fra1",
-  "gru1",
-  "hnd1",
-  "iad1",
-  "icn1",
-  "kix1",
-  "lhr1",
-  "pdx1",
-  "sfo1",
-  "sin1",
-  "syd1",
-];
@@ -3,7 +3,6 @@ import {
   BAIDU_BASE_URL,
   ApiPath,
   ModelProvider,
-  BAIDU_OATUH_URL,
   ServiceProvider,
 } from "@/app/constant";
 import { prettyObject } from "@/app/utils/format";
@@ -14,7 +13,7 @@ import { getAccessToken } from "@/app/utils/baidu";

 const serverConfig = getServerSideConfig();

-async function handle(
+export async function handle(
   req: NextRequest,
   { params }: { params: { path: string[] } },
 ) {
@@ -52,30 +51,6 @@ async function handle(
   }
 }
-
-export const GET = handle;
-export const POST = handle;
-
-export const runtime = "edge";
-export const preferredRegion = [
-  "arn1",
-  "bom1",
-  "cdg1",
-  "cle1",
-  "cpt1",
-  "dub1",
-  "fra1",
-  "gru1",
-  "hnd1",
-  "iad1",
-  "icn1",
-  "kix1",
-  "lhr1",
-  "pdx1",
-  "sfo1",
-  "sin1",
-  "syd1",
-];

 async function request(req: NextRequest) {
   const controller = new AbortController();
@@ -12,7 +12,7 @@ import { isModelAvailableInServer } from "@/app/utils/model";

 const serverConfig = getServerSideConfig();

-async function handle(
+export async function handle(
   req: NextRequest,
   { params }: { params: { path: string[] } },
 ) {
@@ -38,30 +38,6 @@ async function handle(
   }
 }
-
-export const GET = handle;
-export const POST = handle;
-
-export const runtime = "edge";
-export const preferredRegion = [
-  "arn1",
-  "bom1",
-  "cdg1",
-  "cle1",
-  "cpt1",
-  "dub1",
-  "fra1",
-  "gru1",
-  "hnd1",
-  "iad1",
-  "icn1",
-  "kix1",
-  "lhr1",
-  "pdx1",
-  "sfo1",
-  "sin1",
-  "syd1",
-];

 async function request(req: NextRequest) {
   const controller = new AbortController();
@@ -1,38 +0,0 @@
-import { NextRequest, NextResponse } from "next/server";
-
-async function handle(req: NextRequest) {
-  if (req.method === "OPTIONS") {
-    return NextResponse.json({ body: "OK" }, { status: 200 });
-  }
-
-  const targetUrl = req.nextUrl.searchParams.get("url");
-
-  if (!targetUrl) {
-    return NextResponse.json({ body: "Bad Url" }, { status: 500 });
-  }
-
-  const method = req.headers.get("method") ?? undefined;
-  const fetchOptions: RequestInit = {
-    headers: {
-      authorization: req.headers.get("authorization") ?? "",
-    },
-    method,
-    // @ts-ignore
-    duplex: "half",
-  };
-
-  const fetchResult = await fetch(targetUrl, fetchOptions);
-
-  console.log("[Any Proxy]", targetUrl, {
-    status: fetchResult.status,
-    statusText: fetchResult.statusText,
-  });
-
-  return fetchResult;
-}
-
-export const GET = handle;
-export const OPTIONS = handle;
-
-export const runtime = "edge";
-export const revalidate = 0;
@@ -0,0 +1,129 @@
+import { getServerSideConfig } from "@/app/config/server";
+import {
+  CHATGLM_BASE_URL,
+  ApiPath,
+  ModelProvider,
+  ServiceProvider,
+} from "@/app/constant";
+import { prettyObject } from "@/app/utils/format";
+import { NextRequest, NextResponse } from "next/server";
+import { auth } from "@/app/api/auth";
+import { isModelAvailableInServer } from "@/app/utils/model";
+
+const serverConfig = getServerSideConfig();
+
+export async function handle(
+  req: NextRequest,
+  { params }: { params: { path: string[] } },
+) {
+  console.log("[GLM Route] params ", params);
+
+  if (req.method === "OPTIONS") {
+    return NextResponse.json({ body: "OK" }, { status: 200 });
+  }
+
+  const authResult = auth(req, ModelProvider.ChatGLM);
+  if (authResult.error) {
+    return NextResponse.json(authResult, {
+      status: 401,
+    });
+  }
+
+  try {
+    const response = await request(req);
+    return response;
+  } catch (e) {
+    console.error("[GLM] ", e);
+    return NextResponse.json(prettyObject(e));
+  }
+}
+
+async function request(req: NextRequest) {
+  const controller = new AbortController();
+
+  // alibaba use base url or just remove the path
+  let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.ChatGLM, "");
+
+  let baseUrl = serverConfig.chatglmUrl || CHATGLM_BASE_URL;
+
+  if (!baseUrl.startsWith("http")) {
+    baseUrl = `https://${baseUrl}`;
+  }
+
+  if (baseUrl.endsWith("/")) {
+    baseUrl = baseUrl.slice(0, -1);
+  }
+
+  console.log("[Proxy] ", path);
+  console.log("[Base Url]", baseUrl);
+
+  const timeoutId = setTimeout(
+    () => {
+      controller.abort();
+    },
+    10 * 60 * 1000,
+  );
+
+  const fetchUrl = `${baseUrl}${path}`;
+  console.log("[Fetch Url] ", fetchUrl);
+  const fetchOptions: RequestInit = {
+    headers: {
+      "Content-Type": "application/json",
+      Authorization: req.headers.get("Authorization") ?? "",
+    },
+    method: req.method,
+    body: req.body,
+    redirect: "manual",
+    // @ts-ignore
+    duplex: "half",
+    signal: controller.signal,
+  };
+
+  // #1815 try to refuse some request to some models
+  if (serverConfig.customModels && req.body) {
+    try {
+      const clonedBody = await req.text();
+      fetchOptions.body = clonedBody;
+
+      const jsonBody = JSON.parse(clonedBody) as { model?: string };
+
+      // not undefined and is false
+      if (
+        isModelAvailableInServer(
+          serverConfig.customModels,
+          jsonBody?.model as string,
+          ServiceProvider.ChatGLM as string,
+        )
+      ) {
+        return NextResponse.json(
+          {
+            error: true,
+            message: `you are not allowed to use ${jsonBody?.model} model`,
+          },
+          {
+            status: 403,
+          },
+        );
+      }
+    } catch (e) {
+      console.error(`[GLM] filter`, e);
+    }
+  }
+  try {
+    const res = await fetch(fetchUrl, fetchOptions);
+
+    // to prevent browser prompt for credentials
+    const newHeaders = new Headers(res.headers);
+    newHeaders.delete("www-authenticate");
+    // to disable nginx buffering
+    newHeaders.set("X-Accel-Buffering", "no");
+
+    return new Response(res.body, {
+      status: res.status,
+      statusText: res.statusText,
+      headers: newHeaders,
+    });
+  } finally {
+    clearTimeout(timeoutId);
+  }
+}
@@ -1,19 +1,14 @@
 import { NextRequest, NextResponse } from "next/server";
-import { auth } from "../../auth";
+import { auth } from "./auth";
 import { getServerSideConfig } from "@/app/config/server";
-import {
-  ApiPath,
-  GEMINI_BASE_URL,
-  Google,
-  ModelProvider,
-} from "@/app/constant";
+import { ApiPath, GEMINI_BASE_URL, ModelProvider } from "@/app/constant";
 import { prettyObject } from "@/app/utils/format";

 const serverConfig = getServerSideConfig();

-async function handle(
+export async function handle(
   req: NextRequest,
-  { params }: { params: { path: string[] } },
+  { params }: { params: { provider: string; path: string[] } },
 ) {
   console.log("[Google Route] params ", params);
@@ -28,7 +23,8 @@ async function handle(
     });
   }

-  const bearToken = req.headers.get("x-goog-api-key") ?? "";
+  const bearToken =
+    req.headers.get("x-goog-api-key") || req.headers.get("Authorization") || "";
   const token = bearToken.trim().replaceAll("Bearer ", "").trim();

   const apiKey = token ? token : serverConfig.googleApiKey;
@@ -96,8 +92,8 @@ async function request(req: NextRequest, apiKey: string) {
     },
     10 * 60 * 1000,
   );
-  const fetchUrl = `${baseUrl}${path}?key=${apiKey}${
-    req?.nextUrl?.searchParams?.get("alt") === "sse" ? "&alt=sse" : ""
+  const fetchUrl = `${baseUrl}${path}${
+    req?.nextUrl?.searchParams?.get("alt") === "sse" ? "?alt=sse" : ""
   }`;

   console.log("[Fetch Url] ", fetchUrl);
@@ -105,6 +101,9 @@ async function request(req: NextRequest, apiKey: string) {
     headers: {
       "Content-Type": "application/json",
       "Cache-Control": "no-store",
+      "x-goog-api-key":
+        req.headers.get("x-goog-api-key") ||
+        (req.headers.get("Authorization") ?? "").replace("Bearer ", ""),
     },
     method: req.method,
     body: req.body,
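A hedged client-side sketch (not part of the commit) of what this change means in practice: the Gemini key is now forwarded in the x-goog-api-key header instead of being appended as a ?key= query parameter, and the route accepts either x-goog-api-key or an Authorization bearer token. The subpath below is a placeholder, not taken from this diff:

// Hypothetical illustration only.
const subpath = "v1beta/models"; // placeholder Google API subpath
await fetch(`/api/google/${subpath}?alt=sse`, {
  method: "GET",
  headers: {
    "x-goog-api-key": "YOUR_GEMINI_KEY", // or: Authorization: "Bearer YOUR_GEMINI_KEY"
  },
});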
@@ -0,0 +1,129 @@
+import { getServerSideConfig } from "@/app/config/server";
+import {
+  IFLYTEK_BASE_URL,
+  ApiPath,
+  ModelProvider,
+  ServiceProvider,
+} from "@/app/constant";
+import { prettyObject } from "@/app/utils/format";
+import { NextRequest, NextResponse } from "next/server";
+import { auth } from "@/app/api/auth";
+import { isModelAvailableInServer } from "@/app/utils/model";
+// iflytek
+
+const serverConfig = getServerSideConfig();
+
+export async function handle(
+  req: NextRequest,
+  { params }: { params: { path: string[] } },
+) {
+  console.log("[Iflytek Route] params ", params);
+
+  if (req.method === "OPTIONS") {
+    return NextResponse.json({ body: "OK" }, { status: 200 });
+  }
+
+  const authResult = auth(req, ModelProvider.Iflytek);
+  if (authResult.error) {
+    return NextResponse.json(authResult, {
+      status: 401,
+    });
+  }
+
+  try {
+    const response = await request(req);
+    return response;
+  } catch (e) {
+    console.error("[Iflytek] ", e);
+    return NextResponse.json(prettyObject(e));
+  }
+}
+
+async function request(req: NextRequest) {
+  const controller = new AbortController();
+
+  // iflytek use base url or just remove the path
+  let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.Iflytek, "");
+
+  let baseUrl = serverConfig.iflytekUrl || IFLYTEK_BASE_URL;
+
+  if (!baseUrl.startsWith("http")) {
+    baseUrl = `https://${baseUrl}`;
+  }
+
+  if (baseUrl.endsWith("/")) {
+    baseUrl = baseUrl.slice(0, -1);
+  }
+
+  console.log("[Proxy] ", path);
+  console.log("[Base Url]", baseUrl);
+
+  const timeoutId = setTimeout(
+    () => {
+      controller.abort();
+    },
+    10 * 60 * 1000,
+  );
+
+  const fetchUrl = `${baseUrl}${path}`;
+  const fetchOptions: RequestInit = {
+    headers: {
+      "Content-Type": "application/json",
+      Authorization: req.headers.get("Authorization") ?? "",
+    },
+    method: req.method,
+    body: req.body,
+    redirect: "manual",
+    // @ts-ignore
+    duplex: "half",
+    signal: controller.signal,
+  };
+
+  // try to refuse some request to some models
+  if (serverConfig.customModels && req.body) {
+    try {
+      const clonedBody = await req.text();
+      fetchOptions.body = clonedBody;
+
+      const jsonBody = JSON.parse(clonedBody) as { model?: string };
+
+      // not undefined and is false
+      if (
+        isModelAvailableInServer(
+          serverConfig.customModels,
+          jsonBody?.model as string,
+          ServiceProvider.Iflytek as string,
+        )
+      ) {
+        return NextResponse.json(
+          {
+            error: true,
+            message: `you are not allowed to use ${jsonBody?.model} model`,
+          },
+          {
+            status: 403,
+          },
+        );
+      }
+    } catch (e) {
+      console.error(`[Iflytek] filter`, e);
+    }
+  }
+  try {
+    const res = await fetch(fetchUrl, fetchOptions);
+
+    // to prevent browser prompt for credentials
+    const newHeaders = new Headers(res.headers);
+    newHeaders.delete("www-authenticate");
+    // to disable nginx buffering
+    newHeaders.set("X-Accel-Buffering", "no");
+
+    return new Response(res.body, {
+      status: res.status,
+      statusText: res.statusText,
+      headers: newHeaders,
+    });
+  } finally {
+    clearTimeout(timeoutId);
+  }
+}
@@ -0,0 +1,128 @@
+import { getServerSideConfig } from "@/app/config/server";
+import {
+  MOONSHOT_BASE_URL,
+  ApiPath,
+  ModelProvider,
+  ServiceProvider,
+} from "@/app/constant";
+import { prettyObject } from "@/app/utils/format";
+import { NextRequest, NextResponse } from "next/server";
+import { auth } from "@/app/api/auth";
+import { isModelAvailableInServer } from "@/app/utils/model";
+
+const serverConfig = getServerSideConfig();
+
+export async function handle(
+  req: NextRequest,
+  { params }: { params: { path: string[] } },
+) {
+  console.log("[Moonshot Route] params ", params);
+
+  if (req.method === "OPTIONS") {
+    return NextResponse.json({ body: "OK" }, { status: 200 });
+  }
+
+  const authResult = auth(req, ModelProvider.Moonshot);
+  if (authResult.error) {
+    return NextResponse.json(authResult, {
+      status: 401,
+    });
+  }
+
+  try {
+    const response = await request(req);
+    return response;
+  } catch (e) {
+    console.error("[Moonshot] ", e);
+    return NextResponse.json(prettyObject(e));
+  }
+}
+
+async function request(req: NextRequest) {
+  const controller = new AbortController();
+
+  // alibaba use base url or just remove the path
+  let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.Moonshot, "");
+
+  let baseUrl = serverConfig.moonshotUrl || MOONSHOT_BASE_URL;
+
+  if (!baseUrl.startsWith("http")) {
+    baseUrl = `https://${baseUrl}`;
+  }
+
+  if (baseUrl.endsWith("/")) {
+    baseUrl = baseUrl.slice(0, -1);
+  }
+
+  console.log("[Proxy] ", path);
+  console.log("[Base Url]", baseUrl);
+
+  const timeoutId = setTimeout(
+    () => {
+      controller.abort();
+    },
+    10 * 60 * 1000,
+  );
+
+  const fetchUrl = `${baseUrl}${path}`;
+  const fetchOptions: RequestInit = {
+    headers: {
+      "Content-Type": "application/json",
+      Authorization: req.headers.get("Authorization") ?? "",
+    },
+    method: req.method,
+    body: req.body,
+    redirect: "manual",
+    // @ts-ignore
+    duplex: "half",
+    signal: controller.signal,
+  };
+
+  // #1815 try to refuse some request to some models
+  if (serverConfig.customModels && req.body) {
+    try {
+      const clonedBody = await req.text();
+      fetchOptions.body = clonedBody;
+
+      const jsonBody = JSON.parse(clonedBody) as { model?: string };
+
+      // not undefined and is false
+      if (
+        isModelAvailableInServer(
+          serverConfig.customModels,
+          jsonBody?.model as string,
+          ServiceProvider.Moonshot as string,
+        )
+      ) {
+        return NextResponse.json(
+          {
+            error: true,
+            message: `you are not allowed to use ${jsonBody?.model} model`,
+          },
+          {
+            status: 403,
+          },
+        );
+      }
+    } catch (e) {
+      console.error(`[Moonshot] filter`, e);
+    }
+  }
+  try {
+    const res = await fetch(fetchUrl, fetchOptions);
+
+    // to prevent browser prompt for credentials
+    const newHeaders = new Headers(res.headers);
+    newHeaders.delete("www-authenticate");
+    // to disable nginx buffering
+    newHeaders.set("X-Accel-Buffering", "no");
+
+    return new Response(res.body, {
+      status: res.status,
+      statusText: res.statusText,
+      headers: newHeaders,
+    });
+  } finally {
+    clearTimeout(timeoutId);
+  }
+}
@@ -3,24 +3,26 @@ import { getServerSideConfig } from "@/app/config/server";
 import { ModelProvider, OpenaiPath } from "@/app/constant";
 import { prettyObject } from "@/app/utils/format";
 import { NextRequest, NextResponse } from "next/server";
-import { auth } from "../../auth";
-import { requestOpenai } from "../../common";
+import { auth } from "./auth";
+import { requestOpenai } from "./common";

-const ALLOWD_PATH = new Set(Object.values(OpenaiPath));
+const ALLOWED_PATH = new Set(Object.values(OpenaiPath));

 function getModels(remoteModelRes: OpenAIListModelResponse) {
   const config = getServerSideConfig();

   if (config.disableGPT4) {
     remoteModelRes.data = remoteModelRes.data.filter(
-      (m) => !m.id.startsWith("gpt-4"),
+      (m) =>
+        !(m.id.startsWith("gpt-4") || m.id.startsWith("chatgpt-4o")) ||
+        m.id.startsWith("gpt-4o-mini"),
     );
   }

   return remoteModelRes;
 }

-async function handle(
+export async function handle(
   req: NextRequest,
   { params }: { params: { path: string[] } },
 ) {
@@ -32,7 +34,7 @@ async function handle(

   const subpath = params.path.join("/");

-  if (!ALLOWD_PATH.has(subpath)) {
+  if (!ALLOWED_PATH.has(subpath)) {
     console.log("[OpenAI Route] forbidden path ", subpath);
     return NextResponse.json(
       {
@@ -70,27 +72,3 @@ async function handle(
     return NextResponse.json(prettyObject(e));
   }
 }
-
-export const GET = handle;
-export const POST = handle;
-
-export const runtime = "edge";
-export const preferredRegion = [
-  "arn1",
-  "bom1",
-  "cdg1",
-  "cle1",
-  "cpt1",
-  "dub1",
-  "fra1",
-  "gru1",
-  "hnd1",
-  "iad1",
-  "icn1",
-  "kix1",
-  "lhr1",
-  "pdx1",
-  "sfo1",
-  "sin1",
-  "syd1",
-];
@@ -0,0 +1,75 @@
+import { NextRequest, NextResponse } from "next/server";
+
+export async function handle(
+  req: NextRequest,
+  { params }: { params: { path: string[] } },
+) {
+  console.log("[Proxy Route] params ", params);
+
+  if (req.method === "OPTIONS") {
+    return NextResponse.json({ body: "OK" }, { status: 200 });
+  }
+
+  // remove path params from searchParams
+  req.nextUrl.searchParams.delete("path");
+  req.nextUrl.searchParams.delete("provider");
+
+  const subpath = params.path.join("/");
+  const fetchUrl = `${req.headers.get(
+    "x-base-url",
+  )}/${subpath}?${req.nextUrl.searchParams.toString()}`;
+  const skipHeaders = ["connection", "host", "origin", "referer", "cookie"];
+  const headers = new Headers(
+    Array.from(req.headers.entries()).filter((item) => {
+      if (
+        item[0].indexOf("x-") > -1 ||
+        item[0].indexOf("sec-") > -1 ||
+        skipHeaders.includes(item[0])
+      ) {
+        return false;
+      }
+      return true;
+    }),
+  );
+  const controller = new AbortController();
+  const fetchOptions: RequestInit = {
+    headers,
+    method: req.method,
+    body: req.body,
+    // to fix #2485: https://stackoverflow.com/questions/55920957/cloudflare-worker-typeerror-one-time-use-body
+    redirect: "manual",
+    // @ts-ignore
+    duplex: "half",
+    signal: controller.signal,
+  };
+
+  const timeoutId = setTimeout(
+    () => {
+      controller.abort();
+    },
+    10 * 60 * 1000,
+  );
+
+  try {
+    const res = await fetch(fetchUrl, fetchOptions);
+    // to prevent browser prompt for credentials
+    const newHeaders = new Headers(res.headers);
+    newHeaders.delete("www-authenticate");
+    // to disable nginx buffering
+    newHeaders.set("X-Accel-Buffering", "no");
+
+    // The latest version of the OpenAI API forced the content-encoding to be "br" in json response
+    // So if the streaming is disabled, we need to remove the content-encoding header
+    // Because Vercel uses gzip to compress the response, if we don't remove the content-encoding header
+    // The browser will try to decode the response with brotli and fail
+    newHeaders.delete("content-encoding");
+
+    return new Response(res.body, {
+      status: res.status,
+      statusText: res.statusText,
+      headers: newHeaders,
+    });
+  } finally {
+    clearTimeout(timeoutId);
+  }
+}
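A hedged sketch (not part of the commit) of how a client might reach this generic proxy handler, assuming the dispatcher's default branch forwards unmatched providers here. The handler reads the upstream origin from the x-base-url header, forwards the remaining path, query string, and body, and drops x-*/sec-* headers plus connection, host, origin, referer, and cookie before forwarding. The upstream URL and subpath below are placeholders:

// Hypothetical illustration only.
await fetch("/api/proxy/v1/chat/completions", {
  method: "POST",
  headers: {
    "x-base-url": "https://upstream.example.invalid", // placeholder upstream origin
    "Content-Type": "application/json",
    Authorization: "Bearer YOUR_KEY",
  },
  body: JSON.stringify({ model: "some-model", messages: [] }),
});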
@@ -0,0 +1,99 @@
+import { NextRequest, NextResponse } from "next/server";
+import { getServerSideConfig } from "@/app/config/server";
+import { ModelProvider, STABILITY_BASE_URL } from "@/app/constant";
+import { auth } from "@/app/api/auth";
+
+export async function handle(
+  req: NextRequest,
+  { params }: { params: { path: string[] } },
+) {
+  console.log("[Stability] params ", params);
+
+  if (req.method === "OPTIONS") {
+    return NextResponse.json({ body: "OK" }, { status: 200 });
+  }
+
+  const controller = new AbortController();
+
+  const serverConfig = getServerSideConfig();
+
+  let baseUrl = serverConfig.stabilityUrl || STABILITY_BASE_URL;
+
+  if (!baseUrl.startsWith("http")) {
+    baseUrl = `https://${baseUrl}`;
+  }
+
+  if (baseUrl.endsWith("/")) {
+    baseUrl = baseUrl.slice(0, -1);
+  }
+
+  let path = `${req.nextUrl.pathname}`.replaceAll("/api/stability/", "");
+
+  console.log("[Stability Proxy] ", path);
+  console.log("[Stability Base Url]", baseUrl);
+
+  const timeoutId = setTimeout(
+    () => {
+      controller.abort();
+    },
+    10 * 60 * 1000,
+  );
+
+  const authResult = auth(req, ModelProvider.Stability);
+
+  if (authResult.error) {
+    return NextResponse.json(authResult, {
+      status: 401,
+    });
+  }
+
+  const bearToken = req.headers.get("Authorization") ?? "";
+  const token = bearToken.trim().replaceAll("Bearer ", "").trim();
+
+  const key = token ? token : serverConfig.stabilityApiKey;
+
+  if (!key) {
+    return NextResponse.json(
+      {
+        error: true,
+        message: `missing STABILITY_API_KEY in server env vars`,
+      },
+      {
+        status: 401,
+      },
+    );
+  }
+
+  const fetchUrl = `${baseUrl}/${path}`;
+  console.log("[Stability Url] ", fetchUrl);
+  const fetchOptions: RequestInit = {
+    headers: {
+      "Content-Type": req.headers.get("Content-Type") || "multipart/form-data",
+      Accept: req.headers.get("Accept") || "application/json",
+      Authorization: `Bearer ${key}`,
+    },
+    method: req.method,
+    body: req.body,
+    // to fix #2485: https://stackoverflow.com/questions/55920957/cloudflare-worker-typeerror-one-time-use-body
+    redirect: "manual",
+    // @ts-ignore
+    duplex: "half",
+    signal: controller.signal,
+  };
+
+  try {
+    const res = await fetch(fetchUrl, fetchOptions);
+    // to prevent browser prompt for credentials
+    const newHeaders = new Headers(res.headers);
+    newHeaders.delete("www-authenticate");
+    // to disable nginx buffering
+    newHeaders.set("X-Accel-Buffering", "no");
+    return new Response(res.body, {
+      status: res.status,
+      statusText: res.statusText,
+      headers: newHeaders,
+    });
+  } finally {
+    clearTimeout(timeoutId);
+  }
+}
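A hedged sketch (not part of the commit) of calling this route from the client: it forwards the body and the client's Content-Type/Accept headers unchanged, and it signs the upstream request with the client's bearer token when one is sent, otherwise with the server-side STABILITY_API_KEY. The subpath below is only a placeholder, not something defined by this diff:

// Hypothetical illustration only.
const form = new FormData();
form.append("prompt", "a lighthouse at dawn");
await fetch("/api/stability/some/generation/endpoint", {
  // placeholder subpath
  method: "POST",
  headers: { Accept: "image/*" },
  body: form,
});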
@@ -0,0 +1,117 @@
+import { getServerSideConfig } from "@/app/config/server";
+import { TENCENT_BASE_URL, ModelProvider } from "@/app/constant";
+import { prettyObject } from "@/app/utils/format";
+import { NextRequest, NextResponse } from "next/server";
+import { auth } from "@/app/api/auth";
+import { getHeader } from "@/app/utils/tencent";
+
+const serverConfig = getServerSideConfig();
+
+async function handle(
+  req: NextRequest,
+  { params }: { params: { path: string[] } },
+) {
+  console.log("[Tencent Route] params ", params);
+
+  if (req.method === "OPTIONS") {
+    return NextResponse.json({ body: "OK" }, { status: 200 });
+  }
+
+  const authResult = auth(req, ModelProvider.Hunyuan);
+  if (authResult.error) {
+    return NextResponse.json(authResult, {
+      status: 401,
+    });
+  }
+
+  try {
+    const response = await request(req);
+    return response;
+  } catch (e) {
+    console.error("[Tencent] ", e);
+    return NextResponse.json(prettyObject(e));
+  }
+}
+
+export const GET = handle;
+export const POST = handle;
+
+export const runtime = "edge";
+export const preferredRegion = [
+  "arn1",
+  "bom1",
+  "cdg1",
+  "cle1",
+  "cpt1",
+  "dub1",
+  "fra1",
+  "gru1",
+  "hnd1",
+  "iad1",
+  "icn1",
+  "kix1",
+  "lhr1",
+  "pdx1",
+  "sfo1",
+  "sin1",
+  "syd1",
+];
+
+async function request(req: NextRequest) {
+  const controller = new AbortController();
+
+  let baseUrl = serverConfig.tencentUrl || TENCENT_BASE_URL;
+
+  if (!baseUrl.startsWith("http")) {
+    baseUrl = `https://${baseUrl}`;
+  }
+
+  if (baseUrl.endsWith("/")) {
+    baseUrl = baseUrl.slice(0, -1);
+  }
+
+  console.log("[Base Url]", baseUrl);
+
+  const timeoutId = setTimeout(
+    () => {
+      controller.abort();
+    },
+    10 * 60 * 1000,
+  );
+
+  const fetchUrl = baseUrl;
+
+  const body = await req.text();
+  const headers = await getHeader(
+    body,
+    serverConfig.tencentSecretId as string,
+    serverConfig.tencentSecretKey as string,
+  );
+  const fetchOptions: RequestInit = {
+    headers,
+    method: req.method,
+    body,
+    redirect: "manual",
+    // @ts-ignore
+    duplex: "half",
+    signal: controller.signal,
+  };
+
+  try {
+    const res = await fetch(fetchUrl, fetchOptions);
+
+    // to prevent browser prompt for credentials
+    const newHeaders = new Headers(res.headers);
+    newHeaders.delete("www-authenticate");
+    // to disable nginx buffering
+    newHeaders.set("X-Accel-Buffering", "no");
+
+    return new Response(res.body, {
+      status: res.status,
+      statusText: res.statusText,
+      headers: newHeaders,
+    });
+  } finally {
+    clearTimeout(timeoutId);
+  }
+}
@@ -29,6 +29,7 @@ async function handle(

   const requestUrl = new URL(req.url);
   let endpoint = requestUrl.searchParams.get("endpoint");
+  let proxy_method = requestUrl.searchParams.get("proxy_method") || req.method;

   // Validate the endpoint to prevent potential SSRF attacks
   if (
@@ -65,7 +66,11 @@ async function handle(
   const targetPath = `${endpoint}${endpointPath}`;

   // only allow MKCOL, GET, PUT
-  if (req.method !== "MKCOL" && req.method !== "GET" && req.method !== "PUT") {
+  if (
+    proxy_method !== "MKCOL" &&
+    proxy_method !== "GET" &&
+    proxy_method !== "PUT"
+  ) {
     return NextResponse.json(
       {
         error: true,
@@ -78,7 +83,7 @@ async function handle(
   }

   // for MKCOL request, only allow request ${folder}
-  if (req.method === "MKCOL" && !targetPath.endsWith(folder)) {
+  if (proxy_method === "MKCOL" && !targetPath.endsWith(folder)) {
     return NextResponse.json(
       {
         error: true,
@@ -91,7 +96,7 @@ async function handle(
   }

   // for GET request, only allow request ending with fileName
-  if (req.method === "GET" && !targetPath.endsWith(fileName)) {
+  if (proxy_method === "GET" && !targetPath.endsWith(fileName)) {
     return NextResponse.json(
       {
         error: true,
@@ -104,7 +109,7 @@ async function handle(
   }

   // for PUT request, only allow request ending with fileName
-  if (req.method === "PUT" && !targetPath.endsWith(fileName)) {
+  if (proxy_method === "PUT" && !targetPath.endsWith(fileName)) {
     return NextResponse.json(
       {
         error: true,
@@ -118,7 +123,7 @@ async function handle(

   const targetUrl = targetPath;

-  const method = req.method;
+  const method = proxy_method || req.method;
   const shouldNotHaveBody = ["get", "head"].includes(
     method?.toLowerCase() ?? "",
   );
@@ -143,7 +148,7 @@ async function handle(
     "[Any Proxy]",
     targetUrl,
     {
-      method: req.method,
+      method: method,
     },
     {
       status: fetchResult?.status,
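A hedged sketch (not part of the commit) of what the new proxy_method query parameter enables: a caller can tunnel a WebDAV verb through an ordinary POST, and only MKCOL, GET, and PUT are accepted. The route URL shape and path segment below are assumptions for illustration; only the endpoint and proxy_method parameter names come from this diff:

// Hypothetical illustration only.
const endpoint = encodeURIComponent("https://dav.example.invalid/"); // placeholder WebDAV endpoint
await fetch(`/api/webdav/backup?endpoint=${endpoint}&proxy_method=MKCOL`, {
  method: "POST", // forwarded upstream as MKCOL
});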
@@ -0,0 +1,128 @@
+import { getServerSideConfig } from "@/app/config/server";
+import {
+  XAI_BASE_URL,
+  ApiPath,
+  ModelProvider,
+  ServiceProvider,
+} from "@/app/constant";
+import { prettyObject } from "@/app/utils/format";
+import { NextRequest, NextResponse } from "next/server";
+import { auth } from "@/app/api/auth";
+import { isModelAvailableInServer } from "@/app/utils/model";
+
+const serverConfig = getServerSideConfig();
+
+export async function handle(
+  req: NextRequest,
+  { params }: { params: { path: string[] } },
+) {
+  console.log("[XAI Route] params ", params);
+
+  if (req.method === "OPTIONS") {
+    return NextResponse.json({ body: "OK" }, { status: 200 });
+  }
+
+  const authResult = auth(req, ModelProvider.XAI);
+  if (authResult.error) {
+    return NextResponse.json(authResult, {
+      status: 401,
+    });
+  }
+
+  try {
+    const response = await request(req);
+    return response;
+  } catch (e) {
+    console.error("[XAI] ", e);
+    return NextResponse.json(prettyObject(e));
+  }
+}
+
+async function request(req: NextRequest) {
+  const controller = new AbortController();
+
+  // alibaba use base url or just remove the path
+  let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.XAI, "");
+
+  let baseUrl = serverConfig.xaiUrl || XAI_BASE_URL;
+
+  if (!baseUrl.startsWith("http")) {
+    baseUrl = `https://${baseUrl}`;
+  }
+
+  if (baseUrl.endsWith("/")) {
+    baseUrl = baseUrl.slice(0, -1);
+  }
+
+  console.log("[Proxy] ", path);
+  console.log("[Base Url]", baseUrl);
+
+  const timeoutId = setTimeout(
+    () => {
+      controller.abort();
+    },
+    10 * 60 * 1000,
+  );
+
+  const fetchUrl = `${baseUrl}${path}`;
+  const fetchOptions: RequestInit = {
+    headers: {
+      "Content-Type": "application/json",
+      Authorization: req.headers.get("Authorization") ?? "",
+    },
+    method: req.method,
+    body: req.body,
+    redirect: "manual",
+    // @ts-ignore
+    duplex: "half",
+    signal: controller.signal,
+  };
+
+  // #1815 try to refuse some request to some models
+  if (serverConfig.customModels && req.body) {
+    try {
+      const clonedBody = await req.text();
+      fetchOptions.body = clonedBody;
+
+      const jsonBody = JSON.parse(clonedBody) as { model?: string };
+
+      // not undefined and is false
+      if (
+        isModelAvailableInServer(
+          serverConfig.customModels,
+          jsonBody?.model as string,
+          ServiceProvider.XAI as string,
+        )
+      ) {
+        return NextResponse.json(
+          {
+            error: true,
+            message: `you are not allowed to use ${jsonBody?.model} model`,
+          },
+          {
+            status: 403,
+          },
+        );
+      }
+    } catch (e) {
+      console.error(`[XAI] filter`, e);
+    }
+  }
+  try {
+    const res = await fetch(fetchUrl, fetchOptions);
+
+    // to prevent browser prompt for credentials
+    const newHeaders = new Headers(res.headers);
+    newHeaders.delete("www-authenticate");
+    // to disable nginx buffering
+    newHeaders.set("X-Accel-Buffering", "no");
+
+    return new Response(res.body, {
+      status: res.status,
+      statusText: res.statusText,
+      headers: newHeaders,
+    });
+  } finally {
+    clearTimeout(timeoutId);
+  }
+}
@@ -1,11 +1,16 @@
 import { getClientConfig } from "../config/client";
 import {
   ACCESS_CODE_PREFIX,
-  Azure,
   ModelProvider,
   ServiceProvider,
 } from "../constant";
-import { ChatMessage, ModelType, useAccessStore, useChatStore } from "../store";
+import {
+  ChatMessageTool,
+  ChatMessage,
+  ModelType,
+  useAccessStore,
+  useChatStore,
+} from "../store";
 import { ChatGPTApi, DalleRequestPayload } from "./platforms/openai";
 import { FileApi, FileInfo } from "./platforms/utils";
 import { GeminiProApi } from "./platforms/google";
@@ -13,6 +18,11 @@ import { ClaudeApi } from "./platforms/anthropic";
 import { ErnieApi } from "./platforms/baidu";
 import { DoubaoApi } from "./platforms/bytedance";
 import { QwenApi } from "./platforms/alibaba";
+import { HunyuanApi } from "./platforms/tencent";
+import { MoonshotApi } from "./platforms/moonshot";
+import { SparkApi } from "./platforms/iflytek";
+import { XAIApi } from "./platforms/xai";
+import { ChatGLMApi } from "./platforms/glm";

 export const ROLES = ["system", "user", "assistant"] as const;
 export type MessageRole = (typeof ROLES)[number];
@@ -79,9 +89,11 @@ export interface ChatOptions {

   onToolUpdate?: (toolName: string, toolInput: string) => void;
   onUpdate?: (message: string, chunk: string) => void;
-  onFinish: (message: string) => void;
+  onFinish: (message: string, responseRes: Response) => void;
   onError?: (err: Error) => void;
   onController?: (controller: AbortController) => void;
+  onBeforeTool?: (tool: ChatMessageTool) => void;
+  onAfterTool?: (tool: ChatMessageTool) => void;
 }

 export interface AgentChatOptions {
@@ -94,6 +106,8 @@ export interface AgentChatOptions {
   onFinish: (message: string) => void;
   onError?: (err: Error) => void;
   onController?: (controller: AbortController) => void;
+  onBeforeTool?: (tool: ChatMessageTool) => void;
+  onAfterTool?: (tool: ChatMessageTool) => void;
 }

 export interface CreateRAGStoreOptions {
@@ -113,12 +127,14 @@ export interface LLMModel {
   displayName?: string;
   available: boolean;
   provider: LLMModelProvider;
+  sorted: number;
 }

 export interface LLMModelProvider {
   id: string;
   providerName: string;
   providerType: string;
+  sorted: number;
 }

 export abstract class LLMApi {
@@ -179,6 +195,21 @@ export class ClientApi {
       case ModelProvider.Qwen:
         this.llm = new QwenApi();
         break;
+      case ModelProvider.Hunyuan:
+        this.llm = new HunyuanApi();
+        break;
+      case ModelProvider.Moonshot:
+        this.llm = new MoonshotApi();
+        break;
+      case ModelProvider.Iflytek:
+        this.llm = new SparkApi();
+        break;
+      case ModelProvider.XAI:
+        this.llm = new XAIApi();
+        break;
+      case ModelProvider.ChatGLM:
+        this.llm = new ChatGLMApi();
+        break;
       default:
         this.llm = new ChatGPTApi();
     }
@@ -231,7 +262,20 @@ export class ClientApi {
   }
 }

-export function getHeaders(ignoreHeaders?: boolean) {
+export function getBearerToken(
+  apiKey: string,
+  noBearer: boolean = false,
+): string {
+  return validString(apiKey)
+    ? `${noBearer ? "" : "Bearer "}${apiKey.trim()}`
+    : "";
+}
+
+export function validString(x: string): boolean {
+  return x?.length > 0;
+}
+
+export function getHeaders(ignoreHeaders: boolean = false) {
   const accessStore = useAccessStore.getState();
   const chatStore = useChatStore.getState();
   let headers: Record<string, string> = {};
@@ -246,12 +290,16 @@ export function getHeaders(ignoreHeaders?: boolean) {

   function getConfig() {
     const modelConfig = chatStore.currentSession().mask.modelConfig;
-    const isGoogle = modelConfig.providerName == ServiceProvider.Google;
+    const isGoogle = modelConfig.providerName === ServiceProvider.Google;
    const isAzure = modelConfig.providerName === ServiceProvider.Azure;
    const isAnthropic = modelConfig.providerName === ServiceProvider.Anthropic;
    const isBaidu = modelConfig.providerName == ServiceProvider.Baidu;
    const isByteDance = modelConfig.providerName === ServiceProvider.ByteDance;
    const isAlibaba = modelConfig.providerName === ServiceProvider.Alibaba;
+    const isMoonshot = modelConfig.providerName === ServiceProvider.Moonshot;
+    const isIflytek = modelConfig.providerName === ServiceProvider.Iflytek;
+    const isXAI = modelConfig.providerName === ServiceProvider.XAI;
+    const isChatGLM = modelConfig.providerName === ServiceProvider.ChatGLM;
    const isEnabledAccessControl = accessStore.enabledAccessControl();
    const apiKey = isGoogle
      ? accessStore.googleApiKey
@@ -263,7 +311,20 @@ export function getHeaders(ignoreHeaders?: boolean) {
           ? accessStore.bytedanceApiKey
           : isAlibaba
             ? accessStore.alibabaApiKey
-            : accessStore.openaiApiKey;
+            : isMoonshot
+              ? accessStore.moonshotApiKey
+              : isXAI
+                ? accessStore.xaiApiKey
+                : isChatGLM
+                  ? accessStore.chatglmApiKey
+                  : isIflytek
+                    ? accessStore.iflytekApiKey &&
+                      accessStore.iflytekApiSecret
+                      ? accessStore.iflytekApiKey +
+                        ":" +
+                        accessStore.iflytekApiSecret
+                      : ""
+                    : accessStore.openaiApiKey;
     if (accessStore.isUseOpenAIEndpointForAllModels || ignoreHeaders) {
       return {
         isGoogle: false,
@@ -272,6 +333,10 @@ export function getHeaders(ignoreHeaders?: boolean) {
         isBaidu: false,
         isByteDance: false,
         isAlibaba: false,
+        isMoonshot: false,
+        isIflytek: false,
+        isXAI: false,
+        isChatGLM: false,
         apiKey: accessStore.openaiApiKey,
         isEnabledAccessControl,
       };
@@ -283,24 +348,25 @@ export function getHeaders(ignoreHeaders?: boolean) {
       isBaidu,
       isByteDance,
       isAlibaba,
+      isMoonshot,
+      isIflytek,
+      isXAI,
+      isChatGLM,
       apiKey,
       isEnabledAccessControl,
     };
   }

   function getAuthHeader(): string {
-    return isAzure ? "api-key" : isAnthropic ? "x-api-key" : "Authorization";
+    return isAzure
+      ? "api-key"
+      : isAnthropic
+        ? "x-api-key"
+        : isGoogle
+          ? "x-goog-api-key"
+          : "Authorization";
   }
-
-  function getBearerToken(apiKey: string, noBearer: boolean = false): string {
-    return validString(apiKey)
-      ? `${noBearer ? "" : "Bearer "}${apiKey.trim()}`
-      : "";
-  }
-
-  function validString(x: string): boolean {
-    return x?.length > 0;
-  }

   const {
     isGoogle,
     isAzure,
@@ -309,14 +375,15 @@ export function getHeaders(ignoreHeaders?: boolean) {
     apiKey,
     isEnabledAccessControl,
   } = getConfig();
-  // when using google api in app, not set auth header
-  if (isGoogle && clientConfig?.isApp) return headers;
   // when using baidu api in app, not set auth header
   if (isBaidu && clientConfig?.isApp) return headers;

   const authHeader = getAuthHeader();

-  const bearerToken = getBearerToken(apiKey, isAzure || isAnthropic);
+  const bearerToken = getBearerToken(
+    apiKey,
+    isAzure || isAnthropic || isGoogle,
+  );

   if (bearerToken) {
     headers[authHeader] = bearerToken;
@@ -345,6 +412,16 @@ export function getClientApi(provider: ServiceProvider): ClientApi {
       return new ClientApi(ModelProvider.Doubao);
     case ServiceProvider.Alibaba:
       return new ClientApi(ModelProvider.Qwen);
|
||||||
|
case ServiceProvider.Tencent:
|
||||||
|
return new ClientApi(ModelProvider.Hunyuan);
|
||||||
|
case ServiceProvider.Moonshot:
|
||||||
|
return new ClientApi(ModelProvider.Moonshot);
|
||||||
|
case ServiceProvider.Iflytek:
|
||||||
|
return new ClientApi(ModelProvider.Iflytek);
|
||||||
|
case ServiceProvider.XAI:
|
||||||
|
return new ClientApi(ModelProvider.XAI);
|
||||||
|
case ServiceProvider.ChatGLM:
|
||||||
|
return new ClientApi(ModelProvider.ChatGLM);
|
||||||
default:
|
default:
|
||||||
return new ClientApi(ModelProvider.GPT);
|
return new ClientApi(ModelProvider.GPT);
|
||||||
}
|
}
|
||||||
|
|
|
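For reference, a minimal standalone sketch of how the header helpers introduced above compose. The helper names mirror this hunk (validString, getBearerToken, the provider-to-header mapping of getAuthHeader); buildAuthHeaders and the example key are illustrative only and are not part of the project code.

// Illustrative sketch, not the project's implementation.
function validString(x: string): boolean {
  return x?.length > 0;
}

function getBearerToken(apiKey: string, noBearer: boolean = false): string {
  return validString(apiKey) ? `${noBearer ? "" : "Bearer "}${apiKey.trim()}` : "";
}

// Azure and Anthropic use key headers; Google now uses x-goog-api-key.
function pickAuthHeader(
  provider: "Azure" | "Anthropic" | "Google" | "OpenAI",
): string {
  return provider === "Azure"
    ? "api-key"
    : provider === "Anthropic"
      ? "x-api-key"
      : provider === "Google"
        ? "x-goog-api-key"
        : "Authorization";
}

function buildAuthHeaders(
  provider: "Azure" | "Anthropic" | "Google" | "OpenAI",
  apiKey: string,
): Record<string, string> {
  const headers: Record<string, string> = {};
  // Azure/Anthropic/Google keys are sent raw, i.e. without the "Bearer " prefix.
  const token = getBearerToken(
    apiKey,
    provider === "Azure" || provider === "Anthropic" || provider === "Google",
  );
  if (token) headers[pickAuthHeader(provider)] = token;
  return headers;
}

console.log(buildAuthHeaders("Google", "my-example-key"));
// -> { "x-goog-api-key": "my-example-key" }
console.log(buildAuthHeaders("OpenAI", "my-example-key"));
// -> { Authorization: "Bearer my-example-key" }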
@@ -8,14 +8,14 @@ import {
 import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";

 import {
-  AgentChatOptions,
   ChatOptions,
-  CreateRAGStoreOptions,
   getHeaders,
   LLMApi,
   LLMModel,
-  MultimodalContent,
   SpeechOptions,
+  MultimodalContent,
+  AgentChatOptions,
+  CreateRAGStoreOptions,
   TranscriptionOptions,
 } from "../api";
 import Locale from "../../locales";
@@ -26,6 +26,7 @@ import {
 import { prettyObject } from "@/app/utils/format";
 import { getClientConfig } from "@/app/config/client";
 import { getMessageTextContent } from "@/app/utils";
+import { fetch } from "@/app/utils/stream";

 export interface OpenAIListModelResponse {
   object: string;
@@ -57,9 +58,6 @@ interface RequestPayload {
 }

 export class QwenApi implements LLMApi {
-  speech(options: SpeechOptions): Promise<ArrayBuffer> {
-    throw new Error("Method not implemented.");
-  }
   transcription(options: TranscriptionOptions): Promise<string> {
     throw new Error("Method not implemented.");
   }
@@ -99,6 +97,10 @@ export class QwenApi implements LLMApi {
     return res?.output?.choices?.at(0)?.message?.content ?? "";
   }

+  speech(options: SpeechOptions): Promise<ArrayBuffer> {
+    throw new Error("Method not implemented.");
+  }
+
   async chat(options: ChatOptions) {
     const messages = options.messages.map((v) => ({
       role: v.role,
@@ -153,6 +155,7 @@ export class QwenApi implements LLMApi {
     let responseText = "";
     let remainText = "";
     let finished = false;
+    let responseRes: Response;

     // animate response to make it looks smooth
     function animateResponseText() {
@@ -182,13 +185,14 @@ export class QwenApi implements LLMApi {
     const finish = () => {
       if (!finished) {
         finished = true;
-        options.onFinish(responseText + remainText);
+        options.onFinish(responseText + remainText, responseRes);
       }
     };

     controller.signal.onabort = finish;

     fetchEventSource(chatPath, {
+      fetch: fetch as any,
       ...chatPayload,
       async onopen(res) {
         clearTimeout(requestTimeoutId);
@@ -197,6 +201,7 @@ export class QwenApi implements LLMApi {
           "[Alibaba] request response content type: ",
           contentType,
         );
+        responseRes = res;

         if (contentType?.startsWith("text/plain")) {
           responseText = await res.clone().text();
@@ -263,7 +268,7 @@ export class QwenApi implements LLMApi {

       const resJson = await res.json();
       const message = this.extractMessage(resJson);
-      options.onFinish(message);
+      options.onFinish(message, res);
     }
   } catch (e) {
     console.log("[Request] failed to make a chat request", e);

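The recurring change across the provider clients in this commit is that onFinish now receives the upstream Response alongside the final text, captured into responseRes inside onopen. A minimal sketch of that pattern with simplified types; the endpoint URL and the FinishHandler type are placeholders, not the project's actual ChatOptions interface.

// Sketch: capture the Response seen when the request opens and hand it to
// onFinish, so callers can inspect the status code after streaming ends.
type FinishHandler = (message: string, res: Response) => void;

async function streamChat(url: string, body: unknown, onFinish: FinishHandler) {
  let responseText = "";
  let responseRes: Response;
  const res = await fetch(url, { method: "POST", body: JSON.stringify(body) });
  responseRes = res; // corresponds to `responseRes = res;` inside onopen above
  responseText = await res.text(); // the real code accumulates SSE deltas instead
  onFinish(responseText, responseRes);
}

// Usage: the caller can now branch on res.ok / res.status.
streamChat("https://example.invalid/chat", { prompt: "hi" }, (msg, res) => {
  console.log(res.ok ? "finished" : `failed with ${res.status}`, msg.length);
});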
@@ -1,33 +1,27 @@
-import {
-  ACCESS_CODE_PREFIX,
-  Anthropic,
-  ApiPath,
-  REQUEST_TIMEOUT_MS,
-  ServiceProvider,
-} from "@/app/constant";
+import { Anthropic, ApiPath } from "@/app/constant";
 import {
   AgentChatOptions,
   ChatOptions,
   CreateRAGStoreOptions,
   getHeaders,
   LLMApi,
-  MultimodalContent,
   SpeechOptions,
   TranscriptionOptions,
 } from "../api";
-import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
-import { getClientConfig } from "@/app/config/client";
-import { DEFAULT_API_HOST } from "@/app/constant";
 import {
-  EventStreamContentType,
-  fetchEventSource,
-} from "@fortaine/fetch-event-source";
-
-import Locale from "../../locales";
-import { prettyObject } from "@/app/utils/format";
+  useAccessStore,
+  useAppConfig,
+  useChatStore,
+  usePluginStore,
+  ChatMessageTool,
+} from "@/app/store";
+import { getClientConfig } from "@/app/config/client";
+import { ANTHROPIC_BASE_URL } from "@/app/constant";
 import { getMessageTextContent, isVisionModel } from "@/app/utils";
-import { preProcessImageContent } from "@/app/utils/chat";
+import { preProcessImageContent, stream } from "@/app/utils/chat";
 import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
+import { RequestPayload } from "./openai";
+import { fetch } from "@/app/utils/stream";

 export type MultiBlockContent = {
   type: "image" | "text";
@ -88,173 +82,19 @@ const ClaudeMapper = {
|
||||||
const keys = ["claude-2, claude-instant-1"];
|
const keys = ["claude-2, claude-instant-1"];
|
||||||
|
|
||||||
export class ClaudeApi implements LLMApi {
|
export class ClaudeApi implements LLMApi {
|
||||||
speech(options: SpeechOptions): Promise<ArrayBuffer> {
|
|
||||||
throw new Error("Method not implemented.");
|
|
||||||
}
|
|
||||||
transcription(options: TranscriptionOptions): Promise<string> {
|
transcription(options: TranscriptionOptions): Promise<string> {
|
||||||
throw new Error("Method not implemented.");
|
throw new Error("Method not implemented.");
|
||||||
}
|
}
|
||||||
|
toolAgentChat(options: AgentChatOptions): Promise<void> {
|
||||||
async toolAgentChat(options: AgentChatOptions) {
|
throw new Error("Method not implemented.");
|
||||||
const visionModel = isVisionModel(options.config.model);
|
|
||||||
const messages: AgentChatOptions["messages"] = [];
|
|
||||||
for (const v of options.messages) {
|
|
||||||
const content = visionModel
|
|
||||||
? await preProcessImageContent(v.content)
|
|
||||||
: getMessageTextContent(v);
|
|
||||||
messages.push({ role: v.role, content });
|
|
||||||
}
|
|
||||||
|
|
||||||
const modelConfig = {
|
|
||||||
...useAppConfig.getState().modelConfig,
|
|
||||||
...useChatStore.getState().currentSession().mask.modelConfig,
|
|
||||||
...{
|
|
||||||
model: options.config.model,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
const accessStore = useAccessStore.getState();
|
|
||||||
let baseUrl = accessStore.anthropicUrl;
|
|
||||||
const requestPayload = {
|
|
||||||
chatSessionId: options.chatSessionId,
|
|
||||||
messages,
|
|
||||||
isAzure: false,
|
|
||||||
azureApiVersion: accessStore.azureApiVersion,
|
|
||||||
stream: options.config.stream,
|
|
||||||
model: modelConfig.model,
|
|
||||||
temperature: modelConfig.temperature,
|
|
||||||
presence_penalty: modelConfig.presence_penalty,
|
|
||||||
frequency_penalty: modelConfig.frequency_penalty,
|
|
||||||
top_p: modelConfig.top_p,
|
|
||||||
baseUrl: baseUrl,
|
|
||||||
maxIterations: options.agentConfig.maxIterations,
|
|
||||||
returnIntermediateSteps: options.agentConfig.returnIntermediateSteps,
|
|
||||||
useTools: options.agentConfig.useTools,
|
|
||||||
provider: ServiceProvider.Anthropic,
|
|
||||||
};
|
|
||||||
|
|
||||||
console.log("[Request] anthropic payload: ", requestPayload);
|
|
||||||
|
|
||||||
const shouldStream = true;
|
|
||||||
const controller = new AbortController();
|
|
||||||
options.onController?.(controller);
|
|
||||||
|
|
||||||
try {
|
|
||||||
let path = "/api/langchain/tool/agent/";
|
|
||||||
const enableNodeJSPlugin = !!process.env.NEXT_PUBLIC_ENABLE_NODEJS_PLUGIN;
|
|
||||||
path = enableNodeJSPlugin ? path + "nodejs" : path + "edge";
|
|
||||||
const chatPayload = {
|
|
||||||
method: "POST",
|
|
||||||
body: JSON.stringify(requestPayload),
|
|
||||||
signal: controller.signal,
|
|
||||||
headers: getHeaders(),
|
|
||||||
};
|
|
||||||
|
|
||||||
// make a fetch request
|
|
||||||
const requestTimeoutId = setTimeout(
|
|
||||||
() => controller.abort(),
|
|
||||||
REQUEST_TIMEOUT_MS,
|
|
||||||
);
|
|
||||||
// console.log("shouldStream", shouldStream);
|
|
||||||
|
|
||||||
if (shouldStream) {
|
|
||||||
let responseText = "";
|
|
||||||
let finished = false;
|
|
||||||
|
|
||||||
const finish = () => {
|
|
||||||
if (!finished) {
|
|
||||||
options.onFinish(responseText);
|
|
||||||
finished = true;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
controller.signal.onabort = finish;
|
|
||||||
|
|
||||||
fetchEventSource(path, {
|
|
||||||
...chatPayload,
|
|
||||||
async onopen(res) {
|
|
||||||
clearTimeout(requestTimeoutId);
|
|
||||||
const contentType = res.headers.get("content-type");
|
|
||||||
console.log(
|
|
||||||
"[OpenAI] request response content type: ",
|
|
||||||
contentType,
|
|
||||||
);
|
|
||||||
|
|
||||||
if (contentType?.startsWith("text/plain")) {
|
|
||||||
responseText = await res.clone().text();
|
|
||||||
return finish();
|
|
||||||
}
|
|
||||||
|
|
||||||
if (
|
|
||||||
!res.ok ||
|
|
||||||
!res.headers
|
|
||||||
.get("content-type")
|
|
||||||
?.startsWith(EventStreamContentType) ||
|
|
||||||
res.status !== 200
|
|
||||||
) {
|
|
||||||
const responseTexts = [responseText];
|
|
||||||
let extraInfo = await res.clone().text();
|
|
||||||
console.warn(`extraInfo: ${extraInfo}`);
|
|
||||||
|
|
||||||
if (res.status === 401) {
|
|
||||||
responseTexts.push(Locale.Error.Unauthorized);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (extraInfo) {
|
|
||||||
responseTexts.push(extraInfo);
|
|
||||||
}
|
|
||||||
|
|
||||||
responseText = responseTexts.join("\n\n");
|
|
||||||
|
|
||||||
return finish();
|
|
||||||
}
|
|
||||||
},
|
|
||||||
onmessage(msg) {
|
|
||||||
let response = JSON.parse(msg.data);
|
|
||||||
if (!response.isSuccess) {
|
|
||||||
console.error("[Request]", msg.data);
|
|
||||||
responseText = msg.data;
|
|
||||||
throw Error(response.message);
|
|
||||||
}
|
|
||||||
if (msg.data === "[DONE]" || finished) {
|
|
||||||
return finish();
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
if (response && !response.isToolMessage) {
|
|
||||||
responseText += response.message;
|
|
||||||
options.onUpdate?.(responseText, response.message);
|
|
||||||
} else {
|
|
||||||
options.onToolUpdate?.(response.toolName!, response.message);
|
|
||||||
}
|
|
||||||
} catch (e) {
|
|
||||||
console.error("[Request] parse error", response, msg);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
onclose() {
|
|
||||||
finish();
|
|
||||||
},
|
|
||||||
onerror(e) {
|
|
||||||
options.onError?.(e);
|
|
||||||
throw e;
|
|
||||||
},
|
|
||||||
openWhenHidden: true,
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
const res = await fetch(path, chatPayload);
|
|
||||||
clearTimeout(requestTimeoutId);
|
|
||||||
|
|
||||||
const resJson = await res.json();
|
|
||||||
const message = this.extractMessage(resJson);
|
|
||||||
options.onFinish(message);
|
|
||||||
}
|
|
||||||
} catch (e) {
|
|
||||||
console.log("[Request] failed to make a chat reqeust", e);
|
|
||||||
options.onError?.(e as Error);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
createRAGStore(options: CreateRAGStoreOptions): Promise<string> {
|
createRAGStore(options: CreateRAGStoreOptions): Promise<string> {
|
||||||
throw new Error("Method not implemented.");
|
throw new Error("Method not implemented.");
|
||||||
}
|
}
|
||||||
|
speech(options: SpeechOptions): Promise<ArrayBuffer> {
|
||||||
|
throw new Error("Method not implemented.");
|
||||||
|
}
|
||||||
|
|
||||||
extractMessage(res: any) {
|
extractMessage(res: any) {
|
||||||
console.log("[Response] claude response: ", res);
|
console.log("[Response] claude response: ", res);
|
||||||
|
|
||||||
|
@ -373,120 +213,136 @@ export class ClaudeApi implements LLMApi {
|
||||||
const controller = new AbortController();
|
const controller = new AbortController();
|
||||||
options.onController?.(controller);
|
options.onController?.(controller);
|
||||||
|
|
||||||
const payload = {
|
|
||||||
method: "POST",
|
|
||||||
body: JSON.stringify(requestBody),
|
|
||||||
signal: controller.signal,
|
|
||||||
headers: {
|
|
||||||
...getHeaders(), // get common headers
|
|
||||||
"anthropic-version": accessStore.anthropicApiVersion,
|
|
||||||
// do not send `anthropicApiKey` in browser!!!
|
|
||||||
// Authorization: getAuthKey(accessStore.anthropicApiKey),
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
if (shouldStream) {
|
if (shouldStream) {
|
||||||
try {
|
let index = -1;
|
||||||
const context = {
|
const [tools, funcs] = [{}, {}];
|
||||||
text: "",
|
// const [tools, funcs] = usePluginStore
|
||||||
finished: false,
|
// .getState()
|
||||||
};
|
// .getAsTools(
|
||||||
|
// useChatStore.getState().currentSession().mask?.plugin || [],
|
||||||
const finish = () => {
|
// );
|
||||||
if (!context.finished) {
|
return stream(
|
||||||
options.onFinish(context.text);
|
path,
|
||||||
context.finished = true;
|
requestBody,
|
||||||
}
|
{
|
||||||
};
|
...getHeaders(),
|
||||||
|
"anthropic-version": accessStore.anthropicApiVersion,
|
||||||
controller.signal.onabort = finish;
|
},
|
||||||
fetchEventSource(path, {
|
// @ts-ignore
|
||||||
...payload,
|
tools.map((tool) => ({
|
||||||
async onopen(res) {
|
name: tool?.function?.name,
|
||||||
const contentType = res.headers.get("content-type");
|
description: tool?.function?.description,
|
||||||
console.log("response content type: ", contentType);
|
input_schema: tool?.function?.parameters,
|
||||||
|
})),
|
||||||
if (contentType?.startsWith("text/plain")) {
|
funcs,
|
||||||
context.text = await res.clone().text();
|
controller,
|
||||||
return finish();
|
// parseSSE
|
||||||
}
|
(text: string, runTools: ChatMessageTool[]) => {
|
||||||
|
// console.log("parseSSE", text, runTools);
|
||||||
if (
|
let chunkJson:
|
||||||
!res.ok ||
|
| undefined
|
||||||
!res.headers
|
| {
|
||||||
.get("content-type")
|
type: "content_block_delta" | "content_block_stop";
|
||||||
?.startsWith(EventStreamContentType) ||
|
content_block?: {
|
||||||
res.status !== 200
|
type: "tool_use";
|
||||||
) {
|
id: string;
|
||||||
const responseTexts = [context.text];
|
name: string;
|
||||||
let extraInfo = await res.clone().text();
|
|
||||||
try {
|
|
||||||
const resJson = await res.clone().json();
|
|
||||||
extraInfo = prettyObject(resJson);
|
|
||||||
} catch {}
|
|
||||||
|
|
||||||
if (res.status === 401) {
|
|
||||||
responseTexts.push(Locale.Error.Unauthorized);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (extraInfo) {
|
|
||||||
responseTexts.push(extraInfo);
|
|
||||||
}
|
|
||||||
|
|
||||||
context.text = responseTexts.join("\n\n");
|
|
||||||
|
|
||||||
return finish();
|
|
||||||
}
|
|
||||||
},
|
|
||||||
onmessage(msg) {
|
|
||||||
let chunkJson:
|
|
||||||
| undefined
|
|
||||||
| {
|
|
||||||
type: "content_block_delta" | "content_block_stop";
|
|
||||||
delta?: {
|
|
||||||
type: "text_delta";
|
|
||||||
text: string;
|
|
||||||
};
|
|
||||||
index: number;
|
|
||||||
};
|
};
|
||||||
try {
|
delta?: {
|
||||||
chunkJson = JSON.parse(msg.data);
|
type: "text_delta" | "input_json_delta";
|
||||||
} catch (e) {
|
text?: string;
|
||||||
console.error("[Response] parse error", msg.data);
|
partial_json?: string;
|
||||||
}
|
};
|
||||||
|
index: number;
|
||||||
|
};
|
||||||
|
chunkJson = JSON.parse(text);
|
||||||
|
|
||||||
if (!chunkJson || chunkJson.type === "content_block_stop") {
|
if (chunkJson?.content_block?.type == "tool_use") {
|
||||||
return finish();
|
index += 1;
|
||||||
}
|
const id = chunkJson?.content_block.id;
|
||||||
|
const name = chunkJson?.content_block.name;
|
||||||
const { delta } = chunkJson;
|
runTools.push({
|
||||||
if (delta?.text) {
|
id,
|
||||||
context.text += delta.text;
|
type: "function",
|
||||||
options.onUpdate?.(context.text, delta.text);
|
function: {
|
||||||
}
|
name,
|
||||||
},
|
arguments: "",
|
||||||
onclose() {
|
},
|
||||||
finish();
|
});
|
||||||
},
|
}
|
||||||
onerror(e) {
|
if (
|
||||||
options.onError?.(e);
|
chunkJson?.delta?.type == "input_json_delta" &&
|
||||||
throw e;
|
chunkJson?.delta?.partial_json
|
||||||
},
|
) {
|
||||||
openWhenHidden: true,
|
// @ts-ignore
|
||||||
});
|
runTools[index]["function"]["arguments"] +=
|
||||||
} catch (e) {
|
chunkJson?.delta?.partial_json;
|
||||||
console.error("failed to chat", e);
|
}
|
||||||
options.onError?.(e as Error);
|
return chunkJson?.delta?.text;
|
||||||
}
|
},
|
||||||
|
// processToolMessage, include tool_calls message and tool call results
|
||||||
|
(
|
||||||
|
requestPayload: RequestPayload,
|
||||||
|
toolCallMessage: any,
|
||||||
|
toolCallResult: any[],
|
||||||
|
) => {
|
||||||
|
// reset index value
|
||||||
|
index = -1;
|
||||||
|
// @ts-ignore
|
||||||
|
requestPayload?.messages?.splice(
|
||||||
|
// @ts-ignore
|
||||||
|
requestPayload?.messages?.length,
|
||||||
|
0,
|
||||||
|
{
|
||||||
|
role: "assistant",
|
||||||
|
content: toolCallMessage.tool_calls.map(
|
||||||
|
(tool: ChatMessageTool) => ({
|
||||||
|
type: "tool_use",
|
||||||
|
id: tool.id,
|
||||||
|
name: tool?.function?.name,
|
||||||
|
input: tool?.function?.arguments
|
||||||
|
? JSON.parse(tool?.function?.arguments)
|
||||||
|
: {},
|
||||||
|
}),
|
||||||
|
),
|
||||||
|
},
|
||||||
|
// @ts-ignore
|
||||||
|
...toolCallResult.map((result) => ({
|
||||||
|
role: "user",
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: "tool_result",
|
||||||
|
tool_use_id: result.tool_call_id,
|
||||||
|
content: result.content,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
})),
|
||||||
|
);
|
||||||
|
},
|
||||||
|
options,
|
||||||
|
);
|
||||||
      } else {
+        const payload = {
+          method: "POST",
+          body: JSON.stringify(requestBody),
+          signal: controller.signal,
+          headers: {
+            ...getHeaders(), // get common headers
+            "anthropic-version": accessStore.anthropicApiVersion,
+            // do not send `anthropicApiKey` in browser!!!
+            // Authorization: getAuthKey(accessStore.anthropicApiKey),
+          },
+        };
+
        try {
-          controller.signal.onabort = () => options.onFinish("");
+          controller.signal.onabort = () =>
+            options.onFinish("", new Response(null, { status: 400 }));
+
          const res = await fetch(path, payload);
          const resJson = await res.json();

          const message = this.extractMessage(resJson);
-          options.onFinish(message);
+          options.onFinish(message, res);
        } catch (e) {
          console.error("failed to chat", e);
          options.onError?.(e as Error);
@@ -552,9 +408,7 @@ export class ClaudeApi {
     if (baseUrl.trim().length === 0) {
       const isApp = !!getClientConfig()?.isApp;
-
-      baseUrl = isApp
-        ? DEFAULT_API_HOST + "/api/proxy/anthropic"
-        : ApiPath.Anthropic;
+      baseUrl = isApp ? ANTHROPIC_BASE_URL : ApiPath.Anthropic;
     }

     if (!baseUrl.startsWith("http") && !baseUrl.startsWith("/api")) {

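The Claude streaming branch above replaces the hand-rolled fetchEventSource loop with the shared stream() helper plus a parseSSE callback that accumulates tool_use blocks. A standalone sketch of that parsing logic under simplified local types; ToolCall and ClaudeChunk here are stand-ins, not the project's ChatMessageTool or its real chunk typing.

// Sketch: accumulate Anthropic content_block events into tool-call records
// while passing plain text deltas through to the visible reply.
type ToolCall = {
  id: string;
  type: "function";
  function: { name: string; arguments: string };
};

type ClaudeChunk = {
  type: "content_block_start" | "content_block_delta" | "content_block_stop";
  content_block?: { type: "tool_use"; id: string; name: string };
  delta?: { type: "text_delta" | "input_json_delta"; text?: string; partial_json?: string };
};

let index = -1;

function parseClaudeSSE(text: string, runTools: ToolCall[]): string | undefined {
  const chunk: ClaudeChunk = JSON.parse(text);
  const block = chunk.content_block;
  if (block?.type === "tool_use") {
    // A new tool call starts: remember its id/name, arguments arrive as deltas.
    index += 1;
    runTools.push({
      id: block.id,
      type: "function",
      function: { name: block.name, arguments: "" },
    });
  }
  if (index >= 0 && chunk.delta?.type === "input_json_delta" && chunk.delta.partial_json) {
    // Later fragments only extend the JSON argument string of the current call.
    runTools[index].function.arguments += chunk.delta.partial_json;
  }
  return chunk.delta?.text;
}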
@@ -27,6 +27,7 @@ import {
 import { prettyObject } from "@/app/utils/format";
 import { getClientConfig } from "@/app/config/client";
 import { getMessageTextContent } from "@/app/utils";
+import { fetch } from "@/app/utils/stream";

 export interface OpenAIListModelResponse {
   object: string;
@@ -52,9 +53,6 @@ interface RequestPayload {
 }

 export class ErnieApi implements LLMApi {
-  speech(options: SpeechOptions): Promise<ArrayBuffer> {
-    throw new Error("Method not implemented.");
-  }
   transcription(options: TranscriptionOptions): Promise<string> {
     throw new Error("Method not implemented.");
   }
@@ -91,18 +89,30 @@ export class ErnieApi implements LLMApi {
     return [baseUrl, path].join("/");
   }

+  speech(options: SpeechOptions): Promise<ArrayBuffer> {
+    throw new Error("Method not implemented.");
+  }
+
   async chat(options: ChatOptions) {
     const messages = options.messages.map((v) => ({
-      role: v.role,
+      // "error_code": 336006, "error_msg": "the role of message with even index in the messages must be user or function",
+      role: v.role === "system" ? "user" : v.role,
       content: getMessageTextContent(v),
     }));

     // "error_code": 336006, "error_msg": "the length of messages must be an odd number",
     if (messages.length % 2 === 0) {
-      messages.unshift({
-        role: "user",
-        content: " ",
-      });
+      if (messages.at(0)?.role === "user") {
+        messages.splice(1, 0, {
+          role: "assistant",
+          content: " ",
+        });
+      } else {
+        messages.unshift({
+          role: "user",
+          content: " ",
+        });
+      }
     }

     const modelConfig = {
@@ -164,6 +174,7 @@ export class ErnieApi implements LLMApi {
     let responseText = "";
     let remainText = "";
     let finished = false;
+    let responseRes: Response;

     // animate response to make it looks smooth
     function animateResponseText() {
@@ -193,19 +204,20 @@ export class ErnieApi implements LLMApi {
     const finish = () => {
       if (!finished) {
         finished = true;
-        options.onFinish(responseText + remainText);
+        options.onFinish(responseText + remainText, responseRes);
       }
     };

     controller.signal.onabort = finish;

     fetchEventSource(chatPath, {
+      fetch: fetch as any,
       ...chatPayload,
       async onopen(res) {
         clearTimeout(requestTimeoutId);
         const contentType = res.headers.get("content-type");
         console.log("[Baidu] request response content type: ", contentType);
+        responseRes = res;
         if (contentType?.startsWith("text/plain")) {
           responseText = await res.clone().text();
           return finish();
@@ -268,7 +280,7 @@ export class ErnieApi implements LLMApi {

       const resJson = await res.json();
       const message = resJson?.result;
-      options.onFinish(message);
+      options.onFinish(message, res);
     }
   } catch (e) {
     console.log("[Request] failed to make a chat request", e);

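The Ernie hunk above works around Baidu's constraint that the message list must have odd length with alternating, user-led roles (error_code 336006). A small sketch of that padding logic with a minimal local message type; padForErnie is an illustrative name, not a project function.

// Sketch: ensure an odd-length, user-led message list for the Ernie API.
type Msg = { role: "user" | "assistant" | "system"; content: string };

function padForErnie(messages: Msg[]): Msg[] {
  // system roles are downgraded to user, mirroring the role mapping above
  const mapped = messages.map((m) => ({
    ...m,
    role: m.role === "system" ? ("user" as const) : m.role,
  }));
  if (mapped.length % 2 === 0) {
    if (mapped.at(0)?.role === "user") {
      // keep the leading user turn, insert a placeholder assistant turn after it
      mapped.splice(1, 0, { role: "assistant", content: " " });
    } else {
      // otherwise prepend a placeholder user turn
      mapped.unshift({ role: "user", content: " " });
    }
  }
  return mapped;
}

console.log(
  padForErnie([
    { role: "user", content: "a" },
    { role: "assistant", content: "b" },
  ]).length,
); // -> 3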
@@ -26,6 +26,7 @@ import {
 import { prettyObject } from "@/app/utils/format";
 import { getClientConfig } from "@/app/config/client";
 import { getMessageTextContent } from "@/app/utils";
+import { fetch } from "@/app/utils/stream";

 export interface OpenAIListModelResponse {
   object: string;
@@ -51,9 +52,6 @@ interface RequestPayload {
 }

 export class DoubaoApi implements LLMApi {
-  speech(options: SpeechOptions): Promise<ArrayBuffer> {
-    throw new Error("Method not implemented.");
-  }
   transcription(options: TranscriptionOptions): Promise<string> {
     throw new Error("Method not implemented.");
   }
@@ -93,6 +91,10 @@ export class DoubaoApi implements LLMApi {
     return res.choices?.at(0)?.message?.content ?? "";
   }

+  speech(options: SpeechOptions): Promise<ArrayBuffer> {
+    throw new Error("Method not implemented.");
+  }
+
   async chat(options: ChatOptions) {
     const messages = options.messages.map((v) => ({
       role: v.role,
@@ -140,6 +142,7 @@ export class DoubaoApi implements LLMApi {
     let responseText = "";
     let remainText = "";
     let finished = false;
+    let responseRes: Response;

     // animate response to make it looks smooth
     function animateResponseText() {
@@ -169,13 +172,14 @@ export class DoubaoApi implements LLMApi {
     const finish = () => {
       if (!finished) {
         finished = true;
-        options.onFinish(responseText + remainText);
+        options.onFinish(responseText + remainText, responseRes);
       }
     };

     controller.signal.onabort = finish;

     fetchEventSource(chatPath, {
+      fetch: fetch as any,
       ...chatPayload,
       async onopen(res) {
         clearTimeout(requestTimeoutId);
@@ -184,7 +188,7 @@ export class DoubaoApi implements LLMApi {
           "[ByteDance] request response content type: ",
           contentType,
         );
+        responseRes = res;
         if (contentType?.startsWith("text/plain")) {
           responseText = await res.clone().text();
           return finish();
@@ -250,7 +254,7 @@ export class DoubaoApi implements LLMApi {

       const resJson = await res.json();
       const message = this.extractMessage(resJson);
-      options.onFinish(message);
+      options.onFinish(message, res);
     }
   } catch (e) {
     console.log("[Request] failed to make a chat request", e);

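Each streaming client in this diff shares the same animateResponseText trick: deltas are buffered in remainText and drained a few characters per animation frame so the reply renders smoothly. A browser-oriented sketch of that loop under simplified assumptions; the real code also tracks a finished flag set by the SSE handlers and calls options.onUpdate.

// Sketch: drain a buffer of streamed text roughly 1/60th at a time per frame.
let responseText = "";
let remainText = "";
let finished = false; // flip to true when the stream ends to stop the loop

function animateResponseText(onUpdate: (full: string, delta: string) => void) {
  if (finished) {
    responseText += remainText;
    remainText = "";
    return;
  }
  if (remainText.length > 0) {
    const fetchCount = Math.max(1, Math.round(remainText.length / 60));
    const fetchText = remainText.slice(0, fetchCount);
    responseText += fetchText;
    remainText = remainText.slice(fetchCount);
    onUpdate(responseText, fetchText);
  }
  requestAnimationFrame(() => animateResponseText(onUpdate));
}

// Usage: push SSE deltas into remainText as they arrive, start the loop once.
remainText += "Hello, world!";
animateResponseText((full) => console.log(full));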
@ -0,0 +1,210 @@
|
||||||
|
"use client";
|
||||||
|
import {
|
||||||
|
ApiPath,
|
||||||
|
CHATGLM_BASE_URL,
|
||||||
|
ChatGLM,
|
||||||
|
REQUEST_TIMEOUT_MS,
|
||||||
|
} from "@/app/constant";
|
||||||
|
import {
|
||||||
|
useAccessStore,
|
||||||
|
useAppConfig,
|
||||||
|
useChatStore,
|
||||||
|
ChatMessageTool,
|
||||||
|
usePluginStore,
|
||||||
|
} from "@/app/store";
|
||||||
|
import { stream } from "@/app/utils/chat";
|
||||||
|
import {
|
||||||
|
AgentChatOptions,
|
||||||
|
ChatOptions,
|
||||||
|
CreateRAGStoreOptions,
|
||||||
|
getHeaders,
|
||||||
|
LLMApi,
|
||||||
|
LLMModel,
|
||||||
|
SpeechOptions,
|
||||||
|
TranscriptionOptions,
|
||||||
|
} from "../api";
|
||||||
|
import { getClientConfig } from "@/app/config/client";
|
||||||
|
import { getMessageTextContent } from "@/app/utils";
|
||||||
|
import { RequestPayload } from "./openai";
|
||||||
|
import { fetch } from "@/app/utils/stream";
|
||||||
|
|
||||||
|
export class ChatGLMApi implements LLMApi {
|
||||||
|
transcription(options: TranscriptionOptions): Promise<string> {
|
||||||
|
throw new Error("Method not implemented.");
|
||||||
|
}
|
||||||
|
toolAgentChat(options: AgentChatOptions): Promise<void> {
|
||||||
|
throw new Error("Method not implemented.");
|
||||||
|
}
|
||||||
|
createRAGStore(options: CreateRAGStoreOptions): Promise<string> {
|
||||||
|
throw new Error("Method not implemented.");
|
||||||
|
}
|
||||||
|
private disableListModels = true;
|
||||||
|
|
||||||
|
path(path: string): string {
|
||||||
|
const accessStore = useAccessStore.getState();
|
||||||
|
|
||||||
|
let baseUrl = "";
|
||||||
|
|
||||||
|
if (accessStore.useCustomConfig) {
|
||||||
|
baseUrl = accessStore.chatglmUrl;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (baseUrl.length === 0) {
|
||||||
|
const isApp = !!getClientConfig()?.isApp;
|
||||||
|
const apiPath = ApiPath.ChatGLM;
|
||||||
|
baseUrl = isApp ? CHATGLM_BASE_URL : apiPath;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (baseUrl.endsWith("/")) {
|
||||||
|
baseUrl = baseUrl.slice(0, baseUrl.length - 1);
|
||||||
|
}
|
||||||
|
if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.ChatGLM)) {
|
||||||
|
baseUrl = "https://" + baseUrl;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log("[Proxy Endpoint] ", baseUrl, path);
|
||||||
|
|
||||||
|
return [baseUrl, path].join("/");
|
||||||
|
}
|
||||||
|
|
||||||
|
extractMessage(res: any) {
|
||||||
|
return res.choices?.at(0)?.message?.content ?? "";
|
||||||
|
}
|
||||||
|
|
||||||
|
speech(options: SpeechOptions): Promise<ArrayBuffer> {
|
||||||
|
throw new Error("Method not implemented.");
|
||||||
|
}
|
||||||
|
|
||||||
|
async chat(options: ChatOptions) {
|
||||||
|
const messages: ChatOptions["messages"] = [];
|
||||||
|
for (const v of options.messages) {
|
||||||
|
const content = getMessageTextContent(v);
|
||||||
|
messages.push({ role: v.role, content });
|
||||||
|
}
|
||||||
|
|
||||||
|
const modelConfig = {
|
||||||
|
...useAppConfig.getState().modelConfig,
|
||||||
|
...useChatStore.getState().currentSession().mask.modelConfig,
|
||||||
|
...{
|
||||||
|
model: options.config.model,
|
||||||
|
providerName: options.config.providerName,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const requestPayload: RequestPayload = {
|
||||||
|
messages,
|
||||||
|
stream: options.config.stream,
|
||||||
|
model: modelConfig.model,
|
||||||
|
temperature: modelConfig.temperature,
|
||||||
|
presence_penalty: modelConfig.presence_penalty,
|
||||||
|
frequency_penalty: modelConfig.frequency_penalty,
|
||||||
|
top_p: modelConfig.top_p,
|
||||||
|
};
|
||||||
|
|
||||||
|
console.log("[Request] glm payload: ", requestPayload);
|
||||||
|
|
||||||
|
const shouldStream = !!options.config.stream;
|
||||||
|
const controller = new AbortController();
|
||||||
|
options.onController?.(controller);
|
||||||
|
|
||||||
|
try {
|
||||||
|
const chatPath = this.path(ChatGLM.ChatPath);
|
||||||
|
const chatPayload = {
|
||||||
|
method: "POST",
|
||||||
|
body: JSON.stringify(requestPayload),
|
||||||
|
signal: controller.signal,
|
||||||
|
headers: getHeaders(),
|
||||||
|
};
|
||||||
|
|
||||||
|
// make a fetch request
|
||||||
|
const requestTimeoutId = setTimeout(
|
||||||
|
() => controller.abort(),
|
||||||
|
REQUEST_TIMEOUT_MS,
|
||||||
|
);
|
||||||
|
|
||||||
|
if (shouldStream) {
|
||||||
|
const [tools, funcs] = [[], {}];
|
||||||
|
// const [tools, funcs] = usePluginStore
|
||||||
|
// .getState()
|
||||||
|
// .getAsTools(
|
||||||
|
// useChatStore.getState().currentSession().mask?.plugin || [],
|
||||||
|
// );
|
||||||
|
return stream(
|
||||||
|
chatPath,
|
||||||
|
requestPayload,
|
||||||
|
getHeaders(),
|
||||||
|
tools as any,
|
||||||
|
funcs,
|
||||||
|
controller,
|
||||||
|
// parseSSE
|
||||||
|
(text: string, runTools: ChatMessageTool[]) => {
|
||||||
|
// console.log("parseSSE", text, runTools);
|
||||||
|
const json = JSON.parse(text);
|
||||||
|
const choices = json.choices as Array<{
|
||||||
|
delta: {
|
||||||
|
content: string;
|
||||||
|
tool_calls: ChatMessageTool[];
|
||||||
|
};
|
||||||
|
}>;
|
||||||
|
const tool_calls = choices[0]?.delta?.tool_calls;
|
||||||
|
if (tool_calls?.length > 0) {
|
||||||
|
const index = tool_calls[0]?.index;
|
||||||
|
const id = tool_calls[0]?.id;
|
||||||
|
const args = tool_calls[0]?.function?.arguments;
|
||||||
|
if (id) {
|
||||||
|
runTools.push({
|
||||||
|
id,
|
||||||
|
type: tool_calls[0]?.type,
|
||||||
|
function: {
|
||||||
|
name: tool_calls[0]?.function?.name as string,
|
||||||
|
arguments: args,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
// @ts-ignore
|
||||||
|
runTools[index]["function"]["arguments"] += args;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return choices[0]?.delta?.content;
|
||||||
|
},
|
||||||
|
// processToolMessage, include tool_calls message and tool call results
|
||||||
|
(
|
||||||
|
requestPayload: RequestPayload,
|
||||||
|
toolCallMessage: any,
|
||||||
|
toolCallResult: any[],
|
||||||
|
) => {
|
||||||
|
// @ts-ignore
|
||||||
|
requestPayload?.messages?.splice(
|
||||||
|
// @ts-ignore
|
||||||
|
requestPayload?.messages?.length,
|
||||||
|
0,
|
||||||
|
toolCallMessage,
|
||||||
|
...toolCallResult,
|
||||||
|
);
|
||||||
|
},
|
||||||
|
options,
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
const res = await fetch(chatPath, chatPayload);
|
||||||
|
clearTimeout(requestTimeoutId);
|
||||||
|
|
||||||
|
const resJson = await res.json();
|
||||||
|
const message = this.extractMessage(resJson);
|
||||||
|
options.onFinish(message, res);
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
console.log("[Request] failed to make a chat request", e);
|
||||||
|
options.onError?.(e as Error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
async usage() {
|
||||||
|
return {
|
||||||
|
used: 0,
|
||||||
|
total: 0,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async models(): Promise<LLMModel[]> {
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
}
|
|
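The new ChatGLMApi above delegates streaming to the shared stream() helper and only supplies a parseSSE callback for OpenAI-compatible chunks. A standalone sketch of that callback's shape with local types; ToolCall here is a stand-in for the project's ChatMessageTool, and parseOpenAIStyleSSE is an illustrative name.

// Sketch: pull text deltas and tool_call fragments out of an OpenAI-style chunk.
type ToolCall = {
  index?: number;
  id?: string;
  type?: string;
  function?: { name?: string; arguments?: string };
};

function parseOpenAIStyleSSE(text: string, runTools: ToolCall[]): string | undefined {
  const json = JSON.parse(text);
  const choices = json.choices as Array<{
    delta: { content?: string; tool_calls?: ToolCall[] };
  }>;
  const toolCalls = choices[0]?.delta?.tool_calls;
  if (toolCalls && toolCalls.length > 0) {
    const { index, id, function: fn } = toolCalls[0];
    if (id) {
      // first fragment of a call: record id/name and start an argument buffer
      runTools.push({
        id,
        type: toolCalls[0].type,
        function: { name: fn?.name, arguments: fn?.arguments ?? "" },
      });
    } else if (index !== undefined && runTools[index]?.function) {
      // later fragments only append to the arguments string
      runTools[index].function!.arguments =
        (runTools[index].function!.arguments ?? "") + (fn?.arguments ?? "");
    }
  }
  return choices[0]?.delta?.content;
}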
@@ -10,27 +10,28 @@ import {
   SpeechOptions,
   TranscriptionOptions,
 } from "../api";
-import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
-import { getClientConfig } from "@/app/config/client";
-import { DEFAULT_API_HOST } from "@/app/constant";
-import Locale from "../../locales";
 import {
-  EventStreamContentType,
-  fetchEventSource,
-} from "@fortaine/fetch-event-source";
-import { prettyObject } from "@/app/utils/format";
+  useAccessStore,
+  useAppConfig,
+  useChatStore,
+  usePluginStore,
+  ChatMessageTool,
+} from "@/app/store";
+import { stream } from "@/app/utils/chat";
+import { getClientConfig } from "@/app/config/client";
+import { GEMINI_BASE_URL } from "@/app/constant";

 import {
   getMessageTextContent,
   getMessageImages,
   isVisionModel,
 } from "@/app/utils";
 import { preProcessImageContent } from "@/app/utils/chat";
-import options from "cheerio/lib/options";
+import { nanoid } from "nanoid";
+import { RequestPayload } from "./openai";
+import { fetch } from "@/app/utils/stream";

 export class GeminiProApi implements LLMApi {
-  speech(options: SpeechOptions): Promise<ArrayBuffer> {
-    throw new Error("Method not implemented.");
-  }
   transcription(options: TranscriptionOptions): Promise<string> {
     throw new Error("Method not implemented.");
   }
@@ -48,11 +49,9 @@ export class GeminiProApi implements LLMApi {
       baseUrl = accessStore.googleUrl;
     }

+    const isApp = !!getClientConfig()?.isApp;
     if (baseUrl.length === 0) {
-      const isApp = !!getClientConfig()?.isApp;
-      baseUrl = isApp
-        ? DEFAULT_API_HOST + `/api/proxy/google?key=${accessStore.googleApiKey}`
-        : ApiPath.Google;
+      baseUrl = isApp ? GEMINI_BASE_URL : ApiPath.Google;
     }
     if (baseUrl.endsWith("/")) {
       baseUrl = baseUrl.slice(0, baseUrl.length - 1);
@@ -77,6 +76,10 @@ export class GeminiProApi implements LLMApi {
       ""
     );
   }
+  speech(options: SpeechOptions): Promise<ArrayBuffer> {
+    throw new Error("Method not implemented.");
+  }
+
   async chat(options: ChatOptions): Promise<void> {
     const apiClient = this;
     let multimodal = false;
@ -191,120 +194,89 @@ export class GeminiProApi implements LLMApi {
|
||||||
);
|
);
|
||||||
|
|
||||||
if (shouldStream) {
|
if (shouldStream) {
|
||||||
let responseText = "";
|
const [tools, funcs] = [[], {}];
|
||||||
let remainText = "";
|
// const [tools, funcs] = usePluginStore
|
||||||
let finished = false;
|
// .getState()
|
||||||
|
// .getAsTools(
|
||||||
|
// useChatStore.getState().currentSession().mask?.plugin || [],
|
||||||
|
// );
|
||||||
|
return stream(
|
||||||
|
chatPath,
|
||||||
|
requestPayload,
|
||||||
|
getHeaders(),
|
||||||
|
// @ts-ignore
|
||||||
|
tools.length > 0
|
||||||
|
? // @ts-ignore
|
||||||
|
[{ functionDeclarations: tools.map((tool) => tool.function) }]
|
||||||
|
: [],
|
||||||
|
funcs,
|
||||||
|
controller,
|
||||||
|
// parseSSE
|
||||||
|
(text: string, runTools: ChatMessageTool[]) => {
|
||||||
|
// console.log("parseSSE", text, runTools);
|
||||||
|
const chunkJson = JSON.parse(text);
|
||||||
|
|
||||||
const finish = () => {
|
const functionCall = chunkJson?.candidates
|
||||||
if (!finished) {
|
?.at(0)
|
||||||
finished = true;
|
?.content.parts.at(0)?.functionCall;
|
||||||
options.onFinish(responseText + remainText);
|
if (functionCall) {
|
||||||
}
|
const { name, args } = functionCall;
|
||||||
};
|
runTools.push({
|
||||||
|
id: nanoid(),
|
||||||
// animate response to make it looks smooth
|
type: "function",
|
||||||
function animateResponseText() {
|
function: {
|
||||||
if (finished || controller.signal.aborted) {
|
name,
|
||||||
responseText += remainText;
|
arguments: JSON.stringify(args), // utils.chat call function, using JSON.parse
|
||||||
finish();
|
},
|
||||||
return;
|
});
|
||||||
}
|
}
|
||||||
|
return chunkJson?.candidates?.at(0)?.content.parts.at(0)?.text;
|
||||||
if (remainText.length > 0) {
|
},
|
||||||
const fetchCount = Math.max(1, Math.round(remainText.length / 60));
|
// processToolMessage, include tool_calls message and tool call results
|
||||||
const fetchText = remainText.slice(0, fetchCount);
|
(
|
||||||
responseText += fetchText;
|
requestPayload: RequestPayload,
|
||||||
remainText = remainText.slice(fetchCount);
|
toolCallMessage: any,
|
||||||
options.onUpdate?.(responseText, fetchText);
|
toolCallResult: any[],
|
||||||
}
|
) => {
|
||||||
|
// @ts-ignore
|
||||||
requestAnimationFrame(animateResponseText);
|
requestPayload?.contents?.splice(
|
||||||
}
|
// @ts-ignore
|
||||||
|
requestPayload?.contents?.length,
|
||||||
// start animaion
|
0,
|
||||||
animateResponseText();
|
{
|
||||||
|
role: "model",
|
||||||
controller.signal.onabort = finish;
|
parts: toolCallMessage.tool_calls.map(
|
||||||
|
(tool: ChatMessageTool) => ({
|
||||||
fetchEventSource(chatPath, {
|
functionCall: {
|
||||||
...chatPayload,
|
name: tool?.function?.name,
|
||||||
async onopen(res) {
|
args: JSON.parse(tool?.function?.arguments as string),
|
||||||
clearTimeout(requestTimeoutId);
|
},
|
||||||
const contentType = res.headers.get("content-type");
|
}),
|
||||||
console.log(
|
),
|
||||||
"[Gemini] request response content type: ",
|
},
|
||||||
contentType,
|
// @ts-ignore
|
||||||
|
...toolCallResult.map((result) => ({
|
||||||
|
role: "function",
|
||||||
|
parts: [
|
||||||
|
{
|
||||||
|
functionResponse: {
|
||||||
|
name: result.name,
|
||||||
|
response: {
|
||||||
|
name: result.name,
|
||||||
|
content: result.content, // TODO just text content...
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
})),
|
||||||
);
|
);
|
||||||
|
|
||||||
if (contentType?.startsWith("text/plain")) {
|
|
||||||
responseText = await res.clone().text();
|
|
||||||
return finish();
|
|
||||||
}
|
|
||||||
|
|
||||||
if (
|
|
||||||
!res.ok ||
|
|
||||||
!res.headers
|
|
||||||
.get("content-type")
|
|
||||||
?.startsWith(EventStreamContentType) ||
|
|
||||||
res.status !== 200
|
|
||||||
) {
|
|
||||||
const responseTexts = [responseText];
|
|
||||||
let extraInfo = await res.clone().text();
|
|
||||||
try {
|
|
||||||
const resJson = await res.clone().json();
|
|
||||||
extraInfo = prettyObject(resJson);
|
|
||||||
} catch {}
|
|
||||||
|
|
||||||
if (res.status === 401) {
|
|
||||||
responseTexts.push(Locale.Error.Unauthorized);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (extraInfo) {
|
|
||||||
responseTexts.push(extraInfo);
|
|
||||||
}
|
|
||||||
|
|
||||||
responseText = responseTexts.join("\n\n");
|
|
||||||
|
|
||||||
return finish();
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
onmessage(msg) {
|
options,
|
||||||
if (msg.data === "[DONE]" || finished) {
|
);
|
||||||
return finish();
|
|
||||||
}
|
|
||||||
const text = msg.data;
|
|
||||||
try {
|
|
||||||
const json = JSON.parse(text);
|
|
||||||
const delta = apiClient.extractMessage(json);
|
|
||||||
|
|
||||||
if (delta) {
|
|
||||||
remainText += delta;
|
|
||||||
}
|
|
||||||
|
|
||||||
const blockReason = json?.promptFeedback?.blockReason;
|
|
||||||
if (blockReason) {
|
|
||||||
// being blocked
|
|
||||||
console.log(`[Google] [Safety Ratings] result:`, blockReason);
|
|
||||||
}
|
|
||||||
} catch (e) {
|
|
||||||
console.error("[Request] parse error", text, msg);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
onclose() {
|
|
||||||
finish();
|
|
||||||
},
|
|
||||||
onerror(e) {
|
|
||||||
options.onError?.(e);
|
|
||||||
throw e;
|
|
||||||
},
|
|
||||||
openWhenHidden: true,
|
|
||||||
});
|
|
||||||
      } else {
        const res = await fetch(chatPath, chatPayload);
        clearTimeout(requestTimeoutId);

        const resJson = await res.json();

        if (resJson?.promptFeedback?.blockReason) {
          // being blocked
          options.onError?.(
@@ -315,7 +287,7 @@ export class GeminiProApi implements LLMApi {
          );
        }
        const message = apiClient.extractMessage(resJson);
-        options.onFinish(message);
+        options.onFinish(message, res);
      }
    } catch (e) {
      console.log("[Request] failed to make a chat request", e);

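For Gemini, the parseSSE callback above maps a functionCall part into the same tool-call shape used elsewhere, serializing args back to a JSON string so downstream code can JSON.parse it uniformly. A minimal sketch of that mapping; nanoid is swapped for a plain counter to stay dependency-free, and the types are simplified stand-ins.

// Sketch: translate a Gemini candidate part into a generic tool-call record.
type ToolCall = {
  id: string;
  type: "function";
  function: { name: string; arguments: string };
};

type GeminiChunk = {
  candidates?: Array<{
    content: { parts: Array<{ text?: string; functionCall?: { name: string; args: unknown } }> };
  }>;
};

let counter = 0;

function parseGeminiSSE(text: string, runTools: ToolCall[]): string | undefined {
  const chunk: GeminiChunk = JSON.parse(text);
  const part = chunk.candidates?.at(0)?.content.parts.at(0);
  if (part?.functionCall) {
    const { name, args } = part.functionCall;
    runTools.push({
      id: `call-${++counter}`, // the real code uses nanoid()
      type: "function",
      function: { name, arguments: JSON.stringify(args) },
    });
  }
  return part?.text;
}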
@ -0,0 +1,265 @@
|
||||||
|
"use client";
|
||||||
|
import {
|
||||||
|
ApiPath,
|
||||||
|
IFLYTEK_BASE_URL,
|
||||||
|
Iflytek,
|
||||||
|
REQUEST_TIMEOUT_MS,
|
||||||
|
} from "@/app/constant";
|
||||||
|
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
|
||||||
|
|
||||||
|
import {
|
||||||
|
AgentChatOptions,
|
||||||
|
ChatOptions,
|
||||||
|
CreateRAGStoreOptions,
|
||||||
|
getHeaders,
|
||||||
|
LLMApi,
|
||||||
|
LLMModel,
|
||||||
|
SpeechOptions,
|
||||||
|
TranscriptionOptions,
|
||||||
|
} from "../api";
|
||||||
|
import Locale from "../../locales";
|
||||||
|
import {
|
||||||
|
EventStreamContentType,
|
||||||
|
fetchEventSource,
|
||||||
|
} from "@fortaine/fetch-event-source";
|
||||||
|
import { prettyObject } from "@/app/utils/format";
|
||||||
|
import { getClientConfig } from "@/app/config/client";
|
||||||
|
import { getMessageTextContent } from "@/app/utils";
|
||||||
|
import { fetch } from "@/app/utils/stream";
|
||||||
|
|
||||||
|
import { RequestPayload } from "./openai";
|
||||||
|
|
||||||
|
export class SparkApi implements LLMApi {
|
||||||
|
transcription(options: TranscriptionOptions): Promise<string> {
|
||||||
|
throw new Error("Method not implemented.");
|
||||||
|
}
|
||||||
|
toolAgentChat(options: AgentChatOptions): Promise<void> {
|
||||||
|
throw new Error("Method not implemented.");
|
||||||
|
}
|
||||||
|
createRAGStore(options: CreateRAGStoreOptions): Promise<string> {
|
||||||
|
throw new Error("Method not implemented.");
|
||||||
|
}
|
||||||
|
private disableListModels = true;
|
||||||
|
|
||||||
|
path(path: string): string {
|
||||||
|
const accessStore = useAccessStore.getState();
|
||||||
|
|
||||||
|
let baseUrl = "";
|
||||||
|
|
||||||
|
if (accessStore.useCustomConfig) {
|
||||||
|
baseUrl = accessStore.iflytekUrl;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (baseUrl.length === 0) {
|
||||||
|
const isApp = !!getClientConfig()?.isApp;
|
||||||
|
const apiPath = ApiPath.Iflytek;
|
||||||
|
baseUrl = isApp ? IFLYTEK_BASE_URL : apiPath;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (baseUrl.endsWith("/")) {
|
||||||
|
baseUrl = baseUrl.slice(0, baseUrl.length - 1);
|
||||||
|
}
|
||||||
|
if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.Iflytek)) {
|
||||||
|
baseUrl = "https://" + baseUrl;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log("[Proxy Endpoint] ", baseUrl, path);
|
||||||
|
|
||||||
|
return [baseUrl, path].join("/");
|
||||||
|
}
|
||||||
|
|
||||||
|
extractMessage(res: any) {
|
||||||
|
    return res.choices?.at(0)?.message?.content ?? "";
  }

  speech(options: SpeechOptions): Promise<ArrayBuffer> {
    throw new Error("Method not implemented.");
  }

  async chat(options: ChatOptions) {
    const messages: ChatOptions["messages"] = [];
    for (const v of options.messages) {
      const content = getMessageTextContent(v);
      messages.push({ role: v.role, content });
    }

    const modelConfig = {
      ...useAppConfig.getState().modelConfig,
      ...useChatStore.getState().currentSession().mask.modelConfig,
      ...{
        model: options.config.model,
        providerName: options.config.providerName,
      },
    };

    const requestPayload: RequestPayload = {
      messages,
      stream: options.config.stream,
      model: modelConfig.model,
      temperature: modelConfig.temperature,
      presence_penalty: modelConfig.presence_penalty,
      frequency_penalty: modelConfig.frequency_penalty,
      top_p: modelConfig.top_p,
      // max_tokens: Math.max(modelConfig.max_tokens, 1024),
      // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
    };

    console.log("[Request] Spark payload: ", requestPayload);

    const shouldStream = !!options.config.stream;
    const controller = new AbortController();
    options.onController?.(controller);

    try {
      const chatPath = this.path(Iflytek.ChatPath);
      const chatPayload = {
        method: "POST",
        body: JSON.stringify(requestPayload),
        signal: controller.signal,
        headers: getHeaders(),
      };

      // Make a fetch request
      const requestTimeoutId = setTimeout(
        () => controller.abort(),
        REQUEST_TIMEOUT_MS,
      );

      if (shouldStream) {
        let responseText = "";
        let remainText = "";
        let finished = false;
        let responseRes: Response;

        // Animate response text to make it look smooth
        function animateResponseText() {
          if (finished || controller.signal.aborted) {
            responseText += remainText;
            console.log("[Response Animation] finished");
            return;
          }

          if (remainText.length > 0) {
            const fetchCount = Math.max(1, Math.round(remainText.length / 60));
            const fetchText = remainText.slice(0, fetchCount);
            responseText += fetchText;
            remainText = remainText.slice(fetchCount);
            options.onUpdate?.(responseText, fetchText);
          }

          requestAnimationFrame(animateResponseText);
        }

        // Start animation
        animateResponseText();

        const finish = () => {
          if (!finished) {
            finished = true;
            options.onFinish(responseText + remainText, responseRes);
          }
        };

        controller.signal.onabort = finish;

        fetchEventSource(chatPath, {
          fetch: fetch as any,
          ...chatPayload,
          async onopen(res) {
            clearTimeout(requestTimeoutId);
            const contentType = res.headers.get("content-type");
            console.log("[Spark] request response content type: ", contentType);
            responseRes = res;
            if (contentType?.startsWith("text/plain")) {
              responseText = await res.clone().text();
              return finish();
            }

            // Handle different error scenarios
            if (
              !res.ok ||
              !res.headers
                .get("content-type")
                ?.startsWith(EventStreamContentType) ||
              res.status !== 200
            ) {
              let extraInfo = await res.clone().text();
              try {
                const resJson = await res.clone().json();
                extraInfo = prettyObject(resJson);
              } catch {}

              if (res.status === 401) {
                extraInfo = Locale.Error.Unauthorized;
              }

              options.onError?.(
                new Error(
                  `Request failed with status ${res.status}: ${extraInfo}`,
                ),
              );
              return finish();
            }
          },
          onmessage(msg) {
            if (msg.data === "[DONE]" || finished) {
              return finish();
            }
            const text = msg.data;
            try {
              const json = JSON.parse(text);
              const choices = json.choices as Array<{
                delta: { content: string };
              }>;
              const delta = choices[0]?.delta?.content;

              if (delta) {
                remainText += delta;
              }
            } catch (e) {
              console.error("[Request] parse error", text);
              options.onError?.(new Error(`Failed to parse response: ${text}`));
            }
          },
          onclose() {
            finish();
          },
          onerror(e) {
            options.onError?.(e);
            throw e;
          },
          openWhenHidden: true,
        });
      } else {
        const res = await fetch(chatPath, chatPayload);
        clearTimeout(requestTimeoutId);

        if (!res.ok) {
          const errorText = await res.text();
          options.onError?.(
            new Error(`Request failed with status ${res.status}: ${errorText}`),
          );
          return;
        }

        const resJson = await res.json();
        const message = this.extractMessage(resJson);
        options.onFinish(message, res);
      }
    } catch (e) {
      console.log("[Request] failed to make a chat request", e);
      options.onError?.(e as Error);
    }
  }

  async usage() {
    return {
      used: 0,
      total: 0,
    };
  }

  async models(): Promise<LLMModel[]> {
    return [];
  }
}
@@ -0,0 +1,213 @@
"use client";
// azure and openai, using same models. so using same LLMApi.
import {
  ApiPath,
  MOONSHOT_BASE_URL,
  Moonshot,
  REQUEST_TIMEOUT_MS,
} from "@/app/constant";
import {
  useAccessStore,
  useAppConfig,
  useChatStore,
  ChatMessageTool,
  usePluginStore,
} from "@/app/store";
import { stream } from "@/app/utils/chat";
import {
  AgentChatOptions,
  ChatOptions,
  CreateRAGStoreOptions,
  getHeaders,
  LLMApi,
  LLMModel,
  SpeechOptions,
  TranscriptionOptions,
} from "../api";
import { getClientConfig } from "@/app/config/client";
import { getMessageTextContent } from "@/app/utils";
import { RequestPayload } from "./openai";
import { fetch } from "@/app/utils/stream";

export class MoonshotApi implements LLMApi {
  transcription(options: TranscriptionOptions): Promise<string> {
    throw new Error("Method not implemented.");
  }
  toolAgentChat(options: AgentChatOptions): Promise<void> {
    throw new Error("Method not implemented.");
  }
  createRAGStore(options: CreateRAGStoreOptions): Promise<string> {
    throw new Error("Method not implemented.");
  }
  private disableListModels = true;

  path(path: string): string {
    const accessStore = useAccessStore.getState();

    let baseUrl = "";

    if (accessStore.useCustomConfig) {
      baseUrl = accessStore.moonshotUrl;
    }

    if (baseUrl.length === 0) {
      const isApp = !!getClientConfig()?.isApp;
      const apiPath = ApiPath.Moonshot;
      baseUrl = isApp ? MOONSHOT_BASE_URL : apiPath;
    }

    if (baseUrl.endsWith("/")) {
      baseUrl = baseUrl.slice(0, baseUrl.length - 1);
    }
    if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.Moonshot)) {
      baseUrl = "https://" + baseUrl;
    }

    console.log("[Proxy Endpoint] ", baseUrl, path);

    return [baseUrl, path].join("/");
  }

  extractMessage(res: any) {
    return res.choices?.at(0)?.message?.content ?? "";
  }

  speech(options: SpeechOptions): Promise<ArrayBuffer> {
    throw new Error("Method not implemented.");
  }

  async chat(options: ChatOptions) {
    const messages: ChatOptions["messages"] = [];
    for (const v of options.messages) {
      const content = getMessageTextContent(v);
      messages.push({ role: v.role, content });
    }

    const modelConfig = {
      ...useAppConfig.getState().modelConfig,
      ...useChatStore.getState().currentSession().mask.modelConfig,
      ...{
        model: options.config.model,
        providerName: options.config.providerName,
      },
    };

    const requestPayload: RequestPayload = {
      messages,
      stream: options.config.stream,
      model: modelConfig.model,
      temperature: modelConfig.temperature,
      presence_penalty: modelConfig.presence_penalty,
      frequency_penalty: modelConfig.frequency_penalty,
      top_p: modelConfig.top_p,
      // max_tokens: Math.max(modelConfig.max_tokens, 1024),
      // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
    };

    console.log("[Request] openai payload: ", requestPayload);

    const shouldStream = !!options.config.stream;
    const controller = new AbortController();
    options.onController?.(controller);

    try {
      const chatPath = this.path(Moonshot.ChatPath);
      const chatPayload = {
        method: "POST",
        body: JSON.stringify(requestPayload),
        signal: controller.signal,
        headers: getHeaders(),
      };

      // make a fetch request
      const requestTimeoutId = setTimeout(
        () => controller.abort(),
        REQUEST_TIMEOUT_MS,
      );

      if (shouldStream) {
        const [tools, funcs] = [[], {}];
        // const [tools, funcs] = usePluginStore
        //   .getState()
        //   .getAsTools(
        //     useChatStore.getState().currentSession().mask?.plugin || [],
        //   );
        return stream(
          chatPath,
          requestPayload,
          getHeaders(),
          tools as any,
          funcs,
          controller,
          // parseSSE
          (text: string, runTools: ChatMessageTool[]) => {
            // console.log("parseSSE", text, runTools);
            const json = JSON.parse(text);
            const choices = json.choices as Array<{
              delta: {
                content: string;
                tool_calls: ChatMessageTool[];
              };
            }>;
            const tool_calls = choices[0]?.delta?.tool_calls;
            if (tool_calls?.length > 0) {
              const index = tool_calls[0]?.index;
              const id = tool_calls[0]?.id;
              const args = tool_calls[0]?.function?.arguments;
              if (id) {
                runTools.push({
                  id,
                  type: tool_calls[0]?.type,
                  function: {
                    name: tool_calls[0]?.function?.name as string,
                    arguments: args,
                  },
                });
              } else {
                // @ts-ignore
                runTools[index]["function"]["arguments"] += args;
              }
            }
            return choices[0]?.delta?.content;
          },
          // processToolMessage, include tool_calls message and tool call results
          (
            requestPayload: RequestPayload,
            toolCallMessage: any,
            toolCallResult: any[],
          ) => {
            // @ts-ignore
            requestPayload?.messages?.splice(
              // @ts-ignore
              requestPayload?.messages?.length,
              0,
              toolCallMessage,
              ...toolCallResult,
            );
          },
          options,
        );
      } else {
        const res = await fetch(chatPath, chatPayload);
        clearTimeout(requestTimeoutId);

        const resJson = await res.json();
        const message = this.extractMessage(resJson);
        options.onFinish(message, res);
      }
    } catch (e) {
      console.log("[Request] failed to make a chat request", e);
      options.onError?.(e as Error);
    }
  }
  async usage() {
    return {
      used: 0,
      total: 0,
    };
  }

  async models(): Promise<LLMModel[]> {
    return [];
  }
}
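// Illustration (not part of the upstream file): how the parseSSE callback above merges
// OpenAI-style streamed tool_call deltas. The first chunk of a call carries an id and the
// function name; later chunks only carry argument fragments, which are appended by index.
// The names ToolCallDelta / mergeToolCallDelta are illustrative, not from the repo.
interface ToolCallDelta {
  index?: number;
  id?: string;
  type?: string;
  function?: { name?: string; arguments?: string };
}
interface AccumulatedToolCall {
  id: string;
  type?: string;
  function: { name: string; arguments: string };
}
function mergeToolCallDelta(runTools: AccumulatedToolCall[], delta: ToolCallDelta) {
  if (delta.id) {
    // first chunk: open a new tool call entry
    runTools.push({
      id: delta.id,
      type: delta.type,
      function: {
        name: delta.function?.name ?? "",
        arguments: delta.function?.arguments ?? "",
      },
    });
  } else if (delta.index !== undefined && runTools[delta.index]) {
    // later chunks: append the argument fragment to the call at that index
    runTools[delta.index].function.arguments += delta.function?.arguments ?? "";
  }
}
// Two chunks for the same call produce one complete argument string, e.g.
//   mergeToolCallDelta(calls, { index: 0, id: "call_1", type: "function", function: { name: "search", arguments: '{"query":' } });
//   mergeToolCallDelta(calls, { index: 0, function: { arguments: '"weather"}' } });
//   calls[0].function.arguments === '{"query":"weather"}'
// where `calls` starts as an empty AccumulatedToolCall[].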
@@ -2,12 +2,12 @@
 // azure and openai, using same models. so using same LLMApi.
 import {
   ApiPath,
-  DEFAULT_API_HOST,
   DEFAULT_MODELS,
   OpenaiPath,
   Azure,
   REQUEST_TIMEOUT_MS,
   ServiceProvider,
+  OPENAI_BASE_URL,
 } from "@/app/constant";
 import {
   ChatMessageTool,
@@ -101,7 +101,7 @@ export class ChatGPTApi implements LLMApi {
     if (baseUrl.length === 0) {
       const isApp = !!getClientConfig()?.isApp;
       const apiPath = isAzure ? ApiPath.Azure : ApiPath.OpenAI;
-      baseUrl = isApp ? DEFAULT_API_HOST + "/proxy" + apiPath : apiPath;
+      baseUrl = isApp ? OPENAI_BASE_URL : apiPath;
     }

     if (baseUrl.endsWith("/")) {
@@ -383,7 +383,7 @@ export class ChatGPTApi implements LLMApi {

       const resJson = await res.json();
       const message = await this.extractMessage(resJson);
-      options.onFinish(message);
+      options.onFinish(message, res);
     }
   } catch (e) {
     console.log("[Request] failed to make a chat request", e);
@@ -663,20 +663,26 @@ export class ChatGPTApi implements LLMApi {
     });

     const resJson = (await res.json()) as OpenAIListModelResponse;
-    const chatModels = resJson.data?.filter((m) => m.id.startsWith("gpt-"));
+    const chatModels = resJson.data?.filter(
+      (m) => m.id.startsWith("gpt-") || m.id.startsWith("chatgpt-"),
+    );
     console.log("[Models]", chatModels);

     if (!chatModels) {
       return [];
     }

+    // Since OpenAI's disableListModels currently defaults to true, this branch is not actually reached at the moment.
+    let seq = 1000; // keep the ordering consistent with the one in constant.ts
     return chatModels.map((m) => ({
       name: m.id,
       available: true,
+      sorted: seq++,
       provider: {
         id: "openai",
         providerName: "OpenAI",
         providerType: "openai",
+        sorted: 1,
       },
     }));
   }
@@ -0,0 +1,286 @@
"use client";
import { ApiPath, TENCENT_BASE_URL, REQUEST_TIMEOUT_MS } from "@/app/constant";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";

import {
  AgentChatOptions,
  ChatOptions,
  CreateRAGStoreOptions,
  getHeaders,
  LLMApi,
  LLMModel,
  MultimodalContent,
  SpeechOptions,
  TranscriptionOptions,
} from "../api";
import Locale from "../../locales";
import {
  EventStreamContentType,
  fetchEventSource,
} from "@fortaine/fetch-event-source";
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import { getMessageTextContent, isVisionModel } from "@/app/utils";
import mapKeys from "lodash-es/mapKeys";
import mapValues from "lodash-es/mapValues";
import isArray from "lodash-es/isArray";
import isObject from "lodash-es/isObject";
import { fetch } from "@/app/utils/stream";

export interface OpenAIListModelResponse {
  object: string;
  data: Array<{
    id: string;
    object: string;
    root: string;
  }>;
}

interface RequestPayload {
  Messages: {
    Role: "system" | "user" | "assistant";
    Content: string | MultimodalContent[];
  }[];
  Stream?: boolean;
  Model: string;
  Temperature: number;
  TopP: number;
}

function capitalizeKeys(obj: any): any {
  if (isArray(obj)) {
    return obj.map(capitalizeKeys);
  } else if (isObject(obj)) {
    return mapValues(
      mapKeys(obj, (value: any, key: string) =>
        key.replace(/(^|_)(\w)/g, (m, $1, $2) => $2.toUpperCase()),
      ),
      capitalizeKeys,
    );
  } else {
    return obj;
  }
}

export class HunyuanApi implements LLMApi {
  transcription(options: TranscriptionOptions): Promise<string> {
    throw new Error("Method not implemented.");
  }
  toolAgentChat(options: AgentChatOptions): Promise<void> {
    throw new Error("Method not implemented.");
  }
  createRAGStore(options: CreateRAGStoreOptions): Promise<string> {
    throw new Error("Method not implemented.");
  }
  path(): string {
    const accessStore = useAccessStore.getState();

    let baseUrl = "";

    if (accessStore.useCustomConfig) {
      baseUrl = accessStore.tencentUrl;
    }

    if (baseUrl.length === 0) {
      const isApp = !!getClientConfig()?.isApp;
      baseUrl = isApp ? TENCENT_BASE_URL : ApiPath.Tencent;
    }

    if (baseUrl.endsWith("/")) {
      baseUrl = baseUrl.slice(0, baseUrl.length - 1);
    }
    if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.Tencent)) {
      baseUrl = "https://" + baseUrl;
    }

    console.log("[Proxy Endpoint] ", baseUrl);
    return baseUrl;
  }

  extractMessage(res: any) {
    return res.Choices?.at(0)?.Message?.Content ?? "";
  }

  speech(options: SpeechOptions): Promise<ArrayBuffer> {
    throw new Error("Method not implemented.");
  }

  async chat(options: ChatOptions) {
    const visionModel = isVisionModel(options.config.model);
    const messages = options.messages.map((v, index) => ({
      // "The system role in Messages must be at the very beginning of the list"
      role: index !== 0 && v.role === "system" ? "user" : v.role,
      content: visionModel ? v.content : getMessageTextContent(v),
    }));

    const modelConfig = {
      ...useAppConfig.getState().modelConfig,
      ...useChatStore.getState().currentSession().mask.modelConfig,
      ...{
        model: options.config.model,
      },
    };

    const requestPayload: RequestPayload = capitalizeKeys({
      model: modelConfig.model,
      messages,
      temperature: modelConfig.temperature,
      top_p: modelConfig.top_p,
      stream: options.config.stream,
    });

    console.log("[Request] Tencent payload: ", requestPayload);

    const shouldStream = !!options.config.stream;
    const controller = new AbortController();
    options.onController?.(controller);

    try {
      const chatPath = this.path();
      const chatPayload = {
        method: "POST",
        body: JSON.stringify(requestPayload),
        signal: controller.signal,
        headers: getHeaders(),
      };

      // make a fetch request
      const requestTimeoutId = setTimeout(
        () => controller.abort(),
        REQUEST_TIMEOUT_MS,
      );

      if (shouldStream) {
        let responseText = "";
        let remainText = "";
        let finished = false;
        let responseRes: Response;

        // animate response to make it look smooth
        function animateResponseText() {
          if (finished || controller.signal.aborted) {
            responseText += remainText;
            console.log("[Response Animation] finished");
            if (responseText?.length === 0) {
              options.onError?.(new Error("empty response from server"));
            }
            return;
          }

          if (remainText.length > 0) {
            const fetchCount = Math.max(1, Math.round(remainText.length / 60));
            const fetchText = remainText.slice(0, fetchCount);
            responseText += fetchText;
            remainText = remainText.slice(fetchCount);
            options.onUpdate?.(responseText, fetchText);
          }

          requestAnimationFrame(animateResponseText);
        }

        // start animation
        animateResponseText();

        const finish = () => {
          if (!finished) {
            finished = true;
            options.onFinish(responseText + remainText, responseRes);
          }
        };

        controller.signal.onabort = finish;

        fetchEventSource(chatPath, {
          fetch: fetch as any,
          ...chatPayload,
          async onopen(res) {
            clearTimeout(requestTimeoutId);
            const contentType = res.headers.get("content-type");
            console.log(
              "[Tencent] request response content type: ",
              contentType,
            );
            responseRes = res;
            if (contentType?.startsWith("text/plain")) {
              responseText = await res.clone().text();
              return finish();
            }

            if (
              !res.ok ||
              !res.headers
                .get("content-type")
                ?.startsWith(EventStreamContentType) ||
              res.status !== 200
            ) {
              const responseTexts = [responseText];
              let extraInfo = await res.clone().text();
              try {
                const resJson = await res.clone().json();
                extraInfo = prettyObject(resJson);
              } catch {}

              if (res.status === 401) {
                responseTexts.push(Locale.Error.Unauthorized);
              }

              if (extraInfo) {
                responseTexts.push(extraInfo);
              }

              responseText = responseTexts.join("\n\n");

              return finish();
            }
          },
          onmessage(msg) {
            if (msg.data === "[DONE]" || finished) {
              return finish();
            }
            const text = msg.data;
            try {
              const json = JSON.parse(text);
              const choices = json.Choices as Array<{
                Delta: { Content: string };
              }>;
              const delta = choices[0]?.Delta?.Content;
              if (delta) {
                remainText += delta;
              }
            } catch (e) {
              console.error("[Request] parse error", text, msg);
            }
          },
          onclose() {
            finish();
          },
          onerror(e) {
            options.onError?.(e);
            throw e;
          },
          openWhenHidden: true,
        });
      } else {
        const res = await fetch(chatPath, chatPayload);
        clearTimeout(requestTimeoutId);

        const resJson = await res.json();
        const message = this.extractMessage(resJson);
        options.onFinish(message, res);
      }
    } catch (e) {
      console.log("[Request] failed to make a chat request", e);
      options.onError?.(e as Error);
    }
  }
  async usage() {
    return {
      used: 0,
      total: 0,
    };
  }

  async models(): Promise<LLMModel[]> {
    return [];
  }
}
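// Illustration (not part of the upstream file): what capitalizeKeys does to the
// OpenAI-style payload before it is sent to Hunyuan. Keys are PascalCased recursively,
// including inside arrays, to match Tencent's request schema; the model id below is
// only a placeholder for the example.
const examplePayload = capitalizeKeys({
  model: "hunyuan-lite", // placeholder model id, for illustration only
  temperature: 0.7,
  top_p: 1,
  stream: true,
  messages: [{ role: "user", content: "hello" }],
});
// examplePayload is now:
// {
//   Model: "hunyuan-lite",
//   Temperature: 0.7,
//   TopP: 1,
//   Stream: true,
//   Messages: [{ Role: "user", Content: "hello" }],
// }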
@@ -0,0 +1,206 @@
"use client";
// azure and openai, using same models. so using same LLMApi.
import { ApiPath, XAI_BASE_URL, XAI, REQUEST_TIMEOUT_MS } from "@/app/constant";
import {
  useAccessStore,
  useAppConfig,
  useChatStore,
  ChatMessageTool,
  usePluginStore,
} from "@/app/store";
import { stream } from "@/app/utils/chat";
import {
  AgentChatOptions,
  ChatOptions,
  CreateRAGStoreOptions,
  getHeaders,
  LLMApi,
  LLMModel,
  SpeechOptions,
  TranscriptionOptions,
} from "../api";
import { getClientConfig } from "@/app/config/client";
import { getMessageTextContent } from "@/app/utils";
import { RequestPayload } from "./openai";
import { fetch } from "@/app/utils/stream";

export class XAIApi implements LLMApi {
  transcription(options: TranscriptionOptions): Promise<string> {
    throw new Error("Method not implemented.");
  }
  toolAgentChat(options: AgentChatOptions): Promise<void> {
    throw new Error("Method not implemented.");
  }
  createRAGStore(options: CreateRAGStoreOptions): Promise<string> {
    throw new Error("Method not implemented.");
  }
  private disableListModels = true;

  path(path: string): string {
    const accessStore = useAccessStore.getState();

    let baseUrl = "";

    if (accessStore.useCustomConfig) {
      baseUrl = accessStore.xaiUrl;
    }

    if (baseUrl.length === 0) {
      const isApp = !!getClientConfig()?.isApp;
      const apiPath = ApiPath.XAI;
      baseUrl = isApp ? XAI_BASE_URL : apiPath;
    }

    if (baseUrl.endsWith("/")) {
      baseUrl = baseUrl.slice(0, baseUrl.length - 1);
    }
    if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.XAI)) {
      baseUrl = "https://" + baseUrl;
    }

    console.log("[Proxy Endpoint] ", baseUrl, path);

    return [baseUrl, path].join("/");
  }

  extractMessage(res: any) {
    return res.choices?.at(0)?.message?.content ?? "";
  }

  speech(options: SpeechOptions): Promise<ArrayBuffer> {
    throw new Error("Method not implemented.");
  }

  async chat(options: ChatOptions) {
    const messages: ChatOptions["messages"] = [];
    for (const v of options.messages) {
      const content = getMessageTextContent(v);
      messages.push({ role: v.role, content });
    }

    const modelConfig = {
      ...useAppConfig.getState().modelConfig,
      ...useChatStore.getState().currentSession().mask.modelConfig,
      ...{
        model: options.config.model,
        providerName: options.config.providerName,
      },
    };

    const requestPayload: RequestPayload = {
      messages,
      stream: options.config.stream,
      model: modelConfig.model,
      temperature: modelConfig.temperature,
      presence_penalty: modelConfig.presence_penalty,
      frequency_penalty: modelConfig.frequency_penalty,
      top_p: modelConfig.top_p,
    };

    console.log("[Request] xai payload: ", requestPayload);

    const shouldStream = !!options.config.stream;
    const controller = new AbortController();
    options.onController?.(controller);

    try {
      const chatPath = this.path(XAI.ChatPath);
      const chatPayload = {
        method: "POST",
        body: JSON.stringify(requestPayload),
        signal: controller.signal,
        headers: getHeaders(),
      };

      // make a fetch request
      const requestTimeoutId = setTimeout(
        () => controller.abort(),
        REQUEST_TIMEOUT_MS,
      );

      if (shouldStream) {
        const [tools, funcs] = [[], {}];
        // const [tools, funcs] = usePluginStore
        //   .getState()
        //   .getAsTools(
        //     useChatStore.getState().currentSession().mask?.plugin || [],
        //   );
        return stream(
          chatPath,
          requestPayload,
          getHeaders(),
          tools as any,
          funcs,
          controller,
          // parseSSE
          (text: string, runTools: ChatMessageTool[]) => {
            // console.log("parseSSE", text, runTools);
            const json = JSON.parse(text);
            const choices = json.choices as Array<{
              delta: {
                content: string;
                tool_calls: ChatMessageTool[];
              };
            }>;
            const tool_calls = choices[0]?.delta?.tool_calls;
            if (tool_calls?.length > 0) {
              const index = tool_calls[0]?.index;
              const id = tool_calls[0]?.id;
              const args = tool_calls[0]?.function?.arguments;
              if (id) {
                runTools.push({
                  id,
                  type: tool_calls[0]?.type,
                  function: {
                    name: tool_calls[0]?.function?.name as string,
                    arguments: args,
                  },
                });
              } else {
                // @ts-ignore
                runTools[index]["function"]["arguments"] += args;
              }
            }
            return choices[0]?.delta?.content;
          },
          // processToolMessage, include tool_calls message and tool call results
          (
            requestPayload: RequestPayload,
            toolCallMessage: any,
            toolCallResult: any[],
          ) => {
            // @ts-ignore
            requestPayload?.messages?.splice(
              // @ts-ignore
              requestPayload?.messages?.length,
              0,
              toolCallMessage,
              ...toolCallResult,
            );
          },
          options,
        );
      } else {
        const res = await fetch(chatPath, chatPayload);
        clearTimeout(requestTimeoutId);

        const resJson = await res.json();
        const message = this.extractMessage(resJson);
        options.onFinish(message, res);
      }
    } catch (e) {
      console.log("[Request] failed to make a chat request", e);
      options.onError?.(e as Error);
    }
  }
  async usage() {
    return {
      used: 0,
      total: 0,
    };
  }

  async models(): Promise<LLMModel[]> {
    return [];
  }
}
@@ -1,11 +1,5 @@
 /* eslint-disable @next/next/no-img-element */
-import {
-  ChatMessage,
-  ModelType,
-  useAccessStore,
-  useAppConfig,
-  useChatStore,
-} from "../store";
+import { ChatMessage, useAppConfig, useChatStore } from "../store";
 import Locale from "../locales";
 import styles from "./exporter.module.scss";
 import {
@@ -524,18 +518,6 @@ export function ImagePreviewer(props: {
       }
     };
-
-    const markdownImageUrlCorsProcess = (markdownContent: string) => {
-      const updatedContent = markdownContent.replace(
-        /!\[.*?\]\((.*?)\)/g,
-        (match, url) => {
-          if (!url.startsWith("http")) return `![](${url})`;
-          const updatedURL = `/api/cors?url=${encodeURIComponent(url)}`;
-          return `![](${updatedURL})`;
-        },
-      );
-      return updatedContent;
-    };

     return (
       <div className={styles["image-previewer"]}>
         <PreviewActions
@@ -561,7 +543,7 @@ export function ImagePreviewer(props: {
           <div>
             <div className={styles["main-title"]}>NextChat</div>
             <div className={styles["sub-title"]}>
-              github.com/Yidadaa/ChatGPT-Next-Web
+              github.com/ChatGPTNextWeb/ChatGPT-Next-Web
             </div>
             <div className={styles["icons"]}>
               <ExportAvatar avatar={config.avatar} />
@@ -603,6 +585,7 @@ export function ImagePreviewer(props: {
               <Markdown
                 content={getMessageTextContent(m)}
                 fontSize={config.fontSize}
+                fontFamily={config.fontFamily}
                 defaultShow
               />
               {getMessageImages(m).length == 1 && (

@@ -9,6 +9,7 @@ interface InputRangeProps {
   min: string;
   max: string;
   step: string;
+  aria: string;
 }

 export function InputRange({
@@ -19,11 +20,13 @@ export function InputRange({
   min,
   max,
   step,
+  aria,
 }: InputRangeProps) {
   return (
     <div className={styles["input-range"] + ` ${className ?? ""}`}>
       {title || value}
       <input
+        aria-label={aria}
         type="range"
         title={title}
         value={value}
@@ -48,14 +48,17 @@ import Locale, {
   changeLang,
   getLang,
 } from "../locales";
-import { copyToClipboard } from "../utils";
+import { copyToClipboard, clientUpdate, semverCompare } from "../utils";
 import Link from "next/link";
 import {
   Anthropic,
   Azure,
   Baidu,
+  Tencent,
   ByteDance,
   Alibaba,
+  Moonshot,
+  XAI,
   Google,
   GoogleSafetySettingsThreshold,
   OPENAI_BASE_URL,
@@ -65,6 +68,9 @@ import {
   ServiceProvider,
   SlotID,
   UPDATE_URL,
+  Stability,
+  Iflytek,
+  ChatGLM,
 } from "../constant";
 import { Prompt, SearchService, usePromptStore } from "../store/prompt";
 import { ErrorBoundary } from "./error";
@@ -245,6 +251,7 @@ function DangerItems() {
         subTitle={Locale.Settings.Danger.Reset.SubTitle}
       >
         <IconButton
+          aria={Locale.Settings.Danger.Reset.Title}
           text={Locale.Settings.Danger.Reset.Action}
           onClick={async () => {
             if (await showConfirm(Locale.Settings.Danger.Reset.Confirm)) {
@@ -259,6 +266,7 @@ function DangerItems() {
         subTitle={Locale.Settings.Danger.Clear.SubTitle}
       >
         <IconButton
+          aria={Locale.Settings.Danger.Clear.Title}
           text={Locale.Settings.Danger.Clear.Action}
           onClick={async () => {
             if (await showConfirm(Locale.Settings.Danger.Clear.Confirm)) {
@@ -512,6 +520,7 @@ function SyncItems() {
       >
         <div style={{ display: "flex" }}>
           <IconButton
+            aria={Locale.Settings.Sync.CloudState + Locale.UI.Config}
             icon={<ConfigIcon />}
             text={Locale.UI.Config}
             onClick={() => {
@@ -542,6 +551,7 @@ function SyncItems() {
       >
         <div style={{ display: "flex" }}>
           <IconButton
+            aria={Locale.Settings.Sync.LocalState + Locale.UI.Export}
             icon={<UploadIcon />}
             text={Locale.UI.Export}
             onClick={() => {
@@ -549,6 +559,7 @@ function SyncItems() {
             }}
           />
           <IconButton
+            aria={Locale.Settings.Sync.LocalState + Locale.UI.Import}
             icon={<DownloadIcon />}
             text={Locale.UI.Import}
             onClick={() => {
@@ -576,7 +587,7 @@ export function Settings() {
   const [checkingUpdate, setCheckingUpdate] = useState(false);
   const currentVersion = updateStore.formatVersion(updateStore.version);
   const remoteId = updateStore.formatVersion(updateStore.remoteVersion);
-  const hasNewVersion = currentVersion !== remoteId;
+  const hasNewVersion = semverCompare(currentVersion, remoteId) === -1;
   const updateUrl = getClientConfig()?.isApp ? RELEASE_URL : UPDATE_URL;

   function checkUpdate(force = false) {
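// Illustration (not from the upstream files): the hasNewVersion change above swaps a plain
// string inequality for a semantic-version comparison, so a locally equal or newer build is
// no longer reported as an available update. semverCompare is imported from "../utils", but
// its body is not shown in this diff; the sketch below is only an assumed shape (compare
// dot-separated numeric parts, return -1/0/1), not the repo's actual implementation.
function semverCompareSketch(a: string, b: string): number {
  const pa = a.replace(/^v/, "").split(".").map(Number);
  const pb = b.replace(/^v/, "").split(".").map(Number);
  for (let i = 0; i < Math.max(pa.length, pb.length); i++) {
    const x = pa[i] ?? 0;
    const y = pb[i] ?? 0;
    if (x !== y) return x < y ? -1 : 1;
  }
  return 0;
}
// With the old check, "2.15.9" !== "2.15.10" already counted as a new version; with a
// semver comparison it only counts when the remote version is actually higher:
//   semverCompareSketch("2.15.9", "2.15.10") === -1  -> hasNewVersion is true
//   semverCompareSketch("2.16.0", "2.15.10") === 1   -> hasNewVersion is false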
@@ -686,6 +697,7 @@ export function Settings() {
           subTitle={Locale.Settings.Access.CustomEndpoint.SubTitle}
         >
           <input
+            aria-label={Locale.Settings.Access.CustomEndpoint.Title}
             type="checkbox"
             checked={accessStore.useCustomConfig}
             onChange={(e) =>
@@ -705,6 +717,7 @@ export function Settings() {
           subTitle={Locale.Settings.Access.OpenAI.Endpoint.SubTitle}
         >
           <input
+            aria-label={Locale.Settings.Access.OpenAI.Endpoint.Title}
             type="text"
             value={accessStore.openaiUrl}
             placeholder={OPENAI_BASE_URL}
@@ -720,6 +733,8 @@ export function Settings() {
           subTitle={Locale.Settings.Access.OpenAI.ApiKey.SubTitle}
         >
           <PasswordInput
+            aria={Locale.Settings.ShowPassword}
+            aria-label={Locale.Settings.Access.OpenAI.ApiKey.Title}
             value={accessStore.openaiApiKey}
             type="text"
             placeholder={Locale.Settings.Access.OpenAI.ApiKey.Placeholder}
@@ -743,6 +758,7 @@ export function Settings() {
           }
         >
           <input
+            aria-label={Locale.Settings.Access.Azure.Endpoint.Title}
             type="text"
             value={accessStore.azureUrl}
             placeholder={Azure.ExampleEndpoint}
@@ -758,6 +774,7 @@ export function Settings() {
           subTitle={Locale.Settings.Access.Azure.ApiKey.SubTitle}
         >
           <PasswordInput
+            aria-label={Locale.Settings.Access.Azure.ApiKey.Title}
             value={accessStore.azureApiKey}
             type="text"
             placeholder={Locale.Settings.Access.Azure.ApiKey.Placeholder}
@@ -773,6 +790,7 @@ export function Settings() {
           subTitle={Locale.Settings.Access.Azure.ApiVerion.SubTitle}
         >
           <input
+            aria-label={Locale.Settings.Access.Azure.ApiVerion.Title}
             type="text"
             value={accessStore.azureApiVersion}
             placeholder="2023-08-01-preview"
@@ -797,6 +815,7 @@ export function Settings() {
           }
         >
           <input
+            aria-label={Locale.Settings.Access.Google.Endpoint.Title}
             type="text"
             value={accessStore.googleUrl}
             placeholder={Google.ExampleEndpoint}
@@ -812,6 +831,7 @@ export function Settings() {
           subTitle={Locale.Settings.Access.Google.ApiKey.SubTitle}
         >
           <PasswordInput
+            aria-label={Locale.Settings.Access.Google.ApiKey.Title}
             value={accessStore.googleApiKey}
             type="text"
             placeholder={Locale.Settings.Access.Google.ApiKey.Placeholder}
@@ -827,6 +847,7 @@ export function Settings() {
           subTitle={Locale.Settings.Access.Google.ApiVersion.SubTitle}
         >
           <input
+            aria-label={Locale.Settings.Access.Google.ApiVersion.Title}
             type="text"
             value={accessStore.googleApiVersion}
             placeholder="2023-08-01-preview"
@@ -842,6 +863,7 @@ export function Settings() {
           subTitle={Locale.Settings.Access.Google.GoogleSafetySettings.SubTitle}
         >
           <Select
+            aria-label={Locale.Settings.Access.Google.GoogleSafetySettings.Title}
             value={accessStore.googleSafetySettings}
             onChange={(e) => {
               accessStore.update(
@@ -872,6 +894,7 @@ export function Settings() {
           }
         >
           <input
+            aria-label={Locale.Settings.Access.Anthropic.Endpoint.Title}
             type="text"
             value={accessStore.anthropicUrl}
             placeholder={Anthropic.ExampleEndpoint}
@@ -887,6 +910,7 @@ export function Settings() {
           subTitle={Locale.Settings.Access.Anthropic.ApiKey.SubTitle}
         >
           <PasswordInput
+            aria-label={Locale.Settings.Access.Anthropic.ApiKey.Title}
             value={accessStore.anthropicApiKey}
             type="text"
             placeholder={Locale.Settings.Access.Anthropic.ApiKey.Placeholder}
@@ -902,6 +926,7 @@ export function Settings() {
           subTitle={Locale.Settings.Access.Anthropic.ApiVerion.SubTitle}
         >
           <input
+            aria-label={Locale.Settings.Access.Anthropic.ApiVerion.Title}
             type="text"
             value={accessStore.anthropicApiVersion}
             placeholder={Anthropic.Vision}
@@ -923,6 +948,7 @@ export function Settings() {
           subTitle={Locale.Settings.Access.Baidu.Endpoint.SubTitle}
         >
           <input
+            aria-label={Locale.Settings.Access.Baidu.Endpoint.Title}
             type="text"
             value={accessStore.baiduUrl}
             placeholder={Baidu.ExampleEndpoint}
@@ -938,6 +964,7 @@ export function Settings() {
           subTitle={Locale.Settings.Access.Baidu.ApiKey.SubTitle}
         >
           <PasswordInput
+            aria-label={Locale.Settings.Access.Baidu.ApiKey.Title}
             value={accessStore.baiduApiKey}
             type="text"
             placeholder={Locale.Settings.Access.Baidu.ApiKey.Placeholder}
@@ -953,6 +980,7 @@ export function Settings() {
           subTitle={Locale.Settings.Access.Baidu.SecretKey.SubTitle}
         >
           <PasswordInput
+            aria-label={Locale.Settings.Access.Baidu.SecretKey.Title}
             value={accessStore.baiduSecretKey}
             type="text"
             placeholder={Locale.Settings.Access.Baidu.SecretKey.Placeholder}
@ -966,6 +994,60 @@ export function Settings() {
|
||||||
</>
|
</>
|
||||||
);
|
);
|
||||||
|
|
||||||
|
const tencentConfigComponent = accessStore.provider ===
|
||||||
|
ServiceProvider.Tencent && (
|
||||||
|
<>
|
||||||
|
<ListItem
|
||||||
|
title={Locale.Settings.Access.Tencent.Endpoint.Title}
|
||||||
|
subTitle={Locale.Settings.Access.Tencent.Endpoint.SubTitle}
|
||||||
|
>
|
||||||
|
<input
|
||||||
|
aria-label={Locale.Settings.Access.Tencent.Endpoint.Title}
|
||||||
|
type="text"
|
||||||
|
value={accessStore.tencentUrl}
|
||||||
|
placeholder={Tencent.ExampleEndpoint}
|
||||||
|
onChange={(e) =>
|
||||||
|
accessStore.update(
|
||||||
|
(access) => (access.tencentUrl = e.currentTarget.value),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
></input>
|
||||||
|
</ListItem>
|
||||||
|
<ListItem
|
||||||
|
title={Locale.Settings.Access.Tencent.ApiKey.Title}
|
||||||
|
subTitle={Locale.Settings.Access.Tencent.ApiKey.SubTitle}
|
||||||
|
>
|
||||||
|
<PasswordInput
|
||||||
|
aria-label={Locale.Settings.Access.Tencent.ApiKey.Title}
|
||||||
|
value={accessStore.tencentSecretId}
|
||||||
|
type="text"
|
||||||
|
placeholder={Locale.Settings.Access.Tencent.ApiKey.Placeholder}
|
||||||
|
onChange={(e) => {
|
||||||
|
accessStore.update(
|
||||||
|
(access) => (access.tencentSecretId = e.currentTarget.value),
|
||||||
|
);
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
</ListItem>
|
||||||
|
<ListItem
|
||||||
|
title={Locale.Settings.Access.Tencent.SecretKey.Title}
|
||||||
|
subTitle={Locale.Settings.Access.Tencent.SecretKey.SubTitle}
|
||||||
|
>
|
||||||
|
<PasswordInput
|
||||||
|
aria-label={Locale.Settings.Access.Tencent.SecretKey.Title}
|
||||||
|
value={accessStore.tencentSecretKey}
|
||||||
|
type="text"
|
||||||
|
placeholder={Locale.Settings.Access.Tencent.SecretKey.Placeholder}
|
||||||
|
onChange={(e) => {
|
||||||
|
accessStore.update(
|
||||||
|
(access) => (access.tencentSecretKey = e.currentTarget.value),
|
||||||
|
);
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
</ListItem>
|
||||||
|
</>
|
||||||
|
);
|
||||||
|
|
||||||
const byteDanceConfigComponent = accessStore.provider ===
|
const byteDanceConfigComponent = accessStore.provider ===
|
||||||
ServiceProvider.ByteDance && (
|
ServiceProvider.ByteDance && (
|
||||||
<>
|
<>
|
||||||
|
@ -977,6 +1059,7 @@ export function Settings() {
|
||||||
}
|
}
|
||||||
>
|
>
|
||||||
<input
|
<input
|
||||||
|
aria-label={Locale.Settings.Access.ByteDance.Endpoint.Title}
|
||||||
type="text"
|
type="text"
|
||||||
value={accessStore.bytedanceUrl}
|
value={accessStore.bytedanceUrl}
|
||||||
placeholder={ByteDance.ExampleEndpoint}
|
placeholder={ByteDance.ExampleEndpoint}
|
||||||
|
@ -992,6 +1075,7 @@ export function Settings() {
|
||||||
subTitle={Locale.Settings.Access.ByteDance.ApiKey.SubTitle}
|
subTitle={Locale.Settings.Access.ByteDance.ApiKey.SubTitle}
|
||||||
>
|
>
|
||||||
<PasswordInput
|
<PasswordInput
|
||||||
|
aria-label={Locale.Settings.Access.ByteDance.ApiKey.Title}
|
||||||
value={accessStore.bytedanceApiKey}
|
value={accessStore.bytedanceApiKey}
|
||||||
type="text"
|
type="text"
|
||||||
placeholder={Locale.Settings.Access.ByteDance.ApiKey.Placeholder}
|
placeholder={Locale.Settings.Access.ByteDance.ApiKey.Placeholder}
|
||||||
|
@ -1016,6 +1100,7 @@ export function Settings() {
|
||||||
}
|
}
|
||||||
>
|
>
|
||||||
<input
|
<input
|
||||||
|
aria-label={Locale.Settings.Access.Alibaba.Endpoint.Title}
|
||||||
type="text"
|
type="text"
|
||||||
value={accessStore.alibabaUrl}
|
value={accessStore.alibabaUrl}
|
||||||
placeholder={Alibaba.ExampleEndpoint}
|
placeholder={Alibaba.ExampleEndpoint}
|
||||||
|
@ -1031,6 +1116,7 @@ export function Settings() {
|
||||||
subTitle={Locale.Settings.Access.Alibaba.ApiKey.SubTitle}
|
subTitle={Locale.Settings.Access.Alibaba.ApiKey.SubTitle}
|
||||||
>
|
>
|
||||||
<PasswordInput
|
<PasswordInput
|
||||||
|
aria-label={Locale.Settings.Access.Alibaba.ApiKey.Title}
|
||||||
value={accessStore.alibabaApiKey}
|
value={accessStore.alibabaApiKey}
|
||||||
type="text"
|
type="text"
|
||||||
placeholder={Locale.Settings.Access.Alibaba.ApiKey.Placeholder}
|
placeholder={Locale.Settings.Access.Alibaba.ApiKey.Placeholder}
|
||||||
|
@ -1044,6 +1130,225 @@ export function Settings() {
|
||||||
</>
|
</>
|
||||||
);
|
);
|
||||||
|
|
||||||
|
const moonshotConfigComponent = accessStore.provider ===
|
||||||
|
ServiceProvider.Moonshot && (
|
||||||
|
<>
|
||||||
|
<ListItem
|
||||||
|
title={Locale.Settings.Access.Moonshot.Endpoint.Title}
|
||||||
|
subTitle={
|
||||||
|
Locale.Settings.Access.Moonshot.Endpoint.SubTitle +
|
||||||
|
Moonshot.ExampleEndpoint
|
||||||
|
}
|
||||||
|
>
|
||||||
|
<input
|
||||||
|
aria-label={Locale.Settings.Access.Moonshot.Endpoint.Title}
|
||||||
|
type="text"
|
||||||
|
value={accessStore.moonshotUrl}
|
||||||
|
placeholder={Moonshot.ExampleEndpoint}
|
||||||
|
onChange={(e) =>
|
||||||
|
accessStore.update(
|
||||||
|
(access) => (access.moonshotUrl = e.currentTarget.value),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
></input>
|
||||||
|
</ListItem>
|
||||||
|
<ListItem
|
||||||
|
title={Locale.Settings.Access.Moonshot.ApiKey.Title}
|
||||||
|
subTitle={Locale.Settings.Access.Moonshot.ApiKey.SubTitle}
|
||||||
|
>
|
||||||
|
<PasswordInput
|
||||||
|
aria-label={Locale.Settings.Access.Moonshot.ApiKey.Title}
|
||||||
|
value={accessStore.moonshotApiKey}
|
||||||
|
type="text"
|
||||||
|
placeholder={Locale.Settings.Access.Moonshot.ApiKey.Placeholder}
|
||||||
|
onChange={(e) => {
|
||||||
|
accessStore.update(
|
||||||
|
(access) => (access.moonshotApiKey = e.currentTarget.value),
|
||||||
|
);
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
</ListItem>
|
||||||
|
</>
|
||||||
|
);
|
||||||
|
|
||||||
|
const XAIConfigComponent = accessStore.provider === ServiceProvider.XAI && (
|
||||||
|
<>
|
||||||
|
<ListItem
|
||||||
|
title={Locale.Settings.Access.XAI.Endpoint.Title}
|
||||||
|
subTitle={
|
||||||
|
Locale.Settings.Access.XAI.Endpoint.SubTitle + XAI.ExampleEndpoint
|
||||||
|
}
|
||||||
|
>
|
||||||
|
<input
|
||||||
|
aria-label={Locale.Settings.Access.XAI.Endpoint.Title}
|
||||||
|
type="text"
|
||||||
|
value={accessStore.xaiUrl}
|
||||||
|
placeholder={XAI.ExampleEndpoint}
|
||||||
|
onChange={(e) =>
|
||||||
|
accessStore.update(
|
||||||
|
(access) => (access.xaiUrl = e.currentTarget.value),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
></input>
|
||||||
|
</ListItem>
|
||||||
|
<ListItem
|
||||||
|
title={Locale.Settings.Access.XAI.ApiKey.Title}
|
||||||
|
subTitle={Locale.Settings.Access.XAI.ApiKey.SubTitle}
|
||||||
|
>
|
||||||
|
<PasswordInput
|
||||||
|
aria-label={Locale.Settings.Access.XAI.ApiKey.Title}
|
||||||
|
value={accessStore.xaiApiKey}
|
||||||
|
type="text"
|
||||||
|
placeholder={Locale.Settings.Access.XAI.ApiKey.Placeholder}
|
||||||
|
onChange={(e) => {
|
||||||
|
accessStore.update(
|
||||||
|
(access) => (access.xaiApiKey = e.currentTarget.value),
|
||||||
|
);
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
</ListItem>
|
||||||
|
</>
|
||||||
|
);
|
||||||
|
|
||||||
|
const chatglmConfigComponent = accessStore.provider ===
|
||||||
|
ServiceProvider.ChatGLM && (
|
||||||
|
<>
|
||||||
|
<ListItem
|
||||||
|
title={Locale.Settings.Access.ChatGLM.Endpoint.Title}
|
||||||
|
subTitle={
|
||||||
|
Locale.Settings.Access.ChatGLM.Endpoint.SubTitle +
|
||||||
|
ChatGLM.ExampleEndpoint
|
||||||
|
}
|
||||||
|
>
|
||||||
|
<input
|
||||||
|
aria-label={Locale.Settings.Access.ChatGLM.Endpoint.Title}
|
||||||
|
type="text"
|
||||||
|
value={accessStore.chatglmUrl}
|
||||||
|
placeholder={ChatGLM.ExampleEndpoint}
|
||||||
|
onChange={(e) =>
|
||||||
|
accessStore.update(
|
||||||
|
(access) => (access.chatglmUrl = e.currentTarget.value),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
></input>
|
||||||
|
</ListItem>
|
||||||
|
<ListItem
|
||||||
|
title={Locale.Settings.Access.ChatGLM.ApiKey.Title}
|
||||||
|
subTitle={Locale.Settings.Access.ChatGLM.ApiKey.SubTitle}
|
||||||
|
>
|
||||||
|
<PasswordInput
|
||||||
|
aria-label={Locale.Settings.Access.ChatGLM.ApiKey.Title}
|
||||||
|
value={accessStore.chatglmApiKey}
|
||||||
|
type="text"
|
||||||
|
placeholder={Locale.Settings.Access.ChatGLM.ApiKey.Placeholder}
|
||||||
|
onChange={(e) => {
|
||||||
|
accessStore.update(
|
||||||
|
(access) => (access.chatglmApiKey = e.currentTarget.value),
|
||||||
|
);
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
</ListItem>
|
||||||
|
</>
|
||||||
|
);
|
||||||
|
|
||||||
|
const stabilityConfigComponent = accessStore.provider ===
|
||||||
|
ServiceProvider.Stability && (
|
||||||
|
<>
|
||||||
|
<ListItem
|
||||||
|
title={Locale.Settings.Access.Stability.Endpoint.Title}
|
||||||
|
subTitle={
|
||||||
|
Locale.Settings.Access.Stability.Endpoint.SubTitle +
|
||||||
|
Stability.ExampleEndpoint
|
||||||
|
}
|
||||||
|
>
|
||||||
|
<input
|
||||||
|
aria-label={Locale.Settings.Access.Stability.Endpoint.Title}
|
||||||
|
type="text"
|
||||||
|
value={accessStore.stabilityUrl}
|
||||||
|
placeholder={Stability.ExampleEndpoint}
|
||||||
|
onChange={(e) =>
|
||||||
|
accessStore.update(
|
||||||
|
(access) => (access.stabilityUrl = e.currentTarget.value),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
></input>
|
||||||
|
</ListItem>
|
||||||
|
<ListItem
|
||||||
|
title={Locale.Settings.Access.Stability.ApiKey.Title}
|
||||||
|
subTitle={Locale.Settings.Access.Stability.ApiKey.SubTitle}
|
||||||
|
>
|
||||||
|
<PasswordInput
|
||||||
|
aria-label={Locale.Settings.Access.Stability.ApiKey.Title}
|
||||||
|
value={accessStore.stabilityApiKey}
|
||||||
|
type="text"
|
||||||
|
placeholder={Locale.Settings.Access.Stability.ApiKey.Placeholder}
|
||||||
|
onChange={(e) => {
|
||||||
|
accessStore.update(
|
||||||
|
(access) => (access.stabilityApiKey = e.currentTarget.value),
|
||||||
|
);
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
</ListItem>
|
||||||
|
</>
|
||||||
|
);
|
||||||
|
const lflytekConfigComponent = accessStore.provider ===
|
||||||
|
ServiceProvider.Iflytek && (
|
||||||
|
<>
|
||||||
|
<ListItem
|
||||||
|
title={Locale.Settings.Access.Iflytek.Endpoint.Title}
|
||||||
|
subTitle={
|
||||||
|
Locale.Settings.Access.Iflytek.Endpoint.SubTitle +
|
||||||
|
Iflytek.ExampleEndpoint
|
||||||
|
}
|
||||||
|
>
|
||||||
|
<input
|
||||||
|
aria-label={Locale.Settings.Access.Iflytek.Endpoint.Title}
|
||||||
|
type="text"
|
||||||
|
value={accessStore.iflytekUrl}
|
||||||
|
placeholder={Iflytek.ExampleEndpoint}
|
||||||
|
onChange={(e) =>
|
||||||
|
accessStore.update(
|
||||||
|
(access) => (access.iflytekUrl = e.currentTarget.value),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
></input>
|
||||||
|
</ListItem>
|
||||||
|
<ListItem
|
||||||
|
title={Locale.Settings.Access.Iflytek.ApiKey.Title}
|
||||||
|
subTitle={Locale.Settings.Access.Iflytek.ApiKey.SubTitle}
|
||||||
|
>
|
||||||
|
<PasswordInput
|
||||||
|
aria-label={Locale.Settings.Access.Iflytek.ApiKey.Title}
|
||||||
|
value={accessStore.iflytekApiKey}
|
||||||
|
type="text"
|
||||||
|
placeholder={Locale.Settings.Access.Iflytek.ApiKey.Placeholder}
|
||||||
|
onChange={(e) => {
|
||||||
|
accessStore.update(
|
||||||
|
(access) => (access.iflytekApiKey = e.currentTarget.value),
|
||||||
|
);
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
</ListItem>
|
||||||
|
|
||||||
|
<ListItem
|
||||||
|
title={Locale.Settings.Access.Iflytek.ApiSecret.Title}
|
||||||
|
subTitle={Locale.Settings.Access.Iflytek.ApiSecret.SubTitle}
|
||||||
|
>
|
||||||
|
<PasswordInput
|
||||||
|
aria-label={Locale.Settings.Access.Iflytek.ApiSecret.Title}
|
||||||
|
value={accessStore.iflytekApiSecret}
|
||||||
|
type="text"
|
||||||
|
placeholder={Locale.Settings.Access.Iflytek.ApiSecret.Placeholder}
|
||||||
|
onChange={(e) => {
|
||||||
|
accessStore.update(
|
||||||
|
(access) => (access.iflytekApiSecret = e.currentTarget.value),
|
||||||
|
);
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
</ListItem>
|
||||||
|
</>
|
||||||
|
);
|
||||||
|
|
||||||
  return (
    <ErrorBoundary>
      <div className="window-header" data-tauri-drag-region>
@@ -1060,6 +1365,7 @@ export function Settings() {
          <div className="window-action-button"></div>
          <div className="window-action-button">
            <IconButton
+              aria={Locale.UI.Close}
              icon={<CloseIcon />}
              onClick={() => navigate(Path.Home)}
              bordered
@@ -1083,6 +1389,8 @@ export function Settings() {
            open={showEmojiPicker}
          >
            <div
+              aria-label={Locale.Settings.Avatar}
+              tabIndex={0}
              className={styles.avatar}
              onClick={() => {
                setShowEmojiPicker(!showEmojiPicker);
@@ -1106,9 +1414,17 @@ export function Settings() {
            {checkingUpdate ? (
              <LoadingIcon />
            ) : hasNewVersion ? (
-              <Link href={updateUrl} target="_blank" className="link">
-                {Locale.Settings.Update.GoToUpdate}
-              </Link>
+              clientConfig?.isApp ? (
+                <IconButton
+                  icon={<ResetIcon></ResetIcon>}
+                  text={Locale.Settings.Update.GoToUpdate}
+                  onClick={() => clientUpdate()}
+                />
+              ) : (
+                <Link href={updateUrl} target="_blank" className="link">
+                  {Locale.Settings.Update.GoToUpdate}
+                </Link>
+              )
            ) : (
              <IconButton
                icon={<ResetIcon></ResetIcon>}
@@ -1120,6 +1436,7 @@ export function Settings() {

        <ListItem title={Locale.Settings.SendKey}>
          <Select
+            aria-label={Locale.Settings.SendKey}
            value={config.submitKey}
            onChange={(e) => {
              updateConfig(
@@ -1138,6 +1455,7 @@ export function Settings() {

        <ListItem title={Locale.Settings.Theme}>
          <Select
+            aria-label={Locale.Settings.Theme}
            value={config.theme}
            onChange={(e) => {
              updateConfig(
@@ -1155,6 +1473,7 @@ export function Settings() {

        <ListItem title={Locale.Settings.Lang.Name}>
          <Select
+            aria-label={Locale.Settings.Lang.Name}
            value={getLang()}
            onChange={(e) => {
              changeLang(e.target.value as any);
@@ -1173,6 +1492,7 @@ export function Settings() {
          subTitle={Locale.Settings.FontSize.SubTitle}
        >
          <InputRange
+            aria={Locale.Settings.FontSize.Title}
            title={`${config.fontSize ?? 14}px`}
            value={config.fontSize}
            min="12"
@@ -1187,11 +1507,29 @@ export function Settings() {
          ></InputRange>
        </ListItem>

+        <ListItem
+          title={Locale.Settings.FontFamily.Title}
+          subTitle={Locale.Settings.FontFamily.SubTitle}
+        >
+          <input
+            aria-label={Locale.Settings.FontFamily.Title}
+            type="text"
+            value={config.fontFamily}
+            placeholder={Locale.Settings.FontFamily.Placeholder}
+            onChange={(e) =>
+              updateConfig(
+                (config) => (config.fontFamily = e.currentTarget.value),
+              )
+            }
+          ></input>
+        </ListItem>
+
        <ListItem
          title={Locale.Settings.AutoGenerateTitle.Title}
          subTitle={Locale.Settings.AutoGenerateTitle.SubTitle}
        >
          <input
+            aria-label={Locale.Settings.AutoGenerateTitle.Title}
            type="checkbox"
            disabled={!!process.env.NEXT_PUBLIC_DISABLE_AUTOGENERATETITLE}
            checked={
@@ -1212,6 +1550,7 @@ export function Settings() {
          subTitle={Locale.Settings.SendPreviewBubble.SubTitle}
        >
          <input
+            aria-label={Locale.Settings.SendPreviewBubble.Title}
            type="checkbox"
            checked={config.sendPreviewBubble}
            onChange={(e) =>
@@ -1265,6 +1604,7 @@ export function Settings() {
          subTitle={Locale.Settings.Mask.Splash.SubTitle}
        >
          <input
+            aria-label={Locale.Settings.Mask.Splash.Title}
            type="checkbox"
            checked={!config.dontShowMaskSplashScreen}
            onChange={(e) =>
@@ -1282,6 +1622,7 @@ export function Settings() {
          subTitle={Locale.Settings.Mask.Builtin.SubTitle}
        >
          <input
+            aria-label={Locale.Settings.Mask.Builtin.Title}
            type="checkbox"
            checked={config.hideBuiltinMasks}
            onChange={(e) =>
@@ -1300,6 +1641,7 @@ export function Settings() {
          subTitle={Locale.Settings.Prompt.Disable.SubTitle}
        >
          <input
+            aria-label={Locale.Settings.Prompt.Disable.Title}
            type="checkbox"
            checked={config.disablePromptHint}
            onChange={(e) =>
@@ -1319,6 +1661,7 @@ export function Settings() {
          )}
        >
          <IconButton
+            aria={Locale.Settings.Prompt.List + Locale.Settings.Prompt.Edit}
            icon={<EditIcon />}
            text={Locale.Settings.Prompt.Edit}
            onClick={() => setShowPromptModal(true)}
@@ -1340,6 +1683,7 @@ export function Settings() {
          subTitle={Locale.Settings.Access.Provider.SubTitle}
        >
          <Select
+            aria-label={Locale.Settings.Access.Provider.Title}
            value={accessStore.provider}
            onChange={(e) => {
              accessStore.update(
@@ -1364,6 +1708,12 @@ export function Settings() {
              {baiduConfigComponent}
              {byteDanceConfigComponent}
              {alibabaConfigComponent}
+              {tencentConfigComponent}
+              {moonshotConfigComponent}
+              {stabilityConfigComponent}
+              {lflytekConfigComponent}
+              {XAIConfigComponent}
+              {chatglmConfigComponent}
            </>
          )}
        </>
@@ -1400,6 +1750,7 @@ export function Settings() {
          subTitle={Locale.Settings.Access.CustomModel.SubTitle}
        >
          <input
+            aria-label={Locale.Settings.Access.CustomModel.Title}
            type="text"
            value={config.customModels}
            placeholder="model1,model2,model3"
@@ -129,6 +129,7 @@ export const getServerSideConfig = () => {
  const isMoonshot = !!process.env.MOONSHOT_API_KEY;
  const isIflytek = !!process.env.IFLYTEK_API_KEY;
  const isXAI = !!process.env.XAI_API_KEY;
+  const isChatGLM = !!process.env.CHATGLM_API_KEY;
  // const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? "";
  // const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
  // const randomIndex = Math.floor(Math.random() * apiKeys.length);
@@ -195,6 +196,10 @@ export const getServerSideConfig = () => {
    xaiUrl: process.env.XAI_URL,
    xaiApiKey: getApiKey(process.env.XAI_API_KEY),

+    isChatGLM,
+    chatglmUrl: process.env.CHATGLM_URL,
+    chatglmApiKey: getApiKey(process.env.CHATGLM_API_KEY),
+
    cloudflareAccountId: process.env.CLOUDFLARE_ACCOUNT_ID,
    cloudflareKVNamespaceId: process.env.CLOUDFLARE_KV_NAMESPACE_ID,
    cloudflareKVApiKey: getApiKey(process.env.CLOUDFLARE_KV_API_KEY),
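For context, a minimal sketch of how these new server-side ChatGLM fields could be consumed by an API route. The handler name, response shape, and the Bearer-style auth header are assumptions for illustration only, not code from this commit.

// Hypothetical sketch: proxy a chat request to ChatGLM using the server config above.
import { NextRequest, NextResponse } from "next/server";
import { getServerSideConfig } from "@/app/config/server";
import { ChatGLM, CHATGLM_BASE_URL } from "@/app/constant";

async function handleChatGlmSketch(req: NextRequest) {
  const serverConfig = getServerSideConfig();
  if (!serverConfig.isChatGLM) {
    // No CHATGLM_API_KEY configured on the server.
    return NextResponse.json(
      { error: true, msg: "ChatGLM is not configured on this server" },
      { status: 403 },
    );
  }
  const baseUrl = serverConfig.chatglmUrl || CHATGLM_BASE_URL;
  // Assumes the upstream accepts a standard Bearer token; adjust if needed.
  const res = await fetch(`${baseUrl}${ChatGLM.ChatPath}`, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${serverConfig.chatglmApiKey}`,
    },
    body: await req.text(),
  });
  return new NextResponse(res.body, { status: res.status });
}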
app/constant.ts
@@ -1,6 +1,7 @@
 export const OWNER = "Hk-Gosuto";
 export const REPO = "ChatGPT-Next-Web-LangChain";
 export const REPO_URL = `https://github.com/${OWNER}/${REPO}`;
+export const PLUGINS_REPO_URL = `https://github.com/${OWNER}/NextChat-Awesome-Plugins`;
 export const ISSUE_URL = `https://github.com/${OWNER}/${REPO}/issues`;
 export const UPDATE_URL = `${REPO_URL}#keep-updated`;
 export const RELEASE_URL = `${REPO_URL}/releases`;
@@ -8,11 +9,13 @@ export const FETCH_COMMIT_URL = `https://api.github.com/repos/${OWNER}/${REPO}/c
 export const FETCH_TAG_URL = `https://api.github.com/repos/${OWNER}/${REPO}/tags?per_page=1`;
 export const RUNTIME_CONFIG_DOM = "danger-runtime-config";

-export const DEFAULT_API_HOST = "https://api.nextchat.dev";
+export const STABILITY_BASE_URL = "https://api.stability.ai";

 export const OPENAI_BASE_URL = "https://api.openai.com";
-export const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/";
 export const ANTHROPIC_BASE_URL = "https://api.anthropic.com";

+export const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/";
+
 export const BAIDU_BASE_URL = "https://aip.baidubce.com";
 export const BAIDU_OATUH_URL = `${BAIDU_BASE_URL}/oauth/2.0/token`;

@@ -20,6 +23,15 @@ export const BYTEDANCE_BASE_URL = "https://ark.cn-beijing.volces.com";

 export const ALIBABA_BASE_URL = "https://dashscope.aliyuncs.com/api/";

+export const TENCENT_BASE_URL = "https://hunyuan.tencentcloudapi.com";
+
+export const MOONSHOT_BASE_URL = "https://api.moonshot.cn";
+export const IFLYTEK_BASE_URL = "https://spark-api-open.xf-yun.com";
+
+export const XAI_BASE_URL = "https://api.x.ai";
+
+export const CHATGLM_BASE_URL = "https://open.bigmodel.cn";
+
 export const CACHE_URL_PREFIX = "/api/cache";
 export const UPLOAD_URL = `${CACHE_URL_PREFIX}/upload`;

@@ -38,13 +50,18 @@ export enum ApiPath {
   Cors = "",
   Azure = "/api/azure",
   OpenAI = "/api/openai",
-  GoogleAI = "/api/google",
   Anthropic = "/api/anthropic",
   Google = "/api/google",
   Baidu = "/api/baidu",
   ByteDance = "/api/bytedance",
   Alibaba = "/api/alibaba",
+  Tencent = "/api/tencent",
+  Moonshot = "/api/moonshot",
+  Iflytek = "/api/iflytek",
+  Stability = "/api/stability",
   Artifacts = "/api/artifacts",
+  XAI = "/api/xai",
+  ChatGLM = "/api/chatglm",
 }

 export enum SlotID {
@@ -93,6 +110,12 @@ export enum ServiceProvider {
   Baidu = "Baidu",
   ByteDance = "ByteDance",
   Alibaba = "Alibaba",
+  Tencent = "Tencent",
+  Moonshot = "Moonshot",
+  Stability = "Stability",
+  Iflytek = "Iflytek",
+  XAI = "XAI",
+  ChatGLM = "ChatGLM",
 }

 // Google API safety settings, see https://ai.google.dev/gemini-api/docs/safety-settings
@@ -105,14 +128,25 @@ export enum GoogleSafetySettingsThreshold {
 }

 export enum ModelProvider {
+  Stability = "Stability",
   GPT = "GPT",
   GeminiPro = "GeminiPro",
   Claude = "Claude",
   Ernie = "Ernie",
   Doubao = "Doubao",
   Qwen = "Qwen",
+  Hunyuan = "Hunyuan",
+  Moonshot = "Moonshot",
+  Iflytek = "Iflytek",
+  XAI = "XAI",
+  ChatGLM = "ChatGLM",
 }

+export const Stability = {
+  GeneratePath: "v2beta/stable-image/generate",
+  ExampleEndpoint: "https://api.stability.ai",
+};
+
 export const Anthropic = {
   ChatPath: "v1/messages",
   ChatPath1: "v1/complete",
@@ -174,6 +208,30 @@ export const Alibaba = {
   ChatPath: "v1/services/aigc/text-generation/generation",
 };

+export const Tencent = {
+  ExampleEndpoint: TENCENT_BASE_URL,
+};
+
+export const Moonshot = {
+  ExampleEndpoint: MOONSHOT_BASE_URL,
+  ChatPath: "v1/chat/completions",
+};
+
+export const Iflytek = {
+  ExampleEndpoint: IFLYTEK_BASE_URL,
+  ChatPath: "v1/chat/completions",
+};
+
+export const XAI = {
+  ExampleEndpoint: XAI_BASE_URL,
+  ChatPath: "v1/chat/completions",
+};
+
+export const ChatGLM = {
+  ExampleEndpoint: CHATGLM_BASE_URL,
+  ChatPath: "/api/paas/v4/chat/completions",
+};
+
 export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
 // export const DEFAULT_SYSTEM_TEMPLATE = `
 // You are ChatGPT, a large language model trained by {{ServiceProvider}}.
@@ -269,8 +327,12 @@ const anthropicModels = [
   "claude-2.1",
   "claude-3-sonnet-20240229",
   "claude-3-opus-20240229",
+  "claude-3-opus-latest",
   "claude-3-haiku-20240307",
   "claude-3-5-sonnet-20240620",
+  "claude-3-5-sonnet-20241022",
+  "claude-3-5-sonnet-latest",
+  "claude-3-5-haiku-latest",
 ];

 const baiduModels = [
@@ -306,68 +368,171 @@ const alibabaModes = [
   "qwen-max-longcontext",
 ];

+const tencentModels = [
+  "hunyuan-pro",
+  "hunyuan-standard",
+  "hunyuan-lite",
+  "hunyuan-role",
+  "hunyuan-functioncall",
+  "hunyuan-code",
+  "hunyuan-vision",
+];
+
+const moonshotModes = ["moonshot-v1-8k", "moonshot-v1-32k", "moonshot-v1-128k"];
+
+const iflytekModels = [
+  "general",
+  "generalv3",
+  "pro-128k",
+  "generalv3.5",
+  "4.0Ultra",
+];
+
+const xAIModes = ["grok-beta"];
+
+const chatglmModels = [
+  "glm-4-plus",
+  "glm-4-0520",
+  "glm-4",
+  "glm-4-air",
+  "glm-4-airx",
+  "glm-4-long",
+  "glm-4-flashx",
+  "glm-4-flash",
+];
+
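As a side note, a minimal sketch of how one of the base URLs and ChatPath constants above could be combined into a full request URL; the joinUrl helper is an illustrative assumption (not part of this commit), added mainly because some ChatPath values carry a leading slash while others do not.

// Illustrative helper: join a base URL and a chat path without doubling slashes.
function joinUrl(base: string, path: string): string {
  return `${base.replace(/\/+$/, "")}/${path.replace(/^\/+/, "")}`;
}

joinUrl(MOONSHOT_BASE_URL, Moonshot.ChatPath);
// -> "https://api.moonshot.cn/v1/chat/completions"
joinUrl(CHATGLM_BASE_URL, ChatGLM.ChatPath);
// -> "https://open.bigmodel.cn/api/paas/v4/chat/completions"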
+let seq = 1000; // built-in model sequence numbers start from 1000
 export const DEFAULT_MODELS = [
   ...openaiModels.map((name) => ({
     name,
     available: true,
+    sorted: seq++, // Global sequence sort(index)
     provider: {
       id: "openai",
       providerName: "OpenAI",
       providerType: "openai",
+      sorted: 1, // fixed, keeps the order consistent with the previous built-in list
     },
   })),
   ...openaiModels.map((name) => ({
     name,
     available: true,
+    sorted: seq++,
     provider: {
       id: "azure",
       providerName: "Azure",
       providerType: "azure",
+      sorted: 2,
     },
   })),
   ...googleModels.map((name) => ({
     name,
     available: true,
+    sorted: seq++,
     provider: {
       id: "google",
       providerName: "Google",
       providerType: "google",
+      sorted: 3,
     },
   })),
   ...anthropicModels.map((name) => ({
     name,
     available: true,
+    sorted: seq++,
     provider: {
       id: "anthropic",
       providerName: "Anthropic",
       providerType: "anthropic",
+      sorted: 4,
     },
   })),
   ...baiduModels.map((name) => ({
     name,
     available: true,
+    sorted: seq++,
     provider: {
       id: "baidu",
       providerName: "Baidu",
       providerType: "baidu",
+      sorted: 5,
     },
   })),
   ...bytedanceModels.map((name) => ({
     name,
     available: true,
+    sorted: seq++,
     provider: {
       id: "bytedance",
       providerName: "ByteDance",
       providerType: "bytedance",
+      sorted: 6,
     },
   })),
   ...alibabaModes.map((name) => ({
     name,
     available: true,
+    sorted: seq++,
     provider: {
       id: "alibaba",
       providerName: "Alibaba",
       providerType: "alibaba",
+      sorted: 7,
+    },
+  })),
+  ...tencentModels.map((name) => ({
+    name,
+    available: true,
+    sorted: seq++,
+    provider: {
+      id: "tencent",
+      providerName: "Tencent",
+      providerType: "tencent",
+      sorted: 8,
+    },
+  })),
+  ...moonshotModes.map((name) => ({
+    name,
+    available: true,
+    sorted: seq++,
+    provider: {
+      id: "moonshot",
+      providerName: "Moonshot",
+      providerType: "moonshot",
+      sorted: 9,
+    },
+  })),
+  ...iflytekModels.map((name) => ({
+    name,
+    available: true,
+    sorted: seq++,
+    provider: {
+      id: "iflytek",
+      providerName: "Iflytek",
+      providerType: "iflytek",
+      sorted: 10,
+    },
+  })),
+  ...xAIModes.map((name) => ({
+    name,
+    available: true,
+    sorted: seq++,
+    provider: {
+      id: "xai",
+      providerName: "XAI",
+      providerType: "xai",
+      sorted: 11,
+    },
+  })),
+  ...chatglmModels.map((name) => ({
+    name,
+    available: true,
+    sorted: seq++,
+    provider: {
+      id: "chatglm",
+      providerName: "ChatGLM",
+      providerType: "chatglm",
+      sorted: 12,
     },
   })),
 ] as const;
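A minimal sketch, not from this commit, of how the new `sorted` fields could be used to order a model list: by provider block first, then by the per-model sequence number handed out by `seq++`. The `SortableModel` type and the comparator are illustrative assumptions.

// Illustrative only: assumed minimal shape of the entries above.
type SortableModel = {
  name: string;
  sorted: number;
  provider: { providerName: string; sorted: number };
};

// Order by the provider's fixed rank (1..12), then by the model's own
// sequence number (1000, 1001, ...).
function sortModels(models: SortableModel[]): SortableModel[] {
  return [...models].sort(
    (a, b) => a.provider.sorted - b.provider.sorted || a.sorted - b.sorted,
  );
}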
@@ -21,10 +21,23 @@ declare interface Window {
     writeBinaryFile(path: string, data: Uint8Array): Promise<void>;
     writeTextFile(path: string, data: string): Promise<void>;
   };
-  notification:{
+  notification: {
     requestPermission(): Promise<Permission>;
     isPermissionGranted(): Promise<boolean>;
     sendNotification(options: string | Options): void;
   };
+  updater: {
+    checkUpdate(): Promise<UpdateResult>;
+    installUpdate(): Promise<void>;
+    onUpdaterEvent(
+      handler: (status: UpdateStatusResult) => void,
+    ): Promise<UnlistenFn>;
+  };
+  http: {
+    fetch<T>(
+      url: string,
+      options?: Record<string, unknown>,
+    ): Promise<Response<T>>;
+  };
   };
 }
@ -140,6 +140,7 @@ const cn = {
|
||||||
Settings: {
|
Settings: {
|
||||||
Title: "设置",
|
Title: "设置",
|
||||||
SubTitle: "所有设置选项",
|
SubTitle: "所有设置选项",
|
||||||
|
ShowPassword: "显示密码",
|
||||||
|
|
||||||
Danger: {
|
Danger: {
|
||||||
Reset: {
|
Reset: {
|
||||||
|
@ -164,6 +165,11 @@ const cn = {
|
||||||
Title: "字体大小",
|
Title: "字体大小",
|
||||||
SubTitle: "聊天内容的字体大小",
|
SubTitle: "聊天内容的字体大小",
|
||||||
},
|
},
|
||||||
|
FontFamily: {
|
||||||
|
Title: "聊天字体",
|
||||||
|
SubTitle: "聊天内容的字体,若置空则应用全局默认字体",
|
||||||
|
Placeholder: "字体名称",
|
||||||
|
},
|
||||||
InjectSystemPrompts: {
|
InjectSystemPrompts: {
|
||||||
Title: "注入系统级提示信息",
|
Title: "注入系统级提示信息",
|
||||||
SubTitle: "强制给每次请求的消息列表开头添加一个模拟 ChatGPT 的系统提示",
|
SubTitle: "强制给每次请求的消息列表开头添加一个模拟 ChatGPT 的系统提示",
|
||||||
|
@ -180,6 +186,8 @@ const cn = {
|
||||||
IsChecking: "正在检查更新...",
|
IsChecking: "正在检查更新...",
|
||||||
FoundUpdate: (x: string) => `发现新版本:${x}`,
|
FoundUpdate: (x: string) => `发现新版本:${x}`,
|
||||||
GoToUpdate: "前往更新",
|
GoToUpdate: "前往更新",
|
||||||
|
Success: "更新成功!",
|
||||||
|
Failed: "更新失败",
|
||||||
},
|
},
|
||||||
SendKey: "发送键",
|
SendKey: "发送键",
|
||||||
Theme: "主题",
|
Theme: "主题",
|
||||||
|
@ -346,7 +354,7 @@ const cn = {
|
||||||
ApiKey: {
|
ApiKey: {
|
||||||
Title: "API 密钥",
|
Title: "API 密钥",
|
||||||
SubTitle: "从 Google AI 获取您的 API 密钥",
|
SubTitle: "从 Google AI 获取您的 API 密钥",
|
||||||
Placeholder: "输入您的 Google AI Studio API 密钥",
|
Placeholder: "Google AI API KEY",
|
||||||
},
|
},
|
||||||
|
|
||||||
Endpoint: {
|
Endpoint: {
|
||||||
|
@ -379,6 +387,22 @@ const cn = {
|
||||||
SubTitle: "不支持自定义前往.env配置",
|
SubTitle: "不支持自定义前往.env配置",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
Tencent: {
|
||||||
|
ApiKey: {
|
||||||
|
Title: "API Key",
|
||||||
|
SubTitle: "使用自定义腾讯云API Key",
|
||||||
|
Placeholder: "Tencent API Key",
|
||||||
|
},
|
||||||
|
SecretKey: {
|
||||||
|
Title: "Secret Key",
|
||||||
|
SubTitle: "使用自定义腾讯云Secret Key",
|
||||||
|
Placeholder: "Tencent Secret Key",
|
||||||
|
},
|
||||||
|
Endpoint: {
|
||||||
|
Title: "接口地址",
|
||||||
|
SubTitle: "不支持自定义前往.env配置",
|
||||||
|
},
|
||||||
|
},
|
||||||
ByteDance: {
|
ByteDance: {
|
||||||
ApiKey: {
|
ApiKey: {
|
||||||
Title: "接口密钥",
|
Title: "接口密钥",
|
||||||
|
@ -401,6 +425,66 @@ const cn = {
|
||||||
SubTitle: "样例:",
|
SubTitle: "样例:",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
Moonshot: {
|
||||||
|
ApiKey: {
|
||||||
|
Title: "接口密钥",
|
||||||
|
SubTitle: "使用自定义月之暗面API Key",
|
||||||
|
Placeholder: "Moonshot API Key",
|
||||||
|
},
|
||||||
|
Endpoint: {
|
||||||
|
Title: "接口地址",
|
||||||
|
SubTitle: "样例:",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
XAI: {
|
||||||
|
ApiKey: {
|
||||||
|
Title: "接口密钥",
|
||||||
|
SubTitle: "使用自定义XAI API Key",
|
||||||
|
Placeholder: "XAI API Key",
|
||||||
|
},
|
||||||
|
Endpoint: {
|
||||||
|
Title: "接口地址",
|
||||||
|
SubTitle: "样例:",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
ChatGLM: {
|
||||||
|
ApiKey: {
|
||||||
|
Title: "接口密钥",
|
||||||
|
SubTitle: "使用自定义 ChatGLM API Key",
|
||||||
|
Placeholder: "ChatGLM API Key",
|
||||||
|
},
|
||||||
|
Endpoint: {
|
||||||
|
Title: "接口地址",
|
||||||
|
SubTitle: "样例:",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
Stability: {
|
||||||
|
ApiKey: {
|
||||||
|
Title: "接口密钥",
|
||||||
|
SubTitle: "使用自定义 Stability API Key",
|
||||||
|
Placeholder: "Stability API Key",
|
||||||
|
},
|
||||||
|
Endpoint: {
|
||||||
|
Title: "接口地址",
|
||||||
|
SubTitle: "样例:",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
Iflytek: {
|
||||||
|
ApiKey: {
|
||||||
|
Title: "ApiKey",
|
||||||
|
SubTitle: "从讯飞星火控制台获取的 APIKey",
|
||||||
|
Placeholder: "APIKey",
|
||||||
|
},
|
||||||
|
ApiSecret: {
|
||||||
|
Title: "ApiSecret",
|
||||||
|
SubTitle: "从讯飞星火控制台获取的 APISecret",
|
||||||
|
Placeholder: "APISecret",
|
||||||
|
},
|
||||||
|
Endpoint: {
|
||||||
|
Title: "接口地址",
|
||||||
|
SubTitle: "样例:",
|
||||||
|
},
|
||||||
|
},
|
||||||
CustomModel: {
|
CustomModel: {
|
||||||
Title: "自定义模型名",
|
Title: "自定义模型名",
|
||||||
SubTitle: "增加自定义模型可选项,使用英文逗号隔开",
|
SubTitle: "增加自定义模型可选项,使用英文逗号隔开",
|
||||||
|
|
|
@ -143,6 +143,7 @@ const en: LocaleType = {
|
||||||
Settings: {
|
Settings: {
|
||||||
Title: "Settings",
|
Title: "Settings",
|
||||||
SubTitle: "All Settings",
|
SubTitle: "All Settings",
|
||||||
|
ShowPassword: "ShowPassword",
|
||||||
Danger: {
|
Danger: {
|
||||||
Reset: {
|
Reset: {
|
||||||
Title: "Reset All Settings",
|
Title: "Reset All Settings",
|
||||||
|
@ -166,6 +167,12 @@ const en: LocaleType = {
|
||||||
Title: "Font Size",
|
Title: "Font Size",
|
||||||
SubTitle: "Adjust font size of chat content",
|
SubTitle: "Adjust font size of chat content",
|
||||||
},
|
},
|
||||||
|
FontFamily: {
|
||||||
|
Title: "Chat Font Family",
|
||||||
|
SubTitle:
|
||||||
|
"Font Family of the chat content, leave empty to apply global default font",
|
||||||
|
Placeholder: "Font Family Name",
|
||||||
|
},
|
||||||
InjectSystemPrompts: {
|
InjectSystemPrompts: {
|
||||||
Title: "Inject System Prompts",
|
Title: "Inject System Prompts",
|
||||||
SubTitle: "Inject a global system prompt for every request",
|
SubTitle: "Inject a global system prompt for every request",
|
||||||
|
@ -182,6 +189,8 @@ const en: LocaleType = {
|
||||||
IsChecking: "Checking update...",
|
IsChecking: "Checking update...",
|
||||||
FoundUpdate: (x: string) => `Found new version: ${x}`,
|
FoundUpdate: (x: string) => `Found new version: ${x}`,
|
||||||
GoToUpdate: "Update",
|
GoToUpdate: "Update",
|
||||||
|
Success: "Update Successful.",
|
||||||
|
Failed: "Update Failed.",
|
||||||
},
|
},
|
||||||
SendKey: "Send Key",
|
SendKey: "Send Key",
|
||||||
Theme: "Theme",
|
Theme: "Theme",
|
||||||
|
@ -362,6 +371,22 @@ const en: LocaleType = {
|
||||||
SubTitle: "not supported, configure in .env",
|
SubTitle: "not supported, configure in .env",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
Tencent: {
|
||||||
|
ApiKey: {
|
||||||
|
Title: "Tencent API Key",
|
||||||
|
SubTitle: "Use a custom Tencent API Key",
|
||||||
|
Placeholder: "Tencent API Key",
|
||||||
|
},
|
||||||
|
SecretKey: {
|
||||||
|
Title: "Tencent Secret Key",
|
||||||
|
SubTitle: "Use a custom Tencent Secret Key",
|
||||||
|
Placeholder: "Tencent Secret Key",
|
||||||
|
},
|
||||||
|
Endpoint: {
|
||||||
|
Title: "Endpoint Address",
|
||||||
|
SubTitle: "not supported, configure in .env",
|
||||||
|
},
|
||||||
|
},
|
||||||
ByteDance: {
|
ByteDance: {
|
||||||
ApiKey: {
|
ApiKey: {
|
||||||
Title: "ByteDance API Key",
|
Title: "ByteDance API Key",
|
||||||
|
@ -384,6 +409,66 @@ const en: LocaleType = {
|
||||||
SubTitle: "Example: ",
|
SubTitle: "Example: ",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
Moonshot: {
|
||||||
|
ApiKey: {
|
||||||
|
Title: "Moonshot API Key",
|
||||||
|
SubTitle: "Use a custom Moonshot API Key",
|
||||||
|
Placeholder: "Moonshot API Key",
|
||||||
|
},
|
||||||
|
Endpoint: {
|
||||||
|
Title: "Endpoint Address",
|
||||||
|
SubTitle: "Example: ",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
XAI: {
|
||||||
|
ApiKey: {
|
||||||
|
Title: "XAI API Key",
|
||||||
|
SubTitle: "Use a custom XAI API Key",
|
||||||
|
Placeholder: "XAI API Key",
|
||||||
|
},
|
||||||
|
Endpoint: {
|
||||||
|
Title: "Endpoint Address",
|
||||||
|
SubTitle: "Example: ",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
ChatGLM: {
|
||||||
|
ApiKey: {
|
||||||
|
Title: "ChatGLM API Key",
|
||||||
|
SubTitle: "Use a custom ChatGLM API Key",
|
||||||
|
Placeholder: "ChatGLM API Key",
|
||||||
|
},
|
||||||
|
Endpoint: {
|
||||||
|
Title: "Endpoint Address",
|
||||||
|
SubTitle: "Example: ",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
Stability: {
|
||||||
|
ApiKey: {
|
||||||
|
Title: "Stability API Key",
|
||||||
|
SubTitle: "Use a custom Stability API Key",
|
||||||
|
Placeholder: "Stability API Key",
|
||||||
|
},
|
||||||
|
Endpoint: {
|
||||||
|
Title: "Endpoint Address",
|
||||||
|
SubTitle: "Example: ",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
Iflytek: {
|
||||||
|
ApiKey: {
|
||||||
|
Title: "Iflytek API Key",
|
||||||
|
SubTitle: "Use a Iflytek API Key",
|
||||||
|
Placeholder: "Iflytek API Key",
|
||||||
|
},
|
||||||
|
ApiSecret: {
|
||||||
|
Title: "Iflytek API Secret",
|
||||||
|
SubTitle: "Use a Iflytek API Secret",
|
||||||
|
Placeholder: "Iflytek API Secret",
|
||||||
|
},
|
||||||
|
Endpoint: {
|
||||||
|
Title: "Endpoint Address",
|
||||||
|
SubTitle: "Example: ",
|
||||||
|
},
|
||||||
|
},
|
||||||
CustomModel: {
|
CustomModel: {
|
||||||
Title: "Custom Models",
|
Title: "Custom Models",
|
||||||
SubTitle: "Custom model options, seperated by comma",
|
SubTitle: "Custom model options, seperated by comma",
|
||||||
|
@ -392,7 +477,7 @@ const en: LocaleType = {
|
||||||
ApiKey: {
|
ApiKey: {
|
||||||
Title: "API Key",
|
Title: "API Key",
|
||||||
SubTitle: "Obtain your API Key from Google AI",
|
SubTitle: "Obtain your API Key from Google AI",
|
||||||
Placeholder: "Enter your Google AI Studio API Key",
|
Placeholder: "Google AI API Key",
|
||||||
},
|
},
|
||||||
|
|
||||||
Endpoint: {
|
Endpoint: {
|
||||||
|
|
|
@@ -1,45 +1,55 @@
 import {
-  ApiPath,
-  DEFAULT_API_HOST,
   GoogleSafetySettingsThreshold,
   ServiceProvider,
   StoreKey,
+  ApiPath,
+  OPENAI_BASE_URL,
+  ANTHROPIC_BASE_URL,
+  GEMINI_BASE_URL,
+  BAIDU_BASE_URL,
+  BYTEDANCE_BASE_URL,
+  ALIBABA_BASE_URL,
+  TENCENT_BASE_URL,
+  MOONSHOT_BASE_URL,
+  STABILITY_BASE_URL,
+  IFLYTEK_BASE_URL,
+  XAI_BASE_URL,
+  CHATGLM_BASE_URL,
 } from "../constant";
 import { getHeaders } from "../client/api";
 import { getClientConfig } from "../config/client";
 import { createPersistStore } from "../utils/store";
 import { ensure } from "../utils/clone";
 import { DEFAULT_CONFIG } from "./config";
+import { getModelProvider } from "../utils/model";

 let fetchState = 0; // 0 not fetch, 1 fetching, 2 done

 const isApp = getClientConfig()?.buildMode === "export";

-const DEFAULT_OPENAI_URL = isApp
-  ? DEFAULT_API_HOST + "/api/proxy/openai"
-  : ApiPath.OpenAI;
+const DEFAULT_OPENAI_URL = isApp ? OPENAI_BASE_URL : ApiPath.OpenAI;

-const DEFAULT_GOOGLE_URL = isApp
-  ? DEFAULT_API_HOST + "/api/proxy/google"
-  : ApiPath.Google;
+const DEFAULT_GOOGLE_URL = isApp ? GEMINI_BASE_URL : ApiPath.Google;

-const DEFAULT_ANTHROPIC_URL = isApp
-  ? DEFAULT_API_HOST + "/api/proxy/anthropic"
-  : ApiPath.Anthropic;
+const DEFAULT_ANTHROPIC_URL = isApp ? ANTHROPIC_BASE_URL : ApiPath.Anthropic;

-const DEFAULT_BAIDU_URL = isApp
-  ? DEFAULT_API_HOST + "/api/proxy/baidu"
-  : ApiPath.Baidu;
+const DEFAULT_BAIDU_URL = isApp ? BAIDU_BASE_URL : ApiPath.Baidu;

-const DEFAULT_BYTEDANCE_URL = isApp
-  ? DEFAULT_API_HOST + "/api/proxy/bytedance"
-  : ApiPath.ByteDance;
+const DEFAULT_BYTEDANCE_URL = isApp ? BYTEDANCE_BASE_URL : ApiPath.ByteDance;

-const DEFAULT_ALIBABA_URL = isApp
-  ? DEFAULT_API_HOST + "/api/proxy/alibaba"
-  : ApiPath.Alibaba;
+const DEFAULT_ALIBABA_URL = isApp ? ALIBABA_BASE_URL : ApiPath.Alibaba;

-console.log("DEFAULT_ANTHROPIC_URL", DEFAULT_ANTHROPIC_URL);
+const DEFAULT_TENCENT_URL = isApp ? TENCENT_BASE_URL : ApiPath.Tencent;
+
+const DEFAULT_MOONSHOT_URL = isApp ? MOONSHOT_BASE_URL : ApiPath.Moonshot;
+
+const DEFAULT_STABILITY_URL = isApp ? STABILITY_BASE_URL : ApiPath.Stability;
+
+const DEFAULT_IFLYTEK_URL = isApp ? IFLYTEK_BASE_URL : ApiPath.Iflytek;
+
+const DEFAULT_XAI_URL = isApp ? XAI_BASE_URL : ApiPath.XAI;
+
+const DEFAULT_CHATGLM_URL = isApp ? CHATGLM_BASE_URL : ApiPath.ChatGLM;

 const DEFAULT_ACCESS_STATE = {
   accessCode: "",
@@ -54,7 +64,7 @@ const DEFAULT_ACCESS_STATE = {
   // azure
   azureUrl: "",
   azureApiKey: "",
-  azureApiVersion: "2024-02-15-preview",
+  azureApiVersion: "2023-08-01-preview",

   // google ai studio
   googleUrl: DEFAULT_GOOGLE_URL,
@@ -80,6 +90,32 @@ const DEFAULT_ACCESS_STATE = {
   alibabaUrl: DEFAULT_ALIBABA_URL,
   alibabaApiKey: "",

+  // moonshot
+  moonshotUrl: DEFAULT_MOONSHOT_URL,
+  moonshotApiKey: "",
+
+  //stability
+  stabilityUrl: DEFAULT_STABILITY_URL,
+  stabilityApiKey: "",
+
+  // tencent
+  tencentUrl: DEFAULT_TENCENT_URL,
+  tencentSecretKey: "",
+  tencentSecretId: "",
+
+  // iflytek
+  iflytekUrl: DEFAULT_IFLYTEK_URL,
+  iflytekApiKey: "",
+  iflytekApiSecret: "",
+
+  // xai
+  xaiUrl: DEFAULT_XAI_URL,
+  xaiApiKey: "",
+
+  // chatglm
+  chatglmUrl: DEFAULT_CHATGLM_URL,
+  chatglmApiKey: "",
+
   // server config
   needCode: true,
   hideUserApiKey: false,
@@ -160,6 +196,25 @@ export const useAccessStore = createPersistStore(
       return ensure(get(), ["alibabaApiKey"]);
     },

+    isValidTencent() {
+      return ensure(get(), ["tencentSecretKey", "tencentSecretId"]);
+    },
+
+    isValidMoonshot() {
+      return ensure(get(), ["moonshotApiKey"]);
+    },
+    isValidIflytek() {
+      return ensure(get(), ["iflytekApiKey"]);
+    },
+
+    isValidXAI() {
+      return ensure(get(), ["xaiApiKey"]);
+    },
+
+    isValidChatGLM() {
+      return ensure(get(), ["chatglmApiKey"]);
+    },
+
     isAuthorized() {
       this.fetch();

@@ -172,6 +227,11 @@ export const useAccessStore = createPersistStore(
         this.isValidBaidu() ||
         this.isValidByteDance() ||
         this.isValidAlibaba() ||
+        this.isValidTencent() ||
+        this.isValidMoonshot() ||
+        this.isValidIflytek() ||
+        this.isValidXAI() ||
+        this.isValidChatGLM() ||
         !this.enabledAccessControl() ||
         (this.enabledAccessControl() && ensure(get(), ["accessCode"]))
       );
@@ -188,10 +248,13 @@ export const useAccessStore = createPersistStore(
         })
         .then((res) => res.json())
         .then((res) => {
-          // Set default model from env request
-          let defaultModel = res.defaultModel ?? "";
-          DEFAULT_CONFIG.modelConfig.model =
-            defaultModel !== "" ? defaultModel : "gpt-3.5-turbo";
+          const defaultModel = res.defaultModel ?? "";
+          if (defaultModel !== "") {
+            const [model, providerName] = getModelProvider(defaultModel);
+            DEFAULT_CONFIG.modelConfig.model = model;
+            DEFAULT_CONFIG.modelConfig.providerName = providerName as any;
+          }

           return res;
         })
         .then((res: DangerConfig) => {
@@ -218,8 +281,7 @@ export const useAccessStore = createPersistStore(
           googleApiKey: string;
         };
         state.openaiApiKey = state.token;
-        state.azureApiVersion = "2024-02-15-preview";
-        state.googleApiKey = state.token;
+        state.azureApiVersion = "2023-08-01-preview";
       }

       return persistedState as any;
@@ -47,6 +47,7 @@ export const DEFAULT_CONFIG = {
   submitKey: SubmitKey.Enter,
   avatar: "1f603",
   fontSize: 14,
+  fontFamily: "",
   theme: Theme.Auto as Theme,
   tightBorder: !!config?.isApp,
   sendPreviewBubble: true,
@@ -76,6 +77,8 @@ export const DEFAULT_CONFIG = {
     sendMemory: true,
     historyMessageCount: 4,
     compressMessageLengthThreshold: 1000,
+    compressModel: "",
+    compressProviderName: "",
     enableInjectSystemPrompts: true,
     template: config?.template ?? DEFAULT_INPUT_TEMPLATE,
     size: "1024x1024" as DalleSize,

@@ -18,6 +18,8 @@ export type Mask = {
   lang: Lang;
   builtin: boolean;
   usePlugins?: boolean;
+  // upstream plugin business parameter
+  plugin?: string[];
   enableArtifacts?: boolean;
   enableCodeFold?: boolean;
 };
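A minimal sketch of how the new `fontFamily` default (together with `fontSize`) could be turned into an inline style for chat content; the helper below is illustrative only and not the commit's actual wiring.

// Illustrative only: derive an inline style from the two font settings above.
// An empty fontFamily string falls back to the surrounding/global font.
function chatContentStyle(config: { fontSize?: number; fontFamily?: string }) {
  return {
    fontSize: `${config.fontSize ?? 14}px`,
    fontFamily: config.fontFamily || "inherit",
  };
}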
@@ -1,5 +1,4 @@
 import { getClientConfig } from "../config/client";
-import { Updater } from "../typing";
 import { ApiPath, STORAGE_KEY, StoreKey } from "../constant";
 import { createPersistStore } from "../utils/store";
 import {
@@ -13,7 +12,6 @@ import { downloadAs, readFromFile } from "../utils";
 import { showToast } from "../components/ui-lib";
 import Locale from "../locales";
 import { createSyncClient, ProviderType } from "../utils/cloud";
-import { corsPath } from "../utils/cors";

 export interface WebDavConfig {
   server: string;
@@ -27,7 +25,7 @@ export type SyncStore = GetStoreState<typeof useSyncStore>;
 const DEFAULT_SYNC_STATE = {
   provider: ProviderType.WebDAV,
   useProxy: true,
-  proxyUrl: corsPath(ApiPath.Cors),
+  proxyUrl: ApiPath.Cors as string,

   webdav: {
     endpoint: "",
app/utils.ts
@@ -3,6 +3,9 @@ import { showToast } from "./components/ui-lib";
 import Locale from "./locales";
 import { RequestMessage } from "./client/api";
 import { DEFAULT_MODELS } from "./constant";
+import { ServiceProvider } from "./constant";
+// import { fetch as tauriFetch, ResponseType } from "@tauri-apps/api/http";
+import { fetch as tauriStreamFetch } from "./utils/stream";

 export function trimTopic(topic: string) {
   // Fix an issue where double quotes still show in the Indonesian language
@@ -195,6 +198,7 @@ export function autoGrowTextArea(dom: HTMLTextAreaElement) {
   measureDom.style.width = width + "px";
   measureDom.innerText = dom.value !== "" ? dom.value : "1";
   measureDom.style.fontSize = dom.style.fontSize;
+  measureDom.style.fontFamily = dom.style.fontFamily;
   const endWithEmptyLine = dom.value.endsWith("\n");
   const height = parseFloat(window.getComputedStyle(measureDom).height);
   const singleLineHeight = parseFloat(
@@ -263,7 +267,9 @@ export function isVisionModel(model: string) {
     model.includes("gpt-4-turbo") && !model.includes("preview");

   return (
-    visionKeywords.some((keyword) => model.includes(keyword)) || isGpt4Turbo
+    visionKeywords.some((keyword) => model.includes(keyword)) ||
+    isGpt4Turbo ||
+    isDalle3(model)
   );
 }

@@ -318,6 +324,48 @@ export function isFunctionCallModel(modelName: string) {
   ).some((model) => model.name === modelName);
 }

+export function showPlugins(provider: ServiceProvider, model: string) {
+  if (
+    provider == ServiceProvider.OpenAI ||
+    provider == ServiceProvider.Azure ||
+    provider == ServiceProvider.Moonshot ||
+    provider == ServiceProvider.ChatGLM
+  ) {
+    return true;
+  }
+  if (provider == ServiceProvider.Anthropic && !model.includes("claude-2")) {
+    return true;
+  }
+  if (provider == ServiceProvider.Google && !model.includes("vision")) {
+    return true;
+  }
+  return false;
+}
+
+export function fetch(
+  url: string,
+  options?: Record<string, unknown>,
+): Promise<any> {
+  if (window.__TAURI__) {
+    return tauriStreamFetch(url, options);
+  }
+  return window.fetch(url, options);
+}
+
+export function adapter(config: Record<string, unknown>) {
+  const { baseURL, url, params, data: body, ...rest } = config;
+  const path = baseURL ? `${baseURL}${url}` : url;
+  const fetchUrl = params
+    ? `${path}?${new URLSearchParams(params as any).toString()}`
+    : path;
+  return fetch(fetchUrl as string, { ...rest, body }).then((res) => {
+    const { status, headers, statusText } = res;
+    return res
+      .text()
+      .then((data: string) => ({ status, statusText, headers, data }));
+  });
+}
+
 export function safeLocalStorage(): {
   getItem: (key: string) => string | null;
   setItem: (key: string, value: string) => void;
@@ -377,3 +425,49 @@ export function safeLocalStorage(): {
     },
   };
 }
+
+export function getOperationId(operation: {
+  operationId?: string;
+  method: string;
+  path: string;
+}) {
+  // pattern '^[a-zA-Z0-9_-]+$'
+  return (
+    operation?.operationId ||
+    `${operation.method.toUpperCase()}${operation.path.replaceAll("/", "_")}`
+  );
+}
+
+export function clientUpdate() {
+  // this a wild for updating client app
+  return window.__TAURI__?.updater
+    .checkUpdate()
+    .then((updateResult) => {
+      if (updateResult.shouldUpdate) {
+        window.__TAURI__?.updater
+          .installUpdate()
+          .then((result) => {
+            showToast(Locale.Settings.Update.Success);
+          })
+          .catch((e) => {
+            console.error("[Install Update Error]", e);
+            showToast(Locale.Settings.Update.Failed);
+          });
+      }
+    })
+    .catch((e) => {
+      console.error("[Check Update Error]", e);
+      showToast(Locale.Settings.Update.Failed);
+    });
+}
+
+// https://gist.github.com/iwill/a83038623ba4fef6abb9efca87ae9ccb
+export function semverCompare(a: string, b: string) {
+  if (a.startsWith(b + "-")) return -1;
+  if (b.startsWith(a + "-")) return 1;
+  return a.localeCompare(b, undefined, {
+    numeric: true,
+    sensitivity: "case",
+    caseFirst: "upper",
+  });
+}
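As a quick illustration, not part of the commit, the comparator above yields the expected ordering for release and pre-release version strings:

// Illustrative only: expected behaviour of semverCompare above.
const versions = ["2.16.0", "2.15.8", "2.16.0-beta"];
versions.sort(semverCompare);
// -> ["2.15.8", "2.16.0-beta", "2.16.0"]
// "2.16.0-beta" sorts before "2.16.0" because it starts with "2.16.0-",
// and numeric collation puts "2.15.8" before "2.16.0".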
@@ -1,19 +0,0 @@
-import { getClientConfig } from "../config/client";
-import { ApiPath, DEFAULT_API_HOST } from "../constant";
-
-export function corsPath(path: string) {
-  const baseUrl = getClientConfig()?.isApp ? `${DEFAULT_API_HOST}` : "";
-
-  if (baseUrl === "" && path === "") {
-    return "";
-  }
-  if (!path.startsWith("/")) {
-    path = "/" + path;
-  }
-
-  if (!path.endsWith("/")) {
-    path += "/";
-  }
-
-  return `${baseUrl}${path}`;
-}
@ -0,0 +1,246 @@
|
||||||
|
// From https://gist.github.com/guillermodlpa/f6d955f838e9b10d1ef95b8e259b2c58
|
||||||
|
// From https://gist.github.com/stevendesu/2d52f7b5e1f1184af3b667c0b5e054b8
|
||||||
|
|
||||||
|
// To ensure cross-browser support even without a proper SubtleCrypto
|
||||||
|
// impelmentation (or without access to the impelmentation, as is the case with
|
||||||
|
// Chrome loaded over HTTP instead of HTTPS), this library can create SHA-256
|
||||||
|
// HMAC signatures using nothing but raw JavaScript
|
||||||
|
|
||||||
|
/* eslint-disable no-magic-numbers, id-length, no-param-reassign, new-cap */
|
||||||
|
|
||||||
|
// By giving internal functions names that we can mangle, future calls to
|
||||||
|
// them are reduced to a single byte (minor space savings in minified file)
|
||||||
|
const uint8Array = Uint8Array;
|
||||||
|
const uint32Array = Uint32Array;
|
||||||
|
const pow = Math.pow;
|
||||||
|
|
||||||
|
// Will be initialized below
|
||||||
|
// Using a Uint32Array instead of a simple array makes the minified code
|
||||||
|
// a bit bigger (we lose our `unshift()` hack), but comes with huge
|
||||||
|
// performance gains
|
||||||
|
const DEFAULT_STATE = new uint32Array(8);
|
||||||
|
const ROUND_CONSTANTS: number[] = [];
|
||||||
|
|
||||||
|
// Reusable object for expanded message
|
||||||
|
// Using a Uint32Array instead of a simple array makes the minified code
|
||||||
|
// 7 bytes larger, but comes with huge performance gains
|
||||||
|
const M = new uint32Array(64);
|
||||||
|
|
||||||
|
// After minification the code to compute the default state and round
|
||||||
|
// constants is smaller than the output. More importantly, this serves as a
|
||||||
|
// good educational aide for anyone wondering where the magic numbers come
|
||||||
|
// from. No magic numbers FTW!
|
||||||
|
function getFractionalBits(n: number) {
|
||||||
|
return ((n - (n | 0)) * pow(2, 32)) | 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
let n = 2;
|
||||||
|
let nPrime = 0;
|
||||||
|
while (nPrime < 64) {
|
||||||
|
// isPrime() was in-lined from its original function form to save
|
||||||
|
// a few bytes
|
||||||
|
let isPrime = true;
|
||||||
|
// Math.sqrt() was replaced with pow(n, 1/2) to save a few bytes
|
||||||
|
// var sqrtN = pow(n, 1 / 2);
|
||||||
|
// So technically to determine if a number is prime you only need to
|
||||||
|
// check numbers up to the square root. However this function only runs
|
||||||
|
// once and we're only computing the first 64 primes (up to 311), so on
|
||||||
|
// any modern CPU this whole function runs in a couple milliseconds.
|
||||||
|
// By going to n / 2 instead of sqrt(n) we net 8 byte savings and no
|
||||||
|
// scaling performance cost
|
||||||
|
for (let factor = 2; factor <= n / 2; factor++) {
|
||||||
|
if (n % factor === 0) {
|
||||||
|
isPrime = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (isPrime) {
|
||||||
|
if (nPrime < 8) {
|
||||||
|
DEFAULT_STATE[nPrime] = getFractionalBits(pow(n, 1 / 2));
|
||||||
|
}
|
||||||
|
ROUND_CONSTANTS[nPrime] = getFractionalBits(pow(n, 1 / 3));
|
||||||
|
|
||||||
|
nPrime++;
|
||||||
|
}
|
||||||
|
|
||||||
|
n++;
|
||||||
|
}
|
||||||
|
|
||||||
|
// For cross-platform support we need to ensure that all 32-bit words are
|
||||||
|
// in the same endianness. A UTF-8 TextEncoder will return BigEndian data,
|
||||||
|
// so upon reading or writing to our ArrayBuffer we'll only swap the bytes
|
||||||
|
// if our system is LittleEndian (which is about 99% of CPUs)
|
||||||
|
const LittleEndian = !!new uint8Array(new uint32Array([1]).buffer)[0];
|
||||||
|
|
||||||
|
function convertEndian(word: number) {
|
||||||
|
if (LittleEndian) {
|
||||||
|
return (
|
||||||
|
// byte 1 -> byte 4
|
||||||
|
(word >>> 24) |
|
||||||
|
// byte 2 -> byte 3
|
||||||
|
(((word >>> 16) & 0xff) << 8) |
|
||||||
|
// byte 3 -> byte 2
|
||||||
|
((word & 0xff00) << 8) |
|
||||||
|
// byte 4 -> byte 1
|
||||||
|
(word << 24)
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
return word;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function rightRotate(word: number, bits: number) {
  return (word >>> bits) | (word << (32 - bits));
}

function sha256(data: Uint8Array) {
  // Copy default state
  const STATE = DEFAULT_STATE.slice();

  // Caching this reduces occurrences of ".length" in minified JavaScript
  // 3 more byte savings! :D
  const length = data.length;

  // Pad data
  const bitLength = length * 8;
  const newBitLength = 512 - ((bitLength + 64) % 512) - 1 + bitLength + 65;

  // "bytes" and "words" are stored BigEndian
  const bytes = new Uint8Array(newBitLength / 8);
  const words = new Uint32Array(bytes.buffer);

  bytes.set(data, 0);
  // Append a 1
  bytes[length] = 0b10000000;
  // Store length in BigEndian
  words[words.length - 1] = convertEndian(bitLength);

  // Loop iterator (avoid two instances of "var") -- saves 2 bytes
  let round;

  // Process blocks (512 bits / 64 bytes / 16 words at a time)
  for (let block = 0; block < newBitLength / 32; block += 16) {
    const workingState = STATE.slice();

    // Rounds
    for (round = 0; round < 64; round++) {
      let MRound;
      // Expand message
      if (round < 16) {
        // Convert to platform Endianness for later math
        MRound = convertEndian(words[block + round]);
      } else {
        const gamma0x = M[round - 15];
        const gamma1x = M[round - 2];
        MRound =
          M[round - 7] +
          M[round - 16] +
          (rightRotate(gamma0x, 7) ^
            rightRotate(gamma0x, 18) ^
            (gamma0x >>> 3)) +
          (rightRotate(gamma1x, 17) ^
            rightRotate(gamma1x, 19) ^
            (gamma1x >>> 10));
      }

      // M array matches platform endianness
      M[round] = MRound |= 0;

      // Computation
      const t1 =
        (rightRotate(workingState[4], 6) ^
          rightRotate(workingState[4], 11) ^
          rightRotate(workingState[4], 25)) +
        ((workingState[4] & workingState[5]) ^
          (~workingState[4] & workingState[6])) +
        workingState[7] +
        MRound +
        ROUND_CONSTANTS[round];
      const t2 =
        (rightRotate(workingState[0], 2) ^
          rightRotate(workingState[0], 13) ^
          rightRotate(workingState[0], 22)) +
        ((workingState[0] & workingState[1]) ^
          (workingState[2] & (workingState[0] ^ workingState[1])));
      for (let i = 7; i > 0; i--) {
        workingState[i] = workingState[i - 1];
      }
      workingState[0] = (t1 + t2) | 0;
      workingState[4] = (workingState[4] + t1) | 0;
    }

    // Update state
    for (round = 0; round < 8; round++) {
      STATE[round] = (STATE[round] + workingState[round]) | 0;
    }
  }

  // Finally the state needs to be converted to BigEndian for output
  // And we want to return a Uint8Array, not a Uint32Array
  return new Uint8Array(
    new Uint32Array(
      STATE.map(function (val) {
        return convertEndian(val);
      }),
    ).buffer,
  );
}

function hmac(key: Uint8Array, data: ArrayLike<number>) {
  if (key.length > 64) key = sha256(key);

  if (key.length < 64) {
    const tmp = new Uint8Array(64);
    tmp.set(key, 0);
    key = tmp;
  }

  // Generate inner and outer keys
  const innerKey = new Uint8Array(64);
  const outerKey = new Uint8Array(64);
  for (let i = 0; i < 64; i++) {
    innerKey[i] = 0x36 ^ key[i];
    outerKey[i] = 0x5c ^ key[i];
  }

  // Append the innerKey
  const msg = new Uint8Array(data.length + 64);
  msg.set(innerKey, 0);
  msg.set(data, 64);

  // Hash the previous message and append the outerKey
  const result = new Uint8Array(64 + 32);
  result.set(outerKey, 0);
  result.set(sha256(msg), 64);

  // Hash the previous message
  return sha256(result);
}

// Convert a string to a Uint8Array, SHA-256 it, and convert back to string
const encoder = new TextEncoder();

export function sign(
  inputKey: string | Uint8Array,
  inputData: string | Uint8Array,
) {
  const key =
    typeof inputKey === "string" ? encoder.encode(inputKey) : inputKey;
  const data =
    typeof inputData === "string" ? encoder.encode(inputData) : inputData;
  return hmac(key, data);
}

export function hex(bin: Uint8Array) {
  return bin.reduce((acc, val) => {
    const hexVal = "00" + val.toString(16);
    return acc + hexVal.substring(hexVal.length - 2);
  }, "");
}

export function hash(str: string) {
  return hex(sha256(encoder.encode(str)));
}

export function hashWithSecret(str: string, secret: string) {
  return hex(sign(secret, str)).toString();
}
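Taken together, the file above exposes a small hashing API: `sign` for raw HMAC-SHA256, `hex` for hex encoding, and the `hash` / `hashWithSecret` convenience wrappers. A minimal usage sketch, assuming the module is imported from "./hmac" as elsewhere in this commit; the inputs are illustrative:

import { sign, hex, hash, hashWithSecret } from "./hmac";

// Hex-encoded SHA-256 of a string; for "abc" a correct implementation
// returns the standard test vector
// ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad
console.log(hash("abc"));

// Hex-encoded HMAC-SHA256 of a message under a secret
console.log(hashWithSecret("some payload", "my-secret"));

// The lower-level pieces compose the same way
console.log(hex(sign("my-secret", "some payload")));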
@ -1,12 +1,53 @@
import { DEFAULT_MODELS } from "../constant";
import { LLMModel } from "../client/api";

const CustomSeq = {
  val: -1000, // Start from -1000 so custom models sort to the front; refer to constant.ts
  cache: new Map<string, number>(),
  next: (id: string) => {
    if (CustomSeq.cache.has(id)) {
      return CustomSeq.cache.get(id) as number;
    } else {
      let seq = CustomSeq.val++;
      CustomSeq.cache.set(id, seq);
      return seq;
    }
  },
};

const customProvider = (providerName: string) => ({
  id: providerName.toLowerCase(),
  providerName: providerName,
  providerType: "custom",
  sorted: CustomSeq.next(providerName),
});

/**
 * Sorts an array of models based on specified rules.
 *
 * Sort by provider first; for models from the same provider, sort by model.
 */
const sortModelTable = (models: ReturnType<typeof collectModels>) =>
  models.sort((a, b) => {
    if (a.provider && b.provider) {
      let cmp = a.provider.sorted - b.provider.sorted;
      return cmp === 0 ? a.sorted - b.sorted : cmp;
    } else {
      return a.sorted - b.sorted;
    }
  });

/**
 * Get the model name and provider from a formatted string,
 * e.g. `gpt-4@OpenAi` or `claude-3-5-sonnet@20240620@Google`.
 * @param modelWithProvider model name and provider, separated by the last `@` char
 * @returns a [model, provider] tuple; if no `@` char is found, provider is undefined
 */
export function getModelProvider(modelWithProvider: string): [string, string?] {
  const [model, provider] = modelWithProvider.split(/@(?!.*@)/);
  return [model, provider];
}
export function collectModelTable(
  models: readonly LLMModel[],
  customModels: string,

@ -17,6 +58,7 @@ export function collectModelTable(
    available: boolean;
    name: string;
    displayName: string;
    sorted: number;
    provider?: LLMModel["provider"]; // Marked as optional
    isDefault?: boolean;
  }

@ -48,10 +90,10 @@ export function collectModelTable(
      );
    } else {
      // 1. find model by name, and set available value
      const [customModelName, customProviderName] = getModelProvider(name);
      let count = 0;
      for (const fullName in modelTable) {
        const [modelName, providerName] = getModelProvider(fullName);
        if (
          customModelName == modelName &&
          (customProviderName === undefined ||

@ -71,7 +113,7 @@ export function collectModelTable(
      }
      // 2. if model not exists, create new model with available value
      if (count === 0) {
        let [customModelName, customProviderName] = getModelProvider(name);
        const provider = customProvider(
          customProviderName || customModelName,
        );

@ -84,6 +126,7 @@ export function collectModelTable(
          displayName: displayName || customModelName,
          available,
          provider, // Use optional chaining
          sorted: CustomSeq.next(`${customModelName}@${provider?.id}`),
        };
      }
    }

@ -99,13 +142,16 @@ export function collectModelTableWithDefaultModel(
) {
  let modelTable = collectModelTable(models, customModels);
  if (defaultModel && defaultModel !== "") {
    if (defaultModel.includes("@")) {
      if (defaultModel in modelTable) {
        modelTable[defaultModel].isDefault = true;
      }
    } else {
      for (const key of Object.keys(modelTable)) {
        if (
          modelTable[key].available &&
          getModelProvider(key)[0] == defaultModel
        ) {
          modelTable[key].isDefault = true;
          break;
        }

@ -123,7 +169,9 @@ export function collectModels(
  customModels: string,
) {
  const modelTable = collectModelTable(models, customModels);
  let allModels = Object.values(modelTable);

  allModels = sortModelTable(allModels);

  return allModels;
}

@ -138,7 +186,10 @@ export function collectModelsWithDefaultModel(
    customModels,
    defaultModel,
  );
  let allModels = Object.values(modelTable);

  allModels = sortModelTable(allModels);

  return allModels;
}
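Several of the hunks above replace ad-hoc `split("@")` calls with `getModelProvider`, which splits on the last `@` only. A short sketch of what that helper returns (the model names are only illustrative):

getModelProvider("gpt-4@OpenAI");
// => ["gpt-4", "OpenAI"]

getModelProvider("claude-3-5-sonnet@20240620@Google");
// => ["claude-3-5-sonnet@20240620", "Google"]  (only the last "@" separates the provider)

getModelProvider("gpt-4");
// => ["gpt-4", undefined]  (no "@", so the provider is undefined)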
@ -0,0 +1,108 @@
// using tauri command to send request
// see src-tauri/src/stream.rs, and src-tauri/src/main.rs
// 1. invoke('stream_fetch', {url, method, headers, body}), get response with headers.
// 2. listen to the `stream-response` event multiple times to get the body

type ResponseEvent = {
  id: number;
  payload: {
    request_id: number;
    status?: number;
    chunk?: number[];
  };
};

type StreamResponse = {
  request_id: number;
  status: number;
  status_text: string;
  headers: Record<string, string>;
};

export function fetch(url: string, options?: RequestInit): Promise<Response> {
  if (window.__TAURI__) {
    const {
      signal,
      method = "GET",
      headers: _headers = {},
      body = [],
    } = options || {};
    let unlisten: Function | undefined;
    let setRequestId: Function | undefined;
    const requestIdPromise = new Promise((resolve) => (setRequestId = resolve));
    const ts = new TransformStream();
    const writer = ts.writable.getWriter();

    let closed = false;
    const close = () => {
      if (closed) return;
      closed = true;
      unlisten && unlisten();
      writer.ready.then(() => {
        writer.close().catch((e) => console.error(e));
      });
    };

    if (signal) {
      signal.addEventListener("abort", () => close());
    }
    // @ts-ignore 2. listen for the response multiple times, and write to Response.body
    window.__TAURI__.event
      .listen("stream-response", (e: ResponseEvent) =>
        requestIdPromise.then((request_id) => {
          const { request_id: rid, chunk, status } = e?.payload || {};
          if (request_id != rid) {
            return;
          }
          if (chunk) {
            writer.ready.then(() => {
              writer.write(new Uint8Array(chunk));
            });
          } else if (status === 0) {
            // end of body
            close();
          }
        }),
      )
      .then((u: Function) => (unlisten = u));

    const headers: Record<string, string> = {
      Accept: "application/json, text/plain, */*",
      "Accept-Language": "en-US,en;q=0.9,zh-CN;q=0.8,zh;q=0.7",
      "User-Agent": navigator.userAgent,
    };
    for (const item of new Headers(_headers || {})) {
      headers[item[0]] = item[1];
    }
    return window.__TAURI__
      .invoke("stream_fetch", {
        method: method.toUpperCase(),
        url,
        headers,
        // TODO FormData
        body:
          typeof body === "string"
            ? Array.from(new TextEncoder().encode(body))
            : [],
      })
      .then((res: StreamResponse) => {
        const { request_id, status, status_text: statusText, headers } = res;
        setRequestId?.(request_id);
        const response = new Response(ts.readable, {
          status,
          statusText,
          headers,
        });
        if (status >= 300) {
          setTimeout(close, 100);
        }
        return response;
      })
      .catch((e) => {
        console.error("stream error", e);
        // throw e;
        return new Response("", { status: 599 });
      });
  }
  return window.fetch(url, options);
}
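The exported `fetch` above acts as a drop-in replacement for `window.fetch` inside the Tauri desktop build and delegates to the browser otherwise. A hypothetical caller that streams a response through it (the URL and the "./stream" import path are placeholders):

import { fetch as tauriFetch } from "./stream";

async function streamExample() {
  const res = await tauriFetch("https://api.example.com/v1/chat/completions", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ stream: true }),
  });
  // Response.body is fed chunk by chunk from the `stream-response` events
  const reader = res.body!.getReader();
  const decoder = new TextDecoder();
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    console.log(decoder.decode(value, { stream: true }));
  }
}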
@ -0,0 +1,102 @@
import { sign, hash as getHash, hex } from "./hmac";

// HMAC using SHA-256 with the given secret
function sha256(message: any, secret: any, encoding?: string) {
  const result = sign(secret, message);
  return encoding == "hex" ? hex(result).toString() : result;
}

function getDate(timestamp: number) {
  const date = new Date(timestamp * 1000);
  const year = date.getUTCFullYear();
  const month = ("0" + (date.getUTCMonth() + 1)).slice(-2);
  const day = ("0" + date.getUTCDate()).slice(-2);
  return `${year}-${month}-${day}`;
}

export async function getHeader(
  payload: any,
  SECRET_ID: string,
  SECRET_KEY: string,
) {
  // https://cloud.tencent.com/document/api/1729/105701

  const endpoint = "hunyuan.tencentcloudapi.com";
  const service = "hunyuan";
  const region = ""; // optional
  const action = "ChatCompletions";
  const version = "2023-09-01";
  const timestamp = Math.floor(Date.now() / 1000);
  // Date handling: derive the UTC calendar date from the timestamp
  const date = getDate(timestamp);

  // ************* Step 1: build the canonical request string *************

  const hashedRequestPayload = getHash(payload);
  const httpRequestMethod = "POST";
  const contentType = "application/json";
  const canonicalUri = "/";
  const canonicalQueryString = "";
  const canonicalHeaders =
    `content-type:${contentType}\n` +
    "host:" +
    endpoint +
    "\n" +
    "x-tc-action:" +
    action.toLowerCase() +
    "\n";
  const signedHeaders = "content-type;host;x-tc-action";

  const canonicalRequest = [
    httpRequestMethod,
    canonicalUri,
    canonicalQueryString,
    canonicalHeaders,
    signedHeaders,
    hashedRequestPayload,
  ].join("\n");

  // ************* Step 2: build the string to sign *************
  const algorithm = "TC3-HMAC-SHA256";
  const hashedCanonicalRequest = getHash(canonicalRequest);
  const credentialScope = date + "/" + service + "/" + "tc3_request";
  const stringToSign =
    algorithm +
    "\n" +
    timestamp +
    "\n" +
    credentialScope +
    "\n" +
    hashedCanonicalRequest;

  // ************* Step 3: compute the signature *************
  const kDate = sha256(date, "TC3" + SECRET_KEY);
  const kService = sha256(service, kDate);
  const kSigning = sha256("tc3_request", kService);
  const signature = sha256(stringToSign, kSigning, "hex");

  // ************* Step 4: assemble the Authorization header *************
  const authorization =
    algorithm +
    " " +
    "Credential=" +
    SECRET_ID +
    "/" +
    credentialScope +
    ", " +
    "SignedHeaders=" +
    signedHeaders +
    ", " +
    "Signature=" +
    signature;

  return {
    Authorization: authorization,
    "Content-Type": contentType,
    Host: endpoint,
    "X-TC-Action": action,
    "X-TC-Timestamp": timestamp.toString(),
    "X-TC-Version": version,
    "X-TC-Region": region,
  };
}
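The headers returned by `getHeader` follow the TC3-HMAC-SHA256 scheme for a Hunyuan ChatCompletions call. A hedged usage sketch assuming `getHeader` is imported from the file above; the credentials, model name, and wrapper function are placeholders, and note that the payload string passed to `getHeader` must be byte-for-byte the body that is sent:

async function callHunyuan() {
  const payload = JSON.stringify({
    Model: "hunyuan-lite", // illustrative model name
    Messages: [{ Role: "user", Content: "hello" }],
  });

  const headers = await getHeader(payload, "YOUR_SECRET_ID", "YOUR_SECRET_KEY");

  const res = await fetch("https://hunyuan.tencentcloudapi.com", {
    method: "POST",
    headers,
    body: payload,
  });
  return res.json();
}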
package.json
@ -59,6 +59,7 @@
    "nanoid": "^5.0.3",
    "next": "^14.1.1",
    "node-fetch": "^3.3.1",
    "openapi-client-axios": "^7.5.5",
    "officeparser": "^4.0.8",
    "pdf-parse": "^1.1.1",
    "react": "^18.2.0",

@ -78,22 +79,34 @@
    "zustand": "^4.3.8"
  },
  "devDependencies": {
    "@tauri-apps/api": "^1.6.0",
    "@tauri-apps/cli": "1.5.11",
    "@testing-library/dom": "^10.4.0",
    "@testing-library/jest-dom": "^6.6.2",
    "@testing-library/react": "^16.0.1",
    "@types/jest": "^29.5.14",
    "@types/js-yaml": "4.0.9",
    "@types/lodash-es": "^4.17.12",
    "@types/node": "^20.11.30",
    "@types/react": "^18.2.70",
    "@types/react-dom": "^18.2.7",
    "@types/react-katex": "^3.0.0",
    "@types/spark-md5": "^3.0.4",
    "@types/html-to-text": "^9.0.1",
    "@types/md5": "^2.3.5",
    "concurrently": "^8.2.2",
    "cross-env": "^7.0.3",
    "eslint": "^8.49.0",
    "eslint-config-next": "13.4.19",
    "eslint-config-prettier": "^8.8.0",
    "eslint-plugin-prettier": "^5.1.3",
    "eslint-plugin-unused-imports": "^3.2.0",
    "husky": "^8.0.0",
    "jest": "^29.7.0",
    "jest-environment-jsdom": "^29.7.0",
    "lint-staged": "^13.2.2",
    "prettier": "^3.0.2",
    "ts-node": "^10.9.2",
    "tsx": "^4.16.0",
    "typescript": "5.2.2",
    "watch": "^1.0.2",

@ -104,4 +117,4 @@
    "@langchain/core": "0.2.23"
  },
  "packageManager": "yarn@1.22.19"
}