Merge branch 'ChatGPTNextWeb:main' into personal

Commit: ff7fe69c2b

README.md (13 changed lines)
@@ -31,7 +31,7 @@ One-Click to get a well-designed cross-platform ChatGPT web UI, with GPT3, GPT4

  [MacOS-image]: https://img.shields.io/badge/-MacOS-black?logo=apple
  [Linux-image]: https://img.shields.io/badge/-Linux-333?logo=ubuntu

- [<img src="https://vercel.com/button" alt="Deploy on Zeabur" height="30">](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FChatGPTNextWeb%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=nextchat&repository-name=NextChat) [<img src="https://zeabur.com/button.svg" alt="Deploy on Zeabur" height="30">](https://zeabur.com/templates/ZBUEFA) [<img src="https://gitpod.io/button/open-in-gitpod.svg" alt="Open in Gitpod" height="30">](https://gitpod.io/#https://github.com/Yidadaa/ChatGPT-Next-Web) [<img src="https://img.shields.io/badge/BT_Deploy-Install-20a53a" alt="Open in Gitpod" height="30">](https://www.bt.cn/new/download.html)
+ [<img src="https://vercel.com/button" alt="Deploy on Vercel" height="30">](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FChatGPTNextWeb%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=nextchat&repository-name=NextChat) [<img src="https://zeabur.com/button.svg" alt="Deploy on Zeabur" height="30">](https://zeabur.com/templates/ZBUEFA) [<img src="https://gitpod.io/button/open-in-gitpod.svg" alt="Open in Gitpod" height="30">](https://gitpod.io/#https://github.com/Yidadaa/ChatGPT-Next-Web) [<img src="https://img.shields.io/badge/BT_Deploy-Install-20a53a" alt="BT Deply Install" height="30">](https://www.bt.cn/new/download.html) [<img src="https://svgshare.com/i/1AVg.svg" alt="Deploy to Alibaba Cloud" height="30">](https://computenest.aliyun.com/market/service-f1c9b75e59814dc49d52)

  [<img src="https://github.com/user-attachments/assets/903482d4-3e87-4134-9af1-f2588fa90659" height="60" width="288" >](https://monica.im/?utm=nxcrp)

@@ -301,6 +301,14 @@ iflytek Api Key.

  iflytek Api Secret.

+ ### `CHATGLM_API_KEY` (optional)
+
+ ChatGLM Api Key.
+
+ ### `CHATGLM_URL` (optional)
+
+ ChatGLM Api Url.
+
  ### `HIDE_USER_API_KEY` (optional)

  > Default: Empty

@@ -397,6 +405,9 @@ yarn dev

  > [简体中文 > 如何部署到私人服务器](./README_CN.md#部署)

+ ### BT Install
+
+ > [简体中文 > 如何通过宝塔一键部署](./docs/bt-cn.md)

  ### Docker (Recommended)

  ```shell
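The server-side wiring for the two new variables lives in app/config/server.ts, which is not part of this excerpt; based on the serverConfig.chatglmUrl / serverConfig.chatglmApiKey reads in the hunks below, it presumably maps them roughly like this (a sketch, with property names inferred from the call sites):

```ts
// Sketch only — the real mapping is in app/config/server.ts, which this excerpt does not show.
export const getServerSideConfig = () => ({
  // ...existing provider settings...
  chatglmUrl: process.env.CHATGLM_URL,        // optional custom ChatGLM endpoint
  chatglmApiKey: process.env.CHATGLM_API_KEY, // server-side ChatGLM key
});
```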
README_CN.md (10 changed lines)

@@ -184,6 +184,13 @@ ByteDance Api Url.

  讯飞星火Api Secret.

+ ### `CHATGLM_API_KEY` (可选)
+
+ ChatGLM Api Key.
+
+ ### `CHATGLM_URL` (可选)
+
+ ChatGLM Api Url.

  ### `HIDE_USER_API_KEY` (可选)

@@ -264,6 +271,9 @@ BASE_URL=https://b.nextweb.fun/api/proxy

  ## 部署

+ ### 宝塔面板部署
+
+ > [简体中文 > 如何通过宝塔一键部署](./docs/bt-cn.md)

  ### 容器部署 (推荐)

  > Docker 版本需要在 20 及其以上,否则会提示找不到镜像。
@@ -11,6 +11,7 @@ import { handle as moonshotHandler } from "../../moonshot";
  import { handle as stabilityHandler } from "../../stability";
  import { handle as iflytekHandler } from "../../iflytek";
  import { handle as xaiHandler } from "../../xai";
+ import { handle as chatglmHandler } from "../../glm";
  import { handle as proxyHandler } from "../../proxy";

  async function handle(

@@ -41,6 +42,8 @@ async function handle(
      return iflytekHandler(req, { params });
    case ApiPath.XAI:
      return xaiHandler(req, { params });
+   case ApiPath.ChatGLM:
+     return chatglmHandler(req, { params });
    case ApiPath.OpenAI:
      return openaiHandler(req, { params });
    default:
@@ -95,6 +95,9 @@ export function auth(req: NextRequest, modelProvider: ModelProvider) {
      case ModelProvider.XAI:
        systemApiKey = serverConfig.xaiApiKey;
        break;
+     case ModelProvider.ChatGLM:
+       systemApiKey = serverConfig.chatglmApiKey;
+       break;
      case ModelProvider.GPT:
      default:
        if (req.nextUrl.pathname.includes("azure/deployments")) {
@@ -1,8 +1,8 @@
  import { NextRequest, NextResponse } from "next/server";
  import { getServerSideConfig } from "../config/server";
  import { OPENAI_BASE_URL, ServiceProvider } from "../constant";
- import { isModelAvailableInServer } from "../utils/model";
  import { cloudflareAIGatewayUrl } from "../utils/cloudflare";
+ import { getModelProvider, isModelAvailableInServer } from "../utils/model";

  const serverConfig = getServerSideConfig();

@@ -71,7 +71,7 @@ export async function requestOpenai(req: NextRequest) {
        .filter((v) => !!v && !v.startsWith("-") && v.includes(modelName))
        .forEach((m) => {
          const [fullName, displayName] = m.split("=");
-         const [_, providerName] = fullName.split("@");
+         const [_, providerName] = getModelProvider(fullName);
          if (providerName === "azure" && !displayName) {
            const [_, deployId] = (serverConfig?.azureUrl ?? "").split(
              "deployments/",
@@ -0,0 +1,129 @@
import { getServerSideConfig } from "@/app/config/server";
import {
  CHATGLM_BASE_URL,
  ApiPath,
  ModelProvider,
  ServiceProvider,
} from "@/app/constant";
import { prettyObject } from "@/app/utils/format";
import { NextRequest, NextResponse } from "next/server";
import { auth } from "@/app/api/auth";
import { isModelAvailableInServer } from "@/app/utils/model";

const serverConfig = getServerSideConfig();

export async function handle(
  req: NextRequest,
  { params }: { params: { path: string[] } },
) {
  console.log("[GLM Route] params ", params);

  if (req.method === "OPTIONS") {
    return NextResponse.json({ body: "OK" }, { status: 200 });
  }

  const authResult = auth(req, ModelProvider.ChatGLM);
  if (authResult.error) {
    return NextResponse.json(authResult, {
      status: 401,
    });
  }

  try {
    const response = await request(req);
    return response;
  } catch (e) {
    console.error("[GLM] ", e);
    return NextResponse.json(prettyObject(e));
  }
}

async function request(req: NextRequest) {
  const controller = new AbortController();

  // alibaba use base url or just remove the path
  let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.ChatGLM, "");

  let baseUrl = serverConfig.chatglmUrl || CHATGLM_BASE_URL;

  if (!baseUrl.startsWith("http")) {
    baseUrl = `https://${baseUrl}`;
  }

  if (baseUrl.endsWith("/")) {
    baseUrl = baseUrl.slice(0, -1);
  }

  console.log("[Proxy] ", path);
  console.log("[Base Url]", baseUrl);

  const timeoutId = setTimeout(
    () => {
      controller.abort();
    },
    10 * 60 * 1000,
  );

  const fetchUrl = `${baseUrl}${path}`;
  console.log("[Fetch Url] ", fetchUrl);
  const fetchOptions: RequestInit = {
    headers: {
      "Content-Type": "application/json",
      Authorization: req.headers.get("Authorization") ?? "",
    },
    method: req.method,
    body: req.body,
    redirect: "manual",
    // @ts-ignore
    duplex: "half",
    signal: controller.signal,
  };

  // #1815 try to refuse some request to some models
  if (serverConfig.customModels && req.body) {
    try {
      const clonedBody = await req.text();
      fetchOptions.body = clonedBody;

      const jsonBody = JSON.parse(clonedBody) as { model?: string };

      // not undefined and is false
      if (
        isModelAvailableInServer(
          serverConfig.customModels,
          jsonBody?.model as string,
          ServiceProvider.ChatGLM as string,
        )
      ) {
        return NextResponse.json(
          {
            error: true,
            message: `you are not allowed to use ${jsonBody?.model} model`,
          },
          {
            status: 403,
          },
        );
      }
    } catch (e) {
      console.error(`[GLM] filter`, e);
    }
  }
  try {
    const res = await fetch(fetchUrl, fetchOptions);

    // to prevent browser prompt for credentials
    const newHeaders = new Headers(res.headers);
    newHeaders.delete("www-authenticate");
    // to disable nginx buffering
    newHeaders.set("X-Accel-Buffering", "no");

    return new Response(res.body, {
      status: res.status,
      statusText: res.statusText,
      headers: newHeaders,
    });
  } finally {
    clearTimeout(timeoutId);
  }
}
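The route above (and the client platform further down) imports CHATGLM_BASE_URL, ApiPath.ChatGLM and ChatGLM.ChatPath from app/constant.ts, a hunk this excerpt does not include. A sketch of the assumed shape — the concrete string values here are guesses, not taken from the diff:

```ts
// Assumed constants (app/constant.ts is not shown in this excerpt); values are illustrative.
export const CHATGLM_BASE_URL = "https://open.bigmodel.cn"; // default upstream endpoint
export enum ApiPath {
  // ...
  ChatGLM = "/api/chatglm", // proxy prefix stripped by replaceAll() in the route above
}
export const ChatGLM = {
  ExampleEndpoint: CHATGLM_BASE_URL,
  ChatPath: "api/paas/v4/chat/completions", // path used by ChatGLMApi.chat()
};
```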
@@ -21,6 +21,7 @@ import { HunyuanApi } from "./platforms/tencent";
  import { MoonshotApi } from "./platforms/moonshot";
  import { SparkApi } from "./platforms/iflytek";
  import { XAIApi } from "./platforms/xai";
+ import { ChatGLMApi } from "./platforms/glm";

  export const ROLES = ["system", "user", "assistant"] as const;
  export type MessageRole = (typeof ROLES)[number];

@@ -69,7 +70,7 @@ export interface ChatOptions {
    config: LLMConfig;

    onUpdate?: (message: string, chunk: string) => void;
-   onFinish: (message: string) => void;
+   onFinish: (message: string, responseRes: Response) => void;
    onError?: (err: Error) => void;
    onController?: (controller: AbortController) => void;
    onBeforeTool?: (tool: ChatMessageTool) => void;

@@ -156,6 +157,9 @@ export class ClientApi {
      case ModelProvider.XAI:
        this.llm = new XAIApi();
        break;
+     case ModelProvider.ChatGLM:
+       this.llm = new ChatGLMApi();
+       break;
      default:
        this.llm = new ChatGPTApi();
    }

@@ -244,6 +248,7 @@ export function getHeaders(ignoreHeaders: boolean = false) {
    const isMoonshot = modelConfig.providerName === ServiceProvider.Moonshot;
    const isIflytek = modelConfig.providerName === ServiceProvider.Iflytek;
    const isXAI = modelConfig.providerName === ServiceProvider.XAI;
+   const isChatGLM = modelConfig.providerName === ServiceProvider.ChatGLM;
    const isEnabledAccessControl = accessStore.enabledAccessControl();
    const apiKey = isGoogle
      ? accessStore.googleApiKey

@@ -259,6 +264,8 @@ export function getHeaders(ignoreHeaders: boolean = false) {
      ? accessStore.moonshotApiKey
      : isXAI
        ? accessStore.xaiApiKey
+       : isChatGLM
+         ? accessStore.chatglmApiKey
          : isIflytek
            ? accessStore.iflytekApiKey && accessStore.iflytekApiSecret
              ? accessStore.iflytekApiKey + ":" + accessStore.iflytekApiSecret

@@ -274,6 +281,7 @@ export function getHeaders(ignoreHeaders: boolean = false) {
    isMoonshot,
    isIflytek,
    isXAI,
+   isChatGLM,
    apiKey,
    isEnabledAccessControl,
  };

@@ -338,6 +346,8 @@ export function getClientApi(provider: ServiceProvider): ClientApi {
      return new ClientApi(ModelProvider.Iflytek);
    case ServiceProvider.XAI:
      return new ClientApi(ModelProvider.XAI);
+   case ServiceProvider.ChatGLM:
+     return new ClientApi(ModelProvider.ChatGLM);
    default:
      return new ClientApi(ModelProvider.GPT);
  }
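The ChatOptions.onFinish change above is what drives most of the provider diffs that follow: each platform's finish() helper and non-streaming branch now hands the underlying Response back along with the message text, so callers can inspect status codes and headers. A condensed, self-contained sketch of the pattern (illustrative names, not any one provider verbatim):

```ts
// Minimal sketch of the new callback shape; demoChat and OnFinish are illustrative names.
type OnFinish = (message: string, responseRes: Response) => void;

async function demoChat(chatPath: string, payload: RequestInit, onFinish: OnFinish) {
  const res = await fetch(chatPath, payload);
  const resJson = await res.json();
  const message = resJson?.choices?.at(0)?.message?.content ?? "";
  // The Response now travels with the text, e.g. so callers can react to res.status === 401.
  onFinish(message, res);
}
```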
@ -143,6 +143,7 @@ export class QwenApi implements LLMApi {
|
|||
let responseText = "";
|
||||
let remainText = "";
|
||||
let finished = false;
|
||||
let responseRes: Response;
|
||||
|
||||
// animate response to make it looks smooth
|
||||
function animateResponseText() {
|
||||
|
@ -172,7 +173,7 @@ export class QwenApi implements LLMApi {
|
|||
const finish = () => {
|
||||
if (!finished) {
|
||||
finished = true;
|
||||
options.onFinish(responseText + remainText);
|
||||
options.onFinish(responseText + remainText, responseRes);
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -188,6 +189,7 @@ export class QwenApi implements LLMApi {
|
|||
"[Alibaba] request response content type: ",
|
||||
contentType,
|
||||
);
|
||||
responseRes = res;
|
||||
|
||||
if (contentType?.startsWith("text/plain")) {
|
||||
responseText = await res.clone().text();
|
||||
|
@ -254,7 +256,7 @@ export class QwenApi implements LLMApi {
|
|||
|
||||
const resJson = await res.json();
|
||||
const message = this.extractMessage(resJson);
|
||||
options.onFinish(message);
|
||||
options.onFinish(message, res);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("[Request] failed to make a chat request", e);
|
||||
|
|
|
@@ -317,13 +317,14 @@ export class ClaudeApi implements LLMApi {
    };

    try {
-     controller.signal.onabort = () => options.onFinish("");
+     controller.signal.onabort = () =>
+       options.onFinish("", new Response(null, { status: 400 }));

      const res = await fetch(path, payload);
      const resJson = await res.json();

      const message = this.extractMessage(resJson);
-     options.onFinish(message);
+     options.onFinish(message, res);
    } catch (e) {
      console.error("failed to chat", e);
      options.onError?.(e as Error);
@ -162,6 +162,7 @@ export class ErnieApi implements LLMApi {
|
|||
let responseText = "";
|
||||
let remainText = "";
|
||||
let finished = false;
|
||||
let responseRes: Response;
|
||||
|
||||
// animate response to make it looks smooth
|
||||
function animateResponseText() {
|
||||
|
@ -191,7 +192,7 @@ export class ErnieApi implements LLMApi {
|
|||
const finish = () => {
|
||||
if (!finished) {
|
||||
finished = true;
|
||||
options.onFinish(responseText + remainText);
|
||||
options.onFinish(responseText + remainText, responseRes);
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -204,7 +205,7 @@ export class ErnieApi implements LLMApi {
|
|||
clearTimeout(requestTimeoutId);
|
||||
const contentType = res.headers.get("content-type");
|
||||
console.log("[Baidu] request response content type: ", contentType);
|
||||
|
||||
responseRes = res;
|
||||
if (contentType?.startsWith("text/plain")) {
|
||||
responseText = await res.clone().text();
|
||||
return finish();
|
||||
|
@ -267,7 +268,7 @@ export class ErnieApi implements LLMApi {
|
|||
|
||||
const resJson = await res.json();
|
||||
const message = resJson?.result;
|
||||
options.onFinish(message);
|
||||
options.onFinish(message, res);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("[Request] failed to make a chat request", e);
|
||||
|
|
|
@ -130,6 +130,7 @@ export class DoubaoApi implements LLMApi {
|
|||
let responseText = "";
|
||||
let remainText = "";
|
||||
let finished = false;
|
||||
let responseRes: Response;
|
||||
|
||||
// animate response to make it looks smooth
|
||||
function animateResponseText() {
|
||||
|
@ -159,7 +160,7 @@ export class DoubaoApi implements LLMApi {
|
|||
const finish = () => {
|
||||
if (!finished) {
|
||||
finished = true;
|
||||
options.onFinish(responseText + remainText);
|
||||
options.onFinish(responseText + remainText, responseRes);
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -175,7 +176,7 @@ export class DoubaoApi implements LLMApi {
|
|||
"[ByteDance] request response content type: ",
|
||||
contentType,
|
||||
);
|
||||
|
||||
responseRes = res;
|
||||
if (contentType?.startsWith("text/plain")) {
|
||||
responseText = await res.clone().text();
|
||||
return finish();
|
||||
|
@ -241,7 +242,7 @@ export class DoubaoApi implements LLMApi {
|
|||
|
||||
const resJson = await res.json();
|
||||
const message = this.extractMessage(resJson);
|
||||
options.onFinish(message);
|
||||
options.onFinish(message, res);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("[Request] failed to make a chat request", e);
|
||||
|
|
|
@@ -0,0 +1,197 @@
"use client";
import {
  ApiPath,
  CHATGLM_BASE_URL,
  ChatGLM,
  REQUEST_TIMEOUT_MS,
} from "@/app/constant";
import {
  useAccessStore,
  useAppConfig,
  useChatStore,
  ChatMessageTool,
  usePluginStore,
} from "@/app/store";
import { stream } from "@/app/utils/chat";
import {
  ChatOptions,
  getHeaders,
  LLMApi,
  LLMModel,
  SpeechOptions,
} from "../api";
import { getClientConfig } from "@/app/config/client";
import { getMessageTextContent } from "@/app/utils";
import { RequestPayload } from "./openai";
import { fetch } from "@/app/utils/stream";

export class ChatGLMApi implements LLMApi {
  private disableListModels = true;

  path(path: string): string {
    const accessStore = useAccessStore.getState();

    let baseUrl = "";

    if (accessStore.useCustomConfig) {
      baseUrl = accessStore.chatglmUrl;
    }

    if (baseUrl.length === 0) {
      const isApp = !!getClientConfig()?.isApp;
      const apiPath = ApiPath.ChatGLM;
      baseUrl = isApp ? CHATGLM_BASE_URL : apiPath;
    }

    if (baseUrl.endsWith("/")) {
      baseUrl = baseUrl.slice(0, baseUrl.length - 1);
    }
    if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.ChatGLM)) {
      baseUrl = "https://" + baseUrl;
    }

    console.log("[Proxy Endpoint] ", baseUrl, path);

    return [baseUrl, path].join("/");
  }

  extractMessage(res: any) {
    return res.choices?.at(0)?.message?.content ?? "";
  }

  speech(options: SpeechOptions): Promise<ArrayBuffer> {
    throw new Error("Method not implemented.");
  }

  async chat(options: ChatOptions) {
    const messages: ChatOptions["messages"] = [];
    for (const v of options.messages) {
      const content = getMessageTextContent(v);
      messages.push({ role: v.role, content });
    }

    const modelConfig = {
      ...useAppConfig.getState().modelConfig,
      ...useChatStore.getState().currentSession().mask.modelConfig,
      ...{
        model: options.config.model,
        providerName: options.config.providerName,
      },
    };

    const requestPayload: RequestPayload = {
      messages,
      stream: options.config.stream,
      model: modelConfig.model,
      temperature: modelConfig.temperature,
      presence_penalty: modelConfig.presence_penalty,
      frequency_penalty: modelConfig.frequency_penalty,
      top_p: modelConfig.top_p,
    };

    console.log("[Request] glm payload: ", requestPayload);

    const shouldStream = !!options.config.stream;
    const controller = new AbortController();
    options.onController?.(controller);

    try {
      const chatPath = this.path(ChatGLM.ChatPath);
      const chatPayload = {
        method: "POST",
        body: JSON.stringify(requestPayload),
        signal: controller.signal,
        headers: getHeaders(),
      };

      // make a fetch request
      const requestTimeoutId = setTimeout(
        () => controller.abort(),
        REQUEST_TIMEOUT_MS,
      );

      if (shouldStream) {
        const [tools, funcs] = usePluginStore
          .getState()
          .getAsTools(
            useChatStore.getState().currentSession().mask?.plugin || [],
          );
        return stream(
          chatPath,
          requestPayload,
          getHeaders(),
          tools as any,
          funcs,
          controller,
          // parseSSE
          (text: string, runTools: ChatMessageTool[]) => {
            // console.log("parseSSE", text, runTools);
            const json = JSON.parse(text);
            const choices = json.choices as Array<{
              delta: {
                content: string;
                tool_calls: ChatMessageTool[];
              };
            }>;
            const tool_calls = choices[0]?.delta?.tool_calls;
            if (tool_calls?.length > 0) {
              const index = tool_calls[0]?.index;
              const id = tool_calls[0]?.id;
              const args = tool_calls[0]?.function?.arguments;
              if (id) {
                runTools.push({
                  id,
                  type: tool_calls[0]?.type,
                  function: {
                    name: tool_calls[0]?.function?.name as string,
                    arguments: args,
                  },
                });
              } else {
                // @ts-ignore
                runTools[index]["function"]["arguments"] += args;
              }
            }
            return choices[0]?.delta?.content;
          },
          // processToolMessage, include tool_calls message and tool call results
          (
            requestPayload: RequestPayload,
            toolCallMessage: any,
            toolCallResult: any[],
          ) => {
            // @ts-ignore
            requestPayload?.messages?.splice(
              // @ts-ignore
              requestPayload?.messages?.length,
              0,
              toolCallMessage,
              ...toolCallResult,
            );
          },
          options,
        );
      } else {
        const res = await fetch(chatPath, chatPayload);
        clearTimeout(requestTimeoutId);

        const resJson = await res.json();
        const message = this.extractMessage(resJson);
        options.onFinish(message, res);
      }
    } catch (e) {
      console.log("[Request] failed to make a chat request", e);
      options.onError?.(e as Error);
    }
  }
  async usage() {
    return {
      used: 0,
      total: 0,
    };
  }

  async models(): Promise<LLMModel[]> {
    return [];
  }
}
@ -274,7 +274,7 @@ export class GeminiProApi implements LLMApi {
|
|||
);
|
||||
}
|
||||
const message = apiClient.extractMessage(resJson);
|
||||
options.onFinish(message);
|
||||
options.onFinish(message, res);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("[Request] failed to make a chat request", e);
|
||||
|
|
|
@ -117,6 +117,7 @@ export class SparkApi implements LLMApi {
|
|||
let responseText = "";
|
||||
let remainText = "";
|
||||
let finished = false;
|
||||
let responseRes: Response;
|
||||
|
||||
// Animate response text to make it look smooth
|
||||
function animateResponseText() {
|
||||
|
@ -143,7 +144,7 @@ export class SparkApi implements LLMApi {
|
|||
const finish = () => {
|
||||
if (!finished) {
|
||||
finished = true;
|
||||
options.onFinish(responseText + remainText);
|
||||
options.onFinish(responseText + remainText, responseRes);
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -156,7 +157,7 @@ export class SparkApi implements LLMApi {
|
|||
clearTimeout(requestTimeoutId);
|
||||
const contentType = res.headers.get("content-type");
|
||||
console.log("[Spark] request response content type: ", contentType);
|
||||
|
||||
responseRes = res;
|
||||
if (contentType?.startsWith("text/plain")) {
|
||||
responseText = await res.clone().text();
|
||||
return finish();
|
||||
|
@ -231,7 +232,7 @@ export class SparkApi implements LLMApi {
|
|||
|
||||
const resJson = await res.json();
|
||||
const message = this.extractMessage(resJson);
|
||||
options.onFinish(message);
|
||||
options.onFinish(message, res);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("[Request] failed to make a chat request", e);
|
||||
|
|
|
@ -180,7 +180,7 @@ export class MoonshotApi implements LLMApi {
|
|||
|
||||
const resJson = await res.json();
|
||||
const message = this.extractMessage(resJson);
|
||||
options.onFinish(message);
|
||||
options.onFinish(message, res);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("[Request] failed to make a chat request", e);
|
||||
|
|
|
@@ -65,6 +65,7 @@ export interface RequestPayload {
    frequency_penalty: number;
    top_p: number;
    max_tokens?: number;
+   max_completion_tokens?: number;
  }

  export interface DalleRequestPayload {

@@ -233,6 +234,11 @@ export class ChatGPTApi implements LLMApi {
        // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
      };

+     // O1 使用 max_completion_tokens 控制token数 (https://platform.openai.com/docs/guides/reasoning#controlling-costs)
+     if (isO1) {
+       requestPayload["max_completion_tokens"] = modelConfig.max_tokens;
+     }
+
      // add max_tokens to vision model
      if (visionModel) {
        requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);

@@ -361,7 +367,7 @@ export class ChatGPTApi implements LLMApi {

        const resJson = await res.json();
        const message = await this.extractMessage(resJson);
-       options.onFinish(message);
+       options.onFinish(message, res);
      }
    } catch (e) {
      console.log("[Request] failed to make a chat request", e);
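For o1-family models the hunk above swaps max_tokens for max_completion_tokens, as the linked OpenAI reasoning guide recommends. An illustrative request body after the change (model name and limit are example values, not taken from the diff):

```ts
// Example payload shape for an o1-family model after the change above.
const o1Payload = {
  model: "o1-mini",
  messages: [{ role: "user" as const, content: "hello" }],
  stream: false,
  max_completion_tokens: 4000, // sent instead of max_tokens for o1 models
};
```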
@ -142,6 +142,7 @@ export class HunyuanApi implements LLMApi {
|
|||
let responseText = "";
|
||||
let remainText = "";
|
||||
let finished = false;
|
||||
let responseRes: Response;
|
||||
|
||||
// animate response to make it looks smooth
|
||||
function animateResponseText() {
|
||||
|
@ -171,7 +172,7 @@ export class HunyuanApi implements LLMApi {
|
|||
const finish = () => {
|
||||
if (!finished) {
|
||||
finished = true;
|
||||
options.onFinish(responseText + remainText);
|
||||
options.onFinish(responseText + remainText, responseRes);
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -187,7 +188,7 @@ export class HunyuanApi implements LLMApi {
|
|||
"[Tencent] request response content type: ",
|
||||
contentType,
|
||||
);
|
||||
|
||||
responseRes = res;
|
||||
if (contentType?.startsWith("text/plain")) {
|
||||
responseText = await res.clone().text();
|
||||
return finish();
|
||||
|
@ -253,7 +254,7 @@ export class HunyuanApi implements LLMApi {
|
|||
|
||||
const resJson = await res.json();
|
||||
const message = this.extractMessage(resJson);
|
||||
options.onFinish(message);
|
||||
options.onFinish(message, res);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("[Request] failed to make a chat request", e);
|
||||
|
|
|
@ -173,7 +173,7 @@ export class XAIApi implements LLMApi {
|
|||
|
||||
const resJson = await res.json();
|
||||
const message = this.extractMessage(resJson);
|
||||
options.onFinish(message);
|
||||
options.onFinish(message, res);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("[Request] failed to make a chat request", e);
|
||||
|
|
|
@ -18,6 +18,8 @@ import {
|
|||
trackSettingsPageGuideToCPaymentClick,
|
||||
trackAuthorizationPageButtonToCPaymentClick,
|
||||
} from "../utils/auth-settings-events";
|
||||
import clsx from "clsx";
|
||||
|
||||
const storage = safeLocalStorage();
|
||||
|
||||
export function AuthPage() {
|
||||
|
@ -54,7 +56,7 @@ export function AuthPage() {
|
|||
onClick={() => navigate(Path.Home)}
|
||||
></IconButton>
|
||||
</div>
|
||||
<div className={`no-dark ${styles["auth-logo"]}`}>
|
||||
<div className={clsx("no-dark", styles["auth-logo"])}>
|
||||
<BotIcon />
|
||||
</div>
|
||||
|
||||
|
@ -163,7 +165,7 @@ function TopBanner() {
|
|||
onMouseEnter={handleMouseEnter}
|
||||
onMouseLeave={handleMouseLeave}
|
||||
>
|
||||
<div className={`${styles["top-banner-inner"]} no-dark`}>
|
||||
<div className={clsx(styles["top-banner-inner"], "no-dark")}>
|
||||
<Logo className={styles["top-banner-logo"]}></Logo>
|
||||
<span>
|
||||
{Locale.Auth.TopTips}
|
||||
|
|
|
@ -2,6 +2,7 @@ import * as React from "react";
|
|||
|
||||
import styles from "./button.module.scss";
|
||||
import { CSSProperties } from "react";
|
||||
import clsx from "clsx";
|
||||
|
||||
export type ButtonType = "primary" | "danger" | null;
|
||||
|
||||
|
@ -22,12 +23,16 @@ export function IconButton(props: {
|
|||
}) {
|
||||
return (
|
||||
<button
|
||||
className={
|
||||
styles["icon-button"] +
|
||||
` ${props.bordered && styles.border} ${props.shadow && styles.shadow} ${
|
||||
props.className ?? ""
|
||||
} clickable ${styles[props.type ?? ""]}`
|
||||
}
|
||||
className={clsx(
|
||||
"clickable",
|
||||
styles["icon-button"],
|
||||
{
|
||||
[styles.border]: props.bordered,
|
||||
[styles.shadow]: props.shadow,
|
||||
},
|
||||
styles[props.type ?? ""],
|
||||
props.className,
|
||||
)}
|
||||
onClick={props.onClick}
|
||||
title={props.title}
|
||||
disabled={props.disabled}
|
||||
|
@ -40,10 +45,9 @@ export function IconButton(props: {
|
|||
{props.icon && (
|
||||
<div
|
||||
aria-label={props.text || props.title}
|
||||
className={
|
||||
styles["icon-button-icon"] +
|
||||
` ${props.type === "primary" && "no-dark"}`
|
||||
}
|
||||
className={clsx(styles["icon-button-icon"], {
|
||||
"no-dark": props.type === "primary",
|
||||
})}
|
||||
>
|
||||
{props.icon}
|
||||
</div>
|
||||
|
|
|
@ -18,6 +18,7 @@ import { Mask } from "../store/mask";
|
|||
import { useRef, useEffect } from "react";
|
||||
import { showConfirm } from "./ui-lib";
|
||||
import { useMobileScreen } from "../utils";
|
||||
import clsx from "clsx";
|
||||
|
||||
export function ChatItem(props: {
|
||||
onClick?: () => void;
|
||||
|
@ -45,11 +46,11 @@ export function ChatItem(props: {
|
|||
<Draggable draggableId={`${props.id}`} index={props.index}>
|
||||
{(provided) => (
|
||||
<div
|
||||
className={`${styles["chat-item"]} ${
|
||||
props.selected &&
|
||||
(currentPath === Path.Chat || currentPath === Path.Home) &&
|
||||
styles["chat-item-selected"]
|
||||
}`}
|
||||
className={clsx(styles["chat-item"], {
|
||||
[styles["chat-item-selected"]]:
|
||||
props.selected &&
|
||||
(currentPath === Path.Chat || currentPath === Path.Home),
|
||||
})}
|
||||
onClick={props.onClick}
|
||||
ref={(ele) => {
|
||||
draggableRef.current = ele;
|
||||
|
@ -63,7 +64,7 @@ export function ChatItem(props: {
|
|||
>
|
||||
{props.narrow ? (
|
||||
<div className={styles["chat-item-narrow"]}>
|
||||
<div className={styles["chat-item-avatar"] + " no-dark"}>
|
||||
<div className={clsx(styles["chat-item-avatar"], "no-dark")}>
|
||||
<MaskAvatar
|
||||
avatar={props.mask.avatar}
|
||||
model={props.mask.modelConfig.model}
|
||||
|
|
|
@ -120,6 +120,8 @@ import { createTTSPlayer } from "../utils/audio";
|
|||
import { MsEdgeTTS, OUTPUT_FORMAT } from "../utils/ms_edge_tts";
|
||||
|
||||
import { isEmpty } from "lodash-es";
|
||||
import { getModelProvider } from "../utils/model";
|
||||
import clsx from "clsx";
|
||||
|
||||
const localStorage = safeLocalStorage();
|
||||
|
||||
|
@ -148,7 +150,8 @@ export function SessionConfigModel(props: { onClose: () => void }) {
|
|||
text={Locale.Chat.Config.Reset}
|
||||
onClick={async () => {
|
||||
if (await showConfirm(Locale.Memory.ResetConfirm)) {
|
||||
chatStore.updateCurrentSession(
|
||||
chatStore.updateTargetSession(
|
||||
session,
|
||||
(session) => (session.memoryPrompt = ""),
|
||||
);
|
||||
}
|
||||
|
@ -173,7 +176,10 @@ export function SessionConfigModel(props: { onClose: () => void }) {
|
|||
updateMask={(updater) => {
|
||||
const mask = { ...session.mask };
|
||||
updater(mask);
|
||||
chatStore.updateCurrentSession((session) => (session.mask = mask));
|
||||
chatStore.updateTargetSession(
|
||||
session,
|
||||
(session) => (session.mask = mask),
|
||||
);
|
||||
}}
|
||||
shouldSyncFromGlobal
|
||||
extraListItems={
|
||||
|
@ -206,7 +212,7 @@ function PromptToast(props: {
|
|||
<div className={styles["prompt-toast"]} key="prompt-toast">
|
||||
{props.showToast && context.length > 0 && (
|
||||
<div
|
||||
className={styles["prompt-toast-inner"] + " clickable"}
|
||||
className={clsx(styles["prompt-toast-inner"], "clickable")}
|
||||
role="button"
|
||||
onClick={() => props.setShowModal(true)}
|
||||
>
|
||||
|
@ -327,10 +333,9 @@ export function PromptHints(props: {
|
|||
{props.prompts.map((prompt, i) => (
|
||||
<div
|
||||
ref={i === selectIndex ? selectedRef : null}
|
||||
className={
|
||||
styles["prompt-hint"] +
|
||||
` ${i === selectIndex ? styles["prompt-hint-selected"] : ""}`
|
||||
}
|
||||
className={clsx(styles["prompt-hint"], {
|
||||
[styles["prompt-hint-selected"]]: i === selectIndex,
|
||||
})}
|
||||
key={prompt.title + i.toString()}
|
||||
onClick={() => props.onPromptSelect(prompt)}
|
||||
onMouseEnter={() => setSelectIndex(i)}
|
||||
|
@ -345,12 +350,14 @@ export function PromptHints(props: {
|
|||
|
||||
function ClearContextDivider() {
|
||||
const chatStore = useChatStore();
|
||||
const session = chatStore.currentSession();
|
||||
|
||||
return (
|
||||
<div
|
||||
className={styles["clear-context"]}
|
||||
onClick={() =>
|
||||
chatStore.updateCurrentSession(
|
||||
chatStore.updateTargetSession(
|
||||
session,
|
||||
(session) => (session.clearContextIndex = undefined),
|
||||
)
|
||||
}
|
||||
|
@ -388,7 +395,7 @@ export function ChatAction(props: {
|
|||
|
||||
return (
|
||||
<div
|
||||
className={`${styles["chat-input-action"]} clickable`}
|
||||
className={clsx(styles["chat-input-action"], "clickable")}
|
||||
onClick={() => {
|
||||
props.onClick();
|
||||
setTimeout(updateWidth, 1);
|
||||
|
@ -460,6 +467,7 @@ export function ChatActions(props: {
|
|||
const navigate = useNavigate();
|
||||
const chatStore = useChatStore();
|
||||
const pluginStore = usePluginStore();
|
||||
const session = chatStore.currentSession();
|
||||
|
||||
// switch themes
|
||||
const theme = config.theme;
|
||||
|
@ -476,10 +484,9 @@ export function ChatActions(props: {
|
|||
const stopAll = () => ChatControllerPool.stopAll();
|
||||
|
||||
// switch model
|
||||
const currentModel = chatStore.currentSession().mask.modelConfig.model;
|
||||
const currentModel = session.mask.modelConfig.model;
|
||||
const currentProviderName =
|
||||
chatStore.currentSession().mask.modelConfig?.providerName ||
|
||||
ServiceProvider.OpenAI;
|
||||
session.mask.modelConfig?.providerName || ServiceProvider.OpenAI;
|
||||
const allModels = useAllModels();
|
||||
const models = useMemo(() => {
|
||||
const filteredModels = allModels.filter((m) => m.available);
|
||||
|
@ -513,12 +520,9 @@ export function ChatActions(props: {
|
|||
const dalle3Sizes: DalleSize[] = ["1024x1024", "1792x1024", "1024x1792"];
|
||||
const dalle3Qualitys: DalleQuality[] = ["standard", "hd"];
|
||||
const dalle3Styles: DalleStyle[] = ["vivid", "natural"];
|
||||
const currentSize =
|
||||
chatStore.currentSession().mask.modelConfig?.size ?? "1024x1024";
|
||||
const currentQuality =
|
||||
chatStore.currentSession().mask.modelConfig?.quality ?? "standard";
|
||||
const currentStyle =
|
||||
chatStore.currentSession().mask.modelConfig?.style ?? "vivid";
|
||||
const currentSize = session.mask.modelConfig?.size ?? "1024x1024";
|
||||
const currentQuality = session.mask.modelConfig?.quality ?? "standard";
|
||||
const currentStyle = session.mask.modelConfig?.style ?? "vivid";
|
||||
|
||||
const isMobileScreen = useMobileScreen();
|
||||
|
||||
|
@ -536,7 +540,7 @@ export function ChatActions(props: {
|
|||
if (isUnavailableModel && models.length > 0) {
|
||||
// show next model to default model if exist
|
||||
let nextModel = models.find((model) => model.isDefault) || models[0];
|
||||
chatStore.updateCurrentSession((session) => {
|
||||
chatStore.updateTargetSession(session, (session) => {
|
||||
session.mask.modelConfig.model = nextModel.name;
|
||||
session.mask.modelConfig.providerName = nextModel?.provider
|
||||
?.providerName as ServiceProvider;
|
||||
|
@ -547,7 +551,7 @@ export function ChatActions(props: {
|
|||
: nextModel.name,
|
||||
);
|
||||
}
|
||||
}, [chatStore, currentModel, models]);
|
||||
}, [chatStore, currentModel, models, session]);
|
||||
|
||||
return (
|
||||
<div className={styles["chat-input-actions"]}>
|
||||
|
@ -614,7 +618,7 @@ export function ChatActions(props: {
|
|||
text={Locale.Chat.InputActions.Clear}
|
||||
icon={<BreakIcon />}
|
||||
onClick={() => {
|
||||
chatStore.updateCurrentSession((session) => {
|
||||
chatStore.updateTargetSession(session, (session) => {
|
||||
if (session.clearContextIndex === session.messages.length) {
|
||||
session.clearContextIndex = undefined;
|
||||
} else {
|
||||
|
@ -645,8 +649,8 @@ export function ChatActions(props: {
|
|||
onClose={() => setShowModelSelector(false)}
|
||||
onSelection={(s) => {
|
||||
if (s.length === 0) return;
|
||||
const [model, providerName] = s[0].split("@");
|
||||
chatStore.updateCurrentSession((session) => {
|
||||
const [model, providerName] = getModelProvider(s[0]);
|
||||
chatStore.updateTargetSession(session, (session) => {
|
||||
session.mask.modelConfig.model = model as ModelType;
|
||||
session.mask.modelConfig.providerName =
|
||||
providerName as ServiceProvider;
|
||||
|
@ -684,7 +688,7 @@ export function ChatActions(props: {
|
|||
onSelection={(s) => {
|
||||
if (s.length === 0) return;
|
||||
const size = s[0];
|
||||
chatStore.updateCurrentSession((session) => {
|
||||
chatStore.updateTargetSession(session, (session) => {
|
||||
session.mask.modelConfig.size = size;
|
||||
});
|
||||
showToast(size);
|
||||
|
@ -711,7 +715,7 @@ export function ChatActions(props: {
|
|||
onSelection={(q) => {
|
||||
if (q.length === 0) return;
|
||||
const quality = q[0];
|
||||
chatStore.updateCurrentSession((session) => {
|
||||
chatStore.updateTargetSession(session, (session) => {
|
||||
session.mask.modelConfig.quality = quality;
|
||||
});
|
||||
showToast(quality);
|
||||
|
@ -738,7 +742,7 @@ export function ChatActions(props: {
|
|||
onSelection={(s) => {
|
||||
if (s.length === 0) return;
|
||||
const style = s[0];
|
||||
chatStore.updateCurrentSession((session) => {
|
||||
chatStore.updateTargetSession(session, (session) => {
|
||||
session.mask.modelConfig.style = style;
|
||||
});
|
||||
showToast(style);
|
||||
|
@ -769,7 +773,7 @@ export function ChatActions(props: {
|
|||
}))}
|
||||
onClose={() => setShowPluginSelector(false)}
|
||||
onSelection={(s) => {
|
||||
chatStore.updateCurrentSession((session) => {
|
||||
chatStore.updateTargetSession(session, (session) => {
|
||||
session.mask.plugin = s as string[];
|
||||
});
|
||||
}}
|
||||
|
@ -812,7 +816,8 @@ export function EditMessageModal(props: { onClose: () => void }) {
|
|||
icon={<ConfirmIcon />}
|
||||
key="ok"
|
||||
onClick={() => {
|
||||
chatStore.updateCurrentSession(
|
||||
chatStore.updateTargetSession(
|
||||
session,
|
||||
(session) => (session.messages = messages),
|
||||
);
|
||||
props.onClose();
|
||||
|
@ -829,7 +834,8 @@ export function EditMessageModal(props: { onClose: () => void }) {
|
|||
type="text"
|
||||
value={session.topic}
|
||||
onInput={(e) =>
|
||||
chatStore.updateCurrentSession(
|
||||
chatStore.updateTargetSession(
|
||||
session,
|
||||
(session) => (session.topic = e.currentTarget.value),
|
||||
)
|
||||
}
|
||||
|
@ -990,7 +996,8 @@ function _Chat() {
|
|||
prev: () => chatStore.nextSession(-1),
|
||||
next: () => chatStore.nextSession(1),
|
||||
clear: () =>
|
||||
chatStore.updateCurrentSession(
|
||||
chatStore.updateTargetSession(
|
||||
session,
|
||||
(session) => (session.clearContextIndex = session.messages.length),
|
||||
),
|
||||
fork: () => chatStore.forkSession(),
|
||||
|
@ -1061,7 +1068,7 @@ function _Chat() {
|
|||
};
|
||||
|
||||
useEffect(() => {
|
||||
chatStore.updateCurrentSession((session) => {
|
||||
chatStore.updateTargetSession(session, (session) => {
|
||||
const stopTiming = Date.now() - REQUEST_TIMEOUT_MS;
|
||||
session.messages.forEach((m) => {
|
||||
// check if should stop all stale messages
|
||||
|
@ -1087,7 +1094,7 @@ function _Chat() {
|
|||
}
|
||||
});
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, []);
|
||||
}, [session]);
|
||||
|
||||
// check if should send message
|
||||
const onInputKeyDown = (e: React.KeyboardEvent<HTMLTextAreaElement>) => {
|
||||
|
@ -1118,7 +1125,8 @@ function _Chat() {
|
|||
};
|
||||
|
||||
const deleteMessage = (msgId?: string) => {
|
||||
chatStore.updateCurrentSession(
|
||||
chatStore.updateTargetSession(
|
||||
session,
|
||||
(session) =>
|
||||
(session.messages = session.messages.filter((m) => m.id !== msgId)),
|
||||
);
|
||||
|
@ -1185,7 +1193,7 @@ function _Chat() {
|
|||
};
|
||||
|
||||
const onPinMessage = (message: ChatMessage) => {
|
||||
chatStore.updateCurrentSession((session) =>
|
||||
chatStore.updateTargetSession(session, (session) =>
|
||||
session.mask.context.push(message),
|
||||
);
|
||||
|
||||
|
@ -1588,9 +1596,12 @@ function _Chat() {
|
|||
</div>
|
||||
)}
|
||||
|
||||
<div className={`window-header-title ${styles["chat-body-title"]}`}>
|
||||
<div className={clsx("window-header-title", styles["chat-body-title"])}>
|
||||
<div
|
||||
className={`window-header-main-title ${styles["chat-body-main-title"]}`}
|
||||
className={clsx(
|
||||
"window-header-main-title",
|
||||
styles["chat-body-main-title"],
|
||||
)}
|
||||
onClickCapture={() => setIsEditingMessage(true)}
|
||||
>
|
||||
{!session.topic ? DEFAULT_TOPIC : session.topic}
|
||||
|
@ -1607,7 +1618,7 @@ function _Chat() {
|
|||
title={Locale.Chat.Actions.RefreshTitle}
|
||||
onClick={() => {
|
||||
showToast(Locale.Chat.Actions.RefreshToast);
|
||||
chatStore.summarizeSession(true);
|
||||
chatStore.summarizeSession(true, session);
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
|
@ -1711,14 +1722,17 @@ function _Chat() {
|
|||
});
|
||||
}
|
||||
}
|
||||
chatStore.updateCurrentSession((session) => {
|
||||
const m = session.mask.context
|
||||
.concat(session.messages)
|
||||
.find((m) => m.id === message.id);
|
||||
if (m) {
|
||||
m.content = newContent;
|
||||
}
|
||||
});
|
||||
chatStore.updateTargetSession(
|
||||
session,
|
||||
(session) => {
|
||||
const m = session.mask.context
|
||||
.concat(session.messages)
|
||||
.find((m) => m.id === message.id);
|
||||
if (m) {
|
||||
m.content = newContent;
|
||||
}
|
||||
},
|
||||
);
|
||||
}}
|
||||
></IconButton>
|
||||
</div>
|
||||
|
@ -1861,7 +1875,7 @@ function _Chat() {
|
|||
)}
|
||||
{getMessageImages(message).length > 1 && (
|
||||
<div
|
||||
className={styles["chat-message-item-images"]}
|
||||
className={clsx(styles["chat-message-item-images"])}
|
||||
style={
|
||||
{
|
||||
"--image-count": getMessageImages(message).length,
|
||||
|
@ -1923,11 +1937,10 @@ function _Chat() {
|
|||
setUserInput={setUserInput}
|
||||
/>
|
||||
<label
|
||||
className={`${styles["chat-input-panel-inner"]} ${
|
||||
attachImages.length != 0
|
||||
? styles["chat-input-panel-inner-attach"]
|
||||
: ""
|
||||
}`}
|
||||
className={clsx(styles["chat-input-panel-inner"], {
|
||||
[styles["chat-input-panel-inner-attach"]]:
|
||||
attachImages.length !== 0,
|
||||
})}
|
||||
htmlFor="chat-input"
|
||||
>
|
||||
<textarea
|
||||
|
|
|
@ -40,6 +40,7 @@ import { EXPORT_MESSAGE_CLASS_NAME } from "../constant";
|
|||
import { getClientConfig } from "../config/client";
|
||||
import { type ClientApi, getClientApi } from "../client/api";
|
||||
import { getMessageTextContent } from "../utils";
|
||||
import clsx from "clsx";
|
||||
|
||||
const Markdown = dynamic(async () => (await import("./markdown")).Markdown, {
|
||||
loading: () => <LoadingIcon />,
|
||||
|
@ -118,9 +119,10 @@ function Steps<
|
|||
return (
|
||||
<div
|
||||
key={i}
|
||||
className={`${styles["step"]} ${
|
||||
styles[i <= props.index ? "step-finished" : ""]
|
||||
} ${i === props.index && styles["step-current"]} clickable`}
|
||||
className={clsx("clickable", styles["step"], {
|
||||
[styles["step-finished"]]: i <= props.index,
|
||||
[styles["step-current"]]: i === props.index,
|
||||
})}
|
||||
onClick={() => {
|
||||
props.onStepChange?.(i);
|
||||
}}
|
||||
|
@ -525,11 +527,11 @@ export function ImagePreviewer(props: {
|
|||
messages={props.messages}
|
||||
/>
|
||||
<div
|
||||
className={`${styles["preview-body"]} ${styles["default-theme"]}`}
|
||||
className={clsx(styles["preview-body"], styles["default-theme"])}
|
||||
ref={previewRef}
|
||||
>
|
||||
<div className={styles["chat-info"]}>
|
||||
<div className={styles["logo"] + " no-dark"}>
|
||||
<div className={clsx(styles["logo"], "no-dark")}>
|
||||
<NextImage
|
||||
src={ChatGptIcon.src}
|
||||
alt="logo"
|
||||
|
@ -570,7 +572,7 @@ export function ImagePreviewer(props: {
|
|||
{props.messages.map((m, i) => {
|
||||
return (
|
||||
<div
|
||||
className={styles["message"] + " " + styles["message-" + m.role]}
|
||||
className={clsx(styles["message"], styles["message-" + m.role])}
|
||||
key={i}
|
||||
>
|
||||
<div className={styles["avatar"]}>
|
||||
|
|
|
@ -3,7 +3,6 @@
|
|||
require("../polyfill");
|
||||
|
||||
import { useState, useEffect } from "react";
|
||||
|
||||
import styles from "./home.module.scss";
|
||||
|
||||
import BotIcon from "../icons/bot.svg";
|
||||
|
@ -29,10 +28,11 @@ import { AuthPage } from "./auth";
|
|||
import { getClientConfig } from "../config/client";
|
||||
import { type ClientApi, getClientApi } from "../client/api";
|
||||
import { useAccessStore } from "../store";
|
||||
import clsx from "clsx";
|
||||
|
||||
export function Loading(props: { noLogo?: boolean }) {
|
||||
return (
|
||||
<div className={styles["loading-content"] + " no-dark"}>
|
||||
<div className={clsx("no-dark", styles["loading-content"])}>
|
||||
{!props.noLogo && <BotIcon />}
|
||||
<LoadingIcon />
|
||||
</div>
|
||||
|
@ -179,7 +179,11 @@ function Screen() {
|
|||
if (isSdNew) return <Sd />;
|
||||
return (
|
||||
<>
|
||||
<SideBar className={isHome ? styles["sidebar-show"] : ""} />
|
||||
<SideBar
|
||||
className={clsx({
|
||||
[styles["sidebar-show"]]: isHome,
|
||||
})}
|
||||
/>
|
||||
<WindowContent>
|
||||
<Routes>
|
||||
<Route path={Path.Home} element={<Chat />} />
|
||||
|
@ -197,9 +201,10 @@ function Screen() {
|
|||
|
||||
return (
|
||||
<div
|
||||
className={`${styles.container} ${
|
||||
shouldTightBorder ? styles["tight-container"] : styles.container
|
||||
} ${getLang() === "ar" ? styles["rtl-screen"] : ""}`}
|
||||
className={clsx(styles.container, {
|
||||
[styles["tight-container"]]: shouldTightBorder,
|
||||
[styles["rtl-screen"]]: getLang() === "ar",
|
||||
})}
|
||||
>
|
||||
{renderContent()}
|
||||
</div>
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import * as React from "react";
|
||||
import styles from "./input-range.module.scss";
|
||||
import clsx from "clsx";
|
||||
|
||||
interface InputRangeProps {
|
||||
onChange: React.ChangeEventHandler<HTMLInputElement>;
|
||||
|
@ -23,7 +24,7 @@ export function InputRange({
|
|||
aria,
|
||||
}: InputRangeProps) {
|
||||
return (
|
||||
<div className={styles["input-range"] + ` ${className ?? ""}`}>
|
||||
<div className={clsx(styles["input-range"], className)}>
|
||||
{title || value}
|
||||
<input
|
||||
aria-label={aria}
|
||||
|
|
|
@ -23,6 +23,7 @@ import { useChatStore } from "../store";
|
|||
import { IconButton } from "./button";
|
||||
|
||||
import { useAppConfig } from "../store/config";
|
||||
import clsx from "clsx";
|
||||
|
||||
export function Mermaid(props: { code: string }) {
|
||||
const ref = useRef<HTMLDivElement>(null);
|
||||
|
@ -57,7 +58,7 @@ export function Mermaid(props: { code: string }) {
|
|||
|
||||
return (
|
||||
<div
|
||||
className="no-dark mermaid"
|
||||
className={clsx("no-dark", "mermaid")}
|
||||
style={{
|
||||
cursor: "pointer",
|
||||
overflow: "auto",
|
||||
|
@ -193,7 +194,12 @@ function CustomCode(props: { children: any; className?: string }) {
|
|||
const renderShowMoreButton = () => {
|
||||
if (showToggle && enableCodeFold && collapsed) {
|
||||
return (
|
||||
<div className={`show-hide-button ${collapsed ? "collapsed" : "expanded"}`}>
|
||||
<div
|
||||
className={clsx("show-hide-button", {
|
||||
collapsed,
|
||||
expanded: !collapsed,
|
||||
})}
|
||||
>
|
||||
<button onClick={toggleCollapsed}>{Locale.NewChat.More}</button>
|
||||
</div>
|
||||
);
|
||||
|
@ -203,7 +209,7 @@ function CustomCode(props: { children: any; className?: string }) {
|
|||
return (
|
||||
<>
|
||||
<code
|
||||
className={props?.className}
|
||||
className={clsx(props?.className)}
|
||||
ref={ref}
|
||||
style={{
|
||||
maxHeight: enableCodeFold && collapsed ? "400px" : "none",
|
||||
|
|
|
@ -55,6 +55,7 @@ import {
|
|||
OnDragEndResponder,
|
||||
} from "@hello-pangea/dnd";
|
||||
import { getMessageTextContent } from "../utils";
|
||||
import clsx from "clsx";
|
||||
|
||||
// drag and drop helper function
|
||||
function reorder<T>(list: T[], startIndex: number, endIndex: number): T[] {
|
||||
|
@ -588,7 +589,7 @@ export function MaskPage() {
|
|||
</div>
|
||||
<div className={styles["mask-title"]}>
|
||||
<div className={styles["mask-name"]}>{m.name}</div>
|
||||
<div className={styles["mask-info"] + " one-line"}>
|
||||
<div className={clsx(styles["mask-info"], "one-line")}>
|
||||
{`${Locale.Mask.Item.Info(m.context.length)} / ${
|
||||
ALL_LANG_OPTIONS[m.lang]
|
||||
} / ${m.modelConfig.model}`}
|
||||
|
|
|
@ -8,6 +8,7 @@ import Locale from "../locales";
|
|||
|
||||
import styles from "./message-selector.module.scss";
|
||||
import { getMessageTextContent } from "../utils";
|
||||
import clsx from "clsx";
|
||||
|
||||
function useShiftRange() {
|
||||
const [startIndex, setStartIndex] = useState<number>();
|
||||
|
@ -71,6 +72,7 @@ export function MessageSelector(props: {
|
|||
defaultSelectAll?: boolean;
|
||||
onSelected?: (messages: ChatMessage[]) => void;
|
||||
}) {
|
||||
const LATEST_COUNT = 4;
|
||||
const chatStore = useChatStore();
|
||||
const session = chatStore.currentSession();
|
||||
const isValid = (m: ChatMessage) => m.content && !m.isError && !m.streaming;
|
||||
|
@ -141,15 +143,13 @@ export function MessageSelector(props: {
|
|||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [startIndex, endIndex]);
|
||||
|
||||
const LATEST_COUNT = 4;
|
||||
|
||||
return (
|
||||
<div className={styles["message-selector"]}>
|
||||
<div className={styles["message-filter"]}>
|
||||
<input
|
||||
type="text"
|
||||
placeholder={Locale.Select.Search}
|
||||
className={styles["filter-item"] + " " + styles["search-bar"]}
|
||||
className={clsx(styles["filter-item"], styles["search-bar"])}
|
||||
value={searchInput}
|
||||
onInput={(e) => {
|
||||
setSearchInput(e.currentTarget.value);
|
||||
|
@ -196,9 +196,9 @@ export function MessageSelector(props: {
|
|||
|
||||
return (
|
||||
<div
|
||||
className={`${styles["message"]} ${
|
||||
props.selection.has(m.id!) && styles["message-selected"]
|
||||
}`}
|
||||
className={clsx(styles["message"], {
|
||||
[styles["message-selected"]]: props.selection.has(m.id!),
|
||||
})}
|
||||
key={i}
|
||||
onClick={() => {
|
||||
props.updateSelection((selection) => {
|
||||
|
@ -221,7 +221,7 @@ export function MessageSelector(props: {
|
|||
<div className={styles["date"]}>
|
||||
{new Date(m.date).toLocaleString()}
|
||||
</div>
|
||||
<div className={`${styles["content"]} one-line`}>
|
||||
<div className={clsx(styles["content"], "one-line")}>
|
||||
{getMessageTextContent(m)}
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
@@ -7,6 +7,7 @@ import { ListItem, Select } from "./ui-lib";
  import { useAllModels } from "../utils/hooks";
  import { groupBy } from "lodash-es";
  import styles from "./model-config.module.scss";
+ import { getModelProvider } from "../utils/model";

  export function ModelConfigList(props: {
    modelConfig: ModelConfig;

@@ -28,7 +29,9 @@ export function ModelConfigList(props: {
        value={value}
        align="left"
        onChange={(e) => {
-         const [model, providerName] = e.currentTarget.value.split("@");
+         const [model, providerName] = getModelProvider(
+           e.currentTarget.value,
+         );
          props.updateConfig((config) => {
            config.model = ModalConfigValidator.model(model);
            config.providerName = providerName as ServiceProvider;

@@ -247,7 +250,9 @@ export function ModelConfigList(props: {
        aria-label={Locale.Settings.CompressModel.Title}
        value={compressModelValue}
        onChange={(e) => {
-         const [model, providerName] = e.currentTarget.value.split("@");
+         const [model, providerName] = getModelProvider(
+           e.currentTarget.value,
+         );
          props.updateConfig((config) => {
            config.compressModel = ModalConfigValidator.model(model);
            config.compressProviderName = providerName as ServiceProvider;
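getModelProvider replaces the bare split("@") at these call sites (and the ones in common.ts and chat.tsx above); the app/utils/model.ts hunk is not included in this excerpt. Judging from how it is used, it presumably splits a "model@provider" identifier on the last "@" so model names that themselves contain "@" keep working — a sketch under that assumption:

```ts
// Assumed behaviour only — the real getModelProvider lives in app/utils/model.ts, not shown here.
export function getModelProvider(value: string): [string, string | undefined] {
  const at = value.lastIndexOf("@");
  if (at === -1) return [value, undefined];
  return [value.slice(0, at), value.slice(at + 1)];
}

// getModelProvider("glm-4@ChatGLM") -> ["glm-4", "ChatGLM"]
```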
@ -16,6 +16,7 @@ import { MaskAvatar } from "./mask";
|
|||
import { useCommand } from "../command";
|
||||
import { showConfirm } from "./ui-lib";
|
||||
import { BUILTIN_MASK_STORE } from "../masks";
|
||||
import clsx from "clsx";
|
||||
|
||||
function MaskItem(props: { mask: Mask; onClick?: () => void }) {
|
||||
return (
|
||||
|
@ -24,7 +25,9 @@ function MaskItem(props: { mask: Mask; onClick?: () => void }) {
|
|||
avatar={props.mask.avatar}
|
||||
model={props.mask.modelConfig.model}
|
||||
/>
|
||||
<div className={styles["mask-name"] + " one-line"}>{props.mask.name}</div>
|
||||
<div className={clsx(styles["mask-name"], "one-line")}>
|
||||
{props.mask.name}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
|
|
@ -28,6 +28,7 @@ import {
|
|||
import Locale from "../locales";
|
||||
import { useNavigate } from "react-router-dom";
|
||||
import { useState } from "react";
|
||||
import clsx from "clsx";
|
||||
|
||||
export function PluginPage() {
|
||||
const navigate = useNavigate();
|
||||
|
@ -199,7 +200,7 @@ export function PluginPage() {
|
|||
<div className={styles["mask-name"]}>
|
||||
{m.title}@<small>{m.version}</small>
|
||||
</div>
|
||||
<div className={styles["mask-info"] + " one-line"}>
|
||||
<div className={clsx(styles["mask-info"], "one-line")}>
|
||||
{Locale.Plugin.Item.Info(
|
||||
FunctionToolService.add(m).length,
|
||||
)}
|
||||
|
@ -335,7 +336,10 @@ export function PluginPage() {
|
|||
<ListItem
|
||||
subTitle={
|
||||
<div
|
||||
className={`markdown-body ${pluginStyles["plugin-content"]}`}
|
||||
className={clsx(
|
||||
"markdown-body",
|
||||
pluginStyles["plugin-content"],
|
||||
)}
|
||||
dir="auto"
|
||||
>
|
||||
<pre>
|
||||
|
|
|
@ -4,6 +4,7 @@ import { Select } from "@/app/components/ui-lib";
|
|||
import { IconButton } from "@/app/components/button";
|
||||
import Locale from "@/app/locales";
|
||||
import { useSdStore } from "@/app/store/sd";
|
||||
import clsx from "clsx";
|
||||
|
||||
export const params = [
|
||||
{
|
||||
|
@ -136,7 +137,7 @@ export function ControlParamItem(props: {
|
|||
className?: string;
|
||||
}) {
|
||||
return (
|
||||
<div className={styles["ctrl-param-item"] + ` ${props.className || ""}`}>
|
||||
<div className={clsx(styles["ctrl-param-item"], props.className)}>
|
||||
<div className={styles["ctrl-param-item-header"]}>
|
||||
<div className={styles["ctrl-param-item-title"]}>
|
||||
<div>
|
||||
|
|
|
@ -36,6 +36,7 @@ import { removeImage } from "@/app/utils/chat";
|
|||
import { SideBar } from "./sd-sidebar";
|
||||
import { WindowContent } from "@/app/components/home";
|
||||
import { params } from "./sd-panel";
|
||||
import clsx from "clsx";
|
||||
|
||||
function getSdTaskStatus(item: any) {
|
||||
let s: string;
|
||||
|
@ -104,7 +105,7 @@ export function Sd() {
|
|||
|
||||
return (
|
||||
<>
|
||||
<SideBar className={isSd ? homeStyles["sidebar-show"] : ""} />
|
||||
<SideBar className={clsx({ [homeStyles["sidebar-show"]]: isSd })} />
|
||||
<WindowContent>
|
||||
<div className={chatStyles.chat} key={"1"}>
|
||||
<div className="window-header" data-tauri-drag-region>
|
||||
|
@ -121,7 +122,10 @@ export function Sd() {
|
|||
</div>
|
||||
)}
|
||||
<div
|
||||
className={`window-header-title ${chatStyles["chat-body-title"]}`}
|
||||
className={clsx(
|
||||
"window-header-title",
|
||||
chatStyles["chat-body-title"],
|
||||
)}
|
||||
>
|
||||
<div className={`window-header-main-title`}>Stability AI</div>
|
||||
<div className="window-header-sub-title">
|
||||
|
|
|
@@ -72,6 +72,7 @@ import {
   Stability,
   Iflytek,
   SAAS_CHAT_URL,
+  ChatGLM,
 } from "../constant";
 import { Prompt, SearchService, usePromptStore } from "../store/prompt";
 import { ErrorBoundary } from "./error";
@@ -1234,6 +1235,47 @@ export function Settings() {
       </>
     );

+  const chatglmConfigComponent = accessStore.provider ===
+    ServiceProvider.ChatGLM && (
+    <>
+      <ListItem
+        title={Locale.Settings.Access.ChatGLM.Endpoint.Title}
+        subTitle={
+          Locale.Settings.Access.ChatGLM.Endpoint.SubTitle +
+          ChatGLM.ExampleEndpoint
+        }
+      >
+        <input
+          aria-label={Locale.Settings.Access.ChatGLM.Endpoint.Title}
+          type="text"
+          value={accessStore.chatglmUrl}
+          placeholder={ChatGLM.ExampleEndpoint}
+          onChange={(e) =>
+            accessStore.update(
+              (access) => (access.chatglmUrl = e.currentTarget.value),
+            )
+          }
+        ></input>
+      </ListItem>
+      <ListItem
+        title={Locale.Settings.Access.ChatGLM.ApiKey.Title}
+        subTitle={Locale.Settings.Access.ChatGLM.ApiKey.SubTitle}
+      >
+        <PasswordInput
+          aria-label={Locale.Settings.Access.ChatGLM.ApiKey.Title}
+          value={accessStore.chatglmApiKey}
+          type="text"
+          placeholder={Locale.Settings.Access.ChatGLM.ApiKey.Placeholder}
+          onChange={(e) => {
+            accessStore.update(
+              (access) => (access.chatglmApiKey = e.currentTarget.value),
+            );
+          }}
+        />
+      </ListItem>
+    </>
+  );
+
   const stabilityConfigComponent = accessStore.provider ===
     ServiceProvider.Stability && (
     <>
@@ -1693,6 +1735,7 @@ export function Settings() {
               {stabilityConfigComponent}
               {lflytekConfigComponent}
               {XAIConfigComponent}
+              {chatglmConfigComponent}
             </>
           )}
         </>
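Each provider block writes its values straight back into the access store through an updater callback, as the inputs above do. A minimal sketch of that pattern outside of JSX (the store and field names come from the hunks in this commit; the draft-mutation style of `update` is assumed from the existing calls, so treat this as illustrative rather than the store's documented API):

```ts
import { useAccessStore } from "../store/access";

// Sketch only: persist ChatGLM credentials the same way the settings inputs do.
function saveChatGlmCredentials(url: string, key: string) {
  useAccessStore.getState().update((access) => {
    access.chatglmUrl = url;    // e.g. "https://open.bigmodel.cn"
    access.chatglmApiKey = key; // placeholder value supplied by the user
  });
}
```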
@@ -30,6 +30,7 @@ import { Link, useNavigate } from "react-router-dom";
 import { isIOS, useMobileScreen } from "../utils";
 import dynamic from "next/dynamic";
 import { showConfirm, Selector } from "./ui-lib";
+import clsx from "clsx";

 const ChatList = dynamic(async () => (await import("./chat-list")).ChatList, {
   loading: () => null,
@@ -141,9 +142,9 @@ export function SideBarContainer(props: {
   const { children, className, onDragStart, shouldNarrow } = props;
   return (
     <div
-      className={`${styles.sidebar} ${className} ${
-        shouldNarrow && styles["narrow-sidebar"]
-      }`}
+      className={clsx(styles.sidebar, className, {
+        [styles["narrow-sidebar"]]: shouldNarrow,
+      })}
       style={{
         // #3016 disable transition on ios mobile screen
         transition: isMobileScreen && isIOSMobile ? "none" : undefined,
@@ -171,9 +172,9 @@ export function SideBarHeader(props: {
   return (
     <Fragment>
       <div
-        className={`${styles["sidebar-header"]} ${
-          shouldNarrow ? styles["sidebar-header-narrow"] : ""
-        }`}
+        className={clsx(styles["sidebar-header"], {
+          [styles["sidebar-header-narrow"]]: shouldNarrow,
+        })}
         data-tauri-drag-region
       >
         <div className={styles["sidebar-title-container"]}>
@@ -182,7 +183,7 @@ export function SideBarHeader(props: {
           </div>
           <div className={styles["sidebar-sub-title"]}>{subTitle}</div>
         </div>
-        <div className={styles["sidebar-logo"] + " no-dark"}>{logo}</div>
+        <div className={clsx(styles["sidebar-logo"], "no-dark")}>{logo}</div>
       </div>
       {children}
     </Fragment>
@@ -286,7 +287,7 @@ export function SideBar(props: { className?: string }) {
       <SideBarTail
         primaryAction={
           <>
-            <div className={styles["sidebar-action"] + " " + styles.mobile}>
+            <div className={clsx(styles["sidebar-action"], styles.mobile)}>
               <IconButton
                 icon={<DeleteIcon />}
                 onClick={async () => {
@@ -23,6 +23,7 @@ import React, {
   useRef,
 } from "react";
 import { IconButton } from "./button";
+import clsx from "clsx";

 export function Popover(props: {
   children: JSX.Element;
@@ -45,7 +46,7 @@ export function Popover(props: {

 export function Card(props: { children: JSX.Element[]; className?: string }) {
   return (
-    <div className={styles.card + " " + props.className}>{props.children}</div>
+    <div className={clsx(styles.card, props.className)}>{props.children}</div>
   );
 }

@@ -60,11 +61,13 @@ export function ListItem(props: {
 }) {
   return (
     <div
-      className={
-        styles["list-item"] +
-        ` ${props.vertical ? styles["vertical"] : ""} ` +
-        ` ${props.className || ""}`
-      }
+      className={clsx(
+        styles["list-item"],
+        {
+          [styles["vertical"]]: props.vertical,
+        },
+        props.className,
+      )}
       onClick={props.onClick}
     >
       <div className={styles["list-header"]}>
@@ -135,9 +138,9 @@ export function Modal(props: ModalProps) {

   return (
     <div
-      className={
-        styles["modal-container"] + ` ${isMax && styles["modal-container-max"]}`
-      }
+      className={clsx(styles["modal-container"], {
+        [styles["modal-container-max"]]: isMax,
+      })}
     >
       <div className={styles["modal-header"]}>
         <div className={styles["modal-title"]}>{props.title}</div>
@@ -260,7 +263,7 @@ export function Input(props: InputProps) {
   return (
     <textarea
       {...props}
-      className={`${styles["input"]} ${props.className}`}
+      className={clsx(styles["input"], props.className)}
     ></textarea>
   );
 }
@@ -301,9 +304,13 @@ export function Select(
   const { className, children, align, ...otherProps } = props;
   return (
     <div
-      className={`${styles["select-with-icon"]} ${
-        align === "left" ? styles["left-align-option"] : ""
-      } ${className}`}
+      className={clsx(
+        styles["select-with-icon"],
+        {
+          [styles["left-align-option"]]: align === "left",
+        },
+        className,
+      )}
     >
       <select className={styles["select-with-icon-select"]} {...otherProps}>
         {children}
@@ -509,9 +516,9 @@ export function Selector<T>(props: {
           const selected = selectedValues.includes(item.value);
           return (
             <ListItem
-              className={`${styles["selector-item"]} ${
-                item.disable && styles["selector-item-disabled"]
-              }`}
+              className={clsx(styles["selector-item"], {
+                [styles["selector-item-disabled"]]: item.disable,
+              })}
               key={i}
               title={item.title}
               subTitle={item.subTitle}
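All of these hunks replace hand-rolled string concatenation with `clsx`, which drops falsy entries and joins the rest with single spaces, so conditional classes no longer leak literal `undefined` or `false` into the DOM. A small illustration of the difference (the values are made up for the example):

```ts
import clsx from "clsx";

// Old style: template literals keep whatever the expression evaluates to.
const legacy = `${"card"} ${undefined} ${false && "active"}`;
// -> "card undefined false"

// New style: falsy entries are skipped, object syntax toggles classes by key.
const cleaned = clsx("card", undefined, { active: false, narrow: true });
// -> "card narrow"
```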
@@ -75,6 +75,10 @@ declare global {
       XAI_URL?: string;
       XAI_API_KEY?: string;

+      // chatglm only
+      CHATGLM_URL?: string;
+      CHATGLM_API_KEY?: string;
+
       // custom template for preprocessing user input
       DEFAULT_INPUT_TEMPLATE?: string;
     }
@@ -151,6 +155,7 @@ export const getServerSideConfig = () => {
   const isMoonshot = !!process.env.MOONSHOT_API_KEY;
   const isIflytek = !!process.env.IFLYTEK_API_KEY;
   const isXAI = !!process.env.XAI_API_KEY;
+  const isChatGLM = !!process.env.CHATGLM_API_KEY;
   // const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? "";
   // const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
   // const randomIndex = Math.floor(Math.random() * apiKeys.length);
@@ -217,6 +222,10 @@ export const getServerSideConfig = () => {
     xaiUrl: process.env.XAI_URL,
     xaiApiKey: getApiKey(process.env.XAI_API_KEY),

+    isChatGLM,
+    chatglmUrl: process.env.CHATGLM_URL,
+    chatglmApiKey: getApiKey(process.env.CHATGLM_API_KEY),
+
     cloudflareAccountId: process.env.CLOUDFLARE_ACCOUNT_ID,
     cloudflareKVNamespaceId: process.env.CLOUDFLARE_KV_NAMESPACE_ID,
     cloudflareKVApiKey: getApiKey(process.env.CLOUDFLARE_KV_API_KEY),
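With these additions, ChatGLM support is switched on server-side purely by the presence of `CHATGLM_API_KEY`, while `CHATGLM_URL` stays optional. A hedged sketch of the resulting behaviour (the key value is a placeholder and the import path is illustrative):

```ts
import { getServerSideConfig } from "./app/config/server"; // illustrative path

process.env.CHATGLM_API_KEY = "your-chatglm-key"; // placeholder value
delete process.env.CHATGLM_URL;

const serverConfig = getServerSideConfig();
console.log(serverConfig.isChatGLM);  // true  – derived from !!process.env.CHATGLM_API_KEY
console.log(serverConfig.chatglmUrl); // undefined – clients fall back to the default ChatGLM endpoint
```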
@@ -30,6 +30,8 @@ export const IFLYTEK_BASE_URL = "https://spark-api-open.xf-yun.com";

 export const XAI_BASE_URL = "https://api.x.ai";

+export const CHATGLM_BASE_URL = "https://open.bigmodel.cn";
+
 export const CACHE_URL_PREFIX = "/api/cache";
 export const UPLOAD_URL = `${CACHE_URL_PREFIX}/upload`;

@@ -62,6 +64,7 @@ export enum ApiPath {
   Stability = "/api/stability",
   Artifacts = "/api/artifacts",
   XAI = "/api/xai",
+  ChatGLM = "/api/chatglm",
 }

 export enum SlotID {
@@ -115,6 +118,7 @@ export enum ServiceProvider {
   Stability = "Stability",
   Iflytek = "Iflytek",
   XAI = "XAI",
+  ChatGLM = "ChatGLM",
 }

 // Google API safety settings, see https://ai.google.dev/gemini-api/docs/safety-settings
@@ -138,6 +142,7 @@ export enum ModelProvider {
   Moonshot = "Moonshot",
   Iflytek = "Iflytek",
   XAI = "XAI",
+  ChatGLM = "ChatGLM",
 }

 export const Stability = {
@@ -225,6 +230,11 @@ export const XAI = {
   ChatPath: "v1/chat/completions",
 };

+export const ChatGLM = {
+  ExampleEndpoint: CHATGLM_BASE_URL,
+  ChatPath: "api/paas/v4/chat/completions",
+};
+
 export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
 // export const DEFAULT_SYSTEM_TEMPLATE = `
 // You are ChatGPT, a large language model trained by {{ServiceProvider}}.
@@ -317,8 +327,12 @@ const anthropicModels = [
   "claude-2.1",
   "claude-3-sonnet-20240229",
   "claude-3-opus-20240229",
+  "claude-3-opus-latest",
   "claude-3-haiku-20240307",
   "claude-3-5-sonnet-20240620",
+  "claude-3-5-sonnet-20241022",
+  "claude-3-5-sonnet-latest",
+  "claude-3-5-haiku-latest",
 ];

 const baiduModels = [
@@ -376,6 +390,17 @@ const iflytekModels = [

 const xAIModes = ["grok-beta"];

+const chatglmModels = [
+  "glm-4-plus",
+  "glm-4-0520",
+  "glm-4",
+  "glm-4-air",
+  "glm-4-airx",
+  "glm-4-long",
+  "glm-4-flashx",
+  "glm-4-flash",
+];
+
 let seq = 1000; // 内置的模型序号生成器从1000开始
 export const DEFAULT_MODELS = [
   ...openaiModels.map((name) => ({
@@ -499,6 +524,17 @@ export const DEFAULT_MODELS = [
       sorted: 11,
     },
   })),
+  ...chatglmModels.map((name) => ({
+    name,
+    available: true,
+    sorted: seq++,
+    provider: {
+      id: "chatglm",
+      providerName: "ChatGLM",
+      providerType: "chatglm",
+      sorted: 12,
+    },
+  })),
 ] as const;

 export const CHAT_PAGE_SIZE = 15;
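The new `ChatGLM` constant pairs the example endpoint with the chat-completions path, so the full URL the client ends up calling can be derived by simple concatenation. A small worked example (the import path is illustrative; joining base and path with a single slash is assumed to match how the other providers' paths are used):

```ts
import { CHATGLM_BASE_URL, ChatGLM } from "./constant"; // illustrative path

const chatUrl = [CHATGLM_BASE_URL, ChatGLM.ChatPath].join("/");
// -> "https://open.bigmodel.cn/api/paas/v4/chat/completions"
```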
@@ -473,6 +473,17 @@ const cn = {
         SubTitle: "样例:",
       },
     },
+    ChatGLM: {
+      ApiKey: {
+        Title: "接口密钥",
+        SubTitle: "使用自定义 ChatGLM API Key",
+        Placeholder: "ChatGLM API Key",
+      },
+      Endpoint: {
+        Title: "接口地址",
+        SubTitle: "样例:",
+      },
+    },
     Stability: {
       ApiKey: {
         Title: "接口密钥",
@@ -457,6 +457,17 @@ const en: LocaleType = {
         SubTitle: "Example: ",
       },
     },
+    ChatGLM: {
+      ApiKey: {
+        Title: "ChatGLM API Key",
+        SubTitle: "Use a custom ChatGLM API Key",
+        Placeholder: "ChatGLM API Key",
+      },
+      Endpoint: {
+        Title: "Endpoint Address",
+        SubTitle: "Example: ",
+      },
+    },
     Stability: {
       ApiKey: {
         Title: "Stability API Key",
@@ -14,12 +14,14 @@ import {
   STABILITY_BASE_URL,
   IFLYTEK_BASE_URL,
   XAI_BASE_URL,
+  CHATGLM_BASE_URL,
 } from "../constant";
 import { getHeaders } from "../client/api";
 import { getClientConfig } from "../config/client";
 import { createPersistStore } from "../utils/store";
 import { ensure } from "../utils/clone";
 import { DEFAULT_CONFIG } from "./config";
+import { getModelProvider } from "../utils/model";

 let fetchState = 0; // 0 not fetch, 1 fetching, 2 done

@@ -47,6 +49,8 @@ const DEFAULT_IFLYTEK_URL = isApp ? IFLYTEK_BASE_URL : ApiPath.Iflytek;

 const DEFAULT_XAI_URL = isApp ? XAI_BASE_URL : ApiPath.XAI;

+const DEFAULT_CHATGLM_URL = isApp ? CHATGLM_BASE_URL : ApiPath.ChatGLM;
+
 const DEFAULT_ACCESS_STATE = {
   accessCode: "",
   useCustomConfig: false,
@@ -108,6 +112,10 @@ const DEFAULT_ACCESS_STATE = {
   xaiUrl: DEFAULT_XAI_URL,
   xaiApiKey: "",

+  // chatglm
+  chatglmUrl: DEFAULT_CHATGLM_URL,
+  chatglmApiKey: "",
+
   // server config
   needCode: true,
   hideUserApiKey: false,
@@ -180,6 +188,10 @@ export const useAccessStore = createPersistStore(
       return ensure(get(), ["xaiApiKey"]);
     },

+    isValidChatGLM() {
+      return ensure(get(), ["chatglmApiKey"]);
+    },
+
     isAuthorized() {
       this.fetch();

@@ -196,6 +208,7 @@ export const useAccessStore = createPersistStore(
         this.isValidMoonshot() ||
         this.isValidIflytek() ||
         this.isValidXAI() ||
+        this.isValidChatGLM() ||
         !this.enabledAccessControl() ||
         (this.enabledAccessControl() && ensure(get(), ["accessCode"]))
       );
@@ -214,9 +227,9 @@ export const useAccessStore = createPersistStore(
         .then((res) => {
           const defaultModel = res.defaultModel ?? "";
           if (defaultModel !== "") {
-            const [model, providerName] = defaultModel.split("@");
+            const [model, providerName] = getModelProvider(defaultModel);
             DEFAULT_CONFIG.modelConfig.model = model;
-            DEFAULT_CONFIG.modelConfig.providerName = providerName;
+            DEFAULT_CONFIG.modelConfig.providerName = providerName as any;
           }

           return res;
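The default ChatGLM endpoint follows the same rule as the other providers: the packaged desktop app talks to the vendor directly, while the web build routes through the Next.js API path. Spelled out with the constants from this commit:

```ts
const DEFAULT_CHATGLM_URL = isApp ? CHATGLM_BASE_URL : ApiPath.ChatGLM;
// isApp === true  -> "https://open.bigmodel.cn"  (desktop app calls the vendor directly)
// isApp === false -> "/api/chatglm"              (web build goes through the server-side proxy route)
```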
@@ -352,13 +352,13 @@ export const useChatStore = createPersistStore(
       return session;
     },

-    onNewMessage(message: ChatMessage) {
-      get().updateCurrentSession((session) => {
+    onNewMessage(message: ChatMessage, targetSession: ChatSession) {
+      get().updateTargetSession(targetSession, (session) => {
         session.messages = session.messages.concat();
         session.lastUpdate = Date.now();
       });
-      get().updateStat(message);
-      get().summarizeSession();
+      get().updateStat(message, targetSession);
+      get().summarizeSession(false, targetSession);
     },

     async onUserInput(content: string, attachImages?: string[]) {
@@ -396,10 +396,10 @@ export const useChatStore = createPersistStore(
       // get recent messages
       const recentMessages = get().getMessagesWithMemory();
       const sendMessages = recentMessages.concat(userMessage);
-      const messageIndex = get().currentSession().messages.length + 1;
+      const messageIndex = session.messages.length + 1;

       // save user's and bot's message
-      get().updateCurrentSession((session) => {
+      get().updateTargetSession(session, (session) => {
         const savedUserMessage = {
           ...userMessage,
           content: mContent,
@@ -420,7 +420,7 @@ export const useChatStore = createPersistStore(
           if (message) {
             botMessage.content = message;
           }
-          get().updateCurrentSession((session) => {
+          get().updateTargetSession(session, (session) => {
             session.messages = session.messages.concat();
           });
         },
@@ -428,13 +428,14 @@ export const useChatStore = createPersistStore(
           botMessage.streaming = false;
           if (message) {
             botMessage.content = message;
-            get().onNewMessage(botMessage);
+            botMessage.date = new Date().toLocaleString();
+            get().onNewMessage(botMessage, session);
           }
           ChatControllerPool.remove(session.id, botMessage.id);
         },
         onBeforeTool(tool: ChatMessageTool) {
           (botMessage.tools = botMessage?.tools || []).push(tool);
-          get().updateCurrentSession((session) => {
+          get().updateTargetSession(session, (session) => {
             session.messages = session.messages.concat();
           });
         },
@@ -444,7 +445,7 @@ export const useChatStore = createPersistStore(
               tools[i] = { ...tool };
             }
           });
-          get().updateCurrentSession((session) => {
+          get().updateTargetSession(session, (session) => {
             session.messages = session.messages.concat();
           });
         },
@@ -459,7 +460,7 @@ export const useChatStore = createPersistStore(
           botMessage.streaming = false;
           userMessage.isError = !isAborted;
           botMessage.isError = !isAborted;
-          get().updateCurrentSession((session) => {
+          get().updateTargetSession(session, (session) => {
             session.messages = session.messages.concat();
           });
           ChatControllerPool.remove(
@@ -591,16 +592,19 @@ export const useChatStore = createPersistStore(
       set(() => ({ sessions }));
     },

-    resetSession() {
-      get().updateCurrentSession((session) => {
+    resetSession(session: ChatSession) {
+      get().updateTargetSession(session, (session) => {
         session.messages = [];
         session.memoryPrompt = "";
       });
     },

-    summarizeSession(refreshTitle: boolean = false) {
+    summarizeSession(
+      refreshTitle: boolean = false,
+      targetSession: ChatSession,
+    ) {
       const config = useAppConfig.getState();
-      const session = get().currentSession();
+      const session = targetSession;

       const modelConfig = session.mask.modelConfig;
       // skip summarize when using dalle3?
       if (isDalle3(modelConfig.model)) {
@@ -649,13 +653,15 @@ export const useChatStore = createPersistStore(
             stream: false,
             providerName,
           },
-          onFinish(message) {
-            if (!isValidMessage(message)) return;
-            get().updateCurrentSession(
-              (session) =>
-                (session.topic =
-                  message.length > 0 ? trimTopic(message) : DEFAULT_TOPIC),
-            );
+          onFinish(message, responseRes) {
+            if (responseRes?.status === 200) {
+              get().updateTargetSession(
+                session,
+                (session) =>
+                  (session.topic =
+                    message.length > 0 ? trimTopic(message) : DEFAULT_TOPIC),
+              );
+            }
           },
         });
       }
@@ -669,7 +675,7 @@ export const useChatStore = createPersistStore(

       const historyMsgLength = countMessages(toBeSummarizedMsgs);

-      if (historyMsgLength > modelConfig?.max_tokens ?? 4000) {
+      if (historyMsgLength > (modelConfig?.max_tokens || 4000)) {
         const n = toBeSummarizedMsgs.length;
         toBeSummarizedMsgs = toBeSummarizedMsgs.slice(
           Math.max(0, n - modelConfig.historyMessageCount),
@@ -715,38 +721,38 @@ export const useChatStore = createPersistStore(
         onUpdate(message) {
           session.memoryPrompt = message;
         },
-        onFinish(message) {
-          console.log("[Memory] ", message);
-          get().updateCurrentSession((session) => {
-            session.lastSummarizeIndex = lastSummarizeIndex;
-            session.memoryPrompt = message; // Update the memory prompt for stored it in local storage
-          });
+        onFinish(message, responseRes) {
+          if (responseRes?.status === 200) {
+            console.log("[Memory] ", message);
+            get().updateTargetSession(session, (session) => {
+              session.lastSummarizeIndex = lastSummarizeIndex;
+              session.memoryPrompt = message; // Update the memory prompt for stored it in local storage
+            });
+          }
         },
         onError(err) {
           console.error("[Summarize] ", err);
         },
       });
     }
-
-    function isValidMessage(message: any): boolean {
-      return typeof message === "string" && !message.startsWith("```json");
-    }
     },

-    updateStat(message: ChatMessage) {
-      get().updateCurrentSession((session) => {
+    updateStat(message: ChatMessage, session: ChatSession) {
+      get().updateTargetSession(session, (session) => {
         session.stat.charCount += message.content.length;
         // TODO: should update chat count and word count
       });
     },

-    updateCurrentSession(updater: (session: ChatSession) => void) {
+    updateTargetSession(
+      targetSession: ChatSession,
+      updater: (session: ChatSession) => void,
+    ) {
       const sessions = get().sessions;
-      const index = get().currentSessionIndex;
+      const index = sessions.findIndex((s) => s.id === targetSession.id);
+      if (index < 0) return;
       updater(sessions[index]);
       set(() => ({ sessions }));
     },

     async clearAllData() {
       await indexedDBStorage.clear();
       localStorage.clear();
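The common thread in these hunks is that every callback now goes through `updateTargetSession`, which resolves the session by id at write time instead of trusting `currentSessionIndex`, so a reply that streams in after the user switches or deletes a chat can no longer land in the wrong session. A minimal sketch of the lookup it performs (the names and types are taken from the hunks above):

```ts
function updateTargetSessionSketch(
  sessions: ChatSession[],
  targetSession: ChatSession,
  updater: (session: ChatSession) => void,
) {
  const index = sessions.findIndex((s) => s.id === targetSession.id);
  if (index < 0) return; // the session was deleted while the request was in flight
  updater(sessions[index]);
}
```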
@@ -266,7 +266,9 @@ export function isVisionModel(model: string) {
     model.includes("gpt-4-turbo") && !model.includes("preview");

   return (
-    visionKeywords.some((keyword) => model.includes(keyword)) || isGpt4Turbo
+    visionKeywords.some((keyword) => model.includes(keyword)) ||
+    isGpt4Turbo ||
+    isDalle3(model)
   );
 }

@@ -278,7 +280,8 @@ export function showPlugins(provider: ServiceProvider, model: string) {
   if (
     provider == ServiceProvider.OpenAI ||
     provider == ServiceProvider.Azure ||
-    provider == ServiceProvider.Moonshot
+    provider == ServiceProvider.Moonshot ||
+    provider == ServiceProvider.ChatGLM
   ) {
     return true;
   }
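In practice the two tweaks mean DALL·E-style models are treated as vision-capable and ChatGLM gets plugin support alongside OpenAI, Azure and Moonshot. A quick illustration, calling the two functions changed above (assuming `isDalle3` matches the "dall-e-3" model name, as its name suggests):

```ts
isVisionModel("dall-e-3");                     // now true, via the added isDalle3(model) branch
showPlugins(ServiceProvider.ChatGLM, "glm-4"); // now true – ChatGLM joins the plugin-enabled providers
```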
@@ -174,6 +174,7 @@ export function stream(
   let finished = false;
   let running = false;
   let runTools: any[] = [];
+  let responseRes: Response;

   // animate response to make it looks smooth
   function animateResponseText() {
@@ -272,7 +273,7 @@ export function stream(
       }
       console.debug("[ChatAPI] end");
       finished = true;
-      options.onFinish(responseText + remainText);
+      options.onFinish(responseText + remainText, responseRes); // 将res传递给onFinish
     }
   };

@@ -304,6 +305,7 @@ export function stream(
       clearTimeout(requestTimeoutId);
       const contentType = res.headers.get("content-type");
       console.log("[Request] response content type: ", contentType);
+      responseRes = res;

       if (contentType?.startsWith("text/plain")) {
         responseText = await res.clone().text();
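Because the raw `Response` is now threaded through to `onFinish`, callers can check the HTTP status before persisting anything, which is how the chat store uses it elsewhere in this commit. A hedged sketch of such a callback (the logging is illustrative):

```ts
const options = {
  onFinish(message: string, responseRes: Response) {
    if (responseRes?.status === 200) {
      // only treat the text as final output when the upstream request actually succeeded
      console.log("[Chat] finished with", message.length, "characters");
    } else {
      console.warn("[Chat] skipped persisting output, HTTP status:", responseRes?.status);
    }
  },
};
```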
@@ -37,6 +37,17 @@ const sortModelTable = (models: ReturnType<typeof collectModels>) =>
     }
   });

+/**
+ * get model name and provider from a formatted string,
+ * e.g. `gpt-4@OpenAi` or `claude-3-5-sonnet@20240620@Google`
+ * @param modelWithProvider model name with provider separated by last `@` char,
+ * @returns [model, provider] tuple, if no `@` char found, provider is undefined
+ */
+export function getModelProvider(modelWithProvider: string): [string, string?] {
+  const [model, provider] = modelWithProvider.split(/@(?!.*@)/);
+  return [model, provider];
+}
+
 export function collectModelTable(
   models: readonly LLMModel[],
   customModels: string,
@@ -79,10 +90,10 @@ export function collectModelTable(
       );
     } else {
       // 1. find model by name, and set available value
-      const [customModelName, customProviderName] = name.split("@");
+      const [customModelName, customProviderName] = getModelProvider(name);
       let count = 0;
       for (const fullName in modelTable) {
-        const [modelName, providerName] = fullName.split("@");
+        const [modelName, providerName] = getModelProvider(fullName);
         if (
           customModelName == modelName &&
           (customProviderName === undefined ||
@@ -102,7 +113,7 @@ export function collectModelTable(
       }
       // 2. if model not exists, create new model with available value
       if (count === 0) {
-        let [customModelName, customProviderName] = name.split("@");
+        let [customModelName, customProviderName] = getModelProvider(name);
         const provider = customProvider(
           customProviderName || customModelName,
         );
@@ -139,7 +150,7 @@ export function collectModelTableWithDefaultModel(
     for (const key of Object.keys(modelTable)) {
       if (
         modelTable[key].available &&
-        key.split("@").shift() == defaultModel
+        getModelProvider(key)[0] == defaultModel
       ) {
         modelTable[key].isDefault = true;
         break;
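Splitting on the last `@` (the `(?!.*@)` look-ahead) is what lets model names that themselves contain `@` keep working, and the new unit tests later in this commit exercise exactly this. A few worked examples:

```ts
getModelProvider("gpt-4@OpenAI");
// -> ["gpt-4", "OpenAI"]

getModelProvider("claude-3-5-sonnet@20240620@Google");
// -> ["claude-3-5-sonnet@20240620", "Google"]  (only the final "@" separates the provider)

getModelProvider("glm-4");
// -> ["glm-4", undefined]                      (no "@" means no explicit provider)
```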
@@ -19,7 +19,7 @@ type StreamResponse = {
   headers: Record<string, string>;
 };

-export function fetch(url: string, options?: RequestInit): Promise<any> {
+export function fetch(url: string, options?: RequestInit): Promise<Response> {
   if (window.__TAURI__) {
     const {
       signal,
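Tightening the Tauri fetch adapter's return type from `Promise<any>` to `Promise<Response>` gives callers type-checked access to the standard Response API instead of everything falling through `any`. A small caller-side sketch (the URL is a placeholder):

```ts
async function probe() {
  const res = await fetch("https://example.com/api/health"); // placeholder URL
  // With Promise<Response>, these members are type-checked:
  console.log(res.status, res.headers.get("content-type"));
  return res.ok;
}
```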
@@ -0,0 +1,29 @@
# Deploying with the BT Panel (宝塔面板)

## Have your own BT Panel
Before deploying this project through the BT Panel, the panel itself must already be installed on your server; the steps below assume an existing installation. For installation, see the [BT Panel official site](https://www.bt.cn/new/download.html).

> Note: this project requires BT Panel version 9.2.0 or later.

## One-click install
1. In BT Panel -> Docker -> App Store, search for ChatGPT-Next-Web to find this project's Docker app;
2. Click Install to start deploying the project.

1. On the project configuration page, fill in the environment variables as required;
2. If you enable the "allow external access" option, remember to open the configured web port in your security group;
3. Make sure you add a valid OpenAI API key, otherwise the app cannot be used; if the key comes from OpenAI directly (not reachable from mainland China), also configure a proxy address;
4. Setting an access password is recommended; otherwise, once external access is allowed, anyone can use the configured API key;
5. Click Confirm to start the automatic deployment.

## How to access
Open ChatGPT-Next-Web in a browser at http://$(host):$(port), using the server's IP address and the configured web port.

If an access password was configured, you need to log in before chatting with the model: click "Login" to obtain access.
(Six binary image files added, not shown: 161 KiB, 196 KiB, 117 KiB, 159 KiB, 74 KiB, and 146 KiB.)
@@ -1,2 +1,24 @@
 // Learn more: https://github.com/testing-library/jest-dom
 import "@testing-library/jest-dom";
+
+global.fetch = jest.fn(() =>
+  Promise.resolve({
+    ok: true,
+    status: 200,
+    json: () => Promise.resolve({}),
+    headers: new Headers(),
+    redirected: false,
+    statusText: "OK",
+    type: "basic",
+    url: "",
+    clone: function () {
+      return this;
+    },
+    body: null,
+    bodyUsed: false,
+    arrayBuffer: () => Promise.resolve(new ArrayBuffer(0)),
+    blob: () => Promise.resolve(new Blob()),
+    formData: () => Promise.resolve(new FormData()),
+    text: () => Promise.resolve(""),
+  }),
+);
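Because the stub is a `jest.fn`, individual tests can still override it per call while everything else falls back to the canned 200 response above. A small usage sketch:

```ts
test("reads JSON from the stubbed fetch", async () => {
  (global.fetch as jest.Mock).mockResolvedValueOnce({
    ok: true,
    status: 200,
    json: () => Promise.resolve({ hello: "world" }),
  });

  const res = await fetch("/api/anything"); // the URL is irrelevant to the stub
  expect(res.status).toBe(200);
  await expect(res.json()).resolves.toEqual({ hello: "world" });
});
```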
@@ -94,6 +94,10 @@ if (mode !== "export") {
         source: "/sharegpt",
         destination: "https://sharegpt.com/api/conversations",
       },
+      {
+        source: "/api/proxy/alibaba/:path*",
+        destination: "https://dashscope.aliyuncs.com/api/:path*",
+      },
     ];

     return {
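The new rewrite simply re-roots anything under `/api/proxy/alibaba/` onto the DashScope API host. A sketch of the mapping with a hypothetical path:

```ts
// Illustrative only: shows how the ":path*" capture lands on the upstream URL.
function rewriteAlibabaProxy(path: string): string {
  // e.g. path = "v1/services/aigc/text-generation/generation" (hypothetical)
  return `https://dashscope.aliyuncs.com/api/${path}`;
}
```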
10
package.json
10
package.json
|
@@ -27,14 +27,15 @@
     "@vercel/analytics": "^0.1.11",
     "@vercel/speed-insights": "^1.0.2",
     "axios": "^1.7.5",
+    "clsx": "^2.1.1",
     "emoji-picker-react": "^4.9.2",
     "fuse.js": "^7.0.0",
     "heic2any": "^0.0.4",
     "html-to-image": "^1.11.11",
     "idb-keyval": "^6.2.1",
     "lodash-es": "^4.17.21",
-    "mermaid": "^10.6.1",
     "markdown-to-txt": "^2.0.1",
+    "mermaid": "^10.6.1",
     "nanoid": "^5.0.3",
     "next": "^14.1.1",
     "node-fetch": "^3.3.1",
@@ -56,9 +57,10 @@
   "devDependencies": {
     "@tauri-apps/api": "^1.6.0",
     "@tauri-apps/cli": "1.5.11",
-    "@testing-library/jest-dom": "^6.4.8",
-    "@testing-library/react": "^16.0.0",
-    "@types/jest": "^29.5.13",
+    "@testing-library/dom": "^10.4.0",
+    "@testing-library/jest-dom": "^6.6.2",
+    "@testing-library/react": "^16.0.1",
+    "@types/jest": "^29.5.14",
     "@types/js-yaml": "4.0.9",
     "@types/lodash-es": "^4.17.12",
     "@types/node": "^20.11.30",
@@ -9,7 +9,7 @@
   },
   "package": {
     "productName": "NextChat",
-    "version": "2.15.6"
+    "version": "2.15.7"
   },
   "tauri": {
     "allowlist": {
@@ -0,0 +1,31 @@
+import { getModelProvider } from "../app/utils/model";
+
+describe("getModelProvider", () => {
+  test("should return model and provider when input contains '@'", () => {
+    const input = "model@provider";
+    const [model, provider] = getModelProvider(input);
+    expect(model).toBe("model");
+    expect(provider).toBe("provider");
+  });
+
+  test("should return model and undefined provider when input does not contain '@'", () => {
+    const input = "model";
+    const [model, provider] = getModelProvider(input);
+    expect(model).toBe("model");
+    expect(provider).toBeUndefined();
+  });
+
+  test("should handle multiple '@' characters correctly", () => {
+    const input = "model@provider@extra";
+    const [model, provider] = getModelProvider(input);
+    expect(model).toBe("model@provider");
+    expect(provider).toBe("extra");
+  });
+
+  test("should return empty strings when input is empty", () => {
+    const input = "";
+    const [model, provider] = getModelProvider(input);
+    expect(model).toBe("");
+    expect(provider).toBeUndefined();
+  });
+});
yarn.lock (107 changed lines)
@@ -27,6 +27,15 @@
     dependencies:
       "@babel/highlight" "^7.18.6"

+"@babel/code-frame@^7.10.4":
+  version "7.26.0"
+  resolved "https://registry.npmmirror.com/@babel/code-frame/-/code-frame-7.26.0.tgz#9374b5cd068d128dac0b94ff482594273b1c2815"
+  integrity sha512-INCKxTtbXtcNbUZ3YXutwMpEleqttcswhAdee7dhuoVrD2cnuc3PqtERBtxkX5nziX9vnBL8WXmSGwv8CuPV6g==
+  dependencies:
+    "@babel/helper-validator-identifier" "^7.25.9"
+    js-tokens "^4.0.0"
+    picocolors "^1.0.0"
+
 "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.24.7":
   version "7.24.7"
   resolved "https://registry.npmmirror.com/@babel/code-frame/-/code-frame-7.24.7.tgz#882fd9e09e8ee324e496bd040401c6f046ef4465"
@@ -394,6 +403,11 @@
   resolved "https://registry.npmmirror.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.7.tgz#75b889cfaf9e35c2aaf42cf0d72c8e91719251db"
   integrity sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w==

+"@babel/helper-validator-identifier@^7.25.9":
+  version "7.25.9"
+  resolved "https://registry.npmmirror.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz#24b64e2c3ec7cd3b3c547729b8d16871f22cbdc7"
+  integrity sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==
+
 "@babel/helper-validator-option@^7.18.6", "@babel/helper-validator-option@^7.21.0":
   version "7.21.0"
   resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.21.0.tgz#8224c7e13ace4bafdc4004da2cf064ef42673180"
@@ -1187,14 +1201,7 @@
   resolved "https://registry.yarnpkg.com/@babel/regjsgen/-/regjsgen-0.8.0.tgz#f0ba69b075e1f05fb2825b7fad991e7adbb18310"
   integrity sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA==

-"@babel/runtime@^7.12.1", "@babel/runtime@^7.20.7", "@babel/runtime@^7.23.2", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2":
-  version "7.23.6"
-  resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.23.6.tgz#c05e610dc228855dc92ef1b53d07389ed8ab521d"
-  integrity sha512-zHd0eUrf5GZoOWVCXp6koAKQTfZV07eit6bGPmJgnZdnSAvvZee6zniW2XMF7Cmc4ISOOnPy3QaSiIJGJkVEDQ==
-  dependencies:
-    regenerator-runtime "^0.14.0"
-
-"@babel/runtime@^7.12.5", "@babel/runtime@^7.21.0":
+"@babel/runtime@^7.12.1", "@babel/runtime@^7.12.5", "@babel/runtime@^7.20.7", "@babel/runtime@^7.21.0", "@babel/runtime@^7.23.2", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2":
   version "7.25.0"
   resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.25.0.tgz#3af9a91c1b739c569d5d80cc917280919c544ecb"
   integrity sha512-7dRy4DwXwtzBrPbZflqxnvfxLF8kdZXPkhymtDeFoFqE6ldzjQFgYTtYIFARcLEYDrqfBfYcZt1WqFxRoyC9Rw==
@@ -2093,13 +2100,26 @@
     "@tauri-apps/cli-win32-ia32-msvc" "1.5.11"
     "@tauri-apps/cli-win32-x64-msvc" "1.5.11"

-"@testing-library/jest-dom@^6.4.8":
-  version "6.4.8"
-  resolved "https://registry.npmmirror.com/@testing-library/jest-dom/-/jest-dom-6.4.8.tgz#9c435742b20c6183d4e7034f2b329d562c079daa"
-  integrity sha512-JD0G+Zc38f5MBHA4NgxQMR5XtO5Jx9g86jqturNTt2WUfRmLDIY7iKkWHDCCTiDuFMre6nxAD5wHw9W5kI4rGw==
+"@testing-library/dom@^10.4.0":
+  version "10.4.0"
+  resolved "https://registry.npmmirror.com/@testing-library/dom/-/dom-10.4.0.tgz#82a9d9462f11d240ecadbf406607c6ceeeff43a8"
+  integrity sha512-pemlzrSESWbdAloYml3bAJMEfNh1Z7EduzqPKprCH5S341frlpYnUEW0H72dLxa6IsYr+mPno20GiSm+h9dEdQ==
+  dependencies:
+    "@babel/code-frame" "^7.10.4"
+    "@babel/runtime" "^7.12.5"
+    "@types/aria-query" "^5.0.1"
+    aria-query "5.3.0"
+    chalk "^4.1.0"
+    dom-accessibility-api "^0.5.9"
+    lz-string "^1.5.0"
+    pretty-format "^27.0.2"
+
+"@testing-library/jest-dom@^6.6.2":
+  version "6.6.2"
+  resolved "https://registry.yarnpkg.com/@testing-library/jest-dom/-/jest-dom-6.6.2.tgz#8186aa9a07263adef9cc5a59a4772db8c31f4a5b"
+  integrity sha512-P6GJD4yqc9jZLbe98j/EkyQDTPgqftohZF5FBkHY5BUERZmcf4HeO2k0XaefEg329ux2p21i1A1DmyQ1kKw2Jw==
   dependencies:
     "@adobe/css-tools" "^4.4.0"
     "@babel/runtime" "^7.9.2"
     aria-query "^5.0.0"
     chalk "^3.0.0"
     css.escape "^1.5.1"
@@ -2107,10 +2127,10 @@
     lodash "^4.17.21"
     redent "^3.0.0"

-"@testing-library/react@^16.0.0":
-  version "16.0.0"
-  resolved "https://registry.npmmirror.com/@testing-library/react/-/react-16.0.0.tgz#0a1e0c7a3de25841c3591b8cb7fb0cf0c0a27321"
-  integrity sha512-guuxUKRWQ+FgNX0h0NS0FIq3Q3uLtWVpBzcLOggmfMoUpgBnzBzvLLd4fbm6yS8ydJd94cIfY4yP9qUQjM2KwQ==
+"@testing-library/react@^16.0.1":
+  version "16.0.1"
+  resolved "https://registry.yarnpkg.com/@testing-library/react/-/react-16.0.1.tgz#29c0ee878d672703f5e7579f239005e4e0faa875"
+  integrity sha512-dSmwJVtJXmku+iocRhWOUFbrERC76TX2Mnf0ATODz8brzAZrMBbzLwQixlBSanZxR6LddK3eiwpSFZgDET1URg==
   dependencies:
     "@babel/runtime" "^7.12.5"

@@ -2144,6 +2164,11 @@
   resolved "https://registry.npmmirror.com/@tsconfig/node16/-/node16-1.0.4.tgz#0b92dcc0cc1c81f6f306a381f28e31b1a56536e9"
   integrity sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==

+"@types/aria-query@^5.0.1":
+  version "5.0.4"
+  resolved "https://registry.npmmirror.com/@types/aria-query/-/aria-query-5.0.4.tgz#1a31c3d378850d2778dabb6374d036dcba4ba708"
+  integrity sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw==
+
 "@types/babel__core@^7.1.14":
   version "7.20.5"
   resolved "https://registry.npmmirror.com/@types/babel__core/-/babel__core-7.20.5.tgz#3df15f27ba85319caa07ba08d0721889bb39c017"
@@ -2263,10 +2288,10 @@
   dependencies:
     "@types/istanbul-lib-report" "*"

-"@types/jest@^29.5.13":
-  version "29.5.13"
-  resolved "https://registry.yarnpkg.com/@types/jest/-/jest-29.5.13.tgz#8bc571659f401e6a719a7bf0dbcb8b78c71a8adc"
-  integrity sha512-wd+MVEZCHt23V0/L642O5APvspWply/rGY5BcW4SUETo2UzPU3Z26qr8jC2qxpimI2jjx9h7+2cj2FwIr01bXg==
+"@types/jest@^29.5.14":
+  version "29.5.14"
+  resolved "https://registry.yarnpkg.com/@types/jest/-/jest-29.5.14.tgz#2b910912fa1d6856cadcd0c1f95af7df1d6049e5"
+  integrity sha512-ZN+4sdnLUbo8EVvVc2ao0GFW6oVrQRPn4K2lglySj7APvSrgzxHiNNK99us4WDMi57xxA2yggblIAMNhXOotLQ==
   dependencies:
     expect "^29.0.0"
     pretty-format "^29.0.0"
@@ -2738,20 +2763,13 @@ argparse@^2.0.1:
   resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38"
   integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==

-aria-query@^5.0.0:
+aria-query@5.3.0, aria-query@^5.0.0, aria-query@^5.1.3:
   version "5.3.0"
   resolved "https://registry.npmmirror.com/aria-query/-/aria-query-5.3.0.tgz#650c569e41ad90b51b3d7df5e5eed1c7549c103e"
   integrity sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A==
   dependencies:
     dequal "^2.0.3"

-aria-query@^5.1.3:
-  version "5.1.3"
-  resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-5.1.3.tgz#19db27cd101152773631396f7a95a3b58c22c35e"
-  integrity sha512-R5iJ5lkuHybztUfuOAznmboyjWq8O6sqNqtK7CLOqdydi54VNbORp49mb14KbWgG1QD3JFO9hJdZ+y4KutfdOQ==
-  dependencies:
-    deep-equal "^2.0.5"
-
 array-buffer-byte-length@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz#fabe8bc193fea865f317fe7807085ee0dee5aead"
@@ -3081,7 +3099,7 @@ chalk@^3.0.0:
     ansi-styles "^4.1.0"
     supports-color "^7.1.0"

-chalk@^4.0.0, chalk@^4.1.2:
+chalk@^4.0.0, chalk@^4.1.0, chalk@^4.1.2:
   version "4.1.2"
   resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01"
   integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==
@@ -3171,6 +3189,11 @@ cliui@^8.0.1:
     strip-ansi "^6.0.1"
     wrap-ansi "^7.0.0"

+clsx@^2.1.1:
+  version "2.1.1"
+  resolved "https://registry.yarnpkg.com/clsx/-/clsx-2.1.1.tgz#eed397c9fd8bd882bfb18deab7102049a2f32999"
+  integrity sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==
+
 co@^4.6.0:
   version "4.6.0"
   resolved "https://registry.npmmirror.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184"
@@ -3877,6 +3900,11 @@ doctrine@^3.0.0:
   dependencies:
     esutils "^2.0.2"

+dom-accessibility-api@^0.5.9:
+  version "0.5.16"
+  resolved "https://registry.npmmirror.com/dom-accessibility-api/-/dom-accessibility-api-0.5.16.tgz#5a7429e6066eb3664d911e33fb0e45de8eb08453"
+  integrity sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg==
+
 dom-accessibility-api@^0.6.3:
   version "0.6.3"
   resolved "https://registry.npmmirror.com/dom-accessibility-api/-/dom-accessibility-api-0.6.3.tgz#993e925cc1d73f2c662e7d75dd5a5445259a8fd8"
@@ -6052,6 +6080,11 @@ lru-cache@^6.0.0:
   dependencies:
     yallist "^4.0.0"

+lz-string@^1.5.0:
+  version "1.5.0"
+  resolved "https://registry.npmmirror.com/lz-string/-/lz-string-1.5.0.tgz#c1ab50f77887b712621201ba9fd4e3a6ed099941"
+  integrity sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==
+
 make-dir@^4.0.0:
   version "4.0.0"
   resolved "https://registry.npmmirror.com/make-dir/-/make-dir-4.0.0.tgz#c3c2307a771277cd9638305f915c29ae741b614e"
@@ -7018,6 +7051,15 @@ prettier@^3.0.2:
   resolved "https://registry.yarnpkg.com/prettier/-/prettier-3.0.2.tgz#78fcecd6d870551aa5547437cdae39d4701dca5b"
   integrity sha512-o2YR9qtniXvwEZlOKbveKfDQVyqxbEIWn48Z8m3ZJjBjcCmUy3xZGIv+7AkaeuaTr6yPXJjwv07ZWlsWbEy1rQ==

+pretty-format@^27.0.2:
+  version "27.5.1"
+  resolved "https://registry.npmmirror.com/pretty-format/-/pretty-format-27.5.1.tgz#2181879fdea51a7a5851fb39d920faa63f01d88e"
+  integrity sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==
+  dependencies:
+    ansi-regex "^5.0.1"
+    ansi-styles "^5.0.0"
+    react-is "^17.0.1"
+
 pretty-format@^29.0.0, pretty-format@^29.7.0:
   version "29.7.0"
   resolved "https://registry.npmmirror.com/pretty-format/-/pretty-format-29.7.0.tgz#ca42c758310f365bfa71a0bda0a807160b776812"
@@ -7109,6 +7151,11 @@ react-is@^16.13.1, react-is@^16.7.0:
   resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4"
   integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==

+react-is@^17.0.1:
+  version "17.0.2"
+  resolved "https://registry.npmmirror.com/react-is/-/react-is-17.0.2.tgz#e691d4a8e9c789365655539ab372762b0efb54f0"
+  integrity sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==
+
 react-is@^18.0.0:
   version "18.2.0"
   resolved "https://registry.yarnpkg.com/react-is/-/react-is-18.2.0.tgz#199431eeaaa2e09f86427efbb4f1473edb47609b"