Merge branch 'ChatGPTNextWeb:main' into main

This commit is contained in:
Joe 2023-12-27 08:54:10 +08:00 committed by GitHub
commit a04c4fea88
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
44 changed files with 2225 additions and 381 deletions

.dockerignore Normal file
View File

@ -0,0 +1,8 @@
# local env files
.env*.local
# docker-compose env files
.env
*.key
*.key.pub

View File

@ -8,6 +8,16 @@ CODE=your-password
# You can start service behind a proxy
PROXY_URL=http://localhost:7890
# (optional)
# Default: Empty
# Google Gemini Pro API key, set if you want to use Google Gemini Pro API.
GOOGLE_API_KEY=
# (optional)
# Default: https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent
# Google Gemini Pro API URL, set if you want to customize the Google Gemini Pro API URL.
GOOGLE_URL=
# Override openai api request base url. (optional)
# Default: https://api.openai.com
# Examples: http://your-openai-proxy.com
@ -36,3 +46,4 @@ ENABLE_BALANCE_QUERY=
# Default: Empty
# If you want to disable parse settings from url, set this value to 1.
DISABLE_FAST_LINK=

View File

@ -18,7 +18,7 @@ jobs:
- name: setup node
uses: actions/setup-node@v3
with:
node-version: 16
node-version: 18
- name: get version
run: echo "PACKAGE_VERSION=$(node -p "require('./src-tauri/tauri.conf.json').package.version")" >> $GITHUB_ENV
- name: create release
@ -59,7 +59,7 @@ jobs:
- name: setup node
uses: actions/setup-node@v3
with:
node-version: 16
node-version: 18
- name: install Rust stable
uses: dtolnay/rust-toolchain@stable
with:

View File

@ -24,7 +24,7 @@ jobs:
id: sync
uses: aormsby/Fork-Sync-With-Upstream-action@v3.4
with:
upstream_sync_repo: Yidadaa/ChatGPT-Next-Web
upstream_sync_repo: ChatGPTNextWeb/ChatGPT-Next-Web
upstream_sync_branch: main
target_sync_branch: main
target_repo_token: ${{ secrets.GITHUB_TOKEN }} # automatically generated, no need to set

View File

@ -16,6 +16,7 @@ FROM base AS builder
RUN apk update && apk add --no-cache git
ENV OPENAI_API_KEY=""
ENV GOOGLE_API_KEY=""
ENV CODE=""
WORKDIR /app
@ -31,6 +32,7 @@ RUN apk add proxychains-ng
ENV PROXY_URL=""
ENV OPENAI_API_KEY=""
ENV GOOGLE_API_KEY=""
ENV CODE=""
COPY --from=builder /app/public ./public
@ -41,22 +43,22 @@ COPY --from=builder /app/.next/server ./.next/server
EXPOSE 3000
CMD if [ -n "$PROXY_URL" ]; then \
export HOSTNAME="127.0.0.1"; \
protocol=$(echo $PROXY_URL | cut -d: -f1); \
host=$(echo $PROXY_URL | cut -d/ -f3 | cut -d: -f1); \
port=$(echo $PROXY_URL | cut -d: -f3); \
conf=/etc/proxychains.conf; \
echo "strict_chain" > $conf; \
echo "proxy_dns" >> $conf; \
echo "remote_dns_subnet 224" >> $conf; \
echo "tcp_read_time_out 15000" >> $conf; \
echo "tcp_connect_time_out 8000" >> $conf; \
echo "localnet 127.0.0.0/255.0.0.0" >> $conf; \
echo "localnet ::1/128" >> $conf; \
echo "[ProxyList]" >> $conf; \
echo "$protocol $host $port" >> $conf; \
cat /etc/proxychains.conf; \
proxychains -f $conf node server.js; \
export HOSTNAME="127.0.0.1"; \
protocol=$(echo $PROXY_URL | cut -d: -f1); \
host=$(echo $PROXY_URL | cut -d/ -f3 | cut -d: -f1); \
port=$(echo $PROXY_URL | cut -d: -f3); \
conf=/etc/proxychains.conf; \
echo "strict_chain" > $conf; \
echo "proxy_dns" >> $conf; \
echo "remote_dns_subnet 224" >> $conf; \
echo "tcp_read_time_out 15000" >> $conf; \
echo "tcp_connect_time_out 8000" >> $conf; \
echo "localnet 127.0.0.0/255.0.0.0" >> $conf; \
echo "localnet ::1/128" >> $conf; \
echo "[ProxyList]" >> $conf; \
echo "$protocol $host $port" >> $conf; \
cat /etc/proxychains.conf; \
proxychains -f $conf node server.js; \
else \
node server.js; \
node server.js; \
fi

View File

@ -1,22 +1,22 @@
<div align="center">
<img src="./docs/images/icon.svg" alt="icon"/>
<h1 align="center">ChatGPT Next Web</h1>
<h1 align="center">NextChat (ChatGPT Next Web)</h1>
English / [简体中文](./README_CN.md)
One-Click to get well-designed cross-platform ChatGPT web UI.
One-Click to get a well-designed cross-platform ChatGPT web UI, with GPT3, GPT4 & Gemini Pro support.
一键免费部署你的跨平台私人 ChatGPT 应用。
一键免费部署你的跨平台私人 ChatGPT 应用, 支持 GPT3, GPT4 & Gemini Pro 模型
[![Web][Web-image]][web-url]
[![Windows][Windows-image]][download-url]
[![MacOS][MacOS-image]][download-url]
[![Linux][Linux-image]][download-url]
[Web App](https://chatgpt.nextweb.fun/) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [Discord](https://discord.gg/YCkeafCafC) / [Twitter](https://twitter.com/mortiest_ricky) / [Buy Me a Coffee](https://www.buymeacoffee.com/yidadaa)
[Web App](https://app.nextchat.dev/) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [Discord](https://discord.gg/YCkeafCafC) / [Twitter](https://twitter.com/mortiest_ricky) / [Buy Me a Coffee](https://www.buymeacoffee.com/yidadaa)
[网页版](https://chatgpt.nextweb.fun/) / [客户端](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [反馈](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [QQ 群](https://github.com/Yidadaa/ChatGPT-Next-Web/discussions/1724) / [打赏开发者](https://user-images.githubusercontent.com/16968934/227772541-5bcd52d8-61b7-488c-a203-0330d8006e2b.jpg)
[网页版](https://app.nextchat.dev/) / [客户端](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [反馈](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [QQ 群](https://github.com/Yidadaa/ChatGPT-Next-Web/discussions/1724) / [打赏开发者](https://user-images.githubusercontent.com/16968934/227772541-5bcd52d8-61b7-488c-a203-0330d8006e2b.jpg)
[web-url]: https://chatgpt.nextweb.fun
[download-url]: https://github.com/Yidadaa/ChatGPT-Next-Web/releases
@ -25,7 +25,9 @@ One-Click to get well-designed cross-platform ChatGPT web UI.
[MacOS-image]: https://img.shields.io/badge/-MacOS-black?logo=apple
[Linux-image]: https://img.shields.io/badge/-Linux-333?logo=ubuntu
[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web)
[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&env=GOOGLE_API_KEY&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web)
[![Deploy on Zeabur](https://zeabur.com/button.svg)](https://zeabur.com/templates/ZBUEFA)
[![Open in Gitpod](https://gitpod.io/button/open-in-gitpod.svg)](https://gitpod.io/#https://github.com/Yidadaa/ChatGPT-Next-Web)
@ -37,8 +39,8 @@ One-Click to get well-designed cross-platform ChatGPT web UI.
- **Deploy for free with one-click** on Vercel in under 1 minute
- Compact client (~5MB) on Linux/Windows/MacOS, [download it now](https://github.com/Yidadaa/ChatGPT-Next-Web/releases)
- Fully compatible with self-deployed llms, recommended for use with [RWKV-Runner](https://github.com/josStorer/RWKV-Runner) or [LocalAI](https://github.com/go-skynet/LocalAI)
- Privacy first, all data stored locally in the browser
- Fully compatible with self-deployed LLMs, recommended for use with [RWKV-Runner](https://github.com/josStorer/RWKV-Runner) or [LocalAI](https://github.com/go-skynet/LocalAI)
- Privacy first, all data is stored locally in the browser
- Markdown support: LaTeX, mermaid, code highlighting, etc.
- Responsive design, dark mode and PWA
- Fast first screen loading speed (~100kb), support streaming response
@ -75,7 +77,7 @@ One-Click to get well-designed cross-platform ChatGPT web UI.
- 预制角色功能(面具),方便地创建、分享和调试你的个性化对话
- 海量的内置 prompt 列表,来自[中文](https://github.com/PlexPt/awesome-chatgpt-prompts-zh)和[英文](https://github.com/f/awesome-chatgpt-prompts)
- 自动压缩上下文聊天记录,在节省 Token 的同时支持超长对话
- 多国语言支持:English, 简体中文, 繁体中文, 日本語, Español, Italiano, Türkçe, Deutsch, Tiếng Việt, Русский, Čeština
- 多国语言支持:English, 简体中文, 繁体中文, 日本語, Español, Italiano, Türkçe, Deutsch, Tiếng Việt, Русский, Čeština, 한국어, Indonesia
- 拥有自己的域名?好上加好,绑定后即可在任何地方**无障碍**快速访问
## 开发计划
@ -161,7 +163,7 @@ Access password, separated by comma.
### `OPENAI_API_KEY` (required)
Your openai api key.
Your OpenAI API key. Join multiple API keys with commas.
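The comma-separated form is handled server-side by picking one key at random per request (see the config/server changes later in this commit); a simplified sketch:

```ts
// Simplified sketch of the key rotation added to the server config later in this diff.
const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? "";
const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
const randomIndex = Math.floor(Math.random() * apiKeys.length);
const apiKey = apiKeys[randomIndex]; // a different key may be used on each request
```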
### `BASE_URL` (optional)
@ -189,6 +191,14 @@ Azure Api Key.
Azure Api Version, find it at [Azure Documentation](https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#chat-completions).
### `GOOGLE_API_KEY` (optional)
Google Gemini Pro API key.
### `GOOGLE_URL` (optional)
Google Gemini Pro API URL.
### `HIDE_USER_API_KEY` (optional)
> Default: Empty
@ -216,9 +226,11 @@ If you want to disable parse settings from url, set this to 1.
### `CUSTOM_MODELS` (optional)
> Default: Empty
> Example: `+llama,+claude-2,-gpt-3.5-turbo,gpt-4-1106-preview:gpt-4-turbo` means add `llama, claude-2` to model list, and remove `gpt-3.5-turbo` from list, and display `gpt-4-1106-preview` as `gpt-4-turbo`.
> Example: `+llama,+claude-2,-gpt-3.5-turbo,gpt-4-1106-preview=gpt-4-turbo` means add `llama, claude-2` to the model list, remove `gpt-3.5-turbo` from the list, and display `gpt-4-1106-preview` as `gpt-4-turbo`.
To control custom models, use `+` to add a custom model, use `-` to hide a model, use `name:displayName` to customize model name, separated by comma.
To control custom models, use `+` to add a custom model, use `-` to hide a model, and use `name=displayName` to customize a model's display name, separated by commas.
Use `-all` to disable all default models, and `+all` to enable all default models.
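For reference, a rough illustration of how the syntax above could be parsed. The real logic lives in `collectModelTable` (`../utils/model`, imported but not shown in this diff); the function below only mirrors the documented rules and is not the project's implementation.

```ts
// Illustrative only: mirrors the documented CUSTOM_MODELS rules, not the real collectModelTable.
function parseCustomModels(customModels: string) {
  const added: Record<string, string> = {}; // model name -> display name
  const hidden = new Set<string>();
  let defaultsDisabled = false; // set by "-all"; "+all" (re-enable defaults) is omitted for brevity

  const items = customModels.split(",").map((s) => s.trim()).filter(Boolean);
  for (const item of items) {
    if (item === "-all") {
      defaultsDisabled = true;
    } else if (item.startsWith("-")) {
      hidden.add(item.slice(1)); // "-model" hides a model
    } else {
      const raw = item.startsWith("+") ? item.slice(1) : item; // "+model" adds a model
      const [name, displayName] = raw.split("="); // "name=displayName" renames it
      added[name] = displayName ?? name;
    }
  }
  return { added, hidden, defaultsDisabled };
}

// e.g. parseCustomModels("+llama,+claude-2,-gpt-3.5-turbo,gpt-4-1106-preview=gpt-4-turbo")
```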
## Requirements
@ -344,6 +356,9 @@ If you want to add a new translation, read this [document](./docs/translation.md
[@piksonGit](https://github.com/piksonGit)
[@ouyangzhiping](https://github.com/ouyangzhiping)
[@wenjiavv](https://github.com/wenjiavv)
[@LeXwDeX](https://github.com/LeXwDeX)
[@Licoy](https://github.com/Licoy)
[@shangmin2009](https://github.com/shangmin2009)
### Contributor

View File

@ -1,14 +1,16 @@
<div align="center">
<img src="./docs/images/icon.svg" alt="预览"/>
<h1 align="center">ChatGPT Next Web</h1>
<h1 align="center">NextChat</h1>
一键免费部署你的私人 ChatGPT 网页应用。
一键免费部署你的私人 ChatGPT 网页应用,支持 GPT3, GPT4 & Gemini Pro 模型
[演示 Demo](https://chat-gpt-next-web.vercel.app/) / [反馈 Issues](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [加入 Discord](https://discord.gg/zrhvHCr79N) / [QQ 群](https://user-images.githubusercontent.com/16968934/228190818-7dd00845-e9b9-4363-97e5-44c507ac76da.jpeg) / [打赏开发者](https://user-images.githubusercontent.com/16968934/227772541-5bcd52d8-61b7-488c-a203-0330d8006e2b.jpg) / [Donate](#捐赠-donate-usdt)
[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web)
[![Deploy on Zeabur](https://zeabur.com/button.svg)](https://zeabur.com/templates/ZBUEFA)
[![Open in Gitpod](https://gitpod.io/button/open-in-gitpod.svg)](https://gitpod.io/#https://github.com/Yidadaa/ChatGPT-Next-Web)
![主界面](./docs/images/cover.png)
@ -19,7 +21,7 @@
1. 准备好你的 [OpenAI API Key](https://platform.openai.com/account/api-keys);
2. 点击右侧按钮开始部署:
[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web),直接使用 Github 账号登录即可,记得在环境变量页填入 API Key 和[页面访问密码](#配置页面访问密码) CODE
[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&env=GOOGLE_API_KEY&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web),直接使用 Github 账号登录即可,记得在环境变量页填入 API Key 和[页面访问密码](#配置页面访问密码) CODE
3. 部署完毕后,即可开始使用;
4. (可选)[绑定自定义域名](https://vercel.com/docs/concepts/projects/domains/add-a-domain):Vercel 分配的域名 DNS 在某些区域被污染了,绑定自定义域名即可直连。
@ -68,7 +70,7 @@ code1,code2,code3
### `OPENAI_API_KEY` (必填项)
OpenAI 密钥,你在 openai 账户页面申请的 api key。
OpenAI 密钥,你在 openai 账户页面申请的 api key,使用英文逗号隔开多个 key,这样可以随机轮询这些 key
### `CODE` (可选)
@ -104,6 +106,14 @@ Azure 密钥。
Azure Api 版本,你可以在这里找到:[Azure 文档](https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#chat-completions)。
### `GOOGLE_API_KEY` (optional)
Google Gemini Pro 密钥.
### `GOOGLE_URL` (optional)
Google Gemini Pro Api Url.
### `HIDE_USER_API_KEY` (可选)
如果你不想让用户自行填入 API Key将此环境变量设置为 1 即可。
@ -122,9 +132,10 @@ Azure Api 版本,你可以在这里找到:[Azure 文档](https://learn.micro
### `CUSTOM_MODELS` (可选)
> 示例:`+qwen-7b-chat,+glm-6b,-gpt-3.5-turbo,gpt-4-1106-preview:gpt-4-turbo` 表示增加 `qwen-7b-chat``glm-6b` 到模型列表,而从列表中删除 `gpt-3.5-turbo`,并将 `gpt-4-1106-preview` 模型名字展示为 `gpt-4-turbo`
> 示例:`+qwen-7b-chat,+glm-6b,-gpt-3.5-turbo,gpt-4-1106-preview=gpt-4-turbo` 表示增加 `qwen-7b-chat``glm-6b` 到模型列表,而从列表中删除 `gpt-3.5-turbo`,并将 `gpt-4-1106-preview` 模型名字展示为 `gpt-4-turbo`
> 如果你想先禁用所有模型,再启用指定模型,可以使用 `-all,+gpt-3.5-turbo`,则表示仅启用 `gpt-3.5-turbo`
用来控制模型列表,使用 `+` 增加一个模型,使用 `-` 来隐藏一个模型,使用 `模型名:展示名` 来自定义模型的展示名,用英文逗号隔开。
用来控制模型列表,使用 `+` 增加一个模型,使用 `-` 来隐藏一个模型,使用 `模型名=展示名` 来自定义模型的展示名,用英文逗号隔开。
## 开发
@ -138,7 +149,7 @@ Azure Api 版本,你可以在这里找到:[Azure 文档](https://learn.micro
OPENAI_API_KEY=<your api key here>
# 中国大陆用户,可以使用本项目自带的代理进行开发,你也可以自由选择其他代理地址
BASE_URL=https://a.nextweb.fun/api/proxy
BASE_URL=https://b.nextweb.fun/api/proxy
```
### 本地开发

View File

@ -1,7 +1,7 @@
import { NextRequest } from "next/server";
import { getServerSideConfig } from "../config/server";
import md5 from "spark-md5";
import { ACCESS_CODE_PREFIX } from "../constant";
import { ACCESS_CODE_PREFIX, ModelProvider } from "../constant";
function getIP(req: NextRequest) {
let ip = req.ip ?? req.headers.get("x-real-ip");
@ -16,15 +16,15 @@ function getIP(req: NextRequest) {
function parseApiKey(bearToken: string) {
const token = bearToken.trim().replaceAll("Bearer ", "").trim();
const isOpenAiKey = !token.startsWith(ACCESS_CODE_PREFIX);
const isApiKey = !token.startsWith(ACCESS_CODE_PREFIX);
return {
accessCode: isOpenAiKey ? "" : token.slice(ACCESS_CODE_PREFIX.length),
apiKey: isOpenAiKey ? token : "",
accessCode: isApiKey ? "" : token.slice(ACCESS_CODE_PREFIX.length),
apiKey: isApiKey ? token : "",
};
}
export function auth(req: NextRequest) {
export function auth(req: NextRequest, modelProvider: ModelProvider) {
const authToken = req.headers.get("Authorization") ?? "";
// check if it is openai api key or user token
@ -49,22 +49,23 @@ export function auth(req: NextRequest) {
if (serverConfig.hideUserApiKey && !!apiKey) {
return {
error: true,
msg: "you are not allowed to access openai with your own api key",
msg: "you are not allowed to access with your own api key",
};
}
// if user does not provide an api key, inject system api key
if (!apiKey) {
const serverApiKey = serverConfig.isAzure
? serverConfig.azureApiKey
: serverConfig.apiKey;
const serverConfig = getServerSideConfig();
if (serverApiKey) {
const systemApiKey =
modelProvider === ModelProvider.GeminiPro
? serverConfig.googleApiKey
: serverConfig.isAzure
? serverConfig.azureApiKey
: serverConfig.apiKey;
if (systemApiKey) {
console.log("[Auth] use system api key");
req.headers.set(
"Authorization",
`${serverConfig.isAzure ? "" : "Bearer "}${serverApiKey}`,
);
req.headers.set("Authorization", `Bearer ${systemApiKey}`);
} else {
console.log("[Auth] admin did not provide an api key");
}
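Because the removed and added lines are interleaved in the hunk above, here is the resulting key-injection block for reference, reconstructed from the added lines shown (no new behaviour is introduced here):

```ts
// Resulting logic after this change: when the user supplies no key,
// pick the server-side key that matches the requested model provider.
if (!apiKey) {
  const serverConfig = getServerSideConfig();

  const systemApiKey =
    modelProvider === ModelProvider.GeminiPro
      ? serverConfig.googleApiKey
      : serverConfig.isAzure
      ? serverConfig.azureApiKey
      : serverConfig.apiKey;

  if (systemApiKey) {
    console.log("[Auth] use system api key");
    req.headers.set("Authorization", `Bearer ${systemApiKey}`);
  } else {
    console.log("[Auth] admin did not provide an api key");
  }
}
```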

View File

@ -1,6 +1,6 @@
import { NextRequest, NextResponse } from "next/server";
import { getServerSideConfig } from "../config/server";
import { DEFAULT_MODELS, OPENAI_BASE_URL } from "../constant";
import { DEFAULT_MODELS, OPENAI_BASE_URL, GEMINI_BASE_URL } from "../constant";
import { collectModelTable } from "../utils/model";
import { makeAzurePath } from "../azure";
@ -9,8 +9,21 @@ const serverConfig = getServerSideConfig();
export async function requestOpenai(req: NextRequest) {
const controller = new AbortController();
const authValue = req.headers.get("Authorization") ?? "";
const authHeaderName = serverConfig.isAzure ? "api-key" : "Authorization";
var authValue,
authHeaderName = "";
if (serverConfig.isAzure) {
authValue =
req.headers
.get("Authorization")
?.trim()
.replaceAll("Bearer ", "")
.trim() ?? "";
authHeaderName = "api-key";
} else {
authValue = req.headers.get("Authorization") ?? "";
authHeaderName = "Authorization";
}
let path = `${req.nextUrl.pathname}${req.nextUrl.search}`.replaceAll(
"/api/openai/",
@ -30,7 +43,10 @@ export async function requestOpenai(req: NextRequest) {
console.log("[Proxy] ", path);
console.log("[Base Url]", baseUrl);
console.log("[Org ID]", serverConfig.openaiOrgId);
// This fixes [Org ID] being logged as undefined on the server side when not using a custom endpoint
if (serverConfig.openaiOrgId !== undefined) {
console.log("[Org ID]", serverConfig.openaiOrgId);
}
const timeoutId = setTimeout(
() => {
@ -106,6 +122,12 @@ export async function requestOpenai(req: NextRequest) {
// to disable nginx buffering
newHeaders.set("X-Accel-Buffering", "no");
// The latest version of the OpenAI API forces the content-encoding to "br" in JSON responses.
// So if streaming is disabled, we need to remove the content-encoding header,
// because Vercel re-compresses the response with gzip: if the header is kept,
// the browser will try to decode the response as brotli and fail.
newHeaders.delete("content-encoding");
return new Response(res.body, {
status: res.status,
statusText: res.statusText,

View File

@ -0,0 +1,121 @@
import { NextRequest, NextResponse } from "next/server";
import { auth } from "../../auth";
import { getServerSideConfig } from "@/app/config/server";
import { GEMINI_BASE_URL, Google, ModelProvider } from "@/app/constant";
async function handle(
req: NextRequest,
{ params }: { params: { path: string[] } },
) {
console.log("[Google Route] params ", params);
if (req.method === "OPTIONS") {
return NextResponse.json({ body: "OK" }, { status: 200 });
}
const controller = new AbortController();
const serverConfig = getServerSideConfig();
let baseUrl = serverConfig.googleUrl || GEMINI_BASE_URL;
if (!baseUrl.startsWith("http")) {
baseUrl = `https://${baseUrl}`;
}
if (baseUrl.endsWith("/")) {
baseUrl = baseUrl.slice(0, -1);
}
let path = `${req.nextUrl.pathname}`.replaceAll("/api/google/", "");
console.log("[Proxy] ", path);
console.log("[Base Url]", baseUrl);
const timeoutId = setTimeout(
() => {
controller.abort();
},
10 * 60 * 1000,
);
const authResult = auth(req, ModelProvider.GeminiPro);
if (authResult.error) {
return NextResponse.json(authResult, {
status: 401,
});
}
const bearToken = req.headers.get("Authorization") ?? "";
const token = bearToken.trim().replaceAll("Bearer ", "").trim();
const key = token ? token : serverConfig.googleApiKey;
if (!key) {
return NextResponse.json(
{
error: true,
message: `missing GOOGLE_API_KEY in server env vars`,
},
{
status: 401,
},
);
}
const fetchUrl = `${baseUrl}/${path}?key=${key}`;
const fetchOptions: RequestInit = {
headers: {
"Content-Type": "application/json",
"Cache-Control": "no-store",
},
method: req.method,
body: req.body,
// to fix #2485: https://stackoverflow.com/questions/55920957/cloudflare-worker-typeerror-one-time-use-body
redirect: "manual",
// @ts-ignore
duplex: "half",
signal: controller.signal,
};
try {
const res = await fetch(fetchUrl, fetchOptions);
// to prevent browser prompt for credentials
const newHeaders = new Headers(res.headers);
newHeaders.delete("www-authenticate");
// to disable nginx buffering
newHeaders.set("X-Accel-Buffering", "no");
return new Response(res.body, {
status: res.status,
statusText: res.statusText,
headers: newHeaders,
});
} finally {
clearTimeout(timeoutId);
}
}
export const GET = handle;
export const POST = handle;
export const runtime = "edge";
export const preferredRegion = [
"arn1",
"bom1",
"cdg1",
"cle1",
"cpt1",
"dub1",
"fra1",
"gru1",
"hnd1",
"iad1",
"icn1",
"kix1",
"lhr1",
"pdx1",
"sfo1",
"sin1",
"syd1",
];

View File

@ -1,6 +1,6 @@
import { type OpenAIListModelResponse } from "@/app/client/platforms/openai";
import { getServerSideConfig } from "@/app/config/server";
import { OpenaiPath } from "@/app/constant";
import { ModelProvider, OpenaiPath } from "@/app/constant";
import { prettyObject } from "@/app/utils/format";
import { NextRequest, NextResponse } from "next/server";
import { auth } from "../../auth";
@ -45,7 +45,7 @@ async function handle(
);
}
const authResult = auth(req);
const authResult = auth(req, ModelProvider.GPT);
if (authResult.error) {
return NextResponse.json(authResult, {
status: 401,
@ -75,3 +75,22 @@ export const GET = handle;
export const POST = handle;
export const runtime = "edge";
export const preferredRegion = [
"arn1",
"bom1",
"cdg1",
"cle1",
"cpt1",
"dub1",
"fra1",
"gru1",
"hnd1",
"iad1",
"icn1",
"kix1",
"lhr1",
"pdx1",
"sfo1",
"sin1",
"syd1",
];

View File

@ -1,8 +1,13 @@
import { getClientConfig } from "../config/client";
import { ACCESS_CODE_PREFIX, Azure, ServiceProvider } from "../constant";
import { ChatMessage, ModelType, useAccessStore } from "../store";
import {
ACCESS_CODE_PREFIX,
Azure,
ModelProvider,
ServiceProvider,
} from "../constant";
import { ChatMessage, ModelType, useAccessStore, useChatStore } from "../store";
import { ChatGPTApi } from "./platforms/openai";
import { GeminiProApi } from "./platforms/google";
export const ROLES = ["system", "user", "assistant"] as const;
export type MessageRole = (typeof ROLES)[number];
@ -41,6 +46,13 @@ export interface LLMUsage {
export interface LLMModel {
name: string;
available: boolean;
provider: LLMModelProvider;
}
export interface LLMModelProvider {
id: string;
providerName: string;
providerType: string;
}
export abstract class LLMApi {
@ -73,7 +85,11 @@ interface ChatProvider {
export class ClientApi {
public llm: LLMApi;
constructor() {
constructor(provider: ModelProvider = ModelProvider.GPT) {
if (provider === ModelProvider.GeminiPro) {
this.llm = new GeminiProApi();
return;
}
this.llm = new ChatGPTApi();
}
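A usage note: call sites elsewhere in this commit (the home screen and the exporter component) choose the provider from the configured model name. A condensed sketch of that pattern, with `config` standing in for the `useAppConfig()` state:

```ts
// Condensed from the call sites added later in this commit (home / exporter components):
// pick the client implementation based on the configured model name.
const api =
  config.modelConfig.model === "gemini-pro"
    ? new ClientApi(ModelProvider.GeminiPro)
    : new ClientApi(ModelProvider.GPT);
```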
@ -93,7 +109,7 @@ export class ClientApi {
{
from: "human",
value:
"Share from [ChatGPT Next Web]: https://github.com/Yidadaa/ChatGPT-Next-Web",
"Share from [NextChat]: https://github.com/Yidadaa/ChatGPT-Next-Web",
},
]);
// 敬告二开开发者们,为了开源大模型的发展,请不要修改上述消息,此消息用于后续数据清洗使用
@ -123,18 +139,22 @@ export class ClientApi {
}
}
export const api = new ClientApi();
export function getHeaders() {
const accessStore = useAccessStore.getState();
const headers: Record<string, string> = {
"Content-Type": "application/json",
"x-requested-with": "XMLHttpRequest",
"Accept": "application/json",
};
const modelConfig = useChatStore.getState().currentSession().mask.modelConfig;
const isGoogle = modelConfig.model === "gemini-pro";
const isAzure = accessStore.provider === ServiceProvider.Azure;
const authHeader = isAzure ? "api-key" : "Authorization";
const apiKey = isAzure ? accessStore.azureApiKey : accessStore.openaiApiKey;
const apiKey = isGoogle
? accessStore.googleApiKey
: isAzure
? accessStore.azureApiKey
: accessStore.openaiApiKey;
const makeBearer = (s: string) => `${isAzure ? "" : "Bearer "}${s.trim()}`;
const validString = (x: string) => x && x.length > 0;

View File

@ -0,0 +1,222 @@
import { Google, REQUEST_TIMEOUT_MS } from "@/app/constant";
import { ChatOptions, getHeaders, LLMApi, LLMModel, LLMUsage } from "../api";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import {
EventStreamContentType,
fetchEventSource,
} from "@fortaine/fetch-event-source";
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import Locale from "../../locales";
import { getServerSideConfig } from "@/app/config/server";
export class GeminiProApi implements LLMApi {
extractMessage(res: any) {
console.log("[Response] gemini-pro response: ", res);
return (
res?.candidates?.at(0)?.content?.parts.at(0)?.text ||
res?.error?.message ||
""
);
}
async chat(options: ChatOptions): Promise<void> {
const messages = options.messages.map((v) => ({
role: v.role.replace("assistant", "model").replace("system", "user"),
parts: [{ text: v.content }],
}));
// Google requires that roles in neighboring messages must not be the same
for (let i = 0; i < messages.length - 1; ) {
// Check if current and next item both have the role "model"
if (messages[i].role === messages[i + 1].role) {
// Concatenate the 'parts' of the current and next item
messages[i].parts = messages[i].parts.concat(messages[i + 1].parts);
// Remove the next item
messages.splice(i + 1, 1);
} else {
// Move to the next item
i++;
}
}
const modelConfig = {
...useAppConfig.getState().modelConfig,
...useChatStore.getState().currentSession().mask.modelConfig,
...{
model: options.config.model,
},
};
const requestPayload = {
contents: messages,
generationConfig: {
// stopSequences: [
// "Title"
// ],
temperature: modelConfig.temperature,
maxOutputTokens: modelConfig.max_tokens,
topP: modelConfig.top_p,
// "topK": modelConfig.top_k,
},
};
console.log("[Request] google payload: ", requestPayload);
// todo: support stream later
const shouldStream = false;
const controller = new AbortController();
options.onController?.(controller);
try {
const chatPath = this.path(Google.ChatPath);
const chatPayload = {
method: "POST",
body: JSON.stringify(requestPayload),
signal: controller.signal,
headers: getHeaders(),
};
// make a fetch request
const requestTimeoutId = setTimeout(
() => controller.abort(),
REQUEST_TIMEOUT_MS,
);
if (shouldStream) {
let responseText = "";
let remainText = "";
let finished = false;
// animate the response to make it look smooth
function animateResponseText() {
if (finished || controller.signal.aborted) {
responseText += remainText;
console.log("[Response Animation] finished");
return;
}
if (remainText.length > 0) {
const fetchCount = Math.max(1, Math.round(remainText.length / 60));
const fetchText = remainText.slice(0, fetchCount);
responseText += fetchText;
remainText = remainText.slice(fetchCount);
options.onUpdate?.(responseText, fetchText);
}
requestAnimationFrame(animateResponseText);
}
// start animation
animateResponseText();
const finish = () => {
if (!finished) {
finished = true;
options.onFinish(responseText + remainText);
}
};
controller.signal.onabort = finish;
fetchEventSource(chatPath, {
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);
const contentType = res.headers.get("content-type");
console.log(
"[OpenAI] request response content type: ",
contentType,
);
if (contentType?.startsWith("text/plain")) {
responseText = await res.clone().text();
return finish();
}
if (
!res.ok ||
!res.headers
.get("content-type")
?.startsWith(EventStreamContentType) ||
res.status !== 200
) {
const responseTexts = [responseText];
let extraInfo = await res.clone().text();
try {
const resJson = await res.clone().json();
extraInfo = prettyObject(resJson);
} catch {}
if (res.status === 401) {
responseTexts.push(Locale.Error.Unauthorized);
}
if (extraInfo) {
responseTexts.push(extraInfo);
}
responseText = responseTexts.join("\n\n");
return finish();
}
},
onmessage(msg) {
if (msg.data === "[DONE]" || finished) {
return finish();
}
const text = msg.data;
try {
const json = JSON.parse(text) as {
choices: Array<{
delta: {
content: string;
};
}>;
};
const delta = json.choices[0]?.delta?.content;
if (delta) {
remainText += delta;
}
} catch (e) {
console.error("[Request] parse error", text);
}
},
onclose() {
finish();
},
onerror(e) {
options.onError?.(e);
throw e;
},
openWhenHidden: true,
});
} else {
const res = await fetch(chatPath, chatPayload);
clearTimeout(requestTimeoutId);
const resJson = await res.json();
if (resJson?.promptFeedback?.blockReason) {
// being blocked
options.onError?.(
new Error(
"Message is being blocked for reason: " +
resJson.promptFeedback.blockReason,
),
);
}
const message = this.extractMessage(resJson);
options.onFinish(message);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
options.onError?.(e as Error);
}
}
usage(): Promise<LLMUsage> {
throw new Error("Method not implemented.");
}
async models(): Promise<LLMModel[]> {
return [];
}
path(path: string): string {
return "/api/google/" + path;
}
}

View File

@ -115,12 +115,35 @@ export class ChatGPTApi implements LLMApi {
if (shouldStream) {
let responseText = "";
let remainText = "";
let finished = false;
// animate the response to make it look smooth
function animateResponseText() {
if (finished || controller.signal.aborted) {
responseText += remainText;
console.log("[Response Animation] finished");
return;
}
if (remainText.length > 0) {
const fetchCount = Math.max(1, Math.round(remainText.length / 60));
const fetchText = remainText.slice(0, fetchCount);
responseText += fetchText;
remainText = remainText.slice(fetchCount);
options.onUpdate?.(responseText, fetchText);
}
requestAnimationFrame(animateResponseText);
}
// start animation
animateResponseText();
const finish = () => {
if (!finished) {
options.onFinish(responseText);
finished = true;
options.onFinish(responseText + remainText);
}
};
@ -183,8 +206,7 @@ export class ChatGPTApi implements LLMApi {
};
const delta = json.choices[0]?.delta?.content;
if (delta) {
responseText += delta;
options.onUpdate?.(responseText, delta);
remainText += delta;
}
} catch (e) {
console.error("[Request] parse error", text);
@ -301,6 +323,11 @@ export class ChatGPTApi implements LLMApi {
return chatModels.map((m) => ({
name: m.id,
available: true,
provider: {
id: "openai",
providerName: "OpenAI",
providerType: "openai",
},
}));
}
}

View File

@ -64,6 +64,17 @@ export function AuthPage() {
);
}}
/>
<input
className={styles["auth-input"]}
type="password"
placeholder={Locale.Settings.Access.Google.ApiKey.Placeholder}
value={accessStore.googleApiKey}
onChange={(e) => {
accessStore.update(
(access) => (access.googleApiKey = e.currentTarget.value),
);
}}
/>
</>
) : null}

View File

@ -449,8 +449,7 @@ export function ChatActions(props: {
);
showToast(nextModel);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [currentModel, models]);
}, [chatStore, currentModel, models]);
return (
<div className={styles["chat-input-actions"]}>

View File

@ -10,7 +10,10 @@ import BotIcon from "../icons/bot.svg";
import BlackBotIcon from "../icons/black-bot.svg";
export function getEmojiUrl(unified: string, style: EmojiStyle) {
return `https://cdn.staticfile.org/emoji-datasource-apple/14.0.0/img/${style}/64/${unified}.png`;
// Whoever owns this Content Delivery Network (CDN), I am using your CDN to serve emojis
// The old CDN is broken, so I had to switch to this one
// Author: https://github.com/H0llyW00dzZ
return `https://cdn.jsdelivr.net/npm/emoji-datasource-apple/img/${style}/64/${unified}.png`;
}
export function AvatarPicker(props: {

View File

@ -29,10 +29,11 @@ import NextImage from "next/image";
import { toBlob, toPng } from "html-to-image";
import { DEFAULT_MASK_AVATAR } from "../store/mask";
import { api } from "../client/api";
import { prettyObject } from "../utils/format";
import { EXPORT_MESSAGE_CLASS_NAME } from "../constant";
import { EXPORT_MESSAGE_CLASS_NAME, ModelProvider } from "../constant";
import { getClientConfig } from "../config/client";
import { ClientApi } from "../client/api";
const Markdown = dynamic(async () => (await import("./markdown")).Markdown, {
loading: () => <LoadingIcon />,
@ -301,10 +302,17 @@ export function PreviewActions(props: {
}) {
const [loading, setLoading] = useState(false);
const [shouldExport, setShouldExport] = useState(false);
const config = useAppConfig();
const onRenderMsgs = (msgs: ChatMessage[]) => {
setShouldExport(false);
var api: ClientApi;
if (config.modelConfig.model === "gemini-pro") {
api = new ClientApi(ModelProvider.GeminiPro);
} else {
api = new ClientApi(ModelProvider.GPT);
}
api
.share(msgs)
.then((res) => {
@ -530,7 +538,7 @@ export function ImagePreviewer(props: {
</div>
<div>
<div className={styles["main-title"]}>ChatGPT Next Web</div>
<div className={styles["main-title"]}>NextChat</div>
<div className={styles["sub-title"]}>
github.com/Yidadaa/ChatGPT-Next-Web
</div>

View File

@ -12,7 +12,7 @@ import LoadingIcon from "../icons/three-dots.svg";
import { getCSSVar, useMobileScreen } from "../utils";
import dynamic from "next/dynamic";
import { Path, SlotID } from "../constant";
import { ModelProvider, Path, SlotID } from "../constant";
import { ErrorBoundary } from "./error";
import { getISOLang, getLang } from "../locales";
@ -27,7 +27,7 @@ import { SideBar } from "./sidebar";
import { useAppConfig } from "../store/config";
import { AuthPage } from "./auth";
import { getClientConfig } from "../config/client";
import { api } from "../client/api";
import { ClientApi } from "../client/api";
import { useAccessStore } from "../store";
export function Loading(props: { noLogo?: boolean }) {
@ -128,7 +128,8 @@ function Screen() {
const isHome = location.pathname === Path.Home;
const isAuth = location.pathname === Path.Auth;
const isMobileScreen = useMobileScreen();
const shouldTightBorder = getClientConfig()?.isApp || (config.tightBorder && !isMobileScreen);
const shouldTightBorder =
getClientConfig()?.isApp || (config.tightBorder && !isMobileScreen);
useEffect(() => {
loadAsyncGoogleFont();
@ -169,6 +170,12 @@ function Screen() {
export function useLoadData() {
const config = useAppConfig();
var api: ClientApi;
if (config.modelConfig.model === "gemini-pro") {
api = new ClientApi(ModelProvider.GeminiPro);
} else {
api = new ClientApi(ModelProvider.GPT);
}
useEffect(() => {
(async () => {
const models = await api.llm.models();

View File

@ -29,7 +29,7 @@ export function ModelConfigList(props: {
.filter((v) => v.available)
.map((v, i) => (
<option value={v.name} key={i}>
{v.displayName}
{v.displayName}({v.provider?.providerName})
</option>
))}
</Select>
@ -91,79 +91,84 @@ export function ModelConfigList(props: {
}
></input>
</ListItem>
<ListItem
title={Locale.Settings.PresencePenalty.Title}
subTitle={Locale.Settings.PresencePenalty.SubTitle}
>
<InputRange
value={props.modelConfig.presence_penalty?.toFixed(1)}
min="-2"
max="2"
step="0.1"
onChange={(e) => {
props.updateConfig(
(config) =>
(config.presence_penalty =
ModalConfigValidator.presence_penalty(
e.currentTarget.valueAsNumber,
)),
);
}}
></InputRange>
</ListItem>
<ListItem
title={Locale.Settings.FrequencyPenalty.Title}
subTitle={Locale.Settings.FrequencyPenalty.SubTitle}
>
<InputRange
value={props.modelConfig.frequency_penalty?.toFixed(1)}
min="-2"
max="2"
step="0.1"
onChange={(e) => {
props.updateConfig(
(config) =>
(config.frequency_penalty =
ModalConfigValidator.frequency_penalty(
e.currentTarget.valueAsNumber,
)),
);
}}
></InputRange>
</ListItem>
{props.modelConfig.model === "gemini-pro" ? null : (
<>
<ListItem
title={Locale.Settings.PresencePenalty.Title}
subTitle={Locale.Settings.PresencePenalty.SubTitle}
>
<InputRange
value={props.modelConfig.presence_penalty?.toFixed(1)}
min="-2"
max="2"
step="0.1"
onChange={(e) => {
props.updateConfig(
(config) =>
(config.presence_penalty =
ModalConfigValidator.presence_penalty(
e.currentTarget.valueAsNumber,
)),
);
}}
></InputRange>
</ListItem>
<ListItem
title={Locale.Settings.InjectSystemPrompts.Title}
subTitle={Locale.Settings.InjectSystemPrompts.SubTitle}
>
<input
type="checkbox"
checked={props.modelConfig.enableInjectSystemPrompts}
onChange={(e) =>
props.updateConfig(
(config) =>
(config.enableInjectSystemPrompts = e.currentTarget.checked),
)
}
></input>
</ListItem>
<ListItem
title={Locale.Settings.FrequencyPenalty.Title}
subTitle={Locale.Settings.FrequencyPenalty.SubTitle}
>
<InputRange
value={props.modelConfig.frequency_penalty?.toFixed(1)}
min="-2"
max="2"
step="0.1"
onChange={(e) => {
props.updateConfig(
(config) =>
(config.frequency_penalty =
ModalConfigValidator.frequency_penalty(
e.currentTarget.valueAsNumber,
)),
);
}}
></InputRange>
</ListItem>
<ListItem
title={Locale.Settings.InputTemplate.Title}
subTitle={Locale.Settings.InputTemplate.SubTitle}
>
<input
type="text"
value={props.modelConfig.template}
onChange={(e) =>
props.updateConfig(
(config) => (config.template = e.currentTarget.value),
)
}
></input>
</ListItem>
<ListItem
title={Locale.Settings.InjectSystemPrompts.Title}
subTitle={Locale.Settings.InjectSystemPrompts.SubTitle}
>
<input
type="checkbox"
checked={props.modelConfig.enableInjectSystemPrompts}
onChange={(e) =>
props.updateConfig(
(config) =>
(config.enableInjectSystemPrompts =
e.currentTarget.checked),
)
}
></input>
</ListItem>
<ListItem
title={Locale.Settings.InputTemplate.Title}
subTitle={Locale.Settings.InputTemplate.SubTitle}
>
<input
type="text"
value={props.modelConfig.template}
onChange={(e) =>
props.updateConfig(
(config) => (config.template = e.currentTarget.value),
)
}
></input>
</ListItem>
</>
)}
<ListItem
title={Locale.Settings.HistoryCount.Title}
subTitle={Locale.Settings.HistoryCount.SubTitle}

View File

@ -52,6 +52,7 @@ import { copyToClipboard } from "../utils";
import Link from "next/link";
import {
Azure,
Google,
OPENAI_BASE_URL,
Path,
RELEASE_URL,
@ -583,6 +584,7 @@ export function Settings() {
const accessStore = useAccessStore();
const shouldHideBalanceQuery = useMemo(() => {
const isOpenAiUrl = accessStore.openaiUrl.includes(OPENAI_BASE_URL);
return (
accessStore.hideBalanceQuery ||
isOpenAiUrl ||
@ -635,6 +637,12 @@ export function Settings() {
navigate(Path.Home);
}
};
if (clientConfig?.isApp) {
// Force the custom endpoint setting to true when running as the desktop app
accessStore.update((state) => {
state.useCustomConfig = true;
});
}
document.addEventListener("keydown", keydownEvent);
return () => {
document.removeEventListener("keydown", keydownEvent);
@ -909,21 +917,26 @@ export function Settings() {
{!accessStore.hideUserApiKey && (
<>
<ListItem
title={Locale.Settings.Access.CustomEndpoint.Title}
subTitle={Locale.Settings.Access.CustomEndpoint.SubTitle}
>
<input
type="checkbox"
checked={accessStore.useCustomConfig}
onChange={(e) =>
accessStore.update(
(access) =>
(access.useCustomConfig = e.currentTarget.checked),
)
}
></input>
</ListItem>
{
// Conditionally render the following ListItem based on clientConfig.isApp
!clientConfig?.isApp && ( // only show if isApp is false
<ListItem
title={Locale.Settings.Access.CustomEndpoint.Title}
subTitle={Locale.Settings.Access.CustomEndpoint.SubTitle}
>
<input
type="checkbox"
checked={accessStore.useCustomConfig}
onChange={(e) =>
accessStore.update(
(access) =>
(access.useCustomConfig = e.currentTarget.checked),
)
}
></input>
</ListItem>
)
}
{accessStore.useCustomConfig && (
<>
<ListItem
@ -987,7 +1000,7 @@ export function Settings() {
/>
</ListItem>
</>
) : (
) : accessStore.provider === "Azure" ? (
<>
<ListItem
title={Locale.Settings.Access.Azure.Endpoint.Title}
@ -1046,7 +1059,66 @@ export function Settings() {
></input>
</ListItem>
</>
)}
) : accessStore.provider === "Google" ? (
<>
<ListItem
title={Locale.Settings.Access.Google.Endpoint.Title}
subTitle={
Locale.Settings.Access.Google.Endpoint.SubTitle +
Google.ExampleEndpoint
}
>
<input
type="text"
value={accessStore.googleUrl}
placeholder={Google.ExampleEndpoint}
onChange={(e) =>
accessStore.update(
(access) =>
(access.googleUrl = e.currentTarget.value),
)
}
></input>
</ListItem>
<ListItem
title={Locale.Settings.Access.Google.ApiKey.Title}
subTitle={Locale.Settings.Access.Google.ApiKey.SubTitle}
>
<PasswordInput
value={accessStore.googleApiKey}
type="text"
placeholder={
Locale.Settings.Access.Google.ApiKey.Placeholder
}
onChange={(e) => {
accessStore.update(
(access) =>
(access.googleApiKey = e.currentTarget.value),
);
}}
/>
</ListItem>
<ListItem
title={Locale.Settings.Access.Google.ApiVerion.Title}
subTitle={
Locale.Settings.Access.Google.ApiVerion.SubTitle
}
>
<input
type="text"
value={accessStore.googleApiVersion}
placeholder="2023-08-01-preview"
onChange={(e) =>
accessStore.update(
(access) =>
(access.googleApiVersion =
e.currentTarget.value),
)
}
></input>
</ListItem>
</>
) : null}
</>
)}
</>

View File

@ -155,7 +155,7 @@ export function SideBar(props: { className?: string }) {
>
<div className={styles["sidebar-header"]} data-tauri-drag-region>
<div className={styles["sidebar-title"]} data-tauri-drag-region>
ChatGPT Next
NextChat
</div>
<div className={styles["sidebar-sub-title"]}>
Build your own AI assistant.

View File

@ -26,6 +26,10 @@ declare global {
AZURE_URL?: string; // https://{azure-url}/openai/deployments/{deploy-name}
AZURE_API_KEY?: string;
AZURE_API_VERSION?: string;
// google only
GOOGLE_API_KEY?: string;
GOOGLE_URL?: string;
}
}
}
@ -61,10 +65,19 @@ export const getServerSideConfig = () => {
}
const isAzure = !!process.env.AZURE_URL;
const isGoogle = !!process.env.GOOGLE_API_KEY;
const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? "";
const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
const randomIndex = Math.floor(Math.random() * apiKeys.length);
const apiKey = apiKeys[randomIndex];
console.log(
`[Server Config] using ${randomIndex + 1} of ${apiKeys.length} api key`,
);
return {
baseUrl: process.env.BASE_URL,
apiKey: process.env.OPENAI_API_KEY,
apiKey,
openaiOrgId: process.env.OPENAI_ORG_ID,
isAzure,
@ -72,6 +85,10 @@ export const getServerSideConfig = () => {
azureApiKey: process.env.AZURE_API_KEY,
azureApiVersion: process.env.AZURE_API_VERSION,
isGoogle,
googleApiKey: process.env.GOOGLE_API_KEY,
googleUrl: process.env.GOOGLE_URL,
needCode: ACCESS_CODES.size > 0,
code: process.env.CODE,
codes: ACCESS_CODES,

View File

@ -12,6 +12,8 @@ export const DEFAULT_CORS_HOST = "https://a.nextweb.fun";
export const DEFAULT_API_HOST = `${DEFAULT_CORS_HOST}/api/proxy`;
export const OPENAI_BASE_URL = "https://api.openai.com";
export const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/";
export enum Path {
Home = "/",
Chat = "/chat",
@ -65,6 +67,12 @@ export const EXPORT_MESSAGE_CLASS_NAME = "export-markdown";
export enum ServiceProvider {
OpenAI = "OpenAI",
Azure = "Azure",
Google = "Google",
}
export enum ModelProvider {
GPT = "GPT",
GeminiPro = "GeminiPro",
}
export const OpenaiPath = {
@ -78,6 +86,14 @@ export const Azure = {
ExampleEndpoint: "https://{resource-url}/openai/deployments/{deploy-id}",
};
export const Google = {
ExampleEndpoint:
"https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent",
ChatPath: "v1beta/models/gemini-pro:generateContent",
// /api/openai/v1/chat/completions
};
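A short illustration of how these constants are combined into request URLs by the Gemini client and the `/api/google/` proxy route added in this commit (the `API_KEY` placeholder is illustrative):

```ts
// Client side: GeminiProApi.path() prefixes ChatPath with the local proxy route.
const clientPath = "/api/google/" + Google.ChatPath;
// => "/api/google/v1beta/models/gemini-pro:generateContent"

// Server side: the proxy route strips the "/api/google/" prefix, resolves the base URL
// (GOOGLE_URL if set, otherwise GEMINI_BASE_URL without its trailing slash),
// and appends the key as a query parameter.
const baseUrl = GEMINI_BASE_URL.replace(/\/$/, "");
const upstreamUrl = `${baseUrl}/${Google.ChatPath}?key=API_KEY`;
// => "https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent?key=API_KEY"
```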
export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
export const DEFAULT_SYSTEM_TEMPLATE = `
You are ChatGPT, a large language model trained by OpenAI.
@ -100,58 +116,137 @@ export const DEFAULT_MODELS = [
{
name: "gpt-4",
available: true,
provider: {
id: "openai",
providerName: "OpenAI",
providerType: "openai",
},
},
{
name: "gpt-4-0314",
available: true,
provider: {
id: "openai",
providerName: "OpenAI",
providerType: "openai",
},
},
{
name: "gpt-4-0613",
available: true,
provider: {
id: "openai",
providerName: "OpenAI",
providerType: "openai",
},
},
{
name: "gpt-4-32k",
available: true,
provider: {
id: "openai",
providerName: "OpenAI",
providerType: "openai",
},
},
{
name: "gpt-4-32k-0314",
available: true,
provider: {
id: "openai",
providerName: "OpenAI",
providerType: "openai",
},
},
{
name: "gpt-4-32k-0613",
available: true,
provider: {
id: "openai",
providerName: "OpenAI",
providerType: "openai",
},
},
{
name: "gpt-4-1106-preview",
available: true,
provider: {
id: "openai",
providerName: "OpenAI",
providerType: "openai",
},
},
{
name: "gpt-4-vision-preview",
available: true,
provider: {
id: "openai",
providerName: "OpenAI",
providerType: "openai",
},
},
{
name: "gpt-3.5-turbo",
available: true,
provider: {
id: "openai",
providerName: "OpenAI",
providerType: "openai",
},
},
{
name: "gpt-3.5-turbo-0301",
available: true,
provider: {
id: "openai",
providerName: "OpenAI",
providerType: "openai",
},
},
{
name: "gpt-3.5-turbo-0613",
available: true,
provider: {
id: "openai",
providerName: "OpenAI",
providerType: "openai",
},
},
{
name: "gpt-3.5-turbo-1106",
available: true,
provider: {
id: "openai",
providerName: "OpenAI",
providerType: "openai",
},
},
{
name: "gpt-3.5-turbo-16k",
available: true,
provider: {
id: "openai",
providerName: "OpenAI",
providerType: "openai",
},
},
{
name: "gpt-3.5-turbo-16k-0613",
available: true,
provider: {
id: "openai",
providerName: "OpenAI",
providerType: "openai",
},
},
{
name: "gemini-pro",
available: true,
provider: {
id: "google",
providerName: "Google",
providerType: "google",
},
},
] as const;

View File

@ -6,7 +6,7 @@ import { getClientConfig } from "./config/client";
import { type Metadata } from "next";
export const metadata: Metadata = {
title: "ChatGPT Next Web",
title: "NextChat",
description: "Your personal ChatGPT Chat Bot.",
viewport: {
width: "device-width",
@ -18,7 +18,7 @@ export const metadata: Metadata = {
{ media: "(prefers-color-scheme: dark)", color: "#151515" },
],
appleWebApp: {
title: "ChatGPT Next Web",
title: "NextChat",
statusBarStyle: "default",
},
};

View File

@ -13,7 +13,7 @@ const cn = {
Auth: {
Title: "需要密码",
Tips: "管理员开启了密码验证,请在下方填入访问码",
SubTips: "或者输入你的 OpenAI API 密钥",
SubTips: "或者输入你的 OpenAI 或 Google API 密钥",
Input: "在此处填写访问码",
Confirm: "确认",
Later: "稍后再说",
@ -312,6 +312,23 @@ const cn = {
SubTitle: "选择指定的部分版本",
},
},
Google: {
ApiKey: {
Title: "接口密钥",
SubTitle: "使用自定义 Google AI Studio API Key 绕过密码访问限制",
Placeholder: "Google AI Studio API Key",
},
Endpoint: {
Title: "接口地址",
SubTitle: "样例:",
},
ApiVerion: {
Title: "接口版本 (gemini-pro api version)",
SubTitle: "选择指定的部分版本",
},
},
CustomModel: {
Title: "自定义模型名",
SubTitle: "增加自定义模型可选项,使用英文逗号隔开",
@ -347,7 +364,7 @@ const cn = {
Prompt: {
History: (content: string) => "这是历史聊天总结作为前情提要:" + content,
Topic:
"使用四到五个字直接返回这句话的简要主题,不要解释、不要标点、不要语气词、不要多余文本,如果没有主题,请直接返回“闲聊”",
"使用四到五个字直接返回这句话的简要主题,不要解释、不要标点、不要语气词、不要多余文本,不要加粗,如果没有主题,请直接返回“闲聊”",
Summarize:
"简要总结一下对话内容,用作后续的上下文提示 prompt控制在 200 字以内",
},
@ -441,9 +458,9 @@ const cn = {
Config: "配置",
},
Exporter: {
Description : {
Title: "只有清除上下文之后的消息会被展示"
},
Description: {
Title: "只有清除上下文之后的消息会被展示",
},
Model: "模型",
Messages: "消息",
Topic: "主题",

View File

@ -15,7 +15,7 @@ const en: LocaleType = {
Auth: {
Title: "Need Access Code",
Tips: "Please enter access code below",
SubTips: "Or enter your OpenAI API Key",
SubTips: "Or enter your OpenAI or Google API Key",
Input: "access code",
Confirm: "Confirm",
Later: "Later",
@ -319,6 +319,24 @@ const en: LocaleType = {
Title: "Custom Models",
SubTitle: "Custom model options, seperated by comma",
},
Google: {
ApiKey: {
Title: "API Key",
SubTitle:
"Bypass password access restrictions using a custom Google AI Studio API Key",
Placeholder: "Google AI Studio API Key",
},
Endpoint: {
Title: "Endpoint Address",
SubTitle: "Example:",
},
ApiVerion: {
Title: "API Version (gemini-pro api version)",
SubTitle: "Select a specific part version",
},
},
},
Model: "Model",
@ -353,7 +371,7 @@ const en: LocaleType = {
History: (content: string) =>
"This is a summary of the chat history as a recap: " + content,
Topic:
"Please generate a four to five word title summarizing our conversation without any lead-in, punctuation, quotation marks, periods, symbols, or additional text. Remove enclosing quotation marks.",
"Please generate a four to five word title summarizing our conversation without any lead-in, punctuation, quotation marks, periods, symbols, bold text, or additional text. Remove enclosing quotation marks.",
Summarize:
"Summarize the discussion briefly in 200 words or less to use as a prompt for future context.",
},
@ -443,8 +461,8 @@ const en: LocaleType = {
},
Exporter: {
Description: {
Title: "Only messages after clearing the context will be displayed"
},
Title: "Only messages after clearing the context will be displayed",
},
Model: "Model",
Messages: "Messages",
Topic: "Topic",

View File

@ -1,5 +1,6 @@
import cn from "./cn";
import en from "./en";
import pt from "./pt";
import tw from "./tw";
import id from "./id";
import fr from "./fr";
@ -24,6 +25,7 @@ const ALL_LANGS = {
cn,
en,
tw,
pt,
jp,
ko,
id,
@ -47,6 +49,7 @@ export const AllLangs = Object.keys(ALL_LANGS) as Lang[];
export const ALL_LANG_OPTIONS: Record<Lang, string> = {
cn: "简体中文",
en: "English",
pt: "Português",
tw: "繁體中文",
jp: "日本語",
ko: "한국어",

app/locales/pt.ts Normal file
View File

@ -0,0 +1,466 @@
import { SubmitKey } from "../store/config";
import { PartialLocaleType } from "../locales/index";
import { getClientConfig } from "../config/client";
const isApp = !!getClientConfig()?.isApp;
const pt: PartialLocaleType = {
WIP: "Em breve...",
Error: {
Unauthorized: isApp
? "Chave API inválida, por favor verifique em [Configurações](/#/settings)."
: "Acesso não autorizado, por favor insira o código de acesso em [auth](/#/auth) ou insira sua Chave API OpenAI.",
},
Auth: {
Title: "Necessário Código de Acesso",
Tips: "Por favor, insira o código de acesso abaixo",
SubTips: "Ou insira sua Chave API OpenAI",
Input: "código de acesso",
Confirm: "Confirmar",
Later: "Depois",
},
ChatItem: {
ChatItemCount: (count: number) => `${count} mensagens`,
},
Chat: {
SubTitle: (count: number) => `${count} mensagens`,
EditMessage: {
Title: "Editar Todas as Mensagens",
Topic: {
Title: "Tópico",
SubTitle: "Mudar o tópico atual",
},
},
Actions: {
ChatList: "Ir Para Lista de Chat",
CompressedHistory: "Prompt de Memória Histórica Comprimida",
Export: "Exportar Todas as Mensagens como Markdown",
Copy: "Copiar",
Stop: "Parar",
Retry: "Tentar Novamente",
Pin: "Fixar",
PinToastContent: "Fixada 1 mensagem para prompts contextuais",
PinToastAction: "Visualizar",
Delete: "Deletar",
Edit: "Editar",
},
Commands: {
new: "Iniciar um novo chat",
newm: "Iniciar um novo chat com máscara",
next: "Próximo Chat",
prev: "Chat Anterior",
clear: "Limpar Contexto",
del: "Deletar Chat",
},
InputActions: {
Stop: "Parar",
ToBottom: "Para o Mais Recente",
Theme: {
auto: "Automático",
light: "Tema Claro",
dark: "Tema Escuro",
},
Prompt: "Prompts",
Masks: "Máscaras",
Clear: "Limpar Contexto",
Settings: "Configurações",
},
Rename: "Renomear Chat",
Typing: "Digitando…",
Input: (submitKey: string) => {
var inputHints = `${submitKey} para enviar`;
if (submitKey === String(SubmitKey.Enter)) {
inputHints += ", Shift + Enter para quebrar linha";
}
return inputHints + ", / para buscar prompts, : para usar comandos";
},
Send: "Enviar",
Config: {
Reset: "Redefinir para Padrão",
SaveAs: "Salvar como Máscara",
},
IsContext: "Prompt Contextual",
},
Export: {
Title: "Exportar Mensagens",
Copy: "Copiar Tudo",
Download: "Baixar",
MessageFromYou: "Mensagem De Você",
MessageFromChatGPT: "Mensagem De ChatGPT",
Share: "Compartilhar para ShareGPT",
Format: {
Title: "Formato de Exportação",
SubTitle: "Markdown ou Imagem PNG",
},
IncludeContext: {
Title: "Incluindo Contexto",
SubTitle: "Exportar prompts de contexto na máscara ou não",
},
Steps: {
Select: "Selecionar",
Preview: "Pré-visualizar",
},
Image: {
Toast: "Capturando Imagem...",
Modal:
"Pressione longamente ou clique com o botão direito para salvar a imagem",
},
},
Select: {
Search: "Buscar",
All: "Selecionar Tudo",
Latest: "Selecionar Mais Recente",
Clear: "Limpar",
},
Memory: {
Title: "Prompt de Memória",
EmptyContent: "Nada ainda.",
Send: "Enviar Memória",
Copy: "Copiar Memória",
Reset: "Resetar Sessão",
ResetConfirm:
"Resetar irá limpar o histórico de conversa atual e a memória histórica. Você tem certeza que quer resetar?",
},
Home: {
NewChat: "Novo Chat",
DeleteChat: "Confirmar para deletar a conversa selecionada?",
DeleteToast: "Chat Deletado",
Revert: "Reverter",
},
Settings: {
Title: "Configurações",
SubTitle: "Todas as Configurações",
Danger: {
Reset: {
Title: "Resetar Todas as Configurações",
SubTitle: "Resetar todos os itens de configuração para o padrão",
Action: "Resetar",
Confirm: "Confirmar para resetar todas as configurações para o padrão?",
},
Clear: {
Title: "Limpar Todos os Dados",
SubTitle: "Limpar todas as mensagens e configurações",
Action: "Limpar",
Confirm: "Confirmar para limpar todas as mensagens e configurações?",
},
},
Lang: {
Name: "Language",
All: "Todos os Idiomas",
},
Avatar: "Avatar",
FontSize: {
Title: "Tamanho da Fonte",
SubTitle: "Ajustar o tamanho da fonte do conteúdo do chat",
},
InjectSystemPrompts: {
Title: "Inserir Prompts de Sistema",
SubTitle: "Inserir um prompt de sistema global para cada requisição",
},
InputTemplate: {
Title: "Modelo de Entrada",
SubTitle: "A mensagem mais recente será preenchida neste modelo",
},
Update: {
Version: (x: string) => `Versão: ${x}`,
IsLatest: "Última versão",
CheckUpdate: "Verificar Atualização",
IsChecking: "Verificando atualização...",
FoundUpdate: (x: string) => `Nova versão encontrada: ${x}`,
GoToUpdate: "Atualizar",
},
SendKey: "Tecla de Envio",
Theme: "Tema",
TightBorder: "Borda Ajustada",
SendPreviewBubble: {
Title: "Bolha de Pré-visualização de Envio",
SubTitle: "Pré-visualizar markdown na bolha",
},
AutoGenerateTitle: {
Title: "Gerar Título Automaticamente",
SubTitle: "Gerar um título adequado baseado no conteúdo da conversa",
},
Sync: {
CloudState: "Última Atualização",
NotSyncYet: "Ainda não sincronizado",
Success: "Sincronização bem sucedida",
Fail: "Falha na sincronização",
Config: {
Modal: {
Title: "Configurar Sincronização",
Check: "Verificar Conexão",
},
SyncType: {
Title: "Tipo de Sincronização",
SubTitle: "Escolha seu serviço de sincronização favorito",
},
Proxy: {
Title: "Habilitar Proxy CORS",
SubTitle: "Habilitar um proxy para evitar restrições de cross-origin",
},
ProxyUrl: {
Title: "Endpoint de Proxy",
SubTitle: "Apenas aplicável ao proxy CORS embutido para este projeto",
},
WebDav: {
Endpoint: "Endpoint WebDAV",
UserName: "Nome de Usuário",
Password: "Senha",
},
UpStash: {
Endpoint: "URL REST Redis UpStash",
UserName: "Nome do Backup",
Password: "Token REST Redis UpStash",
},
},
LocalState: "Dados Locais",
Overview: (overview: any) => {
return `${overview.chat} chats, ${overview.message} mensagens, ${overview.prompt} prompts, ${overview.mask} máscaras`;
},
ImportFailed: "Falha ao importar do arquivo",
},
Mask: {
Splash: {
Title: "Tela de Início da Máscara",
SubTitle:
"Mostrar uma tela de início da máscara antes de iniciar novo chat",
},
Builtin: {
Title: "Esconder Máscaras Embutidas",
SubTitle: "Esconder máscaras embutidas na lista de máscaras",
},
},
Prompt: {
Disable: {
Title: "Desabilitar auto-completar",
SubTitle: "Digite / para acionar auto-completar",
},
List: "Lista de Prompts",
ListCount: (builtin: number, custom: number) =>
`${builtin} embutidos, ${custom} definidos pelo usuário`,
Edit: "Editar",
Modal: {
Title: "Lista de Prompts",
Add: "Adicionar Um",
Search: "Buscar Prompts",
},
EditModal: {
Title: "Editar Prompt",
},
},
HistoryCount: {
Title: "Contagem de Mensagens Anexadas",
SubTitle: "Número de mensagens enviadas anexadas por requisição",
},
CompressThreshold: {
Title: "Limite de Compressão de Histórico",
SubTitle:
"Irá comprimir se o comprimento das mensagens não comprimidas exceder o valor",
},
Usage: {
Title: "Saldo da Conta",
SubTitle(used: any, total: any) {
return `Usado este mês ${used}, assinatura ${total}`;
},
IsChecking: "Verificando...",
Check: "Verificar",
NoAccess: "Insira a Chave API para verificar o saldo",
},
Access: {
AccessCode: {
Title: "Código de Acesso",
SubTitle: "Controle de Acesso Habilitado",
Placeholder: "Insira o Código",
},
CustomEndpoint: {
Title: "Endpoint Personalizado",
SubTitle: "Use serviço personalizado Azure ou OpenAI",
},
Provider: {
Title: "Provedor do Modelo",
SubTitle: "Selecione Azure ou OpenAI",
},
OpenAI: {
ApiKey: {
Title: "Chave API OpenAI",
SubTitle: "Usar Chave API OpenAI personalizada",
Placeholder: "sk-xxx",
},
Endpoint: {
Title: "Endpoint OpenAI",
SubTitle:
"Deve começar com http(s):// ou usar /api/openai como padrão",
},
},
Azure: {
ApiKey: {
Title: "Chave API Azure",
SubTitle: "Verifique sua chave API do console Azure",
Placeholder: "Chave API Azure",
},
Endpoint: {
Title: "Endpoint Azure",
SubTitle: "Exemplo: ",
},
ApiVerion: {
Title: "Versão API Azure",
SubTitle: "Verifique sua versão API do console Azure",
},
},
CustomModel: {
Title: "Modelos Personalizados",
SubTitle: "Opções de modelo personalizado, separados por vírgula",
},
},
Model: "Modelo",
Temperature: {
Title: "Temperatura",
SubTitle: "Um valor maior torna a saída mais aleatória",
},
TopP: {
Title: "Top P",
SubTitle: "Não altere este valor junto com a temperatura",
},
MaxTokens: {
Title: "Máximo de Tokens",
SubTitle: "Comprimento máximo de tokens de entrada e tokens gerados",
},
PresencePenalty: {
Title: "Penalidade de Presença",
SubTitle:
"Um valor maior aumenta a probabilidade de falar sobre novos tópicos",
},
FrequencyPenalty: {
Title: "Penalidade de Frequência",
SubTitle:
"Um valor maior diminui a probabilidade de repetir a mesma linha",
},
},
Store: {
DefaultTopic: "Nova Conversa",
BotHello: "Olá! Como posso ajudá-lo hoje?",
Error: "Algo deu errado, por favor tente novamente mais tarde.",
Prompt: {
History: (content: string) =>
"Este é um resumo do histórico de chat como um recapitulativo: " +
content,
Topic:
"Por favor, gere um título de quatro a cinco palavras resumindo nossa conversa sem qualquer introdução, pontuação, aspas, períodos, símbolos ou texto adicional. Remova as aspas que o envolvem.",
Summarize:
"Resuma a discussão brevemente em 200 palavras ou menos para usar como um prompt para o contexto futuro.",
},
},
Copy: {
Success: "Copiado para a área de transferência",
Failed:
"Falha na cópia, por favor conceda permissão para acessar a área de transferência",
},
Download: {
Success: "Conteúdo baixado para seu diretório.",
Failed: "Falha no download.",
},
Context: {
Toast: (x: any) => `Com ${x} prompts contextuais`,
Edit: "Configurações do Chat Atual",
Add: "Adicionar um Prompt",
Clear: "Contexto Limpo",
Revert: "Reverter",
},
Plugin: {
Name: "Plugin",
},
FineTuned: {
Sysmessage: "Você é um assistente que",
},
Mask: {
Name: "Máscara",
Page: {
Title: "Template de Prompt",
SubTitle: (count: number) => `${count} templates de prompt`,
Search: "Buscar Templates",
Create: "Criar",
},
Item: {
Info: (count: number) => `${count} prompts`,
Chat: "Chat",
View: "Visualizar",
Edit: "Editar",
Delete: "Deletar",
DeleteConfirm: "Confirmar para deletar?",
},
EditModal: {
Title: (readonly: boolean) =>
`Editar Template de Prompt ${readonly ? "(somente leitura)" : ""}`,
Download: "Baixar",
Clone: "Clonar",
},
Config: {
Avatar: "Avatar do Bot",
Name: "Nome do Bot",
Sync: {
Title: "Usar Configuração Global",
SubTitle: "Usar configuração global neste chat",
Confirm:
"Confirmar para substituir a configuração personalizada pela configuração global?",
},
HideContext: {
Title: "Esconder Prompts de Contexto",
SubTitle: "Não mostrar prompts de contexto no chat",
},
Share: {
Title: "Compartilhar Esta Máscara",
SubTitle: "Gerar um link para esta máscara",
Action: "Copiar Link",
},
},
},
NewChat: {
Return: "Retornar",
Skip: "Apenas Começar",
Title: "Escolher uma Máscara",
SubTitle: "Converse com a Alma por trás da Máscara",
More: "Encontre Mais",
NotShow: "Nunca Mostrar Novamente",
ConfirmNoShow:
"Confirmar para desabilitarVocê pode habilitar nas configurações depois.",
},
UI: {
Confirm: "Confirmar",
Cancel: "Cancelar",
Close: "Fechar",
Create: "Criar",
Edit: "Editar",
Export: "Exportar",
Import: "Importar",
Sync: "Sincronizar",
Config: "Configurar",
},
Exporter: {
Description: {
Title: "Apenas mensagens após a limpeza do contexto serão exibidas",
},
Model: "Modelo",
Messages: "Mensagens",
Topic: "Tópico",
Time: "Tempo",
},
URLCommand: {
Code: "Código de acesso detectado a partir da url, confirmar para aplicar? ",
Settings:
"Configurações detectadas a partir da url, confirmar para aplicar?",
},
};
export default pt;

View File

@ -29,6 +29,11 @@ const DEFAULT_ACCESS_STATE = {
azureApiKey: "",
azureApiVersion: "2023-08-01-preview",
// google ai studio
googleUrl: "",
googleApiKey: "",
googleApiVersion: "v1",
// server config
needCode: true,
hideUserApiKey: false,
@ -56,6 +61,10 @@ export const useAccessStore = createPersistStore(
return ensure(get(), ["azureUrl", "azureApiKey", "azureApiVersion"]);
},
isValidGoogle() {
return ensure(get(), ["googleApiKey"]);
},
isAuthorized() {
this.fetch();
@ -63,6 +72,7 @@ export const useAccessStore = createPersistStore(
return (
this.isValidOpenAI() ||
this.isValidAzure() ||
this.isValidGoogle() ||
!this.enabledAccessControl() ||
(this.enabledAccessControl() && ensure(get(), ["accessCode"]))
);
@ -99,6 +109,7 @@ export const useAccessStore = createPersistStore(
token: string;
openaiApiKey: string;
azureApiVersion: string;
googleApiKey: string;
};
state.openaiApiKey = state.token;
state.azureApiVersion = "2023-08-01-preview";
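
For readers tracing the new isValidGoogle() check above: a minimal standalone sketch of how the `ensure` helper is being used here. This is illustrative only, not code from this commit; it assumes `ensure` simply verifies that every listed key is a non-empty string on the state, which is what the call sites above imply.

type AccessKeys = { googleApiKey: string; openaiApiKey: string; accessCode: string };

// Assumed behaviour of `ensure`: every listed key must be a non-empty string.
function ensure<T extends object>(state: T, keys: Array<keyof T>): boolean {
  return keys.every((k) => String(state[k] ?? "").trim().length > 0);
}

const state: AccessKeys = { googleApiKey: "AIza-example", openaiApiKey: "", accessCode: "" };
console.log(ensure(state, ["googleApiKey"])); // true  -> isValidGoogle() would pass
console.log(ensure(state, ["openaiApiKey"])); // false -> isValidOpenAI() would not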

View File

@ -8,10 +8,11 @@ import {
DEFAULT_INPUT_TEMPLATE,
DEFAULT_SYSTEM_TEMPLATE,
KnowledgeCutOffDate,
ModelProvider,
StoreKey,
SUMMARIZE_MODEL,
} from "../constant";
import { api, RequestMessage } from "../client/api";
import { ClientApi, RequestMessage } from "../client/api";
import { ChatControllerPool } from "../client/controller";
import { prettyObject } from "../utils/format";
import { estimateTokenLength } from "../utils/token";
@ -301,6 +302,13 @@ export const useChatStore = createPersistStore(
]);
});
var api: ClientApi;
if (modelConfig.model === "gemini-pro") {
api = new ClientApi(ModelProvider.GeminiPro);
} else {
api = new ClientApi(ModelProvider.GPT);
}
// make request
api.llm.chat({
messages: sendMessages,
@ -379,7 +387,9 @@ export const useChatStore = createPersistStore(
// system prompts, to get close to OpenAI Web ChatGPT
const shouldInjectSystemPrompts = modelConfig.enableInjectSystemPrompts;
const systemPrompts = shouldInjectSystemPrompts
var systemPrompts: ChatMessage[] = [];
systemPrompts = shouldInjectSystemPrompts
? [
createMessage({
role: "system",
@ -473,6 +483,14 @@ export const useChatStore = createPersistStore(
summarizeSession() {
const config = useAppConfig.getState();
const session = get().currentSession();
const modelConfig = session.mask.modelConfig;
var api: ClientApi;
if (modelConfig.model === "gemini-pro") {
api = new ClientApi(ModelProvider.GeminiPro);
} else {
api = new ClientApi(ModelProvider.GPT);
}
// remove error messages if any
const messages = session.messages;
@ -504,8 +522,6 @@ export const useChatStore = createPersistStore(
},
});
}
const modelConfig = session.mask.modelConfig;
const summarizeIndex = Math.max(
session.lastSummarizeIndex,
session.clearContextIndex ?? 0,
@ -557,7 +573,10 @@ export const useChatStore = createPersistStore(
},
onFinish(message) {
console.log("[Memory] ", message);
session.lastSummarizeIndex = lastSummarizeIndex;
get().updateCurrentSession((session) => {
session.lastSummarizeIndex = lastSummarizeIndex;
session.memoryPrompt = message; // Update the memory prompt so it is stored in local storage
});
},
onError(err) {
console.error("[Summarize] ", err);
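
Note on the two hunks above: the gemini-pro check that picks a ClientApi provider now appears in both onUserInput and summarizeSession. A small sketch of how that mapping could be factored into one helper — an illustration of the pattern, not code from this commit; ClientApi and ModelProvider are the imports shown at the top of this file's diff.

import { ClientApi } from "../client/api";
import { ModelProvider } from "../constant";

// Map a model name to the client that should serve it.
function clientApiFor(model: string): ClientApi {
  const provider =
    model === "gemini-pro" ? ModelProvider.GeminiPro : ModelProvider.GPT;
  return new ClientApi(provider);
}

// Usage inside the store actions shown above:
// const api = clientApiFor(modelConfig.model);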

View File

@ -1,9 +1,16 @@
import { FETCH_COMMIT_URL, FETCH_TAG_URL, StoreKey } from "../constant";
import { api } from "../client/api";
import {
FETCH_COMMIT_URL,
FETCH_TAG_URL,
ModelProvider,
StoreKey,
} from "../constant";
import { getClientConfig } from "../config/client";
import { createPersistStore } from "../utils/store";
import ChatGptIcon from "../icons/chatgpt.png";
import Locale from "../locales";
import { use } from "react";
import { useAppConfig } from ".";
import { ClientApi } from "../client/api";
const ONE_MINUTE = 60 * 1000;
const isApp = !!getClientConfig()?.isApp;
@ -85,35 +92,40 @@ export const useUpdateStore = createPersistStore(
}));
if (window.__TAURI__?.notification && isApp) {
// Check if notification permission is granted
await window.__TAURI__?.notification.isPermissionGranted().then((granted) => {
if (!granted) {
return;
} else {
// Request permission to show notifications
window.__TAURI__?.notification.requestPermission().then((permission) => {
if (permission === 'granted') {
if (version === remoteId) {
// Show a notification using Tauri
window.__TAURI__?.notification.sendNotification({
title: "ChatGPT Next Web",
body: `${Locale.Settings.Update.IsLatest}`,
icon: `${ChatGptIcon.src}`,
sound: "Default"
});
} else {
const updateMessage = Locale.Settings.Update.FoundUpdate(`${remoteId}`);
// Show a notification for the new version using Tauri
window.__TAURI__?.notification.sendNotification({
title: "ChatGPT Next Web",
body: updateMessage,
icon: `${ChatGptIcon.src}`,
sound: "Default"
});
}
}
});
}
});
await window.__TAURI__?.notification
.isPermissionGranted()
.then((granted) => {
if (!granted) {
return;
} else {
// Request permission to show notifications
window.__TAURI__?.notification
.requestPermission()
.then((permission) => {
if (permission === "granted") {
if (version === remoteId) {
// Show a notification using Tauri
window.__TAURI__?.notification.sendNotification({
title: "NextChat",
body: `${Locale.Settings.Update.IsLatest}`,
icon: `${ChatGptIcon.src}`,
sound: "Default",
});
} else {
const updateMessage =
Locale.Settings.Update.FoundUpdate(`${remoteId}`);
// Show a notification for the new version using Tauri
window.__TAURI__?.notification.sendNotification({
title: "NextChat",
body: updateMessage,
icon: `${ChatGptIcon.src}`,
sound: "Default",
});
}
}
});
}
});
}
console.log("[Got Upstream] ", remoteId);
} catch (error) {
@ -122,6 +134,7 @@ export const useUpdateStore = createPersistStore(
},
async updateUsage(force = false) {
// only support openai for now
const overOneMinute = Date.now() - get().lastUpdateUsage >= ONE_MINUTE;
if (!overOneMinute && !force) return;
@ -130,6 +143,7 @@ export const useUpdateStore = createPersistStore(
}));
try {
const api = new ClientApi(ModelProvider.GPT);
const usage = await api.llm.usage();
if (usage) {

View File

@ -3,7 +3,10 @@ import { showToast } from "./components/ui-lib";
import Locale from "./locales";
export function trimTopic(topic: string) {
return topic.replace(/[,。!?”“"、,.!?]*$/, "");
// Fix an issue where double quotes still show in the Indonesian language
// This will remove the specified punctuation from the end of the string
// and also trim quotes from both the start and end if they exist.
return topic.replace(/^["“”]+|["“”]+$/g, "").replace(/[,。!?”“"、,.!?]*$/, "");
}
export async function copyToClipboard(text: string) {
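
A quick illustration of the updated trimTopic behaviour — a sketch using the same two regexes as the hunk above; the sample strings are made up.

const trim = (topic: string) =>
  topic.replace(/^["“”]+|["“”]+$/g, "").replace(/[,。!?”“"、,.!?]*$/, "");

console.log(trim('"Resep Nasi Goreng"'));   // -> Resep Nasi Goreng (wrapping quotes removed)
console.log(trim("Planning the week...,")); // -> Planning the week (trailing punctuation removed)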

View File

@ -6,32 +6,43 @@ export function collectModelTable(
) {
const modelTable: Record<
string,
{ available: boolean; name: string; displayName: string }
{
available: boolean;
name: string;
displayName: string;
provider?: LLMModel["provider"]; // Marked as optional
}
> = {};
// default models
models.forEach(
(m) =>
(modelTable[m.name] = {
...m,
displayName: m.name,
}),
);
models.forEach((m) => {
modelTable[m.name] = {
...m,
displayName: m.name, // 'provider' is copied over if it exists
};
});
// server custom models
customModels
.split(",")
.filter((v) => !!v && v.length > 0)
.map((m) => {
.forEach((m) => {
const available = !m.startsWith("-");
const nameConfig =
m.startsWith("+") || m.startsWith("-") ? m.slice(1) : m;
const [name, displayName] = nameConfig.split(":");
modelTable[name] = {
name,
displayName: displayName || name,
available,
};
const [name, displayName] = nameConfig.split("=");
// enable or disable all models
if (name === "all") {
Object.values(modelTable).forEach((model) => (model.available = available));
} else {
modelTable[name] = {
name,
displayName: displayName || name,
available,
provider: modelTable[name]?.provider, // Use optional chaining
};
}
});
return modelTable;
}
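
The hunk above also changes the custom-model syntax: entries are comma-separated, a leading + or - enables or disables a model, name=displayName overrides the label (previously name:displayName), and the special name "all" toggles every default model at once. A minimal standalone sketch of those parse rules — illustrative only, not code from this commit; the helper name and sample model names are invented.

type ModelEntry = { name: string; displayName: string; available: boolean };

function parseCustomModels(customModels: string, defaults: string[]) {
  const table: Record<string, ModelEntry> = {};
  defaults.forEach((name) => (table[name] = { name, displayName: name, available: true }));

  customModels
    .split(",")
    .filter((v) => v.length > 0)
    .forEach((m) => {
      const available = !m.startsWith("-");
      const nameConfig = m.startsWith("+") || m.startsWith("-") ? m.slice(1) : m;
      const [name, displayName] = nameConfig.split("=");
      if (name === "all") {
        // enable or disable every default model at once
        Object.values(table).forEach((entry) => (entry.available = available));
      } else {
        table[name] = { name, displayName: displayName || name, available };
      }
    });
  return table;
}

// Example: hide every default model, then re-enable gpt-4 under a custom label.
console.log(parseCustomModels("-all,+gpt-4=GPT-4 Turbo", ["gpt-3.5-turbo", "gpt-4"]));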

View File

@ -1,13 +1,14 @@
version: "3.9"
services:
chatgpt-next-web:
profiles: ["no-proxy"]
profiles: [ "no-proxy" ]
container_name: chatgpt-next-web
image: yidadaa/chatgpt-next-web
ports:
- 3000:3000
environment:
- OPENAI_API_KEY=$OPENAI_API_KEY
- GOOGLE_API_KEY=$GOOGLE_API_KEY
- CODE=$CODE
- BASE_URL=$BASE_URL
- OPENAI_ORG_ID=$OPENAI_ORG_ID
@ -18,13 +19,14 @@ services:
- OPENAI_SB=$OPENAI_SB
chatgpt-next-web-proxy:
profiles: ["proxy"]
profiles: [ "proxy" ]
container_name: chatgpt-next-web-proxy
image: yidadaa/chatgpt-next-web
ports:
- 3000:3000
environment:
- OPENAI_API_KEY=$OPENAI_API_KEY
- GOOGLE_API_KEY=$GOOGLE_API_KEY
- CODE=$CODE
- PROXY_URL=$PROXY_URL
- BASE_URL=$BASE_URL

View File

@ -23,7 +23,7 @@ Docker 版本相当于稳定版，latest Docker 总是与 latest release version
## 如何修改 Vercel 环境变量
- 进入 vercel 的控制台页面;
- 选中你的 chatgpt next web 项目;
- 选中你的 NextChat 项目;
- 点击页面头部的 Settings 选项;
- 找到侧边栏的 Environment Variables 选项;
- 修改对应的值即可。

View File

@ -23,7 +23,7 @@ Docker 버전은 사실상 안정된 버전과 같습니다. latest Docker는
## Vercel 환경 변수를 어떻게 수정하나요?
- Vercel의 제어판 페이지로 이동합니다.
- chatgpt next web 프로젝트를 선택합니다.
- NextChat 프로젝트를 선택합니다.
- 페이지 상단의 Settings 옵션을 클릭합니다.
- 사이드바의 Environment Variables 옵션을 찾습니다.
- 해당 값을 수정합니다.

View File

@ -2,7 +2,7 @@
> No english version yet, please read this doc with ChatGPT or other translation tools.
本文档用于解释 ChatGPT Next Web 的部分功能介绍和设计原则。
本文档用于解释 NextChat 的部分功能介绍和设计原则。
## 面具 (Mask)
@ -22,7 +22,7 @@
编辑步骤如下:
1. 在 ChatGPT Next Web 中配置好一个面具;
1. 在 NextChat 中配置好一个面具;
2. 使用面具编辑页面的下载按钮,将面具保存为 JSON 格式;
3. 让 ChatGPT 帮你将 json 文件格式化为对应的 ts 代码;
4. 放入对应的 .ts 文件。

View File

@ -17,11 +17,11 @@
},
"dependencies": {
"@fortaine/fetch-event-source": "^3.0.6",
"@hello-pangea/dnd": "^16.3.0",
"@hello-pangea/dnd": "^16.5.0",
"@svgr/webpack": "^6.5.1",
"@vercel/analytics": "^0.1.11",
"emoji-picker-react": "^4.5.15",
"fuse.js": "^6.6.2",
"fuse.js": "^7.0.0",
"html-to-image": "^1.11.11",
"mermaid": "^10.6.1",
"nanoid": "^5.0.3",
@ -42,7 +42,7 @@
"zustand": "^4.3.8"
},
"devDependencies": {
"@tauri-apps/cli": "^1.4.0",
"@tauri-apps/cli": "^1.5.8",
"@types/node": "^20.9.0",
"@types/react": "^18.2.14",
"@types/react-dom": "^18.2.7",

View File

@ -1,21 +1,20 @@
{
"name": "ChatGPT Next Web",
"short_name": "ChatGPT",
"icons": [
{
"src": "/android-chrome-192x192.png",
"sizes": "192x192",
"type": "image/png"
},
{
"src": "/android-chrome-512x512.png",
"sizes": "512x512",
"type": "image/png"
}
],
"start_url": "/",
"theme_color": "#ffffff",
"background_color": "#ffffff",
"display": "standalone"
}
"name": "NextChat",
"short_name": "NextChat",
"icons": [
{
"src": "/android-chrome-192x192.png",
"sizes": "192x192",
"type": "image/png"
},
{
"src": "/android-chrome-512x512.png",
"sizes": "512x512",
"type": "image/png"
}
],
"start_url": "/",
"theme_color": "#ffffff",
"background_color": "#ffffff",
"display": "standalone"
}

523
src-tauri/Cargo.lock generated
View File

@ -56,6 +56,128 @@ version = "1.0.71"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8"
[[package]]
name = "async-broadcast"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c48ccdbf6ca6b121e0f586cbc0e73ae440e56c67c30fa0873b4e110d9c26d2b"
dependencies = [
"event-listener",
"futures-core",
]
[[package]]
name = "async-channel"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35"
dependencies = [
"concurrent-queue",
"event-listener",
"futures-core",
]
[[package]]
name = "async-executor"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b0c4a4f319e45986f347ee47fef8bf5e81c9abc3f6f58dc2391439f30df65f0"
dependencies = [
"async-lock",
"async-task",
"concurrent-queue",
"fastrand 2.0.1",
"futures-lite",
"slab",
]
[[package]]
name = "async-fs"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "279cf904654eeebfa37ac9bb1598880884924aab82e290aa65c9e77a0e142e06"
dependencies = [
"async-lock",
"autocfg",
"blocking",
"futures-lite",
]
[[package]]
name = "async-io"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af"
dependencies = [
"async-lock",
"autocfg",
"cfg-if",
"concurrent-queue",
"futures-lite",
"log",
"parking",
"polling",
"rustix",
"slab",
"socket2",
"waker-fn",
]
[[package]]
name = "async-lock"
version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b"
dependencies = [
"event-listener",
]
[[package]]
name = "async-process"
version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a9d28b1d97e08915212e2e45310d47854eafa69600756fc735fb788f75199c9"
dependencies = [
"async-io",
"async-lock",
"autocfg",
"blocking",
"cfg-if",
"event-listener",
"futures-lite",
"rustix",
"signal-hook",
"windows-sys 0.48.0",
]
[[package]]
name = "async-recursion"
version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.16",
]
[[package]]
name = "async-task"
version = "4.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1d90cd0b264dfdd8eb5bad0a2c217c1f88fa96a8573f40e7b12de23fb468f46"
[[package]]
name = "async-trait"
version = "0.1.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b2d0f03b3640e3a630367e40c468cb7f309529c708ed1d88597047b0e7c6ef7"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.16",
]
[[package]]
name = "atk"
version = "0.15.1"
@ -80,6 +202,12 @@ dependencies = [
"system-deps 6.1.0",
]
[[package]]
name = "atomic-waker"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0"
[[package]]
name = "attohttpc"
version = "0.22.0"
@ -150,6 +278,22 @@ dependencies = [
"generic-array",
]
[[package]]
name = "blocking"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c36a4d0d48574b3dd360b4b7d95cc651d2b6557b6402848a27d4b228a473e2a"
dependencies = [
"async-channel",
"async-lock",
"async-task",
"fastrand 2.0.1",
"futures-io",
"futures-lite",
"piper",
"tracing",
]
[[package]]
name = "brotli"
version = "3.3.4"
@ -358,6 +502,15 @@ dependencies = [
"memchr",
]
[[package]]
name = "concurrent-queue"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d16048cd947b08fa32c24458a22f5dc5e835264f689f4f5653210c69fd107363"
dependencies = [
"crossbeam-utils",
]
[[package]]
name = "convert_case"
version = "0.4.0"
@ -530,6 +683,17 @@ dependencies = [
"syn 2.0.16",
]
[[package]]
name = "derivative"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "derive_more"
version = "0.99.17"
@ -629,6 +793,27 @@ dependencies = [
"cfg-if",
]
[[package]]
name = "enumflags2"
version = "0.7.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5998b4f30320c9d93aed72f63af821bfdac50465b75428fce77b48ec482c3939"
dependencies = [
"enumflags2_derive",
"serde",
]
[[package]]
name = "enumflags2_derive"
version = "0.7.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f95e2801cd355d4a1a3e3953ce6ee5ae9603a5c833455343a8bfe3f44d418246"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.16",
]
[[package]]
name = "errno"
version = "0.3.1"
@ -650,6 +835,12 @@ dependencies = [
"libc",
]
[[package]]
name = "event-listener"
version = "2.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0"
[[package]]
name = "fastrand"
version = "1.9.0"
@ -659,6 +850,12 @@ dependencies = [
"instant",
]
[[package]]
name = "fastrand"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5"
[[package]]
name = "fdeflate"
version = "0.3.0"
@ -674,7 +871,7 @@ version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3cf3a800ff6e860c863ca6d4b16fd999db8b752819c1606884047b73e468535"
dependencies = [
"memoffset",
"memoffset 0.8.0",
"rustc_version",
]
@ -772,6 +969,21 @@ version = "0.3.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964"
[[package]]
name = "futures-lite"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce"
dependencies = [
"fastrand 1.9.0",
"futures-core",
"futures-io",
"memchr",
"parking",
"pin-project-lite",
"waker-fn",
]
[[package]]
name = "futures-macro"
version = "0.3.28"
@ -783,6 +995,12 @@ dependencies = [
"syn 2.0.16",
]
[[package]]
name = "futures-sink"
version = "0.3.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e36d3378ee38c2a36ad710c5d30c2911d752cb941c00c72dbabfb786a7970817"
[[package]]
name = "futures-task"
version = "0.3.28"
@ -796,8 +1014,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533"
dependencies = [
"futures-core",
"futures-io",
"futures-macro",
"futures-sink",
"futures-task",
"memchr",
"pin-project-lite",
"pin-utils",
"slab",
@ -1451,6 +1672,19 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4"
[[package]]
name = "mac-notification-sys"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51fca4d74ff9dbaac16a01b924bc3693fa2bba0862c2c633abc73f9a8ea21f64"
dependencies = [
"cc",
"dirs-next",
"objc-foundation",
"objc_id",
"time",
]
[[package]]
name = "malloc_buf"
version = "0.0.6"
@ -1495,6 +1729,15 @@ version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "memoffset"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4"
dependencies = [
"autocfg",
]
[[package]]
name = "memoffset"
version = "0.8.0"
@ -1504,6 +1747,15 @@ dependencies = [
"autocfg",
]
[[package]]
name = "memoffset"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c"
dependencies = [
"autocfg",
]
[[package]]
name = "minisign-verify"
version = "0.2.1"
@ -1572,12 +1824,37 @@ version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54"
[[package]]
name = "nix"
version = "0.26.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b"
dependencies = [
"bitflags 1.3.2",
"cfg-if",
"libc",
"memoffset 0.7.1",
]
[[package]]
name = "nodrop"
version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb"
[[package]]
name = "notify-rust"
version = "4.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "827c5edfa80235ded4ab3fe8e9dc619b4f866ef16fe9b1c6b8a7f8692c0f2226"
dependencies = [
"log",
"mac-notification-sys",
"serde",
"tauri-winrt-notification",
"zbus",
]
[[package]]
name = "nu-ansi-term"
version = "0.46.0"
@ -1757,6 +2034,16 @@ dependencies = [
"vcpkg",
]
[[package]]
name = "ordered-stream"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9aa2b01e1d916879f73a53d01d1d6cee68adbb31d6d9177a8cfce093cced1d50"
dependencies = [
"futures-core",
"pin-project-lite",
]
[[package]]
name = "overload"
version = "0.1.1"
@ -1788,6 +2075,12 @@ dependencies = [
"system-deps 6.1.0",
]
[[package]]
name = "parking"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae"
[[package]]
name = "parking_lot"
version = "0.12.1"
@ -1933,6 +2226,17 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
[[package]]
name = "piper"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "668d31b1c4eba19242f2088b2bf3316b82ca31082a8335764db4e083db7485d4"
dependencies = [
"atomic-waker",
"fastrand 2.0.1",
"futures-io",
]
[[package]]
name = "pkg-config"
version = "0.3.27"
@ -1948,7 +2252,7 @@ dependencies = [
"base64 0.21.0",
"indexmap",
"line-wrap",
"quick-xml",
"quick-xml 0.28.2",
"serde",
"time",
]
@ -1966,6 +2270,22 @@ dependencies = [
"miniz_oxide",
]
[[package]]
name = "polling"
version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce"
dependencies = [
"autocfg",
"bitflags 1.3.2",
"cfg-if",
"concurrent-queue",
"libc",
"log",
"pin-project-lite",
"windows-sys 0.48.0",
]
[[package]]
name = "ppv-lite86"
version = "0.2.17"
@ -2027,6 +2347,15 @@ dependencies = [
"unicode-ident",
]
[[package]]
name = "quick-xml"
version = "0.23.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11bafc859c6815fbaffbbbf4229ecb767ac913fecb27f9ad4343662e9ef099ea"
dependencies = [
"memchr",
]
[[package]]
name = "quick-xml"
version = "0.28.2"
@ -2466,6 +2795,17 @@ dependencies = [
"stable_deref_trait",
]
[[package]]
name = "sha1"
version = "0.10.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3"
dependencies = [
"cfg-if",
"cpufeatures",
"digest",
]
[[package]]
name = "sha2"
version = "0.10.6"
@ -2486,6 +2826,25 @@ dependencies = [
"lazy_static",
]
[[package]]
name = "signal-hook"
version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8621587d4798caf8eb44879d42e56b9a93ea5dcd315a6487c357130095b62801"
dependencies = [
"libc",
"signal-hook-registry",
]
[[package]]
name = "signal-hook-registry"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1"
dependencies = [
"libc",
]
[[package]]
name = "simd-adler32"
version = "0.3.5"
@ -2513,6 +2872,16 @@ version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
[[package]]
name = "socket2"
version = "0.4.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662"
dependencies = [
"libc",
"winapi",
]
[[package]]
name = "soup2"
version = "0.2.1"
@ -2556,6 +2925,12 @@ dependencies = [
"loom",
]
[[package]]
name = "static_assertions"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
[[package]]
name = "string_cache"
version = "0.8.7"
@ -2733,6 +3108,7 @@ dependencies = [
"http",
"ignore",
"minisign-verify",
"notify-rust",
"objc",
"once_cell",
"open",
@ -2915,6 +3291,16 @@ dependencies = [
"toml 0.7.3",
]
[[package]]
name = "tauri-winrt-notification"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4f5bff1d532fead7c43324a0fa33643b8621a47ce2944a633be4cb6c0240898f"
dependencies = [
"quick-xml 0.23.1",
"windows 0.39.0",
]
[[package]]
name = "tempfile"
version = "3.5.0"
@ -2922,7 +3308,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9fbec84f381d5795b08656e4912bec604d162bff9291d6189a78f4c8ab87998"
dependencies = [
"cfg-if",
"fastrand",
"fastrand 1.9.0",
"redox_syscall 0.3.5",
"rustix",
"windows-sys 0.45.0",
@ -3135,6 +3521,17 @@ version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba"
[[package]]
name = "uds_windows"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89daebc3e6fd160ac4aa9fc8b3bf71e1f74fbf92367ae71fb83a037e8bf164b9"
dependencies = [
"memoffset 0.9.0",
"tempfile",
"winapi",
]
[[package]]
name = "unicode-bidi"
version = "0.3.13"
@ -3239,6 +3636,12 @@ dependencies = [
"libc",
]
[[package]]
name = "waker-fn"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3c4517f54858c779bbcbf228f4fca63d121bf85fbecb2dc578cdf4a39395690"
[[package]]
name = "walkdir"
version = "2.3.3"
@ -3815,6 +4218,82 @@ dependencies = [
"libc",
]
[[package]]
name = "xdg-home"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2769203cd13a0c6015d515be729c526d041e9cf2c0cc478d57faee85f40c6dcd"
dependencies = [
"nix",
"winapi",
]
[[package]]
name = "zbus"
version = "3.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "31de390a2d872e4cd04edd71b425e29853f786dc99317ed72d73d6fcf5ebb948"
dependencies = [
"async-broadcast",
"async-executor",
"async-fs",
"async-io",
"async-lock",
"async-process",
"async-recursion",
"async-task",
"async-trait",
"blocking",
"byteorder",
"derivative",
"enumflags2",
"event-listener",
"futures-core",
"futures-sink",
"futures-util",
"hex",
"nix",
"once_cell",
"ordered-stream",
"rand 0.8.5",
"serde",
"serde_repr",
"sha1",
"static_assertions",
"tracing",
"uds_windows",
"winapi",
"xdg-home",
"zbus_macros",
"zbus_names",
"zvariant",
]
[[package]]
name = "zbus_macros"
version = "3.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41d1794a946878c0e807f55a397187c11fc7a038ba5d868e7db4f3bd7760bc9d"
dependencies = [
"proc-macro-crate",
"proc-macro2",
"quote",
"regex",
"syn 1.0.109",
"zvariant_utils",
]
[[package]]
name = "zbus_names"
version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb80bb776dbda6e23d705cf0123c3b95df99c4ebeaec6c2599d4a5419902b4a9"
dependencies = [
"serde",
"static_assertions",
"zvariant",
]
[[package]]
name = "zip"
version = "0.6.6"
@ -3825,3 +4304,41 @@ dependencies = [
"crc32fast",
"crossbeam-utils",
]
[[package]]
name = "zvariant"
version = "3.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44b291bee0d960c53170780af148dca5fa260a63cdd24f1962fa82e03e53338c"
dependencies = [
"byteorder",
"enumflags2",
"libc",
"serde",
"static_assertions",
"zvariant_derive",
]
[[package]]
name = "zvariant_derive"
version = "3.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "934d7a7dfc310d6ee06c87ffe88ef4eca7d3e37bb251dece2ef93da8f17d8ecd"
dependencies = [
"proc-macro-crate",
"proc-macro2",
"quote",
"syn 1.0.109",
"zvariant_utils",
]
[[package]]
name = "zvariant_utils"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7234f0d811589db492d16893e3f21e8e2fd282e6d01b0cddee310322062cc200"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]

View File

@ -8,8 +8,8 @@
"withGlobalTauri": true
},
"package": {
"productName": "ChatGPT Next Web",
"version": "2.9.11"
"productName": "NextChat",
"version": "2.9.13"
},
"tauri": {
"allowlist": {
@ -68,7 +68,7 @@
"icons/icon.ico"
],
"identifier": "com.yida.chatgpt.next.web",
"longDescription": "ChatGPT Next Web is a cross-platform ChatGPT client, including Web/Win/Linux/OSX/PWA.",
"longDescription": "NextChat is a cross-platform ChatGPT client, including Web/Win/Linux/OSX/PWA.",
"macOS": {
"entitlements": null,
"exceptionDomain": "",
@ -77,7 +77,7 @@
"signingIdentity": null
},
"resources": [],
"shortDescription": "ChatGPT Next Web App",
"shortDescription": "NextChat App",
"targets": "all",
"windows": {
"certificateThumbprint": null,
@ -104,11 +104,11 @@
"fullscreen": false,
"height": 600,
"resizable": true,
"title": "ChatGPT Next Web",
"title": "NextChat",
"width": 960,
"hiddenTitle": true,
"titleBarStyle": "Overlay"
}
]
}
}
}

View File

@ -1,24 +1,5 @@
{
"github": {
"silent": true
},
"headers": [
{
"source": "/(.*)",
"headers": [
{
"key": "X-Real-IP",
"value": "$remote_addr"
},
{
"key": "X-Forwarded-For",
"value": "$proxy_add_x_forwarded_for"
},
{
"key": "Host",
"value": "$http_host"
}
]
}
]
}
}

267
yarn.lock
View File

@ -22,6 +22,14 @@
dependencies:
"@babel/highlight" "^7.18.6"
"@babel/code-frame@^7.22.13":
version "7.22.13"
resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.22.13.tgz#e3c1c099402598483b7a8c46a721d1038803755e"
integrity sha512-XktuhWlJ5g+3TJXc5upd9Ks1HutSArik6jf2eAjYFyIOf4ej3RN+184cZbzDvbPnuTJIUhPKKJE3cIsYTiAT3w==
dependencies:
"@babel/highlight" "^7.22.13"
chalk "^2.4.2"
"@babel/compat-data@^7.17.7", "@babel/compat-data@^7.20.1", "@babel/compat-data@^7.20.5":
version "7.21.0"
resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.21.0.tgz#c241dc454e5b5917e40d37e525e2f4530c399298"
@ -58,6 +66,16 @@
"@jridgewell/trace-mapping" "^0.3.17"
jsesc "^2.5.1"
"@babel/generator@^7.23.0":
version "7.23.0"
resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.23.0.tgz#df5c386e2218be505b34837acbcb874d7a983420"
integrity sha512-lN85QRR+5IbYrMWM6Y4pE/noaQtg4pNiqeNGX60eqOfo6gtEj6uw/JagelB8vVztSd7R6M5n1+PQkDbHbBRU4g==
dependencies:
"@babel/types" "^7.23.0"
"@jridgewell/gen-mapping" "^0.3.2"
"@jridgewell/trace-mapping" "^0.3.17"
jsesc "^2.5.1"
"@babel/helper-annotate-as-pure@^7.18.6":
version "7.18.6"
resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.18.6.tgz#eaa49f6f80d5a33f9a5dd2276e6d6e451be0a6bb"
@ -123,6 +141,11 @@
resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz#0c0cee9b35d2ca190478756865bb3528422f51be"
integrity sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==
"@babel/helper-environment-visitor@^7.22.20":
version "7.22.20"
resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz#96159db61d34a29dba454c959f5ae4a649ba9167"
integrity sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA==
"@babel/helper-explode-assignable-expression@^7.18.6":
version "7.18.6"
resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.18.6.tgz#41f8228ef0a6f1a036b8dfdfec7ce94f9a6bc096"
@ -138,6 +161,14 @@
"@babel/template" "^7.20.7"
"@babel/types" "^7.21.0"
"@babel/helper-function-name@^7.23.0":
version "7.23.0"
resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz#1f9a3cdbd5b2698a670c30d2735f9af95ed52759"
integrity sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw==
dependencies:
"@babel/template" "^7.22.15"
"@babel/types" "^7.23.0"
"@babel/helper-hoist-variables@^7.18.6":
version "7.18.6"
resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz#d4d2c8fb4baeaa5c68b99cc8245c56554f926678"
@ -145,6 +176,13 @@
dependencies:
"@babel/types" "^7.18.6"
"@babel/helper-hoist-variables@^7.22.5":
version "7.22.5"
resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz#c01a007dac05c085914e8fb652b339db50d823bb"
integrity sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw==
dependencies:
"@babel/types" "^7.22.5"
"@babel/helper-member-expression-to-functions@^7.20.7", "@babel/helper-member-expression-to-functions@^7.21.0":
version "7.21.0"
resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.21.0.tgz#319c6a940431a133897148515877d2f3269c3ba5"
@ -228,16 +266,33 @@
dependencies:
"@babel/types" "^7.18.6"
"@babel/helper-split-export-declaration@^7.22.6":
version "7.22.6"
resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz#322c61b7310c0997fe4c323955667f18fcefb91c"
integrity sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g==
dependencies:
"@babel/types" "^7.22.5"
"@babel/helper-string-parser@^7.19.4":
version "7.19.4"
resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.19.4.tgz#38d3acb654b4701a9b77fb0615a96f775c3a9e63"
integrity sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw==
"@babel/helper-string-parser@^7.22.5":
version "7.22.5"
resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz#533f36457a25814cf1df6488523ad547d784a99f"
integrity sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw==
"@babel/helper-validator-identifier@^7.18.6", "@babel/helper-validator-identifier@^7.19.1":
version "7.19.1"
resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz#7eea834cf32901ffdc1a7ee555e2f9c27e249ca2"
integrity sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==
"@babel/helper-validator-identifier@^7.22.20":
version "7.22.20"
resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz#c4ae002c61d2879e724581d96665583dbc1dc0e0"
integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==
"@babel/helper-validator-option@^7.18.6", "@babel/helper-validator-option@^7.21.0":
version "7.21.0"
resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.21.0.tgz#8224c7e13ace4bafdc4004da2cf064ef42673180"
@ -271,11 +326,25 @@
chalk "^2.0.0"
js-tokens "^4.0.0"
"@babel/highlight@^7.22.13":
version "7.22.20"
resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.22.20.tgz#4ca92b71d80554b01427815e06f2df965b9c1f54"
integrity sha512-dkdMCN3py0+ksCgYmGG8jKeGA/8Tk+gJwSYYlFGxG5lmhfKNoAy004YpLxpS1W2J8m/EK2Ew+yOs9pVRwO89mg==
dependencies:
"@babel/helper-validator-identifier" "^7.22.20"
chalk "^2.4.2"
js-tokens "^4.0.0"
"@babel/parser@^7.20.7", "@babel/parser@^7.21.3":
version "7.21.3"
resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.21.3.tgz#1d285d67a19162ff9daa358d4cb41d50c06220b3"
integrity sha512-lobG0d7aOfQRXh8AyklEAgZGvA4FShxo6xQbUrrT/cNBPUdIDojlokwJsQyCC/eKia7ifqM0yP+2DRZ4WKw2RQ==
"@babel/parser@^7.22.15", "@babel/parser@^7.23.0":
version "7.23.0"
resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.0.tgz#da950e622420bf96ca0d0f2909cdddac3acd8719"
integrity sha512-vvPKKdMemU85V9WE/l5wZEmImpCtLqbnTvqDS2U1fJ96KrxoW7KrXhNsNCblQlg8Ck4b85yxdTyelsMUgFUXiw==
"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.18.6":
version "7.18.6"
resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.18.6.tgz#da5b8f9a580acdfbe53494dba45ea389fb09a4d2"
@ -959,12 +1028,12 @@
resolved "https://registry.yarnpkg.com/@babel/regjsgen/-/regjsgen-0.8.0.tgz#f0ba69b075e1f05fb2825b7fad991e7adbb18310"
integrity sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA==
"@babel/runtime@^7.12.1", "@babel/runtime@^7.20.7", "@babel/runtime@^7.22.5", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2":
version "7.22.5"
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.22.5.tgz#8564dd588182ce0047d55d7a75e93921107b57ec"
integrity sha512-ecjvYlnAaZ/KVneE/OdKYBYfgXV3Ptu6zQWmgEF7vwKhQnvVS6bjMD2XYgj+SNvQ1GfK/pjgokfPkC/2CO8CuA==
"@babel/runtime@^7.12.1", "@babel/runtime@^7.20.7", "@babel/runtime@^7.23.2", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2":
version "7.23.6"
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.23.6.tgz#c05e610dc228855dc92ef1b53d07389ed8ab521d"
integrity sha512-zHd0eUrf5GZoOWVCXp6koAKQTfZV07eit6bGPmJgnZdnSAvvZee6zniW2XMF7Cmc4ISOOnPy3QaSiIJGJkVEDQ==
dependencies:
regenerator-runtime "^0.13.11"
regenerator-runtime "^0.14.0"
"@babel/template@^7.18.10", "@babel/template@^7.20.7":
version "7.20.7"
@ -975,19 +1044,28 @@
"@babel/parser" "^7.20.7"
"@babel/types" "^7.20.7"
"@babel/traverse@^7.20.5", "@babel/traverse@^7.20.7", "@babel/traverse@^7.21.0", "@babel/traverse@^7.21.2", "@babel/traverse@^7.21.3":
version "7.21.3"
resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.21.3.tgz#4747c5e7903d224be71f90788b06798331896f67"
integrity sha512-XLyopNeaTancVitYZe2MlUEvgKb6YVVPXzofHgqHijCImG33b/uTurMS488ht/Hbsb2XK3U2BnSTxKVNGV3nGQ==
"@babel/template@^7.22.15":
version "7.22.15"
resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.15.tgz#09576efc3830f0430f4548ef971dde1350ef2f38"
integrity sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w==
dependencies:
"@babel/code-frame" "^7.18.6"
"@babel/generator" "^7.21.3"
"@babel/helper-environment-visitor" "^7.18.9"
"@babel/helper-function-name" "^7.21.0"
"@babel/helper-hoist-variables" "^7.18.6"
"@babel/helper-split-export-declaration" "^7.18.6"
"@babel/parser" "^7.21.3"
"@babel/types" "^7.21.3"
"@babel/code-frame" "^7.22.13"
"@babel/parser" "^7.22.15"
"@babel/types" "^7.22.15"
"@babel/traverse@^7.20.5", "@babel/traverse@^7.20.7", "@babel/traverse@^7.21.0", "@babel/traverse@^7.21.2", "@babel/traverse@^7.21.3":
version "7.23.2"
resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.23.2.tgz#329c7a06735e144a506bdb2cad0268b7f46f4ad8"
integrity sha512-azpe59SQ48qG6nu2CzcMLbxUudtN+dOM9kDbUqGq3HXUJRlo7i8fvPoxQUzYgLZ4cMVmuZgm8vvBpNeRhd6XSw==
dependencies:
"@babel/code-frame" "^7.22.13"
"@babel/generator" "^7.23.0"
"@babel/helper-environment-visitor" "^7.22.20"
"@babel/helper-function-name" "^7.23.0"
"@babel/helper-hoist-variables" "^7.22.5"
"@babel/helper-split-export-declaration" "^7.22.6"
"@babel/parser" "^7.23.0"
"@babel/types" "^7.23.0"
debug "^4.1.0"
globals "^11.1.0"
@ -1000,6 +1078,15 @@
"@babel/helper-validator-identifier" "^7.19.1"
to-fast-properties "^2.0.0"
"@babel/types@^7.22.15", "@babel/types@^7.22.5", "@babel/types@^7.23.0":
version "7.23.0"
resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.23.0.tgz#8c1f020c9df0e737e4e247c0619f58c68458aaeb"
integrity sha512-0oIyUfKoI3mSqMvsxBdclDwxXKXAUA8v/apZbc+iSyARYou1o8ZGDxbUYyLFoW2arqS2jDGqJuZvv1d/io1axg==
dependencies:
"@babel/helper-string-parser" "^7.22.5"
"@babel/helper-validator-identifier" "^7.22.20"
to-fast-properties "^2.0.0"
"@braintree/sanitize-url@^6.0.1":
version "6.0.4"
resolved "https://registry.yarnpkg.com/@braintree/sanitize-url/-/sanitize-url-6.0.4.tgz#923ca57e173c6b232bbbb07347b1be982f03e783"
@ -1042,16 +1129,16 @@
resolved "https://registry.npmmirror.com/@fortaine/fetch-event-source/-/fetch-event-source-3.0.6.tgz#b8552a2ca2c5202f5699b93a92be0188d422b06e"
integrity sha512-621GAuLMvKtyZQ3IA6nlDWhV1V/7PGOTNIGLUifxt0KzM+dZIweJ6F3XvQF3QnqeNfS1N7WQ0Kil1Di/lhChEw==
"@hello-pangea/dnd@^16.3.0":
version "16.3.0"
resolved "https://registry.yarnpkg.com/@hello-pangea/dnd/-/dnd-16.3.0.tgz#3776212f812df4e8e69c42831ec8ab7ff3a087d6"
integrity sha512-RYQ/K8shtJoyNPvFWz0gfXIK7HF3P3mL9UZFGMuHB0ljRSXVgMjVFI/FxcZmakMzw6tO7NflWLriwTNBow/4vw==
"@hello-pangea/dnd@^16.5.0":
version "16.5.0"
resolved "https://registry.yarnpkg.com/@hello-pangea/dnd/-/dnd-16.5.0.tgz#f323ff9f813204818bc67648a383e8715f47c59c"
integrity sha512-n+am6O32jo/CFXciCysz83lPM3I3F58FJw4uS44TceieymcyxQSfzK5OhzPAKrVBZktmuOI6Zim9WABTMtXv4A==
dependencies:
"@babel/runtime" "^7.22.5"
"@babel/runtime" "^7.23.2"
css-box-model "^1.2.1"
memoize-one "^6.0.0"
raf-schd "^4.0.3"
react-redux "^8.1.1"
react-redux "^8.1.3"
redux "^4.2.1"
use-memo-one "^1.1.3"
@ -1344,71 +1431,71 @@
dependencies:
tslib "^2.4.0"
"@tauri-apps/cli-darwin-arm64@1.4.0":
version "1.4.0"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-darwin-arm64/-/cli-darwin-arm64-1.4.0.tgz#e76bb8515ae31f03f2cbd440c1a09b237a79b3ac"
integrity sha512-nA/ml0SfUt6/CYLVbHmT500Y+ijqsuv5+s9EBnVXYSLVg9kbPUZJJHluEYK+xKuOj6xzyuT/+rZFMRapmJD3jQ==
"@tauri-apps/cli-darwin-arm64@1.5.8":
version "1.5.8"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-darwin-arm64/-/cli-darwin-arm64-1.5.8.tgz#28ca810b910979260dd77c92951d16340fcaa711"
integrity sha512-/AksDWfAt3NUSt8Rq2a3gTLASChKzldPVUjmJhcbtsuzFg2nx5g+hhOHxfBYzss2Te1K5mzlu+73LAMy1Sb9Gw==
"@tauri-apps/cli-darwin-x64@1.4.0":
version "1.4.0"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-darwin-x64/-/cli-darwin-x64-1.4.0.tgz#dd1472460550d0aa0ec6e699b073be2d77e5b962"
integrity sha512-ov/F6Zr+dg9B0PtRu65stFo2G0ow2TUlneqYYrkj+vA3n+moWDHfVty0raDjMLQbQt3rv3uayFMXGPMgble9OA==
"@tauri-apps/cli-darwin-x64@1.5.8":
version "1.5.8"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-darwin-x64/-/cli-darwin-x64-1.5.8.tgz#4060fb0ffcc8312cf48701df51e0e9b665f18382"
integrity sha512-gcfSh+BFRDdbIGpggZ1+5R5SgToz2A9LthH8P4ak3OHagDzDvI6ov6zy2UQE3XDWJKdnlna2rSR1dIuRZ0T9bA==
"@tauri-apps/cli-linux-arm-gnueabihf@1.4.0":
version "1.4.0"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm-gnueabihf/-/cli-linux-arm-gnueabihf-1.4.0.tgz#325e90e47d260ba71a499850ce769b5a6bdfd48d"
integrity sha512-zwjbiMncycXDV7doovymyKD7sCg53ouAmfgpUqEBOTY3vgBi9TwijyPhJOqoG5vUVWhouNBC08akGmE4dja15g==
"@tauri-apps/cli-linux-arm-gnueabihf@1.5.8":
version "1.5.8"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm-gnueabihf/-/cli-linux-arm-gnueabihf-1.5.8.tgz#00256432520edf04004962caa92cd84fbcc8b63f"
integrity sha512-ZHQYuOBGvZubPnh5n8bNaN2VMxPBZWs26960FGQWamm9569UV/TNDHb6mD0Jjk9o0f9P+f98qNhuu5Y37P+vfQ==
"@tauri-apps/cli-linux-arm64-gnu@1.4.0":
version "1.4.0"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm64-gnu/-/cli-linux-arm64-gnu-1.4.0.tgz#b5d8f5cba3f8f7c7d44d071681f0ab0a37f2c46e"
integrity sha512-5MCBcziqXC72mMXnkZU68mutXIR6zavDxopArE2gQtK841IlE06bIgtLi0kUUhlFJk2nhPRgiDgdLbrPlyt7fw==
"@tauri-apps/cli-linux-arm64-gnu@1.5.8":
version "1.5.8"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm64-gnu/-/cli-linux-arm64-gnu-1.5.8.tgz#7869571b06e8b36a072f2e0e7bb49baab9d3c868"
integrity sha512-FFs28Ew3R2EFPYKuyAIouTbp6YnR+shAmJGFNnVy7ibKHL0wxamVKqv1N5N9gUUr+EhbZu2syMBRfG9XQ5mgng==
"@tauri-apps/cli-linux-arm64-musl@1.4.0":
version "1.4.0"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm64-musl/-/cli-linux-arm64-musl-1.4.0.tgz#f805ab2ee415875900f4b456f17dc4900d2a7911"
integrity sha512-7J3pRB6n6uNYgIfCeKt2Oz8J7oSaz2s8GGFRRH2HPxuTHrBNCinzVYm68UhVpJrL3bnGkU0ziVZLsW/iaOGfUg==
"@tauri-apps/cli-linux-arm64-musl@1.5.8":
version "1.5.8"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm64-musl/-/cli-linux-arm64-musl-1.5.8.tgz#7cbe0395cbd09d4b49c945e36c2de99478c50a51"
integrity sha512-dEYvNyLMmWD0jb30FNfVPXmBq6OGg6is3km+4RlGg8tZU5Zvq78ClUZtaZuER+N/hv27+Uc6UHl9X3hin8cGGw==
"@tauri-apps/cli-linux-x64-gnu@1.4.0":
version "1.4.0"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-x64-gnu/-/cli-linux-x64-gnu-1.4.0.tgz#d3f5e69c22420c7ac9e4021b7a94bce2e48cb45d"
integrity sha512-Zh5gfAJxOv5AVWxcwuueaQ2vIAhlg0d6nZui6nMyfIJ8dbf3aZQ5ZzP38sYow5h/fbvgL+3GSQxZRBIa3c2E1w==
"@tauri-apps/cli-linux-x64-gnu@1.5.8":
version "1.5.8"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-x64-gnu/-/cli-linux-x64-gnu-1.5.8.tgz#d03ba73f1ac68bf6bace7bf45b50e6b12ce4468b"
integrity sha512-ut3TDbtLXmZhz6Q4wim57PV02wG+AfuLSWRPhTL9MsPsg/E7Y6sJhv0bIMAq6SwC59RCH52ZGft6RH7samV2NQ==
"@tauri-apps/cli-linux-x64-musl@1.4.0":
version "1.4.0"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-x64-musl/-/cli-linux-x64-musl-1.4.0.tgz#2e7f718272ffdd9ace80f57a35023ba0c74767ad"
integrity sha512-OLAYoICU3FaYiTdBsI+lQTKnDHeMmFMXIApN0M+xGiOkoIOQcV9CConMPjgmJQ867+NHRNgUGlvBEAh9CiJodQ==
"@tauri-apps/cli-linux-x64-musl@1.5.8":
version "1.5.8"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-x64-musl/-/cli-linux-x64-musl-1.5.8.tgz#4ce560aa102e9031d4c51c7bc853263cf3ab9616"
integrity sha512-k6ei7ETXVZlNpFOhl/8Cnj709UbEr+VuY9xKK/HgwvNfjA5f8HQ9TSKk/Um7oeT1Y61/eEcvcgF/hDURhFJDPQ==
"@tauri-apps/cli-win32-arm64-msvc@1.4.0":
version "1.4.0"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-arm64-msvc/-/cli-win32-arm64-msvc-1.4.0.tgz#85cdb52a06feb92da785def4d02512099464525e"
integrity sha512-gZ05GENFbI6CB5MlOUsLlU0kZ9UtHn9riYtSXKT6MYs8HSPRffPHaHSL0WxsJweWh9nR5Hgh/TUU8uW3sYCzCg==
"@tauri-apps/cli-win32-arm64-msvc@1.5.8":
version "1.5.8"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-arm64-msvc/-/cli-win32-arm64-msvc-1.5.8.tgz#df83af81c6d89d4a505f2e96b3d443dd411c1a4a"
integrity sha512-l6zm31x1inkS2K5e7otUZ90XBoK+xr2KJObFCZbzmluBE+LM0fgIXCrj7xwH/f0RCUX3VY9HHx4EIo7eLGBXKQ==
"@tauri-apps/cli-win32-ia32-msvc@1.4.0":
version "1.4.0"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-ia32-msvc/-/cli-win32-ia32-msvc-1.4.0.tgz#0b7c921204058215aec9a5a00f735e73909bd330"
integrity sha512-JsetT/lTx/Zq98eo8T5CiRyF1nKeX04RO8JlJrI3ZOYsZpp/A5RJvMd/szQ17iOzwiHdge+tx7k2jHysR6oBlQ==
"@tauri-apps/cli-win32-ia32-msvc@1.5.8":
version "1.5.8"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-ia32-msvc/-/cli-win32-ia32-msvc-1.5.8.tgz#92e5acc4dcd44aec88099059a04bb5ad3b4e59ff"
integrity sha512-0k3YpWl6PKV4Qp2N52Sb45egXafSgQXcBaO7TIJG4EDfaEf5f6StN+hYSzdnrq9idrK5x9DDCPuebZTuJ+Q8EA==
"@tauri-apps/cli-win32-x64-msvc@1.4.0":
version "1.4.0"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-x64-msvc/-/cli-win32-x64-msvc-1.4.0.tgz#23abe3f08c0df89111c29602f91c21a23577b908"
integrity sha512-z8Olcnwp5aYhzqUAarFjqF+oELCjuYWnB2HAJHlfsYNfDCAORY5kct3Fklz8PSsubC3U2EugWn8n42DwnThurg==
"@tauri-apps/cli-win32-x64-msvc@1.5.8":
version "1.5.8"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-x64-msvc/-/cli-win32-x64-msvc-1.5.8.tgz#a0c363969cf5a21c95c235e5bf6a94a410130761"
integrity sha512-XjBg8VMswmD9JAHKlb10NRPfBVAZoiOJBbPRte+GP1BUQtqDnbIYcOLSnUCmNZoy3fUBJuKJUBT9tDCbkMr5fQ==
"@tauri-apps/cli@^1.4.0":
version "1.4.0"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli/-/cli-1.4.0.tgz#72732ae61e6b7d097e44a8a2ef5f211b2d01d98b"
integrity sha512-VXYr2i2iVFl98etQSQsqLzXgX96bnWiNZd1YADgatqwy/qecbd6Kl5ZAPB5R4ynsgE8A1gU7Fbzh7dCEQYFfmA==
"@tauri-apps/cli@^1.5.8":
version "1.5.8"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli/-/cli-1.5.8.tgz#feaf055af370cb192b24ea4c51edf0e577269fb2"
integrity sha512-c/mzk5vjjfxtH5uNXSc9h1eiprsolnoBcUwAa4/SZ3gxJ176CwrUKODz3cZBOnzs8omwagwgSN/j7K8NrdFL9g==
optionalDependencies:
"@tauri-apps/cli-darwin-arm64" "1.4.0"
"@tauri-apps/cli-darwin-x64" "1.4.0"
"@tauri-apps/cli-linux-arm-gnueabihf" "1.4.0"
"@tauri-apps/cli-linux-arm64-gnu" "1.4.0"
"@tauri-apps/cli-linux-arm64-musl" "1.4.0"
"@tauri-apps/cli-linux-x64-gnu" "1.4.0"
"@tauri-apps/cli-linux-x64-musl" "1.4.0"
"@tauri-apps/cli-win32-arm64-msvc" "1.4.0"
"@tauri-apps/cli-win32-ia32-msvc" "1.4.0"
"@tauri-apps/cli-win32-x64-msvc" "1.4.0"
"@tauri-apps/cli-darwin-arm64" "1.5.8"
"@tauri-apps/cli-darwin-x64" "1.5.8"
"@tauri-apps/cli-linux-arm-gnueabihf" "1.5.8"
"@tauri-apps/cli-linux-arm64-gnu" "1.5.8"
"@tauri-apps/cli-linux-arm64-musl" "1.5.8"
"@tauri-apps/cli-linux-x64-gnu" "1.5.8"
"@tauri-apps/cli-linux-x64-musl" "1.5.8"
"@tauri-apps/cli-win32-arm64-msvc" "1.5.8"
"@tauri-apps/cli-win32-ia32-msvc" "1.5.8"
"@tauri-apps/cli-win32-x64-msvc" "1.5.8"
"@trysound/sax@0.2.0":
version "0.2.0"
@ -2053,7 +2140,7 @@ chalk@5.2.0:
resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.2.0.tgz#249623b7d66869c673699fb66d65723e54dfcfb3"
integrity sha512-ree3Gqw/nazQAPuJJEy+avdl7QfZMcUvmHIKgEZkGL+xOBzRvup5Hxo6LHuMceSxOabuJLJm5Yp/92R9eMmMvA==
chalk@^2.0.0:
chalk@^2.0.0, chalk@^2.4.2:
version "2.4.2"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"
integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==
@ -3301,10 +3388,10 @@ functions-have-names@^1.2.2:
resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834"
integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==
fuse.js@^6.6.2:
version "6.6.2"
resolved "https://registry.yarnpkg.com/fuse.js/-/fuse.js-6.6.2.tgz#fe463fed4b98c0226ac3da2856a415576dc9a111"
integrity sha512-cJaJkxCCxC8qIIcPBF9yGxY0W/tVZS3uEISDxhYIdtk8OL93pe+6Zj7LjCqVV4dzbqcriOZ+kQ/NE4RXZHsIGA==
fuse.js@^7.0.0:
version "7.0.0"
resolved "https://registry.yarnpkg.com/fuse.js/-/fuse.js-7.0.0.tgz#6573c9fcd4c8268e403b4fc7d7131ffcf99a9eb2"
integrity sha512-14F4hBIxqKvD4Zz/XjDc3y94mNZN6pRv3U13Udo0lNLCWRBUsrMv2xwcF/y/Z5sV6+FQW+/ow68cHpm4sunt8Q==
gensync@^1.0.0-beta.2:
version "1.0.0-beta.2"
@ -5073,10 +5160,10 @@ react-markdown@^8.0.7:
unist-util-visit "^4.0.0"
vfile "^5.0.0"
react-redux@^8.1.1:
version "8.1.1"
resolved "https://registry.yarnpkg.com/react-redux/-/react-redux-8.1.1.tgz#8e740f3fd864a4cd0de5ba9cdc8ad39cc9e7c81a"
integrity sha512-5W0QaKtEhj+3bC0Nj0NkqkhIv8gLADH/2kYFMTHxCVqQILiWzLv6MaLuV5wJU3BQEdHKzTfcvPN0WMS6SC1oyA==
react-redux@^8.1.3:
version "8.1.3"
resolved "https://registry.yarnpkg.com/react-redux/-/react-redux-8.1.3.tgz#4fdc0462d0acb59af29a13c27ffef6f49ab4df46"
integrity sha512-n0ZrutD7DaX/j9VscF+uTALI3oUPa/pO4Z3soOBIjuRn/FzVu6aehhysxZCLi6y7duMf52WNZGMl7CtuK5EnRw==
dependencies:
"@babel/runtime" "^7.12.1"
"@types/hoist-non-react-statics" "^3.3.1"
@ -5133,10 +5220,10 @@ regenerate@^1.4.2:
resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a"
integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==
regenerator-runtime@^0.13.11:
version "0.13.11"
resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz#f6dca3e7ceec20590d07ada785636a90cdca17f9"
integrity sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==
regenerator-runtime@^0.14.0:
version "0.14.1"
resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz#356ade10263f685dda125100cd862c1db895327f"
integrity sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==
regenerator-transform@^0.15.1:
version "0.15.1"