diff --git a/.lintstagedrc.json b/.lintstagedrc.json
index 023bf16ab..58784bad8 100644
--- a/.lintstagedrc.json
+++ b/.lintstagedrc.json
@@ -1,6 +1,6 @@
 {
-  "./app/**/*.{js,ts,jsx,tsx,json,html,css,scss,md}": [
-    "eslint --fix",
-    "prettier --write"
-  ]
-}
\ No newline at end of file
+  "./app/**/*.{js,ts,jsx,tsx,json,html,css,md}": [
+    "eslint --fix",
+    "prettier --write"
+  ]
+}
diff --git a/README.md b/README.md
index a6f9d5fa8..5952aa33a 100644
--- a/README.md
+++ b/README.md
@@ -7,9 +7,9 @@ One-Click to deploy your own ChatGPT web UI.
 
-[演示 Demo](https://chat-gpt-next-web.vercel.app/) / [反馈 Issues](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [加入 Discord](https://discord.gg/zrhvHCr79N) / [QQ 群](https://user-images.githubusercontent.com/16968934/228190818-7dd00845-e9b9-4363-97e5-44c507ac76da.jpeg) / [打赏开发者](https://user-images.githubusercontent.com/16968934/227772541-5bcd52d8-61b7-488c-a203-0330d8006e2b.jpg)
+[演示 Demo](https://chat-gpt-next-web.vercel.app/) / [反馈 Issues](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [加入 Discord](https://discord.gg/zrhvHCr79N) / [QQ 群](https://user-images.githubusercontent.com/16968934/228190818-7dd00845-e9b9-4363-97e5-44c507ac76da.jpeg) / [打赏开发者](https://user-images.githubusercontent.com/16968934/227772541-5bcd52d8-61b7-488c-a203-0330d8006e2b.jpg) / [Donate](#捐赠-donate-usdt)
 
-[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web)
+[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web)
 
 [![Open in Gitpod](https://gitpod.io/button/open-in-gitpod.svg)](https://gitpod.io/#https://github.com/Yidadaa/ChatGPT-Next-Web)
 
@@ -40,7 +40,7 @@ One-Click to deploy your own ChatGPT web UI.
 
 1. 准备好你的 [OpenAI API Key](https://platform.openai.com/account/api-keys);
 2. 点击右侧按钮开始部署:
-   [![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web),直接使用 Github 账号登陆即可,记得在环境变量页填入 API Key;
+   [![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web),直接使用 Github 账号登陆即可,记得在环境变量页填入 API Key;
 3. 部署完毕后,即可开始使用;
 4. (可选)[绑定自定义域名](https://vercel.com/docs/concepts/projects/domains/add-a-domain):Vercel 分配的域名 DNS 在某些区域被污染了,绑定自定义域名即可直连。
 
@@ -48,7 +48,7 @@ One-Click to deploy your own ChatGPT web UI.
 
 1. Get [OpenAI API Key](https://platform.openai.com/account/api-keys);
 2. Click
-   [![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web);
+   [![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web);
 3. Enjoy :)
 
 ## 保持更新 Keep Updated
 
@@ -78,9 +78,9 @@ This project will be continuously maintained.
 
 If you want to keep the code repos You can star or watch this project or follow author to get release notifictions in time.
 
-## 访问控制 Access Control
+## 配置密码 Password
 
-本项目提供有限的权限控制功能,请在环境变量页增加名为 `CODE` 的环境变量,值为用英文逗号分隔的自定义控制码:
+本项目提供有限的权限控制功能,请在 Vercel 项目控制面板的环境变量页增加名为 `CODE` 的环境变量,值为用英文逗号分隔的自定义密码:
 
 ```
 code1,code2,code3
 ```
 
@@ -88,7 +88,7 @@ code1,code2,code3
 
 增加或修改该环境变量后,请**重新部署**项目使改动生效。
 
-This project provides limited access control. Please add an environment variable named `CODE` on the environment variables page. The value should be a custom control code separated by comma like this:
+This project provides limited access control. Please add an environment variable named `CODE` on the vercel environment variables page. The value should be passwords separated by comma like this:
 
 ```
 code1,code2,code3
 ```
 
@@ -96,6 +96,38 @@ code1,code2,code3
 
 After adding or modifying this environment variable, please redeploy the project for the changes to take effect.
 
+## 环境变量 Environment Variables
+
+### `OPENAI_API_KEY` (required)
+
+OpenAI 密钥。
+
+Your openai api key.
+
+### `CODE` (optional)
+
+访问密码,可选,可以使用逗号隔开多个密码。
+
+Access password, separated by comma.
+
+### `BASE_URL` (optional)
+
+> Default: `api.openai.com`
+
+OpenAI 接口代理 URL。
+
+Override openai api request base url.
+
+### `PROTOCOL` (optional)
+
+> Default: `https`
+
+> Values: `http` | `https`
+
+OpenAI 接口协议。
+
+Override openai api request protocol.
+
 ## 开发 Development
 
 点击下方按钮,开始二次开发:
 
@@ -118,11 +150,11 @@ OPENAI_API_KEY=
 
 2. 执行 `yarn install && yarn dev` 即可。
 
 ### 本地部署 Local Deployment
+
 ```shell
 bash <(curl -s https://raw.githubusercontent.com/Yidadaa/ChatGPT-Next-Web/main/scripts/setup.sh)
 ```
-
 ### 容器部署 Docker Deployment
 
 ```shell
 docker pull yidadaa/chatgpt-next-web
 docker run -d -p 3000:3000 -e OPENAI_API_KEY="" -e CODE="" yidadaa/chatgpt-next-
@@ -137,15 +169,12 @@ docker run -d -p 3000:3000 -e OPENAI_API_KEY="" -e CODE="" yidadaa/chatgpt-next-
 
 ![更多展示 More](./static/more.png)
 
-## 说明 Attention
-
-本项目的演示地址所用的 OpenAI 账户的免费额度将于 2023-04-01 过期,届时将无法通过演示地址在线体验。
-
-如果你想贡献出自己的 API Key,可以通过作者主页的邮箱发送给作者,并标注过期时间。
-
-The free trial of the OpenAI account used by the demo will expire on April 1, 2023, and the demo will not be available at that time.
-
-If you would like to contribute your API key, you can email it to the author and indicate the expiration date of the API key.
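The `BASE_URL`, `PROTOCOL` and `CODE` variables documented in the Environment Variables section above are read from `process.env` at runtime, so a self-hosted container can presumably receive them the same way the Docker example passes `OPENAI_API_KEY`. A minimal sketch, with all values as placeholders:

```shell
docker pull yidadaa/chatgpt-next-web
docker run -d -p 3000:3000 \
  -e OPENAI_API_KEY="sk-your-key" \
  -e CODE="password1,password2" \
  -e BASE_URL="your-openai-proxy.example.com" \
  -e PROTOCOL="https" \
  yidadaa/chatgpt-next-web
```

The same variables drive the new `/api/openai` proxy route added in this patch, which forwards whatever OpenAI path is given in a `path` header. A rough smoke test against a local `yarn dev` server (assuming the default port 3000; the `token` header carries your own key, mirroring what `requestOpenaiClient` sends):

```shell
curl http://localhost:3000/api/openai \
  -X POST \
  -H "Content-Type: application/json" \
  -H "path: v1/chat/completions" \
  -H "token: sk-your-key" \
  -d '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Hello"}]}'
```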
+## 捐赠 Donate USDT
+
+> BNB Smart Chain (BEP 20)
+
+```
+0x67cD02c7EB62641De576a1fA3EdB32eA0c3ffD89
+```
 
 ## 鸣谢 Special Thanks
 
@@ -157,6 +186,7 @@ If you would like to contribute your API key, you can email it to the author and
 [@hoochanlon](https://github.com/hoochanlon)
 
 ### 贡献者 Contributor
+
 [Contributors](https://github.com/Yidadaa/ChatGPT-Next-Web/graphs/contributors)
 
 ## LICENSE
diff --git a/app/api/chat-stream/route.ts b/app/api/chat-stream/route.ts
index ad40c6be1..e7bdfc5fb 100644
--- a/app/api/chat-stream/route.ts
+++ b/app/api/chat-stream/route.ts
@@ -1,26 +1,12 @@
 import { createParser } from "eventsource-parser";
 import { NextRequest } from "next/server";
+import { requestOpenai } from "../common";
 
 async function createStream(req: NextRequest) {
   const encoder = new TextEncoder();
   const decoder = new TextDecoder();
 
-  let apiKey = process.env.OPENAI_API_KEY;
-
-  const userApiKey = req.headers.get("token");
-  if (userApiKey) {
-    apiKey = userApiKey;
-    console.log("[Stream] using user api key");
-  }
-
-  const res = await fetch("https://api.openai.com/v1/chat/completions", {
-    headers: {
-      "Content-Type": "application/json",
-      Authorization: `Bearer ${apiKey}`,
-    },
-    method: "POST",
-    body: req.body,
-  });
+  const res = await requestOpenai(req);
 
   const stream = new ReadableStream({
     async start(controller) {
diff --git a/app/api/chat/.gitignore b/app/api/chat/.gitignore
deleted file mode 100644
index 1b8afd087..000000000
--- a/app/api/chat/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-config.ts
\ No newline at end of file
diff --git a/app/api/chat/route.ts b/app/api/chat/route.ts
deleted file mode 100644
index 18c7db148..000000000
--- a/app/api/chat/route.ts
+++ /dev/null
@@ -1,29 +0,0 @@
-import { OpenAIApi, Configuration } from "openai";
-import { ChatRequest } from "./typing";
-
-export async function POST(req: Request) {
-  try {
-    let apiKey = process.env.OPENAI_API_KEY;
-
-    const userApiKey = req.headers.get("token");
-    if (userApiKey) {
-      apiKey = userApiKey;
-    }
-
-    const openai = new OpenAIApi(
-      new Configuration({
-        apiKey,
-      })
-    );
-
-    const requestBody = (await req.json()) as ChatRequest;
-    const completion = await openai!.createChatCompletion({
-      ...requestBody,
-    });
-
-    return new Response(JSON.stringify(completion.data));
-  } catch (e) {
-    console.error("[Chat] ", e);
-    return new Response(JSON.stringify(e));
-  }
-}
diff --git a/app/api/common.ts b/app/api/common.ts
new file mode 100644
index 000000000..842eeacaf
--- /dev/null
+++ b/app/api/common.ts
@@ -0,0 +1,22 @@
+import { NextRequest } from "next/server";
+
+const OPENAI_URL = "api.openai.com";
+const DEFAULT_PROTOCOL = "https";
+const PROTOCOL = process.env.PROTOCOL ?? DEFAULT_PROTOCOL;
+const BASE_URL = process.env.BASE_URL ?? OPENAI_URL;
+
+export async function requestOpenai(req: NextRequest) {
+  const apiKey = req.headers.get("token");
+  const openaiPath = req.headers.get("path");
+
+  console.log("[Proxy] ", openaiPath);
+
+  return fetch(`${PROTOCOL}://${BASE_URL}/${openaiPath}`, {
+    headers: {
+      "Content-Type": "application/json",
+      Authorization: `Bearer ${apiKey}`,
+    },
+    method: req.method,
+    body: req.body,
+  });
+}
diff --git a/app/api/openai/route.ts b/app/api/openai/route.ts
new file mode 100644
index 000000000..5bc317e55
--- /dev/null
+++ b/app/api/openai/route.ts
@@ -0,0 +1,28 @@
+import { NextRequest, NextResponse } from "next/server";
+import { requestOpenai } from "../common";
+
+async function makeRequest(req: NextRequest) {
+  try {
+    const res = await requestOpenai(req);
+    return new Response(res.body);
+  } catch (e) {
+    console.error("[OpenAI] ", req.body, e);
+    return NextResponse.json(
+      {
+        error: true,
+        msg: JSON.stringify(e),
+      },
+      {
+        status: 500,
+      },
+    );
+  }
+}
+
+export async function POST(req: NextRequest) {
+  return makeRequest(req);
+}
+
+export async function GET(req: NextRequest) {
+  return makeRequest(req);
+}
diff --git a/app/api/chat/typing.ts b/app/api/openai/typing.ts
similarity index 100%
rename from app/api/chat/typing.ts
rename to app/api/openai/typing.ts
diff --git a/app/components/home.module.scss b/app/components/home.module.scss
index c1b08d25c..87231feee 100644
--- a/app/components/home.module.scss
+++ b/app/components/home.module.scss
@@ -221,6 +221,14 @@
   margin-bottom: 100px;
 }
 
+.chat-body-title {
+  cursor: pointer;
+
+  &:hover {
+    text-decoration: underline;
+  }
+}
+
 .chat-message {
   display: flex;
   flex-direction: row;
@@ -410,7 +418,7 @@
   background-color: var(--white);
   color: var(--black);
   font-family: inherit;
-  padding: 10px 14px;
+  padding: 10px 14px 50px;
   resize: none;
   outline: none;
 }
diff --git a/app/components/home.tsx b/app/components/home.tsx
index 45e399704..128603d75 100644
--- a/app/components/home.tsx
+++ b/app/components/home.tsx
@@ -199,7 +199,10 @@ export function PromptHints(props: {
   );
 }
 
-export function Chat(props: { showSideBar?: () => void }) {
+export function Chat(props: {
+  showSideBar?: () => void;
+  sideBarShowing?: boolean;
+}) {
   type RenderMessage = Message & { preview?: boolean };
 
   const chatStore = useChatStore();
@@ -219,7 +222,6 @@ export function Chat(props: { showSideBar?: () => void }) {
   const [promptHints, setPromptHints] = useState([]);
   const onSearch = useDebouncedCallback(
     (text: string) => {
-      if (chatStore.config.disablePromptHint) return;
       setPromptHints(promptStore.search(text));
     },
     100,
@@ -232,15 +234,31 @@
     inputRef.current?.focus();
   };
 
+  const scrollInput = () => {
+    const dom = inputRef.current;
+    if (!dom) return;
+    const paddingBottomNum: number = parseInt(
+      window.getComputedStyle(dom).paddingBottom,
+      10
+    );
+    dom.scrollTop = dom.scrollHeight - dom.offsetHeight + paddingBottomNum;
+  };
+
   // only search prompts when user input is short
   const SEARCH_TEXT_LIMIT = 30;
   const onInput = (text: string) => {
+    scrollInput();
     setUserInput(text);
     const n = text.trim().length;
-    if (n === 0 || n > SEARCH_TEXT_LIMIT) {
+
+    // clear search results
+    if (n === 0) {
       setPromptHints([]);
-    } else {
-      onSearch(text);
+    } else if (!chatStore.config.disablePromptHint && n < SEARCH_TEXT_LIMIT) {
+      // check if need to trigger auto completion
+      if (text.startsWith("/") && text.length > 1) {
+        onSearch(text.slice(1));
+      }
+    }
   };
 
@@ -250,6 +268,7 @@ export function Chat(props: { showSideBar?: () => void }) {
     setIsLoading(true);
     chatStore.onUserInput(userInput).then(() => setIsLoading(false));
     setUserInput("");
+    setPromptHints([]);
     inputRef.current?.focus();
   };
@@ -286,6 +305,7 @@ export function Chat(props: { showSideBar?: () => void }) {
         chatStore
           .onUserInput(messages[i].content)
           .then(() => setIsLoading(false));
+        inputRef.current?.focus();
         return;
       }
     }
@@ -330,7 +350,6 @@ export function Chat(props: { showSideBar?: () => void }) {
     const dom = latestMessageRef.current;
     if (dom && !isIOS() && autoScroll) {
       dom.scrollIntoView({
-        behavior: "smooth",
         block: "end",
       });
     }
@@ -344,7 +363,17 @@ export function Chat(props: { showSideBar?: () => void }) {
           className={styles["window-header-title"]}
           onClick={props?.showSideBar}
         >
-          <div className={styles["window-header-main-title"]}>
+          <div
+            className={styles["chat-body-title"]}
+            onClickCapture={() => {
+              const newTopic = prompt(Locale.Chat.Rename, session.topic);
+              if (newTopic && newTopic !== session.topic) {
+                chatStore.updateCurrentSession(
+                  (session) => (session.topic = newTopic!)
+                );
+              }
+            }}
+          >
             {session.topic}
           </div>
@@ -439,6 +468,7 @@ export function Chat(props: { showSideBar?: () => void }) {
                   className="markdown-body"
                   style={{ fontSize: `${fontSize}px` }}
                   onContextMenu={(e) => onRightClick(e, message)}
+                  onDoubleClickCapture={() => setUserInput(message.content)}
                 >
@@ -455,7 +485,7 @@ export function Chat(props: { showSideBar?: () => void }) {
             );
           })}
-
+
           -
@@ -476,7 +506,7 @@ export function Chat(props: { showSideBar?: () => void }) {
               setAutoScroll(false);
               setTimeout(() => setPromptHints([]), 100);
             }}
-            autoFocus
+            autoFocus={!props?.sideBarShowing}
           />
         }
@@ -678,7 +708,11 @@ export function Home() {
           }}
         />
       ) : (
-        <Chat key="chat" showSideBar={() => setShowSideBar(true)} />
+        <Chat
+          key="chat"
+          showSideBar={() => setShowSideBar(true)}
+          sideBarShowing={showSideBar}
+        />
       )}
diff --git a/app/components/markdown.tsx b/app/components/markdown.tsx
index 6e0e6d867..6d3cd0bf6 100644
--- a/app/components/markdown.tsx
+++ b/app/components/markdown.tsx
@@ -1,6 +1,7 @@
 import ReactMarkdown from "react-markdown";
 import "katex/dist/katex.min.css";
 import RemarkMath from "remark-math";
+import RemarkBreaks from "remark-breaks";
 import RehypeKatex from "rehype-katex";
 import RemarkGfm from "remark-gfm";
 import RehypePrsim from "rehype-prism-plus";
@@ -29,7 +30,7 @@ export function PreCode(props: { children: any }) {
 export function Markdown(props: { content: string }) {
   return (
     <ReactMarkdown
-      remarkPlugins={[RemarkMath, RemarkGfm]}
+      remarkPlugins={[RemarkMath, RemarkGfm, RemarkBreaks]}
diff --git a/app/components/settings.tsx b/app/components/settings.tsx
--- a/app/components/settings.tsx
+++ b/app/components/settings.tsx
@@ … @@ export function Settings(props: { closeSettings: () => void }) {
     state.updateConfig,
     state.resetConfig,
     state.clearAllData,
-  ]
+  ],
   );
 
   const updateStore = useUpdateStore();
@@ -70,14 +71,34 @@ export function Settings(props: { closeSettings: () => void }) {
     });
   }
 
+  const [usage, setUsage] = useState<{
+    granted?: number;
+    used?: number;
+  }>();
+  const [loadingUsage, setLoadingUsage] = useState(false);
+  function checkUsage() {
+    setLoadingUsage(true);
+    requestUsage()
+      .then((res) =>
+        setUsage({
+          granted: res?.total_granted,
+          used: res?.total_used,
+        }),
+      )
+      .finally(() => {
+        setLoadingUsage(false);
+      });
+  }
+
   useEffect(() => {
     checkUpdate();
+    checkUsage();
   }, []);
 
   const accessStore = useAccessStore();
   const enabledAccessControl = useMemo(
     () => accessStore.enabledAccessControl(),
-    []
+    [],
   );
 
   const promptStore = usePromptStore();
@@ -179,7 +200,7 @@ export function Settings(props: { closeSettings: () => void }) {
             onChange={(e) => {
               updateConfig(
                 (config) =>
-                  (config.submitKey = e.target.value as any as SubmitKey)
+                  (config.submitKey = e.target.value as any as SubmitKey),
               );
             }}
           >
@@ -199,7 +220,7 @@ export function Settings(props: { closeSettings: () => void }) {
             value={config.theme}
             onChange={(e) => {
               updateConfig(
-                (config) => (config.theme = e.target.value as any as Theme)
+                (config) => (config.theme = e.target.value as any as Theme),
               );
             }}
           >
@@ -232,7 +253,7 @@ export function Settings(props: { closeSettings: () => void }) {
           >
@@ … @@ export function Settings(props: { closeSettings: () => void }) {
             onChange={(e) =>
               updateConfig(
                 (config) =>
-                  (config.fontSize = Number.parseInt(e.currentTarget.value))
+                  (config.fontSize = Number.parseInt(e.currentTarget.value)),
               )
             }
           >
@@ -253,7 +274,7 @@ export function Settings(props: { closeSettings: () => void }) {
             checked={config.tightBorder}
             onChange={(e) =>
               updateConfig(
-                (config) => (config.tightBorder = e.currentTarget.checked)
+                (config) => (config.tightBorder = e.currentTarget.checked),
               )
             }
           >
@@ -271,7 +292,7 @@ export function Settings(props: { closeSettings: () => void }) {
             onChange={(e) =>
               updateConfig(
                 (config) =>
-                  (config.disablePromptHint = e.currentTarget.checked)
+                  (config.disablePromptHint = e.currentTarget.checked),
               )
             }
           >
@@ -281,7 +302,7 @@ export function Settings(props: { closeSettings: () => void }) {
           title={Locale.Settings.Prompt.List}
           subTitle={Locale.Settings.Prompt.ListCount(
             builtinCount,
-            customCount
+            customCount,
           )}
         >
@@ … @@ export function Settings(props: { closeSettings: () => void }) {
           >
+        <SettingItem
+          title={Locale.Settings.Usage.Title}
+          subTitle={
+            loadingUsage
+              ? Locale.Settings.Usage.IsChecking
+              : Locale.Settings.Usage.SubTitle(usage?.granted, usage?.used)
+          }
+        >
+          {loadingUsage ? (
+            <div />
+          ) : (
+            <IconButton
+              icon={<ResetIcon></ResetIcon>}
+              text={Locale.Settings.Usage.Check}
+              onClick={checkUsage}
+            />
+          )}
+        </SettingItem>
+
@@ … @@ export function Settings(props: { closeSettings: () => void }) {
             onChange={(e) =>
               updateConfig(
                 (config) =>
-                  (config.historyMessageCount = e.target.valueAsNumber)
+                  (config.historyMessageCount = e.target.valueAsNumber),
               )
             }
           >
@@ -357,7 +400,7 @@ export function Settings(props: { closeSettings: () => void }) {
               updateConfig(
                 (config) =>
                   (config.compressMessageLengthThreshold =
-                    e.currentTarget.valueAsNumber)
+                    e.currentTarget.valueAsNumber),
               )
             }
           >
@@ -370,7 +413,8 @@ export function Settings(props: { closeSettings: () => void }) {
             value={config.modelConfig.model}
             onChange={(e) => {
               updateConfig(
-                (config) => (config.modelConfig.model = e.currentTarget.value)
+                (config) =>
+                  (config.modelConfig.model = e.currentTarget.value),
               );
             }}
           >
@@ -389,13 +433,13 @@ export function Settings(props: { closeSettings: () => void }) {
             type="range"
             value={config.modelConfig.temperature.toFixed(1)}
             min="0"
-            max="1"
+            max="2"
             step="0.1"
             onChange={(e) => {
               updateConfig(
                 (config) =>
                   (config.modelConfig.temperature =
-                    e.currentTarget.valueAsNumber)
+                    e.currentTarget.valueAsNumber),
               );
             }}
           >
@@ -413,7 +457,7 @@ export function Settings(props: { closeSettings: () => void }) {
               updateConfig(
                 (config) =>
                   (config.modelConfig.max_tokens =
-                    e.currentTarget.valueAsNumber)
+                    e.currentTarget.valueAsNumber),
               )
             }
           >
@@ -432,7 +476,7 @@ export function Settings(props: { closeSettings: () => void }) {
               updateConfig(
                 (config) =>
                   (config.modelConfig.presence_penalty =
-                    e.currentTarget.valueAsNumber)
+                    e.currentTarget.valueAsNumber),
               );
             }}
           >
diff --git a/app/locales/cn.ts b/app/locales/cn.ts
index 87566c145..239da23fa 100644
--- a/app/locales/cn.ts
+++ b/app/locales/cn.ts
@@ -1,3 +1,5 @@
+import { SubmitKey } from "../store/app";
+
 const cn = {
   WIP: "该功能仍在开发中……",
   Error: {
@@ -16,8 +18,15 @@ const cn = {
     Stop: "停止",
     Retry: "重试",
   },
+  Rename: "重命名对话",
   Typing: "正在输入…",
-  Input: (submitKey: string) => `输入消息,${submitKey} 发送`,
+  Input: (submitKey: string) => {
+    var inputHints = `输入消息,${submitKey} 发送`;
+    if (submitKey === String(SubmitKey.Enter)) {
+      inputHints += ",Shift + Enter 换行";
+    }
+    return inputHints;
+  },
   Send: "发送",
   },
   Export: {
@@ -55,6 +64,7 @@ const cn = {
     Title: "字体大小",
     SubTitle: "聊天内容的字体大小",
   },
+
   Update: {
     Version: (x: string) => `当前版本:${x}`,
     IsLatest: "已是最新版本",
@@ -69,7 +79,7 @@ const cn = {
   Prompt: {
     Disable: {
       Title: "禁用提示词自动补全",
-      SubTitle: "禁用后将无法自动根据输入补全",
+      SubTitle: "在输入框开头输入 / 即可触发自动补全",
     },
     List: "自定义提示词列表",
     ListCount: (builtin: number, custom: number) =>
@@ -89,6 +99,14 @@ const cn = {
      SubTitle: "使用自己的 Key 可绕过受控访问限制",
      Placeholder: "OpenAI API Key",
    },
+    Usage: {
+      Title: "账户余额",
+      SubTitle(granted: any, used: any) {
+        return `总共 $${granted},已使用 $${used}`;
+      },
+      IsChecking: "正在检查…",
+      Check: "重新检查",
+    },
    AccessCode: {
      Title: "访问码",
      SubTitle: "现在是受控访问状态",
@@ -116,7 +134,7 @@ const cn = {
    History: (content: string) =>
      "这是 ai 和用户的历史聊天总结作为前情提要:" + content,
    Topic:
-      "直接返回这句话的简要主题,不要解释,如果没有主题,请直接返回“闲聊”",
+      "使用四到五个字直接返回这句话的简要主题,不要解释、不要标点、不要语气词、不要多余文本,如果没有主题,请直接返回“闲聊”",
    Summarize:
      "简要总结一下你和用户的对话,用作后续的上下文提示 prompt,控制在 50 字以内",
  },
diff --git a/app/locales/en.ts b/app/locales/en.ts
index 8d0701fab..296992435 100644
--- a/app/locales/en.ts
+++ b/app/locales/en.ts
@@ -1,3 +1,4 @@
+import { SubmitKey } from "../store/app";
 import type { LocaleType } from "./index";
 
 const en: LocaleType = {
@@ -19,9 +20,15 @@ const en: LocaleType = {
     Stop: "Stop",
     Retry: "Retry",
   },
+  Rename: "Rename Chat",
   Typing: "Typing…",
-  Input: (submitKey: string) =>
-    `Type something and press ${submitKey} to send`,
+  Input: (submitKey: string) => {
+    var inputHints = `Type something and press ${submitKey} to send`;
+    if (submitKey === String(SubmitKey.Enter)) {
+      inputHints += ", press Shift + Enter to insert a newline";
+    }
+    return inputHints;
+  },
   Send: "Send",
   },
   Export: {
@@ -73,7 +80,7 @@ const en: LocaleType = {
   Prompt: {
     Disable: {
       Title: "Disable auto-completion",
-      SubTitle: "After disabling, auto-completion will not be available",
+      SubTitle: "Input / to trigger auto-completion",
     },
     List: "Prompt List",
     ListCount: (builtin: number, custom: number) =>
@@ -94,6 +101,14 @@ const en: LocaleType = {
      SubTitle: "Use your key to ignore access code limit",
      Placeholder: "OpenAI API Key",
    },
+    Usage: {
+      Title: "Account Balance",
+      SubTitle(granted: any, used: any) {
+        return `Total $${granted}, Used $${used}`;
+      },
+      IsChecking: "Checking...",
+      Check: "Check Again",
+    },
    AccessCode: {
      Title: "Access Code",
      SubTitle: "Access control enabled",
@@ -123,7 +138,7 @@ const en: LocaleType = {
      "This is a summary of the chat history between the AI and the user as a recap: " +
      content,
    Topic:
-      "Provide a brief topic of the sentence without explanation. If there is no topic, return 'Chitchat'.",
+      "Please generate a four to five word title summarizing our conversation without any lead-in, punctuation, quotation marks, periods, symbols, or additional text. Remove enclosing quotation marks.",
    Summarize:
      "Summarize our discussion briefly in 50 characters or less to use as a prompt for future context.",
  },
diff --git a/app/locales/tw.ts b/app/locales/tw.ts
index 58c077e1d..e63c57a6e 100644
--- a/app/locales/tw.ts
+++ b/app/locales/tw.ts
@@ -1,3 +1,4 @@
+import { SubmitKey } from "../store/app";
 import type { LocaleType } from "./index";
 
 const tw: LocaleType = {
@@ -18,8 +19,15 @@ const tw: LocaleType = {
     Stop: "停止",
     Retry: "重試",
   },
+  Rename: "重命名對話",
   Typing: "正在輸入…",
-  Input: (submitKey: string) => `輸入訊息後,按下 ${submitKey} 鍵即可發送`,
+  Input: (submitKey: string) => {
+    var inputHints = `輸入訊息後,按下 ${submitKey} 鍵即可發送`;
+    if (submitKey === String(SubmitKey.Enter)) {
+      inputHints += ",Shift + Enter 鍵換行";
+    }
+    return inputHints;
+  },
   Send: "發送",
   },
   Export: {
@@ -71,7 +79,7 @@ const tw: LocaleType = {
   Prompt: {
     Disable: {
       Title: "停用提示詞自動補全",
-      SubTitle: "若停用後,將無法自動根據輸入進行補全",
+      SubTitle: "在輸入框開頭輸入 / 即可觸發自動補全",
     },
     List: "自定義提示詞列表",
     ListCount: (builtin: number, custom: number) =>
@@ -91,6 +99,14 @@ const tw: LocaleType = {
      SubTitle: "使用自己的 Key 可規避受控訪問限制",
      Placeholder: "OpenAI API Key",
    },
+    Usage: {
+      Title: "帳戶餘額",
+      SubTitle(granted: any, used: any) {
+        return `總共 $${granted},已使用 $${used}`;
+      },
+      IsChecking: "正在檢查…",
+      Check: "重新檢查",
+    },
    AccessCode: {
      Title: "訪問碼",
      SubTitle: "現在是受控訪問狀態",
@@ -117,8 +133,7 @@ const tw: LocaleType = {
   Prompt: {
     History: (content: string) =>
       "這是 AI 與用戶的歷史聊天總結,作為前情提要:" + content,
-    Topic:
-      "直接返回這句話的簡要主題,無須解釋,若無主題,請直接返回「閒聊」",
+    Topic: "直接返回這句話的簡要主題,無須解釋,若無主題,請直接返回「閒聊」",
     Summarize:
       "簡要總結一下你和用戶的對話,作為後續的上下文提示 prompt,且字數控制在 50 字以內",
   },
diff --git a/app/requests.ts b/app/requests.ts
index e9da87084..d173eb0de 100644
--- a/app/requests.ts
+++ b/app/requests.ts
@@ -1,4 +1,4 @@
-import type { ChatRequest, ChatReponse } from "./api/chat/typing";
+import type { ChatRequest, ChatReponse } from "./api/openai/typing";
 import { filterConfig, Message, ModelConfig, useAccessStore } from "./store";
 import Locale from "./locales";
 
@@ -9,7 +9,7 @@ const makeRequestParam = (
   options?: {
     filterBot?: boolean;
     stream?: boolean;
-  }
+  },
 ): ChatRequest => {
   let sendMessages = messages.map((v) => ({
     role: v.role,
@@ -42,19 +42,48 @@ function getHeaders() {
   return headers;
 }
 
+export function requestOpenaiClient(path: string) {
+  return (body: any, method = "POST") =>
+    fetch("/api/openai", {
+      method,
+      headers: {
+        "Content-Type": "application/json",
+        path,
+        ...getHeaders(),
+      },
+      body: body && JSON.stringify(body),
+    });
+}
+
 export async function requestChat(messages: Message[]) {
   const req: ChatRequest = makeRequestParam(messages, { filterBot: true });
 
-  const res = await fetch("/api/chat", {
-    method: "POST",
-    headers: {
-      "Content-Type": "application/json",
-      ...getHeaders(),
-    },
-    body: JSON.stringify(req),
-  });
+  const res = await requestOpenaiClient("v1/chat/completions")(req);
 
-  return (await res.json()) as ChatReponse;
+  try {
+    const response = (await res.json()) as ChatReponse;
+    return response;
+  } catch (error) {
+    console.error("[Request Chat] ", error, res.body);
+  }
+}
+
+export async function requestUsage() {
+  const res = await requestOpenaiClient("dashboard/billing/credit_grants")(
+    null,
+    "GET",
+  );
+
+  try {
+    const response = (await res.json()) as {
+      total_available: number;
+      total_granted: number;
+      total_used: number;
+    };
+    return response;
+  } catch (error) {
+    console.error("[Request usage] ", error, res.body);
+  }
 }
 
 export async function requestChatStream(
@@ -65,7 +94,7 @@ export async function requestChatStream(
     onMessage: (message: string, done: boolean) => void;
     onError: (error: Error) => void;
     onController?: (controller: AbortController) => void;
-  }
+  },
 ) {
   const req = makeRequestParam(messages, {
     stream: true,
@@ -87,6 +116,7 @@ export async function requestChatStream(
       method: "POST",
       headers: {
         "Content-Type": "application/json",
+        path: "v1/chat/completions",
         ...getHeaders(),
       },
       body: JSON.stringify(req),
@@ -129,7 +159,7 @@ export async function requestChatStream(
       responseText = Locale.Error.Unauthorized;
       finish();
     } else {
-      console.error("Stream Error");
+      console.error("Stream Error", res.body);
       options?.onError(new Error("Stream Error"));
     }
   } catch (err) {
@@ -149,7 +179,7 @@ export async function requestWithPrompt(messages: Message[], prompt: string) {
 
   const res = await requestChat(messages);
 
-  return res.choices.at(0)?.message?.content ?? "";
+  return res?.choices?.at(0)?.message?.content ?? "";
 }
 
 // To store message streaming controller
 export const ControllerPool = {
@@ -159,7 +189,7 @@ export const ControllerPool = {
   addController(
     sessionIndex: number,
     messageIndex: number,
-    controller: AbortController
+    controller: AbortController,
   ) {
     const key = this.key(sessionIndex, messageIndex);
     this.controllers[key] = controller;
diff --git a/app/store/app.ts b/app/store/app.ts
index 3043a7afe..a2345915b 100644
--- a/app/store/app.ts
+++ b/app/store/app.ts
@@ -207,6 +207,10 @@ interface ChatStore {
   clearAllData: () => void;
 }
 
+function countMessages(msgs: Message[]) {
+  return msgs.reduce((pre, cur) => pre + cur.content.length, 0);
+}
+
 const LOCAL_KEY = "chat-next-web-store";
 
 export const useChatStore = create<ChatStore>()(
@@ -398,8 +402,12 @@ export const useChatStore = create<ChatStore>()(
     summarizeSession() {
       const session = get().currentSession();
 
-      if (session.topic === DEFAULT_TOPIC && session.messages.length >= 3) {
-        // should summarize topic
+      // should summarize topic after chatting more than 50 words
+      const SUMMARIZE_MIN_LEN = 50;
+      if (
+        session.topic === DEFAULT_TOPIC &&
+        countMessages(session.messages) >= SUMMARIZE_MIN_LEN
+      ) {
         requestWithPrompt(session.messages, Locale.Store.Prompt.Topic).then(
           (res) => {
             get().updateCurrentSession(
@@ -413,10 +421,7 @@ export const useChatStore = create<ChatStore>()(
       let toBeSummarizedMsgs = session.messages.slice(
         session.lastSummarizeIndex,
       );
-      const historyMsgLength = toBeSummarizedMsgs.reduce(
-        (pre, cur) => pre + cur.content.length,
-        0,
-      );
+      const historyMsgLength = countMessages(toBeSummarizedMsgs);
 
       if (historyMsgLength > 4000) {
         toBeSummarizedMsgs = toBeSummarizedMsgs.slice(
diff --git a/app/styles/markdown.scss b/app/styles/markdown.scss
index 0a6b3bc56..107c1b80d 100644
--- a/app/styles/markdown.scss
+++ b/app/styles/markdown.scss
@@ -1116,4 +1116,4 @@
 
 .markdown-body ::-webkit-calendar-picker-indicator {
   filter: invert(50%);
-}
+}
\ No newline at end of file
diff --git a/middleware.ts b/middleware.ts
index 7e671ff1c..9338a2c6b 100644
--- a/middleware.ts
+++ b/middleware.ts
@@ -3,10 +3,10 @@
 import { ACCESS_CODES } from "./app/api/access";
 import md5 from "spark-md5";
 
 export const config = {
-  matcher: ["/api/chat", "/api/chat-stream"],
+  matcher: ["/api/openai", "/api/chat-stream"],
 };
 
-export function middleware(req: NextRequest, res: NextResponse) {
+export function middleware(req: NextRequest) {
   const accessCode = req.headers.get("access-code");
   const token = req.headers.get("token");
   const hashedCode = md5.hash(accessCode ?? "").trim();
@@ -18,14 +18,40 @@ export function middleware(req: NextRequest, res: NextResponse) {
   if (ACCESS_CODES.size > 0 && !ACCESS_CODES.has(hashedCode) && !token) {
     return NextResponse.json(
       {
+        error: true,
         needAccessCode: true,
-        hint: "Please go settings page and fill your access code.",
+        msg: "Please go to the settings page and fill in your access code.",
       },
       {
         status: 401,
-      }
+      },
     );
   }
 
-  return NextResponse.next();
+  // inject api key
+  if (!token) {
+    const apiKey = process.env.OPENAI_API_KEY;
+    if (apiKey) {
+      console.log("[Auth] set system token");
+      req.headers.set("token", apiKey);
+    } else {
+      return NextResponse.json(
+        {
+          error: true,
+          msg: "Empty Api Key",
+        },
+        {
+          status: 401,
+        },
+      );
+    }
+  } else {
+    console.log("[Auth] set user token");
+  }
+
+  return NextResponse.next({
+    request: {
+      headers: req.headers,
+    },
+  });
 }
diff --git a/package.json b/package.json
index e8537362a..eb17000ed 100644
--- a/package.json
+++ b/package.json
@@ -23,6 +23,7 @@
     "react": "^18.2.0",
     "react-dom": "^18.2.0",
     "react-markdown": "^8.0.5",
+    "remark-breaks": "^3.0.2",
     "rehype-katex": "^6.0.2",
     "rehype-prism-plus": "^1.5.1",
     "remark-gfm": "^3.0.1",
diff --git a/public/serviceWorker.js b/public/serviceWorker.js
index 585633fcb..028c79a89 100644
--- a/public/serviceWorker.js
+++ b/public/serviceWorker.js
@@ -1,24 +1,13 @@
 const CHATGPT_NEXT_WEB_CACHE = "chatgpt-next-web-cache";
 
-self.addEventListener('activate', function (event) {
-  console.log('ServiceWorker activated.');
+self.addEventListener("activate", function (event) {
+  console.log("ServiceWorker activated.");
 });
 
-self.addEventListener('install', function (event) {
+self.addEventListener("install", function (event) {
   event.waitUntil(
-    caches.open(CHATGPT_NEXT_WEB_CACHE)
-      .then(function (cache) {
-        return cache.addAll([
-        ]);
-      })
+    caches.open(CHATGPT_NEXT_WEB_CACHE).then(function (cache) {
+      return cache.addAll([]);
+    }),
   );
 });
-
-self.addEventListener('fetch', function (event) {
-  event.respondWith(
-    caches.match(event.request)
-      .then(function (response) {
-        return response || fetch(event.request);
-      })
-  );
-});
\ No newline at end of file