diff --git a/README.md b/README.md index ec11948e6..29d58caf4 100644 --- a/README.md +++ b/README.md @@ -13,7 +13,6 @@ One-Click to deploy well-designed ChatGPT web UI on Vercel. [演示](https://chat-gpt-next-web.vercel.app/) / [反馈](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [QQ 群](https://user-images.githubusercontent.com/16968934/234462588-e8eff256-f5ca-46ef-8f5f-d7db6d28735a.jpg) / [打赏开发者](https://user-images.githubusercontent.com/16968934/227772541-5bcd52d8-61b7-488c-a203-0330d8006e2b.jpg) - [![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web) [![Open in Gitpod](https://gitpod.io/button/open-in-gitpod.svg)](https://gitpod.io/#https://github.com/Yidadaa/ChatGPT-Next-Web) @@ -50,6 +49,7 @@ One-Click to deploy well-designed ChatGPT web UI on Vercel. - UI text customize ## What's New + - 🚀 v2.0 is released, now you can create prompt templates, turn your ideas into reality! Read this: [ChatGPT Prompt Engineering Tips: Zero, One and Few Shot Prompting](https://www.allabtai.com/prompt-engineering-tips-zero-one-and-few-shot-prompting/). ## 主要功能 @@ -80,10 +80,9 @@ One-Click to deploy well-designed ChatGPT web UI on Vercel. - 用户登录、账号管理、消息云同步 ## 最新动态 + - 🚀 v2.0 已经发布,现在你可以使用面具功能快速创建预制对话了! 了解更多: [ChatGPT 提示词高阶技能:零次、一次和少样本提示](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/138)。 - - ## Get Started > [简体中文 > 如何开始使用](./README_CN.md#开始使用) @@ -163,6 +162,12 @@ Override openai api request base url. Specify OpenAI organization ID. +### `HIDE_USER_API_KEY` (optional) + +> Default: Empty + +If you do not want users to input their own API key, set this environment variable to 1. + ## Development > [简体中文 > 如何进行二次开发](./README_CN.md#开发) @@ -249,7 +254,6 @@ bash <(curl -s https://raw.githubusercontent.com/Yidadaa/ChatGPT-Next-Web/main/s [@WingCH](https://github.com/WingCH) [@jtung4](https://github.com/jtung4) - ### Contributor [Contributors](https://github.com/Yidadaa/ChatGPT-Next-Web/graphs/contributors) diff --git a/README_CN.md b/README_CN.md index fda80a6ff..1da68f655 100644 --- a/README_CN.md +++ b/README_CN.md @@ -33,6 +33,7 @@ - 在 Vercel 重新选择并部署,[请查看详细教程](./docs/vercel-cn.md#如何新建项目)。 ### 打开自动更新 + > 如果你遇到了 Upstream Sync 执行错误,请手动 Sync Fork 一次! 当你 fork 项目之后,由于 Github 的限制,需要手动去你 fork 后的项目的 Actions 页面启用 Workflows,并启用 Upstream Sync Action,启用之后即可开启每小时定时自动更新: @@ -89,6 +90,10 @@ OpenAI 接口代理 URL,如果你手动配置了 openai 接口代理,请填 指定 OpenAI 中的组织 ID。 +### `HIDE_USER_API_KEY` (可选) + +如果你不想让用户自行填入 API Key,将此环境变量设置为 1 即可。 + ## 开发 > 强烈不建议在本地进行开发或者部署,由于一些技术原因,很难在本地配置好 OpenAI API 代理,除非你能保证可以直连 OpenAI 服务器。 @@ -106,15 +111,16 @@ OPENAI_API_KEY= ### 本地开发 1. 安装 nodejs 18 和 yarn,具体细节请询问 ChatGPT; -2. 执行 `yarn install && yarn dev` 即可。⚠️注意:此命令仅用于本地开发,不要用于部署! +2. 执行 `yarn install && yarn dev` 即可。⚠️ 注意:此命令仅用于本地开发,不要用于部署! 3. 
如果你想本地部署,请使用 `yarn install && yarn start` 命令,你可以配合 pm2 来守护进程,防止被杀死,详情询问 ChatGPT。 ## 部署 ### 容器部署 (推荐) + > Docker 版本需要在 20 及其以上,否则会提示找不到镜像。 -> ⚠️注意:docker 版本在大多数时间都会落后最新的版本 1 到 2 天,所以部署后会持续出现“存在更新”的提示,属于正常现象。 +> ⚠️ 注意:docker 版本在大多数时间都会落后最新的版本 1 到 2 天,所以部署后会持续出现“存在更新”的提示,属于正常现象。 ```shell docker pull yidadaa/chatgpt-next-web @@ -146,7 +152,7 @@ docker run -d -p 3000:3000 \ bash <(curl -s https://raw.githubusercontent.com/Yidadaa/ChatGPT-Next-Web/main/scripts/setup.sh) ``` -⚠️注意:如果你安装过程中遇到了问题,请使用 docker 部署。 +⚠️ 注意:如果你安装过程中遇到了问题,请使用 docker 部署。 ## 鸣谢 diff --git a/app/api/auth.ts b/app/api/auth.ts new file mode 100644 index 000000000..9a8348326 --- /dev/null +++ b/app/api/auth.ts @@ -0,0 +1,70 @@ +import { NextRequest } from "next/server"; +import { getServerSideConfig } from "../config/server"; +import md5 from "spark-md5"; + +const serverConfig = getServerSideConfig(); + +function getIP(req: NextRequest) { + let ip = req.ip ?? req.headers.get("x-real-ip"); + const forwardedFor = req.headers.get("x-forwarded-for"); + + if (!ip && forwardedFor) { + ip = forwardedFor.split(",").at(0) ?? ""; + } + + return ip; +} + +function parseApiKey(bearToken: string) { + const token = bearToken.trim().replaceAll("Bearer ", "").trim(); + const isOpenAiKey = token.startsWith("sk-"); + + return { + accessCode: isOpenAiKey ? "" : token, + apiKey: isOpenAiKey ? token : "", + }; +} + +export function auth(req: NextRequest) { + const authToken = req.headers.get("Authorization") ?? ""; + + // check if it is openai api key or user token + const { accessCode, apiKey: token } = parseApiKey(authToken); + + const hashedCode = md5.hash(accessCode ?? "").trim(); + + console.log("[Auth] allowed hashed codes: ", [...serverConfig.codes]); + console.log("[Auth] got access code:", accessCode); + console.log("[Auth] hashed access code:", hashedCode); + console.log("[User IP] ", getIP(req)); + console.log("[Time] ", new Date().toLocaleString()); + + if (serverConfig.needCode && !serverConfig.codes.has(hashedCode) && !token) { + return { + error: true, + needAccessCode: true, + msg: "Please go settings page and fill your access code.", + }; + } + + // if user does not provide an api key, inject system api key + if (!token) { + const apiKey = serverConfig.apiKey; + if (apiKey) { + console.log("[Auth] use system api key"); + req.headers.set("Authorization", `Bearer ${apiKey}`); + } else { + console.log("[Auth] admin did not provide an api key"); + return { + error: true, + msg: "Empty Api Key", + }; + } + } else { + console.log("[Auth] use user api key"); + } + + return { + error: false, + }; +} diff --git a/app/api/chat-stream/route.ts b/app/api/chat-stream/route.ts deleted file mode 100644 index 2775ff068..000000000 --- a/app/api/chat-stream/route.ts +++ /dev/null @@ -1,62 +0,0 @@ -import { createParser } from "eventsource-parser"; -import { NextRequest } from "next/server"; -import { requestOpenai } from "../common"; - -async function createStream(req: NextRequest) { - const encoder = new TextEncoder(); - const decoder = new TextDecoder(); - - const res = await requestOpenai(req); - - const contentType = res.headers.get("Content-Type") ?? ""; - if (!contentType.includes("stream")) { - const content = await ( - await res.text() - ).replace(/provided:.*. You/, "provided: ***. 
You"); - console.log("[Stream] error ", content); - return "```json\n" + content + "```"; - } - - const stream = new ReadableStream({ - async start(controller) { - function onParse(event: any) { - if (event.type === "event") { - const data = event.data; - // https://beta.openai.com/docs/api-reference/completions/create#completions/create-stream - if (data === "[DONE]") { - controller.close(); - return; - } - try { - const json = JSON.parse(data); - const text = json.choices[0].delta.content; - const queue = encoder.encode(text); - controller.enqueue(queue); - } catch (e) { - controller.error(e); - } - } - } - - const parser = createParser(onParse); - for await (const chunk of res.body as any) { - parser.feed(decoder.decode(chunk, { stream: true })); - } - }, - }); - return stream; -} - -export async function POST(req: NextRequest) { - try { - const stream = await createStream(req); - return new Response(stream); - } catch (error) { - console.error("[Chat Stream]", error); - return new Response( - ["```json\n", JSON.stringify(error, null, " "), "\n```"].join(""), - ); - } -} - -export const runtime = "edge"; diff --git a/app/api/common.ts b/app/api/common.ts index a86d68617..861caf3ba 100644 --- a/app/api/common.ts +++ b/app/api/common.ts @@ -6,8 +6,11 @@ const PROTOCOL = process.env.PROTOCOL ?? DEFAULT_PROTOCOL; const BASE_URL = process.env.BASE_URL ?? OPENAI_URL; export async function requestOpenai(req: NextRequest) { - const apiKey = req.headers.get("token"); - const openaiPath = req.headers.get("path"); + const authValue = req.headers.get("Authorization") ?? ""; + const openaiPath = `${req.nextUrl.pathname}${req.nextUrl.search}`.replaceAll( + "/api/openai/", + "", + ); let baseUrl = BASE_URL; @@ -22,10 +25,14 @@ export async function requestOpenai(req: NextRequest) { console.log("[Org ID]", process.env.OPENAI_ORG_ID); } + if (!authValue || !authValue.startsWith("Bearer sk-")) { + console.error("[OpenAI Request] invlid api key provided", authValue); + } + return fetch(`${baseUrl}/${openaiPath}`, { headers: { "Content-Type": "application/json", - Authorization: `Bearer ${apiKey}`, + Authorization: authValue, ...(process.env.OPENAI_ORG_ID && { "OpenAI-Organization": process.env.OPENAI_ORG_ID, }), diff --git a/app/api/config/route.ts b/app/api/config/route.ts index 65290a476..62b84c2ea 100644 --- a/app/api/config/route.ts +++ b/app/api/config/route.ts @@ -8,16 +8,15 @@ const serverConfig = getServerSideConfig(); // 警告!不要在这里写入任何敏感信息! 
const DANGER_CONFIG = { needCode: serverConfig.needCode, + hideUserApiKey: serverConfig.hideUserApiKey, }; declare global { type DangerConfig = typeof DANGER_CONFIG; } -export async function POST(req: NextRequest) { - return NextResponse.json({ - needCode: serverConfig.needCode, - }); +export async function POST() { + return NextResponse.json(DANGER_CONFIG); } export const runtime = "edge"; diff --git a/app/api/openai/[...path]/route.ts b/app/api/openai/[...path]/route.ts new file mode 100644 index 000000000..1ca103c64 --- /dev/null +++ b/app/api/openai/[...path]/route.ts @@ -0,0 +1,101 @@ +import { createParser } from "eventsource-parser"; +import { NextRequest, NextResponse } from "next/server"; +import { auth } from "../../auth"; +import { requestOpenai } from "../../common"; + +async function createStream(res: Response) { + const encoder = new TextEncoder(); + const decoder = new TextDecoder(); + + const stream = new ReadableStream({ + async start(controller) { + function onParse(event: any) { + if (event.type === "event") { + const data = event.data; + // https://beta.openai.com/docs/api-reference/completions/create#completions/create-stream + if (data === "[DONE]") { + controller.close(); + return; + } + try { + const json = JSON.parse(data); + const text = json.choices[0].delta.content; + const queue = encoder.encode(text); + controller.enqueue(queue); + } catch (e) { + controller.error(e); + } + } + } + + const parser = createParser(onParse); + for await (const chunk of res.body as any) { + parser.feed(decoder.decode(chunk, { stream: true })); + } + }, + }); + return stream; +} + +function formatResponse(msg: any) { + const jsonMsg = ["```json\n", JSON.stringify(msg, null, " "), "\n```"].join( + "", + ); + return new Response(jsonMsg); +} + +async function handle( + req: NextRequest, + { params }: { params: { path: string[] } }, +) { + console.log("[OpenAI Route] params ", params); + + const authResult = auth(req); + if (authResult.error) { + return NextResponse.json(authResult, { + status: 401, + }); + } + + try { + const api = await requestOpenai(req); + + const contentType = api.headers.get("Content-Type") ?? 
""; + + // streaming response + if (contentType.includes("stream")) { + const stream = await createStream(api); + const res = new Response(stream); + res.headers.set("Content-Type", contentType); + return res; + } + + // try to parse error msg + try { + const mayBeErrorBody = await api.json(); + if (mayBeErrorBody.error) { + console.error("[OpenAI Response] ", mayBeErrorBody); + return formatResponse(mayBeErrorBody); + } else { + const res = new Response(JSON.stringify(mayBeErrorBody)); + res.headers.set("Content-Type", "application/json"); + res.headers.set("Cache-Control", "no-cache"); + return res; + } + } catch (e) { + console.error("[OpenAI Parse] ", e); + return formatResponse({ + msg: "invalid response from openai server", + error: e, + }); + } + } catch (e) { + console.error("[OpenAI] ", e); + return formatResponse(e); + } +} + +export const GET = handle; +export const POST = handle; + +export const runtime = "edge"; diff --git a/app/api/openai/route.ts b/app/api/openai/route.ts deleted file mode 100644 index d49027c61..000000000 --- a/app/api/openai/route.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { NextRequest, NextResponse } from "next/server"; -import { requestOpenai } from "../common"; - -async function makeRequest(req: NextRequest) { - try { - const api = await requestOpenai(req); - const res = new NextResponse(api.body); - res.headers.set("Content-Type", "application/json"); - res.headers.set("Cache-Control", "no-cache"); - return res; - } catch (e) { - console.error("[OpenAI] ", req.body, e); - return NextResponse.json( - { - error: true, - msg: JSON.stringify(e), - }, - { - status: 500, - }, - ); - } -} - -export async function POST(req: NextRequest) { - return makeRequest(req); -} - -export async function GET(req: NextRequest) { - return makeRequest(req); -} - -export const runtime = "edge"; diff --git a/app/components/chat.tsx b/app/components/chat.tsx index 4aaa8437f..4173fc3ac 100644 --- a/app/components/chat.tsx +++ b/app/components/chat.tsx @@ -54,7 +54,7 @@ import styles from "./home.module.scss"; import chatStyle from "./chat.module.scss"; import { ListItem, Modal, showModal } from "./ui-lib"; -import { useNavigate } from "react-router-dom"; +import { useLocation, useNavigate } from "react-router-dom"; import { Path } from "../constant"; import { Avatar } from "./emoji"; import { MaskAvatar, MaskConfig } from "./mask"; @@ -224,15 +224,63 @@ export function PromptHints(props: { prompts: Prompt[]; onPromptSelect: (prompt: Prompt) => void; }) { - if (props.prompts.length === 0) return null; + const noPrompts = props.prompts.length === 0; + const [selectIndex, setSelectIndex] = useState(0); + const selectedRef = useRef(null); + useEffect(() => { + setSelectIndex(0); + }, [props.prompts.length]); + + useEffect(() => { + const onKeyDown = (e: KeyboardEvent) => { + if (noPrompts) return; + + // arrow up / down to select prompt + const changeIndex = (delta: number) => { + e.stopPropagation(); + e.preventDefault(); + const nextIndex = Math.max( + 0, + Math.min(props.prompts.length - 1, selectIndex + delta), + ); + setSelectIndex(nextIndex); + selectedRef.current?.scrollIntoView({ + block: "center", + }); + }; + + if (e.key === "ArrowUp") { + changeIndex(1); + } else if (e.key === "ArrowDown") { + changeIndex(-1); + } else if (e.key === "Enter") { + const selectedPrompt = props.prompts.at(selectIndex); + if (selectedPrompt) { + props.onPromptSelect(selectedPrompt); + } + } + }; + + window.addEventListener("keydown", onKeyDown); + + return () => 
window.removeEventListener("keydown", onKeyDown);
+    // eslint-disable-next-line react-hooks/exhaustive-deps
+  }, [noPrompts, selectIndex]);
+
+  if (noPrompts) return null;
 
   return (
     <div className={styles["prompt-hints"]}>
       {props.prompts.map((prompt, i) => (
         <div
-          className={styles["prompt-hint"]}
+          ref={i === selectIndex ? selectedRef : null}
+          className={
+            styles["prompt-hint"] +
+            ` ${i === selectIndex ? styles["prompt-hint-selected"] : ""}`
+          }
           key={prompt.title + i.toString()}
           onClick={() => props.onPromptSelect(prompt)}
+          onMouseEnter={() => setSelectIndex(i)}
         >
           <div className={styles["hint-title"]}>{prompt.title}</div>
           <div className={styles["hint-content"]}>{prompt.content}</div>
@@ -370,7 +418,7 @@ export function Chat() { const navigate = useNavigate(); const onChatBodyScroll = (e: HTMLElement) => { - const isTouchBottom = e.scrollTop + e.clientHeight >= e.scrollHeight - 20; + const isTouchBottom = e.scrollTop + e.clientHeight >= e.scrollHeight - 100; setHitBottom(isTouchBottom); }; @@ -397,7 +445,7 @@ export function Chat() { () => { const rows = inputRef.current ? autoGrowTextArea(inputRef.current) : 1; const inputRows = Math.min( - 5, + 20, Math.max(2 + Number(!isMobileScreen), rows), ); setInputRows(inputRows); @@ -566,12 +614,9 @@ export function Chat() { } }; - // Auto focus - useEffect(() => { - if (isMobileScreen) return; - inputRef.current?.focus(); - // eslint-disable-next-line react-hooks/exhaustive-deps - }, []); + const location = useLocation(); + const isChat = location.pathname === Path.Chat; + const autoFocus = !isMobileScreen || isChat; // only focus in chat page return (
@@ -762,16 +807,9 @@ export function Chat() { value={userInput} onKeyDown={onInputKeyDown} onFocus={() => setAutoScroll(true)} - onBlur={() => { - setTimeout(() => { - if (document.activeElement !== inputRef.current) { - setAutoScroll(false); - setPromptHints([]); - } - }, 100); - }} - autoFocus + onBlur={() => setAutoScroll(false)} rows={inputRows} + autoFocus={autoFocus} /> } diff --git a/app/components/settings.tsx b/app/components/settings.tsx index 5d0a663fe..945149b7c 100644 --- a/app/components/settings.tsx +++ b/app/components/settings.tsx @@ -183,6 +183,19 @@ function UserPromptModal(props: { onClose?: () => void }) { ); } +function formatVersionDate(t: string) { + const d = new Date(+t); + const year = d.getUTCFullYear(); + const month = d.getUTCMonth() + 1; + const day = d.getUTCDate(); + + return [ + year.toString(), + month.toString().padStart(2, "0"), + day.toString().padStart(2, "0"), + ].join(""); +} + export function Settings() { const navigate = useNavigate(); const [showEmojiPicker, setShowEmojiPicker] = useState(false); @@ -193,8 +206,8 @@ export function Settings() { const updateStore = useUpdateStore(); const [checkingUpdate, setCheckingUpdate] = useState(false); - const currentVersion = updateStore.version; - const remoteId = updateStore.remoteVersion; + const currentVersion = formatVersionDate(updateStore.version); + const remoteId = formatVersionDate(updateStore.remoteVersion); const hasNewVersion = currentVersion !== remoteId; function checkUpdate(force = false) { @@ -202,6 +215,15 @@ export function Settings() { updateStore.getLatestVersion(force).then(() => { setCheckingUpdate(false); }); + + console.log( + "[Update] local version ", + new Date(+updateStore.version).toLocaleString(), + ); + console.log( + "[Update] remote version ", + new Date(+updateStore.remoteVersion).toLocaleString(), + ); } const usage = { @@ -466,19 +488,21 @@ export function Settings() { <> )} - - { - accessStore.updateToken(e.currentTarget.value); - }} - /> - + {!accessStore.hideUserApiKey ? 
( + + { + accessStore.updateToken(e.currentTarget.value); + }} + /> + + ) : null} { if (config.dontShowMaskSplashScreen) { chatStore.newSession(); + navigate(Path.Chat); } else { navigate(Path.NewChat); } diff --git a/app/config/build.ts b/app/config/build.ts index 49205c9b9..79ed5d3e8 100644 --- a/app/config/build.ts +++ b/app/config/build.ts @@ -1,13 +1,10 @@ const COMMIT_ID: string = (() => { try { const childProcess = require("child_process"); - return ( - childProcess - // .execSync("git describe --tags --abbrev=0") - .execSync("git rev-parse --short HEAD") - .toString() - .trim() - ); + return childProcess + .execSync('git log -1 --format="%at000" --date=unix') + .toString() + .trim(); } catch (e) { console.error("[Build Config] No git or not from git repo."); return "unknown"; diff --git a/app/config/server.ts b/app/config/server.ts index 798177e59..c1cf439b7 100644 --- a/app/config/server.ts +++ b/app/config/server.ts @@ -7,6 +7,7 @@ declare global { CODE?: string; PROXY_URL?: string; VERCEL?: string; + HIDE_USER_API_KEY?: string; // disable user's api key input } } } @@ -38,5 +39,6 @@ export const getServerSideConfig = () => { needCode: ACCESS_CODES.size > 0, proxyUrl: process.env.PROXY_URL, isVercel: !!process.env.VERCEL, + hideUserApiKey: !!process.env.HIDE_USER_API_KEY, }; }; diff --git a/app/locales/index.ts b/app/locales/index.ts index 2ce59261c..40f0a1ade 100644 --- a/app/locales/index.ts +++ b/app/locales/index.ts @@ -22,6 +22,7 @@ export const AllLangs = [ export type Lang = (typeof AllLangs)[number]; const LANG_KEY = "lang"; +const DEFAULT_LANG = "en"; function getItem(key: string) { try { @@ -41,7 +42,8 @@ function getLanguage() { try { return navigator.language.toLowerCase(); } catch { - return "cn"; + console.log("[Lang] failed to detect user lang."); + return DEFAULT_LANG; } } @@ -60,7 +62,7 @@ export function getLang(): Lang { } } - return "en"; + return DEFAULT_LANG; } export function changeLang(lang: Lang) { diff --git a/app/requests.ts b/app/requests.ts index 582ab4862..6a1553943 100644 --- a/app/requests.ts +++ b/app/requests.ts @@ -44,29 +44,33 @@ const makeRequestParam = ( function getHeaders() { const accessStore = useAccessStore.getState(); - let headers: Record = {}; + const headers = { + Authorization: "", + }; - if (accessStore.enabledAccessControl()) { - headers["access-code"] = accessStore.accessCode; - } + const makeBearer = (token: string) => `Bearer ${token.trim()}`; + const validString = (x: string) => x && x.length > 0; - if (accessStore.token && accessStore.token.length > 0) { - headers["token"] = accessStore.token; + // use user's api key first + if (validString(accessStore.token)) { + headers.Authorization = makeBearer(accessStore.token); + } else if ( + accessStore.enabledAccessControl() && + validString(accessStore.accessCode) + ) { + headers.Authorization = makeBearer(accessStore.accessCode); } return headers; } export function requestOpenaiClient(path: string) { + const openaiUrl = useAccessStore.getState().openaiUrl; return (body: any, method = "POST") => - fetch("/api/openai", { + fetch(openaiUrl + path, { method, - headers: { - "Content-Type": "application/json", - path, - ...getHeaders(), - }, body: body && JSON.stringify(body), + headers: getHeaders(), }); } @@ -161,16 +165,17 @@ export async function requestChatStream( const reqTimeoutId = setTimeout(() => controller.abort(), TIME_OUT_MS); try { - const res = await fetch("/api/chat-stream", { + const openaiUrl = useAccessStore.getState().openaiUrl; + const res = await fetch(openaiUrl + 
"v1/chat/completions", { method: "POST", headers: { "Content-Type": "application/json", - path: "v1/chat/completions", ...getHeaders(), }, body: JSON.stringify(req), signal: controller.signal, }); + clearTimeout(reqTimeoutId); let responseText = ""; diff --git a/app/store/access.ts b/app/store/access.ts index e72052b43..663e1c6b1 100644 --- a/app/store/access.ts +++ b/app/store/access.ts @@ -1,12 +1,15 @@ import { create } from "zustand"; import { persist } from "zustand/middleware"; import { StoreKey } from "../constant"; +import { BOT_HELLO } from "./chat"; export interface AccessControlStore { accessCode: string; token: string; needCode: boolean; + hideUserApiKey: boolean; + openaiUrl: string; updateToken: (_: string) => void; updateCode: (_: string) => void; @@ -23,16 +26,19 @@ export const useAccessStore = create()( token: "", accessCode: "", needCode: true, + hideUserApiKey: false, + openaiUrl: "/api/openai/", + enabledAccessControl() { get().fetch(); return get().needCode; }, updateCode(code: string) { - set((state) => ({ accessCode: code })); + set(() => ({ accessCode: code })); }, updateToken(token: string) { - set((state) => ({ token })); + set(() => ({ token })); }, isAuthorized() { // has token or has code or disabled access control @@ -51,6 +57,10 @@ export const useAccessStore = create()( .then((res: DangerConfig) => { console.log("[Config] got config from server", res); set(() => ({ ...res })); + + if ((res as any).botHello) { + BOT_HELLO.content = (res as any).botHello; + } }) .catch(() => { console.error("[Config] failed to fetch config"); diff --git a/app/store/update.ts b/app/store/update.ts index 888741b8b..8d8808220 100644 --- a/app/store/update.ts +++ b/app/store/update.ts @@ -53,10 +53,9 @@ export const useUpdateStore = create()( })); try { - // const data = await (await fetch(FETCH_TAG_URL)).json(); - // const remoteId = data[0].name as string; const data = await (await fetch(FETCH_COMMIT_URL)).json(); - const remoteId = (data[0].sha as string).substring(0, 7); + const remoteCommitTime = data[0].commit.committer.date; + const remoteId = new Date(remoteCommitTime).getTime().toString(); set(() => ({ remoteVersion: remoteId, })); diff --git a/middleware.ts b/middleware.ts deleted file mode 100644 index d16a812d9..000000000 --- a/middleware.ts +++ /dev/null @@ -1,72 +0,0 @@ -import { NextRequest, NextResponse } from "next/server"; -import { getServerSideConfig } from "./app/config/server"; -import md5 from "spark-md5"; - -export const config = { - matcher: ["/api/openai", "/api/chat-stream"], -}; - -const serverConfig = getServerSideConfig(); - -function getIP(req: NextRequest) { - let ip = req.ip ?? req.headers.get("x-real-ip"); - const forwardedFor = req.headers.get("x-forwarded-for"); - - if (!ip && forwardedFor) { - ip = forwardedFor.split(",").at(0) ?? ""; - } - - return ip; -} - -export function middleware(req: NextRequest) { - const accessCode = req.headers.get("access-code"); - const token = req.headers.get("token"); - const hashedCode = md5.hash(accessCode ?? 
"").trim(); - - console.log("[Auth] allowed hashed codes: ", [...serverConfig.codes]); - console.log("[Auth] got access code:", accessCode); - console.log("[Auth] hashed access code:", hashedCode); - console.log("[User IP] ", getIP(req)); - console.log("[Time] ", new Date().toLocaleString()); - - if (serverConfig.needCode && !serverConfig.codes.has(hashedCode) && !token) { - return NextResponse.json( - { - error: true, - needAccessCode: true, - msg: "Please go settings page and fill your access code.", - }, - { - status: 401, - }, - ); - } - - // inject api key - if (!token) { - const apiKey = serverConfig.apiKey; - if (apiKey) { - console.log("[Auth] set system token"); - req.headers.set("token", apiKey); - } else { - return NextResponse.json( - { - error: true, - msg: "Empty Api Key", - }, - { - status: 401, - }, - ); - } - } else { - console.log("[Auth] set user token"); - } - - return NextResponse.next({ - request: { - headers: req.headers, - }, - }); -} diff --git a/next.config.js b/next.config.js deleted file mode 100644 index f7d5ff086..000000000 --- a/next.config.js +++ /dev/null @@ -1,18 +0,0 @@ -/** @type {import('next').NextConfig} */ - -const nextConfig = { - experimental: { - appDir: true, - }, - webpack(config) { - config.module.rules.push({ - test: /\.svg$/, - use: ["@svgr/webpack"], - }); - - return config; - }, - output: "standalone", -}; - -module.exports = nextConfig; diff --git a/next.config.mjs b/next.config.mjs new file mode 100644 index 000000000..2a96df23e --- /dev/null +++ b/next.config.mjs @@ -0,0 +1,32 @@ +/** @type {import('next').NextConfig} */ + +const nextConfig = { + experimental: { + appDir: true, + }, + async rewrites() { + const ret = []; + + const apiUrl = process.env.API_URL; + if (apiUrl) { + console.log("[Next] using api url ", apiUrl); + ret.push({ + source: "/api/:path*", + destination: `${apiUrl}/:path*`, + }); + } + + return ret; + }, + webpack(config) { + config.module.rules.push({ + test: /\.svg$/, + use: ["@svgr/webpack"], + }); + + return config; + }, + output: "standalone", +}; + +export default nextConfig;