feat: close #2 add account balance check

This commit is contained in:
Yifei Zhang 2023-03-29 17:45:26 +00:00
parent 45088a3e06
commit 447dec9444
14 changed files with 245 additions and 99 deletions

View File

@ -78,9 +78,9 @@ This project will be continuously maintained. If you want to keep the code repos
You can star or watch this project or follow the author to get release notifications in time.
## 访问控制 Access Control
## 配置密码 Password
本项目提供有限的权限控制功能,请在环境变量页增加名为 `CODE` 的环境变量,值为用英文逗号分隔的自定义控制码:
本项目提供有限的权限控制功能,请在 Vercel 项目控制面板的环境变量页增加名为 `CODE` 的环境变量,值为用英文逗号分隔的自定义码:
```
code1,code2,code3
@ -88,7 +88,7 @@ code1,code2,code3
增加或修改该环境变量后,请**重新部署**项目使改动生效。
This project provides limited access control. Please add an environment variable named `CODE` on the environment variables page. The value should be a custom control code separated by comma like this:
This project provides limited access control. Please add an environment variable named `CODE` on the Vercel environment variables page. The value should be passwords separated by commas like this:
```
code1,code2,code3
@ -96,6 +96,38 @@ code1,code2,code3
After adding or modifying this environment variable, please redeploy the project for the changes to take effect.
## 环境变量 Environment Variables
### `OPENAI_API_KEY` (required)
OpenAI 密钥。
Your openai api key.
### `CODE` (optional)
访问密码,可选,可以使用逗号隔开多个密码。
Access password, separated by commas.
### `BASE_URL` (optional)
> Default: `api.openai.com`
OpenAI 接口代理 URL。
Override openai api request base url.
### `PROTOCOL` (optional)
> Default: `https`
> Values: `http` | `https`
OpenAI 接口协议。
Override openai api request protocol.
## 开发 Development
点击下方按钮,开始二次开发:
@ -118,11 +150,11 @@ OPENAI_API_KEY=<your api key here>
2. 执行 `yarn install && yarn dev` 即可。
### 本地部署 Local Deployment
```shell
bash <(curl -s https://raw.githubusercontent.com/Yidadaa/ChatGPT-Next-Web/main/scripts/setup.sh)
```
### 容器部署 Docker Deployment
```shell
@ -157,6 +189,7 @@ If you would like to contribute your API key, you can email it to the author and
[@hoochanlon](https://github.com/hoochanlon)
### 贡献者 Contributor
[Contributors](https://github.com/Yidadaa/ChatGPT-Next-Web/graphs/contributors)
## LICENSE

View File

@ -1,26 +1,12 @@
import { createParser } from "eventsource-parser";
import { NextRequest } from "next/server";
import { requestOpenai } from "../common";
async function createStream(req: NextRequest) {
const encoder = new TextEncoder();
const decoder = new TextDecoder();
let apiKey = process.env.OPENAI_API_KEY;
const userApiKey = req.headers.get("token");
if (userApiKey) {
apiKey = userApiKey;
console.log("[Stream] using user api key");
}
const res = await fetch("https://api.openai.com/v1/chat/completions", {
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${apiKey}`,
},
method: "POST",
body: req.body,
});
const res = await requestOpenai(req);
const stream = new ReadableStream({
async start(controller) {

View File

@ -1 +0,0 @@
config.ts

View File

@ -1,29 +0,0 @@
import { OpenAIApi, Configuration } from "openai";
import { ChatRequest } from "./typing";
/**
 * Legacy chat endpoint: forwards a ChatCompletion request to OpenAI via the
 * official SDK. A client-supplied key (header "token") overrides the server's
 * OPENAI_API_KEY so users can bypass access control with their own key.
 */
export async function POST(req: Request) {
  try {
    let apiKey = process.env.OPENAI_API_KEY;

    const userApiKey = req.headers.get("token");
    if (userApiKey) {
      apiKey = userApiKey;
    }

    const openai = new OpenAIApi(
      new Configuration({
        apiKey,
      }),
    );

    const requestBody = (await req.json()) as ChatRequest;
    const completion = await openai.createChatCompletion({
      ...requestBody,
    });

    return new Response(JSON.stringify(completion.data));
  } catch (e) {
    console.error("[Chat] ", e);
    // JSON.stringify on an Error instance produces "{}" (its properties are
    // non-enumerable), and the original returned it with an implicit 200.
    // Serialize the message explicitly and signal failure with a 500, matching
    // the error shape used by the newer /api/openai route.
    const msg = e instanceof Error ? e.message : String(e);
    return new Response(JSON.stringify({ error: true, msg }), {
      status: 500,
    });
  }
}

22
app/api/common.ts Normal file
View File

@ -0,0 +1,22 @@
import { NextRequest } from "next/server";
// Upstream API defaults; both pieces can be overridden via environment
// variables to point the proxy at a mirror or self-hosted gateway.
const OPENAI_URL = "api.openai.com";
const DEFAULT_PROTOCOL = "https";
// PROTOCOL: "http" | "https" (see README); BASE_URL: host of the OpenAI-compatible API.
const PROTOCOL = process.env.PROTOCOL ?? DEFAULT_PROTOCOL;
const BASE_URL = process.env.BASE_URL ?? OPENAI_URL;
/**
 * Forwards the incoming request to the OpenAI API.
 *
 * The target path comes from the "path" header and the bearer token from the
 * "token" header (the middleware injects the server key when the client sent
 * none — NOTE(review): that guarantee lives outside this module; confirm).
 * Returns the raw upstream fetch Response.
 */
export async function requestOpenai(req: NextRequest) {
  const bearerToken = req.headers.get("token");
  const upstreamPath = req.headers.get("path");

  console.log("[Proxy] ", upstreamPath);

  const upstreamUrl = `${PROTOCOL}://${BASE_URL}/${upstreamPath}`;

  return fetch(upstreamUrl, {
    method: req.method,
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${bearerToken}`,
    },
    // Stream the client body straight through without buffering.
    body: req.body,
  });
}

28
app/api/openai/route.ts Normal file
View File

@ -0,0 +1,28 @@
import { NextRequest, NextResponse } from "next/server";
import { requestOpenai } from "../common";
/**
 * Generic proxy handler: relays the request to OpenAI through requestOpenai
 * and streams the upstream body back. Network/runtime failures become a
 * structured JSON 500 response.
 */
async function makeRequest(req: NextRequest) {
  try {
    const res = await requestOpenai(req);
    // Propagate the upstream HTTP status. The original `new Response(res.body)`
    // defaulted to 200, so an OpenAI 401/429/500 was silently reported to the
    // client as success.
    return new Response(res.body, {
      status: res.status,
      statusText: res.statusText,
    });
  } catch (e) {
    console.error("[OpenAI] ", req.body, e);
    return NextResponse.json(
      {
        error: true,
        msg: JSON.stringify(e),
      },
      {
        status: 500,
      },
    );
  }
}

export async function POST(req: NextRequest) {
  return makeRequest(req);
}

export async function GET(req: NextRequest) {
  return makeRequest(req);
}

View File

@ -27,6 +27,7 @@ import { getCurrentCommitId } from "../utils";
import Link from "next/link";
import { UPDATE_URL } from "../constant";
import { SearchService, usePromptStore } from "../store/prompt";
import { requestUsage } from "../requests";
function SettingItem(props: {
title: string;
@ -54,7 +55,7 @@ export function Settings(props: { closeSettings: () => void }) {
state.updateConfig,
state.resetConfig,
state.clearAllData,
]
],
);
const updateStore = useUpdateStore();
@ -70,14 +71,34 @@ export function Settings(props: { closeSettings: () => void }) {
});
}
const [usage, setUsage] = useState<{
granted?: number;
used?: number;
}>();
const [loadingUsage, setLoadingUsage] = useState(false);
// Fetches the account's credit-grant totals and stores them in `usage`;
// `loadingUsage` flags the in-flight request so the UI can show progress.
// requestUsage resolves to undefined on failure, hence the `res?.` guards.
function checkUsage() {
  setLoadingUsage(true);
  requestUsage()
    .then((res) =>
      setUsage({
        granted: res?.total_granted,
        used: res?.total_used,
      }),
    )
    .finally(() => {
      // Always clear the spinner, whether the request succeeded or not.
      setLoadingUsage(false);
    });
}
useEffect(() => {
checkUpdate();
checkUsage();
}, []);
const accessStore = useAccessStore();
const enabledAccessControl = useMemo(
() => accessStore.enabledAccessControl(),
[]
[],
);
const promptStore = usePromptStore();
@ -179,7 +200,7 @@ export function Settings(props: { closeSettings: () => void }) {
onChange={(e) => {
updateConfig(
(config) =>
(config.submitKey = e.target.value as any as SubmitKey)
(config.submitKey = e.target.value as any as SubmitKey),
);
}}
>
@ -199,7 +220,7 @@ export function Settings(props: { closeSettings: () => void }) {
value={config.theme}
onChange={(e) => {
updateConfig(
(config) => (config.theme = e.target.value as any as Theme)
(config) => (config.theme = e.target.value as any as Theme),
);
}}
>
@ -240,7 +261,7 @@ export function Settings(props: { closeSettings: () => void }) {
onChange={(e) =>
updateConfig(
(config) =>
(config.fontSize = Number.parseInt(e.currentTarget.value))
(config.fontSize = Number.parseInt(e.currentTarget.value)),
)
}
></input>
@ -253,7 +274,7 @@ export function Settings(props: { closeSettings: () => void }) {
checked={config.tightBorder}
onChange={(e) =>
updateConfig(
(config) => (config.tightBorder = e.currentTarget.checked)
(config) => (config.tightBorder = e.currentTarget.checked),
)
}
></input>
@ -271,7 +292,7 @@ export function Settings(props: { closeSettings: () => void }) {
onChange={(e) =>
updateConfig(
(config) =>
(config.disablePromptHint = e.currentTarget.checked)
(config.disablePromptHint = e.currentTarget.checked),
)
}
></input>
@ -281,7 +302,7 @@ export function Settings(props: { closeSettings: () => void }) {
title={Locale.Settings.Prompt.List}
subTitle={Locale.Settings.Prompt.ListCount(
builtinCount,
customCount
customCount,
)}
>
<IconButton
@ -324,6 +345,28 @@ export function Settings(props: { closeSettings: () => void }) {
></input>
</SettingItem>
<SettingItem
title={Locale.Settings.Usage.Title}
subTitle={
loadingUsage
? Locale.Settings.Usage.IsChecking
: Locale.Settings.Usage.SubTitle(
usage?.granted ?? "[?]",
usage?.used ?? "[?]",
)
}
>
{loadingUsage ? (
<div />
) : (
<IconButton
icon={<ResetIcon></ResetIcon>}
text={Locale.Settings.Usage.Check}
onClick={checkUsage}
/>
)}
</SettingItem>
<SettingItem
title={Locale.Settings.HistoryCount.Title}
subTitle={Locale.Settings.HistoryCount.SubTitle}
@ -338,7 +381,7 @@ export function Settings(props: { closeSettings: () => void }) {
onChange={(e) =>
updateConfig(
(config) =>
(config.historyMessageCount = e.target.valueAsNumber)
(config.historyMessageCount = e.target.valueAsNumber),
)
}
></input>
@ -357,7 +400,7 @@ export function Settings(props: { closeSettings: () => void }) {
updateConfig(
(config) =>
(config.compressMessageLengthThreshold =
e.currentTarget.valueAsNumber)
e.currentTarget.valueAsNumber),
)
}
></input>
@ -370,7 +413,8 @@ export function Settings(props: { closeSettings: () => void }) {
value={config.modelConfig.model}
onChange={(e) => {
updateConfig(
(config) => (config.modelConfig.model = e.currentTarget.value)
(config) =>
(config.modelConfig.model = e.currentTarget.value),
);
}}
>
@ -395,7 +439,7 @@ export function Settings(props: { closeSettings: () => void }) {
updateConfig(
(config) =>
(config.modelConfig.temperature =
e.currentTarget.valueAsNumber)
e.currentTarget.valueAsNumber),
);
}}
></input>
@ -413,7 +457,7 @@ export function Settings(props: { closeSettings: () => void }) {
updateConfig(
(config) =>
(config.modelConfig.max_tokens =
e.currentTarget.valueAsNumber)
e.currentTarget.valueAsNumber),
)
}
></input>
@ -432,7 +476,7 @@ export function Settings(props: { closeSettings: () => void }) {
updateConfig(
(config) =>
(config.modelConfig.presence_penalty =
e.currentTarget.valueAsNumber)
e.currentTarget.valueAsNumber),
);
}}
></input>

View File

@ -64,6 +64,7 @@ const cn = {
Title: "字体大小",
SubTitle: "聊天内容的字体大小",
},
Update: {
Version: (x: string) => `当前版本:${x}`,
IsLatest: "已是最新版本",
@ -98,6 +99,14 @@ const cn = {
SubTitle: "使用自己的 Key 可绕过受控访问限制",
Placeholder: "OpenAI API Key",
},
Usage: {
Title: "账户余额",
SubTitle(granted: any, used: any) {
return `总共 $${granted},已使用 $${used}`;
},
IsChecking: "正在检查…",
Check: "重新检查",
},
AccessCode: {
Title: "访问码",
SubTitle: "现在是受控访问状态",

View File

@ -101,6 +101,14 @@ const en: LocaleType = {
SubTitle: "Use your key to ignore access code limit",
Placeholder: "OpenAI API Key",
},
Usage: {
Title: "Account Balance",
SubTitle(granted: any, used: any) {
return `Total $${granted}, Used $${used}`;
},
IsChecking: "Checking...",
Check: "Check Again",
},
AccessCode: {
Title: "Access Code",
SubTitle: "Access control enabled",

View File

@ -99,6 +99,14 @@ const tw: LocaleType = {
SubTitle: "使用自己的 Key 可規避受控訪問限制",
Placeholder: "OpenAI API Key",
},
Usage: {
Title: "帳戶餘額",
SubTitle(granted: any, used: any) {
return `總共 $${granted},已使用 $${used}`;
},
IsChecking: "正在檢查…",
Check: "重新檢查",
},
AccessCode: {
Title: "訪問碼",
SubTitle: "現在是受控訪問狀態",

View File

@ -1,4 +1,4 @@
import type { ChatRequest, ChatReponse } from "./api/chat/typing";
import type { ChatRequest, ChatReponse } from "./api/openai/typing";
import { filterConfig, Message, ModelConfig, useAccessStore } from "./store";
import Locale from "./locales";
@ -9,7 +9,7 @@ const makeRequestParam = (
options?: {
filterBot?: boolean;
stream?: boolean;
}
},
): ChatRequest => {
let sendMessages = messages.map((v) => ({
role: v.role,
@ -42,19 +42,48 @@ function getHeaders() {
return headers;
}
export function requestOpenaiClient(path: string) {
return (body: any, method = "POST") =>
fetch("/api/openai", {
method,
headers: {
"Content-Type": "application/json",
path,
...getHeaders(),
},
body: body && JSON.stringify(body),
});
}
export async function requestChat(messages: Message[]) {
const req: ChatRequest = makeRequestParam(messages, { filterBot: true });
const res = await fetch("/api/chat", {
method: "POST",
headers: {
"Content-Type": "application/json",
...getHeaders(),
},
body: JSON.stringify(req),
});
const res = await requestOpenaiClient("v1/chat/completions")(req);
return (await res.json()) as ChatReponse;
try {
const response = (await res.json()) as ChatReponse;
return response;
} catch (error) {
console.error("[Request Chat] ", error, res.body);
}
}
/**
 * Queries the billing endpoint for the account's credit grants.
 *
 * Resolves to the parsed totals, or undefined when the response body is not
 * valid JSON (the error is logged, not rethrown).
 */
export async function requestUsage() {
  const fetchUsage = requestOpenaiClient("dashboard/billing/credit_grants");
  const res = await fetchUsage(null, "GET");

  try {
    return (await res.json()) as {
      total_available: number;
      total_granted: number;
      total_used: number;
    };
  } catch (error) {
    console.error("[Request usage] ", error, res.body);
  }
}
export async function requestChatStream(
@ -65,7 +94,7 @@ export async function requestChatStream(
onMessage: (message: string, done: boolean) => void;
onError: (error: Error) => void;
onController?: (controller: AbortController) => void;
}
},
) {
const req = makeRequestParam(messages, {
stream: true,
@ -87,6 +116,7 @@ export async function requestChatStream(
method: "POST",
headers: {
"Content-Type": "application/json",
path: "v1/chat/completions",
...getHeaders(),
},
body: JSON.stringify(req),
@ -129,7 +159,7 @@ export async function requestChatStream(
responseText = Locale.Error.Unauthorized;
finish();
} else {
console.error("Stream Error");
console.error("Stream Error", res.body);
options?.onError(new Error("Stream Error"));
}
} catch (err) {
@ -149,7 +179,7 @@ export async function requestWithPrompt(messages: Message[], prompt: string) {
const res = await requestChat(messages);
return res.choices.at(0)?.message?.content ?? "";
return res?.choices?.at(0)?.message?.content ?? "";
}
// To store message streaming controller
@ -159,7 +189,7 @@ export const ControllerPool = {
addController(
sessionIndex: number,
messageIndex: number,
controller: AbortController
controller: AbortController,
) {
const key = this.key(sessionIndex, messageIndex);
this.controllers[key] = controller;

View File

@ -6,7 +6,7 @@ export const config = {
matcher: ["/api/chat", "/api/chat-stream"],
};
export function middleware(req: NextRequest, res: NextResponse) {
export function middleware(req: NextRequest) {
const accessCode = req.headers.get("access-code");
const token = req.headers.get("token");
const hashedCode = md5.hash(accessCode ?? "").trim();
@ -18,14 +18,33 @@ export function middleware(req: NextRequest, res: NextResponse) {
if (ACCESS_CODES.size > 0 && !ACCESS_CODES.has(hashedCode) && !token) {
return NextResponse.json(
{
error: true,
needAccessCode: true,
hint: "Please go settings page and fill your access code.",
msg: "Please go settings page and fill your access code.",
},
{
status: 401,
}
},
);
}
// inject api key
if (!token) {
const apiKey = process.env.OPENAI_API_KEY;
if (apiKey) {
req.headers.set("token", apiKey);
} else {
return NextResponse.json(
{
error: true,
msg: "Empty Api Key",
},
{
status: 401,
},
);
}
}
return NextResponse.next();
}

View File

@ -1,24 +1,13 @@
const CHATGPT_NEXT_WEB_CACHE = "chatgpt-next-web-cache";
self.addEventListener('activate', function (event) {
console.log('ServiceWorker activated.');
self.addEventListener("activate", function (event) {
console.log("ServiceWorker activated.");
});
self.addEventListener('install', function (event) {
self.addEventListener("install", function (event) {
event.waitUntil(
caches.open(CHATGPT_NEXT_WEB_CACHE)
.then(function (cache) {
return cache.addAll([
]);
})
caches.open(CHATGPT_NEXT_WEB_CACHE).then(function (cache) {
return cache.addAll([]);
}),
);
});
// Cache-first fetch strategy: answer from the cache when a match exists,
// otherwise fall through to the network. Responses are not written back to
// the cache here, so only entries pre-populated at install time are served.
self.addEventListener('fetch', function (event) {
event.respondWith(
caches.match(event.request)
.then(function (response) {
return response || fetch(event.request);
})
);
});