From b799213a144d2686f49614271b742edec4f88199 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 18 Oct 2023 23:52:00 +0000 Subject: [PATCH 01/40] chore(deps): bump @babel/traverse from 7.21.3 to 7.23.2 Bumps [@babel/traverse](https://github.com/babel/babel/tree/HEAD/packages/babel-traverse) from 7.21.3 to 7.23.2. - [Release notes](https://github.com/babel/babel/releases) - [Changelog](https://github.com/babel/babel/blob/main/CHANGELOG.md) - [Commits](https://github.com/babel/babel/commits/v7.23.2/packages/babel-traverse) --- updated-dependencies: - dependency-name: "@babel/traverse" dependency-type: indirect ... Signed-off-by: dependabot[bot] --- yarn.lock | 113 +++++++++++++++++++++++++++++++++++++++++++++++------- 1 file changed, 100 insertions(+), 13 deletions(-) diff --git a/yarn.lock b/yarn.lock index 8adbf7dc2..8c0a3e231 100644 --- a/yarn.lock +++ b/yarn.lock @@ -22,6 +22,14 @@ dependencies: "@babel/highlight" "^7.18.6" +"@babel/code-frame@^7.22.13": + version "7.22.13" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.22.13.tgz#e3c1c099402598483b7a8c46a721d1038803755e" + integrity sha512-XktuhWlJ5g+3TJXc5upd9Ks1HutSArik6jf2eAjYFyIOf4ej3RN+184cZbzDvbPnuTJIUhPKKJE3cIsYTiAT3w== + dependencies: + "@babel/highlight" "^7.22.13" + chalk "^2.4.2" + "@babel/compat-data@^7.17.7", "@babel/compat-data@^7.20.1", "@babel/compat-data@^7.20.5": version "7.21.0" resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.21.0.tgz#c241dc454e5b5917e40d37e525e2f4530c399298" @@ -58,6 +66,16 @@ "@jridgewell/trace-mapping" "^0.3.17" jsesc "^2.5.1" +"@babel/generator@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.23.0.tgz#df5c386e2218be505b34837acbcb874d7a983420" + integrity sha512-lN85QRR+5IbYrMWM6Y4pE/noaQtg4pNiqeNGX60eqOfo6gtEj6uw/JagelB8vVztSd7R6M5n1+PQkDbHbBRU4g== + dependencies: + "@babel/types" "^7.23.0" + "@jridgewell/gen-mapping" "^0.3.2" + "@jridgewell/trace-mapping" "^0.3.17" + jsesc "^2.5.1" + "@babel/helper-annotate-as-pure@^7.18.6": version "7.18.6" resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.18.6.tgz#eaa49f6f80d5a33f9a5dd2276e6d6e451be0a6bb" @@ -123,6 +141,11 @@ resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz#0c0cee9b35d2ca190478756865bb3528422f51be" integrity sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg== +"@babel/helper-environment-visitor@^7.22.20": + version "7.22.20" + resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz#96159db61d34a29dba454c959f5ae4a649ba9167" + integrity sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA== + "@babel/helper-explode-assignable-expression@^7.18.6": version "7.18.6" resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.18.6.tgz#41f8228ef0a6f1a036b8dfdfec7ce94f9a6bc096" @@ -138,6 +161,14 @@ "@babel/template" "^7.20.7" "@babel/types" "^7.21.0" +"@babel/helper-function-name@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz#1f9a3cdbd5b2698a670c30d2735f9af95ed52759" + integrity sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw== + dependencies: + 
"@babel/template" "^7.22.15" + "@babel/types" "^7.23.0" + "@babel/helper-hoist-variables@^7.18.6": version "7.18.6" resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz#d4d2c8fb4baeaa5c68b99cc8245c56554f926678" @@ -145,6 +176,13 @@ dependencies: "@babel/types" "^7.18.6" +"@babel/helper-hoist-variables@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz#c01a007dac05c085914e8fb652b339db50d823bb" + integrity sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw== + dependencies: + "@babel/types" "^7.22.5" + "@babel/helper-member-expression-to-functions@^7.20.7", "@babel/helper-member-expression-to-functions@^7.21.0": version "7.21.0" resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.21.0.tgz#319c6a940431a133897148515877d2f3269c3ba5" @@ -228,16 +266,33 @@ dependencies: "@babel/types" "^7.18.6" +"@babel/helper-split-export-declaration@^7.22.6": + version "7.22.6" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz#322c61b7310c0997fe4c323955667f18fcefb91c" + integrity sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g== + dependencies: + "@babel/types" "^7.22.5" + "@babel/helper-string-parser@^7.19.4": version "7.19.4" resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.19.4.tgz#38d3acb654b4701a9b77fb0615a96f775c3a9e63" integrity sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw== +"@babel/helper-string-parser@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz#533f36457a25814cf1df6488523ad547d784a99f" + integrity sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw== + "@babel/helper-validator-identifier@^7.18.6", "@babel/helper-validator-identifier@^7.19.1": version "7.19.1" resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz#7eea834cf32901ffdc1a7ee555e2f9c27e249ca2" integrity sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w== +"@babel/helper-validator-identifier@^7.22.20": + version "7.22.20" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz#c4ae002c61d2879e724581d96665583dbc1dc0e0" + integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A== + "@babel/helper-validator-option@^7.18.6", "@babel/helper-validator-option@^7.21.0": version "7.21.0" resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.21.0.tgz#8224c7e13ace4bafdc4004da2cf064ef42673180" @@ -271,11 +326,25 @@ chalk "^2.0.0" js-tokens "^4.0.0" +"@babel/highlight@^7.22.13": + version "7.22.20" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.22.20.tgz#4ca92b71d80554b01427815e06f2df965b9c1f54" + integrity sha512-dkdMCN3py0+ksCgYmGG8jKeGA/8Tk+gJwSYYlFGxG5lmhfKNoAy004YpLxpS1W2J8m/EK2Ew+yOs9pVRwO89mg== + dependencies: + "@babel/helper-validator-identifier" "^7.22.20" + chalk "^2.4.2" + js-tokens "^4.0.0" + "@babel/parser@^7.20.7", "@babel/parser@^7.21.3": version "7.21.3" resolved 
"https://registry.yarnpkg.com/@babel/parser/-/parser-7.21.3.tgz#1d285d67a19162ff9daa358d4cb41d50c06220b3" integrity sha512-lobG0d7aOfQRXh8AyklEAgZGvA4FShxo6xQbUrrT/cNBPUdIDojlokwJsQyCC/eKia7ifqM0yP+2DRZ4WKw2RQ== +"@babel/parser@^7.22.15", "@babel/parser@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.0.tgz#da950e622420bf96ca0d0f2909cdddac3acd8719" + integrity sha512-vvPKKdMemU85V9WE/l5wZEmImpCtLqbnTvqDS2U1fJ96KrxoW7KrXhNsNCblQlg8Ck4b85yxdTyelsMUgFUXiw== + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.18.6": version "7.18.6" resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.18.6.tgz#da5b8f9a580acdfbe53494dba45ea389fb09a4d2" @@ -975,19 +1044,28 @@ "@babel/parser" "^7.20.7" "@babel/types" "^7.20.7" -"@babel/traverse@^7.20.5", "@babel/traverse@^7.20.7", "@babel/traverse@^7.21.0", "@babel/traverse@^7.21.2", "@babel/traverse@^7.21.3": - version "7.21.3" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.21.3.tgz#4747c5e7903d224be71f90788b06798331896f67" - integrity sha512-XLyopNeaTancVitYZe2MlUEvgKb6YVVPXzofHgqHijCImG33b/uTurMS488ht/Hbsb2XK3U2BnSTxKVNGV3nGQ== +"@babel/template@^7.22.15": + version "7.22.15" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.15.tgz#09576efc3830f0430f4548ef971dde1350ef2f38" + integrity sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w== dependencies: - "@babel/code-frame" "^7.18.6" - "@babel/generator" "^7.21.3" - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-function-name" "^7.21.0" - "@babel/helper-hoist-variables" "^7.18.6" - "@babel/helper-split-export-declaration" "^7.18.6" - "@babel/parser" "^7.21.3" - "@babel/types" "^7.21.3" + "@babel/code-frame" "^7.22.13" + "@babel/parser" "^7.22.15" + "@babel/types" "^7.22.15" + +"@babel/traverse@^7.20.5", "@babel/traverse@^7.20.7", "@babel/traverse@^7.21.0", "@babel/traverse@^7.21.2", "@babel/traverse@^7.21.3": + version "7.23.2" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.23.2.tgz#329c7a06735e144a506bdb2cad0268b7f46f4ad8" + integrity sha512-azpe59SQ48qG6nu2CzcMLbxUudtN+dOM9kDbUqGq3HXUJRlo7i8fvPoxQUzYgLZ4cMVmuZgm8vvBpNeRhd6XSw== + dependencies: + "@babel/code-frame" "^7.22.13" + "@babel/generator" "^7.23.0" + "@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-function-name" "^7.23.0" + "@babel/helper-hoist-variables" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.6" + "@babel/parser" "^7.23.0" + "@babel/types" "^7.23.0" debug "^4.1.0" globals "^11.1.0" @@ -1000,6 +1078,15 @@ "@babel/helper-validator-identifier" "^7.19.1" to-fast-properties "^2.0.0" +"@babel/types@^7.22.15", "@babel/types@^7.22.5", "@babel/types@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.23.0.tgz#8c1f020c9df0e737e4e247c0619f58c68458aaeb" + integrity sha512-0oIyUfKoI3mSqMvsxBdclDwxXKXAUA8v/apZbc+iSyARYou1o8ZGDxbUYyLFoW2arqS2jDGqJuZvv1d/io1axg== + dependencies: + "@babel/helper-string-parser" "^7.22.5" + "@babel/helper-validator-identifier" "^7.22.20" + to-fast-properties "^2.0.0" + "@braintree/sanitize-url@^6.0.1": version "6.0.4" resolved "https://registry.yarnpkg.com/@braintree/sanitize-url/-/sanitize-url-6.0.4.tgz#923ca57e173c6b232bbbb07347b1be982f03e783" @@ -2051,7 +2138,7 @@ chalk@5.2.0: resolved 
"https://registry.yarnpkg.com/chalk/-/chalk-5.2.0.tgz#249623b7d66869c673699fb66d65723e54dfcfb3" integrity sha512-ree3Gqw/nazQAPuJJEy+avdl7QfZMcUvmHIKgEZkGL+xOBzRvup5Hxo6LHuMceSxOabuJLJm5Yp/92R9eMmMvA== -chalk@^2.0.0: +chalk@^2.0.0, chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== From 93cee183003a5e0ced3deac5e19779ba4ba251fd Mon Sep 17 00:00:00 2001 From: "Qian (Keven) Li" Date: Tue, 12 Dec 2023 15:08:39 +0800 Subject: [PATCH 02/40] ci: add .dockerignore to exclude env files --- .dockerignore | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 .dockerignore diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 000000000..60da41dd8 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,8 @@ +# local env files +.env*.local + +# docker-compose env files +.env + +*.key +*.key.pub \ No newline at end of file From d676f88fed2f70ac4f3a2f9f7e19e9cd4725b533 Mon Sep 17 00:00:00 2001 From: Yifei Zhang Date: Fri, 15 Dec 2023 17:47:36 +0800 Subject: [PATCH 03/40] Update README_CN.md --- README_CN.md | 1 - 1 file changed, 1 deletion(-) diff --git a/README_CN.md b/README_CN.md index d73479658..08b385573 100644 --- a/README_CN.md +++ b/README_CN.md @@ -205,7 +205,6 @@ bash <(curl -s https://raw.githubusercontent.com/Yidadaa/ChatGPT-Next-Web/main/s [见项目贡献者列表](https://github.com/Yidadaa/ChatGPT-Next-Web/graphs/contributors) ### 相关项目 - - [one-api](https://github.com/songquanpeng/one-api): 一站式大模型额度管理平台,支持市面上所有主流大语言模型 ## 开源协议 From 0b731edd21c92a110ac0a904827067f2f2abbbfb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=92=8C=E5=B9=B3peaceful?= <102772286+MCheping8108@users.noreply.github.com> Date: Fri, 15 Dec 2023 21:24:37 +0800 Subject: [PATCH 04/40] fix sync.yml --- .github/workflows/sync.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/sync.yml b/.github/workflows/sync.yml index ebf5587d0..e04e30adb 100644 --- a/.github/workflows/sync.yml +++ b/.github/workflows/sync.yml @@ -24,7 +24,7 @@ jobs: id: sync uses: aormsby/Fork-Sync-With-Upstream-action@v3.4 with: - upstream_sync_repo: Yidadaa/ChatGPT-Next-Web + upstream_sync_repo: ChatGPTNextWeb/ChatGPT-Next-Web upstream_sync_branch: main target_sync_branch: main target_repo_token: ${{ secrets.GITHUB_TOKEN }} # automatically generated, no need to set From 2c63dde6c50280c7db416d48e1716f4ed47737b8 Mon Sep 17 00:00:00 2001 From: Fei Date: Sun, 17 Dec 2023 21:24:36 -0600 Subject: [PATCH 05/40] Update README.md --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 7c7a6f243..52ed68c0f 100644 --- a/README.md +++ b/README.md @@ -37,8 +37,8 @@ One-Click to get well-designed cross-platform ChatGPT web UI. 
- **Deploy for free with one-click** on Vercel in under 1 minute - Compact client (~5MB) on Linux/Windows/MacOS, [download it now](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) -- Fully compatible with self-deployed llms, recommended for use with [RWKV-Runner](https://github.com/josStorer/RWKV-Runner) or [LocalAI](https://github.com/go-skynet/LocalAI) -- Privacy first, all data stored locally in the browser +- Fully compatible with self-deployed LLMs, recommended for use with [RWKV-Runner](https://github.com/josStorer/RWKV-Runner) or [LocalAI](https://github.com/go-skynet/LocalAI) +- Privacy first, all data is stored locally in the browser - Markdown support: LaTex, mermaid, code highlight, etc. - Responsive design, dark mode and PWA - Fast first screen loading speed (~100kb), support streaming response From 860c5b8f20cab9acdad88cd0643702363a0f6722 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Dec 2023 10:29:13 +0000 Subject: [PATCH 06/40] chore(deps): bump @hello-pangea/dnd from 16.3.0 to 16.5.0 Bumps [@hello-pangea/dnd](https://github.com/hello-pangea/dnd) from 16.3.0 to 16.5.0. - [Release notes](https://github.com/hello-pangea/dnd/releases) - [Changelog](https://github.com/hello-pangea/dnd/blob/main/CHANGELOG.md) - [Commits](https://github.com/hello-pangea/dnd/compare/v16.3.0...v16.5.0) --- updated-dependencies: - dependency-name: "@hello-pangea/dnd" dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- package.json | 2 +- yarn.lock | 38 +++++++++++++++++++------------------- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/package.json b/package.json index c8673cf31..57891333b 100644 --- a/package.json +++ b/package.json @@ -17,7 +17,7 @@ }, "dependencies": { "@fortaine/fetch-event-source": "^3.0.6", - "@hello-pangea/dnd": "^16.3.0", + "@hello-pangea/dnd": "^16.5.0", "@svgr/webpack": "^6.5.1", "@vercel/analytics": "^0.1.11", "emoji-picker-react": "^4.5.15", diff --git a/yarn.lock b/yarn.lock index 340282108..16c5d6994 100644 --- a/yarn.lock +++ b/yarn.lock @@ -959,12 +959,12 @@ resolved "https://registry.yarnpkg.com/@babel/regjsgen/-/regjsgen-0.8.0.tgz#f0ba69b075e1f05fb2825b7fad991e7adbb18310" integrity sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA== -"@babel/runtime@^7.12.1", "@babel/runtime@^7.20.7", "@babel/runtime@^7.22.5", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.22.5.tgz#8564dd588182ce0047d55d7a75e93921107b57ec" - integrity sha512-ecjvYlnAaZ/KVneE/OdKYBYfgXV3Ptu6zQWmgEF7vwKhQnvVS6bjMD2XYgj+SNvQ1GfK/pjgokfPkC/2CO8CuA== +"@babel/runtime@^7.12.1", "@babel/runtime@^7.20.7", "@babel/runtime@^7.23.2", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2": + version "7.23.6" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.23.6.tgz#c05e610dc228855dc92ef1b53d07389ed8ab521d" + integrity sha512-zHd0eUrf5GZoOWVCXp6koAKQTfZV07eit6bGPmJgnZdnSAvvZee6zniW2XMF7Cmc4ISOOnPy3QaSiIJGJkVEDQ== dependencies: - regenerator-runtime "^0.13.11" + regenerator-runtime "^0.14.0" "@babel/template@^7.18.10", "@babel/template@^7.20.7": version "7.20.7" @@ -1042,16 +1042,16 @@ resolved "https://registry.npmmirror.com/@fortaine/fetch-event-source/-/fetch-event-source-3.0.6.tgz#b8552a2ca2c5202f5699b93a92be0188d422b06e" integrity 
sha512-621GAuLMvKtyZQ3IA6nlDWhV1V/7PGOTNIGLUifxt0KzM+dZIweJ6F3XvQF3QnqeNfS1N7WQ0Kil1Di/lhChEw== -"@hello-pangea/dnd@^16.3.0": - version "16.3.0" - resolved "https://registry.yarnpkg.com/@hello-pangea/dnd/-/dnd-16.3.0.tgz#3776212f812df4e8e69c42831ec8ab7ff3a087d6" - integrity sha512-RYQ/K8shtJoyNPvFWz0gfXIK7HF3P3mL9UZFGMuHB0ljRSXVgMjVFI/FxcZmakMzw6tO7NflWLriwTNBow/4vw== +"@hello-pangea/dnd@^16.5.0": + version "16.5.0" + resolved "https://registry.yarnpkg.com/@hello-pangea/dnd/-/dnd-16.5.0.tgz#f323ff9f813204818bc67648a383e8715f47c59c" + integrity sha512-n+am6O32jo/CFXciCysz83lPM3I3F58FJw4uS44TceieymcyxQSfzK5OhzPAKrVBZktmuOI6Zim9WABTMtXv4A== dependencies: - "@babel/runtime" "^7.22.5" + "@babel/runtime" "^7.23.2" css-box-model "^1.2.1" memoize-one "^6.0.0" raf-schd "^4.0.3" - react-redux "^8.1.1" + react-redux "^8.1.3" redux "^4.2.1" use-memo-one "^1.1.3" @@ -5073,10 +5073,10 @@ react-markdown@^8.0.7: unist-util-visit "^4.0.0" vfile "^5.0.0" -react-redux@^8.1.1: - version "8.1.1" - resolved "https://registry.yarnpkg.com/react-redux/-/react-redux-8.1.1.tgz#8e740f3fd864a4cd0de5ba9cdc8ad39cc9e7c81a" - integrity sha512-5W0QaKtEhj+3bC0Nj0NkqkhIv8gLADH/2kYFMTHxCVqQILiWzLv6MaLuV5wJU3BQEdHKzTfcvPN0WMS6SC1oyA== +react-redux@^8.1.3: + version "8.1.3" + resolved "https://registry.yarnpkg.com/react-redux/-/react-redux-8.1.3.tgz#4fdc0462d0acb59af29a13c27ffef6f49ab4df46" + integrity sha512-n0ZrutD7DaX/j9VscF+uTALI3oUPa/pO4Z3soOBIjuRn/FzVu6aehhysxZCLi6y7duMf52WNZGMl7CtuK5EnRw== dependencies: "@babel/runtime" "^7.12.1" "@types/hoist-non-react-statics" "^3.3.1" @@ -5133,10 +5133,10 @@ regenerate@^1.4.2: resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a" integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== -regenerator-runtime@^0.13.11: - version "0.13.11" - resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz#f6dca3e7ceec20590d07ada785636a90cdca17f9" - integrity sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg== +regenerator-runtime@^0.14.0: + version "0.14.1" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz#356ade10263f685dda125100cd862c1db895327f" + integrity sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw== regenerator-transform@^0.15.1: version "0.15.1" From 1f356241219d727eca2927f6b1579024bb1ca735 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Dec 2023 10:30:04 +0000 Subject: [PATCH 07/40] chore(deps-dev): bump @tauri-apps/cli from 1.4.0 to 1.5.8 Bumps [@tauri-apps/cli](https://github.com/tauri-apps/tauri) from 1.4.0 to 1.5.8. - [Release notes](https://github.com/tauri-apps/tauri/releases) - [Commits](https://github.com/tauri-apps/tauri/compare/@tauri-apps/cli-v1.4.0...@tauri-apps/cli-v1.5.8) --- updated-dependencies: - dependency-name: "@tauri-apps/cli" dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- package.json | 2 +- yarn.lock | 108 +++++++++++++++++++++++++-------------------------- 2 files changed, 55 insertions(+), 55 deletions(-) diff --git a/package.json b/package.json index c8673cf31..65fe3e343 100644 --- a/package.json +++ b/package.json @@ -42,7 +42,7 @@ "zustand": "^4.3.8" }, "devDependencies": { - "@tauri-apps/cli": "^1.4.0", + "@tauri-apps/cli": "^1.5.8", "@types/node": "^20.9.0", "@types/react": "^18.2.14", "@types/react-dom": "^18.2.7", diff --git a/yarn.lock b/yarn.lock index 340282108..783f5156a 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1344,71 +1344,71 @@ dependencies: tslib "^2.4.0" -"@tauri-apps/cli-darwin-arm64@1.4.0": - version "1.4.0" - resolved "https://registry.yarnpkg.com/@tauri-apps/cli-darwin-arm64/-/cli-darwin-arm64-1.4.0.tgz#e76bb8515ae31f03f2cbd440c1a09b237a79b3ac" - integrity sha512-nA/ml0SfUt6/CYLVbHmT500Y+ijqsuv5+s9EBnVXYSLVg9kbPUZJJHluEYK+xKuOj6xzyuT/+rZFMRapmJD3jQ== +"@tauri-apps/cli-darwin-arm64@1.5.8": + version "1.5.8" + resolved "https://registry.yarnpkg.com/@tauri-apps/cli-darwin-arm64/-/cli-darwin-arm64-1.5.8.tgz#28ca810b910979260dd77c92951d16340fcaa711" + integrity sha512-/AksDWfAt3NUSt8Rq2a3gTLASChKzldPVUjmJhcbtsuzFg2nx5g+hhOHxfBYzss2Te1K5mzlu+73LAMy1Sb9Gw== -"@tauri-apps/cli-darwin-x64@1.4.0": - version "1.4.0" - resolved "https://registry.yarnpkg.com/@tauri-apps/cli-darwin-x64/-/cli-darwin-x64-1.4.0.tgz#dd1472460550d0aa0ec6e699b073be2d77e5b962" - integrity sha512-ov/F6Zr+dg9B0PtRu65stFo2G0ow2TUlneqYYrkj+vA3n+moWDHfVty0raDjMLQbQt3rv3uayFMXGPMgble9OA== +"@tauri-apps/cli-darwin-x64@1.5.8": + version "1.5.8" + resolved "https://registry.yarnpkg.com/@tauri-apps/cli-darwin-x64/-/cli-darwin-x64-1.5.8.tgz#4060fb0ffcc8312cf48701df51e0e9b665f18382" + integrity sha512-gcfSh+BFRDdbIGpggZ1+5R5SgToz2A9LthH8P4ak3OHagDzDvI6ov6zy2UQE3XDWJKdnlna2rSR1dIuRZ0T9bA== -"@tauri-apps/cli-linux-arm-gnueabihf@1.4.0": - version "1.4.0" - resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm-gnueabihf/-/cli-linux-arm-gnueabihf-1.4.0.tgz#325e90e47d260ba71a499850ce769b5a6bdfd48d" - integrity sha512-zwjbiMncycXDV7doovymyKD7sCg53ouAmfgpUqEBOTY3vgBi9TwijyPhJOqoG5vUVWhouNBC08akGmE4dja15g== +"@tauri-apps/cli-linux-arm-gnueabihf@1.5.8": + version "1.5.8" + resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm-gnueabihf/-/cli-linux-arm-gnueabihf-1.5.8.tgz#00256432520edf04004962caa92cd84fbcc8b63f" + integrity sha512-ZHQYuOBGvZubPnh5n8bNaN2VMxPBZWs26960FGQWamm9569UV/TNDHb6mD0Jjk9o0f9P+f98qNhuu5Y37P+vfQ== -"@tauri-apps/cli-linux-arm64-gnu@1.4.0": - version "1.4.0" - resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm64-gnu/-/cli-linux-arm64-gnu-1.4.0.tgz#b5d8f5cba3f8f7c7d44d071681f0ab0a37f2c46e" - integrity sha512-5MCBcziqXC72mMXnkZU68mutXIR6zavDxopArE2gQtK841IlE06bIgtLi0kUUhlFJk2nhPRgiDgdLbrPlyt7fw== +"@tauri-apps/cli-linux-arm64-gnu@1.5.8": + version "1.5.8" + resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm64-gnu/-/cli-linux-arm64-gnu-1.5.8.tgz#7869571b06e8b36a072f2e0e7bb49baab9d3c868" + integrity sha512-FFs28Ew3R2EFPYKuyAIouTbp6YnR+shAmJGFNnVy7ibKHL0wxamVKqv1N5N9gUUr+EhbZu2syMBRfG9XQ5mgng== -"@tauri-apps/cli-linux-arm64-musl@1.4.0": - version "1.4.0" - resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm64-musl/-/cli-linux-arm64-musl-1.4.0.tgz#f805ab2ee415875900f4b456f17dc4900d2a7911" - integrity sha512-7J3pRB6n6uNYgIfCeKt2Oz8J7oSaz2s8GGFRRH2HPxuTHrBNCinzVYm68UhVpJrL3bnGkU0ziVZLsW/iaOGfUg== +"@tauri-apps/cli-linux-arm64-musl@1.5.8": + version "1.5.8" + resolved 
"https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm64-musl/-/cli-linux-arm64-musl-1.5.8.tgz#7cbe0395cbd09d4b49c945e36c2de99478c50a51" + integrity sha512-dEYvNyLMmWD0jb30FNfVPXmBq6OGg6is3km+4RlGg8tZU5Zvq78ClUZtaZuER+N/hv27+Uc6UHl9X3hin8cGGw== -"@tauri-apps/cli-linux-x64-gnu@1.4.0": - version "1.4.0" - resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-x64-gnu/-/cli-linux-x64-gnu-1.4.0.tgz#d3f5e69c22420c7ac9e4021b7a94bce2e48cb45d" - integrity sha512-Zh5gfAJxOv5AVWxcwuueaQ2vIAhlg0d6nZui6nMyfIJ8dbf3aZQ5ZzP38sYow5h/fbvgL+3GSQxZRBIa3c2E1w== +"@tauri-apps/cli-linux-x64-gnu@1.5.8": + version "1.5.8" + resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-x64-gnu/-/cli-linux-x64-gnu-1.5.8.tgz#d03ba73f1ac68bf6bace7bf45b50e6b12ce4468b" + integrity sha512-ut3TDbtLXmZhz6Q4wim57PV02wG+AfuLSWRPhTL9MsPsg/E7Y6sJhv0bIMAq6SwC59RCH52ZGft6RH7samV2NQ== -"@tauri-apps/cli-linux-x64-musl@1.4.0": - version "1.4.0" - resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-x64-musl/-/cli-linux-x64-musl-1.4.0.tgz#2e7f718272ffdd9ace80f57a35023ba0c74767ad" - integrity sha512-OLAYoICU3FaYiTdBsI+lQTKnDHeMmFMXIApN0M+xGiOkoIOQcV9CConMPjgmJQ867+NHRNgUGlvBEAh9CiJodQ== +"@tauri-apps/cli-linux-x64-musl@1.5.8": + version "1.5.8" + resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-x64-musl/-/cli-linux-x64-musl-1.5.8.tgz#4ce560aa102e9031d4c51c7bc853263cf3ab9616" + integrity sha512-k6ei7ETXVZlNpFOhl/8Cnj709UbEr+VuY9xKK/HgwvNfjA5f8HQ9TSKk/Um7oeT1Y61/eEcvcgF/hDURhFJDPQ== -"@tauri-apps/cli-win32-arm64-msvc@1.4.0": - version "1.4.0" - resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-arm64-msvc/-/cli-win32-arm64-msvc-1.4.0.tgz#85cdb52a06feb92da785def4d02512099464525e" - integrity sha512-gZ05GENFbI6CB5MlOUsLlU0kZ9UtHn9riYtSXKT6MYs8HSPRffPHaHSL0WxsJweWh9nR5Hgh/TUU8uW3sYCzCg== +"@tauri-apps/cli-win32-arm64-msvc@1.5.8": + version "1.5.8" + resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-arm64-msvc/-/cli-win32-arm64-msvc-1.5.8.tgz#df83af81c6d89d4a505f2e96b3d443dd411c1a4a" + integrity sha512-l6zm31x1inkS2K5e7otUZ90XBoK+xr2KJObFCZbzmluBE+LM0fgIXCrj7xwH/f0RCUX3VY9HHx4EIo7eLGBXKQ== -"@tauri-apps/cli-win32-ia32-msvc@1.4.0": - version "1.4.0" - resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-ia32-msvc/-/cli-win32-ia32-msvc-1.4.0.tgz#0b7c921204058215aec9a5a00f735e73909bd330" - integrity sha512-JsetT/lTx/Zq98eo8T5CiRyF1nKeX04RO8JlJrI3ZOYsZpp/A5RJvMd/szQ17iOzwiHdge+tx7k2jHysR6oBlQ== +"@tauri-apps/cli-win32-ia32-msvc@1.5.8": + version "1.5.8" + resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-ia32-msvc/-/cli-win32-ia32-msvc-1.5.8.tgz#92e5acc4dcd44aec88099059a04bb5ad3b4e59ff" + integrity sha512-0k3YpWl6PKV4Qp2N52Sb45egXafSgQXcBaO7TIJG4EDfaEf5f6StN+hYSzdnrq9idrK5x9DDCPuebZTuJ+Q8EA== -"@tauri-apps/cli-win32-x64-msvc@1.4.0": - version "1.4.0" - resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-x64-msvc/-/cli-win32-x64-msvc-1.4.0.tgz#23abe3f08c0df89111c29602f91c21a23577b908" - integrity sha512-z8Olcnwp5aYhzqUAarFjqF+oELCjuYWnB2HAJHlfsYNfDCAORY5kct3Fklz8PSsubC3U2EugWn8n42DwnThurg== +"@tauri-apps/cli-win32-x64-msvc@1.5.8": + version "1.5.8" + resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-x64-msvc/-/cli-win32-x64-msvc-1.5.8.tgz#a0c363969cf5a21c95c235e5bf6a94a410130761" + integrity sha512-XjBg8VMswmD9JAHKlb10NRPfBVAZoiOJBbPRte+GP1BUQtqDnbIYcOLSnUCmNZoy3fUBJuKJUBT9tDCbkMr5fQ== -"@tauri-apps/cli@^1.4.0": - version "1.4.0" - resolved "https://registry.yarnpkg.com/@tauri-apps/cli/-/cli-1.4.0.tgz#72732ae61e6b7d097e44a8a2ef5f211b2d01d98b" - integrity 
sha512-VXYr2i2iVFl98etQSQsqLzXgX96bnWiNZd1YADgatqwy/qecbd6Kl5ZAPB5R4ynsgE8A1gU7Fbzh7dCEQYFfmA== +"@tauri-apps/cli@^1.5.8": + version "1.5.8" + resolved "https://registry.yarnpkg.com/@tauri-apps/cli/-/cli-1.5.8.tgz#feaf055af370cb192b24ea4c51edf0e577269fb2" + integrity sha512-c/mzk5vjjfxtH5uNXSc9h1eiprsolnoBcUwAa4/SZ3gxJ176CwrUKODz3cZBOnzs8omwagwgSN/j7K8NrdFL9g== optionalDependencies: - "@tauri-apps/cli-darwin-arm64" "1.4.0" - "@tauri-apps/cli-darwin-x64" "1.4.0" - "@tauri-apps/cli-linux-arm-gnueabihf" "1.4.0" - "@tauri-apps/cli-linux-arm64-gnu" "1.4.0" - "@tauri-apps/cli-linux-arm64-musl" "1.4.0" - "@tauri-apps/cli-linux-x64-gnu" "1.4.0" - "@tauri-apps/cli-linux-x64-musl" "1.4.0" - "@tauri-apps/cli-win32-arm64-msvc" "1.4.0" - "@tauri-apps/cli-win32-ia32-msvc" "1.4.0" - "@tauri-apps/cli-win32-x64-msvc" "1.4.0" + "@tauri-apps/cli-darwin-arm64" "1.5.8" + "@tauri-apps/cli-darwin-x64" "1.5.8" + "@tauri-apps/cli-linux-arm-gnueabihf" "1.5.8" + "@tauri-apps/cli-linux-arm64-gnu" "1.5.8" + "@tauri-apps/cli-linux-arm64-musl" "1.5.8" + "@tauri-apps/cli-linux-x64-gnu" "1.5.8" + "@tauri-apps/cli-linux-x64-musl" "1.5.8" + "@tauri-apps/cli-win32-arm64-msvc" "1.5.8" + "@tauri-apps/cli-win32-ia32-msvc" "1.5.8" + "@tauri-apps/cli-win32-x64-msvc" "1.5.8" "@trysound/sax@0.2.0": version "0.2.0" From 4b7139d9ae6416bdf1bdcdf4cda4115ec81c2462 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Dec 2023 10:30:29 +0000 Subject: [PATCH 08/40] chore(deps): bump fuse.js from 6.6.2 to 7.0.0 Bumps [fuse.js](https://github.com/krisk/Fuse) from 6.6.2 to 7.0.0. - [Release notes](https://github.com/krisk/Fuse/releases) - [Changelog](https://github.com/krisk/Fuse/blob/main/CHANGELOG.md) - [Commits](https://github.com/krisk/Fuse/compare/v6.6.2...v7.0.0) --- updated-dependencies: - dependency-name: fuse.js dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- package.json | 2 +- yarn.lock | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/package.json b/package.json index c8673cf31..ae571ca99 100644 --- a/package.json +++ b/package.json @@ -21,7 +21,7 @@ "@svgr/webpack": "^6.5.1", "@vercel/analytics": "^0.1.11", "emoji-picker-react": "^4.5.15", - "fuse.js": "^6.6.2", + "fuse.js": "^7.0.0", "html-to-image": "^1.11.11", "mermaid": "^10.6.1", "nanoid": "^5.0.3", diff --git a/yarn.lock b/yarn.lock index 340282108..264a5b119 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3301,10 +3301,10 @@ functions-have-names@^1.2.2: resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== -fuse.js@^6.6.2: - version "6.6.2" - resolved "https://registry.yarnpkg.com/fuse.js/-/fuse.js-6.6.2.tgz#fe463fed4b98c0226ac3da2856a415576dc9a111" - integrity sha512-cJaJkxCCxC8qIIcPBF9yGxY0W/tVZS3uEISDxhYIdtk8OL93pe+6Zj7LjCqVV4dzbqcriOZ+kQ/NE4RXZHsIGA== +fuse.js@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/fuse.js/-/fuse.js-7.0.0.tgz#6573c9fcd4c8268e403b4fc7d7131ffcf99a9eb2" + integrity sha512-14F4hBIxqKvD4Zz/XjDc3y94mNZN6pRv3U13Udo0lNLCWRBUsrMv2xwcF/y/Z5sV6+FQW+/ow68cHpm4sunt8Q== gensync@^1.0.0-beta.2: version "1.0.0-beta.2" From 2bc84ec908d06312145a750549e98dddf08bf01b Mon Sep 17 00:00:00 2001 From: Fei Date: Mon, 18 Dec 2023 22:43:59 -0600 Subject: [PATCH 09/40] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 52ed68c0f..366a9e1ef 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ English / [简体中文](./README_CN.md) -One-Click to get well-designed cross-platform ChatGPT web UI. +One-Click to get a well-designed cross-platform ChatGPT web UI. 一键免费部署你的跨平台私人 ChatGPT 应用。 From bf412fd3098854b667d2bead19ac5886e25cd79a Mon Sep 17 00:00:00 2001 From: pdx <39936160+qiuqfang@users.noreply.github.com> Date: Wed, 20 Dec 2023 13:55:22 +0800 Subject: [PATCH 10/40] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 52ed68c0f..5cca284d3 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,7 @@ One-Click to get well-designed cross-platform ChatGPT web UI. 
[![MacOS][MacOS-image]][download-url] [![Linux][Linux-image]][download-url] -[Web App](https://chatgpt.nextweb.fun/) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [Discord](https://discord.gg/YCkeafCafC) / [Twitter](https://twitter.com/mortiest_ricky) / [Buy Me a Coffee](https://www.buymeacoffee.com/yidadaa) +[Web App]((https://chat-gpt-next-web.vercel.app/)/) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [Discord](https://discord.gg/YCkeafCafC) / [Twitter](https://twitter.com/mortiest_ricky) / [Buy Me a Coffee](https://www.buymeacoffee.com/yidadaa) [网页版](https://chatgpt.nextweb.fun/) / [客户端](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [反馈](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [QQ 群](https://github.com/Yidadaa/ChatGPT-Next-Web/discussions/1724) / [打赏开发者](https://user-images.githubusercontent.com/16968934/227772541-5bcd52d8-61b7-488c-a203-0330d8006e2b.jpg) From bc5b8f0ff9c2cf95b51690adee32c2961b367481 Mon Sep 17 00:00:00 2001 From: pdx <39936160+qiuqfang@users.noreply.github.com> Date: Wed, 20 Dec 2023 13:55:53 +0800 Subject: [PATCH 11/40] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 5cca284d3..286db2d60 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,7 @@ One-Click to get well-designed cross-platform ChatGPT web UI. [![MacOS][MacOS-image]][download-url] [![Linux][Linux-image]][download-url] -[Web App]((https://chat-gpt-next-web.vercel.app/)/) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [Discord](https://discord.gg/YCkeafCafC) / [Twitter](https://twitter.com/mortiest_ricky) / [Buy Me a Coffee](https://www.buymeacoffee.com/yidadaa) +[Web App](https://chat-gpt-next-web.vercel.app/) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [Discord](https://discord.gg/YCkeafCafC) / [Twitter](https://twitter.com/mortiest_ricky) / [Buy Me a Coffee](https://www.buymeacoffee.com/yidadaa) [网页版](https://chatgpt.nextweb.fun/) / [客户端](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [反馈](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [QQ 群](https://github.com/Yidadaa/ChatGPT-Next-Web/discussions/1724) / [打赏开发者](https://user-images.githubusercontent.com/16968934/227772541-5bcd52d8-61b7-488c-a203-0330d8006e2b.jpg) From 1d096eeb17974370df1ef23674b0274ad433c17b Mon Sep 17 00:00:00 2001 From: Yuhang <2312744987@qq.com> Date: Fri, 22 Dec 2023 14:57:54 +0800 Subject: [PATCH 12/40] docs: Add deploy to Zeabur button in README --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 52ed68c0f..f9c35227f 100644 --- a/README.md +++ b/README.md @@ -27,6 +27,8 @@ One-Click to get well-designed cross-platform ChatGPT web UI. 
[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web) +[![Deploy on Zeabur](https://zeabur.com/button.svg)](https://zeabur.com/templates/ZBUEFA) + [![Open in Gitpod](https://gitpod.io/button/open-in-gitpod.svg)](https://gitpod.io/#https://github.com/Yidadaa/ChatGPT-Next-Web) ![cover](./docs/images/cover.png) From 42515899d96c5f2725eb852a9a9369251cc1ba0b Mon Sep 17 00:00:00 2001 From: Yuhang <2312744987@qq.com> Date: Fri, 22 Dec 2023 14:58:30 +0800 Subject: [PATCH 13/40] docs: Add deploy to Zeabur button in REAME for Chinese --- README_CN.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README_CN.md b/README_CN.md index 08b385573..40f67e9be 100644 --- a/README_CN.md +++ b/README_CN.md @@ -9,6 +9,8 @@ [![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web) +[![Deploy on Zeabur](https://zeabur.com/button.svg)](https://zeabur.com/templates/ZBUEFA) + [![Open in Gitpod](https://gitpod.io/button/open-in-gitpod.svg)](https://gitpod.io/#https://github.com/Yidadaa/ChatGPT-Next-Web) ![主界面](./docs/images/cover.png) From 3ef0621eb0a43213a5e415f738d116dc6d30d6c8 Mon Sep 17 00:00:00 2001 From: Fred Liang Date: Sat, 23 Dec 2023 16:48:16 +0800 Subject: [PATCH 14/40] refactor: update product name --- README.md | 2 +- README_CN.md | 2 +- app/client/api.ts | 2 +- app/components/exporter.tsx | 2 +- app/components/sidebar.tsx | 2 +- app/layout.tsx | 4 +- app/store/update.ts | 4 +- docs/faq-cn.md | 2 +- docs/faq-ko.md | 2 +- docs/user-manual-cn.md | 4 +- public/site.webmanifest | 39 ++- src-tauri/Cargo.lock | 523 +++++++++++++++++++++++++++++++++++- src-tauri/tauri.conf.json | 10 +- 13 files changed, 557 insertions(+), 41 deletions(-) diff --git a/README.md b/README.md index f9c35227f..26c4e0a3b 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@
 icon

-ChatGPT Next Web
+NextChat

English / [简体中文](./README_CN.md) diff --git a/README_CN.md b/README_CN.md index 40f67e9be..b7a95b11c 100644 --- a/README_CN.md +++ b/README_CN.md @@ -1,7 +1,7 @@
 预览

-ChatGPT Next Web
+NextChat

一键免费部署你的私人 ChatGPT 网页应用。 diff --git a/app/client/api.ts b/app/client/api.ts index eedd2c9ab..c7e33c71b 100644 --- a/app/client/api.ts +++ b/app/client/api.ts @@ -93,7 +93,7 @@ export class ClientApi { { from: "human", value: - "Share from [ChatGPT Next Web]: https://github.com/Yidadaa/ChatGPT-Next-Web", + "Share from [NextChat]: https://github.com/Yidadaa/ChatGPT-Next-Web", }, ]); // 敬告二开开发者们,为了开源大模型的发展,请不要修改上述消息,此消息用于后续数据清洗使用 diff --git a/app/components/exporter.tsx b/app/components/exporter.tsx index 4ca6427a7..8cae7ac97 100644 --- a/app/components/exporter.tsx +++ b/app/components/exporter.tsx @@ -530,7 +530,7 @@ export function ImagePreviewer(props: {
-ChatGPT Next Web
+NextChat
 github.com/Yidadaa/ChatGPT-Next-Web
diff --git a/app/components/sidebar.tsx b/app/components/sidebar.tsx index beeee865a..69b2e71f8 100644 --- a/app/components/sidebar.tsx +++ b/app/components/sidebar.tsx @@ -155,7 +155,7 @@ export function SideBar(props: { className?: string }) { >
-ChatGPT Next
+NextChat
Build your own AI assistant. diff --git a/app/layout.tsx b/app/layout.tsx index 5e0762653..b234051f9 100644 --- a/app/layout.tsx +++ b/app/layout.tsx @@ -6,7 +6,7 @@ import { getClientConfig } from "./config/client"; import { type Metadata } from "next"; export const metadata: Metadata = { - title: "ChatGPT Next Web", + title: "NextChat", description: "Your personal ChatGPT Chat Bot.", viewport: { width: "device-width", @@ -18,7 +18,7 @@ export const metadata: Metadata = { { media: "(prefers-color-scheme: dark)", color: "#151515" }, ], appleWebApp: { - title: "ChatGPT Next Web", + title: "NextChat", statusBarStyle: "default", }, }; diff --git a/app/store/update.ts b/app/store/update.ts index 2b088a13d..2ab7ec19a 100644 --- a/app/store/update.ts +++ b/app/store/update.ts @@ -95,7 +95,7 @@ export const useUpdateStore = createPersistStore( if (version === remoteId) { // Show a notification using Tauri window.__TAURI__?.notification.sendNotification({ - title: "ChatGPT Next Web", + title: "NextChat", body: `${Locale.Settings.Update.IsLatest}`, icon: `${ChatGptIcon.src}`, sound: "Default" @@ -104,7 +104,7 @@ export const useUpdateStore = createPersistStore( const updateMessage = Locale.Settings.Update.FoundUpdate(`${remoteId}`); // Show a notification for the new version using Tauri window.__TAURI__?.notification.sendNotification({ - title: "ChatGPT Next Web", + title: "NextChat", body: updateMessage, icon: `${ChatGptIcon.src}`, sound: "Default" diff --git a/docs/faq-cn.md b/docs/faq-cn.md index bf79ef7d9..06a96852b 100644 --- a/docs/faq-cn.md +++ b/docs/faq-cn.md @@ -23,7 +23,7 @@ Docker 版本相当于稳定版,latest Docker 总是与 latest release version ## 如何修改 Vercel 环境变量 - 进入 vercel 的控制台页面; -- 选中你的 chatgpt next web 项目; +- 选中你的 NextChat 项目; - 点击页面头部的 Settings 选项; - 找到侧边栏的 Environment Variables 选项; - 修改对应的值即可。 diff --git a/docs/faq-ko.md b/docs/faq-ko.md index 9eb6bbbb2..b0d28917f 100644 --- a/docs/faq-ko.md +++ b/docs/faq-ko.md @@ -23,7 +23,7 @@ Docker 버전은 사실상 안정된 버전과 같습니다. latest Docker는 ## Vercel 환경 변수를 어떻게 수정하나요? - Vercel의 제어판 페이지로 이동합니다. -- chatgpt next web 프로젝트를 선택합니다. +- NextChat 프로젝트를 선택합니다. - 페이지 상단의 Settings 옵션을 클릭합니다. - 사이드바의 Environment Variables 옵션을 찾습니다. - 해당 값을 수정합니다. diff --git a/docs/user-manual-cn.md b/docs/user-manual-cn.md index 883bbc23e..6109fcf57 100644 --- a/docs/user-manual-cn.md +++ b/docs/user-manual-cn.md @@ -2,7 +2,7 @@ > No english version yet, please read this doc with ChatGPT or other translation tools. -本文档用于解释 ChatGPT Next Web 的部分功能介绍和设计原则。 +本文档用于解释 NextChat 的部分功能介绍和设计原则。 ## 面具 (Mask) @@ -22,7 +22,7 @@ 编辑步骤如下: -1. 在 ChatGPT Next Web 中配置好一个面具; +1. 在 NextChat 中配置好一个面具; 2. 使用面具编辑页面的下载按钮,将面具保存为 JSON 格式; 3. 让 ChatGPT 帮你将 json 文件格式化为对应的 ts 代码; 4. 
放入对应的 .ts 文件。 diff --git a/public/site.webmanifest b/public/site.webmanifest index 117f33b86..cf77f68e4 100644 --- a/public/site.webmanifest +++ b/public/site.webmanifest @@ -1,21 +1,20 @@ { - "name": "ChatGPT Next Web", - "short_name": "ChatGPT", - "icons": [ - { - "src": "/android-chrome-192x192.png", - "sizes": "192x192", - "type": "image/png" - }, - { - "src": "/android-chrome-512x512.png", - "sizes": "512x512", - "type": "image/png" - } - ], - "start_url": "/", - "theme_color": "#ffffff", - "background_color": "#ffffff", - "display": "standalone" - } - \ No newline at end of file + "name": "NextChat", + "short_name": "NextChat", + "icons": [ + { + "src": "/android-chrome-192x192.png", + "sizes": "192x192", + "type": "image/png" + }, + { + "src": "/android-chrome-512x512.png", + "sizes": "512x512", + "type": "image/png" + } + ], + "start_url": "/", + "theme_color": "#ffffff", + "background_color": "#ffffff", + "display": "standalone" +} \ No newline at end of file diff --git a/src-tauri/Cargo.lock b/src-tauri/Cargo.lock index bb72a88e7..d93210fc5 100644 --- a/src-tauri/Cargo.lock +++ b/src-tauri/Cargo.lock @@ -56,6 +56,128 @@ version = "1.0.71" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8" +[[package]] +name = "async-broadcast" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c48ccdbf6ca6b121e0f586cbc0e73ae440e56c67c30fa0873b4e110d9c26d2b" +dependencies = [ + "event-listener", + "futures-core", +] + +[[package]] +name = "async-channel" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" +dependencies = [ + "concurrent-queue", + "event-listener", + "futures-core", +] + +[[package]] +name = "async-executor" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b0c4a4f319e45986f347ee47fef8bf5e81c9abc3f6f58dc2391439f30df65f0" +dependencies = [ + "async-lock", + "async-task", + "concurrent-queue", + "fastrand 2.0.1", + "futures-lite", + "slab", +] + +[[package]] +name = "async-fs" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "279cf904654eeebfa37ac9bb1598880884924aab82e290aa65c9e77a0e142e06" +dependencies = [ + "async-lock", + "autocfg", + "blocking", + "futures-lite", +] + +[[package]] +name = "async-io" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" +dependencies = [ + "async-lock", + "autocfg", + "cfg-if", + "concurrent-queue", + "futures-lite", + "log", + "parking", + "polling", + "rustix", + "slab", + "socket2", + "waker-fn", +] + +[[package]] +name = "async-lock" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" +dependencies = [ + "event-listener", +] + +[[package]] +name = "async-process" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a9d28b1d97e08915212e2e45310d47854eafa69600756fc735fb788f75199c9" +dependencies = [ + "async-io", + "async-lock", + "autocfg", + "blocking", + "cfg-if", + "event-listener", + "futures-lite", + "rustix", + "signal-hook", + "windows-sys 0.48.0", +] + +[[package]] +name = "async-recursion" +version = "1.0.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.16", +] + +[[package]] +name = "async-task" +version = "4.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1d90cd0b264dfdd8eb5bad0a2c217c1f88fa96a8573f40e7b12de23fb468f46" + +[[package]] +name = "async-trait" +version = "0.1.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b2d0f03b3640e3a630367e40c468cb7f309529c708ed1d88597047b0e7c6ef7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.16", +] + [[package]] name = "atk" version = "0.15.1" @@ -80,6 +202,12 @@ dependencies = [ "system-deps 6.1.0", ] +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + [[package]] name = "attohttpc" version = "0.22.0" @@ -150,6 +278,22 @@ dependencies = [ "generic-array", ] +[[package]] +name = "blocking" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c36a4d0d48574b3dd360b4b7d95cc651d2b6557b6402848a27d4b228a473e2a" +dependencies = [ + "async-channel", + "async-lock", + "async-task", + "fastrand 2.0.1", + "futures-io", + "futures-lite", + "piper", + "tracing", +] + [[package]] name = "brotli" version = "3.3.4" @@ -358,6 +502,15 @@ dependencies = [ "memchr", ] +[[package]] +name = "concurrent-queue" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d16048cd947b08fa32c24458a22f5dc5e835264f689f4f5653210c69fd107363" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "convert_case" version = "0.4.0" @@ -530,6 +683,17 @@ dependencies = [ "syn 2.0.16", ] +[[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "derive_more" version = "0.99.17" @@ -629,6 +793,27 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "enumflags2" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5998b4f30320c9d93aed72f63af821bfdac50465b75428fce77b48ec482c3939" +dependencies = [ + "enumflags2_derive", + "serde", +] + +[[package]] +name = "enumflags2_derive" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f95e2801cd355d4a1a3e3953ce6ee5ae9603a5c833455343a8bfe3f44d418246" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.16", +] + [[package]] name = "errno" version = "0.3.1" @@ -650,6 +835,12 @@ dependencies = [ "libc", ] +[[package]] +name = "event-listener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + [[package]] name = "fastrand" version = "1.9.0" @@ -659,6 +850,12 @@ dependencies = [ "instant", ] +[[package]] +name = "fastrand" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" + [[package]] name = "fdeflate" version = "0.3.0" @@ -674,7 +871,7 @@ version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"a3cf3a800ff6e860c863ca6d4b16fd999db8b752819c1606884047b73e468535" dependencies = [ - "memoffset", + "memoffset 0.8.0", "rustc_version", ] @@ -772,6 +969,21 @@ version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964" +[[package]] +name = "futures-lite" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce" +dependencies = [ + "fastrand 1.9.0", + "futures-core", + "futures-io", + "memchr", + "parking", + "pin-project-lite", + "waker-fn", +] + [[package]] name = "futures-macro" version = "0.3.28" @@ -783,6 +995,12 @@ dependencies = [ "syn 2.0.16", ] +[[package]] +name = "futures-sink" +version = "0.3.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e36d3378ee38c2a36ad710c5d30c2911d752cb941c00c72dbabfb786a7970817" + [[package]] name = "futures-task" version = "0.3.28" @@ -796,8 +1014,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" dependencies = [ "futures-core", + "futures-io", "futures-macro", + "futures-sink", "futures-task", + "memchr", "pin-project-lite", "pin-utils", "slab", @@ -1451,6 +1672,19 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" +[[package]] +name = "mac-notification-sys" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51fca4d74ff9dbaac16a01b924bc3693fa2bba0862c2c633abc73f9a8ea21f64" +dependencies = [ + "cc", + "dirs-next", + "objc-foundation", + "objc_id", + "time", +] + [[package]] name = "malloc_buf" version = "0.0.6" @@ -1495,6 +1729,15 @@ version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" +[[package]] +name = "memoffset" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4" +dependencies = [ + "autocfg", +] + [[package]] name = "memoffset" version = "0.8.0" @@ -1504,6 +1747,15 @@ dependencies = [ "autocfg", ] +[[package]] +name = "memoffset" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" +dependencies = [ + "autocfg", +] + [[package]] name = "minisign-verify" version = "0.2.1" @@ -1572,12 +1824,37 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54" +[[package]] +name = "nix" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b" +dependencies = [ + "bitflags 1.3.2", + "cfg-if", + "libc", + "memoffset 0.7.1", +] + [[package]] name = "nodrop" version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" +[[package]] +name = "notify-rust" +version = "4.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "827c5edfa80235ded4ab3fe8e9dc619b4f866ef16fe9b1c6b8a7f8692c0f2226" +dependencies = [ + 
"log", + "mac-notification-sys", + "serde", + "tauri-winrt-notification", + "zbus", +] + [[package]] name = "nu-ansi-term" version = "0.46.0" @@ -1757,6 +2034,16 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "ordered-stream" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aa2b01e1d916879f73a53d01d1d6cee68adbb31d6d9177a8cfce093cced1d50" +dependencies = [ + "futures-core", + "pin-project-lite", +] + [[package]] name = "overload" version = "0.1.1" @@ -1788,6 +2075,12 @@ dependencies = [ "system-deps 6.1.0", ] +[[package]] +name = "parking" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" + [[package]] name = "parking_lot" version = "0.12.1" @@ -1933,6 +2226,17 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +[[package]] +name = "piper" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "668d31b1c4eba19242f2088b2bf3316b82ca31082a8335764db4e083db7485d4" +dependencies = [ + "atomic-waker", + "fastrand 2.0.1", + "futures-io", +] + [[package]] name = "pkg-config" version = "0.3.27" @@ -1948,7 +2252,7 @@ dependencies = [ "base64 0.21.0", "indexmap", "line-wrap", - "quick-xml", + "quick-xml 0.28.2", "serde", "time", ] @@ -1966,6 +2270,22 @@ dependencies = [ "miniz_oxide", ] +[[package]] +name = "polling" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" +dependencies = [ + "autocfg", + "bitflags 1.3.2", + "cfg-if", + "concurrent-queue", + "libc", + "log", + "pin-project-lite", + "windows-sys 0.48.0", +] + [[package]] name = "ppv-lite86" version = "0.2.17" @@ -2027,6 +2347,15 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "quick-xml" +version = "0.23.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11bafc859c6815fbaffbbbf4229ecb767ac913fecb27f9ad4343662e9ef099ea" +dependencies = [ + "memchr", +] + [[package]] name = "quick-xml" version = "0.28.2" @@ -2466,6 +2795,17 @@ dependencies = [ "stable_deref_trait", ] +[[package]] +name = "sha1" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + [[package]] name = "sha2" version = "0.10.6" @@ -2486,6 +2826,25 @@ dependencies = [ "lazy_static", ] +[[package]] +name = "signal-hook" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8621587d4798caf8eb44879d42e56b9a93ea5dcd315a6487c357130095b62801" +dependencies = [ + "libc", + "signal-hook-registry", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" +dependencies = [ + "libc", +] + [[package]] name = "simd-adler32" version = "0.3.5" @@ -2513,6 +2872,16 @@ version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" +[[package]] +name = "socket2" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" +dependencies = [ + "libc", + "winapi", +] + [[package]] name = "soup2" version = "0.2.1" @@ -2556,6 +2925,12 @@ dependencies = [ "loom", ] +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + [[package]] name = "string_cache" version = "0.8.7" @@ -2733,6 +3108,7 @@ dependencies = [ "http", "ignore", "minisign-verify", + "notify-rust", "objc", "once_cell", "open", @@ -2915,6 +3291,16 @@ dependencies = [ "toml 0.7.3", ] +[[package]] +name = "tauri-winrt-notification" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f5bff1d532fead7c43324a0fa33643b8621a47ce2944a633be4cb6c0240898f" +dependencies = [ + "quick-xml 0.23.1", + "windows 0.39.0", +] + [[package]] name = "tempfile" version = "3.5.0" @@ -2922,7 +3308,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9fbec84f381d5795b08656e4912bec604d162bff9291d6189a78f4c8ab87998" dependencies = [ "cfg-if", - "fastrand", + "fastrand 1.9.0", "redox_syscall 0.3.5", "rustix", "windows-sys 0.45.0", @@ -3135,6 +3521,17 @@ version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" +[[package]] +name = "uds_windows" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89daebc3e6fd160ac4aa9fc8b3bf71e1f74fbf92367ae71fb83a037e8bf164b9" +dependencies = [ + "memoffset 0.9.0", + "tempfile", + "winapi", +] + [[package]] name = "unicode-bidi" version = "0.3.13" @@ -3239,6 +3636,12 @@ dependencies = [ "libc", ] +[[package]] +name = "waker-fn" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3c4517f54858c779bbcbf228f4fca63d121bf85fbecb2dc578cdf4a39395690" + [[package]] name = "walkdir" version = "2.3.3" @@ -3815,6 +4218,82 @@ dependencies = [ "libc", ] +[[package]] +name = "xdg-home" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2769203cd13a0c6015d515be729c526d041e9cf2c0cc478d57faee85f40c6dcd" +dependencies = [ + "nix", + "winapi", +] + +[[package]] +name = "zbus" +version = "3.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31de390a2d872e4cd04edd71b425e29853f786dc99317ed72d73d6fcf5ebb948" +dependencies = [ + "async-broadcast", + "async-executor", + "async-fs", + "async-io", + "async-lock", + "async-process", + "async-recursion", + "async-task", + "async-trait", + "blocking", + "byteorder", + "derivative", + "enumflags2", + "event-listener", + "futures-core", + "futures-sink", + "futures-util", + "hex", + "nix", + "once_cell", + "ordered-stream", + "rand 0.8.5", + "serde", + "serde_repr", + "sha1", + "static_assertions", + "tracing", + "uds_windows", + "winapi", + "xdg-home", + "zbus_macros", + "zbus_names", + "zvariant", +] + +[[package]] +name = "zbus_macros" +version = "3.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d1794a946878c0e807f55a397187c11fc7a038ba5d868e7db4f3bd7760bc9d" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "quote", + "regex", + "syn 1.0.109", + "zvariant_utils", +] + +[[package]] +name = "zbus_names" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"fb80bb776dbda6e23d705cf0123c3b95df99c4ebeaec6c2599d4a5419902b4a9" +dependencies = [ + "serde", + "static_assertions", + "zvariant", +] + [[package]] name = "zip" version = "0.6.6" @@ -3825,3 +4304,41 @@ dependencies = [ "crc32fast", "crossbeam-utils", ] + +[[package]] +name = "zvariant" +version = "3.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44b291bee0d960c53170780af148dca5fa260a63cdd24f1962fa82e03e53338c" +dependencies = [ + "byteorder", + "enumflags2", + "libc", + "serde", + "static_assertions", + "zvariant_derive", +] + +[[package]] +name = "zvariant_derive" +version = "3.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "934d7a7dfc310d6ee06c87ffe88ef4eca7d3e37bb251dece2ef93da8f17d8ecd" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 1.0.109", + "zvariant_utils", +] + +[[package]] +name = "zvariant_utils" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7234f0d811589db492d16893e3f21e8e2fd282e6d01b0cddee310322062cc200" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json index 7f2ffb0f6..30546227d 100644 --- a/src-tauri/tauri.conf.json +++ b/src-tauri/tauri.conf.json @@ -8,7 +8,7 @@ "withGlobalTauri": true }, "package": { - "productName": "ChatGPT Next Web", + "productName": "NextChat", "version": "2.9.13" }, "tauri": { @@ -68,7 +68,7 @@ "icons/icon.ico" ], "identifier": "com.yida.chatgpt.next.web", - "longDescription": "ChatGPT Next Web is a cross-platform ChatGPT client, including Web/Win/Linux/OSX/PWA.", + "longDescription": "NextChat is a cross-platform ChatGPT client, including Web/Win/Linux/OSX/PWA.", "macOS": { "entitlements": null, "exceptionDomain": "", @@ -77,7 +77,7 @@ "signingIdentity": null }, "resources": [], - "shortDescription": "ChatGPT Next Web App", + "shortDescription": "NextChat App", "targets": "all", "windows": { "certificateThumbprint": null, @@ -104,11 +104,11 @@ "fullscreen": false, "height": 600, "resizable": true, - "title": "ChatGPT Next Web", + "title": "NextChat", "width": 960, "hiddenTitle": true, "titleBarStyle": "Overlay" } ] } -} +} \ No newline at end of file From 75acd4c1aa8dda89685eedd192723c521b4aa07a Mon Sep 17 00:00:00 2001 From: Fred Liang Date: Sat, 23 Dec 2023 22:21:27 +0800 Subject: [PATCH 15/40] chore: append previous name --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 26c4e0a3b..c53150745 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@
-<h1 align="center">NextChat</h1>
+<h1 align="center">NextChat (ChatGPT Next Web)</h1>

English / [简体中文](./README_CN.md) From 778e88cb5677dcc0658ea3ef85ed2707ff9d398a Mon Sep 17 00:00:00 2001 From: Fred Liang Date: Sun, 24 Dec 2023 02:15:30 +0800 Subject: [PATCH 16/40] chore: resolve conflict --- .env.template | 11 ++ app/api/common.ts | 2 +- app/api/google/[...path]/route.ts | 104 ++++++++++++++++ app/client/api.ts | 34 +++-- app/client/platforms/google.ts | 199 ++++++++++++++++++++++++++++++ app/components/exporter.tsx | 14 ++- app/components/home.tsx | 13 +- app/components/model-config.tsx | 2 +- app/components/settings.tsx | 67 +++++++++- app/config/server.ts | 3 + app/constant.ts | 95 ++++++++++++++ app/locales/cn.ts | 25 +++- app/store/access.ts | 11 ++ app/store/chat.ts | 54 +++++--- app/store/update.ts | 64 +++++----- app/utils/model.ts | 6 +- 16 files changed, 630 insertions(+), 74 deletions(-) create mode 100644 app/api/google/[...path]/route.ts create mode 100644 app/client/platforms/google.ts diff --git a/.env.template b/.env.template index 3e3290369..89bab2cb1 100644 --- a/.env.template +++ b/.env.template @@ -8,6 +8,16 @@ CODE=your-password # You can start service behind a proxy PROXY_URL=http://localhost:7890 +# (optional) +# Default: Empty +# Googel Gemini Pro API key, set if you want to use Google Gemini Pro API. +GOOGLE_API_KEY= + +# (optional) +# Default: https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent +# Googel Gemini Pro API url, set if you want to customize Google Gemini Pro API url. +GOOGLE_URL= + # Override openai api request base url. (optional) # Default: https://api.openai.com # Examples: http://your-openai-proxy.com @@ -36,3 +46,4 @@ ENABLE_BALANCE_QUERY= # Default: Empty # If you want to disable parse settings from url, set this value to 1. DISABLE_FAST_LINK= + diff --git a/app/api/common.ts b/app/api/common.ts index 6b0d619df..13cfab03c 100644 --- a/app/api/common.ts +++ b/app/api/common.ts @@ -1,6 +1,6 @@ import { NextRequest, NextResponse } from "next/server"; import { getServerSideConfig } from "../config/server"; -import { DEFAULT_MODELS, OPENAI_BASE_URL } from "../constant"; +import { DEFAULT_MODELS, OPENAI_BASE_URL, GEMINI_BASE_URL } from "../constant"; import { collectModelTable } from "../utils/model"; import { makeAzurePath } from "../azure"; diff --git a/app/api/google/[...path]/route.ts b/app/api/google/[...path]/route.ts new file mode 100644 index 000000000..5b19740af --- /dev/null +++ b/app/api/google/[...path]/route.ts @@ -0,0 +1,104 @@ +import { NextRequest, NextResponse } from "next/server"; +import { auth } from "../../auth"; +import { getServerSideConfig } from "@/app/config/server"; +import { GEMINI_BASE_URL, Google } from "@/app/constant"; + +async function handle( + req: NextRequest, + { params }: { params: { path: string[] } }, +) { + console.log("[Google Route] params ", params); + + if (req.method === "OPTIONS") { + return NextResponse.json({ body: "OK" }, { status: 200 }); + } + + const controller = new AbortController(); + + const serverConfig = getServerSideConfig(); + + let baseUrl = serverConfig.googleUrl || GEMINI_BASE_URL; + + if (!baseUrl.startsWith("http")) { + baseUrl = `https://${baseUrl}`; + } + + if (baseUrl.endsWith("/")) { + baseUrl = baseUrl.slice(0, -1); + } + + let path = `${req.nextUrl.pathname}`.replaceAll("/api/google/", ""); + + console.log("[Proxy] ", path); + console.log("[Base Url]", baseUrl); + // this fix [Org ID] undefined in server side if not using custom point + if (serverConfig.openaiOrgId !== undefined) { + console.log("[Org ID]", 
serverConfig.openaiOrgId); + } + + const timeoutId = setTimeout( + () => { + controller.abort(); + }, + 10 * 60 * 1000, + ); + + const fetchUrl = `${baseUrl}/${path}?key=${req.nextUrl.searchParams.get( + "key", + )}`; + + const fetchOptions: RequestInit = { + headers: { + "Content-Type": "application/json", + "Cache-Control": "no-store", + }, + method: req.method, + body: req.body, + // to fix #2485: https://stackoverflow.com/questions/55920957/cloudflare-worker-typeerror-one-time-use-body + redirect: "manual", + // @ts-ignore + duplex: "half", + signal: controller.signal, + }; + + try { + const res = await fetch(fetchUrl, fetchOptions); + // to prevent browser prompt for credentials + const newHeaders = new Headers(res.headers); + newHeaders.delete("www-authenticate"); + // to disable nginx buffering + newHeaders.set("X-Accel-Buffering", "no"); + + return new Response(res.body, { + status: res.status, + statusText: res.statusText, + headers: newHeaders, + }); + } finally { + clearTimeout(timeoutId); + } +} + +export const GET = handle; +export const POST = handle; + +export const runtime = "edge"; +export const preferredRegion = [ + "arn1", + "bom1", + "cdg1", + "cle1", + "cpt1", + "dub1", + "fra1", + "gru1", + "hnd1", + "iad1", + "icn1", + "kix1", + "lhr1", + "pdx1", + "sfo1", + "sin1", + "syd1", +]; diff --git a/app/client/api.ts b/app/client/api.ts index c7e33c71b..50865d4b9 100644 --- a/app/client/api.ts +++ b/app/client/api.ts @@ -1,8 +1,13 @@ import { getClientConfig } from "../config/client"; -import { ACCESS_CODE_PREFIX, Azure, ServiceProvider } from "../constant"; -import { ChatMessage, ModelType, useAccessStore } from "../store"; +import { + ACCESS_CODE_PREFIX, + Azure, + ModelProvider, + ServiceProvider, +} from "../constant"; +import { ChatMessage, ModelType, useAccessStore, useChatStore } from "../store"; import { ChatGPTApi } from "./platforms/openai"; - +import { GeminiApi } from "./platforms/google"; export const ROLES = ["system", "user", "assistant"] as const; export type MessageRole = (typeof ROLES)[number]; @@ -40,7 +45,15 @@ export interface LLMUsage { export interface LLMModel { name: string; + displayName: string; available: boolean; + provider: LLMModelProvider; +} + +export interface LLMModelProvider { + id: string; + providerName: string; + providerType: string; } export abstract class LLMApi { @@ -73,7 +86,11 @@ interface ChatProvider { export class ClientApi { public llm: LLMApi; - constructor() { + constructor(provider: ModelProvider = ModelProvider.GPT) { + if (provider === ModelProvider.Gemini) { + this.llm = new GeminiApi(); + return; + } this.llm = new ChatGPTApi(); } @@ -123,8 +140,6 @@ export class ClientApi { } } -export const api = new ClientApi(); - export function getHeaders() { const accessStore = useAccessStore.getState(); const headers: Record = { @@ -132,9 +147,14 @@ export function getHeaders() { "x-requested-with": "XMLHttpRequest", }; + const isGoogle = accessStore.provider === ServiceProvider.Google; const isAzure = accessStore.provider === ServiceProvider.Azure; const authHeader = isAzure ? "api-key" : "Authorization"; - const apiKey = isAzure ? accessStore.azureApiKey : accessStore.openaiApiKey; + const apiKey = isGoogle + ? accessStore.googleApiKey + : isAzure + ? accessStore.azureApiKey + : accessStore.openaiApiKey; const makeBearer = (s: string) => `${isAzure ? 
"" : "Bearer "}${s.trim()}`; const validString = (x: string) => x && x.length > 0; diff --git a/app/client/platforms/google.ts b/app/client/platforms/google.ts new file mode 100644 index 000000000..90584571e --- /dev/null +++ b/app/client/platforms/google.ts @@ -0,0 +1,199 @@ +import { Google, REQUEST_TIMEOUT_MS } from "@/app/constant"; +import { ChatOptions, getHeaders, LLMApi, LLMModel, LLMUsage } from "../api"; +import { useAccessStore, useAppConfig, useChatStore } from "@/app/store"; +import { + EventStreamContentType, + fetchEventSource, +} from "@fortaine/fetch-event-source"; +import { prettyObject } from "@/app/utils/format"; +import { getClientConfig } from "@/app/config/client"; +import Locale from "../../locales"; +export class GeminiApi implements LLMApi { + extractMessage(res: any) { + console.log("[Response] gemini response: ", res); + return ( + res?.candidates?.at(0)?.content?.parts.at(0)?.text || + res?.error?.message || + "" + ); + } + async chat(options: ChatOptions): Promise { + const messages = options.messages.map((v) => ({ + role: v.role.replace("assistant", "model").replace("system", "model"), + parts: [{ text: v.content }], + })); + + const modelConfig = { + ...useAppConfig.getState().modelConfig, + ...useChatStore.getState().currentSession().mask.modelConfig, + ...{ + model: options.config.model, + }, + }; + const accessStore = useAccessStore.getState(); + + const requestPayload = { + contents: messages, + // stream: options.config.stream, + // model: modelConfig.model, + // temperature: modelConfig.temperature, + // presence_penalty: modelConfig.presence_penalty, + // frequency_penalty: modelConfig.frequency_penalty, + // top_p: modelConfig.top_p, + // max_tokens: Math.max(modelConfig.max_tokens, 1024), + // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore. 
+ }; + + console.log("[Request] openai payload: ", requestPayload); + + // todo: support stream later + const shouldStream = false; + const controller = new AbortController(); + options.onController?.(controller); + + try { + const chatPath = + this.path(Google.ChatPath) + `?key=${accessStore.googleApiKey}`; + const chatPayload = { + method: "POST", + body: JSON.stringify(requestPayload), + signal: controller.signal, + headers: getHeaders(), + }; + + // make a fetch request + const requestTimeoutId = setTimeout( + () => controller.abort(), + REQUEST_TIMEOUT_MS, + ); + if (shouldStream) { + let responseText = ""; + let remainText = ""; + let finished = false; + + // animate response to make it looks smooth + function animateResponseText() { + if (finished || controller.signal.aborted) { + responseText += remainText; + console.log("[Response Animation] finished"); + return; + } + + if (remainText.length > 0) { + const fetchCount = Math.max(1, Math.round(remainText.length / 60)); + const fetchText = remainText.slice(0, fetchCount); + responseText += fetchText; + remainText = remainText.slice(fetchCount); + options.onUpdate?.(responseText, fetchText); + } + + requestAnimationFrame(animateResponseText); + } + + // start animaion + animateResponseText(); + + const finish = () => { + if (!finished) { + finished = true; + options.onFinish(responseText + remainText); + } + }; + + controller.signal.onabort = finish; + + fetchEventSource(chatPath, { + ...chatPayload, + async onopen(res) { + clearTimeout(requestTimeoutId); + const contentType = res.headers.get("content-type"); + console.log( + "[OpenAI] request response content type: ", + contentType, + ); + + if (contentType?.startsWith("text/plain")) { + responseText = await res.clone().text(); + return finish(); + } + + if ( + !res.ok || + !res.headers + .get("content-type") + ?.startsWith(EventStreamContentType) || + res.status !== 200 + ) { + const responseTexts = [responseText]; + let extraInfo = await res.clone().text(); + try { + const resJson = await res.clone().json(); + extraInfo = prettyObject(resJson); + } catch {} + + if (res.status === 401) { + responseTexts.push(Locale.Error.Unauthorized); + } + + if (extraInfo) { + responseTexts.push(extraInfo); + } + + responseText = responseTexts.join("\n\n"); + + return finish(); + } + }, + onmessage(msg) { + if (msg.data === "[DONE]" || finished) { + return finish(); + } + const text = msg.data; + try { + const json = JSON.parse(text) as { + choices: Array<{ + delta: { + content: string; + }; + }>; + }; + const delta = json.choices[0]?.delta?.content; + if (delta) { + remainText += delta; + } + } catch (e) { + console.error("[Request] parse error", text); + } + }, + onclose() { + finish(); + }, + onerror(e) { + options.onError?.(e); + throw e; + }, + openWhenHidden: true, + }); + } else { + const res = await fetch(chatPath, chatPayload); + clearTimeout(requestTimeoutId); + + const resJson = await res.json(); + const message = this.extractMessage(resJson); + options.onFinish(message); + } + } catch (e) { + console.log("[Request] failed to make a chat request", e); + options.onError?.(e as Error); + } + } + usage(): Promise { + throw new Error("Method not implemented."); + } + async models(): Promise { + return []; + } + path(path: string): string { + return "/api/google/" + path; + } +} diff --git a/app/components/exporter.tsx b/app/components/exporter.tsx index 8cae7ac97..70b4ab91f 100644 --- a/app/components/exporter.tsx +++ b/app/components/exporter.tsx @@ -29,10 +29,11 @@ import NextImage from 
"next/image"; import { toBlob, toPng } from "html-to-image"; import { DEFAULT_MASK_AVATAR } from "../store/mask"; -import { api } from "../client/api"; + import { prettyObject } from "../utils/format"; -import { EXPORT_MESSAGE_CLASS_NAME } from "../constant"; +import { EXPORT_MESSAGE_CLASS_NAME, ModelProvider } from "../constant"; import { getClientConfig } from "../config/client"; +import { ClientApi } from "../client/api"; const Markdown = dynamic(async () => (await import("./markdown")).Markdown, { loading: () => , @@ -301,10 +302,17 @@ export function PreviewActions(props: { }) { const [loading, setLoading] = useState(false); const [shouldExport, setShouldExport] = useState(false); - + const config = useAppConfig(); const onRenderMsgs = (msgs: ChatMessage[]) => { setShouldExport(false); + var api: ClientApi; + if (config.modelConfig.model === "gemini") { + api = new ClientApi(ModelProvider.Gemini); + } else { + api = new ClientApi(ModelProvider.GPT); + } + api .share(msgs) .then((res) => { diff --git a/app/components/home.tsx b/app/components/home.tsx index 811cbdf51..928c2d905 100644 --- a/app/components/home.tsx +++ b/app/components/home.tsx @@ -12,7 +12,7 @@ import LoadingIcon from "../icons/three-dots.svg"; import { getCSSVar, useMobileScreen } from "../utils"; import dynamic from "next/dynamic"; -import { Path, SlotID } from "../constant"; +import { ModelProvider, Path, SlotID } from "../constant"; import { ErrorBoundary } from "./error"; import { getISOLang, getLang } from "../locales"; @@ -27,7 +27,7 @@ import { SideBar } from "./sidebar"; import { useAppConfig } from "../store/config"; import { AuthPage } from "./auth"; import { getClientConfig } from "../config/client"; -import { api } from "../client/api"; +import { ClientApi } from "../client/api"; import { useAccessStore } from "../store"; export function Loading(props: { noLogo?: boolean }) { @@ -128,7 +128,8 @@ function Screen() { const isHome = location.pathname === Path.Home; const isAuth = location.pathname === Path.Auth; const isMobileScreen = useMobileScreen(); - const shouldTightBorder = getClientConfig()?.isApp || (config.tightBorder && !isMobileScreen); + const shouldTightBorder = + getClientConfig()?.isApp || (config.tightBorder && !isMobileScreen); useEffect(() => { loadAsyncGoogleFont(); @@ -169,6 +170,12 @@ function Screen() { export function useLoadData() { const config = useAppConfig(); + var api: ClientApi; + if (config.modelConfig.model === "gemini") { + api = new ClientApi(ModelProvider.Gemini); + } else { + api = new ClientApi(ModelProvider.GPT); + } useEffect(() => { (async () => { const models = await api.llm.models(); diff --git a/app/components/model-config.tsx b/app/components/model-config.tsx index 214a18c79..a077b82cb 100644 --- a/app/components/model-config.tsx +++ b/app/components/model-config.tsx @@ -29,7 +29,7 @@ export function ModelConfigList(props: { .filter((v) => v.available) .map((v, i) => ( ))} diff --git a/app/components/settings.tsx b/app/components/settings.tsx index f53024d6c..9a622af31 100644 --- a/app/components/settings.tsx +++ b/app/components/settings.tsx @@ -52,6 +52,7 @@ import { copyToClipboard } from "../utils"; import Link from "next/link"; import { Azure, + Google, OPENAI_BASE_URL, Path, RELEASE_URL, @@ -635,7 +636,8 @@ export function Settings() { navigate(Path.Home); } }; - if (clientConfig?.isApp) { // Force to set custom endpoint to true if it's app + if (clientConfig?.isApp) { + // Force to set custom endpoint to true if it's app accessStore.update((state) => { 
state.useCustomConfig = true; }); @@ -997,7 +999,7 @@ export function Settings() { /> - ) : ( + ) : accessStore.provider === "Azure" ? ( <> - )} + ) : accessStore.provider === "Google" ? ( + <> + + + accessStore.update( + (access) => + (access.googleUrl = e.currentTarget.value), + ) + } + > + + + { + accessStore.update( + (access) => + (access.googleApiKey = e.currentTarget.value), + ); + }} + /> + + + + accessStore.update( + (access) => + (access.googleApiVersion = + e.currentTarget.value), + ) + } + > + + + ) : null} )} diff --git a/app/config/server.ts b/app/config/server.ts index 2398805a2..becad8429 100644 --- a/app/config/server.ts +++ b/app/config/server.ts @@ -80,6 +80,9 @@ export const getServerSideConfig = () => { azureApiKey: process.env.AZURE_API_KEY, azureApiVersion: process.env.AZURE_API_VERSION, + googleApiKey: process.env.GOOGLE_API_KEY, + googleUrl: process.env.GOOGLE_URL, + needCode: ACCESS_CODES.size > 0, code: process.env.CODE, codes: ACCESS_CODES, diff --git a/app/constant.ts b/app/constant.ts index 69d5c511f..1f6a647da 100644 --- a/app/constant.ts +++ b/app/constant.ts @@ -12,6 +12,8 @@ export const DEFAULT_CORS_HOST = "https://a.nextweb.fun"; export const DEFAULT_API_HOST = `${DEFAULT_CORS_HOST}/api/proxy`; export const OPENAI_BASE_URL = "https://api.openai.com"; +export const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/"; + export enum Path { Home = "/", Chat = "/chat", @@ -65,6 +67,12 @@ export const EXPORT_MESSAGE_CLASS_NAME = "export-markdown"; export enum ServiceProvider { OpenAI = "OpenAI", Azure = "Azure", + Google = "Google", +} + +export enum ModelProvider { + GPT = "GPT", + Gemini = "Gemini", } export const OpenaiPath = { @@ -78,6 +86,14 @@ export const Azure = { ExampleEndpoint: "https://{resource-url}/openai/deployments/{deploy-id}", }; +export const Google = { + ExampleEndpoint: + "https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent", + ChatPath: "v1beta/models/gemini-pro:generateContent", + + // /api/openai/v1/chat/completions +}; + export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang export const DEFAULT_SYSTEM_TEMPLATE = ` You are ChatGPT, a large language model trained by OpenAI. 
@@ -100,58 +116,137 @@ export const DEFAULT_MODELS = [ { name: "gpt-4", available: true, + provider: { + id: "openai", + providerName: "OpenAI", + providerType: "openai", + }, }, { name: "gpt-4-0314", available: true, + provider: { + id: "openai", + providerName: "OpenAI", + providerType: "openai", + }, }, { name: "gpt-4-0613", available: true, + provider: { + id: "openai", + providerName: "OpenAI", + providerType: "openai", + }, }, { name: "gpt-4-32k", available: true, + provider: { + id: "openai", + providerName: "OpenAI", + providerType: "openai", + }, }, { name: "gpt-4-32k-0314", available: true, + provider: { + id: "openai", + providerName: "OpenAI", + providerType: "openai", + }, }, { name: "gpt-4-32k-0613", available: true, + provider: { + id: "openai", + providerName: "OpenAI", + providerType: "openai", + }, }, { name: "gpt-4-1106-preview", available: true, + provider: { + id: "openai", + providerName: "OpenAI", + providerType: "openai", + }, }, { name: "gpt-4-vision-preview", available: true, + provider: { + id: "openai", + providerName: "OpenAI", + providerType: "openai", + }, }, { name: "gpt-3.5-turbo", available: true, + provider: { + id: "openai", + providerName: "OpenAI", + providerType: "openai", + }, }, { name: "gpt-3.5-turbo-0301", available: true, + provider: { + id: "openai", + providerName: "OpenAI", + providerType: "openai", + }, }, { name: "gpt-3.5-turbo-0613", available: true, + provider: { + id: "openai", + providerName: "OpenAI", + providerType: "openai", + }, }, { name: "gpt-3.5-turbo-1106", available: true, + provider: { + id: "openai", + providerName: "OpenAI", + providerType: "openai", + }, }, { name: "gpt-3.5-turbo-16k", available: true, + provider: { + id: "openai", + providerName: "OpenAI", + providerType: "openai", + }, }, { name: "gpt-3.5-turbo-16k-0613", available: true, + provider: { + id: "openai", + providerName: "OpenAI", + providerType: "openai", + }, + }, + { + name: "gemini", + available: true, + provider: { + id: "google", + providerName: "Google", + providerType: "google", + }, }, ] as const; diff --git a/app/locales/cn.ts b/app/locales/cn.ts index 50dd44284..42270b2fb 100644 --- a/app/locales/cn.ts +++ b/app/locales/cn.ts @@ -312,6 +312,23 @@ const cn = { SubTitle: "选择指定的部分版本", }, }, + Google: { + ApiKey: { + Title: "接口密钥", + SubTitle: "使用自定义 Google AI Studio API Key 绕过密码访问限制", + Placeholder: "Google AI Studio API Key", + }, + + Endpoint: { + Title: "接口地址", + SubTitle: "样例:", + }, + + ApiVerion: { + Title: "接口版本 (gemini api version)", + SubTitle: "选择指定的部分版本", + }, + }, CustomModel: { Title: "自定义模型名", SubTitle: "增加自定义模型可选项,使用英文逗号隔开", @@ -347,7 +364,7 @@ const cn = { Prompt: { History: (content: string) => "这是历史聊天总结作为前情提要:" + content, Topic: - "使用四到五个字直接返回这句话的简要主题,不要解释、不要标点、不要语气词、不要多余文本,如果没有主题,请直接返回“闲聊”", + "使用四到五个字直接返回这句话的简要主题,不要解释、不要标点、不要语气词、不要多余文本,不要加粗,如果没有主题,请直接返回“闲聊”", Summarize: "简要总结一下对话内容,用作后续的上下文提示 prompt,控制在 200 字以内", }, @@ -441,9 +458,9 @@ const cn = { Config: "配置", }, Exporter: { - Description : { - Title: "只有清除上下文之后的消息会被展示" - }, + Description: { + Title: "只有清除上下文之后的消息会被展示", + }, Model: "模型", Messages: "消息", Topic: "主题", diff --git a/app/store/access.ts b/app/store/access.ts index 3b9008ba8..9e8024a6a 100644 --- a/app/store/access.ts +++ b/app/store/access.ts @@ -29,6 +29,11 @@ const DEFAULT_ACCESS_STATE = { azureApiKey: "", azureApiVersion: "2023-08-01-preview", + // google ai studio + googleUrl: "", + googleApiKey: "", + googleApiVersion: "v1", + // server config needCode: true, hideUserApiKey: false, @@ -56,6 +61,10 @@ export const 
useAccessStore = createPersistStore( return ensure(get(), ["azureUrl", "azureApiKey", "azureApiVersion"]); }, + isValidGoogle() { + return ensure(get(), ["googleApiKey"]); + }, + isAuthorized() { this.fetch(); @@ -63,6 +72,7 @@ export const useAccessStore = createPersistStore( return ( this.isValidOpenAI() || this.isValidAzure() || + this.isValidGoogle() || !this.enabledAccessControl() || (this.enabledAccessControl() && ensure(get(), ["accessCode"])) ); @@ -99,6 +109,7 @@ export const useAccessStore = createPersistStore( token: string; openaiApiKey: string; azureApiVersion: string; + googleApiKey: string; }; state.openaiApiKey = state.token; state.azureApiVersion = "2023-08-01-preview"; diff --git a/app/store/chat.ts b/app/store/chat.ts index 66a39d2b2..f53f61154 100644 --- a/app/store/chat.ts +++ b/app/store/chat.ts @@ -8,10 +8,11 @@ import { DEFAULT_INPUT_TEMPLATE, DEFAULT_SYSTEM_TEMPLATE, KnowledgeCutOffDate, + ModelProvider, StoreKey, SUMMARIZE_MODEL, } from "../constant"; -import { api, RequestMessage } from "../client/api"; +import { ClientApi, RequestMessage } from "../client/api"; import { ChatControllerPool } from "../client/controller"; import { prettyObject } from "../utils/format"; import { estimateTokenLength } from "../utils/token"; @@ -301,6 +302,13 @@ export const useChatStore = createPersistStore( ]); }); + var api: ClientApi; + if (modelConfig.model === "gemini") { + api = new ClientApi(ModelProvider.Gemini); + } else { + api = new ClientApi(ModelProvider.GPT); + } + // make request api.llm.chat({ messages: sendMessages, @@ -379,22 +387,26 @@ export const useChatStore = createPersistStore( // system prompts, to get close to OpenAI Web ChatGPT const shouldInjectSystemPrompts = modelConfig.enableInjectSystemPrompts; - const systemPrompts = shouldInjectSystemPrompts - ? [ - createMessage({ - role: "system", - content: fillTemplateWith("", { - ...modelConfig, - template: DEFAULT_SYSTEM_TEMPLATE, + + var systemPrompts: ChatMessage[] = []; + if (modelConfig.model !== "gemini") { + systemPrompts = shouldInjectSystemPrompts + ? [ + createMessage({ + role: "system", + content: fillTemplateWith("", { + ...modelConfig, + template: DEFAULT_SYSTEM_TEMPLATE, + }), }), - }), - ] - : []; - if (shouldInjectSystemPrompts) { - console.log( - "[Global System Prompt] ", - systemPrompts.at(0)?.content ?? "empty", - ); + ] + : []; + if (shouldInjectSystemPrompts) { + console.log( + "[Global System Prompt] ", + systemPrompts.at(0)?.content ?? "empty", + ); + } } // long term memory @@ -473,6 +485,14 @@ export const useChatStore = createPersistStore( summarizeSession() { const config = useAppConfig.getState(); const session = get().currentSession(); + const modelConfig = session.mask.modelConfig; + + var api: ClientApi; + if (modelConfig.model === "gemini") { + api = new ClientApi(ModelProvider.Gemini); + } else { + api = new ClientApi(ModelProvider.GPT); + } // remove error messages if any const messages = session.messages; @@ -504,8 +524,6 @@ export const useChatStore = createPersistStore( }, }); } - - const modelConfig = session.mask.modelConfig; const summarizeIndex = Math.max( session.lastSummarizeIndex, session.clearContextIndex ?? 
0, diff --git a/app/store/update.ts b/app/store/update.ts index 2ab7ec19a..3c88866dc 100644 --- a/app/store/update.ts +++ b/app/store/update.ts @@ -1,5 +1,4 @@ import { FETCH_COMMIT_URL, FETCH_TAG_URL, StoreKey } from "../constant"; -import { api } from "../client/api"; import { getClientConfig } from "../config/client"; import { createPersistStore } from "../utils/store"; import ChatGptIcon from "../icons/chatgpt.png"; @@ -85,35 +84,40 @@ export const useUpdateStore = createPersistStore( })); if (window.__TAURI__?.notification && isApp) { // Check if notification permission is granted - await window.__TAURI__?.notification.isPermissionGranted().then((granted) => { - if (!granted) { - return; - } else { - // Request permission to show notifications - window.__TAURI__?.notification.requestPermission().then((permission) => { - if (permission === 'granted') { - if (version === remoteId) { - // Show a notification using Tauri - window.__TAURI__?.notification.sendNotification({ - title: "NextChat", - body: `${Locale.Settings.Update.IsLatest}`, - icon: `${ChatGptIcon.src}`, - sound: "Default" - }); - } else { - const updateMessage = Locale.Settings.Update.FoundUpdate(`${remoteId}`); - // Show a notification for the new version using Tauri - window.__TAURI__?.notification.sendNotification({ - title: "NextChat", - body: updateMessage, - icon: `${ChatGptIcon.src}`, - sound: "Default" - }); - } - } - }); - } - }); + await window.__TAURI__?.notification + .isPermissionGranted() + .then((granted) => { + if (!granted) { + return; + } else { + // Request permission to show notifications + window.__TAURI__?.notification + .requestPermission() + .then((permission) => { + if (permission === "granted") { + if (version === remoteId) { + // Show a notification using Tauri + window.__TAURI__?.notification.sendNotification({ + title: "NextChat", + body: `${Locale.Settings.Update.IsLatest}`, + icon: `${ChatGptIcon.src}`, + sound: "Default", + }); + } else { + const updateMessage = + Locale.Settings.Update.FoundUpdate(`${remoteId}`); + // Show a notification for the new version using Tauri + window.__TAURI__?.notification.sendNotification({ + title: "NextChat", + body: updateMessage, + icon: `${ChatGptIcon.src}`, + sound: "Default", + }); + } + } + }); + } + }); } console.log("[Got Upstream] ", remoteId); } catch (error) { diff --git a/app/utils/model.ts b/app/utils/model.ts index 74b28a66a..16bcc19c4 100644 --- a/app/utils/model.ts +++ b/app/utils/model.ts @@ -4,10 +4,7 @@ export function collectModelTable( models: readonly LLMModel[], customModels: string, ) { - const modelTable: Record< - string, - { available: boolean; name: string; displayName: string } - > = {}; + const modelTable: { [key: string]: LLMModel } = {}; // default models models.forEach( @@ -37,6 +34,7 @@ export function collectModelTable( name, displayName: displayName || name, available, + provider: modelTable[name].provider, }; }); return modelTable; From ae0d68c27e4f7f65d9467e724561fab3e924400d Mon Sep 17 00:00:00 2001 From: Fred Liang Date: Sun, 24 Dec 2023 02:22:32 +0800 Subject: [PATCH 17/40] fix: fix llm models field --- app/client/api.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/app/client/api.ts b/app/client/api.ts index 50865d4b9..163e5fc55 100644 --- a/app/client/api.ts +++ b/app/client/api.ts @@ -45,7 +45,6 @@ export interface LLMUsage { export interface LLMModel { name: string; - displayName: string; available: boolean; provider: LLMModelProvider; } From 45798f993d3ae852206398b25ef4fda4f642f412 Mon Sep 17 00:00:00 2001 From: 
Fred Liang Date: Sun, 24 Dec 2023 02:39:06 +0800 Subject: [PATCH 18/40] fix: fix type errors --- app/client/platforms/openai.ts | 5 +++++ app/components/settings.tsx | 1 + app/locales/en.ts | 22 ++++++++++++++++++++-- app/store/update.ts | 12 +++++++++++- app/utils/model.ts | 10 +++++++++- 5 files changed, 46 insertions(+), 4 deletions(-) diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts index 8ea864692..68a0fda75 100644 --- a/app/client/platforms/openai.ts +++ b/app/client/platforms/openai.ts @@ -323,6 +323,11 @@ export class ChatGPTApi implements LLMApi { return chatModels.map((m) => ({ name: m.id, available: true, + provider: { + id: "openai", + providerName: "OpenAI", + providerType: "openai", + }, })); } } diff --git a/app/components/settings.tsx b/app/components/settings.tsx index 9a622af31..409af64d3 100644 --- a/app/components/settings.tsx +++ b/app/components/settings.tsx @@ -584,6 +584,7 @@ export function Settings() { const accessStore = useAccessStore(); const shouldHideBalanceQuery = useMemo(() => { const isOpenAiUrl = accessStore.openaiUrl.includes(OPENAI_BASE_URL); + return ( accessStore.hideBalanceQuery || isOpenAiUrl || diff --git a/app/locales/en.ts b/app/locales/en.ts index f90cffd4c..4d437ffd8 100644 --- a/app/locales/en.ts +++ b/app/locales/en.ts @@ -319,6 +319,24 @@ const en: LocaleType = { Title: "Custom Models", SubTitle: "Custom model options, seperated by comma", }, + Google: { + ApiKey: { + Title: "API Key", + SubTitle: + "Bypass password access restrictions using a custom Google AI Studio API Key", + Placeholder: "Google AI Studio API Key", + }, + + Endpoint: { + Title: "Endpoint Address", + SubTitle: "Example:", + }, + + ApiVerion: { + Title: "API Version (gemini api version)", + SubTitle: "Select a specific part version", + }, + }, }, Model: "Model", @@ -443,8 +461,8 @@ const en: LocaleType = { }, Exporter: { Description: { - Title: "Only messages after clearing the context will be displayed" - }, + Title: "Only messages after clearing the context will be displayed", + }, Model: "Model", Messages: "Messages", Topic: "Topic", diff --git a/app/store/update.ts b/app/store/update.ts index 3c88866dc..7253caffc 100644 --- a/app/store/update.ts +++ b/app/store/update.ts @@ -1,8 +1,16 @@ -import { FETCH_COMMIT_URL, FETCH_TAG_URL, StoreKey } from "../constant"; +import { + FETCH_COMMIT_URL, + FETCH_TAG_URL, + ModelProvider, + StoreKey, +} from "../constant"; import { getClientConfig } from "../config/client"; import { createPersistStore } from "../utils/store"; import ChatGptIcon from "../icons/chatgpt.png"; import Locale from "../locales"; +import { use } from "react"; +import { useAppConfig } from "."; +import { ClientApi } from "../client/api"; const ONE_MINUTE = 60 * 1000; const isApp = !!getClientConfig()?.isApp; @@ -126,6 +134,7 @@ export const useUpdateStore = createPersistStore( }, async updateUsage(force = false) { + // only support openai for now const overOneMinute = Date.now() - get().lastUpdateUsage >= ONE_MINUTE; if (!overOneMinute && !force) return; @@ -134,6 +143,7 @@ export const useUpdateStore = createPersistStore( })); try { + const api = new ClientApi(ModelProvider.GPT); const usage = await api.llm.usage(); if (usage) { diff --git a/app/utils/model.ts b/app/utils/model.ts index 16bcc19c4..c4a4833ed 100644 --- a/app/utils/model.ts +++ b/app/utils/model.ts @@ -4,7 +4,15 @@ export function collectModelTable( models: readonly LLMModel[], customModels: string, ) { - const modelTable: { [key: string]: LLMModel } = {}; + const 
modelTable: Record< + string, + { + available: boolean; + name: string; + displayName: string; + provider: LLMModel["provider"]; + } + > = {}; // default models models.forEach( From 4169431f2c5d78345de7704dda4872d7d5e7790f Mon Sep 17 00:00:00 2001 From: Fred Liang Date: Sun, 24 Dec 2023 03:05:23 +0800 Subject: [PATCH 19/40] fix: fix add api auth --- app/api/google/[...path]/route.ts | 5 ++--- app/client/api.ts | 4 ++-- app/client/platforms/google.ts | 8 +++----- 3 files changed, 7 insertions(+), 10 deletions(-) diff --git a/app/api/google/[...path]/route.ts b/app/api/google/[...path]/route.ts index 5b19740af..ec5d65523 100644 --- a/app/api/google/[...path]/route.ts +++ b/app/api/google/[...path]/route.ts @@ -43,9 +43,8 @@ async function handle( 10 * 60 * 1000, ); - const fetchUrl = `${baseUrl}/${path}?key=${req.nextUrl.searchParams.get( - "key", - )}`; + const key = req.nextUrl.searchParams.get("key") ?? serverConfig.googleApiKey; + const fetchUrl = `${baseUrl}/${path}?key=${key}`; const fetchOptions: RequestInit = { headers: { diff --git a/app/client/api.ts b/app/client/api.ts index 163e5fc55..3215f45ba 100644 --- a/app/client/api.ts +++ b/app/client/api.ts @@ -145,8 +145,8 @@ export function getHeaders() { "Content-Type": "application/json", "x-requested-with": "XMLHttpRequest", }; - - const isGoogle = accessStore.provider === ServiceProvider.Google; + const modelConfig = useChatStore.getState().currentSession().mask.modelConfig; + const isGoogle = modelConfig.model === "gemini"; const isAzure = accessStore.provider === ServiceProvider.Azure; const authHeader = isAzure ? "api-key" : "Authorization"; const apiKey = isGoogle diff --git a/app/client/platforms/google.ts b/app/client/platforms/google.ts index 90584571e..b44def99f 100644 --- a/app/client/platforms/google.ts +++ b/app/client/platforms/google.ts @@ -8,6 +8,7 @@ import { import { prettyObject } from "@/app/utils/format"; import { getClientConfig } from "@/app/config/client"; import Locale from "../../locales"; +import { getServerSideConfig } from "@/app/config/server"; export class GeminiApi implements LLMApi { extractMessage(res: any) { console.log("[Response] gemini response: ", res); @@ -30,8 +31,6 @@ export class GeminiApi implements LLMApi { model: options.config.model, }, }; - const accessStore = useAccessStore.getState(); - const requestPayload = { contents: messages, // stream: options.config.stream, @@ -44,7 +43,7 @@ export class GeminiApi implements LLMApi { // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore. 
}; - console.log("[Request] openai payload: ", requestPayload); + console.log("[Request] google payload: ", requestPayload); // todo: support stream later const shouldStream = false; @@ -52,8 +51,7 @@ export class GeminiApi implements LLMApi { options.onController?.(controller); try { - const chatPath = - this.path(Google.ChatPath) + `?key=${accessStore.googleApiKey}`; + const chatPath = this.path(Google.ChatPath); const chatPayload = { method: "POST", body: JSON.stringify(requestPayload), From 20f2f61349d68b3720623d37803fe968868c834a Mon Sep 17 00:00:00 2001 From: Fred Liang Date: Sun, 24 Dec 2023 03:44:13 +0800 Subject: [PATCH 20/40] feat: support set api key from client side --- app/api/google/[...path]/route.ts | 4 +++- app/client/platforms/google.ts | 1 - app/components/auth.tsx | 11 +++++++++++ app/locales/cn.ts | 2 +- app/locales/en.ts | 2 +- 5 files changed, 16 insertions(+), 4 deletions(-) diff --git a/app/api/google/[...path]/route.ts b/app/api/google/[...path]/route.ts index ec5d65523..95b1c6418 100644 --- a/app/api/google/[...path]/route.ts +++ b/app/api/google/[...path]/route.ts @@ -43,7 +43,9 @@ async function handle( 10 * 60 * 1000, ); - const key = req.nextUrl.searchParams.get("key") ?? serverConfig.googleApiKey; + const bearToken = req.headers.get("Authorization") ?? ""; + const token = bearToken.trim().replaceAll("Bearer ", "").trim(); + const key = token ?? serverConfig.googleApiKey; const fetchUrl = `${baseUrl}/${path}?key=${key}`; const fetchOptions: RequestInit = { diff --git a/app/client/platforms/google.ts b/app/client/platforms/google.ts index b44def99f..edff13a2b 100644 --- a/app/client/platforms/google.ts +++ b/app/client/platforms/google.ts @@ -49,7 +49,6 @@ export class GeminiApi implements LLMApi { const shouldStream = false; const controller = new AbortController(); options.onController?.(controller); - try { const chatPath = this.path(Google.ChatPath); const chatPayload = { diff --git a/app/components/auth.tsx b/app/components/auth.tsx index 7962d46be..57118349b 100644 --- a/app/components/auth.tsx +++ b/app/components/auth.tsx @@ -64,6 +64,17 @@ export function AuthPage() { ); }} /> + { + accessStore.update( + (access) => (access.googleApiKey = e.currentTarget.value), + ); + }} + /> ) : null} diff --git a/app/locales/cn.ts b/app/locales/cn.ts index 42270b2fb..69b6c025f 100644 --- a/app/locales/cn.ts +++ b/app/locales/cn.ts @@ -13,7 +13,7 @@ const cn = { Auth: { Title: "需要密码", Tips: "管理员开启了密码验证,请在下方填入访问码", - SubTips: "或者输入你的 OpenAI API 密钥", + SubTips: "或者输入你的 OpenAI 或 Google API 密钥", Input: "在此处填写访问码", Confirm: "确认", Later: "稍后再说", diff --git a/app/locales/en.ts b/app/locales/en.ts index 4d437ffd8..21aa63833 100644 --- a/app/locales/en.ts +++ b/app/locales/en.ts @@ -15,7 +15,7 @@ const en: LocaleType = { Auth: { Title: "Need Access Code", Tips: "Please enter access code below", - SubTips: "Or enter your OpenAI API Key", + SubTips: "Or enter your OpenAI or Google API Key", Input: "access code", Confirm: "Confirm", Later: "Later", From 7d9a2132cbdafa896ca6523a284d38ec880328b2 Mon Sep 17 00:00:00 2001 From: Fred Liang Date: Sun, 24 Dec 2023 03:57:30 +0800 Subject: [PATCH 21/40] fix: fix server token fetch policy --- app/api/google/[...path]/route.ts | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/app/api/google/[...path]/route.ts b/app/api/google/[...path]/route.ts index 95b1c6418..dcd8a65b0 100644 --- a/app/api/google/[...path]/route.ts +++ b/app/api/google/[...path]/route.ts @@ -45,7 +45,20 @@ async function handle( const 
bearToken = req.headers.get("Authorization") ?? ""; const token = bearToken.trim().replaceAll("Bearer ", "").trim(); - const key = token ?? serverConfig.googleApiKey; + + const key = token ? token : serverConfig.googleApiKey; + if (!key) { + return NextResponse.json( + { + error: true, + message: `missing GOOGLE_API_KEY in server env vars`, + }, + { + status: 401, + }, + ); + } + const fetchUrl = `${baseUrl}/${path}?key=${key}`; const fetchOptions: RequestInit = { From ae19a0dc5ff6d7dc15e811eed131eb4e1bacb63a Mon Sep 17 00:00:00 2001 From: Fred Liang Date: Sun, 24 Dec 2023 04:22:12 +0800 Subject: [PATCH 22/40] chroe: update model name --- app/client/api.ts | 8 ++++---- app/client/platforms/google.ts | 4 ++-- app/components/exporter.tsx | 4 ++-- app/components/home.tsx | 4 ++-- app/constant.ts | 4 ++-- app/locales/cn.ts | 2 +- app/locales/en.ts | 2 +- app/store/chat.ts | 10 +++++----- 8 files changed, 19 insertions(+), 19 deletions(-) diff --git a/app/client/api.ts b/app/client/api.ts index 3215f45ba..30a220ea4 100644 --- a/app/client/api.ts +++ b/app/client/api.ts @@ -7,7 +7,7 @@ import { } from "../constant"; import { ChatMessage, ModelType, useAccessStore, useChatStore } from "../store"; import { ChatGPTApi } from "./platforms/openai"; -import { GeminiApi } from "./platforms/google"; +import { GeminiProApi } from "./platforms/google"; export const ROLES = ["system", "user", "assistant"] as const; export type MessageRole = (typeof ROLES)[number]; @@ -86,8 +86,8 @@ export class ClientApi { public llm: LLMApi; constructor(provider: ModelProvider = ModelProvider.GPT) { - if (provider === ModelProvider.Gemini) { - this.llm = new GeminiApi(); + if (provider === ModelProvider.GeminiPro) { + this.llm = new GeminiProApi(); return; } this.llm = new ChatGPTApi(); @@ -146,7 +146,7 @@ export function getHeaders() { "x-requested-with": "XMLHttpRequest", }; const modelConfig = useChatStore.getState().currentSession().mask.modelConfig; - const isGoogle = modelConfig.model === "gemini"; + const isGoogle = modelConfig.model === "gemini-pro"; const isAzure = accessStore.provider === ServiceProvider.Azure; const authHeader = isAzure ? 
"api-key" : "Authorization"; const apiKey = isGoogle diff --git a/app/client/platforms/google.ts b/app/client/platforms/google.ts index edff13a2b..e1e526637 100644 --- a/app/client/platforms/google.ts +++ b/app/client/platforms/google.ts @@ -9,9 +9,9 @@ import { prettyObject } from "@/app/utils/format"; import { getClientConfig } from "@/app/config/client"; import Locale from "../../locales"; import { getServerSideConfig } from "@/app/config/server"; -export class GeminiApi implements LLMApi { +export class GeminiProApi implements LLMApi { extractMessage(res: any) { - console.log("[Response] gemini response: ", res); + console.log("[Response] gemini-pro response: ", res); return ( res?.candidates?.at(0)?.content?.parts.at(0)?.text || res?.error?.message || diff --git a/app/components/exporter.tsx b/app/components/exporter.tsx index 70b4ab91f..dff17e4ab 100644 --- a/app/components/exporter.tsx +++ b/app/components/exporter.tsx @@ -307,8 +307,8 @@ export function PreviewActions(props: { setShouldExport(false); var api: ClientApi; - if (config.modelConfig.model === "gemini") { - api = new ClientApi(ModelProvider.Gemini); + if (config.modelConfig.model === "gemini-pro") { + api = new ClientApi(ModelProvider.GeminiPro); } else { api = new ClientApi(ModelProvider.GPT); } diff --git a/app/components/home.tsx b/app/components/home.tsx index 928c2d905..4be7da0fb 100644 --- a/app/components/home.tsx +++ b/app/components/home.tsx @@ -171,8 +171,8 @@ export function useLoadData() { const config = useAppConfig(); var api: ClientApi; - if (config.modelConfig.model === "gemini") { - api = new ClientApi(ModelProvider.Gemini); + if (config.modelConfig.model === "gemini-pro") { + api = new ClientApi(ModelProvider.GeminiPro); } else { api = new ClientApi(ModelProvider.GPT); } diff --git a/app/constant.ts b/app/constant.ts index 1f6a647da..7668381c1 100644 --- a/app/constant.ts +++ b/app/constant.ts @@ -72,7 +72,7 @@ export enum ServiceProvider { export enum ModelProvider { GPT = "GPT", - Gemini = "Gemini", + GeminiPro = "GeminiPro", } export const OpenaiPath = { @@ -240,7 +240,7 @@ export const DEFAULT_MODELS = [ }, }, { - name: "gemini", + name: "gemini-pro", available: true, provider: { id: "google", diff --git a/app/locales/cn.ts b/app/locales/cn.ts index 69b6c025f..8746047fd 100644 --- a/app/locales/cn.ts +++ b/app/locales/cn.ts @@ -325,7 +325,7 @@ const cn = { }, ApiVerion: { - Title: "接口版本 (gemini api version)", + Title: "接口版本 (gemini-pro api version)", SubTitle: "选择指定的部分版本", }, }, diff --git a/app/locales/en.ts b/app/locales/en.ts index 21aa63833..768275c07 100644 --- a/app/locales/en.ts +++ b/app/locales/en.ts @@ -333,7 +333,7 @@ const en: LocaleType = { }, ApiVerion: { - Title: "API Version (gemini api version)", + Title: "API Version (gemini-pro api version)", SubTitle: "Select a specific part version", }, }, diff --git a/app/store/chat.ts b/app/store/chat.ts index f53f61154..1dcf4e646 100644 --- a/app/store/chat.ts +++ b/app/store/chat.ts @@ -303,8 +303,8 @@ export const useChatStore = createPersistStore( }); var api: ClientApi; - if (modelConfig.model === "gemini") { - api = new ClientApi(ModelProvider.Gemini); + if (modelConfig.model === "gemini-pro") { + api = new ClientApi(ModelProvider.GeminiPro); } else { api = new ClientApi(ModelProvider.GPT); } @@ -389,7 +389,7 @@ export const useChatStore = createPersistStore( const shouldInjectSystemPrompts = modelConfig.enableInjectSystemPrompts; var systemPrompts: ChatMessage[] = []; - if (modelConfig.model !== "gemini") { + if (modelConfig.model !== 
"gemini-pro") { systemPrompts = shouldInjectSystemPrompts ? [ createMessage({ @@ -488,8 +488,8 @@ export const useChatStore = createPersistStore( const modelConfig = session.mask.modelConfig; var api: ClientApi; - if (modelConfig.model === "gemini") { - api = new ClientApi(ModelProvider.Gemini); + if (modelConfig.model === "gemini-pro") { + api = new ClientApi(ModelProvider.GeminiPro); } else { api = new ClientApi(ModelProvider.GPT); } From 7026bd926a979e4a32b09b2369b1e2ee4d387783 Mon Sep 17 00:00:00 2001 From: Fred Liang Date: Sun, 24 Dec 2023 04:24:38 +0800 Subject: [PATCH 23/40] chroe: update prompts --- app/locales/en.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/locales/en.ts b/app/locales/en.ts index 768275c07..367161d6b 100644 --- a/app/locales/en.ts +++ b/app/locales/en.ts @@ -371,7 +371,7 @@ const en: LocaleType = { History: (content: string) => "This is a summary of the chat history as a recap: " + content, Topic: - "Please generate a four to five word title summarizing our conversation without any lead-in, punctuation, quotation marks, periods, symbols, or additional text. Remove enclosing quotation marks.", + "Please generate a four to five word title summarizing our conversation without any lead-in, punctuation, quotation marks, periods, symbols, bold text, or additional text. Remove enclosing quotation marks.", Summarize: "Summarize the discussion briefly in 200 words or less to use as a prompt for future context.", }, From 75057f9a910a22032007db975d493a6cc02c0519 Mon Sep 17 00:00:00 2001 From: Fred Liang Date: Sun, 24 Dec 2023 04:32:25 +0800 Subject: [PATCH 24/40] chroe: support error reason for gemini pro --- app/client/platforms/google.ts | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/app/client/platforms/google.ts b/app/client/platforms/google.ts index e1e526637..3192d5d5f 100644 --- a/app/client/platforms/google.ts +++ b/app/client/platforms/google.ts @@ -12,6 +12,7 @@ import { getServerSideConfig } from "@/app/config/server"; export class GeminiProApi implements LLMApi { extractMessage(res: any) { console.log("[Response] gemini-pro response: ", res); + return ( res?.candidates?.at(0)?.content?.parts.at(0)?.text || res?.error?.message || @@ -176,6 +177,16 @@ export class GeminiProApi implements LLMApi { clearTimeout(requestTimeoutId); const resJson = await res.json(); + + if (resJson?.promptFeedback?.blockReason) { + // being blocked + options.onError?.( + new Error( + "Message is being blocked for reason: " + + resJson.promptFeedback.blockReason, + ), + ); + } const message = this.extractMessage(resJson); options.onFinish(message); } From 7c3dfb7bae37a7d0412a8696393b6189cf2a42cb Mon Sep 17 00:00:00 2001 From: Fred Liang Date: Sun, 24 Dec 2023 17:24:04 +0800 Subject: [PATCH 25/40] feat: support custom gemini pro params --- app/client/platforms/google.ts | 9 ++ app/components/model-config.tsx | 143 +++++++++++++++++--------------- 2 files changed, 83 insertions(+), 69 deletions(-) diff --git a/app/client/platforms/google.ts b/app/client/platforms/google.ts index 3192d5d5f..ec7d79564 100644 --- a/app/client/platforms/google.ts +++ b/app/client/platforms/google.ts @@ -34,6 +34,15 @@ export class GeminiProApi implements LLMApi { }; const requestPayload = { contents: messages, + generationConfig: { + // stopSequences: [ + // "Title" + // ], + temperature: modelConfig.temperature, + maxOutputTokens: modelConfig.max_tokens, + topP: modelConfig.top_p, + // "topK": modelConfig.top_k, + }, // stream: options.config.stream, // model: 
modelConfig.model, // temperature: modelConfig.temperature, diff --git a/app/components/model-config.tsx b/app/components/model-config.tsx index a077b82cb..b9f811674 100644 --- a/app/components/model-config.tsx +++ b/app/components/model-config.tsx @@ -91,79 +91,84 @@ export function ModelConfigList(props: { } > - - { - props.updateConfig( - (config) => - (config.presence_penalty = - ModalConfigValidator.presence_penalty( - e.currentTarget.valueAsNumber, - )), - ); - }} - > - - - { - props.updateConfig( - (config) => - (config.frequency_penalty = - ModalConfigValidator.frequency_penalty( - e.currentTarget.valueAsNumber, - )), - ); - }} - > - + {props.modelConfig.model === "gemini-pro" ? null : ( + <> + + { + props.updateConfig( + (config) => + (config.presence_penalty = + ModalConfigValidator.presence_penalty( + e.currentTarget.valueAsNumber, + )), + ); + }} + > + - - - props.updateConfig( - (config) => - (config.enableInjectSystemPrompts = e.currentTarget.checked), - ) - } - > - + + { + props.updateConfig( + (config) => + (config.frequency_penalty = + ModalConfigValidator.frequency_penalty( + e.currentTarget.valueAsNumber, + )), + ); + }} + > + - - - props.updateConfig( - (config) => (config.template = e.currentTarget.value), - ) - } - > - + + + props.updateConfig( + (config) => + (config.enableInjectSystemPrompts = + e.currentTarget.checked), + ) + } + > + + + + props.updateConfig( + (config) => (config.template = e.currentTarget.value), + ) + } + > + + + )} Date: Sun, 24 Dec 2023 17:37:07 +0800 Subject: [PATCH 26/40] chore: remove openai log from google api handler --- app/api/google/[...path]/route.ts | 4 ---- 1 file changed, 4 deletions(-) diff --git a/app/api/google/[...path]/route.ts b/app/api/google/[...path]/route.ts index dcd8a65b0..217556784 100644 --- a/app/api/google/[...path]/route.ts +++ b/app/api/google/[...path]/route.ts @@ -31,10 +31,6 @@ async function handle( console.log("[Proxy] ", path); console.log("[Base Url]", baseUrl); - // this fix [Org ID] undefined in server side if not using custom point - if (serverConfig.openaiOrgId !== undefined) { - console.log("[Org ID]", serverConfig.openaiOrgId); - } const timeoutId = setTimeout( () => { From 6369b902bf288b787e64ba5e9f7fa4ca0e4d1cf9 Mon Sep 17 00:00:00 2001 From: Fred Liang Date: Sun, 24 Dec 2023 17:43:41 +0800 Subject: [PATCH 27/40] chore: update README for google gemini pro model --- README.md | 8 ++++++++ README_CN.md | 9 +++++++++ 2 files changed, 17 insertions(+) diff --git a/README.md b/README.md index 874b169af..0d18a6cc4 100644 --- a/README.md +++ b/README.md @@ -191,6 +191,14 @@ Azure Api Key. Azure Api Version, find it at [Azure Documentation](https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#chat-completions). +### `GOOGLE_API_KEY` (optional) + +Google Gemini Pro Api Key. + +### `GOOGLE_URL` (optional) + +Google Gemini Pro Api Url. + ### `HIDE_USER_API_KEY` (optional) > Default: Empty diff --git a/README_CN.md b/README_CN.md index b7a95b11c..88d72d201 100644 --- a/README_CN.md +++ b/README_CN.md @@ -106,6 +106,14 @@ Azure 密钥。 Azure Api 版本,你可以在这里找到:[Azure 文档](https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#chat-completions)。 +### `GOOGLE_API_KEY` (optional) + +Google Gemini Pro 密钥. + +### `GOOGLE_URL` (optional) + +Google Gemini Pro Api Url. 
+ ### `HIDE_USER_API_KEY` (可选) 如果你不想让用户自行填入 API Key,将此环境变量设置为 1 即可。 @@ -207,6 +215,7 @@ bash <(curl -s https://raw.githubusercontent.com/Yidadaa/ChatGPT-Next-Web/main/s [见项目贡献者列表](https://github.com/Yidadaa/ChatGPT-Next-Web/graphs/contributors) ### 相关项目 + - [one-api](https://github.com/songquanpeng/one-api): 一站式大模型额度管理平台,支持市面上所有主流大语言模型 ## 开源协议 From 1e05eb1d60798039a2a4e775a23eff5e0db0fb3f Mon Sep 17 00:00:00 2001 From: Fred Liang Date: Sun, 24 Dec 2023 17:54:33 +0800 Subject: [PATCH 28/40] chore: update docs for gemini pro --- Dockerfile | 36 +++++++++++++++++++----------------- README.md | 6 +++--- README_CN.md | 4 ++-- app/config/server.ts | 4 ++++ docker-compose.yml | 6 ++++-- 5 files changed, 32 insertions(+), 24 deletions(-) diff --git a/Dockerfile b/Dockerfile index 720a0cfe9..436d39d82 100644 --- a/Dockerfile +++ b/Dockerfile @@ -16,6 +16,7 @@ FROM base AS builder RUN apk update && apk add --no-cache git ENV OPENAI_API_KEY="" +ENV GOOGLE_API_KEY="" ENV CODE="" WORKDIR /app @@ -31,6 +32,7 @@ RUN apk add proxychains-ng ENV PROXY_URL="" ENV OPENAI_API_KEY="" +ENV GOOGLE_API_KEY="" ENV CODE="" COPY --from=builder /app/public ./public @@ -41,22 +43,22 @@ COPY --from=builder /app/.next/server ./.next/server EXPOSE 3000 CMD if [ -n "$PROXY_URL" ]; then \ - export HOSTNAME="127.0.0.1"; \ - protocol=$(echo $PROXY_URL | cut -d: -f1); \ - host=$(echo $PROXY_URL | cut -d/ -f3 | cut -d: -f1); \ - port=$(echo $PROXY_URL | cut -d: -f3); \ - conf=/etc/proxychains.conf; \ - echo "strict_chain" > $conf; \ - echo "proxy_dns" >> $conf; \ - echo "remote_dns_subnet 224" >> $conf; \ - echo "tcp_read_time_out 15000" >> $conf; \ - echo "tcp_connect_time_out 8000" >> $conf; \ - echo "localnet 127.0.0.0/255.0.0.0" >> $conf; \ - echo "localnet ::1/128" >> $conf; \ - echo "[ProxyList]" >> $conf; \ - echo "$protocol $host $port" >> $conf; \ - cat /etc/proxychains.conf; \ - proxychains -f $conf node server.js; \ + export HOSTNAME="127.0.0.1"; \ + protocol=$(echo $PROXY_URL | cut -d: -f1); \ + host=$(echo $PROXY_URL | cut -d/ -f3 | cut -d: -f1); \ + port=$(echo $PROXY_URL | cut -d: -f3); \ + conf=/etc/proxychains.conf; \ + echo "strict_chain" > $conf; \ + echo "proxy_dns" >> $conf; \ + echo "remote_dns_subnet 224" >> $conf; \ + echo "tcp_read_time_out 15000" >> $conf; \ + echo "tcp_connect_time_out 8000" >> $conf; \ + echo "localnet 127.0.0.0/255.0.0.0" >> $conf; \ + echo "localnet ::1/128" >> $conf; \ + echo "[ProxyList]" >> $conf; \ + echo "$protocol $host $port" >> $conf; \ + cat /etc/proxychains.conf; \ + proxychains -f $conf node server.js; \ else \ - node server.js; \ + node server.js; \ fi diff --git a/README.md b/README.md index 0d18a6cc4..55061759b 100644 --- a/README.md +++ b/README.md @@ -5,9 +5,9 @@ English / [简体中文](./README_CN.md) -One-Click to get a well-designed cross-platform ChatGPT web UI. +One-Click to get a well-designed cross-platform ChatGPT web UI, with GPT3, GPT4 & Gemini Pro support. -一键免费部署你的跨平台私人 ChatGPT 应用。 +一键免费部署你的跨平台私人 ChatGPT 应用, 支持 GPT3, GPT4 & Gemini Pro 模型。 [![Web][Web-image]][web-url] [![Windows][Windows-image]][download-url] @@ -25,7 +25,7 @@ One-Click to get a well-designed cross-platform ChatGPT web UI. 
[MacOS-image]: https://img.shields.io/badge/-MacOS-black?logo=apple [Linux-image]: https://img.shields.io/badge/-Linux-333?logo=ubuntu -[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web) +[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&env=GOOGLE_API_KEY&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web) [![Deploy on Zeabur](https://zeabur.com/button.svg)](https://zeabur.com/templates/ZBUEFA) diff --git a/README_CN.md b/README_CN.md index 88d72d201..0f390a51c 100644 --- a/README_CN.md +++ b/README_CN.md @@ -3,7 +3,7 @@

NextChat

-一键免费部署你的私人 ChatGPT 网页应用。 +一键免费部署你的私人 ChatGPT 网页应用,支持 GPT3, GPT4 & Gemini Pro 模型。 [演示 Demo](https://chat-gpt-next-web.vercel.app/) / [反馈 Issues](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [加入 Discord](https://discord.gg/zrhvHCr79N) / [QQ 群](https://user-images.githubusercontent.com/16968934/228190818-7dd00845-e9b9-4363-97e5-44c507ac76da.jpeg) / [打赏开发者](https://user-images.githubusercontent.com/16968934/227772541-5bcd52d8-61b7-488c-a203-0330d8006e2b.jpg) / [Donate](#捐赠-donate-usdt) @@ -21,7 +21,7 @@ 1. 准备好你的 [OpenAI API Key](https://platform.openai.com/account/api-keys); 2. 点击右侧按钮开始部署: - [![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web),直接使用 Github 账号登录即可,记得在环境变量页填入 API Key 和[页面访问密码](#配置页面访问密码) CODE; + [![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&env=GOOGLE_API_KEY&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web),直接使用 Github 账号登录即可,记得在环境变量页填入 API Key 和[页面访问密码](#配置页面访问密码) CODE; 3. 部署完毕后,即可开始使用; 4. (可选)[绑定自定义域名](https://vercel.com/docs/concepts/projects/domains/add-a-domain):Vercel 分配的域名 DNS 在某些区域被污染了,绑定自定义域名即可直连。 diff --git a/app/config/server.ts b/app/config/server.ts index becad8429..83c711242 100644 --- a/app/config/server.ts +++ b/app/config/server.ts @@ -26,6 +26,10 @@ declare global { AZURE_URL?: string; // https://{azure-url}/openai/deployments/{deploy-name} AZURE_API_KEY?: string; AZURE_API_VERSION?: string; + + // google only + GOOGLE_API_KEY?: string; + GOOGLE_URL?: string; } } } diff --git a/docker-compose.yml b/docker-compose.yml index 57ca12e03..935b126a3 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,13 +1,14 @@ version: "3.9" services: chatgpt-next-web: - profiles: ["no-proxy"] + profiles: [ "no-proxy" ] container_name: chatgpt-next-web image: yidadaa/chatgpt-next-web ports: - 3000:3000 environment: - OPENAI_API_KEY=$OPENAI_API_KEY + - GOOGLE_API_KEY=$GOOGLE_API_KEY - CODE=$CODE - BASE_URL=$BASE_URL - OPENAI_ORG_ID=$OPENAI_ORG_ID @@ -18,13 +19,14 @@ services: - OPENAI_SB=$OPENAI_SB chatgpt-next-web-proxy: - profiles: ["proxy"] + profiles: [ "proxy" ] container_name: chatgpt-next-web-proxy image: yidadaa/chatgpt-next-web ports: - 3000:3000 environment: - OPENAI_API_KEY=$OPENAI_API_KEY + - GOOGLE_API_KEY=$GOOGLE_API_KEY - CODE=$CODE - PROXY_URL=$PROXY_URL - BASE_URL=$BASE_URL From 35471a41c89cdfdd2b6a59c25b24d415049556fc Mon Sep 17 00:00:00 2001 From: H0llyW00dzZ Date: Mon, 25 Dec 2023 00:30:23 +0700 Subject: [PATCH 29/40] Fix & Feat [Auth] Api Key Variable - [+] fix(auth.ts): fix variable name from serverApiKey to systemApiKey - [+] feat(auth.ts): add support for Google API key in addition to Azure API key --- app/api/auth.ts | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/app/api/auth.ts b/app/api/auth.ts index b41e34e05..043ad0d31 100644 --- a/app/api/auth.ts +++ b/app/api/auth.ts @@ -55,15 +55,18 @@ export function auth(req: NextRequest) { // if user does not provide an api key, inject system api key if (!apiKey) { - const serverApiKey = serverConfig.isAzure + const serverConfig = getServerSideConfig(); + const systemApiKey = serverConfig.isAzure ? serverConfig.azureApiKey + : serverConfig.isGoogle + ? 
serverConfig.googleApiKey : serverConfig.apiKey; - if (serverApiKey) { + if (systemApiKey) { console.log("[Auth] use system api key"); req.headers.set( "Authorization", - `${serverConfig.isAzure ? "" : "Bearer "}${serverApiKey}`, + `Bearer ${systemApiKey}`, ); } else { console.log("[Auth] admin did not provide an api key"); From 281fe6927a297bf561032dcc5e80f96a90441a84 Mon Sep 17 00:00:00 2001 From: H0llyW00dzZ Date: Mon, 25 Dec 2023 00:35:33 +0700 Subject: [PATCH 30/40] Feat [Server Side] Google Api Configuration - [+] feat(server.ts): add support for Google API configuration variables --- app/config/server.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/app/config/server.ts b/app/config/server.ts index 83c711242..bd2cee656 100644 --- a/app/config/server.ts +++ b/app/config/server.ts @@ -65,6 +65,7 @@ export const getServerSideConfig = () => { } const isAzure = !!process.env.AZURE_URL; + const isGoogle = !!process.env.GOOGLE_URL; const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? ""; const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim()); @@ -84,6 +85,7 @@ export const getServerSideConfig = () => { azureApiKey: process.env.AZURE_API_KEY, azureApiVersion: process.env.AZURE_API_VERSION, + isGoogle, googleApiKey: process.env.GOOGLE_API_KEY, googleUrl: process.env.GOOGLE_URL, From 8ca525dc7adce7860bc79ac8dfe54ab0e896a4ab Mon Sep 17 00:00:00 2001 From: H0llyW00dzZ Date: Mon, 25 Dec 2023 01:09:21 +0700 Subject: [PATCH 31/40] Fix [TypesScript] [LLM Api] Chaining Model - [+] fix(api.ts): make provider property optional in LLMModel interface --- app/client/api.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/client/api.ts b/app/client/api.ts index 30a220ea4..28ff7b162 100644 --- a/app/client/api.ts +++ b/app/client/api.ts @@ -46,7 +46,7 @@ export interface LLMUsage { export interface LLMModel { name: string; available: boolean; - provider: LLMModelProvider; + provider?: LLMModelProvider; } export interface LLMModelProvider { From e9def2cdc5bd8be91c06572de879bdeb4ddd28fb Mon Sep 17 00:00:00 2001 From: H0llyW00dzZ Date: Mon, 25 Dec 2023 01:10:28 +0700 Subject: [PATCH 32/40] Refactor [Model] [LLM Api] Chaining Model - [+] refactor(model.ts): change forEach loop to arrow function for readability and consistency - [+] fix(model.ts): mark 'provider' property as optional in modelTable type - [+] fix(model.ts): use optional chaining when assigning provider property in modelTable --- app/utils/model.ts | 33 ++++++++++++++++----------------- 1 file changed, 16 insertions(+), 17 deletions(-) diff --git a/app/utils/model.ts b/app/utils/model.ts index c4a4833ed..b2a42ef02 100644 --- a/app/utils/model.ts +++ b/app/utils/model.ts @@ -10,24 +10,23 @@ export function collectModelTable( available: boolean; name: string; displayName: string; - provider: LLMModel["provider"]; + provider?: LLMModel["provider"]; // Marked as optional } > = {}; // default models - models.forEach( - (m) => - (modelTable[m.name] = { - ...m, - displayName: m.name, - }), - ); + models.forEach((m) => { + modelTable[m.name] = { + ...m, + displayName: m.name, // 'provider' is copied over if it exists + }; + }); // server custom models customModels .split(",") .filter((v) => !!v && v.length > 0) - .map((m) => { + .forEach((m) => { const available = !m.startsWith("-"); const nameConfig = m.startsWith("+") || m.startsWith("-") ? 
m.slice(1) : m; @@ -35,15 +34,15 @@ export function collectModelTable( // enable or disable all models if (name === "all") { - Object.values(modelTable).forEach((m) => (m.available = available)); + Object.values(modelTable).forEach((model) => (model.available = available)); + } else { + modelTable[name] = { + name, + displayName: displayName || name, + available, + provider: modelTable[name]?.provider, // Use optional chaining + }; } - - modelTable[name] = { - name, - displayName: displayName || name, - available, - provider: modelTable[name].provider, - }; }); return modelTable; } From 0c116251b1c51d16e3e9b3d025c4feed8d7c069e Mon Sep 17 00:00:00 2001 From: H0llyW00dzZ Date: Mon, 25 Dec 2023 01:45:24 +0700 Subject: [PATCH 33/40] Revert "Fix [TypesScript] [LLM Api] Chaining Model" This reverts commit 8ca525dc7adce7860bc79ac8dfe54ab0e896a4ab. Reason: It's suddenly stopped lmao --- app/client/api.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/client/api.ts b/app/client/api.ts index 28ff7b162..30a220ea4 100644 --- a/app/client/api.ts +++ b/app/client/api.ts @@ -46,7 +46,7 @@ export interface LLMUsage { export interface LLMModel { name: string; available: boolean; - provider?: LLMModelProvider; + provider: LLMModelProvider; } export interface LLMModelProvider { From 753c518d33240ee3abd75152f3b11b6dc4e463b9 Mon Sep 17 00:00:00 2001 From: Fred Liang Date: Mon, 25 Dec 2023 03:46:35 +0800 Subject: [PATCH 34/40] chore: update how to identify google model --- app/config/server.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/config/server.ts b/app/config/server.ts index bd2cee656..c6251a5c2 100644 --- a/app/config/server.ts +++ b/app/config/server.ts @@ -65,7 +65,7 @@ export const getServerSideConfig = () => { } const isAzure = !!process.env.AZURE_URL; - const isGoogle = !!process.env.GOOGLE_URL; + const isGoogle = !!process.env.GOOGLE_API_KEY; const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? ""; const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim()); From 5af68ac545902e80465235051c39f068baaf9160 Mon Sep 17 00:00:00 2001 From: Fred Liang Date: Mon, 25 Dec 2023 04:07:35 +0800 Subject: [PATCH 35/40] fix: fix issue https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/3616 --- app/api/auth.ts | 13 +++++-------- app/api/google/[...path]/route.ts | 13 +++++++++++++ 2 files changed, 18 insertions(+), 8 deletions(-) diff --git a/app/api/auth.ts b/app/api/auth.ts index 043ad0d31..ed2b67271 100644 --- a/app/api/auth.ts +++ b/app/api/auth.ts @@ -16,11 +16,11 @@ function getIP(req: NextRequest) { function parseApiKey(bearToken: string) { const token = bearToken.trim().replaceAll("Bearer ", "").trim(); - const isOpenAiKey = !token.startsWith(ACCESS_CODE_PREFIX); + const isApiKey = !token.startsWith(ACCESS_CODE_PREFIX); return { - accessCode: isOpenAiKey ? "" : token.slice(ACCESS_CODE_PREFIX.length), - apiKey: isOpenAiKey ? token : "", + accessCode: isApiKey ? "" : token.slice(ACCESS_CODE_PREFIX.length), + apiKey: isApiKey ? 
token : "", }; } @@ -49,7 +49,7 @@ export function auth(req: NextRequest) { if (serverConfig.hideUserApiKey && !!apiKey) { return { error: true, - msg: "you are not allowed to access openai with your own api key", + msg: "you are not allowed to access with your own api key", }; } @@ -64,10 +64,7 @@ export function auth(req: NextRequest) { if (systemApiKey) { console.log("[Auth] use system api key"); - req.headers.set( - "Authorization", - `Bearer ${systemApiKey}`, - ); + req.headers.set("Authorization", `Bearer ${systemApiKey}`); } else { console.log("[Auth] admin did not provide an api key"); } diff --git a/app/api/google/[...path]/route.ts b/app/api/google/[...path]/route.ts index 217556784..28b9822a5 100644 --- a/app/api/google/[...path]/route.ts +++ b/app/api/google/[...path]/route.ts @@ -39,9 +39,22 @@ async function handle( 10 * 60 * 1000, ); + const authResult = auth(req); + if (authResult.error) { + return NextResponse.json(authResult, { + status: 401, + }); + } + const bearToken = req.headers.get("Authorization") ?? ""; const token = bearToken.trim().replaceAll("Bearer ", "").trim(); + console.log( + bearToken, + serverConfig.googleApiKey, + token ? token : serverConfig.googleApiKey, + ); + const key = token ? token : serverConfig.googleApiKey; if (!key) { return NextResponse.json( From cad461b1218e1d3668d4d7005032d46d2986f5d3 Mon Sep 17 00:00:00 2001 From: Fred Liang Date: Mon, 25 Dec 2023 04:09:38 +0800 Subject: [PATCH 36/40] chore: remove console log --- app/api/google/[...path]/route.ts | 6 ------ 1 file changed, 6 deletions(-) diff --git a/app/api/google/[...path]/route.ts b/app/api/google/[...path]/route.ts index 28b9822a5..7911f8483 100644 --- a/app/api/google/[...path]/route.ts +++ b/app/api/google/[...path]/route.ts @@ -49,12 +49,6 @@ async function handle( const bearToken = req.headers.get("Authorization") ?? ""; const token = bearToken.trim().replaceAll("Bearer ", "").trim(); - console.log( - bearToken, - serverConfig.googleApiKey, - token ? token : serverConfig.googleApiKey, - ); - const key = token ? 
token : serverConfig.googleApiKey; if (!key) { return NextResponse.json( From 199f29e63cd80cd97c530ed5e47a22f6106a1372 Mon Sep 17 00:00:00 2001 From: Fred Liang Date: Mon, 25 Dec 2023 04:33:47 +0800 Subject: [PATCH 37/40] chore: auto concat messages --- app/client/platforms/google.ts | 24 +++++++++++++++--------- app/store/chat.ts | 32 +++++++++++++++----------------- 2 files changed, 30 insertions(+), 26 deletions(-) diff --git a/app/client/platforms/google.ts b/app/client/platforms/google.ts index ec7d79564..c35e93cb3 100644 --- a/app/client/platforms/google.ts +++ b/app/client/platforms/google.ts @@ -21,10 +21,24 @@ export class GeminiProApi implements LLMApi { } async chat(options: ChatOptions): Promise { const messages = options.messages.map((v) => ({ - role: v.role.replace("assistant", "model").replace("system", "model"), + role: v.role.replace("assistant", "model").replace("system", "user"), parts: [{ text: v.content }], })); + // google requires that role in neighboring messages must not be the same + for (let i = 0; i < messages.length - 1; ) { + // Check if current and next item both have the role "model" + if (messages[i].role === messages[i + 1].role) { + // Concatenate the 'parts' of the current and next item + messages[i].parts = messages[i].parts.concat(messages[i + 1].parts); + // Remove the next item + messages.splice(i + 1, 1); + } else { + // Move to the next item + i++; + } + } + const modelConfig = { ...useAppConfig.getState().modelConfig, ...useChatStore.getState().currentSession().mask.modelConfig, @@ -43,14 +57,6 @@ export class GeminiProApi implements LLMApi { topP: modelConfig.top_p, // "topK": modelConfig.top_k, }, - // stream: options.config.stream, - // model: modelConfig.model, - // temperature: modelConfig.temperature, - // presence_penalty: modelConfig.presence_penalty, - // frequency_penalty: modelConfig.frequency_penalty, - // top_p: modelConfig.top_p, - // max_tokens: Math.max(modelConfig.max_tokens, 1024), - // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore. }; console.log("[Request] google payload: ", requestPayload); diff --git a/app/store/chat.ts b/app/store/chat.ts index 1dcf4e646..4af5a52ac 100644 --- a/app/store/chat.ts +++ b/app/store/chat.ts @@ -389,24 +389,22 @@ export const useChatStore = createPersistStore( const shouldInjectSystemPrompts = modelConfig.enableInjectSystemPrompts; var systemPrompts: ChatMessage[] = []; - if (modelConfig.model !== "gemini-pro") { - systemPrompts = shouldInjectSystemPrompts - ? [ - createMessage({ - role: "system", - content: fillTemplateWith("", { - ...modelConfig, - template: DEFAULT_SYSTEM_TEMPLATE, - }), + systemPrompts = shouldInjectSystemPrompts + ? [ + createMessage({ + role: "system", + content: fillTemplateWith("", { + ...modelConfig, + template: DEFAULT_SYSTEM_TEMPLATE, }), - ] - : []; - if (shouldInjectSystemPrompts) { - console.log( - "[Global System Prompt] ", - systemPrompts.at(0)?.content ?? "empty", - ); - } + }), + ] + : []; + if (shouldInjectSystemPrompts) { + console.log( + "[Global System Prompt] ", + systemPrompts.at(0)?.content ?? 
"empty", + ); } // long term memory From 5c638251f866e51d629c5e25cbe1ee11433c08f6 Mon Sep 17 00:00:00 2001 From: Fred Liang Date: Mon, 25 Dec 2023 05:12:21 +0800 Subject: [PATCH 38/40] fix: fix using different model --- app/api/auth.ts | 21 ++++++++++++++------- app/api/google/[...path]/route.ts | 6 +++--- app/api/openai/[...path]/route.ts | 24 +++++++++++++++++++++--- 3 files changed, 38 insertions(+), 13 deletions(-) diff --git a/app/api/auth.ts b/app/api/auth.ts index ed2b67271..874401a32 100644 --- a/app/api/auth.ts +++ b/app/api/auth.ts @@ -1,7 +1,7 @@ import { NextRequest } from "next/server"; import { getServerSideConfig } from "../config/server"; import md5 from "spark-md5"; -import { ACCESS_CODE_PREFIX } from "../constant"; +import { ACCESS_CODE_PREFIX, ModelProvider } from "../constant"; function getIP(req: NextRequest) { let ip = req.ip ?? req.headers.get("x-real-ip"); @@ -24,7 +24,7 @@ function parseApiKey(bearToken: string) { }; } -export function auth(req: NextRequest) { +export function auth(req: NextRequest, modelProvider: ModelProvider) { const authToken = req.headers.get("Authorization") ?? ""; // check if it is openai api key or user token @@ -56,12 +56,19 @@ export function auth(req: NextRequest) { // if user does not provide an api key, inject system api key if (!apiKey) { const serverConfig = getServerSideConfig(); - const systemApiKey = serverConfig.isAzure - ? serverConfig.azureApiKey - : serverConfig.isGoogle - ? serverConfig.googleApiKey - : serverConfig.apiKey; + // const systemApiKey = serverConfig.isAzure + // ? serverConfig.azureApiKey + // : serverConfig.isGoogle + // ? serverConfig.googleApiKey + // : serverConfig.apiKey; + + const systemApiKey = + modelProvider === ModelProvider.GeminiPro + ? serverConfig.googleApiKey + : serverConfig.isAzure + ? serverConfig.azureApiKey + : serverConfig.apiKey; if (systemApiKey) { console.log("[Auth] use system api key"); req.headers.set("Authorization", `Bearer ${systemApiKey}`); diff --git a/app/api/google/[...path]/route.ts b/app/api/google/[...path]/route.ts index 7911f8483..869bd5076 100644 --- a/app/api/google/[...path]/route.ts +++ b/app/api/google/[...path]/route.ts @@ -1,7 +1,7 @@ import { NextRequest, NextResponse } from "next/server"; import { auth } from "../../auth"; import { getServerSideConfig } from "@/app/config/server"; -import { GEMINI_BASE_URL, Google } from "@/app/constant"; +import { GEMINI_BASE_URL, Google, ModelProvider } from "@/app/constant"; async function handle( req: NextRequest, @@ -39,7 +39,7 @@ async function handle( 10 * 60 * 1000, ); - const authResult = auth(req); + const authResult = auth(req, ModelProvider.GeminiPro); if (authResult.error) { return NextResponse.json(authResult, { status: 401, @@ -50,6 +50,7 @@ async function handle( const token = bearToken.trim().replaceAll("Bearer ", "").trim(); const key = token ? 
token : serverConfig.googleApiKey; + if (!key) { return NextResponse.json( { @@ -63,7 +64,6 @@ async function handle( } const fetchUrl = `${baseUrl}/${path}?key=${key}`; - const fetchOptions: RequestInit = { headers: { "Content-Type": "application/json", diff --git a/app/api/openai/[...path]/route.ts b/app/api/openai/[...path]/route.ts index 2addd53a5..77059c151 100644 --- a/app/api/openai/[...path]/route.ts +++ b/app/api/openai/[...path]/route.ts @@ -1,6 +1,6 @@ import { type OpenAIListModelResponse } from "@/app/client/platforms/openai"; import { getServerSideConfig } from "@/app/config/server"; -import { OpenaiPath } from "@/app/constant"; +import { ModelProvider, OpenaiPath } from "@/app/constant"; import { prettyObject } from "@/app/utils/format"; import { NextRequest, NextResponse } from "next/server"; import { auth } from "../../auth"; @@ -45,7 +45,7 @@ async function handle( ); } - const authResult = auth(req); + const authResult = auth(req, ModelProvider.GPT); if (authResult.error) { return NextResponse.json(authResult, { status: 401, @@ -75,4 +75,22 @@ export const GET = handle; export const POST = handle; export const runtime = "edge"; -export const preferredRegion = ['arn1', 'bom1', 'cdg1', 'cle1', 'cpt1', 'dub1', 'fra1', 'gru1', 'hnd1', 'iad1', 'icn1', 'kix1', 'lhr1', 'pdx1', 'sfo1', 'sin1', 'syd1']; +export const preferredRegion = [ + "arn1", + "bom1", + "cdg1", + "cle1", + "cpt1", + "dub1", + "fra1", + "gru1", + "hnd1", + "iad1", + "icn1", + "kix1", + "lhr1", + "pdx1", + "sfo1", + "sin1", + "syd1", +]; From f5ed1604aa0b3b60a8fcac1cecb03f75a0a65cdb Mon Sep 17 00:00:00 2001 From: Fred Liang Date: Mon, 25 Dec 2023 05:24:01 +0800 Subject: [PATCH 39/40] fix: fix removing bearer header --- app/api/auth.ts | 6 ------ app/api/common.ts | 4 +++- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/app/api/auth.ts b/app/api/auth.ts index 874401a32..16c8034eb 100644 --- a/app/api/auth.ts +++ b/app/api/auth.ts @@ -57,12 +57,6 @@ export function auth(req: NextRequest, modelProvider: ModelProvider) { if (!apiKey) { const serverConfig = getServerSideConfig(); - // const systemApiKey = serverConfig.isAzure - // ? serverConfig.azureApiKey - // : serverConfig.isGoogle - // ? serverConfig.googleApiKey - // : serverConfig.apiKey; - const systemApiKey = modelProvider === ModelProvider.GeminiPro ? serverConfig.googleApiKey diff --git a/app/api/common.ts b/app/api/common.ts index 13cfab03c..8e029c35b 100644 --- a/app/api/common.ts +++ b/app/api/common.ts @@ -9,7 +9,9 @@ const serverConfig = getServerSideConfig(); export async function requestOpenai(req: NextRequest) { const controller = new AbortController(); - const authValue = req.headers.get("Authorization") ?? ""; + const authValue = + req.headers.get("Authorization")?.trim().replaceAll("Bearer ", "").trim() ?? + ""; const authHeaderName = serverConfig.isAzure ? 
"api-key" : "Authorization"; let path = `${req.nextUrl.pathname}${req.nextUrl.search}`.replaceAll( From 19137b79bcf17d1b1be01740dd5ed0238c784680 Mon Sep 17 00:00:00 2001 From: Fred Liang Date: Mon, 25 Dec 2023 09:56:51 +0800 Subject: [PATCH 40/40] fix: return bearer header when using openai --- app/api/common.ts | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/app/api/common.ts b/app/api/common.ts index 8e029c35b..a6f4c5721 100644 --- a/app/api/common.ts +++ b/app/api/common.ts @@ -9,9 +9,16 @@ const serverConfig = getServerSideConfig(); export async function requestOpenai(req: NextRequest) { const controller = new AbortController(); - const authValue = - req.headers.get("Authorization")?.trim().replaceAll("Bearer ", "").trim() ?? - ""; + if (serverConfig.isAzure) { + const authValue = + req.headers + .get("Authorization") + ?.trim() + .replaceAll("Bearer ", "") + .trim() ?? ""; + } else { + const authValue = req.headers.get("Authorization") ?? ""; + } const authHeaderName = serverConfig.isAzure ? "api-key" : "Authorization"; let path = `${req.nextUrl.pathname}${req.nextUrl.search}`.replaceAll(