Merge branch 'main' into fix-export-garbled

Author: fred-bf, 2024-03-07 17:07:08 +08:00 (committed by GitHub)
Commit: 2bf6111bf5
41 changed files with 1893 additions and 383 deletions


@@ -1,43 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: "[Bug] "
labels: ''
assignees: ''
---
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Deployment**
- [ ] Docker
- [ ] Vercel
- [ ] Server
**Desktop (please complete the following information):**
- OS: [e.g. iOS]
- Browser [e.g. chrome, safari]
- Version [e.g. 22]
**Smartphone (please complete the following information):**
- Device: [e.g. iPhone6]
- OS: [e.g. iOS8.1]
- Browser [e.g. stock browser, safari]
- Version [e.g. 22]
**Additional Logs**
Add any logs about the problem here.

.github/ISSUE_TEMPLATE/bug_report.yml (new file)

@@ -0,0 +1,146 @@
name: Bug report
description: Create a report to help us improve
title: "[Bug] "
labels: ["bug"]
body:
- type: markdown
attributes:
value: "## Describe the bug"
- type: textarea
id: bug-description
attributes:
label: "Bug Description"
description: "A clear and concise description of what the bug is."
placeholder: "Explain the bug..."
validations:
required: true
- type: markdown
attributes:
value: "## To Reproduce"
- type: textarea
id: steps-to-reproduce
attributes:
label: "Steps to Reproduce"
description: "Steps to reproduce the behavior:"
placeholder: |
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
validations:
required: true
- type: markdown
attributes:
value: "## Expected behavior"
- type: textarea
id: expected-behavior
attributes:
label: "Expected Behavior"
description: "A clear and concise description of what you expected to happen."
placeholder: "Describe what you expected to happen..."
validations:
required: true
- type: markdown
attributes:
value: "## Screenshots"
- type: textarea
id: screenshots
attributes:
label: "Screenshots"
description: "If applicable, add screenshots to help explain your problem."
placeholder: "Paste your screenshots here or write 'N/A' if not applicable..."
validations:
required: false
- type: markdown
attributes:
value: "## Deployment"
- type: checkboxes
id: deployment
attributes:
label: "Deployment Method"
description: "Please select the deployment method you are using."
options:
- label: "Docker"
- label: "Vercel"
- label: "Server"
- type: markdown
attributes:
value: "## Desktop (please complete the following information):"
- type: input
id: desktop-os
attributes:
label: "Desktop OS"
description: "Your desktop operating system."
placeholder: "e.g., Windows 10"
validations:
required: false
- type: input
id: desktop-browser
attributes:
label: "Desktop Browser"
description: "Your desktop browser."
placeholder: "e.g., Chrome, Safari"
validations:
required: false
- type: input
id: desktop-version
attributes:
label: "Desktop Browser Version"
description: "Version of your desktop browser."
placeholder: "e.g., 89.0"
validations:
required: false
- type: markdown
attributes:
value: "## Smartphone (please complete the following information):"
- type: input
id: smartphone-device
attributes:
label: "Smartphone Device"
description: "Your smartphone device."
placeholder: "e.g., iPhone X"
validations:
required: false
- type: input
id: smartphone-os
attributes:
label: "Smartphone OS"
description: "Your smartphone operating system."
placeholder: "e.g., iOS 14.4"
validations:
required: false
- type: input
id: smartphone-browser
attributes:
label: "Smartphone Browser"
description: "Your smartphone browser."
placeholder: "e.g., Safari"
validations:
required: false
- type: input
id: smartphone-version
attributes:
label: "Smartphone Browser Version"
description: "Version of your smartphone browser."
placeholder: "e.g., 14"
validations:
required: false
- type: markdown
attributes:
value: "## Additional Logs"
- type: textarea
id: additional-logs
attributes:
label: "Additional Logs"
description: "Add any logs about the problem here."
placeholder: "Paste any relevant logs here..."
validations:
required: false

@@ -1,20 +0,0 @@
---
name: Feature request
about: Suggest an idea for this project
title: "[Feature] "
labels: ''
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.

@@ -0,0 +1,53 @@
name: Feature request
description: Suggest an idea for this project
title: "[Feature Request]: "
labels: ["enhancement"]
body:
- type: markdown
attributes:
value: "## Is your feature request related to a problem? Please describe."
- type: textarea
id: problem-description
attributes:
label: Problem Description
description: "A clear and concise description of what the problem is. Example: I'm always frustrated when [...]"
placeholder: "Explain the problem you are facing..."
validations:
required: true
- type: markdown
attributes:
value: "## Describe the solution you'd like"
- type: textarea
id: desired-solution
attributes:
label: Solution Description
description: A clear and concise description of what you want to happen.
placeholder: "Describe the solution you'd like..."
validations:
required: true
- type: markdown
attributes:
value: "## Describe alternatives you've considered"
- type: textarea
id: alternatives-considered
attributes:
label: Alternatives Considered
description: A clear and concise description of any alternative solutions or features you've considered.
placeholder: "Describe any alternative solutions or features you've considered..."
validations:
required: false
- type: markdown
attributes:
value: "## Additional context"
- type: textarea
id: additional-context
attributes:
label: Additional Context
description: Add any other context or screenshots about the feature request here.
placeholder: "Add any other context or screenshots about the feature request here..."
validations:
required: false

@@ -1,24 +0,0 @@
---
name: Feature suggestion
about: Tell us about your bright idea
title: "[Feature] "
labels: ''
assignees: ''
---
> To make communication more efficient, we have set up an official QQ group and QQ channel. If you run into any problem while using or deploying the project, please join the group or channel and ask there first. Unless it is a reliably reproducible bug or a fairly creative feature suggestion, please do not post low-quality, meaningless posts in the Issues area.
> [Click to join the official group chat](https://github.com/Yidadaa/ChatGPT-Next-Web/discussions/1724)
**Is this feature related to an existing problem?**
If so, please link to it or describe the problem here.
**What feature do you want, or what do you suggest?**
Just tell us.
**Are there comparable products we can refer to?**
Feel free to provide links or screenshots of reference products.
**Additional information**
Tell us about any other considerations.

@@ -1,36 +0,0 @@
---
name: Report an issue
about: Tell us about the problem you ran into
title: "[Bug] "
labels: ''
assignees: ''
---
> To make communication more efficient, we have set up an official QQ group and QQ channel. If you run into any problem while using or deploying the project, please join the group or channel and ask there first. Unless it is a reliably reproducible bug or a fairly creative feature suggestion, please do not post low-quality, meaningless posts in the Issues area.
> [Click to join the official group chat](https://github.com/Yidadaa/ChatGPT-Next-Web/discussions/1724)
**Before you report**
⚠️ Note: any post that does not follow this template will be closed immediately. Without the information below we cannot locate your problem.
Enter an x between the brackets below to confirm that you have read the related material.
- [ ] I confirm that I searched the [FAQ](https://github.com/Yidadaa/ChatGPT-Next-Web/blob/main/docs/faq-cn.md) for this issue and found no answer;
- [ ] I confirm that I searched the [Issues](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) list (including closed ones) for this issue and found no answer.
- [ ] I confirm that I searched the [Vercel deployment guide](https://github.com/Yidadaa/ChatGPT-Next-Web/blob/main/docs/vercel-cn.md) for this issue and found no answer.
**Describe the problem**
Describe the problem you ran into here.
**How to reproduce**
Tell us what you did to trigger the problem.
**Screenshots**
Provide console screenshots, screen captures, or server-side log screenshots here.
**Required information**
- OS: [e.g. windows 10 / macos 12 / linux / android 11 / ios 16]
- Browser: [e.g. chrome, safari]
- Version: [the version number shown on the settings page]
- Deployment: [e.g. vercel, docker, or self-hosted server]


@@ -43,12 +43,9 @@ jobs:
           - os: ubuntu-latest
             arch: x86_64
             rust_target: x86_64-unknown-linux-gnu
-          - os: macos-latest
-            arch: x86_64
-            rust_target: x86_64-apple-darwin
           - os: macos-latest
             arch: aarch64
-            rust_target: aarch64-apple-darwin
+            rust_target: x86_64-apple-darwin,aarch64-apple-darwin
           - os: windows-latest
             arch: x86_64
             rust_target: x86_64-pc-windows-msvc
@@ -60,13 +57,14 @@ jobs:
         uses: actions/setup-node@v3
         with:
           node-version: 18
+          cache: 'yarn'
       - name: install Rust stable
         uses: dtolnay/rust-toolchain@stable
         with:
           targets: ${{ matrix.config.rust_target }}
       - uses: Swatinem/rust-cache@v2
         with:
-          key: ${{ matrix.config.rust_target }}
+          key: ${{ matrix.config.os }}
       - name: install dependencies (ubuntu only)
         if: matrix.config.os == 'ubuntu-latest'
         run: |
@@ -79,8 +77,15 @@ jobs:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
           TAURI_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
           TAURI_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }}
+          APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
+          APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
+          APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }}
+          APPLE_ID: ${{ secrets.APPLE_ID }}
+          APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
+          APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
         with:
           releaseId: ${{ needs.create-release.outputs.release_id }}
+          args: ${{ matrix.config.os == 'macos-latest' && '--target universal-apple-darwin' || '' }}
   publish-release:
     permissions:


@@ -5,6 +5,7 @@ on:
     types:
       - opened
       - synchronize
+      - reopened
 env:
   VERCEL_TEAM: ${{ secrets.VERCEL_TEAM }}


@@ -14,9 +14,17 @@ export type MessageRole = (typeof ROLES)[number];
 export const Models = ["gpt-3.5-turbo", "gpt-4"] as const;
 export type ChatModel = ModelType;
 
+export interface MultimodalContent {
+  type: "text" | "image_url";
+  text?: string;
+  image_url?: {
+    url: string;
+  };
+}
+
 export interface RequestMessage {
   role: MessageRole;
-  content: string;
+  content: string | MultimodalContent[];
 }
 
 export interface LLMConfig {
@@ -143,11 +151,10 @@ export function getHeaders() {
   const accessStore = useAccessStore.getState();
   const headers: Record<string, string> = {
     "Content-Type": "application/json",
-    "x-requested-with": "XMLHttpRequest",
-    "Accept": "application/json",
+    Accept: "application/json",
   };
   const modelConfig = useChatStore.getState().currentSession().mask.modelConfig;
-  const isGoogle = modelConfig.model === "gemini-pro";
+  const isGoogle = modelConfig.model.startsWith("gemini");
   const isAzure = accessStore.provider === ServiceProvider.Azure;
   const authHeader = isAzure ? "api-key" : "Authorization";
   const apiKey = isGoogle
@@ -155,10 +162,12 @@ export function getHeaders() {
     : isAzure
       ? accessStore.azureApiKey
       : accessStore.openaiApiKey;
+  const clientConfig = getClientConfig();
   const makeBearer = (s: string) => `${isAzure ? "" : "Bearer "}${s.trim()}`;
   const validString = (x: string) => x && x.length > 0;
+  // when using google api in app, not set auth header
+  if (!(isGoogle && clientConfig?.isApp)) {
     // use user's api key first
     if (validString(apiKey)) {
       headers[authHeader] = makeBearer(apiKey);
@@ -170,6 +179,7 @@ export function getHeaders() {
         ACCESS_CODE_PREFIX + accessStore.accessCode,
       );
     }
+  }
   return headers;
 }
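Note (not part of the diff): with RequestMessage.content widened to string | MultimodalContent[], a mixed text-and-image user message looks roughly like the sketch below; the import path and the base64 payload are illustrative placeholders.

import { MultimodalContent, RequestMessage } from "@/app/client/api";

// A text-only message keeps the old shape.
const textMessage: RequestMessage = { role: "user", content: "Hello" };

// A vision message carries an array of typed parts instead of a plain string.
const visionMessage: RequestMessage = {
  role: "user",
  content: [
    { type: "text", text: "What is in this picture?" },
    { type: "image_url", image_url: { url: "data:image/png;base64,<placeholder>" } },
  ],
};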


@@ -1,15 +1,14 @@
 import { Google, REQUEST_TIMEOUT_MS } from "@/app/constant";
 import { ChatOptions, getHeaders, LLMApi, LLMModel, LLMUsage } from "../api";
 import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
-import {
-  EventStreamContentType,
-  fetchEventSource,
-} from "@fortaine/fetch-event-source";
-import { prettyObject } from "@/app/utils/format";
 import { getClientConfig } from "@/app/config/client";
-import Locale from "../../locales";
-import { getServerSideConfig } from "@/app/config/server";
-import de from "@/app/locales/de";
+import { DEFAULT_API_HOST } from "@/app/constant";
+import {
+  getMessageTextContent,
+  getMessageImages,
+  isVisionModel,
+} from "@/app/utils";
 export class GeminiProApi implements LLMApi {
   extractMessage(res: any) {
     console.log("[Response] gemini-pro response: ", res);
@@ -21,11 +20,34 @@ export class GeminiProApi implements LLMApi {
     );
   }
   async chat(options: ChatOptions): Promise<void> {
-    const apiClient = this;
-    const messages = options.messages.map((v) => ({
+    // const apiClient = this;
+    const visionModel = isVisionModel(options.config.model);
+    let multimodal = false;
+    const messages = options.messages.map((v) => {
+      let parts: any[] = [{ text: getMessageTextContent(v) }];
+      if (visionModel) {
+        const images = getMessageImages(v);
+        if (images.length > 0) {
+          multimodal = true;
+          parts = parts.concat(
+            images.map((image) => {
+              const imageType = image.split(";")[0].split(":")[1];
+              const imageData = image.split(",")[1];
+              return {
+                inline_data: {
+                  mime_type: imageType,
+                  data: imageData,
+                },
+              };
+            }),
+          );
+        }
+      }
+      return {
         role: v.role.replace("assistant", "model").replace("system", "user"),
-      parts: [{ text: v.content }],
-    }));
+        parts: parts,
+      };
+    });
     // google requires that role in neighboring messages must not be the same
     for (let i = 0; i < messages.length - 1; ) {
@@ -40,7 +62,9 @@ export class GeminiProApi implements LLMApi {
         i++;
       }
     }
+    // if (visionModel && messages.length > 1) {
+    //   options.onError?.(new Error("Multiturn chat is not enabled for models/gemini-pro-vision"));
+    // }
     const modelConfig = {
       ...useAppConfig.getState().modelConfig,
       ...useChatStore.getState().currentSession().mask.modelConfig,
@@ -79,13 +103,30 @@ export class GeminiProApi implements LLMApi {
       ],
     };
-    console.log("[Request] google payload: ", requestPayload);
+    const accessStore = useAccessStore.getState();
+    let baseUrl = accessStore.googleUrl;
+    const isApp = !!getClientConfig()?.isApp;
-    const shouldStream = !!options.config.stream;
+    let shouldStream = !!options.config.stream;
     const controller = new AbortController();
     options.onController?.(controller);
     try {
-      const chatPath = this.path(Google.ChatPath);
+      let googleChatPath = visionModel
+        ? Google.VisionChatPath
+        : Google.ChatPath;
+      let chatPath = this.path(googleChatPath);
+
+      // let baseUrl = accessStore.googleUrl;
+
+      if (!baseUrl) {
+        baseUrl = isApp
+          ? DEFAULT_API_HOST + "/api/proxy/google/" + googleChatPath
+          : chatPath;
+      }
+
+      if (isApp) {
+        baseUrl += `?key=${accessStore.googleApiKey}`;
+      }
       const chatPayload = {
         method: "POST",
         body: JSON.stringify(requestPayload),
@@ -101,10 +142,6 @@ export class GeminiProApi implements LLMApi {
       if (shouldStream) {
         let responseText = "";
         let remainText = "";
-        let streamChatPath = chatPath.replace(
-          "generateContent",
-          "streamGenerateContent",
-        );
         let finished = false;
         let existingTexts: string[] = [];
@@ -134,7 +171,11 @@ export class GeminiProApi implements LLMApi {
         // start animaion
         animateResponseText();
-        fetch(streamChatPath, chatPayload)
+        fetch(
+          baseUrl.replace("generateContent", "streamGenerateContent"),
+          chatPayload,
+        )
           .then((response) => {
             const reader = response?.body?.getReader();
             const decoder = new TextDecoder();
@@ -145,6 +186,19 @@ export class GeminiProApi implements LLMApi {
               value,
             }): Promise<any> {
               if (done) {
+                if (response.status !== 200) {
+                  try {
+                    let data = JSON.parse(ensureProperEnding(partialData));
+                    if (data && data[0].error) {
+                      options.onError?.(new Error(data[0].error.message));
+                    } else {
+                      options.onError?.(new Error("Request failed"));
+                    }
+                  } catch (_) {
+                    options.onError?.(new Error("Request failed"));
+                  }
+                }
                 console.log("Stream complete");
                 // options.onFinish(responseText + remainText);
                 finished = true;
@@ -185,11 +239,9 @@ export class GeminiProApi implements LLMApi {
             console.error("Error:", error);
           });
       } else {
-        const res = await fetch(chatPath, chatPayload);
+        const res = await fetch(baseUrl, chatPayload);
         clearTimeout(requestTimeoutId);
         const resJson = await res.json();
         if (resJson?.promptFeedback?.blockReason) {
           // being blocked
           options.onError?.(
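Aside (not from the diff): the image handling above assumes each attached image is stored as a data URL, which is split into a MIME type and a base64 payload before being sent to Gemini as inline_data. A minimal TypeScript sketch with a placeholder payload:

// "data:<mime>;base64,<payload>" -> { mime_type, data }
const image = "data:image/jpeg;base64,<placeholder>";

const mimeType = image.split(";")[0].split(":")[1]; // "image/jpeg"
const base64Data = image.split(",")[1];             // "<placeholder>"

const part = {
  inline_data: { mime_type: mimeType, data: base64Data },
};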


@@ -1,3 +1,4 @@
+"use client";
 import {
   ApiPath,
   DEFAULT_API_HOST,
@@ -8,7 +9,14 @@ import {
 } from "@/app/constant";
 import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
-import { ChatOptions, getHeaders, LLMApi, LLMModel, LLMUsage } from "../api";
+import {
+  ChatOptions,
+  getHeaders,
+  LLMApi,
+  LLMModel,
+  LLMUsage,
+  MultimodalContent,
+} from "../api";
 import Locale from "../../locales";
 import {
   EventStreamContentType,
@@ -17,6 +25,11 @@ import {
 import { prettyObject } from "@/app/utils/format";
 import { getClientConfig } from "@/app/config/client";
 import { makeAzurePath } from "@/app/azure";
+import {
+  getMessageTextContent,
+  getMessageImages,
+  isVisionModel,
+} from "@/app/utils";
 export interface OpenAIListModelResponse {
   object: string;
@@ -45,7 +58,9 @@ export class ChatGPTApi implements LLMApi {
     if (baseUrl.length === 0) {
       const isApp = !!getClientConfig()?.isApp;
-      baseUrl = isApp ? DEFAULT_API_HOST : ApiPath.OpenAI;
+      baseUrl = isApp
+        ? DEFAULT_API_HOST + "/proxy" + ApiPath.OpenAI
+        : ApiPath.OpenAI;
     }
     if (baseUrl.endsWith("/")) {
@@ -59,6 +74,8 @@ export class ChatGPTApi implements LLMApi {
       path = makeAzurePath(path, accessStore.azureApiVersion);
     }
+    console.log("[Proxy Endpoint] ", baseUrl, path);
+
     return [baseUrl, path].join("/");
   }
@@ -67,9 +84,10 @@ export class ChatGPTApi implements LLMApi {
   }
   async chat(options: ChatOptions) {
+    const visionModel = isVisionModel(options.config.model);
     const messages = options.messages.map((v) => ({
       role: v.role,
-      content: v.content,
+      content: visionModel ? v.content : getMessageTextContent(v),
     }));
     const modelConfig = {
@@ -92,6 +110,16 @@ export class ChatGPTApi implements LLMApi {
       // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
     };
+    // add max_tokens to vision model
+    if (visionModel) {
+      Object.defineProperty(requestPayload, "max_tokens", {
+        enumerable: true,
+        configurable: true,
+        writable: true,
+        value: modelConfig.max_tokens,
+      });
+    }
+
     console.log("[Request] openai payload: ", requestPayload);
     const shouldStream = !!options.config.stream;
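getMessageTextContent and getMessageImages are imported from "@/app/utils" but their bodies are not part of this diff; a plausible sketch of the contract they are used with here (string passthrough, otherwise collect the text and image_url parts of a MultimodalContent[]):

import { MultimodalContent, RequestMessage } from "@/app/client/api";

export function getMessageTextContent(message: RequestMessage): string {
  if (typeof message.content === "string") return message.content;
  // Concatenate all text parts of a multimodal message.
  return message.content
    .filter((part) => part.type === "text")
    .map((part) => part.text ?? "")
    .join("");
}

export function getMessageImages(message: RequestMessage): string[] {
  if (typeof message.content === "string") return [];
  // Collect the image URLs (data URLs in this app) of a multimodal message.
  return message.content
    .filter((part) => part.type === "image_url")
    .map((part) => part.image_url?.url ?? "")
    .filter((url) => url.length > 0);
}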


@@ -1,5 +1,47 @@
 @import "../styles/animation.scss";
 
+.attach-images {
+  position: absolute;
+  left: 30px;
+  bottom: 32px;
+  display: flex;
+}
+
+.attach-image {
+  cursor: default;
+  width: 64px;
+  height: 64px;
+  border: rgba($color: #888, $alpha: 0.2) 1px solid;
+  border-radius: 5px;
+  margin-right: 10px;
+  background-size: cover;
+  background-position: center;
+  background-color: var(--white);
+
+  .attach-image-mask {
+    width: 100%;
+    height: 100%;
+    opacity: 0;
+    transition: all ease 0.2s;
+  }
+
+  .attach-image-mask:hover {
+    opacity: 1;
+  }
+
+  .delete-image {
+    width: 24px;
+    height: 24px;
+    cursor: pointer;
+    display: flex;
+    align-items: center;
+    justify-content: center;
+    border-radius: 5px;
+    float: right;
+    background-color: var(--white);
+  }
+}
+
 .chat-input-actions {
   display: flex;
   flex-wrap: wrap;
@@ -189,12 +231,10 @@
   animation: slide-in ease 0.3s;
 
-  $linear: linear-gradient(
-    to right,
+  $linear: linear-gradient(to right,
       rgba(0, 0, 0, 0),
       rgba(0, 0, 0, 1),
-    rgba(0, 0, 0, 0)
-  );
+      rgba(0, 0, 0, 0));
   mask-image: $linear;
 
   @mixin show {
@@ -327,7 +367,7 @@
   }
 }
 
-.chat-message-user > .chat-message-container {
+.chat-message-user>.chat-message-container {
   align-items: flex-end;
 }
@@ -349,6 +389,7 @@
     padding: 7px;
   }
 }
+
 /* Specific styles for iOS devices */
 @media screen and (max-device-width: 812px) and (-webkit-min-device-pixel-ratio: 2) {
   @supports (-webkit-touch-callout: none) {
@@ -381,6 +422,64 @@
   transition: all ease 0.3s;
 }
 
+.chat-message-item-image {
+  width: 100%;
+  margin-top: 10px;
+}
+
+.chat-message-item-images {
+  width: 100%;
+  display: grid;
+  justify-content: left;
+  grid-gap: 10px;
+  grid-template-columns: repeat(var(--image-count), auto);
+  margin-top: 10px;
+}
+
+.chat-message-item-image-multi {
+  object-fit: cover;
+  background-size: cover;
+  background-position: center;
+  background-repeat: no-repeat;
+}
+
+.chat-message-item-image,
+.chat-message-item-image-multi {
+  box-sizing: border-box;
+  border-radius: 10px;
+  border: rgba($color: #888, $alpha: 0.2) 1px solid;
+}
+
+@media only screen and (max-width: 600px) {
+  $calc-image-width: calc(100vw/3*2/var(--image-count));
+
+  .chat-message-item-image-multi {
+    width: $calc-image-width;
+    height: $calc-image-width;
+  }
+
+  .chat-message-item-image {
+    max-width: calc(100vw/3*2);
+  }
+}
+
+@media screen and (min-width: 600px) {
+  $max-image-width: calc(calc(1200px - var(--sidebar-width))/3*2/var(--image-count));
+  $image-width: calc(calc(var(--window-width) - var(--sidebar-width))/3*2/var(--image-count));
+
+  .chat-message-item-image-multi {
+    width: $image-width;
+    height: $image-width;
+    max-width: $max-image-width;
+    max-height: $max-image-width;
+  }
+
+  .chat-message-item-image {
+    max-width: calc(calc(1200px - var(--sidebar-width))/3*2);
+  }
+}
+
 .chat-message-action-date {
   font-size: 12px;
   opacity: 0.2;
@@ -395,7 +494,7 @@
   z-index: 1;
 }
 
-.chat-message-user > .chat-message-container > .chat-message-item {
+.chat-message-user>.chat-message-container>.chat-message-item {
   background-color: var(--second);
 
   &:hover {
@@ -460,6 +559,7 @@
     @include single-line();
   }
+
   .hint-content {
     font-size: 12px;
@@ -474,15 +574,26 @@
 }
 
 .chat-input-panel-inner {
+  cursor: text;
   display: flex;
   flex: 1;
+  border-radius: 10px;
+  border: var(--border-in-light);
+}
+
+.chat-input-panel-inner-attach {
+  padding-bottom: 80px;
+}
+
+.chat-input-panel-inner:has(.chat-input:focus) {
+  border: 1px solid var(--primary);
 }
 
 .chat-input {
   height: 100%;
   width: 100%;
   border-radius: 10px;
-  border: var(--border-in-light);
+  border: none;
   box-shadow: 0 -2px 5px rgba(0, 0, 0, 0.03);
   background-color: var(--white);
   color: var(--black);
@@ -494,9 +605,7 @@
   min-height: 68px;
 }
 
-.chat-input:focus {
-  border: 1px solid var(--primary);
-}
+.chat-input:focus {}
 
 .chat-input-send {
   background-color: var(--primary);


@@ -6,6 +6,7 @@ import React, {
   useMemo,
   useCallback,
   Fragment,
+  RefObject,
 } from "react";
 
 import SendWhiteIcon from "../icons/send-white.svg";
@@ -15,6 +16,7 @@ import ExportIcon from "../icons/share.svg";
 import ReturnIcon from "../icons/return.svg";
 import CopyIcon from "../icons/copy.svg";
 import LoadingIcon from "../icons/three-dots.svg";
+import LoadingButtonIcon from "../icons/loading.svg";
 import PromptIcon from "../icons/prompt.svg";
 import MaskIcon from "../icons/mask.svg";
 import MaxIcon from "../icons/max.svg";
@@ -27,6 +29,7 @@ import PinIcon from "../icons/pin.svg";
 import EditIcon from "../icons/rename.svg";
 import ConfirmIcon from "../icons/confirm.svg";
 import CancelIcon from "../icons/cancel.svg";
+import ImageIcon from "../icons/image.svg";
 
 import LightIcon from "../icons/light.svg";
 import DarkIcon from "../icons/dark.svg";
@@ -53,6 +56,10 @@ import {
   selectOrCopy,
   autoGrowTextArea,
   useMobileScreen,
+  getMessageTextContent,
+  getMessageImages,
+  isVisionModel,
+  compressImage,
 } from "../utils";
 
 import dynamic from "next/dynamic";
@@ -89,6 +96,7 @@ import { prettyObject } from "../utils/format";
 import { ExportMessageModal } from "./exporter";
 import { getClientConfig } from "../config/client";
 import { useAllModels } from "../utils/hooks";
+import { MultimodalContent } from "../client/api";
 
 const Markdown = dynamic(async () => (await import("./markdown")).Markdown, {
   loading: () => <LoadingIcon />,
@@ -375,11 +383,13 @@ function ChatAction(props: {
   );
 }
 
-function useScrollToBottom() {
+function useScrollToBottom(
+  scrollRef: RefObject<HTMLDivElement>,
+  detach: boolean = false,
+) {
   // for auto-scroll
-  const scrollRef = useRef<HTMLDivElement>(null);
   const [autoScroll, setAutoScroll] = useState(true);
 
   function scrollDomToBottom() {
     const dom = scrollRef.current;
     if (dom) {
@@ -392,7 +402,7 @@ function useScrollToBottom() {
   // auto scroll
   useEffect(() => {
-    if (autoScroll) {
+    if (autoScroll && !detach) {
       scrollDomToBottom();
     }
   });
@@ -406,10 +416,14 @@ function useScrollToBottom() {
 }
 
 export function ChatActions(props: {
+  uploadImage: () => void;
+  setAttachImages: (images: string[]) => void;
+  setUploading: (uploading: boolean) => void;
   showPromptModal: () => void;
   scrollToBottom: () => void;
   showPromptHints: () => void;
   hitBottom: boolean;
+  uploading: boolean;
 }) {
   const config = useAppConfig();
   const navigate = useNavigate();
@@ -437,8 +451,16 @@ export function ChatActions(props: {
     [allModels],
   );
   const [showModelSelector, setShowModelSelector] = useState(false);
+  const [showUploadImage, setShowUploadImage] = useState(false);
 
   useEffect(() => {
+    const show = isVisionModel(currentModel);
+    setShowUploadImage(show);
+    if (!show) {
+      props.setAttachImages([]);
+      props.setUploading(false);
+    }
+
     // if current model is not available
     // switch to first available model
     const isUnavaliableModel = !models.some((m) => m.name === currentModel);
@@ -475,6 +497,13 @@ export function ChatActions(props: {
         />
       )}
 
+      {showUploadImage && (
+        <ChatAction
+          onClick={props.uploadImage}
+          text={Locale.Chat.InputActions.UploadImage}
+          icon={props.uploading ? <LoadingButtonIcon /> : <ImageIcon />}
+        />
+      )}
       <ChatAction
         onClick={nextTheme}
         text={Locale.Chat.InputActions.Theme[theme]}
@@ -610,6 +639,14 @@ export function EditMessageModal(props: { onClose: () => void }) {
   );
 }
 
+export function DeleteImageButton(props: { deleteImage: () => void }) {
+  return (
+    <div className={styles["delete-image"]} onClick={props.deleteImage}>
+      <DeleteIcon />
+    </div>
+  );
+}
+
 function _Chat() {
   type RenderMessage = ChatMessage & { preview?: boolean };
 
@@ -624,10 +661,22 @@ function _Chat() {
   const [userInput, setUserInput] = useState("");
   const [isLoading, setIsLoading] = useState(false);
   const { submitKey, shouldSubmit } = useSubmitHandler();
-  const { scrollRef, setAutoScroll, scrollDomToBottom } = useScrollToBottom();
+  const scrollRef = useRef<HTMLDivElement>(null);
+  const isScrolledToBottom = scrollRef?.current
+    ? Math.abs(
+        scrollRef.current.scrollHeight -
+          (scrollRef.current.scrollTop + scrollRef.current.clientHeight),
+      ) <= 1
+    : false;
+  const { setAutoScroll, scrollDomToBottom } = useScrollToBottom(
+    scrollRef,
+    isScrolledToBottom,
+  );
   const [hitBottom, setHitBottom] = useState(true);
   const isMobileScreen = useMobileScreen();
   const navigate = useNavigate();
+  const [attachImages, setAttachImages] = useState<string[]>([]);
+  const [uploading, setUploading] = useState(false);
 
   // prompt hints
   const promptStore = usePromptStore();
@@ -705,7 +754,10 @@ function _Chat() {
       return;
     }
     setIsLoading(true);
-    chatStore.onUserInput(userInput).then(() => setIsLoading(false));
+    chatStore
+      .onUserInput(userInput, attachImages)
+      .then(() => setIsLoading(false));
+    setAttachImages([]);
     localStorage.setItem(LAST_INPUT_KEY, userInput);
     setUserInput("");
     setPromptHints([]);
@@ -783,9 +835,9 @@ function _Chat() {
   };
 
   const onRightClick = (e: any, message: ChatMessage) => {
     // copy to clipboard
-    if (selectOrCopy(e.currentTarget, message.content)) {
+    if (selectOrCopy(e.currentTarget, getMessageTextContent(message))) {
       if (userInput.length === 0) {
-        setUserInput(message.content);
+        setUserInput(getMessageTextContent(message));
       }
 
       e.preventDefault();
@@ -853,7 +905,9 @@ function _Chat() {
 
     // resend the message
     setIsLoading(true);
-    chatStore.onUserInput(userMessage.content).then(() => setIsLoading(false));
+    const textContent = getMessageTextContent(userMessage);
+    const images = getMessageImages(userMessage);
+    chatStore.onUserInput(textContent, images).then(() => setIsLoading(false));
     inputRef.current?.focus();
   };
 
@@ -962,7 +1016,6 @@ function _Chat() {
     setHitBottom(isHitBottom);
     setAutoScroll(isHitBottom);
   };
-
   function scrollToBottom() {
     setMsgRenderIndex(renderMessages.length - CHAT_PAGE_SIZE);
     scrollDomToBottom();
@@ -1048,6 +1101,92 @@ function _Chat() {
     // eslint-disable-next-line react-hooks/exhaustive-deps
   }, []);
 
+  const handlePaste = useCallback(
+    async (event: React.ClipboardEvent<HTMLTextAreaElement>) => {
+      const currentModel = chatStore.currentSession().mask.modelConfig.model;
+      if(!isVisionModel(currentModel)){return;}
+      const items = (event.clipboardData || window.clipboardData).items;
+      for (const item of items) {
+        if (item.kind === "file" && item.type.startsWith("image/")) {
+          event.preventDefault();
+          const file = item.getAsFile();
+          if (file) {
+            const images: string[] = [];
+            images.push(...attachImages);
+            images.push(
+              ...(await new Promise<string[]>((res, rej) => {
+                setUploading(true);
+                const imagesData: string[] = [];
+                compressImage(file, 256 * 1024)
+                  .then((dataUrl) => {
+                    imagesData.push(dataUrl);
+                    setUploading(false);
+                    res(imagesData);
+                  })
+                  .catch((e) => {
+                    setUploading(false);
+                    rej(e);
+                  });
+              })),
+            );
+            const imagesLength = images.length;
+
+            if (imagesLength > 3) {
+              images.splice(3, imagesLength - 3);
+            }
+            setAttachImages(images);
+          }
+        }
+      }
+    },
+    [attachImages, chatStore],
+  );
+
+  async function uploadImage() {
+    const images: string[] = [];
+    images.push(...attachImages);
+
+    images.push(
+      ...(await new Promise<string[]>((res, rej) => {
+        const fileInput = document.createElement("input");
+        fileInput.type = "file";
+        fileInput.accept =
+          "image/png, image/jpeg, image/webp, image/heic, image/heif";
+        fileInput.multiple = true;
+        fileInput.onchange = (event: any) => {
+          setUploading(true);
+          const files = event.target.files;
+          const imagesData: string[] = [];
+          for (let i = 0; i < files.length; i++) {
+            const file = event.target.files[i];
+            compressImage(file, 256 * 1024)
+              .then((dataUrl) => {
+                imagesData.push(dataUrl);
+                if (
+                  imagesData.length === 3 ||
+                  imagesData.length === files.length
+                ) {
+                  setUploading(false);
+                  res(imagesData);
+                }
+              })
+              .catch((e) => {
+                setUploading(false);
+                rej(e);
+              });
+          }
+        };
+        fileInput.click();
+      })),
+    );
+
+    const imagesLength = images.length;
+    if (imagesLength > 3) {
+      images.splice(3, imagesLength - 3);
+    }
+    setAttachImages(images);
+  }
+
   return (
     <div className={styles.chat} key={session.id}>
       <div className="window-header" data-tauri-drag-region>
@@ -1154,15 +1293,29 @@ function _Chat() {
             onClick={async () => {
               const newMessage = await showPrompt(
                 Locale.Chat.Actions.Edit,
-                message.content,
+                getMessageTextContent(message),
                 10,
               );
+              let newContent: string | MultimodalContent[] =
+                newMessage;
+              const images = getMessageImages(message);
+              if (images.length > 0) {
+                newContent = [{ type: "text", text: newMessage }];
+                for (let i = 0; i < images.length; i++) {
+                  newContent.push({
+                    type: "image_url",
+                    image_url: {
+                      url: images[i],
+                    },
+                  });
+                }
+              }
               chatStore.updateCurrentSession((session) => {
                 const m = session.mask.context
                   .concat(session.messages)
                   .find((m) => m.id === message.id);
                 if (m) {
-                  m.content = newMessage;
+                  m.content = newContent;
                 }
               });
             }}
@@ -1217,7 +1370,11 @@ function _Chat() {
             <ChatAction
               text={Locale.Chat.Actions.Copy}
               icon={<CopyIcon />}
-              onClick={() => copyToClipboard(message.content)}
+              onClick={() =>
+                copyToClipboard(
+                  getMessageTextContent(message),
+                )
+              }
             />
           </>
         )}
@@ -1232,7 +1389,7 @@ function _Chat() {
       )}
       <div className={styles["chat-message-item"]}>
         <Markdown
-          content={message.content}
+          content={getMessageTextContent(message)}
           loading={
             (message.preview || message.streaming) &&
             message.content.length === 0 &&
@@ -1241,12 +1398,42 @@ function _Chat() {
          onContextMenu={(e) => onRightClick(e, message)}
          onDoubleClickCapture={() => {
            if (!isMobileScreen) return;
-           setUserInput(message.content);
+           setUserInput(getMessageTextContent(message));
          }}
          fontSize={fontSize}
          parentRef={scrollRef}
          defaultShow={i >= messages.length - 6}
        />
+        {getMessageImages(message).length == 1 && (
+          <img
+            className={styles["chat-message-item-image"]}
+            src={getMessageImages(message)[0]}
+            alt=""
+          />
+        )}
+        {getMessageImages(message).length > 1 && (
+          <div
+            className={styles["chat-message-item-images"]}
+            style={
+              {
+                "--image-count": getMessageImages(message).length,
+              } as React.CSSProperties
+            }
+          >
+            {getMessageImages(message).map((image, index) => {
+              return (
+                <img
+                  className={
+                    styles["chat-message-item-image-multi"]
+                  }
+                  key={index}
+                  src={image}
+                  alt=""
+                />
+              );
+            })}
+          </div>
+        )}
      </div>
 
      <div className={styles["chat-message-action-date"]}>
@@ -1266,9 +1453,13 @@ function _Chat() {
      <PromptHints prompts={promptHints} onPromptSelect={onPromptSelect} />
 
      <ChatActions
+       uploadImage={uploadImage}
+       setAttachImages={setAttachImages}
+       setUploading={setUploading}
        showPromptModal={() => setShowPromptModal(true)}
        scrollToBottom={scrollToBottom}
        hitBottom={hitBottom}
+       uploading={uploading}
        showPromptHints={() => {
          // Click again to close
          if (promptHints.length > 0) {
@@ -1281,8 +1472,16 @@ function _Chat() {
          onSearch("");
        }}
      />
-     <div className={styles["chat-input-panel-inner"]}>
+     <label
+       className={`${styles["chat-input-panel-inner"]} ${
+         attachImages.length != 0
+           ? styles["chat-input-panel-inner-attach"]
+           : ""
+       }`}
+       htmlFor="chat-input"
+     >
        <textarea
+         id="chat-input"
          ref={inputRef}
          className={styles["chat-input"]}
          placeholder={Locale.Chat.Input(submitKey)}
@@ -1291,12 +1490,36 @@ function _Chat() {
          onKeyDown={onInputKeyDown}
          onFocus={scrollToBottom}
          onClick={scrollToBottom}
+         onPaste={handlePaste}
          rows={inputRows}
          autoFocus={autoFocus}
          style={{
            fontSize: config.fontSize,
          }}
        />
+        {attachImages.length != 0 && (
+          <div className={styles["attach-images"]}>
+            {attachImages.map((image, index) => {
+              return (
+                <div
+                  key={index}
+                  className={styles["attach-image"]}
+                  style={{ backgroundImage: `url("${image}")` }}
+                >
+                  <div className={styles["attach-image-mask"]}>
+                    <DeleteImageButton
+                      deleteImage={() => {
+                        setAttachImages(
+                          attachImages.filter((_, i) => i !== index),
+                        );
+                      }}
+                    />
+                  </div>
+                </div>
+              );
+            })}
+          </div>
+        )}
        <IconButton
          icon={<SendWhiteIcon />}
          text={Locale.Chat.Send}
@@ -1304,7 +1527,7 @@ function _Chat() {
          type="primary"
          onClick={() => doSubmit(userInput)}
        />
-     </div>
+     </label>
    </div>
    {showExport && (
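compressImage, used by both handlePaste and uploadImage above with a 256 KB budget, also comes from ../utils and is not shown in this diff. A rough sketch of the assumed contract (File in, JPEG data URL out, quality and then dimensions reduced until the result fits the byte budget):

export function compressImage(file: File, maxSize: number): Promise<string> {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = () => {
      const img = new Image();
      img.onload = () => {
        const canvas = document.createElement("canvas");
        const ctx = canvas.getContext("2d")!;
        let quality = 0.9;
        let width = img.width;
        let height = img.height;
        let dataUrl = "";
        do {
          canvas.width = width;
          canvas.height = height;
          ctx.clearRect(0, 0, width, height);
          ctx.drawImage(img, 0, 0, width, height);
          dataUrl = canvas.toDataURL("image/jpeg", quality);
          // shrink quality first, then dimensions, until under the byte budget
          if (quality > 0.5) {
            quality -= 0.1;
          } else {
            width = Math.round(width * 0.9);
            height = Math.round(height * 0.9);
          }
        } while (dataUrl.length > maxSize && width > 100);
        resolve(dataUrl);
      };
      img.onerror = reject;
      img.src = reader.result as string;
    };
    reader.onerror = reject;
    reader.readAsDataURL(file);
  });
}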


@@ -13,7 +13,7 @@ export function getEmojiUrl(unified: string, style: EmojiStyle) {
   // Whoever owns this Content Delivery Network (CDN), I am using your CDN to serve emojis
   // Old CDN broken, so I had to switch to this one
   // Author: https://github.com/H0llyW00dzZ
-  return `https://cdn.jsdelivr.net/npm/emoji-datasource-apple/img/${style}/64/${unified}.png`;
+  return `https://fastly.jsdelivr.net/npm/emoji-datasource-apple/img/${style}/64/${unified}.png`;
 }
 
 export function AvatarPicker(props: {


@@ -94,6 +94,7 @@
   button {
     flex-grow: 1;
+
     &:not(:last-child) {
       margin-right: 10px;
     }
@@ -190,6 +191,59 @@
     pre {
       overflow: hidden;
     }
+
+    .message-image {
+      width: 100%;
+      margin-top: 10px;
+    }
+
+    .message-images {
+      display: grid;
+      justify-content: left;
+      grid-gap: 10px;
+      grid-template-columns: repeat(var(--image-count), auto);
+      margin-top: 10px;
+    }
+
+    @media screen and (max-width: 600px) {
+      $image-width: calc(calc(100vw/2)/var(--image-count));
+
+      .message-image-multi {
+        width: $image-width;
+        height: $image-width;
+      }
+
+      .message-image {
+        max-width: calc(100vw/3*2);
+      }
+    }
+
+    @media screen and (min-width: 600px) {
+      $max-image-width: calc(900px/3*2/var(--image-count));
+      $image-width: calc(80vw/3*2/var(--image-count));
+
+      .message-image-multi {
+        width: $image-width;
+        height: $image-width;
+        max-width: $max-image-width;
+        max-height: $max-image-width;
+      }
+
+      .message-image {
+        max-width: calc(100vw/3*2);
+      }
+    }
+
+    .message-image-multi {
+      object-fit: cover;
+    }
+
+    .message-image,
+    .message-image-multi {
+      box-sizing: border-box;
+      border-radius: 10px;
+      border: rgba($color: #888, $alpha: 0.2) 1px solid;
+    }
   }
 
   &-assistant {
@@ -213,6 +267,5 @@
     }
   }
 
-  .default-theme {
-  }
+  .default-theme {}
 }


@@ -12,7 +12,12 @@ import {
   showToast,
 } from "./ui-lib";
 import { IconButton } from "./button";
-import { copyToClipboard, downloadAs, useMobileScreen } from "../utils";
+import {
+  copyToClipboard,
+  downloadAs,
+  getMessageImages,
+  useMobileScreen,
+} from "../utils";
 
 import CopyIcon from "../icons/copy.svg";
 import LoadingIcon from "../icons/three-dots.svg";
@@ -34,6 +39,7 @@ import { prettyObject } from "../utils/format";
 import { EXPORT_MESSAGE_CLASS_NAME, ModelProvider } from "../constant";
 import { getClientConfig } from "../config/client";
 import { ClientApi } from "../client/api";
+import { getMessageTextContent } from "../utils";
 
 const Markdown = dynamic(async () => (await import("./markdown")).Markdown, {
   loading: () => <LoadingIcon />,
@@ -287,7 +293,7 @@ export function RenderExport(props: {
           id={`${m.role}:${i}`}
           className={EXPORT_MESSAGE_CLASS_NAME}
         >
-          <Markdown content={m.content} defaultShow />
+          <Markdown content={getMessageTextContent(m)} defaultShow />
         </div>
       ))}
     </div>
@@ -307,7 +313,7 @@ export function PreviewActions(props: {
     setShouldExport(false);
 
     var api: ClientApi;
-    if (config.modelConfig.model === "gemini-pro") {
+    if (config.modelConfig.model.startsWith("gemini")) {
       api = new ClientApi(ModelProvider.GeminiPro);
     } else {
       api = new ClientApi(ModelProvider.GPT);
@@ -580,10 +586,37 @@ export function ImagePreviewer(props: {
               <div className={styles["body"]}>
                 <Markdown
-                  content={m.content}
+                  content={getMessageTextContent(m)}
                   fontSize={config.fontSize}
                   defaultShow
                 />
+                {getMessageImages(m).length == 1 && (
+                  <img
+                    key={i}
+                    src={getMessageImages(m)[0]}
+                    alt="message"
+                    className={styles["message-image"]}
+                  />
+                )}
+                {getMessageImages(m).length > 1 && (
+                  <div
+                    className={styles["message-images"]}
+                    style={
+                      {
+                        "--image-count": getMessageImages(m).length,
+                      } as React.CSSProperties
+                    }
+                  >
+                    {getMessageImages(m).map((src, i) => (
+                      <img
+                        key={i}
+                        src={src}
+                        alt="message"
+                        className={styles["message-image-multi"]}
+                      />
+                    ))}
+                  </div>
+                )}
               </div>
             </div>
           );
@@ -602,8 +635,10 @@ export function MarkdownPreviewer(props: {
     props.messages
       .map((m) => {
         return m.role === "user"
-          ? `## ${Locale.Export.MessageFromYou}:\n${m.content}`
-          : `## ${Locale.Export.MessageFromChatGPT}:\n${m.content.trim()}`;
+          ? `## ${Locale.Export.MessageFromYou}:\n${getMessageTextContent(m)}`
+          : `## ${Locale.Export.MessageFromChatGPT}:\n${getMessageTextContent(
+              m,
+            ).trim()}`;
       })
       .join("\n\n");


@@ -171,7 +171,7 @@ export function useLoadData() {
   const config = useAppConfig();
 
   var api: ClientApi;
-  if (config.modelConfig.model === "gemini-pro") {
+  if (config.modelConfig.model.startsWith("gemini")) {
     api = new ClientApi(ModelProvider.GeminiPro);
   } else {
     api = new ClientApi(ModelProvider.GPT);


@@ -22,7 +22,7 @@ import {
   useAppConfig,
   useChatStore,
 } from "../store";
-import { ROLES } from "../client/api";
+import { MultimodalContent, ROLES } from "../client/api";
 import {
   Input,
   List,
@@ -38,7 +38,12 @@ import { useNavigate } from "react-router-dom";
 
 import chatStyle from "./chat.module.scss";
 import { useEffect, useState } from "react";
-import { copyToClipboard, downloadAs, readFromFile } from "../utils";
+import {
+  copyToClipboard,
+  downloadAs,
+  getMessageImages,
+  readFromFile,
+} from "../utils";
 import { Updater } from "../typing";
 import { ModelConfigList } from "./model-config";
 import { FileName, Path } from "../constant";
@@ -50,6 +55,7 @@ import {
   Draggable,
   OnDragEndResponder,
 } from "@hello-pangea/dnd";
+import { getMessageTextContent } from "../utils";
 
 // drag and drop helper function
 function reorder<T>(list: T[], startIndex: number, endIndex: number): T[] {
@@ -244,7 +250,7 @@ function ContextPromptItem(props: {
         </>
       )}
       <Input
-        value={props.prompt.content}
+        value={getMessageTextContent(props.prompt)}
        type="text"
        className={chatStyle["context-content"]}
        rows={focusingInput ? 5 : 1}
@@ -289,7 +295,18 @@ export function ContextPrompts(props: {
   };
 
   const updateContextPrompt = (i: number, prompt: ChatMessage) => {
-    props.updateContext((context) => (context[i] = prompt));
+    props.updateContext((context) => {
+      const images = getMessageImages(context[i]);
+      context[i] = prompt;
+      if (images.length > 0) {
+        const text = getMessageTextContent(context[i]);
+        const newContext: MultimodalContent[] = [{ type: "text", text }];
+        for (const img of images) {
+          newContext.push({ type: "image_url", image_url: { url: img } });
+        }
+        context[i].content = newContext;
+      }
+    });
   };
 
   const onDragEnd: OnDragEndResponder = (result) => {


@@ -7,6 +7,7 @@ import { MaskAvatar } from "./mask";
 import Locale from "../locales";
 
 import styles from "./message-selector.module.scss";
+import { getMessageTextContent } from "../utils";
 
 function useShiftRange() {
   const [startIndex, setStartIndex] = useState<number>();
@@ -103,7 +104,9 @@ export function MessageSelector(props: {
     const searchResults = new Set<string>();
     if (text.length > 0) {
       messages.forEach((m) =>
-        m.content.includes(text) ? searchResults.add(m.id!) : null,
+        getMessageTextContent(m).includes(text)
+          ? searchResults.add(m.id!)
+          : null,
       );
     }
     setSearchIds(searchResults);
@@ -219,7 +222,7 @@ export function MessageSelector(props: {
                 {new Date(m.date).toLocaleString()}
               </div>
               <div className={`${styles["content"]} one-line`}>
-                {m.content}
+                {getMessageTextContent(m)}
               </div>
             </div>


@@ -92,7 +92,7 @@ export function ModelConfigList(props: {
         ></input>
       </ListItem>
 
-      {props.modelConfig.model === "gemini-pro" ? null : (
+      {props.modelConfig.model.startsWith("gemini") ? null : (
         <>
           <ListItem
             title={Locale.Settings.PresencePenalty.Title}


@@ -268,7 +268,7 @@ function CheckButton() {
   const syncStore = useSyncStore();
 
   const couldCheck = useMemo(() => {
-    return syncStore.coundSync();
+    return syncStore.cloudSync();
   }, [syncStore]);
 
   const [checkState, setCheckState] = useState<
@@ -472,7 +472,7 @@ function SyncItems() {
   const promptStore = usePromptStore();
   const maskStore = useMaskStore();
   const couldSync = useMemo(() => {
-    return syncStore.coundSync();
+    return syncStore.cloudSync();
   }, [syncStore]);
 
   const [showSyncConfigModal, setShowSyncConfigModal] = useState(false);
@@ -1081,8 +1081,8 @@ export function Settings() {
                 ></input>
               </ListItem>
               <ListItem
-                title={Locale.Settings.Access.Azure.ApiKey.Title}
-                subTitle={Locale.Settings.Access.Azure.ApiKey.SubTitle}
+                title={Locale.Settings.Access.Google.ApiKey.Title}
+                subTitle={Locale.Settings.Access.Google.ApiKey.SubTitle}
               >
                 <PasswordInput
                   value={accessStore.googleApiKey}
@@ -1099,9 +1099,9 @@ export function Settings() {
                 />
               </ListItem>
               <ListItem
-                title={Locale.Settings.Access.Google.ApiVerion.Title}
+                title={Locale.Settings.Access.Google.ApiVersion.Title}
                 subTitle={
-                  Locale.Settings.Access.Google.ApiVerion.SubTitle
+                  Locale.Settings.Access.Google.ApiVersion.SubTitle
                 }
               >
                 <input


@@ -30,6 +30,9 @@ declare global {
       // google only
       GOOGLE_API_KEY?: string;
       GOOGLE_URL?: string;
+
+      // google tag manager
+      GTM_ID?: string;
     }
   }
 }


@@ -8,8 +8,7 @@ export const FETCH_COMMIT_URL = `https://api.github.com/repos/${OWNER}/${REPO}/c
 export const FETCH_TAG_URL = `https://api.github.com/repos/${OWNER}/${REPO}/tags?per_page=1`;
 export const RUNTIME_CONFIG_DOM = "danger-runtime-config";
 
-export const DEFAULT_CORS_HOST = "https://a.nextweb.fun";
-export const DEFAULT_API_HOST = `${DEFAULT_CORS_HOST}/api/proxy`;
+export const DEFAULT_API_HOST = "https://api.nextchat.dev";
 export const OPENAI_BASE_URL = "https://api.openai.com";
 
 export const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/";
@@ -89,13 +88,14 @@ export const Azure = {
 export const Google = {
   ExampleEndpoint: "https://generativelanguage.googleapis.com/",
   ChatPath: "v1beta/models/gemini-pro:generateContent",
+  VisionChatPath: "v1beta/models/gemini-pro-vision:generateContent",
 
   // /api/openai/v1/chat/completions
 };
 
 export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
 export const DEFAULT_SYSTEM_TEMPLATE = `
-You are ChatGPT, a large language model trained by OpenAI.
+You are ChatGPT, a large language model trained by {{ServiceProvider}}.
 Knowledge cutoff: {{cutoff}}
 Current model: {{model}}
 Current time: {{time}}
@@ -104,13 +104,17 @@ Latex block: $$e=mc^2$$
 `;
 
 export const SUMMARIZE_MODEL = "gpt-3.5-turbo";
+export const GEMINI_SUMMARIZE_MODEL = "gemini-pro";
 
 export const KnowledgeCutOffDate: Record<string, string> = {
   default: "2021-09",
-  "gpt-4-turbo-preview": "2023-04",
+  "gpt-4-turbo-preview": "2023-12",
   "gpt-4-1106-preview": "2023-04",
-  "gpt-4-0125-preview": "2023-04",
+  "gpt-4-0125-preview": "2023-12",
   "gpt-4-vision-preview": "2023-04",
+  // After improvements,
+  // it's now easier to add "KnowledgeCutOffDate" instead of stupid hardcoding it, as was done previously.
+  "gemini-pro": "2023-12",
 };
 
 export const DEFAULT_MODELS = [
@@ -213,6 +217,15 @@ export const DEFAULT_MODELS = [
       providerType: "openai",
     },
   },
+  {
+    name: "gpt-3.5-turbo-0125",
+    available: true,
+    provider: {
+      id: "openai",
+      providerName: "OpenAI",
+      providerType: "openai",
+    },
+  },
   {
     name: "gpt-3.5-turbo-0301",
     available: true,
@@ -267,6 +280,15 @@ export const DEFAULT_MODELS = [
       providerType: "google",
     },
   },
+  {
+    name: "gemini-pro-vision",
+    available: true,
+    provider: {
+      id: "google",
+      providerName: "Google",
+      providerType: "google",
+    },
+  },
 ] as const;
 
 export const CHAT_PAGE_SIZE = 15;
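For reference (not part of the diff), callers typically resolve a cutoff by model name and fall back to the "default" entry when a model has no explicit key; the import path below is illustrative and the values follow the updated table:

import { KnowledgeCutOffDate } from "./constant";

function cutoffFor(model: string): string {
  // Fall back to the generic cutoff for models without a dedicated entry.
  return KnowledgeCutOffDate[model] ?? KnowledgeCutOffDate.default;
}

cutoffFor("gemini-pro");         // "2023-12"
cutoffFor("gpt-3.5-turbo-0125"); // falls back to "2021-09"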

app/icons/image.svg Normal file
View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" fill="none" height="16" width="16" version="1.1" xml:space="preserve" style=""><rect id="backgroundrect" width="100%" height="100%" x="0" y="0" fill="none" stroke="none"/><g class="currentLayer" style=""><title>Layer 1</title><g id="svg_1" class="" fill="#333" fill-opacity="1"><polygon points="2.4690866470336914,2.4690725803375244 4.447190761566162,2.4690725803375244 4.447190761566162,1.6882386207580566 1.6882381439208984,1.6882386207580566 1.6882381439208984,4.44719123840332 2.4690866470336914,4.44719123840332 " id="svg_2" fill="#333" fill-opacity="1"/><polygon points="11.552804470062256,1.6882386207580566 11.552804470062256,2.4690725803375244 13.530910968780518,2.4690725803375244 13.530910968780518,4.44719123840332 14.311760425567627,4.44719123840332 14.311760425567627,1.6882386207580566 " id="svg_3" fill="#333" fill-opacity="1"/><polygon points="13.530910968780518,13.530919075012207 11.552804470062256,13.530919075012207 11.552804470062256,14.311760902404785 14.311760425567627,14.311760902404785 14.311760425567627,11.552801132202148 13.530910968780518,11.552801132202148 " id="svg_4" fill="#333" fill-opacity="1"/><polygon points="2.4690866470336914,11.552801132202148 1.6882381439208984,11.552801132202148 1.6882381439208984,14.311760902404785 4.447190761566162,14.311760902404785 4.447190761566162,13.530919075012207 2.4690866470336914,13.530919075012207 " id="svg_5" fill="#333" fill-opacity="1"/><path d="M8.830417847409231,6.243117030680995 c0.68169614081525,0 1.2363241834494423,-0.5546280426341942 1.2363241834494423,-1.2363241834494423 S9.51214001610201,3.770468663782117 8.830417847409231,3.770468663782117 s-1.2363241834494423,0.5546280426341942 -1.2363241834494423,1.2363241834494423 S8.14872170659398,6.243117030680995 8.830417847409231,6.243117030680995 z" id="svg_6" fill="#333" fill-opacity="1"/><polygon points="3.7704806327819824,12.229532241821289 12.229516506195068,12.229532241821289 12.229516506195068,9.709510803222656 10.70320463180542,8.099010467529297 8.852166652679443,9.175727844238281 6.275332450866699,7.334256172180176 3.7704806327819824,9.977211952209473 " id="svg_7" fill="#333" fill-opacity="1"/></g></g></svg>


app/icons/loading.svg Normal file
View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="#fff" style=""><rect id="backgroundrect" width="100%" height="100%" x="0" y="0" fill="none" stroke="none" style="" class="" /><g class="currentLayer" style=""><title>Layer 1</title><circle cx="4" cy="8" r="1.926" fill="#333" id="svg_1" class=""><animate attributeName="r" begin="0s" calcMode="linear" dur="0.8s" from="2" repeatCount="indefinite" to="2" values="2;1.2;2" /><animate attributeName="fill-opacity" begin="0s" calcMode="linear" dur="0.8s" from="1" repeatCount="indefinite" to="1" values="1;.5;1" /></circle><circle cx="8" cy="8" r="1.2736" fill="#333" fill-opacity=".3" id="svg_2" class=""><animate attributeName="r" begin="0s" calcMode="linear" dur="0.8s" from="1.2" repeatCount="indefinite" to="1.2" values="1.2;2;1.2" /><animate attributeName="fill-opacity" begin="0s" calcMode="linear" dur="0.8s" from=".5" repeatCount="indefinite" to=".5" values=".5;1;.5" /></circle><circle cx="12" cy="8" r="1.926" fill="#333" id="svg_3" class=""><animate attributeName="r" begin="0s" calcMode="linear" dur="0.8s" from="2" repeatCount="indefinite" to="2" values="2;1.2;2" /><animate attributeName="fill-opacity" begin="0s" calcMode="linear" dur="0.8s" from="1" repeatCount="indefinite" to="1" values="1;.5;1" /></circle></g></svg>


View File

@ -63,6 +63,7 @@ const cn = {
Masks: "所有面具", Masks: "所有面具",
Clear: "清除聊天", Clear: "清除聊天",
Settings: "对话设置", Settings: "对话设置",
UploadImage: "上传图片",
}, },
Rename: "重命名对话", Rename: "重命名对话",
Typing: "正在输入…", Typing: "正在输入…",
@ -314,19 +315,19 @@ const cn = {
}, },
Google: { Google: {
ApiKey: { ApiKey: {
Title: "接口密钥", Title: "API 密钥",
SubTitle: "使用自定义 Google AI Studio API Key 绕过密码访问限制", SubTitle: "从 Google AI 获取您的 API 密钥",
Placeholder: "Google AI Studio API Key", Placeholder: "输入您的 Google AI Studio API 密钥",
}, },
Endpoint: { Endpoint: {
Title: "接口地址", Title: "终端地址",
SubTitle: "不包含请求路径,样例:", SubTitle: "例:",
}, },
ApiVerion: { ApiVersion: {
Title: "接口版本 (gemini-pro api version)", Title: "API 版本(仅适用于 gemini-pro",
SubTitle: "选择指定的部分版本", SubTitle: "选择一个特定的 API 版本",
}, },
}, },
CustomModel: { CustomModel: {

View File

@ -65,6 +65,7 @@ const en: LocaleType = {
Masks: "Masks", Masks: "Masks",
Clear: "Clear Context", Clear: "Clear Context",
Settings: "Settings", Settings: "Settings",
UploadImage: "Upload Images",
}, },
Rename: "Rename Chat", Rename: "Rename Chat",
Typing: "Typing…", Typing: "Typing…",
@ -322,9 +323,8 @@ const en: LocaleType = {
Google: { Google: {
ApiKey: { ApiKey: {
Title: "API Key", Title: "API Key",
SubTitle: SubTitle: "Obtain your API Key from Google AI",
"Bypass password access restrictions using a custom Google AI Studio API Key", Placeholder: "Enter your Google AI Studio API Key",
Placeholder: "Google AI Studio API Key",
}, },
Endpoint: { Endpoint: {
@ -332,9 +332,9 @@ const en: LocaleType = {
SubTitle: "Example:", SubTitle: "Example:",
}, },
ApiVerion: { ApiVersion: {
Title: "API Version (gemini-pro api version)", Title: "API Version (specific to gemini-pro)",
SubTitle: "Select a specific part version", SubTitle: "Select a specific API version",
}, },
}, },
}, },

View File

@ -334,7 +334,7 @@ const sk: PartialLocaleType = {
SubTitle: "Príklad:", SubTitle: "Príklad:",
}, },
ApiVerion: { ApiVersion: {
Title: "Verzia API (gemini-pro verzia API)", Title: "Verzia API (gemini-pro verzia API)",
SubTitle: "Vyberte špecifickú verziu časti", SubTitle: "Vyberte špecifickú verziu časti",
}, },

View File

@ -1,16 +1,36 @@
import { getClientConfig } from "../config/client";
import { SubmitKey } from "../store/config"; import { SubmitKey } from "../store/config";
import type { PartialLocaleType } from "./index";
const tw: PartialLocaleType = { const isApp = !!getClientConfig()?.isApp;
const tw = {
WIP: "該功能仍在開發中……", WIP: "該功能仍在開發中……",
Error: { Error: {
Unauthorized: "目前您的狀態是未授權,請前往[設定頁面](/#/auth)輸入授權碼。", Unauthorized: isApp
? "檢測到無效 API Key請前往[設定](/#/settings)頁檢查 API Key 是否設定正確。"
: "訪問密碼不正確或為空,請前往[登入](/#/auth)頁輸入正確的訪問密碼,或者在[設定](/#/settings)頁填入你自己的 OpenAI API Key。",
},
Auth: {
Title: "需要密碼",
Tips: "管理員開啟了密碼驗證,請在下方填入訪問碼",
SubTips: "或者輸入你的 OpenAI 或 Google API 密鑰",
Input: "在此處填寫訪問碼",
Confirm: "確認",
Later: "稍候再說",
}, },
ChatItem: { ChatItem: {
ChatItemCount: (count: number) => `${count} 則對話`, ChatItemCount: (count: number) => `${count} 則對話`,
}, },
Chat: { Chat: {
SubTitle: (count: number) => `您已經與 ChatGPT 進行了 ${count} 則對話`, SubTitle: (count: number) => `您已經與 ChatGPT 進行了 ${count} 則對話`,
EditMessage: {
Title: "編輯消息記錄",
Topic: {
Title: "聊天主題",
SubTitle: "更改當前聊天主題",
},
},
Actions: { Actions: {
ChatList: "檢視訊息列表", ChatList: "檢視訊息列表",
CompressedHistory: "檢視壓縮後的歷史 Prompt", CompressedHistory: "檢視壓縮後的歷史 Prompt",
@ -18,7 +38,33 @@ const tw: PartialLocaleType = {
Copy: "複製", Copy: "複製",
Stop: "停止", Stop: "停止",
Retry: "重試", Retry: "重試",
Pin: "固定",
PinToastContent: "已將 1 條對話固定至預設提示詞",
PinToastAction: "查看",
Delete: "刪除", Delete: "刪除",
Edit: "編輯",
},
Commands: {
new: "新建聊天",
newm: "從面具新建聊天",
next: "下一個聊天",
prev: "上一個聊天",
clear: "清除上下文",
del: "刪除聊天",
},
InputActions: {
Stop: "停止回應",
ToBottom: "移至最新",
Theme: {
auto: "自動主題",
light: "亮色模式",
dark: "深色模式",
},
Prompt: "快捷指令",
Masks: "所有面具",
Clear: "清除聊天",
Settings: "對話設定",
UploadImage: "上傳圖片",
}, },
Rename: "重新命名對話", Rename: "重新命名對話",
Typing: "正在輸入…", Typing: "正在輸入…",
@ -34,13 +80,37 @@ const tw: PartialLocaleType = {
Reset: "重設", Reset: "重設",
SaveAs: "另存新檔", SaveAs: "另存新檔",
}, },
IsContext: "預設提示詞",
}, },
Export: { Export: {
Title: "將聊天記錄匯出為 Markdown", Title: "將聊天記錄匯出為 Markdown",
Copy: "複製全部", Copy: "複製全部",
Download: "下載檔案", Download: "下載檔案",
Share: "分享到 ShareGPT",
MessageFromYou: "來自您的訊息", MessageFromYou: "來自您的訊息",
MessageFromChatGPT: "來自 ChatGPT 的訊息", MessageFromChatGPT: "來自 ChatGPT 的訊息",
Format: {
Title: "導出格式",
SubTitle: "可以導出 Markdown 文本或者 PNG 圖片",
},
IncludeContext: {
Title: "包含面具上下文",
SubTitle: "是否在消息中展示面具上下文",
},
Steps: {
Select: "選取",
Preview: "預覽",
},
Image: {
Toast: "正在生成截圖",
Modal: "長按或右鍵保存圖片",
},
},
Select: {
Search: "查詢消息",
All: "選取全部",
Latest: "最近幾條",
Clear: "清除選中",
}, },
Memory: { Memory: {
Title: "上下文記憶 Prompt", Title: "上下文記憶 Prompt",
@ -60,6 +130,20 @@ const tw: PartialLocaleType = {
Title: "設定", Title: "設定",
SubTitle: "設定選項", SubTitle: "設定選項",
Danger: {
Reset: {
Title: "重置所有設定",
SubTitle: "重置所有設定項回預設值",
Action: "立即重置",
Confirm: "確認重置所有設定?",
},
Clear: {
Title: "清除所有資料",
SubTitle: "清除所有聊天、設定資料",
Action: "立即清除",
Confirm: "確認清除所有聊天、設定資料?",
},
},
Lang: { Lang: {
Name: "Language", // ATTENTION: if you wanna add a new translation, please do not translate this value, leave it as `Language` Name: "Language", // ATTENTION: if you wanna add a new translation, please do not translate this value, leave it as `Language`
All: "所有語言", All: "所有語言",
@ -73,6 +157,11 @@ const tw: PartialLocaleType = {
Title: "匯入系統提示", Title: "匯入系統提示",
SubTitle: "強制在每個請求的訊息列表開頭新增一個模擬 ChatGPT 的系統提示", SubTitle: "強制在每個請求的訊息列表開頭新增一個模擬 ChatGPT 的系統提示",
}, },
InputTemplate: {
Title: "用戶輸入預處理",
SubTitle: "用戶最新的一條消息會填充到此模板",
},
Update: { Update: {
Version: (x: string) => `目前版本:${x}`, Version: (x: string) => `目前版本:${x}`,
IsLatest: "已是最新版本", IsLatest: "已是最新版本",
@ -88,11 +177,62 @@ const tw: PartialLocaleType = {
Title: "預覽氣泡", Title: "預覽氣泡",
SubTitle: "在預覽氣泡中預覽 Markdown 內容", SubTitle: "在預覽氣泡中預覽 Markdown 內容",
}, },
AutoGenerateTitle: {
Title: "自動生成標題",
SubTitle: "根據對話內容生成合適的標題",
},
Sync: {
CloudState: "雲端資料",
NotSyncYet: "還沒有進行過同步",
Success: "同步成功",
Fail: "同步失敗",
Config: {
Modal: {
Title: "設定雲端同步",
Check: "檢查可用性",
},
SyncType: {
Title: "同步類型",
SubTitle: "選擇喜愛的同步服務器",
},
Proxy: {
Title: "啟用代理",
SubTitle: "在瀏覽器中同步時,必須啟用代理以避免跨域限制",
},
ProxyUrl: {
Title: "代理地址",
SubTitle: "僅適用於本項目自帶的跨域代理",
},
WebDav: {
Endpoint: "WebDAV 地址",
UserName: "用戶名",
Password: "密碼",
},
UpStash: {
Endpoint: "UpStash Redis REST Url",
UserName: "備份名稱",
Password: "UpStash Redis REST Token",
},
},
LocalState: "本地資料",
Overview: (overview: any) => {
return `${overview.chat} 次對話,${overview.message} 條消息,${overview.prompt} 條提示詞,${overview.mask} 個面具`;
},
ImportFailed: "導入失敗",
},
Mask: { Mask: {
Splash: { Splash: {
Title: "面具啟動頁面", Title: "面具啟動頁面",
SubTitle: "新增聊天時,呈現面具啟動頁面", SubTitle: "新增聊天時,呈現面具啟動頁面",
}, },
Builtin: {
Title: "隱藏內置面具",
SubTitle: "在所有面具列表中隱藏內置面具",
},
}, },
Prompt: { Prompt: {
Disable: { Disable: {
@ -131,11 +271,81 @@ const tw: PartialLocaleType = {
NoAccess: "輸入 API Key 檢視餘額", NoAccess: "輸入 API Key 檢視餘額",
}, },
Access: {
AccessCode: {
Title: "訪問密碼",
SubTitle: "管理員已開啟加密訪問",
Placeholder: "請輸入訪問密碼",
},
CustomEndpoint: {
Title: "自定義接口 (Endpoint)",
SubTitle: "是否使用自定義 Azure 或 OpenAI 服務",
},
Provider: {
Title: "模型服務商",
SubTitle: "切換不同的服務商",
},
OpenAI: {
ApiKey: {
Title: "API Key",
SubTitle: "使用自定義 OpenAI Key 繞過密碼訪問限制",
Placeholder: "OpenAI API Key",
},
Endpoint: {
Title: "接口(Endpoint) 地址",
SubTitle: "除默認地址外,必須包含 http(s)://",
},
},
Azure: {
ApiKey: {
Title: "接口密鑰",
SubTitle: "使用自定義 Azure Key 繞過密碼訪問限制",
Placeholder: "Azure API Key",
},
Endpoint: {
Title: "接口(Endpoint) 地址",
SubTitle: "樣例:",
},
ApiVerion: {
Title: "接口版本 (azure api version)",
SubTitle: "選擇指定的部分版本",
},
},
Google: {
ApiKey: {
Title: "API 密鑰",
SubTitle: "從 Google AI 獲取您的 API 密鑰",
Placeholder: "輸入您的 Google AI Studio API 密鑰",
},
Endpoint: {
Title: "終端地址",
SubTitle: "示例:",
},
ApiVersion: {
Title: "API 版本(僅適用於 gemini-pro",
SubTitle: "選擇一個特定的 API 版本",
},
},
CustomModel: {
Title: "自定義模型名",
SubTitle: "增加自定義模型可選項,使用英文逗號隔開",
},
},
Model: "模型 (model)", Model: "模型 (model)",
Temperature: { Temperature: {
Title: "隨機性 (temperature)", Title: "隨機性 (temperature)",
SubTitle: "值越大,回應越隨機", SubTitle: "值越大,回應越隨機",
}, },
TopP: {
Title: "核采樣 (top_p)",
SubTitle: "與隨機性類似,但不要和隨機性一起更改",
},
MaxTokens: { MaxTokens: {
Title: "單次回應限制 (max_tokens)", Title: "單次回應限制 (max_tokens)",
SubTitle: "單次互動所用的最大 Token 數", SubTitle: "單次互動所用的最大 Token 數",
@ -166,10 +376,16 @@ const tw: PartialLocaleType = {
Success: "已複製到剪貼簿中", Success: "已複製到剪貼簿中",
Failed: "複製失敗,請賦予剪貼簿權限", Failed: "複製失敗,請賦予剪貼簿權限",
}, },
Download: {
Success: "內容已下載到您的目錄。",
Failed: "下載失敗。",
},
Context: { Context: {
Toast: (x: any) => `已設定 ${x} 條前置上下文`, Toast: (x: any) => `已設定 ${x} 條前置上下文`,
Edit: "前置上下文和歷史記憶", Edit: "前置上下文和歷史記憶",
Add: "新增一條", Add: "新增一條",
Clear: "上下文已清除",
Revert: "恢復上下文",
}, },
Plugin: { Name: "外掛" }, Plugin: { Name: "外掛" },
FineTuned: { Sysmessage: "你是一個助手" }, FineTuned: { Sysmessage: "你是一個助手" },
@ -198,16 +414,34 @@ const tw: PartialLocaleType = {
Config: { Config: {
Avatar: "角色頭像", Avatar: "角色頭像",
Name: "角色名稱", Name: "角色名稱",
Sync: {
Title: "使用全局設定",
SubTitle: "當前對話是否使用全局模型設定",
Confirm: "當前對話的自定義設定將會被自動覆蓋,確認啟用全局設定?",
},
HideContext: {
Title: "隱藏預設對話",
SubTitle: "隱藏後預設對話不會出現在聊天界面",
},
Share: {
Title: "分享此面具",
SubTitle: "生成此面具的直達鏈接",
Action: "覆制鏈接",
},
}, },
}, },
NewChat: { NewChat: {
Return: "返回", Return: "返回",
Skip: "跳過", Skip: "跳過",
NotShow: "不再呈現",
ConfirmNoShow: "確認停用?停用後可以隨時在設定中重新啟用。",
Title: "挑選一個面具", Title: "挑選一個面具",
SubTitle: "現在開始,與面具背後的靈魂思維碰撞", SubTitle: "現在開始,與面具背後的靈魂思維碰撞",
More: "搜尋更多", More: "搜尋更多",
NotShow: "不再呈現", },
ConfirmNoShow: "確認停用?停用後可以隨時在設定中重新啟用。", URLCommand: {
Code: "檢測到連結中已經包含訪問碼,是否自動填入?",
Settings: "檢測到連結中包含了預設設定,是否自動填入?",
}, },
UI: { UI: {
Confirm: "確認", Confirm: "確認",
@ -215,8 +449,15 @@ const tw: PartialLocaleType = {
Close: "關閉", Close: "關閉",
Create: "新增", Create: "新增",
Edit: "編輯", Edit: "編輯",
Export: "導出",
Import: "導入",
Sync: "同步",
Config: "設定",
}, },
Exporter: { Exporter: {
Description: {
Title: "只有清除上下文之後的消息會被展示",
},
Model: "模型", Model: "模型",
Messages: "訊息", Messages: "訊息",
Topic: "主題", Topic: "主題",
@ -224,4 +465,14 @@ const tw: PartialLocaleType = {
}, },
}; };
type DeepPartial<T> = T extends object
? {
[P in keyof T]?: DeepPartial<T[P]>;
}
: T;
export type LocaleType = typeof tw;
export type PartialLocaleType = DeepPartial<typeof tw>;
export default tw; export default tw;
// Translated by @chunkiuuu, feel free to submit a new PR if there are typos or incorrect translations :D
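With the locale now defining its own DeepPartial helper, other translations only need to cover the keys they actually provide. A small illustration; the Slovak string and the import path are only examples, not code from this PR:

```ts
import type { PartialLocaleType } from "./tw";

// Every nested field is optional, so a partial translation still type-checks.
const partial: PartialLocaleType = {
  Chat: {
    Actions: { Copy: "Kopírovať" },
  },
};
```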

View File

@ -12,7 +12,9 @@ import { ensure } from "../utils/clone";
let fetchState = 0; // 0 not fetch, 1 fetching, 2 done let fetchState = 0; // 0 not fetch, 1 fetching, 2 done
const DEFAULT_OPENAI_URL = const DEFAULT_OPENAI_URL =
getClientConfig()?.buildMode === "export" ? DEFAULT_API_HOST : ApiPath.OpenAI; getClientConfig()?.buildMode === "export"
? DEFAULT_API_HOST + "/api/proxy/openai"
: ApiPath.OpenAI;
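For orientation, the two branches now resolve roughly as follows; DEFAULT_API_HOST comes from this PR, while the "/api/openai" value of ApiPath.OpenAI is an assumption about code not shown in this hunk.

```ts
// buildMode === "export" (static export / desktop app):
const exportBuildUrl = "https://api.nextchat.dev" + "/api/proxy/openai";

// any other build mode (self-hosted Next.js server, relative path):
const serverBuildUrl = "/api/openai"; // assumed value of ApiPath.OpenAI
```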
const DEFAULT_ACCESS_STATE = { const DEFAULT_ACCESS_STATE = {
accessCode: "", accessCode: "",

View File

@ -1,4 +1,4 @@
import { trimTopic } from "../utils"; import { trimTopic, getMessageTextContent } from "../utils";
import Locale, { getLang } from "../locales"; import Locale, { getLang } from "../locales";
import { showToast } from "../components/ui-lib"; import { showToast } from "../components/ui-lib";
@ -6,13 +6,15 @@ import { ModelConfig, ModelType, useAppConfig } from "./config";
import { createEmptyMask, Mask } from "./mask"; import { createEmptyMask, Mask } from "./mask";
import { import {
DEFAULT_INPUT_TEMPLATE, DEFAULT_INPUT_TEMPLATE,
DEFAULT_MODELS,
DEFAULT_SYSTEM_TEMPLATE, DEFAULT_SYSTEM_TEMPLATE,
KnowledgeCutOffDate, KnowledgeCutOffDate,
ModelProvider, ModelProvider,
StoreKey, StoreKey,
SUMMARIZE_MODEL, SUMMARIZE_MODEL,
GEMINI_SUMMARIZE_MODEL,
} from "../constant"; } from "../constant";
import { ClientApi, RequestMessage } from "../client/api"; import { ClientApi, RequestMessage, MultimodalContent } from "../client/api";
import { ChatControllerPool } from "../client/controller"; import { ChatControllerPool } from "../client/controller";
import { prettyObject } from "../utils/format"; import { prettyObject } from "../utils/format";
import { estimateTokenLength } from "../utils/token"; import { estimateTokenLength } from "../utils/token";
@ -83,18 +85,38 @@ function createEmptySession(): ChatSession {
function getSummarizeModel(currentModel: string) { function getSummarizeModel(currentModel: string) {
// if it is using gpt-* models, force to use 3.5 to summarize // if it is using gpt-* models, force to use 3.5 to summarize
return currentModel.startsWith("gpt") ? SUMMARIZE_MODEL : currentModel; if (currentModel.startsWith("gpt")) {
return SUMMARIZE_MODEL;
}
if (currentModel.startsWith("gemini-pro")) {
return GEMINI_SUMMARIZE_MODEL;
}
return currentModel;
} }
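With the added branch, Gemini sessions now summarize with gemini-pro instead of being forced onto the GPT summarizer. For example:

```ts
getSummarizeModel("gpt-4-0125-preview"); // "gpt-3.5-turbo" (SUMMARIZE_MODEL)
getSummarizeModel("gemini-pro-vision");  // "gemini-pro" (GEMINI_SUMMARIZE_MODEL)
getSummarizeModel("some-custom-model");  // unchanged: "some-custom-model"
```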
function countMessages(msgs: ChatMessage[]) { function countMessages(msgs: ChatMessage[]) {
return msgs.reduce((pre, cur) => pre + estimateTokenLength(cur.content), 0); return msgs.reduce(
(pre, cur) => pre + estimateTokenLength(getMessageTextContent(cur)),
0,
);
} }
function fillTemplateWith(input: string, modelConfig: ModelConfig) { function fillTemplateWith(input: string, modelConfig: ModelConfig) {
let cutoff = const cutoff =
KnowledgeCutOffDate[modelConfig.model] ?? KnowledgeCutOffDate.default; KnowledgeCutOffDate[modelConfig.model] ?? KnowledgeCutOffDate.default;
// Find the model in the DEFAULT_MODELS array that matches the modelConfig.model
const modelInfo = DEFAULT_MODELS.find((m) => m.name === modelConfig.model);
var serviceProvider = "OpenAI";
if (modelInfo) {
// TODO: auto detect the providerName from the modelConfig.model
// Directly use the providerName from the modelInfo
serviceProvider = modelInfo.provider.providerName;
}
const vars = { const vars = {
ServiceProvider: serviceProvider,
cutoff, cutoff,
model: modelConfig.model, model: modelConfig.model,
time: new Date().toLocaleString(), time: new Date().toLocaleString(),
@ -111,7 +133,8 @@ function fillTemplateWith(input: string, modelConfig: ModelConfig) {
} }
Object.entries(vars).forEach(([name, value]) => { Object.entries(vars).forEach(([name, value]) => {
output = output.replaceAll(`{{${name}}}`, value); const regex = new RegExp(`{{${name}}}`, "g");
output = output.replace(regex, value.toString()); // Ensure value is a string
}); });
return output; return output;
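A self-contained sketch of the new placeholder substitution; the variable names mirror the diff, and the values are examples for a gemini-pro session (provider resolved to Google via DEFAULT_MODELS):

```ts
const vars: Record<string, string> = {
  ServiceProvider: "Google",
  cutoff: "2023-12",
  model: "gemini-pro",
  time: new Date().toLocaleString(),
};

let output = "Trained by {{ServiceProvider}}, cutoff {{cutoff}}, model {{model}}.";
Object.entries(vars).forEach(([name, value]) => {
  // Same pattern as the diff: a global RegExp replace, coercing values to strings.
  output = output.replace(new RegExp(`{{${name}}}`, "g"), value.toString());
});
// -> "Trained by Google, cutoff 2023-12, model gemini-pro."
```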
@ -267,16 +290,36 @@ export const useChatStore = createPersistStore(
get().summarizeSession(); get().summarizeSession();
}, },
async onUserInput(content: string) { async onUserInput(content: string, attachImages?: string[]) {
const session = get().currentSession(); const session = get().currentSession();
const modelConfig = session.mask.modelConfig; const modelConfig = session.mask.modelConfig;
const userContent = fillTemplateWith(content, modelConfig); const userContent = fillTemplateWith(content, modelConfig);
console.log("[User Input] after template: ", userContent); console.log("[User Input] after template: ", userContent);
const userMessage: ChatMessage = createMessage({ let mContent: string | MultimodalContent[] = userContent;
if (attachImages && attachImages.length > 0) {
mContent = [
{
type: "text",
text: userContent,
},
];
mContent = mContent.concat(
attachImages.map((url) => {
return {
type: "image_url",
image_url: {
url: url,
},
};
}),
);
}
let userMessage: ChatMessage = createMessage({
role: "user", role: "user",
content: userContent, content: mContent,
}); });
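When images are attached, mContent becomes an OpenAI-style multimodal array instead of a plain string; roughly the following shape, where the data URL is only a placeholder:

```ts
// Shape of mContent for a message with one attached image (illustrative).
const mContent = [
  { type: "text", text: "What is in this picture?" },
  { type: "image_url", image_url: { url: "data:image/jpeg;base64,..." } },
];
```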
const botMessage: ChatMessage = createMessage({ const botMessage: ChatMessage = createMessage({
@ -294,7 +337,7 @@ export const useChatStore = createPersistStore(
get().updateCurrentSession((session) => { get().updateCurrentSession((session) => {
const savedUserMessage = { const savedUserMessage = {
...userMessage, ...userMessage,
content, content: mContent,
}; };
session.messages = session.messages.concat([ session.messages = session.messages.concat([
savedUserMessage, savedUserMessage,
@ -303,7 +346,7 @@ export const useChatStore = createPersistStore(
}); });
var api: ClientApi; var api: ClientApi;
if (modelConfig.model === "gemini-pro") { if (modelConfig.model.startsWith("gemini")) {
api = new ClientApi(ModelProvider.GeminiPro); api = new ClientApi(ModelProvider.GeminiPro);
} else { } else {
api = new ClientApi(ModelProvider.GPT); api = new ClientApi(ModelProvider.GPT);
@ -448,7 +491,7 @@ export const useChatStore = createPersistStore(
) { ) {
const msg = messages[i]; const msg = messages[i];
if (!msg || msg.isError) continue; if (!msg || msg.isError) continue;
tokenCount += estimateTokenLength(msg.content); tokenCount += estimateTokenLength(getMessageTextContent(msg));
reversedRecentMessages.push(msg); reversedRecentMessages.push(msg);
} }
@ -488,7 +531,7 @@ export const useChatStore = createPersistStore(
const modelConfig = session.mask.modelConfig; const modelConfig = session.mask.modelConfig;
var api: ClientApi; var api: ClientApi;
if (modelConfig.model === "gemini-pro") { if (modelConfig.model.startsWith("gemini")) {
api = new ClientApi(ModelProvider.GeminiPro); api = new ClientApi(ModelProvider.GeminiPro);
} else { } else {
api = new ClientApi(ModelProvider.GPT); api = new ClientApi(ModelProvider.GPT);

View File

@ -91,7 +91,7 @@ export const ModalConfigValidator = {
return limitNumber(x, -2, 2, 0); return limitNumber(x, -2, 2, 0);
}, },
temperature(x: number) { temperature(x: number) {
return limitNumber(x, 0, 1, 1); return limitNumber(x, 0, 2, 1);
}, },
top_p(x: number) { top_p(x: number) {
return limitNumber(x, 0, 1, 1); return limitNumber(x, 0, 1, 1);
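Assuming limitNumber clamps its input into [min, max], the widened range lets users set the full 0 to 2 temperature interval that the OpenAI API accepts:

```ts
ModalConfigValidator.temperature(1.5); // previously clamped to 1, now 1.5
ModalConfigValidator.temperature(3);   // still clamped, now to 2
```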

View File

@ -48,7 +48,7 @@ const DEFAULT_SYNC_STATE = {
export const useSyncStore = createPersistStore( export const useSyncStore = createPersistStore(
DEFAULT_SYNC_STATE, DEFAULT_SYNC_STATE,
(set, get) => ({ (set, get) => ({
coundSync() { cloudSync() {
const config = get()[get().provider]; const config = get()[get().provider];
return Object.values(config).every((c) => c.toString().length > 0); return Object.values(config).every((c) => c.toString().length > 0);
}, },
@ -60,7 +60,9 @@ export const useSyncStore = createPersistStore(
export() { export() {
const state = getLocalAppState(); const state = getLocalAppState();
const datePart = isApp const datePart = isApp
? `${new Date().toLocaleDateString().replace(/\//g, '_')} ${new Date().toLocaleTimeString().replace(/:/g, '_')}` ? `${new Date().toLocaleDateString().replace(/\//g, "_")} ${new Date()
.toLocaleTimeString()
.replace(/:/g, "_")}`
: new Date().toLocaleString(); : new Date().toLocaleString();
const fileName = `Backup-${datePart}.json`; const fileName = `Backup-${datePart}.json`;
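An illustration of the resulting backup file names (en-US locale assumed); in the desktop app the "/" and ":" characters are replaced so the name is valid on every file system:

```ts
const appDatePart = `${new Date().toLocaleDateString().replace(/\//g, "_")} ${new Date()
  .toLocaleTimeString()
  .replace(/:/g, "_")}`;                         // e.g. "3_7_2024 5_07_08 PM"
const webDatePart = new Date().toLocaleString(); // e.g. "3/7/2024, 5:07:08 PM"
const fileName = `Backup-${appDatePart}.json`;   // "Backup-3_7_2024 5_07_08 PM.json"
```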

View File

@ -1,12 +1,17 @@
import { useEffect, useState } from "react"; import { useEffect, useState } from "react";
import { showToast } from "./components/ui-lib"; import { showToast } from "./components/ui-lib";
import Locale from "./locales"; import Locale from "./locales";
import { RequestMessage } from "./client/api";
import { DEFAULT_MODELS } from "./constant";
export function trimTopic(topic: string) { export function trimTopic(topic: string) {
// Fix an issue where double quotes still show in the Indonesian language // Fix an issue where double quotes still show in the Indonesian language
// This will remove the specified punctuation from the end of the string // This will remove the specified punctuation from the end of the string
// and also trim quotes from both the start and end if they exist. // and also trim quotes from both the start and end if they exist.
return topic.replace(/^["“”]+|["“”]+$/g, "").replace(/[,。!?”“"、,.!?]*$/, ""); return topic
// fix for gemini
.replace(/^["“”*]+|["“”*]+$/g, "")
.replace(/[,。!?”“"、,.!?*]*$/, "");
} }
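The widened character classes also strip the Markdown asterisks that Gemini tends to wrap around generated titles, for example:

```ts
trimTopic('**"Weekend Trip Ideas"**'); // -> "Weekend Trip Ideas"
trimTopic("旅行计划,");                // -> "旅行计划"
```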
export async function copyToClipboard(text: string) { export async function copyToClipboard(text: string) {
@ -40,8 +45,8 @@ export async function downloadAs(text: string, filename: string) {
defaultPath: `${filename}`, defaultPath: `${filename}`,
filters: [ filters: [
{ {
name: `${filename.split('.').pop()} files`, name: `${filename.split(".").pop()} files`,
extensions: [`${filename.split('.').pop()}`], extensions: [`${filename.split(".").pop()}`],
}, },
{ {
name: "All Files", name: "All Files",
@ -77,8 +82,51 @@ export async function downloadAs(text: string, filename: string) {
element.click(); element.click();
document.body.removeChild(element); document.body.removeChild(element);
}
} }
export function compressImage(file: File, maxSize: number): Promise<string> {
return new Promise((resolve, reject) => {
const reader = new FileReader();
reader.onload = (readerEvent: any) => {
const image = new Image();
image.onload = () => {
let canvas = document.createElement("canvas");
let ctx = canvas.getContext("2d");
let width = image.width;
let height = image.height;
let quality = 0.9;
let dataUrl;
do {
canvas.width = width;
canvas.height = height;
ctx?.clearRect(0, 0, canvas.width, canvas.height);
ctx?.drawImage(image, 0, 0, width, height);
dataUrl = canvas.toDataURL("image/jpeg", quality);
if (dataUrl.length < maxSize) break;
if (quality > 0.5) {
// Prioritize quality reduction
quality -= 0.1;
} else {
// Then reduce the size
width *= 0.9;
height *= 0.9;
}
} while (dataUrl.length > maxSize);
resolve(dataUrl);
};
image.onerror = reject;
image.src = readerEvent.target.result;
};
reader.onerror = reject;
reader.readAsDataURL(file);
});
} }
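A hedged usage sketch: the chat input presumably runs picked files through compressImage before attaching them; the 256 KB ceiling here is only an example, not necessarily the value the UI uses.

```ts
// Compress a user-picked image into a data URL of at most ~256 KB.
async function toAttachedImage(file: File): Promise<string> {
  const dataUrl = await compressImage(file, 256 * 1024);
  return dataUrl; // e.g. "data:image/jpeg;base64,..."
}
```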
export function readFromFile() { export function readFromFile() {
return new Promise<string>((res, rej) => { return new Promise<string>((res, rej) => {
const fileInput = document.createElement("input"); const fileInput = document.createElement("input");
@ -212,8 +260,41 @@ export function getCSSVar(varName: string) {
export function isMacOS(): boolean { export function isMacOS(): boolean {
if (typeof window !== "undefined") { if (typeof window !== "undefined") {
let userAgent = window.navigator.userAgent.toLocaleLowerCase(); let userAgent = window.navigator.userAgent.toLocaleLowerCase();
const macintosh = /iphone|ipad|ipod|macintosh/.test(userAgent) const macintosh = /iphone|ipad|ipod|macintosh/.test(userAgent);
return !!macintosh return !!macintosh;
} }
return false return false;
}
export function getMessageTextContent(message: RequestMessage) {
if (typeof message.content === "string") {
return message.content;
}
for (const c of message.content) {
if (c.type === "text") {
return c.text ?? "";
}
}
return "";
}
export function getMessageImages(message: RequestMessage): string[] {
if (typeof message.content === "string") {
return [];
}
const urls: string[] = [];
for (const c of message.content) {
if (c.type === "image_url") {
urls.push(c.image_url?.url ?? "");
}
}
return urls;
}
export function isVisionModel(model: string) {
return (
// model.startsWith("gpt-4-vision") ||
// model.startsWith("gemini-pro-vision") ||
model.includes("vision")
);
} }
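Together these helpers let the rest of the code keep treating messages as text while still carrying images; a short sketch of the expected behaviour on a multimodal message (data URL truncated for the example):

```ts
import { RequestMessage } from "./client/api";

const msg: RequestMessage = {
  role: "user",
  content: [
    { type: "text", text: "Describe this" },
    { type: "image_url", image_url: { url: "data:image/png;base64,..." } },
  ],
};

getMessageTextContent(msg);            // "Describe this"
getMessageImages(msg);                 // ["data:image/png;base64,..."]
isVisionModel("gpt-4-vision-preview"); // true  (name contains "vision")
isVisionModel("gemini-pro");           // false
```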

View File

@ -1,8 +1,8 @@
import { getClientConfig } from "../config/client"; import { getClientConfig } from "../config/client";
import { ApiPath, DEFAULT_CORS_HOST } from "../constant"; import { ApiPath, DEFAULT_API_HOST } from "../constant";
export function corsPath(path: string) { export function corsPath(path: string) {
const baseUrl = getClientConfig()?.isApp ? `${DEFAULT_CORS_HOST}` : ""; const baseUrl = getClientConfig()?.isApp ? `${DEFAULT_API_HOST}` : "";
if (!path.startsWith("/")) { if (!path.startsWith("/")) {
path = "/" + path; path = "/" + path;

View File

@ -64,8 +64,17 @@ if (mode !== "export") {
nextConfig.rewrites = async () => { nextConfig.rewrites = async () => {
const ret = [ const ret = [
// adjust for previous version directly using "/api/proxy/" as proxy base route
{ {
source: "/api/proxy/:path*", source: "/api/proxy/v1/:path*",
destination: "https://api.openai.com/v1/:path*",
},
{
source: "/api/proxy/google/:path*",
destination: "https://generativelanguage.googleapis.com/:path*",
},
{
source: "/api/proxy/openai/:path*",
destination: "https://api.openai.com/:path*", destination: "https://api.openai.com/:path*",
}, },
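The extra rewrite rules keep the legacy /api/proxy/v1 route working while adding provider-scoped prefixes. A few example mappings (the request paths on the left are hypothetical):

```ts
// /api/proxy/v1/chat/completions        -> https://api.openai.com/v1/chat/completions
// /api/proxy/openai/v1/chat/completions -> https://api.openai.com/v1/chat/completions
// /api/proxy/google/v1beta/models/gemini-pro:generateContent
//   -> https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent
```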
{ {

View File

@ -1,5 +1,5 @@
{ {
"name": "chatgpt-next-web", "name": "nextchat",
"private": false, "private": false,
"license": "mit", "license": "mit",
"scripts": { "scripts": {

View File

@ -54,7 +54,7 @@ if ! command -v node >/dev/null || ! command -v git >/dev/null || ! command -v y
fi fi
# Clone the repository and install dependencies # Clone the repository and install dependencies
git clone https://github.com/Yidadaa/ChatGPT-Next-Web git clone https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web
cd ChatGPT-Next-Web cd ChatGPT-Next-Web
yarn install yarn install

src-tauri/Cargo.lock generated

File diff suppressed because it is too large

View File

@ -1,27 +1,45 @@
[package] [package]
name = "chatgpt-next-web" name = "nextchat"
version = "0.1.0" version = "0.1.0"
description = "A cross platform app for LLM ChatBot." description = "A cross platform app for LLM ChatBot."
authors = ["Yidadaa"] authors = ["Yidadaa"]
license = "mit" license = "mit"
repository = "" repository = ""
default-run = "chatgpt-next-web" default-run = "nextchat"
edition = "2021" edition = "2021"
rust-version = "1.60" rust-version = "1.60"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[build-dependencies] [build-dependencies]
tauri-build = { version = "1.3.0", features = [] } tauri-build = { version = "1.5.1", features = [] }
[dependencies] [dependencies]
serde_json = "1.0" serde_json = "1.0"
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
tauri = { version = "1.3.0", features = ["notification-all", "fs-all", "clipboard-all", "dialog-all", "shell-open", "updater", "window-close", "window-hide", "window-maximize", "window-minimize", "window-set-icon", "window-set-ignore-cursor-events", "window-set-resizable", "window-show", "window-start-dragging", "window-unmaximize", "window-unminimize"] } tauri = { version = "1.5.4", features = [
"notification-all",
"fs-all",
"clipboard-all",
"dialog-all",
"shell-open",
"updater",
"window-close",
"window-hide",
"window-maximize",
"window-minimize",
"window-set-icon",
"window-set-ignore-cursor-events",
"window-set-resizable",
"window-show",
"window-start-dragging",
"window-unmaximize",
"window-unminimize",
] }
tauri-plugin-window-state = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" } tauri-plugin-window-state = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" }
[features] [features]
# this feature is used for production builds or when `devPath` points to the filesystem and the built-in dev server is disabled. # this feature is used for production builds or when `devPath` points to the filesystem and the built-in dev server is disabled.
# If you use cargo directly instead of tauri's cli you can use this feature flag to switch between tauri's `dev` and `build` modes. # If you use cargo directly instead of tauri's cli you can use this feature flag to switch between tauri's `dev` and `build` modes.
# DO NOT REMOVE!! # DO NOT REMOVE!!
custom-protocol = [ "tauri/custom-protocol" ] custom-protocol = ["tauri/custom-protocol"]

View File

@ -9,7 +9,7 @@
}, },
"package": { "package": {
"productName": "NextChat", "productName": "NextChat",
"version": "2.10.1" "version": "2.11.2"
}, },
"tauri": { "tauri": {
"allowlist": { "allowlist": {
@ -86,12 +86,13 @@
} }
}, },
"security": { "security": {
"csp": null "csp": null,
"dangerousUseHttpScheme": true
}, },
"updater": { "updater": {
"active": true, "active": true,
"endpoints": [ "endpoints": [
"https://github.com/Yidadaa/ChatGPT-Next-Web/releases/latest/download/latest.json" "https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/releases/latest/download/latest.json"
], ],
"dialog": false, "dialog": false,
"windows": { "windows": {