Mirror of https://github.com/Yidadaa/ChatGPT-Next-Web.git, synced 2025-09-07 16:47:03 +08:00

Compare commits (8 commits):
bfefb99192
47ae874e4d
d74f636558
b8f0822214
0869455612
bca74241e6
9d5801fb5f
462a88ae82
.github/workflows/deploy_preview.yml (vendored, 1 line changed)
@@ -5,6 +5,7 @@ on:
     types:
       - opened
       - synchronize
+      - reopened

 env:
   VERCEL_TEAM: ${{ secrets.VERCEL_TEAM }}

@@ -144,10 +144,10 @@ export function getHeaders() {
   const headers: Record<string, string> = {
     "Content-Type": "application/json",
     "x-requested-with": "XMLHttpRequest",
-    "Accept": "application/json",
+    Accept: "application/json",
   };
   const modelConfig = useChatStore.getState().currentSession().mask.modelConfig;
-  const isGoogle = modelConfig.model === "gemini-pro";
+  const isGoogle = modelConfig.model.startsWith("gemini");
   const isAzure = accessStore.provider === ServiceProvider.Azure;
   const authHeader = isAzure ? "api-key" : "Authorization";
   const apiKey = isGoogle
@@ -155,20 +155,23 @@ export function getHeaders() {
     : isAzure
     ? accessStore.azureApiKey
     : accessStore.openaiApiKey;
+  const clientConfig = getClientConfig();
   const makeBearer = (s: string) => `${isAzure ? "" : "Bearer "}${s.trim()}`;
   const validString = (x: string) => x && x.length > 0;

-  // use user's api key first
-  if (validString(apiKey)) {
-    headers[authHeader] = makeBearer(apiKey);
-  } else if (
-    accessStore.enabledAccessControl() &&
-    validString(accessStore.accessCode)
-  ) {
-    headers[authHeader] = makeBearer(
-      ACCESS_CODE_PREFIX + accessStore.accessCode,
-    );
+  // when using google api in app, not set auth header
+  if (!(isGoogle && clientConfig?.isApp)) {
+    // use user's api key first
+    if (validString(apiKey)) {
+      headers[authHeader] = makeBearer(apiKey);
+    } else if (
+      accessStore.enabledAccessControl() &&
+      validString(accessStore.accessCode)
+    ) {
+      headers[authHeader] = makeBearer(
+        ACCESS_CODE_PREFIX + accessStore.accessCode,
+      );
+    }
   }

   return headers;
 }

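Read as a whole, the two hunks above change getHeaders() so that Gemini requests issued from the desktop app carry no auth header at all. A minimal standalone sketch of that decision logic, with an illustrative helper name and parameters that are not part of the repository:

// Sketch only: condensed from the getHeaders() diff above.
function buildAuthHeaders(opts: {
  isGoogle: boolean;        // model name starts with "gemini"
  isAzure: boolean;         // provider is Azure
  isApp: boolean;           // getClientConfig()?.isApp
  apiKey: string;           // user-supplied key for the active provider
  accessCode: string;       // server access code, if access control is enabled
  accessControlEnabled: boolean;
  accessCodePrefix: string; // ACCESS_CODE_PREFIX in the real code
}): Record<string, string> {
  const headers: Record<string, string> = {};
  const authHeader = opts.isAzure ? "api-key" : "Authorization";
  const makeBearer = (s: string) => `${opts.isAzure ? "" : "Bearer "}${s.trim()}`;

  // New behavior: Google requests from the app skip the auth header entirely;
  // the key is appended to the URL as a query parameter instead (see the Gemini client diff below).
  if (!(opts.isGoogle && opts.isApp)) {
    if (opts.apiKey.length > 0) {
      headers[authHeader] = makeBearer(opts.apiKey);
    } else if (opts.accessControlEnabled && opts.accessCode.length > 0) {
      headers[authHeader] = makeBearer(opts.accessCodePrefix + opts.accessCode);
    }
  }
  return headers;
}
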
@@ -1,15 +1,8 @@
 import { Google, REQUEST_TIMEOUT_MS } from "@/app/constant";
 import { ChatOptions, getHeaders, LLMApi, LLMModel, LLMUsage } from "../api";
 import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
-import {
-  EventStreamContentType,
-  fetchEventSource,
-} from "@fortaine/fetch-event-source";
-import { prettyObject } from "@/app/utils/format";
 import { getClientConfig } from "@/app/config/client";
-import Locale from "../../locales";
-import { getServerSideConfig } from "@/app/config/server";
-import de from "@/app/locales/de";
+import { DEFAULT_API_HOST } from "@/app/constant";
 export class GeminiProApi implements LLMApi {
   extractMessage(res: any) {
     console.log("[Response] gemini-pro response: ", res);

@@ -21,7 +14,7 @@ export class GeminiProApi implements LLMApi {
     );
   }
   async chat(options: ChatOptions): Promise<void> {
-    const apiClient = this;
+    // const apiClient = this;
     const messages = options.messages.map((v) => ({
       role: v.role.replace("assistant", "model").replace("system", "user"),
       parts: [{ text: v.content }],

@@ -79,13 +72,27 @@ export class GeminiProApi implements LLMApi {
       ],
     };

-    console.log("[Request] google payload: ", requestPayload);
+    const accessStore = useAccessStore.getState();
+    let baseUrl = accessStore.googleUrl;
+    const isApp = !!getClientConfig()?.isApp;

-    const shouldStream = !!options.config.stream;
+    let shouldStream = !!options.config.stream;
     const controller = new AbortController();
     options.onController?.(controller);
     try {
-      const chatPath = this.path(Google.ChatPath);
+      let chatPath = this.path(Google.ChatPath);
+
+      // let baseUrl = accessStore.googleUrl;
+
+      if (!baseUrl) {
+        baseUrl = isApp
+          ? DEFAULT_API_HOST + "/api/proxy/google/" + Google.ChatPath
+          : chatPath;
+      }
+
+      if (isApp) {
+        baseUrl += `?key=${accessStore.googleApiKey}`;
+      }
       const chatPayload = {
         method: "POST",
         body: JSON.stringify(requestPayload),

@@ -101,10 +108,6 @@ export class GeminiProApi implements LLMApi {
       if (shouldStream) {
         let responseText = "";
         let remainText = "";
-        let streamChatPath = chatPath.replace(
-          "generateContent",
-          "streamGenerateContent",
-        );
         let finished = false;

         let existingTexts: string[] = [];

@@ -134,7 +137,11 @@ export class GeminiProApi implements LLMApi {

         // start animaion
         animateResponseText();
-        fetch(streamChatPath, chatPayload)
+
+        fetch(
+          baseUrl.replace("generateContent", "streamGenerateContent"),
+          chatPayload,
+        )
           .then((response) => {
             const reader = response?.body?.getReader();
             const decoder = new TextDecoder();

@@ -185,11 +192,9 @@ export class GeminiProApi implements LLMApi {
             console.error("Error:", error);
           });
       } else {
-        const res = await fetch(chatPath, chatPayload);
+        const res = await fetch(baseUrl, chatPayload);
         clearTimeout(requestTimeoutId);
-
         const resJson = await res.json();
-
         if (resJson?.promptFeedback?.blockReason) {
           // being blocked
           options.onError?.(

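Taken together, the hunks above rework how the GeminiProApi client builds its request URL: a configured googleUrl wins, app builds fall back to the hosted proxy and pass the key as a query parameter, and streaming swaps the method name inside the URL. A condensed sketch under those assumptions; the function below is illustrative, not code from the repository:

// Sketch: Gemini endpoint selection as described by the diff above.
function resolveGeminiUrl(opts: {
  googleUrl: string;      // accessStore.googleUrl (may be empty)
  isApp: boolean;         // getClientConfig()?.isApp
  googleApiKey: string;   // accessStore.googleApiKey
  chatPath: string;       // this.path(Google.ChatPath)
  defaultApiHost: string; // DEFAULT_API_HOST
  stream: boolean;        // options.config.stream
}): string {
  let baseUrl = opts.googleUrl;
  if (!baseUrl) {
    // App builds route through the hosted proxy; web builds keep the relative path.
    baseUrl = opts.isApp
      ? opts.defaultApiHost + "/api/proxy/google/" + opts.chatPath
      : opts.chatPath;
  }
  if (opts.isApp) {
    // In the app the key travels in the query string, not in an auth header.
    baseUrl += `?key=${opts.googleApiKey}`;
  }
  // Streaming requests hit streamGenerateContent instead of generateContent.
  return opts.stream
    ? baseUrl.replace("generateContent", "streamGenerateContent")
    : baseUrl;
}
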
@@ -1,3 +1,4 @@
+"use client";
 import {
   ApiPath,
   DEFAULT_API_HOST,

@@ -45,7 +46,9 @@ export class ChatGPTApi implements LLMApi {

     if (baseUrl.length === 0) {
       const isApp = !!getClientConfig()?.isApp;
-      baseUrl = isApp ? DEFAULT_API_HOST : ApiPath.OpenAI;
+      baseUrl = isApp
+        ? DEFAULT_API_HOST + "/proxy" + ApiPath.OpenAI
+        : ApiPath.OpenAI;
     }

     if (baseUrl.endsWith("/")) {

@@ -59,6 +62,8 @@ export class ChatGPTApi implements LLMApi {
       path = makeAzurePath(path, accessStore.azureApiVersion);
     }

+    console.log("[Proxy Endpoint] ", baseUrl, path);
+
     return [baseUrl, path].join("/");
   }

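For the ChatGPTApi client, the two hunks above mean that app builds now target the hosted proxy under a "/proxy" prefix and the chosen endpoint is logged. A simplified sketch of the resulting base-URL fallback, with illustrative parameter names rather than the repository's exact signature:

// Sketch: OpenAI base-URL fallback after this change.
function resolveOpenAIBase(opts: {
  configuredUrl: string;  // user-configured URL from the access store, may be empty
  isApp: boolean;         // getClientConfig()?.isApp
  defaultApiHost: string; // DEFAULT_API_HOST
  openaiApiPath: string;  // ApiPath.OpenAI
}): string {
  let baseUrl = opts.configuredUrl;
  if (baseUrl.length === 0) {
    // App builds prepend the hosted proxy plus "/proxy"; web builds stay relative.
    baseUrl = opts.isApp
      ? opts.defaultApiHost + "/proxy" + opts.openaiApiPath
      : opts.openaiApiPath;
  }
  if (baseUrl.endsWith("/")) {
    baseUrl = baseUrl.slice(0, baseUrl.length - 1); // strip a trailing slash
  }
  return baseUrl;
}
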
@@ -307,7 +307,7 @@ export function PreviewActions(props: {
     setShouldExport(false);

     var api: ClientApi;
-    if (config.modelConfig.model === "gemini-pro") {
+    if (config.modelConfig.model.startsWith("gemini")) {
       api = new ClientApi(ModelProvider.GeminiPro);
     } else {
       api = new ClientApi(ModelProvider.GPT);

@@ -171,7 +171,7 @@ export function useLoadData() {
   const config = useAppConfig();

   var api: ClientApi;
-  if (config.modelConfig.model === "gemini-pro") {
+  if (config.modelConfig.model.startsWith("gemini")) {
     api = new ClientApi(ModelProvider.GeminiPro);
   } else {
     api = new ClientApi(ModelProvider.GPT);

@@ -92,7 +92,7 @@ export function ModelConfigList(props: {
         ></input>
       </ListItem>

-      {props.modelConfig.model === "gemini-pro" ? null : (
+      {props.modelConfig.model.startsWith("gemini") ? null : (
         <>
           <ListItem
             title={Locale.Settings.PresencePenalty.Title}

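The same model check is repeated in the exporter, the home loader, and the model-config list above. A hypothetical shared helper, not present in the repository, makes the intent of the new prefix test explicit:

// Hypothetical helper: centralizes the prefix check so any "gemini*" model
// (for example "gemini-pro") is routed to the Gemini client.
function isGeminiModel(model: string): boolean {
  return model.startsWith("gemini");
}

// Usage mirroring the repeated pattern in the diffs above:
// const api = isGeminiModel(config.modelConfig.model)
//   ? new ClientApi(ModelProvider.GeminiPro)
//   : new ClientApi(ModelProvider.GPT);
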
@@ -8,8 +8,7 @@ export const FETCH_COMMIT_URL = `https://api.github.com/repos/${OWNER}/${REPO}/c
 export const FETCH_TAG_URL = `https://api.github.com/repos/${OWNER}/${REPO}/tags?per_page=1`;
 export const RUNTIME_CONFIG_DOM = "danger-runtime-config";

-export const DEFAULT_CORS_HOST = "https://a.nextweb.fun";
-export const DEFAULT_API_HOST = `${DEFAULT_CORS_HOST}/api/proxy`;
+export const DEFAULT_API_HOST = "https://api.nextchat.dev";
 export const OPENAI_BASE_URL = "https://api.openai.com";

 export const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/";

@@ -111,7 +110,7 @@ export const KnowledgeCutOffDate: Record<string, string> = {
   "gpt-4-1106-preview": "2023-04",
   "gpt-4-0125-preview": "2023-04",
   "gpt-4-vision-preview": "2023-04",
   // After improvements,
   // it's now easier to add "KnowledgeCutOffDate" instead of stupid hardcoding it, as was done previously.
   "gemini-pro": "2023-12",
 };

@@ -12,7 +12,9 @@ import { ensure } from "../utils/clone";
 let fetchState = 0; // 0 not fetch, 1 fetching, 2 done

 const DEFAULT_OPENAI_URL =
-  getClientConfig()?.buildMode === "export" ? DEFAULT_API_HOST : ApiPath.OpenAI;
+  getClientConfig()?.buildMode === "export"
+    ? DEFAULT_API_HOST + "/api/proxy/openai"
+    : ApiPath.OpenAI;

 const DEFAULT_ACCESS_STATE = {
   accessCode: "",

@@ -92,14 +92,18 @@ function countMessages(msgs: ChatMessage[]) {
 }

 function fillTemplateWith(input: string, modelConfig: ModelConfig) {
-  const cutoff = KnowledgeCutOffDate[modelConfig.model] ?? KnowledgeCutOffDate.default;
+  const cutoff =
+    KnowledgeCutOffDate[modelConfig.model] ?? KnowledgeCutOffDate.default;
   // Find the model in the DEFAULT_MODELS array that matches the modelConfig.model
-  const modelInfo = DEFAULT_MODELS.find(m => m.name === modelConfig.model);
-  if (!modelInfo) {
-    throw new Error(`Model ${modelConfig.model} not found in DEFAULT_MODELS array.`);
+  const modelInfo = DEFAULT_MODELS.find((m) => m.name === modelConfig.model);
+
+  var serviceProvider = "OpenAI";
+  if (modelInfo) {
+    // TODO: auto detect the providerName from the modelConfig.model
+
+    // Directly use the providerName from the modelInfo
+    serviceProvider = modelInfo.provider.providerName;
   }
-  // Directly use the providerName from the modelInfo
-  const serviceProvider = modelInfo.provider.providerName;

   const vars = {
     ServiceProvider: serviceProvider,

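The net effect of the fillTemplateWith hunk is that an unknown model no longer throws; the provider name simply falls back to "OpenAI". A minimal sketch of that lookup, with illustrative types:

// Sketch: provider lookup with a graceful "OpenAI" fallback.
interface ModelEntry {
  name: string;
  provider: { providerName: string };
}

function lookupProviderName(models: ModelEntry[], model: string): string {
  const modelInfo = models.find((m) => m.name === model);
  // Previously this threw when the model was missing from DEFAULT_MODELS;
  // now it silently defaults to "OpenAI".
  return modelInfo ? modelInfo.provider.providerName : "OpenAI";
}
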
@@ -119,7 +123,7 @@ function fillTemplateWith(input: string, modelConfig: ModelConfig) {
   }

   Object.entries(vars).forEach(([name, value]) => {
-    const regex = new RegExp(`{{${name}}}`, 'g');
+    const regex = new RegExp(`{{${name}}}`, "g");
     output = output.replace(regex, value.toString()); // Ensure value is a string
   });

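For illustration, the global "g" flag on the regex above means every occurrence of a placeholder is substituted, not just the first. A small self-contained example of the replacement loop, using made-up values:

// Example of the template substitution shown above, with illustrative values.
const vars: Record<string, string> = {
  ServiceProvider: "Google",
  model: "gemini-pro",
};

let output = "Provider: {{ServiceProvider}}, model: {{model}} ({{model}})";
Object.entries(vars).forEach(([name, value]) => {
  const regex = new RegExp(`{{${name}}}`, "g"); // "g": replace all occurrences
  output = output.replace(regex, value.toString());
});
// output === "Provider: Google, model: gemini-pro (gemini-pro)"
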
@@ -312,7 +316,7 @@ export const useChatStore = createPersistStore(
       });

       var api: ClientApi;
-      if (modelConfig.model === "gemini-pro") {
+      if (modelConfig.model.startsWith("gemini")) {
         api = new ClientApi(ModelProvider.GeminiPro);
       } else {
         api = new ClientApi(ModelProvider.GPT);

@@ -497,7 +501,7 @@ export const useChatStore = createPersistStore(
       const modelConfig = session.mask.modelConfig;

       var api: ClientApi;
-      if (modelConfig.model === "gemini-pro") {
+      if (modelConfig.model.startsWith("gemini")) {
         api = new ClientApi(ModelProvider.GeminiPro);
       } else {
         api = new ClientApi(ModelProvider.GPT);

@@ -1,8 +1,8 @@
 import { getClientConfig } from "../config/client";
-import { ApiPath, DEFAULT_CORS_HOST } from "../constant";
+import { ApiPath, DEFAULT_API_HOST } from "../constant";

 export function corsPath(path: string) {
-  const baseUrl = getClientConfig()?.isApp ? `${DEFAULT_CORS_HOST}` : "";
+  const baseUrl = getClientConfig()?.isApp ? `${DEFAULT_API_HOST}` : "";

   if (!path.startsWith("/")) {
     path = "/" + path;

@@ -64,8 +64,17 @@ if (mode !== "export") {

   nextConfig.rewrites = async () => {
     const ret = [
+      // adjust for previous verison directly using "/api/proxy/" as proxy base route
       {
-        source: "/api/proxy/:path*",
+        source: "/api/proxy/v1/:path*",
+        destination: "https://api.openai.com/v1/:path*",
+      },
+      {
+        source: "/api/proxy/google/:path*",
+        destination: "https://generativelanguage.googleapis.com/:path*",
+      },
+      {
+        source: "/api/proxy/openai/:path*",
         destination: "https://api.openai.com/:path*",
       },
       {

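The three rewrite rules above keep the old "/api/proxy/" OpenAI route working while adding dedicated Google and OpenAI proxy prefixes. Expressed as a plain function for illustration only; the real mapping is performed by Next.js rewrites, not application code:

// Sketch: what each rewrite rule resolves to for an incoming request path.
function rewriteProxyPath(path: string): string | null {
  if (path.startsWith("/api/proxy/v1/")) {
    // legacy clients that used "/api/proxy/" directly as the OpenAI base route
    return "https://api.openai.com/v1/" + path.slice("/api/proxy/v1/".length);
  }
  if (path.startsWith("/api/proxy/google/")) {
    return (
      "https://generativelanguage.googleapis.com/" +
      path.slice("/api/proxy/google/".length)
    );
  }
  if (path.startsWith("/api/proxy/openai/")) {
    return "https://api.openai.com/" + path.slice("/api/proxy/openai/".length);
  }
  return null; // anything else falls through to the remaining rewrite rules
}
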
@@ -1,5 +1,5 @@
 {
-  "name": "chatgpt-next-web",
+  "name": "nextchat",
   "private": false,
   "license": "mit",
   "scripts": {

@@ -64,4 +64,4 @@
   "resolutions": {
     "lint-staged/yaml": "^2.2.2"
   }
 }

@@ -54,7 +54,7 @@ if ! command -v node >/dev/null || ! command -v git >/dev/null || ! command -v y
 fi

 # Clone the repository and install dependencies
-git clone https://github.com/Yidadaa/ChatGPT-Next-Web
+git clone https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web
 cd ChatGPT-Next-Web
 yarn install

src-tauri/Cargo.lock (generated, 643 lines changed): file diff suppressed because it is too large.
@@ -1,27 +1,45 @@
 [package]
-name = "chatgpt-next-web"
+name = "nextchat"
 version = "0.1.0"
 description = "A cross platform app for LLM ChatBot."
 authors = ["Yidadaa"]
 license = "mit"
 repository = ""
-default-run = "chatgpt-next-web"
+default-run = "nextchat"
 edition = "2021"
 rust-version = "1.60"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [build-dependencies]
-tauri-build = { version = "1.3.0", features = [] }
+tauri-build = { version = "1.5.1", features = [] }

 [dependencies]
 serde_json = "1.0"
 serde = { version = "1.0", features = ["derive"] }
-tauri = { version = "1.3.0", features = ["notification-all", "fs-all", "clipboard-all", "dialog-all", "shell-open", "updater", "window-close", "window-hide", "window-maximize", "window-minimize", "window-set-icon", "window-set-ignore-cursor-events", "window-set-resizable", "window-show", "window-start-dragging", "window-unmaximize", "window-unminimize"] }
+tauri = { version = "1.5.4", features = [
+    "notification-all",
+    "fs-all",
+    "clipboard-all",
+    "dialog-all",
+    "shell-open",
+    "updater",
+    "window-close",
+    "window-hide",
+    "window-maximize",
+    "window-minimize",
+    "window-set-icon",
+    "window-set-ignore-cursor-events",
+    "window-set-resizable",
+    "window-show",
+    "window-start-dragging",
+    "window-unmaximize",
+    "window-unminimize",
+] }
 tauri-plugin-window-state = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" }

 [features]
 # this feature is used for production builds or when `devPath` points to the filesystem and the built-in dev server is disabled.
 # If you use cargo directly instead of tauri's cli you can use this feature flag to switch between tauri's `dev` and `build` modes.
 # DO NOT REMOVE!!
-custom-protocol = [ "tauri/custom-protocol" ]
+custom-protocol = ["tauri/custom-protocol"]

@@ -9,7 +9,7 @@
   },
   "package": {
     "productName": "NextChat",
-    "version": "2.10.2"
+    "version": "2.10.3"
   },
   "tauri": {
     "allowlist": {

@@ -86,12 +86,13 @@
       }
     },
     "security": {
-      "csp": null
+      "csp": null,
+      "dangerousUseHttpScheme": true
     },
     "updater": {
       "active": true,
       "endpoints": [
-        "https://github.com/Yidadaa/ChatGPT-Next-Web/releases/latest/download/latest.json"
+        "https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/releases/latest/download/latest.json"
       ],
       "dialog": false,
       "windows": {