feat: close #2754 add import/export to file
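This commit rewires the app config store from the hand-rolled create<ChatConfigStore>()(persist(...)) boilerplate to a shared createPersistStore helper imported from ../utils/store. That helper is not part of the diff shown here; the sketch below is only a rough assumption of its shape, inferred from the call site (a default-state object, a (set, get) => methods factory, and the usual persist options), and it omits whatever extra bookkeeping the real helper in app/utils/store.ts may add for the new file import/export feature.

import { create } from "zustand";
import { persist } from "zustand/middleware";
import type { PersistOptions } from "zustand/middleware";

// Hypothetical sketch only -- the real createPersistStore lives in app/utils/store.ts.
// Assumed behaviour: merge the default state with the methods returned by the
// factory, wrap the result in persist(), and pass the options (name/version/migrate)
// straight through to the middleware.
export function createPersistStore<State extends object, Methods extends object>(
  defaultState: State,
  methodsFactory: (
    set: (
      partial:
        | Partial<State & Methods>
        | ((state: State & Methods) => Partial<State & Methods>),
    ) => void,
    get: () => State & Methods,
  ) => Methods,
  persistOptions: PersistOptions<State & Methods>,
) {
  return create<State & Methods>()(
    persist(
      (set, get) =>
        ({
          ...defaultState,
          ...methodsFactory(set, get),
        }) as State & Methods,
      persistOptions,
    ),
  );
}

With a helper along these lines, the store definition in config.ts shrinks to the three arguments visible in the diff below, which would also explain why the separate ChatConfigStore type is dropped in the third hunk: the method types can be inferred from the factory instead.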
@@ -3,6 +3,7 @@ import { persist } from "zustand/middleware";
 import { LLMModel } from "../client/api";
 import { getClientConfig } from "../config/client";
 import { DEFAULT_INPUT_TEMPLATE, DEFAULT_MODELS, StoreKey } from "../constant";
+import { createPersistStore } from "../utils/store";
 
 export type ModelType = (typeof DEFAULT_MODELS)[number]["name"];
 
@@ -21,6 +22,8 @@ export enum Theme {
 }
 
 export const DEFAULT_CONFIG = {
+  lastUpdate: Date.now(), // timestamp, to merge state
+
   submitKey: SubmitKey.CtrlEnter as SubmitKey,
   avatar: "1f603",
   fontSize: 14,
@@ -55,13 +58,6 @@ export const DEFAULT_CONFIG = {
 
 export type ChatConfig = typeof DEFAULT_CONFIG;
 
-export type ChatConfigStore = ChatConfig & {
-  reset: () => void;
-  update: (updater: (config: ChatConfig) => void) => void;
-  mergeModels: (newModels: LLMModel[]) => void;
-  allModels: () => LLMModel[];
-};
-
 export type ModelConfig = ChatConfig["modelConfig"];
 
 export function limitNumber(
@@ -98,85 +94,80 @@ export const ModalConfigValidator = {
   },
 };
 
-export const useAppConfig = create<ChatConfigStore>()(
-  persist(
-    (set, get) => ({
-      ...DEFAULT_CONFIG,
-
-      reset() {
-        set(() => ({ ...DEFAULT_CONFIG }));
-      },
-
-      update(updater) {
-        const config = { ...get() };
-        updater(config);
-        set(() => config);
-      },
-
-      mergeModels(newModels) {
-        if (!newModels || newModels.length === 0) {
-          return;
-        }
-
-        const oldModels = get().models;
-        const modelMap: Record<string, LLMModel> = {};
-
-        for (const model of oldModels) {
-          model.available = false;
-          modelMap[model.name] = model;
-        }
-
-        for (const model of newModels) {
-          model.available = true;
-          modelMap[model.name] = model;
-        }
-
-        set(() => ({
-          models: Object.values(modelMap),
-        }));
-      },
-
-      allModels() {
-        const customModels = get()
-          .customModels.split(",")
-          .filter((v) => !!v && v.length > 0)
-          .map((m) => ({ name: m, available: true }));
-
-        const models = get().models.concat(customModels);
-        return models;
-      },
-    }),
-    {
-      name: StoreKey.Config,
-      version: 3.7,
-      migrate(persistedState, version) {
-        const state = persistedState as ChatConfig;
-
-        if (version < 3.4) {
-          state.modelConfig.sendMemory = true;
-          state.modelConfig.historyMessageCount = 4;
-          state.modelConfig.compressMessageLengthThreshold = 1000;
-          state.modelConfig.frequency_penalty = 0;
-          state.modelConfig.top_p = 1;
-          state.modelConfig.template = DEFAULT_INPUT_TEMPLATE;
-          state.dontShowMaskSplashScreen = false;
-          state.hideBuiltinMasks = false;
-        }
-
-        if (version < 3.5) {
-          state.customModels = "claude,claude-100k";
-        }
-
-        if (version < 3.6) {
-          state.modelConfig.enableInjectSystemPrompts = true;
-        }
-
-        if (version < 3.7) {
-          state.enableAutoGenerateTitle = true;
-        }
-
-        return state as any;
-      },
-    },
-  ),
-);
+export const useAppConfig = createPersistStore(
+  { ...DEFAULT_CONFIG },
+  (set, get) => ({
+    reset() {
+      set(() => ({ ...DEFAULT_CONFIG }));
+    },
+
+    mergeModels(newModels: LLMModel[]) {
+      if (!newModels || newModels.length === 0) {
+        return;
+      }
+
+      const oldModels = get().models;
+      const modelMap: Record<string, LLMModel> = {};
+
+      for (const model of oldModels) {
+        model.available = false;
+        modelMap[model.name] = model;
+      }
+
+      for (const model of newModels) {
+        model.available = true;
+        modelMap[model.name] = model;
+      }
+
+      set(() => ({
+        models: Object.values(modelMap),
+      }));
+    },
+
+    allModels() {
+      const customModels = get()
+        .customModels.split(",")
+        .filter((v) => !!v && v.length > 0)
+        .map((m) => ({ name: m, available: true }));
+
+      const models = get().models.concat(customModels);
+      return models;
+    },
+  }),
+  {
+    name: StoreKey.Config,
+    version: 3.8,
+    migrate(persistedState, version) {
+      const state = persistedState as ChatConfig;
+
+      if (version < 3.4) {
+        state.modelConfig.sendMemory = true;
+        state.modelConfig.historyMessageCount = 4;
+        state.modelConfig.compressMessageLengthThreshold = 1000;
+        state.modelConfig.frequency_penalty = 0;
+        state.modelConfig.top_p = 1;
+        state.modelConfig.template = DEFAULT_INPUT_TEMPLATE;
+        state.dontShowMaskSplashScreen = false;
+        state.hideBuiltinMasks = false;
+      }
+
+      if (version < 3.5) {
+        state.customModels = "claude,claude-100k";
+      }
+
+      if (version < 3.6) {
+        state.modelConfig.enableInjectSystemPrompts = true;
+      }
+
+      if (version < 3.7) {
+        state.enableAutoGenerateTitle = true;
+      }
+
+      if (version < 3.8) {
+        state.lastUpdate = Date.now();
+      }
+
+      return state as any;
+    },
+  },
+);
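Inside the persist options, the store version is bumped from 3.7 to 3.8 and a new cascading branch stamps state.lastUpdate, matching the lastUpdate field added to DEFAULT_CONFIG above (per its inline comment, a timestamp used to merge state for the file import/export). The standalone sketch below (hypothetical PersistedConfig/migrateConfig names, trimmed to the last three branches of the diff) illustrates how these cascading checks work: zustand's persist middleware calls migrate(persistedState, storedVersion) whenever the stored version is older than the declared one, and each branch only fills in the fields introduced after that stored version.

// Illustration with hypothetical names; only the 3.6 / 3.7 / 3.8 branches from the
// diff are reproduced here.
type PersistedConfig = {
  lastUpdate?: number;
  enableAutoGenerateTitle?: boolean;
  customModels?: string;
  modelConfig: { enableInjectSystemPrompts?: boolean };
};

function migrateConfig(state: PersistedConfig, version: number): PersistedConfig {
  if (version < 3.6) {
    // introduced in 3.6
    state.modelConfig.enableInjectSystemPrompts = true;
  }
  if (version < 3.7) {
    // introduced in 3.7
    state.enableAutoGenerateTitle = true;
  }
  if (version < 3.8) {
    // introduced in 3.8 (this commit)
    state.lastUpdate = Date.now();
  }
  return state;
}

// A snapshot written by an older client at version 3.5 picks up all three fields:
const oldSnapshot: PersistedConfig = {
  customModels: "claude,claude-100k",
  modelConfig: {},
};
const upgraded = migrateConfig(oldSnapshot, 3.5);
// upgraded.modelConfig.enableInjectSystemPrompts === true
// upgraded.enableAutoGenerateTitle === true
// typeof upgraded.lastUpdate === "number"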