feat: add multi-model support

This commit is contained in:
Yidadaa 2023-09-26 00:19:21 +08:00
parent b90dfb48ee
commit 5610f423d0
62 changed files with 1439 additions and 940 deletions

View File

@ -1,4 +1,4 @@
import { type OpenAIListModelResponse } from "@/app/client/platforms/openai"; import { type OpenAI } from "@/app/client/openai/types";
import { getServerSideConfig } from "@/app/config/server"; import { getServerSideConfig } from "@/app/config/server";
import { OpenaiPath } from "@/app/constant"; import { OpenaiPath } from "@/app/constant";
import { prettyObject } from "@/app/utils/format"; import { prettyObject } from "@/app/utils/format";
@ -6,9 +6,9 @@ import { NextRequest, NextResponse } from "next/server";
import { auth } from "../../auth"; import { auth } from "../../auth";
import { requestOpenai } from "../../common"; import { requestOpenai } from "../../common";
const ALLOWD_PATH = new Set(Object.values(OpenaiPath)); const ALLOWD_PATH = new Set(Object.values(OpenaiPath) as string[]);
function getModels(remoteModelRes: OpenAIListModelResponse) { function getModels(remoteModelRes: OpenAI.ListModelResponse) {
const config = getServerSideConfig(); const config = getServerSideConfig();
if (config.disableGPT4) { if (config.disableGPT4) {
@ -56,8 +56,8 @@ async function handle(
const response = await requestOpenai(req); const response = await requestOpenai(req);
// list models // list models
if (subpath === OpenaiPath.ListModelPath && response.status === 200) { if (subpath === OpenaiPath.ListModel && response.status === 200) {
const resJson = (await response.json()) as OpenAIListModelResponse; const resJson = await response.json();
const availableModels = getModels(resJson); const availableModels = getModels(resJson);
return NextResponse.json(availableModels, { return NextResponse.json(availableModels, {
status: response.status, status: response.status,

View File

@ -1,151 +0,0 @@
import { getClientConfig } from "../config/client";
import { ACCESS_CODE_PREFIX } from "../constant";
import { ChatMessage, ModelType, useAccessStore } from "../store";
import { ChatGPTApi } from "./platforms/openai";
export const ROLES = ["system", "user", "assistant"] as const;
export type MessageRole = (typeof ROLES)[number];
export const Models = ["gpt-3.5-turbo", "gpt-4"] as const;
export type ChatModel = ModelType;
export interface RequestMessage {
role: MessageRole;
content: string;
}
export interface LLMConfig {
model: string;
temperature?: number;
top_p?: number;
stream?: boolean;
presence_penalty?: number;
frequency_penalty?: number;
}
export interface ChatOptions {
messages: RequestMessage[];
config: LLMConfig;
onUpdate?: (message: string, chunk: string) => void;
onFinish: (message: string) => void;
onError?: (err: Error) => void;
onController?: (controller: AbortController) => void;
}
export interface LLMUsage {
used: number;
total: number;
}
export interface LLMModel {
name: string;
available: boolean;
}
export abstract class LLMApi {
abstract chat(options: ChatOptions): Promise<void>;
abstract usage(): Promise<LLMUsage>;
abstract models(): Promise<LLMModel[]>;
}
type ProviderName = "openai" | "azure" | "claude" | "palm";
interface Model {
name: string;
provider: ProviderName;
ctxlen: number;
}
interface ChatProvider {
name: ProviderName;
apiConfig: {
baseUrl: string;
apiKey: string;
summaryModel: Model;
};
models: Model[];
chat: () => void;
usage: () => void;
}
export class ClientApi {
public llm: LLMApi;
constructor() {
this.llm = new ChatGPTApi();
}
config() {}
prompts() {}
masks() {}
async share(messages: ChatMessage[], avatarUrl: string | null = null) {
const msgs = messages
.map((m) => ({
from: m.role === "user" ? "human" : "gpt",
value: m.content,
}))
.concat([
{
from: "human",
value:
"Share from [ChatGPT Next Web]: https://github.com/Yidadaa/ChatGPT-Next-Web",
},
]);
// 敬告二开开发者们,为了开源大模型的发展,请不要修改上述消息,此消息用于后续数据清洗使用
// Please do not modify this message
console.log("[Share]", messages, msgs);
const clientConfig = getClientConfig();
const proxyUrl = "/sharegpt";
const rawUrl = "https://sharegpt.com/api/conversations";
const shareUrl = clientConfig?.isApp ? rawUrl : proxyUrl;
const res = await fetch(shareUrl, {
body: JSON.stringify({
avatarUrl,
items: msgs,
}),
headers: {
"Content-Type": "application/json",
},
method: "POST",
});
const resJson = await res.json();
console.log("[Share]", resJson);
if (resJson.id) {
return `https://shareg.pt/${resJson.id}`;
}
}
}
export const api = new ClientApi();
export function getHeaders() {
const accessStore = useAccessStore.getState();
let headers: Record<string, string> = {
"Content-Type": "application/json",
"x-requested-with": "XMLHttpRequest",
};
const makeBearer = (token: string) => `Bearer ${token.trim()}`;
const validString = (x: string) => x && x.length > 0;
// use user's api key first
if (validString(accessStore.token)) {
headers.Authorization = makeBearer(accessStore.token);
} else if (
accessStore.enabledAccessControl() &&
validString(accessStore.accessCode)
) {
headers.Authorization = makeBearer(
ACCESS_CODE_PREFIX + accessStore.accessCode,
);
}
return headers;
}

28
app/client/common/auth.ts Normal file
View File

@ -0,0 +1,28 @@
import { getClientConfig } from "@/app/config/client";
import { ACCESS_CODE_PREFIX } from "@/app/constant";
import { useAccessStore } from "@/app/store";
/**
 * Build the value of an HTTP `Authorization` bearer header.
 * Surrounding whitespace in the token is stripped before use.
 */
export function bearer(value: string) {
  const token = value.trim();
  return "Bearer " + token;
}
/**
 * Compute the auth headers for an outgoing API request.
 *
 * Priority: an explicit user-supplied `apiKey` always wins; otherwise, when
 * server-side access control is enabled and we are NOT running as the desktop
 * app, the user's access code (prefixed with ACCESS_CODE_PREFIX) is sent.
 * Returns an empty object when neither applies.
 */
export function getAuthHeaders(apiKey = "") {
  const headers: Record<string, string> = {};
  const accessStore = useAccessStore.getState();
  const runningAsApp = !!getClientConfig()?.isApp;

  if (apiKey) {
    // use user's api key first
    headers.Authorization = bearer(apiKey);
    return headers;
  }

  // or fall back to the access code
  const shouldUseAccessCode =
    accessStore.enabledAccessControl() &&
    !runningAsApp &&
    !!accessStore.accessCode;
  if (shouldUseAccessCode) {
    headers.Authorization = bearer(ACCESS_CODE_PREFIX + accessStore.accessCode);
  }

  return headers;
}

View File

@ -0,0 +1,5 @@
// Provider-agnostic defaults shared (via spread) by every LLM provider config.
export const COMMON_PROVIDER_CONFIG = {
  customModels: "", // comma-separated, user-supplied extra model names
  models: [] as string[], // cached model names for this provider
  autoFetchModels: false, // fetch available models from server or not
};

View File

@ -0,0 +1,44 @@
import { getClientConfig } from "@/app/config/client";
import { ChatMessage } from "@/app/store";
/**
 * Upload a conversation to ShareGPT and return the short share link.
 * Returns `undefined` when the ShareGPT response carries no `id`.
 */
export async function shareToShareGPT(
  messages: ChatMessage[],
  avatarUrl: string | null = null,
) {
  const items = messages.map((message) => ({
    from: message.role === "user" ? "human" : "gpt",
    value: message.content,
  }));
  // NOTE: the upstream project asks that this attribution message be kept
  // unchanged; it is used for later data cleaning. Please do not modify it.
  items.push({
    from: "human",
    value:
      "Share from [ChatGPT Next Web]: https://github.com/Yidadaa/ChatGPT-Next-Web",
  });

  console.log("[Share]", messages, items);

  // The desktop app can call ShareGPT directly; the web build goes through
  // a same-origin proxy route.
  const rawUrl = "https://sharegpt.com/api/conversations";
  const proxyUrl = "/sharegpt";
  const clientConfig = getClientConfig();
  const shareUrl = clientConfig?.isApp ? rawUrl : proxyUrl;

  const res = await fetch(shareUrl, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
    },
    body: JSON.stringify({
      avatarUrl,
      items,
    }),
  });

  const resJson = await res.json();
  console.log("[Share]", resJson);
  if (resJson.id) {
    return `https://shareg.pt/${resJson.id}`;
  }
}

28
app/client/core.ts Normal file
View File

@ -0,0 +1,28 @@
import { MaskConfig, ProviderConfig } from "../store";
import { shareToShareGPT } from "./common/share";
import { createOpenAiClient } from "./openai";
import { ChatControllerPool } from "./common/controller";
/** Registry of LLM client factories, keyed by provider name. */
export const LLMClients = {
  openai: createOpenAiClient,
};

/**
 * Create an LLM client for the provider selected by the mask.
 *
 * @throws Error when the mask references a provider with no registered
 *   client factory (previously this fell through to calling `undefined`,
 *   producing an opaque TypeError).
 */
export function createLLMClient(
  config: ProviderConfig,
  maskConfig: MaskConfig,
) {
  const factory = LLMClients[maskConfig.provider as keyof typeof LLMClients];
  if (!factory) {
    throw new Error(`[LLM Client] unknown provider: ${maskConfig.provider}`);
  }
  return factory(config, maskConfig.modelConfig);
}

/** Assemble the public client API surface. */
export function createApi() {
  return {
    createLLMClient,
    shareToShareGPT,
    controllerManager: ChatControllerPool,
  };
}

export const api = createApi();

2
app/client/index.ts Normal file
View File

@ -0,0 +1,2 @@
export * from "./types";
export * from "./core";

View File

@ -0,0 +1,20 @@
import { COMMON_PROVIDER_CONFIG } from "../common/config";
// Default client-side configuration for the OpenAI provider.
export const OpenAIConfig = {
  // Per-request generation parameters.
  model: {
    model: "gpt-3.5-turbo" as string,
    // Model used for summarizing chat history / titles (kept cheap on purpose).
    summarizeModel: "gpt-3.5-turbo",
    temperature: 0.5,
    top_p: 1,
    max_tokens: 2000,
    presence_penalty: 0,
    frequency_penalty: 0,
  },
  // Connection / account settings for the provider itself.
  provider: {
    name: "OpenAI",
    endpoint: "https://api.openai.com", // empty string means "use app default"
    apiKey: "",
    ...COMMON_PROVIDER_CONFIG,
  },
};

295
app/client/openai/index.ts Normal file
View File

@ -0,0 +1,295 @@
import {
EventStreamContentType,
fetchEventSource,
} from "@fortaine/fetch-event-source";
import {
API_PREFIX,
ApiPath,
DEFAULT_MODELS,
OpenaiPath,
} from "@/app/constant";
import { ModelConfig, ProviderConfig } from "@/app/store";
import { OpenAI } from "./types";
import { ChatOptions, LLMModel, LLMUsage } from "../types";
import Locale from "@/app/locales";
import { prettyObject } from "@/app/utils/format";
import { getApiPath } from "@/app/utils/path";
import { trimEnd } from "@/app/utils/string";
import { omit } from "@/app/utils/object";
import { createLogger } from "@/app/utils/log";
import { getAuthHeaders } from "../common/auth";
/**
 * Create an OpenAI chat-completion client bound to the given provider and
 * model configuration. The returned object exposes `chat`, `chatStream`,
 * `usage` and `models`.
 */
export function createOpenAiClient(
  providerConfigs: ProviderConfig,
  modelConfig: ModelConfig,
) {
  // Snapshot configs so later store mutations do not affect this client.
  const openaiConfig = { ...providerConfigs.openai };
  const logger = createLogger("[OpenAI Client]");
  const openaiModelConfig = { ...modelConfig.openai };

  return {
    /** Common request headers, including the user's auth header if any. */
    headers() {
      return {
        "Content-Type": "application/json",
        ...getAuthHeaders(openaiConfig.apiKey),
      };
    },

    /** Resolve an API path against the configured endpoint. */
    path(path: OpenaiPath): string {
      let baseUrl = openaiConfig.endpoint;

      // if endpoint is empty, use default endpoint
      if (baseUrl.trim().length === 0) {
        baseUrl = getApiPath(ApiPath.OpenAI);
      }

      if (!baseUrl.startsWith("http") && !baseUrl.startsWith(API_PREFIX)) {
        baseUrl = "https://" + baseUrl;
      }

      baseUrl = trimEnd(baseUrl, "/");

      return `${baseUrl}/${path}`;
    },

    /** Pull the assistant's message text out of a completion response. */
    extractMessage(res: OpenAI.ChatCompletionResponse) {
      return res.choices[0]?.message?.content ?? "";
    },

    /**
     * Build the fetch path / payload / abort controller for a chat request.
     * @param stream whether to request a streaming completion
     */
    beforeRequest(options: ChatOptions, stream = false) {
      const messages = options.messages.map((v) => ({
        role: v.role,
        content: v.content,
      }));

      // BUGFIX: use a per-request copy when summarizing. The previous code
      // assigned `openaiModelConfig.model = openaiModelConfig.summarizeModel`,
      // permanently switching this client instance over to the summarize
      // model for every subsequent request.
      const requestModelConfig = options.shouldSummarize
        ? { ...openaiModelConfig, model: openaiModelConfig.summarizeModel }
        : openaiModelConfig;

      const requestBody: OpenAI.ChatCompletionRequest = {
        messages,
        stream,
        ...omit(requestModelConfig, "summarizeModel"),
      };

      const path = this.path(OpenaiPath.Chat);

      logger.log("path = ", path, requestBody);

      const controller = new AbortController();
      options.onController?.(controller);

      const payload = {
        method: "POST",
        body: JSON.stringify(requestBody),
        signal: controller.signal,
        headers: this.headers(),
      };

      return {
        path,
        payload,
        controller,
      };
    },

    /** Non-streaming chat: resolves onFinish with the full reply text. */
    async chat(options: ChatOptions) {
      try {
        const { path, payload, controller } = this.beforeRequest(
          options,
          false,
        );

        // Aborting a non-streaming request finishes with an empty message.
        controller.signal.onabort = () => options.onFinish("");

        const res = await fetch(path, payload);
        const resJson = await res.json();
        const message = this.extractMessage(resJson);
        options.onFinish(message);
      } catch (e) {
        logger.error("failed to chat", e);
        options.onError?.(e as Error);
      }
    },

    /** Streaming chat over SSE: onUpdate per chunk, onFinish exactly once. */
    async chatStream(options: ChatOptions) {
      try {
        const { path, payload, controller } = this.beforeRequest(options, true);

        const context = {
          text: "",
          finished: false,
        };

        // onFinish must fire exactly once, whichever callback reaches it first.
        const finish = () => {
          if (!context.finished) {
            options.onFinish(context.text);
            context.finished = true;
          }
        };

        controller.signal.onabort = finish;

        fetchEventSource(path, {
          ...payload,
          async onopen(res) {
            const contentType = res.headers.get("content-type");
            logger.log("response content type: ", contentType);

            // A plain-text response carries the whole answer in one piece.
            if (contentType?.startsWith("text/plain")) {
              context.text = await res.clone().text();
              return finish();
            }

            // Anything that is not a 200 SSE stream is treated as an error
            // and surfaced to the user as the message body.
            if (
              !res.ok ||
              !res.headers
                .get("content-type")
                ?.startsWith(EventStreamContentType) ||
              res.status !== 200
            ) {
              const responseTexts = [context.text];
              let extraInfo = await res.clone().text();
              try {
                const resJson = await res.clone().json();
                extraInfo = prettyObject(resJson);
              } catch {}

              if (res.status === 401) {
                responseTexts.push(Locale.Error.Unauthorized);
              }

              if (extraInfo) {
                responseTexts.push(extraInfo);
              }

              context.text = responseTexts.join("\n\n");

              return finish();
            }
          },
          onmessage(msg) {
            if (msg.data === "[DONE]" || context.finished) {
              return finish();
            }
            const chunk = msg.data;
            try {
              const chunkJson = JSON.parse(
                chunk,
              ) as OpenAI.ChatCompletionStreamResponse;
              const delta = chunkJson.choices[0].delta.content;
              if (delta) {
                context.text += delta;
                options.onUpdate?.(context.text, delta);
              }
            } catch (e) {
              // A malformed chunk is logged and skipped, not fatal.
              logger.error("[Request] parse error", chunk, msg);
            }
          },
          onclose() {
            finish();
          },
          onerror(e) {
            options.onError?.(e);
          },
          // keep receiving events even while the tab is hidden
          openWhenHidden: true,
        });
      } catch (e) {
        logger.error("failed to chat", e);
        options.onError?.(e as Error);
      }
    },

    /** Query this month's usage and the subscription hard limit (in USD). */
    async usage() {
      const formatDate = (d: Date) =>
        `${d.getFullYear()}-${(d.getMonth() + 1)
          .toString()
          .padStart(2, "0")}-${d.getDate().toString().padStart(2, "0")}`;
      const ONE_DAY = 1 * 24 * 60 * 60 * 1000;
      const now = new Date();
      const startOfMonth = new Date(now.getFullYear(), now.getMonth(), 1);
      const startDate = formatDate(startOfMonth);
      // End date is tomorrow so today's usage is always included.
      const endDate = formatDate(new Date(Date.now() + ONE_DAY));

      const [used, subs] = await Promise.all([
        fetch(
          `${this.path(
            OpenaiPath.Usage,
          )}?start_date=${startDate}&end_date=${endDate}`,
          {
            method: "GET",
            headers: this.headers(),
          },
        ),
        fetch(this.path(OpenaiPath.Subs), {
          method: "GET",
          headers: this.headers(),
        }),
      ]);

      if (!used.ok || !subs.ok) {
        throw new Error("Failed to query usage from openai");
      }

      const response = (await used.json()) as {
        total_usage?: number;
        error?: {
          type: string;
          message: string;
        };
      };

      const total = (await subs.json()) as {
        hard_limit_usd?: number;
      };

      if (response.error?.type) {
        throw Error(response.error?.message);
      }

      // The usage endpoint reports cents; convert to dollars.
      response.total_usage = Math.round(response.total_usage ?? 0) / 100;
      total.hard_limit_usd =
        Math.round((total.hard_limit_usd ?? 0) * 100) / 100;

      return {
        used: response.total_usage,
        total: total.hard_limit_usd,
      } as LLMUsage;
    },

    /** List chat models: built-in defaults plus user customs, or fetched. */
    async models(): Promise<LLMModel[]> {
      const customModels = openaiConfig.customModels
        .split(",")
        .map((v) => v.trim())
        // BUGFIX: "".split(",") yields [""], which previously produced a
        // phantom model with an empty name when no customs were configured.
        .filter((v) => v.length > 0)
        .map((v) => ({
          name: v,
          available: true,
        }));

      if (!openaiConfig.autoFetchModels) {
        return [...DEFAULT_MODELS.slice(), ...customModels];
      }

      const res = await fetch(this.path(OpenaiPath.ListModel), {
        method: "GET",
        headers: this.headers(),
      });

      const resJson = (await res.json()) as OpenAI.ListModelResponse;
      // Only gpt-* models are usable for chat completions here.
      const chatModels =
        resJson.data?.filter((m) => m.id.startsWith("gpt-")) ?? [];

      return chatModels
        .map((m) => ({
          name: m.id,
          available: true,
        }))
        .concat(customModels);
    },
  };
}

View File

@ -0,0 +1,79 @@
/** Wire-format types for the OpenAI chat-completion and model-list APIs. */
export namespace OpenAI {
  export type Role = "system" | "user" | "assistant" | "function";
  export type FinishReason = "stop" | "length" | "function_call";

  /** One chat message; `content` may be absent on function-call replies. */
  export interface Message {
    role: Role;
    content?: string;
    // Present when the assistant decides to invoke a declared function.
    function_call?: {
      name: string;
      arguments: string; // JSON-encoded argument object; parse before use
    };
  }

  /** A function the model is allowed to call. */
  export interface Function {
    name: string;
    description?: string;
    parameters: object; // JSON Schema describing the arguments
  }

  /** Response shape of the list-models endpoint. */
  export interface ListModelResponse {
    object: string;
    data: Array<{
      id: string;
      object: string;
      root: string;
    }>;
  }

  export interface ChatCompletionChoice {
    index: number;
    message: Message;
    finish_reason: FinishReason;
  }

  /** Token accounting returned with non-streaming completions. */
  export interface ChatCompletionUsage {
    prompt_tokens: number;
    completion_tokens: number;
    total_tokens: number;
  }

  /** Response of a chat completion with stream = false. */
  export interface ChatCompletionResponse {
    id: string;
    object: string;
    created: number;
    model: string;
    choices: ChatCompletionChoice[];
    usage: ChatCompletionUsage;
  }

  /** One incremental choice in a streamed completion. */
  export interface ChatCompletionChunkChoice {
    index: number;
    delta: Message; // partial message; fields arrive incrementally
    finish_reason?: FinishReason;
  }

  /** One SSE chunk of a streaming completion. */
  export interface ChatCompletionStreamResponse {
    object: string;
    created: number;
    model: string;
    choices: ChatCompletionChunkChoice[];
  }

  /** Request body for the chat-completion endpoint. */
  export interface ChatCompletionRequest {
    model: string;
    messages: Message[];
    functions?: Function[];
    function_call?: "none" | "auto";
    temperature?: number;
    top_p?: number;
    n?: number;
    stream?: boolean;
    stop?: string | string[];
    max_tokens?: number;
    presence_penalty?: number;
    frequency_penalty?: number;
  }
}

View File

@ -1,281 +0,0 @@
import {
DEFAULT_API_HOST,
DEFAULT_MODELS,
OpenaiPath,
REQUEST_TIMEOUT_MS,
} from "@/app/constant";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import { ChatOptions, getHeaders, LLMApi, LLMModel, LLMUsage } from "../api";
import Locale from "../../locales";
import {
EventStreamContentType,
fetchEventSource,
} from "@fortaine/fetch-event-source";
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
export interface OpenAIListModelResponse {
object: string;
data: Array<{
id: string;
object: string;
root: string;
}>;
}
export class ChatGPTApi implements LLMApi {
private disableListModels = true;
path(path: string): string {
let openaiUrl = useAccessStore.getState().openaiUrl;
const apiPath = "/api/openai";
if (openaiUrl.length === 0) {
const isApp = !!getClientConfig()?.isApp;
openaiUrl = isApp ? DEFAULT_API_HOST : apiPath;
}
if (openaiUrl.endsWith("/")) {
openaiUrl = openaiUrl.slice(0, openaiUrl.length - 1);
}
if (!openaiUrl.startsWith("http") && !openaiUrl.startsWith(apiPath)) {
openaiUrl = "https://" + openaiUrl;
}
return [openaiUrl, path].join("/");
}
extractMessage(res: any) {
return res.choices?.at(0)?.message?.content ?? "";
}
async chat(options: ChatOptions) {
const messages = options.messages.map((v) => ({
role: v.role,
content: v.content,
}));
const modelConfig = {
...useAppConfig.getState().modelConfig,
...useChatStore.getState().currentSession().mask.modelConfig,
...{
model: options.config.model,
},
};
const requestPayload = {
messages,
stream: options.config.stream,
model: modelConfig.model,
temperature: modelConfig.temperature,
presence_penalty: modelConfig.presence_penalty,
frequency_penalty: modelConfig.frequency_penalty,
top_p: modelConfig.top_p,
};
console.log("[Request] openai payload: ", requestPayload);
const shouldStream = !!options.config.stream;
const controller = new AbortController();
options.onController?.(controller);
try {
const chatPath = this.path(OpenaiPath.ChatPath);
const chatPayload = {
method: "POST",
body: JSON.stringify(requestPayload),
signal: controller.signal,
headers: getHeaders(),
};
// make a fetch request
const requestTimeoutId = setTimeout(
() => controller.abort(),
REQUEST_TIMEOUT_MS,
);
if (shouldStream) {
let responseText = "";
let finished = false;
const finish = () => {
if (!finished) {
options.onFinish(responseText);
finished = true;
}
};
controller.signal.onabort = finish;
fetchEventSource(chatPath, {
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);
const contentType = res.headers.get("content-type");
console.log(
"[OpenAI] request response content type: ",
contentType,
);
if (contentType?.startsWith("text/plain")) {
responseText = await res.clone().text();
return finish();
}
if (
!res.ok ||
!res.headers
.get("content-type")
?.startsWith(EventStreamContentType) ||
res.status !== 200
) {
const responseTexts = [responseText];
let extraInfo = await res.clone().text();
try {
const resJson = await res.clone().json();
extraInfo = prettyObject(resJson);
} catch {}
if (res.status === 401) {
responseTexts.push(Locale.Error.Unauthorized);
}
if (extraInfo) {
responseTexts.push(extraInfo);
}
responseText = responseTexts.join("\n\n");
return finish();
}
},
onmessage(msg) {
if (msg.data === "[DONE]" || finished) {
return finish();
}
const text = msg.data;
try {
const json = JSON.parse(text);
const delta = json.choices[0].delta.content;
if (delta) {
responseText += delta;
options.onUpdate?.(responseText, delta);
}
} catch (e) {
console.error("[Request] parse error", text, msg);
}
},
onclose() {
finish();
},
onerror(e) {
options.onError?.(e);
throw e;
},
openWhenHidden: true,
});
} else {
const res = await fetch(chatPath, chatPayload);
clearTimeout(requestTimeoutId);
const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
options.onError?.(e as Error);
}
}
async usage() {
const formatDate = (d: Date) =>
`${d.getFullYear()}-${(d.getMonth() + 1).toString().padStart(2, "0")}-${d
.getDate()
.toString()
.padStart(2, "0")}`;
const ONE_DAY = 1 * 24 * 60 * 60 * 1000;
const now = new Date();
const startOfMonth = new Date(now.getFullYear(), now.getMonth(), 1);
const startDate = formatDate(startOfMonth);
const endDate = formatDate(new Date(Date.now() + ONE_DAY));
const [used, subs] = await Promise.all([
fetch(
this.path(
`${OpenaiPath.UsagePath}?start_date=${startDate}&end_date=${endDate}`,
),
{
method: "GET",
headers: getHeaders(),
},
),
fetch(this.path(OpenaiPath.SubsPath), {
method: "GET",
headers: getHeaders(),
}),
]);
if (used.status === 401) {
throw new Error(Locale.Error.Unauthorized);
}
if (!used.ok || !subs.ok) {
throw new Error("Failed to query usage from openai");
}
const response = (await used.json()) as {
total_usage?: number;
error?: {
type: string;
message: string;
};
};
const total = (await subs.json()) as {
hard_limit_usd?: number;
};
if (response.error && response.error.type) {
throw Error(response.error.message);
}
if (response.total_usage) {
response.total_usage = Math.round(response.total_usage) / 100;
}
if (total.hard_limit_usd) {
total.hard_limit_usd = Math.round(total.hard_limit_usd * 100) / 100;
}
return {
used: response.total_usage,
total: total.hard_limit_usd,
} as LLMUsage;
}
async models(): Promise<LLMModel[]> {
if (this.disableListModels) {
return DEFAULT_MODELS.slice();
}
const res = await fetch(this.path(OpenaiPath.ListModelPath), {
method: "GET",
headers: {
...getHeaders(),
},
});
const resJson = (await res.json()) as OpenAIListModelResponse;
const chatModels = resJson.data?.filter((m) => m.id.startsWith("gpt-"));
console.log("[Models]", chatModels);
if (!chatModels) {
return [];
}
return chatModels.map((m) => ({
name: m.id,
available: true,
}));
}
}
export { OpenaiPath };

39
app/client/types.ts Normal file
View File

@ -0,0 +1,39 @@
import { DEFAULT_MODELS } from "../constant";
/** Account usage as reported by a provider (dollars, for OpenAI). */
export interface LLMUsage {
  used: number;
  total: number;
  // NOTE(review): `available` looks out of place on a usage record — the
  // OpenAI client returns only `{ used, total }` and force-casts with
  // `as LLMUsage`, so this field is never populated. Confirm it is needed.
  available: boolean;
}

/** A selectable model and whether the current account may use it. */
export interface LLMModel {
  name: string;
  available: boolean;
}

export const ROLES = ["system", "user", "assistant"] as const;
export type MessageRole = (typeof ROLES)[number];

// Union of the built-in default model names.
export type ChatModel = (typeof DEFAULT_MODELS)[number]["name"];

export interface RequestMessage {
  role: MessageRole;
  content: string;
}

/** Inputs and lifecycle callbacks for a single chat request. */
export interface ChatOptions {
  messages: RequestMessage[];
  // When true the client may switch to its cheaper summarize model.
  shouldSummarize?: boolean;

  onUpdate?: (message: string, chunk: string) => void; // streaming partials
  onFinish: (message: string) => void; // full final text (called once)
  onError?: (err: Error) => void;
  onController?: (controller: AbortController) => void; // enables aborting
}

/** The contract every provider client implementation must satisfy. */
export type LLMClient = {
  chat(options: ChatOptions): Promise<void>;
  chatStream(options: ChatOptions): Promise<void>;
  usage(): Promise<LLMUsage>;
  models(): Promise<LLMModel[]>;
};

View File

@ -3,7 +3,7 @@ import { IconButton } from "./button";
import { useNavigate } from "react-router-dom"; import { useNavigate } from "react-router-dom";
import { Path } from "../constant"; import { Path } from "../constant";
import { useAccessStore } from "../store"; import { useAccessStore, useAppConfig, useChatStore } from "../store";
import Locale from "../locales"; import Locale from "../locales";
import BotIcon from "../icons/bot.svg"; import BotIcon from "../icons/bot.svg";
@ -13,10 +13,14 @@ import { getClientConfig } from "../config/client";
export function AuthPage() { export function AuthPage() {
const navigate = useNavigate(); const navigate = useNavigate();
const access = useAccessStore(); const access = useAccessStore();
const config = useAppConfig();
const goHome = () => navigate(Path.Home); const goHome = () => navigate(Path.Home);
const goChat = () => navigate(Path.Chat); const goChat = () => navigate(Path.Chat);
const resetAccessCode = () => { access.updateCode(""); access.updateToken(""); }; // Reset access code to empty string const resetAccessCode = () => {
access.update((config) => (config.accessCode = ""));
config.update((config) => (config.providerConfig.openai.apiKey = ""));
}; // Reset access code to empty string
useEffect(() => { useEffect(() => {
if (getClientConfig()?.isApp) { if (getClientConfig()?.isApp) {
@ -40,7 +44,9 @@ export function AuthPage() {
placeholder={Locale.Auth.Input} placeholder={Locale.Auth.Input}
value={access.accessCode} value={access.accessCode}
onChange={(e) => { onChange={(e) => {
access.updateCode(e.currentTarget.value); access.update(
(config) => (config.accessCode = e.currentTarget.value),
);
}} }}
/> />
{!access.hideUserApiKey ? ( {!access.hideUserApiKey ? (
@ -50,9 +56,12 @@ export function AuthPage() {
className={styles["auth-input"]} className={styles["auth-input"]}
type="password" type="password"
placeholder={Locale.Settings.Token.Placeholder} placeholder={Locale.Settings.Token.Placeholder}
value={access.token} value={config.providerConfig.openai.apiKey}
onChange={(e) => { onChange={(e) => {
access.updateToken(e.currentTarget.value); config.update(
(config) =>
(config.providerConfig.openai.apiKey = e.currentTarget.value),
);
}} }}
/> />
</> </>

View File

@ -39,6 +39,9 @@ export function ChatItem(props: {
}); });
} }
}, [props.selected]); }, [props.selected]);
const modelConfig = useChatStore().extractModelConfig(props.mask.config);
return ( return (
<Draggable draggableId={`${props.id}`} index={props.index}> <Draggable draggableId={`${props.id}`} index={props.index}>
{(provided) => ( {(provided) => (
@ -60,7 +63,10 @@ export function ChatItem(props: {
{props.narrow ? ( {props.narrow ? (
<div className={styles["chat-item-narrow"]}> <div className={styles["chat-item-narrow"]}>
<div className={styles["chat-item-avatar"] + " no-dark"}> <div className={styles["chat-item-avatar"] + " no-dark"}>
<MaskAvatar mask={props.mask} /> <MaskAvatar
avatar={props.mask.avatar}
model={modelConfig.model}
/>
</div> </div>
<div className={styles["chat-item-narrow-count"]}> <div className={styles["chat-item-narrow-count"]}>
{props.count} {props.count}

View File

@ -1,12 +1,5 @@
import { useDebouncedCallback } from "use-debounce"; import { useDebouncedCallback } from "use-debounce";
import React, { import React, { useState, useRef, useEffect, useMemo, Fragment } from "react";
useState,
useRef,
useEffect,
useMemo,
useCallback,
Fragment,
} from "react";
import SendWhiteIcon from "../icons/send-white.svg"; import SendWhiteIcon from "../icons/send-white.svg";
import BrainIcon from "../icons/brain.svg"; import BrainIcon from "../icons/brain.svg";
@ -37,15 +30,12 @@ import RobotIcon from "../icons/robot.svg";
import { import {
ChatMessage, ChatMessage,
SubmitKey,
useChatStore, useChatStore,
BOT_HELLO, BOT_HELLO,
createMessage, createMessage,
useAccessStore, useAccessStore,
Theme,
useAppConfig, useAppConfig,
DEFAULT_TOPIC, DEFAULT_TOPIC,
ModelType,
} from "../store"; } from "../store";
import { import {
@ -57,7 +47,7 @@ import {
import dynamic from "next/dynamic"; import dynamic from "next/dynamic";
import { ChatControllerPool } from "../client/controller"; import { ChatControllerPool } from "../client/common/controller";
import { Prompt, usePromptStore } from "../store/prompt"; import { Prompt, usePromptStore } from "../store/prompt";
import Locale from "../locales"; import Locale from "../locales";
@ -73,11 +63,10 @@ import {
showPrompt, showPrompt,
showToast, showToast,
} from "./ui-lib"; } from "./ui-lib";
import { useLocation, useNavigate } from "react-router-dom"; import { useNavigate } from "react-router-dom";
import { import {
CHAT_PAGE_SIZE, CHAT_PAGE_SIZE,
LAST_INPUT_KEY, LAST_INPUT_KEY,
MAX_RENDER_MSG_COUNT,
Path, Path,
REQUEST_TIMEOUT_MS, REQUEST_TIMEOUT_MS,
UNFINISHED_INPUT, UNFINISHED_INPUT,
@ -89,6 +78,8 @@ import { ChatCommandPrefix, useChatCommand, useCommand } from "../command";
import { prettyObject } from "../utils/format"; import { prettyObject } from "../utils/format";
import { ExportMessageModal } from "./exporter"; import { ExportMessageModal } from "./exporter";
import { getClientConfig } from "../config/client"; import { getClientConfig } from "../config/client";
import { deepClone } from "../utils/clone";
import { SubmitKey, Theme } from "../typing";
const Markdown = dynamic(async () => (await import("./markdown")).Markdown, { const Markdown = dynamic(async () => (await import("./markdown")).Markdown, {
loading: () => <LoadingIcon />, loading: () => <LoadingIcon />,
@ -142,7 +133,7 @@ export function SessionConfigModel(props: { onClose: () => void }) {
}} }}
shouldSyncFromGlobal shouldSyncFromGlobal
extraListItems={ extraListItems={
session.mask.modelConfig.sendMemory ? ( session.mask.config.chatConfig.sendMemory ? (
<ListItem <ListItem
title={`${Locale.Memory.Title} (${session.lastSummarizeIndex} of ${session.messages.length})`} title={`${Locale.Memory.Title} (${session.lastSummarizeIndex} of ${session.messages.length})`}
subTitle={session.memoryPrompt || Locale.Memory.EmptyContent} subTitle={session.memoryPrompt || Locale.Memory.EmptyContent}
@ -427,17 +418,19 @@ export function ChatActions(props: {
// stop all responses // stop all responses
const couldStop = ChatControllerPool.hasPending(); const couldStop = ChatControllerPool.hasPending();
const stopAll = () => ChatControllerPool.stopAll(); const stopAll = () => ChatControllerPool.stopAll();
const client = chatStore.getClient();
const modelConfig = chatStore.getCurrentModelConfig();
const currentModel = modelConfig.model;
// switch model // switch model
const currentModel = chatStore.currentSession().mask.modelConfig.model; const [models, setModels] = useState<string[]>([]);
const models = useMemo( useEffect(() => {
() => client
config .models()
.allModels() .then((_models) =>
.filter((m) => m.available) setModels(_models.filter((v) => v.available).map((v) => v.name)),
.map((m) => m.name),
[config],
); );
}, []);
const [showModelSelector, setShowModelSelector] = useState(false); const [showModelSelector, setShowModelSelector] = useState(false);
return ( return (
@ -526,7 +519,7 @@ export function ChatActions(props: {
onSelection={(s) => { onSelection={(s) => {
if (s.length === 0) return; if (s.length === 0) return;
chatStore.updateCurrentSession((session) => { chatStore.updateCurrentSession((session) => {
session.mask.modelConfig.model = s[0] as ModelType; chatStore.extractModelConfig(session.mask.config).model = s[0];
session.mask.syncGlobalConfig = false; session.mask.syncGlobalConfig = false;
}); });
showToast(s[0]); showToast(s[0]);
@ -603,6 +596,9 @@ function _Chat() {
type RenderMessage = ChatMessage & { preview?: boolean }; type RenderMessage = ChatMessage & { preview?: boolean };
const chatStore = useChatStore(); const chatStore = useChatStore();
const modelConfig = chatStore.getCurrentModelConfig();
const maskConfig = chatStore.getCurrentMaskConfig();
const session = chatStore.currentSession(); const session = chatStore.currentSession();
const config = useAppConfig(); const config = useAppConfig();
const fontSize = config.fontSize; const fontSize = config.fontSize;
@ -747,7 +743,7 @@ function _Chat() {
// auto sync mask config from global config // auto sync mask config from global config
if (session.mask.syncGlobalConfig) { if (session.mask.syncGlobalConfig) {
console.log("[Mask] syncing from global, name = ", session.mask.name); console.log("[Mask] syncing from global, name = ", session.mask.name);
session.mask.modelConfig = { ...config.modelConfig }; session.mask.config = deepClone(config.globalMaskConfig);
} }
}); });
// eslint-disable-next-line react-hooks/exhaustive-deps // eslint-disable-next-line react-hooks/exhaustive-deps
@ -979,7 +975,7 @@ function _Chat() {
console.log("[Command] got code from url: ", text); console.log("[Command] got code from url: ", text);
showConfirm(Locale.URLCommand.Code + `code = ${text}`).then((res) => { showConfirm(Locale.URLCommand.Code + `code = ${text}`).then((res) => {
if (res) { if (res) {
accessStore.updateCode(text); accessStore.update((config) => (config.accessCode = text));
} }
}); });
}, },
@ -999,10 +995,10 @@ function _Chat() {
).then((res) => { ).then((res) => {
if (!res) return; if (!res) return;
if (payload.key) { if (payload.key) {
accessStore.updateToken(payload.key); // TODO: auto-fill openai api key here, must specific provider type
} }
if (payload.url) { if (payload.url) {
accessStore.updateOpenAiUrl(payload.url); // TODO: auto-fill openai url here, must specific provider type
} }
}); });
} }
@ -1159,7 +1155,10 @@ function _Chat() {
{["system"].includes(message.role) ? ( {["system"].includes(message.role) ? (
<Avatar avatar="2699-fe0f" /> <Avatar avatar="2699-fe0f" />
) : ( ) : (
<MaskAvatar mask={session.mask} /> <MaskAvatar
avatar={session.mask.avatar}
model={modelConfig.model}
/>
)} )}
</> </>
)} )}

View File

@ -0,0 +1,171 @@
import {
ChatConfig,
LLMProvider,
LLMProviders,
ModelConfig,
ProviderConfig,
} from "@/app/store";
import { Updater } from "@/app/typing";
import { OpenAIModelConfig } from "./openai/model";
import { OpenAIProviderConfig } from "./openai/provider";
import { ListItem, Select } from "../ui-lib";
import Locale from "@/app/locales";
import { InputRange } from "../input-range";
/**
 * Renders the model-level settings panel for the currently selected
 * provider. Providers without a dedicated panel render nothing.
 */
export function ModelConfigList(props: {
  provider: LLMProvider;
  config: ModelConfig;
  updateConfig: Updater<ModelConfig>;
}) {
  if (props.provider !== "openai") {
    return null;
  }

  // NOTE(review): the model list is hard-coded here — presumably a
  // placeholder until models are fetched from the provider; confirm.
  return (
    <OpenAIModelConfig
      config={props.config.openai}
      updateConfig={(updateOpenAI) => {
        // Scope the incoming updater to the OpenAI slice of the config.
        props.updateConfig((modelConfig) => updateOpenAI(modelConfig.openai));
      }}
      models={[
        {
          name: "gpt-3.5-turbo",
          available: true,
        },
        {
          name: "gpt-4",
          available: true,
        },
      ]}
    />
  );
}
/**
 * Renders the provider-level settings panel (endpoint, API key, …) for
 * the currently selected provider. Unknown providers render nothing.
 */
export function ProviderConfigList(props: {
  provider: LLMProvider;
  config: ProviderConfig;
  updateConfig: Updater<ProviderConfig>;
}) {
  if (props.provider !== "openai") {
    return null;
  }

  return (
    <OpenAIProviderConfig
      config={props.config.openai}
      updateConfig={(updateOpenAI) => {
        // Scope the incoming updater to the OpenAI slice of the config.
        props.updateConfig((providerConfig) =>
          updateOpenAI(providerConfig.openai),
        );
      }}
    />
  );
}
/**
 * Dropdown list item for switching between LLM providers.
 *
 * NOTE(review): title/subtitle are hard-coded Chinese instead of going
 * through Locale like the rest of the settings UI — confirm and localize.
 */
export function ProviderSelectItem(props: {
  value: LLMProvider;
  update: (value: LLMProvider) => void;
}) {
  return (
    <ListItem title="服务提供商" subTitle="切换不同的模型提供商">
      <Select
        value={props.value}
        onChange={(e) => props.update(e.target.value as LLMProvider)}
      >
        {LLMProviders.map(([displayName, providerValue]) => (
          <option value={providerValue} key={displayName}>
            {displayName}
          </option>
        ))}
      </Select>
    </ListItem>
  );
}
/**
 * Provider-agnostic chat behavior settings: system-prompt injection,
 * input template, history window size, compression threshold and
 * whether to send long-term memory along with messages.
 */
export function ChatConfigList(props: {
  config: ChatConfig;
  updateConfig: (updater: (config: ChatConfig) => void) => void;
}) {
  return (
    <>
      <ListItem
        title={Locale.Settings.InjectSystemPrompts.Title}
        subTitle={Locale.Settings.InjectSystemPrompts.SubTitle}
      >
        <input
          type="checkbox"
          checked={props.config.enableInjectSystemPrompts}
          onChange={(e) =>
            props.updateConfig(
              (config) =>
                (config.enableInjectSystemPrompts = e.currentTarget.checked),
            )
          }
        />
      </ListItem>
      <ListItem
        title={Locale.Settings.InputTemplate.Title}
        subTitle={Locale.Settings.InputTemplate.SubTitle}
      >
        <input
          type="text"
          value={props.config.template}
          onChange={(e) =>
            props.updateConfig(
              (config) => (config.template = e.currentTarget.value),
            )
          }
        />
      </ListItem>
      <ListItem
        title={Locale.Settings.HistoryCount.Title}
        subTitle={Locale.Settings.HistoryCount.SubTitle}
      >
        <InputRange
          title={props.config.historyMessageCount.toString()}
          value={props.config.historyMessageCount}
          min="0"
          max="64"
          step="1"
          onChange={(e) =>
            props.updateConfig(
              // was e.target — use currentTarget for consistency with
              // every other handler in this component
              (config) =>
                (config.historyMessageCount = e.currentTarget.valueAsNumber),
            )
          }
        />
      </ListItem>
      <ListItem
        title={Locale.Settings.CompressThreshold.Title}
        subTitle={Locale.Settings.CompressThreshold.SubTitle}
      >
        <input
          type="number"
          min={500}
          max={4000}
          value={props.config.compressMessageLengthThreshold}
          onChange={(e) =>
            props.updateConfig(
              (config) =>
                (config.compressMessageLengthThreshold =
                  e.currentTarget.valueAsNumber),
            )
          }
        />
      </ListItem>
      <ListItem title={Locale.Memory.Title} subTitle={Locale.Memory.Send}>
        <input
          type="checkbox"
          checked={props.config.sendMemory}
          onChange={(e) =>
            props.updateConfig(
              (config) => (config.sendMemory = e.currentTarget.checked),
            )
          }
        />
      </ListItem>
    </>
  );
}

View File

@ -0,0 +1,113 @@
import { ModelConfig } from "@/app/store";
import { ModelConfigProps } from "../types";
import { ListItem, Select } from "../../ui-lib";
import Locale from "@/app/locales";
import { InputRange } from "../../input-range";
/**
 * OpenAI-specific model parameters: model selection plus the sampling
 * knobs (temperature, top_p, max_tokens, presence/frequency penalties).
 *
 * NOTE(review): temperature and the penalties render via `?.toFixed(1)`
 * (undefined when unset) while top_p falls back to 1 — presumably the
 * config always carries these fields; confirm the defaults upstream.
 */
export function OpenAIModelConfig(
  props: ModelConfigProps<ModelConfig["openai"]>,
) {
  return (
    <>
      <ListItem title={Locale.Settings.Model}>
        <Select
          value={props.config.model}
          onChange={(e) =>
            props.updateConfig(
              (config) => (config.model = e.currentTarget.value),
            )
          }
        >
          {props.models.map((model, index) => (
            <option value={model.name} key={index} disabled={!model.available}>
              {model.name}
            </option>
          ))}
        </Select>
      </ListItem>
      <ListItem
        title={Locale.Settings.Temperature.Title}
        subTitle={Locale.Settings.Temperature.SubTitle}
      >
        <InputRange
          value={props.config.temperature?.toFixed(1)}
          min="0"
          max="1" // deliberately capped at 1 rather than the API's 2
          step="0.1"
          onChange={(e) =>
            props.updateConfig(
              (config) => (config.temperature = e.currentTarget.valueAsNumber),
            )
          }
        />
      </ListItem>
      <ListItem
        title={Locale.Settings.TopP.Title}
        subTitle={Locale.Settings.TopP.SubTitle}
      >
        <InputRange
          value={(props.config.top_p ?? 1).toFixed(1)}
          min="0"
          max="1"
          step="0.1"
          onChange={(e) =>
            props.updateConfig(
              (config) => (config.top_p = e.currentTarget.valueAsNumber),
            )
          }
        />
      </ListItem>
      <ListItem
        title={Locale.Settings.MaxTokens.Title}
        subTitle={Locale.Settings.MaxTokens.SubTitle}
      >
        <input
          type="number"
          min={100}
          max={100000}
          value={props.config.max_tokens}
          onChange={(e) =>
            props.updateConfig(
              (config) => (config.max_tokens = e.currentTarget.valueAsNumber),
            )
          }
        />
      </ListItem>
      <ListItem
        title={Locale.Settings.PresencePenalty.Title}
        subTitle={Locale.Settings.PresencePenalty.SubTitle}
      >
        <InputRange
          value={props.config.presence_penalty?.toFixed(1)}
          min="-2"
          max="2"
          step="0.1"
          onChange={(e) =>
            props.updateConfig(
              (config) =>
                (config.presence_penalty = e.currentTarget.valueAsNumber),
            )
          }
        />
      </ListItem>
      <ListItem
        title={Locale.Settings.FrequencyPenalty.Title}
        subTitle={Locale.Settings.FrequencyPenalty.SubTitle}
      >
        <InputRange
          value={props.config.frequency_penalty?.toFixed(1)}
          min="-2"
          max="2"
          step="0.1"
          onChange={(e) =>
            props.updateConfig(
              (config) =>
                (config.frequency_penalty = e.currentTarget.valueAsNumber),
            )
          }
        />
      </ListItem>
    </>
  );
}

View File

@ -0,0 +1,71 @@
import { ProviderConfig } from "@/app/store";
import { ProviderConfigProps } from "../types";
import { ListItem, PasswordInput } from "../../ui-lib";
import Locale from "@/app/locales";
import { REMOTE_API_HOST } from "@/app/constant";
/**
 * OpenAI provider-level settings: API endpoint, API key, comma-separated
 * extra model names, and whether to auto-fetch the available model list.
 */
export function OpenAIProviderConfig(
  props: ProviderConfigProps<ProviderConfig["openai"]>,
) {
  return (
    <>
      <ListItem
        title={Locale.Settings.Endpoint.Title}
        subTitle={Locale.Settings.Endpoint.SubTitle}
      >
        <input
          type="text"
          value={props.config.endpoint}
          placeholder={REMOTE_API_HOST}
          onChange={(e) =>
            props.updateConfig(
              (config) => (config.endpoint = e.currentTarget.value),
            )
          }
        />
      </ListItem>
      <ListItem
        title={Locale.Settings.Token.Title}
        subTitle={Locale.Settings.Token.SubTitle}
      >
        <PasswordInput
          value={props.config.apiKey}
          type="text"
          placeholder={Locale.Settings.Token.Placeholder}
          onChange={(e) =>
            props.updateConfig(
              (config) => (config.apiKey = e.currentTarget.value),
            )
          }
        />
      </ListItem>
      <ListItem
        title={Locale.Settings.CustomModel.Title}
        subTitle={Locale.Settings.CustomModel.SubTitle}
      >
        <input
          type="text"
          value={props.config.customModels}
          placeholder="model1,model2,model3"
          onChange={(e) =>
            props.updateConfig(
              (config) => (config.customModels = e.currentTarget.value),
            )
          }
        />
      </ListItem>
      {/* NOTE(review): hard-coded Chinese copy — should go through Locale
          like the items above. */}
      <ListItem title="自动拉取可用模型" subTitle="尝试拉取所有可用模型">
        <input
          type="checkbox"
          checked={props.config.autoFetchModels}
          onChange={(e) =>
            props.updateConfig(
              (config) => (config.autoFetchModels = e.currentTarget.checked),
            )
          }
        />
      </ListItem>
    </>
  );
}

View File

@ -0,0 +1,14 @@
import { LLMModel } from "@/app/client";
import { Updater } from "@/app/typing";
/**
 * Props shared by every provider-specific model settings panel.
 * `T` is that provider's slice of the global model config.
 */
export type ModelConfigProps<T> = {
  // models offered in the panel's model-selection dropdown
  models: LLMModel[];
  config: T;
  updateConfig: Updater<T>;
};
/**
 * Props shared by every provider-specific provider settings panel
 * (endpoint, API key, …). `T` is that provider's slice of ProviderConfig.
 */
export type ProviderConfigProps<T> = {
  // presumably disables editing when true — confirm at call sites
  readonly?: boolean;
  config: T;
  updateConfig: Updater<T>;
};

View File

@ -28,7 +28,7 @@ export function AvatarPicker(props: {
); );
} }
export function Avatar(props: { model?: ModelType; avatar?: string }) { export function Avatar(props: { model?: string; avatar?: string }) {
if (props.model) { if (props.model) {
return ( return (
<div className="no-dark"> <div className="no-dark">

View File

@ -27,12 +27,12 @@ import { Avatar } from "./emoji";
import dynamic from "next/dynamic"; import dynamic from "next/dynamic";
import NextImage from "next/image"; import NextImage from "next/image";
import { toBlob, toJpeg, toPng } from "html-to-image"; import { toBlob, toPng } from "html-to-image";
import { DEFAULT_MASK_AVATAR } from "../store/mask"; import { DEFAULT_MASK_AVATAR } from "../store/mask";
import { api } from "../client/api";
import { prettyObject } from "../utils/format"; import { prettyObject } from "../utils/format";
import { EXPORT_MESSAGE_CLASS_NAME } from "../constant"; import { EXPORT_MESSAGE_CLASS_NAME } from "../constant";
import { getClientConfig } from "../config/client"; import { getClientConfig } from "../config/client";
import { api } from "../client";
const Markdown = dynamic(async () => (await import("./markdown")).Markdown, { const Markdown = dynamic(async () => (await import("./markdown")).Markdown, {
loading: () => <LoadingIcon />, loading: () => <LoadingIcon />,
@ -290,7 +290,7 @@ export function PreviewActions(props: {
setShouldExport(false); setShouldExport(false);
api api
.share(msgs) .shareToShareGPT(msgs)
.then((res) => { .then((res) => {
if (!res) return; if (!res) return;
showModal({ showModal({
@ -403,6 +403,7 @@ export function ImagePreviewer(props: {
const chatStore = useChatStore(); const chatStore = useChatStore();
const session = chatStore.currentSession(); const session = chatStore.currentSession();
const mask = session.mask; const mask = session.mask;
const modelConfig = chatStore.getCurrentModelConfig();
const config = useAppConfig(); const config = useAppConfig();
const previewRef = useRef<HTMLDivElement>(null); const previewRef = useRef<HTMLDivElement>(null);
@ -526,7 +527,7 @@ export function ImagePreviewer(props: {
</div> </div>
<div> <div>
<div className={styles["chat-info-item"]}> <div className={styles["chat-info-item"]}>
{Locale.Exporter.Model}: {mask.modelConfig.model} {Locale.Exporter.Model}: {modelConfig.model}
</div> </div>
<div className={styles["chat-info-item"]}> <div className={styles["chat-info-item"]}>
{Locale.Exporter.Messages}: {props.messages.length} {Locale.Exporter.Messages}: {props.messages.length}

View File

@ -27,7 +27,6 @@ import { SideBar } from "./sidebar";
import { useAppConfig } from "../store/config"; import { useAppConfig } from "../store/config";
import { AuthPage } from "./auth"; import { AuthPage } from "./auth";
import { getClientConfig } from "../config/client"; import { getClientConfig } from "../config/client";
import { api } from "../client/api";
import { useAccessStore } from "../store"; import { useAccessStore } from "../store";
export function Loading(props: { noLogo?: boolean }) { export function Loading(props: { noLogo?: boolean }) {
@ -128,7 +127,8 @@ function Screen() {
const isHome = location.pathname === Path.Home; const isHome = location.pathname === Path.Home;
const isAuth = location.pathname === Path.Auth; const isAuth = location.pathname === Path.Auth;
const isMobileScreen = useMobileScreen(); const isMobileScreen = useMobileScreen();
const shouldTightBorder = getClientConfig()?.isApp || (config.tightBorder && !isMobileScreen); const shouldTightBorder =
getClientConfig()?.isApp || (config.tightBorder && !isMobileScreen);
useEffect(() => { useEffect(() => {
loadAsyncGoogleFont(); loadAsyncGoogleFont();
@ -170,10 +170,7 @@ export function useLoadData() {
const config = useAppConfig(); const config = useAppConfig();
useEffect(() => { useEffect(() => {
(async () => { // TODO: fetch available models from server
const models = await api.llm.models();
config.mergeModels(models);
})();
// eslint-disable-next-line react-hooks/exhaustive-deps // eslint-disable-next-line react-hooks/exhaustive-deps
}, []); }, []);
} }
@ -185,7 +182,7 @@ export function Home() {
useEffect(() => { useEffect(() => {
console.log("[Config] got config from build time", getClientConfig()); console.log("[Config] got config from build time", getClientConfig());
useAccessStore.getState().fetch(); useAccessStore.getState().fetchConfig();
}, []); }, []);
if (!useHasHydrated()) { if (!useHasHydrated()) {

View File

@ -21,7 +21,6 @@ import {
useAppConfig, useAppConfig,
useChatStore, useChatStore,
} from "../store"; } from "../store";
import { ROLES } from "../client/api";
import { import {
Input, Input,
List, List,
@ -36,19 +35,20 @@ import Locale, { AllLangs, ALL_LANG_OPTIONS, Lang } from "../locales";
import { useNavigate } from "react-router-dom"; import { useNavigate } from "react-router-dom";
import chatStyle from "./chat.module.scss"; import chatStyle from "./chat.module.scss";
import { useEffect, useState } from "react"; import { useState } from "react";
import { copyToClipboard, downloadAs, readFromFile } from "../utils"; import { copyToClipboard, downloadAs, readFromFile } from "../utils";
import { Updater } from "../typing"; import { Updater } from "../typing";
import { ModelConfigList } from "./model-config";
import { FileName, Path } from "../constant"; import { FileName, Path } from "../constant";
import { BUILTIN_MASK_STORE } from "../masks"; import { BUILTIN_MASK_STORE } from "../masks";
import { nanoid } from "nanoid";
import { import {
DragDropContext, DragDropContext,
Droppable, Droppable,
Draggable, Draggable,
OnDragEndResponder, OnDragEndResponder,
} from "@hello-pangea/dnd"; } from "@hello-pangea/dnd";
import { ROLES } from "../client";
import { deepClone } from "../utils/clone";
import { ChatConfigList, ModelConfigList, ProviderSelectItem } from "./config";
// drag and drop helper function // drag and drop helper function
function reorder<T>(list: T[], startIndex: number, endIndex: number): T[] { function reorder<T>(list: T[], startIndex: number, endIndex: number): T[] {
@ -58,11 +58,11 @@ function reorder<T>(list: T[], startIndex: number, endIndex: number): T[] {
return result; return result;
} }
export function MaskAvatar(props: { mask: Mask }) { export function MaskAvatar(props: { avatar: string; model: string }) {
return props.mask.avatar !== DEFAULT_MASK_AVATAR ? ( return props.avatar !== DEFAULT_MASK_AVATAR ? (
<Avatar avatar={props.mask.avatar} /> <Avatar avatar={props.avatar} />
) : ( ) : (
<Avatar model={props.mask.modelConfig.model} /> <Avatar model={props.model} />
); );
} }
@ -74,14 +74,15 @@ export function MaskConfig(props: {
shouldSyncFromGlobal?: boolean; shouldSyncFromGlobal?: boolean;
}) { }) {
const [showPicker, setShowPicker] = useState(false); const [showPicker, setShowPicker] = useState(false);
const modelConfig = useChatStore().extractModelConfig(props.mask.config);
const updateConfig = (updater: (config: ModelConfig) => void) => { const updateConfig = (updater: (config: ModelConfig) => void) => {
if (props.readonly) return; if (props.readonly) return;
const config = { ...props.mask.modelConfig }; const config = deepClone(props.mask.config);
updater(config); updater(config.modelConfig);
props.updateMask((mask) => { props.updateMask((mask) => {
mask.modelConfig = config; mask.config = config;
// if user changed current session mask, it will disable auto sync // if user changed current session mask, it will disable auto sync
mask.syncGlobalConfig = false; mask.syncGlobalConfig = false;
}); });
@ -123,7 +124,10 @@ export function MaskConfig(props: {
onClick={() => setShowPicker(true)} onClick={() => setShowPicker(true)}
style={{ cursor: "pointer" }} style={{ cursor: "pointer" }}
> >
<MaskAvatar mask={props.mask} /> <MaskAvatar
avatar={props.mask.avatar}
model={modelConfig.model}
/>
</div> </div>
</Popover> </Popover>
</ListItem> </ListItem>
@ -182,7 +186,7 @@ export function MaskConfig(props: {
) { ) {
props.updateMask((mask) => { props.updateMask((mask) => {
mask.syncGlobalConfig = checked; mask.syncGlobalConfig = checked;
mask.modelConfig = { ...globalConfig.modelConfig }; mask.config = deepClone(globalConfig.globalMaskConfig);
}); });
} else if (!checked) { } else if (!checked) {
props.updateMask((mask) => { props.updateMask((mask) => {
@ -196,10 +200,28 @@ export function MaskConfig(props: {
</List> </List>
<List> <List>
<ProviderSelectItem
value={props.mask.config.provider}
update={(value) => {
props.updateMask((mask) => (mask.config.provider = value));
}}
/>
<ModelConfigList <ModelConfigList
modelConfig={{ ...props.mask.modelConfig }} provider={props.mask.config.provider}
config={props.mask.config.modelConfig}
updateConfig={updateConfig} updateConfig={updateConfig}
/> />
</List>
<List>
<ChatConfigList
config={props.mask.config.chatConfig}
updateConfig={(updater) => {
const chatConfig = deepClone(props.mask.config.chatConfig);
updater(chatConfig);
props.updateMask((mask) => (mask.config.chatConfig = chatConfig));
}}
/>
{props.extraListItems} {props.extraListItems}
</List> </List>
</> </>
@ -398,7 +420,7 @@ export function MaskPage() {
setSearchText(text); setSearchText(text);
if (text.length > 0) { if (text.length > 0) {
const result = allMasks.filter((m) => const result = allMasks.filter((m) =>
m.name.toLowerCase().includes(text.toLowerCase()) m.name.toLowerCase().includes(text.toLowerCase()),
); );
setSearchMasks(result); setSearchMasks(result);
} else { } else {
@ -523,14 +545,17 @@ export function MaskPage() {
<div className={styles["mask-item"]} key={m.id}> <div className={styles["mask-item"]} key={m.id}>
<div className={styles["mask-header"]}> <div className={styles["mask-header"]}>
<div className={styles["mask-icon"]}> <div className={styles["mask-icon"]}>
<MaskAvatar mask={m} /> <MaskAvatar
avatar={m.avatar}
model={chatStore.extractModelConfig(m.config).model}
/>
</div> </div>
<div className={styles["mask-title"]}> <div className={styles["mask-title"]}>
<div className={styles["mask-name"]}>{m.name}</div> <div className={styles["mask-name"]}>{m.name}</div>
<div className={styles["mask-info"] + " one-line"}> <div className={styles["mask-info"] + " one-line"}>
{`${Locale.Mask.Item.Info(m.context.length)} / ${ {`${Locale.Mask.Item.Info(m.context.length)} / ${
ALL_LANG_OPTIONS[m.lang] ALL_LANG_OPTIONS[m.lang]
} / ${m.modelConfig.model}`} } / ${chatStore.extractModelConfig(m.config).model}`}
</div> </div>
</div> </div>
</div> </div>

View File

@ -71,6 +71,7 @@ export function MessageSelector(props: {
onSelected?: (messages: ChatMessage[]) => void; onSelected?: (messages: ChatMessage[]) => void;
}) { }) {
const chatStore = useChatStore(); const chatStore = useChatStore();
const modelConfig = chatStore.getCurrentModelConfig();
const session = chatStore.currentSession(); const session = chatStore.currentSession();
const isValid = (m: ChatMessage) => m.content && !m.isError && !m.streaming; const isValid = (m: ChatMessage) => m.content && !m.isError && !m.streaming;
const messages = session.messages.filter( const messages = session.messages.filter(
@ -195,7 +196,10 @@ export function MessageSelector(props: {
{m.role === "user" ? ( {m.role === "user" ? (
<Avatar avatar={config.avatar}></Avatar> <Avatar avatar={config.avatar}></Avatar>
) : ( ) : (
<MaskAvatar mask={session.mask} /> <MaskAvatar
avatar={session.mask.avatar}
model={modelConfig.model}
/>
)} )}
</div> </div>
<div className={styles["body"]}> <div className={styles["body"]}>

View File

@ -4,10 +4,12 @@ import Locale from "../locales";
import { InputRange } from "./input-range"; import { InputRange } from "./input-range";
import { ListItem, Select } from "./ui-lib"; import { ListItem, Select } from "./ui-lib";
export function ModelConfigList(props: { export function _ModelConfigList(props: {
modelConfig: ModelConfig; modelConfig: ModelConfig;
updateConfig: (updater: (config: ModelConfig) => void) => void; updateConfig: (updater: (config: ModelConfig) => void) => void;
}) { }) {
return null;
/*
const config = useAppConfig(); const config = useAppConfig();
return ( return (
@ -130,84 +132,8 @@ export function ModelConfigList(props: {
></InputRange> ></InputRange>
</ListItem> </ListItem>
<ListItem
title={Locale.Settings.InjectSystemPrompts.Title}
subTitle={Locale.Settings.InjectSystemPrompts.SubTitle}
>
<input
type="checkbox"
checked={props.modelConfig.enableInjectSystemPrompts}
onChange={(e) =>
props.updateConfig(
(config) =>
(config.enableInjectSystemPrompts = e.currentTarget.checked),
)
}
></input>
</ListItem>
<ListItem
title={Locale.Settings.InputTemplate.Title}
subTitle={Locale.Settings.InputTemplate.SubTitle}
>
<input
type="text"
value={props.modelConfig.template}
onChange={(e) =>
props.updateConfig(
(config) => (config.template = e.currentTarget.value),
)
}
></input>
</ListItem>
<ListItem
title={Locale.Settings.HistoryCount.Title}
subTitle={Locale.Settings.HistoryCount.SubTitle}
>
<InputRange
title={props.modelConfig.historyMessageCount.toString()}
value={props.modelConfig.historyMessageCount}
min="0"
max="64"
step="1"
onChange={(e) =>
props.updateConfig(
(config) => (config.historyMessageCount = e.target.valueAsNumber),
)
}
></InputRange>
</ListItem>
<ListItem
title={Locale.Settings.CompressThreshold.Title}
subTitle={Locale.Settings.CompressThreshold.SubTitle}
>
<input
type="number"
min={500}
max={4000}
value={props.modelConfig.compressMessageLengthThreshold}
onChange={(e) =>
props.updateConfig(
(config) =>
(config.compressMessageLengthThreshold =
e.currentTarget.valueAsNumber),
)
}
></input>
</ListItem>
<ListItem title={Locale.Memory.Title} subTitle={Locale.Memory.Send}>
<input
type="checkbox"
checked={props.modelConfig.sendMemory}
onChange={(e) =>
props.updateConfig(
(config) => (config.sendMemory = e.currentTarget.checked),
)
}
></input>
</ListItem>
</> </>
); );
*/
} }

View File

@ -29,9 +29,11 @@ function getIntersectionArea(aRect: DOMRect, bRect: DOMRect) {
} }
function MaskItem(props: { mask: Mask; onClick?: () => void }) { function MaskItem(props: { mask: Mask; onClick?: () => void }) {
const modelConfig = useChatStore().extractModelConfig(props.mask.config);
return ( return (
<div className={styles["mask"]} onClick={props.onClick}> <div className={styles["mask"]} onClick={props.onClick}>
<MaskAvatar mask={props.mask} /> <MaskAvatar avatar={props.mask.avatar} model={modelConfig.model} />
<div className={styles["mask-name"] + " one-line"}>{props.mask.name}</div> <div className={styles["mask-name"] + " one-line"}>{props.mask.name}</div>
</div> </div>
); );

View File

@ -30,16 +30,15 @@ import {
showConfirm, showConfirm,
showToast, showToast,
} from "./ui-lib"; } from "./ui-lib";
import { ModelConfigList } from "./model-config";
import { IconButton } from "./button"; import { IconButton } from "./button";
import { import {
SubmitKey,
useChatStore, useChatStore,
Theme,
useUpdateStore, useUpdateStore,
useAccessStore, useAccessStore,
useAppConfig, useAppConfig,
LLMProvider,
LLMProviders,
} from "../store"; } from "../store";
import Locale, { import Locale, {
@ -61,6 +60,14 @@ import { useSyncStore } from "../store/sync";
import { nanoid } from "nanoid"; import { nanoid } from "nanoid";
import { useMaskStore } from "../store/mask"; import { useMaskStore } from "../store/mask";
import { ProviderType } from "../utils/cloud"; import { ProviderType } from "../utils/cloud";
import {
ChatConfigList,
ModelConfigList,
ProviderConfigList,
ProviderSelectItem,
} from "./config";
import { SubmitKey, Theme } from "../typing";
import { deepClone } from "../utils/clone";
function EditPromptModal(props: { id: string; onClose: () => void }) { function EditPromptModal(props: { id: string; onClose: () => void }) {
const promptStore = usePromptStore(); const promptStore = usePromptStore();
@ -757,8 +764,7 @@ export function Settings() {
step="1" step="1"
onChange={(e) => onChange={(e) =>
updateConfig( updateConfig(
(config) => (config) => (config.fontSize = e.currentTarget.valueAsNumber),
(config.fontSize = Number.parseInt(e.currentTarget.value)),
) )
} }
></InputRange> ></InputRange>
@ -770,11 +776,14 @@ export function Settings() {
> >
<input <input
type="checkbox" type="checkbox"
checked={config.enableAutoGenerateTitle} checked={
config.globalMaskConfig.chatConfig.enableAutoGenerateTitle
}
onChange={(e) => onChange={(e) =>
updateConfig( updateConfig(
(config) => (config) =>
(config.enableAutoGenerateTitle = e.currentTarget.checked), (config.globalMaskConfig.chatConfig.enableAutoGenerateTitle =
e.currentTarget.checked),
) )
} }
></input> ></input>
@ -877,7 +886,9 @@ export function Settings() {
type="text" type="text"
placeholder={Locale.Settings.AccessCode.Placeholder} placeholder={Locale.Settings.AccessCode.Placeholder}
onChange={(e) => { onChange={(e) => {
accessStore.updateCode(e.currentTarget.value); accessStore.update(
(config) => (config.accessCode = e.currentTarget.value),
);
}} }}
/> />
</ListItem> </ListItem>
@ -885,36 +896,7 @@ export function Settings() {
<></> <></>
)} )}
{!accessStore.hideUserApiKey ? ( {!accessStore.hideUserApiKey ? <></> : null}
<>
<ListItem
title={Locale.Settings.Endpoint.Title}
subTitle={Locale.Settings.Endpoint.SubTitle}
>
<input
type="text"
value={accessStore.openaiUrl}
placeholder="https://api.openai.com/"
onChange={(e) =>
accessStore.updateOpenAiUrl(e.currentTarget.value)
}
></input>
</ListItem>
<ListItem
title={Locale.Settings.Token.Title}
subTitle={Locale.Settings.Token.SubTitle}
>
<PasswordInput
value={accessStore.token}
type="text"
placeholder={Locale.Settings.Token.Placeholder}
onChange={(e) => {
accessStore.updateToken(e.currentTarget.value);
}}
/>
</ListItem>
</>
) : null}
{!accessStore.hideBalanceQuery ? ( {!accessStore.hideBalanceQuery ? (
<ListItem <ListItem
@ -941,31 +923,44 @@ export function Settings() {
)} )}
</ListItem> </ListItem>
) : null} ) : null}
<ListItem
title={Locale.Settings.CustomModel.Title}
subTitle={Locale.Settings.CustomModel.SubTitle}
>
<input
type="text"
value={config.customModels}
placeholder="model1,model2,model3"
onChange={(e) =>
config.update(
(config) => (config.customModels = e.currentTarget.value),
)
}
></input>
</ListItem>
</List> </List>
<List> <List>
<ProviderSelectItem
value={config.globalMaskConfig.provider}
update={(value) =>
config.update((_config) => {
_config.globalMaskConfig.provider = value;
})
}
/>
<ProviderConfigList
provider={config.globalMaskConfig.provider}
config={config.providerConfig}
updateConfig={(update) => {
config.update((_config) => update(_config.providerConfig));
}}
/>
<ModelConfigList <ModelConfigList
modelConfig={config.modelConfig} provider={config.globalMaskConfig.provider}
config={config.globalMaskConfig.modelConfig}
updateConfig={(updater) => { updateConfig={(updater) => {
const modelConfig = { ...config.modelConfig }; const modelConfig = { ...config.globalMaskConfig.modelConfig };
updater(modelConfig); updater(modelConfig);
config.update((config) => (config.modelConfig = modelConfig)); config.update(
(config) => (config.globalMaskConfig.modelConfig = modelConfig),
);
}}
/>
<ChatConfigList
config={config.globalMaskConfig.chatConfig}
updateConfig={(updater) => {
const chatConfig = deepClone(config.globalMaskConfig.chatConfig);
updater(chatConfig);
config.update(
(config) => (config.globalMaskConfig.chatConfig = chatConfig),
);
}} }}
/> />
</List> </List>

View File

@ -8,8 +8,8 @@ export const FETCH_COMMIT_URL = `https://api.github.com/repos/${OWNER}/${REPO}/c
export const FETCH_TAG_URL = `https://api.github.com/repos/${OWNER}/${REPO}/tags?per_page=1`; export const FETCH_TAG_URL = `https://api.github.com/repos/${OWNER}/${REPO}/tags?per_page=1`;
export const RUNTIME_CONFIG_DOM = "danger-runtime-config"; export const RUNTIME_CONFIG_DOM = "danger-runtime-config";
export const DEFAULT_CORS_HOST = "https://ab.nextweb.fun"; export const REMOTE_CORS_HOST = "https://ab.nextweb.fun";
export const DEFAULT_API_HOST = `${DEFAULT_CORS_HOST}/api/proxy`; export const REMOTE_API_HOST = `${REMOTE_CORS_HOST}/api/proxy`;
export enum Path { export enum Path {
Home = "/", Home = "/",
@ -20,8 +20,12 @@ export enum Path {
Auth = "/auth", Auth = "/auth",
} }
export const API_PREFIX = "/api";
export enum ApiPath { export enum ApiPath {
OpenAI = "/api/openai",
Cors = "/api/cors", Cors = "/api/cors",
Config = "/api/config",
} }
export enum SlotID { export enum SlotID {
@ -59,12 +63,12 @@ export const REQUEST_TIMEOUT_MS = 60000;
export const EXPORT_MESSAGE_CLASS_NAME = "export-markdown"; export const EXPORT_MESSAGE_CLASS_NAME = "export-markdown";
export const OpenaiPath = { export enum OpenaiPath {
ChatPath: "v1/chat/completions", Chat = "v1/chat/completions",
UsagePath: "dashboard/billing/usage", Usage = "dashboard/billing/usage",
SubsPath: "dashboard/billing/subscription", Subs = "dashboard/billing/subscription",
ListModelPath: "v1/models", ListModel = "v1/models",
}; }
export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
export const DEFAULT_SYSTEM_TEMPLATE = ` export const DEFAULT_SYSTEM_TEMPLATE = `

View File

@ -1,4 +1,4 @@
import { SubmitKey } from "../store/config"; import { SubmitKey } from "@/app/typing";
import type { PartialLocaleType } from "./index"; import type { PartialLocaleType } from "./index";
const ar: PartialLocaleType = { const ar: PartialLocaleType = {

View File

@ -1,4 +1,4 @@
import { SubmitKey } from "../store/config"; import { SubmitKey } from "@/app/typing";
import { PartialLocaleType } from "./index"; import { PartialLocaleType } from "./index";
const bn: PartialLocaleType = { const bn: PartialLocaleType = {

View File

@ -1,5 +1,5 @@
import { getClientConfig } from "../config/client"; import { getClientConfig } from "../config/client";
import { SubmitKey } from "../store/config"; import { SubmitKey } from "@/app/typing";
const isApp = !!getClientConfig()?.isApp; const isApp = !!getClientConfig()?.isApp;

View File

@ -1,4 +1,4 @@
import { SubmitKey } from "../store/config"; import { SubmitKey } from "@/app/typing";
import type { PartialLocaleType } from "./index"; import type { PartialLocaleType } from "./index";
const cs: PartialLocaleType = { const cs: PartialLocaleType = {

View File

@ -1,4 +1,4 @@
import { SubmitKey } from "../store/config"; import { SubmitKey } from "@/app/typing";
import type { PartialLocaleType } from "./index"; import type { PartialLocaleType } from "./index";
const de: PartialLocaleType = { const de: PartialLocaleType = {

View File

@ -1,5 +1,5 @@
import { getClientConfig } from "../config/client"; import { getClientConfig } from "../config/client";
import { SubmitKey } from "../store/config"; import { SubmitKey } from "@/app/typing";
import { LocaleType } from "./index"; import { LocaleType } from "./index";
// if you are adding a new translation, please use PartialLocaleType instead of LocaleType // if you are adding a new translation, please use PartialLocaleType instead of LocaleType

View File

@ -1,4 +1,4 @@
import { SubmitKey } from "../store/config"; import { SubmitKey } from "@/app/typing";
import type { PartialLocaleType } from "./index"; import type { PartialLocaleType } from "./index";
const es: PartialLocaleType = { const es: PartialLocaleType = {

View File

@ -1,4 +1,4 @@
import { SubmitKey } from "../store/config"; import { SubmitKey } from "@/app/typing";
import type { PartialLocaleType } from "./index"; import type { PartialLocaleType } from "./index";
const fr: PartialLocaleType = { const fr: PartialLocaleType = {

View File

@ -1,10 +1,11 @@
import { SubmitKey } from "../store/config"; import { SubmitKey } from "@/app/typing";
import { PartialLocaleType } from "./index"; import { PartialLocaleType } from "./index";
const id: PartialLocaleType = { const id: PartialLocaleType = {
WIP: "Coming Soon...", WIP: "Coming Soon...",
Error: { Error: {
Unauthorized: "Akses tidak diizinkan, silakan masukkan kode akses atau masukkan kunci API OpenAI Anda. di halaman [autentikasi](/#/auth) atau di halaman [Pengaturan](/#/settings).", Unauthorized:
"Akses tidak diizinkan, silakan masukkan kode akses atau masukkan kunci API OpenAI Anda. di halaman [autentikasi](/#/auth) atau di halaman [Pengaturan](/#/settings).",
}, },
Auth: { Auth: {
Title: "Diperlukan Kode Akses", Title: "Diperlukan Kode Akses",

View File

@ -1,4 +1,4 @@
import { SubmitKey } from "../store/config"; import { SubmitKey } from "@/app/typing";
import type { PartialLocaleType } from "./index"; import type { PartialLocaleType } from "./index";
const it: PartialLocaleType = { const it: PartialLocaleType = {

View File

@ -1,4 +1,4 @@
import { SubmitKey } from "../store/config"; import { SubmitKey } from "@/app/typing";
import type { PartialLocaleType } from "./index"; import type { PartialLocaleType } from "./index";
const jp: PartialLocaleType = { const jp: PartialLocaleType = {
@ -20,7 +20,8 @@ const jp: PartialLocaleType = {
Stop: "停止", Stop: "停止",
Retry: "リトライ", Retry: "リトライ",
Pin: "ピン", Pin: "ピン",
PinToastContent: "コンテキストプロンプトに1つのメッセージをピン留めしました", PinToastContent:
"コンテキストプロンプトに1つのメッセージをピン留めしました",
PinToastAction: "表示", PinToastAction: "表示",
Delete: "削除", Delete: "削除",
Edit: "編集", Edit: "編集",

View File

@ -1,4 +1,4 @@
import { SubmitKey } from "../store/config"; import { SubmitKey } from "@/app/typing";
import type { PartialLocaleType } from "./index"; import type { PartialLocaleType } from "./index";

View File

@ -1,4 +1,4 @@
import { SubmitKey } from "../store/config"; import { SubmitKey } from "@/app/typing";
import type { PartialLocaleType } from "./index"; import type { PartialLocaleType } from "./index";
const no: PartialLocaleType = { const no: PartialLocaleType = {

View File

@ -1,4 +1,4 @@
import { SubmitKey } from "../store/config"; import { SubmitKey } from "@/app/typing";
import type { PartialLocaleType } from "./index"; import type { PartialLocaleType } from "./index";
const ru: PartialLocaleType = { const ru: PartialLocaleType = {

View File

@ -1,4 +1,4 @@
import { SubmitKey } from "../store/config"; import { SubmitKey } from "@/app/typing";
import type { PartialLocaleType } from "./index"; import type { PartialLocaleType } from "./index";
const tr: PartialLocaleType = { const tr: PartialLocaleType = {

View File

@ -1,4 +1,4 @@
import { SubmitKey } from "../store/config"; import { SubmitKey } from "@/app/typing";
import type { PartialLocaleType } from "./index"; import type { PartialLocaleType } from "./index";
const tw: PartialLocaleType = { const tw: PartialLocaleType = {

View File

@ -1,4 +1,4 @@
import { SubmitKey } from "../store/config"; import { SubmitKey } from "@/app/typing";
import type { PartialLocaleType } from "./index"; import type { PartialLocaleType } from "./index";
const vi: PartialLocaleType = { const vi: PartialLocaleType = {

View File

@ -1,7 +1,9 @@
import { ModelConfig } from "../store"; import { ModelConfig } from "../store";
import { type Mask } from "../store/mask"; import { type Mask } from "../store/mask";
export type BuiltinMask = Omit<Mask, "id" | "modelConfig"> & { export type BuiltinMask =
| any
| (Omit<Mask, "id" | "modelConfig"> & {
builtin: Boolean; builtin: Boolean;
modelConfig: Partial<ModelConfig>; modelConfig: Partial<ModelConfig["openai"]>;
}; });

View File

@ -1,23 +1,20 @@
import { DEFAULT_API_HOST, DEFAULT_MODELS, StoreKey } from "../constant"; import { REMOTE_API_HOST, DEFAULT_MODELS, StoreKey } from "../constant";
import { getHeaders } from "../client/api";
import { getClientConfig } from "../config/client"; import { getClientConfig } from "../config/client";
import { createPersistStore } from "../utils/store"; import { createPersistStore } from "../utils/store";
import { getAuthHeaders } from "../client/common/auth";
let fetchState = 0; // 0 not fetch, 1 fetching, 2 done let fetchState = 0; // 0 not fetch, 1 fetching, 2 done
const DEFAULT_OPENAI_URL = const DEFAULT_OPENAI_URL =
getClientConfig()?.buildMode === "export" ? DEFAULT_API_HOST : "/api/openai/"; getClientConfig()?.buildMode === "export" ? REMOTE_API_HOST : "/api/openai/";
console.log("[API] default openai url", DEFAULT_OPENAI_URL); console.log("[API] default openai url", DEFAULT_OPENAI_URL);
const DEFAULT_ACCESS_STATE = { const DEFAULT_ACCESS_STATE = {
token: "",
accessCode: "", accessCode: "",
needCode: true, needCode: true,
hideUserApiKey: false, hideUserApiKey: false,
hideBalanceQuery: false, hideBalanceQuery: false,
disableGPT4: false, disableGPT4: false,
openaiUrl: DEFAULT_OPENAI_URL,
}; };
export const useAccessStore = createPersistStore( export const useAccessStore = createPersistStore(
@ -25,35 +22,24 @@ export const useAccessStore = createPersistStore(
(set, get) => ({ (set, get) => ({
enabledAccessControl() { enabledAccessControl() {
this.fetch(); this.fetchConfig();
return get().needCode; return get().needCode;
}, },
updateCode(code: string) {
set(() => ({ accessCode: code?.trim() }));
},
updateToken(token: string) {
set(() => ({ token: token?.trim() }));
},
updateOpenAiUrl(url: string) {
set(() => ({ openaiUrl: url?.trim() }));
},
isAuthorized() { isAuthorized() {
this.fetch(); this.fetchConfig();
// has token or has code or disabled access control // has token or has code or disabled access control
return ( return !!get().accessCode || !this.enabledAccessControl();
!!get().token || !!get().accessCode || !this.enabledAccessControl()
);
}, },
fetch() { fetchConfig() {
if (fetchState > 0 || getClientConfig()?.buildMode === "export") return; if (fetchState > 0 || getClientConfig()?.buildMode === "export") return;
fetchState = 1; fetchState = 1;
fetch("/api/config", { fetch("/api/config", {
method: "post", method: "post",
body: null, body: null,
headers: { headers: {
...getHeaders(), ...getAuthHeaders(),
}, },
}) })
.then((res) => res.json()) .then((res) => res.json())

View File

@ -2,7 +2,13 @@ import { trimTopic } from "../utils";
import Locale, { getLang } from "../locales"; import Locale, { getLang } from "../locales";
import { showToast } from "../components/ui-lib"; import { showToast } from "../components/ui-lib";
import { ModelConfig, ModelType, useAppConfig } from "./config"; import {
LLMProvider,
MaskConfig,
ModelConfig,
ModelType,
useAppConfig,
} from "./config";
import { createEmptyMask, Mask } from "./mask"; import { createEmptyMask, Mask } from "./mask";
import { import {
DEFAULT_INPUT_TEMPLATE, DEFAULT_INPUT_TEMPLATE,
@ -10,19 +16,19 @@ import {
StoreKey, StoreKey,
SUMMARIZE_MODEL, SUMMARIZE_MODEL,
} from "../constant"; } from "../constant";
import { api, RequestMessage } from "../client/api"; import { ChatControllerPool } from "../client/common/controller";
import { ChatControllerPool } from "../client/controller";
import { prettyObject } from "../utils/format"; import { prettyObject } from "../utils/format";
import { estimateTokenLength } from "../utils/token"; import { estimateTokenLength } from "../utils/token";
import { nanoid } from "nanoid"; import { nanoid } from "nanoid";
import { createPersistStore } from "../utils/store"; import { createPersistStore } from "../utils/store";
import { RequestMessage, api } from "../client";
export type ChatMessage = RequestMessage & { export type ChatMessage = RequestMessage & {
date: string; date: string;
streaming?: boolean; streaming?: boolean;
isError?: boolean; isError?: boolean;
id: string; id: string;
model?: ModelType; model?: string;
}; };
export function createMessage(override: Partial<ChatMessage>): ChatMessage { export function createMessage(override: Partial<ChatMessage>): ChatMessage {
@ -84,46 +90,25 @@ function getSummarizeModel(currentModel: string) {
return currentModel.startsWith("gpt") ? SUMMARIZE_MODEL : currentModel; return currentModel.startsWith("gpt") ? SUMMARIZE_MODEL : currentModel;
} }
interface ChatStore {
sessions: ChatSession[];
currentSessionIndex: number;
clearSessions: () => void;
moveSession: (from: number, to: number) => void;
selectSession: (index: number) => void;
newSession: (mask?: Mask) => void;
deleteSession: (index: number) => void;
currentSession: () => ChatSession;
nextSession: (delta: number) => void;
onNewMessage: (message: ChatMessage) => void;
onUserInput: (content: string) => Promise<void>;
summarizeSession: () => void;
updateStat: (message: ChatMessage) => void;
updateCurrentSession: (updater: (session: ChatSession) => void) => void;
updateMessage: (
sessionIndex: number,
messageIndex: number,
updater: (message?: ChatMessage) => void,
) => void;
resetSession: () => void;
getMessagesWithMemory: () => ChatMessage[];
getMemoryPrompt: () => ChatMessage;
clearAllData: () => void;
}
function countMessages(msgs: ChatMessage[]) { function countMessages(msgs: ChatMessage[]) {
return msgs.reduce((pre, cur) => pre + estimateTokenLength(cur.content), 0); return msgs.reduce((pre, cur) => pre + estimateTokenLength(cur.content), 0);
} }
function fillTemplateWith(input: string, modelConfig: ModelConfig) { function fillTemplateWith(
input: string,
context: {
model: string;
template?: string;
},
) {
const vars = { const vars = {
model: modelConfig.model, model: context.model,
time: new Date().toLocaleString(), time: new Date().toLocaleString(),
lang: getLang(), lang: getLang(),
input: input, input: input,
}; };
let output = modelConfig.template ?? DEFAULT_INPUT_TEMPLATE; let output = context.template ?? DEFAULT_INPUT_TEMPLATE;
// must contains {{input}} // must contains {{input}}
const inputVar = "{{input}}"; const inputVar = "{{input}}";
@ -197,13 +182,13 @@ export const useChatStore = createPersistStore(
if (mask) { if (mask) {
const config = useAppConfig.getState(); const config = useAppConfig.getState();
const globalModelConfig = config.modelConfig; const globalModelConfig = config.globalMaskConfig;
session.mask = { session.mask = {
...mask, ...mask,
modelConfig: { config: {
...globalModelConfig, ...globalModelConfig,
...mask.modelConfig, ...mask.config,
}, },
}; };
session.topic = mask.name; session.topic = mask.name;
@ -288,11 +273,39 @@ export const useChatStore = createPersistStore(
get().summarizeSession(); get().summarizeSession();
}, },
getCurrentMaskConfig() {
return get().currentSession().mask.config;
},
extractModelConfig(maskConfig: MaskConfig) {
const provider = maskConfig.provider;
if (!maskConfig.modelConfig[provider]) {
throw Error("[Chat] failed to initialize provider: " + provider);
}
return maskConfig.modelConfig[provider];
},
getCurrentModelConfig() {
const maskConfig = this.getCurrentMaskConfig();
return this.extractModelConfig(maskConfig);
},
getClient() {
const appConfig = useAppConfig.getState();
const currentMaskConfig = get().getCurrentMaskConfig();
return api.createLLMClient(appConfig.providerConfig, currentMaskConfig);
},
async onUserInput(content: string) { async onUserInput(content: string) {
const session = get().currentSession(); const session = get().currentSession();
const modelConfig = session.mask.modelConfig; const maskConfig = this.getCurrentMaskConfig();
const modelConfig = this.getCurrentModelConfig();
const userContent = fillTemplateWith(content, modelConfig); const userContent = fillTemplateWith(content, {
model: modelConfig.model,
template: maskConfig.chatConfig.template,
});
console.log("[User Input] after template: ", userContent); console.log("[User Input] after template: ", userContent);
const userMessage: ChatMessage = createMessage({ const userMessage: ChatMessage = createMessage({
@ -323,10 +336,11 @@ export const useChatStore = createPersistStore(
]); ]);
}); });
const client = this.getClient();
// make request // make request
api.llm.chat({ client.chatStream({
messages: sendMessages, messages: sendMessages,
config: { ...modelConfig, stream: true },
onUpdate(message) { onUpdate(message) {
botMessage.streaming = true; botMessage.streaming = true;
if (message) { if (message) {
@ -391,7 +405,9 @@ export const useChatStore = createPersistStore(
getMessagesWithMemory() { getMessagesWithMemory() {
const session = get().currentSession(); const session = get().currentSession();
const modelConfig = session.mask.modelConfig; const maskConfig = this.getCurrentMaskConfig();
const chatConfig = maskConfig.chatConfig;
const modelConfig = this.getCurrentModelConfig();
const clearContextIndex = session.clearContextIndex ?? 0; const clearContextIndex = session.clearContextIndex ?? 0;
const messages = session.messages.slice(); const messages = session.messages.slice();
const totalMessageCount = session.messages.length; const totalMessageCount = session.messages.length;
@ -400,14 +416,14 @@ export const useChatStore = createPersistStore(
const contextPrompts = session.mask.context.slice(); const contextPrompts = session.mask.context.slice();
// system prompts, to get close to OpenAI Web ChatGPT // system prompts, to get close to OpenAI Web ChatGPT
const shouldInjectSystemPrompts = modelConfig.enableInjectSystemPrompts; const shouldInjectSystemPrompts = chatConfig.enableInjectSystemPrompts;
const systemPrompts = shouldInjectSystemPrompts const systemPrompts = shouldInjectSystemPrompts
? [ ? [
createMessage({ createMessage({
role: "system", role: "system",
content: fillTemplateWith("", { content: fillTemplateWith("", {
...modelConfig, model: modelConfig.model,
template: DEFAULT_SYSTEM_TEMPLATE, template: chatConfig.template,
}), }),
}), }),
] ]
@ -421,7 +437,7 @@ export const useChatStore = createPersistStore(
// long term memory // long term memory
const shouldSendLongTermMemory = const shouldSendLongTermMemory =
modelConfig.sendMemory && chatConfig.sendMemory &&
session.memoryPrompt && session.memoryPrompt &&
session.memoryPrompt.length > 0 && session.memoryPrompt.length > 0 &&
session.lastSummarizeIndex > clearContextIndex; session.lastSummarizeIndex > clearContextIndex;
@ -433,7 +449,7 @@ export const useChatStore = createPersistStore(
// short term memory // short term memory
const shortTermMemoryStartIndex = Math.max( const shortTermMemoryStartIndex = Math.max(
0, 0,
totalMessageCount - modelConfig.historyMessageCount, totalMessageCount - chatConfig.historyMessageCount,
); );
// lets concat send messages, including 4 parts: // lets concat send messages, including 4 parts:
@ -494,6 +510,8 @@ export const useChatStore = createPersistStore(
summarizeSession() { summarizeSession() {
const config = useAppConfig.getState(); const config = useAppConfig.getState();
const maskConfig = this.getCurrentMaskConfig();
const chatConfig = maskConfig.chatConfig;
const session = get().currentSession(); const session = get().currentSession();
// remove error messages if any // remove error messages if any
@ -502,7 +520,7 @@ export const useChatStore = createPersistStore(
// should summarize topic after chating more than 50 words // should summarize topic after chating more than 50 words
const SUMMARIZE_MIN_LEN = 50; const SUMMARIZE_MIN_LEN = 50;
if ( if (
config.enableAutoGenerateTitle && chatConfig.enableAutoGenerateTitle &&
session.topic === DEFAULT_TOPIC && session.topic === DEFAULT_TOPIC &&
countMessages(messages) >= SUMMARIZE_MIN_LEN countMessages(messages) >= SUMMARIZE_MIN_LEN
) { ) {
@ -512,11 +530,12 @@ export const useChatStore = createPersistStore(
content: Locale.Store.Prompt.Topic, content: Locale.Store.Prompt.Topic,
}), }),
); );
api.llm.chat({
const client = this.getClient();
client.chat({
messages: topicMessages, messages: topicMessages,
config: { shouldSummarize: true,
model: getSummarizeModel(session.mask.modelConfig.model),
},
onFinish(message) { onFinish(message) {
get().updateCurrentSession( get().updateCurrentSession(
(session) => (session) =>
@ -527,7 +546,7 @@ export const useChatStore = createPersistStore(
}); });
} }
const modelConfig = session.mask.modelConfig; const modelConfig = this.getCurrentModelConfig();
const summarizeIndex = Math.max( const summarizeIndex = Math.max(
session.lastSummarizeIndex, session.lastSummarizeIndex,
session.clearContextIndex ?? 0, session.clearContextIndex ?? 0,
@ -541,7 +560,7 @@ export const useChatStore = createPersistStore(
if (historyMsgLength > modelConfig?.max_tokens ?? 4000) { if (historyMsgLength > modelConfig?.max_tokens ?? 4000) {
const n = toBeSummarizedMsgs.length; const n = toBeSummarizedMsgs.length;
toBeSummarizedMsgs = toBeSummarizedMsgs.slice( toBeSummarizedMsgs = toBeSummarizedMsgs.slice(
Math.max(0, n - modelConfig.historyMessageCount), Math.max(0, n - chatConfig.historyMessageCount),
); );
} }
@ -554,14 +573,14 @@ export const useChatStore = createPersistStore(
"[Chat History] ", "[Chat History] ",
toBeSummarizedMsgs, toBeSummarizedMsgs,
historyMsgLength, historyMsgLength,
modelConfig.compressMessageLengthThreshold, chatConfig.compressMessageLengthThreshold,
); );
if ( if (
historyMsgLength > modelConfig.compressMessageLengthThreshold && historyMsgLength > chatConfig.compressMessageLengthThreshold &&
modelConfig.sendMemory chatConfig.sendMemory
) { ) {
api.llm.chat({ this.getClient().chatStream({
messages: toBeSummarizedMsgs.concat( messages: toBeSummarizedMsgs.concat(
createMessage({ createMessage({
role: "system", role: "system",
@ -569,11 +588,7 @@ export const useChatStore = createPersistStore(
date: "", date: "",
}), }),
), ),
config: { shouldSummarize: true,
...modelConfig,
stream: true,
model: getSummarizeModel(session.mask.modelConfig.model),
},
onUpdate(message) { onUpdate(message) {
session.memoryPrompt = message; session.memoryPrompt = message;
}, },
@ -614,52 +629,9 @@ export const useChatStore = createPersistStore(
name: StoreKey.Chat, name: StoreKey.Chat,
version: 3.1, version: 3.1,
migrate(persistedState, version) { migrate(persistedState, version) {
const state = persistedState as any; // TODO(yifei): migrate from old versions
const newState = JSON.parse(
JSON.stringify(state),
) as typeof DEFAULT_CHAT_STATE;
if (version < 2) { return persistedState as any;
newState.sessions = [];
const oldSessions = state.sessions;
for (const oldSession of oldSessions) {
const newSession = createEmptySession();
newSession.topic = oldSession.topic;
newSession.messages = [...oldSession.messages];
newSession.mask.modelConfig.sendMemory = true;
newSession.mask.modelConfig.historyMessageCount = 4;
newSession.mask.modelConfig.compressMessageLengthThreshold = 1000;
newState.sessions.push(newSession);
}
}
if (version < 3) {
// migrate id to nanoid
newState.sessions.forEach((s) => {
s.id = nanoid();
s.messages.forEach((m) => (m.id = nanoid()));
});
}
// Enable `enableInjectSystemPrompts` attribute for old sessions.
// Resolve issue of old sessions not automatically enabling.
if (version < 3.1) {
newState.sessions.forEach((s) => {
if (
// Exclude those already set by user
!s.mask.modelConfig.hasOwnProperty("enableInjectSystemPrompts")
) {
// Because users may have changed this configuration,
// the user's current configuration is used instead of the default
const config = useAppConfig.getState();
s.mask.modelConfig.enableInjectSystemPrompts =
config.modelConfig.enableInjectSystemPrompts;
}
});
}
return newState as any;
}, },
}, },
); );

View File

@ -1,4 +1,3 @@
import { LLMModel } from "../client/api";
import { isMacOS } from "../utils"; import { isMacOS } from "../utils";
import { getClientConfig } from "../config/client"; import { getClientConfig } from "../config/client";
import { import {
@ -8,24 +7,85 @@ import {
StoreKey, StoreKey,
} from "../constant"; } from "../constant";
import { createPersistStore } from "../utils/store"; import { createPersistStore } from "../utils/store";
import { OpenAIConfig } from "../client/openai/config";
import { api } from "../client";
import { SubmitKey, Theme } from "../typing";
export type ModelType = (typeof DEFAULT_MODELS)[number]["name"]; export type ModelType = (typeof DEFAULT_MODELS)[number]["name"];
export enum SubmitKey { export const DEFAULT_CHAT_CONFIG = {
Enter = "Enter", enableAutoGenerateTitle: true,
CtrlEnter = "Ctrl + Enter", sendMemory: true,
ShiftEnter = "Shift + Enter", historyMessageCount: 4,
AltEnter = "Alt + Enter", compressMessageLengthThreshold: 1000,
MetaEnter = "Meta + Enter", enableInjectSystemPrompts: true,
} template: DEFAULT_INPUT_TEMPLATE,
};
export type ChatConfig = typeof DEFAULT_CHAT_CONFIG;
export enum Theme { export const DEFAULT_PROVIDER_CONFIG = {
Auto = "auto", openai: OpenAIConfig.provider,
Dark = "dark", // azure: {
Light = "light", // endpoint: "https://api.openai.com",
} // apiKey: "",
// version: "",
// ...COMMON_PROVIDER_CONFIG,
// },
// claude: {
// endpoint: "https://api.anthropic.com",
// apiKey: "",
// ...COMMON_PROVIDER_CONFIG,
// },
// google: {
// endpoint: "https://api.anthropic.com",
// apiKey: "",
// ...COMMON_PROVIDER_CONFIG,
// },
};
export const DEFAULT_CONFIG = { export const DEFAULT_MODEL_CONFIG = {
openai: OpenAIConfig.model,
// azure: {
// model: "gpt-3.5-turbo" as string,
// summarizeModel: "gpt-3.5-turbo",
//
// temperature: 0.5,
// top_p: 1,
// max_tokens: 2000,
// presence_penalty: 0,
// frequency_penalty: 0,
// },
// claude: {
// model: "claude-2",
// summarizeModel: "claude-2",
//
// max_tokens_to_sample: 100000,
// temperature: 1,
// top_p: 0.7,
// top_k: 1,
// },
// google: {
// model: "chat-bison-001",
// summarizeModel: "claude-2",
//
// temperature: 1,
// topP: 0.7,
// topK: 1,
// },
};
export type LLMProvider = keyof typeof DEFAULT_PROVIDER_CONFIG;
export const LLMProviders = Array.from(
Object.entries(DEFAULT_PROVIDER_CONFIG),
).map(([k, v]) => [v.name, k]);
export const DEFAULT_MASK_CONFIG = {
provider: "openai" as LLMProvider,
chatConfig: { ...DEFAULT_CHAT_CONFIG },
modelConfig: { ...DEFAULT_MODEL_CONFIG },
};
export const DEFAULT_APP_CONFIG = {
lastUpdate: Date.now(), // timestamp, to merge state lastUpdate: Date.now(), // timestamp, to merge state
submitKey: isMacOS() ? SubmitKey.MetaEnter : SubmitKey.CtrlEnter, submitKey: isMacOS() ? SubmitKey.MetaEnter : SubmitKey.CtrlEnter,
@ -34,7 +94,6 @@ export const DEFAULT_CONFIG = {
theme: Theme.Auto as Theme, theme: Theme.Auto as Theme,
tightBorder: !!getClientConfig()?.isApp, tightBorder: !!getClientConfig()?.isApp,
sendPreviewBubble: true, sendPreviewBubble: true,
enableAutoGenerateTitle: true,
sidebarWidth: DEFAULT_SIDEBAR_WIDTH, sidebarWidth: DEFAULT_SIDEBAR_WIDTH,
disablePromptHint: false, disablePromptHint: false,
@ -42,27 +101,14 @@ export const DEFAULT_CONFIG = {
dontShowMaskSplashScreen: false, // dont show splash screen when create chat dontShowMaskSplashScreen: false, // dont show splash screen when create chat
hideBuiltinMasks: false, // dont add builtin masks hideBuiltinMasks: false, // dont add builtin masks
customModels: "", providerConfig: { ...DEFAULT_PROVIDER_CONFIG },
models: DEFAULT_MODELS as any as LLMModel[], globalMaskConfig: { ...DEFAULT_MASK_CONFIG },
modelConfig: {
model: "gpt-3.5-turbo" as ModelType,
temperature: 0.5,
top_p: 1,
max_tokens: 2000,
presence_penalty: 0,
frequency_penalty: 0,
sendMemory: true,
historyMessageCount: 4,
compressMessageLengthThreshold: 1000,
enableInjectSystemPrompts: true,
template: DEFAULT_INPUT_TEMPLATE,
},
}; };
export type ChatConfig = typeof DEFAULT_CONFIG; export type AppConfig = typeof DEFAULT_APP_CONFIG;
export type ProviderConfig = typeof DEFAULT_PROVIDER_CONFIG;
export type ModelConfig = ChatConfig["modelConfig"]; export type MaskConfig = typeof DEFAULT_MASK_CONFIG;
export type ModelConfig = typeof DEFAULT_MODEL_CONFIG;
export function limitNumber( export function limitNumber(
x: number, x: number,
@ -99,48 +145,21 @@ export const ModalConfigValidator = {
}; };
export const useAppConfig = createPersistStore( export const useAppConfig = createPersistStore(
{ ...DEFAULT_CONFIG }, { ...DEFAULT_APP_CONFIG },
(set, get) => ({ (set, get) => ({
reset() { reset() {
set(() => ({ ...DEFAULT_CONFIG })); set(() => ({ ...DEFAULT_APP_CONFIG }));
}, },
mergeModels(newModels: LLMModel[]) { getDefaultClient() {
if (!newModels || newModels.length === 0) { return api.createLLMClient(get().providerConfig, get().globalMaskConfig);
return;
}
const oldModels = get().models;
const modelMap: Record<string, LLMModel> = {};
for (const model of oldModels) {
model.available = false;
modelMap[model.name] = model;
}
for (const model of newModels) {
model.available = true;
modelMap[model.name] = model;
}
set(() => ({
models: Object.values(modelMap),
}));
},
allModels() {
const customModels = get()
.customModels.split(",")
.filter((v) => !!v && v.length > 0)
.map((m) => ({ name: m, available: true }));
return get().models.concat(customModels);
}, },
}), }),
{ {
name: StoreKey.Config, name: StoreKey.Config,
version: 3.8, version: 4,
migrate(persistedState, version) { migrate(persistedState, version) {
const state = persistedState as ChatConfig; const state = persistedState as any;
if (version < 3.4) { if (version < 3.4) {
state.modelConfig.sendMemory = true; state.modelConfig.sendMemory = true;
@ -169,6 +188,10 @@ export const useAppConfig = createPersistStore(
state.lastUpdate = Date.now(); state.lastUpdate = Date.now();
} }
if (version < 4) {
// todo: migarte from old versions
}
return state as any; return state as any;
}, },
}, },

View File

@ -1,10 +1,11 @@
import { BUILTIN_MASKS } from "../masks"; import { BUILTIN_MASKS } from "../masks";
import { getLang, Lang } from "../locales"; import { getLang, Lang } from "../locales";
import { DEFAULT_TOPIC, ChatMessage } from "./chat"; import { DEFAULT_TOPIC, ChatMessage } from "./chat";
import { ModelConfig, useAppConfig } from "./config"; import { MaskConfig, ModelConfig, useAppConfig } from "./config";
import { StoreKey } from "../constant"; import { StoreKey } from "../constant";
import { nanoid } from "nanoid"; import { nanoid } from "nanoid";
import { createPersistStore } from "../utils/store"; import { createPersistStore } from "../utils/store";
import { deepClone } from "../utils/clone";
export type Mask = { export type Mask = {
id: string; id: string;
@ -14,7 +15,9 @@ export type Mask = {
hideContext?: boolean; hideContext?: boolean;
context: ChatMessage[]; context: ChatMessage[];
syncGlobalConfig?: boolean; syncGlobalConfig?: boolean;
modelConfig: ModelConfig;
config: MaskConfig;
lang: Lang; lang: Lang;
builtin: boolean; builtin: boolean;
}; };
@ -33,7 +36,7 @@ export const createEmptyMask = () =>
name: DEFAULT_TOPIC, name: DEFAULT_TOPIC,
context: [], context: [],
syncGlobalConfig: true, // use global config as default syncGlobalConfig: true, // use global config as default
modelConfig: { ...useAppConfig.getState().modelConfig }, config: deepClone(useAppConfig.getState().globalMaskConfig),
lang: getLang(), lang: getLang(),
builtin: false, builtin: false,
createdAt: Date.now(), createdAt: Date.now(),
@ -87,10 +90,11 @@ export const useMaskStore = createPersistStore(
const buildinMasks = BUILTIN_MASKS.map( const buildinMasks = BUILTIN_MASKS.map(
(m) => (m) =>
({ ({
id: m.name,
...m, ...m,
modelConfig: { config: {
...config.modelConfig, ...config.globalMaskConfig,
...m.modelConfig, ...m.config,
}, },
}) as Mask, }) as Mask,
); );
@ -120,6 +124,8 @@ export const useMaskStore = createPersistStore(
newState.masks = updatedMasks; newState.masks = updatedMasks;
} }
// TODO(yifei): migrate old masks
return newState as any; return newState as any;
}, },
}, },

View File

@ -13,7 +13,7 @@ import { downloadAs, readFromFile } from "../utils";
import { showToast } from "../components/ui-lib"; import { showToast } from "../components/ui-lib";
import Locale from "../locales"; import Locale from "../locales";
import { createSyncClient, ProviderType } from "../utils/cloud"; import { createSyncClient, ProviderType } from "../utils/cloud";
import { corsPath } from "../utils/cors"; import { getApiPath } from "../utils/path";
export interface WebDavConfig { export interface WebDavConfig {
server: string; server: string;
@ -27,7 +27,7 @@ export type SyncStore = GetStoreState<typeof useSyncStore>;
const DEFAULT_SYNC_STATE = { const DEFAULT_SYNC_STATE = {
provider: ProviderType.WebDAV, provider: ProviderType.WebDAV,
useProxy: true, useProxy: true,
proxyUrl: corsPath(ApiPath.Cors), proxyUrl: getApiPath(ApiPath.Cors),
webdav: { webdav: {
endpoint: "", endpoint: "",

View File

@ -1,5 +1,4 @@
import { FETCH_COMMIT_URL, FETCH_TAG_URL, StoreKey } from "../constant"; import { FETCH_COMMIT_URL, FETCH_TAG_URL, StoreKey } from "../constant";
import { api } from "../client/api";
import { getClientConfig } from "../config/client"; import { getClientConfig } from "../config/client";
import { createPersistStore } from "../utils/store"; import { createPersistStore } from "../utils/store";
import ChatGptIcon from "../icons/chatgpt.png"; import ChatGptIcon from "../icons/chatgpt.png";
@ -85,29 +84,34 @@ export const useUpdateStore = createPersistStore(
})); }));
if (window.__TAURI__?.notification && isApp) { if (window.__TAURI__?.notification && isApp) {
// Check if notification permission is granted // Check if notification permission is granted
await window.__TAURI__?.notification.isPermissionGranted().then((granted) => { await window.__TAURI__?.notification
.isPermissionGranted()
.then((granted) => {
if (!granted) { if (!granted) {
return; return;
} else { } else {
// Request permission to show notifications // Request permission to show notifications
window.__TAURI__?.notification.requestPermission().then((permission) => { window.__TAURI__?.notification
if (permission === 'granted') { .requestPermission()
.then((permission) => {
if (permission === "granted") {
if (version === remoteId) { if (version === remoteId) {
// Show a notification using Tauri // Show a notification using Tauri
window.__TAURI__?.notification.sendNotification({ window.__TAURI__?.notification.sendNotification({
title: "ChatGPT Next Web", title: "ChatGPT Next Web",
body: `${Locale.Settings.Update.IsLatest}`, body: `${Locale.Settings.Update.IsLatest}`,
icon: `${ChatGptIcon.src}`, icon: `${ChatGptIcon.src}`,
sound: "Default" sound: "Default",
}); });
} else { } else {
const updateMessage = Locale.Settings.Update.FoundUpdate(`${remoteId}`); const updateMessage =
Locale.Settings.Update.FoundUpdate(`${remoteId}`);
// Show a notification for the new version using Tauri // Show a notification for the new version using Tauri
window.__TAURI__?.notification.sendNotification({ window.__TAURI__?.notification.sendNotification({
title: "ChatGPT Next Web", title: "ChatGPT Next Web",
body: updateMessage, body: updateMessage,
icon: `${ChatGptIcon.src}`, icon: `${ChatGptIcon.src}`,
sound: "Default" sound: "Default",
}); });
} }
} }
@ -130,14 +134,7 @@ export const useUpdateStore = createPersistStore(
})); }));
try { try {
const usage = await api.llm.usage(); // TODO: add check usage api here
if (usage) {
set(() => ({
used: usage.used,
subscription: usage.total,
}));
}
} catch (e) { } catch (e) {
console.error((e as Error).message); console.error((e as Error).message);
} }

View File

@ -1 +1,15 @@
export type Updater<T> = (updater: (value: T) => void) => void; export type Updater<T> = (updater: (value: T) => void) => void;
export enum SubmitKey {
Enter = "Enter",
CtrlEnter = "Ctrl + Enter",
ShiftEnter = "Shift + Enter",
AltEnter = "Alt + Enter",
MetaEnter = "Meta + Enter",
}
export enum Theme {
Auto = "auto",
Dark = "dark",
Light = "light",
}

View File

@ -1,3 +1,3 @@
export function deepClone<T>(obj: T) { export function deepClone<T>(obj: T): T {
return JSON.parse(JSON.stringify(obj)); return JSON.parse(JSON.stringify(obj));
} }

View File

@ -1,5 +1,6 @@
import { createWebDavClient } from "./webdav"; import { createWebDavClient } from "./webdav";
import { createUpstashClient } from "./upstash"; import { createUpstashClient } from "./upstash";
import { SyncStore } from "@/app/store/sync";
export enum ProviderType { export enum ProviderType {
WebDAV = "webdav", WebDAV = "webdav",
@ -27,7 +28,7 @@ export type SyncClient = {
export function createSyncClient<T extends ProviderType>( export function createSyncClient<T extends ProviderType>(
provider: T, provider: T,
config: SyncClientConfig[T], store: SyncStore,
): SyncClient { ): SyncClient {
return SyncClients[provider](config as any) as any; return SyncClients[provider](store);
} }

View File

@ -57,7 +57,7 @@ export function createUpstashClient(store: SyncStore) {
async get() { async get() {
const chunkCount = Number(await this.redisGet(chunkCountKey)); const chunkCount = Number(await this.redisGet(chunkCountKey));
if (!Number.isInteger(chunkCount)) return; if (!Number.isInteger(chunkCount)) return "";
const chunks = await Promise.all( const chunks = await Promise.all(
new Array(chunkCount) new Array(chunkCount)

View File

@ -1,19 +1,5 @@
import { getClientConfig } from "../config/client"; import { ApiPath } from "../constant";
import { ApiPath, DEFAULT_CORS_HOST } from "../constant"; import { getApiPath } from "./path";
export function corsPath(path: string) {
const baseUrl = getClientConfig()?.isApp ? `${DEFAULT_CORS_HOST}` : "";
if (!path.startsWith("/")) {
path = "/" + path;
}
if (!path.endsWith("/")) {
path += "/";
}
return `${baseUrl}${path}`;
}
export function corsFetch( export function corsFetch(
url: string, url: string,
@ -25,7 +11,7 @@ export function corsFetch(
throw Error("[CORS Fetch] url must starts with http/https"); throw Error("[CORS Fetch] url must starts with http/https");
} }
let proxyUrl = options.proxyUrl ?? corsPath(ApiPath.Cors); let proxyUrl = options.proxyUrl ?? getApiPath(ApiPath.Cors);
if (!proxyUrl.endsWith("/")) { if (!proxyUrl.endsWith("/")) {
proxyUrl += "/"; proxyUrl += "/";
} }

13
app/utils/log.ts Normal file
View File

@ -0,0 +1,13 @@
/**
 * Create a console-backed logger whose every line is tagged with `prefix`.
 *
 * @param prefix - tag prepended to each log call (defaults to "")
 * @returns an object with `log`, `error` and `warn` methods mirroring console
 */
export function createLogger(prefix = "") {
  // Factory that prepends the prefix before delegating to a console writer.
  const tagged =
    (writer: (...data: any[]) => void) =>
    (...args: any[]) =>
      writer(prefix, ...args);

  return {
    log: tagged(console.log),
    error: tagged(console.error),
    warn: tagged(console.warn),
  };
}

17
app/utils/object.ts Normal file
View File

@ -0,0 +1,17 @@
/**
 * Return a new object containing only the given `keys` of `obj`.
 *
 * @param obj - source object (not mutated)
 * @param keys - property names to copy into the result
 * @returns a shallow copy restricted to `keys`
 */
export function pick<T extends object, U extends (keyof T)[]>(
  obj: T,
  ...keys: U
): Pick<T, U[number]> {
  // Accumulate into a precisely-typed object instead of `any`, so each
  // assignment below is still verified by the compiler.
  const result = {} as Pick<T, U[number]>;
  for (const key of keys) {
    result[key] = obj[key];
  }
  return result;
}
/**
 * Return a shallow copy of `obj` without the given `keys`.
 *
 * @param obj - source object (not mutated)
 * @param keys - property names to remove from the copy
 * @returns a shallow copy of `obj` minus `keys`
 */
export function omit<T extends object, U extends (keyof T)[]>(
  obj: T,
  ...keys: U
): Omit<T, U[number]> {
  // Copy first, then delete. Partial<T> makes every property optional so
  // `delete` type-checks here without falling back to `any`.
  const result: Partial<T> = { ...obj };
  for (const key of keys) {
    delete result[key];
  }
  return result as Omit<T, U[number]>;
}

16
app/utils/path.ts Normal file
View File

@ -0,0 +1,16 @@
import { getClientConfig } from "../config/client";
import { ApiPath, REMOTE_API_HOST } from "../constant";
/**
 * Resolve an api path for the current runtime environment.
 *
 * 1. Desktop app builds prefix the remote host for a full url, giving a
 *    better network experience.
 * 2. Web builds keep the original relative path and hit the same origin.
 *
 * @param path - /api/*
 * @returns the absolute (desktop) or relative (web) url for `path`
 */
export function getApiPath(path: ApiPath) {
  const hostPrefix = getClientConfig()?.isApp ? REMOTE_API_HOST : "";
  return `${hostPrefix}${path}`;
}

19
app/utils/string.ts Normal file
View File

@ -0,0 +1,19 @@
/**
 * Strip every trailing occurrence of `end` from `s`.
 *
 * @param s - input string
 * @param end - suffix to remove repeatedly (defaults to a single space)
 * @returns `s` with all trailing copies of `end` removed; an empty `end`
 *          returns `s` unchanged (guards against an infinite loop)
 */
export function trimEnd(s: string, end = " ") {
  if (end.length === 0) return s;
  let result = s;
  while (result.endsWith(end)) {
    result = result.slice(0, result.length - end.length);
  }
  return result;
}
/**
 * Strip every leading occurrence of `start` from `s`.
 *
 * Bug fix: the loop previously tested `s.endsWith(start)` while slicing from
 * the front, so real leading prefixes were never stripped and a trailing
 * match caused characters to be removed from the wrong end.
 *
 * @param s - input string
 * @param start - prefix to remove repeatedly (defaults to a single space)
 * @returns `s` with all leading copies of `start` removed; an empty `start`
 *          returns `s` unchanged (guards against an infinite loop)
 */
export function trimStart(s: string, start = " ") {
  if (start.length === 0) return s;
  while (s.startsWith(start)) {
    s = s.slice(start.length);
  }
  return s;
}