Merge remote-tracking branch 'origin/main' into gpt-4o

# Conflicts:
#	public/apple-touch-icon.png
Hao Jia
2024-05-16 14:43:10 +08:00
46 changed files with 2181 additions and 607 deletions

View File

@@ -8,6 +8,7 @@ import { getHeaders } from "../client/api";
import { getClientConfig } from "../config/client";
import { createPersistStore } from "../utils/store";
import { ensure } from "../utils/clone";
import { DEFAULT_CONFIG } from "./config";
let fetchState = 0; // 0: not fetched, 1: fetching, 2: done
@@ -36,6 +37,11 @@ const DEFAULT_ACCESS_STATE = {
googleApiKey: "",
googleApiVersion: "v1",
// anthropic
anthropicApiKey: "",
anthropicApiVersion: "2023-06-01",
anthropicUrl: "",
// server config
needCode: true,
hideUserApiKey: false,
@@ -43,6 +49,7 @@ const DEFAULT_ACCESS_STATE = {
disableGPT4: false,
disableFastLink: false,
customModels: "",
defaultModel: "",
};
export const useAccessStore = createPersistStore(
@@ -67,6 +74,10 @@ export const useAccessStore = createPersistStore(
return ensure(get(), ["googleApiKey"]);
},
isValidAnthropic() {
return ensure(get(), ["anthropicApiKey"]);
},
isAuthorized() {
this.fetch();
@@ -75,6 +86,7 @@ export const useAccessStore = createPersistStore(
this.isValidOpenAI() ||
this.isValidAzure() ||
this.isValidGoogle() ||
this.isValidAnthropic() ||
!this.enabledAccessControl() ||
(this.enabledAccessControl() && ensure(get(), ["accessCode"]))
);
@@ -90,6 +102,13 @@ export const useAccessStore = createPersistStore(
},
})
.then((res) => res.json())
.then((res) => {
// Set default model from env request
let defaultModel = res.defaultModel ?? "";
DEFAULT_CONFIG.modelConfig.model =
defaultModel !== "" ? defaultModel : "gpt-3.5-turbo";
return res;
})
.then((res: DangerConfig) => {
console.log("[Config] got config from server", res);
set(() => ({ ...res }));
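
Note on the validation pattern in this file: isValidAnthropic() reuses the same ensure() check as the existing OpenAI, Azure, and Google variants. A minimal sketch of how that check presumably behaves (ensureSketch and the inline state object are illustrative stand-ins, not the real implementation in ../utils/clone):

  // Hedged sketch: assumes ensure(obj, keys) returns true only when every
  // listed key holds a non-empty value; the real helper may differ in detail.
  function ensureSketch(obj: Record<string, string>, keys: string[]): boolean {
    return keys.every((k) => (obj[k] ?? "") !== "");
  }

  // Under that assumption, isValidAnthropic() is true as soon as an Anthropic
  // API key is present, which in turn lets isAuthorized() pass without an
  // access code.
  const state = { anthropicApiKey: "sk-ant-example", accessCode: "" };
  const valid = ensureSketch(state, ["anthropicApiKey"]); // true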

View File

@@ -20,6 +20,9 @@ import { prettyObject } from "../utils/format";
import { estimateTokenLength } from "../utils/token";
import { nanoid } from "nanoid";
import { createPersistStore } from "../utils/store";
import { identifyDefaultClaudeModel } from "../utils/checkers";
import { collectModelsWithDefaultModel } from "../utils/model";
import { useAccessStore } from "./access";
export type ChatMessage = RequestMessage & {
date: string;
@@ -86,9 +89,19 @@ function createEmptySession(): ChatSession {
function getSummarizeModel(currentModel: string) {
// if a gpt-* model is in use, prefer the 3.5 summarize model when it is available
if (currentModel.startsWith("gpt")) {
-   return SUMMARIZE_MODEL;
+   const configStore = useAppConfig.getState();
+   const accessStore = useAccessStore.getState();
+   const allModel = collectModelsWithDefaultModel(
+     configStore.models,
+     [configStore.customModels, accessStore.customModels].join(","),
+     accessStore.defaultModel,
+   );
+   const summarizeModel = allModel.find(
+     (m) => m.name === SUMMARIZE_MODEL && m.available,
+   );
+   return summarizeModel?.name ?? currentModel;
}
if (currentModel.startsWith("gemini-pro")) {
if (currentModel.startsWith("gemini")) {
return GEMINI_SUMMARIZE_MODEL;
}
return currentModel;
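
The practical effect of the new lookup in getSummarizeModel(): the dedicated summarize model is only used when it is actually present and available in the merged model list; otherwise the current model summarizes its own history. A small illustration with a stubbed model list (the entry shape returned by collectModelsWithDefaultModel and the value of SUMMARIZE_MODEL are assumptions here):

  // Illustration only: a stubbed model list standing in for the result of
  // collectModelsWithDefaultModel(); real entries carry more fields, and
  // SUMMARIZE_MODEL is assumed to be "gpt-3.5-turbo".
  const SUMMARIZE_MODEL = "gpt-3.5-turbo";
  const allModel = [
    { name: "gpt-4o", available: true },
    { name: "gpt-3.5-turbo", available: false }, // e.g. hidden via customModels
  ];
  const summarizeModel = allModel.find(
    (m) => m.name === SUMMARIZE_MODEL && m.available,
  );
  // When 3.5 is not available, the current model summarizes its own history:
  const chosen = summarizeModel?.name ?? "gpt-4o"; // "gpt-4o"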
@@ -119,13 +132,18 @@ function fillTemplateWith(input: string, modelConfig: ModelConfig) {
ServiceProvider: serviceProvider,
cutoff,
model: modelConfig.model,
- time: new Date().toLocaleString(),
+ time: new Date().toString(),
lang: getLang(),
input: input,
};
let output = modelConfig.template ?? DEFAULT_INPUT_TEMPLATE;
// remove duplicate
if (input.startsWith(output)) {
output = "";
}
// must contain {{input}}
const inputVar = "{{input}}";
if (!output.includes(inputVar)) {
@@ -348,6 +366,8 @@ export const useChatStore = createPersistStore(
var api: ClientApi;
if (modelConfig.model.startsWith("gemini")) {
api = new ClientApi(ModelProvider.GeminiPro);
} else if (identifyDefaultClaudeModel(modelConfig.model)) {
api = new ClientApi(ModelProvider.Claude);
} else {
api = new ClientApi(ModelProvider.GPT);
}
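
identifyDefaultClaudeModel() comes from ../utils/checkers and its body is not shown in this diff; the sketch below is a hypothetical stand-in for how such a check could route Claude models to the new client:

  // Hypothetical stand-in for identifyDefaultClaudeModel (../utils/checkers);
  // the assumption is that it recognizes Anthropic model names and routes
  // them to the Claude client.
  function looksLikeClaudeModel(modelName: string): boolean {
    return modelName.toLowerCase().startsWith("claude");
  }

  // Provider selection then mirrors the branch above:
  // gemini* -> GeminiPro, claude* -> Claude, everything else -> GPT.
  console.log(looksLikeClaudeModel("claude-3-opus-20240229")); // true
  console.log(looksLikeClaudeModel("gpt-4o")); // false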
@@ -494,7 +514,6 @@ export const useChatStore = createPersistStore(
tokenCount += estimateTokenLength(getMessageTextContent(msg));
reversedRecentMessages.push(msg);
}
// concat all messages
const recentMessages = [
...systemPrompts,
@@ -533,6 +552,8 @@ export const useChatStore = createPersistStore(
var api: ClientApi;
if (modelConfig.model.startsWith("gemini")) {
api = new ClientApi(ModelProvider.GeminiPro);
} else if (identifyDefaultClaudeModel(modelConfig.model)) {
api = new ClientApi(ModelProvider.Claude);
} else {
api = new ClientApi(ModelProvider.GPT);
}
@@ -557,6 +578,7 @@ export const useChatStore = createPersistStore(
messages: topicMessages,
config: {
model: getSummarizeModel(session.mask.modelConfig.model),
stream: false,
},
onFinish(message) {
get().updateCurrentSession(
@@ -600,6 +622,10 @@ export const useChatStore = createPersistStore(
historyMsgLength > modelConfig.compressMessageLengthThreshold &&
modelConfig.sendMemory
) {
/**
 * Destructure max_tokens out of the config while summarizing;
 * the parameter is not useful for summarization requests.
 */
const { max_tokens, ...modelcfg } = modelConfig;
api.llm.chat({
messages: toBeSummarizedMsgs.concat(
createMessage({
@@ -609,7 +635,7 @@ export const useChatStore = createPersistStore(
}),
),
config: {
- ...modelConfig,
+ ...modelcfg,
stream: true,
model: getSummarizeModel(session.mask.modelConfig.model),
},
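
The destructuring above only removes max_tokens from the config passed to the summarization request; every other field is forwarded unchanged. For example:

  // Object rest strips max_tokens before the summarization call; all other
  // fields are forwarded as-is (values here are illustrative).
  const modelConfig = { model: "gpt-4o", temperature: 0.5, max_tokens: 4000 };
  const { max_tokens, ...modelcfg } = modelConfig;
  console.log(modelcfg); // { model: "gpt-4o", temperature: 0.5 }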

View File

@@ -97,13 +97,23 @@ export const useSyncStore = createPersistStore(
const client = this.getClient();
try {
- const remoteState = JSON.parse(
-   await client.get(config.username),
- ) as AppState;
- mergeAppState(localState, remoteState);
- setLocalAppState(localState);
+ const remoteState = await client.get(config.username);
+ if (!remoteState || remoteState === "") {
+   await client.set(config.username, JSON.stringify(localState));
+   console.log(
+     "[Sync] Remote state is empty, using local state instead.",
+   );
+   return;
+ } else {
+   const parsedRemoteState = JSON.parse(
+     await client.get(config.username),
+   ) as AppState;
+   mergeAppState(localState, parsedRemoteState);
+   setLocalAppState(localState);
+ }
} catch (e) {
console.log("[Sync] failed to get remote state", e);
throw e;
}
await client.set(config.username, JSON.stringify(localState));
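
The new guard changes first-sync behaviour: an empty remote value now seeds the remote store with the local state and returns early, instead of failing on JSON.parse of an empty string. A rough sketch of that flow (the in-memory client below is hypothetical; the real store talks to the configured sync client, e.g. WebDAV or UpStash):

  // Hypothetical in-memory client used only to illustrate the guard.
  const memory = new Map<string, string>();
  const client = {
    get: async (key: string) => memory.get(key) ?? "",
    set: async (key: string, value: string) => {
      memory.set(key, value);
    },
  };

  async function syncOnce(username: string, localState: object): Promise<void> {
    const remoteState = await client.get(username);
    if (!remoteState || remoteState === "") {
      // First sync: seed the remote store instead of parsing an empty string.
      await client.set(username, JSON.stringify(localState));
      return;
    }
    // Subsequent syncs would parse and merge the remote state here.
    const parsed = JSON.parse(remoteState);
    void parsed;
  }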