feat: merge main

Author: Dean-YZG
Date: 2024-05-16 15:28:10 +08:00
25 changed files with 293 additions and 215 deletions


@@ -21,6 +21,8 @@ import { estimateTokenLength } from "../utils/token";
 import { nanoid } from "nanoid";
 import { createPersistStore } from "../utils/store";
 import { identifyDefaultClaudeModel } from "../utils/checkers";
+import { collectModelsWithDefaultModel } from "../utils/model";
+import { useAccessStore } from "./access";
 
 export type ChatMessage = RequestMessage & {
   date: string;
@@ -104,9 +106,19 @@ function createEmptySession(): ChatSession {
 function getSummarizeModel(currentModel: string) {
   // if it is using gpt-* models, force to use 3.5 to summarize
   if (currentModel.startsWith("gpt")) {
-    return SUMMARIZE_MODEL;
+    const configStore = useAppConfig.getState();
+    const accessStore = useAccessStore.getState();
+    const allModel = collectModelsWithDefaultModel(
+      configStore.models,
+      [configStore.customModels, accessStore.customModels].join(","),
+      accessStore.defaultModel,
+    );
+    const summarizeModel = allModel.find(
+      (m) => m.name === SUMMARIZE_MODEL && m.available,
+    );
+    return summarizeModel?.name ?? currentModel;
   }
-  if (currentModel.startsWith("gemini-pro")) {
+  if (currentModel.startsWith("gemini")) {
     return GEMINI_SUMMARIZE_MODEL;
   }
   return currentModel;
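
A note on the hunk above: instead of always forcing SUMMARIZE_MODEL for gpt-* sessions, the summarizer is now only swapped in when that model is actually available in the collected model list, falling back to the current model otherwise. A minimal standalone sketch of that fallback (the Model shape and the two constants here are illustrative stand-ins, not the project's real definitions):

// Sketch only: Model, SUMMARIZE_MODEL and GEMINI_SUMMARIZE_MODEL are
// illustrative stand-ins for the project's real definitions.
type Model = { name: string; available: boolean };

const SUMMARIZE_MODEL = "gpt-3.5-turbo";
const GEMINI_SUMMARIZE_MODEL = "gemini-pro";

function pickSummarizeModel(currentModel: string, allModels: Model[]): string {
  if (currentModel.startsWith("gpt")) {
    // Use the cheap summarizer only when it is actually available.
    const summarizeModel = allModels.find(
      (m) => m.name === SUMMARIZE_MODEL && m.available,
    );
    return summarizeModel?.name ?? currentModel;
  }
  if (currentModel.startsWith("gemini")) {
    return GEMINI_SUMMARIZE_MODEL;
  }
  return currentModel;
}

// With no gpt-3.5 deployed, a gpt-4 session now summarizes with gpt-4
// instead of requesting a model that does not exist:
console.log(pickSummarizeModel("gpt-4", [{ name: "gpt-4", available: true }]));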
@@ -433,14 +445,13 @@ export const useChatStore = createPersistStore(
     getMemoryPrompt() {
       const session = get().currentSession();
 
-      return {
-        role: "system",
-        content:
-          session.memoryPrompt.length > 0
-            ? Locale.Store.Prompt.History(session.memoryPrompt)
-            : "",
-        date: "",
-      } as ChatMessage;
+      if (session.memoryPrompt.length) {
+        return {
+          role: "system",
+          content: Locale.Store.Prompt.History(session.memoryPrompt),
+          date: "",
+        } as ChatMessage;
+      }
     },
 
     getMessagesWithMemory() {
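
Note: getMemoryPrompt now returns undefined when the session has no memory prompt, rather than a system message with empty content, so every caller must guard the result. A small sketch of the changed contract (ChatMessage abbreviated here as an assumption):

// Abbreviated ChatMessage; a stand-in for the store's real type.
type ChatMessage = { role: "system" | "user" | "assistant"; content: string; date: string };

// After this hunk the method effectively returns ChatMessage | undefined.
function getMemoryPrompt(memory: string): ChatMessage | undefined {
  if (memory.length) {
    return { role: "system", content: memory, date: "" };
  }
  return undefined; // instead of a system message with empty content
}

const prompt = getMemoryPrompt("");
if (prompt) {
  console.log(prompt.content); // only used when it actually exists
}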
@@ -476,16 +487,15 @@ export const useChatStore = createPersistStore(
           systemPrompts.at(0)?.content ?? "empty",
         );
       }
 
+      const memoryPrompt = get().getMemoryPrompt();
       // long term memory
       const shouldSendLongTermMemory =
         modelConfig.sendMemory &&
         session.memoryPrompt &&
-        session.memoryPrompt.length > 0 &&
         session.lastSummarizeIndex > clearContextIndex;
-      const longTermMemoryPrompts = shouldSendLongTermMemory
-        ? [get().getMemoryPrompt()]
-        : [];
+      const longTermMemoryPrompts =
+        shouldSendLongTermMemory && memoryPrompt ? [memoryPrompt] : [];
       const longTermMemoryStartIndex = session.lastSummarizeIndex;
 
       // short term memory
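
Note: getMessagesWithMemory now fetches the memory prompt once and reuses it, and the former session.memoryPrompt.length > 0 condition folds into the truthiness check on the returned message. A self-contained sketch under assumed session/config shapes:

// Stand-in shapes for the session and config involved; illustration only.
type ChatMessage = { role: string; content: string; date: string };

const session = { memoryPrompt: "", lastSummarizeIndex: 4 };
const modelConfig = { sendMemory: true };
const clearContextIndex = 0;

// Fetch once and reuse below (the old code called getMemoryPrompt inline).
const memoryPrompt: ChatMessage | undefined = session.memoryPrompt.length
  ? { role: "system", content: session.memoryPrompt, date: "" }
  : undefined;

const shouldSendLongTermMemory =
  modelConfig.sendMemory &&
  Boolean(session.memoryPrompt) &&
  session.lastSummarizeIndex > clearContextIndex;

// The explicit `.length > 0` check is replaced by `&& memoryPrompt`:
// an empty memory prompt yields [] here.
const longTermMemoryPrompts: ChatMessage[] =
  shouldSendLongTermMemory && memoryPrompt ? [memoryPrompt] : [];

console.log(longTermMemoryPrompts.length); // 0 for this empty session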
@@ -610,9 +620,11 @@ export const useChatStore = createPersistStore(
           Math.max(0, n - modelConfig.historyMessageCount),
         );
       }
 
-      // add memory prompt
-      toBeSummarizedMsgs.unshift(get().getMemoryPrompt());
+      const memoryPrompt = get().getMemoryPrompt();
+      if (memoryPrompt) {
+        // add memory prompt
+        toBeSummarizedMsgs.unshift(memoryPrompt);
+      }
 
       const lastSummarizeIndex = session.messages.length;
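
The summarize path applies the same guard: the memory prompt is only unshifted onto the messages to be summarized when one exists. A usage-style sketch with the same stand-in types:

// Self-contained sketch; the helper mirrors the new getMemoryPrompt contract.
type ChatMessage = { role: string; content: string; date: string };

function memoryPromptOf(memory: string): ChatMessage | undefined {
  return memory.length
    ? { role: "system", content: memory, date: "" }
    : undefined;
}

const toBeSummarizedMsgs: ChatMessage[] = [
  { role: "user", content: "hello", date: "" },
];

const memoryPrompt = memoryPromptOf("earlier conversation summary");
if (memoryPrompt) {
  // add memory prompt only when one exists; the old code could
  // unshift a system message whose content was "".
  toBeSummarizedMsgs.unshift(memoryPrompt);
}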