feat: close #2580, only use 3.5 to summarize when not using custom models

Author: Yidadaa
Date: 2023-08-28 00:02:52 +08:00
parent ada4e3cdcd
commit 3bd76b9156
2 changed files with 14 additions and 2 deletions

@@ -11,6 +11,7 @@ import {
   DEFAULT_INPUT_TEMPLATE,
   DEFAULT_SYSTEM_TEMPLATE,
   StoreKey,
+  SUMMARIZE_MODEL,
 } from "../constant";
 import { api, RequestMessage } from "../client/api";
 import { ChatControllerPool } from "../client/controller";
@@ -80,6 +81,11 @@ function createEmptySession(): ChatSession {
   };
 }
 
+function getSummarizeModel(currentModel: string) {
+  // if it is using gpt-* models, force to use 3.5 to summarize
+  return currentModel.startsWith("gpt") ? SUMMARIZE_MODEL : currentModel;
+}
+
 interface ChatStore {
   sessions: ChatSession[];
   currentSessionIndex: number;
@@ -501,7 +507,7 @@ export const useChatStore = create<ChatStore>()(
       api.llm.chat({
         messages: topicMessages,
         config: {
-          model: "gpt-3.5-turbo",
+          model: getSummarizeModel(session.mask.modelConfig.model),
         },
         onFinish(message) {
           get().updateCurrentSession(
@@ -555,7 +561,11 @@ export const useChatStore = create<ChatStore>()(
             date: "",
           }),
         ),
-        config: { ...modelConfig, stream: true, model: "gpt-3.5-turbo" },
+        config: {
+          ...modelConfig,
+          stream: true,
+          model: getSummarizeModel(session.mask.modelConfig.model),
+        },
         onUpdate(message) {
           session.memoryPrompt = message;
         },
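For context, a minimal standalone sketch of the behavior this commit introduces. It assumes SUMMARIZE_MODEL is defined as "gpt-3.5-turbo" in ../constant (the second file changed by this commit, not shown above); the custom model name in the usage example is purely illustrative.

// Sketch of the helper added by this commit; names follow the diff above.
// Assumption: SUMMARIZE_MODEL equals "gpt-3.5-turbo" in ../constant.
const SUMMARIZE_MODEL = "gpt-3.5-turbo";

function getSummarizeModel(currentModel: string): string {
  // gpt-* models are redirected to the cheaper summarize model;
  // any other (custom) model keeps summarizing with itself.
  return currentModel.startsWith("gpt") ? SUMMARIZE_MODEL : currentModel;
}

console.log(getSummarizeModel("gpt-4"));        // "gpt-3.5-turbo"
console.log(getSummarizeModel("my-custom-llm")); // "my-custom-llm" (hypothetical custom model, unchanged)

Per the commit title, the intent is that deployments using custom (non-gpt) models are no longer forced onto gpt-3.5-turbo for topic and memory summarization, since their backend may not expose that model at all.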