diff --git a/app/constant.ts b/app/constant.ts
index 9ed82a7ac..4c5fa9f8f 100644
--- a/app/constant.ts
+++ b/app/constant.ts
@@ -112,7 +112,7 @@ export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lan
 // Latex block: $$e=mc^2$$
 // `;
 export const DEFAULT_SYSTEM_TEMPLATE = `
-You are {{model}}, a large language model trained by {{ServiceProvider}}.
+You are {{modelname}}, a large language model trained by {{ServiceProvider}}.
 Knowledge cutoff: {{cutoff}}
 Current model: {{model}}
 Current time: {{time}}
@@ -138,6 +138,13 @@ export const KnowledgeCutOffDate: Record<string, string> = {
   "Meta-Llama-3-8B-Instruct": "2023-12",
 };
 
+export const ModelNameToName: Record<string, string> = {
+  default: "ChatGPT",
+  "gemini-pro": "Gemini",
+  "gemini-pro-vision": "Gemini",
+  "Meta-Llama-3-8B-Instruct": "Llama",
+};
+
 const openaiModels = [
   "gpt-3.5-turbo",
   "gpt-3.5-turbo-0301",
diff --git a/app/store/chat.ts b/app/store/chat.ts
index afc1d438e..f32142cc9 100644
--- a/app/store/chat.ts
+++ b/app/store/chat.ts
@@ -13,6 +13,7 @@ import {
   StoreKey,
   SUMMARIZE_MODEL,
   GEMINI_SUMMARIZE_MODEL,
+  ModelNameToName,
 } from "../constant";
 import { ClientApi, RequestMessage, MultimodalContent } from "../client/api";
 import { ChatControllerPool } from "../client/controller";
@@ -105,6 +106,8 @@ function countMessages(msgs: ChatMessage[]) {
 function fillTemplateWith(input: string, modelConfig: ModelConfig) {
   const cutoff =
     KnowledgeCutOffDate[modelConfig.model] ?? KnowledgeCutOffDate.default;
+  const modelname =
+    ModelNameToName[modelConfig.model] ?? ModelNameToName.default;
 
   // Find the model in the DEFAULT_MODELS array that matches the modelConfig.model
   const modelInfo = DEFAULT_MODELS.find((m) => m.name === modelConfig.model);
@@ -118,6 +121,7 @@ function fillTemplateWith(input: string, modelConfig: ModelConfig) {
 
   const vars = {
     ServiceProvider: serviceProvider,
+    modelname,
     cutoff,
     model: modelConfig.model,
     time: new Date().toString(),
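
For reference, a minimal TypeScript sketch of what the new {{modelname}} variable does. The ModelNameToName map and the "?? default" fallback mirror the diff above; the substitution loop, the resolveModelName helper, and the "gemini-pro" / "Google" values are only assumed illustrations of how fillTemplateWith might apply its vars, not the repo's exact implementation.

// Display-name table and fallback, as added in app/constant.ts.
const ModelNameToName: Record<string, string> = {
  default: "ChatGPT",
  "gemini-pro": "Gemini",
  "gemini-pro-vision": "Gemini",
  "Meta-Llama-3-8B-Instruct": "Llama",
};

// Hypothetical helper: unknown model ids fall back to the default name.
function resolveModelName(model: string): string {
  return ModelNameToName[model] ?? ModelNameToName.default;
}

// Assumed illustration of the template fill over the first line of
// DEFAULT_SYSTEM_TEMPLATE; values are examples only.
const template =
  "You are {{modelname}}, a large language model trained by {{ServiceProvider}}.";
const vars = {
  modelname: resolveModelName("gemini-pro"),
  ServiceProvider: "Google",
};

let output = template;
for (const [key, value] of Object.entries(vars)) {
  output = output.replaceAll(`{{${key}}}`, value);
}
console.log(output);
// "You are Gemini, a large language model trained by Google."

The practical effect is that the system prompt now introduces the assistant by a human-readable name ("Gemini", "Llama", ...) instead of the raw model id, while {{model}} on the "Current model:" line still reports the exact id.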