From d12cf44d06707c4241282738ca62806394096d65 Mon Sep 17 00:00:00 2001
From: A-Cepheus <60658915+A-Cepheus@users.noreply.github.com>
Date: Wed, 24 Apr 2024 10:26:30 +0800
Subject: [PATCH] ✨ feat: support prompts for other models
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 app/constant.ts   | 16 +++++++++++++++-
 app/store/chat.ts |  6 +++---
 2 files changed, 18 insertions(+), 4 deletions(-)

diff --git a/app/constant.ts b/app/constant.ts
index 17f2229ff..9ed82a7ac 100644
--- a/app/constant.ts
+++ b/app/constant.ts
@@ -112,7 +112,7 @@ export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lan
 // Latex block: $$e=mc^2$$
 // `;
 export const DEFAULT_SYSTEM_TEMPLATE = `
-You are ChatGPT, a large language model trained by {{ServiceProvider}}.
+You are {{model}}, a large language model trained by {{ServiceProvider}}.
 Knowledge cutoff: {{cutoff}}
 Current model: {{model}}
 Current time: {{time}}
@@ -135,6 +135,7 @@ export const KnowledgeCutOffDate: Record<string, string> = {
   // it's now easier to add "KnowledgeCutOffDate" instead of stupid hardcoding it, as was done previously.
   "gemini-pro": "2023-12",
   "gemini-pro-vision": "2023-12",
+  "Meta-Llama-3-8B-Instruct": "2023-12",
 };
 
 const openaiModels = [
@@ -174,6 +175,10 @@ const anthropicModels = [
   "claude-3-haiku-20240307",
 ];
 
+const metaModels = [
+  "Meta-Llama-3-8B-Instruct",
+];
+
 export const DEFAULT_MODELS = [
   ...openaiModels.map((name) => ({
     name,
@@ -202,6 +207,15 @@ export const DEFAULT_MODELS = [
       providerType: "anthropic",
     },
   })),
+  ...metaModels.map((name) => ({
+    name,
+    available: true,
+    provider: {
+      id: "meta",
+      providerName: "Meta",
+      providerType: "meta",
+    },
+  })),
 ] as const;
 
 export const CHAT_PAGE_SIZE = 15;
diff --git a/app/store/chat.ts b/app/store/chat.ts
index b305264b6..afc1d438e 100644
--- a/app/store/chat.ts
+++ b/app/store/chat.ts
@@ -437,9 +437,9 @@ export const useChatStore = createPersistStore(
       const contextPrompts = session.mask.context.slice();
 
       // system prompts, to get close to OpenAI Web ChatGPT
-      const shouldInjectSystemPrompts =
-        modelConfig.enableInjectSystemPrompts &&
-        session.mask.modelConfig.model.startsWith("gpt-");
+      const shouldInjectSystemPrompts = modelConfig.enableInjectSystemPrompts;
+        // modelConfig.enableInjectSystemPrompts &&
+        // session.mask.modelConfig.model.startsWith("gpt-");
 
       var systemPrompts: ChatMessage[] = [];
       systemPrompts = shouldInjectSystemPrompts
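
For reference, the change relies on plain `{{placeholder}}` substitution in DEFAULT_SYSTEM_TEMPLATE; that step is not shown in the patch itself. Below is a minimal TypeScript sketch of how such a template could be filled. The helper name `fillTemplate` and the `vars` object are illustrative assumptions, not the repository's actual implementation in app/store/chat.ts.

// Minimal sketch, not the project's real code: fillTemplate and vars are
// hypothetical names used only to illustrate {{placeholder}} substitution.
const DEFAULT_SYSTEM_TEMPLATE = `
You are {{model}}, a large language model trained by {{ServiceProvider}}.
Knowledge cutoff: {{cutoff}}
Current model: {{model}}
Current time: {{time}}
`;

// Replace every {{key}} occurrence in the template with its value.
function fillTemplate(template: string, vars: Record<string, string>): string {
  return Object.entries(vars).reduce(
    (output, [key, value]) => output.replaceAll(`{{${key}}}`, value),
    template,
  );
}

// With a non-OpenAI model selected, the injected system prompt now names that
// model instead of the previously hardcoded "ChatGPT".
const systemPrompt = fillTemplate(DEFAULT_SYSTEM_TEMPLATE, {
  model: "Meta-Llama-3-8B-Instruct",
  ServiceProvider: "Meta",
  cutoff: "2023-12",
  time: new Date().toLocaleString(),
});
console.log(systemPrompt);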