feat: add o1 model

skymkmk 2024-09-13 13:18:07 +08:00
parent 07c6fe5975
commit 71df415b14
No known key found for this signature in database
GPG Key ID: 6F4CA5A97C68BD71
2 changed files with 13 additions and 6 deletions


@@ -160,6 +160,7 @@ export class ChatGPTApi implements LLMApi {
     let requestPayload: RequestPayload | DalleRequestPayload;
 
     const isDalle3 = _isDalle3(options.config.model);
+    const isO1 = options.config.model.startsWith("o1");
     if (isDalle3) {
       const prompt = getMessageTextContent(
         options.messages.slice(-1)?.pop() as any,
@@ -181,17 +182,19 @@ export class ChatGPTApi implements LLMApi {
         const content = visionModel
           ? await preProcessImageContent(v.content)
           : getMessageTextContent(v);
-        messages.push({ role: v.role, content });
+        if (!(isO1 && v.role === "system"))
+          messages.push({ role: v.role, content });
       }
 
+      // o1 does not yet support images, tools (plugins in ChatGPTNextWeb), system messages, stream, logprobs, temperature, top_p, n, presence_penalty, or frequency_penalty.
       requestPayload = {
         messages,
-        stream: options.config.stream,
+        stream: !isO1 ? options.config.stream : false,
         model: modelConfig.model,
-        temperature: modelConfig.temperature,
-        presence_penalty: modelConfig.presence_penalty,
-        frequency_penalty: modelConfig.frequency_penalty,
-        top_p: modelConfig.top_p,
+        temperature: !isO1 ? modelConfig.temperature : 1,
+        presence_penalty: !isO1 ? modelConfig.presence_penalty : 0,
+        frequency_penalty: !isO1 ? modelConfig.frequency_penalty : 0,
+        top_p: !isO1 ? modelConfig.top_p : 1,
         // max_tokens: Math.max(modelConfig.max_tokens, 1024),
         // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
       };
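
For reference, the net effect of the hunk above: when the selected model is an o1 variant, system messages are dropped from the request and the streaming/sampling parameters are pinned to the only values o1 currently accepts. The sketch below restates that logic as a standalone function; the names (ChatMessage, ModelConfig, buildO1SafePayload) are illustrative placeholders, not identifiers from this commit.

// Illustrative TypeScript sketch of the o1 handling in the diff above.
// ChatMessage, ModelConfig, and buildO1SafePayload are placeholder names.
type ChatMessage = { role: "system" | "user" | "assistant"; content: string };

interface ModelConfig {
  model: string;
  stream: boolean;
  temperature: number;
  top_p: number;
  presence_penalty: number;
  frequency_penalty: number;
}

function buildO1SafePayload(messages: ChatMessage[], config: ModelConfig) {
  const isO1 = config.model.startsWith("o1");

  // o1 rejects system messages, so they are filtered out for o1 models only.
  const kept = isO1 ? messages.filter((m) => m.role !== "system") : messages;

  // o1 also rejects streaming and non-default sampling parameters, so those
  // fields are forced to the values o1 accepts.
  return {
    messages: kept,
    model: config.model,
    stream: isO1 ? false : config.stream,
    temperature: isO1 ? 1 : config.temperature,
    top_p: isO1 ? 1 : config.top_p,
    presence_penalty: isO1 ? 0 : config.presence_penalty,
    frequency_penalty: isO1 ? 0 : config.frequency_penalty,
  };
}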


@@ -250,6 +250,8 @@ export const KnowledgeCutOffDate: Record<string, string> = {
   "gpt-4o-mini": "2023-10",
   "gpt-4o-mini-2024-07-18": "2023-10",
   "gpt-4-vision-preview": "2023-04",
+  "o1-mini": "2023-10",
+  "o1-preview": "2023-10",
   // After improvements,
   // it's now easier to add "KnowledgeCutOffDate" instead of stupid hardcoding it, as was done previously.
   "gemini-pro": "2023-12",
@@ -276,6 +278,8 @@ const openaiModels = [
   "gpt-4-turbo-2024-04-09",
   "gpt-4-1106-preview",
   "dall-e-3",
+  "o1-mini",
+  "o1-preview"
 ];
 
 const googleModels = [
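
The two constants touched above are plain lookup tables: KnowledgeCutOffDate maps a model name to its knowledge cutoff, and openaiModels is the list of models the app offers. A hypothetical consumer might resolve a cutoff with a fallback, as in the sketch below; getKnowledgeCutoff and DEFAULT_CUTOFF are assumed names for illustration, not part of this commit.

// Hypothetical lookup with a fallback; the table shape mirrors the diff above,
// but DEFAULT_CUTOFF and getKnowledgeCutoff are assumptions for illustration.
const KnowledgeCutOffDate: Record<string, string> = {
  "o1-mini": "2023-10",
  "o1-preview": "2023-10",
};

const DEFAULT_CUTOFF = "2021-09"; // assumed fallback value, not taken from the repo

function getKnowledgeCutoff(model: string): string {
  return KnowledgeCutOffDate[model] ?? DEFAULT_CUTOFF;
}

console.log(getKnowledgeCutoff("o1-preview")); // "2023-10"
console.log(getKnowledgeCutoff("some-other-model")); // "2021-09" (fallback)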