fix: remove parameter stream for o3 from the request

This commit is contained in:
AndrewS 2025-02-03 21:53:55 +01:00
parent 566406f62d
commit 9117a10b47
1 changed file with 2 additions and 1 deletion

View File

@ -196,6 +196,7 @@ export class ChatGPTApi implements LLMApi {
const isDalle3 = _isDalle3(options.config.model); const isDalle3 = _isDalle3(options.config.model);
const isO1 = options.config.model.startsWith("o1"); const isO1 = options.config.model.startsWith("o1");
const isO1Exact = options.config.model === "o1";
const isO3 = options.config.model.startsWith("o3"); const isO3 = options.config.model.startsWith("o3");
if (isDalle3) { if (isDalle3) {
const prompt = getMessageTextContent( const prompt = getMessageTextContent(
@ -225,7 +226,7 @@ export class ChatGPTApi implements LLMApi {
// O1 not support image, tools (plugin in ChatGPTNextWeb) and system, stream, logprobs, temperature, top_p, n, presence_penalty, frequency_penalty yet. // O1 not support image, tools (plugin in ChatGPTNextWeb) and system, stream, logprobs, temperature, top_p, n, presence_penalty, frequency_penalty yet.
requestPayload = { requestPayload = {
messages, messages,
stream: options.config.stream, stream: isO1Exact ? false : options.config.stream,
model: modelConfig.model, model: modelConfig.model,
temperature: !(isO1 || isO3)? modelConfig.temperature : 1, temperature: !(isO1 || isO3)? modelConfig.temperature : 1,
presence_penalty: !(isO1 || isO3) ? modelConfig.presence_penalty : 0, presence_penalty: !(isO1 || isO3) ? modelConfig.presence_penalty : 0,