fix: add max_tokens when using vision model (#4157)

This commit is contained in:
fred-bf 2024-02-27 17:28:01 +08:00 committed by GitHub
parent 44a51273be
commit 08fa22749a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
1 changed file with 10 additions and 0 deletions

View File

@ -110,6 +110,16 @@ export class ChatGPTApi implements LLMApi {
// max_tokens is intentionally omitted by default: the parameter has proven unreliable for standard chat completions, so we do not send it here.
};
// Vision models reject requests that omit max_tokens, so set it explicitly.
// Floor at 4096 so image-heavy responses are not truncated by a small
// user-configured limit.
if (visionModel) {
  // Plain assignment is equivalent to Object.defineProperty with
  // enumerable/configurable/writable all true, and far clearer.
  requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4096);
}
// Log the outgoing OpenAI request payload for debugging.
console.log("[Request] openai payload: ", requestPayload);
// Coerce to a strict boolean; a missing/undefined config.stream means non-streaming.
const shouldStream = !!options.config.stream;