fix: add max_tokens when using vision model (#4157)
parent 44a51273be
commit 08fa22749a
@@ -110,6 +110,16 @@ export class ChatGPTApi implements LLMApi {
       // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
     };
 
+    // add max_tokens to vision model
+    if (visionModel) {
+      Object.defineProperty(requestPayload, "max_tokens", {
+        enumerable: true,
+        configurable: true,
+        writable: true,
+        value: Math.max(modelConfig.max_tokens, 4096),
+      });
+    }
+
     console.log("[Request] openai payload: ", requestPayload);
 
     const shouldStream = !!options.config.stream;
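Two notes on the patch above. First, the vision preview models reportedly return very short completions when max_tokens is omitted, which appears to be the truncation this fix addresses; that rationale is inferred from the linked issue (#4157), not stated in the commit itself. Second, Object.defineProperty with enumerable, configurable, and writable all set to true creates an ordinary property, so it behaves exactly like a plain assignment; it is likely used here because the inferred TypeScript type of requestPayload does not declare max_tokens, so a direct assignment would not type-check. A minimal TypeScript sketch of the equivalent logic follows; the RequestPayload shape and function name are illustrative assumptions, not the repo's actual definitions:

// Illustrative sketch only: RequestPayload is an assumed, trimmed-down shape,
// not the actual type from app/client/platforms/openai.ts.
interface RequestPayload {
  model: string;
  temperature: number;
  max_tokens?: number; // absent by default; set only for vision models
}

// Equivalent to the Object.defineProperty call in the diff: defining a
// property with enumerable/configurable/writable all true is the same as
// plainly assigning an ordinary property.
function applyVisionMaxTokens(
  payload: RequestPayload,
  isVisionModel: boolean,
  configuredMaxTokens: number,
): RequestPayload {
  if (isVisionModel) {
    // Enforce a floor of 4096 completion tokens for vision models.
    payload.max_tokens = Math.max(configuredMaxTokens, 4096);
  }
  return payload;
}

Declaring max_tokens as optional on the payload type would allow a plain assignment and make the intent clearer than the defineProperty call, at the cost of touching the payload's type definition.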