fix: missing max_tokens in the payload when the vision model name does not contain 'vision'.

yihang3 2024-08-21 15:22:31 +08:00
parent ffe32694b0
commit 56eb9d1430
1 changed file with 1 addition and 1 deletion


@@ -190,7 +190,7 @@ export class ChatGPTApi implements LLMApi {
     };
     // add max_tokens to vision model
-    if (visionModel && modelConfig.model.includes("preview")) {
+    if (visionModel) {
       requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
     }
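
For context, a minimal TypeScript sketch of how the changed branch behaves after this commit. The surrounding payload construction and the isVisionModel helper are assumptions inferred from the hunk, not code taken from the full file:

// Assumed shape of the relevant model configuration.
interface ModelConfig {
  model: string;
  max_tokens: number;
}

// Hypothetical helper: detects vision-capable models by name; the real
// project may use a different detection list.
function isVisionModel(model: string): boolean {
  return model.includes("vision") || model.includes("gpt-4o");
}

function buildRequestPayload(modelConfig: ModelConfig) {
  const visionModel = isVisionModel(modelConfig.model);
  const requestPayload: Record<string, unknown> = {
    model: modelConfig.model,
  };

  // Before this commit, max_tokens was only set for vision models whose
  // name also contained "preview"; now it is set for every vision model.
  if (visionModel) {
    requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
  }
  return requestPayload;
}

The practical effect of dropping the extra includes("preview") check is that vision models without "preview" in their names also get a max_tokens value (at least 4000) in the request payload.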