Merge pull request #4564 from MrrDrr/gpt4v_remove_max_tokens
remove max_tokens from the official version of gpt4-turbo
commit 506c17a093
@@ -129,7 +129,7 @@ export class ChatGPTApi implements LLMApi {
     };
 
     // add max_tokens to vision model
-    if (visionModel) {
+    if (visionModel && modelConfig.model.includes("preview")) {
       requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
     }
 
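For context, a minimal TypeScript sketch of the resulting behavior, assuming simplified types and a standalone helper (buildPayload and the model strings below are illustrative, not code from the repository): after this change, max_tokens is only attached for vision-capable "preview" models, so requests for the official gpt-4-turbo omit it and rely on the API default.

// Sketch under assumed types: mirrors the new condition
// `visionModel && modelConfig.model.includes("preview")`.
interface ModelConfig {
  model: string;
  max_tokens: number;
}

function buildPayload(visionModel: boolean, modelConfig: ModelConfig) {
  const requestPayload: Record<string, unknown> = {
    model: modelConfig.model,
    // ...other chat options omitted for brevity...
  };

  // Only preview-era vision models (e.g. "gpt-4-vision-preview") still get an
  // explicit max_tokens; the official gpt-4-turbo sends none.
  if (visionModel && modelConfig.model.includes("preview")) {
    requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
  }

  return requestPayload;
}

// Example: max_tokens is set for the preview vision model, omitted for gpt-4-turbo.
console.log(buildPayload(true, { model: "gpt-4-vision-preview", max_tokens: 1000 }));
console.log(buildPayload(true, { model: "gpt-4-turbo", max_tokens: 1000 }));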