diff --git a/app/api/common.ts b/app/api/common.ts
index d7f6d63ce..b2fae6df2 100644
--- a/app/api/common.ts
+++ b/app/api/common.ts
@@ -58,7 +58,9 @@ export async function requestOpenai(req: NextRequest) {
   );

   if (isAzure) {
-    const azureApiVersion = req?.nextUrl?.searchParams?.get("api-version");
+    const azureApiVersion =
+      req?.nextUrl?.searchParams?.get("api-version") ||
+      serverConfig.azureApiVersion;
     baseUrl = baseUrl.split("/deployments").shift() as string;
     path = `${req.nextUrl.pathname.replaceAll(
       "/api/azure/",
diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts
index 7cd5ecbbc..8615172a3 100644
--- a/app/client/platforms/openai.ts
+++ b/app/client/platforms/openai.ts
@@ -146,8 +146,11 @@ export class ChatGPTApi implements LLMApi {
        // find model, and get displayName as deployName
        const { models: configModels, customModels: configCustomModels } =
          useAppConfig.getState();
-        const { defaultModel, customModels: accessCustomModels } =
-          useAccessStore.getState();
+        const {
+          defaultModel,
+          customModels: accessCustomModels,
+          useCustomConfig,
+        } = useAccessStore.getState();
        const models = collectModelsWithDefaultModel(
          configModels,
          [configCustomModels, accessCustomModels].join(","),
@@ -161,7 +164,7 @@ export class ChatGPTApi implements LLMApi {
        chatPath = this.path(
          Azure.ChatPath(
            (model?.displayName ?? model?.name) as string,
-            useAccessStore.getState().azureApiVersion,
+            useCustomConfig ? useAccessStore.getState().azureApiVersion : "",
          ),
        );
      } else {
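
For context, here is a minimal sketch (not the project's actual code) of the server-side fallback the first hunk introduces: the `api-version` query parameter takes precedence, and the server-configured value is used only when the client did not send one. `resolveAzureApiVersion` and the `ServerConfig` shape are hypothetical names used purely for illustration; in the real handler the parameters come from `req.nextUrl.searchParams` and the fallback value is `serverConfig.azureApiVersion`.

```ts
// Hypothetical illustration of the fallback logic added in app/api/common.ts.
interface ServerConfig {
  azureApiVersion?: string;
}

function resolveAzureApiVersion(
  searchParams: URLSearchParams,
  serverConfig: ServerConfig,
): string | undefined {
  // Prefer the client-supplied "api-version"; otherwise fall back to the
  // server-side config value.
  return searchParams.get("api-version") || serverConfig.azureApiVersion;
}

// Example: no query parameter present, so the server default is returned.
console.log(
  resolveAzureApiVersion(new URLSearchParams(""), {
    azureApiVersion: "2023-08-01-preview", // example value only
  }),
); // "2023-08-01-preview"
```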