mirror of https://github.com/Yidadaa/ChatGPT-Next-Web.git
synced 2025-08-08 15:46:06 +08:00

support azure deployment name
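In short: Azure requests are now routed to a per-deployment chat endpoint, with the deployment name taken from the selected model's displayName, and the provider is resolved from the per-model config (modelConfig.providerName) instead of the global access store. For orientation, the URL the new Azure branch builds looks roughly like the sketch below; Azure.ChatPath is only imported in this diff, so its body here is an assumption, and the host, deployment name, and API version are made up.

// Sketch only: mirrors how Azure.ChatPath(deployName, apiVersion) is used further down.
const azureChatPath = (deployName: string, apiVersion: string) =>
  `deployments/${deployName}/chat/completions?api-version=${apiVersion}`;

// Hypothetical values, for illustration:
const url =
  "https://my-resource.openai.azure.com/openai/" +
  azureChatPath("my-gpt4o-deploy", "2023-08-01-preview");
// -> https://my-resource.openai.azure.com/openai/deployments/my-gpt4o-deploy/chat/completions?api-version=2023-08-01-preview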
app/client/api.ts
@@ -30,6 +30,7 @@ export interface RequestMessage {
 
 export interface LLMConfig {
   model: string;
+  providerName?: string;
   temperature?: number;
   top_p?: number;
   stream?: boolean;
@@ -54,6 +55,7 @@ export interface LLMUsage {
 
 export interface LLMModel {
   name: string;
+  displayName?: string;
   available: boolean;
   provider: LLMModelProvider;
 }
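With displayName on LLMModel, an Azure entry can carry its deployment name separately from the model name; the OpenAI client further down reads it when building the request path. An illustrative entry (the provider fields follow the LLMModelProvider shape used elsewhere in the codebase, and the deployment name is invented):

// Illustrative only; "my-gpt4o-deploy" is a hypothetical Azure deployment name.
const azureModel: LLMModel = {
  name: "gpt-4o",
  displayName: "my-gpt4o-deploy", // doubles as the Azure deployment name
  available: true,
  provider: {
    id: "azure",
    providerName: "Azure",
    providerType: "azure",
  },
};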
@@ -160,10 +162,14 @@ export function getHeaders() {
     Accept: "application/json",
   };
   const modelConfig = useChatStore.getState().currentSession().mask.modelConfig;
-  const isGoogle = modelConfig.model.startsWith("gemini");
-  const isAzure = accessStore.provider === ServiceProvider.Azure;
-  const isAnthropic = accessStore.provider === ServiceProvider.Anthropic;
-  const authHeader = isAzure ? "api-key" : isAnthropic ? 'x-api-key' : "Authorization";
+  const isGoogle = modelConfig.providerName == ServiceProvider.Google;
+  const isAzure = modelConfig.providerName === ServiceProvider.Azure;
+  const isAnthropic = modelConfig.providerName === ServiceProvider.Anthropic;
+  const authHeader = isAzure
+    ? "api-key"
+    : isAnthropic
+      ? "x-api-key"
+      : "Authorization";
   const apiKey = isGoogle
     ? accessStore.googleApiKey
     : isAzure
@@ -172,7 +178,8 @@ export function getHeaders() {
         ? accessStore.anthropicApiKey
         : accessStore.openaiApiKey;
   const clientConfig = getClientConfig();
-  const makeBearer = (s: string) => `${isAzure || isAnthropic ? "" : "Bearer "}${s.trim()}`;
+  const makeBearer = (s: string) =>
+    `${isAzure || isAnthropic ? "" : "Bearer "}${s.trim()}`;
   const validString = (x: string) => x && x.length > 0;
 
   // when using google api in app, not set auth header
@@ -185,7 +192,7 @@ export function getHeaders() {
     validString(accessStore.accessCode)
   ) {
     // access_code must send with header named `Authorization`, will using in auth middleware.
-    headers['Authorization'] = makeBearer(
+    headers["Authorization"] = makeBearer(
       ACCESS_CODE_PREFIX + accessStore.accessCode,
     );
   }
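Net effect in getHeaders: the auth header is now chosen from the model's own provider, api-key for Azure, x-api-key for Anthropic, and otherwise Authorization with a Bearer prefix. A condensed, standalone restatement of that selection (store lookups replaced by plain parameters; not the actual function):

// Condensed restatement of the header selection above.
type ProviderName = "Azure" | "Anthropic" | "Google" | "OpenAI";

function pickAuthHeader(provider: ProviderName, apiKey: string) {
  const isAzure = provider === "Azure";
  const isAnthropic = provider === "Anthropic";
  const name = isAzure ? "api-key" : isAnthropic ? "x-api-key" : "Authorization";
  // Azure and Anthropic send the bare key; everything else gets a "Bearer " prefix.
  const value = `${isAzure || isAnthropic ? "" : "Bearer "}${apiKey.trim()}`;
  return { [name]: value };
}

// pickAuthHeader("Azure", "xyz")     -> { "api-key": "xyz" }
// pickAuthHeader("OpenAI", "sk-xyz") -> { Authorization: "Bearer sk-xyz" }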
app/client/platforms/openai.ts
@@ -1,13 +1,16 @@
 "use client";
+// azure and openai, using same models. so using same LLMApi.
 import {
   ApiPath,
   DEFAULT_API_HOST,
   DEFAULT_MODELS,
   OpenaiPath,
+  Azure,
   REQUEST_TIMEOUT_MS,
   ServiceProvider,
 } from "@/app/constant";
 import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
+import { collectModelsWithDefaultModel } from "@/app/utils/model";
 
 import {
   ChatOptions,
@@ -97,6 +100,15 @@ export class ChatGPTApi implements LLMApi {
     return [baseUrl, path].join("/");
   }
 
+  getBaseUrl(apiPath: string) {
+    const isApp = !!getClientConfig()?.isApp;
+    let baseUrl = isApp ? DEFAULT_API_HOST + "/proxy" + apiPath : apiPath;
+    if (baseUrl.endsWith("/")) {
+      baseUrl = baseUrl.slice(0, baseUrl.length - 1);
+    }
+    return baseUrl + "/";
+  }
+
   extractMessage(res: any) {
     return res.choices?.at(0)?.message?.content ?? "";
   }
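The new getBaseUrl normalizes its input to end in exactly one slash, and in the packaged app it prefixes the proxy host. A behavior sketch with the client config stubbed out; the concrete DEFAULT_API_HOST and ApiPath values used in the comments are assumptions:

// Behavior sketch of getBaseUrl; isApp and defaultApiHost stand in for
// getClientConfig()?.isApp and DEFAULT_API_HOST.
function getBaseUrlSketch(apiPath: string, isApp: boolean, defaultApiHost: string) {
  let baseUrl = isApp ? defaultApiHost + "/proxy" + apiPath : apiPath;
  if (baseUrl.endsWith("/")) {
    baseUrl = baseUrl.slice(0, baseUrl.length - 1);
  }
  return baseUrl + "/";
}

// Web build:     getBaseUrlSketch("/api/azure", false, "") -> "/api/azure/"
// Packaged app:  getBaseUrlSketch("/api/azure", true, "https://proxy.example.com")
//                -> "https://proxy.example.com/proxy/api/azure/"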
@@ -113,6 +125,7 @@ export class ChatGPTApi implements LLMApi {
       ...useChatStore.getState().currentSession().mask.modelConfig,
       ...{
         model: options.config.model,
+        providerName: options.config.providerName,
       },
     };
 
@@ -140,7 +153,33 @@ export class ChatGPTApi implements LLMApi {
     options.onController?.(controller);
 
     try {
-      const chatPath = this.path(OpenaiPath.ChatPath);
+      let chatPath = "";
+      if (modelConfig.providerName == ServiceProvider.Azure) {
+        // find model, and get displayName as deployName
+        const { models: configModels, customModels: configCustomModels } =
+          useAppConfig.getState();
+        const { defaultModel, customModels: accessCustomModels } =
+          useAccessStore.getState();
+
+        const models = collectModelsWithDefaultModel(
+          configModels,
+          [configCustomModels, accessCustomModels].join(","),
+          defaultModel,
+        );
+        const model = models.find(
+          (model) =>
+            model.name == modelConfig.model &&
+            model?.provider.providerName == ServiceProvider.Azure,
+        );
+        chatPath =
+          this.getBaseUrl(ApiPath.Azure) +
+          Azure.ChatPath(
+            model?.displayName ?? model.name,
+            useAccessStore.getState().azureApiVersion,
+          );
+      } else {
+        chatPath = this.getBaseUrl(ApiPath.OpenAI) + OpenaiPath.ChatPath;
+      }
       const chatPayload = {
         method: "POST",
         body: JSON.stringify(requestPayload),
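For the Azure branch, the deployment name is whatever displayName the matched model carries, falling back to the plain model name. A small worked example of that lookup, using a made-up model list shaped like the collectModelsWithDefaultModel result:

// Hypothetical model list; only the fields the lookup touches are shown.
const models = [
  { name: "gpt-4o", displayName: "my-gpt4o-deploy", provider: { providerName: "Azure" } },
  { name: "gpt-4o", displayName: "GPT-4o", provider: { providerName: "OpenAI" } },
];

const model = models.find(
  (m) => m.name === "gpt-4o" && m.provider.providerName === "Azure",
);
// Fall back to the model name if no displayName (or no Azure match) exists.
const deployName = model ? model.displayName ?? model.name : "gpt-4o";
// -> "my-gpt4o-deploy", which ends up in .../deployments/my-gpt4o-deploy/...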