mirror of https://github.com/Yidadaa/ChatGPT-Next-Web.git
chore: update
@@ -21,7 +21,7 @@ import { HunyuanApi } from "./platforms/tencent";
 import { MoonshotApi } from "./platforms/moonshot";
 import { SparkApi } from "./platforms/iflytek";
 import { XAIApi } from "./platforms/xai";
-import { GLMApi } from "./platforms/glm";
+import { ChatGLMApi } from "./platforms/glm";
 
 export const ROLES = ["system", "user", "assistant"] as const;
 export type MessageRole = (typeof ROLES)[number];
@@ -157,8 +157,8 @@ export class ClientApi {
       case ModelProvider.XAI:
         this.llm = new XAIApi();
         break;
-      case ModelProvider.GLM:
-        this.llm = new GLMApi();
+      case ModelProvider.ChatGLM:
+        this.llm = new ChatGLMApi();
         break;
       default:
         this.llm = new ChatGPTApi();
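For orientation: the hunk above edits the constructor of ClientApi, which maps a ModelProvider enum member to a concrete LLMApi implementation, and this commit renames the GLM branch to ChatGLM. Below is a minimal, self-contained sketch of that dispatch pattern, not the project's actual code: the enum members and class names are taken from the diff, while the reduced enum and the stub chat() bodies are placeholders.

// Sketch of the provider dispatch edited above (stubs only).
enum ModelProvider {
  GPT = "GPT",
  XAI = "XAI",
  ChatGLM = "ChatGLM", // renamed from GLM in this commit
}

interface LLMApi {
  chat(prompt: string): Promise<string>;
}

class ChatGPTApi implements LLMApi {
  async chat(prompt: string) {
    return "[gpt] " + prompt;
  }
}

class XAIApi implements LLMApi {
  async chat(prompt: string) {
    return "[xai] " + prompt;
  }
}

class ChatGLMApi implements LLMApi {
  async chat(prompt: string) {
    return "[chatglm] " + prompt;
  }
}

class ClientApi {
  llm: LLMApi;

  constructor(provider: ModelProvider = ModelProvider.GPT) {
    switch (provider) {
      case ModelProvider.XAI:
        this.llm = new XAIApi();
        break;
      case ModelProvider.ChatGLM:
        this.llm = new ChatGLMApi();
        break;
      default:
        this.llm = new ChatGPTApi();
    }
  }
}

// Usage: new ClientApi(ModelProvider.ChatGLM).llm is a ChatGLMApi instance.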
@@ -248,7 +248,7 @@ export function getHeaders(ignoreHeaders: boolean = false) {
   const isMoonshot = modelConfig.providerName === ServiceProvider.Moonshot;
   const isIflytek = modelConfig.providerName === ServiceProvider.Iflytek;
   const isXAI = modelConfig.providerName === ServiceProvider.XAI;
-  const isGLM = modelConfig.providerName === ServiceProvider.GLM;
+  const isChatGLM = modelConfig.providerName === ServiceProvider.ChatGLM;
   const isEnabledAccessControl = accessStore.enabledAccessControl();
   const apiKey = isGoogle
     ? accessStore.googleApiKey
@@ -264,8 +264,8 @@ export function getHeaders(ignoreHeaders: boolean = false) {
       ? accessStore.moonshotApiKey
       : isXAI
         ? accessStore.xaiApiKey
-        : isGLM
-          ? accessStore.glmApiKey
+        : isChatGLM
+          ? accessStore.chatglmApiKey
           : isIflytek
             ? accessStore.iflytekApiKey && accessStore.iflytekApiSecret
               ? accessStore.iflytekApiKey + ":" + accessStore.iflytekApiSecret
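The edit above lands in the middle of the long ternary chain in getHeaders that selects an API key from the access store, one flag per provider; the GLM branch becomes isChatGLM / chatglmApiKey, and the Iflytek branch stays special in that it joins a key and a secret with a colon. A simplified, standalone restatement of that selection follows, covering only the providers visible in this hunk; the empty-string and OpenAI fallbacks are assumptions, since the end of the chain lies outside the hunk.

// Simplified sketch of the key-selection chain; field names mirror the diff,
// the final fallbacks are assumptions.
interface AccessKeys {
  isXAI: boolean;
  isChatGLM: boolean; // renamed from isGLM in this commit
  isIflytek: boolean;
  xaiApiKey: string;
  chatglmApiKey: string; // renamed from glmApiKey
  iflytekApiKey: string;
  iflytekApiSecret: string;
  openaiApiKey: string; // assumed default fallback
}

function pickApiKey(a: AccessKeys): string {
  if (a.isXAI) return a.xaiApiKey;
  if (a.isChatGLM) return a.chatglmApiKey;
  if (a.isIflytek) {
    // Iflytek authenticates with "key:secret"; missing either yields "" (assumed).
    return a.iflytekApiKey && a.iflytekApiSecret
      ? a.iflytekApiKey + ":" + a.iflytekApiSecret
      : "";
  }
  return a.openaiApiKey; // assumed fallback
}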
@@ -281,7 +281,7 @@ export function getHeaders(ignoreHeaders: boolean = false) {
     isMoonshot,
     isIflytek,
     isXAI,
-    isGLM,
+    isChatGLM,
     apiKey,
     isEnabledAccessControl,
   };
@@ -346,8 +346,8 @@ export function getClientApi(provider: ServiceProvider): ClientApi {
       return new ClientApi(ModelProvider.Iflytek);
     case ServiceProvider.XAI:
       return new ClientApi(ModelProvider.XAI);
-    case ServiceProvider.GLM:
-      return new ClientApi(ModelProvider.GLM);
+    case ServiceProvider.ChatGLM:
+      return new ClientApi(ModelProvider.ChatGLM);
     default:
       return new ClientApi(ModelProvider.GPT);
   }
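getClientApi, edited above, translates the configuration-level ServiceProvider enum into the client-level ModelProvider enum, so the rename has to be applied to both enums and to this mapping at once. A compact standalone sketch of the mapping; only the members visible in the hunk are stubbed here, and both real enums are larger.

// Standalone sketch of the ServiceProvider -> ModelProvider mapping above.
enum ServiceProvider {
  Iflytek = "Iflytek",
  XAI = "XAI",
  ChatGLM = "ChatGLM", // renamed from GLM in this commit
  OpenAI = "OpenAI",
}

enum ModelProvider {
  Iflytek = "Iflytek",
  XAI = "XAI",
  ChatGLM = "ChatGLM",
  GPT = "GPT",
}

function toModelProvider(provider: ServiceProvider): ModelProvider {
  switch (provider) {
    case ServiceProvider.Iflytek:
      return ModelProvider.Iflytek;
    case ServiceProvider.XAI:
      return ModelProvider.XAI;
    case ServiceProvider.ChatGLM:
      return ModelProvider.ChatGLM;
    default:
      return ModelProvider.GPT;
  }
}

The hunks below appear to come from the GLM platform client module itself (the file that declares GLMApi, now ChatGLMApi), judging by the class name and the "./openai" import.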
@@ -1,5 +1,10 @@
 "use client";
-import { ApiPath, GLM_BASE_URL, GLM, REQUEST_TIMEOUT_MS } from "@/app/constant";
+import {
+  ApiPath,
+  CHATGLM_BASE_URL,
+  ChatGLM,
+  REQUEST_TIMEOUT_MS,
+} from "@/app/constant";
 import {
   useAccessStore,
   useAppConfig,
@@ -20,7 +25,7 @@ import { getMessageTextContent } from "@/app/utils";
 import { RequestPayload } from "./openai";
 import { fetch } from "@/app/utils/stream";
 
-export class GLMApi implements LLMApi {
+export class ChatGLMApi implements LLMApi {
   private disableListModels = true;
 
   path(path: string): string {
@@ -29,19 +34,19 @@ export class GLMApi {
     let baseUrl = "";
 
     if (accessStore.useCustomConfig) {
-      baseUrl = accessStore.glmUrl;
+      baseUrl = accessStore.chatglmUrl;
     }
 
     if (baseUrl.length === 0) {
       const isApp = !!getClientConfig()?.isApp;
-      const apiPath = ApiPath.GLM;
-      baseUrl = isApp ? GLM_BASE_URL : apiPath;
+      const apiPath = ApiPath.ChatGLM;
+      baseUrl = isApp ? CHATGLM_BASE_URL : apiPath;
     }
 
     if (baseUrl.endsWith("/")) {
       baseUrl = baseUrl.slice(0, baseUrl.length - 1);
     }
-    if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.GLM)) {
+    if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.ChatGLM)) {
      baseUrl = "https://" + baseUrl;
     }
 
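The hunk above rewires path(), which resolves the base URL for ChatGLM requests in a few steps: take the user-configured chatglmUrl when custom config is enabled, otherwise fall back to CHATGLM_BASE_URL in the desktop app or the ApiPath.ChatGLM proxy route on the web, then strip a trailing slash and prepend https:// when the value is neither absolute nor a proxy path. Below is a standalone sketch of that resolution as a pure function; the two constant values are illustrative assumptions, not taken from this diff, and the real method also consults accessStore.useCustomConfig.

// Sketch of the base-URL resolution in path() above.
const CHATGLM_BASE_URL = "https://open.bigmodel.cn"; // assumed placeholder
const CHATGLM_API_PATH = "/api/chatglm"; // assumed placeholder for ApiPath.ChatGLM

function resolveBaseUrl(customUrl: string, isApp: boolean): string {
  // Prefer the user-configured URL; otherwise use the vendor endpoint (app)
  // or the local proxy route (web).
  let baseUrl = customUrl || (isApp ? CHATGLM_BASE_URL : CHATGLM_API_PATH);

  // Drop a single trailing slash.
  if (baseUrl.endsWith("/")) {
    baseUrl = baseUrl.slice(0, baseUrl.length - 1);
  }

  // Add a scheme unless the URL is already absolute or is the proxy path.
  if (!baseUrl.startsWith("http") && !baseUrl.startsWith(CHATGLM_API_PATH)) {
    baseUrl = "https://" + baseUrl;
  }
  return baseUrl;
}

// resolveBaseUrl("", true)  -> "https://open.bigmodel.cn"
// resolveBaseUrl("", false) -> "/api/chatglm"
// resolveBaseUrl("my-proxy.example.com/", false) -> "https://my-proxy.example.com"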
@@ -91,7 +96,7 @@ export class GLMApi {
     options.onController?.(controller);
 
     try {
-      const chatPath = this.path(GLM.ChatPath);
+      const chatPath = this.path(ChatGLM.ChatPath);
       const chatPayload = {
         method: "POST",
         body: JSON.stringify(requestPayload),