diff --git a/app/api/[provider]/[...path]/route.ts b/app/api/[provider]/[...path]/route.ts
index 78836cc52..3017fd371 100644
--- a/app/api/[provider]/[...path]/route.ts
+++ b/app/api/[provider]/[...path]/route.ts
@@ -11,7 +11,7 @@ import { handle as moonshotHandler } from "../../moonshot";
import { handle as stabilityHandler } from "../../stability";
import { handle as iflytekHandler } from "../../iflytek";
import { handle as xaiHandler } from "../../xai";
-import { handle as glmHandler } from "../../glm";
+import { handle as chatglmHandler } from "../../glm";
import { handle as proxyHandler } from "../../proxy";
async function handle(
@@ -42,8 +42,8 @@ async function handle(
return iflytekHandler(req, { params });
case ApiPath.XAI:
return xaiHandler(req, { params });
- case ApiPath.GLM:
- return glmHandler(req, { params });
+ case ApiPath.ChatGLM:
+ return chatglmHandler(req, { params });
case ApiPath.OpenAI:
return openaiHandler(req, { params });
default:
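
For context, a minimal sketch (not part of the patch): ApiPath.ChatGLM becomes "/api/chatglm" (see the app/constant.ts hunk further down), so a client call to the ChatGLM chat endpoint is now forwarded to chatglmHandler by the catch-all route above. The snippet only illustrates the prefix relationship between the enum value and a request path; the route itself dispatches via the switch shown in the hunk.

    import { ApiPath } from "@/app/constant";

    // A web-client request to the ChatGLM chat completions endpoint:
    const pathname = "/api/chatglm/api/paas/v4/chat/completions";
    // The renamed case above is the one that handles paths under this prefix.
    console.log(pathname.startsWith(ApiPath.ChatGLM)); // true
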
diff --git a/app/api/auth.ts b/app/api/auth.ts
index db920fc28..6703b64bd 100644
--- a/app/api/auth.ts
+++ b/app/api/auth.ts
@@ -95,8 +95,8 @@ export function auth(req: NextRequest, modelProvider: ModelProvider) {
case ModelProvider.XAI:
systemApiKey = serverConfig.xaiApiKey;
break;
- case ModelProvider.GLM:
- systemApiKey = serverConfig.glmApiKey;
+ case ModelProvider.ChatGLM:
+ systemApiKey = serverConfig.chatglmApiKey;
break;
case ModelProvider.GPT:
default:
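
A minimal sketch (not part of the patch) of the fallback this hunk renames: when a request carries no user-supplied key, auth() injects the server-side key, which for ChatGLM is now read from CHATGLM_API_KEY via serverConfig.chatglmApiKey. The helper name below is hypothetical and only mirrors the relevant branch of the switch.

    import { ModelProvider } from "@/app/constant";
    import { getServerSideConfig } from "@/app/config/server";

    // Hypothetical helper mirroring the ChatGLM branch of the switch in auth():
    function systemApiKeyFor(provider: ModelProvider): string | undefined {
      const serverConfig = getServerSideConfig();
      // Other providers follow the same pattern with their own config fields.
      return provider === ModelProvider.ChatGLM
        ? serverConfig.chatglmApiKey
        : undefined;
    }

    console.log(Boolean(systemApiKeyFor(ModelProvider.ChatGLM)));
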
diff --git a/app/api/glm.ts b/app/api/glm.ts
index d40c4b6a8..ea7a766bd 100644
--- a/app/api/glm.ts
+++ b/app/api/glm.ts
@@ -1,6 +1,6 @@
import { getServerSideConfig } from "@/app/config/server";
import {
- GLM_BASE_URL,
+ CHATGLM_BASE_URL,
ApiPath,
ModelProvider,
ServiceProvider,
@@ -42,9 +42,9 @@ async function request(req: NextRequest) {
const controller = new AbortController();
// alibaba use base url or just remove the path
- let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.GLM, "");
+ let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.ChatGLM, "");
- let baseUrl = serverConfig.glmUrl || GLM_BASE_URL;
+ let baseUrl = serverConfig.chatglmUrl || CHATGLM_BASE_URL;
if (!baseUrl.startsWith("http")) {
baseUrl = `https://${baseUrl}`;
@@ -92,7 +92,7 @@ async function request(req: NextRequest) {
isModelAvailableInServer(
serverConfig.customModels,
jsonBody?.model as string,
- ServiceProvider.GLM as string,
+ ServiceProvider.ChatGLM as string,
)
) {
return NextResponse.json(
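
A minimal sketch (not part of the patch) of the URL rewrite this proxy route performs with the renamed constants: the /api/chatglm prefix is stripped from the incoming pathname and the remainder is appended to CHATGLM_URL, falling back to the default open.bigmodel.cn base.

    import { ApiPath, CHATGLM_BASE_URL } from "@/app/constant";

    const incoming = "/api/chatglm/api/paas/v4/chat/completions";
    const path = incoming.replaceAll(ApiPath.ChatGLM, ""); // "/api/paas/v4/chat/completions"
    const baseUrl = process.env.CHATGLM_URL || CHATGLM_BASE_URL;
    console.log(`${baseUrl}${path}`); // e.g. https://open.bigmodel.cn/api/paas/v4/chat/completions
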
diff --git a/app/client/api.ts b/app/client/api.ts
index 4082d085c..8fecf841f 100644
--- a/app/client/api.ts
+++ b/app/client/api.ts
@@ -21,7 +21,7 @@ import { HunyuanApi } from "./platforms/tencent";
import { MoonshotApi } from "./platforms/moonshot";
import { SparkApi } from "./platforms/iflytek";
import { XAIApi } from "./platforms/xai";
-import { GLMApi } from "./platforms/glm";
+import { ChatGLMApi } from "./platforms/glm";
export const ROLES = ["system", "user", "assistant"] as const;
export type MessageRole = (typeof ROLES)[number];
@@ -157,8 +157,8 @@ export class ClientApi {
case ModelProvider.XAI:
this.llm = new XAIApi();
break;
- case ModelProvider.GLM:
- this.llm = new GLMApi();
+ case ModelProvider.ChatGLM:
+ this.llm = new ChatGLMApi();
break;
default:
this.llm = new ChatGPTApi();
@@ -248,7 +248,7 @@ export function getHeaders(ignoreHeaders: boolean = false) {
const isMoonshot = modelConfig.providerName === ServiceProvider.Moonshot;
const isIflytek = modelConfig.providerName === ServiceProvider.Iflytek;
const isXAI = modelConfig.providerName === ServiceProvider.XAI;
- const isGLM = modelConfig.providerName === ServiceProvider.GLM;
+ const isChatGLM = modelConfig.providerName === ServiceProvider.ChatGLM;
const isEnabledAccessControl = accessStore.enabledAccessControl();
const apiKey = isGoogle
? accessStore.googleApiKey
@@ -264,8 +264,8 @@ export function getHeaders(ignoreHeaders: boolean = false) {
? accessStore.moonshotApiKey
: isXAI
? accessStore.xaiApiKey
- : isGLM
- ? accessStore.glmApiKey
+ : isChatGLM
+ ? accessStore.chatglmApiKey
: isIflytek
? accessStore.iflytekApiKey && accessStore.iflytekApiSecret
? accessStore.iflytekApiKey + ":" + accessStore.iflytekApiSecret
@@ -281,7 +281,7 @@ export function getHeaders(ignoreHeaders: boolean = false) {
isMoonshot,
isIflytek,
isXAI,
- isGLM,
+ isChatGLM,
apiKey,
isEnabledAccessControl,
};
@@ -346,8 +346,8 @@ export function getClientApi(provider: ServiceProvider): ClientApi {
return new ClientApi(ModelProvider.Iflytek);
case ServiceProvider.XAI:
return new ClientApi(ModelProvider.XAI);
- case ServiceProvider.GLM:
- return new ClientApi(ModelProvider.GLM);
+ case ServiceProvider.ChatGLM:
+ return new ClientApi(ModelProvider.ChatGLM);
default:
return new ClientApi(ModelProvider.GPT);
}
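
A minimal usage sketch (not part of the patch) of the renamed client wiring: resolving a provider now goes ServiceProvider.ChatGLM → ModelProvider.ChatGLM → ChatGLMApi.

    import { getClientApi } from "@/app/client/api";
    import { ServiceProvider } from "@/app/constant";

    // ClientApi picks ChatGLMApi as its llm backend via the switch above;
    // chat requests then go through ChatGLMApi.path() and .chat().
    const api = getClientApi(ServiceProvider.ChatGLM);
    console.log(api.llm instanceof Object); // placeholder check; api.llm is a ChatGLMApi
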
diff --git a/app/client/platforms/glm.ts b/app/client/platforms/glm.ts
index b88272ae1..10696ee82 100644
--- a/app/client/platforms/glm.ts
+++ b/app/client/platforms/glm.ts
@@ -1,5 +1,10 @@
"use client";
-import { ApiPath, GLM_BASE_URL, GLM, REQUEST_TIMEOUT_MS } from "@/app/constant";
+import {
+ ApiPath,
+ CHATGLM_BASE_URL,
+ ChatGLM,
+ REQUEST_TIMEOUT_MS,
+} from "@/app/constant";
import {
useAccessStore,
useAppConfig,
@@ -20,7 +25,7 @@ import { getMessageTextContent } from "@/app/utils";
import { RequestPayload } from "./openai";
import { fetch } from "@/app/utils/stream";
-export class GLMApi implements LLMApi {
+export class ChatGLMApi implements LLMApi {
private disableListModels = true;
path(path: string): string {
@@ -29,19 +34,19 @@ export class GLMApi implements LLMApi {
let baseUrl = "";
if (accessStore.useCustomConfig) {
- baseUrl = accessStore.glmUrl;
+ baseUrl = accessStore.chatglmUrl;
}
if (baseUrl.length === 0) {
const isApp = !!getClientConfig()?.isApp;
- const apiPath = ApiPath.GLM;
- baseUrl = isApp ? GLM_BASE_URL : apiPath;
+ const apiPath = ApiPath.ChatGLM;
+ baseUrl = isApp ? CHATGLM_BASE_URL : apiPath;
}
if (baseUrl.endsWith("/")) {
baseUrl = baseUrl.slice(0, baseUrl.length - 1);
}
- if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.GLM)) {
+ if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.ChatGLM)) {
baseUrl = "https://" + baseUrl;
}
@@ -91,7 +96,7 @@ export class GLMApi implements LLMApi {
options.onController?.(controller);
try {
- const chatPath = this.path(GLM.ChatPath);
+ const chatPath = this.path(ChatGLM.ChatPath);
const chatPayload = {
method: "POST",
body: JSON.stringify(requestPayload),
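
For reference, the renamed constants this client resolves against (a sketch, not part of the patch); whether path() prefixes the app-build base URL or the web proxy path depends on getClientConfig()?.isApp, exactly as in the hunk above.

    import { ApiPath, CHATGLM_BASE_URL, ChatGLM } from "@/app/constant";

    console.log(CHATGLM_BASE_URL); // "https://open.bigmodel.cn"        (app builds, direct)
    console.log(ApiPath.ChatGLM);  // "/api/chatglm"                    (web builds, proxied)
    console.log(ChatGLM.ChatPath); // "/api/paas/v4/chat/completions"   (appended by path())
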
diff --git a/app/components/settings.tsx b/app/components/settings.tsx
index e5859e716..e2666b551 100644
--- a/app/components/settings.tsx
+++ b/app/components/settings.tsx
@@ -72,7 +72,7 @@ import {
Stability,
Iflytek,
SAAS_CHAT_URL,
- GLM,
+ ChatGLM,
} from "../constant";
import { Prompt, SearchService, usePromptStore } from "../store/prompt";
import { ErrorBoundary } from "./error";
@@ -1235,38 +1235,40 @@ export function Settings() {
     </>
   );
 
-  const glmConfigComponent = accessStore.provider === ServiceProvider.GLM && (
+  const chatglmConfigComponent = accessStore.provider ===
+    ServiceProvider.ChatGLM && (
     <>
       <ListItem
-        title={Locale.Settings.Access.GLM.Endpoint.Title}
+        title={Locale.Settings.Access.ChatGLM.Endpoint.Title}
         subTitle={
-          Locale.Settings.Access.GLM.Endpoint.SubTitle + GLM.ExampleEndpoint
+          Locale.Settings.Access.ChatGLM.Endpoint.SubTitle +
+          ChatGLM.ExampleEndpoint
         }
       >
         <input
-          aria-label={Locale.Settings.Access.GLM.Endpoint.Title}
+          aria-label={Locale.Settings.Access.ChatGLM.Endpoint.Title}
           type="text"
-          value={accessStore.glmUrl}
-          placeholder={GLM.ExampleEndpoint}
+          value={accessStore.chatglmUrl}
+          placeholder={ChatGLM.ExampleEndpoint}
           onChange={(e) =>
             accessStore.update(
-              (access) => (access.glmUrl = e.currentTarget.value),
+              (access) => (access.chatglmUrl = e.currentTarget.value),
             )
           }
         ></input>
       </ListItem>
       <ListItem
-        title={Locale.Settings.Access.GLM.ApiKey.Title}
-        subTitle={Locale.Settings.Access.GLM.ApiKey.SubTitle}
+        title={Locale.Settings.Access.ChatGLM.ApiKey.Title}
+        subTitle={Locale.Settings.Access.ChatGLM.ApiKey.SubTitle}
       >
         <PasswordInput
-          aria-label={Locale.Settings.Access.GLM.ApiKey.Title}
-          value={accessStore.glmApiKey}
+          aria-label={Locale.Settings.Access.ChatGLM.ApiKey.Title}
+          value={accessStore.chatglmApiKey}
           type="text"
-          placeholder={Locale.Settings.Access.GLM.ApiKey.Placeholder}
+          placeholder={Locale.Settings.Access.ChatGLM.ApiKey.Placeholder}
          onChange={(e) => {
             accessStore.update(
-              (access) => (access.glmApiKey = e.currentTarget.value),
+              (access) => (access.chatglmApiKey = e.currentTarget.value),
             );
           }}
         />
@@ -1733,7 +1735,7 @@ export function Settings() {
{stabilityConfigComponent}
{lflytekConfigComponent}
{XAIConfigComponent}
- {glmConfigComponent}
+ {chatglmConfigComponent}
               </>
             )}
           </>
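
The inputs above write straight into the renamed store fields; a minimal sketch (not part of the patch) of that round trip, assuming "Custom Endpoint" is enabled so ChatGLMApi.path() reads chatglmUrl back when building request URLs.

    import { useAccessStore } from "@/app/store/access";

    // What the endpoint <input> onChange handler effectively does:
    useAccessStore.getState().update((a) => (a.chatglmUrl = "https://open.bigmodel.cn"));
    useAccessStore.getState().update((a) => (a.useCustomConfig = true));
    // ChatGLMApi.path() prefers this value over the defaults when useCustomConfig is set.
    console.log(useAccessStore.getState().chatglmUrl);
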
diff --git a/app/config/server.ts b/app/config/server.ts
index b9a68ce4d..485f950da 100644
--- a/app/config/server.ts
+++ b/app/config/server.ts
@@ -75,9 +75,9 @@ declare global {
XAI_URL?: string;
XAI_API_KEY?: string;
- // glm only
- GLM_URL?: string;
- GLM_API_KEY?: string;
+ // chatglm only
+ CHATGLM_URL?: string;
+ CHATGLM_API_KEY?: string;
// custom template for preprocessing user input
DEFAULT_INPUT_TEMPLATE?: string;
@@ -155,7 +155,7 @@ export const getServerSideConfig = () => {
const isMoonshot = !!process.env.MOONSHOT_API_KEY;
const isIflytek = !!process.env.IFLYTEK_API_KEY;
const isXAI = !!process.env.XAI_API_KEY;
- const isGLM = !!process.env.GLM_API_KEY;
+ const isChatGLM = !!process.env.CHATGLM_API_KEY;
// const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? "";
// const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
// const randomIndex = Math.floor(Math.random() * apiKeys.length);
@@ -222,9 +222,9 @@ export const getServerSideConfig = () => {
xaiUrl: process.env.XAI_URL,
xaiApiKey: getApiKey(process.env.XAI_API_KEY),
- isGLM,
- glmUrl: process.env.GLM_URL,
- glmApiKey: getApiKey(process.env.GLM_API_KEY),
+ isChatGLM,
+ chatglmUrl: process.env.CHATGLM_URL,
+ chatglmApiKey: getApiKey(process.env.CHATGLM_API_KEY),
cloudflareAccountId: process.env.CLOUDFLARE_ACCOUNT_ID,
cloudflareKVNamespaceId: process.env.CLOUDFLARE_KV_NAMESPACE_ID,
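
Deployments that previously set GLM_URL / GLM_API_KEY need to switch to the renamed variables; the old names are no longer read anywhere in this patch. A minimal check (not part of the patch), assuming the variables are set in the server environment:

    import { getServerSideConfig } from "@/app/config/server";

    // Expects e.g. CHATGLM_API_KEY=<your bigmodel key> and, optionally,
    // CHATGLM_URL=https://open.bigmodel.cn in the deployment environment.
    const serverConfig = getServerSideConfig();
    console.log(serverConfig.isChatGLM, serverConfig.chatglmUrl);
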
diff --git a/app/constant.ts b/app/constant.ts
index b8b25b7ab..1a84e5c84 100644
--- a/app/constant.ts
+++ b/app/constant.ts
@@ -30,7 +30,7 @@ export const IFLYTEK_BASE_URL = "https://spark-api-open.xf-yun.com";
export const XAI_BASE_URL = "https://api.x.ai";
-export const GLM_BASE_URL = "https://open.bigmodel.cn";
+export const CHATGLM_BASE_URL = "https://open.bigmodel.cn";
export const CACHE_URL_PREFIX = "/api/cache";
export const UPLOAD_URL = `${CACHE_URL_PREFIX}/upload`;
@@ -64,7 +64,7 @@ export enum ApiPath {
Stability = "/api/stability",
Artifacts = "/api/artifacts",
XAI = "/api/xai",
- GLM = "/api/glm",
+ ChatGLM = "/api/chatglm",
}
export enum SlotID {
@@ -118,7 +118,7 @@ export enum ServiceProvider {
Stability = "Stability",
Iflytek = "Iflytek",
XAI = "XAI",
- GLM = "ChatGLM",
+ ChatGLM = "ChatGLM",
}
// Google API safety settings, see https://ai.google.dev/gemini-api/docs/safety-settings
@@ -142,7 +142,7 @@ export enum ModelProvider {
Moonshot = "Moonshot",
Iflytek = "Iflytek",
XAI = "XAI",
- GLM = "ChatGLM",
+ ChatGLM = "ChatGLM",
}
export const Stability = {
@@ -230,8 +230,8 @@ export const XAI = {
ChatPath: "v1/chat/completions",
};
-export const GLM = {
- ExampleEndpoint: GLM_BASE_URL,
+export const ChatGLM = {
+ ExampleEndpoint: CHATGLM_BASE_URL,
ChatPath: "/api/paas/v4/chat/completions",
};
@@ -386,7 +386,7 @@ const iflytekModels = [
const xAIModes = ["grok-beta"];
-const glmModels = [
+const chatglmModels = [
"glm-4-plus",
"glm-4-0520",
"glm-4",
@@ -520,7 +520,7 @@ export const DEFAULT_MODELS = [
sorted: 11,
},
})),
- ...glmModels.map((name) => ({
+ ...chatglmModels.map((name) => ({
name,
available: true,
sorted: seq++,
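
A minimal sketch (not part of the patch) showing that the rename only touches the array identifier: the registered model ids ("glm-4-plus", "glm-4-0520", "glm-4", ...) are unchanged, so existing model selections keep resolving.

    import { DEFAULT_MODELS } from "@/app/constant";

    // The chatglmModels entries are spread into DEFAULT_MODELS just as before the rename.
    const glmNames = DEFAULT_MODELS.filter((m) => m.name.startsWith("glm-")).map((m) => m.name);
    console.log(glmNames); // ["glm-4-plus", "glm-4-0520", "glm-4", ...]
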
diff --git a/app/locales/cn.ts b/app/locales/cn.ts
index 92aaf6228..9712593c6 100644
--- a/app/locales/cn.ts
+++ b/app/locales/cn.ts
@@ -473,11 +473,11 @@ const cn = {
SubTitle: "样例:",
},
},
- GLM: {
+ ChatGLM: {
ApiKey: {
Title: "接口密钥",
- SubTitle: "使用自定义 GLM API Key",
- Placeholder: "GLM API Key",
+ SubTitle: "使用自定义 ChatGLM API Key",
+ Placeholder: "ChatGLM API Key",
},
Endpoint: {
Title: "接口地址",
diff --git a/app/locales/en.ts b/app/locales/en.ts
index d691925c4..ac8d3aed2 100644
--- a/app/locales/en.ts
+++ b/app/locales/en.ts
@@ -457,11 +457,11 @@ const en: LocaleType = {
SubTitle: "Example: ",
},
},
- GLM: {
+ ChatGLM: {
ApiKey: {
- Title: "GLM API Key",
- SubTitle: "Use a custom GLM API Key",
- Placeholder: "GLM API Key",
+ Title: "ChatGLM API Key",
+ SubTitle: "Use a custom ChatGLM API Key",
+ Placeholder: "ChatGLM API Key",
},
Endpoint: {
Title: "Endpoint Address",
diff --git a/app/store/access.ts b/app/store/access.ts
index 9cc420fdf..3b0e6357b 100644
--- a/app/store/access.ts
+++ b/app/store/access.ts
@@ -14,7 +14,7 @@ import {
STABILITY_BASE_URL,
IFLYTEK_BASE_URL,
XAI_BASE_URL,
- GLM_BASE_URL,
+ CHATGLM_BASE_URL,
} from "../constant";
import { getHeaders } from "../client/api";
import { getClientConfig } from "../config/client";
@@ -48,7 +48,7 @@ const DEFAULT_IFLYTEK_URL = isApp ? IFLYTEK_BASE_URL : ApiPath.Iflytek;
const DEFAULT_XAI_URL = isApp ? XAI_BASE_URL : ApiPath.XAI;
-const DEFAULT_GLM_URL = isApp ? GLM_BASE_URL : ApiPath.GLM;
+const DEFAULT_CHATGLM_URL = isApp ? CHATGLM_BASE_URL : ApiPath.ChatGLM;
const DEFAULT_ACCESS_STATE = {
accessCode: "",
@@ -111,9 +111,9 @@ const DEFAULT_ACCESS_STATE = {
xaiUrl: DEFAULT_XAI_URL,
xaiApiKey: "",
- // glm
- glmUrl: DEFAULT_GLM_URL,
- glmApiKey: "",
+ // chatglm
+ chatglmUrl: DEFAULT_CHATGLM_URL,
+ chatglmApiKey: "",
// server config
needCode: true,
@@ -187,8 +187,8 @@ export const useAccessStore = createPersistStore(
return ensure(get(), ["xaiApiKey"]);
},
- isValidGLM() {
- return ensure(get(), ["glmApiKey"]);
+ isValidChatGLM() {
+ return ensure(get(), ["chatglmApiKey"]);
},
isAuthorized() {
@@ -207,7 +207,7 @@ export const useAccessStore = createPersistStore(
this.isValidMoonshot() ||
this.isValidIflytek() ||
this.isValidXAI() ||
- this.isValidGLM() ||
+ this.isValidChatGLM() ||
!this.enabledAccessControl() ||
(this.enabledAccessControl() && ensure(get(), ["accessCode"]))
);
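
A minimal sketch (not part of the patch) of the renamed validity check: a key stored under chatglmApiKey now satisfies isValidChatGLM(), and therefore isAuthorized(), the same way the other provider keys do.

    import { useAccessStore } from "@/app/store/access";

    const access = useAccessStore.getState();
    access.update((a) => (a.chatglmApiKey = "test-key"));
    console.log(access.isValidChatGLM()); // true
    console.log(access.isAuthorized());   // true
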
diff --git a/app/utils.ts b/app/utils.ts
index 91f11c0c2..c444f8ef4 100644
--- a/app/utils.ts
+++ b/app/utils.ts
@@ -279,7 +279,7 @@ export function showPlugins(provider: ServiceProvider, model: string) {
provider == ServiceProvider.OpenAI ||
provider == ServiceProvider.Azure ||
provider == ServiceProvider.Moonshot ||
- provider == ServiceProvider.GLM
+ provider == ServiceProvider.ChatGLM
) {
return true;
}
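
Finally, a minimal usage sketch (not part of the patch): with the provider check renamed, plugin support is reported for ChatGLM models just as it is for OpenAI, Azure, and Moonshot.

    import { showPlugins } from "@/app/utils";
    import { ServiceProvider } from "@/app/constant";

    console.log(showPlugins(ServiceProvider.ChatGLM, "glm-4-plus")); // true
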