From e791cd441d544a18126ddb825651d0e6274020e9 Mon Sep 17 00:00:00 2001
From: lloydzhou
Date: Wed, 23 Oct 2024 11:55:25 +0800
Subject: [PATCH] add xai

---
 app/api/auth.ts             | 2 +-
 app/client/platforms/xai.ts | 4 +---
 app/store/access.ts         | 2 +-
 3 files changed, 3 insertions(+), 5 deletions(-)

diff --git a/app/api/auth.ts b/app/api/auth.ts
index fb147cf51..d4ac66a11 100644
--- a/app/api/auth.ts
+++ b/app/api/auth.ts
@@ -105,7 +105,7 @@ export function auth(req: NextRequest, modelProvider: ModelProvider) {
     }
 
     if (systemApiKey) {
-      console.log("[Auth] use system api key", systemApiKey);
+      console.log("[Auth] use system api key");
       req.headers.set("Authorization", `Bearer ${systemApiKey}`);
     } else {
       console.log("[Auth] admin did not provide an api key");
diff --git a/app/client/platforms/xai.ts b/app/client/platforms/xai.ts
index 69f80e9fc..deb74e66c 100644
--- a/app/client/platforms/xai.ts
+++ b/app/client/platforms/xai.ts
@@ -83,11 +83,9 @@ export class XAIApi implements LLMApi {
       presence_penalty: modelConfig.presence_penalty,
       frequency_penalty: modelConfig.frequency_penalty,
       top_p: modelConfig.top_p,
-      // max_tokens: Math.max(modelConfig.max_tokens, 1024),
-      // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
     };
 
-    console.log("[Request] openai payload: ", requestPayload);
+    console.log("[Request] xai payload: ", requestPayload);
 
     const shouldStream = !!options.config.stream;
     const controller = new AbortController();
diff --git a/app/store/access.ts b/app/store/access.ts
index 1a27deb1c..b3d412a2d 100644
--- a/app/store/access.ts
+++ b/app/store/access.ts
@@ -104,7 +104,7 @@ const DEFAULT_ACCESS_STATE = {
   iflytekApiKey: "",
   iflytekApiSecret: "",
 
-  // moonshot
+  // xai
   xaiUrl: DEFAULT_XAI_URL,
   xaiApiKey: "",
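
Note: for reference, below is a minimal TypeScript sketch of what the xai.ts chat payload construction looks like once this patch is applied. The helper name, interfaces, and field shapes are illustrative assumptions for this note, not the upstream definitions.

// Hypothetical, simplified sketch of the payload construction in
// app/client/platforms/xai.ts after this patch; types and names are assumed.

interface ModelConfig {
  model: string;
  temperature: number;
  presence_penalty: number;
  frequency_penalty: number;
  top_p: number;
}

interface RequestMessage {
  role: "system" | "user" | "assistant";
  content: string;
}

function buildXaiChatPayload(
  messages: RequestMessage[],
  modelConfig: ModelConfig,
  stream: boolean,
) {
  const requestPayload = {
    messages,
    stream,
    model: modelConfig.model,
    temperature: modelConfig.temperature,
    presence_penalty: modelConfig.presence_penalty,
    frequency_penalty: modelConfig.frequency_penalty,
    top_p: modelConfig.top_p,
    // max_tokens is deliberately not sent, matching the hunk above.
  };

  // Log label reads "xai" (previously "openai"); the payload is safe to log,
  // unlike the system API key value removed from the auth.ts log.
  console.log("[Request] xai payload: ", requestPayload);

  return requestPayload;
}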