Merge remote-tracking branch 'upstream/main'
commit 820fb4a748
@@ -1,12 +1,12 @@
 import { NextRequest, NextResponse } from "next/server";
-import { STORAGE_KEY, internalWhiteWebDavEndpoints } from "../../../constant";
+import { STORAGE_KEY, internalAllowedWebDavEndpoints } from "../../../constant";
 import { getServerSideConfig } from "@/app/config/server";

 const config = getServerSideConfig();

-const mergedWhiteWebDavEndpoints = [
-  ...internalWhiteWebDavEndpoints,
-  ...config.whiteWebDevEndpoints,
+const mergedAllowedWebDavEndpoints = [
+  ...internalAllowedWebDavEndpoints,
+  ...config.allowedWebDevEndpoints,
 ].filter((domain) => Boolean(domain.trim()));

 async function handle(
@@ -24,7 +24,9 @@ async function handle(

   // Validate the endpoint to prevent potential SSRF attacks
   if (
-    !mergedWhiteWebDavEndpoints.some((white) => endpoint?.startsWith(white))
+    !mergedAllowedWebDavEndpoints.some((allowedEndpoint) =>
+      endpoint?.startsWith(allowedEndpoint),
+    )
   ) {
     return NextResponse.json(
       {
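
Note on the hunk above (apparently the WebDAV proxy route): the renamed check still rejects any request whose target endpoint does not start with one of the merged allowlist entries, which is what blocks SSRF attempts against arbitrary hosts. A minimal self-contained sketch of the same prefix check, with one made-up endpoint for illustration:

    // Hypothetical allowlist; the real one merges internalAllowedWebDavEndpoints
    // with config.allowedWebDevEndpoints.
    const allowed = [
      "https://dav.jianguoyun.com/dav/",
      "https://dav.example-corp.com/", // placeholder entry
    ].filter((domain) => Boolean(domain.trim()));

    function isAllowedEndpoint(endpoint?: string): boolean {
      // Same shape as the merged check: the endpoint must start with an allowed prefix.
      return allowed.some((allowedEndpoint) => endpoint?.startsWith(allowedEndpoint) ?? false);
    }

    isAllowedEndpoint("https://dav.jianguoyun.com/dav/backup/"); // true
    isAllowedEndpoint("http://169.254.169.254/latest/meta-data"); // false, rejected
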
@@ -181,6 +181,13 @@ export class ClaudeApi implements LLMApi {
       };
     });

+    if (prompt[0]?.role === "assistant") {
+      prompt.unshift({
+        role: "user",
+        content: ";",
+      });
+    }
+
     const requestBody: AnthropicChatRequest = {
       messages: prompt,
       stream: shouldStream,
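
Context for the block inserted above: Anthropic's Messages API expects the first message in a conversation to come from the user, so the change prepends a minimal placeholder user turn whenever the assembled history starts with an assistant message. A hedged sketch of the same normalization on a simplified message type (SimpleMessage is a stand-in, not the project's actual RequestMessage type):

    // Simplified stand-in type, for illustration only.
    type SimpleMessage = { role: "user" | "assistant" | "system"; content: string };

    function ensureUserFirst(prompt: SimpleMessage[]): SimpleMessage[] {
      // Mirror the diff: prepend a placeholder user turn when the history starts with the assistant.
      if (prompt[0]?.role === "assistant") {
        prompt.unshift({ role: "user", content: ";" });
      }
      return prompt;
    }

    ensureUserFirst([{ role: "assistant", content: "Hello!" }]);
    // => [{ role: "user", content: ";" }, { role: "assistant", content: "Hello!" }]
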
@@ -1256,6 +1256,7 @@ function _Chat() {
           if (payload.url) {
             accessStore.update((access) => (access.openaiUrl = payload.url!));
           }
+          accessStore.update((access) => (access.useCustomConfig = true));
         });
       }
     } catch {
@@ -51,6 +51,22 @@ const ACCESS_CODES = (function getAccessCodes(): Set<string> {
   }
 })();

+function getApiKey(keys?: string) {
+  const apiKeyEnvVar = keys ?? "";
+  const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
+  const randomIndex = Math.floor(Math.random() * apiKeys.length);
+  const apiKey = apiKeys[randomIndex];
+  if (apiKey) {
+    console.log(
+      `[Server Config] using ${randomIndex + 1} of ${
+        apiKeys.length
+      } api key - ${apiKey}`,
+    );
+  }
+
+  return apiKey;
+}
+
 export const getServerSideConfig = () => {
   if (typeof process === "undefined") {
     throw Error(
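
The new getApiKey helper above generalizes the "pick one key at random from a comma-separated list" behavior that was previously inlined for the OpenAI key only, so the same rotation now also applies to the Azure, Google, and Anthropic keys further down. A hedged usage sketch with placeholder values:

    // Placeholder keys, not real credentials.
    const key = getApiKey("sk-key-one,sk-key-two,sk-key-three");
    // Picks one of the three at random, logs e.g.
    // "[Server Config] using 2 of 3 api key - sk-key-two", and returns it.

    const missing = getApiKey(undefined);
    // With no value configured, splitting "" yields [""], nothing is logged,
    // and an empty string is returned.
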
@@ -74,34 +90,34 @@ export const getServerSideConfig = () => {
   const isGoogle = !!process.env.GOOGLE_API_KEY;
   const isAnthropic = !!process.env.ANTHROPIC_API_KEY;

-  const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? "";
-  const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
-  const randomIndex = Math.floor(Math.random() * apiKeys.length);
-  const apiKey = apiKeys[randomIndex];
-  console.log(
-    `[Server Config] using ${randomIndex + 1} of ${apiKeys.length} api key`,
-  );
+  // const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? "";
+  // const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
+  // const randomIndex = Math.floor(Math.random() * apiKeys.length);
+  // const apiKey = apiKeys[randomIndex];
+  // console.log(
+  //   `[Server Config] using ${randomIndex + 1} of ${apiKeys.length} api key`,
+  // );

-  const whiteWebDevEndpoints = (process.env.WHITE_WEBDEV_ENDPOINTS ?? "").split(
-    ",",
-  );
+  const allowedWebDevEndpoints = (
+    process.env.WHITE_WEBDEV_ENDPOINTS ?? ""
+  ).split(",");

   return {
     baseUrl: process.env.BASE_URL,
-    apiKey,
+    apiKey: getApiKey(process.env.OPENAI_API_KEY),
     openaiOrgId: process.env.OPENAI_ORG_ID,

     isAzure,
     azureUrl: process.env.AZURE_URL,
-    azureApiKey: process.env.AZURE_API_KEY,
+    azureApiKey: getApiKey(process.env.AZURE_API_KEY),
     azureApiVersion: process.env.AZURE_API_VERSION,

     isGoogle,
-    googleApiKey: process.env.GOOGLE_API_KEY,
+    googleApiKey: getApiKey(process.env.GOOGLE_API_KEY),
     googleUrl: process.env.GEMINI_BASE_URL ?? process.env.GOOGLE_URL,

     isAnthropic,
-    anthropicApiKey: process.env.ANTHROPIC_API_KEY,
+    anthropicApiKey: getApiKey(process.env.ANTHROPIC_API_KEY),
     anthropicApiVersion: process.env.ANTHROPIC_API_VERSION,
     anthropicUrl: process.env.ANTHROPIC_URL,

@@ -120,8 +136,6 @@ export const getServerSideConfig = () => {
     disableFastLink: !!process.env.DISABLE_FAST_LINK,
     customModels,
     defaultModel,
-    whiteWebDevEndpoints,
-
     isStoreFileToLocal:
       !!process.env.NEXT_PUBLIC_ENABLE_NODEJS_PLUGIN &&
      !process.env.R2_ACCOUNT_ID &&
@@ -133,5 +147,6 @@ export const getServerSideConfig = () => {
     ragChunkSize: process.env.RAG_CHUNK_SIZE ?? "2000",
     ragChunkOverlap: process.env.RAG_CHUNK_OVERLAP ?? "200",
     ragReturnCount: process.env.RAG_RETURN_COUNT ?? "4",
+    allowedWebDevEndpoints,
   };
 };
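
One detail worth noting across the config hunks: the exported field is now called allowedWebDevEndpoints, but it is still populated from the WHITE_WEBDEV_ENDPOINTS environment variable, so existing deployments keep working without renaming the variable. A minimal sketch of the parsing (the endpoint values are placeholders):

    // e.g. WHITE_WEBDEV_ENDPOINTS="https://dav.example.com/,https://webdav.example.org/"
    const allowedWebDevEndpoints = (process.env.WHITE_WEBDEV_ENDPOINTS ?? "").split(",");
    // Empty entries are dropped later, when the WebDAV route merges this list with
    // internalAllowedWebDavEndpoints and filters on Boolean(domain.trim()).
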
@@ -218,7 +218,7 @@ export const CHAT_PAGE_SIZE = 15;
 export const MAX_RENDER_MSG_COUNT = 45;

 // some famous webdav endpoints
-export const internalWhiteWebDavEndpoints = [
+export const internalAllowedWebDavEndpoints = [
   "https://dav.jianguoyun.com/dav/",
   "https://dav.dropdav.com/",
   "https://dav.box.com/dav",
@@ -28,6 +28,8 @@ export interface ChatToolMessage {
 import { createPersistStore } from "../utils/store";
 import { FileInfo } from "../client/platforms/utils";
 import { identifyDefaultClaudeModel } from "../utils/checkers";
+import { collectModelsWithDefaultModel } from "../utils/model";
+import { useAccessStore } from "./access";

 export type ChatMessage = RequestMessage & {
   date: string;
@@ -102,9 +104,19 @@ function getSummarizeModel(currentModel: string) {
   if (model.provider.providerType === "google") return GEMINI_SUMMARIZE_MODEL;
   // if it is using gpt-* models, force to use 3.5 to summarize
   if (currentModel.startsWith("gpt")) {
-    return SUMMARIZE_MODEL;
+    const configStore = useAppConfig.getState();
+    const accessStore = useAccessStore.getState();
+    const allModel = collectModelsWithDefaultModel(
+      configStore.models,
+      [configStore.customModels, accessStore.customModels].join(","),
+      accessStore.defaultModel,
+    );
+    const summarizeModel = allModel.find(
+      (m) => m.name === SUMMARIZE_MODEL && m.available,
+    );
+    return summarizeModel?.name ?? currentModel;
   }
-  if (currentModel.startsWith("gemini-pro")) {
+  if (currentModel.startsWith("gemini")) {
     return GEMINI_SUMMARIZE_MODEL;
   }
   return currentModel;
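
For the gpt-* branch above, the summarize model is no longer hard-coded: the code collects the full model table (built-in models plus custom models from both the config and access stores, plus the configured default) and only switches to SUMMARIZE_MODEL when that model is actually available, otherwise it keeps using the current chat model. A condensed, hedged sketch of that fallback (the entry type and model names are illustrative):

    // Illustrative stand-in for the collected model entries.
    type ModelEntry = { name: string; available: boolean };

    function pickSummarizeModel(
      currentModel: string,
      allModels: ModelEntry[],
      summarizeModel: string,
    ): string {
      const candidate = allModels.find((m) => m.name === summarizeModel && m.available);
      // Fall back to the chat model itself when the summarize model is unavailable.
      return candidate?.name ?? currentModel;
    }

    pickSummarizeModel("gpt-4", [{ name: "gpt-3.5-turbo", available: true }], "gpt-3.5-turbo");
    // => "gpt-3.5-turbo"
    pickSummarizeModel("gpt-4", [{ name: "gpt-3.5-turbo", available: false }], "gpt-3.5-turbo");
    // => "gpt-4"
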
@@ -64,13 +64,10 @@ export function collectModelTableWithDefaultModel(
 ) {
   let modelTable = collectModelTable(models, customModels);
   if (defaultModel && defaultModel !== "") {
-    delete modelTable[defaultModel];
     modelTable[defaultModel] = {
       ...modelTable[defaultModel],
       name: defaultModel,
       displayName: defaultModel,
       available: true,
-      provider:
-        modelTable[defaultModel]?.provider ?? customProvider(defaultModel),
       isDefault: true,
     };
   }