feat: init support for deepseek

author Fred
date 2024-05-15 14:47:43 +08:00
parent 3513c6801e
commit 27ac18d9d7
8 changed files with 54 additions and 25 deletions

View File

@@ -73,6 +73,10 @@ export function auth(req: NextRequest, modelProvider: ModelProvider) {
case ModelProvider.Claude:
systemApiKey = serverConfig.anthropicApiKey;
break;
case ModelProvider.Deepseek:
systemApiKey = serverConfig.deepseekApiKey;
break;
case ModelProvider.GPT:
default:
if (serverConfig.isAzure) {
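
For orientation, the switch above is the server-side key lookup that the new case extends. A minimal consolidated sketch of that lookup, assuming the serverConfig field names shown in this diff (the function name and the ProviderKeys interface are illustrative, not from the commit):

```ts
// Sketch of the provider-to-key mapping in auth(); not code from this commit.
import { ModelProvider } from "../constant"; // import path assumed

interface ProviderKeys {
  apiKey?: string; // OpenAI / Azure key
  anthropicApiKey?: string;
  deepseekApiKey?: string; // new in this commit
}

function resolveSystemApiKey(
  provider: ModelProvider,
  config: ProviderKeys,
): string | undefined {
  switch (provider) {
    case ModelProvider.Claude:
      return config.anthropicApiKey;
    case ModelProvider.Deepseek:
      return config.deepseekApiKey; // the case added by this commit
    case ModelProvider.GPT:
    default:
      return config.apiKey; // Azure-specific handling in the real code is omitted here
  }
}
```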

View File

@@ -87,6 +87,8 @@ export async function requestOpenai(req: NextRequest) {
DEFAULT_MODELS,
serverConfig.customModels,
);
// check if deepseek model
const clonedBody = await req.text();
fetchOptions.body = clonedBody;
@@ -112,16 +114,16 @@ export async function requestOpenai(req: NextRequest) {
try {
const res = await fetch(fetchUrl, fetchOptions);
// Extract the OpenAI-Organization header from the response
const openaiOrganizationHeader = res.headers.get("OpenAI-Organization");
// Check if serverConfig.openaiOrgId is defined and not an empty string
if (serverConfig.openaiOrgId && serverConfig.openaiOrgId.trim() !== "") {
// If openaiOrganizationHeader is present, log it; otherwise, log that the header is not present
console.log("[Org ID]", openaiOrganizationHeader);
} else {
console.log("[Org ID] is not set up.");
}
// to prevent browser prompt for credentials
const newHeaders = new Headers(res.headers);
@@ -129,7 +131,6 @@ export async function requestOpenai(req: NextRequest) {
// to disable nginx buffering
newHeaders.set("X-Accel-Buffering", "no");
// Conditionally delete the OpenAI-Organization header from the response if [Org ID] is undefined or empty (not setup in ENV)
// Also, this is to prevent the header from being sent to the client
if (!serverConfig.openaiOrgId || serverConfig.openaiOrgId.trim() === "") {
@@ -142,7 +143,6 @@ export async function requestOpenai(req: NextRequest) {
// The browser will try to decode the response with brotli and fail
newHeaders.delete("content-encoding");
return new Response(res.body, {
status: res.status,
statusText: res.statusText,
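
The `// check if deepseek model` comment is all this view shows of the request-routing side, so the sketch below is purely hypothetical: the model-prefix test, the helper name, and `DEEPSEEK_BASE_URL` are assumptions rather than code from the commit.

```ts
// Hypothetical helper, not taken from this commit: decide which upstream base
// URL a chat request should go to, based on the model named in its JSON body.
function resolveChatBaseUrl(requestBodyText: string, defaultBaseUrl: string): string {
  const parsed = JSON.parse(requestBodyText) as { model?: string };
  const isDeepseekModel = parsed.model?.startsWith("deepseek") ?? false;
  // DEEPSEEK_BASE_URL is an assumed variable; api.deepseek.com is DeepSeek's public endpoint.
  return isDeepseekModel
    ? process.env.DEEPSEEK_BASE_URL ?? "https://api.deepseek.com"
    : defaultBaseUrl;
}
```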

View File

@@ -70,7 +70,7 @@ export abstract class LLMApi {
abstract models(): Promise<LLMModel[]>;
}
-type ProviderName = "openai" | "azure" | "claude" | "palm";
+type ProviderName = "openai" | "azure" | "claude" | "palm" | "deepseek";
interface Model {
name: string;
@@ -162,6 +162,7 @@ export function getHeaders() {
const modelConfig = useChatStore.getState().currentSession().mask.modelConfig;
const isGoogle = modelConfig.model.startsWith("gemini");
const isAzure = accessStore.provider === ServiceProvider.Azure;
const isDeepSeek = accessStore.provider === ServiceProvider.DeepSeek;
const authHeader = isAzure ? "api-key" : "Authorization";
const apiKey = isGoogle
? accessStore.googleApiKey
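
The apiKey ternary above is truncated in this view; the sketch below shows one plausible continuation that folds in the new isDeepSeek flag. The AccessKeys shape and a deepseekApiKey field on the access store are assumptions, not confirmed by this diff.

```ts
// Hypothetical sketch of the client-side key selection; not code from this commit.
interface AccessKeys {
  googleApiKey: string;
  azureApiKey: string;
  deepseekApiKey: string; // assumed field, mirroring the other providers
  openaiApiKey: string;
}

function pickClientApiKey(
  store: AccessKeys,
  flags: { isGoogle: boolean; isAzure: boolean; isDeepSeek: boolean },
): string {
  if (flags.isGoogle) return store.googleApiKey;
  if (flags.isAzure) return store.azureApiKey;
  if (flags.isDeepSeek) return store.deepseekApiKey;
  return store.openaiApiKey;
}
```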

View File

@@ -89,6 +89,7 @@ export const getServerSideConfig = () => {
const isAzure = !!process.env.AZURE_URL;
const isGoogle = !!process.env.GOOGLE_API_KEY;
const isAnthropic = !!process.env.ANTHROPIC_API_KEY;
const isDeepSeek = !!process.env.DEEPSEEK_API_KEY;
// const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? "";
// const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
@@ -99,7 +100,7 @@ export const getServerSideConfig = () => {
// );
const allowedWebDevEndpoints = (
-process.env.WHITE_WEBDEV_ENDPOINTS ?? ""
+process.env.WEBDEV_ENDPOINTS_WHITELIST ?? ""
).split(",");
return {
@@ -121,6 +122,8 @@ export const getServerSideConfig = () => {
anthropicApiVersion: process.env.ANTHROPIC_API_VERSION,
anthropicUrl: process.env.ANTHROPIC_URL,
deepseekApiKey: getApiKey(process.env.DEEPSEEK_API_KEY),
gtmId: process.env.GTM_ID,
needCode: ACCESS_CODES.size > 0,
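
Taken together, setting DEEPSEEK_API_KEY in the environment is what enables the provider server-side: isDeepSeek flips to true and deepseekApiKey is populated via getApiKey. A minimal usage sketch, assuming deepseekApiKey is returned as shown above (whether isDeepSeek itself is part of the returned object is not visible in this hunk):

```ts
// Sketch only: reading the new field off the server config.
import { getServerSideConfig } from "@/app/config/server"; // import path assumed

const serverConfig = getServerSideConfig();
if (serverConfig.deepseekApiKey) {
  // DEEPSEEK_API_KEY was set, so DeepSeek requests can be authenticated
  // server-side (see the new ModelProvider.Deepseek case in auth()).
}
```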

View File

@@ -1,3 +1,5 @@
import { Chat } from "./components/chat";
export const OWNER = "Yidadaa";
export const REPO = "ChatGPT-Next-Web";
export const REPO_URL = `https://github.com/${OWNER}/${REPO}`;
@@ -70,12 +72,14 @@ export enum ServiceProvider {
Azure = "Azure",
Google = "Google",
Anthropic = "Anthropic",
DeepSeek = "DeepSeek",
}
export enum ModelProvider {
GPT = "GPT",
GeminiPro = "GeminiPro",
Claude = "Claude",
Deepseek = "DeepSeek",
}
export const Anthropic = {
@@ -136,16 +140,11 @@ export const KnowledgeCutOffDate: Record<string, string> = {
const openaiModels = [
"gpt-3.5-turbo",
"gpt-3.5-turbo-1106",
"gpt-3.5-turbo-0125",
"gpt-4",
"gpt-4-0613",
"gpt-4-32k",
"gpt-4-32k-0613",
"gpt-4-turbo",
"gpt-4-turbo-preview",
"gpt-4-vision-preview",
"gpt-4-turbo-2024-04-09",
];
const googleModels = [
@@ -163,6 +162,8 @@ const anthropicModels = [
"claude-3-haiku-20240307",
];
const deepseekModels = ["deepseek-chat"];
export const DEFAULT_MODELS = [
...openaiModels.map((name) => ({
name,
@@ -191,6 +192,15 @@ export const DEFAULT_MODELS = [
providerType: "anthropic",
},
})),
...deepseekModels.map((name) => ({
name,
available: true,
provider: {
id: "deepseek",
providerName: "DeepSeek",
providerType: "deepseek",
},
})),
] as const;
export const CHAT_PAGE_SIZE = 15;
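
With the mapping above, DEFAULT_MODELS now carries a DeepSeek entry alongside the OpenAI, Google, and Anthropic ones. A quick illustration of the resulting shape at runtime (the lookup code is illustrative, not from the commit):

```ts
// Illustration only: the entry produced by the deepseekModels.map(...) block above.
import { DEFAULT_MODELS } from "@/app/constant"; // import path assumed

const deepseekEntry = DEFAULT_MODELS.find(
  (m) => m.provider.providerType === "deepseek",
);
// deepseekEntry?.name === "deepseek-chat"
// deepseekEntry?.provider.providerName === "DeepSeek"
```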