Compare commits

...

11 Commits

Author SHA1 Message Date
river
8680182921 feat: Add DeepSeek API key and fix MCP environment variable parsing 2025-01-27 12:48:59 +08:00
suruiqiang
2173c82bb5 add deepseek-reasoner, and change deepseek's summary model to deepseek-chat 2025-01-23 18:47:22 +08:00
suruiqiang
0d5e66a9ae not insert mcpSystemPrompt if not ENABLE_MCP 2025-01-23 18:24:38 +08:00
RiverRay
2f9cb5a68f Merge pull request #6084 from ChatGPTNextWeb/temp-fix
fix: missing mcp_config.json files required for building
2025-01-22 21:40:37 +08:00
Kadxy
55cacfb7e2 fix: missing files required for building 2025-01-22 21:28:29 +08:00
RiverRay
6a862372f7 Merge pull request #6082 from ChatGPTNextWeb/Leizhenpeng-patch-2
Update README_CN.md
2025-01-22 13:11:11 +08:00
RiverRay
81bd83eb44 Update README_CN.md 2025-01-22 13:08:33 +08:00
RiverRay
b2b6fd81be Merge pull request #6075 from Kadxy/main 2025-01-20 10:44:46 +08:00
Kadxy
f22cfd7b33 Update chat.tsx 2025-01-20 10:10:52 +08:00
RiverRay
8111acff34 Update README.md 2025-01-20 00:17:47 +08:00
RiverRay
4cad55379d Merge pull request #5974 from ChatGPTNextWeb/feat-mcp
Support MCP( WIP)
2025-01-20 00:07:41 +08:00
11 changed files with 76 additions and 25 deletions

View File

@@ -1,6 +1,11 @@
# Your openai api key. (required) # Your openai api key. (required)
OPENAI_API_KEY=sk-xxxx OPENAI_API_KEY=sk-xxxx
# DeepSeek Api Key. (Optional)
DEEPSEEK_API_KEY=
# Access password, separated by comma. (optional) # Access password, separated by comma. (optional)
CODE=your-password CODE=your-password
@@ -70,5 +75,6 @@ ANTHROPIC_API_VERSION=
### anthropic claude Api url (optional) ### anthropic claude Api url (optional)
ANTHROPIC_URL= ANTHROPIC_URL=
### (optional) ### (optional)
WHITE_WEBDAV_ENDPOINTS= WHITE_WEBDAV_ENDPOINTS=

View File

@@ -1,2 +1,3 @@
public/serviceWorker.js public/serviceWorker.js
app/mcp/mcp_config.json app/mcp/mcp_config.json
app/mcp/mcp_config.default.json

View File

@@ -42,7 +42,7 @@ COPY --from=builder /app/.next/static ./.next/static
COPY --from=builder /app/.next/server ./.next/server COPY --from=builder /app/.next/server ./.next/server
RUN mkdir -p /app/app/mcp && chmod 777 /app/app/mcp RUN mkdir -p /app/app/mcp && chmod 777 /app/app/mcp
COPY --from=builder /app/app/mcp/mcp_config.json /app/app/mcp/ COPY --from=builder /app/app/mcp/mcp_config.default.json /app/app/mcp/mcp_config.json
EXPOSE 3000 EXPOSE 3000

View File

@@ -5,6 +5,7 @@
</a> </a>
<h1 align="center">NextChat (ChatGPT Next Web)</h1> <h1 align="center">NextChat (ChatGPT Next Web)</h1>
English / [简体中文](./README_CN.md) English / [简体中文](./README_CN.md)
@@ -39,6 +40,12 @@ One-Click to get a well-designed cross-platform ChatGPT web UI, with Claude, GPT
</div> </div>
## 🫣 NextChat Support MCP !
> Before build, please set env ENABLE_MCP=true
<img src="https://github.com/user-attachments/assets/d8851f40-4e36-4335-b1a4-ec1e11488c7e"/>
## Enterprise Edition ## Enterprise Edition
Meeting Your Company's Privatization and Customization Deployment Requirements: Meeting Your Company's Privatization and Customization Deployment Requirements:
@@ -333,6 +340,12 @@ Stability API key.
Customize Stability API url. Customize Stability API url.
### `ENABLE_MCP` (optional)
Enable MCP (Model Context Protocol) feature
## Requirements ## Requirements
NodeJS >= 18, Docker >= 20 NodeJS >= 18, Docker >= 20
@@ -391,6 +404,16 @@ If your proxy needs password, use:
-e PROXY_URL="http://127.0.0.1:7890 user pass" -e PROXY_URL="http://127.0.0.1:7890 user pass"
``` ```
If you want to enable MCP, use
```
docker run -d -p 3000:3000 \
-e OPENAI_API_KEY=sk-xxxx \
-e CODE=your-password \
-e ENABLE_MCP=true \
yidadaa/chatgpt-next-web
```
### Shell ### Shell
```shell ```shell

View File

@@ -27,7 +27,8 @@
企业版咨询: **business@nextchat.dev** 企业版咨询: **business@nextchat.dev**
<img width="300" src="https://github.com/user-attachments/assets/3daeb7b6-ab63-4542-9141-2e4a12c80601"> <img width="300" src="https://github.com/user-attachments/assets/bb29a11d-ff75-48a8-b1f8-d2d7238cf987">
## 开始使用 ## 开始使用

View File

@@ -72,6 +72,9 @@ import {
safeLocalStorage, safeLocalStorage,
getModelSizes, getModelSizes,
supportsCustomSize, supportsCustomSize,
useMobileScreen,
selectOrCopy,
showPlugins,
} from "../utils"; } from "../utils";
import { uploadImage as uploadImageRemote } from "@/app/utils/chat"; import { uploadImage as uploadImageRemote } from "@/app/utils/chat";

View File

@@ -255,6 +255,6 @@ export const getServerSideConfig = () => {
defaultModel, defaultModel,
visionModels, visionModels,
allowedWebDavEndpoints, allowedWebDavEndpoints,
enableMcp: !!process.env.ENABLE_MCP, enableMcp: process.env.ENABLE_MCP === "true",
}; };
}; };

View File

@@ -393,6 +393,7 @@ You are an AI assistant with access to system tools. Your role is to help users
export const SUMMARIZE_MODEL = "gpt-4o-mini"; export const SUMMARIZE_MODEL = "gpt-4o-mini";
export const GEMINI_SUMMARIZE_MODEL = "gemini-pro"; export const GEMINI_SUMMARIZE_MODEL = "gemini-pro";
export const DEEPSEEK_SUMMARIZE_MODEL = "deepseek-chat";
export const KnowledgeCutOffDate: Record<string, string> = { export const KnowledgeCutOffDate: Record<string, string> = {
default: "2021-09", default: "2021-09",
@@ -561,7 +562,7 @@ const iflytekModels = [
"4.0Ultra", "4.0Ultra",
]; ];
const deepseekModels = ["deepseek-chat", "deepseek-coder"]; const deepseekModels = ["deepseek-chat", "deepseek-coder", "deepseek-reasoner"];
const xAIModes = ["grok-beta"]; const xAIModes = ["grok-beta"];

View File

@@ -365,6 +365,8 @@ export async function getMcpConfigFromFile(): Promise<McpConfigData> {
// 更新 MCP 配置文件 // 更新 MCP 配置文件
async function updateMcpConfig(config: McpConfigData): Promise<void> { async function updateMcpConfig(config: McpConfigData): Promise<void> {
try { try {
// 确保目录存在
await fs.mkdir(path.dirname(CONFIG_PATH), { recursive: true });
await fs.writeFile(CONFIG_PATH, JSON.stringify(config, null, 2)); await fs.writeFile(CONFIG_PATH, JSON.stringify(config, null, 2));
} catch (error) { } catch (error) {
throw error; throw error;

View File

@@ -0,0 +1,3 @@
{
"mcpServers": {}
}

View File

@@ -20,6 +20,7 @@ import {
DEFAULT_MODELS, DEFAULT_MODELS,
DEFAULT_SYSTEM_TEMPLATE, DEFAULT_SYSTEM_TEMPLATE,
GEMINI_SUMMARIZE_MODEL, GEMINI_SUMMARIZE_MODEL,
DEEPSEEK_SUMMARIZE_MODEL,
KnowledgeCutOffDate, KnowledgeCutOffDate,
MCP_SYSTEM_TEMPLATE, MCP_SYSTEM_TEMPLATE,
MCP_TOOLS_TEMPLATE, MCP_TOOLS_TEMPLATE,
@@ -35,7 +36,7 @@ import { ModelConfig, ModelType, useAppConfig } from "./config";
import { useAccessStore } from "./access"; import { useAccessStore } from "./access";
import { collectModelsWithDefaultModel } from "../utils/model"; import { collectModelsWithDefaultModel } from "../utils/model";
import { createEmptyMask, Mask } from "./mask"; import { createEmptyMask, Mask } from "./mask";
import { executeMcpAction, getAllTools } from "../mcp/actions"; import { executeMcpAction, getAllTools, isMcpEnabled } from "../mcp/actions";
import { extractMcpJson, isMcpJson } from "../mcp/utils"; import { extractMcpJson, isMcpJson } from "../mcp/utils";
const localStorage = safeLocalStorage(); const localStorage = safeLocalStorage();
@@ -143,7 +144,10 @@ function getSummarizeModel(
} }
if (currentModel.startsWith("gemini")) { if (currentModel.startsWith("gemini")) {
return [GEMINI_SUMMARIZE_MODEL, ServiceProvider.Google]; return [GEMINI_SUMMARIZE_MODEL, ServiceProvider.Google];
} else if (currentModel.startsWith("deepseek-")) {
return [DEEPSEEK_SUMMARIZE_MODEL, ServiceProvider.DeepSeek];
} }
return [currentModel, providerName]; return [currentModel, providerName];
} }
@@ -245,7 +249,7 @@ export const useChatStore = createPersistStore(
newSession.topic = currentSession.topic; newSession.topic = currentSession.topic;
// 深拷贝消息 // 深拷贝消息
newSession.messages = currentSession.messages.map(msg => ({ newSession.messages = currentSession.messages.map((msg) => ({
...msg, ...msg,
id: nanoid(), // 生成新的消息 ID id: nanoid(), // 生成新的消息 ID
})); }));
@@ -551,27 +555,32 @@ export const useChatStore = createPersistStore(
(session.mask.modelConfig.model.startsWith("gpt-") || (session.mask.modelConfig.model.startsWith("gpt-") ||
session.mask.modelConfig.model.startsWith("chatgpt-")); session.mask.modelConfig.model.startsWith("chatgpt-"));
const mcpSystemPrompt = await getMcpSystemPrompt(); const mcpEnabled = await isMcpEnabled();
const mcpSystemPrompt = mcpEnabled ? await getMcpSystemPrompt() : "";
var systemPrompts: ChatMessage[] = []; var systemPrompts: ChatMessage[] = [];
systemPrompts = shouldInjectSystemPrompts
? [
createMessage({
role: "system",
content:
fillTemplateWith("", {
...modelConfig,
template: DEFAULT_SYSTEM_TEMPLATE,
}) + mcpSystemPrompt,
}),
]
: [
createMessage({
role: "system",
content: mcpSystemPrompt,
}),
];
if (shouldInjectSystemPrompts) { if (shouldInjectSystemPrompts) {
systemPrompts = [
createMessage({
role: "system",
content:
fillTemplateWith("", {
...modelConfig,
template: DEFAULT_SYSTEM_TEMPLATE,
}) + mcpSystemPrompt,
}),
];
} else if (mcpEnabled) {
systemPrompts = [
createMessage({
role: "system",
content: mcpSystemPrompt,
}),
];
}
if (shouldInjectSystemPrompts || mcpEnabled) {
console.log( console.log(
"[Global System Prompt] ", "[Global System Prompt] ",
systemPrompts.at(0)?.content ?? "empty", systemPrompts.at(0)?.content ?? "empty",
@@ -816,6 +825,8 @@ export const useChatStore = createPersistStore(
/** check if the message contains MCP JSON and execute the MCP action */ /** check if the message contains MCP JSON and execute the MCP action */
checkMcpJson(message: ChatMessage) { checkMcpJson(message: ChatMessage) {
const mcpEnabled = isMcpEnabled();
if (!mcpEnabled) return;
const content = getMessageTextContent(message); const content = getMessageTextContent(message);
if (isMcpJson(content)) { if (isMcpJson(content)) {
try { try {