Compare commits


1 Commit

Author      SHA1          Message             Date
RiverRay    b95b1ac6f3    Update README.md    2025-02-21 08:56:21 +08:00
11 changed files with 54 additions and 118 deletions

View File

@@ -7,7 +7,7 @@
-<h1 align="center">NextChat</h1>
+<h1 align="center">NextChat (ChatGPT Next Web)</h1>
 English / [简体中文](./README_CN.md)
@@ -22,6 +22,7 @@ English / [简体中文](./README_CN.md)
 [![MacOS][MacOS-image]][download-url]
 [![Linux][Linux-image]][download-url]
+[NextChatAI](https://nextchat.dev/chat?utm_source=readme) / [Web App Demo](https://app.nextchat.dev) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases)
 [NextChatAI](https://nextchat.club?utm_source=readme) / [Web App Demo](https://app.nextchat.dev) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [Discord](https://discord.gg/YCkeafCafC) / [Enterprise Edition](#enterprise-edition) / [Twitter](https://twitter.com/NextChatDev)
@@ -40,12 +41,31 @@ English / [简体中文](./README_CN.md)
 </div>
-## 🥳 Cheer for NextChat iOS Version Online!
-> [ 👉 Click Here Install Now](https://apps.apple.com/us/app/nextchat-ai/id6743085599)
-![Github iOS Image](https://github.com/user-attachments/assets/e0aa334f-4c13-4dc9-8310-e3b09fa4b9f3)
+## 👋 Hey, NextChat is going to develop a native app!
+> This week we are going to start working on iOS and Android APP, and we want to find some reliable friends to do it together!
+✨ Several key points:
+- Starting from 0, you are a veteran
+- Completely open source, not hidden
+- Native development, pursuing the ultimate experience
+Will you come and do something together? 😎
+https://github.com/ChatGPTNextWeb/NextChat/issues/6269
+#Seeking for talents is thirsty #lack of people
+## 🥳 Cheer for DeepSeek, China's AI star!
+> Purpose-Built UI for DeepSeek Reasoner Model
+<img src="https://github.com/user-attachments/assets/f3952210-3af1-4dc0-9b81-40eaa4847d9a"/>
 ## 🫣 NextChat Support MCP !
 > Before build, please set env ENABLE_MCP=true

View File

@@ -40,11 +40,6 @@ export interface MultimodalContent {
   };
 }
-export interface MultimodalContentForAlibaba {
-  text?: string;
-  image?: string;
-}
 export interface RequestMessage {
   role: MessageRole;
   content: string | MultimodalContent[];

View File

@@ -7,10 +7,7 @@ import {
   ChatMessageTool,
   usePluginStore,
 } from "@/app/store";
-import {
-  preProcessImageContentForAlibabaDashScope,
-  streamWithThink,
-} from "@/app/utils/chat";
+import { streamWithThink } from "@/app/utils/chat";
 import {
@@ -18,14 +15,12 @@ import {
   LLMModel,
   SpeechOptions,
   MultimodalContent,
-  MultimodalContentForAlibaba,
 } from "../api";
 import { getClientConfig } from "@/app/config/client";
 import {
   getMessageTextContent,
   getMessageTextContentWithoutThinking,
   getTimeoutMSByModel,
-  isVisionModel,
 } from "@/app/utils";
 import { fetch } from "@/app/utils/stream";
@@ -94,6 +89,14 @@ export class QwenApi implements LLMApi {
   }
   async chat(options: ChatOptions) {
+    const messages = options.messages.map((v) => ({
+      role: v.role,
+      content:
+        v.role === "assistant"
+          ? getMessageTextContentWithoutThinking(v)
+          : getMessageTextContent(v),
+    }));
     const modelConfig = {
       ...useAppConfig.getState().modelConfig,
       ...useChatStore.getState().currentSession().mask.modelConfig,
@@ -102,21 +105,6 @@ export class QwenApi implements LLMApi {
       },
     };
-    const visionModel = isVisionModel(options.config.model);
-    const messages: ChatOptions["messages"] = [];
-    for (const v of options.messages) {
-      const content = (
-        visionModel
-          ? await preProcessImageContentForAlibabaDashScope(v.content)
-          : v.role === "assistant"
-            ? getMessageTextContentWithoutThinking(v)
-            : getMessageTextContent(v)
-      ) as any;
-      messages.push({ role: v.role, content });
-    }
     const shouldStream = !!options.config.stream;
     const requestPayload: RequestPayload = {
       model: modelConfig.model,
@@ -141,7 +129,7 @@ export class QwenApi implements LLMApi {
       "X-DashScope-SSE": shouldStream ? "enable" : "disable",
     };
-    const chatPath = this.path(Alibaba.ChatPath(modelConfig.model));
+    const chatPath = this.path(Alibaba.ChatPath);
     const chatPayload = {
       method: "POST",
       body: JSON.stringify(requestPayload),
@@ -174,7 +162,7 @@ export class QwenApi implements LLMApi {
           const json = JSON.parse(text);
           const choices = json.output.choices as Array<{
             message: {
-              content: string | null | MultimodalContentForAlibaba[];
+              content: string | null;
               tool_calls: ChatMessageTool[];
               reasoning_content: string | null;
             };
@@ -224,9 +212,7 @@ export class QwenApi implements LLMApi {
             } else if (content && content.length > 0) {
               return {
                 isThinking: false,
-                content: Array.isArray(content)
-                  ? content.map((item) => item.text).join(",")
-                  : content,
+                content: content,
               };
             }
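For reference, the removed ("-") code above switches between two DashScope content shapes: a plain string for text-generation models, and an array of { text } / { image } parts (the MultimodalContentForAlibaba type shown earlier in this compare) for vision models. A minimal illustrative sketch; the literal values are made up:

// Illustrative only: the two content shapes the removed code distinguishes.
// Part fields mirror MultimodalContentForAlibaba; the values are placeholders.
type DashScopeTextContent = string;
type DashScopeMultimodalContent = Array<{ text?: string; image?: string }>;

const plainMessage = {
  role: "user",
  content: "Hello" as DashScopeTextContent,
};

const visionMessage = {
  role: "user",
  content: [
    { image: "data:image/png;base64,..." },
    { text: "Describe this image" },
  ] as DashScopeMultimodalContent,
};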

View File

@@ -75,25 +75,6 @@ export class DeepSeekApi implements LLMApi {
       }
     }
-    // Detect and fix the message order: ensure the first non-system message is a user message
-    const filteredMessages: ChatOptions["messages"] = [];
-    let hasFoundFirstUser = false;
-    for (const msg of messages) {
-      if (msg.role === "system") {
-        // Keep all system messages
-        filteredMessages.push(msg);
-      } else if (msg.role === "user") {
-        // User message directly added
-        filteredMessages.push(msg);
-        hasFoundFirstUser = true;
-      } else if (hasFoundFirstUser) {
-        // After finding the first user message, all subsequent non-system messages are retained.
-        filteredMessages.push(msg);
-      }
-      // If hasFoundFirstUser is false and it is not a system message, it will be skipped.
-    }
     const modelConfig = {
       ...useAppConfig.getState().modelConfig,
       ...useChatStore.getState().currentSession().mask.modelConfig,
@@ -104,7 +85,7 @@ export class DeepSeekApi implements LLMApi {
     };
     const requestPayload: RequestPayload = {
-      messages: filteredMessages,
+      messages,
       stream: options.config.stream,
       model: modelConfig.model,
       temperature: modelConfig.temperature,
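The removed block's own comment states its intent: keep all system messages and make sure the first remaining message comes from the user. A small illustrative example of its effect on a hypothetical history:

// Hypothetical history (not from the repo):
const history = [
  { role: "system", content: "You are a helpful assistant." },
  { role: "assistant", content: "Leftover greeting from a previous session" },
  { role: "user", content: "Hi" },
  { role: "assistant", content: "Hello!" },
];
// The removed filter keeps system messages, then everything from the first user
// message onward, so the leading assistant turn here would be dropped:
// [system, user "Hi", assistant "Hello!"]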

View File

@@ -66,11 +66,11 @@ export function Avatar(props: { model?: ModelType; avatar?: string }) {
     LlmIcon = BotIconGemma;
   } else if (modelName.startsWith("claude")) {
     LlmIcon = BotIconClaude;
-  } else if (modelName.includes("llama")) {
+  } else if (modelName.toLowerCase().includes("llama")) {
     LlmIcon = BotIconMeta;
-  } else if (modelName.startsWith("mixtral") || modelName.startsWith("codestral")) {
+  } else if (modelName.startsWith("mixtral")) {
     LlmIcon = BotIconMistral;
-  } else if (modelName.includes("deepseek")) {
+  } else if (modelName.toLowerCase().includes("deepseek")) {
     LlmIcon = BotIconDeepseek;
   } else if (modelName.startsWith("moonshot")) {
     LlmIcon = BotIconMoonshot;
@@ -85,7 +85,7 @@ export function Avatar(props: { model?: ModelType; avatar?: string }) {
   } else if (modelName.startsWith("doubao") || modelName.startsWith("ep-")) {
     LlmIcon = BotIconDoubao;
   } else if (
-    modelName.includes("glm") ||
+    modelName.toLowerCase().includes("glm") ||
     modelName.startsWith("cogview-") ||
     modelName.startsWith("cogvideox-")
   ) {

View File

@@ -221,12 +221,7 @@ export const ByteDance = {
 export const Alibaba = {
   ExampleEndpoint: ALIBABA_BASE_URL,
-  ChatPath: (modelName: string) => {
-    if (modelName.includes("vl") || modelName.includes("omni")) {
-      return "v1/services/aigc/multimodal-generation/generation";
-    }
-    return `v1/services/aigc/text-generation/generation`;
-  },
+  ChatPath: "v1/services/aigc/text-generation/generation",
 };
 export const Tencent = {
@@ -417,14 +412,6 @@ export const KnowledgeCutOffDate: Record<string, string> = {
   "gpt-4-turbo": "2023-12",
   "gpt-4-turbo-2024-04-09": "2023-12",
   "gpt-4-turbo-preview": "2023-12",
-  "gpt-4.1": "2024-06",
-  "gpt-4.1-2025-04-14": "2024-06",
-  "gpt-4.1-mini": "2024-06",
-  "gpt-4.1-mini-2025-04-14": "2024-06",
-  "gpt-4.1-nano": "2024-06",
-  "gpt-4.1-nano-2025-04-14": "2024-06",
-  "gpt-4.5-preview": "2023-10",
-  "gpt-4.5-preview-2025-02-27": "2023-10",
   "gpt-4o": "2023-10",
   "gpt-4o-2024-05-13": "2023-10",
   "gpt-4o-2024-08-06": "2023-10",
@@ -466,7 +453,6 @@ export const DEFAULT_TTS_VOICES = [
 export const VISION_MODEL_REGEXES = [
   /vision/,
   /gpt-4o/,
-  /gpt-4\.1/,
   /claude-3/,
   /gemini-1\.5/,
   /gemini-exp/,
@@ -494,14 +480,6 @@ const openaiModels = [
   "gpt-4-32k-0613",
   "gpt-4-turbo",
   "gpt-4-turbo-preview",
-  "gpt-4.1",
-  "gpt-4.1-2025-04-14",
-  "gpt-4.1-mini",
-  "gpt-4.1-mini-2025-04-14",
-  "gpt-4.1-nano",
-  "gpt-4.1-nano-2025-04-14",
-  "gpt-4.5-preview",
-  "gpt-4.5-preview-2025-02-27",
   "gpt-4o",
   "gpt-4o-2024-05-13",
   "gpt-4o-2024-08-06",
@@ -557,8 +535,6 @@ const anthropicModels = [
   "claude-3-5-sonnet-20240620",
   "claude-3-5-sonnet-20241022",
   "claude-3-5-sonnet-latest",
-  "claude-3-7-sonnet-20250219",
-  "claude-3-7-sonnet-latest",
 ];
 const baiduModels = [
@@ -592,9 +568,6 @@ const alibabaModes = [
   "qwen-max-0403",
   "qwen-max-0107",
   "qwen-max-longcontext",
-  "qwen-omni-turbo",
-  "qwen-vl-plus",
-  "qwen-vl-max",
 ];
 const tencentModels = [
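With the removed function form of ChatPath, the endpoint follows the model name. An illustrative call (model names taken from the lists above):

import { Alibaba } from "@/app/constant";

// Removed (function) form: vision/omni models are routed to the multimodal endpoint.
const visionPath = Alibaba.ChatPath("qwen-vl-plus");  // "v1/services/aigc/multimodal-generation/generation"
const textPath = Alibaba.ChatPath("qwen-max-0403");   // "v1/services/aigc/text-generation/generation"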

View File

@@ -3,7 +3,7 @@ import {
   UPLOAD_URL,
   REQUEST_TIMEOUT_MS,
 } from "@/app/constant";
-import { MultimodalContent, RequestMessage } from "@/app/client/api";
+import { RequestMessage } from "@/app/client/api";
 import Locale from "@/app/locales";
 import {
   EventStreamContentType,
@@ -70,9 +70,8 @@ export function compressImage(file: Blob, maxSize: number): Promise<string> {
   });
 }
-export async function preProcessImageContentBase(
+export async function preProcessImageContent(
   content: RequestMessage["content"],
-  transformImageUrl: (url: string) => Promise<{ [key: string]: any }>,
 ) {
   if (typeof content === "string") {
     return content;
@@ -82,7 +81,7 @@ export async function preProcessImageContentBase(
       if (part?.type == "image_url" && part?.image_url?.url) {
         try {
           const url = await cacheImageToBase64Image(part?.image_url?.url);
-          result.push(await transformImageUrl(url));
+          result.push({ type: part.type, image_url: { url } });
         } catch (error) {
           console.error("Error processing image URL:", error);
         }
@@ -93,23 +92,6 @@ export async function preProcessImageContentBase(
   return result;
 }
-export async function preProcessImageContent(
-  content: RequestMessage["content"],
-) {
-  return preProcessImageContentBase(content, async (url) => ({
-    type: "image_url",
-    image_url: { url },
-  })) as Promise<MultimodalContent[] | string>;
-}
-export async function preProcessImageContentForAlibabaDashScope(
-  content: RequestMessage["content"],
-) {
-  return preProcessImageContentBase(content, async (url) => ({
-    image: url,
-  }));
-}
 const imageCaches: Record<string, string> = {};
 export function cacheImageToBase64Image(imageUrl: string) {
   if (imageUrl.includes(CACHE_URL_PREFIX)) {
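The removed preProcessImageContentBase generalizes image preprocessing behind a transformImageUrl callback, so each provider can shape the image part its API expects; the two removed wrappers are its OpenAI-style and DashScope-style instantiations. A usage sketch of that callback-based API (the callback bodies are copied from the diff; the surrounding function is illustrative):

import { preProcessImageContentBase } from "@/app/utils/chat"; // removed-side export
import { RequestMessage } from "@/app/client/api";

async function buildProviderContent(content: RequestMessage["content"]) {
  // OpenAI-compatible image part: { type: "image_url", image_url: { url } }
  const openAiStyle = await preProcessImageContentBase(content, async (url) => ({
    type: "image_url",
    image_url: { url },
  }));

  // Alibaba DashScope image part: { image: url }
  const dashScopeStyle = await preProcessImageContentBase(content, async (url) => ({
    image: url,
  }));

  return { openAiStyle, dashScopeStyle };
}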

View File

@@ -15,8 +15,6 @@ const config: Config = {
   moduleNameMapper: {
     "^@/(.*)$": "<rootDir>/$1",
   },
-  extensionsToTreatAsEsm: [".ts", ".tsx"],
-  injectGlobals: true,
 };
 // createJestConfig is exported this way to ensure that next/jest can load the Next.js config which is async

View File

@@ -1,22 +1,24 @@
 // Learn more: https://github.com/testing-library/jest-dom
 import "@testing-library/jest-dom";
-import { jest } from "@jest/globals";
 global.fetch = jest.fn(() =>
   Promise.resolve({
     ok: true,
     status: 200,
-    json: () => Promise.resolve([]),
+    json: () => Promise.resolve({}),
     headers: new Headers(),
     redirected: false,
     statusText: "OK",
     type: "basic",
     url: "",
+    clone: function () {
+      return this;
+    },
     body: null,
     bodyUsed: false,
     arrayBuffer: () => Promise.resolve(new ArrayBuffer(0)),
     blob: () => Promise.resolve(new Blob()),
     formData: () => Promise.resolve(new FormData()),
     text: () => Promise.resolve(""),
-  } as Response),
+  }),
 );
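With global.fetch replaced by this jest mock, tests can exercise fetch-calling code without any network access. A hypothetical test against the mock (not a file from this compare):

// Hypothetical test; it relies only on the jest.setup.ts mock shown above.
test("fetch resolves the stubbed response", async () => {
  const res = await fetch("https://example.com/api");
  expect(res.ok).toBe(true);
  expect(res.status).toBe(200);
  expect(fetch).toHaveBeenCalled();
});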

View File

@@ -17,8 +17,8 @@
     "prompts": "node ./scripts/fetch-prompts.mjs",
     "prepare": "husky install",
     "proxy-dev": "sh ./scripts/init-proxy.sh && proxychains -f ./scripts/proxychains.conf yarn dev",
-    "test": "node --no-warnings --experimental-vm-modules $(yarn bin jest) --watch",
-    "test:ci": "node --no-warnings --experimental-vm-modules $(yarn bin jest) --ci"
+    "test": "jest --watch",
+    "test:ci": "jest --ci"
   },
   "dependencies": {
     "@fortaine/fetch-event-source": "^3.0.6",

View File

@@ -1,4 +1,3 @@
-import { jest } from "@jest/globals";
 import { isVisionModel } from "../app/utils";
 describe("isVisionModel", () => {
@@ -51,7 +50,7 @@ describe("isVisionModel", () => {
   test("should identify models from VISION_MODELS env var", () => {
     process.env.VISION_MODELS = "custom-vision-model,another-vision-model";
     expect(isVisionModel("custom-vision-model")).toBe(true);
     expect(isVisionModel("another-vision-model")).toBe(true);
     expect(isVisionModel("unrelated-model")).toBe(false);
@@ -65,4 +64,4 @@ describe("isVisionModel", () => {
     expect(isVisionModel("unrelated-model")).toBe(false);
     expect(isVisionModel("gpt-4-vision")).toBe(true);
   });
 });