Mirror of https://github.com/Yidadaa/ChatGPT-Next-Web.git, synced 2025-09-09 06:56:41 +08:00

Compare commits

1 commit on branch 6305-bugth

Author | SHA1 | Date
---|---|---
Leizhenpen | b95b1ac6f3 |
@@ -75,25 +75,6 @@ export class DeepSeekApi implements LLMApi {
       }
     }
 
-    // Detect and fix the message order so that the first non-system message is a user message
-    const filteredMessages: ChatOptions["messages"] = [];
-    let hasFoundFirstUser = false;
-
-    for (const msg of messages) {
-      if (msg.role === "system") {
-        // Keep all system messages
-        filteredMessages.push(msg);
-      } else if (msg.role === "user") {
-        // User message directly added
-        filteredMessages.push(msg);
-        hasFoundFirstUser = true;
-      } else if (hasFoundFirstUser) {
-        // After finding the first user message, all subsequent non-system messages are retained.
-        filteredMessages.push(msg);
-      }
-      // If hasFoundFirstUser is false and it is not a system message, it will be skipped.
-    }
-
     const modelConfig = {
       ...useAppConfig.getState().modelConfig,
       ...useChatStore.getState().currentSession().mask.modelConfig,
@@ -104,7 +85,7 @@ export class DeepSeekApi implements LLMApi {
     };
 
     const requestPayload: RequestPayload = {
-      messages: filteredMessages,
+      messages,
       stream: options.config.stream,
       model: modelConfig.model,
       temperature: modelConfig.temperature,
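The block removed above reorders the conversation so that, aside from system messages, the first message handed to DeepSeek is a user message; with it gone, the request payload is built from `messages` directly. A minimal standalone sketch of that filtering rule follows. The `RequestMessage` type and the `ensureFirstUserMessage` name are illustrative, not identifiers from this repository:

```ts
// Sketch of the same reordering rule, outside the DeepSeekApi class.
// `RequestMessage` and `ensureFirstUserMessage` are illustrative names.
type RequestMessage = { role: "system" | "user" | "assistant"; content: string };

function ensureFirstUserMessage(messages: RequestMessage[]): RequestMessage[] {
  const filtered: RequestMessage[] = [];
  let hasFoundFirstUser = false;

  for (const msg of messages) {
    if (msg.role === "system") {
      // System messages are always kept.
      filtered.push(msg);
    } else if (msg.role === "user") {
      filtered.push(msg);
      hasFoundFirstUser = true;
    } else if (hasFoundFirstUser) {
      // Non-system messages after the first user turn are kept as-is.
      filtered.push(msg);
    }
    // Assistant (or other) messages that appear before any user message are dropped.
  }
  return filtered;
}

// Example: a leading assistant greeting is dropped, the rest is preserved.
console.log(
  ensureFirstUserMessage([
    { role: "system", content: "You are helpful." },
    { role: "assistant", content: "Hi, how can I help?" }, // dropped
    { role: "user", content: "Hello" },
    { role: "assistant", content: "Hello!" },
  ]),
);
```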
@@ -66,11 +66,11 @@ export function Avatar(props: { model?: ModelType; avatar?: string }) {
       LlmIcon = BotIconGemma;
     } else if (modelName.startsWith("claude")) {
       LlmIcon = BotIconClaude;
-    } else if (modelName.includes("llama")) {
+    } else if (modelName.toLowerCase().includes("llama")) {
       LlmIcon = BotIconMeta;
-    } else if (modelName.startsWith("mixtral") || modelName.startsWith("codestral")) {
+    } else if (modelName.startsWith("mixtral")) {
       LlmIcon = BotIconMistral;
-    } else if (modelName.includes("deepseek")) {
+    } else if (modelName.toLowerCase().includes("deepseek")) {
       LlmIcon = BotIconDeepseek;
     } else if (modelName.startsWith("moonshot")) {
       LlmIcon = BotIconMoonshot;
@@ -85,7 +85,7 @@ export function Avatar(props: { model?: ModelType; avatar?: string }) {
     } else if (modelName.startsWith("doubao") || modelName.startsWith("ep-")) {
       LlmIcon = BotIconDoubao;
     } else if (
-      modelName.includes("glm") ||
+      modelName.toLowerCase().includes("glm") ||
       modelName.startsWith("cogview-") ||
       modelName.startsWith("cogvideox-")
     ) {
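The Avatar changes above replace plain `includes(...)` checks with `toLowerCase().includes(...)`, making the substring match case-insensitive for model IDs with capital letters, such as `DeepSeek-R1` or `GLM-4`. A rough sketch of that matching pattern, with a hypothetical `pickLlmIcon` helper and placeholder strings standing in for the imported icon components:

```ts
// Sketch of the case-insensitive matching shown in the diff; the returned
// strings are placeholders, not the SVG icon components used in emoji.tsx.
function pickLlmIcon(modelName: string): string {
  if (modelName.toLowerCase().includes("llama")) return "BotIconMeta";
  if (modelName.startsWith("mixtral")) return "BotIconMistral";
  if (modelName.toLowerCase().includes("deepseek")) return "BotIconDeepseek";
  if (
    modelName.toLowerCase().includes("glm") ||
    modelName.startsWith("cogview-") ||
    modelName.startsWith("cogvideox-")
  ) {
    return "BotIconGlm";
  }
  return "BotIconDefault";
}

// "DeepSeek-R1" would miss a plain includes("deepseek") check but matches here.
console.log(pickLlmIcon("DeepSeek-R1")); // "BotIconDeepseek"
```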
@@ -535,8 +535,6 @@ const anthropicModels = [
   "claude-3-5-sonnet-20240620",
   "claude-3-5-sonnet-20241022",
   "claude-3-5-sonnet-latest",
-  "claude-3-7-sonnet-20250219",
-  "claude-3-7-sonnet-latest",
 ];
 
 const baiduModels = [