Merge branch 'main' of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web into ali_bytedance_reasoning_content

suruiqiang 2025-02-12 17:54:50 +08:00
commit cf140d4228
4 changed files with 72 additions and 31 deletions

View File

@@ -1,6 +1,11 @@
"use client";
// Azure and OpenAI use the same models, so they share the same LLMApi.
import { ApiPath, SILICONFLOW_BASE_URL, SiliconFlow } from "@/app/constant";
import {
ApiPath,
SILICONFLOW_BASE_URL,
SiliconFlow,
DEFAULT_MODELS,
} from "@/app/constant";
import {
useAccessStore,
useAppConfig,
@@ -8,7 +13,7 @@ import {
ChatMessageTool,
usePluginStore,
} from "@/app/store";
import { streamWithThink } from "@/app/utils/chat";
import { preProcessImageContent, streamWithThink } from "@/app/utils/chat";
import {
ChatOptions,
getHeaders,
@@ -20,13 +25,23 @@ import { getClientConfig } from "@/app/config/client";
import {
getMessageTextContent,
getMessageTextContentWithoutThinking,
isVisionModel,
getTimeoutMSByModel,
} from "@/app/utils";
import { RequestPayload } from "./openai";
import { fetch } from "@/app/utils/stream";
export interface SiliconFlowListModelResponse {
object: string;
data: Array<{
id: string;
object: string;
root: string;
}>;
}
export class SiliconflowApi implements LLMApi {
private disableListModels = true;
private disableListModels = false;
path(path: string): string {
const accessStore = useAccessStore.getState();
@@ -67,13 +82,16 @@ export class SiliconflowApi implements LLMApi {
}
async chat(options: ChatOptions) {
const visionModel = isVisionModel(options.config.model);
const messages: ChatOptions["messages"] = [];
for (const v of options.messages) {
if (v.role === "assistant") {
const content = getMessageTextContentWithoutThinking(v);
messages.push({ role: v.role, content });
} else {
const content = getMessageTextContent(v);
const content = visionModel
? await preProcessImageContent(v.content)
: getMessageTextContent(v);
messages.push({ role: v.role, content });
}
}
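
For context on the new vision branch above, a minimal sketch (not part of the commit) of what it does with a multimodal message; the sample message shape and the helper behavior described in the comments are assumptions based on how the rest of the client handles image content.

// Illustrative multimodal user message (shape assumed, not taken from this diff).
const userMessage = {
  role: "user" as const,
  content: [
    { type: "text", text: "What is in this picture?" },
    { type: "image_url", image_url: { url: "data:image/png;base64,..." } },
  ],
};

// With a vision model the image parts are kept (preProcessImageContent is
// expected to normalize/cache image URLs before sending); otherwise only the
// plain text is forwarded, matching the previous getMessageTextContent path.
// visionModel === true  -> content = await preProcessImageContent(userMessage.content)
// visionModel === false -> content = getMessageTextContent(userMessage)  // "What is in this picture?"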
@@ -234,6 +252,36 @@
}
async models(): Promise<LLMModel[]> {
return [];
if (this.disableListModels) {
return DEFAULT_MODELS.slice();
}
const res = await fetch(this.path(SiliconFlow.ListModelPath), {
method: "GET",
headers: {
...getHeaders(),
},
});
const resJson = (await res.json()) as SiliconFlowListModelResponse;
const chatModels = resJson.data;
console.log("[Models]", chatModels);
if (!chatModels) {
return [];
}
let seq = 1000; // keep the ordering consistent with constant.ts
return chatModels.map((m) => ({
name: m.id,
available: true,
sorted: seq++,
provider: {
id: "siliconflow",
providerName: "SiliconFlow",
providerType: "siliconflow",
sorted: 14,
},
}));
}
}
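
To make the new listing flow concrete, a small sketch (not part of the commit) of the response shape models() consumes and the LLMModel entries it produces; the sample model ids are illustrative only.

// Example response from GET <endpoint>/v1/models?&sub_type=chat, matching the
// SiliconFlowListModelResponse interface above (ids are made up).
const resJson = {
  object: "list",
  data: [
    { id: "deepseek-ai/DeepSeek-R1", object: "model", root: "deepseek-ai/DeepSeek-R1" },
    { id: "Qwen/Qwen2-VL-72B-Instruct", object: "model", root: "Qwen/Qwen2-VL-72B-Instruct" },
  ],
};

// The mapping mirrors the code above: `sorted` counts up from 1000 so the
// fetched models sort after the built-in DEFAULT_MODELS, and every entry is
// attributed to the SiliconFlow provider.
let seq = 1000;
const models = resJson.data.map((m) => ({
  name: m.id,        // e.g. "deepseek-ai/DeepSeek-R1"
  available: true,
  sorted: seq++,     // 1000, 1001, ...
  provider: {
    id: "siliconflow",
    providerName: "SiliconFlow",
    providerType: "siliconflow",
    sorted: 14,
  },
}));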

View File

@@ -66,11 +66,11 @@ export function Avatar(props: { model?: ModelType; avatar?: string }) {
LlmIcon = BotIconGemma;
} else if (modelName.startsWith("claude")) {
LlmIcon = BotIconClaude;
} else if (modelName.startsWith("llama")) {
} else if (modelName.toLowerCase().includes("llama")) {
LlmIcon = BotIconMeta;
} else if (modelName.startsWith("mixtral")) {
LlmIcon = BotIconMistral;
} else if (modelName.startsWith("deepseek")) {
} else if (modelName.toLowerCase().includes("deepseek")) {
LlmIcon = BotIconDeepseek;
} else if (modelName.startsWith("moonshot")) {
LlmIcon = BotIconMoonshot;
@@ -85,7 +85,7 @@ export function Avatar(props: { model?: ModelType; avatar?: string }) {
} else if (modelName.startsWith("doubao") || modelName.startsWith("ep-")) {
LlmIcon = BotIconDoubao;
} else if (
modelName.startsWith("glm") ||
modelName.toLowerCase().includes("glm") ||
modelName.startsWith("cogview-") ||
modelName.startsWith("cogvideox-")
) {
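
The switch from startsWith to a case-insensitive includes widens which model ids pick up the right icon. A quick sketch (hypothetical helper, example ids only):

// Hypothetical helper mirroring the updated llama condition above.
const looksLikeLlama = (modelName: string) =>
  modelName.toLowerCase().includes("llama");

looksLikeLlama("llama-3.1-70b");        // true, the old startsWith("llama") also matched this
looksLikeLlama("Meta-Llama-3.1-405B");  // true, the old check missed the prefix and the casing
looksLikeLlama("gpt-4o-mini");          // false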

View File

@@ -23,7 +23,6 @@ import CopyIcon from "../icons/copy.svg";
import LoadingIcon from "../icons/three-dots.svg";
import ChatGptIcon from "../icons/chatgpt.png";
import ShareIcon from "../icons/share.svg";
import BotIcon from "../icons/bot.png";
import DownloadIcon from "../icons/download.svg";
import { useEffect, useMemo, useRef, useState } from "react";
@@ -33,13 +32,13 @@ import dynamic from "next/dynamic";
import NextImage from "next/image";
import { toBlob, toPng } from "html-to-image";
import { DEFAULT_MASK_AVATAR } from "../store/mask";
import { prettyObject } from "../utils/format";
import { EXPORT_MESSAGE_CLASS_NAME } from "../constant";
import { getClientConfig } from "../config/client";
import { type ClientApi, getClientApi } from "../client/api";
import { getMessageTextContent } from "../utils";
import { MaskAvatar } from "./mask";
import clsx from "clsx";
const Markdown = dynamic(async () => (await import("./markdown")).Markdown, {
@@ -407,22 +406,6 @@ export function PreviewActions(props: {
);
}
function ExportAvatar(props: { avatar: string }) {
if (props.avatar === DEFAULT_MASK_AVATAR) {
return (
<img
src={BotIcon.src}
width={30}
height={30}
alt="bot"
className="user-avatar"
/>
);
}
return <Avatar avatar={props.avatar} />;
}
export function ImagePreviewer(props: {
messages: ChatMessage[];
topic: string;
@@ -546,9 +529,12 @@ export function ImagePreviewer(props: {
github.com/ChatGPTNextWeb/ChatGPT-Next-Web
</div>
<div className={styles["icons"]}>
<ExportAvatar avatar={config.avatar} />
<MaskAvatar avatar={config.avatar} />
<span className={styles["icon-space"]}>&</span>
<ExportAvatar avatar={mask.avatar} />
<MaskAvatar
avatar={mask.avatar}
model={session.mask.modelConfig.model}
/>
</div>
</div>
<div>
@@ -576,9 +562,14 @@ export function ImagePreviewer(props: {
key={i}
>
<div className={styles["avatar"]}>
<ExportAvatar
avatar={m.role === "user" ? config.avatar : mask.avatar}
/>
{m.role === "user" ? (
<Avatar avatar={config.avatar}></Avatar>
) : (
<MaskAvatar
avatar={session.mask.avatar}
model={m.model || session.mask.modelConfig.model}
/>
)}
</div>
<div className={styles["body"]}>

View File

@@ -258,6 +258,7 @@ export const ChatGLM = {
export const SiliconFlow = {
ExampleEndpoint: SILICONFLOW_BASE_URL,
ChatPath: "v1/chat/completions",
ListModelPath: "v1/models?&sub_type=chat",
};
export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
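
For reference, path() prefixes ListModelPath with the configured SiliconFlow endpoint, so with the default base URL the request ends up at something like the address below (sketch only; a proxy or custom endpoint from the access settings would replace the host):

// Assuming the default SILICONFLOW_BASE_URL with no trailing slash.
const listModelsUrl = `${SILICONFLOW_BASE_URL}/${SiliconFlow.ListModelPath}`;
// -> "https://api.siliconflow.cn/v1/models?&sub_type=chat"
// (the stray "&" right after "?" is harmless; it is parsed as an empty parameter)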
@@ -462,6 +463,7 @@ export const VISION_MODEL_REGEXES = [
/gpt-4-turbo(?!.*preview)/, // Matches "gpt-4-turbo" but not "gpt-4-turbo-preview"
/^dall-e-3$/, // Matches exactly "dall-e-3"
/glm-4v/,
/vl/i,
];
export const EXCLUDE_VISION_MODEL_REGEXES = [/claude-3-5-haiku-20241022/];
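
The new /vl/i entry is what lets SiliconFlow-style Qwen-VL ids be treated as vision models, which in turn activates the preProcessImageContent branch added in the SiliconFlow client above. A minimal sketch of the check, assuming isVisionModel in app/utils.ts essentially tests these two lists (any user-configured vision model list is ignored here):

// Simplified sketch; the real isVisionModel may also consult user settings.
const isVisionSketch = (model: string) =>
  !EXCLUDE_VISION_MODEL_REGEXES.some((re) => re.test(model)) &&
  VISION_MODEL_REGEXES.some((re) => re.test(model));

isVisionSketch("Qwen/Qwen2-VL-72B-Instruct"); // true, via the new /vl/i entry
isVisionSketch("deepseek-ai/DeepSeek-R1");    // false, no vision pattern matches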