Compare commits


12 Commits

Author SHA1 Message Date
Kadxy
e3cbec30de refactor: clean up chat and markdown components, improve markdown rendering with lazy loading and streaming support 2025-04-05 12:56:52 +08:00
RiverRay
48469bd8ca Merge pull request #6392 from ChatGPTNextWeb/Leizhenpeng-patch-6
Update README.md
2025-03-20 17:52:02 +08:00
RiverRay
5a5e887f2b Update README.md 2025-03-20 17:51:47 +08:00
RiverRay
b6f5d75656 Merge pull request #6344 from vangie/fix/jest-setup-esm
test: fix unit test failures
2025-03-14 20:04:56 +08:00
Vangie Du
0d41a17ef6 test: fix unit test failures 2025-03-07 14:49:17 +08:00
RiverRay
f7cde17919 Merge pull request #6292 from Little-LittleProgrammer/feature/alibaba-omni-support
feat(alibaba): Added alibaba vision model and omni model support
2025-03-01 10:25:16 +08:00
RiverRay
570cbb34b6 Merge pull request #6310 from agi-dude/patch-1
Remove duplicate links
2025-03-01 10:24:38 +08:00
RiverRay
7aa9ae0a3e Merge pull request #6311 from ChatGPTNextWeb/6305-bugthe-first-message-except-the-system-message-of-deepseek-reasoner-must-be-a-user-message-but-an-assistant-message-detected
fix: enforce that the first message (excluding system messages) is a …
2025-02-28 19:48:09 +08:00
Mr. AGI
ad6666eeaf Update README.md 2025-02-28 10:47:52 +05:00
EvanWu
a2c4e468a0 fix(app/utils/chat.ts): fix type error 2025-02-26 19:58:32 +08:00
EvanWu
0a25a1a8cb refactor(app/utils/chat.ts): optimize function preProcessImageContentBase 2025-02-25 09:22:47 +08:00
EvanWu
b709ee3983 feat(alibaba): Added alibaba vision model and omni model support 2025-02-24 20:18:07 +08:00
12 changed files with 353 additions and 57 deletions

README.md

@@ -7,7 +7,7 @@
-<h1 align="center">NextChat (ChatGPT Next Web)</h1>
+<h1 align="center">NextChat</h1>
 English / [简体中文](./README_CN.md)
@@ -22,7 +22,6 @@ English / [简体中文](./README_CN.md)
 [![MacOS][MacOS-image]][download-url]
 [![Linux][Linux-image]][download-url]
-[NextChatAI](https://nextchat.dev/chat?utm_source=readme) / [Web App Demo](https://app.nextchat.dev) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases)
 [NextChatAI](https://nextchat.club?utm_source=readme) / [Web App Demo](https://app.nextchat.dev) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [Discord](https://discord.gg/YCkeafCafC) / [Enterprise Edition](#enterprise-edition) / [Twitter](https://twitter.com/NextChatDev)
@@ -41,24 +40,6 @@ English / [简体中文](./README_CN.md)
 </div>
-## 👋 Hey, NextChat is going to develop a native app!
-> This week we are going to start working on iOS and Android APP, and we want to find some reliable friends to do it together!
-✨ Several key points:
-- Starting from 0, you are a veteran
-- Completely open source, not hidden
-- Native development, pursuing the ultimate experience
-Will you come and do something together? 😎
-https://github.com/ChatGPTNextWeb/NextChat/issues/6269
-#Seeking for talents is thirsty #lack of people
 ## 🥳 Cheer for DeepSeek, China's AI star!
 > Purpose-Built UI for DeepSeek Reasoner Model

app/client/api.ts

@@ -40,6 +40,11 @@ export interface MultimodalContent {
   };
 }
 
+export interface MultimodalContentForAlibaba {
+  text?: string;
+  image?: string;
+}
+
 export interface RequestMessage {
   role: MessageRole;
   content: string | MultimodalContent[];
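
For orientation, DashScope's multimodal endpoint takes content parts carrying either a `text` or an `image` field, which is what this interface models. A minimal sketch (values are hypothetical):

```ts
// Sketch: message parts in the new interface's shape. The image value is a
// base64 data URL, which is what preProcessImageContentForAlibabaDashScope
// (app/utils/chat.ts, below) produces.
const parts: MultimodalContentForAlibaba[] = [
  { text: "Describe this image." },
  { image: "data:image/png;base64,iVBORw0KGgo..." }, // hypothetical payload
];
```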

app/client/platforms/alibaba.ts

@@ -7,7 +7,10 @@ import {
   ChatMessageTool,
   usePluginStore,
 } from "@/app/store";
-import { streamWithThink } from "@/app/utils/chat";
+import {
+  preProcessImageContentForAlibabaDashScope,
+  streamWithThink,
+} from "@/app/utils/chat";
 import {
@@ -15,12 +18,14 @@ import {
   LLMModel,
   SpeechOptions,
   MultimodalContent,
+  MultimodalContentForAlibaba,
 } from "../api";
 import { getClientConfig } from "@/app/config/client";
 import {
   getMessageTextContent,
   getMessageTextContentWithoutThinking,
   getTimeoutMSByModel,
+  isVisionModel,
 } from "@/app/utils";
 import { fetch } from "@/app/utils/stream";
@@ -89,14 +94,6 @@ export class QwenApi implements LLMApi {
   }
 
   async chat(options: ChatOptions) {
-    const messages = options.messages.map((v) => ({
-      role: v.role,
-      content:
-        v.role === "assistant"
-          ? getMessageTextContentWithoutThinking(v)
-          : getMessageTextContent(v),
-    }));
-
     const modelConfig = {
       ...useAppConfig.getState().modelConfig,
       ...useChatStore.getState().currentSession().mask.modelConfig,
@@ -105,6 +102,21 @@ export class QwenApi implements LLMApi {
       },
     };
 
+    const visionModel = isVisionModel(options.config.model);
+
+    const messages: ChatOptions["messages"] = [];
+
+    for (const v of options.messages) {
+      const content = (
+        visionModel
+          ? await preProcessImageContentForAlibabaDashScope(v.content)
+          : v.role === "assistant"
+            ? getMessageTextContentWithoutThinking(v)
+            : getMessageTextContent(v)
+      ) as any;
+      messages.push({ role: v.role, content });
+    }
+
     const shouldStream = !!options.config.stream;
     const requestPayload: RequestPayload = {
       model: modelConfig.model,
@@ -129,7 +141,7 @@
       "X-DashScope-SSE": shouldStream ? "enable" : "disable",
     };
 
-    const chatPath = this.path(Alibaba.ChatPath);
+    const chatPath = this.path(Alibaba.ChatPath(modelConfig.model));
     const chatPayload = {
       method: "POST",
       body: JSON.stringify(requestPayload),
@@ -162,7 +174,7 @@
         const json = JSON.parse(text);
         const choices = json.output.choices as Array<{
           message: {
-            content: string | null;
+            content: string | null | MultimodalContentForAlibaba[];
             tool_calls: ChatMessageTool[];
             reasoning_content: string | null;
           };
@@ -212,7 +224,9 @@
         } else if (content && content.length > 0) {
           return {
             isThinking: false,
-            content: content,
+            content: Array.isArray(content)
+              ? content.map((item) => item.text).join(",")
+              : content,
           };
         }
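
Since the multimodal endpoint can return array-valued message content, the streaming callback above flattens it back to plain text. The same logic as a standalone helper (hypothetical name, for illustration):

```ts
// Hypothetical helper mirroring the inline logic in the hunk above.
// Text segments are kept and joined with commas; non-text parts (e.g. images)
// contribute their undefined `text` field, exactly as the inline code would.
function flattenAlibabaContent(
  content: string | null | MultimodalContentForAlibaba[],
): string | null {
  return Array.isArray(content)
    ? content.map((item) => item.text).join(",")
    : content;
}
```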

app/components/chat.tsx

@@ -18,7 +18,6 @@ import ReturnIcon from "../icons/return.svg";
 import CopyIcon from "../icons/copy.svg";
 import SpeakIcon from "../icons/speak.svg";
 import SpeakStopIcon from "../icons/speak-stop.svg";
-import LoadingIcon from "../icons/three-dots.svg";
 import LoadingButtonIcon from "../icons/loading.svg";
 import PromptIcon from "../icons/prompt.svg";
 import MaskIcon from "../icons/mask.svg";
@@ -79,8 +78,6 @@ import {
 import { uploadImage as uploadImageRemote } from "@/app/utils/chat";
-import dynamic from "next/dynamic";
 import { ChatControllerPool } from "../client/controller";
 import { DalleQuality, DalleStyle, ModelSize } from "../typing";
 import { Prompt, usePromptStore } from "../store/prompt";
@@ -125,14 +122,15 @@ import { getModelProvider } from "../utils/model";
 import { RealtimeChat } from "@/app/components/realtime-chat";
 import clsx from "clsx";
 import { getAvailableClientsCount, isMcpEnabled } from "../mcp/actions";
+import { Markdown } from "./markdown";
 
 const localStorage = safeLocalStorage();
 const ttsPlayer = createTTSPlayer();
 
-const Markdown = dynamic(async () => (await import("./markdown")).Markdown, {
-  loading: () => <LoadingIcon />,
-});
+// const Markdown = dynamic(async () => (await import("./markdown")).Markdown, {
+//   loading: () => <LoadingIcon />,
+// });
 
 const MCPAction = () => {
   const navigate = useNavigate();
@@ -1984,6 +1982,8 @@ function _Chat() {
                     fontFamily={fontFamily}
                     parentRef={scrollRef}
                     defaultShow={i >= messages.length - 6}
+                    immediatelyRender={i >= messages.length - 3}
+                    streaming={message.streaming}
                   />
                   {getMessageImages(message).length == 1 && (
                     <img
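
With the static import in place, code splitting for the Markdown component is gone; instead the component itself decides how much rendering work to do per message via the two new props. A trimmed sketch of the call site (surrounding props elided; the `content` accessor is an assumption, not shown in the diff):

```tsx
<Markdown
  content={getMessageTextContent(message)} // assumed accessor, elided in the diff
  defaultShow={i >= messages.length - 6} // recent messages expand by default
  immediatelyRender={i >= messages.length - 3} // newest three skip lazy loading
  streaming={message.streaming} // in-flight responses take the streaming path
/>
```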

app/components/markdown.tsx

@@ -267,6 +267,136 @@ function tryWrapHtmlCode(text: string) {
   );
 }
 
+// Split content into paragraphs while preserving code blocks
+function splitContentIntoParagraphs(content: string) {
+  // Check for unclosed code blocks
+  const codeBlockStartCount = (content.match(/```/g) || []).length;
+  let processedContent = content;
+
+  // Add closing tag if there's an odd number of code block markers
+  if (codeBlockStartCount % 2 !== 0) {
+    processedContent = content + "\n```";
+  }
+
+  // Extract code blocks
+  const codeBlockRegex = /```[\s\S]*?```/g;
+  const codeBlocks: string[] = [];
+  let codeBlockCounter = 0;
+
+  // Replace code blocks with placeholders
+  const contentWithPlaceholders = processedContent.replace(
+    codeBlockRegex,
+    (match) => {
+      codeBlocks.push(match);
+      const placeholder = `__CODE_BLOCK_${codeBlockCounter++}__`;
+      return placeholder;
+    },
+  );
+
+  // Split by double newlines
+  const paragraphs = contentWithPlaceholders
+    .split(/\n\n+/)
+    .filter((p) => p.trim());
+
+  // Restore code blocks
+  return paragraphs.map((p) => {
+    if (p.match(/__CODE_BLOCK_\d+__/)) {
+      return p.replace(/__CODE_BLOCK_\d+__/g, (match) => {
+        const index = parseInt(match.match(/\d+/)?.[0] || "0");
+        return codeBlocks[index] || match;
+      });
+    }
+    return p;
+  });
+}
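
A quick illustration of the splitter's behavior, including how an unterminated fence (common mid-stream) is auto-closed before splitting (hypothetical input, not from the diff):

```ts
// Illustration only; not part of the diff.
const streamed = "Intro paragraph.\n\nSome detail.\n\n```ts\nconst x = 1;";

// The odd number of ``` markers triggers auto-closing, the fenced block is
// shielded behind a placeholder, and the split happens on blank lines only.
const parts = splitContentIntoParagraphs(streamed);
// parts[0] === "Intro paragraph."
// parts[1] === "Some detail."
// parts[2] === "```ts\nconst x = 1;\n```"  // fence was closed automatically
```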
+// Lazy-loaded paragraph component
+function MarkdownParagraph({
+  content,
+  onLoad,
+}: {
+  content: string;
+  onLoad?: () => void;
+}) {
+  const [isLoaded, setIsLoaded] = useState(false);
+  const placeholderRef = useRef<HTMLDivElement>(null);
+  const [isVisible, setIsVisible] = useState(false);
+
+  useEffect(() => {
+    let observer: IntersectionObserver;
+    if (placeholderRef.current) {
+      observer = new IntersectionObserver(
+        (entries) => {
+          if (entries[0].isIntersecting) {
+            setIsVisible(true);
+          }
+        },
+        { threshold: 0.1, rootMargin: "200px 0px" },
+      );
+      observer.observe(placeholderRef.current);
+    }
+    return () => observer?.disconnect();
+  }, []);
+
+  useEffect(() => {
+    if (isVisible && !isLoaded) {
+      setIsLoaded(true);
+      onLoad?.();
+    }
+  }, [isVisible, isLoaded, onLoad]);
+
+  // Generate preview content
+  const previewContent = useMemo(() => {
+    if (content.startsWith("```")) {
+      return "```" + (content.split("\n")[0] || "").slice(3) + "...```";
+    }
+    return content.length > 60 ? content.slice(0, 60) + "..." : content;
+  }, [content]);
+
+  return (
+    <div className="markdown-paragraph" ref={placeholderRef}>
+      {!isLoaded ? (
+        <div className="markdown-paragraph-placeholder">{previewContent}</div>
+      ) : (
+        <_MarkDownContent content={content} />
+      )}
+    </div>
+  );
+}
+
+// Memoized paragraph component to prevent unnecessary re-renders
+const MemoizedMarkdownParagraph = React.memo(
+  ({ content }: { content: string }) => {
+    return <_MarkDownContent content={content} />;
+  },
+  (prevProps, nextProps) => prevProps.content === nextProps.content,
+);
+
+MemoizedMarkdownParagraph.displayName = "MemoizedMarkdownParagraph";
+
+// Specialized component for streaming content
+function StreamingMarkdownContent({ content }: { content: string }) {
+  const paragraphs = useMemo(
+    () => splitContentIntoParagraphs(content),
+    [content],
+  );
+  const lastParagraphRef = useRef<HTMLDivElement>(null);
+
+  return (
+    <div className="markdown-streaming-content">
+      {paragraphs.map((paragraph, index) => (
+        <div
+          key={`p-${index}-${paragraph.substring(0, 20)}`}
+          className="markdown-paragraph markdown-streaming-paragraph"
+          ref={index === paragraphs.length - 1 ? lastParagraphRef : null}
+        >
+          <MemoizedMarkdownParagraph content={paragraph} />
+        </div>
+      ))}
+    </div>
+  );
+}
+
 function _MarkDownContent(props: { content: string }) {
   const escapedContent = useMemo(() => {
     return tryWrapHtmlCode(escapeBrackets(props.content));
@@ -326,9 +456,27 @@ export function Markdown(
     fontFamily?: string;
     parentRef?: RefObject<HTMLDivElement>;
     defaultShow?: boolean;
+    immediatelyRender?: boolean;
+    streaming?: boolean; // Whether this is a streaming response
   } & React.DOMAttributes<HTMLDivElement>,
 ) {
   const mdRef = useRef<HTMLDivElement>(null);
+
+  const paragraphs = useMemo(
+    () => splitContentIntoParagraphs(props.content),
+    [props.content],
+  );
+  const [loadedCount, setLoadedCount] = useState(0);
+
+  // Determine rendering strategy based on props
+  const shouldAsyncRender =
+    !props.immediatelyRender && !props.streaming && paragraphs.length > 1;
+
+  useEffect(() => {
+    // Immediately render all paragraphs if specified
+    if (props.immediatelyRender) {
+      setLoadedCount(paragraphs.length);
+    }
+  }, [props.immediatelyRender, paragraphs.length]);
 
   return (
     <div
@@ -344,6 +492,24 @@ export function Markdown(
     >
       {props.loading ? (
        <LoadingIcon />
+      ) : props.streaming ? (
+        // Use specialized component for streaming content
+        <StreamingMarkdownContent content={props.content} />
+      ) : shouldAsyncRender ? (
+        <div className="markdown-content">
+          {paragraphs.map((paragraph, index) => (
+            <MarkdownParagraph
+              key={index}
+              content={paragraph}
+              onLoad={() => setLoadedCount((prev) => prev + 1)}
+            />
+          ))}
+          {loadedCount < paragraphs.length && loadedCount > 0 && (
+            <div className="markdown-paragraph-loading">
+              <LoadingIcon />
+            </div>
+          )}
+        </div>
       ) : (
         <MarkdownContent content={props.content} />
       )}
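
Taken together, `Markdown` now picks one of four render paths. A hypothetical pure function summarizing the selection in the hunks above (illustration only):

```ts
type Strategy = "loading" | "streaming" | "lazy-paragraphs" | "single-pass";

// Mirrors the JSX above: the loading spinner wins, then the streaming
// renderer, then lazy per-paragraph rendering for multi-paragraph content,
// falling back to the original single-pass renderer.
function pickStrategy(props: {
  loading?: boolean;
  streaming?: boolean;
  immediatelyRender?: boolean;
  paragraphCount: number;
}): Strategy {
  if (props.loading) return "loading";
  if (props.streaming) return "streaming";
  if (!props.immediatelyRender && props.paragraphCount > 1)
    return "lazy-paragraphs";
  return "single-pass";
}
```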

app/constant.ts

@@ -221,7 +221,12 @@ export const ByteDance = {
 export const Alibaba = {
   ExampleEndpoint: ALIBABA_BASE_URL,
-  ChatPath: "v1/services/aigc/text-generation/generation",
+  ChatPath: (modelName: string) => {
+    if (modelName.includes("vl") || modelName.includes("omni")) {
+      return "v1/services/aigc/multimodal-generation/generation";
+    }
+    return `v1/services/aigc/text-generation/generation`;
+  },
 };
 
 export const Tencent = {
@@ -570,6 +575,9 @@ const alibabaModes = [
   "qwen-max-0403",
   "qwen-max-0107",
   "qwen-max-longcontext",
+  "qwen-omni-turbo",
+  "qwen-vl-plus",
+  "qwen-vl-max",
 ];
 
 const tencentModels = [
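
The endpoint now depends on the model name, so vision (`vl`) and omni models are routed to DashScope's multimodal generation path. Illustration (not from the diff):

```ts
// Endpoint selection by model-name substring, per the hunk above.
Alibaba.ChatPath("qwen-max"); // => "v1/services/aigc/text-generation/generation"
Alibaba.ChatPath("qwen-vl-plus"); // => "v1/services/aigc/multimodal-generation/generation" ("vl")
Alibaba.ChatPath("qwen-omni-turbo"); // => "v1/services/aigc/multimodal-generation/generation" ("omni")
```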

app/styles/markdown.scss

@@ -99,6 +99,7 @@
   font-size: 14px;
   line-height: 1.5;
   word-wrap: break-word;
+  margin-bottom: 0;
 }
 
 .light {
@@ -358,8 +359,14 @@
 .markdown-body kbd {
   display: inline-block;
   padding: 3px 5px;
-  font: 11px ui-monospace, SFMono-Regular, SF Mono, Menlo, Consolas,
-    Liberation Mono, monospace;
+  font:
+    11px ui-monospace,
+    SFMono-Regular,
+    SF Mono,
+    Menlo,
+    Consolas,
+    Liberation Mono,
+    monospace;
   line-height: 10px;
   color: var(--color-fg-default);
   vertical-align: middle;
@@ -448,16 +455,28 @@
 .markdown-body tt,
 .markdown-body code,
 .markdown-body samp {
-  font-family: ui-monospace, SFMono-Regular, SF Mono, Menlo, Consolas,
-    Liberation Mono, monospace;
+  font-family:
+    ui-monospace,
+    SFMono-Regular,
+    SF Mono,
+    Menlo,
+    Consolas,
+    Liberation Mono,
+    monospace;
   font-size: 12px;
 }
 
 .markdown-body pre {
   margin-top: 0;
   margin-bottom: 0;
-  font-family: ui-monospace, SFMono-Regular, SF Mono, Menlo, Consolas,
-    Liberation Mono, monospace;
+  font-family:
+    ui-monospace,
+    SFMono-Regular,
+    SF Mono,
+    Menlo,
+    Consolas,
+    Liberation Mono,
+    monospace;
   font-size: 12px;
   word-wrap: normal;
 }
@@ -1130,3 +1149,87 @@
 #dmermaid {
   display: none;
 }
+
+.markdown-content {
+  width: 100%;
+}
+
+.markdown-paragraph {
+  transition: opacity 0.3s ease;
+  margin-bottom: 0.5em;
+
+  &.markdown-paragraph-visible {
+    opacity: 1;
+  }
+
+  &.markdown-paragraph-hidden {
+    opacity: 0.7;
+  }
+}
+
+.markdown-paragraph-placeholder {
+  padding: 8px;
+  color: var(--color-fg-subtle);
+  background-color: var(--color-canvas-subtle);
+  border-radius: 6px;
+  border-left: 3px solid var(--color-border-muted);
+  white-space: nowrap;
+  overflow: hidden;
+  text-overflow: ellipsis;
+  font-family: var(--font-family-sans);
+  font-size: 14px;
+  min-height: 1.2em;
+}
+
+.markdown-paragraph-loading {
+  height: 20px;
+  background-color: var(--color-canvas-subtle);
+  border-radius: 6px;
+  margin-bottom: 8px;
+  position: relative;
+  overflow: hidden;
+
+  &::after {
+    content: "";
+    position: absolute;
+    top: 0;
+    left: 0;
+    width: 30%;
+    height: 100%;
+    background: linear-gradient(
+      90deg,
+      transparent,
+      rgba(255, 255, 255, 0.1),
+      transparent
+    );
+    animation: shimmer 1.5s infinite;
+  }
+}
+
+@keyframes shimmer {
+  0% {
+    transform: translateX(-100%);
+  }
+  100% {
+    transform: translateX(200%);
+  }
+}
+
+.markdown-streaming-content {
+  width: 100%;
+}
+
+.markdown-streaming-paragraph {
+  opacity: 1;
+  animation: fadeIn 0.3s ease-in-out;
+  margin-bottom: 0.5em;
+}
+
+@keyframes fadeIn {
+  from {
+    opacity: 0.5;
+  }
+  to {
+    opacity: 1;
  }
+}

app/utils/chat.ts

@@ -3,7 +3,7 @@ import {
   UPLOAD_URL,
   REQUEST_TIMEOUT_MS,
 } from "@/app/constant";
-import { RequestMessage } from "@/app/client/api";
+import { MultimodalContent, RequestMessage } from "@/app/client/api";
 import Locale from "@/app/locales";
 import {
   EventStreamContentType,
@@ -70,8 +70,9 @@ export function compressImage(file: Blob, maxSize: number): Promise<string> {
   });
 }
 
-export async function preProcessImageContent(
+export async function preProcessImageContentBase(
   content: RequestMessage["content"],
+  transformImageUrl: (url: string) => Promise<{ [key: string]: any }>,
 ) {
   if (typeof content === "string") {
     return content;
@@ -81,7 +82,7 @@ export async function preProcessImageContent(
     if (part?.type == "image_url" && part?.image_url?.url) {
       try {
         const url = await cacheImageToBase64Image(part?.image_url?.url);
-        result.push({ type: part.type, image_url: { url } });
+        result.push(await transformImageUrl(url));
       } catch (error) {
         console.error("Error processing image URL:", error);
       }
@@ -92,6 +93,23 @@ export async function preProcessImageContent(
   return result;
 }
 
+export async function preProcessImageContent(
+  content: RequestMessage["content"],
+) {
+  return preProcessImageContentBase(content, async (url) => ({
+    type: "image_url",
+    image_url: { url },
+  })) as Promise<MultimodalContent[] | string>;
+}
+
+export async function preProcessImageContentForAlibabaDashScope(
+  content: RequestMessage["content"],
+) {
+  return preProcessImageContentBase(content, async (url) => ({
+    image: url,
+  }));
+}
+
 const imageCaches: Record<string, string> = {};
 export function cacheImageToBase64Image(imageUrl: string) {
   if (imageUrl.includes(CACHE_URL_PREFIX)) {
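
The refactoring turns the old image preprocessor into a generic walker that converts image URLs to base64 and delegates the final part shape to a callback, so supporting another provider only requires a new transform. A sketch of what a third wrapper could look like (provider and field names are hypothetical):

```ts
// Hypothetical example of extending preProcessImageContentBase for another
// provider; the shape below is made up and not part of the diff.
export async function preProcessImageContentForSomeProvider(
  content: RequestMessage["content"],
) {
  return preProcessImageContentBase(content, async (url) => ({
    type: "image",
    source: { kind: "base64", data: url }, // url is a base64 data URL by this point
  }));
}
```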

jest.config.ts

@@ -15,6 +15,8 @@ const config: Config = {
   moduleNameMapper: {
     "^@/(.*)$": "<rootDir>/$1",
   },
+  extensionsToTreatAsEsm: [".ts", ".tsx"],
+  injectGlobals: true,
 };
 
 // createJestConfig is exported this way to ensure that next/jest can load the Next.js config which is async
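
For context, an annotated sketch of the same config block (comments are mine; option semantics are from the Jest docs):

```ts
import type { Config } from "jest";

const config: Config = {
  moduleNameMapper: {
    "^@/(.*)$": "<rootDir>/$1",
  },
  // Run .ts/.tsx test files as native ES modules rather than CommonJS.
  extensionsToTreatAsEsm: [".ts", ".tsx"],
  // Keep describe/test/expect injected as globals; note that under ESM the
  // `jest` object itself still has to be imported from "@jest/globals",
  // which is why the setup file and tests below add that import.
  injectGlobals: true,
};
```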

jest.setup.ts

@@ -1,24 +1,22 @@
 // Learn more: https://github.com/testing-library/jest-dom
 import "@testing-library/jest-dom";
+import { jest } from "@jest/globals";
 
 global.fetch = jest.fn(() =>
   Promise.resolve({
     ok: true,
     status: 200,
-    json: () => Promise.resolve({}),
+    json: () => Promise.resolve([]),
     headers: new Headers(),
     redirected: false,
     statusText: "OK",
     type: "basic",
     url: "",
-    clone: function () {
-      return this;
-    },
     body: null,
     bodyUsed: false,
     arrayBuffer: () => Promise.resolve(new ArrayBuffer(0)),
     blob: () => Promise.resolve(new Blob()),
     formData: () => Promise.resolve(new FormData()),
     text: () => Promise.resolve(""),
-  }),
+  } as Response),
 );

package.json

@@ -17,8 +17,8 @@
     "prompts": "node ./scripts/fetch-prompts.mjs",
     "prepare": "husky install",
     "proxy-dev": "sh ./scripts/init-proxy.sh && proxychains -f ./scripts/proxychains.conf yarn dev",
-    "test": "jest --watch",
-    "test:ci": "jest --ci"
+    "test": "node --no-warnings --experimental-vm-modules $(yarn bin jest) --watch",
+    "test:ci": "node --no-warnings --experimental-vm-modules $(yarn bin jest) --ci"
   },
   "dependencies": {
     "@fortaine/fetch-event-source": "^3.0.6",

test/vision-model-checker.test.ts

@@ -1,3 +1,4 @@
+import { jest } from "@jest/globals";
 import { isVisionModel } from "../app/utils";
 
 describe("isVisionModel", () => {
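
A minimal sketch of a test running under this ESM setup, relying on the fetch stub from jest.setup.ts above (hypothetical test, not part of the diff):

```ts
import { expect, jest, test } from "@jest/globals"; // explicit imports are required for `jest` under ESM

test("fetch is stubbed by jest.setup.ts", async () => {
  const res = await fetch("https://example.invalid"); // any URL hits the mock
  expect(res.ok).toBe(true); // the stub always resolves with ok: true
  expect(await res.json()).toEqual([]); // and now returns an empty array body
});
```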