support deepseek-r1 via the OpenAI-compatible provider: read reasoning_content and parse <think></think> from the stream

This commit is contained in:
suruiqiang
2025-02-11 18:57:16 +08:00
parent b0758cccde
commit 9714258322
2 changed files with 55 additions and 3 deletions

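Background for the diff below: the parseSSE callback passed to streamWithThink no longer returns a plain content string; it returns an { isThinking, content } pair so reasoning output can be rendered separately from the answer. The streamWithThink helper itself lives in app/utils/chat (the second file changed by this commit, not shown here), so the following is only a rough sketch of that contract; ThinkChunk, parseDelta, and splitThinkTags are hypothetical names, and the inline <think></think> handling is assumed from the commit title rather than taken from the real implementation.

// Hypothetical, simplified sketch: NOT the actual streamWithThink helper.
interface ThinkChunk {
  isThinking: boolean;
  content: string;
}

// Reduce one OpenAI-compatible SSE delta to the { isThinking, content } shape
// returned by the parseSSE callback in the diff below. DeepSeek-R1 style
// endpoints put reasoning text in `reasoning_content`.
function parseDelta(delta: {
  content?: string | null;
  reasoning_content?: string | null;
}): ThinkChunk {
  const reasoning = delta.reasoning_content ?? "";
  if (reasoning.trim().length > 0) {
    return { isThinking: true, content: reasoning };
  }
  return { isThinking: false, content: delta.content ?? "" };
}

// Minimal stateful splitter for models that stream <think>...</think> inline:
// text between the tags is reported as thinking, everything else as content.
// Assumes each tag arrives unbroken inside a single chunk.
function splitThinkTags(
  chunk: string,
  state: { inThink: boolean },
): ThinkChunk[] {
  const out: ThinkChunk[] = [];
  let rest = chunk;
  while (rest.length > 0) {
    const tag = state.inThink ? "</think>" : "<think>";
    const idx = rest.indexOf(tag);
    if (idx === -1) {
      out.push({ isThinking: state.inThink, content: rest });
      break;
    }
    if (idx > 0) {
      out.push({ isThinking: state.inThink, content: rest.slice(0, idx) });
    }
    state.inThink = !state.inThink;
    rest = rest.slice(idx + tag.length);
  }
  return out;
}

// Example: parseDelta({ reasoning_content: "step 1..." }) -> { isThinking: true, ... }
//          splitThinkTags("<think>plan</think>answer", { inThink: false })
//            -> [{ isThinking: true, content: "plan" }, { isThinking: false, content: "answer" }]

In the sketch, reasoning_content takes priority when present, which matches the order of checks added to the parseSSE callback in the diff below.
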
@@ -22,7 +22,7 @@ import {
   preProcessImageContent,
   uploadImage,
   base64Image2Blob,
-  stream,
+  streamWithThink,
 } from "@/app/utils/chat";
 import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
 import { ModelSize, DalleQuality, DalleStyle } from "@/app/typing";
@@ -294,7 +294,7 @@ export class ChatGPTApi implements LLMApi {
           useChatStore.getState().currentSession().mask?.plugin || [],
         );
       // console.log("getAsTools", tools, funcs);
-      stream(
+      streamWithThink(
         chatPath,
         requestPayload,
         getHeaders(),
@@ -309,8 +309,12 @@ export class ChatGPTApi implements LLMApi {
             delta: {
               content: string;
               tool_calls: ChatMessageTool[];
+              reasoning_content: string | null;
             };
           }>;
+          if (!choices?.length) return { isThinking: false, content: "" };
           const tool_calls = choices[0]?.delta?.tool_calls;
           if (tool_calls?.length > 0) {
             const id = tool_calls[0]?.id;
@@ -330,7 +334,37 @@ export class ChatGPTApi implements LLMApi {
               runTools[index]["function"]["arguments"] += args;
             }
           }
-          return choices[0]?.delta?.content;
+          const reasoning = choices[0]?.delta?.reasoning_content;
+          const content = choices[0]?.delta?.content;
+          // Skip if both content and reasoning_content are empty or null
+          if (
+            (!reasoning || reasoning.trim().length === 0) &&
+            (!content || content.trim().length === 0)
+          ) {
+            return {
+              isThinking: false,
+              content: "",
+            };
+          }
+          if (reasoning && reasoning.trim().length > 0) {
+            return {
+              isThinking: true,
+              content: reasoning,
+            };
+          } else if (content && content.trim().length > 0) {
+            return {
+              isThinking: false,
+              content: content,
+            };
+          }
+          return {
+            isThinking: false,
+            content: "",
+          };
         },
         // processToolMessage, include tool_calls message and tool call results
         (