fix: claude function call

This commit is contained in:
Hk-Gosuto 2024-11-25 09:25:51 +00:00 committed by GitHub
parent 6385ba2c64
commit 3369532af9
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 170 additions and 3 deletions

View File

@ -1,4 +1,9 @@
import { Anthropic, ApiPath } from "@/app/constant";
import {
Anthropic,
ApiPath,
REQUEST_TIMEOUT_MS,
ServiceProvider,
} from "@/app/constant";
import {
AgentChatOptions,
ChatOptions,
@ -22,6 +27,11 @@ import { preProcessImageContent, stream } from "@/app/utils/chat";
import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
import { RequestPayload } from "./openai";
import { fetch } from "@/app/utils/stream";
import {
EventStreamContentType,
fetchEventSource,
} from "@fortaine/fetch-event-source";
import Locale from "../../locales";
export type MultiBlockContent = {
type: "image" | "text";
@ -85,9 +95,163 @@ export class ClaudeApi implements LLMApi {
/**
 * Audio transcription is not supported by this Claude client.
 * Throws synchronously; callers should route transcription to a
 * provider that implements it.
 */
transcription(options: TranscriptionOptions): Promise<string> {
throw new Error("Method not implemented.");
}
toolAgentChat(options: AgentChatOptions): Promise<void> {
throw new Error("Method not implemented.");
async toolAgentChat(options: AgentChatOptions) {
const visionModel = isVisionModel(options.config.model);
const messages: AgentChatOptions["messages"] = [];
for (const v of options.messages) {
const content = visionModel
? await preProcessImageContent(v.content)
: getMessageTextContent(v);
messages.push({ role: v.role, content });
}
const modelConfig = {
...useAppConfig.getState().modelConfig,
...useChatStore.getState().currentSession().mask.modelConfig,
...{
model: options.config.model,
},
};
const accessStore = useAccessStore.getState();
let baseUrl = accessStore.anthropicUrl;
const requestPayload = {
chatSessionId: options.chatSessionId,
messages,
isAzure: false,
azureApiVersion: accessStore.azureApiVersion,
stream: options.config.stream,
model: modelConfig.model,
temperature: modelConfig.temperature,
presence_penalty: modelConfig.presence_penalty,
frequency_penalty: modelConfig.frequency_penalty,
top_p: modelConfig.top_p,
baseUrl: baseUrl,
maxIterations: options.agentConfig.maxIterations,
returnIntermediateSteps: options.agentConfig.returnIntermediateSteps,
useTools: options.agentConfig.useTools,
provider: ServiceProvider.Anthropic,
};
console.log("[Request] anthropic payload: ", requestPayload);
const shouldStream = true;
const controller = new AbortController();
options.onController?.(controller);
try {
let path = "/api/langchain/tool/agent/";
const enableNodeJSPlugin = !!process.env.NEXT_PUBLIC_ENABLE_NODEJS_PLUGIN;
path = enableNodeJSPlugin ? path + "nodejs" : path + "edge";
const chatPayload = {
method: "POST",
body: JSON.stringify(requestPayload),
signal: controller.signal,
headers: getHeaders(),
};
// make a fetch request
const requestTimeoutId = setTimeout(
() => controller.abort(),
REQUEST_TIMEOUT_MS,
);
// console.log("shouldStream", shouldStream);
if (shouldStream) {
let responseText = "";
let finished = false;
const finish = () => {
if (!finished) {
options.onFinish(responseText);
finished = true;
}
};
controller.signal.onabort = finish;
fetchEventSource(path, {
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);
const contentType = res.headers.get("content-type");
console.log(
"[OpenAI] request response content type: ",
contentType,
);
if (contentType?.startsWith("text/plain")) {
responseText = await res.clone().text();
return finish();
}
if (
!res.ok ||
!res.headers
.get("content-type")
?.startsWith(EventStreamContentType) ||
res.status !== 200
) {
const responseTexts = [responseText];
let extraInfo = await res.clone().text();
console.warn(`extraInfo: ${extraInfo}`);
if (res.status === 401) {
responseTexts.push(Locale.Error.Unauthorized);
}
if (extraInfo) {
responseTexts.push(extraInfo);
}
responseText = responseTexts.join("\n\n");
return finish();
}
},
onmessage(msg) {
let response = JSON.parse(msg.data);
if (!response.isSuccess) {
console.error("[Request]", msg.data);
responseText = msg.data;
throw Error(response.message);
}
if (msg.data === "[DONE]" || finished) {
return finish();
}
try {
if (response && !response.isToolMessage) {
responseText += response.message;
options.onUpdate?.(responseText, response.message);
} else {
options.onToolUpdate?.(response.toolName!, response.message);
}
} catch (e) {
console.error("[Request] parse error", response, msg);
}
},
onclose() {
finish();
},
onerror(e) {
options.onError?.(e);
throw e;
},
openWhenHidden: true,
});
} else {
const res = await fetch(path, chatPayload);
clearTimeout(requestTimeoutId);
const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message);
}
} catch (e) {
console.log("[Request] failed to make a chat reqeust", e);
options.onError?.(e as Error);
}
}
/**
 * RAG store creation is not supported by this Claude client.
 * Throws synchronously; no store id is ever produced here.
 */
createRAGStore(options: CreateRAGStoreOptions): Promise<string> {
throw new Error("Method not implemented.");
}

View File

@ -318,6 +318,9 @@ export function isFunctionCallModel(modelName: string) {
"claude-3-opus-20240229",
"claude-3-haiku-20240307",
"claude-3-5-sonnet-20240620",
"claude-3-5-sonnet-20241022",
"claude-3-5-sonnet-latest",
"claude-3-5-haiku-latest",
];
if (specialModels.some((keyword) => modelName === keyword)) return true;
return DEFAULT_MODELS.filter(