merge main

lloydzhou committed 2024-10-09 18:27:23 +08:00
31 changed files with 417 additions and 154 deletions

View File

@@ -6,7 +6,7 @@ import { NextRequest, NextResponse } from "next/server";
import { auth } from "./auth";
import { requestOpenai } from "./common";
-const ALLOWD_PATH = new Set(Object.values(OpenaiPath));
+const ALLOWED_PATH = new Set(Object.values(OpenaiPath));
function getModels(remoteModelRes: OpenAIListModelResponse) {
const config = getServerSideConfig();
@@ -34,7 +34,7 @@ export async function handle(
const subpath = params.path.join("/");
-if (!ALLOWD_PATH.has(subpath)) {
+if (!ALLOWED_PATH.has(subpath)) {
console.log("[OpenAI Route] forbidden path ", subpath);
return NextResponse.json(
{

View File

@@ -231,7 +231,7 @@ export function getHeaders(ignoreHeaders: boolean = false) {
function getConfig() {
const modelConfig = chatStore.currentSession().mask.modelConfig;
-const isGoogle = modelConfig.providerName == ServiceProvider.Google;
+const isGoogle = modelConfig.providerName === ServiceProvider.Google;
const isAzure = modelConfig.providerName === ServiceProvider.Azure;
const isAnthropic = modelConfig.providerName === ServiceProvider.Anthropic;
const isBaidu = modelConfig.providerName == ServiceProvider.Baidu;

View File

@@ -23,6 +23,7 @@ import {
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import { getMessageTextContent } from "@/app/utils";
+import { fetch } from "@/app/utils/stream";
export interface OpenAIListModelResponse {
object: string;
@@ -178,6 +179,7 @@ export class QwenApi implements LLMApi {
controller.signal.onabort = finish;
fetchEventSource(chatPath, {
+fetch: fetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);
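
This pair of changes, importing the Tauri-aware fetch shim and threading it into fetchEventSource, repeats across most of the provider clients below; it routes SSE requests through the shim, which falls back to window.fetch outside Tauri. A minimal sketch of the pattern, with a hypothetical endpoint and payload:

  import { fetchEventSource } from "@fortaine/fetch-event-source";
  import { fetch } from "@/app/utils/stream";

  // hypothetical SSE request; `fetch` streams via Tauri in the app build,
  // and is plain window.fetch in the browser
  fetchEventSource("/api/chat", {
    fetch: fetch as any,
    method: "POST",
    body: JSON.stringify({ messages: [] }),
    onmessage(msg) {
      console.log("chunk:", msg.data);
    },
  });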

View File

@@ -8,7 +8,7 @@ import {
ChatMessageTool,
} from "@/app/store";
import { getClientConfig } from "@/app/config/client";
-import { DEFAULT_API_HOST } from "@/app/constant";
+import { ANTHROPIC_BASE_URL } from "@/app/constant";
import { getMessageTextContent, isVisionModel } from "@/app/utils";
import { preProcessImageContent, stream } from "@/app/utils/chat";
import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
@@ -388,9 +388,7 @@ export class ClaudeApi implements LLMApi {
if (baseUrl.trim().length === 0) {
const isApp = !!getClientConfig()?.isApp;
-baseUrl = isApp
-? DEFAULT_API_HOST + "/api/proxy/anthropic"
-: ApiPath.Anthropic;
+baseUrl = isApp ? ANTHROPIC_BASE_URL : ApiPath.Anthropic;
}
if (!baseUrl.startsWith("http") && !baseUrl.startsWith("/api")) {

View File

@@ -24,6 +24,7 @@ import {
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import { getMessageTextContent } from "@/app/utils";
+import { fetch } from "@/app/utils/stream";
export interface OpenAIListModelResponse {
object: string;
@@ -197,6 +198,7 @@ export class ErnieApi implements LLMApi {
controller.signal.onabort = finish;
fetchEventSource(chatPath, {
+fetch: fetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);

View File

@@ -23,6 +23,7 @@ import {
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import { getMessageTextContent } from "@/app/utils";
+import { fetch } from "@/app/utils/stream";
export interface OpenAIListModelResponse {
object: string;
@@ -165,6 +166,7 @@ export class DoubaoApi implements LLMApi {
controller.signal.onabort = finish;
fetchEventSource(chatPath, {
+fetch: fetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);

View File

@@ -16,7 +16,7 @@ import {
} from "@/app/store";
import { stream } from "@/app/utils/chat";
import { getClientConfig } from "@/app/config/client";
-import { DEFAULT_API_HOST } from "@/app/constant";
+import { GEMINI_BASE_URL } from "@/app/constant";
import {
getMessageTextContent,
@@ -26,6 +26,7 @@ import {
import { preProcessImageContent } from "@/app/utils/chat";
import { nanoid } from "nanoid";
import { RequestPayload } from "./openai";
+import { fetch } from "@/app/utils/stream";
export class GeminiProApi implements LLMApi {
path(path: string): string {
@@ -38,7 +39,7 @@ export class GeminiProApi implements LLMApi {
const isApp = !!getClientConfig()?.isApp;
if (baseUrl.length === 0) {
-baseUrl = isApp ? DEFAULT_API_HOST + `/api/proxy/google` : ApiPath.Google;
+baseUrl = isApp ? GEMINI_BASE_URL : ApiPath.Google;
}
if (baseUrl.endsWith("/")) {
baseUrl = baseUrl.slice(0, baseUrl.length - 1);

View File

@@ -1,7 +1,7 @@
"use client";
import {
ApiPath,
-DEFAULT_API_HOST,
+IFLYTEK_BASE_URL,
Iflytek,
REQUEST_TIMEOUT_MS,
} from "@/app/constant";
@@ -22,6 +22,7 @@ import {
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import { getMessageTextContent } from "@/app/utils";
+import { fetch } from "@/app/utils/stream";
import { RequestPayload } from "./openai";
@@ -40,7 +41,7 @@ export class SparkApi implements LLMApi {
if (baseUrl.length === 0) {
const isApp = !!getClientConfig()?.isApp;
const apiPath = ApiPath.Iflytek;
-baseUrl = isApp ? DEFAULT_API_HOST + "/proxy" + apiPath : apiPath;
+baseUrl = isApp ? IFLYTEK_BASE_URL : apiPath;
}
if (baseUrl.endsWith("/")) {
@@ -149,6 +150,7 @@ export class SparkApi implements LLMApi {
controller.signal.onabort = finish;
fetchEventSource(chatPath, {
+fetch: fetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);

View File

@@ -2,7 +2,7 @@
// azure and openai, using same models. so using same LLMApi.
import {
ApiPath,
-DEFAULT_API_HOST,
+MOONSHOT_BASE_URL,
Moonshot,
REQUEST_TIMEOUT_MS,
} from "@/app/constant";
@@ -40,7 +40,7 @@ export class MoonshotApi implements LLMApi {
if (baseUrl.length === 0) {
const isApp = !!getClientConfig()?.isApp;
const apiPath = ApiPath.Moonshot;
-baseUrl = isApp ? DEFAULT_API_HOST + "/proxy" + apiPath : apiPath;
+baseUrl = isApp ? MOONSHOT_BASE_URL : apiPath;
}
if (baseUrl.endsWith("/")) {

View File

@@ -2,7 +2,7 @@
// azure and openai, using same models. so using same LLMApi.
import {
ApiPath,
-DEFAULT_API_HOST,
+OPENAI_BASE_URL,
DEFAULT_MODELS,
OpenaiPath,
Azure,
@@ -98,7 +98,7 @@ export class ChatGPTApi implements LLMApi {
if (baseUrl.length === 0) {
const isApp = !!getClientConfig()?.isApp;
const apiPath = isAzure ? ApiPath.Azure : ApiPath.OpenAI;
-baseUrl = isApp ? DEFAULT_API_HOST + "/proxy" + apiPath : apiPath;
+baseUrl = isApp ? OPENAI_BASE_URL : apiPath;
}
if (baseUrl.endsWith("/")) {

View File

@@ -1,5 +1,5 @@
"use client";
-import { ApiPath, DEFAULT_API_HOST, REQUEST_TIMEOUT_MS } from "@/app/constant";
+import { ApiPath, TENCENT_BASE_URL, REQUEST_TIMEOUT_MS } from "@/app/constant";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import {
@@ -22,6 +22,7 @@ import mapKeys from "lodash-es/mapKeys";
import mapValues from "lodash-es/mapValues";
import isArray from "lodash-es/isArray";
import isObject from "lodash-es/isObject";
+import { fetch } from "@/app/utils/stream";
export interface OpenAIListModelResponse {
object: string;
@@ -70,9 +71,7 @@ export class HunyuanApi implements LLMApi {
if (baseUrl.length === 0) {
const isApp = !!getClientConfig()?.isApp;
-baseUrl = isApp
-? DEFAULT_API_HOST + "/api/proxy/tencent"
-: ApiPath.Tencent;
+baseUrl = isApp ? TENCENT_BASE_URL : ApiPath.Tencent;
}
if (baseUrl.endsWith("/")) {
@@ -179,6 +178,7 @@ export class HunyuanApi implements LLMApi {
controller.signal.onabort = finish;
fetchEventSource(chatPath, {
+fetch: fetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);

View File

@@ -1815,6 +1815,7 @@ function _Chat() {
{message?.tools?.map((tool) => (
<div
key={tool.id}
+title={tool?.errorMsg}
className={styles["chat-message-tool"]}
>
{tool.isError === false ? (

View File

@@ -207,23 +207,6 @@ function CustomCode(props: { children: any; className?: string }) {
);
}
-function escapeDollarNumber(text: string) {
-let escapedText = "";
-for (let i = 0; i < text.length; i += 1) {
-let char = text[i];
-const nextChar = text[i + 1] || " ";
-if (char === "$" && nextChar >= "0" && nextChar <= "9") {
-char = "\\$";
-}
-escapedText += char;
-}
-return escapedText;
-}
function escapeBrackets(text: string) {
const pattern =
/(```[\s\S]*?```|`.*?`)|\\\[([\s\S]*?[^\\])\\\]|\\\((.*?)\\\)/g;
@@ -261,7 +244,7 @@ function tryWrapHtmlCode(text: string) {
function _MarkDownContent(props: { content: string }) {
const escapedContent = useMemo(() => {
-return tryWrapHtmlCode(escapeBrackets(escapeDollarNumber(props.content)));
+return tryWrapHtmlCode(escapeBrackets(props.content));
}, [props.content]);
return (
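
For context, the deleted helper escaped a dollar sign immediately followed by a digit so that ordinary prices were not parsed as inline math; after this commit such text flows straight into escapeBrackets. A sketch of the old behavior, written as comments since the helper no longer exists:

  // before this commit:
  //   escapeDollarNumber("costs $15") === "costs \\$15"  // kept "$15" away from the math renderer
  // after: "$15" passes through unescaped and may be picked up as LaTeX input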

View File

@@ -11,7 +11,6 @@ export const RUNTIME_CONFIG_DOM = "danger-runtime-config";
export const STABILITY_BASE_URL = "https://api.stability.ai";
-export const DEFAULT_API_HOST = "https://api.nextchat.dev";
export const OPENAI_BASE_URL = "https://api.openai.com";
export const ANTHROPIC_BASE_URL = "https://api.anthropic.com";

View File

@@ -1,9 +1,18 @@
import {
-ApiPath,
-DEFAULT_API_HOST,
GoogleSafetySettingsThreshold,
ServiceProvider,
StoreKey,
+ApiPath,
+OPENAI_BASE_URL,
+ANTHROPIC_BASE_URL,
+GEMINI_BASE_URL,
+BAIDU_BASE_URL,
+BYTEDANCE_BASE_URL,
+ALIBABA_BASE_URL,
+TENCENT_BASE_URL,
+MOONSHOT_BASE_URL,
+STABILITY_BASE_URL,
+IFLYTEK_BASE_URL,
} from "../constant";
import { getHeaders } from "../client/api";
import { getClientConfig } from "../config/client";
@@ -15,45 +24,25 @@ let fetchState = 0; // 0 not fetch, 1 fetching, 2 done
const isApp = getClientConfig()?.buildMode === "export";
-const DEFAULT_OPENAI_URL = isApp
-? DEFAULT_API_HOST + "/api/proxy/openai"
-: ApiPath.OpenAI;
+const DEFAULT_OPENAI_URL = isApp ? OPENAI_BASE_URL : ApiPath.OpenAI;
-const DEFAULT_GOOGLE_URL = isApp
-? DEFAULT_API_HOST + "/api/proxy/google"
-: ApiPath.Google;
+const DEFAULT_GOOGLE_URL = isApp ? GEMINI_BASE_URL : ApiPath.Google;
-const DEFAULT_ANTHROPIC_URL = isApp
-? DEFAULT_API_HOST + "/api/proxy/anthropic"
-: ApiPath.Anthropic;
+const DEFAULT_ANTHROPIC_URL = isApp ? ANTHROPIC_BASE_URL : ApiPath.Anthropic;
-const DEFAULT_BAIDU_URL = isApp
-? DEFAULT_API_HOST + "/api/proxy/baidu"
-: ApiPath.Baidu;
+const DEFAULT_BAIDU_URL = isApp ? BAIDU_BASE_URL : ApiPath.Baidu;
-const DEFAULT_BYTEDANCE_URL = isApp
-? DEFAULT_API_HOST + "/api/proxy/bytedance"
-: ApiPath.ByteDance;
+const DEFAULT_BYTEDANCE_URL = isApp ? BYTEDANCE_BASE_URL : ApiPath.ByteDance;
-const DEFAULT_ALIBABA_URL = isApp
-? DEFAULT_API_HOST + "/api/proxy/alibaba"
-: ApiPath.Alibaba;
+const DEFAULT_ALIBABA_URL = isApp ? ALIBABA_BASE_URL : ApiPath.Alibaba;
-const DEFAULT_TENCENT_URL = isApp
-? DEFAULT_API_HOST + "/api/proxy/tencent"
-: ApiPath.Tencent;
+const DEFAULT_TENCENT_URL = isApp ? TENCENT_BASE_URL : ApiPath.Tencent;
-const DEFAULT_MOONSHOT_URL = isApp
-? DEFAULT_API_HOST + "/api/proxy/moonshot"
-: ApiPath.Moonshot;
+const DEFAULT_MOONSHOT_URL = isApp ? MOONSHOT_BASE_URL : ApiPath.Moonshot;
-const DEFAULT_STABILITY_URL = isApp
-? DEFAULT_API_HOST + "/api/proxy/stability"
-: ApiPath.Stability;
+const DEFAULT_STABILITY_URL = isApp ? STABILITY_BASE_URL : ApiPath.Stability;
-const DEFAULT_IFLYTEK_URL = isApp
-? DEFAULT_API_HOST + "/api/proxy/iflytek"
-: ApiPath.Iflytek;
+const DEFAULT_IFLYTEK_URL = isApp ? IFLYTEK_BASE_URL : ApiPath.Iflytek;
const DEFAULT_ACCESS_STATE = {
accessCode: "",
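
Every DEFAULT_*_URL above collapses the old DEFAULT_API_HOST + "/api/proxy/…" expression into the same one-line shape: the provider's base URL in the app build, the in-app API path on the web. If the list keeps growing it could be factored into a helper; a hypothetical sketch, not part of this commit:

  // hypothetical helper; the commit spells each constant out instead
  const defaultEndpoint = (baseUrl: string, apiPath: ApiPath): string =>
    isApp ? baseUrl : (apiPath as string);

  const DEFAULT_OPENAI_URL = defaultEndpoint(OPENAI_BASE_URL, ApiPath.OpenAI);

Spelling each constant out does keep every provider's default directly greppable, which may be why the indirection was avoided.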

View File

@@ -16,6 +16,9 @@ import {
DEFAULT_SYSTEM_TEMPLATE,
KnowledgeCutOffDate,
StoreKey,
+SUMMARIZE_MODEL,
+GEMINI_SUMMARIZE_MODEL,
+ServiceProvider,
} from "../constant";
import Locale, { getLang } from "../locales";
import { isDalle3, safeLocalStorage } from "../utils";
@@ -23,6 +26,8 @@ import { prettyObject } from "../utils/format";
import { createPersistStore } from "../utils/store";
import { estimateTokenLength } from "../utils/token";
import { ModelConfig, ModelType, useAppConfig } from "./config";
+import { useAccessStore } from "./access";
+import { collectModelsWithDefaultModel } from "../utils/model";
import { createEmptyMask, Mask } from "./mask";
const localStorage = safeLocalStorage();
@@ -37,6 +42,7 @@ export type ChatMessageTool = {
};
content?: string;
isError?: boolean;
+errorMsg?: string;
};
export type ChatMessage = RequestMessage & {
@@ -102,6 +108,35 @@ function createEmptySession(): ChatSession {
};
}
+function getSummarizeModel(
+currentModel: string,
+providerName: string,
+): string[] {
+// if it is using gpt-* models, force to use 4o-mini to summarize
+if (currentModel.startsWith("gpt") || currentModel.startsWith("chatgpt")) {
+const configStore = useAppConfig.getState();
+const accessStore = useAccessStore.getState();
+const allModel = collectModelsWithDefaultModel(
+configStore.models,
+[configStore.customModels, accessStore.customModels].join(","),
+accessStore.defaultModel,
+);
+const summarizeModel = allModel.find(
+(m) => m.name === SUMMARIZE_MODEL && m.available,
+);
+if (summarizeModel) {
+return [
+summarizeModel.name,
+summarizeModel.provider?.providerName as string,
+];
+}
+}
+if (currentModel.startsWith("gemini")) {
+return [GEMINI_SUMMARIZE_MODEL, ServiceProvider.Google];
+}
+return [currentModel, providerName];
+}
function countMessages(msgs: ChatMessage[]) {
return msgs.reduce(
(pre, cur) => pre + estimateTokenLength(getMessageTextContent(cur)),
@@ -578,8 +613,14 @@ export const useChatStore = createPersistStore(
return;
}
-const providerName = modelConfig.compressProviderName;
-const api: ClientApi = getClientApi(providerName);
+// if compressModel is not configured, fall back to getSummarizeModel
+const [model, providerName] = modelConfig.compressModel
+? [modelConfig.compressModel, modelConfig.compressProviderName]
+: getSummarizeModel(
+session.mask.modelConfig.model,
+session.mask.modelConfig.providerName,
+);
+const api: ClientApi = getClientApi(providerName as ServiceProvider);
// remove error messages if any
const messages = session.messages;
@@ -610,7 +651,7 @@ export const useChatStore = createPersistStore(
api.llm.chat({
messages: topicMessages,
config: {
-model: modelConfig.compressModel,
+model,
stream: false,
providerName,
},
@@ -674,7 +715,8 @@ export const useChatStore = createPersistStore(
config: {
...modelcfg,
stream: true,
-model: modelConfig.compressModel,
+model,
+providerName,
},
onUpdate(message) {
session.memoryPrompt = message;
@@ -727,7 +769,7 @@ export const useChatStore = createPersistStore(
},
{
name: StoreKey.Chat,
-version: 3.2,
+version: 3.3,
migrate(persistedState, version) {
const state = persistedState as any;
const newState = JSON.parse(
@@ -783,6 +825,14 @@ export const useChatStore = createPersistStore(
config.modelConfig.compressProviderName;
});
}
+// revert default summarize model for every session
+if (version < 3.3) {
+newState.sessions.forEach((s) => {
+const config = useAppConfig.getState();
+s.mask.modelConfig.compressModel = "";
+s.mask.modelConfig.compressProviderName = "";
+});
+}
return newState as any;
},
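
To make the new fallback concrete: when compressModel is left empty, getSummarizeModel picks the summarizer from the chat model. A sketch of the expected results, assuming SUMMARIZE_MODEL is "gpt-4o-mini", it appears as available in the collected model list, and its provider resolves to OpenAI:

  getSummarizeModel("gpt-4o", "OpenAI");
  // -> ["gpt-4o-mini", "OpenAI"]           (gpt-*/chatgpt-* forced to the cheap summarizer)
  getSummarizeModel("gemini-1.5-pro", "Google");
  // -> [GEMINI_SUMMARIZE_MODEL, "Google"]  (gemini-* uses the Gemini summarizer)
  getSummarizeModel("qwen-turbo", "Alibaba");
  // -> ["qwen-turbo", "Alibaba"]           (everything else summarizes with itself)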

View File

@@ -71,8 +71,8 @@ export const DEFAULT_CONFIG = {
sendMemory: true,
historyMessageCount: 4,
compressMessageLengthThreshold: 1000,
compressModel: "gpt-4o-mini" as ModelType,
compressProviderName: "OpenAI" as ServiceProvider,
compressModel: "",
compressProviderName: "",
enableInjectSystemPrompts: true,
template: config?.template ?? DEFAULT_INPUT_TEMPLATE,
size: "1024x1024" as DalleSize,
@@ -178,7 +178,7 @@ export const useAppConfig = createPersistStore(
}),
{
name: StoreKey.Config,
-version: 4,
+version: 4.1,
merge(persistedState, currentState) {
const state = persistedState as ChatConfig | undefined;
@@ -231,7 +231,7 @@ export const useAppConfig = createPersistStore(
: config?.template ?? DEFAULT_INPUT_TEMPLATE;
}
-if (version < 4) {
+if (version < 4.1) {
state.modelConfig.compressModel =
DEFAULT_CONFIG.modelConfig.compressModel;
state.modelConfig.compressProviderName =

View File

@@ -7,7 +7,7 @@ import yaml from "js-yaml";
import { adapter, getOperationId } from "../utils";
import { useAccessStore } from "./access";
-const isApp = getClientConfig()?.isApp;
+const isApp = getClientConfig()?.isApp !== false;
export type Plugin = {
id: string;
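
Note the inverted default in that one-line change: the old check treated a missing client config as the web case, while `!== false` now treats it as the app case. A truth-table sketch:

  // getClientConfig()?.isApp    old: isApp          new: isApp !== false
  // true                        true                true
  // false                       false               false
  // undefined (no config)       falsy, web case     true, app case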

View File

@@ -12,7 +12,6 @@ import { downloadAs, readFromFile } from "../utils";
import { showToast } from "../components/ui-lib";
import Locale from "../locales";
import { createSyncClient, ProviderType } from "../utils/cloud";
-import { corsPath } from "../utils/cors";
export interface WebDavConfig {
server: string;
@@ -26,7 +25,7 @@ export type SyncStore = GetStoreState<typeof useSyncStore>;
const DEFAULT_SYNC_STATE = {
provider: ProviderType.WebDAV,
useProxy: true,
-proxyUrl: corsPath(ApiPath.Cors),
+proxyUrl: ApiPath.Cors as string,
webdav: {
endpoint: "",

View File

@@ -2,8 +2,9 @@ import { useEffect, useState } from "react";
import { showToast } from "./components/ui-lib";
import Locale from "./locales";
import { RequestMessage } from "./client/api";
-import { ServiceProvider, REQUEST_TIMEOUT_MS } from "./constant";
-import { fetch as tauriFetch, ResponseType } from "@tauri-apps/api/http";
+import { ServiceProvider } from "./constant";
+// import { fetch as tauriFetch, ResponseType } from "@tauri-apps/api/http";
+import { fetch as tauriStreamFetch } from "./utils/stream";
export function trimTopic(topic: string) {
// Fix an issue where double quotes still show in the Indonesian language
@@ -295,30 +296,23 @@ export function fetch(
options?: Record<string, unknown>,
): Promise<any> {
if (window.__TAURI__) {
-const payload = options?.body || options?.data;
-return tauriFetch(url, {
-...options,
-body:
-payload &&
-({
-type: "Text",
-payload,
-} as any),
-timeout: ((options?.timeout as number) || REQUEST_TIMEOUT_MS) / 1000,
-responseType:
-options?.responseType == "text" ? ResponseType.Text : ResponseType.JSON,
-} as any);
+return tauriStreamFetch(url, options);
}
return window.fetch(url, options);
}
export function adapter(config: Record<string, unknown>) {
-const { baseURL, url, params, ...rest } = config;
+const { baseURL, url, params, data: body, ...rest } = config;
const path = baseURL ? `${baseURL}${url}` : url;
const fetchUrl = params
? `${path}?${new URLSearchParams(params as any).toString()}`
: path;
-return fetch(fetchUrl as string, { ...rest, responseType: "text" });
+return fetch(fetchUrl as string, { ...rest, body }).then((res) => {
+const { status, headers, statusText } = res;
+return res
+.text()
+.then((data: string) => ({ status, statusText, headers, data }));
+});
}
export function safeLocalStorage(): {
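
adapter is the axios-style bridge used by the plugin runtime: it now forwards the config's data as the fetch body and normalizes the response back into an axios-like { status, statusText, headers, data } object, with data as raw text. A rough usage sketch with a hypothetical URL and payload:

  adapter({
    baseURL: "https://example.com", // hypothetical
    url: "/v1/echo",
    method: "POST",
    data: JSON.stringify({ ping: true }),
  }).then(({ status, data }) => {
    console.log(status, data); // data is the response body as text
  });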

View File

@@ -10,6 +10,7 @@ import {
fetchEventSource,
} from "@fortaine/fetch-event-source";
import { prettyObject } from "./format";
+import { fetch as tauriFetch } from "./stream";
export function compressImage(file: Blob, maxSize: number): Promise<string> {
return new Promise((resolve, reject) => {
@@ -221,7 +222,7 @@ export function stream(
),
)
.then((res) => {
-const content = JSON.stringify(res.data);
+let content = res.data || res?.statusText;
if (res.status >= 300) {
return Promise.reject(content);
}
@@ -236,7 +237,11 @@ export function stream(
return content;
})
.catch((e) => {
-options?.onAfterTool?.({ ...tool, isError: true });
+options?.onAfterTool?.({
+...tool,
+isError: true,
+errorMsg: e.toString(),
+});
return e.toString();
})
.then((content) => ({
@@ -288,6 +293,7 @@ export function stream(
REQUEST_TIMEOUT_MS,
);
fetchEventSource(chatPath, {
+fetch: tauriFetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);

View File

@@ -1,19 +0,0 @@
import { getClientConfig } from "../config/client";
import { DEFAULT_API_HOST } from "../constant";
export function corsPath(path: string) {
const baseUrl = getClientConfig()?.isApp ? `${DEFAULT_API_HOST}` : "";
if (baseUrl === "" && path === "") {
return "";
}
if (!path.startsWith("/")) {
path = "/" + path;
}
if (!path.endsWith("/")) {
path += "/";
}
return `${baseUrl}${path}`;
}

View File: app/utils/stream.ts (new file, 107 lines)

@@ -0,0 +1,107 @@
// using tauri command to send request
// see src-tauri/src/stream.rs, and src-tauri/src/main.rs
// 1. invoke('stream_fetch', {url, method, headers, body}), get response with headers.
// 2. listen event: `stream-response` multi times to get body
type ResponseEvent = {
id: number;
payload: {
request_id: number;
status?: number;
chunk?: number[];
};
};
type StreamResponse = {
request_id: number;
status: number;
status_text: string;
headers: Record<string, string>;
};
export function fetch(url: string, options?: RequestInit): Promise<any> {
if (window.__TAURI__) {
const {
signal,
method = "GET",
headers: _headers = {},
body = [],
} = options || {};
let unlisten: Function | undefined;
let setRequestId: Function | undefined;
const requestIdPromise = new Promise((resolve) => (setRequestId = resolve));
const ts = new TransformStream();
const writer = ts.writable.getWriter();
let closed = false;
const close = () => {
if (closed) return;
closed = true;
unlisten && unlisten();
writer.ready.then(() => {
writer.close().catch((e) => console.error(e));
});
};
if (signal) {
signal.addEventListener("abort", () => close());
}
// @ts-ignore 2. listen response multi times, and write to Response.body
window.__TAURI__.event
.listen("stream-response", (e: ResponseEvent) =>
requestIdPromise.then((request_id) => {
const { request_id: rid, chunk, status } = e?.payload || {};
if (request_id != rid) {
return;
}
if (chunk) {
writer.ready.then(() => {
writer.write(new Uint8Array(chunk));
});
} else if (status === 0) {
// end of body
close();
}
}),
)
.then((u: Function) => (unlisten = u));
const headers: Record<string, string> = {
Accept: "application/json, text/plain, */*",
"Accept-Language": "en-US,en;q=0.9,zh-CN;q=0.8,zh;q=0.7",
"User-Agent": navigator.userAgent,
};
for (const item of new Headers(_headers || {})) {
headers[item[0]] = item[1];
}
return window.__TAURI__
.invoke("stream_fetch", {
method: method.toUpperCase(),
url,
headers,
// TODO FormData
body:
typeof body === "string"
? Array.from(new TextEncoder().encode(body))
: [],
})
.then((res: StreamResponse) => {
const { request_id, status, status_text: statusText, headers } = res;
setRequestId?.(request_id);
const response = new Response(ts.readable, {
status,
statusText,
headers,
});
if (status >= 300) {
setTimeout(close, 100);
}
return response;
})
.catch((e) => {
console.error("stream error", e);
throw e;
});
}
return window.fetch(url, options);
}
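
End to end, the shim behaves like a streaming-capable fetch: inside a Tauri webview it invokes the stream_fetch command, pipes each stream-response chunk through the TransformStream, and resolves with a standard Response whose body can be read incrementally; outside Tauri it is just window.fetch. A minimal consumer, with a hypothetical endpoint:

  import { fetch } from "@/app/utils/stream";

  async function readStream() {
    const res = await fetch("https://example.com/stream"); // hypothetical endpoint
    const reader = res.body!.getReader();
    const decoder = new TextDecoder();
    for (;;) {
      const { done, value } = await reader.read();
      if (done) break;
      // each Tauri `stream-response` chunk surfaces here as a Uint8Array
      console.log(decoder.decode(value, { stream: true }));
    }
  }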