feat: model provider refactor done

This commit is contained in:
Dean-YZG 2024-05-15 21:38:25 +08:00
parent 240d330001
commit a0e4a468d6
33 changed files with 3077 additions and 8 deletions

9
app/client/core/index.ts Normal file
View File

@ -0,0 +1,9 @@
export * from "./types";
export * from "./providerClient";
export * from "./modelClient";
export * from "./locale";
export * from "./shim";

19
app/client/core/locale.ts Normal file
View File

@ -0,0 +1,19 @@
import { Lang, getLang } from "@/app/locales";
// A nested bag of translatable strings: every leaf is a string.
interface PlainConfig {
  [k: string]: PlainConfig | string;
}

// A per-language map of text bundles. The bundle for `Default` is mandatory;
// every other language is optional.
export type LocaleMap<
  TextPlainConfig extends PlainConfig,
  Default extends Lang,
> = Partial<Record<Lang, TextPlainConfig>> & {
  [name in Default]: TextPlainConfig;
};

/**
 * Pick the text bundle for the current UI language, falling back to the
 * bundle registered under `defaultLang` (guaranteed present by LocaleMap).
 */
export function getLocaleText<
  TextPlainConfig extends PlainConfig,
  DefaultLang extends Lang,
>(textMap: LocaleMap<TextPlainConfig, DefaultLang>, defaultLang: DefaultLang) {
  const currentLangText = textMap[getLang()];
  return currentLangText || textMap[defaultLang];
}

View File

@ -0,0 +1,46 @@
import { ChatRequestPayload, Model, ModelConfig, ChatHandlers } from "./types";
import { ProviderClient, ProviderTemplateName } from "./providerClient";
/**
 * Binds a concrete model + its generation config to a provider client and
 * exposes the two chat entry points the app uses (streamed chat and
 * whole-message summarization).
 */
export class ModelClient {
  static getAllProvidersDefaultModels = () => {
    return ProviderClient.getAllProvidersDefaultModels();
  };

  constructor(
    private model: Model,
    private modelConfig: ModelConfig,
    private providerClient: ProviderClient,
  ) {}

  /**
   * Streamed chat: merges this client's model name and config into the
   * payload and delegates to the provider.
   * NOTE(review): the try/catch only covers synchronous setup failures;
   * asynchronous stream errors must be reported via handlers.onError by the
   * provider itself — confirm providers do so.
   */
  chat(payload: ChatRequestPayload, handlers: ChatHandlers) {
    try {
      return this.providerClient.streamChat(
        {
          ...payload,
          modelConfig: this.modelConfig,
          model: this.model.name,
        },
        handlers,
      );
    } catch (e) {
      handlers.onError(e as Error);
    }
  }

  /**
   * Whole-message request used for conversation summarization.
   * Best-effort: returns "" when the request cannot even be started, so a
   * summarization failure never breaks the chat flow.
   */
  summerize(payload: ChatRequestPayload) {
    try {
      return this.providerClient.chat({
        ...payload,
        modelConfig: this.modelConfig,
        model: this.model.name,
      });
    } catch (e) {
      return "";
    }
  }

  /**
   * Correctly-spelled alias for the misspelled `summerize`, which is kept
   * for backward compatibility. Prefer this name in new code.
   */
  summarize(payload: ChatRequestPayload) {
    return this.summerize(payload);
  }
}
export function ModelClientFactory(model: Model, modelConfig: ModelConfig) {
const providerClient = new ProviderClient(model.providerTemplateName);
return new ModelClient(model, modelConfig, providerClient);
}

View File

@ -0,0 +1,137 @@
import {
ChatHandlers,
IProviderTemplate,
Model,
StandChatReponseMessage,
StandChatRequestPayload,
} from "./types";
import * as ProviderTemplates from "@/app/client/providers";
import { cloneDeep } from "lodash-es";
export type ProviderTemplate =
(typeof ProviderTemplates)[keyof typeof ProviderTemplates];
export type ProviderTemplateName =
(typeof ProviderTemplates)[keyof typeof ProviderTemplates]["prototype"]["name"];
export class ProviderClient {
provider: IProviderTemplate<any, any, any>;
static ProviderTemplates = ProviderTemplates;
static getAllProvidersDefaultModels = () => {
return Object.values(ProviderClient.ProviderTemplates).reduce(
(r, p) => ({
...r,
[p.prototype.name]: cloneDeep(p.prototype.models),
}),
{} as Record<ProviderTemplateName, Model[]>,
);
};
static getAllProviderTemplates = () => {
return Object.values(ProviderClient.ProviderTemplates).reduce(
(r, p) => ({
...r,
[p.prototype.name]: p,
}),
{} as Record<ProviderTemplateName, ProviderTemplate>,
);
};
static getProviderTemplateList = () => {
return Object.values(ProviderClient.ProviderTemplates);
};
constructor(providerTemplateName: string) {
this.provider = this.getProviderTemplate(providerTemplateName);
}
get settingItems() {
const { providerMeta } = this.provider;
const { settingItems } = providerMeta;
return settingItems;
}
private getProviderTemplate(providerTemplateName: string) {
const providerTemplate =
Object.values(ProviderTemplates).find(
(template) => template.prototype.name === providerTemplateName,
) || ProviderTemplates.NextChatProvider;
return new providerTemplate();
}
getModelConfig(modelName: string) {
const { models } = this.provider;
return (
models.find((config) => config.name === modelName) ||
models.find((config) => config.isDefaultSelected)
);
}
async chat(
payload: StandChatRequestPayload<string>,
): Promise<StandChatReponseMessage> {
return this.provider.chat({
...payload,
stream: false,
isVisionModel: this.getModelConfig(payload.model)?.isVisionModel,
});
}
streamChat(payload: StandChatRequestPayload<string>, handlers: ChatHandlers) {
return this.provider.streamChat(
{
...payload,
stream: true,
isVisionModel: this.getModelConfig(payload.model)?.isVisionModel,
},
handlers.onProgress,
handlers.onFinish,
handlers.onError,
);
}
}
/** A user-facing provider entry persisted by the app. */
export interface Provider {
  name: string; // id of provider
  displayName: string;
  isActive: boolean;
  providerTemplateName: ProviderTemplateName;
  models: Model[];
}

function createProvider(
  provider: ProviderTemplateName,
  params?: Omit<Provider, "providerTemplateName">,
): Provider;
function createProvider(
  provider: ProviderTemplate,
  params?: Omit<Provider, "providerTemplateName">,
): Provider;
/**
 * Build a Provider record from a template class (or its registered name),
 * filling any field missing from `params` with the template's own metadata.
 *
 * BUG FIX: metadata is read from a template *instance*. `name`, `models` and
 * `providerMeta` are class fields, so the previous
 * `providerTemplate.prototype.*` reads were `undefined` at runtime.
 */
function createProvider(
  provider: ProviderTemplate | ProviderTemplateName,
  params?: Omit<Provider, "providerTemplateName">,
): Provider {
  let providerTemplate: ProviderTemplate;
  if (typeof provider === "string") {
    providerTemplate = ProviderClient.getAllProviderTemplates()[provider];
  } else {
    providerTemplate = provider;
  }

  const templateInstance = new providerTemplate();
  const {
    name = templateInstance.name,
    displayName = templateInstance.providerMeta.displayName,
    models = templateInstance.models,
  } = params ?? {};

  return {
    name,
    displayName,
    isActive: true,
    models,
    providerTemplateName: templateInstance.name,
  };
}

export { createProvider };

25
app/client/core/shim.ts Normal file
View File

@ -0,0 +1,25 @@
import { getClientConfig } from "@/app/config/client";
if (!(window.fetch as any).__hijacked__) {
  // BUG FIX: `window.fetch` must keep `window` as its receiver — calling an
  // unbound reference throws "Illegal invocation" in browsers.
  const originalFetch = window.fetch.bind(window);

  /**
   * Replacement fetch: inside the Tauri desktop app, route requests through
   * Tauri's HTTP client; in the browser (or if Tauri is unavailable) fall
   * back to the native fetch.
   */
  function hijackedFetch(...args: Parameters<typeof originalFetch>) {
    const { isApp } = getClientConfig() || {};

    let impl: typeof originalFetch = originalFetch;
    if (isApp) {
      try {
        impl = window.__TAURI__!.http.fetch;
      } catch (e) {
        impl = originalFetch;
      }
    }
    return impl(...args);
  }

  // Marker so importing this module twice does not re-wrap fetch.
  (hijackedFetch as any).__hijacked__ = true;
  window.fetch = hijackedFetch as typeof window.fetch;
}

164
app/client/core/types.ts Normal file
View File

@ -0,0 +1,164 @@
import { RequestMessage } from "../api";
// ===================================== LLM Types start ======================================

// Sampling/generation parameters shared by every provider.
export interface ModelConfig {
  temperature: number;
  top_p: number;
  presence_penalty: number;
  frequency_penalty: number;
  max_tokens: number;
}

export type Model = {
  name: string; // id of model in a provider
  displayName: string;
  isVisionModel?: boolean;
  isDefaultActive: boolean; // model is initialized to be active
  isDefaultSelected?: boolean; // model is initialized to be as default used model
  providerTemplateName: string;
};

// ===================================== LLM Types end ======================================

// ===================================== Chat Request Types start ======================================

// What a caller supplies for one chat turn: the conversation so far, the
// provider's user-entered settings, and a little environment context.
export interface ChatRequestPayload<SettingKeys extends string = ""> {
  messages: RequestMessage[];
  providerConfig: Record<SettingKeys, string>;
  context: {
    isApp: boolean;
  };
}

// ChatRequestPayload plus the concrete model name and its generation config.
export interface StandChatRequestPayload<SettingKeys extends string = "">
  extends ChatRequestPayload<SettingKeys> {
  modelConfig: ModelConfig;
  model: string;
}

// Fully-resolved payload handed to a provider template (vision flag and
// stream mode filled in by ProviderClient).
export interface InternalChatRequestPayload<SettingKeys extends string = "">
  extends StandChatRequestPayload<SettingKeys> {
  isVisionModel: Model["isVisionModel"];
  stream: boolean;
}

// A provider-ready HTTP request.
export interface ProviderRequestPayload {
  headers: Record<string, string>;
  body: string;
  url: string;
  method: string;
}

// Callbacks used to report streamed-chat progress back to the UI.
export interface ChatHandlers {
  onProgress: (message: string, chunk: string) => void;
  onFinish: (message: string) => void;
  onError: (err: Error) => void;
}

// ===================================== Chat Request Types end ======================================

// ===================================== Chat Response Types start ======================================

// NOTE(review): "Reponse" is misspelled, but the name is exported and widely
// referenced — renaming would be a breaking change.
export interface StandChatReponseMessage {
  message: string;
}

// ===================================== Chat Response Types end ======================================

// ===================================== Provider Settings Types start ======================================

type NumberRange = [number, number];

// Declarative validation rules for a single setting value.
export type Validator =
  | "required"
  | "number"
  | "string"
  | NumberRange
  | NumberRange[];

export type CommonSettingItem<SettingKeys extends string> = {
  name: SettingKeys;
  title?: string;
  description?: string;
  validators?: Validator[];
};

export type InputSettingItem = {
  type: "input";
  placeholder?: string;
} & (
  | {
      inputType?: "password" | "normal";
      defaultValue?: string;
    }
  | {
      inputType?: "number";
      defaultValue?: number;
    }
);

export type SelectSettingItem = {
  type: "select";
  options: {
    name: string;
    // NOTE(review): typed as the literal strings "number" | "string" |
    // "boolean" — looks like it was meant to describe the option's value
    // type; confirm the intended shape before anything consumes it.
    value: "number" | "string" | "boolean";
  }[];
  placeholder?: string;
};

export type RangeSettingItem = {
  type: "range";
  range: NumberRange;
};

export type SwitchSettingItem = {
  type: "switch";
};

// A single configurable field in a provider's settings form.
export type SettingItem<SettingKeys extends string = ""> =
  CommonSettingItem<SettingKeys> &
  (
    | InputSettingItem
    | SelectSettingItem
    | RangeSettingItem
    | SwitchSettingItem
  );

// ===================================== Provider Settings Types end ======================================

// ===================================== Provider Template Types start ======================================

// Contract each concrete provider (OpenAI, Anthropic, Azure, …) implements.
export interface IProviderTemplate<
  SettingKeys extends string,
  NAME extends string,
  Meta extends Record<string, any>,
> {
  readonly name: NAME;
  readonly metas: Meta;
  readonly providerMeta: {
    displayName: string;
    settingItems: SettingItem<SettingKeys>[];
  };
  readonly models: Model[];

  // formatChatPayload(payload: InternalChatRequestPayload<SettingKeys>): ProviderRequestPayload;
  // readWholeMessageResponseBody(res: WholeMessageResponseBody): StandChatReponseMessage;

  // Streamed chat; returns the controller used to abort the request.
  streamChat(
    payload: InternalChatRequestPayload<SettingKeys>,
    onProgress?: (message: string, chunk: string) => void,
    onFinish?: (message: string) => void,
    onError?: (err: Error) => void,
  ): AbortController;

  // Whole-message (non-streamed) chat.
  chat(
    payload: InternalChatRequestPayload<SettingKeys>,
  ): Promise<StandChatReponseMessage>;
}

// Anything that can snapshot itself for persistence.
export interface Serializable<Snapshot> {
  serialize(): Snapshot;
}

3
app/client/index.ts Normal file
View File

@ -0,0 +1,3 @@
export * from "./core";
export * from "./providers";

View File

@ -0,0 +1,87 @@
import { SettingItem } from "../../core/types";
import Locale from "./locale";
// Keys of the user-configurable Anthropic settings.
export type SettingKeys =
  | "anthropicUrl"
  | "anthropicApiKey"
  | "anthropicApiVersion";

// Static metadata for the Anthropic provider.
export const AnthropicMetas = {
  ChatPath: "v1/messages", // messages API
  ChatPath1: "v1/complete", // legacy completion API; appears unused in this view
  ExampleEndpoint: "https://api.anthropic.com",
  // NOTE(review): despite the key name, this date is used below as the
  // anthropic API *version* placeholder — the name looks misleading; renaming
  // would touch consumers, so it is only flagged here.
  Vision: "2023-06-01",
};
/**
 * Built-in Claude model list.
 * BUG FIX: the core `Model` type keys vision capability as `isVisionModel`,
 * but these entries only declared `isVision`, so vision detection downstream
 * (e.g. ProviderClient.getModelConfig(...)?.isVisionModel) always came back
 * undefined. `isVisionModel` is now set on each entry; the old `isVision`
 * key is kept so any existing reader stays working.
 */
export const modelConfigs = [
  {
    name: "claude-instant-1.2",
    displayName: "claude-instant-1.2",
    isVision: false,
    isVisionModel: false,
    isDefaultActive: true,
    isDefaultSelected: true,
  },
  {
    name: "claude-2.0",
    displayName: "claude-2.0",
    isVision: false,
    isVisionModel: false,
    isDefaultActive: true,
    isDefaultSelected: false,
  },
  {
    name: "claude-2.1",
    displayName: "claude-2.1",
    isVision: false,
    isVisionModel: false,
    isDefaultActive: true,
    isDefaultSelected: false,
  },
  {
    name: "claude-3-sonnet-20240229",
    displayName: "claude-3-sonnet-20240229",
    isVision: true,
    isVisionModel: true,
    isDefaultActive: false,
    isDefaultSelected: false,
  },
  {
    name: "claude-3-opus-20240229",
    displayName: "claude-3-opus-20240229",
    isVision: true,
    isVisionModel: true,
    isDefaultActive: false,
    isDefaultSelected: false,
  },
  {
    name: "claude-3-haiku-20240307",
    displayName: "claude-3-haiku-20240307",
    isVision: true,
    isVisionModel: true,
    isDefaultActive: true,
    isDefaultSelected: false,
  },
];
// Setting-form descriptors rendered for the Anthropic provider.
// Array order is the on-screen order of the fields.
export const settingItems: SettingItem<SettingKeys>[] = [
  {
    name: "anthropicUrl",
    title: Locale.Endpoint.Title,
    description: Locale.Endpoint.SubTitle + AnthropicMetas.ExampleEndpoint,
    placeholder: AnthropicMetas.ExampleEndpoint,
    type: "input",
    validators: ["required"],
  },
  {
    name: "anthropicApiKey",
    title: Locale.ApiKey.Title,
    description: Locale.ApiKey.SubTitle,
    placeholder: Locale.ApiKey.Placeholder,
    type: "input",
    inputType: "password", // secret — masked in the UI
    validators: ["required"],
  },
  {
    name: "anthropicApiVersion",
    title: Locale.ApiVerion.Title,
    description: Locale.ApiVerion.SubTitle,
    // NOTE(review): placeholder reuses AnthropicMetas.Vision, which holds the
    // API-version date despite its name.
    placeholder: AnthropicMetas.Vision,
    type: "input",
    validators: ["required"],
  },
];

View File

@ -0,0 +1,402 @@
import { getMessageTextContent } from "@/app/utils";
import {
AnthropicMetas,
SettingKeys,
modelConfigs,
settingItems,
} from "./config";
import {
InternalChatRequestPayload,
IProviderTemplate,
} from "../../core/types";
import {
EventStreamContentType,
fetchEventSource,
} from "@fortaine/fetch-event-source";
import Locale from "@/app/locales";
import { prettyObject } from "@/app/utils/format";
// Re-exported so consumers can name Anthropic's setting-key union without
// importing the config module.
export type AnthropicProviderSettingKeys = SettingKeys;

// Maps internal chat roles onto roles Claude accepts; "system" is folded into
// "user" because this payload format carries no separate system role.
const ClaudeMapper = {
  assistant: "assistant",
  user: "user",
  system: "user",
} as const;

// One content block of a multimodal Claude message (text or base64 image).
export type MultiBlockContent = {
  type: "image" | "text";
  source?: {
    type: string;
    media_type: string;
    data: string;
  };
  text?: string;
};

// A single message in Anthropic's messages-API format.
export type AnthropicMessage = {
  role: (typeof ClaudeMapper)[keyof typeof ClaudeMapper];
  content: string | MultiBlockContent[];
};

// Request body for the v1/messages endpoint.
export interface AnthropicChatRequest {
  model: string; // The model that will complete your prompt.
  messages: AnthropicMessage[]; // The prompt that you want Claude to complete.
  max_tokens: number; // The maximum number of tokens to generate before stopping.
  stop_sequences?: string[]; // Sequences that will cause the model to stop generating completion text.
  temperature?: number; // Amount of randomness injected into the response.
  top_p?: number; // Use nucleus sampling.
  top_k?: number; // Only sample from the top K options for each subsequent token.
  metadata?: object; // An object describing metadata about the request.
  stream?: boolean; // Whether to incrementally stream the response using server-sent events.
}

// Request body for the legacy v1/complete endpoint.
// NOTE(review): appears unused in this file — confirm before removing.
export interface ChatRequest {
  model: string; // The model that will complete your prompt.
  prompt: string; // The prompt that you want Claude to complete.
  max_tokens_to_sample: number; // The maximum number of tokens to generate before stopping.
  stop_sequences?: string[]; // Sequences that will cause the model to stop generating completion text.
  temperature?: number; // Amount of randomness injected into the response.
  top_p?: number; // Use nucleus sampling.
  top_k?: number; // Only sample from the top K options for each subsequent token.
  metadata?: object; // An object describing metadata about the request.
  stream?: boolean; // Whether to incrementally stream the response using server-sent events.
}
/**
 * Anthropic (Claude) provider template.
 *
 * BUG FIXES in this revision:
 * - getTimer previously returned `{ ...controller, clear }`: spreading an
 *   AbortController copies no prototype members, so `timer.signal` and
 *   `timer.abort` were undefined — streamChat crashed on its first animation
 *   frame and chat()'s timeout callback threw.
 * - chat() scheduled a second, never-cleared setTimeout on top of the timer's
 *   own timeout.
 * - formatChatPayload mutated the caller's `messages` array.
 * - the SSE request was not wired to the abort signal, so aborting the
 *   returned controller never cancelled the stream.
 */
export default class AnthropicProvider
  implements IProviderTemplate<SettingKeys, "anthropic", typeof AnthropicMetas>
{
  name = "anthropic" as const;
  metas = AnthropicMetas;

  providerMeta = {
    displayName: "Anthropic",
    settingItems,
  };
  models = modelConfigs.map((c) => ({ ...c, providerTemplateName: this.name }));

  readonly REQUEST_TIMEOUT_MS = 60000;

  /** Resolve the chat endpoint URL from the user-configured base URL. */
  private path(payload: InternalChatRequestPayload<SettingKeys>) {
    const {
      providerConfig: { anthropicUrl },
    } = payload;

    let baseUrl: string = anthropicUrl;

    // if endpoint is empty, use default endpoint
    if (baseUrl.trim().length === 0) {
      baseUrl = "/api/anthropic";
    }
    if (!baseUrl.startsWith("http") && !baseUrl.startsWith("/api")) {
      baseUrl = "https://" + baseUrl;
    }
    baseUrl = trimEnd(baseUrl, "/");

    return `${baseUrl}/${AnthropicMetas.ChatPath}`;
  }

  /**
   * Build headers/body/method/url for the Anthropic messages API.
   * Claude requires user/assistant roles to alternate, so a filler assistant
   * message (";") is inserted between consecutive user/system messages.
   */
  private formatChatPayload(payload: InternalChatRequestPayload<SettingKeys>) {
    const { isVisionModel, model, stream, modelConfig, providerConfig } =
      payload;
    const { anthropicApiKey, anthropicApiVersion } = providerConfig;
    const { temperature, top_p, max_tokens } = modelConfig;

    // BUG FIX: work on a copy so the caller's message array is not mutated.
    const messages = [...payload.messages];

    const keys = ["system", "user"];

    // roles must alternate between "user" and "assistant" in claude, so add a
    // fake assistant message between two consecutive user/system messages
    for (let i = 0; i < messages.length - 1; i++) {
      const message = messages[i];
      const nextMessage = messages[i + 1];

      if (keys.includes(message.role) && keys.includes(nextMessage.role)) {
        messages[i] = [
          message,
          {
            role: "assistant",
            content: ";",
          },
        ] as any;
      }
    }

    const prompt = messages
      .flat()
      .filter((v) => {
        // drop empty messages
        if (!v.content) return false;
        if (typeof v.content === "string" && !v.content.trim()) return false;
        return true;
      })
      .map((v) => {
        const { role, content } = v;
        const insideRole = ClaudeMapper[role] ?? "user";

        if (!isVisionModel || typeof content === "string") {
          return {
            role: insideRole,
            content: getMessageTextContent(v),
          };
        }
        return {
          role: insideRole,
          content: content
            .filter((v) => v.image_url || v.text)
            .map(({ type, text, image_url }) => {
              if (type === "text") {
                return {
                  type,
                  text: text!,
                };
              }
              // Decode a data URL ("data:<mime>;<encoding>,<data>") into
              // Anthropic's image source block.
              const { url = "" } = image_url || {};
              const colonIndex = url.indexOf(":");
              const semicolonIndex = url.indexOf(";");
              const comma = url.indexOf(",");

              const mimeType = url.slice(colonIndex + 1, semicolonIndex);
              const encodeType = url.slice(semicolonIndex + 1, comma);
              const data = url.slice(comma + 1);

              return {
                type: "image" as const,
                source: {
                  type: encodeType,
                  media_type: mimeType,
                  data,
                },
              };
            }),
        };
      });

    const requestBody: AnthropicChatRequest = {
      messages: prompt,
      stream,
      model,
      max_tokens,
      temperature,
      top_p,
      top_k: 5,
    };

    return {
      headers: {
        "Content-Type": "application/json",
        Accept: "application/json",
        "x-api-key": anthropicApiKey ?? "",
        "anthropic-version": anthropicApiVersion,
        // NOTE(review): both x-api-key and a Bearer Authorization header are
        // sent — confirm the server/proxy actually needs both.
        Authorization: getAuthKey(anthropicApiKey),
      },
      body: JSON.stringify(requestBody),
      method: "POST",
      url: this.path(payload),
    };
  }

  /** Extract the assistant text from a whole (non-streamed) response body. */
  private readWholeMessageResponseBody(res: any) {
    return {
      message: res?.content?.[0]?.text ?? "",
    };
  }

  /**
   * An AbortController that auto-aborts after REQUEST_TIMEOUT_MS, augmented
   * with clear() to cancel the pending timeout.
   * BUG FIX: Object.assign onto the controller keeps its prototype (and thus
   * `signal`/`abort`) intact, unlike the previous object spread.
   */
  private getTimer = (onabort: () => void = () => {}) => {
    const controller = new AbortController();
    const requestTimeoutId = setTimeout(
      () => controller.abort(),
      this.REQUEST_TIMEOUT_MS,
    );
    controller.signal.onabort = onabort;

    return Object.assign(controller, {
      clear: () => {
        clearTimeout(requestTimeoutId);
      },
    });
  };

  /** Non-streaming chat; resolves with the whole assistant message. */
  async chat(payload: InternalChatRequestPayload<SettingKeys>) {
    const requestPayload = this.formatChatPayload(payload);
    // BUG FIX: getTimer() already schedules its own abort — the previous extra
    // setTimeout targeted the non-existent timer.abort and was never cleared.
    const timer = this.getTimer();

    const res = await fetch(requestPayload.url, {
      headers: {
        ...requestPayload.headers,
      },
      body: requestPayload.body,
      method: requestPayload.method,
      signal: timer.signal,
    });
    timer.clear();

    const resJson = await res.json();
    const message = this.readWholeMessageResponseBody(resJson);
    return message;
  }

  /** Streaming chat via SSE; progress is animated frame-by-frame. */
  streamChat(
    payload: InternalChatRequestPayload<SettingKeys>,
    onProgress: (message: string, chunk: string) => void,
    onFinish: (message: string) => void,
    onError: (err: Error) => void,
  ) {
    const requestPayload = this.formatChatPayload(payload);

    let responseText = "";
    let remainText = "";
    let finished = false;
    const timer = this.getTimer();

    // animate response to make it looks smooth
    const animateResponseText = () => {
      if (finished || timer.signal.aborted) {
        responseText += remainText;
        console.log("[Response Animation] finished");
        if (responseText?.length === 0) {
          onError(new Error("empty response from server"));
        }
        return;
      }

      if (remainText.length > 0) {
        const fetchCount = Math.max(1, Math.round(remainText.length / 60));
        const fetchText = remainText.slice(0, fetchCount);
        responseText += fetchText;
        remainText = remainText.slice(fetchCount);
        onProgress(responseText, fetchText);
      }

      requestAnimationFrame(animateResponseText);
    };

    // start animation
    animateResponseText();

    const finish = () => {
      if (!finished) {
        finished = true;
        onFinish(responseText + remainText);
      }
    };

    // Flush whatever arrived when the request is aborted (timeout or caller),
    // matching the Azure provider's behavior.
    timer.signal.onabort = finish;

    fetchEventSource(requestPayload.url, {
      ...requestPayload,
      // BUG FIX: wire the stream to the timer so aborting actually cancels it.
      signal: timer.signal,
      async onopen(res) {
        timer.clear();
        const contentType = res.headers.get("content-type");
        console.log("[Anthropic] request response content type: ", contentType);

        if (contentType?.startsWith("text/plain")) {
          responseText = await res.clone().text();
          return finish();
        }

        if (
          !res.ok ||
          !res.headers
            .get("content-type")
            ?.startsWith(EventStreamContentType) ||
          res.status !== 200
        ) {
          const responseTexts = [responseText];
          let extraInfo = await res.clone().text();
          try {
            const resJson = await res.clone().json();
            extraInfo = prettyObject(resJson);
          } catch {}

          if (res.status === 401) {
            responseTexts.push(Locale.Error.Unauthorized);
          }
          if (extraInfo) {
            responseTexts.push(extraInfo);
          }
          responseText = responseTexts.join("\n\n");

          return finish();
        }
      },
      onmessage(msg) {
        if (msg.data === "[DONE]" || finished) {
          return finish();
        }
        const text = msg.data;
        try {
          // NOTE(review): this parses OpenAI-style `choices[0].delta.content`;
          // Anthropic's native SSE events use a different shape
          // (content_block_delta). Confirm the proxy rewrites the stream.
          const json = JSON.parse(text);
          const choices = json.choices as Array<{
            delta: { content: string };
          }>;
          const delta = choices[0]?.delta?.content;
          if (delta) {
            remainText += delta;
          }
        } catch (e) {
          console.error("[Request] parse error", text, msg);
        }
      },
      onclose() {
        finish();
      },
      onerror(e) {
        onError(e);
        throw e;
      },
      openWhenHidden: true,
    });

    return timer;
  }
}
/**
 * Strip every trailing occurrence of `end` from `s`.
 * An empty `end` returns `s` unchanged (guards against an infinite loop).
 */
function trimEnd(s: string, end = " ") {
  if (end.length === 0) return s;
  let result = s;
  while (result.endsWith(end)) {
    result = result.slice(0, result.length - end.length);
  }
  return result;
}
/** Wrap an API key as an HTTP bearer-token header value. */
function bearer(value: string) {
  return `Bearer ${value.trim()}`;
}

/**
 * Build the Authorization header value from the user's API key.
 * Returns "" when no key is configured, so no auth header content is sent.
 */
function getAuthKey(apiKey = "") {
  return apiKey ? bearer(apiKey) : "";
}

View File

@ -0,0 +1,110 @@
import { getLocaleText } from "../../core/locale";
// Localized strings for the Anthropic provider's settings form.
// NOTE(review): the key `ApiVerion` is misspelled ("ApiVersion"), but it is
// part of the exported object shape — renaming it would break consumers.
export default getLocaleText<
  {
    ApiKey: {
      Title: string;
      SubTitle: string;
      Placeholder: string;
    };
    Endpoint: {
      Title: string;
      SubTitle: string;
    };
    ApiVerion: {
      Title: string;
      SubTitle: string;
    };
  },
  "en"
>(
  {
    cn: {
      ApiKey: {
        Title: "接口密钥",
        SubTitle: "使用自定义 Anthropic Key 绕过密码访问限制",
        Placeholder: "Anthropic API Key",
      },
      Endpoint: {
        Title: "接口地址",
        SubTitle: "样例:",
      },
      ApiVerion: {
        Title: "接口版本 (claude api version)",
        SubTitle: "选择一个特定的 API 版本输入",
      },
    },
    en: {
      ApiKey: {
        Title: "Anthropic API Key",
        SubTitle:
          "Use a custom Anthropic Key to bypass password access restrictions",
        Placeholder: "Anthropic API Key",
      },
      Endpoint: {
        Title: "Endpoint Address",
        SubTitle: "Example:",
      },
      ApiVerion: {
        Title: "API Version (claude api version)",
        SubTitle: "Select and input a specific API version",
      },
    },
    pt: {
      ApiKey: {
        Title: "Chave API Anthropic",
        SubTitle: "Verifique sua chave API do console Anthropic",
        Placeholder: "Chave API Anthropic",
      },
      Endpoint: {
        Title: "Endpoint Address",
        SubTitle: "Exemplo: ",
      },
      ApiVerion: {
        Title: "Versão API (Versão api claude)",
        SubTitle: "Verifique sua versão API do console Anthropic",
      },
    },
    sk: {
      ApiKey: {
        Title: "API kľúč Anthropic",
        SubTitle: "Skontrolujte svoj API kľúč v Anthropic konzole",
        Placeholder: "API kľúč Anthropic",
      },
      Endpoint: {
        Title: "Adresa koncového bodu",
        SubTitle: "Príklad:",
      },
      ApiVerion: {
        Title: "Verzia API (claude verzia API)",
        SubTitle: "Vyberte špecifickú verziu časti",
      },
    },
    tw: {
      ApiKey: {
        Title: "API 金鑰",
        SubTitle: "從 Anthropic AI 取得您的 API 金鑰",
        Placeholder: "Anthropic API Key",
      },
      Endpoint: {
        Title: "終端地址",
        SubTitle: "範例:",
      },
      ApiVerion: {
        Title: "API 版本 (claude api version)",
        SubTitle: "選擇一個特定的 API 版本輸入",
      },
    },
  },
  "en",
);

View File

@ -0,0 +1,41 @@
import Locale from "./locale";
import { SettingItem } from "../../core/types";
import { modelConfigs as openaiModelConfigs } from "../openai/config";
// Static metadata for the Azure OpenAI provider.
export const AzureMetas = {
  ExampleEndpoint: "https://{resource-url}/openai/deployments/{deploy-id}",
  ChatPath: "v1/chat/completions", // "v1/" is stripped later by makeAzurePath
  OpenAI: "/api/openai", // default in-app proxy prefix
};

// Keys of the user-configurable Azure settings.
export type SettingKeys = "azureUrl" | "azureApiKey" | "azureApiVersion";

// Azure deployments expose the same model catalogue as OpenAI.
export const modelConfigs = openaiModelConfigs;

// Setting-form descriptors rendered for this provider (array order = UI order).
// NOTE(review): unlike the key/version fields, `azureUrl` carries no
// "required" validator — confirm whether an empty endpoint is intentional.
export const settingItems: SettingItem<SettingKeys>[] = [
  {
    name: "azureUrl",
    title: Locale.Endpoint.Title,
    description: Locale.Endpoint.SubTitle + AzureMetas.ExampleEndpoint,
    placeholder: AzureMetas.ExampleEndpoint,
    type: "input",
  },
  {
    name: "azureApiKey",
    title: Locale.ApiKey.Title,
    description: Locale.ApiKey.SubTitle,
    placeholder: Locale.ApiKey.Placeholder,
    type: "input",
    inputType: "password", // secret — masked in the UI
    validators: ["required"],
  },
  {
    name: "azureApiVersion",
    title: Locale.ApiVerion.Title,
    description: Locale.ApiVerion.SubTitle,
    placeholder: "2023-08-01-preview",
    type: "input",
    validators: ["required"],
  },
];

View File

@ -0,0 +1,326 @@
import { settingItems, SettingKeys, modelConfigs, AzureMetas } from "./config";
import {
InternalChatRequestPayload,
IProviderTemplate,
} from "../../core/types";
import { getMessageTextContent } from "@/app/utils";
import {
EventStreamContentType,
fetchEventSource,
} from "@fortaine/fetch-event-source";
import { prettyObject } from "@/app/utils/format";
import Locale from "@/app/locales";
// Re-exported so consumers can name Azure's setting-key union without
// importing the config module.
export type AzureProviderSettingKeys = SettingKeys;

// Chat roles accepted by the OpenAI-compatible API.
export const ROLES = ["system", "user", "assistant"] as const;
export type MessageRole = (typeof ROLES)[number];

// One part of a multimodal message: plain text or an image URL.
export interface MultimodalContent {
  type: "text" | "image_url";
  text?: string;
  image_url?: {
    url: string;
  };
}

// A single chat message as sent to the API.
export interface RequestMessage {
  role: MessageRole;
  content: string | MultimodalContent[];
}

// Request body for the chat-completions endpoint.
interface RequestPayload {
  messages: {
    role: "system" | "user" | "assistant";
    content: string | MultimodalContent[];
  }[];
  stream?: boolean;
  model: string;
  temperature: number;
  presence_penalty: number;
  frequency_penalty: number;
  top_p: number;
  max_tokens?: number; // only set for vision models (see formatChatPayload)
}
/**
 * Azure OpenAI provider template.
 *
 * BUG FIXES in this revision:
 * - getTimer previously returned `{ ...controller, clear }`: spreading an
 *   AbortController copies no prototype members, so `timer.signal` and
 *   `timer.abort` were undefined — `timer.signal.onabort = finish` and the
 *   animation loop crashed, and chat()'s timeout callback threw.
 * - chat() cleared its extra setTimeout but never the timer's own timeout;
 *   the duplicate timeout is removed and timer.clear() is used instead.
 * - the SSE request is now wired to the abort signal so aborting the
 *   returned controller actually cancels the stream.
 */
export default class Azure
  implements IProviderTemplate<SettingKeys, "azure", typeof AzureMetas>
{
  name = "azure" as const;
  metas = AzureMetas;
  models = modelConfigs.map((c) => ({ ...c, providerTemplateName: this.name }));
  providerMeta = {
    displayName: "Azure",
    settingItems,
  };

  readonly REQUEST_TIMEOUT_MS = 60000;

  /** Resolve the chat endpoint URL (appends the api-version query param). */
  private path(payload: InternalChatRequestPayload<SettingKeys>): string {
    const {
      providerConfig: { azureUrl, azureApiVersion },
    } = payload;

    const path = makeAzurePath(AzureMetas.ChatPath, azureApiVersion);

    let baseUrl = azureUrl;
    if (!baseUrl) {
      baseUrl = "/api/openai";
    }
    if (baseUrl.endsWith("/")) {
      baseUrl = baseUrl.slice(0, baseUrl.length - 1);
    }
    if (!baseUrl.startsWith("http") && !baseUrl.startsWith(AzureMetas.OpenAI)) {
      baseUrl = "https://" + baseUrl;
    }

    console.log("[Proxy Endpoint] ", baseUrl, path);

    return [baseUrl, path].join("/");
  }

  /** Build request headers; attaches a Bearer token when a key is set. */
  private getHeaders(payload: InternalChatRequestPayload<SettingKeys>) {
    const { azureApiKey } = payload.providerConfig;
    const headers: Record<string, string> = {
      "Content-Type": "application/json",
      Accept: "application/json",
    };

    const authHeader = "Authorization";
    const makeBearer = (s: string) => `Bearer ${s.trim()}`;
    const validString = (x?: string): x is string => Boolean(x && x.length > 0);

    // Only attach auth when the user actually configured a key.
    // NOTE(review): Azure OpenAI natively expects an `api-key` header rather
    // than `Authorization: Bearer` — confirm the proxy translates this.
    if (validString(azureApiKey)) {
      headers[authHeader] = makeBearer(azureApiKey);
    }

    return headers;
  }

  /** Build headers/body/method/url for the chat-completions request. */
  private formatChatPayload(payload: InternalChatRequestPayload<SettingKeys>) {
    const { messages, isVisionModel, model, stream, modelConfig } = payload;
    const {
      temperature,
      presence_penalty,
      frequency_penalty,
      top_p,
      max_tokens,
    } = modelConfig;

    // Vision models keep the structured content; text models are flattened.
    const openAiMessages = messages.map((v) => ({
      role: v.role,
      content: isVisionModel ? v.content : getMessageTextContent(v),
    }));

    const requestPayload: RequestPayload = {
      messages: openAiMessages,
      stream,
      model,
      temperature,
      presence_penalty,
      frequency_penalty,
      top_p,
    };

    // add max_tokens to vision model
    if (isVisionModel) {
      requestPayload["max_tokens"] = Math.max(max_tokens, 4000);
    }

    console.log("[Request] openai payload: ", requestPayload);

    return {
      headers: this.getHeaders(payload),
      body: JSON.stringify(requestPayload),
      method: "POST",
      url: this.path(payload),
    };
  }

  /** Extract the assistant text from a whole (non-streamed) response body. */
  private readWholeMessageResponseBody(res: any) {
    return {
      message: res.choices?.at(0)?.message?.content ?? "",
    };
  }

  /**
   * An AbortController that auto-aborts after REQUEST_TIMEOUT_MS, augmented
   * with clear() to cancel the pending timeout.
   * BUG FIX: Object.assign onto the controller keeps its prototype (and thus
   * `signal`/`abort`) intact, unlike the previous object spread.
   */
  private getTimer = (onabort: () => void = () => {}) => {
    const controller = new AbortController();
    const requestTimeoutId = setTimeout(
      () => controller.abort(),
      this.REQUEST_TIMEOUT_MS,
    );
    controller.signal.onabort = onabort;

    return Object.assign(controller, {
      clear: () => {
        clearTimeout(requestTimeoutId);
      },
    });
  };

  /** Non-streaming chat; resolves with the whole assistant message. */
  async chat(payload: InternalChatRequestPayload<SettingKeys>) {
    const requestPayload = this.formatChatPayload(payload);
    // BUG FIX: rely on the timer's own timeout — the previous extra
    // setTimeout targeted the non-existent timer.abort, and the timer's own
    // timeout was never cleared.
    const timer = this.getTimer();

    const res = await fetch(requestPayload.url, {
      headers: {
        ...requestPayload.headers,
      },
      body: requestPayload.body,
      method: requestPayload.method,
      signal: timer.signal,
    });
    timer.clear();

    const resJson = await res.json();
    const message = this.readWholeMessageResponseBody(resJson);
    return message;
  }

  /** Streaming chat via SSE; progress is animated frame-by-frame. */
  streamChat(
    payload: InternalChatRequestPayload<SettingKeys>,
    onProgress: (message: string, chunk: string) => void,
    onFinish: (message: string) => void,
    onError: (err: Error) => void,
  ) {
    const requestPayload = this.formatChatPayload(payload);
    const timer = this.getTimer();

    let responseText = "";
    let remainText = "";
    let finished = false;

    // animate response to make it looks smooth
    const animateResponseText = () => {
      if (finished || timer.signal.aborted) {
        responseText += remainText;
        console.log("[Response Animation] finished");
        if (responseText?.length === 0) {
          onError(new Error("empty response from server"));
        }
        return;
      }

      if (remainText.length > 0) {
        const fetchCount = Math.max(1, Math.round(remainText.length / 60));
        const fetchText = remainText.slice(0, fetchCount);
        responseText += fetchText;
        remainText = remainText.slice(fetchCount);
        onProgress(responseText, fetchText);
      }

      requestAnimationFrame(animateResponseText);
    };

    // start animation
    animateResponseText();

    const finish = () => {
      if (!finished) {
        finished = true;
        onFinish(responseText + remainText);
      }
    };

    // Flush whatever arrived when the request is aborted (timeout or caller).
    timer.signal.onabort = finish;

    fetchEventSource(requestPayload.url, {
      ...requestPayload,
      // BUG FIX: wire the stream to the timer so aborting actually cancels it.
      signal: timer.signal,
      async onopen(res) {
        timer.clear();
        const contentType = res.headers.get("content-type");
        console.log("[OpenAI] request response content type: ", contentType);

        if (contentType?.startsWith("text/plain")) {
          responseText = await res.clone().text();
          return finish();
        }

        if (
          !res.ok ||
          !res.headers
            .get("content-type")
            ?.startsWith(EventStreamContentType) ||
          res.status !== 200
        ) {
          const responseTexts = [responseText];
          let extraInfo = await res.clone().text();
          try {
            const resJson = await res.clone().json();
            extraInfo = prettyObject(resJson);
          } catch {}

          if (res.status === 401) {
            responseTexts.push(Locale.Error.Unauthorized);
          }
          if (extraInfo) {
            responseTexts.push(extraInfo);
          }
          responseText = responseTexts.join("\n\n");

          return finish();
        }
      },
      onmessage(msg) {
        if (msg.data === "[DONE]" || finished) {
          return finish();
        }
        const text = msg.data;
        try {
          const json = JSON.parse(text);
          const choices = json.choices as Array<{
            delta: { content: string };
          }>;
          const delta = choices[0]?.delta?.content;
          if (delta) {
            remainText += delta;
          }
        } catch (e) {
          console.error("[Request] parse error", text, msg);
        }
      },
      onclose() {
        finish();
      },
      onerror(e) {
        onError(e);
        throw e;
      },
      openWhenHidden: true,
    });

    return timer;
  }
}
/**
 * Turn an OpenAI-style path into an Azure path: drop every "v1/" prefix
 * segment and append the mandatory `api-version` query parameter.
 */
function makeAzurePath(path: string, apiVersion: string) {
  const withoutV1 = path.replaceAll("v1/", "");
  const separator = withoutV1.includes("?") ? "&" : "?";
  return `${withoutV1}${separator}api-version=${apiVersion}`;
}

View File

@ -0,0 +1,109 @@
import { getLocaleText } from "../../core/locale";
// Localized strings for the Azure provider's settings form.
// NOTE(review): the key `ApiVerion` is misspelled ("ApiVersion"), but it is
// part of the exported object shape — renaming it would break consumers.
export default getLocaleText<
  {
    ApiKey: {
      Title: string;
      SubTitle: string;
      Placeholder: string;
    };
    Endpoint: {
      Title: string;
      SubTitle: string;
    };
    ApiVerion: {
      Title: string;
      SubTitle: string;
    };
  },
  "en"
>(
  {
    cn: {
      ApiKey: {
        Title: "接口密钥",
        SubTitle: "使用自定义 Azure Key 绕过密码访问限制",
        Placeholder: "Azure API Key",
      },
      Endpoint: {
        Title: "接口地址",
        SubTitle: "样例:",
      },
      ApiVerion: {
        Title: "接口版本 (azure api version)",
        SubTitle: "选择指定的部分版本",
      },
    },
    en: {
      ApiKey: {
        Title: "Azure Api Key",
        SubTitle: "Check your api key from Azure console",
        Placeholder: "Azure Api Key",
      },
      Endpoint: {
        Title: "Azure Endpoint",
        SubTitle: "Example: ",
      },
      ApiVerion: {
        Title: "Azure Api Version",
        SubTitle: "Check your api version from azure console",
      },
    },
    pt: {
      ApiKey: {
        Title: "Chave API Azure",
        SubTitle: "Verifique sua chave API do console Azure",
        Placeholder: "Chave API Azure",
      },
      Endpoint: {
        Title: "Endpoint Azure",
        SubTitle: "Exemplo: ",
      },
      ApiVerion: {
        Title: "Versão API Azure",
        SubTitle: "Verifique sua versão API do console Azure",
      },
    },
    sk: {
      ApiKey: {
        Title: "API kľúč Azure",
        SubTitle: "Skontrolujte svoj API kľúč v Azure konzole",
        Placeholder: "API kľúč Azure",
      },
      Endpoint: {
        Title: "Koncový bod Azure",
        SubTitle: "Príklad: ",
      },
      ApiVerion: {
        Title: "Verzia API Azure",
        SubTitle: "Skontrolujte svoju verziu API v Azure konzole",
      },
    },
    tw: {
      ApiKey: {
        Title: "介面金鑰",
        SubTitle: "使用自定義 Azure Key 繞過密碼存取限制",
        Placeholder: "Azure API Key",
      },
      Endpoint: {
        Title: "介面(Endpoint) 地址",
        SubTitle: "樣例:",
      },
      ApiVerion: {
        Title: "介面版本 (azure api version)",
        SubTitle: "選擇指定的部分版本",
      },
    },
  },
  "en",
);

View File

@ -0,0 +1,63 @@
import { SettingItem } from "../../core/types";
import Locale from "./locale";
// Endpoint metadata for the Google generative-language (Gemini) API.
export const GoogleMetas = {
  ExampleEndpoint: "https://generativelanguage.googleapis.com/",
  ChatPath: (modelName: string) => `v1beta/models/${modelName}:generateContent`,
  // NOTE(review): identical to ChatPath today — presumably kept separate so
  // vision models can diverge later; confirm before collapsing the two.
  VisionChatPath: (modelName: string) =>
    `v1beta/models/${modelName}:generateContent`,
};

// Keys of the user-configurable settings this provider understands.
export type SettingKeys = "googleUrl" | "googleApiKey" | "googleApiVersion";
// Catalogue of Gemini models exposed by this provider.
// Compact table: [name, isVision, isDefaultSelected]; every entry is active
// by default and displayName always mirrors the model name.
const googleModelTable: Array<[string, boolean, boolean]> = [
  ["gemini-1.0-pro", false, true],
  ["gemini-1.5-pro-latest", true, false],
  ["gemini-pro-vision", true, false],
];

export const modelConfigs = googleModelTable.map(
  ([name, isVision, isDefaultSelected]) => ({
    name,
    displayName: name,
    isVision,
    isDefaultActive: true,
    isDefaultSelected,
  }),
);
// Settings rendered in the provider configuration UI for Google.
export const settingItems: SettingItem<SettingKeys>[] = [
  {
    name: "googleUrl",
    title: Locale.Endpoint.Title,
    description: Locale.Endpoint.SubTitle + GoogleMetas.ExampleEndpoint,
    placeholder: GoogleMetas.ExampleEndpoint,
    type: "input",
    validators: ["required"],
  },
  {
    name: "googleApiKey",
    title: Locale.ApiKey.Title,
    description: Locale.ApiKey.SubTitle,
    placeholder: Locale.ApiKey.Placeholder,
    type: "input",
    inputType: "password",
    validators: ["required"],
  },
  {
    // NOTE(review): this key is not read anywhere in the google provider's
    // request code, and the placeholder looks like an Azure-style version
    // string — confirm whether "required" and this placeholder are intended.
    name: "googleApiVersion",
    title: Locale.ApiVersion.Title,
    description: Locale.ApiVersion.SubTitle,
    placeholder: "2023-08-01-preview",
    type: "input",
    validators: ["required"],
  },
];

View File

@ -0,0 +1,338 @@
import { getMessageImages, getMessageTextContent } from "@/app/utils";
import { SettingKeys, modelConfigs, settingItems, GoogleMetas } from "./config";
import {
InternalChatRequestPayload,
IProviderTemplate,
StandChatReponseMessage,
} from "../../core/types";
// Public alias so consumers can reference this provider's setting-key union.
export type GoogleProviderSettingKeys = SettingKeys;
/**
 * Provider template for Google Gemini ("generativelanguage") chat models.
 *
 * Translates the internal chat payload into Gemini's `contents` format,
 * supports both whole-message (`chat`) and streamed (`streamChat`) requests,
 * and attaches the provider's safety settings to every request.
 */
export default class GoogleProvider
  implements IProviderTemplate<SettingKeys, "google", typeof GoogleMetas>
{
  name = "google" as const;
  metas = GoogleMetas;

  providerMeta = {
    displayName: "Google",
    settingItems,
  };
  models = modelConfigs.map((c) => ({ ...c, providerTemplateName: this.name }));

  // Abort any request that has not completed within this window.
  readonly REQUEST_TIMEOUT_MS = 60000;

  /**
   * Build request headers. Outside the app build a user-supplied API key is
   * mandatory (sent as a Bearer token); in-app requests authenticate via the
   * `?key=` query parameter appended in formatChatPayload instead.
   */
  private getHeaders(payload: InternalChatRequestPayload<SettingKeys>) {
    const {
      providerConfig: { googleApiKey },
      context: { isApp },
    } = payload;
    const headers: Record<string, string> = {
      "Content-Type": "application/json",
      Accept: "application/json",
    };

    const authHeader = "Authorization";

    const makeBearer = (s: string) => `Bearer ${s.trim()}`;
    const validString = (x?: string): x is string => Boolean(x && x.length > 0);

    // when using google api in app, not set auth header
    if (!isApp) {
      // use user's api key first
      if (validString(googleApiKey)) {
        headers[authHeader] = makeBearer(googleApiKey);
      } else {
        throw new Error("no apiKey when chat through google");
      }
    }

    return headers;
  }

  /**
   * Convert the internal payload into a Gemini request: build `parts` (with
   * inline image data for vision models), collapse consecutive same-role
   * messages (Gemini rejects them), and resolve the final URL.
   */
  private formatChatPayload(payload: InternalChatRequestPayload<SettingKeys>) {
    const {
      messages,
      isVisionModel,
      model,
      stream,
      modelConfig,
      providerConfig,
      context: { isApp },
    } = payload;
    const { googleUrl, googleApiKey } = providerConfig;
    const { temperature, top_p, max_tokens } = modelConfig;

    const internalMessages = messages.map((v) => {
      let parts: any[] = [{ text: getMessageTextContent(v) }];
      if (isVisionModel) {
        const images = getMessageImages(v);
        if (images.length > 0) {
          // Images arrive as data URLs; split into mime type and base64 body.
          parts = parts.concat(
            images.map((image) => {
              const imageType = image.split(";")[0].split(":")[1];
              const imageData = image.split(",")[1];
              return {
                inline_data: {
                  mime_type: imageType,
                  data: imageData,
                },
              };
            }),
          );
        }
      }
      return {
        // Gemini only knows "user" and "model" roles.
        role: v.role.replace("assistant", "model").replace("system", "user"),
        parts: parts,
      };
    });

    // google requires that role in neighboring messages must not be the same
    for (let i = 0; i < internalMessages.length - 1; ) {
      // Check if current and next item both have the same role
      if (internalMessages[i].role === internalMessages[i + 1].role) {
        // Concatenate the 'parts' of the current and next item
        internalMessages[i].parts = internalMessages[i].parts.concat(
          internalMessages[i + 1].parts,
        );
        // Remove the next item
        internalMessages.splice(i + 1, 1);
      } else {
        // Move to the next item
        i++;
      }
    }

    const requestPayload = {
      contents: internalMessages,
      generationConfig: {
        temperature,
        maxOutputTokens: max_tokens,
        topP: top_p,
      },
      safetySettings: [
        {
          category: "HARM_CATEGORY_HARASSMENT",
          threshold: "BLOCK_ONLY_HIGH",
        },
        {
          category: "HARM_CATEGORY_HATE_SPEECH",
          threshold: "BLOCK_ONLY_HIGH",
        },
        {
          category: "HARM_CATEGORY_SEXUALLY_EXPLICIT",
          threshold: "BLOCK_ONLY_HIGH",
        },
        {
          category: "HARM_CATEGORY_DANGEROUS_CONTENT",
          threshold: "BLOCK_ONLY_HIGH",
        },
      ],
    };

    let baseUrl = googleUrl;

    let googleChatPath = isVisionModel
      ? GoogleMetas.VisionChatPath(model)
      : GoogleMetas.ChatPath(model);

    // NOTE(review): when a custom googleUrl is configured, the chat path is
    // never appended to it — the user-supplied URL is used verbatim. Confirm
    // whether that is intended or the path should be joined here too.
    if (!baseUrl) {
      baseUrl = "/api/google/" + googleChatPath;
    }
    if (isApp) {
      // In-app requests authenticate via query parameter instead of a header.
      baseUrl += `?key=${googleApiKey}`;
    }

    return {
      headers: this.getHeaders(payload),
      body: JSON.stringify(requestPayload),
      method: "POST",
      url: stream
        ? baseUrl.replace("generateContent", "streamGenerateContent")
        : baseUrl,
    };
  }

  /**
   * Extract the reply text from a non-streamed response body, surfacing
   * prompt-feedback blocks as errors.
   */
  private readWholeMessageResponseBody(res: any) {
    if (res?.promptFeedback?.blockReason) {
      // being blocked
      throw new Error(
        "Message is being blocked for reason: " +
          res.promptFeedback.blockReason,
      );
    }
    return {
      message:
        res.candidates?.at(0)?.content?.parts?.at(0)?.text ||
        res.error?.message ||
        "",
    };
  }

  /**
   * One AbortController per request plus a timeout that fires abort().
   * FIX: the previous implementation spread the AbortController into the
   * returned object (`...controller`); `signal` and `abort` live on
   * AbortController.prototype, so the spread silently dropped them and
   * `timer.signal` / `timer.abort` were undefined at the call sites.
   */
  private getTimer = () => {
    const controller = new AbortController();

    const requestTimeoutId = setTimeout(
      () => controller.abort(),
      this.REQUEST_TIMEOUT_MS,
    );

    return {
      signal: controller.signal,
      abort: () => controller.abort(),
      clear: () => {
        clearTimeout(requestTimeoutId);
      },
    };
  };

  /**
   * Streamed chat. Gemini streams a growing JSON array; each read re-parses
   * the partial payload and forwards only the delta. Output is fed to the
   * caller through a requestAnimationFrame loop so it renders smoothly.
   * Returns the timer so the caller can abort the request.
   */
  streamChat(
    payload: InternalChatRequestPayload<SettingKeys>,
    onProgress: (message: string, chunk: string) => void,
    onFinish: (message: string) => void,
    onError: (err: Error) => void,
  ) {
    const requestPayload = this.formatChatPayload(payload);

    let responseText = "";
    let remainText = "";
    let finished = false;
    const timer = this.getTimer();

    let existingTexts: string[] = [];

    const finish = () => {
      finished = true;
      onFinish(existingTexts.join(""));
    };

    // animate response to make it look smooth
    const animateResponseText = () => {
      if (finished || timer.signal.aborted) {
        responseText += remainText;
        finish();
        return;
      }

      if (remainText.length > 0) {
        const fetchCount = Math.max(1, Math.round(remainText.length / 60));
        const fetchText = remainText.slice(0, fetchCount);
        responseText += fetchText;
        remainText = remainText.slice(fetchCount);
        onProgress(responseText, fetchText);
      }

      requestAnimationFrame(animateResponseText);
    };

    // start animation
    animateResponseText();

    fetch(requestPayload.url, {
      ...requestPayload,
      signal: timer.signal,
    })
      .then((response) => {
        const reader = response?.body?.getReader();
        const decoder = new TextDecoder();
        let partialData = "";

        return reader?.read().then(function processText({
          done,
          value,
        }): Promise<any> {
          if (done) {
            if (response.status !== 200) {
              try {
                let data = JSON.parse(ensureProperEnding(partialData));
                if (data && data[0].error) {
                  onError(new Error(data[0].error.message));
                } else {
                  onError(new Error("Request failed"));
                }
              } catch (_) {
                onError(new Error("Request failed"));
              }
            }

            console.log("Stream complete");
            // The animation loop observes `finished` and invokes onFinish.
            finished = true;
            return Promise.resolve();
          }

          partialData += decoder.decode(value, { stream: true });

          try {
            let data = JSON.parse(ensureProperEnding(partialData));

            // Flatten every candidate's parts into one text per chunk.
            const textArray = data.reduce(
              (acc: string[], item: { candidates: any[] }) => {
                const texts = item.candidates.map((candidate) =>
                  candidate.content.parts
                    .map((part: { text: any }) => part.text)
                    .join(""),
                );
                return acc.concat(texts);
              },
              [],
            );

            if (textArray.length > existingTexts.length) {
              const deltaArray = textArray.slice(existingTexts.length);
              existingTexts = textArray;
              remainText += deltaArray.join("");
            }
          } catch (error) {
            // skip error message when parsing json
          }

          return reader.read().then(processText);
        });
      })
      .catch((error) => {
        console.error("Error:", error);
      });

    return timer;
  }

  /**
   * Whole-message chat: one request, one parsed response.
   */
  async chat(
    payload: InternalChatRequestPayload<SettingKeys>,
  ): Promise<StandChatReponseMessage> {
    const requestPayload = this.formatChatPayload(payload);
    const timer = this.getTimer();

    const res = await fetch(requestPayload.url, {
      headers: {
        ...requestPayload.headers,
      },
      body: requestPayload.body,
      method: requestPayload.method,
      signal: timer.signal,
    });

    // FIX: the previous code armed a second, redundant timeout here and never
    // cleared the timer's internal one, leaking a pending abort callback.
    timer.clear();

    const resJson = await res.json();
    const message = this.readWholeMessageResponseBody(resJson);

    return message;
  }
}
function ensureProperEnding(str: string) {
  // A streamed JSON array may arrive truncated; close it so JSON.parse has a
  // chance to succeed on the partial payload.
  const isOpenArray = str.startsWith("[") && !str.endsWith("]");
  return isOpenArray ? `${str}]` : str;
}

View File

@ -0,0 +1,93 @@
import { getLocaleText } from "../../core/locale";
// Localized UI strings for the Google provider's setting items.
// NOTE(review): unlike the other providers, no "pt" translation is present —
// getLocaleText falls back to "en" for missing languages.
export default getLocaleText<
  {
    ApiKey: {
      Title: string;
      SubTitle: string;
      Placeholder: string;
    };
    Endpoint: {
      Title: string;
      SubTitle: string;
    };
    ApiVersion: {
      Title: string;
      SubTitle: string;
    };
  },
  "en"
>(
  {
    cn: {
      ApiKey: {
        Title: "API 密钥",
        SubTitle: "从 Google AI 获取您的 API 密钥",
        Placeholder: "输入您的 Google AI Studio API 密钥",
      },
      Endpoint: {
        Title: "终端地址",
        SubTitle: "示例:",
      },
      ApiVersion: {
        Title: "API 版本(仅适用于 gemini-pro",
        SubTitle: "选择一个特定的 API 版本",
      },
    },
    en: {
      ApiKey: {
        Title: "API Key",
        SubTitle: "Obtain your API Key from Google AI",
        Placeholder: "Enter your Google AI Studio API Key",
      },
      Endpoint: {
        Title: "Endpoint Address",
        SubTitle: "Example:",
      },
      ApiVersion: {
        Title: "API Version (specific to gemini-pro)",
        SubTitle: "Select a specific API version",
      },
    },
    sk: {
      ApiKey: {
        Title: "API kľúč",
        SubTitle:
          "Obísť obmedzenia prístupu heslom pomocou vlastného API kľúča Google AI Studio",
        Placeholder: "API kľúč Google AI Studio",
      },
      Endpoint: {
        Title: "Adresa koncového bodu",
        SubTitle: "Príklad:",
      },
      ApiVersion: {
        Title: "Verzia API (gemini-pro verzia API)",
        SubTitle: "Vyberte špecifickú verziu časti",
      },
    },
    tw: {
      ApiKey: {
        Title: "API 金鑰",
        SubTitle: "從 Google AI 取得您的 API 金鑰",
        Placeholder: "輸入您的 Google AI Studio API 金鑰",
      },
      Endpoint: {
        Title: "終端地址",
        SubTitle: "範例:",
      },
      ApiVersion: {
        Title: "API 版本(僅適用於 gemini-pro",
        SubTitle: "選擇一個特定的 API 版本",
      },
    },
  },
  "en",
);

View File

@ -0,0 +1,20 @@
export {
default as NextChatProvider,
type NextChatProviderSettingKeys,
} from "@/app/client/providers/nextchat";
export {
default as GoogleProvider,
type GoogleProviderSettingKeys,
} from "@/app/client/providers/google";
export {
default as OpenAIProvider,
type OpenAIProviderSettingKeys,
} from "@/app/client/providers/openai";
export {
default as AnthropicProvider,
type AnthropicProviderSettingKeys,
} from "@/app/client/providers/anthropic";
export {
default as AzureProvider,
type AzureProviderSettingKeys,
} from "@/app/client/providers/azure";

View File

@ -0,0 +1,67 @@
import { SettingItem } from "../../core/types";
import { isVisionModel } from "@/app/utils";
import Locale from "@/app/locales";
// Relative API paths of the proxied OpenAI-compatible endpoints.
export const NextChatMetas = {
  ChatPath: "v1/chat/completions",
  UsagePath: "dashboard/billing/usage",
  SubsPath: "dashboard/billing/subscription",
  ListModelPath: "v1/models",
};

// The only setting this provider needs: the app's access code.
export type SettingKeys = "accessCode";

// NOTE(review): "Modal" is presumably a typo for "Model", but the export name
// is public API — renaming it would break importers.
export const defaultModal = "gpt-3.5-turbo";

// Every model name the NextChat proxy can serve, across all backends.
export const models = [
  defaultModal,
  "gpt-3.5-turbo-0301",
  "gpt-3.5-turbo-0613",
  "gpt-3.5-turbo-1106",
  "gpt-3.5-turbo-0125",
  "gpt-3.5-turbo-16k",
  "gpt-3.5-turbo-16k-0613",
  "gpt-4",
  "gpt-4-0314",
  "gpt-4-0613",
  "gpt-4-1106-preview",
  "gpt-4-0125-preview",
  "gpt-4-32k",
  "gpt-4-32k-0314",
  "gpt-4-32k-0613",
  "gpt-4-turbo",
  "gpt-4-turbo-preview",
  "gpt-4-vision-preview",
  "gpt-4-turbo-2024-04-09",
  "gemini-1.0-pro",
  "gemini-1.5-pro-latest",
  "gemini-pro-vision",
  "claude-instant-1.2",
  "claude-2.0",
  "claude-2.1",
  "claude-3-sonnet-20240229",
  "claude-3-opus-20240229",
  "claude-3-haiku-20240307",
];

// Derive the UI model configs: vision support is inferred from the name and
// only the default model starts out selected.
export const modelConfigs = models.map((name) => ({
  name,
  displayName: name,
  isVision: isVisionModel(name),
  isDefaultActive: true,
  isDefaultSelected: name === defaultModal,
}));

// Settings rendered in the provider configuration UI.
export const settingItems: SettingItem<SettingKeys>[] = [
  {
    name: "accessCode",
    title: Locale.Auth.Title,
    description: Locale.Auth.Tips,
    placeholder: Locale.Auth.Input,
    type: "input",
    inputType: "password",
    validators: ["required"],
  },
];

View File

@ -0,0 +1,303 @@
import {
modelConfigs,
settingItems,
SettingKeys,
NextChatMetas,
} from "./config";
import { getMessageTextContent } from "@/app/utils";
import { ACCESS_CODE_PREFIX } from "@/app/constant";
import {
InternalChatRequestPayload,
IProviderTemplate,
StandChatReponseMessage,
} from "../../core/types";
import {
EventStreamContentType,
fetchEventSource,
} from "@fortaine/fetch-event-source";
import { prettyObject } from "@/app/utils/format";
import Locale from "@/app/locales";
// Public alias so consumers can reference this provider's setting-key union.
export type NextChatProviderSettingKeys = SettingKeys;

// Allowed chat roles, kept as a readonly tuple so MessageRole can be derived.
export const ROLES = ["system", "user", "assistant"] as const;
export type MessageRole = (typeof ROLES)[number];

// One element of a multimodal message: either plain text or an image URL.
export interface MultimodalContent {
  type: "text" | "image_url";
  text?: string;
  image_url?: {
    url: string;
  };
}

// A single chat message as supplied by the application layer.
export interface RequestMessage {
  role: MessageRole;
  content: string | MultimodalContent[];
}

// Wire format of the OpenAI-compatible /chat/completions request body.
interface RequestPayload {
  messages: {
    role: "system" | "user" | "assistant";
    content: string | MultimodalContent[];
  }[];
  stream?: boolean;
  model: string;
  temperature: number;
  presence_penalty: number;
  frequency_penalty: number;
  top_p: number;
  max_tokens?: number;
}
/**
 * Provider template that talks to the NextChat proxy (/api/openai) using the
 * OpenAI-compatible chat-completions protocol, authenticated with the app's
 * access code instead of a raw API key.
 */
export default class NextChatProvider
  implements IProviderTemplate<SettingKeys, "nextchat", typeof NextChatMetas>
{
  name = "nextchat" as const;
  metas = NextChatMetas;

  models = modelConfigs.map((c) => ({ ...c, providerTemplateName: this.name }));

  providerMeta = {
    displayName: "NextChat",
    settingItems,
  };

  // Abort any request that has not produced a response within this window.
  readonly REQUEST_TIMEOUT_MS = 60000;

  // All requests go through the app's own proxy endpoint.
  private path(): string {
    const path = NextChatMetas.ChatPath;
    let baseUrl = "/api/openai";
    console.log("[Proxy Endpoint] ", baseUrl, path);
    return [baseUrl, path].join("/");
  }

  /**
   * Build request headers. The access code is sent as a Bearer token with a
   * well-known prefix so the server can tell it apart from a real API key.
   */
  private getHeaders(payload: InternalChatRequestPayload<SettingKeys>) {
    const { accessCode } = payload.providerConfig;
    const headers: Record<string, string> = {
      "Content-Type": "application/json",
      Accept: "application/json",
    };
    const authHeader = "Authorization";

    const makeBearer = (s: string) => `Bearer ${s.trim()}`;
    const validString = (x?: string): x is string => Boolean(x && x.length > 0);

    if (validString(accessCode)) {
      headers[authHeader] = makeBearer(ACCESS_CODE_PREFIX + accessCode);
    }

    return headers;
  }

  /**
   * Build the OpenAI-compatible request: headers, JSON body, method and URL.
   * Vision models receive the raw multimodal content and a max_tokens floor.
   */
  private formatChatPayload(payload: InternalChatRequestPayload<SettingKeys>) {
    const { messages, isVisionModel, model, stream, modelConfig } = payload;
    const {
      temperature,
      presence_penalty,
      frequency_penalty,
      top_p,
      max_tokens,
    } = modelConfig;

    const openAiMessages = messages.map((v) => ({
      role: v.role,
      content: isVisionModel ? v.content : getMessageTextContent(v),
    }));

    const requestPayload: RequestPayload = {
      messages: openAiMessages,
      stream,
      model,
      temperature,
      presence_penalty,
      frequency_penalty,
      top_p,
    };

    // add max_tokens to vision model
    if (isVisionModel) {
      requestPayload["max_tokens"] = Math.max(max_tokens, 4000);
    }

    console.log("[Request] openai payload: ", requestPayload);

    return {
      headers: this.getHeaders(payload),
      body: JSON.stringify(requestPayload),
      method: "POST",
      url: this.path(),
    };
  }

  // Extract the assistant message from a non-streamed response body.
  private readWholeMessageResponseBody(res: any) {
    return {
      message: res.choices?.at(0)?.message?.content ?? "",
    };
  }

  /**
   * One AbortController per request plus a timeout that fires abort().
   * FIX: the previous implementation spread the AbortController into the
   * returned object (`...controller`); `signal` and `abort` live on
   * AbortController.prototype, so the spread silently dropped them and
   * `timer.signal` was undefined at the call sites.
   */
  private getTimer = () => {
    const controller = new AbortController();

    const requestTimeoutId = setTimeout(
      () => controller.abort(),
      this.REQUEST_TIMEOUT_MS,
    );

    return {
      signal: controller.signal,
      abort: () => controller.abort(),
      clear: () => {
        clearTimeout(requestTimeoutId);
      },
    };
  };

  /**
   * Streamed chat over server-sent events. Deltas are buffered and fed to the
   * caller through a requestAnimationFrame loop so rendering looks smooth.
   * Returns the timer so the caller can abort the request.
   */
  streamChat(
    payload: InternalChatRequestPayload<SettingKeys>,
    onProgress: (message: string, chunk: string) => void,
    onFinish: (message: string) => void,
    onError: (err: Error) => void,
  ) {
    const requestPayload = this.formatChatPayload(payload);

    let responseText = "";
    let remainText = "";
    let finished = false;

    const timer = this.getTimer();

    // animate response to make it look smooth
    const animateResponseText = () => {
      if (finished || timer.signal.aborted) {
        responseText += remainText;
        console.log("[Response Animation] finished");
        if (responseText?.length === 0) {
          onError(new Error("empty response from server"));
        }
        return;
      }

      if (remainText.length > 0) {
        const fetchCount = Math.max(1, Math.round(remainText.length / 60));
        const fetchText = remainText.slice(0, fetchCount);
        responseText += fetchText;
        remainText = remainText.slice(fetchCount);
        onProgress(responseText, fetchText);
      }

      requestAnimationFrame(animateResponseText);
    };

    // start animation
    animateResponseText();

    // Deliver the final text exactly once, whether we finish, abort or error.
    const finish = () => {
      if (!finished) {
        finished = true;
        onFinish(responseText + remainText);
      }
    };

    timer.signal.onabort = finish;

    fetchEventSource(requestPayload.url, {
      ...requestPayload,
      async onopen(res) {
        timer.clear();
        const contentType = res.headers.get("content-type");
        console.log("[OpenAI] request response content type: ", contentType);

        // Some proxies reply with a plain-text body instead of a stream.
        if (contentType?.startsWith("text/plain")) {
          responseText = await res.clone().text();
          return finish();
        }

        if (
          !res.ok ||
          !res.headers
            .get("content-type")
            ?.startsWith(EventStreamContentType) ||
          res.status !== 200
        ) {
          const responseTexts = [responseText];
          let extraInfo = await res.clone().text();
          try {
            const resJson = await res.clone().json();
            extraInfo = prettyObject(resJson);
          } catch {}

          if (res.status === 401) {
            responseTexts.push(Locale.Error.Unauthorized);
          }

          if (extraInfo) {
            responseTexts.push(extraInfo);
          }

          responseText = responseTexts.join("\n\n");

          return finish();
        }
      },
      onmessage(msg) {
        if (msg.data === "[DONE]" || finished) {
          return finish();
        }
        const text = msg.data;
        try {
          const json = JSON.parse(text);
          const choices = json.choices as Array<{
            delta: { content: string };
          }>;
          const delta = choices[0]?.delta?.content;

          if (delta) {
            remainText += delta;
          }
        } catch (e) {
          console.error("[Request] parse error", text, msg);
        }
      },
      onclose() {
        finish();
      },
      onerror(e) {
        onError(e);
        // rethrow so fetchEventSource stops retrying
        throw e;
      },
      openWhenHidden: true,
    });

    return timer;
  }

  /**
   * Whole-message chat: one request, one parsed response.
   */
  async chat(
    payload: InternalChatRequestPayload<"accessCode">,
  ): Promise<StandChatReponseMessage> {
    const requestPayload = this.formatChatPayload(payload);
    const timer = this.getTimer();

    const res = await fetch(requestPayload.url, {
      headers: {
        ...requestPayload.headers,
      },
      body: requestPayload.body,
      method: requestPayload.method,
      signal: timer.signal,
    });

    timer.clear();

    const resJson = await res.json();
    const message = this.readWholeMessageResponseBody(resJson);

    return message;
  }
}

View File

@ -0,0 +1,170 @@
import { SettingItem } from "../../core/types";
import Locale from "./locale";
// Default upstream endpoint, used when the user does not configure one.
export const OPENAI_BASE_URL = "https://api.openai.com";

// Relative API paths of the OpenAI endpoints this provider touches.
export const OpenaiMetas = {
  ChatPath: "v1/chat/completions",
  UsagePath: "dashboard/billing/usage",
  SubsPath: "dashboard/billing/subscription",
  ListModelPath: "v1/models",
};

// Keys of the user-configurable settings this provider understands.
export type SettingKeys = "openaiUrl" | "openaiApiKey";

// NOTE(review): "Modal" is presumably a typo for "Model", but the export name
// is public API — renaming it would break importers.
export const defaultModal = "gpt-3.5-turbo";
// Catalogue of OpenAI chat models.
// Compact table: [name, isVision, isDefaultActive, isDefaultSelected];
// displayName always mirrors the model name.
const openaiModelTable: Array<[string, boolean, boolean, boolean]> = [
  ["gpt-3.5-turbo", false, true, true],
  ["gpt-3.5-turbo-0301", false, false, false],
  ["gpt-3.5-turbo-0613", false, false, false],
  ["gpt-3.5-turbo-1106", false, false, false],
  ["gpt-3.5-turbo-0125", false, false, false],
  ["gpt-3.5-turbo-16k", false, false, false],
  ["gpt-3.5-turbo-16k-0613", false, false, false],
  ["gpt-4", false, true, false],
  ["gpt-4-0314", false, false, false],
  ["gpt-4-0613", false, false, false],
  ["gpt-4-1106-preview", false, false, false],
  ["gpt-4-0125-preview", false, false, false],
  ["gpt-4-32k", false, false, false],
  ["gpt-4-32k-0314", false, false, false],
  ["gpt-4-32k-0613", false, false, false],
  ["gpt-4-turbo", true, true, false],
  ["gpt-4-turbo-preview", false, false, false],
  ["gpt-4-vision-preview", true, false, false],
  ["gpt-4-turbo-2024-04-09", true, false, false],
];

export const modelConfigs = openaiModelTable.map(
  ([name, isVision, isDefaultActive, isDefaultSelected]) => ({
    name,
    displayName: name,
    isVision,
    isDefaultActive,
    isDefaultSelected,
  }),
);
// Settings rendered in the provider configuration UI for OpenAI.
export const settingItems: SettingItem<SettingKeys>[] = [
  {
    // Custom endpoint; prefilled with the official OpenAI base URL.
    name: "openaiUrl",
    title: Locale.Endpoint.Title,
    description: Locale.Endpoint.SubTitle,
    defaultValue: OPENAI_BASE_URL,
    type: "input",
  },
  {
    name: "openaiApiKey",
    title: Locale.ApiKey.Title,
    description: Locale.ApiKey.SubTitle,
    placeholder: Locale.ApiKey.Placeholder,
    type: "input",
    inputType: "password",
    validators: ["required"],
  },
];

View File

@ -0,0 +1,312 @@
import { modelConfigs, settingItems, SettingKeys, OpenaiMetas } from "./config";
import { getMessageTextContent } from "@/app/utils";
import {
InternalChatRequestPayload,
IProviderTemplate,
} from "../../core/types";
import {
EventStreamContentType,
fetchEventSource,
} from "@fortaine/fetch-event-source";
import { prettyObject } from "@/app/utils/format";
import Locale from "@/app/locales";
// Public alias so consumers can reference this provider's setting-key union.
export type OpenAIProviderSettingKeys = SettingKeys;

// Allowed chat roles, kept as a readonly tuple so MessageRole can be derived.
export const ROLES = ["system", "user", "assistant"] as const;
export type MessageRole = (typeof ROLES)[number];

// One element of a multimodal message: either plain text or an image URL.
export interface MultimodalContent {
  type: "text" | "image_url";
  text?: string;
  image_url?: {
    url: string;
  };
}

// A single chat message as supplied by the application layer.
export interface RequestMessage {
  role: MessageRole;
  content: string | MultimodalContent[];
}

// Wire format of the OpenAI /chat/completions request body.
interface RequestPayload {
  messages: {
    role: "system" | "user" | "assistant";
    content: string | MultimodalContent[];
  }[];
  stream?: boolean;
  model: string;
  temperature: number;
  presence_penalty: number;
  frequency_penalty: number;
  top_p: number;
  max_tokens?: number;
}
/**
 * Provider template for the official OpenAI chat-completions API, supporting
 * custom endpoints, streamed and whole-message requests.
 */
class OpenAIProvider
  implements IProviderTemplate<SettingKeys, "openai", typeof OpenaiMetas>
{
  name = "openai" as const;
  metas = OpenaiMetas;

  // Abort any request that has not produced a response within this window.
  readonly REQUEST_TIMEOUT_MS = 60000;

  models = modelConfigs.map((c) => ({ ...c, providerTemplateName: this.name }));

  providerMeta = {
    displayName: "OpenAI",
    settingItems,
  };

  /**
   * Resolve the request URL: use the configured endpoint (normalising a
   * trailing slash and a missing scheme) or fall back to the app proxy.
   */
  private path(payload: InternalChatRequestPayload<SettingKeys>): string {
    const {
      providerConfig: { openaiUrl },
    } = payload;
    const path = OpenaiMetas.ChatPath;

    let baseUrl = openaiUrl;
    if (!baseUrl) {
      baseUrl = "/api/openai";
    }

    if (baseUrl.endsWith("/")) {
      baseUrl = baseUrl.slice(0, baseUrl.length - 1);
    }
    if (!baseUrl.startsWith("http") && !baseUrl.startsWith("/api/openai")) {
      baseUrl = "https://" + baseUrl;
    }

    console.log("[Proxy Endpoint] ", baseUrl, path);

    return [baseUrl, path].join("/");
  }

  /**
   * Build request headers; a configured API key is sent as a Bearer token.
   */
  private getHeaders(payload: InternalChatRequestPayload<SettingKeys>) {
    const { openaiApiKey } = payload.providerConfig;
    const headers: Record<string, string> = {
      "Content-Type": "application/json",
      Accept: "application/json",
    };
    const authHeader = "Authorization";

    const makeBearer = (s: string) => `Bearer ${s.trim()}`;
    const validString = (x?: string): x is string => Boolean(x && x.length > 0);

    if (validString(openaiApiKey)) {
      headers[authHeader] = makeBearer(openaiApiKey);
    }

    return headers;
  }

  /**
   * Build the chat-completions request: headers, JSON body, method and URL.
   * Vision models receive the raw multimodal content and a max_tokens floor.
   */
  private formatChatPayload(payload: InternalChatRequestPayload<SettingKeys>) {
    const { messages, isVisionModel, model, stream, modelConfig } = payload;
    const {
      temperature,
      presence_penalty,
      frequency_penalty,
      top_p,
      max_tokens,
    } = modelConfig;

    const openAiMessages = messages.map((v) => ({
      role: v.role,
      content: isVisionModel ? v.content : getMessageTextContent(v),
    }));

    const requestPayload: RequestPayload = {
      messages: openAiMessages,
      stream,
      model,
      temperature,
      presence_penalty,
      frequency_penalty,
      top_p,
    };

    // add max_tokens to vision model
    if (isVisionModel) {
      requestPayload["max_tokens"] = Math.max(max_tokens, 4000);
    }

    console.log("[Request] openai payload: ", requestPayload);

    return {
      headers: this.getHeaders(payload),
      body: JSON.stringify(requestPayload),
      method: "POST",
      url: this.path(payload),
    };
  }

  // Extract the assistant message from a non-streamed response body.
  private readWholeMessageResponseBody(res: any) {
    return {
      message: res.choices?.at(0)?.message?.content ?? "",
    };
  }

  /**
   * One AbortController per request plus a timeout that fires abort().
   * FIX: the previous implementation spread the AbortController into the
   * returned object (`...controller`); `signal` and `abort` live on
   * AbortController.prototype, so the spread silently dropped them and
   * `timer.signal` was undefined at the call sites.
   */
  private getTimer = () => {
    const controller = new AbortController();

    const requestTimeoutId = setTimeout(
      () => controller.abort(),
      this.REQUEST_TIMEOUT_MS,
    );

    return {
      signal: controller.signal,
      abort: () => controller.abort(),
      clear: () => {
        clearTimeout(requestTimeoutId);
      },
    };
  };

  /**
   * Whole-message chat: one request, one parsed response.
   */
  async chat(payload: InternalChatRequestPayload<SettingKeys>) {
    const requestPayload = this.formatChatPayload(payload);
    const timer = this.getTimer();

    const res = await fetch(requestPayload.url, {
      headers: {
        ...requestPayload.headers,
      },
      body: requestPayload.body,
      method: requestPayload.method,
      signal: timer.signal,
    });

    timer.clear();

    const resJson = await res.json();
    const message = this.readWholeMessageResponseBody(resJson);

    return message;
  }

  /**
   * Streamed chat over server-sent events. Deltas are buffered and fed to the
   * caller through a requestAnimationFrame loop so rendering looks smooth.
   * Returns the timer so the caller can abort the request.
   */
  streamChat(
    payload: InternalChatRequestPayload<SettingKeys>,
    onProgress: (message: string, chunk: string) => void,
    onFinish: (message: string) => void,
    onError: (err: Error) => void,
  ) {
    const requestPayload = this.formatChatPayload(payload);

    const timer = this.getTimer();

    let responseText = "";
    let remainText = "";
    let finished = false;

    // animate response to make it look smooth
    const animateResponseText = () => {
      if (finished || timer.signal.aborted) {
        responseText += remainText;
        console.log("[Response Animation] finished");
        if (responseText?.length === 0) {
          onError(new Error("empty response from server"));
        }
        return;
      }

      if (remainText.length > 0) {
        const fetchCount = Math.max(1, Math.round(remainText.length / 60));
        const fetchText = remainText.slice(0, fetchCount);
        responseText += fetchText;
        remainText = remainText.slice(fetchCount);
        onProgress(responseText, fetchText);
      }

      requestAnimationFrame(animateResponseText);
    };

    // start animation
    animateResponseText();

    // Deliver the final text exactly once, whether we finish, abort or error.
    const finish = () => {
      if (!finished) {
        finished = true;
        onFinish(responseText + remainText);
      }
    };

    timer.signal.onabort = finish;

    fetchEventSource(requestPayload.url, {
      ...requestPayload,
      async onopen(res) {
        timer.clear();
        const contentType = res.headers.get("content-type");
        console.log("[OpenAI] request response content type: ", contentType);

        // Some proxies reply with a plain-text body instead of a stream.
        if (contentType?.startsWith("text/plain")) {
          responseText = await res.clone().text();
          return finish();
        }

        if (
          !res.ok ||
          !res.headers
            .get("content-type")
            ?.startsWith(EventStreamContentType) ||
          res.status !== 200
        ) {
          const responseTexts = [responseText];
          let extraInfo = await res.clone().text();
          try {
            const resJson = await res.clone().json();
            extraInfo = prettyObject(resJson);
          } catch {}

          if (res.status === 401) {
            responseTexts.push(Locale.Error.Unauthorized);
          }

          if (extraInfo) {
            responseTexts.push(extraInfo);
          }

          responseText = responseTexts.join("\n\n");

          return finish();
        }
      },
      onmessage(msg) {
        if (msg.data === "[DONE]" || finished) {
          return finish();
        }
        const text = msg.data;
        try {
          const json = JSON.parse(text);
          const choices = json.choices as Array<{
            delta: { content: string };
          }>;
          const delta = choices[0]?.delta?.content;

          if (delta) {
            remainText += delta;
          }
        } catch (e) {
          console.error("[Request] parse error", text, msg);
        }
      },
      onclose() {
        finish();
      },
      onerror(e) {
        onError(e);
        // rethrow so fetchEventSource stops retrying
        throw e;
      },
      openWhenHidden: true,
    });

    return timer;
  }
}

export default OpenAIProvider;

View File

@ -0,0 +1,82 @@
import { getLocaleText } from "../../core/locale";
// Localized UI strings for the OpenAI provider's setting items.
export default getLocaleText<
  {
    ApiKey: {
      Title: string;
      SubTitle: string;
      Placeholder: string;
    };
    Endpoint: {
      Title: string;
      SubTitle: string;
    };
  },
  "en"
>(
  {
    cn: {
      ApiKey: {
        Title: "API Key",
        SubTitle: "使用自定义 OpenAI Key 绕过密码访问限制",
        Placeholder: "OpenAI API Key",
      },
      Endpoint: {
        Title: "接口地址",
        SubTitle: "除默认地址外,必须包含 http(s)://",
      },
    },
    en: {
      ApiKey: {
        Title: "OpenAI API Key",
        SubTitle: "User custom OpenAI Api Key",
        Placeholder: "sk-xxx",
      },
      Endpoint: {
        Title: "OpenAI Endpoint",
        SubTitle: "Must starts with http(s):// or use /api/openai as default",
      },
    },
    pt: {
      ApiKey: {
        Title: "Chave API OpenAI",
        SubTitle: "Usar Chave API OpenAI personalizada",
        Placeholder: "sk-xxx",
      },
      Endpoint: {
        Title: "Endpoint OpenAI",
        SubTitle: "Deve começar com http(s):// ou usar /api/openai como padrão",
      },
    },
    sk: {
      ApiKey: {
        Title: "API kľúč OpenAI",
        SubTitle: "Použiť vlastný API kľúč OpenAI",
        Placeholder: "sk-xxx",
      },
      Endpoint: {
        Title: "Koncový bod OpenAI",
        SubTitle:
          "Musí začínať http(s):// alebo použiť /api/openai ako predvolený",
      },
    },
    tw: {
      ApiKey: {
        Title: "API Key",
        SubTitle: "使用自定義 OpenAI Key 繞過密碼存取限制",
        Placeholder: "OpenAI API Key",
      },
      Endpoint: {
        Title: "介面(Endpoint) 地址",
        SubTitle: "除預設地址外,必須包含 http(s)://",
      },
    },
  },
  "en",
);

View File

@ -21,7 +21,7 @@ type Groups = {
}; };
export interface ActionsBarProps { export interface ActionsBarProps {
actionsShema: Action[]; actionsSchema: Action[];
onSelect?: (id: string) => void; onSelect?: (id: string) => void;
selected?: string; selected?: string;
groups: string[][] | Groups; groups: string[][] | Groups;
@ -30,7 +30,7 @@ export interface ActionsBarProps {
} }
export default function ActionsBar(props: ActionsBarProps) { export default function ActionsBar(props: ActionsBarProps) {
const { actionsShema, onSelect, selected, groups, className, inMobile } = const { actionsSchema, onSelect, selected, groups, className, inMobile } =
props; props;
const handlerClick = const handlerClick =
@ -53,7 +53,7 @@ export default function ActionsBar(props: ActionsBarProps) {
const content = internalGroup.reduce((res, group, ind, arr) => { const content = internalGroup.reduce((res, group, ind, arr) => {
res.push( res.push(
...group.map((i) => { ...group.map((i) => {
const action = actionsShema.find((a) => a.id === i); const action = actionsSchema.find((a) => a.id === i);
if (!action) { if (!action) {
return <></>; return <></>;
} }

View File

@ -47,6 +47,7 @@ export enum StoreKey {
Prompt = "prompt-store", Prompt = "prompt-store",
Update = "chat-update", Update = "chat-update",
Sync = "sync", Sync = "sync",
Provider = "provider",
} }
export const NARROW_SIDEBAR_WIDTH = 100; export const NARROW_SIDEBAR_WIDTH = 100;
@ -106,7 +107,8 @@ export const Azure = {
export const Google = { export const Google = {
ExampleEndpoint: "https://generativelanguage.googleapis.com/", ExampleEndpoint: "https://generativelanguage.googleapis.com/",
ChatPath: (modelName: string) => `v1beta/models/${modelName}:generateContent`, ChatPath: (modelName: string) => `v1beta/models/${modelName}:generateContent`,
VisionChatPath: (modelName: string) => `v1beta/models/${modelName}:generateContent`, VisionChatPath: (modelName: string) =>
`v1beta/models/${modelName}:generateContent`,
}; };
export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang

View File

@ -31,7 +31,7 @@ export interface MessageActionsProps {
setShowPromptModal?: (value: boolean) => void; setShowPromptModal?: (value: boolean) => void;
} }
const genActionsShema = ( const genActionsSchema = (
message: RenderMessage, message: RenderMessage,
{ {
onEdit, onEdit,
@ -272,7 +272,7 @@ export default function MessageActions(props: MessageActionsProps) {
`} `}
> >
<ActionsBar <ActionsBar
actionsShema={genActionsShema(message, { actionsSchema={genActionsSchema(message, {
onCopy, onCopy,
onDelete, onDelete,
onPinMessage, onPinMessage,

View File

@ -51,7 +51,7 @@ export function SideBar(props: { className?: string }) {
> >
<ActionsBar <ActionsBar
inMobile={isMobileScreen} inMobile={isMobileScreen}
actionsShema={[ actionsSchema={[
{ {
id: Path.Masks, id: Path.Masks,
icons: { icons: {

5
app/global.d.ts vendored
View File

@ -21,10 +21,13 @@ declare interface Window {
writeBinaryFile(path: string, data: Uint8Array): Promise<void>; writeBinaryFile(path: string, data: Uint8Array): Promise<void>;
writeTextFile(path: string, data: string): Promise<void>; writeTextFile(path: string, data: string): Promise<void>;
}; };
notification:{ notification: {
requestPermission(): Promise<Permission>; requestPermission(): Promise<Permission>;
isPermissionGranted(): Promise<boolean>; isPermissionGranted(): Promise<boolean>;
sendNotification(options: string | Options): void; sendNotification(options: string | Options): void;
}; };
http: {
fetch: typeof window.fetch;
};
}; };
} }

View File

@ -4,6 +4,9 @@ import { SubmitKey } from "../store/config";
const isApp = !!getClientConfig()?.isApp; const isApp = !!getClientConfig()?.isApp;
const cn = { const cn = {
Provider: {
// OPENAI_DISPLAY_NAME: 'OpenAI'
},
WIP: "该功能仍在开发中……", WIP: "该功能仍在开发中……",
Error: { Error: {
Unauthorized: isApp Unauthorized: isApp

111
app/store/provider.ts Normal file
View File

@ -0,0 +1,111 @@
import {
ProviderClient,
NextChatProvider,
createProvider,
Provider,
Model,
} from "@/app/client";
// import { getClientConfig } from "../config/client";
import { StoreKey } from "../constant";
import { createPersistStore } from "../utils/store";
// Initial provider-store state: one provider instance per known template,
// excluding the built-in NextChat provider (it is not user-configurable here).
export const DEFAULT_CONFIG = {
  lastUpdate: Date.now(), // timestamp, to merge state
  providers: ProviderClient.getProviderTemplateList()
    .filter((p) => p !== NextChatProvider)
    .map((p) => createProvider(p)),
};

// Shape of the persisted provider store state (derived from the default).
export type ProvidersConfig = typeof DEFAULT_CONFIG;
/**
 * Persisted store of chat-model providers and their models.
 *
 * Identity semantics: providers are keyed by `name`, models by `name`
 * within their provider — all CRUD helpers below match on those fields.
 */
export const useProviders = createPersistStore(
  { ...DEFAULT_CONFIG },
  (set, get) => {
    const methods = {
      /** Restore the pristine default provider list. */
      reset() {
        set(() => ({ ...DEFAULT_CONFIG }));
      },
      /**
       * Append a provider.
       * NOTE(review): no duplicate-name guard — adding an existing name makes
       * `updateProvider`/`deleteProvider` affect both entries; confirm callers
       * validate uniqueness.
       */
      addProvider(provider: Provider) {
        set(() => ({
          providers: [...get().providers, provider],
        }));
      },
      /** Remove the provider whose `name` matches (filter already copies the array). */
      deleteProvider(provider: Provider) {
        set(() => ({
          providers: get().providers.filter((p) => p.name !== provider.name),
        }));
      },
      /** Replace the provider whose `name` matches with the given value. */
      updateProvider(provider: Provider) {
        set(() => ({
          providers: get().providers.map((p) =>
            p.name === provider.name ? provider : p,
          ),
        }));
      },
      /** Look up a provider by name; undefined when absent. */
      getProvider(providerName: string) {
        return get().providers.find((p) => p.name === providerName);
      },
      /**
       * Add a model to a provider, stamping it with the provider's template
       * name. The caller-supplied fields win on conflict (spread order).
       */
      addModel(model: Omit<Model, "providerTemplateName">, provider: Provider) {
        const newModel: Model = {
          providerTemplateName: provider.providerTemplateName,
          ...model,
        };
        return methods.updateProvider({
          ...provider,
          models: [...provider.models, newModel],
        });
      },
      /** Remove the model whose `name` matches from the given provider. */
      deleteModel(model: Model, provider: Provider) {
        return methods.updateProvider({
          ...provider,
          models: provider.models.filter((m) => m.name !== model.name),
        });
      },
      /** Replace the model whose `name` matches within the given provider. */
      updateModel(model: Model, provider: Provider) {
        return methods.updateProvider({
          ...provider,
          models: provider.models.map((m) =>
            m.name === model.name ? model : m,
          ),
        });
      },
      /**
       * Find a model by name under a named provider, tagged with its owner's
       * name; undefined when either lookup misses.
       */
      getModel(
        modelName: string,
        providerName: string,
      ): (Model & { providerName: string }) | undefined {
        const provider = methods.getProvider(providerName);
        const model = provider?.models.find((m) => m.name === modelName);
        // A found provider's name equals the lookup key (matched on name),
        // so use the parameter directly instead of a non-null assertion.
        return model ? { ...model, providerName } : undefined;
      },
      // TODO(review): not implemented yet — currently returns undefined.
      allModels() {},
    };

    return methods;
  },
  {
    name: StoreKey.Provider,
    version: 1.0,
    // NOTE(review): migration ignores `version` and passes state through
    // unchanged — fine for the initial version, but real migrations must
    // branch on `version` when the schema changes.
    migrate(persistedState, version) {
      const state = persistedState as ProvidersConfig;
      return state as any;
    },
  },
);

View File

@ -32,3 +32,9 @@ export function updateGlobalCSSVars(nextSidebar: number) {
return { menuWidth }; return { menuWidth };
} }
// Module-wide counter backing getUid (monotonic, starts at 0).
let nextUid = 0;

/** Return the next integer id in a module-wide ascending sequence. */
export function getUid() {
  const id = nextUid;
  nextUid += 1;
  return id;
}

View File

@ -31,6 +31,7 @@
"fuse.js": "^7.0.0", "fuse.js": "^7.0.0",
"html-to-image": "^1.11.11", "html-to-image": "^1.11.11",
"install": "^0.13.0", "install": "^0.13.0",
"lodash-es": "^4.17.21",
"mermaid": "^10.6.1", "mermaid": "^10.6.1",
"nanoid": "^5.0.3", "nanoid": "^5.0.3",
"next": "^13.4.9", "next": "^13.4.9",
@ -52,6 +53,7 @@
}, },
"devDependencies": { "devDependencies": {
"@tauri-apps/cli": "1.5.11", "@tauri-apps/cli": "1.5.11",
"@types/lodash-es": "^4.17.12",
"@types/node": "^20.11.30", "@types/node": "^20.11.30",
"@types/react": "^18.2.70", "@types/react": "^18.2.70",
"@types/react-dom": "^18.2.7", "@types/react-dom": "^18.2.7",

View File

@ -50,6 +50,10 @@
}, },
"notification": { "notification": {
"all": true "all": true
},
"http": {
"all": true,
"request": true
} }
}, },
"bundle": { "bundle": {

View File

@ -1878,6 +1878,18 @@
resolved "https://registry.yarnpkg.com/@types/katex/-/katex-0.14.0.tgz#b84c0afc3218069a5ad64fe2a95321881021b5fe" resolved "https://registry.yarnpkg.com/@types/katex/-/katex-0.14.0.tgz#b84c0afc3218069a5ad64fe2a95321881021b5fe"
integrity sha512-+2FW2CcT0K3P+JMR8YG846bmDwplKUTsWgT2ENwdQ1UdVfRk3GQrh6Mi4sTopy30gI8Uau5CEqHTDZ6YvWIUPA== integrity sha512-+2FW2CcT0K3P+JMR8YG846bmDwplKUTsWgT2ENwdQ1UdVfRk3GQrh6Mi4sTopy30gI8Uau5CEqHTDZ6YvWIUPA==
"@types/lodash-es@^4.17.12":
version "4.17.12"
resolved "https://registry.npmmirror.com/@types/lodash-es/-/lodash-es-4.17.12.tgz#65f6d1e5f80539aa7cfbfc962de5def0cf4f341b"
integrity sha512-0NgftHUcV4v34VhXm8QBSftKVXtbkBG3ViCjs6+eJ5a6y6Mi/jiFGPc1sC7QK+9BFhWrURE3EOggmWaSxL9OzQ==
dependencies:
"@types/lodash" "*"
"@types/lodash@*":
version "4.17.1"
resolved "https://registry.npmmirror.com/@types/lodash/-/lodash-4.17.1.tgz#0fabfcf2f2127ef73b119d98452bd317c4a17eb8"
integrity sha512-X+2qazGS3jxLAIz5JDXDzglAF3KpijdhFxlf/V1+hEsOUc+HnWi81L/uv/EvGuV90WY+7mPGFCUDGfQC3Gj95Q==
"@types/mdast@^3.0.0": "@types/mdast@^3.0.0":
version "3.0.11" version "3.0.11"
resolved "https://registry.yarnpkg.com/@types/mdast/-/mdast-3.0.11.tgz#dc130f7e7d9306124286f6d6cee40cf4d14a3dc0" resolved "https://registry.yarnpkg.com/@types/mdast/-/mdast-3.0.11.tgz#dc130f7e7d9306124286f6d6cee40cf4d14a3dc0"