Mirror of https://github.com/Yidadaa/ChatGPT-Next-Web.git (synced 2025-09-04 06:16:54 +08:00)

Commit: feat: merge main
@@ -1,12 +1,12 @@
 import { NextRequest, NextResponse } from "next/server";
-import { STORAGE_KEY, internalWhiteWebDavEndpoints } from "../../../constant";
+import { STORAGE_KEY, internalAllowedWebDavEndpoints } from "../../../constant";
 import { getServerSideConfig } from "@/app/config/server";

 const config = getServerSideConfig();

-const mergedWhiteWebDavEndpoints = [
-  ...internalWhiteWebDavEndpoints,
-  ...config.whiteWebDevEndpoints,
+const mergedAllowedWebDavEndpoints = [
+  ...internalAllowedWebDavEndpoints,
+  ...config.allowedWebDevEndpoints,
 ].filter((domain) => Boolean(domain.trim()));

 async function handle(
@@ -24,7 +24,9 @@ async function handle(

   // Validate the endpoint to prevent potential SSRF attacks
   if (
-    !mergedWhiteWebDavEndpoints.some((white) => endpoint?.startsWith(white))
+    !mergedAllowedWebDavEndpoints.some(
+      (allowedEndpoint) => endpoint?.startsWith(allowedEndpoint),
+    )
   ) {
     return NextResponse.json(
       {
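Note: the SSRF guard above is a straight prefix match against an allowlist of WebDAV base URLs. A minimal standalone sketch of the same check (endpoint list shortened, helper name hypothetical):

// Reject any endpoint that does not start with a known WebDAV base URL.
const allowed = ["https://dav.jianguoyun.com/dav/", "https://dav.dropdav.com/"];

function isAllowedWebDavEndpoint(endpoint?: string): boolean {
  return allowed.some((prefix) => endpoint?.startsWith(prefix) ?? false);
}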
@@ -161,6 +161,13 @@ export class ClaudeApi implements LLMApi {
       };
     });

+    if (prompt[0]?.role === "assistant") {
+      prompt.unshift({
+        role: "user",
+        content: ";",
+      });
+    }
+
     const requestBody: AnthropicChatRequest = {
       messages: prompt,
       stream: shouldStream,
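Note: the Anthropic Messages API expects the conversation to open with a user turn, so the hunk above prepends a minimal placeholder user message when the prompt would otherwise start with an assistant message. The same rule as a standalone sketch (message type assumed for illustration):

type Msg = { role: "user" | "assistant" | "system"; content: string };

// Prepend a placeholder user turn if the conversation opens with the assistant.
function ensureUserFirst(prompt: Msg[]): Msg[] {
  return prompt[0]?.role === "assistant"
    ? [{ role: "user", content: ";" }, ...prompt]
    : prompt;
}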
@@ -21,11 +21,10 @@ export class GeminiProApi implements LLMApi {
   }
   async chat(options: ChatOptions): Promise<void> {
     // const apiClient = this;
-    const visionModel = isVisionModel(options.config.model);
     let multimodal = false;
     const messages = options.messages.map((v) => {
       let parts: any[] = [{ text: getMessageTextContent(v) }];
-      if (visionModel) {
+      if (isVisionModel(options.config.model)) {
         const images = getMessageImages(v);
         if (images.length > 0) {
           multimodal = true;
@@ -117,17 +116,14 @@ export class GeminiProApi implements LLMApi {
     const controller = new AbortController();
     options.onController?.(controller);
     try {
-      let googleChatPath = visionModel
-        ? Google.VisionChatPath(modelConfig.model)
-        : Google.ChatPath(modelConfig.model);
-      let chatPath = this.path(googleChatPath);
-
       // let baseUrl = accessStore.googleUrl;

       if (!baseUrl) {
         baseUrl = isApp
-          ? DEFAULT_API_HOST + "/api/proxy/google/" + googleChatPath
-          : chatPath;
+          ? DEFAULT_API_HOST +
+            "/api/proxy/google/" +
+            Google.ChatPath(modelConfig.model)
+          : this.path(Google.ChatPath(modelConfig.model));
       }

       if (isApp) {
@@ -145,6 +141,7 @@ export class GeminiProApi implements LLMApi {
         () => controller.abort(),
         REQUEST_TIMEOUT_MS,
       );

       if (shouldStream) {
         let responseText = "";
         let remainText = "";
@@ -129,7 +129,7 @@ export class ChatGPTApi implements LLMApi {
     };

     // add max_tokens to vision model
-    if (visionModel) {
+    if (visionModel && modelConfig.model.includes("preview")) {
       requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
     }

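Note: with gpt-4o in the model list, the forced max_tokens floor is now limited to the older "-preview" vision models; other vision-capable models keep the configured value. The predicate, distilled as a sketch:

// Only legacy "*-preview" vision models get a forced max_tokens floor.
function needsVisionMaxTokens(model: string, isVision: boolean): boolean {
  return isVision && model.includes("preview");
}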
@@ -59,9 +59,10 @@ import {
   getMessageTextContent,
   getMessageImages,
   isVisionModel,
-  compressImage,
 } from "../utils";
+
+import { compressImage } from "@/app/utils/chat";

 import dynamic from "next/dynamic";

 import { ChatControllerPool } from "../client/controller";
@@ -1088,6 +1089,7 @@ function _Chat() {
         if (payload.url) {
           accessStore.update((access) => (access.openaiUrl = payload.url!));
         }
+        accessStore.update((access) => (access.useCustomConfig = true));
       });
     }
   } catch {
@@ -1,4 +1,5 @@
 import tauriConfig from "../../src-tauri/tauri.conf.json";
+import { DEFAULT_INPUT_TEMPLATE } from "../constant";

 export const getBuildConfig = () => {
   if (typeof process === "undefined") {
@@ -38,6 +39,7 @@ export const getBuildConfig = () => {
     ...commitInfo,
     buildMode,
     isApp,
+    template: process.env.DEFAULT_INPUT_TEMPLATE ?? DEFAULT_INPUT_TEMPLATE,
   };
 };

@@ -34,6 +34,9 @@ declare global {

       // google tag manager
       GTM_ID?: string;
+
+      // custom template for preprocessing user input
+      DEFAULT_INPUT_TEMPLATE?: string;
     }
   }
 }
@@ -51,6 +54,22 @@ const ACCESS_CODES = (function getAccessCodes(): Set<string> {
   }
 })();

+function getApiKey(keys?: string) {
+  const apiKeyEnvVar = keys ?? "";
+  const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
+  const randomIndex = Math.floor(Math.random() * apiKeys.length);
+  const apiKey = apiKeys[randomIndex];
+  if (apiKey) {
+    console.log(
+      `[Server Config] using ${randomIndex + 1} of ${
+        apiKeys.length
+      } api key - ${apiKey}`,
+    );
+  }
+
+  return apiKey;
+}
+
 export const getServerSideConfig = () => {
   if (typeof process === "undefined") {
     throw Error(
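Note: each provider key env var may hold several comma-separated keys, and getApiKey picks one at random per call, spreading requests across keys. Usage, as wired up further down in this same file:

// e.g. OPENAI_API_KEY="sk-a,sk-b,sk-c" picks one key per config read.
const apiKey = getApiKey(process.env.OPENAI_API_KEY);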
@@ -74,34 +93,34 @@ export const getServerSideConfig = () => {
   const isGoogle = !!process.env.GOOGLE_API_KEY;
   const isAnthropic = !!process.env.ANTHROPIC_API_KEY;

-  const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? "";
-  const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
-  const randomIndex = Math.floor(Math.random() * apiKeys.length);
-  const apiKey = apiKeys[randomIndex];
-  console.log(
-    `[Server Config] using ${randomIndex + 1} of ${apiKeys.length} api key`,
-  );
+  // const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? "";
+  // const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
+  // const randomIndex = Math.floor(Math.random() * apiKeys.length);
+  // const apiKey = apiKeys[randomIndex];
+  // console.log(
+  //   `[Server Config] using ${randomIndex + 1} of ${apiKeys.length} api key`,
+  // );

-  const whiteWebDevEndpoints = (process.env.WHITE_WEBDEV_ENDPOINTS ?? "").split(
-    ",",
-  );
+  const allowedWebDevEndpoints = (
+    process.env.WHITE_WEBDEV_ENDPOINTS ?? ""
+  ).split(",");

   return {
     baseUrl: process.env.BASE_URL,
-    apiKey,
+    apiKey: getApiKey(process.env.OPENAI_API_KEY),
     openaiOrgId: process.env.OPENAI_ORG_ID,

     isAzure,
     azureUrl: process.env.AZURE_URL,
-    azureApiKey: process.env.AZURE_API_KEY,
+    azureApiKey: getApiKey(process.env.AZURE_API_KEY),
     azureApiVersion: process.env.AZURE_API_VERSION,

     isGoogle,
-    googleApiKey: process.env.GOOGLE_API_KEY,
+    googleApiKey: getApiKey(process.env.GOOGLE_API_KEY),
     googleUrl: process.env.GOOGLE_URL,

     isAnthropic,
-    anthropicApiKey: process.env.ANTHROPIC_API_KEY,
+    anthropicApiKey: getApiKey(process.env.ANTHROPIC_API_KEY),
     anthropicApiVersion: process.env.ANTHROPIC_API_VERSION,
     anthropicUrl: process.env.ANTHROPIC_URL,

@@ -120,6 +139,6 @@ export const getServerSideConfig = () => {
     disableFastLink: !!process.env.DISABLE_FAST_LINK,
     customModels,
     defaultModel,
-    whiteWebDevEndpoints,
+    allowedWebDevEndpoints,
   };
 };
@@ -107,8 +107,6 @@ export const Azure = {
 export const Google = {
   ExampleEndpoint: "https://generativelanguage.googleapis.com/",
   ChatPath: (modelName: string) => `v1beta/models/${modelName}:generateContent`,
-  VisionChatPath: (modelName: string) =>
-    `v1beta/models/${modelName}:generateContent`,
 };

 export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
@@ -137,8 +135,8 @@ export const KnowledgeCutOffDate: Record<string, string> = {
   "gpt-4-turbo": "2023-12",
   "gpt-4-turbo-2024-04-09": "2023-12",
   "gpt-4-turbo-preview": "2023-12",
-  "gpt-4-1106-preview": "2023-04",
-  "gpt-4-0125-preview": "2023-12",
+  "gpt-4o": "2023-10",
+  "gpt-4o-2024-05-13": "2023-10",
   "gpt-4-vision-preview": "2023-04",
   // After improvements,
   // it's now easier to add "KnowledgeCutOffDate" instead of stupid hardcoding it, as was done previously.
@@ -148,22 +146,16 @@ export const KnowledgeCutOffDate: Record<string, string> = {

 const openaiModels = [
   "gpt-3.5-turbo",
-  "gpt-3.5-turbo-0301",
-  "gpt-3.5-turbo-0613",
   "gpt-3.5-turbo-1106",
   "gpt-3.5-turbo-0125",
-  "gpt-3.5-turbo-16k",
-  "gpt-3.5-turbo-16k-0613",
   "gpt-4",
-  "gpt-4-0314",
   "gpt-4-0613",
-  "gpt-4-1106-preview",
-  "gpt-4-0125-preview",
   "gpt-4-32k",
-  "gpt-4-32k-0314",
   "gpt-4-32k-0613",
   "gpt-4-turbo",
   "gpt-4-turbo-preview",
+  "gpt-4o",
+  "gpt-4o-2024-05-13",
   "gpt-4-vision-preview",
   "gpt-4-turbo-2024-04-09",
 ];
@@ -171,6 +163,7 @@ const openaiModels = [
 const googleModels = [
   "gemini-1.0-pro",
   "gemini-1.5-pro-latest",
+  "gemini-1.5-flash-latest",
   "gemini-pro-vision",
 ];

@@ -217,7 +210,7 @@ export const CHAT_PAGE_SIZE = 15;
 export const MAX_RENDER_MSG_COUNT = 45;

 // some famous webdav endpoints
-export const internalWhiteWebDavEndpoints = [
+export const internalAllowedWebDavEndpoints = [
   "https://dav.jianguoyun.com/dav/",
   "https://dav.dropdav.com/",
   "https://dav.box.com/dav",
@@ -8,7 +8,7 @@ import {
   ModelType,
 } from "@/app/store";
 import Locale from "@/app/locales";
-import { Selector, showConfirm, showToast } from "@/app/components/ui-lib";
+import { showConfirm } from "@/app/components/ui-lib";
 import {
   CHAT_PAGE_SIZE,
   REQUEST_TIMEOUT_MS,
@@ -25,7 +25,6 @@ import ChatInputPanel, {
   ChatInputPanelInstance,
 } from "./components/ChatInputPanel";
 import ChatMessagePanel, { RenderMessage } from "./components/ChatMessagePanel";
-import { useAllModels } from "@/app/utils/hooks";
 import useRows from "@/app/hooks/useRows";
 import SessionConfigModel from "./components/SessionConfigModal";
 import useScrollToBottom from "@/app/hooks/useScrollToBottom";
@@ -1,4 +1,5 @@
-import { compressImage, isVisionModel } from "@/app/utils";
+import { isVisionModel } from "@/app/utils";
+import { compressImage } from "@/app/utils/chat";
 import { useCallback, useRef } from "react";
 import { useChatStore } from "../store/chat";

@@ -1,4 +1,4 @@
-import { compressImage } from "@/app/utils";
+import { compressImage } from "@/app/utils/chat";
 import { useCallback, useRef } from "react";

 interface UseUploadImageOptions {
@@ -317,7 +317,7 @@ const en: LocaleType = {

     Endpoint: {
       Title: "OpenAI Endpoint",
-      SubTitle: "Must starts with http(s):// or use /api/openai as default",
+      SubTitle: "Must start with http(s):// or use /api/openai as default",
     },
   },
   Azure: {
@@ -21,6 +21,8 @@ import { estimateTokenLength } from "../utils/token";
 import { nanoid } from "nanoid";
 import { createPersistStore } from "../utils/store";
 import { identifyDefaultClaudeModel } from "../utils/checkers";
+import { collectModelsWithDefaultModel } from "../utils/model";
+import { useAccessStore } from "./access";

 export type ChatMessage = RequestMessage & {
   date: string;
@@ -104,9 +106,19 @@ function createEmptySession(): ChatSession {
 function getSummarizeModel(currentModel: string) {
   // if it is using gpt-* models, force to use 3.5 to summarize
   if (currentModel.startsWith("gpt")) {
-    return SUMMARIZE_MODEL;
+    const configStore = useAppConfig.getState();
+    const accessStore = useAccessStore.getState();
+    const allModel = collectModelsWithDefaultModel(
+      configStore.models,
+      [configStore.customModels, accessStore.customModels].join(","),
+      accessStore.defaultModel,
+    );
+    const summarizeModel = allModel.find(
+      (m) => m.name === SUMMARIZE_MODEL && m.available,
+    );
+    return summarizeModel?.name ?? currentModel;
   }
-  if (currentModel.startsWith("gemini-pro")) {
+  if (currentModel.startsWith("gemini")) {
     return GEMINI_SUMMARIZE_MODEL;
   }
   return currentModel;
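Note: instead of unconditionally forcing gpt-3.5-turbo for summarization, the summarize model is now looked up in the merged model table (built-in, custom, and access-store models) and used only when actually available; otherwise the session's current model summarizes. The fallback rule, distilled as a sketch:

// Prefer the designated summarize model only when it is actually available.
function pickSummarizer(available: Set<string>, preferred: string, current: string): string {
  return available.has(preferred) ? preferred : current;
}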
@@ -433,14 +445,13 @@ export const useChatStore = createPersistStore(
      getMemoryPrompt() {
        const session = get().currentSession();

-       return {
-         role: "system",
-         content:
-           session.memoryPrompt.length > 0
-             ? Locale.Store.Prompt.History(session.memoryPrompt)
-             : "",
-         date: "",
-       } as ChatMessage;
+       if (session.memoryPrompt.length) {
+         return {
+           role: "system",
+           content: Locale.Store.Prompt.History(session.memoryPrompt),
+           date: "",
+         } as ChatMessage;
+       }
      },

      getMessagesWithMemory() {
@@ -476,16 +487,15 @@ export const useChatStore = createPersistStore(
            systemPrompts.at(0)?.content ?? "empty",
          );
        }

+       const memoryPrompt = get().getMemoryPrompt();
        // long term memory
        const shouldSendLongTermMemory =
          modelConfig.sendMemory &&
          session.memoryPrompt &&
-         session.memoryPrompt.length > 0 &&
          session.lastSummarizeIndex > clearContextIndex;
-       const longTermMemoryPrompts = shouldSendLongTermMemory
-         ? [get().getMemoryPrompt()]
-         : [];
+       const longTermMemoryPrompts =
+         shouldSendLongTermMemory && memoryPrompt ? [memoryPrompt] : [];
        const longTermMemoryStartIndex = session.lastSummarizeIndex;

        // short term memory
@@ -610,9 +620,11 @@ export const useChatStore = createPersistStore(
            Math.max(0, n - modelConfig.historyMessageCount),
          );
        }

-       // add memory prompt
-       toBeSummarizedMsgs.unshift(get().getMemoryPrompt());
+       const memoryPrompt = get().getMemoryPrompt();
+       if (memoryPrompt) {
+         // add memory prompt
+         toBeSummarizedMsgs.unshift(memoryPrompt);
+       }

        const lastSummarizeIndex = session.messages.length;
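Note: getMemoryPrompt() now returns undefined when the session has no memory prompt, so both call sites above guard the result before inserting it into the message list. The new contract, distilled as a sketch (types assumed):

// A system memory message is only produced for a non-empty summary.
function memoryMessage(memoryPrompt: string): ChatMessage | undefined {
  return memoryPrompt.length
    ? ({ role: "system", content: memoryPrompt, date: "" } as ChatMessage)
    : undefined;
}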
@@ -41,6 +41,7 @@ export const ThemeConfig = {
     title: "Dark model",
   },
 };
+const config = getClientConfig();

 export const DEFAULT_CONFIG = {
   lastUpdate: Date.now(), // timestamp, to merge state
@@ -49,7 +50,7 @@ export const DEFAULT_CONFIG = {
   avatar: "1f603",
   fontSize: 14,
   theme: Theme.Auto as Theme,
-  tightBorder: !!getClientConfig()?.isApp,
+  tightBorder: !!config?.isApp,
   sendPreviewBubble: true,
   enableAutoGenerateTitle: true,
   sidebarWidth: DEFAULT_SIDEBAR_WIDTH,
@@ -75,7 +76,7 @@ export const DEFAULT_CONFIG = {
     historyMessageCount: 4,
     compressMessageLengthThreshold: 1000,
     enableInjectSystemPrompts: true,
-    template: DEFAULT_INPUT_TEMPLATE,
+    template: config?.template ?? DEFAULT_INPUT_TEMPLATE,
   },
 };
@@ -151,7 +152,7 @@ export const useAppConfig = createPersistStore(
   }),
   {
     name: StoreKey.Config,
-    version: 3.8,
+    version: 3.9,
     migrate(persistedState, version) {
       const state = persistedState as ChatConfig;
@@ -182,6 +183,13 @@ export const useAppConfig = createPersistStore(
         state.lastUpdate = Date.now();
       }

+      if (version < 3.9) {
+        state.modelConfig.template =
+          state.modelConfig.template !== DEFAULT_INPUT_TEMPLATE
+            ? state.modelConfig.template
+            : config?.template ?? DEFAULT_INPUT_TEMPLATE;
+      }
+
       return state as any;
     },
   },
@@ -97,11 +97,20 @@ export const useSyncStore = createPersistStore(
       const client = this.getClient();

       try {
-        const remoteState = JSON.parse(
-          await client.get(config.username),
-        ) as AppState;
-        mergeAppState(localState, remoteState);
-        setLocalAppState(localState);
+        const remoteState = await client.get(config.username);
+        if (!remoteState || remoteState === "") {
+          await client.set(config.username, JSON.stringify(localState));
+          console.log(
+            "[Sync] Remote state is empty, using local state instead.",
+          );
+          return;
+        } else {
+          const parsedRemoteState = JSON.parse(
+            await client.get(config.username),
+          ) as AppState;
+          mergeAppState(localState, parsedRemoteState);
+          setLocalAppState(localState);
+        }
       } catch (e) {
         console.log("[Sync] failed to get remote state", e);
         throw e;
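Note: the first sync against an empty remote slot now seeds it with the local state instead of failing on JSON.parse of an empty string; later syncs parse and merge the remote copy. The bootstrap rule as a standalone sketch (client interface assumed):

// Seed an empty remote slot with local state; otherwise merge remote into local.
async function syncOnce(
  client: { get(k: string): Promise<string>; set(k: string, v: string): Promise<void> },
  key: string,
  localState: object,
): Promise<void> {
  const remote = await client.get(key);
  if (!remote) {
    await client.set(key, JSON.stringify(localState)); // first sync: upload local
    return;
  }
  const remoteState = JSON.parse(remote);
  Object.assign(localState, remoteState); // stand-in for the real merge step
}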
@@ -82,6 +82,7 @@
     @include dark;
   }
 }

 html {
   height: var(--full-height);

@@ -106,6 +107,10 @@ body {
   @media only screen and (max-width: 600px) {
     background-color: var(--second);
   }
+
+  *:focus-visible {
+    outline: none;
+  }
 }

 ::-webkit-scrollbar {
app/utils.ts (53 changed lines)
@@ -84,48 +84,6 @@ export async function downloadAs(text: string, filename: string) {
   }
 }

-export function compressImage(file: File, maxSize: number): Promise<string> {
-  return new Promise((resolve, reject) => {
-    const reader = new FileReader();
-    reader.onload = (readerEvent: any) => {
-      const image = new Image();
-      image.onload = () => {
-        let canvas = document.createElement("canvas");
-        let ctx = canvas.getContext("2d");
-        let width = image.width;
-        let height = image.height;
-        let quality = 0.9;
-        let dataUrl;
-
-        do {
-          canvas.width = width;
-          canvas.height = height;
-          ctx?.clearRect(0, 0, canvas.width, canvas.height);
-          ctx?.drawImage(image, 0, 0, width, height);
-          dataUrl = canvas.toDataURL("image/jpeg", quality);
-
-          if (dataUrl.length < maxSize) break;
-
-          if (quality > 0.5) {
-            // Prioritize quality reduction
-            quality -= 0.1;
-          } else {
-            // Then reduce the size
-            width *= 0.9;
-            height *= 0.9;
-          }
-        } while (dataUrl.length > maxSize);
-
-        resolve(dataUrl);
-      };
-      image.onerror = reject;
-      image.src = readerEvent.target.result;
-    };
-    reader.onerror = reject;
-    reader.readAsDataURL(file);
-  });
-}
-
 export function readFromFile() {
   return new Promise<string>((res, rej) => {
     const fileInput = document.createElement("input");
@@ -291,18 +249,21 @@ export function getMessageImages(message: RequestMessage): string[] {
 }

 export function isVisionModel(model: string) {
-
   // Note: This is a better way using the TypeScript feature instead of `&&` or `||` (ts v5.5.0-dev.20240314 I've been using)
-
   const visionKeywords = [
     "vision",
     "claude-3",
     "gemini-1.5-pro",
+    "gemini-1.5-flash",
+    "gpt-4o",
   ];
-  const isGpt4Turbo = model.includes("gpt-4-turbo") && !model.includes("preview");
+  const isGpt4Turbo =
+    model.includes("gpt-4-turbo") && !model.includes("preview");

-  return visionKeywords.some((keyword) => model.includes(keyword)) || isGpt4Turbo;
+  return (
+    visionKeywords.some((keyword) => model.includes(keyword)) || isGpt4Turbo
+  );
 }

 export function getTime(dateTime: string) {
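Note: vision capability is decided by substring keyword matching, with non-preview gpt-4-turbo special-cased. Expected behavior of the predicate above, as a sketch:

isVisionModel("gpt-4o");              // true  (keyword match)
isVisionModel("gpt-4-turbo");         // true  (non-preview turbo)
isVisionModel("gpt-4-turbo-preview"); // false (preview excluded)
isVisionModel("gpt-3.5-turbo");       // false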
app/utils/chat.ts (new file, 54 lines)
@@ -0,0 +1,54 @@
+import heic2any from "heic2any";
+
+export function compressImage(file: File, maxSize: number): Promise<string> {
+  return new Promise((resolve, reject) => {
+    const reader = new FileReader();
+    reader.onload = (readerEvent: any) => {
+      const image = new Image();
+      image.onload = () => {
+        let canvas = document.createElement("canvas");
+        let ctx = canvas.getContext("2d");
+        let width = image.width;
+        let height = image.height;
+        let quality = 0.9;
+        let dataUrl;
+
+        do {
+          canvas.width = width;
+          canvas.height = height;
+          ctx?.clearRect(0, 0, canvas.width, canvas.height);
+          ctx?.drawImage(image, 0, 0, width, height);
+          dataUrl = canvas.toDataURL("image/jpeg", quality);
+
+          if (dataUrl.length < maxSize) break;
+
+          if (quality > 0.5) {
+            // Prioritize quality reduction
+            quality -= 0.1;
+          } else {
+            // Then reduce the size
+            width *= 0.9;
+            height *= 0.9;
+          }
+        } while (dataUrl.length > maxSize);
+
+        resolve(dataUrl);
+      };
+      image.onerror = reject;
+      image.src = readerEvent.target.result;
+    };
+    reader.onerror = reject;
+
+    if (file.type.includes("heic")) {
+      heic2any({ blob: file, toType: "image/jpeg" })
+        .then((blob) => {
+          reader.readAsDataURL(blob as Blob);
+        })
+        .catch((e) => {
+          reject(e);
+        });
+    }
+
+    reader.readAsDataURL(file);
+  });
+}
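Usage sketch: compress a user-picked image to roughly 256 KB before attaching it to a message (the size bound is illustrative; HEIC input is converted to JPEG via heic2any before decoding):

const input = document.querySelector<HTMLInputElement>('input[type="file"]');
const file = input?.files?.[0];
if (file) {
  compressImage(file, 256 * 1024).then((dataUrl) => {
    console.log("compressed data URL length:", dataUrl.length);
  });
}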
@@ -93,14 +93,17 @@ export function createUpstashClient(store: SyncStore) {
     }

     let url;
-    if (proxyUrl.length > 0 || proxyUrl === "/") {
-      let u = new URL(proxyUrl + "/api/upstash/" + path);
+    const pathPrefix = "/api/upstash/";
+
+    try {
+      let u = new URL(proxyUrl + pathPrefix + path);
       // add query params
       u.searchParams.append("endpoint", config.endpoint);
       url = u.toString();
-    } else {
-      url = "/api/upstash/" + path + "?endpoint=" + config.endpoint;
+    } catch (e) {
+      url = pathPrefix + path + "?endpoint=" + config.endpoint;
     }

     return url;
   },
 };
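Note: new URL(...) throws for inputs that are not absolute URLs, so the try/catch above doubles as the relative-versus-absolute proxy check: absolute proxy URLs get the endpoint appended as a query parameter, and anything else falls back to a relative API route. Distilled as a sketch:

// Absolute proxy URL -> full URL with ?endpoint=...; otherwise a relative route.
function buildUpstashUrl(proxyUrl: string, path: string, endpoint: string): string {
  const prefix = "/api/upstash/";
  try {
    const u = new URL(proxyUrl + prefix + path); // throws when proxyUrl is not absolute
    u.searchParams.append("endpoint", endpoint);
    return u.toString();
  } catch {
    return prefix + path + "?endpoint=" + endpoint;
  }
}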
@@ -64,13 +64,10 @@ export function collectModelTableWithDefaultModel(
 ) {
   let modelTable = collectModelTable(models, customModels);
   if (defaultModel && defaultModel !== "") {
-    delete modelTable[defaultModel];
     modelTable[defaultModel] = {
       ...modelTable[defaultModel],
       name: defaultModel,
       displayName: defaultModel,
       available: true,
-      provider:
-        modelTable[defaultModel]?.provider ?? customProvider(defaultModel),
       isDefault: true,
     };
   }