Mirror of https://github.com/Yidadaa/ChatGPT-Next-Web.git (synced 2025-08-08 14:02:08 +08:00)
Commit: merge origin/main
@@ -1,7 +1,7 @@
-import { UPLOAD_URL } from "@/app/constant";
-import heic2any from "heic2any";
+import { CACHE_URL_PREFIX, UPLOAD_URL } from "@/app/constant";
+import { RequestMessage } from "@/app/client/api";
 
-export function compressImage(file: File, maxSize: number): Promise<string> {
+export function compressImage(file: Blob, maxSize: number): Promise<string> {
   return new Promise((resolve, reject) => {
     const reader = new FileReader();
     reader.onload = (readerEvent: any) => {
@@ -41,19 +41,67 @@ export function compressImage(file: File, maxSize: number): Promise<string> {
     reader.onerror = reject;
 
     if (file.type.includes("heic")) {
-      heic2any({ blob: file, toType: "image/jpeg" })
-        .then((blob) => {
-          reader.readAsDataURL(blob as Blob);
-        })
-        .catch((e) => {
-          reject(e);
-        });
+      try {
+        const heic2any = require("heic2any");
+        heic2any({ blob: file, toType: "image/jpeg" })
+          .then((blob: Blob) => {
+            reader.readAsDataURL(blob);
+          })
+          .catch((e: any) => {
+            reject(e);
+          });
+      } catch (e) {
+        reject(e);
+      }
     }
 
     reader.readAsDataURL(file);
   });
 }
+
+export async function preProcessImageContent(
+  content: RequestMessage["content"],
+) {
+  if (typeof content === "string") {
+    return content;
+  }
+  const result = [];
+  for (const part of content) {
+    if (part?.type == "image_url" && part?.image_url?.url) {
+      try {
+        const url = await cacheImageToBase64Image(part?.image_url?.url);
+        result.push({ type: part.type, image_url: { url } });
+      } catch (error) {
+        console.error("Error processing image URL:", error);
+      }
+    } else {
+      result.push({ ...part });
+    }
+  }
+  return result;
+}
+
+const imageCaches: Record<string, string> = {};
+export function cacheImageToBase64Image(imageUrl: string) {
+  if (imageUrl.includes(CACHE_URL_PREFIX)) {
+    if (!imageCaches[imageUrl]) {
+      const reader = new FileReader();
+      return fetch(imageUrl, {
+        method: "GET",
+        mode: "cors",
+        credentials: "include",
+      })
+        .then((res) => res.blob())
+        .then(
+          async (blob) =>
+            (imageCaches[imageUrl] = await compressImage(blob, 256 * 1024)),
+        ); // compressImage
+    }
+    return Promise.resolve(imageCaches[imageUrl]);
+  }
+  return Promise.resolve(imageUrl);
+}
+
 export function base64Image2Blob(base64Data: string, contentType: string) {
   const byteCharacters = atob(base64Data);
   const byteNumbers = new Array(byteCharacters.length);
@@ -65,6 +113,10 @@ export function base64Image2Blob(base64Data: string, contentType: string) {
 }
 
 export function uploadImage(file: Blob): Promise<string> {
+  if (!window._SW_ENABLED) {
+    // if the service worker failed to register, fall back to compressImage
+    return compressImage(file, 256 * 1024);
+  }
   const body = new FormData();
   body.append("file", file);
   return fetch(UPLOAD_URL, {
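Between the two files, a brief usage sketch may help: preProcessImageContent walks multimodal message content and, via cacheImageToBase64Image, replaces internally cached image URLs with compressed base64 data URLs before a request is sent. The call site and the "@/app/utils/chat" import path below are assumptions for illustration, not part of this commit.

import { RequestMessage } from "@/app/client/api";
import { preProcessImageContent } from "@/app/utils/chat"; // path assumed

// Hypothetical call site: build multimodal content, then inline any cached
// images (URLs containing CACHE_URL_PREFIX) as base64 before sending.
async function buildPayloadContent(
  userText: string,
  attachedImageUrl: string,
) {
  const content: RequestMessage["content"] = [
    { type: "text", text: userText },
    { type: "image_url", image_url: { url: attachedImageUrl } },
  ];
  // Cached URLs are fetched, compressed to ~256 KB, and inlined as data URLs;
  // all other parts pass through unchanged.
  return await preProcessImageContent(content);
}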
app/utils/cloudflare.ts (new file, 26 lines)
@@ -0,0 +1,26 @@
+export function cloudflareAIGatewayUrl(fetchUrl: string) {
+  // rebuild fetchUrl if the request goes through the Cloudflare AI Gateway
+  // docs: https://developers.cloudflare.com/ai-gateway/providers/openai/
+
+  const paths = fetchUrl.split("/");
+  if ("gateway.ai.cloudflare.com" == paths[2]) {
+    // is cloudflare.com ai gateway
+    // https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/azure-openai/{resource_name}/{deployment_name}/chat/completions?api-version=2023-05-15
+    if ("azure-openai" == paths[6]) {
+      // is azure gateway
+      return paths.slice(0, 8).concat(paths.slice(-3)).join("/"); // rebuild ai gateway azure_url
+    }
+    // https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/openai/chat/completions
+    if ("openai" == paths[6]) {
+      // is openai gateway
+      return paths.slice(0, 7).concat(paths.slice(-2)).join("/"); // rebuild ai gateway openai_url
+    }
+    // https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/anthropic/v1/messages
+    if ("anthropic" == paths[6]) {
+      // is anthropic gateway
+      return paths.slice(0, 7).concat(paths.slice(-2)).join("/"); // rebuild ai gateway anthropic_url
+    }
+    // TODO: Amazon Bedrock, Groq, HuggingFace...
+  }
+  return fetchUrl;
+}
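To make the path slicing in cloudflareAIGatewayUrl concrete, here is a hedged example of the OpenAI branch; the account and gateway IDs are invented, and the rewrite follows the URL patterns shown in the code's own comments.

// A caller that appends "/v1/chat/completions" to the gateway base URL:
//   https://gateway.ai.cloudflare.com/v1/acct_123/gw_456/openai/v1/chat/completions
// split("/") gives paths[2] === "gateway.ai.cloudflare.com" and paths[6] === "openai",
// so slice(0, 7) keeps everything through ".../openai" and slice(-2) keeps
// "chat/completions", producing the canonical gateway URL:
//   https://gateway.ai.cloudflare.com/v1/acct_123/gw_456/openai/chat/completions
const rewritten = cloudflareAIGatewayUrl(
  "https://gateway.ai.cloudflare.com/v1/acct_123/gw_456/openai/v1/chat/completions",
);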
@@ -1,6 +1,6 @@
 import { useMemo } from "react";
 import { useAccessStore, useAppConfig } from "../store";
-import { collectModels, collectModelsWithDefaultModel } from "./model";
+import { collectModelsWithDefaultModel } from "./model";
 
 export function useAllModels() {
   const accessStore = useAccessStore();
@@ -1,9 +1,9 @@
 import { DEFAULT_MODELS } from "../constant";
 import { LLMModel } from "../client/api";
 
-const customProvider = (modelName: string) => ({
-  id: modelName,
-  providerName: "Custom",
+const customProvider = (providerName: string) => ({
+  id: providerName.toLowerCase(),
+  providerName: providerName,
   providerType: "custom",
 });
 
@@ -71,10 +71,17 @@ export function collectModelTable(
       }
       // 2. if model not exists, create new model with available value
       if (count === 0) {
-        const provider = customProvider(name);
-        modelTable[`${name}@${provider?.id}`] = {
-          name,
-          displayName: displayName || name,
+        let [customModelName, customProviderName] = name.split("@");
+        const provider = customProvider(
+          customProviderName || customModelName,
+        );
+        // swap name and displayName for bytedance
+        if (displayName && provider.providerName == "ByteDance") {
+          [customModelName, displayName] = [displayName, customModelName];
+        }
+        modelTable[`${customModelName}@${provider?.id}`] = {
+          name: customModelName,
+          displayName: displayName || customModelName,
           available,
           provider, // Use optional chaining
         };
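For readers following the collectModelTable change, a short sketch of the model@provider convention the rewritten block parses; the entries below are hypothetical, not from the repository's defaults.

// "gpt-4-proxy@AcmeAI": model name "gpt-4-proxy", custom provider "AcmeAI"
//   (provider id "acmeai"), stored under the key "gpt-4-proxy@acmeai".
// "local-llama": no "@", so the model name itself doubles as the provider name.
const [customModelName, customProviderName] = "gpt-4-proxy@AcmeAI".split("@");
// customModelName === "gpt-4-proxy", customProviderName === "AcmeAI"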