Merge remote-tracking branch 'source/main'

YeungYeah
2024-07-19 21:38:25 +08:00
23 changed files with 1270 additions and 153 deletions

View File

@@ -1,7 +1,15 @@
import { NextRequest, NextResponse } from "next/server";
import { auth } from "../../auth";
import { getServerSideConfig } from "@/app/config/server";
import { GEMINI_BASE_URL, Google, ModelProvider } from "@/app/constant";
import {
  ApiPath,
  GEMINI_BASE_URL,
  Google,
  ModelProvider,
} from "@/app/constant";
import { prettyObject } from "@/app/utils/format";
const serverConfig = getServerSideConfig();
async function handle(
  req: NextRequest,
@@ -13,32 +21,6 @@ async function handle(
    return NextResponse.json({ body: "OK" }, { status: 200 });
  }
  const controller = new AbortController();
  const serverConfig = getServerSideConfig();
  let baseUrl = serverConfig.googleUrl || GEMINI_BASE_URL;
  if (!baseUrl.startsWith("http")) {
    baseUrl = `https://${baseUrl}`;
  }
  if (baseUrl.endsWith("/")) {
    baseUrl = baseUrl.slice(0, -1);
  }
  let path = `${req.nextUrl.pathname}`.replaceAll("/api/google/", "");
  console.log("[Proxy] ", path);
  console.log("[Base Url]", baseUrl);
  const timeoutId = setTimeout(
    () => {
      controller.abort();
    },
    10 * 60 * 1000,
  );
  const authResult = auth(req, ModelProvider.GeminiPro);
  if (authResult.error) {
    return NextResponse.json(authResult, {
@@ -49,9 +31,9 @@ async function handle(
  const bearToken = req.headers.get("Authorization") ?? "";
  const token = bearToken.trim().replaceAll("Bearer ", "").trim();
  const key = token ? token : serverConfig.googleApiKey;
  const apiKey = token ? token : serverConfig.googleApiKey;
  if (!key) {
  if (!apiKey) {
    return NextResponse.json(
      {
        error: true,
@@ -62,10 +44,63 @@ async function handle(
      },
    );
  }
  try {
    const response = await request(req, apiKey);
    return response;
  } catch (e) {
    console.error("[Google] ", e);
    return NextResponse.json(prettyObject(e));
  }
}
  const fetchUrl = `${baseUrl}/${path}?key=${key}${
    req?.nextUrl?.searchParams?.get("alt") == "sse" ? "&alt=sse" : ""
export const GET = handle;
export const POST = handle;
export const runtime = "edge";
export const preferredRegion = [
"bom1",
"cle1",
"cpt1",
"gru1",
"hnd1",
"iad1",
"icn1",
"kix1",
"pdx1",
"sfo1",
"sin1",
"syd1",
];
async function request(req: NextRequest, apiKey: string) {
  const controller = new AbortController();
  let baseUrl = serverConfig.googleUrl || GEMINI_BASE_URL;
  let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.Google, "");
  if (!baseUrl.startsWith("http")) {
    baseUrl = `https://${baseUrl}`;
  }
  if (baseUrl.endsWith("/")) {
    baseUrl = baseUrl.slice(0, -1);
  }
  console.log("[Proxy] ", path);
  console.log("[Base Url]", baseUrl);
  const timeoutId = setTimeout(
    () => {
      controller.abort();
    },
    10 * 60 * 1000,
  );
  const fetchUrl = `${baseUrl}${path}?key=${apiKey}${
    req?.nextUrl?.searchParams?.get("alt") === "sse" ? "&alt=sse" : ""
  }`;
  console.log("[Fetch Url] ", fetchUrl);
  const fetchOptions: RequestInit = {
    headers: {
      "Content-Type": "application/json",
@@ -97,22 +132,3 @@ async function handle(
    clearTimeout(timeoutId);
  }
}
export const GET = handle;
export const POST = handle;
export const runtime = "edge";
export const preferredRegion = [
"bom1",
"cle1",
"cpt1",
"gru1",
"hnd1",
"iad1",
"icn1",
"kix1",
"pdx1",
"sfo1",
"sin1",
"syd1",
];
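
Note on this file: the refactor pulls the upstream call out of handle() into a dedicated request() helper, so the auth check runs before any proxy URL is built, and the path rewrite now keys off the ApiPath.Google constant instead of the hard-coded "/api/google/" string. A minimal sketch of the resulting control flow, assuming the 401 status for both failure branches (only names that appear in the diff are real):

async function handle(req: NextRequest) {
  // authenticate first; bail out before touching the upstream URL
  const authResult = auth(req, ModelProvider.GeminiPro);
  if (authResult.error) {
    return NextResponse.json(authResult, { status: 401 });
  }
  // prefer the caller's bearer token, fall back to the server-side key
  const token = (req.headers.get("Authorization") ?? "")
    .replaceAll("Bearer ", "")
    .trim();
  const apiKey = token || serverConfig.googleApiKey;
  if (!apiKey) {
    return NextResponse.json({ error: true }, { status: 401 });
  }
  try {
    // request() builds fetchUrl from ApiPath.Google and forwards the body
    return await request(req, apiKey);
  } catch (e) {
    console.error("[Google] ", e);
    return NextResponse.json(prettyObject(e));
  }
}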

View File

@@ -21,7 +21,7 @@ import {
} from "@fortaine/fetch-event-source";
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import { getMessageTextContent, isVisionModel } from "@/app/utils";
import { getMessageTextContent } from "@/app/utils";
export interface OpenAIListModelResponse {
  object: string;

View File

@@ -3,7 +3,6 @@ import { ChatOptions, getHeaders, LLMApi, MultimodalContent } from "../api";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import { getClientConfig } from "@/app/config/client";
import { DEFAULT_API_HOST } from "@/app/constant";
import { RequestMessage } from "@/app/typing";
import {
  EventStreamContentType,
  fetchEventSource,
@@ -12,6 +11,7 @@ import {
import Locale from "../../locales";
import { prettyObject } from "@/app/utils/format";
import { getMessageTextContent, isVisionModel } from "@/app/utils";
import { preProcessImageContent } from "@/app/utils/chat";
import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
export type MultiBlockContent = {
@@ -93,7 +93,12 @@ export class ClaudeApi implements LLMApi {
      },
    };
    const messages = [...options.messages];
    // try get base64image from local cache image_url
    const messages: ChatOptions["messages"] = [];
    for (const v of options.messages) {
      const content = await preProcessImageContent(v.content);
      messages.push({ role: v.role, content });
    }
    const keys = ["system", "user"];

View File

@@ -1,4 +1,4 @@
import { Google, REQUEST_TIMEOUT_MS } from "@/app/constant";
import { ApiPath, Google, REQUEST_TIMEOUT_MS } from "@/app/constant";
import { ChatOptions, getHeaders, LLMApi, LLMModel, LLMUsage } from "../api";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import { getClientConfig } from "@/app/config/client";
@@ -14,8 +14,37 @@ import {
  getMessageImages,
  isVisionModel,
} from "@/app/utils";
import { preProcessImageContent } from "@/app/utils/chat";
export class GeminiProApi implements LLMApi {
  path(path: string): string {
    const accessStore = useAccessStore.getState();
    let baseUrl = "";
    if (accessStore.useCustomConfig) {
      baseUrl = accessStore.googleUrl;
    }
    if (baseUrl.length === 0) {
      const isApp = !!getClientConfig()?.isApp;
      baseUrl = isApp
        ? DEFAULT_API_HOST + `/api/proxy/google?key=${accessStore.googleApiKey}`
        : ApiPath.Google;
    }
    if (baseUrl.endsWith("/")) {
      baseUrl = baseUrl.slice(0, baseUrl.length - 1);
    }
    if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.Google)) {
      baseUrl = "https://" + baseUrl;
    }
    console.log("[Proxy Endpoint] ", baseUrl, path);
    let chatPath = [baseUrl, path].join("/");
    chatPath += chatPath.includes("?") ? "&alt=sse" : "?alt=sse";
    return chatPath;
  }
  extractMessage(res: any) {
    console.log("[Response] gemini-pro response: ", res);
@@ -28,7 +57,14 @@ export class GeminiProApi implements LLMApi {
  async chat(options: ChatOptions): Promise<void> {
    const apiClient = this;
    let multimodal = false;
    const messages = options.messages.map((v) => {
    // try get base64image from local cache image_url
    const _messages: ChatOptions["messages"] = [];
    for (const v of options.messages) {
      const content = await preProcessImageContent(v.content);
      _messages.push({ role: v.role, content });
    }
    const messages = _messages.map((v) => {
      let parts: any[] = [{ text: getMessageTextContent(v) }];
      if (isVisionModel(options.config.model)) {
        const images = getMessageImages(v);
@@ -111,30 +147,13 @@ export class GeminiProApi implements LLMApi {
      ],
    };
    let baseUrl = "";
    if (accessStore.useCustomConfig) {
      baseUrl = accessStore.googleUrl;
    }
    const isApp = !!getClientConfig()?.isApp;
    let shouldStream = !!options.config.stream;
    const controller = new AbortController();
    options.onController?.(controller);
    try {
      if (!baseUrl) {
        baseUrl = isApp
          ? DEFAULT_API_HOST + "/api/proxy/google/"
          : this.path("");
      }
      baseUrl = `${baseUrl}/${Google.ChatPath(modelConfig.model)}`.replaceAll(
        "//",
        "/",
      );
      if (isApp) {
        baseUrl += `?key=${accessStore.googleApiKey}`;
      }
      // https://github.com/google-gemini/cookbook/blob/main/quickstarts/rest/Streaming_REST.ipynb
      const chatPath = this.path(Google.ChatPath(modelConfig.model));
      const chatPayload = {
        method: "POST",
        body: JSON.stringify(requestPayload),
@@ -184,10 +203,6 @@ export class GeminiProApi implements LLMApi {
      controller.signal.onabort = finish;
      // https://github.com/google-gemini/cookbook/blob/main/quickstarts/rest/Streaming_REST.ipynb
      const chatPath =
        baseUrl.replace("generateContent", "streamGenerateContent") +
        (baseUrl.indexOf("?") > -1 ? "&alt=sse" : "?alt=sse");
      fetchEventSource(chatPath, {
        ...chatPayload,
        async onopen(res) {
@@ -262,7 +277,7 @@ export class GeminiProApi implements LLMApi {
        openWhenHidden: true,
      });
    } else {
      const res = await fetch(baseUrl, chatPayload);
      const res = await fetch(chatPath, chatPayload);
      clearTimeout(requestTimeoutId);
      const resJson = await res.json();
      if (resJson?.promptFeedback?.blockReason) {
@@ -288,14 +303,4 @@ export class GeminiProApi implements LLMApi {
  async models(): Promise<LLMModel[]> {
    return [];
  }
  path(path: string): string {
    return "/api/google/" + path;
  }
}
function ensureProperEnding(str: string) {
  if (str.startsWith("[") && !str.endsWith("]")) {
    return str + "]";
  }
  return str;
}
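
Net effect in this file: endpoint selection moves out of chat() into the single path() helper, and Google.ChatPath now targets streamGenerateContent, with path() appending alt=sse so the streaming and non-streaming branches share one chatPath. A rough sketch of what path() yields, assuming ApiPath.Google === "/api/google" (model name illustrative):

// web build:
//   this.path(Google.ChatPath("gemini-pro"))
//     -> "/api/google/v1beta/models/gemini-pro:streamGenerateContent?alt=sse"
// app build: the base becomes
//   DEFAULT_API_HOST + "/api/proxy/google?key=<googleApiKey>",
//   so the key travels in the query string instead of a header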

View File

@@ -11,6 +11,7 @@ import {
} from "@/app/constant";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import { collectModelsWithDefaultModel } from "@/app/utils/model";
import { preProcessImageContent } from "@/app/utils/chat";
import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
import {
@@ -105,10 +106,13 @@ export class ChatGPTApi implements LLMApi {
  async chat(options: ChatOptions) {
    const visionModel = isVisionModel(options.config.model);
    const messages = options.messages.map((v) => ({
      role: v.role,
      content: visionModel ? v.content : getMessageTextContent(v),
    }));
    const messages: ChatOptions["messages"] = [];
    for (const v of options.messages) {
      const content = visionModel
        ? await preProcessImageContent(v.content)
        : getMessageTextContent(v);
      messages.push({ role: v.role, content });
    }
    const modelConfig = {
      ...useAppConfig.getState().modelConfig,

View File

@@ -61,7 +61,7 @@ import {
  isVisionModel,
} from "../utils";
import { compressImage } from "@/app/utils/chat";
import { uploadImage as uploadImageRemote } from "@/app/utils/chat";
import dynamic from "next/dynamic";
@@ -1167,7 +1167,7 @@ function _Chat() {
      ...(await new Promise<string[]>((res, rej) => {
        setUploading(true);
        const imagesData: string[] = [];
        compressImage(file, 256 * 1024)
        uploadImageRemote(file)
          .then((dataUrl) => {
            imagesData.push(dataUrl);
            setUploading(false);
@@ -1209,7 +1209,7 @@ function _Chat() {
        const imagesData: string[] = [];
        for (let i = 0; i < files.length; i++) {
          const file = event.target.files[i];
          compressImage(file, 256 * 1024)
          uploadImageRemote(file)
            .then((dataUrl) => {
              imagesData.push(dataUrl);
              if (
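
Both hunks in this file make the same swap: the local compressImage(file, 256 * 1024) call becomes uploadImageRemote(file), i.e. the uploadImage helper from app/utils/chat.ts added later in this diff. Per its definition, it POSTs the file to the cache upload endpoint and resolves to a URL, falling back to the old base64 compression when the service worker is not registered; the surrounding promise plumbing is unchanged. A minimal usage sketch (file acquisition elided):

// resolves to a cache URL when the service worker is active,
// or to a compressed "data:image/..." URL as a fallback
const dataUrl: string = await uploadImageRemote(file);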

View File

@@ -21,7 +21,7 @@ declare global {
      ENABLE_BALANCE_QUERY?: string; // allow user to query balance or not
      DISABLE_FAST_LINK?: string; // disallow parse settings from url or not
      CUSTOM_MODELS?: string; // to control custom models
      DEFAULT_MODEL?: string; // to cnntrol default model in every new chat window
      DEFAULT_MODEL?: string; // to control default model in every new chat window
      // azure only
      AZURE_URL?: string; // https://{azure-url}/openai/deployments/{deploy-name}

View File

@@ -21,6 +21,9 @@ export const BYTEDANCE_BASE_URL = "https://ark.cn-beijing.volces.com";
export const ALIBABA_BASE_URL = "https://dashscope.aliyuncs.com/api/";
export const CACHE_URL_PREFIX = "/api/cache";
export const UPLOAD_URL = `${CACHE_URL_PREFIX}/upload`;
export enum Path {
Home = "/",
Chat = "/chat",
@@ -127,7 +130,8 @@ export const Azure = {
export const Google = {
  ExampleEndpoint: "https://generativelanguage.googleapis.com/",
  ChatPath: (modelName: string) => `v1beta/models/${modelName}:generateContent`,
  ChatPath: (modelName: string) =>
    `v1beta/models/${modelName}:streamGenerateContent`,
};
export const Baidu = {
@@ -191,6 +195,8 @@ export const KnowledgeCutOffDate: Record<string, string> = {
"gpt-4-turbo-preview": "2023-12",
"gpt-4o": "2023-10",
"gpt-4o-2024-05-13": "2023-10",
"gpt-4o-mini": "2023-10",
"gpt-4o-mini-2024-07-18": "2023-10",
"gpt-4-vision-preview": "2023-04",
// After improvements,
// it's now easier to add "KnowledgeCutOffDate" instead of stupid hardcoding it, as was done previously.
@@ -210,6 +216,8 @@ const openaiModels = [
"gpt-4-turbo-preview",
"gpt-4o",
"gpt-4o-2024-05-13",
"gpt-4o-mini",
"gpt-4o-mini-2024-07-18",
"gpt-4-vision-preview",
"gpt-4-turbo-2024-04-09",
"gpt-4-1106-preview",
@@ -243,7 +251,7 @@ const baiduModels = [
"ernie-speed-128k",
"ernie-speed-8k",
"ernie-lite-8k",
"ernie-tiny-8k"
"ernie-tiny-8k",
];
const bytedanceModels = [

View File

@@ -9,8 +9,6 @@ import {
  DEFAULT_MODELS,
  DEFAULT_SYSTEM_TEMPLATE,
  KnowledgeCutOffDate,
  ServiceProvider,
  ModelProvider,
  StoreKey,
  SUMMARIZE_MODEL,
  GEMINI_SUMMARIZE_MODEL,

View File

@@ -154,7 +154,7 @@ export const usePromptStore = createPersistStore(
    fetch(PROMPT_URL)
      .then((res) => res.json())
      .then((res) => {
        let fetchPrompts = [res.en, res.cn];
        let fetchPrompts = [res.en, res.tw, res.cn];
        if (getLang() === "cn") {
          fetchPrompts = fetchPrompts.reverse();
        }
@@ -175,7 +175,8 @@ export const usePromptStore = createPersistStore(
        const allPromptsForSearch = builtinPrompts
          .reduce((pre, cur) => pre.concat(cur), [])
          .filter((v) => !!v.title && !!v.content);
        SearchService.count.builtin = res.en.length + res.cn.length;
        SearchService.count.builtin =
          res.en.length + res.cn.length + res.tw.length;
        SearchService.init(allPromptsForSearch, userPrompts);
      });
  },

View File

@@ -256,6 +256,7 @@ export function isVisionModel(model: string) {
"gemini-1.5-pro",
"gemini-1.5-flash",
"gpt-4o",
"gpt-4o-mini",
];
const isGpt4Turbo =
model.includes("gpt-4-turbo") && !model.includes("preview");

View File

@@ -1,6 +1,7 @@
import heic2any from "heic2any";
import { CACHE_URL_PREFIX, UPLOAD_URL } from "@/app/constant";
import { RequestMessage } from "@/app/client/api";
export function compressImage(file: File, maxSize: number): Promise<string> {
export function compressImage(file: Blob, maxSize: number): Promise<string> {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = (readerEvent: any) => {
@@ -40,15 +41,104 @@ export function compressImage(file: File, maxSize: number): Promise<string> {
    reader.onerror = reject;
    if (file.type.includes("heic")) {
      heic2any({ blob: file, toType: "image/jpeg" })
        .then((blob) => {
          reader.readAsDataURL(blob as Blob);
        })
        .catch((e) => {
          reject(e);
        });
      try {
        const heic2any = require("heic2any");
        heic2any({ blob: file, toType: "image/jpeg" })
          .then((blob: Blob) => {
            reader.readAsDataURL(blob);
          })
          .catch((e: any) => {
            reject(e);
          });
      } catch (e) {
        reject(e);
      }
    }
    reader.readAsDataURL(file);
  });
}
export async function preProcessImageContent(
content: RequestMessage["content"],
) {
if (typeof content === "string") {
return content;
}
const result = [];
for (const part of content) {
if (part?.type == "image_url" && part?.image_url?.url) {
try {
const url = await cacheImageToBase64Image(part?.image_url?.url);
result.push({ type: part.type, image_url: { url } });
} catch (error) {
console.error("Error processing image URL:", error);
}
} else {
result.push({ ...part });
}
}
return result;
}
const imageCaches: Record<string, string> = {};
export function cacheImageToBase64Image(imageUrl: string) {
  if (imageUrl.includes(CACHE_URL_PREFIX)) {
    if (!imageCaches[imageUrl]) {
      const reader = new FileReader();
      return fetch(imageUrl, {
        method: "GET",
        mode: "cors",
        credentials: "include",
      })
        .then((res) => res.blob())
        .then(
          async (blob) =>
            (imageCaches[imageUrl] = await compressImage(blob, 256 * 1024)),
        ); // compressImage
    }
    return Promise.resolve(imageCaches[imageUrl]);
  }
  return Promise.resolve(imageUrl);
}
export function base64Image2Blob(base64Data: string, contentType: string) {
  const byteCharacters = atob(base64Data);
  const byteNumbers = new Array(byteCharacters.length);
  for (let i = 0; i < byteCharacters.length; i++) {
    byteNumbers[i] = byteCharacters.charCodeAt(i);
  }
  const byteArray = new Uint8Array(byteNumbers);
  return new Blob([byteArray], { type: contentType });
}
export function uploadImage(file: File): Promise<string> {
  if (!window._SW_ENABLED) {
    // if serviceWorker register error, using compressImage
    return compressImage(file, 256 * 1024);
  }
  const body = new FormData();
  body.append("file", file);
  return fetch(UPLOAD_URL, {
    method: "post",
    body,
    mode: "cors",
    credentials: "include",
  })
    .then((res) => res.json())
    .then((res) => {
      console.log("res", res);
      if (res?.code == 0 && res?.data) {
        return res?.data;
      }
      throw Error(`upload Error: ${res?.msg}`);
    });
}
export function removeImage(imageUrl: string) {
  return fetch(imageUrl, {
    method: "DELETE",
    mode: "cors",
    credentials: "include",
  });
}
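
Taken together, these helpers form the new image round trip: uploadImage() stores an attachment behind CACHE_URL_PREFIX via the service worker, and preProcessImageContent()/cacheImageToBase64Image() pull it back as base64 (re-compressed to 256 KB) whenever a provider needs the image inline. A hedged end-to-end sketch, assuming a File obtained from an input element:

// upload: File -> service-worker cache URL (or a data URL fallback)
const url = await uploadImage(file);
// attach it to a multimodal message part
const parts = [{ type: "image_url", image_url: { url } }];
// before the provider call: cache URL -> inline base64 data URL
const content = await preProcessImageContent(parts);
// cleanup when the user removes the attachment
await removeImage(url);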

View File

@@ -1,6 +1,6 @@
import { useMemo } from "react";
import { useAccessStore, useAppConfig } from "../store";
import { collectModels, collectModelsWithDefaultModel } from "./model";
import { collectModelsWithDefaultModel } from "./model";
export function useAllModels() {
  const accessStore = useAccessStore();