Merge remote

commit fa2e046285 (parent da29a94714)

@@ -16,7 +16,7 @@ GOOGLE_API_KEY=
 # (optional)
 # Default: https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent
 # Googel Gemini Pro API url, set if you want to customize Google Gemini Pro API url.
-GOOGLE_URL=
+GOOGLE_BASE_URL=

 # Override openai api request base url. (optional)
 # Default: https://api.openai.com

@@ -110,7 +110,7 @@ Azure API version; you can find it here: [Azure docs](https://learn.micro

 Google Gemini Pro key.

-### `GOOGLE_URL` (optional)
+### `GOOGLE_BASE_URL` (optional)

 Google Gemini Pro Api Url.

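Since GOOGLE_URL becomes GOOGLE_BASE_URL in both the env template and the server config further down, existing deployments must rename the variable. A minimal sketch of how the renamed setting resolves, assuming the GEMINI_BASE_URL fallback and the scheme check used by the route handler in this diff:

// Sketch only: GOOGLE_BASE_URL replaces the old GOOGLE_URL.
const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com"; // assumed default value

function resolveGoogleBaseUrl(): string {
  let baseUrl = process.env.GOOGLE_BASE_URL || GEMINI_BASE_URL;
  // Tolerate values supplied without a scheme, as the route handler below does.
  if (!baseUrl.startsWith("http")) {
    baseUrl = `https://${baseUrl}`;
  }
  return baseUrl;
}
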
@@ -9,15 +9,14 @@ const serverConfig = getServerSideConfig();
 export async function requestOpenai(req: NextRequest) {
   const controller = new AbortController();

+  let authValue = req.headers.get("Authorization") ?? "";
   if (serverConfig.isAzure) {
-    const authValue =
+    authValue =
       req.headers
         .get("Authorization")
         ?.trim()
         .replaceAll("Bearer ", "")
         .trim() ?? "";
-  } else {
-    const authValue = req.headers.get("Authorization") ?? "";
   }
   const authHeaderName = serverConfig.isAzure ? "api-key" : "Authorization";

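The rewrite hoists authValue out of the branch: previously each arm declared its own block-scoped const, so the value was unreadable after the if/else. A minimal sketch of the resulting logic, with isAzure standing in for serverConfig.isAzure:

// Sketch: the hoisted auth handling after this change.
function buildAuthHeader(headers: Headers, isAzure: boolean): Record<string, string> {
  let authValue = headers.get("Authorization") ?? "";
  if (isAzure) {
    // Azure expects the bare key in an "api-key" header, without the "Bearer " prefix.
    authValue = authValue.trim().replaceAll("Bearer ", "").trim();
  }
  const authHeaderName = isAzure ? "api-key" : "Authorization";
  return { [authHeaderName]: authValue };
}
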
@@ -1,13 +1,14 @@
 import { NextRequest, NextResponse } from "next/server";
 import { auth } from "../../auth";
 import S3FileStorage from "../../../utils/s3_file_storage";
+import { ModelProvider } from "@/app/constant";

 async function handle(req: NextRequest) {
   if (req.method === "OPTIONS") {
     return NextResponse.json({ body: "OK" }, { status: 200 });
   }

-  const authResult = auth(req);
+  const authResult = auth(req, ModelProvider.GPT);
   if (authResult.error) {
     return NextResponse.json(authResult, {
       status: 401,

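With ModelProvider imported, the route now calls auth(req, ModelProvider.GPT): the extra argument tells the auth layer which provider's credentials and access rules apply. The same two-line change repeats in the LangChain tool routes below.
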
@@ -1,7 +1,7 @@
 import { NextRequest, NextResponse } from "next/server";
 import { auth } from "../../auth";
 import { getServerSideConfig } from "@/app/config/server";
-import { GEMINI_BASE_URL, Google, ModelProvider } from "@/app/constant";
+import { GEMINI_BASE_URL, ModelProvider } from "@/app/constant";

 async function handle(
   req: NextRequest,

@@ -17,7 +17,7 @@ async function handle(

   const serverConfig = getServerSideConfig();

-  let baseUrl = serverConfig.googleUrl || GEMINI_BASE_URL;
+  let baseUrl = serverConfig.googleBaseUrl || GEMINI_BASE_URL;

   if (!baseUrl.startsWith("http")) {
     baseUrl = `https://${baseUrl}`;

@@ -63,7 +63,7 @@ async function handle(
     );
   }

-  const fetchUrl = `${baseUrl}/${path}?key=${key}`;
+  const fetchUrl = `${baseUrl}/${path}?key=${key}&alt=sse`;
   const fetchOptions: RequestInit = {
     headers: {
       "Content-Type": "application/json",

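Appending alt=sse asks the Google API to return the response as a server-sent-event stream instead of a single JSON body; the client-side shouldStream flag flipped later in this commit consumes that stream. A minimal sketch of the proxied request, assuming baseUrl, path, and key are validated as in this handler:

// Sketch: forwarding a streaming request to Gemini (URL shape taken from this diff).
async function proxyGemini(
  baseUrl: string,
  path: string,
  key: string,
  payload: unknown,
): Promise<Response> {
  // With `alt=sse` the body arrives as "data: {...}" event lines.
  const fetchUrl = `${baseUrl}/${path}?key=${key}&alt=sse`;
  return fetch(fetchUrl, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(payload),
  });
}
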
@@ -4,13 +4,14 @@ import { auth } from "@/app/api/auth";
 import { EdgeTool } from "../../../../langchain-tools/edge_tools";
 import { OpenAI } from "langchain/llms/openai";
 import { OpenAIEmbeddings } from "langchain/embeddings/openai";
+import { ModelProvider } from "@/app/constant";

 async function handle(req: NextRequest) {
   if (req.method === "OPTIONS") {
     return NextResponse.json({ body: "OK" }, { status: 200 });
   }
   try {
-    const authResult = auth(req);
+    const authResult = auth(req, ModelProvider.GPT);
     if (authResult.error) {
       return NextResponse.json(authResult, {
         status: 401,

@@ -5,13 +5,14 @@ import { EdgeTool } from "../../../../langchain-tools/edge_tools";
 import { OpenAI } from "langchain/llms/openai";
 import { OpenAIEmbeddings } from "langchain/embeddings/openai";
 import { NodeJSTool } from "@/app/api/langchain-tools/nodejs_tools";
+import { ModelProvider } from "@/app/constant";

 async function handle(req: NextRequest) {
   if (req.method === "OPTIONS") {
     return NextResponse.json({ body: "OK" }, { status: 200 });
   }
   try {
-    const authResult = auth(req);
+    const authResult = auth(req, ModelProvider.GPT);
     if (authResult.error) {
       return NextResponse.json(authResult, {
         status: 401,

@@ -115,9 +115,9 @@ export class ClientApi {
   constructor(provider: ModelProvider = ModelProvider.GPT) {
     if (provider === ModelProvider.GeminiPro) {
       this.llm = new GeminiProApi();
-      return;
-    }
+    } else {
       this.llm = new ChatGPTApi();
+    }
     this.file = new FileApi();
   }

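Replacing the early return with an else branch also fixes a subtle bug: the old code returned before this.file = new FileApi() ran, so GeminiPro clients were constructed without a file API.
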
@@ -1,5 +1,12 @@
 import { Google, REQUEST_TIMEOUT_MS } from "@/app/constant";
-import { ChatOptions, getHeaders, LLMApi, LLMModel, LLMUsage } from "../api";
+import {
+  AgentChatOptions,
+  ChatOptions,
+  getHeaders,
+  LLMApi,
+  LLMModel,
+  LLMUsage,
+} from "../api";
 import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
 import {
   EventStreamContentType,

@@ -10,6 +17,9 @@ import { getClientConfig } from "@/app/config/client";
 import Locale from "../../locales";
 import { getServerSideConfig } from "@/app/config/server";
 export class GeminiProApi implements LLMApi {
+  toolAgentChat(options: AgentChatOptions): Promise<void> {
+    throw new Error("Method not implemented.");
+  }
   extractMessage(res: any) {
     console.log("[Response] gemini-pro response: ", res);

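The toolAgentChat stub appears to satisfy the widened LLMApi interface now that AgentChatOptions is part of the shared client API; it throws until agent-style chat is implemented for Gemini.
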
@@ -62,7 +72,7 @@ export class GeminiProApi implements LLMApi {
     console.log("[Request] google payload: ", requestPayload);

     // todo: support stream later
-    const shouldStream = false;
+    const shouldStream = true;
     const controller = new AbortController();
     options.onController?.(controller);
     try {

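Flipping shouldStream to true enables the SSE consumption path; the todo comment above it is now stale, since streaming is exactly what this commit adds.
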
@@ -121,7 +131,7 @@ export class GeminiProApi implements LLMApi {
       clearTimeout(requestTimeoutId);
       const contentType = res.headers.get("content-type");
       console.log(
-        "[OpenAI] request response content type: ",
+        "[Google] request response content type: ",
         contentType,
       );

@@ -164,13 +174,15 @@ export class GeminiProApi implements LLMApi {
           const text = msg.data;
           try {
             const json = JSON.parse(text) as {
-              choices: Array<{
-                delta: {
-                  content: string;
+              candidates: Array<{
+                content: {
+                  parts: Array<{
+                    text: string;
+                  }>;
                 };
               }>;
             };
-            const delta = json.choices[0]?.delta?.content;
+            const delta = json.candidates[0]?.content?.parts[0]?.text;
             if (delta) {
               remainText += delta;
             }

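The streamed chunk shape changes from OpenAI's choices[].delta.content to Gemini's candidates[].content.parts[].text. A minimal, self-contained sketch of the new extraction, assuming each SSE data: line carries one JSON chunk of this shape:

// Sketch: pulling the delta text out of one streamed Gemini chunk (shape per this diff).
interface GeminiChunk {
  candidates: Array<{
    content: {
      parts: Array<{ text: string }>;
    };
  }>;
}

function extractDelta(rawData: string): string {
  const json = JSON.parse(rawData) as GeminiChunk;
  // Optional chaining guards chunks that omit content (e.g. safety-blocked candidates).
  return json.candidates[0]?.content?.parts[0]?.text ?? "";
}
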
@@ -1,10 +1,10 @@
-import { getAuthHeaders } from "../api";
+import { getHeaders } from "../api";

 export class FileApi {
   async upload(file: any): Promise<void> {
     const formData = new FormData();
     formData.append("file", file);
-    var headers = getAuthHeaders();
+    var headers = getHeaders();
     var res = await fetch("/api/file/upload", {
       method: "POST",
       body: formData,

@@ -96,7 +96,7 @@ import { ExportMessageModal } from "./exporter";
 import { getClientConfig } from "../config/client";
 import { useAllModels } from "../utils/hooks";
 import Image from "next/image";
-import { api } from "../client/api";
+import { ClientApi } from "../client/api";

 const Markdown = dynamic(async () => (await import("./markdown")).Markdown, {
   loading: () => <LoadingIcon />,

@@ -464,6 +464,7 @@ export function ChatActions(props: {

   const onImageSelected = async (e: any) => {
     const file = e.target.files[0];
+    const api = new ClientApi();
     const fileName = await api.file.upload(file);
     props.imageSelected({
       fileName,

@@ -494,6 +495,7 @@ export function ChatActions(props: {
   }
   const onPaste = (event: ClipboardEvent) => {
     const items = event.clipboardData?.items || [];
+    const api = new ClientApi();
     for (let i = 0; i < items.length; i++) {
       if (items[i].type.indexOf("image") === -1) continue;
       const file = items[i].getAsFile();

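Both handlers now construct a ClientApi locally instead of using the shared api singleton that was previously imported, so provider selection happens per call through the constructor shown earlier.
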
@@ -1071,12 +1071,12 @@ export function Settings() {
                 >
                   <input
                     type="text"
-                    value={accessStore.googleUrl}
+                    value={accessStore.googleBaseUrl}
                     placeholder={Google.ExampleEndpoint}
                     onChange={(e) =>
                       accessStore.update(
                         (access) =>
-                          (access.googleUrl = e.currentTarget.value),
+                          (access.googleBaseUrl = e.currentTarget.value),
                       )
                     }
                   ></input>

@@ -29,7 +29,7 @@ declare global {

       // google only
       GOOGLE_API_KEY?: string;
-      GOOGLE_URL?: string;
+      GOOGLE_BASE_URL?: string;
     }
   }
 }

@@ -87,7 +87,7 @@ export const getServerSideConfig = () => {

     isGoogle,
     googleApiKey: process.env.GOOGLE_API_KEY,
-    googleUrl: process.env.GOOGLE_URL,
+    googleBaseUrl: process.env.GOOGLE_BASE_URL,

     needCode: ACCESS_CODES.size > 0,
     code: process.env.CODE,

@@ -99,8 +99,8 @@ export const Azure = {

 export const Google = {
   ExampleEndpoint:
-    "https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent",
-  ChatPath: "v1beta/models/gemini-pro:generateContent",
+    "https://generativelanguage.googleapis.com/v1/models/gemini-pro:generateContent",
+  ChatPath: "v1/models/gemini-pro:generateContent",

   // /api/openai/v1/chat/completions
 };

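The endpoint constants move from v1beta to v1, which appears to pin requests to the stable Gemini API surface; ChatPath still targets generateContent, with streaming requested via the alt=sse query parameter added earlier rather than the separate streamGenerateContent method.
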
@@ -30,7 +30,7 @@ const DEFAULT_ACCESS_STATE = {
   azureApiVersion: "2023-08-01-preview",

   // google ai studio
-  googleUrl: "",
+  googleBaseUrl: "",
   googleApiKey: "",
   googleApiVersion: "v1",

@@ -662,7 +662,7 @@ export const useChatStore = createPersistStore(
         session.memoryPrompt = message;
       },
       onFinish(message) {
-        console.log("[Memory] ", message);
+        // console.log("[Memory] ", message);
         get().updateCurrentSession((session) => {
           session.lastSummarizeIndex = lastSummarizeIndex;
           session.memoryPrompt = message; // Update the memory prompt for stored it in local storage