Merge remote

Hk-Gosuto 2023-12-25 12:40:09 +08:00
parent da29a94714
commit fa2e046285
16 changed files with 47 additions and 31 deletions

View File

@ -16,7 +16,7 @@ GOOGLE_API_KEY=
# (optional)
# Default: https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent
# Google Gemini Pro API URL, set if you want to customize the Google Gemini Pro API URL.
- GOOGLE_URL=
+ GOOGLE_BASE_URL=
# Override openai api request base url. (optional)
# Default: https://api.openai.com
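With this rename, deployments must set `GOOGLE_BASE_URL` where they previously set `GOOGLE_URL`. A minimal sketch of how the server can resolve the endpoint from the renamed variable (the fallback constant stands in for `GEMINI_BASE_URL` from the route hunk below; treat its exact value as an assumption):

```ts
// Sketch: resolve the Gemini endpoint from the renamed env var.
// The default value is an assumption standing in for GEMINI_BASE_URL.
const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com";

function resolveGoogleBaseUrl(): string {
  // Fall back to the public endpoint when GOOGLE_BASE_URL is unset.
  let baseUrl = process.env.GOOGLE_BASE_URL || GEMINI_BASE_URL;
  // Same normalization the route handler applies below: ensure a scheme.
  if (!baseUrl.startsWith("http")) {
    baseUrl = `https://${baseUrl}`;
  }
  return baseUrl;
}
```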

View File

@ -110,7 +110,7 @@ Azure API version; you can find it here: [Azure Docs](https://learn.micro
Google Gemini Pro key.
- ### `GOOGLE_URL` (optional)
+ ### `GOOGLE_BASE_URL` (optional)
Google Gemini Pro API URL.

View File

@ -9,15 +9,14 @@ const serverConfig = getServerSideConfig();
export async function requestOpenai(req: NextRequest) {
const controller = new AbortController();
+ let authValue = req.headers.get("Authorization") ?? "";
if (serverConfig.isAzure) {
- const authValue =
+ authValue =
req.headers
.get("Authorization")
?.trim()
.replaceAll("Bearer ", "")
.trim() ?? "";
- } else {
- const authValue = req.headers.get("Authorization") ?? "";
}
const authHeaderName = serverConfig.isAzure ? "api-key" : "Authorization";
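The point of this restructuring: the old code declared `authValue` with `const` inside each branch, so both bindings were block-scoped and invisible to the code that actually attaches the header. Hoisting a single `let` to function scope fixes that. A minimal illustration of the pitfall:

```ts
// Broken: each `const` is block-scoped and dies at its closing brace.
function brokenAuth(isAzure: boolean): string {
  if (isAzure) {
    const authValue = "stripped-key";
  } else {
    const authValue = "Bearer raw-token";
  }
  // return authValue; // compile error: Cannot find name 'authValue'
  return "";
}

// Fixed, as in the hunk above: one `let` visible at function scope.
function fixedAuth(isAzure: boolean, header: string): string {
  let authValue = header;
  if (isAzure) {
    authValue = header.trim().replaceAll("Bearer ", "").trim();
  }
  return authValue;
}
```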

View File

@ -1,13 +1,14 @@
import { NextRequest, NextResponse } from "next/server";
import { auth } from "../../auth";
import S3FileStorage from "../../../utils/s3_file_storage";
+ import { ModelProvider } from "@/app/constant";
async function handle(req: NextRequest) {
if (req.method === "OPTIONS") {
return NextResponse.json({ body: "OK" }, { status: 200 });
}
- const authResult = auth(req);
+ const authResult = auth(req, ModelProvider.GPT);
if (authResult.error) {
return NextResponse.json(authResult, {
status: 401,
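`auth` now takes a `ModelProvider` so the server can check the credential that matches the requested backend. The repo's actual helper isn't shown in this diff; a hedged sketch of the shape implied by the call sites:

```ts
import { NextRequest } from "next/server";

// Hypothetical stand-ins; the real definitions live in the repo's
// constant and auth modules and are not shown in this diff.
enum ModelProvider { GPT = "GPT", GeminiPro = "GeminiPro" }
interface AuthResult { error: boolean; msg?: string }

function auth(req: NextRequest, provider: ModelProvider): AuthResult {
  const header = req.headers.get("Authorization") ?? "";
  if (!header) return { error: true, msg: "empty auth header" };
  // A provider-aware check would validate an OpenAI key for GPT
  // and a Google key for GeminiPro; the details are assumptions here.
  return { error: false };
}
```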

View File

@ -1,7 +1,7 @@
import { NextRequest, NextResponse } from "next/server";
import { auth } from "../../auth";
import { getServerSideConfig } from "@/app/config/server";
- import { GEMINI_BASE_URL, Google, ModelProvider } from "@/app/constant";
+ import { GEMINI_BASE_URL, ModelProvider } from "@/app/constant";
async function handle(
req: NextRequest,
@ -17,7 +17,7 @@ async function handle(
const serverConfig = getServerSideConfig();
- let baseUrl = serverConfig.googleUrl || GEMINI_BASE_URL;
+ let baseUrl = serverConfig.googleBaseUrl || GEMINI_BASE_URL;
if (!baseUrl.startsWith("http")) {
baseUrl = `https://${baseUrl}`;
@ -63,7 +63,7 @@ async function handle(
);
}
- const fetchUrl = `${baseUrl}/${path}?key=${key}`;
+ const fetchUrl = `${baseUrl}/${path}?key=${key}&alt=sse`;
const fetchOptions: RequestInit = {
headers: {
"Content-Type": "application/json",

View File

@ -4,13 +4,14 @@ import { auth } from "@/app/api/auth";
import { EdgeTool } from "../../../../langchain-tools/edge_tools";
import { OpenAI } from "langchain/llms/openai";
import { OpenAIEmbeddings } from "langchain/embeddings/openai";
+ import { ModelProvider } from "@/app/constant";
async function handle(req: NextRequest) {
if (req.method === "OPTIONS") {
return NextResponse.json({ body: "OK" }, { status: 200 });
}
try {
- const authResult = auth(req);
+ const authResult = auth(req, ModelProvider.GPT);
if (authResult.error) {
return NextResponse.json(authResult, {
status: 401,

View File

@ -5,13 +5,14 @@ import { EdgeTool } from "../../../../langchain-tools/edge_tools";
import { OpenAI } from "langchain/llms/openai";
import { OpenAIEmbeddings } from "langchain/embeddings/openai";
import { NodeJSTool } from "@/app/api/langchain-tools/nodejs_tools";
+ import { ModelProvider } from "@/app/constant";
async function handle(req: NextRequest) {
if (req.method === "OPTIONS") {
return NextResponse.json({ body: "OK" }, { status: 200 });
}
try {
- const authResult = auth(req);
+ const authResult = auth(req, ModelProvider.GPT);
if (authResult.error) {
return NextResponse.json(authResult, {
status: 401,

View File

@ -115,9 +115,9 @@ export class ClientApi {
constructor(provider: ModelProvider = ModelProvider.GPT) {
if (provider === ModelProvider.GeminiPro) {
this.llm = new GeminiProApi();
- return;
+ } else {
+ this.llm = new ChatGPTApi();
}
- this.llm = new ChatGPTApi();
this.file = new FileApi();
}
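Why this matters: the old early `return` skipped `this.file = new FileApi()` whenever the provider was GeminiPro, leaving `api.file` undefined for Gemini sessions. With the `else` branch, `file` is initialized for both providers. A hedged usage sketch:

```ts
import { ClientApi } from "../client/api"; // import path assumed
import { ModelProvider } from "@/app/constant";

// Both providers now get a usable file API:
const gptApi = new ClientApi(ModelProvider.GPT);
const geminiApi = new ClientApi(ModelProvider.GeminiPro);

// Before this change, geminiApi.file was undefined and this upload threw:
declare const someFile: File; // stand-in for a real File/Blob
await geminiApi.file.upload(someFile);
```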

View File

@ -1,5 +1,12 @@
import { Google, REQUEST_TIMEOUT_MS } from "@/app/constant";
- import { ChatOptions, getHeaders, LLMApi, LLMModel, LLMUsage } from "../api";
+ import {
+ AgentChatOptions,
+ ChatOptions,
+ getHeaders,
+ LLMApi,
+ LLMModel,
+ LLMUsage,
+ } from "../api";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import {
EventStreamContentType,
@ -10,6 +17,9 @@ import { getClientConfig } from "@/app/config/client";
import Locale from "../../locales";
import { getServerSideConfig } from "@/app/config/server";
export class GeminiProApi implements LLMApi {
+ toolAgentChat(options: AgentChatOptions): Promise<void> {
+ throw new Error("Method not implemented.");
+ }
extractMessage(res: any) {
console.log("[Response] gemini-pro response: ", res);
@ -62,7 +72,7 @@ export class GeminiProApi implements LLMApi {
console.log("[Request] google payload: ", requestPayload);
// todo: support stream later
- const shouldStream = false;
+ const shouldStream = true;
const controller = new AbortController();
options.onController?.(controller);
try {
@ -121,7 +131,7 @@ export class GeminiProApi implements LLMApi {
clearTimeout(requestTimeoutId);
const contentType = res.headers.get("content-type");
console.log(
"[OpenAI] request response content type: ",
"[Google] request response content type: ",
contentType,
);
@ -164,13 +174,15 @@ export class GeminiProApi implements LLMApi {
const text = msg.data;
try {
const json = JSON.parse(text) as {
- choices: Array<{
- delta: {
- content: string;
+ candidates: Array<{
+ content: {
+ parts: Array<{
+ text: string;
+ }>;
};
}>;
};
- const delta = json.choices[0]?.delta?.content;
+ const delta = json.candidates[0]?.content?.parts[0]?.text;
if (delta) {
remainText += delta;
}
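With `shouldStream = true`, each SSE `data:` frame now carries Gemini's `candidates` shape rather than OpenAI's `choices`/`delta` shape, which is why both the type cast and the extraction path change. A sketch with an illustrative frame (not captured output):

```ts
// Illustrative frame body; real frames come from the alt=sse stream.
const text = `{"candidates":[{"content":{"parts":[{"text":"Hello"}]}}]}`;

const json = JSON.parse(text) as {
  candidates: Array<{ content: { parts: Array<{ text: string }> } }>;
};
// The new extraction path from the hunk above:
const delta = json.candidates[0]?.content?.parts[0]?.text;
console.log(delta); // "Hello"
```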

View File

@ -1,10 +1,10 @@
- import { getAuthHeaders } from "../api";
+ import { getHeaders } from "../api";
export class FileApi {
async upload(file: any): Promise<void> {
const formData = new FormData();
formData.append("file", file);
- var headers = getAuthHeaders();
+ var headers = getHeaders();
var res = await fetch("/api/file/upload", {
method: "POST",
body: formData,
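A small but load-bearing swap: the upload route now runs the same `auth` check as the other API routes (see the upload route hunk above), so the client must send the standard request headers from `getHeaders` rather than the removed `getAuthHeaders`. A minimal call-site sketch (import path and element id are assumptions):

```ts
import { FileApi } from "../client/platforms/utils"; // path assumed

// Hypothetical call site; the element id is an assumption.
const input = document.querySelector<HTMLInputElement>("#file-input");
const file = input?.files?.[0];
if (file) {
  await new FileApi().upload(file); // now sends getHeaders() auth headers
}
```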

View File

@ -96,7 +96,7 @@ import { ExportMessageModal } from "./exporter";
import { getClientConfig } from "../config/client";
import { useAllModels } from "../utils/hooks";
import Image from "next/image";
- import { api } from "../client/api";
+ import { ClientApi } from "../client/api";
const Markdown = dynamic(async () => (await import("./markdown")).Markdown, {
loading: () => <LoadingIcon />,
@ -464,6 +464,7 @@ export function ChatActions(props: {
const onImageSelected = async (e: any) => {
const file = e.target.files[0];
+ const api = new ClientApi();
const fileName = await api.file.upload(file);
props.imageSelected({
fileName,
@ -494,6 +495,7 @@ export function ChatActions(props: {
}
const onPaste = (event: ClipboardEvent) => {
const items = event.clipboardData?.items || [];
+ const api = new ClientApi();
for (let i = 0; i < items.length; i++) {
if (items[i].type.indexOf("image") === -1) continue;
const file = items[i].getAsFile();
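Rather than importing a shared `api` singleton, each handler now constructs its own `ClientApi` at call time; combined with the constructor fix above, that guarantees `api.file` exists when the upload runs. Condensed sketch of the resulting handler:

```ts
// Handler shape after the change (condensed; props handling elided):
const onImageSelected = async (e: any) => {
  const file = e.target.files[0];
  const api = new ClientApi(); // defaults to ModelProvider.GPT
  const fileName = await api.file.upload(file);
  // props.imageSelected({ fileName, ... }) follows, as in the hunk above.
};
```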

View File

@ -1071,12 +1071,12 @@ export function Settings() {
>
<input
type="text"
- value={accessStore.googleUrl}
+ value={accessStore.googleBaseUrl}
placeholder={Google.ExampleEndpoint}
onChange={(e) =>
accessStore.update(
(access) =>
- (access.googleUrl = e.currentTarget.value),
+ (access.googleBaseUrl = e.currentTarget.value),
)
}
></input>

View File

@ -29,7 +29,7 @@ declare global {
// google only
GOOGLE_API_KEY?: string;
- GOOGLE_URL?: string;
+ GOOGLE_BASE_URL?: string;
}
}
}
@ -87,7 +87,7 @@ export const getServerSideConfig = () => {
isGoogle,
googleApiKey: process.env.GOOGLE_API_KEY,
- googleUrl: process.env.GOOGLE_URL,
+ googleBaseUrl: process.env.GOOGLE_BASE_URL,
needCode: ACCESS_CODES.size > 0,
code: process.env.CODE,

View File

@ -99,8 +99,8 @@ export const Azure = {
export const Google = {
ExampleEndpoint:
"https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent",
ChatPath: "v1beta/models/gemini-pro:generateContent",
"https://generativelanguage.googleapis.com/v1/models/gemini-pro:generateContent",
ChatPath: "v1/models/gemini-pro:generateContent",
// /api/openai/v1/chat/completions
};
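The endpoint moves from the `v1beta` API surface to stable `v1`. For reference, a hedged sketch of a direct request against the new path, with the documented `contents`/`parts` body shape (key and prompt are placeholders):

```ts
// Direct call against the stable v1 surface; the key is a placeholder.
const res = await fetch(
  "https://generativelanguage.googleapis.com/v1/models/gemini-pro:generateContent?key=YOUR_API_KEY",
  {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      contents: [{ role: "user", parts: [{ text: "Hello" }] }],
    }),
  },
);
console.log((await res.json()).candidates?.[0]?.content?.parts?.[0]?.text);
```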

View File

@ -30,7 +30,7 @@ const DEFAULT_ACCESS_STATE = {
azureApiVersion: "2023-08-01-preview",
// google ai studio
googleUrl: "",
googleBaseUrl: "",
googleApiKey: "",
googleApiVersion: "v1",

View File

@ -662,7 +662,7 @@ export const useChatStore = createPersistStore(
session.memoryPrompt = message;
},
onFinish(message) {
console.log("[Memory] ", message);
// console.log("[Memory] ", message);
get().updateCurrentSession((session) => {
session.lastSummarizeIndex = lastSummarizeIndex;
session.memoryPrompt = message; // Update the memory prompt for stored it in local storage