commit c62b372165
Author: GH Action - Upstream Sync
Date:   2024-04-26 00:26:31 +00:00

10 changed files with 91 additions and 36 deletions

View File

@@ -13,6 +13,7 @@ const DANGER_CONFIG = {
   hideBalanceQuery: serverConfig.hideBalanceQuery,
   disableFastLink: serverConfig.disableFastLink,
   customModels: serverConfig.customModels,
+  defaultModel: serverConfig.defaultModel,
 };

 declare global {
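
Taken together, the changes in this commit thread a new defaultModel setting from the server environment through to the client. A minimal sketch of the flow, with field names taken from the hunks in this commit (the route and file names are not shown in this view and are assumed):

// Flow sketch (assumed, based on the hunks below):
// DEFAULT_MODEL env var -> getServerSideConfig().defaultModel
//   -> DANGER_CONFIG.defaultModel (this route)
//   -> useAccessStore fetch -> DEFAULT_CONFIG.modelConfig.model
type DangerConfigShape = {
  hideBalanceQuery: boolean;
  disableFastLink: boolean;
  customModels: string;
  defaultModel: string; // added by this commit
};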

View File

@@ -21,11 +21,10 @@ export class GeminiProApi implements LLMApi {
   }
   async chat(options: ChatOptions): Promise<void> {
     // const apiClient = this;
-    const visionModel = isVisionModel(options.config.model);
     let multimodal = false;
     const messages = options.messages.map((v) => {
       let parts: any[] = [{ text: getMessageTextContent(v) }];
-      if (visionModel) {
+      if (isVisionModel(options.config.model)) {
         const images = getMessageImages(v);
         if (images.length > 0) {
           multimodal = true;
@@ -117,17 +116,12 @@ export class GeminiProApi implements LLMApi {
     const controller = new AbortController();
     options.onController?.(controller);
     try {
-      let googleChatPath = visionModel
-        ? Google.VisionChatPath(modelConfig.model)
-        : Google.ChatPath(modelConfig.model);
-      let chatPath = this.path(googleChatPath);
       // let baseUrl = accessStore.googleUrl;

       if (!baseUrl) {
         baseUrl = isApp
-          ? DEFAULT_API_HOST + "/api/proxy/google/" + googleChatPath
-          : chatPath;
+          ? DEFAULT_API_HOST + "/api/proxy/google/" + Google.ChatPath(modelConfig.model)
+          : this.path(Google.ChatPath(modelConfig.model));
       }

       if (isApp) {
@@ -145,6 +139,7 @@ export class GeminiProApi implements LLMApi {
       () => controller.abort(),
       REQUEST_TIMEOUT_MS,
     );
+
     if (shouldStream) {
       let responseText = "";
       let remainText = "";
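
The removed branch was redundant: as the constants file later in this commit shows, Google.ChatPath and the deleted Google.VisionChatPath built the identical string, because Gemini's generateContent endpoint accepts text-only and multimodal requests alike. A worked example of the surviving path builder:

// The single remaining path builder (from the constants hunk in this commit):
const ChatPath = (modelName: string) =>
  `v1beta/models/${modelName}:generateContent`;

// Same endpoint shape for both text and vision models:
ChatPath("gemini-pro");        // "v1beta/models/gemini-pro:generateContent"
ChatPath("gemini-pro-vision"); // "v1beta/models/gemini-pro-vision:generateContent"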

View File

@@ -129,7 +129,7 @@ export class ChatGPTApi implements LLMApi {
     };

     // add max_tokens to vision model
-    if (visionModel) {
+    if (visionModel && modelConfig.model.includes("preview")) {
       requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
     }
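
The tightened guard matches API behavior: gpt-4-vision-preview reportedly truncates responses after a small default completion limit unless max_tokens is sent explicitly, while non-preview vision models are fine with the server default. A minimal sketch of the predicate (the helper name is ours, not the project's):

// Hypothetical helper expressing the condition above; matching on "preview"
// is a heuristic for early vision models such as gpt-4-vision-preview.
function needsExplicitMaxTokens(model: string, isVision: boolean): boolean {
  return isVision && model.includes("preview");
}

needsExplicitMaxTokens("gpt-4-vision-preview", true); // true  -> send max_tokens
needsExplicitMaxTokens("gpt-4-turbo", true);          // false -> use server default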

View File

@@ -448,10 +448,20 @@ export function ChatActions(props: {
   // switch model
   const currentModel = chatStore.currentSession().mask.modelConfig.model;
   const allModels = useAllModels();
-  const models = useMemo(
-    () => allModels.filter((m) => m.available),
-    [allModels],
-  );
+  const models = useMemo(() => {
+    const filteredModels = allModels.filter((m) => m.available);
+    const defaultModel = filteredModels.find((m) => m.isDefault);
+
+    if (defaultModel) {
+      const arr = [
+        defaultModel,
+        ...filteredModels.filter((m) => m !== defaultModel),
+      ];
+      return arr;
+    } else {
+      return filteredModels;
+    }
+  }, [allModels]);

   const [showModelSelector, setShowModelSelector] = useState(false);
   const [showUploadImage, setShowUploadImage] = useState(false);
@@ -467,7 +477,10 @@ export function ChatActions(props: {
     // switch to first available model
     const isUnavaliableModel = !models.some((m) => m.name === currentModel);
     if (isUnavaliableModel && models.length > 0) {
-      const nextModel = models[0].name as ModelType;
+      // prefer the default model when switching, if one exists
+      let nextModel: ModelType = (
+        models.find((model) => model.isDefault) || models[0]
+      ).name;
       chatStore.updateCurrentSession(
         (session) => (session.mask.modelConfig.model = nextModel),
       );
@@ -1102,11 +1115,13 @@ function _Chat() {
   };
   // eslint-disable-next-line react-hooks/exhaustive-deps
   }, []);

   const handlePaste = useCallback(
     async (event: React.ClipboardEvent<HTMLTextAreaElement>) => {
       const currentModel = chatStore.currentSession().mask.modelConfig.model;
-      if(!isVisionModel(currentModel)){return;}
+      if (!isVisionModel(currentModel)) {
+        return;
+      }
       const items = (event.clipboardData || window.clipboardData).items;
       for (const item of items) {
         if (item.kind === "file" && item.type.startsWith("image/")) {
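
The same hoist-the-default pattern appears twice in this file: once to order the selector list and once to pick a fallback when the current model is unavailable. A self-contained sketch of both, with the type reduced to the fields this diff uses:

interface ModelItem {
  name: string;
  available: boolean;
  isDefault?: boolean;
}

// Order: the default model first, then the remaining available models.
function orderModels(all: ModelItem[]): ModelItem[] {
  const filtered = all.filter((m) => m.available);
  const def = filtered.find((m) => m.isDefault);
  return def ? [def, ...filtered.filter((m) => m !== def)] : filtered;
}

// Fallback: prefer the default model over the first list entry.
function pickNextModel(models: ModelItem[]): string {
  return (models.find((m) => m.isDefault) || models[0]).name;
}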

View File

@@ -21,6 +21,7 @@ declare global {
     ENABLE_BALANCE_QUERY?: string; // allow user to query balance or not
     DISABLE_FAST_LINK?: string; // disallow parse settings from url or not
     CUSTOM_MODELS?: string; // to control custom models
+    DEFAULT_MODEL?: string; // to control the default model in every new chat window

     // azure only
     AZURE_URL?: string; // https://{azure-url}/openai/deployments/{deploy-name}
@@ -59,12 +60,14 @@ export const getServerSideConfig = () => {
   const disableGPT4 = !!process.env.DISABLE_GPT4;
   let customModels = process.env.CUSTOM_MODELS ?? "";
+  let defaultModel = process.env.DEFAULT_MODEL ?? "";

   if (disableGPT4) {
     if (customModels) customModels += ",";
     customModels += DEFAULT_MODELS.filter((m) => m.name.startsWith("gpt-4"))
       .map((m) => "-" + m.name)
       .join(",");
+    if (defaultModel.startsWith("gpt-4")) defaultModel = "";
   }

   const isAzure = !!process.env.AZURE_URL;
@@ -116,6 +119,7 @@ export const getServerSideConfig = () => {
     hideBalanceQuery: !process.env.ENABLE_BALANCE_QUERY,
     disableFastLink: !!process.env.DISABLE_FAST_LINK,
     customModels,
+    defaultModel,
     whiteWebDevEndpoints,
   };
 };
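
Behavior sketch of the new environment handling: DEFAULT_MODEL survives unless DISABLE_GPT4 is set and the chosen default is a GPT-4 variant, in which case it is cleared. The model names here are illustrative, and the helper is a hypothetical condensation of the logic above:

function resolveDefaultModel(env: {
  DEFAULT_MODEL?: string;
  DISABLE_GPT4?: string;
}): string {
  let defaultModel = env.DEFAULT_MODEL ?? "";
  if (!!env.DISABLE_GPT4 && defaultModel.startsWith("gpt-4")) {
    defaultModel = ""; // a disabled model family cannot be the default
  }
  return defaultModel;
}

resolveDefaultModel({ DEFAULT_MODEL: "gpt-3.5-turbo" });                  // "gpt-3.5-turbo"
resolveDefaultModel({ DEFAULT_MODEL: "gpt-4-turbo", DISABLE_GPT4: "1" }); // ""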

View File

@@ -99,7 +99,6 @@ export const Azure = {
 export const Google = {
   ExampleEndpoint: "https://generativelanguage.googleapis.com/",
   ChatPath: (modelName: string) => `v1beta/models/${modelName}:generateContent`,
-  VisionChatPath: (modelName: string) => `v1beta/models/${modelName}:generateContent`,
 };

 export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
@@ -130,8 +129,6 @@ export const KnowledgeCutOffDate: Record<string, string> = {
   "gpt-4-turbo": "2023-12",
   "gpt-4-turbo-2024-04-09": "2023-12",
   "gpt-4-turbo-preview": "2023-12",
-  "gpt-4-1106-preview": "2023-04",
-  "gpt-4-0125-preview": "2023-12",
   "gpt-4-vision-preview": "2023-04",
   // After improvements,
   // it's now easier to add "KnowledgeCutOffDate" instead of hardcoding it, as was done previously.
@@ -149,19 +146,11 @@ export const ModelNameToName: Record<string, string> = {

 const openaiModels = [
   "gpt-3.5-turbo",
-  "gpt-3.5-turbo-0301",
-  "gpt-3.5-turbo-0613",
   "gpt-3.5-turbo-1106",
   "gpt-3.5-turbo-0125",
-  "gpt-3.5-turbo-16k",
-  "gpt-3.5-turbo-16k-0613",
   "gpt-4",
-  "gpt-4-0314",
   "gpt-4-0613",
-  "gpt-4-1106-preview",
-  "gpt-4-0125-preview",
   "gpt-4-32k",
-  "gpt-4-32k-0314",
   "gpt-4-32k-0613",
   "gpt-4-turbo",
   "gpt-4-turbo-preview",

View File

@@ -8,6 +8,7 @@ import { getHeaders } from "../client/api";
 import { getClientConfig } from "../config/client";
 import { createPersistStore } from "../utils/store";
 import { ensure } from "../utils/clone";
+import { DEFAULT_CONFIG } from "./config";

 let fetchState = 0; // 0 not fetched, 1 fetching, 2 done
@@ -48,6 +49,7 @@ const DEFAULT_ACCESS_STATE = {
   disableGPT4: false,
   disableFastLink: false,
   customModels: "",
+  defaultModel: "",
 };

 export const useAccessStore = createPersistStore(
@@ -100,6 +102,13 @@ export const useAccessStore = createPersistStore(
       },
     })
       .then((res) => res.json())
+      .then((res) => {
+        // set the default model from the server config response
+        let defaultModel = res.defaultModel ?? "";
+        DEFAULT_CONFIG.modelConfig.model =
+          defaultModel !== "" ? defaultModel : "gpt-3.5-turbo";
+        return res;
+      })
       .then((res: DangerConfig) => {
         console.log("[Config] got config from server", res);
         set(() => ({ ...res }));

View File

@@ -86,6 +86,7 @@
     @include dark;
   }
 }
+
 html {
   height: var(--full-height);
@@ -110,6 +111,10 @@ body {
   @media only screen and (max-width: 600px) {
     background-color: var(--second);
   }
+
+  *:focus-visible {
+    outline: none;
+  }
 }

 ::-webkit-scrollbar {

View File

@@ -1,14 +1,15 @@
 import { useMemo } from "react";
 import { useAccessStore, useAppConfig } from "../store";
-import { collectModels } from "./model";
+import { collectModels, collectModelsWithDefaultModel } from "./model";

 export function useAllModels() {
   const accessStore = useAccessStore();
   const configStore = useAppConfig();
   const models = useMemo(() => {
-    return collectModels(
+    return collectModelsWithDefaultModel(
       configStore.models,
       [configStore.customModels, accessStore.customModels].join(","),
+      accessStore.defaultModel,
     );
   }, [accessStore.customModels, configStore.customModels, configStore.models]);

View File

@@ -1,5 +1,11 @@
 import { LLMModel } from "../client/api";

+const customProvider = (modelName: string) => ({
+  id: modelName,
+  providerName: "",
+  providerType: "custom",
+});
+
 export function collectModelTable(
   models: readonly LLMModel[],
   customModels: string,
@@ -11,6 +17,7 @@ export function collectModelTable(
       name: string;
       displayName: string;
       provider?: LLMModel["provider"]; // Marked as optional
+      isDefault?: boolean;
     }
   > = {};
@@ -22,12 +29,6 @@ export function collectModelTable(
     };
   });

-  const customProvider = (modelName: string) => ({
-    id: modelName,
-    providerName: "",
-    providerType: "custom",
-  });
-
   // server custom models
   customModels
     .split(",")
@@ -52,6 +53,27 @@ export function collectModelTable(
       };
     }
   });
+
+  return modelTable;
+}
+
+export function collectModelTableWithDefaultModel(
+  models: readonly LLMModel[],
+  customModels: string,
+  defaultModel: string,
+) {
+  let modelTable = collectModelTable(models, customModels);
+  if (defaultModel && defaultModel !== "") {
+    delete modelTable[defaultModel];
+    modelTable[defaultModel] = {
+      name: defaultModel,
+      displayName: defaultModel,
+      available: true,
+      provider:
+        modelTable[defaultModel]?.provider ?? customProvider(defaultModel),
+      isDefault: true,
+    };
+  }

   return modelTable;
 }
@@ -67,3 +89,17 @@ export function collectModels(
   return allModels;
 }
+
+export function collectModelsWithDefaultModel(
+  models: readonly LLMModel[],
+  customModels: string,
+  defaultModel: string,
+) {
+  const modelTable = collectModelTableWithDefaultModel(
+    models,
+    customModels,
+    defaultModel,
+  );
+  const allModels = Object.values(modelTable);
+  return allModels;
+}
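
A hypothetical call to the new helper (the model and provider values are illustrative, not from the diff). One consequence of the implementation worth noting: because the entry is deleted before its provider is read back, modelTable[defaultModel]?.provider is always undefined at that point, so the default model ends up with the custom provider even when it came from a built-in list:

const models = collectModelsWithDefaultModel(
  [
    {
      name: "gpt-4",
      available: true,
      provider: { id: "openai", providerName: "OpenAI", providerType: "openai" },
    },
  ],
  "",      // no custom models configured
  "gpt-4", // DEFAULT_MODEL resolved by the server
);
// The "gpt-4" entry now carries isDefault: true, and (per the note above)
// its provider is customProvider("gpt-4") rather than the original one.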