Mirror of https://github.com/Yidadaa/ChatGPT-Next-Web.git (synced 2025-08-09 12:31:58 +08:00)

Commit: merge main
@@ -17,6 +17,11 @@ const DEFAULT_OPENAI_URL =
     ? DEFAULT_API_HOST + "/api/proxy/openai"
     : ApiPath.OpenAI;
 
+const DEFAULT_AZURE_URL =
+  getClientConfig()?.buildMode === "export"
+    ? DEFAULT_API_HOST + "/api/proxy/azure/{resource_name}"
+    : ApiPath.Azure;
+
 const DEFAULT_ACCESS_STATE = {
   accessCode: "",
   useCustomConfig: false,
@@ -28,7 +33,7 @@ const DEFAULT_ACCESS_STATE = {
   openaiApiKey: "",
 
   // azure
-  azureUrl: "",
+  azureUrl: DEFAULT_AZURE_URL,
   azureApiKey: "",
   azureApiVersion: "2023-08-01-preview",
 
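Note that DEFAULT_AZURE_URL keeps the literal {resource_name} placeholder; this hunk only changes which default the access store starts from. A minimal sketch of how such a template could be expanded before a request goes out, assuming a hypothetical helper and a user-supplied resource name (neither is part of this commit):

    // Hypothetical helper, not project code: fill in the Azure resource name.
    function resolveAzureEndpoint(template: string, resourceName: string): string {
      // ".../api/proxy/azure/{resource_name}" -> ".../api/proxy/azure/my-resource"
      return template.replace("{resource_name}", encodeURIComponent(resourceName));
    }

    // Usage sketch: resolveAzureEndpoint(accessStore.azureUrl, "my-resource");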
@@ -42,6 +47,19 @@ const DEFAULT_ACCESS_STATE = {
   anthropicApiVersion: "2023-06-01",
   anthropicUrl: "",
 
+  // baidu
+  baiduUrl: "",
+  baiduApiKey: "",
+  baiduSecretKey: "",
+
+  // bytedance
+  bytedanceApiKey: "",
+  bytedanceUrl: "",
+
+  // alibaba
+  alibabaUrl: "",
+  alibabaApiKey: "",
+
   // server config
   needCode: true,
   hideUserApiKey: false,
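The new Baidu, ByteDance, and Alibaba fields are persisted next to the existing OpenAI and Azure settings, so client code can read them the same way. A usage sketch, assuming the zustand-style getState() accessor that createPersistStore exposes (the variable names are illustrative):

    // Illustrative only: read the new provider credentials from the access store.
    const access = useAccessStore.getState();

    // ByteDance and Alibaba each use a single API key,
    // while Baidu keeps an API key / secret key pair.
    const bytedanceKey = access.bytedanceApiKey;
    const baiduCredentials = {
      apiKey: access.baiduApiKey,
      secretKey: access.baiduSecretKey,
    };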
@@ -78,6 +96,18 @@ export const useAccessStore = createPersistStore(
       return ensure(get(), ["anthropicApiKey"]);
     },
 
+    isValidBaidu() {
+      return ensure(get(), ["baiduApiKey", "baiduSecretKey"]);
+    },
+
+    isValidByteDance() {
+      return ensure(get(), ["bytedanceApiKey"]);
+    },
+
+    isValidAlibaba() {
+      return ensure(get(), ["alibabaApiKey"]);
+    },
+
     isAuthorized() {
       this.fetch();
 
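Each new isValid* method reuses the same ensure helper as the existing providers: it passes the current state plus the field names that must be filled in. The helper itself is not shown in this diff; a minimal sketch of the behavior these calls rely on, treating a field as valid only when it is a non-empty string (signature and details are assumptions):

    // Hypothetical sketch of the ensure helper used above.
    function ensure<T extends object>(state: T, keys: Array<keyof T>): boolean {
      return keys.every((k) => {
        const value = state[k];
        return typeof value === "string" && value.length > 0;
      });
    }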
@@ -87,6 +117,9 @@ export const useAccessStore = createPersistStore(
         this.isValidAzure() ||
         this.isValidGoogle() ||
         this.isValidAnthropic() ||
+        this.isValidBaidu() ||
+        this.isValidByteDance() ||
+        this.isValidAlibaba() ||
         !this.enabledAccessControl() ||
         (this.enabledAccessControl() && ensure(get(), ["accessCode"]))
       );
@@ -9,18 +9,23 @@ import {
   DEFAULT_MODELS,
   DEFAULT_SYSTEM_TEMPLATE,
   KnowledgeCutOffDate,
+  ServiceProvider,
   ModelProvider,
   StoreKey,
   SUMMARIZE_MODEL,
   GEMINI_SUMMARIZE_MODEL,
 } from "../constant";
-import { ClientApi, RequestMessage, MultimodalContent } from "../client/api";
+import { getClientApi } from "../client/api";
+import type {
+  ClientApi,
+  RequestMessage,
+  MultimodalContent,
+} from "../client/api";
 import { ChatControllerPool } from "../client/controller";
 import { prettyObject } from "../utils/format";
 import { estimateTokenLength } from "../utils/token";
 import { nanoid } from "nanoid";
 import { createPersistStore } from "../utils/store";
-import { identifyDefaultClaudeModel } from "../utils/checkers";
 import { collectModelsWithDefaultModel } from "../utils/model";
 import { useAccessStore } from "./access";
 
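Splitting the import leaves getClientApi as the only runtime value pulled from ../client/api, while ClientApi, RequestMessage, and MultimodalContent become type-only imports that are erased from the emitted JavaScript. A small self-contained illustration of that distinction (file and symbol names are made up):

    // greeter.ts
    export interface Greeter {
      greet(name: string): string;
    }
    export function makeGreeter(): Greeter {
      return { greet: (name) => "hello " + name };
    }

    // main.ts
    import type { Greeter } from "./greeter"; // erased at compile time, type positions only
    import { makeGreeter } from "./greeter"; // kept as a runtime import

    const g: Greeter = makeGreeter();
    console.log(g.greet("world"));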
@@ -363,15 +368,7 @@ export const useChatStore = createPersistStore(
         ]);
       });
 
-      var api: ClientApi;
-      if (modelConfig.model.startsWith("gemini")) {
-        api = new ClientApi(ModelProvider.GeminiPro);
-      } else if (identifyDefaultClaudeModel(modelConfig.model)) {
-        api = new ClientApi(ModelProvider.Claude);
-      } else {
-        api = new ClientApi(ModelProvider.GPT);
-      }
-
+      const api: ClientApi = getClientApi(modelConfig.providerName);
       // make request
       api.llm.chat({
         messages: sendMessages,
@@ -547,14 +544,7 @@
       const session = get().currentSession();
       const modelConfig = session.mask.modelConfig;
 
-      var api: ClientApi;
-      if (modelConfig.model.startsWith("gemini")) {
-        api = new ClientApi(ModelProvider.GeminiPro);
-      } else if (identifyDefaultClaudeModel(modelConfig.model)) {
-        api = new ClientApi(ModelProvider.Claude);
-      } else {
-        api = new ClientApi(ModelProvider.GPT);
-      }
+      const api: ClientApi = getClientApi(modelConfig.providerName);
 
       // remove error messages if any
       const messages = session.messages;
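Both call sites now pick the client from the session's configured provider instead of pattern-matching the model name, which is why the identifyDefaultClaudeModel import could be dropped. getClientApi itself is defined in ../client/api and is not shown in this diff; a plausible sketch of the mapping it would need, using only the providers visible in the removed code (the ServiceProvider member names and the handling of Baidu, ByteDance, and Alibaba are assumptions):

    // Hypothetical sketch only; the real implementation lives in ../client/api.
    import { ClientApi } from "../client/api";
    import { ModelProvider, ServiceProvider } from "../constant";

    export function getClientApi(provider: ServiceProvider): ClientApi {
      switch (provider) {
        case ServiceProvider.Google:
          return new ClientApi(ModelProvider.GeminiPro);
        case ServiceProvider.Anthropic:
          return new ClientApi(ModelProvider.Claude);
        // Baidu, ByteDance, and Alibaba would map to their own ModelProvider
        // entries here once their clients exist.
        default:
          return new ClientApi(ModelProvider.GPT);
      }
    }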
@@ -5,6 +5,7 @@ import {
   DEFAULT_MODELS,
   DEFAULT_SIDEBAR_WIDTH,
   StoreKey,
+  ServiceProvider,
 } from "../constant";
 import { createPersistStore } from "../utils/store";
 
@@ -48,6 +49,7 @@ export const DEFAULT_CONFIG = {
 
   modelConfig: {
     model: "gpt-3.5-turbo" as ModelType,
+    providerName: "OpenAI" as ServiceProvider,
     temperature: 0.5,
     top_p: 1,
     max_tokens: 4000,
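The default modelConfig now records which provider the model belongs to, with OpenAI as the fallback. The cast assumes ServiceProvider is a string-valued enum (or union) that includes an OpenAI member; a sketch of one shape that would satisfy this, with the member list guessed from the providers this commit touches:

    // Assumed shape only; the real declaration lives in ../constant.
    export enum ServiceProvider {
      OpenAI = "OpenAI",
      Azure = "Azure",
      Google = "Google",
      Anthropic = "Anthropic",
      Baidu = "Baidu",
      ByteDance = "ByteDance",
      Alibaba = "Alibaba",
    }

    const providerName = "OpenAI" as ServiceProvider;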
@@ -116,12 +118,12 @@ export const useAppConfig = createPersistStore(
 
       for (const model of oldModels) {
         model.available = false;
-        modelMap[model.name] = model;
+        modelMap[`${model.name}@${model?.provider?.id}`] = model;
       }
 
       for (const model of newModels) {
         model.available = true;
-        modelMap[model.name] = model;
+        modelMap[`${model.name}@${model?.provider?.id}`] = model;
       }
 
       set(() => ({
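Keying modelMap by name alone lets two models with the same name from different providers overwrite each other during the merge; the composite name@provider.id key keeps one entry per (name, provider) pair. A small standalone illustration of the difference (the data is made up):

    // Standalone illustration; not project code.
    type Model = { name: string; available: boolean; provider?: { id: string } };

    const models: Model[] = [
      { name: "gpt-4", available: true, provider: { id: "openai" } },
      { name: "gpt-4", available: true, provider: { id: "azure" } },
    ];

    const byName: Record<string, Model> = {};
    const byNameAndProvider: Record<string, Model> = {};
    for (const m of models) {
      byName[m.name] = m; // the second entry silently replaces the first
      byNameAndProvider[`${m.name}@${m.provider?.id}`] = m; // both survive
    }

    console.log(Object.keys(byName)); // ["gpt-4"]
    console.log(Object.keys(byNameAndProvider)); // ["gpt-4@openai", "gpt-4@azure"]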