Compare commits


9 Commits
v3 ... v2.12.2

Author      SHA1        Message                                                                                                                       Date
fred-bf     52312dbd23  Merge pull request #4595 from ChatGPTNextWeb/feat/bump-version (feat: bump version code)                                     2024-04-30 13:28:30 +08:00
Fred        b2e8a1eaa2  feat: bump version code                                                                                                       2024-04-30 13:27:07 +08:00
DeanYao     506c17a093  Merge pull request #4564 from MrrDrr/gpt4v_remove_max_tokens (remove max_tokens from the official version of gpt4-turbo)     2024-04-25 13:01:21 +08:00
DeanYao     69642fba52  Merge pull request #4557 from RoyRao2333/dev/no-fucos-outline (chore: No outline when element is in `:focus-visible` state)  2024-04-25 12:58:19 +08:00
DeanYao     7d647c981f  Merge pull request #4535 from RubuJam/main (Refer to OpenAI documentation to delete some models.)                            2024-04-25 11:44:01 +08:00
l.tingting  dd4648ed9a  remove max_tokens from the official version of gpt4-turbo                                                                    2024-04-24 22:59:14 +08:00
Roy         1cd0beb231  chore: No outline when element is in :focus-visible state                                                                    2024-04-23 11:48:54 +08:00
黑云白土     b7aab3c102  Update google.ts                                                                                                              2024-04-17 17:16:31 +08:00
黑云白土     fcb1a657e3  Update constant.ts                                                                                                            2024-04-17 16:24:11 +08:00
5 changed files with 11 additions and 22 deletions

View File

@@ -21,11 +21,10 @@ export class GeminiProApi implements LLMApi {
   }
   async chat(options: ChatOptions): Promise<void> {
     // const apiClient = this;
-    const visionModel = isVisionModel(options.config.model);
     let multimodal = false;
     const messages = options.messages.map((v) => {
       let parts: any[] = [{ text: getMessageTextContent(v) }];
-      if (visionModel) {
+      if (isVisionModel(options.config.model)) {
         const images = getMessageImages(v);
         if (images.length > 0) {
           multimodal = true;
@@ -117,17 +116,12 @@ export class GeminiProApi implements LLMApi {
     const controller = new AbortController();
     options.onController?.(controller);
     try {
-      let googleChatPath = visionModel
-        ? Google.VisionChatPath(modelConfig.model)
-        : Google.ChatPath(modelConfig.model);
-      let chatPath = this.path(googleChatPath);
       // let baseUrl = accessStore.googleUrl;
       if (!baseUrl) {
         baseUrl = isApp
-          ? DEFAULT_API_HOST + "/api/proxy/google/" + googleChatPath
-          : chatPath;
+          ? DEFAULT_API_HOST + "/api/proxy/google/" + Google.ChatPath(modelConfig.model)
+          : this.path(Google.ChatPath(modelConfig.model));
       }
       if (isApp) {
@@ -145,6 +139,7 @@ export class GeminiProApi implements LLMApi {
         () => controller.abort(),
         REQUEST_TIMEOUT_MS,
       );
       if (shouldStream) {
         let responseText = "";
         let remainText = "";
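
The net effect of this hunk: the Gemini client now builds a single request path for both text and vision models via Google.ChatPath(modelConfig.model), and the vision check happens inline with isVisionModel() while mapping messages. Below is a minimal TypeScript sketch of the resulting URL construction; buildGoogleChatUrl and the placeholder host are assumptions for illustration, and only Google.ChatPath, DEFAULT_API_HOST and the "/api/proxy/google/" prefix come from the diff above.

// Sketch only: mirrors the post-change control flow of chat(), not the exact file contents.
const Google = {
  ChatPath: (modelName: string) => `v1beta/models/${modelName}:generateContent`,
};
const DEFAULT_API_HOST = "https://example-nextchat-host.dev"; // placeholder value, assumed

// Hypothetical helper standing in for the inline logic around this.path(...) in chat().
function buildGoogleChatUrl(model: string, baseUrl: string, isApp: boolean): string {
  const chatPath = Google.ChatPath(model); // same endpoint for text and vision models now
  if (!baseUrl) {
    return isApp
      ? DEFAULT_API_HOST + "/api/proxy/google/" + chatPath
      : "/" + chatPath;
  }
  return baseUrl.replace(/\/+$/, "") + "/" + chatPath;
}

// e.g. buildGoogleChatUrl("gemini-pro-vision", "", true)
// -> "https://example-nextchat-host.dev/api/proxy/google/v1beta/models/gemini-pro-vision:generateContent"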

View File

@@ -129,7 +129,7 @@ export class ChatGPTApi implements LLMApi {
       };
       // add max_tokens to vision model
-      if (visionModel) {
+      if (visionModel && modelConfig.model.includes("preview")) {
         requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
       }
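
The effect of this change: max_tokens is now attached only for vision-capable "preview" models (e.g. gpt-4-vision-preview), so requests to the official gpt-4-turbo no longer carry it, which matches the commit message "remove max_tokens from the official version of gpt4-turbo". A hedged sketch of the resulting behavior follows; the payload and config types are simplified stand-ins, not the real interfaces.

// Simplified sketch of the post-change condition shown in the diff above.
interface ModelConfigSketch {
  model: string;
  max_tokens: number;
}

function applyVisionMaxTokens(
  requestPayload: Record<string, unknown>,
  modelConfig: ModelConfigSketch,
  visionModel: boolean,
): Record<string, unknown> {
  // add max_tokens to vision model, but only for "preview" snapshots now
  if (visionModel && modelConfig.model.includes("preview")) {
    requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
  }
  return requestPayload;
}

// gpt-4-vision-preview still receives max_tokens; the official gpt-4-turbo does not.
applyVisionMaxTokens({}, { model: "gpt-4-vision-preview", max_tokens: 2000 }, true); // { max_tokens: 4000 }
applyVisionMaxTokens({}, { model: "gpt-4-turbo", max_tokens: 2000 }, true);          // {}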

View File

@@ -99,7 +99,6 @@ export const Azure = {
 export const Google = {
   ExampleEndpoint: "https://generativelanguage.googleapis.com/",
   ChatPath: (modelName: string) => `v1beta/models/${modelName}:generateContent`,
-  VisionChatPath: (modelName: string) => `v1beta/models/${modelName}:generateContent`,
 };
 export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
@@ -128,8 +127,6 @@ export const KnowledgeCutOffDate: Record<string, string> = {
   "gpt-4-turbo": "2023-12",
   "gpt-4-turbo-2024-04-09": "2023-12",
   "gpt-4-turbo-preview": "2023-12",
-  "gpt-4-1106-preview": "2023-04",
-  "gpt-4-0125-preview": "2023-12",
   "gpt-4-vision-preview": "2023-04",
   // After improvements,
   // it's now easier to add "KnowledgeCutOffDate" instead of stupid hardcoding it, as was done previously.
@@ -139,19 +136,11 @@ export const KnowledgeCutOffDate: Record<string, string> = {
 const openaiModels = [
   "gpt-3.5-turbo",
-  "gpt-3.5-turbo-0301",
-  "gpt-3.5-turbo-0613",
   "gpt-3.5-turbo-1106",
   "gpt-3.5-turbo-0125",
-  "gpt-3.5-turbo-16k",
-  "gpt-3.5-turbo-16k-0613",
   "gpt-4",
-  "gpt-4-0314",
   "gpt-4-0613",
-  "gpt-4-1106-preview",
-  "gpt-4-0125-preview",
   "gpt-4-32k",
-  "gpt-4-32k-0314",
   "gpt-4-32k-0613",
   "gpt-4-turbo",
   "gpt-4-turbo-preview",

View File

@@ -86,6 +86,7 @@
     @include dark;
   }
 }
 html {
   height: var(--full-height);
@@ -110,6 +111,10 @@ body {
   @media only screen and (max-width: 600px) {
     background-color: var(--second);
   }
+
+  *:focus-visible {
+    outline: none;
+  }
 }
 ::-webkit-scrollbar {

View File

@@ -9,7 +9,7 @@
   },
   "package": {
     "productName": "NextChat",
-    "version": "2.11.3"
+    "version": "2.12.2"
   },
   "tauri": {
     "allowlist": {