From 536ace8e10553c6101308ec09f2fa65bc84d2416 Mon Sep 17 00:00:00 2001
From: Yidadaa
Date: Sun, 19 Nov 2023 18:24:51 +0800
Subject: [PATCH 1/4] feat: animate streaming response to make more smooth

---
 app/client/platforms/openai.ts | 26 +++++++++++++++++++++++---
 1 file changed, 23 insertions(+), 3 deletions(-)

diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts
index 930d60690..dc79d2cec 100644
--- a/app/client/platforms/openai.ts
+++ b/app/client/platforms/openai.ts
@@ -115,12 +115,33 @@ export class ChatGPTApi implements LLMApi {
 
     if (shouldStream) {
       let responseText = "";
+      let remainText = "";
       let finished = false;
 
+      // animate response to make it look smooth
+      function animateResponseText() {
+        if (finished || controller.signal.aborted) {
+          responseText += remainText;
+          console.log("[Response Animation] finished");
+          return;
+        }
+
+        if (remainText.length > 0) {
+          responseText += remainText[0];
+          remainText = remainText.slice(1);
+          options.onUpdate?.(responseText, remainText[0]);
+        }
+
+        requestAnimationFrame(animateResponseText);
+      }
+
+      // start animation
+      animateResponseText();
+
       const finish = () => {
         if (!finished) {
-          options.onFinish(responseText);
           finished = true;
+          options.onFinish(responseText + remainText);
        }
      };
 
@@ -183,8 +204,7 @@
             };
             const delta = json.choices[0]?.delta?.content;
             if (delta) {
-              responseText += delta;
-              options.onUpdate?.(responseText, delta);
+              remainText += delta;
             }
           } catch (e) {
             console.error("[Request] parse error", text);
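The idea in PATCH 1/4 is to decouple network chunks from UI updates: each incoming SSE delta is appended to a `remainText` buffer, and a `requestAnimationFrame` loop moves one character per frame into the visible `responseText`, so bursts of tokens render as a steady stream instead of jumping. Below is a minimal standalone sketch of the same technique; the factory name `createSmoothStream` and its `push`/`finish`/`onUpdate` surface are illustrative only and are not part of the project's actual `LLMApi` interface.

```ts
// Minimal sketch of the character-per-frame buffering technique (illustrative names).
type OnUpdate = (fullText: string, delta: string) => void;

function createSmoothStream(onUpdate: OnUpdate) {
  let responseText = ""; // text already shown to the user
  let remainText = "";   // text received from the network but not yet shown
  let finished = false;

  function animate() {
    if (finished) {
      // flush whatever is left once the stream has ended
      responseText += remainText;
      remainText = "";
      onUpdate(responseText, "");
      return;
    }
    if (remainText.length > 0) {
      // move exactly one character per animation frame into the visible text
      responseText += remainText[0];
      remainText = remainText.slice(1);
      onUpdate(responseText, remainText[0] ?? "");
    }
    requestAnimationFrame(animate);
  }

  animate();

  return {
    // call push(delta) for every SSE chunk, finish() when the stream ends
    push(delta: string) {
      remainText += delta;
    },
    finish() {
      finished = true;
    },
  };
}
```

One character per frame is roughly 60 characters per second at 60 fps, so fast models can outrun the animation; draining more than one character per frame when the buffer grows long is a straightforward variation on the same idea.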
From dc7159a4504682f6bfad104d5d03168412c550f1 Mon Sep 17 00:00:00 2001
From: Yidadaa
Date: Sun, 19 Nov 2023 18:32:54 +0800
Subject: [PATCH 2/4] feat: close #3301 enable or diable default models with -all / +all

---
 README.md          | 6 ++++--
 README_CN.md       | 5 +++--
 app/utils/model.ts | 6 ++++++
 3 files changed, 13 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index d4e304f9d..ef7f2e1d9 100644
--- a/README.md
+++ b/README.md
@@ -216,9 +216,11 @@ If you want to disable parse settings from url, set this to 1.
 
 ### `CUSTOM_MODELS` (optional)
 
 > Default: Empty
-> Example: `+llama,+claude-2,-gpt-3.5-turbo,gpt-4-1106-preview:gpt-4-turbo` means add `llama, claude-2` to model list, and remove `gpt-3.5-turbo` from list, and display `gpt-4-1106-preview` as `gpt-4-turbo`.
+> Example: `+llama,+claude-2,-gpt-3.5-turbo,gpt-4-1106-preview=gpt-4-turbo` means add `llama, claude-2` to model list, and remove `gpt-3.5-turbo` from list, and display `gpt-4-1106-preview` as `gpt-4-turbo`.
 
-To control custom models, use `+` to add a custom model, use `-` to hide a model, use `name:displayName` to customize model name, separated by comma.
+To control custom models, use `+` to add a custom model, use `-` to hide a model, use `name=displayName` to customize model name, separated by comma.
+
+Use `-all` to disable all default models, `+all` to enable all default models.
 
 ## Requirements

diff --git a/README_CN.md b/README_CN.md
index dde8c19b3..3b713255a 100644
--- a/README_CN.md
+++ b/README_CN.md
@@ -122,9 +122,10 @@ Azure Api 版本,你可以在这里找到:[Azure 文档](https://learn.micro
 
 ### `CUSTOM_MODELS` (可选)
 
-> 示例:`+qwen-7b-chat,+glm-6b,-gpt-3.5-turbo,gpt-4-1106-preview:gpt-4-turbo` 表示增加 `qwen-7b-chat` 和 `glm-6b` 到模型列表,而从列表中删除 `gpt-3.5-turbo`,并将 `gpt-4-1106-preview` 模型名字展示为 `gpt-4-turbo`。
+> 示例:`+qwen-7b-chat,+glm-6b,-gpt-3.5-turbo,gpt-4-1106-preview=gpt-4-turbo` 表示增加 `qwen-7b-chat` 和 `glm-6b` 到模型列表,而从列表中删除 `gpt-3.5-turbo`,并将 `gpt-4-1106-preview` 模型名字展示为 `gpt-4-turbo`。
+> 如果你想先禁用所有模型,再启用指定模型,可以使用 `-all,+gpt-3.5-turbo`,则表示仅启用 `gpt-3.5-turbo`
 
-用来控制模型列表,使用 `+` 增加一个模型,使用 `-` 来隐藏一个模型,使用 `模型名:展示名` 来自定义模型的展示名,用英文逗号隔开。
+用来控制模型列表,使用 `+` 增加一个模型,使用 `-` 来隐藏一个模型,使用 `模型名=展示名` 来自定义模型的展示名,用英文逗号隔开。
 
 ## 开发

diff --git a/app/utils/model.ts b/app/utils/model.ts
index d5c009c02..bf7300806 100644
--- a/app/utils/model.ts
+++ b/app/utils/model.ts
@@ -27,6 +27,12 @@ export function collectModelTable(
       const nameConfig =
         m.startsWith("+") || m.startsWith("-") ? m.slice(1) : m;
       const [name, displayName] = nameConfig.split(":");
+
+      // enable or disable all models
+      if (name === "all") {
+        Object.values(modelTable).forEach((m) => (m.available = available));
+      }
+
       modelTable[name] = {
         name,
         displayName: displayName || name,

From 45b88ebb2a720c62d60e63a873004d3cd9734801 Mon Sep 17 00:00:00 2001
From: Yidadaa
Date: Sun, 19 Nov 2023 18:34:39 +0800
Subject: [PATCH 3/4] feat: close #3304 use `=` instead of `:` to map model name in CUSTOM_MODELS

---
 app/utils/model.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/app/utils/model.ts b/app/utils/model.ts
index bf7300806..74b28a66a 100644
--- a/app/utils/model.ts
+++ b/app/utils/model.ts
@@ -26,7 +26,7 @@ export function collectModelTable(
       const available = !m.startsWith("-");
       const nameConfig =
         m.startsWith("+") || m.startsWith("-") ? m.slice(1) : m;
-      const [name, displayName] = nameConfig.split(":");
+      const [name, displayName] = nameConfig.split("=");
 
       // enable or disable all models
       if (name === "all") {
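Taken together, PATCH 2/4 and PATCH 3/4 define the `CUSTOM_MODELS` grammar: comma-separated entries, `+name` to add a model, `-name` to hide one, `name=displayName` to rename one, and the special name `all` to toggle every default model at once, so `-all,+gpt-3.5-turbo` keeps only `gpt-3.5-turbo`. The sketch below is a slightly simplified, self-contained rendering of that parsing for illustration; `ModelRecord` and `buildModelTable` are invented names, and the real implementation is `collectModelTable` in `app/utils/model.ts`.

```ts
interface ModelRecord {
  name: string;
  displayName: string;
  available: boolean;
}

function buildModelTable(
  defaultModels: string[],
  customModels: string,
): ModelRecord[] {
  const table: Record<string, ModelRecord> = {};
  for (const name of defaultModels) {
    table[name] = { name, displayName: name, available: true };
  }

  for (const entry of customModels.split(",").filter((s) => s.length > 0)) {
    // "-" hides a model, "+" (or no prefix) adds/enables one
    const available = !entry.startsWith("-");
    const nameConfig =
      entry.startsWith("+") || entry.startsWith("-") ? entry.slice(1) : entry;
    // "=" maps a model name to a custom display name (PATCH 3/4)
    const [name, displayName] = nameConfig.split("=");

    if (name === "all") {
      // "-all" disables every default model, "+all" re-enables them (PATCH 2/4)
      Object.values(table).forEach((m) => (m.available = available));
      continue;
    }

    table[name] = { name, displayName: displayName || name, available };
  }

  return Object.values(table);
}

// e.g. CUSTOM_MODELS="-all,+gpt-3.5-turbo,gpt-4-1106-preview=gpt-4-turbo"
console.log(
  buildModelTable(
    ["gpt-3.5-turbo", "gpt-4"],
    "-all,+gpt-3.5-turbo,gpt-4-1106-preview=gpt-4-turbo",
  ),
);
```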
""; + const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim()); + const randomIndex = Math.floor(Math.random() * apiKeys.length); + const apiKey = apiKeys[randomIndex]; + console.log( + `[Server Config] using ${randomIndex + 1} of ${apiKeys.length} api key`, + ); + return { baseUrl: process.env.BASE_URL, - apiKey: process.env.OPENAI_API_KEY, + apiKey, openaiOrgId: process.env.OPENAI_ORG_ID, isAzure,