Merge branch 'ChatGPTNextWeb:main' into main

@@ -1,12 +1,20 @@
# Your openai api key. (required)
OPENAI_API_KEY=sk-xxxx

# DeepSeek Api Key. (Optional)
DEEPSEEK_API_KEY=

# Access password, separated by comma. (optional)
CODE=your-password

# You can start service behind a proxy. (optional)
PROXY_URL=http://localhost:7890

# Enable MCP functionality (optional)
# Default: Empty (disabled)
# Set to "true" to enable MCP functionality
ENABLE_MCP=

# (optional)
# Default: Empty
# Google Gemini Pro API key, set if you want to use Google Gemini Pro API.

@@ -66,4 +74,10 @@ ANTHROPIC_API_VERSION=
ANTHROPIC_URL=

### (optional)
WHITE_WEBDAV_ENDPOINTS=

### siliconflow Api key (optional)
SILICONFLOW_API_KEY=

### siliconflow Api url (optional)
SILICONFLOW_URL=
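For orientation, here is a minimal, hypothetical TypeScript sketch of how a Next.js server could read the variables documented in this template at startup. The variable names come from the template above; the parsing helper itself (`readEnvConfig`) is illustrative and is not the project's actual config code.

```typescript
// Illustrative only: parse the env vars documented in .env.template above.
export interface AppEnvConfig {
  openaiApiKey: string;
  deepseekApiKey?: string;
  accessCodes: string[]; // CODE is a comma-separated list of passwords
  proxyUrl?: string;
  enableMcp: boolean; // ENABLE_MCP must be exactly "true" to enable MCP
}

export function readEnvConfig(env = process.env): AppEnvConfig {
  return {
    openaiApiKey: env.OPENAI_API_KEY ?? "",
    deepseekApiKey: env.DEEPSEEK_API_KEY || undefined,
    accessCodes: (env.CODE ?? "")
      .split(",")
      .map((c) => c.trim())
      .filter(Boolean),
    proxyUrl: env.PROXY_URL || undefined,
    enableMcp: env.ENABLE_MCP === "true",
  };
}
```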
@@ -1 +1,3 @@
public/serviceWorker.js
app/mcp/mcp_config.json
app/mcp/mcp_config.default.json
@@ -3,9 +3,7 @@ name: VercelPreviewDeployment
on:
  pull_request_target:
    types:
      - opened
      - synchronize
      - reopened
      - review_requested

env:
  VERCEL_TEAM: ${{ secrets.VERCEL_TEAM }}
@@ -0,0 +1,39 @@
name: Run Tests

on:
  push:
    branches:
      - main
    tags:
      - "!*"
  pull_request:
    types:
      - review_requested

jobs:
  test:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Node.js
        uses: actions/setup-node@v3
        with:
          node-version: 18
          cache: "yarn"

      - name: Cache node_modules
        uses: actions/cache@v4
        with:
          path: node_modules
          key: ${{ runner.os }}-node_modules-${{ hashFiles('**/yarn.lock') }}
          restore-keys: |
            ${{ runner.os }}-node_modules-

      - name: Install dependencies
        run: yarn install

      - name: Run Jest tests
        run: yarn test:ci
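The `yarn test:ci` step assumes a Jest setup already exists in the repository. As a hedge, the file below is only a hypothetical example of the kind of test this workflow would execute; the file name and assertion are invented for illustration.

```typescript
// test/ci-smoke.test.ts — hypothetical example test; the repo's real tests live elsewhere.
describe("CI smoke test", () => {
  it("runs under the Node 18 environment set up by the workflow", () => {
    const major = Number(process.versions.node.split(".")[0]);
    expect(major).toBeGreaterThanOrEqual(18);
  });
});
```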
@@ -46,3 +46,6 @@ dev
*.key.pub

masks.json

# mcp config
app/mcp/mcp_config.json
@@ -34,12 +34,16 @@ ENV PROXY_URL=""
ENV OPENAI_API_KEY=""
ENV GOOGLE_API_KEY=""
ENV CODE=""
ENV ENABLE_MCP=""

COPY --from=builder /app/public ./public
COPY --from=builder /app/.next/standalone ./
COPY --from=builder /app/.next/static ./.next/static
COPY --from=builder /app/.next/server ./.next/server

RUN mkdir -p /app/app/mcp && chmod 777 /app/app/mcp
COPY --from=builder /app/app/mcp/mcp_config.default.json /app/app/mcp/mcp_config.json

EXPOSE 3000

CMD if [ -n "$PROXY_URL" ]; then \
LICENSE
@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2023-2024 Zhang Yifei
Copyright (c) 2023-2025 NextChat

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
README.md
@@ -1,16 +1,20 @@
<div align="center">

<a href='#企业版'>
<img src="./docs/images/ent.svg" alt="icon"/>
<a href='https://nextchat.club'>
<img src="https://github.com/user-attachments/assets/83bdcc07-ae5e-4954-a53a-ac151ba6ccf3" width="1000" alt="icon"/>
</a>

<h1 align="center">NextChat (ChatGPT Next Web)</h1>

English / [简体中文](./README_CN.md)

One-Click to get a well-designed cross-platform ChatGPT web UI, with GPT3, GPT4 & Gemini Pro support.
<a href="https://trendshift.io/repositories/5973" target="_blank"><img src="https://trendshift.io/api/badge/repositories/5973" alt="ChatGPTNextWeb%2FChatGPT-Next-Web | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>

一键免费部署你的跨平台私人 ChatGPT 应用, 支持 GPT3, GPT4 & Gemini Pro 模型。

✨ Light and Fast AI Assistant, with Claude, DeepSeek, GPT4 & Gemini Pro support.

[![Saas][Saas-image]][saas-url]
[![Web][Web-image]][web-url]
@@ -18,11 +22,11 @@ One-Click to get a well-designed cross-platform ChatGPT web UI, with GPT3, GPT4
[![MacOS][MacOS-image]][download-url]
[![Linux][Linux-image]][download-url]

[NextChatAI](https://nextchat.dev/chat?utm_source=readme) / [Web App](https://app.nextchat.dev) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [Discord](https://discord.gg/YCkeafCafC) / [Enterprise Edition](#enterprise-edition) / [Twitter](https://twitter.com/NextChatDev)
[NextChatAI](https://nextchat.dev/chat?utm_source=readme) / [Web App Demo](https://app.nextchat.dev) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases)
[NextChatAI](https://nextchat.club?utm_source=readme) / [Web App Demo](https://app.nextchat.dev) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [Discord](https://discord.gg/YCkeafCafC) / [Enterprise Edition](#enterprise-edition) / [Twitter](https://twitter.com/NextChatDev)

[NextChatAI](https://nextchat.dev/chat) / [网页版](https://app.nextchat.dev) / [客户端](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [企业版](#%E4%BC%81%E4%B8%9A%E7%89%88) / [反馈](https://github.com/Yidadaa/ChatGPT-Next-Web/issues)

[saas-url]: https://nextchat.dev/chat?utm_source=readme
[saas-url]: https://nextchat.club?utm_source=readme
[saas-image]: https://img.shields.io/badge/NextChat-Saas-green?logo=microsoftedge
[web-url]: https://app.nextchat.dev/
[download-url]: https://github.com/Yidadaa/ChatGPT-Next-Web/releases
@@ -31,12 +35,43 @@ One-Click to get a well-designed cross-platform ChatGPT web UI, with GPT3, GPT4
[MacOS-image]: https://img.shields.io/badge/-MacOS-black?logo=apple
[Linux-image]: https://img.shields.io/badge/-Linux-333?logo=ubuntu

[<img src="https://vercel.com/button" alt="Deploy on Zeabur" height="30">](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FChatGPTNextWeb%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=nextchat&repository-name=NextChat) [<img src="https://zeabur.com/button.svg" alt="Deploy on Zeabur" height="30">](https://zeabur.com/templates/ZBUEFA) [<img src="https://gitpod.io/button/open-in-gitpod.svg" alt="Open in Gitpod" height="30">](https://gitpod.io/#https://github.com/Yidadaa/ChatGPT-Next-Web)
[<img src="https://zeabur.com/button.svg" alt="Deploy on Zeabur" height="30">](https://zeabur.com/templates/ZBUEFA) [<img src="https://vercel.com/button" alt="Deploy on Vercel" height="30">](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FChatGPTNextWeb%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=nextchat&repository-name=NextChat) [<img src="https://gitpod.io/button/open-in-gitpod.svg" alt="Open in Gitpod" height="30">](https://gitpod.io/#https://github.com/ChatGPTNextWeb/NextChat)

[<img src="https://github.com/user-attachments/assets/903482d4-3e87-4134-9af1-f2588fa90659" height="60" width="288" >](https://monica.im/?utm=nxcrp)
[<img src="https://github.com/user-attachments/assets/903482d4-3e87-4134-9af1-f2588fa90659" height="50" width="" >](https://monica.im/?utm=nxcrp)

</div>

## 👋 Hey, NextChat is going to develop a native app!

> This week we are going to start working on the iOS and Android apps, and we want to find some reliable friends to do it together!

✨ Several key points:

- Join from day zero and be a founding member
- Completely open source, nothing hidden
- Native development, pursuing the ultimate experience

Will you come and build something together? 😎

https://github.com/ChatGPTNextWeb/NextChat/issues/6269

#Eagerly seeking talent #We need people

## 🥳 Cheer for DeepSeek, China's AI star!
> Purpose-Built UI for DeepSeek Reasoner Model

<img src="https://github.com/user-attachments/assets/f3952210-3af1-4dc0-9b81-40eaa4847d9a"/>

## 🫣 NextChat Supports MCP!
> Before building, set the env var ENABLE_MCP=true

<img src="https://github.com/user-attachments/assets/d8851f40-4e36-4335-b1a4-ec1e11488c7e"/>

## Enterprise Edition

Meeting Your Company's Privatization and Customization Deployment Requirements:
@@ -50,20 +85,12 @@ Meeting Your Company's Privatization and Customization Deployment Requirements:

For enterprise inquiries, please contact: **business@nextchat.dev**

## 企业版
## Screenshots

满足企业用户私有化部署和个性化定制需求:
- **品牌定制**:企业量身定制 VI/UI,与企业品牌形象无缝契合
- **资源集成**:由企业管理人员统一配置和管理数十种 AI 资源,团队成员开箱即用
- **权限管理**:成员权限、资源权限、知识库权限层级分明,企业级 Admin Panel 统一控制
- **知识接入**:企业内部知识库与 AI 能力相结合,比通用 AI 更贴近企业自身业务需求
- **安全审计**:自动拦截敏感提问,支持追溯全部历史对话记录,让 AI 也能遵循企业信息安全规范
- **私有部署**:企业级私有部署,支持各类主流私有云部署,确保数据安全和隐私保护
- **持续更新**:提供多模态、智能体等前沿能力持续更新升级服务,常用常新、持续先进

![Settings](./docs/images/settings.png)

企业版咨询: **business@nextchat.dev**

![More](./docs/images/more.png)

<img width="300" src="https://github.com/user-attachments/assets/3d4305ac-6e95-489e-884b-51d51db5f692">

## Features
@@ -96,10 +123,11 @@ For enterprise inquiries, please contact: **business@nextchat.dev**
- [x] Artifacts: Easily preview, copy and share generated content/webpages through a separate window [#5092](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/pull/5092)
- [x] Plugins: support network search, calculator, any other apis etc. [#165](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/165) [#5353](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/5353)
- [x] network search, calculator, any other apis etc. [#165](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/165) [#5353](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/5353)
- [x] Supports Realtime Chat [#5672](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/5672)
- [ ] local knowledge base

## What's New

- 🚀 v2.15.8 Now supports Realtime Chat [#5672](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/5672)
- 🚀 v2.15.4 The Application supports using Tauri fetch LLM API, MORE SECURITY! [#5379](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/5379)
- 🚀 v2.15.0 Now supports Plugins! Read this: [NextChat-Awesome-Plugins](https://github.com/ChatGPTNextWeb/NextChat-Awesome-Plugins)
- 🚀 v2.14.0 Now supports Artifacts & SD
@@ -109,49 +137,8 @@ For enterprise inquiries, please contact: **business@nextchat.dev**
- 🚀 v2.7 let's share conversations as image, or share to ShareGPT!
- 🚀 v2.0 is released, now you can create prompt templates, turn your ideas into reality! Read this: [ChatGPT Prompt Engineering Tips: Zero, One and Few Shot Prompting](https://www.allabtai.com/prompt-engineering-tips-zero-one-and-few-shot-prompting/).

## 主要功能

- 在 1 分钟内使用 Vercel **免费一键部署**
- 提供体积极小(~5MB)的跨平台客户端(Linux/Windows/MacOS), [下载地址](https://github.com/Yidadaa/ChatGPT-Next-Web/releases)
- 完整的 Markdown 支持:LaTex 公式、Mermaid 流程图、代码高亮等等
- 精心设计的 UI,响应式设计,支持深色模式,支持 PWA
- 极快的首屏加载速度(~100kb),支持流式响应
- 隐私安全,所有数据保存在用户浏览器本地
- 预制角色功能(面具),方便地创建、分享和调试你的个性化对话
- 海量的内置 prompt 列表,来自[中文](https://github.com/PlexPt/awesome-chatgpt-prompts-zh)和[英文](https://github.com/f/awesome-chatgpt-prompts)
- 自动压缩上下文聊天记录,在节省 Token 的同时支持超长对话
- 多国语言支持:English, 简体中文, 繁体中文, 日本語, Español, Italiano, Türkçe, Deutsch, Tiếng Việt, Русский, Čeština, 한국어, Indonesia
- 拥有自己的域名?好上加好,绑定后即可在任何地方**无障碍**快速访问

## 开发计划

- [x] 为每个对话设置系统 Prompt [#138](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/138)
- [x] 允许用户自行编辑内置 Prompt 列表
- [x] 预制角色:使用预制角色快速定制新对话 [#993](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/993)
- [x] 分享为图片,分享到 ShareGPT 链接 [#1741](https://github.com/Yidadaa/ChatGPT-Next-Web/pull/1741)
- [x] 使用 tauri 打包桌面应用
- [x] 支持自部署的大语言模型:开箱即用 [RWKV-Runner](https://github.com/josStorer/RWKV-Runner) ,服务端部署 [LocalAI 项目](https://github.com/go-skynet/LocalAI) llama / gpt4all / rwkv / vicuna / koala / gpt4all-j / cerebras / falcon / dolly 等等,或者使用 [api-for-open-llm](https://github.com/xusenlinzy/api-for-open-llm)
- [x] Artifacts: 通过独立窗口,轻松预览、复制和分享生成的内容/可交互网页 [#5092](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/pull/5092)
- [x] 插件机制,支持`联网搜索`、`计算器`、调用其他平台 api [#165](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/165) [#5353](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/5353)
- [x] 支持联网搜索、计算器、调用其他平台 api [#165](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/165) [#5353](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/5353)
- [ ] 本地知识库

## 最新动态

- 🚀 v2.15.4 客户端支持Tauri本地直接调用大模型API,更安全
- 🚀 v2.15.0 现在支持插件功能了!了解更多:[NextChat-Awesome-Plugins](https://github.com/ChatGPTNextWeb/NextChat-Awesome-Plugins)
- 🚀 v2.14.0 现在支持 Artifacts & SD 了。
- 🚀 v2.10.1 现在支持 Gemini Pro 模型。
- 🚀 v2.9.11 现在可以使用自定义 Azure 服务了。
- 🚀 v2.8 发布了横跨 Linux/Windows/MacOS 的体积极小的客户端。
- 🚀 v2.7 现在可以将会话分享为图片了,也可以分享到 ShareGPT 的在线链接。
- 🚀 v2.0 已经发布,现在你可以使用面具功能快速创建预制对话了! 了解更多: [ChatGPT 提示词高阶技能:零次、一次和少样本提示](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/138)。
- 💡 想要更方便地随时随地使用本项目?可以试下这款桌面插件:https://github.com/mushan0x0/AI0x0.com

## Get Started

> [简体中文 > 如何开始使用](./README_CN.md#开始使用)

1. Get [OpenAI API Key](https://platform.openai.com/account/api-keys);
2. Click
   [](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web), remember that `CODE` is your page password;
@@ -159,14 +146,10 @@ For enterprise inquiries, please contact: **business@nextchat.dev**

## FAQ

[简体中文 > 常见问题](./docs/faq-cn.md)

[English > FAQ](./docs/faq-en.md)

## Keep Updated

> [简体中文 > 如何保持代码更新](./README_CN.md#保持更新)

If you have deployed your own project with just one click following the steps above, you may encounter the issue of "Updates Available" constantly showing up. This is because Vercel will create a new project for you by default instead of forking this project, resulting in the inability to detect updates correctly.

We recommend that you follow the steps below to re-deploy:
@@ -193,8 +176,6 @@ You can star or watch this project or follow author to get release notifications

## Access Password

> [简体中文 > 如何增加访问密码](./README_CN.md#配置页面访问密码)

This project provides limited access control. Please add an environment variable named `CODE` on the vercel environment variables page. The value should be passwords separated by comma like this:

```

@@ -205,8 +186,6 @@ After adding or modifying this environment variable, please redeploy the project

## Environment Variables

> [简体中文 > 如何配置 api key、访问密码、接口代理](./README_CN.md#环境变量)

### `CODE` (optional)

Access password, separated by comma.
@@ -301,6 +280,22 @@ iflytek Api Key.

iflytek Api Secret.

### `CHATGLM_API_KEY` (optional)

ChatGLM Api Key.

### `CHATGLM_URL` (optional)

ChatGLM Api Url.

### `DEEPSEEK_API_KEY` (optional)

DeepSeek Api Key.

### `DEEPSEEK_URL` (optional)

DeepSeek Api Url.

### `HIDE_USER_API_KEY` (optional)

> Default: Empty
@@ -345,6 +340,13 @@ For ByteDance: use `modelName@bytedance=deploymentName` to customize model name

Change default model

### `VISION_MODELS` (optional)

> Default: Empty
> Example: `gpt-4-vision,claude-3-opus,my-custom-model` means add vision capabilities to these models in addition to the default pattern matches (which detect models containing keywords like "vision", "claude-3", "gemini-1.5", etc).

Add additional models to have vision capabilities, beyond the default pattern matching. Multiple models should be separated by commas.
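A possible reading of this option in code, as a hedged sketch: the helper name `isVisionModel` and the default keyword list below are assumptions based on the description above, not the repository's actual implementation.

```typescript
// Illustrative sketch only: combine the default keyword matching with the
// comma-separated VISION_MODELS allow-list described above.
const DEFAULT_VISION_KEYWORDS = ["vision", "claude-3", "gemini-1.5"]; // assumed defaults

export function isVisionModel(model: string, visionModelsEnv = ""): boolean {
  const extra = visionModelsEnv
    .split(",")
    .map((m) => m.trim())
    .filter(Boolean);
  if (extra.includes(model)) return true; // explicitly listed via VISION_MODELS
  return DEFAULT_VISION_KEYWORDS.some((kw) => model.includes(kw));
}
```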
### `WHITE_WEBDAV_ENDPOINTS` (optional)

You can use this option if you want to increase the number of webdav service addresses you are allowed to access, as required by the format:
@@ -364,13 +366,25 @@ Stability API key.

Customize Stability API url.

### `ENABLE_MCP` (optional)

Enable the MCP (Model Context Protocol) feature.

### `SILICONFLOW_API_KEY` (optional)

SiliconFlow API Key.

### `SILICONFLOW_URL` (optional)

SiliconFlow API URL.

## Requirements

NodeJS >= 18, Docker >= 20

## Development

> [简体中文 > 如何进行二次开发](./README_CN.md#开发)

[](https://gitpod.io/#https://github.com/Yidadaa/ChatGPT-Next-Web)
@@ -395,7 +409,6 @@ yarn dev

## Deployment

> [简体中文 > 如何部署到私人服务器](./README_CN.md#部署)

### Docker (Recommended)
@@ -424,6 +437,16 @@ If your proxy needs password, use:
   -e PROXY_URL="http://127.0.0.1:7890 user pass"
```

To enable MCP, use:

```
docker run -d -p 3000:3000 \
   -e OPENAI_API_KEY=sk-xxxx \
   -e CODE=your-password \
   -e ENABLE_MCP=true \
   yidadaa/chatgpt-next-web
```

### Shell

```shell
@@ -444,11 +467,7 @@ bash <(curl -s https://raw.githubusercontent.com/Yidadaa/ChatGPT-Next-Web/main/s
- [How to use Vercel (No English)](./docs/vercel-cn.md)
- [User Manual (Only Chinese, WIP)](./docs/user-manual-cn.md)

## Screenshots

![Settings](./docs/images/settings.png)

![More](./docs/images/more.png)

## Translation
@@ -460,37 +479,7 @@ If you want to add a new translation, read this [document](./docs/translation.md

## Special Thanks

### Sponsor

> 仅列出捐赠金额 >= 100RMB 的用户。

[@mushan0x0](https://github.com/mushan0x0)
[@ClarenceDan](https://github.com/ClarenceDan)
[@zhangjia](https://github.com/zhangjia)
[@hoochanlon](https://github.com/hoochanlon)
[@relativequantum](https://github.com/relativequantum)
[@desenmeng](https://github.com/desenmeng)
[@webees](https://github.com/webees)
[@chazzhou](https://github.com/chazzhou)
[@hauy](https://github.com/hauy)
[@Corwin006](https://github.com/Corwin006)
[@yankunsong](https://github.com/yankunsong)
[@ypwhs](https://github.com/ypwhs)
[@fxxxchao](https://github.com/fxxxchao)
[@hotic](https://github.com/hotic)
[@WingCH](https://github.com/WingCH)
[@jtung4](https://github.com/jtung4)
[@micozhu](https://github.com/micozhu)
[@jhansion](https://github.com/jhansion)
[@Sha1rholder](https://github.com/Sha1rholder)
[@AnsonHyq](https://github.com/AnsonHyq)
[@synwith](https://github.com/synwith)
[@piksonGit](https://github.com/piksonGit)
[@ouyangzhiping](https://github.com/ouyangzhiping)
[@wenjiavv](https://github.com/wenjiavv)
[@LeXwDeX](https://github.com/LeXwDeX)
[@Licoy](https://github.com/Licoy)
[@shangmin2009](https://github.com/shangmin2009)

### Contributors
README_CN.md
@@ -6,9 +6,9 @@

<h1 align="center">NextChat</h1>

一键免费部署你的私人 ChatGPT 网页应用,支持 GPT3, GPT4 & Gemini Pro 模型。
一键免费部署你的私人 ChatGPT 网页应用,支持 Claude, GPT4 & Gemini Pro 模型。

[NextChatAI](https://nextchat.dev/chat?utm_source=readme) / [企业版](#%E4%BC%81%E4%B8%9A%E7%89%88) / [演示 Demo](https://chat-gpt-next-web.vercel.app/) / [反馈 Issues](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [加入 Discord](https://discord.gg/zrhvHCr79N)
[NextChatAI](https://nextchat.club?utm_source=readme) / [企业版](#%E4%BC%81%E4%B8%9A%E7%89%88) / [演示 Demo](https://chat-gpt-next-web.vercel.app/) / [反馈 Issues](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [加入 Discord](https://discord.gg/zrhvHCr79N)

[<img src="https://vercel.com/button" alt="Deploy on Zeabur" height="30">](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FChatGPTNextWeb%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=nextchat&repository-name=NextChat) [<img src="https://zeabur.com/button.svg" alt="Deploy on Zeabur" height="30">](https://zeabur.com/templates/ZBUEFA) [<img src="https://gitpod.io/button/open-in-gitpod.svg" alt="Open in Gitpod" height="30">](https://gitpod.io/#https://github.com/Yidadaa/ChatGPT-Next-Web)
@@ -27,7 +27,8 @@

企业版咨询: **business@nextchat.dev**

<img width="300" src="https://github.com/user-attachments/assets/3daeb7b6-ab63-4542-9141-2e4a12c80601">
<img width="300" src="https://github.com/user-attachments/assets/bb29a11d-ff75-48a8-b1f8-d2d7238cf987">

## 开始使用
@@ -88,7 +89,7 @@ code1,code2,code3

### `OPENAI_API_KEY` (必填项)

OpanAI 密钥,你在 openai 账户页面申请的 api key,使用英文逗号隔开多个 key,这样可以随机轮询这些 key。
OpenAI 密钥,你在 openai 账户页面申请的 api key,使用英文逗号隔开多个 key,这样可以随机轮询这些 key。

### `CODE` (可选)
@@ -184,6 +185,21 @@ ByteDance Api Url.

讯飞星火Api Secret.

### `CHATGLM_API_KEY` (可选)

ChatGLM Api Key.

### `CHATGLM_URL` (可选)

ChatGLM Api Url.

### `DEEPSEEK_API_KEY` (可选)

DeepSeek Api Key.

### `DEEPSEEK_URL` (可选)

DeepSeek Api Url.

### `HIDE_USER_API_KEY` (可选)
@@ -228,6 +244,13 @@ ByteDance Api Url.

更改默认模型

### `VISION_MODELS` (可选)

> 默认值:空
> 示例:`gpt-4-vision,claude-3-opus,my-custom-model` 表示为这些模型添加视觉能力,作为对默认模式匹配的补充(默认会检测包含"vision"、"claude-3"、"gemini-1.5"等关键词的模型)。

在默认模式匹配之外,添加更多具有视觉能力的模型。多个模型用逗号分隔。

### `DEFAULT_INPUT_TEMPLATE` (可选)

自定义默认的 template,用于初始化『设置』中的『用户输入预处理』配置项
@@ -240,6 +263,17 @@ Stability API密钥

自定义的Stability API请求地址

### `ENABLE_MCP` (optional)

启用MCP(Model Context Protocol)功能

### `SILICONFLOW_API_KEY` (optional)

SiliconFlow API Key.

### `SILICONFLOW_URL` (optional)

SiliconFlow API URL.

## 开发
@@ -264,6 +298,9 @@ BASE_URL=https://b.nextweb.fun/api/proxy

## 部署

### 宝塔面板部署
> [简体中文 > 如何通过宝塔一键部署](./docs/bt-cn.md)

### 容器部署 (推荐)

> Docker 版本需要在 20 及其以上,否则会提示找不到镜像。
|
|||
yidadaa/chatgpt-next-web
|
||||
```
|
||||
|
||||
如需启用 MCP 功能,可以使用:
|
||||
|
||||
```shell
|
||||
docker run -d -p 3000:3000 \
|
||||
-e OPENAI_API_KEY=sk-xxxx \
|
||||
-e CODE=页面访问密码 \
|
||||
-e ENABLE_MCP=true \
|
||||
yidadaa/chatgpt-next-web
|
||||
```
|
||||
|
||||
如果你的本地代理需要账号密码,可以使用:
|
||||
|
||||
```shell
|
||||
|
|
|
@@ -5,7 +5,7 @@

ワンクリックで無料であなた専用の ChatGPT ウェブアプリをデプロイ。GPT3、GPT4 & Gemini Pro モデルをサポート。

[NextChatAI](https://nextchat.dev/chat?utm_source=readme) / [企業版](#企業版) / [デモ](https://chat-gpt-next-web.vercel.app/) / [フィードバック](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [Discordに参加](https://discord.gg/zrhvHCr79N)
[NextChatAI](https://nextchat.club?utm_source=readme) / [企業版](#企業版) / [デモ](https://chat-gpt-next-web.vercel.app/) / [フィードバック](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [Discordに参加](https://discord.gg/zrhvHCr79N)

[<img src="https://vercel.com/button" alt="Zeaburでデプロイ" height="30">](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FChatGPTNextWeb%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=nextchat&repository-name=NextChat) [<img src="https://zeabur.com/button.svg" alt="Zeaburでデプロイ" height="30">](https://zeabur.com/templates/ZBUEFA) [<img src="https://gitpod.io/button/open-in-gitpod.svg" alt="Gitpodで開く" height="30">](https://gitpod.io/#https://github.com/Yidadaa/ChatGPT-Next-Web)
@@ -217,6 +217,13 @@ ByteDance モードでは、`modelName@bytedance=deploymentName` 形式でモデ

デフォルトのモデルを変更します。

### `VISION_MODELS` (オプション)

> デフォルト:空
> 例:`gpt-4-vision,claude-3-opus,my-custom-model` は、これらのモデルにビジョン機能を追加します。これはデフォルトのパターンマッチング("vision"、"claude-3"、"gemini-1.5"などのキーワードを含むモデルを検出)に加えて適用されます。

デフォルトのパターンマッチングに加えて、追加のモデルにビジョン機能を付与します。複数のモデルはカンマで区切ります。

### `DEFAULT_INPUT_TEMPLATE` (オプション)

『設定』の『ユーザー入力前処理』の初期設定に使用するテンプレートをカスタマイズします。
@@ -10,6 +10,10 @@ import { handle as alibabaHandler } from "../../alibaba";
import { handle as moonshotHandler } from "../../moonshot";
import { handle as stabilityHandler } from "../../stability";
import { handle as iflytekHandler } from "../../iflytek";
import { handle as deepseekHandler } from "../../deepseek";
import { handle as siliconflowHandler } from "../../siliconflow";
import { handle as xaiHandler } from "../../xai";
import { handle as chatglmHandler } from "../../glm";
import { handle as proxyHandler } from "../../proxy";

async function handle(

@@ -38,6 +42,14 @@ async function handle(
      return stabilityHandler(req, { params });
    case ApiPath.Iflytek:
      return iflytekHandler(req, { params });
    case ApiPath.DeepSeek:
      return deepseekHandler(req, { params });
    case ApiPath.XAI:
      return xaiHandler(req, { params });
    case ApiPath.ChatGLM:
      return chatglmHandler(req, { params });
    case ApiPath.SiliconFlow:
      return siliconflowHandler(req, { params });
    case ApiPath.OpenAI:
      return openaiHandler(req, { params });
    default:
@@ -8,7 +8,7 @@ import {
import { prettyObject } from "@/app/utils/format";
import { NextRequest, NextResponse } from "next/server";
import { auth } from "@/app/api/auth";
import { isModelAvailableInServer } from "@/app/utils/model";
import { isModelNotavailableInServer } from "@/app/utils/model";

const serverConfig = getServerSideConfig();

@@ -89,7 +89,7 @@ async function request(req: NextRequest) {
      // not undefined and is false
      if (
        isModelAvailableInServer(
        isModelNotavailableInServer(
          serverConfig.customModels,
          jsonBody?.model as string,
          ServiceProvider.Alibaba as string,
@@ -9,7 +9,7 @@ import {
import { prettyObject } from "@/app/utils/format";
import { NextRequest, NextResponse } from "next/server";
import { auth } from "./auth";
import { isModelAvailableInServer } from "@/app/utils/model";
import { isModelNotavailableInServer } from "@/app/utils/model";
import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";

const ALLOWD_PATH = new Set([Anthropic.ChatPath, Anthropic.ChatPath1]);

@@ -122,7 +122,7 @@ async function request(req: NextRequest) {
      // not undefined and is false
      if (
        isModelAvailableInServer(
        isModelNotavailableInServer(
          serverConfig.customModels,
          jsonBody?.model as string,
          ServiceProvider.Anthropic as string,
@@ -92,6 +92,18 @@ export function auth(req: NextRequest, modelProvider: ModelProvider) {
      systemApiKey =
        serverConfig.iflytekApiKey + ":" + serverConfig.iflytekApiSecret;
      break;
    case ModelProvider.DeepSeek:
      systemApiKey = serverConfig.deepseekApiKey;
      break;
    case ModelProvider.XAI:
      systemApiKey = serverConfig.xaiApiKey;
      break;
    case ModelProvider.ChatGLM:
      systemApiKey = serverConfig.chatglmApiKey;
      break;
    case ModelProvider.SiliconFlow:
      systemApiKey = serverConfig.siliconFlowApiKey;
      break;
    case ModelProvider.GPT:
    default:
      if (req.nextUrl.pathname.includes("azure/deployments")) {
@@ -8,7 +8,7 @@ import {
import { prettyObject } from "@/app/utils/format";
import { NextRequest, NextResponse } from "next/server";
import { auth } from "@/app/api/auth";
import { isModelAvailableInServer } from "@/app/utils/model";
import { isModelNotavailableInServer } from "@/app/utils/model";
import { getAccessToken } from "@/app/utils/baidu";

const serverConfig = getServerSideConfig();

@@ -104,7 +104,7 @@ async function request(req: NextRequest) {
      // not undefined and is false
      if (
        isModelAvailableInServer(
        isModelNotavailableInServer(
          serverConfig.customModels,
          jsonBody?.model as string,
          ServiceProvider.Baidu as string,
@@ -8,7 +8,7 @@ import {
import { prettyObject } from "@/app/utils/format";
import { NextRequest, NextResponse } from "next/server";
import { auth } from "@/app/api/auth";
import { isModelAvailableInServer } from "@/app/utils/model";
import { isModelNotavailableInServer } from "@/app/utils/model";

const serverConfig = getServerSideConfig();

@@ -88,7 +88,7 @@ async function request(req: NextRequest) {
      // not undefined and is false
      if (
        isModelAvailableInServer(
        isModelNotavailableInServer(
          serverConfig.customModels,
          jsonBody?.model as string,
          ServiceProvider.ByteDance as string,
@@ -1,8 +1,8 @@
import { NextRequest, NextResponse } from "next/server";
import { getServerSideConfig } from "../config/server";
import { OPENAI_BASE_URL, ServiceProvider } from "../constant";
import { isModelAvailableInServer } from "../utils/model";
import { cloudflareAIGatewayUrl } from "../utils/cloudflare";
import { getModelProvider, isModelNotavailableInServer } from "../utils/model";

const serverConfig = getServerSideConfig();

@@ -71,7 +71,7 @@ export async function requestOpenai(req: NextRequest) {
      .filter((v) => !!v && !v.startsWith("-") && v.includes(modelName))
      .forEach((m) => {
        const [fullName, displayName] = m.split("=");
        const [_, providerName] = fullName.split("@");
        const [_, providerName] = getModelProvider(fullName);
        if (providerName === "azure" && !displayName) {
          const [_, deployId] = (serverConfig?.azureUrl ?? "").split(
            "deployments/",

@@ -118,15 +118,14 @@ export async function requestOpenai(req: NextRequest) {
      // not undefined and is false
      if (
        isModelAvailableInServer(
        isModelNotavailableInServer(
          serverConfig.customModels,
          jsonBody?.model as string,
          ServiceProvider.OpenAI as string,
        ) ||
        isModelAvailableInServer(
          serverConfig.customModels,
          jsonBody?.model as string,
          ServiceProvider.Azure as string,
          [
            ServiceProvider.OpenAI,
            ServiceProvider.Azure,
            jsonBody?.model as string, // support provider-unspecified model
          ],
        )
      ) {
        return NextResponse.json(
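The rename from `isModelAvailableInServer` to `isModelNotavailableInServer` flips the predicate, and the OpenAI route now checks several providers in a single call by passing an array. The real helper lives in `app/utils/model.ts`; the following is only a hypothetical, simplified sketch of the semantics implied by these call sites (a model is rejected when the `CUSTOM_MODELS` rules disable it for every listed provider).

```typescript
// Hypothetical sketch, not the project's implementation: reject a model when the
// CUSTOM_MODELS string (e.g. "-all,+gpt-4o@openai,-some-model") disables it for
// every provider name we were asked to check.
export function isModelNotavailableInServer(
  customModels: string,
  modelName: string,
  providerNames: string | string[],
): boolean {
  if (!customModels || !modelName) return false;
  const providers = Array.isArray(providerNames) ? providerNames : [providerNames];
  const rules = customModels
    .split(",")
    .map((r) => r.trim().toLowerCase())
    .filter(Boolean);
  const name = modelName.toLowerCase();

  const disabledFor = (provider: string) =>
    rules.includes("-all") ||
    rules.includes(`-${name}`) ||
    rules.includes(`-${name}@${provider.toLowerCase()}`);
  const enabledFor = (provider: string) =>
    rules.includes(`+${name}`) || rules.includes(`+${name}@${provider.toLowerCase()}`);

  return providers.every((p) => disabledFor(p) && !enabledFor(p));
}
```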
@@ -14,6 +14,7 @@ const DANGER_CONFIG = {
  disableFastLink: serverConfig.disableFastLink,
  customModels: serverConfig.customModels,
  defaultModel: serverConfig.defaultModel,
  visionModels: serverConfig.visionModels,
};

declare global {
@@ -0,0 +1,128 @@
import { getServerSideConfig } from "@/app/config/server";
import {
  DEEPSEEK_BASE_URL,
  ApiPath,
  ModelProvider,
  ServiceProvider,
} from "@/app/constant";
import { prettyObject } from "@/app/utils/format";
import { NextRequest, NextResponse } from "next/server";
import { auth } from "@/app/api/auth";
import { isModelNotavailableInServer } from "@/app/utils/model";

const serverConfig = getServerSideConfig();

export async function handle(
  req: NextRequest,
  { params }: { params: { path: string[] } },
) {
  console.log("[DeepSeek Route] params ", params);

  if (req.method === "OPTIONS") {
    return NextResponse.json({ body: "OK" }, { status: 200 });
  }

  const authResult = auth(req, ModelProvider.DeepSeek);
  if (authResult.error) {
    return NextResponse.json(authResult, {
      status: 401,
    });
  }

  try {
    const response = await request(req);
    return response;
  } catch (e) {
    console.error("[DeepSeek] ", e);
    return NextResponse.json(prettyObject(e));
  }
}

async function request(req: NextRequest) {
  const controller = new AbortController();

  // alibaba use base url or just remove the path
  let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.DeepSeek, "");

  let baseUrl = serverConfig.deepseekUrl || DEEPSEEK_BASE_URL;

  if (!baseUrl.startsWith("http")) {
    baseUrl = `https://${baseUrl}`;
  }

  if (baseUrl.endsWith("/")) {
    baseUrl = baseUrl.slice(0, -1);
  }

  console.log("[Proxy] ", path);
  console.log("[Base Url]", baseUrl);

  const timeoutId = setTimeout(
    () => {
      controller.abort();
    },
    10 * 60 * 1000,
  );

  const fetchUrl = `${baseUrl}${path}`;
  const fetchOptions: RequestInit = {
    headers: {
      "Content-Type": "application/json",
      Authorization: req.headers.get("Authorization") ?? "",
    },
    method: req.method,
    body: req.body,
    redirect: "manual",
    // @ts-ignore
    duplex: "half",
    signal: controller.signal,
  };

  // #1815 try to refuse some request to some models
  if (serverConfig.customModels && req.body) {
    try {
      const clonedBody = await req.text();
      fetchOptions.body = clonedBody;

      const jsonBody = JSON.parse(clonedBody) as { model?: string };

      // not undefined and is false
      if (
        isModelNotavailableInServer(
          serverConfig.customModels,
          jsonBody?.model as string,
          ServiceProvider.DeepSeek as string,
        )
      ) {
        return NextResponse.json(
          {
            error: true,
            message: `you are not allowed to use ${jsonBody?.model} model`,
          },
          {
            status: 403,
          },
        );
      }
    } catch (e) {
      console.error(`[DeepSeek] filter`, e);
    }
  }
  try {
    const res = await fetch(fetchUrl, fetchOptions);

    // to prevent browser prompt for credentials
    const newHeaders = new Headers(res.headers);
    newHeaders.delete("www-authenticate");
    // to disable nginx buffering
    newHeaders.set("X-Accel-Buffering", "no");

    return new Response(res.body, {
      status: res.status,
      statusText: res.statusText,
      headers: newHeaders,
    });
  } finally {
    clearTimeout(timeoutId);
  }
}
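To make the new route concrete, here is a hedged client-side usage sketch. The path prefix (`/api/deepseek`) and the chat endpoint are assumptions based on `ApiPath.DeepSeek` and DeepSeek's OpenAI-compatible API, and the bearer token format is whatever the app's `auth()` accepts; adjust to the project's real constants.

```typescript
// Illustrative call through the proxy route added above (names are assumptions).
async function askDeepSeek(prompt: string, accessToken: string) {
  const res = await fetch("/api/deepseek/v1/chat/completions", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      // The proxy forwards this Authorization header to DeepSeek as-is.
      Authorization: `Bearer ${accessToken}`,
    },
    body: JSON.stringify({
      model: "deepseek-chat",
      messages: [{ role: "user", content: prompt }],
    }),
  });
  if (!res.ok) throw new Error(`DeepSeek proxy error: ${res.status}`);
  return res.json();
}
```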
@@ -0,0 +1,129 @@
import { getServerSideConfig } from "@/app/config/server";
import {
  CHATGLM_BASE_URL,
  ApiPath,
  ModelProvider,
  ServiceProvider,
} from "@/app/constant";
import { prettyObject } from "@/app/utils/format";
import { NextRequest, NextResponse } from "next/server";
import { auth } from "@/app/api/auth";
import { isModelNotavailableInServer } from "@/app/utils/model";

const serverConfig = getServerSideConfig();

export async function handle(
  req: NextRequest,
  { params }: { params: { path: string[] } },
) {
  console.log("[GLM Route] params ", params);

  if (req.method === "OPTIONS") {
    return NextResponse.json({ body: "OK" }, { status: 200 });
  }

  const authResult = auth(req, ModelProvider.ChatGLM);
  if (authResult.error) {
    return NextResponse.json(authResult, {
      status: 401,
    });
  }

  try {
    const response = await request(req);
    return response;
  } catch (e) {
    console.error("[GLM] ", e);
    return NextResponse.json(prettyObject(e));
  }
}

async function request(req: NextRequest) {
  const controller = new AbortController();

  // alibaba use base url or just remove the path
  let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.ChatGLM, "");

  let baseUrl = serverConfig.chatglmUrl || CHATGLM_BASE_URL;

  if (!baseUrl.startsWith("http")) {
    baseUrl = `https://${baseUrl}`;
  }

  if (baseUrl.endsWith("/")) {
    baseUrl = baseUrl.slice(0, -1);
  }

  console.log("[Proxy] ", path);
  console.log("[Base Url]", baseUrl);

  const timeoutId = setTimeout(
    () => {
      controller.abort();
    },
    10 * 60 * 1000,
  );

  const fetchUrl = `${baseUrl}${path}`;
  console.log("[Fetch Url] ", fetchUrl);
  const fetchOptions: RequestInit = {
    headers: {
      "Content-Type": "application/json",
      Authorization: req.headers.get("Authorization") ?? "",
    },
    method: req.method,
    body: req.body,
    redirect: "manual",
    // @ts-ignore
    duplex: "half",
    signal: controller.signal,
  };

  // #1815 try to refuse some request to some models
  if (serverConfig.customModels && req.body) {
    try {
      const clonedBody = await req.text();
      fetchOptions.body = clonedBody;

      const jsonBody = JSON.parse(clonedBody) as { model?: string };

      // not undefined and is false
      if (
        isModelNotavailableInServer(
          serverConfig.customModels,
          jsonBody?.model as string,
          ServiceProvider.ChatGLM as string,
        )
      ) {
        return NextResponse.json(
          {
            error: true,
            message: `you are not allowed to use ${jsonBody?.model} model`,
          },
          {
            status: 403,
          },
        );
      }
    } catch (e) {
      console.error(`[GLM] filter`, e);
    }
  }
  try {
    const res = await fetch(fetchUrl, fetchOptions);

    // to prevent browser prompt for credentials
    const newHeaders = new Headers(res.headers);
    newHeaders.delete("www-authenticate");
    // to disable nginx buffering
    newHeaders.set("X-Accel-Buffering", "no");

    return new Response(res.body, {
      status: res.status,
      statusText: res.statusText,
      headers: newHeaders,
    });
  } finally {
    clearTimeout(timeoutId);
  }
}
@@ -8,7 +8,7 @@ import {
import { prettyObject } from "@/app/utils/format";
import { NextRequest, NextResponse } from "next/server";
import { auth } from "@/app/api/auth";
import { isModelAvailableInServer } from "@/app/utils/model";
import { isModelNotavailableInServer } from "@/app/utils/model";
// iflytek

const serverConfig = getServerSideConfig();

@@ -89,7 +89,7 @@ async function request(req: NextRequest) {
      // not undefined and is false
      if (
        isModelAvailableInServer(
        isModelNotavailableInServer(
          serverConfig.customModels,
          jsonBody?.model as string,
          ServiceProvider.Iflytek as string,
@@ -8,7 +8,7 @@ import {
import { prettyObject } from "@/app/utils/format";
import { NextRequest, NextResponse } from "next/server";
import { auth } from "@/app/api/auth";
import { isModelAvailableInServer } from "@/app/utils/model";
import { isModelNotavailableInServer } from "@/app/utils/model";

const serverConfig = getServerSideConfig();

@@ -88,7 +88,7 @@ async function request(req: NextRequest) {
      // not undefined and is false
      if (
        isModelAvailableInServer(
        isModelNotavailableInServer(
          serverConfig.customModels,
          jsonBody?.model as string,
          ServiceProvider.Moonshot as string,
@@ -14,8 +14,12 @@ function getModels(remoteModelRes: OpenAIListModelResponse) {
  if (config.disableGPT4) {
    remoteModelRes.data = remoteModelRes.data.filter(
      (m) =>
        !(m.id.startsWith("gpt-4") || m.id.startsWith("chatgpt-4o")) ||
        m.id.startsWith("gpt-4o-mini"),
        !(
          m.id.startsWith("gpt-4") ||
          m.id.startsWith("chatgpt-4o") ||
          m.id.startsWith("o1") ||
          m.id.startsWith("o3")
        ) || m.id.startsWith("gpt-4o-mini"),
    );
  }
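As a quick worked example of the widened `disableGPT4` filter (model IDs below are illustrative, not an exhaustive list): `gpt-4`, `chatgpt-4o-latest`, `o1-preview`, and `o3-mini` are now dropped, while `gpt-4o-mini` and `gpt-3.5-turbo` are kept.

```typescript
// The same predicate as above, extracted so the behavior is easy to check.
const keep = (id: string) =>
  !(
    id.startsWith("gpt-4") ||
    id.startsWith("chatgpt-4o") ||
    id.startsWith("o1") ||
    id.startsWith("o3")
  ) || id.startsWith("gpt-4o-mini");

console.log(keep("gpt-4")); // false
console.log(keep("o1-preview")); // false
console.log(keep("o3-mini")); // false
console.log(keep("gpt-4o-mini")); // true
console.log(keep("gpt-3.5-turbo")); // true
```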
@@ -1,4 +1,5 @@
import { NextRequest, NextResponse } from "next/server";
import { getServerSideConfig } from "@/app/config/server";

export async function handle(
  req: NextRequest,

@@ -9,6 +10,7 @@ export async function handle(
  if (req.method === "OPTIONS") {
    return NextResponse.json({ body: "OK" }, { status: 200 });
  }
  const serverConfig = getServerSideConfig();

  // remove path params from searchParams
  req.nextUrl.searchParams.delete("path");

@@ -31,6 +33,18 @@ export async function handle(
      return true;
    }),
  );
  // if dalle3 use openai api key
  const baseUrl = req.headers.get("x-base-url");
  if (baseUrl?.includes("api.openai.com")) {
    if (!serverConfig.apiKey) {
      return NextResponse.json(
        { error: "OpenAI API key not configured" },
        { status: 500 },
      );
    }
    headers.set("Authorization", `Bearer ${serverConfig.apiKey}`);
  }

  const controller = new AbortController();
  const fetchOptions: RequestInit = {
    headers,
@@ -0,0 +1,128 @@
import { getServerSideConfig } from "@/app/config/server";
import {
  SILICONFLOW_BASE_URL,
  ApiPath,
  ModelProvider,
  ServiceProvider,
} from "@/app/constant";
import { prettyObject } from "@/app/utils/format";
import { NextRequest, NextResponse } from "next/server";
import { auth } from "@/app/api/auth";
import { isModelNotavailableInServer } from "@/app/utils/model";

const serverConfig = getServerSideConfig();

export async function handle(
  req: NextRequest,
  { params }: { params: { path: string[] } },
) {
  console.log("[SiliconFlow Route] params ", params);

  if (req.method === "OPTIONS") {
    return NextResponse.json({ body: "OK" }, { status: 200 });
  }

  const authResult = auth(req, ModelProvider.SiliconFlow);
  if (authResult.error) {
    return NextResponse.json(authResult, {
      status: 401,
    });
  }

  try {
    const response = await request(req);
    return response;
  } catch (e) {
    console.error("[SiliconFlow] ", e);
    return NextResponse.json(prettyObject(e));
  }
}

async function request(req: NextRequest) {
  const controller = new AbortController();

  // alibaba use base url or just remove the path
  let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.SiliconFlow, "");

  let baseUrl = serverConfig.siliconFlowUrl || SILICONFLOW_BASE_URL;

  if (!baseUrl.startsWith("http")) {
    baseUrl = `https://${baseUrl}`;
  }

  if (baseUrl.endsWith("/")) {
    baseUrl = baseUrl.slice(0, -1);
  }

  console.log("[Proxy] ", path);
  console.log("[Base Url]", baseUrl);

  const timeoutId = setTimeout(
    () => {
      controller.abort();
    },
    10 * 60 * 1000,
  );

  const fetchUrl = `${baseUrl}${path}`;
  const fetchOptions: RequestInit = {
    headers: {
      "Content-Type": "application/json",
      Authorization: req.headers.get("Authorization") ?? "",
    },
    method: req.method,
    body: req.body,
    redirect: "manual",
    // @ts-ignore
    duplex: "half",
    signal: controller.signal,
  };

  // #1815 try to refuse some request to some models
  if (serverConfig.customModels && req.body) {
    try {
      const clonedBody = await req.text();
      fetchOptions.body = clonedBody;

      const jsonBody = JSON.parse(clonedBody) as { model?: string };

      // not undefined and is false
      if (
        isModelNotavailableInServer(
          serverConfig.customModels,
          jsonBody?.model as string,
          ServiceProvider.SiliconFlow as string,
        )
      ) {
        return NextResponse.json(
          {
            error: true,
            message: `you are not allowed to use ${jsonBody?.model} model`,
          },
          {
            status: 403,
          },
        );
      }
    } catch (e) {
      console.error(`[SiliconFlow] filter`, e);
    }
  }
  try {
    const res = await fetch(fetchUrl, fetchOptions);

    // to prevent browser prompt for credentials
    const newHeaders = new Headers(res.headers);
    newHeaders.delete("www-authenticate");
    // to disable nginx buffering
    newHeaders.set("X-Accel-Buffering", "no");

    return new Response(res.body, {
      status: res.status,
      statusText: res.statusText,
      headers: newHeaders,
    });
  } finally {
    clearTimeout(timeoutId);
  }
}
@@ -0,0 +1,128 @@
import { getServerSideConfig } from "@/app/config/server";
import {
  XAI_BASE_URL,
  ApiPath,
  ModelProvider,
  ServiceProvider,
} from "@/app/constant";
import { prettyObject } from "@/app/utils/format";
import { NextRequest, NextResponse } from "next/server";
import { auth } from "@/app/api/auth";
import { isModelNotavailableInServer } from "@/app/utils/model";

const serverConfig = getServerSideConfig();

export async function handle(
  req: NextRequest,
  { params }: { params: { path: string[] } },
) {
  console.log("[XAI Route] params ", params);

  if (req.method === "OPTIONS") {
    return NextResponse.json({ body: "OK" }, { status: 200 });
  }

  const authResult = auth(req, ModelProvider.XAI);
  if (authResult.error) {
    return NextResponse.json(authResult, {
      status: 401,
    });
  }

  try {
    const response = await request(req);
    return response;
  } catch (e) {
    console.error("[XAI] ", e);
    return NextResponse.json(prettyObject(e));
  }
}

async function request(req: NextRequest) {
  const controller = new AbortController();

  // alibaba use base url or just remove the path
  let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.XAI, "");

  let baseUrl = serverConfig.xaiUrl || XAI_BASE_URL;

  if (!baseUrl.startsWith("http")) {
    baseUrl = `https://${baseUrl}`;
  }

  if (baseUrl.endsWith("/")) {
    baseUrl = baseUrl.slice(0, -1);
  }

  console.log("[Proxy] ", path);
  console.log("[Base Url]", baseUrl);

  const timeoutId = setTimeout(
    () => {
      controller.abort();
    },
    10 * 60 * 1000,
  );

  const fetchUrl = `${baseUrl}${path}`;
  const fetchOptions: RequestInit = {
    headers: {
      "Content-Type": "application/json",
      Authorization: req.headers.get("Authorization") ?? "",
    },
    method: req.method,
    body: req.body,
    redirect: "manual",
    // @ts-ignore
    duplex: "half",
    signal: controller.signal,
  };

  // #1815 try to refuse some request to some models
  if (serverConfig.customModels && req.body) {
    try {
      const clonedBody = await req.text();
      fetchOptions.body = clonedBody;

      const jsonBody = JSON.parse(clonedBody) as { model?: string };

      // not undefined and is false
      if (
        isModelNotavailableInServer(
          serverConfig.customModels,
          jsonBody?.model as string,
          ServiceProvider.XAI as string,
        )
      ) {
        return NextResponse.json(
          {
            error: true,
            message: `you are not allowed to use ${jsonBody?.model} model`,
          },
          {
            status: 403,
          },
        );
      }
    } catch (e) {
      console.error(`[XAI] filter`, e);
    }
  }
  try {
    const res = await fetch(fetchUrl, fetchOptions);

    // to prevent browser prompt for credentials
    const newHeaders = new Headers(res.headers);
    newHeaders.delete("www-authenticate");
    // to disable nginx buffering
    newHeaders.set("X-Accel-Buffering", "no");

    return new Response(res.body, {
      status: res.status,
      statusText: res.statusText,
      headers: newHeaders,
    });
  } finally {
    clearTimeout(timeoutId);
  }
}
@@ -20,6 +20,10 @@ import { QwenApi } from "./platforms/alibaba";
import { HunyuanApi } from "./platforms/tencent";
import { MoonshotApi } from "./platforms/moonshot";
import { SparkApi } from "./platforms/iflytek";
import { DeepSeekApi } from "./platforms/deepseek";
import { XAIApi } from "./platforms/xai";
import { ChatGLMApi } from "./platforms/glm";
import { SiliconflowApi } from "./platforms/siliconflow";

export const ROLES = ["system", "user", "assistant"] as const;
export type MessageRole = (typeof ROLES)[number];

@@ -68,7 +72,7 @@ export interface ChatOptions {
  config: LLMConfig;

  onUpdate?: (message: string, chunk: string) => void;
  onFinish: (message: string) => void;
  onFinish: (message: string, responseRes: Response) => void;
  onError?: (err: Error) => void;
  onController?: (controller: AbortController) => void;
  onBeforeTool?: (tool: ChatMessageTool) => void;

@@ -152,6 +156,18 @@ export class ClientApi {
      case ModelProvider.Iflytek:
        this.llm = new SparkApi();
        break;
      case ModelProvider.DeepSeek:
        this.llm = new DeepSeekApi();
        break;
      case ModelProvider.XAI:
        this.llm = new XAIApi();
        break;
      case ModelProvider.ChatGLM:
        this.llm = new ChatGLMApi();
        break;
      case ModelProvider.SiliconFlow:
        this.llm = new SiliconflowApi();
        break;
      default:
        this.llm = new ChatGPTApi();
    }

@@ -239,6 +255,11 @@ export function getHeaders(ignoreHeaders: boolean = false) {
  const isAlibaba = modelConfig.providerName === ServiceProvider.Alibaba;
  const isMoonshot = modelConfig.providerName === ServiceProvider.Moonshot;
  const isIflytek = modelConfig.providerName === ServiceProvider.Iflytek;
  const isDeepSeek = modelConfig.providerName === ServiceProvider.DeepSeek;
  const isXAI = modelConfig.providerName === ServiceProvider.XAI;
  const isChatGLM = modelConfig.providerName === ServiceProvider.ChatGLM;
  const isSiliconFlow =
    modelConfig.providerName === ServiceProvider.SiliconFlow;
  const isEnabledAccessControl = accessStore.enabledAccessControl();
  const apiKey = isGoogle
    ? accessStore.googleApiKey

@@ -252,6 +273,14 @@ export function getHeaders(ignoreHeaders: boolean = false) {
    ? accessStore.alibabaApiKey
    : isMoonshot
      ? accessStore.moonshotApiKey
      : isXAI
        ? accessStore.xaiApiKey
        : isDeepSeek
          ? accessStore.deepseekApiKey
          : isChatGLM
            ? accessStore.chatglmApiKey
            : isSiliconFlow
              ? accessStore.siliconflowApiKey
              : isIflytek
                ? accessStore.iflytekApiKey && accessStore.iflytekApiSecret
                  ? accessStore.iflytekApiKey + ":" + accessStore.iflytekApiSecret

@@ -266,6 +295,10 @@ export function getHeaders(ignoreHeaders: boolean = false) {
    isAlibaba,
    isMoonshot,
    isIflytek,
    isDeepSeek,
    isXAI,
    isChatGLM,
    isSiliconFlow,
    apiKey,
    isEnabledAccessControl,
  };

@@ -286,6 +319,14 @@ export function getHeaders(ignoreHeaders: boolean = false) {
    isAzure,
    isAnthropic,
    isBaidu,
    isByteDance,
    isAlibaba,
    isMoonshot,
    isIflytek,
    isDeepSeek,
    isXAI,
    isChatGLM,
    isSiliconFlow,
    apiKey,
    isEnabledAccessControl,
  } = getConfig();

@@ -328,6 +369,14 @@ export function getClientApi(provider: ServiceProvider): ClientApi {
      return new ClientApi(ModelProvider.Moonshot);
    case ServiceProvider.Iflytek:
      return new ClientApi(ModelProvider.Iflytek);
    case ServiceProvider.DeepSeek:
      return new ClientApi(ModelProvider.DeepSeek);
    case ServiceProvider.XAI:
      return new ClientApi(ModelProvider.XAI);
    case ServiceProvider.ChatGLM:
      return new ClientApi(ModelProvider.ChatGLM);
    case ServiceProvider.SiliconFlow:
      return new ClientApi(ModelProvider.SiliconFlow);
    default:
      return new ClientApi(ModelProvider.GPT);
  }
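The `onFinish` callback now also receives the underlying `Response`, so callers can inspect the status or headers when a stream completes. Below is a hedged example of how a caller might use the new parameter; the option names come from the `ChatOptions` interface above, while the surrounding object literal and model name are illustrative only.

```typescript
// Illustrative caller: the second onFinish argument is the new part of this change.
const chatOptions = {
  messages: [{ role: "user" as const, content: "Hello" }],
  config: { model: "gpt-4o-mini", stream: true }, // assumed minimal LLMConfig shape
  onUpdate(message: string, chunk: string) {
    console.log("partial chunk of", chunk.length, "chars");
  },
  onFinish(message: string, responseRes: Response) {
    if (!responseRes.ok) {
      console.warn("finished with HTTP", responseRes.status);
    }
    console.log("final message:", message);
  },
  onError(err: Error) {
    console.error(err);
  },
};
```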
@ -1,12 +1,13 @@
|
|||
"use client";
|
||||
import { ApiPath, Alibaba, ALIBABA_BASE_URL } from "@/app/constant";
|
||||
import {
|
||||
ApiPath,
|
||||
Alibaba,
|
||||
ALIBABA_BASE_URL,
|
||||
REQUEST_TIMEOUT_MS,
|
||||
} from "@/app/constant";
|
||||
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
|
||||
|
||||
useAccessStore,
|
||||
useAppConfig,
|
||||
useChatStore,
|
||||
ChatMessageTool,
|
||||
usePluginStore,
|
||||
} from "@/app/store";
|
||||
import { streamWithThink } from "@/app/utils/chat";
|
||||
import {
|
||||
ChatOptions,
|
||||
getHeaders,
|
||||
|
@ -15,14 +16,12 @@ import {
|
|||
SpeechOptions,
|
||||
MultimodalContent,
|
||||
} from "../api";
|
||||
import Locale from "../../locales";
|
||||
import {
|
||||
EventStreamContentType,
|
||||
fetchEventSource,
|
||||
} from "@fortaine/fetch-event-source";
|
||||
import { prettyObject } from "@/app/utils/format";
|
||||
import { getClientConfig } from "@/app/config/client";
|
||||
import { getMessageTextContent } from "@/app/utils";
|
||||
import {
|
||||
getMessageTextContent,
|
||||
getMessageTextContentWithoutThinking,
|
||||
getTimeoutMSByModel,
|
||||
} from "@/app/utils";
|
||||
import { fetch } from "@/app/utils/stream";
|
||||
|
||||
export interface OpenAIListModelResponse {
|
||||
|
@ -92,7 +91,10 @@ export class QwenApi implements LLMApi {
|
|||
async chat(options: ChatOptions) {
|
||||
const messages = options.messages.map((v) => ({
|
||||
role: v.role,
|
||||
content: getMessageTextContent(v),
|
||||
content:
|
||||
v.role === "assistant"
|
||||
? getMessageTextContentWithoutThinking(v)
|
||||
: getMessageTextContent(v),
|
||||
}));
|
||||
|
||||
const modelConfig = {
|
||||
|
@ -122,139 +124,125 @@ export class QwenApi implements LLMApi {
|
|||
options.onController?.(controller);
|
||||
|
||||
try {
|
||||
const headers = {
|
||||
...getHeaders(),
|
||||
"X-DashScope-SSE": shouldStream ? "enable" : "disable",
|
||||
};
|
||||
|
||||
const chatPath = this.path(Alibaba.ChatPath);
|
||||
const chatPayload = {
|
||||
method: "POST",
|
||||
body: JSON.stringify(requestPayload),
|
||||
signal: controller.signal,
|
||||
headers: {
|
||||
...getHeaders(),
|
||||
"X-DashScope-SSE": shouldStream ? "enable" : "disable",
|
||||
},
|
||||
headers: headers,
|
||||
};
|
||||
|
||||
// make a fetch request
|
||||
const requestTimeoutId = setTimeout(
|
||||
() => controller.abort(),
|
||||
REQUEST_TIMEOUT_MS,
|
||||
getTimeoutMSByModel(options.config.model),
|
||||
);
|
||||
|
||||
if (shouldStream) {
|
||||
let responseText = "";
|
||||
let remainText = "";
|
||||
let finished = false;
|
||||
const [tools, funcs] = usePluginStore
|
||||
.getState()
|
||||
.getAsTools(
|
||||
useChatStore.getState().currentSession().mask?.plugin || [],
|
||||
);
|
||||
return streamWithThink(
|
||||
chatPath,
|
||||
requestPayload,
|
||||
headers,
|
||||
tools as any,
|
||||
funcs,
|
||||
controller,
|
||||
// parseSSE
|
||||
(text: string, runTools: ChatMessageTool[]) => {
|
||||
// console.log("parseSSE", text, runTools);
|
||||
const json = JSON.parse(text);
|
||||
const choices = json.output.choices as Array<{
|
||||
message: {
|
||||
content: string | null;
|
||||
tool_calls: ChatMessageTool[];
|
||||
reasoning_content: string | null;
|
||||
};
|
||||
}>;
|
||||
|
||||
// animate the response to make it look smooth
|
||||
function animateResponseText() {
|
||||
if (finished || controller.signal.aborted) {
|
||||
responseText += remainText;
|
||||
console.log("[Response Animation] finished");
|
||||
if (responseText?.length === 0) {
|
||||
options.onError?.(new Error("empty response from server"));
|
||||
}
|
||||
return;
|
||||
}
|
||||
if (!choices?.length) return { isThinking: false, content: "" };
|
||||
|
||||
if (remainText.length > 0) {
|
||||
const fetchCount = Math.max(1, Math.round(remainText.length / 60));
|
||||
const fetchText = remainText.slice(0, fetchCount);
|
||||
responseText += fetchText;
|
||||
remainText = remainText.slice(fetchCount);
|
||||
options.onUpdate?.(responseText, fetchText);
|
||||
}
|
||||
|
||||
requestAnimationFrame(animateResponseText);
|
||||
}
|
||||
|
||||
// start animation
|
||||
animateResponseText();
|
||||
|
||||
const finish = () => {
|
||||
if (!finished) {
|
||||
finished = true;
|
||||
options.onFinish(responseText + remainText);
|
||||
}
|
||||
};
|
||||
|
||||
controller.signal.onabort = finish;
|
||||
|
||||
fetchEventSource(chatPath, {
|
||||
fetch: fetch as any,
|
||||
...chatPayload,
|
||||
async onopen(res) {
|
||||
clearTimeout(requestTimeoutId);
|
||||
const contentType = res.headers.get("content-type");
|
||||
console.log(
|
||||
"[Alibaba] request response content type: ",
|
||||
contentType,
|
||||
);
|
||||
|
||||
if (contentType?.startsWith("text/plain")) {
|
||||
responseText = await res.clone().text();
|
||||
return finish();
|
||||
const tool_calls = choices[0]?.message?.tool_calls;
|
||||
if (tool_calls?.length > 0) {
|
||||
const index = tool_calls[0]?.index;
|
||||
const id = tool_calls[0]?.id;
|
||||
const args = tool_calls[0]?.function?.arguments;
|
||||
if (id) {
|
||||
runTools.push({
|
||||
id,
|
||||
type: tool_calls[0]?.type,
|
||||
function: {
|
||||
name: tool_calls[0]?.function?.name as string,
|
||||
arguments: args,
|
||||
},
|
||||
});
|
||||
} else {
|
||||
// @ts-ignore
|
||||
runTools[index]["function"]["arguments"] += args;
|
||||
}
|
||||
}
|
||||
|
||||
const reasoning = choices[0]?.message?.reasoning_content;
|
||||
const content = choices[0]?.message?.content;
|
||||
|
||||
// Skip if both content and reasoning_content are empty or null
|
||||
if (
|
||||
!res.ok ||
|
||||
!res.headers
|
||||
.get("content-type")
|
||||
?.startsWith(EventStreamContentType) ||
|
||||
res.status !== 200
|
||||
(!reasoning || reasoning.length === 0) &&
|
||||
(!content || content.length === 0)
|
||||
) {
|
||||
const responseTexts = [responseText];
|
||||
let extraInfo = await res.clone().text();
|
||||
try {
|
||||
const resJson = await res.clone().json();
|
||||
extraInfo = prettyObject(resJson);
|
||||
} catch {}
|
||||
|
||||
if (res.status === 401) {
|
||||
responseTexts.push(Locale.Error.Unauthorized);
|
||||
}
|
||||
|
||||
if (extraInfo) {
|
||||
responseTexts.push(extraInfo);
|
||||
}
|
||||
|
||||
responseText = responseTexts.join("\n\n");
|
||||
|
||||
return finish();
|
||||
return {
|
||||
isThinking: false,
|
||||
content: "",
|
||||
};
|
||||
}
|
||||
},
|
||||
onmessage(msg) {
|
||||
if (msg.data === "[DONE]" || finished) {
|
||||
return finish();
|
||||
}
|
||||
const text = msg.data;
|
||||
try {
|
||||
const json = JSON.parse(text);
|
||||
const choices = json.output.choices as Array<{
|
||||
message: { content: string };
|
||||
}>;
|
||||
const delta = choices[0]?.message?.content;
|
||||
if (delta) {
|
||||
remainText += delta;
|
||||
}
|
||||
} catch (e) {
|
||||
console.error("[Request] parse error", text, msg);
|
||||
|
||||
if (reasoning && reasoning.length > 0) {
|
||||
return {
|
||||
isThinking: true,
|
||||
content: reasoning,
|
||||
};
|
||||
} else if (content && content.length > 0) {
|
||||
return {
|
||||
isThinking: false,
|
||||
content: content,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
isThinking: false,
|
||||
content: "",
|
||||
};
|
||||
},
|
||||
onclose() {
|
||||
finish();
|
||||
// processToolMessage, include tool_calls message and tool call results
|
||||
(
|
||||
requestPayload: RequestPayload,
|
||||
toolCallMessage: any,
|
||||
toolCallResult: any[],
|
||||
) => {
|
||||
requestPayload?.input?.messages?.splice(
|
||||
requestPayload?.input?.messages?.length,
|
||||
0,
|
||||
toolCallMessage,
|
||||
...toolCallResult,
|
||||
);
|
||||
},
|
||||
onerror(e) {
|
||||
options.onError?.(e);
|
||||
throw e;
|
||||
},
|
||||
openWhenHidden: true,
|
||||
});
|
||||
options,
|
||||
);
|
||||
} else {
|
||||
const res = await fetch(chatPath, chatPayload);
|
||||
clearTimeout(requestTimeoutId);
|
||||
|
||||
const resJson = await res.json();
|
||||
const message = this.extractMessage(resJson);
|
||||
options.onFinish(message);
|
||||
options.onFinish(message, res);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("[Request] failed to make a chat request", e);
|
||||
|
|
|
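The Qwen diff above switches to `streamWithThink`, whose parseSSE callback returns an `{ isThinking, content }` pair instead of a plain string. A minimal, self-contained sketch of that parse step; the DashScope chunk shape is assumed only from the code shown in this diff, everything else is illustrative:

```ts
// Sketch of the parseSSE contract used by streamWithThink above.
interface ThinkChunk {
  isThinking: boolean;
  content: string;
}

function parseQwenChunk(text: string): ThinkChunk {
  const json = JSON.parse(text);
  const message = json?.output?.choices?.[0]?.message ?? {};
  const reasoning: string | null = message.reasoning_content ?? null;
  const content: string | null = message.content ?? null;

  // Prefer reasoning while the model is still "thinking".
  if (reasoning && reasoning.length > 0) {
    return { isThinking: true, content: reasoning };
  }
  if (content && content.length > 0) {
    return { isThinking: false, content };
  }
  // Both empty: emit nothing so the UI does not flicker.
  return { isThinking: false, content: "" };
}
```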
@ -13,6 +13,7 @@ import { getMessageTextContent, isVisionModel } from "@/app/utils";
|
|||
import { preProcessImageContent, stream } from "@/app/utils/chat";
|
||||
import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
|
||||
import { RequestPayload } from "./openai";
|
||||
import { fetch } from "@/app/utils/stream";
|
||||
|
||||
export type MultiBlockContent = {
|
||||
type: "image" | "text";
|
||||
|
@ -316,13 +317,14 @@ export class ClaudeApi implements LLMApi {
|
|||
};
|
||||
|
||||
try {
|
||||
controller.signal.onabort = () => options.onFinish("");
|
||||
controller.signal.onabort = () =>
|
||||
options.onFinish("", new Response(null, { status: 400 }));
|
||||
|
||||
const res = await fetch(path, payload);
|
||||
const resJson = await res.json();
|
||||
|
||||
const message = this.extractMessage(resJson);
|
||||
options.onFinish(message);
|
||||
options.onFinish(message, res);
|
||||
} catch (e) {
|
||||
console.error("failed to chat", e);
|
||||
options.onError?.(e as Error);
|
||||
|
|
|
@ -1,10 +1,5 @@
|
|||
"use client";
|
||||
import {
|
||||
ApiPath,
|
||||
Baidu,
|
||||
BAIDU_BASE_URL,
|
||||
REQUEST_TIMEOUT_MS,
|
||||
} from "@/app/constant";
|
||||
import { ApiPath, Baidu, BAIDU_BASE_URL } from "@/app/constant";
|
||||
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
|
||||
import { getAccessToken } from "@/app/utils/baidu";
|
||||
|
||||
|
@ -23,7 +18,7 @@ import {
|
|||
} from "@fortaine/fetch-event-source";
|
||||
import { prettyObject } from "@/app/utils/format";
|
||||
import { getClientConfig } from "@/app/config/client";
|
||||
import { getMessageTextContent } from "@/app/utils";
|
||||
import { getMessageTextContent, getTimeoutMSByModel } from "@/app/utils";
|
||||
import { fetch } from "@/app/utils/stream";
|
||||
|
||||
export interface OpenAIListModelResponse {
|
||||
|
@ -155,13 +150,14 @@ export class ErnieApi implements LLMApi {
|
|||
// make a fetch request
|
||||
const requestTimeoutId = setTimeout(
|
||||
() => controller.abort(),
|
||||
REQUEST_TIMEOUT_MS,
|
||||
getTimeoutMSByModel(options.config.model),
|
||||
);
|
||||
|
||||
if (shouldStream) {
|
||||
let responseText = "";
|
||||
let remainText = "";
|
||||
let finished = false;
|
||||
let responseRes: Response;
|
||||
|
||||
// animate the response to make it look smooth
|
||||
function animateResponseText() {
|
||||
|
@ -191,7 +187,7 @@ export class ErnieApi implements LLMApi {
|
|||
const finish = () => {
|
||||
if (!finished) {
|
||||
finished = true;
|
||||
options.onFinish(responseText + remainText);
|
||||
options.onFinish(responseText + remainText, responseRes);
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -204,7 +200,7 @@ export class ErnieApi implements LLMApi {
|
|||
clearTimeout(requestTimeoutId);
|
||||
const contentType = res.headers.get("content-type");
|
||||
console.log("[Baidu] request response content type: ", contentType);
|
||||
|
||||
responseRes = res;
|
||||
if (contentType?.startsWith("text/plain")) {
|
||||
responseText = await res.clone().text();
|
||||
return finish();
|
||||
|
@ -267,7 +263,7 @@ export class ErnieApi implements LLMApi {
|
|||
|
||||
const resJson = await res.json();
|
||||
const message = resJson?.result;
|
||||
options.onFinish(message);
|
||||
options.onFinish(message, res);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("[Request] failed to make a chat request", e);
|
||||
|
|
|
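A recurring change across these clients is that `onFinish` now receives the HTTP `Response` alongside the final message, with a synthetic `Response` on abort as in the Anthropic client above. A hedged sketch of that callback shape; the real `ChatOptions` type lives in app/client/api and is not reproduced in this diff:

```ts
// Assumed shape of the widened onFinish callback.
type OnFinishSketch = (message: string, res: Response) => void;

const onFinish: OnFinishSketch = (message, res) => {
  if (!res.ok) {
    // The UI can now react to HTTP status codes instead of guessing from text.
    console.warn("[Chat] finished with status", res.status);
  }
  console.log("[Chat] final message:", message);
};

// Abort path: no network response exists, so a placeholder is passed,
// mirroring `new Response(null, { status: 400 })` in the Anthropic client.
onFinish("", new Response(null, { status: 400 }));
```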
@ -1,11 +1,12 @@
|
|||
"use client";
|
||||
import { ApiPath, ByteDance, BYTEDANCE_BASE_URL } from "@/app/constant";
|
||||
import {
|
||||
ApiPath,
|
||||
ByteDance,
|
||||
BYTEDANCE_BASE_URL,
|
||||
REQUEST_TIMEOUT_MS,
|
||||
} from "@/app/constant";
|
||||
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
|
||||
useAccessStore,
|
||||
useAppConfig,
|
||||
useChatStore,
|
||||
ChatMessageTool,
|
||||
usePluginStore,
|
||||
} from "@/app/store";
|
||||
|
||||
import {
|
||||
ChatOptions,
|
||||
|
@ -15,14 +16,14 @@ import {
|
|||
MultimodalContent,
|
||||
SpeechOptions,
|
||||
} from "../api";
|
||||
import Locale from "../../locales";
|
||||
import {
|
||||
EventStreamContentType,
|
||||
fetchEventSource,
|
||||
} from "@fortaine/fetch-event-source";
|
||||
import { prettyObject } from "@/app/utils/format";
|
||||
|
||||
import { streamWithThink } from "@/app/utils/chat";
|
||||
import { getClientConfig } from "@/app/config/client";
|
||||
import { getMessageTextContent } from "@/app/utils";
|
||||
import { preProcessImageContent } from "@/app/utils/chat";
|
||||
import {
|
||||
getMessageTextContentWithoutThinking,
|
||||
getTimeoutMSByModel,
|
||||
} from "@/app/utils";
|
||||
import { fetch } from "@/app/utils/stream";
|
||||
|
||||
export interface OpenAIListModelResponse {
|
||||
|
@ -34,7 +35,7 @@ export interface OpenAIListModelResponse {
|
|||
}>;
|
||||
}
|
||||
|
||||
interface RequestPayload {
|
||||
interface RequestPayloadForByteDance {
|
||||
messages: {
|
||||
role: "system" | "user" | "assistant";
|
||||
content: string | MultimodalContent[];
|
||||
|
@ -84,10 +85,14 @@ export class DoubaoApi implements LLMApi {
|
|||
}
|
||||
|
||||
async chat(options: ChatOptions) {
|
||||
const messages = options.messages.map((v) => ({
|
||||
role: v.role,
|
||||
content: getMessageTextContent(v),
|
||||
}));
|
||||
const messages: ChatOptions["messages"] = [];
|
||||
for (const v of options.messages) {
|
||||
const content =
|
||||
v.role === "assistant"
|
||||
? getMessageTextContentWithoutThinking(v)
|
||||
: await preProcessImageContent(v.content);
|
||||
messages.push({ role: v.role, content });
|
||||
}
|
||||
|
||||
const modelConfig = {
|
||||
...useAppConfig.getState().modelConfig,
|
||||
|
@ -98,7 +103,7 @@ export class DoubaoApi implements LLMApi {
|
|||
};
|
||||
|
||||
const shouldStream = !!options.config.stream;
|
||||
const requestPayload: RequestPayload = {
|
||||
const requestPayload: RequestPayloadForByteDance = {
|
||||
messages,
|
||||
stream: shouldStream,
|
||||
model: modelConfig.model,
|
||||
|
@ -123,125 +128,108 @@ export class DoubaoApi implements LLMApi {
|
|||
// make a fetch request
|
||||
const requestTimeoutId = setTimeout(
|
||||
() => controller.abort(),
|
||||
REQUEST_TIMEOUT_MS,
|
||||
getTimeoutMSByModel(options.config.model),
|
||||
);
|
||||
|
||||
if (shouldStream) {
|
||||
let responseText = "";
|
||||
let remainText = "";
|
||||
let finished = false;
|
||||
const [tools, funcs] = usePluginStore
|
||||
.getState()
|
||||
.getAsTools(
|
||||
useChatStore.getState().currentSession().mask?.plugin || [],
|
||||
);
|
||||
return streamWithThink(
|
||||
chatPath,
|
||||
requestPayload,
|
||||
getHeaders(),
|
||||
tools as any,
|
||||
funcs,
|
||||
controller,
|
||||
// parseSSE
|
||||
(text: string, runTools: ChatMessageTool[]) => {
|
||||
// console.log("parseSSE", text, runTools);
|
||||
const json = JSON.parse(text);
|
||||
const choices = json.choices as Array<{
|
||||
delta: {
|
||||
content: string | null;
|
||||
tool_calls: ChatMessageTool[];
|
||||
reasoning_content: string | null;
|
||||
};
|
||||
}>;
|
||||
|
||||
// animate the response to make it look smooth
|
||||
function animateResponseText() {
|
||||
if (finished || controller.signal.aborted) {
|
||||
responseText += remainText;
|
||||
console.log("[Response Animation] finished");
|
||||
if (responseText?.length === 0) {
|
||||
options.onError?.(new Error("empty response from server"));
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (remainText.length > 0) {
|
||||
const fetchCount = Math.max(1, Math.round(remainText.length / 60));
|
||||
const fetchText = remainText.slice(0, fetchCount);
|
||||
responseText += fetchText;
|
||||
remainText = remainText.slice(fetchCount);
|
||||
options.onUpdate?.(responseText, fetchText);
|
||||
}
|
||||
|
||||
requestAnimationFrame(animateResponseText);
|
||||
}
|
||||
|
||||
// start animation
|
||||
animateResponseText();
|
||||
|
||||
const finish = () => {
|
||||
if (!finished) {
|
||||
finished = true;
|
||||
options.onFinish(responseText + remainText);
|
||||
}
|
||||
};
|
||||
|
||||
controller.signal.onabort = finish;
|
||||
|
||||
fetchEventSource(chatPath, {
|
||||
fetch: fetch as any,
|
||||
...chatPayload,
|
||||
async onopen(res) {
|
||||
clearTimeout(requestTimeoutId);
|
||||
const contentType = res.headers.get("content-type");
|
||||
console.log(
|
||||
"[ByteDance] request response content type: ",
|
||||
contentType,
|
||||
);
|
||||
|
||||
if (contentType?.startsWith("text/plain")) {
|
||||
responseText = await res.clone().text();
|
||||
return finish();
|
||||
if (!choices?.length) return { isThinking: false, content: "" };
|
||||
|
||||
const tool_calls = choices[0]?.delta?.tool_calls;
|
||||
if (tool_calls?.length > 0) {
|
||||
const index = tool_calls[0]?.index;
|
||||
const id = tool_calls[0]?.id;
|
||||
const args = tool_calls[0]?.function?.arguments;
|
||||
if (id) {
|
||||
runTools.push({
|
||||
id,
|
||||
type: tool_calls[0]?.type,
|
||||
function: {
|
||||
name: tool_calls[0]?.function?.name as string,
|
||||
arguments: args,
|
||||
},
|
||||
});
|
||||
} else {
|
||||
// @ts-ignore
|
||||
runTools[index]["function"]["arguments"] += args;
|
||||
}
|
||||
}
|
||||
const reasoning = choices[0]?.delta?.reasoning_content;
|
||||
const content = choices[0]?.delta?.content;
|
||||
|
||||
// Skip if both content and reasoning_content are empty or null
|
||||
if (
|
||||
!res.ok ||
|
||||
!res.headers
|
||||
.get("content-type")
|
||||
?.startsWith(EventStreamContentType) ||
|
||||
res.status !== 200
|
||||
(!reasoning || reasoning.length === 0) &&
|
||||
(!content || content.length === 0)
|
||||
) {
|
||||
const responseTexts = [responseText];
|
||||
let extraInfo = await res.clone().text();
|
||||
try {
|
||||
const resJson = await res.clone().json();
|
||||
extraInfo = prettyObject(resJson);
|
||||
} catch {}
|
||||
|
||||
if (res.status === 401) {
|
||||
responseTexts.push(Locale.Error.Unauthorized);
|
||||
}
|
||||
|
||||
if (extraInfo) {
|
||||
responseTexts.push(extraInfo);
|
||||
}
|
||||
|
||||
responseText = responseTexts.join("\n\n");
|
||||
|
||||
return finish();
|
||||
return {
|
||||
isThinking: false,
|
||||
content: "",
|
||||
};
|
||||
}
|
||||
},
|
||||
onmessage(msg) {
|
||||
if (msg.data === "[DONE]" || finished) {
|
||||
return finish();
|
||||
}
|
||||
const text = msg.data;
|
||||
try {
|
||||
const json = JSON.parse(text);
|
||||
const choices = json.choices as Array<{
|
||||
delta: { content: string };
|
||||
}>;
|
||||
const delta = choices[0]?.delta?.content;
|
||||
if (delta) {
|
||||
remainText += delta;
|
||||
}
|
||||
} catch (e) {
|
||||
console.error("[Request] parse error", text, msg);
|
||||
|
||||
if (reasoning && reasoning.length > 0) {
|
||||
return {
|
||||
isThinking: true,
|
||||
content: reasoning,
|
||||
};
|
||||
} else if (content && content.length > 0) {
|
||||
return {
|
||||
isThinking: false,
|
||||
content: content,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
isThinking: false,
|
||||
content: "",
|
||||
};
|
||||
},
|
||||
onclose() {
|
||||
finish();
|
||||
// processToolMessage, include tool_calls message and tool call results
|
||||
(
|
||||
requestPayload: RequestPayloadForByteDance,
|
||||
toolCallMessage: any,
|
||||
toolCallResult: any[],
|
||||
) => {
|
||||
requestPayload?.messages?.splice(
|
||||
requestPayload?.messages?.length,
|
||||
0,
|
||||
toolCallMessage,
|
||||
...toolCallResult,
|
||||
);
|
||||
},
|
||||
onerror(e) {
|
||||
options.onError?.(e);
|
||||
throw e;
|
||||
},
|
||||
openWhenHidden: true,
|
||||
});
|
||||
options,
|
||||
);
|
||||
} else {
|
||||
const res = await fetch(chatPath, chatPayload);
|
||||
clearTimeout(requestTimeoutId);
|
||||
|
||||
const resJson = await res.json();
|
||||
const message = this.extractMessage(resJson);
|
||||
options.onFinish(message);
|
||||
options.onFinish(message, res);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("[Request] failed to make a chat request", e);
|
||||
|
|
|
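Several clients above now route assistant messages through `getMessageTextContentWithoutThinking`. Its implementation is not part of this patch; the sketch below is only an assumption of what such a helper might do, namely dropping a `<think>…</think>` reasoning block before history is resent:

```ts
// Hypothetical sketch only: the real getMessageTextContentWithoutThinking
// lives in app/utils and is not shown in this diff.
function stripThinkingSketch(text: string): string {
  // Remove any <think>...</think> block, keep the final answer.
  return text.replace(/<think>[\s\S]*?<\/think>/g, "").trim();
}

console.log(stripThinkingSketch("<think>internal steps</think>final answer"));
// -> "final answer"
```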
@ -0,0 +1,234 @@
|
|||
"use client";
|
||||
// Azure and OpenAI use the same models, so the same LLMApi is shared.
|
||||
import { ApiPath, DEEPSEEK_BASE_URL, DeepSeek } from "@/app/constant";
|
||||
import {
|
||||
useAccessStore,
|
||||
useAppConfig,
|
||||
useChatStore,
|
||||
ChatMessageTool,
|
||||
usePluginStore,
|
||||
} from "@/app/store";
|
||||
import { streamWithThink } from "@/app/utils/chat";
|
||||
import {
|
||||
ChatOptions,
|
||||
getHeaders,
|
||||
LLMApi,
|
||||
LLMModel,
|
||||
SpeechOptions,
|
||||
} from "../api";
|
||||
import { getClientConfig } from "@/app/config/client";
|
||||
import {
|
||||
getMessageTextContent,
|
||||
getMessageTextContentWithoutThinking,
|
||||
getTimeoutMSByModel,
|
||||
} from "@/app/utils";
|
||||
import { RequestPayload } from "./openai";
|
||||
import { fetch } from "@/app/utils/stream";
|
||||
|
||||
export class DeepSeekApi implements LLMApi {
|
||||
private disableListModels = true;
|
||||
|
||||
path(path: string): string {
|
||||
const accessStore = useAccessStore.getState();
|
||||
|
||||
let baseUrl = "";
|
||||
|
||||
if (accessStore.useCustomConfig) {
|
||||
baseUrl = accessStore.deepseekUrl;
|
||||
}
|
||||
|
||||
if (baseUrl.length === 0) {
|
||||
const isApp = !!getClientConfig()?.isApp;
|
||||
const apiPath = ApiPath.DeepSeek;
|
||||
baseUrl = isApp ? DEEPSEEK_BASE_URL : apiPath;
|
||||
}
|
||||
|
||||
if (baseUrl.endsWith("/")) {
|
||||
baseUrl = baseUrl.slice(0, baseUrl.length - 1);
|
||||
}
|
||||
if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.DeepSeek)) {
|
||||
baseUrl = "https://" + baseUrl;
|
||||
}
|
||||
|
||||
console.log("[Proxy Endpoint] ", baseUrl, path);
|
||||
|
||||
return [baseUrl, path].join("/");
|
||||
}
|
||||
|
||||
extractMessage(res: any) {
|
||||
return res.choices?.at(0)?.message?.content ?? "";
|
||||
}
|
||||
|
||||
speech(options: SpeechOptions): Promise<ArrayBuffer> {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
|
||||
async chat(options: ChatOptions) {
|
||||
const messages: ChatOptions["messages"] = [];
|
||||
for (const v of options.messages) {
|
||||
if (v.role === "assistant") {
|
||||
const content = getMessageTextContentWithoutThinking(v);
|
||||
messages.push({ role: v.role, content });
|
||||
} else {
|
||||
const content = getMessageTextContent(v);
|
||||
messages.push({ role: v.role, content });
|
||||
}
|
||||
}
|
||||
|
||||
const modelConfig = {
|
||||
...useAppConfig.getState().modelConfig,
|
||||
...useChatStore.getState().currentSession().mask.modelConfig,
|
||||
...{
|
||||
model: options.config.model,
|
||||
providerName: options.config.providerName,
|
||||
},
|
||||
};
|
||||
|
||||
const requestPayload: RequestPayload = {
|
||||
messages,
|
||||
stream: options.config.stream,
|
||||
model: modelConfig.model,
|
||||
temperature: modelConfig.temperature,
|
||||
presence_penalty: modelConfig.presence_penalty,
|
||||
frequency_penalty: modelConfig.frequency_penalty,
|
||||
top_p: modelConfig.top_p,
|
||||
// max_tokens: Math.max(modelConfig.max_tokens, 1024),
|
||||
// Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
|
||||
};
|
||||
|
||||
console.log("[Request] openai payload: ", requestPayload);
|
||||
|
||||
const shouldStream = !!options.config.stream;
|
||||
const controller = new AbortController();
|
||||
options.onController?.(controller);
|
||||
|
||||
try {
|
||||
const chatPath = this.path(DeepSeek.ChatPath);
|
||||
const chatPayload = {
|
||||
method: "POST",
|
||||
body: JSON.stringify(requestPayload),
|
||||
signal: controller.signal,
|
||||
headers: getHeaders(),
|
||||
};
|
||||
|
||||
// make a fetch request
|
||||
const requestTimeoutId = setTimeout(
|
||||
() => controller.abort(),
|
||||
getTimeoutMSByModel(options.config.model),
|
||||
);
|
||||
|
||||
if (shouldStream) {
|
||||
const [tools, funcs] = usePluginStore
|
||||
.getState()
|
||||
.getAsTools(
|
||||
useChatStore.getState().currentSession().mask?.plugin || [],
|
||||
);
|
||||
return streamWithThink(
|
||||
chatPath,
|
||||
requestPayload,
|
||||
getHeaders(),
|
||||
tools as any,
|
||||
funcs,
|
||||
controller,
|
||||
// parseSSE
|
||||
(text: string, runTools: ChatMessageTool[]) => {
|
||||
// console.log("parseSSE", text, runTools);
|
||||
const json = JSON.parse(text);
|
||||
const choices = json.choices as Array<{
|
||||
delta: {
|
||||
content: string | null;
|
||||
tool_calls: ChatMessageTool[];
|
||||
reasoning_content: string | null;
|
||||
};
|
||||
}>;
|
||||
const tool_calls = choices[0]?.delta?.tool_calls;
|
||||
if (tool_calls?.length > 0) {
|
||||
const index = tool_calls[0]?.index;
|
||||
const id = tool_calls[0]?.id;
|
||||
const args = tool_calls[0]?.function?.arguments;
|
||||
if (id) {
|
||||
runTools.push({
|
||||
id,
|
||||
type: tool_calls[0]?.type,
|
||||
function: {
|
||||
name: tool_calls[0]?.function?.name as string,
|
||||
arguments: args,
|
||||
},
|
||||
});
|
||||
} else {
|
||||
// @ts-ignore
|
||||
runTools[index]["function"]["arguments"] += args;
|
||||
}
|
||||
}
|
||||
const reasoning = choices[0]?.delta?.reasoning_content;
|
||||
const content = choices[0]?.delta?.content;
|
||||
|
||||
// Skip if both content and reasoning_content are empty or null
|
||||
if (
|
||||
(!reasoning || reasoning.length === 0) &&
|
||||
(!content || content.length === 0)
|
||||
) {
|
||||
return {
|
||||
isThinking: false,
|
||||
content: "",
|
||||
};
|
||||
}
|
||||
|
||||
if (reasoning && reasoning.length > 0) {
|
||||
return {
|
||||
isThinking: true,
|
||||
content: reasoning,
|
||||
};
|
||||
} else if (content && content.length > 0) {
|
||||
return {
|
||||
isThinking: false,
|
||||
content: content,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
isThinking: false,
|
||||
content: "",
|
||||
};
|
||||
},
|
||||
// processToolMessage, include tool_calls message and tool call results
|
||||
(
|
||||
requestPayload: RequestPayload,
|
||||
toolCallMessage: any,
|
||||
toolCallResult: any[],
|
||||
) => {
|
||||
// @ts-ignore
|
||||
requestPayload?.messages?.splice(
|
||||
// @ts-ignore
|
||||
requestPayload?.messages?.length,
|
||||
0,
|
||||
toolCallMessage,
|
||||
...toolCallResult,
|
||||
);
|
||||
},
|
||||
options,
|
||||
);
|
||||
} else {
|
||||
const res = await fetch(chatPath, chatPayload);
|
||||
clearTimeout(requestTimeoutId);
|
||||
|
||||
const resJson = await res.json();
|
||||
const message = this.extractMessage(resJson);
|
||||
options.onFinish(message, res);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("[Request] failed to make a chat request", e);
|
||||
options.onError?.(e as Error);
|
||||
}
|
||||
}
|
||||
async usage() {
|
||||
return {
|
||||
used: 0,
|
||||
total: 0,
|
||||
};
|
||||
}
|
||||
|
||||
async models(): Promise<LLMModel[]> {
|
||||
return [];
|
||||
}
|
||||
}
|
|
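Throughout these diffs the fixed `REQUEST_TIMEOUT_MS` is replaced by `getTimeoutMSByModel(model)`. Its implementation is also outside this patch; the sketch below only illustrates the idea, with the constants and model prefixes assumed for illustration:

```ts
// Illustrative constants only; the real values live in app/constant.ts.
const REQUEST_TIMEOUT_MS = 60_000;
const REQUEST_TIMEOUT_MS_FOR_THINKING = 5 * 60_000;

// Assumed routing: reasoning-style models get a longer budget than chat models.
function getTimeoutMSByModelSketch(model: string): number {
  const isReasoningModel =
    model.startsWith("o1") ||
    model.startsWith("o3") ||
    model.includes("deepseek-r") ||
    model.includes("-thinking");
  return isReasoningModel ? REQUEST_TIMEOUT_MS_FOR_THINKING : REQUEST_TIMEOUT_MS;
}

// Usage mirrors the call sites above:
const controller = new AbortController();
const requestTimeoutId = setTimeout(
  () => controller.abort(),
  getTimeoutMSByModelSketch("deepseek-reasoner"),
);
clearTimeout(requestTimeoutId); // cleared once the response arrives
```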
@ -0,0 +1,292 @@
|
|||
"use client";
|
||||
import { ApiPath, CHATGLM_BASE_URL, ChatGLM } from "@/app/constant";
|
||||
import {
|
||||
useAccessStore,
|
||||
useAppConfig,
|
||||
useChatStore,
|
||||
ChatMessageTool,
|
||||
usePluginStore,
|
||||
} from "@/app/store";
|
||||
import { stream } from "@/app/utils/chat";
|
||||
import {
|
||||
ChatOptions,
|
||||
getHeaders,
|
||||
LLMApi,
|
||||
LLMModel,
|
||||
SpeechOptions,
|
||||
} from "../api";
|
||||
import { getClientConfig } from "@/app/config/client";
|
||||
import {
|
||||
getMessageTextContent,
|
||||
isVisionModel,
|
||||
getTimeoutMSByModel,
|
||||
} from "@/app/utils";
|
||||
import { RequestPayload } from "./openai";
|
||||
import { fetch } from "@/app/utils/stream";
|
||||
import { preProcessImageContent } from "@/app/utils/chat";
|
||||
|
||||
interface BasePayload {
|
||||
model: string;
|
||||
}
|
||||
|
||||
interface ChatPayload extends BasePayload {
|
||||
messages: ChatOptions["messages"];
|
||||
stream?: boolean;
|
||||
temperature?: number;
|
||||
presence_penalty?: number;
|
||||
frequency_penalty?: number;
|
||||
top_p?: number;
|
||||
}
|
||||
|
||||
interface ImageGenerationPayload extends BasePayload {
|
||||
prompt: string;
|
||||
size?: string;
|
||||
user_id?: string;
|
||||
}
|
||||
|
||||
interface VideoGenerationPayload extends BasePayload {
|
||||
prompt: string;
|
||||
duration?: number;
|
||||
resolution?: string;
|
||||
user_id?: string;
|
||||
}
|
||||
|
||||
type ModelType = "chat" | "image" | "video";
|
||||
|
||||
export class ChatGLMApi implements LLMApi {
|
||||
private disableListModels = true;
|
||||
|
||||
private getModelType(model: string): ModelType {
|
||||
if (model.startsWith("cogview-")) return "image";
|
||||
if (model.startsWith("cogvideo-")) return "video";
|
||||
return "chat";
|
||||
}
|
||||
|
||||
private getModelPath(type: ModelType): string {
|
||||
switch (type) {
|
||||
case "image":
|
||||
return ChatGLM.ImagePath;
|
||||
case "video":
|
||||
return ChatGLM.VideoPath;
|
||||
default:
|
||||
return ChatGLM.ChatPath;
|
||||
}
|
||||
}
|
||||
|
||||
private createPayload(
|
||||
messages: ChatOptions["messages"],
|
||||
modelConfig: any,
|
||||
options: ChatOptions,
|
||||
): BasePayload {
|
||||
const modelType = this.getModelType(modelConfig.model);
|
||||
const lastMessage = messages[messages.length - 1];
|
||||
const prompt =
|
||||
typeof lastMessage.content === "string"
|
||||
? lastMessage.content
|
||||
: lastMessage.content.map((c) => c.text).join("\n");
|
||||
|
||||
switch (modelType) {
|
||||
case "image":
|
||||
return {
|
||||
model: modelConfig.model,
|
||||
prompt,
|
||||
size: options.config.size,
|
||||
} as ImageGenerationPayload;
|
||||
default:
|
||||
return {
|
||||
messages,
|
||||
stream: options.config.stream,
|
||||
model: modelConfig.model,
|
||||
temperature: modelConfig.temperature,
|
||||
presence_penalty: modelConfig.presence_penalty,
|
||||
frequency_penalty: modelConfig.frequency_penalty,
|
||||
top_p: modelConfig.top_p,
|
||||
} as ChatPayload;
|
||||
}
|
||||
}
|
||||
|
||||
private parseResponse(modelType: ModelType, json: any): string {
|
||||
switch (modelType) {
|
||||
case "image": {
|
||||
const imageUrl = json.data?.[0]?.url;
|
||||
return imageUrl ? `` : "";
|
||||
}
|
||||
case "video": {
|
||||
const videoUrl = json.data?.[0]?.url;
|
||||
return videoUrl ? `<video controls src="${videoUrl}"></video>` : "";
|
||||
}
|
||||
default:
|
||||
return this.extractMessage(json);
|
||||
}
|
||||
}
|
||||
|
||||
path(path: string): string {
|
||||
const accessStore = useAccessStore.getState();
|
||||
let baseUrl = "";
|
||||
|
||||
if (accessStore.useCustomConfig) {
|
||||
baseUrl = accessStore.chatglmUrl;
|
||||
}
|
||||
|
||||
if (baseUrl.length === 0) {
|
||||
const isApp = !!getClientConfig()?.isApp;
|
||||
const apiPath = ApiPath.ChatGLM;
|
||||
baseUrl = isApp ? CHATGLM_BASE_URL : apiPath;
|
||||
}
|
||||
|
||||
if (baseUrl.endsWith("/")) {
|
||||
baseUrl = baseUrl.slice(0, baseUrl.length - 1);
|
||||
}
|
||||
if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.ChatGLM)) {
|
||||
baseUrl = "https://" + baseUrl;
|
||||
}
|
||||
|
||||
console.log("[Proxy Endpoint] ", baseUrl, path);
|
||||
return [baseUrl, path].join("/");
|
||||
}
|
||||
|
||||
extractMessage(res: any) {
|
||||
return res.choices?.at(0)?.message?.content ?? "";
|
||||
}
|
||||
|
||||
speech(options: SpeechOptions): Promise<ArrayBuffer> {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
|
||||
async chat(options: ChatOptions) {
|
||||
const visionModel = isVisionModel(options.config.model);
|
||||
const messages: ChatOptions["messages"] = [];
|
||||
for (const v of options.messages) {
|
||||
const content = visionModel
|
||||
? await preProcessImageContent(v.content)
|
||||
: getMessageTextContent(v);
|
||||
messages.push({ role: v.role, content });
|
||||
}
|
||||
|
||||
const modelConfig = {
|
||||
...useAppConfig.getState().modelConfig,
|
||||
...useChatStore.getState().currentSession().mask.modelConfig,
|
||||
...{
|
||||
model: options.config.model,
|
||||
providerName: options.config.providerName,
|
||||
},
|
||||
};
|
||||
const modelType = this.getModelType(modelConfig.model);
|
||||
const requestPayload = this.createPayload(messages, modelConfig, options);
|
||||
const path = this.path(this.getModelPath(modelType));
|
||||
|
||||
console.log(`[Request] glm ${modelType} payload: `, requestPayload);
|
||||
|
||||
const controller = new AbortController();
|
||||
options.onController?.(controller);
|
||||
|
||||
try {
|
||||
const chatPayload = {
|
||||
method: "POST",
|
||||
body: JSON.stringify(requestPayload),
|
||||
signal: controller.signal,
|
||||
headers: getHeaders(),
|
||||
};
|
||||
|
||||
const requestTimeoutId = setTimeout(
|
||||
() => controller.abort(),
|
||||
getTimeoutMSByModel(options.config.model),
|
||||
);
|
||||
|
||||
if (modelType === "image" || modelType === "video") {
|
||||
const res = await fetch(path, chatPayload);
|
||||
clearTimeout(requestTimeoutId);
|
||||
|
||||
const resJson = await res.json();
|
||||
console.log(`[Response] glm ${modelType}:`, resJson);
|
||||
const message = this.parseResponse(modelType, resJson);
|
||||
options.onFinish(message, res);
|
||||
return;
|
||||
}
|
||||
|
||||
const shouldStream = !!options.config.stream;
|
||||
if (shouldStream) {
|
||||
const [tools, funcs] = usePluginStore
|
||||
.getState()
|
||||
.getAsTools(
|
||||
useChatStore.getState().currentSession().mask?.plugin || [],
|
||||
);
|
||||
return stream(
|
||||
path,
|
||||
requestPayload,
|
||||
getHeaders(),
|
||||
tools as any,
|
||||
funcs,
|
||||
controller,
|
||||
// parseSSE
|
||||
(text: string, runTools: ChatMessageTool[]) => {
|
||||
const json = JSON.parse(text);
|
||||
const choices = json.choices as Array<{
|
||||
delta: {
|
||||
content: string;
|
||||
tool_calls: ChatMessageTool[];
|
||||
};
|
||||
}>;
|
||||
const tool_calls = choices[0]?.delta?.tool_calls;
|
||||
if (tool_calls?.length > 0) {
|
||||
const index = tool_calls[0]?.index;
|
||||
const id = tool_calls[0]?.id;
|
||||
const args = tool_calls[0]?.function?.arguments;
|
||||
if (id) {
|
||||
runTools.push({
|
||||
id,
|
||||
type: tool_calls[0]?.type,
|
||||
function: {
|
||||
name: tool_calls[0]?.function?.name as string,
|
||||
arguments: args,
|
||||
},
|
||||
});
|
||||
} else {
|
||||
// @ts-ignore
|
||||
runTools[index]["function"]["arguments"] += args;
|
||||
}
|
||||
}
|
||||
return choices[0]?.delta?.content;
|
||||
},
|
||||
// processToolMessage
|
||||
(
|
||||
requestPayload: RequestPayload,
|
||||
toolCallMessage: any,
|
||||
toolCallResult: any[],
|
||||
) => {
|
||||
// @ts-ignore
|
||||
requestPayload?.messages?.splice(
|
||||
// @ts-ignore
|
||||
requestPayload?.messages?.length,
|
||||
0,
|
||||
toolCallMessage,
|
||||
...toolCallResult,
|
||||
);
|
||||
},
|
||||
options,
|
||||
);
|
||||
} else {
|
||||
const res = await fetch(path, chatPayload);
|
||||
clearTimeout(requestTimeoutId);
|
||||
|
||||
const resJson = await res.json();
|
||||
const message = this.extractMessage(resJson);
|
||||
options.onFinish(message, res);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("[Request] failed to make a chat request", e);
|
||||
options.onError?.(e as Error);
|
||||
}
|
||||
}
|
||||
|
||||
async usage() {
|
||||
return {
|
||||
used: 0,
|
||||
total: 0,
|
||||
};
|
||||
}
|
||||
|
||||
async models(): Promise<LLMModel[]> {
|
||||
return [];
|
||||
}
|
||||
}
|
|
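A quick standalone check of the prefix routing that `ChatGLMApi.getModelType` introduces above; the model ids in the calls are placeholders:

```ts
type ModelType = "chat" | "image" | "video";

// Same prefix rules as the getModelType method in the diff above.
function getModelType(model: string): ModelType {
  if (model.startsWith("cogview-")) return "image";
  if (model.startsWith("cogvideo-")) return "video";
  return "chat";
}

console.log(getModelType("cogview-example"));  // "image"
console.log(getModelType("cogvideo-example")); // "video"
console.log(getModelType("glm-example"));      // "chat"
```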
@ -1,4 +1,4 @@
|
|||
import { ApiPath, Google, REQUEST_TIMEOUT_MS } from "@/app/constant";
|
||||
import { ApiPath, Google } from "@/app/constant";
|
||||
import {
|
||||
ChatOptions,
|
||||
getHeaders,
|
||||
|
@ -22,6 +22,7 @@ import {
|
|||
getMessageTextContent,
|
||||
getMessageImages,
|
||||
isVisionModel,
|
||||
getTimeoutMSByModel,
|
||||
} from "@/app/utils";
|
||||
import { preProcessImageContent } from "@/app/utils/chat";
|
||||
import { nanoid } from "nanoid";
|
||||
|
@ -29,7 +30,7 @@ import { RequestPayload } from "./openai";
|
|||
import { fetch } from "@/app/utils/stream";
|
||||
|
||||
export class GeminiProApi implements LLMApi {
|
||||
path(path: string): string {
|
||||
path(path: string, shouldStream = false): string {
|
||||
const accessStore = useAccessStore.getState();
|
||||
|
||||
let baseUrl = "";
|
||||
|
@ -51,15 +52,34 @@ export class GeminiProApi implements LLMApi {
|
|||
console.log("[Proxy Endpoint] ", baseUrl, path);
|
||||
|
||||
let chatPath = [baseUrl, path].join("/");
|
||||
if (shouldStream) {
|
||||
chatPath += chatPath.includes("?") ? "&alt=sse" : "?alt=sse";
|
||||
}
|
||||
|
||||
chatPath += chatPath.includes("?") ? "&alt=sse" : "?alt=sse";
|
||||
return chatPath;
|
||||
}
|
||||
extractMessage(res: any) {
|
||||
console.log("[Response] gemini-pro response: ", res);
|
||||
|
||||
const getTextFromParts = (parts: any[]) => {
|
||||
if (!Array.isArray(parts)) return "";
|
||||
|
||||
return parts
|
||||
.map((part) => part?.text || "")
|
||||
.filter((text) => text.trim() !== "")
|
||||
.join("\n\n");
|
||||
};
|
||||
|
||||
let content = "";
|
||||
if (Array.isArray(res)) {
|
||||
res.map((item) => {
|
||||
content += getTextFromParts(item?.candidates?.at(0)?.content?.parts);
|
||||
});
|
||||
}
|
||||
|
||||
return (
|
||||
res?.candidates?.at(0)?.content?.parts.at(0)?.text ||
|
||||
getTextFromParts(res?.candidates?.at(0)?.content?.parts) ||
|
||||
content || //getTextFromParts(res?.at(0)?.candidates?.at(0)?.content?.parts) ||
|
||||
res?.error?.message ||
|
||||
""
|
||||
);
|
||||
|
@ -166,7 +186,10 @@ export class GeminiProApi implements LLMApi {
|
|||
options.onController?.(controller);
|
||||
try {
|
||||
// https://github.com/google-gemini/cookbook/blob/main/quickstarts/rest/Streaming_REST.ipynb
|
||||
const chatPath = this.path(Google.ChatPath(modelConfig.model));
|
||||
const chatPath = this.path(
|
||||
Google.ChatPath(modelConfig.model),
|
||||
shouldStream,
|
||||
);
|
||||
|
||||
const chatPayload = {
|
||||
method: "POST",
|
||||
|
@ -175,10 +198,11 @@ export class GeminiProApi implements LLMApi {
|
|||
headers: getHeaders(),
|
||||
};
|
||||
|
||||
const isThinking = options.config.model.includes("-thinking");
|
||||
// make a fetch request
|
||||
const requestTimeoutId = setTimeout(
|
||||
() => controller.abort(),
|
||||
REQUEST_TIMEOUT_MS,
|
||||
getTimeoutMSByModel(options.config.model),
|
||||
);
|
||||
|
||||
if (shouldStream) {
|
||||
|
@ -192,7 +216,10 @@ export class GeminiProApi implements LLMApi {
|
|||
requestPayload,
|
||||
getHeaders(),
|
||||
// @ts-ignore
|
||||
[{ functionDeclarations: tools.map((tool) => tool.function) }],
|
||||
tools.length > 0
|
||||
? // @ts-ignore
|
||||
[{ functionDeclarations: tools.map((tool) => tool.function) }]
|
||||
: [],
|
||||
funcs,
|
||||
controller,
|
||||
// parseSSE
|
||||
|
@ -214,7 +241,10 @@ export class GeminiProApi implements LLMApi {
|
|||
},
|
||||
});
|
||||
}
|
||||
return chunkJson?.candidates?.at(0)?.content.parts.at(0)?.text;
|
||||
return chunkJson?.candidates
|
||||
?.at(0)
|
||||
?.content.parts?.map((part: { text: string }) => part.text)
|
||||
.join("\n\n");
|
||||
},
|
||||
// processToolMessage, include tool_calls message and tool call results
|
||||
(
|
||||
|
@ -271,7 +301,7 @@ export class GeminiProApi implements LLMApi {
|
|||
);
|
||||
}
|
||||
const message = apiClient.extractMessage(resJson);
|
||||
options.onFinish(message);
|
||||
options.onFinish(message, res);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("[Request] failed to make a chat request", e);
|
||||
|
|
|
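The Gemini `path()` change above appends `alt=sse` only when streaming is requested and respects an existing query string. That query handling in isolation, with placeholder URLs:

```ts
// Mirrors the conditional alt=sse handling added to GeminiProApi.path above.
function withSse(chatPath: string, shouldStream: boolean): string {
  if (!shouldStream) return chatPath;
  return chatPath + (chatPath.includes("?") ? "&alt=sse" : "?alt=sse");
}

console.log(withSse("https://example.invalid/v1beta/models/gemini:streamGenerateContent", true));
// -> ...:streamGenerateContent?alt=sse
console.log(withSse("https://example.invalid/v1beta/models/gemini:streamGenerateContent?key=K", true));
// -> ...?key=K&alt=sse
console.log(withSse("https://example.invalid/v1beta/models/gemini:generateContent", false));
// -> unchanged
```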
@ -117,6 +117,7 @@ export class SparkApi implements LLMApi {
|
|||
let responseText = "";
|
||||
let remainText = "";
|
||||
let finished = false;
|
||||
let responseRes: Response;
|
||||
|
||||
// Animate response text to make it look smooth
|
||||
function animateResponseText() {
|
||||
|
@ -143,7 +144,7 @@ export class SparkApi implements LLMApi {
|
|||
const finish = () => {
|
||||
if (!finished) {
|
||||
finished = true;
|
||||
options.onFinish(responseText + remainText);
|
||||
options.onFinish(responseText + remainText, responseRes);
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -156,7 +157,7 @@ export class SparkApi implements LLMApi {
|
|||
clearTimeout(requestTimeoutId);
|
||||
const contentType = res.headers.get("content-type");
|
||||
console.log("[Spark] request response content type: ", contentType);
|
||||
|
||||
responseRes = res;
|
||||
if (contentType?.startsWith("text/plain")) {
|
||||
responseText = await res.clone().text();
|
||||
return finish();
|
||||
|
@ -231,7 +232,7 @@ export class SparkApi implements LLMApi {
|
|||
|
||||
const resJson = await res.json();
|
||||
const message = this.extractMessage(resJson);
|
||||
options.onFinish(message);
|
||||
options.onFinish(message, res);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("[Request] failed to make a chat request", e);
|
||||
|
|
|
@ -24,6 +24,7 @@ import {
|
|||
import { getClientConfig } from "@/app/config/client";
|
||||
import { getMessageTextContent } from "@/app/utils";
|
||||
import { RequestPayload } from "./openai";
|
||||
import { fetch } from "@/app/utils/stream";
|
||||
|
||||
export class MoonshotApi implements LLMApi {
|
||||
private disableListModels = true;
|
||||
|
@ -179,7 +180,7 @@ export class MoonshotApi implements LLMApi {
|
|||
|
||||
const resJson = await res.json();
|
||||
const message = this.extractMessage(resJson);
|
||||
options.onFinish(message);
|
||||
options.onFinish(message, res);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("[Request] failed to make a chat request", e);
|
||||
|
|
|
@ -21,10 +21,10 @@ import {
|
|||
preProcessImageContent,
|
||||
uploadImage,
|
||||
base64Image2Blob,
|
||||
stream,
|
||||
streamWithThink,
|
||||
} from "@/app/utils/chat";
|
||||
import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
|
||||
import { DalleSize, DalleQuality, DalleStyle } from "@/app/typing";
|
||||
import { ModelSize, DalleQuality, DalleStyle } from "@/app/typing";
|
||||
|
||||
import {
|
||||
ChatOptions,
|
||||
|
@ -41,7 +41,9 @@ import {
|
|||
getMessageTextContent,
|
||||
isVisionModel,
|
||||
isDalle3 as _isDalle3,
|
||||
getTimeoutMSByModel,
|
||||
} from "@/app/utils";
|
||||
import { fetch } from "@/app/utils/stream";
|
||||
|
||||
export interface OpenAIListModelResponse {
|
||||
object: string;
|
||||
|
@ -64,6 +66,7 @@ export interface RequestPayload {
|
|||
frequency_penalty: number;
|
||||
top_p: number;
|
||||
max_tokens?: number;
|
||||
max_completion_tokens?: number;
|
||||
}
|
||||
|
||||
export interface DalleRequestPayload {
|
||||
|
@ -71,7 +74,7 @@ export interface DalleRequestPayload {
|
|||
prompt: string;
|
||||
response_format: "url" | "b64_json";
|
||||
n: number;
|
||||
size: DalleSize;
|
||||
size: ModelSize;
|
||||
quality: DalleQuality;
|
||||
style: DalleStyle;
|
||||
}
|
||||
|
@ -193,7 +196,9 @@ export class ChatGPTApi implements LLMApi {
|
|||
let requestPayload: RequestPayload | DalleRequestPayload;
|
||||
|
||||
const isDalle3 = _isDalle3(options.config.model);
|
||||
const isO1 = options.config.model.startsWith("o1");
|
||||
const isO1OrO3 =
|
||||
options.config.model.startsWith("o1") ||
|
||||
options.config.model.startsWith("o3");
|
||||
if (isDalle3) {
|
||||
const prompt = getMessageTextContent(
|
||||
options.messages.slice(-1)?.pop() as any,
|
||||
|
@ -215,23 +220,28 @@ export class ChatGPTApi implements LLMApi {
|
|||
const content = visionModel
|
||||
? await preProcessImageContent(v.content)
|
||||
: getMessageTextContent(v);
|
||||
if (!(isO1 && v.role === "system"))
|
||||
if (!(isO1OrO3 && v.role === "system"))
|
||||
messages.push({ role: v.role, content });
|
||||
}
|
||||
|
||||
// O1 does not support images, tools (plugins in ChatGPTNextWeb), system messages, stream, logprobs, temperature, top_p, n, presence_penalty, or frequency_penalty yet.
|
||||
requestPayload = {
|
||||
messages,
|
||||
stream: !isO1 ? options.config.stream : false,
|
||||
stream: options.config.stream,
|
||||
model: modelConfig.model,
|
||||
temperature: !isO1 ? modelConfig.temperature : 1,
|
||||
presence_penalty: !isO1 ? modelConfig.presence_penalty : 0,
|
||||
frequency_penalty: !isO1 ? modelConfig.frequency_penalty : 0,
|
||||
top_p: !isO1 ? modelConfig.top_p : 1,
|
||||
temperature: !isO1OrO3 ? modelConfig.temperature : 1,
|
||||
presence_penalty: !isO1OrO3 ? modelConfig.presence_penalty : 0,
|
||||
frequency_penalty: !isO1OrO3 ? modelConfig.frequency_penalty : 0,
|
||||
top_p: !isO1OrO3 ? modelConfig.top_p : 1,
|
||||
// max_tokens: Math.max(modelConfig.max_tokens, 1024),
|
||||
// Please do not ask why max_tokens is not sent; it is deliberately omitted and will not be explained further.
|
||||
};
|
||||
|
||||
// O1 uses max_completion_tokens to control the token count (https://platform.openai.com/docs/guides/reasoning#controlling-costs)
|
||||
if (isO1OrO3) {
|
||||
requestPayload["max_completion_tokens"] = modelConfig.max_tokens;
|
||||
}
|
||||
|
||||
// add max_tokens to vision model
|
||||
if (visionModel) {
|
||||
requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
|
||||
|
@ -240,7 +250,7 @@ export class ChatGPTApi implements LLMApi {
|
|||
|
||||
console.log("[Request] openai payload: ", requestPayload);
|
||||
|
||||
const shouldStream = !isDalle3 && !!options.config.stream && !isO1;
|
||||
const shouldStream = !isDalle3 && !!options.config.stream;
|
||||
const controller = new AbortController();
|
||||
options.onController?.(controller);
|
||||
|
||||
|
@ -284,7 +294,7 @@ export class ChatGPTApi implements LLMApi {
|
|||
useChatStore.getState().currentSession().mask?.plugin || [],
|
||||
);
|
||||
// console.log("getAsTools", tools, funcs);
|
||||
stream(
|
||||
streamWithThink(
|
||||
chatPath,
|
||||
requestPayload,
|
||||
getHeaders(),
|
||||
|
@ -299,8 +309,12 @@ export class ChatGPTApi implements LLMApi {
|
|||
delta: {
|
||||
content: string;
|
||||
tool_calls: ChatMessageTool[];
|
||||
reasoning_content: string | null;
|
||||
};
|
||||
}>;
|
||||
|
||||
if (!choices?.length) return { isThinking: false, content: "" };
|
||||
|
||||
const tool_calls = choices[0]?.delta?.tool_calls;
|
||||
if (tool_calls?.length > 0) {
|
||||
const id = tool_calls[0]?.id;
|
||||
|
@ -320,7 +334,37 @@ export class ChatGPTApi implements LLMApi {
|
|||
runTools[index]["function"]["arguments"] += args;
|
||||
}
|
||||
}
|
||||
return choices[0]?.delta?.content;
|
||||
|
||||
const reasoning = choices[0]?.delta?.reasoning_content;
|
||||
const content = choices[0]?.delta?.content;
|
||||
|
||||
// Skip if both content and reasoning_content are empty or null
|
||||
if (
|
||||
(!reasoning || reasoning.length === 0) &&
|
||||
(!content || content.length === 0)
|
||||
) {
|
||||
return {
|
||||
isThinking: false,
|
||||
content: "",
|
||||
};
|
||||
}
|
||||
|
||||
if (reasoning && reasoning.length > 0) {
|
||||
return {
|
||||
isThinking: true,
|
||||
content: reasoning,
|
||||
};
|
||||
} else if (content && content.length > 0) {
|
||||
return {
|
||||
isThinking: false,
|
||||
content: content,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
isThinking: false,
|
||||
content: "",
|
||||
};
|
||||
},
|
||||
// processToolMessage, include tool_calls message and tool call results
|
||||
(
|
||||
|
@ -352,7 +396,7 @@ export class ChatGPTApi implements LLMApi {
|
|||
// make a fetch request
|
||||
const requestTimeoutId = setTimeout(
|
||||
() => controller.abort(),
|
||||
isDalle3 || isO1 ? REQUEST_TIMEOUT_MS * 2 : REQUEST_TIMEOUT_MS, // dalle3 using b64_json is slow.
|
||||
getTimeoutMSByModel(options.config.model),
|
||||
);
|
||||
|
||||
const res = await fetch(chatPath, chatPayload);
|
||||
|
@ -360,7 +404,7 @@ export class ChatGPTApi implements LLMApi {
|
|||
|
||||
const resJson = await res.json();
|
||||
const message = await this.extractMessage(resJson);
|
||||
options.onFinish(message);
|
||||
options.onFinish(message, res);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("[Request] failed to make a chat request", e);
|
||||
|
|
|
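The OpenAI diff widens the old `isO1` flag to cover o3 models and keeps gating the sampling parameters on it. A condensed sketch of that gating, separate from the full payload builder above:

```ts
interface SamplingConfig {
  temperature: number;
  top_p: number;
  presence_penalty: number;
  frequency_penalty: number;
}

// Reasoning models (o1/o3) ignore user sampling knobs, so fixed values are
// sent instead, matching the ternaries in the requestPayload above.
function applySamplingForModel(model: string, cfg: SamplingConfig): SamplingConfig {
  const isO1OrO3 = model.startsWith("o1") || model.startsWith("o3");
  return isO1OrO3
    ? { temperature: 1, top_p: 1, presence_penalty: 0, frequency_penalty: 0 }
    : cfg;
}

console.log(applySamplingForModel("o3-mini", {
  temperature: 0.7,
  top_p: 0.9,
  presence_penalty: 0,
  frequency_penalty: 0,
}));
// -> { temperature: 1, top_p: 1, presence_penalty: 0, frequency_penalty: 0 }
```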
@ -0,0 +1,287 @@
|
|||
"use client";
|
||||
// Azure and OpenAI use the same models, so the same LLMApi is shared.
|
||||
import {
|
||||
ApiPath,
|
||||
SILICONFLOW_BASE_URL,
|
||||
SiliconFlow,
|
||||
DEFAULT_MODELS,
|
||||
} from "@/app/constant";
|
||||
import {
|
||||
useAccessStore,
|
||||
useAppConfig,
|
||||
useChatStore,
|
||||
ChatMessageTool,
|
||||
usePluginStore,
|
||||
} from "@/app/store";
|
||||
import { preProcessImageContent, streamWithThink } from "@/app/utils/chat";
|
||||
import {
|
||||
ChatOptions,
|
||||
getHeaders,
|
||||
LLMApi,
|
||||
LLMModel,
|
||||
SpeechOptions,
|
||||
} from "../api";
|
||||
import { getClientConfig } from "@/app/config/client";
|
||||
import {
|
||||
getMessageTextContent,
|
||||
getMessageTextContentWithoutThinking,
|
||||
isVisionModel,
|
||||
getTimeoutMSByModel,
|
||||
} from "@/app/utils";
|
||||
import { RequestPayload } from "./openai";
|
||||
|
||||
import { fetch } from "@/app/utils/stream";
|
||||
export interface SiliconFlowListModelResponse {
|
||||
object: string;
|
||||
data: Array<{
|
||||
id: string;
|
||||
object: string;
|
||||
root: string;
|
||||
}>;
|
||||
}
|
||||
|
||||
export class SiliconflowApi implements LLMApi {
|
||||
private disableListModels = false;
|
||||
|
||||
path(path: string): string {
|
||||
const accessStore = useAccessStore.getState();
|
||||
|
||||
let baseUrl = "";
|
||||
|
||||
if (accessStore.useCustomConfig) {
|
||||
baseUrl = accessStore.siliconflowUrl;
|
||||
}
|
||||
|
||||
if (baseUrl.length === 0) {
|
||||
const isApp = !!getClientConfig()?.isApp;
|
||||
const apiPath = ApiPath.SiliconFlow;
|
||||
baseUrl = isApp ? SILICONFLOW_BASE_URL : apiPath;
|
||||
}
|
||||
|
||||
if (baseUrl.endsWith("/")) {
|
||||
baseUrl = baseUrl.slice(0, baseUrl.length - 1);
|
||||
}
|
||||
if (
|
||||
!baseUrl.startsWith("http") &&
|
||||
!baseUrl.startsWith(ApiPath.SiliconFlow)
|
||||
) {
|
||||
baseUrl = "https://" + baseUrl;
|
||||
}
|
||||
|
||||
console.log("[Proxy Endpoint] ", baseUrl, path);
|
||||
|
||||
return [baseUrl, path].join("/");
|
||||
}
|
||||
|
||||
extractMessage(res: any) {
|
||||
return res.choices?.at(0)?.message?.content ?? "";
|
||||
}
|
||||
|
||||
speech(options: SpeechOptions): Promise<ArrayBuffer> {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
|
||||
async chat(options: ChatOptions) {
|
||||
const visionModel = isVisionModel(options.config.model);
|
||||
const messages: ChatOptions["messages"] = [];
|
||||
for (const v of options.messages) {
|
||||
if (v.role === "assistant") {
|
||||
const content = getMessageTextContentWithoutThinking(v);
|
||||
messages.push({ role: v.role, content });
|
||||
} else {
|
||||
const content = visionModel
|
||||
? await preProcessImageContent(v.content)
|
||||
: getMessageTextContent(v);
|
||||
messages.push({ role: v.role, content });
|
||||
}
|
||||
}
|
||||
|
||||
const modelConfig = {
|
||||
...useAppConfig.getState().modelConfig,
|
||||
...useChatStore.getState().currentSession().mask.modelConfig,
|
||||
...{
|
||||
model: options.config.model,
|
||||
providerName: options.config.providerName,
|
||||
},
|
||||
};
|
||||
|
||||
const requestPayload: RequestPayload = {
|
||||
messages,
|
||||
stream: options.config.stream,
|
||||
model: modelConfig.model,
|
||||
temperature: modelConfig.temperature,
|
||||
presence_penalty: modelConfig.presence_penalty,
|
||||
frequency_penalty: modelConfig.frequency_penalty,
|
||||
top_p: modelConfig.top_p,
|
||||
// max_tokens: Math.max(modelConfig.max_tokens, 1024),
|
||||
// Please do not ask why max_tokens is not sent; it is deliberately omitted and will not be explained further.
|
||||
};
|
||||
|
||||
console.log("[Request] openai payload: ", requestPayload);
|
||||
|
||||
const shouldStream = !!options.config.stream;
|
||||
const controller = new AbortController();
|
||||
options.onController?.(controller);
|
||||
|
||||
try {
|
||||
const chatPath = this.path(SiliconFlow.ChatPath);
|
||||
const chatPayload = {
|
||||
method: "POST",
|
||||
body: JSON.stringify(requestPayload),
|
||||
signal: controller.signal,
|
||||
headers: getHeaders(),
|
||||
};
|
||||
|
||||
// console.log(chatPayload);
|
||||
|
||||
// Use extended timeout for thinking models as they typically require more processing time
|
||||
const requestTimeoutId = setTimeout(
|
||||
() => controller.abort(),
|
||||
getTimeoutMSByModel(options.config.model),
|
||||
);
|
||||
|
||||
if (shouldStream) {
|
||||
const [tools, funcs] = usePluginStore
|
||||
.getState()
|
||||
.getAsTools(
|
||||
useChatStore.getState().currentSession().mask?.plugin || [],
|
||||
);
|
||||
return streamWithThink(
|
||||
chatPath,
|
||||
requestPayload,
|
||||
getHeaders(),
|
||||
tools as any,
|
||||
funcs,
|
||||
controller,
|
||||
// parseSSE
|
||||
(text: string, runTools: ChatMessageTool[]) => {
|
||||
// console.log("parseSSE", text, runTools);
|
||||
const json = JSON.parse(text);
|
||||
const choices = json.choices as Array<{
|
||||
delta: {
|
||||
content: string | null;
|
||||
tool_calls: ChatMessageTool[];
|
||||
reasoning_content: string | null;
|
||||
};
|
||||
}>;
|
||||
const tool_calls = choices[0]?.delta?.tool_calls;
|
||||
if (tool_calls?.length > 0) {
|
||||
const index = tool_calls[0]?.index;
|
||||
const id = tool_calls[0]?.id;
|
||||
const args = tool_calls[0]?.function?.arguments;
|
||||
if (id) {
|
||||
runTools.push({
|
||||
id,
|
||||
type: tool_calls[0]?.type,
|
||||
function: {
|
||||
name: tool_calls[0]?.function?.name as string,
|
||||
arguments: args,
|
||||
},
|
||||
});
|
||||
} else {
|
||||
// @ts-ignore
|
||||
runTools[index]["function"]["arguments"] += args;
|
||||
}
|
||||
}
|
||||
const reasoning = choices[0]?.delta?.reasoning_content;
|
||||
const content = choices[0]?.delta?.content;
|
||||
|
||||
// Skip if both content and reasoning_content are empty or null
|
||||
if (
|
||||
(!reasoning || reasoning.length === 0) &&
|
||||
(!content || content.length === 0)
|
||||
) {
|
||||
return {
|
||||
isThinking: false,
|
||||
content: "",
|
||||
};
|
||||
}
|
||||
|
||||
if (reasoning && reasoning.length > 0) {
|
||||
return {
|
||||
isThinking: true,
|
||||
content: reasoning,
|
||||
};
|
||||
} else if (content && content.length > 0) {
|
||||
return {
|
||||
isThinking: false,
|
||||
content: content,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
isThinking: false,
|
||||
content: "",
|
||||
};
|
||||
},
|
||||
// processToolMessage, include tool_calls message and tool call results
|
||||
(
|
||||
requestPayload: RequestPayload,
|
||||
toolCallMessage: any,
|
||||
toolCallResult: any[],
|
||||
) => {
|
||||
// @ts-ignore
|
||||
requestPayload?.messages?.splice(
|
||||
// @ts-ignore
|
||||
requestPayload?.messages?.length,
|
||||
0,
|
||||
toolCallMessage,
|
||||
...toolCallResult,
|
||||
);
|
||||
},
|
||||
options,
|
||||
);
|
||||
} else {
|
||||
const res = await fetch(chatPath, chatPayload);
|
||||
clearTimeout(requestTimeoutId);
|
||||
|
||||
const resJson = await res.json();
|
||||
const message = this.extractMessage(resJson);
|
||||
options.onFinish(message, res);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("[Request] failed to make a chat request", e);
|
||||
options.onError?.(e as Error);
|
||||
}
|
||||
}
|
||||
async usage() {
|
||||
return {
|
||||
used: 0,
|
||||
total: 0,
|
||||
};
|
||||
}
|
||||
|
||||
async models(): Promise<LLMModel[]> {
|
||||
if (this.disableListModels) {
|
||||
return DEFAULT_MODELS.slice();
|
||||
}
|
||||
|
||||
const res = await fetch(this.path(SiliconFlow.ListModelPath), {
|
||||
method: "GET",
|
||||
headers: {
|
||||
...getHeaders(),
|
||||
},
|
||||
});
|
||||
|
||||
const resJson = (await res.json()) as SiliconFlowListModelResponse;
|
||||
const chatModels = resJson.data;
|
||||
console.log("[Models]", chatModels);
|
||||
|
||||
if (!chatModels) {
|
||||
return [];
|
||||
}
|
||||
|
||||
let seq = 1000; // keep the ordering consistent with Constant.ts
|
||||
return chatModels.map((m) => ({
|
||||
name: m.id,
|
||||
available: true,
|
||||
sorted: seq++,
|
||||
provider: {
|
||||
id: "siliconflow",
|
||||
providerName: "SiliconFlow",
|
||||
providerType: "siliconflow",
|
||||
sorted: 14,
|
||||
},
|
||||
}));
|
||||
}
|
||||
}
|
|
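The SiliconFlow client is the only one above that keeps model listing enabled; its `models()` maps the provider's `/models` response onto NextChat model entries. A reduced sketch of that mapping, with a placeholder model id:

```ts
interface ListedModel {
  id: string;
  object: string;
  root: string;
}

// Same mapping as SiliconflowApi.models() above: sequential sort order
// starting at 1000, fixed provider block.
function toModelEntries(models: ListedModel[], startSeq = 1000) {
  let seq = startSeq;
  return models.map((m) => ({
    name: m.id,
    available: true,
    sorted: seq++,
    provider: {
      id: "siliconflow",
      providerName: "SiliconFlow",
      providerType: "siliconflow",
      sorted: 14,
    },
  }));
}

console.log(toModelEntries([{ id: "placeholder/model-id", object: "model", root: "" }]));
```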
@ -1,5 +1,5 @@
|
|||
"use client";
|
||||
import { ApiPath, TENCENT_BASE_URL, REQUEST_TIMEOUT_MS } from "@/app/constant";
|
||||
import { ApiPath, TENCENT_BASE_URL } from "@/app/constant";
|
||||
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
|
||||
|
||||
import {
|
||||
|
@ -17,7 +17,11 @@ import {
|
|||
} from "@fortaine/fetch-event-source";
|
||||
import { prettyObject } from "@/app/utils/format";
|
||||
import { getClientConfig } from "@/app/config/client";
|
||||
import { getMessageTextContent, isVisionModel } from "@/app/utils";
|
||||
import {
|
||||
getMessageTextContent,
|
||||
isVisionModel,
|
||||
getTimeoutMSByModel,
|
||||
} from "@/app/utils";
|
||||
import mapKeys from "lodash-es/mapKeys";
|
||||
import mapValues from "lodash-es/mapValues";
|
||||
import isArray from "lodash-es/isArray";
|
||||
|
@ -135,13 +139,14 @@ export class HunyuanApi implements LLMApi {
|
|||
// make a fetch request
|
||||
const requestTimeoutId = setTimeout(
|
||||
() => controller.abort(),
|
||||
REQUEST_TIMEOUT_MS,
|
||||
getTimeoutMSByModel(options.config.model),
|
||||
);
|
||||
|
||||
if (shouldStream) {
|
||||
let responseText = "";
|
||||
let remainText = "";
|
||||
let finished = false;
|
||||
let responseRes: Response;
|
||||
|
||||
// animate the response to make it look smooth
|
||||
function animateResponseText() {
|
||||
|
@ -171,7 +176,7 @@ export class HunyuanApi implements LLMApi {
|
|||
const finish = () => {
|
||||
if (!finished) {
|
||||
finished = true;
|
||||
options.onFinish(responseText + remainText);
|
||||
options.onFinish(responseText + remainText, responseRes);
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -187,7 +192,7 @@ export class HunyuanApi implements LLMApi {
|
|||
"[Tencent] request response content type: ",
|
||||
contentType,
|
||||
);
|
||||
|
||||
responseRes = res;
|
||||
if (contentType?.startsWith("text/plain")) {
|
||||
responseText = await res.clone().text();
|
||||
return finish();
|
||||
|
@ -253,7 +258,7 @@ export class HunyuanApi implements LLMApi {
|
|||
|
||||
const resJson = await res.json();
|
||||
const message = this.extractMessage(resJson);
|
||||
options.onFinish(message);
|
||||
options.onFinish(message, res);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("[Request] failed to make a chat request", e);
|
||||
|
|
|
@ -0,0 +1,194 @@
|
|||
"use client";
|
||||
// Azure and OpenAI use the same models, so the same LLMApi is shared.
|
||||
import { ApiPath, XAI_BASE_URL, XAI } from "@/app/constant";
|
||||
import {
|
||||
useAccessStore,
|
||||
useAppConfig,
|
||||
useChatStore,
|
||||
ChatMessageTool,
|
||||
usePluginStore,
|
||||
} from "@/app/store";
|
||||
import { stream } from "@/app/utils/chat";
|
||||
import {
|
||||
ChatOptions,
|
||||
getHeaders,
|
||||
LLMApi,
|
||||
LLMModel,
|
||||
SpeechOptions,
|
||||
} from "../api";
|
||||
import { getClientConfig } from "@/app/config/client";
|
||||
import { getTimeoutMSByModel } from "@/app/utils";
|
||||
import { preProcessImageContent } from "@/app/utils/chat";
|
||||
import { RequestPayload } from "./openai";
|
||||
import { fetch } from "@/app/utils/stream";
|
||||
|
||||
export class XAIApi implements LLMApi {
|
||||
private disableListModels = true;
|
||||
|
||||
path(path: string): string {
|
||||
const accessStore = useAccessStore.getState();
|
||||
|
||||
let baseUrl = "";
|
||||
|
||||
if (accessStore.useCustomConfig) {
|
||||
baseUrl = accessStore.xaiUrl;
|
||||
}
|
||||
|
||||
if (baseUrl.length === 0) {
|
||||
const isApp = !!getClientConfig()?.isApp;
|
||||
const apiPath = ApiPath.XAI;
|
||||
baseUrl = isApp ? XAI_BASE_URL : apiPath;
|
||||
}
|
||||
|
||||
if (baseUrl.endsWith("/")) {
|
||||
baseUrl = baseUrl.slice(0, baseUrl.length - 1);
|
||||
}
|
||||
if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.XAI)) {
|
||||
baseUrl = "https://" + baseUrl;
|
||||
}
|
||||
|
||||
console.log("[Proxy Endpoint] ", baseUrl, path);
|
||||
|
||||
return [baseUrl, path].join("/");
|
||||
}
|
||||
|
||||
extractMessage(res: any) {
|
||||
return res.choices?.at(0)?.message?.content ?? "";
|
||||
}
|
||||
|
||||
speech(options: SpeechOptions): Promise<ArrayBuffer> {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
|
||||
async chat(options: ChatOptions) {
|
||||
const messages: ChatOptions["messages"] = [];
|
||||
for (const v of options.messages) {
|
||||
const content = await preProcessImageContent(v.content);
|
||||
messages.push({ role: v.role, content });
|
||||
}
|
||||
|
||||
const modelConfig = {
|
||||
...useAppConfig.getState().modelConfig,
|
||||
...useChatStore.getState().currentSession().mask.modelConfig,
|
||||
...{
|
||||
model: options.config.model,
|
||||
providerName: options.config.providerName,
|
||||
},
|
||||
};
|
||||
|
||||
const requestPayload: RequestPayload = {
|
||||
messages,
|
||||
stream: options.config.stream,
|
||||
model: modelConfig.model,
|
||||
temperature: modelConfig.temperature,
|
||||
presence_penalty: modelConfig.presence_penalty,
|
||||
frequency_penalty: modelConfig.frequency_penalty,
|
||||
top_p: modelConfig.top_p,
|
||||
};
|
||||
|
||||
console.log("[Request] xai payload: ", requestPayload);
|
||||
|
||||
const shouldStream = !!options.config.stream;
|
||||
const controller = new AbortController();
|
||||
options.onController?.(controller);
|
||||
|
||||
try {
|
||||
const chatPath = this.path(XAI.ChatPath);
|
||||
const chatPayload = {
|
||||
method: "POST",
|
||||
body: JSON.stringify(requestPayload),
|
||||
signal: controller.signal,
|
||||
headers: getHeaders(),
|
||||
};
|
||||
|
||||
// make a fetch request
|
||||
const requestTimeoutId = setTimeout(
|
||||
() => controller.abort(),
|
||||
getTimeoutMSByModel(options.config.model),
|
||||
);
|
||||
|
||||
if (shouldStream) {
|
||||
const [tools, funcs] = usePluginStore
|
||||
.getState()
|
||||
.getAsTools(
|
||||
useChatStore.getState().currentSession().mask?.plugin || [],
|
||||
);
|
||||
return stream(
|
||||
chatPath,
|
||||
requestPayload,
|
||||
getHeaders(),
|
||||
tools as any,
|
||||
funcs,
|
||||
controller,
|
||||
// parseSSE
|
||||
(text: string, runTools: ChatMessageTool[]) => {
|
||||
// console.log("parseSSE", text, runTools);
|
||||
const json = JSON.parse(text);
|
||||
const choices = json.choices as Array<{
|
||||
delta: {
|
||||
content: string;
|
||||
tool_calls: ChatMessageTool[];
|
||||
};
|
||||
}>;
|
||||
const tool_calls = choices[0]?.delta?.tool_calls;
|
||||
if (tool_calls?.length > 0) {
|
||||
const index = tool_calls[0]?.index;
|
||||
const id = tool_calls[0]?.id;
|
||||
const args = tool_calls[0]?.function?.arguments;
|
||||
if (id) {
|
||||
runTools.push({
|
||||
id,
|
||||
type: tool_calls[0]?.type,
|
||||
function: {
|
||||
name: tool_calls[0]?.function?.name as string,
|
||||
arguments: args,
|
||||
},
|
||||
});
|
||||
} else {
|
||||
// @ts-ignore
|
||||
runTools[index]["function"]["arguments"] += args;
|
||||
}
|
||||
}
|
||||
return choices[0]?.delta?.content;
|
||||
},
|
||||
// processToolMessage: appends the tool_calls message and the tool call results
|
||||
(
|
||||
requestPayload: RequestPayload,
|
||||
toolCallMessage: any,
|
||||
toolCallResult: any[],
|
||||
) => {
|
||||
// @ts-ignore
|
||||
requestPayload?.messages?.splice(
|
||||
// @ts-ignore
|
||||
requestPayload?.messages?.length,
|
||||
0,
|
||||
toolCallMessage,
|
||||
...toolCallResult,
|
||||
);
|
||||
},
|
||||
options,
|
||||
);
|
||||
} else {
|
||||
const res = await fetch(chatPath, chatPayload);
|
||||
clearTimeout(requestTimeoutId);
|
||||
|
||||
const resJson = await res.json();
|
||||
const message = this.extractMessage(resJson);
|
||||
options.onFinish(message, res);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("[Request] failed to make a chat request", e);
|
||||
options.onError?.(e as Error);
|
||||
}
|
||||
}
|
||||
async usage() {
|
||||
return {
|
||||
used: 0,
|
||||
total: 0,
|
||||
};
|
||||
}
|
||||
|
||||
async models(): Promise<LLMModel[]> {
|
||||
return [];
|
||||
}
|
||||
}
|
|
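// In the streaming branch above, tool calls arrive as incremental SSE deltas:
// a chunk that carries an `id` opens a new tool call, and later chunks (without
// an id) append their partial JSON arguments to the call at the delta's index.
// A standalone sketch of that accumulation logic, with simplified types that are
// not the project's exact ChatMessageTool shape:
interface ToolCallSketch {
  id?: string;
  type?: string;
  function: { name?: string; arguments: string };
}

function accumulateToolCall(
  runTools: ToolCallSketch[],
  delta: {
    index: number;
    id?: string;
    type?: string;
    function?: { name?: string; arguments?: string };
  },
) {
  if (delta.id) {
    // First chunk of a new tool call: remember its id, type and function name.
    runTools.push({
      id: delta.id,
      type: delta.type,
      function: { name: delta.function?.name, arguments: delta.function?.arguments ?? "" },
    });
  } else {
    // Follow-up chunk: append the partial arguments to the call at `index`.
    runTools[delta.index].function.arguments += delta.function?.arguments ?? "";
  }
}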
@ -11,12 +11,15 @@ import Logo from "../icons/logo.svg";
|
|||
import { useMobileScreen } from "@/app/utils";
|
||||
import BotIcon from "../icons/bot.svg";
|
||||
import { getClientConfig } from "../config/client";
|
||||
import { PasswordInput } from "./ui-lib";
|
||||
import LeftIcon from "@/app/icons/left.svg";
|
||||
import { safeLocalStorage } from "@/app/utils";
|
||||
import {
|
||||
trackSettingsPageGuideToCPaymentClick,
|
||||
trackAuthorizationPageButtonToCPaymentClick,
|
||||
} from "../utils/auth-settings-events";
|
||||
import clsx from "clsx";
|
||||
|
||||
const storage = safeLocalStorage();
|
||||
|
||||
export function AuthPage() {
|
||||
|
@ -53,43 +56,50 @@ export function AuthPage() {
|
|||
onClick={() => navigate(Path.Home)}
|
||||
></IconButton>
|
||||
</div>
|
||||
<div className={`no-dark ${styles["auth-logo"]}`}>
|
||||
<div className={clsx("no-dark", styles["auth-logo"])}>
|
||||
<BotIcon />
|
||||
</div>
|
||||
|
||||
<div className={styles["auth-title"]}>{Locale.Auth.Title}</div>
|
||||
<div className={styles["auth-tips"]}>{Locale.Auth.Tips}</div>
|
||||
|
||||
<input
|
||||
className={styles["auth-input"]}
|
||||
type="password"
|
||||
placeholder={Locale.Auth.Input}
|
||||
<PasswordInput
|
||||
style={{ marginTop: "3vh", marginBottom: "3vh" }}
|
||||
aria={Locale.Settings.ShowPassword}
|
||||
aria-label={Locale.Auth.Input}
|
||||
value={accessStore.accessCode}
|
||||
type="text"
|
||||
placeholder={Locale.Auth.Input}
|
||||
onChange={(e) => {
|
||||
accessStore.update(
|
||||
(access) => (access.accessCode = e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
|
||||
{!accessStore.hideUserApiKey ? (
|
||||
<>
|
||||
<div className={styles["auth-tips"]}>{Locale.Auth.SubTips}</div>
|
||||
<input
|
||||
className={styles["auth-input"]}
|
||||
type="password"
|
||||
placeholder={Locale.Settings.Access.OpenAI.ApiKey.Placeholder}
|
||||
<PasswordInput
|
||||
style={{ marginTop: "3vh", marginBottom: "3vh" }}
|
||||
aria={Locale.Settings.ShowPassword}
|
||||
aria-label={Locale.Settings.Access.OpenAI.ApiKey.Placeholder}
|
||||
value={accessStore.openaiApiKey}
|
||||
type="text"
|
||||
placeholder={Locale.Settings.Access.OpenAI.ApiKey.Placeholder}
|
||||
onChange={(e) => {
|
||||
accessStore.update(
|
||||
(access) => (access.openaiApiKey = e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
<input
|
||||
className={styles["auth-input-second"]}
|
||||
type="password"
|
||||
placeholder={Locale.Settings.Access.Google.ApiKey.Placeholder}
|
||||
<PasswordInput
|
||||
style={{ marginTop: "3vh", marginBottom: "3vh" }}
|
||||
aria={Locale.Settings.ShowPassword}
|
||||
aria-label={Locale.Settings.Access.Google.ApiKey.Placeholder}
|
||||
value={accessStore.googleApiKey}
|
||||
type="text"
|
||||
placeholder={Locale.Settings.Access.Google.ApiKey.Placeholder}
|
||||
onChange={(e) => {
|
||||
accessStore.update(
|
||||
(access) => (access.googleApiKey = e.currentTarget.value),
|
||||
|
@ -155,7 +165,7 @@ function TopBanner() {
|
|||
onMouseEnter={handleMouseEnter}
|
||||
onMouseLeave={handleMouseLeave}
|
||||
>
|
||||
<div className={`${styles["top-banner-inner"]} no-dark`}>
|
||||
<div className={clsx(styles["top-banner-inner"], "no-dark")}>
|
||||
<Logo className={styles["top-banner-logo"]}></Logo>
|
||||
<span>
|
||||
{Locale.Auth.TopTips}
|
||||
|
|
|
@ -2,6 +2,7 @@ import * as React from "react";

import styles from "./button.module.scss";
import { CSSProperties } from "react";
import clsx from "clsx";

export type ButtonType = "primary" | "danger" | null;

@ -22,12 +23,16 @@ export function IconButton(props: {
}) {
return (
<button
className={
styles["icon-button"] +
` ${props.bordered && styles.border} ${props.shadow && styles.shadow} ${
props.className ?? ""
} clickable ${styles[props.type ?? ""]}`
}
className={clsx(
"clickable",
styles["icon-button"],
{
[styles.border]: props.bordered,
[styles.shadow]: props.shadow,
},
styles[props.type ?? ""],
props.className,
)}
onClick={props.onClick}
title={props.title}
disabled={props.disabled}

@ -40,10 +45,9 @@ export function IconButton(props: {
{props.icon && (
<div
aria-label={props.text || props.title}
className={
styles["icon-button-icon"] +
` ${props.type === "primary" && "no-dark"}`
}
className={clsx(styles["icon-button-icon"], {
"no-dark": props.type === "primary",
})}
>
{props.icon}
</div>
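// The className refactor above relies on clsx, which joins its arguments and
// drops falsy values, so conditional classes can be written as an object map
// instead of string concatenation. A small self-contained example:
import clsx from "clsx";

const example = clsx(
  "clickable",
  "icon-button",
  { border: true, shadow: false }, // only keys with truthy values are kept
  undefined, // falsy arguments are ignored
);
// example === "clickable icon-button border"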
@ -18,6 +18,7 @@ import { Mask } from "../store/mask";
|
|||
import { useRef, useEffect } from "react";
|
||||
import { showConfirm } from "./ui-lib";
|
||||
import { useMobileScreen } from "../utils";
|
||||
import clsx from "clsx";
|
||||
|
||||
export function ChatItem(props: {
|
||||
onClick?: () => void;
|
||||
|
@ -45,11 +46,11 @@ export function ChatItem(props: {
|
|||
<Draggable draggableId={`${props.id}`} index={props.index}>
|
||||
{(provided) => (
|
||||
<div
|
||||
className={`${styles["chat-item"]} ${
|
||||
props.selected &&
|
||||
(currentPath === Path.Chat || currentPath === Path.Home) &&
|
||||
styles["chat-item-selected"]
|
||||
}`}
|
||||
className={clsx(styles["chat-item"], {
|
||||
[styles["chat-item-selected"]]:
|
||||
props.selected &&
|
||||
(currentPath === Path.Chat || currentPath === Path.Home),
|
||||
})}
|
||||
onClick={props.onClick}
|
||||
ref={(ele) => {
|
||||
draggableRef.current = ele;
|
||||
|
@ -63,7 +64,7 @@ export function ChatItem(props: {
|
|||
>
|
||||
{props.narrow ? (
|
||||
<div className={styles["chat-item-narrow"]}>
|
||||
<div className={styles["chat-item-avatar"] + " no-dark"}>
|
||||
<div className={clsx(styles["chat-item-avatar"], "no-dark")}>
|
||||
<MaskAvatar
|
||||
avatar={props.mask.avatar}
|
||||
model={props.mask.modelConfig.model}
|
||||
|
|
|
@ -45,6 +45,14 @@
|
|||
.chat-input-actions {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
justify-content: space-between;
|
||||
gap: 5px;
|
||||
|
||||
&-end {
|
||||
display: flex;
|
||||
margin-left: auto;
|
||||
gap: 5px;
|
||||
}
|
||||
|
||||
.chat-input-action {
|
||||
display: inline-flex;
|
||||
|
@ -62,10 +70,6 @@
|
|||
width: var(--icon-width);
|
||||
overflow: hidden;
|
||||
|
||||
&:not(:last-child) {
|
||||
margin-right: 5px;
|
||||
}
|
||||
|
||||
.text {
|
||||
white-space: nowrap;
|
||||
padding-left: 5px;
|
||||
|
@ -231,10 +235,12 @@
|
|||
|
||||
animation: slide-in ease 0.3s;
|
||||
|
||||
$linear: linear-gradient(to right,
|
||||
rgba(0, 0, 0, 0),
|
||||
rgba(0, 0, 0, 1),
|
||||
rgba(0, 0, 0, 0));
|
||||
$linear: linear-gradient(
|
||||
to right,
|
||||
rgba(0, 0, 0, 0),
|
||||
rgba(0, 0, 0, 1),
|
||||
rgba(0, 0, 0, 0)
|
||||
);
|
||||
mask-image: $linear;
|
||||
|
||||
@mixin show {
|
||||
|
@ -373,7 +379,7 @@
|
|||
}
|
||||
}
|
||||
|
||||
.chat-message-user>.chat-message-container {
|
||||
.chat-message-user > .chat-message-container {
|
||||
align-items: flex-end;
|
||||
}
|
||||
|
||||
|
@ -443,6 +449,25 @@
|
|||
transition: all ease 0.3s;
|
||||
}
|
||||
|
||||
.chat-message-audio {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
border-radius: 10px;
|
||||
background-color: rgba(0, 0, 0, 0.05);
|
||||
border: var(--border-in-light);
|
||||
position: relative;
|
||||
transition: all ease 0.3s;
|
||||
margin-top: 10px;
|
||||
font-size: 14px;
|
||||
user-select: text;
|
||||
word-break: break-word;
|
||||
box-sizing: border-box;
|
||||
audio {
|
||||
height: 30px; /* adjust the audio element height */
|
||||
}
|
||||
}
|
||||
|
||||
.chat-message-item-image {
|
||||
width: 100%;
|
||||
margin-top: 10px;
|
||||
|
@ -471,23 +496,27 @@
|
|||
border: rgba($color: #888, $alpha: 0.2) 1px solid;
|
||||
}
|
||||
|
||||
|
||||
@media only screen and (max-width: 600px) {
|
||||
$calc-image-width: calc(100vw/3*2/var(--image-count));
|
||||
$calc-image-width: calc(100vw / 3 * 2 / var(--image-count));
|
||||
|
||||
.chat-message-item-image-multi {
|
||||
width: $calc-image-width;
|
||||
height: $calc-image-width;
|
||||
}
|
||||
|
||||
|
||||
.chat-message-item-image {
|
||||
max-width: calc(100vw/3*2);
|
||||
max-width: calc(100vw / 3 * 2);
|
||||
}
|
||||
}
|
||||
|
||||
@media screen and (min-width: 600px) {
|
||||
$max-image-width: calc(calc(1200px - var(--sidebar-width))/3*2/var(--image-count));
|
||||
$image-width: calc(calc(var(--window-width) - var(--sidebar-width))/3*2/var(--image-count));
|
||||
$max-image-width: calc(
|
||||
calc(1200px - var(--sidebar-width)) / 3 * 2 / var(--image-count)
|
||||
);
|
||||
$image-width: calc(
|
||||
calc(var(--window-width) - var(--sidebar-width)) / 3 * 2 /
|
||||
var(--image-count)
|
||||
);
|
||||
|
||||
.chat-message-item-image-multi {
|
||||
width: $image-width;
|
||||
|
@ -497,7 +526,7 @@
|
|||
}
|
||||
|
||||
.chat-message-item-image {
|
||||
max-width: calc(calc(1200px - var(--sidebar-width))/3*2);
|
||||
max-width: calc(calc(1200px - var(--sidebar-width)) / 3 * 2);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -515,7 +544,7 @@
|
|||
z-index: 1;
|
||||
}
|
||||
|
||||
.chat-message-user>.chat-message-container>.chat-message-item {
|
||||
.chat-message-user > .chat-message-container > .chat-message-item {
|
||||
background-color: var(--second);
|
||||
|
||||
&:hover {
|
||||
|
@ -626,7 +655,8 @@
|
|||
min-height: 68px;
|
||||
}
|
||||
|
||||
.chat-input:focus {}
|
||||
.chat-input:focus {
|
||||
}
|
||||
|
||||
.chat-input-send {
|
||||
background-color: var(--primary);
|
||||
|
@ -693,4 +723,31 @@
|
|||
.shortcut-key span {
|
||||
font-size: 12px;
|
||||
color: var(--black);
|
||||
}
|
||||
}
|
||||
|
||||
.chat-main {
|
||||
display: flex;
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
position: relative;
|
||||
overflow: hidden;
|
||||
.chat-body-container {
|
||||
height: 100%;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
flex: 1;
|
||||
width: 100%;
|
||||
}
|
||||
.chat-side-panel {
|
||||
position: absolute;
|
||||
inset: 0;
|
||||
background: var(--white);
|
||||
overflow: hidden;
|
||||
z-index: 10;
|
||||
transform: translateX(100%);
|
||||
transition: all ease 0.3s;
|
||||
&-show {
|
||||
transform: translateX(0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -6,8 +6,21 @@ import EmojiPicker, {
|
|||
|
||||
import { ModelType } from "../store";
|
||||
|
||||
import BotIcon from "../icons/bot.svg";
|
||||
import BlackBotIcon from "../icons/black-bot.svg";
|
||||
import BotIconDefault from "../icons/llm-icons/default.svg";
|
||||
import BotIconOpenAI from "../icons/llm-icons/openai.svg";
|
||||
import BotIconGemini from "../icons/llm-icons/gemini.svg";
|
||||
import BotIconGemma from "../icons/llm-icons/gemma.svg";
|
||||
import BotIconClaude from "../icons/llm-icons/claude.svg";
|
||||
import BotIconMeta from "../icons/llm-icons/meta.svg";
|
||||
import BotIconMistral from "../icons/llm-icons/mistral.svg";
|
||||
import BotIconDeepseek from "../icons/llm-icons/deepseek.svg";
|
||||
import BotIconMoonshot from "../icons/llm-icons/moonshot.svg";
|
||||
import BotIconQwen from "../icons/llm-icons/qwen.svg";
|
||||
import BotIconWenxin from "../icons/llm-icons/wenxin.svg";
|
||||
import BotIconGrok from "../icons/llm-icons/grok.svg";
|
||||
import BotIconHunyuan from "../icons/llm-icons/hunyuan.svg";
|
||||
import BotIconDoubao from "../icons/llm-icons/doubao.svg";
|
||||
import BotIconChatglm from "../icons/llm-icons/chatglm.svg";
|
||||
|
||||
export function getEmojiUrl(unified: string, style: EmojiStyle) {
|
||||
// Whoever owns this Content Delivery Network (CDN), I am using your CDN to serve emojis
|
||||
|
@ -33,15 +46,55 @@ export function AvatarPicker(props: {
|
|||
}
|
||||
|
||||
export function Avatar(props: { model?: ModelType; avatar?: string }) {
|
||||
let LlmIcon = BotIconDefault;
|
||||
|
||||
if (props.model) {
|
||||
const modelName = props.model.toLowerCase();
|
||||
|
||||
if (
|
||||
modelName.startsWith("gpt") ||
|
||||
modelName.startsWith("chatgpt") ||
|
||||
modelName.startsWith("dall-e") ||
|
||||
modelName.startsWith("dalle") ||
|
||||
modelName.startsWith("o1") ||
|
||||
modelName.startsWith("o3")
|
||||
) {
|
||||
LlmIcon = BotIconOpenAI;
|
||||
} else if (modelName.startsWith("gemini")) {
|
||||
LlmIcon = BotIconGemini;
|
||||
} else if (modelName.startsWith("gemma")) {
|
||||
LlmIcon = BotIconGemma;
|
||||
} else if (modelName.startsWith("claude")) {
|
||||
LlmIcon = BotIconClaude;
|
||||
} else if (modelName.toLowerCase().includes("llama")) {
|
||||
LlmIcon = BotIconMeta;
|
||||
} else if (modelName.startsWith("mixtral")) {
|
||||
LlmIcon = BotIconMistral;
|
||||
} else if (modelName.toLowerCase().includes("deepseek")) {
|
||||
LlmIcon = BotIconDeepseek;
|
||||
} else if (modelName.startsWith("moonshot")) {
|
||||
LlmIcon = BotIconMoonshot;
|
||||
} else if (modelName.startsWith("qwen")) {
|
||||
LlmIcon = BotIconQwen;
|
||||
} else if (modelName.startsWith("ernie")) {
|
||||
LlmIcon = BotIconWenxin;
|
||||
} else if (modelName.startsWith("grok")) {
|
||||
LlmIcon = BotIconGrok;
|
||||
} else if (modelName.startsWith("hunyuan")) {
|
||||
LlmIcon = BotIconHunyuan;
|
||||
} else if (modelName.startsWith("doubao") || modelName.startsWith("ep-")) {
|
||||
LlmIcon = BotIconDoubao;
|
||||
} else if (
|
||||
modelName.toLowerCase().includes("glm") ||
|
||||
modelName.startsWith("cogview-") ||
|
||||
modelName.startsWith("cogvideox-")
|
||||
) {
|
||||
LlmIcon = BotIconChatglm;
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="no-dark">
|
||||
{props.model?.startsWith("gpt-4") ||
|
||||
props.model?.startsWith("chatgpt-4o") ? (
|
||||
<BlackBotIcon className="user-avatar" />
|
||||
) : (
|
||||
<BotIcon className="user-avatar" />
|
||||
)}
|
||||
<LlmIcon className="user-avatar" width={30} height={30} />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
|
|
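// The Avatar change above picks an LLM icon from the model name by prefix or
// substring match, falling back to a default bot icon. A small illustrative
// helper with the same shape of logic (family names only; the real component
// maps to imported SVG icons):
function pickIconFamily(model: string): string {
  const name = model.toLowerCase();
  if (
    name.startsWith("gpt") ||
    name.startsWith("chatgpt") ||
    name.startsWith("o1") ||
    name.startsWith("o3")
  ) {
    return "openai";
  }
  if (name.startsWith("gemini")) return "gemini";
  if (name.startsWith("claude")) return "claude";
  if (name.includes("deepseek")) return "deepseek";
  if (name.startsWith("grok")) return "grok";
  return "default";
}

// pickIconFamily("deepseek-chat") === "deepseek"
// pickIconFamily("my-custom-model") === "default"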
@ -23,7 +23,6 @@ import CopyIcon from "../icons/copy.svg";
|
|||
import LoadingIcon from "../icons/three-dots.svg";
|
||||
import ChatGptIcon from "../icons/chatgpt.png";
|
||||
import ShareIcon from "../icons/share.svg";
|
||||
import BotIcon from "../icons/bot.png";
|
||||
|
||||
import DownloadIcon from "../icons/download.svg";
|
||||
import { useEffect, useMemo, useRef, useState } from "react";
|
||||
|
@ -33,13 +32,14 @@ import dynamic from "next/dynamic";
|
|||
import NextImage from "next/image";
|
||||
|
||||
import { toBlob, toPng } from "html-to-image";
|
||||
import { DEFAULT_MASK_AVATAR } from "../store/mask";
|
||||
|
||||
import { prettyObject } from "../utils/format";
|
||||
import { EXPORT_MESSAGE_CLASS_NAME } from "../constant";
|
||||
import { getClientConfig } from "../config/client";
|
||||
import { type ClientApi, getClientApi } from "../client/api";
|
||||
import { getMessageTextContent } from "../utils";
|
||||
import { MaskAvatar } from "./mask";
|
||||
import clsx from "clsx";
|
||||
|
||||
const Markdown = dynamic(async () => (await import("./markdown")).Markdown, {
|
||||
loading: () => <LoadingIcon />,
|
||||
|
@ -118,9 +118,10 @@ function Steps<
|
|||
return (
|
||||
<div
|
||||
key={i}
|
||||
className={`${styles["step"]} ${
|
||||
styles[i <= props.index ? "step-finished" : ""]
|
||||
} ${i === props.index && styles["step-current"]} clickable`}
|
||||
className={clsx("clickable", styles["step"], {
|
||||
[styles["step-finished"]]: i <= props.index,
|
||||
[styles["step-current"]]: i === props.index,
|
||||
})}
|
||||
onClick={() => {
|
||||
props.onStepChange?.(i);
|
||||
}}
|
||||
|
@ -405,22 +406,6 @@ export function PreviewActions(props: {
|
|||
);
|
||||
}
|
||||
|
||||
function ExportAvatar(props: { avatar: string }) {
|
||||
if (props.avatar === DEFAULT_MASK_AVATAR) {
|
||||
return (
|
||||
<img
|
||||
src={BotIcon.src}
|
||||
width={30}
|
||||
height={30}
|
||||
alt="bot"
|
||||
className="user-avatar"
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
return <Avatar avatar={props.avatar} />;
|
||||
}
|
||||
|
||||
export function ImagePreviewer(props: {
|
||||
messages: ChatMessage[];
|
||||
topic: string;
|
||||
|
@ -525,11 +510,11 @@ export function ImagePreviewer(props: {
|
|||
messages={props.messages}
|
||||
/>
|
||||
<div
|
||||
className={`${styles["preview-body"]} ${styles["default-theme"]}`}
|
||||
className={clsx(styles["preview-body"], styles["default-theme"])}
|
||||
ref={previewRef}
|
||||
>
|
||||
<div className={styles["chat-info"]}>
|
||||
<div className={styles["logo"] + " no-dark"}>
|
||||
<div className={clsx(styles["logo"], "no-dark")}>
|
||||
<NextImage
|
||||
src={ChatGptIcon.src}
|
||||
alt="logo"
|
||||
|
@ -544,9 +529,12 @@ export function ImagePreviewer(props: {
|
|||
github.com/ChatGPTNextWeb/ChatGPT-Next-Web
|
||||
</div>
|
||||
<div className={styles["icons"]}>
|
||||
<ExportAvatar avatar={config.avatar} />
|
||||
<MaskAvatar avatar={config.avatar} />
|
||||
<span className={styles["icon-space"]}>&</span>
|
||||
<ExportAvatar avatar={mask.avatar} />
|
||||
<MaskAvatar
|
||||
avatar={mask.avatar}
|
||||
model={session.mask.modelConfig.model}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<div>
|
||||
|
@ -570,13 +558,18 @@ export function ImagePreviewer(props: {
|
|||
{props.messages.map((m, i) => {
|
||||
return (
|
||||
<div
|
||||
className={styles["message"] + " " + styles["message-" + m.role]}
|
||||
className={clsx(styles["message"], styles["message-" + m.role])}
|
||||
key={i}
|
||||
>
|
||||
<div className={styles["avatar"]}>
|
||||
<ExportAvatar
|
||||
avatar={m.role === "user" ? config.avatar : mask.avatar}
|
||||
/>
|
||||
{m.role === "user" ? (
|
||||
<Avatar avatar={config.avatar}></Avatar>
|
||||
) : (
|
||||
<MaskAvatar
|
||||
avatar={session.mask.avatar}
|
||||
model={m.model || session.mask.modelConfig.model}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<div className={styles["body"]}>
|
||||
|
|
|
@ -140,6 +140,9 @@
|
|||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
&-narrow {
|
||||
justify-content: center;
|
||||
}
|
||||
}
|
||||
|
||||
.sidebar-logo {
|
||||
|
|
|
@ -2,8 +2,7 @@
|
|||
|
||||
require("../polyfill");
|
||||
|
||||
import { useState, useEffect } from "react";
|
||||
|
||||
import { useEffect, useState } from "react";
|
||||
import styles from "./home.module.scss";
|
||||
|
||||
import BotIcon from "../icons/bot.svg";
|
||||
|
@ -19,8 +18,8 @@ import { getISOLang, getLang } from "../locales";
|
|||
|
||||
import {
|
||||
HashRouter as Router,
|
||||
Routes,
|
||||
Route,
|
||||
Routes,
|
||||
useLocation,
|
||||
} from "react-router-dom";
|
||||
import { SideBar } from "./sidebar";
|
||||
|
@ -29,10 +28,12 @@ import { AuthPage } from "./auth";
|
|||
import { getClientConfig } from "../config/client";
|
||||
import { type ClientApi, getClientApi } from "../client/api";
|
||||
import { useAccessStore } from "../store";
|
||||
import clsx from "clsx";
|
||||
import { initializeMcpSystem, isMcpEnabled } from "../mcp/actions";
|
||||
|
||||
export function Loading(props: { noLogo?: boolean }) {
|
||||
return (
|
||||
<div className={styles["loading-content"] + " no-dark"}>
|
||||
<div className={clsx("no-dark", styles["loading-content"])}>
|
||||
{!props.noLogo && <BotIcon />}
|
||||
<LoadingIcon />
|
||||
</div>
|
||||
|
@ -74,6 +75,13 @@ const Sd = dynamic(async () => (await import("./sd")).Sd, {
|
|||
loading: () => <Loading noLogo />,
|
||||
});
|
||||
|
||||
const McpMarketPage = dynamic(
|
||||
async () => (await import("./mcp-market")).McpMarketPage,
|
||||
{
|
||||
loading: () => <Loading noLogo />,
|
||||
},
|
||||
);
|
||||
|
||||
export function useSwitchTheme() {
|
||||
const config = useAppConfig();
|
||||
|
||||
|
@ -179,7 +187,11 @@ function Screen() {
|
|||
if (isSdNew) return <Sd />;
|
||||
return (
|
||||
<>
|
||||
<SideBar className={isHome ? styles["sidebar-show"] : ""} />
|
||||
<SideBar
|
||||
className={clsx({
|
||||
[styles["sidebar-show"]]: isHome,
|
||||
})}
|
||||
/>
|
||||
<WindowContent>
|
||||
<Routes>
|
||||
<Route path={Path.Home} element={<Chat />} />
|
||||
|
@ -189,6 +201,7 @@ function Screen() {
|
|||
<Route path={Path.SearchChat} element={<SearchChat />} />
|
||||
<Route path={Path.Chat} element={<Chat />} />
|
||||
<Route path={Path.Settings} element={<Settings />} />
|
||||
<Route path={Path.McpMarket} element={<McpMarketPage />} />
|
||||
</Routes>
|
||||
</WindowContent>
|
||||
</>
|
||||
|
@ -197,9 +210,10 @@ function Screen() {
|
|||
|
||||
return (
|
||||
<div
|
||||
className={`${styles.container} ${
|
||||
shouldTightBorder ? styles["tight-container"] : styles.container
|
||||
} ${getLang() === "ar" ? styles["rtl-screen"] : ""}`}
|
||||
className={clsx(styles.container, {
|
||||
[styles["tight-container"]]: shouldTightBorder,
|
||||
[styles["rtl-screen"]]: getLang() === "ar",
|
||||
})}
|
||||
>
|
||||
{renderContent()}
|
||||
</div>
|
||||
|
@ -228,6 +242,20 @@ export function Home() {
|
|||
useEffect(() => {
|
||||
console.log("[Config] got config from build time", getClientConfig());
|
||||
useAccessStore.getState().fetch();
|
||||
|
||||
const initMcp = async () => {
|
||||
try {
|
||||
const enabled = await isMcpEnabled();
|
||||
if (enabled) {
|
||||
console.log("[MCP] initializing...");
|
||||
await initializeMcpSystem();
|
||||
console.log("[MCP] initialized");
|
||||
}
|
||||
} catch (err) {
|
||||
console.error("[MCP] failed to initialize:", err);
|
||||
}
|
||||
};
|
||||
initMcp();
|
||||
}, []);
|
||||
|
||||
if (!useHasHydrated()) {
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import * as React from "react";
|
||||
import styles from "./input-range.module.scss";
|
||||
import clsx from "clsx";
|
||||
|
||||
interface InputRangeProps {
|
||||
onChange: React.ChangeEventHandler<HTMLInputElement>;
|
||||
|
@ -23,7 +24,7 @@ export function InputRange({
|
|||
aria,
|
||||
}: InputRangeProps) {
|
||||
return (
|
||||
<div className={styles["input-range"] + ` ${className ?? ""}`}>
|
||||
<div className={clsx(styles["input-range"], className)}>
|
||||
{title || value}
|
||||
<input
|
||||
aria-label={aria}
|
||||
|
|
|
@ -23,6 +23,7 @@ import { useChatStore } from "../store";
|
|||
import { IconButton } from "./button";
|
||||
|
||||
import { useAppConfig } from "../store/config";
|
||||
import clsx from "clsx";
|
||||
|
||||
export function Mermaid(props: { code: string }) {
|
||||
const ref = useRef<HTMLDivElement>(null);
|
||||
|
@ -57,7 +58,7 @@ export function Mermaid(props: { code: string }) {
|
|||
|
||||
return (
|
||||
<div
|
||||
className="no-dark mermaid"
|
||||
className={clsx("no-dark", "mermaid")}
|
||||
style={{
|
||||
cursor: "pointer",
|
||||
overflow: "auto",
|
||||
|
@ -89,7 +90,11 @@ export function PreCode(props: { children: any }) {
|
|||
const refText = ref.current.querySelector("code")?.innerText;
|
||||
if (htmlDom) {
|
||||
setHtmlCode((htmlDom as HTMLElement).innerText);
|
||||
} else if (refText?.startsWith("<!DOCTYPE")) {
|
||||
} else if (
|
||||
refText?.startsWith("<!DOCTYPE") ||
|
||||
refText?.startsWith("<svg") ||
|
||||
refText?.startsWith("<?xml")
|
||||
) {
|
||||
setHtmlCode(refText);
|
||||
}
|
||||
}, 600);
|
||||
|
@ -169,6 +174,12 @@ export function PreCode(props: { children: any }) {
|
|||
}
|
||||
|
||||
function CustomCode(props: { children: any; className?: string }) {
|
||||
const chatStore = useChatStore();
|
||||
const session = chatStore.currentSession();
|
||||
const config = useAppConfig();
|
||||
const enableCodeFold =
|
||||
session.mask?.enableCodeFold !== false && config.enableCodeFold;
|
||||
|
||||
const ref = useRef<HTMLPreElement>(null);
|
||||
const [collapsed, setCollapsed] = useState(true);
|
||||
const [showToggle, setShowToggle] = useState(false);
|
||||
|
@ -184,25 +195,35 @@ function CustomCode(props: { children: any; className?: string }) {
|
|||
const toggleCollapsed = () => {
|
||||
setCollapsed((collapsed) => !collapsed);
|
||||
};
|
||||
const renderShowMoreButton = () => {
|
||||
if (showToggle && enableCodeFold && collapsed) {
|
||||
return (
|
||||
<div
|
||||
className={clsx("show-hide-button", {
|
||||
collapsed,
|
||||
expanded: !collapsed,
|
||||
})}
|
||||
>
|
||||
<button onClick={toggleCollapsed}>{Locale.NewChat.More}</button>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
return null;
|
||||
};
|
||||
return (
|
||||
<>
|
||||
<code
|
||||
className={props?.className}
|
||||
className={clsx(props?.className)}
|
||||
ref={ref}
|
||||
style={{
|
||||
maxHeight: collapsed ? "400px" : "none",
|
||||
maxHeight: enableCodeFold && collapsed ? "400px" : "none",
|
||||
overflowY: "hidden",
|
||||
}}
|
||||
>
|
||||
{props.children}
|
||||
</code>
|
||||
{showToggle && collapsed && (
|
||||
<div
|
||||
className={`show-hide-button ${collapsed ? "collapsed" : "expanded"}`}
|
||||
>
|
||||
<button onClick={toggleCollapsed}>{Locale.NewChat.More}</button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{renderShowMoreButton()}
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
@ -227,6 +248,10 @@ function escapeBrackets(text: string) {
|
|||
|
||||
function tryWrapHtmlCode(text: string) {
|
||||
// try to wrap html code (fix: an html code block includes 2 newlines)
|
||||
// ignore embedded code blocks
|
||||
if (text.includes("```")) {
|
||||
return text;
|
||||
}
|
||||
return text
|
||||
.replace(
|
||||
/([`]*?)(\w*?)([\n\r]*?)(<!DOCTYPE html>)/g,
|
||||
|
|
|
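// The CustomCode change above only folds long code blocks when both the global
// setting and the per-mask setting allow it; a mask opts out by setting
// enableCodeFold to false. The gating boils down to this check:
function shouldFoldSketch(
  maskEnableCodeFold: boolean | undefined,
  globalEnableCodeFold: boolean,
): boolean {
  // undefined on the mask means "inherit", so only an explicit false disables folding
  return maskEnableCodeFold !== false && globalEnableCodeFold;
}

// shouldFoldSketch(undefined, true) === true
// shouldFoldSketch(false, true) === false
// shouldFoldSketch(true, false) === false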
@ -55,6 +55,7 @@ import {
|
|||
OnDragEndResponder,
|
||||
} from "@hello-pangea/dnd";
|
||||
import { getMessageTextContent } from "../utils";
|
||||
import clsx from "clsx";
|
||||
|
||||
// drag and drop helper function
|
||||
function reorder<T>(list: T[], startIndex: number, endIndex: number): T[] {
|
||||
|
@ -183,6 +184,23 @@ export function MaskConfig(props: {
|
|||
></input>
|
||||
</ListItem>
|
||||
)}
|
||||
{globalConfig.enableCodeFold && (
|
||||
<ListItem
|
||||
title={Locale.Mask.Config.CodeFold.Title}
|
||||
subTitle={Locale.Mask.Config.CodeFold.SubTitle}
|
||||
>
|
||||
<input
|
||||
aria-label={Locale.Mask.Config.CodeFold.Title}
|
||||
type="checkbox"
|
||||
checked={props.mask.enableCodeFold !== false}
|
||||
onChange={(e) => {
|
||||
props.updateMask((mask) => {
|
||||
mask.enableCodeFold = e.currentTarget.checked;
|
||||
});
|
||||
}}
|
||||
></input>
|
||||
</ListItem>
|
||||
)}
|
||||
|
||||
{!props.shouldSyncFromGlobal ? (
|
||||
<ListItem
|
||||
|
@ -571,7 +589,7 @@ export function MaskPage() {
|
|||
</div>
|
||||
<div className={styles["mask-title"]}>
|
||||
<div className={styles["mask-name"]}>{m.name}</div>
|
||||
<div className={styles["mask-info"] + " one-line"}>
|
||||
<div className={clsx(styles["mask-info"], "one-line")}>
|
||||
{`${Locale.Mask.Item.Info(m.context.length)} / ${
|
||||
ALL_LANG_OPTIONS[m.lang]
|
||||
} / ${m.modelConfig.model}`}
|
||||
|
|
|
@ -0,0 +1,657 @@
|
|||
@import "../styles/animation.scss";
|
||||
|
||||
.mcp-market-page {
|
||||
height: 100%;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
|
||||
.loading-indicator {
|
||||
font-size: 12px;
|
||||
color: var(--primary);
|
||||
margin-left: 8px;
|
||||
font-weight: normal;
|
||||
opacity: 0.8;
|
||||
}
|
||||
|
||||
.mcp-market-page-body {
|
||||
padding: 20px;
|
||||
overflow-y: auto;
|
||||
|
||||
.loading-container,
|
||||
.empty-container {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
min-height: 200px;
|
||||
width: 100%;
|
||||
background-color: var(--white);
|
||||
border: var(--border-in-light);
|
||||
border-radius: 10px;
|
||||
animation: slide-in ease 0.3s;
|
||||
}
|
||||
|
||||
.loading-text,
|
||||
.empty-text {
|
||||
font-size: 14px;
|
||||
color: var(--black);
|
||||
opacity: 0.5;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.mcp-market-filter {
|
||||
width: 100%;
|
||||
max-width: 100%;
|
||||
margin-bottom: 20px;
|
||||
animation: slide-in ease 0.3s;
|
||||
height: 40px;
|
||||
display: flex;
|
||||
|
||||
.search-bar {
|
||||
flex-grow: 1;
|
||||
max-width: 100%;
|
||||
min-width: 0;
|
||||
}
|
||||
}
|
||||
|
||||
.server-list {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 1px;
|
||||
}
|
||||
|
||||
.mcp-market-item {
|
||||
padding: 20px;
|
||||
border: var(--border-in-light);
|
||||
animation: slide-in ease 0.3s;
|
||||
background-color: var(--white);
|
||||
transition: all 0.3s ease;
|
||||
|
||||
&.disabled {
|
||||
opacity: 0.7;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
&:not(:last-child) {
|
||||
border-bottom: 0;
|
||||
}
|
||||
|
||||
&:first-child {
|
||||
border-top-left-radius: 10px;
|
||||
border-top-right-radius: 10px;
|
||||
}
|
||||
|
||||
&:last-child {
|
||||
border-bottom-left-radius: 10px;
|
||||
border-bottom-right-radius: 10px;
|
||||
}
|
||||
|
||||
&.loading {
|
||||
position: relative;
|
||||
&::after {
|
||||
content: "";
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
background: linear-gradient(
|
||||
90deg,
|
||||
transparent,
|
||||
rgba(255, 255, 255, 0.2),
|
||||
transparent
|
||||
);
|
||||
background-size: 200% 100%;
|
||||
animation: loading-pulse 1.5s infinite;
|
||||
}
|
||||
}
|
||||
|
||||
.operation-status {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
margin-left: 10px;
|
||||
padding: 2px 8px;
|
||||
border-radius: 4px;
|
||||
font-size: 12px;
|
||||
background-color: #16a34a;
|
||||
color: #fff;
|
||||
animation: pulse 1.5s infinite;
|
||||
|
||||
&[data-status="stopping"] {
|
||||
background-color: #9ca3af;
|
||||
}
|
||||
|
||||
&[data-status="starting"] {
|
||||
background-color: #4ade80;
|
||||
}
|
||||
|
||||
&[data-status="error"] {
|
||||
background-color: #f87171;
|
||||
}
|
||||
}
|
||||
|
||||
.mcp-market-header {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: flex-start;
|
||||
width: 100%;
|
||||
|
||||
.mcp-market-title {
|
||||
flex-grow: 1;
|
||||
margin-right: 20px;
|
||||
max-width: calc(100% - 300px);
|
||||
}
|
||||
|
||||
.mcp-market-name {
|
||||
font-size: 14px;
|
||||
font-weight: bold;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
margin-bottom: 8px;
|
||||
|
||||
.server-status {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
margin-left: 10px;
|
||||
padding: 2px 8px;
|
||||
border-radius: 4px;
|
||||
font-size: 12px;
|
||||
background-color: #22c55e;
|
||||
color: #fff;
|
||||
|
||||
&.error {
|
||||
background-color: #ef4444;
|
||||
}
|
||||
|
||||
&.stopped {
|
||||
background-color: #6b7280;
|
||||
}
|
||||
|
||||
&.initializing {
|
||||
background-color: #f59e0b;
|
||||
animation: pulse 1.5s infinite;
|
||||
}
|
||||
|
||||
.error-message {
|
||||
margin-left: 4px;
|
||||
font-size: 12px;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.repo-link {
|
||||
color: var(--primary);
|
||||
font-size: 12px;
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
text-decoration: none;
|
||||
opacity: 0.8;
|
||||
transition: opacity 0.2s;
|
||||
|
||||
&:hover {
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
svg {
|
||||
width: 14px;
|
||||
height: 14px;
|
||||
}
|
||||
}
|
||||
|
||||
.tags-container {
|
||||
display: flex;
|
||||
gap: 4px;
|
||||
flex-wrap: wrap;
|
||||
margin-bottom: 8px;
|
||||
}
|
||||
|
||||
.tag {
|
||||
background: var(--gray);
|
||||
color: var(--black);
|
||||
padding: 2px 6px;
|
||||
border-radius: 4px;
|
||||
font-size: 10px;
|
||||
opacity: 0.8;
|
||||
}
|
||||
|
||||
.mcp-market-info {
|
||||
color: var(--black);
|
||||
font-size: 12px;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.mcp-market-actions {
|
||||
display: flex;
|
||||
gap: 12px;
|
||||
align-items: flex-start;
|
||||
flex-shrink: 0;
|
||||
min-width: 180px;
|
||||
justify-content: flex-end;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.array-input {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 12px;
|
||||
width: 100%;
|
||||
padding: 16px;
|
||||
border: 1px solid var(--gray-200);
|
||||
border-radius: 10px;
|
||||
background-color: var(--white);
|
||||
|
||||
.array-input-item {
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
align-items: center;
|
||||
width: 100%;
|
||||
padding: 0;
|
||||
|
||||
input {
|
||||
width: 100%;
|
||||
padding: 8px 12px;
|
||||
background-color: var(--gray-50);
|
||||
border-radius: 6px;
|
||||
transition: all 0.3s ease;
|
||||
font-size: 13px;
|
||||
border: 1px solid var(--gray-200);
|
||||
|
||||
&:hover {
|
||||
background-color: var(--gray-100);
|
||||
border-color: var(--gray-300);
|
||||
}
|
||||
|
||||
&:focus {
|
||||
background-color: var(--white);
|
||||
border-color: var(--primary);
|
||||
outline: none;
|
||||
box-shadow: 0 0 0 2px var(--primary-10);
|
||||
}
|
||||
|
||||
&::placeholder {
|
||||
color: var(--gray-300);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
:global(.icon-button.add-path-button) {
|
||||
width: 100%;
|
||||
background-color: var(--primary);
|
||||
color: white;
|
||||
padding: 8px 12px;
|
||||
border-radius: 6px;
|
||||
transition: all 0.3s ease;
|
||||
margin-top: 8px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
border: none;
|
||||
height: 36px;
|
||||
|
||||
&:hover {
|
||||
background-color: var(--primary-dark);
|
||||
}
|
||||
|
||||
svg {
|
||||
width: 16px;
|
||||
height: 16px;
|
||||
margin-right: 4px;
|
||||
filter: brightness(2);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.path-list {
|
||||
width: 100%;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 10px;
|
||||
|
||||
.path-item {
|
||||
display: flex;
|
||||
gap: 10px;
|
||||
width: 100%;
|
||||
|
||||
input {
|
||||
flex: 1;
|
||||
width: 100%;
|
||||
max-width: 100%;
|
||||
padding: 10px;
|
||||
border: var(--border-in-light);
|
||||
border-radius: 10px;
|
||||
box-sizing: border-box;
|
||||
font-size: 14px;
|
||||
background-color: var(--white);
|
||||
color: var(--black);
|
||||
|
||||
&:hover {
|
||||
border-color: var(--gray-300);
|
||||
}
|
||||
|
||||
&:focus {
|
||||
border-color: var(--primary);
|
||||
outline: none;
|
||||
box-shadow: 0 0 0 2px var(--primary-10);
|
||||
}
|
||||
}
|
||||
|
||||
.browse-button {
|
||||
padding: 8px;
|
||||
border: var(--border-in-light);
|
||||
border-radius: 10px;
|
||||
background-color: transparent;
|
||||
color: var(--black-50);
|
||||
|
||||
&:hover {
|
||||
border-color: var(--primary);
|
||||
color: var(--primary);
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
svg {
|
||||
width: 16px;
|
||||
height: 16px;
|
||||
}
|
||||
}
|
||||
|
||||
.delete-button {
|
||||
padding: 8px;
|
||||
border: var(--border-in-light);
|
||||
border-radius: 10px;
|
||||
background-color: transparent;
|
||||
color: var(--black-50);
|
||||
|
||||
&:hover {
|
||||
border-color: var(--danger);
|
||||
color: var(--danger);
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
svg {
|
||||
width: 16px;
|
||||
height: 16px;
|
||||
}
|
||||
}
|
||||
|
||||
.file-input {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
|
||||
.add-button {
|
||||
align-self: flex-start;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 5px;
|
||||
padding: 8px 12px;
|
||||
background-color: transparent;
|
||||
border: var(--border-in-light);
|
||||
border-radius: 10px;
|
||||
color: var(--black);
|
||||
font-size: 12px;
|
||||
margin-top: 5px;
|
||||
|
||||
&:hover {
|
||||
border-color: var(--primary);
|
||||
color: var(--primary);
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
svg {
|
||||
width: 16px;
|
||||
height: 16px;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.config-section {
|
||||
width: 100%;
|
||||
|
||||
.config-header {
|
||||
margin-bottom: 12px;
|
||||
|
||||
.config-title {
|
||||
font-size: 14px;
|
||||
font-weight: 600;
|
||||
color: var(--black);
|
||||
text-transform: capitalize;
|
||||
}
|
||||
|
||||
.config-description {
|
||||
font-size: 12px;
|
||||
color: var(--gray-500);
|
||||
margin-top: 4px;
|
||||
}
|
||||
}
|
||||
|
||||
.array-input {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 12px;
|
||||
width: 100%;
|
||||
padding: 16px;
|
||||
border: 1px solid var(--gray-200);
|
||||
border-radius: 10px;
|
||||
background-color: var(--white);
|
||||
|
||||
.array-input-item {
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
align-items: center;
|
||||
width: 100%;
|
||||
padding: 0;
|
||||
|
||||
input {
|
||||
width: 100%;
|
||||
padding: 8px 12px;
|
||||
background-color: var(--gray-50);
|
||||
border-radius: 6px;
|
||||
transition: all 0.3s ease;
|
||||
font-size: 13px;
|
||||
border: 1px solid var(--gray-200);
|
||||
|
||||
&:hover {
|
||||
background-color: var(--gray-100);
|
||||
border-color: var(--gray-300);
|
||||
}
|
||||
|
||||
&:focus {
|
||||
background-color: var(--white);
|
||||
border-color: var(--primary);
|
||||
outline: none;
|
||||
box-shadow: 0 0 0 2px var(--primary-10);
|
||||
}
|
||||
|
||||
&::placeholder {
|
||||
color: var(--gray-300);
|
||||
}
|
||||
}
|
||||
|
||||
:global(.icon-button) {
|
||||
width: 32px;
|
||||
height: 32px;
|
||||
padding: 0;
|
||||
border-radius: 6px;
|
||||
background-color: transparent;
|
||||
border: 1px solid var(--gray-200);
|
||||
flex-shrink: 0;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
|
||||
&:hover {
|
||||
background-color: var(--gray-100);
|
||||
border-color: var(--gray-300);
|
||||
}
|
||||
|
||||
svg {
|
||||
width: 16px;
|
||||
height: 16px;
|
||||
opacity: 0.7;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
:global(.icon-button.add-path-button) {
|
||||
width: 100%;
|
||||
background-color: var(--primary);
|
||||
color: white;
|
||||
padding: 8px 12px;
|
||||
border-radius: 6px;
|
||||
transition: all 0.3s ease;
|
||||
margin-top: 8px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
border: none;
|
||||
height: 36px;
|
||||
|
||||
&:hover {
|
||||
background-color: var(--primary-dark);
|
||||
}
|
||||
|
||||
svg {
|
||||
width: 16px;
|
||||
height: 16px;
|
||||
margin-right: 4px;
|
||||
filter: brightness(2);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.input-item {
|
||||
width: 100%;
|
||||
|
||||
input {
|
||||
width: 100%;
|
||||
padding: 10px;
|
||||
border: var(--border-in-light);
|
||||
border-radius: 10px;
|
||||
box-sizing: border-box;
|
||||
font-size: 14px;
|
||||
background-color: var(--white);
|
||||
color: var(--black);
|
||||
|
||||
&:hover {
|
||||
border-color: var(--gray-300);
|
||||
}
|
||||
|
||||
&:focus {
|
||||
border-color: var(--primary);
|
||||
outline: none;
|
||||
box-shadow: 0 0 0 2px var(--primary-10);
|
||||
}
|
||||
|
||||
&::placeholder {
|
||||
color: var(--gray-300) !important;
|
||||
opacity: 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.tools-list {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 16px;
|
||||
width: 100%;
|
||||
padding: 20px;
|
||||
max-width: 100%;
|
||||
overflow-x: hidden;
|
||||
word-break: break-word;
|
||||
box-sizing: border-box;
|
||||
|
||||
.tool-item {
|
||||
width: 100%;
|
||||
box-sizing: border-box;
|
||||
|
||||
.tool-name {
|
||||
font-size: 14px;
|
||||
font-weight: 600;
|
||||
color: var(--black);
|
||||
margin-bottom: 8px;
|
||||
padding-left: 12px;
|
||||
border-left: 3px solid var(--primary);
|
||||
box-sizing: border-box;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.tool-description {
|
||||
font-size: 13px;
|
||||
color: var(--gray-500);
|
||||
line-height: 1.6;
|
||||
padding-left: 15px;
|
||||
box-sizing: border-box;
|
||||
width: 100%;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
:global {
|
||||
.modal-content {
|
||||
margin-top: 20px;
|
||||
max-width: 100%;
|
||||
overflow-x: hidden;
|
||||
}
|
||||
|
||||
.list {
|
||||
padding: 10px;
|
||||
margin-bottom: 10px;
|
||||
background-color: var(--white);
|
||||
}
|
||||
|
||||
.list-item {
|
||||
border: none;
|
||||
background-color: transparent;
|
||||
border-radius: 10px;
|
||||
padding: 10px;
|
||||
margin-bottom: 10px;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 10px;
|
||||
|
||||
.list-header {
|
||||
margin-bottom: 0;
|
||||
|
||||
.list-title {
|
||||
font-size: 14px;
|
||||
font-weight: bold;
|
||||
text-transform: capitalize;
|
||||
color: var(--black);
|
||||
}
|
||||
|
||||
.list-sub-title {
|
||||
font-size: 12px;
|
||||
color: var(--gray-500);
|
||||
margin-top: 4px;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes loading-pulse {
|
||||
0% {
|
||||
background-position: 200% 0;
|
||||
}
|
||||
100% {
|
||||
background-position: -200% 0;
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes pulse {
|
||||
0% {
|
||||
opacity: 0.6;
|
||||
}
|
||||
50% {
|
||||
opacity: 1;
|
||||
}
|
||||
100% {
|
||||
opacity: 0.6;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,755 @@
|
|||
import { IconButton } from "./button";
|
||||
import { ErrorBoundary } from "./error";
|
||||
import styles from "./mcp-market.module.scss";
|
||||
import EditIcon from "../icons/edit.svg";
|
||||
import AddIcon from "../icons/add.svg";
|
||||
import CloseIcon from "../icons/close.svg";
|
||||
import DeleteIcon from "../icons/delete.svg";
|
||||
import RestartIcon from "../icons/reload.svg";
|
||||
import EyeIcon from "../icons/eye.svg";
|
||||
import GithubIcon from "../icons/github.svg";
|
||||
import { List, ListItem, Modal, showToast } from "./ui-lib";
|
||||
import { useNavigate } from "react-router-dom";
|
||||
import { useEffect, useState } from "react";
|
||||
import {
|
||||
addMcpServer,
|
||||
getClientsStatus,
|
||||
getClientTools,
|
||||
getMcpConfigFromFile,
|
||||
isMcpEnabled,
|
||||
pauseMcpServer,
|
||||
restartAllClients,
|
||||
resumeMcpServer,
|
||||
} from "../mcp/actions";
|
||||
import {
|
||||
ListToolsResponse,
|
||||
McpConfigData,
|
||||
PresetServer,
|
||||
ServerConfig,
|
||||
ServerStatusResponse,
|
||||
} from "../mcp/types";
|
||||
import clsx from "clsx";
|
||||
import PlayIcon from "../icons/play.svg";
|
||||
import StopIcon from "../icons/pause.svg";
|
||||
import { Path } from "../constant";
|
||||
|
||||
interface ConfigProperty {
|
||||
type: string;
|
||||
description?: string;
|
||||
required?: boolean;
|
||||
minItems?: number;
|
||||
}
|
||||
|
||||
export function McpMarketPage() {
|
||||
const navigate = useNavigate();
|
||||
const [mcpEnabled, setMcpEnabled] = useState(false);
|
||||
const [searchText, setSearchText] = useState("");
|
||||
const [userConfig, setUserConfig] = useState<Record<string, any>>({});
|
||||
const [editingServerId, setEditingServerId] = useState<string | undefined>();
|
||||
const [tools, setTools] = useState<ListToolsResponse["tools"] | null>(null);
|
||||
const [viewingServerId, setViewingServerId] = useState<string | undefined>();
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const [config, setConfig] = useState<McpConfigData>();
|
||||
const [clientStatuses, setClientStatuses] = useState<
|
||||
Record<string, ServerStatusResponse>
|
||||
>({});
|
||||
const [loadingPresets, setLoadingPresets] = useState(true);
|
||||
const [presetServers, setPresetServers] = useState<PresetServer[]>([]);
|
||||
const [loadingStates, setLoadingStates] = useState<Record<string, string>>(
|
||||
{},
|
||||
);
|
||||
|
||||
// 检查 MCP 是否启用
|
||||
useEffect(() => {
|
||||
const checkMcpStatus = async () => {
|
||||
const enabled = await isMcpEnabled();
|
||||
setMcpEnabled(enabled);
|
||||
if (!enabled) {
|
||||
navigate(Path.Home);
|
||||
}
|
||||
};
|
||||
checkMcpStatus();
|
||||
}, [navigate]);
|
||||
|
||||
// 添加状态轮询
|
||||
useEffect(() => {
|
||||
if (!mcpEnabled || !config) return;
|
||||
|
||||
const updateStatuses = async () => {
|
||||
const statuses = await getClientsStatus();
|
||||
setClientStatuses(statuses);
|
||||
};
|
||||
|
||||
// 立即执行一次
|
||||
updateStatuses();
|
||||
// Then poll every 1000ms
|
||||
const timer = setInterval(updateStatuses, 1000);
|
||||
|
||||
return () => clearInterval(timer);
|
||||
}, [mcpEnabled, config]);
|
||||
|
||||
// Load preset servers
|
||||
useEffect(() => {
|
||||
const loadPresetServers = async () => {
|
||||
if (!mcpEnabled) return;
|
||||
try {
|
||||
setLoadingPresets(true);
|
||||
const response = await fetch("https://nextchat.club/mcp/list");
|
||||
if (!response.ok) {
|
||||
throw new Error("Failed to load preset servers");
|
||||
}
|
||||
const data = await response.json();
|
||||
setPresetServers(data?.data ?? []);
|
||||
} catch (error) {
|
||||
console.error("Failed to load preset servers:", error);
|
||||
showToast("Failed to load preset servers");
|
||||
} finally {
|
||||
setLoadingPresets(false);
|
||||
}
|
||||
};
|
||||
loadPresetServers();
|
||||
}, [mcpEnabled]);
|
||||
|
||||
// Load initial state
|
||||
useEffect(() => {
|
||||
const loadInitialState = async () => {
|
||||
if (!mcpEnabled) return;
|
||||
try {
|
||||
setIsLoading(true);
|
||||
const config = await getMcpConfigFromFile();
|
||||
setConfig(config);
|
||||
|
||||
// Fetch the status of all clients
|
||||
const statuses = await getClientsStatus();
|
||||
setClientStatuses(statuses);
|
||||
} catch (error) {
|
||||
console.error("Failed to load initial state:", error);
|
||||
showToast("Failed to load initial state");
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
};
|
||||
loadInitialState();
|
||||
}, [mcpEnabled]);
|
||||
|
||||
// Load the config of the server currently being edited
|
||||
useEffect(() => {
|
||||
if (!editingServerId || !config) return;
|
||||
const currentConfig = config.mcpServers[editingServerId];
|
||||
if (currentConfig) {
|
||||
// Extract the user config from the current config
|
||||
const preset = presetServers.find((s) => s.id === editingServerId);
|
||||
if (preset?.configSchema) {
|
||||
const userConfig: Record<string, any> = {};
|
||||
Object.entries(preset.argsMapping || {}).forEach(([key, mapping]) => {
|
||||
if (mapping.type === "spread") {
|
||||
// For spread types, extract the array from args.
|
||||
const startPos = mapping.position ?? 0;
|
||||
userConfig[key] = currentConfig.args.slice(startPos);
|
||||
} else if (mapping.type === "single") {
|
||||
// For single types, get a single value
|
||||
userConfig[key] = currentConfig.args[mapping.position ?? 0];
|
||||
} else if (
|
||||
mapping.type === "env" &&
|
||||
mapping.key &&
|
||||
currentConfig.env
|
||||
) {
|
||||
// For env types, get values from environment variables
|
||||
userConfig[key] = currentConfig.env[mapping.key];
|
||||
}
|
||||
});
|
||||
setUserConfig(userConfig);
|
||||
}
|
||||
} else {
|
||||
setUserConfig({});
|
||||
}
|
||||
}, [editingServerId, config, presetServers]);
|
||||
|
||||
if (!mcpEnabled) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Check whether a server has already been added
|
||||
const isServerAdded = (id: string) => {
|
||||
return id in (config?.mcpServers ?? {});
|
||||
};
|
||||
|
||||
// Save the server configuration
|
||||
const saveServerConfig = async () => {
|
||||
const preset = presetServers.find((s) => s.id === editingServerId);
|
||||
if (!preset || !preset.configSchema || !editingServerId) return;
|
||||
|
||||
const savingServerId = editingServerId;
|
||||
setEditingServerId(undefined);
|
||||
|
||||
try {
|
||||
updateLoadingState(savingServerId, "Updating configuration...");
|
||||
// Build the server configuration
|
||||
const args = [...preset.baseArgs];
|
||||
const env: Record<string, string> = {};
|
||||
|
||||
Object.entries(preset.argsMapping || {}).forEach(([key, mapping]) => {
|
||||
const value = userConfig[key];
|
||||
if (mapping.type === "spread" && Array.isArray(value)) {
|
||||
const pos = mapping.position ?? 0;
|
||||
args.splice(pos, 0, ...value);
|
||||
} else if (
|
||||
mapping.type === "single" &&
|
||||
mapping.position !== undefined
|
||||
) {
|
||||
args[mapping.position] = value;
|
||||
} else if (
|
||||
mapping.type === "env" &&
|
||||
mapping.key &&
|
||||
typeof value === "string"
|
||||
) {
|
||||
env[mapping.key] = value;
|
||||
}
|
||||
});
|
||||
|
||||
const serverConfig: ServerConfig = {
|
||||
command: preset.command,
|
||||
args,
|
||||
...(Object.keys(env).length > 0 ? { env } : {}),
|
||||
};
|
||||
|
||||
const newConfig = await addMcpServer(savingServerId, serverConfig);
|
||||
setConfig(newConfig);
|
||||
showToast("Server configuration updated successfully");
|
||||
} catch (error) {
|
||||
showToast(
|
||||
error instanceof Error ? error.message : "Failed to save configuration",
|
||||
);
|
||||
} finally {
|
||||
updateLoadingState(savingServerId, null);
|
||||
}
|
||||
};
|
||||
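// Illustration of how saveServerConfig above turns user input into a ServerConfig
// (all preset names and values below are hypothetical, not a real preset):
//   preset.baseArgs        = ["run", "server"]
//   argsMapping.paths      = { type: "spread", position: 2 }  with userConfig.paths  = ["/data/a", "/data/b"]
//   argsMapping.apiKey     = { type: "env", key: "API_KEY" }  with userConfig.apiKey = "secret"
//   resulting ServerConfig = { command: preset.command,
//                              args: ["run", "server", "/data/a", "/data/b"],
//                              env: { API_KEY: "secret" } }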
|
||||
// Fetch the tools supported by a server
|
||||
const loadTools = async (id: string) => {
|
||||
try {
|
||||
const result = await getClientTools(id);
|
||||
if (result) {
|
||||
setTools(result);
|
||||
} else {
|
||||
throw new Error("Failed to load tools");
|
||||
}
|
||||
} catch (error) {
|
||||
showToast("Failed to load tools");
|
||||
console.error(error);
|
||||
setTools(null);
|
||||
}
|
||||
};
|
||||
|
||||
// Helper to update the loading state
|
||||
const updateLoadingState = (id: string, message: string | null) => {
|
||||
setLoadingStates((prev) => {
|
||||
if (message === null) {
|
||||
const { [id]: _, ...rest } = prev;
|
||||
return rest;
|
||||
}
|
||||
return { ...prev, [id]: message };
|
||||
});
|
||||
};
|
||||
|
||||
// Add a server
|
||||
const addServer = async (preset: PresetServer) => {
|
||||
if (!preset.configurable) {
|
||||
try {
|
||||
const serverId = preset.id;
|
||||
updateLoadingState(serverId, "Creating MCP client...");
|
||||
|
||||
const serverConfig: ServerConfig = {
|
||||
command: preset.command,
|
||||
args: [...preset.baseArgs],
|
||||
};
|
||||
const newConfig = await addMcpServer(preset.id, serverConfig);
|
||||
setConfig(newConfig);
|
||||
|
||||
// Update status
|
||||
const statuses = await getClientsStatus();
|
||||
setClientStatuses(statuses);
|
||||
} finally {
|
||||
updateLoadingState(preset.id, null);
|
||||
}
|
||||
} else {
|
||||
// If configuration is required, open the config dialog
|
||||
setEditingServerId(preset.id);
|
||||
setUserConfig({});
|
||||
}
|
||||
};
|
||||
|
||||
// Pause a server
|
||||
const pauseServer = async (id: string) => {
|
||||
try {
|
||||
updateLoadingState(id, "Stopping server...");
|
||||
const newConfig = await pauseMcpServer(id);
|
||||
setConfig(newConfig);
|
||||
showToast("Server stopped successfully");
|
||||
} catch (error) {
|
||||
showToast("Failed to stop server");
|
||||
console.error(error);
|
||||
} finally {
|
||||
updateLoadingState(id, null);
|
||||
}
|
||||
};
|
||||
|
||||
// Restart server
|
||||
const restartServer = async (id: string) => {
|
||||
try {
|
||||
updateLoadingState(id, "Starting server...");
|
||||
await resumeMcpServer(id);
|
||||
} catch (error) {
|
||||
showToast(
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: "Failed to start server, please check logs",
|
||||
);
|
||||
console.error(error);
|
||||
} finally {
|
||||
updateLoadingState(id, null);
|
||||
}
|
||||
};
|
||||
|
||||
// Restart all clients
|
||||
const handleRestartAll = async () => {
|
||||
try {
|
||||
updateLoadingState("all", "Restarting all servers...");
|
||||
const newConfig = await restartAllClients();
|
||||
setConfig(newConfig);
|
||||
showToast("Restarting all clients");
|
||||
} catch (error) {
|
||||
showToast("Failed to restart clients");
|
||||
console.error(error);
|
||||
} finally {
|
||||
updateLoadingState("all", null);
|
||||
}
|
||||
};
|
||||
|
||||
// Render configuration form
|
||||
const renderConfigForm = () => {
|
||||
const preset = presetServers.find((s) => s.id === editingServerId);
|
||||
if (!preset?.configSchema) return null;
|
||||
|
||||
return Object.entries(preset.configSchema.properties).map(
|
||||
([key, prop]: [string, ConfigProperty]) => {
|
||||
if (prop.type === "array") {
|
||||
const currentValue = userConfig[key as keyof typeof userConfig] || [];
|
||||
const itemLabel = (prop as any).itemLabel || key;
|
||||
const addButtonText =
|
||||
(prop as any).addButtonText || `Add ${itemLabel}`;
|
||||
|
||||
return (
|
||||
<ListItem
|
||||
key={key}
|
||||
title={key}
|
||||
subTitle={prop.description}
|
||||
vertical
|
||||
>
|
||||
<div className={styles["path-list"]}>
|
||||
{(currentValue as string[]).map(
|
||||
(value: string, index: number) => (
|
||||
<div key={index} className={styles["path-item"]}>
|
||||
<input
|
||||
type="text"
|
||||
value={value}
|
||||
placeholder={`${itemLabel} ${index + 1}`}
|
||||
onChange={(e) => {
|
||||
const newValue = [...currentValue] as string[];
|
||||
newValue[index] = e.target.value;
|
||||
setUserConfig({ ...userConfig, [key]: newValue });
|
||||
}}
|
||||
/>
|
||||
<IconButton
|
||||
icon={<DeleteIcon />}
|
||||
className={styles["delete-button"]}
|
||||
onClick={() => {
|
||||
const newValue = [...currentValue] as string[];
|
||||
newValue.splice(index, 1);
|
||||
setUserConfig({ ...userConfig, [key]: newValue });
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
),
|
||||
)}
|
||||
<IconButton
|
||||
icon={<AddIcon />}
|
||||
text={addButtonText}
|
||||
className={styles["add-button"]}
|
||||
bordered
|
||||
onClick={() => {
|
||||
const newValue = [...currentValue, ""] as string[];
|
||||
setUserConfig({ ...userConfig, [key]: newValue });
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
</ListItem>
|
||||
);
|
||||
} else if (prop.type === "string") {
|
||||
const currentValue = userConfig[key as keyof typeof userConfig] || "";
|
||||
return (
|
||||
<ListItem key={key} title={key} subTitle={prop.description}>
|
||||
<input
|
||||
aria-label={key}
|
||||
type="text"
|
||||
value={currentValue}
|
||||
placeholder={`Enter ${key}`}
|
||||
onChange={(e) => {
|
||||
setUserConfig({ ...userConfig, [key]: e.target.value });
|
||||
}}
|
||||
/>
|
||||
</ListItem>
|
||||
);
|
||||
}
|
||||
return null;
|
||||
},
|
||||
);
|
||||
};
|
||||
|
||||
const checkServerStatus = (clientId: string) => {
|
||||
return clientStatuses[clientId] || { status: "undefined", errorMsg: null };
|
||||
};
|
||||
|
||||
const getServerStatusDisplay = (clientId: string) => {
|
||||
const status = checkServerStatus(clientId);
|
||||
|
||||
const statusMap = {
|
||||
undefined: null, // Not configured / not found: render nothing
|
||||
// Initializing state
|
||||
initializing: (
|
||||
<span className={clsx(styles["server-status"], styles["initializing"])}>
|
||||
Initializing
|
||||
</span>
|
||||
),
|
||||
paused: (
|
||||
<span className={clsx(styles["server-status"], styles["stopped"])}>
|
||||
Stopped
|
||||
</span>
|
||||
),
|
||||
active: <span className={styles["server-status"]}>Running</span>,
|
||||
error: (
|
||||
<span className={clsx(styles["server-status"], styles["error"])}>
|
||||
Error
|
||||
<span className={styles["error-message"]}>: {status.errorMsg}</span>
|
||||
</span>
|
||||
),
|
||||
};
|
||||
|
||||
return statusMap[status.status];
|
||||
};
|
||||
|
||||
// Derive the operation status type from a loading message (used for styling and sort order)
|
||||
const getOperationStatusType = (message: string) => {
|
||||
if (message.toLowerCase().includes("stopping")) return "stopping";
|
||||
if (message.toLowerCase().includes("starting")) return "starting";
|
||||
if (message.toLowerCase().includes("error")) return "error";
|
||||
return "default";
|
||||
};
|
||||
|
||||
// Render the server list
|
||||
const renderServerList = () => {
|
||||
if (loadingPresets) {
|
||||
return (
|
||||
<div className={styles["loading-container"]}>
|
||||
<div className={styles["loading-text"]}>
|
||||
Loading preset server list...
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
if (!Array.isArray(presetServers) || presetServers.length === 0) {
|
||||
return (
|
||||
<div className={styles["empty-container"]}>
|
||||
<div className={styles["empty-text"]}>No servers available</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return presetServers
|
||||
.filter((server) => {
|
||||
if (searchText.length === 0) return true;
|
||||
const searchLower = searchText.toLowerCase();
|
||||
return (
|
||||
server.name.toLowerCase().includes(searchLower) ||
|
||||
server.description.toLowerCase().includes(searchLower) ||
|
||||
server.tags.some((tag) => tag.toLowerCase().includes(searchLower))
|
||||
);
|
||||
})
|
||||
.sort((a, b) => {
|
||||
const aStatus = checkServerStatus(a.id).status;
|
||||
const bStatus = checkServerStatus(b.id).status;
|
||||
const aLoading = loadingStates[a.id];
|
||||
const bLoading = loadingStates[b.id];
|
||||
|
||||
// Status priority used for sorting
|
||||
const statusPriority: Record<string, number> = {
|
||||
error: 0, // Highest priority for error status
|
||||
active: 1, // Second for active
|
||||
initializing: 2, // Initializing
|
||||
starting: 3, // Starting
|
||||
stopping: 4, // Stopping
|
||||
paused: 5, // Paused
|
||||
undefined: 6, // Lowest priority for undefined
|
||||
};
|
||||
|
||||
// Get actual status (including loading status)
|
||||
const getEffectiveStatus = (status: string, loading?: string) => {
|
||||
if (loading) {
|
||||
const operationType = getOperationStatusType(loading);
|
||||
return operationType === "default" ? status : operationType;
|
||||
}
|
||||
|
||||
if (status === "initializing" && !loading) {
|
||||
return "active";
|
||||
}
|
||||
|
||||
return status;
|
||||
};
|
||||
|
||||
const aEffectiveStatus = getEffectiveStatus(aStatus, aLoading);
|
||||
const bEffectiveStatus = getEffectiveStatus(bStatus, bLoading);
|
||||
|
||||
// Sort by status first
|
||||
if (aEffectiveStatus !== bEffectiveStatus) {
|
||||
return (
|
||||
(statusPriority[aEffectiveStatus] ?? 6) -
|
||||
(statusPriority[bEffectiveStatus] ?? 6)
|
||||
);
|
||||
}
|
||||
|
||||
// Sort by name when statuses are the same
|
||||
return a.name.localeCompare(b.name);
|
||||
})
|
||||
.map((server) => (
|
||||
<div
|
||||
className={clsx(styles["mcp-market-item"], {
|
||||
[styles["loading"]]: loadingStates[server.id],
|
||||
})}
|
||||
key={server.id}
|
||||
>
|
||||
<div className={styles["mcp-market-header"]}>
|
||||
<div className={styles["mcp-market-title"]}>
|
||||
<div className={styles["mcp-market-name"]}>
|
||||
{server.name}
|
||||
{loadingStates[server.id] && (
|
||||
<span
|
||||
className={styles["operation-status"]}
|
||||
data-status={getOperationStatusType(
|
||||
loadingStates[server.id],
|
||||
)}
|
||||
>
|
||||
{loadingStates[server.id]}
|
||||
</span>
|
||||
)}
|
||||
{!loadingStates[server.id] && getServerStatusDisplay(server.id)}
|
||||
{server.repo && (
|
||||
<a
|
||||
href={server.repo}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className={styles["repo-link"]}
|
||||
title="Open repository"
|
||||
>
|
||||
<GithubIcon />
|
||||
</a>
|
||||
)}
|
||||
</div>
|
||||
<div className={styles["tags-container"]}>
|
||||
{server.tags.map((tag, index) => (
|
||||
<span key={index} className={styles["tag"]}>
|
||||
{tag}
|
||||
</span>
|
||||
))}
|
||||
</div>
|
||||
<div
|
||||
className={clsx(styles["mcp-market-info"], "one-line")}
|
||||
title={server.description}
|
||||
>
|
||||
{server.description}
|
||||
</div>
|
||||
</div>
|
||||
<div className={styles["mcp-market-actions"]}>
|
||||
{isServerAdded(server.id) ? (
|
||||
<>
|
||||
{server.configurable && (
|
||||
<IconButton
|
||||
icon={<EditIcon />}
|
||||
text="Configure"
|
||||
onClick={() => setEditingServerId(server.id)}
|
||||
disabled={isLoading}
|
||||
/>
|
||||
)}
|
||||
{checkServerStatus(server.id).status === "paused" ? (
|
||||
<>
|
||||
<IconButton
|
||||
icon={<PlayIcon />}
|
||||
text="Start"
|
||||
onClick={() => restartServer(server.id)}
|
||||
disabled={isLoading}
|
||||
/>
|
||||
{/* <IconButton
|
||||
icon={<DeleteIcon />}
|
||||
text="Remove"
|
||||
onClick={() => removeServer(server.id)}
|
||||
disabled={isLoading}
|
||||
/> */}
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<IconButton
|
||||
icon={<EyeIcon />}
|
||||
text="Tools"
|
||||
onClick={async () => {
|
||||
setViewingServerId(server.id);
|
||||
await loadTools(server.id);
|
||||
}}
|
||||
disabled={
|
||||
isLoading ||
|
||||
checkServerStatus(server.id).status === "error"
|
||||
}
|
||||
/>
|
||||
<IconButton
|
||||
icon={<StopIcon />}
|
||||
text="Stop"
|
||||
onClick={() => pauseServer(server.id)}
|
||||
disabled={isLoading}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
</>
|
||||
) : (
|
||||
<IconButton
|
||||
icon={<AddIcon />}
|
||||
text="Add"
|
||||
onClick={() => addServer(server)}
|
||||
disabled={isLoading}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
));
|
||||
};
|
||||
|
||||
return (
|
||||
<ErrorBoundary>
|
||||
<div className={styles["mcp-market-page"]}>
|
||||
<div className="window-header">
|
||||
<div className="window-header-title">
|
||||
<div className="window-header-main-title">
|
||||
MCP Market
|
||||
{loadingStates["all"] && (
|
||||
<span className={styles["loading-indicator"]}>
|
||||
{loadingStates["all"]}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
<div className="window-header-sub-title">
|
||||
{Object.keys(config?.mcpServers ?? {}).length} servers configured
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="window-actions">
|
||||
<div className="window-action-button">
|
||||
<IconButton
|
||||
icon={<RestartIcon />}
|
||||
bordered
|
||||
onClick={handleRestartAll}
|
||||
text="Restart All"
|
||||
disabled={isLoading}
|
||||
/>
|
||||
</div>
|
||||
<div className="window-action-button">
|
||||
<IconButton
|
||||
icon={<CloseIcon />}
|
||||
bordered
|
||||
onClick={() => navigate(-1)}
|
||||
disabled={isLoading}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className={styles["mcp-market-page-body"]}>
|
||||
<div className={styles["mcp-market-filter"]}>
|
||||
<input
|
||||
type="text"
|
||||
className={styles["search-bar"]}
|
||||
placeholder={"Search MCP Server"}
|
||||
autoFocus
|
||||
onInput={(e) => setSearchText(e.currentTarget.value)}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className={styles["server-list"]}>{renderServerList()}</div>
|
||||
</div>
|
||||
|
||||
{/* Edit server configuration */}
|
||||
{editingServerId && (
|
||||
<div className="modal-mask">
|
||||
<Modal
|
||||
title={`Configure Server - ${editingServerId}`}
|
||||
onClose={() => !isLoading && setEditingServerId(undefined)}
|
||||
actions={[
|
||||
<IconButton
|
||||
key="cancel"
|
||||
text="Cancel"
|
||||
onClick={() => setEditingServerId(undefined)}
|
||||
bordered
|
||||
disabled={isLoading}
|
||||
/>,
|
||||
<IconButton
|
||||
key="confirm"
|
||||
text="Save"
|
||||
type="primary"
|
||||
onClick={saveServerConfig}
|
||||
bordered
|
||||
disabled={isLoading}
|
||||
/>,
|
||||
]}
|
||||
>
|
||||
<List>{renderConfigForm()}</List>
|
||||
</Modal>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{viewingServerId && (
|
||||
<div className="modal-mask">
|
||||
<Modal
|
||||
title={`Server Details - ${viewingServerId}`}
|
||||
onClose={() => setViewingServerId(undefined)}
|
||||
actions={[
|
||||
<IconButton
|
||||
key="close"
|
||||
text="Close"
|
||||
onClick={() => setViewingServerId(undefined)}
|
||||
bordered
|
||||
/>,
|
||||
]}
|
||||
>
|
||||
<div className={styles["tools-list"]}>
|
||||
{isLoading ? (
|
||||
<div>Loading...</div>
|
||||
) : tools?.tools ? (
|
||||
tools.tools.map(
|
||||
(tool: ListToolsResponse["tools"], index: number) => (
|
||||
<div key={index} className={styles["tool-item"]}>
|
||||
<div className={styles["tool-name"]}>{tool.name}</div>
|
||||
<div className={styles["tool-description"]}>
|
||||
{tool.description}
|
||||
</div>
|
||||
</div>
|
||||
),
|
||||
)
|
||||
) : (
|
||||
<div>No tools available</div>
|
||||
)}
|
||||
</div>
|
||||
</Modal>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</ErrorBoundary>
|
||||
);
|
||||
}
|
|
@ -8,6 +8,7 @@ import Locale from "../locales";
|
|||
|
||||
import styles from "./message-selector.module.scss";
|
||||
import { getMessageTextContent } from "../utils";
|
||||
import clsx from "clsx";
|
||||
|
||||
function useShiftRange() {
|
||||
const [startIndex, setStartIndex] = useState<number>();
|
||||
|
@ -71,6 +72,7 @@ export function MessageSelector(props: {
|
|||
defaultSelectAll?: boolean;
|
||||
onSelected?: (messages: ChatMessage[]) => void;
|
||||
}) {
|
||||
const LATEST_COUNT = 4;
|
||||
const chatStore = useChatStore();
|
||||
const session = chatStore.currentSession();
|
||||
const isValid = (m: ChatMessage) => m.content && !m.isError && !m.streaming;
|
||||
|
@ -141,15 +143,13 @@ export function MessageSelector(props: {
|
|||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [startIndex, endIndex]);
|
||||
|
||||
const LATEST_COUNT = 4;
|
||||
|
||||
return (
|
||||
<div className={styles["message-selector"]}>
|
||||
<div className={styles["message-filter"]}>
|
||||
<input
|
||||
type="text"
|
||||
placeholder={Locale.Select.Search}
|
||||
className={styles["filter-item"] + " " + styles["search-bar"]}
|
||||
className={clsx(styles["filter-item"], styles["search-bar"])}
|
||||
value={searchInput}
|
||||
onInput={(e) => {
|
||||
setSearchInput(e.currentTarget.value);
|
||||
|
@ -196,9 +196,9 @@ export function MessageSelector(props: {
|
|||
|
||||
return (
|
||||
<div
|
||||
className={`${styles["message"]} ${
|
||||
props.selection.has(m.id!) && styles["message-selected"]
|
||||
}`}
|
||||
className={clsx(styles["message"], {
|
||||
[styles["message-selected"]]: props.selection.has(m.id!),
|
||||
})}
|
||||
key={i}
|
||||
onClick={() => {
|
||||
props.updateSelection((selection) => {
|
||||
|
@ -221,7 +221,7 @@ export function MessageSelector(props: {
|
|||
<div className={styles["date"]}>
|
||||
{new Date(m.date).toLocaleString()}
|
||||
</div>
|
||||
<div className={`${styles["content"]} one-line`}>
|
||||
<div className={clsx(styles["content"], "one-line")}>
|
||||
{getMessageTextContent(m)}
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
@ -7,6 +7,7 @@ import { ListItem, Select } from "./ui-lib";
|
|||
import { useAllModels } from "../utils/hooks";
|
||||
import { groupBy } from "lodash-es";
|
||||
import styles from "./model-config.module.scss";
|
||||
import { getModelProvider } from "../utils/model";
|
||||
|
||||
export function ModelConfigList(props: {
|
||||
modelConfig: ModelConfig;
|
||||
|
@ -28,7 +29,9 @@ export function ModelConfigList(props: {
|
|||
value={value}
|
||||
align="left"
|
||||
onChange={(e) => {
|
||||
const [model, providerName] = e.currentTarget.value.split("@");
|
||||
const [model, providerName] = getModelProvider(
|
||||
e.currentTarget.value,
|
||||
);
|
||||
props.updateConfig((config) => {
|
||||
config.model = ModalConfigValidator.model(model);
|
||||
config.providerName = providerName as ServiceProvider;
|
||||
|
@ -247,7 +250,9 @@ export function ModelConfigList(props: {
|
|||
aria-label={Locale.Settings.CompressModel.Title}
|
||||
value={compressModelValue}
|
||||
onChange={(e) => {
|
||||
const [model, providerName] = e.currentTarget.value.split("@");
|
||||
const [model, providerName] = getModelProvider(
|
||||
e.currentTarget.value,
|
||||
);
|
||||
props.updateConfig((config) => {
|
||||
config.compressModel = ModalConfigValidator.model(model);
|
||||
config.compressProviderName = providerName as ServiceProvider;
|
||||
|
|
|
@ -16,6 +16,7 @@ import { MaskAvatar } from "./mask";
|
|||
import { useCommand } from "../command";
|
||||
import { showConfirm } from "./ui-lib";
|
||||
import { BUILTIN_MASK_STORE } from "../masks";
|
||||
import clsx from "clsx";
|
||||
|
||||
function MaskItem(props: { mask: Mask; onClick?: () => void }) {
|
||||
return (
|
||||
|
@ -24,7 +25,9 @@ function MaskItem(props: { mask: Mask; onClick?: () => void }) {
|
|||
avatar={props.mask.avatar}
|
||||
model={props.mask.modelConfig.model}
|
||||
/>
|
||||
<div className={styles["mask-name"] + " one-line"}>{props.mask.name}</div>
|
||||
<div className={clsx(styles["mask-name"], "one-line")}>
|
||||
{props.mask.name}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
|
|
@ -28,6 +28,7 @@ import {
|
|||
import Locale from "../locales";
|
||||
import { useNavigate } from "react-router-dom";
|
||||
import { useState } from "react";
|
||||
import clsx from "clsx";
|
||||
|
||||
export function PluginPage() {
|
||||
const navigate = useNavigate();
|
||||
|
@ -199,7 +200,7 @@ export function PluginPage() {
|
|||
<div className={styles["mask-name"]}>
|
||||
{m.title}@<small>{m.version}</small>
|
||||
</div>
|
||||
<div className={styles["mask-info"] + " one-line"}>
|
||||
<div className={clsx(styles["mask-info"], "one-line")}>
|
||||
{Locale.Plugin.Item.Info(
|
||||
FunctionToolService.add(m).length,
|
||||
)}
|
||||
|
@ -335,7 +336,10 @@ export function PluginPage() {
|
|||
<ListItem
|
||||
subTitle={
|
||||
<div
|
||||
className={`markdown-body ${pluginStyles["plugin-content"]}`}
|
||||
className={clsx(
|
||||
"markdown-body",
|
||||
pluginStyles["plugin-content"],
|
||||
)}
|
||||
dir="auto"
|
||||
>
|
||||
<pre>
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
export * from "./realtime-chat";
|
|
@ -0,0 +1,74 @@
|
|||
.realtime-chat {
|
||||
width: 100%;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
position: relative;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
height: 100%;
|
||||
padding: 20px;
|
||||
box-sizing: border-box;
|
||||
.circle-mic {
|
||||
width: 150px;
|
||||
height: 150px;
|
||||
border-radius: 50%;
|
||||
background: linear-gradient(to bottom right, #a0d8ef, #f0f8ff);
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
}
|
||||
.icon-center {
|
||||
font-size: 24px;
|
||||
}
|
||||
|
||||
.bottom-icons {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
width: 100%;
|
||||
position: absolute;
|
||||
bottom: 20px;
|
||||
box-sizing: border-box;
|
||||
padding: 0 20px;
|
||||
}
|
||||
|
||||
.icon-left,
|
||||
.icon-right {
|
||||
width: 46px;
|
||||
height: 46px;
|
||||
font-size: 36px;
|
||||
background: var(--second);
|
||||
border-radius: 50%;
|
||||
padding: 2px;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
cursor: pointer;
|
||||
&:hover {
|
||||
opacity: 0.8;
|
||||
}
|
||||
}
|
||||
|
||||
&.mobile {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
|
||||
.pulse {
|
||||
animation: pulse 1.5s infinite;
|
||||
}
|
||||
|
||||
@keyframes pulse {
|
||||
0% {
|
||||
transform: scale(1);
|
||||
opacity: 0.7;
|
||||
}
|
||||
50% {
|
||||
transform: scale(1.1);
|
||||
opacity: 1;
|
||||
}
|
||||
100% {
|
||||
transform: scale(1);
|
||||
opacity: 0.7;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,359 @@
|
|||
import VoiceIcon from "@/app/icons/voice.svg";
|
||||
import VoiceOffIcon from "@/app/icons/voice-off.svg";
|
||||
import PowerIcon from "@/app/icons/power.svg";
|
||||
|
||||
import styles from "./realtime-chat.module.scss";
|
||||
import clsx from "clsx";
|
||||
|
||||
import { useState, useRef, useEffect } from "react";
|
||||
|
||||
import { useChatStore, createMessage, useAppConfig } from "@/app/store";
|
||||
|
||||
import { IconButton } from "@/app/components/button";
|
||||
|
||||
import {
|
||||
Modality,
|
||||
RTClient,
|
||||
RTInputAudioItem,
|
||||
RTResponse,
|
||||
TurnDetection,
|
||||
} from "rt-client";
|
||||
import { AudioHandler } from "@/app/lib/audio";
|
||||
import { uploadImage } from "@/app/utils/chat";
|
||||
import { VoicePrint } from "@/app/components/voice-print";
|
||||
|
||||
interface RealtimeChatProps {
|
||||
onClose?: () => void;
|
||||
onStartVoice?: () => void;
|
||||
onPausedVoice?: () => void;
|
||||
}
|
||||
|
||||
export function RealtimeChat({
|
||||
onClose,
|
||||
onStartVoice,
|
||||
onPausedVoice,
|
||||
}: RealtimeChatProps) {
|
||||
const chatStore = useChatStore();
|
||||
const session = chatStore.currentSession();
|
||||
const config = useAppConfig();
|
||||
const [status, setStatus] = useState("");
|
||||
const [isRecording, setIsRecording] = useState(false);
|
||||
const [isConnected, setIsConnected] = useState(false);
|
||||
const [isConnecting, setIsConnecting] = useState(false);
|
||||
const [modality, setModality] = useState("audio");
|
||||
const [useVAD, setUseVAD] = useState(true);
|
||||
const [frequencies, setFrequencies] = useState<Uint8Array | undefined>();
|
||||
|
||||
const clientRef = useRef<RTClient | null>(null);
|
||||
const audioHandlerRef = useRef<AudioHandler | null>(null);
|
||||
const initRef = useRef(false);
|
||||
|
||||
const temperature = config.realtimeConfig.temperature;
|
||||
const apiKey = config.realtimeConfig.apiKey;
|
||||
const model = config.realtimeConfig.model;
|
||||
const azure = config.realtimeConfig.provider === "Azure";
|
||||
const azureEndpoint = config.realtimeConfig.azure.endpoint;
|
||||
const azureDeployment = config.realtimeConfig.azure.deployment;
|
||||
const voice = config.realtimeConfig.voice;
|
||||
|
||||
const handleConnect = async () => {
|
||||
if (isConnecting) return;
|
||||
if (!isConnected) {
|
||||
try {
|
||||
setIsConnecting(true);
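// Build the client against Azure (endpoint + deployment) or OpenAI (realtime model), depending on the configured provider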
|
||||
clientRef.current = azure
|
||||
? new RTClient(
|
||||
new URL(azureEndpoint),
|
||||
{ key: apiKey },
|
||||
{ deployment: azureDeployment },
|
||||
)
|
||||
: new RTClient({ key: apiKey }, { model });
|
||||
const modalities: Modality[] =
|
||||
modality === "audio" ? ["text", "audio"] : ["text"];
|
||||
const turnDetection: TurnDetection = useVAD
|
||||
? { type: "server_vad" }
|
||||
: null;
|
||||
await clientRef.current.configure({
|
||||
instructions: "",
|
||||
voice,
|
||||
input_audio_transcription: { model: "whisper-1" },
|
||||
turn_detection: turnDetection,
|
||||
tools: [],
|
||||
temperature,
|
||||
modalities,
|
||||
});
|
||||
startResponseListener();
|
||||
|
||||
setIsConnected(true);
|
||||
// TODO
|
||||
// try {
|
||||
// const recentMessages = chatStore.getMessagesWithMemory();
|
||||
// for (const message of recentMessages) {
|
||||
// const { role, content } = message;
|
||||
// if (typeof content === "string") {
|
||||
// await clientRef.current.sendItem({
|
||||
// type: "message",
|
||||
// role: role as any,
|
||||
// content: [
|
||||
// {
|
||||
// type: (role === "assistant" ? "text" : "input_text") as any,
|
||||
// text: content as string,
|
||||
// },
|
||||
// ],
|
||||
// });
|
||||
// }
|
||||
// }
|
||||
// // await clientRef.current.generateResponse();
|
||||
// } catch (error) {
|
||||
// console.error("Set message failed:", error);
|
||||
// }
|
||||
} catch (error) {
|
||||
console.error("Connection failed:", error);
|
||||
setStatus("Connection failed");
|
||||
} finally {
|
||||
setIsConnecting(false);
|
||||
}
|
||||
} else {
|
||||
await disconnect();
|
||||
}
|
||||
};
|
||||
|
||||
const disconnect = async () => {
|
||||
if (clientRef.current) {
|
||||
try {
|
||||
await clientRef.current.close();
|
||||
clientRef.current = null;
|
||||
setIsConnected(false);
|
||||
} catch (error) {
|
||||
console.error("Disconnect failed:", error);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const startResponseListener = async () => {
|
||||
if (!clientRef.current) return;
|
||||
|
||||
try {
|
||||
for await (const serverEvent of clientRef.current.events()) {
|
||||
if (serverEvent.type === "response") {
|
||||
await handleResponse(serverEvent);
|
||||
} else if (serverEvent.type === "input_audio") {
|
||||
await handleInputAudio(serverEvent);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
if (clientRef.current) {
|
||||
console.error("Response iteration error:", error);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const handleResponse = async (response: RTResponse) => {
|
||||
for await (const item of response) {
|
||||
if (item.type === "message" && item.role === "assistant") {
|
||||
const botMessage = createMessage({
|
||||
role: item.role,
|
||||
content: "",
|
||||
});
|
||||
// add bot message first
|
||||
chatStore.updateTargetSession(session, (session) => {
|
||||
session.messages = session.messages.concat([botMessage]);
|
||||
});
|
||||
let hasAudio = false;
|
||||
for await (const content of item) {
|
||||
if (content.type === "text") {
|
||||
for await (const text of content.textChunks()) {
|
||||
botMessage.content += text;
|
||||
}
|
||||
} else if (content.type === "audio") {
|
||||
const textTask = async () => {
|
||||
for await (const text of content.transcriptChunks()) {
|
||||
botMessage.content += text;
|
||||
}
|
||||
};
|
||||
const audioTask = async () => {
|
||||
audioHandlerRef.current?.startStreamingPlayback();
|
||||
for await (const audio of content.audioChunks()) {
|
||||
hasAudio = true;
|
||||
audioHandlerRef.current?.playChunk(audio);
|
||||
}
|
||||
};
|
||||
await Promise.all([textTask(), audioTask()]);
|
||||
}
|
||||
// update message.content
|
||||
chatStore.updateTargetSession(session, (session) => {
|
||||
session.messages = session.messages.concat();
|
||||
});
|
||||
}
|
||||
if (hasAudio) {
|
||||
// Upload the audio to obtain an audio_url
|
||||
const blob = audioHandlerRef.current?.savePlayFile();
|
||||
uploadImage(blob!).then((audio_url) => {
|
||||
botMessage.audio_url = audio_url;
|
||||
// update text and audio_url
|
||||
chatStore.updateTargetSession(session, (session) => {
|
||||
session.messages = session.messages.concat();
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const handleInputAudio = async (item: RTInputAudioItem) => {
|
||||
await item.waitForCompletion();
|
||||
if (item.transcription) {
|
||||
const userMessage = createMessage({
|
||||
role: "user",
|
||||
content: item.transcription,
|
||||
});
|
||||
chatStore.updateTargetSession(session, (session) => {
|
||||
session.messages = session.messages.concat([userMessage]);
|
||||
});
|
||||
// Save the input audio_url and update the session
|
||||
const { audioStartMillis, audioEndMillis } = item;
|
||||
// Upload the recorded audio to obtain an audio_url
|
||||
const blob = audioHandlerRef.current?.saveRecordFile(
|
||||
audioStartMillis,
|
||||
audioEndMillis,
|
||||
);
|
||||
uploadImage(blob!).then((audio_url) => {
|
||||
userMessage.audio_url = audio_url;
|
||||
chatStore.updateTargetSession(session, (session) => {
|
||||
session.messages = session.messages.concat();
|
||||
});
|
||||
});
|
||||
}
|
||||
// Stop streaming playback once the input audio has been received.
|
||||
audioHandlerRef.current?.stopStreamingPlayback();
|
||||
};
|
||||
|
||||
const toggleRecording = async () => {
|
||||
if (!isRecording && clientRef.current) {
|
||||
try {
|
||||
if (!audioHandlerRef.current) {
|
||||
audioHandlerRef.current = new AudioHandler();
|
||||
await audioHandlerRef.current.initialize();
|
||||
}
|
||||
await audioHandlerRef.current.startRecording(async (chunk) => {
|
||||
await clientRef.current?.sendAudio(chunk);
|
||||
});
|
||||
setIsRecording(true);
|
||||
} catch (error) {
|
||||
console.error("Failed to start recording:", error);
|
||||
}
|
||||
} else if (audioHandlerRef.current) {
|
||||
try {
|
||||
audioHandlerRef.current.stopRecording();
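// Without server-side VAD, the buffered audio must be committed manually and a response requested explicitly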
|
||||
if (!useVAD) {
|
||||
const inputAudio = await clientRef.current?.commitAudio();
|
||||
await handleInputAudio(inputAudio!);
|
||||
await clientRef.current?.generateResponse();
|
||||
}
|
||||
setIsRecording(false);
|
||||
} catch (error) {
|
||||
console.error("Failed to stop recording:", error);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
// Prevent double initialization
|
||||
if (initRef.current) return;
|
||||
initRef.current = true;
|
||||
|
||||
const initAudioHandler = async () => {
|
||||
const handler = new AudioHandler();
|
||||
await handler.initialize();
|
||||
audioHandlerRef.current = handler;
|
||||
await handleConnect();
|
||||
await toggleRecording();
|
||||
};
|
||||
|
||||
initAudioHandler().catch((error) => {
|
||||
setStatus(error);
|
||||
console.error(error);
|
||||
});
|
||||
|
||||
return () => {
|
||||
if (isRecording) {
|
||||
toggleRecording();
|
||||
}
|
||||
audioHandlerRef.current?.close().catch(console.error);
|
||||
disconnect();
|
||||
};
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
let animationFrameId: number;
|
||||
|
||||
if (isConnected && isRecording) {
|
||||
const animationFrame = () => {
|
||||
if (audioHandlerRef.current) {
|
||||
const freqData = audioHandlerRef.current.getByteFrequencyData();
|
||||
setFrequencies(freqData);
|
||||
}
|
||||
animationFrameId = requestAnimationFrame(animationFrame);
|
||||
};
|
||||
|
||||
animationFrameId = requestAnimationFrame(animationFrame);
|
||||
} else {
|
||||
setFrequencies(undefined);
|
||||
}
|
||||
|
||||
return () => {
|
||||
if (animationFrameId) {
|
||||
cancelAnimationFrame(animationFrameId);
|
||||
}
|
||||
};
|
||||
}, [isConnected, isRecording]);
|
||||
|
||||
// update session params
|
||||
useEffect(() => {
|
||||
clientRef.current?.configure({ voice });
|
||||
}, [voice]);
|
||||
useEffect(() => {
|
||||
clientRef.current?.configure({ temperature });
|
||||
}, [temperature]);
|
||||
|
||||
const handleClose = async () => {
|
||||
onClose?.();
|
||||
if (isRecording) {
|
||||
await toggleRecording();
|
||||
}
|
||||
disconnect().catch(console.error);
|
||||
};
|
||||
|
||||
return (
|
||||
<div className={styles["realtime-chat"]}>
|
||||
<div
|
||||
className={clsx(styles["circle-mic"], {
|
||||
[styles["pulse"]]: isRecording,
|
||||
})}
|
||||
>
|
||||
<VoicePrint frequencies={frequencies} isActive={isRecording} />
|
||||
</div>
|
||||
|
||||
<div className={styles["bottom-icons"]}>
|
||||
<div>
|
||||
<IconButton
|
||||
icon={isRecording ? <VoiceIcon /> : <VoiceOffIcon />}
|
||||
onClick={toggleRecording}
|
||||
disabled={!isConnected}
|
||||
shadow
|
||||
bordered
|
||||
/>
|
||||
</div>
|
||||
<div className={styles["icon-center"]}>{status}</div>
|
||||
<div>
|
||||
<IconButton
|
||||
icon={<PowerIcon />}
|
||||
onClick={handleClose}
|
||||
shadow
|
||||
bordered
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
|
@ -0,0 +1,173 @@
|
|||
import { RealtimeConfig } from "@/app/store";
|
||||
|
||||
import Locale from "@/app/locales";
|
||||
import { ListItem, Select, PasswordInput } from "@/app/components/ui-lib";
|
||||
|
||||
import { InputRange } from "@/app/components/input-range";
|
||||
import { Voice } from "rt-client";
|
||||
import { ServiceProvider } from "@/app/constant";
|
||||
|
||||
const providers = [ServiceProvider.OpenAI, ServiceProvider.Azure];
|
||||
|
||||
const models = ["gpt-4o-realtime-preview-2024-10-01"];
|
||||
|
||||
const voice = ["alloy", "shimmer", "echo"];
|
||||
|
||||
export function RealtimeConfigList(props: {
|
||||
realtimeConfig: RealtimeConfig;
|
||||
updateConfig: (updater: (config: RealtimeConfig) => void) => void;
|
||||
}) {
|
||||
const azureConfigComponent = props.realtimeConfig.provider ===
|
||||
ServiceProvider.Azure && (
|
||||
<>
|
||||
<ListItem
|
||||
title={Locale.Settings.Realtime.Azure.Endpoint.Title}
|
||||
subTitle={Locale.Settings.Realtime.Azure.Endpoint.SubTitle}
|
||||
>
|
||||
<input
|
||||
value={props.realtimeConfig?.azure?.endpoint}
|
||||
type="text"
|
||||
placeholder={Locale.Settings.Realtime.Azure.Endpoint.Title}
|
||||
onChange={(e) => {
|
||||
props.updateConfig(
|
||||
(config) => (config.azure.endpoint = e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Realtime.Azure.Deployment.Title}
|
||||
subTitle={Locale.Settings.Realtime.Azure.Deployment.SubTitle}
|
||||
>
|
||||
<input
|
||||
value={props.realtimeConfig?.azure?.deployment}
|
||||
type="text"
|
||||
placeholder={Locale.Settings.Realtime.Azure.Deployment.Title}
|
||||
onChange={(e) => {
|
||||
props.updateConfig(
|
||||
(config) => (config.azure.deployment = e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</ListItem>
|
||||
</>
|
||||
);
|
||||
|
||||
return (
|
||||
<>
|
||||
<ListItem
|
||||
title={Locale.Settings.Realtime.Enable.Title}
|
||||
subTitle={Locale.Settings.Realtime.Enable.SubTitle}
|
||||
>
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={props.realtimeConfig.enable}
|
||||
onChange={(e) =>
|
||||
props.updateConfig(
|
||||
(config) => (config.enable = e.currentTarget.checked),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
|
||||
{props.realtimeConfig.enable && (
|
||||
<>
|
||||
<ListItem
|
||||
title={Locale.Settings.Realtime.Provider.Title}
|
||||
subTitle={Locale.Settings.Realtime.Provider.SubTitle}
|
||||
>
|
||||
<Select
|
||||
aria-label={Locale.Settings.Realtime.Provider.Title}
|
||||
value={props.realtimeConfig.provider}
|
||||
onChange={(e) => {
|
||||
props.updateConfig(
|
||||
(config) =>
|
||||
(config.provider = e.target.value as ServiceProvider),
|
||||
);
|
||||
}}
|
||||
>
|
||||
{providers.map((v, i) => (
|
||||
<option value={v} key={i}>
|
||||
{v}
|
||||
</option>
|
||||
))}
|
||||
</Select>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Realtime.Model.Title}
|
||||
subTitle={Locale.Settings.Realtime.Model.SubTitle}
|
||||
>
|
||||
<Select
|
||||
aria-label={Locale.Settings.Realtime.Model.Title}
|
||||
value={props.realtimeConfig.model}
|
||||
onChange={(e) => {
|
||||
props.updateConfig((config) => (config.model = e.target.value));
|
||||
}}
|
||||
>
|
||||
{models.map((v, i) => (
|
||||
<option value={v} key={i}>
|
||||
{v}
|
||||
</option>
|
||||
))}
|
||||
</Select>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Realtime.ApiKey.Title}
|
||||
subTitle={Locale.Settings.Realtime.ApiKey.SubTitle}
|
||||
>
|
||||
<PasswordInput
|
||||
aria={Locale.Settings.ShowPassword}
|
||||
aria-label={Locale.Settings.Realtime.ApiKey.Title}
|
||||
value={props.realtimeConfig.apiKey}
|
||||
type="text"
|
||||
placeholder={Locale.Settings.Realtime.ApiKey.Placeholder}
|
||||
onChange={(e) => {
|
||||
props.updateConfig(
|
||||
(config) => (config.apiKey = e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</ListItem>
|
||||
{azureConfigComponent}
|
||||
<ListItem
|
||||
title={Locale.Settings.TTS.Voice.Title}
|
||||
subTitle={Locale.Settings.TTS.Voice.SubTitle}
|
||||
>
|
||||
<Select
|
||||
value={props.realtimeConfig.voice}
|
||||
onChange={(e) => {
|
||||
props.updateConfig(
|
||||
(config) => (config.voice = e.currentTarget.value as Voice),
|
||||
);
|
||||
}}
|
||||
>
|
||||
{voice.map((v, i) => (
|
||||
<option value={v} key={i}>
|
||||
{v}
|
||||
</option>
|
||||
))}
|
||||
</Select>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Realtime.Temperature.Title}
|
||||
subTitle={Locale.Settings.Realtime.Temperature.SubTitle}
|
||||
>
|
||||
<InputRange
|
||||
aria={Locale.Settings.Temperature.Title}
|
||||
value={props.realtimeConfig?.temperature?.toFixed(1)}
|
||||
min="0.6"
|
||||
max="1"
|
||||
step="0.1"
|
||||
onChange={(e) => {
|
||||
props.updateConfig(
|
||||
(config) =>
|
||||
(config.temperature = e.currentTarget.valueAsNumber),
|
||||
);
|
||||
}}
|
||||
></InputRange>
|
||||
</ListItem>
|
||||
</>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
}
|
|
@ -4,6 +4,7 @@ import { Select } from "@/app/components/ui-lib";
|
|||
import { IconButton } from "@/app/components/button";
|
||||
import Locale from "@/app/locales";
|
||||
import { useSdStore } from "@/app/store/sd";
|
||||
import clsx from "clsx";
|
||||
|
||||
export const params = [
|
||||
{
|
||||
|
@ -136,7 +137,7 @@ export function ControlParamItem(props: {
|
|||
className?: string;
|
||||
}) {
|
||||
return (
|
||||
<div className={styles["ctrl-param-item"] + ` ${props.className || ""}`}>
|
||||
<div className={clsx(styles["ctrl-param-item"], props.className)}>
|
||||
<div className={styles["ctrl-param-item-header"]}>
|
||||
<div className={styles["ctrl-param-item-title"]}>
|
||||
<div>
|
||||
|
|
|
@ -36,6 +36,7 @@ import { removeImage } from "@/app/utils/chat";
|
|||
import { SideBar } from "./sd-sidebar";
|
||||
import { WindowContent } from "@/app/components/home";
|
||||
import { params } from "./sd-panel";
|
||||
import clsx from "clsx";
|
||||
|
||||
function getSdTaskStatus(item: any) {
|
||||
let s: string;
|
||||
|
@ -104,7 +105,7 @@ export function Sd() {
|
|||
|
||||
return (
|
||||
<>
|
||||
<SideBar className={isSd ? homeStyles["sidebar-show"] : ""} />
|
||||
<SideBar className={clsx({ [homeStyles["sidebar-show"]]: isSd })} />
|
||||
<WindowContent>
|
||||
<div className={chatStyles.chat} key={"1"}>
|
||||
<div className="window-header" data-tauri-drag-region>
|
||||
|
@ -121,7 +122,10 @@ export function Sd() {
|
|||
</div>
|
||||
)}
|
||||
<div
|
||||
className={`window-header-title ${chatStyles["chat-body-title"]}`}
|
||||
className={clsx(
|
||||
"window-header-title",
|
||||
chatStyles["chat-body-title"],
|
||||
)}
|
||||
>
|
||||
<div className={`window-header-main-title`}>Stability AI</div>
|
||||
<div className="window-header-sub-title">
|
||||
|
|
|
@ -49,7 +49,7 @@ import Locale, {
|
|||
changeLang,
|
||||
getLang,
|
||||
} from "../locales";
|
||||
import { copyToClipboard } from "../utils";
|
||||
import { copyToClipboard, clientUpdate, semverCompare } from "../utils";
|
||||
import Link from "next/link";
|
||||
import {
|
||||
Anthropic,
|
||||
|
@ -59,6 +59,7 @@ import {
|
|||
ByteDance,
|
||||
Alibaba,
|
||||
Moonshot,
|
||||
XAI,
|
||||
Google,
|
||||
GoogleSafetySettingsThreshold,
|
||||
OPENAI_BASE_URL,
|
||||
|
@ -71,6 +72,9 @@ import {
|
|||
Stability,
|
||||
Iflytek,
|
||||
SAAS_CHAT_URL,
|
||||
ChatGLM,
|
||||
DeepSeek,
|
||||
SiliconFlow,
|
||||
} from "../constant";
|
||||
import { Prompt, SearchService, usePromptStore } from "../store/prompt";
|
||||
import { ErrorBoundary } from "./error";
|
||||
|
@ -83,6 +87,7 @@ import { nanoid } from "nanoid";
|
|||
import { useMaskStore } from "../store/mask";
|
||||
import { ProviderType } from "../utils/cloud";
|
||||
import { TTSConfigList } from "./tts-config";
|
||||
import { RealtimeConfigList } from "./realtime-chat/realtime-config";
|
||||
|
||||
function EditPromptModal(props: { id: string; onClose: () => void }) {
|
||||
const promptStore = usePromptStore();
|
||||
|
@ -585,7 +590,7 @@ export function Settings() {
|
|||
const [checkingUpdate, setCheckingUpdate] = useState(false);
|
||||
const currentVersion = updateStore.formatVersion(updateStore.version);
|
||||
const remoteId = updateStore.formatVersion(updateStore.remoteVersion);
|
||||
const hasNewVersion = currentVersion !== remoteId;
|
||||
const hasNewVersion = semverCompare(currentVersion, remoteId) === -1;
|
||||
const updateUrl = getClientConfig()?.isApp ? RELEASE_URL : UPDATE_URL;
|
||||
|
||||
function checkUpdate(force = false) {
|
||||
|
@ -1194,6 +1199,167 @@ export function Settings() {
|
|||
</>
|
||||
);
|
||||
|
||||
const deepseekConfigComponent = accessStore.provider ===
|
||||
ServiceProvider.DeepSeek && (
|
||||
<>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.DeepSeek.Endpoint.Title}
|
||||
subTitle={
|
||||
Locale.Settings.Access.DeepSeek.Endpoint.SubTitle +
|
||||
DeepSeek.ExampleEndpoint
|
||||
}
|
||||
>
|
||||
<input
|
||||
aria-label={Locale.Settings.Access.DeepSeek.Endpoint.Title}
|
||||
type="text"
|
||||
value={accessStore.deepseekUrl}
|
||||
placeholder={DeepSeek.ExampleEndpoint}
|
||||
onChange={(e) =>
|
||||
accessStore.update(
|
||||
(access) => (access.deepseekUrl = e.currentTarget.value),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.DeepSeek.ApiKey.Title}
|
||||
subTitle={Locale.Settings.Access.DeepSeek.ApiKey.SubTitle}
|
||||
>
|
||||
<PasswordInput
|
||||
aria-label={Locale.Settings.Access.DeepSeek.ApiKey.Title}
|
||||
value={accessStore.deepseekApiKey}
|
||||
type="text"
|
||||
placeholder={Locale.Settings.Access.DeepSeek.ApiKey.Placeholder}
|
||||
onChange={(e) => {
|
||||
accessStore.update(
|
||||
(access) => (access.deepseekApiKey = e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</ListItem>
|
||||
</>
|
||||
);
|
||||
|
||||
const XAIConfigComponent = accessStore.provider === ServiceProvider.XAI && (
|
||||
<>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.XAI.Endpoint.Title}
|
||||
subTitle={
|
||||
Locale.Settings.Access.XAI.Endpoint.SubTitle + XAI.ExampleEndpoint
|
||||
}
|
||||
>
|
||||
<input
|
||||
aria-label={Locale.Settings.Access.XAI.Endpoint.Title}
|
||||
type="text"
|
||||
value={accessStore.xaiUrl}
|
||||
placeholder={XAI.ExampleEndpoint}
|
||||
onChange={(e) =>
|
||||
accessStore.update(
|
||||
(access) => (access.xaiUrl = e.currentTarget.value),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.XAI.ApiKey.Title}
|
||||
subTitle={Locale.Settings.Access.XAI.ApiKey.SubTitle}
|
||||
>
|
||||
<PasswordInput
|
||||
aria-label={Locale.Settings.Access.XAI.ApiKey.Title}
|
||||
value={accessStore.xaiApiKey}
|
||||
type="text"
|
||||
placeholder={Locale.Settings.Access.XAI.ApiKey.Placeholder}
|
||||
onChange={(e) => {
|
||||
accessStore.update(
|
||||
(access) => (access.xaiApiKey = e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</ListItem>
|
||||
</>
|
||||
);
|
||||
|
||||
const chatglmConfigComponent = accessStore.provider ===
|
||||
ServiceProvider.ChatGLM && (
|
||||
<>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.ChatGLM.Endpoint.Title}
|
||||
subTitle={
|
||||
Locale.Settings.Access.ChatGLM.Endpoint.SubTitle +
|
||||
ChatGLM.ExampleEndpoint
|
||||
}
|
||||
>
|
||||
<input
|
||||
aria-label={Locale.Settings.Access.ChatGLM.Endpoint.Title}
|
||||
type="text"
|
||||
value={accessStore.chatglmUrl}
|
||||
placeholder={ChatGLM.ExampleEndpoint}
|
||||
onChange={(e) =>
|
||||
accessStore.update(
|
||||
(access) => (access.chatglmUrl = e.currentTarget.value),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.ChatGLM.ApiKey.Title}
|
||||
subTitle={Locale.Settings.Access.ChatGLM.ApiKey.SubTitle}
|
||||
>
|
||||
<PasswordInput
|
||||
aria-label={Locale.Settings.Access.ChatGLM.ApiKey.Title}
|
||||
value={accessStore.chatglmApiKey}
|
||||
type="text"
|
||||
placeholder={Locale.Settings.Access.ChatGLM.ApiKey.Placeholder}
|
||||
onChange={(e) => {
|
||||
accessStore.update(
|
||||
(access) => (access.chatglmApiKey = e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</ListItem>
|
||||
</>
|
||||
);
|
||||
const siliconflowConfigComponent = accessStore.provider ===
|
||||
ServiceProvider.SiliconFlow && (
|
||||
<>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.SiliconFlow.Endpoint.Title}
|
||||
subTitle={
|
||||
Locale.Settings.Access.SiliconFlow.Endpoint.SubTitle +
|
||||
SiliconFlow.ExampleEndpoint
|
||||
}
|
||||
>
|
||||
<input
|
||||
aria-label={Locale.Settings.Access.SiliconFlow.Endpoint.Title}
|
||||
type="text"
|
||||
value={accessStore.siliconflowUrl}
|
||||
placeholder={SiliconFlow.ExampleEndpoint}
|
||||
onChange={(e) =>
|
||||
accessStore.update(
|
||||
(access) => (access.siliconflowUrl = e.currentTarget.value),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.SiliconFlow.ApiKey.Title}
|
||||
subTitle={Locale.Settings.Access.SiliconFlow.ApiKey.SubTitle}
|
||||
>
|
||||
<PasswordInput
|
||||
aria-label={Locale.Settings.Access.SiliconFlow.ApiKey.Title}
|
||||
value={accessStore.siliconflowApiKey}
|
||||
type="text"
|
||||
placeholder={Locale.Settings.Access.SiliconFlow.ApiKey.Placeholder}
|
||||
onChange={(e) => {
|
||||
accessStore.update(
|
||||
(access) => (access.siliconflowApiKey = e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</ListItem>
|
||||
</>
|
||||
);
|
||||
|
||||
const stabilityConfigComponent = accessStore.provider ===
|
||||
ServiceProvider.Stability && (
|
||||
<>
|
||||
|
@ -1357,9 +1523,17 @@ export function Settings() {
|
|||
{checkingUpdate ? (
|
||||
<LoadingIcon />
|
||||
) : hasNewVersion ? (
|
||||
<Link href={updateUrl} target="_blank" className="link">
|
||||
{Locale.Settings.Update.GoToUpdate}
|
||||
</Link>
|
||||
clientConfig?.isApp ? (
|
||||
<IconButton
|
||||
icon={<ResetIcon></ResetIcon>}
|
||||
text={Locale.Settings.Update.GoToUpdate}
|
||||
onClick={() => clientUpdate()}
|
||||
/>
|
||||
) : (
|
||||
<Link href={updateUrl} target="_blank" className="link">
|
||||
{Locale.Settings.Update.GoToUpdate}
|
||||
</Link>
|
||||
)
|
||||
) : (
|
||||
<IconButton
|
||||
icon={<ResetIcon></ResetIcon>}
|
||||
|
@ -1509,6 +1683,22 @@ export function Settings() {
|
|||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Mask.Config.CodeFold.Title}
|
||||
subTitle={Locale.Mask.Config.CodeFold.SubTitle}
|
||||
>
|
||||
<input
|
||||
aria-label={Locale.Mask.Config.CodeFold.Title}
|
||||
type="checkbox"
|
||||
checked={config.enableCodeFold}
|
||||
data-testid="enable-code-fold-checkbox"
|
||||
onChange={(e) =>
|
||||
updateConfig(
|
||||
(config) => (config.enableCodeFold = e.currentTarget.checked),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
</List>
|
||||
|
||||
<SyncItems />
|
||||
|
@ -1626,8 +1816,12 @@ export function Settings() {
|
|||
{alibabaConfigComponent}
|
||||
{tencentConfigComponent}
|
||||
{moonshotConfigComponent}
|
||||
{deepseekConfigComponent}
|
||||
{stabilityConfigComponent}
|
||||
{lflytekConfigComponent}
|
||||
{XAIConfigComponent}
|
||||
{chatglmConfigComponent}
|
||||
{siliconflowConfigComponent}
|
||||
</>
|
||||
)}
|
||||
</>
|
||||
|
@ -1662,9 +1856,11 @@ export function Settings() {
|
|||
<ListItem
|
||||
title={Locale.Settings.Access.CustomModel.Title}
|
||||
subTitle={Locale.Settings.Access.CustomModel.SubTitle}
|
||||
vertical={true}
|
||||
>
|
||||
<input
|
||||
aria-label={Locale.Settings.Access.CustomModel.Title}
|
||||
style={{ width: "100%", maxWidth: "unset", textAlign: "left" }}
|
||||
type="text"
|
||||
value={config.customModels}
|
||||
placeholder="model1,model2,model3"
|
||||
|
@ -1691,7 +1887,18 @@ export function Settings() {
|
|||
{shouldShowPromptModal && (
|
||||
<UserPromptModal onClose={() => setShowPromptModal(false)} />
|
||||
)}
|
||||
|
||||
<List>
|
||||
<RealtimeConfigList
|
||||
realtimeConfig={config.realtimeConfig}
|
||||
updateConfig={(updater) => {
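// Clone the realtime config, apply the update, then write it back to the app config store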
|
||||
const realtimeConfig = { ...config.realtimeConfig };
|
||||
updater(realtimeConfig);
|
||||
config.update(
|
||||
(config) => (config.realtimeConfig = realtimeConfig),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</List>
|
||||
<List>
|
||||
<TTSConfigList
|
||||
ttsConfig={config.ttsConfig}
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import React, { useEffect, useRef, useMemo, useState, Fragment } from "react";
|
||||
import React, { Fragment, useEffect, useMemo, useRef, useState } from "react";
|
||||
|
||||
import styles from "./home.module.scss";
|
||||
|
||||
|
@ -9,6 +9,7 @@ import ChatGptIcon from "../icons/chatgpt.svg";
|
|||
import AddIcon from "../icons/add.svg";
|
||||
import DeleteIcon from "../icons/delete.svg";
|
||||
import MaskIcon from "../icons/mask.svg";
|
||||
import McpIcon from "../icons/mcp.svg";
|
||||
import DragIcon from "../icons/drag.svg";
|
||||
import DiscoveryIcon from "../icons/discovery.svg";
|
||||
|
||||
|
@ -22,14 +23,21 @@ import {
|
|||
MIN_SIDEBAR_WIDTH,
|
||||
NARROW_SIDEBAR_WIDTH,
|
||||
Path,
|
||||
PLUGINS,
|
||||
REPO_URL,
|
||||
} from "../constant";
|
||||
|
||||
import { Link, useNavigate } from "react-router-dom";
|
||||
import { isIOS, useMobileScreen } from "../utils";
|
||||
import dynamic from "next/dynamic";
|
||||
import { showConfirm, Selector } from "./ui-lib";
|
||||
import { Selector, showConfirm } from "./ui-lib";
|
||||
import clsx from "clsx";
|
||||
import { isMcpEnabled } from "../mcp/actions";
|
||||
|
||||
const DISCOVERY = [
|
||||
{ name: Locale.Plugin.Name, path: Path.Plugins },
|
||||
{ name: "Stable Diffusion", path: Path.Sd },
|
||||
{ name: Locale.SearchChat.Page.Title, path: Path.SearchChat },
|
||||
];
|
||||
|
||||
const ChatList = dynamic(async () => (await import("./chat-list")).ChatList, {
|
||||
loading: () => null,
|
||||
|
@ -127,6 +135,7 @@ export function useDragSideBar() {
|
|||
shouldNarrow,
|
||||
};
|
||||
}
|
||||
|
||||
export function SideBarContainer(props: {
|
||||
children: React.ReactNode;
|
||||
onDragStart: (e: MouseEvent) => void;
|
||||
|
@ -141,9 +150,9 @@ export function SideBarContainer(props: {
|
|||
const { children, className, onDragStart, shouldNarrow } = props;
|
||||
return (
|
||||
<div
|
||||
className={`${styles.sidebar} ${className} ${
|
||||
shouldNarrow && styles["narrow-sidebar"]
|
||||
}`}
|
||||
className={clsx(styles.sidebar, className, {
|
||||
[styles["narrow-sidebar"]]: shouldNarrow,
|
||||
})}
|
||||
style={{
|
||||
// #3016 disable transition on ios mobile screen
|
||||
transition: isMobileScreen && isIOSMobile ? "none" : undefined,
|
||||
|
@ -165,18 +174,24 @@ export function SideBarHeader(props: {
|
|||
subTitle?: string | React.ReactNode;
|
||||
logo?: React.ReactNode;
|
||||
children?: React.ReactNode;
|
||||
shouldNarrow?: boolean;
|
||||
}) {
|
||||
const { title, subTitle, logo, children } = props;
|
||||
const { title, subTitle, logo, children, shouldNarrow } = props;
|
||||
return (
|
||||
<Fragment>
|
||||
<div className={styles["sidebar-header"]} data-tauri-drag-region>
|
||||
<div
|
||||
className={clsx(styles["sidebar-header"], {
|
||||
[styles["sidebar-header-narrow"]]: shouldNarrow,
|
||||
})}
|
||||
data-tauri-drag-region
|
||||
>
|
||||
<div className={styles["sidebar-title-container"]}>
|
||||
<div className={styles["sidebar-title"]} data-tauri-drag-region>
|
||||
{title}
|
||||
</div>
|
||||
<div className={styles["sidebar-sub-title"]}>{subTitle}</div>
|
||||
</div>
|
||||
<div className={styles["sidebar-logo"] + " no-dark"}>{logo}</div>
|
||||
<div className={clsx(styles["sidebar-logo"], "no-dark")}>{logo}</div>
|
||||
</div>
|
||||
{children}
|
||||
</Fragment>
|
||||
|
@ -212,10 +227,21 @@ export function SideBarTail(props: {
|
|||
export function SideBar(props: { className?: string }) {
|
||||
useHotKey();
|
||||
const { onDragStart, shouldNarrow } = useDragSideBar();
|
||||
const [showPluginSelector, setShowPluginSelector] = useState(false);
|
||||
const [showDiscoverySelector, setshowDiscoverySelector] = useState(false);
|
||||
const navigate = useNavigate();
|
||||
const config = useAppConfig();
|
||||
const chatStore = useChatStore();
|
||||
const [mcpEnabled, setMcpEnabled] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
// Check whether MCP is enabled
|
||||
const checkMcpStatus = async () => {
|
||||
const enabled = await isMcpEnabled();
|
||||
setMcpEnabled(enabled);
|
||||
console.log("[SideBar] MCP enabled:", enabled);
|
||||
};
|
||||
checkMcpStatus();
|
||||
}, []);
|
||||
|
||||
return (
|
||||
<SideBarContainer
|
||||
|
@ -227,6 +253,7 @@ export function SideBar(props: { className?: string }) {
|
|||
title="NextChat"
|
||||
subTitle="Build your own AI assistant."
|
||||
logo={<ChatGptIcon />}
|
||||
shouldNarrow={shouldNarrow}
|
||||
>
|
||||
<div className={styles["sidebar-header-bar"]}>
|
||||
<IconButton
|
||||
|
@ -242,25 +269,36 @@ export function SideBar(props: { className?: string }) {
|
|||
}}
|
||||
shadow
|
||||
/>
|
||||
{mcpEnabled && (
|
||||
<IconButton
|
||||
icon={<McpIcon />}
|
||||
text={shouldNarrow ? undefined : Locale.Mcp.Name}
|
||||
className={styles["sidebar-bar-button"]}
|
||||
onClick={() => {
|
||||
navigate(Path.McpMarket, { state: { fromHome: true } });
|
||||
}}
|
||||
shadow
|
||||
/>
|
||||
)}
|
||||
<IconButton
|
||||
icon={<DiscoveryIcon />}
|
||||
text={shouldNarrow ? undefined : Locale.Discovery.Name}
|
||||
className={styles["sidebar-bar-button"]}
|
||||
onClick={() => setShowPluginSelector(true)}
|
||||
onClick={() => setshowDiscoverySelector(true)}
|
||||
shadow
|
||||
/>
|
||||
</div>
|
||||
{showPluginSelector && (
|
||||
{showDiscoverySelector && (
|
||||
<Selector
|
||||
items={[
|
||||
...PLUGINS.map((item) => {
|
||||
...DISCOVERY.map((item) => {
|
||||
return {
|
||||
title: item.name,
|
||||
value: item.path,
|
||||
};
|
||||
}),
|
||||
]}
|
||||
onClose={() => setShowPluginSelector(false)}
|
||||
onClose={() => setshowDiscoverySelector(false)}
|
||||
onSelection={(s) => {
|
||||
navigate(s[0], { state: { fromHome: true } });
|
||||
}}
|
||||
|
@ -279,7 +317,7 @@ export function SideBar(props: { className?: string }) {
|
|||
<SideBarTail
|
||||
primaryAction={
|
||||
<>
|
||||
<div className={styles["sidebar-action"] + " " + styles.mobile}>
|
||||
<div className={clsx(styles["sidebar-action"], styles.mobile)}>
|
||||
<IconButton
|
||||
icon={<DeleteIcon />}
|
||||
onClick={async () => {
|
||||
|
|
|
@ -23,6 +23,8 @@ import React, {
|
|||
useRef,
|
||||
} from "react";
|
||||
import { IconButton } from "./button";
|
||||
import { Avatar } from "./emoji";
|
||||
import clsx from "clsx";
|
||||
|
||||
export function Popover(props: {
|
||||
children: JSX.Element;
|
||||
|
@ -45,7 +47,7 @@ export function Popover(props: {
|
|||
|
||||
export function Card(props: { children: JSX.Element[]; className?: string }) {
|
||||
return (
|
||||
<div className={styles.card + " " + props.className}>{props.children}</div>
|
||||
<div className={clsx(styles.card, props.className)}>{props.children}</div>
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -60,11 +62,13 @@ export function ListItem(props: {
|
|||
}) {
|
||||
return (
|
||||
<div
|
||||
className={
|
||||
styles["list-item"] +
|
||||
` ${props.vertical ? styles["vertical"] : ""} ` +
|
||||
` ${props.className || ""}`
|
||||
}
|
||||
className={clsx(
|
||||
styles["list-item"],
|
||||
{
|
||||
[styles["vertical"]]: props.vertical,
|
||||
},
|
||||
props.className,
|
||||
)}
|
||||
onClick={props.onClick}
|
||||
>
|
||||
<div className={styles["list-header"]}>
|
||||
|
@ -135,9 +139,9 @@ export function Modal(props: ModalProps) {
|
|||
|
||||
return (
|
||||
<div
|
||||
className={
|
||||
styles["modal-container"] + ` ${isMax && styles["modal-container-max"]}`
|
||||
}
|
||||
className={clsx(styles["modal-container"], {
|
||||
[styles["modal-container-max"]]: isMax,
|
||||
})}
|
||||
>
|
||||
<div className={styles["modal-header"]}>
|
||||
<div className={styles["modal-title"]}>{props.title}</div>
|
||||
|
@ -260,7 +264,7 @@ export function Input(props: InputProps) {
|
|||
return (
|
||||
<textarea
|
||||
{...props}
|
||||
className={`${styles["input"]} ${props.className}`}
|
||||
className={clsx(styles["input"], props.className)}
|
||||
></textarea>
|
||||
);
|
||||
}
|
||||
|
@ -301,9 +305,13 @@ export function Select(
|
|||
const { className, children, align, ...otherProps } = props;
|
||||
return (
|
||||
<div
|
||||
className={`${styles["select-with-icon"]} ${
|
||||
align === "left" ? styles["left-align-option"] : ""
|
||||
} ${className}`}
|
||||
className={clsx(
|
||||
styles["select-with-icon"],
|
||||
{
|
||||
[styles["left-align-option"]]: align === "left",
|
||||
},
|
||||
className,
|
||||
)}
|
||||
>
|
||||
<select className={styles["select-with-icon-select"]} {...otherProps}>
|
||||
{children}
|
||||
|
@ -509,12 +517,13 @@ export function Selector<T>(props: {
|
|||
const selected = selectedValues.includes(item.value);
|
||||
return (
|
||||
<ListItem
|
||||
className={`${styles["selector-item"]} ${
|
||||
item.disable && styles["selector-item-disabled"]
|
||||
}`}
|
||||
className={clsx(styles["selector-item"], {
|
||||
[styles["selector-item-disabled"]]: item.disable,
|
||||
})}
|
||||
key={i}
|
||||
title={item.title}
|
||||
subTitle={item.subTitle}
|
||||
icon={<Avatar model={item.value as string} />}
|
||||
onClick={(e) => {
|
||||
if (item.disable) {
|
||||
e.stopPropagation();
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
export * from "./voice-print";
|
|
@ -0,0 +1,11 @@
|
|||
.voice-print {
|
||||
width: 100%;
|
||||
height: 60px;
|
||||
margin: 20px 0;
|
||||
|
||||
canvas {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
filter: brightness(1.2); // increase overall brightness
|
||||
}
|
||||
}
|
|
@ -0,0 +1,180 @@
|
|||
import { useEffect, useRef, useCallback } from "react";
|
||||
import styles from "./voice-print.module.scss";
|
||||
|
||||
interface VoicePrintProps {
|
||||
frequencies?: Uint8Array;
|
||||
isActive?: boolean;
|
||||
}
|
||||
|
||||
export function VoicePrint({ frequencies, isActive }: VoicePrintProps) {
|
||||
// Canvas ref used to obtain the drawing context
|
||||
const canvasRef = useRef<HTMLCanvasElement>(null);
|
||||
// Historical frequency data used for smoothing
|
||||
const historyRef = useRef<number[][]>([]);
|
||||
// Number of history frames to keep; controls smoothness
|
||||
const historyLengthRef = useRef(10);
|
||||
// Animation frame ID, stored for cleanup
|
||||
const animationFrameRef = useRef<number>();
|
||||
|
||||
/**
|
||||
* Update the frequency history
* A FIFO queue keeps the history at a fixed length
|
||||
*/
|
||||
const updateHistory = useCallback((freqArray: number[]) => {
|
||||
historyRef.current.push(freqArray);
|
||||
if (historyRef.current.length > historyLengthRef.current) {
|
||||
historyRef.current.shift();
|
||||
}
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
const canvas = canvasRef.current;
|
||||
if (!canvas) return;
|
||||
|
||||
const ctx = canvas.getContext("2d");
|
||||
if (!ctx) return;
|
||||
|
||||
/**
|
||||
* Handle high-DPI displays
* Scale the canvas's actual rendering resolution by the device pixel ratio
|
||||
*/
|
||||
const dpr = window.devicePixelRatio || 1;
|
||||
canvas.width = canvas.offsetWidth * dpr;
|
||||
canvas.height = canvas.offsetHeight * dpr;
|
||||
ctx.scale(dpr, dpr);
|
||||
|
||||
/**
|
||||
* Main drawing function
* Uses requestAnimationFrame for smooth animation
* Steps:
* 1. Clear the canvas
* 2. Update the history buffer
* 3. Compute the waveform points
* 4. Draw the vertically symmetric voice print
|
||||
*/
|
||||
const draw = () => {
|
||||
// Clear the canvas
|
||||
ctx.clearRect(0, 0, canvas.width, canvas.height);
|
||||
|
||||
if (!frequencies || !isActive) {
|
||||
historyRef.current = [];
|
||||
return;
|
||||
}
|
||||
|
||||
const freqArray = Array.from(frequencies);
|
||||
updateHistory(freqArray);
|
||||
|
||||
// Draw the voice print
|
||||
const points: [number, number][] = [];
|
||||
const centerY = canvas.height / 2;
|
||||
const width = canvas.width;
|
||||
const sliceWidth = width / (frequencies.length - 1);
|
||||
|
||||
// Draw the main waveform
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(0, centerY);
|
||||
|
||||
/**
|
||||
* Voice-print drawing algorithm:
* 1. Average with historical data for smooth transitions
* 2. Add natural oscillation via a sine function
* 3. Connect points with Bezier curves for a smoother line
* 4. Draw the mirrored half to complete the voice print
|
||||
*/
|
||||
for (let i = 0; i < frequencies.length; i++) {
|
||||
const x = i * sliceWidth;
|
||||
let avgFrequency = frequencies[i];
|
||||
|
||||
/**
|
||||
* Waveform smoothing:
* 1. Collect the frequency values at this index from the history
* 2. Compute a weighted average of the current and historical values
* 3. Derive the rendered height from that average
|
||||
*/
|
||||
if (historyRef.current.length > 0) {
|
||||
const historicalValues = historyRef.current.map((h) => h[i] || 0);
|
||||
avgFrequency =
|
||||
(avgFrequency + historicalValues.reduce((a, b) => a + b, 0)) /
|
||||
(historyRef.current.length + 1);
|
||||
}
|
||||
|
||||
/**
|
||||
* Waveform transform:
* 1. Normalize the frequency value to the 0-1 range
* 2. Apply a time-dependent sine transform
* 3. Connect points smoothly with Bezier curves
|
||||
*/
|
||||
const normalized = avgFrequency / 255.0;
|
||||
const height = normalized * (canvas.height / 2);
|
||||
const y = centerY + height * Math.sin(i * 0.2 + Date.now() * 0.002);
|
||||
|
||||
points.push([x, y]);
|
||||
|
||||
if (i === 0) {
|
||||
ctx.moveTo(x, y);
|
||||
} else {
|
||||
// Use a Bezier curve to smooth the waveform
|
||||
const prevPoint = points[i - 1];
|
||||
const midX = (prevPoint[0] + x) / 2;
|
||||
ctx.quadraticCurveTo(
|
||||
prevPoint[0],
|
||||
prevPoint[1],
|
||||
midX,
|
||||
(prevPoint[1] + y) / 2,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Draw the mirrored lower half
|
||||
for (let i = points.length - 1; i >= 0; i--) {
|
||||
const [x, y] = points[i];
|
||||
const symmetricY = centerY - (y - centerY);
|
||||
if (i === points.length - 1) {
|
||||
ctx.lineTo(x, symmetricY);
|
||||
} else {
|
||||
const nextPoint = points[i + 1];
|
||||
const midX = (nextPoint[0] + x) / 2;
|
||||
ctx.quadraticCurveTo(
|
||||
nextPoint[0],
|
||||
centerY - (nextPoint[1] - centerY),
|
||||
midX,
|
||||
centerY - ((nextPoint[1] + y) / 2 - centerY),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
ctx.closePath();
|
||||
|
||||
/**
|
||||
* Gradient effect:
* Apply a three-stop gradient with transparency from left to right
* Blue tones are used to enhance the visual effect
|
||||
*/
|
||||
const gradient = ctx.createLinearGradient(0, 0, canvas.width, 0);
|
||||
gradient.addColorStop(0, "rgba(100, 180, 255, 0.95)");
|
||||
gradient.addColorStop(0.5, "rgba(140, 200, 255, 0.9)");
|
||||
gradient.addColorStop(1, "rgba(180, 220, 255, 0.95)");
|
||||
|
||||
ctx.fillStyle = gradient;
|
||||
ctx.fill();
|
||||
|
||||
animationFrameRef.current = requestAnimationFrame(draw);
|
||||
};
|
||||
|
||||
// Start the animation loop
|
||||
draw();
|
||||
|
||||
// Cleanup: cancel the animation when the component unmounts
|
||||
return () => {
|
||||
if (animationFrameRef.current) {
|
||||
cancelAnimationFrame(animationFrameRef.current);
|
||||
}
|
||||
};
|
||||
}, [frequencies, isActive, updateHistory]);
|
||||
|
||||
return (
|
||||
<div className={styles["voice-print"]}>
|
||||
<canvas ref={canvasRef} />
|
||||
</div>
|
||||
);
|
||||
}
|
|
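The VoicePrint component above only draws whatever frequency data it receives; the caller is expected to hand it a Uint8Array, typically sampled from a Web Audio AnalyserNode. The hook below is a usage sketch under that assumption and is not part of this diff:

```tsx
import { useEffect, useState } from "react";
import { VoicePrint } from "./voice-print";

// Polls an AnalyserNode attached to a microphone stream and exposes the
// byte frequency data in the shape VoicePrint expects.
function useFrequencies(stream: MediaStream | null) {
  const [frequencies, setFrequencies] = useState<Uint8Array>();

  useEffect(() => {
    if (!stream) return;
    const ctx = new AudioContext();
    const analyser = ctx.createAnalyser();
    analyser.fftSize = 256;
    ctx.createMediaStreamSource(stream).connect(analyser);

    const data = new Uint8Array(analyser.frequencyBinCount);
    let frame = 0;
    const tick = () => {
      analyser.getByteFrequencyData(data);
      setFrequencies(new Uint8Array(data)); // copy so React sees a new reference
      frame = requestAnimationFrame(tick);
    };
    tick();

    return () => {
      cancelAnimationFrame(frame);
      ctx.close();
    };
  }, [stream]);

  return frequencies;
}

// Usage: <VoicePrint frequencies={useFrequencies(stream)} isActive={!!stream} />
```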
@ -1,5 +1,6 @@
|
|||
import md5 from "spark-md5";
|
||||
import { DEFAULT_MODELS, DEFAULT_GA_ID } from "../constant";
|
||||
import { isGPT4Model } from "../utils/model";
|
||||
|
||||
declare global {
|
||||
namespace NodeJS {
|
||||
|
@ -22,6 +23,7 @@ declare global {
|
|||
DISABLE_FAST_LINK?: string; // disallow parse settings from url or not
|
||||
CUSTOM_MODELS?: string; // to control custom models
|
||||
DEFAULT_MODEL?: string; // to control default model in every new chat window
|
||||
VISION_MODELS?: string; // to control vision models
|
||||
|
||||
// stability only
|
||||
STABILITY_URL?: string;
|
||||
|
@ -71,8 +73,25 @@ declare global {
|
|||
IFLYTEK_API_KEY?: string;
|
||||
IFLYTEK_API_SECRET?: string;
|
||||
|
||||
DEEPSEEK_URL?: string;
|
||||
DEEPSEEK_API_KEY?: string;
|
||||
|
||||
// xai only
|
||||
XAI_URL?: string;
|
||||
XAI_API_KEY?: string;
|
||||
|
||||
// chatglm only
|
||||
CHATGLM_URL?: string;
|
||||
CHATGLM_API_KEY?: string;
|
||||
|
||||
// siliconflow only
|
||||
SILICONFLOW_URL?: string;
|
||||
SILICONFLOW_API_KEY?: string;
|
||||
|
||||
// custom template for preprocessing user input
|
||||
DEFAULT_INPUT_TEMPLATE?: string;
|
||||
|
||||
ENABLE_MCP?: string; // enable mcp functionality
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -116,22 +135,16 @@ export const getServerSideConfig = () => {
|
|||
const disableGPT4 = !!process.env.DISABLE_GPT4;
|
||||
let customModels = process.env.CUSTOM_MODELS ?? "";
|
||||
let defaultModel = process.env.DEFAULT_MODEL ?? "";
|
||||
let visionModels = process.env.VISION_MODELS ?? "";
|
||||
|
||||
if (disableGPT4) {
|
||||
if (customModels) customModels += ",";
|
||||
customModels += DEFAULT_MODELS.filter(
|
||||
(m) =>
|
||||
(m.name.startsWith("gpt-4") || m.name.startsWith("chatgpt-4o")) &&
|
||||
!m.name.startsWith("gpt-4o-mini"),
|
||||
)
|
||||
customModels += DEFAULT_MODELS.filter((m) => isGPT4Model(m.name))
|
||||
.map((m) => "-" + m.name)
|
||||
.join(",");
|
||||
if (
|
||||
(defaultModel.startsWith("gpt-4") ||
|
||||
defaultModel.startsWith("chatgpt-4o")) &&
|
||||
!defaultModel.startsWith("gpt-4o-mini")
|
||||
)
|
||||
if (defaultModel && isGPT4Model(defaultModel)) {
|
||||
defaultModel = "";
|
||||
}
|
||||
}
|
||||
|
||||
const isStability = !!process.env.STABILITY_API_KEY;
|
||||
|
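The rewritten disableGPT4 block above delegates the model-name check to isGPT4Model from ../utils/model, whose body is not shown in this diff. Reconstructed from the inline conditions it replaces, the helper presumably looks roughly like this (an assumption, not the actual implementation):

```ts
// Assumption: mirrors the inline checks removed above; the real helper in
// app/utils/model may cover additional model families.
export function isGPT4Model(name: string): boolean {
  return (
    (name.startsWith("gpt-4") || name.startsWith("chatgpt-4o")) &&
    !name.startsWith("gpt-4o-mini")
  );
}
```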
@ -146,6 +159,10 @@ export const getServerSideConfig = () => {
|
|||
const isAlibaba = !!process.env.ALIBABA_API_KEY;
|
||||
const isMoonshot = !!process.env.MOONSHOT_API_KEY;
|
||||
const isIflytek = !!process.env.IFLYTEK_API_KEY;
|
||||
const isDeepSeek = !!process.env.DEEPSEEK_API_KEY;
|
||||
const isXAI = !!process.env.XAI_API_KEY;
|
||||
const isChatGLM = !!process.env.CHATGLM_API_KEY;
|
||||
const isSiliconFlow = !!process.env.SILICONFLOW_API_KEY;
|
||||
// const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? "";
|
||||
// const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
|
||||
// const randomIndex = Math.floor(Math.random() * apiKeys.length);
|
||||
|
@ -208,11 +225,27 @@ export const getServerSideConfig = () => {
|
|||
iflytekApiKey: process.env.IFLYTEK_API_KEY,
|
||||
iflytekApiSecret: process.env.IFLYTEK_API_SECRET,
|
||||
|
||||
isDeepSeek,
|
||||
deepseekUrl: process.env.DEEPSEEK_URL,
|
||||
deepseekApiKey: getApiKey(process.env.DEEPSEEK_API_KEY),
|
||||
|
||||
isXAI,
|
||||
xaiUrl: process.env.XAI_URL,
|
||||
xaiApiKey: getApiKey(process.env.XAI_API_KEY),
|
||||
|
||||
isChatGLM,
|
||||
chatglmUrl: process.env.CHATGLM_URL,
|
||||
chatglmApiKey: getApiKey(process.env.CHATGLM_API_KEY),
|
||||
|
||||
cloudflareAccountId: process.env.CLOUDFLARE_ACCOUNT_ID,
|
||||
cloudflareKVNamespaceId: process.env.CLOUDFLARE_KV_NAMESPACE_ID,
|
||||
cloudflareKVApiKey: getApiKey(process.env.CLOUDFLARE_KV_API_KEY),
|
||||
cloudflareKVTTL: process.env.CLOUDFLARE_KV_TTL,
|
||||
|
||||
isSiliconFlow,
|
||||
siliconFlowUrl: process.env.SILICONFLOW_URL,
|
||||
siliconFlowApiKey: getApiKey(process.env.SILICONFLOW_API_KEY),
|
||||
|
||||
gtmId: process.env.GTM_ID,
|
||||
gaId: process.env.GA_ID || DEFAULT_GA_ID,
|
||||
|
||||
|
@ -229,6 +262,8 @@ export const getServerSideConfig = () => {
|
|||
disableFastLink: !!process.env.DISABLE_FAST_LINK,
|
||||
customModels,
|
||||
defaultModel,
|
||||
visionModels,
|
||||
allowedWebDavEndpoints,
|
||||
enableMcp: process.env.ENABLE_MCP === "true",
|
||||
};
|
||||
};
|
||||
|
|
app/constant.ts (333 changed lines)
|
@ -28,6 +28,14 @@ export const TENCENT_BASE_URL = "https://hunyuan.tencentcloudapi.com";
|
|||
export const MOONSHOT_BASE_URL = "https://api.moonshot.cn";
|
||||
export const IFLYTEK_BASE_URL = "https://spark-api-open.xf-yun.com";
|
||||
|
||||
export const DEEPSEEK_BASE_URL = "https://api.deepseek.com";
|
||||
|
||||
export const XAI_BASE_URL = "https://api.x.ai";
|
||||
|
||||
export const CHATGLM_BASE_URL = "https://open.bigmodel.cn";
|
||||
|
||||
export const SILICONFLOW_BASE_URL = "https://api.siliconflow.cn";
|
||||
|
||||
export const CACHE_URL_PREFIX = "/api/cache";
|
||||
export const UPLOAD_URL = `${CACHE_URL_PREFIX}/upload`;
|
||||
|
||||
|
@ -43,6 +51,7 @@ export enum Path {
|
|||
SdNew = "/sd-new",
|
||||
Artifacts = "/artifacts",
|
||||
SearchChat = "/search-chat",
|
||||
McpMarket = "/mcp-market",
|
||||
}
|
||||
|
||||
export enum ApiPath {
|
||||
|
@ -59,6 +68,10 @@ export enum ApiPath {
|
|||
Iflytek = "/api/iflytek",
|
||||
Stability = "/api/stability",
|
||||
Artifacts = "/api/artifacts",
|
||||
XAI = "/api/xai",
|
||||
ChatGLM = "/api/chatglm",
|
||||
DeepSeek = "/api/deepseek",
|
||||
SiliconFlow = "/api/siliconflow",
|
||||
}
|
||||
|
||||
export enum SlotID {
|
||||
|
@ -81,6 +94,7 @@ export enum StoreKey {
|
|||
Update = "chat-update",
|
||||
Sync = "sync",
|
||||
SdList = "sd-list",
|
||||
Mcp = "mcp-store",
|
||||
}
|
||||
|
||||
export const DEFAULT_SIDEBAR_WIDTH = 300;
|
||||
|
@ -96,6 +110,7 @@ export const UNFINISHED_INPUT = (id: string) => "unfinished-input-" + id;
|
|||
export const STORAGE_KEY = "chatgpt-next-web";
|
||||
|
||||
export const REQUEST_TIMEOUT_MS = 60000;
|
||||
export const REQUEST_TIMEOUT_MS_FOR_THINKING = REQUEST_TIMEOUT_MS * 5;
|
||||
|
||||
export const EXPORT_MESSAGE_CLASS_NAME = "export-markdown";
|
||||
|
||||
|
@ -111,6 +126,10 @@ export enum ServiceProvider {
|
|||
Moonshot = "Moonshot",
|
||||
Stability = "Stability",
|
||||
Iflytek = "Iflytek",
|
||||
XAI = "XAI",
|
||||
ChatGLM = "ChatGLM",
|
||||
DeepSeek = "DeepSeek",
|
||||
SiliconFlow = "SiliconFlow",
|
||||
}
|
||||
|
||||
// Google API safety settings, see https://ai.google.dev/gemini-api/docs/safety-settings
|
||||
|
@ -133,6 +152,10 @@ export enum ModelProvider {
|
|||
Hunyuan = "Hunyuan",
|
||||
Moonshot = "Moonshot",
|
||||
Iflytek = "Iflytek",
|
||||
XAI = "XAI",
|
||||
ChatGLM = "ChatGLM",
|
||||
DeepSeek = "DeepSeek",
|
||||
SiliconFlow = "SiliconFlow",
|
||||
}
|
||||
|
||||
export const Stability = {
|
||||
|
@ -215,6 +238,29 @@ export const Iflytek = {
|
|||
ChatPath: "v1/chat/completions",
|
||||
};
|
||||
|
||||
export const DeepSeek = {
|
||||
ExampleEndpoint: DEEPSEEK_BASE_URL,
|
||||
ChatPath: "chat/completions",
|
||||
};
|
||||
|
||||
export const XAI = {
|
||||
ExampleEndpoint: XAI_BASE_URL,
|
||||
ChatPath: "v1/chat/completions",
|
||||
};
|
||||
|
||||
export const ChatGLM = {
|
||||
ExampleEndpoint: CHATGLM_BASE_URL,
|
||||
ChatPath: "api/paas/v4/chat/completions",
|
||||
ImagePath: "api/paas/v4/images/generations",
|
||||
VideoPath: "api/paas/v4/videos/generations",
|
||||
};
|
||||
|
||||
export const SiliconFlow = {
|
||||
ExampleEndpoint: SILICONFLOW_BASE_URL,
|
||||
ChatPath: "v1/chat/completions",
|
||||
ListModelPath: "v1/models?&sub_type=chat",
|
||||
};
|
||||
|
||||
export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
|
||||
// export const DEFAULT_SYSTEM_TEMPLATE = `
|
||||
// You are ChatGPT, a large language model trained by {{ServiceProvider}}.
|
||||
|
@ -233,8 +279,133 @@ Latex inline: \\(x^2\\)
|
|||
Latex block: $$e=mc^2$$
|
||||
`;
|
||||
|
||||
export const MCP_TOOLS_TEMPLATE = `
|
||||
[clientId]
|
||||
{{ clientId }}
|
||||
[tools]
|
||||
{{ tools }}
|
||||
`;
|
||||
|
||||
export const MCP_SYSTEM_TEMPLATE = `
|
||||
You are an AI assistant with access to system tools. Your role is to help users by combining natural language understanding with tool operations when needed.
|
||||
|
||||
1. AVAILABLE TOOLS:
|
||||
{{ MCP_TOOLS }}
|
||||
|
||||
2. WHEN TO USE TOOLS:
|
||||
- ALWAYS USE TOOLS when they can help answer user questions
|
||||
- DO NOT just describe what you could do - TAKE ACTION immediately
|
||||
- If you're not sure whether to use a tool, USE IT
|
||||
- Common triggers for tool use:
|
||||
* Questions about files or directories
|
||||
* Requests to check, list, or manipulate system resources
|
||||
* Any query that can be answered with available tools
|
||||
|
||||
3. HOW TO USE TOOLS:
|
||||
A. Tool Call Format:
|
||||
- Use markdown code blocks with format: \`\`\`json:mcp:{clientId}\`\`\`
|
||||
- Always include:
|
||||
* method: "tools/call" (only this method is supported)
|
||||
* params:
|
||||
- name: must match an available primitive name
|
||||
- arguments: required parameters for the primitive
|
||||
|
||||
B. Response Format:
|
||||
- Tool responses will come as user messages
|
||||
- Format: \`\`\`json:mcp-response:{clientId}\`\`\`
|
||||
- Wait for response before making another tool call
|
||||
|
||||
C. Important Rules:
|
||||
- Only use tools/call method
|
||||
- Only ONE tool call per message
|
||||
- ALWAYS TAKE ACTION instead of just describing what you could do
|
||||
- Include the correct clientId in code block language tag
|
||||
- Verify arguments match the primitive's requirements
|
||||
|
||||
4. INTERACTION FLOW:
|
||||
A. When user makes a request:
|
||||
- IMMEDIATELY use appropriate tool if available
|
||||
- DO NOT ask if user wants you to use the tool
|
||||
- DO NOT just describe what you could do
|
||||
B. After receiving tool response:
|
||||
- Explain results clearly
|
||||
- Take next appropriate action if needed
|
||||
C. If tools fail:
|
||||
- Explain the error
|
||||
- Try alternative approach immediately
|
||||
|
||||
5. EXAMPLE INTERACTION:
|
||||
|
||||
good example:
|
||||
|
||||
\`\`\`json:mcp:filesystem
|
||||
{
|
||||
"method": "tools/call",
|
||||
"params": {
|
||||
"name": "list_allowed_directories",
|
||||
"arguments": {}
|
||||
}
|
||||
}
|
||||
\`\`\`"
|
||||
|
||||
|
||||
\`\`\`json:mcp-response:filesystem
|
||||
{
|
||||
"method": "tools/call",
|
||||
"params": {
|
||||
"name": "write_file",
|
||||
"arguments": {
|
||||
"path": "/Users/river/dev/nextchat/test/joke.txt",
|
||||
"content": "为什么数学书总是感到忧伤?因为它有太多的问题。"
|
||||
}
|
||||
}
|
||||
}
|
||||
\`\`\`
|
||||
|
||||
the following is a wrong mcp json example:
|
||||
|
||||
\`\`\`json:mcp:filesystem
|
||||
{
|
||||
"method": "write_file",
|
||||
"params": {
|
||||
"path": "NextChat_Information.txt",
|
||||
"content": "1"
|
||||
}
|
||||
}
|
||||
\`\`\`
|
||||
|
||||
This is wrong because the method is not tools/call.
|
||||
|
||||
\`\`\`{
|
||||
"method": "search_repositories",
|
||||
"params": {
|
||||
"query": "2oeee"
|
||||
}
|
||||
}
|
||||
\`\`\`
|
||||
|
||||
This is wrong because the method is not tools/call.!!!!!!!!!!!
|
||||
|
||||
the right format is:
|
||||
\`\`\`json:mcp:filesystem
|
||||
{
|
||||
"method": "tools/call",
|
||||
"params": {
|
||||
"name": "search_repositories",
|
||||
"arguments": {
|
||||
"query": "2oeee"
|
||||
}
|
||||
}
|
||||
}
|
||||
\`\`\`
|
||||
|
||||
please follow the format strictly and ONLY use the tools/call method!!!!!!!!!!!
|
||||
|
||||
`;
|
||||
|
||||
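MCP_SYSTEM_TEMPLATE above asks the model to emit tool calls as fenced `json:mcp:{clientId}` blocks using only the tools/call method. As an illustration of how such blocks could be pulled out of an assistant message (a hand-written sketch, not necessarily the parser this codebase ships):

```ts
interface McpRequest {
  clientId: string;
  method: "tools/call";
  params: { name: string; arguments?: Record<string, unknown> };
}

// Matches fenced blocks whose info string is json:mcp:<clientId>.
const FENCE = "`".repeat(3);
const MCP_BLOCK_RE = new RegExp(
  `${FENCE}json:mcp:([\\w-]+)\\s*\\n([\\s\\S]*?)${FENCE}`,
  "g",
);

export function extractMcpRequests(message: string): McpRequest[] {
  const requests: McpRequest[] = [];
  for (const [, clientId, body] of message.matchAll(MCP_BLOCK_RE)) {
    try {
      const payload = JSON.parse(body);
      if (payload?.method === "tools/call" && payload?.params?.name) {
        requests.push({ clientId, ...payload });
      }
    } catch {
      // Malformed JSON is ignored; the template tells the model to retry.
    }
  }
  return requests;
}
```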
export const SUMMARIZE_MODEL = "gpt-4o-mini";
|
||||
export const GEMINI_SUMMARIZE_MODEL = "gemini-pro";
|
||||
export const DEEPSEEK_SUMMARIZE_MODEL = "deepseek-chat";
|
||||
|
||||
export const KnowledgeCutOffDate: Record<string, string> = {
|
||||
default: "2021-09",
|
||||
|
@ -244,16 +415,25 @@ export const KnowledgeCutOffDate: Record<string, string> = {
|
|||
"gpt-4o": "2023-10",
|
||||
"gpt-4o-2024-05-13": "2023-10",
|
||||
"gpt-4o-2024-08-06": "2023-10",
|
||||
"gpt-4o-2024-11-20": "2023-10",
|
||||
"chatgpt-4o-latest": "2023-10",
|
||||
"gpt-4o-mini": "2023-10",
|
||||
"gpt-4o-mini-2024-07-18": "2023-10",
|
||||
"gpt-4-vision-preview": "2023-04",
|
||||
"o1-mini-2024-09-12": "2023-10",
|
||||
"o1-mini": "2023-10",
|
||||
"o1-preview-2024-09-12": "2023-10",
|
||||
"o1-preview": "2023-10",
|
||||
"o1-2024-12-17": "2023-10",
|
||||
o1: "2023-10",
|
||||
"o3-mini-2025-01-31": "2023-10",
|
||||
"o3-mini": "2023-10",
|
||||
// After improvements,
|
||||
// it's now easier to add a "KnowledgeCutOffDate" entry instead of hardcoding each one, as was done previously.
|
||||
"gemini-pro": "2023-12",
|
||||
"gemini-pro-vision": "2023-12",
|
||||
"deepseek-chat": "2024-07",
|
||||
"deepseek-coder": "2024-07",
|
||||
};
|
||||
|
||||
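KnowledgeCutOffDate above now lists per-model cutoffs plus a "default" entry. A minimal sketch of the lookup that presumably consumes it (illustrative only; the real consumer may also do prefix matching on model names):

```ts
// Assumption: exact-name lookup with a fallback to the "default" entry.
function knowledgeCutoff(model: string): string {
  return KnowledgeCutOffDate[model] ?? KnowledgeCutOffDate.default;
}

knowledgeCutoff("deepseek-chat");  // "2024-07"
knowledgeCutoff("some-new-model"); // "2021-09" (default)
```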
export const DEFAULT_TTS_ENGINE = "OpenAI-TTS";
|
||||
|
@ -270,7 +450,27 @@ export const DEFAULT_TTS_VOICES = [
|
|||
"shimmer",
|
||||
];
|
||||
|
||||
export const VISION_MODEL_REGEXES = [
|
||||
/vision/,
|
||||
/gpt-4o/,
|
||||
/claude-3/,
|
||||
/gemini-1\.5/,
|
||||
/gemini-exp/,
|
||||
/gemini-2\.0/,
|
||||
/learnlm/,
|
||||
/qwen-vl/,
|
||||
/qwen2-vl/,
|
||||
/gpt-4-turbo(?!.*preview)/, // Matches "gpt-4-turbo" but not "gpt-4-turbo-preview"
|
||||
/^dall-e-3$/, // Matches exactly "dall-e-3"
|
||||
/glm-4v/,
|
||||
/vl/i,
|
||||
];
|
||||
|
||||
export const EXCLUDE_VISION_MODEL_REGEXES = [/claude-3-5-haiku-20241022/];
|
||||
|
||||
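VISION_MODEL_REGEXES and EXCLUDE_VISION_MODEL_REGEXES above are presumably consumed by a helper that decides whether a model accepts image input. A sketch of that check based only on these two lists (the project's actual util, and the VISION_MODELS env override declared earlier in this diff, are not shown here):

```ts
// Assumption: the exclusion list wins over the inclusion list.
export function looksLikeVisionModel(model: string): boolean {
  if (EXCLUDE_VISION_MODEL_REGEXES.some((re) => re.test(model))) return false;
  return VISION_MODEL_REGEXES.some((re) => re.test(model));
}

looksLikeVisionModel("gpt-4o-2024-11-20");         // true  (matches /gpt-4o/)
looksLikeVisionModel("claude-3-5-haiku-20241022"); // false (explicitly excluded)
```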
const openaiModels = [
|
||||
// As of July 2024, gpt-4o-mini should be used in place of gpt-3.5-turbo,
|
||||
// as it is cheaper, more capable, multimodal, and just as fast. gpt-3.5-turbo is still available for use in the API.
|
||||
"gpt-3.5-turbo",
|
||||
"gpt-3.5-turbo-1106",
|
||||
"gpt-3.5-turbo-0125",
|
||||
|
@ -283,6 +483,7 @@ const openaiModels = [
|
|||
"gpt-4o",
|
||||
"gpt-4o-2024-05-13",
|
||||
"gpt-4o-2024-08-06",
|
||||
"gpt-4o-2024-11-20",
|
||||
"chatgpt-4o-latest",
|
||||
"gpt-4o-mini",
|
||||
"gpt-4o-mini-2024-07-18",
|
||||
|
@ -292,13 +493,33 @@ const openaiModels = [
|
|||
"dall-e-3",
|
||||
"o1-mini",
|
||||
"o1-preview",
|
||||
"o3-mini",
|
||||
];
|
||||
|
||||
const googleModels = [
|
||||
"gemini-1.0-pro",
|
||||
"gemini-1.0-pro", // Deprecated on 2/15/2025
|
||||
"gemini-1.5-pro-latest",
|
||||
"gemini-1.5-pro",
|
||||
"gemini-1.5-pro-002",
|
||||
"gemini-1.5-pro-exp-0827",
|
||||
"gemini-1.5-flash-latest",
|
||||
"gemini-pro-vision",
|
||||
"gemini-1.5-flash-8b-latest",
|
||||
"gemini-1.5-flash",
|
||||
"gemini-1.5-flash-8b",
|
||||
"gemini-1.5-flash-002",
|
||||
"gemini-1.5-flash-exp-0827",
|
||||
"learnlm-1.5-pro-experimental",
|
||||
"gemini-exp-1114",
|
||||
"gemini-exp-1121",
|
||||
"gemini-exp-1206",
|
||||
"gemini-2.0-flash",
|
||||
"gemini-2.0-flash-exp",
|
||||
"gemini-2.0-flash-lite-preview-02-05",
|
||||
"gemini-2.0-flash-thinking-exp",
|
||||
"gemini-2.0-flash-thinking-exp-1219",
|
||||
"gemini-2.0-flash-thinking-exp-01-21",
|
||||
"gemini-2.0-pro-exp",
|
||||
"gemini-2.0-pro-exp-02-05",
|
||||
];
|
||||
|
||||
const anthropicModels = [
|
||||
|
@ -307,8 +528,13 @@ const anthropicModels = [
|
|||
"claude-2.1",
|
||||
"claude-3-sonnet-20240229",
|
||||
"claude-3-opus-20240229",
|
||||
"claude-3-opus-latest",
|
||||
"claude-3-haiku-20240307",
|
||||
"claude-3-5-haiku-20241022",
|
||||
"claude-3-5-haiku-latest",
|
||||
"claude-3-5-sonnet-20240620",
|
||||
"claude-3-5-sonnet-20241022",
|
||||
"claude-3-5-sonnet-latest",
|
||||
];
|
||||
|
||||
const baiduModels = [
|
||||
|
@ -364,6 +590,56 @@ const iflytekModels = [
|
|||
"4.0Ultra",
|
||||
];
|
||||
|
||||
const deepseekModels = ["deepseek-chat", "deepseek-coder", "deepseek-reasoner"];
|
||||
|
||||
const xAIModes = [
|
||||
"grok-beta",
|
||||
"grok-2",
|
||||
"grok-2-1212",
|
||||
"grok-2-latest",
|
||||
"grok-vision-beta",
|
||||
"grok-2-vision-1212",
|
||||
"grok-2-vision",
|
||||
"grok-2-vision-latest",
|
||||
];
|
||||
|
||||
const chatglmModels = [
|
||||
"glm-4-plus",
|
||||
"glm-4-0520",
|
||||
"glm-4",
|
||||
"glm-4-air",
|
||||
"glm-4-airx",
|
||||
"glm-4-long",
|
||||
"glm-4-flashx",
|
||||
"glm-4-flash",
|
||||
"glm-4v-plus",
|
||||
"glm-4v",
|
||||
"glm-4v-flash", // free
|
||||
"cogview-3-plus",
|
||||
"cogview-3",
|
||||
"cogview-3-flash", // free
|
||||
// polling tasks are not supported yet
|
||||
// "cogvideox",
|
||||
// "cogvideox-flash", // free
|
||||
];
|
||||
|
||||
const siliconflowModels = [
|
||||
"Qwen/Qwen2.5-7B-Instruct",
|
||||
"Qwen/Qwen2.5-72B-Instruct",
|
||||
"deepseek-ai/DeepSeek-R1",
|
||||
"deepseek-ai/DeepSeek-R1-Distill-Llama-70B",
|
||||
"deepseek-ai/DeepSeek-R1-Distill-Llama-8B",
|
||||
"deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B",
|
||||
"deepseek-ai/DeepSeek-R1-Distill-Qwen-14B",
|
||||
"deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
|
||||
"deepseek-ai/DeepSeek-R1-Distill-Qwen-7B",
|
||||
"deepseek-ai/DeepSeek-V3",
|
||||
"meta-llama/Llama-3.3-70B-Instruct",
|
||||
"THUDM/glm-4-9b-chat",
|
||||
"Pro/deepseek-ai/DeepSeek-R1",
|
||||
"Pro/deepseek-ai/DeepSeek-V3",
|
||||
];
|
||||
|
||||
let seq = 1000; // built-in model sequence numbers start at 1000
|
||||
export const DEFAULT_MODELS = [
|
||||
...openaiModels.map((name) => ({
|
||||
|
@ -476,6 +752,50 @@ export const DEFAULT_MODELS = [
|
|||
sorted: 10,
|
||||
},
|
||||
})),
|
||||
...xAIModes.map((name) => ({
|
||||
name,
|
||||
available: true,
|
||||
sorted: seq++,
|
||||
provider: {
|
||||
id: "xai",
|
||||
providerName: "XAI",
|
||||
providerType: "xai",
|
||||
sorted: 11,
|
||||
},
|
||||
})),
|
||||
...chatglmModels.map((name) => ({
|
||||
name,
|
||||
available: true,
|
||||
sorted: seq++,
|
||||
provider: {
|
||||
id: "chatglm",
|
||||
providerName: "ChatGLM",
|
||||
providerType: "chatglm",
|
||||
sorted: 12,
|
||||
},
|
||||
})),
|
||||
...deepseekModels.map((name) => ({
|
||||
name,
|
||||
available: true,
|
||||
sorted: seq++,
|
||||
provider: {
|
||||
id: "deepseek",
|
||||
providerName: "DeepSeek",
|
||||
providerType: "deepseek",
|
||||
sorted: 13,
|
||||
},
|
||||
})),
|
||||
...siliconflowModels.map((name) => ({
|
||||
name,
|
||||
available: true,
|
||||
sorted: seq++,
|
||||
provider: {
|
||||
id: "siliconflow",
|
||||
providerName: "SiliconFlow",
|
||||
providerType: "siliconflow",
|
||||
sorted: 14,
|
||||
},
|
||||
})),
|
||||
] as const;
|
||||
|
||||
export const CHAT_PAGE_SIZE = 15;
|
||||
|
@ -495,11 +815,6 @@ export const internalAllowedWebDavEndpoints = [
|
|||
];
|
||||
|
||||
export const DEFAULT_GA_ID = "G-89WN60ZK2E";
|
||||
export const PLUGINS = [
|
||||
{ name: "Plugins", path: Path.Plugins },
|
||||
{ name: "Stable Diffusion", path: Path.Sd },
|
||||
{ name: "Search Chat", path: Path.SearchChat },
|
||||
];
|
||||
|
||||
export const SAAS_CHAT_URL = "https://nextchat.dev/chat";
|
||||
export const SAAS_CHAT_UTM_URL = "https://nextchat.dev/chat?utm=github";
|
||||
export const SAAS_CHAT_URL = "https://nextchat.club";
|
||||
export const SAAS_CHAT_UTM_URL = "https://nextchat.club?utm=github";
|
||||
|
|
|
@ -26,6 +26,13 @@ declare interface Window {
|
|||
isPermissionGranted(): Promise<boolean>;
|
||||
sendNotification(options: string | Options): void;
|
||||
};
|
||||
updater: {
|
||||
checkUpdate(): Promise<UpdateResult>;
|
||||
installUpdate(): Promise<void>;
|
||||
onUpdaterEvent(
|
||||
handler: (status: UpdateStatusResult) => void,
|
||||
): Promise<UnlistenFn>;
|
||||
};
|
||||
http: {
|
||||
fetch<T>(
|
||||
url: string,
|
||||
|
|
|
@ -0,0 +1,11 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg width="16" height="16" viewBox="0 0 48 48" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M4 28C4 26.8954 4.89543 26 6 26H10V38H6C4.89543 38 4 37.1046 4 36V28Z" fill="none" />
|
||||
<path d="M38 26H42C43.1046 26 44 26.8954 44 28V36C44 37.1046 43.1046 38 42 38H38V26Z"
|
||||
fill="none" />
|
||||
<path
|
||||
d="M10 36V24C10 16.268 16.268 10 24 10C31.732 10 38 16.268 38 24V36M10 26H6C4.89543 26 4 26.8954 4 28V36C4 37.1046 4.89543 38 6 38H10V26ZM38 26H42C43.1046 26 44 26.8954 44 28V36C44 37.1046 43.1046 38 42 38H38V26Z"
|
||||
stroke="#333" stroke-width="4" stroke-linecap="round" stroke-linejoin="round" />
|
||||
<path d="M16 32H20L22 26L26 38L28 32H32" stroke="#333" stroke-width="4" stroke-linecap="round"
|
||||
stroke-linejoin="round" />
|
||||
</svg>
|
|
@ -0,0 +1,14 @@
|
|||
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 30 30" width="1em" xmlns="http://www.w3.org/2000/svg">
|
||||
<title>ChatGLM</title>
|
||||
<rect width="30" height="30" fill="#E7F8FF" rx="6"/>
|
||||
<g transform="translate(3, 3)">
|
||||
<defs>
|
||||
<linearGradient id="lobe-icons-chatglm-fill" x1="-18.756%" x2="70.894%" y1="49.371%" y2="90.944%">
|
||||
<stop offset="0%" stop-color="#504AF4"></stop>
|
||||
<stop offset="100%" stop-color="#3485FF"></stop>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<path d="M9.917 2c4.906 0 10.178 3.947 8.93 10.58-.014.07-.037.14-.057.21l-.003-.277c-.083-3-1.534-8.934-8.87-8.934-3.393 0-8.137 3.054-7.93 8.158-.04 4.778 3.555 8.4 7.95 8.332l.073-.001c1.2-.033 2.763-.429 3.1-1.657.063-.031.26.534.268.598.048.256.112.369.192.34.981-.348 2.286-1.222 1.952-2.38-.176-.61-1.775-.147-1.921-.347.418-.979 2.234-.926 3.153-.716.443.102.657.38 1.012.442.29.052.981-.2.96.242-1.5 3.042-4.893 5.41-8.808 5.41C3.654 22 0 16.574 0 11.737 0 5.947 4.959 2 9.917 2zM9.9 5.3c.484 0 1.125.225 1.38.585 3.669.145 4.313 2.686 4.694 5.444.255 1.838.315 2.3.182 1.387l.083.59c.068.448.554.737.982.516.144-.075.254-.231.328-.47a.2.2 0 01.258-.13l.625.22a.2.2 0 01.124.238 2.172 2.172 0 01-.51.92c-.878.917-2.757.664-3.08-.62-.14-.554-.055-.626-.345-1.242-.292-.621-1.238-.709-1.69-.295-.345.315-.407.805-.406 1.282L12.6 15.9a.9.9 0 01-.9.9h-1.4a.9.9 0 01-.9-.9v-.65a1.15 1.15 0 10-2.3 0v.65a.9.9 0 01-.9.9H4.8a.9.9 0 01-.9-.9l.035-3.239c.012-1.884.356-3.658 2.47-4.134.2-.045.252.13.29.342.025.154.043.252.053.294.701 3.058 1.75 4.299 3.144 3.722l.66-.331.254-.13c.158-.082.25-.131.276-.15.012-.01-.165-.206-.407-.464l-1.012-1.067a8.925 8.925 0 01-.199-.216c-.047-.034-.116.068-.208.306-.074.157-.251.252-.272.326-.013.058.108.298.362.72.164.288.22.508-.31.343-1.04-.8-1.518-2.273-1.684-3.725-.004-.035-.162-1.913-.162-1.913a1.2 1.2 0 011.113-1.281L9.9 5.3zm12.994 8.68c.037.697-.403.704-1.213.591l-1.783-.276c-.265-.053-.385-.099-.313-.147.47-.315 3.268-.93 3.31-.168zm-.915-.083l-.926.042c-.85.077-1.452.24.338.336l.103.003c.815.012 1.264-.359.485-.381zm1.667-3.601h.01c.79.398.067 1.03-.65 1.393-.14.07-.491.176-1.052.315-.241.04-.457.092-.333.16l.01.005c1.952.958-3.123 1.534-2.495 1.285l.38-.148c.68-.266 1.614-.682 1.666-1.337.038-.48 1.253-.442 1.493-.968.048-.106 0-.236-.144-.389-.05-.047-.094-.094-.107-.148-.073-.305.7-.431 1.222-.168zm-2.568-.474c-.135 1.198-2.479 4.192-1.949 2.863l.017-.042c.298-.717.376-2.221 1.337-3.221.25-.26.636.035.595.4zm-7.976-.253c.02-.694 1.002-.968 1.346-.347.01-1.274-1.941-.768-1.346.347z"
|
||||
fill="url(#lobe-icons-chatglm-fill)" fill-rule="evenodd"></path>
|
||||
</g>
|
||||
</svg>
|
|
@ -0,0 +1,8 @@
|
|||
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 30 30" width="1em" xmlns="http://www.w3.org/2000/svg">
|
||||
<title>Claude</title>
|
||||
<rect width="30" height="30" fill="#E7F8FF" rx="6"/>
|
||||
<g transform="translate(3, 3)">
|
||||
<path d="M4.709 15.955l4.72-2.647.08-.23-.08-.128H9.2l-.79-.048-2.698-.073-2.339-.097-2.266-.122-.571-.121L0 11.784l.055-.352.48-.321.686.06 1.52.103 2.278.158 1.652.097 2.449.255h.389l.055-.157-.134-.098-.103-.097-2.358-1.596-2.552-1.688-1.336-.972-.724-.491-.364-.462-.158-1.008.656-.722.881.06.225.061.893.686 1.908 1.476 2.491 1.833.365.304.145-.103.019-.073-.164-.274-1.355-2.446-1.446-2.49-.644-1.032-.17-.619a2.97 2.97 0 01-.104-.729L6.283.134 6.696 0l.996.134.42.364.62 1.414 1.002 2.229 1.555 3.03.456.898.243.832.091.255h.158V9.01l.128-1.706.237-2.095.23-2.695.08-.76.376-.91.747-.492.584.28.48.685-.067.444-.286 1.851-.559 2.903-.364 1.942h.212l.243-.242.985-1.306 1.652-2.064.73-.82.85-.904.547-.431h1.033l.76 1.129-.34 1.166-1.064 1.347-.881 1.142-1.264 1.7-.79 1.36.073.11.188-.02 2.856-.606 1.543-.28 1.841-.315.833.388.091.395-.328.807-1.969.486-2.309.462-3.439.813-.042.03.049.061 1.549.146.662.036h1.622l3.02.225.79.522.474.638-.079.485-1.215.62-1.64-.389-3.829-.91-1.312-.329h-.182v.11l1.093 1.068 2.006 1.81 2.509 2.33.127.578-.322.455-.34-.049-2.205-1.657-.851-.747-1.926-1.62h-.128v.17l.444.649 2.345 3.521.122 1.08-.17.353-.608.213-.668-.122-1.374-1.925-1.415-2.167-1.143-1.943-.14.08-.674 7.254-.316.37-.729.28-.607-.461-.322-.747.322-1.476.389-1.924.315-1.53.286-1.9.17-.632-.012-.042-.14.018-1.434 1.967-2.18 2.945-1.726 1.845-.414.164-.717-.37.067-.662.401-.589 2.388-3.036 1.44-1.882.93-1.086-.006-.158h-.055L4.132 18.56l-1.13.146-.487-.456.061-.746.231-.243 1.908-1.312-.006.006z"
|
||||
fill="#D97757" fill-rule="nonzero"></path>
|
||||
</g>
|
||||
</svg>
|
|
@ -0,0 +1,8 @@
|
|||
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 30 30" width="1em" xmlns="http://www.w3.org/2000/svg">
|
||||
<title>DeepSeek</title>
|
||||
<rect width="30" height="30" fill="#E7F8FF" rx="6"/>
|
||||
<g transform="translate(4, 4)">
|
||||
<path d="M23.748 4.482c-.254-.124-.364.113-.512.234-.051.039-.094.09-.137.136-.372.397-.806.657-1.373.626-.829-.046-1.537.214-2.163.848-.133-.782-.575-1.248-1.247-1.548-.352-.156-.708-.311-.955-.65-.172-.241-.219-.51-.305-.774-.055-.16-.11-.323-.293-.35-.2-.031-.278.136-.356.276-.313.572-.434 1.202-.422 1.84.027 1.436.633 2.58 1.838 3.393.137.093.172.187.129.323-.082.28-.18.552-.266.833-.055.179-.137.217-.329.14a5.526 5.526 0 01-1.736-1.18c-.857-.828-1.631-1.742-2.597-2.458a11.365 11.365 0 00-.689-.471c-.985-.957.13-1.743.388-1.836.27-.098.093-.432-.779-.428-.872.004-1.67.295-2.687.684a3.055 3.055 0 01-.465.137 9.597 9.597 0 00-2.883-.102c-1.885.21-3.39 1.102-4.497 2.623C.082 8.606-.231 10.684.152 12.85c.403 2.284 1.569 4.175 3.36 5.653 1.858 1.533 3.997 2.284 6.438 2.14 1.482-.085 3.133-.284 4.994-1.86.47.234.962.327 1.78.397.63.059 1.236-.03 1.705-.128.735-.156.684-.837.419-.961-2.155-1.004-1.682-.595-2.113-.926 1.096-1.296 2.746-2.642 3.392-7.003.05-.347.007-.565 0-.845-.004-.17.035-.237.23-.256a4.173 4.173 0 001.545-.475c1.396-.763 1.96-2.015 2.093-3.517.02-.23-.004-.467-.247-.588zM11.581 18c-2.089-1.642-3.102-2.183-3.52-2.16-.392.024-.321.471-.235.763.09.288.207.486.371.739.114.167.192.416-.113.603-.673.416-1.842-.14-1.897-.167-1.361-.802-2.5-1.86-3.301-3.307-.774-1.393-1.224-2.887-1.298-4.482-.02-.386.093-.522.477-.592a4.696 4.696 0 011.529-.039c2.132.312 3.946 1.265 5.468 2.774.868.86 1.525 1.887 2.202 2.891.72 1.066 1.494 2.082 2.48 2.914.348.292.625.514.891.677-.802.09-2.14.11-3.054-.614zm1-6.44a.306.306 0 01.415-.287.302.302 0 01.2.288.306.306 0 01-.31.307.303.303 0 01-.304-.308zm3.11 1.596c-.2.081-.399.151-.59.16a1.245 1.245 0 01-.798-.254c-.274-.23-.47-.358-.552-.758a1.73 1.73 0 01.016-.588c.07-.327-.008-.537-.239-.727-.187-.156-.426-.199-.688-.199a.559.559 0 01-.254-.078c-.11-.054-.2-.19-.114-.358.028-.054.16-.186.192-.21.356-.202.767-.136 1.146.016.352.144.618.408 1.001.782.391.451.462.576.685.914.176.265.336.537.445.848.067.195-.019.354-.25.452z"
|
||||
fill="#4D6BFE"></path>
|
||||
</g>
|
||||
</svg>
|
|
@ -0,0 +1,27 @@
|
|||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="30" height="30" fill="none"
|
||||
viewBox="0 0 30 30">
|
||||
<defs>
|
||||
<rect id="path_0" width="30" height="30" x="0" y="0"/>
|
||||
<rect id="path_1" width="20.455" height="20.455" x="0" y="0"/>
|
||||
</defs>
|
||||
<g opacity="1" transform="translate(0 0) rotate(0 14.999999999999998 14.999999999999998)">
|
||||
<rect width="30" height="30" x="0" y="0" fill="#E7F8FF" opacity="1" rx="10"
|
||||
transform="translate(0 0) rotate(0 14.999999999999998 14.999999999999998)"/>
|
||||
<mask id="bg-mask-0" fill="#fff">
|
||||
<use xlink:href="#path_0"/>
|
||||
</mask>
|
||||
<g mask="url(#bg-mask-0)">
|
||||
<g opacity="1"
|
||||
transform="translate(4.772727272727272 4.772727272727273) rotate(0 10.227272727272725 10.227272727272725)">
|
||||
<mask id="bg-mask-1" fill="#fff">
|
||||
<use xlink:href="#path_1"/>
|
||||
</mask>
|
||||
<g mask="url(#bg-mask-1)">
|
||||
<path id="分组 1" fill-rule="evenodd" style="fill:#1f948c"
|
||||
d="M19.11 8.37L19.11 8.37C19.28 7.85 19.37 7.31 19.37 6.76C19.37 5.86 19.13 4.97 18.66 4.19C17.73 2.59 16 1.6 14.13 1.6C13.76 1.6 13.4 1.64 13.04 1.71C12.06 0.62 10.65 0 9.17 0L9.14 0L9.13 0C6.86 0 4.86 1.44 4.16 3.57C2.7 3.86 1.44 4.76 0.71 6.04C0.24 6.83 0 7.72 0 8.63C0 9.9 0.48 11.14 1.35 12.08C1.17 12.6 1.08 13.15 1.08 13.69C1.08 14.6 1.33 15.49 1.79 16.27C2.92 18.21 5.2 19.21 7.42 18.74C8.4 19.83 9.8 20.45 11.28 20.45L11.31 20.45L11.33 20.45C13.59 20.45 15.6 19.01 16.3 16.88C17.76 16.59 19.01 15.69 19.75 14.41C20.21 13.63 20.45 12.74 20.45 11.83C20.45 10.55 19.97 9.32 19.11 8.37Z M8.94734 18.1579C8.90734 18.1879 8.86734 18.2079 8.82734 18.2279C9.52734 18.8079 10.3973 19.1179 11.3073 19.1179L11.3173 19.1179C13.4573 19.1179 15.1973 17.3979 15.1973 15.2879L15.1973 10.5279C15.1973 10.5079 15.1773 10.4879 15.1573 10.4779L13.4173 9.48792L13.4173 15.2379C13.4173 15.4679 13.2873 15.6879 13.0773 15.8079L8.94734 18.1579Z M8.27654 17.0048L12.4465 14.6248C12.4665 14.6148 12.4765 14.5948 12.4765 14.5748L12.4765 14.5748L12.4765 12.5848L7.43654 15.4548C7.22654 15.5748 6.96654 15.5748 6.75654 15.4548L2.62654 13.1048C2.58654 13.0848 2.53654 13.0448 2.50654 13.0348C2.46654 13.2448 2.44654 13.4648 2.44654 13.6848C2.44654 14.3548 2.62654 15.0148 2.96654 15.6048L2.96654 15.5948C3.66654 16.7848 4.94654 17.5148 6.33654 17.5148C7.01654 17.5148 7.68654 17.3348 8.27654 17.0048Z M3.90324 5.16818C3.90324 5.12818 3.90324 5.06818 3.90324 5.02818C3.05324 5.33818 2.33324 5.92818 1.88324 6.70818L1.88324 6.70818C1.54324 7.28818 1.36324 7.94818 1.36324 8.61818C1.36324 9.98818 2.10324 11.2582 3.30324 11.9482L7.47324 14.3182C7.49324 14.3282 7.51324 14.3282 7.53324 14.3182L9.28324 13.3182L4.24324 10.4482C4.03324 10.3382 3.90324 10.1182 3.90324 9.87818L3.90324 9.87818L3.90324 5.16818Z M17.1561 8.50521L12.9761 6.1252C12.9561 6.1252 12.9361 6.1252 12.9161 6.1352L11.1761 7.1252L16.2161 9.9952C16.4261 10.1152 16.5561 10.3352 16.5561 10.5752C16.5561 10.5752 16.5561 10.5752 16.5561 10.5752L16.5561 15.4252C18.0761 14.8652 19.0961 13.4352 19.0961 11.8252C19.0961 10.4552 18.3561 9.1952 17.1561 8.50521Z M8.01418 5.82927C7.99418 5.83927 7.98418 5.85927 7.98418 5.87927L7.98418 5.87927L7.98418 7.86927L13.0242 4.99927C13.1242 4.93927 13.2442 4.90927 13.3642 4.90927C13.4842 4.90927 13.5942 4.93927 13.7042 4.99927L17.8342 7.34927C17.8742 7.36927 17.9142 7.39927 17.9542 7.41927L17.9542 7.41927C17.9842 7.20927 18.0042 6.98927 18.0042 6.76927C18.0042 4.65927 16.2642 2.93927 14.1242 2.93927C13.4442 2.93927 12.7742 3.11927 12.1842 3.44927L8.01418 5.82927Z M9.14676 1.33731C6.99676 1.33731 5.25676 3.05731 5.25676 5.16731L5.25676 9.92731C5.25676 9.94731 5.27676 9.95731 5.28676 9.96731L7.03676 10.9673L7.03676 5.22731L7.03676 5.21731C7.03676 4.98731 7.16676 4.76731 7.37676 4.64731L11.5068 2.29731C11.5468 2.26731 11.5968 2.23731 11.6268 2.22731C10.9268 1.64731 10.0468 1.33731 9.14676 1.33731Z M7.98345 11.5093L10.2235 12.7793L12.4735 11.5093L12.4735 8.9493L10.2235 7.6693L7.98345 8.9493L7.98345 11.5093Z"
|
||||
opacity="1" transform="translate(0 0) rotate(0 10.227272727272725 10.227272727272725)"/>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
|
@ -0,0 +1,14 @@
|
|||
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 30 30" width="1em" xmlns="http://www.w3.org/2000/svg">
|
||||
<title>Doubao</title>
|
||||
<rect width="30" height="30" fill="#E7F8FF" rx="6"/>
|
||||
<g transform="translate(3, 3)">
|
||||
<path d="M5.31 15.756c.172-3.75 1.883-5.999 2.549-6.739-3.26 2.058-5.425 5.658-6.358 8.308v1.12C1.501 21.513 4.226 24 7.59 24a6.59 6.59 0 002.2-.375c.353-.12.7-.248 1.039-.378.913-.899 1.65-1.91 2.243-2.992-4.877 2.431-7.974.072-7.763-4.5l.002.001z"
|
||||
fill="#1E37FC"></path>
|
||||
<path d="M22.57 10.283c-1.212-.901-4.109-2.404-7.397-2.8.295 3.792.093 8.766-2.1 12.773a12.782 12.782 0 01-2.244 2.992c3.764-1.448 6.746-3.457 8.596-5.219 2.82-2.683 3.353-5.178 3.361-6.66a2.737 2.737 0 00-.216-1.084v-.002z"
|
||||
fill="#37E1BE"></path>
|
||||
<path d="M14.303 1.867C12.955.7 11.248 0 9.39 0 7.532 0 5.883.677 4.545 1.807 2.791 3.29 1.627 5.557 1.5 8.125v9.201c.932-2.65 3.097-6.25 6.357-8.307.5-.318 1.025-.595 1.569-.829 1.883-.801 3.878-.932 5.746-.706-.222-2.83-.718-5.002-.87-5.617h.001z"
|
||||
fill="#A569FF"></path>
|
||||
<path d="M17.305 4.961a199.47 199.47 0 01-1.08-1.094c-.202-.213-.398-.419-.586-.622l-1.333-1.378c.151.615.648 2.786.869 5.617 3.288.395 6.185 1.898 7.396 2.8-1.306-1.275-3.475-3.487-5.266-5.323z"
|
||||
fill="#1E37FC"></path>
|
||||
</g>
|
||||
</svg>
|
|
@ -0,0 +1,15 @@
|
|||
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 30 30" width="1em" xmlns="http://www.w3.org/2000/svg">
|
||||
<title>Gemini</title>
|
||||
<rect width="30" height="30" fill="#E7F8FF" rx="6"/>
|
||||
<g transform="translate(3, 3)">
|
||||
<defs>
|
||||
<linearGradient id="lobe-icons-gemini-fill" x1="0%" x2="68.73%" y1="100%" y2="30.395%">
|
||||
<stop offset="0%" stop-color="#1C7DFF"></stop>
|
||||
<stop offset="52.021%" stop-color="#1C69FF"></stop>
|
||||
<stop offset="100%" stop-color="#F0DCD6"></stop>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<path d="M12 24A14.304 14.304 0 000 12 14.304 14.304 0 0012 0a14.305 14.305 0 0012 12 14.305 14.305 0 00-12 12"
|
||||
fill="url(#lobe-icons-gemini-fill)" fill-rule="nonzero"></path>
|
||||
</g>
|
||||
</svg>
|
|
@ -0,0 +1,15 @@
|
|||
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 30 30" width="1em" xmlns="http://www.w3.org/2000/svg">
|
||||
<title>Gemma</title>
|
||||
<rect width="30" height="30" fill="#E7F8FF" rx="6"/>
|
||||
<g transform="translate(3, 3)">
|
||||
<defs>
|
||||
<linearGradient id="lobe-icons-gemma-fill" x1="24.419%" x2="75.194%" y1="75.581%" y2="25.194%">
|
||||
<stop offset="0%" stop-color="#446EFF"></stop>
|
||||
<stop offset="36.661%" stop-color="#2E96FF"></stop>
|
||||
<stop offset="83.221%" stop-color="#B1C5FF"></stop>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<path d="M12.34 5.953a8.233 8.233 0 01-.247-1.125V3.72a8.25 8.25 0 015.562 2.232H12.34zm-.69 0c.113-.373.199-.755.257-1.145V3.72a8.25 8.25 0 00-5.562 2.232h5.304zm-5.433.187h5.373a7.98 7.98 0 01-.267.696 8.41 8.41 0 01-1.76 2.65L6.216 6.14zm-.264-.187H2.977v.187h2.915a8.436 8.436 0 00-2.357 5.767H0v.186h3.535a8.436 8.436 0 002.357 5.767H2.977v.186h2.976v2.977h.187v-2.915a8.436 8.436 0 005.767 2.357V24h.186v-3.535a8.436 8.436 0 005.767-2.357v2.915h.186v-2.977h2.977v-.186h-2.915a8.436 8.436 0 002.357-5.767H24v-.186h-3.535a8.436 8.436 0 00-2.357-5.767h2.915v-.187h-2.977V2.977h-.186v2.915a8.436 8.436 0 00-5.767-2.357V0h-.186v3.535A8.436 8.436 0 006.14 5.892V2.977h-.187v2.976zm6.14 14.326a8.25 8.25 0 005.562-2.233H12.34c-.108.367-.19.743-.247 1.126v1.107zm-.186-1.087a8.015 8.015 0 00-.258-1.146H6.345a8.25 8.25 0 005.562 2.233v-1.087zm-8.186-7.285h1.107a8.23 8.23 0 001.125-.247V6.345a8.25 8.25 0 00-2.232 5.562zm1.087.186H3.72a8.25 8.25 0 002.232 5.562v-5.304a8.012 8.012 0 00-1.145-.258zm15.47-.186a8.25 8.25 0 00-2.232-5.562v5.315c.367.108.743.19 1.126.247h1.107zm-1.086.186c-.39.058-.772.144-1.146.258v5.304a8.25 8.25 0 002.233-5.562h-1.087zm-1.332 5.69V12.41a7.97 7.97 0 00-.696.267 8.409 8.409 0 00-2.65 1.76l3.346 3.346zm0-6.18v-5.45l-.012-.013h-5.451c.076.235.162.468.26.696a8.698 8.698 0 001.819 2.688 8.698 8.698 0 002.688 1.82c.228.097.46.183.696.259zM6.14 17.848V12.41c.235.078.468.167.696.267a8.403 8.403 0 012.688 1.799 8.404 8.404 0 011.799 2.688c.1.228.19.46.267.696H6.152l-.012-.012zm0-6.245V6.326l3.29 3.29a8.716 8.716 0 01-2.594 1.728 8.14 8.14 0 01-.696.259zm6.257 6.257h5.277l-3.29-3.29a8.716 8.716 0 00-1.728 2.594 8.135 8.135 0 00-.259.696zm-2.347-7.81a9.435 9.435 0 01-2.88 1.96 9.14 9.14 0 012.88 1.94 9.14 9.14 0 011.94 2.88 9.435 9.435 0 011.96-2.88 9.14 9.14 0 012.88-1.94 9.435 9.435 0 01-2.88-1.96 9.434 9.434 0 01-1.96-2.88 9.14 9.14 0 01-1.94 2.88z"
|
||||
fill="url(#lobe-icons-gemma-fill)" fill-rule="evenodd"></path>
|
||||
</g>
|
||||
</svg>
|
|
@ -0,0 +1,8 @@
|
|||
<svg fill="#333" fill-rule="evenodd" height="1em" style="flex:none;line-height:1" viewBox="0 0 30 30"
|
||||
width="1em" xmlns="http://www.w3.org/2000/svg">
|
||||
<title>Grok</title>
|
||||
<rect width="30" height="30" fill="#E7F8FF" rx="6"/>
|
||||
<g transform="translate(3, 3)">
|
||||
<path d="M6.469 8.776L16.512 23h-4.464L2.005 8.776H6.47zm-.004 7.9l2.233 3.164L6.467 23H2l4.465-6.324zM22 2.582V23h-3.659V7.764L22 2.582zM22 1l-9.952 14.095-2.233-3.163L17.533 1H22z"></path>
|
||||
</g>
|
||||
</svg>
|
|
@ -0,0 +1,17 @@
|
|||
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 30 30" width="1em" xmlns="http://www.w3.org/2000/svg">
|
||||
<title>Hunyuan</title>
|
||||
<rect width="30" height="30" fill="#E7F8FF" rx="6"/>
|
||||
<g transform="translate(3, 3)">
|
||||
<g fill="none" fill-rule="evenodd">
|
||||
<circle cx="12" cy="12" fill="#0055E9" r="12"></circle>
|
||||
<path d="M12 0c.518 0 1.028.033 1.528.096A6.188 6.188 0 0112.12 12.28l-.12.001c-2.99 0-5.242 2.179-5.554 5.11-.223 2.086.353 4.412 2.242 6.146C3.672 22.1 0 17.479 0 12 0 5.373 5.373 0 12 0z"
|
||||
fill="#A8DFF5"></path>
|
||||
<path d="M5.286 5a2.438 2.438 0 01.682 3.38c-3.962 5.966-3.215 10.743 2.648 15.136C3.636 22.056 0 17.452 0 12c0-1.787.39-3.482 1.09-5.006.253-.435.525-.872.817-1.311A2.438 2.438 0 015.286 5z"
|
||||
fill="#0055E9"></path>
|
||||
<path d="M12.98.04c.272.021.543.053.81.093.583.106 1.117.254 1.538.44 6.638 2.927 8.07 10.052 1.748 15.642a4.125 4.125 0 01-5.822-.358c-1.51-1.706-1.3-4.184.357-5.822.858-.848 3.108-1.223 4.045-2.441 1.257-1.634 2.122-6.009-2.523-7.506L12.98.039z"
|
||||
fill="#00BCFF"></path>
|
||||
<path d="M13.528.096A6.187 6.187 0 0112 12.281a5.75 5.75 0 00-1.71.255c.147-.905.595-1.784 1.321-2.501.858-.848 3.108-1.223 4.045-2.441 1.27-1.651 2.14-6.104-2.676-7.554.184.014.367.033.548.056z"
|
||||
fill="#ECECEE"></path>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
|
@ -0,0 +1,93 @@
|
|||
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 30 30" width="1em" xmlns="http://www.w3.org/2000/svg">
|
||||
<title>Meta</title>
|
||||
<rect width="30" height="30" fill="#E7F8FF" rx="6"/>
|
||||
<g transform="translate(3, 3)">
|
||||
<defs>
|
||||
<linearGradient id="lobe-icons-meta-fill-0" x1="75.897%" x2="26.312%" y1="89.199%" y2="12.194%">
|
||||
<stop offset=".06%" stop-color="#0867DF"></stop>
|
||||
<stop offset="45.39%" stop-color="#0668E1"></stop>
|
||||
<stop offset="85.91%" stop-color="#0064E0"></stop>
|
||||
</linearGradient>
|
||||
<linearGradient id="lobe-icons-meta-fill-1" x1="21.67%" x2="97.068%" y1="75.874%" y2="23.985%">
|
||||
<stop offset="13.23%" stop-color="#0064DF"></stop>
|
||||
<stop offset="99.88%" stop-color="#0064E0"></stop>
|
||||
</linearGradient>
|
||||
<linearGradient id="lobe-icons-meta-fill-2" x1="38.263%" x2="60.895%" y1="89.127%" y2="16.131%">
|
||||
<stop offset="1.47%" stop-color="#0072EC"></stop>
|
||||
<stop offset="68.81%" stop-color="#0064DF"></stop>
|
||||
</linearGradient>
|
||||
<linearGradient id="lobe-icons-meta-fill-3" x1="47.032%" x2="52.15%" y1="90.19%" y2="15.745%">
|
||||
<stop offset="7.31%" stop-color="#007CF6"></stop>
|
||||
<stop offset="99.43%" stop-color="#0072EC"></stop>
|
||||
</linearGradient>
|
||||
<linearGradient id="lobe-icons-meta-fill-4" x1="52.155%" x2="47.591%" y1="58.301%" y2="37.004%">
|
||||
<stop offset="7.31%" stop-color="#007FF9"></stop>
|
||||
<stop offset="100%" stop-color="#007CF6"></stop>
|
||||
</linearGradient>
|
||||
<linearGradient id="lobe-icons-meta-fill-5" x1="37.689%" x2="61.961%" y1="12.502%" y2="63.624%">
|
||||
<stop offset="7.31%" stop-color="#007FF9"></stop>
|
||||
<stop offset="100%" stop-color="#0082FB"></stop>
|
||||
</linearGradient>
|
||||
<linearGradient id="lobe-icons-meta-fill-6" x1="34.808%" x2="62.313%" y1="68.859%" y2="23.174%">
|
||||
<stop offset="27.99%" stop-color="#007FF8"></stop>
|
||||
<stop offset="91.41%" stop-color="#0082FB"></stop>
|
||||
</linearGradient>
|
||||
<linearGradient id="lobe-icons-meta-fill-7" x1="43.762%" x2="57.602%" y1="6.235%" y2="98.514%">
|
||||
<stop offset="0%" stop-color="#0082FB"></stop>
|
||||
<stop offset="99.95%" stop-color="#0081FA"></stop>
|
||||
</linearGradient>
|
||||
<linearGradient id="lobe-icons-meta-fill-8" x1="60.055%" x2="39.88%" y1="4.661%" y2="69.077%">
|
||||
<stop offset="6.19%" stop-color="#0081FA"></stop>
|
||||
<stop offset="100%" stop-color="#0080F9"></stop>
|
||||
</linearGradient>
|
||||
<linearGradient id="lobe-icons-meta-fill-9" x1="30.282%" x2="61.081%" y1="59.32%" y2="33.244%">
|
||||
<stop offset="0%" stop-color="#027AF3"></stop>
|
||||
<stop offset="100%" stop-color="#0080F9"></stop>
|
||||
</linearGradient>
|
||||
<linearGradient id="lobe-icons-meta-fill-10" x1="20.433%" x2="82.112%" y1="50.001%" y2="50.001%">
|
||||
<stop offset="0%" stop-color="#0377EF"></stop>
|
||||
<stop offset="99.94%" stop-color="#0279F1"></stop>
|
||||
</linearGradient>
|
||||
<linearGradient id="lobe-icons-meta-fill-11" x1="40.303%" x2="72.394%" y1="35.298%" y2="57.811%">
|
||||
<stop offset=".19%" stop-color="#0471E9"></stop>
|
||||
<stop offset="100%" stop-color="#0377EF"></stop>
|
||||
</linearGradient>
|
||||
<linearGradient id="lobe-icons-meta-fill-12" x1="32.254%" x2="68.003%" y1="19.719%" y2="84.908%">
|
||||
<stop offset="27.65%" stop-color="#0867DF"></stop>
|
||||
<stop offset="100%" stop-color="#0471E9"></stop>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<g fill="none" fill-rule="nonzero">
|
||||
<path d="M6.897 4h-.024l-.031 2.615h.022c1.715 0 3.046 1.357 5.94 6.246l.175.297.012.02 1.62-2.438-.012-.019a48.763 48.763 0 00-1.098-1.716 28.01 28.01 0 00-1.175-1.629C10.413 4.932 8.812 4 6.896 4z"
|
||||
fill="url(#lobe-icons-meta-fill-0)"></path>
|
||||
<path d="M6.873 4C4.95 4.01 3.247 5.258 2.02 7.17a4.352 4.352 0 00-.01.017l2.254 1.231.011-.017c.718-1.083 1.61-1.774 2.568-1.785h.021L6.896 4h-.023z"
|
||||
fill="url(#lobe-icons-meta-fill-1)"></path>
|
||||
<path d="M2.019 7.17l-.011.017C1.2 8.447.598 9.995.274 11.664l-.005.022 2.534.6.004-.022c.27-1.467.786-2.828 1.456-3.845l.011-.017L2.02 7.17z"
|
||||
fill="url(#lobe-icons-meta-fill-2)"></path>
|
||||
<path d="M2.807 12.264l-2.533-.6-.005.022c-.177.918-.267 1.851-.269 2.786v.023l2.598.233v-.023a12.591 12.591 0 01.21-2.44z"
|
||||
fill="url(#lobe-icons-meta-fill-3)"></path>
|
||||
<path d="M2.677 15.537a5.462 5.462 0 01-.079-.813v-.022L0 14.468v.024a8.89 8.89 0 00.146 1.652l2.535-.585a4.106 4.106 0 01-.004-.022z"
|
||||
fill="url(#lobe-icons-meta-fill-4)"></path>
|
||||
<path d="M3.27 16.89c-.284-.31-.484-.756-.589-1.328l-.004-.021-2.535.585.004.021c.192 1.01.568 1.85 1.106 2.487l.014.017 2.018-1.745a2.106 2.106 0 01-.015-.016z"
|
||||
fill="url(#lobe-icons-meta-fill-5)"></path>
|
||||
<path d="M10.78 9.654c-1.528 2.35-2.454 3.825-2.454 3.825-2.035 3.2-2.739 3.917-3.871 3.917a1.545 1.545 0 01-1.186-.508l-2.017 1.744.014.017C2.01 19.518 3.058 20 4.356 20c1.963 0 3.374-.928 5.884-5.33l1.766-3.13a41.283 41.283 0 00-1.227-1.886z"
|
||||
fill="#0082FB"></path>
|
||||
<path d="M13.502 5.946l-.016.016c-.4.43-.786.908-1.16 1.416.378.483.768 1.024 1.175 1.63.48-.743.928-1.345 1.367-1.807l.016-.016-1.382-1.24z"
|
||||
fill="url(#lobe-icons-meta-fill-6)"></path>
|
||||
<path d="M20.918 5.713C19.853 4.633 18.583 4 17.225 4c-1.432 0-2.637.787-3.723 1.944l-.016.016 1.382 1.24.016-.017c.715-.747 1.408-1.12 2.176-1.12.826 0 1.6.39 2.27 1.075l.015.016 1.589-1.425-.016-.016z"
|
||||
fill="#0082FB"></path>
|
||||
<path d="M23.998 14.125c-.06-3.467-1.27-6.566-3.064-8.396l-.016-.016-1.588 1.424.015.016c1.35 1.392 2.277 3.98 2.361 6.971v.023h2.292v-.022z"
|
||||
fill="url(#lobe-icons-meta-fill-7)"></path>
|
||||
<path d="M23.998 14.15v-.023h-2.292v.022c.004.14.006.282.006.424 0 .815-.121 1.474-.368 1.95l-.011.022 1.708 1.782.013-.02c.62-.96.946-2.293.946-3.91 0-.083 0-.165-.002-.247z"
|
||||
fill="url(#lobe-icons-meta-fill-8)"></path>
|
||||
<path d="M21.344 16.52l-.011.02c-.214.402-.519.67-.917.787l.778 2.462a3.493 3.493 0 00.438-.182 3.558 3.558 0 001.366-1.218l.044-.065.012-.02-1.71-1.784z"
|
||||
fill="url(#lobe-icons-meta-fill-9)"></path>
|
||||
<path d="M19.92 17.393c-.262 0-.492-.039-.718-.14l-.798 2.522c.449.153.927.222 1.46.222.492 0 .943-.073 1.352-.215l-.78-2.462c-.167.05-.341.075-.517.073z"
|
||||
fill="url(#lobe-icons-meta-fill-10)"></path>
|
||||
<path d="M18.323 16.534l-.014-.017-1.836 1.914.016.017c.637.682 1.246 1.105 1.937 1.337l.797-2.52c-.291-.125-.573-.353-.9-.731z"
|
||||
fill="url(#lobe-icons-meta-fill-11)"></path>
|
||||
<path d="M18.309 16.515c-.55-.642-1.232-1.712-2.303-3.44l-1.396-2.336-.011-.02-1.62 2.438.012.02.989 1.668c.959 1.61 1.74 2.774 2.493 3.585l.016.016 1.834-1.914a2.353 2.353 0 01-.014-.017z"
|
||||
fill="url(#lobe-icons-meta-fill-12)"></path>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
|
@ -0,0 +1,15 @@
|
|||
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 30 30" width="1em" xmlns="http://www.w3.org/2000/svg">
|
||||
<title>Mistral</title>
|
||||
<rect width="30" height="30" fill="#E7F8FF" rx="6"/>
|
||||
<g transform="translate(3, 3)">
|
||||
<g fill="none" fill-rule="nonzero">
|
||||
<path d="M15 6v4h-2V6h2zm4-4v4h-2V2h2zM3 2H1h2zM1 2h2v20H1V2zm8 12h2v4H9v-4zm8 0h2v8h-2v-8z"
|
||||
fill="#000"></path>
|
||||
<path d="M19 2h4v4h-4V2zM3 2h4v4H3V2z" fill="#F7D046"></path>
|
||||
<path d="M15 10V6h8v4h-8zM3 10V6h8v4H3z" fill="#F2A73B"></path>
|
||||
<path d="M3 14v-4h20v4z" fill="#EE792F"></path>
|
||||
<path d="M11 14h4v4h-4v-4zm8 0h4v4h-4v-4zM3 14h4v4H3v-4z" fill="#EB5829"></path>
|
||||
<path d="M19 18h4v4h-4v-4zM3 18h4v4H3v-4z" fill="#EA3326"></path>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
|
@ -0,0 +1,8 @@
|
|||
<svg fill="#333" fill-rule="evenodd" height="1em" style="flex:none;line-height:1" viewBox="0 0 30 30"
|
||||
width="1em" xmlns="http://www.w3.org/2000/svg">
|
||||
<title>MoonshotAI</title>
|
||||
<rect width="30" height="30" fill="#E7F8FF" rx="6"/>
|
||||
<g transform="translate(3, 3)">
|
||||
<path d="M1.052 16.916l9.539 2.552a21.007 21.007 0 00.06 2.033l5.956 1.593a11.997 11.997 0 01-5.586.865l-.18-.016-.044-.004-.084-.009-.094-.01a11.605 11.605 0 01-.157-.02l-.107-.014-.11-.016a11.962 11.962 0 01-.32-.051l-.042-.008-.075-.013-.107-.02-.07-.015-.093-.019-.075-.016-.095-.02-.097-.023-.094-.022-.068-.017-.088-.022-.09-.024-.095-.025-.082-.023-.109-.03-.062-.02-.084-.025-.093-.028-.105-.034-.058-.019-.08-.026-.09-.031-.066-.024a6.293 6.293 0 01-.044-.015l-.068-.025-.101-.037-.057-.022-.08-.03-.087-.035-.088-.035-.079-.032-.095-.04-.063-.028-.063-.027a5.655 5.655 0 01-.041-.018l-.066-.03-.103-.047-.052-.024-.096-.046-.062-.03-.084-.04-.086-.044-.093-.047-.052-.027-.103-.055-.057-.03-.058-.032a6.49 6.49 0 01-.046-.026l-.094-.053-.06-.034-.051-.03-.072-.041-.082-.05-.093-.056-.052-.032-.084-.053-.061-.039-.079-.05-.07-.047-.053-.035a7.785 7.785 0 01-.054-.036l-.044-.03-.044-.03a6.066 6.066 0 01-.04-.028l-.057-.04-.076-.054-.069-.05-.074-.054-.056-.042-.076-.057-.076-.059-.086-.067-.045-.035-.064-.052-.074-.06-.089-.073-.046-.039-.046-.039a7.516 7.516 0 01-.043-.037l-.045-.04-.061-.053-.07-.062-.068-.06-.062-.058-.067-.062-.053-.05-.088-.084a13.28 13.28 0 01-.099-.097l-.029-.028-.041-.042-.069-.07-.05-.051-.05-.053a6.457 6.457 0 01-.168-.179l-.08-.088-.062-.07-.071-.08-.042-.049-.053-.062-.058-.068-.046-.056a7.175 7.175 0 01-.027-.033l-.045-.055-.066-.082-.041-.052-.05-.064-.02-.025a11.99 11.99 0 01-1.44-2.402zm-1.02-5.794l11.353 3.037a20.468 20.468 0 00-.469 2.011l10.817 2.894a12.076 12.076 0 01-1.845 2.005L.657 15.923l-.016-.046-.035-.104a11.965 11.965 0 01-.05-.153l-.007-.023a11.896 11.896 0 01-.207-.741l-.03-.126-.018-.08-.021-.097-.018-.081-.018-.09-.017-.084-.018-.094c-.026-.141-.05-.283-.071-.426l-.017-.118-.011-.083-.013-.102a12.01 12.01 0 01-.019-.161l-.005-.047a12.12 12.12 0 01-.034-2.145zm1.593-5.15l11.948 3.196c-.368.605-.705 1.231-1.01 1.875l11.295 3.022c-.142.82-.368 1.612-.668 2.365l-11.55-3.09L.124 10.26l.015-.1.008-.049.01-.067.015-.087.018-.098c.026-.148.056-.295.088-.442l.028-.124.02-.085.024-.097c.022-.09.045-.18.07-.268l.028-.102.023-.083.03-.1.025-.082.03-.096.026-.082.031-.095a11.896 11.896 0 011.01-2.232zm4.442-4.4L17.352 4.59a20.77 20.77 0 00-1.688 1.721l7.823 2.093c.267.852.442 1.744.513 2.665L2.106 5.213l.045-.065.027-.04.04-.055.046-.065.055-.076.054-.072.064-.086.05-.065.057-.073.055-.07.06-.074.055-.069.065-.077.054-.066.066-.077.053-.06.072-.082.053-.06.067-.074.054-.058.073-.078.058-.06.063-.067.168-.17.1-.098.059-.056.076-.071a12.084 12.084 0 012.272-1.677zM12.017 0h.097l.082.001.069.001.054.002.068.002.046.001.076.003.047.002.06.003.054.002.087.005.105.007.144.011.088.007.044.004.077.008.082.008.047.005.102.012.05.006.108.014.081.01.042.006.065.01.207.032.07.012.065.011.14.026.092.018.11.022.046.01.075.016.041.01L14.7.3l.042.01.065.015.049.012.071.017.096.024.112.03.113.03.113.032.05.015.07.02.078.024.073.023.05.016.05.016.076.025.099.033.102.036.048.017.064.023.093.034.11.041.116.045.1.04.047.02.06.024.041.018.063.026.04.018.057.025.11.048.1.046.074.035.075.036.06.028.092.046.091.045.102.052.053.028.049.026.046.024.06.033.041.022.052.029.088.05.106.06.087.051.057.034.053.032.096.059.088.055.098.062.036.024.064.041.084.056.04.027.062.042.062.043.023.017c.054.037.108.075.161.114l.083.06.065.048.056.043.086.065.082.064.04.03.05.041.086.069.079.065.085.071c.712.6 1.353 1.283 1.909 
2.031L7.222.994l.062-.027.065-.028.081-.034.086-.035c.113-.045.227-.09.341-.131l.096-.035.093-.033.084-.03.096-.031c.087-.03.176-.058.264-.085l.091-.027.086-.025.102-.03.085-.023.1-.026L9.04.37l.09-.023.091-.022.095-.022.09-.02.098-.021.091-.02.095-.018.092-.018.1-.018.091-.016.098-.017.092-.014.097-.015.092-.013.102-.013.091-.012.105-.012.09-.01.105-.01c.093-.01.186-.018.28-.024l.106-.008.09-.005.11-.006.093-.004.1-.004.097-.002.099-.002.197-.002z"></path>
|
||||
</g>
|
||||
</svg>
|
|
@ -0,0 +1,8 @@
|
|||
<svg fill="#333" fill-rule="evenodd" height="1em" style="flex:none;line-height:1" viewBox="0 0 30 30"
|
||||
width="1em" xmlns="http://www.w3.org/2000/svg">
|
||||
<title>OpenAI</title>
|
||||
<rect width="30" height="30" fill="#E7F8FF" rx="6"/>
|
||||
<g transform="translate(3, 3)">
|
||||
<path d="M21.55 10.004a5.416 5.416 0 00-.478-4.501c-1.217-2.09-3.662-3.166-6.05-2.66A5.59 5.59 0 0010.831 1C8.39.995 6.224 2.546 5.473 4.838A5.553 5.553 0 001.76 7.496a5.487 5.487 0 00.691 6.5 5.416 5.416 0 00.477 4.502c1.217 2.09 3.662 3.165 6.05 2.66A5.586 5.586 0 0013.168 23c2.443.006 4.61-1.546 5.361-3.84a5.553 5.553 0 003.715-2.66 5.488 5.488 0 00-.693-6.497v.001zm-8.381 11.558a4.199 4.199 0 01-2.675-.954c.034-.018.093-.05.132-.074l4.44-2.53a.71.71 0 00.364-.623v-6.176l1.877 1.069c.02.01.033.029.036.05v5.115c-.003 2.274-1.87 4.118-4.174 4.123zM4.192 17.78a4.059 4.059 0 01-.498-2.763c.032.02.09.055.131.078l4.44 2.53c.225.13.504.13.73 0l5.42-3.088v2.138a.068.068 0 01-.027.057L9.9 19.288c-1.999 1.136-4.552.46-5.707-1.51h-.001zM3.023 8.216A4.15 4.15 0 015.198 6.41l-.002.151v5.06a.711.711 0 00.364.624l5.42 3.087-1.876 1.07a.067.067 0 01-.063.005l-4.489-2.559c-1.995-1.14-2.679-3.658-1.53-5.63h.001zm15.417 3.54l-5.42-3.088L14.896 7.6a.067.067 0 01.063-.006l4.489 2.557c1.998 1.14 2.683 3.662 1.529 5.633a4.163 4.163 0 01-2.174 1.807V12.38a.71.71 0 00-.363-.623zm1.867-2.773a6.04 6.04 0 00-.132-.078l-4.44-2.53a.731.731 0 00-.729 0l-5.42 3.088V7.325a.068.068 0 01.027-.057L14.1 4.713c2-1.137 4.555-.46 5.707 1.513.487.833.664 1.809.499 2.757h.001zm-11.741 3.81l-1.877-1.068a.065.065 0 01-.036-.051V6.559c.001-2.277 1.873-4.122 4.181-4.12.976 0 1.92.338 2.671.954-.034.018-.092.05-.131.073l-4.44 2.53a.71.71 0 00-.365.623l-.003 6.173v.002zm1.02-2.168L12 9.25l2.414 1.375v2.75L12 14.75l-2.415-1.375v-2.75z"></path>
|
||||
</g>
|
||||
</svg>
|
|
@ -0,0 +1,14 @@
|
|||
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 30 30" width="1em" xmlns="http://www.w3.org/2000/svg">
|
||||
<title>Qwen</title>
|
||||
<rect width="30" height="30" fill="#E7F8FF" rx="6"/>
|
||||
<g transform="translate(3, 3)">
|
||||
<defs>
|
||||
<linearGradient id="lobe-icons-qwen-fill" x1="0%" x2="100%" y1="0%" y2="0%">
|
||||
<stop offset="0%" stop-color="#00055F" stop-opacity=".84"></stop>
|
||||
<stop offset="100%" stop-color="#6F69F7" stop-opacity=".84"></stop>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<path d="M12.604 1.34c.393.69.784 1.382 1.174 2.075a.18.18 0 00.157.091h5.552c.174 0 .322.11.446.327l1.454 2.57c.19.337.24.478.024.837-.26.43-.513.864-.76 1.3l-.367.658c-.106.196-.223.28-.04.512l2.652 4.637c.172.301.111.494-.043.77-.437.785-.882 1.564-1.335 2.34-.159.272-.352.375-.68.37-.777-.016-1.552-.01-2.327.016a.099.099 0 00-.081.05 575.097 575.097 0 01-2.705 4.74c-.169.293-.38.363-.725.364-.997.003-2.002.004-3.017.002a.537.537 0 01-.465-.271l-1.335-2.323a.09.09 0 00-.083-.049H4.982c-.285.03-.553-.001-.805-.092l-1.603-2.77a.543.543 0 01-.002-.54l1.207-2.12a.198.198 0 000-.197 550.951 550.951 0 01-1.875-3.272l-.79-1.395c-.16-.31-.173-.496.095-.965.465-.813.927-1.625 1.387-2.436.132-.234.304-.334.584-.335a338.3 338.3 0 012.589-.001.124.124 0 00.107-.063l2.806-4.895a.488.488 0 01.422-.246c.524-.001 1.053 0 1.583-.006L11.704 1c.341-.003.724.032.9.34zm-3.432.403a.06.06 0 00-.052.03L6.254 6.788a.157.157 0 01-.135.078H3.253c-.056 0-.07.025-.041.074l5.81 10.156c.025.042.013.062-.034.063l-2.795.015a.218.218 0 00-.2.116l-1.32 2.31c-.044.078-.021.118.068.118l5.716.008c.046 0 .08.02.104.061l1.403 2.454c.046.081.092.082.139 0l5.006-8.76.783-1.382a.055.055 0 01.096 0l1.424 2.53a.122.122 0 00.107.062l2.763-.02a.04.04 0 00.035-.02.041.041 0 000-.04l-2.9-5.086a.108.108 0 010-.113l.293-.507 1.12-1.977c.024-.041.012-.062-.035-.062H9.2c-.059 0-.073-.026-.043-.077l1.434-2.505a.107.107 0 000-.114L9.225 1.774a.06.06 0 00-.053-.031zm6.29 8.02c.046 0 .058.02.034.06l-.832 1.465-2.613 4.585a.056.056 0 01-.05.029.058.058 0 01-.05-.029L8.498 9.841c-.02-.034-.01-.052.028-.054l.216-.012 6.722-.012z"
|
||||
fill="url(#lobe-icons-qwen-fill)" fill-rule="nonzero"></path>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 2.2 KiB |
|
@ -0,0 +1,18 @@
|
|||
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 30 30" width="1em" xmlns="http://www.w3.org/2000/svg">
|
||||
<title>Wenxin</title>
|
||||
<rect width="30" height="30" fill="#E7F8FF" rx="6"/>
|
||||
<g transform="translate(3, 3)">
|
||||
<defs>
|
||||
<linearGradient id="lobe-icons-wenxin-fill" x1="9.155%" x2="90.531%" y1="75.177%" y2="25.028%">
|
||||
<stop offset="0%" stop-color="#0A51C3"></stop>
|
||||
<stop offset="100%" stop-color="#23A4FB"></stop>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<g fill="none" fill-rule="nonzero">
|
||||
<path d="M11.32 1.176a1.4 1.4 0 011.36 0l8.64 4.843c.421.234.68.67.68 1.141v9.68c0 .472-.259.908-.68 1.143l-8.64 4.84a1.4 1.4 0 01-1.36 0l-8.64-4.84A1.31 1.31 0 012 16.84V7.159c0-.471.259-.907.68-1.142l8.64-4.84zm7.42 13.839V8.227L12.002 12 12 19.551l6.059-3.394a1.31 1.31 0 00.68-1.142zM12.68 4.833a1.393 1.393 0 00-1.36 0L5.944 7.846c-.421.235-.68.67-.68 1.142v6.027c0 .47.259.905.68 1.142l2.795 1.566V11.09a1.546 1.546 0 00.221.79 1.527 1.527 0 01-.216-.834l.004-.094.02-.15.018-.084.017-.062.039-.117.062-.142.035-.065.081-.13.094-.122.084-.091.08-.075.125-.1.071-.048.134-.076 5.87-3.29-2.796-1.566z"
|
||||
fill="url(#lobe-icons-wenxin-fill)"></path>
|
||||
<path d="M12 11.088c0-.875-.73-1.584-1.631-1.584a1.66 1.66 0 00-.855.237c-.027.016-.055.033-.08.05a2.361 2.361 0 00-.123.093c-.022.02-.045.038-.066.059l-.048.045-.063.067c-.014.016-.028.031-.04.048a2.303 2.303 0 00-.094.125l-.042.069a1.7 1.7 0 00-.07.13l-.036.081a.764.764 0 00-.022.06c-.01.03-.02.058-.028.087l-.017.062a.883.883 0 00-.03.16c-.002.025-.007.05-.008.074a1.527 1.527 0 00.213.929c.302.508.85.792 1.414.792.277 0 .558-.068.814-.212l.815-.457v-.914L12 11.088z"
|
||||
fill="#012F8D"></path>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 1.8 KiB |
|
@ -0,0 +1,15 @@
|
|||
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 180 180" fill="none">
|
||||
<g clip-path="url(#clip0_19_13)">
|
||||
<path d="M18 84.8528L85.8822 16.9706C95.2548 7.59798 110.451 7.59798 119.823 16.9706V16.9706C129.196 26.3431 129.196 41.5391 119.823 50.9117L68.5581 102.177"
|
||||
stroke="black" stroke-width="12" stroke-linecap="round"/>
|
||||
<path d="M69.2652 101.47L119.823 50.9117C129.196 41.5391 144.392 41.5391 153.765 50.9117L154.118 51.2652C163.491 60.6378 163.491 75.8338 154.118 85.2063L92.7248 146.6C89.6006 149.724 89.6006 154.789 92.7248 157.913L105.331 170.52"
|
||||
stroke="black" stroke-width="12" stroke-linecap="round"/>
|
||||
<path d="M102.853 33.9411L52.6482 84.1457C43.2756 93.5183 43.2756 108.714 52.6482 118.087V118.087C62.0208 127.459 77.2167 127.459 86.5893 118.087L136.794 67.8822"
|
||||
stroke="black" stroke-width="12" stroke-linecap="round"/>
|
||||
</g>
|
||||
<defs>
|
||||
<clipPath id="clip0_19_13">
|
||||
<rect width="180" height="180" fill="white"/>
|
||||
</clipPath>
|
||||
</defs>
|
||||
</svg>
|
After Width: | Height: | Size: 1.1 KiB |
|
@ -1 +1,3 @@
|
|||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="16" height="16" fill="none" viewBox="0 0 16 16"><defs><rect id="path_0" width="16" height="16" x="0" y="0"/></defs><g opacity="1" transform="translate(0 0) rotate(0 8 8)"><mask id="bg-mask-0" fill="#fff"><use xlink:href="#path_0"/></mask><g mask="url(#bg-mask-0)"><path id="路径 1" style="stroke:#333;stroke-width:1.3333333333333333;stroke-opacity:1;stroke-dasharray:0 0" d="M13.33,6.67C13.33,2.98 10.35,0 6.67,0C2.98,0 0,2.98 0,6.67C0,10.35 2.98,13.33 6.67,13.33C10.35,13.33 13.33,10.35 13.33,6.67Z" transform="translate(1.3333333333333333 1.3333333333333333) rotate(0 6.666666666666666 6.666666666666666)"/><path id="路径 2" style="stroke:#333;stroke-width:1.3333333333333333;stroke-opacity:1;stroke-dasharray:0 0" d="M0,0L0,4" transform="translate(6.333333333333333 6) rotate(0 0 2)"/><path id="路径 3" style="stroke:#333;stroke-width:1.3333333333333333;stroke-opacity:1;stroke-dasharray:0 0" d="M0,0L0,4" transform="translate(9.666666666666666 6) rotate(0 0 2)"/></g></g></svg>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||
<rect x="3" y="3" width="18" height="18" rx="2" ry="2"></rect>
|
||||
</svg>
|
Before Width: | Height: | Size: 1.1 KiB After Width: | Height: | Size: 253 B |
|
@ -0,0 +1,3 @@
|
|||
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||
<polygon points="5 3 19 12 5 21 5 3"></polygon>
|
||||
</svg>
|
After Width: | Height: | Size: 239 B |
|
@ -0,0 +1,7 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg width="24" height="24" viewBox="0 0 48 48" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path
|
||||
d="M14.5 8C13.8406 8.37652 13.2062 8.79103 12.6 9.24051C11.5625 10.0097 10.6074 10.8814 9.75 11.8402C6.79377 15.1463 5 19.4891 5 24.2455C5 34.6033 13.5066 43 24 43C34.4934 43 43 34.6033 43 24.2455C43 19.4891 41.2062 15.1463 38.25 11.8402C37.3926 10.8814 36.4375 10.0097 35.4 9.24051C34.7938 8.79103 34.1594 8.37652 33.5 8"
|
||||
stroke="#333" stroke-width="4" stroke-linecap="round" stroke-linejoin="round" />
|
||||
<path d="M24 4V24" stroke="#333" stroke-width="4" stroke-linecap="round" stroke-linejoin="round" />
|
||||
</svg>
|
After Width: | Height: | Size: 675 B |
|
@ -0,0 +1 @@
|
|||
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24"><path fill="currentColor" fill-rule="evenodd" d="M10.155 3.247c-.519.396-1.129 1.004-2.012 1.887s-1.49 1.493-1.887 2.012c-.383.502-.497.83-.497 1.14s.114.638.497 1.14c.397.52 1.004 1.13 1.887 2.012l4.419 4.419c.883.883 1.493 1.49 2.012 1.887c.502.383.83.497 1.14.497s.638-.114 1.14-.497c.519-.396 1.129-1.004 2.012-1.887s1.49-1.493 1.887-2.012c.383-.503.497-.83.497-1.14s-.114-.638-.497-1.14c-.396-.52-1.004-1.13-1.887-2.012l-4.419-4.419c-.883-.883-1.493-1.49-2.012-1.887c-.502-.383-.83-.497-1.14-.497s-.637.114-1.14.497m-.91-1.192c.636-.485 1.28-.805 2.05-.805s1.414.32 2.05.805c.609.464 1.29 1.145 2.125 1.98l.244.245c.239-.238.451-.44.685-.574a2.31 2.31 0 0 1 2.312 0c.267.154.505.393.787.675l.06.06l.061.061c.282.282.521.52.675.787a2.31 2.31 0 0 1 0 2.312c-.135.234-.336.446-.574.685l.245.244c.835.836 1.516 1.516 1.98 2.125c.485.636.805 1.28.805 2.05s-.32 1.414-.805 2.05c-.464.608-1.145 1.289-1.98 2.124l-.077.077c-.835.835-1.516 1.516-2.125 1.98c-.635.485-1.28.805-2.05.805c-.768 0-1.413-.32-2.049-.805c-.609-.464-1.29-1.145-2.125-1.98l-.244-.245l-4.993 4.994l-.06.06c-.282.282-.52.521-.787.675a2.31 2.31 0 0 1-2.312 0c-.267-.154-.505-.393-.787-.675l-.06-.06l-.061-.061c-.282-.282-.521-.52-.675-.787a2.31 2.31 0 0 1 0-2.312c.154-.266.393-.505.675-.786l.06-.061l4.994-4.993l-.245-.244c-.835-.836-1.516-1.516-1.98-2.125c-.485-.636-.805-1.28-.805-2.05s.32-1.414.805-2.05c.464-.608 1.145-1.289 1.98-2.124l.077-.077c.835-.835 1.516-1.516 2.125-1.98m-.896 11.71L3.356 18.76c-.376.376-.456.465-.497.536a.81.81 0 0 0 0 .812c.04.072.12.16.497.537c.377.376.466.456.537.497a.81.81 0 0 0 .812 0c.07-.04.16-.12.536-.497l4.994-4.993zm10.31-6.54c.24-.243.302-.314.336-.374a.81.81 0 0 0 0-.812c-.041-.071-.12-.16-.497-.537c-.377-.376-.466-.456-.537-.497a.81.81 0 0 0-.812 0c-.06.034-.131.096-.374.336z" clip-rule="evenodd"/></svg>
|
After Width: | Height: | Size: 1.9 KiB |
|
@ -0,0 +1,13 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg width="24" height="24" viewBox="0 0 48 48" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path
|
||||
d="M31 24V11C31 7.13401 27.866 4 24 4C20.134 4 17 7.13401 17 11V24C17 27.866 20.134 31 24 31C27.866 31 31 27.866 31 24Z"
|
||||
stroke="#d0021b" stroke-width="4" stroke-linejoin="round" />
|
||||
<path
|
||||
d="M9 23C9 31.2843 15.7157 38 24 38C25.7532 38 27.4361 37.6992 29 37.1465M39 23C39 25.1333 38.5547 27.1626 37.7519 29"
|
||||
stroke="#d0021b" stroke-width="4" stroke-linecap="round" stroke-linejoin="round" />
|
||||
<path d="M24 38V44" stroke="#d0021b" stroke-width="4" stroke-linecap="round"
|
||||
stroke-linejoin="round" />
|
||||
<path d="M42 42L6 6" stroke="#d0021b" stroke-width="4" stroke-linecap="round"
|
||||
stroke-linejoin="round" />
|
||||
</svg>
|
After Width: | Height: | Size: 811 B |
|
@ -0,0 +1,9 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg width="24" height="24" viewBox="0 0 48 48" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<rect x="17" y="4" width="14" height="27" rx="7" fill="none" stroke="#333" stroke-width="4"
|
||||
stroke-linejoin="round" />
|
||||
<path d="M9 23C9 31.2843 15.7157 38 24 38C32.2843 38 39 31.2843 39 23" stroke="#333"
|
||||
stroke-width="4" stroke-linecap="round" stroke-linejoin="round" />
|
||||
<path d="M24 38V44" stroke="#333" stroke-width="4" stroke-linecap="round"
|
||||
stroke-linejoin="round" />
|
||||
</svg>
|
After Width: | Height: | Size: 549 B |
|
@ -5,9 +5,8 @@ import "./styles/highlight.scss";
import { getClientConfig } from "./config/client";
import type { Metadata, Viewport } from "next";
import { SpeedInsights } from "@vercel/speed-insights/next";
import { getServerSideConfig } from "./config/server";
import { GoogleTagManager, GoogleAnalytics } from "@next/third-parties/google";
const serverConfig = getServerSideConfig();
import { getServerSideConfig } from "./config/server";

export const metadata: Metadata = {
  title: "NextChat",

@ -33,6 +32,8 @@ export default function RootLayout({
}: {
  children: React.ReactNode;
}) {
  const serverConfig = getServerSideConfig();

  return (
    <html lang="en">
      <head>
|
|
|
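Read together, the two layout.tsx hunks appear to drop the @vercel/speed-insights import and move the getServerSideConfig() call from module scope into the RootLayout body, so the server-side config is read when the layout renders rather than once at module evaluation. A minimal sketch of the resulting layout top under that reading; the +/- markers are not preserved in this dump, so treat it as an interpretation rather than the exact committed file, and the file path and head contents are assumptions:

// app/layout.tsx (sketch, assuming the module-level call is removed)
import { getServerSideConfig } from "./config/server";

export default function RootLayout({
  children,
}: {
  children: React.ReactNode;
}) {
  // Reading the config inside the component keeps env-driven flags fresh per render.
  const serverConfig = getServerSideConfig();

  return (
    <html lang="en">
      <head>{/* GoogleTagManager / GoogleAnalytics wired up from serverConfig */}</head>
      <body>{children}</body>
    </html>
  );
}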
@ -0,0 +1,200 @@
export class AudioHandler {
  private context: AudioContext;
  private mergeNode: ChannelMergerNode;
  private analyserData: Uint8Array;
  public analyser: AnalyserNode;
  private workletNode: AudioWorkletNode | null = null;
  private stream: MediaStream | null = null;
  private source: MediaStreamAudioSourceNode | null = null;
  private recordBuffer: Int16Array[] = [];
  private readonly sampleRate = 24000;

  private nextPlayTime: number = 0;
  private isPlaying: boolean = false;
  private playbackQueue: AudioBufferSourceNode[] = [];
  private playBuffer: Int16Array[] = [];

  constructor() {
    this.context = new AudioContext({ sampleRate: this.sampleRate });
    // using ChannelMergerNode to get merged audio data, and then get analyser data.
    this.mergeNode = new ChannelMergerNode(this.context, { numberOfInputs: 2 });
    this.analyser = new AnalyserNode(this.context, { fftSize: 256 });
    this.analyserData = new Uint8Array(this.analyser.frequencyBinCount);
    this.mergeNode.connect(this.analyser);
  }

  getByteFrequencyData() {
    this.analyser.getByteFrequencyData(this.analyserData);
    return this.analyserData;
  }

  async initialize() {
    await this.context.audioWorklet.addModule("/audio-processor.js");
  }

  async startRecording(onChunk: (chunk: Uint8Array) => void) {
    try {
      if (!this.workletNode) {
        await this.initialize();
      }

      this.stream = await navigator.mediaDevices.getUserMedia({
        audio: {
          channelCount: 1,
          sampleRate: this.sampleRate,
          echoCancellation: true,
          noiseSuppression: true,
        },
      });

      await this.context.resume();
      this.source = this.context.createMediaStreamSource(this.stream);
      this.workletNode = new AudioWorkletNode(
        this.context,
        "audio-recorder-processor",
      );

      this.workletNode.port.onmessage = (event) => {
        if (event.data.eventType === "audio") {
          const float32Data = event.data.audioData;
          const int16Data = new Int16Array(float32Data.length);

          for (let i = 0; i < float32Data.length; i++) {
            const s = Math.max(-1, Math.min(1, float32Data[i]));
            int16Data[i] = s < 0 ? s * 0x8000 : s * 0x7fff;
          }

          const uint8Data = new Uint8Array(int16Data.buffer);
          onChunk(uint8Data);
          // save recordBuffer
          // @ts-ignore
          this.recordBuffer.push.apply(this.recordBuffer, int16Data);
        }
      };

      this.source.connect(this.workletNode);
      this.source.connect(this.mergeNode, 0, 0);
      this.workletNode.connect(this.context.destination);

      this.workletNode.port.postMessage({ command: "START_RECORDING" });
    } catch (error) {
      console.error("Error starting recording:", error);
      throw error;
    }
  }

  stopRecording() {
    if (!this.workletNode || !this.source || !this.stream) {
      throw new Error("Recording not started");
    }

    this.workletNode.port.postMessage({ command: "STOP_RECORDING" });

    this.workletNode.disconnect();
    this.source.disconnect();
    this.stream.getTracks().forEach((track) => track.stop());
  }
  startStreamingPlayback() {
    this.isPlaying = true;
    this.nextPlayTime = this.context.currentTime;
  }

  stopStreamingPlayback() {
    this.isPlaying = false;
    this.playbackQueue.forEach((source) => source.stop());
    this.playbackQueue = [];
    this.playBuffer = [];
  }

  playChunk(chunk: Uint8Array) {
    if (!this.isPlaying) return;

    const int16Data = new Int16Array(chunk.buffer);
    // @ts-ignore
    this.playBuffer.push.apply(this.playBuffer, int16Data); // save playBuffer

    const float32Data = new Float32Array(int16Data.length);
    for (let i = 0; i < int16Data.length; i++) {
      float32Data[i] = int16Data[i] / (int16Data[i] < 0 ? 0x8000 : 0x7fff);
    }

    const audioBuffer = this.context.createBuffer(
      1,
      float32Data.length,
      this.sampleRate,
    );
    audioBuffer.getChannelData(0).set(float32Data);

    const source = this.context.createBufferSource();
    source.buffer = audioBuffer;
    source.connect(this.context.destination);
    source.connect(this.mergeNode, 0, 1);

    const chunkDuration = audioBuffer.length / this.sampleRate;

    source.start(this.nextPlayTime);

    this.playbackQueue.push(source);
    source.onended = () => {
      const index = this.playbackQueue.indexOf(source);
      if (index > -1) {
        this.playbackQueue.splice(index, 1);
      }
    };

    this.nextPlayTime += chunkDuration;

    if (this.nextPlayTime < this.context.currentTime) {
      this.nextPlayTime = this.context.currentTime;
    }
  }
  _saveData(data: Int16Array, bytesPerSample = 16): Blob {
    const headerLength = 44;
    const numberOfChannels = 1;
    const byteLength = data.buffer.byteLength;
    const header = new Uint8Array(headerLength);
    const view = new DataView(header.buffer);
    view.setUint32(0, 1380533830, false); // RIFF identifier 'RIFF'
    view.setUint32(4, 36 + byteLength, true); // file length minus RIFF identifier length and file description length
    view.setUint32(8, 1463899717, false); // RIFF type 'WAVE'
    view.setUint32(12, 1718449184, false); // format chunk identifier 'fmt '
    view.setUint32(16, 16, true); // format chunk length
    view.setUint16(20, 1, true); // sample format (raw)
    view.setUint16(22, numberOfChannels, true); // channel count
    view.setUint32(24, this.sampleRate, true); // sample rate
    view.setUint32(28, this.sampleRate * 4, true); // byte rate (sample rate * block align)
    view.setUint16(32, numberOfChannels * 2, true); // block align (channel count * bytes per sample)
    view.setUint16(34, bytesPerSample, true); // bits per sample
    view.setUint32(36, 1684108385, false); // data chunk identifier 'data'
    view.setUint32(40, byteLength, true); // data chunk length

    // using data.buffer, so no need to setUint16 to view.
    return new Blob([view, data.buffer], { type: "audio/mpeg" });
  }
  savePlayFile() {
    // @ts-ignore
    return this._saveData(new Int16Array(this.playBuffer));
  }
  saveRecordFile(
    audioStartMillis: number | undefined,
    audioEndMillis: number | undefined,
  ) {
    const startIndex = audioStartMillis
      ? Math.floor((audioStartMillis * this.sampleRate) / 1000)
      : 0;
    const endIndex = audioEndMillis
      ? Math.floor((audioEndMillis * this.sampleRate) / 1000)
      : this.recordBuffer.length;
    return this._saveData(
      // @ts-ignore
      new Int16Array(this.recordBuffer.slice(startIndex, endIndex)),
    );
  }
  async close() {
    this.recordBuffer = [];
    this.workletNode?.disconnect();
    this.source?.disconnect();
    this.stream?.getTracks().forEach((track) => track.stop());
    await this.context.close();
  }
}
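initialize() loads an AudioWorklet module from /audio-processor.js and the recorder node is created under the registered name "audio-recorder-processor", but the processor itself is not part of this diff. A minimal sketch of a compatible processor, assuming it only needs to forward mono Float32 blocks to the main thread while recording is active; apart from the registered name and the message shapes used by the class above, everything here is an assumption:

// public/audio-processor.js (sketch, not part of this diff)
class AudioRecorderProcessor extends AudioWorkletProcessor {
  constructor() {
    super();
    this.isRecording = false;
    this.port.onmessage = (event) => {
      if (event.data.command === "START_RECORDING") this.isRecording = true;
      if (event.data.command === "STOP_RECORDING") this.isRecording = false;
    };
  }

  process(inputs) {
    const channel = inputs[0] && inputs[0][0];
    if (this.isRecording && channel && channel.length > 0) {
      // Copy the block: the underlying buffer is reused between process() calls.
      this.port.postMessage({
        eventType: "audio",
        audioData: new Float32Array(channel),
      });
    }
    return true; // keep the processor alive
  }
}

registerProcessor("audio-recorder-processor", AudioRecorderProcessor);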
|
|
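A consumer of AudioHandler initializes the worklet, streams PCM16 chunks out as they are captured, and schedules incoming PCM16 chunks back-to-back on the same AudioContext clock. A minimal usage sketch under those assumptions; the import path and the sendAudioChunk / onRemoteAudioChunk hooks are hypothetical stand-ins for the realtime-chat transport, which is not shown in this section:

import { AudioHandler } from "./audio"; // hypothetical path to the module above

// Hypothetical transport hooks; replace with the realtime session's send/receive.
declare function sendAudioChunk(chunk: Uint8Array): void;
declare function onRemoteAudioChunk(cb: (chunk: Uint8Array) => void): void;

async function runRealtimeAudio() {
  const audio = new AudioHandler();
  await audio.initialize();

  // Capture: each chunk delivered here is little-endian PCM16 mono at 24 kHz.
  await audio.startRecording((chunk) => sendAudioChunk(chunk));

  // Playback: chunks are scheduled contiguously starting at the current time.
  audio.startStreamingPlayback();
  onRemoteAudioChunk((chunk) => audio.playChunk(chunk));

  // Teardown: stop both directions, optionally keep WAV blobs, release devices.
  audio.stopRecording();
  audio.stopStreamingPlayback();
  const recordedWav = audio.saveRecordFile(undefined, undefined); // 44-byte RIFF header + PCM16
  const playedWav = audio.savePlayFile();
  console.log(recordedWav.size, playedWav.size);
  await audio.close();
}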
@ -106,6 +106,7 @@ const cn = {
|
|||
copyLastMessage: "复制最后一个回复",
|
||||
copyLastCode: "复制最后一个代码块",
|
||||
showShortcutKey: "显示快捷方式",
|
||||
clearContext: "清除上下文",
|
||||
},
|
||||
},
|
||||
Export: {
|
||||
|
@ -176,7 +177,7 @@ const cn = {
|
|||
},
|
||||
},
|
||||
Lang: {
|
||||
Name: "Language", // ATTENTION: if you wanna add a new translation, please do not translate this value, leave it as `Language`
|
||||
Name: "Language", // 注意:如果要添加新的翻译,请不要翻译此值,将它保留为 `Language`
|
||||
All: "所有语言",
|
||||
},
|
||||
Avatar: "头像",
|
||||
|
@ -205,6 +206,8 @@ const cn = {
|
|||
IsChecking: "正在检查更新...",
|
||||
FoundUpdate: (x: string) => `发现新版本:${x}`,
|
||||
GoToUpdate: "前往更新",
|
||||
Success: "更新成功!",
|
||||
Failed: "更新失败",
|
||||
},
|
||||
SendKey: "发送键",
|
||||
Theme: "主题",
|
||||
|
@ -460,6 +463,50 @@ const cn = {
|
|||
SubTitle: "样例:",
|
||||
},
|
||||
},
|
||||
DeepSeek: {
|
||||
ApiKey: {
|
||||
Title: "接口密钥",
|
||||
SubTitle: "使用自定义DeepSeek API Key",
|
||||
Placeholder: "DeepSeek API Key",
|
||||
},
|
||||
Endpoint: {
|
||||
Title: "接口地址",
|
||||
SubTitle: "样例:",
|
||||
},
|
||||
},
|
||||
XAI: {
|
||||
ApiKey: {
|
||||
Title: "接口密钥",
|
||||
SubTitle: "使用自定义XAI API Key",
|
||||
Placeholder: "XAI API Key",
|
||||
},
|
||||
Endpoint: {
|
||||
Title: "接口地址",
|
||||
SubTitle: "样例:",
|
||||
},
|
||||
},
|
||||
ChatGLM: {
|
||||
ApiKey: {
|
||||
Title: "接口密钥",
|
||||
SubTitle: "使用自定义 ChatGLM API Key",
|
||||
Placeholder: "ChatGLM API Key",
|
||||
},
|
||||
Endpoint: {
|
||||
Title: "接口地址",
|
||||
SubTitle: "样例:",
|
||||
},
|
||||
},
|
||||
SiliconFlow: {
|
||||
ApiKey: {
|
||||
Title: "接口密钥",
|
||||
SubTitle: "使用自定义硅基流动 API Key",
|
||||
Placeholder: "硅基流动 API Key",
|
||||
},
|
||||
Endpoint: {
|
||||
Title: "接口地址",
|
||||
SubTitle: "样例:",
|
||||
},
|
||||
},
|
||||
Stability: {
|
||||
ApiKey: {
|
||||
Title: "接口密钥",
|
||||
|
@ -495,8 +542,8 @@ const cn = {
|
|||
|
||||
Model: "模型 (model)",
|
||||
CompressModel: {
|
||||
Title: "压缩模型",
|
||||
SubTitle: "用于压缩历史记录的模型",
|
||||
Title: "对话摘要模型",
|
||||
SubTitle: "用于压缩历史记录、生成对话标题的模型",
|
||||
},
|
||||
Temperature: {
|
||||
Title: "随机性 (temperature)",
|
||||
|
@ -538,6 +585,39 @@ const cn = {
|
|||
SubTitle: "生成语音的速度",
|
||||
},
|
||||
},
|
||||
Realtime: {
|
||||
Enable: {
|
||||
Title: "实时聊天",
|
||||
SubTitle: "开启实时聊天功能",
|
||||
},
|
||||
Provider: {
|
||||
Title: "模型服务商",
|
||||
SubTitle: "切换不同的服务商",
|
||||
},
|
||||
Model: {
|
||||
Title: "模型",
|
||||
SubTitle: "选择一个模型",
|
||||
},
|
||||
ApiKey: {
|
||||
Title: "API Key",
|
||||
SubTitle: "API Key",
|
||||
Placeholder: "API Key",
|
||||
},
|
||||
Azure: {
|
||||
Endpoint: {
|
||||
Title: "接口地址",
|
||||
SubTitle: "接口地址",
|
||||
},
|
||||
Deployment: {
|
||||
Title: "部署名称",
|
||||
SubTitle: "部署名称",
|
||||
},
|
||||
},
|
||||
Temperature: {
|
||||
Title: "随机性 (temperature)",
|
||||
SubTitle: "值越大,回复越随机",
|
||||
},
|
||||
},
|
||||
},
|
||||
Store: {
|
||||
DefaultTopic: "新的聊天",
|
||||
|
@ -569,11 +649,14 @@ const cn = {
|
|||
Discovery: {
|
||||
Name: "发现",
|
||||
},
|
||||
Mcp: {
|
||||
Name: "MCP",
|
||||
},
|
||||
FineTuned: {
|
||||
Sysmessage: "你是一个助手",
|
||||
},
|
||||
SearchChat: {
|
||||
Name: "搜索",
|
||||
Name: "搜索聊天记录",
|
||||
Page: {
|
||||
Title: "搜索聊天记录",
|
||||
Search: "输入搜索关键词",
|
||||
|
@ -665,6 +748,10 @@ const cn = {
|
|||
Title: "启用Artifacts",
|
||||
SubTitle: "启用之后可以直接渲染HTML页面",
|
||||
},
|
||||
CodeFold: {
|
||||
Title: "启用代码折叠",
|
||||
SubTitle: "启用之后可以自动折叠/展开过长的代码块",
|
||||
},
|
||||
Share: {
|
||||
Title: "分享此面具",
|
||||
SubTitle: "生成此面具的直达链接",
|
||||
|
|
|
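The new cn.ts blocks for DeepSeek, XAI, ChatGLM, SiliconFlow and the Realtime section mirror the shape of the existing provider entries, which is what lets the settings screens read them generically. A minimal sketch of how one of them is typically referenced, assuming these blocks sit under Settings.Access like the neighbouring provider entries do; the import path and the field objects are illustrative only:

import Locale from "../locales"; // hypothetical path

// Pull the strings for one of the newly added provider sections.
const sf = Locale.Settings.Access.SiliconFlow;

const apiKeyItem = {
  title: sf.ApiKey.Title, // "接口密钥"
  subTitle: sf.ApiKey.SubTitle, // "使用自定义硅基流动 API Key"
  placeholder: sf.ApiKey.Placeholder, // "硅基流动 API Key"
};

const endpointItem = {
  title: sf.Endpoint.Title, // "接口地址"
  subTitle: sf.Endpoint.SubTitle, // "样例:"
};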
@ -0,0 +1,832 @@
|
|||
import { getClientConfig } from "../config/client";
|
||||
import { SubmitKey } from "../store/config";
|
||||
import { SAAS_CHAT_UTM_URL } from "@/app/constant";
|
||||
import { PartialLocaleType } from "./index";
|
||||
|
||||
const isApp = !!getClientConfig()?.isApp;
|
||||
const da: PartialLocaleType = {
|
||||
WIP: "Der kommer snart mere...",
|
||||
Error: {
|
||||
Unauthorized: isApp
|
||||
? `Hov, der skete en fejl. Sådan kan du komme videre:
|
||||
\\ 1️⃣ Er du ny her? [Tryk for at starte nu 🚀](${SAAS_CHAT_UTM_URL})
|
||||
\\ 2️⃣ Vil du bruge dine egne OpenAI-nøgler? [Tryk her](/#/settings) for at ændre indstillinger ⚙️`
|
||||
: `Hov, der skete en fejl. Lad os løse det:
|
||||
\\ 1️⃣ Er du ny her? [Tryk for at starte nu 🚀](${SAAS_CHAT_UTM_URL})
|
||||
\\ 2️⃣ Bruger du en privat opsætning? [Tryk her](/#/auth) for at taste din nøgle 🔑
|
||||
\\ 3️⃣ Vil du bruge dine egne OpenAI-nøgler? [Tryk her](/#/settings) for at ændre indstillinger ⚙️
|
||||
`,
|
||||
},
|
||||
Auth: {
|
||||
Return: "Tilbage",
|
||||
Title: "Adgangskode",
|
||||
Tips: "Skriv venligst koden herunder",
|
||||
SubTips: "Eller brug din egen OpenAI- eller Google-nøgle",
|
||||
Input: "Adgangskode",
|
||||
Confirm: "OK",
|
||||
Later: "Senere",
|
||||
SaasTips: "Hvis det er for svært, kan du starte nu",
|
||||
},
|
||||
ChatItem: {
|
||||
ChatItemCount: (count: number) => `${count} beskeder`,
|
||||
},
|
||||
Chat: {
|
||||
SubTitle: (count: number) => `${count} beskeder`,
|
||||
EditMessage: {
|
||||
Title: "Rediger beskeder",
|
||||
Topic: {
|
||||
Title: "Emne",
|
||||
SubTitle: "Skift emne for denne chat",
|
||||
},
|
||||
},
|
||||
Actions: {
|
||||
ChatList: "Gå til chatliste",
|
||||
CompressedHistory: "Komprimeret historie",
|
||||
Export: "Eksporter alle beskeder som Markdown",
|
||||
Copy: "Kopiér",
|
||||
Stop: "Stop",
|
||||
Retry: "Prøv igen",
|
||||
Pin: "Fastgør",
|
||||
PinToastContent: "1 besked er nu fastgjort",
|
||||
PinToastAction: "Se",
|
||||
Delete: "Slet",
|
||||
Edit: "Rediger",
|
||||
FullScreen: "Fuld skærm",
|
||||
RefreshTitle: "Opdatér titel",
|
||||
RefreshToast: "Anmodning om ny titel sendt",
|
||||
Speech: "Afspil",
|
||||
StopSpeech: "Stop",
|
||||
},
|
||||
Commands: {
|
||||
new: "Ny chat",
|
||||
newm: "Ny chat med persona",
|
||||
next: "Næste chat",
|
||||
prev: "Forrige chat",
|
||||
clear: "Ryd alt før",
|
||||
fork: "Kopiér chat",
|
||||
del: "Slet chat",
|
||||
},
|
||||
InputActions: {
|
||||
Stop: "Stop",
|
||||
ToBottom: "Ned til nyeste",
|
||||
Theme: {
|
||||
auto: "Automatisk",
|
||||
light: "Lyst tema",
|
||||
dark: "Mørkt tema",
|
||||
},
|
||||
Prompt: "Prompts",
|
||||
Masks: "Personaer",
|
||||
Clear: "Ryd kontekst",
|
||||
Settings: "Indstillinger",
|
||||
UploadImage: "Upload billeder",
|
||||
},
|
||||
Rename: "Omdøb chat",
|
||||
Typing: "Skriver…",
|
||||
Input: (submitKey: string) => {
|
||||
let inputHints = `${submitKey} for at sende`;
|
||||
if (submitKey === String(SubmitKey.Enter)) {
|
||||
inputHints += ", Shift + Enter for ny linje";
|
||||
}
|
||||
return (
|
||||
inputHints + ", / for at søge i prompts, : for at bruge kommandoer"
|
||||
);
|
||||
},
|
||||
Send: "Send",
|
||||
StartSpeak: "Start oplæsning",
|
||||
StopSpeak: "Stop oplæsning",
|
||||
Config: {
|
||||
Reset: "Nulstil til standard",
|
||||
SaveAs: "Gem som persona",
|
||||
},
|
||||
IsContext: "Ekstra prompt til baggrund",
|
||||
ShortcutKey: {
|
||||
Title: "Hurtigtaster",
|
||||
newChat: "Åbn ny chat",
|
||||
focusInput: "Fokus på tekstfeltet",
|
||||
copyLastMessage: "Kopiér sidste svar",
|
||||
copyLastCode: "Kopiér sidste kodeblok",
|
||||
showShortcutKey: "Vis hurtigtaster",
|
||||
clearContext: "Ryd kontekst",
|
||||
},
|
||||
},
|
||||
Export: {
|
||||
Title: "Eksportér beskeder",
|
||||
Copy: "Kopiér alt",
|
||||
Download: "Download",
|
||||
MessageFromYou: "Fra dig",
|
||||
MessageFromChatGPT: "Fra ChatGPT",
|
||||
Share: "Del til ShareGPT",
|
||||
Format: {
|
||||
Title: "Filformat",
|
||||
SubTitle: "Vælg enten Markdown eller PNG-billede",
|
||||
},
|
||||
IncludeContext: {
|
||||
Title: "Tag baggrund med",
|
||||
SubTitle: "Skal ekstra baggrund (persona) med i eksporten?",
|
||||
},
|
||||
Steps: {
|
||||
Select: "Vælg",
|
||||
Preview: "Forhåndsvis",
|
||||
},
|
||||
Image: {
|
||||
Toast: "Laver billede...",
|
||||
Modal: "Tryk længe eller højreklik for at gemme",
|
||||
},
|
||||
Artifacts: {
|
||||
Title: "Del side",
|
||||
Error: "Fejl ved deling",
|
||||
},
|
||||
},
|
||||
Select: {
|
||||
Search: "Søg",
|
||||
All: "Vælg alle",
|
||||
Latest: "Vælg nyeste",
|
||||
Clear: "Ryd alt",
|
||||
},
|
||||
Memory: {
|
||||
Title: "Huskesætning",
|
||||
EmptyContent: "Ingenting lige nu.",
|
||||
Send: "Send huskesætning",
|
||||
Copy: "Kopiér huskesætning",
|
||||
Reset: "Nulstil chat",
|
||||
ResetConfirm:
|
||||
"Dette sletter nuværende samtale og hukommelse. Er du sikker?",
|
||||
},
|
||||
Home: {
|
||||
NewChat: "Ny Chat",
|
||||
DeleteChat: "Vil du slette den valgte chat?",
|
||||
DeleteToast: "Chat slettet",
|
||||
Revert: "Fortryd",
|
||||
},
|
||||
Settings: {
|
||||
Title: "Indstillinger",
|
||||
SubTitle: "Alle indstillinger",
|
||||
ShowPassword: "Vis kodeord",
|
||||
Danger: {
|
||||
Reset: {
|
||||
Title: "Nulstil alle indstillinger",
|
||||
SubTitle: "Gendan alt til standard",
|
||||
Action: "Nulstil",
|
||||
Confirm: "Vil du virkelig nulstille alt?",
|
||||
},
|
||||
Clear: {
|
||||
Title: "Slet alle data",
|
||||
SubTitle: "Sletter alt om beskeder og indstillinger",
|
||||
Action: "Slet",
|
||||
Confirm: "Er du sikker på, at du vil slette alt?",
|
||||
},
|
||||
},
|
||||
Lang: {
|
||||
Name: "Language",
|
||||
All: "Alle sprog",
|
||||
},
|
||||
Avatar: "Avatar",
|
||||
FontSize: {
|
||||
Title: "Skriftstørrelse",
|
||||
SubTitle: "Vælg, hvor stor teksten skal være",
|
||||
},
|
||||
FontFamily: {
|
||||
Title: "Skrifttype",
|
||||
SubTitle: "Hvis tom, bruger den standard skrifttype",
|
||||
Placeholder: "Skrifttype-navn",
|
||||
},
|
||||
InjectSystemPrompts: {
|
||||
Title: "Tilføj system-prompt",
|
||||
SubTitle: "Læg altid en ekstra prompt først i anmodninger",
|
||||
},
|
||||
InputTemplate: {
|
||||
Title: "Tekstskabelon",
|
||||
SubTitle: "Den seneste besked placeres i denne skabelon",
|
||||
},
|
||||
Update: {
|
||||
Version: (x: string) => `Version: ${x}`,
|
||||
IsLatest: "Du har nyeste version",
|
||||
CheckUpdate: "Tjek efter opdatering",
|
||||
IsChecking: "Tjekker...",
|
||||
FoundUpdate: (x: string) => `Ny version fundet: ${x}`,
|
||||
GoToUpdate: "Opdatér",
|
||||
Success: "Opdatering lykkedes.",
|
||||
Failed: "Opdatering mislykkedes.",
|
||||
},
|
||||
SendKey: "Tast for send",
|
||||
Theme: "Tema",
|
||||
TightBorder: "Stram kant",
|
||||
SendPreviewBubble: {
|
||||
Title: "Forhåndsvisnings-boble",
|
||||
SubTitle: "Vis tekst, før den sendes",
|
||||
},
|
||||
AutoGenerateTitle: {
|
||||
Title: "Lav titel automatisk",
|
||||
SubTitle: "Foreslå en titel ud fra chatten",
|
||||
},
|
||||
Sync: {
|
||||
CloudState: "Seneste opdatering",
|
||||
NotSyncYet: "Endnu ikke synkroniseret",
|
||||
Success: "Synkronisering lykkedes",
|
||||
Fail: "Synkronisering mislykkedes",
|
||||
Config: {
|
||||
Modal: {
|
||||
Title: "Indstil synk",
|
||||
Check: "Tjek forbindelse",
|
||||
},
|
||||
SyncType: {
|
||||
Title: "Synk-type",
|
||||
SubTitle: "Vælg en synk-tjeneste",
|
||||
},
|
||||
Proxy: {
|
||||
Title: "Aktivér proxy",
|
||||
SubTitle: "Brug proxy for at undgå netværksproblemer",
|
||||
},
|
||||
ProxyUrl: {
|
||||
Title: "Proxy-adresse",
|
||||
SubTitle: "Bruges kun til projektets egen proxy",
|
||||
},
|
||||
WebDav: {
|
||||
Endpoint: "WebDAV-adresse",
|
||||
UserName: "Brugernavn",
|
||||
Password: "Kodeord",
|
||||
},
|
||||
UpStash: {
|
||||
Endpoint: "UpStash Redis REST URL",
|
||||
UserName: "Backup-navn",
|
||||
Password: "UpStash Redis REST Token",
|
||||
},
|
||||
},
|
||||
LocalState: "Lokale data",
|
||||
Overview: (overview: any) =>
|
||||
`${overview.chat} chats, ${overview.message} beskeder, ${overview.prompt} prompts, ${overview.mask} personaer`,
|
||||
ImportFailed: "Import mislykkedes",
|
||||
},
|
||||
Mask: {
|
||||
Splash: {
|
||||
Title: "Persona-forside",
|
||||
SubTitle: "Vis denne side, når du opretter ny chat",
|
||||
},
|
||||
Builtin: {
|
||||
Title: "Skjul indbyggede personaer",
|
||||
SubTitle: "Vis ikke de indbyggede personaer i listen",
|
||||
},
|
||||
},
|
||||
Prompt: {
|
||||
Disable: {
|
||||
Title: "Slå auto-forslag fra",
|
||||
SubTitle: "Tast / for at få forslag",
|
||||
},
|
||||
List: "Prompt-liste",
|
||||
ListCount: (builtin: number, custom: number) =>
|
||||
`${builtin} indbygget, ${custom} brugerdefineret`,
|
||||
Edit: "Rediger",
|
||||
Modal: {
|
||||
Title: "Prompt-liste",
|
||||
Add: "Tilføj",
|
||||
Search: "Søg prompts",
|
||||
},
|
||||
EditModal: {
|
||||
Title: "Rediger prompt",
|
||||
},
|
||||
},
|
||||
HistoryCount: {
|
||||
Title: "Antal beskeder, der følger med",
|
||||
SubTitle: "Hvor mange af de tidligere beskeder, der sendes hver gang",
|
||||
},
|
||||
CompressThreshold: {
|
||||
Title: "Komprimeringsgrænse",
|
||||
SubTitle:
|
||||
"Hvis chatten bliver for lang, vil den komprimeres efter dette antal tegn",
|
||||
},
|
||||
Usage: {
|
||||
Title: "Brug og saldo",
|
||||
SubTitle(used: any, total: any) {
|
||||
return `Du har brugt $${used} i denne måned, og din grænse er $${total}.`;
|
||||
},
|
||||
IsChecking: "Tjekker...",
|
||||
Check: "Tjek igen",
|
||||
NoAccess: "Indtast API-nøgle for at se forbrug",
|
||||
},
|
||||
Access: {
|
||||
AccessCode: {
|
||||
Title: "Adgangskode",
|
||||
SubTitle: "Adgangskontrol er slået til",
|
||||
Placeholder: "Skriv kode her",
|
||||
},
|
||||
CustomEndpoint: {
|
||||
Title: "Brugerdefineret adresse",
|
||||
SubTitle: "Brug Azure eller OpenAI fra egen server",
|
||||
},
|
||||
Provider: {
|
||||
Title: "Model-udbyder",
|
||||
SubTitle: "Vælg Azure eller OpenAI",
|
||||
},
|
||||
OpenAI: {
|
||||
ApiKey: {
|
||||
Title: "OpenAI API-nøgle",
|
||||
SubTitle: "Brug din egen nøgle",
|
||||
Placeholder: "sk-xxx",
|
||||
},
|
||||
Endpoint: {
|
||||
Title: "OpenAI Endpoint",
|
||||
SubTitle: "Skal starte med http(s):// eller /api/openai som standard",
|
||||
},
|
||||
},
|
||||
Azure: {
|
||||
ApiKey: {
|
||||
Title: "Azure Api Key",
|
||||
SubTitle: "Hent din nøgle fra Azure-portalen",
|
||||
Placeholder: "Azure Api Key",
|
||||
},
|
||||
Endpoint: {
|
||||
Title: "Azure Endpoint",
|
||||
SubTitle: "F.eks.: ",
|
||||
},
|
||||
ApiVerion: {
|
||||
Title: "Azure Api Version",
|
||||
SubTitle: "Hentet fra Azure-portalen",
|
||||
},
|
||||
},
|
||||
Anthropic: {
|
||||
ApiKey: {
|
||||
Title: "Anthropic API-nøgle",
|
||||
SubTitle: "Brug din egen Anthropic-nøgle",
|
||||
Placeholder: "Anthropic API Key",
|
||||
},
|
||||
Endpoint: {
|
||||
Title: "Endpoint-adresse",
|
||||
SubTitle: "F.eks.: ",
|
||||
},
|
||||
ApiVerion: {
|
||||
Title: "API-version (Claude)",
|
||||
SubTitle: "Vælg den ønskede version",
|
||||
},
|
||||
},
|
||||
Baidu: {
|
||||
ApiKey: {
|
||||
Title: "Baidu-nøgle",
|
||||
SubTitle: "Din egen Baidu-nøgle",
|
||||
Placeholder: "Baidu API Key",
|
||||
},
|
||||
SecretKey: {
|
||||
Title: "Baidu hemmelig nøgle",
|
||||
SubTitle: "Din egen hemmelige nøgle fra Baidu",
|
||||
Placeholder: "Baidu Secret Key",
|
||||
},
|
||||
Endpoint: {
|
||||
Title: "Adresse",
|
||||
SubTitle: "Kan ikke ændres, se .env",
|
||||
},
|
||||
},
|
||||
Tencent: {
|
||||
ApiKey: {
|
||||
Title: "Tencent-nøgle",
|
||||
SubTitle: "Din egen nøgle fra Tencent",
|
||||
Placeholder: "Tencent API Key",
|
||||
},
|
||||
SecretKey: {
|
||||
Title: "Tencent hemmelig nøgle",
|
||||
SubTitle: "Din egen hemmelige nøgle fra Tencent",
|
||||
Placeholder: "Tencent Secret Key",
|
||||
},
|
||||
Endpoint: {
|
||||
Title: "Adresse",
|
||||
SubTitle: "Kan ikke ændres, se .env",
|
||||
},
|
||||
},
|
||||
ByteDance: {
|
||||
ApiKey: {
|
||||
Title: "ByteDance-nøgle",
|
||||
SubTitle: "Din egen nøgle til ByteDance",
|
||||
Placeholder: "ByteDance API Key",
|
||||
},
|
||||
Endpoint: {
|
||||
Title: "Adresse",
|
||||
SubTitle: "F.eks.: ",
|
||||
},
|
||||
},
|
||||
Alibaba: {
|
||||
ApiKey: {
|
||||
Title: "Alibaba-nøgle",
|
||||
SubTitle: "Din egen Alibaba Cloud-nøgle",
|
||||
Placeholder: "Alibaba Cloud API Key",
|
||||
},
|
||||
Endpoint: {
|
||||
Title: "Adresse",
|
||||
SubTitle: "F.eks.: ",
|
||||
},
|
||||
},
|
||||
Moonshot: {
|
||||
ApiKey: {
|
||||
Title: "Moonshot-nøgle",
|
||||
SubTitle: "Din egen Moonshot-nøgle",
|
||||
Placeholder: "Moonshot API Key",
|
||||
},
|
||||
Endpoint: {
|
||||
Title: "Adresse",
|
||||
SubTitle: "F.eks.: ",
|
||||
},
|
||||
},
|
||||
DeepSeek: {
|
||||
ApiKey: {
|
||||
Title: "DeepSeek-nøgle",
|
||||
SubTitle: "Din egen DeepSeek-nøgle",
|
||||
Placeholder: "DeepSeek API Key",
|
||||
},
|
||||
Endpoint: {
|
||||
Title: "Adresse",
|
||||
SubTitle: "F.eks.: ",
|
||||
},
|
||||
},
|
||||
XAI: {
|
||||
ApiKey: {
|
||||
Title: "XAI-nøgle",
|
||||
SubTitle: "Din egen XAI-nøgle",
|
||||
Placeholder: "XAI API Key",
|
||||
},
|
||||
Endpoint: {
|
||||
Title: "Adresse",
|
||||
SubTitle: "F.eks.: ",
|
||||
},
|
||||
},
|
||||
ChatGLM: {
|
||||
ApiKey: {
|
||||
Title: "ChatGLM-nøgle",
|
||||
SubTitle: "Din egen ChatGLM-nøgle",
|
||||
Placeholder: "ChatGLM API Key",
|
||||
},
|
||||
Endpoint: {
|
||||
Title: "Adresse",
|
||||
SubTitle: "F.eks.: ",
|
||||
},
|
||||
},
|
||||
SiliconFlow: {
|
||||
ApiKey: {
|
||||
Title: "SiliconFlow-nøgle",
|
||||
SubTitle: "Din egen SiliconFlow-nøgle",
|
||||
Placeholder: "SiliconFlow API Key",
|
||||
},
|
||||
Endpoint: {
|
||||
Title: "Adresse",
|
||||
SubTitle: "F.eks.: ",
|
||||
},
|
||||
},
|
||||
Stability: {
|
||||
ApiKey: {
|
||||
Title: "Stability-nøgle",
|
||||
SubTitle: "Din egen Stability-nøgle",
|
||||
Placeholder: "Stability API Key",
|
||||
},
|
||||
Endpoint: {
|
||||
Title: "Adresse",
|
||||
SubTitle: "F.eks.: ",
|
||||
},
|
||||
},
|
||||
Iflytek: {
|
||||
ApiKey: {
|
||||
Title: "Iflytek API Key",
|
||||
SubTitle: "Nøgle fra Iflytek",
|
||||
Placeholder: "Iflytek API Key",
|
||||
},
|
||||
ApiSecret: {
|
||||
Title: "Iflytek hemmelig nøgle",
|
||||
SubTitle: "Hentet fra Iflytek",
|
||||
Placeholder: "Iflytek API Secret",
|
||||
},
|
||||
Endpoint: {
|
||||
Title: "Adresse",
|
||||
SubTitle: "F.eks.: ",
|
||||
},
|
||||
},
|
||||
CustomModel: {
|
||||
Title: "Egne modelnavne",
|
||||
SubTitle: "Skriv komma-adskilte navne",
|
||||
},
|
||||
Google: {
|
||||
ApiKey: {
|
||||
Title: "Google-nøgle",
|
||||
SubTitle: "Få din nøgle hos Google AI",
|
||||
Placeholder: "Google AI API Key",
|
||||
},
|
||||
Endpoint: {
|
||||
Title: "Adresse",
|
||||
SubTitle: "F.eks.: ",
|
||||
},
|
||||
ApiVersion: {
|
||||
Title: "API-version (til gemini-pro)",
|
||||
SubTitle: "Vælg en bestemt version",
|
||||
},
|
||||
GoogleSafetySettings: {
|
||||
Title: "Google sikkerhedsindstillinger",
|
||||
SubTitle: "Vælg et niveau for indholdskontrol",
|
||||
},
|
||||
},
|
||||
},
|
||||
Model: "Model",
|
||||
CompressModel: {
|
||||
Title: "Opsummeringsmodel",
|
||||
SubTitle: "Bruges til at korte historik ned og lave titel",
|
||||
},
|
||||
Temperature: {
|
||||
Title: "Temperatur",
|
||||
SubTitle: "Jo højere tal, jo mere kreativt svar",
|
||||
},
|
||||
TopP: {
|
||||
Title: "Top P",
|
||||
SubTitle: "Skal ikke ændres sammen med temperatur",
|
||||
},
|
||||
MaxTokens: {
|
||||
Title: "Maks. længde",
|
||||
SubTitle: "Hvor mange tokens (ord/stykker tekst) der kan bruges",
|
||||
},
|
||||
PresencePenalty: {
|
||||
Title: "Nye emner",
|
||||
SubTitle: "Jo højere tal, jo mere nyt indhold",
|
||||
},
|
||||
FrequencyPenalty: {
|
||||
Title: "Gentagelsesstraf",
|
||||
SubTitle: "Jo højere tal, jo mindre gentagelse",
|
||||
},
|
||||
TTS: {
|
||||
Enable: {
|
||||
Title: "Tænd for oplæsning (TTS)",
|
||||
SubTitle: "Slå tekst-til-tale til",
|
||||
},
|
||||
Autoplay: {
|
||||
Title: "Automatisk oplæsning",
|
||||
SubTitle: "Laver lyd automatisk, hvis TTS er slået til",
|
||||
},
|
||||
Model: "Model",
|
||||
Voice: {
|
||||
Title: "Stemme",
|
||||
SubTitle: "Hvilken stemme der bruges til lyd",
|
||||
},
|
||||
Speed: {
|
||||
Title: "Hastighed",
|
||||
SubTitle: "Hvor hurtigt der oplæses",
|
||||
},
|
||||
Engine: "TTS-motor",
|
||||
},
|
||||
Realtime: {
|
||||
Enable: {
|
||||
Title: "Live-chat",
|
||||
SubTitle: "Slå live-svar til",
|
||||
},
|
||||
Provider: {
|
||||
Title: "Modeludbyder",
|
||||
SubTitle: "Vælg forskellig udbyder",
|
||||
},
|
||||
Model: {
|
||||
Title: "Model",
|
||||
SubTitle: "Vælg en model",
|
||||
},
|
||||
ApiKey: {
|
||||
Title: "API-nøgle",
|
||||
SubTitle: "Din nøgle",
|
||||
Placeholder: "API-nøgle",
|
||||
},
|
||||
Azure: {
|
||||
Endpoint: {
|
||||
Title: "Adresse",
|
||||
SubTitle: "Endpoint til Azure",
|
||||
},
|
||||
Deployment: {
|
||||
Title: "Udrulningsnavn",
|
||||
SubTitle: "Navn for dit Azure-setup",
|
||||
},
|
||||
},
|
||||
Temperature: {
|
||||
Title: "Temperatur",
|
||||
SubTitle: "Højere tal = mere varierede svar",
|
||||
},
|
||||
},
|
||||
},
|
||||
Store: {
|
||||
DefaultTopic: "Ny samtale",
|
||||
BotHello: "Hej! Hvordan kan jeg hjælpe dig i dag?",
|
||||
Error: "Noget gik galt. Prøv igen senere.",
|
||||
Prompt: {
|
||||
History: (content: string) =>
|
||||
"Her er et kort resume af, hvad vi har snakket om: " + content,
|
||||
Topic:
|
||||
"Find en kort overskrift med 4-5 ord om emnet. Ingen tegnsætning eller anførselstegn.",
|
||||
Summarize:
|
||||
"Skriv et kort resumé (under 200 ord) af vores samtale til senere brug.",
|
||||
},
|
||||
},
|
||||
Copy: {
|
||||
Success: "Kopieret",
|
||||
Failed: "Kunne ikke kopiere. Giv adgang til udklipsholder.",
|
||||
},
|
||||
Download: {
|
||||
Success: "Filen er downloadet.",
|
||||
Failed: "Download fejlede.",
|
||||
},
|
||||
Context: {
|
||||
Toast: (x: any) => `Inkluderer ${x} ekstra prompts`,
|
||||
Edit: "Chatindstillinger",
|
||||
Add: "Tilføj prompt",
|
||||
Clear: "Kontekst ryddet",
|
||||
Revert: "Fortryd",
|
||||
},
|
||||
Discovery: {
|
||||
Name: "Søgning og plugins",
|
||||
},
|
||||
Mcp: {
|
||||
Name: "MCP",
|
||||
},
|
||||
FineTuned: {
|
||||
Sysmessage: "Du er en hjælper, der skal...",
|
||||
},
|
||||
SearchChat: {
|
||||
Name: "Søg",
|
||||
Page: {
|
||||
Title: "Søg i tidligere chats",
|
||||
Search: "Skriv her for at søge",
|
||||
NoResult: "Ingen resultater",
|
||||
NoData: "Ingen data",
|
||||
Loading: "Henter...",
|
||||
SubTitle: (count: number) => `Fandt ${count} resultater`,
|
||||
},
|
||||
Item: {
|
||||
View: "Vis",
|
||||
},
|
||||
},
|
||||
Plugin: {
|
||||
Name: "Plugin",
|
||||
Page: {
|
||||
Title: "Plugins",
|
||||
SubTitle: (count: number) => `${count} plugins`,
|
||||
Search: "Søg plugin",
|
||||
Create: "Opret nyt",
|
||||
Find: "Du kan finde flere plugins på GitHub: ",
|
||||
},
|
||||
Item: {
|
||||
Info: (count: number) => `${count} metode`,
|
||||
View: "Vis",
|
||||
Edit: "Rediger",
|
||||
Delete: "Slet",
|
||||
DeleteConfirm: "Vil du slette?",
|
||||
},
|
||||
Auth: {
|
||||
None: "Ingen",
|
||||
Basic: "Basic",
|
||||
Bearer: "Bearer",
|
||||
Custom: "Tilpasset",
|
||||
CustomHeader: "Parameternavn",
|
||||
Token: "Token",
|
||||
Proxy: "Brug Proxy",
|
||||
ProxyDescription: "Løs CORS-problemer med Proxy",
|
||||
Location: "Sted",
|
||||
LocationHeader: "Header",
|
||||
LocationQuery: "Query",
|
||||
LocationBody: "Body",
|
||||
},
|
||||
EditModal: {
|
||||
Title: (readonly: boolean) =>
|
||||
`Rediger Plugin ${readonly ? "(skrivebeskyttet)" : ""}`,
|
||||
Download: "Download",
|
||||
Auth: "Godkendelsestype",
|
||||
Content: "OpenAPI Schema",
|
||||
Load: "Hent fra URL",
|
||||
Method: "Metode",
|
||||
Error: "Fejl i OpenAPI Schema",
|
||||
},
|
||||
},
|
||||
Mask: {
|
||||
Name: "Persona",
|
||||
Page: {
|
||||
Title: "Prompts som personaer",
|
||||
SubTitle: (count: number) => `${count} skabeloner`,
|
||||
Search: "Søg skabeloner",
|
||||
Create: "Opret ny",
|
||||
},
|
||||
Item: {
|
||||
Info: (count: number) => `${count} prompts`,
|
||||
Chat: "Chat",
|
||||
View: "Vis",
|
||||
Edit: "Rediger",
|
||||
Delete: "Slet",
|
||||
DeleteConfirm: "Vil du slette?",
|
||||
},
|
||||
EditModal: {
|
||||
Title: (readonly: boolean) =>
|
||||
`Rediger skabelon ${readonly ? "(skrivebeskyttet)" : ""}`,
|
||||
Download: "Download",
|
||||
Clone: "Klon",
|
||||
},
|
||||
Config: {
|
||||
Avatar: "Chat-avatar",
|
||||
Name: "Chat-navn",
|
||||
Sync: {
|
||||
Title: "Brug globale indstillinger",
|
||||
SubTitle: "Gældende for denne chat",
|
||||
Confirm: "Erstat nuværende indstillinger med globale?",
|
||||
},
|
||||
HideContext: {
|
||||
Title: "Skjul ekstra prompts",
|
||||
SubTitle: "Vis dem ikke på chat-skærmen",
|
||||
},
|
||||
Artifacts: {
|
||||
Title: "Brug Artefakter",
|
||||
SubTitle: "Gør det muligt at vise HTML-sider",
|
||||
},
|
||||
CodeFold: {
|
||||
Title: "Fold kode sammen",
|
||||
SubTitle: "Luk/åbn lange kodestykker automatisk",
|
||||
},
|
||||
Share: {
|
||||
Title: "Del denne persona",
|
||||
SubTitle: "Få et link til denne skabelon",
|
||||
Action: "Kopiér link",
|
||||
},
|
||||
},
|
||||
},
|
||||
NewChat: {
|
||||
Return: "Tilbage",
|
||||
Skip: "Start straks",
|
||||
Title: "Vælg en persona",
|
||||
SubTitle: "Chat med den persona, du vælger",
|
||||
More: "Se flere",
|
||||
NotShow: "Vis ikke igen",
|
||||
ConfirmNoShow:
|
||||
"Er du sikker på, at du ikke vil se det igen? Du kan altid slå det til under indstillinger.",
|
||||
},
|
||||
UI: {
|
||||
Confirm: "OK",
|
||||
Cancel: "Fortryd",
|
||||
Close: "Luk",
|
||||
Create: "Opret",
|
||||
Edit: "Rediger",
|
||||
Export: "Eksporter",
|
||||
Import: "Importér",
|
||||
Sync: "Synk",
|
||||
Config: "Konfigurer",
|
||||
},
|
||||
Exporter: {
|
||||
Description: {
|
||||
Title: "Kun beskeder efter sidste rydning vises",
|
||||
},
|
||||
Model: "Model",
|
||||
Messages: "Beskeder",
|
||||
Topic: "Emne",
|
||||
Time: "Tid",
|
||||
},
|
||||
URLCommand: {
|
||||
Code: "Så ud til, at der var en kode i linket. Vil du bruge den?",
|
||||
Settings: "Så ud til, at der var indstillinger i linket. Vil du bruge dem?",
|
||||
},
|
||||
SdPanel: {
|
||||
Prompt: "Prompt",
|
||||
NegativePrompt: "Negativ prompt",
|
||||
PleaseInput: (name: string) => `Indtast: ${name}`,
|
||||
AspectRatio: "Billedformat",
|
||||
ImageStyle: "Stil",
|
||||
OutFormat: "Uddataformat",
|
||||
AIModel: "AI-model",
|
||||
ModelVersion: "Version",
|
||||
Submit: "Send",
|
||||
ParamIsRequired: (name: string) => `${name} er krævet`,
|
||||
Styles: {
|
||||
D3Model: "3d-model",
|
||||
AnalogFilm: "analog-film",
|
||||
Anime: "anime",
|
||||
Cinematic: "cinematisk",
|
||||
ComicBook: "tegneserie",
|
||||
DigitalArt: "digital-art",
|
||||
Enhance: "enhance",
|
||||
FantasyArt: "fantasy-art",
|
||||
Isometric: "isometric",
|
||||
LineArt: "line-art",
|
||||
LowPoly: "low-poly",
|
||||
ModelingCompound: "modeling-compound",
|
||||
NeonPunk: "neon-punk",
|
||||
Origami: "origami",
|
||||
Photographic: "fotografisk",
|
||||
PixelArt: "pixel-art",
|
||||
TileTexture: "tile-texture",
|
||||
},
|
||||
},
|
||||
Sd: {
|
||||
SubTitle: (count: number) => `${count} billeder`,
|
||||
Actions: {
|
||||
Params: "Se indstillinger",
|
||||
Copy: "Kopiér prompt",
|
||||
Delete: "Slet",
|
||||
Retry: "Prøv igen",
|
||||
ReturnHome: "Til forsiden",
|
||||
History: "Historik",
|
||||
},
|
||||
EmptyRecord: "Ingen billeder endnu",
|
||||
Status: {
|
||||
Name: "Status",
|
||||
Success: "Ok",
|
||||
Error: "Fejl",
|
||||
Wait: "Venter",
|
||||
Running: "I gang",
|
||||
},
|
||||
Danger: {
|
||||
Delete: "Vil du slette?",
|
||||
},
|
||||
GenerateParams: "Genereringsvalg",
|
||||
Detail: "Detaljer",
|
||||
},
|
||||
};
|
||||
|
||||
export default da;
|
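Because da is declared as PartialLocaleType, the Danish translation does not have to cover every key; whatever it omits should fall back to the base locale when the partial tree is merged over it. A minimal sketch of that fallback idea, assuming a recursive merge like the one the locale index presumably performs; mergeLocale and the import paths are illustrative, not taken from this diff:

import en from "./en"; // assumed base-locale path
import da from "./da";

// Deep-merge a partial locale over the base so missing keys resolve to English.
function mergeLocale<T extends object>(base: T, partial: Record<string, any>): T {
  const out: Record<string, any> = { ...(base as Record<string, any>) };
  for (const [key, value] of Object.entries(partial)) {
    if (value && typeof value === "object" && !Array.isArray(value)) {
      out[key] = mergeLocale(out[key] ?? {}, value);
    } else if (value !== undefined) {
      out[key] = value; // strings and template functions overwrite directly
    }
  }
  return out as T;
}

const daLocale = mergeLocale(en, da);
console.log(daLocale.Store.DefaultTopic); // "Ny samtale", with untranslated keys falling back to English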