Compare commits


4 Commits

Author      SHA1         Message                                  Date
Louis Lam   e4aedeb7af   WIP                                      2023-08-11 21:26:25 +08:00
Louis Lam   642136780f   WIP                                      2023-08-11 20:27:57 +08:00
Louis Lam   80d5f6840b   Merge branch '2.0.X' into npm-publish    2023-08-11 20:27:34 +08:00
Louis Lam   d76a515cbf   WIP                                      2023-08-03 20:51:11 +08:00
150 changed files with 1227 additions and 4434 deletions

View File

@@ -6,7 +6,7 @@ You can modifiy Uptime Kuma in your browser without setting up a local developme
1. Click `Code` -> `Create codespace on master`
2. Wait a few minutes until you see there are two exposed ports
3. Go to the `3000` url, see if it is working
3. Go to the `3000` url, see if it is working
![image](https://github.com/louislam/uptime-kuma/assets/1336778/909b2eb4-4c5e-44e4-ac26-6d20ed856e7f)

View File

@@ -13,10 +13,9 @@
"customizations": {
"vscode": {
"extensions": [
"streetsidesoftware.code-spell-checker",
"dbaeumer.vscode-eslint",
"GitHub.copilot-chat"
]
"streetsidesoftware.code-spell-checker",
"dbaeumer.vscode-eslint"
]
}
},
"forwardPorts": [3000, 3001]

View File

@@ -1,3 +1,5 @@
# Should be very similar to .npmignore, but the syntax is a bit different
/.idea
/node_modules
/data*
@@ -12,9 +14,8 @@
**/.gitignore
**/docker-compose*
**/[Dd]ockerfile*
LICENSE
README.md
.editorconfig
.vs
.vscode
.eslint*
.stylelint*
@@ -32,23 +33,15 @@ tsconfig.json
/tmp
/babel.config.js
/ecosystem.config.js
/extra/healthcheck.exe
/extra/healthcheck
/extra/exe-builder
/extra/push-examples
/extra/uptime-kuma-push
extra/exe-builder
/public
### .gitignore content (commented rules are duplicated)
#node_modules
.DS_Store
#dist
dist-ssr
*.local
#.idea
#/data
#!/data/.gitkeep
#.vscode
### End of .gitignore content
# .dockerignore only, not in .npmignore
/extra/healthcheck.exe
/extra/healthcheck
LICENSE
README.md

View File

@@ -1,7 +1,6 @@
module.exports = {
ignorePatterns: [
"test/*.js",
"test/cypress",
"test/*",
"server/modules/apicache/*",
"src/util.js"
],
@@ -76,7 +75,7 @@ module.exports = {
"no-var": "error",
"key-spacing": "warn",
"keyword-spacing": "warn",
"space-infix-ops": "error",
"space-infix-ops": "warn",
"arrow-spacing": "warn",
"no-trailing-spaces": "error",
"no-constant-condition": [ "error", {
@@ -114,7 +113,7 @@ module.exports = {
"error",
{ "noOptionalParamNames": true }
],
"jsdoc/require-throws": "warn",
"jsdoc/require-throws": "error",
"jsdoc/require-jsdoc": [
"error",
{
@@ -125,20 +124,19 @@ module.exports = {
}
],
"jsdoc/no-blank-block-descriptions": "error",
"jsdoc/require-returns-description": "warn",
"jsdoc/require-returns-check": [
"error",
{ "reportMissingReturnForUndefinedTypes": false }
],
"jsdoc/require-returns": [
"warn",
"error",
{
"forceRequireReturn": true,
"forceReturnsWithAsync": true
}
],
"jsdoc/require-param-type": "warn",
"jsdoc/require-param-description": "warn"
"jsdoc/require-param-type": "error",
"jsdoc/require-param-description": "error"
},
"overrides": [
{
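For reference, a minimal sketch of a function comment that would satisfy the stricter `jsdoc/*` rules in this hunk (`require-returns` with `forceReturnsWithAsync`, `require-throws`, `require-param-type`, `require-param-description`); the function itself is made up for illustration:

```js
/**
 * Check whether a measured ping is within an accepted threshold.
 * @param {number} ping Measured ping in milliseconds.
 * @param {number} threshold Maximum accepted ping in milliseconds.
 * @returns {Promise<boolean>} True if the ping is within the threshold.
 * @throws {Error} If either argument is negative.
 */
async function isWithinThreshold(ping, threshold) {
    if (ping < 0 || threshold < 0) {
        throw new Error("ping and threshold must be non-negative");
    }
    return ping <= threshold;
}
```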

View File

@@ -12,6 +12,8 @@ labels:
DO NOT PROVIDE ANY DETAILS HERE. Please privately report to https://github.com/louislam/uptime-kuma/security/advisories/new.
Why need this issue? It is because GitHub Advisory do not send a notification to @louislam, it is a workaround to do so.
Your GitHub Advisory URL:

View File

@@ -1,7 +1,7 @@
⚠️⚠️⚠️ Since we do not accept all types of pull requests and do not want to waste your time. Please be sure that you have read pull request rules:
https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md#can-i-create-a-pull-request-for-uptime-kuma
Tick the checkbox if you understand [x]:
Tick the checkbox if you understand [x]:
- [ ] I have read and understand the pull request rules.
# Description

View File

@@ -9,7 +9,7 @@ on:
paths-ignore:
- '*.md'
pull_request:
branches: [ master, 2.0.X ]
branches: [ master ]
paths-ignore:
- '*.md'
@@ -22,7 +22,7 @@ jobs:
strategy:
matrix:
os: [macos-latest, ubuntu-latest, windows-latest, ARM64]
node: [ 14, 20.5 ]
node: [ 14, 20 ]
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/
steps:
@@ -33,7 +33,7 @@ jobs:
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node }}
- run: npm install npm@9 -g
- run: npm install npm@latest -g
- run: npm install
- run: npm run build
- run: npm test
@@ -50,7 +50,7 @@ jobs:
strategy:
matrix:
os: [ ARMv7 ]
node: [ 14, 20 ]
node: [ 14.21.3, 20.5.0 ]
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/
steps:
@@ -61,7 +61,7 @@ jobs:
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node }}
- run: npm install npm@9 -g
- run: npm install npm@latest -g
- run: npm ci --production
check-linters:
@@ -78,21 +78,20 @@ jobs:
- run: npm install
- run: npm run lint
# TODO: Temporarily disable, as it cannot pass the test in 2.0.0 yet
# e2e-tests:
# needs: [ check-linters ]
# runs-on: ubuntu-latest
# steps:
# - run: git config --global core.autocrlf false # Mainly for Windows
# - uses: actions/checkout@v3
#
# - name: Use Node.js 14
# uses: actions/setup-node@v3
# with:
# node-version: 14
# - run: npm install
# - run: npm run build
# - run: npm run cy:test
e2e-tests:
needs: [ check-linters ]
runs-on: ubuntu-latest
steps:
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v3
- name: Use Node.js 14
uses: actions/setup-node@v3
with:
node-version: 14
- run: npm install
- run: npm run build
- run: npm run cy:test
frontend-unit-tests:
needs: [ check-linters ]

View File

@@ -1,4 +1,4 @@
name: json-yaml-validate
name: json-yaml-validate
on:
push:
branches:
@@ -6,7 +6,6 @@ on:
pull_request:
branches:
- master
- 2.0.X
workflow_dispatch:
permissions:

View File

@@ -1,17 +0,0 @@
name: prevent-file-change
on:
pull_request:
jobs:
check-file-changes:
runs-on: ubuntu-latest
steps:
- name: Prevent file change
uses: xalvarez/prevent-file-change-action@v1
with:
githubToken: ${{ secrets.GITHUB_TOKEN }}
# Regex, /src/lang/*.json is not allowed to be changed, except for /src/lang/en.json
pattern: '^(?!src/lang/en\.json$)src/lang/.*\.json$'
trustedAuthors: UptimeKumaBot
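The `pattern` above is a negative lookahead: any change under `src/lang/` is rejected unless it is `src/lang/en.json`. A quick illustrative check of the same regex in JavaScript (the paths are examples only):

```js
const pattern = /^(?!src\/lang\/en\.json$)src\/lang\/.*\.json$/;

console.log(pattern.test("src/lang/en.json"));    // false, en.json may be edited
console.log(pattern.test("src/lang/zh-HK.json")); // true, other language files are blocked
console.log(pattern.test("src/util.js"));         // false, not a language file at all
```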

.npmignore (new file, 45 lines)
View File

@@ -0,0 +1,45 @@
# Should be very similar to .dockerignore, but the syntax is a bit different
/.idea
/node_modules
/data
/cypress
/out
/test
/kubernetes
/.do
/.dockerignore
/private
/.git
/.gitignore
/docker-compose*
/[Dd]ockerfile*
.editorconfig
.vs
.vscode
.eslint*
.stylelint*
/.devcontainer
/.github
yarn.lock
app.json
CODE_OF_CONDUCT.md
CONTRIBUTING.md
CNAME
install.sh
SECURITY.md
tsconfig.json
.env
/tmp
/babel.config.js
/ecosystem.config.js
extra/exe-builder
/public
.DS_Store
dist-ssr
*.local
## .npmignore only, not in .dockerignore
/docker
/extra/healthcheck*

View File

@@ -2,13 +2,13 @@
First of all, I want to thank everyone who made pull requests for Uptime Kuma. I never thought the GitHub Community would be so nice! Because of this, I also never thought that other people would actually read and edit my code. It is not very well structured or commented, sorry about that.
The project was created with vite.js (vue3). Then I created a subdirectory called "server" for the server part. Both frontend and backend share the same package.json.
The project was created with vite.js (vue3). Then I created a subdirectory called "server" for server part. Both frontend and backend share the same package.json.
The frontend code builds into "dist" directory. The server (express.js) exposes the "dist" directory as the root of the endpoint. This is how production is working.
The frontend code build into "dist" directory. The server (express.js) exposes the "dist" directory as root of the endpoint. This is how production is working.
## Key Technical Skills
- Node.js (You should know about promise, async/await and arrow function etc.)
- Node.js (You should know what are promise, async/await and arrow function etc.)
- Socket.io
- SCSS
- Vue.js
@@ -30,31 +30,28 @@ The frontend code builds into "dist" directory. The server (express.js) exposes
## Can I create a pull request for Uptime Kuma?
Yes or no, it depends on what you will try to do. Since I don't want to waste your time, be sure to **create an empty draft pull request or open an issue, so we can have a discussion first**. Especially for a large pull request or you don't know if it will be merged or not.
Yes or no, it depends on what you will try to do. Since I don't want to waste your time, be sure to **create an empty draft pull request or open an issue, so we can have a discussion first**. Especially for a large pull request or you don't know it will be merged or not.
Here are some references:
### ✅ Usually accepted
### ✅ Usually accepted:
- Bug fix
- Security fix
- Adding notification providers
- Adding new language files (see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md))
- Adding new language keys: `$t("...")`
### ⚠️ Discussion required
### ⚠️ Discussion required:
- Large pull requests
- New features
### ❌ Won't be merged
- A dedicated PR for translating existing languages (see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md))
- Do not pass the auto-test
### ❌ Won't be merged:
- A dedicated pr for translating existing languages (see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md))
- Do not pass the auto test
- Any breaking changes
- Duplicated pull requests
- Buggy
- UI/UX is not close to Uptime Kuma
- UI/UX is not close to Uptime Kuma
- Modifications or deletions of existing logic without a valid reason.
- Adding functions that is completely out of scope
- Converting existing code into other programming languages
@@ -64,9 +61,10 @@ The above cases may not cover all possible situations.
I (@louislam) have the final say. If your pull request does not meet my expectations, I will reject it, no matter how much time you spend on it. Therefore, it is essential to have a discussion beforehand.
I will assign your pull request to a [milestone](https://github.com/louislam/uptime-kuma/milestones), if I plan to review and merge it.
I will mark your pull request in the [milestones](https://github.com/louislam/uptime-kuma/milestones), if I am plan to review and merge it.
Also, please don't rush or ask for ETA, because I have to understand the pull request, make sure it is no breaking changes and stick to my vision of this project, especially for large pull requests.
Also, please don't rush or ask for an ETA, because I have to understand the pull request, make sure it is no breaking changes and stick to my vision of this project, especially for large pull requests.
### Recommended Pull Request Guideline
@@ -85,11 +83,11 @@ Before deep into coding, discussion first is preferred. Creating an empty pull r
## Project Styles
I personally do not like something that requires so many configurations before you can finally start the app. I hope Uptime Kuma installation will be as easy as like installing a mobile app.
I personally do not like something that requires so many configurations before you can finally start the app. I hope Uptime Kuma installation could be as easy as like installing a mobile app.
- Easy to install for non-Docker users, no native build dependency is needed (for x86_64/armv7/arm64), no extra config, and no extra effort required to get it running
- Easy to install for non-Docker users, no native build dependency is needed (for x86_64/armv7/arm64), no extra config, no extra effort required to get it running
- Single container for Docker users, no very complex docker-compose file. Just map the volume and expose the port, then good to go
- Settings should be configurable in the frontend. Environment variables are discouraged, unless it is related to startup such as `DATA_DIR`
- Settings should be configurable in the frontend. Environment variable is not encouraged, unless it is related to startup such as `DATA_DIR`
- Easy to use
- The web UI styling should be consistent and nice
@@ -114,18 +112,6 @@ I personally do not like something that requires so many configurations before y
- IDE that supports [`ESLint`](https://eslint.org/) and EditorConfig (I am using [`IntelliJ IDEA`](https://www.jetbrains.com/idea/))
- A SQLite GUI tool (f.ex. [`SQLite Expert Personal`](https://www.sqliteexpert.com/download.html) or [`DBeaver Community`](https://dbeaver.io/download/))
### GitHub Codespace
If you don't want to setup an local environment, you can now develop on GitHub Codespace, read more:
https://github.com/louislam/uptime-kuma/tree/master/.devcontainer
## Git Branches
- `master`: 2.X.X development. If you want to add a new feature, your pull request should base on this.
- `1.23.X`: 1.23.X development. If you want to fix a bug for v1 and v2, your pull request should base on this.
- All other branches are unused, outdated or for dev.
## Install Dependencies for Development
```bash
@@ -144,9 +130,8 @@ Port `3000` and port `3001` will be used.
npm run dev
```
But sometimes, you would like to restart the server, but not the frontend, you can run these commands in two terminals:
```bash
But sometimes, you would like to keep restart the server, but not the frontend, you can run these command in two terminals:
```
npm run start-frontend-dev
npm run start-server-dev
```
@@ -155,18 +140,19 @@ npm run start-server-dev
It binds to `0.0.0.0:3001` by default.
It is mainly a socket.io app + express.js.
express.js is used for:
express.js is used for:
- entry point such as redirecting to a status page or the dashboard
- serving the frontend built files (index.html, .js and .css etc.)
- serving internal APIs of the status page
- serving internal APIs of status page
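A rough sketch of that arrangement (this is not the actual server code; the route path is illustrative, and express plus socket.io v4 are assumed):

```js
const express = require("express");
const http = require("http");
const { Server } = require("socket.io");

const app = express();

// Serve the compiled frontend ("dist") as the root of the endpoint
app.use(express.static("dist"));

// Example of an internal API consumed by status pages (path is illustrative)
app.get("/api/status-page/heartbeat/:slug", (req, res) => {
    res.json({ heartbeatList: {}, uptimeList: {} });
});

const server = http.createServer(app);
const io = new Server(server);

io.on("connection", (socket) => {
    // Most application logic is driven by socket.io events
});

server.listen(3001, "0.0.0.0");
```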
### Structure in /server/
- jobs/ (Jobs that are running in another process)
- model/ (Object model, auto-mapping to the database table name)
- model/ (Object model, auto mapping to the database table name)
- modules/ (Modified 3rd-party modules)
- monitor_types (Monitor Types)
- notification-providers/ (individual notification logic)
@@ -177,9 +163,9 @@ express.js is used for:
## Frontend Dev Server
It binds to `0.0.0.0:3000` by default. The frontend dev server is used for development only.
It binds to `0.0.0.0:3000` by default. Frontend dev server is used for development only.
For production, it is not used. It will be compiled to `dist` directory instead.
For production, it is not used. It will be compiled to `dist` directory instead.
You can use Vue.js devtools Chrome extension for debugging.
@@ -195,13 +181,14 @@ Uptime Kuma Frontend is a single page application (SPA). Most paths are handled
The router is in `src/router.js`
As you can see, most data in the frontend is stored at the root level, even though you changed the current router to any other pages.
As you can see, most data in frontend is stored in root level, even though you changed the current router to any other pages.
The data and socket logic are in `src/mixins/socket.js`.
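As a rough illustration of the router setup described here (not the real `src/router.js`; the page components are placeholders):

```js
import { createRouter, createWebHistory } from "vue-router";
import Dashboard from "./pages/Dashboard.vue"; // placeholder component
import Settings from "./pages/Settings.vue";   // placeholder component

const routes = [
    { path: "/dashboard", component: Dashboard },
    { path: "/settings", component: Settings },
];

// Vue Router 4 style, matching the Vue 3 + vite.js stack mentioned above
export default createRouter({
    history: createWebHistory(),
    routes,
});
```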
## Database Migration
See: https://github.com/louislam/uptime-kuma/tree/master/db/knex_migrations
1. Create `patch-{name}.sql` in `./db/`
2. Add your patch filename in the `patchList` list in `./server/database.js`
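For the older SQL-patch flow in step 2, the registration looks roughly like this (a hypothetical sketch: the filenames are placeholders and the exact shape of `patchList` in `./server/database.js` is assumed):

```js
// server/database.js (hypothetical excerpt)
class Database {
    static patchList = {
        "patch-add-google-analytics.sql": true,
        "patch-added-packet-size.sql": true,
        "patch-my-new-change.sql": true, // add your new patch file here
    };
}

console.log(Object.keys(Database.patchList));
```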
## Unit Test
@@ -223,25 +210,25 @@ Both frontend and backend share the same package.json. However, the frontend dep
### Update Dependencies
Since previously updating Vite 2.5.10 to 2.6.0 broke the application completely, from now on, it should update the patch release version only.
Since previously updating Vite 2.5.10 to 2.6.0 broke the application completely, from now on, it should update patch release version only.
Patch release = the third digit ([Semantic Versioning](https://semver.org/))
If for security / bug / other reasons, a library must be updated, breaking changes need to be checked by the person proposing the change.
If for maybe security reasons, a library must be updated. Then you must need to check if there are any breaking changes.
## Translations
Please add **all** the strings which are translatable to `src/lang/en.json` (If translation keys are omitted, they can not be translated).
Please add **all** the strings which are translatable to `src/lang/en.json` (If translation keys are ommited, they can not be translated).
**Don't include any other languages in your initial Pull-Request** (even if this is your mother tongue), to avoid merge-conflicts between weblate and `master`.
The translations can then (after merging a PR into `master`) be translated by awesome people donating their language skills.
**Don't include any other languages in your inital Pull-Request** (even if this is your mother tounge), to avoid merge-conflicts between weblate and `master`.
The translations can then (after merging a PR into `master`) be translated by awesome people donating their language-skills.
If you want to help by translating Uptime Kuma into your language, please visit the [instructions on how to translate using weblate](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).
## Spelling & Grammar
Feel free to correct the grammar in the documentation or code.
My mother language is not English and my grammar is not that great.
My mother language is not english and my grammar is not that great.
## Wiki
@@ -249,42 +236,6 @@ Since there is no way to make a pull request to wiki's repo, I have set up anoth
https://github.com/louislam/uptime-kuma-wiki
## Docker
### Arch
- amd64
- arm64
- armv7
### Docker Tags
#### v2
- `2`, `latest-2`: v2 with full features such as Chromium and bundled MariaDB
- `2.x.x`
- `2-slim`: v2 with basic features
- `2.x.x-slim`
- `beta2`: Latest beta build
- `2.x.x-beta.x`
- `nightly2`: Dev build
- `base2`: Basic Debian setup without Uptime Kuma source code (Full features)
- `base2-slim`: Basic Debian setup without Uptime Kuma source code
- `pr-test2`: For testing pull request without setting up a local environment
#### v1
- `1`, `latest`, `1-debian`, `debian`: Latest version of v1
- `1.x.x`, `1.x.x-debian`
- `1.x.x-beta.x`: Beta build
- `beta`: Latest beta build
- `nightly`: Dev build
- `base-debian`: Basic Debian setup without Uptime Kuma source code
- `pr-test`: For testing pull request without setting up a local environment
- `base-alpine`: (Deprecated) Basic Alpine setup without Uptime Kuma source code
- `1-alpine`, `alpine`: (Deprecated)
- `1.x.x-alpine`: (Deprecated)
## Maintainer
Check the latest issues and pull requests:
@@ -295,12 +246,12 @@ https://github.com/louislam/uptime-kuma/issues?q=sort%3Aupdated-desc
1. Draft a release note
2. Make sure the repo is cleared
3. If the healthcheck is updated, remember to re-compile it: `npm run build-docker-builder-go`
4. `npm run release-final` with env vars: `VERSION` and `GITHUB_TOKEN`
5. Wait until the `Press any key to continue`
6. `git push`
7. Publish the release note as 1.X.X
8. Press any key to continue
9. Deploy to the demo server: `npm run deploy-demo-server`
3. `npm run release-final` with env vars: `VERSION` and `GITHUB_TOKEN`
4. Wait until the `Press any key to continue`
5. `git push`
6. Publish the release note as 1.X.X
7. Press any key to continue
8. Deploy to the demo server: `npm run deploy-demo-server`
Checking:
@@ -333,11 +284,3 @@ git remote add production https://github.com/louislam/uptime-kuma.wiki.git
git pull
git push production master
```
## Useful Commands
Change the base of a pull request such as `master` to `1.23.X`
```bash
git rebase --onto <new parent> <old parent>
```

View File

@@ -23,17 +23,17 @@ It is a temporary live demo, all data will be deleted after 10 minutes. Use the
## ⭐ Features
- Monitoring uptime for HTTP(s) / TCP / HTTP(s) Keyword / HTTP(s) Json Query / Ping / DNS Record / Push / Steam Game Server / Docker Containers
- Fancy, Reactive, Fast UI/UX
- Notifications via Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP), and [90+ notification services, click here for the full list](https://github.com/louislam/uptime-kuma/tree/master/src/components/notifications)
- 20-second intervals
- [Multi Languages](https://github.com/louislam/uptime-kuma/tree/master/src/lang)
- Multiple status pages
- Map status pages to specific domains
- Ping chart
- Certificate info
- Proxy support
- 2FA support
* Monitoring uptime for HTTP(s) / TCP / HTTP(s) Keyword / HTTP(s) Json Query / Ping / DNS Record / Push / Steam Game Server / Docker Containers
* Fancy, Reactive, Fast UI/UX
* Notifications via Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP), and [90+ notification services, click here for the full list](https://github.com/louislam/uptime-kuma/tree/master/src/components/notifications)
* 20 second intervals
* [Multi Languages](https://github.com/louislam/uptime-kuma/tree/master/src/lang)
* Multiple status pages
* Map status pages to specific domains
* Ping chart
* Certificate info
* Proxy support
* 2FA support
## 🔧 How to Install
@@ -50,7 +50,6 @@ Uptime Kuma is now running on http://localhost:3001
### 💪🏻 Non-Docker
Requirements:
- Platform
- ✅ Major Linux distros such as Debian, Ubuntu, CentOS, Fedora and ArchLinux etc.
- ✅ Windows 10 (x64), Windows Server 2012 R2 (x64) or higher
@@ -61,8 +60,8 @@ Requirements:
- [pm2](https://pm2.keymetrics.io/) - For running Uptime Kuma in the background
```bash
# Update your npm
npm install npm@9 -g
# Update your npm to the latest version
npm install npm -g
git clone https://github.com/louislam/uptime-kuma.git
cd uptime-kuma
@@ -71,14 +70,15 @@ npm run setup
# Option 1. Try it
node server/server.js
# (Recommended) Option 2. Run in the background using PM2
# (Recommended) Option 2. Run in background using PM2
# Install PM2 if you don't have it:
npm install pm2 -g && pm2 install pm2-logrotate
# Start Server
pm2 start server/server.js --name uptime-kuma
```
```
Uptime Kuma is now running on http://localhost:3001
More useful PM2 Commands
@@ -93,7 +93,7 @@ pm2 save && pm2 startup
### Windows Portable (x64)
https://github.com/louislam/uptime-kuma/releases/download/1.23.1/uptime-kuma-windows-x64-portable-1.23.1-2.zip
https://github.com/louislam/uptime-kuma/files/11886108/uptime-kuma-win64-portable-1.0.1.zip
### Advanced Installation
@@ -109,7 +109,7 @@ https://github.com/louislam/uptime-kuma/wiki/%F0%9F%86%99-How-to-Update
## 🆕 What's Next?
I will assign requests/issues to the next milestone.
I will mark requests/issues to the next milestone.
https://github.com/louislam/uptime-kuma/milestones
@@ -143,27 +143,28 @@ Telegram Notification Sample:
## Motivation
- I was looking for a self-hosted monitoring tool like "Uptime Robot", but it is hard to find a suitable one. One of the close ones is statping. Unfortunately, it is not stable and no longer maintained.
- Want to build a fancy UI.
- Learn Vue 3 and vite.js.
- Show the power of Bootstrap 5.
- Try to use WebSocket with SPA instead of REST API.
- Deploy my first Docker image to Docker Hub.
* I was looking for a self-hosted monitoring tool like "Uptime Robot", but it is hard to find a suitable one. One of the close ones is statping. Unfortunately, it is not stable and no longer maintained.
* Want to build a fancy UI.
* Learn Vue 3 and vite.js.
* Show the power of Bootstrap 5.
* Try to use WebSocket with SPA instead of REST API.
* Deploy my first Docker image to Docker Hub.
If you love this project, please consider giving me a ⭐.
## 🗣️ Discussion / Ask for Help
⚠️ For any general or technical questions, please don't send me an email, as I am unable to provide support in that manner. I will not respond if you asked such questions.
⚠️ For any general or technical questions, please don't send me an email, as I am unable to provide support in that manner. I will not response if you asked such questions.
I recommend using Google, GitHub Issues, or Uptime Kuma's Subreddit for finding answers to your question. If you cannot find the information you need, feel free to ask:
- [GitHub Issues](https://github.com/louislam/uptime-kuma/issues)
- [Subreddit r/Uptime kuma](https://www.reddit.com/r/UptimeKuma/)
My Reddit account: [u/louislamlam](https://reddit.com/u/louislamlam).
My Reddit account: [u/louislamlam](https://reddit.com/u/louislamlam).
You can mention me if you ask a question on Reddit.
## Contribute
### Test Pull Requests
@@ -178,18 +179,15 @@ https://github.com/louislam/uptime-kuma/wiki/Test-Pull-Requests
Check out the latest beta release here: https://github.com/louislam/uptime-kuma/releases
### Bug Reports / Feature Requests
If you want to report a bug or request a new feature, feel free to open a [new issue](https://github.com/louislam/uptime-kuma/issues).
### Translations
If you want to translate Uptime Kuma into your language, please visit [Weblate Readme](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).
### Spelling & Grammar
## Spelling & Grammar
Feel free to correct the grammar in the documentation or code.
My mother language is not english and my grammar is not that great.
### Create Pull Requests
If you want to modify Uptime Kuma, please read this guide and follow the rules here: https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md

View File

@@ -3,19 +3,19 @@
## Reporting a Vulnerability
1. Please report security issues to https://github.com/louislam/uptime-kuma/security/advisories/new.
1. Please also create an empty security issue to alert me, as GitHub Advisories do not send a notification, I probably will miss it without this. https://github.com/louislam/uptime-kuma/issues/new?assignees=&labels=help&template=security.md
1. Please also create a empty security issues for alerting me, as GitHub Advisory do not send a notification, I probably will miss without this. https://github.com/louislam/uptime-kuma/issues/new?assignees=&labels=help&template=security.md
Do not use the public issue tracker or discuss it in public as it will cause more damage.
Do not use the public issue tracker or discuss it in the public as it will cause more damage.
## Do you accept other 3rd-party bug bounty platforms?
At this moment, I DO NOT accept other bug bounty platforms, because I am not familiar with these platforms and someone has tried to send a phishing link to me by doing this already. To minimize my own risk, please report through GitHub Advisories only. I will ignore all 3rd-party bug bounty platforms emails.
At this moment, I DO NOT accept other bug bounty platforms, because I am not familiar with these platforms and someone have tried to send a phishing link to me by this already. To minimize my own risk, please report through GitHub Advisories only. I will ignore all 3rd-party bug bounty platforms emails.
## Supported Versions
### Uptime Kuma Versions
You should use or upgrade to the latest version of Uptime Kuma. All `1.X.X` versions are upgradable to the latest version.
You should use or upgrade to the latest version of Uptime Kuma. All `1.X.X` versions are upgradable to the lastest version.
### Upgradable Docker Tags

View File

@@ -272,10 +272,10 @@ async function createTables() {
await knex.schema.createTable("notification", (table) => {
table.increments("id");
table.string("name", 255);
table.string("config", 255); // TODO: should use TEXT!
table.boolean("active").notNullable().defaultTo(true);
table.integer("user_id").unsigned();
table.boolean("is_default").notNullable().defaultTo(false);
table.text("config");
});
// monitor_notification
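This hunk changes `config` from `VARCHAR(255)` to `TEXT` at table-creation time. For an installation whose `notification` table already exists, the equivalent change could be made with a knex migration along these lines (a sketch only, not part of this diff):

```js
exports.up = function (knex) {
    return knex.schema.alterTable("notification", (table) => {
        // Widen config from VARCHAR(255) to TEXT
        table.text("config").alter();
    });
};

exports.down = function (knex) {
    return knex.schema.alterTable("notification", (table) => {
        table.string("config", 255).alter();
    });
};
```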

View File

@@ -1,41 +0,0 @@
exports.up = function (knex) {
return knex.schema
.createTable("stat_minutely", function (table) {
table.increments("id");
table.comment("This table contains the minutely aggregate statistics for each monitor");
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("timestamp")
.notNullable()
.comment("Unix timestamp rounded down to the nearest minute");
table.float("ping").notNullable().comment("Average ping in milliseconds");
table.smallint("up").notNullable();
table.smallint("down").notNullable();
table.unique([ "monitor_id", "timestamp" ]);
})
.createTable("stat_daily", function (table) {
table.increments("id");
table.comment("This table contains the daily aggregate statistics for each monitor");
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("timestamp")
.notNullable()
.comment("Unix timestamp rounded down to the nearest day");
table.float("ping").notNullable().comment("Average ping in milliseconds");
table.smallint("up").notNullable();
table.smallint("down").notNullable();
table.unique([ "monitor_id", "timestamp" ]);
});
};
exports.down = function (knex) {
return knex.schema
.dropTable("stat_minutely")
.dropTable("stat_daily");
};

View File

@@ -1,16 +0,0 @@
exports.up = function (knex) {
// Add new column heartbeat.end_time
return knex.schema
.alterTable("heartbeat", function (table) {
table.datetime("end_time").nullable().defaultTo(null);
});
};
exports.down = function (knex) {
// Rename heartbeat.start_time to heartbeat.time
return knex.schema
.alterTable("heartbeat", function (table) {
table.dropColumn("end_time");
});
};

View File

@@ -1,15 +1,12 @@
# Info
## Info
https://knexjs.org/guide/migrations.html#knexfile-in-other-languages
## Basic rules
- All tables must have a primary key named `id`
- Filename format: `YYYY-MM-DD-HHMM-patch-name.js`
- Avoid native SQL syntax, use knex methods, because Uptime Kuma supports SQLite and MariaDB.
## Template
Filename: YYYYMMDDHHMMSS_name.js
```js
exports.up = function(knex) {
@@ -24,17 +21,19 @@ exports.down = function(knex) {
## Example
Filename: 2023-06-30-1348-create-user-and-product.js
YYYY-MM-DD-HHMM-create-users-products.js
2023-06-30-1348-create-users-products.js
```js
exports.up = function(knex) {
return knex.schema
.createTable('user', function (table) {
.createTable('users', function (table) {
table.increments('id');
table.string('first_name', 255).notNullable();
table.string('last_name', 255).notNullable();
})
.createTable('product', function (table) {
.createTable('products', function (table) {
table.increments('id');
table.decimal('price').notNullable();
table.string('name', 1000).notNullable();
@@ -48,8 +47,8 @@ exports.up = function(knex) {
exports.down = function(knex) {
return knex.schema
.dropTable("product")
.dropTable("user");
.dropTable("products")
.dropTable("users");
};
```

View File

@@ -1,7 +1,5 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor_group
ADD send_url BOOLEAN DEFAULT 0 NOT NULL;
COMMIT;

View File

@@ -1,7 +1,5 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD game VARCHAR(255);
COMMIT;
COMMIT

View File

@@ -1,7 +1,4 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE status_page
ADD google_analytics_tag_id VARCHAR;
ALTER TABLE status_page ADD google_analytics_tag_id VARCHAR;
COMMIT;

View File

@@ -1,7 +1,6 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD parent INTEGER REFERENCES [monitor] ([id]) ON DELETE SET NULL ON UPDATE CASCADE;
COMMIT;
COMMIT

View File

@@ -1,4 +1,3 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
@@ -16,4 +15,4 @@ ALTER TABLE monitor
ALTER TABLE monitor
ADD radius_secret VARCHAR(255);
COMMIT;
COMMIT

View File

@@ -1,6 +1,5 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
CREATE TABLE [api_key] (
[id] INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
[key] VARCHAR(255) NOT NULL,
@@ -11,5 +10,4 @@ CREATE TABLE [api_key] (
[expires] DATETIME DEFAULT NULL,
CONSTRAINT FK_user FOREIGN KEY ([user_id]) REFERENCES [user]([id]) ON DELETE CASCADE ON UPDATE CASCADE
);
COMMIT;

View File

@@ -1,7 +1,5 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD packet_size INTEGER DEFAULT 56 NOT NULL;
COMMIT;

View File

@@ -18,4 +18,5 @@ drop table setting;
alter table setting_dg_tmp rename to setting;
COMMIT;

View File

@@ -1,11 +1,6 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE status_page
ADD footer_text TEXT;
ALTER TABLE status_page
ADD custom_css TEXT;
ALTER TABLE status_page
ADD show_powered_by BOOLEAN NOT NULL DEFAULT 1;
ALTER TABLE status_page ADD footer_text TEXT;
ALTER TABLE status_page ADD custom_css TEXT;
ALTER TABLE status_page ADD show_powered_by BOOLEAN NOT NULL DEFAULT 1;
COMMIT;

View File

@@ -1,4 +1,3 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
CREATE TABLE monitor_tls_info (

View File

@@ -3,5 +3,4 @@ BEGIN TRANSACTION;
ALTER TABLE monitor
ADD timeout DOUBLE default 0 not null;
COMMIT;
COMMIT;

View File

@@ -2,6 +2,8 @@
FROM node:20-bookworm-slim AS base2-slim
ARG TARGETPLATFORM
WORKDIR /app
# Specify --no-install-recommends to skip unused dependencies, make the base much smaller!
# apprise = for notifications (From testing repo)
# sqlite3 = for debugging
@@ -42,10 +44,13 @@ COPY ./docker/etc/sudoers /etc/sudoers
# Full Base Image
# MariaDB, Chromium and fonts
# Not working for armv7, so use the older version (10.5) of MariaDB from the debian repo
# curl -LsS https://r.mariadb.com/downloads/mariadb_repo_setup | bash -s -- --mariadb-server-version="mariadb-11.1" && \
FROM base2-slim AS base2
ENV UPTIME_KUMA_ENABLE_EMBEDDED_MARIADB=1
RUN apt update && \
apt --yes --no-install-recommends install chromium fonts-indic fonts-noto fonts-noto-cjk mariadb-server && \
apt --yes remove curl && \
rm -rf /var/lib/apt/lists/* && \
apt --yes autoremove && \
chown -R node:node /var/lib/mysql

View File

@@ -2,7 +2,7 @@ version: '3.8'
services:
uptime-kuma:
image: louislam/uptime-kuma:1
image: louislam/uptime-kuma:2
container_name: uptime-kuma
volumes:
- uptime-kuma:/app/data

View File

@@ -21,7 +21,6 @@ COPY --chown=node:node package-lock.json package-lock.json
RUN npm ci --omit=dev
COPY . .
COPY --chown=node:node --from=build_healthcheck /app/extra/healthcheck /app/extra/healthcheck
RUN mkdir ./data
############################################
# ⭐ Main Image
@@ -30,8 +29,6 @@ FROM $BASE_IMAGE AS release
USER node
WORKDIR /app
LABEL org.opencontainers.image.source="https://github.com/louislam/uptime-kuma"
ENV UPTIME_KUMA_IS_CONTAINER=1
# Copy app files from build layer

View File

@@ -1,68 +0,0 @@
[
{
"name": "getPushExample",
"description": "Get a push example.",
"params": [
{
"name": "language",
"type": "string",
"description": "The programming language such as `javascript-fetch` or `python`. See the directory ./extra/push-examples for a list of available languages."
}
],
"returnType": "response-json",
"okReturn": [
{
"name": "code",
"type": "string",
"description": "The push example."
}
],
"possibleErrorReasons": [
"The parameter `language` is not available"
],
},
{
"name": "checkApprise",
"description": "Check if the apprise library is installed.",
"params": [],
"returnType": "boolean",
},
{
"name": "getSettings",
"description": "",
"params": [],
"returnType": "response-json",
"okReturn": [
{
"name": "data",
"type": "object",
"description": "The setting object. It does not contain default values."
}
],
"possibleErrorReasons": [],
},
{
"name": "changePassword",
"description": "",
"params": [
{
"name": "password",
"type": "object",
"description": "The password object with the following properties: `currentPassword` and `newPassword`"
}
],
"returnType": "response-json",
"okReturn": [
{
"name": "data",
"type": "object",
"description": "The setting object. It does not contain default values."
}
],
"possibleErrorReasons": [
"Incorrect current password",
"Invalid new password",
"Password is too weak"
],
}
]
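Assuming these entries describe socket.io events acknowledged with a callback (the `ok`/`msg` envelope below is an assumption; only the event and field names come from the file above), a client call might look like:

```js
const { io } = require("socket.io-client");

const socket = io("http://localhost:3001");

// Request a push example for a given language
socket.emit("getPushExample", "python", (res) => {
    if (res.ok) {
        console.log(res.code); // the push example source code
    } else {
        console.error(res.msg);
    }
});
```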

extra/cli.js (new file, 75 lines)
View File

@@ -0,0 +1,75 @@
#!/usr/bin/env node
const path = require("path");
const args = require("args-parser")(process.argv);
// Set the data directory
if (!process.env.DATA_DIR) {
if (process.platform === "win32") {
process.env.DATA_DIR = process.env.LOCALAPPDATA + "\\uptime-kuma\\";
} else if (process.platform === "linux") {
process.env.DATA_DIR = process.env.HOME + "/.local/share/uptime-kuma/";
} else if (process.platform === "darwin") {
// TODO: Not sure if this is the correct path for macOS
process.env.DATA_DIR = process.env.HOME + "/Library/Preferences/uptime-kuma/";
} else {
console.error("Unable to detect app data directory on platform: " + process.platform);
console.error("Please set the DATA_DIR environment variable or `--data-dir=` to the directory where you want to store your data.");
process.exit(1);
}
}
// Change the working directory to the root of the project, so it can read the dist folder
process.chdir(path.join(__dirname, ".."));
if (args.run) {
require("../server/server");
} else if (args.installService) {
if (process.platform === "win32") {
let Service = require("node-windows").Service;
// Create a new service object
let svc = new Service({
name: "Uptime Kuma",
description: "Uptime Kuma is an easy-to-use self-hosted monitoring tool.",
script: "C:\\path\\to\\helloworld.js",
nodeOptions: [
"--harmony",
"--max_old_space_size=4096"
]
//, workingDirectory: '...'
//, allowServiceLogon: true
});
// Listen for the "install" event, which indicates the
// process is available as a service.
svc.on("install", function () {
svc.start();
});
svc.install();
} else if (process.platform === "linux") {
} else {
console.error("Unable to install service on platform: " + process.platform);
process.exit(1);
}
} else if (args.version || args.v) {
const version = require("../package.json").version;
console.log("Uptime Kuma version: " + version);
} else {
console.log(`Usage: uptime-kuma [options]
Options:
--install-service Install Uptime Kuma service (Windows and Linux only)
--uninstall-service Uninstall Uptime Kuma service
--run Run Uptime Kuma directly in the terminal
--data-dir="your path" Set the data directory
--version Print the version
--help Print this help
`);
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Drawing;
@@ -28,15 +28,9 @@ namespace UptimeKuma {
Environment.CurrentDirectory = cwd;
}
bool isIntranet = args.Contains("--intranet");
if (isIntranet) {
Console.WriteLine("The --intranet argument was provided, so we will not try to access the internet. The first time this application runs you'll need to run it without the --intranet param or copy the result from another machine to the intranet server.");
}
Application.EnableVisualStyles();
Application.SetCompatibleTextRenderingDefault(false);
Application.Run(new UptimeKumaApplicationContext(isIntranet));
Application.Run(new UptimeKumaApplicationContext());
}
}
@@ -55,9 +49,8 @@ namespace UptimeKuma {
private RegistryKey registryKey = Registry.CurrentUser.OpenSubKey("SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Run", true);
private readonly bool intranetOnly;
public UptimeKumaApplicationContext(bool intranetOnly) {
public UptimeKumaApplicationContext() {
// Single instance only
bool createdNew;
@@ -66,8 +59,6 @@ namespace UptimeKuma {
return;
}
this.intranetOnly = intranetOnly;
var startingText = "Starting server...";
trayIcon = new NotifyIcon();
trayIcon.Text = startingText;
@@ -107,10 +98,6 @@ namespace UptimeKuma {
}
void DownloadFiles() {
if (intranetOnly) {
return;
}
var form = new DownloadForm();
form.Closed += Exit;
form.Show();
@@ -186,9 +173,7 @@ namespace UptimeKuma {
}
void CheckForUpdate(object sender, EventArgs e) {
if (intranetOnly) {
return;
}
var needUpdate = false;
// Check version.json exists
if (File.Exists("version.json")) {
@@ -219,12 +204,8 @@ namespace UptimeKuma {
}
void VisitGitHub(object sender, EventArgs e) {
if (intranetOnly) {
MessageBox.Show("You have parsed in --intranet so we will not try to access the internet or visit github.com, please go to https://github.com/louislam/uptime-kuma if you want to visit github.");
return;
}
void VisitGitHub(object sender, EventArgs e)
{
Process.Start("https://github.com/louislam/uptime-kuma");
}

View File

@@ -32,5 +32,5 @@ using System.Runtime.InteropServices;
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.2.0")]
[assembly: AssemblyFileVersion("1.0.2.0")]
[assembly: AssemblyVersion("1.0.1.0")]
[assembly: AssemblyFileVersion("1.0.1.0")]

View File

@@ -1,5 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="15.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<Import Project="packages\Costura.Fody.5.7.0\build\Costura.Fody.props" Condition="Exists('packages\Costura.Fody.5.7.0\build\Costura.Fody.props')" />
<Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
@@ -38,104 +39,107 @@
<ApplicationManifest>app.manifest</ApplicationManifest>
</PropertyGroup>
<PropertyGroup>
<PostBuildEvent>COPY "$(SolutionDir)bin\Debug\uptime-kuma.exe" "%UserProfile%\Desktop\uptime-kuma-win64\"</PostBuildEvent>
<PostBuildEvent>COPY "$(SolutionDir)bin\Debug\uptime-kuma.exe" "%UserProfile%\Desktop\uptime-kuma-win64\"</PostBuildEvent>
</PropertyGroup>
<ItemGroup>
<Reference Include="Costura, Version=5.7.0.0, Culture=neutral, processorArchitecture=MSIL">
<HintPath>packages\Costura.Fody.5.7.0\lib\netstandard1.0\Costura.dll</HintPath>
</Reference>
<Reference Include="Microsoft.Win32.Primitives, Version=4.0.2.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\Microsoft.Win32.Primitives.4.3.0\lib\net46\Microsoft.Win32.Primitives.dll</HintPath>
<HintPath>packages\Microsoft.Win32.Primitives.4.3.0\lib\net46\Microsoft.Win32.Primitives.dll</HintPath>
</Reference>
<Reference Include="mscorlib" />
<Reference Include="Newtonsoft.Json, Version=13.0.0.0, Culture=neutral, PublicKeyToken=30ad4fe6b2a6aeed, processorArchitecture=MSIL">
<HintPath>packages\Newtonsoft.Json.13.0.2\lib\net45\Newtonsoft.Json.dll</HintPath>
<HintPath>packages\Newtonsoft.Json.13.0.2\lib\net45\Newtonsoft.Json.dll</HintPath>
</Reference>
<Reference Include="System" />
<Reference Include="System.AppContext, Version=4.1.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.AppContext.4.3.0\lib\net463\System.AppContext.dll</HintPath>
<HintPath>packages\System.AppContext.4.3.0\lib\net463\System.AppContext.dll</HintPath>
</Reference>
<Reference Include="System.Buffers, Version=4.0.3.0, Culture=neutral, PublicKeyToken=cc7b13ffcd2ddd51, processorArchitecture=MSIL">
<HintPath>packages\System.Buffers.4.5.1\lib\net461\System.Buffers.dll</HintPath>
<HintPath>packages\System.Buffers.4.5.1\lib\net461\System.Buffers.dll</HintPath>
</Reference>
<Reference Include="System.ComponentModel.Composition" />
<Reference Include="System.Console, Version=4.0.1.1, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Console.4.3.1\lib\net46\System.Console.dll</HintPath>
<HintPath>packages\System.Console.4.3.1\lib\net46\System.Console.dll</HintPath>
</Reference>
<Reference Include="System.Core" />
<Reference Include="System.Diagnostics.DiagnosticSource, Version=7.0.0.1, Culture=neutral, PublicKeyToken=cc7b13ffcd2ddd51, processorArchitecture=MSIL">
<HintPath>packages\System.Diagnostics.DiagnosticSource.7.0.1\lib\net462\System.Diagnostics.DiagnosticSource.dll</HintPath>
<HintPath>packages\System.Diagnostics.DiagnosticSource.7.0.1\lib\net462\System.Diagnostics.DiagnosticSource.dll</HintPath>
</Reference>
<Reference Include="System.Diagnostics.Tracing, Version=4.1.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Diagnostics.Tracing.4.3.0\lib\net462\System.Diagnostics.Tracing.dll</HintPath>
<HintPath>packages\System.Diagnostics.Tracing.4.3.0\lib\net462\System.Diagnostics.Tracing.dll</HintPath>
</Reference>
<Reference Include="System.Globalization.Calendars, Version=4.0.2.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Globalization.Calendars.4.3.0\lib\net46\System.Globalization.Calendars.dll</HintPath>
<HintPath>packages\System.Globalization.Calendars.4.3.0\lib\net46\System.Globalization.Calendars.dll</HintPath>
</Reference>
<Reference Include="System.IO, Version=4.1.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.IO.4.3.0\lib\net462\System.IO.dll</HintPath>
<HintPath>packages\System.IO.4.3.0\lib\net462\System.IO.dll</HintPath>
</Reference>
<Reference Include="System.IO.Compression, Version=4.1.2.0, Culture=neutral, PublicKeyToken=b77a5c561934e089, processorArchitecture=MSIL">
<HintPath>packages\System.IO.Compression.4.3.0\lib\net46\System.IO.Compression.dll</HintPath>
<HintPath>packages\System.IO.Compression.4.3.0\lib\net46\System.IO.Compression.dll</HintPath>
</Reference>
<Reference Include="System.IO.Compression.FileSystem" />
<Reference Include="System.IO.Compression.ZipFile, Version=4.0.2.0, Culture=neutral, PublicKeyToken=b77a5c561934e089, processorArchitecture=MSIL">
<HintPath>packages\System.IO.Compression.ZipFile.4.3.0\lib\net46\System.IO.Compression.ZipFile.dll</HintPath>
<HintPath>packages\System.IO.Compression.ZipFile.4.3.0\lib\net46\System.IO.Compression.ZipFile.dll</HintPath>
</Reference>
<Reference Include="System.IO.FileSystem, Version=4.0.2.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.IO.FileSystem.4.3.0\lib\net46\System.IO.FileSystem.dll</HintPath>
<HintPath>packages\System.IO.FileSystem.4.3.0\lib\net46\System.IO.FileSystem.dll</HintPath>
</Reference>
<Reference Include="System.IO.FileSystem.Primitives, Version=4.0.2.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.IO.FileSystem.Primitives.4.3.0\lib\net46\System.IO.FileSystem.Primitives.dll</HintPath>
<HintPath>packages\System.IO.FileSystem.Primitives.4.3.0\lib\net46\System.IO.FileSystem.Primitives.dll</HintPath>
</Reference>
<Reference Include="System.Linq, Version=4.1.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Linq.4.3.0\lib\net463\System.Linq.dll</HintPath>
<HintPath>packages\System.Linq.4.3.0\lib\net463\System.Linq.dll</HintPath>
</Reference>
<Reference Include="System.Linq.Expressions, Version=4.1.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Linq.Expressions.4.3.0\lib\net463\System.Linq.Expressions.dll</HintPath>
<HintPath>packages\System.Linq.Expressions.4.3.0\lib\net463\System.Linq.Expressions.dll</HintPath>
</Reference>
<Reference Include="System.Memory, Version=4.0.1.2, Culture=neutral, PublicKeyToken=cc7b13ffcd2ddd51, processorArchitecture=MSIL">
<HintPath>packages\System.Memory.4.5.5\lib\net461\System.Memory.dll</HintPath>
<HintPath>packages\System.Memory.4.5.5\lib\net461\System.Memory.dll</HintPath>
</Reference>
<Reference Include="System.Net.Http, Version=4.1.1.3, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Net.Http.4.3.4\lib\net46\System.Net.Http.dll</HintPath>
<HintPath>packages\System.Net.Http.4.3.4\lib\net46\System.Net.Http.dll</HintPath>
</Reference>
<Reference Include="System.Net.Sockets, Version=4.1.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Net.Sockets.4.3.0\lib\net46\System.Net.Sockets.dll</HintPath>
<HintPath>packages\System.Net.Sockets.4.3.0\lib\net46\System.Net.Sockets.dll</HintPath>
</Reference>
<Reference Include="System.Numerics" />
<Reference Include="System.Numerics.Vectors, Version=4.1.4.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Numerics.Vectors.4.5.0\lib\net46\System.Numerics.Vectors.dll</HintPath>
<HintPath>packages\System.Numerics.Vectors.4.5.0\lib\net46\System.Numerics.Vectors.dll</HintPath>
</Reference>
<Reference Include="System.Reflection, Version=4.1.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Reflection.4.3.0\lib\net462\System.Reflection.dll</HintPath>
<HintPath>packages\System.Reflection.4.3.0\lib\net462\System.Reflection.dll</HintPath>
</Reference>
<Reference Include="System.Runtime, Version=4.1.1.1, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Runtime.4.3.1\lib\net462\System.Runtime.dll</HintPath>
<HintPath>packages\System.Runtime.4.3.1\lib\net462\System.Runtime.dll</HintPath>
</Reference>
<Reference Include="System.Runtime.CompilerServices.Unsafe, Version=6.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Runtime.CompilerServices.Unsafe.6.0.0\lib\net461\System.Runtime.CompilerServices.Unsafe.dll</HintPath>
<HintPath>packages\System.Runtime.CompilerServices.Unsafe.6.0.0\lib\net461\System.Runtime.CompilerServices.Unsafe.dll</HintPath>
</Reference>
<Reference Include="System.Runtime.Extensions, Version=4.1.1.1, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Runtime.Extensions.4.3.1\lib\net462\System.Runtime.Extensions.dll</HintPath>
<HintPath>packages\System.Runtime.Extensions.4.3.1\lib\net462\System.Runtime.Extensions.dll</HintPath>
</Reference>
<Reference Include="System.Runtime.InteropServices, Version=4.1.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Runtime.InteropServices.4.3.0\lib\net463\System.Runtime.InteropServices.dll</HintPath>
<HintPath>packages\System.Runtime.InteropServices.4.3.0\lib\net463\System.Runtime.InteropServices.dll</HintPath>
</Reference>
<Reference Include="System.Runtime.InteropServices.RuntimeInformation, Version=4.0.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Runtime.InteropServices.RuntimeInformation.4.3.0\lib\net45\System.Runtime.InteropServices.RuntimeInformation.dll</HintPath>
<HintPath>packages\System.Runtime.InteropServices.RuntimeInformation.4.3.0\lib\net45\System.Runtime.InteropServices.RuntimeInformation.dll</HintPath>
</Reference>
<Reference Include="System.Security.Cryptography.Algorithms, Version=4.2.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Security.Cryptography.Algorithms.4.3.1\lib\net463\System.Security.Cryptography.Algorithms.dll</HintPath>
<HintPath>packages\System.Security.Cryptography.Algorithms.4.3.1\lib\net463\System.Security.Cryptography.Algorithms.dll</HintPath>
</Reference>
<Reference Include="System.Security.Cryptography.Encoding, Version=4.0.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Security.Cryptography.Encoding.4.3.0\lib\net46\System.Security.Cryptography.Encoding.dll</HintPath>
<HintPath>packages\System.Security.Cryptography.Encoding.4.3.0\lib\net46\System.Security.Cryptography.Encoding.dll</HintPath>
</Reference>
<Reference Include="System.Security.Cryptography.Primitives, Version=4.0.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Security.Cryptography.Primitives.4.3.0\lib\net46\System.Security.Cryptography.Primitives.dll</HintPath>
<HintPath>packages\System.Security.Cryptography.Primitives.4.3.0\lib\net46\System.Security.Cryptography.Primitives.dll</HintPath>
</Reference>
<Reference Include="System.Security.Cryptography.X509Certificates, Version=4.1.1.2, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Security.Cryptography.X509Certificates.4.3.2\lib\net461\System.Security.Cryptography.X509Certificates.dll</HintPath>
<HintPath>packages\System.Security.Cryptography.X509Certificates.4.3.2\lib\net461\System.Security.Cryptography.X509Certificates.dll</HintPath>
</Reference>
<Reference Include="System.Text.RegularExpressions, Version=4.1.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Text.RegularExpressions.4.3.1\lib\net463\System.Text.RegularExpressions.dll</HintPath>
<HintPath>packages\System.Text.RegularExpressions.4.3.1\lib\net463\System.Text.RegularExpressions.dll</HintPath>
</Reference>
<Reference Include="System.Xml.Linq" />
<Reference Include="System.Data.DataSetExtensions" />
@@ -146,21 +150,21 @@
<Reference Include="System.Windows.Forms" />
<Reference Include="System.Xml" />
<Reference Include="System.Xml.ReaderWriter, Version=4.1.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Xml.ReaderWriter.4.3.1\lib\net46\System.Xml.ReaderWriter.dll</HintPath>
<HintPath>packages\System.Xml.ReaderWriter.4.3.1\lib\net46\System.Xml.ReaderWriter.dll</HintPath>
</Reference>
</ItemGroup>
<ItemGroup>
<Compile Include="DownloadForm.cs">
<SubType>Form</SubType>
<SubType>Form</SubType>
</Compile>
<Compile Include="DownloadForm.Designer.cs">
<DependentUpon>DownloadForm.cs</DependentUpon>
<DependentUpon>DownloadForm.cs</DependentUpon>
</Compile>
<Compile Include="Program.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<Compile Include="Version.cs" />
<EmbeddedResource Include="DownloadForm.resx">
<DependentUpon>DownloadForm.cs</DependentUpon>
<DependentUpon>DownloadForm.cs</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Include="Properties\Resources.resx">
<Generator>ResXFileCodeGenerator</Generator>
@@ -172,7 +176,7 @@
<DependentUpon>Resources.resx</DependentUpon>
</Compile>
<None Include="..\..\public\favicon.ico">
<Link>favicon.ico</Link>
<Link>favicon.ico</Link>
</None>
<None Include="packages.config" />
<None Include="Properties\Settings.settings">
@@ -189,15 +193,20 @@
<None Include="App.config" />
</ItemGroup>
<ItemGroup>
<Content Include=".gitignore" />
<Content Include="app.manifest" />
<Content Include=".gitignore" />
<Content Include="app.manifest" />
</ItemGroup>
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
<Target Name="EnsureNuGetPackageBuildImports" BeforeTargets="PrepareForBuild">
<PropertyGroup>
<ErrorText>This project references NuGet package(s) that are missing on this computer. Enable NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105.The missing file is {0}.</ErrorText>
</PropertyGroup>
<Error Condition="!Exists('packages\NETStandard.Library.2.0.3\build\netstandard2.0\NETStandard.Library.targets')" Text="$([System.String]::Format('$(ErrorText)', 'packages\NETStandard.Library.2.0.3\build\netstandard2.0\NETStandard.Library.targets'))" />
<PropertyGroup>
<ErrorText>This project references NuGet package(s) that are missing on this computer. Enable NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105.The missing file is {0}.</ErrorText>
</PropertyGroup>
<Error Condition="!Exists('packages\Costura.Fody.5.7.0\build\Costura.Fody.props')" Text="$([System.String]::Format('$(ErrorText)', 'packages\Costura.Fody.5.7.0\build\Costura.Fody.props'))" />
<Error Condition="!Exists('packages\Costura.Fody.5.7.0\build\Costura.Fody.targets')" Text="$([System.String]::Format('$(ErrorText)', 'packages\Costura.Fody.5.7.0\build\Costura.Fody.targets'))" />
<Error Condition="!Exists('packages\Fody.6.6.4\build\Fody.targets')" Text="$([System.String]::Format('$(ErrorText)', 'packages\Fody.6.6.4\build\Fody.targets'))" />
<Error Condition="!Exists('packages\NETStandard.Library.2.0.3\build\netstandard2.0\NETStandard.Library.targets')" Text="$([System.String]::Format('$(ErrorText)', 'packages\NETStandard.Library.2.0.3\build\netstandard2.0\NETStandard.Library.targets'))" />
</Target>
<Import Project="packages\Costura.Fody.5.7.0\build\Costura.Fody.targets" Condition="Exists('packages\Costura.Fody.5.7.0\build\Costura.Fody.targets')" />
<Import Project="packages\Fody.6.6.4\build\Fody.targets" Condition="Exists('packages\Fody.6.6.4\build\Fody.targets')" />
<Import Project="packages\NETStandard.Library.2.0.3\build\netstandard2.0\NETStandard.Library.targets" Condition="Exists('packages\NETStandard.Library.2.0.3\build\netstandard2.0\NETStandard.Library.targets')" />
</Project>

View File

@@ -1,5 +1,7 @@
<?xml version="1.0" encoding="utf-8"?>
<packages>
<package id="Costura.Fody" version="5.7.0" targetFramework="net472" developmentDependency="true" />
<package id="Fody" version="6.6.4" targetFramework="net472" developmentDependency="true" />
<package id="Microsoft.NETCore.Platforms" version="7.0.0" targetFramework="net472" />
<package id="Microsoft.Win32.Primitives" version="4.3.0" targetFramework="net472" />
<package id="NETStandard.Library" version="2.0.3" targetFramework="net472" />
@@ -51,4 +53,4 @@
<package id="System.Threading.Tasks" version="4.3.0" targetFramework="net472" />
<package id="System.Threading.Timer" version="4.3.0" targetFramework="net472" />
<package id="System.Xml.XDocument" version="4.3.0" targetFramework="net472" />
</packages>
</packages>

View File

@@ -189,15 +189,13 @@ if (type == "local") {
bash("check=$(git --version)");
if (check == "") {
error = 1;
println("Error: git is not found!");
println("help: an installation guide is available at https://git-scm.com/book/en/v2/Getting-Started-Installing-Git");
println("Error: git is missing");
}
bash("check=$(node -v)");
if (check == "") {
error = 1;
println("Error: node is not found");
println("help: an installation guide is available at https://nodejs.org/en/download");
println("Error: node is missing");
}
if (error > 0) {
@@ -218,7 +216,6 @@ if (type == "local") {
bash("check=$(pm2 --version)");
if (check == "") {
println("Error: pm2 is not found!");
println("help: an installation guide is available at https://pm2.keymetrics.io/docs/usage/quick-start/");
bash("exit 1");
}
@@ -235,7 +232,6 @@ if (type == "local") {
bash("check=$(docker -v)");
if (check == "") {
println("Error: docker is not found!");
println("help: an installation guide is available at https://docs.docker.com/desktop/");
bash("exit 1");
}
@@ -243,7 +239,6 @@ if (type == "local") {
bash("if [[ \"$check\" == *\"Is the docker daemon running\"* ]]; then
\"echo\" \"Error: docker is not running\"
\"echo\" \"help: a troubleshooting guide is available at https://docs.docker.com/config/daemon/troubleshoot/\"
\"exit\" \"1\"
fi");

View File

@@ -1,3 +0,0 @@
java/Index.class
csharp/index.exe
typescript-fetch/index.js

View File

@@ -1,10 +0,0 @@
#!/bin/bash
# Filename: index.sh
PUSH_URL="https://example.com/api/push/key?status=up&msg=OK&ping="
INTERVAL=60
while true; do
curl -s -o /dev/null $PUSH_URL
echo "Pushed!"
sleep $INTERVAL
done

View File

@@ -1,24 +0,0 @@
using System;
using System.Net;
using System.Threading;
/**
* Compile: C:\Windows\Microsoft.NET\Framework\v4.0.30319\csc.exe index.cs
* Run: index.exe
*/
class Index
{
const string PushURL = "https://example.com/api/push/key?status=up&msg=OK&ping=";
const int Interval = 60;
static void Main(string[] args)
{
while (true)
{
WebClient client = new WebClient();
client.DownloadString(PushURL);
Console.WriteLine("Pushed!");
Thread.Sleep(Interval * 1000);
}
}
}

View File

@@ -1 +0,0 @@
docker run -d --restart=always --name uptime-kuma-push louislam/uptime-kuma:push "https://example.com/api/push/key?status=up&msg=OK&ping=" 60

View File

@@ -1,20 +0,0 @@
package main
import (
"fmt"
"net/http"
"time"
)
func main() {
const PushURL = "https://example.com/api/push/key?status=up&msg=OK&ping="
const Interval = 60
for {
_, err := http.Get(PushURL)
if err == nil {
fmt.Println("Pushed!")
}
time.Sleep(Interval * time.Second)
}
}

View File

@@ -1,32 +0,0 @@
import java.net.HttpURLConnection;
import java.net.URL;
/**
* Compile: javac index.java
* Run: java Index
*/
class Index {
public static final String PUSH_URL = "https://example.com/api/push/key?status=up&msg=OK&ping=";
public static final int INTERVAL = 60;
public static void main(String[] args) {
while (true) {
try {
URL url = new URL(PUSH_URL);
HttpURLConnection con = (HttpURLConnection) url.openConnection();
con.setRequestMethod("GET");
con.getResponseCode();
con.disconnect();
System.out.println("Pushed!");
} catch (Exception e) {
e.printStackTrace();
}
try {
Thread.sleep(INTERVAL * 1000);
} catch (Exception e) {
e.printStackTrace();
}
}
}
}

View File

@@ -1,11 +0,0 @@
// Supports: Node.js >= 18, Deno, Bun
const pushURL = "https://example.com/api/push/key?status=up&msg=OK&ping=";
const interval = 60;
const push = async () => {
await fetch(pushURL);
console.log("Pushed!");
};
push();
setInterval(push, interval * 1000);
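As a side note, a slightly more defensive variant of this deleted example would catch network failures so a single failed request does not surface as an unhandled rejection. This is only a sketch using the same placeholder push URL, not code from the repository:

```js
// Sketch: same placeholder push URL as above, with basic error handling added.
const pushURL = "https://example.com/api/push/key?status=up&msg=OK&ping=";
const interval = 60;

const push = async () => {
    try {
        const res = await fetch(pushURL);
        console.log(`Pushed! (HTTP ${res.status})`);
    } catch (err) {
        // Log and keep going; the next tick will retry the push.
        console.error("Push failed:", err.message);
    }
};

push();
setInterval(push, interval * 1000);
```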

View File

@@ -1,5 +0,0 @@
{
"scripts": {
"start": "node index.js"
}
}

View File

@@ -1,13 +0,0 @@
<?php
const PUSH_URL = "https://example.com/api/push/key?status=up&msg=OK&ping=";
const interval = 60;
while (true) {
$ch = curl_init();
curl_setopt($ch, CURLOPT_URL, PUSH_URL);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
curl_exec($ch);
curl_close($ch);
echo "Pushed!\n";
sleep(interval);
}

View File

@@ -1,9 +0,0 @@
# Filename: index.ps1
$pushURL = "https://example.com/api/push/key?status=up&msg=OK&ping="
$interval = 60
while ($true) {
$res = Invoke-WebRequest -Uri $pushURL
Write-Host "Pushed!"
Start-Sleep -Seconds $interval
}

View File

@@ -1,10 +0,0 @@
import urllib.request
import time
push_url = "https://example.com/api/push/key?status=up&msg=OK&ping="
interval = 60
while True:
urllib.request.urlopen(push_url)
print("Pushed!\n")
time.sleep(interval)

View File

@@ -1,19 +0,0 @@
# How to run
Node.js (ts-node)
```bash
ts-node index.ts
```
Deno
```bash
deno run --allow-net index.ts
```
Bun.js
```bash
bun index.ts
```
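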

View File

@@ -1,11 +0,0 @@
// Supports: Deno, Bun, Node.js >= 18 (ts-node)
const pushURL : string = "https://example.com/api/push/key?status=up&msg=OK&ping=";
const interval : number = 60;
const push = async () => {
await fetch(pushURL);
console.log("Pushed!");
};
push();
setInterval(push, interval * 1000);

View File

@@ -1,13 +0,0 @@
{
"scripts": {
"ts-node": "ts-node index.ts",
"deno": "deno run --allow-net index.ts",
"bun": "bun index.ts"
},
"devDependencies": {
"@types/node": "^20.6.0",
"ts-node": "^10.9.1",
"tslib": "^2.6.2",
"typescript": "^5.2.2"
}
}

View File

@@ -1,40 +0,0 @@
const { execSync } = require("child_process");
/**
* Rebase a PR onto a target branch such as 1.23.X or master
* @returns {Promise<void>}
*/
async function main() {
const branch = process.argv[2];
// Use gh to get current branch's pr id
let currentBranchPRID = execSync("gh pr view --json number --jq \".number\"").toString().trim();
console.log("Pr ID: ", currentBranchPRID);
// Use gh commend to get pr commits
const prCommits = JSON.parse(execSync(`gh pr view ${currentBranchPRID} --json commits`).toString().trim()).commits;
console.log("Found commits: ", prCommits.length);
// Sort the commits by authoredDate
prCommits.sort((a, b) => {
return new Date(a.authoredDate) - new Date(b.authoredDate);
});
// Get the oldest commit id
const oldestCommitID = prCommits[0].oid;
console.log("Oldest commit id of this pr:", oldestCommitID);
// Get the latest commit id of the target branch
const latestCommitID = execSync(`git rev-parse origin/${branch}`).toString().trim();
console.log("Latest commit id of " + branch + ":", latestCommitID);
// Get the original parent commit id of the oldest commit
const originalParentCommitID = execSync(`git log --pretty=%P -n 1 "${oldestCommitID}"`).toString().trim();
console.log("Original parent commit id of the oldest commit:", originalParentCommitID);
// Rebase the pr onto the target branch
execSync(`git rebase --onto ${latestCommitID} ${originalParentCommitID}`);
}
main();

View File

@@ -1 +0,0 @@
build/*

View File

@@ -1,18 +0,0 @@
FROM node AS build
RUN useradd --create-home kuma
USER kuma
WORKDIR /home/kuma
ARG TARGETPLATFORM
COPY --chown=kuma:kuma ./build/ ./build/
COPY --chown=kuma:kuma build.js build.js
RUN node build.js $TARGETPLATFORM
FROM debian:bookworm-slim AS release
RUN useradd --create-home kuma
USER kuma
WORKDIR /home/kuma
COPY --from=build /home/kuma/uptime-kuma-push ./uptime-kuma-push
ENTRYPOINT ["/home/kuma/uptime-kuma-push"]

View File

@@ -1,48 +0,0 @@
const fs = require("fs");
const platform = process.argv[2];
if (!platform) {
console.error("No platform??");
process.exit(1);
}
const supportedPlatforms = [
{
name: "linux/amd64",
bin: "./build/uptime-kuma-push-amd64"
},
{
name: "linux/arm64",
bin: "./build/uptime-kuma-push-arm64"
},
{
name: "linux/arm/v7",
bin: "./build/uptime-kuma-push-armv7"
}
];
let platformObj = null;
// Check if the platform is supported
for (let i = 0; i < supportedPlatforms.length; i++) {
if (supportedPlatforms[i].name === platform) {
platformObj = supportedPlatforms[i];
break;
}
}
if (platformObj) {
let filename = platformObj.bin;
if (!fs.existsSync(filename)) {
console.error(`prebuilt: ${filename} is not found, please build it first`);
process.exit(1);
}
fs.renameSync(filename, "./uptime-kuma-push");
process.exit(0);
} else {
console.error("Unsupported platform: " + platform);
process.exit(1);
}

View File

@@ -1,13 +0,0 @@
{
"scripts": {
"build-docker": "npm run build-all && docker buildx build --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:push . --push --target release",
"build-all": "npm run build-win && npm run build-linux-amd64 && npm run build-linux-arm64 && npm run build-linux-armv7 && npm run build-linux-armv6 && npm run build-linux-armv5 && npm run build-linux-riscv64",
"build-win": "cross-env GOOS=windows GOARCH=amd64 go build -x -o ./build/uptime-kuma-push.exe uptime-kuma-push.go",
"build-linux-amd64": "cross-env GOOS=linux GOARCH=amd64 go build -x -o ./build/uptime-kuma-push-amd64 uptime-kuma-push.go",
"build-linux-arm64": "cross-env GOOS=linux GOARCH=arm64 go build -x -o ./build/uptime-kuma-push-arm64 uptime-kuma-push.go",
"build-linux-armv7": "cross-env GOOS=linux GOARCH=arm GOARM=7 go build -x -o ./build/uptime-kuma-push-armv7 uptime-kuma-push.go",
"build-linux-armv6": "cross-env GOOS=linux GOARCH=arm GOARM=6 go build -x -o ./build/uptime-kuma-push-armv6 uptime-kuma-push.go",
"build-linux-armv5": "cross-env GOOS=linux GOARCH=arm GOARM=5 go build -x -o ./build/uptime-kuma-push-armv5 uptime-kuma-push.go",
"build-linux-riscv64": "cross-env GOOS=linux GOARCH=riscv64 go build -x -o ./build/uptime-kuma-push-riscv64 uptime-kuma-push.go"
}
}

View File

@@ -1,44 +0,0 @@
package main
import (
"fmt"
"net/http"
os "os"
"time"
)
func main() {
if len(os.Args) < 2 {
fmt.Fprintln(os.Stderr, "Usage: uptime-kuma-push <url> [<interval>]")
os.Exit(1)
}
pushURL := os.Args[1]
var interval time.Duration
if len(os.Args) >= 3 {
intervalString, err := time.ParseDuration(os.Args[2] + "s")
interval = intervalString
if err != nil {
fmt.Fprintln(os.Stderr, "Error: Invalid interval", err)
os.Exit(1)
}
} else {
interval = 60 * time.Second
}
for {
_, err := http.Get(pushURL)
if err == nil {
fmt.Print("Pushed!")
} else {
fmt.Print("Error: ", err)
}
fmt.Println(" Sleeping for", interval)
time.Sleep(interval)
}
}

View File

@@ -9,24 +9,8 @@
<meta name="theme-color" id="theme-color" content="" />
<meta name="description" content="Uptime Kuma monitoring tool" />
<title>Uptime Kuma</title>
<style>
.noscript-message {
font-size: 20px;
text-align: center;
padding: 10px;
max-width: 500px;
margin: 0 auto;
}
</style>
</head>
<body>
<noscript>
<div class="noscript-message">
Sorry, you don't seem to have JavaScript enabled or your browser
doesn't support it.<br />This website requires JavaScript to function.
Please enable JavaScript in your browser settings to continue.
</div>
</noscript>
<div id="app"></div>
<script type="module" src="/src/main.js"></script>
</body>

View File

@@ -156,14 +156,12 @@ fi
check=$(git --version)
if [ "$check" == "" ]; then
error=$((1))
"echo" "-e" "Error: git is not found!"
"echo" "-e" "help: an installation guide is available at https://git-scm.com/book/en/v2/Getting-Started-Installing-Git"
"echo" "-e" "Error: git is missing"
fi
check=$(node -v)
if [ "$check" == "" ]; then
error=$((1))
"echo" "-e" "Error: node is not found"
"echo" "-e" "help: an installation guide is available at https://nodejs.org/en/download"
"echo" "-e" "Error: node is missing"
fi
if [ $(($error > 0)) == 1 ]; then
"echo" "-e" "Please install above missing software"
@@ -182,7 +180,6 @@ fi
check=$(pm2 --version)
if [ "$check" == "" ]; then
"echo" "-e" "Error: pm2 is not found!"
"echo" "-e" "help: an installation guide is available at https://pm2.keymetrics.io/docs/usage/quick-start/"
exit 1
fi
mkdir -p $installPath
@@ -195,13 +192,11 @@ else
check=$(docker -v)
if [ "$check" == "" ]; then
"echo" "-e" "Error: docker is not found!"
"echo" "-e" "help: an installation guide is available at https://docs.docker.com/desktop/"
exit 1
fi
check=$(docker info)
if [[ "$check" == *"Is the docker daemon running"* ]]; then
"echo" "Error: docker is not running"
"echo" "help: a troubleshooting guide is available at https://docs.docker.com/config/daemon/troubleshoot/"
"exit" "1"
fi
if [ "$3" != "" ]; then

596
package-lock.json generated
View File

@@ -1,12 +1,12 @@
{
"name": "uptime-kuma",
"version": "1.23.2",
"version": "1.23.0-beta.1",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "uptime-kuma",
"version": "1.23.2",
"version": "1.23.0-beta.1",
"license": "MIT",
"dependencies": {
"@grpc/grpc-js": "~1.7.3",
@@ -39,23 +39,21 @@
"iconv-lite": "~0.6.3",
"isomorphic-ws": "^5.0.0",
"jsesc": "~3.0.2",
"json5": "~2.2.3",
"jsonata": "^2.0.3",
"jsonschema": "~1.4.1",
"jsonwebtoken": "~9.0.0",
"jwt-decode": "~3.1.2",
"kafkajs": "^2.2.4",
"knex": "^2.4.2",
"limiter": "~2.1.0",
"liquidjs": "^10.7.0",
"mitt": "~3.0.1",
"mongodb": "~4.17.1",
"mongodb": "~4.14.0",
"mqtt": "~4.3.7",
"mssql": "~8.1.4",
"mysql2": "~2.3.3",
"nanoid": "~3.3.4",
"node-cloudflared-tunnel": "~1.0.9",
"node-radius-client": "~1.0.0",
"node-windows": "^1.0.0-beta.8",
"nodemailer": "~6.6.5",
"nostr-tools": "^1.13.1",
"notp": "~2.0.3",
@@ -71,13 +69,16 @@
"redbean-node": "~0.3.0",
"redis": "~4.5.1",
"semver": "~7.5.4",
"socket.io": "~4.7.2",
"socket.io-client": "~4.7.2",
"socket.io": "~4.6.1",
"socket.io-client": "~4.6.1",
"socks-proxy-agent": "6.1.1",
"tar": "~6.1.11",
"tcp-ping": "~0.1.1",
"thirty-two": "~1.0.2",
"ws": "~8.11.0"
"ws": "^8.13.0"
},
"bin": {
"uptime-kuma": "extra/cli.js"
},
"devDependencies": {
"@actions/github": "~5.0.1",
@@ -101,12 +102,12 @@
"core-js": "~3.26.1",
"cronstrue": "~2.24.0",
"cross-env": "~7.0.3",
"cypress": "^13.2.0",
"cypress": "^12.17.0",
"delay": "^5.0.0",
"dns2": "~2.0.1",
"dompurify": "~2.4.3",
"eslint": "~8.14.0",
"eslint-plugin-jsdoc": "~46.4.6",
"eslint-plugin-jsdoc": "^46.4.6",
"eslint-plugin-vue": "~8.7.1",
"favico.js": "~0.3.10",
"jest": "~29.6.1",
@@ -122,7 +123,6 @@
"stylelint": "^15.10.1",
"stylelint-config-standard": "~25.0.0",
"terser": "~5.15.0",
"test": "~3.3.0",
"timezones-list": "~3.0.1",
"typescript": "~4.4.4",
"v-pagination-3": "~0.1.7",
@@ -3400,9 +3400,9 @@
}
},
"node_modules/@cypress/request": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/@cypress/request/-/request-3.0.1.tgz",
"integrity": "sha512-TWivJlJi8ZDx2wGOw1dbLuHJKUYX7bWySw377nlnGOW3hP9/MUKIsEdXT/YngWxVdgNCHRBmFlBipE+5/2ZZlQ==",
"version": "2.88.11",
"resolved": "https://registry.npmjs.org/@cypress/request/-/request-2.88.11.tgz",
"integrity": "sha512-M83/wfQ1EkspjkE2lNWNV5ui2Cv7UCv1swW1DqljahbzLVWltcsexQh8jYtuS/vzFXP+HySntGM83ZXA9fn17w==",
"dev": true,
"dependencies": {
"aws-sign2": "~0.7.0",
@@ -3418,9 +3418,9 @@
"json-stringify-safe": "~5.0.1",
"mime-types": "~2.1.19",
"performance-now": "^2.1.0",
"qs": "6.10.4",
"qs": "~6.10.3",
"safe-buffer": "^5.1.2",
"tough-cookie": "^4.1.3",
"tough-cookie": "~2.5.0",
"tunnel-agent": "^0.6.0",
"uuid": "^8.3.2"
},
@@ -4982,15 +4982,6 @@
"node-pre-gyp": "bin/node-pre-gyp"
}
},
"node_modules/@mongodb-js/saslprep": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@mongodb-js/saslprep/-/saslprep-1.1.0.tgz",
"integrity": "sha512-Xfijy7HvfzzqiOAhAepF4SGN5e9leLkMvg/OPOF97XemjfVCYN/oWa75wnkc6mltMSTwY+XlbhWgUOJmkFspSw==",
"optional": true,
"dependencies": {
"sparse-bitfield": "^3.0.3"
}
},
"node_modules/@nicolo-ribaudo/eslint-scope-5-internals": {
"version": "5.1.1-v1",
"resolved": "https://registry.npmjs.org/@nicolo-ribaudo/eslint-scope-5-internals/-/eslint-scope-5-internals-5.1.1-v1.tgz",
@@ -5566,9 +5557,9 @@
}
},
"node_modules/@types/cors": {
"version": "2.8.14",
"resolved": "https://registry.npmjs.org/@types/cors/-/cors-2.8.14.tgz",
"integrity": "sha512-RXHUvNWYICtbP6s18PnOCaqToK8y14DnLd75c6HfyKf228dxy7pHNOQkxPtvXKp/hINFMDjbYzsj63nnpPMSRQ==",
"version": "2.8.13",
"resolved": "https://registry.npmjs.org/@types/cors/-/cors-2.8.13.tgz",
"integrity": "sha512-RG8AStHlUiV5ysZQKq97copd2UmVYw3/pRMLefISZ3S1hK104Cwm7iLQ3fTKx+lsUH2CE8FlLaYeEA2LSeqYUA==",
"dependencies": {
"@types/node": "*"
}
@@ -5994,18 +5985,6 @@
"resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz",
"integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q=="
},
"node_modules/abort-controller": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz",
"integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==",
"dev": true,
"dependencies": {
"event-target-shim": "^5.0.0"
},
"engines": {
"node": ">=6.5"
}
},
"node_modules/accepts": {
"version": "1.3.8",
"resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz",
@@ -6325,26 +6304,6 @@
"node": ">=8"
}
},
"node_modules/arraybuffer.prototype.slice": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.2.tgz",
"integrity": "sha512-yMBKppFur/fbHu9/6USUe03bZ4knMYiwFBcyiaXB8Go0qNehwX6inYPzK9U0NeQvGxKthcmHcaR8P5MStSRBAw==",
"dependencies": {
"array-buffer-byte-length": "^1.0.0",
"call-bind": "^1.0.2",
"define-properties": "^1.2.0",
"es-abstract": "^1.22.1",
"get-intrinsic": "^1.2.1",
"is-array-buffer": "^3.0.2",
"is-shared-array-buffer": "^1.0.2"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/arrify": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz",
@@ -8125,15 +8084,15 @@
"dev": true
},
"node_modules/cypress": {
"version": "13.2.0",
"resolved": "https://registry.npmjs.org/cypress/-/cypress-13.2.0.tgz",
"integrity": "sha512-AvDQxBydE771GTq0TR4ZUBvv9m9ffXuB/ueEtpDF/6gOcvFR96amgwSJP16Yhqw6VhmwqspT5nAGzoxxB+D89g==",
"version": "12.17.3",
"resolved": "https://registry.npmjs.org/cypress/-/cypress-12.17.3.tgz",
"integrity": "sha512-/R4+xdIDjUSLYkiQfwJd630S81KIgicmQOLXotFxVXkl+eTeVO+3bHXxdi5KBh/OgC33HWN33kHX+0tQR/ZWpg==",
"dev": true,
"hasInstallScript": true,
"dependencies": {
"@cypress/request": "^3.0.0",
"@cypress/request": "^2.88.11",
"@cypress/xvfb": "^1.2.4",
"@types/node": "^18.17.5",
"@types/node": "^16.18.39",
"@types/sinonjs__fake-timers": "8.1.1",
"@types/sizzle": "^2.3.2",
"arch": "^2.2.0",
@@ -8166,7 +8125,6 @@
"minimist": "^1.2.8",
"ospath": "^1.2.2",
"pretty-bytes": "^5.6.0",
"process": "^0.11.10",
"proxy-from-env": "1.0.0",
"request-progress": "^3.0.0",
"semver": "^7.5.3",
@@ -8179,13 +8137,13 @@
"cypress": "bin/cypress"
},
"engines": {
"node": "^16.0.0 || ^18.0.0 || >=20.0.0"
"node": "^14.0.0 || ^16.0.0 || >=18.0.0"
}
},
"node_modules/cypress/node_modules/@types/node": {
"version": "18.17.18",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.17.18.tgz",
"integrity": "sha512-/4QOuy3ZpV7Ya1GTRz5CYSz3DgkKpyUptXuQ5PPce7uuyJAOR7r9FhkmxJfvcNUXyklbC63a+YvB3jxy7s9ngw==",
"version": "16.18.40",
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.18.40.tgz",
"integrity": "sha512-+yno3ItTEwGxXiS/75Q/aHaa5srkpnJaH+kdkTVJ3DtJEwv92itpKbxU+FjPoh2m/5G9zmUQfrL4A4C13c+iGA==",
"dev": true
},
"node_modules/cypress/node_modules/ansi-styles": {
@@ -8418,19 +8376,6 @@
"node": ">=10"
}
},
"node_modules/define-data-property": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.0.tgz",
"integrity": "sha512-UzGwzcjyv3OtAvolTj1GoyNYzfFR+iqbGjcnBEENZVCpM4/Ng1yhGNvS3lR/xDS74Tb2wGG9WzNSNIOS9UVb2g==",
"dependencies": {
"get-intrinsic": "^1.2.1",
"gopd": "^1.0.1",
"has-property-descriptors": "^1.0.0"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/define-lazy-prop": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz",
@@ -8732,9 +8677,9 @@
}
},
"node_modules/engine.io": {
"version": "6.5.3",
"resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.5.3.tgz",
"integrity": "sha512-IML/R4eG/pUS5w7OfcDE0jKrljWS9nwnEfsxWCIJF5eO6AHo6+Hlv+lQbdlAYsiJPHzUthLm1RUjnBzWOs45cw==",
"version": "6.4.2",
"resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.4.2.tgz",
"integrity": "sha512-FKn/3oMiJjrOEOeUub2WCox6JhxBXq/Zn3fZOMCBxKnNYtsdKjxhl7yR3fZhM9PV+rdE75SU5SYMc+2PGzo+Tg==",
"dependencies": {
"@types/cookie": "^0.4.1",
"@types/cors": "^2.8.12",
@@ -8744,31 +8689,71 @@
"cookie": "~0.4.1",
"cors": "~2.8.5",
"debug": "~4.3.1",
"engine.io-parser": "~5.2.1",
"engine.io-parser": "~5.0.3",
"ws": "~8.11.0"
},
"engines": {
"node": ">=10.2.0"
"node": ">=10.0.0"
}
},
"node_modules/engine.io-client": {
"version": "6.5.2",
"resolved": "https://registry.npmjs.org/engine.io-client/-/engine.io-client-6.5.2.tgz",
"integrity": "sha512-CQZqbrpEYnrpGqC07a9dJDz4gePZUgTPMU3NKJPSeQOyw27Tst4Pl3FemKoFGAlHzgZmKjoRmiJvbWfhCXUlIg==",
"version": "6.4.0",
"resolved": "https://registry.npmjs.org/engine.io-client/-/engine.io-client-6.4.0.tgz",
"integrity": "sha512-GyKPDyoEha+XZ7iEqam49vz6auPnNJ9ZBfy89f+rMMas8AuiMWOZ9PVzu8xb9ZC6rafUqiGHSCfu22ih66E+1g==",
"dependencies": {
"@socket.io/component-emitter": "~3.1.0",
"debug": "~4.3.1",
"engine.io-parser": "~5.2.1",
"engine.io-parser": "~5.0.3",
"ws": "~8.11.0",
"xmlhttprequest-ssl": "~2.0.0"
}
},
"node_modules/engine.io-parser": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.2.1.tgz",
"integrity": "sha512-9JktcM3u18nU9N2Lz3bWeBgxVgOKpw7yhRaoxQA3FUDZzzw+9WlA6p4G4u0RixNkg14fH7EfEc/RhpurtiROTQ==",
"node_modules/engine.io-client/node_modules/ws": {
"version": "8.11.0",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.11.0.tgz",
"integrity": "sha512-HPG3wQd9sNQoT9xHyNCXoDUa+Xw/VevmY9FoHyQ+g+rrMn4j6FB4np7Z0OhdTgjx6MgQLK7jwSy1YecU1+4Asg==",
"engines": {
"node": ">=10.0.0"
},
"peerDependencies": {
"bufferutil": "^4.0.1",
"utf-8-validate": "^5.0.2"
},
"peerDependenciesMeta": {
"bufferutil": {
"optional": true
},
"utf-8-validate": {
"optional": true
}
}
},
"node_modules/engine.io-parser": {
"version": "5.0.7",
"resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.7.tgz",
"integrity": "sha512-P+jDFbvK6lE3n1OL+q9KuzdOFWkkZ/cMV9gol/SbVfpyqfvrfrFTOFJ6fQm2VC3PZHlU3QPhVwmbsCnauHF2MQ==",
"engines": {
"node": ">=10.0.0"
}
},
"node_modules/engine.io/node_modules/ws": {
"version": "8.11.0",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.11.0.tgz",
"integrity": "sha512-HPG3wQd9sNQoT9xHyNCXoDUa+Xw/VevmY9FoHyQ+g+rrMn4j6FB4np7Z0OhdTgjx6MgQLK7jwSy1YecU1+4Asg==",
"engines": {
"node": ">=10.0.0"
},
"peerDependencies": {
"bufferutil": "^4.0.1",
"utf-8-validate": "^5.0.2"
},
"peerDependenciesMeta": {
"bufferutil": {
"optional": true
},
"utf-8-validate": {
"optional": true
}
}
},
"node_modules/enquirer": {
@@ -8819,18 +8804,17 @@
}
},
"node_modules/es-abstract": {
"version": "1.22.2",
"resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.22.2.tgz",
"integrity": "sha512-YoxfFcDmhjOgWPWsV13+2RNjq1F6UQnfs+8TftwNqtzlmFzEXvlUwdrNrYeaizfjQzRMxkZ6ElWMOJIFKdVqwA==",
"version": "1.21.2",
"resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.21.2.tgz",
"integrity": "sha512-y/B5POM2iBnIxCiernH1G7rC9qQoM77lLIMQLuob0zhp8C56Po81+2Nj0WFKnd0pNReDTnkYryc+zhOzpEIROg==",
"dependencies": {
"array-buffer-byte-length": "^1.0.0",
"arraybuffer.prototype.slice": "^1.0.2",
"available-typed-arrays": "^1.0.5",
"call-bind": "^1.0.2",
"es-set-tostringtag": "^2.0.1",
"es-to-primitive": "^1.2.1",
"function.prototype.name": "^1.1.6",
"get-intrinsic": "^1.2.1",
"function.prototype.name": "^1.1.5",
"get-intrinsic": "^1.2.0",
"get-symbol-description": "^1.0.0",
"globalthis": "^1.0.3",
"gopd": "^1.0.1",
@@ -8845,23 +8829,19 @@
"is-regex": "^1.1.4",
"is-shared-array-buffer": "^1.0.2",
"is-string": "^1.0.7",
"is-typed-array": "^1.1.12",
"is-typed-array": "^1.1.10",
"is-weakref": "^1.0.2",
"object-inspect": "^1.12.3",
"object-keys": "^1.1.1",
"object.assign": "^4.1.4",
"regexp.prototype.flags": "^1.5.1",
"safe-array-concat": "^1.0.1",
"regexp.prototype.flags": "^1.4.3",
"safe-regex-test": "^1.0.0",
"string.prototype.trim": "^1.2.8",
"string.prototype.trimend": "^1.0.7",
"string.prototype.trimstart": "^1.0.7",
"typed-array-buffer": "^1.0.0",
"typed-array-byte-length": "^1.0.0",
"typed-array-byte-offset": "^1.0.0",
"string.prototype.trim": "^1.2.7",
"string.prototype.trimend": "^1.0.6",
"string.prototype.trimstart": "^1.0.6",
"typed-array-length": "^1.0.4",
"unbox-primitive": "^1.0.2",
"which-typed-array": "^1.1.11"
"which-typed-array": "^1.1.9"
},
"engines": {
"node": ">= 0.4"
@@ -9401,15 +9381,6 @@
"node": ">= 0.6"
}
},
"node_modules/event-target-shim": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz",
"integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==",
"dev": true,
"engines": {
"node": ">=6"
}
},
"node_modules/event-to-promise": {
"version": "0.7.0",
"resolved": "https://registry.npmjs.org/event-to-promise/-/event-to-promise-0.7.0.tgz",
@@ -10054,14 +10025,14 @@
"integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A=="
},
"node_modules/function.prototype.name": {
"version": "1.1.6",
"resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.6.tgz",
"integrity": "sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==",
"version": "1.1.5",
"resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz",
"integrity": "sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==",
"dependencies": {
"call-bind": "^1.0.2",
"define-properties": "^1.2.0",
"es-abstract": "^1.22.1",
"functions-have-names": "^1.2.3"
"define-properties": "^1.1.3",
"es-abstract": "^1.19.0",
"functions-have-names": "^1.2.2"
},
"engines": {
"node": ">= 0.4"
@@ -11228,11 +11199,15 @@
}
},
"node_modules/is-typed-array": {
"version": "1.1.12",
"resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.12.tgz",
"integrity": "sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg==",
"version": "1.1.10",
"resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.10.tgz",
"integrity": "sha512-PJqgEHiWZvMpaFZ3uTc8kHPM4+4ADTlDniuQL7cU/UDA0Ql7F70yGfHph3cLNe+c9toaigv+DFzTJKhc2CtO6A==",
"dependencies": {
"which-typed-array": "^1.1.11"
"available-typed-arrays": "^1.0.5",
"call-bind": "^1.0.2",
"for-each": "^0.3.3",
"gopd": "^1.0.1",
"has-tostringtag": "^1.0.0"
},
"engines": {
"node": ">= 0.4"
@@ -13208,6 +13183,7 @@
"version": "2.2.3",
"resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz",
"integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==",
"dev": true,
"bin": {
"json5": "lib/cli.js"
},
@@ -13235,14 +13211,6 @@
"graceful-fs": "^4.1.6"
}
},
"node_modules/jsonschema": {
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/jsonschema/-/jsonschema-1.4.1.tgz",
"integrity": "sha512-S6cATIPVv1z0IlxdN+zUk5EPjkGCdnhN4wVSBlvoUO1tOLJootbo9CquNJmbIh4yikWHiUedhRYrNPn1arpEmQ==",
"engines": {
"node": "*"
}
},
"node_modules/jsonwebtoken": {
"version": "9.0.0",
"resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.0.tgz",
@@ -14208,11 +14176,6 @@
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
},
"node_modules/mitt": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/mitt/-/mitt-3.0.1.tgz",
"integrity": "sha512-vKivATfr97l2/QBCYAkXYDbrIWPM2IIKEl7YPhjCvKlG3kE2gm+uBo6nEXK3M5/Ffh/FLpKExzOQ3JJoJGFKBw=="
},
"node_modules/mkdirp": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz",
@@ -14225,12 +14188,12 @@
}
},
"node_modules/mongodb": {
"version": "4.17.1",
"resolved": "https://registry.npmjs.org/mongodb/-/mongodb-4.17.1.tgz",
"integrity": "sha512-MBuyYiPUPRTqfH2dV0ya4dcr2E5N52ocBuZ8Sgg/M030nGF78v855B3Z27mZJnp8PxjnUquEnAtjOsphgMZOlQ==",
"version": "4.14.0",
"resolved": "https://registry.npmjs.org/mongodb/-/mongodb-4.14.0.tgz",
"integrity": "sha512-coGKkWXIBczZPr284tYKFLg+KbGPPLlSbdgfKAb6QqCFt5bo5VFZ50O3FFzsw4rnkqjwT6D8Qcoo9nshYKM7Mg==",
"dependencies": {
"bson": "^4.7.2",
"mongodb-connection-string-url": "^2.6.0",
"bson": "^4.7.0",
"mongodb-connection-string-url": "^2.5.4",
"socks": "^2.7.1"
},
"engines": {
@@ -14238,7 +14201,7 @@
},
"optionalDependencies": {
"@aws-sdk/credential-providers": "^3.186.0",
"@mongodb-js/saslprep": "^1.1.0"
"saslprep": "^1.0.3"
}
},
"node_modules/mongodb-connection-string-url": {
@@ -14683,6 +14646,15 @@
"node": ">= 10"
}
},
"node_modules/node-windows": {
"version": "1.0.0-beta.8",
"resolved": "https://registry.npmjs.org/node-windows/-/node-windows-1.0.0-beta.8.tgz",
"integrity": "sha512-uLekXnSeem3nW5escID224Fd0U/1VtvE796JpSpOY+c73Cslz/Qn2WUHRJyPQJEMrNGAy/FMRFjjhh4z1alZTA==",
"dependencies": {
"xml": "1.0.1",
"yargs": "^17.5.1"
}
},
"node_modules/nodemailer": {
"version": "6.6.5",
"resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-6.6.5.tgz",
@@ -15570,15 +15542,6 @@
"node": ">=6"
}
},
"node_modules/process": {
"version": "0.11.10",
"resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz",
"integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==",
"dev": true,
"engines": {
"node": ">= 0.6.0"
}
},
"node_modules/process-nextick-args": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz",
@@ -15876,12 +15839,6 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/querystringify": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
"integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==",
"dev": true
},
"node_modules/queue-microtask": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
@@ -16263,13 +16220,13 @@
}
},
"node_modules/regexp.prototype.flags": {
"version": "1.5.1",
"resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz",
"integrity": "sha512-sy6TXMN+hnP/wMy+ISxg3krXx7BAtWVO4UouuCN/ziM9UEne0euamVNafDfvC83bRNr95y0V5iijeDQFUNpvrg==",
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.0.tgz",
"integrity": "sha512-0SutC3pNudRKgquxGoRGIz946MZVHqbNfPjBdxeOhBrdgDKlRoXmYLQN9xRbrR09ZXWeGAdPuif7egofn6v5LA==",
"dependencies": {
"call-bind": "^1.0.2",
"define-properties": "^1.2.0",
"set-function-name": "^2.0.0"
"functions-have-names": "^1.2.3"
},
"engines": {
"node": ">= 0.4"
@@ -16365,12 +16322,6 @@
"integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==",
"dev": true
},
"node_modules/requires-port": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
"integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==",
"dev": true
},
"node_modules/resolve": {
"version": "1.22.2",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.2.tgz",
@@ -16661,28 +16612,6 @@
"tslib": "^2.1.0"
}
},
"node_modules/safe-array-concat": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.0.1.tgz",
"integrity": "sha512-6XbUAseYE2KtOuGueyeobCySj9L4+66Tn6KQMOPQJrAJEowYKW/YR/MGJZl7FdydUdaFu4LYyDZjxf4/Nmo23Q==",
"dependencies": {
"call-bind": "^1.0.2",
"get-intrinsic": "^1.2.1",
"has-symbols": "^1.0.3",
"isarray": "^2.0.5"
},
"engines": {
"node": ">=0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/safe-array-concat/node_modules/isarray": {
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz",
"integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw=="
},
"node_modules/safe-buffer": {
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
@@ -16706,6 +16635,18 @@
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
"node_modules/saslprep": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/saslprep/-/saslprep-1.0.3.tgz",
"integrity": "sha512-/MY/PEMbk2SuY5sScONwhUDsV2p77Znkb/q3nSVstq/yQzYJOH/Azh29p9oJLsl3LnQwSvZDKagDGBsBwSooag==",
"optional": true,
"dependencies": {
"sparse-bitfield": "^3.0.3"
},
"engines": {
"node": ">=6"
}
},
"node_modules/sass": {
"version": "1.42.1",
"resolved": "https://registry.npmjs.org/sass/-/sass-1.42.1.tgz",
@@ -16842,19 +16783,6 @@
"resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
"integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw=="
},
"node_modules/set-function-name": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz",
"integrity": "sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==",
"dependencies": {
"define-data-property": "^1.0.1",
"functions-have-names": "^1.2.3",
"has-property-descriptors": "^1.0.0"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/setprototypeof": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz",
@@ -16984,20 +16912,19 @@
}
},
"node_modules/socket.io": {
"version": "4.7.2",
"resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.7.2.tgz",
"integrity": "sha512-bvKVS29/I5fl2FGLNHuXlQaUH/BlzX1IN6S+NKLNZpBsPZIDH+90eQmCs2Railn4YUiww4SzUedJ6+uzwFnKLw==",
"version": "4.6.2",
"resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.6.2.tgz",
"integrity": "sha512-Vp+lSks5k0dewYTfwgPT9UeGGd+ht7sCpB7p0e83VgO4X/AHYWhXITMrNk/pg8syY2bpx23ptClCQuHhqi2BgQ==",
"dependencies": {
"accepts": "~1.3.4",
"base64id": "~2.0.0",
"cors": "~2.8.5",
"debug": "~4.3.2",
"engine.io": "~6.5.2",
"engine.io": "~6.4.2",
"socket.io-adapter": "~2.5.2",
"socket.io-parser": "~4.2.4"
},
"engines": {
"node": ">=10.2.0"
"node": ">=10.0.0"
}
},
"node_modules/socket.io-adapter": {
@@ -17008,14 +16935,34 @@
"ws": "~8.11.0"
}
},
"node_modules/socket.io-adapter/node_modules/ws": {
"version": "8.11.0",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.11.0.tgz",
"integrity": "sha512-HPG3wQd9sNQoT9xHyNCXoDUa+Xw/VevmY9FoHyQ+g+rrMn4j6FB4np7Z0OhdTgjx6MgQLK7jwSy1YecU1+4Asg==",
"engines": {
"node": ">=10.0.0"
},
"peerDependencies": {
"bufferutil": "^4.0.1",
"utf-8-validate": "^5.0.2"
},
"peerDependenciesMeta": {
"bufferutil": {
"optional": true
},
"utf-8-validate": {
"optional": true
}
}
},
"node_modules/socket.io-client": {
"version": "4.7.2",
"resolved": "https://registry.npmjs.org/socket.io-client/-/socket.io-client-4.7.2.tgz",
"integrity": "sha512-vtA0uD4ibrYD793SOIAwlo8cj6haOeMHrGvwPxJsxH7CeIksqJ+3Zc06RvWTIFgiSqx4A3sOnTXpfAEE2Zyz6w==",
"version": "4.6.2",
"resolved": "https://registry.npmjs.org/socket.io-client/-/socket.io-client-4.6.2.tgz",
"integrity": "sha512-OwWrMbbA8wSqhBAR0yoPK6EdQLERQAYjXb3A0zLpgxfM1ZGLKoxHx8gVmCHA6pcclRX5oA/zvQf7bghAS11jRA==",
"dependencies": {
"@socket.io/component-emitter": "~3.1.0",
"debug": "~4.3.2",
"engine.io-client": "~6.5.2",
"engine.io-client": "~6.4.0",
"socket.io-parser": "~4.2.4"
},
"engines": {
@@ -17363,31 +17310,14 @@
"node": ">=8"
}
},
"node_modules/string.prototype.replaceall": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/string.prototype.replaceall/-/string.prototype.replaceall-1.0.8.tgz",
"integrity": "sha512-MmCXb9980obcnmbEd3guqVl6lXTxpP28zASfgAlAhlBMw5XehQeSKsdIWlAYtLxp/1GtALwex+2HyoIQtaLQwQ==",
"dev": true,
"dependencies": {
"call-bind": "^1.0.2",
"define-properties": "^1.2.0",
"es-abstract": "^1.22.1",
"get-intrinsic": "^1.2.1",
"has-symbols": "^1.0.3",
"is-regex": "^1.1.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/string.prototype.trim": {
"version": "1.2.8",
"resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.8.tgz",
"integrity": "sha512-lfjY4HcixfQXOfaqCvcBuOIapyaroTXhbkfJN3gcB1OtyupngWK4sEET9Knd0cXd28kTUqu/kHoV4HKSJdnjiQ==",
"version": "1.2.7",
"resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.7.tgz",
"integrity": "sha512-p6TmeT1T3411M8Cgg9wBTMRtY2q9+PNy9EV1i2lIXUN/btt763oIfxwN3RR8VU6wHX8j/1CFy0L+YuThm6bgOg==",
"dependencies": {
"call-bind": "^1.0.2",
"define-properties": "^1.2.0",
"es-abstract": "^1.22.1"
"define-properties": "^1.1.4",
"es-abstract": "^1.20.4"
},
"engines": {
"node": ">= 0.4"
@@ -17397,26 +17327,26 @@
}
},
"node_modules/string.prototype.trimend": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.7.tgz",
"integrity": "sha512-Ni79DqeB72ZFq1uH/L6zJ+DKZTkOtPIHovb3YZHQViE+HDouuU4mBrLOLDn5Dde3RF8qw5qVETEjhu9locMLvA==",
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.6.tgz",
"integrity": "sha512-JySq+4mrPf9EsDBEDYMOb/lM7XQLulwg5R/m1r0PXEFqrV0qHvl58sdTilSXtKOflCsK2E8jxf+GKC0T07RWwQ==",
"dependencies": {
"call-bind": "^1.0.2",
"define-properties": "^1.2.0",
"es-abstract": "^1.22.1"
"define-properties": "^1.1.4",
"es-abstract": "^1.20.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/string.prototype.trimstart": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.7.tgz",
"integrity": "sha512-NGhtDFu3jCEm7B4Fy0DpLewdJQOZcQ0rGbwQ/+stjnrp2i+rlKeCvos9hOIeCmqwratM47OBxY7uFZzjxHXmrg==",
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.6.tgz",
"integrity": "sha512-omqjMDaY92pbn5HOX7f9IccLA+U1tA9GvtU4JrodiXFfYB7jPzzHpRzpglLAjtUV6bB557zwClJezTqnAiYnQA==",
"dependencies": {
"call-bind": "^1.0.2",
"define-properties": "^1.2.0",
"es-abstract": "^1.22.1"
"define-properties": "^1.1.4",
"es-abstract": "^1.20.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
@@ -17908,23 +17838,6 @@
"integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==",
"dev": true
},
"node_modules/test": {
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/test/-/test-3.3.0.tgz",
"integrity": "sha512-JKlEohxDIJRjwBH/+BrTcAPHljBALrAHw3Zs99RqZlaC605f6BggqXhxkdqZThbSHgaYPwpNJlf9bTSWkb/1rA==",
"dev": true,
"dependencies": {
"minimist": "^1.2.6",
"readable-stream": "^4.3.0",
"string.prototype.replaceall": "^1.0.6"
},
"bin": {
"node--test": "bin/node--test.js",
"node--test-name-pattern": "bin/node--test-name-pattern.js",
"node--test-only": "bin/node--test-only.js",
"test": "bin/node-core-test.js"
}
},
"node_modules/test-exclude": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz",
@@ -17939,46 +17852,6 @@
"node": ">=8"
}
},
"node_modules/test/node_modules/buffer": {
"version": "6.0.3",
"resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz",
"integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==",
"dev": true,
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"dependencies": {
"base64-js": "^1.3.1",
"ieee754": "^1.2.1"
}
},
"node_modules/test/node_modules/readable-stream": {
"version": "4.4.2",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.4.2.tgz",
"integrity": "sha512-Lk/fICSyIhodxy1IDK2HazkeGjSmezAWX2egdtJnYhtzKEsBPJowlI6F6LPb5tqIQILrMbx22S5o3GuJavPusA==",
"dev": true,
"dependencies": {
"abort-controller": "^3.0.0",
"buffer": "^6.0.3",
"events": "^3.3.0",
"process": "^0.11.10",
"string_decoder": "^1.3.0"
},
"engines": {
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
}
},
"node_modules/text-table": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz",
@@ -18076,27 +17949,16 @@
}
},
"node_modules/tough-cookie": {
"version": "4.1.3",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz",
"integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==",
"version": "2.5.0",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz",
"integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==",
"dev": true,
"dependencies": {
"psl": "^1.1.33",
"punycode": "^2.1.1",
"universalify": "^0.2.0",
"url-parse": "^1.5.3"
"psl": "^1.1.28",
"punycode": "^2.1.1"
},
"engines": {
"node": ">=6"
}
},
"node_modules/tough-cookie/node_modules/universalify": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz",
"integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==",
"dev": true,
"engines": {
"node": ">= 4.0.0"
"node": ">=0.8"
}
},
"node_modules/tr46": {
@@ -18209,54 +18071,6 @@
"node": ">= 0.6"
}
},
"node_modules/typed-array-buffer": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.0.tgz",
"integrity": "sha512-Y8KTSIglk9OZEr8zywiIHG/kmQ7KWyjseXs1CbSo8vC42w7hg2HgYTxSWwP0+is7bWDc1H+Fo026CpHFwm8tkw==",
"dependencies": {
"call-bind": "^1.0.2",
"get-intrinsic": "^1.2.1",
"is-typed-array": "^1.1.10"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/typed-array-byte-length": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.0.tgz",
"integrity": "sha512-Or/+kvLxNpeQ9DtSydonMxCx+9ZXOswtwJn17SNLvhptaXYDJvkFFP5zbfU/uLmvnBJlI4yrnXRxpdWH/M5tNA==",
"dependencies": {
"call-bind": "^1.0.2",
"for-each": "^0.3.3",
"has-proto": "^1.0.1",
"is-typed-array": "^1.1.10"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/typed-array-byte-offset": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.0.tgz",
"integrity": "sha512-RD97prjEt9EL8YgAgpOkf3O4IF9lhJFr9g0htQkm0rchFp/Vx7LW5Q8fSXXub7BXAODyUQohRMyOc3faCPd0hg==",
"dependencies": {
"available-typed-arrays": "^1.0.5",
"call-bind": "^1.0.2",
"for-each": "^0.3.3",
"has-proto": "^1.0.1",
"is-typed-array": "^1.1.10"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/typed-array-length": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.4.tgz",
@@ -18436,16 +18250,6 @@
"punycode": "^2.1.0"
}
},
"node_modules/url-parse": {
"version": "1.5.10",
"resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
"integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
"dev": true,
"dependencies": {
"querystringify": "^2.1.1",
"requires-port": "^1.0.0"
}
},
"node_modules/util-deprecate": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
@@ -19059,15 +18863,16 @@
"dev": true
},
"node_modules/which-typed-array": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.11.tgz",
"integrity": "sha512-qe9UWWpkeG5yzZ0tNYxDmd7vo58HDBc39mZ0xWWpolAGADdFOzkfamWLDxkOWcvHQKVmdTyQdLD4NOfjLWTKew==",
"version": "1.1.9",
"resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.9.tgz",
"integrity": "sha512-w9c4xkx6mPidwp7180ckYWfMmvxpjlZuIudNtDf4N/tTAUB8VJbX25qZoAsrtGuYNnGw3pa0AXgbGKRB8/EceA==",
"dependencies": {
"available-typed-arrays": "^1.0.5",
"call-bind": "^1.0.2",
"for-each": "^0.3.3",
"gopd": "^1.0.1",
"has-tostringtag": "^1.0.0"
"has-tostringtag": "^1.0.0",
"is-typed-array": "^1.1.10"
},
"engines": {
"node": ">= 0.4"
@@ -19205,15 +19010,15 @@
}
},
"node_modules/ws": {
"version": "8.11.0",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.11.0.tgz",
"integrity": "sha512-HPG3wQd9sNQoT9xHyNCXoDUa+Xw/VevmY9FoHyQ+g+rrMn4j6FB4np7Z0OhdTgjx6MgQLK7jwSy1YecU1+4Asg==",
"version": "8.13.0",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.13.0.tgz",
"integrity": "sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==",
"engines": {
"node": ">=10.0.0"
},
"peerDependencies": {
"bufferutil": "^4.0.1",
"utf-8-validate": "^5.0.2"
"utf-8-validate": ">=5.0.2"
},
"peerDependenciesMeta": {
"bufferutil": {
@@ -19224,6 +19029,11 @@
}
}
},
"node_modules/xml": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/xml/-/xml-1.0.1.tgz",
"integrity": "sha512-huCv9IH9Tcf95zuYCsQraZtWnJvBtLVE0QHMOs8bWyZAFZNDcYjsPq1nEx8jKA9y+Beo9v+7OBPRisQTjinQMw=="
},
"node_modules/xmlbuilder": {
"version": "8.2.2",
"resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-8.2.2.tgz",

View File

@@ -1,6 +1,7 @@
{
"name": "uptime-kuma",
"version": "1.23.2",
"description": "Uptime Kuma is an easy-to-use self-hosted monitoring tool",
"version": "1.23.0-beta.1",
"license": "MIT",
"repository": {
"type": "git",
@@ -9,7 +10,11 @@
"engines": {
"node": "14 || 16 || 18 || >= 20.4.0"
},
"bin": {
"uptime-kuma": "./extra/cli.js"
},
"scripts": {
"uptime-kuma": "node ./extra/cli.js",
"install-legacy": "npm install",
"update-legacy": "npm update",
"lint:js": "eslint --ext \".js,.vue\" --ignore-path .gitignore .",
@@ -24,11 +29,8 @@
"start-server": "node server/server.js",
"start-server-dev": "cross-env NODE_ENV=development node server/server.js",
"build": "vite build --config ./config/vite.config.js",
"test": "node test/prepare-test-server.js && npm run test-backend",
"test": "node test/prepare-test-server.js && npm run jest-backend",
"test-with-build": "npm run build && npm test",
"test-backend": "node test/backend-test-entry.js && npm run jest-backend",
"test-backend:14": "cross-env TEST_BACKEND=1 NODE_OPTIONS=\"--experimental-abortcontroller --no-warnings\" node--test test/backend-test",
"test-backend:18": "cross-env TEST_BACKEND=1 node --test test/backend-test",
"jest-backend": "cross-env TEST_BACKEND=1 jest --runInBand --detectOpenHandles --forceExit --config=./config/jest-backend.config.js",
"tsc": "tsc",
"vite-preview-dist": "vite preview --host --config ./config/vite.config.js",
@@ -40,9 +42,9 @@
"build-docker-full": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2 -t louislam/uptime-kuma:$VERSION --target release . --push",
"build-docker-nightly": "node ./extra/test-docker.js && npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly2 --target nightly . --push",
"build-docker-nightly-local": "npm run build && docker build -f docker/dockerfile -t louislam/uptime-kuma:nightly2 --target nightly .",
"build-docker-pr-test": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64 -t louislam/uptime-kuma:pr-test2 --target pr-test2 . --push",
"build-docker-pr-test": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64 -t louislam/uptime-kuma:pr-test --target pr-test . --push",
"upload-artifacts": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:upload-artifact --build-arg VERSION --build-arg GITHUB_TOKEN --target upload-artifact . --progress plain",
"setup": "git checkout 1.23.2 && npm ci --production && npm run download-dist",
"setup": "git checkout 1.22.1 && npm ci --production && npm run download-dist",
"download-dist": "node extra/download-dist.js",
"mark-as-nightly": "node extra/mark-as-nightly.js",
"reset-password": "node extra/reset-password.js",
@@ -57,10 +59,9 @@
"test-install-script-ubuntu1604": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu1604.dockerfile .",
"simple-dns-server": "node extra/simple-dns-server.js",
"simple-mqtt-server": "node extra/simple-mqtt-server.js",
"simple-mongo": "docker run --rm -p 27017:27017 mongo",
"update-language-files": "cd extra/update-language-files && node index.js && cross-env-shell eslint ../../src/languages/$npm_config_language.js --fix",
"release-final": "node ./extra/test-docker.js && node extra/update-version.js && npm run build-docker && node ./extra/press-any-key.js && npm run upload-artifacts && node ./extra/update-wiki-version.js",
"release-beta": "node ./extra/test-docker.js && node extra/beta/update-version.js && npm run build && node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:beta . --target release --push && node ./extra/press-any-key.js && npm run upload-artifacts",
"release-final": "node ./extra/test-docker.js && node extra/update-version.js && npm run build-docker && node ./extra/press-any-key.js && npm run upload-artifacts && node ./extra/update-wiki-version.js && npm run publish-to-npm",
"release-beta": "node ./extra/test-docker.js && node extra/beta/update-version.js && npm run build && node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:beta . --target release --push && node ./extra/press-any-key.js && npm run upload-artifacts && npm run publish-to-npm-beta",
"git-remove-tag": "git tag -d",
"build-dist-and-restart": "npm run build && npm run start-server-dev",
"start-pr-test": "node extra/checkout-pr.js && npm install && npm run dev",
@@ -73,7 +74,9 @@
"sort-contributors": "node extra/sort-contributors.js",
"quick-run-nightly": "docker run --rm --env NODE_ENV=development -p 3001:3001 louislam/uptime-kuma:nightly2",
"start-dev-container": "cd docker && docker-compose -f docker-compose-dev.yml up --force-recreate",
"rebase-pr-to-1.23.X": "node extra/rebase-pr.js 1.23.X"
"publish-to-npm-dev": "npm run build && npm publish --tag dev",
"publish-to-npm-beta": "npm publish --tag beta",
"publish-to-npm": "npm publish"
},
"dependencies": {
"@grpc/grpc-js": "~1.7.3",
@@ -106,23 +109,21 @@
"iconv-lite": "~0.6.3",
"isomorphic-ws": "^5.0.0",
"jsesc": "~3.0.2",
"json5": "~2.2.3",
"jsonata": "^2.0.3",
"jsonschema": "~1.4.1",
"jsonwebtoken": "~9.0.0",
"jwt-decode": "~3.1.2",
"kafkajs": "^2.2.4",
"knex": "^2.4.2",
"limiter": "~2.1.0",
"liquidjs": "^10.7.0",
"mitt": "~3.0.1",
"mongodb": "~4.17.1",
"mongodb": "~4.14.0",
"mqtt": "~4.3.7",
"mssql": "~8.1.4",
"mysql2": "~2.3.3",
"nanoid": "~3.3.4",
"node-cloudflared-tunnel": "~1.0.9",
"node-radius-client": "~1.0.0",
"node-windows": "^1.0.0-beta.8",
"nodemailer": "~6.6.5",
"nostr-tools": "^1.13.1",
"notp": "~2.0.3",
@@ -138,13 +139,13 @@
"redbean-node": "~0.3.0",
"redis": "~4.5.1",
"semver": "~7.5.4",
"socket.io": "~4.7.2",
"socket.io-client": "~4.7.2",
"socket.io": "~4.6.1",
"socket.io-client": "~4.6.1",
"socks-proxy-agent": "6.1.1",
"tar": "~6.1.11",
"tcp-ping": "~0.1.1",
"thirty-two": "~1.0.2",
"ws": "~8.11.0"
"ws": "^8.13.0"
},
"devDependencies": {
"@actions/github": "~5.0.1",
@@ -168,12 +169,12 @@
"core-js": "~3.26.1",
"cronstrue": "~2.24.0",
"cross-env": "~7.0.3",
"cypress": "^13.2.0",
"cypress": "^12.17.0",
"delay": "^5.0.0",
"dns2": "~2.0.1",
"dompurify": "~2.4.3",
"eslint": "~8.14.0",
"eslint-plugin-jsdoc": "~46.4.6",
"eslint-plugin-jsdoc": "^46.4.6",
"eslint-plugin-vue": "~8.7.1",
"favico.js": "~0.3.10",
"jest": "~29.6.1",
@@ -189,7 +190,6 @@
"stylelint": "^15.10.1",
"stylelint-config-standard": "~25.0.0",
"terser": "~5.15.0",
"test": "~3.3.0",
"timezones-list": "~3.0.1",
"typescript": "~4.4.4",
"v-pagination-3": "~0.1.7",

View File

@@ -36,32 +36,20 @@ exports.login = async function (username, password) {
return null;
};
/**
* Parse an API key: the "uk" prefix and key ID come before the "_", the clear secret comes after it
* @param {string} key API Key
* @returns {{clear: string, index: string}} Parsed API key
*/
exports.parseAPIKey = function (key) {
let index = key.substring(2, key.indexOf("_"));
let clear = key.substring(key.indexOf("_") + 1, key.length);
return {
index,
clear,
};
};
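For illustration, given the format described in the comment above, parsing a made-up key would behave like this (the key value below is hypothetical, not a real key):

```js
// Hypothetical key: "uk" prefix + key ID "5", then "_", then the clear secret.
const { index, clear } = exports.parseAPIKey("uk5_abcdef123456");
// index === "5"            (used to look up the hashed key in the api_key table)
// clear === "abcdef123456" (verified against the stored hash)
```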
/**
* Validate a provided API key
* @param {string} key API key to verify
* @returns {Promise<boolean>} API is ok?
* @returns {boolean} API is ok?
*/
async function verifyAPIKey(key) {
if (typeof key !== "string") {
return false;
}
const { index, clear } = exports.parseAPIKey(key);
// uk prefix + key ID is before _
let index = key.substring(2, key.indexOf("_"));
let clear = key.substring(key.indexOf("_") + 1, key.length);
let hash = await R.findOne("api_key", " id=? ", [ index ]);
if (hash === null) {
@@ -77,28 +65,6 @@ async function verifyAPIKey(key) {
return hash && passwordHash.verify(clear, hash.key);
}
/**
* Verify an API key, enforcing the shared API rate limit
* @param {string} key API key to verify
* @returns {Promise<void>}
* @throws {Error} If API key is invalid or rate limit exceeded
*/
async function verifyAPIKeyWithRateLimit(key) {
const pass = await apiRateLimiter.pass(null, 0);
if (pass) {
await apiRateLimiter.removeTokens(1);
const valid = await verifyAPIKey(key);
if (!valid) {
const errMsg = "Failed API auth attempt: invalid API Key";
log.warn("api-auth", errMsg);
throw new Error(errMsg);
}
} else {
const errMsg = "Failed API auth attempt: rate limit exceeded";
log.warn("api-auth", errMsg);
throw new Error(errMsg);
}
}
/**
* Callback for basic auth authorizers
* @callback authCallback
@@ -114,10 +80,22 @@ async function verifyAPIKeyWithRateLimit(key) {
* @returns {void}
*/
function apiAuthorizer(username, password, callback) {
verifyAPIKeyWithRateLimit(password).then(() => {
callback(null, true);
}).catch(() => {
callback(null, false);
// API Rate Limit
apiRateLimiter.pass(null, 0).then((pass) => {
if (pass) {
verifyAPIKey(password).then((valid) => {
if (!valid) {
log.warn("api-auth", "Failed API auth attempt: invalid API Key");
}
callback(null, valid);
// Only allow a set number of api requests per minute
// (currently set to 60)
apiRateLimiter.removeTokens(1);
});
} else {
log.warn("api-auth", "Failed API auth attempt: rate limit exceeded");
callback(null, false);
}
});
}
@@ -177,49 +155,25 @@ exports.basicAuth = async function (req, res, next) {
* @param {express.NextFunction} next Next handler in chain
* @returns {void}
*/
exports.basicAuthMiddleware = async function (req, res, next) {
let middleware = basicAuth({
authorizer: apiAuthorizer,
authorizeAsync: true,
challenge: true,
});
middleware(req, res, next);
};
// Get the API key from the Authorization header and verify it
exports.headerAuthMiddleware = async function (req, res, next) {
const authorizationHeader = req.header("Authorization");
let key = null;
if (authorizationHeader && typeof authorizationHeader === "string") {
const arr = authorizationHeader.split(" ");
if (arr.length === 2) {
const type = arr[0];
if (type === "Bearer") {
key = arr[1];
}
}
}
if (key) {
try {
await verifyAPIKeyWithRateLimit(key);
res.locals.apiKeyID = exports.parseAPIKey(key).index;
next();
} catch (e) {
res.status(401);
res.json({
ok: false,
msg: e.message,
exports.apiAuth = async function (req, res, next) {
if (!await Settings.get("disableAuth")) {
let usingAPIKeys = await Settings.get("apiKeysEnabled");
let middleware;
if (usingAPIKeys) {
middleware = basicAuth({
authorizer: apiAuthorizer,
authorizeAsync: true,
challenge: true,
});
} else {
middleware = basicAuth({
authorizer: userAuthorizer,
authorizeAsync: true,
challenge: true,
});
}
middleware(req, res, next);
} else {
await apiRateLimiter.removeTokens(1);
res.status(401);
res.json({
ok: false,
msg: "No API Key provided, please provide an API Key in the \"Authorization\" header",
});
next();
}
};

View File

@@ -45,6 +45,8 @@ async function sendNotificationList(socket) {
* @returns {Promise<void>}
*/
async function sendHeartbeatList(socket, monitorID, toUser = false, overwrite = false) {
const timeLogger = new TimeLogger();
let list = await R.getAll(`
SELECT * FROM heartbeat
WHERE monitor_id = ?
@@ -61,6 +63,8 @@ async function sendHeartbeatList(socket, monitorID, toUser = false, overwrite =
} else {
socket.emit("heartbeatList", monitorID, result, overwrite);
}
timeLogger.print(`[Monitor: ${monitorID}] sendHeartbeatList`);
}
/**
@@ -143,18 +147,15 @@ async function sendAPIKeyList(socket) {
async function sendInfo(socket, hideVersion = false) {
let version;
let latestVersion;
let isContainer;
if (!hideVersion) {
version = checkVersion.version;
latestVersion = checkVersion.latestVersion;
isContainer = (process.env.UPTIME_KUMA_IS_CONTAINER === "1");
}
socket.emit("info", {
version,
latestVersion,
isContainer,
primaryBaseURL: await setting("primaryBaseURL"),
serverTimezone: await server.getTimezone(),
serverTimezoneOffset: server.getTimezoneOffset(),

View File

@@ -133,12 +133,6 @@ class Database {
log.info("db", `Data Dir: ${Database.dataDir}`);
}
/**
* Read the database config
* @throws {Error} If the config is invalid
* @typedef {string|undefined} envString
* @returns {{type: "sqlite"} | {type:envString, hostname:envString, port:envString, database:envString, username:envString, password:envString}} Database config
*/
static readDBConfig() {
let dbConfig;
@@ -155,19 +149,16 @@ class Database {
return dbConfig;
}
/**
* @typedef {string|undefined} envString
* @param {{type: "sqlite"} | {type:envString, hostname:envString, port:envString, database:envString, username:envString, password:envString}} dbConfig the database configuration that should be written
* @returns {void}
*/
static writeDBConfig(dbConfig) {
fs.writeFileSync(path.join(Database.dataDir, "db-config.json"), JSON.stringify(dbConfig, null, 4));
}
/**
* Connect to the database
* @param {boolean} testMode Should the connection be started in test mode?
* @param {boolean} autoloadModels Should models be automatically loaded?
* @param {boolean} testMode Should the connection be
* started in test mode?
* @param {boolean} autoloadModels Should models be
* automatically loaded?
* @param {boolean} noLog Should logs not be output?
* @returns {Promise<void>}
*/
@@ -186,12 +177,6 @@ class Database {
let config = {};
let mariadbPoolConfig = {
afterCreate: function (conn, done) {
}
};
log.info("db", `Database Type: ${dbConfig.type}`);
if (dbConfig.type === "sqlite") {
@@ -242,9 +227,7 @@ class Database {
user: dbConfig.username,
password: dbConfig.password,
database: dbConfig.dbName,
timezone: "+00:00",
},
pool: mariadbPoolConfig,
}
};
} else if (dbConfig.type === "embedded-mariadb") {
let embeddedMariaDB = EmbeddedMariaDB.getInstance();
@@ -256,8 +239,7 @@ class Database {
socketPath: embeddedMariaDB.socketPath,
user: "node",
database: "kuma",
},
pool: mariadbPoolConfig,
}
};
} else {
throw new Error("Unknown Database type: " + dbConfig.type);
@@ -294,11 +276,6 @@ class Database {
}
}
/**
@param {boolean} testMode Should the connection be started in test mode?
@param {boolean} noLog Should logs not be output?
@returns {Promise<void>}
*/
static async initSQLite(testMode, noLog) {
await R.exec("PRAGMA foreign_keys = ON");
if (testMode) {
@@ -324,10 +301,6 @@ class Database {
}
}
/**
* Initialize MariaDB
* @returns {Promise<void>}
*/
static async initMariaDB() {
log.debug("db", "Checking if MariaDB database exists...");
@@ -358,19 +331,13 @@ class Database {
directory: Database.knexMigrationsPath,
});
} catch (e) {
// Allow missing patch files for downgrading or testing PRs.
if (e.message.includes("the following files are missing:")) {
log.warn("db", e.message);
log.warn("db", "Database migration failed, you may be downgrading Uptime Kuma.");
} else {
log.error("db", "Database migration failed");
throw e;
}
log.error("db", "Database migration failed");
throw e;
}
}
/**
* TODO
*
* @returns {Promise<void>}
*/
static async rollbackLatestPatch() {
@@ -379,7 +346,6 @@ class Database {
/**
* Patch the database for SQLite
* @returns {Promise<void>}
* @deprecated
*/
static async patchSqlite() {
@@ -604,6 +570,14 @@ class Database {
}
}
/**
* Acquire a direct connection to the database
* @returns {any} Database connection
*/
static getBetterSQLite3Database() {
return R.knex.client.acquireConnection();
}
/**
* Special handle, because tarn.js throw a promise reject that cannot be caught
* @returns {Promise<void>}
@@ -617,9 +591,7 @@ class Database {
log.info("db", "Closing the database");
// Flush WAL to main database
if (Database.dbConfig.type === "sqlite") {
await R.exec("PRAGMA wal_checkpoint(TRUNCATE)");
}
await R.exec("PRAGMA wal_checkpoint(TRUNCATE)");
while (true) {
Database.noReject = true;
@@ -632,23 +604,20 @@ class Database {
log.info("db", "Waiting to close the database");
}
}
log.info("db", "Database closed");
log.info("db", "SQLite closed");
process.removeListener("unhandledRejection", listener);
}
/**
* Get the size of the database (SQLite only)
* Get the size of the database
* @returns {number} Size of database
*/
static getSize() {
if (Database.dbConfig.type === "sqlite") {
log.debug("db", "Database.getSize()");
let stats = fs.statSync(Database.sqlitePath);
log.debug("db", stats);
return stats.size;
}
return 0;
log.debug("db", "Database.getSize()");
let stats = fs.statSync(Database.sqlitePath);
log.debug("db", stats);
return stats.size;
}
/**
@@ -656,16 +625,11 @@ class Database {
* @returns {Promise<void>}
*/
static async shrink() {
if (Database.dbConfig.type === "sqlite") {
await R.exec("VACUUM");
}
await R.exec("VACUUM");
}
/**
* @returns {string} Get the SQL for the current time plus a number of hours
*/
static sqlHourOffset() {
if (Database.dbConfig.type === "sqlite") {
if (this.dbConfig.client === "sqlite3") {
return "DATETIME('now', ? || ' hours')";
} else {
return "DATE_ADD(NOW(), INTERVAL ? HOUR)";

View File

@@ -80,8 +80,8 @@ class DockerHost {
options.socketPath = dockerHost.dockerDaemon;
} else if (dockerHost.dockerType === "tcp") {
options.baseURL = DockerHost.patchDockerURL(dockerHost.dockerDaemon);
options.httpsAgent = new https.Agent(DockerHost.getHttpsAgentOptions(dockerHost.dockerType, options.baseURL));
}
options.httpsAgent = new https.Agent(DockerHost.getHttpsAgentOptions(dockerHost.dockerType, options.baseURL));
let res = await axios.request(options);

View File

@@ -18,17 +18,14 @@ class EmbeddedMariaDB {
socketPath = this.runDir + "/mysqld.sock";
/**
* @type {ChildProcessWithoutNullStreams}
* @private
*/
childProcess = null;
running = false;
started = false;
/**
* @returns {EmbeddedMariaDB} The singleton instance
*
* @returns {EmbeddedMariaDB}
*/
static getInstance() {
if (!EmbeddedMariaDB.instance) {
@@ -37,16 +34,12 @@ class EmbeddedMariaDB {
return EmbeddedMariaDB.instance;
}
/**
* @returns {boolean} If the singleton instance is created
*/
static hasInstance() {
return !!EmbeddedMariaDB.instance;
}
/**
* Start the embedded MariaDB
* @returns {Promise<void>|void} A promise that resolves when the MariaDB is started or void if it is already started
*
*/
start() {
if (this.childProcess) {
@@ -107,10 +100,6 @@ class EmbeddedMariaDB {
});
}
/**
* Stop all the child processes
* @returns {void}
*/
stop() {
if (this.childProcess) {
this.childProcess.kill("SIGINT");
@@ -118,10 +107,6 @@ class EmbeddedMariaDB {
}
}
/**
* Install MariaDB if it is not installed and make sure the `runDir` directory exists
* @returns {void}
*/
initDB() {
if (!fs.existsSync(this.mariadbDataDir)) {
log.info("mariadb", `Embedded MariaDB: ${this.mariadbDataDir} is not found, create one now.`);
@@ -152,10 +137,6 @@ class EmbeddedMariaDB {
}
/**
* Initialise the "kuma" database in mariadb if it does not exist
* @returns {Promise<void>}
*/
async initDBAfterStarted() {
const connection = mysql.createConnection({
socketPath: this.socketPath,

View File

@@ -43,9 +43,7 @@ const clearOldData = async () => {
[ parsedPeriod * -24 ]
);
if (Database.dbConfig.type === "sqlite") {
await R.exec("PRAGMA optimize;");
}
await R.exec("PRAGMA optimize;");
} catch (e) {
log.error("clearOldData", `Failed to clear old data: ${e.message}`);
}

View File

@@ -2,10 +2,10 @@ const https = require("https");
const dayjs = require("dayjs");
const axios = require("axios");
const { Prometheus } = require("../prometheus");
const { log, UP, DOWN, PENDING, MAINTENANCE, flipStatus, MAX_INTERVAL_SECOND, MIN_INTERVAL_SECOND,
const { log, UP, DOWN, PENDING, MAINTENANCE, flipStatus, TimeLogger, MAX_INTERVAL_SECOND, MIN_INTERVAL_SECOND,
SQL_DATETIME_FORMAT
} = require("../../src/util");
const { tcping, ping, checkCertificate, checkStatusCode, getTotalClientInRoom, setting, mssqlQuery, postgresQuery, mysqlQuery, mqttAsync, setSetting, httpNtlm, radius, grpcQuery,
const { tcping, ping, dnsResolve, checkCertificate, checkStatusCode, getTotalClientInRoom, setting, mssqlQuery, postgresQuery, mysqlQuery, mqttAsync, setSetting, httpNtlm, radius, grpcQuery,
redisPingAsync, mongodbPing, kafkaProducerAsync, getOidcTokenClientCredentials,
} = require("../util-server");
const { R } = require("redbean-node");
@@ -18,10 +18,11 @@ const apicache = require("../modules/apicache");
const { UptimeKumaServer } = require("../uptime-kuma-server");
const { CacheableDnsHttpAgent } = require("../cacheable-dns-http-agent");
const { DockerHost } = require("../docker");
const { UptimeCacheList } = require("../uptime-cache-list");
const Gamedig = require("gamedig");
const jsonata = require("jsonata");
const jwt = require("jsonwebtoken");
const { UptimeCalculator } = require("../uptime-calculator");
const Database = require("../database");
/**
* status:
@@ -56,7 +57,7 @@ class Monitor extends BeanModel {
obj.tags = await this.getTags();
}
if (certExpiry && this.type === "http" && this.getURLProtocol() === "https:") {
if (certExpiry && this.type === "http") {
const { certExpiryDaysRemaining, validCert } = await this.getCertExpiry(this.id);
obj.certExpiryDaysRemaining = certExpiryDaysRemaining;
obj.validCert = validCert;
@@ -290,10 +291,6 @@ class Monitor extends BeanModel {
return JSON.parse(this.accepted_statuscodes_json);
}
/**
* Get if game dig should only use the port which was provided
* @returns {boolean} gamedig should only use the provided port
*/
getGameDigGivenPortOnly() {
return Boolean(this.gamedigGivenPortOnly);
}
@@ -346,6 +343,13 @@ class Monitor extends BeanModel {
bean.status = flipStatus(bean.status);
}
// Duration
if (!isFirstBeat) {
bean.duration = dayjs(bean.time).diff(dayjs(previousBeat.time), "second");
} else {
bean.duration = 0;
}
try {
if (await Monitor.isUnderMaintenance(this.id)) {
bean.msg = "Monitor under maintenance";
@@ -431,9 +435,6 @@ class Monitor extends BeanModel {
} catch (e) {
throw new Error("Your JSON body is invalid. " + e.message);
}
} else if (this.httpBodyEncoding === "form") {
bodyValue = this.body;
contentType = "application/x-www-form-urlencoded";
} else if (this.httpBodyEncoding === "xml") {
bodyValue = this.body;
contentType = "text/xml; charset=utf-8";
@@ -584,6 +585,46 @@ class Monitor extends BeanModel {
bean.ping = await ping(this.hostname, this.packetSize);
bean.msg = "";
bean.status = UP;
} else if (this.type === "dns") {
let startTime = dayjs().valueOf();
let dnsMessage = "";
let dnsRes = await dnsResolve(this.hostname, this.dns_resolve_server, this.port, this.dns_resolve_type);
bean.ping = dayjs().valueOf() - startTime;
if (this.dns_resolve_type === "A" || this.dns_resolve_type === "AAAA" || this.dns_resolve_type === "TXT") {
dnsMessage += "Records: ";
dnsMessage += dnsRes.join(" | ");
} else if (this.dns_resolve_type === "CNAME" || this.dns_resolve_type === "PTR") {
dnsMessage = dnsRes[0];
} else if (this.dns_resolve_type === "CAA") {
dnsMessage = dnsRes[0].issue;
} else if (this.dns_resolve_type === "MX") {
dnsRes.forEach(record => {
dnsMessage += `Hostname: ${record.exchange} - Priority: ${record.priority} | `;
});
dnsMessage = dnsMessage.slice(0, -2);
} else if (this.dns_resolve_type === "NS") {
dnsMessage += "Servers: ";
dnsMessage += dnsRes.join(" | ");
} else if (this.dns_resolve_type === "SOA") {
dnsMessage += `NS-Name: ${dnsRes.nsname} | Hostmaster: ${dnsRes.hostmaster} | Serial: ${dnsRes.serial} | Refresh: ${dnsRes.refresh} | Retry: ${dnsRes.retry} | Expire: ${dnsRes.expire} | MinTTL: ${dnsRes.minttl}`;
} else if (this.dns_resolve_type === "SRV") {
dnsRes.forEach(record => {
dnsMessage += `Name: ${record.name} | Port: ${record.port} | Priority: ${record.priority} | Weight: ${record.weight} | `;
});
dnsMessage = dnsMessage.slice(0, -2);
}
if (this.dnsLastResult !== dnsMessage) {
R.exec("UPDATE `monitor` SET dns_last_result = ? WHERE id = ? ", [
dnsMessage,
this.id
]);
}
bean.msg = dnsMessage;
bean.status = UP;
} else if (this.type === "push") { // Type: Push
log.debug("monitor", `[${this.name}] Checking monitor at ${dayjs().format("YYYY-MM-DD HH:mm:ss.SSS")}`);
const bufferTime = 1000; // 1s buffer to accommodate clock differences
@@ -927,17 +968,11 @@ class Monitor extends BeanModel {
log.warn("monitor", `Monitor #${this.id} '${this.name}': Failing: ${bean.msg} | Interval: ${beatInterval} seconds | Type: ${this.type} | Down Count: ${bean.downCount} | Resend Interval: ${this.resendInterval}`);
}
// Calculate uptime
let uptimeCalculator = await UptimeCalculator.getUptimeCalculator(this.id);
let endTimeDayjs = await uptimeCalculator.update(bean.status, parseFloat(bean.ping));
bean.end_time = R.isoDateTimeMillis(endTimeDayjs);
// Send to frontend
log.debug("monitor", `[${this.name}] Send to socket`);
UptimeCacheList.clearCache(this.id);
io.to(this.user_id).emit("heartbeat", bean.toJSON());
Monitor.sendStats(io, this.id, this.user_id);
// Store to database
log.debug("monitor", `[${this.name}] Store`);
await R.store(bean);
@@ -948,15 +983,7 @@ class Monitor extends BeanModel {
if (! this.isStop) {
log.debug("monitor", `[${this.name}] SetTimeout for next check.`);
let intervalRemainingMs = Math.max(
1,
beatInterval * 1000 - dayjs().diff(dayjs.utc(bean.time))
);
log.debug("monitor", `[${this.name}] Next heartbeat in: ${intervalRemainingMs}ms`);
this.heartbeatInterval = setTimeout(safeBeat, intervalRemainingMs);
this.heartbeatInterval = setTimeout(safeBeat, beatInterval * 1000);
} else {
log.info("monitor", `[${this.name}] isStop = true, no next check.`);
}
@@ -1065,19 +1092,6 @@ class Monitor extends BeanModel {
}
}
/**
* Example: http: or https:
* @returns {(null|string)} URL's protocol
*/
getURLProtocol() {
const url = this.getUrl();
if (url) {
return this.getUrl().protocol;
} else {
return null;
}
}
/**
* Store TLS info to database
* @param {object} checkCertificateResult Certificate to update
@@ -1132,31 +1146,44 @@ class Monitor extends BeanModel {
*/
static async sendStats(io, monitorID, userID) {
const hasClients = getTotalClientInRoom(io, userID) > 0;
let uptimeCalculator = await UptimeCalculator.getUptimeCalculator(monitorID);
if (hasClients) {
// Send 24 hour average ping
let data24h = await uptimeCalculator.get24Hour();
io.to(userID).emit("avgPing", monitorID, (data24h.avgPing) ? data24h.avgPing.toFixed(2) : null);
// Send 24 hour uptime
io.to(userID).emit("uptime", monitorID, 24, data24h.uptime);
// Send 30 day uptime
let data30d = await uptimeCalculator.get30Day();
io.to(userID).emit("uptime", monitorID, 720, data30d.uptime);
// Send 1-year uptime
let data1y = await uptimeCalculator.get1Year();
io.to(userID).emit("uptime", monitorID, "1y", data1y.uptime);
// Send Cert Info
await Monitor.sendAvgPing(24, io, monitorID, userID);
await Monitor.sendUptime(24, io, monitorID, userID);
await Monitor.sendUptime(24 * 30, io, monitorID, userID);
await Monitor.sendCertInfo(io, monitorID, userID);
} else {
log.debug("monitor", "No clients in the room, no need to send stats");
}
}
/**
* Send the average ping to user
* @param {number} duration Hours
* @param {Server} io Socket instance to send data to
* @param {number} monitorID ID of monitor to read
* @param {number} userID ID of user to send data to
* @returns {void}
*/
static async sendAvgPing(duration, io, monitorID, userID) {
const timeLogger = new TimeLogger();
const sqlHourOffset = Database.sqlHourOffset();
let avgPing = parseInt(await R.getCell(`
SELECT AVG(ping)
FROM heartbeat
WHERE time > ${sqlHourOffset}
AND ping IS NOT NULL
AND monitor_id = ? `, [
-duration,
monitorID,
]));
timeLogger.print(`[Monitor: ${monitorID}] avgPing`);
io.to(userID).emit("avgPing", monitorID, avgPing);
}
/**
* Send certificate information to client
* @param {Server} io Socket server instance
@@ -1173,6 +1200,101 @@ class Monitor extends BeanModel {
}
}
/**
* Uptime with calculation
* Calculation based on:
* https://www.uptrends.com/support/kb/reporting/calculation-of-uptime-and-downtime
* @param {number} duration Hours
* @param {number} monitorID ID of monitor to calculate
* @param {boolean} forceNoCache Should the uptime be recalculated?
* @returns {number} Uptime of monitor
*/
static async calcUptime(duration, monitorID, forceNoCache = false) {
if (!forceNoCache) {
let cachedUptime = UptimeCacheList.getUptime(monitorID, duration);
if (cachedUptime != null) {
return cachedUptime;
}
}
const timeLogger = new TimeLogger();
const startTime = R.isoDateTime(dayjs.utc().subtract(duration, "hour"));
// Handle the case where a heartbeat's duration is longer than the target duration
// e.g. if the last beat's duration extends past the 24 h window, only the part between the window start and the beat time is counted (THEN case in SQL)
let result = await R.getRow(`
SELECT
-- SUM all duration, also trim off the beat out of time window
SUM(
CASE
WHEN (JULIANDAY(\`time\`) - JULIANDAY(?)) * 86400 < duration
THEN (JULIANDAY(\`time\`) - JULIANDAY(?)) * 86400
ELSE duration
END
) AS total_duration,
-- SUM all uptime duration, also trim off the beat out of time window
SUM(
CASE
WHEN (status = 1 OR status = 3)
THEN
CASE
WHEN (JULIANDAY(\`time\`) - JULIANDAY(?)) * 86400 < duration
THEN (JULIANDAY(\`time\`) - JULIANDAY(?)) * 86400
ELSE duration
END
END
) AS uptime_duration
FROM heartbeat
WHERE time > ?
AND monitor_id = ?
`, [
startTime, startTime, startTime, startTime, startTime,
monitorID,
]);
timeLogger.print(`[Monitor: ${monitorID}][${duration}] sendUptime`);
let totalDuration = result.total_duration;
let uptimeDuration = result.uptime_duration;
let uptime = 0;
if (totalDuration > 0) {
uptime = uptimeDuration / totalDuration;
if (uptime < 0) {
uptime = 0;
}
} else {
// Handle new monitor with only one beat, because the beat's duration = 0
let status = parseInt(await R.getCell("SELECT `status` FROM heartbeat WHERE monitor_id = ?", [ monitorID ]));
if (status === UP) {
uptime = 1;
}
}
// Cache
UptimeCacheList.addUptime(monitorID, duration, uptime);
return uptime;
}
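A worked example of the trimming logic above may make the SQL easier to follow (numbers invented for illustration): with a 24 h window, a beat recorded 10 s after the window opened but carrying duration = 60 s contributes only 10 s to total_duration (the THEN branch), while fully-contained beats contribute their whole duration. If the window sums to total_duration = 86400 s and uptime_duration = 85536 s, the reported uptime is 85536 / 86400 = 0.99, i.e. 99 %.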
/**
* Send Uptime
* @param {number} duration Hours
* @param {Server} io Socket server instance
* @param {number} monitorID ID of monitor to send
* @param {number} userID ID of user to send to
* @returns {void}
*/
static async sendUptime(duration, io, monitorID, userID) {
const uptime = await this.calcUptime(duration, monitorID);
io.to(userID).emit("uptime", monitorID, duration, uptime);
}
/**
* Has status of monitor changed since last beat?
* @param {boolean} isFirstBeat Is this the first beat of this monitor?

View File

@@ -1,56 +0,0 @@
const { MonitorType } = require("./monitor-type");
const { UP } = require("../../src/util");
const dayjs = require("dayjs");
const { dnsResolve } = require("../util-server");
const { R } = require("redbean-node");
class DnsMonitorType extends MonitorType {
name = "dns";
/**
* @inheritdoc
*/
async check(monitor, heartbeat, _server) {
let startTime = dayjs().valueOf();
let dnsMessage = "";
let dnsRes = await dnsResolve(monitor.hostname, monitor.dns_resolve_server, monitor.port, monitor.dns_resolve_type);
heartbeat.ping = dayjs().valueOf() - startTime;
if (monitor.dns_resolve_type === "A" || monitor.dns_resolve_type === "AAAA" || monitor.dns_resolve_type === "TXT" || monitor.dns_resolve_type === "PTR") {
dnsMessage += "Records: ";
dnsMessage += dnsRes.join(" | ");
} else if (monitor.dns_resolve_type === "CNAME" || monitor.dns_resolve_type === "PTR") {
dnsMessage += dnsRes[0];
} else if (monitor.dns_resolve_type === "CAA") {
dnsMessage += dnsRes[0].issue;
} else if (monitor.dns_resolve_type === "MX") {
dnsRes.forEach(record => {
dnsMessage += `Hostname: ${record.exchange} - Priority: ${record.priority} | `;
});
dnsMessage = dnsMessage.slice(0, -2);
} else if (monitor.dns_resolve_type === "NS") {
dnsMessage += "Servers: ";
dnsMessage += dnsRes.join(" | ");
} else if (monitor.dns_resolve_type === "SOA") {
dnsMessage += `NS-Name: ${dnsRes.nsname} | Hostmaster: ${dnsRes.hostmaster} | Serial: ${dnsRes.serial} | Refresh: ${dnsRes.refresh} | Retry: ${dnsRes.retry} | Expire: ${dnsRes.expire} | MinTTL: ${dnsRes.minttl}`;
} else if (monitor.dns_resolve_type === "SRV") {
dnsRes.forEach(record => {
dnsMessage += `Name: ${record.name} | Port: ${record.port} | Priority: ${record.priority} | Weight: ${record.weight} | `;
});
dnsMessage = dnsMessage.slice(0, -2);
}
if (monitor.dns_last_result !== dnsMessage && dnsMessage !== undefined) {
await R.exec("UPDATE `monitor` SET dns_last_result = ? WHERE id = ? ", [ dnsMessage, monitor.id ]);
}
heartbeat.msg = dnsMessage;
heartbeat.status = UP;
}
}
module.exports = {
DnsMonitorType,
};

View File

@@ -21,7 +21,7 @@ class AliyunSMS extends NotificationProvider {
status: this.statusToString(heartbeatJSON["status"]),
msg: heartbeatJSON["msg"],
});
if (await this.sendSms(notification, msgBody)) {
if (this.sendSms(notification, msgBody)) {
return okMsg;
}
} else {
@@ -31,7 +31,7 @@ class AliyunSMS extends NotificationProvider {
status: "",
msg: msg,
});
if (await this.sendSms(notification, msgBody)) {
if (this.sendSms(notification, msgBody)) {
return okMsg;
}
}
@@ -76,8 +76,7 @@ class AliyunSMS extends NotificationProvider {
if (result.data.Message === "OK") {
return true;
}
throw new Error(result.data.Message);
return false;
}
/**

View File

@@ -46,7 +46,8 @@ class Bark extends NotificationProvider {
}
/**
* Add additional parameter for Bark v1 endpoints
* Add additional parameter for better on device styles (iOS 15
* optimized)
* @param {BeanModel} notification Notification to send
* @param {string} postUrl URL to append parameters to
* @returns {string} Additional URL parameters
@@ -95,23 +96,12 @@ class Bark extends NotificationProvider {
* @returns {string} Success message
*/
async postNotification(notification, title, subtitle, endpoint) {
let result;
if (notification.apiVersion === "v1" || notification.apiVersion == null) {
// url encode title and subtitle
title = encodeURIComponent(title);
subtitle = encodeURIComponent(subtitle);
let postUrl = endpoint + "/" + title + "/" + subtitle;
postUrl = this.appendAdditionalParameters(notification, postUrl);
result = await axios.get(postUrl);
} else {
result = await axios.post(`${endpoint}/push`, {
title,
body: subtitle,
icon: barkNotificationAvatar,
sound: notification.barkSound || "telegraph", // default sound is telegraph
group: notification.barkGroup || "UptimeKuma", // default group is UptimeKuma
});
}
// url encode title and subtitle
title = encodeURIComponent(title);
subtitle = encodeURIComponent(subtitle);
let postUrl = endpoint + "/" + title + "/" + subtitle;
postUrl = this.appendAdditionalParameters(notification, postUrl);
let result = await axios.get(postUrl);
this.checkResult(result);
if (result.statusText != null) {
return "Bark notification succeed: " + result.statusText;

View File

@@ -33,7 +33,6 @@ class Discord extends NotificationProvider {
break;
case "port":
case "dns":
case "gamedig":
case "steam":
address = monitorJSON["hostname"];
if (monitorJSON["port"]) {

View File

@@ -20,10 +20,10 @@ class Opsgenie extends NotificationProvider {
try {
switch (notification.opsgenieRegion) {
case "us":
case "US":
opsgenieAlertsUrl = opsgenieAlertsUrlUS;
break;
case "eu":
case "EU":
opsgenieAlertsUrl = opsgenieAlertsUrlEU;
break;
default:

View File

@@ -28,7 +28,11 @@ class Telegram extends NotificationProvider {
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
if (error.response && error.response.data && error.response.data.description) {
throw new Error(error.response.data.description);
} else {
throw new Error(error.message);
}
}
}
}

View File

@@ -1,4 +1,4 @@
const express = require("express");
let express = require("express");
const { allowDevAllOrigin, allowAllOrigin, percentageToColor, filterAndJoin, sendHttpError } = require("../util-server");
const { R } = require("redbean-node");
const apicache = require("../modules/apicache");
@@ -7,18 +7,11 @@ const dayjs = require("dayjs");
const { UP, MAINTENANCE, DOWN, PENDING, flipStatus, log } = require("../../src/util");
const StatusPage = require("../model/status_page");
const { UptimeKumaServer } = require("../uptime-kuma-server");
const { UptimeCacheList } = require("../uptime-cache-list");
const { makeBadge } = require("badge-maker");
const { badgeConstants } = require("../config");
const { Prometheus } = require("../prometheus");
const Database = require("../database");
const { UptimeCalculator } = require("../uptime-calculator");
const ioClient = require("socket.io-client").io;
const Socket = require("socket.io-client").Socket;
const { headerAuthMiddleware } = require("../auth");
const jwt = require("jsonwebtoken");
const fs = require("fs");
const JSON5 = require("json5");
const apiSpec = JSON5.parse(fs.readFileSync("./extra/api-spec.json5", "utf8"));
let router = express.Router();
@@ -96,7 +89,7 @@ router.get("/api/push/:pushToken", async (request, response) => {
await R.store(bean);
io.to(monitor.user_id).emit("heartbeat", bean.toJSON());
UptimeCacheList.clearCache(monitor.id);
Monitor.sendStats(io, monitor.id, monitor.user_id);
new Prometheus(monitor).update(bean, undefined);
@@ -116,165 +109,6 @@ router.get("/api/push/:pushToken", async (request, response) => {
}
});
/*
* Map Socket.io API to REST API
*/
router.post("/api", headerAuthMiddleware, async (request, response) => {
allowDevAllOrigin(response);
// TODO: Allow whitelist of origins
// Generate a JWT for logging in to the socket.io server
const apiKeyID = response.locals.apiKeyID;
const userID = await R.getCell("SELECT user_id FROM api_key WHERE id = ?", [ apiKeyID ]);
const username = await R.getCell("SELECT username FROM user WHERE id = ?", [ userID ]);
const token = jwt.sign({
username,
}, server.jwtSecret);
const requestData = request.body;
let hostname = "localhost";
if (server.hostname) {
hostname = server.hostname;
}
const protocol = (server.isHTTPS) ? "wss" : "ws";
let wsURL = `${protocol}://${hostname}:${server.port}`;
const socket = ioClient(wsURL, {
transports: [ "websocket" ],
reconnection: false,
});
try {
let result = await socketClientHandler(socket, token, requestData);
let status = 200;
if (result.status) {
status = result.status;
} else if (typeof result === "object" && result.ok === false) {
status = 404;
}
response.status(status).json(result);
} catch (e) {
response.status(e.status).json(e);
}
console.log("Close socket");
socket.disconnect();
});
/**
* @param {Socket} socket
* @param {string} token JWT
* @param {object} requestData Request Data
*/
function socketClientHandler(socket, token, requestData) {
const action = requestData.action;
const params = requestData.params;
return new Promise((resolve, reject) => {
socket.on("connect", () => {
socket.emit("loginByToken", token, (res) => {
if (res.ok) {
let matched = false;
// Find the action in the API spec
for (let actionObj of apiSpec) {
// Find it
if (action === actionObj.name) {
matched = true;
let flatParams = [];
// Check if required parameters are provided
if (actionObj.params.length > 0 && !params) {
reject({
status: 400,
ok: false,
msg: "Missing \"params\" property in request body",
});
return;
}
// Check if required parameters are valid
for (let paramObj of actionObj.params) {
let value = params[paramObj.name];
// Check if required parameter is in a correct data type
if (typeof value !== paramObj.type) {
reject({
status: 400,
ok: false,
msg: `Parameter "${paramObj.name}" should be "${paramObj.type}". Got "${typeof value}" instead.`
});
return;
}
flatParams.push(value);
}
socket.emit(actionObj.name, ...flatParams, (res) => {
resolve(res);
});
break;
}
}
if (action === "getPushExample") {
if (params.length <= 0) {
reject({
status: 400,
ok: false,
msg: "Missing required parameter(s)",
});
} else {
socket.emit("getPushExample", params[0], (res) => {
resolve(res);
});
}
}
if (!matched) {
reject({
status: 404,
ok: false,
msg: "Event not found"
});
}
} else {
reject({
status: 401,
ok: false,
msg: "Login failed?????"
});
}
});
});
socket.on("connect_error", (error) => {
reject({
status: 500,
ok: false,
msg: error.message
});
});
socket.on("error", (error) => {
reject({
status: 500,
ok: false,
msg: error.message
});
});
});
}
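For context, the removed route above accepted a JSON body of the form { action, params } and proxied it onto the Socket.io API after exchanging the API key for a JWT. A hypothetical client call is sketched below; the host, port, key, action and parameter value are made up for illustration, and it assumes the server parses JSON request bodies.

// Hypothetical client-side call to the removed /api bridge (illustrative only)
const body = {
    action: "getPushExample",
    params: [ "javascript-fetch" ],
};

fetch("http://localhost:3001/api", {
    method: "POST",
    headers: {
        "Authorization": "Bearer <api-key>",
        "Content-Type": "application/json",
    },
    body: JSON.stringify(body),
}).then((res) => res.json()).then(console.log);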
/*
* Badge API
*/
router.get("/api/badge/:id/status", cache("5 minutes"), async (request, response) => {
allowAllOrigin(response);
@@ -372,13 +206,9 @@ router.get("/api/badge/:id/uptime/:duration?", cache("5 minutes"), async (reques
try {
const requestedMonitorId = parseInt(request.params.id, 10);
// if no duration is given, set value to 24 (h)
let requestedDuration = request.params.duration !== undefined ? request.params.duration : "24h";
const requestedDuration = request.params.duration !== undefined ? parseInt(request.params.duration, 10) : 24;
const overrideValue = value && parseFloat(value);
if (requestedDuration === "24") {
requestedDuration = "24h";
}
let publicMonitor = await R.getRow(`
SELECT monitor_group.monitor_id FROM monitor_group, \`group\`
WHERE monitor_group.group_id = \`group\`.id
@@ -395,8 +225,10 @@ router.get("/api/badge/:id/uptime/:duration?", cache("5 minutes"), async (reques
badgeValues.message = "N/A";
badgeValues.color = badgeConstants.naColor;
} else {
const uptimeCalculator = await UptimeCalculator.getUptimeCalculator(requestedMonitorId);
const uptime = overrideValue ?? uptimeCalculator.getDataByDuration(requestedDuration).uptime;
const uptime = overrideValue ?? await Monitor.calcUptime(
requestedDuration,
requestedMonitorId
);
// limit the displayed uptime percentage to four (two, when displayed as percent) decimal digits
const cleanUptime = (uptime * 100).toPrecision(4);
@@ -442,17 +274,21 @@ router.get("/api/badge/:id/ping/:duration?", cache("5 minutes"), async (request,
const requestedMonitorId = parseInt(request.params.id, 10);
// Default duration is 24 (h) if not defined in queryParam, limited to 720h (30d)
let requestedDuration = request.params.duration !== undefined ? request.params.duration : "24h";
const requestedDuration = Math.min(request.params.duration ? parseInt(request.params.duration, 10) : 24, 720);
const overrideValue = value && parseFloat(value);
if (requestedDuration === "24") {
requestedDuration = "24h";
}
const sqlHourOffset = Database.sqlHourOffset();
// Check if monitor is public
const uptimeCalculator = await UptimeCalculator.getUptimeCalculator(requestedMonitorId);
const publicAvgPing = uptimeCalculator.getDataByDuration(requestedDuration).avgPing;
const publicAvgPing = parseInt(await R.getCell(`
SELECT AVG(ping) FROM monitor_group, \`group\`, heartbeat
WHERE monitor_group.group_id = \`group\`.id
AND heartbeat.time > ${sqlHourOffset}
AND heartbeat.ping IS NOT NULL
AND public = 1
AND heartbeat.monitor_id = ?
`,
[ -requestedDuration, requestedMonitorId ]
));
const badgeValues = { style };

View File

@@ -4,9 +4,9 @@ const { UptimeKumaServer } = require("../uptime-kuma-server");
const StatusPage = require("../model/status_page");
const { allowDevAllOrigin, sendHttpError } = require("../util-server");
const { R } = require("redbean-node");
const Monitor = require("../model/monitor");
const { badgeConstants } = require("../config");
const { makeBadge } = require("badge-maker");
const { UptimeCalculator } = require("../uptime-calculator");
let router = express.Router();
@@ -92,8 +92,8 @@ router.get("/api/status-page/heartbeat/:slug", cache("1 minutes"), async (reques
list = R.convertToBeans("heartbeat", list);
heartbeatList[monitorID] = list.reverse().map(row => row.toPublicJSON());
const uptimeCalculator = await UptimeCalculator.getUptimeCalculator(monitorID);
uptimeList[`${monitorID}_24`] = uptimeCalculator.get24Hour().uptime;
const type = 24;
uptimeList[`${monitorID}_${type}`] = await Monitor.calcUptime(type, monitorID);
}
response.json({

View File

@@ -51,6 +51,11 @@ if (! process.env.NODE_ENV) {
log.info("server", "Node Env: " + process.env.NODE_ENV);
log.info("server", "Inside Container: " + (process.env.UPTIME_KUMA_IS_CONTAINER === "1"));
log.info("server", "Importing Node libraries");
const fs = require("fs");
log.info("server", "Importing 3rd-party libraries");
log.debug("server", "Importing express");
const express = require("express");
const expressStaticGzip = require("express-static-gzip");
@@ -79,7 +84,7 @@ log.info("server", "Importing this project modules");
log.debug("server", "Importing Monitor");
const Monitor = require("./model/monitor");
log.debug("server", "Importing Settings");
const { getSettings, setSettings, setting, initJWTSecret, checkLogin, doubleCheckPassword, startE2eTests,
const { getSettings, setSettings, setting, initJWTSecret, checkLogin, startUnitTest, FBSD, doubleCheckPassword, startE2eTests,
allowDevAllOrigin
} = require("./util-server");
@@ -97,13 +102,27 @@ log.debug("server", "Importing Background Jobs");
const { initBackgroundJobs, stopBackgroundJobs } = require("./jobs");
const { loginRateLimiter, twoFaRateLimiter } = require("./rate-limiter");
const { basicAuthMiddleware } = require("./auth");
const { apiAuth } = require("./auth");
const { login } = require("./auth");
const passwordHash = require("./password-hash");
const checkVersion = require("./check-version");
log.info("server", "Version: " + checkVersion.version);
// If host is omitted, the server will accept connections on the unspecified IPv6 address (::) when IPv6 is available and the unspecified IPv4 address (0.0.0.0) otherwise.
// Dual-stack support for (::)
// Also read HOST if not FreeBSD, as HOST is a system environment variable in FreeBSD
let hostEnv = FBSD ? null : process.env.HOST;
let hostname = args.host || process.env.UPTIME_KUMA_HOST || hostEnv;
if (hostname) {
log.info("server", "Custom hostname: " + hostname);
}
const port = [ args.port, process.env.UPTIME_KUMA_PORT, process.env.PORT, 3001 ]
.map(portValue => parseInt(portValue))
.find(portValue => !isNaN(portValue));
const disableFrameSameOrigin = !!process.env.UPTIME_KUMA_DISABLE_FRAME_SAMEORIGIN || args["disable-frame-sameorigin"] || false;
const cloudflaredToken = args["cloudflared-token"] || process.env.UPTIME_KUMA_CLOUDFLARED_TOKEN || undefined;
@@ -125,7 +144,7 @@ if (config.demoMode) {
}
// Must be after io instantiation
const { sendNotificationList, sendHeartbeatList, sendInfo, sendProxyList, sendDockerHostList, sendAPIKeyList } = require("./client");
const { sendNotificationList, sendHeartbeatList, sendImportantHeartbeatList, sendInfo, sendProxyList, sendDockerHostList, sendAPIKeyList } = require("./client");
const { statusPageSocketHandler } = require("./socket-handlers/status-page-socket-handler");
const databaseSocketHandler = require("./socket-handlers/database-socket-handler");
const TwoFA = require("./2fa");
@@ -168,7 +187,7 @@ let needSetup = false;
let setupDatabase = new SetupDatabase(args, server);
if (setupDatabase.isNeedSetup()) {
// Hold here and start a special setup page until user choose a database type
await setupDatabase.start(server.hostname, server.port);
await setupDatabase.start(hostname, port);
}
// Connect to database
@@ -232,12 +251,6 @@ let needSetup = false;
log.debug("test", request.body);
response.send("OK");
});
app.post("/test-x-www-form-urlencoded", async (request, response) => {
log.debug("test", request.headers);
log.debug("test", request.body);
response.send("OK");
});
}
// Robots.txt
@@ -253,8 +266,8 @@ let needSetup = false;
// Basic Auth Router here
// Prometheus API metrics /metrics
// With Basic Auth using an API Key
app.get("/metrics", basicAuthMiddleware, prometheusAPIMetrics());
// With Basic Auth using the first user's username/password
app.get("/metrics", apiAuth, prometheusAPIMetrics());
app.use("/", expressStaticGzip("dist", {
enableBrotli: true,
@@ -328,8 +341,7 @@ let needSetup = false;
callback({
ok: false,
msg: "authUserInactiveOrDeleted",
msgi18n: true,
msg: "The user is inactive or deleted.",
});
}
} catch (error) {
@@ -338,8 +350,7 @@ let needSetup = false;
callback({
ok: false,
msg: "authInvalidToken",
msgi18n: true,
msg: "Invalid token.",
});
}
@@ -415,8 +426,7 @@ let needSetup = false;
callback({
ok: false,
msg: "authInvalidToken",
msgi18n: true,
msg: "Invalid Token!",
});
}
}
@@ -426,8 +436,7 @@ let needSetup = false;
callback({
ok: false,
msg: "authIncorrectCreds",
msgi18n: true,
msg: "Incorrect username or password.",
});
}
@@ -483,8 +492,7 @@ let needSetup = false;
} else {
callback({
ok: false,
msg: "2faAlreadyEnabled",
msgi18n: true,
msg: "2FA is already enabled.",
});
}
} catch (error) {
@@ -514,8 +522,7 @@ let needSetup = false;
callback({
ok: true,
msg: "2faEnabled",
msgi18n: true,
msg: "2FA Enabled.",
});
} catch (error) {
@@ -544,8 +551,7 @@ let needSetup = false;
callback({
ok: true,
msg: "2faDisabled",
msgi18n: true,
msg: "2FA Disabled.",
});
} catch (error) {
@@ -577,8 +583,7 @@ let needSetup = false;
} else {
callback({
ok: false,
msg: "authInvalidToken",
msgi18n: true,
msg: "Invalid Token.",
valid: false,
});
}
@@ -641,8 +646,7 @@ let needSetup = false;
callback({
ok: true,
msg: "successAdded",
msgi18n: true,
msg: "Added Successfully.",
});
} catch (e) {
@@ -695,8 +699,7 @@ let needSetup = false;
callback({
ok: true,
msg: "successAdded",
msgi18n: true,
msg: "Added Successfully.",
monitorID: bean.id,
});
@@ -752,11 +755,11 @@ let needSetup = false;
bean.basic_auth_user = monitor.basic_auth_user;
bean.basic_auth_pass = monitor.basic_auth_pass;
bean.timeout = monitor.timeout;
bean.oauth_client_id = monitor.oauth_client_id;
bean.oauth_client_secret = monitor.oauth_client_secret;
bean.oauth_auth_method = monitor.oauth_auth_method;
bean.oauth_token_url = monitor.oauth_token_url;
bean.oauth_scopes = monitor.oauth_scopes;
bean.oauth_client_id = monitor.oauth_client_id,
bean.oauth_client_secret = monitor.oauth_client_secret,
bean.oauth_auth_method = this.oauth_auth_method,
bean.oauth_token_url = monitor.oauth_token_url,
bean.oauth_scopes = monitor.oauth_scopes,
bean.tlsCa = monitor.tlsCa;
bean.tlsCert = monitor.tlsCert;
bean.tlsKey = monitor.tlsKey;
@@ -827,7 +830,7 @@ let needSetup = false;
await updateMonitorNotification(bean.id, monitor.notificationIDList);
if (await bean.isActive()) {
if (bean.isActive()) {
await restartMonitor(socket.userID, bean.id);
}
@@ -836,7 +839,6 @@ let needSetup = false;
callback({
ok: true,
msg: "Saved.",
msgi18n: true,
monitorID: bean.id,
});
@@ -933,8 +935,7 @@ let needSetup = false;
callback({
ok: true,
msg: "successResumed",
msgi18n: true,
msg: "Resumed Successfully.",
});
} catch (e) {
@@ -953,8 +954,7 @@ let needSetup = false;
callback({
ok: true,
msg: "successPaused",
msgi18n: true,
msg: "Paused Successfully.",
});
} catch (e) {
@@ -992,11 +992,12 @@ let needSetup = false;
callback({
ok: true,
msg: "successDeleted",
msgi18n: true,
msg: "Deleted Successfully.",
});
await server.sendMonitorList(socket);
// Clear heartbeat list on client
await sendImportantHeartbeatList(socket, monitorID, true, true);
} catch (e) {
callback({
@@ -1055,8 +1056,7 @@ let needSetup = false;
if (bean == null) {
callback({
ok: false,
msg: "tagNotFound",
msgi18n: true,
msg: "Tag not found",
});
return;
}
@@ -1066,8 +1066,7 @@ let needSetup = false;
callback({
ok: true,
msg: "Saved.",
msgi18n: true,
msg: "Saved",
tag: await bean.toJSON(),
});
@@ -1087,8 +1086,7 @@ let needSetup = false;
callback({
ok: true,
msg: "successDeleted",
msgi18n: true,
msg: "Deleted Successfully.",
});
} catch (e) {
@@ -1111,8 +1109,7 @@ let needSetup = false;
callback({
ok: true,
msg: "successAdded",
msgi18n: true,
msg: "Added Successfully.",
});
} catch (e) {
@@ -1135,8 +1132,7 @@ let needSetup = false;
callback({
ok: true,
msg: "successEdited",
msgi18n: true,
msg: "Edited Successfully.",
});
} catch (e) {
@@ -1157,78 +1153,14 @@ let needSetup = false;
value,
]);
callback({
ok: true,
msg: "successDeleted",
msgi18n: true,
});
} catch (e) {
callback({
ok: false,
msg: e.message,
});
}
});
socket.on("monitorImportantHeartbeatListCount", async (monitorID, callback) => {
try {
checkLogin(socket);
let count;
if (monitorID == null) {
count = await R.count("heartbeat", "important = 1");
} else {
count = await R.count("heartbeat", "monitor_id = ? AND important = 1", [
monitorID,
]);
}
// Cleanup unused Tags
await R.exec("delete from tag where ( select count(*) from monitor_tag mt where tag.id = mt.tag_id ) = 0");
callback({
ok: true,
count: count,
msg: "Deleted Successfully.",
});
} catch (e) {
callback({
ok: false,
msg: e.message,
});
}
});
socket.on("monitorImportantHeartbeatListPaged", async (monitorID, offset, count, callback) => {
try {
checkLogin(socket);
let list;
if (monitorID == null) {
list = await R.find("heartbeat", `
important = 1
ORDER BY time DESC
LIMIT ?
OFFSET ?
`, [
count,
offset,
]);
} else {
list = await R.find("heartbeat", `
monitor_id = ?
AND important = 1
ORDER BY time DESC
LIMIT ?
OFFSET ?
`, [
monitorID,
count,
offset,
]);
}
callback({
ok: true,
data: list,
});
} catch (e) {
callback({
ok: false,
@@ -1241,10 +1173,6 @@ let needSetup = false;
try {
checkLogin(socket);
if (typeof password.currentPassword === "undefined") {
throw new Error("Incorrect current password");
}
if (!password.newPassword) {
throw new Error("Invalid new password");
}
@@ -1258,8 +1186,7 @@ let needSetup = false;
callback({
ok: true,
msg: "successAuthChangePassword",
msgi18n: true,
msg: "Password has been updated successfully.",
});
} catch (e) {
@@ -1307,7 +1234,6 @@ let needSetup = false;
}
const previousChromeExecutable = await Settings.get("chromeExecutable");
const previousNSCDStatus = await Settings.get("nscd");
await setSettings("general", data);
server.entryPage = data.entryPage;
@@ -1325,19 +1251,9 @@ let needSetup = false;
await resetChrome();
}
// Update nscd status
if (previousNSCDStatus !== data.nscd) {
if (data.nscd) {
server.startNSCDServices();
} else {
server.stopNSCDServices();
}
}
callback({
ok: true,
msg: "Saved.",
msgi18n: true,
msg: "Saved"
});
sendInfo(socket);
@@ -1361,8 +1277,7 @@ let needSetup = false;
callback({
ok: true,
msg: "Saved.",
msgi18n: true,
msg: "Saved",
id: notificationBean.id,
});
@@ -1383,8 +1298,7 @@ let needSetup = false;
callback({
ok: true,
msg: "successDeleted",
msgi18n: true,
msg: "Deleted",
});
} catch (e) {
@@ -1619,8 +1533,7 @@ let needSetup = false;
callback({
ok: true,
msg: "successBackupRestored",
msgi18n: true,
msg: "Backup successfully restored.",
});
} catch (e) {
@@ -1643,6 +1556,8 @@ let needSetup = false;
monitorID,
]);
await sendImportantHeartbeatList(socket, monitorID, true, true);
callback({
ok: true,
});
@@ -1735,15 +1650,19 @@ let needSetup = false;
server.start();
server.httpServer.listen(server.port, server.hostname, () => {
if (server.hostname) {
log.info("server", `Listening on ${server.hostname}:${server.port}`);
server.httpServer.listen(port, hostname, () => {
if (hostname) {
log.info("server", `Listening on ${hostname}:${port}`);
} else {
log.info("server", `Listening on ${server.port}`);
log.info("server", `Listening on ${port}`);
}
startMonitors();
checkVersion.startInterval();
if (testMode) {
startUnitTest();
}
if (e2eTestMode) {
startE2eTests();
}
@@ -1823,6 +1742,10 @@ async function afterLogin(socket, user) {
await sendHeartbeatList(socket, monitorID);
}
for (let monitorID in monitorList) {
await sendImportantHeartbeatList(socket, monitorID);
}
for (let monitorID in monitorList) {
await Monitor.sendStats(io, monitorID, user.id);
}

View File

@@ -8,33 +8,21 @@ const { allowDevAllOrigin } = require("./util-server");
const mysql = require("mysql2/promise");
/**
* A standalone express app that is used to setup a database
* A standalone express app that is used to setup database
* It is used when db-config.json and kuma.db are not found or invalid
* Once it is configured, it will shut down and start the main server
* Once it is configured, it will shutdown and start the main server
*/
class SetupDatabase {
/**
* Show Setup Page
* @type {boolean}
*/
needSetup = true;
/**
* If the server has finished the setup
* @type {boolean}
* @private
*/
runningSetup = false;
/**
* @inheritDoc
* @type {UptimeKumaServer}
* @private
*/
server;
/**
* @param {object} args The arguments passed from the command line
* @param {UptimeKumaServer} server the main server instance
*/
constructor(args, server) {
this.server = server;
@@ -54,14 +42,9 @@ class SetupDatabase {
} catch (e) {
log.info("setup-database", "db-config.json is not found or invalid: " + e.message);
// Check if kuma.db is found (1.X.X users), generate db-config.json
// Check if kuma.db is found (1.X.X users)
if (fs.existsSync(path.join(Database.dataDir, "kuma.db"))) {
this.needSetup = false;
log.info("setup-database", "kuma.db is found, generate db-config.json");
Database.writeDBConfig({
type: "sqlite",
});
} else {
this.needSetup = true;
}
@@ -84,26 +67,15 @@ class SetupDatabase {
/**
* Show Setup Page
* @returns {boolean} true if the setup page should be shown
*/
isNeedSetup() {
return this.needSetup;
}
/**
* Check if the embedded MariaDB is enabled
* @returns {boolean} true if the embedded MariaDB is enabled
*/
isEnabledEmbeddedMariaDB() {
return process.env.UPTIME_KUMA_ENABLE_EMBEDDED_MARIADB === "1";
}
/**
* Start the setup-database server
* @param {string} hostname where the server is listening
* @param {number} port where the server is listening
* @returns {Promise<void>}
*/
start(hostname, port) {
return new Promise((resolve) => {
const app = express();

View File

@@ -38,8 +38,7 @@ module.exports.apiKeySocketHandler = (socket) => {
callback({
ok: true,
msg: "successAdded",
msgi18n: true,
msg: "Added Successfully.",
key: formattedKey,
keyID: bean.id,
});
@@ -83,8 +82,7 @@ module.exports.apiKeySocketHandler = (socket) => {
callback({
ok: true,
msg: "successDeleted",
msgi18n: true,
msg: "Deleted Successfully.",
});
await sendAPIKeyList(socket);
@@ -111,8 +109,7 @@ module.exports.apiKeySocketHandler = (socket) => {
callback({
ok: true,
msg: "successDisabled",
msgi18n: true,
msg: "Disabled Successfully.",
});
await sendAPIKeyList(socket);
@@ -139,8 +136,7 @@ module.exports.apiKeySocketHandler = (socket) => {
callback({
ok: true,
msg: "successEnabled",
msgi18n: true,
msg: "Enabled Successfully",
});
await sendAPIKeyList(socket);

View File

@@ -18,8 +18,7 @@ module.exports.dockerSocketHandler = (socket) => {
callback({
ok: true,
msg: "Saved.",
msgi18n: true,
msg: "Saved",
id: dockerHostBean.id,
});
@@ -40,8 +39,7 @@ module.exports.dockerSocketHandler = (socket) => {
callback({
ok: true,
msg: "successDeleted",
msgi18n: true,
msg: "Deleted",
});
} catch (e) {

View File

@@ -4,8 +4,6 @@ const { sendInfo } = require("../client");
const { checkLogin } = require("../util-server");
const GameResolver = require("gamedig/lib/GameResolver");
const { testChrome } = require("../monitor-types/real-browser-monitor-type");
const fs = require("fs");
const path = require("path");
let gameResolver = new GameResolver();
let gameList = null;
@@ -55,11 +53,7 @@ module.exports.generalSocketHandler = (socket, server) => {
testChrome(executable).then((version) => {
callback({
ok: true,
msg: {
key: "foundChromiumVersion",
values: [ version ],
},
msgi18n: true,
msg: "Found Chromium/Chrome. Version: " + version,
});
}).catch((e) => {
callback({
@@ -68,29 +62,4 @@ module.exports.generalSocketHandler = (socket, server) => {
});
});
});
socket.on("getPushExample", (language, callback) => {
try {
let dir = path.join("./extra/push-examples", language);
let files = fs.readdirSync(dir);
for (let file of files) {
if (file.startsWith("index.")) {
callback({
ok: true,
code: fs.readFileSync(path.join(dir, file), "utf8"),
});
return;
}
}
} catch (e) {
}
callback({
ok: false,
msg: "Not found",
});
});
};

View File

@@ -30,8 +30,7 @@ module.exports.maintenanceSocketHandler = (socket) => {
callback({
ok: true,
msg: "successAdded",
msgi18n: true,
msg: "Added Successfully.",
maintenanceID,
});
@@ -62,7 +61,6 @@ module.exports.maintenanceSocketHandler = (socket) => {
callback({
ok: true,
msg: "Saved.",
msgi18n: true,
maintenanceID: bean.id,
});
@@ -98,8 +96,7 @@ module.exports.maintenanceSocketHandler = (socket) => {
callback({
ok: true,
msg: "successAdded",
msgi18n: true,
msg: "Added Successfully.",
});
} catch (e) {
@@ -133,8 +130,7 @@ module.exports.maintenanceSocketHandler = (socket) => {
callback({
ok: true,
msg: "successAdded",
msgi18n: true,
msg: "Added Successfully.",
});
} catch (e) {
@@ -253,8 +249,7 @@ module.exports.maintenanceSocketHandler = (socket) => {
callback({
ok: true,
msg: "successDeleted",
msgi18n: true,
msg: "Deleted Successfully.",
});
await server.sendMaintenanceList(socket);
@@ -287,8 +282,7 @@ module.exports.maintenanceSocketHandler = (socket) => {
callback({
ok: true,
msg: "successPaused",
msgi18n: true,
msg: "Paused Successfully.",
});
await server.sendMaintenanceList(socket);
@@ -321,8 +315,7 @@ module.exports.maintenanceSocketHandler = (socket) => {
callback({
ok: true,
msg: "successResumed",
msgi18n: true,
msg: "Resume Successfully",
});
await server.sendMaintenanceList(socket);

View File

@@ -24,8 +24,7 @@ module.exports.proxySocketHandler = (socket) => {
callback({
ok: true,
msg: "Saved.",
msgi18n: true,
msg: "Saved",
id: proxyBean.id,
});
@@ -47,8 +46,7 @@ module.exports.proxySocketHandler = (socket) => {
callback({
ok: true,
msg: "successDeleted",
msgi18n: true,
msg: "Deleted",
});
} catch (e) {

View File

@@ -284,8 +284,7 @@ module.exports.statusPageSocketHandler = (socket) => {
callback({
ok: true,
msg: "successAdded",
msgi18n: true,
msg: "OK!"
});
} catch (error) {

View File

@@ -0,0 +1,51 @@
const { log } = require("../src/util");
class UptimeCacheList {
/**
* list[monitorID][duration]
*/
static list = {};
/**
* Get the uptime for a specific period
* @param {number} monitorID ID of monitor to query
* @param {number} duration Duration to query
* @returns {(number|null)} Uptime for provided duration, if it exists
*/
static getUptime(monitorID, duration) {
if (UptimeCacheList.list[monitorID] && UptimeCacheList.list[monitorID][duration]) {
log.debug("UptimeCacheList", "getUptime: " + monitorID + " " + duration);
return UptimeCacheList.list[monitorID][duration];
} else {
return null;
}
}
/**
* Add uptime for specified monitor
* @param {number} monitorID ID of monitor to insert for
* @param {number} duration Duration to insert for
* @param {number} uptime Uptime to add
* @returns {void}
*/
static addUptime(monitorID, duration, uptime) {
log.debug("UptimeCacheList", "addUptime: " + monitorID + " " + duration);
if (!UptimeCacheList.list[monitorID]) {
UptimeCacheList.list[monitorID] = {};
}
UptimeCacheList.list[monitorID][duration] = uptime;
}
/**
* Clear cache for specified monitor
* @param {number} monitorID ID of monitor to clear
* @returns {void}
*/
static clearCache(monitorID) {
log.debug("UptimeCacheList", "clearCache: " + monitorID);
delete UptimeCacheList.list[monitorID];
}
}
module.exports = {
UptimeCacheList,
};
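A hypothetical usage sketch of the cache above (not part of this diff; the monitor ID, duration and require path are assumptions):

const { UptimeCacheList } = require("./uptime-cache-list");

// Remember the result of an expensive uptime calculation per monitor/duration
UptimeCacheList.addUptime(42, 24, 0.998);

// Later reads hit the cache until a new heartbeat invalidates it
UptimeCacheList.getUptime(42, 24);   // 0.998
UptimeCacheList.clearCache(42);      // called when a new beat arrives (see monitor.js / api-router.js above)
UptimeCacheList.getUptime(42, 24);   // null -> recalculate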

View File

@@ -1,490 +0,0 @@
const dayjs = require("dayjs");
const { UP, MAINTENANCE, DOWN, PENDING } = require("../src/util");
const { LimitQueue } = require("./utils/limit-queue");
const { log } = require("../src/util");
const { R } = require("redbean-node");
/**
* Calculates the uptime of a monitor.
*/
class UptimeCalculator {
/**
* @private
* @type {{string:UptimeCalculator}}
*/
static list = {};
/**
* For testing purposes, we can set the current date to a specific date.
* @type {dayjs.Dayjs}
*/
static currentDate = null;
/**
* monitorID the id of the monitor
* @type {number}
*/
monitorID;
/**
* Recent 24-hour uptime, each item is a 1-minute interval
* Key: {number} DivisionKey
* @type {LimitQueue<number,string>}
*/
minutelyUptimeDataList = new LimitQueue(24 * 60);
/**
* Daily uptime data,
* Key: {number} DailyKey
*/
dailyUptimeDataList = new LimitQueue(365);
lastDailyUptimeData = null;
lastUptimeData = null;
lastDailyStatBean = null;
lastMinutelyStatBean = null;
/**
* Get the uptime calculator for a monitor
* Initializes and returns the monitor if it does not exist
* @param {number} monitorID the id of the monitor
* @returns {Promise<UptimeCalculator>} UptimeCalculator
*/
static async getUptimeCalculator(monitorID) {
if (!UptimeCalculator.list[monitorID]) {
UptimeCalculator.list[monitorID] = new UptimeCalculator();
await UptimeCalculator.list[monitorID].init(monitorID);
}
return UptimeCalculator.list[monitorID];
}
/**
* Remove a monitor from the list
* @param {number} monitorID the id of the monitor
* @returns {Promise<void>}
*/
static async remove(monitorID) {
delete UptimeCalculator.list[monitorID];
}
/**
*
*/
constructor() {
if (process.env.TEST_BACKEND) {
// Override the getCurrentDate() method to return a specific date
// Only for testing
this.getCurrentDate = () => {
if (UptimeCalculator.currentDate) {
return UptimeCalculator.currentDate;
} else {
return dayjs.utc();
}
};
}
}
/**
* Initialize the uptime calculator for a monitor
* @param {number} monitorID the id of the monitor
* @returns {Promise<void>}
*/
async init(monitorID) {
this.monitorID = monitorID;
let now = this.getCurrentDate();
// Load minutely data from database (recent 24 hours only)
let minutelyStatBeans = await R.find("stat_minutely", " monitor_id = ? AND timestamp > ? ORDER BY timestamp", [
monitorID,
this.getMinutelyKey(now.subtract(24, "hour")),
]);
for (let bean of minutelyStatBeans) {
let key = bean.timestamp;
this.minutelyUptimeDataList.push(key, {
up: bean.up,
down: bean.down,
avgPing: bean.ping,
});
}
// Load daily data from database (recent 365 days only)
let dailyStatBeans = await R.find("stat_daily", " monitor_id = ? AND timestamp > ? ORDER BY timestamp", [
monitorID,
this.getDailyKey(now.subtract(365, "day").unix()),
]);
for (let bean of dailyStatBeans) {
let key = bean.timestamp;
this.dailyUptimeDataList.push(key, {
up: bean.up,
down: bean.down,
avgPing: bean.ping,
});
}
}
/**
* @param {number} status status
* @param {number} ping Ping
* @returns {dayjs.Dayjs} date
* @throws {Error} Invalid status
*/
async update(status, ping = 0) {
let date = this.getCurrentDate();
// Don't count MAINTENANCE into uptime
if (status === MAINTENANCE) {
return date;
}
let flatStatus = this.flatStatus(status);
if (flatStatus === DOWN && ping > 0) {
log.warn("uptime-calc", "The ping is not effective when the status is DOWN");
}
let divisionKey = this.getMinutelyKey(date);
let dailyKey = this.getDailyKey(divisionKey);
let minutelyData = this.minutelyUptimeDataList[divisionKey];
let dailyData = this.dailyUptimeDataList[dailyKey];
if (flatStatus === UP) {
minutelyData.up += 1;
dailyData.up += 1;
// Only UP status can update the ping
if (!isNaN(ping)) {
// Add avg ping
// The first beat of the minute, the ping is the current ping
if (minutelyData.up === 1) {
minutelyData.avgPing = ping;
} else {
minutelyData.avgPing = (minutelyData.avgPing * (minutelyData.up - 1) + ping) / minutelyData.up;
}
// Add avg ping (daily)
// The first beat of the day, the ping is the current ping
if (minutelyData.up === 1) {
dailyData.avgPing = ping;
} else {
dailyData.avgPing = (dailyData.avgPing * (dailyData.up - 1) + ping) / dailyData.up;
}
}
} else {
minutelyData.down += 1;
dailyData.down += 1;
}
if (dailyData !== this.lastDailyUptimeData) {
this.lastDailyUptimeData = dailyData;
}
if (minutelyData !== this.lastUptimeData) {
this.lastUptimeData = minutelyData;
}
// Don't store data in test mode
if (process.env.TEST_BACKEND) {
log.debug("uptime-calc", "Skip storing data in test mode");
return date;
}
let dailyStatBean = await this.getDailyStatBean(dailyKey);
dailyStatBean.up = dailyData.up;
dailyStatBean.down = dailyData.down;
dailyStatBean.ping = dailyData.avgPing;
await R.store(dailyStatBean);
let minutelyStatBean = await this.getMinutelyStatBean(divisionKey);
minutelyStatBean.up = minutelyData.up;
minutelyStatBean.down = minutelyData.down;
minutelyStatBean.ping = minutelyData.avgPing;
await R.store(minutelyStatBean);
// Remove the old data
log.debug("uptime-calc", "Remove old data");
await R.exec("DELETE FROM stat_minutely WHERE monitor_id = ? AND timestamp < ?", [
this.monitorID,
this.getMinutelyKey(date.subtract(24, "hour")),
]);
return date;
}
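The avg-ping arithmetic in update() is a standard incremental mean, avg_n = (avg_(n-1) * (n - 1) + ping_n) / n, so no per-beat history needs to be kept. A minimal stand-alone sketch (illustrative only, names invented):

function updateAvgPing(avgPing, upCount, newPing) {
    if (upCount === 1) {
        return newPing; // first UP beat of the bucket
    }
    return (avgPing * (upCount - 1) + newPing) / upCount;
}

// e.g. pings of 100, 200, 300 ms give running averages of 100, 150, 200
let avg = 0;
[ 100, 200, 300 ].forEach((ping, i) => {
    avg = updateAvgPing(avg, i + 1, ping);
});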
/**
* Get the daily stat bean
* @param {number} timestamp milliseconds
* @returns {Promise<import("redbean-node").Bean>} stat_daily bean
*/
async getDailyStatBean(timestamp) {
if (this.lastDailyStatBean && this.lastDailyStatBean.timestamp === timestamp) {
return this.lastDailyStatBean;
}
let bean = await R.findOne("stat_daily", " monitor_id = ? AND timestamp = ?", [
this.monitorID,
timestamp,
]);
if (!bean) {
bean = R.dispense("stat_daily");
bean.monitor_id = this.monitorID;
bean.timestamp = timestamp;
}
this.lastDailyStatBean = bean;
return this.lastDailyStatBean;
}
/**
* Get the minutely stat bean
* @param {number} timestamp milliseconds
* @returns {Promise<import("redbean-node").Bean>} stat_minutely bean
*/
async getMinutelyStatBean(timestamp) {
if (this.lastMinutelyStatBean && this.lastMinutelyStatBean.timestamp === timestamp) {
return this.lastMinutelyStatBean;
}
let bean = await R.findOne("stat_minutely", " monitor_id = ? AND timestamp = ?", [
this.monitorID,
timestamp,
]);
if (!bean) {
bean = R.dispense("stat_minutely");
bean.monitor_id = this.monitorID;
bean.timestamp = timestamp;
}
this.lastMinutelyStatBean = bean;
return this.lastMinutelyStatBean;
}
/**
* @param {dayjs.Dayjs} date The heartbeat date
* @returns {number} Timestamp
*/
getMinutelyKey(date) {
// Convert the current date to the nearest minute (e.g. 2021-01-01 12:34:56 -> 2021-01-01 12:34:00)
date = date.startOf("minute");
// Convert to a Unix timestamp in seconds
let divisionKey = date.unix();
if (! (divisionKey in this.minutelyUptimeDataList)) {
this.minutelyUptimeDataList.push(divisionKey, {
up: 0,
down: 0,
avgPing: 0,
});
}
return divisionKey;
}
/**
* Convert timestamp to daily key
* @param {number} timestamp Timestamp
* @returns {number} Timestamp
*/
getDailyKey(timestamp) {
let date = dayjs.unix(timestamp);
// Convert the date to the nearest day (e.g. 2021-01-01 12:34:56 -> 2021-01-01 00:00:00)
// A changing server timezone could affect the calculation, so use UTC time to avoid this problem.
date = date.utc().startOf("day");
let dailyKey = date.unix();
if (!this.dailyUptimeDataList[dailyKey]) {
this.dailyUptimeDataList.push(dailyKey, {
up: 0,
down: 0,
avgPing: 0,
});
}
return dailyKey;
}
/**
* Flat status to UP or DOWN
* @param {number} status The status which should be turned into a flat status
* @returns {UP|DOWN} The flat status (UP, or DOWN for both DOWN and PENDING)
* @throws {Error} Invalid status
*/
flatStatus(status) {
switch (status) {
case UP:
// case MAINTENANCE:
return UP;
case DOWN:
case PENDING:
return DOWN;
}
throw new Error("Invalid status");
}
/**
* @param {number} num the number of data points which are expected to be returned
* @param {"day" | "minute"} type the type of data which is expected to be returned
* @returns {UptimeDataResult} UptimeDataResult
* @throws {Error} If the requested number of minutes exceeds 1440
*/
getData(num, type = "day") {
let key;
if (type === "day") {
key = this.getDailyKey(this.getCurrentDate().unix());
} else {
if (num > 24 * 60) {
throw new Error("The maximum number of minutes is 1440");
}
key = this.getMinutelyKey(this.getCurrentDate());
}
let total = {
up: 0,
down: 0,
};
let totalPing = 0;
let endTimestamp;
if (type === "day") {
endTimestamp = key - 86400 * (num - 1);
} else {
endTimestamp = key - 60 * (num - 1);
}
// Sum up all data in the specified time range
while (key >= endTimestamp) {
let data;
if (type === "day") {
data = this.dailyUptimeDataList[key];
} else {
data = this.minutelyUptimeDataList[key];
}
if (data) {
total.up += data.up;
total.down += data.down;
totalPing += data.avgPing * data.up;
}
// Previous day
if (type === "day") {
key -= 86400;
} else {
key -= 60;
}
}
let uptimeData = new UptimeDataResult();
if (total.up === 0 && total.down === 0) {
if (type === "day" && this.lastDailyUptimeData) {
total = this.lastDailyUptimeData;
totalPing = total.avgPing * total.up;
} else if (type === "minute" && this.lastUptimeData) {
total = this.lastUptimeData;
totalPing = total.avgPing * total.up;
} else {
uptimeData.uptime = 0;
uptimeData.avgPing = null;
return uptimeData;
}
}
let avgPing;
if (total.up === 0) {
avgPing = null;
} else {
avgPing = totalPing / total.up;
}
uptimeData.uptime = total.up / (total.up + total.down);
uptimeData.avgPing = avgPing;
return uptimeData;
}
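// Worked example with illustrative values (not from the original source): if the
// window sums to total.up = 9, total.down = 1 and totalPing = 9 * 40, then
// getData() reports uptime = 9 / (9 + 1) = 0.9 and avgPing = 360 / 9 = 40.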
/**
* Get the uptime data by duration
* @param {'24h'|'30d'|'1y'} duration Only accept 24h, 30d, 1y
* @returns {UptimeDataResult} UptimeDataResult
* @throws {Error} Invalid duration
*/
getDataByDuration(duration) {
if (duration === "24h") {
return this.get24Hour();
} else if (duration === "30d") {
return this.get30Day();
} else if (duration === "1y") {
return this.get1Year();
} else {
throw new Error("Invalid duration");
}
}
/**
* 24 hours (1440 minutes = 24 * 60)
* @returns {UptimeDataResult} UptimeDataResult
*/
get24Hour() {
return this.getData(1440, "minute");
}
/**
* @returns {UptimeDataResult} UptimeDataResult
*/
get7Day() {
return this.getData(7);
}
/**
* @returns {UptimeDataResult} UptimeDataResult
*/
get30Day() {
return this.getData(30);
}
/**
* @returns {UptimeDataResult} UptimeDataResult
*/
get1Year() {
return this.getData(365);
}
/**
* @returns {dayjs.Dayjs} Current date
*/
getCurrentDate() {
return dayjs.utc();
}
}
class UptimeDataResult {
/**
* Uptime ratio between 0 and 1
* @type {number}
*/
uptime;
/**
* Average ping in milliseconds, or null when there are no UP beats in the range
* @type {number|null}
*/
avgPing;
}
module.exports = {
UptimeCalculator,
UptimeDataResult,
};
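For reference, a minimal usage sketch of the calculator above. The import paths, the source of the UP/DOWN constants and the way a ready-to-use instance is obtained are assumptions for illustration, not part of this diff:

const { UptimeCalculator } = require("./uptime-calculator"); // assumed path
const { UP, DOWN } = require("../src/util"); // assumed location of the status constants

// Sketch only: record two heartbeats and read the aggregated 24-hour result.
async function recordAndReport(calculator) {
    await calculator.update(UP, 42);   // an UP heartbeat with a 42 ms ping
    await calculator.update(DOWN);     // a DOWN heartbeat; ping is ignored when DOWN
    const result = calculator.getDataByDuration("24h");
    console.log(`uptime: ${result.uptime}, avg ping: ${result.avgPing}`);
}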

View File

@@ -12,7 +12,6 @@ const { Settings } = require("./settings");
const dayjs = require("dayjs");
const childProcess = require("child_process");
const path = require("path");
const { FBSD } = require("./util-server");
// DO NOT IMPORT HERE IF THE MODULES USED `UptimeKumaServer.getInstance()`, put at the bottom of this file instead.
/**
@@ -62,23 +61,6 @@ class UptimeKumaServer {
*/
jwtSecret = null;
/**
* Port
* @type {number}
*/
port = undefined;
/**
* Hostname
* @type {string|undefined}
*/
hostname = undefined;
/**
* Is SSL enabled?
*/
isHTTPS = false;
/**
* Get the current instance of the server if it exists, otherwise
* create a new instance.
@@ -96,23 +78,6 @@ class UptimeKumaServer {
* @param {object} args Arguments to initialise server with
*/
constructor(args) {
// Port
this.port = [ args.port, process.env.UPTIME_KUMA_PORT, process.env.PORT, 3001 ]
.map(portValue => parseInt(portValue))
.find(portValue => !isNaN(portValue));
// Hostname
// If host is omitted, the server will accept connections on the unspecified IPv6 address (::) when IPv6 is available and the unspecified IPv4 address (0.0.0.0) otherwise.
// Dual-stack support for (::)
// Also read HOST if not FreeBSD, as HOST is a system environment variable in FreeBSD
let hostEnv = FBSD ? null : process.env.HOST;
this.hostname = args.host || process.env.UPTIME_KUMA_HOST || hostEnv;
if (this.hostname) {
log.info("server", "Custom hostname: " + this.hostname);
}
// SSL
const sslKey = args["ssl-key"] || process.env.UPTIME_KUMA_SSL_KEY || process.env.SSL_KEY || undefined;
const sslCert = args["ssl-cert"] || process.env.UPTIME_KUMA_SSL_CERT || process.env.SSL_CERT || undefined;
@@ -122,7 +87,6 @@ class UptimeKumaServer {
this.app = express();
if (sslKey && sslCert) {
log.info("server", "Server Type: HTTPS");
this.isHTTPS = true;
this.httpServer = https.createServer({
key: fs.readFileSync(sslKey),
cert: fs.readFileSync(sslCert),
@@ -130,7 +94,6 @@ class UptimeKumaServer {
}, this.app);
} else {
log.info("server", "Server Type: HTTP");
this.isHTTPS = false;
this.httpServer = http.createServer(this.app);
}
@@ -147,7 +110,6 @@ class UptimeKumaServer {
// Set Monitor Types
UptimeKumaServer.monitorTypeList["real-browser"] = new RealBrowserMonitorType();
UptimeKumaServer.monitorTypeList["tailscale-ping"] = new TailscalePing();
UptimeKumaServer.monitorTypeList["dns"] = new DnsMonitorType();
this.io = new Server(this.httpServer);
}
@@ -399,11 +361,7 @@ class UptimeKumaServer {
* @returns {Promise<void>}
*/
async start() {
let enable = await Settings.get("nscd");
if (enable || enable === null) {
this.startNSCDServices();
}
this.startServices();
}
/**
@@ -411,11 +369,7 @@ class UptimeKumaServer {
* @returns {Promise<void>}
*/
async stop() {
let enable = await Settings.get("nscd");
if (enable || enable === null) {
this.stopNSCDServices();
}
this.stopServices();
}
/**
@@ -423,7 +377,7 @@ class UptimeKumaServer {
* For now, only used in Docker
* @returns {void}
*/
startNSCDServices() {
startServices() {
if (process.env.UPTIME_KUMA_IS_CONTAINER) {
try {
log.info("services", "Starting nscd");
@@ -438,7 +392,7 @@ class UptimeKumaServer {
* Stop all system services
* @returns {void}
*/
stopNSCDServices() {
stopServices() {
if (process.env.UPTIME_KUMA_IS_CONTAINER) {
try {
log.info("services", "Stopping nscd");
@@ -457,4 +411,3 @@ module.exports = {
// Must be at the end to avoid circular dependencies
const { RealBrowserMonitorType } = require("./monitor-types/real-browser-monitor-type");
const { TailscalePing } = require("./monitor-types/tailscale-ping");
const { DnsMonitorType } = require("./monitor-types/dns");
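The removed constructor hunk above resolves the listening port by taking the first candidate that parses to a number. A standalone sketch of that fallback pattern (the function name is illustrative):

// Pick the first value that parses to a valid port, falling back to 3001.
function resolvePort(argPort) {
    return [ argPort, process.env.UPTIME_KUMA_PORT, process.env.PORT, 3001 ]
        .map((portValue) => parseInt(portValue))
        .find((portValue) => !isNaN(portValue));
}

console.log(resolvePort(undefined)); // 3001 unless UPTIME_KUMA_PORT or PORT is set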

View File

@@ -833,7 +833,7 @@ exports.checkLogin = (socket) => {
*/
exports.doubleCheckPassword = async (socket, currentPassword) => {
if (typeof currentPassword !== "string") {
throw new Error("Wrong data type of current password");
throw new Error("Wrong data type?");
}
let user = await R.findOne("user", " id = ? AND active = 1 ", [
@@ -847,6 +847,29 @@ exports.doubleCheckPassword = async (socket, currentPassword) => {
return user;
};
/**
* Start unit tests
* @returns {Promise<void>}
*/
exports.startUnitTest = async () => {
console.log("Starting unit test...");
const npm = /^win/.test(process.platform) ? "npm.cmd" : "npm";
const child = childProcess.spawn(npm, [ "run", "jest-backend" ]);
child.stdout.on("data", (data) => {
console.log(data.toString());
});
child.stderr.on("data", (data) => {
console.log(data.toString());
});
child.on("close", function (code) {
console.log("Jest exit code: " + code);
process.exit(code);
});
};
/**
* Start end-to-end tests
* @returns {void}
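For context, a hedged sketch of how doubleCheckPassword is typically called from a Socket.io handler. The event name, payload shape and import path are assumptions for illustration, not part of this diff:

const { doubleCheckPassword } = require("./util-server"); // assumed export location

// Sketch only: re-verify the current password before applying a sensitive change.
module.exports.registerExampleHandler = (socket) => {
    socket.on("changePassword", async (passwords, callback) => {
        try {
            await doubleCheckPassword(socket, passwords.currentPassword);
            // ... apply the change here ...
            callback({ ok: true });
        } catch (e) {
            callback({ ok: false, msg: e.message });
        }
    });
};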

View File

@@ -1,85 +0,0 @@
/**
* An object that can be used as an array with a key
* Similar to PHP's associative arrays
* @template K
* @template V
*/
class ArrayWithKey {
/**
* All keys that are stored in the current object
* @type {K[]}
* @private
*/
__stack = [];
/**
* Push an element to the end of the array
* @param {K} key The key of the element
* @param {V} value The value of the element
* @returns {void}
*/
push(key, value) {
this[key] = value;
this.__stack.push(key);
}
/**
* Get the last element and remove it from the array
* @returns {V|undefined} The popped (last) value, or undefined if there is no element to pop
*/
pop() {
let key = this.__stack.pop();
let prop = this[key];
delete this[key];
return prop;
}
/**
* Get the last key
* @returns {K|null} The last key, or null if the array is empty
*/
getLastKey() {
if (this.__stack.length === 0) {
return null;
}
return this.__stack[this.__stack.length - 1];
}
/**
* Get the first element
* @returns {{key:K,value:V}|null} The first element, or null if the array is empty
*/
shift() {
let key = this.__stack.shift();
if (key === undefined) {
// The array is empty; return null as documented instead of { undefined, undefined }
return null;
}
let value = this[key];
delete this[key];
return {
key,
value,
};
}
/**
* Get the length of the array
* @returns {number} Amount of elements stored
*/
length() {
return this.__stack.length;
}
/**
* Get the last value
* @returns {V|null} The last element without removing it, or null if the array is empty
*/
last() {
let key = this.getLastKey();
if (key === null) {
return null;
}
return this[key];
}
}
module.exports = {
ArrayWithKey
};
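A short usage sketch of the class being removed above (the keys and values are illustrative):

const { ArrayWithKey } = require("./array-with-key");

const list = new ArrayWithKey();
list.push(1700000000, { up: 1, down: 0, avgPing: 42 });
list.push(1700000060, { up: 0, down: 1, avgPing: 0 });

console.log(list[1700000000].avgPing); // 42 - entries stay addressable by key
console.log(list.getLastKey());        // 1700000060
console.log(list.length());            // 2
console.log(list.shift());             // { key: 1700000000, value: { up: 1, down: 0, avgPing: 42 } }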

View File

@@ -1,48 +0,0 @@
const { ArrayWithKey } = require("./array-with-key");
/**
* Limit Queue
* The first element will be removed when the length exceeds the limit
*/
class LimitQueue extends ArrayWithKey {
/**
* The limit of the queue after which the first element will be removed
* @private
* @type {number}
*/
__limit;
/**
* The callback function when the queue exceeds the limit
* @private
* @callback onExceedCallback
* @param {{key:K,value:V}|null} item The item that was removed because the limit was exceeded
*/
__onExceed = null;
/**
* @param {number} limit The limit of the queue after which the first element will be removed
*/
constructor(limit) {
super();
this.__limit = limit;
}
/**
* @inheritDoc
*/
push(key, value) {
super.push(key, value);
if (this.length() > this.__limit) {
let item = this.shift();
if (this.__onExceed) {
this.__onExceed(item);
}
}
}
}
module.exports = {
LimitQueue
};
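And a sketch of how LimitQueue trims itself. Assigning the private __onExceed callback directly is only for illustration; real call sites may wire it up differently:

const { LimitQueue } = require("./limit-queue");

const queue = new LimitQueue(2);
queue.__onExceed = (item) => console.log("evicted", item.key); // illustrative wiring only

queue.push("a", 1);
queue.push("b", 2);
queue.push("c", 3); // exceeds the limit of 2, so "a" is shifted out and logged
console.log(queue.length()); // 2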

View File

@@ -584,20 +584,6 @@ h5.settings-subheading::after {
border-bottom: 1px solid $dark-border-color;
}
/* required class */
.code-editor, .css-editor {
/* we don't use `language-` classes anymore, so we need to add background and text color manually */
border-radius: 1rem;
padding: 10px 5px;
border: 1px solid #ced4da;
.dark & {
background: $dark-bg2;
border: 1px solid $dark-border-color;
}
}
$shadow-box-padding: 20px;
@@ -623,18 +609,6 @@ $shadow-box-padding: 20px;
}
}
@media (max-width: 770px) {
.toast-container {
margin-bottom: 100px !important;
}
}
@media (max-width: 550px) {
.toast-container {
margin-bottom: 126px !important;
}
}
// Localization
@import "localization.scss";

View File

@@ -11,7 +11,7 @@
/>
</div>
<div
v-if="!$root.isMobile && size !== 'small' && beatList.length > 4 && $root.styleElapsedTime !== 'none'"
v-if="size !== 'small' && beatList.length > 4 && $root.styleElapsedTime !== 'none'"
class="d-flex justify-content-between align-items-center word" :style="timeStyle"
>
<div>{{ timeSinceFirstBeat }} ago</div>
@@ -68,7 +68,8 @@ export default {
/**
* Calculates the amount of beats of padding needed to fill the length of shortBeatList.
* @returns {number} The amount of beats of padding needed to fill the length of shortBeatList.
*
* @return {number} The amount of beats of padding needed to fill the length of shortBeatList.
*/
numPadding() {
if (!this.beatList) {
@@ -148,7 +149,7 @@ export default {
/**
* Returns the style object for positioning the time element.
* @returns {object} The style object containing the CSS properties for positioning the time element.
* @return {Object} The style object containing the CSS properties for positioning the time element.
*/
timeStyle() {
return {
@@ -158,7 +159,8 @@ export default {
/**
* Calculates the time elapsed since the first valid beat.
* @returns {string} The time elapsed in minutes or hours.
*
* @return {string} The time elapsed in minutes or hours.
*/
timeSinceFirstBeat() {
const firstValidBeat = this.shortBeatList.at(this.numPadding);
@@ -172,7 +174,8 @@ export default {
/**
* Calculates the elapsed time since the last valid beat was registered.
* @returns {string} The elapsed time in minutes, hours or "now".
*
* @return {string} The elapsed time in minutes, hours or "now".
*/
timeSinceLastBeat() {
const lastValidBeat = this.shortBeatList.at(-1);

View File

@@ -5,18 +5,18 @@
<h1 class="h3 mb-3 fw-normal" />
<div v-if="!tokenRequired" class="form-floating">
<input id="floatingInput" v-model="username" type="text" class="form-control" placeholder="Username" autocomplete="username" required>
<input id="floatingInput" v-model="username" type="text" class="form-control" placeholder="Username">
<label for="floatingInput">{{ $t("Username") }}</label>
</div>
<div v-if="!tokenRequired" class="form-floating mt-3">
<input id="floatingPassword" v-model="password" type="password" class="form-control" placeholder="Password" autocomplete="current-password" required>
<input id="floatingPassword" v-model="password" type="password" class="form-control" placeholder="Password">
<label for="floatingPassword">{{ $t("Password") }}</label>
</div>
<div v-if="tokenRequired">
<div class="form-floating mt-3">
<input id="otp" v-model="token" type="text" maxlength="6" class="form-control" placeholder="123456" autocomplete="one-time-code" required>
<input id="otp" v-model="token" type="text" maxlength="6" class="form-control" placeholder="123456">
<label for="otp">{{ $t("Token") }}</label>
</div>
</div>

Some files were not shown because too many files have changed in this diff.