Compare commits


1 Commit

Author: Louis Lam
SHA1: fecee1e649
Message: [empty commit] pull request for <YOUR TASK NAME>
Date: 2023-02-09 17:32:08 +08:00
284 changed files with 23736 additions and 29487 deletions

View File

@@ -1,28 +0,0 @@
# Codespaces
You can modify Uptime Kuma in your browser without setting up a local development environment.
![image](https://github.com/louislam/uptime-kuma/assets/1336778/31d9f06d-dd0b-4405-8e0d-a96586ee4595)
1. Click `Code` -> `Create codespace on master`
2. Wait a few minutes until you see there are two exposed ports
3. Go to the `3000` URL and check that it is working
![image](https://github.com/louislam/uptime-kuma/assets/1336778/909b2eb4-4c5e-44e4-ac26-6d20ed856e7f)
## Frontend
Since the frontend is using [Vite.js](https://vitejs.dev/), all changes in this area will be hot-reloaded.
You don't need to restart the frontend, unless you try to add a new frontend dependency.
## Backend
The backend does not automatically hot-reload.
You will need to restart the backend after changing something using these steps:
1. Click `Terminal`
2. Click `Codespaces: server-dev` in the right panel
3. Press `Ctrl + C` to stop the server
4. Press `Up` to run `npm run start-server-dev`
![image](https://github.com/louislam/uptime-kuma/assets/1336778/e0c0a350-fe46-4588-9f37-e053c85834d1)

View File

@@ -1,22 +0,0 @@
{
"image": "mcr.microsoft.com/devcontainers/javascript-node:dev-18-bookworm",
"features": {
"ghcr.io/devcontainers/features/github-cli:1": {}
},
"updateContentCommand": "npm ci",
"postCreateCommand": "",
"postAttachCommand": {
"frontend-dev": "npm run start-frontend-devcontainer",
"server-dev": "npm run start-server-dev",
"open-port": "gh codespace ports visibility 3001:public -c $CODESPACE_NAME"
},
"customizations": {
"vscode": {
"extensions": [
"streetsidesoftware.code-spell-checker",
"dbaeumer.vscode-eslint"
]
}
},
"forwardPorts": [3000, 3001]
}

View File

@@ -18,7 +18,6 @@ README.md
.vscode
.eslint*
.stylelint*
/.devcontainer
/.github
yarn.lock
app.json
@@ -34,12 +33,6 @@ tsconfig.json
/ecosystem.config.js
/extra/healthcheck.exe
/extra/healthcheck
/extra/exe-builder
/extra/push-examples
/extra/uptime-kuma-push
# Comment the following line if you want to rebuild the healthcheck binary
/extra/healthcheck-armv7
### .gitignore content (commented rules are duplicated)
@@ -55,4 +48,6 @@ dist-ssr
#!/data/.gitkeep
#.vscode
### End of .gitignore content

View File

@@ -78,7 +78,7 @@ module.exports = {
"checkLoops": false,
}],
"space-before-blocks": "warn",
//"no-console": "warn",
//'no-console': 'warn',
"no-extra-boolean-cast": "off",
"no-multiple-empty-lines": [ "warn", {
"max": 1,
@@ -90,8 +90,7 @@ module.exports = {
"no-unneeded-ternary": "error",
"array-bracket-newline": [ "error", "consistent" ],
"eol-last": [ "error", "always" ],
//"prefer-template": "error",
"template-curly-spacing": [ "warn", "never" ],
//'prefer-template': 'error',
"comma-dangle": [ "warn", "only-multiline" ],
"no-empty": [ "error", {
"allowEmptyCatch": true

View File

@@ -26,12 +26,6 @@ body:
label: "📝 Describe your problem"
description: "Please walk us through it step by step."
placeholder: "Describe what are you asking for..."
- type: textarea
id: error-msg
validations:
required: false
attributes:
label: "📝 Error Message(s) or Log"
- type: input
id: uptime-kuma-version
attributes:
@@ -44,7 +38,7 @@ body:
id: operating-system
attributes:
label: "💻 Operating System and Arch"
description: "Which OS is your server/device running on? (For Replit, please do not report this bug)"
description: "Which OS is your server/device running on?"
placeholder: "Ex. Ubuntu 20.04 x86"
validations:
required: true
@@ -52,7 +46,7 @@ body:
id: browser-vendor
attributes:
label: "🌐 Browser"
description: "Which browser are you running on? (For Replit, please do not report this bug)"
description: "Which browser are you running on?"
placeholder: "Ex. Google Chrome 95.0.4638.69"
validations:
required: true

View File

@@ -61,8 +61,8 @@ body:
id: operating-system
attributes:
label: "💻 Operating System and Arch"
description: "Which OS is your server/device running on? (For Replit, please do not report this bug)"
placeholder: "Ex. Ubuntu 20.04 x64 "
description: "Which OS is your server/device running on?"
placeholder: "Ex. Ubuntu 20.04 x86"
validations:
required: true
- type: input

View File

@@ -1,19 +0,0 @@
---
name: "Security Issue"
about: "Just for alerting @louislam, do not provide any details here"
title: "Security Issue"
ref: "main"
labels:
- security
---
DO NOT PROVIDE ANY DETAILS HERE. Please privately report to https://github.com/louislam/uptime-kuma/security/advisories/new.
Why is this issue needed? GitHub Advisories do not send a notification to @louislam, so this issue is a workaround to alert him.
Your GitHub Advisory URL:

View File

@@ -1 +0,0 @@
# This is a .gitignore style file for 'GrantBirki/json-yaml-validate' Action workflow

View File

@@ -1,15 +1,15 @@
# This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node
# This workflow will do a clean install of node dependencies, cache/restore them, build the source code and run tests across different versions of node
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions
name: Auto Test
on:
push:
branches: [ master, 1.23.X ]
branches: [ master ]
paths-ignore:
- '*.md'
pull_request:
branches: [ master, 1.23.X ]
branches: [ master ]
paths-ignore:
- '*.md'
@@ -21,73 +21,54 @@ jobs:
strategy:
matrix:
os: [macos-latest, ubuntu-latest, windows-latest, ARM64]
node: [ 16, 20.5 ]
os: [macos-latest, ubuntu-latest, windows-latest]
node: [ 14, 16, 18, 19 ]
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/
steps:
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Use Node.js ${{ matrix.node }}
uses: actions/setup-node@v4
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node }}
- run: npm ci
cache: 'npm'
- run: npm install
- run: npm run build
- run: npm test
env:
HEADLESS_TEST: 1
JUST_FOR_TEST: ${{ secrets.JUST_FOR_TEST }}
# As a lot of dev dependencies are not supported on ARMv7, we have to test it separately and just test if `npm ci --production` works
armv7-simple-test:
needs: [ check-linters ]
runs-on: ${{ matrix.os }}
timeout-minutes: 15
strategy:
matrix:
os: [ ARMv7 ]
node: [ 16, 20.5 ]
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/
steps:
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v4
- name: Use Node.js ${{ matrix.node }}
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node }}
- run: npm ci --production
check-linters:
runs-on: ubuntu-latest
steps:
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Use Node.js 20
uses: actions/setup-node@v4
- name: Use Node.js 14
uses: actions/setup-node@v3
with:
node-version: 20.5
- run: npm ci
- run: npm run lint:prod
node-version: 14
cache: 'npm'
- run: npm install
- run: npm run lint
e2e-tests:
needs: [ check-linters ]
runs-on: ubuntu-latest
steps:
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Use Node.js 16
uses: actions/setup-node@v4
- name: Use Node.js 14
uses: actions/setup-node@v3
with:
node-version: 16
- run: npm ci
node-version: 14
cache: 'npm'
- run: npm install
- run: npm run build
- run: npm run cy:test
@@ -96,12 +77,13 @@ jobs:
runs-on: ubuntu-latest
steps:
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Use Node.js 16
uses: actions/setup-node@v4
- name: Use Node.js 14
uses: actions/setup-node@v3
with:
node-version: 16
- run: npm ci
node-version: 14
cache: 'npm'
- run: npm install
- run: npm run build
- run: npm run cy:run:unit

View File

@@ -14,10 +14,10 @@ jobs:
node-version: [16]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v4
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node-version }}
cache: 'npm'

View File

@@ -1,27 +0,0 @@
name: json-yaml-validate
on:
push:
branches:
- master
pull_request:
branches:
- master
- 1.23.X
workflow_dispatch:
permissions:
contents: read
pull-requests: write # enable write permissions for pull request comments
jobs:
json-yaml-validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: json-yaml-validate
id: json-yaml-validate
uses: GrantBirki/json-yaml-validate@v2.4.0
with:
comment: "true" # enable comment mode
exclude_file: ".github/config/exclude.txt" # gitignore style file for exclusions

View File

@@ -9,7 +9,7 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v8
- uses: actions/stale@v7
with:
stale-issue-message: 'We are clearing up our old issues and your ticket has been open for 3 months with no activity. Remove stale label or comment or this will be closed in 2 days.'
close-issue-message: 'This issue was closed because it has been stalled for 2 days with no activity.'

.gitignore
View File

@@ -20,9 +20,3 @@ cypress/screenshots
/extra/healthcheck.exe
/extra/healthcheck
/extra/healthcheck-armv7
extra/exe-builder/bin
extra/exe-builder/obj
.vs
.vscode

View File

@@ -10,7 +10,5 @@
"color-function-notation": "legacy",
"shorthand-property-no-redundant-values": null,
"color-hex-length": null,
"declaration-block-no-redundant-longhand-properties": null,
"at-rule-no-unknown": null
}
}

View File

@@ -2,13 +2,13 @@
First of all, I want to thank everyone who made pull requests for Uptime Kuma. I never thought the GitHub Community would be so nice! Because of this, I also never thought that other people would actually read and edit my code. It is not very well structured or commented, sorry about that.
The project was created with vite.js (vue3). Then I created a subdirectory called "server" for the server part. Both frontend and backend share the same package.json.
The project was created with vite.js (vue3). Then I created a subdirectory called "server" for server part. Both frontend and backend share the same package.json.
The frontend code builds into "dist" directory. The server (express.js) exposes the "dist" directory as the root of the endpoint. This is how production is working.
The frontend code build into "dist" directory. The server (express.js) exposes the "dist" directory as root of the endpoint. This is how production is working.
## Key Technical Skills
- Node.js (You should know about promise, async/await and arrow function etc.)
- Node.js (You should know what are promise, async/await and arrow function etc.)
- Socket.io
- SCSS
- Vue.js
@@ -30,40 +30,40 @@ The frontend code builds into "dist" directory. The server (express.js) exposes
## Can I create a pull request for Uptime Kuma?
Yes or no, it depends on what you will try to do. Since I don't want to waste your time, be sure to **create an empty draft pull request or open an issue, so we can have a discussion first**. Especially for a large pull request or you don't know if it will be merged or not.
Yes or no, it depends on what you will try to do. Since I don't want to waste your time, be sure to **create an empty draft pull request or open an issue, so we can have a discussion first**. Especially for a large pull request or you don't know it will be merged or not.
Here are some references:
### ✅ Usually accepted:
✅ Usually Accept:
- Bug fix
- Security fix
- Adding notification providers
- Adding new language files (see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md))
- Adding new language files (You should go to https://weblate.kuma.pet for existing languages)
- Adding new language keys: `$t("...")`
### ⚠️ Discussion required:
⚠️ Discussion First
- Large pull requests
- New features
### ❌ Won't be merged:
- A dedicated PR for translating existing languages (see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md))
- Do not pass the auto-test
❌ Won't Merge
- A dedicated pr for translating existing languages (You can now translate on https://weblate.kuma.pet)
- Do not pass auto test
- Any breaking changes
- Duplicated pull requests
- Duplicated pull request
- Buggy
- UI/UX is not close to Uptime Kuma
- Modifications or deletions of existing logic without a valid reason.
- Adding functions that are completely out of scope
- Converting existing code into other programming languages
- Unnecessarily large code changes that are hard to review and cause conflicts with other PRs.
- Existing logic is completely modified or deleted for no reason
- A function that is completely out of scope
- Convert existing code into other programming languages
- Unnecessary large code changes (Hard to review, causes code conflicts to other pull requests)
The above cases may not cover all possible situations.
The above cases cannot cover all situations.
I (@louislam) have the final say. If your pull request does not meet my expectations, I will reject it, no matter how much time you spend on it. Therefore, it is essential to have a discussion beforehand.
I will assign your pull request to a [milestone](https://github.com/louislam/uptime-kuma/milestones), if I plan to review and merge it.
I will mark your pull request in the [milestones](https://github.com/louislam/uptime-kuma/milestones), if I am plan to review and merge it.
Also, please don't rush or ask for an ETA, because I have to understand the pull request, make sure it has no breaking changes, and check that it sticks to my vision of this project, especially for large pull requests.
Also, please don't rush or ask for ETA, because I have to understand the pull request, make sure it is no breaking changes and stick to my vision of this project, especially for large pull requests.
### Recommended Pull Request Guideline
@@ -83,11 +83,11 @@ Before deep into coding, discussion first is preferred. Creating an empty pull r
## Project Styles
I personally do not like something that requires so many configurations before you can finally start the app. I hope Uptime Kuma installation will be as easy as installing a mobile app.
I personally do not like something that requires so many configurations before you can finally start the app. I hope Uptime Kuma installation could be as easy as like installing a mobile app.
- Easy to install for non-Docker users, no native build dependency is needed (for x86_64/armv7/arm64), no extra config, and no extra effort required to get it running
- Easy to install for non-Docker users, no native build dependency is needed (for x86_64/armv7/arm64), no extra config, no extra effort required to get it running
- Single container for Docker users, no overly complex docker-compose file. Just map the volume and expose the port, and you are good to go
- Settings should be configurable in the frontend. Environment variables are discouraged, unless it is related to startup such as `DATA_DIR`
- Settings should be configurable in the frontend. Environment variable is not encouraged, unless it is related to startup such as `DATA_DIR`
- Easy to use
- The web UI styling should be consistent and nice
@@ -106,11 +106,11 @@ I personally do not like something that requires so many configurations before y
## Tools
- [`Node.js`](https://nodejs.org/) >= 14
- [`npm`](https://www.npmjs.com/) >= 8.5
- [`git`](https://git-scm.com/)
- IDE that supports [`ESLint`](https://eslint.org/) and EditorConfig (I am using [`IntelliJ IDEA`](https://www.jetbrains.com/idea/))
- A SQLite GUI tool (f.ex. [`SQLite Expert Personal`](https://www.sqliteexpert.com/download.html) or [`DBeaver Community`](https://dbeaver.io/download/))
- Node.js >= 14
- NPM >= 8.5
- Git
- IDE that supports ESLint and EditorConfig (I am using IntelliJ IDEA)
- A SQLite GUI tool (SQLite Expert Personal is suggested)
## Install Dependencies for Development
@@ -130,7 +130,7 @@ Port `3000` and port `3001` will be used.
npm run dev
```
But sometimes you may want to restart only the server and not the frontend; in that case, run these commands in two terminals:
But sometimes, you would like to keep restart the server, but not the frontend, you can run these command in two terminals:
```
npm run start-frontend-dev
npm run start-server-dev
@@ -146,13 +146,13 @@ It is mainly a socket.io app + express.js.
express.js is used for:
- entry point such as redirecting to a status page or the dashboard
- serving the frontend built files (index.html, .js and .css etc.)
- serving internal APIs of the status page
- serving internal APIs of status page
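As a rough sketch of this layering (illustrative only, not the actual `server/server.js`; the route and event names below are placeholders):

```js
// Rough sketch of the express.js + socket.io layering described above.
const path = require("path");
const express = require("express");
const { createServer } = require("http");
const { Server } = require("socket.io");

const app = express();
const httpServer = createServer(app);
const io = new Server(httpServer);

// Serve the built frontend ("dist") as the root of the endpoint
app.use(express.static(path.join(__dirname, "dist")));

// Internal API of a status page (placeholder route)
app.get("/api/status-page/heartbeat/:slug", (req, res) => {
    res.json({ heartbeatList: {} });
});

// Entry point: everything else falls back to the SPA's index.html,
// which then shows the dashboard or a status page
app.use((req, res) => {
    res.sendFile(path.join(__dirname, "dist", "index.html"));
});

// Real-time data (heartbeats, monitor updates) travels over Socket.io
io.on("connection", (socket) => {
    socket.emit("info", { version: "dev" });
});

httpServer.listen(3001);
```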
### Structure in /server/
- jobs/ (Jobs that are running in another process)
- model/ (Object model, auto-mapping to the database table name)
- model/ (Object model, auto mapping to the database table name)
- modules/ (Modified 3rd-party modules)
- monitor_types (Monitor Types)
- notification-providers/ (individual notification logic)
@@ -163,7 +163,7 @@ express.js is used for:
## Frontend Dev Server
It binds to `0.0.0.0:3000` by default. The frontend dev server is used for development only.
It binds to `0.0.0.0:3000` by default. Frontend dev server is used for development only.
For production, it is not used. It will be compiled to `dist` directory instead.
@@ -181,7 +181,7 @@ Uptime Kuma Frontend is a single page application (SPA). Most paths are handled
The router is in `src/router.js`
As you can see, most data in the frontend is stored at the root level, even when you navigate to other pages via the router.
As you can see, most data in frontend is stored in root level, even though you changed the current router to any other pages.
The data and socket logic are in `src/mixins/socket.js`.
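As a simplified sketch of that pattern (not the real `src/mixins/socket.js`; the event names are placeholders), the socket connection is opened once and the received data lives in root-level state that every page reads:

```js
// Simplified sketch of the root-level data + socket pattern (placeholder event names).
import { io } from "socket.io-client";

export default {
    data() {
        return {
            // Root-level state: survives router navigation between pages
            socket: null,
            monitorList: {},
            heartbeatList: {},
        };
    },
    created() {
        this.socket = io(); // connect to the backend Socket.io server
        this.socket.on("monitorList", (list) => {
            this.monitorList = list;
        });
        this.socket.on("heartbeat", (beat) => {
            if (!this.heartbeatList[beat.monitorID]) {
                this.heartbeatList[beat.monitorID] = [];
            }
            this.heartbeatList[beat.monitorID].push(beat);
        });
    },
};
```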
@@ -210,25 +210,15 @@ Both frontend and backend share the same package.json. However, the frontend dep
### Update Dependencies
Since updating Vite from 2.5.10 to 2.6.0 previously broke the application completely, from now on only the patch release version should be updated.
Since previously updating Vite 2.5.10 to 2.6.0 broke the application completely, from now on, it should update patch release version only.
Patch release = the third digit ([Semantic Versioning](https://semver.org/))
If for security / bug / other reasons, a library must be updated, breaking changes need to be checked by the person proposing the change.
If for maybe security reasons, a library must be updated. Then you must need to check if there are any breaking changes.
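For illustration, the `semver` package (used here only as a convenience; it is not part of the project's tooling) makes the third-digit rule concrete:

```js
// "Patch release only": with MAJOR.MINOR.PATCH versions, only the third digit changes.
const semver = require("semver"); // npm install semver

console.log(semver.diff("2.5.10", "2.5.11")); // "patch" -> fine to update
console.log(semver.diff("2.5.10", "2.6.0"));  // "minor" -> check for breaking changes first
```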
## Translations
Please add **all** the strings which are translatable to `src/lang/en.json` (If translation keys are omitted, they can not be translated).
**Don't include any other languages in your initial Pull-Request** (even if this is your mother tongue), to avoid merge-conflicts between weblate and `master`.
The translations can then (after merging a PR into `master`) be translated by awesome people donating their language skills.
If you want to help by translating Uptime Kuma into your language, please visit the [instructions on how to translate using weblate](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).
## Spelling & Grammar
Feel free to correct the grammar in the documentation or code.
My mother language is not English and my grammar is not that great.
Please read: https://github.com/louislam/uptime-kuma/tree/master/src/languages
## Wiki
@@ -245,13 +235,12 @@ https://github.com/louislam/uptime-kuma/issues?q=sort%3Aupdated-desc
1. Draft a release note
2. Make sure the repo is cleared
3. If the healthcheck is updated, remember to re-compile it: `npm run build-docker-builder-go`
3. `npm run release-final` with env vars: `VERSION` and `GITHUB_TOKEN`
4. Wait for the `Press any key to continue` prompt
5. `git push`
6. Publish the release note as 1.X.X
7. Press any key to continue
8. Deploy to the demo server: `npm run deploy-demo-server`
8. SSH to demo site server and update to 1.X.X
Checking:

View File

@@ -1,16 +1,16 @@
<div align="center" width="100%">
<img src="./public/icon.svg" width="128" alt="" />
</div>
# Uptime Kuma
Uptime Kuma is an easy-to-use self-hosted monitoring tool.
<a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/stars/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/pulls/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/v/louislam/uptime-kuma/latest?label=docker%20image%20ver." /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/last-commit/louislam/uptime-kuma" /></a> <a target="_blank" href="https://opencollective.com/uptime-kuma"><img src="https://opencollective.com/uptime-kuma/total/badge.svg?label=Open%20Collective%20Backers&color=brightgreen" /></a>
[![GitHub Sponsors](https://img.shields.io/github/sponsors/louislam?label=GitHub%20Sponsors)](https://github.com/sponsors/louislam) <a href="https://weblate.kuma.pet/projects/uptime-kuma/uptime-kuma/">
<img src="https://weblate.kuma.pet/widgets/uptime-kuma/-/svg-badge.svg" alt="Translation status" />
</a>
<div align="center" width="100%">
<img src="./public/icon.svg" width="128" alt="" />
</div>
Uptime Kuma is an easy-to-use self-hosted monitoring tool.
<img src="https://user-images.githubusercontent.com/1336778/212262296-e6205815-ad62-488c-83ec-a5b0d0689f7c.jpg" width="700" alt="" />
## 🥔 Live Demo
@@ -23,10 +23,10 @@ It is a temporary live demo, all data will be deleted after 10 minutes. Use the
## ⭐ Features
* Monitoring uptime for HTTP(s) / TCP / HTTP(s) Keyword / HTTP(s) Json Query / Ping / DNS Record / Push / Steam Game Server / Docker Containers
* Monitoring uptime for HTTP(s) / TCP / HTTP(s) Keyword / Ping / DNS Record / Push / Steam Game Server / Docker Containers
* Fancy, Reactive, Fast UI/UX
* Notifications via Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP), and [90+ notification services, click here for the full list](https://github.com/louislam/uptime-kuma/tree/master/src/components/notifications)
* 20-second intervals
* 20 second intervals
* [Multi Languages](https://github.com/louislam/uptime-kuma/tree/master/src/lang)
* Multiple status pages
* Map status pages to specific domains
@@ -49,14 +49,9 @@ Uptime Kuma is now running on http://localhost:3001
### 💪🏻 Non-Docker
Requirements:
- Platform
- ✅ Major Linux distros such as Debian, Ubuntu, CentOS, Fedora and ArchLinux etc.
- ✅ Windows 10 (x64), Windows Server 2012 R2 (x64) or higher
- ❌ Replit / Heroku
- [Node.js](https://nodejs.org/en/download/) 14 / 16 / 18 / 20.4
- [npm](https://docs.npmjs.com/cli/) >= 7
- [Git](https://git-scm.com/downloads)
Required Tools:
- [Node.js](https://nodejs.org/en/download/) >= 14
- [Git](https://git-scm.com/downloads)
- [pm2](https://pm2.keymetrics.io/) - For running Uptime Kuma in the background
```bash
@@ -70,8 +65,8 @@ npm run setup
# Option 1. Try it
node server/server.js
# (Recommended) Option 2. Run in the background using PM2
# Install PM2 if you don't have it:
# (Recommended) Option 2. Run in background using PM2
# Install PM2 if you don't have it:
npm install pm2 -g && pm2 install pm2-logrotate
# Start Server
@@ -91,10 +86,6 @@ pm2 monit
pm2 save && pm2 startup
```
### Windows Portable (x64)
https://github.com/louislam/uptime-kuma/releases/download/1.23.1/uptime-kuma-windows-x64-portable-1.23.1.zip
### Advanced Installation
If you need more options or need to browse via a reverse proxy, please read:
@@ -109,7 +100,7 @@ https://github.com/louislam/uptime-kuma/wiki/%F0%9F%86%99-How-to-Update
## 🆕 What's Next?
I will assign requests/issues to the next milestone.
I will mark requests/issues to the next milestone.
https://github.com/louislam/uptime-kuma/milestones
@@ -152,18 +143,17 @@ Telegram Notification Sample:
If you love this project, please consider giving me a ⭐.
## 🗣️ Discussion / Ask for Help
## 🗣️ Discussion
⚠️ For any general or technical questions, please don't send me an email, as I am unable to provide support in that manner. I will not respond if you ask such questions.
### Issues Page
I recommend using Google, GitHub Issues, or Uptime Kuma's Subreddit for finding answers to your question. If you cannot find the information you need, feel free to ask:
You can discuss or ask for help in [issues](https://github.com/louislam/uptime-kuma/issues).
- [GitHub Issues](https://github.com/louislam/uptime-kuma/issues)
- [Subreddit r/Uptime kuma](https://www.reddit.com/r/UptimeKuma/)
### Subreddit
My Reddit account: [u/louislamlam](https://reddit.com/u/louislamlam).
You can mention me if you ask a question on Reddit.
[r/Uptime kuma](https://www.reddit.com/r/UptimeKuma/)
## Contribute
@@ -184,10 +174,7 @@ If you want to report a bug or request a new feature, feel free to open a [new i
### Translations
If you want to translate Uptime Kuma into your language, please visit [Weblate Readme](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).
## Spelling & Grammar
Feel free to correct the grammar in the documentation or code.
My mother language is not English and my grammar is not that great.
Feel free to correct my grammar in this README, source code, or wiki, as my mother language is not English and my grammar is not that great.
### Create Pull Requests
If you want to modify Uptime Kuma, please read this guide and follow the rules here: https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md

View File

@@ -2,20 +2,15 @@
## Reporting a Vulnerability
1. Please report security issues to https://github.com/louislam/uptime-kuma/security/advisories/new.
1. Please also create an empty security issue to alert me, as GitHub Advisories do not send a notification; I would probably miss it otherwise. https://github.com/louislam/uptime-kuma/issues/new?assignees=&labels=help&template=security.md
Please report security issues to https://github.com/louislam/uptime-kuma/security/advisories/new.
Do not use the public issue tracker or discuss it in public as it will cause more damage.
## Do you accept other 3rd-party bug bounty platforms?
At this moment, I DO NOT accept reports via other bug bounty platforms, because I am not familiar with them and someone has already tried to send me a phishing link this way. To minimize my own risk, please report through GitHub Advisories only. I will ignore all emails from 3rd-party bug bounty platforms.
Do not use the public issue tracker or discuss it in the public as it will cause more damage.
## Supported Versions
### Uptime Kuma Versions
You should use or upgrade to the latest version of Uptime Kuma. All `1.X.X` versions are upgradable to the latest version.
You should use or upgrade to the latest version of Uptime Kuma. All `1.X.X` versions are upgradable to the lastest version.
### Upgradable Docker Tags

View File

@@ -4,4 +4,8 @@ if (process.env.TEST_FRONTEND) {
config.presets = [ "@babel/preset-env" ];
}
if (process.env.TEST_BACKEND) {
config.plugins = [ "babel-plugin-rewire" ];
}
module.exports = config;

View File

@@ -16,9 +16,6 @@ export default defineConfig({
},
define: {
"FRONTEND_VERSION": JSON.stringify(process.env.npm_package_version),
"DEVCONTAINER": JSON.stringify(process.env.DEVCONTAINER),
"GITHUB_CODESPACES_PORT_FORWARDING_DOMAIN": JSON.stringify(process.env.GITHUB_CODESPACES_PORT_FORWARDING_DOMAIN),
"CODESPACE_NAME": JSON.stringify(process.env.CODESPACE_NAME),
},
plugins: [
vue(),
@@ -45,9 +42,6 @@ export default defineConfig({
}
},
build: {
commonjsOptions: {
include: [ /.js$/ ],
},
rollupOptions: {
output: {
manualChunks(id, { getModuleInfo, getModuleIds }) {

View File

@@ -1,7 +0,0 @@
-- You should not modify this file after it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE status_page
ADD show_certificate_expiry BOOLEAN default 0 NOT NULL;
COMMIT;

View File

@@ -1,7 +0,0 @@
-- You should not modify this file after it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD description TEXT default null;
COMMIT;

View File

@@ -1,7 +0,0 @@
-- You should not modify this file after it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD gamedig_given_port_only BOOLEAN default 1 not null;
COMMIT;

View File

@@ -1,7 +0,0 @@
-- You should not modify this file after it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD invert_keyword BOOLEAN default 0 not null;
COMMIT;

View File

@@ -1,6 +0,0 @@
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD parent INTEGER REFERENCES [monitor] ([id]) ON DELETE SET NULL ON UPDATE CASCADE;
COMMIT

View File

@@ -1,6 +0,0 @@
-- You should not modify this file after it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD timeout DOUBLE default 0 not null;
COMMIT;

View File

@@ -1,10 +0,0 @@
-- You should not modify this file after it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD json_path TEXT;
ALTER TABLE monitor
ADD expected_value VARCHAR(255);
COMMIT;

View File

@@ -1,22 +0,0 @@
-- You should not modify this file after it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD kafka_producer_topic VARCHAR(255);
ALTER TABLE monitor
ADD kafka_producer_brokers TEXT;
ALTER TABLE monitor
ADD kafka_producer_ssl INTEGER;
ALTER TABLE monitor
ADD kafka_producer_allow_auto_topic_creation VARCHAR(255);
ALTER TABLE monitor
ADD kafka_producer_sasl_options TEXT;
ALTER TABLE monitor
ADD kafka_producer_message TEXT;
COMMIT;

View File

@@ -1,13 +0,0 @@
-- You should not modify this file after it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
CREATE TABLE [api_key] (
[id] INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
[key] VARCHAR(255) NOT NULL,
[name] VARCHAR(255) NOT NULL,
[user_id] INTEGER NOT NULL,
[created_date] DATETIME DEFAULT (DATETIME('now')) NOT NULL,
[active] BOOLEAN DEFAULT 1 NOT NULL,
[expires] DATETIME DEFAULT NULL,
CONSTRAINT FK_user FOREIGN KEY ([user_id]) REFERENCES [user]([id]) ON DELETE CASCADE ON UPDATE CASCADE
);
COMMIT;

View File

@@ -1,34 +0,0 @@
-- You should not modify this file after it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
-- Rename COLUMNs to another one (suffixed by `_old`)
ALTER TABLE monitor
RENAME COLUMN kafka_producer_ssl TO kafka_producer_ssl_old;
ALTER TABLE monitor
RENAME COLUMN kafka_producer_allow_auto_topic_creation TO kafka_producer_allow_auto_topic_creation_old;
-- Add correct COLUMNs
ALTER TABLE monitor
ADD COLUMN kafka_producer_ssl BOOLEAN default 0 NOT NULL;
ALTER TABLE monitor
ADD COLUMN kafka_producer_allow_auto_topic_creation BOOLEAN default 0 NOT NULL;
-- This SQL is still not fully safe. See https://github.com/louislam/uptime-kuma/issues/4039.
-- Bring the old values from the `_old` COLUMNs over to the correct ones
-- UPDATE monitor SET kafka_producer_allow_auto_topic_creation = monitor.kafka_producer_allow_auto_topic_creation_old
-- WHERE monitor.kafka_producer_allow_auto_topic_creation_old IS NOT NULL;
-- UPDATE monitor SET kafka_producer_ssl = monitor.kafka_producer_ssl_old
-- WHERE monitor.kafka_producer_ssl_old IS NOT NULL;
-- Remove old COLUMNs
ALTER TABLE monitor
DROP COLUMN kafka_producer_allow_auto_topic_creation_old;
ALTER TABLE monitor
DROP COLUMN kafka_producer_ssl_old;
COMMIT;

View File

@@ -1,12 +0,0 @@
-- You should not modify this file after it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor ADD http_body_encoding VARCHAR(25);
COMMIT;
BEGIN TRANSACTION;
UPDATE monitor SET http_body_encoding = 'json' WHERE (type = 'http' or type = 'keyword') AND http_body_encoding IS NULL;
COMMIT;

View File

@@ -1,11 +0,0 @@
-- You should not modify this file after it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
DROP TABLE maintenance_timeslot;
-- 999 characters. https://stackoverflow.com/questions/46134830/maximum-length-for-cron-job
ALTER TABLE maintenance ADD cron TEXT;
ALTER TABLE maintenance ADD timezone VARCHAR(255);
ALTER TABLE maintenance ADD duration INTEGER;
COMMIT;

View File

@@ -1,19 +0,0 @@
-- You should not modify this file after it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD oauth_client_id TEXT default null;
ALTER TABLE monitor
ADD oauth_client_secret TEXT default null;
ALTER TABLE monitor
ADD oauth_token_url TEXT default null;
ALTER TABLE monitor
ADD oauth_scopes TEXT default null;
ALTER TABLE monitor
ADD oauth_auth_method TEXT default null;
COMMIT;

View File

@@ -1,18 +0,0 @@
BEGIN TRANSACTION;
PRAGMA writable_schema = TRUE;
UPDATE
SQLITE_MASTER
SET
sql = replace(sql,
'monitor_id INTEGER NOT NULL',
'monitor_id INTEGER NOT NULL REFERENCES [monitor] ([id]) ON DELETE CASCADE ON UPDATE CASCADE'
)
WHERE
name = 'monitor_tls_info'
AND type = 'table';
PRAGMA writable_schema = RESET;
COMMIT;

View File

@@ -1,13 +0,0 @@
-- You should not modify this file after it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD tls_ca TEXT default null;
ALTER TABLE monitor
ADD tls_cert TEXT default null;
ALTER TABLE monitor
ADD tls_key TEXT default null;
COMMIT;

View File

@@ -1,10 +0,0 @@
-- You should not modify this file after it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
-- SQLite: Change the data type of the column "config" from VARCHAR to TEXT
ALTER TABLE notification RENAME COLUMN config TO config_old;
ALTER TABLE notification ADD COLUMN config TEXT;
UPDATE notification SET config = config_old;
ALTER TABLE notification DROP COLUMN config_old;
COMMIT;

View File

@@ -1,7 +0,0 @@
-- You should not modify this file after it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
UPDATE monitor SET timeout = (interval * 0.8)
WHERE timeout IS NULL OR timeout <= 0;
COMMIT;

View File

@@ -4,5 +4,5 @@ WORKDIR /app
# Install apprise, iputils for non-root ping, setpriv
RUN apk add --no-cache iputils setpriv dumb-init python3 py3-cryptography py3-pip py3-six py3-yaml py3-click py3-markdown py3-requests py3-requests-oauthlib git && \
pip3 --no-cache-dir install apprise==1.4.0 && \
pip3 --no-cache-dir install apprise==1.2.1 && \
rm -rf /root/.cache

View File

@@ -1,48 +1,28 @@
# DON'T UPDATE TO bullseye-slim, see #372.
# There is no 20-buster-slim for armv7 unfortunately, 18-buster-slim is the last one for Uptime Kuma v1.
FROM node:18-buster-slim
# DON'T UPDATE TO node:14-bullseye-slim, see #372.
# If the image changed, the second stage image should be changed too
FROM node:16-buster-slim
ARG TARGETPLATFORM
WORKDIR /app
# Specify --no-install-recommends to skip unused dependencies, make the base much smaller!
# python3* = apprise's dependencies
# sqlite3 = for debugging
# iputils-ping = for ping
# util-linux = for setpriv (Should be dropped in 2.0.0?)
# dumb-init = avoid zombie processes (#480)
# curl = for debugging
# ca-certificates = keep the cert up-to-date
# sudo = for starting the nscd service as a non-root user
# nscd = for better DNS caching
# (pip) apprise = for notifications
RUN apt-get update && \
apt-get --yes --no-install-recommends install \
python3 python3-pip python3-cryptography python3-six python3-yaml python3-click python3-markdown python3-requests python3-requests-oauthlib \
sqlite3 \
iputils-ping \
util-linux \
dumb-init \
curl \
ca-certificates \
sudo \
nscd && \
pip3 --no-cache-dir install apprise==1.6.0 && \
# Install Curl
# Install Apprise, add sqlite3 cli for debugging in the future, iputils-ping for ping, util-linux for setpriv
# Stupid python3 and python3-pip actually install a lot of useless things into Debian, specify --no-install-recommends to skip them, make the base even smaller than alpine!
RUN apt update && \
apt --yes --no-install-recommends install python3 python3-pip python3-cryptography python3-six python3-yaml python3-click python3-markdown python3-requests python3-requests-oauthlib \
sqlite3 iputils-ping util-linux dumb-init git && \
pip3 --no-cache-dir install apprise==1.2.1 && \
rm -rf /var/lib/apt/lists/* && \
apt --yes autoremove
# Install cloudflared
RUN set -eux && \
mkdir -p --mode=0755 /usr/share/keyrings && \
curl --fail --show-error --silent --location --insecure https://pkg.cloudflare.com/cloudflare-main.gpg --output /usr/share/keyrings/cloudflare-main.gpg && \
echo 'deb [signed-by=/usr/share/keyrings/cloudflare-main.gpg] https://pkg.cloudflare.com/cloudflared buster main' | tee /etc/apt/sources.list.d/cloudflared.list && \
apt-get update && \
apt-get install --yes --no-install-recommends cloudflared && \
cloudflared version && \
# dpkg --add-architecture arm: cloudflared does not provide armhf, this is a workaround. Read more: https://github.com/cloudflare/cloudflared/issues/583
COPY extra/download-cloudflared.js ./extra/download-cloudflared.js
RUN node ./extra/download-cloudflared.js $TARGETPLATFORM && \
dpkg --add-architecture arm && \
apt update && \
apt --yes --no-install-recommends install ./cloudflared.deb && \
rm -rf /var/lib/apt/lists/* && \
rm -f cloudflared.deb && \
apt --yes autoremove
# For nscd
COPY ./docker/etc/nscd.conf /etc/nscd.conf
COPY ./docker/etc/sudoers /etc/sudoers

View File

@@ -26,8 +26,6 @@ RUN chmod +x /app/extra/entrypoint.sh
FROM louislam/uptime-kuma:base-debian AS release
WORKDIR /app
ENV UPTIME_KUMA_IS_CONTAINER=1
# Copy app files from build layer
COPY --from=build /app /app
@@ -72,7 +70,8 @@ RUN git clone https://github.com/louislam/uptime-kuma.git .
RUN npm ci
EXPOSE 3000 3001
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD extra/healthcheck
VOLUME ["/app/data"]
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD node extra/healthcheck.js
CMD ["npm", "run", "start-pr-test"]
############################################

View File

@@ -1,90 +0,0 @@
#
# /etc/nscd.conf
#
# An example Name Service Cache config file. This file is needed by nscd.
#
# Legal entries are:
#
# logfile <file>
# debug-level <level>
# threads <initial #threads to use>
# max-threads <maximum #threads to use>
# server-user <user to run server as instead of root>
# server-user is ignored if nscd is started with -S parameters
# stat-user <user who is allowed to request statistics>
# reload-count unlimited|<number>
# paranoia <yes|no>
# restart-interval <time in seconds>
#
# enable-cache <service> <yes|no>
# positive-time-to-live <service> <time in seconds>
# negative-time-to-live <service> <time in seconds>
# suggested-size <service> <prime number>
# check-files <service> <yes|no>
# persistent <service> <yes|no>
# shared <service> <yes|no>
# max-db-size <service> <number bytes>
# auto-propagate <service> <yes|no>
#
# Currently supported cache names (services): passwd, group, hosts, services
#
# logfile /var/log/nscd.log
# threads 4
# max-threads 32
# server-user node
# stat-user somebody
debug-level 0
# reload-count 5
paranoia no
# restart-interval 3600
enable-cache passwd no
positive-time-to-live passwd 600
negative-time-to-live passwd 20
suggested-size passwd 211
check-files passwd yes
persistent passwd yes
shared passwd yes
max-db-size passwd 33554432
auto-propagate passwd yes
enable-cache group no
positive-time-to-live group 3600
negative-time-to-live group 60
suggested-size group 211
check-files group yes
persistent group yes
shared group yes
max-db-size group 33554432
auto-propagate group yes
enable-cache hosts yes
positive-time-to-live hosts 3600
negative-time-to-live hosts 20
suggested-size hosts 211
check-files hosts yes
persistent hosts yes
# Set shared to "no" to display stats in `nscd -g`
# Read more: https://stackoverflow.com/questions/40429245/nscdcentos7curl-0-dns-cache-hit-rate
shared hosts no
max-db-size hosts 33554432
enable-cache services no
positive-time-to-live services 28800
negative-time-to-live services 20
suggested-size services 211
check-files services yes
persistent services yes
shared services yes
max-db-size services 33554432
enable-cache netgroup no
positive-time-to-live netgroup 28800
negative-time-to-live netgroup 20
suggested-size netgroup 211
check-files netgroup yes
persistent netgroup yes
shared netgroup yes
max-db-size netgroup 33554432

View File

@@ -1,31 +0,0 @@
#
# This file MUST be edited with the 'visudo' command as root.
#
# Please consider adding local content in /etc/sudoers.d/ instead of
# directly modifying this file.
#
# See the man page for details on how to write a sudoers file.
#
Defaults env_reset
Defaults mail_badpass
Defaults secure_path="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
# Host alias specification
# User alias specification
# Cmnd alias specification
# User privilege specification
root ALL=(ALL:ALL) ALL
# Allow members of group sudo to execute any command
%sudo ALL=(ALL:ALL) ALL
# See sudoers(5) for more information on "#include" directives:
#includedir /etc/sudoers.d
# Allow `node` to control service (mainly for nscd)
node ALL=(root) NOPASSWD: /usr/sbin/nscdservice
node ALL=(root) NOPASSWD: /usr/sbin/service

View File

@@ -22,8 +22,7 @@ if (! exists) {
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n");
// Also update package-lock.json
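// On Windows, spawnSync cannot launch the "npm" shell shim directly, so "npm.cmd" is used instead.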
const npm = /^win/.test(process.platform) ? "npm.cmd" : "npm";
childProcess.spawnSync(npm, [ "install" ]);
childProcess.spawnSync("npm", [ "install" ]);
commit(version);
tag(version);

View File

@@ -1,60 +0,0 @@
require("dotenv").config();
const { NodeSSH } = require("node-ssh");
const readline = require("readline");
const rl = readline.createInterface({ input: process.stdin,
output: process.stdout });
const prompt = (query) => new Promise((resolve) => rl.question(query, resolve));
(async () => {
try {
console.log("SSH to demo server");
const ssh = new NodeSSH();
await ssh.connect({
host: process.env.UPTIME_KUMA_DEMO_HOST,
port: process.env.UPTIME_KUMA_DEMO_PORT,
username: process.env.UPTIME_KUMA_DEMO_USERNAME,
privateKeyPath: process.env.UPTIME_KUMA_DEMO_PRIVATE_KEY_PATH
});
let cwd = process.env.UPTIME_KUMA_DEMO_CWD;
let result;
const version = await prompt("Enter Version: ");
result = await ssh.execCommand("git fetch --all", {
cwd,
});
console.log(result.stdout + result.stderr);
await prompt("Press any key to continue...");
result = await ssh.execCommand(`git checkout ${version} --force`, {
cwd,
});
console.log(result.stdout + result.stderr);
result = await ssh.execCommand("npm run download-dist", {
cwd,
});
console.log(result.stdout + result.stderr);
result = await ssh.execCommand("npm install --production", {
cwd,
});
console.log(result.stdout + result.stderr);
/*
result = await ssh.execCommand("pm2 restart 1", {
cwd,
});
console.log(result.stdout + result.stderr);*/
} catch (e) {
console.log(e);
} finally {
rl.close();
}
})();
// When done reading prompt, exit program
rl.on("close", () => process.exit(0));

View File

@@ -0,0 +1,48 @@
//
const http = require("https"); // or 'https' for https:// URLs
const fs = require("fs");
const platform = process.argv[2];
if (!platform) {
console.error("No platform??");
process.exit(1);
}
let arch = null;
if (platform === "linux/amd64") {
arch = "amd64";
} else if (platform === "linux/arm64") {
arch = "arm64";
} else if (platform === "linux/arm/v7") {
arch = "arm";
} else {
console.error("Invalid platform?? " + platform);
}
const file = fs.createWriteStream("cloudflared.deb");
get("https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-" + arch + ".deb");
/**
* Download specified file
* @param {string} url URL to request
*/
function get(url) {
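// Note: https.get() does not follow redirects by itself, so get() re-requests res.headers.location below.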
http.get(url, function (res) {
if (res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {
console.log("Redirect to " + res.headers.location);
get(res.headers.location);
} else if (res.statusCode >= 200 && res.statusCode < 300) {
res.pipe(file);
res.on("end", function () {
console.log("Downloaded");
});
} else {
console.error(res.statusCode);
process.exit(1);
}
});
}

View File

@@ -47,7 +47,6 @@ function download(url) {
});
}
console.log("Done");
process.exit(0);
});
tarStream.on("error", () => {

View File

@@ -1 +0,0 @@
packages/

View File

@@ -1,35 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<startup>
<supportedRuntime version="v4.0" sku=".NETFramework,Version=v4.7.2" />
</startup>
<runtime>
<assemblyBinding xmlns="urn:schemas-microsoft-com:asm.v1">
<dependentAssembly>
<assemblyIdentity name="System.Diagnostics.DiagnosticSource" publicKeyToken="cc7b13ffcd2ddd51" culture="neutral" />
<bindingRedirect oldVersion="0.0.0.0-4.0.1.0" newVersion="4.0.1.0" />
</dependentAssembly>
<dependentAssembly>
<assemblyIdentity name="System.Diagnostics.Tracing" publicKeyToken="b03f5f7f11d50a3a" culture="neutral" />
<bindingRedirect oldVersion="0.0.0.0-4.1.1.0" newVersion="4.1.1.0" />
</dependentAssembly>
<dependentAssembly>
<assemblyIdentity name="System.Reflection" publicKeyToken="b03f5f7f11d50a3a" culture="neutral" />
<bindingRedirect oldVersion="0.0.0.0-4.1.1.0" newVersion="4.1.1.0" />
</dependentAssembly>
<dependentAssembly>
<assemblyIdentity name="System.Runtime" publicKeyToken="b03f5f7f11d50a3a" culture="neutral" />
<bindingRedirect oldVersion="0.0.0.0-4.1.1.1" newVersion="4.1.1.1" />
</dependentAssembly>
<dependentAssembly>
<assemblyIdentity name="System.Runtime.InteropServices" publicKeyToken="b03f5f7f11d50a3a" culture="neutral" />
<bindingRedirect oldVersion="0.0.0.0-4.1.1.0" newVersion="4.1.1.0" />
</dependentAssembly>
<dependentAssembly>
<assemblyIdentity name="System.Runtime.CompilerServices.Unsafe" publicKeyToken="b03f5f7f11d50a3a" culture="neutral" />
<bindingRedirect oldVersion="0.0.0.0-6.0.0.0" newVersion="6.0.0.0" />
</dependentAssembly>
</assemblyBinding>
</runtime>
</configuration>

View File

@@ -1,84 +0,0 @@
using System.ComponentModel;
namespace UptimeKuma {
partial class DownloadForm {
/// <summary>
/// Required designer variable.
/// </summary>
private IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing) {
if (disposing && (components != null)) {
components.Dispose();
}
base.Dispose(disposing);
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent() {
System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(DownloadForm));
this.progressBar = new System.Windows.Forms.ProgressBar();
this.label = new System.Windows.Forms.Label();
this.labelData = new System.Windows.Forms.Label();
this.SuspendLayout();
//
// progressBar
//
this.progressBar.Location = new System.Drawing.Point(12, 12);
this.progressBar.Name = "progressBar";
this.progressBar.Size = new System.Drawing.Size(472, 41);
this.progressBar.TabIndex = 0;
//
// label
//
this.label.Location = new System.Drawing.Point(12, 59);
this.label.Name = "label";
this.label.Size = new System.Drawing.Size(472, 23);
this.label.TabIndex = 1;
this.label.Text = "Preparing...";
//
// labelData
//
this.labelData.Location = new System.Drawing.Point(12, 82);
this.labelData.Name = "labelData";
this.labelData.Size = new System.Drawing.Size(472, 23);
this.labelData.TabIndex = 2;
//
// DownloadForm
//
this.AutoScaleDimensions = new System.Drawing.SizeF(8F, 16F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.ClientSize = new System.Drawing.Size(496, 117);
this.Controls.Add(this.labelData);
this.Controls.Add(this.label);
this.Controls.Add(this.progressBar);
this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog;
this.Icon = ((System.Drawing.Icon)(resources.GetObject("$this.Icon")));
this.MaximizeBox = false;
this.Name = "DownloadForm";
this.StartPosition = System.Windows.Forms.FormStartPosition.CenterScreen;
this.Text = "Uptime Kuma";
this.Load += new System.EventHandler(this.DownloadForm_Load);
this.ResumeLayout(false);
}
private System.Windows.Forms.Label labelData;
private System.Windows.Forms.Label label;
private System.Windows.Forms.ProgressBar progressBar;
#endregion
}
}

View File

@@ -1,204 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.IO;
using System.IO.Compression;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
using System.Windows.Forms;
using Newtonsoft.Json;
namespace UptimeKuma {
public partial class DownloadForm : Form {
private readonly Queue<DownloadItem> downloadQueue = new();
private readonly WebClient webClient = new();
private DownloadItem currentDownloadItem;
public DownloadForm() {
InitializeComponent();
}
private void DownloadForm_Load(object sender, EventArgs e) {
webClient.DownloadProgressChanged += DownloadProgressChanged;
webClient.DownloadFileCompleted += DownloadFileCompleted;
label.Text = "Reading latest version...";
// Read json from https://uptime.kuma.pet/version
var versionJson = new WebClient().DownloadString("https://uptime.kuma.pet/version");
var versionObj = JsonConvert.DeserializeObject<Version>(versionJson);
var nodeVersion = versionObj.nodejs;
var uptimeKumaVersion = versionObj.latest;
var hasUpdateFile = File.Exists("update");
if (!Directory.Exists("node")) {
downloadQueue.Enqueue(new DownloadItem {
URL = $"https://nodejs.org/dist/v{nodeVersion}/node-v{nodeVersion}-win-x64.zip",
Filename = "node.zip",
TargetFolder = "node"
});
}
if (!Directory.Exists("core") || hasUpdateFile) {
// It is an update; rename the core folder to core.old
if (Directory.Exists("core")) {
// Remove the old core.old folder
if (Directory.Exists("core.old")) {
Directory.Delete("core.old", true);
}
Directory.Move("core", "core.old");
}
downloadQueue.Enqueue(new DownloadItem {
URL = $"https://github.com/louislam/uptime-kuma/archive/refs/tags/{uptimeKumaVersion}.zip",
Filename = "core.zip",
TargetFolder = "core"
});
File.WriteAllText("version.json", versionJson);
// Delete the update file
if (hasUpdateFile) {
File.Delete("update");
}
}
DownloadNextFile();
}
void DownloadNextFile() {
if (downloadQueue.Count > 0) {
var item = downloadQueue.Dequeue();
currentDownloadItem = item;
// Download only if the zip file does not already exist
if (!File.Exists(item.Filename)) {
label.Text = item.URL;
webClient.DownloadFileAsync(new Uri(item.URL), item.Filename);
} else {
progressBar.Value = 100;
label.Text = "Use local " + item.Filename;
DownloadFileCompleted(null, null);
}
} else {
npmSetup();
}
}
void npmSetup() {
labelData.Text = "";
var npm = "..\\node\\npm.cmd";
var cmd = $"{npm} ci --production & {npm} run download-dist & exit";
var startInfo = new ProcessStartInfo {
FileName = "cmd.exe",
Arguments = $"/k \"{cmd}\"",
RedirectStandardOutput = false,
RedirectStandardError = false,
RedirectStandardInput = true,
UseShellExecute = false,
CreateNoWindow = false,
WorkingDirectory = "core"
};
var process = new Process();
process.StartInfo = startInfo;
process.EnableRaisingEvents = true;
process.Exited += (_, e) => {
progressBar.Value = 100;
if (process.ExitCode == 0) {
Task.Delay(2000).ContinueWith(_ => {
Application.Restart();
});
label.Text = "Done";
} else {
label.Text = "Failed, exit code: " + process.ExitCode;
}
};
process.Start();
label.Text = "Installing dependencies and download dist files";
progressBar.Value = 50;
process.WaitForExit();
}
void DownloadProgressChanged(object sender, DownloadProgressChangedEventArgs e) {
progressBar.Value = e.ProgressPercentage;
var total = e.TotalBytesToReceive / 1024;
var current = e.BytesReceived / 1024;
if (total > 0) {
labelData.Text = $"{current}KB/{total}KB";
}
}
void DownloadFileCompleted(object sender, AsyncCompletedEventArgs e) {
Extract(currentDownloadItem);
DownloadNextFile();
}
void Extract(DownloadItem item) {
if (Directory.Exists(item.TargetFolder)) {
var dir = new DirectoryInfo(item.TargetFolder);
dir.Delete(true);
}
if (Directory.Exists("temp")) {
var dir = new DirectoryInfo("temp");
dir.Delete(true);
}
labelData.Text = $"Extracting {item.Filename}...";
ZipFile.ExtractToDirectory(item.Filename, "temp");
string[] dirList;
// Move to the correct level
dirList = Directory.GetDirectories("temp");
if (dirList.Length > 0) {
var dir = dirList[0];
// As ExtractToDirectory sometimes still locks the directory, loop until the move succeeds
while (true) {
try {
Directory.Move(dir, item.TargetFolder);
break;
} catch (Exception exception) {
Thread.Sleep(1000);
}
}
} else {
MessageBox.Show("Unexcepted Error: Cannot move extracted files, folder not found.");
}
labelData.Text = $"Extracted";
if (Directory.Exists("temp")) {
var dir = new DirectoryInfo("temp");
dir.Delete(true);
}
File.Delete(item.Filename);
}
}
public class DownloadItem {
public string URL { get; set; }
public string Filename { get; set; }
public string TargetFolder { get; set; }
}
}

View File

@@ -1,377 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<root>
<!--
Microsoft ResX Schema
Version 2.0
The primary goals of this format is to allow a simple XML format
that is mostly human readable. The generation and parsing of the
various data types are done through the TypeConverter classes
associated with the data types.
Example:
... ado.net/XML headers & schema ...
<resheader name="resmimetype">text/microsoft-resx</resheader>
<resheader name="version">2.0</resheader>
<resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader>
<resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader>
<data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data>
<data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data>
<data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64">
<value>[base64 mime encoded serialized .NET Framework object]</value>
</data>
<data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
<value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value>
<comment>This is a comment</comment>
</data>
There are any number of "resheader" rows that contain simple
name/value pairs.
Each data row contains a name, and value. The row also contains a
type or mimetype. Type corresponds to a .NET class that support
text/value conversion through the TypeConverter architecture.
Classes that don't support this are serialized and stored with the
mimetype set.
The mimetype is used for serialized objects, and tells the
ResXResourceReader how to depersist the object. This is currently not
extensible. For a given mimetype the value must be set accordingly:
Note - application/x-microsoft.net.object.binary.base64 is the format
that the ResXResourceWriter will generate, however the reader can
read any of the formats listed below.
mimetype: application/x-microsoft.net.object.binary.base64
value : The object must be serialized with
: System.Runtime.Serialization.Formatters.Binary.BinaryFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.soap.base64
value : The object must be serialized with
: System.Runtime.Serialization.Formatters.Soap.SoapFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.bytearray.base64
value : The object must be serialized into a byte array
: using a System.ComponentModel.TypeConverter
: and then encoded with base64 encoding.
-->
<xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
<xsd:import namespace="http://www.w3.org/XML/1998/namespace" />
<xsd:element name="root" msdata:IsDataSet="true">
<xsd:complexType>
<xsd:choice maxOccurs="unbounded">
<xsd:element name="metadata">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" />
</xsd:sequence>
<xsd:attribute name="name" use="required" type="xsd:string" />
<xsd:attribute name="type" type="xsd:string" />
<xsd:attribute name="mimetype" type="xsd:string" />
<xsd:attribute ref="xml:space" />
</xsd:complexType>
</xsd:element>
<xsd:element name="assembly">
<xsd:complexType>
<xsd:attribute name="alias" type="xsd:string" />
<xsd:attribute name="name" type="xsd:string" />
</xsd:complexType>
</xsd:element>
<xsd:element name="data">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
<xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" msdata:Ordinal="1" />
<xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
<xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
<xsd:attribute ref="xml:space" />
</xsd:complexType>
</xsd:element>
<xsd:element name="resheader">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" />
</xsd:complexType>
</xsd:element>
</xsd:choice>
</xsd:complexType>
</xsd:element>
</xsd:schema>
<resheader name="resmimetype">
<value>text/microsoft-resx</value>
</resheader>
<resheader name="version">
<value>2.0</value>
</resheader>
<resheader name="reader">
<value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<resheader name="writer">
<value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<assembly alias="System.Drawing" name="System.Drawing, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a" />
<data name="$this.Icon" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
<value>
AAABAAMAMDAAAAEAIACoJQAANgAAACAgAAABACAAqBAAAN4lAAAQEAAAAQAgAGgEAACGNgAAKAAAADAA
AABgAAAAAQAgAAAAAAAAJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAA////BPT09Bfu7u4e8fHxJPPz8yv19fUy9fX1M/Pz8yvx8fEk9vb2HPPz8xXMzMwFAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//
/wHv7+8f7u7uPPPz81Tx8fFs8fHxgPHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGB8fHxcfHx8V3x8fFI9PT0MOvr6w0AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AADy8vIU8fHxS/Dw8Hbx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fFr9PT0R/Dw8CIAAAABAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAA8vLyFPHx8Vnx8fGB8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fFs9fX1Mb+/vwQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAICAgALy8vI88fHxfvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvLy8nby8vI8gICAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAzMzMBfHx8Vrx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8vLyYf///wwAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADMzMwF8vLyYPHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8W/z8/MWAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADv7+9R8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLw8PB26urqDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPLy8ijx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgu7w7Ifj79ud2u7PtNLrw83P677dzeu85c3r
u+rM67rwzOu68c7rverQ68Dj0uvD3NbuyM3b7c+64u7apujv5ZPx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxXgAAAAEAAAAAAAAAAAAAAAAAAAAA4+PjCfDw
8Hfx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLd7tSmzeu92MbqsvvG6bH/xumy/8fq
s//H6rP/yOq0/8jqtf/J6rb/yeq2/8rrt//K67j/y+u4/8vruf/M67r/zOu7/83ru//Q7MDx1u7Kz9/t
163s8OuJ8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgu/v7y8AAAAAAAAAAAAA
AAAAAAAA7u7uPfHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC5PDdl8jqtuTE6a7/xOmv/8Xp
sP/G6bH/xumx/8bpsv/H6rP/x+qz/8jqtP/I6rX/yeq2/8nqtv/K67f/yuu4/8vruP/L67n/zOu6/8zr
u//N67v/zey8/87svf/P67742e3Mx+jv5ZLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvDw
8HWAgIACAAAAAAAAAACqqqoD8vLyc/Hx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLf7degxOiu+cPo
rf/D6a7/xOmu/8Xpr//F6bD/xumx/8bpsf/G6bL/x+qz/8fqs//I6rT/yOq1/8nqtv/J6rb/yuu3/8rr
uP/L67j/y+u5/8zruv/M67v/zeu7/83svP/O7L3/zuy9/87svfzc7tK28fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fEkAAAAAAAAAADz8/Mq8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgunv
5o3D6a/0wuis/8Lorf/D6K3/xOmu/8Tprv/F6a//xemw/8bpsf/G6bH/xumy/8fqs//H6rP/yOq0/8jq
tf/J6rb/yeq2/8rrt//K67j/y+u4/8vruf/M67r/zOu7/83ru//N7Lz/zuy9/87svf/O7L3/3e/TtPHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLy8vJNAAAAAAAAAADy8vJM8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgszqutDB6Kv/weir/8LorP/D6K3/w+it/8Tprv/E6a7/xemv/8XpsP/G6bH/xumx/8bp
sv/H6rP/x+qz/8jqtP/I6rX/yeq2/8nqtv/K67f/yuu4/8vruP/L67n/zOu6/8zru//N67v/zey8/87s
vf/O7L3/zuy++u3w6Yzx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLy8vJ1AAAAAAAAAADx8fFr8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC6O/kjsDoqvzA6Kr/weir/8Loq//C6Kz/w+it/8Porf/E6a7/xOmu/8Xp
r//F6bD/xumx/8bpsf/G6bL/x+qz/8fqtP/I6rT/yOq1/8nqtv/J6rb/yuu3/8rruP/L67n/y+u5/8zr
uv/M67v/zeu7/83svP/O7L3/zuy9/93u07Xx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC////Bv//
/wfx8fGB8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC1ezJsr/nqf/A56n/weiq/8Hoq//C6Kv/wuis/8Po
rf/D6K3/xOmu/8Pprv+856T/uOed/7bmmv+05Zf/teWZ/7jnnf+86KP/wOio/8fqs//J6rb/yeq2/8rr
t//K67j/y+u5/8vruf/M67r/zOu7/83ru//N7Lz/zuy9/9buyNLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8vLyE/Ly8hPx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGCy+q6zr/nqP/A56n/wOep/8Ho
qv/B6Kv/wuir/8LorP+u5Y//neF2/5bgav+V4Gr/luBr/5fhbP+Y4W7/meFv/5rhcf+b4nL/nOJ0/53i
dv+j5H//reaM/7nnnf/E6q//y+y4/8vruf/L67n/zOu6/8zru//N67v/zey8/9Lsxd/x8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC7+/vIPb29hzx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGCx+m03L/n
qP+/56j/wOep/8Dnqf/B6Kr/weir/7nmn/+R32T/kt9l/5PfZ/+U4Gj/leBq/5bga/+X4W3/mOFu/5nh
b/+a4XH/m+Jy/5zidP+d4nX/nuN3/5/jeP+f4nn/weqq/8rruP/L67n/y+u5/8zruv/M67v/zeu7/9Ls
w+Lx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8PDwI/Hx8SXx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGCxeix5L/nqP+/56j/v+eo/8Dnqf/A56n/weiq/7Pllv+Q3mP/kd9k/5LfZf+T32f/lOBo/5Xg
av+W4Gv/l+Ft/5jhbv+Z4W//muFx/5vicv+c4nT/neJ1/57jd/+f43j/xOmu/8rrt//K67j/y+u5/8vr
uf/M67r/zOu7/9Tsxtfx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC9PT0GO/v7yDx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGCx+m037/nqP+/56j/v+eo/7/nqP/A56n/wOip/7TmmP+P3mH/kN5j/5Hf
ZP+S32b/k99n/5TgaP+V4Gr/luBr/5fhbf+Y4W7/meFw/5rhcf+b4nL/nOJ0/53idf+h5Hz/yuu2/8nq
t//K67f/yuu4/8vruf/L67n/zOu6/9ftysrx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC7e3tDvT0
9Bfx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGCyOq117/nqP+/56j/v+eo/7/nqP+/56j/wOep/7vn
of+O3mD/j95h/5DeY/+R32T/kt9m/5PfZ/+U4Gj/leBq/5bga/+X4W3/mOFu/5nhcP+a4nH/m+Jy/5zi
dP+r5Yr/yOq1/8nqtv/J6rf/yuu3/8rruP/L67n/y+u5/9zu1LHx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLz8/OA////A+7u7g/x8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGCz+q+xb/nqP+/56j/v+eo/7/n
qP+/56j/v+eo/8Dnqf+S4Gb/jt5g/4/eYf+Q3mP/kd9k/5LfZv+T32f/lOBo/5Xgav+W4Gv/l+Ft/5jh
bv+Z4XD/muJx/5vic/+4553/yOq0/8jqtf/J6rb/yeq3/8rrt//K67j/y+u5/+bw4Zfx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fFrAAAAAP///wHz8/N88fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC1+zMrr/n
qP+/56j/v+eo/7/nqP+/56j/v+eo/7/nqP+f4Xn/jd5f/47eYP+P3mH/kN5j/5HfZP+S32b/k99n/5Tg
af+V4Gr/luBr/5fhbf+Y4W7/meFw/5vic//F6rD/x+q0/8jqtP/I6rX/yeq2/8nqt//K67f/zOu88u/x
74Px8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLv7+9QAAAAAAAAAADw8PBm8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC5e7gk7/nqP+/56j/v+eo/7/nqP+/56j/v+eo/7/nqP+u5I//jN1d/43eX/+O3mD/j95h/5De
Y/+R32T/kt9m/5PfZ/+U4Gn/leBq/5bga/+X4W3/mOFu/6rliP/G6rL/x+qz/8fqtP/I6rT/yOq1/8nq
tv/J6rf/1OzGy/Hx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YL19fUzAAAAAAAAAADy8vJO8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgsPoru2/56j/v+eo/7/nqP+/56j/v+eo/7/nqP++6Kf/j95i/4zd
Xf+N3l//jt5g/4/eYv+Q3mP/kd9k/5LfZv+T32f/lOBp/5Xgav+W4Gz/l+Ft/7voov/G6bL/xuqy/8fq
s//H6rT/yOq1/8jqtf/J6rb/4e/Zo/Hx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLw8PARAAAAAAAA
AADu7u4u8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgszpvMm/56j/v+eo/7/nqP+/56j/v+eo/7/n
qP+/56j/q+SL/4vdXP+M3V3/jd5f/47eYP+P3mL/kN9j/5HfZP+S32b/k99n/5Tgaf+V4Gr/qOOH/8Xp
sP/G6bH/xumy/8bqsv/H6rP/x+q0/8jqtf/K67jy8PHwhPHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8WoAAAAAAAAAAAAAAADo6OgL8fHxgfHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxguDv2J2/56j/v+eo/7/n
qP+/56j/v+eo/7/nqP+/56j/v+eo/6Xjgv+L3Vz/jN1d/43eX/+O3mD/j95i/5DfY/+R32T/kt9m/5Pf
Z/+k44D/xOmu/8XpsP/F6bD/xumx/8bpsv/G6rL/x+qz/8fqtP/W7cnB8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvPz80AAAAAAAAAAAAAAAAAAAAAA8PDwZ/Hx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLD6K/rv+eo/7/nqP+/56j/v+eo/7/nqP+/56j/v+eo/7/nqP+u5I//kt5n/4zdXf+N3l//jt5g/4/e
Yv+Q32P/luFs/67kj//D6K3/xOmu/8Tpr//F6bD/xemw/8bpsf/G6bL/xuqy/8fqtP7o7+WR8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvPz8xYAAAAAAAAAAAAAAAAAAAAA8vLyPPHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLV7ci0v+eo/7/nqP+/56j/v+eo/7/nqP+/56j/v+eo/7/nqP+/56j/wOio/7Xl
mv+u5I7/rOSM/67kj/+35pz/wumr/8Lorf/D6K3/w+it/8Tprv/E6a//xemw/8XpsP/G6bH/xumy/9Ds
wNPx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8vLyZQAAAAAAAAAAAAAAAAAAAAAAAAAA////DPHx
8YDx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGCx+m03L/nqP+/56j/v+eo/7/nqP+/56j/v+eo/7/n
qP+/56j/v+eo/7/nqP+/56j/wOep/8Doqv/B6Kr/weir/8LorP/C6K3/w+it/8Porv/E6a7/xOmv/8Xp
sP/F6bD/yOq18uvw6Yvx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC7+/vMQAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAPHx8Vzx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC6O/ij8LorPG/56j/v+eo/7/n
qP+/56j/v+eo/7/nqP+/56j/v+eo/7/nqP+/56j/v+eo/8Dnqf/A6Kr/weiq/8Hoq//C6Kz/wuit/8Po
rf/D6K7/xOmu/8Tpr//F6bH74u/anvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLw8PB6////BQAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAPPz8yrx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxguHu
2pnB56v2v+eo/7/nqP+/56j/v+eo/7/nqP+/56j/v+eo/7/nqP+/56j/v+eo/7/nqP/A56n/wOiq/8Ho
q//B6Kv/wuis/8Lorf/D6K3/w+mu/8Tprv3b7dKq8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fFJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHy8vJf8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLi7tyXwumt8L/nqP+/56j/v+eo/7/nqP+/56j/v+eo/7/nqP+/56j/v+eo/7/n
qP+/56j/wOep/8Doqv/B6Kv/weir/8LorP/C6K3/xOiv+d7u1aTx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvLy8nb///8KAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADv7+8Q8/Pze/Hx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC6/Dpiszqu82/56j/v+eo/7/nqP+/56j/v+eo/7/n
qP+/56j/v+eo/7/nqP+/56j/v+eo/8Dnqf/A6Kr/weir/8Hoq//H6bTj5e7elfHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvPz8yoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAA9fX1MvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLe7tShx+mz3r/n
qP+/56j/v+eo/7/nqP+/56j/v+eo/7/nqP+/56j/v+eo/7/nqP/A56n/xumy5drtz6rv8e+D8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8vLyTgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAPHx8Unx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgubv45DU68e2y+q6z8XoseTD6a7uweir9MPpru7F6bHly+q50tLsxLrl796U8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLy8vJh////AwAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///wHx8fFZ8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8Wzf398IAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///8D8/PzVfHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8PDwZujo
6AsAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAA////AfHx8Ujx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fFa////BQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADz8/Mp8vLydvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8/PzfPHx8TcAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////CvLy8lDz8/N/8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvPz84Hx8fFa8PDwEQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AADw8PAR8vLyTvHx8X3x8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fF/8/PzVvT09BgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///wXz8/Mq8/PzU/Hx8XDx8fGB8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLy8vJz8fHxWO/v7y////8IAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///8G7e3tHfLy
8ifu7u4u8PDwNPT09C/y8vIo7+/vH+Pj4wkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAP///////wAA////////AAD///////8AAP//gAf//wAA//gAAD//AAD/wAAAB/8AAP+A
AAAB/wAA/gAAAAB/AAD8AAAAAD8AAPgAAAAAHwAA8AAAAAAPAADwAAAAAAcAAOAAAAAABwAA4AAAAAAD
AADAAAAAAAMAAMAAAAAAAwAAwAAAAAABAACAAAAAAAEAAIAAAAAAAQAAgAAAAAABAACAAAAAAAEAAIAA
AAAAAQAAgAAAAAABAACAAAAAAAMAAMAAAAAAAwAAwAAAAAADAADAAAAAAAMAAMAAAAAABwAAwAAAAAAH
AADgAAAAAAcAAOAAAAAADwAA4AAAAAAPAADwAAAAAB8AAPAAAAAAHwAA+AAAAAA/AAD8AAAAAD8AAPwA
AAAAfwAA/gAAAAD/AAD/AAAAAf8AAP+AAAAD/wAA/8AAAAf/AAD/8AAAH/8AAP/8AAA//wAA//8AAf//
AAD//+AP//8AAP///////wAA////////AAD///////8AACgAAAAgAAAAQAAAAAEAIAAAAAAAABAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAgICAAu/v7xD09PQX7u7uHvDw8CP29vYb8vLyFOrq6gwAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAICA
gALy8vIm7+/vT/Pz82fz8/N98fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvDw8Hrw8PBm7+/vUPT0
9C3o6OgLAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOPj
4wnz8/NC8vLydPHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YHy8vJj8/PzKoCAgAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AADx8fEl8vLydfHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxcfHx8SUAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAA9PT0LfHx8YDx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8/PzgPLy8j0AAAABAAAAAAAA
AAAAAAAAAAAAAO3t7Rzx8fGA8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLr8OmM5O7emeTv
3Z7h79mj5fDem+nv45Tu8u6H8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvLy
8joAAAAAAAAAAAAAAAD///8E8fHxbvHx8YLx8fGC8fHxgvHx8YLx8fGC7vDshtns0K7N67zayeq288fq
s//I6rT/yOq1/8nqtv/K67f/y+u4/8vruf/P7L7w0+zF29vv0Lrn8OKX8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8/PzfvPz8xUAAAAAAAAAAPX19TLx8fGC8fHxgvHx8YLx8fGC8fHxgt3u1KXF6rHzxOmv/8Xp
sP/G6bH/xumy/8fqs//I6rT/yOq1/8nqtv/K67f/y+u4/8vruf/M67v/zey8/87svf/S7MPj4u7Zp/Hx
8YLx8fGC8fHxgvHx8YLx8fGC8/PzVQAAAAAAAAAA8fHxavHx8YLx8fGC8fHxgvHx8YLf7defwuis/cPo
rf/E6a7/xOmv/8XpsP/G6bH/xumy/8fqs//I6rT/yOq1/8nqtv/K67f/y+u4/8vruv/M67v/zey8/87s
vf/N67z/3e7SufHx8YLx8fGC8fHxgvHx8YLz8/N8////Bf///w3x8fGC8fHxgvHx8YLx8fGC8fHxgsXp
sOnB6Kv/wuis/8Porf/E6a7/xOmv/8XpsP/G6bH/xumy/8fqs//I6rT/yOq1/8nqtv/K67f/y+u4/8vr
uv/M67v/zey8/87svf/O67z96/Hoj/Hx8YLx8fGC8fHxgvHx8YLy8vIm8/PzK/Hx8YLx8fGC8fHxgvHx
8YLg79icwOep/8Hoqv/B6Kv/wuis/8Porf/E6a7/wuit/73opP+76KL/u+eh/77opv/D6a3/yeu1/8nq
tv/K67f/y+u5/8zruv/M67v/zey8/87svf/d7tSz8fHxgvHx8YLx8fGC8fHxgvHx8Tby8vI68fHxgvHx
8YLx8fGC8fHxgtTrxre/56j/wOep/8Hoqv/B6Kv/uOad/53idv+V4Gn/leBq/5fhbP+Y4W//muFx/5vi
c/+e4Xb/puWD/7PmlP/D6a3/y+u5/8zruv/M67v/zey8/9rtzsHx8fGC8fHxgvHx8YLx8fGC8/PzQfPz
80Lx8fGC8fHxgvHx8YLx8fGC0OvAwr/nqP+/56j/wOep/8Hoqv+o44b/kd9k/5LfZv+U4Gj/leBq/5fh
bf+Y4W//muFx/5vic/+d4nX/n+N3/7fnm//K67j/y+u5/8zruv/M67v/2u3QvPHx8YLx8fGC8fHxgvHx
8YLy8vI98/PzP/Hx8YLx8fGC8fHxgvHx8YLQ6sK/v+eo/7/nqP+/56j/wOep/6jjhv+P3mL/kd9k/5Lf
Zv+U4Gj/leBr/5fhbf+Y4W//muFx/5zic/+d4nX/v+mm/8nqt//K67j/y+u5/8zruv/f79au8fHxgvHx
8YLx8fGC8fHxgvX19TLx8fE38fHxgvHx8YLx8fGC8fHxgtTrybO/56j/v+eo/7/nqP+/56j/sOSS/47e
YP+P3mL/kd9k/5LfZv+U4Gj/leBr/5fhbf+Z4W//muJx/5/jd//H6bP/yeq2/8nqt//K67j/y+u5/+nv
45Tx8fGC8fHxgvHx8YLx8fGC7+/vIPHx8SXx8fGC8fHxgvHx8YLx8fGC4e/Zm7/nqP+/56j/v+eo/7/n
qP+956X/jt5h/47eYP+P3mL/kd9k/5LfZv+U4Gn/luBr/5fhbf+Z4W//q+aK/8fqs//I6rT/yeq2/8nq
t//N7Lvw8fHxgvHx8YLx8fGC8fHxgvPz84D///8G6+vrDfHx8YLx8fGC8fHxgvHx8YLv8e+Dweis87/n
qP+/56j/v+eo/7/nqP+d4XX/jN1e/47eYP+P3mL/kd9k/5PfZ/+U4Gn/luBr/5fhbf+86KP/xuqy/8fq
s//I6rX/yeq2/9Tsx8nx8fGC8fHxgvHx8YLx8fGC8PDwaAAAAAAAAAAA8fHxbPHx8YLx8fGC8fHxgvHx
8YLM6rrMv+eo/7/nqP+/56j/v+eo/7blmv+N3V//jN1e/47eYP+Q3mL/kd9k/5PfZ/+U4Gn/qeSH/8Xp
sP/G6bH/xuqy/8fqs//I6rX/5fDem/Hx8YLx8fGC8fHxgvHx8YLz8/M/AAAAAAAAAADz8/NB8fHxgvHx
8YLx8fGC8fHxgt3s06O/56j/v+eo/7/nqP+/56j/v+eo/7Xmmf+U32n/jN1e/47eYP+Q3mL/k99o/6zk
i//D6a7/xemv/8XpsP/G6bH/xuqy/8vqu+jx8fGC8fHxgvHx8YLx8fGC8fHxgvPz8xUAAAAAAAAAAPT0
9Bfx8fGC8fHxgvHx8YLx8fGC8fHvg8Tpsee/56j/v+eo/7/nqP+/56j/v+eo/7/nqP+35pz/suWV/7Xm
mf/A6Kj/wuit/8Porf/E6a7/xemv/8XpsP/G6bH/3e3UqvHx8YLx8fGC8fHxgvHx8YLw8PBmAAAAAAAA
AAAAAAAAAAAAAPHx8W7x8fGC8fHxgvHx8YLx8fGC4u7cmMHnqvm/56j/v+eo/7/nqP+/56j/v+eo/7/n
qP+/56j/wOep/8Hoqv/C6Kz/wuit/8Porf/E6a7/xemv/9Hrwszx8fGC8fHxgvHx8YLx8fGC8fHxgvX1
9TEAAAAAAAAAAAAAAAAAAAAA7u7uO/Hx8YLx8fGC8fHxgvHx8YLx8fGC3e7SpMHoqfq/56j/v+eo/7/n
qP+/56j/v+eo/7/nqP+/56j/wOip/8Hoq//C6Kz/wuit/8Porf/O67zV8PHwhPHx8YLx8fGC8fHxgvHx
8YLy8vJ2////BQAAAAAAAAAAAAAAAAAAAACqqqoD8PDwafHx8YLx8fGC8fHxgvHx8YLx8fGC4O/YnMTo
ruy/56j/v+eo/7/nqP+/56j/v+eo/7/nqP+/56j/wOip/8Hoq//C6Kz90uvEwe/x74Px8fGC8fHxgvHx
8YLx8fGC8fHxgvPz8ykAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADz8/MW8fHxfPHx8YLx8fGC8fHxgvHx
8YLx8fGC8PLuhdXtyLXF6bHlv+eo/7/nqP+/56j/v+eo/7/nqP/B6Kv0zeq8zOXv4JTx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLy8vJNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADy8vIm8fHxgPHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLs8OmJ4e/Zm93u06Pf7def5+/hkvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxXf///wIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AADy8vIo8/PzffHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8VnMzMwFAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAD29vYb8fHxbvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvPz83/v7+9BgICAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADMzMwF8/PzQPLy8nnx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvPz84Hx8fFc9PT0GAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////B/X19TLx8fFc8PDwevHx
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgPHx8Wv09PRE9PT0FwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAA7+/vEPb29hvw8PAj7+/vH/T09Be/v78EAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////8B///wAA//wAAD/wAAAP4AAAB+AA
AAfAAAADwAAAA4AAAAGAAAABgAAAAYAAAAGAAAABgAAAAYAAAAGAAAADwAAAA8AAAAPAAAAH4AAAB+AA
AA/wAAAP+AAAH/gAAD/+AAB//wAB///AA///+B////////////8oAAAAEAAAACAAAAABACAAAAAAAAAE
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////CfDw8BH///8GAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgICAAu7u7i7x8fFe8PDwevHx8YLx8fGC8fHxgvDw
8Hvx8fFs7+/vT/Dw8CMAAAABAAAAAAAAAAAAAAAA5ubmCvLy8l/x8fGC8fHxgvHx8YLx8fGC8fHxgvHx
8YLx8fGC8fHxgvHx8YLx8fGC8/PzZu7u7g8AAAAAAAAAAPHx8V3x8fGC8fHxgunv5o7Z7c200+vFytTs
xc7W7cnH2+7QueLu2qbu8OyH8fHxgvHx8YLx8fFu////BfHx8STx8fGC8fHxgtrtzq3D6a/8xemw/8bp
sv/I6rT/yeq2/8vruP/M67v/z+u++Nzu0bjx8fGC8fHxgu/v7zDx8fFI8fHxguzw6ojC56z3wuis/8Tp
rv/E6q3/weiq/8fqsv/J6rb/y+u5/8zru//N67z/6/HpjfHx8YLy8vJN8fHxXPHx8YLg79icv+eo/8Ho
qv+k4n//lOBo/5fhbf+a4XH/n+J5/7Pmlv/L67n/zOu7/+Xw353x8fGC8fHxXvHx8Vrx8fGC4O3Zm7/n
qP+/56j/nuF3/5HfZP+U4Gj/l+Ft/5ricf+x5pL/yeq3/8vruf/r8emN8fHxgu/v70/x8fFK8fHxguzw
6ojA6Kn8v+eo/6njiP+O3mD/kd9k/5Tgaf+X4W3/vuim/8jqtP/N67zr8fHxgvHx8YLy8vI68/PzK/Hx
8YLx8fGCx+m03L/nqP++6Kb/meBw/47eYP+S32X/q+SL/8XpsP/G6rL/1+zLvvHx8YLz8/OB8PDwEdXV
1Qbx8fF98fHxgt/t1Z/A56j9v+eo/7/nqP+656H/vuim/8Lorf/E6a7/yOq18Ovw6Yvx8fGC8vLyYwAA
AAAAAAAA8fHxR/Hx8YLx8fGC2O3NrMDnqfq/56j/v+eo/7/nqP/B6Kv/xumy7OTu3Zfx8fGC8/PzgfLy
8icAAAAAAAAAAP///wPz8/Nm8fHxgvHx8YLo7+SO0+zFuczquszM6bzJ1+zMru7w7Ibx8fGC8fHxgvHx
8UcAAAAAAAAAAAAAAAAAAAAA4+PjCfHx8Vzx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgfPz
80D///8BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB8/PzK/Ly8mDz8/N+8fHxgvHx8YLy8vJ68vLyUezs
7BsAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAevr6w3j4+MJAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//AAD8fwAA4AcAAMADAACAAQAAgAEAAIABAACAAQAAgAEAAIAB
AADAAwAAwAMAAOAHAADwDwAA/n8AAP//AAA=
</value>
</data>
</root>
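The resx header comment above describes how typed entries such as the $this.Icon block are serialized; as a quick illustration, the following C# sketch enumerates a .resx file and recovers the icon as a System.Drawing.Icon. The file name is a placeholder for whichever .resx is being inspected, and the snippet assumes references to System.Windows.Forms and System.Drawing.

    using System;
    using System.Collections;
    using System.Drawing;
    using System.Resources;

    class ResxReadSketch {
        static void Main() {
            // ResXResourceReader deserializes each <data> node back into its .NET type,
            // so a bytearray.base64 icon entry comes out as a ready-to-use Icon.
            using (var reader = new ResXResourceReader("DownloadForm.resx")) {
                foreach (DictionaryEntry entry in reader) {
                    if (entry.Value is Icon icon) {
                        Console.WriteLine($"{entry.Key}: {icon.Width}x{icon.Height} icon");
                    } else {
                        Console.WriteLine($"{entry.Key}: {entry.Value?.GetType().Name}");
                    }
                }
            }
        }
    }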

View File

@@ -1,3 +0,0 @@
<Weavers xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="FodyWeavers.xsd">
<Costura DisableCompression='true' IncludeDebugSymbols='false' />
</Weavers>

View File

@@ -1,141 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema">
<!-- This file was generated by Fody. Manual changes to this file will be lost when your project is rebuilt. -->
<xs:element name="Weavers">
<xs:complexType>
<xs:all>
<xs:element name="Costura" minOccurs="0" maxOccurs="1">
<xs:complexType>
<xs:all>
<xs:element minOccurs="0" maxOccurs="1" name="ExcludeAssemblies" type="xs:string">
<xs:annotation>
<xs:documentation>A list of assembly names to exclude from the default action of "embed all Copy Local references", delimited with line breaks</xs:documentation>
</xs:annotation>
</xs:element>
<xs:element minOccurs="0" maxOccurs="1" name="IncludeAssemblies" type="xs:string">
<xs:annotation>
<xs:documentation>A list of assembly names to include from the default action of "embed all Copy Local references", delimited with line breaks.</xs:documentation>
</xs:annotation>
</xs:element>
<xs:element minOccurs="0" maxOccurs="1" name="ExcludeRuntimeAssemblies" type="xs:string">
<xs:annotation>
<xs:documentation>A list of runtime assembly names to exclude from the default action of "embed all Copy Local references", delimited with line breaks</xs:documentation>
</xs:annotation>
</xs:element>
<xs:element minOccurs="0" maxOccurs="1" name="IncludeRuntimeAssemblies" type="xs:string">
<xs:annotation>
<xs:documentation>A list of runtime assembly names to include from the default action of "embed all Copy Local references", delimited with line breaks.</xs:documentation>
</xs:annotation>
</xs:element>
<xs:element minOccurs="0" maxOccurs="1" name="Unmanaged32Assemblies" type="xs:string">
<xs:annotation>
<xs:documentation>A list of unmanaged 32 bit assembly names to include, delimited with line breaks.</xs:documentation>
</xs:annotation>
</xs:element>
<xs:element minOccurs="0" maxOccurs="1" name="Unmanaged64Assemblies" type="xs:string">
<xs:annotation>
<xs:documentation>A list of unmanaged 64 bit assembly names to include, delimited with line breaks.</xs:documentation>
</xs:annotation>
</xs:element>
<xs:element minOccurs="0" maxOccurs="1" name="PreloadOrder" type="xs:string">
<xs:annotation>
<xs:documentation>The order of preloaded assemblies, delimited with line breaks.</xs:documentation>
</xs:annotation>
</xs:element>
</xs:all>
<xs:attribute name="CreateTemporaryAssemblies" type="xs:boolean">
<xs:annotation>
<xs:documentation>This will copy embedded files to disk before loading them into memory. This is helpful for some scenarios that expected an assembly to be loaded from a physical file.</xs:documentation>
</xs:annotation>
</xs:attribute>
<xs:attribute name="IncludeDebugSymbols" type="xs:boolean">
<xs:annotation>
<xs:documentation>Controls if .pdbs for reference assemblies are also embedded.</xs:documentation>
</xs:annotation>
</xs:attribute>
<xs:attribute name="IncludeRuntimeReferences" type="xs:boolean">
<xs:annotation>
<xs:documentation>Controls if runtime assemblies are also embedded.</xs:documentation>
</xs:annotation>
</xs:attribute>
<xs:attribute name="UseRuntimeReferencePaths" type="xs:boolean">
<xs:annotation>
<xs:documentation>Controls whether the runtime assemblies are embedded with their full path or only with their assembly name.</xs:documentation>
</xs:annotation>
</xs:attribute>
<xs:attribute name="DisableCompression" type="xs:boolean">
<xs:annotation>
<xs:documentation>Embedded assemblies are compressed by default, and uncompressed when they are loaded. You can turn compression off with this option.</xs:documentation>
</xs:annotation>
</xs:attribute>
<xs:attribute name="DisableCleanup" type="xs:boolean">
<xs:annotation>
<xs:documentation>As part of Costura, embedded assemblies are no longer included as part of the build. This cleanup can be turned off.</xs:documentation>
</xs:annotation>
</xs:attribute>
<xs:attribute name="LoadAtModuleInit" type="xs:boolean">
<xs:annotation>
<xs:documentation>Costura by default will load as part of the module initialization. This flag disables that behavior. Make sure you call CosturaUtility.Initialize() somewhere in your code.</xs:documentation>
</xs:annotation>
</xs:attribute>
<xs:attribute name="IgnoreSatelliteAssemblies" type="xs:boolean">
<xs:annotation>
<xs:documentation>Costura will by default use assemblies with a name like 'resources.dll' as a satellite resource and prepend the output path. This flag disables that behavior.</xs:documentation>
</xs:annotation>
</xs:attribute>
<xs:attribute name="ExcludeAssemblies" type="xs:string">
<xs:annotation>
<xs:documentation>A list of assembly names to exclude from the default action of "embed all Copy Local references", delimited with |</xs:documentation>
</xs:annotation>
</xs:attribute>
<xs:attribute name="IncludeAssemblies" type="xs:string">
<xs:annotation>
<xs:documentation>A list of assembly names to include from the default action of "embed all Copy Local references", delimited with |.</xs:documentation>
</xs:annotation>
</xs:attribute>
<xs:attribute name="ExcludeRuntimeAssemblies" type="xs:string">
<xs:annotation>
<xs:documentation>A list of runtime assembly names to exclude from the default action of "embed all Copy Local references", delimited with |</xs:documentation>
</xs:annotation>
</xs:attribute>
<xs:attribute name="IncludeRuntimeAssemblies" type="xs:string">
<xs:annotation>
<xs:documentation>A list of runtime assembly names to include from the default action of "embed all Copy Local references", delimited with |.</xs:documentation>
</xs:annotation>
</xs:attribute>
<xs:attribute name="Unmanaged32Assemblies" type="xs:string">
<xs:annotation>
<xs:documentation>A list of unmanaged 32 bit assembly names to include, delimited with |.</xs:documentation>
</xs:annotation>
</xs:attribute>
<xs:attribute name="Unmanaged64Assemblies" type="xs:string">
<xs:annotation>
<xs:documentation>A list of unmanaged 64 bit assembly names to include, delimited with |.</xs:documentation>
</xs:annotation>
</xs:attribute>
<xs:attribute name="PreloadOrder" type="xs:string">
<xs:annotation>
<xs:documentation>The order of preloaded assemblies, delimited with |.</xs:documentation>
</xs:annotation>
</xs:attribute>
</xs:complexType>
</xs:element>
</xs:all>
<xs:attribute name="VerifyAssembly" type="xs:boolean">
<xs:annotation>
<xs:documentation>'true' to run assembly verification (PEVerify) on the target assembly after all weavers have been executed.</xs:documentation>
</xs:annotation>
</xs:attribute>
<xs:attribute name="VerifyIgnoreCodes" type="xs:string">
<xs:annotation>
<xs:documentation>A comma-separated list of error codes that can be safely ignored in assembly verification.</xs:documentation>
</xs:annotation>
</xs:attribute>
<xs:attribute name="GenerateXsd" type="xs:boolean">
<xs:annotation>
<xs:documentation>'false' to turn off automatic generation of the XML Schema file.</xs:documentation>
</xs:annotation>
</xs:attribute>
</xs:complexType>
</xs:element>
</xs:schema>
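The LoadAtModuleInit documentation above notes that, when module-init loading is disabled, CosturaUtility.Initialize() must be called explicitly. A minimal sketch of what that looks like in a weaved WinForms entry point, assuming a FodyWeavers.xml with LoadAtModuleInit="false" (the FodyWeavers.xml in this tree keeps the default, so the call is not needed there):

    using System;
    using System.Windows.Forms;

    static class ManualCosturaInit {
        [STAThread]
        static void Main() {
            // CosturaUtility is injected into the assembly by the Costura.Fody weaver;
            // call it before anything touches an embedded reference.
            CosturaUtility.Initialize();

            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);
            Application.Run(new Form());
        }
    }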

View File

@@ -1,243 +0,0 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Drawing;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Sockets;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;
using System.Windows.Forms;
using Microsoft.Win32;
using Newtonsoft.Json;
using UptimeKuma.Properties;
namespace UptimeKuma {
static class Program {
/// <summary>
/// The main entry point for the application.
/// </summary>
[STAThread]
static void Main(string[] args) {
var cwd = Path.GetDirectoryName(Application.ExecutablePath);
if (cwd != null) {
Environment.CurrentDirectory = cwd;
}
Application.EnableVisualStyles();
Application.SetCompatibleTextRenderingDefault(false);
Application.Run(new UptimeKumaApplicationContext());
}
}
public class UptimeKumaApplicationContext : ApplicationContext
{
private static Mutex mutex = null;
const string appName = "Uptime Kuma";
private NotifyIcon trayIcon;
private Process process;
private MenuItem statusMenuItem;
private MenuItem runWhenStarts;
private MenuItem openMenuItem;
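// Per-user Run registry key used by the "Run when system starts" menu item to toggle autostart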
private RegistryKey registryKey = Registry.CurrentUser.OpenSubKey("SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Run", true);
public UptimeKumaApplicationContext() {
// Single instance only
bool createdNew;
mutex = new Mutex(true, appName, out createdNew);
if (!createdNew) {
return;
}
var startingText = "Starting server...";
trayIcon = new NotifyIcon();
trayIcon.Text = startingText;
runWhenStarts = new MenuItem("Run when system starts", RunWhenStarts);
runWhenStarts.Checked = registryKey.GetValue(appName) != null;
statusMenuItem = new MenuItem(startingText);
statusMenuItem.Enabled = false;
openMenuItem = new MenuItem("Open", Open);
openMenuItem.Enabled = false;
trayIcon.Icon = Icon.ExtractAssociatedIcon(Assembly.GetExecutingAssembly().Location);
trayIcon.ContextMenu = new ContextMenu(new MenuItem[] {
statusMenuItem,
openMenuItem,
//new("Debug Console", DebugConsole),
runWhenStarts,
new("Check for Update...", CheckForUpdate),
new("Visit GitHub...", VisitGitHub),
new("About", About),
new("Exit", Exit),
});
trayIcon.MouseDoubleClick += new MouseEventHandler(Open);
trayIcon.Visible = true;
var hasUpdateFile = File.Exists("update");
if (!hasUpdateFile && Directory.Exists("core") && Directory.Exists("node") && Directory.Exists("core/node_modules") && Directory.Exists("core/dist")) {
// Go go go
StartProcess();
} else {
DownloadFiles();
}
}
void DownloadFiles() {
var form = new DownloadForm();
form.Closed += Exit;
form.Show();
}
private void RunWhenStarts(object sender, EventArgs e) {
if (registryKey == null) {
MessageBox.Show("Error: Unable to set startup registry key.");
return;
}
if (runWhenStarts.Checked) {
registryKey.DeleteValue(appName, false);
runWhenStarts.Checked = false;
} else {
registryKey.SetValue(appName, Application.ExecutablePath);
runWhenStarts.Checked = true;
}
}
void StartProcess() {
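// Launch the bundled Node.js runtime against the server entry point inside the extracted core folder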
var startInfo = new ProcessStartInfo {
FileName = "node/node.exe",
Arguments = "server/server.js --data-dir=\"../data/\"",
RedirectStandardOutput = false,
RedirectStandardError = false,
UseShellExecute = false,
CreateNoWindow = true,
WorkingDirectory = "core"
};
process = new Process();
process.StartInfo = startInfo;
process.EnableRaisingEvents = true;
process.Exited += ProcessExited;
try {
process.Start();
//Open(null, null);
// Async task to check if the server is ready
Task.Run(() => {
var runningText = "Server is running";
while (true) {
try {
// A socket that fails to connect cannot be reused, so create a fresh TcpClient for each attempt
using TcpClient tcpClient = new TcpClient();
tcpClient.Connect("127.0.0.1", 3001);
statusMenuItem.Text = runningText;
openMenuItem.Enabled = true;
trayIcon.Text = runningText;
break;
} catch (Exception) {
System.Threading.Thread.Sleep(2000);
}
}
});
} catch (Exception e) {
MessageBox.Show("Startup failed: " + e.Message, "Uptime Kuma Error");
}
}
void StopProcess() {
process?.Kill();
}
void Open(object sender, EventArgs e) {
Process.Start("http://localhost:3001");
}
void DebugConsole(object sender, EventArgs e) {
}
void CheckForUpdate(object sender, EventArgs e) {
var needUpdate = false;
// Check version.json exists
if (File.Exists("version.json")) {
// Load version.json and compare with the latest version from GitHub
var currentVersionObj = JsonConvert.DeserializeObject<Version>(File.ReadAllText("version.json"));
var versionJson = new WebClient().DownloadString("https://uptime.kuma.pet/version");
var latestVersionObj = JsonConvert.DeserializeObject<Version>(versionJson);
// Compare version, if the latest version is newer, then update
if (new System.Version(latestVersionObj.latest).CompareTo(new System.Version(currentVersionObj.latest)) > 0) {
var result = MessageBox.Show("A new version is available. Do you want to update?", "Update", MessageBoxButtons.YesNo);
if (result == DialogResult.Yes) {
// Create an empty file `update`, so the app will download the core files again at startup
File.Create("update").Close();
trayIcon.Visible = false;
process?.Kill();
// Restart the app, it will download the core files again at startup
Application.Restart();
}
} else {
MessageBox.Show("You are using the latest version.");
}
}
}
void VisitGitHub(object sender, EventArgs e)
{
Process.Start("https://github.com/louislam/uptime-kuma");
}
void About(object sender, EventArgs e)
{
MessageBox.Show("Uptime Kuma Windows Runtime v1.0.0" + Environment.NewLine + "© 2023 Louis Lam", "Info");
}
void Exit(object sender, EventArgs e)
{
// Hide tray icon, otherwise it will remain shown until user mouses over it
trayIcon.Visible = false;
process?.Kill();
Application.Exit();
}
void ProcessExited(object sender, EventArgs e) {
if (process.ExitCode != 0) {
var line = "";
// StandardOutput can only be read when it was redirected in StartProcess()
if (process.StartInfo.RedirectStandardOutput) {
while (!process.StandardOutput.EndOfStream) {
line += process.StandardOutput.ReadLine();
}
}
MessageBox.Show("Uptime Kuma exited unexpectedly. Exit code: " + process.ExitCode + " " + line);
}
trayIcon.Visible = false;
Application.Exit();
}
}
}

View File

@@ -1,36 +0,0 @@
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("Uptime Kuma")]
[assembly: AssemblyDescription("A portable executable for running Uptime Kuma")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("Uptime Kuma")]
[assembly: AssemblyProduct("Uptime Kuma")]
[assembly: AssemblyCopyright("Copyright © 2023 Louis Lam")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("86B40AFB-61FC-433D-8C31-650B0F32EA8F")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.1.0")]
[assembly: AssemblyFileVersion("1.0.1.0")]

View File

@@ -1,62 +0,0 @@
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.42000
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace UptimeKuma.Properties {
/// <summary>
/// A strongly-typed resource class, for looking up localized strings, etc.
/// </summary>
// This class was auto-generated by the StronglyTypedResourceBuilder
// class via a tool like ResGen or Visual Studio.
// To add or remove a member, edit your .ResX file then rerun ResGen
// with the /str option, or rebuild your VS project.
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder",
"4.0.0.0")]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
internal class Resources {
private static global::System.Resources.ResourceManager resourceMan;
private static global::System.Globalization.CultureInfo resourceCulture;
[global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance",
"CA1811:AvoidUncalledPrivateCode")]
internal Resources() {
}
/// <summary>
/// Returns the cached ResourceManager instance used by this class.
/// </summary>
[global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState
.Advanced)]
internal static global::System.Resources.ResourceManager ResourceManager {
get {
if ((resourceMan == null)) {
global::System.Resources.ResourceManager temp =
new global::System.Resources.ResourceManager("UptimeKuma.Properties.Resources",
typeof(Resources).Assembly);
resourceMan = temp;
}
return resourceMan;
}
}
/// <summary>
/// Overrides the current thread's CurrentUICulture property for all
/// resource lookups using this strongly typed resource class.
/// </summary>
[global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState
.Advanced)]
internal static global::System.Globalization.CultureInfo Culture {
get { return resourceCulture; }
set { resourceCulture = value; }
}
}
}
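Resources.resx currently has no data entries, so the generated class above exposes only ResourceManager and Culture. A usage sketch from within the same assembly, assuming a hypothetical string entry named "AppTitle":

    using System.Globalization;
    using UptimeKuma.Properties;

    class ResourcesUsageSketch {
        static void Main() {
            // Pin all strongly typed lookups to a fixed culture.
            Resources.Culture = CultureInfo.InvariantCulture;

            // "AppTitle" is hypothetical; GetString returns null while the .resx has no entries.
            var title = Resources.ResourceManager.GetString("AppTitle", Resources.Culture);
            System.Console.WriteLine(title ?? "(resource not defined)");
        }
    }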

View File

@@ -1,117 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<root>
<!--
Microsoft ResX Schema
Version 2.0
The primary goal of this format is to allow a simple XML format
that is mostly human readable. The generation and parsing of the
various data types are done through the TypeConverter classes
associated with the data types.
Example:
... ado.net/XML headers & schema ...
<resheader name="resmimetype">text/microsoft-resx</resheader>
<resheader name="version">2.0</resheader>
<resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader>
<resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader>
<data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data>
<data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data>
<data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64">
<value>[base64 mime encoded serialized .NET Framework object]</value>
</data>
<data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
<value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value>
<comment>This is a comment</comment>
</data>
There are any number of "resheader" rows that contain simple
name/value pairs.
Each data row contains a name and a value. The row also contains a
type or mimetype. Type corresponds to a .NET class that supports
text/value conversion through the TypeConverter architecture.
Classes that don't support this are serialized and stored with the
mimetype set.
The mimetype is used for serialized objects, and tells the
ResXResourceReader how to depersist the object. This is currently not
extensible. For a given mimetype the value must be set accordingly:
Note - application/x-microsoft.net.object.binary.base64 is the format
that the ResXResourceWriter will generate, however the reader can
read any of the formats listed below.
mimetype: application/x-microsoft.net.object.binary.base64
value : The object must be serialized with
: System.Runtime.Serialization.Formatters.Binary.BinaryFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.soap.base64
value : The object must be serialized with
: System.Runtime.Serialization.Formatters.Soap.SoapFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.bytearray.base64
value : The object must be serialized into a byte array
: using a System.ComponentModel.TypeConverter
: and then encoded with base64 encoding.
-->
<xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
<xsd:element name="root" msdata:IsDataSet="true">
<xsd:complexType>
<xsd:choice maxOccurs="unbounded">
<xsd:element name="metadata">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" />
<xsd:attribute name="type" type="xsd:string" />
<xsd:attribute name="mimetype" type="xsd:string" />
</xsd:complexType>
</xsd:element>
<xsd:element name="assembly">
<xsd:complexType>
<xsd:attribute name="alias" type="xsd:string" />
<xsd:attribute name="name" type="xsd:string" />
</xsd:complexType>
</xsd:element>
<xsd:element name="data">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
<xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" msdata:Ordinal="1" />
<xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
<xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
</xsd:complexType>
</xsd:element>
<xsd:element name="resheader">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" />
</xsd:complexType>
</xsd:element>
</xsd:choice>
</xsd:complexType>
</xsd:element>
</xsd:schema>
<resheader name="resmimetype">
<value>text/microsoft-resx</value>
</resheader>
<resheader name="version">
<value>2.0</value>
</resheader>
<resheader name="reader">
<value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<resheader name="writer">
<value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
</root>
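As a counterpart to the reader sketch earlier, this is roughly how a .resx like the one above is produced in code; ResXResourceWriter emits the schema and resheader boilerplate on its own. The output path and entry are placeholders.

    using System.Resources;

    class ResxWriteSketch {
        static void Main() {
            using (var writer = new ResXResourceWriter("Example.resx")) {
                // Each AddResource call becomes a <data> node; plain strings need no mimetype.
                writer.AddResource("Greeting", "Hello from Uptime Kuma");
                writer.Generate();
            }
        }
    }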

View File

@@ -1,23 +0,0 @@
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.42000
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace UptimeKuma.Properties {
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
[global::System.CodeDom.Compiler.GeneratedCodeAttribute(
"Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "11.0.0.0")]
internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
private static Settings defaultInstance =
((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
public static Settings Default {
get { return defaultInstance; }
}
}
}
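Settings.settings defines no properties yet, so the generated singleton above is only scaffolding; the sketch below shows the access pattern it enables once settings exist (any setting name would be hypothetical at this point).

    using UptimeKuma.Properties;

    class SettingsSketch {
        static void Main() {
            var settings = Settings.Default;
            // With user-scoped settings defined in Settings.settings, they would be read from
            // typed properties on this instance and persisted with Save(); while none are
            // defined, Save() has nothing to write.
            settings.Save();
        }
    }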

View File

@@ -1,7 +0,0 @@
<?xml version='1.0' encoding='utf-8'?>
<SettingsFile xmlns="http://schemas.microsoft.com/VisualStudio/2004/01/settings" CurrentProfile="(Default)">
<Profiles>
<Profile Name="(Default)" />
</Profiles>
<Settings />
</SettingsFile>

View File

@@ -1,212 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="15.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<Import Project="packages\Costura.Fody.5.7.0\build\Costura.Fody.props" Condition="Exists('packages\Costura.Fody.5.7.0\build\Costura.Fody.props')" />
<Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProjectGuid>{2DB53988-1D93-4AC0-90C4-96ADEAAC5C04}</ProjectGuid>
<OutputType>WinExe</OutputType>
<RootNamespace>UptimeKuma</RootNamespace>
<AssemblyName>uptime-kuma</AssemblyName>
<TargetFrameworkVersion>v4.7.2</TargetFrameworkVersion>
<FileAlignment>512</FileAlignment>
<AutoGenerateBindingRedirects>true</AutoGenerateBindingRedirects>
<Deterministic>true</Deterministic>
<ApplicationIcon>..\..\public\favicon.ico</ApplicationIcon>
<LangVersion>9</LangVersion>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<PlatformTarget>AnyCPU</PlatformTarget>
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<PlatformTarget>AnyCPU</PlatformTarget>
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup>
<ApplicationManifest>app.manifest</ApplicationManifest>
</PropertyGroup>
<PropertyGroup>
<PostBuildEvent>COPY "$(SolutionDir)bin\Debug\uptime-kuma.exe" "%UserProfile%\Desktop\uptime-kuma-win64\"</PostBuildEvent>
</PropertyGroup>
<ItemGroup>
<Reference Include="Costura, Version=5.7.0.0, Culture=neutral, processorArchitecture=MSIL">
<HintPath>packages\Costura.Fody.5.7.0\lib\netstandard1.0\Costura.dll</HintPath>
</Reference>
<Reference Include="Microsoft.Win32.Primitives, Version=4.0.2.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\Microsoft.Win32.Primitives.4.3.0\lib\net46\Microsoft.Win32.Primitives.dll</HintPath>
</Reference>
<Reference Include="mscorlib" />
<Reference Include="Newtonsoft.Json, Version=13.0.0.0, Culture=neutral, PublicKeyToken=30ad4fe6b2a6aeed, processorArchitecture=MSIL">
<HintPath>packages\Newtonsoft.Json.13.0.2\lib\net45\Newtonsoft.Json.dll</HintPath>
</Reference>
<Reference Include="System" />
<Reference Include="System.AppContext, Version=4.1.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.AppContext.4.3.0\lib\net463\System.AppContext.dll</HintPath>
</Reference>
<Reference Include="System.Buffers, Version=4.0.3.0, Culture=neutral, PublicKeyToken=cc7b13ffcd2ddd51, processorArchitecture=MSIL">
<HintPath>packages\System.Buffers.4.5.1\lib\net461\System.Buffers.dll</HintPath>
</Reference>
<Reference Include="System.ComponentModel.Composition" />
<Reference Include="System.Console, Version=4.0.1.1, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Console.4.3.1\lib\net46\System.Console.dll</HintPath>
</Reference>
<Reference Include="System.Core" />
<Reference Include="System.Diagnostics.DiagnosticSource, Version=7.0.0.1, Culture=neutral, PublicKeyToken=cc7b13ffcd2ddd51, processorArchitecture=MSIL">
<HintPath>packages\System.Diagnostics.DiagnosticSource.7.0.1\lib\net462\System.Diagnostics.DiagnosticSource.dll</HintPath>
</Reference>
<Reference Include="System.Diagnostics.Tracing, Version=4.1.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Diagnostics.Tracing.4.3.0\lib\net462\System.Diagnostics.Tracing.dll</HintPath>
</Reference>
<Reference Include="System.Globalization.Calendars, Version=4.0.2.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Globalization.Calendars.4.3.0\lib\net46\System.Globalization.Calendars.dll</HintPath>
</Reference>
<Reference Include="System.IO, Version=4.1.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.IO.4.3.0\lib\net462\System.IO.dll</HintPath>
</Reference>
<Reference Include="System.IO.Compression, Version=4.1.2.0, Culture=neutral, PublicKeyToken=b77a5c561934e089, processorArchitecture=MSIL">
<HintPath>packages\System.IO.Compression.4.3.0\lib\net46\System.IO.Compression.dll</HintPath>
</Reference>
<Reference Include="System.IO.Compression.FileSystem" />
<Reference Include="System.IO.Compression.ZipFile, Version=4.0.2.0, Culture=neutral, PublicKeyToken=b77a5c561934e089, processorArchitecture=MSIL">
<HintPath>packages\System.IO.Compression.ZipFile.4.3.0\lib\net46\System.IO.Compression.ZipFile.dll</HintPath>
</Reference>
<Reference Include="System.IO.FileSystem, Version=4.0.2.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.IO.FileSystem.4.3.0\lib\net46\System.IO.FileSystem.dll</HintPath>
</Reference>
<Reference Include="System.IO.FileSystem.Primitives, Version=4.0.2.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.IO.FileSystem.Primitives.4.3.0\lib\net46\System.IO.FileSystem.Primitives.dll</HintPath>
</Reference>
<Reference Include="System.Linq, Version=4.1.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Linq.4.3.0\lib\net463\System.Linq.dll</HintPath>
</Reference>
<Reference Include="System.Linq.Expressions, Version=4.1.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Linq.Expressions.4.3.0\lib\net463\System.Linq.Expressions.dll</HintPath>
</Reference>
<Reference Include="System.Memory, Version=4.0.1.2, Culture=neutral, PublicKeyToken=cc7b13ffcd2ddd51, processorArchitecture=MSIL">
<HintPath>packages\System.Memory.4.5.5\lib\net461\System.Memory.dll</HintPath>
</Reference>
<Reference Include="System.Net.Http, Version=4.1.1.3, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Net.Http.4.3.4\lib\net46\System.Net.Http.dll</HintPath>
</Reference>
<Reference Include="System.Net.Sockets, Version=4.1.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Net.Sockets.4.3.0\lib\net46\System.Net.Sockets.dll</HintPath>
</Reference>
<Reference Include="System.Numerics" />
<Reference Include="System.Numerics.Vectors, Version=4.1.4.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Numerics.Vectors.4.5.0\lib\net46\System.Numerics.Vectors.dll</HintPath>
</Reference>
<Reference Include="System.Reflection, Version=4.1.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Reflection.4.3.0\lib\net462\System.Reflection.dll</HintPath>
</Reference>
<Reference Include="System.Runtime, Version=4.1.1.1, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Runtime.4.3.1\lib\net462\System.Runtime.dll</HintPath>
</Reference>
<Reference Include="System.Runtime.CompilerServices.Unsafe, Version=6.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Runtime.CompilerServices.Unsafe.6.0.0\lib\net461\System.Runtime.CompilerServices.Unsafe.dll</HintPath>
</Reference>
<Reference Include="System.Runtime.Extensions, Version=4.1.1.1, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Runtime.Extensions.4.3.1\lib\net462\System.Runtime.Extensions.dll</HintPath>
</Reference>
<Reference Include="System.Runtime.InteropServices, Version=4.1.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Runtime.InteropServices.4.3.0\lib\net463\System.Runtime.InteropServices.dll</HintPath>
</Reference>
<Reference Include="System.Runtime.InteropServices.RuntimeInformation, Version=4.0.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Runtime.InteropServices.RuntimeInformation.4.3.0\lib\net45\System.Runtime.InteropServices.RuntimeInformation.dll</HintPath>
</Reference>
<Reference Include="System.Security.Cryptography.Algorithms, Version=4.2.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Security.Cryptography.Algorithms.4.3.1\lib\net463\System.Security.Cryptography.Algorithms.dll</HintPath>
</Reference>
<Reference Include="System.Security.Cryptography.Encoding, Version=4.0.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Security.Cryptography.Encoding.4.3.0\lib\net46\System.Security.Cryptography.Encoding.dll</HintPath>
</Reference>
<Reference Include="System.Security.Cryptography.Primitives, Version=4.0.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Security.Cryptography.Primitives.4.3.0\lib\net46\System.Security.Cryptography.Primitives.dll</HintPath>
</Reference>
<Reference Include="System.Security.Cryptography.X509Certificates, Version=4.1.1.2, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Security.Cryptography.X509Certificates.4.3.2\lib\net461\System.Security.Cryptography.X509Certificates.dll</HintPath>
</Reference>
<Reference Include="System.Text.RegularExpressions, Version=4.1.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Text.RegularExpressions.4.3.1\lib\net463\System.Text.RegularExpressions.dll</HintPath>
</Reference>
<Reference Include="System.Xml.Linq" />
<Reference Include="System.Data.DataSetExtensions" />
<Reference Include="Microsoft.CSharp" />
<Reference Include="System.Data" />
<Reference Include="System.Deployment" />
<Reference Include="System.Drawing" />
<Reference Include="System.Windows.Forms" />
<Reference Include="System.Xml" />
<Reference Include="System.Xml.ReaderWriter, Version=4.1.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
<HintPath>packages\System.Xml.ReaderWriter.4.3.1\lib\net46\System.Xml.ReaderWriter.dll</HintPath>
</Reference>
</ItemGroup>
<ItemGroup>
<Compile Include="DownloadForm.cs">
<SubType>Form</SubType>
</Compile>
<Compile Include="DownloadForm.Designer.cs">
<DependentUpon>DownloadForm.cs</DependentUpon>
</Compile>
<Compile Include="Program.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<Compile Include="Version.cs" />
<EmbeddedResource Include="DownloadForm.resx">
<DependentUpon>DownloadForm.cs</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Include="Properties\Resources.resx">
<Generator>ResXFileCodeGenerator</Generator>
<LastGenOutput>Resources.Designer.cs</LastGenOutput>
<SubType>Designer</SubType>
</EmbeddedResource>
<Compile Include="Properties\Resources.Designer.cs">
<AutoGen>True</AutoGen>
<DependentUpon>Resources.resx</DependentUpon>
</Compile>
<None Include="..\..\public\favicon.ico">
<Link>favicon.ico</Link>
</None>
<None Include="packages.config" />
<None Include="Properties\Settings.settings">
<Generator>SettingsSingleFileGenerator</Generator>
<LastGenOutput>Settings.Designer.cs</LastGenOutput>
</None>
<Compile Include="Properties\Settings.Designer.cs">
<AutoGen>True</AutoGen>
<DependentUpon>Settings.settings</DependentUpon>
<DesignTimeSharedInput>True</DesignTimeSharedInput>
</Compile>
</ItemGroup>
<ItemGroup>
<None Include="App.config" />
</ItemGroup>
<ItemGroup>
<Content Include=".gitignore" />
<Content Include="app.manifest" />
</ItemGroup>
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
<Target Name="EnsureNuGetPackageBuildImports" BeforeTargets="PrepareForBuild">
<PropertyGroup>
<ErrorText>This project references NuGet package(s) that are missing on this computer. Enable NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105.The missing file is {0}.</ErrorText>
</PropertyGroup>
<Error Condition="!Exists('packages\Costura.Fody.5.7.0\build\Costura.Fody.props')" Text="$([System.String]::Format('$(ErrorText)', 'packages\Costura.Fody.5.7.0\build\Costura.Fody.props'))" />
<Error Condition="!Exists('packages\Costura.Fody.5.7.0\build\Costura.Fody.targets')" Text="$([System.String]::Format('$(ErrorText)', 'packages\Costura.Fody.5.7.0\build\Costura.Fody.targets'))" />
<Error Condition="!Exists('packages\Fody.6.6.4\build\Fody.targets')" Text="$([System.String]::Format('$(ErrorText)', 'packages\Fody.6.6.4\build\Fody.targets'))" />
<Error Condition="!Exists('packages\NETStandard.Library.2.0.3\build\netstandard2.0\NETStandard.Library.targets')" Text="$([System.String]::Format('$(ErrorText)', 'packages\NETStandard.Library.2.0.3\build\netstandard2.0\NETStandard.Library.targets'))" />
</Target>
<Import Project="packages\Costura.Fody.5.7.0\build\Costura.Fody.targets" Condition="Exists('packages\Costura.Fody.5.7.0\build\Costura.Fody.targets')" />
<Import Project="packages\Fody.6.6.4\build\Fody.targets" Condition="Exists('packages\Fody.6.6.4\build\Fody.targets')" />
<Import Project="packages\NETStandard.Library.2.0.3\build\netstandard2.0\NETStandard.Library.targets" Condition="Exists('packages\NETStandard.Library.2.0.3\build\netstandard2.0\NETStandard.Library.targets')" />
</Project>

View File

@@ -1,16 +0,0 @@

Microsoft Visual Studio Solution File, Format Version 12.00
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "UptimeKuma", "UptimeKuma.csproj", "{2DB53988-1D93-4AC0-90C4-96ADEAAC5C04}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{2DB53988-1D93-4AC0-90C4-96ADEAAC5C04}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{2DB53988-1D93-4AC0-90C4-96ADEAAC5C04}.Debug|Any CPU.Build.0 = Debug|Any CPU
{2DB53988-1D93-4AC0-90C4-96ADEAAC5C04}.Release|Any CPU.ActiveCfg = Release|Any CPU
{2DB53988-1D93-4AC0-90C4-96ADEAAC5C04}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
EndGlobal

View File

@@ -1,3 +0,0 @@
<wpf:ResourceDictionary xml:space="preserve" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml" xmlns:s="clr-namespace:System;assembly=mscorlib" xmlns:ss="urn:shemas-jetbrains-com:settings-storage-xaml" xmlns:wpf="http://schemas.microsoft.com/winfx/2006/xaml/presentation">
<s:Boolean x:Key="/Default/ResxEditorPersonal/CheckedGroups/=UptimeKuma_002FProperties_002FResources/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/ResxEditorPersonal/Initialized/@EntryValue">True</s:Boolean></wpf:ResourceDictionary>

View File

@@ -1,9 +0,0 @@
namespace UptimeKuma {
public class Version {
public string latest { get; set; }
public string slow { get; set; }
public string beta { get; set; }
public string nodejs { get; set; }
public string exe { get; set; }
}
}

View File

@@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0" xmlns:asmv3="urn:schemas-microsoft-com:asm.v3">
<asmv3:application>
<asmv3:windowsSettings>
<dpiAware xmlns="http://schemas.microsoft.com/SMI/2005/WindowsSettings">true</dpiAware>
<dpiAwareness xmlns="http://schemas.microsoft.com/SMI/2016/WindowsSettings">PerMonitorV2</dpiAwareness>
</asmv3:windowsSettings>
</asmv3:application>
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v2">
<security>
<requestedPrivileges xmlns="urn:schemas-microsoft-com:asm.v3">
<!-- UAC Manifest Options
If you want to change the Windows User Account Control level replace the
requestedExecutionLevel node with one of the following.
<requestedExecutionLevel level="asInvoker" uiAccess="false" />
<requestedExecutionLevel level="requireAdministrator" uiAccess="false" />
<requestedExecutionLevel level="highestAvailable" uiAccess="false" />
Specifying requestedExecutionLevel element will disable file and registry virtualization.
Remove this element if your application requires this virtualization for backwards
compatibility.
-->
<requestedExecutionLevel level="asInvoker" uiAccess="false" />
</requestedPrivileges>
</security>
</trustInfo>
</assembly>

View File

@@ -1,56 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<packages>
<package id="Costura.Fody" version="5.7.0" targetFramework="net472" developmentDependency="true" />
<package id="Fody" version="6.6.4" targetFramework="net472" developmentDependency="true" />
<package id="Microsoft.NETCore.Platforms" version="7.0.0" targetFramework="net472" />
<package id="Microsoft.Win32.Primitives" version="4.3.0" targetFramework="net472" />
<package id="NETStandard.Library" version="2.0.3" targetFramework="net472" />
<package id="Newtonsoft.Json" version="13.0.2" targetFramework="net472" />
<package id="System.AppContext" version="4.3.0" targetFramework="net472" />
<package id="System.Console" version="4.3.1" targetFramework="net472" />
<package id="System.Diagnostics.DiagnosticSource" version="7.0.1" targetFramework="net472" />
<package id="System.Net.Http" version="4.3.4" targetFramework="net472" />
<package id="System.Runtime.Extensions" version="4.3.1" targetFramework="net472" />
<package id="System.Security.Cryptography.Algorithms" version="4.3.1" targetFramework="net472" />
<package id="System.Security.Cryptography.X509Certificates" version="4.3.2" targetFramework="net472" />
<package id="System.Text.RegularExpressions" version="4.3.1" targetFramework="net472" />
<package id="System.Xml.ReaderWriter" version="4.3.1" targetFramework="net472" />
<package id="System.Memory" version="4.5.5" targetFramework="net472" />
<package id="System.Net.Primitives" version="4.3.1" targetFramework="net472" />
<package id="System.Runtime" version="4.3.1" targetFramework="net472" />
<package id="System.Buffers" version="4.5.1" targetFramework="net472" />
<package id="System.Collections" version="4.3.0" targetFramework="net472" />
<package id="System.Collections.Concurrent" version="4.3.0" targetFramework="net472" />
<package id="System.Diagnostics.Debug" version="4.3.0" targetFramework="net472" />
<package id="System.Diagnostics.Tools" version="4.3.0" targetFramework="net472" />
<package id="System.Diagnostics.Tracing" version="4.3.0" targetFramework="net472" />
<package id="System.Globalization" version="4.3.0" targetFramework="net472" />
<package id="System.Globalization.Calendars" version="4.3.0" targetFramework="net472" />
<package id="System.IO" version="4.3.0" targetFramework="net472" />
<package id="System.IO.Compression" version="4.3.0" targetFramework="net472" />
<package id="System.IO.Compression.ZipFile" version="4.3.0" targetFramework="net472" />
<package id="System.IO.FileSystem" version="4.3.0" targetFramework="net472" />
<package id="System.IO.FileSystem.Primitives" version="4.3.0" targetFramework="net472" />
<package id="System.Linq" version="4.3.0" targetFramework="net472" />
<package id="System.Linq.Expressions" version="4.3.0" targetFramework="net472" />
<package id="System.Net.Sockets" version="4.3.0" targetFramework="net472" />
<package id="System.Numerics.Vectors" version="4.5.0" targetFramework="net472" />
<package id="System.ObjectModel" version="4.3.0" targetFramework="net472" />
<package id="System.Reflection" version="4.3.0" targetFramework="net472" />
<package id="System.Reflection.Extensions" version="4.3.0" targetFramework="net472" />
<package id="System.Reflection.Primitives" version="4.3.0" targetFramework="net472" />
<package id="System.Resources.ResourceManager" version="4.3.0" targetFramework="net472" />
<package id="System.Runtime.CompilerServices.Unsafe" version="6.0.0" targetFramework="net472" />
<package id="System.Runtime.Handles" version="4.3.0" targetFramework="net472" />
<package id="System.Runtime.InteropServices" version="4.3.0" targetFramework="net472" />
<package id="System.Runtime.InteropServices.RuntimeInformation" version="4.3.0" targetFramework="net472" />
<package id="System.Runtime.Numerics" version="4.3.0" targetFramework="net472" />
<package id="System.Security.Cryptography.Encoding" version="4.3.0" targetFramework="net472" />
<package id="System.Security.Cryptography.Primitives" version="4.3.0" targetFramework="net472" />
<package id="System.Text.Encoding" version="4.3.0" targetFramework="net472" />
<package id="System.Text.Encoding.Extensions" version="4.3.0" targetFramework="net472" />
<package id="System.Threading" version="4.3.0" targetFramework="net472" />
<package id="System.Threading.Tasks" version="4.3.0" targetFramework="net472" />
<package id="System.Threading.Timer" version="4.3.0" targetFramework="net472" />
<package id="System.Xml.XDocument" version="4.3.0" targetFramework="net472" />
</packages>

View File

@@ -1,12 +1,8 @@
/*
* ⚠️ ⚠️ ⚠️ ⚠️ Due to a weird issue in Portainer, the healthcheck script is still pointing to this script for an unknown reason.
* IT CANNOT BE DROPPED, even though it looks like it is not used.
* See more: https://github.com/louislam/uptime-kuma/issues/2774#issuecomment-1429092359
*
* ⚠️ Deprecated: Changed to healthcheck.go, it will be deleted in the future.
* This script should be run after a period of time (180s), because the server may need some time to prepare.
*/
const FBSD = /^freebsd/.test(process.platform);
const { FBSD } = require("../server/util-server");
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
@@ -23,17 +19,17 @@ if (sslKey && sslCert) {
// If host is omitted, the server will accept connections on the unspecified IPv6 address (::) when IPv6 is available and the unspecified IPv4 address (0.0.0.0) otherwise.
// Dual-stack support for (::)
let hostname = process.env.UPTIME_KUMA_HOST;
let hostname = process.env.UPTIME_KUMA_SERVICE_HOST || process.env.UPTIME_KUMA_HOST || "::";
// Also read HOST if not *BSD, as HOST is a system environment variable in FreeBSD
if (!hostname && !FBSD) {
hostname = process.env.HOST;
}
const port = parseInt(process.env.UPTIME_KUMA_PORT || process.env.PORT || 3001);
const port = parseInt(process.env.UPTIME_KUMA_SERVICE_PORT || process.env.UPTIME_KUMA_PORT || process.env.PORT || 3001);
let options = {
host: hostname || "127.0.0.1",
host: hostname,
port: port,
timeout: 28 * 1000,
};
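
As a rough illustration of how options like these typically feed the probe itself, here is a minimal sketch using Node's built-in http/https modules; the status-code policy and exit codes are assumptions for the sketch, not taken from healthcheck.js:

// Sketch only: choose http or https depending on whether SSL is configured.
const http = require("http");
const https = require("https");

function probe(options, useSSL) {
    const client = useSSL ? https : http;
    const req = client.request(options, (res) => {
        // Assume any 2xx/3xx status means the server is healthy.
        process.exit(res.statusCode >= 200 && res.statusCode < 400 ? 0 : 1);
    });
    req.on("error", () => process.exit(1));
    req.end();
}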

View File

@@ -5,15 +5,15 @@
// curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh
println("=====================");
println("Uptime Kuma Install Script");
println("Uptime Kuma Installer");
println("=====================");
println("Supported OS: Ubuntu >= 16.04, Debian and CentOS/RHEL 7/8");
println("Supported OS: CentOS 7/8, Ubuntu >= 16.04 and Debian");
println("---------------------------------------");
println("This script is designed for Linux and basic usage.");
println("For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation");
println("---------------------------------------");
println("");
println("Local - Install Uptime Kuma on your current machine with git, Node.js and pm2");
println("Local - Install Uptime Kuma in your current machine with git, Node.js 14 and pm2");
println("Docker - Install Uptime Kuma Docker container");
println("");
@@ -29,10 +29,14 @@ function checkNode() {
bash("nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])')");
println("Node Version: " ++ nodeVersion);
if (nodeVersion <= "12") {
if (nodeVersion < "12") {
println("Error: Required Node.js 14");
call("exit", "1");
}
if (nodeVersion == "12") {
println("Warning: NodeJS " ++ nodeVersion ++ " is not tested.");
}
}
function deb() {
@@ -56,8 +60,8 @@ function deb() {
bash("apt --yes install curl");
}
println("Installing Node.js 16");
bash("curl -sL https://deb.nodesource.com/setup_16.x | bash - > log.txt");
println("Installing Node.js 14");
bash("curl -sL https://deb.nodesource.com/setup_14.x | bash - > log.txt");
bash("apt --yes install nodejs");
bash("node -v");
@@ -87,10 +91,6 @@ if (type == "local") {
bash("os=$(head -n1 /etc/issue | cut -f 1 -d ' ')");
if (os == "Ubuntu") {
distribution = "ubuntu";
// Get ubuntu version
bash(". /etc/lsb-release");
version = DISTRIB_RELEASE;
}
if (os == "Debian") {
distribution = "debian";
@@ -101,7 +101,6 @@ if (type == "local") {
println("Your OS: " ++ os);
println("Distribution: " ++ distribution);
println("Version: " ++ version);
println("Arch: " ++ arch);
if ("$3" != "") {
@@ -132,32 +131,15 @@ if (type == "local") {
checkNode();
} else {
bash("dnfCheck=$(dnf --version)");
// Use yum
if (dnfCheck == "") {
bash("curlCheck=$(curl --version)");
if (curlCheck == "") {
println("Installing Curl");
bash("yum -y -q install curl");
}
println("Installing Node.js 16");
bash("curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt");
bash("yum install -y -q nodejs");
} else {
bash("curlCheck=$(curl --version)");
if (curlCheck == "") {
println("Installing Curl");
bash("dnf -y install curl");
}
println("Installing Node.js 16");
bash("curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt");
bash("dnf install -y nodejs");
bash("curlCheck=$(curl --version)");
if (curlCheck == "") {
println("Installing Curl");
bash("yum -y -q install curl");
}
println("Installing Node.js 14");
bash("curl -sL https://rpm.nodesource.com/setup_14.x | bash - > log.txt");
bash("yum install -y -q nodejs");
bash("node -v");
bash("nodeCheckAgain=$(node -v)");
@@ -189,15 +171,13 @@ if (type == "local") {
bash("check=$(git --version)");
if (check == "") {
error = 1;
println("Error: git is not found!");
println("help: an installation guide is available at https://git-scm.com/book/en/v2/Getting-Started-Installing-Git");
println("Error: git is missing");
}
bash("check=$(node -v)");
if (check == "") {
error = 1;
println("Error: node is not found");
println("help: an installation guide is available at https://nodejs.org/en/download");
println("Error: node is missing");
}
if (error > 0) {
@@ -213,15 +193,6 @@ if (type == "local") {
bash("pm2 startup");
}
// Check again
bash("check=$(pm2 --version)");
if (check == "") {
println("Error: pm2 is not found!");
println("help: an installation guide is available at https://pm2.keymetrics.io/docs/usage/quick-start/");
bash("exit 1");
}
bash("mkdir -p $installPath");
bash("cd $installPath");
bash("git clone https://github.com/louislam/uptime-kuma.git .");
@@ -235,7 +206,6 @@ if (type == "local") {
bash("check=$(docker -v)");
if (check == "") {
println("Error: docker is not found!");
println("help: an installation guide is available at https://docs.docker.com/desktop/");
bash("exit 1");
}
@@ -243,7 +213,6 @@ if (type == "local") {
bash("if [[ \"$check\" == *\"Is the docker daemon running\"* ]]; then
\"echo\" \"Error: docker is not running\"
\"echo\" \"help: a troubleshooting guide is available at https://docs.docker.com/config/daemon/troubleshoot/\"
\"exit\" \"1\"
fi");

View File

@@ -1,44 +0,0 @@
// Generate on GitHub
const input = `
* Add Korean translation by @Alanimdeo in https://github.com/louislam/dockge/pull/86
`;
const template = `
### 🆕 New Features
### 💇‍♀️ Improvements
### 🐞 Bug Fixes
### ⬆️ Security Fixes
### 🦎 Translation Contributions
### Others
- Other small changes, code refactoring and comment/doc updates in this repo:
`;
const lines = input.split("\n").filter((line) => line.trim() !== "");
for (const line of lines) {
// Split the last " by "
const usernamePullRequestURL = line.split(" by ").pop();
if (!usernamePullRequestURL) {
console.log("Unable to parse", line);
continue;
}
const [ username, pullRequestURL ] = usernamePullRequestURL.split(" in ");
const pullRequestID = "#" + pullRequestURL.split("/").pop();
let message = line.split(" by ").shift();
if (!message) {
console.log("Unable to parse", line);
continue;
}
message = message.split("* ").pop();
console.log("-", pullRequestID, message, `(Thanks ${username})`);
}
console.log(template);
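
Tracing the string operations above with the sample input line, the loop would print a bullet like the following (reconstructed by hand, not captured from a real run), followed by the empty template:

- #86 Add Korean translation (Thanks @Alanimdeo)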

View File

@@ -5,8 +5,6 @@ const { R } = require("redbean-node");
const readline = require("readline");
const { initJWTSecret } = require("../server/util-server");
const User = require("../server/model/user");
const { io } = require("socket.io-client");
const { localWebSocketURL } = require("../server/config");
const args = require("args-parser")(process.argv);
const rl = readline.createInterface({
input: process.stdin,
@@ -38,16 +36,12 @@ const main = async () => {
// Reset all sessions by reset jwt secret
await initJWTSecret();
// Disconnect all other socket clients of the user
await disconnectAllSocketClients(user.username, password);
break;
} else {
console.log("Passwords do not match, please try again.");
}
}
console.log("Password reset successfully.");
}
} catch (e) {
console.error("Error: " + e.message);
@@ -72,44 +66,6 @@ function question(question) {
});
}
function disconnectAllSocketClients(username, password) {
return new Promise((resolve) => {
console.log("Connecting to " + localWebSocketURL + " to disconnect all other socket clients");
// Disconnect all socket connections
const socket = io(localWebSocketURL, {
reconnection: false,
timeout: 5000,
});
socket.on("connect", () => {
socket.emit("login", {
username,
password,
}, (res) => {
if (res.ok) {
console.log("Logged in.");
socket.emit("disconnectOtherSocketClients");
} else {
console.warn("Login failed.");
console.warn("Please restart the server to disconnect all sessions.");
}
socket.close();
});
});
socket.on("connect_error", function () {
// The localWebSocketURL is not guaranteed to work in more complicated Uptime Kuma setups
// Ask the user to restart the server manually
console.warn("Failed to connect to " + localWebSocketURL);
console.warn("Please restart the server to disconnect all sessions manually.");
resolve();
});
socket.on("disconnect", () => {
resolve();
});
});
}
if (!process.env.TEST_BACKEND) {
main();
}

View File

@@ -1,22 +0,0 @@
const fs = require("fs");
// Read the file from private/sort-contributors.txt
const file = fs.readFileSync("private/sort-contributors.txt", "utf8");
// Convert to an array of lines
let lines = file.split("\n");
// Remove empty lines
lines = lines.filter((line) => line !== "");
// Remove duplicates
lines = [ ...new Set(lines) ];
// Remove @weblate and @UptimeKumaBot
lines = lines.filter((line) => line !== "@weblate" && line !== "@UptimeKumaBot" && line !== "@louislam");
// Sort the lines
lines = lines.sort();
// Output the lines, concat with " "
console.log(lines.join(" "));

View File

@@ -1,9 +0,0 @@
// Check if docker is running
const { exec } = require("child_process");
exec("docker ps", (err, stdout, stderr) => {
if (err) {
console.error("Docker is not running. Please start docker and try again.");
process.exit(1);
}
});

View File

@@ -26,8 +26,7 @@ if (! exists) {
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n");
// Also update package-lock.json
const npm = /^win/.test(process.platform) ? "npm.cmd" : "npm";
childProcess.spawnSync(npm, [ "install" ]);
childProcess.spawnSync("npm", [ "install" ]);
commit(newVersion);
tag(newVersion);

View File

@@ -2,31 +2,15 @@
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0, viewport-fit=cover" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<link rel="apple-touch-icon" sizes="180x180" href="/apple-touch-icon.png">
<link rel="icon" type="image/svg+xml" href="/icon.svg" />
<link rel="manifest" href="/manifest.json" />
<meta name="theme-color" id="theme-color" content="" />
<meta name="description" content="Uptime Kuma monitoring tool" />
<title>Uptime Kuma</title>
<style>
.noscript-message {
font-size: 20px;
text-align: center;
padding: 10px;
max-width: 500px;
margin: 0 auto;
}
</style>
</head>
<body>
<noscript>
<div class="noscript-message">
Sorry, you don't seem to have JavaScript enabled or your browser
doesn't support it.<br />This website requires JavaScript to function.
Please enable JavaScript in your browser settings to continue.
</div>
</noscript>
<div id="app"></div>
<script type="module" src="/src/main.js"></script>
</body>

View File

@@ -3,15 +3,15 @@
# The command is working on Windows PowerShell and Docker for Windows only.
# curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh
"echo" "-e" "====================="
"echo" "-e" "Uptime Kuma Install Script"
"echo" "-e" "Uptime Kuma Installer"
"echo" "-e" "====================="
"echo" "-e" "Supported OS: Ubuntu >= 16.04, Debian and CentOS/RHEL 7/8"
"echo" "-e" "Supported OS: CentOS 7/8, Ubuntu >= 16.04 and Debian"
"echo" "-e" "---------------------------------------"
"echo" "-e" "This script is designed for Linux and basic usage."
"echo" "-e" "For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation"
"echo" "-e" "---------------------------------------"
"echo" "-e" ""
"echo" "-e" "Local - Install Uptime Kuma on your current machine with git, Node.js and pm2"
"echo" "-e" "Local - Install Uptime Kuma in your current machine with git, Node.js 14 and pm2"
"echo" "-e" "Docker - Install Uptime Kuma Docker container"
"echo" "-e" ""
if [ "$1" != "" ]; then
@@ -25,9 +25,12 @@ function checkNode {
nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])')
"echo" "-e" "Node Version: ""$nodeVersion"
_0="12"
if [ $(($nodeVersion <= $_0)) == 1 ]; then
if [ $(($nodeVersion < $_0)) == 1 ]; then
"echo" "-e" "Error: Required Node.js 14"
"exit" "1"
fi
if [ "$nodeVersion" == "12" ]; then
"echo" "-e" "Warning: NodeJS ""$nodeVersion"" is not tested."
fi
}
function deb {
@@ -47,8 +50,8 @@ fi
"echo" "-e" "Installing Curl"
apt --yes install curl
fi
"echo" "-e" "Installing Node.js 16"
curl -sL https://deb.nodesource.com/setup_16.x | bash - > log.txt
"echo" "-e" "Installing Node.js 14"
curl -sL https://deb.nodesource.com/setup_14.x | bash - > log.txt
apt --yes install nodejs
node -v
nodeCheckAgain=$(node -v)
@@ -72,10 +75,7 @@ if [ "$type" == "local" ]; then
if [ -e "/etc/issue" ]; then
os=$(head -n1 /etc/issue | cut -f 1 -d ' ')
if [ "$os" == "Ubuntu" ]; then
distribution="ubuntu"
# Get ubuntu version
. /etc/lsb-release
version="$DISTRIB_RELEASE"
distribution="ubuntu"
fi
if [ "$os" == "Debian" ]; then
distribution="debian"
@@ -85,7 +85,6 @@ fi
arch=$(uname -i)
"echo" "-e" "Your OS: ""$os"
"echo" "-e" "Distribution: ""$distribution"
"echo" "-e" "Version: ""$version"
"echo" "-e" "Arch: ""$arch"
if [ "$3" != "" ]; then
port="$3"
@@ -109,27 +108,14 @@ fi
if [ "$nodeCheck" != "" ]; then
"checkNode"
else
dnfCheck=$(dnf --version)
# Use yum
if [ "$dnfCheck" == "" ]; then
curlCheck=$(curl --version)
if [ "$curlCheck" == "" ]; then
"echo" "-e" "Installing Curl"
yum -y -q install curl
curlCheck=$(curl --version)
if [ "$curlCheck" == "" ]; then
"echo" "-e" "Installing Curl"
yum -y -q install curl
fi
"echo" "-e" "Installing Node.js 16"
curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt
yum install -y -q nodejs
else
curlCheck=$(curl --version)
if [ "$curlCheck" == "" ]; then
"echo" "-e" "Installing Curl"
dnf -y install curl
fi
"echo" "-e" "Installing Node.js 16"
curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt
dnf install -y nodejs
fi
"echo" "-e" "Installing Node.js 14"
curl -sL https://rpm.nodesource.com/setup_14.x | bash - > log.txt
yum install -y -q nodejs
node -v
nodeCheckAgain=$(node -v)
if [ "$nodeCheckAgain" == "" ]; then
@@ -156,14 +142,12 @@ fi
check=$(git --version)
if [ "$check" == "" ]; then
error=$((1))
"echo" "-e" "Error: git is not found!"
"echo" "-e" "help: an installation guide is available at https://git-scm.com/book/en/v2/Getting-Started-Installing-Git"
"echo" "-e" "Error: git is missing"
fi
check=$(node -v)
if [ "$check" == "" ]; then
error=$((1))
"echo" "-e" "Error: node is not found"
"echo" "-e" "help: an installation guide is available at https://nodejs.org/en/download"
"echo" "-e" "Error: node is missing"
fi
if [ $(($error > 0)) == 1 ]; then
"echo" "-e" "Please install above missing software"
@@ -177,13 +161,6 @@ fi
"echo" "-e" "Installing PM2"
npm install pm2 -g && pm2 install pm2-logrotate
pm2 startup
fi
# Check again
check=$(pm2 --version)
if [ "$check" == "" ]; then
"echo" "-e" "Error: pm2 is not found!"
"echo" "-e" "help: an installation guide is available at https://pm2.keymetrics.io/docs/usage/quick-start/"
exit 1
fi
mkdir -p $installPath
cd $installPath
@@ -195,13 +172,11 @@ else
check=$(docker -v)
if [ "$check" == "" ]; then
"echo" "-e" "Error: docker is not found!"
"echo" "-e" "help: an installation guide is available at https://docs.docker.com/desktop/"
exit 1
fi
check=$(docker info)
if [[ "$check" == *"Is the docker daemon running"* ]]; then
"echo" "Error: docker is not running"
"echo" "help: a troubleshooting guide is available at https://docs.docker.com/config/daemon/troubleshoot/"
"exit" "1"
fi
if [ "$3" != "" ]; then

package-lock.json (generated, 29441 changed lines)

File diff suppressed because it is too large.

View File

@@ -1,27 +1,24 @@
{
"name": "uptime-kuma",
"version": "1.23.15",
"version": "1.20.0-beta.0",
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/louislam/uptime-kuma.git"
},
"engines": {
"node": "14 || 16 || 18 || >= 20.4.0"
"node": "14.* || >=16.*"
},
"scripts": {
"install-legacy": "npm install",
"update-legacy": "npm update",
"lint:js": "eslint --ext \".js,.vue\" --ignore-path .gitignore .",
"lint:js-prod": "npm run lint:js -- --max-warnings 0",
"lint-fix:js": "eslint --ext \".js,.vue\" --fix --ignore-path .gitignore .",
"lint:style": "stylelint \"**/*.{vue,css,scss}\" --ignore-path .gitignore",
"lint-fix:style": "stylelint \"**/*.{vue,css,scss}\" --fix --ignore-path .gitignore",
"lint": "npm run lint:js && npm run lint:style",
"lint:prod": "npm run lint:js-prod && npm run lint:style",
"dev": "concurrently -k -r \"wait-on tcp:3000 && npm run start-server-dev \" \"npm run start-frontend-dev\"",
"start-frontend-dev": "cross-env NODE_ENV=development vite --host --config ./config/vite.config.js",
"start-frontend-devcontainer": "cross-env NODE_ENV=development DEVCONTAINER=1 vite --host --config ./config/vite.config.js",
"start": "npm run start-server",
"start-server": "node server/server.js",
"start-server-dev": "cross-env NODE_ENV=development node server/server.js",
@@ -37,34 +34,28 @@
"build-docker-builder-go": "docker buildx build -f docker/builder-go.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:builder-go . --push",
"build-docker-alpine": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:alpine -t louislam/uptime-kuma:1-alpine -t louislam/uptime-kuma:$VERSION-alpine --target release . --push",
"build-docker-debian": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma -t louislam/uptime-kuma:1 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:debian -t louislam/uptime-kuma:1-debian -t louislam/uptime-kuma:$VERSION-debian --target release . --push",
"build-docker-nightly": "node ./extra/test-docker.js && npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly --target nightly . --push",
"build-docker-nightly": "npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly --target nightly . --push",
"build-docker-nightly-alpine": "docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly-alpine --target nightly . --push",
"build-docker-nightly-amd64": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:nightly-amd64 --target nightly . --push --progress plain",
"build-docker-pr-test": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64 -t louislam/uptime-kuma:pr-test --target pr-test . --push",
"upload-artifacts": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:upload-artifact --build-arg VERSION --build-arg GITHUB_TOKEN --target upload-artifact . --progress plain",
"setup": "git checkout 1.23.15 && npm ci --production && npm run download-dist",
"setup": "git checkout 1.19.6 && npm ci --production && npm run download-dist",
"download-dist": "node extra/download-dist.js",
"mark-as-nightly": "node extra/mark-as-nightly.js",
"reset-password": "node extra/reset-password.js",
"remove-2fa": "node extra/remove-2fa.js",
"compile-install-script": "@powershell -NoProfile -ExecutionPolicy Unrestricted -Command ./extra/compile-install-script.ps1",
"test-install-script-rockylinux": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/rockylinux.dockerfile .",
"test-install-script-centos7": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/centos7.dockerfile .",
"test-install-script-alpine3": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/alpine3.dockerfile .",
"test-install-script-debian": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/debian.dockerfile .",
"test-install-script-debian-buster": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/debian-buster.dockerfile .",
"test-install-script-ubuntu": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu.dockerfile .",
"test-install-script-ubuntu1804": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu1804.dockerfile .",
"test-install-script-ubuntu1604": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu1604.dockerfile .",
"test-nodejs16": "docker build --progress plain -f test/ubuntu-nodejs16.dockerfile .",
"simple-dns-server": "node extra/simple-dns-server.js",
"simple-mqtt-server": "node extra/simple-mqtt-server.js",
"simple-mongo": "docker run --rm -p 27017:27017 mongo",
"simple-postgres": "docker run --rm -p 5432:5432 -e POSTGRES_PASSWORD=postgres postgres",
"simple-mariadb": "docker run --rm -p 3306:3306 -e MYSQL_ROOT_PASSWORD=mariadb# mariadb",
"update-language-files": "cd extra/update-language-files && node index.js && cross-env-shell eslint ../../src/languages/$npm_config_language.js --fix",
"ncu-patch": "npm-check-updates -u -t patch",
"release-final": "node ./extra/test-docker.js && node extra/update-version.js && npm run build-docker && node ./extra/press-any-key.js && npm run upload-artifacts && node ./extra/update-wiki-version.js",
"release-beta": "node ./extra/test-docker.js && node extra/beta/update-version.js && npm run build && node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:beta . --target release --push && node ./extra/press-any-key.js && npm run upload-artifacts",
"release-final": "node extra/update-version.js && npm run build-docker && node ./extra/press-any-key.js && npm run upload-artifacts && node ./extra/update-wiki-version.js",
"release-beta": "node extra/beta/update-version.js && npm run build && node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:beta . --target release --push && node ./extra/press-any-key.js && npm run upload-artifacts",
"git-remove-tag": "git tag -d",
"build-dist-and-restart": "npm run build && npm run start-server-dev",
"start-pr-test": "node extra/checkout-pr.js && npm install && npm run dev",
@@ -72,83 +63,67 @@
"cy:run": "npx cypress run --browser chrome --headless --config-file ./config/cypress.config.js",
"cy:run:unit": "npx cypress run --browser chrome --headless --config-file ./config/cypress.frontend.config.js",
"cypress-open": "concurrently -k -r \"node test/prepare-test-server.js && node server/server.js --port=3002 --data-dir=./data/test/\" \"cypress open --config-file ./config/cypress.config.js\"",
"build-healthcheck-armv7": "cross-env GOOS=linux GOARCH=arm GOARM=7 go build -x -o ./extra/healthcheck-armv7 ./extra/healthcheck.go",
"deploy-demo-server": "node extra/deploy-demo-server.js",
"sort-contributors": "node extra/sort-contributors.js",
"start-server-node14-win": "private\\node14\\node.exe server/server.js"
"build-healthcheck-armv7": "cross-env GOOS=linux GOARCH=arm GOARM=7 go build -x -o ./extra/healthcheck-armv7 ./extra/healthcheck.go"
},
"dependencies": {
"@grpc/grpc-js": "~1.8.22",
"@louislam/ping": "~0.4.4-mod.1",
"@louislam/sqlite3": "15.1.6",
"@grpc/grpc-js": "~1.7.3",
"@louislam/ping": "~0.4.2-mod.1",
"@louislam/sqlite3": "15.1.2",
"args-parser": "~1.3.0",
"axios": "~0.28.1",
"axios": "~0.27.0",
"axios-ntlm": "1.3.0",
"badge-maker": "~3.3.1",
"bcryptjs": "~2.4.3",
"bree": "~7.1.5",
"cacheable-lookup": "~6.0.4",
"chardet": "~1.4.0",
"check-password-strength": "^2.0.5",
"cheerio": "1.0.0-rc.12",
"cheerio": "~1.0.0-rc.12",
"chroma-js": "~2.4.2",
"command-exists": "~1.2.9",
"compare-versions": "~3.6.0",
"compression": "~1.7.4",
"croner": "~6.0.5",
"dayjs": "~1.11.5",
"dotenv": "~16.0.3",
"express": "~4.21.0",
"express": "~4.17.3",
"express-basic-auth": "~1.2.1",
"express-static-gzip": "~2.1.7",
"form-data": "~4.0.0",
"gamedig": "^4.2.0",
"html-escaper": "^3.0.3",
"gamedig": "^4.0.5",
"http-graceful-shutdown": "~3.1.7",
"http-proxy-agent": "~5.0.0",
"https-proxy-agent": "~5.0.1",
"iconv-lite": "~0.6.3",
"isomorphic-ws": "^5.0.0",
"jsesc": "~3.0.2",
"jsonata": "^2.0.3",
"jsonwebtoken": "~9.0.0",
"jwt-decode": "~3.1.2",
"kafkajs": "^2.2.4",
"limiter": "~2.1.0",
"liquidjs": "^10.7.0",
"mongodb": "~4.17.1",
"mongodb": "~4.13.0",
"mqtt": "~4.3.7",
"mssql": "~8.1.4",
"mysql2": "~3.9.6",
"nanoid": "~3.3.4",
"mysql2": "~2.3.3",
"node-cloudflared-tunnel": "~1.0.9",
"node-radius-client": "~1.0.0",
"nodemailer": "~6.9.13",
"nostr-tools": "^1.13.1",
"nodemailer": "~6.6.5",
"notp": "~2.0.3",
"openid-client": "^5.4.2",
"password-hash": "~1.2.2",
"pg": "~8.11.3",
"pg-connection-string": "~2.6.2",
"playwright-core": "~1.35.1",
"pg": "~8.8.0",
"pg-connection-string": "~2.5.0",
"prom-client": "~13.2.0",
"prometheus-api-metrics": "~3.2.1",
"promisify-child-process": "~4.1.2",
"protobufjs": "~7.2.4",
"qs": "~6.10.4",
"redbean-node": "~0.3.0",
"protobufjs": "~7.1.1",
"redbean-node": "~0.2.0",
"redis": "~4.5.1",
"semver": "~7.5.4",
"socket.io": "~4.8.0",
"socket.io-client": "~4.8.0",
"socket.io": "~4.5.3",
"socket.io-client": "~4.5.3",
"socks-proxy-agent": "6.1.1",
"tar": "~6.2.1",
"tar": "~6.1.11",
"tcp-ping": "~0.1.1",
"thirty-two": "~1.0.2",
"ws": "^8.13.0"
"thirty-two": "~1.0.2"
},
"devDependencies": {
"@actions/github": "~5.1.1",
"@babel/eslint-parser": "^7.22.7",
"@actions/github": "~5.0.1",
"@babel/eslint-parser": "~7.17.0",
"@babel/preset-env": "^7.15.8",
"@fortawesome/fontawesome-svg-core": "~1.2.36",
"@fortawesome/free-regular-svg-icons": "~5.15.4",
@@ -156,28 +131,27 @@
"@fortawesome/vue-fontawesome": "~3.0.0-5",
"@popperjs/core": "~2.10.2",
"@types/bootstrap": "~5.1.9",
"@vitejs/plugin-legacy": "~4.1.0",
"@vitejs/plugin-vue": "~4.2.3",
"@vue/compiler-sfc": "~3.3.4",
"@vitejs/plugin-legacy": "~2.1.0",
"@vitejs/plugin-vue": "~3.1.0",
"@vue/compiler-sfc": "~3.2.36",
"@vuepic/vue-datepicker": "~3.4.8",
"aedes": "^0.46.3",
"babel-plugin-rewire": "~1.2.0",
"bootstrap": "5.1.3",
"chart.js": "~4.2.1",
"chartjs-adapter-dayjs-4": "~1.0.4",
"chart.js": "~3.6.2",
"chartjs-adapter-dayjs": "~1.0.0",
"concurrently": "^7.1.0",
"core-js": "~3.26.1",
"cronstrue": "~2.24.0",
"cross-env": "~7.0.3",
"cypress": "^13.2.0",
"cypress": "^10.1.0",
"delay": "^5.0.0",
"dns2": "~2.0.1",
"dompurify": "~3.1.7",
"dompurify": "~2.4.3",
"eslint": "~8.14.0",
"eslint-plugin-vue": "~8.7.1",
"favico.js": "~0.3.10",
"jest": "~29.6.1",
"marked": "~4.2.5",
"node-ssh": "~13.1.0",
"jest": "~27.2.5",
"postcss-html": "~1.5.0",
"postcss-rtlcss": "~3.7.2",
"postcss-scss": "~4.0.4",
@@ -185,16 +159,16 @@
"qrcode": "~1.5.0",
"rollup-plugin-visualizer": "^5.6.0",
"sass": "~1.42.1",
"stylelint": "^15.10.1",
"stylelint": "~14.7.1",
"stylelint-config-standard": "~25.0.0",
"terser": "~5.15.0",
"timezones-list": "~3.0.1",
"typescript": "~4.4.4",
"v-pagination-3": "~0.1.7",
"vite": "~5.2.8",
"vite": "~3.1.0",
"vite-plugin-compression": "^0.5.1",
"vue": "~3.3.4",
"vue-chartjs": "~5.2.0",
"vue": "next",
"vue-chart-3": "3.0.9",
"vue-confirm-dialog": "~1.0.2",
"vue-contenteditable": "~3.0.4",
"vue-i18n": "~9.2.2",
@@ -205,7 +179,6 @@
"vue-router": "~4.0.14",
"vue-toastification": "~2.0.0-rc.5",
"vuedraggable": "~4.1.0",
"wait-on": "^7.2.0",
"whatwg-url": "~12.0.1"
"wait-on": "^6.0.1"
}
}
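
Either engines range above can be sanity-checked with the semver package that already appears in the dependency list; the sketch below uses the `14 || 16 || 18 || >= 20.4.0` form, and the Node versions tested are arbitrary examples:

const semver = require("semver");

const range = "14 || 16 || 18 || >= 20.4.0";
console.log(semver.satisfies("18.19.0", range)); // true  (any 18.x is accepted)
console.log(semver.satisfies("20.3.0", range));  // false (20.x is only accepted from 20.4.0)
console.log(semver.satisfies("20.11.1", range)); // true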

View File

@@ -1,9 +1,10 @@
<svg width="640" height="640" viewBox="0 0 640 640" xml:space="preserve" xmlns="http://www.w3.org/2000/svg">
<g transform="matrix(1 0 0 1 320 320)">
<linearGradient id="S3" gradientUnits="userSpaceOnUse" gradientTransform="matrix(1 0 0 1 -319.99875 -320.0001577393)" x1="259.78" y1="261.15" x2="463.85" y2="456.49">
<svg width="640" height="640" viewBox="0 0 640 640" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M490.4 235.64C544.09 358.38 544.09 435.34 490.4 466.5C409.85 513.24 199.96 527.49 139.54 455.64C99.2601 407.74 99.2601 334.4 139.54 235.64C180.5 168.18 238.71 134.45 314.17 134.45C389.64 134.45 448.38 168.18 490.4 235.64Z" fill="url(#paint0_linear_381_799)"/>
<path d="M490.4 235.64C544.09 358.38 544.09 435.34 490.4 466.5C409.85 513.24 199.96 527.49 139.54 455.64C99.2601 407.74 99.2601 334.4 139.54 235.64C180.5 168.18 238.71 134.45 314.17 134.45C389.64 134.45 448.38 168.18 490.4 235.64Z" stroke="#F2F2F2" stroke-opacity="0.51" stroke-width="200"/>
<defs>
<linearGradient id="paint0_linear_381_799" x1="259.78" y1="261.15" x2="463.85" y2="456.49" gradientUnits="userSpaceOnUse">
<stop stop-color="#5CDD8B"/>
<stop offset="1" stop-color="#86E6A9"/>
</linearGradient>
<path style="stroke: rgb(242,242,242); stroke-opacity: 0.51; stroke-width: 200; stroke-dasharray: none; stroke-linecap: butt; stroke-dashoffset: 0; stroke-linejoin: miter; stroke-miterlimit: 4; fill: url(#S3); fill-rule: nonzero; opacity: 1;" transform=" translate(0, 0)" d="M 170.40125 -84.36016 C 224.09125 38.37984 224.09125 115.33984 170.40125 146.49984 C 89.85125000000001 193.23984000000002 -120.03875 207.48984000000002 -180.45875 135.63984 C -220.73875 87.73983999999999 -220.73875 14.399839999999998 -180.45875 -84.36016000000001 C -139.49875 -151.82016 -81.28875000000001 -185.55016 -5.828750000000014 -185.55016 C 69.64124999999999 -185.55016 128.38125 -151.82016000000002 170.40124999999998 -84.36016000000001 z" stroke-linecap="round" />
</g>
</defs>
</svg>

Before: 1.1 KiB | After: 893 B

View File

@@ -2,10 +2,7 @@ const basicAuth = require("express-basic-auth");
const passwordHash = require("./password-hash");
const { R } = require("redbean-node");
const { setting } = require("./util-server");
const { log } = require("../src/util");
const { loginRateLimiter, apiRateLimiter } = require("./rate-limiter");
const { Settings } = require("./settings");
const dayjs = require("dayjs");
const { loginRateLimiter } = require("./rate-limiter");
/**
* Login to web app
@@ -37,36 +34,8 @@ exports.login = async function (username, password) {
};
/**
* Validate a provided API key
* @param {string} key API key to verify
*/
async function verifyAPIKey(key) {
if (typeof key !== "string") {
return false;
}
// uk prefix + key ID is before _
let index = key.substring(2, key.indexOf("_"));
let clear = key.substring(key.indexOf("_") + 1, key.length);
let hash = await R.findOne("api_key", " id=? ", [ index ]);
if (hash === null) {
return false;
}
let current = dayjs();
let expiry = dayjs(hash.expires);
if (expiry.diff(current) < 0 || !hash.active) {
return false;
}
return hash && passwordHash.verify(clear, hash.key);
}
/**
* Callback for basic auth authorizers
* @callback authCallback
* Callback for myAuthorizer
* @callback myAuthorizerCB
* @param {any} err Any error encountered
* @param {boolean} authorized Is the client authorized?
*/
@@ -75,35 +44,9 @@ async function verifyAPIKey(key) {
* Custom authorizer for express-basic-auth
* @param {string} username
* @param {string} password
* @param {authCallback} callback
* @param {myAuthorizerCB} callback
*/
function apiAuthorizer(username, password, callback) {
// API Rate Limit
apiRateLimiter.pass(null, 0).then((pass) => {
if (pass) {
verifyAPIKey(password).then((valid) => {
if (!valid) {
log.warn("api-auth", "Failed API auth attempt: invalid API Key");
}
callback(null, valid);
// Only allow a set number of api requests per minute
// (currently set to 60)
apiRateLimiter.removeTokens(1);
});
} else {
log.warn("api-auth", "Failed API auth attempt: rate limit exceeded");
callback(null, false);
}
});
}
/**
* Custom authorizer for express-basic-auth
* @param {string} username
* @param {string} password
* @param {authCallback} callback
*/
function userAuthorizer(username, password, callback) {
function myAuthorizer(username, password, callback) {
// Login Rate Limit
loginRateLimiter.pass(null, 0).then((pass) => {
if (pass) {
@@ -111,12 +54,10 @@ function userAuthorizer(username, password, callback) {
callback(null, user != null);
if (user == null) {
log.warn("basic-auth", "Failed basic auth attempt: invalid username/password");
loginRateLimiter.removeTokens(1);
}
});
} else {
log.warn("basic-auth", "Failed basic auth attempt: rate limit exceeded");
callback(null, false);
}
});
@@ -130,7 +71,7 @@ function userAuthorizer(username, password, callback) {
*/
exports.basicAuth = async function (req, res, next) {
const middleware = basicAuth({
authorizer: userAuthorizer,
authorizer: myAuthorizer,
authorizeAsync: true,
challenge: true,
});
@@ -143,32 +84,3 @@ exports.basicAuth = async function (req, res, next) {
next();
}
};
/**
* Use API key auth if API keys are enabled, else use basic auth
* @param {express.Request} req Express request object
* @param {express.Response} res Express response object
* @param {express.NextFunction} next
*/
exports.apiAuth = async function (req, res, next) {
if (!await Settings.get("disableAuth")) {
let usingAPIKeys = await Settings.get("apiKeysEnabled");
let middleware;
if (usingAPIKeys) {
middleware = basicAuth({
authorizer: apiAuthorizer,
authorizeAsync: true,
challenge: true,
});
} else {
middleware = basicAuth({
authorizer: userAuthorizer,
authorizeAsync: true,
challenge: true,
});
}
middleware(req, res, next);
} else {
next();
}
};
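
For reference, a tiny sketch of how a key in the format verifyAPIKey expects ("uk" prefix, numeric key ID, underscore, then the clear secret) is split apart; the sample key value is invented for illustration:

// Hypothetical key: "uk" + key ID "3" + "_" + clear secret.
const key = "uk3_supersecretvalue";

const index = key.substring(2, key.indexOf("_"));   // "3" -> row id looked up in the api_key table
const clear = key.substring(key.indexOf("_") + 1);  // "supersecretvalue" -> verified against the stored hash
console.log(index, clear);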

View File

@@ -1,33 +1,27 @@
const { setSetting, setting } = require("./util-server");
const axios = require("axios");
const compareVersions = require("compare-versions");
const { log } = require("../src/util");
exports.version = require("../package.json").version;
exports.latestVersion = null;
// How much time in ms to wait between update checks
const UPDATE_CHECKER_INTERVAL_MS = 1000 * 60 * 60 * 48;
const UPDATE_CHECKER_LATEST_VERSION_URL = "https://uptime.kuma.pet/version";
let interval;
/** Start 48 hour check interval */
exports.startInterval = () => {
let check = async () => {
if (await setting("checkUpdate") === false) {
return;
}
log.debug("update-checker", "Retrieving latest versions");
try {
const res = await axios.get(UPDATE_CHECKER_LATEST_VERSION_URL);
const res = await axios.get("https://uptime.kuma.pet/version");
// For debug
if (process.env.TEST_CHECK_VERSION === "1") {
res.data.slow = "1000.0.0";
}
if (await setting("checkUpdate") === false) {
return;
}
let checkBeta = await setting("checkBeta");
if (checkBeta && res.data.beta) {
@@ -41,14 +35,12 @@ exports.startInterval = () => {
exports.latestVersion = res.data.slow;
}
} catch (_) {
log.info("update-checker", "Failed to check for new versions");
}
} catch (_) { }
};
check();
interval = setInterval(check, UPDATE_CHECKER_INTERVAL_MS);
interval = setInterval(check, 3600 * 1000 * 48);
};
/**

View File

@@ -113,52 +113,15 @@ async function sendProxyList(socket) {
return list;
}
/**
* Emit API key list to client
* @param {Socket} socket Socket.io socket instance
* @returns {Promise<void>}
*/
async function sendAPIKeyList(socket) {
const timeLogger = new TimeLogger();
let result = [];
const list = await R.find(
"api_key",
"user_id=?",
[ socket.userID ],
);
for (let bean of list) {
result.push(bean.toPublicJSON());
}
io.to(socket.userID).emit("apiKeyList", result);
timeLogger.print("Sent API Key List");
return list;
}
/**
* Emits the version information to the client.
* @param {Socket} socket Socket.io socket instance
* @param {boolean} hideVersion
* @returns {Promise<void>}
*/
async function sendInfo(socket, hideVersion = false) {
let version;
let latestVersion;
let isContainer;
if (!hideVersion) {
version = checkVersion.version;
latestVersion = checkVersion.latestVersion;
isContainer = (process.env.UPTIME_KUMA_IS_CONTAINER === "1");
}
async function sendInfo(socket) {
socket.emit("info", {
version,
latestVersion,
isContainer,
version: checkVersion.version,
latestVersion: checkVersion.latestVersion,
primaryBaseURL: await setting("primaryBaseURL"),
serverTimezone: await server.getTimezone(),
serverTimezoneOffset: server.getTimezoneOffset(),
@@ -194,7 +157,6 @@ module.exports = {
sendImportantHeartbeatList,
sendHeartbeatList,
sendProxyList,
sendAPIKeyList,
sendInfo,
sendDockerHostList
};

View File

@@ -1,42 +1,28 @@
const isFreeBSD = /^freebsd/.test(process.platform);
// Interop with browser
const args = (typeof process !== "undefined") ? require("args-parser")(process.argv) : {};
// If host is omitted, the server will accept connections on the unspecified IPv6 address (::) when IPv6 is available and the unspecified IPv4 address (0.0.0.0) otherwise.
// Dual-stack support for (::)
// Also read HOST if not FreeBSD, as HOST is a system environment variable in FreeBSD
let hostEnv = isFreeBSD ? null : process.env.HOST;
const hostname = args.host || process.env.UPTIME_KUMA_HOST || hostEnv;
const port = [ args.port, process.env.UPTIME_KUMA_PORT, process.env.PORT, 3001 ]
.map(portValue => parseInt(portValue))
.find(portValue => !isNaN(portValue));
const sslKey = args["ssl-key"] || process.env.UPTIME_KUMA_SSL_KEY || process.env.SSL_KEY || undefined;
const sslCert = args["ssl-cert"] || process.env.UPTIME_KUMA_SSL_CERT || process.env.SSL_CERT || undefined;
const sslKeyPassphrase = args["ssl-key-passphrase"] || process.env.UPTIME_KUMA_SSL_KEY_PASSPHRASE || process.env.SSL_KEY_PASSPHRASE || undefined;
const isSSL = sslKey && sslCert;
function getLocalWebSocketURL() {
const protocol = isSSL ? "wss" : "ws";
const host = hostname || "localhost";
return `${protocol}://${host}:${port}`;
}
const localWebSocketURL = getLocalWebSocketURL();
const args = require("args-parser")(process.argv);
const demoMode = args["demo"] || false;
const badgeConstants = {
naColor: "#999",
defaultUpColor: "#66c20a",
defaultWarnColor: "#eed202",
defaultDownColor: "#c2290a",
defaultPendingColor: "#f8a306",
defaultMaintenanceColor: "#1747f5",
defaultPingColor: "blue", // as defined by badge-maker / shields.io
defaultStyle: "flat",
defaultPingValueSuffix: "ms",
defaultPingLabelSuffix: "h",
defaultUptimeValueSuffix: "%",
defaultUptimeLabelSuffix: "h",
defaultCertExpValueSuffix: " days",
defaultCertExpLabelSuffix: "h",
// Values Come From Default Notification Times
defaultCertExpireWarnDays: "14",
defaultCertExpireDownDays: "7"
};
module.exports = {
args,
hostname,
port,
sslKey,
sslCert,
sslKeyPassphrase,
isSSL,
localWebSocketURL,
demoMode,
badgeConstants,
};
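
A minimal usage sketch, assuming the fuller variant above that exports isSSL and that this file sits at the repository root; the request handler and createServer wiring are illustrative, not quoted from server.js:

// Sketch only.
const fs = require("fs");
const http = require("http");
const https = require("https");
const { hostname, port, isSSL, sslKey, sslCert } = require("./server/config");

const handler = (req, res) => res.end("ok");
const server = isSSL
    ? https.createServer({ key: fs.readFileSync(sslKey), cert: fs.readFileSync(sslCert) }, handler)
    : http.createServer(handler);

// With hostname undefined, Node listens on :: (or 0.0.0.0 without IPv6), as the comment above describes.
server.listen(port, hostname);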

View File

@@ -2,8 +2,9 @@ const fs = require("fs");
const { R } = require("redbean-node");
const { setSetting, setting } = require("./util-server");
const { log, sleep } = require("../src/util");
const dayjs = require("dayjs");
const knex = require("knex");
const path = require("path");
const { PluginsManager } = require("./plugins-manager");
/**
* Database & App Data Folder
@@ -22,17 +23,18 @@ class Database {
*/
static uploadDir;
static screenshotDir;
static path;
static dockerTLSDir;
/**
* @type {boolean}
*/
static patched = false;
/**
* For Backup only
*/
static backupPath = null;
/**
* Add patch filename in key
* Values:
@@ -68,23 +70,6 @@ class Database {
"patch-maintenance-table2.sql": true,
"patch-add-gamedig-monitor.sql": true,
"patch-add-google-analytics-status-page-tag.sql": true,
"patch-http-body-encoding.sql": true,
"patch-add-description-monitor.sql": true,
"patch-api-key-table.sql": true,
"patch-monitor-tls.sql": true,
"patch-maintenance-cron.sql": true,
"patch-add-parent-monitor.sql": true,
"patch-add-invert-keyword.sql": true,
"patch-added-json-query.sql": true,
"patch-added-kafka-producer.sql": true,
"patch-add-certificate-expiry-status-page.sql": true,
"patch-monitor-oauth-cc.sql": true,
"patch-add-timeout-monitor.sql": true,
"patch-add-gamedig-given-port.sql": true,
"patch-notification-config.sql": true,
"patch-fix-kafka-producer-booleans.sql": true,
"patch-timeout.sql": true,
"patch-monitor-tls-info-add-fk.sql": true,
};
/**
@@ -103,28 +88,23 @@ class Database {
// Data Directory (must be end with "/")
Database.dataDir = process.env.DATA_DIR || args["data-dir"] || "./data/";
Database.path = path.join(Database.dataDir, "kuma.db");
// The plugin feature only works if dataDir is the default "./data/"
if (Database.dataDir !== "./data/") {
log.warn("PLUGIN", "Warning: In order to enable plugin feature, you need to use the default data directory: ./data/");
PluginsManager.disable = true;
}
Database.path = Database.dataDir + "kuma.db";
if (! fs.existsSync(Database.dataDir)) {
fs.mkdirSync(Database.dataDir, { recursive: true });
}
Database.uploadDir = path.join(Database.dataDir, "upload/");
Database.uploadDir = Database.dataDir + "upload/";
if (! fs.existsSync(Database.uploadDir)) {
fs.mkdirSync(Database.uploadDir, { recursive: true });
}
// Create screenshot dir
Database.screenshotDir = path.join(Database.dataDir, "screenshots/");
if (! fs.existsSync(Database.screenshotDir)) {
fs.mkdirSync(Database.screenshotDir, { recursive: true });
}
Database.dockerTLSDir = path.join(Database.dataDir, "docker-tls/");
if (! fs.existsSync(Database.dockerTLSDir)) {
fs.mkdirSync(Database.dockerTLSDir, { recursive: true });
}
log.info("db", `Data Dir: ${Database.dataDir}`);
}
@@ -181,12 +161,12 @@ class Database {
await R.exec("PRAGMA journal_mode = WAL");
}
await R.exec("PRAGMA cache_size = -12000");
await R.exec("PRAGMA auto_vacuum = INCREMENTAL");
await R.exec("PRAGMA auto_vacuum = FULL");
// This ensures that an operating system crash or power failure will not corrupt the database.
// FULL synchronous is very safe, but it is also slower.
// Read more: https://sqlite.org/pragma.html#pragma_synchronous
await R.exec("PRAGMA synchronous = NORMAL");
await R.exec("PRAGMA synchronous = FULL");
if (!noLog) {
log.info("db", "SQLite config:");
@@ -214,7 +194,15 @@ class Database {
} else {
log.info("db", "Database patch is needed");
// Try catch anything here
try {
this.backup(version);
} catch (e) {
log.error("db", e);
log.error("db", "Unable to create a backup before patching the database. Please make sure you have enough space and permission.");
process.exit(1);
}
// Try catch anything here, if gone wrong, restore the backup
try {
for (let i = version + 1; i <= this.latestVersion; i++) {
const sqlFile = `./db/patch${i}.sql`;
@@ -230,6 +218,7 @@ class Database {
log.error("db", "Start Uptime-Kuma failed due to issue patching the database");
log.error("db", "Please submit a bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
this.restore();
process.exit(1);
}
}
@@ -271,6 +260,8 @@ class Database {
log.error("db", "Start Uptime-Kuma failed due to issue patching the database");
log.error("db", "Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
this.restore();
process.exit(1);
}
@@ -372,6 +363,8 @@ class Database {
}
}
this.backup(dayjs().format("YYYYMMDDHHmmss"));
log.info("db", sqlFilename + " is patching");
this.patched = true;
await this.importSQLFile("./db/" + sqlFilename);
@@ -437,9 +430,6 @@ class Database {
log.info("db", "Closing the database");
// Flush WAL to main database
await R.exec("PRAGMA wal_checkpoint(TRUNCATE)");
while (true) {
Database.noReject = true;
await R.close();
@@ -456,6 +446,90 @@ class Database {
process.removeListener("unhandledRejection", listener);
}
/**
* Create at most one backup per process.
* Reset this.backupPath if you want to back up again
* @param {string} version Version code of backup
*/
static backup(version) {
if (! this.backupPath) {
log.info("db", "Backing up the database");
this.backupPath = this.dataDir + "kuma.db.bak" + version;
fs.copyFileSync(Database.path, this.backupPath);
const shmPath = Database.path + "-shm";
if (fs.existsSync(shmPath)) {
this.backupShmPath = shmPath + ".bak" + version;
fs.copyFileSync(shmPath, this.backupShmPath);
}
const walPath = Database.path + "-wal";
if (fs.existsSync(walPath)) {
this.backupWalPath = walPath + ".bak" + version;
fs.copyFileSync(walPath, this.backupWalPath);
}
// Double-check that all files were actually backed up
if (!fs.existsSync(this.backupPath)) {
throw new Error("Backup failed! " + this.backupPath);
}
if (fs.existsSync(shmPath)) {
if (!fs.existsSync(this.backupShmPath)) {
throw new Error("Backup failed! " + this.backupShmPath);
}
}
if (fs.existsSync(walPath)) {
if (!fs.existsSync(this.backupWalPath)) {
throw new Error("Backup failed! " + this.backupWalPath);
}
}
}
}
/** Restore from most recent backup */
static restore() {
if (this.backupPath) {
log.error("db", "Patching the database failed!!! Restoring the backup");
const shmPath = Database.path + "-shm";
const walPath = Database.path + "-wal";
// Delete patch failed db
try {
if (fs.existsSync(Database.path)) {
fs.unlinkSync(Database.path);
}
if (fs.existsSync(shmPath)) {
fs.unlinkSync(shmPath);
}
if (fs.existsSync(walPath)) {
fs.unlinkSync(walPath);
}
} catch (e) {
log.error("db", "Restore failed; you may need to restore the backup manually");
process.exit(1);
}
// Restore backup
fs.copyFileSync(this.backupPath, Database.path);
if (this.backupShmPath) {
fs.copyFileSync(this.backupShmPath, shmPath);
}
if (this.backupWalPath) {
fs.copyFileSync(this.backupWalPath, walPath);
}
} else {
log.info("db", "Nothing to restore");
}
}
/** Get the size of the database */
static getSize() {
log.debug("db", "Database.getSize()");

View File

@@ -2,16 +2,8 @@ const axios = require("axios");
const { R } = require("redbean-node");
const version = require("../package.json").version;
const https = require("https");
const fs = require("fs");
const path = require("path");
const Database = require("./database");
class DockerHost {
static CertificateFileNameCA = "ca.pem";
static CertificateFileNameCert = "cert.pem";
static CertificateFileNameKey = "key.pem";
/**
* Save a docker host
* @param {Object} dockerHost Docker host to save
@@ -74,13 +66,16 @@ class DockerHost {
"Accept": "*/*",
"User-Agent": "Uptime-Kuma/" + version
},
httpsAgent: new https.Agent({
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
rejectUnauthorized: false,
}),
};
if (dockerHost.dockerType === "socket") {
options.socketPath = dockerHost.dockerDaemon;
} else if (dockerHost.dockerType === "tcp") {
options.baseURL = DockerHost.patchDockerURL(dockerHost.dockerDaemon);
options.httpsAgent = new https.Agent(DockerHost.getHttpsAgentOptions(dockerHost.dockerType, options.baseURL));
}
let res = await axios.request(options);
@@ -116,53 +111,6 @@ class DockerHost {
}
return url;
}
/**
* Returns HTTPS agent options with client side TLS parameters if certificate files
* for the given host are available under a predefined directory path.
*
* The base path where certificates are looked for can be set with the
* 'DOCKER_TLS_DIR_PATH' environment variable or defaults to 'data/docker-tls/'.
*
* If a directory in this path exists with a name matching the FQDN of the docker host
* (e.g. the FQDN of 'https://example.com:2376' is 'example.com' so the directory
* 'data/docker-tls/example.com/' would be searched for certificate files),
* then 'ca.pem', 'key.pem' and 'cert.pem' files are included in the agent options.
* File names can also be overridden via 'DOCKER_TLS_FILE_NAME_(CA|KEY|CERT)'.
*
* @param {String} dockerType i.e. "tcp" or "socket"
* @param {String} url The docker host URL rewritten to https://
* @return {Object}
* */
static getHttpsAgentOptions(dockerType, url) {
let baseOptions = {
maxCachedSessions: 0,
rejectUnauthorized: true
};
let certOptions = {};
let dirName = (new URL(url)).hostname;
let caPath = path.join(Database.dockerTLSDir, dirName, DockerHost.CertificateFileNameCA);
let certPath = path.join(Database.dockerTLSDir, dirName, DockerHost.CertificateFileNameCert);
let keyPath = path.join(Database.dockerTLSDir, dirName, DockerHost.CertificateFileNameKey);
if (dockerType === "tcp" && fs.existsSync(caPath) && fs.existsSync(certPath) && fs.existsSync(keyPath)) {
let ca = fs.readFileSync(caPath);
let key = fs.readFileSync(keyPath);
let cert = fs.readFileSync(certPath);
certOptions = {
ca,
key,
cert
};
}
return {
...baseOptions,
...certOptions
};
}
}
module.exports = {

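A short usage sketch of the getHttpsAgentOptions helper above (the host name, require path and export shape are assumptions):

const { DockerHost } = require("./server/docker");

// With data/docker-tls/example.com/ca.pem, cert.pem and key.pem in place,
// a "tcp" docker host gets client-certificate agent options; otherwise only the defaults apply.
const agentOptions = DockerHost.getHttpsAgentOptions("tcp", "https://example.com:2376");
// -> { maxCachedSessions: 0, rejectUnauthorized: true, ca, key, cert }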
server/git.js Normal file
View File

@@ -0,0 +1,24 @@
const childProcess = require("child_process");
class Git {
static clone(repoURL, cwd, targetDir = ".") {
let result = childProcess.spawnSync("git", [
"clone",
repoURL,
targetDir,
], {
cwd: cwd,
});
if (result.status !== 0) {
throw new Error(result.stderr.toString("utf-8"));
} else {
return result.stdout.toString("utf-8") + result.stderr.toString("utf-8");
}
}
}
module.exports = {
Git,
};
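A minimal usage sketch of the Git helper above (repository URL and paths are illustrative):

const { Git } = require("./server/git");

// Clones into /tmp/deps/my-repo and returns the combined stdout/stderr,
// or throws with git's stderr if the clone exits non-zero.
const output = Git.clone("https://github.com/louislam/uptime-kuma.git", "/tmp/deps", "my-repo");
console.log(output);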

View File

@@ -1,5 +1,4 @@
const jsesc = require("jsesc");
const { escape } = require("html-escaper");
/**
* Returns a string that represents the javascript that is required to insert the Google Analytics scripts
@@ -8,18 +7,15 @@ const { escape } = require("html-escaper");
* @returns {string}
*/
function getGoogleAnalyticsScript(tagId) {
let escapedTagIdJS = jsesc(tagId, { isScriptContext: true });
let escapedTagId = jsesc(tagId, { isScriptContext: true });
if (escapedTagIdJS) {
escapedTagIdJS = escapedTagIdJS.trim();
if (escapedTagId) {
escapedTagId = escapedTagId.trim();
}
// Escape the tag ID for use in an HTML attribute.
let escapedTagIdHTMLAttribute = escape(tagId);
return `
<script async src="https://www.googletagmanager.com/gtag/js?id=${escapedTagIdHTMLAttribute}"></script>
<script>window.dataLayer = window.dataLayer || []; function gtag(){dataLayer.push(arguments);} gtag('js', new Date());gtag('config', '${escapedTagIdJS}'); </script>
<script async src="https://www.googletagmanager.com/gtag/js?id=${escapedTagId}"></script>
<script>window.dataLayer = window.dataLayer || []; function gtag(){dataLayer.push(arguments);} gtag('js', new Date());gtag('config', '${escapedTagId}'); </script>
`;
}
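A usage sketch of the script builder above (the tag ID is a placeholder and the export shape is assumed):

const { getGoogleAnalyticsScript } = require("./server/google-analytics");

// Returns the two <script> tags with the tag ID run through jsesc,
// ready to be appended to the status page <head>.
const snippet = getGoogleAnalyticsScript("G-XXXXXXXXXX");
console.log(snippet);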

View File

@@ -1,51 +1,41 @@
const { UptimeKumaServer } = require("./uptime-kuma-server");
const { clearOldData } = require("./jobs/clear-old-data");
const { incrementalVacuum } = require("./jobs/incremental-vacuum");
const Cron = require("croner");
const path = require("path");
const Bree = require("bree");
const { SHARE_ENV } = require("worker_threads");
const { log } = require("../src/util");
let bree;
const jobs = [
{
name: "clear-old-data",
interval: "14 03 * * *",
jobFunc: clearOldData,
croner: null,
interval: "at 03:14",
},
{
name: "incremental-vacuum",
interval: "*/5 * * * *",
jobFunc: incrementalVacuum,
croner: null,
}
];
/**
* Initialize background jobs
* @returns {Promise<void>}
* @param {Object} args Arguments to pass to workers
* @returns {Bree}
*/
const initBackgroundJobs = async function () {
const timezone = await UptimeKumaServer.getInstance().getTimezone();
for (const job of jobs) {
const cornerJob = new Cron(
job.interval,
{
name: job.name,
timezone,
},
job.jobFunc,
);
job.croner = cornerJob;
}
const initBackgroundJobs = function (args) {
bree = new Bree({
root: path.resolve("server", "jobs"),
jobs,
worker: {
env: SHARE_ENV,
workerData: args,
},
workerMessageHandler: (message) => {
log.info("jobs", message);
}
});
bree.start();
return bree;
};
/** Stop all background jobs if running */
const stopBackgroundJobs = function () {
for (const job of jobs) {
if (job.croner) {
job.croner.stop();
job.croner = null;
}
if (bree) {
bree.stop();
}
};
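A sketch of how the two entry points are wired around server start-up and shutdown (shown for the Bree-based variant; the argument shape and require path are assumptions):

const { initBackgroundJobs, stopBackgroundJobs } = require("./server/jobs");

// Starts the job files under server/jobs/ as worker threads,
// passing the data directory through workerData.
initBackgroundJobs({ "data-dir": "./data/" });

// ... later, on shutdown:
stopBackgroundJobs();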

View File

@@ -1,15 +1,12 @@
const { log, exit, connectDb } = require("./util-worker");
const { R } = require("redbean-node");
const { log } = require("../../src/util");
const { setSetting, setting } = require("../util-server");
const DEFAULT_KEEP_PERIOD = 180;
/**
* Clears old data from the heartbeat table of the database.
* @return {Promise<void>} A promise that resolves when the data has been cleared.
*/
(async () => {
await connectDb();
const clearOldData = async () => {
let period = await setting("keepDataPeriodDays");
// Set Default Period
@@ -23,30 +20,26 @@ const clearOldData = async () => {
try {
parsedPeriod = parseInt(period);
} catch (_) {
log.warn("clearOldData", "Failed to parse setting, resetting to default..");
log("Failed to parse setting, resetting to default..");
await setSetting("keepDataPeriodDays", DEFAULT_KEEP_PERIOD, "general");
parsedPeriod = DEFAULT_KEEP_PERIOD;
}
if (parsedPeriod < 1) {
log.info("clearOldData", `Data deletion has been disabled as period is less than 1. Period is ${parsedPeriod} days.`);
log(`Data deletion has been disabled as period is less than 1. Period is ${parsedPeriod} days.`);
} else {
log.debug("clearOldData", `Clearing Data older than ${parsedPeriod} days...`);
log(`Clearing Data older than ${parsedPeriod} days...`);
try {
await R.exec(
"DELETE FROM heartbeat WHERE time < DATETIME('now', '-' || ? || ' days') ",
[ parsedPeriod ]
);
await R.exec("PRAGMA optimize;");
} catch (e) {
log.error("clearOldData", `Failed to clear old data: ${e.message}`);
log(`Failed to clear old data: ${e.message}`);
}
}
};
module.exports = {
clearOldData,
};
exit();
})();

View File

@@ -1,21 +0,0 @@
const { R } = require("redbean-node");
const { log } = require("../../src/util");
/**
* Run incremental_vacuum and checkpoint the WAL.
* @return {Promise<void>} A promise that resolves when the process is finished.
*/
const incrementalVacuum = async () => {
try {
log.debug("incrementalVacuum", "Running incremental_vacuum and wal_checkpoint(PASSIVE)...");
await R.exec("PRAGMA incremental_vacuum(200)");
await R.exec("PRAGMA wal_checkpoint(PASSIVE)");
} catch (e) {
log.error("incrementalVacuum", `Failed: ${e.message}`);
}
};
module.exports = {
incrementalVacuum,
};

View File

@@ -0,0 +1,50 @@
const { parentPort, workerData } = require("worker_threads");
const Database = require("../database");
const path = require("path");
/**
* Send a message to the parent process for logging,
* since a worker thread does not have access to stdout; this is used
* instead of console.log()
* @param {any} any The message to log
*/
const log = function (any) {
if (parentPort) {
parentPort.postMessage(any);
}
};
/**
* Exit the worker process
* @param {number} error The status code to exit
*/
const exit = function (error) {
if (error && error !== 0) {
process.exit(error);
} else {
if (parentPort) {
parentPort.postMessage("done");
} else {
process.exit(0);
}
}
};
/** Connects to the database */
const connectDb = async function () {
const dbPath = path.join(
process.env.DATA_DIR || workerData["data-dir"] || "./data/"
);
Database.init({
"data-dir": dbPath,
});
await Database.connect();
};
module.exports = {
log,
exit,
connectDb,
};
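A sketch of a job file built on these helpers, mirroring the structure of the clear-old-data job shown earlier (the file name and query are illustrative):

// hypothetical-job.js, executed by Bree inside a worker thread
const { log, exit, connectDb } = require("./util-worker");
const { R } = require("redbean-node");

(async () => {
    await connectDb(); // opens the database from DATA_DIR or workerData["data-dir"]
    const count = await R.getCell("SELECT COUNT(*) FROM heartbeat"); // illustrative query
    log("heartbeat rows: " + count); // forwarded to the parent thread via postMessage
    exit(); // posts "done" so the parent knows the run finished
})();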

View File

@@ -1,76 +0,0 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
const { R } = require("redbean-node");
const dayjs = require("dayjs");
class APIKey extends BeanModel {
/**
* Get the current status of this API key
* @returns {string} active, inactive or expired
*/
getStatus() {
let current = dayjs();
let expiry = dayjs(this.expires);
if (expiry.diff(current) < 0) {
return "expired";
}
return this.active ? "active" : "inactive";
}
/**
* Returns an object that is ready to be parsed to JSON
* @returns {Object}
*/
toJSON() {
return {
id: this.id,
key: this.key,
name: this.name,
userID: this.user_id,
createdDate: this.created_date,
active: this.active,
expires: this.expires,
status: this.getStatus(),
};
}
/**
* Returns an object that is ready to be parsed to JSON, with sensitive
* fields removed
* @returns {Object}
*/
toPublicJSON() {
return {
id: this.id,
name: this.name,
userID: this.user_id,
createdDate: this.created_date,
active: this.active,
expires: this.expires,
status: this.getStatus(),
};
}
/**
* Create a new API Key and store it in the database
* @param {Object} key Object sent by client
* @param {int} userID ID of socket user
* @returns {Promise<bean>}
*/
static async save(key, userID) {
let bean;
bean = R.dispense("api_key");
bean.key = key.key;
bean.name = key.name;
bean.user_id = userID;
bean.active = key.active;
bean.expires = key.expires;
await R.store(bean);
return bean;
}
}
module.exports = APIKey;
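A usage sketch of APIKey.save above (the key value, expiry and user ID are illustrative):

const APIKey = require("./server/model/api_key");

APIKey.save({
    key: "illustrative-api-key-value",
    name: "backup-script",
    active: 1,
    expires: "2023-12-31 23:59:59",
}, 1).then((bean) => {
    console.log(bean.getStatus()); // "active", "inactive" or "expired"
});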

View File

@@ -9,12 +9,12 @@ class Group extends BeanModel {
* @param {boolean} [showTags=false] Should the JSON include monitor tags
* @returns {Object}
*/
async toPublicJSON(showTags = false, certExpiry = false) {
async toPublicJSON(showTags = false) {
let monitorBeanList = await this.getMonitorList();
let monitorList = [];
for (let bean of monitorBeanList) {
monitorList.push(await bean.toPublicJSON(showTags, certExpiry));
monitorList.push(await bean.toPublicJSON(showTags));
}
return {

View File

@@ -1,10 +1,8 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
const { parseTimeObject, parseTimeFromTimeObject, log } = require("../../src/util");
const { parseTimeObject, parseTimeFromTimeObject, utcToLocal, localToUTC, log } = require("../../src/util");
const { timeObjectToUTC, timeObjectToLocal } = require("../util-server");
const { R } = require("redbean-node");
const dayjs = require("dayjs");
const Cron = require("croner");
const { UptimeKumaServer } = require("../uptime-kuma-server");
const apicache = require("../modules/apicache");
class Maintenance extends BeanModel {
@@ -17,19 +15,16 @@ class Maintenance extends BeanModel {
let dateRange = [];
if (this.start_date) {
dateRange.push(this.start_date);
} else {
dateRange.push(null);
}
if (this.end_date) {
dateRange.push(this.end_date);
dateRange.push(utcToLocal(this.start_date));
if (this.end_date) {
dateRange.push(utcToLocal(this.end_date));
}
}
let timeRange = [];
let startTime = parseTimeObject(this.start_time);
let startTime = timeObjectToLocal(parseTimeObject(this.start_time));
timeRange.push(startTime);
let endTime = parseTimeObject(this.end_time);
let endTime = timeObjectToLocal(parseTimeObject(this.end_time));
timeRange.push(endTime);
let obj = {
@@ -44,44 +39,12 @@ class Maintenance extends BeanModel {
weekdays: (this.weekdays) ? JSON.parse(this.weekdays) : [],
daysOfMonth: (this.days_of_month) ? JSON.parse(this.days_of_month) : [],
timeslotList: [],
cron: this.cron,
duration: this.duration,
durationMinutes: parseInt(this.duration / 60),
timezone: await this.getTimezone(), // Only valid timezone
timezoneOption: this.timezone, // Mainly for dropdown menu, because there is an option "SAME_AS_SERVER"
timezoneOffset: await this.getTimezoneOffset(),
status: await this.getStatus(),
};
if (this.strategy === "manual") {
// Do nothing, no timeslots
} else if (this.strategy === "single") {
obj.timeslotList.push({
startDate: this.start_date,
endDate: this.end_date,
});
} else {
// Should be cron or recurring here
if (this.beanMeta.job) {
let runningTimeslot = this.getRunningTimeslot();
const timeslotList = await this.getTimeslotList();
if (runningTimeslot) {
obj.timeslotList.push(runningTimeslot);
}
let nextRunDate = this.beanMeta.job.nextRun();
if (nextRunDate) {
let startDateDayjs = dayjs(nextRunDate);
let startDate = startDateDayjs.toISOString();
let endDate = startDateDayjs.add(this.duration, "second").toISOString();
obj.timeslotList.push({
startDate,
endDate,
});
}
}
for (let timeslot of timeslotList) {
obj.timeslotList.push(await timeslot.toPublicJSON());
}
if (!Array.isArray(obj.weekdays)) {
@@ -92,9 +55,54 @@ class Maintenance extends BeanModel {
obj.daysOfMonth = [];
}
// Maintenance Status
if (!obj.active) {
obj.status = "inactive";
} else if (obj.strategy === "manual") {
obj.status = "under-maintenance";
} else if (obj.timeslotList.length > 0) {
let currentTimestamp = dayjs().unix();
for (let timeslot of obj.timeslotList) {
if (dayjs.utc(timeslot.startDate).unix() <= currentTimestamp && dayjs.utc(timeslot.endDate).unix() >= currentTimestamp) {
log.debug("timeslot", "Timeslot ID: " + timeslot.id);
log.debug("timeslot", "currentTimestamp:" + currentTimestamp);
log.debug("timeslot", "timeslot.start_date:" + dayjs.utc(timeslot.startDate).unix());
log.debug("timeslot", "timeslot.end_date:" + dayjs.utc(timeslot.endDate).unix());
obj.status = "under-maintenance";
break;
}
}
if (!obj.status) {
obj.status = "scheduled";
}
} else if (obj.timeslotList.length === 0) {
obj.status = "ended";
} else {
obj.status = "unknown";
}
return obj;
}
/**
* Only get future or current timeslots
* @returns {Promise<[]>}
*/
async getTimeslotList() {
return R.convertToBeans("maintenance_timeslot", await R.getAll(`
SELECT maintenance_timeslot.*
FROM maintenance_timeslot, maintenance
WHERE maintenance_timeslot.maintenance_id = maintenance.id
AND maintenance.id = ?
AND ${Maintenance.getActiveAndFutureMaintenanceSQLCondition()}
`, [
this.id
]));
}
/**
* Return an object that is ready to be parsed to JSON
* @param {string} timezone If not specified, the timeRange will be in UTC
@@ -118,7 +126,7 @@ class Maintenance extends BeanModel {
/**
* Get a list of days in month that maintenance is active for
* @returns {number[]|string[]} Array of active days in month
* @returns {number[]} Array of active days in month
*/
getDayOfMonthList() {
return JSON.parse(this.days_of_month).sort(function (a, b) {
@@ -127,10 +135,26 @@ class Maintenance extends BeanModel {
}
/**
* Get the duration of maintenance in seconds
* Get the start date and time for maintenance
* @returns {dayjs.Dayjs} Start date and time
*/
getStartDateTime() {
let startOfTheDay = dayjs.utc(this.start_date).format("HH:mm");
log.debug("timeslot", "startOfTheDay: " + startOfTheDay);
// Start Time
let startTimeSecond = dayjs.utc(this.start_time, "HH:mm").diff(dayjs.utc(startOfTheDay, "HH:mm"), "second");
log.debug("timeslot", "startTime: " + startTimeSecond);
// Bake StartDate + StartTime = Start DateTime
return dayjs.utc(this.start_date).add(startTimeSecond, "second");
}
/**
* Get the duration of maintenance in seconds
* @returns {number} Duration of maintenance
*/
calcDuration() {
getDuration() {
let duration = dayjs.utc(this.end_time, "HH:mm").diff(dayjs.utc(this.start_time, "HH:mm"), "second");
// Add 24hours if it is across day
if (duration < 0) {
@@ -145,270 +169,71 @@ class Maintenance extends BeanModel {
* @param {Object} obj Data to fill bean with
* @returns {Bean} Filled bean
*/
static async jsonToBean(bean, obj) {
static jsonToBean(bean, obj) {
if (obj.id) {
bean.id = obj.id;
}
// Apply timezone offset to timeRange, as it cannot apply automatically.
if (obj.timeRange[0]) {
timeObjectToUTC(obj.timeRange[0]);
if (obj.timeRange[1]) {
timeObjectToUTC(obj.timeRange[1]);
}
}
bean.title = obj.title;
bean.description = obj.description;
bean.strategy = obj.strategy;
bean.interval_day = obj.intervalDay;
bean.timezone = obj.timezoneOption;
bean.active = obj.active;
if (obj.dateRange[0]) {
bean.start_date = obj.dateRange[0];
} else {
bean.start_date = null;
bean.start_date = localToUTC(obj.dateRange[0]);
if (obj.dateRange[1]) {
bean.end_date = localToUTC(obj.dateRange[1]);
}
}
if (obj.dateRange[1]) {
bean.end_date = obj.dateRange[1];
} else {
bean.end_date = null;
}
bean.start_time = parseTimeFromTimeObject(obj.timeRange[0]);
bean.end_time = parseTimeFromTimeObject(obj.timeRange[1]);
if (bean.strategy === "cron") {
bean.duration = obj.durationMinutes * 60;
bean.cron = obj.cron;
this.validateCron(bean.cron);
}
bean.weekdays = JSON.stringify(obj.weekdays);
bean.days_of_month = JSON.stringify(obj.daysOfMonth);
if (bean.strategy.startsWith("recurring-")) {
bean.start_time = parseTimeFromTimeObject(obj.timeRange[0]);
bean.end_time = parseTimeFromTimeObject(obj.timeRange[1]);
bean.weekdays = JSON.stringify(obj.weekdays);
bean.days_of_month = JSON.stringify(obj.daysOfMonth);
await bean.generateCron();
this.validateCron(bean.cron);
}
return bean;
}
/**
* Throw error if cron is invalid
* @param cron
* @returns {Promise<void>}
* SQL conditions for active maintenance
* @returns {string}
*/
static async validateCron(cron) {
let job = new Cron(cron, () => {});
job.stop();
static getActiveMaintenanceSQLCondition() {
return `
(
(maintenance_timeslot.start_date <= DATETIME('now')
AND maintenance_timeslot.end_date >= DATETIME('now')
AND maintenance.active = 1)
OR
(maintenance.strategy = 'manual' AND active = 1)
)
`;
}
/**
* Run the cron
* SQL conditions for active and future maintenance
* @returns {string}
*/
async run(throwError = false) {
if (this.beanMeta.job) {
log.debug("maintenance", "Maintenance is already running, stop it first. id: " + this.id);
this.stop();
}
log.debug("maintenance", "Run maintenance id: " + this.id);
// 1.21.2 migration
if (!this.cron) {
await this.generateCron();
if (!this.timezone) {
this.timezone = "UTC";
}
if (this.cron) {
await R.store(this);
}
}
if (this.strategy === "manual") {
// Do nothing, because it is controlled by the user
} else if (this.strategy === "single") {
this.beanMeta.job = new Cron(this.start_date, { timezone: await this.getTimezone() }, () => {
log.info("maintenance", "Maintenance id: " + this.id + " is under maintenance now");
UptimeKumaServer.getInstance().sendMaintenanceListByUserID(this.user_id);
apicache.clear();
});
} else if (this.cron != null) {
// Here should be cron or recurring
try {
this.beanMeta.status = "scheduled";
let startEvent = (customDuration = 0) => {
log.info("maintenance", "Maintenance id: " + this.id + " is under maintenance now");
this.beanMeta.status = "under-maintenance";
clearTimeout(this.beanMeta.durationTimeout);
// Check if duration is still in the window. If not, use the duration from the current time to the end of the window
let duration;
if (customDuration > 0) {
duration = customDuration;
} else if (this.end_date) {
let d = dayjs(this.end_date).diff(dayjs(), "second");
if (d < this.duration) {
duration = d * 1000;
}
} else {
duration = this.duration * 1000;
}
UptimeKumaServer.getInstance().sendMaintenanceListByUserID(this.user_id);
this.beanMeta.durationTimeout = setTimeout(() => {
// End of maintenance for this timeslot
this.beanMeta.status = "scheduled";
UptimeKumaServer.getInstance().sendMaintenanceListByUserID(this.user_id);
}, duration);
};
// Create Cron
this.beanMeta.job = new Cron(this.cron, {
timezone: await this.getTimezone(),
}, startEvent);
// Continue if the maintenance is still in the window
let runningTimeslot = this.getRunningTimeslot();
let current = dayjs();
if (runningTimeslot) {
let duration = dayjs(runningTimeslot.endDate).diff(current, "second") * 1000;
log.debug("maintenance", "Maintenance id: " + this.id + " Remaining duration: " + duration + "ms");
startEvent(duration);
}
} catch (e) {
log.error("maintenance", "Error in maintenance id: " + this.id);
log.error("maintenance", "Cron: " + this.cron);
log.error("maintenance", e);
if (throwError) {
throw e;
}
}
} else {
log.error("maintenance", "Maintenance id: " + this.id + " has no cron");
}
}
getRunningTimeslot() {
let start = dayjs(this.beanMeta.job.nextRun(dayjs().add(-this.duration, "second").toDate()));
let end = start.add(this.duration, "second");
let current = dayjs();
if (current.isAfter(start) && current.isBefore(end)) {
return {
startDate: start.toISOString(),
endDate: end.toISOString(),
};
} else {
return null;
}
}
stop() {
if (this.beanMeta.job) {
this.beanMeta.job.stop();
delete this.beanMeta.job;
}
}
async isUnderMaintenance() {
return (await this.getStatus()) === "under-maintenance";
}
async getTimezone() {
if (!this.timezone || this.timezone === "SAME_AS_SERVER") {
return await UptimeKumaServer.getInstance().getTimezone();
}
return this.timezone;
}
async getTimezoneOffset() {
return dayjs.tz(dayjs(), await this.getTimezone()).format("Z");
}
async getStatus() {
if (!this.active) {
return "inactive";
}
if (this.strategy === "manual") {
return "under-maintenance";
}
// Check if the maintenance is started
if (this.start_date && dayjs().isBefore(dayjs.tz(this.start_date, await this.getTimezone()))) {
return "scheduled";
}
// Check if the maintenance is ended
if (this.end_date && dayjs().isAfter(dayjs.tz(this.end_date, await this.getTimezone()))) {
return "ended";
}
if (this.strategy === "single") {
return "under-maintenance";
}
if (!this.beanMeta.status) {
return "unknown";
}
return this.beanMeta.status;
}
/**
* Generate Cron for recurring maintenance
* @returns {Promise<void>}
*/
async generateCron() {
log.info("maintenance", "Generate cron for maintenance id: " + this.id);
if (this.strategy === "cron") {
// Do nothing for cron
} else if (!this.strategy.startsWith("recurring-")) {
this.cron = "";
} else if (this.strategy === "recurring-interval") {
let array = this.start_time.split(":");
let hour = parseInt(array[0]);
let minute = parseInt(array[1]);
this.cron = minute + " " + hour + " */" + this.interval_day + " * *";
this.duration = this.calcDuration();
log.debug("maintenance", "Cron: " + this.cron);
log.debug("maintenance", "Duration: " + this.duration);
} else if (this.strategy === "recurring-weekday") {
let list = this.getDayOfWeekList();
let array = this.start_time.split(":");
let hour = parseInt(array[0]);
let minute = parseInt(array[1]);
this.cron = minute + " " + hour + " * * " + list.join(",");
this.duration = this.calcDuration();
} else if (this.strategy === "recurring-day-of-month") {
let list = this.getDayOfMonthList();
let array = this.start_time.split(":");
let hour = parseInt(array[0]);
let minute = parseInt(array[1]);
let dayList = [];
for (let day of list) {
if (typeof day === "string" && day.startsWith("lastDay")) {
if (day === "lastDay1") {
dayList.push("L");
}
// Unfortunately, lastDay2-4 is not supported by cron
} else {
dayList.push(day);
}
}
// Remove duplicates
dayList = [ ...new Set(dayList) ];
this.cron = minute + " " + hour + " " + dayList.join(",") + " * *";
this.duration = this.calcDuration();
}
static getActiveAndFutureMaintenanceSQLCondition() {
return `
(
((maintenance_timeslot.end_date >= DATETIME('now')
AND maintenance.active = 1)
OR
(maintenance.strategy = 'manual' AND active = 1))
)
`;
}
}
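Two worked examples for the recurring-maintenance helpers above (the times and interval are illustrative):

// generateCron(), strategy "recurring-interval":
// start_time "03:30" with interval_day 2 produces the cron expression "30 3 */2 * *".
const [ hour, minute ] = "03:30".split(":").map(Number);
const cron = minute + " " + hour + " */" + 2 + " * *";
console.log(cron); // -> "30 3 */2 * *"

// calcDuration()/getDuration() across midnight:
// start_time "23:00" and end_time "01:00" diff to -79200 seconds,
// and the 24-hour correction brings the duration to 7200 seconds (2 hours).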

View File

@@ -0,0 +1,198 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
const { R } = require("redbean-node");
const dayjs = require("dayjs");
const { log, utcToLocal, SQL_DATETIME_FORMAT_WITHOUT_SECOND, localToUTC } = require("../../src/util");
const { UptimeKumaServer } = require("../uptime-kuma-server");
class MaintenanceTimeslot extends BeanModel {
/**
* Return an object that is ready to be parsed to JSON for the public
* Only show necessary data to the public
* @returns {Object}
*/
async toPublicJSON() {
const serverTimezoneOffset = UptimeKumaServer.getInstance().getTimezoneOffset();
const obj = {
id: this.id,
startDate: this.start_date,
endDate: this.end_date,
startDateServerTimezone: utcToLocal(this.start_date, SQL_DATETIME_FORMAT_WITHOUT_SECOND),
endDateServerTimezone: utcToLocal(this.end_date, SQL_DATETIME_FORMAT_WITHOUT_SECOND),
serverTimezoneOffset,
};
return obj;
}
/**
* Return an object that is ready to be parsed to JSON
* @returns {Object}
*/
async toJSON() {
return await this.toPublicJSON();
}
/**
* @param {Maintenance} maintenance
* @param {dayjs} minDate (For recurring type only) Generate a next timeslot from this date.
* @param {boolean} removeExist Remove existing timeslot before create
* @returns {Promise<MaintenanceTimeslot>}
*/
static async generateTimeslot(maintenance, minDate = null, removeExist = false) {
if (removeExist) {
await R.exec("DELETE FROM maintenance_timeslot WHERE maintenance_id = ? ", [
maintenance.id
]);
}
if (maintenance.strategy === "manual") {
log.debug("maintenance", "No need to generate timeslot for manual type");
} else if (maintenance.strategy === "single") {
let bean = R.dispense("maintenance_timeslot");
bean.maintenance_id = maintenance.id;
bean.start_date = maintenance.start_date;
bean.end_date = maintenance.end_date;
bean.generated_next = true;
return await R.store(bean);
} else if (maintenance.strategy === "recurring-interval") {
// Prevent an infinite loop in case interval_day is not set
if (!maintenance.interval_day || maintenance.interval_day <= 0) {
maintenance.interval_day = 1;
}
return await this.handleRecurringType(maintenance, minDate, (startDateTime) => {
return startDateTime.add(maintenance.interval_day, "day");
}, () => {
return true;
});
} else if (maintenance.strategy === "recurring-weekday") {
let dayOfWeekList = maintenance.getDayOfWeekList();
log.debug("timeslot", dayOfWeekList);
if (dayOfWeekList.length <= 0) {
log.debug("timeslot", "No weekdays selected?");
return null;
}
const isValid = (startDateTime) => {
log.debug("timeslot", "nextDateTime: " + startDateTime);
let day = startDateTime.local().day();
log.debug("timeslot", "nextDateTime.day(): " + day);
return dayOfWeekList.includes(day);
};
return await this.handleRecurringType(maintenance, minDate, (startDateTime) => {
while (true) {
startDateTime = startDateTime.add(1, "day");
if (isValid(startDateTime)) {
return startDateTime;
}
}
}, isValid);
} else if (maintenance.strategy === "recurring-day-of-month") {
let dayOfMonthList = maintenance.getDayOfMonthList();
if (dayOfMonthList.length <= 0) {
log.debug("timeslot", "No day selected?");
return null;
}
const isValid = (startDateTime) => {
let day = parseInt(startDateTime.local().format("D"));
log.debug("timeslot", "day: " + day);
// Check 1-31
if (dayOfMonthList.includes(day)) {
return startDateTime;
}
// Check "lastDay1","lastDay2"...
let daysInMonth = startDateTime.daysInMonth();
let lastDayList = [];
// Smallest first, e.g. 28, 29, 30, 31
for (let i = 4; i >= 1; i--) {
if (dayOfMonthList.includes("lastDay" + i)) {
lastDayList.push(daysInMonth - i + 1);
}
}
log.debug("timeslot", lastDayList);
return lastDayList.includes(day);
};
return await this.handleRecurringType(maintenance, minDate, (startDateTime) => {
while (true) {
startDateTime = startDateTime.add(1, "day");
if (isValid(startDateTime)) {
return startDateTime;
}
}
}, isValid);
} else {
throw new Error("Unknown maintenance strategy");
}
}
/**
* Generate a next timeslot for all recurring types
* @param maintenance
* @param minDate
* @param {function} nextDayCallback The logic for getting the next possible day
* @param {function} isValidCallback Checks whether the day matches the current strategy
* @returns {Promise<null|MaintenanceTimeslot>}
*/
static async handleRecurringType(maintenance, minDate, nextDayCallback, isValidCallback) {
let bean = R.dispense("maintenance_timeslot");
let duration = maintenance.getDuration();
let startDateTime = maintenance.getStartDateTime();
let endDateTime;
// Keep generating from the first possible date, until it is ok
while (true) {
log.debug("timeslot", "startDateTime: " + startDateTime.format());
// Handling out of effective date range
if (startDateTime.diff(dayjs.utc(maintenance.end_date)) > 0) {
log.debug("timeslot", "Out of effective date range");
return null;
}
endDateTime = startDateTime.add(duration, "second");
// If endDateTime is out of effective date range, use the end datetime from effective date range
if (endDateTime.diff(dayjs.utc(maintenance.end_date)) > 0) {
endDateTime = dayjs.utc(maintenance.end_date);
}
// If minDate is set, the endDateTime must be later than it.
// The endDateTime must also be later than the current time.
// And the slot must be valid under the current recurring strategy.
if (
(!minDate || endDateTime.diff(minDate) > 0) &&
endDateTime.diff(dayjs()) > 0 &&
isValidCallback(startDateTime)
) {
break;
}
startDateTime = nextDayCallback(startDateTime);
}
bean.maintenance_id = maintenance.id;
bean.start_date = localToUTC(startDateTime);
bean.end_date = localToUTC(endDateTime);
bean.generated_next = false;
return await R.store(bean);
}
}
module.exports = MaintenanceTimeslot;
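A worked sketch of the "lastDay" mapping used by the recurring-day-of-month strategy above (the month length and selection are illustrative):

// In a 31-day month, "lastDay1".."lastDay4" correspond to days 31, 30, 29 and 28.
const daysInMonth = 31;
const dayOfMonthList = [ 1, 15, "lastDay1", "lastDay2" ];
const lastDayList = [];
for (let i = 4; i >= 1; i--) {
    if (dayOfMonthList.includes("lastDay" + i)) {
        lastDayList.push(daysInMonth - i + 1);
    }
}
console.log(lastDayList); // -> [ 30, 31 ]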

File diff suppressed because it is too large

View File

@@ -3,6 +3,7 @@ const { R } = require("redbean-node");
const cheerio = require("cheerio");
const { UptimeKumaServer } = require("../uptime-kuma-server");
const jsesc = require("jsesc");
const Maintenance = require("./maintenance");
const googleAnalytics = require("../google-analytics");
class StatusPage extends BeanModel {
@@ -59,11 +60,8 @@ class StatusPage extends BeanModel {
}
// OG Meta Tags
let ogTitle = $("<meta property=\"og:title\" content=\"\" />").attr("content", statusPage.title);
head.append(ogTitle);
let ogDescription = $("<meta property=\"og:description\" content=\"\" />").attr("content", description155);
head.append(ogDescription);
head.append(`<meta property="og:title" content="${statusPage.title}" />`);
head.append(`<meta property="og:description" content="${description155}" />`);
// Preload data
// Add jsesc, fix https://github.com/louislam/uptime-kuma/issues/2186
@@ -90,8 +88,6 @@ class StatusPage extends BeanModel {
* @param {StatusPage} statusPage
*/
static async getStatusPageData(statusPage) {
const config = await statusPage.toPublicJSON();
// Incident
let incident = await R.findOne("incident", " pin = 1 AND active = 1 AND status_page_id = ? ", [
statusPage.id,
@@ -112,13 +108,13 @@ class StatusPage extends BeanModel {
]);
for (let groupBean of list) {
let monitorGroup = await groupBean.toPublicJSON(showTags, config?.showCertificateExpiry);
let monitorGroup = await groupBean.toPublicJSON(showTags);
publicGroupList.push(monitorGroup);
}
// Response
return {
config,
config: await statusPage.toPublicJSON(),
incident,
publicGroupList,
maintenanceList,
@@ -236,7 +232,6 @@ class StatusPage extends BeanModel {
footerText: this.footer_text,
showPoweredBy: !!this.show_powered_by,
googleAnalyticsId: this.google_analytics_tag_id,
showCertificateExpiry: !!this.show_certificate_expiry,
};
}
@@ -258,7 +253,6 @@ class StatusPage extends BeanModel {
footerText: this.footer_text,
showPoweredBy: !!this.show_powered_by,
googleAnalyticsId: this.google_analytics_tag_id,
showCertificateExpiry: !!this.show_certificate_expiry,
};
}
@@ -293,17 +287,21 @@ class StatusPage extends BeanModel {
try {
const publicMaintenanceList = [];
let maintenanceIDList = await R.getCol(`
SELECT DISTINCT maintenance_id
FROM maintenance_status_page
WHERE status_page_id = ?
`, [ statusPageId ]);
let activeCondition = Maintenance.getActiveMaintenanceSQLCondition();
let maintenanceBeanList = R.convertToBeans("maintenance", await R.getAll(`
SELECT DISTINCT maintenance.*
FROM maintenance
JOIN maintenance_status_page
ON maintenance_status_page.maintenance_id = maintenance.id
AND maintenance_status_page.status_page_id = ?
LEFT JOIN maintenance_timeslot
ON maintenance_timeslot.maintenance_id = maintenance.id
WHERE ${activeCondition}
ORDER BY maintenance.end_date
`, [ statusPageId ]));
for (const maintenanceID of maintenanceIDList) {
let maintenance = UptimeKumaServer.getInstance().getMaintenance(maintenanceID);
if (maintenance && await maintenance.isUnderMaintenance()) {
publicMaintenanceList.push(await maintenance.toPublicJSON());
}
for (const bean of maintenanceBeanList) {
publicMaintenanceList.push(await bean.toPublicJSON());
}
return publicMaintenanceList;

View File

@@ -1,8 +1,6 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
const passwordHash = require("../password-hash");
const { R } = require("redbean-node");
const jwt = require("jsonwebtoken");
const { shake256, SHAKE256_LENGTH } = require("../util-server");
class User extends BeanModel {
/**
@@ -29,19 +27,6 @@ class User extends BeanModel {
this.password = newPassword;
}
/**
* Create a new JWT for a user
* @param {User} user
* @param {string} jwtSecret
* @return {string}
*/
static createJWT(user, jwtSecret) {
return jwt.sign({
username: user.username,
h: shake256(user.password, SHAKE256_LENGTH),
}, jwtSecret);
}
}
module.exports = User;

View File

@@ -6,10 +6,9 @@ class MonitorType {
*
* @param {Monitor} monitor
* @param {Heartbeat} heartbeat
* @param {UptimeKumaServer} server
* @returns {Promise<void>}
*/
async check(monitor, heartbeat, server) {
async check(monitor, heartbeat) {
throw new Error("You need to override check()");
}

View File

@@ -1,225 +0,0 @@
const { MonitorType } = require("./monitor-type");
const { chromium } = require("playwright-core");
const { UP, log } = require("../../src/util");
const { Settings } = require("../settings");
const commandExistsSync = require("command-exists").sync;
const childProcess = require("child_process");
const path = require("path");
const Database = require("../database");
const jwt = require("jsonwebtoken");
const config = require("../config");
/**
* Cached instance of a browser
* @type {import ("playwright-core").Browser}
*/
let browser = null;
let allowedList = [];
let lastAutoDetectChromeExecutable = null;
if (process.platform === "win32") {
allowedList.push(process.env.LOCALAPPDATA + "\\Google\\Chrome\\Application\\chrome.exe");
allowedList.push(process.env.PROGRAMFILES + "\\Google\\Chrome\\Application\\chrome.exe");
allowedList.push(process.env["ProgramFiles(x86)"] + "\\Google\\Chrome\\Application\\chrome.exe");
// Allow Chromium too
allowedList.push(process.env.LOCALAPPDATA + "\\Chromium\\Application\\chrome.exe");
allowedList.push(process.env.PROGRAMFILES + "\\Chromium\\Application\\chrome.exe");
allowedList.push(process.env["ProgramFiles(x86)"] + "\\Chromium\\Application\\chrome.exe");
// For Loop A to Z
for (let i = 65; i <= 90; i++) {
let drive = String.fromCharCode(i);
allowedList.push(drive + ":\\Program Files\\Google\\Chrome\\Application\\chrome.exe");
allowedList.push(drive + ":\\Program Files (x86)\\Google\\Chrome\\Application\\chrome.exe");
}
} else if (process.platform === "linux") {
allowedList = [
"chromium",
"chromium-browser",
"google-chrome",
"/usr/bin/chromium",
"/usr/bin/chromium-browser",
"/usr/bin/google-chrome",
"/snap/bin/chromium", // Ubuntu
];
} else if (process.platform === "darwin") {
// TODO: Generated by GitHub Copilot, but not sure if it's correct
allowedList = [
"/Applications/Google Chrome.app/Contents/MacOS/Google Chrome",
"/Applications/Chromium.app/Contents/MacOS/Chromium",
];
}
log.debug("chrome", allowedList);
async function isAllowedChromeExecutable(executablePath) {
console.log(config.args);
if (config.args["allow-all-chrome-exec"] || process.env.UPTIME_KUMA_ALLOW_ALL_CHROME_EXEC === "1") {
return true;
}
// Check if the executablePath is in the list of allowed executables
return allowedList.includes(executablePath);
}
/**
* Get the current instance of the browser. If there isn't one, create
* it.
* @returns {Promise<import ("playwright-core").Browser>} The browser
*/
async function getBrowser() {
if (browser && browser.isConnected()) {
return browser;
} else {
let executablePath = await Settings.get("chromeExecutable");
executablePath = await prepareChromeExecutable(executablePath);
browser = await chromium.launch({
//headless: false,
executablePath,
});
return browser;
}
}
async function prepareChromeExecutable(executablePath) {
// Special code for using the playwright_chromium
if (typeof executablePath === "string" && executablePath.toLocaleLowerCase() === "#playwright_chromium") {
// Set to undefined = use playwright_chromium
executablePath = undefined;
} else if (!executablePath) {
if (process.env.UPTIME_KUMA_IS_CONTAINER) {
executablePath = "/usr/bin/chromium";
// Install chromium in container via apt install
if ( !commandExistsSync(executablePath)) {
await new Promise((resolve, reject) => {
log.info("Chromium", "Installing Chromium...");
let child = childProcess.exec("apt update && apt --yes --no-install-recommends install chromium fonts-indic fonts-noto fonts-noto-cjk");
// On exit
child.on("exit", (code) => {
log.info("Chromium", "apt install chromium exited with code " + code);
if (code === 0) {
log.info("Chromium", "Installed Chromium");
let version = childProcess.execSync(executablePath + " --version").toString("utf8");
log.info("Chromium", "Chromium version: " + version);
resolve();
} else if (code === 100) {
reject(new Error("Installing Chromium, please wait..."));
} else {
reject(new Error("apt install chromium failed with code " + code));
}
});
});
}
} else {
executablePath = findChrome(allowedList);
}
} else {
// User specified a path
// Check if the executablePath is in the list of allowed
if (!await isAllowedChromeExecutable(executablePath)) {
throw new Error("This Chromium executable path is not allowed by default. If you are sure this is safe, please add an environment variable UPTIME_KUMA_ALLOW_ALL_CHROME_EXEC=1 to allow it.");
}
}
return executablePath;
}
function findChrome(executables) {
// Use the last working executable, so we don't have to search for it again
if (lastAutoDetectChromeExecutable) {
if (commandExistsSync(lastAutoDetectChromeExecutable)) {
return lastAutoDetectChromeExecutable;
}
}
for (let executable of executables) {
if (commandExistsSync(executable)) {
lastAutoDetectChromeExecutable = executable;
return executable;
}
}
throw new Error("Chromium not found, please specify Chromium executable path in the settings page.");
}
async function resetChrome() {
if (browser) {
await browser.close();
browser = null;
}
}
/**
* Test if the chrome executable is valid and return the version
* @param executablePath
* @returns {Promise<string>}
*/
async function testChrome(executablePath) {
try {
executablePath = await prepareChromeExecutable(executablePath);
log.info("Chromium", "Testing Chromium executable: " + executablePath);
const browser = await chromium.launch({
executablePath,
});
const version = browser.version();
await browser.close();
return version;
} catch (e) {
throw new Error(e.message);
}
}
/**
* TODO: connect remote browser? https://playwright.dev/docs/api/class-browsertype#browser-type-connect
*
*/
class RealBrowserMonitorType extends MonitorType {
name = "real-browser";
async check(monitor, heartbeat, server) {
const browser = await getBrowser();
const context = await browser.newContext();
const page = await context.newPage();
const res = await page.goto(monitor.url, {
waitUntil: "networkidle",
timeout: monitor.interval * 1000 * 0.8,
});
let filename = jwt.sign(monitor.id, server.jwtSecret) + ".png";
await page.screenshot({
path: path.join(Database.screenshotDir, filename),
});
await context.close();
if (res.status() >= 200 && res.status() < 400) {
heartbeat.status = UP;
heartbeat.msg = res.status();
const timing = res.request().timing();
heartbeat.ping = timing.responseEnd;
} else {
throw new Error(res.status() + "");
}
}
}
module.exports = {
RealBrowserMonitorType,
testChrome,
resetChrome,
};
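A usage sketch of the exported testChrome helper (the "#playwright_chromium" value is the special marker handled by prepareChromeExecutable; the require path is assumed):

const { testChrome } = require("./server/monitor-types/real-browser-monitor-type");

// Resolves to the browser version string, or rejects if the executable path
// is not allowed or no usable Chromium/Chrome can be found.
testChrome("#playwright_chromium").then((version) => {
    console.log("Chromium version: " + version);
}).catch((e) => {
    console.error(e.message);
});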

View File

@@ -1,88 +0,0 @@
const { MonitorType } = require("./monitor-type");
const { UP } = require("../../src/util");
const childProcessAsync = require("promisify-child-process");
/**
* A TailscalePing class extends the MonitorType.
* It runs Tailscale ping to monitor the status of a specific node.
*/
class TailscalePing extends MonitorType {
name = "tailscale-ping";
/**
* Checks the ping status of the URL associated with the monitor.
* It then parses the Tailscale ping command output to update the heartbeat.
*
* @param {Object} monitor - The monitor object associated with the check.
* @param {Object} heartbeat - The heartbeat object to update.
* @throws Will throw an error if checking Tailscale ping encounters any error
*/
async check(monitor, heartbeat) {
try {
let tailscaleOutput = await this.runTailscalePing(monitor.hostname, monitor.interval);
this.parseTailscaleOutput(tailscaleOutput, heartbeat);
} catch (err) {
// trigger log function somewhere to display a notification or alert to the user (but how?)
throw new Error(`Error checking Tailscale ping: ${err}`);
}
}
/**
* Runs the Tailscale ping command against the given hostname.
*
* @param {string} hostname - The hostname to ping.
* @param {number} interval - The monitor's check interval in seconds; the command timeout is 80% of it.
* @returns {Promise<string>} - A Promise that resolves to the output of the Tailscale ping command
* @throws Will throw an error if the command execution encounters any error.
*/
async runTailscalePing(hostname, interval) {
let timeout = interval * 1000 * 0.8;
let res = await childProcessAsync.spawn("tailscale", [ "ping", "--c", "1", hostname ], {
timeout: timeout,
encoding: "utf8",
});
if (res.stderr && res.stderr.toString()) {
throw new Error(`Error in output: ${res.stderr.toString()}`);
}
if (res.stdout && res.stdout.toString()) {
return res.stdout.toString();
} else {
throw new Error("No output from Tailscale ping");
}
}
/**
* Parses the output of the Tailscale ping command to update the heartbeat.
*
* @param {string} tailscaleOutput - The output of the Tailscale ping command.
* @param {Object} heartbeat - The heartbeat object to update.
* @throws Will throw an error if the output contains any unexpected string.
*/
parseTailscaleOutput(tailscaleOutput, heartbeat) {
let lines = tailscaleOutput.split("\n");
for (let line of lines) {
if (line.includes("pong from")) {
heartbeat.status = UP;
let time = line.split(" in ")[1].split(" ")[0];
heartbeat.ping = parseInt(time);
heartbeat.msg = "OK";
break;
} else if (line.includes("timed out")) {
throw new Error(`Ping timed out: "${line}"`);
// Immediately throws upon a "timed out" message; the server is expected to re-call the check function
} else if (line.includes("no matching peer")) {
throw new Error(`Nonexistent or inaccessible due to ACLs: "${line}"`);
} else if (line.includes("is local Tailscale IP")) {
throw new Error(`Tailscale only works if used on other machines: "${line}"`);
} else if (line !== "") {
throw new Error(`Unexpected output: "${line}"`);
}
}
}
}
module.exports = {
TailscalePing,
};
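A sketch of the output line the parser above expects (the sample line is illustrative, not captured from a real run; the require path is assumed):

const { TailscalePing } = require("./server/monitor-types/tailscale-ping");

const tailscalePing = new TailscalePing();
const heartbeat = {};
// A successful line must contain "pong from" and end with "... in <n>ms"
// so the latency can be split out and parsed.
tailscalePing.parseTailscaleOutput("pong from my-node (100.64.0.1) via 192.0.2.10:41641 in 23ms", heartbeat);
console.log(heartbeat); // -> status set to UP, ping: 23, msg: "OK"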

View File

@@ -18,7 +18,7 @@ class AliyunSMS extends NotificationProvider {
status: this.statusToString(heartbeatJSON["status"]),
msg: heartbeatJSON["msg"],
});
if (await this.sendSms(notification, msgBody)) {
if (this.sendSms(notification, msgBody)) {
return okMsg;
}
} else {
@@ -28,7 +28,7 @@ class AliyunSMS extends NotificationProvider {
status: "",
msg: msg,
});
if (await this.sendSms(notification, msgBody)) {
if (this.sendSms(notification, msgBody)) {
return okMsg;
}
}
@@ -73,8 +73,7 @@ class AliyunSMS extends NotificationProvider {
if (result.data.Message === "OK") {
return true;
}
throw new Error(result.data.Message);
return false;
}
/**

Some files were not shown because too many files have changed in this diff