mirror of https://github.com/louislam/uptime-kuma.git
synced 2025-09-11 05:16:55 +08:00

Compare commits: 2.0.0-beta ... convert-ba (1 commit)

| Author | SHA1 | Date |
|--------|------|------|
|        | c3b166bd8f | |

28  .devcontainer/README.md  Normal file
@@ -0,0 +1,28 @@
# Codespaces

You can modify Uptime Kuma in your browser without setting up a local development environment.



1. Click `Code` -> `Create codespace on master`
2. Wait a few minutes until you see there are two exposed ports
3. Go to the `3000` url, see if it is working



## Frontend

Since the frontend is using [Vite.js](https://vitejs.dev/), all changes in this area will be hot-reloaded.
You don't need to restart the frontend, unless you try to add a new frontend dependency.

## Backend

The backend does not automatically hot-reload.
You will need to restart the backend after changing something using these steps:

1. Click `Terminal`
2. Click `Codespaces: server-dev` in the right panel
3. Press `Ctrl + C` to stop the server
4. Press `Up` to run `npm run start-server-dev`


23  .devcontainer/devcontainer.json  Normal file
@@ -0,0 +1,23 @@
{
    "image": "mcr.microsoft.com/devcontainers/javascript-node:dev-18-bookworm",
    "features": {
        "ghcr.io/devcontainers/features/github-cli:1": {}
    },
    "updateContentCommand": "npm ci",
    "postCreateCommand": "",
    "postAttachCommand": {
        "frontend-dev": "npm run start-frontend-devcontainer",
        "server-dev": "npm run start-server-dev",
        "open-port": "gh codespace ports visibility 3001:public -c $CODESPACE_NAME"
    },
    "customizations": {
        "vscode": {
            "extensions": [
                "streetsidesoftware.code-spell-checker",
                "dbaeumer.vscode-eslint",
                "GitHub.copilot-chat"
            ]
        }
    },
    "forwardPorts": [3000, 3001]
}
@@ -1,6 +1,7 @@
/.idea
/node_modules
/data*
/cypress
/out
/test
/kubernetes
@@ -17,6 +18,7 @@ README.md
.vscode
.eslint*
.stylelint*
/.devcontainer
/.github
yarn.lock
app.json
@@ -32,6 +34,7 @@ tsconfig.json
/extra/healthcheck.exe
/extra/healthcheck
/extra/exe-builder
/extra/push-examples
/extra/uptime-kuma-push

# Comment the following line if you want to rebuild the healthcheck binary
@@ -1,7 +1,8 @@
module.exports = {
    ignorePatterns: [
        "test/*.js",
        "server/modules/*",
        "test/cypress",
        "server/modules/apicache/*",
        "src/util.js"
    ],
    root: true,
33  .github/ISSUE_TEMPLATE/ask-for-help.yaml  vendored
@@ -6,7 +6,7 @@ body:
  - type: checkboxes
    id: no-duplicate-issues
    attributes:
      label: "⚠️ Please verify that this question has NOT been raised before."
      label: "⚠️ Please verify that this bug has NOT been raised before."
      description: "Search in the issues sections by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=)"
      options:
        - label: "I checked and didn't find similar issue"
@@ -24,7 +24,7 @@ body:
    validations:
      required: true
    attributes:
      label: "📝 Describe your problem"
      description: "Please walk us through it step by step. Include all important details and add screenshots where appropriate"
      description: "Please walk us through it step by step."
      placeholder: "Describe what are you asking for..."
  - type: textarea
    id: error-msg
@@ -56,20 +56,19 @@ body:
      placeholder: "Ex. Google Chrome 95.0.4638.69"
    validations:
      required: true
  - type: textarea
    id: deployment-info
  - type: input
    id: docker-version
    attributes:
      label: "🖥️ Deployment Environment"
      description: |
        examples:
        - **Runtime**: Docker 20.10.9 / nodejs 14.18.0 / K8S via ... v1.3.3 / ..
        - **Database**: sqlite/embedded mariadb/external mariadb
        - **Filesystem used to store the database on**: Windows/ZFS/btrfs/NFSv3 on a SSD/HDD/eMMC
        - **number of monitors**: 42
      value: |
        - Runtime:
        - Database:
        - Filesystem used to store the database on:
        - number of monitors:
      label: "🐋 Docker Version"
      description: "If running with Docker, which version are you running?"
      placeholder: "Ex. Docker 20.10.9 / K8S / Podman"
    validations:
      required: true
      required: false
  - type: input
    id: nodejs-version
    attributes:
      label: "🟩 NodeJS Version"
      description: "If running with Node.js? which version are you running?"
      placeholder: "Ex. 14.18.0"
    validations:
      required: false
45  .github/ISSUE_TEMPLATE/bug_report.yaml  vendored
@@ -3,14 +3,14 @@ description: "Submit a bug report to help us improve"
#title: "[Bug] "
labels: [bug]
body:
  - type: textarea
    id: related-issues
    validations:
      required: true
  - type: checkboxes
    id: no-duplicate-issues
    attributes:
      label: "📑 I have found these related issues/pull requests"
      description: "Search related issues by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=) and explain what the difference between them or explain that you are unable to find any related issues"
      placeholder: "Related to #1 by also touching the ... system. They should not be merged because ..."
      label: "⚠️ Please verify that this bug has NOT been raised before."
      description: "Search in the issues sections by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=)"
      options:
        - label: "I checked and didn't find similar issue"
          required: true
  - type: checkboxes
    attributes:
      label: "🛡️ Security Policy"
@@ -31,7 +31,7 @@ body:
      required: true
    attributes:
      label: "👟 Reproduction steps"
      description: "How do you trigger this bug? Please walk us through it step by step. Include all important details and add screenshots where appropriate"
      description: "How do you trigger this bug? Please walk us through it step by step."
      placeholder: "..."
  - type: textarea
    id: expected-behavior
@@ -73,23 +73,22 @@ body:
      placeholder: "Ex. Google Chrome 95.0.4638.69"
    validations:
      required: true
  - type: textarea
    id: deployment-info
  - type: input
    id: docker-version
    attributes:
      label: "🖥️ Deployment Environment"
      description: |
        examples:
        - **Runtime**: Docker 20.10.9 / nodejs 18.17.1 / K8S via ... v1.3.3 / ..
        - **Database**: sqlite/embedded mariadb/external mariadb
        - **Filesystem used to store the database on**: Windows/ZFS/btrfs/NFSv3 on a SSD/HDD/eMMC
        - **number of monitors**: 42
      value: |
        - Runtime:
        - Database:
        - Filesystem used to store the database on:
        - number of monitors:
      label: "🐋 Docker Version"
      description: "If running with Docker, which version are you running?"
      placeholder: "Ex. Docker 20.10.9 / K8S / Podman"
    validations:
      required: true
      required: false
  - type: input
    id: nodejs-version
    attributes:
      label: "🟩 NodeJS Version"
      description: "If running with Node.js? which version are you running?"
      placeholder: "Ex. 14.18.0"
    validations:
      required: false
  - type: textarea
    id: logs
    attributes:
2  .github/ISSUE_TEMPLATE/config.yml  vendored
@@ -1,2 +0,0 @@
---
blank_issues_enabled: false
29  .github/ISSUE_TEMPLATE/feature_request.yaml  vendored
@@ -3,14 +3,14 @@ description: "Submit a proposal for a new feature"
#title: "[Feature] "
labels: [feature-request]
body:
  - type: textarea
    id: related-issues
    validations:
      required: true
  - type: checkboxes
    id: no-duplicate-issues
    attributes:
      label: "📑 I have found these related issues/pull requests"
      description: "Search related issues by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=) and explain what the difference between them or explain that you are unable to find any related issues"
      placeholder: "Related to #1 by also touching the ... system. They should not be merged because ..."
      label: "⚠️ Please verify that this feature request has NOT been suggested before."
      description: "Search in the issues sections by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=)"
      options:
        - label: "I checked and didn't find similar feature request"
          required: true
  - type: dropdown
    id: feature-area
    attributes:
@@ -18,17 +18,10 @@ body:
      description: "What kind of feature request is this?"
      multiple: true
      options:
        - API / automation options
        - New notification-provider
        - Change to existing notification-provider
        - New monitor
        - Change to existing monitor
        - Dashboard
        - Status-page
        - Maintenance
        - Deployment
        - Certificate expiry
        - Settings
        - API
        - New Notification
        - New Monitor
        - UI Feature
        - Other
    validations:
      required: true
17  .github/ISSUE_TEMPLATE/security.md  vendored  Normal file
@@ -0,0 +1,17 @@
---

name: "Security Issue"
about: "Just for alerting @louislam, do not provide any details here"
title: "Security Issue"
ref: "main"
labels:

- security

---

DO NOT PROVIDE ANY DETAILS HERE. Please privately report to https://github.com/louislam/uptime-kuma/security/advisories/new.

Why is this issue needed? GitHub Advisory does not send a notification to @louislam, so this issue is a workaround to do so.

Your GitHub Advisory URL:
45  .github/ISSUE_TEMPLATE/security_issue.yml  vendored
@@ -1,45 +0,0 @@
---
name: "🛡️ Security Issue"
description: |
  Notify Louis Lam about a security concern. Please do NOT include any sensitive details in this issue.
# title: "Security Issue"
labels: [security]
assignees: [louislam]
body:
  - type: "markdown"
    attributes:
      value: |
        ## **⚠️ Report a Security Vulnerability**

        ### **IMPORTANT: DO NOT SHARE VULNERABILITY DETAILS HERE**

        If you have discovered a security vulnerability, please report it securely using the GitHub Security Advisory.

        **Note**: This issue is only for notifying the maintainers of the repository, as the GitHub Security Advisory does not automatically send notifications.

        - **Confidentiality**: The information you provide in the GitHub Security Advisory will initially remain confidential. However, once the vulnerability is addressed, the advisory will be publicly disclosed on GitHub.
        - **Access and Visibility**: Until the advisory is published, it will only be visible to the maintainers of the repository and invited collaborators.
        - **Credit**: You will be automatically credited as a contributor for identifying and reporting the vulnerability. Your contribution will be reflected in the MITRE Credit System.
        - **Important Reminder**: **Do not include any sensitive or detailed vulnerability information in this issue.** This issue is only for sharing the advisory URL to notify the maintainers of the repository, not for discussing the vulnerability itself.

        **Thank you for helping us keep Uptime Kuma secure!**

        ## **Step 1: Submit a GitHub Security Advisory**

        Right-click the link below and select `Open link in new tab` to access the page. This will keep the security issue open, allowing you to easily return and paste the Advisory URL here later.

        ➡️ [Create a New Security Advisory](https://github.com/louislam/uptime-kuma/security/advisories/new)

        ## **Step 2: Share the Advisory URL**

        Once you've created your advisory, please share the URL below. This will notify Louis Lam and enable them to take the appropriate action.

  - type: "textarea"
    id: github-advisory-url
    validations:
      required: true
    attributes:
      label: "GitHub Advisory URL for @louislam"
      placeholder: |
        Please paste the GitHub Advisory URL here. Only the URL is required.
        Example: https://github.com/louislam/uptime-kuma/security/advisories/GHSA-8h5r-7t6l-q3kz
16  .github/workflows/auto-test.yml  vendored
@@ -15,14 +15,14 @@ on:

jobs:
  auto-test:
    needs: [ check-linters ]
    needs: [ check-linters, e2e-test ]
    runs-on: ${{ matrix.os }}
    timeout-minutes: 15

    strategy:
      matrix:
        os: [macos-latest, ubuntu-latest, windows-latest, ARM64]
        node: [ 18, 20 ]
        node: [ 14, 20.5 ]
        # See supported Node.js release schedule at https://nodejs.org/en/about/releases/

    steps:
@@ -33,6 +33,7 @@ jobs:
        uses: actions/setup-node@v4
        with:
          node-version: ${{ matrix.node }}
      - run: npm install npm@9 -g
      - run: npm install
      - run: npm run build
      - run: npm run test-backend
@@ -42,14 +43,14 @@ jobs:

  # As a lot of dev dependencies are not supported on ARMv7, we have to test it separately and just test if `npm ci --production` works
  armv7-simple-test:
    needs: [ ]
    needs: [ check-linters ]
    runs-on: ${{ matrix.os }}
    timeout-minutes: 15
    if: ${{ github.repository == 'louislam/uptime-kuma' }}

    strategy:
      matrix:
        os: [ ARMv7 ]
        node: [ 18, 20 ]
        node: [ 14, 20 ]
        # See supported Node.js release schedule at https://nodejs.org/en/about/releases/

    steps:
@@ -60,6 +61,7 @@ jobs:
        uses: actions/setup-node@v4
        with:
          node-version: ${{ matrix.node }}
      - run: npm install npm@9 -g
      - run: npm ci --production

  check-linters:
@@ -77,8 +79,8 @@ jobs:
      - run: npm run lint:prod

  e2e-test:
    needs: [ ]
    runs-on: ubuntu-24.04-arm
    needs: [ check-linters ]
    runs-on: ARM64
    steps:
      - run: git config --global core.autocrlf false # Mainly for Windows
      - uses: actions/checkout@v4
2  .github/workflows/close-incorrect-issue.yml  vendored
@@ -11,7 +11,7 @@ jobs:
    strategy:
      matrix:
        os: [ubuntu-latest]
        node-version: [18]
        node-version: [16]

    steps:
      - uses: actions/checkout@v4
25  .github/workflows/conflict_labeler.yml  vendored
@@ -1,25 +0,0 @@
name: Merge Conflict Labeler

on:
  push:
    branches:
      - master
  pull_request_target:
    branches:
      - master
    types: [synchronize]

jobs:
  label:
    name: Labeling
    runs-on: ubuntu-latest
    if: ${{ github.repository == 'louislam/uptime-kuma' }}
    permissions:
      contents: read
      pull-requests: write
    steps:
      - name: Apply label
        uses: eps1lon/actions-label-merge-conflict@v3
        with:
          dirtyLabel: 'needs:resolve-merge-conflict'
          repoToken: '${{ secrets.GITHUB_TOKEN }}'
@@ -1,4 +1,4 @@
name: validate
name: json-yaml-validate
on:
  push:
    branches:
@@ -25,19 +25,3 @@ jobs:
        with:
          comment: "true" # enable comment mode
          exclude_file: ".github/config/exclude.txt" # gitignore style file for exclusions

  # General validations
  validate:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Use Node.js 20
        uses: actions/setup-node@v4
        with:
          node-version: 20

      - name: Validate language JSON files
        run: node ./extra/check-lang-json.js

      - name: Validate knex migrations filename
        run: node ./extra/check-knex-filenames.mjs
30  .github/workflows/stale-bot.yml  vendored
@@ -9,34 +9,14 @@ jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/stale@v9
      - uses: actions/stale@v8
        with:
          stale-issue-message: |-
            We are clearing up our old `help`-issues and your issue has been open for 60 days with no activity.
            If no comment is made and the stale label is not removed, this issue will be closed in 7 days.
          days-before-stale: 60
          days-before-close: 7
          stale-issue-message: 'We are clearing up our old issues and your ticket has been open for 3 months with no activity. Remove stale label or comment or this will be closed in 2 days.'
          close-issue-message: 'This issue was closed because it has been stalled for 2 days with no activity.'
          days-before-stale: 90
          days-before-close: 2
          days-before-pr-stale: -1
          days-before-pr-close: -1
          exempt-issue-labels: 'News,Medium,High,discussion,bug,doc,feature-request'
          exempt-issue-assignees: 'louislam'
          operations-per-run: 200
      - uses: actions/stale@v9
        with:
          stale-issue-message: |-
            This issue was marked as `cannot-reproduce` by a maintainer.
            If an issue is non-reproducible, we cannot fix it, as we do not know what the underlying issue is.
            If you have any ideas how we can reproduce this issue, we would love to hear them.

            We don't have a good way to deal with truly unreproducible issues and are going to close this issue in a month.
            If you think there might be other differences in our environment or in how we tried to reproduce this, we would appreciate any ideas.
          close-issue-message: |-
            This issue will be closed as no way to reproduce it has been found.
            If you/somebody finds a way how to (semi-reliably) reproduce this, we can reopen this issue. ^^
          days-before-stale: 180
          days-before-close: 30
          days-before-pr-stale: -1
          days-before-pr-close: -1
          any-of-issue-labels: 'cannot-reproduce'
          operations-per-run: 200
412  CONTRIBUTING.md
@@ -1,193 +1,76 @@
# Project Info

First of all, I want to thank everyone who has submitted issues or shared pull requests for Uptime Kuma.
I never thought the GitHub community would be so nice!
Because of this, I also never thought that other people would actually read and edit my code.
Parts of the code are not very well-structured or commented, sorry about that.
First of all, I want to thank everyone who have made pull requests for Uptime Kuma. I never thought the GitHub community would be so nice! Because of this, I also never thought that other people would actually read and edit my code. It is not very well structured or commented, sorry about that.

The project was created with `vite.js` and is written in `vue3`.
Our backend lives in the `server`-directory and mostly communicates via websockets.
Both frontend and backend share the same `package.json`.
The project was created with vite.js (vue3). Then I created a subdirectory called "server" for the server part. Both frontend and backend share the same `package.json`.

For production, the frontend is built into the `dist`-directory and the server (`express.js`) exposes the `dist` directory as the root of the endpoint.
For development, we run vite in development mode on another port.
The frontend code builds into "dist" directory. The server (express.js) exposes the "dist" directory as the root of the endpoint. This is how production is working.

## Key Technical Skills

- Node.js (You should know about promises, async/await, arrow functions, etc.)
- Socket.io
- SCSS
- Vue.js
- Bootstrap
- SQLite

## Directories

- `config` (dev config files)
- `data` (App data)
- `db` (Base database and migration scripts)
- `dist` (Frontend build)
- `docker` (Dockerfiles)
- `extra` (Extra useful scripts)
- `public` (Frontend resources for dev only)
- `server` (Server source code)
- `src` (Frontend source code)
- `test` (unit test)
- config (dev config files)
- data (App data)
- db (Base database and migration scripts)
- dist (Frontend build)
- docker (Dockerfiles)
- extra (Extra useful scripts)
- public (Frontend resources for dev only)
- server (Server source code)
- src (Frontend source code)
- test (unit test)

## Can I create a pull request for Uptime Kuma?

Yes or no, it depends on what you will try to do.
Both yours and our maintainers' time is precious, and we don't want to waste either.
Yes or no, it depends on what you will try to do. Since I don't want to waste your time, be sure to **create an empty draft pull request or open an issue, so we can have a discussion first**. Especially for a large pull request or you don't know if it will be merged or not.

If you have any questions or any process is not clear, you are likely not alone => please ask them ^^
Here are some references:

Different guidelines exist for different types of pull requests (PRs):
- <details><summary><b>security fixes</b></summary>
  <p>

  Submitting security fixes is something that may put the community at risk.
  Please read through our [security policy](SECURITY.md) and submit vulnerabilities via an [advisory](https://github.com/louislam/uptime-kuma/security/advisories/new) + [issue](https://github.com/louislam/uptime-kuma/issues/new?assignees=&labels=help&template=security.md) instead.
  We encourage you to submit how to fix a vulnerability if you know how to, this is not required.
  Following the security policy allows us to properly test, fix bugs.
  This review allows us to notice, if there are any changes necessary to unrelated parts like the documentation.
  [**PLEASE SEE OUR SECURITY POLICY.**](SECURITY.md)

  </p>
  </details>
- <details><summary><b>small, non-breaking bug fixes</b></summary>
  <p>

  If you come across a bug and think you can solve it, we appreciate your work.
  Please make sure that you follow these rules:
  - keep the PR as small as possible, fix only one thing at a time => keeping it reviewable
  - test that your code does what you claim it does.

  <sub>Because maintainer time is precious, junior maintainers may merge uncontroversial PRs in this area.</sub>
  </p>
  </details>
- <details><summary><b>translations / internationalisation (i18n)</b></summary>
  <p>

  We use weblate to localise this project into many languages.
  If you are unhappy with a translation this is the best start.
  On how to translate using weblate, please see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).

  There are two cases in which a change cannot be done in weblate and requires a PR:
  - A text may not be currently localisable. In this case, **adding a new language key** via `$t("languageKey")` might be necessary
  - language keys need to be **added to `en.json`** to be visible in weblate. If this has not happened, a PR is appreciated.
  - **Adding a new language** requires a new file, see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md)

  <sub>Because maintainer time is precious, junior maintainers may merge uncontroversial PRs in this area.</sub>
  </p>
  </details>
- <details><summary><b>new notification providers</b></summary>
  <p>

  To set up a new notification provider these files need to be modified/created (a minimal provider sketch follows after this list):
  - `server/notification-providers/PROVIDER_NAME.js` is where the heart of the notification provider lives.
    - Both `monitorJSON` and `heartbeatJSON` can be `null` for some events.
      If both are `null`, this is a general testing message, but if just `heartbeatJSON` is `null` this is a certificate expiry.
    - Please wrap the axios call into a
      ```js
      try {
          let result = await axios.post(...);
          if (result.status === ...) ...
      } catch (error) {
          this.throwGeneralAxiosError(error);
      }
      ```
  - `server/notification.js` is where the backend of the notification provider needs to be registered.
    *If you have an idea how we can skip this step, we would love to hear about it ^^*
  - `src/components/NotificationDialog.vue` you need to decide if the provider is a regional or a global one and add it with a name to the respective list
  - `src/components/notifications/PROVIDER_NAME.vue` is where the frontend of each provider lives.
    Please make sure that you have:
    - used `HiddenInput` for secret credentials
    - included all the necessary helptexts/placeholder/.. to make sure the notification provider is simple to setup for new users.
    - include all translations (`{{ $t("Translation key") }}`, [`i18n-t keypath="Translation key">`](https://vue-i18n.intlify.dev/guide/advanced/component.html)) in `src/lang/en.json` to enable our translators to translate this
  - `src/components/notifications/index.js` is where the frontend of the provider needs to be registered.
    *If you have an idea how we can skip this step, we would love to hear about it ^^*

  ### ✅ Usually accepted

  Offering notifications is close to the core of what we are as an uptime monitor.
  Therefore, making sure that they work is also really important.
  Because testing notification providers is quite time intensive, we mostly offload this onto the person contributing a notification provider.

  To make sure you have tested the notification provider, please include screenshots of the following events in the pull-request description:
  - `UP`/`DOWN`
  - Certificate Expiry via https://expired.badssl.com/
  - Testing (the test button on the notification provider setup page)

  Using the following way to format this is encouraged:
  ```md
  | Event | Before | After |
  ------------------
  | `UP` | paste-image-here | paste-image-here |
  | `DOWN` | paste-image-here | paste-image-here |
  | Certificate-expiry | paste-image-here | paste-image-here |
  | Testing | paste-image-here | paste-image-here |
  ```
  - Bug fix
  - Security fix
  - Adding notification providers
  - Adding new language files (see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md))
  - Adding new language keys: `$t("...")`

  <sub>Because maintainer time is precious, junior maintainers may merge uncontroversial PRs in this area.</sub>
  </p>
  </details>
- <details><summary><b>new monitoring types</b></summary>
  <p>

  ### ⚠️ Discussion required

  To set up a new monitoring type these files need to be modified/created (a minimal monitor-type sketch follows after this list):
  - `server/monitor-types/MONITORING_TYPE.js` is the core of each monitor.
    the `async check(...)`-function should:
    - throw an error for each fault that is detected with an actionable error message
    - in the happy-path, you should set `heartbeat.msg` to a successful message and set `heartbeat.status = UP`
  - `server/uptime-kuma-server.js` is where the monitoring backend needs to be registered.
    *If you have an idea how we can skip this step, we would love to hear about it ^^*
  - `src/pages/EditMonitor.vue` is the shared frontend users interact with.
    Please make sure that you have:
    - used `HiddenInput` for secret credentials
    - included all the necessary helptexts/placeholder/.. to make sure the notification provider is simple to setup for new users.
    - include all translations (`{{ $t("Translation key") }}`, [`i18n-t keypath="Translation key">`](https://vue-i18n.intlify.dev/guide/advanced/component.html)) in `src/lang/en.json` to enable our translators to translate this
  -
  - Large pull requests
  - New features

  ### ❌ Won't be merged

  <sub>Because maintainer time is precious, junior maintainers may merge uncontroversial PRs in this area.</sub>
  </p>
  </details>
- <details><summary><b>new features/ major changes / breaking bugfixes</b></summary>
  <p>

  be sure to **create an empty draft pull request or open an issue, so we can have a discussion first**.
  This is especially important for a large pull request or when you don't know if it will be merged or not.

  <sub>Because of the large impact of this work, only senior maintainers may merge PRs in this area.</sub>
  </p>
  </details>
- A dedicated PR for translating existing languages (see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md))
- Do not pass the auto-test
- Any breaking changes
- Duplicated pull requests
- Buggy
- UI/UX is not close to Uptime Kuma
- Modifications or deletions of existing logic without a valid reason.
- Adding functions that is completely out of scope
- Converting existing code into other programming languages
- Unnecessarily large code changes that are hard to review and cause conflicts with other PRs.
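
For orientation, here is a minimal sketch of what a provider created along the "new notification providers" steps above could look like. The base-class require path, the `send()` signature and `throwGeneralAxiosError()` are assumptions inferred from the guideline text and its axios snippet, not a verified API, and `exampleWebhookURL` is a made-up setting:

```js
// Hypothetical server/notification-providers/example-provider.js (illustrative only)
const NotificationProvider = require("./notification-provider");
const axios = require("axios");

class Example extends NotificationProvider {
    name = "Example";

    /**
     * monitorJSON and heartbeatJSON can both be null (general test message);
     * only heartbeatJSON being null signals a certificate-expiry notification.
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        try {
            // exampleWebhookURL is a hypothetical field configured in the provider's .vue form
            let result = await axios.post(notification.exampleWebhookURL, { text: msg });
            if (result.status !== 200) {
                throw new Error(`Unexpected status code: ${result.status}`);
            }
            return "Sent Successfully.";
        } catch (error) {
            this.throwGeneralAxiosError(error);
        }
    }
}

module.exports = Example;
```

The provider would still need to be registered in `server/notification.js` and given a settings form under `src/components/notifications/`, as the list above describes.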
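
Likewise, a minimal sketch of a monitor type following the `check()` contract described in the "new monitoring types" item: throw with an actionable message on every fault, otherwise set `heartbeat.msg` and `heartbeat.status = UP`. The `MonitorType` base class and the `UP` import path are assumptions about the surrounding code, and `probe()` is a placeholder:

```js
// Hypothetical server/monitor-types/example.js (illustrative only)
const { MonitorType } = require("./monitor-type");
const { UP } = require("../../src/util");

class ExampleMonitorType extends MonitorType {
    name = "example";

    /** Called by the monitoring backend for every beat of this monitor. */
    async check(monitor, heartbeat, _server) {
        const reachable = await this.probe(monitor.hostname);
        if (!reachable) {
            throw new Error(`Cannot reach ${monitor.hostname}, please check the hostname and firewall rules`);
        }
        heartbeat.msg = "OK";
        heartbeat.status = UP;
    }

    /** Placeholder for the protocol-specific logic of the new monitor type. */
    async probe(hostname) {
        return Boolean(hostname);
    }
}

module.exports = { ExampleMonitorType };
```

As with notification providers, the new type would still have to be registered in `server/uptime-kuma-server.js` and surfaced in `src/pages/EditMonitor.vue`.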
The following rules are essential for making your PR mergable:
- Merging multiple issues by a huge PR is more difficult to review and causes conflicts with other PRs. Please
  - (if possible) **create one PR for one issue** or
  - (if not possible) **explain which issues a PR addresses and why this PR should not be broken apart**
- Make sure your **PR passes our continuous integration**.
  PRs will not be merged unless all CI-Checks are green.
- **Breaking changes** (unless for a good reason and discussed beforehand) will not get merged / not get merged quickly.
  Such changes require a major version release.
- **Test your code** before submitting a PR.
  Buggy PRs will not be merged.
- Make sure the **UI/UX is close to Uptime Kuma**.
- **Think about the maintainability**:
  Don't add functionality that is completely **out of scope**.
  Keep in mind that we need to be able to maintain the functionality.
- Don't modify or delete existing logic without a valid reason.
- Don't convert existing code into other programming languages for no reason.

The above cases may not cover all possible situations.

I ([@louislam](https://github.com/louislam)) have the final say.
If your pull request does not meet my expectations, I will reject it, no matter how much time you spent on it.
Therefore, it is essential to have a discussion beforehand.
I ([@louislam](https://github.com/louislam)) have the final say. If your pull request does not meet my expectations, I will reject it, no matter how much time you spent on it. Therefore, it is essential to have a discussion beforehand.

I will assign your pull request to a [milestone](https://github.com/louislam/uptime-kuma/milestones), if I plan to review and merge it.

Please don't rush or ask for an ETA.
We have to understand the pull request, make sure it has no breaking changes and stick to the vision of this project, especially for large pull requests.

## I'd like to work on an issue. How do I do that?

We have found that assigning people to issues is management-overhead that we don't need.
A short comment that you want to try your hand at this issue is appreciated to save other devs time.
If you come across any problem during development, feel free to leave a comment with what you are stuck on.
Also, please don't rush or ask for an ETA, because I have to understand the pull request, make sure it is no breaking changes and stick to my vision of this project, especially for large pull requests.

### Recommended Pull Request Guideline

Before diving deep into coding, having a discussion first by creating an empty pull request for discussion is preferred.
The rationale behind this is that we can align the direction and scope of the feature to eliminate any conflicts with existing and planned work, and can help by pointing out any potential pitfalls.
Before deep into coding, discussion first is preferred. Creating an empty pull request for discussion would be recommended.

1. Fork the project
2. Clone your fork repo to local
@@ -201,16 +84,10 @@ The rationale behind this is that we can align the direction and scope of the fe

## Project Styles

I personally do not like something that requires a lot of configuration before you can finally start the app.
The goal is to make the Uptime Kuma installation as easy as installing a mobile app.
I personally do not like something that requires so many configurations before you can finally start the app. I hope Uptime Kuma installation will be as easy as like installing a mobile app.

- Easy to install for non-Docker users
  - no native build dependency is needed (for `x86_64`/`armv7`/`arm64`)
  - no extra configuration and
  - no extra effort required to get it running
- Single container for Docker users
  - no complex docker-compose file
  - mapping the volume and exposing the port should be the only requirements
- Easy to install for non-Docker users, no native build dependency is needed (for x86_64/armv7/arm64), no extra config, and no extra effort required to get it running
- Single container for Docker users, no very complex docker-compose file. Just map the volume and expose the port, then good to go
- Settings should be configurable in the frontend. Environment variables are discouraged, unless it is related to startup such as `DATA_DIR`
- Easy to use
- The web UI styling should be consistent and nice
@@ -230,12 +107,18 @@ The goal is to make the Uptime Kuma installation as easy as installing a mobile

## Tools

- [`Node.js`](https://nodejs.org/) >= 18
- [`npm`](https://www.npmjs.com/) >= 9.3
- [`Node.js`](https://nodejs.org/) >= 14
- [`npm`](https://www.npmjs.com/) >= 8.5
- [`git`](https://git-scm.com/)
- IDE that supports [`ESLint`](https://eslint.org/) and EditorConfig (I am using [`IntelliJ IDEA`](https://www.jetbrains.com/idea/))
- A SQLite GUI tool (f.ex. [`SQLite Expert Personal`](https://www.sqliteexpert.com/download.html) or [`DBeaver Community`](https://dbeaver.io/download/))

### GitHub Codespaces

If you don't want to set up a local environment, you can now develop on GitHub Codespaces, read more:

https://github.com/louislam/uptime-kuma/tree/master/.devcontainer

## Git Branches

- `master`: 2.X.X development. If you want to add a new feature, your pull request should base on this.
@@ -260,7 +143,7 @@ Port `3000` and port `3001` will be used.
npm run dev
```

But sometimes you may want to restart the server without restarting the frontend. In that case, you can run these commands in two terminals:
But sometimes, you would like to restart the server, but not the frontend, you can run these commands in two terminals:

```bash
npm run start-frontend-dev
@@ -271,25 +154,25 @@ npm run start-server-dev

It binds to `0.0.0.0:3001` by default.

The backend is an `express.js` server with `socket.io` integrated.
It uses `socket.io` to communicate with clients, and most server logic is encapsulated in the `socket.io` handlers.
`express.js` is also used to serve:
It is mainly a socket.io app + express.js.

- as an entry point for redirecting to a status page or the dashboard
- the frontend built files (`index.html`, `*.js`, `*.css`, etc.)
- internal APIs of the status page
express.js is used for:

### Structure in `/server/`
- entry point such as redirecting to a status page or the dashboard
- serving the frontend built files (index.html, .js and .css etc.)
- serving internal APIs of the status page

- `jobs/` (Jobs that are running in another process)
- `model/` (Object model, auto-mapping to the database table name)
- `modules/` (Modified 3rd-party modules)
- `monitor_types/` (Monitor Types)
- `notification-providers/` (individual notification logic)
- `routers/` (Express Routers)
- `socket-handler/` (Socket.io Handlers)
- `server.js` (Server entry point)
- `uptime-kuma-server.js` (UptimeKumaServer class, main logic should be here, but some still in `server.js`)
### Structure in /server/

- jobs/ (Jobs that are running in another process)
- model/ (Object model, auto-mapping to the database table name)
- modules/ (Modified 3rd-party modules)
- monitor_types (Monitor Types)
- notification-providers/ (individual notification logic)
- routers/ (Express Routers)
- socket-handler (Socket.io Handlers)
- server.js (Server entry point)
- uptime-kuma-server.js (UptimeKumaServer class, main logic should be here, but some still in `server.js`)
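
Because most server logic is encapsulated in the socket.io handlers listed above, a handler module is essentially a function that receives the socket and registers event listeners which report back through a callback. The event name, payload and response shape below are assumptions for illustration, not an actual handler from the repository:

```js
// Hypothetical server/socket-handlers/example-socket-handler.js (illustrative only)
module.exports.exampleSocketHandler = (socket) => {
    socket.on("exampleAction", async (payload, callback) => {
        try {
            // ... perform the actual work for this event here ...
            callback({
                ok: true,
                msg: "Done",
            });
        } catch (error) {
            callback({
                ok: false,
                msg: error.message,
            });
        }
    });
};
```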
## Frontend Dev Server

@@ -328,15 +211,14 @@ npm test

## Dependencies

Both frontend and backend share the same `package.json`.
However, the frontend dependencies are eventually not used in the production environment, because it is usually also baked into `dist` files. So:
Both frontend and backend share the same package.json. However, the frontend dependencies are eventually not used in the production environment, because it is usually also baked into dist files. So:

- Frontend dependencies = "devDependencies"
  - Examples: `vue`, `chart.js`
  - Examples: vue, chart.js
- Backend dependencies = "dependencies"
  - Examples: `socket.io`, `sqlite3`
  - Examples: socket.io, sqlite3
- Development dependencies = "devDependencies"
  - Examples: `eslint`, `sass`
  - Examples: eslint, sass

### Update Dependencies
@@ -407,106 +289,54 @@ https://github.com/louislam/uptime-kuma-wiki
Check the latest issues and pull requests:
https://github.com/louislam/uptime-kuma/issues?q=sort%3Aupdated-desc

### What is a maintainer and what are their roles?
### Release Procedures

This project has multiple maintainers who specialise in different areas.
Currently, there are 3 maintainers:
1. Draft a release note
2. Make sure the repo is cleared
3. If the healthcheck is updated, remember to re-compile it: `npm run build-docker-builder-go`
4. `npm run release-final` with env vars: `VERSION` and `GITHUB_TOKEN`
5. Wait until the `Press any key to continue`
6. `git push`
7. Publish the release note as 1.X.X
8. Press any key to continue
9. Deploy to the demo server: `npm run deploy-demo-server`

| Person            | Role              | Main Area        |
|-------------------|-------------------|------------------|
| `@louislam`       | senior maintainer | major features   |
| `@chakflying`     | junior maintainer | fixing bugs      |
| `@commanderstorm` | junior maintainer | issue-management |
Checking:

### Procedures
- Check all tags is fine on https://hub.docker.com/r/louislam/uptime-kuma/tags
- Try the Docker image with tag 1.X.X (Clean install / amd64 / arm64 / armv7)
- Try clean installation with Node.js

We have a few procedures we follow. These are documented here:
- <details><summary>Set up a Docker Builder</summary>
<p>
### Release Beta Procedures

- amd64, armv7 using local.
- arm64 using remote arm64 cpu, as the emulator is too slow and can no longer pass the `npm ci` command.
1. Add the public key to the remote server.
2. Add the remote context. The remote machine must be arm64 and installed Docker CE.
   ```
   docker context create oracle-arm64-jp --docker "host=ssh://root@100.107.174.88"
   ```
3. Create a new builder.
   ```
   docker buildx create --name kuma-builder --platform linux/amd64,linux/arm/v7
   docker buildx use kuma-builder
   docker buildx inspect --bootstrap
   ```
4. Append the remote context to the builder.
   ```
   docker buildx create --append --name kuma-builder --platform linux/arm64 oracle-arm64-jp
   ```
5. Verify the builder and check if the builder is using `kuma-builder`.
   ```
   docker buildx inspect kuma-builder
   docker buildx ls
   ```
</p>
</details>
- <details><summary>Release</summary>
<p>
1. Draft a release note, check "This is a pre-release"
2. Make sure the repo is cleared
3. `npm run release-beta` with env vars: `VERSION` and `GITHUB_TOKEN`
4. Wait until the `Press any key to continue`
5. Publish the release note as 1.X.X-beta.X
6. Press any key to continue

1. Draft a release note
2. Make sure the repo is cleared
3. If the healthcheck is updated, remember to re-compile it: `npm run build-docker-builder-go`
4. `npm run release-final` with env vars: `VERSION` and `GITHUB_TOKEN`
5. Wait until the `Press any key to continue`
6. `git push`
7. Publish the release note as `1.X.X`
8. Press any key to continue
9. Deploy to the demo server: `npm run deploy-demo-server`
### Release Wiki

These Items need to be checked:
#### Setup Repo

- [ ] Check all tags is fine on https://hub.docker.com/r/louislam/uptime-kuma/tags
- [ ] Try the Docker image with tag 1.X.X (Clean install / amd64 / arm64 / armv7)
- [ ] Try clean installation with Node.js

</p>
</details>
- <details><summary>Release Beta</summary>
<p>
```bash
git clone https://github.com/louislam/uptime-kuma-wiki.git
cd uptime-kuma-wiki
git remote add production https://github.com/louislam/uptime-kuma.wiki.git
```

1. Draft a release note, check `This is a pre-release`
2. Make sure the repo is cleared
3. `npm run release-beta` with env vars: `VERSION` and `GITHUB_TOKEN`
4. Wait until the `Press any key to continue`
5. Publish the release note as `1.X.X-beta.X`
6. Press any key to continue

</p>
</details>
- <details><summary>Release Wiki</summary>
<p>
#### Push to Production Wiki

**Setup Repo**

```bash
git clone https://github.com/louislam/uptime-kuma-wiki.git
cd uptime-kuma-wiki
git remote add production https://github.com/louislam/uptime-kuma.wiki.git
```

**Push to Production Wiki**

```bash
git pull
git push production master
```

</p>
</details>
- <details><summary>Change the base of a pull request such as <code>master</code> to <code>1.23.X</code></summary>
<p>

```bash
git rebase --onto <new parent> <old parent>
```

</p>
</details>
```bash
git pull
git push production master
```

## Useful Commands

Change the base of a pull request such as `master` to `1.23.X`

```bash
git rebase --onto <new parent> <old parent>
```
21  README.md
@@ -6,7 +6,7 @@

Uptime Kuma is an easy-to-use self-hosted monitoring tool.

<a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/stars/louislam/uptime-kuma?style=flat" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/pulls/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/v/louislam/uptime-kuma/latest?label=docker%20image%20ver." /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/last-commit/louislam/uptime-kuma" /></a> <a target="_blank" href="https://opencollective.com/uptime-kuma"><img src="https://opencollective.com/uptime-kuma/total/badge.svg?label=Open%20Collective%20Backers&color=brightgreen" /></a>
<a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/stars/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/pulls/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/v/louislam/uptime-kuma/latest?label=docker%20image%20ver." /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/last-commit/louislam/uptime-kuma" /></a> <a target="_blank" href="https://opencollective.com/uptime-kuma"><img src="https://opencollective.com/uptime-kuma/total/badge.svg?label=Open%20Collective%20Backers&color=brightgreen" /></a>
[](https://github.com/sponsors/louislam) <a href="https://weblate.kuma.pet/projects/uptime-kuma/uptime-kuma/">
<img src="https://weblate.kuma.pet/widgets/uptime-kuma/-/svg-badge.svg" alt="Translation status" />
</a>
@@ -17,9 +17,9 @@ Uptime Kuma is an easy-to-use self-hosted monitoring tool.

Try it!

Demo Server (Location: Frankfurt - Germany): https://demo.kuma.pet/start-demo
- Tokyo Demo Server: https://demo.uptime.kuma.pet (Sponsored by [Uptime Kuma Sponsors](https://github.com/louislam/uptime-kuma#%EF%B8%8F-sponsors))

It is a temporary live demo, all data will be deleted after 10 minutes. Sponsored by [Uptime Kuma Sponsors](https://github.com/louislam/uptime-kuma#%EF%B8%8F-sponsors).
It is a temporary live demo, all data will be deleted after 10 minutes. Use the one that is closer to you, but I suggest that you should install and try it out for the best demo experience.

## ⭐ Features

@@ -43,18 +43,11 @@ It is a temporary live demo, all data will be deleted after 10 minutes. Sponsore
docker run -d --restart=always -p 3001:3001 -v uptime-kuma:/app/data --name uptime-kuma louislam/uptime-kuma:1
```

Uptime Kuma is now running on <http://0.0.0.0:3001>.
Uptime Kuma is now running on http://localhost:3001

> [!WARNING]
> File Systems like **NFS** (Network File System) are **NOT** supported. Please map to a local directory or volume.

> [!NOTE]
> If you want to limit exposure to localhost (without exposing port for other users or to use a [reverse proxy](https://github.com/louislam/uptime-kuma/wiki/Reverse-Proxy)), you can expose the port like this:
>
> ```bash
> docker run -d --restart=always -p 127.0.0.1:3001:3001 -v uptime-kuma:/app/data --name uptime-kuma louislam/uptime-kuma:1
> ```

### 💪🏻 Non-Docker

Requirements:
@@ -62,14 +55,16 @@ Requirements:
- Platform
  - ✅ Major Linux distros such as Debian, Ubuntu, CentOS, Fedora and ArchLinux etc.
  - ✅ Windows 10 (x64), Windows Server 2012 R2 (x64) or higher
  - ❌ FreeBSD / OpenBSD / NetBSD
  - ❌ Replit / Heroku
- [Node.js](https://nodejs.org/en/download/) 18 / 20.4
- [Node.js](https://nodejs.org/en/download/) 14 / 16 / 18 / 20.4
- [npm](https://docs.npmjs.com/cli/) 9
- [Git](https://git-scm.com/downloads)
- [pm2](https://pm2.keymetrics.io/) - For running Uptime Kuma in the background

```bash
# Update your npm
npm install npm@9 -g

git clone https://github.com/louislam/uptime-kuma.git
cd uptime-kuma
npm run setup
@@ -1,9 +0,0 @@
services:
  uptime-kuma:
    image: louislam/uptime-kuma:1
    volumes:
      - ./data:/app/data
    ports:
      # <Host Port>:<Container Port>
      - 3001:3001
    restart: unless-stopped
28  config/cypress.config.js  Normal file
@@ -0,0 +1,28 @@
const { defineConfig } = require("cypress");

module.exports = defineConfig({
    projectId: "vyjuem",
    e2e: {
        experimentalStudio: true,
        setupNodeEvents(on, config) {

        },
        fixturesFolder: "test/cypress/fixtures",
        screenshotsFolder: "test/cypress/screenshots",
        videosFolder: "test/cypress/videos",
        downloadsFolder: "test/cypress/downloads",
        supportFile: "test/cypress/support/e2e.js",
        baseUrl: "http://localhost:3002",
        defaultCommandTimeout: 10000,
        pageLoadTimeout: 60000,
        viewportWidth: 1920,
        viewportHeight: 1080,
        specPattern: [
            "test/cypress/e2e/setup.cy.js",
            "test/cypress/e2e/**/*.js"
        ],
    },
    env: {
        baseUrl: "http://localhost:3002",
    },
});
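
For reference, a spec picked up by the `specPattern` above would live under `test/cypress/e2e/` and run against the configured `baseUrl` (`http://localhost:3002`). The file name, route and assertion below are assumptions for illustration, not actual tests from the repository:

```js
// Hypothetical test/cypress/e2e/example.cy.js (illustrative only)
describe("smoke test", () => {
    it("serves the app at the configured baseUrl", () => {
        cy.visit("/");
        cy.get("body").should("be.visible");
    });
});
```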
10  config/cypress.frontend.config.js  Normal file
@@ -0,0 +1,10 @@
const { defineConfig } = require("cypress");

module.exports = defineConfig({
    e2e: {
        supportFile: false,
        specPattern: [
            "test/cypress/unit/**/*.js"
        ],
    }
});
@@ -1,11 +1,11 @@
import { defineConfig, devices } from "@playwright/test";

const port = 30001;
export const url = `http://localhost:${port}`;
const url = `http://localhost:${port}`;

export default defineConfig({
    // Look for test files in the "tests" directory, relative to this configuration file.
    testDir: "../test/e2e/specs",
    testDir: "../test/e2e",
    outputDir: "../private/playwright-test-results",
    fullyParallel: false,
    locale: "en-US",
@@ -40,15 +40,9 @@ export default defineConfig({
    // Configure projects for major browsers.
    projects: [
        {
            name: "run-once setup",
            testMatch: /setup-process\.once\.js/,
            name: "chromium",
            use: { ...devices["Desktop Chrome"] },
        },
        {
            name: "specs",
            use: { ...devices["Desktop Chrome"] },
            dependencies: [ "run-once setup" ],
        },
        /*
        {
            name: "firefox",
@@ -58,7 +52,7 @@ export default defineConfig({

    // Run your local dev server before starting the tests.
    webServer: {
        command: `node extra/remove-playwright-test-data.js && cross-env NODE_ENV=development node server/server.js --port=${port} --data-dir=./data/playwright-test`,
        command: `node extra/remove-playwright-test-data.js && node server/server.js --port=${port} --data-dir=./data/playwright-test`,
        url,
        reuseExistingServer: false,
        cwd: "../",
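
A spec matched by the `testDir` above would run against the dev server that the `webServer` block starts on port 30001. The file name and assertion below are assumptions for illustration, not actual tests from the repository:

```js
// Hypothetical test/e2e/example.spec.js (illustrative only)
import { test, expect } from "@playwright/test";

test("server responds on the configured port", async ({ page }) => {
    const response = await page.goto("http://localhost:30001");
    expect(response.ok()).toBeTruthy();
});
```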
@@ -2,7 +2,6 @@ import vue from "@vitejs/plugin-vue";
import { defineConfig } from "vite";
import visualizer from "rollup-plugin-visualizer";
import viteCompression from "vite-plugin-compression";
import VueDevTools from "vite-plugin-vue-devtools";

const postCssScss = require("postcss-scss");
const postcssRTLCSS = require("postcss-rtlcss");
@@ -16,7 +15,9 @@ export default defineConfig({
    },
    define: {
        "FRONTEND_VERSION": JSON.stringify(process.env.npm_package_version),
        "process.env": {},
        "DEVCONTAINER": JSON.stringify(process.env.DEVCONTAINER),
        "GITHUB_CODESPACES_PORT_FORWARDING_DOMAIN": JSON.stringify(process.env.GITHUB_CODESPACES_PORT_FORWARDING_DOMAIN),
        "CODESPACE_NAME": JSON.stringify(process.env.CODESPACE_NAME),
    },
    plugins: [
        vue(),
@@ -31,7 +32,6 @@ export default defineConfig({
            algorithm: "brotliCompress",
            filter: viteCompressionFilter,
        }),
        VueDevTools(),
    ],
    css: {
        postcss: {
@@ -318,10 +318,7 @@ async function createTables() {
        // monitor_tls_info
        await knex.schema.createTable("monitor_tls_info", (table) => {
            table.increments("id");
            table.integer("monitor_id").unsigned().notNullable()
                .references("id").inTable("monitor")
                .onDelete("CASCADE")
                .onUpdate("CASCADE");
            table.integer("monitor_id").unsigned().notNullable(); //TODO: no fk ?
            table.text("info_json");
        });

@@ -1,12 +0,0 @@
exports.up = function (knex) {
    return knex.schema
        .alterTable("status_page", function (table) {
            table.integer("auto_refresh_interval").defaultTo(300).unsigned();
        });
};

exports.down = function (knex) {
    return knex.schema.alterTable("status_page", function (table) {
        table.dropColumn("auto_refresh_interval");
    });
};
@@ -1,24 +0,0 @@
exports.up = function (knex) {
    return knex.schema
        .alterTable("stat_daily", function (table) {
            table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds");
            table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds");
        })
        .alterTable("stat_minutely", function (table) {
            table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds");
            table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds");
        });

};

exports.down = function (knex) {
    return knex.schema
        .alterTable("stat_daily", function (table) {
            table.dropColumn("ping_min");
            table.dropColumn("ping_max");
        })
        .alterTable("stat_minutely", function (table) {
            table.dropColumn("ping_min");
            table.dropColumn("ping_max");
        });
};
@@ -1,26 +0,0 @@
exports.up = function (knex) {
    return knex.schema
        .createTable("stat_hourly", function (table) {
            table.increments("id");
            table.comment("This table contains the hourly aggregate statistics for each monitor");
            table.integer("monitor_id").unsigned().notNullable()
                .references("id").inTable("monitor")
                .onDelete("CASCADE")
                .onUpdate("CASCADE");
            table.integer("timestamp")
                .notNullable()
                .comment("Unix timestamp rounded down to the nearest hour");
            table.float("ping").notNullable().comment("Average ping in milliseconds");
            table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds");
            table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds");
            table.smallint("up").notNullable();
            table.smallint("down").notNullable();

            table.unique([ "monitor_id", "timestamp" ]);
        });
};

exports.down = function (knex) {
    return knex.schema
        .dropTable("stat_hourly");
};
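The `timestamp` column above stores the Unix time rounded down to the start of the hour. A minimal sketch of that bucketing, assuming plain Unix seconds (the helper name is hypothetical; the real aggregation lives in the server code, not in this migration):

```js
// Round a Unix timestamp (in seconds) down to the start of its hour.
function toHourlyBucket(unixSeconds) {
    return Math.floor(unixSeconds / 3600) * 3600;
}

console.log(toHourlyBucket(1718901234)); // 1718899200, i.e. the top of that hour
```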
@@ -1,26 +0,0 @@
exports.up = function (knex) {
    return knex.schema
        .alterTable("stat_daily", function (table) {
            table.text("extras").defaultTo(null).comment("Extra statistics during this time period");
        })
        .alterTable("stat_minutely", function (table) {
            table.text("extras").defaultTo(null).comment("Extra statistics during this time period");
        })
        .alterTable("stat_hourly", function (table) {
            table.text("extras").defaultTo(null).comment("Extra statistics during this time period");
        });

};

exports.down = function (knex) {
    return knex.schema
        .alterTable("stat_daily", function (table) {
            table.dropColumn("extras");
        })
        .alterTable("stat_minutely", function (table) {
            table.dropColumn("extras");
        })
        .alterTable("stat_hourly", function (table) {
            table.dropColumn("extras");
        });
};
@@ -1,16 +0,0 @@
exports.up = function (knex) {
    return knex.schema
        .alterTable("monitor", function (table) {
            table.string("snmp_oid").defaultTo(null);
            table.enum("snmp_version", [ "1", "2c", "3" ]).defaultTo("2c");
            table.string("json_path_operator").defaultTo(null);
        });
};

exports.down = function (knex) {
    return knex.schema.alterTable("monitor", function (table) {
        table.dropColumn("snmp_oid");
        table.dropColumn("snmp_version");
        table.dropColumn("json_path_operator");
    });
};
@@ -1,13 +0,0 @@
exports.up = function (knex) {
    return knex.schema
        .alterTable("monitor", function (table) {
            table.boolean("cache_bust").notNullable().defaultTo(false);
        });
};

exports.down = function (knex) {
    return knex.schema
        .alterTable("monitor", function (table) {
            table.dropColumn("cache_bust");
        });
};
@@ -1,12 +0,0 @@
exports.up = function (knex) {
    return knex.schema
        .alterTable("monitor", function (table) {
            table.text("conditions").notNullable().defaultTo("[]");
        });
};

exports.down = function (knex) {
    return knex.schema.alterTable("monitor", function (table) {
        table.dropColumn("conditions");
    });
};
@@ -1,17 +0,0 @@
exports.up = function (knex) {
    return knex.schema.alterTable("monitor", function (table) {
        table.text("rabbitmq_nodes");
        table.string("rabbitmq_username");
        table.string("rabbitmq_password");
    });

};

exports.down = function (knex) {
    return knex.schema.alterTable("monitor", function (table) {
        table.dropColumn("rabbitmq_nodes");
        table.dropColumn("rabbitmq_username");
        table.dropColumn("rabbitmq_password");
    });

};
@@ -1,7 +0,0 @@
exports.up = function (knex) {
    return knex("monitor").whereNull("json_path_operator").update("json_path_operator", "==");
};
exports.down = function (knex) {
    // changing the json_path_operator back to null for all "==" is not possible anymore
    // we have lost the context which fields have been set explicitly in >= v2.0 and which would need to be reverted
};
@@ -1,13 +0,0 @@
// Update info_json column to LONGTEXT mainly for MariaDB
exports.up = function (knex) {
    return knex.schema
        .alterTable("monitor_tls_info", function (table) {
            table.text("info_json", "longtext").alter();
        });
};

exports.down = function (knex) {
    return knex.schema.alterTable("monitor_tls_info", function (table) {
        table.text("info_json", "text").alter();
    });
};
@@ -1,13 +0,0 @@
// Fix #5721: Change proxy port column type to integer to support larger port numbers
exports.up = function (knex) {
    return knex.schema
        .alterTable("proxy", function (table) {
            table.integer("port").alter();
        });
};

exports.down = function (knex) {
    return knex.schema.alterTable("proxy", function (table) {
        table.smallint("port").alter();
    });
};
@@ -1,18 +0,0 @@
BEGIN TRANSACTION;

PRAGMA writable_schema = TRUE;

UPDATE
    SQLITE_MASTER
SET
    sql = replace(sql,
        'monitor_id INTEGER NOT NULL',
        'monitor_id INTEGER NOT NULL REFERENCES [monitor] ([id]) ON DELETE CASCADE ON UPDATE CASCADE'
    )
WHERE
    name = 'monitor_tls_info'
    AND type = 'table';

PRAGMA writable_schema = RESET;

COMMIT;
@@ -1,18 +0,0 @@
BEGIN TRANSACTION;

PRAGMA writable_schema = TRUE;

UPDATE
    SQLITE_MASTER
SET
    sql = replace(sql,
        'monitor_id INTEGER NOT NULL',
        'monitor_id INTEGER NOT NULL REFERENCES [monitor] ([id]) ON DELETE CASCADE ON UPDATE CASCADE'
    )
WHERE
    name = 'monitor_tls_info'
    AND type = 'table';

PRAGMA writable_schema = RESET;

COMMIT;
@@ -1,18 +1,9 @@
# Download Apprise deb package
FROM node:20-bookworm-slim AS download-apprise
WORKDIR /app
COPY ./extra/download-apprise.mjs ./download-apprise.mjs
RUN apt update && \
    apt --yes --no-install-recommends install curl && \
    npm install cheerio semver && \
    node ./download-apprise.mjs

# Base Image (Slim)
# If the image changed, the second stage image should be changed too
FROM node:20-bookworm-slim AS base2-slim
ARG TARGETPLATFORM

# Specify --no-install-recommends to skip unused dependencies, make the base much smaller!
# apprise = for notifications (From testing repo)
# sqlite3 = for debugging
# iputils-ping = for ping
# util-linux = for setpriv (Should be dropped in 2.0.0?)
@@ -21,10 +12,10 @@ ARG TARGETPLATFORM
# ca-certificates = keep the cert up-to-date
# sudo = for start service nscd with non-root user
# nscd = for better DNS caching
RUN apt update && \
    apt --yes --no-install-recommends install \
        sqlite3 \
        ca-certificates \
RUN echo "deb http://deb.debian.org/debian testing main" >> /etc/apt/sources.list && \
    apt update && \
    apt --yes --no-install-recommends -t testing install apprise sqlite3 ca-certificates && \
    apt --yes --no-install-recommends -t stable install \
        iputils-ping \
        util-linux \
        dumb-init \
@@ -34,16 +25,6 @@ RUN apt update && \
    rm -rf /var/lib/apt/lists/* && \
    apt --yes autoremove

# apprise = for notifications (Install from the deb package, as the stable one is too old) (workaround for #4867)
# Switching to testing repo is no longer working, as the testing repo is not bookworm anymore.
# python3-paho-mqtt (#4859)
# TODO: no idea how to delete the deb file after installation as it becomes a layer already
COPY --from=download-apprise /app/apprise.deb ./apprise.deb
RUN apt update && \
    apt --yes --no-install-recommends install ./apprise.deb python3-paho-mqtt && \
    rm -rf /var/lib/apt/lists/* && \
    rm -f apprise.deb && \
    apt --yes autoremove

# Install cloudflared
RUN curl https://pkg.cloudflare.com/cloudflare-main.gpg --output /usr/share/keyrings/cloudflare-main.gpg && \
@@ -61,9 +42,7 @@ COPY ./docker/etc/sudoers /etc/sudoers

# Full Base Image
# MariaDB, Chromium and fonts
# Make sure to reuse the slim image here. Uncomment the above line if you want to build it from scratch.
# FROM base2-slim AS base2
FROM louislam/uptime-kuma:base2-slim AS base2
FROM base2-slim AS base2
ENV UPTIME_KUMA_ENABLE_EMBEDDED_MARIADB=1
RUN apt update && \
    apt --yes --no-install-recommends install chromium fonts-indic fonts-noto fonts-noto-cjk mariadb-server && \
15
docker/docker-compose.yml
Normal file
@@ -0,0 +1,15 @@
version: '3.8'

services:
  uptime-kuma:
    image: louislam/uptime-kuma:1
    container_name: uptime-kuma
    volumes:
      - uptime-kuma:/app/data
    ports:
      - "3001:3001" # <Host Port>:<Container Port>
    restart: always

volumes:
  uptime-kuma:
@@ -27,6 +27,7 @@ RUN mkdir ./data
# ⭐ Main Image
############################################
FROM $BASE_IMAGE AS release
USER node
WORKDIR /app

LABEL org.opencontainers.image.source="https://github.com/louislam/uptime-kuma"
@@ -45,7 +46,6 @@ CMD ["node", "server/server.js"]
# Rootless Image
############################################
FROM release AS rootless
USER node

############################################
# Mark as Nightly
@@ -5,7 +5,7 @@ const util = require("../../src/util");

util.polyfill();

const version = process.env.RELEASE_BETA_VERSION;
const version = process.env.VERSION;

console.log("Beta Version: " + version);
@@ -1,72 +0,0 @@
import fs from "fs";
const dir = "./db/knex_migrations";

// Get the file list (ending with .js) from the directory
const files = fs.readdirSync(dir).filter((file) => file !== "README.md");

// They are wrong, but they had been merged, so allowed.
const exceptionList = [
    "2024-08-24-000-add-cache-bust.js",
    "2024-10-1315-rabbitmq-monitor.js",
];

// Correct format: YYYY-MM-DD-HHmm-description.js

for (const file of files) {
    if (exceptionList.includes(file)) {
        continue;
    }

    // Check ending with .js
    if (!file.endsWith(".js")) {
        console.error(`It should end with .js: ${file}`);
        process.exit(1);
    }

    const parts = file.split("-");

    // Should be at least 5 parts
    if (parts.length < 5) {
        console.error(`Invalid format: ${file}`);
        process.exit(1);
    }

    // First part should be a year >= 2024
    const year = parseInt(parts[0], 10);
    if (isNaN(year) || year < 2023) {
        console.error(`Invalid year: ${file}`);
        process.exit(1);
    }

    // Second part should be a month
    const month = parseInt(parts[1], 10);
    if (isNaN(month) || month < 1 || month > 12) {
        console.error(`Invalid month: ${file}`);
        process.exit(1);
    }

    // Third part should be a day
    const day = parseInt(parts[2], 10);
    if (isNaN(day) || day < 1 || day > 31) {
        console.error(`Invalid day: ${file}`);
        process.exit(1);
    }

    // Fourth part should be HHmm
    const time = parts[3];

    // Check length is 4
    if (time.length !== 4) {
        console.error(`Invalid time: ${file}`);
        process.exit(1);
    }

    const hour = parseInt(time.substring(0, 2), 10);
    const minute = parseInt(time.substring(2), 10);
    if (isNaN(hour) || hour < 0 || hour > 23 || isNaN(minute) || minute < 0 || minute > 59) {
        console.error(`Invalid time: ${file}`);
        process.exit(1);
    }
}

console.log("All knex filenames are correct.");
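The script above enforces the `YYYY-MM-DD-HHmm-description.js` pattern by splitting on `-`; an equivalent single-regex check, shown only as a hedged sketch (not how the repository actually validates), would look like this:

```js
// Hypothetical regex form of the filename rule checked above.
const pattern = /^\d{4}-(0[1-9]|1[0-2])-(0[1-9]|[12]\d|3[01])-([01]\d|2[0-3])[0-5]\d-.+\.js$/;

console.log(pattern.test("2024-08-24-0000-add-cache-bust.js")); // true
console.log(pattern.test("2024-10-1315-rabbitmq-monitor.js"));  // false, hence its spot on the exception list
```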
@@ -1,27 +0,0 @@
// For #5231

const fs = require("fs");

let path = "./src/lang";

// list directories in the lang directory
let jsonFileList = fs.readdirSync(path);

for (let jsonFile of jsonFileList) {
    if (!jsonFile.endsWith(".json")) {
        continue;
    }

    let jsonPath = path + "/" + jsonFile;
    let originalContent = fs.readFileSync(jsonPath, "utf8");
    let langData = JSON.parse(originalContent);

    let formattedContent = JSON.stringify(langData, null, 4) + "\n";

    if (originalContent !== formattedContent) {
        console.error(`File ${jsonFile} is not formatted correctly.`);
        process.exit(1);
    }
}

console.log("All lang json files are formatted correctly.");
@@ -37,7 +37,7 @@ const github = require("@actions/github");
            owner: issue.owner,
            repo: issue.repo,
            issue_number: issue.number,
            body: `@${username}: Hello! :wave:\n\nThis issue is being automatically closed because it does not follow the issue template. Please **DO NOT open blank issues and use our [issue-templates](https://github.com/louislam/uptime-kuma/issues/new/choose) instead**.\nBlank Issues do not contain the context nessesary for a good discussions.`
            body: `@${username}: Hello! :wave:\n\nThis issue is being automatically closed because it does not follow the issue template. Please DO NOT open a blank issue.`
        });

        // Close the issue
@@ -1,57 +0,0 @@
// Go to http://ftp.debian.org/debian/pool/main/a/apprise/ using fetch api, where it is a apache directory listing page
// Use cheerio to parse the html and get the latest version of Apprise
// call curl to download the latest version of Apprise
// Target file: the latest version of Apprise, which the format is apprise_{VERSION}_all.deb

import * as cheerio from "cheerio";
import semver from "semver";
import * as childProcess from "child_process";

const baseURL = "http://ftp.debian.org/debian/pool/main/a/apprise/";
const response = await fetch(baseURL);

if (!response.ok) {
    throw new Error("Failed to fetch page of Apprise Debian repository.");
}

const html = await response.text();

const $ = cheerio.load(html);

// Get all the links in the page
const linkElements = $("a");

// Filter the links which match apprise_{VERSION}_all.deb
const links = [];
const pattern = /apprise_(.*?)_all.deb/;

for (let i = 0; i < linkElements.length; i++) {
    const link = linkElements[i];
    if (link.attribs.href.match(pattern) && !link.attribs.href.includes("~")) {
        links.push({
            filename: link.attribs.href,
            version: link.attribs.href.match(pattern)[1],
        });
    }
}

console.log(links);

// semver compare and download
let latestLink = {
    filename: "",
    version: "0.0.0",
};

for (const link of links) {
    if (semver.gt(link.version, latestLink.version)) {
        latestLink = link;
    }
}

const downloadURL = baseURL + latestLink.filename;
console.log(`Downloading ${downloadURL}...`);
let result = childProcess.spawnSync("curl", [ downloadURL, "--output", "apprise.deb" ]);
console.log(result.stdout?.toString());
console.error(result.stderr?.toString());
process.exit(result.status !== null ? result.status : 1);
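The deleted script picks the newest package purely by `semver.gt` over the versions scraped from the listing. A self-contained sketch of that comparison loop, with made-up version strings:

```js
import semver from "semver";

// Keep the highest version seen so far, mirroring the loop in the script above.
const versions = [ "1.7.6", "1.9.0", "1.8.1" ]; // hypothetical values
let latest = "0.0.0";
for (const version of versions) {
    if (semver.gt(version, latest)) {
        latest = version;
    }
}
console.log(latest); // "1.9.0"
```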
@@ -4,6 +4,7 @@ const tar = require("tar");

const packageJSON = require("../package.json");
const fs = require("fs");
const rmSync = require("./fs-rmSync.js");
const version = packageJSON.version;

const filename = "dist.tar.gz";
@@ -28,9 +29,8 @@ function download(url) {
    if (fs.existsSync("./dist")) {

        if (fs.existsSync("./dist-backup")) {
            fs.rmSync("./dist-backup", {
                recursive: true,
                force: true,
            rmSync("./dist-backup", {
                recursive: true
            });
        }

@@ -43,9 +43,8 @@ function download(url) {

    tarStream.on("close", () => {
        if (fs.existsSync("./dist-backup")) {
            fs.rmSync("./dist-backup", {
                recursive: true,
                force: true,
            rmSync("./dist-backup", {
                recursive: true
            });
        }
        console.log("Done");
19
extra/env2arg.js
Normal file
@@ -0,0 +1,19 @@
#!/usr/bin/env node

const childProcess = require("child_process");
let env = process.env;

let cmd = process.argv[2];
let args = process.argv.slice(3);
let replacedArgs = [];

for (let arg of args) {
    for (let key in env) {
        arg = arg.replaceAll(`$${key}`, env[key]);
    }
    replacedArgs.push(arg);
}

let child = childProcess.spawn(cmd, replacedArgs);
child.stdout.pipe(process.stdout);
child.stderr.pipe(process.stderr);
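`env2arg.js` simply substitutes `$VARNAME` tokens in each argument with values from the environment before spawning the command. A rough illustration of the substitution step, with a made-up variable name:

```js
// With UPTIME_KUMA_PORT=3001 in the environment, "--port=$UPTIME_KUMA_PORT" becomes "--port=3001".
const env = { UPTIME_KUMA_PORT: "3001" }; // hypothetical environment
let arg = "--port=$UPTIME_KUMA_PORT";
for (const key in env) {
    arg = arg.replaceAll(`$${key}`, env[key]);
}
console.log(arg); // --port=3001
```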
1
extra/exe-builder/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
packages/
35
extra/exe-builder/App.config
Normal file
35
extra/exe-builder/App.config
Normal file
@@ -0,0 +1,35 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<configuration>
|
||||
<startup>
|
||||
<supportedRuntime version="v4.0" sku=".NETFramework,Version=v4.7.2" />
|
||||
</startup>
|
||||
|
||||
<runtime>
|
||||
<assemblyBinding xmlns="urn:schemas-microsoft-com:asm.v1">
|
||||
<dependentAssembly>
|
||||
<assemblyIdentity name="System.Diagnostics.DiagnosticSource" publicKeyToken="cc7b13ffcd2ddd51" culture="neutral" />
|
||||
<bindingRedirect oldVersion="0.0.0.0-4.0.1.0" newVersion="4.0.1.0" />
|
||||
</dependentAssembly>
|
||||
<dependentAssembly>
|
||||
<assemblyIdentity name="System.Diagnostics.Tracing" publicKeyToken="b03f5f7f11d50a3a" culture="neutral" />
|
||||
<bindingRedirect oldVersion="0.0.0.0-4.1.1.0" newVersion="4.1.1.0" />
|
||||
</dependentAssembly>
|
||||
<dependentAssembly>
|
||||
<assemblyIdentity name="System.Reflection" publicKeyToken="b03f5f7f11d50a3a" culture="neutral" />
|
||||
<bindingRedirect oldVersion="0.0.0.0-4.1.1.0" newVersion="4.1.1.0" />
|
||||
</dependentAssembly>
|
||||
<dependentAssembly>
|
||||
<assemblyIdentity name="System.Runtime" publicKeyToken="b03f5f7f11d50a3a" culture="neutral" />
|
||||
<bindingRedirect oldVersion="0.0.0.0-4.1.1.1" newVersion="4.1.1.1" />
|
||||
</dependentAssembly>
|
||||
<dependentAssembly>
|
||||
<assemblyIdentity name="System.Runtime.InteropServices" publicKeyToken="b03f5f7f11d50a3a" culture="neutral" />
|
||||
<bindingRedirect oldVersion="0.0.0.0-4.1.1.0" newVersion="4.1.1.0" />
|
||||
</dependentAssembly>
|
||||
<dependentAssembly>
|
||||
<assemblyIdentity name="System.Runtime.CompilerServices.Unsafe" publicKeyToken="b03f5f7f11d50a3a" culture="neutral" />
|
||||
<bindingRedirect oldVersion="0.0.0.0-6.0.0.0" newVersion="6.0.0.0" />
|
||||
</dependentAssembly>
|
||||
</assemblyBinding>
|
||||
</runtime>
|
||||
</configuration>
|
84
extra/exe-builder/DownloadForm.Designer.cs
generated
Normal file
84
extra/exe-builder/DownloadForm.Designer.cs
generated
Normal file
@@ -0,0 +1,84 @@
|
||||
using System.ComponentModel;
|
||||
|
||||
namespace UptimeKuma {
|
||||
partial class DownloadForm {
|
||||
/// <summary>
|
||||
/// Required designer variable.
|
||||
/// </summary>
|
||||
private IContainer components = null;
|
||||
|
||||
/// <summary>
|
||||
/// Clean up any resources being used.
|
||||
/// </summary>
|
||||
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
|
||||
protected override void Dispose(bool disposing) {
|
||||
if (disposing && (components != null)) {
|
||||
components.Dispose();
|
||||
}
|
||||
|
||||
base.Dispose(disposing);
|
||||
}
|
||||
|
||||
#region Windows Form Designer generated code
|
||||
|
||||
/// <summary>
|
||||
/// Required method for Designer support - do not modify
|
||||
/// the contents of this method with the code editor.
|
||||
/// </summary>
|
||||
private void InitializeComponent() {
|
||||
System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(DownloadForm));
|
||||
this.progressBar = new System.Windows.Forms.ProgressBar();
|
||||
this.label = new System.Windows.Forms.Label();
|
||||
this.labelData = new System.Windows.Forms.Label();
|
||||
this.SuspendLayout();
|
||||
//
|
||||
// progressBar
|
||||
//
|
||||
this.progressBar.Location = new System.Drawing.Point(12, 12);
|
||||
this.progressBar.Name = "progressBar";
|
||||
this.progressBar.Size = new System.Drawing.Size(472, 41);
|
||||
this.progressBar.TabIndex = 0;
|
||||
//
|
||||
// label
|
||||
//
|
||||
this.label.Location = new System.Drawing.Point(12, 59);
|
||||
this.label.Name = "label";
|
||||
this.label.Size = new System.Drawing.Size(472, 23);
|
||||
this.label.TabIndex = 1;
|
||||
this.label.Text = "Preparing...";
|
||||
//
|
||||
// labelData
|
||||
//
|
||||
this.labelData.Location = new System.Drawing.Point(12, 82);
|
||||
this.labelData.Name = "labelData";
|
||||
this.labelData.Size = new System.Drawing.Size(472, 23);
|
||||
this.labelData.TabIndex = 2;
|
||||
//
|
||||
// DownloadForm
|
||||
//
|
||||
this.AutoScaleDimensions = new System.Drawing.SizeF(8F, 16F);
|
||||
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
|
||||
this.ClientSize = new System.Drawing.Size(496, 117);
|
||||
this.Controls.Add(this.labelData);
|
||||
this.Controls.Add(this.label);
|
||||
this.Controls.Add(this.progressBar);
|
||||
this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog;
|
||||
this.Icon = ((System.Drawing.Icon)(resources.GetObject("$this.Icon")));
|
||||
this.MaximizeBox = false;
|
||||
this.Name = "DownloadForm";
|
||||
this.StartPosition = System.Windows.Forms.FormStartPosition.CenterScreen;
|
||||
this.Text = "Uptime Kuma";
|
||||
this.Load += new System.EventHandler(this.DownloadForm_Load);
|
||||
this.ResumeLayout(false);
|
||||
}
|
||||
|
||||
private System.Windows.Forms.Label labelData;
|
||||
|
||||
private System.Windows.Forms.Label label;
|
||||
|
||||
private System.Windows.Forms.ProgressBar progressBar;
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
|
204
extra/exe-builder/DownloadForm.cs
Normal file
204
extra/exe-builder/DownloadForm.cs
Normal file
@@ -0,0 +1,204 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.ComponentModel;
|
||||
using System.Diagnostics;
|
||||
using System.IO;
|
||||
using System.IO.Compression;
|
||||
using System.Net;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using System.Windows.Forms;
|
||||
using Newtonsoft.Json;
|
||||
|
||||
namespace UptimeKuma {
|
||||
public partial class DownloadForm : Form {
|
||||
private readonly Queue<DownloadItem> downloadQueue = new();
|
||||
private readonly WebClient webClient = new();
|
||||
private DownloadItem currentDownloadItem;
|
||||
|
||||
public DownloadForm() {
|
||||
InitializeComponent();
|
||||
}
|
||||
|
||||
private void DownloadForm_Load(object sender, EventArgs e) {
|
||||
webClient.DownloadProgressChanged += DownloadProgressChanged;
|
||||
webClient.DownloadFileCompleted += DownloadFileCompleted;
|
||||
|
||||
label.Text = "Reading latest version...";
|
||||
|
||||
// Read json from https://uptime.kuma.pet/version
|
||||
var versionJson = new WebClient().DownloadString("https://uptime.kuma.pet/version");
|
||||
var versionObj = JsonConvert.DeserializeObject<Version>(versionJson);
|
||||
|
||||
var nodeVersion = versionObj.nodejs;
|
||||
var uptimeKumaVersion = versionObj.latest;
|
||||
var hasUpdateFile = File.Exists("update");
|
||||
|
||||
if (!Directory.Exists("node")) {
|
||||
downloadQueue.Enqueue(new DownloadItem {
|
||||
URL = $"https://nodejs.org/dist/v{nodeVersion}/node-v{nodeVersion}-win-x64.zip",
|
||||
Filename = "node.zip",
|
||||
TargetFolder = "node"
|
||||
});
|
||||
}
|
||||
|
||||
if (!Directory.Exists("core") || hasUpdateFile) {
|
||||
|
||||
// It is update, rename the core folder to core.old
|
||||
if (Directory.Exists("core")) {
|
||||
// Remove the old core.old folder
|
||||
if (Directory.Exists("core.old")) {
|
||||
Directory.Delete("core.old", true);
|
||||
}
|
||||
|
||||
Directory.Move("core", "core.old");
|
||||
}
|
||||
|
||||
downloadQueue.Enqueue(new DownloadItem {
|
||||
URL = $"https://github.com/louislam/uptime-kuma/archive/refs/tags/{uptimeKumaVersion}.zip",
|
||||
Filename = "core.zip",
|
||||
TargetFolder = "core"
|
||||
});
|
||||
|
||||
File.WriteAllText("version.json", versionJson);
|
||||
|
||||
// Delete the update file
|
||||
if (hasUpdateFile) {
|
||||
File.Delete("update");
|
||||
}
|
||||
}
|
||||
|
||||
DownloadNextFile();
|
||||
}
|
||||
|
||||
void DownloadNextFile() {
|
||||
if (downloadQueue.Count > 0) {
|
||||
var item = downloadQueue.Dequeue();
|
||||
|
||||
currentDownloadItem = item;
|
||||
|
||||
// Download if the zip file is not existing
|
||||
if (!File.Exists(item.Filename)) {
|
||||
label.Text = item.URL;
|
||||
webClient.DownloadFileAsync(new Uri(item.URL), item.Filename);
|
||||
} else {
|
||||
progressBar.Value = 100;
|
||||
label.Text = "Use local " + item.Filename;
|
||||
DownloadFileCompleted(null, null);
|
||||
}
|
||||
} else {
|
||||
npmSetup();
|
||||
}
|
||||
}
|
||||
|
||||
void npmSetup() {
|
||||
labelData.Text = "";
|
||||
|
||||
var npm = "..\\node\\npm.cmd";
|
||||
var cmd = $"{npm} ci --production & {npm} run download-dist & exit";
|
||||
|
||||
var startInfo = new ProcessStartInfo {
|
||||
FileName = "cmd.exe",
|
||||
Arguments = $"/k \"{cmd}\"",
|
||||
RedirectStandardOutput = false,
|
||||
RedirectStandardError = false,
|
||||
RedirectStandardInput = true,
|
||||
UseShellExecute = false,
|
||||
CreateNoWindow = false,
|
||||
WorkingDirectory = "core"
|
||||
};
|
||||
|
||||
var process = new Process();
|
||||
process.StartInfo = startInfo;
|
||||
process.EnableRaisingEvents = true;
|
||||
process.Exited += (_, e) => {
|
||||
progressBar.Value = 100;
|
||||
|
||||
if (process.ExitCode == 0) {
|
||||
Task.Delay(2000).ContinueWith(_ => {
|
||||
Application.Restart();
|
||||
});
|
||||
label.Text = "Done";
|
||||
} else {
|
||||
label.Text = "Failed, exit code: " + process.ExitCode;
|
||||
}
|
||||
|
||||
};
|
||||
process.Start();
|
||||
label.Text = "Installing dependencies and download dist files";
|
||||
progressBar.Value = 50;
|
||||
process.WaitForExit();
|
||||
}
|
||||
|
||||
void DownloadProgressChanged(object sender, DownloadProgressChangedEventArgs e) {
|
||||
progressBar.Value = e.ProgressPercentage;
|
||||
var total = e.TotalBytesToReceive / 1024;
|
||||
var current = e.BytesReceived / 1024;
|
||||
|
||||
if (total > 0) {
|
||||
labelData.Text = $"{current}KB/{total}KB";
|
||||
}
|
||||
}
|
||||
|
||||
void DownloadFileCompleted(object sender, AsyncCompletedEventArgs e) {
|
||||
Extract(currentDownloadItem);
|
||||
DownloadNextFile();
|
||||
}
|
||||
|
||||
void Extract(DownloadItem item) {
|
||||
if (Directory.Exists(item.TargetFolder)) {
|
||||
var dir = new DirectoryInfo(item.TargetFolder);
|
||||
dir.Delete(true);
|
||||
}
|
||||
|
||||
if (Directory.Exists("temp")) {
|
||||
var dir = new DirectoryInfo("temp");
|
||||
dir.Delete(true);
|
||||
}
|
||||
|
||||
labelData.Text = $"Extracting {item.Filename}...";
|
||||
|
||||
ZipFile.ExtractToDirectory(item.Filename, "temp");
|
||||
|
||||
string[] dirList;
|
||||
|
||||
// Move to the correct level
|
||||
dirList = Directory.GetDirectories("temp");
|
||||
|
||||
|
||||
|
||||
if (dirList.Length > 0) {
|
||||
var dir = dirList[0];
|
||||
|
||||
// As sometime ExtractToDirectory is still locking the directory, loop until ok
|
||||
while (true) {
|
||||
try {
|
||||
Directory.Move(dir, item.TargetFolder);
|
||||
break;
|
||||
} catch (Exception exception) {
|
||||
Thread.Sleep(1000);
|
||||
}
|
||||
}
|
||||
|
||||
} else {
|
||||
MessageBox.Show("Unexcepted Error: Cannot move extracted files, folder not found.");
|
||||
}
|
||||
|
||||
labelData.Text = $"Extracted";
|
||||
|
||||
if (Directory.Exists("temp")) {
|
||||
var dir = new DirectoryInfo("temp");
|
||||
dir.Delete(true);
|
||||
}
|
||||
|
||||
File.Delete(item.Filename);
|
||||
}
|
||||
}
|
||||
|
||||
public class DownloadItem {
|
||||
public string URL { get; set; }
|
||||
public string Filename { get; set; }
|
||||
public string TargetFolder { get; set; }
|
||||
}
|
||||
}
|
||||
|
377
extra/exe-builder/DownloadForm.resx
Normal file
377
extra/exe-builder/DownloadForm.resx
Normal file
@@ -0,0 +1,377 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<root>
|
||||
<!--
|
||||
Microsoft ResX Schema
|
||||
|
||||
Version 2.0
|
||||
|
||||
The primary goals of this format is to allow a simple XML format
|
||||
that is mostly human readable. The generation and parsing of the
|
||||
various data types are done through the TypeConverter classes
|
||||
associated with the data types.
|
||||
|
||||
Example:
|
||||
|
||||
... ado.net/XML headers & schema ...
|
||||
<resheader name="resmimetype">text/microsoft-resx</resheader>
|
||||
<resheader name="version">2.0</resheader>
|
||||
<resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader>
|
||||
<resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader>
|
||||
<data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data>
|
||||
<data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data>
|
||||
<data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64">
|
||||
<value>[base64 mime encoded serialized .NET Framework object]</value>
|
||||
</data>
|
||||
<data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
|
||||
<value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value>
|
||||
<comment>This is a comment</comment>
|
||||
</data>
|
||||
|
||||
There are any number of "resheader" rows that contain simple
|
||||
name/value pairs.
|
||||
|
||||
Each data row contains a name, and value. The row also contains a
|
||||
type or mimetype. Type corresponds to a .NET class that support
|
||||
text/value conversion through the TypeConverter architecture.
|
||||
Classes that don't support this are serialized and stored with the
|
||||
mimetype set.
|
||||
|
||||
The mimetype is used for serialized objects, and tells the
|
||||
ResXResourceReader how to depersist the object. This is currently not
|
||||
extensible. For a given mimetype the value must be set accordingly:
|
||||
|
||||
Note - application/x-microsoft.net.object.binary.base64 is the format
|
||||
that the ResXResourceWriter will generate, however the reader can
|
||||
read any of the formats listed below.
|
||||
|
||||
mimetype: application/x-microsoft.net.object.binary.base64
|
||||
value : The object must be serialized with
|
||||
: System.Runtime.Serialization.Formatters.Binary.BinaryFormatter
|
||||
: and then encoded with base64 encoding.
|
||||
|
||||
mimetype: application/x-microsoft.net.object.soap.base64
|
||||
value : The object must be serialized with
|
||||
: System.Runtime.Serialization.Formatters.Soap.SoapFormatter
|
||||
: and then encoded with base64 encoding.
|
||||
|
||||
mimetype: application/x-microsoft.net.object.bytearray.base64
|
||||
value : The object must be serialized into a byte array
|
||||
: using a System.ComponentModel.TypeConverter
|
||||
: and then encoded with base64 encoding.
|
||||
-->
|
||||
<xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
|
||||
<xsd:import namespace="http://www.w3.org/XML/1998/namespace" />
|
||||
<xsd:element name="root" msdata:IsDataSet="true">
|
||||
<xsd:complexType>
|
||||
<xsd:choice maxOccurs="unbounded">
|
||||
<xsd:element name="metadata">
|
||||
<xsd:complexType>
|
||||
<xsd:sequence>
|
||||
<xsd:element name="value" type="xsd:string" minOccurs="0" />
|
||||
</xsd:sequence>
|
||||
<xsd:attribute name="name" use="required" type="xsd:string" />
|
||||
<xsd:attribute name="type" type="xsd:string" />
|
||||
<xsd:attribute name="mimetype" type="xsd:string" />
|
||||
<xsd:attribute ref="xml:space" />
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
<xsd:element name="assembly">
|
||||
<xsd:complexType>
|
||||
<xsd:attribute name="alias" type="xsd:string" />
|
||||
<xsd:attribute name="name" type="xsd:string" />
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
<xsd:element name="data">
|
||||
<xsd:complexType>
|
||||
<xsd:sequence>
|
||||
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
|
||||
<xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
|
||||
</xsd:sequence>
|
||||
<xsd:attribute name="name" type="xsd:string" use="required" msdata:Ordinal="1" />
|
||||
<xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
|
||||
<xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
|
||||
<xsd:attribute ref="xml:space" />
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
<xsd:element name="resheader">
|
||||
<xsd:complexType>
|
||||
<xsd:sequence>
|
||||
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
|
||||
</xsd:sequence>
|
||||
<xsd:attribute name="name" type="xsd:string" use="required" />
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
</xsd:choice>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
</xsd:schema>
|
||||
<resheader name="resmimetype">
|
||||
<value>text/microsoft-resx</value>
|
||||
</resheader>
|
||||
<resheader name="version">
|
||||
<value>2.0</value>
|
||||
</resheader>
|
||||
<resheader name="reader">
|
||||
<value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
|
||||
</resheader>
|
||||
<resheader name="writer">
|
||||
<value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
|
||||
</resheader>
|
||||
<assembly alias="System.Drawing" name="System.Drawing, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a" />
|
||||
<data name="$this.Icon" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
|
||||
<value>
|
||||
AAABAAMAMDAAAAEAIACoJQAANgAAACAgAAABACAAqBAAAN4lAAAQEAAAAQAgAGgEAACGNgAAKAAAADAA
|
||||
AABgAAAAAQAgAAAAAAAAJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAA////BPT09Bfu7u4e8fHxJPPz8yv19fUy9fX1M/Pz8yvx8fEk9vb2HPPz8xXMzMwFAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//
|
||||
/wHv7+8f7u7uPPPz81Tx8fFs8fHxgPHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGB8fHxcfHx8V3x8fFI9PT0MOvr6w0AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AADy8vIU8fHxS/Dw8Hbx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fFr9PT0R/Dw8CIAAAABAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAA8vLyFPHx8Vnx8fGB8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fFs9fX1Mb+/vwQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAICAgALy8vI88fHxfvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvLy8nby8vI8gICAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAzMzMBfHx8Vrx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8vLyYf///wwAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADMzMwF8vLyYPHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8W/z8/MWAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADv7+9R8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLw8PB26urqDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPLy8ijx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgu7w7Ifj79ud2u7PtNLrw83P677dzeu85c3r
|
||||
u+rM67rwzOu68c7rverQ68Dj0uvD3NbuyM3b7c+64u7apujv5ZPx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxXgAAAAEAAAAAAAAAAAAAAAAAAAAA4+PjCfDw
|
||||
8Hfx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLd7tSmzeu92MbqsvvG6bH/xumy/8fq
|
||||
s//H6rP/yOq0/8jqtf/J6rb/yeq2/8rrt//K67j/y+u4/8vruf/M67r/zOu7/83ru//Q7MDx1u7Kz9/t
|
||||
163s8OuJ8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgu/v7y8AAAAAAAAAAAAA
|
||||
AAAAAAAA7u7uPfHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC5PDdl8jqtuTE6a7/xOmv/8Xp
|
||||
sP/G6bH/xumx/8bpsv/H6rP/x+qz/8jqtP/I6rX/yeq2/8nqtv/K67f/yuu4/8vruP/L67n/zOu6/8zr
|
||||
u//N67v/zey8/87svf/P67742e3Mx+jv5ZLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvDw
|
||||
8HWAgIACAAAAAAAAAACqqqoD8vLyc/Hx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLf7degxOiu+cPo
|
||||
rf/D6a7/xOmu/8Xpr//F6bD/xumx/8bpsf/G6bL/x+qz/8fqs//I6rT/yOq1/8nqtv/J6rb/yuu3/8rr
|
||||
uP/L67j/y+u5/8zruv/M67v/zeu7/83svP/O7L3/zuy9/87svfzc7tK28fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fEkAAAAAAAAAADz8/Mq8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgunv
|
||||
5o3D6a/0wuis/8Lorf/D6K3/xOmu/8Tprv/F6a//xemw/8bpsf/G6bH/xumy/8fqs//H6rP/yOq0/8jq
|
||||
tf/J6rb/yeq2/8rrt//K67j/y+u4/8vruf/M67r/zOu7/83ru//N7Lz/zuy9/87svf/O7L3/3e/TtPHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLy8vJNAAAAAAAAAADy8vJM8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgszqutDB6Kv/weir/8LorP/D6K3/w+it/8Tprv/E6a7/xemv/8XpsP/G6bH/xumx/8bp
|
||||
sv/H6rP/x+qz/8jqtP/I6rX/yeq2/8nqtv/K67f/yuu4/8vruP/L67n/zOu6/8zru//N67v/zey8/87s
|
||||
vf/O7L3/zuy++u3w6Yzx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLy8vJ1AAAAAAAAAADx8fFr8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC6O/kjsDoqvzA6Kr/weir/8Loq//C6Kz/w+it/8Porf/E6a7/xOmu/8Xp
|
||||
r//F6bD/xumx/8bpsf/G6bL/x+qz/8fqtP/I6rT/yOq1/8nqtv/J6rb/yuu3/8rruP/L67n/y+u5/8zr
|
||||
uv/M67v/zeu7/83svP/O7L3/zuy9/93u07Xx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC////Bv//
|
||||
/wfx8fGB8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC1ezJsr/nqf/A56n/weiq/8Hoq//C6Kv/wuis/8Po
|
||||
rf/D6K3/xOmu/8Pprv+856T/uOed/7bmmv+05Zf/teWZ/7jnnf+86KP/wOio/8fqs//J6rb/yeq2/8rr
|
||||
t//K67j/y+u5/8vruf/M67r/zOu7/83ru//N7Lz/zuy9/9buyNLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8vLyE/Ly8hPx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGCy+q6zr/nqP/A56n/wOep/8Ho
|
||||
qv/B6Kv/wuir/8LorP+u5Y//neF2/5bgav+V4Gr/luBr/5fhbP+Y4W7/meFv/5rhcf+b4nL/nOJ0/53i
|
||||
dv+j5H//reaM/7nnnf/E6q//y+y4/8vruf/L67n/zOu6/8zru//N67v/zey8/9Lsxd/x8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC7+/vIPb29hzx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGCx+m03L/n
|
||||
qP+/56j/wOep/8Dnqf/B6Kr/weir/7nmn/+R32T/kt9l/5PfZ/+U4Gj/leBq/5bga/+X4W3/mOFu/5nh
|
||||
b/+a4XH/m+Jy/5zidP+d4nX/nuN3/5/jeP+f4nn/weqq/8rruP/L67n/y+u5/8zruv/M67v/zeu7/9Ls
|
||||
w+Lx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8PDwI/Hx8SXx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGCxeix5L/nqP+/56j/v+eo/8Dnqf/A56n/weiq/7Pllv+Q3mP/kd9k/5LfZf+T32f/lOBo/5Xg
|
||||
av+W4Gv/l+Ft/5jhbv+Z4W//muFx/5vicv+c4nT/neJ1/57jd/+f43j/xOmu/8rrt//K67j/y+u5/8vr
|
||||
uf/M67r/zOu7/9Tsxtfx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC9PT0GO/v7yDx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGCx+m037/nqP+/56j/v+eo/7/nqP/A56n/wOip/7TmmP+P3mH/kN5j/5Hf
|
||||
ZP+S32b/k99n/5TgaP+V4Gr/luBr/5fhbf+Y4W7/meFw/5rhcf+b4nL/nOJ0/53idf+h5Hz/yuu2/8nq
|
||||
t//K67f/yuu4/8vruf/L67n/zOu6/9ftysrx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC7e3tDvT0
|
||||
9Bfx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGCyOq117/nqP+/56j/v+eo/7/nqP+/56j/wOep/7vn
|
||||
of+O3mD/j95h/5DeY/+R32T/kt9m/5PfZ/+U4Gj/leBq/5bga/+X4W3/mOFu/5nhcP+a4nH/m+Jy/5zi
|
||||
dP+r5Yr/yOq1/8nqtv/J6rf/yuu3/8rruP/L67n/y+u5/9zu1LHx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLz8/OA////A+7u7g/x8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGCz+q+xb/nqP+/56j/v+eo/7/n
|
||||
qP+/56j/v+eo/8Dnqf+S4Gb/jt5g/4/eYf+Q3mP/kd9k/5LfZv+T32f/lOBo/5Xgav+W4Gv/l+Ft/5jh
|
||||
bv+Z4XD/muJx/5vic/+4553/yOq0/8jqtf/J6rb/yeq3/8rrt//K67j/y+u5/+bw4Zfx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fFrAAAAAP///wHz8/N88fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC1+zMrr/n
|
||||
qP+/56j/v+eo/7/nqP+/56j/v+eo/7/nqP+f4Xn/jd5f/47eYP+P3mH/kN5j/5HfZP+S32b/k99n/5Tg
|
||||
af+V4Gr/luBr/5fhbf+Y4W7/meFw/5vic//F6rD/x+q0/8jqtP/I6rX/yeq2/8nqt//K67f/zOu88u/x
|
||||
74Px8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLv7+9QAAAAAAAAAADw8PBm8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC5e7gk7/nqP+/56j/v+eo/7/nqP+/56j/v+eo/7/nqP+u5I//jN1d/43eX/+O3mD/j95h/5De
|
||||
Y/+R32T/kt9m/5PfZ/+U4Gn/leBq/5bga/+X4W3/mOFu/6rliP/G6rL/x+qz/8fqtP/I6rT/yOq1/8nq
|
||||
tv/J6rf/1OzGy/Hx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YL19fUzAAAAAAAAAADy8vJO8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgsPoru2/56j/v+eo/7/nqP+/56j/v+eo/7/nqP++6Kf/j95i/4zd
|
||||
Xf+N3l//jt5g/4/eYv+Q3mP/kd9k/5LfZv+T32f/lOBp/5Xgav+W4Gz/l+Ft/7voov/G6bL/xuqy/8fq
|
||||
s//H6rT/yOq1/8jqtf/J6rb/4e/Zo/Hx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLw8PARAAAAAAAA
|
||||
AADu7u4u8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgszpvMm/56j/v+eo/7/nqP+/56j/v+eo/7/n
|
||||
qP+/56j/q+SL/4vdXP+M3V3/jd5f/47eYP+P3mL/kN9j/5HfZP+S32b/k99n/5Tgaf+V4Gr/qOOH/8Xp
|
||||
sP/G6bH/xumy/8bqsv/H6rP/x+q0/8jqtf/K67jy8PHwhPHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8WoAAAAAAAAAAAAAAADo6OgL8fHxgfHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxguDv2J2/56j/v+eo/7/n
|
||||
qP+/56j/v+eo/7/nqP+/56j/v+eo/6Xjgv+L3Vz/jN1d/43eX/+O3mD/j95i/5DfY/+R32T/kt9m/5Pf
|
||||
Z/+k44D/xOmu/8XpsP/F6bD/xumx/8bpsv/G6rL/x+qz/8fqtP/W7cnB8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvPz80AAAAAAAAAAAAAAAAAAAAAA8PDwZ/Hx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLD6K/rv+eo/7/nqP+/56j/v+eo/7/nqP+/56j/v+eo/7/nqP+u5I//kt5n/4zdXf+N3l//jt5g/4/e
|
||||
Yv+Q32P/luFs/67kj//D6K3/xOmu/8Tpr//F6bD/xemw/8bpsf/G6bL/xuqy/8fqtP7o7+WR8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvPz8xYAAAAAAAAAAAAAAAAAAAAA8vLyPPHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLV7ci0v+eo/7/nqP+/56j/v+eo/7/nqP+/56j/v+eo/7/nqP+/56j/wOio/7Xl
|
||||
mv+u5I7/rOSM/67kj/+35pz/wumr/8Lorf/D6K3/w+it/8Tprv/E6a//xemw/8XpsP/G6bH/xumy/9Ds
|
||||
wNPx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8vLyZQAAAAAAAAAAAAAAAAAAAAAAAAAA////DPHx
|
||||
8YDx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGCx+m03L/nqP+/56j/v+eo/7/nqP+/56j/v+eo/7/n
|
||||
qP+/56j/v+eo/7/nqP+/56j/wOep/8Doqv/B6Kr/weir/8LorP/C6K3/w+it/8Porv/E6a7/xOmv/8Xp
|
||||
sP/F6bD/yOq18uvw6Yvx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC7+/vMQAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAPHx8Vzx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC6O/ij8LorPG/56j/v+eo/7/n
|
||||
qP+/56j/v+eo/7/nqP+/56j/v+eo/7/nqP+/56j/v+eo/8Dnqf/A6Kr/weiq/8Hoq//C6Kz/wuit/8Po
|
||||
rf/D6K7/xOmu/8Tpr//F6bH74u/anvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLw8PB6////BQAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAPPz8yrx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxguHu
|
||||
2pnB56v2v+eo/7/nqP+/56j/v+eo/7/nqP+/56j/v+eo/7/nqP+/56j/v+eo/7/nqP/A56n/wOiq/8Ho
|
||||
q//B6Kv/wuis/8Lorf/D6K3/w+mu/8Tprv3b7dKq8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fFJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHy8vJf8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLi7tyXwumt8L/nqP+/56j/v+eo/7/nqP+/56j/v+eo/7/nqP+/56j/v+eo/7/n
|
||||
qP+/56j/wOep/8Doqv/B6Kv/weir/8LorP/C6K3/xOiv+d7u1aTx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvLy8nb///8KAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADv7+8Q8/Pze/Hx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC6/Dpiszqu82/56j/v+eo/7/nqP+/56j/v+eo/7/n
|
||||
qP+/56j/v+eo/7/nqP+/56j/v+eo/8Dnqf/A6Kr/weir/8Hoq//H6bTj5e7elfHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvPz8yoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAA9fX1MvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLe7tShx+mz3r/n
|
||||
qP+/56j/v+eo/7/nqP+/56j/v+eo/7/nqP+/56j/v+eo/7/nqP/A56n/xumy5drtz6rv8e+D8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8vLyTgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAPHx8Unx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgubv45DU68e2y+q6z8XoseTD6a7uweir9MPpru7F6bHly+q50tLsxLrl796U8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLy8vJh////AwAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///wHx8fFZ8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8Wzf398IAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///8D8/PzVfHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8PDwZujo
|
||||
6AsAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAA////AfHx8Ujx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fFa////BQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADz8/Mp8vLydvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8/PzfPHx8TcAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////CvLy8lDz8/N/8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvPz84Hx8fFa8PDwEQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AADw8PAR8vLyTvHx8X3x8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fF/8/PzVvT09BgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///wXz8/Mq8/PzU/Hx8XDx8fGB8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLy8vJz8fHxWO/v7y////8IAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///8G7e3tHfLy
|
||||
8ifu7u4u8PDwNPT09C/y8vIo7+/vH+Pj4wkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAP///////wAA////////AAD///////8AAP//gAf//wAA//gAAD//AAD/wAAAB/8AAP+A
|
||||
AAAB/wAA/gAAAAB/AAD8AAAAAD8AAPgAAAAAHwAA8AAAAAAPAADwAAAAAAcAAOAAAAAABwAA4AAAAAAD
|
||||
AADAAAAAAAMAAMAAAAAAAwAAwAAAAAABAACAAAAAAAEAAIAAAAAAAQAAgAAAAAABAACAAAAAAAEAAIAA
|
||||
AAAAAQAAgAAAAAABAACAAAAAAAMAAMAAAAAAAwAAwAAAAAADAADAAAAAAAMAAMAAAAAABwAAwAAAAAAH
|
||||
AADgAAAAAAcAAOAAAAAADwAA4AAAAAAPAADwAAAAAB8AAPAAAAAAHwAA+AAAAAA/AAD8AAAAAD8AAPwA
|
||||
AAAAfwAA/gAAAAD/AAD/AAAAAf8AAP+AAAAD/wAA/8AAAAf/AAD/8AAAH/8AAP/8AAA//wAA//8AAf//
|
||||
AAD//+AP//8AAP///////wAA////////AAD///////8AACgAAAAgAAAAQAAAAAEAIAAAAAAAABAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAgICAAu/v7xD09PQX7u7uHvDw8CP29vYb8vLyFOrq6gwAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAICA
|
||||
gALy8vIm7+/vT/Pz82fz8/N98fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvDw8Hrw8PBm7+/vUPT0
|
||||
9C3o6OgLAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOPj
|
||||
4wnz8/NC8vLydPHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YHy8vJj8/PzKoCAgAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AADx8fEl8vLydfHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxcfHx8SUAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAA9PT0LfHx8YDx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8/PzgPLy8j0AAAABAAAAAAAA
|
||||
AAAAAAAAAAAAAO3t7Rzx8fGA8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLr8OmM5O7emeTv
|
||||
3Z7h79mj5fDem+nv45Tu8u6H8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvLy
|
||||
8joAAAAAAAAAAAAAAAD///8E8fHxbvHx8YLx8fGC8fHxgvHx8YLx8fGC7vDshtns0K7N67zayeq288fq
|
||||
s//I6rT/yOq1/8nqtv/K67f/y+u4/8vruf/P7L7w0+zF29vv0Lrn8OKX8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8/PzfvPz8xUAAAAAAAAAAPX19TLx8fGC8fHxgvHx8YLx8fGC8fHxgt3u1KXF6rHzxOmv/8Xp
|
||||
sP/G6bH/xumy/8fqs//I6rT/yOq1/8nqtv/K67f/y+u4/8vruf/M67v/zey8/87svf/S7MPj4u7Zp/Hx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8/PzVQAAAAAAAAAA8fHxavHx8YLx8fGC8fHxgvHx8YLf7defwuis/cPo
|
||||
rf/E6a7/xOmv/8XpsP/G6bH/xumy/8fqs//I6rT/yOq1/8nqtv/K67f/y+u4/8vruv/M67v/zey8/87s
|
||||
vf/N67z/3e7SufHx8YLx8fGC8fHxgvHx8YLz8/N8////Bf///w3x8fGC8fHxgvHx8YLx8fGC8fHxgsXp
|
||||
sOnB6Kv/wuis/8Porf/E6a7/xOmv/8XpsP/G6bH/xumy/8fqs//I6rT/yOq1/8nqtv/K67f/y+u4/8vr
|
||||
uv/M67v/zey8/87svf/O67z96/Hoj/Hx8YLx8fGC8fHxgvHx8YLy8vIm8/PzK/Hx8YLx8fGC8fHxgvHx
|
||||
8YLg79icwOep/8Hoqv/B6Kv/wuis/8Porf/E6a7/wuit/73opP+76KL/u+eh/77opv/D6a3/yeu1/8nq
|
||||
tv/K67f/y+u5/8zruv/M67v/zey8/87svf/d7tSz8fHxgvHx8YLx8fGC8fHxgvHx8Tby8vI68fHxgvHx
|
||||
8YLx8fGC8fHxgtTrxre/56j/wOep/8Hoqv/B6Kv/uOad/53idv+V4Gn/leBq/5fhbP+Y4W//muFx/5vi
|
||||
c/+e4Xb/puWD/7PmlP/D6a3/y+u5/8zruv/M67v/zey8/9rtzsHx8fGC8fHxgvHx8YLx8fGC8/PzQfPz
|
||||
80Lx8fGC8fHxgvHx8YLx8fGC0OvAwr/nqP+/56j/wOep/8Hoqv+o44b/kd9k/5LfZv+U4Gj/leBq/5fh
|
||||
bf+Y4W//muFx/5vic/+d4nX/n+N3/7fnm//K67j/y+u5/8zruv/M67v/2u3QvPHx8YLx8fGC8fHxgvHx
|
||||
8YLy8vI98/PzP/Hx8YLx8fGC8fHxgvHx8YLQ6sK/v+eo/7/nqP+/56j/wOep/6jjhv+P3mL/kd9k/5Lf
|
||||
Zv+U4Gj/leBr/5fhbf+Y4W//muFx/5zic/+d4nX/v+mm/8nqt//K67j/y+u5/8zruv/f79au8fHxgvHx
|
||||
8YLx8fGC8fHxgvX19TLx8fE38fHxgvHx8YLx8fGC8fHxgtTrybO/56j/v+eo/7/nqP+/56j/sOSS/47e
|
||||
YP+P3mL/kd9k/5LfZv+U4Gj/leBr/5fhbf+Z4W//muJx/5/jd//H6bP/yeq2/8nqt//K67j/y+u5/+nv
|
||||
45Tx8fGC8fHxgvHx8YLx8fGC7+/vIPHx8SXx8fGC8fHxgvHx8YLx8fGC4e/Zm7/nqP+/56j/v+eo/7/n
|
||||
qP+956X/jt5h/47eYP+P3mL/kd9k/5LfZv+U4Gn/luBr/5fhbf+Z4W//q+aK/8fqs//I6rT/yeq2/8nq
|
||||
t//N7Lvw8fHxgvHx8YLx8fGC8fHxgvPz84D///8G6+vrDfHx8YLx8fGC8fHxgvHx8YLv8e+Dweis87/n
|
||||
qP+/56j/v+eo/7/nqP+d4XX/jN1e/47eYP+P3mL/kd9k/5PfZ/+U4Gn/luBr/5fhbf+86KP/xuqy/8fq
|
||||
s//I6rX/yeq2/9Tsx8nx8fGC8fHxgvHx8YLx8fGC8PDwaAAAAAAAAAAA8fHxbPHx8YLx8fGC8fHxgvHx
|
||||
8YLM6rrMv+eo/7/nqP+/56j/v+eo/7blmv+N3V//jN1e/47eYP+Q3mL/kd9k/5PfZ/+U4Gn/qeSH/8Xp
|
||||
sP/G6bH/xuqy/8fqs//I6rX/5fDem/Hx8YLx8fGC8fHxgvHx8YLz8/M/AAAAAAAAAADz8/NB8fHxgvHx
|
||||
8YLx8fGC8fHxgt3s06O/56j/v+eo/7/nqP+/56j/v+eo/7Xmmf+U32n/jN1e/47eYP+Q3mL/k99o/6zk
|
||||
i//D6a7/xemv/8XpsP/G6bH/xuqy/8vqu+jx8fGC8fHxgvHx8YLx8fGC8fHxgvPz8xUAAAAAAAAAAPT0
|
||||
9Bfx8fGC8fHxgvHx8YLx8fGC8fHvg8Tpsee/56j/v+eo/7/nqP+/56j/v+eo/7/nqP+35pz/suWV/7Xm
|
||||
mf/A6Kj/wuit/8Porf/E6a7/xemv/8XpsP/G6bH/3e3UqvHx8YLx8fGC8fHxgvHx8YLw8PBmAAAAAAAA
|
||||
AAAAAAAAAAAAAPHx8W7x8fGC8fHxgvHx8YLx8fGC4u7cmMHnqvm/56j/v+eo/7/nqP+/56j/v+eo/7/n
|
||||
qP+/56j/wOep/8Hoqv/C6Kz/wuit/8Porf/E6a7/xemv/9Hrwszx8fGC8fHxgvHx8YLx8fGC8fHxgvX1
|
||||
9TEAAAAAAAAAAAAAAAAAAAAA7u7uO/Hx8YLx8fGC8fHxgvHx8YLx8fGC3e7SpMHoqfq/56j/v+eo/7/n
|
||||
qP+/56j/v+eo/7/nqP+/56j/wOip/8Hoq//C6Kz/wuit/8Porf/O67zV8PHwhPHx8YLx8fGC8fHxgvHx
|
||||
8YLy8vJ2////BQAAAAAAAAAAAAAAAAAAAACqqqoD8PDwafHx8YLx8fGC8fHxgvHx8YLx8fGC4O/YnMTo
|
||||
ruy/56j/v+eo/7/nqP+/56j/v+eo/7/nqP+/56j/wOip/8Hoq//C6Kz90uvEwe/x74Px8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvPz8ykAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADz8/MW8fHxfPHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8PLuhdXtyLXF6bHlv+eo/7/nqP+/56j/v+eo/7/nqP/B6Kv0zeq8zOXv4JTx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLy8vJNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADy8vIm8fHxgPHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLs8OmJ4e/Zm93u06Pf7def5+/hkvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxXf///wIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AADy8vIo8/PzffHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8VnMzMwFAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAD29vYb8fHxbvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvPz83/v7+9BgICAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADMzMwF8/PzQPLy8nnx8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgvPz84Hx8fFc9PT0GAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////B/X19TLx8fFc8PDwevHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8fHxgPHx8Wv09PRE9PT0FwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAA7+/vEPb29hvw8PAj7+/vH/T09Be/v78EAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////8B///wAA//wAAD/wAAAP4AAAB+AA
|
||||
AAfAAAADwAAAA4AAAAGAAAABgAAAAYAAAAGAAAABgAAAAYAAAAGAAAADwAAAA8AAAAPAAAAH4AAAB+AA
|
||||
AA/wAAAP+AAAH/gAAD/+AAB//wAB///AA///+B////////////8oAAAAEAAAACAAAAABACAAAAAAAAAE
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////CfDw8BH///8GAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgICAAu7u7i7x8fFe8PDwevHx8YLx8fGC8fHxgvDw
|
||||
8Hvx8fFs7+/vT/Dw8CMAAAABAAAAAAAAAAAAAAAA5ubmCvLy8l/x8fGC8fHxgvHx8YLx8fGC8fHxgvHx
|
||||
8YLx8fGC8fHxgvHx8YLx8fGC8/PzZu7u7g8AAAAAAAAAAPHx8V3x8fGC8fHxgunv5o7Z7c200+vFytTs
|
||||
xc7W7cnH2+7QueLu2qbu8OyH8fHxgvHx8YLx8fFu////BfHx8STx8fGC8fHxgtrtzq3D6a/8xemw/8bp
|
||||
sv/I6rT/yeq2/8vruP/M67v/z+u++Nzu0bjx8fGC8fHxgu/v7zDx8fFI8fHxguzw6ojC56z3wuis/8Tp
|
||||
rv/E6q3/weiq/8fqsv/J6rb/y+u5/8zru//N67z/6/HpjfHx8YLy8vJN8fHxXPHx8YLg79icv+eo/8Ho
|
||||
qv+k4n//lOBo/5fhbf+a4XH/n+J5/7Pmlv/L67n/zOu7/+Xw353x8fGC8fHxXvHx8Vrx8fGC4O3Zm7/n
|
||||
qP+/56j/nuF3/5HfZP+U4Gj/l+Ft/5ricf+x5pL/yeq3/8vruf/r8emN8fHxgu/v70/x8fFK8fHxguzw
|
||||
6ojA6Kn8v+eo/6njiP+O3mD/kd9k/5Tgaf+X4W3/vuim/8jqtP/N67zr8fHxgvHx8YLy8vI68/PzK/Hx
|
||||
8YLx8fGCx+m03L/nqP++6Kb/meBw/47eYP+S32X/q+SL/8XpsP/G6rL/1+zLvvHx8YLz8/OB8PDwEdXV
|
||||
1Qbx8fF98fHxgt/t1Z/A56j9v+eo/7/nqP+656H/vuim/8Lorf/E6a7/yOq18Ovw6Yvx8fGC8vLyYwAA
|
||||
AAAAAAAA8fHxR/Hx8YLx8fGC2O3NrMDnqfq/56j/v+eo/7/nqP/B6Kv/xumy7OTu3Zfx8fGC8/PzgfLy
|
||||
8icAAAAAAAAAAP///wPz8/Nm8fHxgvHx8YLo7+SO0+zFuczquszM6bzJ1+zMru7w7Ibx8fGC8fHxgvHx
|
||||
8UcAAAAAAAAAAAAAAAAAAAAA4+PjCfHx8Vzx8fGC8fHxgvHx8YLx8fGC8fHxgvHx8YLx8fGC8fHxgfPz
|
||||
80D///8BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB8/PzK/Ly8mDz8/N+8fHxgvHx8YLy8vJ68vLyUezs
|
||||
7BsAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAevr6w3j4+MJAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//AAD8fwAA4AcAAMADAACAAQAAgAEAAIABAACAAQAAgAEAAIAB
|
||||
AADAAwAAwAMAAOAHAADwDwAA/n8AAP//AAA=
|
||||
</value>
|
||||
</data>
|
||||
</root>
|
3
extra/exe-builder/FodyWeavers.xml
Normal file
3
extra/exe-builder/FodyWeavers.xml
Normal file
@@ -0,0 +1,3 @@
|
||||
<Weavers xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="FodyWeavers.xsd">
|
||||
<Costura DisableCompression='true' IncludeDebugSymbols='false' />
|
||||
</Weavers>
|
141
extra/exe-builder/FodyWeavers.xsd
Normal file
@@ -0,0 +1,141 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema">
|
||||
<!-- This file was generated by Fody. Manual changes to this file will be lost when your project is rebuilt. -->
|
||||
<xs:element name="Weavers">
|
||||
<xs:complexType>
|
||||
<xs:all>
|
||||
<xs:element name="Costura" minOccurs="0" maxOccurs="1">
|
||||
<xs:complexType>
|
||||
<xs:all>
|
||||
<xs:element minOccurs="0" maxOccurs="1" name="ExcludeAssemblies" type="xs:string">
|
||||
<xs:annotation>
|
||||
<xs:documentation>A list of assembly names to exclude from the default action of "embed all Copy Local references", delimited with line breaks</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:element>
|
||||
<xs:element minOccurs="0" maxOccurs="1" name="IncludeAssemblies" type="xs:string">
|
||||
<xs:annotation>
|
||||
<xs:documentation>A list of assembly names to include from the default action of "embed all Copy Local references", delimited with line breaks.</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:element>
|
||||
<xs:element minOccurs="0" maxOccurs="1" name="ExcludeRuntimeAssemblies" type="xs:string">
|
||||
<xs:annotation>
|
||||
<xs:documentation>A list of runtime assembly names to exclude from the default action of "embed all Copy Local references", delimited with line breaks</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:element>
|
||||
<xs:element minOccurs="0" maxOccurs="1" name="IncludeRuntimeAssemblies" type="xs:string">
|
||||
<xs:annotation>
|
||||
<xs:documentation>A list of runtime assembly names to include from the default action of "embed all Copy Local references", delimited with line breaks.</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:element>
|
||||
<xs:element minOccurs="0" maxOccurs="1" name="Unmanaged32Assemblies" type="xs:string">
|
||||
<xs:annotation>
|
||||
<xs:documentation>A list of unmanaged 32 bit assembly names to include, delimited with line breaks.</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:element>
|
||||
<xs:element minOccurs="0" maxOccurs="1" name="Unmanaged64Assemblies" type="xs:string">
|
||||
<xs:annotation>
|
||||
<xs:documentation>A list of unmanaged 64 bit assembly names to include, delimited with line breaks.</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:element>
|
||||
<xs:element minOccurs="0" maxOccurs="1" name="PreloadOrder" type="xs:string">
|
||||
<xs:annotation>
|
||||
<xs:documentation>The order of preloaded assemblies, delimited with line breaks.</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:element>
|
||||
</xs:all>
|
||||
<xs:attribute name="CreateTemporaryAssemblies" type="xs:boolean">
|
||||
<xs:annotation>
|
||||
<xs:documentation>This will copy embedded files to disk before loading them into memory. This is helpful for some scenarios that expected an assembly to be loaded from a physical file.</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="IncludeDebugSymbols" type="xs:boolean">
|
||||
<xs:annotation>
|
||||
<xs:documentation>Controls if .pdbs for reference assemblies are also embedded.</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="IncludeRuntimeReferences" type="xs:boolean">
|
||||
<xs:annotation>
|
||||
<xs:documentation>Controls if runtime assemblies are also embedded.</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="UseRuntimeReferencePaths" type="xs:boolean">
|
||||
<xs:annotation>
|
||||
<xs:documentation>Controls whether the runtime assemblies are embedded with their full path or only with their assembly name.</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="DisableCompression" type="xs:boolean">
|
||||
<xs:annotation>
|
||||
<xs:documentation>Embedded assemblies are compressed by default, and uncompressed when they are loaded. You can turn compression off with this option.</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="DisableCleanup" type="xs:boolean">
|
||||
<xs:annotation>
|
||||
<xs:documentation>As part of Costura, embedded assemblies are no longer included as part of the build. This cleanup can be turned off.</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="LoadAtModuleInit" type="xs:boolean">
|
||||
<xs:annotation>
|
||||
<xs:documentation>Costura by default will load as part of the module initialization. This flag disables that behavior. Make sure you call CosturaUtility.Initialize() somewhere in your code.</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="IgnoreSatelliteAssemblies" type="xs:boolean">
|
||||
<xs:annotation>
|
||||
<xs:documentation>Costura will by default use assemblies with a name like 'resources.dll' as a satellite resource and prepend the output path. This flag disables that behavior.</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="ExcludeAssemblies" type="xs:string">
|
||||
<xs:annotation>
|
||||
<xs:documentation>A list of assembly names to exclude from the default action of "embed all Copy Local references", delimited with |</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="IncludeAssemblies" type="xs:string">
|
||||
<xs:annotation>
|
||||
<xs:documentation>A list of assembly names to include from the default action of "embed all Copy Local references", delimited with |.</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="ExcludeRuntimeAssemblies" type="xs:string">
|
||||
<xs:annotation>
|
||||
<xs:documentation>A list of runtime assembly names to exclude from the default action of "embed all Copy Local references", delimited with |</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="IncludeRuntimeAssemblies" type="xs:string">
|
||||
<xs:annotation>
|
||||
<xs:documentation>A list of runtime assembly names to include from the default action of "embed all Copy Local references", delimited with |.</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="Unmanaged32Assemblies" type="xs:string">
|
||||
<xs:annotation>
|
||||
<xs:documentation>A list of unmanaged 32 bit assembly names to include, delimited with |.</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="Unmanaged64Assemblies" type="xs:string">
|
||||
<xs:annotation>
|
||||
<xs:documentation>A list of unmanaged 64 bit assembly names to include, delimited with |.</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="PreloadOrder" type="xs:string">
|
||||
<xs:annotation>
|
||||
<xs:documentation>The order of preloaded assemblies, delimited with |.</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
</xs:complexType>
|
||||
</xs:element>
|
||||
</xs:all>
|
||||
<xs:attribute name="VerifyAssembly" type="xs:boolean">
|
||||
<xs:annotation>
|
||||
<xs:documentation>'true' to run assembly verification (PEVerify) on the target assembly after all weavers have been executed.</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="VerifyIgnoreCodes" type="xs:string">
|
||||
<xs:annotation>
|
||||
<xs:documentation>A comma-separated list of error codes that can be safely ignored in assembly verification.</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="GenerateXsd" type="xs:boolean">
|
||||
<xs:annotation>
|
||||
<xs:documentation>'false' to turn off automatic generation of the XML Schema file.</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
</xs:complexType>
|
||||
</xs:element>
|
||||
</xs:schema>
|
262
extra/exe-builder/Program.cs
Normal file
@@ -0,0 +1,262 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.Drawing;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net;
|
||||
using System.Net.Sockets;
|
||||
using System.Reflection;
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using System.Windows.Forms;
|
||||
using Microsoft.Win32;
|
||||
using Newtonsoft.Json;
|
||||
using UptimeKuma.Properties;
|
||||
|
||||
namespace UptimeKuma {
|
||||
static class Program {
|
||||
/// <summary>
|
||||
/// The main entry point for the application.
|
||||
/// </summary>
|
||||
[STAThread]
|
||||
static void Main(string[] args) {
|
||||
var cwd = Path.GetDirectoryName(Application.ExecutablePath);
|
||||
|
||||
if (cwd != null) {
|
||||
Environment.CurrentDirectory = cwd;
|
||||
}
|
||||
|
||||
bool isIntranet = args.Contains("--intranet");
|
||||
|
||||
if (isIntranet) {
|
||||
Console.WriteLine("The --intranet argument was provided, so we will not try to access the internet. The first time this application runs you'll need to run it without the --intranet param or copy the result from another machine to the intranet server.");
|
||||
}
|
||||
|
||||
Application.EnableVisualStyles();
|
||||
Application.SetCompatibleTextRenderingDefault(false);
|
||||
Application.Run(new UptimeKumaApplicationContext(isIntranet));
|
||||
}
|
||||
}
|
||||
|
||||
public class UptimeKumaApplicationContext : ApplicationContext
|
||||
{
|
||||
private static Mutex mutex = null;
|
||||
|
||||
const string appName = "Uptime Kuma";
|
||||
|
||||
private NotifyIcon trayIcon;
|
||||
private Process process;
|
||||
|
||||
private MenuItem statusMenuItem;
|
||||
private MenuItem runWhenStarts;
|
||||
private MenuItem openMenuItem;
|
||||
|
||||
private RegistryKey registryKey = Registry.CurrentUser.OpenSubKey("SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Run", true);
|
||||
|
||||
private readonly bool intranetOnly;
|
||||
|
||||
public UptimeKumaApplicationContext(bool intranetOnly) {
|
||||
|
||||
// Single instance only
|
||||
bool createdNew;
|
||||
mutex = new Mutex(true, appName, out createdNew);
|
||||
if (!createdNew) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.intranetOnly = intranetOnly;
|
||||
|
||||
var startingText = "Starting server...";
|
||||
trayIcon = new NotifyIcon();
|
||||
trayIcon.Text = startingText;
|
||||
|
||||
runWhenStarts = new MenuItem("Run when system starts", RunWhenStarts);
|
||||
runWhenStarts.Checked = registryKey.GetValue(appName) != null;
|
||||
|
||||
statusMenuItem = new MenuItem(startingText);
|
||||
statusMenuItem.Enabled = false;
|
||||
|
||||
openMenuItem = new MenuItem("Open", Open);
|
||||
openMenuItem.Enabled = false;
|
||||
|
||||
trayIcon.Icon = Icon.ExtractAssociatedIcon(Assembly.GetExecutingAssembly().Location);
|
||||
trayIcon.ContextMenu = new ContextMenu(new MenuItem[] {
|
||||
statusMenuItem,
|
||||
openMenuItem,
|
||||
//new("Debug Console", DebugConsole),
|
||||
runWhenStarts,
|
||||
new("Check for Update...", CheckForUpdate),
|
||||
new("Visit GitHub...", VisitGitHub),
|
||||
new("About", About),
|
||||
new("Exit", Exit),
|
||||
});
|
||||
|
||||
trayIcon.MouseDoubleClick += new MouseEventHandler(Open);
|
||||
trayIcon.Visible = true;
|
||||
|
||||
var hasUpdateFile = File.Exists("update");
|
||||
|
||||
if (!hasUpdateFile && Directory.Exists("core") && Directory.Exists("node") && Directory.Exists("core/node_modules") && Directory.Exists("core/dist")) {
|
||||
// Go go go
|
||||
StartProcess();
|
||||
} else {
|
||||
DownloadFiles();
|
||||
}
|
||||
}
|
||||
|
||||
void DownloadFiles() {
|
||||
if (intranetOnly) {
|
||||
return;
|
||||
}
|
||||
|
||||
var form = new DownloadForm();
|
||||
form.Closed += Exit;
|
||||
form.Show();
|
||||
}
|
||||
|
||||
private void RunWhenStarts(object sender, EventArgs e) {
|
||||
if (registryKey == null) {
|
||||
MessageBox.Show("Error: Unable to set startup registry key.");
|
||||
return;
|
||||
}
|
||||
|
||||
if (runWhenStarts.Checked) {
|
||||
registryKey.DeleteValue(appName, false);
|
||||
runWhenStarts.Checked = false;
|
||||
} else {
|
||||
registryKey.SetValue(appName, Application.ExecutablePath);
|
||||
runWhenStarts.Checked = true;
|
||||
}
|
||||
}
|
||||
|
||||
void StartProcess() {
|
||||
var startInfo = new ProcessStartInfo {
|
||||
FileName = "node/node.exe",
|
||||
Arguments = "server/server.js --data-dir=\"../data/\"",
|
||||
RedirectStandardOutput = false,
|
||||
RedirectStandardError = false,
|
||||
UseShellExecute = false,
|
||||
CreateNoWindow = true,
|
||||
WorkingDirectory = "core"
|
||||
};
|
||||
|
||||
process = new Process();
|
||||
process.StartInfo = startInfo;
|
||||
process.EnableRaisingEvents = true;
|
||||
process.Exited += ProcessExited;
|
||||
|
||||
try {
|
||||
process.Start();
|
||||
//Open(null, null);
|
||||
|
||||
// Async task to check if the server is ready
|
||||
Task.Run(() => {
|
||||
var runningText = "Server is running";
|
||||
using TcpClient tcpClient = new TcpClient();
|
||||
while (true) {
|
||||
try {
|
||||
tcpClient.Connect("127.0.0.1", 3001);
|
||||
statusMenuItem.Text = runningText;
|
||||
openMenuItem.Enabled = true;
|
||||
trayIcon.Text = runningText;
|
||||
break;
|
||||
} catch (Exception) {
|
||||
System.Threading.Thread.Sleep(2000);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
} catch (Exception e) {
|
||||
MessageBox.Show("Startup failed: " + e.Message, "Uptime Kuma Error");
|
||||
}
|
||||
}
|
||||
|
||||
void StopProcess() {
|
||||
process?.Kill();
|
||||
}
|
||||
|
||||
void Open(object sender, EventArgs e) {
|
||||
Process.Start("http://localhost:3001");
|
||||
}
|
||||
|
||||
void DebugConsole(object sender, EventArgs e) {
|
||||
|
||||
}
|
||||
|
||||
void CheckForUpdate(object sender, EventArgs e) {
|
||||
if (intranetOnly) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Check version.json exists
|
||||
if (File.Exists("version.json")) {
|
||||
// Load version.json and compare with the latest version from GitHub
|
||||
var currentVersionObj = JsonConvert.DeserializeObject<Version>(File.ReadAllText("version.json"));
|
||||
|
||||
var versionJson = new WebClient().DownloadString("https://uptime.kuma.pet/version");
|
||||
var latestVersionObj = JsonConvert.DeserializeObject<Version>(versionJson);
|
||||
|
||||
// Compare version, if the latest version is newer, then update
|
||||
if (new System.Version(latestVersionObj.latest).CompareTo(new System.Version(currentVersionObj.latest)) > 0) {
|
||||
var result = MessageBox.Show("A new version is available. Do you want to update?", "Update", MessageBoxButtons.YesNo);
|
||||
if (result == DialogResult.Yes) {
|
||||
// Create an empty file `update`, so the app will download the core files again at startup
|
||||
File.Create("update").Close();
|
||||
|
||||
trayIcon.Visible = false;
|
||||
process?.Kill();
|
||||
|
||||
// Restart the app, it will download the core files again at startup
|
||||
Application.Restart();
|
||||
}
|
||||
} else {
|
||||
MessageBox.Show("You are using the latest version.");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
void VisitGitHub(object sender, EventArgs e) {
|
||||
if (intranetOnly) {
|
||||
MessageBox.Show("You have parsed in --intranet so we will not try to access the internet or visit github.com, please go to https://github.com/louislam/uptime-kuma if you want to visit github.");
|
||||
return;
|
||||
}
|
||||
|
||||
Process.Start("https://github.com/louislam/uptime-kuma");
|
||||
}
|
||||
|
||||
void About(object sender, EventArgs e)
|
||||
{
|
||||
MessageBox.Show("Uptime Kuma Windows Runtime v1.0.0" + Environment.NewLine + "© 2023 Louis Lam", "Info");
|
||||
}
|
||||
|
||||
void Exit(object sender, EventArgs e)
|
||||
{
|
||||
// Hide tray icon, otherwise it will remain shown until user mouses over it
|
||||
trayIcon.Visible = false;
|
||||
process?.Kill();
|
||||
Application.Exit();
|
||||
}
|
||||
|
||||
void ProcessExited(object sender, EventArgs e) {
|
||||
|
||||
if (process.ExitCode != 0) {
|
||||
var line = "";
|
||||
while (!process.StandardOutput.EndOfStream)
|
||||
{
|
||||
line += process.StandardOutput.ReadLine();
|
||||
}
|
||||
|
||||
MessageBox.Show("Uptime Kuma exited unexpectedly. Exit code: " + process.ExitCode + " " + line);
|
||||
}
|
||||
|
||||
trayIcon.Visible = false;
|
||||
Application.Exit();
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
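The readiness check in `StartProcess()` above simply retries a TCP connection to `127.0.0.1:3001` every two seconds until the bundled Node.js server answers. A minimal Node.js sketch of the same idea (illustration only; the `waitForServer` helper is hypothetical and not part of this commit):

```js
// Illustration only: the same "wait until the server accepts TCP connections on
// port 3001" idea as Program.cs, written with Node's built-in net module.
const net = require("net");

function waitForServer(port = 3001, host = "127.0.0.1", retryMs = 2000) {
    return new Promise((resolve) => {
        const attempt = () => {
            const socket = net.connect(port, host);
            socket.once("connect", () => {
                socket.end();
                resolve(); // server is ready
            });
            socket.once("error", () => {
                socket.destroy();
                setTimeout(attempt, retryMs); // not up yet, retry like the tray app does
            });
        };
        attempt();
    });
}

waitForServer().then(() => console.log("Server is running"));
```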
36
extra/exe-builder/Properties/AssemblyInfo.cs
Normal file
@@ -0,0 +1,36 @@
|
||||
using System.Reflection;
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Runtime.InteropServices;
|
||||
|
||||
// General Information about an assembly is controlled through the following
|
||||
// set of attributes. Change these attribute values to modify the information
|
||||
// associated with an assembly.
|
||||
[assembly: AssemblyTitle("Uptime Kuma")]
|
||||
[assembly: AssemblyDescription("A portable executable for running Uptime Kuma")]
|
||||
[assembly: AssemblyConfiguration("")]
|
||||
[assembly: AssemblyCompany("Uptime Kuma")]
|
||||
[assembly: AssemblyProduct("Uptime Kuma")]
|
||||
[assembly: AssemblyCopyright("Copyright © 2023 Louis Lam")]
|
||||
[assembly: AssemblyTrademark("")]
|
||||
[assembly: AssemblyCulture("")]
|
||||
|
||||
// Setting ComVisible to false makes the types in this assembly not visible
|
||||
// to COM components. If you need to access a type in this assembly from
|
||||
// COM, set the ComVisible attribute to true on that type.
|
||||
[assembly: ComVisible(false)]
|
||||
|
||||
// The following GUID is for the ID of the typelib if this project is exposed to COM
|
||||
[assembly: Guid("86B40AFB-61FC-433D-8C31-650B0F32EA8F")]
|
||||
|
||||
// Version information for an assembly consists of the following four values:
|
||||
//
|
||||
// Major Version
|
||||
// Minor Version
|
||||
// Build Number
|
||||
// Revision
|
||||
//
|
||||
// You can specify all the values or you can default the Build and Revision Numbers
|
||||
// by using the '*' as shown below:
|
||||
// [assembly: AssemblyVersion("1.0.*")]
|
||||
[assembly: AssemblyVersion("1.0.2.0")]
|
||||
[assembly: AssemblyFileVersion("1.0.2.0")]
|
62
extra/exe-builder/Properties/Resources.Designer.cs
generated
Normal file
@@ -0,0 +1,62 @@
|
||||
//------------------------------------------------------------------------------
|
||||
// <auto-generated>
|
||||
// This code was generated by a tool.
|
||||
// Runtime Version:4.0.30319.42000
|
||||
//
|
||||
// Changes to this file may cause incorrect behavior and will be lost if
|
||||
// the code is regenerated.
|
||||
// </auto-generated>
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
namespace UptimeKuma.Properties {
|
||||
/// <summary>
|
||||
/// A strongly-typed resource class, for looking up localized strings, etc.
|
||||
/// </summary>
|
||||
// This class was auto-generated by the StronglyTypedResourceBuilder
|
||||
// class via a tool like ResGen or Visual Studio.
|
||||
// To add or remove a member, edit your .ResX file then rerun ResGen
|
||||
// with the /str option, or rebuild your VS project.
|
||||
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder",
|
||||
"4.0.0.0")]
|
||||
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
|
||||
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
|
||||
internal class Resources {
|
||||
private static global::System.Resources.ResourceManager resourceMan;
|
||||
|
||||
private static global::System.Globalization.CultureInfo resourceCulture;
|
||||
|
||||
[global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance",
|
||||
"CA1811:AvoidUncalledPrivateCode")]
|
||||
internal Resources() {
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Returns the cached ResourceManager instance used by this class.
|
||||
/// </summary>
|
||||
[global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState
|
||||
.Advanced)]
|
||||
internal static global::System.Resources.ResourceManager ResourceManager {
|
||||
get {
|
||||
if ((resourceMan == null)) {
|
||||
global::System.Resources.ResourceManager temp =
|
||||
new global::System.Resources.ResourceManager("UptimeKuma.Properties.Resources",
|
||||
typeof(Resources).Assembly);
|
||||
resourceMan = temp;
|
||||
}
|
||||
|
||||
return resourceMan;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Overrides the current thread's CurrentUICulture property for all
|
||||
/// resource lookups using this strongly typed resource class.
|
||||
/// </summary>
|
||||
[global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState
|
||||
.Advanced)]
|
||||
internal static global::System.Globalization.CultureInfo Culture {
|
||||
get { return resourceCulture; }
|
||||
set { resourceCulture = value; }
|
||||
}
|
||||
}
|
||||
}
|
117
extra/exe-builder/Properties/Resources.resx
Normal file
@@ -0,0 +1,117 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<root>
|
||||
<!--
|
||||
Microsoft ResX Schema
|
||||
|
||||
Version 2.0
|
||||
|
||||
The primary goals of this format is to allow a simple XML format
|
||||
that is mostly human readable. The generation and parsing of the
|
||||
various data types are done through the TypeConverter classes
|
||||
associated with the data types.
|
||||
|
||||
Example:
|
||||
|
||||
... ado.net/XML headers & schema ...
|
||||
<resheader name="resmimetype">text/microsoft-resx</resheader>
|
||||
<resheader name="version">2.0</resheader>
|
||||
<resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader>
|
||||
<resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader>
|
||||
<data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data>
|
||||
<data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data>
|
||||
<data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64">
|
||||
<value>[base64 mime encoded serialized .NET Framework object]</value>
|
||||
</data>
|
||||
<data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
|
||||
<value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value>
|
||||
<comment>This is a comment</comment>
|
||||
</data>
|
||||
|
||||
There are any number of "resheader" rows that contain simple
|
||||
name/value pairs.
|
||||
|
||||
Each data row contains a name, and value. The row also contains a
|
||||
type or mimetype. Type corresponds to a .NET class that support
|
||||
text/value conversion through the TypeConverter architecture.
|
||||
Classes that don't support this are serialized and stored with the
|
||||
mimetype set.
|
||||
|
||||
The mimetype is used for serialized objects, and tells the
|
||||
ResXResourceReader how to depersist the object. This is currently not
|
||||
extensible. For a given mimetype the value must be set accordingly:
|
||||
|
||||
Note - application/x-microsoft.net.object.binary.base64 is the format
|
||||
that the ResXResourceWriter will generate, however the reader can
|
||||
read any of the formats listed below.
|
||||
|
||||
mimetype: application/x-microsoft.net.object.binary.base64
|
||||
value : The object must be serialized with
|
||||
: System.Serialization.Formatters.Binary.BinaryFormatter
|
||||
: and then encoded with base64 encoding.
|
||||
|
||||
mimetype: application/x-microsoft.net.object.soap.base64
|
||||
value : The object must be serialized with
|
||||
: System.Runtime.Serialization.Formatters.Soap.SoapFormatter
|
||||
: and then encoded with base64 encoding.
|
||||
|
||||
mimetype: application/x-microsoft.net.object.bytearray.base64
|
||||
value : The object must be serialized into a byte array
|
||||
: using a System.ComponentModel.TypeConverter
|
||||
: and then encoded with base64 encoding.
|
||||
-->
|
||||
<xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
|
||||
<xsd:element name="root" msdata:IsDataSet="true">
|
||||
<xsd:complexType>
|
||||
<xsd:choice maxOccurs="unbounded">
|
||||
<xsd:element name="metadata">
|
||||
<xsd:complexType>
|
||||
<xsd:sequence>
|
||||
<xsd:element name="value" type="xsd:string" minOccurs="0" />
|
||||
</xsd:sequence>
|
||||
<xsd:attribute name="name" type="xsd:string" />
|
||||
<xsd:attribute name="type" type="xsd:string" />
|
||||
<xsd:attribute name="mimetype" type="xsd:string" />
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
<xsd:element name="assembly">
|
||||
<xsd:complexType>
|
||||
<xsd:attribute name="alias" type="xsd:string" />
|
||||
<xsd:attribute name="name" type="xsd:string" />
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
<xsd:element name="data">
|
||||
<xsd:complexType>
|
||||
<xsd:sequence>
|
||||
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
|
||||
<xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
|
||||
</xsd:sequence>
|
||||
<xsd:attribute name="name" type="xsd:string" msdata:Ordinal="1" />
|
||||
<xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
|
||||
<xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
<xsd:element name="resheader">
|
||||
<xsd:complexType>
|
||||
<xsd:sequence>
|
||||
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
|
||||
</xsd:sequence>
|
||||
<xsd:attribute name="name" type="xsd:string" use="required" />
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
</xsd:choice>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
</xsd:schema>
|
||||
<resheader name="resmimetype">
|
||||
<value>text/microsoft-resx</value>
|
||||
</resheader>
|
||||
<resheader name="version">
|
||||
<value>2.0</value>
|
||||
</resheader>
|
||||
<resheader name="reader">
|
||||
<value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
|
||||
</resheader>
|
||||
<resheader name="writer">
|
||||
<value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
|
||||
</resheader>
|
||||
</root>
|
23
extra/exe-builder/Properties/Settings.Designer.cs
generated
Normal file
@@ -0,0 +1,23 @@
|
||||
//------------------------------------------------------------------------------
|
||||
// <auto-generated>
|
||||
// This code was generated by a tool.
|
||||
// Runtime Version:4.0.30319.42000
|
||||
//
|
||||
// Changes to this file may cause incorrect behavior and will be lost if
|
||||
// the code is regenerated.
|
||||
// </auto-generated>
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
namespace UptimeKuma.Properties {
|
||||
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
|
||||
[global::System.CodeDom.Compiler.GeneratedCodeAttribute(
|
||||
"Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "11.0.0.0")]
|
||||
internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
|
||||
private static Settings defaultInstance =
|
||||
((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
|
||||
|
||||
public static Settings Default {
|
||||
get { return defaultInstance; }
|
||||
}
|
||||
}
|
||||
}
|
7
extra/exe-builder/Properties/Settings.settings
Normal file
@@ -0,0 +1,7 @@
|
||||
<?xml version='1.0' encoding='utf-8'?>
|
||||
<SettingsFile xmlns="http://schemas.microsoft.com/VisualStudio/2004/01/settings" CurrentProfile="(Default)">
|
||||
<Profiles>
|
||||
<Profile Name="(Default)" />
|
||||
</Profiles>
|
||||
<Settings />
|
||||
</SettingsFile>
|
203
extra/exe-builder/UptimeKuma.csproj
Normal file
@@ -0,0 +1,203 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project ToolsVersion="15.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
|
||||
<PropertyGroup>
|
||||
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
|
||||
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
|
||||
<ProjectGuid>{2DB53988-1D93-4AC0-90C4-96ADEAAC5C04}</ProjectGuid>
|
||||
<OutputType>WinExe</OutputType>
|
||||
<RootNamespace>UptimeKuma</RootNamespace>
|
||||
<AssemblyName>uptime-kuma</AssemblyName>
|
||||
<TargetFrameworkVersion>v4.7.2</TargetFrameworkVersion>
|
||||
<FileAlignment>512</FileAlignment>
|
||||
<AutoGenerateBindingRedirects>true</AutoGenerateBindingRedirects>
|
||||
<Deterministic>true</Deterministic>
|
||||
<ApplicationIcon>..\..\public\favicon.ico</ApplicationIcon>
|
||||
<LangVersion>9</LangVersion>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
|
||||
<PlatformTarget>AnyCPU</PlatformTarget>
|
||||
<DebugSymbols>true</DebugSymbols>
|
||||
<DebugType>full</DebugType>
|
||||
<Optimize>false</Optimize>
|
||||
<OutputPath>bin\Debug\</OutputPath>
|
||||
<DefineConstants>DEBUG;TRACE</DefineConstants>
|
||||
<ErrorReport>prompt</ErrorReport>
|
||||
<WarningLevel>4</WarningLevel>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
|
||||
<PlatformTarget>AnyCPU</PlatformTarget>
|
||||
<DebugType>pdbonly</DebugType>
|
||||
<Optimize>true</Optimize>
|
||||
<OutputPath>bin\Release\</OutputPath>
|
||||
<DefineConstants>TRACE</DefineConstants>
|
||||
<ErrorReport>prompt</ErrorReport>
|
||||
<WarningLevel>4</WarningLevel>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup>
|
||||
<ApplicationManifest>app.manifest</ApplicationManifest>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup>
|
||||
<PostBuildEvent>COPY "$(SolutionDir)bin\Debug\uptime-kuma.exe" "%UserProfile%\Desktop\uptime-kuma-win64\"</PostBuildEvent>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<Reference Include="Microsoft.Win32.Primitives, Version=4.0.2.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
|
||||
<HintPath>packages\Microsoft.Win32.Primitives.4.3.0\lib\net46\Microsoft.Win32.Primitives.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="mscorlib" />
|
||||
<Reference Include="Newtonsoft.Json, Version=13.0.0.0, Culture=neutral, PublicKeyToken=30ad4fe6b2a6aeed, processorArchitecture=MSIL">
|
||||
<HintPath>packages\Newtonsoft.Json.13.0.2\lib\net45\Newtonsoft.Json.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System" />
|
||||
<Reference Include="System.AppContext, Version=4.1.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.AppContext.4.3.0\lib\net463\System.AppContext.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.Buffers, Version=4.0.3.0, Culture=neutral, PublicKeyToken=cc7b13ffcd2ddd51, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.Buffers.4.5.1\lib\net461\System.Buffers.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.ComponentModel.Composition" />
|
||||
<Reference Include="System.Console, Version=4.0.1.1, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.Console.4.3.1\lib\net46\System.Console.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.Core" />
|
||||
<Reference Include="System.Diagnostics.DiagnosticSource, Version=7.0.0.1, Culture=neutral, PublicKeyToken=cc7b13ffcd2ddd51, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.Diagnostics.DiagnosticSource.7.0.1\lib\net462\System.Diagnostics.DiagnosticSource.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.Diagnostics.Tracing, Version=4.1.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.Diagnostics.Tracing.4.3.0\lib\net462\System.Diagnostics.Tracing.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.Globalization.Calendars, Version=4.0.2.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.Globalization.Calendars.4.3.0\lib\net46\System.Globalization.Calendars.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.IO, Version=4.1.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.IO.4.3.0\lib\net462\System.IO.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.IO.Compression, Version=4.1.2.0, Culture=neutral, PublicKeyToken=b77a5c561934e089, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.IO.Compression.4.3.0\lib\net46\System.IO.Compression.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.IO.Compression.FileSystem" />
|
||||
<Reference Include="System.IO.Compression.ZipFile, Version=4.0.2.0, Culture=neutral, PublicKeyToken=b77a5c561934e089, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.IO.Compression.ZipFile.4.3.0\lib\net46\System.IO.Compression.ZipFile.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.IO.FileSystem, Version=4.0.2.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.IO.FileSystem.4.3.0\lib\net46\System.IO.FileSystem.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.IO.FileSystem.Primitives, Version=4.0.2.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.IO.FileSystem.Primitives.4.3.0\lib\net46\System.IO.FileSystem.Primitives.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.Linq, Version=4.1.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.Linq.4.3.0\lib\net463\System.Linq.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.Linq.Expressions, Version=4.1.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.Linq.Expressions.4.3.0\lib\net463\System.Linq.Expressions.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.Memory, Version=4.0.1.2, Culture=neutral, PublicKeyToken=cc7b13ffcd2ddd51, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.Memory.4.5.5\lib\net461\System.Memory.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.Net.Http, Version=4.1.1.3, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.Net.Http.4.3.4\lib\net46\System.Net.Http.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.Net.Sockets, Version=4.1.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.Net.Sockets.4.3.0\lib\net46\System.Net.Sockets.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.Numerics" />
|
||||
<Reference Include="System.Numerics.Vectors, Version=4.1.4.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.Numerics.Vectors.4.5.0\lib\net46\System.Numerics.Vectors.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.Reflection, Version=4.1.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.Reflection.4.3.0\lib\net462\System.Reflection.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.Runtime, Version=4.1.1.1, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.Runtime.4.3.1\lib\net462\System.Runtime.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.Runtime.CompilerServices.Unsafe, Version=6.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.Runtime.CompilerServices.Unsafe.6.0.0\lib\net461\System.Runtime.CompilerServices.Unsafe.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.Runtime.Extensions, Version=4.1.1.1, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.Runtime.Extensions.4.3.1\lib\net462\System.Runtime.Extensions.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.Runtime.InteropServices, Version=4.1.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.Runtime.InteropServices.4.3.0\lib\net463\System.Runtime.InteropServices.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.Runtime.InteropServices.RuntimeInformation, Version=4.0.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.Runtime.InteropServices.RuntimeInformation.4.3.0\lib\net45\System.Runtime.InteropServices.RuntimeInformation.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.Security.Cryptography.Algorithms, Version=4.2.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.Security.Cryptography.Algorithms.4.3.1\lib\net463\System.Security.Cryptography.Algorithms.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.Security.Cryptography.Encoding, Version=4.0.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.Security.Cryptography.Encoding.4.3.0\lib\net46\System.Security.Cryptography.Encoding.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.Security.Cryptography.Primitives, Version=4.0.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.Security.Cryptography.Primitives.4.3.0\lib\net46\System.Security.Cryptography.Primitives.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.Security.Cryptography.X509Certificates, Version=4.1.1.2, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.Security.Cryptography.X509Certificates.4.3.2\lib\net461\System.Security.Cryptography.X509Certificates.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.Text.RegularExpressions, Version=4.1.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.Text.RegularExpressions.4.3.1\lib\net463\System.Text.RegularExpressions.dll</HintPath>
|
||||
</Reference>
|
||||
<Reference Include="System.Xml.Linq" />
|
||||
<Reference Include="System.Data.DataSetExtensions" />
|
||||
<Reference Include="Microsoft.CSharp" />
|
||||
<Reference Include="System.Data" />
|
||||
<Reference Include="System.Deployment" />
|
||||
<Reference Include="System.Drawing" />
|
||||
<Reference Include="System.Windows.Forms" />
|
||||
<Reference Include="System.Xml" />
|
||||
<Reference Include="System.Xml.ReaderWriter, Version=4.1.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL">
|
||||
<HintPath>packages\System.Xml.ReaderWriter.4.3.1\lib\net46\System.Xml.ReaderWriter.dll</HintPath>
|
||||
</Reference>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<Compile Include="DownloadForm.cs">
|
||||
<SubType>Form</SubType>
|
||||
</Compile>
|
||||
<Compile Include="DownloadForm.Designer.cs">
|
||||
<DependentUpon>DownloadForm.cs</DependentUpon>
|
||||
</Compile>
|
||||
<Compile Include="Program.cs" />
|
||||
<Compile Include="Properties\AssemblyInfo.cs" />
|
||||
<Compile Include="Version.cs" />
|
||||
<EmbeddedResource Include="DownloadForm.resx">
|
||||
<DependentUpon>DownloadForm.cs</DependentUpon>
|
||||
</EmbeddedResource>
|
||||
<EmbeddedResource Include="Properties\Resources.resx">
|
||||
<Generator>ResXFileCodeGenerator</Generator>
|
||||
<LastGenOutput>Resources.Designer.cs</LastGenOutput>
|
||||
<SubType>Designer</SubType>
|
||||
</EmbeddedResource>
|
||||
<Compile Include="Properties\Resources.Designer.cs">
|
||||
<AutoGen>True</AutoGen>
|
||||
<DependentUpon>Resources.resx</DependentUpon>
|
||||
</Compile>
|
||||
<None Include="..\..\public\favicon.ico">
|
||||
<Link>favicon.ico</Link>
|
||||
</None>
|
||||
<None Include="packages.config" />
|
||||
<None Include="Properties\Settings.settings">
|
||||
<Generator>SettingsSingleFileGenerator</Generator>
|
||||
<LastGenOutput>Settings.Designer.cs</LastGenOutput>
|
||||
</None>
|
||||
<Compile Include="Properties\Settings.Designer.cs">
|
||||
<AutoGen>True</AutoGen>
|
||||
<DependentUpon>Settings.settings</DependentUpon>
|
||||
<DesignTimeSharedInput>True</DesignTimeSharedInput>
|
||||
</Compile>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<None Include="App.config" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<Content Include=".gitignore" />
|
||||
<Content Include="app.manifest" />
|
||||
</ItemGroup>
|
||||
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
|
||||
<Target Name="EnsureNuGetPackageBuildImports" BeforeTargets="PrepareForBuild">
|
||||
<PropertyGroup>
|
||||
<ErrorText>This project references NuGet package(s) that are missing on this computer. Enable NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105.The missing file is {0}.</ErrorText>
|
||||
</PropertyGroup>
|
||||
<Error Condition="!Exists('packages\NETStandard.Library.2.0.3\build\netstandard2.0\NETStandard.Library.targets')" Text="$([System.String]::Format('$(ErrorText)', 'packages\NETStandard.Library.2.0.3\build\netstandard2.0\NETStandard.Library.targets'))" />
|
||||
</Target>
|
||||
<Import Project="packages\NETStandard.Library.2.0.3\build\netstandard2.0\NETStandard.Library.targets" Condition="Exists('packages\NETStandard.Library.2.0.3\build\netstandard2.0\NETStandard.Library.targets')" />
|
||||
</Project>
|
16
extra/exe-builder/UptimeKuma.sln
Normal file
@@ -0,0 +1,16 @@
|
||||
|
||||
Microsoft Visual Studio Solution File, Format Version 12.00
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "UptimeKuma", "UptimeKuma.csproj", "{2DB53988-1D93-4AC0-90C4-96ADEAAC5C04}"
|
||||
EndProject
|
||||
Global
|
||||
GlobalSection(SolutionConfigurationPlatforms) = preSolution
|
||||
Debug|Any CPU = Debug|Any CPU
|
||||
Release|Any CPU = Release|Any CPU
|
||||
EndGlobalSection
|
||||
GlobalSection(ProjectConfigurationPlatforms) = postSolution
|
||||
{2DB53988-1D93-4AC0-90C4-96ADEAAC5C04}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{2DB53988-1D93-4AC0-90C4-96ADEAAC5C04}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{2DB53988-1D93-4AC0-90C4-96ADEAAC5C04}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{2DB53988-1D93-4AC0-90C4-96ADEAAC5C04}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
EndGlobalSection
|
||||
EndGlobal
|
3
extra/exe-builder/UptimeKuma.sln.DotSettings.user
Normal file
@@ -0,0 +1,3 @@
|
||||
<wpf:ResourceDictionary xml:space="preserve" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml" xmlns:s="clr-namespace:System;assembly=mscorlib" xmlns:ss="urn:shemas-jetbrains-com:settings-storage-xaml" xmlns:wpf="http://schemas.microsoft.com/winfx/2006/xaml/presentation">
|
||||
<s:Boolean x:Key="/Default/ResxEditorPersonal/CheckedGroups/=UptimeKuma_002FProperties_002FResources/@EntryIndexedValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/ResxEditorPersonal/Initialized/@EntryValue">True</s:Boolean></wpf:ResourceDictionary>
|
9
extra/exe-builder/Version.cs
Normal file
@@ -0,0 +1,9 @@
|
||||
namespace UptimeKuma {
|
||||
public class Version {
|
||||
public string latest { get; set; }
|
||||
public string slow { get; set; }
|
||||
public string beta { get; set; }
|
||||
public string nodejs { get; set; }
|
||||
public string exe { get; set; }
|
||||
}
|
||||
}
|
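`Version.cs` mirrors the JSON document that `CheckForUpdate()` in `Program.cs` downloads from https://uptime.kuma.pet/version and compares against the local `version.json`. A rough Node.js sketch of that comparison, assuming Node 18+ (global `fetch`) and the `semver` package; the `checkForUpdate` helper is illustrative and not part of this commit:

```js
// Illustration only: the same update check the tray app performs, in Node.js.
const fs = require("fs");
const semver = require("semver");

async function checkForUpdate() {
    // Local version info written next to the executable (same file the tray app reads).
    const current = JSON.parse(fs.readFileSync("version.json", "utf8"));

    // Remote version info, same endpoint as Program.cs uses.
    const latest = await (await fetch("https://uptime.kuma.pet/version")).json();

    if (semver.gt(latest.latest, current.latest)) {
        console.log(`Update available: ${current.latest} -> ${latest.latest}`);
    } else {
        console.log("You are using the latest version.");
    }
}

checkForUpdate().catch(console.error);
```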
28
extra/exe-builder/app.manifest
Normal file
@@ -0,0 +1,28 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0" xmlns:asmv3="urn:schemas-microsoft-com:asm.v3">
|
||||
<asmv3:application>
|
||||
<asmv3:windowsSettings>
|
||||
<dpiAware xmlns="http://schemas.microsoft.com/SMI/2005/WindowsSettings">true</dpiAware>
|
||||
<dpiAwareness xmlns="http://schemas.microsoft.com/SMI/2016/WindowsSettings">PerMonitorV2</dpiAwareness>
|
||||
</asmv3:windowsSettings>
|
||||
</asmv3:application>
|
||||
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v2">
|
||||
<security>
|
||||
<requestedPrivileges xmlns="urn:schemas-microsoft-com:asm.v3">
|
||||
<!-- UAC Manifest Options
|
||||
If you want to change the Windows User Account Control level replace the
|
||||
requestedExecutionLevel node with one of the following.
|
||||
|
||||
<requestedExecutionLevel level="asInvoker" uiAccess="false" />
|
||||
<requestedExecutionLevel level="requireAdministrator" uiAccess="false" />
|
||||
<requestedExecutionLevel level="highestAvailable" uiAccess="false" />
|
||||
|
||||
Specifying requestedExecutionLevel element will disable file and registry virtualization.
|
||||
Remove this element if your application requires this virtualization for backwards
|
||||
compatibility.
|
||||
-->
|
||||
<requestedExecutionLevel level="asInvoker" uiAccess="false" />
|
||||
</requestedPrivileges>
|
||||
</security>
|
||||
</trustInfo>
|
||||
</assembly>
|
54
extra/exe-builder/packages.config
Normal file
@@ -0,0 +1,54 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<packages>
|
||||
<package id="Microsoft.NETCore.Platforms" version="7.0.0" targetFramework="net472" />
|
||||
<package id="Microsoft.Win32.Primitives" version="4.3.0" targetFramework="net472" />
|
||||
<package id="NETStandard.Library" version="2.0.3" targetFramework="net472" />
|
||||
<package id="Newtonsoft.Json" version="13.0.2" targetFramework="net472" />
|
||||
<package id="System.AppContext" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Console" version="4.3.1" targetFramework="net472" />
|
||||
<package id="System.Diagnostics.DiagnosticSource" version="7.0.1" targetFramework="net472" />
|
||||
<package id="System.Net.Http" version="4.3.4" targetFramework="net472" />
|
||||
<package id="System.Runtime.Extensions" version="4.3.1" targetFramework="net472" />
|
||||
<package id="System.Security.Cryptography.Algorithms" version="4.3.1" targetFramework="net472" />
|
||||
<package id="System.Security.Cryptography.X509Certificates" version="4.3.2" targetFramework="net472" />
|
||||
<package id="System.Text.RegularExpressions" version="4.3.1" targetFramework="net472" />
|
||||
<package id="System.Xml.ReaderWriter" version="4.3.1" targetFramework="net472" />
|
||||
<package id="System.Memory" version="4.5.5" targetFramework="net472" />
|
||||
<package id="System.Net.Primitives" version="4.3.1" targetFramework="net472" />
|
||||
<package id="System.Runtime" version="4.3.1" targetFramework="net472" />
|
||||
<package id="System.Buffers" version="4.5.1" targetFramework="net472" />
|
||||
<package id="System.Collections" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Collections.Concurrent" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Diagnostics.Debug" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Diagnostics.Tools" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Diagnostics.Tracing" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Globalization" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Globalization.Calendars" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.IO" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.IO.Compression" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.IO.Compression.ZipFile" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.IO.FileSystem" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.IO.FileSystem.Primitives" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Linq" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Linq.Expressions" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Net.Sockets" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Numerics.Vectors" version="4.5.0" targetFramework="net472" />
|
||||
<package id="System.ObjectModel" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Reflection" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Reflection.Extensions" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Reflection.Primitives" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Resources.ResourceManager" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Runtime.CompilerServices.Unsafe" version="6.0.0" targetFramework="net472" />
|
||||
<package id="System.Runtime.Handles" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Runtime.InteropServices" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Runtime.InteropServices.RuntimeInformation" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Runtime.Numerics" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Security.Cryptography.Encoding" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Security.Cryptography.Primitives" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Text.Encoding" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Text.Encoding.Extensions" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Threading" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Threading.Tasks" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Threading.Timer" version="4.3.0" targetFramework="net472" />
|
||||
<package id="System.Xml.XDocument" version="4.3.0" targetFramework="net472" />
|
||||
</packages>
|
23
extra/fs-rmSync.js
Normal file
@@ -0,0 +1,23 @@
|
||||
const fs = require("fs");
|
||||
/**
|
||||
* Detect if `fs.rmSync` is available
|
||||
* to avoid the runtime deprecation warning triggered by using `fs.rmdirSync` with `{ recursive: true }` in Node.js v16,
|
||||
* or the `recursive` option being removed entirely in a future Node.js version.
|
||||
* See the link below.
|
||||
* @todo Once we drop support for Node.js v14 (or at least the versions before v14.14.0), we can safely replace this function with `fs.rmSync`: `fs.rmSync` was added in Node.js v14.14.0, we currently support all Node.js v14 versions (including those before v14.14.0), and this function has almost the same signature as `fs.rmSync`.
|
||||
* @link https://nodejs.org/docs/latest-v16.x/api/deprecations.html#dep0147-fsrmdirpath--recursive-true- the deprecation information of `fs.rmdirSync`
|
||||
* @link https://nodejs.org/docs/latest-v16.x/api/fs.html#fsrmsyncpath-options the documentation of `fs.rmSync`
|
||||
* @param {fs.PathLike} path The path to remove (any valid "fs" path value).
|
||||
* @param {fs.RmDirOptions} options Options passed to `fs.rmdirSync`; if `fs.rmSync` is available and `recursive` is true, `force: true` is added automatically.
|
||||
* @returns {void}
|
||||
*/
|
||||
const rmSync = (path, options) => {
|
||||
if (typeof fs.rmSync === "function") {
|
||||
if (options.recursive) {
|
||||
options.force = true;
|
||||
}
|
||||
return fs.rmSync(path, options);
|
||||
}
|
||||
return fs.rmdirSync(path, options);
|
||||
};
|
||||
module.exports = rmSync;
|
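A short usage sketch for the `rmSync` wrapper above; the path is a hypothetical example, shown only to illustrate the call shape:

```js
// Illustration only: using the compatibility wrapper above.
const rmSync = require("./extra/fs-rmSync.js");

// Removes the directory tree on both older (fs.rmdirSync) and newer (fs.rmSync) Node.js.
// With { recursive: true }, the wrapper adds force: true automatically when fs.rmSync exists.
rmSync("./some-build-output", { recursive: true });
```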
@@ -15,6 +15,7 @@ if (newVersion) {
|
||||
// Process package.json
|
||||
pkg.version = newVersion;
|
||||
pkg.scripts.setup = pkg.scripts.setup.replaceAll(oldVersion, newVersion);
|
||||
pkg.scripts["build-docker"] = pkg.scripts["build-docker"].replaceAll(oldVersion, newVersion);
|
||||
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n");
|
||||
|
||||
// Process README.md
|
||||
|
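The hunk above bumps `package.json` and rewrites the version string embedded in the `setup` and `build-docker` npm scripts. A condensed sketch of that step in isolation (the version strings below are placeholders; the real script derives them from its input):

```js
// Illustration only: the version-bump step in isolation.
const fs = require("fs");

const oldVersion = "2.0.0-beta.0"; // placeholder values; the real script reads these elsewhere
const newVersion = "2.0.0-beta.1";

const pkg = JSON.parse(fs.readFileSync("package.json", "utf8"));
pkg.version = newVersion;
// The setup/build-docker scripts embed the version string, so rewrite it there too.
pkg.scripts.setup = pkg.scripts.setup.replaceAll(oldVersion, newVersion);
pkg.scripts["build-docker"] = pkg.scripts["build-docker"].replaceAll(oldVersion, newVersion);
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n");
```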
6
extra/press-any-key.js
Normal file
@@ -0,0 +1,6 @@
|
||||
console.log("Git Push and Publish the release note on github, then press any key to continue");
|
||||
|
||||
process.stdin.setRawMode(true);
|
||||
process.stdin.resume();
|
||||
process.stdin.on("data", process.exit.bind(process, 0));
|
||||
|
@@ -1,64 +0,0 @@
|
||||
import "dotenv/config";
|
||||
import {
|
||||
ver,
|
||||
buildDist,
|
||||
buildImage,
|
||||
checkDocker,
|
||||
checkTagExists,
|
||||
checkVersionFormat,
|
||||
getRepoNames,
|
||||
pressAnyKey,
|
||||
execSync, uploadArtifacts,
|
||||
} from "./lib.mjs";
|
||||
import semver from "semver";
|
||||
|
||||
const repoNames = getRepoNames();
|
||||
const version = process.env.RELEASE_BETA_VERSION;
|
||||
const githubToken = process.env.RELEASE_GITHUB_TOKEN;
|
||||
|
||||
console.log("RELEASE_BETA_VERSION:", version);
|
||||
|
||||
if (!githubToken) {
|
||||
console.error("GITHUB_TOKEN is required");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Check if the version is a valid semver
|
||||
checkVersionFormat(version);
|
||||
|
||||
// Check if the semver identifier is "beta"
|
||||
const semverIdentifier = semver.prerelease(version);
|
||||
console.log("Semver identifier:", semverIdentifier);
|
||||
if (semverIdentifier[0] !== "beta") {
|
||||
console.error("VERSION should have a semver identifier of 'beta'");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Check if docker is running
|
||||
checkDocker();
|
||||
|
||||
// Check if the tag exists
|
||||
await checkTagExists(repoNames, version);
|
||||
|
||||
// node extra/beta/update-version.js
|
||||
execSync("node ./extra/beta/update-version.js");
|
||||
|
||||
// Build frontend dist
|
||||
buildDist();
|
||||
|
||||
// Build slim image (rootless)
|
||||
buildImage(repoNames, [ "beta-slim-rootless", ver(version, "slim-rootless") ], "rootless", "BASE_IMAGE=louislam/uptime-kuma:base2-slim");
|
||||
|
||||
// Build full image (rootless)
|
||||
buildImage(repoNames, [ "beta-rootless", ver(version, "rootless") ], "rootless");
|
||||
|
||||
// Build slim image
|
||||
buildImage(repoNames, [ "beta-slim", ver(version, "slim") ], "release", "BASE_IMAGE=louislam/uptime-kuma:base2-slim");
|
||||
|
||||
// Build full image
|
||||
buildImage(repoNames, [ "beta", version ], "release");
|
||||
|
||||
await pressAnyKey();
|
||||
|
||||
// npm run upload-artifacts
|
||||
uploadArtifacts(version, githubToken);
|
@@ -1,57 +0,0 @@
import "dotenv/config";
import {
    ver,
    buildDist,
    buildImage,
    checkDocker,
    checkTagExists,
    checkVersionFormat,
    getRepoNames,
    pressAnyKey, execSync, uploadArtifacts
} from "./lib.mjs";

const repoNames = getRepoNames();
const version = process.env.RELEASE_VERSION;
const githubToken = process.env.RELEASE_GITHUB_TOKEN;

console.log("RELEASE_VERSION:", version);

if (!githubToken) {
    console.error("GITHUB_TOKEN is required");
    process.exit(1);
}

// Check if the version is a valid semver
checkVersionFormat(version);

// Check if docker is running
checkDocker();

// Check if the tag exists
await checkTagExists(repoNames, version);

// node extra/beta/update-version.js
execSync("node extra/update-version.js");

// Build frontend dist
buildDist();

// Build slim image (rootless)
buildImage(repoNames, [ "2-slim-rootless", ver(version, "slim-rootless") ], "rootless", "BASE_IMAGE=louislam/uptime-kuma:base2-slim");

// Build full image (rootless)
buildImage(repoNames, [ "2-rootless", ver(version, "rootless") ], "rootless");

// Build slim image
buildImage(repoNames, [ "next-slim", "2-slim", ver(version, "slim") ], "release", "BASE_IMAGE=louislam/uptime-kuma:base2-slim");

// Build full image
buildImage(repoNames, [ "next", "2", version ], "release");

await pressAnyKey();

// npm run upload-artifacts
uploadArtifacts(version, githubToken);

// node extra/update-wiki-version.js
execSync("node extra/update-wiki-version.js");
@@ -1,251 +0,0 @@
import "dotenv/config";
import * as childProcess from "child_process";
import semver from "semver";

export const dryRun = process.env.RELEASE_DRY_RUN === "1";

if (dryRun) {
    console.info("Dry run enabled.");
}

/**
 * Check if docker is running
 * @returns {void}
 */
export function checkDocker() {
    try {
        childProcess.execSync("docker ps");
    } catch (error) {
        console.error("Docker is not running. Please start docker and try again.");
        process.exit(1);
    }
}

/**
 * Get Docker Hub repository name
 */
export function getRepoNames() {
    if (process.env.RELEASE_REPO_NAMES) {
        // Split by comma
        return process.env.RELEASE_REPO_NAMES.split(",").map((name) => name.trim());
    }
    return [
        "louislam/uptime-kuma",
        "ghcr.io/louislam/uptime-kuma",
    ];
}

/**
 * Build frontend dist
 * @returns {void}
 */
export function buildDist() {
    if (!dryRun) {
        childProcess.execSync("npm run build", { stdio: "inherit" });
    } else {
        console.info("[DRY RUN] npm run build");
    }
}

/**
 * Build docker image and push to Docker Hub
 * @param {string[]} repoNames Docker Hub repository names
 * @param {string[]} tags Docker image tags
 * @param {string} target Dockerfile's target name
 * @param {string} buildArgs Docker build args
 * @param {string} dockerfile Path to Dockerfile
 * @param {string} platform Build platform
 * @returns {void}
 */
export function buildImage(repoNames, tags, target, buildArgs = "", dockerfile = "docker/dockerfile", platform = "linux/amd64,linux/arm64,linux/arm/v7") {
    let args = [
        "buildx",
        "build",
        "-f",
        dockerfile,
        "--platform",
        platform,
    ];

    for (let repoName of repoNames) {
        // Add tags
        for (let tag of tags) {
            args.push("-t", `${repoName}:${tag}`);
        }
    }

    args = [
        ...args,
        "--target",
        target,
    ];

    // Add build args
    if (buildArgs) {
        args.push("--build-arg", buildArgs);
    }

    args = [
        ...args,
        ".",
        "--push",
    ];

    if (!dryRun) {
        childProcess.spawnSync("docker", args, { stdio: "inherit" });
    } else {
        console.log(`[DRY RUN] docker ${args.join(" ")}`);
    }
}

/**
 * Check if the version already exists on Docker Hub
 * TODO: use semver to compare versions if it is greater than the previous?
 * @param {string[]} repoNames repository name (Only check the name with single slash)
 * @param {string} version Version to check
 * @returns {void}
 */
export async function checkTagExists(repoNames, version) {
    // Skip if the tag is not on Docker Hub
    // louislam/uptime-kuma
    let dockerHubRepoNames = repoNames.filter((name) => {
        return name.split("/").length === 2;
    });

    for (let repoName of dockerHubRepoNames) {
        await checkTagExistsSingle(repoName, version);
    }
}

/**
 * Check if the version already exists on Docker Hub
 * @param {string} repoName repository name
 * @param {string} version Version to check
 * @returns {Promise<void>}
 */
export async function checkTagExistsSingle(repoName, version) {
    console.log(`Checking if version ${version} exists on Docker Hub:`, repoName);

    // Get a list of tags from the Docker Hub repository
    let tags = [];

    // It is mainly to check my careless mistake that I forgot to update the release version in .env, so `page_size` is set to 100 is enough, I think.
    const response = await fetch(`https://hub.docker.com/v2/repositories/${repoName}/tags/?page_size=100`);
    if (response.ok) {
        const data = await response.json();
        tags = data.results.map((tag) => tag.name);
    } else {
        console.error("Failed to get tags from Docker Hub");
        process.exit(1);
    }

    // Check if the version already exists
    if (tags.includes(version)) {
        console.error(`Version ${version} already exists`);
        process.exit(1);
    }
}

/**
 * Check the version format
 * @param {string} version Version to check
 * @returns {void}
 */
export function checkVersionFormat(version) {
    if (!version) {
        console.error("VERSION is required");
        process.exit(1);
    }

    // Check the version format, it should be a semver and must be like this: "2.0.0-beta.0"
    if (!semver.valid(version)) {
        console.error("VERSION is not a valid semver version");
        process.exit(1);
    }
}

/**
 * Press any key to continue
 * @returns {Promise<void>}
 */
export function pressAnyKey() {
    console.log("Git Push and Publish the release note on github, then press any key to continue");
    process.stdin.setRawMode(true);
    process.stdin.resume();
    return new Promise(resolve => process.stdin.once("data", data => {
        process.stdin.setRawMode(false);
        process.stdin.pause();
        resolve();
    }));
}

/**
 * Append version identifier
 * @param {string} version Version
 * @param {string} identifier Identifier
 * @returns {string} Version with identifier
 */
export function ver(version, identifier) {
    const obj = semver.parse(version);

    if (obj.prerelease.length === 0) {
        obj.prerelease = [ identifier ];
    } else {
        obj.prerelease[0] = [ obj.prerelease[0], identifier ].join("-");
    }
    return obj.format();
}

/**
 * Upload artifacts to GitHub
 * docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:upload-artifact --build-arg VERSION --build-arg GITHUB_TOKEN --target upload-artifact . --progress plain
 * @param {string} version Version
 * @param {string} githubToken GitHub token
 * @returns {void}
 */
export function uploadArtifacts(version, githubToken) {
    let args = [
        "buildx",
        "build",
        "-f",
        "docker/dockerfile",
        "--platform",
        "linux/amd64",
        "-t",
        "louislam/uptime-kuma:upload-artifact",
        "--build-arg",
        `VERSION=${version}`,
        "--build-arg",
        "GITHUB_TOKEN",
        "--target",
        "upload-artifact",
        ".",
        "--progress",
        "plain",
    ];

    if (!dryRun) {
        childProcess.spawnSync("docker", args, {
            stdio: "inherit",
            env: {
                ...process.env,
                GITHUB_TOKEN: githubToken,
            },
        });
    } else {
        console.log(`[DRY RUN] docker ${args.join(" ")}`);
    }
}

/**
 * Execute a command
 * @param {string} cmd Command to execute
 * @returns {void}
 */
export function execSync(cmd) {
    if (!dryRun) {
        childProcess.execSync(cmd, { stdio: "inherit" });
    } else {
        console.info(`[DRY RUN] ${cmd}`);
    }
}
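For reference, a rough sketch of how the removed `ver()` helper combined a release version with an image-variant suffix when the scripts above composed Docker tags; the exact output strings assume node-semver rebuilds the version from the modified prerelease array in `format()`:

// Hypothetical usage of the removed helper (illustrative values only):
// ver("2.0.0-beta.1", "slim-rootless")  would yield "2.0.0-beta-slim-rootless.1" (suffix merged into the existing prerelease)
// ver("2.0.0", "rootless")              would yield "2.0.0-rootless" (suffix becomes the prerelease)
import { ver } from "./lib.mjs";
console.log(ver("2.0.0-beta.1", "slim-rootless"));
console.log(ver("2.0.0", "rootless"));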
@@ -1,16 +0,0 @@
import { buildDist, buildImage, checkDocker, getRepoNames } from "./lib.mjs";

// Docker Hub repository name
const repoNames = getRepoNames();

// Check if docker is running
checkDocker();

// Build frontend dist (it will build on the host machine, TODO: build on a container?)
buildDist();

// Build full image (rootless)
buildImage(repoNames, [ "nightly2-rootless" ], "nightly-rootless");

// Build full image
buildImage(repoNames, [ "nightly2" ], "nightly");
@@ -1,6 +0,0 @@
import { uploadArtifacts } from "./lib.mjs";

const version = process.env.RELEASE_BETA_VERSION;
const githubToken = process.env.RELEASE_GITHUB_TOKEN;

uploadArtifacts(version, githubToken);
@@ -1,6 +0,0 @@
import { uploadArtifacts } from "./lib.mjs";

const version = process.env.RELEASE_VERSION;
const githubToken = process.env.RELEASE_GITHUB_TOKEN;

uploadArtifacts(version, githubToken);
@@ -1,25 +0,0 @@
// For #5231

const fs = require("fs");

let path = "../src/lang";

// list directories in the lang directory
let jsonFileList = fs.readdirSync(path);

for (let jsonFile of jsonFileList) {
    if (!jsonFile.endsWith(".json")) {
        continue;
    }

    let jsonPath = path + "/" + jsonFile;
    let langData = JSON.parse(fs.readFileSync(jsonPath, "utf8"));

    for (let key in langData) {
        if (langData[key] === "") {
            delete langData[key];
        }
    }

    fs.writeFileSync(jsonPath, JSON.stringify(langData, null, 4) + "\n");
}
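To make the intent of the removed script concrete, a small hypothetical example of the transformation it applied to each language JSON file (keys invented for illustration):

// Hypothetical language data before the cleanup:
const langData = { "Up": "Upp", "Down": "", "Pause": "Pausa" };
for (const key in langData) {
    if (langData[key] === "") {
        delete langData[key]; // drop untranslated (empty) entries
    }
}
console.log(JSON.stringify(langData, null, 4)); // prints the object without the "Down" key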
@@ -1,24 +0,0 @@
const { R } = require("redbean-node");
const Database = require("../server/database");
const args = require("args-parser")(process.argv);
const { Settings } = require("../server/settings");

const main = async () => {
    console.log("Connecting the database");
    Database.initDataDir(args);
    await Database.connect(false, false, true);

    console.log("Deleting all data from aggregate tables");
    await R.exec("DELETE FROM stat_minutely");
    await R.exec("DELETE FROM stat_hourly");
    await R.exec("DELETE FROM stat_daily");

    console.log("Resetting the aggregate table state");
    await Settings.set("migrateAggregateTableState", "");

    await Database.close();
    console.log("Done");
};

main();
@@ -8,7 +8,6 @@ const User = require("../server/model/user");
const { io } = require("socket.io-client");
const { localWebSocketURL } = require("../server/config");
const args = require("args-parser")(process.argv);

const rl = readline.createInterface({
input: process.stdin,
output: process.stdout
@@ -20,10 +19,10 @@ const main = async () => {
}

console.log("Connecting the database");
Database.initDataDir(args);
await Database.connect(false, false, true);

try {
Database.initDataDir(args);
await Database.connect(false, false, true);
// No need to actually reset the password for testing, just make sure no connection problem. It is ok for now.
if (!process.env.TEST_BACKEND) {
const user = await R.findOne("user");
9
extra/test-docker.js
Normal file
@@ -0,0 +1,9 @@
// Check if docker is running
const { exec } = require("child_process");

exec("docker ps", (err, stdout, stderr) => {
    if (err) {
        console.error("Docker is not running. Please start docker and try again.");
        process.exit(1);
    }
});
@@ -2,6 +2,7 @@

import fs from "fs";
import util from "util";
import rmSync from "../fs-rmSync.js";

/**
* Copy across the required language files
@@ -15,10 +16,7 @@ import util from "util";
*/
function copyFiles(langCode, baseLang) {
if (fs.existsSync("./languages")) {
fs.rmSync("./languages", {
recursive: true,
force: true,
});
rmSync("./languages", { recursive: true });
}
fs.mkdirSync("./languages");

@@ -95,9 +93,6 @@ console.log("Updating: " + langCode);

copyFiles(langCode, baseLangCode);
await updateLanguage(langCode, baseLangCode);
fs.rmSync("./languages", {
recursive: true,
force: true,
});
rmSync("./languages", { recursive: true });

console.log("Done. Fixing formatting by ESLint...");
@@ -5,7 +5,7 @@ const util = require("../src/util");

util.polyfill();

const newVersion = process.env.RELEASE_VERSION;
const newVersion = process.env.VERSION;

console.log("New Version: " + newVersion);

@@ -1,7 +1,7 @@
const childProcess = require("child_process");
const fs = require("fs");

const newVersion = process.env.RELEASE_VERSION;
const newVersion = process.env.VERSION;

if (!newVersion) {
console.log("Missing version");
13069
package-lock.json
generated
File diff suppressed because it is too large
87
package.json
@@ -1,13 +1,13 @@
|
||||
{
|
||||
"name": "uptime-kuma",
|
||||
"version": "2.0.0-beta.2",
|
||||
"version": "2.0.0-dev",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/louislam/uptime-kuma.git"
|
||||
},
|
||||
"engines": {
|
||||
"node": "18 || >= 20.4.0"
|
||||
"node": "14 || 16 || 18 || >= 20.4.0"
|
||||
},
|
||||
"scripts": {
|
||||
"lint:js": "eslint --ext \".js,.vue\" --ignore-path .gitignore .",
|
||||
@@ -27,21 +27,29 @@
|
||||
"build": "vite build --config ./config/vite.config.js",
|
||||
"test": "npm run test-backend && npm run test-e2e",
|
||||
"test-with-build": "npm run build && npm test",
|
||||
"test-backend": "cross-env TEST_BACKEND=1 node --test test/backend-test",
|
||||
"test-backend": "node test/backend-test-entry.js",
|
||||
"test-backend:14": "cross-env TEST_BACKEND=1 NODE_OPTIONS=\"--experimental-abortcontroller --no-warnings\" node--test test/backend-test",
|
||||
"test-backend:18": "cross-env TEST_BACKEND=1 node --test test/backend-test",
|
||||
"test-e2e": "playwright test --config ./config/playwright.config.js",
|
||||
"test-e2e-ui": "playwright test --config ./config/playwright.config.js --ui --ui-port=51063",
|
||||
"playwright-codegen": "playwright codegen localhost:3000 --save-storage=./private/e2e-auth.json",
|
||||
"playwright-show-report": "playwright show-report ./private/playwright-report",
|
||||
"tsc": "tsc",
|
||||
"vite-preview-dist": "vite preview --host --config ./config/vite.config.js",
|
||||
"build-docker": "npm run build && npm run build-docker-full && npm run build-docker-slim",
|
||||
"build-docker-base": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base2 --target base2 . --push",
|
||||
"build-docker-base-slim": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base2-slim --target base2-slim . --push",
|
||||
"build-docker-builder-go": "docker buildx build -f docker/builder-go.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:builder-go . --push",
|
||||
"build-docker-slim": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2-slim -t louislam/uptime-kuma:$VERSION-slim --target release --build-arg BASE_IMAGE=louislam/uptime-kuma:base2-slim . --push",
|
||||
"build-docker-full": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2 -t louislam/uptime-kuma:$VERSION --target release . --push",
|
||||
"build-docker-nightly": "node ./extra/test-docker.js && npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly2 --target nightly . --push",
|
||||
"build-docker-slim-rootless": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2-slim-rootless -t louislam/uptime-kuma:$VERSION-slim-rootless --target rootless --build-arg BASE_IMAGE=louislam/uptime-kuma:base2-slim . --push",
|
||||
"build-docker-full-rootless": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2-rootless -t louislam/uptime-kuma:$VERSION-rootless --target rootless . --push",
|
||||
"build-docker-nightly-rootless": "node ./extra/test-docker.js && npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly2-rootless --target nightly-rootless . --push",
|
||||
"build-docker-nightly-local": "npm run build && docker build -f docker/dockerfile -t louislam/uptime-kuma:nightly2 --target nightly .",
|
||||
"build-docker-pr-test": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64 -t louislam/uptime-kuma:pr-test2 --target pr-test2 . --push",
|
||||
"upload-artifacts": "node extra/release/upload-artifacts.mjs",
|
||||
"upload-artifacts-beta": "node extra/release/upload-artifacts-beta.mjs",
|
||||
"setup": "git checkout 1.23.16 && npm ci --omit dev && npm run download-dist",
|
||||
"upload-artifacts": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:upload-artifact --build-arg VERSION --build-arg GITHUB_TOKEN --target upload-artifact . --progress plain",
|
||||
"setup": "git checkout 1.23.10 && npm ci --production && npm run download-dist",
|
||||
"download-dist": "node extra/download-dist.js",
|
||||
"mark-as-nightly": "node extra/mark-as-nightly.js",
|
||||
"reset-password": "node extra/reset-password.js",
|
||||
@@ -52,9 +60,8 @@
|
||||
"simple-postgres": "docker run --rm -p 5432:5432 -e POSTGRES_PASSWORD=postgres postgres",
|
||||
"simple-mariadb": "docker run --rm -p 3306:3306 -e MYSQL_ROOT_PASSWORD=mariadb# mariadb",
|
||||
"update-language-files": "cd extra/update-language-files && node index.js && cross-env-shell eslint ../../src/languages/$npm_config_language.js --fix",
|
||||
"release-final": "node ./extra/release/final.mjs",
|
||||
"release-beta": "node ./extra/release/beta.mjs",
|
||||
"release-nightly": "node ./extra/release/nightly.mjs",
|
||||
"release-final": "node ./extra/test-docker.js && node extra/update-version.js && npm run build-docker && node ./extra/press-any-key.js && npm run upload-artifacts && node ./extra/update-wiki-version.js",
|
||||
"release-beta": "node ./extra/test-docker.js && node extra/beta/update-version.js && npm run build && node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:beta . --target release --push && node ./extra/press-any-key.js && npm run upload-artifacts",
|
||||
"git-remove-tag": "git tag -d",
|
||||
"build-dist-and-restart": "npm run build && npm run start-server-dev",
|
||||
"start-pr-test": "node extra/checkout-pr.js && npm install && npm run dev",
|
||||
@@ -64,15 +71,15 @@
|
||||
"quick-run-nightly": "docker run --rm --env NODE_ENV=development -p 3001:3001 louislam/uptime-kuma:nightly2",
|
||||
"start-dev-container": "cd docker && docker-compose -f docker-compose-dev.yml up --force-recreate",
|
||||
"rebase-pr-to-1.23.X": "node extra/rebase-pr.js 1.23.X",
|
||||
"reset-migrate-aggregate-table-state": "node extra/reset-migrate-aggregate-table-state.js"
|
||||
"start-server-node14-win": "private\\node14\\node.exe server/server.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"@grpc/grpc-js": "~1.8.22",
|
||||
"@grpc/grpc-js": "~1.7.3",
|
||||
"@louislam/ping": "~0.4.4-mod.1",
|
||||
"@louislam/sqlite3": "15.1.6",
|
||||
"@vvo/tzdb": "^6.125.0",
|
||||
"args-parser": "~1.3.0",
|
||||
"axios": "~0.29.0",
|
||||
"axios": "~0.27.0",
|
||||
"axios-ntlm": "1.3.0",
|
||||
"badge-maker": "~3.3.1",
|
||||
"bcryptjs": "~2.4.3",
|
||||
"chardet": "~1.4.0",
|
||||
@@ -82,21 +89,19 @@
|
||||
"command-exists": "~1.2.9",
|
||||
"compare-versions": "~3.6.0",
|
||||
"compression": "~1.7.4",
|
||||
"croner": "~8.1.0",
|
||||
"croner": "~6.0.5",
|
||||
"dayjs": "~1.11.5",
|
||||
"dev-null": "^0.1.1",
|
||||
"dotenv": "~16.0.3",
|
||||
"express": "~4.21.0",
|
||||
"express": "~4.17.3",
|
||||
"express-basic-auth": "~1.2.1",
|
||||
"express-static-gzip": "~2.1.7",
|
||||
"feed": "^4.2.2",
|
||||
"form-data": "~4.0.0",
|
||||
"gamedig": "^4.2.0",
|
||||
"html-escaper": "^3.0.3",
|
||||
"http-cookie-agent": "~5.0.4",
|
||||
"http-graceful-shutdown": "~3.1.7",
|
||||
"http-proxy-agent": "~7.0.2",
|
||||
"https-proxy-agent": "~7.0.6",
|
||||
"http-proxy-agent": "~5.0.0",
|
||||
"https-proxy-agent": "~5.0.1",
|
||||
"iconv-lite": "~0.6.3",
|
||||
"isomorphic-ws": "^5.0.0",
|
||||
"jsesc": "~3.0.2",
|
||||
@@ -104,21 +109,19 @@
|
||||
"jsonwebtoken": "~9.0.0",
|
||||
"jwt-decode": "~3.1.2",
|
||||
"kafkajs": "^2.2.4",
|
||||
"knex": "~3.1.0",
|
||||
"knex": "^2.4.2",
|
||||
"limiter": "~2.1.0",
|
||||
"liquidjs": "^10.7.0",
|
||||
"marked": "^14.0.0",
|
||||
"mitt": "~3.0.1",
|
||||
"mongodb": "~4.17.1",
|
||||
"mqtt": "~4.3.7",
|
||||
"mssql": "~11.0.0",
|
||||
"mysql2": "~3.11.3",
|
||||
"mssql": "~8.1.4",
|
||||
"mysql2": "~3.6.2",
|
||||
"nanoid": "~3.3.4",
|
||||
"net-snmp": "^3.11.2",
|
||||
"node-cloudflared-tunnel": "~1.0.9",
|
||||
"node-radius-client": "~1.0.0",
|
||||
"nodemailer": "~6.9.13",
|
||||
"nostr-tools": "^2.10.4",
|
||||
"nodemailer": "~6.6.5",
|
||||
"nostr-tools": "^1.13.1",
|
||||
"notp": "~2.0.3",
|
||||
"openid-client": "^5.4.2",
|
||||
"password-hash": "~1.2.2",
|
||||
@@ -133,31 +136,29 @@
|
||||
"redbean-node": "~0.3.0",
|
||||
"redis": "~4.5.1",
|
||||
"semver": "~7.5.4",
|
||||
"socket.io": "~4.8.0",
|
||||
"socket.io-client": "~4.8.0",
|
||||
"socks-proxy-agent": "~8.0.5",
|
||||
"tar": "~6.2.1",
|
||||
"socket.io": "~4.6.1",
|
||||
"socket.io-client": "~4.6.1",
|
||||
"socks-proxy-agent": "6.1.1",
|
||||
"tar": "~6.1.11",
|
||||
"tcp-ping": "~0.1.1",
|
||||
"thirty-two": "~1.0.2",
|
||||
"tough-cookie": "~4.1.3",
|
||||
"ws": "^8.13.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@actions/github": "~6.0.0",
|
||||
"@actions/github": "~5.0.1",
|
||||
"@fortawesome/fontawesome-svg-core": "~1.2.36",
|
||||
"@fortawesome/free-regular-svg-icons": "~5.15.4",
|
||||
"@fortawesome/free-solid-svg-icons": "~5.15.4",
|
||||
"@fortawesome/vue-fontawesome": "~3.0.0-5",
|
||||
"@playwright/test": "~1.39.0",
|
||||
"@popperjs/core": "~2.10.2",
|
||||
"@testcontainers/hivemq": "^10.13.1",
|
||||
"@testcontainers/rabbitmq": "^10.13.2",
|
||||
"@types/bootstrap": "~5.1.9",
|
||||
"@types/node": "^20.8.6",
|
||||
"@typescript-eslint/eslint-plugin": "^6.7.5",
|
||||
"@typescript-eslint/parser": "^6.7.5",
|
||||
"@vitejs/plugin-vue": "~5.0.1",
|
||||
"@vue/compiler-sfc": "~3.4.2",
|
||||
"@vitejs/plugin-vue": "~4.2.3",
|
||||
"@vue/compiler-sfc": "~3.3.4",
|
||||
"@vuepic/vue-datepicker": "~3.4.8",
|
||||
"aedes": "^0.46.3",
|
||||
"bootstrap": "5.1.3",
|
||||
@@ -169,17 +170,18 @@
|
||||
"cross-env": "~7.0.3",
|
||||
"delay": "^5.0.0",
|
||||
"dns2": "~2.0.1",
|
||||
"dompurify": "~3.2.4",
|
||||
"dompurify": "~2.4.3",
|
||||
"eslint": "~8.14.0",
|
||||
"eslint-plugin-jsdoc": "~46.4.6",
|
||||
"eslint-plugin-vue": "~8.7.1",
|
||||
"favico.js": "~0.3.10",
|
||||
"get-port-please": "^3.1.1",
|
||||
"marked": "~4.2.5",
|
||||
"node-ssh": "~13.1.0",
|
||||
"postcss-html": "~1.5.0",
|
||||
"postcss-rtlcss": "~3.7.2",
|
||||
"postcss-scss": "~4.0.4",
|
||||
"prismjs": "~1.30.0",
|
||||
"prismjs": "~1.29.0",
|
||||
"qrcode": "~1.5.0",
|
||||
"rollup-plugin-visualizer": "^5.6.0",
|
||||
"sass": "~1.42.1",
|
||||
@@ -187,22 +189,21 @@
|
||||
"stylelint-config-standard": "~25.0.0",
|
||||
"terser": "~5.15.0",
|
||||
"test": "~3.3.0",
|
||||
"testcontainers": "^10.13.1",
|
||||
"timezones-list": "~3.0.1",
|
||||
"typescript": "~4.4.4",
|
||||
"v-pagination-3": "~0.1.7",
|
||||
"vite": "~5.4.15",
|
||||
"vite": "~4.4.1",
|
||||
"vite-plugin-compression": "^0.5.1",
|
||||
"vite-plugin-vue-devtools": "^7.0.15",
|
||||
"vue": "~3.4.2",
|
||||
"vue": "~3.3.4",
|
||||
"vue-chartjs": "~5.2.0",
|
||||
"vue-confirm-dialog": "~1.0.2",
|
||||
"vue-contenteditable": "~3.0.4",
|
||||
"vue-i18n": "~9.14.3",
|
||||
"vue-i18n": "~9.2.2",
|
||||
"vue-image-crop-upload": "~3.0.3",
|
||||
"vue-multiselect": "~3.0.0-alpha.2",
|
||||
"vue-prism-editor": "~2.0.0-alpha.2",
|
||||
"vue-qrcode": "~1.0.0",
|
||||
"vue-router": "~4.2.5",
|
||||
"vue-router": "~4.0.14",
|
||||
"vue-toastification": "~2.0.0-rc.5",
|
||||
"vuedraggable": "~4.1.0",
|
||||
"wait-on": "^7.2.0",
|
||||
|
Binary file not shown. Before size: 8.7 KiB, after size: 4.7 KiB.
Binary file not shown. Before size: 10 KiB, after size: 2.6 KiB.
Binary file not shown. Before size: 30 KiB, after size: 9.5 KiB.
@@ -130,7 +130,7 @@ function userAuthorizer(username, password, callback) {
* @param {express.Request} req Express request object
* @param {express.Response} res Express response object
* @param {express.NextFunction} next Next handler in chain
* @returns {Promise<void>}
* @returns {void}
*/
exports.basicAuth = async function (req, res, next) {
const middleware = basicAuth({
@@ -153,7 +153,7 @@ exports.basicAuth = async function (req, res, next) {
* @param {express.Request} req Express request object
* @param {express.Response} res Express response object
* @param {express.NextFunction} next Next handler in chain
* @returns {Promise<void>}
* @returns {void}
*/
exports.apiAuth = async function (req, res, next) {
if (!await Settings.get("disableAuth")) {
@@ -8,7 +8,6 @@ const server = UptimeKumaServer.getInstance();
|
||||
const io = server.io;
|
||||
const { setting } = require("./util-server");
|
||||
const checkVersion = require("./check-version");
|
||||
const Database = require("./database");
|
||||
|
||||
/**
|
||||
* Send list of notification providers to client
|
||||
@@ -145,20 +144,17 @@ async function sendInfo(socket, hideVersion = false) {
|
||||
let version;
|
||||
let latestVersion;
|
||||
let isContainer;
|
||||
let dbType;
|
||||
|
||||
if (!hideVersion) {
|
||||
version = checkVersion.version;
|
||||
latestVersion = checkVersion.latestVersion;
|
||||
isContainer = (process.env.UPTIME_KUMA_IS_CONTAINER === "1");
|
||||
dbType = Database.dbConfig.type;
|
||||
}
|
||||
|
||||
socket.emit("info", {
|
||||
version,
|
||||
latestVersion,
|
||||
isContainer,
|
||||
dbType,
|
||||
primaryBaseURL: await setting("primaryBaseURL"),
|
||||
serverTimezone: await server.getTimezone(),
|
||||
serverTimezoneOffset: server.getTimezoneOffset(),
|
||||
@@ -213,32 +209,6 @@ async function sendRemoteBrowserList(socket) {
|
||||
return list;
|
||||
}
|
||||
|
||||
/**
|
||||
* Send list of monitor types to client
|
||||
* @param {Socket} socket Socket.io socket instance
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async function sendMonitorTypeList(socket) {
|
||||
const result = Object.entries(UptimeKumaServer.monitorTypeList).map(([ key, type ]) => {
|
||||
return [ key, {
|
||||
supportsConditions: type.supportsConditions,
|
||||
conditionVariables: type.conditionVariables.map(v => {
|
||||
return {
|
||||
id: v.id,
|
||||
operators: v.operators.map(o => {
|
||||
return {
|
||||
id: o.id,
|
||||
caption: o.caption,
|
||||
};
|
||||
}),
|
||||
};
|
||||
}),
|
||||
}];
|
||||
});
|
||||
|
||||
io.to(socket.userID).emit("monitorTypeList", Object.fromEntries(result));
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
sendNotificationList,
|
||||
sendImportantHeartbeatList,
|
||||
@@ -248,5 +218,4 @@ module.exports = {
|
||||
sendInfo,
|
||||
sendDockerHostList,
|
||||
sendRemoteBrowserList,
|
||||
sendMonitorTypeList,
|
||||
};
|
||||
|
@@ -6,11 +6,6 @@ const knex = require("knex");
|
||||
const path = require("path");
|
||||
const { EmbeddedMariaDB } = require("./embedded-mariadb");
|
||||
const mysql = require("mysql2/promise");
|
||||
const { Settings } = require("./settings");
|
||||
const { UptimeCalculator } = require("./uptime-calculator");
|
||||
const dayjs = require("dayjs");
|
||||
const { SimpleMigrationServer } = require("./utils/simple-migration-server");
|
||||
const KumaColumnCompiler = require("./utils/knex/lib/dialects/mysql2/schema/mysql2-columncompiler");
|
||||
|
||||
/**
|
||||
* Database & App Data Folder
|
||||
@@ -110,8 +105,7 @@ class Database {
|
||||
"patch-add-gamedig-given-port.sql": true,
|
||||
"patch-notification-config.sql": true,
|
||||
"patch-fix-kafka-producer-booleans.sql": true,
|
||||
"patch-timeout.sql": true,
|
||||
"patch-monitor-tls-info-add-fk.sql": true, // The last file so far converted to a knex migration file
|
||||
"patch-timeout.sql": true, // The last file so far converted to a knex migration file
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -199,14 +193,6 @@ class Database {
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
static async connect(testMode = false, autoloadModels = true, noLog = false) {
|
||||
// Patch "mysql2" knex client
|
||||
// Workaround: Tried extending the ColumnCompiler class, but it didn't work for unknown reasons, so I override the function via prototype
|
||||
const { getDialectByNameOrAlias } = require("knex/lib/dialects");
|
||||
const mysql2 = getDialectByNameOrAlias("mysql2");
|
||||
mysql2.prototype.columnCompiler = function () {
|
||||
return new KumaColumnCompiler(this, ...arguments);
|
||||
};
|
||||
|
||||
const acquireConnectionTimeout = 120 * 1000;
|
||||
let dbConfig;
|
||||
try {
|
||||
@@ -222,9 +208,9 @@ class Database {
|
||||
let config = {};
|
||||
|
||||
let mariadbPoolConfig = {
|
||||
min: 0,
|
||||
max: 10,
|
||||
idleTimeoutMillis: 30000,
|
||||
afterCreate: function (conn, done) {
|
||||
|
||||
}
|
||||
};
|
||||
|
||||
log.info("db", `Database Type: ${dbConfig.type}`);
|
||||
@@ -296,7 +282,7 @@ class Database {
|
||||
client: "mysql2",
|
||||
connection: {
|
||||
socketPath: embeddedMariaDB.socketPath,
|
||||
user: embeddedMariaDB.username,
|
||||
user: "node",
|
||||
database: "kuma",
|
||||
timezone: "Z",
|
||||
typeCast: function (field, next) {
|
||||
@@ -392,11 +378,9 @@ class Database {
|
||||
|
||||
/**
|
||||
* Patch the database
|
||||
* @param {number} port Start the migration server for aggregate tables on this port if provided
|
||||
* @param {string} hostname Start the migration server for aggregate tables on this hostname if provided
|
||||
* @returns {Promise<void>}
|
||||
* @returns {void}
|
||||
*/
|
||||
static async patch(port = undefined, hostname = undefined) {
|
||||
static async patch() {
|
||||
// Still need to keep this for old versions of Uptime Kuma
|
||||
if (Database.dbConfig.type === "sqlite") {
|
||||
await this.patchSqlite();
|
||||
@@ -406,23 +390,9 @@ class Database {
|
||||
// https://knexjs.org/guide/migrations.html
|
||||
// https://gist.github.com/NigelEarle/70db130cc040cc2868555b29a0278261
|
||||
try {
|
||||
// Disable foreign key check for SQLite
|
||||
// Known issue of knex: https://github.com/drizzle-team/drizzle-orm/issues/1813
|
||||
if (Database.dbConfig.type === "sqlite") {
|
||||
await R.exec("PRAGMA foreign_keys = OFF");
|
||||
}
|
||||
|
||||
await R.knex.migrate.latest({
|
||||
directory: Database.knexMigrationsPath,
|
||||
});
|
||||
|
||||
// Enable foreign key check for SQLite
|
||||
if (Database.dbConfig.type === "sqlite") {
|
||||
await R.exec("PRAGMA foreign_keys = ON");
|
||||
}
|
||||
|
||||
await this.migrateAggregateTable(port, hostname);
|
||||
|
||||
} catch (e) {
|
||||
// Allow missing patch files for downgrade or testing pr.
|
||||
if (e.message.includes("the following files are missing:")) {
|
||||
@@ -740,175 +710,6 @@ class Database {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Migrate the old data in the heartbeat table to the new format (stat_daily, stat_hourly, stat_minutely)
|
||||
* It should be run once while upgrading V1 to V2
|
||||
*
|
||||
* Normally, it should be in transaction, but UptimeCalculator wasn't designed to be in transaction before that.
|
||||
* I don't want to heavily modify the UptimeCalculator, so it is not in transaction.
|
||||
* Run `npm run reset-migrate-aggregate-table-state` to reset, in case the migration is interrupted.
|
||||
* @param {number} port Start the migration server on this port if provided
|
||||
* @param {string} hostname Start the migration server on this hostname if provided
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
static async migrateAggregateTable(port, hostname = undefined) {
|
||||
log.debug("db", "Enter Migrate Aggregate Table function");
|
||||
|
||||
// Add a setting for 2.0.0-dev users to skip this migration
|
||||
if (process.env.SET_MIGRATE_AGGREGATE_TABLE_TO_TRUE === "1") {
|
||||
log.warn("db", "SET_MIGRATE_AGGREGATE_TABLE_TO_TRUE is set to 1, skipping aggregate table migration forever (for 2.0.0-dev users)");
|
||||
await Settings.set("migrateAggregateTableState", "migrated");
|
||||
}
|
||||
|
||||
let migrateState = await Settings.get("migrateAggregateTableState");
|
||||
|
||||
// Skip if already migrated
|
||||
// If it is migrating, it possibly means the migration was interrupted, or the migration is in progress
|
||||
if (migrateState === "migrated") {
|
||||
log.debug("db", "Migrated aggregate table already, skip");
|
||||
return;
|
||||
} else if (migrateState === "migrating") {
|
||||
log.warn("db", "Aggregate table migration is already in progress, or it was interrupted");
|
||||
throw new Error("Aggregate table migration is already in progress");
|
||||
}
|
||||
|
||||
/**
|
||||
* Start migration server for displaying the migration status
|
||||
* @type {SimpleMigrationServer}
|
||||
*/
|
||||
let migrationServer;
|
||||
let msg;
|
||||
|
||||
if (port) {
|
||||
migrationServer = new SimpleMigrationServer();
|
||||
await migrationServer.start(port, hostname);
|
||||
}
|
||||
|
||||
log.info("db", "Migrating Aggregate Table");
|
||||
|
||||
log.info("db", "Getting list of unique monitors");
|
||||
|
||||
// Get a list of unique monitors from the heartbeat table, using raw sql
|
||||
let monitors = await R.getAll(`
|
||||
SELECT DISTINCT monitor_id
|
||||
FROM heartbeat
|
||||
ORDER BY monitor_id ASC
|
||||
`);
|
||||
|
||||
// Stop if stat_* tables are not empty
|
||||
for (let table of [ "stat_minutely", "stat_hourly", "stat_daily" ]) {
|
||||
let countResult = await R.getRow(`SELECT COUNT(*) AS count FROM ${table}`);
|
||||
let count = countResult.count;
|
||||
if (count > 0) {
|
||||
log.warn("db", `Aggregate table ${table} is not empty, migration will not be started (Maybe you were using 2.0.0-dev?)`);
|
||||
await migrationServer?.stop();
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
await Settings.set("migrateAggregateTableState", "migrating");
|
||||
|
||||
let progressPercent = 0;
|
||||
let part = 100 / monitors.length;
|
||||
let i = 1;
|
||||
for (let monitor of monitors) {
|
||||
// Get a list of unique dates from the heartbeat table, using raw sql
|
||||
let dates = await R.getAll(`
|
||||
SELECT DISTINCT DATE(time) AS date
|
||||
FROM heartbeat
|
||||
WHERE monitor_id = ?
|
||||
ORDER BY date ASC
|
||||
`, [
|
||||
monitor.monitor_id
|
||||
]);
|
||||
|
||||
for (let date of dates) {
|
||||
// New Uptime Calculator
|
||||
let calculator = new UptimeCalculator();
|
||||
calculator.monitorID = monitor.monitor_id;
|
||||
calculator.setMigrationMode(true);
|
||||
|
||||
// Get all the heartbeats for this monitor and date
|
||||
let heartbeats = await R.getAll(`
|
||||
SELECT status, ping, time
|
||||
FROM heartbeat
|
||||
WHERE monitor_id = ?
|
||||
AND DATE(time) = ?
|
||||
ORDER BY time ASC
|
||||
`, [ monitor.monitor_id, date.date ]);
|
||||
|
||||
if (heartbeats.length > 0) {
|
||||
msg = `[DON'T STOP] Migrating monitor data ${monitor.monitor_id} - ${date.date} [${progressPercent.toFixed(2)}%][${i}/${monitors.length}]`;
|
||||
log.info("db", msg);
|
||||
migrationServer?.update(msg);
|
||||
}
|
||||
|
||||
for (let heartbeat of heartbeats) {
|
||||
await calculator.update(heartbeat.status, parseFloat(heartbeat.ping), dayjs(heartbeat.time));
|
||||
}
|
||||
|
||||
progressPercent += (Math.round(part / dates.length * 100) / 100);
|
||||
|
||||
// Lazy to fix the floating point issue, it is acceptable since it is just a progress bar
|
||||
if (progressPercent > 100) {
|
||||
progressPercent = 100;
|
||||
}
|
||||
}
|
||||
|
||||
i++;
|
||||
}
|
||||
|
||||
msg = "Clearing non-important heartbeats";
|
||||
log.info("db", msg);
|
||||
migrationServer?.update(msg);
|
||||
|
||||
await Database.clearHeartbeatData(true);
|
||||
await Settings.set("migrateAggregateTableState", "migrated");
|
||||
await migrationServer?.stop();
|
||||
|
||||
if (monitors.length > 0) {
|
||||
log.info("db", "Aggregate Table Migration Completed");
|
||||
} else {
|
||||
log.info("db", "No data to migrate");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove all non-important heartbeats from heartbeat table, keep last 24-hour or {KEEP_LAST_ROWS} rows for each monitor
|
||||
* @param {boolean} detailedLog Log detailed information
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
static async clearHeartbeatData(detailedLog = false) {
|
||||
let monitors = await R.getAll("SELECT id FROM monitor");
|
||||
const sqlHourOffset = Database.sqlHourOffset();
|
||||
|
||||
for (let monitor of monitors) {
|
||||
if (detailedLog) {
|
||||
log.info("db", "Deleting non-important heartbeats for monitor " + monitor.id);
|
||||
}
|
||||
await R.exec(`
|
||||
DELETE FROM heartbeat
|
||||
WHERE monitor_id = ?
|
||||
AND important = 0
|
||||
AND time < ${sqlHourOffset}
|
||||
AND id NOT IN (
|
||||
SELECT id FROM ( -- written this way for Maria's support
|
||||
SELECT id
|
||||
FROM heartbeat
|
||||
WHERE monitor_id = ?
|
||||
ORDER BY time DESC
|
||||
LIMIT ?
|
||||
) AS limited_ids
|
||||
)
|
||||
`, [
|
||||
monitor.id,
|
||||
-24,
|
||||
monitor.id,
|
||||
100,
|
||||
]);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
module.exports = Database;
|
||||
|
@@ -65,7 +65,7 @@ class DockerHost {
/**
* Fetches the amount of containers on the Docker host
* @param {object} dockerHost Docker host to check for
* @returns {Promise<number>} Total amount of containers on the host
* @returns {number} Total amount of containers on the host
*/
static async testDockerHost(dockerHost) {
const options = {
@@ -14,15 +14,9 @@ class EmbeddedMariaDB {
|
||||
|
||||
mariadbDataDir = "/app/data/mariadb";
|
||||
|
||||
runDir = "/app/data/run";
|
||||
runDir = "/app/data/run/mariadb";
|
||||
|
||||
socketPath = this.runDir + "/mariadb.sock";
|
||||
|
||||
/**
|
||||
* The username to connect to the MariaDB
|
||||
* @type {string}
|
||||
*/
|
||||
username = null;
|
||||
socketPath = this.runDir + "/mysqld.sock";
|
||||
|
||||
/**
|
||||
* @type {ChildProcessWithoutNullStreams}
|
||||
@@ -52,42 +46,16 @@ class EmbeddedMariaDB {
|
||||
|
||||
/**
|
||||
* Start the embedded MariaDB
|
||||
* @throws {Error} If the current user is not "node" or "root"
|
||||
* @returns {Promise<void>|void} A promise that resolves when the MariaDB is started or void if it is already started
|
||||
*/
|
||||
start() {
|
||||
// Check if the current user is "node" or "root"
|
||||
this.username = require("os").userInfo().username;
|
||||
if (this.username !== "node" && this.username !== "root") {
|
||||
throw new Error("Embedded Mariadb supports only 'node' or 'root' user, but the current user is: " + this.username);
|
||||
}
|
||||
|
||||
this.initDB();
|
||||
|
||||
this.startChildProcess();
|
||||
|
||||
return new Promise((resolve) => {
|
||||
let interval = setInterval(() => {
|
||||
if (this.started) {
|
||||
clearInterval(interval);
|
||||
resolve();
|
||||
} else {
|
||||
log.info("mariadb", "Waiting for Embedded MariaDB to start...");
|
||||
}
|
||||
}, 1000);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Start the child process
|
||||
* @returns {void}
|
||||
*/
|
||||
startChildProcess() {
|
||||
if (this.childProcess) {
|
||||
log.info("mariadb", "Already started");
|
||||
return;
|
||||
}
|
||||
|
||||
this.initDB();
|
||||
|
||||
this.running = true;
|
||||
log.info("mariadb", "Starting Embedded MariaDB");
|
||||
this.childProcess = childProcess.spawn(this.exec, [
|
||||
@@ -95,8 +63,6 @@ class EmbeddedMariaDB {
|
||||
"--datadir=" + this.mariadbDataDir,
|
||||
`--socket=${this.socketPath}`,
|
||||
`--pid-file=${this.runDir}/mysqld.pid`,
|
||||
// Don't add the following option, the mariadb will not report message to the console, which affects initDBAfterStarted()
|
||||
// "--log-error=" + `${this.mariadbDataDir}/mariadb-error.log`,
|
||||
]);
|
||||
|
||||
this.childProcess.on("close", (code) => {
|
||||
@@ -106,8 +72,8 @@ class EmbeddedMariaDB {
|
||||
log.info("mariadb", "Stopped Embedded MariaDB: " + code);
|
||||
|
||||
if (code !== 0) {
|
||||
log.error("mariadb", "Try to restart Embedded MariaDB as it is not stopped by user");
|
||||
this.startChildProcess();
|
||||
log.info("mariadb", "Try to restart Embedded MariaDB as it is not stopped by user");
|
||||
this.start();
|
||||
}
|
||||
});
|
||||
|
||||
@@ -120,7 +86,7 @@ class EmbeddedMariaDB {
|
||||
});
|
||||
|
||||
let handler = (data) => {
|
||||
log.info("mariadb", data.toString("utf-8"));
|
||||
log.debug("mariadb", data.toString("utf-8"));
|
||||
if (data.toString("utf-8").includes("ready for connections")) {
|
||||
this.initDBAfterStarted();
|
||||
}
|
||||
@@ -128,6 +94,17 @@ class EmbeddedMariaDB {
|
||||
|
||||
this.childProcess.stdout.on("data", handler);
|
||||
this.childProcess.stderr.on("data", handler);
|
||||
|
||||
return new Promise((resolve) => {
|
||||
let interval = setInterval(() => {
|
||||
if (this.started) {
|
||||
clearInterval(interval);
|
||||
resolve();
|
||||
} else {
|
||||
log.info("mariadb", "Waiting for Embedded MariaDB to start...");
|
||||
}
|
||||
}, 1000);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -152,11 +129,9 @@ class EmbeddedMariaDB {
|
||||
recursive: true,
|
||||
});
|
||||
|
||||
let result = childProcess.spawnSync("mariadb-install-db", [
|
||||
let result = childProcess.spawnSync("mysql_install_db", [
|
||||
"--user=node",
|
||||
"--auth-root-socket-user=node",
|
||||
"--datadir=" + this.mariadbDataDir,
|
||||
"--auth-root-authentication-method=socket",
|
||||
"--ldata=" + this.mariadbDataDir,
|
||||
]);
|
||||
|
||||
if (result.status !== 0) {
|
||||
@@ -168,17 +143,6 @@ class EmbeddedMariaDB {
|
||||
}
|
||||
}
|
||||
|
||||
// Check the owner of the mariadb directory, and change it if necessary
|
||||
let stat = fs.statSync(this.mariadbDataDir);
|
||||
if (stat.uid !== 1000 || stat.gid !== 1000) {
|
||||
fs.chownSync(this.mariadbDataDir, 1000, 1000);
|
||||
}
|
||||
|
||||
// Check the permission of the mariadb directory, and change it if it is not 755
|
||||
if (stat.mode !== 0o755) {
|
||||
fs.chmodSync(this.mariadbDataDir, 0o755);
|
||||
}
|
||||
|
||||
if (!fs.existsSync(this.runDir)) {
|
||||
log.info("mariadb", `Embedded MariaDB: ${this.runDir} is not found, create one now.`);
|
||||
fs.mkdirSync(this.runDir, {
|
||||
@@ -186,13 +150,6 @@ class EmbeddedMariaDB {
|
||||
});
|
||||
}
|
||||
|
||||
stat = fs.statSync(this.runDir);
|
||||
if (stat.uid !== 1000 || stat.gid !== 1000) {
|
||||
fs.chownSync(this.runDir, 1000, 1000);
|
||||
}
|
||||
if (stat.mode !== 0o755) {
|
||||
fs.chmodSync(this.runDir, 0o755);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -202,7 +159,7 @@ class EmbeddedMariaDB {
|
||||
async initDBAfterStarted() {
|
||||
const connection = mysql.createConnection({
|
||||
socketPath: this.socketPath,
|
||||
user: this.username,
|
||||
user: "node",
|
||||
});
|
||||
|
||||
let result = await connection.execute("CREATE DATABASE IF NOT EXISTS `kuma`");
|
||||
|
@@ -1,22 +1,21 @@
|
||||
const { R } = require("redbean-node");
|
||||
const { log } = require("../../src/util");
|
||||
const { setSetting, setting } = require("../util-server");
|
||||
const Database = require("../database");
|
||||
const { Settings } = require("../settings");
|
||||
const dayjs = require("dayjs");
|
||||
|
||||
const DEFAULT_KEEP_PERIOD = 365;
|
||||
const DEFAULT_KEEP_PERIOD = 180;
|
||||
|
||||
/**
|
||||
* Clears old data from the heartbeat table and the stat_daily of the database.
|
||||
* Clears old data from the heartbeat table of the database.
|
||||
* @returns {Promise<void>} A promise that resolves when the data has been cleared.
|
||||
*/
|
||||
|
||||
const clearOldData = async () => {
|
||||
await Database.clearHeartbeatData();
|
||||
let period = await Settings.get("keepDataPeriodDays");
|
||||
let period = await setting("keepDataPeriodDays");
|
||||
|
||||
// Set Default Period
|
||||
if (period == null) {
|
||||
await Settings.set("keepDataPeriodDays", DEFAULT_KEEP_PERIOD, "general");
|
||||
await setSetting("keepDataPeriodDays", DEFAULT_KEEP_PERIOD, "general");
|
||||
period = DEFAULT_KEEP_PERIOD;
|
||||
}
|
||||
|
||||
@@ -26,28 +25,23 @@ const clearOldData = async () => {
|
||||
parsedPeriod = parseInt(period);
|
||||
} catch (_) {
|
||||
log.warn("clearOldData", "Failed to parse setting, resetting to default..");
|
||||
await Settings.set("keepDataPeriodDays", DEFAULT_KEEP_PERIOD, "general");
|
||||
await setSetting("keepDataPeriodDays", DEFAULT_KEEP_PERIOD, "general");
|
||||
parsedPeriod = DEFAULT_KEEP_PERIOD;
|
||||
}
|
||||
|
||||
if (parsedPeriod < 1) {
|
||||
log.info("clearOldData", `Data deletion has been disabled as period is less than 1. Period is ${parsedPeriod} days.`);
|
||||
} else {
|
||||
|
||||
log.debug("clearOldData", `Clearing Data older than ${parsedPeriod} days...`);
|
||||
|
||||
const sqlHourOffset = Database.sqlHourOffset();
|
||||
|
||||
try {
|
||||
// Heartbeat
|
||||
await R.exec("DELETE FROM heartbeat WHERE time < " + sqlHourOffset, [
|
||||
parsedPeriod * -24,
|
||||
]);
|
||||
|
||||
let timestamp = dayjs().subtract(parsedPeriod, "day").utc().startOf("day").unix();
|
||||
|
||||
// stat_daily
|
||||
await R.exec("DELETE FROM stat_daily WHERE timestamp < ? ", [
|
||||
timestamp,
|
||||
]);
|
||||
await R.exec(
|
||||
"DELETE FROM heartbeat WHERE time < " + sqlHourOffset,
|
||||
[ parsedPeriod * -24 ]
|
||||
);
|
||||
|
||||
if (Database.dbConfig.type === "sqlite") {
|
||||
await R.exec("PRAGMA optimize;");
|
||||
@@ -56,8 +50,6 @@ const clearOldData = async () => {
|
||||
log.error("clearOldData", `Failed to clear old data: ${e.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
log.debug("clearOldData", "Data cleared.");
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
|
@@ -9,7 +9,7 @@ class Group extends BeanModel {
* @param {boolean} showTags Should the JSON include monitor tags
* @param {boolean} certExpiry Should JSON include info about
* certificate expiry?
* @returns {Promise<object>} Object ready to parse
* @returns {object} Object ready to parse
*/
async toPublicJSON(showTags = false, certExpiry = false) {
let monitorBeanList = await this.getMonitorList();
@@ -29,7 +29,7 @@ class Group extends BeanModel {

/**
* Get all monitors
* @returns {Promise<Bean[]>} List of monitors
* @returns {Bean[]} List of monitors
*/
async getMonitorList() {
return R.convertToBeans("monitor", await R.getAll(`
@@ -11,7 +11,7 @@ class Maintenance extends BeanModel {
|
||||
/**
|
||||
* Return an object that ready to parse to JSON for public
|
||||
* Only show necessary data to public
|
||||
* @returns {Promise<object>} Object ready to parse
|
||||
* @returns {object} Object ready to parse
|
||||
*/
|
||||
async toPublicJSON() {
|
||||
|
||||
@@ -98,7 +98,7 @@ class Maintenance extends BeanModel {
|
||||
/**
|
||||
* Return an object that ready to parse to JSON
|
||||
* @param {string} timezone If not specified, the timeRange will be in UTC
|
||||
* @returns {Promise<object>} Object ready to parse
|
||||
* @returns {object} Object ready to parse
|
||||
*/
|
||||
async toJSON(timezone = null) {
|
||||
return this.toPublicJSON(timezone);
|
||||
@@ -143,7 +143,7 @@ class Maintenance extends BeanModel {
|
||||
* Convert data from socket to bean
|
||||
* @param {Bean} bean Bean to fill in
|
||||
* @param {object} obj Data to fill bean with
|
||||
* @returns {Promise<Bean>} Filled bean
|
||||
* @returns {Bean} Filled bean
|
||||
*/
|
||||
static async jsonToBean(bean, obj) {
|
||||
if (obj.id) {
|
||||
@@ -189,9 +189,9 @@ class Maintenance extends BeanModel {
|
||||
/**
|
||||
* Throw error if cron is invalid
|
||||
* @param {string|Date} cron Pattern or date
|
||||
* @returns {void}
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
static validateCron(cron) {
|
||||
static async validateCron(cron) {
|
||||
let job = new Cron(cron, () => {});
|
||||
job.stop();
|
||||
}
|
||||
@@ -239,7 +239,19 @@ class Maintenance extends BeanModel {
|
||||
this.beanMeta.status = "under-maintenance";
|
||||
clearTimeout(this.beanMeta.durationTimeout);
|
||||
|
||||
let duration = this.inferDuration(customDuration);
|
||||
// Check if duration is still in the window. If not, use the duration from the current time to the end of the window
|
||||
let duration;
|
||||
|
||||
if (customDuration > 0) {
|
||||
duration = customDuration;
|
||||
} else if (this.end_date) {
|
||||
let d = dayjs(this.end_date).diff(dayjs(), "second");
|
||||
if (d < this.duration) {
|
||||
duration = d * 1000;
|
||||
}
|
||||
} else {
|
||||
duration = this.duration * 1000;
|
||||
}
|
||||
|
||||
UptimeKumaServer.getInstance().sendMaintenanceListByUserID(this.user_id);
|
||||
|
||||
@@ -251,21 +263,9 @@ class Maintenance extends BeanModel {
|
||||
};
|
||||
|
||||
// Create Cron
|
||||
if (this.strategy === "recurring-interval") {
|
||||
// For recurring-interval, Croner needs to have interval and startAt
|
||||
const startDate = dayjs(this.startDate);
|
||||
const [ hour, minute ] = this.startTime.split(":");
|
||||
const startDateTime = startDate.hour(hour).minute(minute);
|
||||
this.beanMeta.job = new Cron(this.cron, {
|
||||
timezone: await this.getTimezone(),
|
||||
interval: this.interval_day * 24 * 60 * 60,
|
||||
startAt: startDateTime.toISOString(),
|
||||
}, startEvent);
|
||||
} else {
|
||||
this.beanMeta.job = new Cron(this.cron, {
|
||||
timezone: await this.getTimezone(),
|
||||
}, startEvent);
|
||||
}
|
||||
this.beanMeta.job = new Cron(this.cron, {
|
||||
timezone: await this.getTimezone(),
|
||||
}, startEvent);
|
||||
|
||||
// Continue if the maintenance is still in the window
|
||||
let runningTimeslot = this.getRunningTimeslot();
|
||||
@@ -311,24 +311,6 @@ class Maintenance extends BeanModel {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate the maintenance duration
|
||||
* @param {number} customDuration - The custom duration in milliseconds.
|
||||
* @returns {number} The inferred duration in milliseconds.
|
||||
*/
|
||||
inferDuration(customDuration) {
|
||||
// Check if duration is still in the window. If not, use the duration from the current time to the end of the window
|
||||
if (customDuration > 0) {
|
||||
return customDuration;
|
||||
} else if (this.end_date) {
|
||||
let d = dayjs(this.end_date).diff(dayjs(), "second");
|
||||
if (d < this.duration) {
|
||||
return d * 1000;
|
||||
}
|
||||
}
|
||||
return this.duration * 1000;
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop the maintenance
|
||||
* @returns {void}
|
||||
@@ -342,7 +324,7 @@ class Maintenance extends BeanModel {
|
||||
|
||||
/**
|
||||
* Is this maintenance currently active
|
||||
* @returns {Promise<boolean>} The maintenance is active?
|
||||
* @returns {boolean} The maintenance is active?
|
||||
*/
|
||||
async isUnderMaintenance() {
|
||||
return (await this.getStatus()) === "under-maintenance";
|
||||
@@ -350,7 +332,7 @@ class Maintenance extends BeanModel {
|
||||
|
||||
/**
|
||||
* Get the timezone of the maintenance
|
||||
* @returns {Promise<string>} timezone
|
||||
* @returns {string} timezone
|
||||
*/
|
||||
async getTimezone() {
|
||||
if (!this.timezone || this.timezone === "SAME_AS_SERVER") {
|
||||
@@ -361,7 +343,7 @@ class Maintenance extends BeanModel {
|
||||
|
||||
/**
|
||||
* Get offset for timezone
|
||||
* @returns {Promise<string>} offset
|
||||
* @returns {string} offset
|
||||
*/
|
||||
async getTimezoneOffset() {
|
||||
return dayjs.tz(dayjs(), await this.getTimezone()).format("Z");
|
||||
@@ -369,7 +351,7 @@ class Maintenance extends BeanModel {
|
||||
|
||||
/**
|
||||
* Get the current status of the maintenance
|
||||
* @returns {Promise<string>} Current status
|
||||
* @returns {string} Current status
|
||||
*/
|
||||
async getStatus() {
|
||||
if (!this.active) {
|
||||
@@ -413,8 +395,10 @@ class Maintenance extends BeanModel {
|
||||
} else if (!this.strategy.startsWith("recurring-")) {
|
||||
this.cron = "";
|
||||
} else if (this.strategy === "recurring-interval") {
|
||||
// For intervals, the pattern is calculated in the run function as the interval-option is set
|
||||
this.cron = "* * * * *";
|
||||
let array = this.start_time.split(":");
|
||||
let hour = parseInt(array[0]);
|
||||
let minute = parseInt(array[1]);
|
||||
this.cron = minute + " " + hour + " */" + this.interval_day + " * *";
|
||||
this.duration = this.calcDuration();
|
||||
log.debug("maintenance", "Cron: " + this.cron);
|
||||
log.debug("maintenance", "Duration: " + this.duration);
|
||||
|
@@ -2,10 +2,10 @@ const dayjs = require("dayjs");
|
||||
const axios = require("axios");
|
||||
const { Prometheus } = require("../prometheus");
|
||||
const { log, UP, DOWN, PENDING, MAINTENANCE, flipStatus, MAX_INTERVAL_SECOND, MIN_INTERVAL_SECOND,
|
||||
SQL_DATETIME_FORMAT, evaluateJsonQuery
|
||||
SQL_DATETIME_FORMAT
|
||||
} = require("../../src/util");
|
||||
const { tcping, ping, checkCertificate, checkStatusCode, getTotalClientInRoom, setting, mssqlQuery, postgresQuery, mysqlQuery, setSetting, httpNtlm, radius, grpcQuery,
|
||||
redisPingAsync, kafkaProducerAsync, getOidcTokenClientCredentials, rootCertificatesFingerprints, axiosAbortSignal
|
||||
redisPingAsync, mongodbPing, kafkaProducerAsync, getOidcTokenClientCredentials, rootCertificatesFingerprints, axiosAbortSignal
|
||||
} = require("../util-server");
|
||||
const { R } = require("redbean-node");
|
||||
const { BeanModel } = require("redbean-node/dist/bean-model");
|
||||
@@ -17,6 +17,7 @@ const apicache = require("../modules/apicache");
|
||||
const { UptimeKumaServer } = require("../uptime-kuma-server");
|
||||
const { DockerHost } = require("../docker");
|
||||
const Gamedig = require("gamedig");
|
||||
const jsonata = require("jsonata");
|
||||
const jwt = require("jsonwebtoken");
|
||||
const crypto = require("crypto");
|
||||
const { UptimeCalculator } = require("../uptime-calculator");
|
||||
@@ -42,7 +43,7 @@ class Monitor extends BeanModel {
|
||||
* @param {boolean} showTags Include tags in JSON
|
||||
* @param {boolean} certExpiry Include certificate expiry info in
|
||||
* JSON
|
||||
* @returns {Promise<object>} Object ready to parse
|
||||
* @returns {object} Object ready to parse
|
||||
*/
|
||||
async toPublicJSON(showTags = false, certExpiry = false) {
|
||||
let obj = {
|
||||
@@ -71,12 +72,23 @@ class Monitor extends BeanModel {

/**
* Return an object that ready to parse to JSON
* @param {object} preloadData to prevent n+1 problems, we query the data in a batch outside of this function
* @param {boolean} includeSensitiveData Include sensitive data in
* JSON
* @returns {object} Object ready to parse
*/
toJSON(preloadData = {}, includeSensitiveData = true) {
async toJSON(includeSensitiveData = true) {

let notificationIDList = {};

let list = await R.find("monitor_notification", " monitor_id = ? ", [
this.id,
]);

for (let bean of list) {
notificationIDList[bean.notification_id] = true;
}

const tags = await this.getTags();

let screenshot = null;

@@ -84,25 +96,21 @@ class Monitor extends BeanModel {
screenshot = "/screenshots/" + jwt.sign(this.id, UptimeKumaServer.getInstance().jwtSecret) + ".png";
}

const path = preloadData.paths.get(this.id) || [];
const pathName = path.join(" / ");

let data = {
id: this.id,
name: this.name,
description: this.description,
path,
pathName,
pathName: await this.getPathName(),
parent: this.parent,
childrenIDs: preloadData.childrenIDs.get(this.id) || [],
childrenIDs: await Monitor.getAllChildrenIDs(this.id),
url: this.url,
method: this.method,
hostname: this.hostname,
port: this.port,
maxretries: this.maxretries,
weight: this.weight,
active: preloadData.activeStatus.get(this.id),
forceInactive: preloadData.forceInactive.get(this.id),
active: await this.isActive(),
forceInactive: !await Monitor.isParentActive(this.id),
type: this.type,
timeout: this.timeout,
interval: this.interval,
@@ -122,9 +130,9 @@ class Monitor extends BeanModel {
docker_container: this.docker_container,
docker_host: this.docker_host,
proxyId: this.proxy_id,
notificationIDList: preloadData.notifications.get(this.id) || {},
tags: preloadData.tags.get(this.id) || [],
maintenance: preloadData.maintenanceStatus.get(this.id),
notificationIDList,
tags: tags,
maintenance: await Monitor.isUnderMaintenance(this.id),
mqttTopic: this.mqttTopic,
mqttSuccessMessage: this.mqttSuccessMessage,
mqttCheckType: this.mqttCheckType,
@@ -148,13 +156,7 @@ class Monitor extends BeanModel {
kafkaProducerAllowAutoTopicCreation: this.getKafkaProducerAllowAutoTopicCreation(),
kafkaProducerMessage: this.kafkaProducerMessage,
screenshot,
cacheBust: this.getCacheBust(),
remote_browser: this.remote_browser,
snmpOid: this.snmpOid,
jsonPathOperator: this.jsonPathOperator,
snmpVersion: this.snmpVersion,
rabbitmqNodes: JSON.parse(this.rabbitmqNodes),
conditions: JSON.parse(this.conditions),
};

if (includeSensitiveData) {
@@ -184,8 +186,6 @@ class Monitor extends BeanModel {
tlsCert: this.tlsCert,
tlsKey: this.tlsKey,
kafkaProducerSaslOptions: JSON.parse(this.kafkaProducerSaslOptions),
rabbitmqUsername: this.rabbitmqUsername,
rabbitmqPassword: this.rabbitmqPassword,
};
}

@@ -193,6 +193,16 @@ class Monitor extends BeanModel {
return data;
}

/**
* Checks if the monitor is active based on itself and its parents
* @returns {Promise<boolean>} Is the monitor active?
*/
async isActive() {
const parentActive = await Monitor.isParentActive(this.id);

return (this.active === 1) && parentActive;
}

/**
* Get all tags applied to this monitor
* @returns {Promise<LooseObject<any>[]>} List of tags on the
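The toJSON() change above replaces per-monitor database queries with lookups into maps that are prepared once for the whole monitor list. A rough sketch of that pattern (the row shape is assumed from the SQL shown in the preparePreloadData hunk further down):

```js
// Assumed row shape: { monitor_id, notification_id } for every monitor in the batch.
function buildNotificationMap(rows) {
    const map = new Map();
    for (const row of rows) {
        if (!map.has(row.monitor_id)) {
            map.set(row.monitor_id, {});
        }
        map.get(row.monitor_id)[row.notification_id] = true;
    }
    return map;
}

// Each monitor then reads its own slice without touching the database again:
// notificationIDList: preloadData.notifications.get(this.id) || {}
```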
@@ -231,12 +241,12 @@ class Monitor extends BeanModel {
/**
* Encode user and password to Base64 encoding
* for HTTP "basic" auth, as per RFC-7617
* @param {string|null} user - The username (nullable if not changed by a user)
* @param {string|null} pass - The password (nullable if not changed by a user)
* @returns {string} Encoded Base64 string
* @param {string} user Username to encode
* @param {string} pass Password to encode
* @returns {string} Encoded username:password
*/
encodeBase64(user, pass) {
return Buffer.from(`${user || ""}:${pass || ""}`).toString("base64");
return Buffer.from(user + ":" + pass).toString("base64");
}

/**
@@ -279,14 +289,6 @@ class Monitor extends BeanModel {
return Boolean(this.grpcEnableTls);
}

/**
* Parse to boolean
* @returns {boolean} if cachebusting is enabled
*/
getCacheBust() {
return Boolean(this.cacheBust);
}

/**
* Get accepted status codes
* @returns {object} Accepted status codes
@@ -322,9 +324,9 @@ class Monitor extends BeanModel {
/**
* Start monitor
* @param {Server} io Socket server instance
* @returns {Promise<void>}
* @returns {void}
*/
async start(io) {
start(io) {
let previousBeat = null;
let retries = 0;
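The encodeBase64 hunk above swaps plain string concatenation for a template literal that tolerates null credentials. A small sketch of the difference:

```js
// With plain concatenation, a null user becomes the literal string "null:...".
Buffer.from(null + ":" + "secret").toString("base64");             // "bnVsbDpzZWNyZXQ="

// Defaulting to empty strings keeps the RFC 7617 "user:pass" shape intact.
Buffer.from(`${null || ""}:${"secret" || ""}`).toString("base64"); // "OnNlY3JldA=="
```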
@@ -492,14 +494,6 @@ class Monitor extends BeanModel {
options.data = bodyValue;
}

if (this.cacheBust) {
const randomFloatString = Math.random().toString(36);
const cacheBust = randomFloatString.substring(2);
options.params = {
uptime_kuma_cachebuster: cacheBust,
};
}

if (this.proxy_id) {
const proxy = await R.load("proxy", this.proxy_id);

@@ -535,18 +529,6 @@ class Monitor extends BeanModel {
}
}

let tlsInfo = {};
// Store tlsInfo when secureConnect event is emitted
// The keylog event listener is a workaround to access the tlsSocket
options.httpsAgent.once("keylog", async (line, tlsSocket) => {
tlsSocket.once("secureConnect", async () => {
tlsInfo = checkCertificate(tlsSocket);
tlsInfo.valid = tlsSocket.authorized || false;

await this.handleTlsInfo(tlsInfo);
});
});

log.debug("monitor", `[${this.name}] Axios Options: ${JSON.stringify(options)}`);
log.debug("monitor", `[${this.name}] Axios Request`);

@@ -556,19 +538,31 @@ class Monitor extends BeanModel {
bean.msg = `${res.status} - ${res.statusText}`;
bean.ping = dayjs().valueOf() - startTime;

// fallback for if kelog event is not emitted, but we may still have tlsInfo,
// e.g. if the connection is made through a proxy
if (this.getUrl()?.protocol === "https:" && tlsInfo.valid === undefined) {
const tlsSocket = res.request.res.socket;
// Check certificate if https is used
let certInfoStartTime = dayjs().valueOf();
if (this.getUrl()?.protocol === "https:") {
log.debug("monitor", `[${this.name}] Check cert`);
try {
let tlsInfoObject = checkCertificate(res);
tlsInfo = await this.updateTlsInfo(tlsInfoObject);

if (tlsSocket) {
tlsInfo = checkCertificate(tlsSocket);
tlsInfo.valid = tlsSocket.authorized || false;
if (!this.getIgnoreTls() && this.isEnabledExpiryNotification()) {
log.debug("monitor", `[${this.name}] call checkCertExpiryNotifications`);
await this.checkCertExpiryNotifications(tlsInfoObject);
}

await this.handleTlsInfo(tlsInfo);
} catch (e) {
if (e.message !== "No TLS certificate in response") {
log.error("monitor", "Caught error");
log.error("monitor", e.message);
}
}
}

if (process.env.TIMELOGGER === "1") {
log.debug("monitor", "Cert Info Query Time: " + (dayjs().valueOf() - certInfoStartTime) + "ms");
}

if (process.env.UPTIME_KUMA_LOG_RESPONSE_BODY_MONITOR_ID === this.id) {
log.info("monitor", res.data);
}
@@ -600,15 +594,21 @@ class Monitor extends BeanModel {
} else if (this.type === "json-query") {
let data = res.data;

const { status, response } = await evaluateJsonQuery(data, this.jsonPath, this.jsonPathOperator, this.expectedValue);

if (status) {
bean.status = UP;
bean.msg = `JSON query passes (comparing ${response} ${this.jsonPathOperator} ${this.expectedValue})`;
} else {
throw new Error(`JSON query does not pass (comparing ${response} ${this.jsonPathOperator} ${this.expectedValue})`);
// convert data to object
if (typeof data === "string") {
data = JSON.parse(data);
}

let expression = jsonata(this.jsonPath);

let result = await expression.evaluate(data);

if (result.toString() === this.expectedValue) {
bean.msg += ", expected value is found";
bean.status = UP;
} else {
throw new Error(bean.msg + ", but value is not equal to expected value, value was: [" + result + "]");
}
}

} else if (this.type === "port") {
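The json-query hunk above falls back to evaluating the JSONata expression inline and comparing the stringified result against the expected value. A standalone sketch of that flow using the jsonata package (the sample data and path are made up):

```js
const jsonata = require("jsonata");

async function checkJsonQuery(data, jsonPath, expectedValue) {
    // Accept raw JSON strings as well as already-parsed objects.
    if (typeof data === "string") {
        data = JSON.parse(data);
    }
    const expression = jsonata(jsonPath);
    const result = await expression.evaluate(data);
    return result !== undefined && result.toString() === expectedValue;
}

// checkJsonQuery({ service: { healthy: true } }, "service.healthy", "true") resolves to true.
```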
@@ -814,6 +814,15 @@ class Monitor extends BeanModel {
bean.msg = await mysqlQuery(this.databaseConnectionString, this.databaseQuery || "SELECT 1", mysqlPassword);
bean.status = UP;
bean.ping = dayjs().valueOf() - startTime;
} else if (this.type === "mongodb") {
let startTime = dayjs().valueOf();

await mongodbPing(this.databaseConnectionString);

bean.msg = "";
bean.status = UP;
bean.ping = dayjs().valueOf() - startTime;

} else if (this.type === "radius") {
let startTime = dayjs().valueOf();

@@ -844,7 +853,7 @@ class Monitor extends BeanModel {
} else if (this.type === "redis") {
let startTime = dayjs().valueOf();

bean.msg = await redisPingAsync(this.databaseConnectionString, !this.ignoreTls);
bean.msg = await redisPingAsync(this.databaseConnectionString);
bean.status = UP;
bean.ping = dayjs().valueOf() - startTime;

@@ -934,7 +943,7 @@ class Monitor extends BeanModel {
log.debug("monitor", `[${this.name}] apicache clear`);
apicache.clear();

await UptimeKumaServer.getInstance().sendMaintenanceListByUserID(this.user_id);
UptimeKumaServer.getInstance().sendMaintenanceListByUserID(this.user_id);

} else {
bean.important = false;
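The mongodb hunk above measures latency the same way the other database checks do: take a timestamp before the ping and subtract it afterwards. A reduced sketch, with mongodbPing standing in for the helper imported from util-server:

```js
const dayjs = require("dayjs");

// mongodbPing is assumed to resolve once the server answers a ping command.
async function timedPing(mongodbPing, connectionString) {
    const startTime = dayjs().valueOf();
    await mongodbPing(connectionString);
    return dayjs().valueOf() - startTime; // elapsed milliseconds
}
```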
@@ -1089,9 +1098,9 @@ class Monitor extends BeanModel {

/**
* Stop monitor
* @returns {Promise<void>}
* @returns {void}
*/
async stop() {
stop() {
clearTimeout(this.heartbeatInterval);
this.isStop = true;

@@ -1178,18 +1187,6 @@ class Monitor extends BeanModel {
return checkCertificateResult;
}

/**
* Checks if the monitor is active based on itself and its parents
* @param {number} monitorID ID of monitor to send
* @param {boolean} active is active
* @returns {Promise<boolean>} Is the monitor active?
*/
static async isActive(monitorID, active) {
const parentActive = await Monitor.isParentActive(monitorID);

return (active === 1) && parentActive;
}

/**
* Send statistics to clients
* @param {Server} io Socket server instance
@@ -1326,10 +1323,7 @@ class Monitor extends BeanModel {
for (let notification of notificationList) {
try {
const heartbeatJSON = bean.toJSON();
const monitorData = [{ id: monitor.id,
active: monitor.active
}];
const preloadData = await Monitor.preparePreloadData(monitorData);

// Prevent if the msg is undefined, notifications such as Discord cannot send out.
if (!heartbeatJSON["msg"]) {
heartbeatJSON["msg"] = "N/A";
@@ -1340,7 +1334,7 @@ class Monitor extends BeanModel {
heartbeatJSON["timezoneOffset"] = UptimeKumaServer.getInstance().getTimezoneOffset();
heartbeatJSON["localDateTime"] = dayjs.utc(heartbeatJSON["time"]).tz(heartbeatJSON["timezone"]).format(SQL_DATETIME_FORMAT);

await Notification.send(JSON.parse(notification.config), msg, monitor.toJSON(preloadData, false), heartbeatJSON);
await Notification.send(JSON.parse(notification.config), msg, await monitor.toJSON(false), heartbeatJSON);
} catch (e) {
log.error("monitor", "Cannot send notification to " + notification.name);
log.error("monitor", e);
@@ -1379,7 +1373,7 @@ class Monitor extends BeanModel {
let notifyDays = await setting("tlsExpiryNotifyDays");
if (notifyDays == null || !Array.isArray(notifyDays)) {
// Reset Default
await setSetting("tlsExpiryNotifyDays", [ 7, 14, 21 ], "general");
setSetting("tlsExpiryNotifyDays", [ 7, 14, 21 ], "general");
notifyDays = [ 7, 14, 21 ];
}

@@ -1434,7 +1428,7 @@ class Monitor extends BeanModel {
for (let notification of notificationList) {
try {
log.debug("monitor", "Sending to " + notification.name);
await Notification.send(JSON.parse(notification.config), `[${this.name}][${this.url}] ${certType} certificate ${certCN} will expire in ${daysRemaining} days`);
await Notification.send(JSON.parse(notification.config), `[${this.name}][${this.url}] ${certType} certificate ${certCN} will be expired in ${daysRemaining} days`);
sent = true;
} catch (e) {
log.error("monitor", "Cannot send cert notification to " + notification.name);
@@ -1502,110 +1496,6 @@ class Monitor extends BeanModel {
}
}

/**
* Gets monitor notification of multiple monitor
* @param {Array} monitorIDs IDs of monitor to get
* @returns {Promise<LooseObject<any>>} object
*/
static async getMonitorNotification(monitorIDs) {
return await R.getAll(`
SELECT monitor_notification.monitor_id, monitor_notification.notification_id
FROM monitor_notification
WHERE monitor_notification.monitor_id IN (${monitorIDs.map((_) => "?").join(",")})
`, monitorIDs);
}

/**
* Gets monitor tags of multiple monitor
* @param {Array} monitorIDs IDs of monitor to get
* @returns {Promise<LooseObject<any>>} object
*/
static async getMonitorTag(monitorIDs) {
return await R.getAll(`
SELECT monitor_tag.monitor_id, monitor_tag.tag_id, monitor_tag.value, tag.name, tag.color
FROM monitor_tag
JOIN tag ON monitor_tag.tag_id = tag.id
WHERE monitor_tag.monitor_id IN (${monitorIDs.map((_) => "?").join(",")})
`, monitorIDs);
}

/**
* prepare preloaded data for efficient access
* @param {Array} monitorData IDs & active field of monitor to get
* @returns {Promise<LooseObject<any>>} object
*/
static async preparePreloadData(monitorData) {

const notificationsMap = new Map();
const tagsMap = new Map();
const maintenanceStatusMap = new Map();
const childrenIDsMap = new Map();
const activeStatusMap = new Map();
const forceInactiveMap = new Map();
const pathsMap = new Map();

if (monitorData.length > 0) {
const monitorIDs = monitorData.map(monitor => monitor.id);
const notifications = await Monitor.getMonitorNotification(monitorIDs);
const tags = await Monitor.getMonitorTag(monitorIDs);
const maintenanceStatuses = await Promise.all(monitorData.map(monitor => Monitor.isUnderMaintenance(monitor.id)));
const childrenIDs = await Promise.all(monitorData.map(monitor => Monitor.getAllChildrenIDs(monitor.id)));
const activeStatuses = await Promise.all(monitorData.map(monitor => Monitor.isActive(monitor.id, monitor.active)));
const forceInactiveStatuses = await Promise.all(monitorData.map(monitor => Monitor.isParentActive(monitor.id)));
const paths = await Promise.all(monitorData.map(monitor => Monitor.getAllPath(monitor.id, monitor.name)));

notifications.forEach(row => {
if (!notificationsMap.has(row.monitor_id)) {
notificationsMap.set(row.monitor_id, {});
}
notificationsMap.get(row.monitor_id)[row.notification_id] = true;
});

tags.forEach(row => {
if (!tagsMap.has(row.monitor_id)) {
tagsMap.set(row.monitor_id, []);
}
tagsMap.get(row.monitor_id).push({
tag_id: row.tag_id,
monitor_id: row.monitor_id,
value: row.value,
name: row.name,
color: row.color
});
});

monitorData.forEach((monitor, index) => {
maintenanceStatusMap.set(monitor.id, maintenanceStatuses[index]);
});

monitorData.forEach((monitor, index) => {
childrenIDsMap.set(monitor.id, childrenIDs[index]);
});

monitorData.forEach((monitor, index) => {
activeStatusMap.set(monitor.id, activeStatuses[index]);
});

monitorData.forEach((monitor, index) => {
forceInactiveMap.set(monitor.id, !forceInactiveStatuses[index]);
});

monitorData.forEach((monitor, index) => {
pathsMap.set(monitor.id, paths[index]);
});
}

return {
notifications: notificationsMap,
tags: tagsMap,
maintenanceStatus: maintenanceStatusMap,
childrenIDs: childrenIDsMap,
activeStatus: activeStatusMap,
forceInactive: forceInactiveMap,
paths: pathsMap,
};
}

/**
* Gets Parent of the monitor
* @param {number} monitorID ID of monitor to get
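The removed preload helpers above batch the per-monitor lookups into a single query by expanding one placeholder per ID. A sketch of that placeholder expansion with redbean-node's R.getAll (table and column names follow the hunk):

```js
const { R } = require("redbean-node");

// One "?" per monitor ID keeps the query parameterised while avoiding an N+1 loop.
async function getMonitorNotifications(monitorIDs) {
    const placeholders = monitorIDs.map(() => "?").join(",");
    return await R.getAll(`
        SELECT monitor_id, notification_id
        FROM monitor_notification
        WHERE monitor_id IN (${placeholders})
    `, monitorIDs);
}
```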
@@ -1637,21 +1527,19 @@ class Monitor extends BeanModel {
}

/**
* Gets the full path
* @param {number} monitorID ID of the monitor to get
* @param {string} name of the monitor to get
* @returns {Promise<string[]>} Full path (includes groups and the name) of the monitor
* Gets Full Path-Name (Groups and Name)
* @returns {Promise<string>} Full path name of this monitor
*/
static async getAllPath(monitorID, name) {
const path = [ name ];
async getPathName() {
let path = this.name;

if (this.parent === null) {
return path;
}

let parent = await Monitor.getParent(monitorID);
let parent = await Monitor.getParent(this.id);
while (parent !== null) {
path.unshift(parent.name);
path = `${parent.name} / ${path}`;
parent = await Monitor.getParent(parent.id);
}

@@ -1681,7 +1569,7 @@ class Monitor extends BeanModel {
}

/**
* Unlinks all children of the group monitor
* Unlinks all children of the the group monitor
* @param {number} groupID ID of group to remove children of
* @returns {Promise<void>}
*/
@@ -1723,20 +1611,6 @@ class Monitor extends BeanModel {
return oAuthAccessToken;
}

/**
* Store TLS certificate information and check for expiry
* @param {object} tlsInfo Information about the TLS connection
* @returns {Promise<void>}
*/
async handleTlsInfo(tlsInfo) {
await this.updateTlsInfo(tlsInfo);
this.prometheus?.update(null, tlsInfo);

if (!this.getIgnoreTls() && this.isEnabledExpiryNotification()) {
log.debug("monitor", `[${this.name}] call checkCertExpiryNotifications`);
await this.checkCertExpiryNotifications(tlsInfo);
}
}
}

module.exports = Monitor;
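The getAllPath/getPathName hunk above shows the same walk up the parent chain expressed two ways: collecting segments into an array versus concatenating a string. A compact sketch of the array variant (getParent is assumed to resolve a monitor's parent row or null):

```js
// Walks parent groups from the monitor up to the root,
// returning e.g. ["Group", "Subgroup", "Monitor"].
async function getAllPath(getParent, monitorID, name) {
    const path = [ name ];
    let parent = await getParent(monitorID);
    while (parent !== null) {
        path.unshift(parent.name);
        parent = await getParent(parent.id);
    }
    return path;
}

// path.join(" / ") then yields the display string, e.g. "Group / Subgroup / Monitor".
```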
@@ -4,11 +4,6 @@ const cheerio = require("cheerio");
const { UptimeKumaServer } = require("../uptime-kuma-server");
const jsesc = require("jsesc");
const googleAnalytics = require("../google-analytics");
const { marked } = require("marked");
const { Feed } = require("feed");
const config = require("../config");

const { STATUS_PAGE_ALL_DOWN, STATUS_PAGE_ALL_UP, STATUS_PAGE_MAINTENANCE, STATUS_PAGE_PARTIAL_DOWN, UP, MAINTENANCE, DOWN } = require("../../src/util");

class StatusPage extends BeanModel {

@@ -18,30 +13,12 @@ class StatusPage extends BeanModel {
*/
static domainMappingList = { };

/**
* Handle responses to RSS pages
* @param {Response} response Response object
* @param {string} slug Status page slug
* @returns {Promise<void>}
*/
static async handleStatusPageRSSResponse(response, slug) {
let statusPage = await R.findOne("status_page", " slug = ? ", [
slug
]);

if (statusPage) {
response.send(await StatusPage.renderRSS(statusPage, slug));
} else {
response.status(404).send(UptimeKumaServer.getInstance().indexHTML);
}
}

/**
* Handle responses to status page
* @param {Response} response Response object
* @param {string} indexHTML HTML to render
* @param {string} slug Status page slug
* @returns {Promise<void>}
* @returns {void}
*/
static async handleStatusPageResponse(response, indexHTML, slug) {
// Handle url with trailing slash (http://localhost:3001/status/)
@@ -61,51 +38,15 @@ class StatusPage extends BeanModel {
}
}

/**
* SSR for RSS feed
* @param {statusPage} statusPage object
* @param {slug} slug from router
* @returns {Promise<string>} the rendered html
*/
static async renderRSS(statusPage, slug) {
const { heartbeats, statusDescription } = await StatusPage.getRSSPageData(statusPage);

let proto = config.isSSL ? "https" : "http";
let host = `${proto}://${config.hostname || "localhost"}:${config.port}/status/${slug}`;

const feed = new Feed({
title: "uptime kuma rss feed",
description: `current status: ${statusDescription}`,
link: host,
language: "en", // optional, used only in RSS 2.0, possible values: http://www.w3.org/TR/REC-html40/struct/dirlang.html#langcodes
updated: new Date(), // optional, default = today
});

heartbeats.forEach(heartbeat => {
feed.addItem({
title: `${heartbeat.name} is down`,
description: `${heartbeat.name} has been down since ${heartbeat.time}`,
id: heartbeat.monitorID,
date: new Date(heartbeat.time),
});
});

return feed.rss2();
}

/**
* SSR for status pages
* @param {string} indexHTML HTML page to render
* @param {StatusPage} statusPage Status page populate HTML with
* @returns {Promise<string>} the rendered html
* @returns {void}
*/
static async renderHTML(indexHTML, statusPage) {
const $ = cheerio.load(indexHTML);

const description155 = marked(statusPage.description ?? "")
.replace(/<[^>]+>/gm, "")
.trim()
.substring(0, 155);
const description155 = statusPage.description?.substring(0, 155) ?? "";

$("title").text(statusPage.title);
$("meta[name=description]").attr("content", description155);
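The renderRSS hunk above builds the feed with the feed package: one Feed instance, one addItem call per DOWN heartbeat, then rss2() to serialise. A trimmed sketch of that sequence (the sample heartbeat shape mirrors the hunk; everything else is illustrative):

```js
const { Feed } = require("feed");

function buildRSS(host, statusDescription, downHeartbeats) {
    const feed = new Feed({
        title: "uptime kuma rss feed",
        description: `current status: ${statusDescription}`,
        link: host,
        updated: new Date(),
    });
    for (const heartbeat of downHeartbeats) {
        feed.addItem({
            title: `${heartbeat.name} is down`,
            description: `${heartbeat.name} has been down since ${heartbeat.time}`,
            id: heartbeat.monitorID,
            date: new Date(heartbeat.time),
        });
    }
    return feed.rss2();
}
```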
@@ -152,109 +93,6 @@ class StatusPage extends BeanModel {
return $.root().html();
}

/**
* @param {heartbeats} heartbeats from getRSSPageData
* @returns {number} status_page constant from util.ts
*/
static overallStatus(heartbeats) {
if (heartbeats.length === 0) {
return -1;
}

let status = STATUS_PAGE_ALL_UP;
let hasUp = false;

for (let beat of heartbeats) {
if (beat.status === MAINTENANCE) {
return STATUS_PAGE_MAINTENANCE;
} else if (beat.status === UP) {
hasUp = true;
} else {
status = STATUS_PAGE_PARTIAL_DOWN;
}
}

if (! hasUp) {
status = STATUS_PAGE_ALL_DOWN;
}

return status;
}

/**
* @param {number} status from overallStatus
* @returns {string} description
*/
static getStatusDescription(status) {
if (status === -1) {
return "No Services";
}

if (status === STATUS_PAGE_ALL_UP) {
return "All Systems Operational";
}

if (status === STATUS_PAGE_PARTIAL_DOWN) {
return "Partially Degraded Service";
}

if (status === STATUS_PAGE_ALL_DOWN) {
return "Degraded Service";
}

// TODO: show the real maintenance information: title, description, time
if (status === MAINTENANCE) {
return "Under maintenance";
}

return "?";
}

/**
* Get all data required for RSS
* @param {StatusPage} statusPage Status page to get data for
* @returns {object} Status page data
*/
static async getRSSPageData(statusPage) {
// get all heartbeats that correspond to this statusPage
const config = await statusPage.toPublicJSON();

// Public Group List
const showTags = !!statusPage.show_tags;

const list = await R.find("group", " public = 1 AND status_page_id = ? ORDER BY weight ", [
statusPage.id
]);

let heartbeats = [];

for (let groupBean of list) {
let monitorGroup = await groupBean.toPublicJSON(showTags, config?.showCertificateExpiry);
for (const monitor of monitorGroup.monitorList) {
const heartbeat = await R.findOne("heartbeat", "monitor_id = ? ORDER BY time DESC", [ monitor.id ]);
if (heartbeat) {
heartbeats.push({
...monitor,
status: heartbeat.status,
time: heartbeat.time
});
}
}
}

// calculate RSS feed description
let status = StatusPage.overallStatus(heartbeats);
let statusDescription = StatusPage.getStatusDescription(status);

// keep only DOWN heartbeats in the RSS feed
heartbeats = heartbeats.filter(heartbeat => heartbeat.status === DOWN);

return {
heartbeats,
statusDescription
};
}

/**
* Get all status page data in one call
* @param {StatusPage} statusPage Status page to get data for
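overallStatus above reduces a list of heartbeat statuses to a single page-level constant: any MAINTENANCE beat wins outright, any non-UP beat degrades the page, and a page with no UP beats at all is fully down. A condensed sketch with the constants inlined as assumptions (the real values live in src/util):

```js
// Assumed constants for illustration only.
const UP = 1;
const MAINTENANCE = 3;
const STATUS_PAGE_ALL_DOWN = 0;
const STATUS_PAGE_ALL_UP = 1;
const STATUS_PAGE_PARTIAL_DOWN = 2;
const STATUS_PAGE_MAINTENANCE = 3;

function overallStatus(heartbeats) {
    if (heartbeats.length === 0) {
        return -1; // "No Services"
    }
    let status = STATUS_PAGE_ALL_UP;
    let hasUp = false;
    for (const beat of heartbeats) {
        if (beat.status === MAINTENANCE) {
            return STATUS_PAGE_MAINTENANCE;
        } else if (beat.status === UP) {
            hasUp = true;
        } else {
            status = STATUS_PAGE_PARTIAL_DOWN;
        }
    }
    return hasUp ? status : STATUS_PAGE_ALL_DOWN;
}
```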
@@ -400,7 +238,6 @@ class StatusPage extends BeanModel {
description: this.description,
icon: this.getIcon(),
theme: this.theme,
autoRefreshInterval: this.autoRefreshInterval,
published: !!this.published,
showTags: !!this.show_tags,
domainNameList: this.getDomainNameList(),
@@ -423,7 +260,6 @@ class StatusPage extends BeanModel {
title: this.title,
description: this.description,
icon: this.getIcon(),
autoRefreshInterval: this.autoRefreshInterval,
theme: this.theme,
published: !!this.published,
showTags: !!this.show_tags,
@@ -485,7 +485,7 @@ function ApiCache() {
}

if (typeof duration === "string") {
let split = duration.match(/^([\d\.,]+)\s?([a-zA-Z]+)$/);
let split = duration.match(/^([\d\.,]+)\s?(\w+)$/);

if (split.length === 3) {
let len = parseFloat(split[1]);
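The apicache hunk above narrows the duration parser from \w+ to alphabetic units only. A quick sketch of what that regex extracts (the multiplier table is an assumption for illustration, not apicache's own):

```js
// Parses strings like "5 minutes" into milliseconds.
function parseDuration(duration) {
    const split = duration.match(/^([\d\.,]+)\s?([a-zA-Z]+)$/);
    if (!split || split.length !== 3) {
        return null;
    }
    const len = parseFloat(split[1]);
    const unitMs = { second: 1000, minute: 60000, hour: 3600000 }[split[2].replace(/s$/, "")];
    return unitMs ? len * unitMs : null;
}

console.log(parseDuration("5 minutes")); // 300000
```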
@@ -1,21 +0,0 @@
MIT License

Copyright (c) 2021 CatButtes

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@@ -1,77 +0,0 @@
'use strict';
// Original file https://raw.githubusercontent.com/elasticio/node-ntlm-client/master/lib/flags.js
module.exports.NTLMFLAG_NEGOTIATE_UNICODE = 1 << 0;
/* Indicates that Unicode strings are supported for use in security buffer
data. */
module.exports.NTLMFLAG_NEGOTIATE_OEM = 1 << 1;
/* Indicates that OEM strings are supported for use in security buffer data. */
module.exports.NTLMFLAG_REQUEST_TARGET = 1 << 2;
/* Requests that the server's authentication realm be included in the Type 2
message. */
/* unknown (1<<3) */
module.exports.NTLMFLAG_NEGOTIATE_SIGN = 1 << 4;
/* Specifies that authenticated communication between the client and server
should carry a digital signature (message integrity). */
module.exports.NTLMFLAG_NEGOTIATE_SEAL = 1 << 5;
/* Specifies that authenticated communication between the client and server
should be encrypted (message confidentiality). */
module.exports.NTLMFLAG_NEGOTIATE_DATAGRAM_STYLE = 1 << 6;
/* Indicates that datagram authentication is being used. */
module.exports.NTLMFLAG_NEGOTIATE_LM_KEY = 1 << 7;
/* Indicates that the LAN Manager session key should be used for signing and
sealing authenticated communications. */
module.exports.NTLMFLAG_NEGOTIATE_NETWARE = 1 << 8;
/* unknown purpose */
module.exports.NTLMFLAG_NEGOTIATE_NTLM_KEY = 1 << 9;
/* Indicates that NTLM authentication is being used. */
/* unknown (1<<10) */
module.exports.NTLMFLAG_NEGOTIATE_ANONYMOUS = 1 << 11;
/* Sent by the client in the Type 3 message to indicate that an anonymous
context has been established. This also affects the response fields. */
module.exports.NTLMFLAG_NEGOTIATE_DOMAIN_SUPPLIED = 1 << 12;
/* Sent by the client in the Type 1 message to indicate that a desired
authentication realm is included in the message. */
module.exports.NTLMFLAG_NEGOTIATE_WORKSTATION_SUPPLIED = 1 << 13;
/* Sent by the client in the Type 1 message to indicate that the client
workstation's name is included in the message. */
module.exports.NTLMFLAG_NEGOTIATE_LOCAL_CALL = 1 << 14;
/* Sent by the server to indicate that the server and client are on the same
machine. Implies that the client may use a pre-established local security
context rather than responding to the challenge. */
module.exports.NTLMFLAG_NEGOTIATE_ALWAYS_SIGN = 1 << 15;
/* Indicates that authenticated communication between the client and server
should be signed with a "dummy" signature. */
module.exports.NTLMFLAG_TARGET_TYPE_DOMAIN = 1 << 16;
/* Sent by the server in the Type 2 message to indicate that the target
authentication realm is a domain. */
module.exports.NTLMFLAG_TARGET_TYPE_SERVER = 1 << 17;
/* Sent by the server in the Type 2 message to indicate that the target
authentication realm is a server. */
module.exports.NTLMFLAG_TARGET_TYPE_SHARE = 1 << 18;
/* Sent by the server in the Type 2 message to indicate that the target
authentication realm is a share. Presumably, this is for share-level
authentication. Usage is unclear. */
module.exports.NTLMFLAG_NEGOTIATE_NTLM2_KEY = 1 << 19;
/* Indicates that the NTLM2 signing and sealing scheme should be used for
protecting authenticated communications. */
module.exports.NTLMFLAG_REQUEST_INIT_RESPONSE = 1 << 20;
/* unknown purpose */
module.exports.NTLMFLAG_REQUEST_ACCEPT_RESPONSE = 1 << 21;
/* unknown purpose */
module.exports.NTLMFLAG_REQUEST_NONNT_SESSION_KEY = 1 << 22;
/* unknown purpose */
module.exports.NTLMFLAG_NEGOTIATE_TARGET_INFO = 1 << 23;
/* Sent by the server in the Type 2 message to indicate that it is including a
Target Information block in the message. */
/* unknown (1<24) */
/* unknown (1<25) */
/* unknown (1<26) */
/* unknown (1<27) */
/* unknown (1<28) */
module.exports.NTLMFLAG_NEGOTIATE_128 = 1 << 29;
/* Indicates that 128-bit encryption is supported. */
module.exports.NTLMFLAG_NEGOTIATE_KEY_EXCHANGE = 1 << 30;
/* Indicates that the client will provide an encrypted master key in
the "Session Key" field of the Type 3 message. */
module.exports.NTLMFLAG_NEGOTIATE_56 = 1 << 31;
//# sourceMappingURL=flags.js.map
@@ -1,122 +0,0 @@
'use strict';
// Original source at https://github.com/elasticio/node-ntlm-client/blob/master/lib/hash.js
var crypto = require('crypto');
function createLMResponse(challenge, lmhash) {
var buf = new Buffer.alloc(24), pwBuffer = new Buffer.alloc(21).fill(0);
lmhash.copy(pwBuffer);
calculateDES(pwBuffer.slice(0, 7), challenge).copy(buf);
calculateDES(pwBuffer.slice(7, 14), challenge).copy(buf, 8);
calculateDES(pwBuffer.slice(14), challenge).copy(buf, 16);
return buf;
}
function createLMHash(password) {
var buf = new Buffer.alloc(16), pwBuffer = new Buffer.alloc(14), magicKey = new Buffer.from('KGS!@#$%', 'ascii');
if (password.length > 14) {
buf.fill(0);
return buf;
}
pwBuffer.fill(0);
pwBuffer.write(password.toUpperCase(), 0, 'ascii');
return Buffer.concat([
calculateDES(pwBuffer.slice(0, 7), magicKey),
calculateDES(pwBuffer.slice(7), magicKey)
]);
}
function calculateDES(key, message) {
var desKey = new Buffer.alloc(8);
desKey[0] = key[0] & 0xFE;
desKey[1] = ((key[0] << 7) & 0xFF) | (key[1] >> 1);
desKey[2] = ((key[1] << 6) & 0xFF) | (key[2] >> 2);
desKey[3] = ((key[2] << 5) & 0xFF) | (key[3] >> 3);
desKey[4] = ((key[3] << 4) & 0xFF) | (key[4] >> 4);
desKey[5] = ((key[4] << 3) & 0xFF) | (key[5] >> 5);
desKey[6] = ((key[5] << 2) & 0xFF) | (key[6] >> 6);
desKey[7] = (key[6] << 1) & 0xFF;
for (var i = 0; i < 8; i++) {
var parity = 0;
for (var j = 1; j < 8; j++) {
parity += (desKey[i] >> j) % 2;
}
desKey[i] |= (parity % 2) === 0 ? 1 : 0;
}
var des = crypto.createCipheriv('DES-ECB', desKey, '');
return des.update(message);
}
function createNTLMResponse(challenge, ntlmhash) {
var buf = new Buffer.alloc(24), ntlmBuffer = new Buffer.alloc(21).fill(0);
ntlmhash.copy(ntlmBuffer);
calculateDES(ntlmBuffer.slice(0, 7), challenge).copy(buf);
calculateDES(ntlmBuffer.slice(7, 14), challenge).copy(buf, 8);
calculateDES(ntlmBuffer.slice(14), challenge).copy(buf, 16);
return buf;
}
function createNTLMHash(password) {
var md4sum = crypto.createHash('md4');
md4sum.update(new Buffer.from(password, 'ucs2'));
return md4sum.digest();
}
function createNTLMv2Hash(ntlmhash, username, authTargetName) {
var hmac = crypto.createHmac('md5', ntlmhash);
hmac.update(new Buffer.from(username.toUpperCase() + authTargetName, 'ucs2'));
return hmac.digest();
}
function createLMv2Response(type2message, username, ntlmhash, nonce, targetName) {
var buf = new Buffer.alloc(24), ntlm2hash = createNTLMv2Hash(ntlmhash, username, targetName), hmac = crypto.createHmac('md5', ntlm2hash);
//server challenge
type2message.challenge.copy(buf, 8);
//client nonce
buf.write(nonce || createPseudoRandomValue(16), 16, 'hex');
//create hash
hmac.update(buf.slice(8));
var hashedBuffer = hmac.digest();
hashedBuffer.copy(buf);
return buf;
}
function createNTLMv2Response(type2message, username, ntlmhash, nonce, targetName) {
var buf = new Buffer.alloc(48 + type2message.targetInfo.buffer.length), ntlm2hash = createNTLMv2Hash(ntlmhash, username, targetName), hmac = crypto.createHmac('md5', ntlm2hash);
//the first 8 bytes are spare to store the hashed value before the blob
//server challenge
type2message.challenge.copy(buf, 8);
//blob signature
buf.writeUInt32BE(0x01010000, 16);
//reserved
buf.writeUInt32LE(0, 20);
//timestamp
//TODO: we are loosing precision here since js is not able to handle those large integers
// maybe think about a different solution here
// 11644473600000 = diff between 1970 and 1601
var timestamp = ((Date.now() + 11644473600000) * 10000).toString(16);
var timestampLow = Number('0x' + timestamp.substring(Math.max(0, timestamp.length - 8)));
var timestampHigh = Number('0x' + timestamp.substring(0, Math.max(0, timestamp.length - 8)));
buf.writeUInt32LE(timestampLow, 24, false);
buf.writeUInt32LE(timestampHigh, 28, false);
//random client nonce
buf.write(nonce || createPseudoRandomValue(16), 32, 'hex');
//zero
buf.writeUInt32LE(0, 40);
//complete target information block from type 2 message
type2message.targetInfo.buffer.copy(buf, 44);
//zero
buf.writeUInt32LE(0, 44 + type2message.targetInfo.buffer.length);
hmac.update(buf.slice(8));
var hashedBuffer = hmac.digest();
hashedBuffer.copy(buf);
return buf;
}
function createPseudoRandomValue(length) {
var str = '';
while (str.length < length) {
str += Math.floor(Math.random() * 16).toString(16);
}
return str;
}
module.exports = {
createLMHash: createLMHash,
createNTLMHash: createNTLMHash,
createLMResponse: createLMResponse,
createNTLMResponse: createNTLMResponse,
createLMv2Response: createLMv2Response,
createNTLMv2Response: createNTLMv2Response,
createPseudoRandomValue: createPseudoRandomValue
};
//# sourceMappingURL=hash.js.map
Some files were not shown because too many files have changed in this diff.