mirror of
https://github.com/louislam/uptime-kuma.git
synced 2025-09-12 06:26:59 +08:00
Compare commits
277 Commits
1.22.0-bet
...
improve-he
Author | SHA1 | Date | |
---|---|---|---|
|
df2f76dbe3 | ||
|
31c00081fa | ||
|
fe431d6385 | ||
|
ce0289855d | ||
|
c0174dc1c4 | ||
|
e745bd69da | ||
|
72741ebb10 | ||
|
ff88018b0c | ||
|
db3a7d69fe | ||
|
d33b4f46e4 | ||
|
dd62bd3d91 | ||
|
8a92054c2b | ||
|
921c8f8100 | ||
|
da4f4e3d76 | ||
|
e001fd7d1c | ||
|
83307b3446 | ||
|
3e0f5f4231 | ||
|
de83863627 | ||
|
d7e0ff4b8c | ||
|
112cc3d7b8 | ||
|
b44f6e4af2 | ||
|
18a2a8eb1e | ||
|
e9585ccbf4 | ||
|
5e39b0daf6 | ||
|
71fca3f0c3 | ||
|
2921f33c24 | ||
|
4e0bb394db | ||
|
ced576feba | ||
|
ceb5708bfd | ||
|
23fdd32de0 | ||
|
36777c5eff | ||
|
439b6517d1 | ||
|
c6e68fac97 | ||
|
bce4835362 | ||
|
a032e11a2e | ||
|
d231a05526 | ||
|
f4967615c0 | ||
|
67b1974718 | ||
|
062e9db2a7 | ||
|
90e1b4cf56 | ||
|
43698b23c6 | ||
|
f19b27c1c7 | ||
|
42b5d30a33 | ||
|
cda77c1a32 | ||
|
157cf47d38 | ||
|
09c6798a30 | ||
|
587d9e4781 | ||
|
ae2867e305 | ||
|
65ffd77d35 | ||
|
9dd652733e | ||
|
0a59fef7d8 | ||
|
27ce47277b | ||
|
7f68e4a987 | ||
|
149d08ecbb | ||
|
eb6167aaf1 | ||
|
db66195f7d | ||
|
0ab3507faf | ||
|
a0bd4b248b | ||
|
59245e624d | ||
|
c0eb0cb42c | ||
|
573f158f7f | ||
|
6413d4cbdf | ||
|
cf5a04bc5e | ||
|
a0203372ce | ||
|
5ccf2d23fc | ||
|
bf68e0a7bc | ||
|
6984596568 | ||
|
f3562864ee | ||
|
9058a829a1 | ||
|
0684313ec9 | ||
|
d7e12dc92d | ||
|
1d9a28e9ab | ||
|
ddbf367011 | ||
|
50d4091ded | ||
|
66cfbd02c3 | ||
|
688f23035b | ||
|
7701e2ad36 | ||
|
8e72d6f534 | ||
|
278b88a9d9 | ||
|
084cf01fcd | ||
|
f6c1b92fc6 | ||
|
25c8196641 | ||
|
baf5613dfa | ||
|
695691468c | ||
|
4891ec4527 | ||
|
e2a87eb430 | ||
|
80927332cb | ||
|
a0eb733d54 | ||
|
21d556528f | ||
|
357466cc90 | ||
|
b038d09349 | ||
|
5dd4231e56 | ||
|
c6d0c431bd | ||
|
d1b7f4c834 | ||
|
5c4180fb45 | ||
|
345e61abca | ||
|
dd1526deff | ||
|
be26bb75d9 | ||
|
99fb5836e2 | ||
|
2f5a565ce4 | ||
|
973db9d4b2 | ||
|
2e75142fe5 | ||
|
ad3ffacf45 | ||
|
6f4af30701 | ||
|
b1f266ceb1 | ||
|
6bece8796e | ||
|
e7d1b4e14a | ||
|
e5c6783781 | ||
|
ac68a35d3a | ||
|
d825dbf828 | ||
|
cfb4bbc6cb | ||
|
293015ff35 | ||
|
18d8b3a8e0 | ||
|
d55794e1a5 | ||
|
3d50572dd7 | ||
|
cdb38d49eb | ||
|
80b55786a4 | ||
|
fe40d819bd | ||
|
3dbd8277f0 | ||
|
771d21c4ad | ||
|
ed6b4e5ae5 | ||
|
3b9c95a8a8 | ||
|
cdf6922bdd | ||
|
9954ba82e7 | ||
|
19873e5b9e | ||
|
13ae878ee8 | ||
|
4ea5771f97 | ||
|
1774bb86dc | ||
|
c583037dff | ||
|
8223121cd8 | ||
|
ff22010330 | ||
|
a9d691a6a8 | ||
|
7c529d8f83 | ||
|
4fe0891a60 | ||
|
bd5496d267 | ||
|
a0736e04b2 | ||
|
df8fcffb19 | ||
|
9da712054a | ||
|
af78da1dd9 | ||
|
9e041f219b | ||
|
8c60e902e1 | ||
|
ccc39b9516 | ||
|
de74efb2e6 | ||
|
e26abc3156 | ||
|
d286c534bd | ||
|
7975caf29e | ||
|
16a1a66e09 | ||
|
9ee2780e9e | ||
|
a386f1fc9e | ||
|
35154ef9c5 | ||
|
1baa592824 | ||
|
9882fc65b1 | ||
|
3e5e7e6e32 | ||
|
0e725569e5 | ||
|
afcfb7e19c | ||
|
eaee55fc8f | ||
|
affac0a97b | ||
|
a12e7eba72 | ||
|
2e2747fb52 | ||
|
5388a37a26 | ||
|
b2a1bd5214 | ||
|
a2d147b88e | ||
|
4f6035899d | ||
|
dd77baabe1 | ||
|
820f2eec9f | ||
|
4b913c8b4c | ||
|
d01c7c3faa | ||
|
772a946234 | ||
|
f8c9a20afd | ||
|
cea894cc6d | ||
|
79b38e0e7b | ||
|
7cc9783436 | ||
|
21405f71b5 | ||
|
b4b6e07e6b | ||
|
cf4220901b | ||
|
f3996fdef4 | ||
|
1dfe5227ad | ||
|
4ead0609af | ||
|
a8bf52b1e0 | ||
|
ede6d90497 | ||
|
4b8e86efb7 | ||
|
5f706e1921 | ||
|
722c64a4d1 | ||
|
23de52ca5a | ||
|
3d3fb357f9 | ||
|
3b9aa00126 | ||
|
29267e5c2e | ||
|
3e801323b6 | ||
|
b80fd81d24 | ||
|
9cb776405a | ||
|
de7ae3e2db | ||
|
e49ced0524 | ||
|
7e782edf44 | ||
|
43e1e3c272 | ||
|
dc4cf7087f | ||
|
65a0a2b2b5 | ||
|
2d269c3639 | ||
|
11bad53709 | ||
|
9f7782b1c1 | ||
|
9fb8f94e22 | ||
|
7a34103da6 | ||
|
8955c3816b | ||
|
7761e9a05e | ||
|
c9d6e576ab | ||
|
97d38ee1a8 | ||
|
cc94609423 | ||
|
149f8c3646 | ||
|
bdcbd6389b | ||
|
c06b929529 | ||
|
d3ecdb8456 | ||
|
4e420ee3ff | ||
|
a00561ff09 | ||
|
6af44e0780 | ||
|
596402e71f | ||
|
62bbc1cf55 | ||
|
19fc7d31e6 | ||
|
6708eed121 | ||
|
3c56a6f395 | ||
|
2b46693995 | ||
|
c61a3d360f | ||
|
392f95cdd2 | ||
|
dfc6e5ea5b | ||
|
ba4d925374 | ||
|
d37c33ad42 | ||
|
c04194191f | ||
|
de9ad0fe60 | ||
|
8884c2108b | ||
|
ac8ca36895 | ||
|
37ae8eb44a | ||
|
8897385690 | ||
|
6132a45c7c | ||
|
f68452c47a | ||
|
9d71e34a83 | ||
|
1fa8c0f9fe | ||
|
9a8bea5761 | ||
|
376d84c742 | ||
|
f3fe392ec4 | ||
|
9c3bb67b6b | ||
|
5200e10aab | ||
|
f1a396b0f7 | ||
|
f70b971810 | ||
|
9825b33ef3 | ||
|
00f733d352 | ||
|
fd10897988 | ||
|
317024ed72 | ||
|
f604d96c5b | ||
|
f30f00655f | ||
|
891f09def7 | ||
|
6b5e179bb0 | ||
|
f653aba735 | ||
|
682f8e52a8 | ||
|
171aff1226 | ||
|
38fab198bb | ||
|
8d5679a8ab | ||
|
bb7de6aa88 | ||
|
f2633a5d01 | ||
|
4056951915 | ||
|
e4183ee2b7 | ||
|
db4663d6be | ||
|
68ead3414d | ||
|
e06c3ee5d4 | ||
|
d4752b65de | ||
|
dc4d2a77bb | ||
|
33d9c1bbb1 | ||
|
27eddb7253 | ||
|
0b40c65139 | ||
|
6de0c6a90c | ||
|
94b69935fe | ||
|
3f30feaefb | ||
|
a4de93f976 | ||
|
5e976afb27 | ||
|
00b52f23cf | ||
|
ba1f7762b1 | ||
|
73f7fbabd3 | ||
|
0039f1f521 | ||
|
b0d39b44ce | ||
|
301b2007a0 |
28
.devcontainer/README.md
Normal file
28
.devcontainer/README.md
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
# Codespaces
|
||||||
|
|
||||||
|
You can modifiy Uptime Kuma in your browser without setting up a local development.
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
|
1. Click `Code` -> `Create codespace on master`
|
||||||
|
2. Wait a few minutes until you see there are two exposed ports
|
||||||
|
3. Go to the `3000` url, see if it is working
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
|
## Frontend
|
||||||
|
|
||||||
|
Since the frontend is using [Vite.js](https://vitejs.dev/), all changes in this area will be hot-reloaded.
|
||||||
|
You don't need to restart the frontend, unless you try to add a new frontend dependency.
|
||||||
|
|
||||||
|
## Backend
|
||||||
|
|
||||||
|
The backend does not automatically hot-reload.
|
||||||
|
You will need to restart the backend after changing something using these steps:
|
||||||
|
|
||||||
|
1. Click `Terminal`
|
||||||
|
2. Click `Codespaces: server-dev` in the right panel
|
||||||
|
3. Press `Ctrl + C` to stop the server
|
||||||
|
4. Press `Up` to run `npm run start-server-dev`
|
||||||
|
|
||||||
|

|
22
.devcontainer/devcontainer.json
Normal file
22
.devcontainer/devcontainer.json
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
{
|
||||||
|
"image": "mcr.microsoft.com/devcontainers/javascript-node:dev-18-bookworm",
|
||||||
|
"features": {
|
||||||
|
"ghcr.io/devcontainers/features/github-cli:1": {}
|
||||||
|
},
|
||||||
|
"updateContentCommand": "npm ci",
|
||||||
|
"postCreateCommand": "",
|
||||||
|
"postAttachCommand": {
|
||||||
|
"frontend-dev": "npm run start-frontend-devcontainer",
|
||||||
|
"server-dev": "npm run start-server-dev",
|
||||||
|
"open-port": "gh codespace ports visibility 3001:public -c $CODESPACE_NAME"
|
||||||
|
},
|
||||||
|
"customizations": {
|
||||||
|
"vscode": {
|
||||||
|
"extensions": [
|
||||||
|
"streetsidesoftware.code-spell-checker",
|
||||||
|
"dbaeumer.vscode-eslint"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"forwardPorts": [3000, 3001]
|
||||||
|
}
|
@@ -1,6 +1,6 @@
|
|||||||
/.idea
|
/.idea
|
||||||
/node_modules
|
/node_modules
|
||||||
/data
|
/data*
|
||||||
/cypress
|
/cypress
|
||||||
/out
|
/out
|
||||||
/test
|
/test
|
||||||
@@ -18,6 +18,7 @@ README.md
|
|||||||
.vscode
|
.vscode
|
||||||
.eslint*
|
.eslint*
|
||||||
.stylelint*
|
.stylelint*
|
||||||
|
/.devcontainer
|
||||||
/.github
|
/.github
|
||||||
yarn.lock
|
yarn.lock
|
||||||
app.json
|
app.json
|
||||||
@@ -35,6 +36,7 @@ tsconfig.json
|
|||||||
/extra/healthcheck
|
/extra/healthcheck
|
||||||
extra/exe-builder
|
extra/exe-builder
|
||||||
|
|
||||||
|
|
||||||
### .gitignore content (commented rules are duplicated)
|
### .gitignore content (commented rules are duplicated)
|
||||||
|
|
||||||
#node_modules
|
#node_modules
|
||||||
|
42
.eslintrc.js
42
.eslintrc.js
@@ -14,6 +14,7 @@ module.exports = {
|
|||||||
extends: [
|
extends: [
|
||||||
"eslint:recommended",
|
"eslint:recommended",
|
||||||
"plugin:vue/vue3-recommended",
|
"plugin:vue/vue3-recommended",
|
||||||
|
"plugin:jsdoc/recommended-error",
|
||||||
],
|
],
|
||||||
parser: "vue-eslint-parser",
|
parser: "vue-eslint-parser",
|
||||||
parserOptions: {
|
parserOptions: {
|
||||||
@@ -21,6 +22,9 @@ module.exports = {
|
|||||||
sourceType: "module",
|
sourceType: "module",
|
||||||
requireConfigFile: false,
|
requireConfigFile: false,
|
||||||
},
|
},
|
||||||
|
plugins: [
|
||||||
|
"jsdoc"
|
||||||
|
],
|
||||||
rules: {
|
rules: {
|
||||||
"yoda": "error",
|
"yoda": "error",
|
||||||
eqeqeq: [ "warn", "smart" ],
|
eqeqeq: [ "warn", "smart" ],
|
||||||
@@ -97,7 +101,43 @@ module.exports = {
|
|||||||
}],
|
}],
|
||||||
"no-control-regex": "off",
|
"no-control-regex": "off",
|
||||||
"one-var": [ "error", "never" ],
|
"one-var": [ "error", "never" ],
|
||||||
"max-statements-per-line": [ "error", { "max": 1 }]
|
"max-statements-per-line": [ "error", { "max": 1 }],
|
||||||
|
"jsdoc/check-tag-names": [
|
||||||
|
"error",
|
||||||
|
{
|
||||||
|
"definedTags": [ "link" ]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"jsdoc/no-undefined-types": "off",
|
||||||
|
"jsdoc/no-defaults": [
|
||||||
|
"error",
|
||||||
|
{ "noOptionalParamNames": true }
|
||||||
|
],
|
||||||
|
"jsdoc/require-throws": "warn",
|
||||||
|
"jsdoc/require-jsdoc": [
|
||||||
|
"error",
|
||||||
|
{
|
||||||
|
"require": {
|
||||||
|
"FunctionDeclaration": true,
|
||||||
|
"MethodDefinition": true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"jsdoc/no-blank-block-descriptions": "error",
|
||||||
|
"jsdoc/require-returns-description": "warn",
|
||||||
|
"jsdoc/require-returns-check": [
|
||||||
|
"error",
|
||||||
|
{ "reportMissingReturnForUndefinedTypes": false }
|
||||||
|
],
|
||||||
|
"jsdoc/require-returns": [
|
||||||
|
"warn",
|
||||||
|
{
|
||||||
|
"forceRequireReturn": true,
|
||||||
|
"forceReturnsWithAsync": true
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"jsdoc/require-param-type": "warn",
|
||||||
|
"jsdoc/require-param-description": "warn"
|
||||||
},
|
},
|
||||||
"overrides": [
|
"overrides": [
|
||||||
{
|
{
|
||||||
|
4
.github/ISSUE_TEMPLATE/ask-for-help.yaml
vendored
4
.github/ISSUE_TEMPLATE/ask-for-help.yaml
vendored
@@ -44,7 +44,7 @@ body:
|
|||||||
id: operating-system
|
id: operating-system
|
||||||
attributes:
|
attributes:
|
||||||
label: "💻 Operating System and Arch"
|
label: "💻 Operating System and Arch"
|
||||||
description: "Which OS is your server/device running on?"
|
description: "Which OS is your server/device running on? (For Replit, please do not report this bug)"
|
||||||
placeholder: "Ex. Ubuntu 20.04 x86"
|
placeholder: "Ex. Ubuntu 20.04 x86"
|
||||||
validations:
|
validations:
|
||||||
required: true
|
required: true
|
||||||
@@ -52,7 +52,7 @@ body:
|
|||||||
id: browser-vendor
|
id: browser-vendor
|
||||||
attributes:
|
attributes:
|
||||||
label: "🌐 Browser"
|
label: "🌐 Browser"
|
||||||
description: "Which browser are you running on?"
|
description: "Which browser are you running on? (For Replit, please do not report this bug)"
|
||||||
placeholder: "Ex. Google Chrome 95.0.4638.69"
|
placeholder: "Ex. Google Chrome 95.0.4638.69"
|
||||||
validations:
|
validations:
|
||||||
required: true
|
required: true
|
||||||
|
69
.github/workflows/auto-test.yml
vendored
69
.github/workflows/auto-test.yml
vendored
@@ -1,4 +1,4 @@
|
|||||||
# This workflow will do a clean install of node dependencies, cache/restore them, build the source code and run tests across different versions of node
|
# This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node
|
||||||
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions
|
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions
|
||||||
|
|
||||||
name: Auto Test
|
name: Auto Test
|
||||||
@@ -9,7 +9,7 @@ on:
|
|||||||
paths-ignore:
|
paths-ignore:
|
||||||
- '*.md'
|
- '*.md'
|
||||||
pull_request:
|
pull_request:
|
||||||
branches: [ master ]
|
branches: [ master, 2.0.X ]
|
||||||
paths-ignore:
|
paths-ignore:
|
||||||
- '*.md'
|
- '*.md'
|
||||||
|
|
||||||
@@ -21,8 +21,8 @@ jobs:
|
|||||||
|
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
os: [macos-latest, ubuntu-latest, windows-latest]
|
os: [macos-latest, ubuntu-latest, windows-latest, ARM64]
|
||||||
node: [ 14, 16, 18, 20 ]
|
node: [ 14, 20 ]
|
||||||
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/
|
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
@@ -33,7 +33,7 @@ jobs:
|
|||||||
uses: actions/setup-node@v3
|
uses: actions/setup-node@v3
|
||||||
with:
|
with:
|
||||||
node-version: ${{ matrix.node }}
|
node-version: ${{ matrix.node }}
|
||||||
cache: 'npm'
|
- run: npm install npm@latest -g
|
||||||
- run: npm install
|
- run: npm install
|
||||||
- run: npm run build
|
- run: npm run build
|
||||||
- run: npm test
|
- run: npm test
|
||||||
@@ -41,6 +41,29 @@ jobs:
|
|||||||
HEADLESS_TEST: 1
|
HEADLESS_TEST: 1
|
||||||
JUST_FOR_TEST: ${{ secrets.JUST_FOR_TEST }}
|
JUST_FOR_TEST: ${{ secrets.JUST_FOR_TEST }}
|
||||||
|
|
||||||
|
# As a lot of dev dependencies are not supported on ARMv7, we have to test it separately and just test if `npm ci --production` works
|
||||||
|
armv7-simple-test:
|
||||||
|
needs: [ check-linters ]
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
timeout-minutes: 15
|
||||||
|
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
os: [ ARMv7 ]
|
||||||
|
node: [ 14, 20 ]
|
||||||
|
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- run: git config --global core.autocrlf false # Mainly for Windows
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Use Node.js ${{ matrix.node }}
|
||||||
|
uses: actions/setup-node@v3
|
||||||
|
with:
|
||||||
|
node-version: ${{ matrix.node }}
|
||||||
|
- run: npm install npm@latest -g
|
||||||
|
- run: npm ci --production
|
||||||
|
|
||||||
check-linters:
|
check-linters:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
@@ -48,29 +71,28 @@ jobs:
|
|||||||
- run: git config --global core.autocrlf false # Mainly for Windows
|
- run: git config --global core.autocrlf false # Mainly for Windows
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Use Node.js 14
|
- name: Use Node.js 20
|
||||||
uses: actions/setup-node@v3
|
uses: actions/setup-node@v3
|
||||||
with:
|
with:
|
||||||
node-version: 14
|
node-version: 20
|
||||||
cache: 'npm'
|
|
||||||
- run: npm install
|
- run: npm install
|
||||||
- run: npm run lint
|
- run: npm run lint
|
||||||
|
|
||||||
e2e-tests:
|
# TODO: Temporarily disable, as it cannot pass the test in 2.0.0 yet
|
||||||
needs: [ check-linters ]
|
# e2e-tests:
|
||||||
runs-on: ubuntu-latest
|
# needs: [ check-linters ]
|
||||||
steps:
|
# runs-on: ubuntu-latest
|
||||||
- run: git config --global core.autocrlf false # Mainly for Windows
|
# steps:
|
||||||
- uses: actions/checkout@v3
|
# - run: git config --global core.autocrlf false # Mainly for Windows
|
||||||
|
# - uses: actions/checkout@v3
|
||||||
- name: Use Node.js 14
|
#
|
||||||
uses: actions/setup-node@v3
|
# - name: Use Node.js 14
|
||||||
with:
|
# uses: actions/setup-node@v3
|
||||||
node-version: 14
|
# with:
|
||||||
cache: 'npm'
|
# node-version: 14
|
||||||
- run: npm install
|
# - run: npm install
|
||||||
- run: npm run build
|
# - run: npm run build
|
||||||
- run: npm run cy:test
|
# - run: npm run cy:test
|
||||||
|
|
||||||
frontend-unit-tests:
|
frontend-unit-tests:
|
||||||
needs: [ check-linters ]
|
needs: [ check-linters ]
|
||||||
@@ -83,7 +105,6 @@ jobs:
|
|||||||
uses: actions/setup-node@v3
|
uses: actions/setup-node@v3
|
||||||
with:
|
with:
|
||||||
node-version: 14
|
node-version: 14
|
||||||
cache: 'npm'
|
|
||||||
- run: npm install
|
- run: npm install
|
||||||
- run: npm run build
|
- run: npm run build
|
||||||
- run: npm run cy:run:unit
|
- run: npm run cy:run:unit
|
||||||
|
3
.github/workflows/json-yaml-validate.yml
vendored
3
.github/workflows/json-yaml-validate.yml
vendored
@@ -1,4 +1,4 @@
|
|||||||
name: json-yaml-validate
|
name: json-yaml-validate
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
@@ -6,6 +6,7 @@ on:
|
|||||||
pull_request:
|
pull_request:
|
||||||
branches:
|
branches:
|
||||||
- master
|
- master
|
||||||
|
- 2.0.X
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
permissions:
|
permissions:
|
||||||
|
1
.gitignore
vendored
1
.gitignore
vendored
@@ -7,6 +7,7 @@ dist-ssr
|
|||||||
|
|
||||||
/data
|
/data
|
||||||
!/data/.gitkeep
|
!/data/.gitkeep
|
||||||
|
/data*
|
||||||
.vscode
|
.vscode
|
||||||
|
|
||||||
/private
|
/private
|
||||||
|
@@ -10,6 +10,7 @@
|
|||||||
"color-function-notation": "legacy",
|
"color-function-notation": "legacy",
|
||||||
"shorthand-property-no-redundant-values": null,
|
"shorthand-property-no-redundant-values": null,
|
||||||
"color-hex-length": null,
|
"color-hex-length": null,
|
||||||
"declaration-block-no-redundant-longhand-properties": null
|
"declaration-block-no-redundant-longhand-properties": null,
|
||||||
|
"at-rule-no-unknown": null
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -34,30 +34,30 @@ Yes or no, it depends on what you will try to do. Since I don't want to waste yo
|
|||||||
|
|
||||||
Here are some references:
|
Here are some references:
|
||||||
|
|
||||||
✅ Usually Accept:
|
### ✅ Usually accepted:
|
||||||
- Bug fix
|
- Bug fix
|
||||||
- Security fix
|
- Security fix
|
||||||
- Adding notification providers
|
- Adding notification providers
|
||||||
- Adding new language files (You should go to https://weblate.kuma.pet for existing languages)
|
- Adding new language files (see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md))
|
||||||
- Adding new language keys: `$t("...")`
|
- Adding new language keys: `$t("...")`
|
||||||
|
|
||||||
⚠️ Discussion First
|
### ⚠️ Discussion required:
|
||||||
- Large pull requests
|
- Large pull requests
|
||||||
- New features
|
- New features
|
||||||
|
|
||||||
❌ Won't Merge
|
### ❌ Won't be merged:
|
||||||
- A dedicated pr for translating existing languages (You can now translate on https://weblate.kuma.pet)
|
- A dedicated pr for translating existing languages (see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md))
|
||||||
- Do not pass auto test
|
- Do not pass the auto test
|
||||||
- Any breaking changes
|
- Any breaking changes
|
||||||
- Duplicated pull request
|
- Duplicated pull requests
|
||||||
- Buggy
|
- Buggy
|
||||||
- UI/UX is not close to Uptime Kuma
|
- UI/UX is not close to Uptime Kuma
|
||||||
- Existing logic is completely modified or deleted for no reason
|
- Modifications or deletions of existing logic without a valid reason.
|
||||||
- A function that is completely out of scope
|
- Adding functions that is completely out of scope
|
||||||
- Convert existing code into other programming languages
|
- Converting existing code into other programming languages
|
||||||
- Unnecessary large code changes (Hard to review, causes code conflicts to other pull requests)
|
- Unnecessarily large code changes that are hard to review and cause conflicts with other PRs.
|
||||||
|
|
||||||
The above cases cannot cover all situations.
|
The above cases may not cover all possible situations.
|
||||||
|
|
||||||
I (@louislam) have the final say. If your pull request does not meet my expectations, I will reject it, no matter how much time you spend on it. Therefore, it is essential to have a discussion beforehand.
|
I (@louislam) have the final say. If your pull request does not meet my expectations, I will reject it, no matter how much time you spend on it. Therefore, it is essential to have a discussion beforehand.
|
||||||
|
|
||||||
@@ -106,11 +106,11 @@ I personally do not like something that requires so many configurations before y
|
|||||||
|
|
||||||
## Tools
|
## Tools
|
||||||
|
|
||||||
- Node.js >= 14
|
- [`Node.js`](https://nodejs.org/) >= 14
|
||||||
- NPM >= 8.5
|
- [`npm`](https://www.npmjs.com/) >= 8.5
|
||||||
- Git
|
- [`git`](https://git-scm.com/)
|
||||||
- IDE that supports ESLint and EditorConfig (I am using IntelliJ IDEA)
|
- IDE that supports [`ESLint`](https://eslint.org/) and EditorConfig (I am using [`IntelliJ IDEA`](https://www.jetbrains.com/idea/))
|
||||||
- A SQLite GUI tool (SQLite Expert Personal is suggested)
|
- A SQLite GUI tool (f.ex. [`SQLite Expert Personal`](https://www.sqliteexpert.com/download.html) or [`DBeaver Community`](https://dbeaver.io/download/))
|
||||||
|
|
||||||
## Install Dependencies for Development
|
## Install Dependencies for Development
|
||||||
|
|
||||||
@@ -214,11 +214,21 @@ Since previously updating Vite 2.5.10 to 2.6.0 broke the application completely,
|
|||||||
|
|
||||||
Patch release = the third digit ([Semantic Versioning](https://semver.org/))
|
Patch release = the third digit ([Semantic Versioning](https://semver.org/))
|
||||||
|
|
||||||
If for maybe security reasons, a library must be updated. Then you must need to check if there are any breaking changes.
|
If for security / bug / other reasons, a library must be updated, breaking changes need to be checked by the person proposing the change.
|
||||||
|
|
||||||
## Translations
|
## Translations
|
||||||
|
|
||||||
Please read: https://github.com/louislam/uptime-kuma/tree/master/src/languages
|
Please add **all** the strings which are translatable to `src/lang/en.json` (If translation keys are ommited, they can not be translated).
|
||||||
|
|
||||||
|
**Don't include any other languages in your inital Pull-Request** (even if this is your mother tounge), to avoid merge-conflicts between weblate and `master`.
|
||||||
|
The translations can then (after merging a PR into `master`) be translated by awesome people donating their language-skills.
|
||||||
|
|
||||||
|
If you want to help by translating Uptime Kuma into your language, please visit the [instructions on how to translate using weblate](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).
|
||||||
|
|
||||||
|
## Spelling & Grammar
|
||||||
|
|
||||||
|
Feel free to correct the grammar in the documentation or code.
|
||||||
|
My mother language is not english and my grammar is not that great.
|
||||||
|
|
||||||
## Wiki
|
## Wiki
|
||||||
|
|
||||||
|
31
README.md
31
README.md
@@ -1,16 +1,16 @@
|
|||||||
|
<div align="center" width="100%">
|
||||||
|
<img src="./public/icon.svg" width="128" alt="" />
|
||||||
|
</div>
|
||||||
|
|
||||||
# Uptime Kuma
|
# Uptime Kuma
|
||||||
|
|
||||||
|
Uptime Kuma is an easy-to-use self-hosted monitoring tool.
|
||||||
|
|
||||||
<a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/stars/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/pulls/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/v/louislam/uptime-kuma/latest?label=docker%20image%20ver." /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/last-commit/louislam/uptime-kuma" /></a> <a target="_blank" href="https://opencollective.com/uptime-kuma"><img src="https://opencollective.com/uptime-kuma/total/badge.svg?label=Open%20Collective%20Backers&color=brightgreen" /></a>
|
<a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/stars/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/pulls/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/v/louislam/uptime-kuma/latest?label=docker%20image%20ver." /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/last-commit/louislam/uptime-kuma" /></a> <a target="_blank" href="https://opencollective.com/uptime-kuma"><img src="https://opencollective.com/uptime-kuma/total/badge.svg?label=Open%20Collective%20Backers&color=brightgreen" /></a>
|
||||||
[](https://github.com/sponsors/louislam) <a href="https://weblate.kuma.pet/projects/uptime-kuma/uptime-kuma/">
|
[](https://github.com/sponsors/louislam) <a href="https://weblate.kuma.pet/projects/uptime-kuma/uptime-kuma/">
|
||||||
<img src="https://weblate.kuma.pet/widgets/uptime-kuma/-/svg-badge.svg" alt="Translation status" />
|
<img src="https://weblate.kuma.pet/widgets/uptime-kuma/-/svg-badge.svg" alt="Translation status" />
|
||||||
</a>
|
</a>
|
||||||
|
|
||||||
<div align="center" width="100%">
|
|
||||||
<img src="./public/icon.svg" width="128" alt="" />
|
|
||||||
</div>
|
|
||||||
|
|
||||||
Uptime Kuma is an easy-to-use self-hosted monitoring tool.
|
|
||||||
|
|
||||||
<img src="https://user-images.githubusercontent.com/1336778/212262296-e6205815-ad62-488c-83ec-a5b0d0689f7c.jpg" width="700" alt="" />
|
<img src="https://user-images.githubusercontent.com/1336778/212262296-e6205815-ad62-488c-83ec-a5b0d0689f7c.jpg" width="700" alt="" />
|
||||||
|
|
||||||
## 🥔 Live Demo
|
## 🥔 Live Demo
|
||||||
@@ -23,7 +23,7 @@ It is a temporary live demo, all data will be deleted after 10 minutes. Use the
|
|||||||
|
|
||||||
## ⭐ Features
|
## ⭐ Features
|
||||||
|
|
||||||
* Monitoring uptime for HTTP(s) / TCP / HTTP(s) Keyword / Ping / DNS Record / Push / Steam Game Server / Docker Containers
|
* Monitoring uptime for HTTP(s) / TCP / HTTP(s) Keyword / HTTP(s) Json Query / Ping / DNS Record / Push / Steam Game Server / Docker Containers
|
||||||
* Fancy, Reactive, Fast UI/UX
|
* Fancy, Reactive, Fast UI/UX
|
||||||
* Notifications via Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP), and [90+ notification services, click here for the full list](https://github.com/louislam/uptime-kuma/tree/master/src/components/notifications)
|
* Notifications via Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP), and [90+ notification services, click here for the full list](https://github.com/louislam/uptime-kuma/tree/master/src/components/notifications)
|
||||||
* 20 second intervals
|
* 20 second intervals
|
||||||
@@ -49,14 +49,14 @@ Uptime Kuma is now running on http://localhost:3001
|
|||||||
|
|
||||||
### 💪🏻 Non-Docker
|
### 💪🏻 Non-Docker
|
||||||
|
|
||||||
Requirements:
|
Requirements:
|
||||||
- Platform
|
- Platform
|
||||||
- ✅ Major Linux distros such as Debian, Ubuntu, CentOS, Fedora and ArchLinux etc.
|
- ✅ Major Linux distros such as Debian, Ubuntu, CentOS, Fedora and ArchLinux etc.
|
||||||
- ✅ Windows 10 (x64), Windows Server 2012 R2 (x64) or higher
|
- ✅ Windows 10 (x64), Windows Server 2012 R2 (x64) or higher
|
||||||
- ❌ Replit / Heroku
|
- ❌ Replit / Heroku
|
||||||
- [Node.js](https://nodejs.org/en/download/) 14 / 16 / 18 (20 is not supported)
|
- [Node.js](https://nodejs.org/en/download/) 14 / 16 / 18 / 20.4
|
||||||
- [npm](https://docs.npmjs.com/cli/) >= 7
|
- [npm](https://docs.npmjs.com/cli/) >= 7
|
||||||
- [Git](https://git-scm.com/downloads)
|
- [Git](https://git-scm.com/downloads)
|
||||||
- [pm2](https://pm2.keymetrics.io/) - For running Uptime Kuma in the background
|
- [pm2](https://pm2.keymetrics.io/) - For running Uptime Kuma in the background
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
@@ -71,7 +71,7 @@ npm run setup
|
|||||||
node server/server.js
|
node server/server.js
|
||||||
|
|
||||||
# (Recommended) Option 2. Run in background using PM2
|
# (Recommended) Option 2. Run in background using PM2
|
||||||
# Install PM2 if you don't have it:
|
# Install PM2 if you don't have it:
|
||||||
npm install pm2 -g && pm2 install pm2-logrotate
|
npm install pm2 -g && pm2 install pm2-logrotate
|
||||||
|
|
||||||
# Start Server
|
# Start Server
|
||||||
@@ -93,7 +93,7 @@ pm2 save && pm2 startup
|
|||||||
|
|
||||||
### Windows Portable (x64)
|
### Windows Portable (x64)
|
||||||
|
|
||||||
https://github.com/louislam/uptime-kuma/releases/download/1.21.0/uptime-kuma-win64-portable-1.0.0.zip
|
https://github.com/louislam/uptime-kuma/files/11886108/uptime-kuma-win64-portable-1.0.1.zip
|
||||||
|
|
||||||
### Advanced Installation
|
### Advanced Installation
|
||||||
|
|
||||||
@@ -184,7 +184,10 @@ If you want to report a bug or request a new feature, feel free to open a [new i
|
|||||||
### Translations
|
### Translations
|
||||||
If you want to translate Uptime Kuma into your language, please visit [Weblate Readme](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).
|
If you want to translate Uptime Kuma into your language, please visit [Weblate Readme](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).
|
||||||
|
|
||||||
Feel free to correct my grammar in this README, source code, or wiki, as my mother language is not English and my grammar is not that great.
|
## Spelling & Grammar
|
||||||
|
|
||||||
|
Feel free to correct the grammar in the documentation or code.
|
||||||
|
My mother language is not english and my grammar is not that great.
|
||||||
|
|
||||||
### Create Pull Requests
|
### Create Pull Requests
|
||||||
If you want to modify Uptime Kuma, please read this guide and follow the rules here: https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md
|
If you want to modify Uptime Kuma, please read this guide and follow the rules here: https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md
|
||||||
|
@@ -4,8 +4,4 @@ if (process.env.TEST_FRONTEND) {
|
|||||||
config.presets = [ "@babel/preset-env" ];
|
config.presets = [ "@babel/preset-env" ];
|
||||||
}
|
}
|
||||||
|
|
||||||
if (process.env.TEST_BACKEND) {
|
|
||||||
config.plugins = [ "babel-plugin-rewire" ];
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = config;
|
module.exports = config;
|
||||||
|
@@ -3,6 +3,7 @@ import vue from "@vitejs/plugin-vue";
|
|||||||
import { defineConfig } from "vite";
|
import { defineConfig } from "vite";
|
||||||
import visualizer from "rollup-plugin-visualizer";
|
import visualizer from "rollup-plugin-visualizer";
|
||||||
import viteCompression from "vite-plugin-compression";
|
import viteCompression from "vite-plugin-compression";
|
||||||
|
import commonjs from "vite-plugin-commonjs";
|
||||||
|
|
||||||
const postCssScss = require("postcss-scss");
|
const postCssScss = require("postcss-scss");
|
||||||
const postcssRTLCSS = require("postcss-rtlcss");
|
const postcssRTLCSS = require("postcss-rtlcss");
|
||||||
@@ -16,8 +17,12 @@ export default defineConfig({
|
|||||||
},
|
},
|
||||||
define: {
|
define: {
|
||||||
"FRONTEND_VERSION": JSON.stringify(process.env.npm_package_version),
|
"FRONTEND_VERSION": JSON.stringify(process.env.npm_package_version),
|
||||||
|
"DEVCONTAINER": JSON.stringify(process.env.DEVCONTAINER),
|
||||||
|
"GITHUB_CODESPACES_PORT_FORWARDING_DOMAIN": JSON.stringify(process.env.GITHUB_CODESPACES_PORT_FORWARDING_DOMAIN),
|
||||||
|
"CODESPACE_NAME": JSON.stringify(process.env.CODESPACE_NAME),
|
||||||
},
|
},
|
||||||
plugins: [
|
plugins: [
|
||||||
|
commonjs(),
|
||||||
vue(),
|
vue(),
|
||||||
legacy({
|
legacy({
|
||||||
targets: [ "since 2015" ],
|
targets: [ "since 2015" ],
|
||||||
@@ -42,6 +47,9 @@ export default defineConfig({
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
build: {
|
build: {
|
||||||
|
commonjsOptions: {
|
||||||
|
include: [ /.js$/ ],
|
||||||
|
},
|
||||||
rollupOptions: {
|
rollupOptions: {
|
||||||
output: {
|
output: {
|
||||||
manualChunks(id, { getModuleInfo, getModuleIds }) {
|
manualChunks(id, { getModuleInfo, getModuleIds }) {
|
||||||
|
559
db/knex_init_db.js
Normal file
559
db/knex_init_db.js
Normal file
@@ -0,0 +1,559 @@
|
|||||||
|
const { R } = require("redbean-node");
|
||||||
|
const { log } = require("../src/util");
|
||||||
|
|
||||||
|
/**
|
||||||
|
* ⚠️⚠️⚠️⚠️⚠️⚠️ DO NOT ADD ANYTHING HERE!
|
||||||
|
* IF YOU NEED TO ADD FIELDS, ADD IT TO ./db/knex_migrations
|
||||||
|
* See ./db/knex_migrations/README.md for more information
|
||||||
|
* @returns {Promise<void>}
|
||||||
|
*/
|
||||||
|
async function createTables() {
|
||||||
|
log.info("mariadb", "Creating basic tables for MariaDB");
|
||||||
|
const knex = R.knex;
|
||||||
|
|
||||||
|
// TODO: Should check later if it is really the final patch sql file.
|
||||||
|
|
||||||
|
// docker_host
|
||||||
|
await knex.schema.createTable("docker_host", (table) => {
|
||||||
|
table.increments("id");
|
||||||
|
table.integer("user_id").unsigned().notNullable();
|
||||||
|
table.string("docker_daemon", 255);
|
||||||
|
table.string("docker_type", 255);
|
||||||
|
table.string("name", 255);
|
||||||
|
});
|
||||||
|
|
||||||
|
// group
|
||||||
|
await knex.schema.createTable("group", (table) => {
|
||||||
|
table.increments("id");
|
||||||
|
table.string("name", 255).notNullable();
|
||||||
|
table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
|
||||||
|
table.boolean("public").notNullable().defaultTo(false);
|
||||||
|
table.boolean("active").notNullable().defaultTo(true);
|
||||||
|
table.integer("weight").notNullable().defaultTo(1000);
|
||||||
|
table.integer("status_page_id").unsigned();
|
||||||
|
});
|
||||||
|
|
||||||
|
// proxy
|
||||||
|
await knex.schema.createTable("proxy", (table) => {
|
||||||
|
table.increments("id");
|
||||||
|
table.integer("user_id").unsigned().notNullable();
|
||||||
|
table.string("protocol", 10).notNullable();
|
||||||
|
table.string("host", 255).notNullable();
|
||||||
|
table.smallint("port").notNullable(); // TODO: Maybe a issue with MariaDB, need migration to int
|
||||||
|
table.boolean("auth").notNullable();
|
||||||
|
table.string("username", 255).nullable();
|
||||||
|
table.string("password", 255).nullable();
|
||||||
|
table.boolean("active").notNullable().defaultTo(true);
|
||||||
|
table.boolean("default").notNullable().defaultTo(false);
|
||||||
|
table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
|
||||||
|
|
||||||
|
table.index("user_id", "proxy_user_id");
|
||||||
|
});
|
||||||
|
|
||||||
|
// user
|
||||||
|
await knex.schema.createTable("user", (table) => {
|
||||||
|
table.increments("id");
|
||||||
|
table.string("username", 255).notNullable().unique().collate("utf8_general_ci");
|
||||||
|
table.string("password", 255);
|
||||||
|
table.boolean("active").notNullable().defaultTo(true);
|
||||||
|
table.string("timezone", 150);
|
||||||
|
table.string("twofa_secret", 64);
|
||||||
|
table.boolean("twofa_status").notNullable().defaultTo(false);
|
||||||
|
table.string("twofa_last_token", 6);
|
||||||
|
});
|
||||||
|
|
||||||
|
// monitor
|
||||||
|
await knex.schema.createTable("monitor", (table) => {
|
||||||
|
table.increments("id");
|
||||||
|
table.string("name", 150);
|
||||||
|
table.boolean("active").notNullable().defaultTo(true);
|
||||||
|
table.integer("user_id").unsigned()
|
||||||
|
.references("id").inTable("user")
|
||||||
|
.onDelete("SET NULL")
|
||||||
|
.onUpdate("CASCADE");
|
||||||
|
table.integer("interval").notNullable().defaultTo(20);
|
||||||
|
table.text("url");
|
||||||
|
table.string("type", 20);
|
||||||
|
table.integer("weight").defaultTo(2000);
|
||||||
|
table.string("hostname", 255);
|
||||||
|
table.integer("port");
|
||||||
|
table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
|
||||||
|
table.string("keyword", 255);
|
||||||
|
table.integer("maxretries").notNullable().defaultTo(0);
|
||||||
|
table.boolean("ignore_tls").notNullable().defaultTo(false);
|
||||||
|
table.boolean("upside_down").notNullable().defaultTo(false);
|
||||||
|
table.integer("maxredirects").notNullable().defaultTo(10);
|
||||||
|
table.text("accepted_statuscodes_json").notNullable().defaultTo("[\"200-299\"]");
|
||||||
|
table.string("dns_resolve_type", 5);
|
||||||
|
table.string("dns_resolve_server", 255);
|
||||||
|
table.string("dns_last_result", 255);
|
||||||
|
table.integer("retry_interval").notNullable().defaultTo(0);
|
||||||
|
table.string("push_token", 20).defaultTo(null);
|
||||||
|
table.text("method").notNullable().defaultTo("GET");
|
||||||
|
table.text("body").defaultTo(null);
|
||||||
|
table.text("headers").defaultTo(null);
|
||||||
|
table.text("basic_auth_user").defaultTo(null);
|
||||||
|
table.text("basic_auth_pass").defaultTo(null);
|
||||||
|
table.integer("docker_host").unsigned()
|
||||||
|
.references("id").inTable("docker_host");
|
||||||
|
table.string("docker_container", 255);
|
||||||
|
table.integer("proxy_id").unsigned()
|
||||||
|
.references("id").inTable("proxy");
|
||||||
|
table.boolean("expiry_notification").defaultTo(true);
|
||||||
|
table.text("mqtt_topic");
|
||||||
|
table.string("mqtt_success_message", 255);
|
||||||
|
table.string("mqtt_username", 255);
|
||||||
|
table.string("mqtt_password", 255);
|
||||||
|
table.string("database_connection_string", 2000);
|
||||||
|
table.text("database_query");
|
||||||
|
table.string("auth_method", 250);
|
||||||
|
table.text("auth_domain");
|
||||||
|
table.text("auth_workstation");
|
||||||
|
table.string("grpc_url", 255).defaultTo(null);
|
||||||
|
table.text("grpc_protobuf").defaultTo(null);
|
||||||
|
table.text("grpc_body").defaultTo(null);
|
||||||
|
table.text("grpc_metadata").defaultTo(null);
|
||||||
|
table.text("grpc_method").defaultTo(null);
|
||||||
|
table.text("grpc_service_name").defaultTo(null);
|
||||||
|
table.boolean("grpc_enable_tls").notNullable().defaultTo(false);
|
||||||
|
table.string("radius_username", 255);
|
||||||
|
table.string("radius_password", 255);
|
||||||
|
table.string("radius_calling_station_id", 50);
|
||||||
|
table.string("radius_called_station_id", 50);
|
||||||
|
table.string("radius_secret", 255);
|
||||||
|
table.integer("resend_interval").notNullable().defaultTo(0);
|
||||||
|
table.integer("packet_size").notNullable().defaultTo(56);
|
||||||
|
table.string("game", 255);
|
||||||
|
});
|
||||||
|
|
||||||
|
// heartbeat
|
||||||
|
await knex.schema.createTable("heartbeat", (table) => {
|
||||||
|
table.increments("id");
|
||||||
|
table.boolean("important").notNullable().defaultTo(false);
|
||||||
|
table.integer("monitor_id").unsigned().notNullable()
|
||||||
|
.references("id").inTable("monitor")
|
||||||
|
.onDelete("CASCADE")
|
||||||
|
.onUpdate("CASCADE");
|
||||||
|
table.smallint("status").notNullable();
|
||||||
|
|
||||||
|
table.text("msg");
|
||||||
|
table.datetime("time").notNullable();
|
||||||
|
table.integer("ping");
|
||||||
|
table.integer("duration").notNullable().defaultTo(0);
|
||||||
|
table.integer("down_count").notNullable().defaultTo(0);
|
||||||
|
|
||||||
|
table.index("important");
|
||||||
|
table.index([ "monitor_id", "time" ], "monitor_time_index");
|
||||||
|
table.index("monitor_id");
|
||||||
|
table.index([ "monitor_id", "important", "time" ], "monitor_important_time_index");
|
||||||
|
});
|
||||||
|
|
||||||
|
// incident
|
||||||
|
await knex.schema.createTable("incident", (table) => {
|
||||||
|
table.increments("id");
|
||||||
|
table.string("title", 255).notNullable();
|
||||||
|
table.text("content", 255).notNullable();
|
||||||
|
table.string("style", 30).notNullable().defaultTo("warning");
|
||||||
|
table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
|
||||||
|
table.datetime("last_updated_date");
|
||||||
|
table.boolean("pin").notNullable().defaultTo(true);
|
||||||
|
table.boolean("active").notNullable().defaultTo(true);
|
||||||
|
table.integer("status_page_id").unsigned();
|
||||||
|
});
|
||||||
|
|
||||||
|
// maintenance
|
||||||
|
await knex.schema.createTable("maintenance", (table) => {
|
||||||
|
table.increments("id");
|
||||||
|
table.string("title", 150).notNullable();
|
||||||
|
table.text("description").notNullable();
|
||||||
|
table.integer("user_id").unsigned()
|
||||||
|
.references("id").inTable("user")
|
||||||
|
.onDelete("SET NULL")
|
||||||
|
.onUpdate("CASCADE");
|
||||||
|
table.boolean("active").notNullable().defaultTo(true);
|
||||||
|
table.string("strategy", 50).notNullable().defaultTo("single");
|
||||||
|
table.datetime("start_date");
|
||||||
|
table.datetime("end_date");
|
||||||
|
table.time("start_time");
|
||||||
|
table.time("end_time");
|
||||||
|
table.string("weekdays", 250).defaultTo("[]");
|
||||||
|
table.text("days_of_month").defaultTo("[]");
|
||||||
|
table.integer("interval_day");
|
||||||
|
|
||||||
|
table.index("active");
|
||||||
|
table.index([ "strategy", "active" ], "manual_active");
|
||||||
|
table.index("user_id", "maintenance_user_id");
|
||||||
|
});
|
||||||
|
|
||||||
|
// status_page
|
||||||
|
await knex.schema.createTable("status_page", (table) => {
|
||||||
|
table.increments("id");
|
||||||
|
table.string("slug", 255).notNullable().unique().collate("utf8_general_ci");
|
||||||
|
table.string("title", 255).notNullable();
|
||||||
|
table.text("description");
|
||||||
|
table.string("icon", 255).notNullable();
|
||||||
|
table.string("theme", 30).notNullable();
|
||||||
|
table.boolean("published").notNullable().defaultTo(true);
|
||||||
|
table.boolean("search_engine_index").notNullable().defaultTo(true);
|
||||||
|
table.boolean("show_tags").notNullable().defaultTo(false);
|
||||||
|
table.string("password");
|
||||||
|
table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
|
||||||
|
table.datetime("modified_date").notNullable().defaultTo(knex.fn.now());
|
||||||
|
table.text("footer_text");
|
||||||
|
table.text("custom_css");
|
||||||
|
table.boolean("show_powered_by").notNullable().defaultTo(true);
|
||||||
|
table.string("google_analytics_tag_id");
|
||||||
|
});
|
||||||
|
|
||||||
|
// maintenance_status_page
|
||||||
|
await knex.schema.createTable("maintenance_status_page", (table) => {
|
||||||
|
table.increments("id");
|
||||||
|
|
||||||
|
table.integer("status_page_id").unsigned().notNullable()
|
||||||
|
.references("id").inTable("status_page")
|
||||||
|
.onDelete("CASCADE")
|
||||||
|
.onUpdate("CASCADE");
|
||||||
|
|
||||||
|
table.integer("maintenance_id").unsigned().notNullable()
|
||||||
|
.references("id").inTable("maintenance")
|
||||||
|
.onDelete("CASCADE")
|
||||||
|
.onUpdate("CASCADE");
|
||||||
|
});
|
||||||
|
|
||||||
|
// maintenance_timeslot
|
||||||
|
await knex.schema.createTable("maintenance_timeslot", (table) => {
|
||||||
|
table.increments("id");
|
||||||
|
table.integer("maintenance_id").unsigned().notNullable()
|
||||||
|
.references("id").inTable("maintenance")
|
||||||
|
.onDelete("CASCADE")
|
||||||
|
.onUpdate("CASCADE");
|
||||||
|
table.datetime("start_date").notNullable();
|
||||||
|
table.datetime("end_date");
|
||||||
|
table.boolean("generated_next").defaultTo(false);
|
||||||
|
|
||||||
|
table.index("maintenance_id");
|
||||||
|
table.index([ "maintenance_id", "start_date", "end_date" ], "active_timeslot_index");
|
||||||
|
table.index("generated_next", "generated_next_index");
|
||||||
|
});
|
||||||
|
|
||||||
|
// monitor_group
|
||||||
|
await knex.schema.createTable("monitor_group", (table) => {
|
||||||
|
table.increments("id");
|
||||||
|
table.integer("monitor_id").unsigned().notNullable()
|
||||||
|
.references("id").inTable("monitor")
|
||||||
|
.onDelete("CASCADE")
|
||||||
|
.onUpdate("CASCADE");
|
||||||
|
table.integer("group_id").unsigned().notNullable()
|
||||||
|
.references("id").inTable("group")
|
||||||
|
.onDelete("CASCADE")
|
||||||
|
.onUpdate("CASCADE");
|
||||||
|
table.integer("weight").notNullable().defaultTo(1000);
|
||||||
|
table.boolean("send_url").notNullable().defaultTo(false);
|
||||||
|
|
||||||
|
table.index([ "monitor_id", "group_id" ], "fk");
|
||||||
|
});
|
||||||
|
// monitor_maintenance
|
||||||
|
await knex.schema.createTable("monitor_maintenance", (table) => {
|
||||||
|
table.increments("id");
|
||||||
|
table.integer("monitor_id").unsigned().notNullable()
|
||||||
|
.references("id").inTable("monitor")
|
||||||
|
.onDelete("CASCADE")
|
||||||
|
.onUpdate("CASCADE");
|
||||||
|
table.integer("maintenance_id").unsigned().notNullable()
|
||||||
|
.references("id").inTable("maintenance")
|
||||||
|
.onDelete("CASCADE")
|
||||||
|
.onUpdate("CASCADE");
|
||||||
|
|
||||||
|
table.index("maintenance_id", "maintenance_id_index2");
|
||||||
|
table.index("monitor_id", "monitor_id_index");
|
||||||
|
});
|
||||||
|
|
||||||
|
// notification
|
||||||
|
await knex.schema.createTable("notification", (table) => {
|
||||||
|
table.increments("id");
|
||||||
|
table.string("name", 255);
|
||||||
|
table.string("config", 255); // TODO: should use TEXT!
|
||||||
|
table.boolean("active").notNullable().defaultTo(true);
|
||||||
|
table.integer("user_id").unsigned();
|
||||||
|
table.boolean("is_default").notNullable().defaultTo(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
// monitor_notification
|
||||||
|
await knex.schema.createTable("monitor_notification", (table) => {
|
||||||
|
table.increments("id").unsigned(); // TODO: no auto increment????
|
||||||
|
table.integer("monitor_id").unsigned().notNullable()
|
||||||
|
.references("id").inTable("monitor")
|
||||||
|
.onDelete("CASCADE")
|
||||||
|
.onUpdate("CASCADE");
|
||||||
|
table.integer("notification_id").unsigned().notNullable()
|
||||||
|
.references("id").inTable("notification")
|
||||||
|
.onDelete("CASCADE")
|
||||||
|
.onUpdate("CASCADE");
|
||||||
|
|
||||||
|
table.index([ "monitor_id", "notification_id" ], "monitor_notification_index");
|
||||||
|
});
|
||||||
|
|
||||||
|
// tag
|
||||||
|
await knex.schema.createTable("tag", (table) => {
|
||||||
|
table.increments("id");
|
||||||
|
table.string("name", 255).notNullable();
|
||||||
|
table.string("color", 255).notNullable();
|
||||||
|
table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
|
||||||
|
});
|
||||||
|
|
||||||
|
// monitor_tag
|
||||||
|
await knex.schema.createTable("monitor_tag", (table) => {
|
||||||
|
table.increments("id");
|
||||||
|
table.integer("monitor_id").unsigned().notNullable()
|
||||||
|
.references("id").inTable("monitor")
|
||||||
|
.onDelete("CASCADE")
|
||||||
|
.onUpdate("CASCADE");
|
||||||
|
table.integer("tag_id").unsigned().notNullable()
|
||||||
|
.references("id").inTable("tag")
|
||||||
|
.onDelete("CASCADE")
|
||||||
|
.onUpdate("CASCADE");
|
||||||
|
table.text("value");
|
||||||
|
});
|
||||||
|
|
||||||
|
// monitor_tls_info
|
||||||
|
await knex.schema.createTable("monitor_tls_info", (table) => {
|
||||||
|
table.increments("id");
|
||||||
|
table.integer("monitor_id").unsigned().notNullable(); //TODO: no fk ?
|
||||||
|
table.text("info_json");
|
||||||
|
});
|
||||||
|
|
||||||
|
// notification_sent_history
|
||||||
|
await knex.schema.createTable("notification_sent_history", (table) => {
|
||||||
|
table.increments("id");
|
||||||
|
table.string("type", 50).notNullable();
|
||||||
|
table.integer("monitor_id").unsigned().notNullable();
|
||||||
|
table.integer("days").notNullable();
|
||||||
|
table.unique([ "type", "monitor_id", "days" ]);
|
||||||
|
table.index([ "type", "monitor_id", "days" ], "good_index");
|
||||||
|
});
|
||||||
|
|
||||||
|
// setting
|
||||||
|
await knex.schema.createTable("setting", (table) => {
|
||||||
|
table.increments("id");
|
||||||
|
table.string("key", 200).notNullable().unique().collate("utf8_general_ci");
|
||||||
|
table.text("value");
|
||||||
|
table.string("type", 20);
|
||||||
|
});
|
||||||
|
|
||||||
|
// status_page_cname
|
||||||
|
await knex.schema.createTable("status_page_cname", (table) => {
|
||||||
|
table.increments("id");
|
||||||
|
table.integer("status_page_id").unsigned()
|
||||||
|
.references("id").inTable("status_page")
|
||||||
|
.onDelete("CASCADE")
|
||||||
|
.onUpdate("CASCADE");
|
||||||
|
table.string("domain").notNullable().unique().collate("utf8_general_ci");
|
||||||
|
});
|
||||||
|
|
||||||
|
/*********************
|
||||||
|
* Converted Patch here
|
||||||
|
*********************/
|
||||||
|
|
||||||
|
// 2023-06-30-1348-http-body-encoding.js
|
||||||
|
// ALTER TABLE monitor ADD http_body_encoding VARCHAR(25);
|
||||||
|
// UPDATE monitor SET http_body_encoding = 'json' WHERE (type = 'http' or type = 'keyword') AND http_body_encoding IS NULL;
|
||||||
|
await knex.schema.table("monitor", function (table) {
|
||||||
|
table.string("http_body_encoding", 25);
|
||||||
|
});
|
||||||
|
|
||||||
|
await knex("monitor")
|
||||||
|
.where(function () {
|
||||||
|
this.where("type", "http").orWhere("type", "keyword");
|
||||||
|
})
|
||||||
|
.whereNull("http_body_encoding")
|
||||||
|
.update({
|
||||||
|
http_body_encoding: "json",
|
||||||
|
});
|
||||||
|
|
||||||
|
// 2023-06-30-1354-add-description-monitor.js
|
||||||
|
// ALTER TABLE monitor ADD description TEXT default null;
|
||||||
|
await knex.schema.table("monitor", function (table) {
|
||||||
|
table.text("description").defaultTo(null);
|
||||||
|
});
|
||||||
|
|
||||||
|
// 2023-06-30-1357-api-key-table.js
|
||||||
|
/*
|
||||||
|
CREATE TABLE [api_key] (
|
||||||
|
[id] INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
|
||||||
|
[key] VARCHAR(255) NOT NULL,
|
||||||
|
[name] VARCHAR(255) NOT NULL,
|
||||||
|
[user_id] INTEGER NOT NULL,
|
||||||
|
[created_date] DATETIME DEFAULT (DATETIME('now')) NOT NULL,
|
||||||
|
[active] BOOLEAN DEFAULT 1 NOT NULL,
|
||||||
|
[expires] DATETIME DEFAULT NULL,
|
||||||
|
CONSTRAINT FK_user FOREIGN KEY ([user_id]) REFERENCES [user]([id]) ON DELETE CASCADE ON UPDATE CASCADE
|
||||||
|
);
|
||||||
|
*/
|
||||||
|
await knex.schema.createTable("api_key", function (table) {
|
||||||
|
table.increments("id").primary();
|
||||||
|
table.string("key", 255).notNullable();
|
||||||
|
table.string("name", 255).notNullable();
|
||||||
|
table.integer("user_id").unsigned().notNullable()
|
||||||
|
.references("id").inTable("user")
|
||||||
|
.onDelete("CASCADE")
|
||||||
|
.onUpdate("CASCADE");
|
||||||
|
table.dateTime("created_date").defaultTo(knex.fn.now()).notNullable();
|
||||||
|
table.boolean("active").defaultTo(1).notNullable();
|
||||||
|
table.dateTime("expires").defaultTo(null);
|
||||||
|
});
|
||||||
|
|
||||||
|
// 2023-06-30-1400-monitor-tls.js
|
||||||
|
/*
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD tls_ca TEXT default null;
|
||||||
|
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD tls_cert TEXT default null;
|
||||||
|
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD tls_key TEXT default null;
|
||||||
|
*/
|
||||||
|
await knex.schema.table("monitor", function (table) {
|
||||||
|
table.text("tls_ca").defaultTo(null);
|
||||||
|
table.text("tls_cert").defaultTo(null);
|
||||||
|
table.text("tls_key").defaultTo(null);
|
||||||
|
});
|
||||||
|
|
||||||
|
// 2023-06-30-1401-maintenance-cron.js
|
||||||
|
/*
|
||||||
|
-- 999 characters. https://stackoverflow.com/questions/46134830/maximum-length-for-cron-job
|
||||||
|
DROP TABLE maintenance_timeslot;
|
||||||
|
ALTER TABLE maintenance ADD cron TEXT;
|
||||||
|
ALTER TABLE maintenance ADD timezone VARCHAR(255);
|
||||||
|
ALTER TABLE maintenance ADD duration INTEGER;
|
||||||
|
*/
|
||||||
|
await knex.schema
|
||||||
|
.dropTableIfExists("maintenance_timeslot")
|
||||||
|
.table("maintenance", function (table) {
|
||||||
|
table.text("cron");
|
||||||
|
table.string("timezone", 255);
|
||||||
|
table.integer("duration");
|
||||||
|
});
|
||||||
|
|
||||||
|
// 2023-06-30-1413-add-parent-monitor.js.
|
||||||
|
/*
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD parent INTEGER REFERENCES [monitor] ([id]) ON DELETE SET NULL ON UPDATE CASCADE;
|
||||||
|
*/
|
||||||
|
await knex.schema.table("monitor", function (table) {
|
||||||
|
table.integer("parent").unsigned()
|
||||||
|
.references("id").inTable("monitor")
|
||||||
|
.onDelete("SET NULL")
|
||||||
|
.onUpdate("CASCADE");
|
||||||
|
});
|
||||||
|
|
||||||
|
/*
|
||||||
|
patch-add-invert-keyword.sql
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD invert_keyword BOOLEAN default 0 not null;
|
||||||
|
*/
|
||||||
|
await knex.schema.table("monitor", function (table) {
|
||||||
|
table.boolean("invert_keyword").defaultTo(0).notNullable();
|
||||||
|
});
|
||||||
|
|
||||||
|
/*
|
||||||
|
patch-added-json-query.sql
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD json_path TEXT;
|
||||||
|
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD expected_value VARCHAR(255);
|
||||||
|
*/
|
||||||
|
await knex.schema.table("monitor", function (table) {
|
||||||
|
table.text("json_path");
|
||||||
|
table.string("expected_value", 255);
|
||||||
|
});
|
||||||
|
|
||||||
|
/*
|
||||||
|
patch-added-kafka-producer.sql
|
||||||
|
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD kafka_producer_topic VARCHAR(255);
|
||||||
|
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD kafka_producer_brokers TEXT;
|
||||||
|
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD kafka_producer_ssl INTEGER;
|
||||||
|
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD kafka_producer_allow_auto_topic_creation VARCHAR(255);
|
||||||
|
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD kafka_producer_sasl_options TEXT;
|
||||||
|
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD kafka_producer_message TEXT;
|
||||||
|
*/
|
||||||
|
await knex.schema.table("monitor", function (table) {
|
||||||
|
table.string("kafka_producer_topic", 255);
|
||||||
|
table.text("kafka_producer_brokers");
|
||||||
|
table.integer("kafka_producer_ssl");
|
||||||
|
table.string("kafka_producer_allow_auto_topic_creation", 255);
|
||||||
|
table.text("kafka_producer_sasl_options");
|
||||||
|
table.text("kafka_producer_message");
|
||||||
|
});
|
||||||
|
|
||||||
|
/*
|
||||||
|
patch-add-certificate-expiry-status-page.sql
|
||||||
|
ALTER TABLE status_page
|
||||||
|
ADD show_certificate_expiry BOOLEAN default 0 NOT NULL;
|
||||||
|
*/
|
||||||
|
await knex.schema.table("status_page", function (table) {
|
||||||
|
table.boolean("show_certificate_expiry").defaultTo(0).notNullable();
|
||||||
|
});
|
||||||
|
|
||||||
|
/*
|
||||||
|
patch-monitor-oauth-cc.sql
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD oauth_client_id TEXT default null;
|
||||||
|
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD oauth_client_secret TEXT default null;
|
||||||
|
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD oauth_token_url TEXT default null;
|
||||||
|
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD oauth_scopes TEXT default null;
|
||||||
|
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD oauth_auth_method TEXT default null;
|
||||||
|
*/
|
||||||
|
await knex.schema.table("monitor", function (table) {
|
||||||
|
table.text("oauth_client_id").defaultTo(null);
|
||||||
|
table.text("oauth_client_secret").defaultTo(null);
|
||||||
|
table.text("oauth_token_url").defaultTo(null);
|
||||||
|
table.text("oauth_scopes").defaultTo(null);
|
||||||
|
table.text("oauth_auth_method").defaultTo(null);
|
||||||
|
});
|
||||||
|
|
||||||
|
/*
|
||||||
|
patch-add-timeout-monitor.sql
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD timeout DOUBLE default 0 not null;
|
||||||
|
*/
|
||||||
|
await knex.schema.table("monitor", function (table) {
|
||||||
|
table.double("timeout").defaultTo(0).notNullable();
|
||||||
|
});
|
||||||
|
|
||||||
|
/*
|
||||||
|
patch-add-gamedig-given-port.sql
|
||||||
|
ALTER TABLE monitor
|
||||||
|
ADD gamedig_given_port_only BOOLEAN default 1 not null;
|
||||||
|
*/
|
||||||
|
await knex.schema.table("monitor", function (table) {
|
||||||
|
table.boolean("gamedig_given_port_only").defaultTo(1).notNullable();
|
||||||
|
});
|
||||||
|
|
||||||
|
log.info("mariadb", "Created basic tables for MariaDB");
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
createTables,
|
||||||
|
};
|
57  db/knex_migrations/README.md  Normal file
@@ -0,0 +1,57 @@

## Info

https://knexjs.org/guide/migrations.html#knexfile-in-other-languages

## Basic rules

- All tables must have a primary key named `id`
- Filename format: `YYYY-MM-DD-HHMM-patch-name.js`
- Avoid native SQL syntax, use knex methods, because Uptime Kuma supports multiple databases

## Template

Filename: YYYYMMDDHHMMSS_name.js

```js
exports.up = function(knex) {

};

exports.down = function(knex) {

};

// exports.config = { transaction: false };
```

## Example

Filename: 2023-06-30-1348-create-user-and-product.js

```js
exports.up = function(knex) {
    return knex.schema
        .createTable('user', function (table) {
            table.increments('id');
            table.string('first_name', 255).notNullable();
            table.string('last_name', 255).notNullable();
        })
        .createTable('product', function (table) {
            table.increments('id');
            table.decimal('price').notNullable();
            table.string('name', 1000).notNullable();
        }).then(() => {
            knex("products").insert([
                { price: 10, name: "Apple" },
                { price: 20, name: "Orange" },
            ]);
        });
};

exports.down = function(knex) {
    return knex.schema
        .dropTable("product")
        .dropTable("user");
};
```

https://knexjs.org/guide/migrations.html#transactions-in-migrations
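The rules and template above can also be exercised outside the application. The sketch below is illustrative only and not a file from this repository: it assumes a plain knex instance pointing at a hypothetical SQLite file, and only the `./db/knex_migrations` directory name is taken from the README above.

```js
// Hypothetical runner, shown only to illustrate the rules above.
// The connection settings here are made up for the example.
const knex = require("knex")({
    client: "sqlite3",
    connection: { filename: "./data/kuma.db" },
    useNullAsDefault: true,
});

async function migrate() {
    // Applies every file in ./db/knex_migrations that has not run yet,
    // in filename order (hence the date-based naming rule).
    await knex.migrate.latest({ directory: "./db/knex_migrations" });

    // Rolls back the most recent batch by calling each file's exports.down.
    // await knex.migrate.rollback({ directory: "./db/knex_migrations" });

    await knex.destroy();
}

migrate();
```

Because migration files are applied in filename order, the date-based naming rule above also fixes the execution order.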
3  db/old_migrations/README.md  Normal file
@@ -0,0 +1,3 @@

# Don't create a new migration file here

Please go to ./db/knex_migrations/README.md

@@ -0,0 +1,7 @@

-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;

ALTER TABLE status_page
    ADD show_certificate_expiry BOOLEAN default 0 NOT NULL;

COMMIT;

7  db/old_migrations/patch-add-gamedig-given-port.sql  Normal file
@@ -0,0 +1,7 @@

-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;

ALTER TABLE monitor
    ADD gamedig_given_port_only BOOLEAN default 1 not null;

COMMIT;

7  db/old_migrations/patch-add-invert-keyword.sql  Normal file
@@ -0,0 +1,7 @@

-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;

ALTER TABLE monitor
    ADD invert_keyword BOOLEAN default 0 not null;

COMMIT;

6  db/old_migrations/patch-add-timeout-monitor.sql  Normal file
@@ -0,0 +1,6 @@

-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;

ALTER TABLE monitor
    ADD timeout DOUBLE default 0 not null;
COMMIT;

10  db/old_migrations/patch-added-json-query.sql  Normal file
@@ -0,0 +1,10 @@

-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;

ALTER TABLE monitor
    ADD json_path TEXT;

ALTER TABLE monitor
    ADD expected_value VARCHAR(255);

COMMIT;

22  db/old_migrations/patch-added-kafka-producer.sql  Normal file
@@ -0,0 +1,22 @@

-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;

ALTER TABLE monitor
    ADD kafka_producer_topic VARCHAR(255);

ALTER TABLE monitor
    ADD kafka_producer_brokers TEXT;

ALTER TABLE monitor
    ADD kafka_producer_ssl INTEGER;

ALTER TABLE monitor
    ADD kafka_producer_allow_auto_topic_creation VARCHAR(255);

ALTER TABLE monitor
    ADD kafka_producer_sasl_options TEXT;

ALTER TABLE monitor
    ADD kafka_producer_message TEXT;

COMMIT;

19  db/old_migrations/patch-monitor-oauth-cc.sql  Normal file
@@ -0,0 +1,19 @@

-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;

ALTER TABLE monitor
    ADD oauth_client_id TEXT default null;

ALTER TABLE monitor
    ADD oauth_client_secret TEXT default null;

ALTER TABLE monitor
    ADD oauth_token_url TEXT default null;

ALTER TABLE monitor
    ADD oauth_scopes TEXT default null;

ALTER TABLE monitor
    ADD oauth_auth_method TEXT default null;

COMMIT;
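For comparison with the new migration system described in db/knex_migrations/README.md, here is a hedged sketch of how one of the SQL patches above (patch-add-invert-keyword.sql) could be expressed as a knex migration. The filename and the `down` step are hypothetical; only the column name and default come from the patch itself.

```js
// Hypothetical file: db/knex_migrations/2023-08-01-0000-add-invert-keyword.js
// Equivalent of the old patch-add-invert-keyword.sql, written with knex methods
// so the same migration works on both SQLite and MariaDB.
exports.up = function (knex) {
    return knex.schema.table("monitor", function (table) {
        table.boolean("invert_keyword").defaultTo(0).notNullable();
    });
};

exports.down = function (knex) {
    return knex.schema.table("monitor", function (table) {
        table.dropColumn("invert_keyword");
    });
};
```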
@@ -1,8 +0,0 @@

# DON'T UPDATE TO alpine3.13, 1.14, see #41.
FROM node:16-alpine3.12
WORKDIR /app

# Install apprise, iputils for non-root ping, setpriv
RUN apk add --no-cache iputils setpriv dumb-init python3 py3-cryptography py3-pip py3-six py3-yaml py3-click py3-markdown py3-requests py3-requests-oauthlib git && \
    pip3 --no-cache-dir install apprise==1.4.0 && \
    rm -rf /root/.cache
@@ -1,28 +1,56 @@
-# DON'T UPDATE TO node:14-bullseye-slim, see #372.
 # If the image changed, the second stage image should be changed too
-FROM node:16-buster-slim
+FROM node:20-bookworm-slim AS base2-slim
 ARG TARGETPLATFORM

 WORKDIR /app

-# Install Curl
-# Install Apprise, add sqlite3 cli for debugging in the future, iputils-ping for ping, util-linux for setpriv
-# Stupid python3 and python3-pip actually install a lot of useless things into Debian, specify --no-install-recommends to skip them, make the base even smaller than alpine!
-RUN apt-get update && \
-    apt-get --yes --no-install-recommends install python3 python3-pip python3-cryptography python3-six python3-yaml python3-click python3-markdown python3-requests python3-requests-oauthlib \
-        sqlite3 iputils-ping util-linux dumb-init git curl ca-certificates && \
-    pip3 --no-cache-dir install apprise==1.4.0 && \
+# Specify --no-install-recommends to skip unused dependencies, make the base much smaller!
+# apprise = for notifications (From testing repo)
+# sqlite3 = for debugging
+# iputils-ping = for ping
+# util-linux = for setpriv (Should be dropped in 2.0.0?)
+# dumb-init = avoid zombie processes (#480)
+# curl = for debugging
+# ca-certificates = keep the cert up-to-date
+# sudo = for start service nscd with non-root user
+# nscd = for better DNS caching
+RUN echo "deb http://deb.debian.org/debian testing main" >> /etc/apt/sources.list && \
+    apt update && \
+    apt --yes --no-install-recommends -t testing install apprise sqlite3 ca-certificates && \
+    apt --yes --no-install-recommends -t stable install \
+        iputils-ping \
+        util-linux \
+        dumb-init \
+        curl \
+        sudo \
+        nscd && \
     rm -rf /var/lib/apt/lists/* && \
     apt --yes autoremove

 # Install cloudflared
-RUN set -eux && \
-    mkdir -p --mode=0755 /usr/share/keyrings && \
-    curl --fail --show-error --silent --location --insecure https://pkg.cloudflare.com/cloudflare-main.gpg --output /usr/share/keyrings/cloudflare-main.gpg && \
-    echo 'deb [signed-by=/usr/share/keyrings/cloudflare-main.gpg] https://pkg.cloudflare.com/cloudflared buster main' | tee /etc/apt/sources.list.d/cloudflared.list && \
-    apt-get update && \
-    apt-get install --yes --no-install-recommends cloudflared && \
+RUN curl https://pkg.cloudflare.com/cloudflare-main.gpg --output /usr/share/keyrings/cloudflare-main.gpg && \
+    echo 'deb [signed-by=/usr/share/keyrings/cloudflare-main.gpg] https://pkg.cloudflare.com/cloudflared bullseye main' | tee /etc/apt/sources.list.d/cloudflared.list && \
+    apt update && \
+    apt install --yes --no-install-recommends -t stable cloudflared && \
     cloudflared version && \
     rm -rf /var/lib/apt/lists/* && \
     apt --yes autoremove
+
+# For nscd
+COPY ./docker/etc/nscd.conf /etc/nscd.conf
+COPY ./docker/etc/sudoers /etc/sudoers
+
+
+# Full Base Image
+# MariaDB, Chromium and fonts
+# Not working for armv7, so use the older version (10.5) of MariaDB from the debian repo
+# curl -LsS https://r.mariadb.com/downloads/mariadb_repo_setup | bash -s -- --mariadb-server-version="mariadb-11.1" && \
+FROM base2-slim AS base2
+ENV UPTIME_KUMA_ENABLE_EMBEDDED_MARIADB=1
+RUN apt update && \
+    apt --yes --no-install-recommends install chromium fonts-indic fonts-noto fonts-noto-cjk mariadb-server && \
+    apt --yes remove curl && \
+    rm -rf /var/lib/apt/lists/* && \
+    apt --yes autoremove && \
+    chown -R node:node /var/lib/mysql
14  docker/docker-compose-dev.yml  Normal file
@@ -0,0 +1,14 @@

version: '3.8'

services:
  uptime-kuma:
    container_name: uptime-kuma-dev
    image: louislam/uptime-kuma:nightly2
    volumes:
      #- ./data:/app/data
      - ../server:/app/server
      - ../db:/app/db
    ports:
      - "3001:3001"  # <Host Port>:<Container Port>
      - "3307:3306"
@@ -1,14 +1,15 @@
-# Simple docker-compose.yml
-# You can change your port or volume location
-
-version: '3.3'
+version: '3.8'

 services:
   uptime-kuma:
-    image: louislam/uptime-kuma:1
+    image: louislam/uptime-kuma:2
     container_name: uptime-kuma
     volumes:
-      - ./uptime-kuma-data:/app/data
+      - uptime-kuma:/app/data
     ports:
-      - 3001:3001  # <Host Port>:<Container Port>
+      - "3001:3001"  # <Host Port>:<Container Port>
     restart: always
+
+volumes:
+  uptime-kuma:
@@ -1,6 +1,8 @@
+ARG BASE_IMAGE=louislam/uptime-kuma:base2
+
 ############################################
 # Build in Golang
-# Run npm run build-healthcheck-armv7 in the host first, another it will be super slow where it is building the armv7 healthcheck
+# Run npm run build-healthcheck-armv7 in the host first, otherwise it will be super slow where it is building the armv7 healthcheck
 # Check file: builder-go.dockerfile
 ############################################
 FROM louislam/uptime-kuma:builder-go AS build_healthcheck

@@ -8,47 +10,47 @@ FROM louislam/uptime-kuma:builder-go AS build_healthcheck
 ############################################
 # Build in Node.js
 ############################################
-FROM louislam/uptime-kuma:base-debian AS build
+FROM louislam/uptime-kuma:base2 AS build
+USER node
 WORKDIR /app

 ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
-COPY .npmrc .npmrc
-COPY package.json package.json
-COPY package-lock.json package-lock.json
+COPY --chown=node:node .npmrc .npmrc
+COPY --chown=node:node package.json package.json
+COPY --chown=node:node package-lock.json package-lock.json
 RUN npm ci --omit=dev
 COPY . .
-COPY --from=build_healthcheck /app/extra/healthcheck /app/extra/healthcheck
-RUN chmod +x /app/extra/entrypoint.sh
+COPY --chown=node:node --from=build_healthcheck /app/extra/healthcheck /app/extra/healthcheck

 ############################################
 # ⭐ Main Image
 ############################################
-FROM louislam/uptime-kuma:base-debian AS release
+FROM $BASE_IMAGE AS release
+USER node
 WORKDIR /app

-# Copy app files from build layer
-COPY --from=build /app /app
+ENV UPTIME_KUMA_IS_CONTAINER=1
+
+# Copy app files from build layer
+COPY --chown=node:node --from=build /app /app

 EXPOSE 3001
-VOLUME ["/app/data"]
 HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD extra/healthcheck
-ENTRYPOINT ["/usr/bin/dumb-init", "--", "extra/entrypoint.sh"]
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
 CMD ["node", "server/server.js"]

 ############################################
 # Mark as Nightly
 ############################################
 FROM release AS nightly
+USER node
 RUN npm run mark-as-nightly

 ############################################
 # Build an image for testing pr
 ############################################
-FROM louislam/uptime-kuma:base-debian AS pr-test
+FROM louislam/uptime-kuma:base2 AS pr-test2

 WORKDIR /app

 ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1

 ## Install Git

@@ -70,14 +72,13 @@ RUN git clone https://github.com/louislam/uptime-kuma.git .
 RUN npm ci

 EXPOSE 3000 3001
-VOLUME ["/app/data"]
 HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD extra/healthcheck
 CMD ["npm", "run", "start-pr-test"]

 ############################################
 # Upload the artifact to Github
 ############################################
-FROM louislam/uptime-kuma:base-debian AS upload-artifact
+FROM louislam/uptime-kuma:base2 AS upload-artifact
 WORKDIR /
 RUN apt update && \
     apt --yes install curl file
@@ -1,27 +0,0 @@

FROM louislam/uptime-kuma:base-alpine AS build
WORKDIR /app

ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1

COPY .npmrc .npmrc
COPY package.json package.json
COPY package-lock.json package-lock.json
RUN npm ci --omit=dev
COPY . .
RUN chmod +x /app/extra/entrypoint.sh

FROM louislam/uptime-kuma:base-alpine AS release
WORKDIR /app

# Copy app files from build layer
COPY --from=build /app /app

EXPOSE 3001
VOLUME ["/app/data"]
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD node extra/healthcheck.js
ENTRYPOINT ["/usr/bin/dumb-init", "--", "extra/entrypoint.sh"]
CMD ["node", "server/server.js"]

FROM release AS nightly
RUN npm run mark-as-nightly
90  docker/etc/nscd.conf  Normal file
@@ -0,0 +1,90 @@

#
# /etc/nscd.conf
#
# An example Name Service Cache config file. This file is needed by nscd.
#
# Legal entries are:
#
# logfile <file>
# debug-level <level>
# threads <initial #threads to use>
# max-threads <maximum #threads to use>
# server-user <user to run server as instead of root>
# server-user is ignored if nscd is started with -S parameters
# stat-user <user who is allowed to request statistics>
# reload-count unlimited|<number>
# paranoia <yes|no>
# restart-interval <time in seconds>
#
# enable-cache <service> <yes|no>
# positive-time-to-live <service> <time in seconds>
# negative-time-to-live <service> <time in seconds>
# suggested-size <service> <prime number>
# check-files <service> <yes|no>
# persistent <service> <yes|no>
# shared <service> <yes|no>
# max-db-size <service> <number bytes>
# auto-propagate <service> <yes|no>
#
# Currently supported cache names (services): passwd, group, hosts, services
#

# logfile /var/log/nscd.log
# threads 4
# max-threads 32
# server-user node
# stat-user somebody
debug-level 0
# reload-count 5
paranoia no
# restart-interval 3600

enable-cache passwd no
positive-time-to-live passwd 600
negative-time-to-live passwd 20
suggested-size passwd 211
check-files passwd yes
persistent passwd yes
shared passwd yes
max-db-size passwd 33554432
auto-propagate passwd yes

enable-cache group no
positive-time-to-live group 3600
negative-time-to-live group 60
suggested-size group 211
check-files group yes
persistent group yes
shared group yes
max-db-size group 33554432
auto-propagate group yes

enable-cache hosts yes
positive-time-to-live hosts 3600
negative-time-to-live hosts 20
suggested-size hosts 211
check-files hosts yes
persistent hosts yes
# Set shared to "no" to display stats in `nscd -g`
# Read more: https://stackoverflow.com/questions/40429245/nscdcentos7curl-0-dns-cache-hit-rate
shared hosts no
max-db-size hosts 33554432

enable-cache services no
positive-time-to-live services 28800
negative-time-to-live services 20
suggested-size services 211
check-files services yes
persistent services yes
shared services yes
max-db-size services 33554432

enable-cache netgroup no
positive-time-to-live netgroup 28800
negative-time-to-live netgroup 20
suggested-size netgroup 211
check-files netgroup yes
persistent netgroup yes
shared netgroup yes
max-db-size netgroup 33554432
31  docker/etc/sudoers  Normal file
@@ -0,0 +1,31 @@

#
# This file MUST be edited with the 'visudo' command as root.
#
# Please consider adding local content in /etc/sudoers.d/ instead of
# directly modifying this file.
#
# See the man page for details on how to write a sudoers file.
#
Defaults env_reset
Defaults mail_badpass
Defaults secure_path="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"

# Host alias specification

# User alias specification

# Cmnd alias specification

# User privilege specification
root ALL=(ALL:ALL) ALL

# Allow members of group sudo to execute any command
%sudo ALL=(ALL:ALL) ALL

# See sudoers(5) for more information on "#include" directives:

#includedir /etc/sudoers.d

# Allow `node` to control service (mainly for nscd)
node ALL=(root) NOPASSWD: /usr/sbin/nscdservice
node ALL=(root) NOPASSWD: /usr/sbin/service
@@ -36,6 +36,8 @@ if (! exists) {
 /**
  * Commit updated files
  * @param {string} version Version to update to
+ * @returns {void}
+ * @throws Error committing files
  */
 function commit(version) {
     let msg = "Update to " + version;

@@ -55,6 +57,7 @@ function commit(version) {
 /**
  * Create a tag with the specified version
  * @param {string} version Tag to create
+ * @returns {void}
  */
 function tag(version) {
     let res = childProcess.spawnSync("git", [ "tag", version ]);

@@ -68,6 +71,7 @@ function tag(version) {
  * Check if a tag exists for the specified version
  * @param {string} version Version to check
  * @returns {boolean} Does the tag already exist
+ * @throws Version is not valid
  */
 function tagExists(version) {
     if (! version) {

@@ -15,6 +15,7 @@ download(url);
 /**
  * Downloads the latest version of the dist from a GitHub release.
  * @param {string} url The URL to download from.
+ * @returns {void}
  *
  * Generated by Trelent
  */
@@ -1,21 +0,0 @@

#!/usr/bin/env sh

# set -e Exit the script if an error happens
set -e
PUID=${PUID=0}
PGID=${PGID=0}

files_ownership () {
    # -h Changes the ownership of an encountered symbolic link and not that of the file or directory pointed to by the symbolic link.
    # -R Recursively descends the specified directories
    # -c Like verbose but report only when a change is made
    chown -hRc "$PUID":"$PGID" /app/data
}

echo "==> Performing startup jobs and maintenance tasks"
files_ownership

echo "==> Starting application with user $PUID group $PGID"

# --clear-groups Clear supplementary groups.
exec setpriv --reuid "$PUID" --regid "$PGID" --clear-groups "$@"
@@ -1,3 +1,3 @@
 <Weavers xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="FodyWeavers.xsd">
-  <Costura />
+  <Costura DisableCompression='true' IncludeDebugSymbols='false' />
 </Weavers>

@@ -6,9 +6,9 @@ using System.Runtime.InteropServices;
 // set of attributes. Change these attribute values to modify the information
 // associated with an assembly.
 [assembly: AssemblyTitle("Uptime Kuma")]
-[assembly: AssemblyDescription("")]
+[assembly: AssemblyDescription("A portable executable for running Uptime Kuma")]
 [assembly: AssemblyConfiguration("")]
-[assembly: AssemblyCompany("")]
+[assembly: AssemblyCompany("Uptime Kuma")]
 [assembly: AssemblyProduct("Uptime Kuma")]
 [assembly: AssemblyCopyright("Copyright © 2023 Louis Lam")]
 [assembly: AssemblyTrademark("")]

@@ -20,7 +20,7 @@ using System.Runtime.InteropServices;
 [assembly: ComVisible(false)]

 // The following GUID is for the ID of the typelib if this project is exposed to COM
-[assembly: Guid("2DB53988-1D93-4AC0-90C4-96ADEAAC5C04")]
+[assembly: Guid("86B40AFB-61FC-433D-8C31-650B0F32EA8F")]

 // Version information for an assembly consists of the following four values:
 //

@@ -32,5 +32,5 @@ using System.Runtime.InteropServices;
 // You can specify all the values or you can default the Build and Revision Numbers
 // by using the '*' as shown below:
 // [assembly: AssemblyVersion("1.0.*")]
-[assembly: AssemblyVersion("1.0.0.0")]
-[assembly: AssemblyFileVersion("1.0.0.0")]
+[assembly: AssemblyVersion("1.0.1.0")]
+[assembly: AssemblyFileVersion("1.0.1.0")]
@@ -4,12 +4,12 @@ const fs = require("fs");
  * to avoid the runtime deprecation warning triggered for using `fs.rmdirSync` with `{ recursive: true }` in Node.js v16,
  * or the `recursive` property removing completely in the future Node.js version.
  * See the link below.
- *
  * @todo Once we drop the support for Node.js v14 (or at least versions before v14.14.0), we can safely replace this function with `fs.rmSync`, since `fs.rmSync` was add in Node.js v14.14.0 and currently we supports all the Node.js v14 versions that include the versions before the v14.14.0, and this function have almost the same signature with `fs.rmSync`.
  * @link https://nodejs.org/docs/latest-v16.x/api/deprecations.html#dep0147-fsrmdirpath--recursive-true- the deprecation infomation of `fs.rmdirSync`
  * @link https://nodejs.org/docs/latest-v16.x/api/fs.html#fsrmsyncpath-options the document of `fs.rmSync`
  * @param {fs.PathLike} path Valid types for path values in "fs".
- * @param {fs.RmDirOptions} [options] options for `fs.rmdirSync`, if `fs.rmSync` is available and property `recursive` is true, it will automatically have property `force` with value `true`.
+ * @param {fs.RmDirOptions} options options for `fs.rmdirSync`, if `fs.rmSync` is available and property `recursive` is true, it will automatically have property `force` with value `true`.
+ * @returns {void}
  */
 const rmSync = (path, options) => {
     if (typeof fs.rmSync === "function") {
@@ -5,15 +5,15 @@
 // curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh
 println("=====================");
-println("Uptime Kuma Installer");
+println("Uptime Kuma Install Script");
 println("=====================");
-println("Supported OS: CentOS 7/8, Ubuntu >= 16.04 and Debian");
+println("Supported OS: Ubuntu >= 16.04, Debian and CentOS/RHEL 7/8");
 println("---------------------------------------");
 println("This script is designed for Linux and basic usage.");
 println("For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation");
 println("---------------------------------------");
 println("");
-println("Local - Install Uptime Kuma in your current machine with git, Node.js 14 and pm2");
+println("Local - Install Uptime Kuma on your current machine with git, Node.js and pm2");
 println("Docker - Install Uptime Kuma Docker container");
 println("");

@@ -29,14 +29,10 @@ function checkNode() {
     bash("nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])')");
     println("Node Version: " ++ nodeVersion);

-    if (nodeVersion < "12") {
+    if (nodeVersion <= "12") {
         println("Error: Required Node.js 14");
         call("exit", "1");
     }
-
-    if (nodeVersion == "12") {
-        println("Warning: NodeJS " ++ nodeVersion ++ " is not tested.");
-    }
 }

 function deb() {

@@ -60,8 +56,8 @@ function deb() {
         bash("apt --yes install curl");
     }

-    println("Installing Node.js 14");
-    bash("curl -sL https://deb.nodesource.com/setup_14.x | bash - > log.txt");
+    println("Installing Node.js 16");
+    bash("curl -sL https://deb.nodesource.com/setup_16.x | bash - > log.txt");
     bash("apt --yes install nodejs");
     bash("node -v");

@@ -91,6 +87,10 @@ if (type == "local") {
         bash("os=$(head -n1 /etc/issue | cut -f 1 -d ' ')");
         if (os == "Ubuntu") {
             distribution = "ubuntu";
+
+            // Get ubuntu version
+            bash(". /etc/lsb-release");
+            version = DISTRIB_RELEASE;
         }
         if (os == "Debian") {
             distribution = "debian";

@@ -101,6 +101,7 @@ if (type == "local") {
     println("Your OS: " ++ os);
     println("Distribution: " ++ distribution);
+    println("Version: " ++ version);
     println("Arch: " ++ arch);

     if ("$3" != "") {

@@ -131,15 +132,32 @@ if (type == "local") {
         checkNode();
     } else {
-        bash("curlCheck=$(curl --version)");
-        if (curlCheck == "") {
-            println("Installing Curl");
-            bash("yum -y -q install curl");
+        bash("dnfCheck=$(dnf --version)");
+        // Use yum
+        if (dnfCheck == "") {
+            bash("curlCheck=$(curl --version)");
+            if (curlCheck == "") {
+                println("Installing Curl");
+                bash("yum -y -q install curl");
+            }
+
+            println("Installing Node.js 16");
+            bash("curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt");
+            bash("yum install -y -q nodejs");
+        } else {
+            bash("curlCheck=$(curl --version)");
+            if (curlCheck == "") {
+                println("Installing Curl");
+                bash("dnf -y install curl");
+            }
+
+            println("Installing Node.js 16");
+            bash("curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt");
+            bash("dnf install -y nodejs");
         }
-
-        println("Installing Node.js 14");
-        bash("curl -sL https://rpm.nodesource.com/setup_14.x | bash - > log.txt");
-        bash("yum install -y -q nodejs");
         bash("node -v");

         bash("nodeCheckAgain=$(node -v)");

@@ -193,6 +211,14 @@ if (type == "local") {
         bash("pm2 startup");
     }
+
+    // Check again
+    bash("check=$(pm2 --version)");
+    if (check == "") {
+        println("Error: pm2 is not found!");
+        bash("exit 1");
+    }
+
     bash("mkdir -p $installPath");
     bash("cd $installPath");
     bash("git clone https://github.com/louislam/uptime-kuma.git .");
@@ -12,7 +12,7 @@ const rl = readline.createInterface({
 });

 const main = async () => {
-    Database.init(args);
+    Database.initDataDir(args);
     await Database.connect();

     try {

@@ -13,7 +13,7 @@ const rl = readline.createInterface({
 const main = async () => {
     console.log("Connecting the database");
-    Database.init(args);
+    Database.initDataDir(args);
     await Database.connect(false, false, true);

     try {
@@ -138,7 +138,7 @@ server.listen({
 /**
  * Get human readable request type from request code
  * @param {number} code Request code to translate
- * @returns {string} Human readable request type
+ * @returns {string|void} Human readable request type
  */
 function type(code) {
     for (let name in Packet.TYPE) {

@@ -7,11 +7,17 @@ class SimpleMqttServer {
     aedes = require("aedes")();
     server = require("net").createServer(this.aedes.handle);

+    /**
+     * @param {number} port Port to listen on
+     */
     constructor(port) {
         this.port = port;
     }

-    /** Start the MQTT server */
+    /**
+     * Start the MQTT server
+     * @returns {void}
+     */
     start() {
         this.server.listen(this.port, () => {
             console.log("server started and listening on port ", this.port);
9  extra/test-docker.js  Normal file
@@ -0,0 +1,9 @@

// Check if docker is running
const { exec } = require("child_process");

exec("docker ps", (err, stdout, stderr) => {
    if (err) {
        console.error("Docker is not running. Please start docker and try again.");
        process.exit(1);
    }
});
@@ -12,6 +12,7 @@ import rmSync from "../fs-rmSync.js";
  * created with this code if one does not already exist
  * @param {string} baseLang The second base language file to copy. This
  * will be ignored if set to "en" as en.js is copied by default
+ * @returns {void}
  */
 function copyFiles(langCode, baseLang) {
     if (fs.existsSync("./languages")) {

@@ -33,7 +34,8 @@ function copyFiles(langCode, baseLang) {
 /**
  * Update the specified language file
  * @param {string} langCode Language code to update
- * @param {string} baseLang Second language to copy keys from
+ * @param {string} baseLangCode Second language to copy keys from
+ * @returns {void}
  */
 async function updateLanguage(langCode, baseLangCode) {
     const en = (await import("./languages/en.js")).default;
@@ -39,6 +39,8 @@ if (! exists) {
 /**
  * Commit updated files
  * @param {string} version Version to update to
+ * @returns {void}
+ * @throws Error when committing files
  */
 function commit(version) {
     let msg = "Update to " + version;

@@ -55,6 +57,7 @@ function commit(version) {
 /**
  * Create a tag with the specified version
  * @param {string} version Tag to create
+ * @returns {void}
  */
 function tag(version) {
     let res = childProcess.spawnSync("git", [ "tag", version ]);

@@ -65,6 +68,7 @@ function tag(version) {
  * Check if a tag exists for the specified version
  * @param {string} version Version to check
  * @returns {boolean} Does the tag already exist
+ * @throws Version is not valid
  */
 function tagExists(version) {
     if (! version) {

@@ -13,6 +13,7 @@ updateWiki(newVersion);
 /**
  * Update the wiki with new version number
  * @param {string} newVersion Version to update to
+ * @returns {void}
  */
 function updateWiki(newVersion) {
     const wikiDir = "./tmp/wiki";

@@ -46,6 +47,7 @@ function updateWiki(newVersion) {
 /**
  * Check if a directory exists and then delete it
  * @param {string} dir Directory to delete
+ * @returns {void}
  */
 function safeDelete(dir) {
     if (fs.existsSync(dir)) {
16  index.html
@@ -9,8 +9,24 @@
         <meta name="theme-color" id="theme-color" content="" />
         <meta name="description" content="Uptime Kuma monitoring tool" />
         <title>Uptime Kuma</title>
+        <style>
+            .noscript-message {
+                font-size: 20px;
+                text-align: center;
+                padding: 10px;
+                max-width: 500px;
+                margin: 0 auto;
+            }
+        </style>
     </head>
     <body>
+        <noscript>
+            <div class="noscript-message">
+                Sorry, you don't seem to have JavaScript enabled or your browser
+                doesn't support it.<br />This website requires JavaScript to function.
+                Please enable JavaScript in your browser settings to continue.
+            </div>
+        </noscript>
         <div id="app"></div>
         <script type="module" src="/src/main.js"></script>
     </body>
54  install.sh
@@ -3,15 +3,15 @@
 # The command is working on Windows PowerShell and Docker for Windows only.
 # curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh
 "echo" "-e" "====================="
-"echo" "-e" "Uptime Kuma Installer"
+"echo" "-e" "Uptime Kuma Install Script"
 "echo" "-e" "====================="
-"echo" "-e" "Supported OS: CentOS 7/8, Ubuntu >= 16.04 and Debian"
+"echo" "-e" "Supported OS: Ubuntu >= 16.04, Debian and CentOS/RHEL 7/8"
 "echo" "-e" "---------------------------------------"
 "echo" "-e" "This script is designed for Linux and basic usage."
 "echo" "-e" "For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation"
 "echo" "-e" "---------------------------------------"
 "echo" "-e" ""
-"echo" "-e" "Local - Install Uptime Kuma in your current machine with git, Node.js 14 and pm2"
+"echo" "-e" "Local - Install Uptime Kuma on your current machine with git, Node.js and pm2"
 "echo" "-e" "Docker - Install Uptime Kuma Docker container"
 "echo" "-e" ""
 if [ "$1" != "" ]; then

@@ -25,12 +25,9 @@ function checkNode {
     nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])')
     "echo" "-e" "Node Version: ""$nodeVersion"
     _0="12"
-    if [ $(($nodeVersion < $_0)) == 1 ]; then
+    if [ $(($nodeVersion <= $_0)) == 1 ]; then
         "echo" "-e" "Error: Required Node.js 14"
         "exit" "1"
     fi
-    if [ "$nodeVersion" == "12" ]; then
-        "echo" "-e" "Warning: NodeJS ""$nodeVersion"" is not tested."
-    fi
 }
 function deb {

@@ -50,8 +47,8 @@ fi
         "echo" "-e" "Installing Curl"
         apt --yes install curl
     fi
-    "echo" "-e" "Installing Node.js 14"
-    curl -sL https://deb.nodesource.com/setup_14.x | bash - > log.txt
+    "echo" "-e" "Installing Node.js 16"
+    curl -sL https://deb.nodesource.com/setup_16.x | bash - > log.txt
     apt --yes install nodejs
     node -v
     nodeCheckAgain=$(node -v)

@@ -75,7 +72,10 @@ if [ "$type" == "local" ]; then
     if [ -e "/etc/issue" ]; then
         os=$(head -n1 /etc/issue | cut -f 1 -d ' ')
         if [ "$os" == "Ubuntu" ]; then
             distribution="ubuntu"
+            # Get ubuntu version
+            . /etc/lsb-release
+            version="$DISTRIB_RELEASE"
         fi
         if [ "$os" == "Debian" ]; then
             distribution="debian"

@@ -85,6 +85,7 @@ fi
     arch=$(uname -i)
     "echo" "-e" "Your OS: ""$os"
     "echo" "-e" "Distribution: ""$distribution"
+    "echo" "-e" "Version: ""$version"
     "echo" "-e" "Arch: ""$arch"
     if [ "$3" != "" ]; then
         port="$3"

@@ -108,14 +109,27 @@ fi
     if [ "$nodeCheck" != "" ]; then
         "checkNode"
     else
-        curlCheck=$(curl --version)
-        if [ "$curlCheck" == "" ]; then
-            "echo" "-e" "Installing Curl"
-            yum -y -q install curl
+        dnfCheck=$(dnf --version)
+        # Use yum
+        if [ "$dnfCheck" == "" ]; then
+            curlCheck=$(curl --version)
+            if [ "$curlCheck" == "" ]; then
+                "echo" "-e" "Installing Curl"
+                yum -y -q install curl
+            fi
+            "echo" "-e" "Installing Node.js 16"
+            curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt
+            yum install -y -q nodejs
+        else
+            curlCheck=$(curl --version)
+            if [ "$curlCheck" == "" ]; then
+                "echo" "-e" "Installing Curl"
+                dnf -y install curl
+            fi
+            "echo" "-e" "Installing Node.js 16"
+            curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt
+            dnf install -y nodejs
         fi
-        "echo" "-e" "Installing Node.js 14"
-        curl -sL https://rpm.nodesource.com/setup_14.x | bash - > log.txt
-        yum install -y -q nodejs
         node -v
         nodeCheckAgain=$(node -v)
         if [ "$nodeCheckAgain" == "" ]; then

@@ -161,6 +175,12 @@ fi
         "echo" "-e" "Installing PM2"
         npm install pm2 -g && pm2 install pm2-logrotate
         pm2 startup
+    fi
+    # Check again
+    check=$(pm2 --version)
+    if [ "$check" == "" ]; then
+        "echo" "-e" "Error: pm2 is not found!"
+        exit 1
     fi
     mkdir -p $installPath
     cd $installPath
5732  package-lock.json  generated
File diff suppressed because it is too large.
84  package.json
@@ -1,13 +1,13 @@
 {
     "name": "uptime-kuma",
-    "version": "1.22.0-beta.0",
+    "version": "1.23.0-beta.1",
     "license": "MIT",
     "repository": {
         "type": "git",
         "url": "https://github.com/louislam/uptime-kuma.git"
     },
     "engines": {
-        "node": "14.* || >=16.*"
+        "node": "14 || 16 || 18 || >= 20.4.0"
     },
     "scripts": {
         "install-legacy": "npm install",

@@ -19,6 +19,7 @@
         "lint": "npm run lint:js && npm run lint:style",
         "dev": "concurrently -k -r \"wait-on tcp:3000 && npm run start-server-dev \" \"npm run start-frontend-dev\"",
         "start-frontend-dev": "cross-env NODE_ENV=development vite --host --config ./config/vite.config.js",
+        "start-frontend-devcontainer": "cross-env NODE_ENV=development DEVCONTAINER=1 vite --host --config ./config/vite.config.js",
         "start": "npm run start-server",
         "start-server": "node server/server.js",
         "start-server-dev": "cross-env NODE_ENV=development node server/server.js",

@@ -28,34 +29,34 @@
         "jest-backend": "cross-env TEST_BACKEND=1 jest --runInBand --detectOpenHandles --forceExit --config=./config/jest-backend.config.js",
         "tsc": "tsc",
         "vite-preview-dist": "vite preview --host --config ./config/vite.config.js",
-        "build-docker": "npm run build && npm run build-docker-debian && npm run build-docker-alpine",
-        "build-docker-alpine-base": "docker buildx build -f docker/alpine-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base-alpine . --push",
-        "build-docker-debian-base": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base-debian . --push",
+        "build-docker": "npm run build && npm run build-docker-full && npm run build-docker-slim",
+        "build-docker-base": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base2 --target base2 . --push",
+        "build-docker-base-slim": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base2-slim --target base2-slim . --push",
         "build-docker-builder-go": "docker buildx build -f docker/builder-go.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:builder-go . --push",
-        "build-docker-alpine": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:alpine -t louislam/uptime-kuma:1-alpine -t louislam/uptime-kuma:$VERSION-alpine --target release . --push",
-        "build-docker-debian": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma -t louislam/uptime-kuma:1 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:debian -t louislam/uptime-kuma:1-debian -t louislam/uptime-kuma:$VERSION-debian --target release . --push",
-        "build-docker-nightly": "npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly --target nightly . --push",
-        "build-docker-nightly-alpine": "docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly-alpine --target nightly . --push",
-        "build-docker-nightly-amd64": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:nightly-amd64 --target nightly . --push --progress plain",
+        "build-docker-slim": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2-slim -t louislam/uptime-kuma:$VERSION-slim --target release --build-arg BASE_IMAGE=louislam/uptime-kuma:base2-slim . --push",
+        "build-docker-full": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2 -t louislam/uptime-kuma:$VERSION --target release . --push",
+        "build-docker-nightly": "node ./extra/test-docker.js && npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly2 --target nightly . --push",
+        "build-docker-nightly-local": "npm run build && docker build -f docker/dockerfile -t louislam/uptime-kuma:nightly2 --target nightly .",
         "build-docker-pr-test": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64 -t louislam/uptime-kuma:pr-test --target pr-test . --push",
         "upload-artifacts": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:upload-artifact --build-arg VERSION --build-arg GITHUB_TOKEN --target upload-artifact . --progress plain",
-        "setup": "git checkout 1.21.3 && npm ci --production && npm run download-dist",
+        "setup": "git checkout 1.22.1 && npm ci --production && npm run download-dist",
         "download-dist": "node extra/download-dist.js",
         "mark-as-nightly": "node extra/mark-as-nightly.js",
         "reset-password": "node extra/reset-password.js",
         "remove-2fa": "node extra/remove-2fa.js",
         "compile-install-script": "@powershell -NoProfile -ExecutionPolicy Unrestricted -Command ./extra/compile-install-script.ps1",
+        "test-install-script-rockylinux": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/rockylinux.dockerfile .",
         "test-install-script-centos7": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/centos7.dockerfile .",
-        "test-install-script-alpine3": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/alpine3.dockerfile .",
+        "test-install-script-debian": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/debian.dockerfile .",
+        "test-install-script-debian-buster": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/debian-buster.dockerfile .",
         "test-install-script-ubuntu": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu.dockerfile .",
+        "test-install-script-ubuntu1804": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu1804.dockerfile .",
         "test-install-script-ubuntu1604": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu1604.dockerfile .",
-        "test-nodejs16": "docker build --progress plain -f test/ubuntu-nodejs16.dockerfile .",
         "simple-dns-server": "node extra/simple-dns-server.js",
         "simple-mqtt-server": "node extra/simple-mqtt-server.js",
         "update-language-files": "cd extra/update-language-files && node index.js && cross-env-shell eslint ../../src/languages/$npm_config_language.js --fix",
-        "ncu-patch": "npm-check-updates -u -t patch",
-        "release-final": "node extra/update-version.js && npm run build-docker && node ./extra/press-any-key.js && npm run upload-artifacts && node ./extra/update-wiki-version.js",
-        "release-beta": "node extra/beta/update-version.js && npm run build && node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:beta . --target release --push && node ./extra/press-any-key.js && npm run upload-artifacts",
+        "release-final": "node ./extra/test-docker.js && node extra/update-version.js && npm run build-docker && node ./extra/press-any-key.js && npm run upload-artifacts && node ./extra/update-wiki-version.js",
+        "release-beta": "node ./extra/test-docker.js && node extra/beta/update-version.js && npm run build && node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:beta . --target release --push && node ./extra/press-any-key.js && npm run upload-artifacts",
         "git-remove-tag": "git tag -d",
|
"git-remove-tag": "git tag -d",
|
||||||
"build-dist-and-restart": "npm run build && npm run start-server-dev",
|
"build-dist-and-restart": "npm run build && npm run start-server-dev",
|
||||||
"start-pr-test": "node extra/checkout-pr.js && npm install && npm run dev",
|
"start-pr-test": "node extra/checkout-pr.js && npm install && npm run dev",
|
||||||
@@ -65,11 +66,13 @@
|
|||||||
"cypress-open": "concurrently -k -r \"node test/prepare-test-server.js && node server/server.js --port=3002 --data-dir=./data/test/\" \"cypress open --config-file ./config/cypress.config.js\"",
|
"cypress-open": "concurrently -k -r \"node test/prepare-test-server.js && node server/server.js --port=3002 --data-dir=./data/test/\" \"cypress open --config-file ./config/cypress.config.js\"",
|
||||||
"build-healthcheck-armv7": "cross-env GOOS=linux GOARCH=arm GOARM=7 go build -x -o ./extra/healthcheck-armv7 ./extra/healthcheck.go",
|
"build-healthcheck-armv7": "cross-env GOOS=linux GOARCH=arm GOARM=7 go build -x -o ./extra/healthcheck-armv7 ./extra/healthcheck.go",
|
||||||
"deploy-demo-server": "node extra/deploy-demo-server.js",
|
"deploy-demo-server": "node extra/deploy-demo-server.js",
|
||||||
"sort-contributors": "node extra/sort-contributors.js"
|
"sort-contributors": "node extra/sort-contributors.js",
|
||||||
|
"quick-run-nightly": "docker run --rm --env NODE_ENV=development -p 3001:3001 louislam/uptime-kuma:nightly2",
|
||||||
|
"start-dev-container": "cd docker && docker-compose -f docker-compose-dev.yml up --force-recreate"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@grpc/grpc-js": "~1.7.3",
|
"@grpc/grpc-js": "~1.7.3",
|
||||||
"@louislam/ping": "~0.4.4-mod.0",
|
"@louislam/ping": "~0.4.4-mod.1",
|
||||||
"@louislam/sqlite3": "15.1.6",
|
"@louislam/sqlite3": "15.1.6",
|
||||||
"args-parser": "~1.3.0",
|
"args-parser": "~1.3.0",
|
||||||
"axios": "~0.27.0",
|
"axios": "~0.27.0",
|
||||||
@@ -84,50 +87,60 @@
|
|||||||
"command-exists": "~1.2.9",
|
"command-exists": "~1.2.9",
|
||||||
"compare-versions": "~3.6.0",
|
"compare-versions": "~3.6.0",
|
||||||
"compression": "~1.7.4",
|
"compression": "~1.7.4",
|
||||||
"croner": "^6.0.3",
|
"croner": "~6.0.5",
|
||||||
"dayjs": "~1.11.5",
|
"dayjs": "~1.11.5",
|
||||||
"dotenv": "~16.0.3",
|
"dotenv": "~16.0.3",
|
||||||
"express": "~4.17.3",
|
"express": "~4.17.3",
|
||||||
"express-basic-auth": "~1.2.1",
|
"express-basic-auth": "~1.2.1",
|
||||||
"express-static-gzip": "~2.1.7",
|
"express-static-gzip": "~2.1.7",
|
||||||
"form-data": "~4.0.0",
|
"form-data": "~4.0.0",
|
||||||
"gamedig": "^4.0.5",
|
"gamedig": "~4.0.5",
|
||||||
"http-graceful-shutdown": "~3.1.7",
|
"http-graceful-shutdown": "~3.1.7",
|
||||||
"http-proxy-agent": "~5.0.0",
|
"http-proxy-agent": "~5.0.0",
|
||||||
"https-proxy-agent": "~5.0.1",
|
"https-proxy-agent": "~5.0.1",
|
||||||
"iconv-lite": "~0.6.3",
|
"iconv-lite": "~0.6.3",
|
||||||
|
"isomorphic-ws": "^5.0.0",
|
||||||
"jsesc": "~3.0.2",
|
"jsesc": "~3.0.2",
|
||||||
|
"jsonata": "^2.0.3",
|
||||||
"jsonwebtoken": "~9.0.0",
|
"jsonwebtoken": "~9.0.0",
|
||||||
"jwt-decode": "~3.1.2",
|
"jwt-decode": "~3.1.2",
|
||||||
|
"kafkajs": "^2.2.4",
|
||||||
|
"knex": "^2.4.2",
|
||||||
"limiter": "~2.1.0",
|
"limiter": "~2.1.0",
|
||||||
|
"liquidjs": "^10.7.0",
|
||||||
"mongodb": "~4.14.0",
|
"mongodb": "~4.14.0",
|
||||||
"mqtt": "~4.3.7",
|
"mqtt": "~4.3.7",
|
||||||
"mssql": "~8.1.4",
|
"mssql": "~8.1.4",
|
||||||
"mysql2": "~2.3.3",
|
"mysql2": "~2.3.3",
|
||||||
"nanoid": "^3.3.4",
|
"nanoid": "~3.3.4",
|
||||||
"node-cloudflared-tunnel": "~1.0.9",
|
"node-cloudflared-tunnel": "~1.0.9",
|
||||||
"node-radius-client": "~1.0.0",
|
"node-radius-client": "~1.0.0",
|
||||||
"nodemailer": "~6.6.5",
|
"nodemailer": "~6.6.5",
|
||||||
|
"nostr-tools": "^1.13.1",
|
||||||
"notp": "~2.0.3",
|
"notp": "~2.0.3",
|
||||||
|
"openid-client": "^5.4.2",
|
||||||
"password-hash": "~1.2.2",
|
"password-hash": "~1.2.2",
|
||||||
"pg": "~8.8.0",
|
"pg": "~8.8.0",
|
||||||
"pg-connection-string": "~2.5.0",
|
"pg-connection-string": "~2.5.0",
|
||||||
|
"playwright-core": "~1.35.1",
|
||||||
"prom-client": "~13.2.0",
|
"prom-client": "~13.2.0",
|
||||||
"prometheus-api-metrics": "~3.2.1",
|
"prometheus-api-metrics": "~3.2.1",
|
||||||
"protobufjs": "~7.1.1",
|
"protobufjs": "~7.2.4",
|
||||||
"qs": "~6.10.4",
|
"qs": "~6.10.4",
|
||||||
"redbean-node": "~0.2.0",
|
"redbean-node": "~0.3.0",
|
||||||
"redis": "~4.5.1",
|
"redis": "~4.5.1",
|
||||||
|
"semver": "~7.5.4",
|
||||||
"socket.io": "~4.6.1",
|
"socket.io": "~4.6.1",
|
||||||
"socket.io-client": "~4.6.1",
|
"socket.io-client": "~4.6.1",
|
||||||
"socks-proxy-agent": "6.1.1",
|
"socks-proxy-agent": "6.1.1",
|
||||||
"tar": "~6.1.11",
|
"tar": "~6.1.11",
|
||||||
"tcp-ping": "~0.1.1",
|
"tcp-ping": "~0.1.1",
|
||||||
"thirty-two": "~1.0.2"
|
"thirty-two": "~1.0.2",
|
||||||
|
"ws": "^8.13.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@actions/github": "~5.0.1",
|
"@actions/github": "~5.0.1",
|
||||||
"@babel/eslint-parser": "~7.17.0",
|
"@babel/eslint-parser": "^7.22.7",
|
||||||
"@babel/preset-env": "^7.15.8",
|
"@babel/preset-env": "^7.15.8",
|
||||||
"@fortawesome/fontawesome-svg-core": "~1.2.36",
|
"@fortawesome/fontawesome-svg-core": "~1.2.36",
|
||||||
"@fortawesome/free-regular-svg-icons": "~5.15.4",
|
"@fortawesome/free-regular-svg-icons": "~5.15.4",
|
||||||
@@ -135,12 +148,11 @@
|
|||||||
"@fortawesome/vue-fontawesome": "~3.0.0-5",
|
"@fortawesome/vue-fontawesome": "~3.0.0-5",
|
||||||
"@popperjs/core": "~2.10.2",
|
"@popperjs/core": "~2.10.2",
|
||||||
"@types/bootstrap": "~5.1.9",
|
"@types/bootstrap": "~5.1.9",
|
||||||
"@vitejs/plugin-legacy": "~2.1.0",
|
"@vitejs/plugin-legacy": "~4.1.0",
|
||||||
"@vitejs/plugin-vue": "~3.1.0",
|
"@vitejs/plugin-vue": "~4.2.3",
|
||||||
"@vue/compiler-sfc": "~3.2.36",
|
"@vue/compiler-sfc": "~3.3.4",
|
||||||
"@vuepic/vue-datepicker": "~3.4.8",
|
"@vuepic/vue-datepicker": "~3.4.8",
|
||||||
"aedes": "^0.46.3",
|
"aedes": "^0.46.3",
|
||||||
"babel-plugin-rewire": "~1.2.0",
|
|
||||||
"bootstrap": "5.1.3",
|
"bootstrap": "5.1.3",
|
||||||
"chart.js": "~4.2.1",
|
"chart.js": "~4.2.1",
|
||||||
"chartjs-adapter-dayjs-4": "~1.0.4",
|
"chartjs-adapter-dayjs-4": "~1.0.4",
|
||||||
@@ -148,16 +160,17 @@
|
|||||||
"core-js": "~3.26.1",
|
"core-js": "~3.26.1",
|
||||||
"cronstrue": "~2.24.0",
|
"cronstrue": "~2.24.0",
|
||||||
"cross-env": "~7.0.3",
|
"cross-env": "~7.0.3",
|
||||||
"cypress": "^10.1.0",
|
"cypress": "^12.17.0",
|
||||||
"delay": "^5.0.0",
|
"delay": "^5.0.0",
|
||||||
"dns2": "~2.0.1",
|
"dns2": "~2.0.1",
|
||||||
"dompurify": "~2.4.3",
|
"dompurify": "~2.4.3",
|
||||||
"eslint": "~8.14.0",
|
"eslint": "~8.14.0",
|
||||||
|
"eslint-plugin-jsdoc": "^46.4.6",
|
||||||
"eslint-plugin-vue": "~8.7.1",
|
"eslint-plugin-vue": "~8.7.1",
|
||||||
"favico.js": "~0.3.10",
|
"favico.js": "~0.3.10",
|
||||||
"jest": "~27.2.5",
|
"jest": "~29.6.1",
|
||||||
"marked": "~4.2.5",
|
"marked": "~4.2.5",
|
||||||
"node-ssh": "~13.0.1",
|
"node-ssh": "~13.1.0",
|
||||||
"postcss-html": "~1.5.0",
|
"postcss-html": "~1.5.0",
|
||||||
"postcss-rtlcss": "~3.7.2",
|
"postcss-rtlcss": "~3.7.2",
|
||||||
"postcss-scss": "~4.0.4",
|
"postcss-scss": "~4.0.4",
|
||||||
@@ -165,15 +178,16 @@
|
|||||||
"qrcode": "~1.5.0",
|
"qrcode": "~1.5.0",
|
||||||
"rollup-plugin-visualizer": "^5.6.0",
|
"rollup-plugin-visualizer": "^5.6.0",
|
||||||
"sass": "~1.42.1",
|
"sass": "~1.42.1",
|
||||||
"stylelint": "~14.7.1",
|
"stylelint": "^15.10.1",
|
||||||
"stylelint-config-standard": "~25.0.0",
|
"stylelint-config-standard": "~25.0.0",
|
||||||
"terser": "~5.15.0",
|
"terser": "~5.15.0",
|
||||||
"timezones-list": "~3.0.1",
|
"timezones-list": "~3.0.1",
|
||||||
"typescript": "~4.4.4",
|
"typescript": "~4.4.4",
|
||||||
"v-pagination-3": "~0.1.7",
|
"v-pagination-3": "~0.1.7",
|
||||||
"vite": "~3.2.7",
|
"vite": "~4.4.1",
|
||||||
|
"vite-plugin-commonjs": "^0.8.0",
|
||||||
"vite-plugin-compression": "^0.5.1",
|
"vite-plugin-compression": "^0.5.1",
|
||||||
"vue": "~3.2.47",
|
"vue": "~3.3.4",
|
||||||
"vue-chartjs": "~5.2.0",
|
"vue-chartjs": "~5.2.0",
|
||||||
"vue-confirm-dialog": "~1.0.2",
|
"vue-confirm-dialog": "~1.0.2",
|
||||||
"vue-contenteditable": "~3.0.4",
|
"vue-contenteditable": "~3.0.4",
|
||||||
@@ -2,15 +2,16 @@ const basicAuth = require("express-basic-auth");
  const passwordHash = require("./password-hash");
  const { R } = require("redbean-node");
  const { setting } = require("./util-server");
+ const { log } = require("../src/util");
  const { loginRateLimiter, apiRateLimiter } = require("./rate-limiter");
  const { Settings } = require("./settings");
  const dayjs = require("dayjs");

  /**
  * Login to web app
- * @param {string} username
+ * @param {string} username Username to login with
- * @param {string} password
+ * @param {string} password Password to login with
- * @returns {Promise<(Bean|null)>}
+ * @returns {Promise<(Bean|null)>} User or null if login failed
  */
  exports.login = async function (username, password) {
  if (typeof username !== "string" || typeof password !== "string") {
@@ -38,6 +39,7 @@ exports.login = async function (username, password) {
  /**
  * Validate a provided API key
  * @param {string} key API key to verify
+ * @returns {boolean} API is ok?
  */
  async function verifyAPIKey(key) {
  if (typeof key !== "string") {
@@ -72,21 +74,26 @@ async function verifyAPIKey(key) {

  /**
  * Custom authorizer for express-basic-auth
- * @param {string} username
+ * @param {string} username Username to login with
- * @param {string} password
+ * @param {string} password Password to login with
- * @param {authCallback} callback
+ * @param {authCallback} callback Callback to handle login result
+ * @returns {void}
  */
  function apiAuthorizer(username, password, callback) {
  // API Rate Limit
  apiRateLimiter.pass(null, 0).then((pass) => {
  if (pass) {
  verifyAPIKey(password).then((valid) => {
+ if (!valid) {
+ log.warn("api-auth", "Failed API auth attempt: invalid API Key");
+ }
  callback(null, valid);
  // Only allow a set number of api requests per minute
  // (currently set to 60)
  apiRateLimiter.removeTokens(1);
  });
  } else {
+ log.warn("api-auth", "Failed API auth attempt: rate limit exceeded");
  callback(null, false);
  }
  });
@@ -94,9 +101,10 @@ function apiAuthorizer(username, password, callback) {

  /**
  * Custom authorizer for express-basic-auth
- * @param {string} username
+ * @param {string} username Username to login with
- * @param {string} password
+ * @param {string} password Password to login with
- * @param {authCallback} callback
+ * @param {authCallback} callback Callback to handle login result
+ * @returns {void}
  */
  function userAuthorizer(username, password, callback) {
  // Login Rate Limit
@@ -106,10 +114,12 @@ function userAuthorizer(username, password, callback) {
  callback(null, user != null);

  if (user == null) {
+ log.warn("basic-auth", "Failed basic auth attempt: invalid username/password");
  loginRateLimiter.removeTokens(1);
  }
  });
  } else {
+ log.warn("basic-auth", "Failed basic auth attempt: rate limit exceeded");
  callback(null, false);
  }
  });
@@ -119,7 +129,8 @@ function userAuthorizer(username, password, callback) {
  * Use basic auth if auth is not disabled
  * @param {express.Request} req Express request object
  * @param {express.Response} res Express response object
- * @param {express.NextFunction} next
+ * @param {express.NextFunction} next Next handler in chain
+ * @returns {void}
  */
  exports.basicAuth = async function (req, res, next) {
  const middleware = basicAuth({
@@ -141,7 +152,8 @@ exports.basicAuth = async function (req, res, next) {
  * Use use API Key if API keys enabled, else use basic auth
  * @param {express.Request} req Express request object
  * @param {express.Response} res Express response object
- * @param {express.NextFunction} next
+ * @param {express.NextFunction} next Next handler in chain
+ * @returns {void}
  */
  exports.apiAuth = async function (req, res, next) {
  if (!await Settings.get("disableAuth")) {
@@ -15,6 +15,7 @@ class CacheableDnsHttpAgent {

  /**
  * Register/Disable cacheable to global agents
+ * @returns {void}
  */
  static async update() {
  log.debug("CacheableDnsHttpAgent", "update");
@@ -40,14 +41,15 @@ class CacheableDnsHttpAgent {
  /**
  * Attach cacheable to HTTP agent
  * @param {http.Agent} agent Agent to install
+ * @returns {void}
  */
  static install(agent) {
  this.cacheable.install(agent);
  }

  /**
- * @var {https.AgentOptions} agentOptions
+ * @param {https.AgentOptions} agentOptions Options to pass to HTTPS agent
- * @return {https.Agent}
+ * @returns {https.Agent} The new HTTPS agent
  */
  static getHttpsAgent(agentOptions) {
  if (!this.enable) {
@@ -63,8 +65,8 @@ class CacheableDnsHttpAgent {
  }

  /**
- * @var {http.AgentOptions} agentOptions
+ * @param {http.AgentOptions} agentOptions Options to pass to the HTTP agent
- * @return {https.Agents}
+ * @returns {https.Agents} The new HTTP agent
  */
  static getHttpAgent(agentOptions) {
  if (!this.enable) {
@@ -1,27 +1,33 @@
  const { setSetting, setting } = require("./util-server");
  const axios = require("axios");
  const compareVersions = require("compare-versions");
+ const { log } = require("../src/util");

  exports.version = require("../package.json").version;
  exports.latestVersion = null;

+ // How much time in ms to wait between update checks
+ const UPDATE_CHECKER_INTERVAL_MS = 1000 * 60 * 60 * 48;
+ const UPDATE_CHECKER_LATEST_VERSION_URL = "https://uptime.kuma.pet/version";

  let interval;

- /** Start 48 hour check interval */
  exports.startInterval = () => {
  let check = async () => {
+ if (await setting("checkUpdate") === false) {
+ return;
+ }

+ log.debug("update-checker", "Retrieving latest versions");

  try {
- const res = await axios.get("https://uptime.kuma.pet/version");
+ const res = await axios.get(UPDATE_CHECKER_LATEST_VERSION_URL);

  // For debug
  if (process.env.TEST_CHECK_VERSION === "1") {
  res.data.slow = "1000.0.0";
  }

- if (await setting("checkUpdate") === false) {
- return;
- }

  let checkBeta = await setting("checkBeta");

  if (checkBeta && res.data.beta) {
@@ -35,12 +41,14 @@ exports.startInterval = () => {
  exports.latestVersion = res.data.slow;
  }

- } catch (_) { }
+ } catch (_) {
+ log.info("update-checker", "Failed to check for new versions");
+ }

  };

  check();
- interval = setInterval(check, 3600 * 1000 * 48);
+ interval = setInterval(check, UPDATE_CHECKER_INTERVAL_MS);
  };

  /**
@@ -12,7 +12,7 @@ const checkVersion = require("./check-version");
  /**
  * Send list of notification providers to client
  * @param {Socket} socket Socket.io socket instance
- * @returns {Promise<Bean[]>}
+ * @returns {Promise<Bean[]>} List of notifications
  */
  async function sendNotificationList(socket) {
  const timeLogger = new TimeLogger();
@@ -40,8 +40,8 @@ async function sendNotificationList(socket) {
  * Send Heartbeat History list to socket
  * @param {Socket} socket Socket.io instance
  * @param {number} monitorID ID of monitor to send heartbeat history
- * @param {boolean} [toUser=false] True = send to all browsers with the same user id, False = send to the current browser only
+ * @param {boolean} toUser True = send to all browsers with the same user id, False = send to the current browser only
- * @param {boolean} [overwrite=false] Overwrite client-side's heartbeat list
+ * @param {boolean} overwrite Overwrite client-side's heartbeat list
  * @returns {Promise<void>}
  */
  async function sendHeartbeatList(socket, monitorID, toUser = false, overwrite = false) {
@@ -71,8 +71,8 @@ async function sendHeartbeatList(socket, monitorID, toUser = false, overwrite =
  * Important Heart beat list (aka event list)
  * @param {Socket} socket Socket.io instance
  * @param {number} monitorID ID of monitor to send heartbeat history
- * @param {boolean} [toUser=false] True = send to all browsers with the same user id, False = send to the current browser only
+ * @param {boolean} toUser True = send to all browsers with the same user id, False = send to the current browser only
- * @param {boolean} [overwrite=false] Overwrite client-side's heartbeat list
+ * @param {boolean} overwrite Overwrite client-side's heartbeat list
  * @returns {Promise<void>}
  */
  async function sendImportantHeartbeatList(socket, monitorID, toUser = false, overwrite = false) {
@@ -100,7 +100,7 @@ async function sendImportantHeartbeatList(socket, monitorID, toUser = false, ove
  /**
  * Emit proxy list to client
  * @param {Socket} socket Socket.io socket instance
- * @return {Promise<Bean[]>}
+ * @returns {Promise<Bean[]>} List of proxies
  */
  async function sendProxyList(socket) {
  const timeLogger = new TimeLogger();
@@ -141,12 +141,21 @@ async function sendAPIKeyList(socket) {
  /**
  * Emits the version information to the client.
  * @param {Socket} socket Socket.io socket instance
+ * @param {boolean} hideVersion Should we hide the version information in the response?
  * @returns {Promise<void>}
  */
- async function sendInfo(socket) {
+ async function sendInfo(socket, hideVersion = false) {
+ let version;
+ let latestVersion;

+ if (!hideVersion) {
+ version = checkVersion.version;
+ latestVersion = checkVersion.latestVersion;
+ }

  socket.emit("info", {
- version: checkVersion.version,
+ version,
- latestVersion: checkVersion.latestVersion,
+ latestVersion,
  primaryBaseURL: await setting("primaryBaseURL"),
  serverTimezone: await server.getTimezone(),
  serverTimezoneOffset: server.getTimezoneOffset(),
@@ -156,7 +165,7 @@ async function sendInfo(socket) {
  /**
  * Send list of docker hosts to client
  * @param {Socket} socket Socket.io socket instance
- * @returns {Promise<Bean[]>}
+ * @returns {Promise<Bean[]>} List of docker hosts
  */
  async function sendDockerHostList(socket) {
  const timeLogger = new TimeLogger();
@@ -1,4 +1,5 @@
- const args = require("args-parser")(process.argv);
+ // Interop with browser
+ const args = (typeof process !== "undefined") ? require("args-parser")(process.argv) : {};
  const demoMode = args["demo"] || false;

  const badgeConstants = {
@@ -3,7 +3,9 @@ const { R } = require("redbean-node");
  const { setSetting, setting } = require("./util-server");
  const { log, sleep } = require("../src/util");
  const knex = require("knex");
- const { PluginsManager } = require("./plugins-manager");
+ const path = require("path");
+ const { EmbeddedMariaDB } = require("./embedded-mariadb");
+ const mysql = require("mysql2/promise");

  /**
  * Database & App Data Folder
@@ -22,7 +24,11 @@ class Database {
  */
  static uploadDir;

- static path;
+ static screenshotDir;

+ static sqlitePath;

+ static dockerTLSDir;

  /**
  * @type {boolean}
@@ -30,11 +36,13 @@ class Database {
  static patched = false;

  /**
+ * SQLite only
  * Add patch filename in key
  * Values:
  * true: Add it regardless of order
  * false: Do nothing
  * { parents: []}: Need parents before add it
+ * @deprecated
  */
  static patchList = {
  "patch-setting-value-type.sql": true,
@@ -70,6 +78,13 @@ class Database {
  "patch-monitor-tls.sql": true,
  "patch-maintenance-cron.sql": true,
  "patch-add-parent-monitor.sql": true,
+ "patch-add-invert-keyword.sql": true,
+ "patch-added-json-query.sql": true,
+ "patch-added-kafka-producer.sql": true,
+ "patch-add-certificate-expiry-status-page.sql": true,
+ "patch-monitor-oauth-cc.sql": true,
+ "patch-add-timeout-monitor.sql": true,
+ "patch-add-gamedig-given-port.sql": true, // The last file so far converted to a knex migration file
  };

  /**
@@ -80,64 +95,172 @@ class Database {

  static noReject = true;

+ static dbConfig = {};

+ static knexMigrationsPath = "./db/knex_migrations";

  /**
- * Initialize the database
+ * Initialize the data directory
- * @param {Object} args Arguments to initialize DB with
+ * @param {object} args Arguments to initialize DB with
+ * @returns {void}
  */
- static init(args) {
+ static initDataDir(args) {
  // Data Directory (must be end with "/")
  Database.dataDir = process.env.DATA_DIR || args["data-dir"] || "./data/";

- // Plugin feature is working only if the dataDir = "./data";
+ Database.sqlitePath = path.join(Database.dataDir, "kuma.db");
- if (Database.dataDir !== "./data/") {
- log.warn("PLUGIN", "Warning: In order to enable plugin feature, you need to use the default data directory: ./data/");
- PluginsManager.disable = true;
- }

- Database.path = Database.dataDir + "kuma.db";
  if (! fs.existsSync(Database.dataDir)) {
  fs.mkdirSync(Database.dataDir, { recursive: true });
  }

- Database.uploadDir = Database.dataDir + "upload/";
+ Database.uploadDir = path.join(Database.dataDir, "upload/");

  if (! fs.existsSync(Database.uploadDir)) {
  fs.mkdirSync(Database.uploadDir, { recursive: true });
  }

+ // Create screenshot dir
+ Database.screenshotDir = path.join(Database.dataDir, "screenshots/");
+ if (! fs.existsSync(Database.screenshotDir)) {
+ fs.mkdirSync(Database.screenshotDir, { recursive: true });
+ }

+ Database.dockerTLSDir = path.join(Database.dataDir, "docker-tls/");
+ if (! fs.existsSync(Database.dockerTLSDir)) {
+ fs.mkdirSync(Database.dockerTLSDir, { recursive: true });
+ }

  log.info("db", `Data Dir: ${Database.dataDir}`);
  }

+ /**
+ *
+ */
+ static readDBConfig() {
+ let dbConfig;

+ let dbConfigString = fs.readFileSync(path.join(Database.dataDir, "db-config.json")).toString("utf-8");
+ dbConfig = JSON.parse(dbConfigString);

+ if (typeof dbConfig !== "object") {
+ throw new Error("Invalid db-config.json, it must be an object");
+ }

+ if (typeof dbConfig.type !== "string") {
+ throw new Error("Invalid db-config.json, type must be a string");
+ }
+ return dbConfig;
+ }

+ /**
+ * @param dbConfig
+ */
+ static writeDBConfig(dbConfig) {
+ fs.writeFileSync(path.join(Database.dataDir, "db-config.json"), JSON.stringify(dbConfig, null, 4));
+ }

  /**
  * Connect to the database
- * @param {boolean} [testMode=false] Should the connection be
+ * @param {boolean} testMode Should the connection be
  * started in test mode?
- * @param {boolean} [autoloadModels=true] Should models be
+ * @param {boolean} autoloadModels Should models be
  * automatically loaded?
- * @param {boolean} [noLog=false] Should logs not be output?
+ * @param {boolean} noLog Should logs not be output?
  * @returns {Promise<void>}
  */
  static async connect(testMode = false, autoloadModels = true, noLog = false) {
  const acquireConnectionTimeout = 120 * 1000;
+ let dbConfig;
+ try {
+ dbConfig = this.readDBConfig();
+ Database.dbConfig = dbConfig;
+ } catch (err) {
+ log.warn("db", err.message);
+ dbConfig = {
+ type: "sqlite",
+ };
+ }

- const Dialect = require("knex/lib/dialects/sqlite3/index.js");
+ let config = {};
- Dialect.prototype._driver = () => require("@louislam/sqlite3");

- const knexInstance = knex({
+ log.info("db", `Database Type: ${dbConfig.type}`);
- client: Dialect,
- connection: {
+ if (dbConfig.type === "sqlite") {
- filename: Database.path,
- acquireConnectionTimeout: acquireConnectionTimeout,
+ if (! fs.existsSync(Database.sqlitePath)) {
- },
+ log.info("server", "Copying Database");
- useNullAsDefault: true,
+ fs.copyFileSync(Database.templatePath, Database.sqlitePath);
- pool: {
- min: 1,
- max: 1,
- idleTimeoutMillis: 120 * 1000,
- propagateCreateError: false,
- acquireTimeoutMillis: acquireConnectionTimeout,
  }
- });

+ const Dialect = require("knex/lib/dialects/sqlite3/index.js");
+ Dialect.prototype._driver = () => require("@louislam/sqlite3");

+ config = {
+ client: Dialect,
+ connection: {
+ filename: Database.sqlitePath,
+ acquireConnectionTimeout: acquireConnectionTimeout,
+ },
+ useNullAsDefault: true,
+ pool: {
+ min: 1,
+ max: 1,
+ idleTimeoutMillis: 120 * 1000,
+ propagateCreateError: false,
+ acquireTimeoutMillis: acquireConnectionTimeout,
+ }
+ };
+ } else if (dbConfig.type === "mariadb") {
+ if (!/^\w+$/.test(dbConfig.dbName)) {
+ throw Error("Invalid database name. A database name can only consist of letters, numbers and underscores");
+ }

+ const connection = await mysql.createConnection({
+ host: dbConfig.hostname,
+ port: dbConfig.port,
+ user: dbConfig.username,
+ password: dbConfig.password,
+ });

+ await connection.execute("CREATE DATABASE IF NOT EXISTS " + dbConfig.dbName + " CHARACTER SET utf8mb4");
+ connection.end();

+ config = {
+ client: "mysql2",
+ connection: {
+ host: dbConfig.hostname,
+ port: dbConfig.port,
+ user: dbConfig.username,
+ password: dbConfig.password,
+ database: dbConfig.dbName,
+ }
+ };
+ } else if (dbConfig.type === "embedded-mariadb") {
+ let embeddedMariaDB = EmbeddedMariaDB.getInstance();
+ await embeddedMariaDB.start();
+ log.info("mariadb", "Embedded MariaDB started");
+ config = {
+ client: "mysql2",
+ connection: {
+ socketPath: embeddedMariaDB.socketPath,
+ user: "node",
+ database: "kuma",
+ }
+ };
+ } else {
+ throw new Error("Unknown Database type: " + dbConfig.type);
+ }

+ // Set to utf8mb4 for MariaDB
+ if (dbConfig.type.endsWith("mariadb")) {
+ config.pool = {
+ afterCreate(conn, done) {
+ conn.query("SET CHARACTER SET utf8mb4;", (err) => done(err, conn));
+ },
+ };
+ }

+ const knexInstance = knex(config);

  R.setup(knexInstance);

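For reference, a minimal sketch of the db-config.json file that the readDBConfig() / connect() code above reads from the data directory. The field names (type, hostname, port, username, password, dbName) come straight from the diff; the values below are placeholders, not taken from this branch:

{
    "type": "mariadb",
    "hostname": "127.0.0.1",
    "port": 3306,
    "username": "kuma",
    "password": "changeme",
    "dbName": "kuma"
}

When the file is missing or unreadable, connect() falls back to { "type": "sqlite" }; "embedded-mariadb" is the third accepted type and needs no connection fields because it connects through the embedded server's socket.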
@@ -152,6 +275,18 @@ class Database {
  await R.autoloadModels("./server/model");
  }

+ if (dbConfig.type === "sqlite") {
+ await this.initSQLite(testMode, noLog);
+ } else if (dbConfig.type.endsWith("mariadb")) {
+ await this.initMariaDB();
+ }
+ }

+ /**
+ * @param testMode
+ * @param noLog
+ */
+ static async initSQLite(testMode, noLog) {
  await R.exec("PRAGMA foreign_keys = ON");
  if (testMode) {
  // Change to MEMORY
@@ -161,12 +296,12 @@ class Database {
  await R.exec("PRAGMA journal_mode = WAL");
  }
  await R.exec("PRAGMA cache_size = -12000");
- await R.exec("PRAGMA auto_vacuum = FULL");
+ await R.exec("PRAGMA auto_vacuum = INCREMENTAL");

  // This ensures that an operating system crash or power failure will not corrupt the database.
  // FULL synchronous is very safe, but it is also slower.
  // Read more: https://sqlite.org/pragma.html#pragma_synchronous
- await R.exec("PRAGMA synchronous = FULL");
+ await R.exec("PRAGMA synchronous = NORMAL");

  if (!noLog) {
  log.info("db", "SQLite config:");
@@ -176,8 +311,56 @@ class Database {
  }
  }

- /** Patch the database */
+ /**
+ *
+ */
+ static async initMariaDB() {
+ log.debug("db", "Checking if MariaDB database exists...");

+ let hasTable = await R.hasTable("docker_host");
+ if (!hasTable) {
+ const { createTables } = require("../db/knex_init_db");
+ await createTables();
+ } else {
+ log.debug("db", "MariaDB database already exists");
+ }
+ }

+ /**
+ * Patch the database
+ * @returns {void}
+ */
  static async patch() {
+ // Still need to keep this for old versions of Uptime Kuma
+ if (Database.dbConfig.type === "sqlite") {
+ await this.patchSqlite();
+ }

+ // Using knex migrations
+ // https://knexjs.org/guide/migrations.html
+ // https://gist.github.com/NigelEarle/70db130cc040cc2868555b29a0278261
+ try {
+ await R.knex.migrate.latest({
+ directory: Database.knexMigrationsPath,
+ });
+ } catch (e) {
+ log.error("db", "Database migration failed");
+ throw e;
+ }
+ }

+ /**
+ * @returns {Promise<void>}
+ */
+ static async rollbackLatestPatch() {

+ }

+ /**
+ * Patch the database for SQLite
+ * @deprecated
+ */
+ static async patchSqlite() {
  let version = parseInt(await setting("database_version"));

  if (! version) {
@@ -197,7 +380,7 @@ class Database {
  // Try catch anything here
  try {
  for (let i = version + 1; i <= this.latestVersion; i++) {
- const sqlFile = `./db/patch${i}.sql`;
+ const sqlFile = `./db/old_migrations/patch${i}.sql`;
  log.info("db", `Patching ${sqlFile}`);
  await Database.importSQLFile(sqlFile);
  log.info("db", `Patched ${sqlFile}`);
@@ -214,17 +397,18 @@ class Database {
  }
  }

- await this.patch2();
+ await this.patchSqlite2();
  await this.migrateNewStatusPage();
  }

  /**
  * Patch DB using new process
  * Call it from patch() only
+ * @deprecated
  * @private
  * @returns {Promise<void>}
  */
- static async patch2() {
+ static async patchSqlite2() {
  log.info("db", "Database Patch 2.0 Process");
  let databasePatchedFiles = await setting("databasePatchedFiles");

@@ -258,6 +442,7 @@ class Database {
  }

  /**
+ * SQlite only
  * Migrate status page value in setting to "status_page" table
  * @returns {Promise<void>}
  */
@@ -329,8 +514,8 @@ class Database {
  * Patch database using new patching process
  * Used it patch2() only
  * @private
- * @param sqlFilename
+ * @param {string} sqlFilename Name of SQL file to load
- * @param databasePatchedFiles
+ * @param {object} databasePatchedFiles Patch status of database files
  * @returns {Promise<void>}
  */
  static async patch2Recursion(sqlFilename, databasePatchedFiles) {
@@ -354,7 +539,7 @@ class Database {

  log.info("db", sqlFilename + " is patching");
  this.patched = true;
- await this.importSQLFile("./db/" + sqlFilename);
+ await this.importSQLFile("./db/old_migrations/" + sqlFilename);
  databasePatchedFiles[sqlFilename] = true;
  log.info("db", sqlFilename + " was patched successfully");

@@ -365,7 +550,7 @@ class Database {

  /**
  * Load an SQL file and execute it
- * @param filename Filename of SQL file to import
+ * @param {string} filename Filename of SQL file to import
  * @returns {Promise<void>}
  */
  static async importSQLFile(filename) {
@@ -399,7 +584,7 @@ class Database {

  /**
  * Aquire a direct connection to database
- * @returns {any}
+ * @returns {any} Database connection
  */
  static getBetterSQLite3Database() {
  return R.knex.client.acquireConnection();
@@ -436,10 +621,13 @@ class Database {
  process.removeListener("unhandledRejection", listener);
  }

- /** Get the size of the database */
+ /**
+ * Get the size of the database
+ * @returns {number} Size of database
+ */
  static getSize() {
  log.debug("db", "Database.getSize()");
- let stats = fs.statSync(Database.path);
+ let stats = fs.statSync(Database.sqlitePath);
  log.debug("db", stats);
  return stats.size;
  }
@@ -451,6 +639,18 @@ class Database {
  static async shrink() {
  await R.exec("VACUUM");
  }

+ /**
+ *
+ */
+ static sqlHourOffset() {
+ if (this.dbConfig.client === "sqlite3") {
+ return "DATETIME('now', ? || ' hours')";
+ } else {
+ return "DATE_ADD(NOW(), INTERVAL ? HOUR)";
+ }
+ }

  }

  module.exports = Database;
@@ -2,14 +2,22 @@ const axios = require("axios");
  const { R } = require("redbean-node");
  const version = require("../package.json").version;
  const https = require("https");
+ const fs = require("fs");
+ const path = require("path");
+ const Database = require("./database");

  class DockerHost {

+ static CertificateFileNameCA = "ca.pem";
+ static CertificateFileNameCert = "cert.pem";
+ static CertificateFileNameKey = "key.pem";

  /**
  * Save a docker host
- * @param {Object} dockerHost Docker host to save
+ * @param {object} dockerHost Docker host to save
  * @param {?number} dockerHostID ID of the docker host to update
  * @param {number} userID ID of the user who adds the docker host
- * @returns {Promise<Bean>}
+ * @returns {Promise<Bean>} Updated docker host
  */
  static async save(dockerHost, dockerHostID, userID) {
  let bean;
@@ -56,7 +64,7 @@ class DockerHost {

  /**
  * Fetches the amount of containers on the Docker host
- * @param {Object} dockerHost Docker host to check for
+ * @param {object} dockerHost Docker host to check for
  * @returns {number} Total amount of containers on the host
  */
  static async testDockerHost(dockerHost) {
@@ -66,10 +74,6 @@ class DockerHost {
  "Accept": "*/*",
  "User-Agent": "Uptime-Kuma/" + version
  },
- httpsAgent: new https.Agent({
- maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
- rejectUnauthorized: false,
- }),
  };

  if (dockerHost.dockerType === "socket") {
@@ -77,6 +81,7 @@ class DockerHost {
  } else if (dockerHost.dockerType === "tcp") {
  options.baseURL = DockerHost.patchDockerURL(dockerHost.dockerDaemon);
  }
+ options.httpsAgent = new https.Agent(DockerHost.getHttpsAgentOptions(dockerHost.dockerType, options.baseURL));

  let res = await axios.request(options);

@@ -103,6 +108,8 @@ class DockerHost {
  /**
  * Since axios 0.27.X, it does not accept `tcp://` protocol.
  * Change it to `http://` on the fly in order to fix it. (https://github.com/louislam/uptime-kuma/issues/2165)
+ * @param {any} url URL to fix
+ * @returns {any} URL with tcp:// replaced by http://
  */
  static patchDockerURL(url) {
  if (typeof url === "string") {
@@ -111,6 +118,52 @@ class DockerHost {
  }
  return url;
  }

+ /**
+ * Returns HTTPS agent options with client side TLS parameters if certificate files
+ * for the given host are available under a predefined directory path.
+ *
+ * The base path where certificates are looked for can be set with the
+ * 'DOCKER_TLS_DIR_PATH' environmental variable or defaults to 'data/docker-tls/'.
+ *
+ * If a directory in this path exists with a name matching the FQDN of the docker host
+ * (e.g. the FQDN of 'https://example.com:2376' is 'example.com' so the directory
+ * 'data/docker-tls/example.com/' would be searched for certificate files),
+ * then 'ca.pem', 'key.pem' and 'cert.pem' files are included in the agent options.
+ * File names can also be overridden via 'DOCKER_TLS_FILE_NAME_(CA|KEY|CERT)'.
+ * @param {string} dockerType i.e. "tcp" or "socket"
+ * @param {string} url The docker host URL rewritten to https://
+ * @returns {object} HTTP agent options
+ */
+ static getHttpsAgentOptions(dockerType, url) {
+ let baseOptions = {
+ maxCachedSessions: 0,
+ rejectUnauthorized: true
+ };
+ let certOptions = {};

+ let dirName = (new URL(url)).hostname;

+ let caPath = path.join(Database.dockerTLSDir, dirName, DockerHost.CertificateFileNameCA);
+ let certPath = path.join(Database.dockerTLSDir, dirName, DockerHost.CertificateFileNameCert);
+ let keyPath = path.join(Database.dockerTLSDir, dirName, DockerHost.CertificateFileNameKey);

+ if (dockerType === "tcp" && fs.existsSync(caPath) && fs.existsSync(certPath) && fs.existsSync(keyPath)) {
+ let ca = fs.readFileSync(caPath);
+ let key = fs.readFileSync(keyPath);
+ let cert = fs.readFileSync(certPath);
+ certOptions = {
+ ca,
+ key,
+ cert
+ };
+ }

+ return {
+ ...baseOptions,
+ ...certOptions
+ };
+ }
  }

  module.exports = {
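A worked example of the client-certificate lookup described in the getHttpsAgentOptions() doc comment above (the host name is a placeholder, not taken from this branch): for a Docker host configured as tcp://example.com:2376, the hostname resolves to example.com, so the following files would be picked up:

data/docker-tls/example.com/ca.pem
data/docker-tls/example.com/cert.pem
data/docker-tls/example.com/key.pem

If any of the three files is missing, or the host uses a socket connection, the agent falls back to the base options only (rejectUnauthorized: true, no client certificate).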
Some files were not shown because too many files have changed in this diff.