Mirror of https://github.com/louislam/uptime-kuma.git, synced 2025-09-10 12:56:13 +08:00

Compare commits: 580 commits

.devcontainer/README.md (new file, 28 lines)
@@ -0,0 +1,28 @@

# Codespaces

You can modifiy Uptime Kuma in your browser without setting up a local development.

1. Click `Code` -> `Create codespace on master`
2. Wait a few minutes until you see there are two exposed ports
3. Go to the `3000` url, see if it is working

## Frontend

Since the frontend is using [Vite.js](https://vitejs.dev/), all changes in this area will be hot-reloaded.
You don't need to restart the frontend, unless you try to add a new frontend dependency.

## Backend

The backend does not automatically hot-reload.
You will need to restart the backend after changing something using these steps:

1. Click `Terminal`
2. Click `Codespaces: server-dev` in the right panel
3. Press `Ctrl + C` to stop the server
4. Press `Up` to run `npm run start-server-dev`

.devcontainer/devcontainer.json (new file, 22 lines)
@@ -0,0 +1,22 @@

{
    "image": "mcr.microsoft.com/devcontainers/javascript-node:dev-18-bookworm",
    "features": {
        "ghcr.io/devcontainers/features/github-cli:1": {}
    },
    "updateContentCommand": "npm ci",
    "postCreateCommand": "",
    "postAttachCommand": {
        "frontend-dev": "npm run start-frontend-devcontainer",
        "server-dev": "npm run start-server-dev",
        "open-port": "gh codespace ports visibility 3001:public -c $CODESPACE_NAME"
    },
    "customizations": {
        "vscode": {
            "extensions": [
                "streetsidesoftware.code-spell-checker",
                "dbaeumer.vscode-eslint"
            ]
        }
    },
    "forwardPorts": [3000, 3001]
}

@@ -18,6 +18,7 @@ README.md

.vscode
.eslint*
.stylelint*
/.devcontainer
/.github
yarn.lock
app.json

@@ -33,7 +34,13 @@ tsconfig.json

/ecosystem.config.js
/extra/healthcheck.exe
/extra/healthcheck
extra/exe-builder
/extra/exe-builder
/extra/push-examples
/extra/uptime-kuma-push

# Comment the following line if you want to rebuild the healthcheck binary
/extra/healthcheck-armv7

### .gitignore content (commented rules are duplicated)

@@ -78,7 +78,7 @@ module.exports = {

"checkLoops": false,
}],
"space-before-blocks": "warn",
//'no-console': 'warn',
//"no-console": "warn",
"no-extra-boolean-cast": "off",
"no-multiple-empty-lines": [ "warn", {
"max": 1,

@@ -90,7 +90,8 @@ module.exports = {

"no-unneeded-ternary": "error",
"array-bracket-newline": [ "error", "consistent" ],
"eol-last": [ "error", "always" ],
//'prefer-template': 'error',
//"prefer-template": "error",
"template-curly-spacing": [ "warn", "never" ],
"comma-dangle": [ "warn", "only-multiline" ],
"no-empty": [ "error", {
"allowEmptyCatch": true

.github/ISSUE_TEMPLATE/ask-for-help.yaml (4 lines changed, vendored)
@@ -44,7 +44,7 @@ body:

id: operating-system
attributes:
label: "💻 Operating System and Arch"
description: "Which OS is your server/device running on?"
description: "Which OS is your server/device running on? (For Replit, please do not report this bug)"
placeholder: "Ex. Ubuntu 20.04 x86"
validations:
required: true

@@ -52,7 +52,7 @@ body:

id: browser-vendor
attributes:
label: "🌐 Browser"
description: "Which browser are you running on?"
description: "Which browser are you running on? (For Replit, please do not report this bug)"
placeholder: "Ex. Google Chrome 95.0.4638.69"
validations:
required: true

.github/ISSUE_TEMPLATE/bug_report.yaml (4 lines changed, vendored)
@@ -61,8 +61,8 @@ body:

id: operating-system
attributes:
label: "💻 Operating System and Arch"
description: "Which OS is your server/device running on?"
placeholder: "Ex. Ubuntu 20.04 x86"
description: "Which OS is your server/device running on? (For Replit, please do not report this bug)"
placeholder: "Ex. Ubuntu 20.04 x64 "
validations:
required: true
- type: input

.github/workflows/auto-test.yml (74 lines changed, vendored)
@@ -1,15 +1,15 @@

# This workflow will do a clean install of node dependencies, cache/restore them, build the source code and run tests across different versions of node
# This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions

name: Auto Test

on:
push:
branches: [ master ]
branches: [ master, 1.23.X ]
paths-ignore:
- '*.md'
pull_request:
branches: [ master ]
branches: [ master, 1.23.X ]
paths-ignore:
- '*.md'

@@ -21,54 +21,73 @@ jobs:

strategy:
matrix:
os: [macos-latest, ubuntu-latest, windows-latest]
node: [ 14, 16, 18, 19 ]
os: [macos-latest, ubuntu-latest, windows-latest, ARM64]
node: [ 16, 20.5 ]
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/

steps:
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v3
- uses: actions/checkout@v4

- name: Use Node.js ${{ matrix.node }}
uses: actions/setup-node@v3
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node }}
cache: 'npm'
- run: npm install
- run: npm ci
- run: npm run build
- run: npm test
env:
HEADLESS_TEST: 1
JUST_FOR_TEST: ${{ secrets.JUST_FOR_TEST }}

# As a lot of dev dependencies are not supported on ARMv7, we have to test it separately and just test if `npm ci --production` works
armv7-simple-test:
needs: [ check-linters ]
runs-on: ${{ matrix.os }}
timeout-minutes: 15

strategy:
matrix:
os: [ ARMv7 ]
node: [ 16, 20.5 ]
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/

steps:
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v4

- name: Use Node.js ${{ matrix.node }}
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node }}
- run: npm ci --production

check-linters:
runs-on: ubuntu-latest

steps:
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v3
- uses: actions/checkout@v4

- name: Use Node.js 14
uses: actions/setup-node@v3
- name: Use Node.js 20
uses: actions/setup-node@v4
with:
node-version: 14
cache: 'npm'
- run: npm install
- run: npm run lint
node-version: 20.5
- run: npm ci
- run: npm run lint:prod

e2e-tests:
needs: [ check-linters ]
runs-on: ubuntu-latest
steps:
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v3
- uses: actions/checkout@v4

- name: Use Node.js 14
uses: actions/setup-node@v3
- name: Use Node.js 16
uses: actions/setup-node@v4
with:
node-version: 14
cache: 'npm'
- run: npm install
node-version: 16
- run: npm ci
- run: npm run build
- run: npm run cy:test

@@ -77,13 +96,12 @@ jobs:

runs-on: ubuntu-latest
steps:
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v3
- uses: actions/checkout@v4

- name: Use Node.js 14
uses: actions/setup-node@v3
- name: Use Node.js 16
uses: actions/setup-node@v4
with:
node-version: 14
cache: 'npm'
- run: npm install
node-version: 16
- run: npm ci
- run: npm run build
- run: npm run cy:run:unit

.github/workflows/close-incorrect-issue.yml (4 lines changed, vendored)
@@ -14,10 +14,10 @@ jobs:

node-version: [16]

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4

- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v3
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node-version }}
cache: 'npm'

.github/workflows/json-yaml-validate.yml (7 lines changed, vendored)
@@ -1,4 +1,4 @@

name: json-yaml-validate
name: json-yaml-validate
on:
push:
branches:

@@ -6,6 +6,7 @@ on:

pull_request:
branches:
- master
- 1.23.X
workflow_dispatch:

permissions:

@@ -16,11 +17,11 @@ jobs:

json-yaml-validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4

- name: json-yaml-validate
id: json-yaml-validate
uses: GrantBirki/json-yaml-validate@v1.3.0
uses: GrantBirki/json-yaml-validate@v2.4.0
with:
comment: "true" # enable comment mode
exclude_file: ".github/config/exclude.txt" # gitignore style file for exclusions

.github/workflows/stale-bot.yml (2 lines changed, vendored)
@@ -9,7 +9,7 @@ jobs:

stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v7
- uses: actions/stale@v8
with:
stale-issue-message: 'We are clearing up our old issues and your ticket has been open for 3 months with no activity. Remove stale label or comment or this will be closed in 2 days.'
close-issue-message: 'This issue was closed because it has been stalled for 2 days with no activity.'

.gitignore (3 lines changed, vendored)
@@ -23,3 +23,6 @@ cypress/screenshots

extra/exe-builder/bin
extra/exe-builder/obj

.vs
.vscode

@@ -10,6 +10,7 @@

"color-function-notation": "legacy",
"shorthand-property-no-redundant-values": null,
"color-hex-length": null,
"declaration-block-no-redundant-longhand-properties": null
"declaration-block-no-redundant-longhand-properties": null,
"at-rule-no-unknown": null
}
}

@@ -2,13 +2,13 @@

First of all, I want to thank everyone who made pull requests for Uptime Kuma. I never thought the GitHub Community would be so nice! Because of this, I also never thought that other people would actually read and edit my code. It is not very well structured or commented, sorry about that.

The project was created with vite.js (vue3). Then I created a subdirectory called "server" for server part. Both frontend and backend share the same package.json.
The project was created with vite.js (vue3). Then I created a subdirectory called "server" for the server part. Both frontend and backend share the same package.json.

The frontend code build into "dist" directory. The server (express.js) exposes the "dist" directory as root of the endpoint. This is how production is working.
The frontend code builds into "dist" directory. The server (express.js) exposes the "dist" directory as the root of the endpoint. This is how production is working.

## Key Technical Skills

- Node.js (You should know what are promise, async/await and arrow function etc.)
- Node.js (You should know about promise, async/await and arrow function etc.)
- Socket.io
- SCSS
- Vue.js

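The hunk above mentions that the Express server exposes the built `dist` directory as the web root in production. As a rough, hedged illustration of that general pattern only (this is not Uptime Kuma's actual server code; the port and paths are placeholders):

```js
// Minimal sketch: serve a compiled SPA from ./dist with Express.
const express = require("express");
const path = require("path");

const app = express();

// Static assets produced by the frontend build (index.html, .js, .css, ...)
app.use(express.static(path.join(__dirname, "dist")));

// Fall back to index.html so the SPA router can handle client-side paths
app.get("*", (req, res) => {
    res.sendFile(path.join(__dirname, "dist", "index.html"));
});

app.listen(3001, () => {
    console.log("Serving ./dist on port 3001");
});
```
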
@@ -30,40 +30,40 @@ The frontend code build into "dist" directory. The server (express.js) exposes t

## Can I create a pull request for Uptime Kuma?

Yes or no, it depends on what you will try to do. Since I don't want to waste your time, be sure to **create an empty draft pull request or open an issue, so we can have a discussion first**. Especially for a large pull request or you don't know it will be merged or not.
Yes or no, it depends on what you will try to do. Since I don't want to waste your time, be sure to **create an empty draft pull request or open an issue, so we can have a discussion first**. Especially for a large pull request or you don't know if it will be merged or not.

Here are some references:

✅ Usually Accept:
### ✅ Usually accepted:
- Bug fix
- Security fix
- Adding notification providers
- Adding new language files (You should go to https://weblate.kuma.pet for existing languages)
- Adding new language files (see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md))
- Adding new language keys: `$t("...")`

⚠️ Discussion First
### ⚠️ Discussion required:
- Large pull requests
- New features

❌ Won't Merge
- A dedicated pr for translating existing languages (You can now translate on https://weblate.kuma.pet)
- Do not pass auto test
### ❌ Won't be merged:
- A dedicated PR for translating existing languages (see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md))
- Do not pass the auto-test
- Any breaking changes
- Duplicated pull request
- Duplicated pull requests
- Buggy
- UI/UX is not close to Uptime Kuma
- Existing logic is completely modified or deleted for no reason
- A function that is completely out of scope
- Convert existing code into other programming languages
- Unnecessary large code changes (Hard to review, causes code conflicts to other pull requests)
- Modifications or deletions of existing logic without a valid reason.
- Adding functions that is completely out of scope
- Converting existing code into other programming languages
- Unnecessarily large code changes that are hard to review and cause conflicts with other PRs.

The above cases cannot cover all situations.
The above cases may not cover all possible situations.

I (@louislam) have the final say. If your pull request does not meet my expectations, I will reject it, no matter how much time you spend on it. Therefore, it is essential to have a discussion beforehand.

I will mark your pull request in the [milestones](https://github.com/louislam/uptime-kuma/milestones), if I am plan to review and merge it.
I will assign your pull request to a [milestone](https://github.com/louislam/uptime-kuma/milestones), if I plan to review and merge it.

Also, please don't rush or ask for ETA, because I have to understand the pull request, make sure it is no breaking changes and stick to my vision of this project, especially for large pull requests.
Also, please don't rush or ask for an ETA, because I have to understand the pull request, make sure it is no breaking changes and stick to my vision of this project, especially for large pull requests.

### Recommended Pull Request Guideline

@@ -83,11 +83,11 @@ Before deep into coding, discussion first is preferred. Creating an empty pull r

## Project Styles

I personally do not like something that requires so many configurations before you can finally start the app. I hope Uptime Kuma installation could be as easy as like installing a mobile app.
I personally do not like something that requires so many configurations before you can finally start the app. I hope Uptime Kuma installation will be as easy as like installing a mobile app.

- Easy to install for non-Docker users, no native build dependency is needed (for x86_64/armv7/arm64), no extra config, no extra effort required to get it running
- Easy to install for non-Docker users, no native build dependency is needed (for x86_64/armv7/arm64), no extra config, and no extra effort required to get it running
- Single container for Docker users, no very complex docker-compose file. Just map the volume and expose the port, then good to go
- Settings should be configurable in the frontend. Environment variable is not encouraged, unless it is related to startup such as `DATA_DIR`
- Settings should be configurable in the frontend. Environment variables are discouraged, unless it is related to startup such as `DATA_DIR`
- Easy to use
- The web UI styling should be consistent and nice

@@ -106,11 +106,11 @@ I personally do not like something that requires so many configurations before y

## Tools

- Node.js >= 14
- NPM >= 8.5
- Git
- IDE that supports ESLint and EditorConfig (I am using IntelliJ IDEA)
- A SQLite GUI tool (SQLite Expert Personal is suggested)
- [`Node.js`](https://nodejs.org/) >= 14
- [`npm`](https://www.npmjs.com/) >= 8.5
- [`git`](https://git-scm.com/)
- IDE that supports [`ESLint`](https://eslint.org/) and EditorConfig (I am using [`IntelliJ IDEA`](https://www.jetbrains.com/idea/))
- A SQLite GUI tool (f.ex. [`SQLite Expert Personal`](https://www.sqliteexpert.com/download.html) or [`DBeaver Community`](https://dbeaver.io/download/))

## Install Dependencies for Development

@@ -130,7 +130,7 @@ Port `3000` and port `3001` will be used.

npm run dev
```

But sometimes, you would like to keep restart the server, but not the frontend, you can run these command in two terminals:
But sometimes, you would like to restart the server, but not the frontend, you can run these commands in two terminals:
```
npm run start-frontend-dev
npm run start-server-dev

@@ -146,13 +146,13 @@ It is mainly a socket.io app + express.js.

express.js is used for:
- entry point such as redirecting to a status page or the dashboard
- serving the frontend built files (index.html, .js and .css etc.)
- serving internal APIs of status page
- serving internal APIs of the status page

### Structure in /server/

- jobs/ (Jobs that are running in another process)
- model/ (Object model, auto mapping to the database table name)
- model/ (Object model, auto-mapping to the database table name)
- modules/ (Modified 3rd-party modules)
- monitor_types (Monitor Types)
- notification-providers/ (individual notification logic)

@@ -163,7 +163,7 @@ express.js is used for:

## Frontend Dev Server

It binds to `0.0.0.0:3000` by default. Frontend dev server is used for development only.
It binds to `0.0.0.0:3000` by default. The frontend dev server is used for development only.

For production, it is not used. It will be compiled to `dist` directory instead.

@@ -181,7 +181,7 @@ Uptime Kuma Frontend is a single page application (SPA). Most paths are handled

The router is in `src/router.js`

As you can see, most data in frontend is stored in root level, even though you changed the current router to any other pages.
As you can see, most data in the frontend is stored at the root level, even though you changed the current router to any other pages.

The data and socket logic are in `src/mixins/socket.js`.

@@ -210,15 +210,25 @@ Both frontend and backend share the same package.json. However, the frontend dep

### Update Dependencies

Since previously updating Vite 2.5.10 to 2.6.0 broke the application completely, from now on, it should update patch release version only.
Since previously updating Vite 2.5.10 to 2.6.0 broke the application completely, from now on, it should update the patch release version only.

Patch release = the third digit ([Semantic Versioning](https://semver.org/))

If for maybe security reasons, a library must be updated. Then you must need to check if there are any breaking changes.
If for security / bug / other reasons, a library must be updated, breaking changes need to be checked by the person proposing the change.

## Translations

Please read: https://github.com/louislam/uptime-kuma/tree/master/src/languages
Please add **all** the strings which are translatable to `src/lang/en.json` (If translation keys are omitted, they can not be translated).

**Don't include any other languages in your initial Pull-Request** (even if this is your mother tongue), to avoid merge-conflicts between weblate and `master`.
The translations can then (after merging a PR into `master`) be translated by awesome people donating their language skills.

If you want to help by translating Uptime Kuma into your language, please visit the [instructions on how to translate using weblate](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).

## Spelling & Grammar

Feel free to correct the grammar in the documentation or code.
My mother language is not English and my grammar is not that great.

## Wiki

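To make the translation guidance in the hunk above concrete, here is a small illustrative sketch with a made-up key name (not taken from the repository): the English string is added to `src/lang/en.json`, components reference it through `$t("...")`, and other languages are filled in later through Weblate rather than in the same pull request.

```js
// Illustrative sketch only; "exampleGreeting" is a hypothetical key.
const en = {
    // This entry would live in src/lang/en.json
    exampleGreeting: "Hello!"
};

// In a Vue component template the key would be referenced as:
//   {{ $t("exampleGreeting") }}
// Only the English entry goes into the initial pull request; Weblate
// contributors translate it into other languages afterwards.
console.log(en.exampleGreeting);
```
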
README.md (52 lines changed)
@@ -1,16 +1,16 @@

<div align="center" width="100%">
<img src="./public/icon.svg" width="128" alt="" />
</div>

# Uptime Kuma

Uptime Kuma is an easy-to-use self-hosted monitoring tool.

<a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/stars/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/pulls/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/v/louislam/uptime-kuma/latest?label=docker%20image%20ver." /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/last-commit/louislam/uptime-kuma" /></a> <a target="_blank" href="https://opencollective.com/uptime-kuma"><img src="https://opencollective.com/uptime-kuma/total/badge.svg?label=Open%20Collective%20Backers&color=brightgreen" /></a>
[](https://github.com/sponsors/louislam) <a href="https://weblate.kuma.pet/projects/uptime-kuma/uptime-kuma/">
<img src="https://weblate.kuma.pet/widgets/uptime-kuma/-/svg-badge.svg" alt="Translation status" />
</a>

<div align="center" width="100%">
<img src="./public/icon.svg" width="128" alt="" />
</div>

Uptime Kuma is an easy-to-use self-hosted monitoring tool.

<img src="https://user-images.githubusercontent.com/1336778/212262296-e6205815-ad62-488c-83ec-a5b0d0689f7c.jpg" width="700" alt="" />

## 🥔 Live Demo

@@ -23,10 +23,10 @@ It is a temporary live demo, all data will be deleted after 10 minutes. Use the

## ⭐ Features

* Monitoring uptime for HTTP(s) / TCP / HTTP(s) Keyword / Ping / DNS Record / Push / Steam Game Server / Docker Containers
* Monitoring uptime for HTTP(s) / TCP / HTTP(s) Keyword / HTTP(s) Json Query / Ping / DNS Record / Push / Steam Game Server / Docker Containers
* Fancy, Reactive, Fast UI/UX
* Notifications via Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP), and [90+ notification services, click here for the full list](https://github.com/louislam/uptime-kuma/tree/master/src/components/notifications)
* 20 second intervals
* 20-second intervals
* [Multi Languages](https://github.com/louislam/uptime-kuma/tree/master/src/lang)
* Multiple status pages
* Map status pages to specific domains

@@ -49,10 +49,14 @@ Uptime Kuma is now running on http://localhost:3001

### 💪🏻 Non-Docker

Required Tools:
- [Node.js](https://nodejs.org/en/download/) >= 14
Requirements:
- Platform
- ✅ Major Linux distros such as Debian, Ubuntu, CentOS, Fedora and ArchLinux etc.
- ✅ Windows 10 (x64), Windows Server 2012 R2 (x64) or higher
- ❌ Replit / Heroku
- [Node.js](https://nodejs.org/en/download/) 14 / 16 / 18 / 20.4
- [npm](https://docs.npmjs.com/cli/) >= 7
- [Git](https://git-scm.com/downloads)
- [Git](https://git-scm.com/downloads)
- [pm2](https://pm2.keymetrics.io/) - For running Uptime Kuma in the background

```bash

@@ -66,8 +70,8 @@ npm run setup

# Option 1. Try it
node server/server.js

# (Recommended) Option 2. Run in background using PM2
# Install PM2 if you don't have it:
# (Recommended) Option 2. Run in the background using PM2
# Install PM2 if you don't have it:
npm install pm2 -g && pm2 install pm2-logrotate

# Start Server

@@ -87,6 +91,10 @@ pm2 monit

pm2 save && pm2 startup
```

### Windows Portable (x64)

https://github.com/louislam/uptime-kuma/releases/download/1.23.1/uptime-kuma-windows-x64-portable-1.23.1.zip

### Advanced Installation

If you need more options or need to browse via a reverse proxy, please read:

@@ -101,7 +109,7 @@ https://github.com/louislam/uptime-kuma/wiki/%F0%9F%86%99-How-to-Update

## 🆕 What's Next?

I will mark requests/issues to the next milestone.
I will assign requests/issues to the next milestone.

https://github.com/louislam/uptime-kuma/milestones

@@ -144,17 +152,18 @@ Telegram Notification Sample:

If you love this project, please consider giving me a ⭐.

## 🗣️ Discussion
## 🗣️ Discussion / Ask for Help

### Issues Page
⚠️ For any general or technical questions, please don't send me an email, as I am unable to provide support in that manner. I will not response if you asked such questions.

You can discuss or ask for help in [issues](https://github.com/louislam/uptime-kuma/issues).
I recommend using Google, GitHub Issues, or Uptime Kuma's Subreddit for finding answers to your question. If you cannot find the information you need, feel free to ask:

### Subreddit
- [GitHub Issues](https://github.com/louislam/uptime-kuma/issues)
- [Subreddit r/Uptime kuma](https://www.reddit.com/r/UptimeKuma/)

My Reddit account: [u/louislamlam](https://reddit.com/u/louislamlam).
You can mention me if you ask a question on Reddit.
[r/Uptime kuma](https://www.reddit.com/r/UptimeKuma/)

## Contribute

@@ -175,7 +184,10 @@ If you want to report a bug or request a new feature, feel free to open a [new i

### Translations
If you want to translate Uptime Kuma into your language, please visit [Weblate Readme](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).

Feel free to correct my grammar in this README, source code, or wiki, as my mother language is not English and my grammar is not that great.
## Spelling & Grammar

Feel free to correct the grammar in the documentation or code.
My mother language is not english and my grammar is not that great.

### Create Pull Requests
If you want to modify Uptime Kuma, please read this guide and follow the rules here: https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md

@@ -3,19 +3,19 @@

## Reporting a Vulnerability

1. Please report security issues to https://github.com/louislam/uptime-kuma/security/advisories/new.
1. Please also create a empty security issues for alerting me, as GitHub Advisory do not send a notification, I probably will miss without this. https://github.com/louislam/uptime-kuma/issues/new?assignees=&labels=help&template=security.md
1. Please also create an empty security issue to alert me, as GitHub Advisories do not send a notification, I probably will miss it without this. https://github.com/louislam/uptime-kuma/issues/new?assignees=&labels=help&template=security.md

Do not use the public issue tracker or discuss it in the public as it will cause more damage.
Do not use the public issue tracker or discuss it in public as it will cause more damage.

## Do you accept other 3rd-party bug bounty platforms?

At this moment, I DO NOT accept other bug bounty platforms, because I am not familiar with these platforms and someone have tried to send a phishing link to me by this already. To minimize my own risk, please report through GitHub Advisories only. I will ignore all 3rd-party bug bounty platforms emails.
At this moment, I DO NOT accept other bug bounty platforms, because I am not familiar with these platforms and someone has tried to send a phishing link to me by doing this already. To minimize my own risk, please report through GitHub Advisories only. I will ignore all 3rd-party bug bounty platforms emails.

## Supported Versions

### Uptime Kuma Versions

You should use or upgrade to the latest version of Uptime Kuma. All `1.X.X` versions are upgradable to the lastest version.
You should use or upgrade to the latest version of Uptime Kuma. All `1.X.X` versions are upgradable to the latest version.

### Upgradable Docker Tags

@@ -4,8 +4,4 @@ if (process.env.TEST_FRONTEND) {

config.presets = [ "@babel/preset-env" ];
}

if (process.env.TEST_BACKEND) {
config.plugins = [ "babel-plugin-rewire" ];
}

module.exports = config;

@@ -16,6 +16,9 @@ export default defineConfig({

},
define: {
"FRONTEND_VERSION": JSON.stringify(process.env.npm_package_version),
"DEVCONTAINER": JSON.stringify(process.env.DEVCONTAINER),
"GITHUB_CODESPACES_PORT_FORWARDING_DOMAIN": JSON.stringify(process.env.GITHUB_CODESPACES_PORT_FORWARDING_DOMAIN),
"CODESPACE_NAME": JSON.stringify(process.env.CODESPACE_NAME),
},
plugins: [
vue(),

@@ -42,6 +45,9 @@ export default defineConfig({

}
},
build: {
commonjsOptions: {
include: [ /.js$/ ],
},
rollupOptions: {
output: {
manualChunks(id, { getModuleInfo, getModuleIds }) {

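The `define` entries in the hunk above are compile-time constants that Vite substitutes into the frontend bundle. The snippet below is only a hypothetical illustration of how such constants could be consumed (it is not code from the repository; the URL shape is the usual Codespaces forwarded-port format, assumed here):

```js
// Assumed: FRONTEND_VERSION, CODESPACE_NAME and
// GITHUB_CODESPACES_PORT_FORWARDING_DOMAIN are replaced at build time by
// the "define" block above, so they behave like global string constants.
console.log("Frontend version:", FRONTEND_VERSION);

// Inside a Codespace the backend is reachable through the forwarded port;
// locally it falls back to the usual development address.
const backendUrl = CODESPACE_NAME
    ? `https://${CODESPACE_NAME}-3001.${GITHUB_CODESPACES_PORT_FORWARDING_DOMAIN}`
    : "http://localhost:3001";
console.log("Backend expected at:", backendUrl);
```
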
db/patch-add-certificate-expiry-status-page.sql (new file, 7 lines)
@@ -0,0 +1,7 @@

-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;

ALTER TABLE status_page
ADD show_certificate_expiry BOOLEAN default 0 NOT NULL;

COMMIT;

db/patch-add-gamedig-given-port.sql (new file, 7 lines)
@@ -0,0 +1,7 @@

-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;

ALTER TABLE monitor
ADD gamedig_given_port_only BOOLEAN default 1 not null;

COMMIT;

db/patch-add-invert-keyword.sql (new file, 7 lines)
@@ -0,0 +1,7 @@

-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;

ALTER TABLE monitor
ADD invert_keyword BOOLEAN default 0 not null;

COMMIT;

db/patch-add-parent-monitor.sql (new file, 6 lines)
@@ -0,0 +1,6 @@

BEGIN TRANSACTION;

ALTER TABLE monitor
ADD parent INTEGER REFERENCES [monitor] ([id]) ON DELETE SET NULL ON UPDATE CASCADE;

COMMIT

db/patch-add-timeout-monitor.sql (new file, 6 lines)
@@ -0,0 +1,6 @@

-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;

ALTER TABLE monitor
ADD timeout DOUBLE default 0 not null;
COMMIT;

db/patch-added-json-query.sql (new file, 10 lines)
@@ -0,0 +1,10 @@

-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;

ALTER TABLE monitor
ADD json_path TEXT;

ALTER TABLE monitor
ADD expected_value VARCHAR(255);

COMMIT;

db/patch-added-kafka-producer.sql (new file, 22 lines)
@@ -0,0 +1,22 @@

-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;

ALTER TABLE monitor
ADD kafka_producer_topic VARCHAR(255);

ALTER TABLE monitor
ADD kafka_producer_brokers TEXT;

ALTER TABLE monitor
ADD kafka_producer_ssl INTEGER;

ALTER TABLE monitor
ADD kafka_producer_allow_auto_topic_creation VARCHAR(255);

ALTER TABLE monitor
ADD kafka_producer_sasl_options TEXT;

ALTER TABLE monitor
ADD kafka_producer_message TEXT;

COMMIT;

db/patch-fix-kafka-producer-booleans.sql (new file, 34 lines)
@@ -0,0 +1,34 @@

-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;

-- Rename COLUMNs to another one (suffixed by `_old`)
ALTER TABLE monitor
RENAME COLUMN kafka_producer_ssl TO kafka_producer_ssl_old;

ALTER TABLE monitor
RENAME COLUMN kafka_producer_allow_auto_topic_creation TO kafka_producer_allow_auto_topic_creation_old;

-- Add correct COLUMNs
ALTER TABLE monitor
ADD COLUMN kafka_producer_ssl BOOLEAN default 0 NOT NULL;

ALTER TABLE monitor
ADD COLUMN kafka_producer_allow_auto_topic_creation BOOLEAN default 0 NOT NULL;

-- These SQL is still not fully safe. See https://github.com/louislam/uptime-kuma/issues/4039.

-- Set bring old values from `_old` COLUMNs to correct ones
-- UPDATE monitor SET kafka_producer_allow_auto_topic_creation = monitor.kafka_producer_allow_auto_topic_creation_old
-- WHERE monitor.kafka_producer_allow_auto_topic_creation_old IS NOT NULL;

-- UPDATE monitor SET kafka_producer_ssl = monitor.kafka_producer_ssl_old
-- WHERE monitor.kafka_producer_ssl_old IS NOT NULL;

-- Remove old COLUMNs
ALTER TABLE monitor
DROP COLUMN kafka_producer_allow_auto_topic_creation_old;

ALTER TABLE monitor
DROP COLUMN kafka_producer_ssl_old;

COMMIT;

db/patch-monitor-oauth-cc.sql (new file, 19 lines)
@@ -0,0 +1,19 @@

-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;

ALTER TABLE monitor
ADD oauth_client_id TEXT default null;

ALTER TABLE monitor
ADD oauth_client_secret TEXT default null;

ALTER TABLE monitor
ADD oauth_token_url TEXT default null;

ALTER TABLE monitor
ADD oauth_scopes TEXT default null;

ALTER TABLE monitor
ADD oauth_auth_method TEXT default null;

COMMIT;

db/patch-monitor-tls-info-add-fk.sql (new file, 18 lines)
@@ -0,0 +1,18 @@

BEGIN TRANSACTION;

PRAGMA writable_schema = TRUE;

UPDATE
SQLITE_MASTER
SET
sql = replace(sql,
'monitor_id INTEGER NOT NULL',
'monitor_id INTEGER NOT NULL REFERENCES [monitor] ([id]) ON DELETE CASCADE ON UPDATE CASCADE'
)
WHERE
name = 'monitor_tls_info'
AND type = 'table';

PRAGMA writable_schema = RESET;

COMMIT;

db/patch-notification-config.sql (new file, 10 lines)
@@ -0,0 +1,10 @@

-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;

-- SQLite: Change the data type of the column "config" from VARCHAR to TEXT
ALTER TABLE notification RENAME COLUMN config TO config_old;
ALTER TABLE notification ADD COLUMN config TEXT;
UPDATE notification SET config = config_old;
ALTER TABLE notification DROP COLUMN config_old;

COMMIT;

db/patch-timeout.sql (new file, 7 lines)
@@ -0,0 +1,7 @@

-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;

UPDATE monitor SET timeout = (interval * 0.8)
WHERE timeout IS NULL OR timeout <= 0;

COMMIT;

@@ -4,5 +4,5 @@ WORKDIR /app

# Install apprise, iputils for non-root ping, setpriv
RUN apk add --no-cache iputils setpriv dumb-init python3 py3-cryptography py3-pip py3-six py3-yaml py3-click py3-markdown py3-requests py3-requests-oauthlib git && \
    pip3 --no-cache-dir install apprise==1.3.0 && \
    pip3 --no-cache-dir install apprise==1.4.0 && \
    rm -rf /root/.cache

@@ -1,28 +1,48 @@

# DON'T UPDATE TO node:14-bullseye-slim, see #372.
# If the image changed, the second stage image should be changed too
FROM node:16-buster-slim
# DON'T UPDATE TO bullseye-slim, see #372.
# There is no 20-buster-slim for armv7 unfortunately, 18-buster-slim is the last one for Uptime Kuma v1.
FROM node:18-buster-slim
ARG TARGETPLATFORM

WORKDIR /app

# Install Curl
# Install Apprise, add sqlite3 cli for debugging in the future, iputils-ping for ping, util-linux for setpriv
# Stupid python3 and python3-pip actually install a lot of useless things into Debian, specify --no-install-recommends to skip them, make the base even smaller than alpine!
RUN apt update && \
    apt --yes --no-install-recommends install python3 python3-pip python3-cryptography python3-six python3-yaml python3-click python3-markdown python3-requests python3-requests-oauthlib \
    sqlite3 iputils-ping util-linux dumb-init git && \
    pip3 --no-cache-dir install apprise==1.3.0 && \
# Specify --no-install-recommends to skip unused dependencies, make the base much smaller!
# python3* = apprise's dependencies
# sqlite3 = for debugging
# iputils-ping = for ping
# util-linux = for setpriv (Should be dropped in 2.0.0?)
# dumb-init = avoid zombie processes (#480)
# curl = for debugging
# ca-certificates = keep the cert up-to-date
# sudo = for start service nscd with non-root user
# nscd = for better DNS caching
# (pip) apprise = for notifications
RUN apt-get update && \
    apt-get --yes --no-install-recommends install \
    python3 python3-pip python3-cryptography python3-six python3-yaml python3-click python3-markdown python3-requests python3-requests-oauthlib \
    sqlite3 \
    iputils-ping \
    util-linux \
    dumb-init \
    curl \
    ca-certificates \
    sudo \
    nscd && \
    pip3 --no-cache-dir install apprise==1.6.0 && \
    rm -rf /var/lib/apt/lists/* && \
    apt --yes autoremove

# Install cloudflared
# dpkg --add-architecture arm: cloudflared do not provide armhf, this is workaround. Read more: https://github.com/cloudflare/cloudflared/issues/583
COPY extra/download-cloudflared.js ./extra/download-cloudflared.js
RUN node ./extra/download-cloudflared.js $TARGETPLATFORM && \
    dpkg --add-architecture arm && \
    apt update && \
    apt --yes --no-install-recommends install ./cloudflared.deb && \
RUN set -eux && \
    mkdir -p --mode=0755 /usr/share/keyrings && \
    curl --fail --show-error --silent --location --insecure https://pkg.cloudflare.com/cloudflare-main.gpg --output /usr/share/keyrings/cloudflare-main.gpg && \
    echo 'deb [signed-by=/usr/share/keyrings/cloudflare-main.gpg] https://pkg.cloudflare.com/cloudflared buster main' | tee /etc/apt/sources.list.d/cloudflared.list && \
    apt-get update && \
    apt-get install --yes --no-install-recommends cloudflared && \
    cloudflared version && \
    rm -rf /var/lib/apt/lists/* && \
    rm -f cloudflared.deb && \
    apt --yes autoremove

# For nscd
COPY ./docker/etc/nscd.conf /etc/nscd.conf
COPY ./docker/etc/sudoers /etc/sudoers

@@ -26,6 +26,8 @@ RUN chmod +x /app/extra/entrypoint.sh

FROM louislam/uptime-kuma:base-debian AS release
WORKDIR /app

ENV UPTIME_KUMA_IS_CONTAINER=1

# Copy app files from build layer
COPY --from=build /app /app

@@ -70,7 +72,6 @@ RUN git clone https://github.com/louislam/uptime-kuma.git .

RUN npm ci

EXPOSE 3000 3001
VOLUME ["/app/data"]
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD extra/healthcheck
CMD ["npm", "run", "start-pr-test"]

docker/etc/nscd.conf (new file, 90 lines)
@@ -0,0 +1,90 @@

#
# /etc/nscd.conf
#
# An example Name Service Cache config file. This file is needed by nscd.
#
# Legal entries are:
#
# logfile <file>
# debug-level <level>
# threads <initial #threads to use>
# max-threads <maximum #threads to use>
# server-user <user to run server as instead of root>
# server-user is ignored if nscd is started with -S parameters
# stat-user <user who is allowed to request statistics>
# reload-count unlimited|<number>
# paranoia <yes|no>
# restart-interval <time in seconds>
#
# enable-cache <service> <yes|no>
# positive-time-to-live <service> <time in seconds>
# negative-time-to-live <service> <time in seconds>
# suggested-size <service> <prime number>
# check-files <service> <yes|no>
# persistent <service> <yes|no>
# shared <service> <yes|no>
# max-db-size <service> <number bytes>
# auto-propagate <service> <yes|no>
#
# Currently supported cache names (services): passwd, group, hosts, services
#

# logfile /var/log/nscd.log
# threads 4
# max-threads 32
# server-user node
# stat-user somebody
debug-level 0
# reload-count 5
paranoia no
# restart-interval 3600

enable-cache passwd no
positive-time-to-live passwd 600
negative-time-to-live passwd 20
suggested-size passwd 211
check-files passwd yes
persistent passwd yes
shared passwd yes
max-db-size passwd 33554432
auto-propagate passwd yes

enable-cache group no
positive-time-to-live group 3600
negative-time-to-live group 60
suggested-size group 211
check-files group yes
persistent group yes
shared group yes
max-db-size group 33554432
auto-propagate group yes

enable-cache hosts yes
positive-time-to-live hosts 3600
negative-time-to-live hosts 20
suggested-size hosts 211
check-files hosts yes
persistent hosts yes
# Set shared to "no" to display stats in `nscd -g`
# Read more: https://stackoverflow.com/questions/40429245/nscdcentos7curl-0-dns-cache-hit-rate
shared hosts no
max-db-size hosts 33554432

enable-cache services no
positive-time-to-live services 28800
negative-time-to-live services 20
suggested-size services 211
check-files services yes
persistent services yes
shared services yes
max-db-size services 33554432

enable-cache netgroup no
positive-time-to-live netgroup 28800
negative-time-to-live netgroup 20
suggested-size netgroup 211
check-files netgroup yes
persistent netgroup yes
shared netgroup yes
max-db-size netgroup 33554432

docker/etc/sudoers (new file, 31 lines)
@@ -0,0 +1,31 @@

#
# This file MUST be edited with the 'visudo' command as root.
#
# Please consider adding local content in /etc/sudoers.d/ instead of
# directly modifying this file.
#
# See the man page for details on how to write a sudoers file.
#
Defaults env_reset
Defaults mail_badpass
Defaults secure_path="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"

# Host alias specification

# User alias specification

# Cmnd alias specification

# User privilege specification
root ALL=(ALL:ALL) ALL

# Allow members of group sudo to execute any command
%sudo ALL=(ALL:ALL) ALL

# See sudoers(5) for more information on "#include" directives:

#includedir /etc/sudoers.d

# Allow `node` to control service (mainly for nscd)
node ALL=(root) NOPASSWD: /usr/sbin/nscdservice
node ALL=(root) NOPASSWD: /usr/sbin/service

@@ -1,48 +0,0 @@
|
||||
//
|
||||
|
||||
const http = require("https"); // or 'https' for https:// URLs
|
||||
const fs = require("fs");
|
||||
|
||||
const platform = process.argv[2];
|
||||
|
||||
if (!platform) {
|
||||
console.error("No platform??");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
let arch = null;
|
||||
|
||||
if (platform === "linux/amd64") {
|
||||
arch = "amd64";
|
||||
} else if (platform === "linux/arm64") {
|
||||
arch = "arm64";
|
||||
} else if (platform === "linux/arm/v7") {
|
||||
arch = "arm";
|
||||
} else {
|
||||
console.error("Invalid platform?? " + platform);
|
||||
}
|
||||
|
||||
const file = fs.createWriteStream("cloudflared.deb");
|
||||
get("https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-" + arch + ".deb");
|
||||
|
||||
/**
|
||||
* Download specified file
|
||||
* @param {string} url URL to request
|
||||
*/
|
||||
function get(url) {
|
||||
http.get(url, function (res) {
|
||||
if (res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {
|
||||
console.log("Redirect to " + res.headers.location);
|
||||
get(res.headers.location);
|
||||
} else if (res.statusCode >= 200 && res.statusCode < 300) {
|
||||
res.pipe(file);
|
||||
|
||||
res.on("end", function () {
|
||||
console.log("Downloaded");
|
||||
});
|
||||
} else {
|
||||
console.error(res.statusCode);
|
||||
process.exit(1);
|
||||
}
|
||||
});
|
||||
}
|
@@ -1,3 +1,3 @@
|
||||
<Weavers xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="FodyWeavers.xsd">
|
||||
<Costura />
|
||||
<Costura DisableCompression='true' IncludeDebugSymbols='false' />
|
||||
</Weavers>
|
@@ -6,9 +6,9 @@ using System.Runtime.InteropServices;
|
||||
// set of attributes. Change these attribute values to modify the information
|
||||
// associated with an assembly.
|
||||
[assembly: AssemblyTitle("Uptime Kuma")]
|
||||
[assembly: AssemblyDescription("")]
|
||||
[assembly: AssemblyDescription("A portable executable for running Uptime Kuma")]
|
||||
[assembly: AssemblyConfiguration("")]
|
||||
[assembly: AssemblyCompany("")]
|
||||
[assembly: AssemblyCompany("Uptime Kuma")]
|
||||
[assembly: AssemblyProduct("Uptime Kuma")]
|
||||
[assembly: AssemblyCopyright("Copyright © 2023 Louis Lam")]
|
||||
[assembly: AssemblyTrademark("")]
|
||||
@@ -20,7 +20,7 @@ using System.Runtime.InteropServices;
|
||||
[assembly: ComVisible(false)]
|
||||
|
||||
// The following GUID is for the ID of the typelib if this project is exposed to COM
|
||||
[assembly: Guid("2DB53988-1D93-4AC0-90C4-96ADEAAC5C04")]
|
||||
[assembly: Guid("86B40AFB-61FC-433D-8C31-650B0F32EA8F")]
|
||||
|
||||
// Version information for an assembly consists of the following four values:
|
||||
//
|
||||
@@ -32,5 +32,5 @@ using System.Runtime.InteropServices;
|
||||
// You can specify all the values or you can default the Build and Revision Numbers
|
||||
// by using the '*' as shown below:
|
||||
// [assembly: AssemblyVersion("1.0.*")]
|
||||
[assembly: AssemblyVersion("1.0.0.0")]
|
||||
[assembly: AssemblyFileVersion("1.0.0.0")]
|
||||
[assembly: AssemblyVersion("1.0.1.0")]
|
||||
[assembly: AssemblyFileVersion("1.0.1.0")]
|
||||
|
@@ -6,7 +6,7 @@
|
||||
* ⚠️ Deprecated: Changed to healthcheck.go, it will be deleted in the future.
|
||||
* This script should be run after a period of time (180s), because the server may need some time to prepare.
|
||||
*/
|
||||
const { FBSD } = require("../server/util-server");
|
||||
const FBSD = /^freebsd/.test(process.platform);
|
||||
|
||||
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
|
||||
|
||||
|
@@ -5,15 +5,15 @@
|
||||
|
||||
// curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh
|
||||
println("=====================");
|
||||
println("Uptime Kuma Installer");
|
||||
println("Uptime Kuma Install Script");
|
||||
println("=====================");
|
||||
println("Supported OS: CentOS 7/8, Ubuntu >= 16.04 and Debian");
|
||||
println("Supported OS: Ubuntu >= 16.04, Debian and CentOS/RHEL 7/8");
|
||||
println("---------------------------------------");
|
||||
println("This script is designed for Linux and basic usage.");
|
||||
println("For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation");
|
||||
println("---------------------------------------");
|
||||
println("");
|
||||
println("Local - Install Uptime Kuma in your current machine with git, Node.js 14 and pm2");
|
||||
println("Local - Install Uptime Kuma on your current machine with git, Node.js and pm2");
|
||||
println("Docker - Install Uptime Kuma Docker container");
|
||||
println("");
|
||||
|
||||
@@ -29,14 +29,10 @@ function checkNode() {
|
||||
bash("nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])')");
|
||||
println("Node Version: " ++ nodeVersion);
|
||||
|
||||
if (nodeVersion < "12") {
|
||||
if (nodeVersion <= "12") {
|
||||
println("Error: Required Node.js 14");
|
||||
call("exit", "1");
|
||||
}
|
||||
|
||||
if (nodeVersion == "12") {
|
||||
println("Warning: NodeJS " ++ nodeVersion ++ " is not tested.");
|
||||
}
|
||||
}
|
||||
|
||||
function deb() {
|
||||
@@ -60,8 +56,8 @@ function deb() {
|
||||
bash("apt --yes install curl");
|
||||
}
|
||||
|
||||
println("Installing Node.js 14");
|
||||
bash("curl -sL https://deb.nodesource.com/setup_14.x | bash - > log.txt");
|
||||
println("Installing Node.js 16");
|
||||
bash("curl -sL https://deb.nodesource.com/setup_16.x | bash - > log.txt");
|
||||
bash("apt --yes install nodejs");
|
||||
bash("node -v");
|
||||
|
||||
@@ -91,6 +87,10 @@ if (type == "local") {
|
||||
bash("os=$(head -n1 /etc/issue | cut -f 1 -d ' ')");
|
||||
if (os == "Ubuntu") {
|
||||
distribution = "ubuntu";
|
||||
|
||||
// Get ubuntu version
|
||||
bash(". /etc/lsb-release");
|
||||
version = DISTRIB_RELEASE;
|
||||
}
|
||||
if (os == "Debian") {
|
||||
distribution = "debian";
|
||||
@@ -101,6 +101,7 @@ if (type == "local") {
|
||||
|
||||
println("Your OS: " ++ os);
|
||||
println("Distribution: " ++ distribution);
|
||||
println("Version: " ++ version);
|
||||
println("Arch: " ++ arch);
|
||||
|
||||
if ("$3" != "") {
|
||||
@@ -131,15 +132,32 @@ if (type == "local") {
|
||||
checkNode();
|
||||
} else {
|
||||
|
||||
bash("curlCheck=$(curl --version)");
|
||||
if (curlCheck == "") {
|
||||
println("Installing Curl");
|
||||
bash("yum -y -q install curl");
|
||||
bash("dnfCheck=$(dnf --version)");
|
||||
|
||||
// Use yum
|
||||
if (dnfCheck == "") {
|
||||
bash("curlCheck=$(curl --version)");
|
||||
if (curlCheck == "") {
|
||||
println("Installing Curl");
|
||||
bash("yum -y -q install curl");
|
||||
}
|
||||
|
||||
println("Installing Node.js 16");
|
||||
bash("curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt");
|
||||
bash("yum install -y -q nodejs");
|
||||
} else {
|
||||
bash("curlCheck=$(curl --version)");
|
||||
if (curlCheck == "") {
|
||||
println("Installing Curl");
|
||||
bash("dnf -y install curl");
|
||||
}
|
||||
|
||||
println("Installing Node.js 16");
|
||||
bash("curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt");
|
||||
bash("dnf install -y nodejs");
|
||||
}
|
||||
|
||||
println("Installing Node.js 14");
|
||||
bash("curl -sL https://rpm.nodesource.com/setup_14.x | bash - > log.txt");
|
||||
bash("yum install -y -q nodejs");
|
||||
|
||||
bash("node -v");
|
||||
|
||||
bash("nodeCheckAgain=$(node -v)");
|
||||
@@ -171,13 +189,15 @@ if (type == "local") {
|
||||
bash("check=$(git --version)");
|
||||
if (check == "") {
|
||||
error = 1;
|
||||
println("Error: git is missing");
|
||||
println("Error: git is not found!");
|
||||
println("help: an installation guide is available at https://git-scm.com/book/en/v2/Getting-Started-Installing-Git");
|
||||
}
|
||||
|
||||
bash("check=$(node -v)");
|
||||
if (check == "") {
|
||||
error = 1;
|
||||
println("Error: node is missing");
|
||||
println("Error: node is not found");
|
||||
println("help: an installation guide is available at https://nodejs.org/en/download");
|
||||
}
|
||||
|
||||
if (error > 0) {
|
||||
@@ -193,6 +213,15 @@ if (type == "local") {
|
||||
bash("pm2 startup");
|
||||
}
|
||||
|
||||
|
||||
// Check again
|
||||
bash("check=$(pm2 --version)");
|
||||
if (check == "") {
|
||||
println("Error: pm2 is not found!");
|
||||
println("help: an installation guide is available at https://pm2.keymetrics.io/docs/usage/quick-start/");
|
||||
bash("exit 1");
|
||||
}
|
||||
|
||||
bash("mkdir -p $installPath");
|
||||
bash("cd $installPath");
|
||||
bash("git clone https://github.com/louislam/uptime-kuma.git .");
|
||||
@@ -206,6 +235,7 @@ if (type == "local") {
|
||||
bash("check=$(docker -v)");
|
||||
if (check == "") {
|
||||
println("Error: docker is not found!");
|
||||
println("help: an installation guide is available at https://docs.docker.com/desktop/");
|
||||
bash("exit 1");
|
||||
}
|
||||
|
||||
@@ -213,6 +243,7 @@ if (type == "local") {
|
||||
|
||||
bash("if [[ \"$check\" == *\"Is the docker daemon running\"* ]]; then
|
||||
\"echo\" \"Error: docker is not running\"
|
||||
\"echo\" \"help: a troubleshooting guide is available at https://docs.docker.com/config/daemon/troubleshoot/\"
|
||||
\"exit\" \"1\"
|
||||
fi");
|
||||
|
||||
|
44
extra/reformat-changelog.js
Normal file
44
extra/reformat-changelog.js
Normal file
@@ -0,0 +1,44 @@
|
||||
// Generate on GitHub
|
||||
const input = `
|
||||
* Add Korean translation by @Alanimdeo in https://github.com/louislam/dockge/pull/86
|
||||
`;
|
||||
|
||||
const template = `
|
||||
### 🆕 New Features
|
||||
|
||||
### 💇♀️ Improvements
|
||||
|
||||
### 🐞 Bug Fixes
|
||||
|
||||
### ⬆️ Security Fixes
|
||||
|
||||
### 🦎 Translation Contributions
|
||||
|
||||
### Others
|
||||
- Other small changes, code refactoring and comment/doc updates in this repo:
|
||||
`;
|
||||
|
||||
const lines = input.split("\n").filter((line) => line.trim() !== "");
|
||||
|
||||
for (const line of lines) {
|
||||
// Split the last " by "
|
||||
const usernamePullRequesURL = line.split(" by ").pop();
|
||||
|
||||
if (!usernamePullRequesURL) {
|
||||
console.log("Unable to parse", line);
|
||||
continue;
|
||||
}
|
||||
|
||||
const [ username, pullRequestURL ] = usernamePullRequesURL.split(" in ");
|
||||
const pullRequestID = "#" + pullRequestURL.split("/").pop();
|
||||
let message = line.split(" by ").shift();
|
||||
|
||||
if (!message) {
|
||||
console.log("Unable to parse", line);
|
||||
continue;
|
||||
}
|
||||
|
||||
message = message.split("* ").pop();
|
||||
console.log("-", pullRequestID, message, `(Thanks ${username})`);
|
||||
}
|
||||
console.log(template);
|
@@ -5,6 +5,8 @@ const { R } = require("redbean-node");
|
||||
const readline = require("readline");
|
||||
const { initJWTSecret } = require("../server/util-server");
|
||||
const User = require("../server/model/user");
|
||||
const { io } = require("socket.io-client");
|
||||
const { localWebSocketURL } = require("../server/config");
|
||||
const args = require("args-parser")(process.argv);
|
||||
const rl = readline.createInterface({
|
||||
input: process.stdin,
|
||||
@@ -36,12 +38,16 @@ const main = async () => {
|
||||
// Reset all sessions by reset jwt secret
|
||||
await initJWTSecret();
|
||||
|
||||
// Disconnect all other socket clients of the user
|
||||
await disconnectAllSocketClients(user.username, password);
|
||||
|
||||
break;
|
||||
} else {
|
||||
console.log("Passwords do not match, please try again.");
|
||||
}
|
||||
}
|
||||
console.log("Password reset successfully.");
|
||||
|
||||
}
|
||||
} catch (e) {
|
||||
console.error("Error: " + e.message);
|
||||
@@ -66,6 +72,44 @@ function question(question) {
|
||||
});
|
||||
}
|
||||
|
||||
function disconnectAllSocketClients(username, password) {
|
||||
return new Promise((resolve) => {
|
||||
console.log("Connecting to " + localWebSocketURL + " to disconnect all other socket clients");
|
||||
|
||||
// Disconnect all socket connections
|
||||
const socket = io(localWebSocketURL, {
|
||||
reconnection: false,
|
||||
timeout: 5000,
|
||||
});
|
||||
socket.on("connect", () => {
|
||||
socket.emit("login", {
|
||||
username,
|
||||
password,
|
||||
}, (res) => {
|
||||
if (res.ok) {
|
||||
console.log("Logged in.");
|
||||
socket.emit("disconnectOtherSocketClients");
|
||||
} else {
|
||||
console.warn("Login failed.");
|
||||
console.warn("Please restart the server to disconnect all sessions.");
|
||||
}
|
||||
socket.close();
|
||||
});
|
||||
});
|
||||
|
||||
socket.on("connect_error", function () {
|
||||
// The localWebSocketURL is not guaranteed to be working for some complicated Uptime Kuma setup
|
||||
// Ask the user to restart the server manually
|
||||
console.warn("Failed to connect to " + localWebSocketURL);
|
||||
console.warn("Please restart the server to disconnect all sessions manually.");
|
||||
resolve();
|
||||
});
|
||||
socket.on("disconnect", () => {
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
if (!process.env.TEST_BACKEND) {
|
||||
main();
|
||||
}
|
||||
|
9
extra/test-docker.js
Normal file
9
extra/test-docker.js
Normal file
@@ -0,0 +1,9 @@
|
||||
// Check if docker is running
|
||||
const { exec } = require("child_process");
|
||||
|
||||
exec("docker ps", (err, stdout, stderr) => {
|
||||
if (err) {
|
||||
console.error("Docker is not running. Please start docker and try again.");
|
||||
process.exit(1);
|
||||
}
|
||||
});
|
18
index.html
18
index.html
@@ -2,15 +2,31 @@
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0, viewport-fit=cover" />
|
||||
<link rel="apple-touch-icon" sizes="180x180" href="/apple-touch-icon.png">
|
||||
<link rel="icon" type="image/svg+xml" href="/icon.svg" />
|
||||
<link rel="manifest" href="/manifest.json" />
|
||||
<meta name="theme-color" id="theme-color" content="" />
|
||||
<meta name="description" content="Uptime Kuma monitoring tool" />
|
||||
<title>Uptime Kuma</title>
|
||||
<style>
|
||||
.noscript-message {
|
||||
font-size: 20px;
|
||||
text-align: center;
|
||||
padding: 10px;
|
||||
max-width: 500px;
|
||||
margin: 0 auto;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<noscript>
|
||||
<div class="noscript-message">
|
||||
Sorry, you don't seem to have JavaScript enabled or your browser
|
||||
doesn't support it.<br />This website requires JavaScript to function.
|
||||
Please enable JavaScript in your browser settings to continue.
|
||||
</div>
|
||||
</noscript>
|
||||
<div id="app"></div>
|
||||
<script type="module" src="/src/main.js"></script>
|
||||
</body>
|
||||
|
63
install.sh
63
install.sh
@@ -3,15 +3,15 @@
|
||||
# The command is working on Windows PowerShell and Docker for Windows only.
|
||||
# curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh
|
||||
"echo" "-e" "====================="
|
||||
"echo" "-e" "Uptime Kuma Installer"
|
||||
"echo" "-e" "Uptime Kuma Install Script"
|
||||
"echo" "-e" "====================="
|
||||
"echo" "-e" "Supported OS: CentOS 7/8, Ubuntu >= 16.04 and Debian"
|
||||
"echo" "-e" "Supported OS: Ubuntu >= 16.04, Debian and CentOS/RHEL 7/8"
|
||||
"echo" "-e" "---------------------------------------"
|
||||
"echo" "-e" "This script is designed for Linux and basic usage."
|
||||
"echo" "-e" "For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation"
|
||||
"echo" "-e" "---------------------------------------"
|
||||
"echo" "-e" ""
|
||||
"echo" "-e" "Local - Install Uptime Kuma in your current machine with git, Node.js 14 and pm2"
|
||||
"echo" "-e" "Local - Install Uptime Kuma on your current machine with git, Node.js and pm2"
|
||||
"echo" "-e" "Docker - Install Uptime Kuma Docker container"
|
||||
"echo" "-e" ""
|
||||
if [ "$1" != "" ]; then
|
||||
@@ -25,12 +25,9 @@ function checkNode {
|
||||
nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])')
|
||||
"echo" "-e" "Node Version: ""$nodeVersion"
|
||||
_0="12"
|
||||
if [ $(($nodeVersion < $_0)) == 1 ]; then
|
||||
if [ $(($nodeVersion <= $_0)) == 1 ]; then
|
||||
"echo" "-e" "Error: Required Node.js 14"
|
||||
"exit" "1"
|
||||
fi
|
||||
if [ "$nodeVersion" == "12" ]; then
|
||||
"echo" "-e" "Warning: NodeJS ""$nodeVersion"" is not tested."
|
||||
fi
|
||||
}
|
||||
function deb {
|
||||
@@ -50,8 +47,8 @@ fi
|
||||
"echo" "-e" "Installing Curl"
|
||||
apt --yes install curl
|
||||
fi
|
||||
"echo" "-e" "Installing Node.js 14"
|
||||
curl -sL https://deb.nodesource.com/setup_14.x | bash - > log.txt
|
||||
"echo" "-e" "Installing Node.js 16"
|
||||
curl -sL https://deb.nodesource.com/setup_16.x | bash - > log.txt
|
||||
apt --yes install nodejs
|
||||
node -v
|
||||
nodeCheckAgain=$(node -v)
|
||||
@@ -75,7 +72,10 @@ if [ "$type" == "local" ]; then
|
||||
if [ -e "/etc/issue" ]; then
|
||||
os=$(head -n1 /etc/issue | cut -f 1 -d ' ')
|
||||
if [ "$os" == "Ubuntu" ]; then
|
||||
distribution="ubuntu"
|
||||
distribution="ubuntu"
|
||||
# Get ubuntu version
|
||||
. /etc/lsb-release
|
||||
version="$DISTRIB_RELEASE"
|
||||
fi
|
||||
if [ "$os" == "Debian" ]; then
|
||||
distribution="debian"
|
||||
@@ -85,6 +85,7 @@ fi
|
||||
arch=$(uname -i)
|
||||
"echo" "-e" "Your OS: ""$os"
|
||||
"echo" "-e" "Distribution: ""$distribution"
|
||||
"echo" "-e" "Version: ""$version"
|
||||
"echo" "-e" "Arch: ""$arch"
|
||||
if [ "$3" != "" ]; then
|
||||
port="$3"
|
||||
@@ -108,14 +109,27 @@ fi
|
||||
if [ "$nodeCheck" != "" ]; then
|
||||
"checkNode"
|
||||
else
|
||||
curlCheck=$(curl --version)
|
||||
if [ "$curlCheck" == "" ]; then
|
||||
"echo" "-e" "Installing Curl"
|
||||
yum -y -q install curl
|
||||
dnfCheck=$(dnf --version)
|
||||
# Use yum
|
||||
if [ "$dnfCheck" == "" ]; then
|
||||
curlCheck=$(curl --version)
|
||||
if [ "$curlCheck" == "" ]; then
|
||||
"echo" "-e" "Installing Curl"
|
||||
yum -y -q install curl
|
||||
fi
|
||||
"echo" "-e" "Installing Node.js 14"
|
||||
curl -sL https://rpm.nodesource.com/setup_14.x | bash - > log.txt
|
||||
yum install -y -q nodejs
|
||||
"echo" "-e" "Installing Node.js 16"
|
||||
curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt
|
||||
yum install -y -q nodejs
|
||||
else
|
||||
curlCheck=$(curl --version)
|
||||
if [ "$curlCheck" == "" ]; then
|
||||
"echo" "-e" "Installing Curl"
|
||||
dnf -y install curl
|
||||
fi
|
||||
"echo" "-e" "Installing Node.js 16"
|
||||
curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt
|
||||
dnf install -y nodejs
|
||||
fi
|
||||
node -v
|
||||
nodeCheckAgain=$(node -v)
|
||||
if [ "$nodeCheckAgain" == "" ]; then
|
||||
@@ -142,12 +156,14 @@ fi
|
||||
check=$(git --version)
|
||||
if [ "$check" == "" ]; then
|
||||
error=$((1))
|
||||
"echo" "-e" "Error: git is missing"
|
||||
"echo" "-e" "Error: git is not found!"
|
||||
"echo" "-e" "help: an installation guide is available at https://git-scm.com/book/en/v2/Getting-Started-Installing-Git"
|
||||
fi
|
||||
check=$(node -v)
|
||||
if [ "$check" == "" ]; then
|
||||
error=$((1))
|
||||
"echo" "-e" "Error: node is missing"
|
||||
"echo" "-e" "Error: node is not found"
|
||||
"echo" "-e" "help: an installation guide is available at https://nodejs.org/en/download"
|
||||
fi
|
||||
if [ $(($error > 0)) == 1 ]; then
|
||||
"echo" "-e" "Please install above missing software"
|
||||
@@ -161,6 +177,13 @@ fi
|
||||
"echo" "-e" "Installing PM2"
|
||||
npm install pm2 -g && pm2 install pm2-logrotate
|
||||
pm2 startup
|
||||
fi
|
||||
# Check again
|
||||
check=$(pm2 --version)
|
||||
if [ "$check" == "" ]; then
|
||||
"echo" "-e" "Error: pm2 is not found!"
|
||||
"echo" "-e" "help: an installation guide is available at https://pm2.keymetrics.io/docs/usage/quick-start/"
|
||||
exit 1
|
||||
fi
|
||||
mkdir -p $installPath
|
||||
cd $installPath
|
||||
@@ -172,11 +195,13 @@ else
|
||||
check=$(docker -v)
|
||||
if [ "$check" == "" ]; then
|
||||
"echo" "-e" "Error: docker is not found!"
|
||||
"echo" "-e" "help: an installation guide is available at https://docs.docker.com/desktop/"
|
||||
exit 1
|
||||
fi
|
||||
check=$(docker info)
|
||||
if [[ "$check" == *"Is the docker daemon running"* ]]; then
|
||||
"echo" "Error: docker is not running"
|
||||
"echo" "help: a troubleshooting guide is available at https://docs.docker.com/config/daemon/troubleshoot/"
|
||||
"exit" "1"
|
||||
fi
|
||||
if [ "$3" != "" ]; then
|
||||
|
16815
package-lock.json
generated
16815
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
114
package.json
114
package.json
@@ -1,27 +1,30 @@
|
||||
{
|
||||
"name": "uptime-kuma",
|
||||
"version": "1.21.2",
|
||||
"version": "1.23.16",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/louislam/uptime-kuma.git"
|
||||
},
|
||||
"engines": {
|
||||
"node": "14.* || >=16.*"
|
||||
"node": "14 || 16 || 18 || >= 20.4.0"
|
||||
},
|
||||
"scripts": {
|
||||
"install-legacy": "npm install",
|
||||
"update-legacy": "npm update",
|
||||
"lint:js": "eslint --ext \".js,.vue\" --ignore-path .gitignore .",
|
||||
"lint:js-prod": "npm run lint:js -- --max-warnings 0",
|
||||
"lint-fix:js": "eslint --ext \".js,.vue\" --fix --ignore-path .gitignore .",
|
||||
"lint:style": "stylelint \"**/*.{vue,css,scss}\" --ignore-path .gitignore",
|
||||
"lint-fix:style": "stylelint \"**/*.{vue,css,scss}\" --fix --ignore-path .gitignore",
|
||||
"lint": "npm run lint:js && npm run lint:style",
|
||||
"lint:prod": "npm run lint:js-prod && npm run lint:style",
|
||||
"dev": "concurrently -k -r \"wait-on tcp:3000 && npm run start-server-dev \" \"npm run start-frontend-dev\"",
|
||||
"start-frontend-dev": "cross-env NODE_ENV=development vite --host --config ./config/vite.config.js",
|
||||
"start-frontend-devcontainer": "cross-env NODE_ENV=development DEVCONTAINER=1 vite --host --config ./config/vite.config.js",
|
||||
"start": "npm run start-server",
|
||||
"start-server": "node server/server.js",
|
||||
"start-server-dev": "cross-env NODE_ENV=development node server/server.js",
|
||||
"start-server-dev": "cross-env NODE_ENV=development node server/server.js --data-dir=./data/v1/",
|
||||
"build": "vite build --config ./config/vite.config.js",
|
||||
"test": "node test/prepare-test-server.js && npm run jest-backend",
|
||||
"test-with-build": "npm run build && npm test",
|
||||
@@ -34,28 +37,34 @@
|
||||
"build-docker-builder-go": "docker buildx build -f docker/builder-go.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:builder-go . --push",
|
||||
"build-docker-alpine": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:alpine -t louislam/uptime-kuma:1-alpine -t louislam/uptime-kuma:$VERSION-alpine --target release . --push",
|
||||
"build-docker-debian": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma -t louislam/uptime-kuma:1 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:debian -t louislam/uptime-kuma:1-debian -t louislam/uptime-kuma:$VERSION-debian --target release . --push",
|
||||
"build-docker-nightly": "npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly --target nightly . --push",
|
||||
"build-docker-nightly": "node ./extra/test-docker.js && npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly --target nightly . --push",
|
||||
"build-docker-nightly-alpine": "docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly-alpine --target nightly . --push",
|
||||
"build-docker-nightly-amd64": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:nightly-amd64 --target nightly . --push --progress plain",
|
||||
"build-docker-pr-test": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64 -t louislam/uptime-kuma:pr-test --target pr-test . --push",
|
||||
"upload-artifacts": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:upload-artifact --build-arg VERSION --build-arg GITHUB_TOKEN --target upload-artifact . --progress plain",
|
||||
"setup": "git checkout 1.21.2 && npm ci --production && npm run download-dist",
|
||||
"setup": "git checkout 1.23.16 && npm ci --production && npm run download-dist",
|
||||
"download-dist": "node extra/download-dist.js",
|
||||
"mark-as-nightly": "node extra/mark-as-nightly.js",
|
||||
"reset-password": "node extra/reset-password.js",
|
||||
"remove-2fa": "node extra/remove-2fa.js",
|
||||
"compile-install-script": "@powershell -NoProfile -ExecutionPolicy Unrestricted -Command ./extra/compile-install-script.ps1",
|
||||
"test-install-script-rockylinux": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/rockylinux.dockerfile .",
|
||||
"test-install-script-centos7": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/centos7.dockerfile .",
|
||||
"test-install-script-alpine3": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/alpine3.dockerfile .",
|
||||
"test-install-script-debian": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/debian.dockerfile .",
|
||||
"test-install-script-debian-buster": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/debian-buster.dockerfile .",
|
||||
"test-install-script-ubuntu": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu.dockerfile .",
|
||||
"test-install-script-ubuntu1804": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu1804.dockerfile .",
|
||||
"test-install-script-ubuntu1604": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu1604.dockerfile .",
|
||||
"test-nodejs16": "docker build --progress plain -f test/ubuntu-nodejs16.dockerfile .",
|
||||
"simple-dns-server": "node extra/simple-dns-server.js",
|
||||
"simple-mqtt-server": "node extra/simple-mqtt-server.js",
|
||||
"simple-mongo": "docker run --rm -p 27017:27017 mongo",
|
||||
"simple-postgres": "docker run --rm -p 5432:5432 -e POSTGRES_PASSWORD=postgres postgres",
|
||||
"simple-mariadb": "docker run --rm -p 3306:3306 -e MYSQL_ROOT_PASSWORD=mariadb# mariadb",
|
||||
"update-language-files": "cd extra/update-language-files && node index.js && cross-env-shell eslint ../../src/languages/$npm_config_language.js --fix",
|
||||
"ncu-patch": "npm-check-updates -u -t patch",
|
||||
"release-final": "node extra/update-version.js && npm run build-docker && node ./extra/press-any-key.js && npm run upload-artifacts && node ./extra/update-wiki-version.js",
|
||||
"release-beta": "node extra/beta/update-version.js && npm run build && node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:beta . --target release --push && node ./extra/press-any-key.js && npm run upload-artifacts",
|
||||
"release-final": "node ./extra/test-docker.js && node extra/update-version.js && npm run build-docker && node ./extra/press-any-key.js && npm run upload-artifacts && node ./extra/update-wiki-version.js",
|
||||
"release-beta": "node ./extra/test-docker.js && node extra/beta/update-version.js && npm run build && node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:beta . --target release --push && node ./extra/press-any-key.js && npm run upload-artifacts",
|
||||
"git-remove-tag": "git tag -d",
|
||||
"build-dist-and-restart": "npm run build && npm run start-server-dev",
|
||||
"start-pr-test": "node extra/checkout-pr.js && npm install && npm run dev",
|
||||
@@ -64,71 +73,82 @@
|
||||
"cy:run:unit": "npx cypress run --browser chrome --headless --config-file ./config/cypress.frontend.config.js",
|
||||
"cypress-open": "concurrently -k -r \"node test/prepare-test-server.js && node server/server.js --port=3002 --data-dir=./data/test/\" \"cypress open --config-file ./config/cypress.config.js\"",
|
||||
"build-healthcheck-armv7": "cross-env GOOS=linux GOARCH=arm GOARM=7 go build -x -o ./extra/healthcheck-armv7 ./extra/healthcheck.go",
|
||||
"depoly-demo-server": "node extra/deploy-demo-server.js",
|
||||
"sort-contributors": "node extra/sort-contributors.js"
|
||||
"deploy-demo-server": "node extra/deploy-demo-server.js",
|
||||
"sort-contributors": "node extra/sort-contributors.js",
|
||||
"start-server-node14-win": "private\\node14\\node.exe server/server.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"@grpc/grpc-js": "~1.7.3",
|
||||
"@louislam/ping": "~0.4.4-mod.0",
|
||||
"@grpc/grpc-js": "~1.8.22",
|
||||
"@louislam/ping": "~0.4.4-mod.1",
|
||||
"@louislam/sqlite3": "15.1.6",
|
||||
"args-parser": "~1.3.0",
|
||||
"axios": "~0.27.0",
|
||||
"axios": "~0.28.1",
|
||||
"axios-ntlm": "1.3.0",
|
||||
"badge-maker": "~3.3.1",
|
||||
"bcryptjs": "~2.4.3",
|
||||
"bree": "~7.1.5",
|
||||
"cacheable-lookup": "~6.0.4",
|
||||
"chardet": "~1.4.0",
|
||||
"check-password-strength": "^2.0.5",
|
||||
"cheerio": "~1.0.0-rc.12",
|
||||
"cheerio": "1.0.0-rc.12",
|
||||
"chroma-js": "~2.4.2",
|
||||
"command-exists": "~1.2.9",
|
||||
"compare-versions": "~3.6.0",
|
||||
"compression": "~1.7.4",
|
||||
"croner": "^6.0.3",
|
||||
"croner": "~6.0.5",
|
||||
"dayjs": "~1.11.5",
|
||||
"dotenv": "~16.0.3",
|
||||
"express": "~4.17.3",
|
||||
"express": "~4.21.0",
|
||||
"express-basic-auth": "~1.2.1",
|
||||
"express-static-gzip": "~2.1.7",
|
||||
"form-data": "~4.0.0",
|
||||
"gamedig": "^4.0.5",
|
||||
"gamedig": "^4.2.0",
|
||||
"html-escaper": "^3.0.3",
|
||||
"http-graceful-shutdown": "~3.1.7",
|
||||
"http-proxy-agent": "~5.0.0",
|
||||
"https-proxy-agent": "~5.0.1",
|
||||
"iconv-lite": "~0.6.3",
|
||||
"isomorphic-ws": "^5.0.0",
|
||||
"jsesc": "~3.0.2",
|
||||
"jsonata": "^2.0.3",
|
||||
"jsonwebtoken": "~9.0.0",
|
||||
"jwt-decode": "~3.1.2",
|
||||
"kafkajs": "^2.2.4",
|
||||
"limiter": "~2.1.0",
|
||||
"mongodb": "~4.14.0",
|
||||
"liquidjs": "^10.7.0",
|
||||
"mongodb": "~4.17.1",
|
||||
"mqtt": "~4.3.7",
|
||||
"mssql": "~8.1.4",
|
||||
"mysql2": "~2.3.3",
|
||||
"nanoid": "^3.3.4",
|
||||
"mysql2": "~3.9.6",
|
||||
"nanoid": "~3.3.4",
|
||||
"node-cloudflared-tunnel": "~1.0.9",
|
||||
"node-radius-client": "~1.0.0",
|
||||
"nodemailer": "~6.6.5",
|
||||
"nodemailer": "~6.9.13",
|
||||
"nostr-tools": "^1.13.1",
|
||||
"notp": "~2.0.3",
|
||||
"openid-client": "^5.4.2",
|
||||
"password-hash": "~1.2.2",
|
||||
"pg": "~8.8.0",
|
||||
"pg-connection-string": "~2.5.0",
|
||||
"pg": "~8.11.3",
|
||||
"pg-connection-string": "~2.6.2",
|
||||
"playwright-core": "~1.35.1",
|
||||
"prom-client": "~13.2.0",
|
||||
"prometheus-api-metrics": "~3.2.1",
|
||||
"protobufjs": "~7.1.1",
|
||||
"promisify-child-process": "~4.1.2",
|
||||
"protobufjs": "~7.2.4",
|
||||
"qs": "~6.10.4",
|
||||
"redbean-node": "~0.2.0",
|
||||
"redbean-node": "~0.3.0",
|
||||
"redis": "~4.5.1",
|
||||
"socket.io": "~4.5.3",
|
||||
"socket.io-client": "~4.5.3",
|
||||
"semver": "~7.5.4",
|
||||
"socket.io": "~4.8.0",
|
||||
"socket.io-client": "~4.8.0",
|
||||
"socks-proxy-agent": "6.1.1",
|
||||
"tar": "~6.1.11",
|
||||
"tar": "~6.2.1",
|
||||
"tcp-ping": "~0.1.1",
|
||||
"thirty-two": "~1.0.2"
|
||||
"thirty-two": "~1.0.2",
|
||||
"ws": "^8.13.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@actions/github": "~5.0.1",
|
||||
"@babel/eslint-parser": "~7.17.0",
|
||||
"@actions/github": "~5.1.1",
|
||||
"@babel/eslint-parser": "^7.22.7",
|
||||
"@babel/preset-env": "^7.15.8",
|
||||
"@fortawesome/fontawesome-svg-core": "~1.2.36",
|
||||
"@fortawesome/free-regular-svg-icons": "~5.15.4",
|
||||
@@ -136,29 +156,28 @@
|
||||
"@fortawesome/vue-fontawesome": "~3.0.0-5",
|
||||
"@popperjs/core": "~2.10.2",
|
||||
"@types/bootstrap": "~5.1.9",
|
||||
"@vitejs/plugin-legacy": "~2.1.0",
|
||||
"@vitejs/plugin-vue": "~3.1.0",
|
||||
"@vue/compiler-sfc": "~3.2.36",
|
||||
"@vitejs/plugin-legacy": "~4.1.0",
|
||||
"@vitejs/plugin-vue": "~4.2.3",
|
||||
"@vue/compiler-sfc": "~3.3.4",
|
||||
"@vuepic/vue-datepicker": "~3.4.8",
|
||||
"aedes": "^0.46.3",
|
||||
"babel-plugin-rewire": "~1.2.0",
|
||||
"bootstrap": "5.1.3",
|
||||
"chart.js": "~3.6.2",
|
||||
"chartjs-adapter-dayjs": "~1.0.0",
|
||||
"chart.js": "~4.2.1",
|
||||
"chartjs-adapter-dayjs-4": "~1.0.4",
|
||||
"concurrently": "^7.1.0",
|
||||
"core-js": "~3.26.1",
|
||||
"cronstrue": "~2.24.0",
|
||||
"cross-env": "~7.0.3",
|
||||
"cypress": "^10.1.0",
|
||||
"cypress": "^13.2.0",
|
||||
"delay": "^5.0.0",
|
||||
"dns2": "~2.0.1",
|
||||
"dompurify": "~2.4.3",
|
||||
"dompurify": "~3.1.7",
|
||||
"eslint": "~8.14.0",
|
||||
"eslint-plugin-vue": "~8.7.1",
|
||||
"favico.js": "~0.3.10",
|
||||
"jest": "~27.2.5",
|
||||
"jest": "~29.6.1",
|
||||
"marked": "~4.2.5",
|
||||
"node-ssh": "~13.0.1",
|
||||
"node-ssh": "~13.1.0",
|
||||
"postcss-html": "~1.5.0",
|
||||
"postcss-rtlcss": "~3.7.2",
|
||||
"postcss-scss": "~4.0.4",
|
||||
@@ -166,16 +185,16 @@
|
||||
"qrcode": "~1.5.0",
|
||||
"rollup-plugin-visualizer": "^5.6.0",
|
||||
"sass": "~1.42.1",
|
||||
"stylelint": "~14.7.1",
|
||||
"stylelint": "^15.10.1",
|
||||
"stylelint-config-standard": "~25.0.0",
|
||||
"terser": "~5.15.0",
|
||||
"timezones-list": "~3.0.1",
|
||||
"typescript": "~4.4.4",
|
||||
"v-pagination-3": "~0.1.7",
|
||||
"vite": "~3.1.0",
|
||||
"vite": "~5.2.8",
|
||||
"vite-plugin-compression": "^0.5.1",
|
||||
"vue": "~3.2.47",
|
||||
"vue-chart-3": "3.0.9",
|
||||
"vue": "~3.3.4",
|
||||
"vue-chartjs": "~5.2.0",
|
||||
"vue-confirm-dialog": "~1.0.2",
|
||||
"vue-contenteditable": "~3.0.4",
|
||||
"vue-i18n": "~9.2.2",
|
||||
@@ -186,6 +205,7 @@
|
||||
"vue-router": "~4.0.14",
|
||||
"vue-toastification": "~2.0.0-rc.5",
|
||||
"vuedraggable": "~4.1.0",
|
||||
"wait-on": "^6.0.1"
|
||||
"wait-on": "^7.2.0",
|
||||
"whatwg-url": "~12.0.1"
|
||||
}
|
||||
}
|
||||
|
@@ -1,10 +1,9 @@
|
||||
<svg width="640" height="640" viewBox="0 0 640 640" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M490.4 235.64C544.09 358.38 544.09 435.34 490.4 466.5C409.85 513.24 199.96 527.49 139.54 455.64C99.2601 407.74 99.2601 334.4 139.54 235.64C180.5 168.18 238.71 134.45 314.17 134.45C389.64 134.45 448.38 168.18 490.4 235.64Z" fill="url(#paint0_linear_381_799)"/>
|
||||
<path d="M490.4 235.64C544.09 358.38 544.09 435.34 490.4 466.5C409.85 513.24 199.96 527.49 139.54 455.64C99.2601 407.74 99.2601 334.4 139.54 235.64C180.5 168.18 238.71 134.45 314.17 134.45C389.64 134.45 448.38 168.18 490.4 235.64Z" stroke="#F2F2F2" stroke-opacity="0.51" stroke-width="200"/>
|
||||
<defs>
|
||||
<linearGradient id="paint0_linear_381_799" x1="259.78" y1="261.15" x2="463.85" y2="456.49" gradientUnits="userSpaceOnUse">
|
||||
<svg width="640" height="640" viewBox="0 0 640 640" xml:space="preserve" xmlns="http://www.w3.org/2000/svg">
|
||||
<g transform="matrix(1 0 0 1 320 320)">
|
||||
<linearGradient id="S3" gradientUnits="userSpaceOnUse" gradientTransform="matrix(1 0 0 1 -319.99875 -320.0001577393)" x1="259.78" y1="261.15" x2="463.85" y2="456.49">
|
||||
<stop stop-color="#5CDD8B"/>
|
||||
<stop offset="1" stop-color="#86E6A9"/>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<path style="stroke: rgb(242,242,242); stroke-opacity: 0.51; stroke-width: 200; stroke-dasharray: none; stroke-linecap: butt; stroke-dashoffset: 0; stroke-linejoin: miter; stroke-miterlimit: 4; fill: url(#S3); fill-rule: nonzero; opacity: 1;" transform=" translate(0, 0)" d="M 170.40125 -84.36016 C 224.09125 38.37984 224.09125 115.33984 170.40125 146.49984 C 89.85125000000001 193.23984000000002 -120.03875 207.48984000000002 -180.45875 135.63984 C -220.73875 87.73983999999999 -220.73875 14.399839999999998 -180.45875 -84.36016000000001 C -139.49875 -151.82016 -81.28875000000001 -185.55016 -5.828750000000014 -185.55016 C 69.64124999999999 -185.55016 128.38125 -151.82016000000002 170.40124999999998 -84.36016000000001 z" stroke-linecap="round" />
|
||||
</g>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 893 B After Width: | Height: | Size: 1.1 KiB |
@@ -2,6 +2,7 @@ const basicAuth = require("express-basic-auth");
|
||||
const passwordHash = require("./password-hash");
|
||||
const { R } = require("redbean-node");
|
||||
const { setting } = require("./util-server");
|
||||
const { log } = require("../src/util");
|
||||
const { loginRateLimiter, apiRateLimiter } = require("./rate-limiter");
|
||||
const { Settings } = require("./settings");
|
||||
const dayjs = require("dayjs");
|
||||
@@ -81,12 +82,16 @@ function apiAuthorizer(username, password, callback) {
|
||||
apiRateLimiter.pass(null, 0).then((pass) => {
|
||||
if (pass) {
|
||||
verifyAPIKey(password).then((valid) => {
|
||||
if (!valid) {
|
||||
log.warn("api-auth", "Failed API auth attempt: invalid API Key");
|
||||
}
|
||||
callback(null, valid);
|
||||
// Only allow a set number of api requests per minute
|
||||
// (currently set to 60)
|
||||
apiRateLimiter.removeTokens(1);
|
||||
});
|
||||
} else {
|
||||
log.warn("api-auth", "Failed API auth attempt: rate limit exceeded");
|
||||
callback(null, false);
|
||||
}
|
||||
});
|
||||
@@ -106,10 +111,12 @@ function userAuthorizer(username, password, callback) {
|
||||
callback(null, user != null);
|
||||
|
||||
if (user == null) {
|
||||
log.warn("basic-auth", "Failed basic auth attempt: invalid username/password");
|
||||
loginRateLimiter.removeTokens(1);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
log.warn("basic-auth", "Failed basic auth attempt: rate limit exceeded");
|
||||
callback(null, false);
|
||||
}
|
||||
});
|
||||
|
@@ -1,27 +1,33 @@
|
||||
const { setSetting, setting } = require("./util-server");
|
||||
const axios = require("axios");
|
||||
const compareVersions = require("compare-versions");
|
||||
const { log } = require("../src/util");
|
||||
|
||||
exports.version = require("../package.json").version;
|
||||
exports.latestVersion = null;
|
||||
|
||||
// How much time in ms to wait between update checks
|
||||
const UPDATE_CHECKER_INTERVAL_MS = 1000 * 60 * 60 * 48;
|
||||
const UPDATE_CHECKER_LATEST_VERSION_URL = "https://uptime.kuma.pet/version";
|
||||
|
||||
let interval;
|
||||
|
||||
/** Start 48 hour check interval */
|
||||
exports.startInterval = () => {
|
||||
let check = async () => {
|
||||
if (await setting("checkUpdate") === false) {
|
||||
return;
|
||||
}
|
||||
|
||||
log.debug("update-checker", "Retrieving latest versions");
|
||||
|
||||
try {
|
||||
const res = await axios.get("https://uptime.kuma.pet/version");
|
||||
const res = await axios.get(UPDATE_CHECKER_LATEST_VERSION_URL);
|
||||
|
||||
// For debug
|
||||
if (process.env.TEST_CHECK_VERSION === "1") {
|
||||
res.data.slow = "1000.0.0";
|
||||
}
|
||||
|
||||
if (await setting("checkUpdate") === false) {
|
||||
return;
|
||||
}
|
||||
|
||||
let checkBeta = await setting("checkBeta");
|
||||
|
||||
if (checkBeta && res.data.beta) {
|
||||
@@ -35,12 +41,14 @@ exports.startInterval = () => {
|
||||
exports.latestVersion = res.data.slow;
|
||||
}
|
||||
|
||||
} catch (_) { }
|
||||
} catch (_) {
|
||||
log.info("update-checker", "Failed to check for new versions");
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
check();
|
||||
interval = setInterval(check, 3600 * 1000 * 48);
|
||||
interval = setInterval(check, UPDATE_CHECKER_INTERVAL_MS);
|
||||
};
|
||||
|
||||
/**
|
||||
|
@@ -141,12 +141,24 @@ async function sendAPIKeyList(socket) {
|
||||
/**
|
||||
* Emits the version information to the client.
|
||||
* @param {Socket} socket Socket.io socket instance
|
||||
* @param {boolean} hideVersion
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async function sendInfo(socket) {
|
||||
async function sendInfo(socket, hideVersion = false) {
|
||||
let version;
|
||||
let latestVersion;
|
||||
let isContainer;
|
||||
|
||||
if (!hideVersion) {
|
||||
version = checkVersion.version;
|
||||
latestVersion = checkVersion.latestVersion;
|
||||
isContainer = (process.env.UPTIME_KUMA_IS_CONTAINER === "1");
|
||||
}
|
||||
|
||||
socket.emit("info", {
|
||||
version: checkVersion.version,
|
||||
latestVersion: checkVersion.latestVersion,
|
||||
version,
|
||||
latestVersion,
|
||||
isContainer,
|
||||
primaryBaseURL: await setting("primaryBaseURL"),
|
||||
serverTimezone: await server.getTimezone(),
|
||||
serverTimezoneOffset: server.getTimezoneOffset(),
|
||||
|
@@ -1,28 +1,42 @@
|
||||
const args = require("args-parser")(process.argv);
|
||||
const demoMode = args["demo"] || false;
|
||||
const isFreeBSD = /^freebsd/.test(process.platform);
|
||||
|
||||
const badgeConstants = {
|
||||
naColor: "#999",
|
||||
defaultUpColor: "#66c20a",
|
||||
defaultWarnColor: "#eed202",
|
||||
defaultDownColor: "#c2290a",
|
||||
defaultPendingColor: "#f8a306",
|
||||
defaultMaintenanceColor: "#1747f5",
|
||||
defaultPingColor: "blue", // as defined by badge-maker / shields.io
|
||||
defaultStyle: "flat",
|
||||
defaultPingValueSuffix: "ms",
|
||||
defaultPingLabelSuffix: "h",
|
||||
defaultUptimeValueSuffix: "%",
|
||||
defaultUptimeLabelSuffix: "h",
|
||||
defaultCertExpValueSuffix: " days",
|
||||
defaultCertExpLabelSuffix: "h",
|
||||
// Values Come From Default Notification Times
|
||||
defaultCertExpireWarnDays: "14",
|
||||
defaultCertExpireDownDays: "7"
|
||||
};
|
||||
// Interop with browser
|
||||
const args = (typeof process !== "undefined") ? require("args-parser")(process.argv) : {};
|
||||
|
||||
// If host is omitted, the server will accept connections on the unspecified IPv6 address (::) when IPv6 is available and the unspecified IPv4 address (0.0.0.0) otherwise.
|
||||
// Dual-stack support for (::)
|
||||
// Also read HOST if not FreeBSD, as HOST is a system environment variable in FreeBSD
|
||||
let hostEnv = isFreeBSD ? null : process.env.HOST;
|
||||
const hostname = args.host || process.env.UPTIME_KUMA_HOST || hostEnv;
|
||||
|
||||
const port = [ args.port, process.env.UPTIME_KUMA_PORT, process.env.PORT, 3001 ]
|
||||
.map(portValue => parseInt(portValue))
|
||||
.find(portValue => !isNaN(portValue));
|
||||
|
||||
const sslKey = args["ssl-key"] || process.env.UPTIME_KUMA_SSL_KEY || process.env.SSL_KEY || undefined;
|
||||
const sslCert = args["ssl-cert"] || process.env.UPTIME_KUMA_SSL_CERT || process.env.SSL_CERT || undefined;
|
||||
const sslKeyPassphrase = args["ssl-key-passphrase"] || process.env.UPTIME_KUMA_SSL_KEY_PASSPHRASE || process.env.SSL_KEY_PASSPHRASE || undefined;
|
||||
|
||||
const isSSL = sslKey && sslCert;
|
||||
|
||||
function getLocalWebSocketURL() {
|
||||
const protocol = isSSL ? "wss" : "ws";
|
||||
const host = hostname || "localhost";
|
||||
return `${protocol}://${host}:${port}`;
|
||||
}
|
||||
|
||||
const localWebSocketURL = getLocalWebSocketURL();
|
||||
|
||||
const demoMode = args["demo"] || false;
|
||||
|
||||
module.exports = {
|
||||
args,
|
||||
hostname,
|
||||
port,
|
||||
sslKey,
|
||||
sslCert,
|
||||
sslKeyPassphrase,
|
||||
isSSL,
|
||||
localWebSocketURL,
|
||||
demoMode,
|
||||
badgeConstants,
|
||||
};
|
||||
|
@@ -2,9 +2,8 @@ const fs = require("fs");
|
||||
const { R } = require("redbean-node");
|
||||
const { setSetting, setting } = require("./util-server");
|
||||
const { log, sleep } = require("../src/util");
|
||||
const dayjs = require("dayjs");
|
||||
const knex = require("knex");
|
||||
const { PluginsManager } = require("./plugins-manager");
|
||||
const path = require("path");
|
||||
|
||||
/**
|
||||
* Database & App Data Folder
|
||||
@@ -23,8 +22,12 @@ class Database {
|
||||
*/
|
||||
static uploadDir;
|
||||
|
||||
static screenshotDir;
|
||||
|
||||
static path;
|
||||
|
||||
static dockerTLSDir;
|
||||
|
||||
/**
|
||||
* @type {boolean}
|
||||
*/
|
||||
@@ -70,6 +73,18 @@ class Database {
|
||||
"patch-api-key-table.sql": true,
|
||||
"patch-monitor-tls.sql": true,
|
||||
"patch-maintenance-cron.sql": true,
|
||||
"patch-add-parent-monitor.sql": true,
|
||||
"patch-add-invert-keyword.sql": true,
|
||||
"patch-added-json-query.sql": true,
|
||||
"patch-added-kafka-producer.sql": true,
|
||||
"patch-add-certificate-expiry-status-page.sql": true,
|
||||
"patch-monitor-oauth-cc.sql": true,
|
||||
"patch-add-timeout-monitor.sql": true,
|
||||
"patch-add-gamedig-given-port.sql": true,
|
||||
"patch-notification-config.sql": true,
|
||||
"patch-fix-kafka-producer-booleans.sql": true,
|
||||
"patch-timeout.sql": true,
|
||||
"patch-monitor-tls-info-add-fk.sql": true,
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -88,23 +103,28 @@ class Database {
|
||||
// Data Directory (must be end with "/")
|
||||
Database.dataDir = process.env.DATA_DIR || args["data-dir"] || "./data/";
|
||||
|
||||
// Plugin feature is working only if the dataDir = "./data";
|
||||
if (Database.dataDir !== "./data/") {
|
||||
log.warn("PLUGIN", "Warning: In order to enable plugin feature, you need to use the default data directory: ./data/");
|
||||
PluginsManager.disable = true;
|
||||
}
|
||||
|
||||
Database.path = Database.dataDir + "kuma.db";
|
||||
Database.path = path.join(Database.dataDir, "kuma.db");
|
||||
if (! fs.existsSync(Database.dataDir)) {
|
||||
fs.mkdirSync(Database.dataDir, { recursive: true });
|
||||
}
|
||||
|
||||
Database.uploadDir = Database.dataDir + "upload/";
|
||||
Database.uploadDir = path.join(Database.dataDir, "upload/");
|
||||
|
||||
if (! fs.existsSync(Database.uploadDir)) {
|
||||
fs.mkdirSync(Database.uploadDir, { recursive: true });
|
||||
}
|
||||
|
||||
// Create screenshot dir
|
||||
Database.screenshotDir = path.join(Database.dataDir, "screenshots/");
|
||||
if (! fs.existsSync(Database.screenshotDir)) {
|
||||
fs.mkdirSync(Database.screenshotDir, { recursive: true });
|
||||
}
|
||||
|
||||
Database.dockerTLSDir = path.join(Database.dataDir, "docker-tls/");
|
||||
if (! fs.existsSync(Database.dockerTLSDir)) {
|
||||
fs.mkdirSync(Database.dockerTLSDir, { recursive: true });
|
||||
}
|
||||
|
||||
log.info("db", `Data Dir: ${Database.dataDir}`);
|
||||
}
|
||||
|
||||
@@ -161,12 +181,12 @@ class Database {
|
||||
await R.exec("PRAGMA journal_mode = WAL");
|
||||
}
|
||||
await R.exec("PRAGMA cache_size = -12000");
|
||||
await R.exec("PRAGMA auto_vacuum = FULL");
|
||||
await R.exec("PRAGMA auto_vacuum = INCREMENTAL");
|
||||
|
||||
// This ensures that an operating system crash or power failure will not corrupt the database.
|
||||
// FULL synchronous is very safe, but it is also slower.
|
||||
// Read more: https://sqlite.org/pragma.html#pragma_synchronous
|
||||
await R.exec("PRAGMA synchronous = FULL");
|
||||
await R.exec("PRAGMA synchronous = NORMAL");
|
||||
|
||||
if (!noLog) {
|
||||
log.info("db", "SQLite config:");
|
||||
@@ -417,6 +437,9 @@ class Database {
|
||||
|
||||
log.info("db", "Closing the database");
|
||||
|
||||
// Flush WAL to main database
|
||||
await R.exec("PRAGMA wal_checkpoint(TRUNCATE)");
|
||||
|
||||
while (true) {
|
||||
Database.noReject = true;
|
||||
await R.close();
|
||||
|
@@ -2,8 +2,16 @@ const axios = require("axios");
|
||||
const { R } = require("redbean-node");
|
||||
const version = require("../package.json").version;
|
||||
const https = require("https");
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const Database = require("./database");
|
||||
|
||||
class DockerHost {
|
||||
|
||||
static CertificateFileNameCA = "ca.pem";
|
||||
static CertificateFileNameCert = "cert.pem";
|
||||
static CertificateFileNameKey = "key.pem";
|
||||
|
||||
/**
|
||||
* Save a docker host
|
||||
* @param {Object} dockerHost Docker host to save
|
||||
@@ -66,16 +74,13 @@ class DockerHost {
|
||||
"Accept": "*/*",
|
||||
"User-Agent": "Uptime-Kuma/" + version
|
||||
},
|
||||
httpsAgent: new https.Agent({
|
||||
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
|
||||
rejectUnauthorized: false,
|
||||
}),
|
||||
};
|
||||
|
||||
if (dockerHost.dockerType === "socket") {
|
||||
options.socketPath = dockerHost.dockerDaemon;
|
||||
} else if (dockerHost.dockerType === "tcp") {
|
||||
options.baseURL = DockerHost.patchDockerURL(dockerHost.dockerDaemon);
|
||||
options.httpsAgent = new https.Agent(DockerHost.getHttpsAgentOptions(dockerHost.dockerType, options.baseURL));
|
||||
}
|
||||
|
||||
let res = await axios.request(options);
|
||||
@@ -111,6 +116,53 @@ class DockerHost {
|
||||
}
|
||||
return url;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns HTTPS agent options with client side TLS parameters if certificate files
|
||||
* for the given host are available under a predefined directory path.
|
||||
*
|
||||
* The base path where certificates are looked for can be set with the
|
||||
* 'DOCKER_TLS_DIR_PATH' environmental variable or defaults to 'data/docker-tls/'.
|
||||
*
|
||||
* If a directory in this path exists with a name matching the FQDN of the docker host
|
||||
* (e.g. the FQDN of 'https://example.com:2376' is 'example.com' so the directory
|
||||
* 'data/docker-tls/example.com/' would be searched for certificate files),
|
||||
* then 'ca.pem', 'key.pem' and 'cert.pem' files are included in the agent options.
|
||||
* File names can also be overridden via 'DOCKER_TLS_FILE_NAME_(CA|KEY|CERT)'.
|
||||
*
|
||||
* @param {String} dockerType i.e. "tcp" or "socket"
|
||||
* @param {String} url The docker host URL rewritten to https://
|
||||
* @return {Object}
|
||||
* */
|
||||
static getHttpsAgentOptions(dockerType, url) {
|
||||
let baseOptions = {
|
||||
maxCachedSessions: 0,
|
||||
rejectUnauthorized: true
|
||||
};
|
||||
let certOptions = {};
|
||||
|
||||
let dirName = (new URL(url)).hostname;
|
||||
|
||||
let caPath = path.join(Database.dockerTLSDir, dirName, DockerHost.CertificateFileNameCA);
|
||||
let certPath = path.join(Database.dockerTLSDir, dirName, DockerHost.CertificateFileNameCert);
|
||||
let keyPath = path.join(Database.dockerTLSDir, dirName, DockerHost.CertificateFileNameKey);
|
||||
|
||||
if (dockerType === "tcp" && fs.existsSync(caPath) && fs.existsSync(certPath) && fs.existsSync(keyPath)) {
|
||||
let ca = fs.readFileSync(caPath);
|
||||
let key = fs.readFileSync(keyPath);
|
||||
let cert = fs.readFileSync(certPath);
|
||||
certOptions = {
|
||||
ca,
|
||||
key,
|
||||
cert
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
...baseOptions,
|
||||
...certOptions
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
|
@@ -1,24 +0,0 @@
|
||||
const childProcess = require("child_process");
|
||||
|
||||
class Git {
|
||||
|
||||
static clone(repoURL, cwd, targetDir = ".") {
|
||||
let result = childProcess.spawnSync("git", [
|
||||
"clone",
|
||||
repoURL,
|
||||
targetDir,
|
||||
], {
|
||||
cwd: cwd,
|
||||
});
|
||||
|
||||
if (result.status !== 0) {
|
||||
throw new Error(result.stderr.toString("utf-8"));
|
||||
} else {
|
||||
return result.stdout.toString("utf-8") + result.stderr.toString("utf-8");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
Git,
|
||||
};
|
@@ -1,4 +1,5 @@
|
||||
const jsesc = require("jsesc");
|
||||
const { escape } = require("html-escaper");
|
||||
|
||||
/**
|
||||
* Returns a string that represents the javascript that is required to insert the Google Analytics scripts
|
||||
@@ -7,15 +8,18 @@ const jsesc = require("jsesc");
|
||||
* @returns {string}
|
||||
*/
|
||||
function getGoogleAnalyticsScript(tagId) {
|
||||
let escapedTagId = jsesc(tagId, { isScriptContext: true });
|
||||
let escapedTagIdJS = jsesc(tagId, { isScriptContext: true });
|
||||
|
||||
if (escapedTagId) {
|
||||
escapedTagId = escapedTagId.trim();
|
||||
if (escapedTagIdJS) {
|
||||
escapedTagIdJS = escapedTagIdJS.trim();
|
||||
}
|
||||
|
||||
// Escape the tag ID for use in an HTML attribute.
|
||||
let escapedTagIdHTMLAttribute = escape(tagId);
|
||||
|
||||
return `
|
||||
<script async src="https://www.googletagmanager.com/gtag/js?id=${escapedTagId}"></script>
|
||||
<script>window.dataLayer = window.dataLayer || []; function gtag(){dataLayer.push(arguments);} gtag('js', new Date());gtag('config', '${escapedTagId}'); </script>
|
||||
<script async src="https://www.googletagmanager.com/gtag/js?id=${escapedTagIdHTMLAttribute}"></script>
|
||||
<script>window.dataLayer = window.dataLayer || []; function gtag(){dataLayer.push(arguments);} gtag('js', new Date());gtag('config', '${escapedTagIdJS}'); </script>
|
||||
`;
|
||||
}
|
||||
|
||||
|
@@ -1,41 +1,51 @@
|
||||
const path = require("path");
|
||||
const Bree = require("bree");
|
||||
const { SHARE_ENV } = require("worker_threads");
|
||||
const { log } = require("../src/util");
|
||||
let bree;
|
||||
const { UptimeKumaServer } = require("./uptime-kuma-server");
|
||||
const { clearOldData } = require("./jobs/clear-old-data");
|
||||
const { incrementalVacuum } = require("./jobs/incremental-vacuum");
|
||||
const Cron = require("croner");
|
||||
|
||||
const jobs = [
|
||||
{
|
||||
name: "clear-old-data",
|
||||
interval: "at 03:14",
|
||||
interval: "14 03 * * *",
|
||||
jobFunc: clearOldData,
|
||||
croner: null,
|
||||
},
|
||||
{
|
||||
name: "incremental-vacuum",
|
||||
interval: "*/5 * * * *",
|
||||
jobFunc: incrementalVacuum,
|
||||
croner: null,
|
||||
}
|
||||
];
|
||||
|
||||
/**
|
||||
* Initialize background jobs
|
||||
* @param {Object} args Arguments to pass to workers
|
||||
* @returns {Bree}
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
const initBackgroundJobs = function (args) {
|
||||
bree = new Bree({
|
||||
root: path.resolve("server", "jobs"),
|
||||
jobs,
|
||||
worker: {
|
||||
env: SHARE_ENV,
|
||||
workerData: args,
|
||||
},
|
||||
workerMessageHandler: (message) => {
|
||||
log.info("jobs", message);
|
||||
}
|
||||
});
|
||||
const initBackgroundJobs = async function () {
|
||||
const timezone = await UptimeKumaServer.getInstance().getTimezone();
|
||||
|
||||
for (const job of jobs) {
|
||||
const cornerJob = new Cron(
|
||||
job.interval,
|
||||
{
|
||||
name: job.name,
|
||||
timezone,
|
||||
},
|
||||
job.jobFunc,
|
||||
);
|
||||
job.croner = cornerJob;
|
||||
}
|
||||
|
||||
bree.start();
|
||||
return bree;
|
||||
};
|
||||
|
||||
/** Stop all background jobs if running */
|
||||
const stopBackgroundJobs = function () {
|
||||
if (bree) {
|
||||
bree.stop();
|
||||
for (const job of jobs) {
|
||||
if (job.croner) {
|
||||
job.croner.stop();
|
||||
job.croner = null;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
|
@@ -1,12 +1,15 @@
|
||||
const { log, exit, connectDb } = require("./util-worker");
|
||||
const { R } = require("redbean-node");
|
||||
const { log } = require("../../src/util");
|
||||
const { setSetting, setting } = require("../util-server");
|
||||
|
||||
const DEFAULT_KEEP_PERIOD = 180;
|
||||
|
||||
(async () => {
|
||||
await connectDb();
|
||||
/**
|
||||
* Clears old data from the heartbeat table of the database.
|
||||
* @return {Promise<void>} A promise that resolves when the data has been cleared.
|
||||
*/
|
||||
|
||||
const clearOldData = async () => {
|
||||
let period = await setting("keepDataPeriodDays");
|
||||
|
||||
// Set Default Period
|
||||
@@ -20,26 +23,30 @@ const DEFAULT_KEEP_PERIOD = 180;
|
||||
try {
|
||||
parsedPeriod = parseInt(period);
|
||||
} catch (_) {
|
||||
log("Failed to parse setting, resetting to default..");
|
||||
log.warn("clearOldData", "Failed to parse setting, resetting to default..");
|
||||
await setSetting("keepDataPeriodDays", DEFAULT_KEEP_PERIOD, "general");
|
||||
parsedPeriod = DEFAULT_KEEP_PERIOD;
|
||||
}
|
||||
|
||||
if (parsedPeriod < 1) {
|
||||
log(`Data deletion has been disabled as period is less than 1. Period is ${parsedPeriod} days.`);
|
||||
log.info("clearOldData", `Data deletion has been disabled as period is less than 1. Period is ${parsedPeriod} days.`);
|
||||
} else {
|
||||
|
||||
log(`Clearing Data older than ${parsedPeriod} days...`);
|
||||
log.debug("clearOldData", `Clearing Data older than ${parsedPeriod} days...`);
|
||||
|
||||
try {
|
||||
await R.exec(
|
||||
"DELETE FROM heartbeat WHERE time < DATETIME('now', '-' || ? || ' days') ",
|
||||
[ parsedPeriod ]
|
||||
);
|
||||
|
||||
await R.exec("PRAGMA optimize;");
|
||||
} catch (e) {
|
||||
log(`Failed to clear old data: ${e.message}`);
|
||||
log.error("clearOldData", `Failed to clear old data: ${e.message}`);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
exit();
|
||||
})();
|
||||
module.exports = {
|
||||
clearOldData,
|
||||
};
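Side note (not part of the diff): the DELETE above leans on SQLite date arithmetic. A hedged sketch of previewing the cutoff with the same redbean-node connection; the 180-day value mirrors DEFAULT_KEEP_PERIOD and the alias name is arbitrary.

    const { R } = require("redbean-node");

    // Timestamp below which heartbeat rows would be removed
    const rows = await R.getAll("SELECT DATETIME('now', '-' || ? || ' days') AS cutoff", [ 180 ]);
    console.log(rows[0].cutoff);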
|
||||
|
server/jobs/incremental-vacuum.js (new file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
const { R } = require("redbean-node");
|
||||
const { log } = require("../../src/util");
|
||||
|
||||
/**
|
||||
* Run incremental_vacuum and checkpoint the WAL.
|
||||
* @return {Promise<void>} A promise that resolves when the process is finished.
|
||||
*/
|
||||
|
||||
const incrementalVacuum = async () => {
|
||||
try {
|
||||
log.debug("incrementalVacuum", "Running incremental_vacuum and wal_checkpoint(PASSIVE)...");
|
||||
await R.exec("PRAGMA incremental_vacuum(200)");
|
||||
await R.exec("PRAGMA wal_checkpoint(PASSIVE)");
|
||||
} catch (e) {
|
||||
log.error("incrementalVacuum", `Failed: ${e.message}`);
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
incrementalVacuum,
|
||||
};
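Side note (not part of the diff): PRAGMA incremental_vacuum only reclaims pages when the database runs with auto_vacuum = INCREMENTAL; otherwise it is a no-op. A hedged sketch for checking whether there is anything to reclaim, using the same redbean-node connection (the PRAGMA names are standard SQLite):

    const { R } = require("redbean-node");

    // freelist_count = unused pages still held by the database file
    const [ freelist ] = await R.getAll("PRAGMA freelist_count");
    const [ autoVacuum ] = await R.getAll("PRAGMA auto_vacuum");
    console.log("auto_vacuum mode:", autoVacuum, "free pages:", freelist);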
|
@@ -1,50 +0,0 @@
|
||||
const { parentPort, workerData } = require("worker_threads");
|
||||
const Database = require("../database");
|
||||
const path = require("path");
|
||||
|
||||
/**
|
||||
* Send message to parent process for logging
|
||||
* since worker_thread does not have access to stdout, this is used
|
||||
* instead of console.log()
|
||||
* @param {any} any The message to log
|
||||
*/
|
||||
const log = function (any) {
|
||||
if (parentPort) {
|
||||
parentPort.postMessage(any);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Exit the worker process
|
||||
* @param {number} error The status code to exit
|
||||
*/
|
||||
const exit = function (error) {
|
||||
if (error && error !== 0) {
|
||||
process.exit(error);
|
||||
} else {
|
||||
if (parentPort) {
|
||||
parentPort.postMessage("done");
|
||||
} else {
|
||||
process.exit(0);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/** Connects to the database */
|
||||
const connectDb = async function () {
|
||||
const dbPath = path.join(
|
||||
process.env.DATA_DIR || workerData["data-dir"] || "./data/"
|
||||
);
|
||||
|
||||
Database.init({
|
||||
"data-dir": dbPath,
|
||||
});
|
||||
|
||||
await Database.connect();
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
log,
|
||||
exit,
|
||||
connectDb,
|
||||
};
|
@@ -9,12 +9,12 @@ class Group extends BeanModel {
|
||||
* @param {boolean} [showTags=false] Should the JSON include monitor tags
|
||||
* @returns {Object}
|
||||
*/
|
||||
async toPublicJSON(showTags = false) {
|
||||
async toPublicJSON(showTags = false, certExpiry = false) {
|
||||
let monitorBeanList = await this.getMonitorList();
|
||||
let monitorList = [];
|
||||
|
||||
for (let bean of monitorBeanList) {
|
||||
monitorList.push(await bean.toPublicJSON(showTags));
|
||||
monitorList.push(await bean.toPublicJSON(showTags, certExpiry));
|
||||
}
|
||||
|
||||
return {
|
||||
|
@@ -47,7 +47,8 @@ class Maintenance extends BeanModel {
|
||||
cron: this.cron,
|
||||
duration: this.duration,
|
||||
durationMinutes: parseInt(this.duration / 60),
|
||||
timezone: await this.getTimezone(),
|
||||
timezone: await this.getTimezone(), // Only valid timezone
|
||||
timezoneOption: this.timezone, // Mainly for the dropdown menu, because there is an option "SAME_AS_SERVER"
|
||||
timezoneOffset: await this.getTimezoneOffset(),
|
||||
status: await this.getStatus(),
|
||||
};
|
||||
@@ -153,7 +154,7 @@ class Maintenance extends BeanModel {
|
||||
bean.description = obj.description;
|
||||
bean.strategy = obj.strategy;
|
||||
bean.interval_day = obj.intervalDay;
|
||||
bean.timezone = obj.timezone;
|
||||
bean.timezone = obj.timezoneOption;
|
||||
bean.active = obj.active;
|
||||
|
||||
if (obj.dateRange[0]) {
|
||||
@@ -290,7 +291,7 @@ class Maintenance extends BeanModel {
|
||||
}
|
||||
|
||||
getRunningTimeslot() {
|
||||
let start = dayjs(this.beanMeta.job.nextRun(dayjs().add(-this.duration, "second").format("YYYY-MM-DD HH:mm:ss")));
|
||||
let start = dayjs(this.beanMeta.job.nextRun(dayjs().add(-this.duration, "second").toDate()));
|
||||
let end = start.add(this.duration, "second");
|
||||
let current = dayjs();
|
||||
|
||||
@@ -316,7 +317,7 @@ class Maintenance extends BeanModel {
|
||||
}
|
||||
|
||||
async getTimezone() {
|
||||
if (!this.timezone) {
|
||||
if (!this.timezone || this.timezone === "SAME_AS_SERVER") {
|
||||
return await UptimeKumaServer.getInstance().getTimezone();
|
||||
}
|
||||
return this.timezone;
|
||||
|
@@ -2,9 +2,11 @@ const https = require("https");
|
||||
const dayjs = require("dayjs");
|
||||
const axios = require("axios");
|
||||
const { Prometheus } = require("../prometheus");
|
||||
const { log, UP, DOWN, PENDING, MAINTENANCE, flipStatus, TimeLogger, MAX_INTERVAL_SECOND, MIN_INTERVAL_SECOND } = require("../../src/util");
|
||||
const { log, UP, DOWN, PENDING, MAINTENANCE, flipStatus, TimeLogger, MAX_INTERVAL_SECOND, MIN_INTERVAL_SECOND,
|
||||
SQL_DATETIME_FORMAT
|
||||
} = require("../../src/util");
|
||||
const { tcping, ping, dnsResolve, checkCertificate, checkStatusCode, getTotalClientInRoom, setting, mssqlQuery, postgresQuery, mysqlQuery, mqttAsync, setSetting, httpNtlm, radius, grpcQuery,
|
||||
redisPingAsync, mongodbPing,
|
||||
redisPingAsync, mongodbPing, kafkaProducerAsync, getOidcTokenClientCredentials, rootCertificatesFingerprints, axiosAbortSignal
|
||||
} = require("../util-server");
|
||||
const { R } = require("redbean-node");
|
||||
const { BeanModel } = require("redbean-node/dist/bean-model");
|
||||
@@ -18,6 +20,11 @@ const { CacheableDnsHttpAgent } = require("../cacheable-dns-http-agent");
|
||||
const { DockerHost } = require("../docker");
|
||||
const { UptimeCacheList } = require("../uptime-cache-list");
|
||||
const Gamedig = require("gamedig");
|
||||
const jsonata = require("jsonata");
|
||||
const jwt = require("jsonwebtoken");
|
||||
const crypto = require("crypto");
|
||||
|
||||
const rootCertificates = rootCertificatesFingerprints();
|
||||
|
||||
/**
|
||||
* status:
|
||||
@@ -33,11 +40,12 @@ class Monitor extends BeanModel {
|
||||
* Only show necessary data to public
|
||||
* @returns {Object}
|
||||
*/
|
||||
async toPublicJSON(showTags = false) {
|
||||
async toPublicJSON(showTags = false, certExpiry = false) {
|
||||
let obj = {
|
||||
id: this.id,
|
||||
name: this.name,
|
||||
sendUrl: this.sendUrl,
|
||||
type: this.type,
|
||||
};
|
||||
|
||||
if (this.sendUrl) {
|
||||
@@ -47,6 +55,13 @@ class Monitor extends BeanModel {
|
||||
if (showTags) {
|
||||
obj.tags = await this.getTags();
|
||||
}
|
||||
|
||||
if (certExpiry && (this.type === "http" || this.type === "keyword" || this.type === "json-query") && this.getURLProtocol() === "https:") {
|
||||
const { certExpiryDaysRemaining, validCert } = await this.getCertExpiry(this.id);
|
||||
obj.certExpiryDaysRemaining = certExpiryDaysRemaining;
|
||||
obj.validCert = validCert;
|
||||
}
|
||||
|
||||
return obj;
|
||||
}
|
||||
|
||||
@@ -68,22 +83,34 @@ class Monitor extends BeanModel {
|
||||
|
||||
const tags = await this.getTags();
|
||||
|
||||
let screenshot = null;
|
||||
|
||||
if (this.type === "real-browser") {
|
||||
screenshot = "/screenshots/" + jwt.sign(this.id, UptimeKumaServer.getInstance().jwtSecret) + ".png";
|
||||
}
|
||||
|
||||
let data = {
|
||||
id: this.id,
|
||||
name: this.name,
|
||||
description: this.description,
|
||||
pathName: await this.getPathName(),
|
||||
parent: this.parent,
|
||||
childrenIDs: await Monitor.getAllChildrenIDs(this.id),
|
||||
url: this.url,
|
||||
method: this.method,
|
||||
hostname: this.hostname,
|
||||
port: this.port,
|
||||
maxretries: this.maxretries,
|
||||
weight: this.weight,
|
||||
active: this.active,
|
||||
active: await this.isActive(),
|
||||
forceInactive: !await Monitor.isParentActive(this.id),
|
||||
type: this.type,
|
||||
timeout: this.timeout,
|
||||
interval: this.interval,
|
||||
retryInterval: this.retryInterval,
|
||||
resendInterval: this.resendInterval,
|
||||
keyword: this.keyword,
|
||||
invertKeyword: this.isInvertKeyword(),
|
||||
expiryNotification: this.isEnabledExpiryNotification(),
|
||||
ignoreTls: this.getIgnoreTls(),
|
||||
upsideDown: this.isUpsideDown(),
|
||||
@@ -111,7 +138,16 @@ class Monitor extends BeanModel {
|
||||
radiusCalledStationId: this.radiusCalledStationId,
|
||||
radiusCallingStationId: this.radiusCallingStationId,
|
||||
game: this.game,
|
||||
httpBodyEncoding: this.httpBodyEncoding
|
||||
gamedigGivenPortOnly: this.getGameDigGivenPortOnly(),
|
||||
httpBodyEncoding: this.httpBodyEncoding,
|
||||
jsonPath: this.jsonPath,
|
||||
expectedValue: this.expectedValue,
|
||||
kafkaProducerTopic: this.kafkaProducerTopic,
|
||||
kafkaProducerBrokers: JSON.parse(this.kafkaProducerBrokers),
|
||||
kafkaProducerSsl: this.getKafkaProducerSsl(),
|
||||
kafkaProducerAllowAutoTopicCreation: this.getKafkaProducerAllowAutoTopicCreation(),
|
||||
kafkaProducerMessage: this.kafkaProducerMessage,
|
||||
screenshot,
|
||||
};
|
||||
|
||||
if (includeSensitiveData) {
|
||||
@@ -123,6 +159,11 @@ class Monitor extends BeanModel {
|
||||
grpcMetadata: this.grpcMetadata,
|
||||
basic_auth_user: this.basic_auth_user,
|
||||
basic_auth_pass: this.basic_auth_pass,
|
||||
oauth_client_id: this.oauth_client_id,
|
||||
oauth_client_secret: this.oauth_client_secret,
|
||||
oauth_token_url: this.oauth_token_url,
|
||||
oauth_scopes: this.oauth_scopes,
|
||||
oauth_auth_method: this.oauth_auth_method,
|
||||
pushToken: this.pushToken,
|
||||
databaseConnectionString: this.databaseConnectionString,
|
||||
radiusUsername: this.radiusUsername,
|
||||
@@ -135,6 +176,7 @@ class Monitor extends BeanModel {
|
||||
tlsCa: this.tlsCa,
|
||||
tlsCert: this.tlsCert,
|
||||
tlsKey: this.tlsKey,
|
||||
kafkaProducerSaslOptions: JSON.parse(this.kafkaProducerSaslOptions),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -142,6 +184,16 @@ class Monitor extends BeanModel {
|
||||
return data;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the monitor is active based on itself and its parents
|
||||
* @returns {Promise<Boolean>}
|
||||
*/
|
||||
async isActive() {
|
||||
const parentActive = await Monitor.isParentActive(this.id);
|
||||
|
||||
return (this.active === 1) && parentActive;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all tags applied to this monitor
|
||||
* @returns {Promise<LooseObject<any>[]>}
|
||||
@@ -150,13 +202,40 @@ class Monitor extends BeanModel {
|
||||
return await R.getAll("SELECT mt.*, tag.name, tag.color FROM monitor_tag mt JOIN tag ON mt.tag_id = tag.id WHERE mt.monitor_id = ? ORDER BY tag.name", [ this.id ]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets certificate expiry for this monitor
|
||||
* @param {number} monitorID ID of monitor to send
|
||||
* @returns {Promise<LooseObject<any>>}
|
||||
*/
|
||||
async getCertExpiry(monitorID) {
|
||||
let tlsInfoBean = await R.findOne("monitor_tls_info", "monitor_id = ?", [
|
||||
monitorID,
|
||||
]);
|
||||
let tlsInfo;
|
||||
if (tlsInfoBean) {
|
||||
tlsInfo = JSON.parse(tlsInfoBean?.info_json);
|
||||
if (tlsInfo?.valid && tlsInfo?.certInfo?.daysRemaining) {
|
||||
return {
|
||||
certExpiryDaysRemaining: tlsInfo.certInfo.daysRemaining,
|
||||
validCert: true
|
||||
};
|
||||
}
|
||||
}
|
||||
return {
|
||||
certExpiryDaysRemaining: "",
|
||||
validCert: false
|
||||
};
|
||||
}
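// Note (added for clarity, not part of the diff): the info_json parsed above is whatever
// updateTlsInfo() stored; only `valid` and `certInfo.daysRemaining` are read here. An
// illustrative (assumed) shape, using fields referenced elsewhere in this class:
// { "valid": true, "certInfo": { "daysRemaining": 42, "certType": "server", "subject": { "CN": "example.com" }, "fingerprint256": "...", "issuerCertificate": { ... } } }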
|
||||
|
||||
/**
|
||||
* Encode user and password to Base64 encoding
|
||||
* for HTTP "basic" auth, as per RFC-7617
|
||||
* @param {string|null} user - The username (nullable if not changed by a user)
|
||||
* @param {string|null} pass - The password (nullable if not changed by a user)
|
||||
* @returns {string}
|
||||
*/
|
||||
encodeBase64(user, pass) {
|
||||
return Buffer.from(user + ":" + pass).toString("base64");
|
||||
return Buffer.from(`${user || ""}:${pass || ""}`).toString("base64");
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -183,6 +262,14 @@ class Monitor extends BeanModel {
|
||||
return Boolean(this.upsideDown);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse to boolean
|
||||
* @returns {boolean}
|
||||
*/
|
||||
isInvertKeyword() {
|
||||
return Boolean(this.invertKeyword);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse to boolean
|
||||
* @returns {boolean}
|
||||
@@ -199,6 +286,26 @@ class Monitor extends BeanModel {
|
||||
return JSON.parse(this.accepted_statuscodes_json);
|
||||
}
|
||||
|
||||
getGameDigGivenPortOnly() {
|
||||
return Boolean(this.gamedigGivenPortOnly);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse to boolean
|
||||
* @returns {boolean} Kafka Producer Ssl enabled?
|
||||
*/
|
||||
getKafkaProducerSsl() {
|
||||
return Boolean(this.kafkaProducerSsl);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse to boolean
|
||||
* @returns {boolean} Kafka Producer Allow Auto Topic Creation Enabled?
|
||||
*/
|
||||
getKafkaProducerAllowAutoTopicCreation() {
|
||||
return Boolean(this.kafkaProducerAllowAutoTopicCreation);
|
||||
}
|
||||
|
||||
/**
|
||||
* Start monitor
|
||||
* @param {Server} io Socket server instance
|
||||
@@ -253,11 +360,50 @@ class Monitor extends BeanModel {
|
||||
bean.duration = 0;
|
||||
}
|
||||
|
||||
// Runtime patch timeout if it is 0
|
||||
// See https://github.com/louislam/uptime-kuma/pull/3961#issuecomment-1804149144
|
||||
if (!this.timeout || this.timeout <= 0) {
|
||||
this.timeout = this.interval * 1000 * 0.8;
|
||||
}
|
||||
|
||||
try {
|
||||
if (await Monitor.isUnderMaintenance(this.id)) {
|
||||
bean.msg = "Monitor under maintenance";
|
||||
bean.status = MAINTENANCE;
|
||||
} else if (this.type === "http" || this.type === "keyword") {
|
||||
} else if (this.type === "group") {
|
||||
const children = await Monitor.getChildren(this.id);
|
||||
|
||||
if (children.length > 0) {
|
||||
bean.status = UP;
|
||||
bean.msg = "All children up and running";
|
||||
for (const child of children) {
|
||||
if (!child.active) {
|
||||
// Ignore inactive children
|
||||
continue;
|
||||
}
|
||||
const lastBeat = await Monitor.getPreviousHeartbeat(child.id);
|
||||
|
||||
// Only change state if the monitor is in a worse condition than before
|
||||
// lastBeat.status could be null
|
||||
if (!lastBeat) {
|
||||
bean.status = PENDING;
|
||||
} else if (bean.status === UP && (lastBeat.status === PENDING || lastBeat.status === DOWN)) {
|
||||
bean.status = lastBeat.status;
|
||||
} else if (bean.status === PENDING && lastBeat.status === DOWN) {
|
||||
bean.status = lastBeat.status;
|
||||
}
|
||||
}
|
||||
|
||||
if (bean.status !== UP) {
|
||||
bean.msg = "Child inaccessible";
|
||||
}
|
||||
} else {
|
||||
// Set status pending if group is empty
|
||||
bean.status = PENDING;
|
||||
bean.msg = "Group empty";
|
||||
}
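// Note (added for clarity, not part of the diff): the rollup above starts the group at UP and,
// for each active child, downgrades to the child's last heartbeat status when that is worse
// (UP < PENDING < DOWN); a child with no heartbeat yet counts as PENDING, and an empty group
// stays PENDING with the "Group empty" message.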
|
||||
|
||||
} else if (this.type === "http" || this.type === "keyword" || this.type === "json-query") {
|
||||
// Do not do any queries/high loading things before the "bean.ping"
|
||||
let startTime = dayjs().valueOf();
|
||||
|
||||
@@ -269,9 +415,28 @@ class Monitor extends BeanModel {
|
||||
};
|
||||
}
|
||||
|
||||
// OIDC: Basic client credential flow.
|
||||
// Additional grants might be implemented in the future
|
||||
let oauth2AuthHeader = {};
|
||||
if (this.auth_method === "oauth2-cc") {
|
||||
try {
|
||||
if (this.oauthAccessToken === undefined || new Date(this.oauthAccessToken.expires_at * 1000) <= new Date()) {
|
||||
log.debug("monitor", `[${this.name}] The oauth access-token undefined or expired. Requesting a new one`);
|
||||
this.oauthAccessToken = await getOidcTokenClientCredentials(this.oauth_token_url, this.oauth_client_id, this.oauth_client_secret, this.oauth_scopes, this.oauth_auth_method);
|
||||
log.debug("monitor", `[${this.name}] Obtained oauth access-token. Expires at ${new Date(this.oauthAccessToken.expires_at * 1000)}`);
|
||||
}
|
||||
oauth2AuthHeader = {
|
||||
"Authorization": this.oauthAccessToken.token_type + " " + this.oauthAccessToken.access_token,
|
||||
};
|
||||
} catch (e) {
|
||||
throw new Error("The oauth config is invalid. " + e.message);
|
||||
}
|
||||
}
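// Note (added for clarity, not part of the diff): only three fields of the cached token are
// read in this block, i.e. an object shaped roughly like
// { access_token: "...", token_type: "Bearer", expires_at: 1700000000 /* unix seconds */ };
// anything else returned by getOidcTokenClientCredentials() is unused here.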
|
||||
|
||||
const httpsAgentOptions = {
|
||||
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
|
||||
rejectUnauthorized: !this.getIgnoreTls(),
|
||||
secureOptions: crypto.constants.SSL_OP_LEGACY_SERVER_CONNECT,
|
||||
};
|
||||
|
||||
log.debug("monitor", `[${this.name}] Prepare Options for axios`);
|
||||
@@ -297,18 +462,20 @@ class Monitor extends BeanModel {
|
||||
const options = {
|
||||
url: this.url,
|
||||
method: (this.method || "get").toLowerCase(),
|
||||
timeout: this.interval * 1000 * 0.8,
|
||||
timeout: this.timeout * 1000,
|
||||
headers: {
|
||||
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
|
||||
"User-Agent": "Uptime-Kuma/" + version,
|
||||
...(contentType ? { "Content-Type": contentType } : {}),
|
||||
...(basicAuthHeader),
|
||||
...(oauth2AuthHeader),
|
||||
...(this.headers ? JSON.parse(this.headers) : {})
|
||||
},
|
||||
maxRedirects: this.maxredirects,
|
||||
validateStatus: (status) => {
|
||||
return checkStatusCode(status, this.getAcceptedStatuscodes());
|
||||
},
|
||||
signal: axiosAbortSignal((this.timeout + 10) * 1000),
|
||||
};
|
||||
|
||||
if (bodyValue) {
|
||||
@@ -345,6 +512,18 @@ class Monitor extends BeanModel {
|
||||
}
|
||||
}
|
||||
|
||||
let tlsInfo = {};
|
||||
// Store tlsInfo when secureConnect event is emitted
|
||||
// The keylog event listener is a workaround to access the tlsSocket
|
||||
options.httpsAgent.once("keylog", async (line, tlsSocket) => {
|
||||
tlsSocket.once("secureConnect", async () => {
|
||||
tlsInfo = checkCertificate(tlsSocket);
|
||||
tlsInfo.valid = tlsSocket.authorized || false;
|
||||
|
||||
await this.handleTlsInfo(tlsInfo);
|
||||
});
|
||||
});
|
||||
|
||||
log.debug("monitor", `[${this.name}] Axios Options: ${JSON.stringify(options)}`);
|
||||
log.debug("monitor", `[${this.name}] Axios Request`);
|
||||
|
||||
@@ -354,38 +533,26 @@ class Monitor extends BeanModel {
|
||||
bean.msg = `${res.status} - ${res.statusText}`;
|
||||
bean.ping = dayjs().valueOf() - startTime;
|
||||
|
||||
// Check certificate if https is used
|
||||
let certInfoStartTime = dayjs().valueOf();
|
||||
if (this.getUrl()?.protocol === "https:") {
|
||||
log.debug("monitor", `[${this.name}] Check cert`);
|
||||
try {
|
||||
let tlsInfoObject = checkCertificate(res);
|
||||
tlsInfo = await this.updateTlsInfo(tlsInfoObject);
|
||||
// fallback if the keylog event is not emitted, but we may still have tlsInfo,
|
||||
// e.g. if the connection is made through a proxy
|
||||
if (this.getUrl()?.protocol === "https:" && tlsInfo.valid === undefined) {
|
||||
const tlsSocket = res.request.res.socket;
|
||||
|
||||
if (!this.getIgnoreTls() && this.isEnabledExpiryNotification()) {
|
||||
log.debug("monitor", `[${this.name}] call sendCertNotification`);
|
||||
await this.sendCertNotification(tlsInfoObject);
|
||||
}
|
||||
if (tlsSocket) {
|
||||
tlsInfo = checkCertificate(tlsSocket);
|
||||
tlsInfo.valid = tlsSocket.authorized || false;
|
||||
|
||||
} catch (e) {
|
||||
if (e.message !== "No TLS certificate in response") {
|
||||
log.error("monitor", "Caught error");
|
||||
log.error("monitor", e.message);
|
||||
}
|
||||
await this.handleTlsInfo(tlsInfo);
|
||||
}
|
||||
}
|
||||
|
||||
if (process.env.TIMELOGGER === "1") {
|
||||
log.debug("monitor", "Cert Info Query Time: " + (dayjs().valueOf() - certInfoStartTime) + "ms");
|
||||
}
|
||||
|
||||
if (process.env.UPTIME_KUMA_LOG_RESPONSE_BODY_MONITOR_ID === this.id) {
|
||||
log.info("monitor", res.data);
|
||||
}
|
||||
|
||||
if (this.type === "http") {
|
||||
bean.status = UP;
|
||||
} else {
|
||||
} else if (this.type === "keyword") {
|
||||
|
||||
let data = res.data;
|
||||
|
||||
@@ -394,17 +561,41 @@ class Monitor extends BeanModel {
|
||||
data = JSON.stringify(data);
|
||||
}
|
||||
|
||||
if (data.includes(this.keyword)) {
|
||||
bean.msg += ", keyword is found";
|
||||
let keywordFound = data.includes(this.keyword);
|
||||
if (keywordFound === !this.isInvertKeyword()) {
|
||||
bean.msg += ", keyword " + (keywordFound ? "is" : "not") + " found";
|
||||
bean.status = UP;
|
||||
} else {
|
||||
data = data.replace(/<[^>]*>?|[\n\r]|\s+/gm, " ");
|
||||
data = data.replace(/<[^>]*>?|[\n\r]|\s+/gm, " ").trim();
|
||||
if (data.length > 50) {
|
||||
data = data.substring(0, 47) + "...";
|
||||
}
|
||||
throw new Error(bean.msg + ", but keyword is not in [" + data + "]");
|
||||
throw new Error(bean.msg + ", but keyword is " +
|
||||
(keywordFound ? "present" : "not") + " in [" + data + "]");
|
||||
}
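// Note (added for clarity, not part of the diff): with the invert-keyword option, the branch
// above reports UP exactly when keywordFound === !invertKeyword, i.e.
//   keyword present + not inverted -> UP
//   keyword absent  + inverted     -> UP
//   any other combination          -> throws and is treated as DOWN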
|
||||
|
||||
} else if (this.type === "json-query") {
|
||||
let data = res.data;
|
||||
|
||||
// convert data to object
|
||||
if (typeof data === "string" && res.headers["content-type"] !== "application/json") {
|
||||
try {
|
||||
data = JSON.parse(data);
|
||||
} catch (_) {
|
||||
// Failed to parse as JSON, just process it as a string
|
||||
}
|
||||
}
|
||||
|
||||
let expression = jsonata(this.jsonPath);
|
||||
|
||||
let result = await expression.evaluate(data);
|
||||
|
||||
if (result.toString() === this.expectedValue) {
|
||||
bean.msg += ", expected value is found";
|
||||
bean.status = UP;
|
||||
} else {
|
||||
throw new Error(bean.msg + ", but value is not equal to expected value, value was: [" + result + "]");
|
||||
}
|
||||
}
|
||||
|
||||
} else if (this.type === "port") {
|
||||
@@ -423,13 +614,13 @@ class Monitor extends BeanModel {
|
||||
let dnsRes = await dnsResolve(this.hostname, this.dns_resolve_server, this.port, this.dns_resolve_type);
|
||||
bean.ping = dayjs().valueOf() - startTime;
|
||||
|
||||
if (this.dns_resolve_type === "A" || this.dns_resolve_type === "AAAA" || this.dns_resolve_type === "TXT") {
|
||||
if (this.dns_resolve_type === "A" || this.dns_resolve_type === "AAAA" || this.dns_resolve_type === "TXT" || this.dns_resolve_type === "PTR") {
|
||||
dnsMessage += "Records: ";
|
||||
dnsMessage += dnsRes.join(" | ");
|
||||
} else if (this.dns_resolve_type === "CNAME" || this.dns_resolve_type === "PTR") {
|
||||
dnsMessage = dnsRes[0];
|
||||
} else if (this.dns_resolve_type === "CNAME") {
|
||||
dnsMessage += dnsRes[0];
|
||||
} else if (this.dns_resolve_type === "CAA") {
|
||||
dnsMessage = dnsRes[0].issue;
|
||||
dnsMessage += dnsRes[0].issue;
|
||||
} else if (this.dns_resolve_type === "MX") {
|
||||
dnsRes.forEach(record => {
|
||||
dnsMessage += `Hostname: ${record.exchange} - Priority: ${record.priority} | `;
|
||||
@@ -447,7 +638,7 @@ class Monitor extends BeanModel {
|
||||
dnsMessage = dnsMessage.slice(0, -2);
|
||||
}
|
||||
|
||||
if (this.dnsLastResult !== dnsMessage) {
|
||||
if (this.dnsLastResult !== dnsMessage && dnsMessage !== undefined) {
|
||||
R.exec("UPDATE `monitor` SET dns_last_result = ? WHERE id = ? ", [
|
||||
dnsMessage,
|
||||
this.id
|
||||
@@ -479,7 +670,7 @@ class Monitor extends BeanModel {
|
||||
// No need to insert successful heartbeat for push type, so end here
|
||||
retries = 0;
|
||||
log.debug("monitor", `[${this.name}] timeout = ${timeout}`);
|
||||
this.heartbeatInterval = setTimeout(beat, timeout);
|
||||
this.heartbeatInterval = setTimeout(safeBeat, timeout);
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
@@ -496,7 +687,7 @@ class Monitor extends BeanModel {
|
||||
}
|
||||
|
||||
let res = await axios.get(steamApiUrl, {
|
||||
timeout: this.interval * 1000 * 0.8,
|
||||
timeout: this.timeout * 1000,
|
||||
headers: {
|
||||
"Accept": "*/*",
|
||||
"User-Agent": "Uptime-Kuma/" + version,
|
||||
@@ -504,6 +695,7 @@ class Monitor extends BeanModel {
|
||||
httpsAgent: CacheableDnsHttpAgent.getHttpsAgent({
|
||||
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
|
||||
rejectUnauthorized: !this.getIgnoreTls(),
|
||||
secureOptions: crypto.constants.SSL_OP_LEGACY_SERVER_CONNECT,
|
||||
}),
|
||||
httpAgent: CacheableDnsHttpAgent.getHttpAgent({
|
||||
maxCachedSessions: 0,
|
||||
@@ -534,7 +726,7 @@ class Monitor extends BeanModel {
|
||||
type: this.game,
|
||||
host: this.hostname,
|
||||
port: this.port,
|
||||
givenPortOnly: true,
|
||||
givenPortOnly: this.getGameDigGivenPortOnly(),
|
||||
});
|
||||
|
||||
bean.msg = state.name;
|
||||
@@ -546,8 +738,6 @@ class Monitor extends BeanModel {
|
||||
} else if (this.type === "docker") {
|
||||
log.debug("monitor", `[${this.name}] Prepare Options for Axios`);
|
||||
|
||||
const dockerHost = await R.load("docker_host", this.docker_host);
|
||||
|
||||
const options = {
|
||||
url: `/containers/${this.docker_container}/json`,
|
||||
timeout: this.interval * 1000 * 0.8,
|
||||
@@ -558,23 +748,39 @@ class Monitor extends BeanModel {
|
||||
httpsAgent: CacheableDnsHttpAgent.getHttpsAgent({
|
||||
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
|
||||
rejectUnauthorized: !this.getIgnoreTls(),
|
||||
secureOptions: crypto.constants.SSL_OP_LEGACY_SERVER_CONNECT,
|
||||
}),
|
||||
httpAgent: CacheableDnsHttpAgent.getHttpAgent({
|
||||
maxCachedSessions: 0,
|
||||
}),
|
||||
};
|
||||
|
||||
const dockerHost = await R.load("docker_host", this.docker_host);
|
||||
|
||||
if (!dockerHost) {
|
||||
throw new Error("Failed to load docker host config");
|
||||
}
|
||||
|
||||
if (dockerHost._dockerType === "socket") {
|
||||
options.socketPath = dockerHost._dockerDaemon;
|
||||
} else if (dockerHost._dockerType === "tcp") {
|
||||
options.baseURL = DockerHost.patchDockerURL(dockerHost._dockerDaemon);
|
||||
options.httpsAgent = CacheableDnsHttpAgent.getHttpsAgent(
|
||||
DockerHost.getHttpsAgentOptions(dockerHost._dockerType, options.baseURL)
|
||||
);
|
||||
}
|
||||
|
||||
log.debug("monitor", `[${this.name}] Axios Request`);
|
||||
let res = await axios.request(options);
|
||||
|
||||
if (res.data.State.Running) {
|
||||
bean.status = UP;
|
||||
bean.msg = res.data.State.Status;
|
||||
if (res.data.State.Health && res.data.State.Health.Status !== "healthy") {
|
||||
bean.status = PENDING;
|
||||
bean.msg = res.data.State.Health.Status;
|
||||
} else {
|
||||
bean.status = UP;
|
||||
bean.msg = res.data.State.Health ? res.data.State.Health.Status : res.data.State.Status;
|
||||
}
|
||||
} else {
|
||||
throw Error("Container State is " + res.data.State.Status);
|
||||
}
|
||||
@@ -589,7 +795,7 @@ class Monitor extends BeanModel {
|
||||
} else if (this.type === "sqlserver") {
|
||||
let startTime = dayjs().valueOf();
|
||||
|
||||
await mssqlQuery(this.databaseConnectionString, this.databaseQuery);
|
||||
await mssqlQuery(this.databaseConnectionString, this.databaseQuery || "SELECT 1");
|
||||
|
||||
bean.msg = "";
|
||||
bean.status = UP;
|
||||
@@ -603,7 +809,6 @@ class Monitor extends BeanModel {
|
||||
grpcEnableTls: this.grpcEnableTls,
|
||||
grpcMethod: this.grpcMethod,
|
||||
grpcBody: this.grpcBody,
|
||||
keyword: this.keyword
|
||||
};
|
||||
const response = await grpcQuery(options);
|
||||
bean.ping = dayjs().valueOf() - startTime;
|
||||
@@ -616,19 +821,20 @@ class Monitor extends BeanModel {
|
||||
bean.status = DOWN;
|
||||
bean.msg = `Error in send gRPC ${response.code} ${response.errorMessage}`;
|
||||
} else {
|
||||
if (response.data.toString().includes(this.keyword)) {
|
||||
let keywordFound = response.data.toString().includes(this.keyword);
|
||||
if (keywordFound === !this.isInvertKeyword()) {
|
||||
bean.status = UP;
|
||||
bean.msg = `${responseData}, keyword [${this.keyword}] is found`;
|
||||
bean.msg = `${responseData}, keyword [${this.keyword}] ${keywordFound ? "is" : "not"} found`;
|
||||
} else {
|
||||
log.debug("monitor:", `GRPC response [${response.data}] + ", but keyword [${this.keyword}] is not in [" + ${response.data} + "]"`);
|
||||
log.debug("monitor:", `GRPC response [${response.data}] + ", but keyword [${this.keyword}] is ${keywordFound ? "present" : "not"} in [" + ${response.data} + "]"`);
|
||||
bean.status = DOWN;
|
||||
bean.msg = `, but keyword [${this.keyword}] is not in [" + ${responseData} + "]`;
|
||||
bean.msg = `, but keyword [${this.keyword}] is ${keywordFound ? "present" : "not"} in [" + ${responseData} + "]`;
|
||||
}
|
||||
}
|
||||
} else if (this.type === "postgres") {
|
||||
let startTime = dayjs().valueOf();
|
||||
|
||||
await postgresQuery(this.databaseConnectionString, this.databaseQuery);
|
||||
await postgresQuery(this.databaseConnectionString, this.databaseQuery || "SELECT 1");
|
||||
|
||||
bean.msg = "";
|
||||
bean.status = UP;
|
||||
@@ -636,7 +842,11 @@ class Monitor extends BeanModel {
|
||||
} else if (this.type === "mysql") {
|
||||
let startTime = dayjs().valueOf();
|
||||
|
||||
bean.msg = await mysqlQuery(this.databaseConnectionString, this.databaseQuery);
|
||||
// Use `radius_password` as `password` field, since there are too many unnecessary fields
|
||||
// TODO: rename `radius_password` to `password` later for general use
|
||||
let mysqlPassword = this.radiusPassword;
|
||||
|
||||
bean.msg = await mysqlQuery(this.databaseConnectionString, this.databaseQuery || "SELECT 1", mysqlPassword);
|
||||
bean.status = UP;
|
||||
bean.ping = dayjs().valueOf() - startTime;
|
||||
} else if (this.type === "mongodb") {
|
||||
@@ -661,28 +871,19 @@ class Monitor extends BeanModel {
|
||||
port = this.port;
|
||||
}
|
||||
|
||||
try {
|
||||
const resp = await radius(
|
||||
this.hostname,
|
||||
this.radiusUsername,
|
||||
this.radiusPassword,
|
||||
this.radiusCalledStationId,
|
||||
this.radiusCallingStationId,
|
||||
this.radiusSecret,
|
||||
port
|
||||
);
|
||||
if (resp.code) {
|
||||
bean.msg = resp.code;
|
||||
}
|
||||
bean.status = UP;
|
||||
} catch (error) {
|
||||
bean.status = DOWN;
|
||||
if (error.response?.code) {
|
||||
bean.msg = error.response.code;
|
||||
} else {
|
||||
bean.msg = error.message;
|
||||
}
|
||||
}
|
||||
const resp = await radius(
|
||||
this.hostname,
|
||||
this.radiusUsername,
|
||||
this.radiusPassword,
|
||||
this.radiusCalledStationId,
|
||||
this.radiusCallingStationId,
|
||||
this.radiusSecret,
|
||||
port,
|
||||
this.interval * 1000 * 0.4,
|
||||
);
|
||||
|
||||
bean.msg = resp.code;
|
||||
bean.status = UP;
|
||||
bean.ping = dayjs().valueOf() - startTime;
|
||||
} else if (this.type === "redis") {
|
||||
let startTime = dayjs().valueOf();
|
||||
@@ -694,11 +895,29 @@ class Monitor extends BeanModel {
|
||||
} else if (this.type in UptimeKumaServer.monitorTypeList) {
|
||||
let startTime = dayjs().valueOf();
|
||||
const monitorType = UptimeKumaServer.monitorTypeList[this.type];
|
||||
await monitorType.check(this, bean);
|
||||
await monitorType.check(this, bean, UptimeKumaServer.getInstance());
|
||||
if (!bean.ping) {
|
||||
bean.ping = dayjs().valueOf() - startTime;
|
||||
}
|
||||
|
||||
} else if (this.type === "kafka-producer") {
|
||||
let startTime = dayjs().valueOf();
|
||||
|
||||
bean.msg = await kafkaProducerAsync(
|
||||
JSON.parse(this.kafkaProducerBrokers),
|
||||
this.kafkaProducerTopic,
|
||||
this.kafkaProducerMessage,
|
||||
{
|
||||
allowAutoTopicCreation: this.kafkaProducerAllowAutoTopicCreation,
|
||||
ssl: this.kafkaProducerSsl,
|
||||
clientId: `Uptime-Kuma/${version}`,
|
||||
interval: this.interval,
|
||||
},
|
||||
JSON.parse(this.kafkaProducerSaslOptions),
|
||||
);
|
||||
bean.status = UP;
|
||||
bean.ping = dayjs().valueOf() - startTime;
|
||||
|
||||
} else {
|
||||
throw new Error("Unknown Monitor Type");
|
||||
}
|
||||
@@ -715,7 +934,11 @@ class Monitor extends BeanModel {
|
||||
|
||||
} catch (error) {
|
||||
|
||||
bean.msg = error.message;
|
||||
if (error?.name === "CanceledError") {
|
||||
bean.msg = `timeout by AbortSignal (${this.timeout}s)`;
|
||||
} else {
|
||||
bean.msg = error.message;
|
||||
}
|
||||
|
||||
// If UP come in here, it must be upside down mode
|
||||
// Just reset the retries
|
||||
@@ -899,6 +1122,19 @@ class Monitor extends BeanModel {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Example: http: or https:
|
||||
* @returns {(null|string)}
|
||||
*/
|
||||
getURLProtocol() {
|
||||
const url = this.getUrl();
|
||||
if (url) {
|
||||
return this.getUrl().protocol;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Store TLS info to database
|
||||
* @param checkCertificateResult
|
||||
@@ -1176,12 +1412,18 @@ class Monitor extends BeanModel {
|
||||
|
||||
for (let notification of notificationList) {
|
||||
try {
|
||||
// Prevent if the msg is undefined, notifications such as Discord cannot send out.
|
||||
const heartbeatJSON = bean.toJSON();
|
||||
|
||||
// Prevent if the msg is undefined, notifications such as Discord cannot send out.
|
||||
if (!heartbeatJSON["msg"]) {
|
||||
heartbeatJSON["msg"] = "N/A";
|
||||
}
|
||||
|
||||
// Also provide the time in server timezone
|
||||
heartbeatJSON["timezone"] = await UptimeKumaServer.getInstance().getTimezone();
|
||||
heartbeatJSON["timezoneOffset"] = UptimeKumaServer.getInstance().getTimezoneOffset();
|
||||
heartbeatJSON["localDateTime"] = dayjs.utc(heartbeatJSON["time"]).tz(heartbeatJSON["timezone"]).format(SQL_DATETIME_FORMAT);
|
||||
|
||||
await Notification.send(JSON.parse(notification.config), msg, await monitor.toJSON(false), heartbeatJSON);
|
||||
} catch (e) {
|
||||
log.error("monitor", "Cannot send notification to " + notification.name);
|
||||
@@ -1204,13 +1446,19 @@ class Monitor extends BeanModel {
|
||||
}
|
||||
|
||||
/**
|
||||
* Send notification about a certificate
|
||||
* checks certificate chain for expiring certificates
|
||||
* @param {Object} tlsInfoObject Information about certificate
|
||||
*/
|
||||
async sendCertNotification(tlsInfoObject) {
|
||||
async checkCertExpiryNotifications(tlsInfoObject) {
|
||||
if (tlsInfoObject && tlsInfoObject.certInfo && tlsInfoObject.certInfo.daysRemaining) {
|
||||
const notificationList = await Monitor.getNotificationList(this);
|
||||
|
||||
if (! notificationList.length > 0) {
|
||||
// fail fast. If no notification is set, all the following checks can be skipped.
|
||||
log.debug("monitor", "No notification, no need to send cert notification");
|
||||
return;
|
||||
}
|
||||
|
||||
let notifyDays = await setting("tlsExpiryNotifyDays");
|
||||
if (notifyDays == null || !Array.isArray(notifyDays)) {
|
||||
// Reset Default
|
||||
@@ -1218,10 +1466,22 @@ class Monitor extends BeanModel {
|
||||
notifyDays = [ 7, 14, 21 ];
|
||||
}
|
||||
|
||||
if (notifyDays != null && Array.isArray(notifyDays)) {
|
||||
for (const day of notifyDays) {
|
||||
log.debug("monitor", "call sendCertNotificationByTargetDays", day);
|
||||
await this.sendCertNotificationByTargetDays(tlsInfoObject.certInfo.daysRemaining, day, notificationList);
|
||||
if (Array.isArray(notifyDays)) {
|
||||
for (const targetDays of notifyDays) {
|
||||
let certInfo = tlsInfoObject.certInfo;
|
||||
while (certInfo) {
|
||||
let subjectCN = certInfo.subject["CN"];
|
||||
if (rootCertificates.has(certInfo.fingerprint256)) {
|
||||
log.debug("monitor", `Known root cert: ${certInfo.certType} certificate "${subjectCN}" (${certInfo.daysRemaining} days valid) on ${targetDays} deadline.`);
|
||||
break;
|
||||
} else if (certInfo.daysRemaining > targetDays) {
|
||||
log.debug("monitor", `No need to send cert notification for ${certInfo.certType} certificate "${subjectCN}" (${certInfo.daysRemaining} days valid) on ${targetDays} deadline.`);
|
||||
} else {
|
||||
log.debug("monitor", `call sendCertNotificationByTargetDays for ${targetDays} deadline on certificate ${subjectCN}.`);
|
||||
await this.sendCertNotificationByTargetDays(subjectCN, certInfo.certType, certInfo.daysRemaining, targetDays, notificationList);
|
||||
}
|
||||
certInfo = certInfo.issuerCertificate;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1230,55 +1490,47 @@ class Monitor extends BeanModel {
|
||||
/**
|
||||
* Send a certificate notification when certificate expires in less
|
||||
* than target days
|
||||
* @param {number} daysRemaining Number of days remaining on certifcate
|
||||
* @param {string} certCN Common Name attribute from the certificate subject
|
||||
* @param {string} certType certificate type
|
||||
* @param {number} daysRemaining Number of days remaining on certificate
|
||||
* @param {number} targetDays Number of days to alert after
|
||||
* @param {LooseObject<any>[]} notificationList List of notification providers
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async sendCertNotificationByTargetDays(daysRemaining, targetDays, notificationList) {
|
||||
async sendCertNotificationByTargetDays(certCN, certType, daysRemaining, targetDays, notificationList) {
|
||||
|
||||
if (daysRemaining > targetDays) {
|
||||
log.debug("monitor", `No need to send cert notification. ${daysRemaining} > ${targetDays}`);
|
||||
let row = await R.getRow("SELECT * FROM notification_sent_history WHERE type = ? AND monitor_id = ? AND days <= ?", [
|
||||
"certificate",
|
||||
this.id,
|
||||
targetDays,
|
||||
]);
|
||||
|
||||
// Sent already, no need to send again
|
||||
if (row) {
|
||||
log.debug("monitor", "Sent already, no need to send again");
|
||||
return;
|
||||
}
|
||||
|
||||
if (notificationList.length > 0) {
|
||||
let sent = false;
|
||||
log.debug("monitor", "Send certificate notification");
|
||||
|
||||
let row = await R.getRow("SELECT * FROM notification_sent_history WHERE type = ? AND monitor_id = ? AND days <= ?", [
|
||||
for (let notification of notificationList) {
|
||||
try {
|
||||
log.debug("monitor", "Sending to " + notification.name);
|
||||
await Notification.send(JSON.parse(notification.config), `[${this.name}][${this.url}] ${certType} certificate ${certCN} will be expired in ${daysRemaining} days`);
|
||||
sent = true;
|
||||
} catch (e) {
|
||||
log.error("monitor", "Cannot send cert notification to " + notification.name);
|
||||
log.error("monitor", e);
|
||||
}
|
||||
}
|
||||
|
||||
if (sent) {
|
||||
await R.exec("INSERT INTO notification_sent_history (type, monitor_id, days) VALUES(?, ?, ?)", [
|
||||
"certificate",
|
||||
this.id,
|
||||
targetDays,
|
||||
]);
|
||||
|
||||
// Sent already, no need to send again
|
||||
if (row) {
|
||||
log.debug("monitor", "Sent already, no need to send again");
|
||||
return;
|
||||
}
|
||||
|
||||
let sent = false;
|
||||
log.debug("monitor", "Send certificate notification");
|
||||
|
||||
for (let notification of notificationList) {
|
||||
try {
|
||||
log.debug("monitor", "Sending to " + notification.name);
|
||||
await Notification.send(JSON.parse(notification.config), `[${this.name}][${this.url}] Certificate will expire in ${daysRemaining} days`);
|
||||
sent = true;
|
||||
} catch (e) {
|
||||
log.error("monitor", "Cannot send cert notification to " + notification.name);
|
||||
log.error("monitor", e);
|
||||
}
|
||||
}
|
||||
|
||||
if (sent) {
|
||||
await R.exec("INSERT INTO notification_sent_history (type, monitor_id, days) VALUES(?, ?, ?)", [
|
||||
"certificate",
|
||||
this.id,
|
||||
targetDays,
|
||||
]);
|
||||
}
|
||||
} else {
|
||||
log.debug("monitor", "No notification, no need to send cert notification");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1314,6 +1566,11 @@ class Monitor extends BeanModel {
|
||||
}
|
||||
}
|
||||
|
||||
const parent = await Monitor.getParent(monitorID);
|
||||
if (parent != null) {
|
||||
return await Monitor.isUnderMaintenance(parent.id);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -1326,6 +1583,120 @@ class Monitor extends BeanModel {
|
||||
throw new Error(`Interval cannot be less than ${MIN_INTERVAL_SECOND} seconds`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets Parent of the monitor
|
||||
* @param {number} monitorID ID of monitor to get
|
||||
* @returns {Promise<LooseObject<any>>}
|
||||
*/
|
||||
static async getParent(monitorID) {
|
||||
return await R.getRow(`
|
||||
SELECT parent.* FROM monitor parent
|
||||
LEFT JOIN monitor child
|
||||
ON child.parent = parent.id
|
||||
WHERE child.id = ?
|
||||
`, [
|
||||
monitorID,
|
||||
]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets all Children of the monitor
|
||||
* @param {number} monitorID ID of monitor to get
|
||||
* @returns {Promise<LooseObject<any>>}
|
||||
*/
|
||||
static async getChildren(monitorID) {
|
||||
return await R.getAll(`
|
||||
SELECT * FROM monitor
|
||||
WHERE parent = ?
|
||||
`, [
|
||||
monitorID,
|
||||
]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets Full Path-Name (Groups and Name)
|
||||
* @returns {Promise<String>}
|
||||
*/
|
||||
async getPathName() {
|
||||
let path = this.name;
|
||||
|
||||
if (this.parent === null) {
|
||||
return path;
|
||||
}
|
||||
|
||||
let parent = await Monitor.getParent(this.id);
|
||||
while (parent !== null) {
|
||||
path = `${parent.name} / ${path}`;
|
||||
parent = await Monitor.getParent(parent.id);
|
||||
}
|
||||
|
||||
return path;
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively gets all child IDs
|
||||
* @param {number} monitorID ID of the monitor to get
|
||||
* @returns {Promise<Array>}
|
||||
*/
|
||||
static async getAllChildrenIDs(monitorID) {
|
||||
const childs = await Monitor.getChildren(monitorID);
|
||||
|
||||
if (childs === null) {
|
||||
return [];
|
||||
}
|
||||
|
||||
let childrenIDs = [];
|
||||
|
||||
for (const child of childs) {
|
||||
childrenIDs.push(child.id);
|
||||
childrenIDs = childrenIDs.concat(await Monitor.getAllChildrenIDs(child.id));
|
||||
}
|
||||
|
||||
return childrenIDs;
|
||||
}
|
||||
|
||||
/**
|
||||
* Unlinks all children of the group monitor
|
||||
* @param {number} groupID ID of group to remove children of
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
static async unlinkAllChildren(groupID) {
|
||||
return await R.exec("UPDATE `monitor` SET parent = ? WHERE parent = ? ", [
|
||||
null, groupID
|
||||
]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively checks if the parent (and its ancestors) are active
|
||||
* @param {number} monitorID ID of the monitor to get
|
||||
* @returns {Promise<Boolean>}
|
||||
*/
|
||||
static async isParentActive(monitorID) {
|
||||
const parent = await Monitor.getParent(monitorID);
|
||||
|
||||
if (parent === null) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const parentActive = await Monitor.isParentActive(parent.id);
|
||||
return parent.active && parentActive;
|
||||
}
|
||||
|
||||
/**
|
||||
* Store TLS certificate information and check for expiry
|
||||
* @param {Object} tlsInfo Information about the TLS connection
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async handleTlsInfo(tlsInfo) {
|
||||
await this.updateTlsInfo(tlsInfo);
|
||||
this.prometheus?.update(null, tlsInfo);
|
||||
|
||||
if (!this.getIgnoreTls() && this.isEnabledExpiryNotification()) {
|
||||
log.debug("monitor", `[${this.name}] call checkCertExpiryNotifications`);
|
||||
await this.checkCertExpiryNotifications(tlsInfo);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Monitor;
|
||||
|
@@ -90,6 +90,8 @@ class StatusPage extends BeanModel {
|
||||
* @param {StatusPage} statusPage
|
||||
*/
|
||||
static async getStatusPageData(statusPage) {
|
||||
const config = await statusPage.toPublicJSON();
|
||||
|
||||
// Incident
|
||||
let incident = await R.findOne("incident", " pin = 1 AND active = 1 AND status_page_id = ? ", [
|
||||
statusPage.id,
|
||||
@@ -110,13 +112,13 @@ class StatusPage extends BeanModel {
|
||||
]);
|
||||
|
||||
for (let groupBean of list) {
|
||||
let monitorGroup = await groupBean.toPublicJSON(showTags);
|
||||
let monitorGroup = await groupBean.toPublicJSON(showTags, config?.showCertificateExpiry);
|
||||
publicGroupList.push(monitorGroup);
|
||||
}
|
||||
|
||||
// Response
|
||||
return {
|
||||
config: await statusPage.toPublicJSON(),
|
||||
config,
|
||||
incident,
|
||||
publicGroupList,
|
||||
maintenanceList,
|
||||
@@ -234,6 +236,7 @@ class StatusPage extends BeanModel {
|
||||
footerText: this.footer_text,
|
||||
showPoweredBy: !!this.show_powered_by,
|
||||
googleAnalyticsId: this.google_analytics_tag_id,
|
||||
showCertificateExpiry: !!this.show_certificate_expiry,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -255,6 +258,7 @@ class StatusPage extends BeanModel {
|
||||
footerText: this.footer_text,
|
||||
showPoweredBy: !!this.show_powered_by,
|
||||
googleAnalyticsId: this.google_analytics_tag_id,
|
||||
showCertificateExpiry: !!this.show_certificate_expiry,
|
||||
};
|
||||
}
|
||||
|
||||
|
@@ -1,6 +1,8 @@
|
||||
const { BeanModel } = require("redbean-node/dist/bean-model");
|
||||
const passwordHash = require("../password-hash");
|
||||
const { R } = require("redbean-node");
|
||||
const jwt = require("jsonwebtoken");
|
||||
const { shake256, SHAKE256_LENGTH } = require("../util-server");
|
||||
|
||||
class User extends BeanModel {
|
||||
/**
|
||||
@@ -27,6 +29,19 @@ class User extends BeanModel {
|
||||
this.password = newPassword;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new JWT for a user
|
||||
* @param {User} user
|
||||
* @param {string} jwtSecret
|
||||
* @return {string}
|
||||
*/
|
||||
static createJWT(user, jwtSecret) {
|
||||
return jwt.sign({
|
||||
username: user.username,
|
||||
h: shake256(user.password, SHAKE256_LENGTH),
|
||||
}, jwtSecret);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
module.exports = User;
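Side note (not part of the diff): a minimal sketch of how a token produced by User.createJWT above could be checked with the same jsonwebtoken library; the token and jwtSecret variables are assumed to be in scope.

    const jwt = require("jsonwebtoken");

    // Throws if the token was not signed with this server's jwtSecret
    const decoded = jwt.verify(token, jwtSecret);
    console.log(decoded.username, decoded.h); // username plus the shake256 hash of the stored password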
|
||||
|
@@ -6,9 +6,10 @@ class MonitorType {
|
||||
*
|
||||
* @param {Monitor} monitor
|
||||
* @param {Heartbeat} heartbeat
|
||||
* @param {UptimeKumaServer} server
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async check(monitor, heartbeat) {
|
||||
async check(monitor, heartbeat, server) {
|
||||
throw new Error("You need to override check()");
|
||||
}
|
||||
|
||||
|
server/monitor-types/real-browser-monitor-type.js (new file, 233 lines)
@@ -0,0 +1,233 @@
|
||||
const { MonitorType } = require("./monitor-type");
|
||||
const { chromium } = require("playwright-core");
|
||||
const { UP, log } = require("../../src/util");
|
||||
const { Settings } = require("../settings");
|
||||
const commandExistsSync = require("command-exists").sync;
|
||||
const childProcess = require("child_process");
|
||||
const path = require("path");
|
||||
const Database = require("../database");
|
||||
const jwt = require("jsonwebtoken");
|
||||
const config = require("../config");
|
||||
|
||||
/**
|
||||
* Cached instance of a browser
|
||||
* @type {import ("playwright-core").Browser}
|
||||
*/
|
||||
let browser = null;
|
||||
|
||||
let allowedList = [];
|
||||
let lastAutoDetectChromeExecutable = null;
|
||||
|
||||
if (process.platform === "win32") {
|
||||
allowedList.push(process.env.LOCALAPPDATA + "\\Google\\Chrome\\Application\\chrome.exe");
|
||||
allowedList.push(process.env.PROGRAMFILES + "\\Google\\Chrome\\Application\\chrome.exe");
|
||||
allowedList.push(process.env["ProgramFiles(x86)"] + "\\Google\\Chrome\\Application\\chrome.exe");
|
||||
|
||||
// Allow Chromium too
|
||||
allowedList.push(process.env.LOCALAPPDATA + "\\Chromium\\Application\\chrome.exe");
|
||||
allowedList.push(process.env.PROGRAMFILES + "\\Chromium\\Application\\chrome.exe");
|
||||
allowedList.push(process.env["ProgramFiles(x86)"] + "\\Chromium\\Application\\chrome.exe");
|
||||
|
||||
// For Loop A to Z
|
||||
for (let i = 65; i <= 90; i++) {
|
||||
let drive = String.fromCharCode(i);
|
||||
allowedList.push(drive + ":\\Program Files\\Google\\Chrome\\Application\\chrome.exe");
|
||||
allowedList.push(drive + ":\\Program Files (x86)\\Google\\Chrome\\Application\\chrome.exe");
|
||||
}
|
||||
|
||||
} else if (process.platform === "linux") {
|
||||
allowedList = [
|
||||
"chromium",
|
||||
"chromium-browser",
|
||||
"google-chrome",
|
||||
|
||||
"/usr/bin/chromium",
|
||||
"/usr/bin/chromium-browser",
|
||||
"/usr/bin/google-chrome",
|
||||
"/snap/bin/chromium", // Ubuntu
|
||||
];
|
||||
} else if (process.platform === "darwin") {
|
||||
// TODO: Generated by GitHub Copilot, but not sure if it's correct
|
||||
allowedList = [
|
||||
"/Applications/Google Chrome.app/Contents/MacOS/Google Chrome",
|
||||
"/Applications/Chromium.app/Contents/MacOS/Chromium",
|
||||
];
|
||||
}
|
||||
|
||||
log.debug("chrome", allowedList);
|
||||
|
||||
async function isAllowedChromeExecutable(executablePath) {
|
||||
console.log(config.args);
|
||||
if (config.args["allow-all-chrome-exec"] || process.env.UPTIME_KUMA_ALLOW_ALL_CHROME_EXEC === "1") {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check if the executablePath is in the list of allowed executables
|
||||
return allowedList.includes(executablePath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the current instance of the browser. If there isn't one, create
|
||||
* it.
|
||||
* @returns {Promise<import ("playwright-core").Browser>} The browser
|
||||
*/
|
||||
async function getBrowser() {
|
||||
if (browser && browser.isConnected()) {
|
||||
return browser;
|
||||
} else {
|
||||
let executablePath = await Settings.get("chromeExecutable");
|
||||
|
||||
executablePath = await prepareChromeExecutable(executablePath);
|
||||
|
||||
browser = await chromium.launch({
|
||||
//headless: false,
|
||||
executablePath,
|
||||
});
|
||||
|
||||
return browser;
|
||||
}
|
||||
}
|
||||
|
||||
async function prepareChromeExecutable(executablePath) {
|
||||
// Special code for using the playwright_chromium
|
||||
if (typeof executablePath === "string" && executablePath.toLocaleLowerCase() === "#playwright_chromium") {
|
||||
// Set to undefined = use playwright_chromium
|
||||
executablePath = undefined;
|
||||
} else if (!executablePath) {
|
||||
if (process.env.UPTIME_KUMA_IS_CONTAINER) {
|
||||
executablePath = "/usr/bin/chromium";
|
||||
|
||||
// Install chromium in container via apt install
|
||||
if ( !commandExistsSync(executablePath)) {
|
||||
await new Promise((resolve, reject) => {
|
||||
log.info("Chromium", "Installing Chromium...");
|
||||
let child = childProcess.exec("apt update && apt --yes --no-install-recommends install chromium fonts-indic fonts-noto fonts-noto-cjk");
|
||||
|
||||
// On exit
|
||||
child.on("exit", (code) => {
|
||||
log.info("Chromium", "apt install chromium exited with code " + code);
|
||||
|
||||
if (code === 0) {
|
||||
log.info("Chromium", "Installed Chromium");
|
||||
let version = childProcess.execSync(executablePath + " --version").toString("utf8");
|
||||
log.info("Chromium", "Chromium version: " + version);
|
||||
resolve();
|
||||
} else if (code === 100) {
|
||||
reject(new Error("Installing Chromium, please wait..."));
|
||||
} else {
|
||||
reject(new Error("apt install chromium failed with code " + code));
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
} else {
|
||||
executablePath = findChrome(allowedList);
|
||||
}
|
||||
} else {
|
||||
// User specified a path
|
||||
// Check if the executablePath is in the list of allowed
|
||||
if (!await isAllowedChromeExecutable(executablePath)) {
|
||||
throw new Error("This Chromium executable path is not allowed by default. If you are sure this is safe, please add an environment variable UPTIME_KUMA_ALLOW_ALL_CHROME_EXEC=1 to allow it.");
|
||||
}
|
||||
}
|
||||
return executablePath;
|
||||
}
|
||||
|
||||
function findChrome(executables) {
|
||||
// Use the last working executable, so we don't have to search for it again
|
||||
if (lastAutoDetectChromeExecutable) {
|
||||
if (commandExistsSync(lastAutoDetectChromeExecutable)) {
|
||||
return lastAutoDetectChromeExecutable;
|
||||
}
|
||||
}
|
||||
|
||||
for (let executable of executables) {
|
||||
if (commandExistsSync(executable)) {
|
||||
lastAutoDetectChromeExecutable = executable;
|
||||
return executable;
|
||||
}
|
||||
}
|
||||
throw new Error("Chromium not found, please specify Chromium executable path in the settings page.");
|
||||
}
|
||||
|
||||
async function resetChrome() {
|
||||
if (browser) {
|
||||
await browser.close();
|
||||
browser = null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Test if the chrome executable is valid and return the version
|
||||
* @param executablePath
|
||||
* @returns {Promise<string>}
|
||||
*/
|
||||
async function testChrome(executablePath) {
|
||||
try {
|
||||
executablePath = await prepareChromeExecutable(executablePath);
|
||||
|
||||
log.info("Chromium", "Testing Chromium executable: " + executablePath);
|
||||
|
||||
const browser = await chromium.launch({
|
||||
executablePath,
|
||||
});
|
||||
const version = browser.version();
|
||||
await browser.close();
|
||||
return version;
|
||||
} catch (e) {
|
||||
throw new Error(e.message);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* TODO: connect remote browser? https://playwright.dev/docs/api/class-browsertype#browser-type-connect
|
||||
*
|
||||
*/
|
||||
class RealBrowserMonitorType extends MonitorType {
|
||||
|
||||
name = "real-browser";
|
||||
|
||||
async check(monitor, heartbeat, server) {
|
||||
const browser = await getBrowser();
|
||||
const context = await browser.newContext();
|
||||
const page = await context.newPage();
|
||||
|
||||
// Prevent Local File Inclusion
|
||||
// Accept only http:// and https://
|
||||
// https://github.com/louislam/uptime-kuma/security/advisories/GHSA-2qgm-m29m-cj2h
|
||||
let url = new URL(monitor.url);
|
||||
if (url.protocol !== "http:" && url.protocol !== "https:") {
|
||||
throw new Error("Invalid url protocol, only http and https are allowed.");
|
||||
}
|
||||
|
||||
const res = await page.goto(monitor.url, {
|
||||
waitUntil: "networkidle",
|
||||
timeout: monitor.interval * 1000 * 0.8,
|
||||
});
|
||||
|
||||
let filename = jwt.sign(monitor.id, server.jwtSecret) + ".png";
|
||||
|
||||
await page.screenshot({
|
||||
path: path.join(Database.screenshotDir, filename),
|
||||
});
|
||||
|
||||
await context.close();
|
||||
|
||||
if (res.status() >= 200 && res.status() < 400) {
|
||||
heartbeat.status = UP;
|
||||
heartbeat.msg = res.status();
|
||||
|
||||
const timing = res.request().timing();
|
||||
heartbeat.ping = timing.responseEnd;
|
||||
} else {
|
||||
throw new Error(res.status() + "");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
RealBrowserMonitorType,
|
||||
testChrome,
|
||||
resetChrome,
|
||||
};
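Side note (not part of the diff): monitor.js above dispatches custom types via UptimeKumaServer.monitorTypeList[this.type].check(monitor, bean, server), so a type like this one is presumably registered once at startup, roughly as sketched below; the require paths and the registration site are assumptions.

    const { RealBrowserMonitorType } = require("./monitor-types/real-browser-monitor-type");
    const { UptimeKumaServer } = require("./uptime-kuma-server");

    // The key must match the monitor's type name
    UptimeKumaServer.monitorTypeList["real-browser"] = new RealBrowserMonitorType();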
|
server/monitor-types/tailscale-ping.js (new file, 88 lines)
@@ -0,0 +1,88 @@
|
||||
const { MonitorType } = require("./monitor-type");
|
||||
const { UP } = require("../../src/util");
|
||||
const childProcessAsync = require("promisify-child-process");
|
||||
|
||||
/**
|
||||
* A TailscalePing class extends the MonitorType.
|
||||
* It runs Tailscale ping to monitor the status of a specific node.
|
||||
*/
|
||||
class TailscalePing extends MonitorType {
|
||||
|
||||
name = "tailscale-ping";
|
||||
|
||||
/**
|
||||
* Checks the ping status of the URL associated with the monitor.
|
||||
* It then parses the Tailscale ping command output to update the heartbeat.
|
||||
*
|
||||
* @param {Object} monitor - The monitor object associated with the check.
|
||||
* @param {Object} heartbeat - The heartbeat object to update.
|
||||
* @throws Will throw an error if checking Tailscale ping encounters any error
|
||||
*/
|
||||
async check(monitor, heartbeat) {
|
||||
try {
|
||||
let tailscaleOutput = await this.runTailscalePing(monitor.hostname, monitor.interval);
|
||||
this.parseTailscaleOutput(tailscaleOutput, heartbeat);
|
||||
} catch (err) {
|
||||
// trigger log function somewhere to display a notification or alert to the user (but how?)
|
||||
throw new Error(`Error checking Tailscale ping: ${err}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs the Tailscale ping command to the given URL.
|
||||
*
|
||||
* @param {string} hostname - The hostname to ping.
|
||||
* @param {number} interval - The monitor's check interval in seconds, used to derive the command timeout
|
||||
* @returns {Promise<string>} - A Promise that resolves to the output of the Tailscale ping command
|
||||
* @throws Will throw an error if the command execution encounters any error.
|
||||
*/
|
||||
async runTailscalePing(hostname, interval) {
|
||||
let timeout = interval * 1000 * 0.8;
|
||||
let res = await childProcessAsync.spawn("tailscale", [ "ping", "--c", "1", hostname ], {
|
||||
timeout: timeout,
|
||||
encoding: "utf8",
|
||||
});
|
||||
if (res.stderr && res.stderr.toString()) {
|
||||
throw new Error(`Error in output: ${res.stderr.toString()}`);
|
||||
}
|
||||
if (res.stdout && res.stdout.toString()) {
|
||||
return res.stdout.toString();
|
||||
} else {
|
||||
throw new Error("No output from Tailscale ping");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses the output of the Tailscale ping command to update the heartbeat.
|
||||
*
|
||||
* @param {string} tailscaleOutput - The output of the Tailscale ping command.
|
||||
* @param {Object} heartbeat - The heartbeat object to update.
|
||||
* @throws Will throw an eror if the output contains any unexpected string.
|
||||
*/
|
||||
parseTailscaleOutput(tailscaleOutput, heartbeat) {
|
||||
let lines = tailscaleOutput.split("\n");
|
||||
|
||||
for (let line of lines) {
|
||||
if (line.includes("pong from")) {
|
||||
heartbeat.status = UP;
|
||||
let time = line.split(" in ")[1].split(" ")[0];
|
||||
heartbeat.ping = parseInt(time);
|
||||
heartbeat.msg = "OK";
|
||||
break;
|
||||
} else if (line.includes("timed out")) {
|
||||
throw new Error(`Ping timed out: "${line}"`);
|
||||
// Immediately throws upon "timed out" message, the server is expected to re-call the check function
|
||||
} else if (line.includes("no matching peer")) {
|
||||
throw new Error(`Nonexistant or inaccessible due to ACLs: "${line}"`);
|
||||
} else if (line.includes("is local Tailscale IP")) {
|
||||
throw new Error(`Tailscale only works if used on other machines: "${line}"`);
|
||||
} else if (line !== "") {
|
||||
throw new Error(`Unexpected output: "${line}"`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
TailscalePing,
|
||||
};
|
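For reference, a minimal sketch of how the parser above maps a successful reply onto a heartbeat. The sample output line is an assumption inferred from the substrings the parser checks for ("pong from", " in "); it is not captured from a real run.

// Illustrative only — the output line below is hypothetical.
const { TailscalePing } = require("./tailscale-ping");

let heartbeat = {};
new TailscalePing().parseTailscaleOutput(
    "pong from host-1 (100.101.102.103) via 192.0.2.7:41641 in 23ms\n",
    heartbeat
);
// heartbeat now holds { status: UP, ping: 23, msg: "OK" } — parseInt("23ms") yields 23.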
@@ -18,7 +18,7 @@ class AliyunSMS extends NotificationProvider {
|
||||
status: this.statusToString(heartbeatJSON["status"]),
|
||||
msg: heartbeatJSON["msg"],
|
||||
});
|
||||
if (this.sendSms(notification, msgBody)) {
|
||||
if (await this.sendSms(notification, msgBody)) {
|
||||
return okMsg;
|
||||
}
|
||||
} else {
|
||||
@@ -28,7 +28,7 @@ class AliyunSMS extends NotificationProvider {
|
||||
status: "",
|
||||
msg: msg,
|
||||
});
|
||||
if (this.sendSms(notification, msgBody)) {
|
||||
if (await this.sendSms(notification, msgBody)) {
|
||||
return okMsg;
|
||||
}
|
||||
}
|
||||
@@ -73,7 +73,8 @@ class AliyunSMS extends NotificationProvider {
|
||||
if (result.data.Message === "OK") {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
|
||||
throw new Error(result.data.Message);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -1,5 +1,5 @@
|
||||
const NotificationProvider = require("./notification-provider");
|
||||
const childProcess = require("child_process");
|
||||
const childProcessAsync = require("promisify-child-process");
|
||||
|
||||
class Apprise extends NotificationProvider {
|
||||
|
||||
@@ -11,7 +11,9 @@ class Apprise extends NotificationProvider {
|
||||
args.push("-t");
|
||||
args.push(notification.title);
|
||||
}
|
||||
const s = childProcess.spawnSync("apprise", args);
|
||||
const s = await childProcessAsync.spawn("apprise", args, {
|
||||
encoding: "utf8",
|
||||
});
|
||||
|
||||
const output = (s.stdout) ? s.stdout.toString() : "ERROR: maybe apprise not found";
|
||||
|
||||
|
@@ -15,10 +15,10 @@ class DingDing extends NotificationProvider {
|
||||
msgtype: "markdown",
|
||||
markdown: {
|
||||
title: `[${this.statusToString(heartbeatJSON["status"])}] ${monitorJSON["name"]}`,
|
||||
text: `## [${this.statusToString(heartbeatJSON["status"])}] ${monitorJSON["name"]} \n > ${heartbeatJSON["msg"]} \n > Time(UTC):${heartbeatJSON["time"]}`,
|
||||
text: `## [${this.statusToString(heartbeatJSON["status"])}] ${monitorJSON["name"]} \n> ${heartbeatJSON["msg"]}\n> Time (${heartbeatJSON["timezone"]}): ${heartbeatJSON["localDateTime"]}`,
|
||||
}
|
||||
};
|
||||
if (this.sendToDingDing(notification, params)) {
|
||||
if (await this.sendToDingDing(notification, params)) {
|
||||
return okMsg;
|
||||
}
|
||||
} else {
|
||||
@@ -28,7 +28,7 @@ class DingDing extends NotificationProvider {
|
||||
content: msg
|
||||
}
|
||||
};
|
||||
if (this.sendToDingDing(notification, params)) {
|
||||
if (await this.sendToDingDing(notification, params)) {
|
||||
return okMsg;
|
||||
}
|
||||
}
|
||||
@@ -59,7 +59,7 @@ class DingDing extends NotificationProvider {
|
||||
if (result.data.errmsg === "ok") {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
throw new Error(result.data.errmsg);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -30,6 +30,7 @@ class Discord extends NotificationProvider {
|
||||
break;
|
||||
case "port":
|
||||
case "dns":
|
||||
case "gamedig":
|
||||
case "steam":
|
||||
address = monitorJSON["hostname"];
|
||||
if (monitorJSON["port"]) {
|
||||
@@ -59,8 +60,8 @@ class Discord extends NotificationProvider {
|
||||
value: monitorJSON["type"] === "push" ? "Heartbeat" : address,
|
||||
},
|
||||
{
|
||||
name: "Time (UTC)",
|
||||
value: heartbeatJSON["time"],
|
||||
name: `Time (${heartbeatJSON["timezone"]})`,
|
||||
value: heartbeatJSON["localDateTime"],
|
||||
},
|
||||
{
|
||||
name: "Error",
|
||||
@@ -94,8 +95,8 @@ class Discord extends NotificationProvider {
|
||||
value: monitorJSON["type"] === "push" ? "Heartbeat" : address,
|
||||
},
|
||||
{
|
||||
name: "Time (UTC)",
|
||||
value: heartbeatJSON["time"],
|
||||
name: `Time (${heartbeatJSON["timezone"]})`,
|
||||
value: heartbeatJSON["localDateTime"],
|
||||
},
|
||||
{
|
||||
name: "Ping",
|
||||
|
@@ -35,8 +35,7 @@ class Feishu extends NotificationProvider {
|
||||
text:
|
||||
"[Down] " +
|
||||
heartbeatJSON["msg"] +
|
||||
"\nTime (UTC): " +
|
||||
heartbeatJSON["time"],
|
||||
`\nTime (${heartbeatJSON["timezone"]}): ${heartbeatJSON["localDateTime"]}`
|
||||
},
|
||||
],
|
||||
],
|
||||
@@ -62,8 +61,7 @@ class Feishu extends NotificationProvider {
|
||||
text:
|
||||
"[Up] " +
|
||||
heartbeatJSON["msg"] +
|
||||
"\nTime (UTC): " +
|
||||
heartbeatJSON["time"],
|
||||
`\nTime (${heartbeatJSON["timezone"]}): ${heartbeatJSON["localDateTime"]}`,
|
||||
},
|
||||
],
|
||||
],
|
||||
|
98
server/notification-providers/flashduty.js
Normal file
@@ -0,0 +1,98 @@
|
||||
const NotificationProvider = require("./notification-provider");
|
||||
const axios = require("axios");
|
||||
const { UP, DOWN, getMonitorRelativeURL } = require("../../src/util");
|
||||
const { setting } = require("../util-server");
|
||||
const successMessage = "Sent Successfully.";
|
||||
|
||||
class FlashDuty extends NotificationProvider {
|
||||
name = "FlashDuty";
|
||||
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
try {
|
||||
if (heartbeatJSON == null) {
|
||||
const title = "Uptime Kuma Alert";
|
||||
const monitor = {
|
||||
type: "ping",
|
||||
url: msg,
|
||||
name: "https://flashcat.cloud"
|
||||
};
|
||||
return this.postNotification(notification, title, msg, monitor);
|
||||
}
|
||||
|
||||
if (heartbeatJSON.status === UP) {
|
||||
const title = "Uptime Kuma Monitor ✅ Up";
|
||||
|
||||
return this.postNotification(notification, title, heartbeatJSON.msg, monitorJSON, "Ok");
|
||||
}
|
||||
|
||||
if (heartbeatJSON.status === DOWN) {
|
||||
const title = "Uptime Kuma Monitor 🔴 Down";
|
||||
return this.postNotification(notification, title, heartbeatJSON.msg, monitorJSON, notification.flashdutySeverity);
|
||||
}
|
||||
} catch (error) {
|
||||
this.throwGeneralAxiosError(error);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Generate a monitor url from the monitor's information
|
||||
* @param {Object} monitorInfo Monitor details
|
||||
* @returns {string|undefined}
|
||||
*/
|
||||
|
||||
genMonitorUrl(monitorInfo) {
|
||||
if (monitorInfo.type === "port" && monitorInfo.port) {
|
||||
return monitorInfo.hostname + ":" + monitorInfo.port;
|
||||
}
|
||||
if (monitorInfo.hostname != null) {
|
||||
return monitorInfo.hostname;
|
||||
}
|
||||
return monitorInfo.url;
|
||||
}
|
||||
|
||||
/**
|
||||
* Send the message
|
||||
* @param {BeanModel} notification Message title
|
||||
* @param {string} title Message
|
||||
* @param {string} body Message
|
||||
* @param {Object} monitorInfo Monitor details
|
||||
* @param {string} eventStatus Monitor status (Info, Warning, Critical, Ok)
|
||||
* @returns {string}
|
||||
*/
|
||||
async postNotification(notification, title, body, monitorInfo, eventStatus) {
|
||||
const options = {
|
||||
method: "POST",
|
||||
url: "https://api.flashcat.cloud/event/push/alert/standard?integration_key=" + notification.flashdutyIntegrationKey,
|
||||
headers: { "Content-Type": "application/json" },
|
||||
data: {
|
||||
description: `[${title}] [${monitorInfo.name}] ${body}`,
|
||||
title,
|
||||
event_status: eventStatus || "Info",
|
||||
alert_key: String(monitorInfo.id) || Math.random().toString(36).substring(7),
|
||||
labels: monitorInfo?.tags?.reduce((acc, item) => ({ ...acc,
|
||||
[item.name]: item.value
|
||||
}), { resource: this.genMonitorUrl(monitorInfo) }),
|
||||
}
|
||||
};
|
||||
|
||||
const baseURL = await setting("primaryBaseURL");
|
||||
if (baseURL && monitorInfo) {
|
||||
options.client = "Uptime Kuma";
|
||||
options.client_url = baseURL + getMonitorRelativeURL(monitorInfo.id);
|
||||
}
|
||||
|
||||
let result = await axios.request(options);
|
||||
if (result.status == null) {
|
||||
throw new Error("FlashDuty notification failed with invalid response!");
|
||||
}
|
||||
if (result.status < 200 || result.status >= 300) {
|
||||
throw new Error("FlashDuty notification failed with status code " + result.status);
|
||||
}
|
||||
if (result.statusText != null) {
|
||||
return "FlashDuty notification succeed: " + result.statusText;
|
||||
}
|
||||
|
||||
return successMessage;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = FlashDuty;
|
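As a rough illustration of the notification settings this new provider reads (the field names come from the code above; the values are placeholders):

// Hypothetical FlashDuty notification settings (values are placeholders).
const notification = {
    flashdutyIntegrationKey: "0123abcd...", // appended as integration_key on the push URL above
    flashdutySeverity: "Critical",          // passed as event_status for Down alerts (Info, Warning, Critical)
};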
@@ -11,7 +11,7 @@ class HomeAssistant extends NotificationProvider {
|
||||
|
||||
try {
|
||||
await axios.post(
|
||||
`${notification.homeAssistantUrl}/api/services/notify/${notificationService}`,
|
||||
`${notification.homeAssistantUrl.trim().replace(/\/*$/, "")}/api/services/notify/${notificationService}`,
|
||||
{
|
||||
title: "Uptime Kuma",
|
||||
message,
|
||||
|
@@ -33,7 +33,10 @@ class Line extends NotificationProvider {
|
||||
"messages": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "UptimeKuma Alert: [🔴 Down]\n" + "Name: " + monitorJSON["name"] + " \n" + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"]
|
||||
"text": "UptimeKuma Alert: [🔴 Down]\n" +
|
||||
"Name: " + monitorJSON["name"] + " \n" +
|
||||
heartbeatJSON["msg"] +
|
||||
`\nTime (${heartbeatJSON["timezone"]}): ${heartbeatJSON["localDateTime"]}`
|
||||
}
|
||||
]
|
||||
};
|
||||
@@ -44,7 +47,10 @@ class Line extends NotificationProvider {
|
||||
"messages": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "UptimeKuma Alert: [✅ Up]\n" + "Name: " + monitorJSON["name"] + " \n" + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"]
|
||||
"text": "UptimeKuma Alert: [✅ Up]\n" +
|
||||
"Name: " + monitorJSON["name"] + " \n" +
|
||||
heartbeatJSON["msg"] +
|
||||
`\nTime (${heartbeatJSON["timezone"]}): ${heartbeatJSON["localDateTime"]}`
|
||||
}
|
||||
]
|
||||
};
|
||||
|
@@ -24,12 +24,18 @@ class LineNotify extends NotificationProvider {
|
||||
await axios.post(lineAPIUrl, qs.stringify(testMessage), config);
|
||||
} else if (heartbeatJSON["status"] === DOWN) {
|
||||
let downMessage = {
|
||||
"message": "\n[🔴 Down]\n" + "Name: " + monitorJSON["name"] + " \n" + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"]
|
||||
"message": "\n[🔴 Down]\n" +
|
||||
"Name: " + monitorJSON["name"] + " \n" +
|
||||
heartbeatJSON["msg"] + "\n" +
|
||||
`Time (${heartbeatJSON["timezone"]}): ${heartbeatJSON["localDateTime"]}`
|
||||
};
|
||||
await axios.post(lineAPIUrl, qs.stringify(downMessage), config);
|
||||
} else if (heartbeatJSON["status"] === UP) {
|
||||
let upMessage = {
|
||||
"message": "\n[✅ Up]\n" + "Name: " + monitorJSON["name"] + " \n" + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"]
|
||||
"message": "\n[✅ Up]\n" +
|
||||
"Name: " + monitorJSON["name"] + " \n" +
|
||||
heartbeatJSON["msg"] + "\n" +
|
||||
`Time (${heartbeatJSON["timezone"]}): ${heartbeatJSON["localDateTime"]}`
|
||||
};
|
||||
await axios.post(lineAPIUrl, qs.stringify(upMessage), config);
|
||||
}
|
||||
|
@@ -28,7 +28,9 @@ class LunaSea extends NotificationProvider {
|
||||
if (heartbeatJSON["status"] === DOWN) {
|
||||
let downdata = {
|
||||
"title": "UptimeKuma Alert: " + monitorJSON["name"],
|
||||
"body": "[🔴 Down] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
|
||||
"body": "[🔴 Down] " +
|
||||
heartbeatJSON["msg"] +
|
||||
`\nTime (${heartbeatJSON["timezone"]}): ${heartbeatJSON["localDateTime"]}`
|
||||
};
|
||||
await axios.post(lunaseaurl, downdata);
|
||||
return okMsg;
|
||||
@@ -37,7 +39,9 @@ class LunaSea extends NotificationProvider {
|
||||
if (heartbeatJSON["status"] === UP) {
|
||||
let updata = {
|
||||
"title": "UptimeKuma Alert: " + monitorJSON["name"],
|
||||
"body": "[✅ Up] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
|
||||
"body": "[✅ Up] " +
|
||||
heartbeatJSON["msg"] +
|
||||
`\nTime (${heartbeatJSON["timezone"]}): ${heartbeatJSON["localDateTime"]}`
|
||||
};
|
||||
await axios.post(lunaseaurl, updata);
|
||||
return okMsg;
|
||||
|
@@ -10,7 +10,7 @@ class Mattermost extends NotificationProvider {
|
||||
let okMsg = "Sent Successfully.";
|
||||
try {
|
||||
const mattermostUserName = notification.mattermostusername || "Uptime Kuma";
|
||||
// If heartbeatJSON is null, assume we're testing.
|
||||
// If heartbeatJSON is null, assume non monitoring notification (Certificate warning) or testing.
|
||||
if (heartbeatJSON == null) {
|
||||
let mattermostTestData = {
|
||||
username: mattermostUserName,
|
||||
@@ -27,97 +27,79 @@ class Mattermost extends NotificationProvider {
|
||||
}
|
||||
|
||||
const mattermostIconEmoji = notification.mattermosticonemo;
|
||||
const mattermostIconUrl = notification.mattermosticonurl;
|
||||
let mattermostIconEmojiOnline = "";
|
||||
let mattermostIconEmojiOffline = "";
|
||||
|
||||
if (heartbeatJSON["status"] === DOWN) {
|
||||
let mattermostdowndata = {
|
||||
username: mattermostUserName,
|
||||
text: "Uptime Kuma Alert",
|
||||
channel: mattermostChannel,
|
||||
icon_emoji: mattermostIconEmoji,
|
||||
icon_url: mattermostIconUrl,
|
||||
attachments: [
|
||||
{
|
||||
fallback:
|
||||
"Your " +
|
||||
monitorJSON["name"] +
|
||||
" service went down.",
|
||||
color: "#FF0000",
|
||||
title:
|
||||
"❌ " +
|
||||
monitorJSON["name"] +
|
||||
" service went down. ❌",
|
||||
title_link: monitorJSON["url"],
|
||||
fields: [
|
||||
{
|
||||
short: true,
|
||||
title: "Service Name",
|
||||
value: monitorJSON["name"],
|
||||
},
|
||||
{
|
||||
short: true,
|
||||
title: "Time (UTC)",
|
||||
value: heartbeatJSON["time"],
|
||||
},
|
||||
{
|
||||
short: false,
|
||||
title: "Error",
|
||||
value: heartbeatJSON["msg"],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
await axios.post(
|
||||
notification.mattermostWebhookUrl,
|
||||
mattermostdowndata
|
||||
);
|
||||
return okMsg;
|
||||
} else if (heartbeatJSON["status"] === UP) {
|
||||
let mattermostupdata = {
|
||||
username: mattermostUserName,
|
||||
text: "Uptime Kuma Alert",
|
||||
channel: mattermostChannel,
|
||||
icon_emoji: mattermostIconEmoji,
|
||||
icon_url: mattermostIconUrl,
|
||||
attachments: [
|
||||
{
|
||||
fallback:
|
||||
"Your " +
|
||||
monitorJSON["name"] +
|
||||
" service went up!",
|
||||
color: "#32CD32",
|
||||
title:
|
||||
"✅ " +
|
||||
monitorJSON["name"] +
|
||||
" service went up! ✅",
|
||||
title_link: monitorJSON["url"],
|
||||
fields: [
|
||||
{
|
||||
short: true,
|
||||
title: "Service Name",
|
||||
value: monitorJSON["name"],
|
||||
},
|
||||
{
|
||||
short: true,
|
||||
title: "Time (UTC)",
|
||||
value: heartbeatJSON["time"],
|
||||
},
|
||||
{
|
||||
short: false,
|
||||
title: "Ping",
|
||||
value: heartbeatJSON["ping"] + "ms",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
await axios.post(
|
||||
notification.mattermostWebhookUrl,
|
||||
mattermostupdata
|
||||
);
|
||||
return okMsg;
|
||||
if (mattermostIconEmoji && typeof mattermostIconEmoji === "string") {
|
||||
const emojiArray = mattermostIconEmoji.split(" ");
|
||||
if (emojiArray.length >= 2) {
|
||||
mattermostIconEmojiOnline = emojiArray[0];
|
||||
mattermostIconEmojiOffline = emojiArray[1];
|
||||
}
|
||||
}
|
||||
const mattermostIconUrl = notification.mattermosticonurl;
|
||||
let iconEmoji = mattermostIconEmoji;
|
||||
let statusField = {
|
||||
short: false,
|
||||
title: "Error",
|
||||
value: heartbeatJSON.msg,
|
||||
};
|
||||
let statusText = "unknown";
|
||||
let color = "#000000";
|
||||
if (heartbeatJSON.status === DOWN) {
|
||||
iconEmoji = mattermostIconEmojiOffline || mattermostIconEmoji;
|
||||
statusField = {
|
||||
short: false,
|
||||
title: "Error",
|
||||
value: heartbeatJSON.msg,
|
||||
};
|
||||
statusText = "down.";
|
||||
color = "#FF0000";
|
||||
} else if (heartbeatJSON.status === UP) {
|
||||
iconEmoji = mattermostIconEmojiOnline || mattermostIconEmoji;
|
||||
statusField = {
|
||||
short: false,
|
||||
title: "Ping",
|
||||
value: heartbeatJSON.ping + "ms",
|
||||
};
|
||||
statusText = "up!";
|
||||
color = "#32CD32";
|
||||
}
|
||||
|
||||
let mattermostdata = {
|
||||
username: monitorJSON.name + " " + mattermostUserName,
|
||||
channel: mattermostChannel,
|
||||
icon_emoji: iconEmoji,
|
||||
icon_url: mattermostIconUrl,
|
||||
attachments: [
|
||||
{
|
||||
fallback:
|
||||
"Your " +
|
||||
monitorJSON.name +
|
||||
" service went " +
|
||||
statusText,
|
||||
color: color,
|
||||
title:
|
||||
monitorJSON.name +
|
||||
" service went " +
|
||||
statusText,
|
||||
title_link: monitorJSON.url,
|
||||
fields: [
|
||||
statusField,
|
||||
{
|
||||
short: true,
|
||||
title: `Time (${heartbeatJSON["timezone"]})`,
|
||||
value: heartbeatJSON.localDateTime,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
await axios.post(
|
||||
notification.mattermostWebhookUrl,
|
||||
mattermostdata
|
||||
);
|
||||
return okMsg;
|
||||
} catch (error) {
|
||||
this.throwGeneralAxiosError(error);
|
||||
}
|
||||
|
119
server/notification-providers/nostr.js
Normal file
@@ -0,0 +1,119 @@
|
||||
const { log } = require("../../src/util");
|
||||
const NotificationProvider = require("./notification-provider");
|
||||
const {
|
||||
relayInit,
|
||||
getPublicKey,
|
||||
getEventHash,
|
||||
getSignature,
|
||||
nip04,
|
||||
nip19
|
||||
} = require("nostr-tools");
|
||||
|
||||
// polyfills for node versions
|
||||
const semver = require("semver");
|
||||
const nodeVersion = process.version;
|
||||
if (semver.lt(nodeVersion, "16.0.0")) {
|
||||
log.warn("monitor", "Node <= 16 is unsupported for nostr, sorry :(");
|
||||
} else if (semver.lt(nodeVersion, "18.0.0")) {
|
||||
// polyfills for node 16
|
||||
global.crypto = require("crypto");
|
||||
global.WebSocket = require("isomorphic-ws");
|
||||
if (typeof crypto !== "undefined" && !crypto.subtle && crypto.webcrypto) {
|
||||
crypto.subtle = crypto.webcrypto.subtle;
|
||||
}
|
||||
} else if (semver.lt(nodeVersion, "20.0.0")) {
|
||||
// polyfills for node 18
|
||||
global.crypto = require("crypto");
|
||||
global.WebSocket = require("isomorphic-ws");
|
||||
} else {
|
||||
// polyfills for node 20
|
||||
global.WebSocket = require("isomorphic-ws");
|
||||
}
|
||||
|
||||
class Nostr extends NotificationProvider {
|
||||
name = "nostr";
|
||||
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
// All DMs should have same timestamp
|
||||
const createdAt = Math.floor(Date.now() / 1000);
|
||||
|
||||
const senderPrivateKey = await this.getPrivateKey(notification.sender);
|
||||
const senderPublicKey = getPublicKey(senderPrivateKey);
|
||||
const recipientsPublicKeys = await this.getPublicKeys(notification.recipients);
|
||||
|
||||
// Create NIP-04 encrypted direct message event for each recipient
|
||||
const events = [];
|
||||
for (const recipientPublicKey of recipientsPublicKeys) {
|
||||
const ciphertext = await nip04.encrypt(senderPrivateKey, recipientPublicKey, msg);
|
||||
let event = {
|
||||
kind: 4,
|
||||
pubkey: senderPublicKey,
|
||||
created_at: createdAt,
|
||||
tags: [[ "p", recipientPublicKey ]],
|
||||
content: ciphertext,
|
||||
};
|
||||
event.id = getEventHash(event);
|
||||
event.sig = getSignature(event, senderPrivateKey);
|
||||
events.push(event);
|
||||
}
|
||||
|
||||
// Publish events to each relay
|
||||
const relays = notification.relays.split("\n");
|
||||
let successfulRelays = 0;
|
||||
|
||||
// Connect to each relay
|
||||
for (const relayUrl of relays) {
|
||||
const relay = relayInit(relayUrl);
|
||||
try {
|
||||
await relay.connect();
|
||||
successfulRelays++;
|
||||
|
||||
// Publish events
|
||||
for (const event of events) {
|
||||
relay.publish(event);
|
||||
}
|
||||
} catch (error) {
|
||||
continue;
|
||||
} finally {
|
||||
relay.close();
|
||||
}
|
||||
}
|
||||
|
||||
// Report success or failure
|
||||
if (successfulRelays === 0) {
|
||||
throw Error("Failed to connect to any relays.");
|
||||
}
|
||||
return `${successfulRelays}/${relays.length} relays connected.`;
|
||||
}
|
||||
|
||||
async getPrivateKey(sender) {
|
||||
try {
|
||||
const senderDecodeResult = await nip19.decode(sender);
|
||||
const { data } = senderDecodeResult;
|
||||
return data;
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to get private key: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
async getPublicKeys(recipients) {
|
||||
const recipientsList = recipients.split("\n");
|
||||
const publicKeys = [];
|
||||
for (const recipient of recipientsList) {
|
||||
try {
|
||||
const recipientDecodeResult = await nip19.decode(recipient);
|
||||
const { type, data } = recipientDecodeResult;
|
||||
if (type === "npub") {
|
||||
publicKeys.push(data);
|
||||
} else {
|
||||
throw new Error("not an npub");
|
||||
}
|
||||
} catch (error) {
|
||||
throw new Error(`Error decoding recipient: ${error}`);
|
||||
}
|
||||
}
|
||||
return publicKeys;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Nostr;
|
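For illustration, the configuration shape this provider expects; the newline-separated format is inferred from the split("\n") calls above, and the key values are placeholders:

// Hypothetical Nostr notification settings (values are placeholders).
const notification = {
    sender: "nsec1...",                                          // NIP-19 encoded private key of the sender
    recipients: "npub1alice...\nnpub1bob...",                    // newline-separated NIP-19 public keys
    relays: "wss://relay.example.one\nwss://relay.example.two",  // newline-separated relay URLs
};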
@@ -1,5 +1,6 @@
|
||||
const NotificationProvider = require("./notification-provider");
|
||||
const axios = require("axios");
|
||||
const { DOWN, UP } = require("../../src/util");
|
||||
|
||||
class Ntfy extends NotificationProvider {
|
||||
|
||||
@@ -9,16 +10,54 @@ class Ntfy extends NotificationProvider {
|
||||
let okMsg = "Sent Successfully.";
|
||||
try {
|
||||
let headers = {};
|
||||
if (notification.ntfyusername) {
|
||||
if (notification.ntfyAuthenticationMethod === "usernamePassword") {
|
||||
headers = {
|
||||
"Authorization": "Basic " + Buffer.from(notification.ntfyusername + ":" + notification.ntfypassword).toString("base64"),
|
||||
};
|
||||
} else if (notification.ntfyAuthenticationMethod === "accessToken") {
|
||||
headers = {
|
||||
"Authorization": "Bearer " + notification.ntfyaccesstoken,
|
||||
};
|
||||
}
|
||||
// If heartbeatJSON is null, assume non monitoring notification (Certificate warning) or testing.
|
||||
if (heartbeatJSON == null) {
|
||||
let ntfyTestData = {
|
||||
"topic": notification.ntfytopic,
|
||||
"title": (monitorJSON?.name || notification.ntfytopic) + " [Uptime-Kuma]",
|
||||
"message": msg,
|
||||
"priority": notification.ntfyPriority,
|
||||
"tags": [ "test_tube" ],
|
||||
};
|
||||
await axios.post(`${notification.ntfyserverurl}`, ntfyTestData, { headers: headers });
|
||||
return okMsg;
|
||||
}
|
||||
let tags = [];
|
||||
let status = "unknown";
|
||||
let priority = notification.ntfyPriority || 4;
|
||||
if ("status" in heartbeatJSON) {
|
||||
if (heartbeatJSON.status === DOWN) {
|
||||
tags = [ "red_circle" ];
|
||||
status = "Down";
|
||||
// if priority is not 5, increase priority for down alerts
|
||||
priority = priority === 5 ? priority : priority + 1;
|
||||
} else if (heartbeatJSON["status"] === UP) {
|
||||
tags = [ "green_circle" ];
|
||||
status = "Up";
|
||||
}
|
||||
}
|
||||
let data = {
|
||||
"topic": notification.ntfytopic,
|
||||
"message": msg,
|
||||
"priority": notification.ntfyPriority || 4,
|
||||
"title": "Uptime-Kuma",
|
||||
"message": heartbeatJSON.msg,
|
||||
"priority": priority,
|
||||
"title": monitorJSON.name + " " + status + " [Uptime-Kuma]",
|
||||
"tags": tags,
|
||||
"actions": [
|
||||
{
|
||||
"action": "view",
|
||||
"label": "Open " + monitorJSON.name,
|
||||
"url": monitorJSON.url,
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
if (notification.ntfyIcon) {
|
||||
|
@@ -15,15 +15,15 @@ class Opsgenie extends NotificationProvider {
|
||||
*/
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let opsgenieAlertsUrl;
|
||||
let priority = (notification.opsgeniePriority == "") ? 3 : notification.opsgeniePriority;
|
||||
let priority = (!notification.opsgeniePriority) ? 3 : notification.opsgeniePriority;
|
||||
const textMsg = "Uptime Kuma Alert";
|
||||
|
||||
try {
|
||||
switch (notification.opsgenieRegion) {
|
||||
case "US":
|
||||
case "us":
|
||||
opsgenieAlertsUrl = opsgenieAlertsUrlUS;
|
||||
break;
|
||||
case "EU":
|
||||
case "eu":
|
||||
opsgenieAlertsUrl = opsgenieAlertsUrlEU;
|
||||
break;
|
||||
default:
|
||||
|
@@ -29,14 +29,18 @@ class Pushbullet extends NotificationProvider {
|
||||
let downData = {
|
||||
"type": "note",
|
||||
"title": "UptimeKuma Alert: " + monitorJSON["name"],
|
||||
"body": "[🔴 Down] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
|
||||
"body": "[🔴 Down] " +
|
||||
heartbeatJSON["msg"] +
|
||||
`\nTime (${heartbeatJSON["timezone"]}): ${heartbeatJSON["localDateTime"]}`,
|
||||
};
|
||||
await axios.post(pushbulletUrl, downData, config);
|
||||
} else if (heartbeatJSON["status"] === UP) {
|
||||
let upData = {
|
||||
"type": "note",
|
||||
"title": "UptimeKuma Alert: " + monitorJSON["name"],
|
||||
"body": "[✅ Up] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
|
||||
"body": "[✅ Up] " +
|
||||
heartbeatJSON["msg"] +
|
||||
`\nTime (${heartbeatJSON["timezone"]}): ${heartbeatJSON["localDateTime"]}`,
|
||||
};
|
||||
await axios.post(pushbulletUrl, upData, config);
|
||||
}
|
||||
|
@@ -8,7 +8,9 @@ class PushDeer extends NotificationProvider {
|
||||
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let okMsg = "Sent Successfully.";
|
||||
let pushdeerlink = "https://api2.pushdeer.com/message/push";
|
||||
let endpoint = "/message/push";
|
||||
let serverUrl = notification.pushdeerServer || "https://api2.pushdeer.com";
|
||||
let pushdeerlink = `${serverUrl.trim().replace(/\/*$/, "")}${endpoint}`;
|
||||
|
||||
let valid = msg != null && monitorJSON != null && heartbeatJSON != null;
|
||||
|
||||
|
@@ -24,13 +24,16 @@ class Pushover extends NotificationProvider {
|
||||
if (notification.pushoverdevice) {
|
||||
data.device = notification.pushoverdevice;
|
||||
}
|
||||
if (notification.pushoverttl) {
|
||||
data.ttl = notification.pushoverttl;
|
||||
}
|
||||
|
||||
try {
|
||||
if (heartbeatJSON == null) {
|
||||
await axios.post(pushoverlink, data);
|
||||
return okMsg;
|
||||
} else {
|
||||
data.message += "\n<b>Time (UTC)</b>:" + heartbeatJSON["time"];
|
||||
data.message += `\n<b>Time (${heartbeatJSON["timezone"]})</b>:${heartbeatJSON["localDateTime"]}`;
|
||||
await axios.post(pushoverlink, data);
|
||||
return okMsg;
|
||||
}
|
||||
|
@@ -22,8 +22,6 @@ class RocketChat extends NotificationProvider {
|
||||
return okMsg;
|
||||
}
|
||||
|
||||
const time = heartbeatJSON["time"];
|
||||
|
||||
let data = {
|
||||
"text": "Uptime Kuma Alert",
|
||||
"channel": notification.rocketchannel,
|
||||
@@ -31,7 +29,7 @@ class RocketChat extends NotificationProvider {
|
||||
"icon_emoji": notification.rocketiconemo,
|
||||
"attachments": [
|
||||
{
|
||||
"title": "Uptime Kuma Alert *Time (UTC)*\n" + time,
|
||||
"title": `Uptime Kuma Alert *Time (${heartbeatJSON["timezone"]})*\n${heartbeatJSON["localDateTime"]}`,
|
||||
"text": "*Message*\n" + msg,
|
||||
}
|
||||
]
|
||||
|
@@ -27,6 +27,11 @@ class Slack extends NotificationProvider {
|
||||
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let okMsg = "Sent Successfully.";
|
||||
|
||||
if (notification.slackchannelnotify) {
|
||||
msg += " <!channel>";
|
||||
}
|
||||
|
||||
try {
|
||||
if (heartbeatJSON == null) {
|
||||
let data = {
|
||||
@@ -39,7 +44,6 @@ class Slack extends NotificationProvider {
|
||||
return okMsg;
|
||||
}
|
||||
|
||||
const time = heartbeatJSON["time"];
|
||||
const textMsg = "Uptime Kuma Alert";
|
||||
let data = {
|
||||
"text": `${textMsg}\n${msg}`,
|
||||
@@ -54,7 +58,7 @@ class Slack extends NotificationProvider {
|
||||
"type": "header",
|
||||
"text": {
|
||||
"type": "plain_text",
|
||||
"text": "Uptime Kuma Alert",
|
||||
"text": textMsg,
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -65,7 +69,7 @@ class Slack extends NotificationProvider {
|
||||
},
|
||||
{
|
||||
"type": "mrkdwn",
|
||||
"text": "*Time (UTC)*\n" + time,
|
||||
"text": `*Time (${heartbeatJSON["timezone"]})*\n${heartbeatJSON["localDateTime"]}`,
|
||||
}],
|
||||
}
|
||||
],
|
||||
|
42
server/notification-providers/smsc.js
Normal file
@@ -0,0 +1,42 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");

class SMSC extends NotificationProvider {
    name = "smsc";

    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let okMsg = "Sent Successfully.";
        try {
            let config = {
                headers: {
                    "Content-Type": "application/json",
                    "Accept": "text/json",
                }
            };

            let getArray = [
                "fmt=3",
                "translit=" + notification.smscTranslit,
                "login=" + notification.smscLogin,
                "psw=" + notification.smscPassword,
                "phones=" + notification.smscToNumber,
                "mes=" + encodeURIComponent(msg.replace(/[^\x00-\x7F]/g, "")),
            ];
            if (notification.smscSenderName !== "") {
                getArray.push("sender=" + notification.smscSenderName);
            }

            let resp = await axios.get("https://smsc.kz/sys/send.php?" + getArray.join("&"), config);
            if (resp.data.id === undefined) {
                let error = `Something went wrong. The API returned code ${resp.data.error_code}: ${resp.data.error}`;
                this.throwGeneralAxiosError(error);
            }

            return okMsg;
        } catch (error) {
            this.throwGeneralAxiosError(error);
        }
    }
}

module.exports = SMSC;
@@ -13,7 +13,7 @@ class SMTP extends NotificationProvider {
|
||||
port: notification.smtpPort,
|
||||
secure: notification.smtpSecure,
|
||||
tls: {
|
||||
rejectUnauthorized: notification.smtpIgnoreTLSError || false,
|
||||
rejectUnauthorized: !notification.smtpIgnoreTLSError || false,
|
||||
}
|
||||
};
|
||||
|
||||
@@ -67,7 +67,7 @@ class SMTP extends NotificationProvider {
|
||||
if (monitorJSON !== null) {
|
||||
monitorName = monitorJSON["name"];
|
||||
|
||||
if (monitorJSON["type"] === "http" || monitorJSON["type"] === "keyword") {
|
||||
if (monitorJSON["type"] === "http" || monitorJSON["type"] === "keyword" || monitorJSON["type"] === "json-query") {
|
||||
monitorHostnameOrURL = monitorJSON["url"];
|
||||
} else {
|
||||
monitorHostnameOrURL = monitorJSON["hostname"];
|
||||
@@ -91,7 +91,7 @@ class SMTP extends NotificationProvider {
|
||||
|
||||
let bodyTextContent = msg;
|
||||
if (heartbeatJSON) {
|
||||
bodyTextContent = `${msg}\nTime (UTC): ${heartbeatJSON["time"]}`;
|
||||
bodyTextContent = `${msg}\nTime (${heartbeatJSON["timezone"]}): ${heartbeatJSON["localDateTime"]}`;
|
||||
}
|
||||
|
||||
// send mail with defined transport object
|
||||
|
@@ -25,8 +25,11 @@ class Telegram extends NotificationProvider {
|
||||
return okMsg;
|
||||
|
||||
} catch (error) {
|
||||
let msg = (error.response.data.description) ? error.response.data.description : "Error without description";
|
||||
throw new Error(msg);
|
||||
if (error.response && error.response.data && error.response.data.description) {
|
||||
throw new Error(error.response.data.description);
|
||||
} else {
|
||||
throw new Error(error.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -10,6 +10,7 @@ class Twilio extends NotificationProvider {
|
||||
let okMsg = "Sent Successfully.";
|
||||
|
||||
let accountSID = notification.twilioAccountSID;
|
||||
let apiKey = notification.twilioApiKey ? notification.twilioApiKey : accountSID;
|
||||
let authToken = notification.twilioAuthToken;
|
||||
|
||||
try {
|
||||
@@ -17,7 +18,7 @@ class Twilio extends NotificationProvider {
|
||||
let config = {
|
||||
headers: {
|
||||
"Content-Type": "application/x-www-form-urlencoded;charset=utf-8",
|
||||
"Authorization": "Basic " + Buffer.from(accountSID + ":" + authToken).toString("base64"),
|
||||
"Authorization": "Basic " + Buffer.from(apiKey + ":" + authToken).toString("base64"),
|
||||
}
|
||||
};
|
||||
|
||||
|
@@ -1,6 +1,7 @@
|
||||
const NotificationProvider = require("./notification-provider");
|
||||
const axios = require("axios");
|
||||
const FormData = require("form-data");
|
||||
const { Liquid } = require("liquidjs");
|
||||
|
||||
class Webhook extends NotificationProvider {
|
||||
|
||||
@@ -15,17 +16,27 @@ class Webhook extends NotificationProvider {
|
||||
monitor: monitorJSON,
|
||||
msg,
|
||||
};
|
||||
let finalData;
|
||||
let config = {
|
||||
headers: {}
|
||||
};
|
||||
|
||||
if (notification.webhookContentType === "form-data") {
|
||||
finalData = new FormData();
|
||||
finalData.append("data", JSON.stringify(data));
|
||||
config.headers = finalData.getHeaders();
|
||||
} else {
|
||||
finalData = data;
|
||||
const formData = new FormData();
|
||||
formData.append("data", JSON.stringify(data));
|
||||
config.headers = formData.getHeaders();
|
||||
data = formData;
|
||||
} else if (notification.webhookContentType === "custom") {
|
||||
// Initialize LiquidJS and parse the custom Body Template
|
||||
const engine = new Liquid();
|
||||
const tpl = engine.parse(notification.webhookCustomBody);
|
||||
|
||||
// Insert templated values into Body
|
||||
data = await engine.render(tpl,
|
||||
{
|
||||
msg,
|
||||
heartbeatJSON,
|
||||
monitorJSON
|
||||
});
|
||||
}
|
||||
|
||||
if (notification.webhookAdditionalHeaders) {
|
||||
@@ -39,7 +50,7 @@ class Webhook extends NotificationProvider {
|
||||
}
|
||||
}
|
||||
|
||||
await axios.post(notification.webhookURL, finalData, config);
|
||||
await axios.post(notification.webhookURL, data, config);
|
||||
return okMsg;
|
||||
|
||||
} catch (error) {
|
||||
|
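For illustration, a custom body template for the new "custom" content type might look like the sketch below. This is a guess at typical usage; the only assumption taken from the diff is that msg, heartbeatJSON and monitorJSON are the variables handed to engine.render.

// Hypothetical value of notification.webhookCustomBody (LiquidJS template syntax).
notification.webhookCustomBody = `{
    "monitor": "{{ monitorJSON.name }}",
    "status": "{{ heartbeatJSON.status }}",
    "message": "{{ msg }}"
}`;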
@@ -6,6 +6,7 @@ const AliyunSms = require("./notification-providers/aliyun-sms");
|
||||
const Apprise = require("./notification-providers/apprise");
|
||||
const Bark = require("./notification-providers/bark");
|
||||
const ClickSendSMS = require("./notification-providers/clicksendsms");
|
||||
const SMSC = require("./notification-providers/smsc");
|
||||
const DingDing = require("./notification-providers/dingding");
|
||||
const Discord = require("./notification-providers/discord");
|
||||
const Feishu = require("./notification-providers/feishu");
|
||||
@@ -20,11 +21,13 @@ const LineNotify = require("./notification-providers/linenotify");
|
||||
const LunaSea = require("./notification-providers/lunasea");
|
||||
const Matrix = require("./notification-providers/matrix");
|
||||
const Mattermost = require("./notification-providers/mattermost");
|
||||
const Nostr = require("./notification-providers/nostr");
|
||||
const Ntfy = require("./notification-providers/ntfy");
|
||||
const Octopush = require("./notification-providers/octopush");
|
||||
const OneBot = require("./notification-providers/onebot");
|
||||
const Opsgenie = require("./notification-providers/opsgenie");
|
||||
const PagerDuty = require("./notification-providers/pagerduty");
|
||||
const FlashDuty = require("./notification-providers/flashduty");
|
||||
const PagerTree = require("./notification-providers/pagertree");
|
||||
const PromoSMS = require("./notification-providers/promosms");
|
||||
const Pushbullet = require("./notification-providers/pushbullet");
|
||||
@@ -68,6 +71,7 @@ class Notification {
|
||||
new Apprise(),
|
||||
new Bark(),
|
||||
new ClickSendSMS(),
|
||||
new SMSC(),
|
||||
new DingDing(),
|
||||
new Discord(),
|
||||
new Feishu(),
|
||||
@@ -82,11 +86,13 @@ class Notification {
|
||||
new LunaSea(),
|
||||
new Matrix(),
|
||||
new Mattermost(),
|
||||
new Nostr(),
|
||||
new Ntfy(),
|
||||
new Octopush(),
|
||||
new OneBot(),
|
||||
new Opsgenie(),
|
||||
new PagerDuty(),
|
||||
new FlashDuty(),
|
||||
new PagerTree(),
|
||||
new PromoSMS(),
|
||||
new Pushbullet(),
|
||||
@@ -113,7 +119,6 @@ class Notification {
|
||||
new GoAlert(),
|
||||
new ZohoCliq()
|
||||
];
|
||||
|
||||
for (let item of list) {
|
||||
if (! item.name) {
|
||||
throw new Error("Notification provider without name");
|
||||
|
@@ -1,13 +0,0 @@
|
||||
class Plugin {
|
||||
async load() {
|
||||
|
||||
}
|
||||
|
||||
async unload() {
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
Plugin,
|
||||
};
|
@@ -1,256 +0,0 @@
|
||||
const fs = require("fs");
|
||||
const { log } = require("../src/util");
|
||||
const path = require("path");
|
||||
const axios = require("axios");
|
||||
const { Git } = require("./git");
|
||||
const childProcess = require("child_process");
|
||||
|
||||
class PluginsManager {
|
||||
|
||||
static disable = false;
|
||||
|
||||
/**
|
||||
* Plugin List
|
||||
* @type {PluginWrapper[]}
|
||||
*/
|
||||
pluginList = [];
|
||||
|
||||
/**
|
||||
* Plugins Dir
|
||||
*/
|
||||
pluginsDir;
|
||||
|
||||
server;
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {UptimeKumaServer} server
|
||||
*/
|
||||
constructor(server) {
|
||||
this.server = server;
|
||||
|
||||
if (!PluginsManager.disable) {
|
||||
this.pluginsDir = "./data/plugins/";
|
||||
|
||||
if (! fs.existsSync(this.pluginsDir)) {
|
||||
fs.mkdirSync(this.pluginsDir, { recursive: true });
|
||||
}
|
||||
|
||||
log.debug("plugin", "Scanning plugin directory");
|
||||
let list = fs.readdirSync(this.pluginsDir);
|
||||
|
||||
this.pluginList = [];
|
||||
for (let item of list) {
|
||||
this.loadPlugin(item);
|
||||
}
|
||||
|
||||
} else {
|
||||
log.warn("PLUGIN", "Skip scanning plugin directory");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Install a Plugin
|
||||
*/
|
||||
async loadPlugin(name) {
|
||||
log.info("plugin", "Load " + name);
|
||||
let plugin = new PluginWrapper(this.server, this.pluginsDir + name);
|
||||
|
||||
try {
|
||||
await plugin.load();
|
||||
this.pluginList.push(plugin);
|
||||
} catch (e) {
|
||||
log.error("plugin", "Failed to load plugin: " + this.pluginsDir + name);
|
||||
log.error("plugin", "Reason: " + e.message);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Download a Plugin
|
||||
* @param {string} repoURL Git repo url
|
||||
* @param {string} name Directory name, also known as plugin unique name
|
||||
*/
|
||||
downloadPlugin(repoURL, name) {
|
||||
if (fs.existsSync(this.pluginsDir + name)) {
|
||||
log.info("plugin", "Plugin folder already exists? Removing...");
|
||||
fs.rmSync(this.pluginsDir + name, {
|
||||
recursive: true
|
||||
});
|
||||
}
|
||||
log.info("plugin", "Installing plugin: " + name + " " + repoURL);
|
||||
let result = Git.clone(repoURL, this.pluginsDir, name);
|
||||
log.info("plugin", "Install result: " + result);
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a plugin
|
||||
* @param {string} name
|
||||
*/
|
||||
async removePlugin(name) {
|
||||
log.info("plugin", "Removing plugin: " + name);
|
||||
for (let plugin of this.pluginList) {
|
||||
if (plugin.info.name === name) {
|
||||
await plugin.unload();
|
||||
|
||||
// Delete the plugin directory
|
||||
fs.rmSync(this.pluginsDir + name, {
|
||||
recursive: true
|
||||
});
|
||||
|
||||
this.pluginList.splice(this.pluginList.indexOf(plugin), 1);
|
||||
return;
|
||||
}
|
||||
}
|
||||
log.warn("plugin", "Plugin not found: " + name);
|
||||
throw new Error("Plugin not found: " + name);
|
||||
}
|
||||
|
||||
/**
|
||||
* TODO: Update a plugin
|
||||
* Only available for plugins which were downloaded from the official list
|
||||
* @param pluginID
|
||||
*/
|
||||
updatePlugin(pluginID) {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the plugin list from server + local installed plugin list
|
||||
* Item will be merged if the `name` is the same.
|
||||
* @returns {Promise<[]>}
|
||||
*/
|
||||
async fetchPluginList() {
|
||||
let remotePluginList;
|
||||
try {
|
||||
const res = await axios.get("https://uptime.kuma.pet/c/plugins.json");
|
||||
remotePluginList = res.data.pluginList;
|
||||
} catch (e) {
|
||||
log.error("plugin", "Failed to fetch plugin list: " + e.message);
|
||||
remotePluginList = [];
|
||||
}
|
||||
|
||||
for (let plugin of this.pluginList) {
|
||||
let find = false;
|
||||
// Try to merge
|
||||
for (let remotePlugin of remotePluginList) {
|
||||
if (remotePlugin.name === plugin.info.name) {
|
||||
find = true;
|
||||
remotePlugin.installed = true;
|
||||
remotePlugin.name = plugin.info.name;
|
||||
remotePlugin.fullName = plugin.info.fullName;
|
||||
remotePlugin.description = plugin.info.description;
|
||||
remotePlugin.version = plugin.info.version;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Local plugin
|
||||
if (!find) {
|
||||
plugin.info.local = true;
|
||||
remotePluginList.push(plugin.info);
|
||||
}
|
||||
}
|
||||
|
||||
// Sort Installed first, then sort by name
|
||||
return remotePluginList.sort((a, b) => {
|
||||
if (a.installed === b.installed) {
|
||||
if (a.fullName < b.fullName) {
|
||||
return -1;
|
||||
}
|
||||
if (a.fullName > b.fullName) {
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
} else if (a.installed) {
|
||||
return -1;
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
class PluginWrapper {
|
||||
|
||||
server = undefined;
|
||||
pluginDir = undefined;
|
||||
|
||||
/**
|
||||
* Must be an `new-able` class.
|
||||
* @type {function}
|
||||
*/
|
||||
pluginClass = undefined;
|
||||
|
||||
/**
|
||||
*
|
||||
* @type {Plugin}
|
||||
*/
|
||||
object = undefined;
|
||||
info = {};
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {UptimeKumaServer} server
|
||||
* @param {string} pluginDir
|
||||
*/
|
||||
constructor(server, pluginDir) {
|
||||
this.server = server;
|
||||
this.pluginDir = pluginDir;
|
||||
}
|
||||
|
||||
async load() {
|
||||
let indexFile = this.pluginDir + "/index.js";
|
||||
let packageJSON = this.pluginDir + "/package.json";
|
||||
|
||||
log.info("plugin", "Installing dependencies");
|
||||
|
||||
if (fs.existsSync(indexFile)) {
|
||||
// Install dependencies
|
||||
let result = childProcess.spawnSync("npm", [ "install" ], {
|
||||
cwd: this.pluginDir,
|
||||
env: {
|
||||
...process.env,
|
||||
PLAYWRIGHT_BROWSERS_PATH: "../../browsers", // Special handling for read-browser-monitor
|
||||
}
|
||||
});
|
||||
|
||||
if (result.stdout) {
|
||||
log.info("plugin", "Install dependencies result: " + result.stdout.toString("utf-8"));
|
||||
} else {
|
||||
log.warn("plugin", "Install dependencies result: no output");
|
||||
}
|
||||
|
||||
this.pluginClass = require(path.join(process.cwd(), indexFile));
|
||||
|
||||
let pluginClassType = typeof this.pluginClass;
|
||||
|
||||
if (pluginClassType === "function") {
|
||||
this.object = new this.pluginClass(this.server);
|
||||
await this.object.load();
|
||||
} else {
|
||||
throw new Error("Invalid plugin, it does not export a class");
|
||||
}
|
||||
|
||||
if (fs.existsSync(packageJSON)) {
|
||||
this.info = require(path.join(process.cwd(), packageJSON));
|
||||
} else {
|
||||
this.info.fullName = this.pluginDir;
|
||||
this.info.name = "[unknown]";
|
||||
this.info.version = "[unknown-version]";
|
||||
}
|
||||
|
||||
this.info.installed = true;
|
||||
log.info("plugin", `${this.info.fullName} v${this.info.version} loaded`);
|
||||
}
|
||||
}
|
||||
|
||||
async unload() {
|
||||
await this.object.unload();
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
PluginsManager,
|
||||
PluginWrapper
|
||||
};
|
@@ -28,7 +28,7 @@ const monitorResponseTime = new PrometheusClient.Gauge({
|
||||
|
||||
const monitorStatus = new PrometheusClient.Gauge({
|
||||
name: "monitor_status",
|
||||
help: "Monitor Status (1 = UP, 0= DOWN)",
|
||||
help: "Monitor Status (1 = UP, 0= DOWN, 2= PENDING, 3= MAINTENANCE)",
|
||||
labelNames: commonLabels
|
||||
});
|
||||
|
||||
@@ -79,23 +79,25 @@ class Prometheus {
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
monitorStatus.set(this.monitorLabelValues, heartbeat.status);
|
||||
} catch (e) {
|
||||
log.error("prometheus", "Caught error");
|
||||
log.error("prometheus", e);
|
||||
}
|
||||
|
||||
try {
|
||||
if (typeof heartbeat.ping === "number") {
|
||||
monitorResponseTime.set(this.monitorLabelValues, heartbeat.ping);
|
||||
} else {
|
||||
// Is it good?
|
||||
monitorResponseTime.set(this.monitorLabelValues, -1);
|
||||
if (heartbeat) {
|
||||
try {
|
||||
monitorStatus.set(this.monitorLabelValues, heartbeat.status);
|
||||
} catch (e) {
|
||||
log.error("prometheus", "Caught error");
|
||||
log.error("prometheus", e);
|
||||
}
|
||||
|
||||
try {
|
||||
if (typeof heartbeat.ping === "number") {
|
||||
monitorResponseTime.set(this.monitorLabelValues, heartbeat.ping);
|
||||
} else {
|
||||
// Is it good?
|
||||
monitorResponseTime.set(this.monitorLabelValues, -1);
|
||||
}
|
||||
} catch (e) {
|
||||
log.error("prometheus", "Caught error");
|
||||
log.error("prometheus", e);
|
||||
}
|
||||
} catch (e) {
|
||||
log.error("prometheus", "Caught error");
|
||||
log.error("prometheus", e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -1,15 +1,22 @@
|
||||
let express = require("express");
|
||||
const { allowDevAllOrigin, allowAllOrigin, percentageToColor, filterAndJoin, sendHttpError } = require("../util-server");
|
||||
const {
|
||||
setting,
|
||||
allowDevAllOrigin,
|
||||
allowAllOrigin,
|
||||
percentageToColor,
|
||||
filterAndJoin,
|
||||
sendHttpError,
|
||||
} = require("../util-server");
|
||||
const { R } = require("redbean-node");
|
||||
const apicache = require("../modules/apicache");
|
||||
const Monitor = require("../model/monitor");
|
||||
const dayjs = require("dayjs");
|
||||
const { UP, MAINTENANCE, DOWN, PENDING, flipStatus, log } = require("../../src/util");
|
||||
const { UP, MAINTENANCE, DOWN, PENDING, flipStatus, log, badgeConstants } = require("../../src/util");
|
||||
const StatusPage = require("../model/status_page");
|
||||
const { UptimeKumaServer } = require("../uptime-kuma-server");
|
||||
const { UptimeCacheList } = require("../uptime-cache-list");
|
||||
const { makeBadge } = require("badge-maker");
|
||||
const { badgeConstants } = require("../config");
|
||||
const { Prometheus } = require("../prometheus");
|
||||
|
||||
let router = express.Router();
|
||||
|
||||
@@ -21,10 +28,14 @@ router.get("/api/entry-page", async (request, response) => {
|
||||
allowDevAllOrigin(response);
|
||||
|
||||
let result = { };
|
||||
let hostname = request.hostname;
|
||||
if ((await setting("trustProxy")) && request.headers["x-forwarded-host"]) {
|
||||
hostname = request.headers["x-forwarded-host"];
|
||||
}
|
||||
|
||||
if (request.hostname in StatusPage.domainMappingList) {
|
||||
if (hostname in StatusPage.domainMappingList) {
|
||||
result.type = "statusPageMatchedDomain";
|
||||
result.statusPageSlug = StatusPage.domainMappingList[request.hostname];
|
||||
result.statusPageSlug = StatusPage.domainMappingList[hostname];
|
||||
} else {
|
||||
result.type = "entryPage";
|
||||
result.entryPage = server.entryPage;
|
||||
@@ -37,7 +48,7 @@ router.get("/api/push/:pushToken", async (request, response) => {
|
||||
|
||||
let pushToken = request.params.pushToken;
|
||||
let msg = request.query.msg || "OK";
|
||||
let ping = request.query.ping || null;
|
||||
let ping = parseFloat(request.query.ping) || null;
|
||||
let statusString = request.query.status || "up";
|
||||
let status = (statusString === "up") ? UP : DOWN;
|
||||
|
||||
@@ -89,6 +100,7 @@ router.get("/api/push/:pushToken", async (request, response) => {
|
||||
io.to(monitor.user_id).emit("heartbeat", bean.toJSON());
|
||||
UptimeCacheList.clearCache(monitor.id);
|
||||
Monitor.sendStats(io, monitor.id, monitor.user_id);
|
||||
new Prometheus(monitor).update(bean, undefined);
|
||||
|
||||
response.json({
|
||||
ok: true,
|
||||
@@ -440,7 +452,7 @@ router.get("/api/badge/:id/cert-exp", cache("5 minutes"), async (request, respon
|
||||
if (!tlsInfo.valid) {
|
||||
// return a "Bad Cert" badge in naColor (grey), when cert is not valid
|
||||
badgeValues.message = "Bad Cert";
|
||||
badgeValues.color = badgeConstants.downColor;
|
||||
badgeValues.color = downColor;
|
||||
} else {
|
||||
const daysRemaining = parseInt(overrideValue ?? tlsInfo.certInfo.daysRemaining);
|
||||
|
||||
|
@@ -5,6 +5,8 @@ const StatusPage = require("../model/status_page");
|
||||
const { allowDevAllOrigin, sendHttpError } = require("../util-server");
|
||||
const { R } = require("redbean-node");
|
||||
const Monitor = require("../model/monitor");
|
||||
const { badgeConstants } = require("../../src/util");
|
||||
const { makeBadge } = require("badge-maker");
|
||||
|
||||
let router = express.Router();
|
||||
|
||||
@@ -139,4 +141,100 @@ router.get("/api/status-page/:slug/manifest.json", cache("1440 minutes"), async
|
||||
}
|
||||
});
|
||||
|
||||
// overall status-page status badge
|
||||
router.get("/api/status-page/:slug/badge", cache("5 minutes"), async (request, response) => {
|
||||
allowDevAllOrigin(response);
|
||||
const slug = request.params.slug;
|
||||
const statusPageID = await StatusPage.slugToID(slug);
|
||||
const {
|
||||
label,
|
||||
upColor = badgeConstants.defaultUpColor,
|
||||
downColor = badgeConstants.defaultDownColor,
|
||||
partialColor = "#F6BE00",
|
||||
maintenanceColor = "#808080",
|
||||
style = badgeConstants.defaultStyle
|
||||
} = request.query;
|
||||
|
||||
try {
|
||||
let monitorIDList = await R.getCol(`
|
||||
SELECT monitor_group.monitor_id FROM monitor_group, \`group\`
|
||||
WHERE monitor_group.group_id = \`group\`.id
|
||||
AND public = 1
|
||||
AND \`group\`.status_page_id = ?
|
||||
`, [
|
||||
statusPageID
|
||||
]);
|
||||
|
||||
let hasUp = false;
|
||||
let hasDown = false;
|
||||
let hasMaintenance = false;
|
||||
|
||||
for (let monitorID of monitorIDList) {
|
||||
// retrieve the latest heartbeat
|
||||
let beat = await R.getAll(`
|
||||
SELECT * FROM heartbeat
|
||||
WHERE monitor_id = ?
|
||||
ORDER BY time DESC
|
||||
LIMIT 1
|
||||
`, [
|
||||
monitorID,
|
||||
]);
|
||||
|
||||
// to be sure, when corresponding monitor not found
|
||||
if (beat.length === 0) {
|
||||
continue;
|
||||
}
|
||||
// handle status of beat
|
||||
if (beat[0].status === 3) {
|
||||
hasMaintenance = true;
|
||||
} else if (beat[0].status === 2) {
|
||||
// ignored
|
||||
} else if (beat[0].status === 1) {
|
||||
hasUp = true;
|
||||
} else {
|
||||
hasDown = true;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
const badgeValues = { style };
|
||||
|
||||
if (!hasUp && !hasDown && !hasMaintenance) {
|
||||
// return a "N/A" badge in naColor (grey), if monitor is not public / not available / non exsitant
|
||||
|
||||
badgeValues.message = "N/A";
|
||||
badgeValues.color = badgeConstants.naColor;
|
||||
|
||||
} else {
|
||||
if (hasMaintenance) {
|
||||
badgeValues.label = label ? label : "";
|
||||
badgeValues.color = maintenanceColor;
|
||||
badgeValues.message = "Maintenance";
|
||||
} else if (hasUp && !hasDown) {
|
||||
badgeValues.label = label ? label : "";
|
||||
badgeValues.color = upColor;
|
||||
badgeValues.message = "Up";
|
||||
} else if (hasUp && hasDown) {
|
||||
badgeValues.label = label ? label : "";
|
||||
badgeValues.color = partialColor;
|
||||
badgeValues.message = "Degraded";
|
||||
} else {
|
||||
badgeValues.label = label ? label : "";
|
||||
badgeValues.color = downColor;
|
||||
badgeValues.message = "Down";
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// build the svg based on given values
|
||||
const svg = makeBadge(badgeValues);
|
||||
|
||||
response.type("image/svg+xml");
|
||||
response.send(svg);
|
||||
|
||||
} catch (error) {
|
||||
sendHttpError(response, error.message);
|
||||
}
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
|
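As a usage note for the new endpoint, the overall badge can be fetched per status page; the host and slug below are placeholders:

// Hypothetical request (host and slug are placeholders):
// GET https://uptime.example.com/api/status-page/home/badge?style=flat&label=Status
// Responds with an SVG whose message is "Up", "Degraded", "Down", "Maintenance" or "N/A".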
189
server/server.js
@@ -15,15 +15,27 @@ dayjs.extend(require("dayjs/plugin/customParseFormat"));
|
||||
require("dotenv").config();
|
||||
|
||||
// Check Node.js Version
|
||||
const nodeVersion = parseInt(process.versions.node.split(".")[0]);
|
||||
const requiredVersion = 14;
|
||||
const nodeVersion = process.versions.node;
|
||||
|
||||
// Get the required Node.js version from package.json
|
||||
const requiredNodeVersions = require("../package.json").engines.node;
|
||||
const bannedNodeVersions = " < 14 || 20.0.* || 20.1.* || 20.2.* || 20.3.* ";
|
||||
console.log(`Your Node.js version: ${nodeVersion}`);
|
||||
|
||||
if (nodeVersion < requiredVersion) {
|
||||
console.error(`Error: Your Node.js version is not supported, please upgrade to Node.js >= ${requiredVersion}.`);
|
||||
const semver = require("semver");
|
||||
const requiredNodeVersionsComma = requiredNodeVersions.split("||").map((version) => version.trim()).join(", ");
|
||||
|
||||
// Exit Uptime Kuma immediately if the Node.js version is banned
|
||||
if (semver.satisfies(nodeVersion, bannedNodeVersions)) {
|
||||
console.error("\x1b[31m%s\x1b[0m", `Error: Your Node.js version: ${nodeVersion} is not supported, please upgrade your Node.js to ${requiredNodeVersionsComma}.`);
|
||||
process.exit(-1);
|
||||
}
|
||||
|
||||
// Warning if the Node.js version is not in the support list, but it maybe still works
|
||||
if (!semver.satisfies(nodeVersion, requiredNodeVersions)) {
|
||||
console.warn("\x1b[31m%s\x1b[0m", `Warning: Your Node.js version: ${nodeVersion} is not officially supported, please upgrade your Node.js to ${requiredNodeVersionsComma}.`);
|
||||
}
|
||||
|
||||
const args = require("args-parser")(process.argv);
|
||||
const { sleep, log, getRandomInt, genSecret, isDev } = require("../src/util");
|
||||
const config = require("./config");
|
||||
@@ -36,7 +48,16 @@ if (! process.env.NODE_ENV) {
|
||||
process.env.NODE_ENV = "production";
|
||||
}
|
||||
|
||||
if (!process.env.UPTIME_KUMA_WS_ORIGIN_CHECK) {
|
||||
process.env.UPTIME_KUMA_WS_ORIGIN_CHECK = "cors-like";
|
||||
}
|
||||
|
||||
log.info("server", "Node Env: " + process.env.NODE_ENV);
|
||||
log.info("server", "Inside Container: " + (process.env.UPTIME_KUMA_IS_CONTAINER === "1"));
|
||||
|
||||
if (process.env.UPTIME_KUMA_WS_ORIGIN_CHECK === "bypass") {
|
||||
log.warn("server", "WebSocket Origin Check: " + process.env.UPTIME_KUMA_WS_ORIGIN_CHECK);
|
||||
}
|
||||
|
||||
log.info("server", "Importing Node libraries");
|
||||
const fs = require("fs");
|
||||
@@ -63,15 +84,18 @@ const notp = require("notp");
|
||||
const base32 = require("thirty-two");
|
||||
|
||||
const { UptimeKumaServer } = require("./uptime-kuma-server");
|
||||
const server = UptimeKumaServer.getInstance(args);
|
||||
const server = UptimeKumaServer.getInstance();
|
||||
const io = module.exports.io = server.io;
|
||||
const app = server.app;
|
||||
|
||||
log.info("server", "Importing this project modules");
|
||||
log.debug("server", "Importing Monitor");
|
||||
const Monitor = require("./model/monitor");
|
||||
const User = require("./model/user");
|
||||
|
||||
log.debug("server", "Importing Settings");
|
||||
const { getSettings, setSettings, setting, initJWTSecret, checkLogin, startUnitTest, FBSD, doubleCheckPassword, startE2eTests } = require("./util-server");
|
||||
const { getSettings, setSettings, setting, initJWTSecret, checkLogin, startUnitTest, doubleCheckPassword, startE2eTests, shake256, SHAKE256_LENGTH
|
||||
} = require("./util-server");
|
||||
|
||||
log.debug("server", "Importing Notification");
|
||||
const { Notification } = require("./notification");
|
||||
@@ -94,19 +118,13 @@ const passwordHash = require("./password-hash");
const checkVersion = require("./check-version");
log.info("server", "Version: " + checkVersion.version);

// If host is omitted, the server will accept connections on the unspecified IPv6 address (::) when IPv6 is available and the unspecified IPv4 address (0.0.0.0) otherwise.
// Dual-stack support for (::)
// Also read HOST if not FreeBSD, as HOST is a system environment variable in FreeBSD
let hostEnv = FBSD ? null : process.env.HOST;
let hostname = args.host || process.env.UPTIME_KUMA_HOST || hostEnv;
const hostname = config.hostname;

if (hostname) {
log.info("server", "Custom hostname: " + hostname);
}

const port = [ args.port, process.env.UPTIME_KUMA_PORT, process.env.PORT, 3001 ]
.map(portValue => parseInt(portValue))
.find(portValue => !isNaN(portValue));
const port = config.port;

const disableFrameSameOrigin = !!process.env.UPTIME_KUMA_DISABLE_FRAME_SAMEORIGIN || args["disable-frame-sameorigin"] || false;
const cloudflaredToken = args["cloudflared-token"] || process.env.UPTIME_KUMA_CLOUDFLARED_TOKEN || undefined;
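The hostname/port plumbing above moves into ./config; for reference, the removed fallback chain picked the first candidate that parses as a number, in priority order. A small runnable sketch (all values hypothetical):

// Illustrative only — mirrors the removed fallback chain with made-up inputs.
const args = {};                    // e.g. no --port flag given
process.env.UPTIME_KUMA_PORT = "";  // unset/empty
process.env.PORT = "8080";

const port = [ args.port, process.env.UPTIME_KUMA_PORT, process.env.PORT, 3001 ]
    .map((portValue) => parseInt(portValue))
    .find((portValue) => !isNaN(portValue));

console.log(port); // 8080 — only falls back to 3001 if nothing else parses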
@@ -142,8 +160,8 @@ const { apiKeySocketHandler } = require("./socket-handlers/api-key-socket-handle
const { generalSocketHandler } = require("./socket-handlers/general-socket-handler");
const { Settings } = require("./settings");
const { CacheableDnsHttpAgent } = require("./cacheable-dns-http-agent");
const { pluginsHandler } = require("./socket-handlers/plugins-handler");
const apicache = require("./modules/apicache");
const { resetChrome } = require("./monitor-types/real-browser-monitor-type");

app.use(express.json());

@@ -156,12 +174,6 @@ app.use(function (req, res, next) {
next();
});

/**
* Use for decode the auth object
* @type {null}
*/
let jwtSecret = null;

/**
* Show Setup Page
* @type {boolean}
@@ -172,7 +184,6 @@ let needSetup = false;
Database.init(args);
await initDatabase(testMode);
await server.initAfterDatabaseReady();
server.loadPlugins();
server.entryPage = await Settings.get("entryPage");
await StatusPage.loadDomainMappingList();

@@ -210,6 +221,7 @@ let needSetup = false;
});

if (isDev) {
app.use(express.urlencoded({ extended: true }));
app.post("/test-webhook", async (request, response) => {
log.debug("test", request.headers);
log.debug("test", request.body);
@@ -264,7 +276,7 @@ let needSetup = false;
log.info("server", "Adding socket handler");
io.on("connection", async (socket) => {

sendInfo(socket);
sendInfo(socket, true);

if (needSetup) {
log.info("server", "Redirect to setup page");
@@ -281,7 +293,7 @@ let needSetup = false;
log.info("auth", `Login by token. IP=${clientIP}`);

try {
let decoded = jwt.verify(token, jwtSecret);
let decoded = jwt.verify(token, server.jwtSecret);

log.info("auth", "Username from JWT: " + decoded.username);

@@ -290,6 +302,11 @@ let needSetup = false;
]);

if (user) {
// Check if the password changed
if (decoded.h !== shake256(user.password, SHAKE256_LENGTH)) {
throw new Error("The token is invalid due to password change or old token");
}

log.debug("auth", "afterLogin");
afterLogin(socket, user);
log.debug("auth", "afterLogin ok");
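The decoded.h check above ties a token to a digest of the stored password hash, so changing the password invalidates previously issued JWTs. A self-contained sketch of that idea (secret, stored hash values and digest length are made up; the project's shake256/SHAKE256_LENGTH come from util-server):

// Illustrative only — not the project's util-server implementation.
const crypto = require("crypto");
const jwt = require("jsonwebtoken");

const SHAKE256_LENGTH = 16; // bytes; assumed for this sketch

function shake256(data, length) {
    return crypto.createHash("shake256", { outputLength: length }).update(data).digest("hex");
}

const jwtSecret = "example-secret";                                  // hypothetical
const user = { username: "admin", password: "$2a$10$oldBcryptHash" }; // hypothetical stored hash

const token = jwt.sign({ username: user.username, h: shake256(user.password, SHAKE256_LENGTH) }, jwtSecret);

// Later, on "loginByToken":
const decoded = jwt.verify(token, jwtSecret);
console.log(decoded.h === shake256(user.password, SHAKE256_LENGTH)); // true

user.password = "$2a$10$newBcryptHash";                              // password changed
console.log(decoded.h === shake256(user.password, SHAKE256_LENGTH)); // false -> token rejected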
@@ -309,9 +326,10 @@ let needSetup = false;
});
}
} catch (error) {

log.error("auth", `Invalid token. IP=${clientIP}`);

if (error.message) {
log.error("auth", error.message, `IP=${clientIP}`);
}
callback({
ok: false,
msg: "Invalid token.",
@@ -350,9 +368,7 @@ let needSetup = false;

callback({
ok: true,
token: jwt.sign({
username: data.username,
}, jwtSecret),
token: User.createJWT(user, server.jwtSecret),
});
}

@@ -380,9 +396,7 @@ let needSetup = false;

callback({
ok: true,
token: jwt.sign({
username: data.username,
}, jwtSecret),
token: User.createJWT(user, server.jwtSecret),
});
} else {

@@ -634,9 +648,16 @@ let needSetup = false;
let notificationIDList = monitor.notificationIDList;
delete monitor.notificationIDList;

// Ensure status code ranges are strings
if (!monitor.accepted_statuscodes.every((code) => typeof code === "string")) {
throw new Error("Accepted status codes are not all strings");
}
monitor.accepted_statuscodes_json = JSON.stringify(monitor.accepted_statuscodes);
delete monitor.accepted_statuscodes;

monitor.kafkaProducerBrokers = JSON.stringify(monitor.kafkaProducerBrokers);
monitor.kafkaProducerSaslOptions = JSON.stringify(monitor.kafkaProducerSaslOptions);

bean.import(monitor);
bean.user_id = socket.userID;

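The accepted_statuscodes guard added above expects every entry to already be a string (single codes or ranges) before the array is serialised into accepted_statuscodes_json. For example:

// Illustrative only — shows what the guard accepts and rejects.
const ok = [ "200-299", "301" ];
console.log(ok.every((code) => typeof code === "string")); // true
console.log(JSON.stringify(ok));                           // '["200-299","301"]'

const bad = [ 200, "301" ];
console.log(bad.every((code) => typeof code === "string")); // false -> "Accepted status codes are not all strings"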
@@ -647,7 +668,10 @@ let needSetup = false;
await updateMonitorNotification(bean.id, notificationIDList);

await server.sendMonitorList(socket);
await startMonitor(socket.userID, bean.id);

if (monitor.active !== false) {
await startMonitor(socket.userID, bean.id);
}

log.info("monitor", `Added Monitor: ${monitor.id} User ID: ${socket.userID}`);

@@ -671,6 +695,7 @@ let needSetup = false;
// Edit a monitor
socket.on("editMonitor", async (monitor, callback) => {
try {
let removeGroupChildren = false;
checkLogin(socket);

let bean = await R.findOne("monitor", " id = ? ", [ monitor.id ]);
@@ -679,8 +704,27 @@ let needSetup = false;
throw new Error("Permission denied.");
}

// Check if Parent is Descendant (would cause endless loop)
if (monitor.parent !== null) {
const childIDs = await Monitor.getAllChildrenIDs(monitor.id);
if (childIDs.includes(monitor.parent)) {
throw new Error("Invalid Monitor Group");
}
}

// Remove children if monitor type has changed (from group to non-group)
if (bean.type === "group" && monitor.type !== bean.type) {
removeGroupChildren = true;
}

// Ensure status code ranges are strings
if (!monitor.accepted_statuscodes.every((code) => typeof code === "string")) {
throw new Error("Accepted status codes are not all strings");
}

bean.name = monitor.name;
bean.description = monitor.description;
bean.parent = monitor.parent;
bean.type = monitor.type;
bean.url = monitor.url;
bean.method = monitor.method;
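The parent/descendant guard added near the top of this hunk prevents a monitor group from being re-parented under one of its own children, which would create a cycle. A tiny self-contained sketch of why the check works (the tree and IDs are made up; getAllChildrenIDs here stands in for Monitor.getAllChildrenIDs):

// Illustrative only — hypothetical tree: group 1 contains 2, and 2 contains 3.
const childrenOf = { 1: [ 2 ], 2: [ 3 ], 3: [] };

function getAllChildrenIDs(id) {
    return childrenOf[id].flatMap((child) => [ child, ...getAllChildrenIDs(child) ]);
}

const edit = { id: 1, parent: 3 }; // attempt to hang group 1 under its own grandchild
console.log(getAllChildrenIDs(edit.id).includes(edit.parent)); // true -> "Invalid Monitor Group"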
@@ -688,6 +732,12 @@ let needSetup = false;
bean.headers = monitor.headers;
bean.basic_auth_user = monitor.basic_auth_user;
bean.basic_auth_pass = monitor.basic_auth_pass;
bean.timeout = monitor.timeout;
bean.oauth_client_id = monitor.oauth_client_id;
bean.oauth_client_secret = monitor.oauth_client_secret;
bean.oauth_auth_method = monitor.oauth_auth_method;
bean.oauth_token_url = monitor.oauth_token_url;
bean.oauth_scopes = monitor.oauth_scopes;
bean.tlsCa = monitor.tlsCa;
bean.tlsCert = monitor.tlsCert;
bean.tlsKey = monitor.tlsKey;
@@ -699,6 +749,7 @@ let needSetup = false;
bean.maxretries = monitor.maxretries;
bean.port = parseInt(monitor.port);
bean.keyword = monitor.keyword;
bean.invertKeyword = monitor.invertKeyword;
bean.ignoreTls = monitor.ignoreTls;
bean.expiryNotification = monitor.expiryNotification;
bean.upsideDown = monitor.upsideDown;
@@ -733,14 +784,29 @@ let needSetup = false;
bean.radiusCallingStationId = monitor.radiusCallingStationId;
bean.radiusSecret = monitor.radiusSecret;
bean.httpBodyEncoding = monitor.httpBodyEncoding;
bean.expectedValue = monitor.expectedValue;
bean.jsonPath = monitor.jsonPath;
bean.kafkaProducerTopic = monitor.kafkaProducerTopic;
bean.kafkaProducerBrokers = JSON.stringify(monitor.kafkaProducerBrokers);
bean.kafkaProducerAllowAutoTopicCreation = monitor.kafkaProducerAllowAutoTopicCreation;
bean.kafkaProducerSaslOptions = JSON.stringify(monitor.kafkaProducerSaslOptions);
bean.kafkaProducerMessage = monitor.kafkaProducerMessage;
bean.kafkaProducerSsl = monitor.kafkaProducerSsl;
bean.kafkaProducerAllowAutoTopicCreation =
monitor.kafkaProducerAllowAutoTopicCreation;
bean.gamedigGivenPortOnly = monitor.gamedigGivenPortOnly;

bean.validate();

await R.store(bean);

if (removeGroupChildren) {
await Monitor.unlinkAllChildren(monitor.id);
}

await updateMonitorNotification(bean.id, monitor.notificationIDList);

if (bean.active) {
if (await bean.isActive()) {
await restartMonitor(socket.userID, bean.id);
}

@@ -883,6 +949,8 @@ let needSetup = false;
delete server.monitorList[monitorID];
}

const startTime = Date.now();

await R.exec("DELETE FROM monitor WHERE id = ? AND user_id = ? ", [
monitorID,
socket.userID,
@@ -891,6 +959,10 @@ let needSetup = false;
// Fix #2880
apicache.clear();

const endTime = Date.now();

log.info("DB", `Delete Monitor completed in : ${endTime - startTime} ms`);

callback({
ok: true,
msg: "Deleted Successfully.",
@@ -1054,9 +1126,6 @@ let needSetup = false;
value,
]);

// Cleanup unused Tags
await R.exec("delete from tag where ( select count(*) from monitor_tag mt where tag.id = mt.tag_id ) = 0");

callback({
ok: true,
msg: "Deleted Successfully.",
@@ -1085,6 +1154,8 @@ let needSetup = false;
let user = await doubleCheckPassword(socket, password.currentPassword);
await user.resetPassword(password.newPassword);

server.disconnectAllSocketClients(user.id, socket.id);

callback({
ok: true,
msg: "Password has been updated successfully.",
@@ -1134,6 +1205,15 @@ let needSetup = false;
await doubleCheckPassword(socket, currentPassword);
}

// Log out all clients if enabling auth
// GHSA-23q2-5gf8-gjpp
if (currentDisabledAuth && !data.disableAuth) {
server.disconnectAllSocketClients(socket.userID, socket.id);
}

const previousChromeExecutable = await Settings.get("chromeExecutable");
const previousNSCDStatus = await Settings.get("nscd");

await setSettings("general", data);
server.entryPage = data.entryPage;

@@ -1144,6 +1224,21 @@ let needSetup = false;
await server.setTimezone(data.serverTimezone);
}

// If Chrome Executable is changed, need to reset the browser
if (previousChromeExecutable !== data.chromeExecutable) {
log.info("settings", "Chrome executable is changed. Resetting Chrome...");
await resetChrome();
}

// Update nscd status
if (previousNSCDStatus !== data.nscd) {
if (data.nscd) {
await server.startNSCDServices();
} else {
await server.stopNSCDServices();
}
}

callback({
ok: true,
msg: "Saved"
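The settings handler above captures the previous chromeExecutable and nscd values before saving, then compares them afterwards so the side effects (resetting Chrome, starting or stopping nscd) only run when those settings actually changed. A stripped-down sketch of that pattern (setting names reused from the diff, values and output hypothetical):

// Illustrative only — not the project's settings code.
const previous = { chromeExecutable: "/usr/bin/chromium", nscd: false };
const incoming = { chromeExecutable: "/usr/bin/chromium", nscd: true };

if (previous.chromeExecutable !== incoming.chromeExecutable) {
    console.log("Chrome executable changed -> would reset the browser");
}
if (previous.nscd !== incoming.nscd) {
    console.log(incoming.nscd ? "would start nscd" : "would stop nscd");
}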
@@ -1313,6 +1408,7 @@ let needSetup = false;

// Define default values
let retryInterval = 0;
let timeout = monitorListData[i].timeout || (monitorListData[i].interval * 0.8); // fallback to old value

/*
Only replace the default value with the backup file data for the specific version, where it appears the first time
@@ -1338,6 +1434,7 @@ let needSetup = false;
basic_auth_pass: monitorListData[i].basic_auth_pass,
authWorkstation: monitorListData[i].authWorkstation,
authDomain: monitorListData[i].authDomain,
timeout,
interval: monitorListData[i].interval,
retryInterval: retryInterval,
resendInterval: monitorListData[i].resendInterval || 0,
@@ -1345,13 +1442,14 @@ let needSetup = false;
maxretries: monitorListData[i].maxretries,
port: monitorListData[i].port,
keyword: monitorListData[i].keyword,
invertKeyword: monitorListData[i].invertKeyword,
ignoreTls: monitorListData[i].ignoreTls,
upsideDown: monitorListData[i].upsideDown,
maxredirects: monitorListData[i].maxredirects,
accepted_statuscodes: monitorListData[i].accepted_statuscodes,
dns_resolve_type: monitorListData[i].dns_resolve_type,
dns_resolve_server: monitorListData[i].dns_resolve_server,
notificationIDList: {},
notificationIDList: monitorListData[i].notificationIDList,
proxy_id: monitorListData[i].proxy_id || null,
};

@@ -1513,7 +1611,6 @@ let needSetup = false;
maintenanceSocketHandler(socket);
apiKeySocketHandler(socket);
generalSocketHandler(socket, server);
pluginsHandler(socket, server);

log.debug("server", "added all socket handlers");

@@ -1539,6 +1636,8 @@ let needSetup = false;
await shutdownFunction();
});

server.start();

server.httpServer.listen(port, hostname, () => {
if (hostname) {
log.info("server", `Listening on ${hostname}:${port}`);
@@ -1557,7 +1656,7 @@ let needSetup = false;
}
});

initBackgroundJobs(args);
await initBackgroundJobs();

// Start cloudflared at the end if configured
await cloudflaredAutoStart(cloudflaredToken);
@@ -1616,6 +1715,7 @@ async function afterLogin(socket, user) {
socket.join(user.id);

let monitorList = await server.sendMonitorList(socket);
sendInfo(socket);
server.sendMaintenanceList(socket);
sendNotificationList(socket);
sendProxyList(socket);
@@ -1683,7 +1783,7 @@ async function initDatabase(testMode = false) {
needSetup = true;
}

jwtSecret = jwtSecretBean.value;
server.jwtSecret = jwtSecretBean.value;
}

/**
@@ -1742,6 +1842,7 @@ async function pauseMonitor(userID, monitorID) {

if (monitorID in server.monitorList) {
server.monitorList[monitorID].stop();
server.monitorList[monitorID].active = 0;
}
}

@@ -1800,8 +1901,10 @@ gracefulShutdown(server.httpServer, {
});

// Catch unexpected errors here
process.addListener("unhandledRejection", (error, promise) => {
let unexpectedErrorHandler = (error, promise) => {
console.trace(error);
UptimeKumaServer.errorLog(error, false);
console.error("If you keep encountering errors, please report to https://github.com/louislam/uptime-kuma/issues");
});
};
process.addListener("unhandledRejection", unexpectedErrorHandler);
process.addListener("uncaughtException", unexpectedErrorHandler);

@@ -3,6 +3,7 @@ const { Settings } = require("../settings");
const { sendInfo } = require("../client");
const { checkLogin } = require("../util-server");
const GameResolver = require("gamedig/lib/GameResolver");
const { testChrome } = require("../monitor-types/real-browser-monitor-type");

let gameResolver = new GameResolver();
let gameList = null;
@@ -41,10 +42,50 @@ module.exports.generalSocketHandler = (socket, server) => {
});

socket.on("getGameList", async (callback) => {
callback({
ok: true,
gameList: getGameList(),
});
try {
checkLogin(socket);
callback({
ok: true,
gameList: getGameList(),
});
} catch (e) {
callback({
ok: false,
msg: e.message,
});
}
});

socket.on("testChrome", (executable, callback) => {
try {
checkLogin(socket);
// Just noticed that await call could block the whole socket.io server!!! Use pure promise instead.
testChrome(executable).then((version) => {
callback({
ok: true,
msg: "Found Chromium/Chrome. Version: " + version,
});
}).catch((e) => {
callback({
ok: false,
msg: e.message,
});
});
} catch (e) {
callback({
ok: false,
msg: e.message,
});
}
});

// Disconnect all other socket clients of the user
socket.on("disconnectOtherSocketClients", async () => {
try {
checkLogin(socket);
server.disconnectAllSocketClients(socket.userID, socket.id);
} catch (e) {
log.warn("disconnectAllSocketClients", e.message);
}
});
};
@@ -186,7 +186,7 @@ module.exports.maintenanceSocketHandler = (socket) => {

log.debug("maintenance", `Get Monitors for Maintenance: ${maintenanceID} User ID: ${socket.userID}`);

let monitors = await R.getAll("SELECT monitor.id, monitor.name FROM monitor_maintenance mm JOIN monitor ON mm.monitor_id = monitor.id WHERE mm.maintenance_id = ? ", [
let monitors = await R.getAll("SELECT monitor.id FROM monitor_maintenance mm JOIN monitor ON mm.monitor_id = monitor.id WHERE mm.maintenance_id = ? ", [
maintenanceID,
]);
Some files were not shown because too many files have changed in this diff.