Mirror of https://github.com/louislam/uptime-kuma.git (synced 2025-09-11 05:16:55 +08:00)

Compare commits (1101 commits)
@@ -1,11 +0,0 @@ (deleted file)
spec:
  name: uptime-kuma
  services:
  - name: server
    git:
      repo_clone_url: https://github.com/louislam/uptime-kuma
      branch: master
    http_port: 3001
    build_command: npm run setup
    run_command: npm run start-server

@@ -1,4 +1,46 @@
/.idea
/dist
/node_modules
/data/kuma.db
/data
/out
/test
/kubernetes
/.do
**/.dockerignore
/private
**/.git
**/.gitignore
**/docker-compose*
**/[Dd]ockerfile*
LICENSE
README.md
.editorconfig
.vscode
.eslint*
.stylelint*
/.github
package-lock.json
yarn.lock
app.json
CODE_OF_CONDUCT.md
CONTRIBUTING.md
CNAME
install.sh
SECURITY.md
tsconfig.json

### .gitignore content (commented rules are duplicated)

#node_modules
.DS_Store
#dist
dist-ssr
*.local
#.idea

#/data
#!/data/.gitkeep
#.vscode

### End of .gitignore content

@@ -16,3 +16,6 @@ indent_size = 2

[*.yml]
indent_size = 2

[*.vue]
trim_trailing_whitespace = false
96  .eslintrc.js  Normal file
@@ -0,0 +1,96 @@
module.exports = {
    root: true,
    env: {
        browser: true,
        commonjs: true,
        es2020: true,
        node: true,
    },
    extends: [
        "eslint:recommended",
        "plugin:vue/vue3-recommended",
    ],
    parser: "vue-eslint-parser",
    parserOptions: {
        parser: "@babel/eslint-parser",
        sourceType: "module",
        requireConfigFile: false,
    },
    rules: {
        "linebreak-style": ["error", "unix"],
        "camelcase": ["warn", {
            "properties": "never",
            "ignoreImports": true
        }],
        // override/add rules settings here, such as:
        // 'vue/no-unused-vars': 'error'
        "no-unused-vars": "warn",
        indent: [
            "error",
            4,
            {
                ignoredNodes: ["TemplateLiteral"],
                SwitchCase: 1,
            },
        ],
        quotes: ["warn", "double"],
        semi: "warn",
        "vue/html-indent": ["warn", 4], // default: 2
        "vue/max-attributes-per-line": "off",
        "vue/singleline-html-element-content-newline": "off",
        "vue/html-self-closing": "off",
        "vue/attribute-hyphenation": "off", // This change noNL to "no-n-l" unexpectedly
        "no-multi-spaces": ["error", {
            ignoreEOLComments: true,
        }],
        "space-before-function-paren": ["error", {
            "anonymous": "always",
            "named": "never",
            "asyncArrow": "always"
        }],
        "curly": "error",
        "object-curly-spacing": ["error", "always"],
        "object-curly-newline": "off",
        "object-property-newline": "error",
        "comma-spacing": "error",
        "brace-style": "error",
        "no-var": "error",
        "key-spacing": "warn",
        "keyword-spacing": "warn",
        "space-infix-ops": "warn",
        "arrow-spacing": "warn",
        "no-trailing-spaces": "warn",
        "no-constant-condition": ["error", {
            "checkLoops": false,
        }],
        "space-before-blocks": "warn",
        //'no-console': 'warn',
        "no-extra-boolean-cast": "off",
        "no-multiple-empty-lines": ["warn", {
            "max": 1,
            "maxBOF": 0,
        }],
        "lines-between-class-members": ["warn", "always", {
            exceptAfterSingleLine: true,
        }],
        "no-unneeded-ternary": "error",
        "array-bracket-newline": ["error", "consistent"],
        "eol-last": ["error", "always"],
        //'prefer-template': 'error',
        "comma-dangle": ["warn", "only-multiline"],
        "no-empty": ["error", {
            "allowEmptyCatch": true
        }],
        "no-control-regex": "off",
        "one-var": ["error", "never"],
        "max-statements-per-line": ["error", { "max": 1 }]
    },
    "overrides": [
        {
            "files": [ "src/languages/*.js", "src/icon.js" ],
            "rules": {
                "comma-dangle": ["error", "always-multiline"],
            }
        }
    ]
};
12  .github/FUNDING.yml  vendored  Normal file
@@ -0,0 +1,12 @@
# These are supported funding model platforms

#github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
#patreon: # Replace with a single Patreon username
open_collective: uptime-kuma # Replace with a single Open Collective username
#ko_fi: # Replace with a single Ko-fi username
#tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
#community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
#liberapay: # Replace with a single Liberapay username
#issuehunt: # Replace with a single IssueHunt username
#otechie: # Replace with a single Otechie username
#custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']

18  .github/ISSUE_TEMPLATE/ask-for-help.md  vendored  Normal file
@@ -0,0 +1,18 @@
---
name: Ask for help
about: You can ask any question related to Uptime Kuma.
title: ''
labels: help
assignees: ''

---
**Is it a duplicate question?**
Please search in Issues without filters: https://github.com/louislam/uptime-kuma/issues?q=

**Info**
Uptime Kuma Version:
Using Docker?: Yes/No
Docker Version:
Node.js Version (Without Docker only):
OS:
Browser:

42  .github/ISSUE_TEMPLATE/bug_report.md  vendored  Normal file
@@ -0,0 +1,42 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''

---

**Is it a duplicate question?**
Please search in Issues without filters: https://github.com/louislam/uptime-kuma/issues?q=

**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:

1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error

**Expected behavior**
A clear and concise description of what you expected to happen.

**Info**
Uptime Kuma Version:
Using Docker?: Yes/No
Docker Version:
Node.js Version (Without Docker only):
OS:
Browser:

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Error Log**
It is easier for us to find out the problem.

Docker: `docker logs <container id>`
PM2: `~/.pm2/logs/` (e.g. `/home/ubuntu/.pm2/logs`)

22  .github/ISSUE_TEMPLATE/feature_request.md  vendored  Normal file
@@ -0,0 +1,22 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: ''

---
**Is it a duplicate question?**
Please search in Issues without filters: https://github.com/louislam/uptime-kuma/issues?q=

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.

4  .gitignore  vendored
@@ -7,3 +7,7 @@ dist-ssr

/data
!/data/.gitkeep
.vscode

/private
/out

9  .stylelintrc  Normal file
@@ -0,0 +1,9 @@
{
    "extends": "stylelint-config-standard",
    "rules": {
        "indentation": 4,
        "no-descending-specificity": null,
        "selector-list-comma-newline-after": null,
        "declaration-empty-line-before": null
    }
}
128  CODE_OF_CONDUCT.md  Normal file
@@ -0,0 +1,128 @@
# Contributor Covenant Code of Conduct

## Our Pledge

We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.

We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.

## Our Standards

Examples of behavior that contributes to a positive environment for our
community include:

* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
  and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
  overall community

Examples of unacceptable behavior include:

* The use of sexualized language or imagery, and sexual attention or
  advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
  address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
  professional setting

## Enforcement Responsibilities

Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.

Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.

## Scope

This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
louis@uptimekuma.louislam.net.
All complaints will be reviewed and investigated promptly and fairly.

All community leaders are obligated to respect the privacy and security of the
reporter of any incident.

## Enforcement Guidelines

Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:

### 1. Correction

**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.

**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.

### 2. Warning

**Community Impact**: A violation through a single incident or series
of actions.

**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.

### 3. Temporary Ban

**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.

**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.

### 4. Permanent Ban

**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.

**Consequence**: A permanent ban from any sort of public interaction within
the community.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.

Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).

[homepage]: https://www.contributor-covenant.org

For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.
144  CONTRIBUTING.md  Normal file
@@ -0,0 +1,144 @@
# Project Info

First of all, thank you to everyone who has made pull requests for Uptime Kuma; I never thought the GitHub community could be this nice! Because of that, I also never expected other people to actually read and edit my code. It is not structured and commented all that well, sorry about that.

The project was created with vite.js (Vue 3). I then created a sub-directory called "server" for the server part. Both frontend and backend share the same package.json.

The frontend code is built into the "dist" directory, and the server serves "dist" as its root. This is how production works.

# Can I create a pull request for Uptime Kuma?

Generally, if the pull request works fine and does not affect any existing logic, workflow, or performance, I will merge it into the master branch once it is tested.

If you are not sure, feel free to create an empty pull request draft first.

## Pull Request Examples

### ✅ High - Medium Priority

- Add a new notification
- Add a chart
- Fix a bug

### *️⃣ Requires one more reviewer

I do not have the knowledge to test these myself.

- Add k8s support

### *️⃣ Low Priority

These change my current workflow and require further study.

- Change my release approach

### ❌ Won't Merge

- Duplicated pull request
- Buggy
- Existing logic is completely modified or deleted
- A function that is completely out of scope

# Project Styles

I personally do not like tools that require a lot of learning and configuration before you can finally start the app.

For example, because I am not a Python expert, I recently spent two hours resolving problems just to install and use the Apprise CLI. Apprise has so many hidden requirements that I had to figure out how to solve them myself by searching for my OS. That is painful, and I do not want Uptime Kuma to be like that, so:

- Easy to install for non-Docker users: no native build dependency is needed (at least for x86_64), no extra config, and no extra effort to get it running
- Single container for Docker users: no overly complex docker-compose file, just map the volume and expose the port, then you are good to go
- All settings in the frontend
- Easy to use

# Coding Styles

- Follow `.editorconfig`
- Follow ESLint

## Name convention

- JavaScript/TypeScript: camelCaseType
- SQLite: underscore_type
- CSS/SCSS: dash-type

# Tools

- Node.js >= 14
- Git
- IDE that supports EditorConfig and ESLint (I am using IntelliJ IDEA)
- A SQLite tool (I am using SQLite Expert Personal)

# Install dependencies

```bash
npm install --dev
```

For npm@7, you need --legacy-peer-deps:

```bash
npm install --legacy-peer-deps --dev
```

# Backend Dev

(2021-09-23 Update)

```bash
npm run start-server-dev
```

It binds to `0.0.0.0:3001` by default.

## Backend Details

It is mainly a socket.io app plus express.js.

express.js is only used to serve the built frontend files (index.html, .js, .css, etc.).
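To make that split concrete, here is a minimal sketch (assumed code, not the project's actual `server/server.js`) of an Express app that serves `dist` and has socket.io attached to the same HTTP server on port 3001; the `info` event and its payload are illustrative only:

```javascript
const http = require("http");
const express = require("express");
const { Server } = require("socket.io");

const app = express();

// Serve the built frontend from "dist", as described above.
app.use(express.static("dist"));

// Attach socket.io to the same HTTP server; application data flows over the socket.
const httpServer = http.createServer(app);
const io = new Server(httpServer);

io.on("connection", (socket) => {
    // The real handlers (auth, monitor list, heartbeats, ...) live in server/server.js.
    socket.emit("info", { message: "hello from the sketch" });
});

httpServer.listen(3001, "0.0.0.0");
```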
# Frontend Dev

Start the frontend dev server. Hot-reload is enabled this way. It binds to `0.0.0.0:3000` by default.

```bash
npm run dev
```

PS: You can ignore the scss warnings; they come from Bootstrap and I cannot fix them.

You can use the Vue.js devtools Chrome extension for debugging.

After the frontend dev server has started, it cannot connect to the websocket server even if you have started the backend. You need to tell the frontend that this is a dev environment by running this in the DevTools console and refreshing:

```javascript
localStorage.dev = "dev";
```

The frontend will then try to connect to the websocket server on port 3001.

Alternatively, you can set `NODE_ENV` to "development".
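For illustration, a minimal sketch of the kind of URL switch this flag enables (assumed code; the actual detection logic lives in `src/mixins/socket.js` and may differ):

```javascript
import { io } from "socket.io-client";

// The dev flag set above ("localStorage.dev = 'dev'") marks the app as running on the
// Vite dev server (port 3000), so the socket must point at the backend on port 3001.
// In production the page is served by the backend itself, so the same origin works.
const isDev = localStorage.dev === "dev";
const socket = io(isDev ? "http://localhost:3001" : location.origin);

socket.on("connect", () => {
    console.log("Connected to backend:", socket.id);
});
```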
## Build the frontend

```bash
npm run build
```

## Frontend Details

The Uptime Kuma frontend is a single-page application (SPA). Most paths are handled by Vue Router.

The router is in `src/router.js`.

Most frontend data is stored at the root level, even when you navigate to other pages via the router.

The data and socket logic are in `src/mixins/socket.js`.

# Database Migration

1. Create `patch{num}.sql` in `./db/`
2. Update `latestVersion` in `./server/database.js` (see the sketch below)
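A hypothetical sketch of the upgrade loop these two steps feed into (the real implementation in `./server/database.js` may differ; `db.exec()` stands in for whatever the SQLite driver exposes for running multi-statement SQL):

```javascript
const fs = require("fs");

const latestVersion = 10; // step 2: bump this after adding a new db/patch{num}.sql

async function patchDatabase(db, currentVersion) {
    // step 1: apply each missing patch file in order
    for (let i = currentVersion + 1; i <= latestVersion; i++) {
        const sql = fs.readFileSync(`./db/patch${i}.sql`, "utf8");
        await db.exec(sql);
    }
}
```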
# Unit Test

No unit tests for now. I know they are very important, but my spare time is very limited and I want to implement my ideas first. I will come back to this at some point.
114  README.md
@@ -1,5 +1,7 @@
# Uptime Kuma

<a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/stars/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/pulls/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/v/louislam/uptime-kuma/latest?label=docker%20image%20ver." /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/last-commit/louislam/uptime-kuma" /></a>

<div align="center" width="100%">
    <img src="./public/icon.svg" width="128" alt="" />
</div>

@@ -8,79 +10,84 @@ It is a self-hosted monitoring tool like "Uptime Robot".

<img src="https://louislam.net/uptimekuma/1.jpg" width="512" alt="" />

# Features
## 🥔 Live Demo

* Monitoring uptime for HTTP(s) / TCP / Ping.
Try it!

https://demo.uptime.kuma.pet

It is a 5-minute live demo; all data will be deleted afterwards. The server is located in Tokyo, so if you live far from there it may affect your experience. I suggest installing it to try it out.

The VPS is sponsored by Uptime Kuma sponsors on [Open Collective](https://opencollective.com/uptime-kuma)! Thank you so much!

## ⭐ Features

* Monitoring uptime for HTTP(s) / TCP / Ping / DNS Record.
* Fancy, Reactive, Fast UI/UX.
* Notifications via Webhook, Telegram, Discord and email (SMTP).
* Notifications via Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP), and [70+ notification services, click here for the full list](https://github.com/louislam/uptime-kuma/issues/284).
* 20-second intervals.
* [Multi Languages](https://github.com/louislam/uptime-kuma/tree/master/src/languages)

# How to Use
## 🔧 How to Install

### 🐳 Docker

### Docker
```bash
# Create a volume
docker volume create uptime-kuma

# Start the container
docker run -d --restart=always -p 3001:3001 -v uptime-kuma:/app/data --name uptime-kuma louislam/uptime-kuma
docker run -d --restart=always -p 3001:3001 -v uptime-kuma:/app/data --name uptime-kuma louislam/uptime-kuma:1
```

Browse to http://localhost:3001 after starting.

Change Port and Volume
### 💪🏻 Without Docker

Required Tools: Node.js >= 14, git and pm2.

```bash
docker run -d --restart=always -p <YOUR_PORT>:3001 -v <YOUR_DIR OR VOLUME>:/app/data --name uptime-kuma louislam/uptime-kuma
```
# Update your npm to the latest version
npm install npm -g

### Without Docker

Required Tools: Node.js >= 14, git and pm2.

```bash
git clone https://github.com/louislam/uptime-kuma.git
cd uptime-kuma
npm run setup

# Option 1. Try it
npm run start-server
node server/server.js

# (Recommended)
# Option 2. Run in background using PM2
# (Recommended) Option 2. Run in background using PM2
# Install PM2 if you don't have it: npm install pm2 -g
pm2 start npm --name uptime-kuma -- run start-server

# Listen on a different port or hostname
pm2 start npm --name uptime-kuma -- run start-server -- --port=80 --hostname=0.0.0.0

pm2 start server/server.js --name uptime-kuma
```

Browse to http://localhost:3001 after starting.

### One-click Deploy to DigitalOcean
### Advanced Installation

[](https://cloud.digitalocean.com/apps/new?repo=https://github.com/louislam/uptime-kuma/tree/master&refcode=e2c7eb658434)
If you need more options or need to browse via a reverse proxy, please read:

Choosing the cheapest plan is enough. (US$ 5)
https://github.com/louislam/uptime-kuma/wiki/%F0%9F%94%A7-How-to-Install

# How to Update
## 🆙 How to Update

### Docker
Please read:

Re-pull the latest docker image and create another container with the same volume.
https://github.com/louislam/uptime-kuma/wiki/%F0%9F%86%99-How-to-Update

### Without Docker
## 🆕 What's Next?

```bash
git fetch --all
git checkout 1.0.2 --force
npm install
npm run build
pm2 restart uptime-kuma
```
I will assign requests/issues to the next milestone.

# More Screenshots
https://github.com/louislam/uptime-kuma/milestones

Project Plan:

https://github.com/louislam/uptime-kuma/projects/1

## 🖼 More Screenshots

Dark Mode:

<img src="https://user-images.githubusercontent.com/1336778/128710166-908f8d88-9256-43f3-9c49-bfc2c56011d2.png" width="400" alt="" />

Settings Page:

@@ -90,16 +97,33 @@ Telegram Notification Sample:

<img src="https://louislam.net/uptimekuma/3.jpg" width="400" alt="" />

## Motivation

# Motivation

* I was looking for a self-hosted monitoring tool like "Uptime Robot", but it is hard to find a suitable one. One of the closest is statping. Unfortunately, it is not stable and is unmaintained.
* I was looking for a self-hosted monitoring tool like "Uptime Robot", but it is hard to find a suitable one. One of the closest ones is statping. Unfortunately, it is not stable and is unmaintained.
* Want to build a fancy UI.
* Learn Vue 3 and vite.js.
* Show the power of Bootstrap 5.
* Show the power of Bootstrap 5.
* Try to use WebSocket with SPA instead of REST API.
* Deploy my first Docker image to Docker Hub.

If you love this project, please consider giving me a ⭐.

## 🗣️ Discussion

### Issues Page
You can discuss or ask for help in [Issues](https://github.com/louislam/uptime-kuma/issues).

### Subreddit
My Reddit account: louislamlam
You can mention me if you ask a question on Reddit.
https://www.reddit.com/r/UptimeKuma/

## Contribute

If you want to report a bug or request a new feature, feel free to open a [new issue](https://github.com/louislam/uptime-kuma/issues).

If you want to translate Uptime Kuma into your language, please read: https://github.com/louislam/uptime-kuma/tree/master/src/languages

If you want to modify Uptime Kuma, this guideline may be useful for you: https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md

English proofreading is needed too because my grammar is sadly not that great. Feel free to correct my grammar in this readme, source code, or wiki.
15  SECURITY.md  Normal file
@@ -0,0 +1,15 @@
# Security Policy

## Supported Versions

Use this section to tell people about which versions of your project are
currently being supported with security updates.

| Version | Supported          |
| ------- | ------------------ |
| 1.x.x   | :white_check_mark: |

## Reporting a Vulnerability
Please report security issues to uptime@kuma.pet.

Do not use the issue tracker or discuss it in public, as that would cause more damage.
BIN
db/kuma.db
BIN
db/kuma.db
Binary file not shown.
10
db/patch-2fa.sql
Normal file
10
db/patch-2fa.sql
Normal file
@@ -0,0 +1,10 @@
|
||||
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
ALTER TABLE user
|
||||
ADD twofa_secret VARCHAR(64);
|
||||
|
||||
ALTER TABLE user
|
||||
ADD twofa_status BOOLEAN default 0 NOT NULL;
|
||||
|
||||
COMMIT;
|
7
db/patch-add-retry-interval-monitor.sql
Normal file
7
db/patch-add-retry-interval-monitor.sql
Normal file
@@ -0,0 +1,7 @@
|
||||
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
ALTER TABLE monitor
|
||||
ADD retry_interval INTEGER default 0 not null;
|
||||
|
||||
COMMIT;
|
30
db/patch-group-table.sql
Normal file
30
db/patch-group-table.sql
Normal file
@@ -0,0 +1,30 @@
|
||||
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
create table `group`
|
||||
(
|
||||
id INTEGER not null
|
||||
constraint group_pk
|
||||
primary key autoincrement,
|
||||
name VARCHAR(255) not null,
|
||||
created_date DATETIME default (DATETIME('now')) not null,
|
||||
public BOOLEAN default 0 not null,
|
||||
active BOOLEAN default 1 not null,
|
||||
weight BOOLEAN NOT NULL DEFAULT 1000
|
||||
);
|
||||
|
||||
CREATE TABLE [monitor_group]
|
||||
(
|
||||
[id] INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
[monitor_id] INTEGER NOT NULL REFERENCES [monitor] ([id]) ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
[group_id] INTEGER NOT NULL REFERENCES [group] ([id]) ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
weight BOOLEAN NOT NULL DEFAULT 1000
|
||||
);
|
||||
|
||||
CREATE INDEX [fk]
|
||||
ON [monitor_group] (
|
||||
[monitor_id],
|
||||
[group_id]);
|
||||
|
||||
|
||||
COMMIT;
|
10
db/patch-improve-performance.sql
Normal file
10
db/patch-improve-performance.sql
Normal file
@@ -0,0 +1,10 @@
|
||||
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
-- For sendHeartbeatList
|
||||
CREATE INDEX monitor_time_index ON heartbeat (monitor_id, time);
|
||||
|
||||
-- For sendImportantHeartbeatList
|
||||
CREATE INDEX monitor_important_time_index ON heartbeat (monitor_id, important,time);
|
||||
|
||||
COMMIT;
|
18
db/patch-incident-table.sql
Normal file
18
db/patch-incident-table.sql
Normal file
@@ -0,0 +1,18 @@
|
||||
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
create table incident
|
||||
(
|
||||
id INTEGER not null
|
||||
constraint incident_pk
|
||||
primary key autoincrement,
|
||||
title VARCHAR(255) not null,
|
||||
content TEXT not null,
|
||||
style VARCHAR(30) default 'warning' not null,
|
||||
created_date DATETIME default (DATETIME('now')) not null,
|
||||
last_updated_date DATETIME,
|
||||
pin BOOLEAN default 1 not null,
|
||||
active BOOLEAN default 1 not null
|
||||
);
|
||||
|
||||
COMMIT;
|
22
db/patch-setting-value-type.sql
Normal file
22
db/patch-setting-value-type.sql
Normal file
@@ -0,0 +1,22 @@
|
||||
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
-- Generated by Intellij IDEA
|
||||
create table setting_dg_tmp
|
||||
(
|
||||
id INTEGER
|
||||
primary key autoincrement,
|
||||
key VARCHAR(200) not null
|
||||
unique,
|
||||
value TEXT,
|
||||
type VARCHAR(20)
|
||||
);
|
||||
|
||||
insert into setting_dg_tmp(id, key, value, type) select id, key, value, type from setting;
|
||||
|
||||
drop table setting;
|
||||
|
||||
alter table setting_dg_tmp rename to setting;
|
||||
|
||||
|
||||
COMMIT;
|
37
db/patch1.sql
Normal file
37
db/patch1.sql
Normal file
@@ -0,0 +1,37 @@
|
||||
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||
-- Change Monitor.created_date from "TIMESTAMP" to "DATETIME"
|
||||
-- SQL Generated by Intellij Idea
|
||||
PRAGMA foreign_keys=off;
|
||||
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
create table monitor_dg_tmp
|
||||
(
|
||||
id INTEGER not null
|
||||
primary key autoincrement,
|
||||
name VARCHAR(150),
|
||||
active BOOLEAN default 1 not null,
|
||||
user_id INTEGER
|
||||
references user
|
||||
on update cascade on delete set null,
|
||||
interval INTEGER default 20 not null,
|
||||
url TEXT,
|
||||
type VARCHAR(20),
|
||||
weight INTEGER default 2000,
|
||||
hostname VARCHAR(255),
|
||||
port INTEGER,
|
||||
created_date DATETIME,
|
||||
keyword VARCHAR(255)
|
||||
);
|
||||
|
||||
insert into monitor_dg_tmp(id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword) select id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword from monitor;
|
||||
|
||||
drop table monitor;
|
||||
|
||||
alter table monitor_dg_tmp rename to monitor;
|
||||
|
||||
create index user_id on monitor (user_id);
|
||||
|
||||
COMMIT;
|
||||
|
||||
PRAGMA foreign_keys=on;
|
19
db/patch10.sql
Normal file
19
db/patch10.sql
Normal file
@@ -0,0 +1,19 @@
|
||||
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||
CREATE TABLE tag (
|
||||
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
color VARCHAR(255) NOT NULL,
|
||||
created_date DATETIME DEFAULT (DATETIME('now')) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE monitor_tag (
|
||||
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
|
||||
monitor_id INTEGER NOT NULL,
|
||||
tag_id INTEGER NOT NULL,
|
||||
value TEXT,
|
||||
CONSTRAINT FK_tag FOREIGN KEY (tag_id) REFERENCES tag(id) ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
CONSTRAINT FK_monitor FOREIGN KEY (monitor_id) REFERENCES monitor(id) ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
CREATE INDEX monitor_tag_monitor_id_index ON monitor_tag (monitor_id);
|
||||
CREATE INDEX monitor_tag_tag_id_index ON monitor_tag (tag_id);
|
9
db/patch2.sql
Normal file
9
db/patch2.sql
Normal file
@@ -0,0 +1,9 @@
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
CREATE TABLE monitor_tls_info (
|
||||
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
|
||||
monitor_id INTEGER NOT NULL,
|
||||
info_json TEXT
|
||||
);
|
||||
|
||||
COMMIT;
|
37
db/patch3.sql
Normal file
37
db/patch3.sql
Normal file
@@ -0,0 +1,37 @@
|
||||
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
|
||||
-- Add maxretries column to monitor
|
||||
PRAGMA foreign_keys=off;
|
||||
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
create table monitor_dg_tmp
|
||||
(
|
||||
id INTEGER not null
|
||||
primary key autoincrement,
|
||||
name VARCHAR(150),
|
||||
active BOOLEAN default 1 not null,
|
||||
user_id INTEGER
|
||||
references user
|
||||
on update cascade on delete set null,
|
||||
interval INTEGER default 20 not null,
|
||||
url TEXT,
|
||||
type VARCHAR(20),
|
||||
weight INTEGER default 2000,
|
||||
hostname VARCHAR(255),
|
||||
port INTEGER,
|
||||
created_date DATETIME,
|
||||
keyword VARCHAR(255),
|
||||
maxretries INTEGER NOT NULL DEFAULT 0
|
||||
);
|
||||
|
||||
insert into monitor_dg_tmp(id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword) select id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword from monitor;
|
||||
|
||||
drop table monitor;
|
||||
|
||||
alter table monitor_dg_tmp rename to monitor;
|
||||
|
||||
create index user_id on monitor (user_id);
|
||||
|
||||
COMMIT;
|
||||
|
||||
PRAGMA foreign_keys=on;
|
db/patch4.sql (new file, 40 lines)
@@ -0,0 +1,40 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
-- OK... something did go seriously wrong: the maxretries column was missing.
-- Developers should patch their db manually if the maxretries column is missing.
PRAGMA foreign_keys=off;

BEGIN TRANSACTION;

create table monitor_dg_tmp
(
    id INTEGER not null primary key autoincrement,
    name VARCHAR(150),
    active BOOLEAN default 1 not null,
    user_id INTEGER references user on update cascade on delete set null,
    interval INTEGER default 20 not null,
    url TEXT,
    type VARCHAR(20),
    weight INTEGER default 2000,
    hostname VARCHAR(255),
    port INTEGER,
    created_date DATETIME,
    keyword VARCHAR(255),
    maxretries INTEGER NOT NULL DEFAULT 0,
    ignore_tls BOOLEAN default 0 not null,
    upside_down BOOLEAN default 0 not null
);

insert into monitor_dg_tmp(id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword, maxretries) select id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword, maxretries from monitor;

drop table monitor;

alter table monitor_dg_tmp rename to monitor;

create index user_id on monitor (user_id);

COMMIT;

PRAGMA foreign_keys=on;
db/patch5.sql (new file, 70 lines)
@@ -0,0 +1,70 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
PRAGMA foreign_keys = off;

BEGIN TRANSACTION;

create table monitor_dg_tmp (
    id INTEGER not null primary key autoincrement,
    name VARCHAR(150),
    active BOOLEAN default 1 not null,
    user_id INTEGER references user on update cascade on delete set null,
    interval INTEGER default 20 not null,
    url TEXT,
    type VARCHAR(20),
    weight INTEGER default 2000,
    hostname VARCHAR(255),
    port INTEGER,
    created_date DATETIME default (DATETIME('now')) not null,
    keyword VARCHAR(255),
    maxretries INTEGER NOT NULL DEFAULT 0,
    ignore_tls BOOLEAN default 0 not null,
    upside_down BOOLEAN default 0 not null
);

insert into monitor_dg_tmp(id, name, active, user_id, interval, url, type, weight, hostname, port, keyword, maxretries, ignore_tls, upside_down)
select id, name, active, user_id, interval, url, type, weight, hostname, port, keyword, maxretries, ignore_tls, upside_down
from monitor;

drop table monitor;

alter table monitor_dg_tmp rename to monitor;

create index user_id on monitor (user_id);

COMMIT;

PRAGMA foreign_keys = on;
db/patch6.sql (new file, 74 lines)
@@ -0,0 +1,74 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
PRAGMA foreign_keys = off;

BEGIN TRANSACTION;

create table monitor_dg_tmp (
    id INTEGER not null primary key autoincrement,
    name VARCHAR(150),
    active BOOLEAN default 1 not null,
    user_id INTEGER references user on update cascade on delete set null,
    interval INTEGER default 20 not null,
    url TEXT,
    type VARCHAR(20),
    weight INTEGER default 2000,
    hostname VARCHAR(255),
    port INTEGER,
    created_date DATETIME default (DATETIME('now')) not null,
    keyword VARCHAR(255),
    maxretries INTEGER NOT NULL DEFAULT 0,
    ignore_tls BOOLEAN default 0 not null,
    upside_down BOOLEAN default 0 not null,
    maxredirects INTEGER default 10 not null,
    accepted_statuscodes_json TEXT default '["200-299"]' not null
);

insert into monitor_dg_tmp(id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword, maxretries, ignore_tls, upside_down)
select id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword, maxretries, ignore_tls, upside_down
from monitor;

drop table monitor;

alter table monitor_dg_tmp rename to monitor;

create index user_id on monitor (user_id);

COMMIT;

PRAGMA foreign_keys = on;
db/patch7.sql (new file, 10 lines)
@@ -0,0 +1,10 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;

ALTER TABLE monitor
    ADD dns_resolve_type VARCHAR(5);

ALTER TABLE monitor
    ADD dns_resolve_server VARCHAR(255);

COMMIT;
db/patch8.sql (new file, 7 lines)
@@ -0,0 +1,7 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;

ALTER TABLE monitor
    ADD dns_last_result VARCHAR(255);

COMMIT;
db/patch9.sql (new file, 7 lines)
@@ -0,0 +1,7 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;

ALTER TABLE notification
    ADD is_default BOOLEAN default 0 NOT NULL;

COMMIT;
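These patches are plain SQL files, and the server normally applies them itself during startup. As a sketch only, assuming the default data directory of ./data/ (see server/database.js below) and the sqlite3 CLI that the Debian image installs "for debugging in the future", a patch could also be applied by hand:

```
# Hypothetical manual patch run against the default SQLite database path;
# stop the server first, and prefer letting it run its own migrations.
sqlite3 ./data/kuma.db < db/patch9.sql
```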
38
dockerfile
38
dockerfile
@@ -1,15 +1,33 @@
|
||||
FROM node:14-alpine3.14
|
||||
|
||||
# sqlite has to be built from source on arm
|
||||
# TODO: use prebuilt sqlite for arm, because it is very very slow.
|
||||
RUN apk add --no-cache make g++ python3
|
||||
RUN ln -s /usr/bin/python3 /usr/bin/python
|
||||
|
||||
# DON'T UPDATE TO node:14-bullseye-slim, see #372.
|
||||
# If the image changed, the second stage image should be changed too
|
||||
FROM node:14-buster-slim AS build
|
||||
WORKDIR /app
|
||||
|
||||
COPY . .
|
||||
RUN npm install
|
||||
RUN npm run build
|
||||
RUN npm install --legacy-peer-deps && \
|
||||
npm run build && \
|
||||
npm prune --production && \
|
||||
chmod +x /app/extra/entrypoint.sh
|
||||
|
||||
|
||||
FROM node:14-buster-slim AS release
|
||||
WORKDIR /app
|
||||
|
||||
# Install Apprise, add sqlite3 cli for debugging in the future, iputils-ping for ping, util-linux for setpriv
|
||||
RUN apt update && \
|
||||
apt --yes install python3 python3-pip python3-cryptography python3-six python3-yaml python3-click python3-markdown python3-requests python3-requests-oauthlib \
|
||||
sqlite3 iputils-ping util-linux dumb-init && \
|
||||
pip3 --no-cache-dir install apprise && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Copy app files from build layer
|
||||
COPY --from=build /app /app
|
||||
|
||||
EXPOSE 3001
|
||||
VOLUME ["/app/data"]
|
||||
CMD ["npm", "run", "start-server"]
|
||||
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD node extra/healthcheck.js
|
||||
ENTRYPOINT ["/usr/bin/dumb-init", "--", "extra/entrypoint.sh"]
|
||||
CMD ["node", "server/server.js"]
|
||||
|
||||
FROM release AS nightly
|
||||
RUN npm run mark-as-nightly
|
||||
|
dockerfile-alpine (new file, 30 lines)
@@ -0,0 +1,30 @@
# DON'T UPDATE TO alpine3.13, 1.14, see #41.
FROM node:14-alpine3.12 AS build
WORKDIR /app

COPY . .
RUN npm install --legacy-peer-deps && \
    npm run build && \
    npm prune --production && \
    chmod +x /app/extra/entrypoint.sh


FROM node:14-alpine3.12 AS release
WORKDIR /app

# Install apprise, iputils for non-root ping, setpriv
RUN apk add --no-cache iputils setpriv dumb-init python3 py3-cryptography py3-pip py3-six py3-yaml py3-click py3-markdown py3-requests py3-requests-oauthlib && \
    pip3 --no-cache-dir install apprise && \
    rm -rf /root/.cache

# Copy app files from build layer
COPY --from=build /app /app

EXPOSE 3001
VOLUME ["/app/data"]
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD node extra/healthcheck.js
ENTRYPOINT ["/usr/bin/dumb-init", "--", "extra/entrypoint.sh"]
CMD ["node", "server/server.js"]

FROM release AS nightly
RUN npm run mark-as-nightly
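The npm scripts in package.json drive multi-arch buildx pushes; for a quick single-platform build of this Alpine flavour on a local machine, a sketch (image tag and volume name are just examples) might look like:

```
# Hypothetical local build, stopping at the "release" stage
docker build -f dockerfile-alpine --target release -t uptime-kuma:local .
docker run -d -p 3001:3001 -v uptime-kuma:/app/data --name uptime-kuma uptime-kuma:local
```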
extra/compile-install-script.ps1 (new file, 2 lines)
@@ -0,0 +1,2 @@
# Must enable File Sharing in Docker Desktop
docker run -it --rm -v ${pwd}:/app louislam/batsh /usr/bin/batsh bash --output ./install.sh ./extra/install.batsh
extra/entrypoint.sh (new file, 21 lines)
@@ -0,0 +1,21 @@
#!/usr/bin/env sh

# set -e Exit the script if an error happens
set -e
PUID=${PUID=0}
PGID=${PGID=0}

files_ownership () {
    # -h Changes the ownership of an encountered symbolic link and not that of the file or directory pointed to by the symbolic link.
    # -R Recursively descends the specified directories
    # -c Like verbose but report only when a change is made
    chown -hRc "$PUID":"$PGID" /app/data
}

echo "==> Performing startup jobs and maintenance tasks"
files_ownership

echo "==> Starting application with user $PUID group $PGID"

# --clear-groups Clear supplementary groups.
exec setpriv --reuid "$PUID" --regid "$PGID" --clear-groups "$@"
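Because the entrypoint re-executes the server under setpriv using the PUID/PGID environment variables (both default to 0, i.e. root) and fixes /app/data ownership first, the container can be dropped to an unprivileged user. A sketch, not an officially documented invocation:

```
# Hypothetical run as UID/GID 1000; the entrypoint chowns /app/data to match
docker run -d -e PUID=1000 -e PGID=1000 -p 3001:3001 \
  -v uptime-kuma:/app/data --name uptime-kuma louislam/uptime-kuma:1
```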
extra/healthcheck.js (new file, 34 lines)
@@ -0,0 +1,34 @@
/*
 * This script should be run after a period of time (180s), because the server may need some time to prepare.
 */
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";

let client;

if (process.env.SSL_KEY && process.env.SSL_CERT) {
    client = require("https");
} else {
    client = require("http");
}

let options = {
    host: process.env.HOST || "127.0.0.1",
    port: parseInt(process.env.PORT) || 3001,
    timeout: 28 * 1000,
};

let request = client.request(options, (res) => {
    console.log(`Health Check OK [Res Code: ${res.statusCode}]`);
    if (res.statusCode === 200) {
        process.exit(0);
    } else {
        process.exit(1);
    }
});

request.on("error", function (err) {
    console.error("Health Check ERROR");
    process.exit(1);
});

request.end();
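This is the same script the Dockerfile HEALTHCHECK runs every 60 seconds; exit code 0 means the server answered with HTTP 200. A minimal sketch for checking it by hand, assuming the server is already listening on the default port:

```
# Manual health probe
node extra/healthcheck.js; echo "exit code: $?"

# Or, once the container has passed its 180s start period, ask Docker for its view
docker inspect --format '{{.State.Health.Status}}' uptime-kuma
```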
245
extra/install.batsh
Normal file
245
extra/install.batsh
Normal file
@@ -0,0 +1,245 @@
|
||||
// install.sh is generated by ./extra/install.batsh, do not modify it directly.
|
||||
// "npm run compile-install-script" to compile install.sh
|
||||
// The command works on Windows PowerShell and Docker for Windows only.
|
||||
|
||||
|
||||
// curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh
|
||||
println("=====================");
|
||||
println("Uptime Kuma Installer");
|
||||
println("=====================");
|
||||
println("Supported OS: CentOS 7/8, Ubuntu >= 16.04 and Debian");
|
||||
println("---------------------------------------");
|
||||
println("This script is designed for Linux and basic usage.");
|
||||
println("For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation");
|
||||
println("---------------------------------------");
|
||||
println("");
|
||||
println("Local - Install Uptime Kuma in your current machine with git, Node.js 14 and pm2");
|
||||
println("Docker - Install Uptime Kuma Docker container");
|
||||
println("");
|
||||
|
||||
if ("$1" != "") {
|
||||
type = "$1";
|
||||
} else {
|
||||
call("read", "-p", "Which installation method do you prefer? [DOCKER/local]: ", "type");
|
||||
}
|
||||
|
||||
defaultPort = "3001";
|
||||
|
||||
function checkNode() {
|
||||
bash("nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])')");
|
||||
println("Node Version: " ++ nodeVersion);
|
||||
|
||||
if (nodeVersion < "12") {
|
||||
println("Error: Required Node.js 14");
|
||||
call("exit", "1");
|
||||
}
|
||||
|
||||
if (nodeVersion == "12") {
|
||||
println("Warning: NodeJS " ++ nodeVersion ++ " is not tested.");
|
||||
}
|
||||
}
|
||||
|
||||
function deb() {
|
||||
bash("nodeCheck=$(node -v)");
|
||||
bash("apt --yes update");
|
||||
|
||||
if (nodeCheck != "") {
|
||||
checkNode();
|
||||
} else {
|
||||
|
||||
// Old nodejs binary name is "nodejs"
|
||||
bash("check=$(nodejs --version)");
|
||||
if (check != "") {
|
||||
println("Error: 'node' command is not found, but 'nodejs' command is found. Your NodeJS should be too old.");
|
||||
bash("exit 1");
|
||||
}
|
||||
|
||||
bash("curlCheck=$(curl --version)");
|
||||
if (curlCheck == "") {
|
||||
println("Installing Curl");
|
||||
bash("apt --yes install curl");
|
||||
}
|
||||
|
||||
println("Installing Node.js 14");
|
||||
bash("curl -sL https://deb.nodesource.com/setup_14.x | bash - > log.txt");
|
||||
bash("apt --yes install nodejs");
|
||||
bash("node -v");
|
||||
|
||||
bash("nodeCheckAgain=$(node -v)");
|
||||
|
||||
if (nodeCheckAgain == "") {
|
||||
println("Error during Node.js installation");
|
||||
bash("exit 1");
|
||||
}
|
||||
}
|
||||
|
||||
bash("check=$(git --version)");
|
||||
if (check == "") {
|
||||
println("Installing Git");
|
||||
bash("apt --yes install git");
|
||||
}
|
||||
}
|
||||
|
||||
if (type == "local") {
|
||||
defaultInstallPath = "/opt/uptime-kuma";
|
||||
|
||||
if (exists("/etc/redhat-release")) {
|
||||
os = call("cat", "/etc/redhat-release");
|
||||
distribution = "rhel";
|
||||
|
||||
} else if (exists("/etc/issue")) {
|
||||
bash("os=$(head -n1 /etc/issue | cut -f 1 -d ' ')");
|
||||
if (os == "Ubuntu") {
|
||||
distribution = "ubuntu";
|
||||
}
|
||||
if (os == "Debian") {
|
||||
distribution = "debian";
|
||||
}
|
||||
}
|
||||
|
||||
bash("arch=$(uname -i)");
|
||||
|
||||
println("Your OS: " ++ os);
|
||||
println("Distribution: " ++ distribution);
|
||||
println("Arch: " ++ arch);
|
||||
|
||||
if ("$3" != "") {
|
||||
port = "$3";
|
||||
} else {
|
||||
call("read", "-p", "Listening Port [$defaultPort]: ", "port");
|
||||
|
||||
if (port == "") {
|
||||
port = defaultPort;
|
||||
}
|
||||
}
|
||||
|
||||
if ("$2" != "") {
|
||||
installPath = "$2";
|
||||
} else {
|
||||
call("read", "-p", "Installation Path [$defaultInstallPath]: ", "installPath");
|
||||
|
||||
if (installPath == "") {
|
||||
installPath = defaultInstallPath;
|
||||
}
|
||||
}
|
||||
|
||||
// CentOS
|
||||
if (distribution == "rhel") {
|
||||
bash("nodeCheck=$(node -v)");
|
||||
|
||||
if (nodeCheck != "") {
|
||||
checkNode();
|
||||
} else {
|
||||
|
||||
bash("curlCheck=$(curl --version)");
|
||||
if (curlCheck == "") {
|
||||
println("Installing Curl");
|
||||
bash("yum -y -q install curl");
|
||||
}
|
||||
|
||||
println("Installing Node.js 14");
|
||||
bash("curl -sL https://rpm.nodesource.com/setup_14.x | bash - > log.txt");
|
||||
bash("yum install -y -q nodejs");
|
||||
bash("node -v");
|
||||
|
||||
bash("nodeCheckAgain=$(node -v)");
|
||||
|
||||
if (nodeCheckAgain == "") {
|
||||
println("Error during Node.js installation");
|
||||
bash("exit 1");
|
||||
}
|
||||
}
|
||||
|
||||
bash("check=$(git --version)");
|
||||
if (check == "") {
|
||||
println("Installing Git");
|
||||
bash("yum -y -q install git");
|
||||
}
|
||||
|
||||
// Ubuntu
|
||||
} else if (distribution == "ubuntu") {
|
||||
deb();
|
||||
|
||||
// Debian
|
||||
} else if (distribution == "debian") {
|
||||
deb();
|
||||
|
||||
} else {
|
||||
// Unknown distribution
|
||||
error = 0;
|
||||
|
||||
bash("check=$(git --version)");
|
||||
if (check == "") {
|
||||
error = 1;
|
||||
println("Error: git is missing");
|
||||
}
|
||||
|
||||
bash("check=$(node -v)");
|
||||
if (check == "") {
|
||||
error = 1;
|
||||
println("Error: node is missing");
|
||||
}
|
||||
|
||||
if (error > 0) {
|
||||
println("Please install above missing software");
|
||||
bash("exit 1");
|
||||
}
|
||||
}
|
||||
|
||||
bash("check=$(pm2 --version)");
|
||||
if (check == "") {
|
||||
println("Installing PM2");
|
||||
bash("npm install pm2 -g");
|
||||
bash("pm2 startup");
|
||||
}
|
||||
|
||||
bash("mkdir -p $installPath");
|
||||
bash("cd $installPath");
|
||||
bash("git clone https://github.com/louislam/uptime-kuma.git .");
|
||||
bash("npm run setup");
|
||||
|
||||
bash("pm2 start server/server.js --name uptime-kuma -- --port=$port");
|
||||
|
||||
} else {
|
||||
defaultVolume = "uptime-kuma";
|
||||
|
||||
bash("check=$(docker -v)");
|
||||
if (check == "") {
|
||||
println("Error: docker is not found!");
|
||||
bash("exit 1");
|
||||
}
|
||||
|
||||
bash("check=$(docker info)");
|
||||
|
||||
bash("if [[ \"$check\" == *\"Is the docker daemon running\"* ]]; then
|
||||
\"echo\" \"Error: docker is not running\"
|
||||
\"exit\" \"1\"
|
||||
fi");
|
||||
|
||||
if ("$3" != "") {
|
||||
port = "$3";
|
||||
} else {
|
||||
call("read", "-p", "Expose Port [$defaultPort]: ", "port");
|
||||
|
||||
if (port == "") {
|
||||
port = defaultPort;
|
||||
}
|
||||
}
|
||||
|
||||
if ("$2" != "") {
|
||||
volume = "$2";
|
||||
} else {
|
||||
call("read", "-p", "Volume Name [$defaultVolume]: ", "volume");
|
||||
|
||||
if (volume == "") {
|
||||
volume = defaultVolume;
|
||||
}
|
||||
}
|
||||
|
||||
println("Port: $port");
|
||||
println("Volume: $volume");
|
||||
bash("docker volume create $volume");
|
||||
bash("docker run -d --restart=always -p $port:3001 -v $volume:/app/data --name uptime-kuma louislam/uptime-kuma:1");
|
||||
}
|
||||
|
||||
println("http://localhost:$port");
|
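Since the script reads $1 (installation method), $2 (install path or volume name) and $3 (port), it can also be driven non-interactively; a sketch of the argument order, not an extra documented mode:

```
# Hypothetical non-interactive runs; the prompts are skipped when positional args are given
sudo bash kuma_install.sh local /opt/uptime-kuma 3001
sudo bash kuma_install.sh docker uptime-kuma 3001
```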
24
extra/mark-as-nightly.js
Normal file
24
extra/mark-as-nightly.js
Normal file
@@ -0,0 +1,24 @@
|
||||
const pkg = require("../package.json");
|
||||
const fs = require("fs");
|
||||
const util = require("../src/util");
|
||||
|
||||
util.polyfill();
|
||||
|
||||
const oldVersion = pkg.version
|
||||
const newVersion = oldVersion + "-nightly"
|
||||
|
||||
console.log("Old Version: " + oldVersion)
|
||||
console.log("New Version: " + newVersion)
|
||||
|
||||
if (newVersion) {
|
||||
// Process package.json
|
||||
pkg.version = newVersion
|
||||
pkg.scripts.setup = pkg.scripts.setup.replaceAll(oldVersion, newVersion)
|
||||
pkg.scripts["build-docker"] = pkg.scripts["build-docker"].replaceAll(oldVersion, newVersion)
|
||||
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n")
|
||||
|
||||
// Process README.md
|
||||
if (fs.existsSync("README.md")) {
|
||||
fs.writeFileSync("README.md", fs.readFileSync("README.md", "utf8").replaceAll(oldVersion, newVersion))
|
||||
}
|
||||
}
|
61
extra/reset-password.js
Normal file
61
extra/reset-password.js
Normal file
@@ -0,0 +1,61 @@
|
||||
console.log("== Uptime Kuma Reset Password Tool ==");
|
||||
|
||||
console.log("Loading the database");
|
||||
|
||||
const Database = require("../server/database");
|
||||
const { R } = require("redbean-node");
|
||||
const readline = require("readline");
|
||||
const { initJWTSecret } = require("../server/util-server");
|
||||
const args = require("args-parser")(process.argv);
|
||||
const rl = readline.createInterface({
|
||||
input: process.stdin,
|
||||
output: process.stdout
|
||||
});
|
||||
|
||||
(async () => {
|
||||
Database.init(args);
|
||||
await Database.connect();
|
||||
|
||||
try {
|
||||
const user = await R.findOne("user");
|
||||
|
||||
if (! user) {
|
||||
throw new Error("user not found, have you installed?");
|
||||
}
|
||||
|
||||
console.log("Found user: " + user.username);
|
||||
|
||||
while (true) {
|
||||
let password = await question("New Password: ");
|
||||
let confirmPassword = await question("Confirm New Password: ");
|
||||
|
||||
if (password === confirmPassword) {
|
||||
await user.resetPassword(password);
|
||||
|
||||
// Reset all sessions by reset jwt secret
|
||||
await initJWTSecret();
|
||||
|
||||
rl.close();
|
||||
break;
|
||||
} else {
|
||||
console.log("Passwords do not match, please try again.");
|
||||
}
|
||||
}
|
||||
|
||||
console.log("Password reset successfully.");
|
||||
} catch (e) {
|
||||
console.error("Error: " + e.message);
|
||||
}
|
||||
|
||||
await Database.close();
|
||||
|
||||
console.log("Finished. You should restart the Uptime Kuma server.")
|
||||
})();
|
||||
|
||||
function question(question) {
|
||||
return new Promise((resolve) => {
|
||||
rl.question(question, (answer) => {
|
||||
resolve(answer);
|
||||
})
|
||||
});
|
||||
}
|
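The reset tool accepts the same --data-dir argument as the server (the parsed args are handed straight to Database.init). A sketch of typical invocations; the container name here is an assumption:

```
# Bare-metal install, pointing at a non-default data directory
node extra/reset-password.js --data-dir=./data/

# Inside a running container (name assumed to be "uptime-kuma")
docker exec -it uptime-kuma node extra/reset-password.js
```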
144
extra/simple-dns-server.js
Normal file
144
extra/simple-dns-server.js
Normal file
@@ -0,0 +1,144 @@
|
||||
/*
|
||||
* Simple DNS Server
|
||||
* For testing DNS monitoring type, dev only
|
||||
*/
|
||||
const dns2 = require("dns2");
|
||||
|
||||
const { Packet } = dns2;
|
||||
|
||||
const server = dns2.createServer({
|
||||
udp: true
|
||||
});
|
||||
|
||||
server.on("request", (request, send, rinfo) => {
|
||||
for (let question of request.questions) {
|
||||
console.log(question.name, type(question.type), question.class);
|
||||
|
||||
const response = Packet.createResponseFromRequest(request);
|
||||
|
||||
if (question.name === "existing.com") {
|
||||
|
||||
if (question.type === Packet.TYPE.A) {
|
||||
response.answers.push({
|
||||
name: question.name,
|
||||
type: question.type,
|
||||
class: question.class,
|
||||
ttl: 300,
|
||||
address: "1.2.3.4"
|
||||
});
|
||||
} if (question.type === Packet.TYPE.AAAA) {
|
||||
response.answers.push({
|
||||
name: question.name,
|
||||
type: question.type,
|
||||
class: question.class,
|
||||
ttl: 300,
|
||||
address: "fe80::::1234:5678:abcd:ef00",
|
||||
});
|
||||
} else if (question.type === Packet.TYPE.CNAME) {
|
||||
response.answers.push({
|
||||
name: question.name,
|
||||
type: question.type,
|
||||
class: question.class,
|
||||
ttl: 300,
|
||||
domain: "cname1.existing.com",
|
||||
});
|
||||
} else if (question.type === Packet.TYPE.MX) {
|
||||
response.answers.push({
|
||||
name: question.name,
|
||||
type: question.type,
|
||||
class: question.class,
|
||||
ttl: 300,
|
||||
exchange: "mx1.existing.com",
|
||||
priority: 5
|
||||
});
|
||||
} else if (question.type === Packet.TYPE.NS) {
|
||||
response.answers.push({
|
||||
name: question.name,
|
||||
type: question.type,
|
||||
class: question.class,
|
||||
ttl: 300,
|
||||
ns: "ns1.existing.com",
|
||||
});
|
||||
} else if (question.type === Packet.TYPE.SOA) {
|
||||
response.answers.push({
|
||||
name: question.name,
|
||||
type: question.type,
|
||||
class: question.class,
|
||||
ttl: 300,
|
||||
primary: "existing.com",
|
||||
admin: "admin@existing.com",
|
||||
serial: 2021082701,
|
||||
refresh: 300,
|
||||
retry: 3,
|
||||
expiration: 10,
|
||||
minimum: 10,
|
||||
});
|
||||
} else if (question.type === Packet.TYPE.SRV) {
|
||||
response.answers.push({
|
||||
name: question.name,
|
||||
type: question.type,
|
||||
class: question.class,
|
||||
ttl: 300,
|
||||
priority: 5,
|
||||
weight: 5,
|
||||
port: 8080,
|
||||
target: "srv1.existing.com",
|
||||
});
|
||||
} else if (question.type === Packet.TYPE.TXT) {
|
||||
response.answers.push({
|
||||
name: question.name,
|
||||
type: question.type,
|
||||
class: question.class,
|
||||
ttl: 300,
|
||||
data: "#v=spf1 include:_spf.existing.com ~all",
|
||||
});
|
||||
} else if (question.type === Packet.TYPE.CAA) {
|
||||
response.answers.push({
|
||||
name: question.name,
|
||||
type: question.type,
|
||||
class: question.class,
|
||||
ttl: 300,
|
||||
flags: 0,
|
||||
tag: "issue",
|
||||
value: "ca.existing.com",
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if (question.name === "4.3.2.1.in-addr.arpa") {
|
||||
if (question.type === Packet.TYPE.PTR) {
|
||||
response.answers.push({
|
||||
name: question.name,
|
||||
type: question.type,
|
||||
class: question.class,
|
||||
ttl: 300,
|
||||
domain: "ptr1.existing.com",
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
send(response);
|
||||
}
|
||||
});
|
||||
|
||||
server.on("listening", () => {
|
||||
console.log("Listening");
|
||||
console.log(server.addresses());
|
||||
});
|
||||
|
||||
server.on("close", () => {
|
||||
console.log("server closed");
|
||||
});
|
||||
|
||||
server.listen({
|
||||
udp: 5300
|
||||
});
|
||||
|
||||
function type(code) {
|
||||
for (let name in Packet.TYPE) {
|
||||
if (Packet.TYPE[name] === code) {
|
||||
return name;
|
||||
}
|
||||
}
|
||||
}
|
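With this stub server listening on UDP 5300, the fake zone can be queried directly to sanity-check a DNS monitor during development; a quick sketch with dig:

```
# Should return 1.2.3.4 from the stub server above
dig @127.0.0.1 -p 5300 existing.com A +short

# Reverse lookup answered by the PTR branch (4.3.2.1.in-addr.arpa)
dig @127.0.0.1 -p 5300 -x 1.2.3.4 +short
```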
extra/update-language-files/.gitignore (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
package-lock.json
test.js
languages/
84
extra/update-language-files/index.js
Normal file
84
extra/update-language-files/index.js
Normal file
@@ -0,0 +1,84 @@
|
||||
// Need to use ES6 to read language files
|
||||
|
||||
import fs from "fs";
|
||||
import path from "path";
|
||||
import util from "util";
|
||||
|
||||
// https://stackoverflow.com/questions/13786160/copy-folder-recursively-in-node-js
|
||||
/**
|
||||
* Look ma, it's cp -R.
|
||||
* @param {string} src The path to the thing to copy.
|
||||
* @param {string} dest The path to the new copy.
|
||||
*/
|
||||
const copyRecursiveSync = function (src, dest) {
|
||||
let exists = fs.existsSync(src);
|
||||
let stats = exists && fs.statSync(src);
|
||||
let isDirectory = exists && stats.isDirectory();
|
||||
|
||||
if (isDirectory) {
|
||||
fs.mkdirSync(dest);
|
||||
fs.readdirSync(src).forEach(function (childItemName) {
|
||||
copyRecursiveSync(path.join(src, childItemName),
|
||||
path.join(dest, childItemName));
|
||||
});
|
||||
} else {
|
||||
fs.copyFileSync(src, dest);
|
||||
}
|
||||
};
|
||||
|
||||
console.log("Arguments:", process.argv)
|
||||
const baseLangCode = process.argv[2] || "en";
|
||||
console.log("Base Lang: " + baseLangCode);
|
||||
fs.rmdirSync("./languages", { recursive: true });
|
||||
copyRecursiveSync("../../src/languages", "./languages");
|
||||
|
||||
const en = (await import("./languages/en.js")).default;
|
||||
const baseLang = (await import(`./languages/${baseLangCode}.js`)).default;
|
||||
const files = fs.readdirSync("./languages");
|
||||
console.log("Files:", files);
|
||||
|
||||
for (const file of files) {
|
||||
if (!file.endsWith(".js")) {
|
||||
console.log("Skipping " + file)
|
||||
continue;
|
||||
}
|
||||
|
||||
console.log("Processing " + file);
|
||||
const lang = await import("./languages/" + file);
|
||||
|
||||
let obj;
|
||||
|
||||
if (lang.default) {
|
||||
obj = lang.default;
|
||||
} else {
|
||||
console.log("Empty file");
|
||||
obj = {
|
||||
languageName: "<Your Language name in your language (not in English)>"
|
||||
};
|
||||
}
|
||||
|
||||
// En first
|
||||
for (const key in en) {
|
||||
if (! obj[key]) {
|
||||
obj[key] = en[key];
|
||||
}
|
||||
}
|
||||
|
||||
if (baseLang !== en) {
|
||||
// Base second
|
||||
for (const key in baseLang) {
|
||||
if (! obj[key]) {
|
||||
obj[key] = key;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const code = "export default " + util.inspect(obj, {
|
||||
depth: null,
|
||||
});
|
||||
|
||||
fs.writeFileSync(`../../src/languages/${file}`, code);
|
||||
}
|
||||
|
||||
fs.rmdirSync("./languages", { recursive: true });
|
||||
console.log("Done. Fixing formatting by ESLint...");
|
12
extra/update-language-files/package.json
Normal file
12
extra/update-language-files/package.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"name": "update-language-files",
|
||||
"type": "module",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"author": "",
|
||||
"license": "ISC"
|
||||
}
|
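The root package.json (further down in this diff) wires this tool up as npm scripts, with the base language passed through npm's config mechanism; the %npm_config_base_lang% expansion as written targets Windows-style shells, and argument passing like this relies on the npm 6 that ships with Node 14. A sketch of regenerating the language files with German as the base:

```
# Sync all language files, filling missing keys from en.js
npm run update-language-files

# Same, but fall back to de.js before keeping the untranslated key
npm run update-language-files-with-base-lang --base_lang=de
```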
61
extra/update-version.js
Normal file
61
extra/update-version.js
Normal file
@@ -0,0 +1,61 @@
|
||||
const pkg = require("../package.json");
|
||||
const fs = require("fs");
|
||||
const child_process = require("child_process");
|
||||
const util = require("../src/util");
|
||||
|
||||
util.polyfill();
|
||||
|
||||
const oldVersion = pkg.version;
|
||||
const newVersion = process.argv[2];
|
||||
|
||||
console.log("Old Version: " + oldVersion);
|
||||
console.log("New Version: " + newVersion);
|
||||
|
||||
if (! newVersion) {
|
||||
console.error("invalid version");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const exists = tagExists(newVersion);
|
||||
|
||||
if (! exists) {
|
||||
// Process package.json
|
||||
pkg.version = newVersion;
|
||||
pkg.scripts.setup = pkg.scripts.setup.replaceAll(oldVersion, newVersion);
|
||||
pkg.scripts["build-docker"] = pkg.scripts["build-docker"].replaceAll(oldVersion, newVersion);
|
||||
pkg.scripts["build-docker-alpine"] = pkg.scripts["build-docker-alpine"].replaceAll(oldVersion, newVersion);
|
||||
pkg.scripts["build-docker-debian"] = pkg.scripts["build-docker-debian"].replaceAll(oldVersion, newVersion);
|
||||
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n");
|
||||
|
||||
commit(newVersion);
|
||||
tag(newVersion);
|
||||
} else {
|
||||
console.log("version exists")
|
||||
}
|
||||
|
||||
function commit(version) {
|
||||
let msg = "update to " + version;
|
||||
|
||||
let res = child_process.spawnSync("git", ["commit", "-m", msg, "-a"]);
|
||||
let stdout = res.stdout.toString().trim();
|
||||
console.log(stdout)
|
||||
|
||||
if (stdout.includes("no changes added to commit")) {
|
||||
throw new Error("commit error")
|
||||
}
|
||||
}
|
||||
|
||||
function tag(version) {
|
||||
let res = child_process.spawnSync("git", ["tag", version]);
|
||||
console.log(res.stdout.toString().trim())
|
||||
}
|
||||
|
||||
function tagExists(version) {
|
||||
if (! version) {
|
||||
throw new Error("invalid version");
|
||||
}
|
||||
|
||||
let res = child_process.spawnSync("git", ["tag", "-l", version]);
|
||||
|
||||
return res.stdout.toString().trim() === version;
|
||||
}
|
@@ -1,39 +0,0 @@
|
||||
/**
|
||||
* String.prototype.replaceAll() polyfill
|
||||
* https://gomakethings.com/how-to-replace-a-section-of-a-string-with-another-one-with-vanilla-js/
|
||||
* @author Chris Ferdinandi
|
||||
* @license MIT
|
||||
*/
|
||||
if (!String.prototype.replaceAll) {
|
||||
String.prototype.replaceAll = function(str, newStr){
|
||||
|
||||
// If a regex pattern
|
||||
if (Object.prototype.toString.call(str).toLowerCase() === '[object regexp]') {
|
||||
return this.replace(str, newStr);
|
||||
}
|
||||
|
||||
// If a string
|
||||
return this.replace(new RegExp(str, 'g'), newStr);
|
||||
|
||||
};
|
||||
}
|
||||
|
||||
const pkg = require('../package.json');
|
||||
const fs = require("fs");
|
||||
const oldVersion = pkg.version
|
||||
const newVersion = process.argv[2]
|
||||
|
||||
console.log("Old Version: " + oldVersion)
|
||||
console.log("New Version: " + newVersion)
|
||||
|
||||
if (newVersion) {
|
||||
// Process package.json
|
||||
pkg.version = newVersion
|
||||
pkg.scripts.setup = pkg.scripts.setup.replaceAll(oldVersion, newVersion)
|
||||
pkg.scripts["build-docker"] = pkg.scripts["build-docker"].replaceAll(oldVersion, newVersion)
|
||||
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n")
|
||||
|
||||
// Process README.md
|
||||
fs.writeFileSync("README.md", fs.readFileSync("README.md", 'utf8').replaceAll(oldVersion, newVersion))
|
||||
}
|
||||
|
18
index.html
18
index.html
@@ -1,13 +1,17 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/svg+xml" href="/icon.svg" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<link rel="apple-touch-icon" sizes="180x180" href="/apple-touch-icon.png">
|
||||
<link rel="icon" type="image/svg+xml" href="/icon.svg" />
|
||||
<link rel="manifest" href="manifest.json" />
|
||||
<meta name="theme-color" id="theme-color" content="" />
|
||||
<meta name="description" content="Uptime Kuma monitoring tool" />
|
||||
<title>Uptime Kuma</title>
|
||||
</head>
|
||||
<body>
|
||||
<div id="app"></div>
|
||||
<script type="module" src="/src/main.js"></script>
|
||||
</body>
|
||||
</head>
|
||||
<body>
|
||||
<div id="app"></div>
|
||||
<script type="module" src="/src/main.js"></script>
|
||||
</body>
|
||||
</html>
|
||||
|
203
install.sh
Normal file
203
install.sh
Normal file
@@ -0,0 +1,203 @@
|
||||
# install.sh is generated by ./extra/install.batsh, do not modify it directly.
|
||||
# "npm run compile-install-script" to compile install.sh
|
||||
# The command works on Windows PowerShell and Docker for Windows only.
|
||||
# curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh
|
||||
"echo" "-e" "====================="
|
||||
"echo" "-e" "Uptime Kuma Installer"
|
||||
"echo" "-e" "====================="
|
||||
"echo" "-e" "Supported OS: CentOS 7/8, Ubuntu >= 16.04 and Debian"
|
||||
"echo" "-e" "---------------------------------------"
|
||||
"echo" "-e" "This script is designed for Linux and basic usage."
|
||||
"echo" "-e" "For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation"
|
||||
"echo" "-e" "---------------------------------------"
|
||||
"echo" "-e" ""
|
||||
"echo" "-e" "Local - Install Uptime Kuma in your current machine with git, Node.js 14 and pm2"
|
||||
"echo" "-e" "Docker - Install Uptime Kuma Docker container"
|
||||
"echo" "-e" ""
|
||||
if [ "$1" != "" ]; then
|
||||
type="$1"
|
||||
else
|
||||
"read" "-p" "Which installation method do you prefer? [DOCKER/local]: " "type"
|
||||
fi
|
||||
defaultPort="3001"
|
||||
function checkNode {
|
||||
local _0
|
||||
nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])')
|
||||
"echo" "-e" "Node Version: ""$nodeVersion"
|
||||
_0="12"
|
||||
if [ $(($nodeVersion < $_0)) == 1 ]; then
|
||||
"echo" "-e" "Error: Required Node.js 14"
|
||||
"exit" "1"
|
||||
fi
|
||||
if [ "$nodeVersion" == "12" ]; then
|
||||
"echo" "-e" "Warning: NodeJS ""$nodeVersion"" is not tested."
|
||||
fi
|
||||
}
|
||||
function deb {
|
||||
nodeCheck=$(node -v)
|
||||
apt --yes update
|
||||
if [ "$nodeCheck" != "" ]; then
|
||||
"checkNode"
|
||||
else
|
||||
# Old nodejs binary name is "nodejs"
|
||||
check=$(nodejs --version)
|
||||
if [ "$check" != "" ]; then
|
||||
"echo" "-e" "Error: 'node' command is not found, but 'nodejs' command is found. Your NodeJS should be too old."
|
||||
exit 1
|
||||
fi
|
||||
curlCheck=$(curl --version)
|
||||
if [ "$curlCheck" == "" ]; then
|
||||
"echo" "-e" "Installing Curl"
|
||||
apt --yes install curl
|
||||
fi
|
||||
"echo" "-e" "Installing Node.js 14"
|
||||
curl -sL https://deb.nodesource.com/setup_14.x | bash - > log.txt
|
||||
apt --yes install nodejs
|
||||
node -v
|
||||
nodeCheckAgain=$(node -v)
|
||||
if [ "$nodeCheckAgain" == "" ]; then
|
||||
"echo" "-e" "Error during Node.js installation"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
check=$(git --version)
|
||||
if [ "$check" == "" ]; then
|
||||
"echo" "-e" "Installing Git"
|
||||
apt --yes install git
|
||||
fi
|
||||
}
|
||||
if [ "$type" == "local" ]; then
|
||||
defaultInstallPath="/opt/uptime-kuma"
|
||||
if [ -e "/etc/redhat-release" ]; then
|
||||
os=$("cat" "/etc/redhat-release")
|
||||
distribution="rhel"
|
||||
else
|
||||
if [ -e "/etc/issue" ]; then
|
||||
os=$(head -n1 /etc/issue | cut -f 1 -d ' ')
|
||||
if [ "$os" == "Ubuntu" ]; then
|
||||
distribution="ubuntu"
|
||||
fi
|
||||
if [ "$os" == "Debian" ]; then
|
||||
distribution="debian"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
arch=$(uname -i)
|
||||
"echo" "-e" "Your OS: ""$os"
|
||||
"echo" "-e" "Distribution: ""$distribution"
|
||||
"echo" "-e" "Arch: ""$arch"
|
||||
if [ "$3" != "" ]; then
|
||||
port="$3"
|
||||
else
|
||||
"read" "-p" "Listening Port [$defaultPort]: " "port"
|
||||
if [ "$port" == "" ]; then
|
||||
port="$defaultPort"
|
||||
fi
|
||||
fi
|
||||
if [ "$2" != "" ]; then
|
||||
installPath="$2"
|
||||
else
|
||||
"read" "-p" "Installation Path [$defaultInstallPath]: " "installPath"
|
||||
if [ "$installPath" == "" ]; then
|
||||
installPath="$defaultInstallPath"
|
||||
fi
|
||||
fi
|
||||
# CentOS
|
||||
if [ "$distribution" == "rhel" ]; then
|
||||
nodeCheck=$(node -v)
|
||||
if [ "$nodeCheck" != "" ]; then
|
||||
"checkNode"
|
||||
else
|
||||
curlCheck=$(curl --version)
|
||||
if [ "$curlCheck" == "" ]; then
|
||||
"echo" "-e" "Installing Curl"
|
||||
yum -y -q install curl
|
||||
fi
|
||||
"echo" "-e" "Installing Node.js 14"
|
||||
curl -sL https://rpm.nodesource.com/setup_14.x | bash - > log.txt
|
||||
yum install -y -q nodejs
|
||||
node -v
|
||||
nodeCheckAgain=$(node -v)
|
||||
if [ "$nodeCheckAgain" == "" ]; then
|
||||
"echo" "-e" "Error during Node.js installation"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
check=$(git --version)
|
||||
if [ "$check" == "" ]; then
|
||||
"echo" "-e" "Installing Git"
|
||||
yum -y -q install git
|
||||
fi
|
||||
# Ubuntu
|
||||
else
|
||||
if [ "$distribution" == "ubuntu" ]; then
|
||||
"deb"
|
||||
# Debian
|
||||
else
|
||||
if [ "$distribution" == "debian" ]; then
|
||||
"deb"
|
||||
else
|
||||
# Unknown distribution
|
||||
error=$((0))
|
||||
check=$(git --version)
|
||||
if [ "$check" == "" ]; then
|
||||
error=$((1))
|
||||
"echo" "-e" "Error: git is missing"
|
||||
fi
|
||||
check=$(node -v)
|
||||
if [ "$check" == "" ]; then
|
||||
error=$((1))
|
||||
"echo" "-e" "Error: node is missing"
|
||||
fi
|
||||
if [ $(($error > 0)) == 1 ]; then
|
||||
"echo" "-e" "Please install above missing software"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
check=$(pm2 --version)
|
||||
if [ "$check" == "" ]; then
|
||||
"echo" "-e" "Installing PM2"
|
||||
npm install pm2 -g
|
||||
pm2 startup
|
||||
fi
|
||||
mkdir -p $installPath
|
||||
cd $installPath
|
||||
git clone https://github.com/louislam/uptime-kuma.git .
|
||||
npm run setup
|
||||
pm2 start server/server.js --name uptime-kuma -- --port=$port
|
||||
else
|
||||
defaultVolume="uptime-kuma"
|
||||
check=$(docker -v)
|
||||
if [ "$check" == "" ]; then
|
||||
"echo" "-e" "Error: docker is not found!"
|
||||
exit 1
|
||||
fi
|
||||
check=$(docker info)
|
||||
if [[ "$check" == *"Is the docker daemon running"* ]]; then
|
||||
"echo" "Error: docker is not running"
|
||||
"exit" "1"
|
||||
fi
|
||||
if [ "$3" != "" ]; then
|
||||
port="$3"
|
||||
else
|
||||
"read" "-p" "Expose Port [$defaultPort]: " "port"
|
||||
if [ "$port" == "" ]; then
|
||||
port="$defaultPort"
|
||||
fi
|
||||
fi
|
||||
if [ "$2" != "" ]; then
|
||||
volume="$2"
|
||||
else
|
||||
"read" "-p" "Volume Name [$defaultVolume]: " "volume"
|
||||
if [ "$volume" == "" ]; then
|
||||
volume="$defaultVolume"
|
||||
fi
|
||||
fi
|
||||
"echo" "-e" "Port: $port"
|
||||
"echo" "-e" "Volume: $volume"
|
||||
docker volume create $volume
|
||||
docker run -d --restart=always -p $port:3001 -v $volume:/app/data --name uptime-kuma louislam/uptime-kuma:1
|
||||
fi
|
||||
"echo" "-e" "http://localhost:$port"
|
kubernetes/README.md (new file, 32 lines)
@@ -0,0 +1,32 @@
# Uptime-Kuma K8s Deployment

⚠ Warning: The K8s deployment is provided by contributors. I have no experience with K8s and cannot fix errors in it. I only test Docker and Node.js. Use at your own risk.

## How does it work?

Kustomize is a tool which builds a complete deployment file from all the config elements.
You can edit the files in the ```uptime-kuma``` folder, but leave the ```kustomization.yml``` alone unless you know what you're doing.
If you want to choose another namespace, edit the ```kustomization.yml``` in the ```kubernetes``` folder and change ```namespace: uptime-kuma``` to something you like.

It creates a certificate with the specified Issuer and creates the Ingress for the Uptime-Kuma ClusterIP-Service.

## What do I have to edit?

You have to edit the ```ingressroute.yml``` to your needs.
This ingressroute.yml is for the [nginx-ingress-controller](https://kubernetes.github.io/ingress-nginx/) in combination with the [cert-manager](https://cert-manager.io/).

- Host
- Secrets and secret names
- (Cluster)Issuer (optional)
- The version in the Deployment file
- Update:
  - Change to a newer version and run the commands from "How To use" again; the pods will be updated one after another

## How To use

- Install [kustomize](https://kubectl.docs.kubernetes.io/installation/kustomize/)
- Edit the files mentioned above to your needs
- Run ```kustomize build > apply.yml``` (see the sketch after this section)
- Run ```kubectl apply -f apply.yml```

Now you should see some k8s magic and Uptime-Kuma should be available at the specified address.
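Putting those steps together, a minimal sketch of one deployment round-trip, run from the kubernetes folder; the resource names assume the default namespace and the uptime-kuma- name prefix from kustomization.yml:

```
kustomize build > apply.yml
kubectl apply -f apply.yml

# Watch the single replica come up (names assume the default prefix/namespace)
kubectl -n uptime-kuma rollout status deployment/uptime-kuma-deployment
kubectl -n uptime-kuma get pods
```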
kubernetes/kustomization.yml (new file, 10 lines)
@@ -0,0 +1,10 @@
namespace: uptime-kuma
namePrefix: uptime-kuma-

commonLabels:
  app: uptime-kuma

bases:
  - uptime-kuma
kubernetes/uptime-kuma/deployment.yml (new file, 45 lines)
@@ -0,0 +1,45 @@
apiVersion: apps/v1
kind: Deployment
metadata:
  labels:
    component: uptime-kuma
  name: deployment
spec:
  selector:
    matchLabels:
      component: uptime-kuma
  replicas: 1
  strategy:
    type: Recreate

  template:
    metadata:
      labels:
        component: uptime-kuma
    spec:
      containers:
        - name: app
          image: louislam/uptime-kuma:1
          ports:
            - containerPort: 3001
          volumeMounts:
            - mountPath: /app/data
              name: storage
          livenessProbe:
            exec:
              command:
                - node
                - extra/healthcheck.js
            initialDelaySeconds: 180
            periodSeconds: 60
            timeoutSeconds: 30
          readinessProbe:
            httpGet:
              path: /
              port: 3001
              scheme: HTTP

      volumes:
        - name: storage
          persistentVolumeClaim:
            claimName: pvc
kubernetes/uptime-kuma/ingressroute.yml (new file, 39 lines)
@@ -0,0 +1,39 @@
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  annotations:
    kubernetes.io/ingress.class: nginx
    cert-manager.io/cluster-issuer: letsencrypt-prod
    nginx.ingress.kubernetes.io/proxy-read-timeout: "3600"
    nginx.ingress.kubernetes.io/proxy-send-timeout: "3600"
    nginx.ingress.kubernetes.io/server-snippets: |
      location / {
        proxy_set_header Upgrade $http_upgrade;
        proxy_http_version 1.1;
        proxy_set_header X-Forwarded-Host $http_host;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_set_header X-Forwarded-For $remote_addr;
        proxy_set_header Host $host;
        proxy_set_header Connection "upgrade";
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header Upgrade $http_upgrade;
        proxy_cache_bypass $http_upgrade;
      }
  name: ingress
spec:
  tls:
    - hosts:
        - example.com
      secretName: example-com-tls
  rules:
    - host: example.com
      http:
        paths:
          - path: /
            pathType: Prefix
            backend:
              service:
                name: service
                port:
                  number: 3001
kubernetes/uptime-kuma/kustomization.yml (new file, 5 lines)
@@ -0,0 +1,5 @@
resources:
  - deployment.yml
  - service.yml
  - ingressroute.yml
  - pvc.yml
kubernetes/uptime-kuma/pvc.yml (new file, 10 lines)
@@ -0,0 +1,10 @@
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: pvc
spec:
  accessModes:
    - ReadWriteOnce
  resources:
    requests:
      storage: 4Gi
kubernetes/uptime-kuma/service.yml (new file, 13 lines)
@@ -0,0 +1,13 @@
apiVersion: v1
kind: Service
metadata:
  name: service
spec:
  selector:
    component: uptime-kuma
  type: ClusterIP
  ports:
    - name: http
      port: 3001
      targetPort: 3001
      protocol: TCP
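Because the Service is ClusterIP-only, the simplest way to reach the UI without the Ingress is a port-forward; a sketch, assuming the default namespace and name prefix from the kustomization above:

```
kubectl -n uptime-kuma port-forward service/uptime-kuma-service 3001:3001
# then open http://localhost:3001
```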
package-lock.json (generated, 17775 lines): diff suppressed because it is too large.
113
package.json
113
package.json
@@ -1,45 +1,108 @@
|
||||
{
|
||||
"name": "uptime-kuma",
|
||||
"version": "1.0.2",
|
||||
"version": "1.7.2",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/louislam/uptime-kuma.git"
|
||||
},
|
||||
"engines": {
|
||||
"node": "14.*"
|
||||
},
|
||||
"scripts": {
|
||||
"install-legacy": "npm install --legacy-peer-deps",
|
||||
"update-legacy": "npm update --legacy-peer-deps",
|
||||
"lint:js": "eslint --ext \".js,.vue\" --ignore-path .gitignore .",
|
||||
"lint:style": "stylelint \"**/*.{vue,css,scss}\" --ignore-path .gitignore",
|
||||
"lint": "npm run lint:js && npm run lint:style",
|
||||
"dev": "vite --host",
|
||||
"start": "npm run start-server",
|
||||
"start-server": "node server/server.js",
|
||||
"update": "",
|
||||
"start-server-dev": "cross-env NODE_ENV=development node server/server.js",
|
||||
"build": "vite build",
|
||||
"tsc": "tsc",
|
||||
"vite-preview-dist": "vite preview --host",
|
||||
"build-docker": "docker buildx build --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma -t louislam/uptime-kuma:1 -t louislam/uptime-kuma:1.0.2 . --push",
|
||||
"build-docker-nightly": "docker buildx build --platform linux/amd64 -t louislam/uptime-kuma:nightly . --push",
|
||||
"setup": "git checkout 1.0.2 && npm install && npm run build",
|
||||
"version-global-replace": "node extra/version-global-replace.js"
|
||||
"build-docker": "npm run build-docker-debian && npm run build-docker-alpine",
|
||||
"build-docker-alpine": "docker buildx build -f dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:alpine -t louislam/uptime-kuma:1-alpine -t louislam/uptime-kuma:1.7.2-alpine --target release . --push",
|
||||
"build-docker-debian": "docker buildx build --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma -t louislam/uptime-kuma:1 -t louislam/uptime-kuma:1.7.2 -t louislam/uptime-kuma:debian -t louislam/uptime-kuma:1-debian -t louislam/uptime-kuma:1.7.2-debian --target release . --push",
|
||||
"build-docker-nightly": "docker buildx build --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly --target nightly . --push",
|
||||
"build-docker-nightly-alpine": "docker buildx build -f dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly-alpine --target nightly . --push",
|
||||
"build-docker-nightly-amd64": "docker buildx build --platform linux/amd64 -t louislam/uptime-kuma:nightly-amd64 --target nightly . --push --progress plain",
|
||||
"setup": "git checkout 1.7.2 && npm install --legacy-peer-deps && node node_modules/esbuild/install.js && npm run build && npm prune",
|
||||
"update-version": "node extra/update-version.js",
|
||||
"mark-as-nightly": "node extra/mark-as-nightly.js",
|
||||
"reset-password": "node extra/reset-password.js",
|
||||
"compile-install-script": "@powershell -NoProfile -ExecutionPolicy Unrestricted -Command ./extra/compile-install-script.ps1",
|
||||
"test-install-script-centos7": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/centos7.dockerfile .",
|
||||
"test-install-script-alpine3": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/alpine3.dockerfile .",
|
||||
"test-install-script-ubuntu": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu.dockerfile .",
|
||||
"test-install-script-ubuntu1604": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu1604.dockerfile .",
|
||||
"test-nodejs16": "docker build --progress plain -f test/ubuntu-nodejs16.dockerfile .",
|
||||
"simple-dns-server": "node extra/simple-dns-server.js",
|
||||
"update-language-files-with-base-lang": "cd extra/update-language-files && node index.js %npm_config_base_lang% && eslint ../../src/languages/**.js --fix",
|
||||
"update-language-files": "cd extra/update-language-files && node index.js && eslint ../../src/languages/**.js --fix"
|
||||
},
|
||||
"dependencies": {
|
||||
"@popperjs/core": "^2.9.2",
|
||||
"@fortawesome/fontawesome-svg-core": "^1.2.36",
|
||||
"@fortawesome/free-regular-svg-icons": "^5.15.4",
|
||||
"@fortawesome/free-solid-svg-icons": "^5.15.4",
|
||||
"@fortawesome/vue-fontawesome": "^3.0.0-4",
|
||||
"@louislam/sqlite3": "^5.0.6",
|
||||
"@popperjs/core": "^2.10.1",
|
||||
"args-parser": "^1.3.0",
|
||||
"axios": "^0.21.1",
|
||||
"bcrypt": "^5.0.1",
|
||||
"bootstrap": "^5.0.0",
|
||||
"dayjs": "^1.10.4",
|
||||
"axios": "^0.21.4",
|
||||
"bcryptjs": "^2.4.3",
|
||||
"bootstrap": "^5.1.1",
|
||||
"chart.js": "^3.5.1",
|
||||
"chartjs-adapter-dayjs": "^1.0.0",
|
||||
"command-exists": "^1.2.9",
|
||||
"compare-versions": "^3.6.0",
|
||||
"dayjs": "^1.10.7",
|
||||
"express": "^4.17.1",
|
||||
"express-basic-auth": "^1.2.0",
|
||||
"form-data": "^4.0.0",
|
||||
"http-graceful-shutdown": "^3.1.4",
|
||||
"jsonwebtoken": "^8.5.1",
|
||||
"nodemailer": "^6.6.2",
|
||||
"nodemailer": "^6.6.5",
|
||||
"notp": "^2.0.3",
|
||||
"password-hash": "^1.2.2",
|
||||
"redbean-node": "0.0.20",
|
||||
"socket.io": "^4.0.2",
|
||||
"socket.io-client": "^4.1.2",
|
||||
"sqlite3": "^5.0.0",
|
||||
"prom-client": "^13.2.0",
|
||||
"prometheus-api-metrics": "^3.2.0",
|
||||
"qrcode": "^1.4.4",
|
||||
"redbean-node": "0.1.2",
|
||||
"socket.io": "^4.2.0",
|
||||
"socket.io-client": "^4.2.0",
|
||||
"tcp-ping": "^0.1.1",
|
||||
"vue": "^3.0.5",
|
||||
"thirty-two": "^1.0.2",
|
||||
"timezones-list": "^3.0.1",
|
||||
"v-pagination-3": "^0.1.6",
|
||||
"vue": "next",
|
||||
"vue-chart-3": "^0.5.8",
|
||||
"vue-confirm-dialog": "^1.0.2",
|
||||
"vue-router": "^4.0.10",
|
||||
"vue-toastification": "^2.0.0-rc.1"
|
||||
"vue-contenteditable": "^3.0.4",
|
||||
"vue-i18n": "^9.1.7",
|
||||
"vue-image-crop-upload": "^3.0.3",
|
||||
"vue-multiselect": "^3.0.0-alpha.2",
|
||||
"vue-qrcode": "^1.0.0",
|
||||
"vue-router": "^4.0.11",
|
||||
"vue-toastification": "^2.0.0-rc.1",
|
||||
"vuedraggable": "^4.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@vitejs/plugin-legacy": "^1.4.3",
|
||||
"@vitejs/plugin-vue": "^1.2.3",
|
||||
"@vue/compiler-sfc": "^3.0.5",
|
||||
"core-js": "^3.15.2",
|
||||
"sass": "^1.35.1",
|
||||
"vite": "^2.3.7"
|
||||
"@babel/eslint-parser": "^7.15.7",
|
||||
"@types/bootstrap": "^5.1.6",
|
||||
"@vitejs/plugin-legacy": "^1.5.3",
|
||||
"@vitejs/plugin-vue": "^1.9.1",
|
||||
"@vue/compiler-sfc": "^3.2.16",
|
||||
"core-js": "^3.18.0",
|
||||
"cross-env": "^7.0.3",
|
||||
"dns2": "^2.0.1",
|
||||
"eslint": "^7.32.0",
|
||||
"eslint-plugin-vue": "^7.18.0",
|
||||
"sass": "^1.42.1",
|
||||
"stylelint": "^13.13.1",
|
||||
"stylelint-config-standard": "^22.0.0",
|
||||
"typescript": "^4.4.3",
|
||||
"vite": "^2.5.10"
|
||||
}
|
||||
}
|
||||
|
public/apple-touch-icon-precomposed.png (new binary file, 5.7 KiB, not shown)
public/apple-touch-icon.png (new binary file, 4.7 KiB, not shown)
public/favicon.ico (new binary file, 15 KiB, not shown)
public/icon-192x192.png (new binary file, 2.6 KiB, not shown)
public/icon-512x512.png (new binary file, 9.5 KiB, not shown)
public/manifest.json (new file, 19 lines)
@@ -0,0 +1,19 @@
{
    "name": "Uptime Kuma",
    "short_name": "Uptime Kuma",
    "start_url": "/",
    "background_color": "#fff",
    "display": "standalone",
    "icons": [
        {
            "src": "icon-192x192.png",
            "sizes": "192x192",
            "type": "image/png"
        },
        {
            "src": "icon-512x512.png",
            "sizes": "512x512",
            "type": "image/png"
        }
    ]
}
51
server/auth.js
Normal file
51
server/auth.js
Normal file
@@ -0,0 +1,51 @@
|
||||
const basicAuth = require("express-basic-auth")
|
||||
const passwordHash = require("./password-hash");
|
||||
const { R } = require("redbean-node");
|
||||
const { setting } = require("./util-server");
|
||||
const { debug } = require("../src/util");
|
||||
|
||||
/**
|
||||
*
|
||||
* @param username : string
|
||||
* @param password : string
|
||||
* @returns {Promise<Bean|null>}
|
||||
*/
|
||||
exports.login = async function (username, password) {
|
||||
let user = await R.findOne("user", " username = ? AND active = 1 ", [
|
||||
username,
|
||||
])
|
||||
|
||||
if (user && passwordHash.verify(password, user.password)) {
|
||||
// Upgrade the hash to bcrypt
|
||||
if (passwordHash.needRehash(user.password)) {
|
||||
await R.exec("UPDATE `user` SET password = ? WHERE id = ? ", [
|
||||
passwordHash.generate(password),
|
||||
user.id,
|
||||
]);
|
||||
}
|
||||
return user;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
function myAuthorizer(username, password, callback) {
|
||||
|
||||
setting("disableAuth").then((result) => {
|
||||
|
||||
if (result) {
|
||||
callback(null, true)
|
||||
} else {
|
||||
exports.login(username, password).then((user) => {
|
||||
callback(null, user != null)
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
exports.basicAuth = basicAuth({
|
||||
authorizer: myAuthorizer,
|
||||
authorizeAsync: true,
|
||||
challenge: true,
|
||||
});
|
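The exported basicAuth middleware checks credentials against the same user table as the socket login, and skips the check entirely when the disableAuth setting is on. As a sketch only, assuming it is mounted on the Prometheus /metrics route suggested by the prometheus-api-metrics dependency (the mounting itself is not shown in this diff), a scrape could look like:

```
# Hypothetical: the credentials are your Uptime Kuma login; /metrics is an assumed route
curl -u admin:secret http://localhost:3001/metrics
```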
44
server/check-version.js
Normal file
44
server/check-version.js
Normal file
@@ -0,0 +1,44 @@
|
||||
const { setSetting } = require("./util-server");
|
||||
const axios = require("axios");
|
||||
const { isDev } = require("../src/util");
|
||||
|
||||
exports.version = require("../package.json").version;
|
||||
exports.latestVersion = null;
|
||||
|
||||
let interval;
|
||||
|
||||
exports.startInterval = () => {
|
||||
let check = async () => {
|
||||
try {
|
||||
const res = await axios.get("https://raw.githubusercontent.com/louislam/uptime-kuma/master/package.json");
|
||||
|
||||
if (typeof res.data === "string") {
|
||||
res.data = JSON.parse(res.data);
|
||||
}
|
||||
|
||||
// For debug
|
||||
if (process.env.TEST_CHECK_VERSION === "1") {
|
||||
res.data.version = "1000.0.0";
|
||||
}
|
||||
|
||||
exports.latestVersion = res.data.version;
|
||||
console.log("Latest Version: " + exports.latestVersion);
|
||||
} catch (_) { }
|
||||
|
||||
};
|
||||
|
||||
check();
|
||||
interval = setInterval(check, 3600 * 1000 * 48);
|
||||
};
|
||||
|
||||
exports.enableCheckUpdate = async (value) => {
|
||||
await setSetting("checkUpdate", value);
|
||||
|
||||
clearInterval(interval);
|
||||
|
||||
if (value) {
|
||||
exports.startInterval();
|
||||
}
|
||||
};
|
||||
|
||||
exports.socket = null;
|
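The check only fetches package.json from the master branch and logs the result, so the TEST_CHECK_VERSION switch above is the easiest way to exercise the "new version available" path locally; a minimal sketch, assuming the server calls startInterval() on boot:

```
# Pretend the latest release is 1000.0.0 so the update notice can be tested
TEST_CHECK_VERSION=1 node server/server.js
```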
88
server/client.js
Normal file
88
server/client.js
Normal file
@@ -0,0 +1,88 @@
/*
 * For Client Socket
 */
const { TimeLogger } = require("../src/util");
const { R } = require("redbean-node");
const { io } = require("./server");

async function sendNotificationList(socket) {
    const timeLogger = new TimeLogger();

    let result = [];
    let list = await R.find("notification", " user_id = ? ", [
        socket.userID,
    ]);

    for (let bean of list) {
        result.push(bean.export())
    }

    io.to(socket.userID).emit("notificationList", result)

    timeLogger.print("Send Notification List");

    return list;
}

/**
 * Send Heartbeat History list to socket
 * @param toUser True = send to all browsers with the same user id, False = send to the current browser only
 * @param overwrite Overwrite client-side's heartbeat list
 */
async function sendHeartbeatList(socket, monitorID, toUser = false, overwrite = false) {
    const timeLogger = new TimeLogger();

    let list = await R.getAll(`
        SELECT * FROM heartbeat
        WHERE monitor_id = ?
        ORDER BY time DESC
        LIMIT 100
    `, [
        monitorID,
    ])

    let result = list.reverse();

    if (toUser) {
        io.to(socket.userID).emit("heartbeatList", monitorID, result, overwrite);
    } else {
        socket.emit("heartbeatList", monitorID, result, overwrite);
    }

    timeLogger.print(`[Monitor: ${monitorID}] sendHeartbeatList`);
}

/**
 * Important Heart beat list (aka event list)
 * @param socket
 * @param monitorID
 * @param toUser True = send to all browsers with the same user id, False = send to the current browser only
 * @param overwrite Overwrite client-side's heartbeat list
 */
async function sendImportantHeartbeatList(socket, monitorID, toUser = false, overwrite = false) {
    const timeLogger = new TimeLogger();

    let list = await R.find("heartbeat", `
        monitor_id = ?
        AND important = 1
        ORDER BY time DESC
        LIMIT 500
    `, [
        monitorID,
    ])

    timeLogger.print(`[Monitor: ${monitorID}] sendImportantHeartbeatList`);

    if (toUser) {
        io.to(socket.userID).emit("importantHeartbeatList", monitorID, list, overwrite);
    } else {
        socket.emit("importantHeartbeatList", monitorID, list, overwrite);
    }

}

module.exports = {
    sendNotificationList,
    sendImportantHeartbeatList,
    sendHeartbeatList,
}
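
// Illustrative sketch (not part of this changeset): a socket handler using the
// helpers above. The event names "requestHeartbeatList" and
// "requestNotificationList" are made up for the example; only the imported
// functions come from client.js.
const { io } = require("./server");
const { sendHeartbeatList, sendNotificationList } = require("./client");

io.on("connection", (socket) => {
    socket.on("requestHeartbeatList", async (monitorID) => {
        // Push the latest 100 beats to every browser tab of the same user,
        // overwriting whatever the client currently holds.
        await sendHeartbeatList(socket, monitorID, true, true);
    });

    socket.on("requestNotificationList", async () => {
        await sendNotificationList(socket);
    });
});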
server/database.js (new file, 376 lines)
@@ -0,0 +1,376 @@
|
||||
const fs = require("fs");
|
||||
const { R } = require("redbean-node");
|
||||
const { setSetting, setting } = require("./util-server");
|
||||
const { debug, sleep } = require("../src/util");
|
||||
const dayjs = require("dayjs");
|
||||
const knex = require("knex");
|
||||
|
||||
/**
|
||||
* Database & App Data Folder
|
||||
*/
|
||||
class Database {
|
||||
|
||||
static templatePath = "./db/kuma.db";
|
||||
|
||||
/**
|
||||
* Data Dir (Default: ./data)
|
||||
*/
|
||||
static dataDir;
|
||||
|
||||
/**
|
||||
* User Upload Dir (Default: ./data/upload)
|
||||
*/
|
||||
static uploadDir;
|
||||
|
||||
static path;
|
||||
|
||||
/**
|
||||
* @type {boolean}
|
||||
*/
|
||||
static patched = false;
|
||||
|
||||
/**
|
||||
* For Backup only
|
||||
*/
|
||||
static backupPath = null;
|
||||
|
||||
/**
|
||||
* Add patch filename in key
|
||||
* Values:
|
||||
* true: Add it regardless of order
|
||||
* false: Do nothing
|
||||
* { parents: []}: Need parents before add it
|
||||
*/
|
||||
static patchList = {
|
||||
"patch-setting-value-type.sql": true,
|
||||
"patch-improve-performance.sql": true,
|
||||
"patch-2fa.sql": true,
|
||||
"patch-add-retry-interval-monitor.sql": true,
|
||||
"patch-incident-table.sql": true,
|
||||
"patch-group-table.sql": true,
|
||||
}
|
||||
|
||||
/**
|
||||
* The finally version should be 10 after merged tag feature
|
||||
* @deprecated Use patchList for any new feature
|
||||
*/
|
||||
static latestVersion = 10;
|
||||
|
||||
static noReject = true;
|
||||
|
||||
static init(args) {
|
||||
// Data Directory (must be end with "/")
|
||||
Database.dataDir = process.env.DATA_DIR || args["data-dir"] || "./data/";
|
||||
Database.path = Database.dataDir + "kuma.db";
|
||||
if (! fs.existsSync(Database.dataDir)) {
|
||||
fs.mkdirSync(Database.dataDir, { recursive: true });
|
||||
}
|
||||
|
||||
Database.uploadDir = Database.dataDir + "upload/";
|
||||
|
||||
if (! fs.existsSync(Database.uploadDir)) {
|
||||
fs.mkdirSync(Database.uploadDir, { recursive: true });
|
||||
}
|
||||
|
||||
console.log(`Data Dir: ${Database.dataDir}`);
|
||||
}
|
||||
|
||||
static async connect() {
|
||||
const acquireConnectionTimeout = 120 * 1000;
|
||||
|
||||
const Dialect = require("knex/lib/dialects/sqlite3/index.js");
|
||||
Dialect.prototype._driver = () => require("@louislam/sqlite3");
|
||||
|
||||
const knexInstance = knex({
|
||||
client: Dialect,
|
||||
connection: {
|
||||
filename: Database.path,
|
||||
acquireConnectionTimeout: acquireConnectionTimeout,
|
||||
},
|
||||
useNullAsDefault: true,
|
||||
pool: {
|
||||
min: 1,
|
||||
max: 1,
|
||||
idleTimeoutMillis: 120 * 1000,
|
||||
propagateCreateError: false,
|
||||
acquireTimeoutMillis: acquireConnectionTimeout,
|
||||
}
|
||||
});
|
||||
|
||||
R.setup(knexInstance);
|
||||
|
||||
if (process.env.SQL_LOG === "1") {
|
||||
R.debug(true);
|
||||
}
|
||||
|
||||
// Auto map the model to a bean object
|
||||
R.freeze(true);
|
||||
await R.autoloadModels("./server/model");
|
||||
|
||||
await R.exec("PRAGMA foreign_keys = ON");
|
||||
// Change to WAL
|
||||
await R.exec("PRAGMA journal_mode = WAL");
|
||||
await R.exec("PRAGMA cache_size = -12000");
|
||||
|
||||
console.log("SQLite config:");
|
||||
console.log(await R.getAll("PRAGMA journal_mode"));
|
||||
console.log(await R.getAll("PRAGMA cache_size"));
|
||||
console.log("SQLite Version: " + await R.getCell("SELECT sqlite_version()"));
|
||||
}
|
||||
|
||||
static async patch() {
|
||||
let version = parseInt(await setting("database_version"));
|
||||
|
||||
if (! version) {
|
||||
version = 0;
|
||||
}
|
||||
|
||||
console.info("Your database version: " + version);
|
||||
console.info("Latest database version: " + this.latestVersion);
|
||||
|
||||
if (version === this.latestVersion) {
|
||||
console.info("Database no need to patch");
|
||||
} else if (version > this.latestVersion) {
|
||||
console.info("Warning: Database version is newer than expected");
|
||||
} else {
|
||||
console.info("Database patch is needed");
|
||||
|
||||
this.backup(version);
|
||||
|
||||
// Try catch anything here, if gone wrong, restore the backup
|
||||
try {
|
||||
for (let i = version + 1; i <= this.latestVersion; i++) {
|
||||
const sqlFile = `./db/patch${i}.sql`;
|
||||
console.info(`Patching ${sqlFile}`);
|
||||
await Database.importSQLFile(sqlFile);
|
||||
console.info(`Patched ${sqlFile}`);
|
||||
await setSetting("database_version", i);
|
||||
}
|
||||
} catch (ex) {
|
||||
await Database.close();
|
||||
|
||||
console.error(ex);
|
||||
console.error("Start Uptime-Kuma failed due to patch db failed");
|
||||
console.error("Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
|
||||
|
||||
this.restore();
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
await this.patch2();
|
||||
}
|
||||
|
||||
/**
|
||||
* Call it from patch() only
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
static async patch2() {
|
||||
console.log("Database Patch 2.0 Process");
|
||||
let databasePatchedFiles = await setting("databasePatchedFiles");
|
||||
|
||||
if (! databasePatchedFiles) {
|
||||
databasePatchedFiles = {};
|
||||
}
|
||||
|
||||
debug("Patched files:");
|
||||
debug(databasePatchedFiles);
|
||||
|
||||
try {
|
||||
for (let sqlFilename in this.patchList) {
|
||||
await this.patch2Recursion(sqlFilename, databasePatchedFiles);
|
||||
}
|
||||
|
||||
if (this.patched) {
|
||||
console.log("Database Patched Successfully");
|
||||
}
|
||||
|
||||
} catch (ex) {
|
||||
await Database.close();
|
||||
|
||||
console.error(ex);
|
||||
console.error("Start Uptime-Kuma failed due to patch db failed");
|
||||
console.error("Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
|
||||
|
||||
this.restore();
|
||||
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
await setSetting("databasePatchedFiles", databasePatchedFiles);
|
||||
}
|
||||
|
||||
/**
|
||||
* Used it patch2() only
|
||||
* @param sqlFilename
|
||||
* @param databasePatchedFiles
|
||||
*/
|
||||
static async patch2Recursion(sqlFilename, databasePatchedFiles) {
|
||||
let value = this.patchList[sqlFilename];
|
||||
|
||||
if (! value) {
|
||||
console.log(sqlFilename + " skip");
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if patched
|
||||
if (! databasePatchedFiles[sqlFilename]) {
|
||||
console.log(sqlFilename + " is not patched");
|
||||
|
||||
if (value.parents) {
|
||||
console.log(sqlFilename + " need parents");
|
||||
for (let parentSQLFilename of value.parents) {
|
||||
await this.patch2Recursion(parentSQLFilename, databasePatchedFiles);
|
||||
}
|
||||
}
|
||||
|
||||
this.backup(dayjs().format("YYYYMMDDHHmmss"));
|
||||
|
||||
console.log(sqlFilename + " is patching");
|
||||
this.patched = true;
|
||||
await this.importSQLFile("./db/" + sqlFilename);
|
||||
databasePatchedFiles[sqlFilename] = true;
|
||||
console.log(sqlFilename + " is patched successfully");
|
||||
|
||||
} else {
|
||||
debug(sqlFilename + " is already patched, skip");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sadly, multi sql statements is not supported by many sqlite libraries, I have to implement it myself
|
||||
* @param filename
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
static async importSQLFile(filename) {
|
||||
|
||||
await R.getCell("SELECT 1");
|
||||
|
||||
let text = fs.readFileSync(filename).toString();
|
||||
|
||||
// Remove all comments (--)
|
||||
let lines = text.split("\n");
|
||||
lines = lines.filter((line) => {
|
||||
return ! line.startsWith("--");
|
||||
});
|
||||
|
||||
// Split statements by semicolon
|
||||
// Filter out empty line
|
||||
text = lines.join("\n");
|
||||
|
||||
let statements = text.split(";")
|
||||
.map((statement) => {
|
||||
return statement.trim();
|
||||
})
|
||||
.filter((statement) => {
|
||||
return statement !== "";
|
||||
});
|
||||
|
||||
for (let statement of statements) {
|
||||
await R.exec(statement);
|
||||
}
|
||||
}
|
||||
|
||||
static getBetterSQLite3Database() {
|
||||
return R.knex.client.acquireConnection();
|
||||
}
|
||||
|
||||
/**
|
||||
* Special handle, because tarn.js throw a promise reject that cannot be caught
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
static async close() {
|
||||
const listener = (reason, p) => {
|
||||
Database.noReject = false;
|
||||
};
|
||||
process.addListener("unhandledRejection", listener);
|
||||
|
||||
console.log("Closing DB");
|
||||
|
||||
while (true) {
|
||||
Database.noReject = true;
|
||||
await R.close();
|
||||
await sleep(2000);
|
||||
|
||||
if (Database.noReject) {
|
||||
break;
|
||||
} else {
|
||||
console.log("Waiting to close the db");
|
||||
}
|
||||
}
|
||||
console.log("SQLite closed");
|
||||
|
||||
process.removeListener("unhandledRejection", listener);
|
||||
}
|
||||
|
||||
/**
|
||||
* One backup one time in this process.
|
||||
* Reset this.backupPath if you want to backup again
|
||||
* @param version
|
||||
*/
|
||||
static backup(version) {
|
||||
if (! this.backupPath) {
|
||||
console.info("Backup the db");
|
||||
this.backupPath = this.dataDir + "kuma.db.bak" + version;
|
||||
fs.copyFileSync(Database.path, this.backupPath);
|
||||
|
||||
const shmPath = Database.path + "-shm";
|
||||
if (fs.existsSync(shmPath)) {
|
||||
this.backupShmPath = shmPath + ".bak" + version;
|
||||
fs.copyFileSync(shmPath, this.backupShmPath);
|
||||
}
|
||||
|
||||
const walPath = Database.path + "-wal";
|
||||
if (fs.existsSync(walPath)) {
|
||||
this.backupWalPath = walPath + ".bak" + version;
|
||||
fs.copyFileSync(walPath, this.backupWalPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
static restore() {
|
||||
if (this.backupPath) {
|
||||
console.error("Patch db failed!!! Restoring the backup");
|
||||
|
||||
const shmPath = Database.path + "-shm";
|
||||
const walPath = Database.path + "-wal";
|
||||
|
||||
// Delete patch failed db
|
||||
try {
|
||||
if (fs.existsSync(Database.path)) {
|
||||
fs.unlinkSync(Database.path);
|
||||
}
|
||||
|
||||
if (fs.existsSync(shmPath)) {
|
||||
fs.unlinkSync(shmPath);
|
||||
}
|
||||
|
||||
if (fs.existsSync(walPath)) {
|
||||
fs.unlinkSync(walPath);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("Restore failed, you may need to restore the backup manually");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Restore backup
|
||||
fs.copyFileSync(this.backupPath, Database.path);
|
||||
|
||||
if (this.backupShmPath) {
|
||||
fs.copyFileSync(this.backupShmPath, shmPath);
|
||||
}
|
||||
|
||||
if (this.backupWalPath) {
|
||||
fs.copyFileSync(this.backupWalPath, walPath);
|
||||
}
|
||||
|
||||
} else {
|
||||
console.log("Nothing to restore");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Database;
|
server/image-data-uri.js (new file, 57 lines)
@@ -0,0 +1,57 @@
/*
    From https://github.com/DiegoZoracKy/image-data-uri/blob/master/lib/image-data-uri.js
    Modified with 0 dependencies
 */
let fs = require("fs");

let ImageDataURI = (() => {

    function decode(dataURI) {
        if (!/data:image\//.test(dataURI)) {
            console.log("ImageDataURI :: Error :: It seems that it is not an Image Data URI. Couldn't match \"data:image/\"");
            return null;
        }

        let regExMatches = dataURI.match("data:(image/.*);base64,(.*)");
        return {
            imageType: regExMatches[1],
            dataBase64: regExMatches[2],
            dataBuffer: new Buffer(regExMatches[2], "base64")
        };
    }

    function encode(data, mediaType) {
        if (!data || !mediaType) {
            console.log("ImageDataURI :: Error :: Missing some of the required params: data, mediaType ");
            return null;
        }

        mediaType = (/\//.test(mediaType)) ? mediaType : "image/" + mediaType;
        let dataBase64 = (Buffer.isBuffer(data)) ? data.toString("base64") : new Buffer(data).toString("base64");
        let dataImgBase64 = "data:" + mediaType + ";base64," + dataBase64;

        return dataImgBase64;
    }

    function outputFile(dataURI, filePath) {
        filePath = filePath || "./";
        return new Promise((resolve, reject) => {
            let imageDecoded = decode(dataURI);

            fs.writeFile(filePath, imageDecoded.dataBuffer, err => {
                if (err) {
                    return reject("ImageDataURI :: Error :: " + JSON.stringify(err, null, 4));
                }
                resolve(filePath);
            });
        });
    }

    return {
        decode: decode,
        encode: encode,
        outputFile: outputFile,
    };
})();

module.exports = ImageDataURI;
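
// Illustrative sketch (not part of this changeset): persisting a data-URI image
// with the helper above. The data URI is a truncated placeholder and the target
// path is an assumption.
const ImageDataURI = require("./image-data-uri");

const dataURI = "data:image/png;base64,iVBORw0KGgo..."; // placeholder, not a real image
ImageDataURI.outputFile(dataURI, "./data/upload/logo.png")
    .then((savedPath) => console.log("Saved to " + savedPath))
    .catch((err) => console.error(err));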
server/model/group.js (new file, 34 lines)
@@ -0,0 +1,34 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
const { R } = require("redbean-node");

class Group extends BeanModel {

    async toPublicJSON() {
        let monitorBeanList = await this.getMonitorList();
        let monitorList = [];

        for (let bean of monitorBeanList) {
            monitorList.push(await bean.toPublicJSON());
        }

        return {
            id: this.id,
            name: this.name,
            weight: this.weight,
            monitorList,
        };
    }

    async getMonitorList() {
        return R.convertToBeans("monitor", await R.getAll(`
            SELECT monitor.* FROM monitor, monitor_group
            WHERE monitor.id = monitor_group.monitor_id
            AND group_id = ?
            ORDER BY monitor_group.weight
        `, [
            this.id,
        ]));
    }
}

module.exports = Group;
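
// Illustrative sketch (not part of this changeset): how a status page route could
// assemble its public payload from the Group model above. The "public" column and
// the weight ordering are assumptions, not taken from this file.
const { R } = require("redbean-node");

async function getPublicGroupList() {
    const publicGroupList = [];
    const groups = await R.find("group", " public = 1 ORDER BY weight ");

    for (const group of groups) {
        publicGroupList.push(await group.toPublicJSON());
    }

    return publicGroupList;
}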
server/model/heartbeat.js (modified)
@@ -1,20 +1,27 @@
const dayjs = require("dayjs");
const utc = require('dayjs/plugin/utc')
var timezone = require('dayjs/plugin/timezone')
dayjs.extend(utc)
dayjs.extend(timezone)
const axios = require("axios");
const {R} = require("redbean-node");
const {BeanModel} = require("redbean-node/dist/bean-model");

const utc = require("dayjs/plugin/utc");
let timezone = require("dayjs/plugin/timezone");
dayjs.extend(utc);
dayjs.extend(timezone);
const { BeanModel } = require("redbean-node/dist/bean-model");

/**
 * status:
 *      0 = DOWN
 *      1 = UP
 *      2 = PENDING
 */
class Heartbeat extends BeanModel {

    toPublicJSON() {
        return {
            status: this.status,
            time: this.time,
            msg: "", // Hide for public
            ping: this.ping,
        };
    }

    toJSON() {
        return {
            monitorID: this.monitor_id,
server/model/incident.js (new file, 18 lines)
@@ -0,0 +1,18 @@
const { BeanModel } = require("redbean-node/dist/bean-model");

class Incident extends BeanModel {

    toPublicJSON() {
        return {
            id: this.id,
            style: this.style,
            title: this.title,
            content: this.content,
            pin: this.pin,
            createdDate: this.createdDate,
            lastUpdatedDate: this.lastUpdatedDate,
        };
    }
}

module.exports = Incident;
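
// Illustrative sketch (not part of this changeset): creating and pinning an
// incident with the model above. Field names follow toPublicJSON(); the helper
// itself and the default style are assumptions.
const { R } = require("redbean-node");
const dayjs = require("dayjs");
const utc = require("dayjs/plugin/utc");
dayjs.extend(utc);

async function createIncident(title, content, style = "warning") {
    const incident = R.dispense("incident");
    incident.title = title;
    incident.content = content;
    incident.style = style;
    incident.pin = 1;
    incident.createdDate = R.isoDateTime(dayjs.utc());
    await R.store(incident);
    return incident.toPublicJSON();
}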
server/model/monitor.js (modified)
@@ -1,170 +1,396 @@
|
||||
|
||||
const https = require("https");
|
||||
const dayjs = require("dayjs");
|
||||
const utc = require('dayjs/plugin/utc')
|
||||
var timezone = require('dayjs/plugin/timezone')
|
||||
dayjs.extend(utc)
|
||||
dayjs.extend(timezone)
|
||||
const utc = require("dayjs/plugin/utc");
|
||||
let timezone = require("dayjs/plugin/timezone");
|
||||
dayjs.extend(utc);
|
||||
dayjs.extend(timezone);
|
||||
const axios = require("axios");
|
||||
const {tcping, ping} = require("../util-server");
|
||||
const {R} = require("redbean-node");
|
||||
const {BeanModel} = require("redbean-node/dist/bean-model");
|
||||
const {Notification} = require("../notification")
|
||||
const { Prometheus } = require("../prometheus");
|
||||
const { debug, UP, DOWN, PENDING, flipStatus, TimeLogger } = require("../../src/util");
|
||||
const { tcping, ping, dnsResolve, checkCertificate, checkStatusCode, getTotalClientInRoom } = require("../util-server");
|
||||
const { R } = require("redbean-node");
|
||||
const { BeanModel } = require("redbean-node/dist/bean-model");
|
||||
const { Notification } = require("../notification");
|
||||
const version = require("../../package.json").version;
|
||||
|
||||
/**
|
||||
* status:
|
||||
* 0 = DOWN
|
||||
* 1 = UP
|
||||
* 2 = PENDING
|
||||
*/
|
||||
class Monitor extends BeanModel {
|
||||
|
||||
/**
|
||||
* Return a object that ready to parse to JSON for public
|
||||
* Only show necessary data to public
|
||||
*/
|
||||
async toPublicJSON() {
|
||||
return {
|
||||
id: this.id,
|
||||
name: this.name,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a object that ready to parse to JSON
|
||||
*/
|
||||
async toJSON() {
|
||||
|
||||
let notificationIDList = {};
|
||||
|
||||
let list = await R.find("monitor_notification", " monitor_id = ? ", [
|
||||
this.id
|
||||
])
|
||||
this.id,
|
||||
]);
|
||||
|
||||
for (let bean of list) {
|
||||
notificationIDList[bean.notification_id] = true;
|
||||
}
|
||||
|
||||
const tags = await R.getAll("SELECT mt.*, tag.name, tag.color FROM monitor_tag mt JOIN tag ON mt.tag_id = tag.id WHERE mt.monitor_id = ?", [this.id]);
|
||||
|
||||
return {
|
||||
id: this.id,
|
||||
name: this.name,
|
||||
url: this.url,
|
||||
hostname: this.hostname,
|
||||
port: this.port,
|
||||
maxretries: this.maxretries,
|
||||
weight: this.weight,
|
||||
active: this.active,
|
||||
type: this.type,
|
||||
interval: this.interval,
|
||||
retryInterval: this.retryInterval,
|
||||
keyword: this.keyword,
|
||||
notificationIDList
|
||||
ignoreTls: this.getIgnoreTls(),
|
||||
upsideDown: this.isUpsideDown(),
|
||||
maxredirects: this.maxredirects,
|
||||
accepted_statuscodes: this.getAcceptedStatuscodes(),
|
||||
dns_resolve_type: this.dns_resolve_type,
|
||||
dns_resolve_server: this.dns_resolve_server,
|
||||
dns_last_result: this.dns_last_result,
|
||||
notificationIDList,
|
||||
tags: tags,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse to boolean
|
||||
* @returns {boolean}
|
||||
*/
|
||||
getIgnoreTls() {
|
||||
return Boolean(this.ignoreTls);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse to boolean
|
||||
* @returns {boolean}
|
||||
*/
|
||||
isUpsideDown() {
|
||||
return Boolean(this.upsideDown);
|
||||
}
|
||||
|
||||
getAcceptedStatuscodes() {
|
||||
return JSON.parse(this.accepted_statuscodes_json);
|
||||
}
|
||||
|
||||
start(io) {
|
||||
let previousBeat = null;
|
||||
let retries = 0;
|
||||
|
||||
let prometheus = new Prometheus(this);
|
||||
|
||||
const beat = async () => {
|
||||
console.log(`Monitor ${this.id}: Heartbeat`)
|
||||
|
||||
// Expose here for prometheus update
|
||||
// undefined if not https
|
||||
let tlsInfo = undefined;
|
||||
|
||||
if (! previousBeat) {
|
||||
previousBeat = await R.findOne("heartbeat", " monitor_id = ? ORDER BY time DESC", [
|
||||
this.id
|
||||
])
|
||||
this.id,
|
||||
]);
|
||||
}
|
||||
|
||||
let bean = R.dispense("heartbeat")
|
||||
const isFirstBeat = !previousBeat;
|
||||
|
||||
let bean = R.dispense("heartbeat");
|
||||
bean.monitor_id = this.id;
|
||||
bean.time = R.isoDateTime(dayjs.utc());
|
||||
bean.status = 0;
|
||||
bean.status = DOWN;
|
||||
|
||||
if (this.isUpsideDown()) {
|
||||
bean.status = flipStatus(bean.status);
|
||||
}
|
||||
|
||||
// Duration
|
||||
if (previousBeat) {
|
||||
bean.duration = dayjs(bean.time).diff(dayjs(previousBeat.time), 'second');
|
||||
if (! isFirstBeat) {
|
||||
bean.duration = dayjs(bean.time).diff(dayjs(previousBeat.time), "second");
|
||||
} else {
|
||||
bean.duration = 0;
|
||||
}
|
||||
|
||||
try {
|
||||
if (this.type === "http" || this.type === "keyword") {
|
||||
// Do not do any queries/high loading things before the "bean.ping"
|
||||
let startTime = dayjs().valueOf();
|
||||
|
||||
let res = await axios.get(this.url, {
|
||||
headers: { 'User-Agent':'Uptime-Kuma' }
|
||||
})
|
||||
bean.msg = `${res.status} - ${res.statusText}`
|
||||
timeout: this.interval * 1000 * 0.8,
|
||||
headers: {
|
||||
"Accept": "*/*",
|
||||
"User-Agent": "Uptime-Kuma/" + version,
|
||||
},
|
||||
httpsAgent: new https.Agent({
|
||||
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
|
||||
rejectUnauthorized: ! this.getIgnoreTls(),
|
||||
}),
|
||||
maxRedirects: this.maxredirects,
|
||||
validateStatus: (status) => {
|
||||
return checkStatusCode(status, this.getAcceptedStatuscodes());
|
||||
},
|
||||
});
|
||||
bean.msg = `${res.status} - ${res.statusText}`;
|
||||
bean.ping = dayjs().valueOf() - startTime;
|
||||
|
||||
// Check certificate if https is used
|
||||
let certInfoStartTime = dayjs().valueOf();
|
||||
if (this.getUrl()?.protocol === "https:") {
|
||||
try {
|
||||
tlsInfo = await this.updateTlsInfo(checkCertificate(res));
|
||||
} catch (e) {
|
||||
if (e.message !== "No TLS certificate in response") {
|
||||
console.error(e.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
debug("Cert Info Query Time: " + (dayjs().valueOf() - certInfoStartTime) + "ms");
|
||||
|
||||
if (this.type === "http") {
|
||||
bean.status = 1;
|
||||
bean.status = UP;
|
||||
} else {
|
||||
|
||||
let data = res.data;
|
||||
|
||||
// Convert to string for object/array
|
||||
if (typeof data !== "string") {
|
||||
data = JSON.stringify(data)
|
||||
data = JSON.stringify(data);
|
||||
}
|
||||
|
||||
if (data.includes(this.keyword)) {
|
||||
bean.msg += ", keyword is found"
|
||||
bean.status = 1;
|
||||
bean.msg += ", keyword is found";
|
||||
bean.status = UP;
|
||||
} else {
|
||||
throw new Error(bean.msg + ", but keyword is not found")
|
||||
throw new Error(bean.msg + ", but keyword is not found");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
} else if (this.type === "port") {
|
||||
bean.ping = await tcping(this.hostname, this.port);
|
||||
bean.msg = ""
|
||||
bean.status = 1;
|
||||
bean.msg = "";
|
||||
bean.status = UP;
|
||||
|
||||
} else if (this.type === "ping") {
|
||||
bean.ping = await ping(this.hostname);
|
||||
bean.msg = ""
|
||||
bean.status = 1;
|
||||
bean.msg = "";
|
||||
bean.status = UP;
|
||||
} else if (this.type === "dns") {
|
||||
let startTime = dayjs().valueOf();
|
||||
let dnsMessage = "";
|
||||
|
||||
let dnsRes = await dnsResolve(this.hostname, this.dns_resolve_server, this.dns_resolve_type);
|
||||
bean.ping = dayjs().valueOf() - startTime;
|
||||
|
||||
if (this.dns_resolve_type == "A" || this.dns_resolve_type == "AAAA" || this.dns_resolve_type == "TXT") {
|
||||
dnsMessage += "Records: ";
|
||||
dnsMessage += dnsRes.join(" | ");
|
||||
} else if (this.dns_resolve_type == "CNAME" || this.dns_resolve_type == "PTR") {
|
||||
dnsMessage = dnsRes[0];
|
||||
} else if (this.dns_resolve_type == "CAA") {
|
||||
dnsMessage = dnsRes[0].issue;
|
||||
} else if (this.dns_resolve_type == "MX") {
|
||||
dnsRes.forEach(record => {
|
||||
dnsMessage += `Hostname: ${record.exchange} - Priority: ${record.priority} | `;
|
||||
});
|
||||
dnsMessage = dnsMessage.slice(0, -2);
|
||||
} else if (this.dns_resolve_type == "NS") {
|
||||
dnsMessage += "Servers: ";
|
||||
dnsMessage += dnsRes.join(" | ");
|
||||
} else if (this.dns_resolve_type == "SOA") {
|
||||
dnsMessage += `NS-Name: ${dnsRes.nsname} | Hostmaster: ${dnsRes.hostmaster} | Serial: ${dnsRes.serial} | Refresh: ${dnsRes.refresh} | Retry: ${dnsRes.retry} | Expire: ${dnsRes.expire} | MinTTL: ${dnsRes.minttl}`;
|
||||
} else if (this.dns_resolve_type == "SRV") {
|
||||
dnsRes.forEach(record => {
|
||||
dnsMessage += `Name: ${record.name} | Port: ${record.port} | Priority: ${record.priority} | Weight: ${record.weight} | `;
|
||||
});
|
||||
dnsMessage = dnsMessage.slice(0, -2);
|
||||
}
|
||||
|
||||
if (this.dnsLastResult !== dnsMessage) {
|
||||
R.exec("UPDATE `monitor` SET dns_last_result = ? WHERE id = ? ", [
|
||||
dnsMessage,
|
||||
this.id
|
||||
]);
|
||||
}
|
||||
|
||||
bean.msg = dnsMessage;
|
||||
bean.status = UP;
|
||||
}
|
||||
|
||||
if (this.isUpsideDown()) {
|
||||
bean.status = flipStatus(bean.status);
|
||||
|
||||
if (bean.status === DOWN) {
|
||||
throw new Error("Flip UP to DOWN");
|
||||
}
|
||||
}
|
||||
|
||||
retries = 0;
|
||||
|
||||
} catch (error) {
|
||||
|
||||
bean.msg = error.message;
|
||||
|
||||
// If UP come in here, it must be upside down mode
|
||||
// Just reset the retries
|
||||
if (this.isUpsideDown() && bean.status === UP) {
|
||||
retries = 0;
|
||||
|
||||
} else if ((this.maxretries > 0) && (retries < this.maxretries)) {
|
||||
retries++;
|
||||
bean.status = PENDING;
|
||||
}
|
||||
}
|
||||
|
||||
// Mark as important if status changed
|
||||
if (! previousBeat || previousBeat.status !== bean.status) {
|
||||
// * ? -> ANY STATUS = important [isFirstBeat]
|
||||
// UP -> PENDING = not important
|
||||
// * UP -> DOWN = important
|
||||
// UP -> UP = not important
|
||||
// PENDING -> PENDING = not important
|
||||
// * PENDING -> DOWN = important
|
||||
// PENDING -> UP = not important
|
||||
// DOWN -> PENDING = this case not exists
|
||||
// DOWN -> DOWN = not important
|
||||
// * DOWN -> UP = important
|
||||
let isImportant = isFirstBeat ||
|
||||
(previousBeat.status === UP && bean.status === DOWN) ||
|
||||
(previousBeat.status === DOWN && bean.status === UP) ||
|
||||
(previousBeat.status === PENDING && bean.status === DOWN);
|
||||
|
||||
// Mark as important if status changed, ignore pending pings,
|
||||
// Don't notify if disrupted changes to up
|
||||
if (isImportant) {
|
||||
bean.important = true;
|
||||
|
||||
// Do not send if first beat is UP
|
||||
if (previousBeat || bean.status !== 1) {
|
||||
let notificationList = await R.getAll(`SELECT notification.* FROM notification, monitor_notification WHERE monitor_id = ? AND monitor_notification.notification_id = notification.id `, [
|
||||
this.id
|
||||
])
|
||||
|
||||
let promiseList = [];
|
||||
// Send only if the first beat is DOWN
|
||||
if (!isFirstBeat || bean.status === DOWN) {
|
||||
let notificationList = await R.getAll("SELECT notification.* FROM notification, monitor_notification WHERE monitor_id = ? AND monitor_notification.notification_id = notification.id ", [
|
||||
this.id,
|
||||
]);
|
||||
|
||||
let text;
|
||||
if (bean.status === 1) {
|
||||
text = "✅ Up"
|
||||
if (bean.status === UP) {
|
||||
text = "✅ Up";
|
||||
} else {
|
||||
text = "🔴 Down"
|
||||
text = "🔴 Down";
|
||||
}
|
||||
|
||||
let msg = `[${this.name}] [${text}] ${bean.msg}`;
|
||||
|
||||
for(let notification of notificationList) {
|
||||
promiseList.push(Notification.send(JSON.parse(notification.config), msg, await this.toJSON(), bean.toJSON()));
|
||||
for (let notification of notificationList) {
|
||||
try {
|
||||
await Notification.send(JSON.parse(notification.config), msg, await this.toJSON(), bean.toJSON());
|
||||
} catch (e) {
|
||||
console.error("Cannot send notification to " + notification.name);
|
||||
console.log(e);
|
||||
}
|
||||
}
|
||||
|
||||
await Promise.all(promiseList);
|
||||
}
|
||||
|
||||
} else {
|
||||
bean.important = false;
|
||||
}
|
||||
|
||||
io.to(this.user_id).emit("heartbeat", bean.toJSON());
|
||||
let beatInterval = this.interval;
|
||||
|
||||
await R.store(bean)
|
||||
Monitor.sendStats(io, this.id, this.user_id)
|
||||
if (bean.status === UP) {
|
||||
console.info(`Monitor #${this.id} '${this.name}': Successful Response: ${bean.ping} ms | Interval: ${beatInterval} seconds | Type: ${this.type}`);
|
||||
} else if (bean.status === PENDING) {
|
||||
if (this.retryInterval > 0) {
|
||||
beatInterval = this.retryInterval;
|
||||
}
|
||||
console.warn(`Monitor #${this.id} '${this.name}': Pending: ${bean.msg} | Max retries: ${this.maxretries} | Retry: ${retries} | Retry Interval: ${beatInterval} seconds | Type: ${this.type}`);
|
||||
} else {
|
||||
console.warn(`Monitor #${this.id} '${this.name}': Failing: ${bean.msg} | Interval: ${beatInterval} seconds | Type: ${this.type}`);
|
||||
}
|
||||
|
||||
io.to(this.user_id).emit("heartbeat", bean.toJSON());
|
||||
Monitor.sendStats(io, this.id, this.user_id);
|
||||
|
||||
await R.store(bean);
|
||||
prometheus.update(bean, tlsInfo);
|
||||
|
||||
previousBeat = bean;
|
||||
}
|
||||
|
||||
if (! this.isStop) {
|
||||
this.heartbeatInterval = setTimeout(beat, beatInterval * 1000);
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
beat();
|
||||
this.heartbeatInterval = setInterval(beat, this.interval * 1000);
|
||||
}
|
||||
|
||||
stop() {
|
||||
clearInterval(this.heartbeatInterval)
|
||||
clearTimeout(this.heartbeatInterval);
|
||||
this.isStop = true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper Method:
|
||||
* returns URL object for further usage
|
||||
* returns null if url is invalid
|
||||
* @returns {null|URL}
|
||||
*/
|
||||
getUrl() {
|
||||
try {
|
||||
return new URL(this.url);
|
||||
} catch (_) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Store TLS info to database
|
||||
* @param checkCertificateResult
|
||||
* @returns {Promise<object>}
|
||||
*/
|
||||
async updateTlsInfo(checkCertificateResult) {
|
||||
let tls_info_bean = await R.findOne("monitor_tls_info", "monitor_id = ?", [
|
||||
this.id,
|
||||
]);
|
||||
if (tls_info_bean == null) {
|
||||
tls_info_bean = R.dispense("monitor_tls_info");
|
||||
tls_info_bean.monitor_id = this.id;
|
||||
}
|
||||
tls_info_bean.info_json = JSON.stringify(checkCertificateResult);
|
||||
await R.store(tls_info_bean);
|
||||
|
||||
return checkCertificateResult;
|
||||
}
|
||||
|
||||
static async sendStats(io, monitorID, userID) {
|
||||
Monitor.sendAvgPing(24, io, monitorID, userID);
|
||||
Monitor.sendUptime(24, io, monitorID, userID);
|
||||
Monitor.sendUptime(24 * 30, io, monitorID, userID);
|
||||
const hasClients = getTotalClientInRoom(io, userID) > 0;
|
||||
|
||||
if (hasClients) {
|
||||
await Monitor.sendAvgPing(24, io, monitorID, userID);
|
||||
await Monitor.sendUptime(24, io, monitorID, userID);
|
||||
await Monitor.sendUptime(24 * 30, io, monitorID, userID);
|
||||
await Monitor.sendCertInfo(io, monitorID, userID);
|
||||
} else {
|
||||
debug("No clients in the room, no need to send stats");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -172,6 +398,8 @@ class Monitor extends BeanModel {
|
||||
* @param duration : int Hours
|
||||
*/
|
||||
static async sendAvgPing(duration, io, monitorID, userID) {
|
||||
const timeLogger = new TimeLogger();
|
||||
|
||||
let avgPing = parseInt(await R.getCell(`
|
||||
SELECT AVG(ping)
|
||||
FROM heartbeat
|
||||
@@ -179,74 +407,97 @@ class Monitor extends BeanModel {
|
||||
AND ping IS NOT NULL
|
||||
AND monitor_id = ? `, [
|
||||
-duration,
|
||||
monitorID
|
||||
monitorID,
|
||||
]));
|
||||
|
||||
timeLogger.print(`[Monitor: ${monitorID}] avgPing`);
|
||||
|
||||
io.to(userID).emit("avgPing", monitorID, avgPing);
|
||||
}
|
||||
|
||||
static async sendCertInfo(io, monitorID, userID) {
|
||||
let tls_info = await R.findOne("monitor_tls_info", "monitor_id = ?", [
|
||||
monitorID,
|
||||
]);
|
||||
if (tls_info != null) {
|
||||
io.to(userID).emit("certInfo", monitorID, tls_info.info_json);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Uptime with calculation
|
||||
* Calculation based on:
|
||||
* https://www.uptrends.com/support/kb/reporting/calculation-of-uptime-and-downtime
|
||||
* @param duration : int Hours
|
||||
*/
|
||||
static async sendUptime(duration, io, monitorID, userID) {
|
||||
let sec = duration * 3600;
|
||||
static async calcUptime(duration, monitorID) {
|
||||
const timeLogger = new TimeLogger();
|
||||
|
||||
let heartbeatList = await R.getAll(`
|
||||
SELECT duration, time, status
|
||||
const startTime = R.isoDateTime(dayjs.utc().subtract(duration, "hour"));
|
||||
|
||||
// Handle if heartbeat duration longer than the target duration
|
||||
// e.g. If the last beat's duration is bigger that the 24hrs window, it will use the duration between the (beat time - window margin) (THEN case in SQL)
|
||||
let result = await R.getRow(`
|
||||
SELECT
|
||||
-- SUM all duration, also trim off the beat out of time window
|
||||
SUM(
|
||||
CASE
|
||||
WHEN (JULIANDAY(\`time\`) - JULIANDAY(?)) * 86400 < duration
|
||||
THEN (JULIANDAY(\`time\`) - JULIANDAY(?)) * 86400
|
||||
ELSE duration
|
||||
END
|
||||
) AS total_duration,
|
||||
|
||||
-- SUM all uptime duration, also trim off the beat out of time window
|
||||
SUM(
|
||||
CASE
|
||||
WHEN (status = 1)
|
||||
THEN
|
||||
CASE
|
||||
WHEN (JULIANDAY(\`time\`) - JULIANDAY(?)) * 86400 < duration
|
||||
THEN (JULIANDAY(\`time\`) - JULIANDAY(?)) * 86400
|
||||
ELSE duration
|
||||
END
|
||||
END
|
||||
) AS uptime_duration
|
||||
FROM heartbeat
|
||||
WHERE time > DATETIME('now', ? || ' hours')
|
||||
AND monitor_id = ? `, [
|
||||
-duration,
|
||||
monitorID
|
||||
WHERE time > ?
|
||||
AND monitor_id = ?
|
||||
`, [
|
||||
startTime, startTime, startTime, startTime, startTime,
|
||||
monitorID,
|
||||
]);
|
||||
|
||||
let downtime = 0;
|
||||
let total = 0;
|
||||
let uptime;
|
||||
timeLogger.print(`[Monitor: ${monitorID}][${duration}] sendUptime`);
|
||||
|
||||
// Special handle for the first heartbeat only
|
||||
if (heartbeatList.length === 1) {
|
||||
let totalDuration = result.total_duration;
|
||||
let uptimeDuration = result.uptime_duration;
|
||||
let uptime = 0;
|
||||
|
||||
if (heartbeatList[0].status === 1) {
|
||||
uptime = 1;
|
||||
} else {
|
||||
if (totalDuration > 0) {
|
||||
uptime = uptimeDuration / totalDuration;
|
||||
if (uptime < 0) {
|
||||
uptime = 0;
|
||||
}
|
||||
|
||||
} else {
|
||||
for (let row of heartbeatList) {
|
||||
let value = parseInt(row.duration)
|
||||
let time = row.time
|
||||
// Handle new monitor with only one beat, because the beat's duration = 0
|
||||
let status = parseInt(await R.getCell("SELECT `status` FROM heartbeat WHERE monitor_id = ?", [ monitorID ]));
|
||||
|
||||
// Handle if heartbeat duration longer than the target duration
|
||||
// e.g. Heartbeat duration = 28hrs, but target duration = 24hrs
|
||||
if (value > sec) {
|
||||
let trim = dayjs.utc().diff(dayjs(time), 'second');
|
||||
value = sec - trim;
|
||||
|
||||
if (value < 0) {
|
||||
value = 0;
|
||||
}
|
||||
}
|
||||
|
||||
total += value;
|
||||
if (row.status === 0) {
|
||||
downtime += value;
|
||||
}
|
||||
}
|
||||
|
||||
uptime = (total - downtime) / total;
|
||||
|
||||
if (uptime < 0) {
|
||||
uptime = 0;
|
||||
if (status === UP) {
|
||||
uptime = 1;
|
||||
}
|
||||
}
|
||||
|
||||
return uptime;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Send Uptime
|
||||
* @param duration : int Hours
|
||||
*/
|
||||
static async sendUptime(duration, io, monitorID, userID) {
|
||||
const uptime = await this.calcUptime(duration, monitorID);
|
||||
io.to(userID).emit("uptime", monitorID, duration, uptime);
|
||||
}
|
||||
}
|
||||
|
server/model/tag.js (new file, 13 lines)
@@ -0,0 +1,13 @@
const { BeanModel } = require("redbean-node/dist/bean-model");

class Tag extends BeanModel {
    toJSON() {
        return {
            id: this._id,
            name: this._name,
            color: this._color,
        };
    }
}

module.exports = Tag;
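
// Illustrative sketch (not part of this changeset): attaching a tag to a monitor.
// The monitor_tag table and its tag_id/monitor_id columns appear in the tag query
// inside monitor.js above; the "value" column and this helper are assumptions.
const { R } = require("redbean-node");

async function addTagToMonitor(tagID, monitorID, value = "") {
    const relation = R.dispense("monitor_tag");
    relation.tag_id = tagID;
    relation.monitor_id = monitorID;
    relation.value = value;
    await R.store(relation);
}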
server/model/user.js (new file, 21 lines)
@@ -0,0 +1,21 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
const passwordHash = require("../password-hash");
const { R } = require("redbean-node");

class User extends BeanModel {

    /**
     * Direct execute, no need R.store()
     * @param newPassword
     * @returns {Promise<void>}
     */
    async resetPassword(newPassword) {
        await R.exec("UPDATE `user` SET password = ? WHERE id = ? ", [
            passwordHash.generate(newPassword),
            this.id
        ]);
        this.password = newPassword;
    }
}

module.exports = User;
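
// Illustrative sketch (not part of this changeset): a minimal reset flow built on
// resetPassword() above. The username lookup and the helper name are assumptions.
const { R } = require("redbean-node");

async function resetUserPassword(username, newPassword) {
    const user = await R.findOne("user", " username = ? ", [ username ]);

    if (user) {
        // Hashes and persists directly; no R.store() needed (see the doc comment above).
        await user.resetPassword(newPassword);
    }
}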
server/modules/apicache/apicache.js (new file, 749 lines)
@@ -0,0 +1,749 @@
|
||||
let url = require("url");
|
||||
let MemoryCache = require("./memory-cache");
|
||||
|
||||
let t = {
|
||||
ms: 1,
|
||||
second: 1000,
|
||||
minute: 60000,
|
||||
hour: 3600000,
|
||||
day: 3600000 * 24,
|
||||
week: 3600000 * 24 * 7,
|
||||
month: 3600000 * 24 * 30,
|
||||
};
|
||||
|
||||
let instances = [];
|
||||
|
||||
let matches = function (a) {
|
||||
return function (b) {
|
||||
return a === b;
|
||||
};
|
||||
};
|
||||
|
||||
let doesntMatch = function (a) {
|
||||
return function (b) {
|
||||
return !matches(a)(b);
|
||||
};
|
||||
};
|
||||
|
||||
let logDuration = function (d, prefix) {
|
||||
let str = d > 1000 ? (d / 1000).toFixed(2) + "sec" : d + "ms";
|
||||
return "\x1b[33m- " + (prefix ? prefix + " " : "") + str + "\x1b[0m";
|
||||
};
|
||||
|
||||
function getSafeHeaders(res) {
|
||||
return res.getHeaders ? res.getHeaders() : res._headers;
|
||||
}
|
||||
|
||||
function ApiCache() {
|
||||
let memCache = new MemoryCache();
|
||||
|
||||
let globalOptions = {
|
||||
debug: false,
|
||||
defaultDuration: 3600000,
|
||||
enabled: true,
|
||||
appendKey: [],
|
||||
jsonp: false,
|
||||
redisClient: false,
|
||||
headerBlacklist: [],
|
||||
statusCodes: {
|
||||
include: [],
|
||||
exclude: [],
|
||||
},
|
||||
events: {
|
||||
expire: undefined,
|
||||
},
|
||||
headers: {
|
||||
// 'cache-control': 'no-cache' // example of header overwrite
|
||||
},
|
||||
trackPerformance: false,
|
||||
respectCacheControl: false,
|
||||
};
|
||||
|
||||
let middlewareOptions = [];
|
||||
let instance = this;
|
||||
let index = null;
|
||||
let timers = {};
|
||||
let performanceArray = []; // for tracking cache hit rate
|
||||
|
||||
instances.push(this);
|
||||
this.id = instances.length;
|
||||
|
||||
function debug(a, b, c, d) {
|
||||
let arr = ["\x1b[36m[apicache]\x1b[0m", a, b, c, d].filter(function (arg) {
|
||||
return arg !== undefined;
|
||||
});
|
||||
let debugEnv = process.env.DEBUG && process.env.DEBUG.split(",").indexOf("apicache") !== -1;
|
||||
|
||||
return (globalOptions.debug || debugEnv) && console.log.apply(null, arr);
|
||||
}
|
||||
|
||||
function shouldCacheResponse(request, response, toggle) {
|
||||
let opt = globalOptions;
|
||||
let codes = opt.statusCodes;
|
||||
|
||||
if (!response) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (toggle && !toggle(request, response)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (codes.exclude && codes.exclude.length && codes.exclude.indexOf(response.statusCode) !== -1) {
|
||||
return false;
|
||||
}
|
||||
if (codes.include && codes.include.length && codes.include.indexOf(response.statusCode) === -1) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
function addIndexEntries(key, req) {
|
||||
let groupName = req.apicacheGroup;
|
||||
|
||||
if (groupName) {
|
||||
debug("group detected \"" + groupName + "\"");
|
||||
let group = (index.groups[groupName] = index.groups[groupName] || []);
|
||||
group.unshift(key);
|
||||
}
|
||||
|
||||
index.all.unshift(key);
|
||||
}
|
||||
|
||||
function filterBlacklistedHeaders(headers) {
|
||||
return Object.keys(headers)
|
||||
.filter(function (key) {
|
||||
return globalOptions.headerBlacklist.indexOf(key) === -1;
|
||||
})
|
||||
.reduce(function (acc, header) {
|
||||
acc[header] = headers[header];
|
||||
return acc;
|
||||
}, {});
|
||||
}
|
||||
|
||||
function createCacheObject(status, headers, data, encoding) {
|
||||
return {
|
||||
status: status,
|
||||
headers: filterBlacklistedHeaders(headers),
|
||||
data: data,
|
||||
encoding: encoding,
|
||||
timestamp: new Date().getTime() / 1000, // seconds since epoch. This is used to properly decrement max-age headers in cached responses.
|
||||
};
|
||||
}
|
||||
|
||||
function cacheResponse(key, value, duration) {
|
||||
let redis = globalOptions.redisClient;
|
||||
let expireCallback = globalOptions.events.expire;
|
||||
|
||||
if (redis && redis.connected) {
|
||||
try {
|
||||
redis.hset(key, "response", JSON.stringify(value));
|
||||
redis.hset(key, "duration", duration);
|
||||
redis.expire(key, duration / 1000, expireCallback || function () {});
|
||||
} catch (err) {
|
||||
debug("[apicache] error in redis.hset()");
|
||||
}
|
||||
} else {
|
||||
memCache.add(key, value, duration, expireCallback);
|
||||
}
|
||||
|
||||
// add automatic cache clearing from duration, includes max limit on setTimeout
|
||||
timers[key] = setTimeout(function () {
|
||||
instance.clear(key, true);
|
||||
}, Math.min(duration, 2147483647));
|
||||
}
|
||||
|
||||
function accumulateContent(res, content) {
|
||||
if (content) {
|
||||
if (typeof content == "string") {
|
||||
res._apicache.content = (res._apicache.content || "") + content;
|
||||
} else if (Buffer.isBuffer(content)) {
|
||||
let oldContent = res._apicache.content;
|
||||
|
||||
if (typeof oldContent === "string") {
|
||||
oldContent = !Buffer.from ? new Buffer(oldContent) : Buffer.from(oldContent);
|
||||
}
|
||||
|
||||
if (!oldContent) {
|
||||
oldContent = !Buffer.alloc ? new Buffer(0) : Buffer.alloc(0);
|
||||
}
|
||||
|
||||
res._apicache.content = Buffer.concat(
|
||||
[oldContent, content],
|
||||
oldContent.length + content.length
|
||||
);
|
||||
} else {
|
||||
res._apicache.content = content;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function makeResponseCacheable(req, res, next, key, duration, strDuration, toggle) {
|
||||
// monkeypatch res.end to create cache object
|
||||
res._apicache = {
|
||||
write: res.write,
|
||||
writeHead: res.writeHead,
|
||||
end: res.end,
|
||||
cacheable: true,
|
||||
content: undefined,
|
||||
};
|
||||
|
||||
// append header overwrites if applicable
|
||||
Object.keys(globalOptions.headers).forEach(function (name) {
|
||||
res.setHeader(name, globalOptions.headers[name]);
|
||||
});
|
||||
|
||||
res.writeHead = function () {
|
||||
// add cache control headers
|
||||
if (!globalOptions.headers["cache-control"]) {
|
||||
if (shouldCacheResponse(req, res, toggle)) {
|
||||
res.setHeader("cache-control", "max-age=" + (duration / 1000).toFixed(0));
|
||||
} else {
|
||||
res.setHeader("cache-control", "no-cache, no-store, must-revalidate");
|
||||
}
|
||||
}
|
||||
|
||||
res._apicache.headers = Object.assign({}, getSafeHeaders(res));
|
||||
return res._apicache.writeHead.apply(this, arguments);
|
||||
};
|
||||
|
||||
// patch res.write
|
||||
res.write = function (content) {
|
||||
accumulateContent(res, content);
|
||||
return res._apicache.write.apply(this, arguments);
|
||||
};
|
||||
|
||||
// patch res.end
|
||||
res.end = function (content, encoding) {
|
||||
if (shouldCacheResponse(req, res, toggle)) {
|
||||
accumulateContent(res, content);
|
||||
|
||||
if (res._apicache.cacheable && res._apicache.content) {
|
||||
addIndexEntries(key, req);
|
||||
let headers = res._apicache.headers || getSafeHeaders(res);
|
||||
let cacheObject = createCacheObject(
|
||||
res.statusCode,
|
||||
headers,
|
||||
res._apicache.content,
|
||||
encoding
|
||||
);
|
||||
cacheResponse(key, cacheObject, duration);
|
||||
|
||||
// display log entry
|
||||
let elapsed = new Date() - req.apicacheTimer;
|
||||
debug("adding cache entry for \"" + key + "\" @ " + strDuration, logDuration(elapsed));
|
||||
debug("_apicache.headers: ", res._apicache.headers);
|
||||
debug("res.getHeaders(): ", getSafeHeaders(res));
|
||||
debug("cacheObject: ", cacheObject);
|
||||
}
|
||||
}
|
||||
|
||||
return res._apicache.end.apply(this, arguments);
|
||||
};
|
||||
|
||||
next();
|
||||
}
|
||||
|
||||
function sendCachedResponse(request, response, cacheObject, toggle, next, duration) {
|
||||
if (toggle && !toggle(request, response)) {
|
||||
return next();
|
||||
}
|
||||
|
||||
let headers = getSafeHeaders(response);
|
||||
|
||||
// Modified by @louislam, removed Cache-control, since I don't need client side cache!
|
||||
// Original Source: https://github.com/kwhitley/apicache/blob/0d5686cc21fad353c6dddee646288c2fca3e4f50/src/apicache.js#L254
|
||||
Object.assign(headers, filterBlacklistedHeaders(cacheObject.headers || {}));
|
||||
|
||||
// only embed apicache headers when not in production environment
|
||||
if (process.env.NODE_ENV !== "production") {
|
||||
Object.assign(headers, {
|
||||
"apicache-store": globalOptions.redisClient ? "redis" : "memory",
|
||||
"apicache-version": "1.6.2-modified",
|
||||
});
|
||||
}
|
||||
|
||||
// unstringify buffers
|
||||
let data = cacheObject.data;
|
||||
if (data && data.type === "Buffer") {
|
||||
data =
|
||||
typeof data.data === "number" ? new Buffer.alloc(data.data) : new Buffer.from(data.data);
|
||||
}
|
||||
|
||||
// test Etag against If-None-Match for 304
|
||||
let cachedEtag = cacheObject.headers.etag;
|
||||
let requestEtag = request.headers["if-none-match"];
|
||||
|
||||
if (requestEtag && cachedEtag === requestEtag) {
|
||||
response.writeHead(304, headers);
|
||||
return response.end();
|
||||
}
|
||||
|
||||
response.writeHead(cacheObject.status || 200, headers);
|
||||
|
||||
return response.end(data, cacheObject.encoding);
|
||||
}
|
||||
|
||||
function syncOptions() {
|
||||
for (let i in middlewareOptions) {
|
||||
Object.assign(middlewareOptions[i].options, globalOptions, middlewareOptions[i].localOptions);
|
||||
}
|
||||
}
|
||||
|
||||
this.clear = function (target, isAutomatic) {
|
||||
let group = index.groups[target];
|
||||
let redis = globalOptions.redisClient;
|
||||
|
||||
if (group) {
|
||||
debug("clearing group \"" + target + "\"");
|
||||
|
||||
group.forEach(function (key) {
|
||||
debug("clearing cached entry for \"" + key + "\"");
|
||||
clearTimeout(timers[key]);
|
||||
delete timers[key];
|
||||
if (!globalOptions.redisClient) {
|
||||
memCache.delete(key);
|
||||
} else {
|
||||
try {
|
||||
redis.del(key);
|
||||
} catch (err) {
|
||||
console.log("[apicache] error in redis.del(\"" + key + "\")");
|
||||
}
|
||||
}
|
||||
index.all = index.all.filter(doesntMatch(key));
|
||||
});
|
||||
|
||||
delete index.groups[target];
|
||||
} else if (target) {
|
||||
debug("clearing " + (isAutomatic ? "expired" : "cached") + " entry for \"" + target + "\"");
|
||||
clearTimeout(timers[target]);
|
||||
delete timers[target];
|
||||
// clear actual cached entry
|
||||
if (!redis) {
|
||||
memCache.delete(target);
|
||||
} else {
|
||||
try {
|
||||
redis.del(target);
|
||||
} catch (err) {
|
||||
console.log("[apicache] error in redis.del(\"" + target + "\")");
|
||||
}
|
||||
}
|
||||
|
||||
// remove from global index
|
||||
index.all = index.all.filter(doesntMatch(target));
|
||||
|
||||
// remove target from each group that it may exist in
|
||||
Object.keys(index.groups).forEach(function (groupName) {
|
||||
index.groups[groupName] = index.groups[groupName].filter(doesntMatch(target));
|
||||
|
||||
// delete group if now empty
|
||||
if (!index.groups[groupName].length) {
|
||||
delete index.groups[groupName];
|
||||
}
|
||||
});
|
||||
} else {
|
||||
debug("clearing entire index");
|
||||
|
||||
if (!redis) {
|
||||
memCache.clear();
|
||||
} else {
|
||||
// clear redis keys one by one from internal index to prevent clearing non-apicache entries
|
||||
index.all.forEach(function (key) {
|
||||
clearTimeout(timers[key]);
|
||||
delete timers[key];
|
||||
try {
|
||||
redis.del(key);
|
||||
} catch (err) {
|
||||
console.log("[apicache] error in redis.del(\"" + key + "\")");
|
||||
}
|
||||
});
|
||||
}
|
||||
this.resetIndex();
|
||||
}
|
||||
|
||||
return this.getIndex();
|
||||
};
|
||||
|
||||
function parseDuration(duration, defaultDuration) {
|
||||
if (typeof duration === "number") {
|
||||
return duration;
|
||||
}
|
||||
|
||||
if (typeof duration === "string") {
|
||||
let split = duration.match(/^([\d\.,]+)\s?(\w+)$/);
|
||||
|
||||
if (split.length === 3) {
|
||||
let len = parseFloat(split[1]);
|
||||
let unit = split[2].replace(/s$/i, "").toLowerCase();
|
||||
if (unit === "m") {
|
||||
unit = "ms";
|
||||
}
|
||||
|
||||
return (len || 1) * (t[unit] || 0);
|
||||
}
|
||||
}
|
||||
|
||||
return defaultDuration;
|
||||
}
|
||||
|
||||
this.getDuration = function (duration) {
|
||||
return parseDuration(duration, globalOptions.defaultDuration);
|
||||
};
|
||||
|
||||
/**
|
||||
* Return cache performance statistics (hit rate). Suitable for putting into a route:
|
||||
* <code>
|
||||
* app.get('/api/cache/performance', (req, res) => {
|
||||
* res.json(apicache.getPerformance())
|
||||
* })
|
||||
* </code>
|
||||
*/
|
||||
this.getPerformance = function () {
|
||||
return performanceArray.map(function (p) {
|
||||
return p.report();
|
||||
});
|
||||
};
|
||||
|
||||
this.getIndex = function (group) {
|
||||
if (group) {
|
||||
return index.groups[group];
|
||||
} else {
|
||||
return index;
|
||||
}
|
||||
};
|
||||
|
||||
this.middleware = function cache(strDuration, middlewareToggle, localOptions) {
|
||||
let duration = instance.getDuration(strDuration);
|
||||
let opt = {};
|
||||
|
||||
middlewareOptions.push({
|
||||
options: opt,
|
||||
});
|
||||
|
||||
let options = function (localOptions) {
|
||||
if (localOptions) {
|
||||
middlewareOptions.find(function (middleware) {
|
||||
return middleware.options === opt;
|
||||
}).localOptions = localOptions;
|
||||
}
|
||||
|
||||
syncOptions();
|
||||
|
||||
return opt;
|
||||
};
|
||||
|
||||
options(localOptions);
|
||||
|
||||
/**
|
||||
* A Function for non tracking performance
|
||||
*/
|
||||
function NOOPCachePerformance() {
|
||||
this.report = this.hit = this.miss = function () {}; // noop;
|
||||
}
|
||||
|
||||
/**
|
||||
* A function for tracking and reporting hit rate. These statistics are returned by the getPerformance() call above.
|
||||
*/
|
||||
function CachePerformance() {
|
||||
/**
|
||||
* Tracks the hit rate for the last 100 requests.
|
||||
* If there have been fewer than 100 requests, the hit rate just considers the requests that have happened.
|
||||
*/
|
||||
this.hitsLast100 = new Uint8Array(100 / 4); // each hit is 2 bits
|
||||
|
||||
/**
|
||||
* Tracks the hit rate for the last 1000 requests.
|
||||
* If there have been fewer than 1000 requests, the hit rate just considers the requests that have happened.
|
||||
*/
|
||||
this.hitsLast1000 = new Uint8Array(1000 / 4); // each hit is 2 bits
|
||||
|
||||
/**
|
||||
* Tracks the hit rate for the last 10000 requests.
|
||||
* If there have been fewer than 10000 requests, the hit rate just considers the requests that have happened.
|
||||
*/
|
||||
this.hitsLast10000 = new Uint8Array(10000 / 4); // each hit is 2 bits
|
||||
|
||||
/**
|
||||
* Tracks the hit rate for the last 100000 requests.
|
||||
* If there have been fewer than 100000 requests, the hit rate just considers the requests that have happened.
|
||||
*/
|
||||
this.hitsLast100000 = new Uint8Array(100000 / 4); // each hit is 2 bits
|
||||
|
||||
/**
|
||||
* The number of calls that have passed through the middleware since the server started.
|
||||
*/
|
||||
this.callCount = 0;
|
||||
|
||||
/**
|
||||
* The total number of hits since the server started
|
||||
*/
|
||||
this.hitCount = 0;
|
||||
|
||||
/**
|
||||
* The key from the last cache hit. This is useful in identifying which route these statistics apply to.
|
||||
*/
|
||||
this.lastCacheHit = null;
|
||||
|
||||
/**
|
||||
* The key from the last cache miss. This is useful in identifying which route these statistics apply to.
|
||||
*/
|
||||
this.lastCacheMiss = null;
|
||||
|
||||
/**
|
||||
* Return performance statistics
|
||||
*/
|
||||
this.report = function () {
|
||||
return {
|
||||
lastCacheHit: this.lastCacheHit,
|
||||
lastCacheMiss: this.lastCacheMiss,
|
||||
callCount: this.callCount,
|
||||
hitCount: this.hitCount,
|
||||
missCount: this.callCount - this.hitCount,
|
||||
hitRate: this.callCount == 0 ? null : this.hitCount / this.callCount,
|
||||
hitRateLast100: this.hitRate(this.hitsLast100),
|
||||
hitRateLast1000: this.hitRate(this.hitsLast1000),
|
||||
hitRateLast10000: this.hitRate(this.hitsLast10000),
|
||||
hitRateLast100000: this.hitRate(this.hitsLast100000),
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Computes a cache hit rate from an array of hits and misses.
|
||||
* @param {Uint8Array} array An array representing hits and misses.
|
||||
* @returns a number between 0 and 1, or null if the array has no hits or misses
|
||||
*/
|
||||
this.hitRate = function (array) {
|
||||
let hits = 0;
|
||||
let misses = 0;
|
||||
for (let i = 0; i < array.length; i++) {
|
||||
let n8 = array[i];
|
||||
for (let j = 0; j < 4; j++) {
|
||||
switch (n8 & 3) {
|
||||
case 1:
|
||||
hits++;
|
||||
break;
|
||||
case 2:
|
||||
misses++;
|
||||
break;
|
||||
}
|
||||
n8 >>= 2;
|
||||
}
|
||||
}
|
||||
let total = hits + misses;
|
||||
if (total == 0) {
|
||||
return null;
|
||||
}
|
||||
return hits / total;
|
||||
};
|
||||
|
||||
/**
|
||||
* Record a hit or miss in the given array. It will be recorded at a position determined
|
||||
* by the current value of the callCount variable.
|
||||
* @param {Uint8Array} array An array representing hits and misses.
|
||||
* @param {boolean} hit true for a hit, false for a miss
|
||||
* Each element in the array is 8 bits, and encodes 4 hit/miss records.
|
||||
* Each hit or miss is encoded as to bits as follows:
|
||||
* 00 means no hit or miss has been recorded in these bits
|
||||
* 01 encodes a hit
|
||||
* 10 encodes a miss
|
||||
*/
|
||||
this.recordHitInArray = function (array, hit) {
|
||||
let arrayIndex = ~~(this.callCount / 4) % array.length;
|
||||
let bitOffset = (this.callCount % 4) * 2; // 2 bits per record, 4 records per uint8 array element
|
||||
let clearMask = ~(3 << bitOffset);
|
||||
let record = (hit ? 1 : 2) << bitOffset;
|
||||
array[arrayIndex] = (array[arrayIndex] & clearMask) | record;
|
||||
};
|
||||
|
||||
/**
|
||||
* Records the hit or miss in the tracking arrays and increments the call count.
|
||||
* @param {boolean} hit true records a hit, false records a miss
|
||||
*/
|
||||
this.recordHit = function (hit) {
|
||||
this.recordHitInArray(this.hitsLast100, hit);
|
||||
this.recordHitInArray(this.hitsLast1000, hit);
|
||||
this.recordHitInArray(this.hitsLast10000, hit);
|
||||
this.recordHitInArray(this.hitsLast100000, hit);
|
||||
if (hit) {
|
||||
this.hitCount++;
|
||||
}
|
||||
this.callCount++;
|
||||
};
|
||||
|
||||
/**
|
||||
* Records a hit event, setting lastCacheMiss to the given key
|
||||
* @param {string} key The key that had the cache hit
|
||||
*/
|
||||
this.hit = function (key) {
|
||||
this.recordHit(true);
|
||||
this.lastCacheHit = key;
|
||||
};
|
||||
|
||||
/**
|
||||
* Records a miss event, setting lastCacheMiss to the given key
|
||||
* @param {string} key The key that had the cache miss
|
||||
*/
|
||||
this.miss = function (key) {
|
||||
this.recordHit(false);
|
||||
this.lastCacheMiss = key;
|
||||
};
|
||||
}
|
||||
|
||||
let perf = globalOptions.trackPerformance ? new CachePerformance() : new NOOPCachePerformance();
|
||||
|
||||
performanceArray.push(perf);
|
||||
|
||||
let cache = function (req, res, next) {
|
||||
function bypass() {
|
||||
debug("bypass detected, skipping cache.");
|
||||
return next();
|
||||
}
|
||||
|
||||
// initial bypass chances
|
||||
if (!opt.enabled) {
|
||||
return bypass();
|
||||
}
|
||||
if (
|
||||
req.headers["x-apicache-bypass"] ||
|
||||
req.headers["x-apicache-force-fetch"] ||
|
||||
(opt.respectCacheControl && req.headers["cache-control"] == "no-cache")
|
||||
) {
|
||||
return bypass();
|
||||
}
|
||||
|
||||
// REMOVED IN 0.11.1 TO CORRECT MIDDLEWARE TOGGLE EXECUTE ORDER
|
||||
// if (typeof middlewareToggle === 'function') {
|
||||
// if (!middlewareToggle(req, res)) return bypass()
|
||||
// } else if (middlewareToggle !== undefined && !middlewareToggle) {
|
||||
// return bypass()
|
||||
// }
|
||||
|
||||
// embed timer
|
||||
req.apicacheTimer = new Date();
|
||||
|
||||
// In Express 4.x the url is ambigious based on where a router is mounted. originalUrl will give the full Url
|
||||
let key = req.originalUrl || req.url;
|
||||
|
||||
// Remove querystring from key if jsonp option is enabled
|
||||
if (opt.jsonp) {
|
||||
key = url.parse(key).pathname;
|
||||
}
|
||||
|
||||
// add appendKey (either custom function or response path)
|
||||
if (typeof opt.appendKey === "function") {
|
||||
key += "$$appendKey=" + opt.appendKey(req, res);
|
||||
} else if (opt.appendKey.length > 0) {
|
||||
let appendKey = req;
|
||||
|
||||
for (let i = 0; i < opt.appendKey.length; i++) {
|
||||
appendKey = appendKey[opt.appendKey[i]];
|
||||
}
|
||||
key += "$$appendKey=" + appendKey;
|
||||
}
|
||||
|
||||
// attempt cache hit
|
||||
let redis = opt.redisClient;
|
||||
let cached = !redis ? memCache.getValue(key) : null;
|
||||
|
||||
// send if cache hit from memory-cache
|
||||
if (cached) {
|
||||
let elapsed = new Date() - req.apicacheTimer;
|
||||
debug("sending cached (memory-cache) version of", key, logDuration(elapsed));
|
||||
|
||||
perf.hit(key);
|
||||
return sendCachedResponse(req, res, cached, middlewareToggle, next, duration);
|
||||
}
|
||||
|
||||
// send if cache hit from redis
|
||||
if (redis && redis.connected) {
|
||||
try {
|
||||
redis.hgetall(key, function (err, obj) {
|
||||
if (!err && obj && obj.response) {
|
||||
let elapsed = new Date() - req.apicacheTimer;
|
||||
debug("sending cached (redis) version of", key, logDuration(elapsed));
|
||||
|
||||
perf.hit(key);
|
||||
return sendCachedResponse(
|
||||
req,
|
||||
res,
|
||||
JSON.parse(obj.response),
|
||||
middlewareToggle,
|
||||
next,
|
||||
duration
|
||||
);
|
||||
} else {
|
||||
perf.miss(key);
|
||||
return makeResponseCacheable(
|
||||
req,
|
||||
res,
|
||||
next,
|
||||
key,
|
||||
duration,
|
||||
strDuration,
|
||||
middlewareToggle
|
||||
);
|
||||
}
|
||||
});
|
||||
} catch (err) {
|
||||
// bypass redis on error
|
||||
perf.miss(key);
|
||||
return makeResponseCacheable(req, res, next, key, duration, strDuration, middlewareToggle);
|
||||
}
|
||||
} else {
|
||||
perf.miss(key);
|
||||
return makeResponseCacheable(req, res, next, key, duration, strDuration, middlewareToggle);
|
||||
}
|
||||
};
|
||||
|
||||
cache.options = options;
|
||||
|
||||
return cache;
|
||||
};
|
||||
|
||||
this.options = function (options) {
|
||||
if (options) {
|
||||
Object.assign(globalOptions, options);
|
||||
syncOptions();
|
||||
|
||||
if ("defaultDuration" in options) {
|
||||
// Convert the default duration to a number in milliseconds (if needed)
|
||||
globalOptions.defaultDuration = parseDuration(globalOptions.defaultDuration, 3600000);
|
||||
}
|
||||
|
||||
if (globalOptions.trackPerformance) {
|
||||
debug("WARNING: using trackPerformance flag can cause high memory usage!");
|
||||
}
|
||||
|
||||
return this;
|
||||
} else {
|
||||
return globalOptions;
|
||||
}
|
||||
};
|
||||
|
||||
this.resetIndex = function () {
|
||||
index = {
|
||||
all: [],
|
||||
groups: {},
|
||||
};
|
||||
};
|
||||
|
||||
this.newInstance = function (config) {
|
||||
let instance = new ApiCache();
|
||||
|
||||
if (config) {
|
||||
instance.options(config);
|
||||
}
|
||||
|
||||
return instance;
|
||||
};
|
||||
|
||||
this.clone = function () {
|
||||
return this.newInstance(this.options());
|
||||
};
|
||||
|
||||
// initialize index
|
||||
this.resetIndex();
|
||||
}
|
||||
|
||||
module.exports = new ApiCache();
|
14
server/modules/apicache/index.js
Normal file
@@ -0,0 +1,14 @@
|
||||
const apicache = require("./apicache");
|
||||
|
||||
apicache.options({
|
||||
headerBlacklist: [
|
||||
"cache-control"
|
||||
],
|
||||
headers: {
|
||||
// Disable the client-side cache; only cache on the server side.
|
||||
// BUG! Not working for the second request
|
||||
"cache-control": "no-cache",
|
||||
},
|
||||
});
|
||||
|
||||
module.exports = apicache;
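A sketch of how this wrapper is consumed from an Express route; the route path and duration string are only examples, though the real routers use the same cache("5 minutes") pattern (see server/routers/api-router.js below).

const express = require("express");
const apicache = require("./server/modules/apicache"); // path as added in this commit

const app = express();
const cache = apicache.middleware;

// Responses for this endpoint are cached server-side for 5 minutes.
app.get("/api/status-page/heartbeat", cache("5 minutes"), (req, res) => {
    res.json({ ok: true });
});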
|
59
server/modules/apicache/memory-cache.js
Normal file
@@ -0,0 +1,59 @@
|
||||
function MemoryCache() {
|
||||
this.cache = {};
|
||||
this.size = 0;
|
||||
}
|
||||
|
||||
MemoryCache.prototype.add = function (key, value, time, timeoutCallback) {
|
||||
let old = this.cache[key];
|
||||
let instance = this;
|
||||
|
||||
let entry = {
|
||||
value: value,
|
||||
expire: time + Date.now(),
|
||||
timeout: setTimeout(function () {
|
||||
instance.delete(key);
|
||||
return timeoutCallback && typeof timeoutCallback === "function" && timeoutCallback(value, key);
|
||||
}, time)
|
||||
};
|
||||
|
||||
this.cache[key] = entry;
|
||||
this.size = Object.keys(this.cache).length;
|
||||
|
||||
return entry;
|
||||
};
|
||||
|
||||
MemoryCache.prototype.delete = function (key) {
|
||||
let entry = this.cache[key];
|
||||
|
||||
if (entry) {
|
||||
clearTimeout(entry.timeout);
|
||||
}
|
||||
|
||||
delete this.cache[key];
|
||||
|
||||
this.size = Object.keys(this.cache).length;
|
||||
|
||||
return null;
|
||||
};
|
||||
|
||||
MemoryCache.prototype.get = function (key) {
|
||||
let entry = this.cache[key];
|
||||
|
||||
return entry;
|
||||
};
|
||||
|
||||
MemoryCache.prototype.getValue = function (key) {
|
||||
let entry = this.get(key);
|
||||
|
||||
return entry && entry.value;
|
||||
};
|
||||
|
||||
MemoryCache.prototype.clear = function () {
|
||||
Object.keys(this.cache).forEach(function (key) {
|
||||
this.delete(key);
|
||||
}, this);
|
||||
|
||||
return true;
|
||||
};
|
||||
|
||||
module.exports = MemoryCache;
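A quick usage sketch of MemoryCache on its own (key, value and timeout are arbitrary):

const MemoryCache = require("./server/modules/apicache/memory-cache");

const cache = new MemoryCache();

// Keep "greeting" for 10 seconds; the callback fires when the entry expires.
cache.add("greeting", "hello", 10000, (value, key) => {
    console.log("expired:", key);
});

console.log(cache.getValue("greeting")); // "hello"
console.log(cache.size);                 // 1

cache.delete("greeting");
console.log(cache.getValue("greeting")); // undefined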
|
26
server/notification-providers/apprise.js
Normal file
@@ -0,0 +1,26 @@
|
||||
const NotificationProvider = require("./notification-provider");
|
||||
const child_process = require("child_process");
|
||||
|
||||
class Apprise extends NotificationProvider {
|
||||
|
||||
name = "apprise";
|
||||
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let s = child_process.spawnSync("apprise", [ "-vv", "-b", msg, notification.appriseURL])
|
||||
|
||||
let output = (s.stdout) ? s.stdout.toString() : "ERROR: maybe apprise not found";
|
||||
|
||||
if (output) {
|
||||
|
||||
if (! output.includes("ERROR")) {
|
||||
return "Sent Successfully";
|
||||
}
|
||||
|
||||
throw new Error(output)
|
||||
} else {
|
||||
return "No output from apprise";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Apprise;
|
115
server/notification-providers/discord.js
Normal file
@@ -0,0 +1,115 @@
|
||||
const NotificationProvider = require("./notification-provider");
|
||||
const axios = require("axios");
|
||||
const { DOWN, UP } = require("../../src/util");
|
||||
|
||||
class Discord extends NotificationProvider {
|
||||
|
||||
name = "discord";
|
||||
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let okMsg = "Sent Successfully. ";
|
||||
|
||||
try {
|
||||
const discordDisplayName = notification.discordUsername || "Uptime Kuma";
|
||||
|
||||
// If heartbeatJSON is null, assume we're testing.
|
||||
if (heartbeatJSON == null) {
|
||||
let discordtestdata = {
|
||||
username: discordDisplayName,
|
||||
content: msg,
|
||||
}
|
||||
await axios.post(notification.discordWebhookUrl, discordtestdata)
|
||||
return okMsg;
|
||||
}
|
||||
|
||||
let url;
|
||||
|
||||
if (monitorJSON["type"] === "port") {
|
||||
url = monitorJSON["hostname"];
|
||||
if (monitorJSON["port"]) {
|
||||
url += ":" + monitorJSON["port"];
|
||||
}
|
||||
|
||||
} else {
|
||||
url = monitorJSON["url"];
|
||||
}
|
||||
|
||||
// If heartbeatJSON is not null, we go into the normal alerting loop.
|
||||
if (heartbeatJSON["status"] == DOWN) {
|
||||
let discorddowndata = {
|
||||
username: discordDisplayName,
|
||||
embeds: [{
|
||||
title: "❌ Your service " + monitorJSON["name"] + " went down. ❌",
|
||||
color: 16711680,
|
||||
timestamp: heartbeatJSON["time"],
|
||||
fields: [
|
||||
{
|
||||
name: "Service Name",
|
||||
value: monitorJSON["name"],
|
||||
},
|
||||
{
|
||||
name: "Service URL",
|
||||
value: url,
|
||||
},
|
||||
{
|
||||
name: "Time (UTC)",
|
||||
value: heartbeatJSON["time"],
|
||||
},
|
||||
{
|
||||
name: "Error",
|
||||
value: heartbeatJSON["msg"],
|
||||
},
|
||||
],
|
||||
}],
|
||||
}
|
||||
|
||||
if (notification.discordPrefixMessage) {
|
||||
discorddowndata.content = notification.discordPrefixMessage;
|
||||
}
|
||||
|
||||
await axios.post(notification.discordWebhookUrl, discorddowndata)
|
||||
return okMsg;
|
||||
|
||||
} else if (heartbeatJSON["status"] == UP) {
|
||||
let discordupdata = {
|
||||
username: discordDisplayName,
|
||||
embeds: [{
|
||||
title: "✅ Your service " + monitorJSON["name"] + " is up! ✅",
|
||||
color: 65280,
|
||||
timestamp: heartbeatJSON["time"],
|
||||
fields: [
|
||||
{
|
||||
name: "Service Name",
|
||||
value: monitorJSON["name"],
|
||||
},
|
||||
{
|
||||
name: "Service URL",
|
||||
value: url.startsWith("http") ? "[Visit Service](" + url + ")" : url,
|
||||
},
|
||||
{
|
||||
name: "Time (UTC)",
|
||||
value: heartbeatJSON["time"],
|
||||
},
|
||||
{
|
||||
name: "Ping",
|
||||
value: heartbeatJSON["ping"] + "ms",
|
||||
},
|
||||
],
|
||||
}],
|
||||
}
|
||||
|
||||
if (notification.discordPrefixMessage) {
|
||||
discordupdata.content = notification.discordPrefixMessage;
|
||||
}
|
||||
|
||||
await axios.post(notification.discordWebhookUrl, discordupdata)
|
||||
return okMsg;
|
||||
}
|
||||
} catch (error) {
|
||||
this.throwGeneralAxiosError(error)
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
module.exports = Discord;
|
28
server/notification-providers/gotify.js
Normal file
@@ -0,0 +1,28 @@
|
||||
const NotificationProvider = require("./notification-provider");
|
||||
const axios = require("axios");
|
||||
|
||||
class Gotify extends NotificationProvider {
|
||||
|
||||
name = "gotify";
|
||||
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let okMsg = "Sent Successfully. ";
|
||||
try {
|
||||
if (notification.gotifyserverurl && notification.gotifyserverurl.endsWith("/")) {
|
||||
notification.gotifyserverurl = notification.gotifyserverurl.slice(0, -1);
|
||||
}
|
||||
await axios.post(`${notification.gotifyserverurl}/message?token=${notification.gotifyapplicationToken}`, {
|
||||
"message": msg,
|
||||
"priority": notification.gotifyPriority || 8,
|
||||
"title": "Uptime-Kuma",
|
||||
})
|
||||
|
||||
return okMsg;
|
||||
|
||||
} catch (error) {
|
||||
this.throwGeneralAxiosError(error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Gotify;
|
60
server/notification-providers/line.js
Normal file
@@ -0,0 +1,60 @@
|
||||
const NotificationProvider = require("./notification-provider");
|
||||
const axios = require("axios");
|
||||
const { DOWN, UP } = require("../../src/util");
|
||||
|
||||
class Line extends NotificationProvider {
|
||||
|
||||
name = "line";
|
||||
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let okMsg = "Sent Successfully. ";
|
||||
try {
|
||||
let lineAPIUrl = "https://api.line.me/v2/bot/message/push";
|
||||
let config = {
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
"Authorization": "Bearer " + notification.lineChannelAccessToken
|
||||
}
|
||||
};
|
||||
if (heartbeatJSON == null) {
|
||||
let testMessage = {
|
||||
"to": notification.lineUserID,
|
||||
"messages": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "Test Successful!"
|
||||
}
|
||||
]
|
||||
}
|
||||
await axios.post(lineAPIUrl, testMessage, config)
|
||||
} else if (heartbeatJSON["status"] == DOWN) {
|
||||
let downMessage = {
|
||||
"to": notification.lineUserID,
|
||||
"messages": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "UptimeKuma Alert: [🔴 Down]\n" + "Name: " + monitorJSON["name"] + " \n" + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"]
|
||||
}
|
||||
]
|
||||
}
|
||||
await axios.post(lineAPIUrl, downMessage, config)
|
||||
} else if (heartbeatJSON["status"] == UP) {
|
||||
let upMessage = {
|
||||
"to": notification.lineUserID,
|
||||
"messages": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "UptimeKuma Alert: [✅ Up]\n" + "Name: " + monitorJSON["name"] + " \n" + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"]
|
||||
}
|
||||
]
|
||||
}
|
||||
await axios.post(lineAPIUrl, upMessage, config)
|
||||
}
|
||||
return okMsg;
|
||||
} catch (error) {
|
||||
this.throwGeneralAxiosError(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Line;
|
48
server/notification-providers/lunasea.js
Normal file
@@ -0,0 +1,48 @@
|
||||
const NotificationProvider = require("./notification-provider");
|
||||
const axios = require("axios");
|
||||
const { DOWN, UP } = require("../../src/util");
|
||||
|
||||
class LunaSea extends NotificationProvider {
|
||||
|
||||
name = "lunasea";
|
||||
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let okMsg = "Sent Successfully. ";
|
||||
let lunaseadevice = "https://notify.lunasea.app/v1/custom/device/" + notification.lunaseaDevice
|
||||
|
||||
try {
|
||||
if (heartbeatJSON == null) {
|
||||
let testdata = {
|
||||
"title": "Uptime Kuma Alert",
|
||||
"body": "Testing Successful.",
|
||||
}
|
||||
await axios.post(lunaseadevice, testdata)
|
||||
return okMsg;
|
||||
}
|
||||
|
||||
if (heartbeatJSON["status"] == DOWN) {
|
||||
let downdata = {
|
||||
"title": "UptimeKuma Alert: " + monitorJSON["name"],
|
||||
"body": "[🔴 Down] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
|
||||
}
|
||||
await axios.post(lunaseadevice, downdata)
|
||||
return okMsg;
|
||||
}
|
||||
|
||||
if (heartbeatJSON["status"] == UP) {
|
||||
let updata = {
|
||||
"title": "UptimeKuma Alert: " + monitorJSON["name"],
|
||||
"body": "[✅ Up] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
|
||||
}
|
||||
await axios.post(lunaseadevice, updata)
|
||||
return okMsg;
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
this.throwGeneralAxiosError(error)
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = LunaSea;
|
123
server/notification-providers/mattermost.js
Normal file
@@ -0,0 +1,123 @@
|
||||
const NotificationProvider = require("./notification-provider");
|
||||
const axios = require("axios");
|
||||
const { DOWN, UP } = require("../../src/util");
|
||||
|
||||
class Mattermost extends NotificationProvider {
|
||||
|
||||
name = "mattermost";
|
||||
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let okMsg = "Sent Successfully. ";
|
||||
try {
|
||||
const mattermostUserName = notification.mattermostusername || "Uptime Kuma";
|
||||
// If heartbeatJSON is null, assume we're testing.
|
||||
if (heartbeatJSON == null) {
|
||||
let mattermostTestData = {
|
||||
username: mattermostUserName,
|
||||
text: msg,
|
||||
}
|
||||
await axios.post(notification.mattermostWebhookUrl, mattermostTestData)
|
||||
return okMsg;
|
||||
}
|
||||
|
||||
const mattermostChannel = notification.mattermostchannel;
|
||||
const mattermostIconEmoji = notification.mattermosticonemo;
|
||||
const mattermostIconUrl = notification.mattermosticonurl;
|
||||
|
||||
if (heartbeatJSON["status"] == DOWN) {
|
||||
let mattermostdowndata = {
|
||||
username: mattermostUserName,
|
||||
text: "Uptime Kuma Alert",
|
||||
channel: mattermostChannel,
|
||||
icon_emoji: mattermostIconEmoji,
|
||||
icon_url: mattermostIconUrl,
|
||||
attachments: [
|
||||
{
|
||||
fallback:
|
||||
"Your " +
|
||||
monitorJSON["name"] +
|
||||
" service went down.",
|
||||
color: "#FF0000",
|
||||
title:
|
||||
"❌ " +
|
||||
monitorJSON["name"] +
|
||||
" service went down. ❌",
|
||||
title_link: monitorJSON["url"],
|
||||
fields: [
|
||||
{
|
||||
short: true,
|
||||
title: "Service Name",
|
||||
value: monitorJSON["name"],
|
||||
},
|
||||
{
|
||||
short: true,
|
||||
title: "Time (UTC)",
|
||||
value: heartbeatJSON["time"],
|
||||
},
|
||||
{
|
||||
short: false,
|
||||
title: "Error",
|
||||
value: heartbeatJSON["msg"],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
await axios.post(
|
||||
notification.mattermostWebhookUrl,
|
||||
mattermostdowndata
|
||||
);
|
||||
return okMsg;
|
||||
} else if (heartbeatJSON["status"] == UP) {
|
||||
let mattermostupdata = {
|
||||
username: mattermostUserName,
|
||||
text: "Uptime Kuma Alert",
|
||||
channel: mattermostChannel,
|
||||
icon_emoji: mattermostIconEmoji,
|
||||
icon_url: mattermostIconUrl,
|
||||
attachments: [
|
||||
{
|
||||
fallback:
|
||||
"Your " +
|
||||
monitorJSON["name"] +
|
||||
" service went up!",
|
||||
color: "#32CD32",
|
||||
title:
|
||||
"✅ " +
|
||||
monitorJSON["name"] +
|
||||
" service went up! ✅",
|
||||
title_link: monitorJSON["url"],
|
||||
fields: [
|
||||
{
|
||||
short: true,
|
||||
title: "Service Name",
|
||||
value: monitorJSON["name"],
|
||||
},
|
||||
{
|
||||
short: true,
|
||||
title: "Time (UTC)",
|
||||
value: heartbeatJSON["time"],
|
||||
},
|
||||
{
|
||||
short: false,
|
||||
title: "Ping",
|
||||
value: heartbeatJSON["ping"] + "ms",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
await axios.post(
|
||||
notification.mattermostWebhookUrl,
|
||||
mattermostupdata
|
||||
);
|
||||
return okMsg;
|
||||
}
|
||||
} catch (error) {
|
||||
this.throwGeneralAxiosError(error);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Mattermost;
|
36
server/notification-providers/notification-provider.js
Normal file
@@ -0,0 +1,36 @@
|
||||
class NotificationProvider {
|
||||
|
||||
/**
|
||||
* Notification Provider Name
|
||||
* @type string
|
||||
*/
|
||||
name = undefined;
|
||||
|
||||
/**
|
||||
* @param notification : BeanModel
|
||||
* @param msg : string General Message
|
||||
* @param monitorJSON : object Monitor details (For Up/Down only)
|
||||
* @param heartbeatJSON : object Heartbeat details (For Up/Down only)
|
||||
* @returns {Promise<string>} Resolves with a success message
|
||||
* Throws an Error with a failure message on error
|
||||
*/
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
throw new Error("Have to override Notification.send(...)");
|
||||
}
|
||||
|
||||
throwGeneralAxiosError(error) {
|
||||
let msg = "Error: " + error + " ";
|
||||
|
||||
if (error.response && error.response.data) {
|
||||
if (typeof error.response.data === "string") {
|
||||
msg += error.response.data;
|
||||
} else {
|
||||
msg += JSON.stringify(error.response.data)
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error(msg)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = NotificationProvider;
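Every concrete provider in this directory follows the same pattern: extend the base class, set a unique name, implement send(), and funnel HTTP failures through throwGeneralAxiosError(). A minimal hypothetical provider as a sketch (the "example" name and the exampleWebhookURL field are made up):

const NotificationProvider = require("./notification-provider");
const axios = require("axios");

class Example extends NotificationProvider {

    name = "example";

    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let okMsg = "Sent Successfully. ";

        try {
            // exampleWebhookURL is a hypothetical field stored on the notification bean.
            await axios.post(notification.exampleWebhookURL, { text: msg });
            return okMsg;
        } catch (error) {
            this.throwGeneralAxiosError(error);
        }
    }
}

module.exports = Example;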
|
40
server/notification-providers/octopush.js
Normal file
@@ -0,0 +1,40 @@
|
||||
const NotificationProvider = require("./notification-provider");
|
||||
const axios = require("axios");
|
||||
|
||||
class Octopush extends NotificationProvider {
|
||||
|
||||
name = "octopush";
|
||||
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let okMsg = "Sent Successfully. ";
|
||||
|
||||
try {
|
||||
let config = {
|
||||
headers: {
|
||||
"api-key": notification.octopushAPIKey,
|
||||
"api-login": notification.octopushLogin,
|
||||
"cache-control": "no-cache"
|
||||
}
|
||||
};
|
||||
let data = {
|
||||
"recipients": [
|
||||
{
|
||||
"phone_number": notification.octopushPhoneNumber
|
||||
}
|
||||
],
|
||||
// Octopush does not support non-ASCII characters
|
||||
"text": msg.replace(/[^\x00-\x7F]/g, ""),
|
||||
"type": notification.octopushSMSType,
|
||||
"purpose": "alert",
|
||||
"sender": notification.octopushSenderName
|
||||
};
|
||||
|
||||
await axios.post("https://api.octopush.com/v1/public/sms-campaign/send", data, config)
|
||||
return okMsg;
|
||||
} catch (error) {
|
||||
this.throwGeneralAxiosError(error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Octopush;
|
50
server/notification-providers/pushbullet.js
Normal file
@@ -0,0 +1,50 @@
|
||||
const NotificationProvider = require("./notification-provider");
|
||||
const axios = require("axios");
|
||||
|
||||
const { DOWN, UP } = require("../../src/util");
|
||||
|
||||
class Pushbullet extends NotificationProvider {
|
||||
|
||||
name = "pushbullet";
|
||||
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let okMsg = "Sent Successfully. ";
|
||||
|
||||
try {
|
||||
let pushbulletUrl = "https://api.pushbullet.com/v2/pushes";
|
||||
let config = {
|
||||
headers: {
|
||||
"Access-Token": notification.pushbulletAccessToken,
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
};
|
||||
if (heartbeatJSON == null) {
|
||||
let testdata = {
|
||||
"type": "note",
|
||||
"title": "Uptime Kuma Alert",
|
||||
"body": "Testing Successful.",
|
||||
}
|
||||
await axios.post(pushbulletUrl, testdata, config)
|
||||
} else if (heartbeatJSON["status"] == DOWN) {
|
||||
let downdata = {
|
||||
"type": "note",
|
||||
"title": "UptimeKuma Alert: " + monitorJSON["name"],
|
||||
"body": "[🔴 Down] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
|
||||
}
|
||||
await axios.post(pushbulletUrl, downdata, config)
|
||||
} else if (heartbeatJSON["status"] == UP) {
|
||||
let updata = {
|
||||
"type": "note",
|
||||
"title": "UptimeKuma Alert: " + monitorJSON["name"],
|
||||
"body": "[✅ Up] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
|
||||
}
|
||||
await axios.post(pushbulletUrl, updata, config)
|
||||
}
|
||||
return okMsg;
|
||||
} catch (error) {
|
||||
this.throwGeneralAxiosError(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Pushbullet;
|
49
server/notification-providers/pushover.js
Normal file
@@ -0,0 +1,49 @@
|
||||
const NotificationProvider = require("./notification-provider");
|
||||
const axios = require("axios");
|
||||
|
||||
class Pushover extends NotificationProvider {
|
||||
|
||||
name = "pushover";
|
||||
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let okMsg = "Sent Successfully. ";
|
||||
let pushoverlink = "https://api.pushover.net/1/messages.json"
|
||||
|
||||
try {
|
||||
if (heartbeatJSON == null) {
|
||||
let data = {
|
||||
"message": "<b>Uptime Kuma Pushover testing successful.</b>",
|
||||
"user": notification.pushoveruserkey,
|
||||
"token": notification.pushoverapptoken,
|
||||
"sound": notification.pushoversounds,
|
||||
"priority": notification.pushoverpriority,
|
||||
"title": notification.pushovertitle,
|
||||
"retry": "30",
|
||||
"expire": "3600",
|
||||
"html": 1,
|
||||
}
|
||||
await axios.post(pushoverlink, data)
|
||||
return okMsg;
|
||||
}
|
||||
|
||||
let data = {
|
||||
"message": "<b>Uptime Kuma Alert</b>\n\n<b>Message</b>:" + msg + "\n<b>Time (UTC)</b>:" + heartbeatJSON["time"],
|
||||
"user": notification.pushoveruserkey,
|
||||
"token": notification.pushoverapptoken,
|
||||
"sound": notification.pushoversounds,
|
||||
"priority": notification.pushoverpriority,
|
||||
"title": notification.pushovertitle,
|
||||
"retry": "30",
|
||||
"expire": "3600",
|
||||
"html": 1,
|
||||
}
|
||||
await axios.post(pushoverlink, data)
|
||||
return okMsg;
|
||||
} catch (error) {
|
||||
this.throwGeneralAxiosError(error)
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Pushover;
|
30
server/notification-providers/pushy.js
Normal file
@@ -0,0 +1,30 @@
|
||||
const NotificationProvider = require("./notification-provider");
|
||||
const axios = require("axios");
|
||||
|
||||
class Pushy extends NotificationProvider {
|
||||
|
||||
name = "pushy";
|
||||
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let okMsg = "Sent Successfully. ";
|
||||
|
||||
try {
|
||||
await axios.post(`https://api.pushy.me/push?api_key=${notification.pushyAPIKey}`, {
|
||||
"to": notification.pushyToken,
|
||||
"data": {
|
||||
"message": "Uptime-Kuma"
|
||||
},
|
||||
"notification": {
|
||||
"body": msg,
|
||||
"badge": 1,
|
||||
"sound": "ping.aiff"
|
||||
}
|
||||
})
|
||||
return okMsg;
|
||||
} catch (error) {
|
||||
this.throwGeneralAxiosError(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Pushy;
|
46
server/notification-providers/rocket-chat.js
Normal file
@@ -0,0 +1,46 @@
|
||||
const NotificationProvider = require("./notification-provider");
|
||||
const axios = require("axios");
|
||||
|
||||
class RocketChat extends NotificationProvider {
|
||||
|
||||
name = "rocket.chat";
|
||||
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let okMsg = "Sent Successfully. ";
|
||||
try {
|
||||
if (heartbeatJSON == null) {
|
||||
let data = {
|
||||
"text": "Uptime Kuma Rocket.chat testing successful.",
|
||||
"channel": notification.rocketchannel,
|
||||
"username": notification.rocketusername,
|
||||
"icon_emoji": notification.rocketiconemo,
|
||||
}
|
||||
await axios.post(notification.rocketwebhookURL, data)
|
||||
return okMsg;
|
||||
}
|
||||
|
||||
const time = heartbeatJSON["time"];
|
||||
let data = {
|
||||
"text": "Uptime Kuma Alert",
|
||||
"channel": notification.rocketchannel,
|
||||
"username": notification.rocketusername,
|
||||
"icon_emoji": notification.rocketiconemo,
|
||||
"attachments": [
|
||||
{
|
||||
"title": "Uptime Kuma Alert *Time (UTC)*\n" + time,
|
||||
"title_link": notification.rocketbutton,
|
||||
"text": "*Message*\n" + msg,
|
||||
"color": "#32cd32"
|
||||
}
|
||||
]
|
||||
}
|
||||
await axios.post(notification.rocketwebhookURL, data)
|
||||
return okMsg;
|
||||
} catch (error) {
|
||||
this.throwGeneralAxiosError(error)
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = RocketChat;
|
27
server/notification-providers/signal.js
Normal file
@@ -0,0 +1,27 @@
|
||||
const NotificationProvider = require("./notification-provider");
|
||||
const axios = require("axios");
|
||||
|
||||
class Signal extends NotificationProvider {
|
||||
|
||||
name = "signal";
|
||||
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let okMsg = "Sent Successfully. ";
|
||||
|
||||
try {
|
||||
let data = {
|
||||
"message": msg,
|
||||
"number": notification.signalNumber,
|
||||
"recipients": notification.signalRecipients.replace(/\s/g, "").split(","),
|
||||
};
|
||||
let config = {};
|
||||
|
||||
await axios.post(notification.signalURL, data, config)
|
||||
return okMsg;
|
||||
} catch (error) {
|
||||
this.throwGeneralAxiosError(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Signal;
|
70
server/notification-providers/slack.js
Normal file
@@ -0,0 +1,70 @@
|
||||
const NotificationProvider = require("./notification-provider");
|
||||
const axios = require("axios");
|
||||
|
||||
class Slack extends NotificationProvider {
|
||||
|
||||
name = "slack";
|
||||
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let okMsg = "Sent Successfully. ";
|
||||
try {
|
||||
if (heartbeatJSON == null) {
|
||||
let data = {
|
||||
"text": "Uptime Kuma Slack testing successful.",
|
||||
"channel": notification.slackchannel,
|
||||
"username": notification.slackusername,
|
||||
"icon_emoji": notification.slackiconemo,
|
||||
}
|
||||
await axios.post(notification.slackwebhookURL, data)
|
||||
return okMsg;
|
||||
}
|
||||
|
||||
const time = heartbeatJSON["time"];
|
||||
let data = {
|
||||
"text": "Uptime Kuma Alert",
|
||||
"channel": notification.slackchannel,
|
||||
"username": notification.slackusername,
|
||||
"icon_emoji": notification.slackiconemo,
|
||||
"blocks": [{
|
||||
"type": "header",
|
||||
"text": {
|
||||
"type": "plain_text",
|
||||
"text": "Uptime Kuma Alert",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "section",
|
||||
"fields": [{
|
||||
"type": "mrkdwn",
|
||||
"text": "*Message*\n" + msg,
|
||||
},
|
||||
{
|
||||
"type": "mrkdwn",
|
||||
"text": "*Time (UTC)*\n" + time,
|
||||
}],
|
||||
},
|
||||
{
|
||||
"type": "actions",
|
||||
"elements": [
|
||||
{
|
||||
"type": "button",
|
||||
"text": {
|
||||
"type": "plain_text",
|
||||
"text": "Visit Uptime Kuma",
|
||||
},
|
||||
"value": "Uptime-Kuma",
|
||||
"url": notification.slackbutton || "https://github.com/louislam/uptime-kuma",
|
||||
},
|
||||
],
|
||||
}],
|
||||
}
|
||||
await axios.post(notification.slackwebhookURL, data)
|
||||
return okMsg;
|
||||
} catch (error) {
|
||||
this.throwGeneralAxiosError(error)
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Slack;
|
48
server/notification-providers/smtp.js
Normal file
@@ -0,0 +1,48 @@
|
||||
const nodemailer = require("nodemailer");
|
||||
const NotificationProvider = require("./notification-provider");
|
||||
|
||||
class SMTP extends NotificationProvider {
|
||||
|
||||
name = "smtp";
|
||||
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
|
||||
const config = {
|
||||
host: notification.smtpHost,
|
||||
port: notification.smtpPort,
|
||||
secure: notification.smtpSecure,
|
||||
};
|
||||
|
||||
// Should fix the issue in https://github.com/louislam/uptime-kuma/issues/26#issuecomment-896373904
|
||||
if (notification.smtpUsername || notification.smtpPassword) {
|
||||
config.auth = {
|
||||
user: notification.smtpUsername,
|
||||
pass: notification.smtpPassword,
|
||||
};
|
||||
}
|
||||
|
||||
let transporter = nodemailer.createTransport(config);
|
||||
|
||||
let bodyTextContent = msg;
|
||||
if (heartbeatJSON) {
|
||||
bodyTextContent = `${msg}\nTime (UTC): ${heartbeatJSON["time"]}`;
|
||||
}
|
||||
|
||||
// send mail with defined transport object
|
||||
await transporter.sendMail({
|
||||
from: notification.smtpFrom,
|
||||
cc: notification.smtpCC,
|
||||
bcc: notification.smtpBCC,
|
||||
to: notification.smtpTo,
|
||||
subject: msg,
|
||||
text: bodyTextContent,
|
||||
tls: {
|
||||
rejectUnauthorized: notification.smtpIgnoreTLSError || false,
|
||||
},
|
||||
});
|
||||
|
||||
return "Sent Successfully.";
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = SMTP;
|
124
server/notification-providers/teams.js
Normal file
@@ -0,0 +1,124 @@
|
||||
const NotificationProvider = require("./notification-provider");
|
||||
const axios = require("axios");
|
||||
const { DOWN, UP } = require("../../src/util");
|
||||
|
||||
class Teams extends NotificationProvider {
|
||||
name = "teams";
|
||||
|
||||
_statusMessageFactory = (status, monitorName) => {
|
||||
if (status === DOWN) {
|
||||
return `🔴 Application [${monitorName}] went down`;
|
||||
} else if (status === UP) {
|
||||
return `✅ Application [${monitorName}] is back online`;
|
||||
}
|
||||
return "Notification";
|
||||
};
|
||||
|
||||
_getThemeColor = (status) => {
|
||||
if (status === DOWN) {
|
||||
return "ff0000";
|
||||
}
|
||||
if (status === UP) {
|
||||
return "00e804";
|
||||
}
|
||||
return "008cff";
|
||||
};
|
||||
|
||||
_notificationPayloadFactory = ({
|
||||
status,
|
||||
monitorMessage,
|
||||
monitorName,
|
||||
monitorUrl,
|
||||
}) => {
|
||||
const notificationMessage = this._statusMessageFactory(
|
||||
status,
|
||||
monitorName
|
||||
);
|
||||
|
||||
const facts = [];
|
||||
|
||||
if (monitorName) {
|
||||
facts.push({
|
||||
name: "Monitor",
|
||||
value: monitorName,
|
||||
});
|
||||
}
|
||||
|
||||
if (monitorUrl) {
|
||||
facts.push({
|
||||
name: "URL",
|
||||
value: monitorUrl,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
"@context": "https://schema.org/extensions",
|
||||
"@type": "MessageCard",
|
||||
themeColor: this._getThemeColor(status),
|
||||
summary: notificationMessage,
|
||||
sections: [
|
||||
{
|
||||
activityImage:
|
||||
"https://raw.githubusercontent.com/louislam/uptime-kuma/master/public/icon.png",
|
||||
activityTitle: "**Uptime Kuma**",
|
||||
},
|
||||
{
|
||||
activityTitle: notificationMessage,
|
||||
},
|
||||
{
|
||||
activityTitle: "**Description**",
|
||||
text: monitorMessage,
|
||||
facts,
|
||||
},
|
||||
],
|
||||
};
|
||||
};
|
||||
|
||||
_sendNotification = async (webhookUrl, payload) => {
|
||||
await axios.post(webhookUrl, payload);
|
||||
};
|
||||
|
||||
_handleGeneralNotification = (webhookUrl, msg) => {
|
||||
const payload = this._notificationPayloadFactory({
|
||||
monitorMessage: msg
|
||||
});
|
||||
|
||||
return this._sendNotification(webhookUrl, payload);
|
||||
};
|
||||
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let okMsg = "Sent Successfully. ";
|
||||
|
||||
try {
|
||||
if (heartbeatJSON == null) {
|
||||
await this._handleGeneralNotification(notification.webhookUrl, msg);
|
||||
return okMsg;
|
||||
}
|
||||
|
||||
let url;
|
||||
|
||||
if (monitorJSON["type"] === "port") {
|
||||
url = monitorJSON["hostname"];
|
||||
if (monitorJSON["port"]) {
|
||||
url += ":" + monitorJSON["port"];
|
||||
}
|
||||
} else {
|
||||
url = monitorJSON["url"];
|
||||
}
|
||||
|
||||
const payload = this._notificationPayloadFactory({
|
||||
monitorMessage: heartbeatJSON.msg,
|
||||
monitorName: monitorJSON.name,
|
||||
monitorUrl: url,
|
||||
status: heartbeatJSON.status,
|
||||
});
|
||||
|
||||
await this._sendNotification(notification.webhookUrl, payload);
|
||||
return okMsg;
|
||||
} catch (error) {
|
||||
this.throwGeneralAxiosError(error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Teams;
|
27
server/notification-providers/telegram.js
Normal file
@@ -0,0 +1,27 @@
|
||||
const NotificationProvider = require("./notification-provider");
|
||||
const axios = require("axios");
|
||||
|
||||
class Telegram extends NotificationProvider {
|
||||
|
||||
name = "telegram";
|
||||
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let okMsg = "Sent Successfully. ";
|
||||
|
||||
try {
|
||||
await axios.get(`https://api.telegram.org/bot${notification.telegramBotToken}/sendMessage`, {
|
||||
params: {
|
||||
chat_id: notification.telegramChatID,
|
||||
text: msg,
|
||||
},
|
||||
})
|
||||
return okMsg;
|
||||
|
||||
} catch (error) {
|
||||
let msg = (error.response && error.response.data && error.response.data.description) ? error.response.data.description : "Error without description"
|
||||
throw new Error(msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Telegram;
|
44
server/notification-providers/webhook.js
Normal file
@@ -0,0 +1,44 @@
|
||||
const NotificationProvider = require("./notification-provider");
|
||||
const axios = require("axios");
|
||||
const FormData = require("form-data");
|
||||
|
||||
class Webhook extends NotificationProvider {
|
||||
|
||||
name = "webhook";
|
||||
|
||||
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
let okMsg = "Sent Successfully. ";
|
||||
|
||||
try {
|
||||
let data = {
|
||||
heartbeat: heartbeatJSON,
|
||||
monitor: monitorJSON,
|
||||
msg,
|
||||
};
|
||||
let finalData;
|
||||
let config = {};
|
||||
|
||||
if (notification.webhookContentType === "form-data") {
|
||||
finalData = new FormData();
|
||||
finalData.append("data", JSON.stringify(data));
|
||||
|
||||
config = {
|
||||
headers: finalData.getHeaders(),
|
||||
}
|
||||
|
||||
} else {
|
||||
finalData = data;
|
||||
}
|
||||
|
||||
await axios.post(notification.webhookURL, finalData, config)
|
||||
return okMsg;
|
||||
|
||||
} catch (error) {
|
||||
this.throwGeneralAxiosError(error)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
module.exports = Webhook;
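On the receiving side, the payload is either raw JSON or a multipart form with a single "data" field, depending on webhookContentType. A sketch of an Express receiver for the plain JSON case (route and port are assumptions):

const express = require("express");

const app = express();
app.use(express.json());

app.post("/uptime-kuma-hook", (req, res) => {
    const { heartbeat, monitor, msg } = req.body;
    console.log(monitor ? monitor.name : "test", "-", msg, heartbeat ? heartbeat.status : "");
    res.sendStatus(200);
});

app.listen(3001);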
|
@@ -1,117 +1,77 @@
|
||||
const axios = require("axios");
|
||||
const {R} = require("redbean-node");
|
||||
const FormData = require('form-data');
|
||||
const nodemailer = require("nodemailer");
|
||||
const { R } = require("redbean-node");
|
||||
const Apprise = require("./notification-providers/apprise");
|
||||
const Discord = require("./notification-providers/discord");
|
||||
const Gotify = require("./notification-providers/gotify");
|
||||
const Line = require("./notification-providers/line");
|
||||
const LunaSea = require("./notification-providers/lunasea");
|
||||
const Mattermost = require("./notification-providers/mattermost");
|
||||
const Octopush = require("./notification-providers/octopush");
|
||||
const Pushbullet = require("./notification-providers/pushbullet");
|
||||
const Pushover = require("./notification-providers/pushover");
|
||||
const Pushy = require("./notification-providers/pushy");
|
||||
const RocketChat = require("./notification-providers/rocket-chat");
|
||||
const Signal = require("./notification-providers/signal");
|
||||
const Slack = require("./notification-providers/slack");
|
||||
const SMTP = require("./notification-providers/smtp");
|
||||
const Teams = require("./notification-providers/teams");
|
||||
const Telegram = require("./notification-providers/telegram");
|
||||
const Webhook = require("./notification-providers/webhook");
|
||||
|
||||
class Notification {
|
||||
static async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
if (notification.type === "telegram") {
|
||||
try {
|
||||
await axios.get(`https://api.telegram.org/bot${notification.telegramBotToken}/sendMessage`, {
|
||||
params: {
|
||||
chat_id: notification.telegramChatID,
|
||||
text: msg,
|
||||
}
|
||||
})
|
||||
return true;
|
||||
} catch (error) {
|
||||
console.log(error)
|
||||
return false;
|
||||
|
||||
providerList = {};
|
||||
|
||||
static init() {
|
||||
console.log("Prepare Notification Providers");
|
||||
|
||||
this.providerList = {};
|
||||
|
||||
const list = [
|
||||
new Apprise(),
|
||||
new Discord(),
|
||||
new Teams(),
|
||||
new Gotify(),
|
||||
new Line(),
|
||||
new LunaSea(),
|
||||
new Mattermost(),
|
||||
new Octopush(),
|
||||
new Pushbullet(),
|
||||
new Pushover(),
|
||||
new Pushy(),
|
||||
new RocketChat(),
|
||||
new Signal(),
|
||||
new Slack(),
|
||||
new SMTP(),
|
||||
new Telegram(),
|
||||
new Webhook(),
|
||||
];
|
||||
|
||||
for (let item of list) {
|
||||
if (! item.name) {
|
||||
throw new Error("Notification provider without name");
|
||||
}
|
||||
|
||||
} else if (notification.type === "webhook") {
|
||||
try {
|
||||
|
||||
let data = {
|
||||
heartbeat: heartbeatJSON,
|
||||
monitor: monitorJSON,
|
||||
msg,
|
||||
};
|
||||
let finalData;
|
||||
let config = {};
|
||||
|
||||
if (notification.webhookContentType === "form-data") {
|
||||
finalData = new FormData();
|
||||
finalData.append('data', JSON.stringify(data));
|
||||
|
||||
config = {
|
||||
headers: finalData.getHeaders()
|
||||
}
|
||||
|
||||
} else {
|
||||
finalData = data;
|
||||
}
|
||||
|
||||
let res = await axios.post(notification.webhookURL, finalData, config)
|
||||
return true;
|
||||
} catch (error) {
|
||||
console.log(error)
|
||||
return false;
|
||||
if (this.providerList[item.name]) {
|
||||
throw new Error("Duplicate notification provider name");
|
||||
}
|
||||
|
||||
} else if (notification.type === "smtp") {
|
||||
return await Notification.smtp(notification, msg)
|
||||
|
||||
} else if (notification.type === "discord") {
|
||||
try {
|
||||
// If heartbeatJSON is null, assume we're testing.
|
||||
if(heartbeatJSON == null) {
|
||||
let data = {
|
||||
username: 'Uptime-Kuma',
|
||||
content: msg
|
||||
}
|
||||
let res = await axios.post(notification.discordWebhookUrl, data)
|
||||
return true;
|
||||
}
|
||||
// If heartbeatJSON is not null, we go into the normal alerting loop.
|
||||
if(heartbeatJSON['status'] == 0) {
|
||||
var alertColor = "16711680";
|
||||
} else if(heartbeatJSON['status'] == 1) {
|
||||
var alertColor = "65280";
|
||||
}
|
||||
let data = {
|
||||
username: 'Uptime-Kuma',
|
||||
embeds: [{
|
||||
title: "Uptime-Kuma Alert",
|
||||
color: alertColor,
|
||||
fields: [
|
||||
{
|
||||
name: "Time (UTC)",
|
||||
value: heartbeatJSON["time"]
|
||||
},
|
||||
{
|
||||
name: "Message",
|
||||
value: msg
|
||||
}
|
||||
]
|
||||
}]
|
||||
}
|
||||
let res = await axios.post(notification.discordWebhookUrl, data)
|
||||
return true;
|
||||
} catch(error) {
|
||||
console.log(error)
|
||||
return false;
|
||||
}
|
||||
return await Notification.discord(notification, msg)
|
||||
|
||||
} else if (notification.type === "signal") {
|
||||
try {
|
||||
let data = {
|
||||
"message": msg,
|
||||
"number": notification.signalNumber,
|
||||
"recipients": notification.signalRecipients.replace(/\s/g, '').split(",")
|
||||
};
|
||||
let config = {};
|
||||
|
||||
let res = await axios.post(notification.signalURL, data, config)
|
||||
return true;
|
||||
} catch (error) {
|
||||
console.log(error)
|
||||
return false;
|
||||
this.providerList[item.name] = item;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param notification : BeanModel
|
||||
* @param msg : string General Message
|
||||
* @param monitorJSON : object Monitor details (For Up/Down only)
|
||||
* @param heartbeatJSON : object Heartbeat details (For Up/Down only)
|
||||
* @returns {Promise<string>} Resolves with a success message
|
||||
* Throws an Error with a failure message on error
|
||||
*/
|
||||
static async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
|
||||
if (this.providerList[notification.type]) {
|
||||
return this.providerList[notification.type].send(notification, msg, monitorJSON, heartbeatJSON);
|
||||
} else {
|
||||
throw new Error("Notification type is not supported")
|
||||
throw new Error("Notification type is not supported");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -134,8 +94,15 @@ class Notification {
|
||||
|
||||
bean.name = notification.name;
|
||||
bean.user_id = userID;
|
||||
bean.config = JSON.stringify(notification)
|
||||
bean.config = JSON.stringify(notification);
|
||||
bean.is_default = notification.isDefault || false;
|
||||
await R.store(bean)
|
||||
|
||||
if (notification.applyExisting) {
|
||||
await applyNotificationEveryMonitor(bean.id, userID);
|
||||
}
|
||||
|
||||
return bean;
|
||||
}
|
||||
|
||||
static async delete(notificationID, userID) {
|
||||
@@ -151,39 +118,31 @@ class Notification {
|
||||
await R.trash(bean)
|
||||
}
|
||||
|
||||
static async smtp(notification, msg) {
|
||||
|
||||
let transporter = nodemailer.createTransport({
|
||||
host: notification.smtpHost,
|
||||
port: notification.smtpPort,
|
||||
secure: notification.smtpSecure,
|
||||
auth: {
|
||||
user: notification.smtpUsername,
|
||||
pass: notification.smtpPassword,
|
||||
},
|
||||
});
|
||||
|
||||
// send mail with defined transport object
|
||||
let info = await transporter.sendMail({
|
||||
from: `"Uptime Kuma" <${notification.smtpFrom}>`,
|
||||
to: notification.smtpTo,
|
||||
subject: msg,
|
||||
text: msg,
|
||||
});
|
||||
|
||||
return true;
|
||||
static checkApprise() {
|
||||
let commandExistsSync = require("command-exists").sync;
|
||||
let exists = commandExistsSync("apprise");
|
||||
return exists;
|
||||
}
|
||||
|
||||
static async discord(notification, msg) {
|
||||
const client = new Discord.Client();
|
||||
await client.login(notification.discordToken)
|
||||
}
|
||||
|
||||
const channel = await client.channels.fetch(notification.discordChannelID);
|
||||
await channel.send(msg);
|
||||
async function applyNotificationEveryMonitor(notificationID, userID) {
|
||||
let monitors = await R.getAll("SELECT id FROM monitor WHERE user_id = ?", [
|
||||
userID
|
||||
]);
|
||||
|
||||
client.destroy()
|
||||
for (let i = 0; i < monitors.length; i++) {
|
||||
let checkNotification = await R.findOne("monitor_notification", " monitor_id = ? AND notification_id = ? ", [
|
||||
monitors[i].id,
|
||||
notificationID,
|
||||
])
|
||||
|
||||
return true;
|
||||
if (! checkNotification) {
|
||||
let relation = R.dispense("monitor_notification");
|
||||
relation.monitor_id = monitors[i].id;
|
||||
relation.notification_id = notificationID;
|
||||
await R.store(relation)
|
||||
}
|
||||
}
|
||||
}
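With the refactor above, callers initialise the provider registry once at startup and then dispatch purely by notification.type. A usage sketch, assuming the class is exported as { Notification } and reusing the Gotify fields shown earlier:

const { Notification } = require("./server/notification"); // export shape assumed

Notification.init();

// In the real server this bean comes from the database; the fields shown are Gotify's.
const notification = {
    type: "gotify",
    gotifyserverurl: "https://gotify.example.com",
    gotifyapplicationToken: "example-token",
};

Notification.send(notification, "Test message")
    .then((okMsg) => console.log(okMsg))
    .catch((err) => console.error(err.message));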
|
||||
|
||||
|
@@ -1,5 +1,5 @@
|
||||
const passwordHashOld = require('password-hash');
|
||||
const bcrypt = require('bcrypt');
|
||||
const passwordHashOld = require("password-hash");
|
||||
const bcrypt = require("bcryptjs");
|
||||
const saltRounds = 10;
|
||||
|
||||
exports.generate = function (password) {
|
||||
@@ -9,9 +9,9 @@ exports.generate = function (password) {
|
||||
exports.verify = function (password, hash) {
|
||||
if (isSHA1(hash)) {
|
||||
return passwordHashOld.verify(password, hash)
|
||||
} else {
|
||||
return bcrypt.compareSync(password, hash);
|
||||
}
|
||||
|
||||
return bcrypt.compareSync(password, hash);
|
||||
}
|
||||
|
||||
function isSHA1(hash) {
|
||||
|
@@ -1,45 +1,75 @@
|
||||
// https://github.com/ben-bradley/ping-lite/blob/master/ping-lite.js
|
||||
// Fixed on Windows
|
||||
|
||||
var spawn = require('child_process').spawn,
|
||||
events = require('events'),
|
||||
fs = require('fs'),
|
||||
WIN = /^win/.test(process.platform),
|
||||
LIN = /^linux/.test(process.platform),
|
||||
MAC = /^darwin/.test(process.platform);
|
||||
const net = require("net");
|
||||
const spawn = require("child_process").spawn;
|
||||
const events = require("events");
|
||||
const fs = require("fs");
|
||||
const WIN = /^win/.test(process.platform);
|
||||
const LIN = /^linux/.test(process.platform);
|
||||
const MAC = /^darwin/.test(process.platform);
|
||||
const FBSD = /^freebsd/.test(process.platform);
|
||||
|
||||
module.exports = Ping;
|
||||
|
||||
function Ping(host, options) {
|
||||
if (!host)
|
||||
throw new Error('You must specify a host to ping!');
|
||||
if (!host) {
|
||||
throw new Error("You must specify a host to ping!");
|
||||
}
|
||||
|
||||
this._host = host;
|
||||
this._options = options = (options || {});
|
||||
|
||||
events.EventEmitter.call(this);
|
||||
|
||||
const timeout = 10;
|
||||
|
||||
if (WIN) {
|
||||
this._bin = 'c:/windows/system32/ping.exe';
|
||||
this._args = (options.args) ? options.args : [ '-n', '1', '-w', '5000', host ];
|
||||
this._bin = "c:/windows/system32/ping.exe";
|
||||
this._args = (options.args) ? options.args : [ "-n", "1", "-w", timeout * 1000, host ];
|
||||
this._regmatch = /[><=]([0-9.]+?)ms/;
|
||||
}
|
||||
else if (LIN) {
|
||||
this._bin = '/bin/ping';
|
||||
this._args = (options.args) ? options.args : [ '-n', '-w', '2', '-c', '1', host ];
|
||||
this._regmatch = /=([0-9.]+?) ms/; // need to verify this
|
||||
}
|
||||
else if (MAC) {
|
||||
this._bin = '/sbin/ping';
|
||||
this._args = (options.args) ? options.args : [ '-n', '-t', '2', '-c', '1', host ];
|
||||
|
||||
} else if (LIN) {
|
||||
this._bin = "/bin/ping";
|
||||
|
||||
const defaultArgs = [ "-n", "-w", timeout, "-c", "1", host ];
|
||||
|
||||
if (net.isIPv6(host) || options.ipv6) {
|
||||
defaultArgs.unshift("-6");
|
||||
}
|
||||
|
||||
this._args = (options.args) ? options.args : defaultArgs;
|
||||
this._regmatch = /=([0-9.]+?) ms/;
|
||||
}
|
||||
else {
|
||||
throw new Error('Could not detect your ping binary.');
|
||||
|
||||
} else if (MAC) {
|
||||
|
||||
if (net.isIPv6(host) || options.ipv6) {
|
||||
this._bin = "/sbin/ping6";
|
||||
} else {
|
||||
this._bin = "/sbin/ping";
|
||||
}
|
||||
|
||||
this._args = (options.args) ? options.args : [ "-n", "-t", timeout, "-c", "1", host ];
|
||||
this._regmatch = /=([0-9.]+?) ms/;
|
||||
|
||||
} else if (FBSD) {
|
||||
this._bin = "/sbin/ping";
|
||||
|
||||
const defaultArgs = [ "-n", "-t", timeout, "-c", "1", host ];
|
||||
|
||||
if (net.isIPv6(host) || options.ipv6) {
|
||||
defaultArgs.unshift("-6");
|
||||
}
|
||||
|
||||
this._args = (options.args) ? options.args : defaultArgs;
|
||||
this._regmatch = /=([0-9.]+?) ms/;
|
||||
|
||||
} else {
|
||||
throw new Error("Could not detect your ping binary.");
|
||||
}
|
||||
|
||||
if (!fs.existsSync(this._bin))
|
||||
throw new Error('Could not detect '+this._bin+' on your system');
|
||||
if (!fs.existsSync(this._bin)) {
|
||||
throw new Error("Could not detect " + this._bin + " on your system");
|
||||
}
|
||||
|
||||
this._i = 0;
|
||||
|
||||
@@ -50,62 +80,73 @@ Ping.prototype.__proto__ = events.EventEmitter.prototype;
|
||||
|
||||
// SEND A PING
|
||||
// ===========
|
||||
Ping.prototype.send = function(callback) {
|
||||
var self = this;
|
||||
callback = callback || function(err, ms) {
|
||||
if (err) return self.emit('error', err);
|
||||
else return self.emit('result', ms);
|
||||
Ping.prototype.send = function (callback) {
|
||||
let self = this;
|
||||
callback = callback || function (err, ms) {
|
||||
if (err) {
|
||||
return self.emit("error", err);
|
||||
}
|
||||
return self.emit("result", ms);
|
||||
};
|
||||
|
||||
var _ended, _exited, _errored;
|
||||
let _ended;
|
||||
let _exited;
|
||||
let _errored;
|
||||
|
||||
this._ping = spawn(this._bin, this._args); // spawn the binary
|
||||
|
||||
this._ping.on('error', function(err) { // handle binary errors
|
||||
this._ping.on("error", function (err) { // handle binary errors
|
||||
_errored = true;
|
||||
callback(err);
|
||||
});
|
||||
|
||||
this._ping.stdout.on('data', function(data) { // log stdout
|
||||
this._stdout = (this._stdout || '') + data;
|
||||
this._ping.stdout.on("data", function (data) { // log stdout
|
||||
this._stdout = (this._stdout || "") + data;
|
||||
});
|
||||
|
||||
this._ping.stdout.on('end', function() {
|
||||
this._ping.stdout.on("end", function () {
|
||||
_ended = true;
|
||||
if (_exited && !_errored) onEnd.call(self._ping);
|
||||
if (_exited && !_errored) {
|
||||
onEnd.call(self._ping);
|
||||
}
|
||||
});
|
||||
|
||||
this._ping.stderr.on('data', function(data) { // log stderr
|
||||
this._stderr = (this._stderr || '') + data;
|
||||
this._ping.stderr.on("data", function (data) { // log stderr
|
||||
this._stderr = (this._stderr || "") + data;
|
||||
});
|
||||
|
||||
this._ping.on('exit', function(code) { // handle complete
|
||||
this._ping.on("exit", function (code) { // handle complete
|
||||
_exited = true;
|
||||
if (_ended && !_errored) onEnd.call(self._ping);
|
||||
if (_ended && !_errored) {
|
||||
onEnd.call(self._ping);
|
||||
}
|
||||
});
|
||||
|
||||
function onEnd() {
|
||||
var stdout = this.stdout._stdout,
|
||||
stderr = this.stderr._stderr,
|
||||
ms;
|
||||
let stdout = this.stdout._stdout;
|
||||
let stderr = this.stderr._stderr;
|
||||
let ms;
|
||||
|
||||
if (stderr)
|
||||
if (stderr) {
|
||||
return callback(new Error(stderr));
|
||||
else if (!stdout)
|
||||
return callback(new Error('No stdout detected'));
|
||||
}
|
||||
|
||||
if (!stdout) {
|
||||
return callback(new Error("No stdout detected"));
|
||||
}
|
||||
|
||||
ms = stdout.match(self._regmatch); // parse out the ##ms response
|
||||
ms = (ms && ms[1]) ? Number(ms[1]) : ms;
|
||||
|
||||
callback(null, ms);
|
||||
callback(null, ms, stdout);
|
||||
}
|
||||
};
|
||||
|
||||
// CALL Ping#send(callback) ON A TIMER
|
||||
// ===================================
|
||||
Ping.prototype.start = function(callback) {
|
||||
var self = this;
|
||||
this._i = setInterval(function() {
|
||||
Ping.prototype.start = function (callback) {
|
||||
let self = this;
|
||||
this._i = setInterval(function () {
|
||||
self.send(callback);
|
||||
}, (self._options.interval || 5000));
|
||||
self.send(callback);
|
||||
@@ -113,6 +154,6 @@ Ping.prototype.start = function(callback) {
|
||||
|
||||
// STOP SENDING PINGS
|
||||
// ==================
|
||||
Ping.prototype.stop = function() {
|
||||
Ping.prototype.stop = function () {
|
||||
clearInterval(this._i);
|
||||
};
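A usage sketch of the reworked ping wrapper (the require path is assumed to be server/ping-lite.js); note that the callback now also receives the raw stdout added above:

const Ping = require("./server/ping-lite"); // path assumed

const ping = new Ping("example.com", { interval: 5000 });

// One-off ping
ping.send((err, ms, stdout) => {
    if (err) {
        console.error("ping failed:", err.message);
    } else {
        console.log("round trip:", ms, "ms");
    }
});

// Or poll on the configured interval and stop later
ping.start((err, ms) => console.log(err ? err.message : ms + " ms"));
setTimeout(() => ping.stop(), 30000);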
|
||||
|
90
server/prometheus.js
Normal file
@@ -0,0 +1,90 @@
|
||||
const PrometheusClient = require("prom-client");
|
||||
|
||||
const commonLabels = [
|
||||
"monitor_name",
|
||||
"monitor_type",
|
||||
"monitor_url",
|
||||
"monitor_hostname",
|
||||
"monitor_port",
|
||||
]
|
||||
|
||||
const monitor_cert_days_remaining = new PrometheusClient.Gauge({
|
||||
name: "monitor_cert_days_remaining",
|
||||
help: "The number of days remaining until the certificate expires",
|
||||
labelNames: commonLabels
|
||||
});
|
||||
|
||||
const monitor_cert_is_valid = new PrometheusClient.Gauge({
|
||||
name: "monitor_cert_is_valid",
|
||||
help: "Is the certificate still valid? (1 = Yes, 0= No)",
|
||||
labelNames: commonLabels
|
||||
});
|
||||
const monitor_response_time = new PrometheusClient.Gauge({
|
||||
name: "monitor_response_time",
|
||||
help: "Monitor Response Time (ms)",
|
||||
labelNames: commonLabels
|
||||
});
|
||||
|
||||
const monitor_status = new PrometheusClient.Gauge({
|
||||
name: "monitor_status",
|
||||
help: "Monitor Status (1 = UP, 0= DOWN)",
|
||||
labelNames: commonLabels
|
||||
});
|
||||
|
||||
class Prometheus {
|
||||
monitorLabelValues = {}
|
||||
|
||||
constructor(monitor) {
|
||||
this.monitorLabelValues = {
|
||||
monitor_name: monitor.name,
|
||||
monitor_type: monitor.type,
|
||||
monitor_url: monitor.url,
|
||||
monitor_hostname: monitor.hostname,
|
||||
monitor_port: monitor.port
|
||||
}
|
||||
}
|
||||
|
||||
update(heartbeat, tlsInfo) {
|
||||
if (typeof tlsInfo !== "undefined") {
|
||||
try {
|
||||
let is_valid = 0
|
||||
if (tlsInfo.valid == true) {
|
||||
is_valid = 1
|
||||
} else {
|
||||
is_valid = 0
|
||||
}
|
||||
monitor_cert_is_valid.set(this.monitorLabelValues, is_valid)
|
||||
} catch (e) {
|
||||
console.error(e)
|
||||
}
|
||||
|
||||
try {
|
||||
monitor_cert_days_remaining.set(this.monitorLabelValues, tlsInfo.daysRemaining)
|
||||
} catch (e) {
|
||||
console.error(e)
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
monitor_status.set(this.monitorLabelValues, heartbeat.status)
|
||||
} catch (e) {
|
||||
console.error(e)
|
||||
}
|
||||
|
||||
try {
|
||||
if (typeof heartbeat.ping === "number") {
|
||||
monitor_response_time.set(this.monitorLabelValues, heartbeat.ping)
|
||||
} else {
|
||||
// No numeric ping available; fall back to -1 (is that a good placeholder?)
|
||||
monitor_response_time.set(this.monitorLabelValues, -1)
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(e)
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
Prometheus
|
||||
}
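The gauges above register on prom-client's default registry, so exposing them is a matter of serving that registry. A sketch of per-monitor usage plus a /metrics endpoint (route, port and monitor shape are assumptions; the real wiring lives in the server setup):

const express = require("express");
const PrometheusClient = require("prom-client");
const { Prometheus } = require("./server/prometheus");

// Per-monitor usage: create once, then update on every heartbeat.
const metrics = new Prometheus({ name: "My Site", type: "http", url: "https://example.com" });
metrics.update({ status: 1, ping: 42 }, undefined);

const app = express();

app.get("/metrics", async (req, res) => {
    res.set("Content-Type", PrometheusClient.register.contentType);
    res.end(await PrometheusClient.register.metrics());
});

app.listen(3001);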
|
151
server/routers/api-router.js
Normal file
@@ -0,0 +1,151 @@
|
||||
let express = require("express");
|
||||
const { allowDevAllOrigin, getSettings, setting } = require("../util-server");
|
||||
const { R } = require("redbean-node");
|
||||
const server = require("../server");
|
||||
const apicache = require("../modules/apicache");
|
||||
const Monitor = require("../model/monitor");
|
||||
let router = express.Router();
|
||||
|
||||
let cache = apicache.middleware;
|
||||
|
||||
router.get("/api/entry-page", async (_, response) => {
|
||||
allowDevAllOrigin(response);
|
||||
response.json(server.entryPage);
|
||||
});
|
||||
|
||||
// Status Page Config
|
||||
router.get("/api/status-page/config", async (_request, response) => {
|
||||
allowDevAllOrigin(response);
|
||||
|
||||
let config = await getSettings("statusPage");
|
||||
|
||||
if (! config.statusPageTheme) {
|
||||
config.statusPageTheme = "light";
|
||||
}
|
||||
|
||||
if (! config.statusPagePublished) {
|
||||
config.statusPagePublished = true;
|
||||
}
|
||||
|
||||
if (! config.title) {
|
||||
config.title = "Uptime Kuma";
|
||||
}
|
||||
|
||||
response.json(config);
|
||||
});
|
||||
|
||||
// Status Page - Get the current Incident
|
||||
// Can fetch only if published
|
||||
router.get("/api/status-page/incident", async (_, response) => {
|
||||
allowDevAllOrigin(response);
|
||||
|
||||
try {
|
||||
await checkPublished();
|
||||
|
||||
let incident = await R.findOne("incident", " pin = 1 AND active = 1");
|
||||
|
||||
if (incident) {
|
||||
incident = incident.toPublicJSON();
|
||||
}
|
||||
|
||||
response.json({
|
||||
ok: true,
|
||||
incident,
|
||||
});
|
||||
|
||||
} catch (error) {
|
||||
send403(response, error.message);
|
||||
}
|
||||
});
|
||||
|
||||
// Status Page - Monitor List
|
||||
// Can fetch only if published
|
||||
router.get("/api/status-page/monitor-list", cache("5 minutes"), async (_request, response) => {
|
||||
allowDevAllOrigin(response);
|
||||
|
||||
try {
|
||||
await checkPublished();
|
||||
const publicGroupList = [];
|
||||
let list = await R.find("group", " public = 1 ORDER BY weight ");
|
||||
|
||||
for (let groupBean of list) {
|
||||
publicGroupList.push(await groupBean.toPublicJSON());
|
||||
}
|
||||
|
||||
response.json(publicGroupList);
|
||||
|
||||
} catch (error) {
|
||||
send403(response, error.message);
|
||||
}
|
||||
});
|
||||
|
||||
// Status Page Polling Data
|
||||
// Can fetch only if published
|
||||
router.get("/api/status-page/heartbeat", cache("5 minutes"), async (_request, response) => {
|
||||
allowDevAllOrigin(response);
|
||||
|
||||
try {
|
||||
await checkPublished();
|
||||
|
||||
let heartbeatList = {};
|
||||
let uptimeList = {};
|
||||
|
||||
let monitorIDList = await R.getCol(`
|
||||
SELECT monitor_group.monitor_id FROM monitor_group, \`group\`
|
||||
WHERE monitor_group.group_id = \`group\`.id
|
||||
AND public = 1
|
||||
`);
|
||||
|
||||
for (let monitorID of monitorIDList) {
|
||||
let list = await R.getAll(`
|
||||
SELECT * FROM heartbeat
|
||||
WHERE monitor_id = ?
|
||||
ORDER BY time DESC
|
||||
LIMIT 50
|
||||
`, [
|
||||
monitorID,
|
||||
]);
|
||||
|
||||
list = R.convertToBeans("heartbeat", list);
|
||||
heartbeatList[monitorID] = list.reverse().map(row => row.toPublicJSON());
|
||||
|
||||
const type = 24;
|
||||
uptimeList[`${monitorID}_${type}`] = await Monitor.calcUptime(type, monitorID);
|
||||
}
|
||||
|
||||
response.json({
|
||||
heartbeatList,
|
||||
uptimeList
|
||||
});
|
||||
|
||||
} catch (error) {
|
||||
send403(response, error.message);
|
||||
}
|
||||
});
|
||||
|
||||
async function checkPublished() {
|
||||
if (! await isPublished()) {
|
||||
throw new Error("The status page is not published");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Default is published
|
||||
* @returns {Promise<boolean>}
|
||||
*/
|
||||
async function isPublished() {
|
||||
const value = await setting("statusPagePublished");
|
||||
if (value === null) {
|
||||
return true;
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
function send403(res, msg = "") {
|
||||
res.status(403).json({
|
||||
"status": "fail",
|
||||
"msg": msg,
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = router;
|
Some files were not shown because too many files have changed in this diff.