Mirror of https://github.com/louislam/uptime-kuma.git (synced 2025-09-11 22:06:59 +08:00)

Compare commits: improve-he ... 1.23.X (150 commits)
Commits (SHA1):
5bb329fa0e 09dedc07fb 6cfae01a0d 32dc76a085 c6d6061a9f 243726b03c
936665aac3 1185b259c2 a81f949f98 59f10d542b 2778929f74 f71d35e53e
1490443618 add5c128ce e797abd108 7a9e2f5de6 7b5d2a71ff 893278bd3d
0e30ea830d c67a2070b8 9863a10321 ee7f8680c1 c1301804d4 b385e81608
f37f55e06c 87d7a780e3 0fc372f558 67a13e1259 2b8f55194f 288cab6dd7
b4e45c7ce8 7635ab54a0 458cdf9f9b f1e2ee74ea 8d847abf35 8151ac0e25
4185ec20b0 4245ea86e7 f861a48dfc fa1214ae5e 621419e434 482049c72b
2815cc73cf e1147c06aa abc8f2b131 777ef6bc7b b244e8fcbb 031947319a
74a908a069 9c56c9b346 37666bf35f 90badfabee e3396251a8 9c9a086788
9fb95fe95e 1e75d81bcf cb3a104dc0 57a18958d6 1708b67949 73239d441d
4ceeb304f1 711380bbbe 9536c6aa6a 4255496b11 f28dccf4e1 b689733d59
afaa7bb2f0 121d1a11af 8e61158758 bf58838b89 33ce0ef02c c1aaad0d85
954e05b72f 6d4a45f18c f0975cd929 40d6a21453 b383392e8f 9964b6c4d8
d56bf08cd7 291d5d7c55 8e3ff25f7b 6e80c850f4 0608881954 38efd97b28
ce0ba6c0ca c43223a16d 9f170a68d7 1a862e47ab e64bf0e3fe 523d137e2b
18169c59a1 4ccf263481 1c13a75970 c3e3f27457 794f1810bf 168357d93c
476deb9fec a36f2a75ca 88afab6571 bd9c44cccf 1b148786a5 66a10b8993
2ab21ccf8a 90d0e8ccde 16a396debb 6b3d69e1d3 b3b8e9f3a0 e5345848a2
d8a8f6c08b f98a1ce077 86fa57449e ff51704cdf 33804d8823 1e12ca4786
0af4ee6c34 1f29fabe64 c4e222d1e6 f2a1c26ef8 8772baad9a 215c89e8d3
e6a055af19 88d71d2c7a cd2d5325df 75a1245b70 f666eb6d83 cb10643f57
c9ba4e7e8b 94187bca5d 39b4aa5966 cd79df07e1 0c40f02584 db42c13e05
5f85d8f749 c0e273df5b 4da1341aa5 e765e6a1b8 eee9a1f004 4d07b65bdd
1772158d62 7bfdb82f5d 8945316ce6 9564550d5f a78e7a423e 9dddd0b657
d04d86d74e eb11c18203 3da2d78ad9 2b4ec765ff 72dcefff76 3a894958eb

@@ -1,6 +1,6 @@
 /.idea
 /node_modules
-/data*
+/data
 /cypress
 /out
 /test
@@ -34,7 +34,12 @@ tsconfig.json
 /ecosystem.config.js
 /extra/healthcheck.exe
 /extra/healthcheck
-extra/exe-builder
+/extra/exe-builder
+/extra/push-examples
+/extra/uptime-kuma-push
+
+# Comment the following line if you want to rebuild the healthcheck binary
+/extra/healthcheck-armv7


 ### .gitignore content (commented rules are duplicated)

.eslintrc.js (47 changed lines)

@@ -14,7 +14,6 @@ module.exports = {
 extends: [
 "eslint:recommended",
 "plugin:vue/vue3-recommended",
-"plugin:jsdoc/recommended-error",
 ],
 parser: "vue-eslint-parser",
 parserOptions: {
@@ -22,9 +21,6 @@ module.exports = {
 sourceType: "module",
 requireConfigFile: false,
 },
-plugins: [
-"jsdoc"
-],
 rules: {
 "yoda": "error",
 eqeqeq: [ "warn", "smart" ],
@@ -82,7 +78,7 @@ module.exports = {
 "checkLoops": false,
 }],
 "space-before-blocks": "warn",
-//'no-console': 'warn',
+//"no-console": "warn",
 "no-extra-boolean-cast": "off",
 "no-multiple-empty-lines": [ "warn", {
 "max": 1,
@@ -94,50 +90,15 @@ module.exports = {
 "no-unneeded-ternary": "error",
 "array-bracket-newline": [ "error", "consistent" ],
 "eol-last": [ "error", "always" ],
-//'prefer-template': 'error',
+//"prefer-template": "error",
+"template-curly-spacing": [ "warn", "never" ],
 "comma-dangle": [ "warn", "only-multiline" ],
 "no-empty": [ "error", {
 "allowEmptyCatch": true
 }],
 "no-control-regex": "off",
 "one-var": [ "error", "never" ],
-"max-statements-per-line": [ "error", { "max": 1 }],
-"jsdoc/check-tag-names": [
-"error",
-{
-"definedTags": [ "link" ]
-}
-],
-"jsdoc/no-undefined-types": "off",
-"jsdoc/no-defaults": [
-"error",
-{ "noOptionalParamNames": true }
-],
-"jsdoc/require-throws": "warn",
-"jsdoc/require-jsdoc": [
-"error",
-{
-"require": {
-"FunctionDeclaration": true,
-"MethodDefinition": true,
-}
-}
-],
-"jsdoc/no-blank-block-descriptions": "error",
-"jsdoc/require-returns-description": "warn",
-"jsdoc/require-returns-check": [
-"error",
-{ "reportMissingReturnForUndefinedTypes": false }
-],
-"jsdoc/require-returns": [
-"warn",
-{
-"forceRequireReturn": true,
-"forceReturnsWithAsync": true
-}
-],
-"jsdoc/require-param-type": "warn",
-"jsdoc/require-param-description": "warn"
+"max-statements-per-line": [ "error", { "max": 1 }]
 },
 "overrides": [
 {

.github/workflows/auto-test.yml (vendored, 69 changed lines)

@@ -5,11 +5,11 @@ name: Auto Test

 on:
 push:
-branches: [ master ]
+branches: [ master, 1.23.X ]
 paths-ignore:
 - '*.md'
 pull_request:
-branches: [ master, 2.0.X ]
+branches: [ master, 1.23.X ]
 paths-ignore:
 - '*.md'

@@ -22,19 +22,18 @@ jobs:
 strategy:
 matrix:
 os: [macos-latest, ubuntu-latest, windows-latest, ARM64]
-node: [ 14, 20 ]
+node: [ 16, 20.5 ]
 # See supported Node.js release schedule at https://nodejs.org/en/about/releases/

 steps:
 - run: git config --global core.autocrlf false # Mainly for Windows
-- uses: actions/checkout@v3
+- uses: actions/checkout@v4

 - name: Use Node.js ${{ matrix.node }}
-uses: actions/setup-node@v3
+uses: actions/setup-node@v4
 with:
 node-version: ${{ matrix.node }}
-- run: npm install npm@latest -g
-- run: npm install
+- run: npm ci
 - run: npm run build
 - run: npm test
 env:
@@ -50,18 +49,17 @@ jobs:
 strategy:
 matrix:
 os: [ ARMv7 ]
-node: [ 14, 20 ]
+node: [ 16, 20.5 ]
 # See supported Node.js release schedule at https://nodejs.org/en/about/releases/

 steps:
 - run: git config --global core.autocrlf false # Mainly for Windows
-- uses: actions/checkout@v3
+- uses: actions/checkout@v4

 - name: Use Node.js ${{ matrix.node }}
-uses: actions/setup-node@v3
+uses: actions/setup-node@v4
 with:
 node-version: ${{ matrix.node }}
-- run: npm install npm@latest -g
 - run: npm ci --production

 check-linters:
@@ -69,42 +67,41 @@ jobs:

 steps:
 - run: git config --global core.autocrlf false # Mainly for Windows
-- uses: actions/checkout@v3
+- uses: actions/checkout@v4

 - name: Use Node.js 20
-uses: actions/setup-node@v3
+uses: actions/setup-node@v4
 with:
-node-version: 20
-- run: npm install
-- run: npm run lint
+node-version: 20.5
+- run: npm ci
+- run: npm run lint:prod

-# TODO: Temporarily disable, as it cannot pass the test in 2.0.0 yet
-# e2e-tests:
-# needs: [ check-linters ]
-# runs-on: ubuntu-latest
-# steps:
-# - run: git config --global core.autocrlf false # Mainly for Windows
-# - uses: actions/checkout@v3
-#
-# - name: Use Node.js 14
-# uses: actions/setup-node@v3
-# with:
-# node-version: 14
-# - run: npm install
-# - run: npm run build
-# - run: npm run cy:test
+e2e-tests:
+needs: [ check-linters ]
+runs-on: ubuntu-latest
+steps:
+- run: git config --global core.autocrlf false # Mainly for Windows
+- uses: actions/checkout@v4
+
+- name: Use Node.js 16
+uses: actions/setup-node@v4
+with:
+node-version: 16
+- run: npm ci
+- run: npm run build
+- run: npm run cy:test

 frontend-unit-tests:
 needs: [ check-linters ]
 runs-on: ubuntu-latest
 steps:
 - run: git config --global core.autocrlf false # Mainly for Windows
-- uses: actions/checkout@v3
+- uses: actions/checkout@v4

-- name: Use Node.js 14
-uses: actions/setup-node@v3
+- name: Use Node.js 16
+uses: actions/setup-node@v4
 with:
-node-version: 14
-- run: npm install
+node-version: 16
+- run: npm ci
 - run: npm run build
 - run: npm run cy:run:unit

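A side note on the `npm install` to `npm ci` change above (illustrative, not part of either branch): `npm ci` does a clean install strictly from `package-lock.json` and fails if the lockfile is out of sync with `package.json`. It pairs naturally with setup-node's built-in npm cache, which `.github/workflows/close-incorrect-issue.yml` below already enables:

```yaml
# Sketch only; not a line from the compared branches.
- uses: actions/setup-node@v4
  with:
    node-version: 20
    cache: 'npm'   # caches npm's download cache, keyed on package-lock.json
- run: npm ci      # clean install from the lockfile; errors out if it drifts from package.json
```
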
.github/workflows/close-incorrect-issue.yml (vendored, 4 changed lines)

@@ -14,10 +14,10 @@ jobs:
 node-version: [16]

 steps:
-- uses: actions/checkout@v3
+- uses: actions/checkout@v4

 - name: Use Node.js ${{ matrix.node-version }}
-uses: actions/setup-node@v3
+uses: actions/setup-node@v4
 with:
 node-version: ${{ matrix.node-version }}
 cache: 'npm'

.github/workflows/json-yaml-validate.yml (vendored, 6 changed lines)

@@ -6,7 +6,7 @@ on:
 pull_request:
 branches:
 - master
-- 2.0.X
+- 1.23.X
 workflow_dispatch:

 permissions:
@@ -17,11 +17,11 @@ jobs:
 json-yaml-validate:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v3
+- uses: actions/checkout@v4

 - name: json-yaml-validate
 id: json-yaml-validate
-uses: GrantBirki/json-yaml-validate@v1.3.0
+uses: GrantBirki/json-yaml-validate@v2.4.0
 with:
 comment: "true" # enable comment mode
 exclude_file: ".github/config/exclude.txt" # gitignore style file for exclusions

.github/workflows/stale-bot.yml (vendored, 2 changed lines)

@@ -9,7 +9,7 @@ jobs:
 stale:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/stale@v7
+- uses: actions/stale@v8
 with:
 stale-issue-message: 'We are clearing up our old issues and your ticket has been open for 3 months with no activity. Remove stale label or comment or this will be closed in 2 days.'
 close-issue-message: 'This issue was closed because it has been stalled for 2 days with no activity.'

.gitignore (vendored, 1 changed line)

@@ -7,7 +7,6 @@ dist-ssr

 /data
 !/data/.gitkeep
-/data*
 .vscode

 /private

@@ -2,13 +2,13 @@

 First of all, I want to thank everyone who made pull requests for Uptime Kuma. I never thought the GitHub Community would be so nice! Because of this, I also never thought that other people would actually read and edit my code. It is not very well structured or commented, sorry about that.

-The project was created with vite.js (vue3). Then I created a subdirectory called "server" for server part. Both frontend and backend share the same package.json.
+The project was created with vite.js (vue3). Then I created a subdirectory called "server" for the server part. Both frontend and backend share the same package.json.

-The frontend code build into "dist" directory. The server (express.js) exposes the "dist" directory as root of the endpoint. This is how production is working.
+The frontend code builds into "dist" directory. The server (express.js) exposes the "dist" directory as the root of the endpoint. This is how production is working.

 ## Key Technical Skills

-- Node.js (You should know what are promise, async/await and arrow function etc.)
+- Node.js (You should know about promise, async/await and arrow function etc.)
 - Socket.io
 - SCSS
 - Vue.js
@@ -30,7 +30,7 @@ The frontend code build into "dist" directory. The server (express.js) exposes t

 ## Can I create a pull request for Uptime Kuma?

-Yes or no, it depends on what you will try to do. Since I don't want to waste your time, be sure to **create an empty draft pull request or open an issue, so we can have a discussion first**. Especially for a large pull request or you don't know it will be merged or not.
+Yes or no, it depends on what you will try to do. Since I don't want to waste your time, be sure to **create an empty draft pull request or open an issue, so we can have a discussion first**. Especially for a large pull request or you don't know if it will be merged or not.

 Here are some references:

@@ -46,8 +46,8 @@ Here are some references:
 - New features

 ### ❌ Won't be merged:
-- A dedicated pr for translating existing languages (see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md))
-- Do not pass the auto test
+- A dedicated PR for translating existing languages (see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md))
+- Do not pass the auto-test
 - Any breaking changes
 - Duplicated pull requests
 - Buggy
@@ -61,9 +61,9 @@ The above cases may not cover all possible situations.

 I (@louislam) have the final say. If your pull request does not meet my expectations, I will reject it, no matter how much time you spend on it. Therefore, it is essential to have a discussion beforehand.

-I will mark your pull request in the [milestones](https://github.com/louislam/uptime-kuma/milestones), if I am plan to review and merge it.
+I will assign your pull request to a [milestone](https://github.com/louislam/uptime-kuma/milestones), if I plan to review and merge it.

-Also, please don't rush or ask for ETA, because I have to understand the pull request, make sure it is no breaking changes and stick to my vision of this project, especially for large pull requests.
+Also, please don't rush or ask for an ETA, because I have to understand the pull request, make sure it is no breaking changes and stick to my vision of this project, especially for large pull requests.


 ### Recommended Pull Request Guideline
@@ -83,11 +83,11 @@ Before deep into coding, discussion first is preferred. Creating an empty pull r

 ## Project Styles

-I personally do not like something that requires so many configurations before you can finally start the app. I hope Uptime Kuma installation could be as easy as like installing a mobile app.
+I personally do not like something that requires so many configurations before you can finally start the app. I hope Uptime Kuma installation will be as easy as like installing a mobile app.

-- Easy to install for non-Docker users, no native build dependency is needed (for x86_64/armv7/arm64), no extra config, no extra effort required to get it running
+- Easy to install for non-Docker users, no native build dependency is needed (for x86_64/armv7/arm64), no extra config, and no extra effort required to get it running
 - Single container for Docker users, no very complex docker-compose file. Just map the volume and expose the port, then good to go
-- Settings should be configurable in the frontend. Environment variable is not encouraged, unless it is related to startup such as `DATA_DIR`
+- Settings should be configurable in the frontend. Environment variables are discouraged, unless it is related to startup such as `DATA_DIR`
 - Easy to use
 - The web UI styling should be consistent and nice

@@ -130,7 +130,7 @@ Port `3000` and port `3001` will be used.
 npm run dev
 ```

-But sometimes, you would like to keep restart the server, but not the frontend, you can run these command in two terminals:
+But sometimes, you would like to restart the server, but not the frontend, you can run these commands in two terminals:
 ```
 npm run start-frontend-dev
 npm run start-server-dev
@@ -146,13 +146,13 @@ It is mainly a socket.io app + express.js.
 express.js is used for:
 - entry point such as redirecting to a status page or the dashboard
 - serving the frontend built files (index.html, .js and .css etc.)
-- serving internal APIs of status page
+- serving internal APIs of the status page


 ### Structure in /server/

 - jobs/ (Jobs that are running in another process)
-- model/ (Object model, auto mapping to the database table name)
+- model/ (Object model, auto-mapping to the database table name)
 - modules/ (Modified 3rd-party modules)
 - monitor_types (Monitor Types)
 - notification-providers/ (individual notification logic)
@@ -163,7 +163,7 @@ express.js is used for:

 ## Frontend Dev Server

-It binds to `0.0.0.0:3000` by default. Frontend dev server is used for development only.
+It binds to `0.0.0.0:3000` by default. The frontend dev server is used for development only.

 For production, it is not used. It will be compiled to `dist` directory instead.

@@ -181,7 +181,7 @@ Uptime Kuma Frontend is a single page application (SPA). Most paths are handled

 The router is in `src/router.js`

-As you can see, most data in frontend is stored in root level, even though you changed the current router to any other pages.
+As you can see, most data in the frontend is stored at the root level, even though you changed the current router to any other pages.

 The data and socket logic are in `src/mixins/socket.js`.

@@ -210,7 +210,7 @@ Both frontend and backend share the same package.json. However, the frontend dep

 ### Update Dependencies

-Since previously updating Vite 2.5.10 to 2.6.0 broke the application completely, from now on, it should update patch release version only.
+Since previously updating Vite 2.5.10 to 2.6.0 broke the application completely, from now on, it should update the patch release version only.

 Patch release = the third digit ([Semantic Versioning](https://semver.org/))

@@ -218,17 +218,17 @@ If for security / bug / other reasons, a library must be updated, breaking chang

 ## Translations

-Please add **all** the strings which are translatable to `src/lang/en.json` (If translation keys are ommited, they can not be translated).
+Please add **all** the strings which are translatable to `src/lang/en.json` (If translation keys are omitted, they can not be translated).

-**Don't include any other languages in your inital Pull-Request** (even if this is your mother tounge), to avoid merge-conflicts between weblate and `master`.
-The translations can then (after merging a PR into `master`) be translated by awesome people donating their language-skills.
+**Don't include any other languages in your initial Pull-Request** (even if this is your mother tongue), to avoid merge-conflicts between weblate and `master`.
+The translations can then (after merging a PR into `master`) be translated by awesome people donating their language skills.

 If you want to help by translating Uptime Kuma into your language, please visit the [instructions on how to translate using weblate](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).

 ## Spelling & Grammar

 Feel free to correct the grammar in the documentation or code.
-My mother language is not english and my grammar is not that great.
+My mother language is not English and my grammar is not that great.

 ## Wiki


@@ -26,7 +26,7 @@ It is a temporary live demo, all data will be deleted after 10 minutes. Use the
 * Monitoring uptime for HTTP(s) / TCP / HTTP(s) Keyword / HTTP(s) Json Query / Ping / DNS Record / Push / Steam Game Server / Docker Containers
 * Fancy, Reactive, Fast UI/UX
 * Notifications via Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP), and [90+ notification services, click here for the full list](https://github.com/louislam/uptime-kuma/tree/master/src/components/notifications)
-* 20 second intervals
+* 20-second intervals
 * [Multi Languages](https://github.com/louislam/uptime-kuma/tree/master/src/lang)
 * Multiple status pages
 * Map status pages to specific domains
@@ -70,7 +70,7 @@ npm run setup
 # Option 1. Try it
 node server/server.js

-# (Recommended) Option 2. Run in background using PM2
+# (Recommended) Option 2. Run in the background using PM2
 # Install PM2 if you don't have it:
 npm install pm2 -g && pm2 install pm2-logrotate

@@ -93,7 +93,7 @@ pm2 save && pm2 startup

 ### Windows Portable (x64)

-https://github.com/louislam/uptime-kuma/files/11886108/uptime-kuma-win64-portable-1.0.1.zip
+https://github.com/louislam/uptime-kuma/releases/download/1.23.1/uptime-kuma-windows-x64-portable-1.23.1.zip

 ### Advanced Installation

@@ -109,7 +109,7 @@ https://github.com/louislam/uptime-kuma/wiki/%F0%9F%86%99-How-to-Update

 ## 🆕 What's Next?

-I will mark requests/issues to the next milestone.
+I will assign requests/issues to the next milestone.

 https://github.com/louislam/uptime-kuma/milestones


@@ -3,19 +3,19 @@
 ## Reporting a Vulnerability

 1. Please report security issues to https://github.com/louislam/uptime-kuma/security/advisories/new.
-1. Please also create a empty security issues for alerting me, as GitHub Advisory do not send a notification, I probably will miss without this. https://github.com/louislam/uptime-kuma/issues/new?assignees=&labels=help&template=security.md
+1. Please also create an empty security issue to alert me, as GitHub Advisories do not send a notification, I probably will miss it without this. https://github.com/louislam/uptime-kuma/issues/new?assignees=&labels=help&template=security.md

-Do not use the public issue tracker or discuss it in the public as it will cause more damage.
+Do not use the public issue tracker or discuss it in public as it will cause more damage.

 ## Do you accept other 3rd-party bug bounty platforms?

-At this moment, I DO NOT accept other bug bounty platforms, because I am not familiar with these platforms and someone have tried to send a phishing link to me by this already. To minimize my own risk, please report through GitHub Advisories only. I will ignore all 3rd-party bug bounty platforms emails.
+At this moment, I DO NOT accept other bug bounty platforms, because I am not familiar with these platforms and someone has tried to send a phishing link to me by doing this already. To minimize my own risk, please report through GitHub Advisories only. I will ignore all 3rd-party bug bounty platforms emails.

 ## Supported Versions

 ### Uptime Kuma Versions

-You should use or upgrade to the latest version of Uptime Kuma. All `1.X.X` versions are upgradable to the lastest version.
+You should use or upgrade to the latest version of Uptime Kuma. All `1.X.X` versions are upgradable to the latest version.

 ### Upgradable Docker Tags


@@ -3,7 +3,6 @@ import vue from "@vitejs/plugin-vue";
 import { defineConfig } from "vite";
 import visualizer from "rollup-plugin-visualizer";
 import viteCompression from "vite-plugin-compression";
-import commonjs from "vite-plugin-commonjs";

 const postCssScss = require("postcss-scss");
 const postcssRTLCSS = require("postcss-rtlcss");
@@ -22,7 +21,6 @@ export default defineConfig({
 "CODESPACE_NAME": JSON.stringify(process.env.CODESPACE_NAME),
 },
 plugins: [
-commonjs(),
 vue(),
 legacy({
 targets: [ "since 2015" ],

@@ -1,559 +0,0 @@
-const { R } = require("redbean-node");
-const { log } = require("../src/util");
-
-/**
-* ⚠️⚠️⚠️⚠️⚠️⚠️ DO NOT ADD ANYTHING HERE!
-* IF YOU NEED TO ADD FIELDS, ADD IT TO ./db/knex_migrations
-* See ./db/knex_migrations/README.md for more information
-* @returns {Promise<void>}
-*/
-async function createTables() {
-log.info("mariadb", "Creating basic tables for MariaDB");
-const knex = R.knex;
-
-// TODO: Should check later if it is really the final patch sql file.
-
-// docker_host
-await knex.schema.createTable("docker_host", (table) => {
-table.increments("id");
-table.integer("user_id").unsigned().notNullable();
-table.string("docker_daemon", 255);
-table.string("docker_type", 255);
-table.string("name", 255);
-});
-
-// group
-await knex.schema.createTable("group", (table) => {
-table.increments("id");
-table.string("name", 255).notNullable();
-table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
-table.boolean("public").notNullable().defaultTo(false);
-table.boolean("active").notNullable().defaultTo(true);
-table.integer("weight").notNullable().defaultTo(1000);
-table.integer("status_page_id").unsigned();
-});
-
-// proxy
-await knex.schema.createTable("proxy", (table) => {
-table.increments("id");
-table.integer("user_id").unsigned().notNullable();
-table.string("protocol", 10).notNullable();
-table.string("host", 255).notNullable();
-table.smallint("port").notNullable(); // TODO: Maybe a issue with MariaDB, need migration to int
-table.boolean("auth").notNullable();
-table.string("username", 255).nullable();
-table.string("password", 255).nullable();
-table.boolean("active").notNullable().defaultTo(true);
-table.boolean("default").notNullable().defaultTo(false);
-table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
-
-table.index("user_id", "proxy_user_id");
-});
-
-// user
-await knex.schema.createTable("user", (table) => {
-table.increments("id");
-table.string("username", 255).notNullable().unique().collate("utf8_general_ci");
-table.string("password", 255);
-table.boolean("active").notNullable().defaultTo(true);
-table.string("timezone", 150);
-table.string("twofa_secret", 64);
-table.boolean("twofa_status").notNullable().defaultTo(false);
-table.string("twofa_last_token", 6);
-});
-
-// monitor
-await knex.schema.createTable("monitor", (table) => {
-table.increments("id");
-table.string("name", 150);
-table.boolean("active").notNullable().defaultTo(true);
-table.integer("user_id").unsigned()
-.references("id").inTable("user")
-.onDelete("SET NULL")
-.onUpdate("CASCADE");
-table.integer("interval").notNullable().defaultTo(20);
-table.text("url");
-table.string("type", 20);
-table.integer("weight").defaultTo(2000);
-table.string("hostname", 255);
-table.integer("port");
-table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
-table.string("keyword", 255);
-table.integer("maxretries").notNullable().defaultTo(0);
-table.boolean("ignore_tls").notNullable().defaultTo(false);
-table.boolean("upside_down").notNullable().defaultTo(false);
-table.integer("maxredirects").notNullable().defaultTo(10);
-table.text("accepted_statuscodes_json").notNullable().defaultTo("[\"200-299\"]");
-table.string("dns_resolve_type", 5);
-table.string("dns_resolve_server", 255);
-table.string("dns_last_result", 255);
-table.integer("retry_interval").notNullable().defaultTo(0);
-table.string("push_token", 20).defaultTo(null);
-table.text("method").notNullable().defaultTo("GET");
-table.text("body").defaultTo(null);
-table.text("headers").defaultTo(null);
-table.text("basic_auth_user").defaultTo(null);
-table.text("basic_auth_pass").defaultTo(null);
-table.integer("docker_host").unsigned()
-.references("id").inTable("docker_host");
-table.string("docker_container", 255);
-table.integer("proxy_id").unsigned()
-.references("id").inTable("proxy");
-table.boolean("expiry_notification").defaultTo(true);
-table.text("mqtt_topic");
-table.string("mqtt_success_message", 255);
-table.string("mqtt_username", 255);
-table.string("mqtt_password", 255);
-table.string("database_connection_string", 2000);
-table.text("database_query");
-table.string("auth_method", 250);
-table.text("auth_domain");
-table.text("auth_workstation");
-table.string("grpc_url", 255).defaultTo(null);
-table.text("grpc_protobuf").defaultTo(null);
-table.text("grpc_body").defaultTo(null);
-table.text("grpc_metadata").defaultTo(null);
-table.text("grpc_method").defaultTo(null);
-table.text("grpc_service_name").defaultTo(null);
-table.boolean("grpc_enable_tls").notNullable().defaultTo(false);
-table.string("radius_username", 255);
-table.string("radius_password", 255);
-table.string("radius_calling_station_id", 50);
-table.string("radius_called_station_id", 50);
-table.string("radius_secret", 255);
-table.integer("resend_interval").notNullable().defaultTo(0);
-table.integer("packet_size").notNullable().defaultTo(56);
-table.string("game", 255);
-});
-
-// heartbeat
-await knex.schema.createTable("heartbeat", (table) => {
-table.increments("id");
-table.boolean("important").notNullable().defaultTo(false);
-table.integer("monitor_id").unsigned().notNullable()
-.references("id").inTable("monitor")
-.onDelete("CASCADE")
-.onUpdate("CASCADE");
-table.smallint("status").notNullable();
-
-table.text("msg");
-table.datetime("time").notNullable();
-table.integer("ping");
-table.integer("duration").notNullable().defaultTo(0);
-table.integer("down_count").notNullable().defaultTo(0);
-
-table.index("important");
-table.index([ "monitor_id", "time" ], "monitor_time_index");
-table.index("monitor_id");
-table.index([ "monitor_id", "important", "time" ], "monitor_important_time_index");
-});
-
-// incident
-await knex.schema.createTable("incident", (table) => {
-table.increments("id");
-table.string("title", 255).notNullable();
-table.text("content", 255).notNullable();
-table.string("style", 30).notNullable().defaultTo("warning");
-table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
-table.datetime("last_updated_date");
-table.boolean("pin").notNullable().defaultTo(true);
-table.boolean("active").notNullable().defaultTo(true);
-table.integer("status_page_id").unsigned();
-});
-
-// maintenance
-await knex.schema.createTable("maintenance", (table) => {
-table.increments("id");
-table.string("title", 150).notNullable();
-table.text("description").notNullable();
-table.integer("user_id").unsigned()
-.references("id").inTable("user")
-.onDelete("SET NULL")
-.onUpdate("CASCADE");
-table.boolean("active").notNullable().defaultTo(true);
-table.string("strategy", 50).notNullable().defaultTo("single");
-table.datetime("start_date");
-table.datetime("end_date");
-table.time("start_time");
-table.time("end_time");
-table.string("weekdays", 250).defaultTo("[]");
-table.text("days_of_month").defaultTo("[]");
-table.integer("interval_day");
-
-table.index("active");
-table.index([ "strategy", "active" ], "manual_active");
-table.index("user_id", "maintenance_user_id");
-});
-
-// status_page
-await knex.schema.createTable("status_page", (table) => {
-table.increments("id");
-table.string("slug", 255).notNullable().unique().collate("utf8_general_ci");
-table.string("title", 255).notNullable();
-table.text("description");
-table.string("icon", 255).notNullable();
-table.string("theme", 30).notNullable();
-table.boolean("published").notNullable().defaultTo(true);
-table.boolean("search_engine_index").notNullable().defaultTo(true);
-table.boolean("show_tags").notNullable().defaultTo(false);
-table.string("password");
-table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
-table.datetime("modified_date").notNullable().defaultTo(knex.fn.now());
-table.text("footer_text");
-table.text("custom_css");
-table.boolean("show_powered_by").notNullable().defaultTo(true);
-table.string("google_analytics_tag_id");
-});
-
-// maintenance_status_page
-await knex.schema.createTable("maintenance_status_page", (table) => {
-table.increments("id");
-
-table.integer("status_page_id").unsigned().notNullable()
-.references("id").inTable("status_page")
-.onDelete("CASCADE")
-.onUpdate("CASCADE");
-
-table.integer("maintenance_id").unsigned().notNullable()
-.references("id").inTable("maintenance")
-.onDelete("CASCADE")
-.onUpdate("CASCADE");
-});
-
-// maintenance_timeslot
-await knex.schema.createTable("maintenance_timeslot", (table) => {
-table.increments("id");
-table.integer("maintenance_id").unsigned().notNullable()
-.references("id").inTable("maintenance")
-.onDelete("CASCADE")
-.onUpdate("CASCADE");
-table.datetime("start_date").notNullable();
-table.datetime("end_date");
-table.boolean("generated_next").defaultTo(false);
-
-table.index("maintenance_id");
-table.index([ "maintenance_id", "start_date", "end_date" ], "active_timeslot_index");
-table.index("generated_next", "generated_next_index");
-});
-
-// monitor_group
-await knex.schema.createTable("monitor_group", (table) => {
-table.increments("id");
-table.integer("monitor_id").unsigned().notNullable()
-.references("id").inTable("monitor")
-.onDelete("CASCADE")
-.onUpdate("CASCADE");
-table.integer("group_id").unsigned().notNullable()
-.references("id").inTable("group")
-.onDelete("CASCADE")
-.onUpdate("CASCADE");
-table.integer("weight").notNullable().defaultTo(1000);
-table.boolean("send_url").notNullable().defaultTo(false);
-
-table.index([ "monitor_id", "group_id" ], "fk");
-});
-// monitor_maintenance
-await knex.schema.createTable("monitor_maintenance", (table) => {
-table.increments("id");
-table.integer("monitor_id").unsigned().notNullable()
-.references("id").inTable("monitor")
-.onDelete("CASCADE")
-.onUpdate("CASCADE");
-table.integer("maintenance_id").unsigned().notNullable()
-.references("id").inTable("maintenance")
-.onDelete("CASCADE")
-.onUpdate("CASCADE");
-
-table.index("maintenance_id", "maintenance_id_index2");
-table.index("monitor_id", "monitor_id_index");
-});
-
-// notification
-await knex.schema.createTable("notification", (table) => {
-table.increments("id");
-table.string("name", 255);
-table.string("config", 255); // TODO: should use TEXT!
-table.boolean("active").notNullable().defaultTo(true);
-table.integer("user_id").unsigned();
-table.boolean("is_default").notNullable().defaultTo(false);
-});
-
-// monitor_notification
-await knex.schema.createTable("monitor_notification", (table) => {
-table.increments("id").unsigned(); // TODO: no auto increment????
-table.integer("monitor_id").unsigned().notNullable()
-.references("id").inTable("monitor")
-.onDelete("CASCADE")
-.onUpdate("CASCADE");
-table.integer("notification_id").unsigned().notNullable()
-.references("id").inTable("notification")
-.onDelete("CASCADE")
-.onUpdate("CASCADE");
-
-table.index([ "monitor_id", "notification_id" ], "monitor_notification_index");
-});
-
-// tag
-await knex.schema.createTable("tag", (table) => {
-table.increments("id");
-table.string("name", 255).notNullable();
-table.string("color", 255).notNullable();
-table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
-});
-
-// monitor_tag
-await knex.schema.createTable("monitor_tag", (table) => {
-table.increments("id");
-table.integer("monitor_id").unsigned().notNullable()
-.references("id").inTable("monitor")
-.onDelete("CASCADE")
-.onUpdate("CASCADE");
-table.integer("tag_id").unsigned().notNullable()
-.references("id").inTable("tag")
-.onDelete("CASCADE")
-.onUpdate("CASCADE");
-table.text("value");
-});
-
-// monitor_tls_info
-await knex.schema.createTable("monitor_tls_info", (table) => {
-table.increments("id");
-table.integer("monitor_id").unsigned().notNullable(); //TODO: no fk ?
-table.text("info_json");
-});
-
-// notification_sent_history
-await knex.schema.createTable("notification_sent_history", (table) => {
-table.increments("id");
-table.string("type", 50).notNullable();
-table.integer("monitor_id").unsigned().notNullable();
-table.integer("days").notNullable();
-table.unique([ "type", "monitor_id", "days" ]);
-table.index([ "type", "monitor_id", "days" ], "good_index");
-});
-
-// setting
-await knex.schema.createTable("setting", (table) => {
-table.increments("id");
-table.string("key", 200).notNullable().unique().collate("utf8_general_ci");
-table.text("value");
-table.string("type", 20);
-});
-
-// status_page_cname
-await knex.schema.createTable("status_page_cname", (table) => {
-table.increments("id");
-table.integer("status_page_id").unsigned()
-.references("id").inTable("status_page")
-.onDelete("CASCADE")
-.onUpdate("CASCADE");
-table.string("domain").notNullable().unique().collate("utf8_general_ci");
-});
-
-/*********************
-* Converted Patch here
-*********************/
-
-// 2023-06-30-1348-http-body-encoding.js
-// ALTER TABLE monitor ADD http_body_encoding VARCHAR(25);
-// UPDATE monitor SET http_body_encoding = 'json' WHERE (type = 'http' or type = 'keyword') AND http_body_encoding IS NULL;
-await knex.schema.table("monitor", function (table) {
-table.string("http_body_encoding", 25);
-});
-
-await knex("monitor")
-.where(function () {
-this.where("type", "http").orWhere("type", "keyword");
-})
-.whereNull("http_body_encoding")
-.update({
-http_body_encoding: "json",
-});
-
-// 2023-06-30-1354-add-description-monitor.js
-// ALTER TABLE monitor ADD description TEXT default null;
-await knex.schema.table("monitor", function (table) {
-table.text("description").defaultTo(null);
-});
-
-// 2023-06-30-1357-api-key-table.js
-/*
-CREATE TABLE [api_key] (
-[id] INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
-[key] VARCHAR(255) NOT NULL,
-[name] VARCHAR(255) NOT NULL,
-[user_id] INTEGER NOT NULL,
-[created_date] DATETIME DEFAULT (DATETIME('now')) NOT NULL,
-[active] BOOLEAN DEFAULT 1 NOT NULL,
-[expires] DATETIME DEFAULT NULL,
-CONSTRAINT FK_user FOREIGN KEY ([user_id]) REFERENCES [user]([id]) ON DELETE CASCADE ON UPDATE CASCADE
-);
-*/
-await knex.schema.createTable("api_key", function (table) {
-table.increments("id").primary();
-table.string("key", 255).notNullable();
-table.string("name", 255).notNullable();
-table.integer("user_id").unsigned().notNullable()
-.references("id").inTable("user")
-.onDelete("CASCADE")
-.onUpdate("CASCADE");
-table.dateTime("created_date").defaultTo(knex.fn.now()).notNullable();
-table.boolean("active").defaultTo(1).notNullable();
-table.dateTime("expires").defaultTo(null);
-});
-
-// 2023-06-30-1400-monitor-tls.js
-/*
-ALTER TABLE monitor
-ADD tls_ca TEXT default null;
-
-ALTER TABLE monitor
-ADD tls_cert TEXT default null;
-
-ALTER TABLE monitor
-ADD tls_key TEXT default null;
-*/
-await knex.schema.table("monitor", function (table) {
-table.text("tls_ca").defaultTo(null);
-table.text("tls_cert").defaultTo(null);
-table.text("tls_key").defaultTo(null);
-});
-
-// 2023-06-30-1401-maintenance-cron.js
-/*
--- 999 characters. https://stackoverflow.com/questions/46134830/maximum-length-for-cron-job
-DROP TABLE maintenance_timeslot;
-ALTER TABLE maintenance ADD cron TEXT;
-ALTER TABLE maintenance ADD timezone VARCHAR(255);
-ALTER TABLE maintenance ADD duration INTEGER;
-*/
-await knex.schema
-.dropTableIfExists("maintenance_timeslot")
-.table("maintenance", function (table) {
-table.text("cron");
-table.string("timezone", 255);
-table.integer("duration");
-});
-
-// 2023-06-30-1413-add-parent-monitor.js.
-/*
-ALTER TABLE monitor
-ADD parent INTEGER REFERENCES [monitor] ([id]) ON DELETE SET NULL ON UPDATE CASCADE;
-*/
-await knex.schema.table("monitor", function (table) {
-table.integer("parent").unsigned()
-.references("id").inTable("monitor")
-.onDelete("SET NULL")
-.onUpdate("CASCADE");
-});
-
-/*
-patch-add-invert-keyword.sql
-ALTER TABLE monitor
-ADD invert_keyword BOOLEAN default 0 not null;
-*/
-await knex.schema.table("monitor", function (table) {
-table.boolean("invert_keyword").defaultTo(0).notNullable();
-});
-
-/*
-patch-added-json-query.sql
-ALTER TABLE monitor
-ADD json_path TEXT;
-
-ALTER TABLE monitor
-ADD expected_value VARCHAR(255);
-*/
-await knex.schema.table("monitor", function (table) {
-table.text("json_path");
-table.string("expected_value", 255);
-});
-
-/*
-patch-added-kafka-producer.sql
-
-ALTER TABLE monitor
-ADD kafka_producer_topic VARCHAR(255);
-
-ALTER TABLE monitor
-ADD kafka_producer_brokers TEXT;
-
-ALTER TABLE monitor
-ADD kafka_producer_ssl INTEGER;
-
-ALTER TABLE monitor
-ADD kafka_producer_allow_auto_topic_creation VARCHAR(255);
-
-ALTER TABLE monitor
-ADD kafka_producer_sasl_options TEXT;
-
-ALTER TABLE monitor
-ADD kafka_producer_message TEXT;
-*/
-await knex.schema.table("monitor", function (table) {
-table.string("kafka_producer_topic", 255);
-table.text("kafka_producer_brokers");
-table.integer("kafka_producer_ssl");
-table.string("kafka_producer_allow_auto_topic_creation", 255);
-table.text("kafka_producer_sasl_options");
-table.text("kafka_producer_message");
-});
-
-/*
-patch-add-certificate-expiry-status-page.sql
-ALTER TABLE status_page
-ADD show_certificate_expiry BOOLEAN default 0 NOT NULL;
-*/
-await knex.schema.table("status_page", function (table) {
-table.boolean("show_certificate_expiry").defaultTo(0).notNullable();
-});
-
-/*
-patch-monitor-oauth-cc.sql
-ALTER TABLE monitor
-ADD oauth_client_id TEXT default null;
-
-ALTER TABLE monitor
-ADD oauth_client_secret TEXT default null;
-
-ALTER TABLE monitor
-ADD oauth_token_url TEXT default null;
-
-ALTER TABLE monitor
-ADD oauth_scopes TEXT default null;
-
-ALTER TABLE monitor
-ADD oauth_auth_method TEXT default null;
-*/
-await knex.schema.table("monitor", function (table) {
-table.text("oauth_client_id").defaultTo(null);
-table.text("oauth_client_secret").defaultTo(null);
-table.text("oauth_token_url").defaultTo(null);
-table.text("oauth_scopes").defaultTo(null);
-table.text("oauth_auth_method").defaultTo(null);
-});
-
-/*
-patch-add-timeout-monitor.sql
-ALTER TABLE monitor
-ADD timeout DOUBLE default 0 not null;
-*/
-await knex.schema.table("monitor", function (table) {
-table.double("timeout").defaultTo(0).notNullable();
-});
-
-/*
-patch-add-gamedig-given-port.sql
-ALTER TABLE monitor
-ADD gamedig_given_port_only BOOLEAN default 1 not null;
-*/
-await knex.schema.table("monitor", function (table) {
-table.boolean("gamedig_given_port_only").defaultTo(1).notNullable();
-});
-
-log.info("mariadb", "Created basic tables for MariaDB");
-}
-
-module.exports = {
-createTables,
-};

@@ -1,57 +0,0 @@
|
|||||||
## Info
|
|
||||||
|
|
||||||
https://knexjs.org/guide/migrations.html#knexfile-in-other-languages
|
|
||||||
|
|
||||||
## Basic rules
|
|
||||||
- All tables must have a primary key named `id`
|
|
||||||
- Filename format: `YYYY-MM-DD-HHMM-patch-name.js`
|
|
||||||
- Avoid native SQL syntax, use knex methods, because Uptime Kuma supports multiple databases
|
|
||||||
|
|
||||||
## Template
|
|
||||||
|
|
||||||
Filename: YYYYMMDDHHMMSS_name.js
|
|
||||||
|
|
||||||
```js
|
|
||||||
exports.up = function(knex) {
|
|
||||||
|
|
||||||
};
|
|
||||||
|
|
||||||
exports.down = function(knex) {
|
|
||||||
|
|
||||||
};
|
|
||||||
|
|
||||||
// exports.config = { transaction: false };
|
|
||||||
```
|
|
||||||
|
|
||||||
## Example
|
|
||||||
|
|
||||||
Filename: 2023-06-30-1348-create-user-and-product.js
|
|
||||||
|
|
||||||
```js
|
|
||||||
exports.up = function(knex) {
|
|
||||||
return knex.schema
|
|
||||||
.createTable('user', function (table) {
|
|
||||||
table.increments('id');
|
|
||||||
table.string('first_name', 255).notNullable();
|
|
||||||
table.string('last_name', 255).notNullable();
|
|
||||||
})
|
|
||||||
.createTable('product', function (table) {
|
|
||||||
table.increments('id');
|
|
||||||
table.decimal('price').notNullable();
|
|
||||||
table.string('name', 1000).notNullable();
|
|
||||||
}).then(() => {
|
|
||||||
knex("products").insert([
|
|
||||||
{ price: 10, name: "Apple" },
|
|
||||||
{ price: 20, name: "Orange" },
|
|
||||||
]);
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
exports.down = function(knex) {
|
|
||||||
return knex.schema
|
|
||||||
.dropTable("product")
|
|
||||||
.dropTable("user");
|
|
||||||
};
|
|
||||||
```
|
|
||||||
|
|
||||||
https://knexjs.org/guide/migrations.html#transactions-in-migrations
|
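Following the rules above (knex methods only, reversible `up`/`down`), a minimal sketch of a column migration; the filename and column are illustrative, not an actual migration shipped in this repository:

```js
// 2023-08-01-0000-add-monitor-timeout.js (illustrative filename)
exports.up = function (knex) {
    return knex.schema.alterTable("monitor", (table) => {
        table.double("timeout").defaultTo(0).notNullable();
    });
};

exports.down = function (knex) {
    return knex.schema.alterTable("monitor", (table) => {
        table.dropColumn("timeout");
    });
};
```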
|
@@ -1,3 +0,0 @@
# Don't create a new migration file here

Please go to ./db/knex_migrations/README.md
db/patch-fix-kafka-producer-booleans.sql (new file)
@@ -0,0 +1,34 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;

-- Rename the existing columns (suffixed with `_old`)
ALTER TABLE monitor
    RENAME COLUMN kafka_producer_ssl TO kafka_producer_ssl_old;

ALTER TABLE monitor
    RENAME COLUMN kafka_producer_allow_auto_topic_creation TO kafka_producer_allow_auto_topic_creation_old;

-- Add the columns back with the correct BOOLEAN type
ALTER TABLE monitor
    ADD COLUMN kafka_producer_ssl BOOLEAN default 0 NOT NULL;

ALTER TABLE monitor
    ADD COLUMN kafka_producer_allow_auto_topic_creation BOOLEAN default 0 NOT NULL;

-- This SQL is still not fully safe. See https://github.com/louislam/uptime-kuma/issues/4039.

-- Copy the old values from the `_old` columns into the new ones
-- UPDATE monitor SET kafka_producer_allow_auto_topic_creation = monitor.kafka_producer_allow_auto_topic_creation_old
--     WHERE monitor.kafka_producer_allow_auto_topic_creation_old IS NOT NULL;

-- UPDATE monitor SET kafka_producer_ssl = monitor.kafka_producer_ssl_old
--     WHERE monitor.kafka_producer_ssl_old IS NOT NULL;

-- Remove the old columns
ALTER TABLE monitor
    DROP COLUMN kafka_producer_allow_auto_topic_creation_old;

ALTER TABLE monitor
    DROP COLUMN kafka_producer_ssl_old;

COMMIT;
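The rename, re-add, copy, drop sequence above is needed because SQLite cannot change a column's type in place. A hedged sketch of the same pattern expressed as a knex migration (hypothetical; the project ships the raw SQL patch above, not this code):

```js
// Hypothetical knex version of the rename / re-add / drop pattern above (SSL column only).
exports.up = async function (knex) {
    await knex.schema.alterTable("monitor", (table) => {
        table.renameColumn("kafka_producer_ssl", "kafka_producer_ssl_old");
    });
    await knex.schema.alterTable("monitor", (table) => {
        table.boolean("kafka_producer_ssl").defaultTo(0).notNullable();
    });
    // Optional value copy, mirroring the commented-out UPDATE in the patch.
    // await knex("monitor").whereNotNull("kafka_producer_ssl_old")
    //     .update({ kafka_producer_ssl: knex.ref("kafka_producer_ssl_old") });
    await knex.schema.alterTable("monitor", (table) => {
        table.dropColumn("kafka_producer_ssl_old");
    });
};

exports.down = async function (knex) {
    // Not meaningfully reversible; intentionally left empty.
};
```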
db/patch-monitor-tls-info-add-fk.sql (new file)
@@ -0,0 +1,18 @@
BEGIN TRANSACTION;

PRAGMA writable_schema = TRUE;

UPDATE
    SQLITE_MASTER
SET
    sql = replace(sql,
        'monitor_id INTEGER NOT NULL',
        'monitor_id INTEGER NOT NULL REFERENCES [monitor] ([id]) ON DELETE CASCADE ON UPDATE CASCADE'
    )
WHERE
    name = 'monitor_tls_info'
    AND type = 'table';

PRAGMA writable_schema = RESET;

COMMIT;
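After a raw `sqlite_master` rewrite like this, it can be worth confirming that the new foreign key actually ended up in the table definition. A hypothetical verification snippet (the database path and the use of knex here are assumptions, not something this patch ships):

```js
// Hypothetical check that the rewritten schema contains the new foreign key.
const knex = require("knex")({
    client: "sqlite3",
    connection: { filename: "./data/kuma.db" }, // placeholder path
    useNullAsDefault: true,
});

knex.raw("SELECT sql FROM sqlite_master WHERE name = 'monitor_tls_info' AND type = 'table'")
    .then((rows) => {
        const ok = rows.length > 0 && rows[0].sql.includes("REFERENCES [monitor] ([id])");
        console.log(ok ? "Foreign key present" : "Foreign key missing");
    })
    .finally(() => knex.destroy());
```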
db/patch-notification-config.sql (new file)
@@ -0,0 +1,10 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;

-- SQLite: Change the data type of the column "config" from VARCHAR to TEXT
ALTER TABLE notification RENAME COLUMN config TO config_old;
ALTER TABLE notification ADD COLUMN config TEXT;
UPDATE notification SET config = config_old;
ALTER TABLE notification DROP COLUMN config_old;

COMMIT;
db/patch-timeout.sql (new file)
@@ -0,0 +1,7 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;

UPDATE monitor SET timeout = (interval * 0.8)
    WHERE timeout IS NULL OR timeout <= 0;

COMMIT;
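This patch backfills missing timeouts as 80% of the check interval. The same rule can be expressed in application code; a small illustrative helper (the function name is not from the codebase):

```js
// Illustrative helper mirroring the backfill rule above: timeout = interval * 0.8.
function defaultTimeout(intervalSeconds) {
    return intervalSeconds * 0.8;
}

console.log(defaultTimeout(60)); // 48, i.e. 48 seconds for a 60-second interval
```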
docker/alpine-base.dockerfile (new file)
@@ -0,0 +1,8 @@
# DON'T UPDATE TO alpine3.13, 1.14, see #41.
FROM node:16-alpine3.12
WORKDIR /app

# Install apprise, iputils for non-root ping, setpriv
RUN apk add --no-cache iputils setpriv dumb-init python3 py3-cryptography py3-pip py3-six py3-yaml py3-click py3-markdown py3-requests py3-requests-oauthlib git && \
    pip3 --no-cache-dir install apprise==1.4.0 && \
    rm -rf /root/.cache
@@ -1,11 +1,12 @@
-# If the image changed, the second stage image should be changed too
-FROM node:20-bookworm-slim AS base2-slim
+# DON'T UPDATE TO bullseye-slim, see #372.
+# There is no 20-buster-slim for armv7 unfortunately, 18-buster-slim is the last one for Uptime Kuma v1.
+FROM node:18-buster-slim
 ARG TARGETPLATFORM
 
 WORKDIR /app
 
 # Specify --no-install-recommends to skip unused dependencies, make the base much smaller!
-# apprise = for notifications (From testing repo)
+# python3* = apprise's dependencies
 # sqlite3 = for debugging
 # iputils-ping = for ping
 # util-linux = for setpriv (Should be dropped in 2.0.0?)
@@ -14,25 +15,29 @@ WORKDIR /app
 # ca-certificates = keep the cert up-to-date
 # sudo = for start service nscd with non-root user
 # nscd = for better DNS caching
-RUN echo "deb http://deb.debian.org/debian testing main" >> /etc/apt/sources.list && \
-    apt update && \
-    apt --yes --no-install-recommends -t testing install apprise sqlite3 ca-certificates && \
-    apt --yes --no-install-recommends -t stable install \
+# (pip) apprise = for notifications
+RUN apt-get update && \
+    apt-get --yes --no-install-recommends install \
+        python3 python3-pip python3-cryptography python3-six python3-yaml python3-click python3-markdown python3-requests python3-requests-oauthlib \
+        sqlite3 \
         iputils-ping \
         util-linux \
         dumb-init \
         curl \
+        ca-certificates \
         sudo \
         nscd && \
+    pip3 --no-cache-dir install apprise==1.6.0 && \
     rm -rf /var/lib/apt/lists/* && \
     apt --yes autoremove
 
-
 # Install cloudflared
-RUN curl https://pkg.cloudflare.com/cloudflare-main.gpg --output /usr/share/keyrings/cloudflare-main.gpg && \
-    echo 'deb [signed-by=/usr/share/keyrings/cloudflare-main.gpg] https://pkg.cloudflare.com/cloudflared bullseye main' | tee /etc/apt/sources.list.d/cloudflared.list && \
-    apt update && \
-    apt install --yes --no-install-recommends -t stable cloudflared && \
+RUN set -eux && \
+    mkdir -p --mode=0755 /usr/share/keyrings && \
+    curl --fail --show-error --silent --location --insecure https://pkg.cloudflare.com/cloudflare-main.gpg --output /usr/share/keyrings/cloudflare-main.gpg && \
+    echo 'deb [signed-by=/usr/share/keyrings/cloudflare-main.gpg] https://pkg.cloudflare.com/cloudflared buster main' | tee /etc/apt/sources.list.d/cloudflared.list && \
+    apt-get update && \
+    apt-get install --yes --no-install-recommends cloudflared && \
     cloudflared version && \
     rm -rf /var/lib/apt/lists/* && \
     apt --yes autoremove
@@ -41,16 +46,3 @@ RUN curl https://pkg.cloudflare.com/cloudflare-main.gpg --output /usr/share/keyr
 COPY ./docker/etc/nscd.conf /etc/nscd.conf
 COPY ./docker/etc/sudoers /etc/sudoers
 
-
-# Full Base Image
-# MariaDB, Chromium and fonts
-# Not working for armv7, so use the older version (10.5) of MariaDB from the debian repo
-# curl -LsS https://r.mariadb.com/downloads/mariadb_repo_setup | bash -s -- --mariadb-server-version="mariadb-11.1" && \
-FROM base2-slim AS base2
-ENV UPTIME_KUMA_ENABLE_EMBEDDED_MARIADB=1
-RUN apt update && \
-    apt --yes --no-install-recommends install chromium fonts-indic fonts-noto fonts-noto-cjk mariadb-server && \
-    apt --yes remove curl && \
-    rm -rf /var/lib/apt/lists/* && \
-    apt --yes autoremove && \
-    chown -R node:node /var/lib/mysql
@@ -1,14 +0,0 @@
version: '3.8'

services:
  uptime-kuma:
    container_name: uptime-kuma-dev
    image: louislam/uptime-kuma:nightly2
    volumes:
      #- ./data:/app/data
      - ../server:/app/server
      - ../db:/app/db
    ports:
      - "3001:3001"  # <Host Port>:<Container Port>
      - "3307:3306"
@@ -1,15 +1,14 @@
-version: '3.8'
+# Simple docker-compose.yml
+# You can change your port or volume location
 
+version: '3.3'
+
 services:
   uptime-kuma:
-    image: louislam/uptime-kuma:2
+    image: louislam/uptime-kuma:1
     container_name: uptime-kuma
     volumes:
-      - uptime-kuma:/app/data
+      - ./uptime-kuma-data:/app/data
     ports:
-      - "3001:3001"  # <Host Port>:<Container Port>
+      - 3001:3001  # <Host Port>:<Container Port>
     restart: always
-
-volumes:
-  uptime-kuma:
-
@@ -1,8 +1,6 @@
-ARG BASE_IMAGE=louislam/uptime-kuma:base2
-
 ############################################
 # Build in Golang
-# Run npm run build-healthcheck-armv7 in the host first, otherwise it will be super slow where it is building the armv7 healthcheck
+# Run npm run build-healthcheck-armv7 in the host first, another it will be super slow where it is building the armv7 healthcheck
 # Check file: builder-go.dockerfile
 ############################################
 FROM louislam/uptime-kuma:builder-go AS build_healthcheck
@@ -10,47 +8,49 @@ FROM louislam/uptime-kuma:builder-go AS build_healthcheck
 ############################################
 # Build in Node.js
 ############################################
-FROM louislam/uptime-kuma:base2 AS build
-USER node
+FROM louislam/uptime-kuma:base-debian AS build
 WORKDIR /app
 
 ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
-COPY --chown=node:node .npmrc .npmrc
-COPY --chown=node:node package.json package.json
-COPY --chown=node:node package-lock.json package-lock.json
+COPY .npmrc .npmrc
+COPY package.json package.json
+COPY package-lock.json package-lock.json
 RUN npm ci --omit=dev
 COPY . .
-COPY --chown=node:node --from=build_healthcheck /app/extra/healthcheck /app/extra/healthcheck
+COPY --from=build_healthcheck /app/extra/healthcheck /app/extra/healthcheck
+RUN chmod +x /app/extra/entrypoint.sh
 
 ############################################
 # ⭐ Main Image
 ############################################
-FROM $BASE_IMAGE AS release
-USER node
+FROM louislam/uptime-kuma:base-debian AS release
 WORKDIR /app
 
 ENV UPTIME_KUMA_IS_CONTAINER=1
 
 # Copy app files from build layer
-COPY --chown=node:node --from=build /app /app
+COPY --from=build /app /app
 
 
 EXPOSE 3001
+VOLUME ["/app/data"]
 HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD extra/healthcheck
-ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+ENTRYPOINT ["/usr/bin/dumb-init", "--", "extra/entrypoint.sh"]
 CMD ["node", "server/server.js"]
 
 ############################################
 # Mark as Nightly
 ############################################
 FROM release AS nightly
-USER node
 RUN npm run mark-as-nightly
 
 ############################################
 # Build an image for testing pr
 ############################################
-FROM louislam/uptime-kuma:base2 AS pr-test2
+FROM louislam/uptime-kuma:base-debian AS pr-test
+
 WORKDIR /app
+
 ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
+
 ## Install Git
@@ -78,7 +78,7 @@ CMD ["npm", "run", "start-pr-test"]
 ############################################
 # Upload the artifact to Github
 ############################################
-FROM louislam/uptime-kuma:base2 AS upload-artifact
+FROM louislam/uptime-kuma:base-debian AS upload-artifact
 WORKDIR /
 RUN apt update && \
     apt --yes install curl file
docker/dockerfile-alpine (new file)
@@ -0,0 +1,27 @@
FROM louislam/uptime-kuma:base-alpine AS build
WORKDIR /app

ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1

COPY .npmrc .npmrc
COPY package.json package.json
COPY package-lock.json package-lock.json
RUN npm ci --omit=dev
COPY . .
RUN chmod +x /app/extra/entrypoint.sh

FROM louislam/uptime-kuma:base-alpine AS release
WORKDIR /app

# Copy app files from build layer
COPY --from=build /app /app

EXPOSE 3001
VOLUME ["/app/data"]
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD node extra/healthcheck.js
ENTRYPOINT ["/usr/bin/dumb-init", "--", "extra/entrypoint.sh"]
CMD ["node", "server/server.js"]


FROM release AS nightly
RUN npm run mark-as-nightly
@@ -36,8 +36,6 @@ if (! exists) {
 /**
  * Commit updated files
  * @param {string} version Version to update to
- * @returns {void}
- * @throws Error committing files
  */
 function commit(version) {
     let msg = "Update to " + version;
@@ -57,7 +55,6 @@ function commit(version) {
 /**
  * Create a tag with the specified version
  * @param {string} version Tag to create
- * @returns {void}
  */
 function tag(version) {
     let res = childProcess.spawnSync("git", [ "tag", version ]);
@@ -71,7 +68,6 @@ function tag(version) {
  * Check if a tag exists for the specified version
  * @param {string} version Version to check
  * @returns {boolean} Does the tag already exist
- * @throws Version is not valid
  */
 function tagExists(version) {
     if (! version) {
@@ -15,7 +15,6 @@ download(url);
 /**
  * Downloads the latest version of the dist from a GitHub release.
  * @param {string} url The URL to download from.
- * @returns {void}
  *
  * Generated by Trelent
  */
extra/entrypoint.sh (new file)
@@ -0,0 +1,21 @@
#!/usr/bin/env sh

# set -e Exit the script if an error happens
set -e
PUID=${PUID=0}
PGID=${PGID=0}

files_ownership () {
    # -h Changes the ownership of an encountered symbolic link and not that of the file or directory pointed to by the symbolic link.
    # -R Recursively descends the specified directories
    # -c Like verbose but report only when a change is made
    chown -hRc "$PUID":"$PGID" /app/data
}

echo "==> Performing startup jobs and maintenance tasks"
files_ownership

echo "==> Starting application with user $PUID group $PGID"

# --clear-groups Clear supplementary groups.
exec setpriv --reuid "$PUID" --regid "$PGID" --clear-groups "$@"
@@ -4,12 +4,12 @@ const fs = require("fs");
  * to avoid the runtime deprecation warning triggered for using `fs.rmdirSync` with `{ recursive: true }` in Node.js v16,
  * or the `recursive` property removing completely in the future Node.js version.
  * See the link below.
+ *
  * @todo Once we drop the support for Node.js v14 (or at least versions before v14.14.0), we can safely replace this function with `fs.rmSync`, since `fs.rmSync` was add in Node.js v14.14.0 and currently we supports all the Node.js v14 versions that include the versions before the v14.14.0, and this function have almost the same signature with `fs.rmSync`.
  * @link https://nodejs.org/docs/latest-v16.x/api/deprecations.html#dep0147-fsrmdirpath--recursive-true- the deprecation infomation of `fs.rmdirSync`
  * @link https://nodejs.org/docs/latest-v16.x/api/fs.html#fsrmsyncpath-options the document of `fs.rmSync`
  * @param {fs.PathLike} path Valid types for path values in "fs".
- * @param {fs.RmDirOptions} options options for `fs.rmdirSync`, if `fs.rmSync` is available and property `recursive` is true, it will automatically have property `force` with value `true`.
- * @returns {void}
+ * @param {fs.RmDirOptions} [options] options for `fs.rmdirSync`, if `fs.rmSync` is available and property `recursive` is true, it will automatically have property `force` with value `true`.
  */
 const rmSync = (path, options) => {
     if (typeof fs.rmSync === "function") {
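For reference, a hedged usage sketch of the wrapper documented in this hunk, matching the `rmSync(path, options)` signature and the default import seen elsewhere in this diff; the target path is illustrative:

```js
// Illustrative call of the fs-rmSync wrapper (ES module default import).
import rmSync from "./fs-rmSync.js";

// Removes the directory recursively; `force: true` is added automatically when fs.rmSync is available.
rmSync("./tmp/some-folder", { recursive: true });
```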
@@ -6,7 +6,7 @@
  * ⚠️ Deprecated: Changed to healthcheck.go, it will be deleted in the future.
  * This script should be run after a period of time (180s), because the server may need some time to prepare.
  */
-const { FBSD } = require("../server/util-server");
+const FBSD = /^freebsd/.test(process.platform);
 
 process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
 
@@ -189,13 +189,15 @@ if (type == "local") {
     bash("check=$(git --version)");
     if (check == "") {
         error = 1;
-        println("Error: git is missing");
+        println("Error: git is not found!");
+        println("help: an installation guide is available at https://git-scm.com/book/en/v2/Getting-Started-Installing-Git");
     }
 
     bash("check=$(node -v)");
     if (check == "") {
         error = 1;
-        println("Error: node is missing");
+        println("Error: node is not found");
+        println("help: an installation guide is available at https://nodejs.org/en/download");
     }
 
     if (error > 0) {
@@ -216,6 +218,7 @@ if (type == "local") {
     bash("check=$(pm2 --version)");
     if (check == "") {
         println("Error: pm2 is not found!");
+        println("help: an installation guide is available at https://pm2.keymetrics.io/docs/usage/quick-start/");
         bash("exit 1");
     }
 
@@ -232,6 +235,7 @@ if (type == "local") {
     bash("check=$(docker -v)");
     if (check == "") {
         println("Error: docker is not found!");
+        println("help: an installation guide is available at https://docs.docker.com/desktop/");
         bash("exit 1");
     }
 
@@ -239,6 +243,7 @@ if (type == "local") {
 
     bash("if [[ \"$check\" == *\"Is the docker daemon running\"* ]]; then
     \"echo\" \"Error: docker is not running\"
+    \"echo\" \"help: a troubleshooting guide is available at https://docs.docker.com/config/daemon/troubleshoot/\"
     \"exit\" \"1\"
     fi");
 
extra/reformat-changelog.js (new file)
@@ -0,0 +1,44 @@
// Generate on GitHub
const input = `
* Add Korean translation by @Alanimdeo in https://github.com/louislam/dockge/pull/86
`;

const template = `
### 🆕 New Features

### 💇♀️ Improvements

### 🐞 Bug Fixes

### ⬆️ Security Fixes

### 🦎 Translation Contributions

### Others
- Other small changes, code refactoring and comment/doc updates in this repo:
`;

const lines = input.split("\n").filter((line) => line.trim() !== "");

for (const line of lines) {
    // Split the last " by "
    const usernamePullRequesURL = line.split(" by ").pop();

    if (!usernamePullRequesURL) {
        console.log("Unable to parse", line);
        continue;
    }

    const [ username, pullRequestURL ] = usernamePullRequesURL.split(" in ");
    const pullRequestID = "#" + pullRequestURL.split("/").pop();
    let message = line.split(" by ").shift();

    if (!message) {
        console.log("Unable to parse", line);
        continue;
    }

    message = message.split("* ").pop();
    console.log("-", pullRequestID, message, `(Thanks ${username})`);
}
console.log(template);
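Hand-tracing the string handling above on the sample `input` line shows what the script prints; this trace was reproduced by hand, so treat the exact output as approximate:

```js
// Hand-traced run of the loop above on the sample input line:
const line = "* Add Korean translation by @Alanimdeo in https://github.com/louislam/dockge/pull/86";
const usernamePullRequesURL = line.split(" by ").pop(); // "@Alanimdeo in https://github.com/louislam/dockge/pull/86"
const [ username, pullRequestURL ] = usernamePullRequesURL.split(" in ");
const pullRequestID = "#" + pullRequestURL.split("/").pop(); // "#86"
const message = line.split(" by ").shift().split("* ").pop(); // "Add Korean translation"
console.log("-", pullRequestID, message, `(Thanks ${username})`);
// Prints: - #86 Add Korean translation (Thanks @Alanimdeo)
```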
@@ -12,7 +12,7 @@ const rl = readline.createInterface({
 });
 
 const main = async () => {
-    Database.initDataDir(args);
+    Database.init(args);
     await Database.connect();
 
     try {
@@ -5,6 +5,8 @@ const { R } = require("redbean-node");
 const readline = require("readline");
 const { initJWTSecret } = require("../server/util-server");
 const User = require("../server/model/user");
+const { io } = require("socket.io-client");
+const { localWebSocketURL } = require("../server/config");
 const args = require("args-parser")(process.argv);
 const rl = readline.createInterface({
     input: process.stdin,
@@ -13,7 +15,7 @@ const rl = readline.createInterface({
 
 const main = async () => {
     console.log("Connecting the database");
-    Database.initDataDir(args);
+    Database.init(args);
     await Database.connect(false, false, true);
 
     try {
@@ -36,12 +38,16 @@ const main = async () => {
                     // Reset all sessions by reset jwt secret
                     await initJWTSecret();
 
+                    // Disconnect all other socket clients of the user
+                    await disconnectAllSocketClients(user.username, password);
+
                     break;
                 } else {
                     console.log("Passwords do not match, please try again.");
                 }
             }
             console.log("Password reset successfully.");
+
         }
     } catch (e) {
         console.error("Error: " + e.message);
@@ -66,6 +72,44 @@ function question(question) {
     });
 }
 
+function disconnectAllSocketClients(username, password) {
+    return new Promise((resolve) => {
+        console.log("Connecting to " + localWebSocketURL + " to disconnect all other socket clients");
+
+        // Disconnect all socket connections
+        const socket = io(localWebSocketURL, {
+            reconnection: false,
+            timeout: 5000,
+        });
+        socket.on("connect", () => {
+            socket.emit("login", {
+                username,
+                password,
+            }, (res) => {
+                if (res.ok) {
+                    console.log("Logged in.");
+                    socket.emit("disconnectOtherSocketClients");
+                } else {
+                    console.warn("Login failed.");
+                    console.warn("Please restart the server to disconnect all sessions.");
+                }
+                socket.close();
+            });
+        });
+
+        socket.on("connect_error", function () {
+            // The localWebSocketURL is not guaranteed to be working for some complicated Uptime Kuma setup
+            // Ask the user to restart the server manually
+            console.warn("Failed to connect to " + localWebSocketURL);
+            console.warn("Please restart the server to disconnect all sessions manually.");
+            resolve();
+        });
+        socket.on("disconnect", () => {
+            resolve();
+        });
+    });
+}
+
 if (!process.env.TEST_BACKEND) {
     main();
 }
@@ -138,7 +138,7 @@ server.listen({
 /**
  * Get human readable request type from request code
  * @param {number} code Request code to translate
- * @returns {string|void} Human readable request type
+ * @returns {string} Human readable request type
  */
 function type(code) {
     for (let name in Packet.TYPE) {
@@ -7,17 +7,11 @@ class SimpleMqttServer {
     aedes = require("aedes")();
     server = require("net").createServer(this.aedes.handle);
 
-    /**
-     * @param {number} port Port to listen on
-     */
     constructor(port) {
         this.port = port;
     }
 
-    /**
-     * Start the MQTT server
-     * @returns {void}
-     */
+    /** Start the MQTT server */
     start() {
         this.server.listen(this.port, () => {
             console.log("server started and listening on port ", this.port);
@@ -12,7 +12,6 @@ import rmSync from "../fs-rmSync.js";
  * created with this code if one does not already exist
  * @param {string} baseLang The second base language file to copy. This
  * will be ignored if set to "en" as en.js is copied by default
- * @returns {void}
  */
 function copyFiles(langCode, baseLang) {
     if (fs.existsSync("./languages")) {
@@ -34,8 +33,7 @@ function copyFiles(langCode, baseLang) {
 /**
  * Update the specified language file
  * @param {string} langCode Language code to update
- * @param {string} baseLangCode Second language to copy keys from
- * @returns {void}
+ * @param {string} baseLang Second language to copy keys from
  */
 async function updateLanguage(langCode, baseLangCode) {
     const en = (await import("./languages/en.js")).default;
@@ -39,8 +39,6 @@ if (! exists) {
 /**
  * Commit updated files
  * @param {string} version Version to update to
- * @returns {void}
- * @throws Error when committing files
 */
 function commit(version) {
     let msg = "Update to " + version;
@@ -57,7 +55,6 @@ function commit(version) {
 /**
  * Create a tag with the specified version
  * @param {string} version Tag to create
- * @returns {void}
 */
 function tag(version) {
     let res = childProcess.spawnSync("git", [ "tag", version ]);
@@ -68,7 +65,6 @@ function tag(version) {
  * Check if a tag exists for the specified version
  * @param {string} version Version to check
  * @returns {boolean} Does the tag already exist
- * @throws Version is not valid
 */
 function tagExists(version) {
     if (! version) {
@@ -13,7 +13,6 @@ updateWiki(newVersion);
 /**
  * Update the wiki with new version number
  * @param {string} newVersion Version to update to
- * @returns {void}
 */
 function updateWiki(newVersion) {
     const wikiDir = "./tmp/wiki";
@@ -47,7 +46,6 @@ function updateWiki(newVersion) {
 /**
  * Check if a directory exists and then delete it
  * @param {string} dir Directory to delete
- * @returns {void}
 */
 function safeDelete(dir) {
     if (fs.existsSync(dir)) {
@@ -156,12 +156,14 @@ fi
 check=$(git --version)
 if [ "$check" == "" ]; then
     error=$((1))
-    "echo" "-e" "Error: git is missing"
+    "echo" "-e" "Error: git is not found!"
+    "echo" "-e" "help: an installation guide is available at https://git-scm.com/book/en/v2/Getting-Started-Installing-Git"
 fi
 check=$(node -v)
 if [ "$check" == "" ]; then
     error=$((1))
-    "echo" "-e" "Error: node is missing"
+    "echo" "-e" "Error: node is not found"
+    "echo" "-e" "help: an installation guide is available at https://nodejs.org/en/download"
 fi
 if [ $(($error > 0)) == 1 ]; then
     "echo" "-e" "Please install above missing software"
@@ -180,6 +182,7 @@ fi
 check=$(pm2 --version)
 if [ "$check" == "" ]; then
     "echo" "-e" "Error: pm2 is not found!"
+    "echo" "-e" "help: an installation guide is available at https://pm2.keymetrics.io/docs/usage/quick-start/"
     exit 1
 fi
 mkdir -p $installPath
@@ -192,11 +195,13 @@ else
 check=$(docker -v)
 if [ "$check" == "" ]; then
     "echo" "-e" "Error: docker is not found!"
+    "echo" "-e" "help: an installation guide is available at https://docs.docker.com/desktop/"
     exit 1
 fi
 check=$(docker info)
 if [[ "$check" == *"Is the docker daemon running"* ]]; then
     "echo" "Error: docker is not running"
+    "echo" "help: a troubleshooting guide is available at https://docs.docker.com/config/daemon/troubleshoot/"
     "exit" "1"
 fi
 if [ "$3" != "" ]; then
package-lock.json (generated): file diff suppressed because it is too large.

package.json
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "uptime-kuma",
|
"name": "uptime-kuma",
|
||||||
"version": "1.23.0-beta.1",
|
"version": "1.23.16",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
@@ -13,33 +13,36 @@
|
|||||||
"install-legacy": "npm install",
|
"install-legacy": "npm install",
|
||||||
"update-legacy": "npm update",
|
"update-legacy": "npm update",
|
||||||
"lint:js": "eslint --ext \".js,.vue\" --ignore-path .gitignore .",
|
"lint:js": "eslint --ext \".js,.vue\" --ignore-path .gitignore .",
|
||||||
|
"lint:js-prod": "npm run lint:js -- --max-warnings 0",
|
||||||
"lint-fix:js": "eslint --ext \".js,.vue\" --fix --ignore-path .gitignore .",
|
"lint-fix:js": "eslint --ext \".js,.vue\" --fix --ignore-path .gitignore .",
|
||||||
"lint:style": "stylelint \"**/*.{vue,css,scss}\" --ignore-path .gitignore",
|
"lint:style": "stylelint \"**/*.{vue,css,scss}\" --ignore-path .gitignore",
|
||||||
"lint-fix:style": "stylelint \"**/*.{vue,css,scss}\" --fix --ignore-path .gitignore",
|
"lint-fix:style": "stylelint \"**/*.{vue,css,scss}\" --fix --ignore-path .gitignore",
|
||||||
"lint": "npm run lint:js && npm run lint:style",
|
"lint": "npm run lint:js && npm run lint:style",
|
||||||
|
"lint:prod": "npm run lint:js-prod && npm run lint:style",
|
||||||
"dev": "concurrently -k -r \"wait-on tcp:3000 && npm run start-server-dev \" \"npm run start-frontend-dev\"",
|
"dev": "concurrently -k -r \"wait-on tcp:3000 && npm run start-server-dev \" \"npm run start-frontend-dev\"",
|
||||||
"start-frontend-dev": "cross-env NODE_ENV=development vite --host --config ./config/vite.config.js",
|
"start-frontend-dev": "cross-env NODE_ENV=development vite --host --config ./config/vite.config.js",
|
||||||
"start-frontend-devcontainer": "cross-env NODE_ENV=development DEVCONTAINER=1 vite --host --config ./config/vite.config.js",
|
"start-frontend-devcontainer": "cross-env NODE_ENV=development DEVCONTAINER=1 vite --host --config ./config/vite.config.js",
|
||||||
"start": "npm run start-server",
|
"start": "npm run start-server",
|
||||||
"start-server": "node server/server.js",
|
"start-server": "node server/server.js",
|
||||||
"start-server-dev": "cross-env NODE_ENV=development node server/server.js",
|
"start-server-dev": "cross-env NODE_ENV=development node server/server.js --data-dir=./data/v1/",
|
||||||
"build": "vite build --config ./config/vite.config.js",
|
"build": "vite build --config ./config/vite.config.js",
|
||||||
"test": "node test/prepare-test-server.js && npm run jest-backend",
|
"test": "node test/prepare-test-server.js && npm run jest-backend",
|
||||||
"test-with-build": "npm run build && npm test",
|
"test-with-build": "npm run build && npm test",
|
||||||
"jest-backend": "cross-env TEST_BACKEND=1 jest --runInBand --detectOpenHandles --forceExit --config=./config/jest-backend.config.js",
|
"jest-backend": "cross-env TEST_BACKEND=1 jest --runInBand --detectOpenHandles --forceExit --config=./config/jest-backend.config.js",
|
||||||
"tsc": "tsc",
|
"tsc": "tsc",
|
||||||
"vite-preview-dist": "vite preview --host --config ./config/vite.config.js",
|
"vite-preview-dist": "vite preview --host --config ./config/vite.config.js",
|
||||||
"build-docker": "npm run build && npm run build-docker-full && npm run build-docker-slim",
|
"build-docker": "npm run build && npm run build-docker-debian && npm run build-docker-alpine",
|
||||||
"build-docker-base": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base2 --target base2 . --push",
|
"build-docker-alpine-base": "docker buildx build -f docker/alpine-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base-alpine . --push",
|
||||||
"build-docker-base-slim": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base2-slim --target base2-slim . --push",
|
"build-docker-debian-base": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base-debian . --push",
|
||||||
"build-docker-builder-go": "docker buildx build -f docker/builder-go.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:builder-go . --push",
|
"build-docker-builder-go": "docker buildx build -f docker/builder-go.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:builder-go . --push",
|
||||||
"build-docker-slim": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2-slim -t louislam/uptime-kuma:$VERSION-slim --target release --build-arg BASE_IMAGE=louislam/uptime-kuma:base2-slim . --push",
|
"build-docker-alpine": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:alpine -t louislam/uptime-kuma:1-alpine -t louislam/uptime-kuma:$VERSION-alpine --target release . --push",
|
||||||
"build-docker-full": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2 -t louislam/uptime-kuma:$VERSION --target release . --push",
|
"build-docker-debian": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma -t louislam/uptime-kuma:1 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:debian -t louislam/uptime-kuma:1-debian -t louislam/uptime-kuma:$VERSION-debian --target release . --push",
|
||||||
"build-docker-nightly": "node ./extra/test-docker.js && npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly2 --target nightly . --push",
|
"build-docker-nightly": "node ./extra/test-docker.js && npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly --target nightly . --push",
|
||||||
"build-docker-nightly-local": "npm run build && docker build -f docker/dockerfile -t louislam/uptime-kuma:nightly2 --target nightly .",
|
"build-docker-nightly-alpine": "docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly-alpine --target nightly . --push",
|
||||||
|
"build-docker-nightly-amd64": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:nightly-amd64 --target nightly . --push --progress plain",
|
||||||
"build-docker-pr-test": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64 -t louislam/uptime-kuma:pr-test --target pr-test . --push",
|
"build-docker-pr-test": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64 -t louislam/uptime-kuma:pr-test --target pr-test . --push",
|
||||||
"upload-artifacts": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:upload-artifact --build-arg VERSION --build-arg GITHUB_TOKEN --target upload-artifact . --progress plain",
|
"upload-artifacts": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:upload-artifact --build-arg VERSION --build-arg GITHUB_TOKEN --target upload-artifact . --progress plain",
|
||||||
"setup": "git checkout 1.22.1 && npm ci --production && npm run download-dist",
|
"setup": "git checkout 1.23.16 && npm ci --production && npm run download-dist",
|
||||||
"download-dist": "node extra/download-dist.js",
|
"download-dist": "node extra/download-dist.js",
|
||||||
"mark-as-nightly": "node extra/mark-as-nightly.js",
|
"mark-as-nightly": "node extra/mark-as-nightly.js",
|
||||||
"reset-password": "node extra/reset-password.js",
|
"reset-password": "node extra/reset-password.js",
|
||||||
@@ -47,6 +50,7 @@
|
|||||||
"compile-install-script": "@powershell -NoProfile -ExecutionPolicy Unrestricted -Command ./extra/compile-install-script.ps1",
|
"compile-install-script": "@powershell -NoProfile -ExecutionPolicy Unrestricted -Command ./extra/compile-install-script.ps1",
|
||||||
"test-install-script-rockylinux": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/rockylinux.dockerfile .",
|
"test-install-script-rockylinux": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/rockylinux.dockerfile .",
|
||||||
"test-install-script-centos7": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/centos7.dockerfile .",
|
"test-install-script-centos7": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/centos7.dockerfile .",
|
||||||
|
"test-install-script-alpine3": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/alpine3.dockerfile .",
|
||||||
"test-install-script-debian": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/debian.dockerfile .",
|
"test-install-script-debian": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/debian.dockerfile .",
|
||||||
"test-install-script-debian-buster": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/debian-buster.dockerfile .",
|
"test-install-script-debian-buster": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/debian-buster.dockerfile .",
|
||||||
"test-install-script-ubuntu": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu.dockerfile .",
|
"test-install-script-ubuntu": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu.dockerfile .",
|
||||||
@@ -54,7 +58,11 @@
|
|||||||
"test-install-script-ubuntu1604": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu1604.dockerfile .",
|
"test-install-script-ubuntu1604": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu1604.dockerfile .",
|
||||||
"simple-dns-server": "node extra/simple-dns-server.js",
|
"simple-dns-server": "node extra/simple-dns-server.js",
|
||||||
"simple-mqtt-server": "node extra/simple-mqtt-server.js",
|
"simple-mqtt-server": "node extra/simple-mqtt-server.js",
|
||||||
|
"simple-mongo": "docker run --rm -p 27017:27017 mongo",
|
||||||
|
"simple-postgres": "docker run --rm -p 5432:5432 -e POSTGRES_PASSWORD=postgres postgres",
|
||||||
|
"simple-mariadb": "docker run --rm -p 3306:3306 -e MYSQL_ROOT_PASSWORD=mariadb# mariadb",
|
||||||
"update-language-files": "cd extra/update-language-files && node index.js && cross-env-shell eslint ../../src/languages/$npm_config_language.js --fix",
|
"update-language-files": "cd extra/update-language-files && node index.js && cross-env-shell eslint ../../src/languages/$npm_config_language.js --fix",
|
||||||
|
"ncu-patch": "npm-check-updates -u -t patch",
|
||||||
"release-final": "node ./extra/test-docker.js && node extra/update-version.js && npm run build-docker && node ./extra/press-any-key.js && npm run upload-artifacts && node ./extra/update-wiki-version.js",
|
"release-final": "node ./extra/test-docker.js && node extra/update-version.js && npm run build-docker && node ./extra/press-any-key.js && npm run upload-artifacts && node ./extra/update-wiki-version.js",
|
||||||
"release-beta": "node ./extra/test-docker.js && node extra/beta/update-version.js && npm run build && node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:beta . --target release --push && node ./extra/press-any-key.js && npm run upload-artifacts",
|
"release-beta": "node ./extra/test-docker.js && node extra/beta/update-version.js && npm run build && node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:beta . --target release --push && node ./extra/press-any-key.js && npm run upload-artifacts",
|
||||||
"git-remove-tag": "git tag -d",
|
"git-remove-tag": "git tag -d",
|
||||||
@@ -67,22 +75,21 @@
|
|||||||
"build-healthcheck-armv7": "cross-env GOOS=linux GOARCH=arm GOARM=7 go build -x -o ./extra/healthcheck-armv7 ./extra/healthcheck.go",
|
"build-healthcheck-armv7": "cross-env GOOS=linux GOARCH=arm GOARM=7 go build -x -o ./extra/healthcheck-armv7 ./extra/healthcheck.go",
|
||||||
"deploy-demo-server": "node extra/deploy-demo-server.js",
|
"deploy-demo-server": "node extra/deploy-demo-server.js",
|
||||||
"sort-contributors": "node extra/sort-contributors.js",
|
"sort-contributors": "node extra/sort-contributors.js",
|
||||||
"quick-run-nightly": "docker run --rm --env NODE_ENV=development -p 3001:3001 louislam/uptime-kuma:nightly2",
|
"start-server-node14-win": "private\\node14\\node.exe server/server.js"
|
||||||
"start-dev-container": "cd docker && docker-compose -f docker-compose-dev.yml up --force-recreate"
|
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@grpc/grpc-js": "~1.7.3",
|
"@grpc/grpc-js": "~1.8.22",
|
||||||
"@louislam/ping": "~0.4.4-mod.1",
|
"@louislam/ping": "~0.4.4-mod.1",
|
||||||
"@louislam/sqlite3": "15.1.6",
|
"@louislam/sqlite3": "15.1.6",
|
||||||
"args-parser": "~1.3.0",
|
"args-parser": "~1.3.0",
|
||||||
"axios": "~0.27.0",
|
"axios": "~0.28.1",
|
||||||
"axios-ntlm": "1.3.0",
|
"axios-ntlm": "1.3.0",
|
||||||
"badge-maker": "~3.3.1",
|
"badge-maker": "~3.3.1",
|
||||||
"bcryptjs": "~2.4.3",
|
"bcryptjs": "~2.4.3",
|
||||||
"cacheable-lookup": "~6.0.4",
|
"cacheable-lookup": "~6.0.4",
|
||||||
"chardet": "~1.4.0",
|
"chardet": "~1.4.0",
|
||||||
"check-password-strength": "^2.0.5",
|
"check-password-strength": "^2.0.5",
|
||||||
"cheerio": "~1.0.0-rc.12",
|
"cheerio": "1.0.0-rc.12",
|
||||||
"chroma-js": "~2.4.2",
|
"chroma-js": "~2.4.2",
|
||||||
"command-exists": "~1.2.9",
|
"command-exists": "~1.2.9",
|
||||||
"compare-versions": "~3.6.0",
|
"compare-versions": "~3.6.0",
|
||||||
@@ -90,11 +97,12 @@
|
|||||||
"croner": "~6.0.5",
|
"croner": "~6.0.5",
|
||||||
"dayjs": "~1.11.5",
|
"dayjs": "~1.11.5",
|
||||||
"dotenv": "~16.0.3",
|
"dotenv": "~16.0.3",
|
||||||
"express": "~4.17.3",
|
"express": "~4.21.0",
|
||||||
"express-basic-auth": "~1.2.1",
|
"express-basic-auth": "~1.2.1",
|
||||||
"express-static-gzip": "~2.1.7",
|
"express-static-gzip": "~2.1.7",
|
||||||
"form-data": "~4.0.0",
|
"form-data": "~4.0.0",
|
||||||
"gamedig": "~4.0.5",
|
"gamedig": "^4.2.0",
|
||||||
|
"html-escaper": "^3.0.3",
|
||||||
"http-graceful-shutdown": "~3.1.7",
|
"http-graceful-shutdown": "~3.1.7",
|
||||||
"http-proxy-agent": "~5.0.0",
|
"http-proxy-agent": "~5.0.0",
|
||||||
"https-proxy-agent": "~5.0.1",
|
"https-proxy-agent": "~5.0.1",
|
||||||
@@ -105,41 +113,41 @@
|
|||||||
"jsonwebtoken": "~9.0.0",
|
"jsonwebtoken": "~9.0.0",
|
||||||
"jwt-decode": "~3.1.2",
|
"jwt-decode": "~3.1.2",
|
||||||
"kafkajs": "^2.2.4",
|
"kafkajs": "^2.2.4",
|
||||||
"knex": "^2.4.2",
|
|
||||||
"limiter": "~2.1.0",
|
"limiter": "~2.1.0",
|
||||||
"liquidjs": "^10.7.0",
|
"liquidjs": "^10.7.0",
|
||||||
"mongodb": "~4.14.0",
|
"mongodb": "~4.17.1",
|
||||||
"mqtt": "~4.3.7",
|
"mqtt": "~4.3.7",
|
||||||
"mssql": "~8.1.4",
|
"mssql": "~8.1.4",
|
||||||
"mysql2": "~2.3.3",
|
"mysql2": "~3.9.6",
|
||||||
"nanoid": "~3.3.4",
|
"nanoid": "~3.3.4",
|
||||||
"node-cloudflared-tunnel": "~1.0.9",
|
"node-cloudflared-tunnel": "~1.0.9",
|
||||||
"node-radius-client": "~1.0.0",
|
"node-radius-client": "~1.0.0",
|
||||||
"nodemailer": "~6.6.5",
|
"nodemailer": "~6.9.13",
|
||||||
"nostr-tools": "^1.13.1",
|
"nostr-tools": "^1.13.1",
|
||||||
"notp": "~2.0.3",
|
"notp": "~2.0.3",
|
||||||
"openid-client": "^5.4.2",
|
"openid-client": "^5.4.2",
|
||||||
"password-hash": "~1.2.2",
|
"password-hash": "~1.2.2",
|
||||||
"pg": "~8.8.0",
|
"pg": "~8.11.3",
|
||||||
"pg-connection-string": "~2.5.0",
|
"pg-connection-string": "~2.6.2",
|
||||||
"playwright-core": "~1.35.1",
|
"playwright-core": "~1.35.1",
|
||||||
"prom-client": "~13.2.0",
|
"prom-client": "~13.2.0",
|
||||||
"prometheus-api-metrics": "~3.2.1",
|
"prometheus-api-metrics": "~3.2.1",
|
||||||
|
"promisify-child-process": "~4.1.2",
|
||||||
"protobufjs": "~7.2.4",
|
"protobufjs": "~7.2.4",
|
||||||
"qs": "~6.10.4",
|
"qs": "~6.10.4",
|
||||||
"redbean-node": "~0.3.0",
|
"redbean-node": "~0.3.0",
|
||||||
"redis": "~4.5.1",
|
"redis": "~4.5.1",
|
||||||
"semver": "~7.5.4",
|
"semver": "~7.5.4",
|
||||||
"socket.io": "~4.6.1",
|
"socket.io": "~4.8.0",
|
||||||
"socket.io-client": "~4.6.1",
|
"socket.io-client": "~4.8.0",
|
||||||
"socks-proxy-agent": "6.1.1",
|
"socks-proxy-agent": "6.1.1",
|
||||||
"tar": "~6.1.11",
|
"tar": "~6.2.1",
|
||||||
"tcp-ping": "~0.1.1",
|
"tcp-ping": "~0.1.1",
|
||||||
"thirty-two": "~1.0.2",
|
"thirty-two": "~1.0.2",
|
||||||
"ws": "^8.13.0"
|
"ws": "^8.13.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@actions/github": "~5.0.1",
|
"@actions/github": "~5.1.1",
|
||||||
"@babel/eslint-parser": "^7.22.7",
|
"@babel/eslint-parser": "^7.22.7",
|
||||||
"@babel/preset-env": "^7.15.8",
|
"@babel/preset-env": "^7.15.8",
|
||||||
"@fortawesome/fontawesome-svg-core": "~1.2.36",
|
"@fortawesome/fontawesome-svg-core": "~1.2.36",
|
||||||
@@ -160,12 +168,11 @@
|
|||||||
"core-js": "~3.26.1",
|
"core-js": "~3.26.1",
|
||||||
"cronstrue": "~2.24.0",
|
"cronstrue": "~2.24.0",
|
||||||
"cross-env": "~7.0.3",
|
"cross-env": "~7.0.3",
|
||||||
"cypress": "^12.17.0",
|
"cypress": "^13.2.0",
|
||||||
"delay": "^5.0.0",
|
"delay": "^5.0.0",
|
||||||
"dns2": "~2.0.1",
|
"dns2": "~2.0.1",
|
||||||
"dompurify": "~2.4.3",
|
"dompurify": "~3.1.7",
|
||||||
"eslint": "~8.14.0",
|
"eslint": "~8.14.0",
|
||||||
"eslint-plugin-jsdoc": "^46.4.6",
|
|
||||||
"eslint-plugin-vue": "~8.7.1",
|
"eslint-plugin-vue": "~8.7.1",
|
||||||
"favico.js": "~0.3.10",
|
"favico.js": "~0.3.10",
|
||||||
"jest": "~29.6.1",
|
"jest": "~29.6.1",
|
||||||
@@ -184,8 +191,7 @@
|
|||||||
"timezones-list": "~3.0.1",
|
"timezones-list": "~3.0.1",
|
||||||
"typescript": "~4.4.4",
|
"typescript": "~4.4.4",
|
||||||
"v-pagination-3": "~0.1.7",
|
"v-pagination-3": "~0.1.7",
|
||||||
"vite": "~4.4.1",
|
"vite": "~5.2.8",
|
||||||
"vite-plugin-commonjs": "^0.8.0",
|
|
||||||
"vite-plugin-compression": "^0.5.1",
|
"vite-plugin-compression": "^0.5.1",
|
||||||
"vue": "~3.3.4",
|
"vue": "~3.3.4",
|
||||||
"vue-chartjs": "~5.2.0",
|
"vue-chartjs": "~5.2.0",
|
||||||
@@ -199,7 +205,7 @@
|
|||||||
"vue-router": "~4.0.14",
|
"vue-router": "~4.0.14",
|
||||||
"vue-toastification": "~2.0.0-rc.5",
|
"vue-toastification": "~2.0.0-rc.5",
|
||||||
"vuedraggable": "~4.1.0",
|
"vuedraggable": "~4.1.0",
|
||||||
"wait-on": "^6.0.1",
|
"wait-on": "^7.2.0",
|
||||||
"whatwg-url": "~12.0.1"
|
"whatwg-url": "~12.0.1"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
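The version bumps above all stay within what the `~` and `^` range prefixes allow. As a quick, hedged illustration (not part of the diff), the `semver` package that already appears in the dependency list can be used to check what each range accepts:

```js
// Illustrative only: how tilde and caret ranges behave for the bumps shown above.
const semver = require("semver");

// "~4.17.1" accepts patch releases within 4.17.x, but not 4.18.0.
console.log(semver.satisfies("4.17.2", "~4.17.1")); // true
console.log(semver.satisfies("4.18.0", "~4.17.1")); // false

// "^2.2.4" accepts any 2.x.y at or above 2.2.4, but not 3.0.0.
console.log(semver.satisfies("2.4.0", "^2.2.4")); // true
console.log(semver.satisfies("3.0.0", "^2.2.4")); // false
```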
@@ -1,10 +1,9 @@
-<svg width="640" height="640" viewBox="0 0 640 640" fill="none" xmlns="http://www.w3.org/2000/svg">
-<path d="M490.4 235.64C544.09 358.38 544.09 435.34 490.4 466.5C409.85 513.24 199.96 527.49 139.54 455.64C99.2601 407.74 99.2601 334.4 139.54 235.64C180.5 168.18 238.71 134.45 314.17 134.45C389.64 134.45 448.38 168.18 490.4 235.64Z" fill="url(#paint0_linear_381_799)"/>
-<path d="M490.4 235.64C544.09 358.38 544.09 435.34 490.4 466.5C409.85 513.24 199.96 527.49 139.54 455.64C99.2601 407.74 99.2601 334.4 139.54 235.64C180.5 168.18 238.71 134.45 314.17 134.45C389.64 134.45 448.38 168.18 490.4 235.64Z" stroke="#F2F2F2" stroke-opacity="0.51" stroke-width="200"/>
-<defs>
-<linearGradient id="paint0_linear_381_799" x1="259.78" y1="261.15" x2="463.85" y2="456.49" gradientUnits="userSpaceOnUse">
+<svg width="640" height="640" viewBox="0 0 640 640" xml:space="preserve" xmlns="http://www.w3.org/2000/svg">
+<g transform="matrix(1 0 0 1 320 320)">
+<linearGradient id="S3" gradientUnits="userSpaceOnUse" gradientTransform="matrix(1 0 0 1 -319.99875 -320.0001577393)" x1="259.78" y1="261.15" x2="463.85" y2="456.49">
 <stop stop-color="#5CDD8B"/>
 <stop offset="1" stop-color="#86E6A9"/>
 </linearGradient>
-</defs>
+<path style="stroke: rgb(242,242,242); stroke-opacity: 0.51; stroke-width: 200; stroke-dasharray: none; stroke-linecap: butt; stroke-dashoffset: 0; stroke-linejoin: miter; stroke-miterlimit: 4; fill: url(#S3); fill-rule: nonzero; opacity: 1;" transform=" translate(0, 0)" d="M 170.40125 -84.36016 C 224.09125 38.37984 224.09125 115.33984 170.40125 146.49984 C 89.85125000000001 193.23984000000002 -120.03875 207.48984000000002 -180.45875 135.63984 C -220.73875 87.73983999999999 -220.73875 14.399839999999998 -180.45875 -84.36016000000001 C -139.49875 -151.82016 -81.28875000000001 -185.55016 -5.828750000000014 -185.55016 C 69.64124999999999 -185.55016 128.38125 -151.82016000000002 170.40124999999998 -84.36016000000001 z" stroke-linecap="round" />
+</g>
 </svg>

(Image file size: 893 B before, 1.1 KiB after.)
@@ -9,9 +9,9 @@ const dayjs = require("dayjs");
 
 /**
  * Login to web app
- * @param {string} username Username to login with
- * @param {string} password Password to login with
- * @returns {Promise<(Bean|null)>} User or null if login failed
+ * @param {string} username
+ * @param {string} password
+ * @returns {Promise<(Bean|null)>}
  */
 exports.login = async function (username, password) {
     if (typeof username !== "string" || typeof password !== "string") {
@@ -39,7 +39,6 @@ exports.login = async function (username, password) {
 /**
  * Validate a provided API key
  * @param {string} key API key to verify
- * @returns {boolean} API is ok?
  */
 async function verifyAPIKey(key) {
     if (typeof key !== "string") {
@@ -74,10 +73,9 @@ async function verifyAPIKey(key) {
 
 /**
  * Custom authorizer for express-basic-auth
- * @param {string} username Username to login with
- * @param {string} password Password to login with
- * @param {authCallback} callback Callback to handle login result
- * @returns {void}
+ * @param {string} username
+ * @param {string} password
+ * @param {authCallback} callback
  */
 function apiAuthorizer(username, password, callback) {
     // API Rate Limit
@@ -101,10 +99,9 @@ function apiAuthorizer(username, password, callback) {
 
 /**
  * Custom authorizer for express-basic-auth
- * @param {string} username Username to login with
- * @param {string} password Password to login with
- * @param {authCallback} callback Callback to handle login result
- * @returns {void}
+ * @param {string} username
+ * @param {string} password
+ * @param {authCallback} callback
  */
 function userAuthorizer(username, password, callback) {
     // Login Rate Limit
@@ -129,8 +126,7 @@ function userAuthorizer(username, password, callback) {
  * Use basic auth if auth is not disabled
  * @param {express.Request} req Express request object
  * @param {express.Response} res Express response object
- * @param {express.NextFunction} next Next handler in chain
- * @returns {void}
+ * @param {express.NextFunction} next
  */
 exports.basicAuth = async function (req, res, next) {
     const middleware = basicAuth({
@@ -152,8 +148,7 @@ exports.basicAuth = async function (req, res, next) {
  * Use use API Key if API keys enabled, else use basic auth
  * @param {express.Request} req Express request object
  * @param {express.Response} res Express response object
- * @param {express.NextFunction} next Next handler in chain
- * @returns {void}
+ * @param {express.NextFunction} next
  */
 exports.apiAuth = async function (req, res, next) {
     if (!await Settings.get("disableAuth")) {
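The authorizer functions touched above follow the callback contract of `express-basic-auth`. The following is a minimal, hedged sketch of how such an authorizer is typically wired up; the route, port, and credentials here are made-up illustration values, not code from the diff:

```js
// Minimal sketch: wiring an async authorizer into express-basic-auth.
const express = require("express");
const basicAuth = require("express-basic-auth");

const app = express();

// An authorizer receives (username, password, callback) and must call
// callback(null, true) on success or callback(null, false) on failure.
function demoAuthorizer(username, password, callback) {
    const ok = username === "admin" && password === "secret";
    callback(null, ok);
}

app.use(basicAuth({
    authorizer: demoAuthorizer,
    authorizeAsync: true,   // wait for the callback instead of a boolean return
    challenge: true,        // send a WWW-Authenticate header on failure
}));

app.get("/protected", (req, res) => res.send("ok"));
app.listen(3000);
```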
@@ -15,7 +15,6 @@ class CacheableDnsHttpAgent {
 
     /**
      * Register/Disable cacheable to global agents
-     * @returns {void}
      */
     static async update() {
         log.debug("CacheableDnsHttpAgent", "update");
@@ -41,15 +40,14 @@ class CacheableDnsHttpAgent {
     /**
      * Attach cacheable to HTTP agent
      * @param {http.Agent} agent Agent to install
-     * @returns {void}
      */
     static install(agent) {
         this.cacheable.install(agent);
     }
 
     /**
-     * @param {https.AgentOptions} agentOptions Options to pass to HTTPS agent
-     * @returns {https.Agent} The new HTTPS agent
+     * @var {https.AgentOptions} agentOptions
+     * @return {https.Agent}
      */
     static getHttpsAgent(agentOptions) {
         if (!this.enable) {
@@ -65,8 +63,8 @@ class CacheableDnsHttpAgent {
     }
 
     /**
-     * @param {http.AgentOptions} agentOptions Options to pass to the HTTP agent
-     * @returns {https.Agents} The new HTTP agent
+     * @var {http.AgentOptions} agentOptions
+     * @return {https.Agents}
      */
     static getHttpAgent(agentOptions) {
         if (!this.enable) {
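For context, the `install()` call in this class delegates to `cacheable-lookup`, which patches an agent so that its DNS lookups go through a cache. A hedged, stand-alone sketch of that underlying API (the target URL is a placeholder):

```js
// Sketch of the underlying cacheable-lookup usage this wrapper builds on.
const https = require("https");
const CacheableLookup = require("cacheable-lookup");

const cacheable = new CacheableLookup();
const agent = new https.Agent({ keepAlive: true });

// After install(), lookups made through this agent hit the DNS cache.
cacheable.install(agent);

https.get("https://example.com", { agent }, (res) => {
    console.log(res.statusCode);
});
```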
@@ -12,7 +12,7 @@ const checkVersion = require("./check-version");
 /**
  * Send list of notification providers to client
  * @param {Socket} socket Socket.io socket instance
- * @returns {Promise<Bean[]>} List of notifications
+ * @returns {Promise<Bean[]>}
  */
 async function sendNotificationList(socket) {
     const timeLogger = new TimeLogger();
@@ -40,8 +40,8 @@ async function sendNotificationList(socket) {
  * Send Heartbeat History list to socket
  * @param {Socket} socket Socket.io instance
  * @param {number} monitorID ID of monitor to send heartbeat history
- * @param {boolean} toUser True = send to all browsers with the same user id, False = send to the current browser only
- * @param {boolean} overwrite Overwrite client-side's heartbeat list
+ * @param {boolean} [toUser=false] True = send to all browsers with the same user id, False = send to the current browser only
+ * @param {boolean} [overwrite=false] Overwrite client-side's heartbeat list
  * @returns {Promise<void>}
  */
 async function sendHeartbeatList(socket, monitorID, toUser = false, overwrite = false) {
@@ -71,8 +71,8 @@ async function sendHeartbeatList(socket, monitorID, toUser = false, overwrite =
  * Important Heart beat list (aka event list)
  * @param {Socket} socket Socket.io instance
  * @param {number} monitorID ID of monitor to send heartbeat history
- * @param {boolean} toUser True = send to all browsers with the same user id, False = send to the current browser only
- * @param {boolean} overwrite Overwrite client-side's heartbeat list
+ * @param {boolean} [toUser=false] True = send to all browsers with the same user id, False = send to the current browser only
+ * @param {boolean} [overwrite=false] Overwrite client-side's heartbeat list
  * @returns {Promise<void>}
  */
 async function sendImportantHeartbeatList(socket, monitorID, toUser = false, overwrite = false) {
@@ -100,7 +100,7 @@ async function sendImportantHeartbeatList(socket, monitorID, toUser = false, ove
 /**
  * Emit proxy list to client
  * @param {Socket} socket Socket.io socket instance
- * @returns {Promise<Bean[]>} List of proxies
+ * @return {Promise<Bean[]>}
  */
 async function sendProxyList(socket) {
     const timeLogger = new TimeLogger();
@@ -141,21 +141,24 @@ async function sendAPIKeyList(socket) {
 /**
  * Emits the version information to the client.
  * @param {Socket} socket Socket.io socket instance
- * @param {boolean} hideVersion Should we hide the version information in the response?
+ * @param {boolean} hideVersion
  * @returns {Promise<void>}
  */
 async function sendInfo(socket, hideVersion = false) {
     let version;
     let latestVersion;
+    let isContainer;
 
     if (!hideVersion) {
         version = checkVersion.version;
         latestVersion = checkVersion.latestVersion;
+        isContainer = (process.env.UPTIME_KUMA_IS_CONTAINER === "1");
     }
 
     socket.emit("info", {
         version,
         latestVersion,
+        isContainer,
         primaryBaseURL: await setting("primaryBaseURL"),
         serverTimezone: await server.getTimezone(),
         serverTimezoneOffset: server.getTimezoneOffset(),
@@ -165,7 +168,7 @@ async function sendInfo(socket, hideVersion = false) {
 /**
  * Send list of docker hosts to client
  * @param {Socket} socket Socket.io socket instance
- * @returns {Promise<Bean[]>} List of docker hosts
+ * @returns {Promise<Bean[]>}
 */
 async function sendDockerHostList(socket) {
     const timeLogger = new TimeLogger();
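On the receiving side, the extra `isContainer` field lands in the same `info` payload the frontend already listens for. A hedged sketch of a consumer (the event name comes from the diff; the connection URL and handler body are illustrative):

```js
// Illustrative consumer of the "info" event emitted by sendInfo().
const { io } = require("socket.io-client");

const socket = io("http://localhost:3001");

socket.on("info", (info) => {
    // version / latestVersion may be undefined when hideVersion is set.
    console.log("version:", info.version);
    console.log("latest:", info.latestVersion);
    console.log("running in container:", info.isContainer === true);
});
```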
@@ -1,29 +1,42 @@
+const isFreeBSD = /^freebsd/.test(process.platform);
+
 // Interop with browser
 const args = (typeof process !== "undefined") ? require("args-parser")(process.argv) : {};
-const demoMode = args["demo"] || false;
 
-const badgeConstants = {
-    naColor: "#999",
-    defaultUpColor: "#66c20a",
-    defaultWarnColor: "#eed202",
-    defaultDownColor: "#c2290a",
-    defaultPendingColor: "#f8a306",
-    defaultMaintenanceColor: "#1747f5",
-    defaultPingColor: "blue", // as defined by badge-maker / shields.io
-    defaultStyle: "flat",
-    defaultPingValueSuffix: "ms",
-    defaultPingLabelSuffix: "h",
-    defaultUptimeValueSuffix: "%",
-    defaultUptimeLabelSuffix: "h",
-    defaultCertExpValueSuffix: " days",
-    defaultCertExpLabelSuffix: "h",
-    // Values Come From Default Notification Times
-    defaultCertExpireWarnDays: "14",
-    defaultCertExpireDownDays: "7"
-};
+// If host is omitted, the server will accept connections on the unspecified IPv6 address (::) when IPv6 is available and the unspecified IPv4 address (0.0.0.0) otherwise.
+// Dual-stack support for (::)
+// Also read HOST if not FreeBSD, as HOST is a system environment variable in FreeBSD
+let hostEnv = isFreeBSD ? null : process.env.HOST;
+const hostname = args.host || process.env.UPTIME_KUMA_HOST || hostEnv;
+
+const port = [ args.port, process.env.UPTIME_KUMA_PORT, process.env.PORT, 3001 ]
+    .map(portValue => parseInt(portValue))
+    .find(portValue => !isNaN(portValue));
+
+const sslKey = args["ssl-key"] || process.env.UPTIME_KUMA_SSL_KEY || process.env.SSL_KEY || undefined;
+const sslCert = args["ssl-cert"] || process.env.UPTIME_KUMA_SSL_CERT || process.env.SSL_CERT || undefined;
+const sslKeyPassphrase = args["ssl-key-passphrase"] || process.env.UPTIME_KUMA_SSL_KEY_PASSPHRASE || process.env.SSL_KEY_PASSPHRASE || undefined;
+
+const isSSL = sslKey && sslCert;
+
+function getLocalWebSocketURL() {
+    const protocol = isSSL ? "wss" : "ws";
+    const host = hostname || "localhost";
+    return `${protocol}://${host}:${port}`;
+}
+
+const localWebSocketURL = getLocalWebSocketURL();
+
+const demoMode = args["demo"] || false;
 
 module.exports = {
     args,
+    hostname,
+    port,
+    sslKey,
+    sslCert,
+    sslKeyPassphrase,
+    isSSL,
+    localWebSocketURL,
     demoMode,
-    badgeConstants,
 };
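The port selection on the 1.23.X side is a small fallback chain: the first of `args.port`, `UPTIME_KUMA_PORT`, `PORT`, and `3001` that parses as a number wins. A hedged, stand-alone sketch of the same idiom with hard-coded candidate values:

```js
// Stand-alone illustration of the port fallback chain used above.
function resolvePort(candidates) {
    return candidates
        .map(portValue => parseInt(portValue))
        .find(portValue => !isNaN(portValue));
}

console.log(resolvePort([ undefined, "8080", "3000", 3001 ]));          // 8080
console.log(resolvePort([ undefined, undefined, undefined, 3001 ]));    // 3001
```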
@@ -4,8 +4,6 @@ const { setSetting, setting } = require("./util-server");
 const { log, sleep } = require("../src/util");
 const knex = require("knex");
 const path = require("path");
-const { EmbeddedMariaDB } = require("./embedded-mariadb");
-const mysql = require("mysql2/promise");
 
 /**
  * Database & App Data Folder
@@ -26,7 +24,7 @@ class Database {
 
     static screenshotDir;
 
-    static sqlitePath;
+    static path;
 
     static dockerTLSDir;
 
@@ -36,13 +34,11 @@ class Database {
     static patched = false;
 
     /**
-     * SQLite only
      * Add patch filename in key
      * Values:
      * true: Add it regardless of order
      * false: Do nothing
      * { parents: []}: Need parents before add it
-     * @deprecated
      */
     static patchList = {
         "patch-setting-value-type.sql": true,
@@ -84,7 +80,11 @@ class Database {
         "patch-add-certificate-expiry-status-page.sql": true,
         "patch-monitor-oauth-cc.sql": true,
         "patch-add-timeout-monitor.sql": true,
-        "patch-add-gamedig-given-port.sql": true, // The last file so far converted to a knex migration file
+        "patch-add-gamedig-given-port.sql": true,
+        "patch-notification-config.sql": true,
+        "patch-fix-kafka-producer-booleans.sql": true,
+        "patch-timeout.sql": true,
+        "patch-monitor-tls-info-add-fk.sql": true,
     };
 
     /**
@@ -95,20 +95,15 @@ class Database {
 
     static noReject = true;
 
-    static dbConfig = {};
-
-    static knexMigrationsPath = "./db/knex_migrations";
-
     /**
-     * Initialize the data directory
-     * @param {object} args Arguments to initialize DB with
-     * @returns {void}
+     * Initialize the database
+     * @param {Object} args Arguments to initialize DB with
      */
-    static initDataDir(args) {
+    static init(args) {
         // Data Directory (must be end with "/")
         Database.dataDir = process.env.DATA_DIR || args["data-dir"] || "./data/";
 
-        Database.sqlitePath = path.join(Database.dataDir, "kuma.db");
+        Database.path = path.join(Database.dataDir, "kuma.db");
         if (! fs.existsSync(Database.dataDir)) {
             fs.mkdirSync(Database.dataDir, { recursive: true });
         }
@@ -133,134 +128,36 @@ class Database {
         log.info("db", `Data Dir: ${Database.dataDir}`);
     }
 
-    /**
-     *
-     */
-    static readDBConfig() {
-        let dbConfig;
-
-        let dbConfigString = fs.readFileSync(path.join(Database.dataDir, "db-config.json")).toString("utf-8");
-        dbConfig = JSON.parse(dbConfigString);
-
-        if (typeof dbConfig !== "object") {
-            throw new Error("Invalid db-config.json, it must be an object");
-        }
-
-        if (typeof dbConfig.type !== "string") {
-            throw new Error("Invalid db-config.json, type must be a string");
-        }
-        return dbConfig;
-    }
-
-    /**
-     * @param dbConfig
-     */
-    static writeDBConfig(dbConfig) {
-        fs.writeFileSync(path.join(Database.dataDir, "db-config.json"), JSON.stringify(dbConfig, null, 4));
-    }
-
     /**
      * Connect to the database
-     * @param {boolean} testMode Should the connection be
+     * @param {boolean} [testMode=false] Should the connection be
      * started in test mode?
-     * @param {boolean} autoloadModels Should models be
+     * @param {boolean} [autoloadModels=true] Should models be
      * automatically loaded?
-     * @param {boolean} noLog Should logs not be output?
+     * @param {boolean} [noLog=false] Should logs not be output?
      * @returns {Promise<void>}
      */
     static async connect(testMode = false, autoloadModels = true, noLog = false) {
         const acquireConnectionTimeout = 120 * 1000;
-        let dbConfig;
-        try {
-            dbConfig = this.readDBConfig();
-            Database.dbConfig = dbConfig;
-        } catch (err) {
-            log.warn("db", err.message);
-            dbConfig = {
-                type: "sqlite",
-            };
-        }
 
-        let config = {};
+        const Dialect = require("knex/lib/dialects/sqlite3/index.js");
+        Dialect.prototype._driver = () => require("@louislam/sqlite3");
 
-        log.info("db", `Database Type: ${dbConfig.type}`);
-
-        if (dbConfig.type === "sqlite") {
-
-            if (! fs.existsSync(Database.sqlitePath)) {
-                log.info("server", "Copying Database");
-                fs.copyFileSync(Database.templatePath, Database.sqlitePath);
-            }
-
-            const Dialect = require("knex/lib/dialects/sqlite3/index.js");
-            Dialect.prototype._driver = () => require("@louislam/sqlite3");
-
-            config = {
-                client: Dialect,
-                connection: {
-                    filename: Database.sqlitePath,
-                    acquireConnectionTimeout: acquireConnectionTimeout,
-                },
-                useNullAsDefault: true,
-                pool: {
-                    min: 1,
-                    max: 1,
-                    idleTimeoutMillis: 120 * 1000,
-                    propagateCreateError: false,
-                    acquireTimeoutMillis: acquireConnectionTimeout,
-                }
-            };
-        } else if (dbConfig.type === "mariadb") {
-            if (!/^\w+$/.test(dbConfig.dbName)) {
-                throw Error("Invalid database name. A database name can only consist of letters, numbers and underscores");
-            }
-
-            const connection = await mysql.createConnection({
-                host: dbConfig.hostname,
-                port: dbConfig.port,
-                user: dbConfig.username,
-                password: dbConfig.password,
-            });
-
-            await connection.execute("CREATE DATABASE IF NOT EXISTS " + dbConfig.dbName + " CHARACTER SET utf8mb4");
-            connection.end();
-
-            config = {
-                client: "mysql2",
-                connection: {
-                    host: dbConfig.hostname,
-                    port: dbConfig.port,
-                    user: dbConfig.username,
-                    password: dbConfig.password,
-                    database: dbConfig.dbName,
-                }
-            };
-        } else if (dbConfig.type === "embedded-mariadb") {
-            let embeddedMariaDB = EmbeddedMariaDB.getInstance();
-            await embeddedMariaDB.start();
-            log.info("mariadb", "Embedded MariaDB started");
-            config = {
-                client: "mysql2",
-                connection: {
-                    socketPath: embeddedMariaDB.socketPath,
-                    user: "node",
-                    database: "kuma",
-                }
-            };
-        } else {
-            throw new Error("Unknown Database type: " + dbConfig.type);
-        }
-
-        // Set to utf8mb4 for MariaDB
-        if (dbConfig.type.endsWith("mariadb")) {
-            config.pool = {
-                afterCreate(conn, done) {
-                    conn.query("SET CHARACTER SET utf8mb4;", (err) => done(err, conn));
-                },
-            };
-        }
-
-        const knexInstance = knex(config);
+        const knexInstance = knex({
+            client: Dialect,
+            connection: {
+                filename: Database.path,
+                acquireConnectionTimeout: acquireConnectionTimeout,
+            },
+            useNullAsDefault: true,
+            pool: {
+                min: 1,
+                max: 1,
+                idleTimeoutMillis: 120 * 1000,
+                propagateCreateError: false,
+                acquireTimeoutMillis: acquireConnectionTimeout,
            }
+        });
 
         R.setup(knexInstance);
 
@@ -275,18 +172,6 @@ class Database {
             await R.autoloadModels("./server/model");
         }
 
-        if (dbConfig.type === "sqlite") {
-            await this.initSQLite(testMode, noLog);
-        } else if (dbConfig.type.endsWith("mariadb")) {
-            await this.initMariaDB();
-        }
-    }
-
-    /**
-     * @param testMode
-     * @param noLog
-     */
-    static async initSQLite(testMode, noLog) {
         await R.exec("PRAGMA foreign_keys = ON");
         if (testMode) {
             // Change to MEMORY
@@ -311,56 +196,8 @@ class Database {
         }
     }
 
-    /**
-     *
-     */
-    static async initMariaDB() {
-        log.debug("db", "Checking if MariaDB database exists...");
-
-        let hasTable = await R.hasTable("docker_host");
-        if (!hasTable) {
-            const { createTables } = require("../db/knex_init_db");
-            await createTables();
-        } else {
-            log.debug("db", "MariaDB database already exists");
-        }
-    }
-
-    /**
-     * Patch the database
-     * @returns {void}
-     */
+    /** Patch the database */
     static async patch() {
-        // Still need to keep this for old versions of Uptime Kuma
-        if (Database.dbConfig.type === "sqlite") {
-            await this.patchSqlite();
-        }
-
-        // Using knex migrations
-        // https://knexjs.org/guide/migrations.html
-        // https://gist.github.com/NigelEarle/70db130cc040cc2868555b29a0278261
-        try {
-            await R.knex.migrate.latest({
-                directory: Database.knexMigrationsPath,
-            });
-        } catch (e) {
-            log.error("db", "Database migration failed");
-            throw e;
-        }
-    }
-
-    /**
-     * @returns {Promise<void>}
-     */
-    static async rollbackLatestPatch() {
-
-    }
-
-    /**
-     * Patch the database for SQLite
-     * @deprecated
-     */
-    static async patchSqlite() {
         let version = parseInt(await setting("database_version"));
 
         if (! version) {
@@ -380,7 +217,7 @@ class Database {
             // Try catch anything here
             try {
                 for (let i = version + 1; i <= this.latestVersion; i++) {
-                    const sqlFile = `./db/old_migrations/patch${i}.sql`;
+                    const sqlFile = `./db/patch${i}.sql`;
                     log.info("db", `Patching ${sqlFile}`);
                     await Database.importSQLFile(sqlFile);
                     log.info("db", `Patched ${sqlFile}`);
@@ -397,18 +234,17 @@ class Database {
             }
         }
 
-        await this.patchSqlite2();
+        await this.patch2();
         await this.migrateNewStatusPage();
     }
 
     /**
      * Patch DB using new process
      * Call it from patch() only
-     * @deprecated
      * @private
     * @returns {Promise<void>}
      */
-    static async patchSqlite2() {
+    static async patch2() {
         log.info("db", "Database Patch 2.0 Process");
         let databasePatchedFiles = await setting("databasePatchedFiles");
 
@@ -442,7 +278,6 @@ class Database {
     }
 
     /**
-     * SQlite only
      * Migrate status page value in setting to "status_page" table
     * @returns {Promise<void>}
      */
@@ -514,8 +349,8 @@ class Database {
     * Patch database using new patching process
     * Used it patch2() only
     * @private
-     * @param {string} sqlFilename Name of SQL file to load
-     * @param {object} databasePatchedFiles Patch status of database files
+     * @param sqlFilename
+     * @param databasePatchedFiles
     * @returns {Promise<void>}
     */
     static async patch2Recursion(sqlFilename, databasePatchedFiles) {
@@ -539,7 +374,7 @@ class Database {
 
         log.info("db", sqlFilename + " is patching");
         this.patched = true;
-        await this.importSQLFile("./db/old_migrations/" + sqlFilename);
+        await this.importSQLFile("./db/" + sqlFilename);
         databasePatchedFiles[sqlFilename] = true;
         log.info("db", sqlFilename + " was patched successfully");
 
@@ -550,7 +385,7 @@ class Database {
 
     /**
     * Load an SQL file and execute it
-     * @param {string} filename Filename of SQL file to import
+     * @param filename Filename of SQL file to import
     * @returns {Promise<void>}
     */
     static async importSQLFile(filename) {
@@ -584,7 +419,7 @@ class Database {
 
     /**
     * Aquire a direct connection to database
-     * @returns {any} Database connection
+     * @returns {any}
     */
     static getBetterSQLite3Database() {
         return R.knex.client.acquireConnection();
@@ -621,13 +456,10 @@ class Database {
         process.removeListener("unhandledRejection", listener);
     }
 
-    /**
-     * Get the size of the database
-     * @returns {number} Size of database
-     */
+    /** Get the size of the database */
     static getSize() {
         log.debug("db", "Database.getSize()");
-        let stats = fs.statSync(Database.sqlitePath);
+        let stats = fs.statSync(Database.path);
         log.debug("db", stats);
         return stats.size;
     }
@@ -639,18 +471,6 @@ class Database {
     static async shrink() {
         await R.exec("VACUUM");
     }
-
-    /**
-     *
-     */
-    static sqlHourOffset() {
-        if (this.dbConfig.client === "sqlite3") {
-            return "DATETIME('now', ? || ' hours')";
-        } else {
-            return "DATE_ADD(NOW(), INTERVAL ? HOUR)";
-        }
-    }
-
 }
 
 module.exports = Database;
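The 1.23.X side collapses the multi-database setup back to a single SQLite connection built directly with knex, overriding the sqlite3 dialect driver so the bundled `@louislam/sqlite3` build is used. Below is a minimal, hedged sketch of the same knex-plus-SQLite pattern using the stock `sqlite3` client; the file name and query are made-up example values, not code from the repository:

```js
// Minimal knex + SQLite sketch mirroring the connection options above.
const knex = require("knex");

const db = knex({
    client: "sqlite3",
    connection: {
        filename: "./data/example.db",
    },
    useNullAsDefault: true,   // recommended by knex for SQLite
    pool: {
        min: 1,
        max: 1,               // a single connection, as in the diff
    },
});

async function main() {
    await db.raw("PRAGMA foreign_keys = ON");
    const rows = await db.raw("SELECT 1 AS ok");
    console.log(rows);
    await db.destroy();
}

main();
```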
@@ -14,10 +14,10 @@ class DockerHost {
 
     /**
      * Save a docker host
-     * @param {object} dockerHost Docker host to save
+     * @param {Object} dockerHost Docker host to save
      * @param {?number} dockerHostID ID of the docker host to update
      * @param {number} userID ID of the user who adds the docker host
-     * @returns {Promise<Bean>} Updated docker host
+     * @returns {Promise<Bean>}
      */
     static async save(dockerHost, dockerHostID, userID) {
         let bean;
@@ -64,7 +64,7 @@ class DockerHost {
 
     /**
      * Fetches the amount of containers on the Docker host
-     * @param {object} dockerHost Docker host to check for
+     * @param {Object} dockerHost Docker host to check for
      * @returns {number} Total amount of containers on the host
      */
     static async testDockerHost(dockerHost) {
@@ -80,8 +80,8 @@ class DockerHost {
             options.socketPath = dockerHost.dockerDaemon;
         } else if (dockerHost.dockerType === "tcp") {
             options.baseURL = DockerHost.patchDockerURL(dockerHost.dockerDaemon);
+            options.httpsAgent = new https.Agent(DockerHost.getHttpsAgentOptions(dockerHost.dockerType, options.baseURL));
         }
-        options.httpsAgent = new https.Agent(DockerHost.getHttpsAgentOptions(dockerHost.dockerType, options.baseURL));
 
         let res = await axios.request(options);
 
@@ -108,8 +108,6 @@ class DockerHost {
     /**
      * Since axios 0.27.X, it does not accept `tcp://` protocol.
      * Change it to `http://` on the fly in order to fix it. (https://github.com/louislam/uptime-kuma/issues/2165)
-     * @param {any} url URL to fix
-     * @returns {any} URL with tcp:// replaced by http://
     */
     static patchDockerURL(url) {
         if (typeof url === "string") {
@@ -131,10 +129,11 @@ class DockerHost {
     * 'data/docker-tls/example.com/' would be searched for certificate files),
     * then 'ca.pem', 'key.pem' and 'cert.pem' files are included in the agent options.
     * File names can also be overridden via 'DOCKER_TLS_FILE_NAME_(CA|KEY|CERT)'.
-     * @param {string} dockerType i.e. "tcp" or "socket"
-     * @param {string} url The docker host URL rewritten to https://
-     * @returns {object} HTTP agent options
-     */
+     *
+     * @param {String} dockerType i.e. "tcp" or "socket"
+     * @param {String} url The docker host URL rewritten to https://
+     * @return {Object}
+     * */
     static getHttpsAgentOptions(dockerType, url) {
         let baseOptions = {
             maxCachedSessions: 0,
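The behavioural change in `testDockerHost` above is that the `https.Agent` is now created only for the `tcp` connection type, instead of unconditionally. A hedged sketch of the resulting request shape; the endpoint, URL, and socket path below are placeholders, not values from the diff:

```js
// Sketch of how the request options end up per docker connection type.
const https = require("https");
const axios = require("axios");

async function probe(dockerType) {
    const options = {
        url: "/containers/json?all=true", // illustrative endpoint
        timeout: 5000,
        headers: { "Accept": "*/*" },
    };

    if (dockerType === "socket") {
        // Unix socket: no TLS agent involved.
        options.socketPath = "/var/run/docker.sock";
    } else if (dockerType === "tcp") {
        options.baseURL = "https://docker.example.local:2376";
        // TLS agent only for TCP hosts, matching the 1.23.X side of the diff.
        options.httpsAgent = new https.Agent({ maxCachedSessions: 0 });
    }

    const res = await axios.request(options);
    return res.data.length;
}
```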
@@ -1,168 +0,0 @@
-const { log } = require("../src/util");
-const childProcess = require("child_process");
-const fs = require("fs");
-const mysql = require("mysql2");
-
-/**
- * It is only used inside the docker container
- */
-class EmbeddedMariaDB {
-
-    static instance = null;
-
-    exec = "mariadbd";
-
-    mariadbDataDir = "/app/data/mariadb";
-
-    runDir = "/app/data/run/mariadb";
-
-    socketPath = this.runDir + "/mysqld.sock";
-
-    childProcess = null;
-    running = false;
-
-    started = false;
-
-    /**
-     * @returns {EmbeddedMariaDB}
-     */
-    static getInstance() {
-        if (!EmbeddedMariaDB.instance) {
-            EmbeddedMariaDB.instance = new EmbeddedMariaDB();
-        }
-        return EmbeddedMariaDB.instance;
-    }
-
-    /**
-     *
-     */
-    static hasInstance() {
-        return !!EmbeddedMariaDB.instance;
-    }
-
-    /**
-     *
-     */
-    start() {
-        if (this.childProcess) {
-            log.info("mariadb", "Already started");
-            return;
-        }
-
-        this.initDB();
-
-        this.running = true;
-        log.info("mariadb", "Starting Embedded MariaDB");
-        this.childProcess = childProcess.spawn(this.exec, [
-            "--user=node",
-            "--datadir=" + this.mariadbDataDir,
-            `--socket=${this.socketPath}`,
-            `--pid-file=${this.runDir}/mysqld.pid`,
-        ]);
-
-        this.childProcess.on("close", (code) => {
-            this.running = false;
-            this.childProcess = null;
-            this.started = false;
-            log.info("mariadb", "Stopped Embedded MariaDB: " + code);
-
-            if (code !== 0) {
-                log.info("mariadb", "Try to restart Embedded MariaDB as it is not stopped by user");
-                this.start();
-            }
-        });
-
-        this.childProcess.on("error", (err) => {
-            if (err.code === "ENOENT") {
-                log.error("mariadb", `Embedded MariaDB: ${this.exec} is not found`);
-            } else {
-                log.error("mariadb", err);
-            }
-        });
-
-        let handler = (data) => {
-            log.debug("mariadb", data.toString("utf-8"));
-            if (data.toString("utf-8").includes("ready for connections")) {
-                this.initDBAfterStarted();
-            }
-        };
-
-        this.childProcess.stdout.on("data", handler);
-        this.childProcess.stderr.on("data", handler);
-
-        return new Promise((resolve) => {
-            let interval = setInterval(() => {
-                if (this.started) {
-                    clearInterval(interval);
-                    resolve();
-                } else {
-                    log.info("mariadb", "Waiting for Embedded MariaDB to start...");
-                }
-            }, 1000);
-        });
-    }
-
-    /**
-     *
-     */
-    stop() {
-        if (this.childProcess) {
-            this.childProcess.kill("SIGINT");
-            this.childProcess = null;
-        }
-    }
-
-    /**
-     *
-     */
-    initDB() {
-        if (!fs.existsSync(this.mariadbDataDir)) {
-            log.info("mariadb", `Embedded MariaDB: ${this.mariadbDataDir} is not found, create one now.`);
-            fs.mkdirSync(this.mariadbDataDir, {
-                recursive: true,
-            });
-
-            let result = childProcess.spawnSync("mysql_install_db", [
-                "--user=node",
-                "--ldata=" + this.mariadbDataDir,
-            ]);
-
-            if (result.status !== 0) {
-                let error = result.stderr.toString("utf-8");
-                log.error("mariadb", error);
-                return;
-            } else {
-                log.info("mariadb", "Embedded MariaDB: mysql_install_db done:" + result.stdout.toString("utf-8"));
-            }
-        }
-
-        if (!fs.existsSync(this.runDir)) {
-            log.info("mariadb", `Embedded MariaDB: ${this.runDir} is not found, create one now.`);
-            fs.mkdirSync(this.runDir, {
-                recursive: true,
-            });
-        }
-
-    }
-
-    /**
-     *
-     */
-    async initDBAfterStarted() {
-        const connection = mysql.createConnection({
-            socketPath: this.socketPath,
-            user: "node",
-        });
-
-        let result = await connection.execute("CREATE DATABASE IF NOT EXISTS `kuma`");
-        log.debug("mariadb", "CREATE DATABASE: " + JSON.stringify(result));
-
-        log.info("mariadb", "Embedded MariaDB is ready for connections");
-        this.started = true;
-    }
-
-}
-
-module.exports = {
-    EmbeddedMariaDB,
-};
@@ -1,21 +1,25 @@
 const jsesc = require("jsesc");
+const { escape } = require("html-escaper");
 
 /**
  * Returns a string that represents the javascript that is required to insert the Google Analytics scripts
  * into a webpage.
- * @param {string} tagId Google UA/G/AW/DC Property ID to use with the Google Analytics script.
- * @returns {string} HTML script tags to inject into page
+ * @param tagId Google UA/G/AW/DC Property ID to use with the Google Analytics script.
+ * @returns {string}
 */
 function getGoogleAnalyticsScript(tagId) {
-    let escapedTagId = jsesc(tagId, { isScriptContext: true });
+    let escapedTagIdJS = jsesc(tagId, { isScriptContext: true });
 
-    if (escapedTagId) {
-        escapedTagId = escapedTagId.trim();
+    if (escapedTagIdJS) {
+        escapedTagIdJS = escapedTagIdJS.trim();
     }
 
+    // Escape the tag ID for use in an HTML attribute.
+    let escapedTagIdHTMLAttribute = escape(tagId);
+
     return `
-        <script async src="https://www.googletagmanager.com/gtag/js?id=${escapedTagId}"></script>
-        <script>window.dataLayer = window.dataLayer || []; function gtag(){dataLayer.push(arguments);} gtag('js', new Date());gtag('config', '${escapedTagId}'); </script>
+        <script async src="https://www.googletagmanager.com/gtag/js?id=${escapedTagIdHTMLAttribute}"></script>
+        <script>window.dataLayer = window.dataLayer || []; function gtag(){dataLayer.push(arguments);} gtag('js', new Date());gtag('config', '${escapedTagIdJS}'); </script>
     `;
 }
 
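The point of the rewrite above is that the same tag ID ends up in two different contexts, so it needs two different escapes: `jsesc` for the inline `gtag('config', ...)` call and `html-escaper` for the `src` attribute. A hedged sketch showing the two escapers side by side on a hostile input value of my own choosing:

```js
// Why two escapers: the same value is emitted into a JS string and an HTML attribute.
const jsesc = require("jsesc");
const { escape } = require("html-escaper");

const hostileTagId = "G-1234\"></script><script>alert(1)</script>";

// Safe inside a quoted JS string within a <script> block.
console.log(jsesc(hostileTagId, { isScriptContext: true }));

// Safe inside a double-quoted HTML attribute value.
console.log(escape(hostileTagId));
```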
Some files were not shown because too many files have changed in this diff.