Compare commits


43 Commits

Author SHA1 Message Date
Louis Lam
df2f76dbe3 WIP 2023-08-18 02:16:38 +08:00
Louis Lam
31c00081fa Merge branch 'master' into 2.0.X 2023-08-16 16:14:04 +08:00
Louis Lam
db3a7d69fe Change some jsdoc rule to warn instead of error 2023-08-11 22:29:45 +08:00
Louis Lam
d33b4f46e4 Disable e2e test temporarily and update some docs 2023-08-11 22:17:31 +08:00
Louis Lam
dd62bd3d91 Move patch files 2023-08-11 22:08:45 +08:00
Matthew Nickson
8a92054c2b Added JSDoc to ESLint (#3529)
* Added JSDoc to eslint rules

Signed-off-by: Matthew Nickson <mnickson@sidingsmedia.com>

* Fixed JSDoc eslint errors

Signed-off-by: Matthew Nickson <mnickson@sidingsmedia.com>

* Update the check-linters workflow to Node.js 20

---------

Signed-off-by: Matthew Nickson <mnickson@sidingsmedia.com>
Co-authored-by: Louis Lam <louislam@users.noreply.github.com>
2023-08-11 15:46:41 +08:00
Louis Lam
da4f4e3d76 Update migrations 2023-08-09 21:07:23 +08:00
Louis Lam
e001fd7d1c incrementalVacuum for sqlite only 2023-08-09 21:05:15 +08:00
Louis Lam
83307b3446 Update dependencies 2023-08-09 20:13:22 +08:00
Louis Lam
3e0f5f4231 Merge branch 'master' into 2.0.X
# Conflicts:
#	package-lock.json
#	server/database.js
#	server/util-server.js
2023-08-09 20:09:56 +08:00
Louis Lam
0ab3507faf Update to nodejs 20 bookworm and use apt to install apprise 2023-07-30 23:47:07 +08:00
Louis Lam
a0bd4b248b Merge remote-tracking branch 'origin/master' into 2.0.X
# Conflicts:
#	docker/debian-base.dockerfile
#	package-lock.json
#	package.json
#	server/database.js
#	src/router.js
2023-07-30 19:15:09 +08:00
Louis Lam
ccc39b9516 Move all old db patch files to db/old_migrations/ 2023-07-01 20:54:47 +08:00
Louis Lam
e26abc3156 Improve the setup database 2023-07-01 02:48:42 +08:00
Louis Lam
d286c534bd Improve the setup database for embedded MariaDB 2023-06-30 22:17:07 +08:00
Louis Lam
7975caf29e Update db migration and dockerfile 2023-06-30 17:26:37 +08:00
Louis Lam
16a1a66e09 Merge remote-tracking branch 'origin/master' into 2.0.X
# Conflicts:
#	docker/alpine-base.dockerfile
#	docker/debian-base.dockerfile
#	docker/dockerfile
#	package.json
#	server/database.js
#	server/jobs/util-worker.js
#	server/model/maintenance.js
#	server/model/monitor.js
#	server/routers/api-router.js
#	server/server.js
#	server/uptime-kuma-server.js
2023-06-30 13:38:56 +08:00
Louis Lam
2e2747fb52 Handling DATE_ADD 2023-06-27 20:57:34 +08:00
Louis Lam
5388a37a26 Fix port NaN not working in MariaDB 2023-06-27 20:57:34 +08:00
Louis Lam
b2a1bd5214 WIP 2023-06-27 20:57:34 +08:00
Louis Lam
a2d147b88e Merge pull request #3017 from chakflying/v2/fix/external-database-setup
[2.0] Fix: Add external mariaDB setup
2023-06-27 20:55:21 +08:00
Nelson Chan
f70b971810 Fix: Improve error message
Co-authored-by: Frank Elsinga <frank@elsinga.de>
2023-05-21 15:42:13 +08:00
Nelson Chan
38fab198bb Fix: Fix user count check 2023-04-03 19:36:07 +08:00
Nelson Chan
8d5679a8ab Fix: Create database before connect 2023-04-03 19:35:31 +08:00
Louis Lam
f2633a5d01 Finished knex_init_db.js 2023-02-12 03:44:15 +08:00
Louis Lam
4056951915 WIP: building database in knex.js 2023-02-11 22:21:06 +08:00
Louis Lam
e4183ee2b7 Database Setup Page (#2738)
* WIP

* WIP: Database setup process

* Add database setup page
2023-02-11 14:41:02 +08:00
Louis Lam
db4663d6be Merge remote-tracking branch 'origin/master' into 2.0.X 2023-02-11 00:51:40 +08:00
Louis Lam
68ead3414d WIP 2023-02-06 22:26:13 +08:00
Louis Lam
e06c3ee5d4 Merge branch 'mariadb' into 2.0.X
# Conflicts:
#	package.json
2023-02-05 18:31:42 +08:00
Louis Lam
d4752b65de WIP 2023-02-05 18:01:54 +08:00
Louis Lam
dc4d2a77bb WIP 2023-02-05 17:45:36 +08:00
Louis Lam
33d9c1bbb1 Merge branch 'true-rootless' into mariadb
# Conflicts:
#	docker/dockerfile
2023-02-04 18:40:39 +08:00
Louis Lam
27eddb7253 Update dockerfile 2023-02-04 18:37:12 +08:00
Louis Lam
0b40c65139 Merge remote-tracking branch 'origin/master' into mariadb
# Conflicts:
#	docker/alpine-base.dockerfile
#	docker/dockerfile-alpine
#	package.json
#	server/database.js
2023-02-04 18:21:34 +08:00
Louis Lam
a4de93f976 WIP 2022-12-23 22:43:56 +08:00
Louis Lam
5e976afb27 Merge remote-tracking branch 'origin/master' into mariadb
# Conflicts:
#	docker/alpine-base.dockerfile
#	docker/dockerfile
#	package-lock.json
2022-12-18 21:03:06 +08:00
Louis Lam
00b52f23cf Merge remote-tracking branch 'origin/2.0.X' into 2.0.X 2022-10-04 21:21:39 +08:00
Louis Lam
ba1f7762b1 Merge pull request #2085 from louislam/drop-alpine
[2.0.X] Drop support for Alpine docker image
2022-09-25 21:06:35 +08:00
Louis Lam
73f7fbabd3 True rootless image 2022-09-14 18:05:02 +08:00
Louis Lam
0039f1f521 Drop support for Alpine docker image 2022-09-14 17:36:55 +08:00
Louis Lam
b0d39b44ce Testing 2022-07-22 23:15:55 +08:00
Louis Lam
301b2007a0 Drop Alpine support 2022-07-19 20:53:19 +08:00
266 changed files with 6131 additions and 5103 deletions

View File

@@ -1,6 +1,6 @@
/.idea
/node_modules
/data
/data*
/cypress
/out
/test
@@ -34,12 +34,7 @@ tsconfig.json
/ecosystem.config.js
/extra/healthcheck.exe
/extra/healthcheck
/extra/exe-builder
/extra/push-examples
/extra/uptime-kuma-push
# Comment the following line if you want to rebuild the healthcheck binary
/extra/healthcheck-armv7
extra/exe-builder
### .gitignore content (commented rules are duplicated)

View File

@@ -14,6 +14,7 @@ module.exports = {
extends: [
"eslint:recommended",
"plugin:vue/vue3-recommended",
"plugin:jsdoc/recommended-error",
],
parser: "vue-eslint-parser",
parserOptions: {
@@ -21,6 +22,9 @@ module.exports = {
sourceType: "module",
requireConfigFile: false,
},
plugins: [
"jsdoc"
],
rules: {
"yoda": "error",
eqeqeq: [ "warn", "smart" ],
@@ -78,7 +82,7 @@ module.exports = {
"checkLoops": false,
}],
"space-before-blocks": "warn",
//"no-console": "warn",
//'no-console': 'warn',
"no-extra-boolean-cast": "off",
"no-multiple-empty-lines": [ "warn", {
"max": 1,
@@ -90,15 +94,50 @@ module.exports = {
"no-unneeded-ternary": "error",
"array-bracket-newline": [ "error", "consistent" ],
"eol-last": [ "error", "always" ],
//"prefer-template": "error",
"template-curly-spacing": [ "warn", "never" ],
//'prefer-template': 'error',
"comma-dangle": [ "warn", "only-multiline" ],
"no-empty": [ "error", {
"allowEmptyCatch": true
}],
"no-control-regex": "off",
"one-var": [ "error", "never" ],
"max-statements-per-line": [ "error", { "max": 1 }]
"max-statements-per-line": [ "error", { "max": 1 }],
"jsdoc/check-tag-names": [
"error",
{
"definedTags": [ "link" ]
}
],
"jsdoc/no-undefined-types": "off",
"jsdoc/no-defaults": [
"error",
{ "noOptionalParamNames": true }
],
"jsdoc/require-throws": "warn",
"jsdoc/require-jsdoc": [
"error",
{
"require": {
"FunctionDeclaration": true,
"MethodDefinition": true,
}
}
],
"jsdoc/no-blank-block-descriptions": "error",
"jsdoc/require-returns-description": "warn",
"jsdoc/require-returns-check": [
"error",
{ "reportMissingReturnForUndefinedTypes": false }
],
"jsdoc/require-returns": [
"warn",
{
"forceRequireReturn": true,
"forceReturnsWithAsync": true
}
],
"jsdoc/require-param-type": "warn",
"jsdoc/require-param-description": "warn"
},
"overrides": [
{
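
Taken together, the new jsdoc rules require every function declaration and class method to carry a JSDoc block with typed, described parameters and a `@returns` tag (forced even for async functions). A minimal sketch of a block that would pass these rules; the function itself is hypothetical, not from this diff:

```js
/**
 * Ping a monitor target and report the round-trip time.
 * @param {string} hostname Host to ping
 * @param {number} timeout Timeout in milliseconds
 * @returns {Promise<number>} Round-trip time in milliseconds
 */
async function pingTarget(hostname, timeout) {
    // Implementation omitted; only the JSDoc shape matters here.
    return 0;
}
```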

View File

@@ -5,11 +5,11 @@ name: Auto Test
on:
push:
branches: [ master, 1.23.X ]
branches: [ master ]
paths-ignore:
- '*.md'
pull_request:
branches: [ master, 1.23.X ]
branches: [ master, 2.0.X ]
paths-ignore:
- '*.md'
@@ -27,13 +27,13 @@ jobs:
steps:
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Use Node.js ${{ matrix.node }}
uses: actions/setup-node@v4
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node }}
- run: npm install npm@9 -g
- run: npm install npm@latest -g
- run: npm install
- run: npm run build
- run: npm test
@@ -55,13 +55,13 @@ jobs:
steps:
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Use Node.js ${{ matrix.node }}
uses: actions/setup-node@v4
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node }}
- run: npm install npm@9 -g
- run: npm install npm@latest -g
- run: npm ci --production
check-linters:
@@ -69,39 +69,40 @@ jobs:
steps:
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Use Node.js 14
uses: actions/setup-node@v4
- name: Use Node.js 20
uses: actions/setup-node@v3
with:
node-version: 14
node-version: 20
- run: npm install
- run: npm run lint:prod
- run: npm run lint
e2e-tests:
needs: [ check-linters ]
runs-on: ubuntu-latest
steps:
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v4
- name: Use Node.js 14
uses: actions/setup-node@v4
with:
node-version: 14
- run: npm install
- run: npm run build
- run: npm run cy:test
# TODO: Temporarily disable, as it cannot pass the test in 2.0.0 yet
# e2e-tests:
# needs: [ check-linters ]
# runs-on: ubuntu-latest
# steps:
# - run: git config --global core.autocrlf false # Mainly for Windows
# - uses: actions/checkout@v3
#
# - name: Use Node.js 14
# uses: actions/setup-node@v3
# with:
# node-version: 14
# - run: npm install
# - run: npm run build
# - run: npm run cy:test
frontend-unit-tests:
needs: [ check-linters ]
runs-on: ubuntu-latest
steps:
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Use Node.js 14
uses: actions/setup-node@v4
uses: actions/setup-node@v3
with:
node-version: 14
- run: npm install

View File

@@ -14,10 +14,10 @@ jobs:
node-version: [16]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v4
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node-version }}
cache: 'npm'

View File

@@ -6,7 +6,7 @@ on:
pull_request:
branches:
- master
- 1.23.X
- 2.0.X
workflow_dispatch:
permissions:
@@ -17,11 +17,11 @@ jobs:
json-yaml-validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: json-yaml-validate
id: json-yaml-validate
uses: GrantBirki/json-yaml-validate@v2.4.0
uses: GrantBirki/json-yaml-validate@v1.3.0
with:
comment: "true" # enable comment mode
exclude_file: ".github/config/exclude.txt" # gitignore style file for exclusions

View File

@@ -9,7 +9,7 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v8
- uses: actions/stale@v7
with:
stale-issue-message: 'We are clearing up our old issues and your ticket has been open for 3 months with no activity. Remove stale label or comment or this will be closed in 2 days.'
close-issue-message: 'This issue was closed because it has been stalled for 2 days with no activity.'

.gitignore vendored
View File

@@ -7,6 +7,7 @@ dist-ssr
/data
!/data/.gitkeep
/data*
.vscode
/private

View File

@@ -2,13 +2,13 @@
First of all, I want to thank everyone who made pull requests for Uptime Kuma. I never thought the GitHub Community would be so nice! Because of this, I also never thought that other people would actually read and edit my code. It is not very well structured or commented, sorry about that.
The project was created with vite.js (vue3). Then I created a subdirectory called "server" for the server part. Both frontend and backend share the same package.json.
The project was created with vite.js (vue3). Then I created a subdirectory called "server" for server part. Both frontend and backend share the same package.json.
The frontend code builds into "dist" directory. The server (express.js) exposes the "dist" directory as the root of the endpoint. This is how production is working.
The frontend code build into "dist" directory. The server (express.js) exposes the "dist" directory as root of the endpoint. This is how production is working.
## Key Technical Skills
- Node.js (You should know about promise, async/await and arrow function etc.)
- Node.js (You should know what are promise, async/await and arrow function etc.)
- Socket.io
- SCSS
- Vue.js
@@ -30,7 +30,7 @@ The frontend code builds into "dist" directory. The server (express.js) exposes
## Can I create a pull request for Uptime Kuma?
Yes or no, it depends on what you will try to do. Since I don't want to waste your time, be sure to **create an empty draft pull request or open an issue, so we can have a discussion first**. Especially for a large pull request or you don't know if it will be merged or not.
Yes or no, it depends on what you will try to do. Since I don't want to waste your time, be sure to **create an empty draft pull request or open an issue, so we can have a discussion first**. Especially for a large pull request or you don't know it will be merged or not.
Here are some references:
@@ -46,8 +46,8 @@ Here are some references:
- New features
### ❌ Won't be merged:
- A dedicated PR for translating existing languages (see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md))
- Do not pass the auto-test
- A dedicated pr for translating existing languages (see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md))
- Do not pass the auto test
- Any breaking changes
- Duplicated pull requests
- Buggy
@@ -61,9 +61,9 @@ The above cases may not cover all possible situations.
I (@louislam) have the final say. If your pull request does not meet my expectations, I will reject it, no matter how much time you spend on it. Therefore, it is essential to have a discussion beforehand.
I will assign your pull request to a [milestone](https://github.com/louislam/uptime-kuma/milestones), if I plan to review and merge it.
I will mark your pull request in the [milestones](https://github.com/louislam/uptime-kuma/milestones), if I am plan to review and merge it.
Also, please don't rush or ask for an ETA, because I have to understand the pull request, make sure it is no breaking changes and stick to my vision of this project, especially for large pull requests.
Also, please don't rush or ask for ETA, because I have to understand the pull request, make sure it is no breaking changes and stick to my vision of this project, especially for large pull requests.
### Recommended Pull Request Guideline
@@ -83,11 +83,11 @@ Before deep into coding, discussion first is preferred. Creating an empty pull r
## Project Styles
I personally do not like something that requires so many configurations before you can finally start the app. I hope Uptime Kuma installation will be as easy as like installing a mobile app.
I personally do not like something that requires so many configurations before you can finally start the app. I hope Uptime Kuma installation could be as easy as like installing a mobile app.
- Easy to install for non-Docker users, no native build dependency is needed (for x86_64/armv7/arm64), no extra config, and no extra effort required to get it running
- Easy to install for non-Docker users, no native build dependency is needed (for x86_64/armv7/arm64), no extra config, no extra effort required to get it running
- Single container for Docker users, no very complex docker-compose file. Just map the volume and expose the port, then good to go
- Settings should be configurable in the frontend. Environment variables are discouraged, unless it is related to startup such as `DATA_DIR`
- Settings should be configurable in the frontend. Environment variable is not encouraged, unless it is related to startup such as `DATA_DIR`
- Easy to use
- The web UI styling should be consistent and nice
@@ -130,7 +130,7 @@ Port `3000` and port `3001` will be used.
npm run dev
```
But sometimes, you would like to restart the server, but not the frontend, you can run these commands in two terminals:
But sometimes, you would like to keep restart the server, but not the frontend, you can run these command in two terminals:
```
npm run start-frontend-dev
npm run start-server-dev
@@ -146,13 +146,13 @@ It is mainly a socket.io app + express.js.
express.js is used for:
- entry point such as redirecting to a status page or the dashboard
- serving the frontend built files (index.html, .js and .css etc.)
- serving internal APIs of the status page
- serving internal APIs of status page
### Structure in /server/
- jobs/ (Jobs that are running in another process)
- model/ (Object model, auto-mapping to the database table name)
- model/ (Object model, auto mapping to the database table name)
- modules/ (Modified 3rd-party modules)
- monitor_types (Monitor Types)
- notification-providers/ (individual notification logic)
@@ -163,7 +163,7 @@ express.js is used for:
## Frontend Dev Server
It binds to `0.0.0.0:3000` by default. The frontend dev server is used for development only.
It binds to `0.0.0.0:3000` by default. Frontend dev server is used for development only.
For production, it is not used. It will be compiled to `dist` directory instead.
@@ -181,7 +181,7 @@ Uptime Kuma Frontend is a single page application (SPA). Most paths are handled
The router is in `src/router.js`
As you can see, most data in the frontend is stored at the root level, even though you changed the current router to any other pages.
As you can see, most data in frontend is stored in root level, even though you changed the current router to any other pages.
The data and socket logic are in `src/mixins/socket.js`.
@@ -210,7 +210,7 @@ Both frontend and backend share the same package.json. However, the frontend dep
### Update Dependencies
Since previously updating Vite 2.5.10 to 2.6.0 broke the application completely, from now on, it should update the patch release version only.
Since previously updating Vite 2.5.10 to 2.6.0 broke the application completely, from now on, it should update patch release version only.
Patch release = the third digit ([Semantic Versioning](https://semver.org/))
@@ -218,17 +218,17 @@ If for security / bug / other reasons, a library must be updated, breaking chang
## Translations
Please add **all** the strings which are translatable to `src/lang/en.json` (If translation keys are omitted, they can not be translated).
Please add **all** the strings which are translatable to `src/lang/en.json` (If translation keys are ommited, they can not be translated).
**Don't include any other languages in your initial Pull-Request** (even if this is your mother tongue), to avoid merge-conflicts between weblate and `master`.
The translations can then (after merging a PR into `master`) be translated by awesome people donating their language skills.
**Don't include any other languages in your inital Pull-Request** (even if this is your mother tounge), to avoid merge-conflicts between weblate and `master`.
The translations can then (after merging a PR into `master`) be translated by awesome people donating their language-skills.
If you want to help by translating Uptime Kuma into your language, please visit the [instructions on how to translate using weblate](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).
## Spelling & Grammar
Feel free to correct the grammar in the documentation or code.
My mother language is not English and my grammar is not that great.
My mother language is not english and my grammar is not that great.
## Wiki

View File

@@ -26,7 +26,7 @@ It is a temporary live demo, all data will be deleted after 10 minutes. Use the
* Monitoring uptime for HTTP(s) / TCP / HTTP(s) Keyword / HTTP(s) Json Query / Ping / DNS Record / Push / Steam Game Server / Docker Containers
* Fancy, Reactive, Fast UI/UX
* Notifications via Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP), and [90+ notification services, click here for the full list](https://github.com/louislam/uptime-kuma/tree/master/src/components/notifications)
* 20-second intervals
* 20 second intervals
* [Multi Languages](https://github.com/louislam/uptime-kuma/tree/master/src/lang)
* Multiple status pages
* Map status pages to specific domains
@@ -70,7 +70,7 @@ npm run setup
# Option 1. Try it
node server/server.js
# (Recommended) Option 2. Run in the background using PM2
# (Recommended) Option 2. Run in background using PM2
# Install PM2 if you don't have it:
npm install pm2 -g && pm2 install pm2-logrotate
@@ -93,7 +93,7 @@ pm2 save && pm2 startup
### Windows Portable (x64)
https://github.com/louislam/uptime-kuma/releases/download/1.23.1/uptime-kuma-windows-x64-portable-1.23.1.zip
https://github.com/louislam/uptime-kuma/files/11886108/uptime-kuma-win64-portable-1.0.1.zip
### Advanced Installation
@@ -109,7 +109,7 @@ https://github.com/louislam/uptime-kuma/wiki/%F0%9F%86%99-How-to-Update
## 🆕 What's Next?
I will assign requests/issues to the next milestone.
I will mark requests/issues to the next milestone.
https://github.com/louislam/uptime-kuma/milestones

View File

@@ -3,19 +3,19 @@
## Reporting a Vulnerability
1. Please report security issues to https://github.com/louislam/uptime-kuma/security/advisories/new.
1. Please also create an empty security issue to alert me, as GitHub Advisories do not send a notification, I probably will miss it without this. https://github.com/louislam/uptime-kuma/issues/new?assignees=&labels=help&template=security.md
1. Please also create a empty security issues for alerting me, as GitHub Advisory do not send a notification, I probably will miss without this. https://github.com/louislam/uptime-kuma/issues/new?assignees=&labels=help&template=security.md
Do not use the public issue tracker or discuss it in public as it will cause more damage.
Do not use the public issue tracker or discuss it in the public as it will cause more damage.
## Do you accept other 3rd-party bug bounty platforms?
At this moment, I DO NOT accept other bug bounty platforms, because I am not familiar with these platforms and someone has tried to send a phishing link to me by doing this already. To minimize my own risk, please report through GitHub Advisories only. I will ignore all 3rd-party bug bounty platforms emails.
At this moment, I DO NOT accept other bug bounty platforms, because I am not familiar with these platforms and someone have tried to send a phishing link to me by this already. To minimize my own risk, please report through GitHub Advisories only. I will ignore all 3rd-party bug bounty platforms emails.
## Supported Versions
### Uptime Kuma Versions
You should use or upgrade to the latest version of Uptime Kuma. All `1.X.X` versions are upgradable to the latest version.
You should use or upgrade to the latest version of Uptime Kuma. All `1.X.X` versions are upgradable to the lastest version.
### Upgradable Docker Tags

View File

@@ -3,6 +3,7 @@ import vue from "@vitejs/plugin-vue";
import { defineConfig } from "vite";
import visualizer from "rollup-plugin-visualizer";
import viteCompression from "vite-plugin-compression";
import commonjs from "vite-plugin-commonjs";
const postCssScss = require("postcss-scss");
const postcssRTLCSS = require("postcss-rtlcss");
@@ -21,6 +22,7 @@ export default defineConfig({
"CODESPACE_NAME": JSON.stringify(process.env.CODESPACE_NAME),
},
plugins: [
commonjs(),
vue(),
legacy({
targets: [ "since 2015" ],

db/knex_init_db.js Normal file
View File

@@ -0,0 +1,559 @@
const { R } = require("redbean-node");
const { log } = require("../src/util");
/**
* ⚠️⚠️⚠️⚠️⚠️⚠️ DO NOT ADD ANYTHING HERE!
* IF YOU NEED TO ADD FIELDS, ADD IT TO ./db/knex_migrations
* See ./db/knex_migrations/README.md for more information
* @returns {Promise<void>}
*/
async function createTables() {
log.info("mariadb", "Creating basic tables for MariaDB");
const knex = R.knex;
// TODO: Should check later if it is really the final patch sql file.
// docker_host
await knex.schema.createTable("docker_host", (table) => {
table.increments("id");
table.integer("user_id").unsigned().notNullable();
table.string("docker_daemon", 255);
table.string("docker_type", 255);
table.string("name", 255);
});
// group
await knex.schema.createTable("group", (table) => {
table.increments("id");
table.string("name", 255).notNullable();
table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
table.boolean("public").notNullable().defaultTo(false);
table.boolean("active").notNullable().defaultTo(true);
table.integer("weight").notNullable().defaultTo(1000);
table.integer("status_page_id").unsigned();
});
// proxy
await knex.schema.createTable("proxy", (table) => {
table.increments("id");
table.integer("user_id").unsigned().notNullable();
table.string("protocol", 10).notNullable();
table.string("host", 255).notNullable();
table.smallint("port").notNullable(); // TODO: Maybe a issue with MariaDB, need migration to int
table.boolean("auth").notNullable();
table.string("username", 255).nullable();
table.string("password", 255).nullable();
table.boolean("active").notNullable().defaultTo(true);
table.boolean("default").notNullable().defaultTo(false);
table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
table.index("user_id", "proxy_user_id");
});
// user
await knex.schema.createTable("user", (table) => {
table.increments("id");
table.string("username", 255).notNullable().unique().collate("utf8_general_ci");
table.string("password", 255);
table.boolean("active").notNullable().defaultTo(true);
table.string("timezone", 150);
table.string("twofa_secret", 64);
table.boolean("twofa_status").notNullable().defaultTo(false);
table.string("twofa_last_token", 6);
});
// monitor
await knex.schema.createTable("monitor", (table) => {
table.increments("id");
table.string("name", 150);
table.boolean("active").notNullable().defaultTo(true);
table.integer("user_id").unsigned()
.references("id").inTable("user")
.onDelete("SET NULL")
.onUpdate("CASCADE");
table.integer("interval").notNullable().defaultTo(20);
table.text("url");
table.string("type", 20);
table.integer("weight").defaultTo(2000);
table.string("hostname", 255);
table.integer("port");
table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
table.string("keyword", 255);
table.integer("maxretries").notNullable().defaultTo(0);
table.boolean("ignore_tls").notNullable().defaultTo(false);
table.boolean("upside_down").notNullable().defaultTo(false);
table.integer("maxredirects").notNullable().defaultTo(10);
table.text("accepted_statuscodes_json").notNullable().defaultTo("[\"200-299\"]");
table.string("dns_resolve_type", 5);
table.string("dns_resolve_server", 255);
table.string("dns_last_result", 255);
table.integer("retry_interval").notNullable().defaultTo(0);
table.string("push_token", 20).defaultTo(null);
table.text("method").notNullable().defaultTo("GET");
table.text("body").defaultTo(null);
table.text("headers").defaultTo(null);
table.text("basic_auth_user").defaultTo(null);
table.text("basic_auth_pass").defaultTo(null);
table.integer("docker_host").unsigned()
.references("id").inTable("docker_host");
table.string("docker_container", 255);
table.integer("proxy_id").unsigned()
.references("id").inTable("proxy");
table.boolean("expiry_notification").defaultTo(true);
table.text("mqtt_topic");
table.string("mqtt_success_message", 255);
table.string("mqtt_username", 255);
table.string("mqtt_password", 255);
table.string("database_connection_string", 2000);
table.text("database_query");
table.string("auth_method", 250);
table.text("auth_domain");
table.text("auth_workstation");
table.string("grpc_url", 255).defaultTo(null);
table.text("grpc_protobuf").defaultTo(null);
table.text("grpc_body").defaultTo(null);
table.text("grpc_metadata").defaultTo(null);
table.text("grpc_method").defaultTo(null);
table.text("grpc_service_name").defaultTo(null);
table.boolean("grpc_enable_tls").notNullable().defaultTo(false);
table.string("radius_username", 255);
table.string("radius_password", 255);
table.string("radius_calling_station_id", 50);
table.string("radius_called_station_id", 50);
table.string("radius_secret", 255);
table.integer("resend_interval").notNullable().defaultTo(0);
table.integer("packet_size").notNullable().defaultTo(56);
table.string("game", 255);
});
// heartbeat
await knex.schema.createTable("heartbeat", (table) => {
table.increments("id");
table.boolean("important").notNullable().defaultTo(false);
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.smallint("status").notNullable();
table.text("msg");
table.datetime("time").notNullable();
table.integer("ping");
table.integer("duration").notNullable().defaultTo(0);
table.integer("down_count").notNullable().defaultTo(0);
table.index("important");
table.index([ "monitor_id", "time" ], "monitor_time_index");
table.index("monitor_id");
table.index([ "monitor_id", "important", "time" ], "monitor_important_time_index");
});
// incident
await knex.schema.createTable("incident", (table) => {
table.increments("id");
table.string("title", 255).notNullable();
table.text("content", 255).notNullable();
table.string("style", 30).notNullable().defaultTo("warning");
table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
table.datetime("last_updated_date");
table.boolean("pin").notNullable().defaultTo(true);
table.boolean("active").notNullable().defaultTo(true);
table.integer("status_page_id").unsigned();
});
// maintenance
await knex.schema.createTable("maintenance", (table) => {
table.increments("id");
table.string("title", 150).notNullable();
table.text("description").notNullable();
table.integer("user_id").unsigned()
.references("id").inTable("user")
.onDelete("SET NULL")
.onUpdate("CASCADE");
table.boolean("active").notNullable().defaultTo(true);
table.string("strategy", 50).notNullable().defaultTo("single");
table.datetime("start_date");
table.datetime("end_date");
table.time("start_time");
table.time("end_time");
table.string("weekdays", 250).defaultTo("[]");
table.text("days_of_month").defaultTo("[]");
table.integer("interval_day");
table.index("active");
table.index([ "strategy", "active" ], "manual_active");
table.index("user_id", "maintenance_user_id");
});
// status_page
await knex.schema.createTable("status_page", (table) => {
table.increments("id");
table.string("slug", 255).notNullable().unique().collate("utf8_general_ci");
table.string("title", 255).notNullable();
table.text("description");
table.string("icon", 255).notNullable();
table.string("theme", 30).notNullable();
table.boolean("published").notNullable().defaultTo(true);
table.boolean("search_engine_index").notNullable().defaultTo(true);
table.boolean("show_tags").notNullable().defaultTo(false);
table.string("password");
table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
table.datetime("modified_date").notNullable().defaultTo(knex.fn.now());
table.text("footer_text");
table.text("custom_css");
table.boolean("show_powered_by").notNullable().defaultTo(true);
table.string("google_analytics_tag_id");
});
// maintenance_status_page
await knex.schema.createTable("maintenance_status_page", (table) => {
table.increments("id");
table.integer("status_page_id").unsigned().notNullable()
.references("id").inTable("status_page")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("maintenance_id").unsigned().notNullable()
.references("id").inTable("maintenance")
.onDelete("CASCADE")
.onUpdate("CASCADE");
});
// maintenance_timeslot
await knex.schema.createTable("maintenance_timeslot", (table) => {
table.increments("id");
table.integer("maintenance_id").unsigned().notNullable()
.references("id").inTable("maintenance")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.datetime("start_date").notNullable();
table.datetime("end_date");
table.boolean("generated_next").defaultTo(false);
table.index("maintenance_id");
table.index([ "maintenance_id", "start_date", "end_date" ], "active_timeslot_index");
table.index("generated_next", "generated_next_index");
});
// monitor_group
await knex.schema.createTable("monitor_group", (table) => {
table.increments("id");
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("group_id").unsigned().notNullable()
.references("id").inTable("group")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("weight").notNullable().defaultTo(1000);
table.boolean("send_url").notNullable().defaultTo(false);
table.index([ "monitor_id", "group_id" ], "fk");
});
// monitor_maintenance
await knex.schema.createTable("monitor_maintenance", (table) => {
table.increments("id");
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("maintenance_id").unsigned().notNullable()
.references("id").inTable("maintenance")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.index("maintenance_id", "maintenance_id_index2");
table.index("monitor_id", "monitor_id_index");
});
// notification
await knex.schema.createTable("notification", (table) => {
table.increments("id");
table.string("name", 255);
table.string("config", 255); // TODO: should use TEXT!
table.boolean("active").notNullable().defaultTo(true);
table.integer("user_id").unsigned();
table.boolean("is_default").notNullable().defaultTo(false);
});
// monitor_notification
await knex.schema.createTable("monitor_notification", (table) => {
table.increments("id").unsigned(); // TODO: no auto increment????
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("notification_id").unsigned().notNullable()
.references("id").inTable("notification")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.index([ "monitor_id", "notification_id" ], "monitor_notification_index");
});
// tag
await knex.schema.createTable("tag", (table) => {
table.increments("id");
table.string("name", 255).notNullable();
table.string("color", 255).notNullable();
table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
});
// monitor_tag
await knex.schema.createTable("monitor_tag", (table) => {
table.increments("id");
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("tag_id").unsigned().notNullable()
.references("id").inTable("tag")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.text("value");
});
// monitor_tls_info
await knex.schema.createTable("monitor_tls_info", (table) => {
table.increments("id");
table.integer("monitor_id").unsigned().notNullable(); //TODO: no fk ?
table.text("info_json");
});
// notification_sent_history
await knex.schema.createTable("notification_sent_history", (table) => {
table.increments("id");
table.string("type", 50).notNullable();
table.integer("monitor_id").unsigned().notNullable();
table.integer("days").notNullable();
table.unique([ "type", "monitor_id", "days" ]);
table.index([ "type", "monitor_id", "days" ], "good_index");
});
// setting
await knex.schema.createTable("setting", (table) => {
table.increments("id");
table.string("key", 200).notNullable().unique().collate("utf8_general_ci");
table.text("value");
table.string("type", 20);
});
// status_page_cname
await knex.schema.createTable("status_page_cname", (table) => {
table.increments("id");
table.integer("status_page_id").unsigned()
.references("id").inTable("status_page")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.string("domain").notNullable().unique().collate("utf8_general_ci");
});
/*********************
* Converted Patch here
*********************/
// 2023-06-30-1348-http-body-encoding.js
// ALTER TABLE monitor ADD http_body_encoding VARCHAR(25);
// UPDATE monitor SET http_body_encoding = 'json' WHERE (type = 'http' or type = 'keyword') AND http_body_encoding IS NULL;
await knex.schema.table("monitor", function (table) {
table.string("http_body_encoding", 25);
});
await knex("monitor")
.where(function () {
this.where("type", "http").orWhere("type", "keyword");
})
.whereNull("http_body_encoding")
.update({
http_body_encoding: "json",
});
// 2023-06-30-1354-add-description-monitor.js
// ALTER TABLE monitor ADD description TEXT default null;
await knex.schema.table("monitor", function (table) {
table.text("description").defaultTo(null);
});
// 2023-06-30-1357-api-key-table.js
/*
CREATE TABLE [api_key] (
[id] INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
[key] VARCHAR(255) NOT NULL,
[name] VARCHAR(255) NOT NULL,
[user_id] INTEGER NOT NULL,
[created_date] DATETIME DEFAULT (DATETIME('now')) NOT NULL,
[active] BOOLEAN DEFAULT 1 NOT NULL,
[expires] DATETIME DEFAULT NULL,
CONSTRAINT FK_user FOREIGN KEY ([user_id]) REFERENCES [user]([id]) ON DELETE CASCADE ON UPDATE CASCADE
);
*/
await knex.schema.createTable("api_key", function (table) {
table.increments("id").primary();
table.string("key", 255).notNullable();
table.string("name", 255).notNullable();
table.integer("user_id").unsigned().notNullable()
.references("id").inTable("user")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.dateTime("created_date").defaultTo(knex.fn.now()).notNullable();
table.boolean("active").defaultTo(1).notNullable();
table.dateTime("expires").defaultTo(null);
});
// 2023-06-30-1400-monitor-tls.js
/*
ALTER TABLE monitor
ADD tls_ca TEXT default null;
ALTER TABLE monitor
ADD tls_cert TEXT default null;
ALTER TABLE monitor
ADD tls_key TEXT default null;
*/
await knex.schema.table("monitor", function (table) {
table.text("tls_ca").defaultTo(null);
table.text("tls_cert").defaultTo(null);
table.text("tls_key").defaultTo(null);
});
// 2023-06-30-1401-maintenance-cron.js
/*
-- 999 characters. https://stackoverflow.com/questions/46134830/maximum-length-for-cron-job
DROP TABLE maintenance_timeslot;
ALTER TABLE maintenance ADD cron TEXT;
ALTER TABLE maintenance ADD timezone VARCHAR(255);
ALTER TABLE maintenance ADD duration INTEGER;
*/
await knex.schema
.dropTableIfExists("maintenance_timeslot")
.table("maintenance", function (table) {
table.text("cron");
table.string("timezone", 255);
table.integer("duration");
});
// 2023-06-30-1413-add-parent-monitor.js.
/*
ALTER TABLE monitor
ADD parent INTEGER REFERENCES [monitor] ([id]) ON DELETE SET NULL ON UPDATE CASCADE;
*/
await knex.schema.table("monitor", function (table) {
table.integer("parent").unsigned()
.references("id").inTable("monitor")
.onDelete("SET NULL")
.onUpdate("CASCADE");
});
/*
patch-add-invert-keyword.sql
ALTER TABLE monitor
ADD invert_keyword BOOLEAN default 0 not null;
*/
await knex.schema.table("monitor", function (table) {
table.boolean("invert_keyword").defaultTo(0).notNullable();
});
/*
patch-added-json-query.sql
ALTER TABLE monitor
ADD json_path TEXT;
ALTER TABLE monitor
ADD expected_value VARCHAR(255);
*/
await knex.schema.table("monitor", function (table) {
table.text("json_path");
table.string("expected_value", 255);
});
/*
patch-added-kafka-producer.sql
ALTER TABLE monitor
ADD kafka_producer_topic VARCHAR(255);
ALTER TABLE monitor
ADD kafka_producer_brokers TEXT;
ALTER TABLE monitor
ADD kafka_producer_ssl INTEGER;
ALTER TABLE monitor
ADD kafka_producer_allow_auto_topic_creation VARCHAR(255);
ALTER TABLE monitor
ADD kafka_producer_sasl_options TEXT;
ALTER TABLE monitor
ADD kafka_producer_message TEXT;
*/
await knex.schema.table("monitor", function (table) {
table.string("kafka_producer_topic", 255);
table.text("kafka_producer_brokers");
table.integer("kafka_producer_ssl");
table.string("kafka_producer_allow_auto_topic_creation", 255);
table.text("kafka_producer_sasl_options");
table.text("kafka_producer_message");
});
/*
patch-add-certificate-expiry-status-page.sql
ALTER TABLE status_page
ADD show_certificate_expiry BOOLEAN default 0 NOT NULL;
*/
await knex.schema.table("status_page", function (table) {
table.boolean("show_certificate_expiry").defaultTo(0).notNullable();
});
/*
patch-monitor-oauth-cc.sql
ALTER TABLE monitor
ADD oauth_client_id TEXT default null;
ALTER TABLE monitor
ADD oauth_client_secret TEXT default null;
ALTER TABLE monitor
ADD oauth_token_url TEXT default null;
ALTER TABLE monitor
ADD oauth_scopes TEXT default null;
ALTER TABLE monitor
ADD oauth_auth_method TEXT default null;
*/
await knex.schema.table("monitor", function (table) {
table.text("oauth_client_id").defaultTo(null);
table.text("oauth_client_secret").defaultTo(null);
table.text("oauth_token_url").defaultTo(null);
table.text("oauth_scopes").defaultTo(null);
table.text("oauth_auth_method").defaultTo(null);
});
/*
patch-add-timeout-monitor.sql
ALTER TABLE monitor
ADD timeout DOUBLE default 0 not null;
*/
await knex.schema.table("monitor", function (table) {
table.double("timeout").defaultTo(0).notNullable();
});
/*
patch-add-gamedig-given-port.sql
ALTER TABLE monitor
ADD gamedig_given_port_only BOOLEAN default 1 not null;
*/
await knex.schema.table("monitor", function (table) {
table.boolean("gamedig_given_port_only").defaultTo(1).notNullable();
});
log.info("mariadb", "Created basic tables for MariaDB");
}
module.exports = {
createTables,
};
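
The module exports only `createTables`; per the warning at the top of the file, this is a frozen snapshot of the base schema, and anything newer belongs in `./db/knex_migrations`. A hedged sketch of a caller, assuming `R.knex` has already been bound to a live connection (the actual call site in `server/database.js` is not part of this diff):

```js
// Hypothetical bootstrap for an empty database (not the actual server code).
const { createTables } = require("./db/knex_init_db");

async function setupFreshDatabase() {
    // Only valid on an empty database: the createTable calls above
    // would fail if any of these tables already exist.
    await createTables();
}
```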

View File

@@ -0,0 +1,57 @@
## Info
https://knexjs.org/guide/migrations.html#knexfile-in-other-languages
## Basic rules
- All tables must have a primary key named `id`
- Filename format: `YYYY-MM-DD-HHMM-patch-name.js`
- Avoid native SQL syntax; use knex methods instead, because Uptime Kuma supports multiple databases
## Template
Filename: `YYYY-MM-DD-HHMM-patch-name.js`
```js
exports.up = function(knex) {
};
exports.down = function(knex) {
};
// exports.config = { transaction: false };
```
## Example
Filename: 2023-06-30-1348-create-user-and-product.js
```js
exports.up = function(knex) {
return knex.schema
.createTable('user', function (table) {
table.increments('id');
table.string('first_name', 255).notNullable();
table.string('last_name', 255).notNullable();
})
.createTable('product', function (table) {
table.increments('id');
table.decimal('price').notNullable();
table.string('name', 1000).notNullable();
}).then(() => {
return knex("product").insert([
{ price: 10, name: "Apple" },
{ price: 20, name: "Orange" },
]);
});
};
exports.down = function(knex) {
return knex.schema
.dropTable("product")
.dropTable("user");
};
```
https://knexjs.org/guide/migrations.html#transactions-in-migrations
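
The README covers authoring migrations; applying them goes through knex's programmatic migration API. A minimal sketch with all connection details assumed (Uptime Kuma's real bootstrap lives in server/database.js and is not shown here):

```js
const knex = require("knex")({
    client: "sqlite3", // assumption: any knex-supported client behaves the same here
    connection: { filename: "./data/kuma.db" }, // hypothetical path
    useNullAsDefault: true,
    migrations: { directory: "./db/knex_migrations" },
});

async function migrate() {
    // Applies every pending migration file in filename order, as one batch.
    await knex.migrate.latest();
    await knex.destroy();
}
```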

View File

@@ -0,0 +1,3 @@
# Don't create a new migration file here
Please go to ./db/knex_migrations/README.md

View File

@@ -1,34 +0,0 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
-- Rename COLUMNs to another one (suffixed by `_old`)
ALTER TABLE monitor
RENAME COLUMN kafka_producer_ssl TO kafka_producer_ssl_old;
ALTER TABLE monitor
RENAME COLUMN kafka_producer_allow_auto_topic_creation TO kafka_producer_allow_auto_topic_creation_old;
-- Add correct COLUMNs
ALTER TABLE monitor
ADD COLUMN kafka_producer_ssl BOOLEAN default 0 NOT NULL;
ALTER TABLE monitor
ADD COLUMN kafka_producer_allow_auto_topic_creation BOOLEAN default 0 NOT NULL;
-- These SQL is still not fully safe. See https://github.com/louislam/uptime-kuma/issues/4039.
-- Set bring old values from `_old` COLUMNs to correct ones
-- UPDATE monitor SET kafka_producer_allow_auto_topic_creation = monitor.kafka_producer_allow_auto_topic_creation_old
-- WHERE monitor.kafka_producer_allow_auto_topic_creation_old IS NOT NULL;
-- UPDATE monitor SET kafka_producer_ssl = monitor.kafka_producer_ssl_old
-- WHERE monitor.kafka_producer_ssl_old IS NOT NULL;
-- Remove old COLUMNs
ALTER TABLE monitor
DROP COLUMN kafka_producer_allow_auto_topic_creation_old;
ALTER TABLE monitor
DROP COLUMN kafka_producer_ssl_old;
COMMIT;

View File

@@ -1,10 +0,0 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
-- SQLite: Change the data type of the column "config" from VARCHAR to TEXT
ALTER TABLE notification RENAME COLUMN config TO config_old;
ALTER TABLE notification ADD COLUMN config TEXT;
UPDATE notification SET config = config_old;
ALTER TABLE notification DROP COLUMN config_old;
COMMIT;

View File

@@ -1,7 +0,0 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
UPDATE monitor SET timeout = (interval * 0.8)
WHERE timeout IS NULL OR timeout <= 0;
COMMIT;

View File

@@ -1,8 +0,0 @@
# DON'T UPDATE TO alpine3.13, 1.14, see #41.
FROM node:16-alpine3.12
WORKDIR /app
# Install apprise, iputils for non-root ping, setpriv
RUN apk add --no-cache iputils setpriv dumb-init python3 py3-cryptography py3-pip py3-six py3-yaml py3-click py3-markdown py3-requests py3-requests-oauthlib git && \
pip3 --no-cache-dir install apprise==1.4.0 && \
rm -rf /root/.cache

View File

@@ -1,12 +1,11 @@
# DON'T UPDATE TO bullseye-slim, see #372.
# There is no 20-buster-slim for armv7 unfortunately, 18-buster-slim is the last one for Uptime Kuma v1.
FROM node:18-buster-slim
# If the image changed, the second stage image should be changed too
FROM node:20-bookworm-slim AS base2-slim
ARG TARGETPLATFORM
WORKDIR /app
# Specify --no-install-recommends to skip unused dependencies, make the base much smaller!
# python3* = apprise's dependencies
# apprise = for notifications (From testing repo)
# sqlite3 = for debugging
# iputils-ping = for ping
# util-linux = for setpriv (Should be dropped in 2.0.0?)
@@ -15,29 +14,25 @@ WORKDIR /app
# ca-certificates = keep the cert up-to-date
# sudo = for start service nscd with non-root user
# nscd = for better DNS caching
# (pip) apprise = for notifications
RUN apt-get update && \
apt-get --yes --no-install-recommends install \
python3 python3-pip python3-cryptography python3-six python3-yaml python3-click python3-markdown python3-requests python3-requests-oauthlib \
sqlite3 \
RUN echo "deb http://deb.debian.org/debian testing main" >> /etc/apt/sources.list && \
apt update && \
apt --yes --no-install-recommends -t testing install apprise sqlite3 ca-certificates && \
apt --yes --no-install-recommends -t stable install \
iputils-ping \
util-linux \
dumb-init \
curl \
ca-certificates \
sudo \
nscd && \
pip3 --no-cache-dir install apprise==1.6.0 && \
rm -rf /var/lib/apt/lists/* && \
apt --yes autoremove
# Install cloudflared
RUN set -eux && \
mkdir -p --mode=0755 /usr/share/keyrings && \
curl --fail --show-error --silent --location --insecure https://pkg.cloudflare.com/cloudflare-main.gpg --output /usr/share/keyrings/cloudflare-main.gpg && \
echo 'deb [signed-by=/usr/share/keyrings/cloudflare-main.gpg] https://pkg.cloudflare.com/cloudflared buster main' | tee /etc/apt/sources.list.d/cloudflared.list && \
apt-get update && \
apt-get install --yes --no-install-recommends cloudflared && \
RUN curl https://pkg.cloudflare.com/cloudflare-main.gpg --output /usr/share/keyrings/cloudflare-main.gpg && \
echo 'deb [signed-by=/usr/share/keyrings/cloudflare-main.gpg] https://pkg.cloudflare.com/cloudflared bullseye main' | tee /etc/apt/sources.list.d/cloudflared.list && \
apt update && \
apt install --yes --no-install-recommends -t stable cloudflared && \
cloudflared version && \
rm -rf /var/lib/apt/lists/* && \
apt --yes autoremove
@@ -46,3 +41,16 @@ RUN set -eux && \
COPY ./docker/etc/nscd.conf /etc/nscd.conf
COPY ./docker/etc/sudoers /etc/sudoers
# Full Base Image
# MariaDB, Chromium and fonts
# Not working for armv7, so use the older version (10.5) of MariaDB from the debian repo
# curl -LsS https://r.mariadb.com/downloads/mariadb_repo_setup | bash -s -- --mariadb-server-version="mariadb-11.1" && \
FROM base2-slim AS base2
ENV UPTIME_KUMA_ENABLE_EMBEDDED_MARIADB=1
RUN apt update && \
apt --yes --no-install-recommends install chromium fonts-indic fonts-noto fonts-noto-cjk mariadb-server && \
apt --yes remove curl && \
rm -rf /var/lib/apt/lists/* && \
apt --yes autoremove && \
chown -R node:node /var/lib/mysql

View File

@@ -0,0 +1,14 @@
version: '3.8'
services:
uptime-kuma:
container_name: uptime-kuma-dev
image: louislam/uptime-kuma:nightly2
volumes:
#- ./data:/app/data
- ../server:/app/server
- ../db:/app/db
ports:
- "3001:3001" # <Host Port>:<Container Port>
- "3307:3306"

View File

@@ -1,14 +1,15 @@
# Simple docker-compose.yml
# You can change your port or volume location
version: '3.3'
version: '3.8'
services:
uptime-kuma:
image: louislam/uptime-kuma:1
image: louislam/uptime-kuma:2
container_name: uptime-kuma
volumes:
- ./uptime-kuma-data:/app/data
- uptime-kuma:/app/data
ports:
- 3001:3001 # <Host Port>:<Container Port>
- "3001:3001" # <Host Port>:<Container Port>
restart: always
volumes:
uptime-kuma:

View File

@@ -1,6 +1,8 @@
ARG BASE_IMAGE=louislam/uptime-kuma:base2
############################################
# Build in Golang
# Run npm run build-healthcheck-armv7 in the host first, another it will be super slow where it is building the armv7 healthcheck
# Run npm run build-healthcheck-armv7 in the host first, otherwise it will be super slow where it is building the armv7 healthcheck
# Check file: builder-go.dockerfile
############################################
FROM louislam/uptime-kuma:builder-go AS build_healthcheck
@@ -8,49 +10,47 @@ FROM louislam/uptime-kuma:builder-go AS build_healthcheck
############################################
# Build in Node.js
############################################
FROM louislam/uptime-kuma:base-debian AS build
FROM louislam/uptime-kuma:base2 AS build
USER node
WORKDIR /app
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
COPY .npmrc .npmrc
COPY package.json package.json
COPY package-lock.json package-lock.json
COPY --chown=node:node .npmrc .npmrc
COPY --chown=node:node package.json package.json
COPY --chown=node:node package-lock.json package-lock.json
RUN npm ci --omit=dev
COPY . .
COPY --from=build_healthcheck /app/extra/healthcheck /app/extra/healthcheck
RUN chmod +x /app/extra/entrypoint.sh
COPY --chown=node:node --from=build_healthcheck /app/extra/healthcheck /app/extra/healthcheck
############################################
# ⭐ Main Image
############################################
FROM louislam/uptime-kuma:base-debian AS release
FROM $BASE_IMAGE AS release
USER node
WORKDIR /app
ENV UPTIME_KUMA_IS_CONTAINER=1
# Copy app files from build layer
COPY --from=build /app /app
COPY --chown=node:node --from=build /app /app
EXPOSE 3001
VOLUME ["/app/data"]
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD extra/healthcheck
ENTRYPOINT ["/usr/bin/dumb-init", "--", "extra/entrypoint.sh"]
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
CMD ["node", "server/server.js"]
############################################
# Mark as Nightly
############################################
FROM release AS nightly
USER node
RUN npm run mark-as-nightly
############################################
# Build an image for testing pr
############################################
FROM louislam/uptime-kuma:base-debian AS pr-test
FROM louislam/uptime-kuma:base2 AS pr-test2
WORKDIR /app
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
## Install Git
@@ -78,7 +78,7 @@ CMD ["npm", "run", "start-pr-test"]
############################################
# Upload the artifact to Github
############################################
FROM louislam/uptime-kuma:base-debian AS upload-artifact
FROM louislam/uptime-kuma:base2 AS upload-artifact
WORKDIR /
RUN apt update && \
apt --yes install curl file

View File

@@ -1,27 +0,0 @@
FROM louislam/uptime-kuma:base-alpine AS build
WORKDIR /app
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
COPY .npmrc .npmrc
COPY package.json package.json
COPY package-lock.json package-lock.json
RUN npm ci --omit=dev
COPY . .
RUN chmod +x /app/extra/entrypoint.sh
FROM louislam/uptime-kuma:base-alpine AS release
WORKDIR /app
# Copy app files from build layer
COPY --from=build /app /app
EXPOSE 3001
VOLUME ["/app/data"]
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD node extra/healthcheck.js
ENTRYPOINT ["/usr/bin/dumb-init", "--", "extra/entrypoint.sh"]
CMD ["node", "server/server.js"]
FROM release AS nightly
RUN npm run mark-as-nightly

View File

@@ -36,6 +36,8 @@ if (! exists) {
/**
* Commit updated files
* @param {string} version Version to update to
* @returns {void}
* @throws Error committing files
*/
function commit(version) {
let msg = "Update to " + version;
@@ -55,6 +57,7 @@ function commit(version) {
/**
* Create a tag with the specified version
* @param {string} version Tag to create
* @returns {void}
*/
function tag(version) {
let res = childProcess.spawnSync("git", [ "tag", version ]);
@@ -68,6 +71,7 @@ function tag(version) {
* Check if a tag exists for the specified version
* @param {string} version Version to check
* @returns {boolean} Does the tag already exist
* @throws Version is not valid
*/
function tagExists(version) {
if (! version) {

View File

@@ -15,6 +15,7 @@ download(url);
/**
* Downloads the latest version of the dist from a GitHub release.
* @param {string} url The URL to download from.
* @returns {void}
*
* Generated by Trelent
*/

View File

@@ -1,21 +0,0 @@
#!/usr/bin/env sh
# set -e Exit the script if an error happens
set -e
PUID=${PUID=0}
PGID=${PGID=0}
files_ownership () {
# -h Changes the ownership of an encountered symbolic link and not that of the file or directory pointed to by the symbolic link.
# -R Recursively descends the specified directories
# -c Like verbose but report only when a change is made
chown -hRc "$PUID":"$PGID" /app/data
}
echo "==> Performing startup jobs and maintenance tasks"
files_ownership
echo "==> Starting application with user $PUID group $PGID"
# --clear-groups Clear supplementary groups.
exec setpriv --reuid "$PUID" --regid "$PGID" --clear-groups "$@"

View File

@@ -4,12 +4,12 @@ const fs = require("fs");
* to avoid the runtime deprecation warning triggered for using `fs.rmdirSync` with `{ recursive: true }` in Node.js v16,
* or the `recursive` property removing completely in the future Node.js version.
* See the link below.
*
* @todo Once we drop the support for Node.js v14 (or at least versions before v14.14.0), we can safely replace this function with `fs.rmSync`, since `fs.rmSync` was add in Node.js v14.14.0 and currently we supports all the Node.js v14 versions that include the versions before the v14.14.0, and this function have almost the same signature with `fs.rmSync`.
* @link https://nodejs.org/docs/latest-v16.x/api/deprecations.html#dep0147-fsrmdirpath--recursive-true- the deprecation infomation of `fs.rmdirSync`
* @link https://nodejs.org/docs/latest-v16.x/api/fs.html#fsrmsyncpath-options the document of `fs.rmSync`
* @param {fs.PathLike} path Valid types for path values in "fs".
* @param {fs.RmDirOptions} [options] options for `fs.rmdirSync`, if `fs.rmSync` is available and property `recursive` is true, it will automatically have property `force` with value `true`.
* @param {fs.RmDirOptions} options options for `fs.rmdirSync`, if `fs.rmSync` is available and property `recursive` is true, it will automatically have property `force` with value `true`.
* @returns {void}
*/
const rmSync = (path, options) => {
if (typeof fs.rmSync === "function") {
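
For reference, the wrapper keeps the `fs.rmdirSync` call shape, so callers stay version-agnostic. A usage sketch; the export shape is an assumption, since the module's tail is cut off in the hunk above:

```js
// Assumes extra/rm.js ends with `module.exports = rmSync` (not shown above).
const rmSync = require("./rm");

// Recursively delete a build directory on any supported Node.js version.
rmSync("./dist", { recursive: true });
```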

View File

@@ -6,7 +6,7 @@
* ⚠️ Deprecated: Changed to healthcheck.go, it will be deleted in the future.
* This script should be run after a period of time (180s), because the server may need some time to prepare.
*/
const FBSD = /^freebsd/.test(process.platform);
const { FBSD } = require("../server/util-server");
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";

View File

@@ -189,15 +189,13 @@ if (type == "local") {
bash("check=$(git --version)");
if (check == "") {
error = 1;
println("Error: git is not found!");
println("help: an installation guide is available at https://git-scm.com/book/en/v2/Getting-Started-Installing-Git");
println("Error: git is missing");
}
bash("check=$(node -v)");
if (check == "") {
error = 1;
println("Error: node is not found");
println("help: an installation guide is available at https://nodejs.org/en/download");
println("Error: node is missing");
}
if (error > 0) {
@@ -218,7 +216,6 @@ if (type == "local") {
bash("check=$(pm2 --version)");
if (check == "") {
println("Error: pm2 is not found!");
println("help: an installation guide is available at https://pm2.keymetrics.io/docs/usage/quick-start/");
bash("exit 1");
}
@@ -235,7 +232,6 @@ if (type == "local") {
bash("check=$(docker -v)");
if (check == "") {
println("Error: docker is not found!");
println("help: an installation guide is available at https://docs.docker.com/desktop/");
bash("exit 1");
}
@@ -243,7 +239,6 @@ if (type == "local") {
bash("if [[ \"$check\" == *\"Is the docker daemon running\"* ]]; then
\"echo\" \"Error: docker is not running\"
\"echo\" \"help: a troubleshooting guide is available at https://docs.docker.com/config/daemon/troubleshoot/\"
\"exit\" \"1\"
fi");

View File

@@ -1,44 +0,0 @@
// Generate on GitHub
const input = `
* Add Korean translation by @Alanimdeo in https://github.com/louislam/dockge/pull/86
`;
const template = `
### 🆕 New Features
### 💇‍♀️ Improvements
### 🐞 Bug Fixes
### ⬆️ Security Fixes
### 🦎 Translation Contributions
### Others
- Other small changes, code refactoring and comment/doc updates in this repo:
`;
const lines = input.split("\n").filter((line) => line.trim() !== "");
for (const line of lines) {
// Split the last " by "
const usernamePullRequestURL = line.split(" by ").pop();
if (!usernamePullRequestURL) {
console.log("Unable to parse", line);
continue;
}
const [ username, pullRequestURL ] = usernamePullRequestURL.split(" in ");
const pullRequestID = "#" + pullRequestURL.split("/").pop();
let message = line.split(" by ").shift();
if (!message) {
console.log("Unable to parse", line);
continue;
}
message = message.split("* ").pop();
console.log("-", pullRequestID, message, `(Thanks ${username})`);
}
console.log(template);
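For reference, running this (now deleted) script on the sample input above would print:

- #86 Add Korean translation (Thanks @Alanimdeo)

followed by the heading template.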


@@ -12,7 +12,7 @@ const rl = readline.createInterface({
});
const main = async () => {
Database.init(args);
Database.initDataDir(args);
await Database.connect();
try {


@@ -5,8 +5,6 @@ const { R } = require("redbean-node");
const readline = require("readline");
const { initJWTSecret } = require("../server/util-server");
const User = require("../server/model/user");
const { io } = require("socket.io-client");
const { localWebSocketURL } = require("../server/config");
const args = require("args-parser")(process.argv);
const rl = readline.createInterface({
input: process.stdin,
@@ -15,7 +13,7 @@ const rl = readline.createInterface({
const main = async () => {
console.log("Connecting the database");
Database.init(args);
Database.initDataDir(args);
await Database.connect(false, false, true);
try {
@@ -38,16 +36,12 @@ const main = async () => {
// Reset all sessions by reset jwt secret
await initJWTSecret();
// Disconnect all other socket clients of the user
await disconnectAllSocketClients(user.username, password);
break;
} else {
console.log("Passwords do not match, please try again.");
}
}
console.log("Password reset successfully.");
}
} catch (e) {
console.error("Error: " + e.message);
@@ -72,44 +66,6 @@ function question(question) {
});
}
function disconnectAllSocketClients(username, password) {
return new Promise((resolve) => {
console.log("Connecting to " + localWebSocketURL + " to disconnect all other socket clients");
// Disconnect all socket connections
const socket = io(localWebSocketURL, {
reconnection: false,
timeout: 5000,
});
socket.on("connect", () => {
socket.emit("login", {
username,
password,
}, (res) => {
if (res.ok) {
console.log("Logged in.");
socket.emit("disconnectOtherSocketClients");
} else {
console.warn("Login failed.");
console.warn("Please restart the server to disconnect all sessions.");
}
socket.close();
});
});
socket.on("connect_error", function () {
// The localWebSocketURL is not guaranteed to be working for some complicated Uptime Kuma setup
// Ask the user to restart the server manually
console.warn("Failed to connect to " + localWebSocketURL);
console.warn("Please restart the server to disconnect all sessions manually.");
resolve();
});
socket.on("disconnect", () => {
resolve();
});
});
}
if (!process.env.TEST_BACKEND) {
main();
}
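Usage sketch: this script is exposed as the `reset-password` npm script (see package.json below), and `--data-dir` is read by `Database.initDataDir` above:

npm run reset-password -- --data-dir=./data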


@@ -138,7 +138,7 @@ server.listen({
/**
* Get human readable request type from request code
* @param {number} code Request code to translate
* @returns {string} Human readable request type
* @returns {string|void} Human readable request type
*/
function type(code) {
for (let name in Packet.TYPE) {
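// Presumably the loop finishes as a reverse lookup, which is what the
// {string|void} return type above reflects: undefined when no name matches.
//     if (Packet.TYPE[name] === code) {
//         return name;
//     }
// }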


@@ -7,11 +7,17 @@ class SimpleMqttServer {
aedes = require("aedes")();
server = require("net").createServer(this.aedes.handle);
/**
* @param {number} port Port to listen on
*/
constructor(port) {
this.port = port;
}
/** Start the MQTT server */
/**
* Start the MQTT server
* @returns {void}
*/
start() {
this.server.listen(this.port, () => {
console.log("server started and listening on port ", this.port);


@@ -12,6 +12,7 @@ import rmSync from "../fs-rmSync.js";
* created with this code if one does not already exist
* @param {string} baseLang The second base language file to copy. This
* will be ignored if set to "en" as en.js is copied by default
* @returns {void}
*/
function copyFiles(langCode, baseLang) {
if (fs.existsSync("./languages")) {
@@ -33,7 +34,8 @@ function copyFiles(langCode, baseLang) {
/**
* Update the specified language file
* @param {string} langCode Language code to update
* @param {string} baseLang Second language to copy keys from
* @param {string} baseLangCode Second language to copy keys from
* @returns {void}
*/
async function updateLanguage(langCode, baseLangCode) {
const en = (await import("./languages/en.js")).default;


@@ -39,6 +39,8 @@ if (! exists) {
/**
* Commit updated files
* @param {string} version Version to update to
* @returns {void}
* @throws Error when committing files
*/
function commit(version) {
let msg = "Update to " + version;
@@ -55,6 +57,7 @@ function commit(version) {
/**
* Create a tag with the specified version
* @param {string} version Tag to create
* @returns {void}
*/
function tag(version) {
let res = childProcess.spawnSync("git", [ "tag", version ]);
@@ -65,6 +68,7 @@ function tag(version) {
* Check if a tag exists for the specified version
* @param {string} version Version to check
* @returns {boolean} Does the tag already exist
* @throws Version is not valid
*/
function tagExists(version) {
if (! version) {


@@ -13,6 +13,7 @@ updateWiki(newVersion);
/**
* Update the wiki with new version number
* @param {string} newVersion Version to update to
* @returns {void}
*/
function updateWiki(newVersion) {
const wikiDir = "./tmp/wiki";
@@ -46,6 +47,7 @@ function updateWiki(newVersion) {
/**
* Check if a directory exists and then delete it
* @param {string} dir Directory to delete
* @returns {void}
*/
function safeDelete(dir) {
if (fs.existsSync(dir)) {


@@ -156,14 +156,12 @@ fi
check=$(git --version)
if [ "$check" == "" ]; then
error=$((1))
"echo" "-e" "Error: git is not found!"
"echo" "-e" "help: an installation guide is available at https://git-scm.com/book/en/v2/Getting-Started-Installing-Git"
"echo" "-e" "Error: git is missing"
fi
check=$(node -v)
if [ "$check" == "" ]; then
error=$((1))
"echo" "-e" "Error: node is not found"
"echo" "-e" "help: an installation guide is available at https://nodejs.org/en/download"
"echo" "-e" "Error: node is missing"
fi
if [ $(($error > 0)) == 1 ]; then
"echo" "-e" "Please install above missing software"
@@ -182,7 +180,6 @@ fi
check=$(pm2 --version)
if [ "$check" == "" ]; then
"echo" "-e" "Error: pm2 is not found!"
"echo" "-e" "help: an installation guide is available at https://pm2.keymetrics.io/docs/usage/quick-start/"
exit 1
fi
mkdir -p $installPath
@@ -195,13 +192,11 @@ else
check=$(docker -v)
if [ "$check" == "" ]; then
"echo" "-e" "Error: docker is not found!"
"echo" "-e" "help: an installation guide is available at https://docs.docker.com/desktop/"
exit 1
fi
check=$(docker info)
if [[ "$check" == *"Is the docker daemon running"* ]]; then
"echo" "Error: docker is not running"
"echo" "help: a troubleshooting guide is available at https://docs.docker.com/config/daemon/troubleshoot/"
"exit" "1"
fi
if [ "$3" != "" ]; then

package-lock.json (generated): 5,676 lines changed; diff suppressed because it is too large.

@@ -1,6 +1,6 @@
{
"name": "uptime-kuma",
"version": "1.23.11",
"version": "1.23.0-beta.1",
"license": "MIT",
"repository": {
"type": "git",
@@ -13,12 +13,10 @@
"install-legacy": "npm install",
"update-legacy": "npm update",
"lint:js": "eslint --ext \".js,.vue\" --ignore-path .gitignore .",
"lint:js-prod": "npm run lint:js -- --max-warnings 0",
"lint-fix:js": "eslint --ext \".js,.vue\" --fix --ignore-path .gitignore .",
"lint:style": "stylelint \"**/*.{vue,css,scss}\" --ignore-path .gitignore",
"lint-fix:style": "stylelint \"**/*.{vue,css,scss}\" --fix --ignore-path .gitignore",
"lint": "npm run lint:js && npm run lint:style",
"lint:prod": "npm run lint:js-prod && npm run lint:style",
"dev": "concurrently -k -r \"wait-on tcp:3000 && npm run start-server-dev \" \"npm run start-frontend-dev\"",
"start-frontend-dev": "cross-env NODE_ENV=development vite --host --config ./config/vite.config.js",
"start-frontend-devcontainer": "cross-env NODE_ENV=development DEVCONTAINER=1 vite --host --config ./config/vite.config.js",
@@ -31,18 +29,17 @@
"jest-backend": "cross-env TEST_BACKEND=1 jest --runInBand --detectOpenHandles --forceExit --config=./config/jest-backend.config.js",
"tsc": "tsc",
"vite-preview-dist": "vite preview --host --config ./config/vite.config.js",
"build-docker": "npm run build && npm run build-docker-debian && npm run build-docker-alpine",
"build-docker-alpine-base": "docker buildx build -f docker/alpine-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base-alpine . --push",
"build-docker-debian-base": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base-debian . --push",
"build-docker": "npm run build && npm run build-docker-full && npm run build-docker-slim",
"build-docker-base": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base2 --target base2 . --push",
"build-docker-base-slim": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base2-slim --target base2-slim . --push",
"build-docker-builder-go": "docker buildx build -f docker/builder-go.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:builder-go . --push",
"build-docker-alpine": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:alpine -t louislam/uptime-kuma:1-alpine -t louislam/uptime-kuma:$VERSION-alpine --target release . --push",
"build-docker-debian": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma -t louislam/uptime-kuma:1 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:debian -t louislam/uptime-kuma:1-debian -t louislam/uptime-kuma:$VERSION-debian --target release . --push",
"build-docker-nightly": "node ./extra/test-docker.js && npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly --target nightly . --push",
"build-docker-nightly-alpine": "docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly-alpine --target nightly . --push",
"build-docker-nightly-amd64": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:nightly-amd64 --target nightly . --push --progress plain",
"build-docker-slim": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2-slim -t louislam/uptime-kuma:$VERSION-slim --target release --build-arg BASE_IMAGE=louislam/uptime-kuma:base2-slim . --push",
"build-docker-full": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2 -t louislam/uptime-kuma:$VERSION --target release . --push",
"build-docker-nightly": "node ./extra/test-docker.js && npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly2 --target nightly . --push",
"build-docker-nightly-local": "npm run build && docker build -f docker/dockerfile -t louislam/uptime-kuma:nightly2 --target nightly .",
"build-docker-pr-test": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64 -t louislam/uptime-kuma:pr-test --target pr-test . --push",
"upload-artifacts": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:upload-artifact --build-arg VERSION --build-arg GITHUB_TOKEN --target upload-artifact . --progress plain",
"setup": "git checkout 1.23.11 && npm ci --production && npm run download-dist",
"setup": "git checkout 1.22.1 && npm ci --production && npm run download-dist",
"download-dist": "node extra/download-dist.js",
"mark-as-nightly": "node extra/mark-as-nightly.js",
"reset-password": "node extra/reset-password.js",
@@ -50,7 +47,6 @@
"compile-install-script": "@powershell -NoProfile -ExecutionPolicy Unrestricted -Command ./extra/compile-install-script.ps1",
"test-install-script-rockylinux": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/rockylinux.dockerfile .",
"test-install-script-centos7": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/centos7.dockerfile .",
"test-install-script-alpine3": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/alpine3.dockerfile .",
"test-install-script-debian": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/debian.dockerfile .",
"test-install-script-debian-buster": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/debian-buster.dockerfile .",
"test-install-script-ubuntu": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu.dockerfile .",
@@ -58,11 +54,7 @@
"test-install-script-ubuntu1604": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu1604.dockerfile .",
"simple-dns-server": "node extra/simple-dns-server.js",
"simple-mqtt-server": "node extra/simple-mqtt-server.js",
"simple-mongo": "docker run --rm -p 27017:27017 mongo",
"simple-postgres": "docker run --rm -p 5432:5432 -e POSTGRES_PASSWORD=postgres postgres",
"simple-mariadb": "docker run --rm -p 3306:3306 -e MYSQL_ROOT_PASSWORD=mariadb# mariadb",
"update-language-files": "cd extra/update-language-files && node index.js && cross-env-shell eslint ../../src/languages/$npm_config_language.js --fix",
"ncu-patch": "npm-check-updates -u -t patch",
"release-final": "node ./extra/test-docker.js && node extra/update-version.js && npm run build-docker && node ./extra/press-any-key.js && npm run upload-artifacts && node ./extra/update-wiki-version.js",
"release-beta": "node ./extra/test-docker.js && node extra/beta/update-version.js && npm run build && node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:beta . --target release --push && node ./extra/press-any-key.js && npm run upload-artifacts",
"git-remove-tag": "git tag -d",
@@ -75,7 +67,8 @@
"build-healthcheck-armv7": "cross-env GOOS=linux GOARCH=arm GOARM=7 go build -x -o ./extra/healthcheck-armv7 ./extra/healthcheck.go",
"deploy-demo-server": "node extra/deploy-demo-server.js",
"sort-contributors": "node extra/sort-contributors.js",
"start-server-node14-win": "private\\node14\\node.exe server/server.js"
"quick-run-nightly": "docker run --rm --env NODE_ENV=development -p 3001:3001 louislam/uptime-kuma:nightly2",
"start-dev-container": "cd docker && docker-compose -f docker-compose-dev.yml up --force-recreate"
},
"dependencies": {
"@grpc/grpc-js": "~1.7.3",
@@ -101,8 +94,7 @@
"express-basic-auth": "~1.2.1",
"express-static-gzip": "~2.1.7",
"form-data": "~4.0.0",
"gamedig": "^4.2.0",
"html-escaper": "^3.0.3",
"gamedig": "~4.0.5",
"http-graceful-shutdown": "~3.1.7",
"http-proxy-agent": "~5.0.0",
"https-proxy-agent": "~5.0.1",
@@ -113,12 +105,13 @@
"jsonwebtoken": "~9.0.0",
"jwt-decode": "~3.1.2",
"kafkajs": "^2.2.4",
"knex": "^2.4.2",
"limiter": "~2.1.0",
"liquidjs": "^10.7.0",
"mongodb": "~4.17.1",
"mongodb": "~4.14.0",
"mqtt": "~4.3.7",
"mssql": "~8.1.4",
"mysql2": "~3.6.2",
"mysql2": "~2.3.3",
"nanoid": "~3.3.4",
"node-cloudflared-tunnel": "~1.0.9",
"node-radius-client": "~1.0.0",
@@ -127,12 +120,11 @@
"notp": "~2.0.3",
"openid-client": "^5.4.2",
"password-hash": "~1.2.2",
"pg": "~8.11.3",
"pg-connection-string": "~2.6.2",
"pg": "~8.8.0",
"pg-connection-string": "~2.5.0",
"playwright-core": "~1.35.1",
"prom-client": "~13.2.0",
"prometheus-api-metrics": "~3.2.1",
"promisify-child-process": "~4.1.2",
"protobufjs": "~7.2.4",
"qs": "~6.10.4",
"redbean-node": "~0.3.0",
@@ -168,11 +160,12 @@
"core-js": "~3.26.1",
"cronstrue": "~2.24.0",
"cross-env": "~7.0.3",
"cypress": "^13.2.0",
"cypress": "^12.17.0",
"delay": "^5.0.0",
"dns2": "~2.0.1",
"dompurify": "~2.4.3",
"eslint": "~8.14.0",
"eslint-plugin-jsdoc": "^46.4.6",
"eslint-plugin-vue": "~8.7.1",
"favico.js": "~0.3.10",
"jest": "~29.6.1",
@@ -192,6 +185,7 @@
"typescript": "~4.4.4",
"v-pagination-3": "~0.1.7",
"vite": "~4.4.1",
"vite-plugin-commonjs": "^0.8.0",
"vite-plugin-compression": "^0.5.1",
"vue": "~3.3.4",
"vue-chartjs": "~5.2.0",
@@ -205,7 +199,7 @@
"vue-router": "~4.0.14",
"vue-toastification": "~2.0.0-rc.5",
"vuedraggable": "~4.1.0",
"wait-on": "^7.2.0",
"wait-on": "^6.0.1",
"whatwg-url": "~12.0.1"
}
}


@@ -1,9 +1,10 @@
<svg width="640" height="640" viewBox="0 0 640 640" xml:space="preserve" xmlns="http://www.w3.org/2000/svg">
<g transform="matrix(1 0 0 1 320 320)">
<linearGradient id="S3" gradientUnits="userSpaceOnUse" gradientTransform="matrix(1 0 0 1 -319.99875 -320.0001577393)" x1="259.78" y1="261.15" x2="463.85" y2="456.49">
<svg width="640" height="640" viewBox="0 0 640 640" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M490.4 235.64C544.09 358.38 544.09 435.34 490.4 466.5C409.85 513.24 199.96 527.49 139.54 455.64C99.2601 407.74 99.2601 334.4 139.54 235.64C180.5 168.18 238.71 134.45 314.17 134.45C389.64 134.45 448.38 168.18 490.4 235.64Z" fill="url(#paint0_linear_381_799)"/>
<path d="M490.4 235.64C544.09 358.38 544.09 435.34 490.4 466.5C409.85 513.24 199.96 527.49 139.54 455.64C99.2601 407.74 99.2601 334.4 139.54 235.64C180.5 168.18 238.71 134.45 314.17 134.45C389.64 134.45 448.38 168.18 490.4 235.64Z" stroke="#F2F2F2" stroke-opacity="0.51" stroke-width="200"/>
<defs>
<linearGradient id="paint0_linear_381_799" x1="259.78" y1="261.15" x2="463.85" y2="456.49" gradientUnits="userSpaceOnUse">
<stop stop-color="#5CDD8B"/>
<stop offset="1" stop-color="#86E6A9"/>
</linearGradient>
<path style="stroke: rgb(242,242,242); stroke-opacity: 0.51; stroke-width: 200; stroke-dasharray: none; stroke-linecap: butt; stroke-dashoffset: 0; stroke-linejoin: miter; stroke-miterlimit: 4; fill: url(#S3); fill-rule: nonzero; opacity: 1;" transform=" translate(0, 0)" d="M 170.40125 -84.36016 C 224.09125 38.37984 224.09125 115.33984 170.40125 146.49984 C 89.85125000000001 193.23984000000002 -120.03875 207.48984000000002 -180.45875 135.63984 C -220.73875 87.73983999999999 -220.73875 14.399839999999998 -180.45875 -84.36016000000001 C -139.49875 -151.82016 -81.28875000000001 -185.55016 -5.828750000000014 -185.55016 C 69.64124999999999 -185.55016 128.38125 -151.82016000000002 170.40124999999998 -84.36016000000001 z" stroke-linecap="round" />
</g>
</defs>
</svg>

SVG size: 1.1 KiB before, 893 B after.


@@ -9,9 +9,9 @@ const dayjs = require("dayjs");
/**
* Login to web app
* @param {string} username
* @param {string} password
* @returns {Promise<(Bean|null)>}
* @param {string} username Username to login with
* @param {string} password Password to login with
* @returns {Promise<(Bean|null)>} User or null if login failed
*/
exports.login = async function (username, password) {
if (typeof username !== "string" || typeof password !== "string") {
@@ -39,6 +39,7 @@ exports.login = async function (username, password) {
/**
* Validate a provided API key
* @param {string} key API key to verify
* @returns {boolean} Whether the API key is valid
*/
async function verifyAPIKey(key) {
if (typeof key !== "string") {
@@ -73,9 +74,10 @@ async function verifyAPIKey(key) {
/**
* Custom authorizer for express-basic-auth
* @param {string} username
* @param {string} password
* @param {authCallback} callback
* @param {string} username Username to login with
* @param {string} password Password to login with
* @param {authCallback} callback Callback to handle login result
* @returns {void}
*/
function apiAuthorizer(username, password, callback) {
// API Rate Limit
@@ -99,9 +101,10 @@ function apiAuthorizer(username, password, callback) {
/**
* Custom authorizer for express-basic-auth
* @param {string} username
* @param {string} password
* @param {authCallback} callback
* @param {string} username Username to login with
* @param {string} password Password to login with
* @param {authCallback} callback Callback to handle login result
* @returns {void}
*/
function userAuthorizer(username, password, callback) {
// Login Rate Limit
@@ -126,7 +129,8 @@ function userAuthorizer(username, password, callback) {
* Use basic auth if auth is not disabled
* @param {express.Request} req Express request object
* @param {express.Response} res Express response object
* @param {express.NextFunction} next
* @param {express.NextFunction} next Next handler in chain
* @returns {void}
*/
exports.basicAuth = async function (req, res, next) {
const middleware = basicAuth({
@@ -148,7 +152,8 @@ exports.basicAuth = async function (req, res, next) {
* Use API Key if API keys are enabled, else use basic auth
* @param {express.Request} req Express request object
* @param {express.Response} res Express response object
* @param {express.NextFunction} next
* @param {express.NextFunction} next Next handler in chain
* @returns {void}
*/
exports.apiAuth = async function (req, res, next) {
if (!await Settings.get("disableAuth")) {


@@ -15,6 +15,7 @@ class CacheableDnsHttpAgent {
/**
* Register/disable cacheable DNS lookups on the global agents
* @returns {void}
*/
static async update() {
log.debug("CacheableDnsHttpAgent", "update");
@@ -40,14 +41,15 @@ class CacheableDnsHttpAgent {
/**
* Attach cacheable to HTTP agent
* @param {http.Agent} agent Agent to install
* @returns {void}
*/
static install(agent) {
this.cacheable.install(agent);
}
/**
* @var {https.AgentOptions} agentOptions
* @return {https.Agent}
* @param {https.AgentOptions} agentOptions Options to pass to HTTPS agent
* @returns {https.Agent} The new HTTPS agent
*/
static getHttpsAgent(agentOptions) {
if (!this.enable) {
@@ -63,8 +65,8 @@ class CacheableDnsHttpAgent {
}
/**
* @var {http.AgentOptions} agentOptions
* @return {https.Agents}
* @param {http.AgentOptions} agentOptions Options to pass to the HTTP agent
* @returns {http.Agent} The new HTTP agent
*/
static getHttpAgent(agentOptions) {
if (!this.enable) {


@@ -12,7 +12,7 @@ const checkVersion = require("./check-version");
/**
* Send list of notification providers to client
* @param {Socket} socket Socket.io socket instance
* @returns {Promise<Bean[]>}
* @returns {Promise<Bean[]>} List of notifications
*/
async function sendNotificationList(socket) {
const timeLogger = new TimeLogger();
@@ -40,8 +40,8 @@ async function sendNotificationList(socket) {
* Send Heartbeat History list to socket
* @param {Socket} socket Socket.io instance
* @param {number} monitorID ID of monitor to send heartbeat history
* @param {boolean} [toUser=false] True = send to all browsers with the same user id, False = send to the current browser only
* @param {boolean} [overwrite=false] Overwrite client-side's heartbeat list
* @param {boolean} toUser True = send to all browsers with the same user id, False = send to the current browser only
* @param {boolean} overwrite Overwrite the client-side heartbeat list
* @returns {Promise<void>}
*/
async function sendHeartbeatList(socket, monitorID, toUser = false, overwrite = false) {
@@ -71,8 +71,8 @@ async function sendHeartbeatList(socket, monitorID, toUser = false, overwrite =
* Important heartbeat list (aka event list)
* @param {Socket} socket Socket.io instance
* @param {number} monitorID ID of monitor to send heartbeat history
* @param {boolean} [toUser=false] True = send to all browsers with the same user id, False = send to the current browser only
* @param {boolean} [overwrite=false] Overwrite client-side's heartbeat list
* @param {boolean} toUser True = send to all browsers with the same user id, False = send to the current browser only
* @param {boolean} overwrite Overwrite the client-side heartbeat list
* @returns {Promise<void>}
*/
async function sendImportantHeartbeatList(socket, monitorID, toUser = false, overwrite = false) {
@@ -100,7 +100,7 @@ async function sendImportantHeartbeatList(socket, monitorID, toUser = false, ove
/**
* Emit proxy list to client
* @param {Socket} socket Socket.io socket instance
* @return {Promise<Bean[]>}
* @returns {Promise<Bean[]>} List of proxies
*/
async function sendProxyList(socket) {
const timeLogger = new TimeLogger();
@@ -141,24 +141,21 @@ async function sendAPIKeyList(socket) {
/**
* Emits the version information to the client.
* @param {Socket} socket Socket.io socket instance
* @param {boolean} hideVersion
* @param {boolean} hideVersion Should we hide the version information in the response?
* @returns {Promise<void>}
*/
async function sendInfo(socket, hideVersion = false) {
let version;
let latestVersion;
let isContainer;
if (!hideVersion) {
version = checkVersion.version;
latestVersion = checkVersion.latestVersion;
isContainer = (process.env.UPTIME_KUMA_IS_CONTAINER === "1");
}
socket.emit("info", {
version,
latestVersion,
isContainer,
primaryBaseURL: await setting("primaryBaseURL"),
serverTimezone: await server.getTimezone(),
serverTimezoneOffset: server.getTimezoneOffset(),
@@ -168,7 +165,7 @@ async function sendInfo(socket, hideVersion = false) {
/**
* Send list of docker hosts to client
* @param {Socket} socket Socket.io socket instance
* @returns {Promise<Bean[]>}
* @returns {Promise<Bean[]>} List of docker hosts
*/
async function sendDockerHostList(socket) {
const timeLogger = new TimeLogger();


@@ -1,42 +1,29 @@
const isFreeBSD = /^freebsd/.test(process.platform);
// Interop with browser
const args = (typeof process !== "undefined") ? require("args-parser")(process.argv) : {};
// If host is omitted, the server will accept connections on the unspecified IPv6 address (::) when IPv6 is available and the unspecified IPv4 address (0.0.0.0) otherwise.
// Dual-stack support for (::)
// Only read HOST when not on FreeBSD, since HOST is a system environment variable there
let hostEnv = isFreeBSD ? null : process.env.HOST;
const hostname = args.host || process.env.UPTIME_KUMA_HOST || hostEnv;
const port = [ args.port, process.env.UPTIME_KUMA_PORT, process.env.PORT, 3001 ]
.map(portValue => parseInt(portValue))
.find(portValue => !isNaN(portValue));
const sslKey = args["ssl-key"] || process.env.UPTIME_KUMA_SSL_KEY || process.env.SSL_KEY || undefined;
const sslCert = args["ssl-cert"] || process.env.UPTIME_KUMA_SSL_CERT || process.env.SSL_CERT || undefined;
const sslKeyPassphrase = args["ssl-key-passphrase"] || process.env.UPTIME_KUMA_SSL_KEY_PASSPHRASE || process.env.SSL_KEY_PASSPHRASE || undefined;
const isSSL = sslKey && sslCert;
function getLocalWebSocketURL() {
const protocol = isSSL ? "wss" : "ws";
const host = hostname || "localhost";
return `${protocol}://${host}:${port}`;
}
const localWebSocketURL = getLocalWebSocketURL();
const demoMode = args["demo"] || false;
const badgeConstants = {
naColor: "#999",
defaultUpColor: "#66c20a",
defaultWarnColor: "#eed202",
defaultDownColor: "#c2290a",
defaultPendingColor: "#f8a306",
defaultMaintenanceColor: "#1747f5",
defaultPingColor: "blue", // as defined by badge-maker / shields.io
defaultStyle: "flat",
defaultPingValueSuffix: "ms",
defaultPingLabelSuffix: "h",
defaultUptimeValueSuffix: "%",
defaultUptimeLabelSuffix: "h",
defaultCertExpValueSuffix: " days",
defaultCertExpLabelSuffix: "h",
// Values Come From Default Notification Times
defaultCertExpireWarnDays: "14",
defaultCertExpireDownDays: "7"
};
module.exports = {
args,
hostname,
port,
sslKey,
sslCert,
sslKeyPassphrase,
isSSL,
localWebSocketURL,
demoMode,
badgeConstants,
};
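A quick illustration of the fallback chains above (hypothetical invocations):

// node server/server.js --port=8443            -> port === 8443
// UPTIME_KUMA_PORT=9001 node server/server.js  -> port === 9001
// PORT=abc node server/server.js               -> parseInt("abc") is NaN, so port falls back to 3001
// With no host and no SSL configured:
// localWebSocketURL === "ws://localhost:3001"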


@@ -4,6 +4,8 @@ const { setSetting, setting } = require("./util-server");
const { log, sleep } = require("../src/util");
const knex = require("knex");
const path = require("path");
const { EmbeddedMariaDB } = require("./embedded-mariadb");
const mysql = require("mysql2/promise");
/**
* Database & App Data Folder
@@ -24,7 +26,7 @@ class Database {
static screenshotDir;
static path;
static sqlitePath;
static dockerTLSDir;
@@ -34,11 +36,13 @@ class Database {
static patched = false;
/**
* SQLite only
* Patch filenames are used as keys
* Values:
* true: Add it regardless of order
* false: Do nothing
* { parents: [] }: Apply the listed parent patches first
* @deprecated
*/
static patchList = {
"patch-setting-value-type.sql": true,
@@ -80,10 +84,7 @@ class Database {
"patch-add-certificate-expiry-status-page.sql": true,
"patch-monitor-oauth-cc.sql": true,
"patch-add-timeout-monitor.sql": true,
"patch-add-gamedig-given-port.sql": true,
"patch-notification-config.sql": true,
"patch-fix-kafka-producer-booleans.sql": true,
"patch-timeout.sql": true,
"patch-add-gamedig-given-port.sql": true, // The last file so far converted to a knex migration file
};
/**
@@ -94,15 +95,20 @@ class Database {
static noReject = true;
static dbConfig = {};
static knexMigrationsPath = "./db/knex_migrations";
/**
* Initialize the database
* @param {Object} args Arguments to initialize DB with
* Initialize the data directory
* @param {object} args Arguments to initialize DB with
* @returns {void}
*/
static init(args) {
static initDataDir(args) {
// Data Directory (must end with "/")
Database.dataDir = process.env.DATA_DIR || args["data-dir"] || "./data/";
Database.path = path.join(Database.dataDir, "kuma.db");
Database.sqlitePath = path.join(Database.dataDir, "kuma.db");
if (! fs.existsSync(Database.dataDir)) {
fs.mkdirSync(Database.dataDir, { recursive: true });
}
@@ -127,36 +133,134 @@ class Database {
log.info("db", `Data Dir: ${Database.dataDir}`);
}
/**
* Read the database config file (db-config.json in the data directory)
* @throws {Error} If the config file is missing or invalid
* @returns {object} Database config
*/
static readDBConfig() {
let dbConfig;
let dbConfigString = fs.readFileSync(path.join(Database.dataDir, "db-config.json")).toString("utf-8");
dbConfig = JSON.parse(dbConfigString);
if (typeof dbConfig !== "object") {
throw new Error("Invalid db-config.json, it must be an object");
}
if (typeof dbConfig.type !== "string") {
throw new Error("Invalid db-config.json, type must be a string");
}
return dbConfig;
}
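// An example db-config.json that readDBConfig() accepts (hypothetical values;
// the field names match what connect() reads below):
// {
//     "type": "mariadb",
//     "hostname": "127.0.0.1",
//     "port": 3306,
//     "username": "kuma",
//     "password": "secret",
//     "dbName": "kuma"
// }
// For SQLite, { "type": "sqlite" } is enough.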
/**
* Write the database config to db-config.json in the data directory
* @param {object} dbConfig Database config object
* @returns {void}
*/
static writeDBConfig(dbConfig) {
fs.writeFileSync(path.join(Database.dataDir, "db-config.json"), JSON.stringify(dbConfig, null, 4));
}
/**
* Connect to the database
* @param {boolean} [testMode=false] Should the connection be
* @param {boolean} testMode Should the connection be
* started in test mode?
* @param {boolean} [autoloadModels=true] Should models be
* @param {boolean} autoloadModels Should models be
* automatically loaded?
* @param {boolean} [noLog=false] Should logs not be output?
* @param {boolean} noLog Should logs not be output?
* @returns {Promise<void>}
*/
static async connect(testMode = false, autoloadModels = true, noLog = false) {
const acquireConnectionTimeout = 120 * 1000;
let dbConfig;
try {
dbConfig = this.readDBConfig();
Database.dbConfig = dbConfig;
} catch (err) {
log.warn("db", err.message);
dbConfig = {
type: "sqlite",
};
}
const Dialect = require("knex/lib/dialects/sqlite3/index.js");
Dialect.prototype._driver = () => require("@louislam/sqlite3");
let config = {};
const knexInstance = knex({
client: Dialect,
connection: {
filename: Database.path,
acquireConnectionTimeout: acquireConnectionTimeout,
},
useNullAsDefault: true,
pool: {
min: 1,
max: 1,
idleTimeoutMillis: 120 * 1000,
propagateCreateError: false,
acquireTimeoutMillis: acquireConnectionTimeout,
log.info("db", `Database Type: ${dbConfig.type}`);
if (dbConfig.type === "sqlite") {
if (! fs.existsSync(Database.sqlitePath)) {
log.info("server", "Copying Database");
fs.copyFileSync(Database.templatePath, Database.sqlitePath);
}
});
const Dialect = require("knex/lib/dialects/sqlite3/index.js");
Dialect.prototype._driver = () => require("@louislam/sqlite3");
config = {
client: Dialect,
connection: {
filename: Database.sqlitePath,
acquireConnectionTimeout: acquireConnectionTimeout,
},
useNullAsDefault: true,
pool: {
min: 1,
max: 1,
idleTimeoutMillis: 120 * 1000,
propagateCreateError: false,
acquireTimeoutMillis: acquireConnectionTimeout,
}
};
} else if (dbConfig.type === "mariadb") {
if (!/^\w+$/.test(dbConfig.dbName)) {
throw Error("Invalid database name. A database name can only consist of letters, numbers and underscores");
}
const connection = await mysql.createConnection({
host: dbConfig.hostname,
port: dbConfig.port,
user: dbConfig.username,
password: dbConfig.password,
});
await connection.execute("CREATE DATABASE IF NOT EXISTS " + dbConfig.dbName + " CHARACTER SET utf8mb4");
connection.end();
config = {
client: "mysql2",
connection: {
host: dbConfig.hostname,
port: dbConfig.port,
user: dbConfig.username,
password: dbConfig.password,
database: dbConfig.dbName,
}
};
} else if (dbConfig.type === "embedded-mariadb") {
let embeddedMariaDB = EmbeddedMariaDB.getInstance();
await embeddedMariaDB.start();
log.info("mariadb", "Embedded MariaDB started");
config = {
client: "mysql2",
connection: {
socketPath: embeddedMariaDB.socketPath,
user: "node",
database: "kuma",
}
};
} else {
throw new Error("Unknown Database type: " + dbConfig.type);
}
// Set to utf8mb4 for MariaDB
if (dbConfig.type.endsWith("mariadb")) {
config.pool = {
afterCreate(conn, done) {
conn.query("SET CHARACTER SET utf8mb4;", (err) => done(err, conn));
},
};
}
const knexInstance = knex(config);
R.setup(knexInstance);
@@ -171,6 +275,18 @@ class Database {
await R.autoloadModels("./server/model");
}
if (dbConfig.type === "sqlite") {
await this.initSQLite(testMode, noLog);
} else if (dbConfig.type.endsWith("mariadb")) {
await this.initMariaDB();
}
}
/**
* Initialize SQLite after connecting
* @param {boolean} testMode Should the connection be started in test mode?
* @param {boolean} noLog Should logs not be output?
* @returns {Promise<void>}
*/
static async initSQLite(testMode, noLog) {
await R.exec("PRAGMA foreign_keys = ON");
if (testMode) {
// Change to MEMORY
@@ -195,8 +311,56 @@ class Database {
}
}
/** Patch the database */
/**
* Create the MariaDB tables via db/knex_init_db if the database is still empty
* @returns {Promise<void>}
*/
static async initMariaDB() {
log.debug("db", "Checking if MariaDB database exists...");
let hasTable = await R.hasTable("docker_host");
if (!hasTable) {
const { createTables } = require("../db/knex_init_db");
await createTables();
} else {
log.debug("db", "MariaDB database already exists");
}
}
/**
* Patch the database
* @returns {void}
*/
static async patch() {
// Still need to keep this for old versions of Uptime Kuma
if (Database.dbConfig.type === "sqlite") {
await this.patchSqlite();
}
// Using knex migrations
// https://knexjs.org/guide/migrations.html
// https://gist.github.com/NigelEarle/70db130cc040cc2868555b29a0278261
try {
await R.knex.migrate.latest({
directory: Database.knexMigrationsPath,
});
} catch (e) {
log.error("db", "Database migration failed");
throw e;
}
}
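// For reference, a file in ./db/knex_migrations would look roughly like this
// (hypothetical migration; see https://knexjs.org/guide/migrations.html):
//
// exports.up = function (knex) {
//     return knex.schema.alterTable("monitor", (table) => {
//         table.integer("timeout").notNullable().defaultTo(0);
//     });
// };
//
// exports.down = function (knex) {
//     return knex.schema.alterTable("monitor", (table) => {
//         table.dropColumn("timeout");
//     });
// };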
/**
* Roll back the latest knex migration (not implemented yet)
* @returns {Promise<void>}
*/
static async rollbackLatestPatch() {
}
/**
* Patch the database for SQLite
* @deprecated
*/
static async patchSqlite() {
let version = parseInt(await setting("database_version"));
if (! version) {
@@ -216,7 +380,7 @@ class Database {
// Try catch anything here
try {
for (let i = version + 1; i <= this.latestVersion; i++) {
const sqlFile = `./db/patch${i}.sql`;
const sqlFile = `./db/old_migrations/patch${i}.sql`;
log.info("db", `Patching ${sqlFile}`);
await Database.importSQLFile(sqlFile);
log.info("db", `Patched ${sqlFile}`);
@@ -233,17 +397,18 @@ class Database {
}
}
await this.patch2();
await this.patchSqlite2();
await this.migrateNewStatusPage();
}
/**
* Patch DB using new process
* Call it from patch() only
* @deprecated
* @private
* @returns {Promise<void>}
*/
static async patch2() {
static async patchSqlite2() {
log.info("db", "Database Patch 2.0 Process");
let databasePatchedFiles = await setting("databasePatchedFiles");
@@ -277,6 +442,7 @@ class Database {
}
/**
* SQLite only
* Migrate status page value in setting to "status_page" table
* @returns {Promise<void>}
*/
@@ -348,8 +514,8 @@ class Database {
* Patch database using new patching process
* Used in patch2() only
* @private
* @param sqlFilename
* @param databasePatchedFiles
* @param {string} sqlFilename Name of SQL file to load
* @param {object} databasePatchedFiles Patch status of database files
* @returns {Promise<void>}
*/
static async patch2Recursion(sqlFilename, databasePatchedFiles) {
@@ -373,7 +539,7 @@ class Database {
log.info("db", sqlFilename + " is patching");
this.patched = true;
await this.importSQLFile("./db/" + sqlFilename);
await this.importSQLFile("./db/old_migrations/" + sqlFilename);
databasePatchedFiles[sqlFilename] = true;
log.info("db", sqlFilename + " was patched successfully");
@@ -384,7 +550,7 @@ class Database {
/**
* Load an SQL file and execute it
* @param filename Filename of SQL file to import
* @param {string} filename Filename of SQL file to import
* @returns {Promise<void>}
*/
static async importSQLFile(filename) {
@@ -418,7 +584,7 @@ class Database {
/**
* Acquire a direct connection to the database
* @returns {any}
* @returns {any} Database connection
*/
static getBetterSQLite3Database() {
return R.knex.client.acquireConnection();
@@ -455,10 +621,13 @@ class Database {
process.removeListener("unhandledRejection", listener);
}
/** Get the size of the database */
/**
* Get the size of the database
* @returns {number} Size of database
*/
static getSize() {
log.debug("db", "Database.getSize()");
let stats = fs.statSync(Database.path);
let stats = fs.statSync(Database.sqlitePath);
log.debug("db", stats);
return stats.size;
}
@@ -470,6 +639,18 @@ class Database {
static async shrink() {
await R.exec("VACUUM");
}
/**
* Get the SQL fragment that offsets the current time by a parameterized number of hours, using the syntax of the active database dialect
* @returns {string} SQL fragment containing one `?` placeholder for the hour offset
*/
static sqlHourOffset() {
if (this.dbConfig.type === "sqlite") {
return "DATETIME('now', ? || ' hours')";
} else {
return "DATE_ADD(NOW(), INTERVAL ? HOUR)";
}
}
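// Usage sketch (assumption): bind a negative offset for "N hours ago", e.g.
// const sql = Database.sqlHourOffset();
// let rows = await R.getAll(`SELECT * FROM heartbeat WHERE time > ${sql}`, [ -24 ]);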
}
module.exports = Database;


@@ -14,10 +14,10 @@ class DockerHost {
/**
* Save a docker host
* @param {Object} dockerHost Docker host to save
* @param {object} dockerHost Docker host to save
* @param {?number} dockerHostID ID of the docker host to update
* @param {number} userID ID of the user who adds the docker host
* @returns {Promise<Bean>}
* @returns {Promise<Bean>} Updated docker host
*/
static async save(dockerHost, dockerHostID, userID) {
let bean;
@@ -64,7 +64,7 @@ class DockerHost {
/**
* Fetches the number of containers on the Docker host
* @param {Object} dockerHost Docker host to check for
* @param {object} dockerHost Docker host to check for
* @returns {number} Total number of containers on the host
*/
static async testDockerHost(dockerHost) {
@@ -80,8 +80,8 @@ class DockerHost {
options.socketPath = dockerHost.dockerDaemon;
} else if (dockerHost.dockerType === "tcp") {
options.baseURL = DockerHost.patchDockerURL(dockerHost.dockerDaemon);
options.httpsAgent = new https.Agent(DockerHost.getHttpsAgentOptions(dockerHost.dockerType, options.baseURL));
}
options.httpsAgent = new https.Agent(DockerHost.getHttpsAgentOptions(dockerHost.dockerType, options.baseURL));
let res = await axios.request(options);
@@ -108,6 +108,8 @@ class DockerHost {
/**
* Since axios 0.27.X, it does not accept `tcp://` protocol.
* Change it to `http://` on the fly in order to fix it. (https://github.com/louislam/uptime-kuma/issues/2165)
* @param {any} url URL to fix
* @returns {any} URL with tcp:// replaced by http://
*/
static patchDockerURL(url) {
if (typeof url === "string") {
@@ -129,11 +131,10 @@ class DockerHost {
* 'data/docker-tls/example.com/' would be searched for certificate files),
* then 'ca.pem', 'key.pem' and 'cert.pem' files are included in the agent options.
* File names can also be overridden via 'DOCKER_TLS_FILE_NAME_(CA|KEY|CERT)'.
*
* @param {String} dockerType i.e. "tcp" or "socket"
* @param {String} url The docker host URL rewritten to https://
* @return {Object}
* */
* @param {string} dockerType i.e. "tcp" or "socket"
* @param {string} url The docker host URL rewritten to https://
* @returns {object} HTTP agent options
*/
static getHttpsAgentOptions(dockerType, url) {
let baseOptions = {
maxCachedSessions: 0,

server/embedded-mariadb.js (new file, 168 lines)

@@ -0,0 +1,168 @@
const { log } = require("../src/util");
const childProcess = require("child_process");
const fs = require("fs");
const mysql = require("mysql2");
/**
* It is only used inside the docker container
*/
class EmbeddedMariaDB {
static instance = null;
exec = "mariadbd";
mariadbDataDir = "/app/data/mariadb";
runDir = "/app/data/run/mariadb";
socketPath = this.runDir + "/mysqld.sock";
childProcess = null;
running = false;
started = false;
/**
* Get the singleton instance
* @returns {EmbeddedMariaDB} The shared instance
*/
static getInstance() {
if (!EmbeddedMariaDB.instance) {
EmbeddedMariaDB.instance = new EmbeddedMariaDB();
}
return EmbeddedMariaDB.instance;
}
/**
* @returns {boolean} Whether a singleton instance has been created
*/
static hasInstance() {
return !!EmbeddedMariaDB.instance;
}
/**
* Start the embedded MariaDB server and wait until it is ready for connections
* @returns {Promise<void>|void} Resolves once the server has started
*/
start() {
if (this.childProcess) {
log.info("mariadb", "Already started");
return;
}
this.initDB();
this.running = true;
log.info("mariadb", "Starting Embedded MariaDB");
this.childProcess = childProcess.spawn(this.exec, [
"--user=node",
"--datadir=" + this.mariadbDataDir,
`--socket=${this.socketPath}`,
`--pid-file=${this.runDir}/mysqld.pid`,
]);
this.childProcess.on("close", (code) => {
this.running = false;
this.childProcess = null;
this.started = false;
log.info("mariadb", "Stopped Embedded MariaDB: " + code);
if (code !== 0) {
log.info("mariadb", "Try to restart Embedded MariaDB as it is not stopped by user");
this.start();
}
});
this.childProcess.on("error", (err) => {
if (err.code === "ENOENT") {
log.error("mariadb", `Embedded MariaDB: ${this.exec} is not found`);
} else {
log.error("mariadb", err);
}
});
let handler = (data) => {
log.debug("mariadb", data.toString("utf-8"));
if (data.toString("utf-8").includes("ready for connections")) {
this.initDBAfterStarted();
}
};
this.childProcess.stdout.on("data", handler);
this.childProcess.stderr.on("data", handler);
return new Promise((resolve) => {
let interval = setInterval(() => {
if (this.started) {
clearInterval(interval);
resolve();
} else {
log.info("mariadb", "Waiting for Embedded MariaDB to start...");
}
}, 1000);
});
}
/**
* Stop the embedded MariaDB server
* @returns {void}
*/
stop() {
if (this.childProcess) {
this.childProcess.kill("SIGINT");
this.childProcess = null;
}
}
/**
* Create the MariaDB data and run directories, running mysql_install_db on first use
* @returns {void}
*/
initDB() {
if (!fs.existsSync(this.mariadbDataDir)) {
log.info("mariadb", `Embedded MariaDB: ${this.mariadbDataDir} is not found, create one now.`);
fs.mkdirSync(this.mariadbDataDir, {
recursive: true,
});
let result = childProcess.spawnSync("mysql_install_db", [
"--user=node",
"--ldata=" + this.mariadbDataDir,
]);
if (result.status !== 0) {
let error = result.stderr.toString("utf-8");
log.error("mariadb", error);
return;
} else {
log.info("mariadb", "Embedded MariaDB: mysql_install_db done:" + result.stdout.toString("utf-8"));
}
}
if (!fs.existsSync(this.runDir)) {
log.info("mariadb", `Embedded MariaDB: ${this.runDir} is not found, create one now.`);
fs.mkdirSync(this.runDir, {
recursive: true,
});
}
}
/**
* Create the default `kuma` database once the server is ready for connections
* @returns {Promise<void>}
*/
async initDBAfterStarted() {
const connection = await mysql.createConnection({
socketPath: this.socketPath,
user: "node",
});
let result = await connection.execute("CREATE DATABASE IF NOT EXISTS `kuma`");
log.debug("mariadb", "CREATE DATABASE: " + JSON.stringify(result));
log.info("mariadb", "Embedded MariaDB is ready for connections");
this.started = true;
}
}
module.exports = {
EmbeddedMariaDB,
};


@@ -1,25 +1,21 @@
const jsesc = require("jsesc");
const { escape } = require("html-escaper");
/**
* Returns the JavaScript snippet required to insert the Google Analytics scripts
* into a webpage.
* @param tagId Google UA/G/AW/DC Property ID to use with the Google Analytics script.
* @returns {string}
* @param {string} tagId Google UA/G/AW/DC Property ID to use with the Google Analytics script.
* @returns {string} HTML script tags to inject into page
*/
function getGoogleAnalyticsScript(tagId) {
let escapedTagIdJS = jsesc(tagId, { isScriptContext: true });
let escapedTagId = jsesc(tagId, { isScriptContext: true });
if (escapedTagIdJS) {
escapedTagIdJS = escapedTagIdJS.trim();
if (escapedTagId) {
escapedTagId = escapedTagId.trim();
}
// Escape the tag ID for use in an HTML attribute.
let escapedTagIdHTMLAttribute = escape(tagId);
return `
<script async src="https://www.googletagmanager.com/gtag/js?id=${escapedTagIdHTMLAttribute}"></script>
<script>window.dataLayer = window.dataLayer || []; function gtag(){dataLayer.push(arguments);} gtag('js', new Date());gtag('config', '${escapedTagIdJS}'); </script>
<script async src="https://www.googletagmanager.com/gtag/js?id=${escapedTagId}"></script>
<script>window.dataLayer = window.dataLayer || []; function gtag(){dataLayer.push(arguments);} gtag('js', new Date());gtag('config', '${escapedTagId}'); </script>
`;
}
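Usage sketch (hypothetical tag ID):

const html = getGoogleAnalyticsScript("G-XXXXXXX");
// html contains the two <script> tags, with the ID escaped for a script context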


@@ -10,7 +10,7 @@ let ImageDataURI = (() => {
/**
* Decode the data:image/ URI
* @param {string} dataURI data:image/ URI to decode
* @returns {?Object} An object with properties "imageType" and "dataBase64".
* @returns {?object} An object with properties "imageType" and "dataBase64".
* The former is the image type, e.g., "png", and the latter is a base64
* encoded string of the image's binary data. If it fails to parse, returns
* null instead of an object.
@@ -52,8 +52,8 @@ let ImageDataURI = (() => {
/**
* Write data URI to file
* @param {string} dataURI data:image/ URI
* @param {string} [filePath] Path to write file to
* @returns {Promise<string>}
* @param {string} filePath Path to write file to
* @returns {Promise<string|void>} Write file error
*/
function outputFile(dataURI, filePath) {
filePath = filePath || "./";

Some files were not shown because too many files have changed in this diff.