Compare commits


1 Commit

Author      SHA1          Message              Date
Louis Lam   11d01ebc78    Add queue package    2023-07-19 03:14:48 +08:00
305 changed files with 2868 additions and 8536 deletions

View File

@@ -1,6 +1,6 @@
 /.idea
 /node_modules
-/data*
+/data
 /cypress
 /out
 /test
@@ -18,7 +18,6 @@ README.md
 .vscode
 .eslint*
 .stylelint*
-/.devcontainer
 /.github
 yarn.lock
 app.json
@@ -36,7 +35,6 @@ tsconfig.json
 /extra/healthcheck
 extra/exe-builder
 ### .gitignore content (commented rules are duplicated)
 #node_modules

View File

@@ -14,7 +14,6 @@ module.exports = {
     extends: [
         "eslint:recommended",
         "plugin:vue/vue3-recommended",
-        "plugin:jsdoc/recommended-error",
     ],
     parser: "vue-eslint-parser",
     parserOptions: {
@@ -22,9 +21,6 @@ module.exports = {
         sourceType: "module",
         requireConfigFile: false,
     },
-    plugins: [
-        "jsdoc"
-    ],
     rules: {
         "yoda": "error",
         eqeqeq: [ "warn", "smart" ],
@@ -101,43 +97,7 @@ module.exports = {
         }],
         "no-control-regex": "off",
         "one-var": [ "error", "never" ],
-        "max-statements-per-line": [ "error", { "max": 1 }],
-        "jsdoc/check-tag-names": [
-            "error",
-            {
-                "definedTags": [ "link" ]
-            }
-        ],
-        "jsdoc/no-undefined-types": "off",
-        "jsdoc/no-defaults": [
-            "error",
-            { "noOptionalParamNames": true }
-        ],
-        "jsdoc/require-throws": "warn",
-        "jsdoc/require-jsdoc": [
-            "error",
-            {
-                "require": {
-                    "FunctionDeclaration": true,
-                    "MethodDefinition": true,
-                }
-            }
-        ],
-        "jsdoc/no-blank-block-descriptions": "error",
-        "jsdoc/require-returns-description": "warn",
-        "jsdoc/require-returns-check": [
-            "error",
-            { "reportMissingReturnForUndefinedTypes": false }
-        ],
-        "jsdoc/require-returns": [
-            "warn",
-            {
-                "forceRequireReturn": true,
-                "forceReturnsWithAsync": true
-            }
-        ],
-        "jsdoc/require-param-type": "warn",
-        "jsdoc/require-param-description": "warn"
+        "max-statements-per-line": [ "error", { "max": 1 }]
     },
     "overrides": [
         {
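The bulk of this hunk strips the `eslint-plugin-jsdoc` ruleset. For reference, a comment of roughly the following shape (a hypothetical function, shown purely to illustrate the removed rules) is what `jsdoc/require-jsdoc`, `jsdoc/require-param-type`, `jsdoc/require-returns` (with `forceReturnsWithAsync`) and `jsdoc/require-throws` demanded on every function declaration and method:

```js
/**
 * Ping a target host and report the round-trip time.
 * @param {string} hostname Host to ping.
 * @param {number} timeout Timeout in milliseconds.
 * @returns {Promise<number>} Round-trip time in milliseconds.
 * @throws {Error} When the host is unreachable.
 */
async function pingTarget(hostname, timeout) {
    // Implementation omitted; only the comment shape matters here.
    return 0;
}
```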

View File

@@ -9,7 +9,7 @@ on:
     paths-ignore:
       - '*.md'
   pull_request:
-    branches: [ master, 2.0.X ]
+    branches: [ master ]
     paths-ignore:
       - '*.md'
@@ -22,7 +22,7 @@ jobs:
     strategy:
       matrix:
         os: [macos-latest, ubuntu-latest, windows-latest, ARM64]
-        node: [ 14, 20 ]
+        node: [ 14, 18 ]
         # See supported Node.js release schedule at https://nodejs.org/en/about/releases/
     steps:
@@ -50,7 +50,7 @@ jobs:
     strategy:
       matrix:
         os: [ ARMv7 ]
-        node: [ 14, 20 ]
+        node: [ 14.21.3, 18.16.1 ]
         # See supported Node.js release schedule at https://nodejs.org/en/about/releases/
     steps:
@@ -71,28 +71,27 @@ jobs:
       - run: git config --global core.autocrlf false # Mainly for Windows
       - uses: actions/checkout@v3
-      - name: Use Node.js 20
+      - name: Use Node.js 14
         uses: actions/setup-node@v3
         with:
-          node-version: 20
+          node-version: 14
       - run: npm install
       - run: npm run lint
-  # TODO: Temporarily disable, as it cannot pass the test in 2.0.0 yet
-  # e2e-tests:
-  #     needs: [ check-linters ]
-  #     runs-on: ubuntu-latest
-  #     steps:
-  #         - run: git config --global core.autocrlf false # Mainly for Windows
-  #         - uses: actions/checkout@v3
-  #         - name: Use Node.js 14
-  #           uses: actions/setup-node@v3
-  #           with:
-  #               node-version: 14
-  #         - run: npm install
-  #         - run: npm run build
-  #         - run: npm run cy:test
+  e2e-tests:
+    needs: [ check-linters ]
+    runs-on: ubuntu-latest
+    steps:
+      - run: git config --global core.autocrlf false # Mainly for Windows
+      - uses: actions/checkout@v3
+      - name: Use Node.js 14
+        uses: actions/setup-node@v3
+        with:
+          node-version: 14
+      - run: npm install
+      - run: npm run build
+      - run: npm run cy:test
   frontend-unit-tests:
     needs: [ check-linters ]

View File

@@ -6,7 +6,6 @@ on:
   pull_request:
     branches:
       - master
-      - 2.0.X
   workflow_dispatch:
 permissions:

.gitignore vendored
View File

@@ -7,7 +7,6 @@ dist-ssr
 /data
 !/data/.gitkeep
-/data*
 .vscode
 /private

View File

@@ -10,7 +10,6 @@
     "color-function-notation": "legacy",
     "shorthand-property-no-redundant-values": null,
     "color-hex-length": null,
-    "declaration-block-no-redundant-longhand-properties": null,
-    "at-rule-no-unknown": null
+    "declaration-block-no-redundant-longhand-properties": null
   }
 }

View File

@@ -34,19 +34,19 @@ Yes or no, it depends on what you will try to do. Since I don't want to waste yo
 Here are some references:
-### ✅ Usually accepted:
+### ✅ Usually Accept:
 - Bug fix
 - Security fix
 - Adding notification providers
-- Adding new language files (see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md))
+- Adding new language files (You should go to https://weblate.kuma.pet for existing languages)
 - Adding new language keys: `$t("...")`
-### ⚠️ Discussion required:
+### ⚠️ Discussion First
 - Large pull requests
 - New features
-### ❌ Won't be merged:
+### ❌ Won't Merge
-- A dedicated pr for translating existing languages (see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md))
+- A dedicated pr for translating existing languages (You can now translate on https://weblate.kuma.pet)
 - Do not pass the auto test
 - Any breaking changes
 - Duplicated pull requests
@@ -106,11 +106,11 @@ I personally do not like something that requires so many configurations before y
 ## Tools
-- [`Node.js`](https://nodejs.org/) >= 14
+- Node.js >= 14
-- [`npm`](https://www.npmjs.com/) >= 8.5
+- NPM >= 8.5
-- [`git`](https://git-scm.com/)
+- Git
-- IDE that supports [`ESLint`](https://eslint.org/) and EditorConfig (I am using [`IntelliJ IDEA`](https://www.jetbrains.com/idea/))
+- IDE that supports ESLint and EditorConfig (I am using IntelliJ IDEA)
-- A SQLite GUI tool (f.ex. [`SQLite Expert Personal`](https://www.sqliteexpert.com/download.html) or [`DBeaver Community`](https://dbeaver.io/download/))
+- A SQLite GUI tool (SQLite Expert Personal is suggested)
 ## Install Dependencies for Development
@@ -214,21 +214,11 @@ Since previously updating Vite 2.5.10 to 2.6.0 broke the application completely,
 Patch release = the third digit ([Semantic Versioning](https://semver.org/))
-If for security / bug / other reasons, a library must be updated, breaking changes need to be checked by the person proposing the change.
+If for maybe security reasons, a library must be updated. Then you must need to check if there are any breaking changes.
 ## Translations
-Please add **all** the strings which are translatable to `src/lang/en.json` (If translation keys are ommited, they can not be translated).
-**Don't include any other languages in your inital Pull-Request** (even if this is your mother tounge), to avoid merge-conflicts between weblate and `master`.
-The translations can then (after merging a PR into `master`) be translated by awesome people donating their language-skills.
-If you want to help by translating Uptime Kuma into your language, please visit the [instructions on how to translate using weblate](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).
+Please read: https://github.com/louislam/uptime-kuma/tree/master/src/languages
-## Spelling & Grammar
-Feel free to correct the grammar in the documentation or code.
-My mother language is not english and my grammar is not that great.
 ## Wiki

View File

@@ -1,16 +1,16 @@
-<div align="center" width="100%">
-    <img src="./public/icon.svg" width="128" alt="" />
-</div>
 # Uptime Kuma
-Uptime Kuma is an easy-to-use self-hosted monitoring tool.
 <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/stars/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/pulls/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/v/louislam/uptime-kuma/latest?label=docker%20image%20ver." /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/last-commit/louislam/uptime-kuma" /></a> <a target="_blank" href="https://opencollective.com/uptime-kuma"><img src="https://opencollective.com/uptime-kuma/total/badge.svg?label=Open%20Collective%20Backers&color=brightgreen" /></a>
 [![GitHub Sponsors](https://img.shields.io/github/sponsors/louislam?label=GitHub%20Sponsors)](https://github.com/sponsors/louislam) <a href="https://weblate.kuma.pet/projects/uptime-kuma/uptime-kuma/">
 <img src="https://weblate.kuma.pet/widgets/uptime-kuma/-/svg-badge.svg" alt="Translation status" />
 </a>
+<div align="center" width="100%">
+    <img src="./public/icon.svg" width="128" alt="" />
+</div>
+Uptime Kuma is an easy-to-use self-hosted monitoring tool.
 <img src="https://user-images.githubusercontent.com/1336778/212262296-e6205815-ad62-488c-83ec-a5b0d0689f7c.jpg" width="700" alt="" />
 ## 🥔 Live Demo
@@ -184,10 +184,7 @@ If you want to report a bug or request a new feature, feel free to open a [new i
 ### Translations
 If you want to translate Uptime Kuma into your language, please visit [Weblate Readme](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).
-## Spelling & Grammar
-Feel free to correct the grammar in the documentation or code.
-My mother language is not english and my grammar is not that great.
+Feel free to correct my grammar in this README, source code, or wiki, as my mother language is not English and my grammar is not that great.
 ### Create Pull Requests
 If you want to modify Uptime Kuma, please read this guide and follow the rules here: https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md

View File

@@ -4,4 +4,8 @@ if (process.env.TEST_FRONTEND) {
     config.presets = [ "@babel/preset-env" ];
 }
+if (process.env.TEST_BACKEND) {
+    config.plugins = [ "babel-plugin-rewire" ];
+}
 module.exports = config;
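The added branch enables `babel-plugin-rewire` for backend test builds. That plugin injects a `__RewireAPI__` helper into transpiled modules so tests can swap module-private dependencies; a minimal sketch of the pattern (the module path and the `db` binding name are hypothetical):

```js
// Hypothetical backend test using the helper injected by babel-plugin-rewire.
import monitor, { __RewireAPI__ } from "../server/model/monitor";

// Swap the module-private "db" binding for a stub during the test...
__RewireAPI__.__Rewire__("db", { query: async () => [] });
// ...exercise `monitor` against the stub, then restore the real binding.
__RewireAPI__.__ResetDependency__("db");
```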

View File

@@ -1,559 +0,0 @@
const { R } = require("redbean-node");
const { log } = require("../src/util");
/**
* ⚠️⚠️⚠️⚠️⚠️⚠️ DO NOT ADD ANYTHING HERE!
* IF YOU NEED TO ADD FIELDS, ADD IT TO ./db/knex_migrations
* See ./db/knex_migrations/README.md for more information
* @returns {Promise<void>}
*/
async function createTables() {
log.info("mariadb", "Creating basic tables for MariaDB");
const knex = R.knex;
// TODO: Should check later if it is really the final patch sql file.
// docker_host
await knex.schema.createTable("docker_host", (table) => {
table.increments("id");
table.integer("user_id").unsigned().notNullable();
table.string("docker_daemon", 255);
table.string("docker_type", 255);
table.string("name", 255);
});
// group
await knex.schema.createTable("group", (table) => {
table.increments("id");
table.string("name", 255).notNullable();
table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
table.boolean("public").notNullable().defaultTo(false);
table.boolean("active").notNullable().defaultTo(true);
table.integer("weight").notNullable().defaultTo(1000);
table.integer("status_page_id").unsigned();
});
// proxy
await knex.schema.createTable("proxy", (table) => {
table.increments("id");
table.integer("user_id").unsigned().notNullable();
table.string("protocol", 10).notNullable();
table.string("host", 255).notNullable();
table.smallint("port").notNullable(); // TODO: Maybe a issue with MariaDB, need migration to int
table.boolean("auth").notNullable();
table.string("username", 255).nullable();
table.string("password", 255).nullable();
table.boolean("active").notNullable().defaultTo(true);
table.boolean("default").notNullable().defaultTo(false);
table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
table.index("user_id", "proxy_user_id");
});
// user
await knex.schema.createTable("user", (table) => {
table.increments("id");
table.string("username", 255).notNullable().unique().collate("utf8_general_ci");
table.string("password", 255);
table.boolean("active").notNullable().defaultTo(true);
table.string("timezone", 150);
table.string("twofa_secret", 64);
table.boolean("twofa_status").notNullable().defaultTo(false);
table.string("twofa_last_token", 6);
});
// monitor
await knex.schema.createTable("monitor", (table) => {
table.increments("id");
table.string("name", 150);
table.boolean("active").notNullable().defaultTo(true);
table.integer("user_id").unsigned()
.references("id").inTable("user")
.onDelete("SET NULL")
.onUpdate("CASCADE");
table.integer("interval").notNullable().defaultTo(20);
table.text("url");
table.string("type", 20);
table.integer("weight").defaultTo(2000);
table.string("hostname", 255);
table.integer("port");
table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
table.string("keyword", 255);
table.integer("maxretries").notNullable().defaultTo(0);
table.boolean("ignore_tls").notNullable().defaultTo(false);
table.boolean("upside_down").notNullable().defaultTo(false);
table.integer("maxredirects").notNullable().defaultTo(10);
table.text("accepted_statuscodes_json").notNullable().defaultTo("[\"200-299\"]");
table.string("dns_resolve_type", 5);
table.string("dns_resolve_server", 255);
table.string("dns_last_result", 255);
table.integer("retry_interval").notNullable().defaultTo(0);
table.string("push_token", 20).defaultTo(null);
table.text("method").notNullable().defaultTo("GET");
table.text("body").defaultTo(null);
table.text("headers").defaultTo(null);
table.text("basic_auth_user").defaultTo(null);
table.text("basic_auth_pass").defaultTo(null);
table.integer("docker_host").unsigned()
.references("id").inTable("docker_host");
table.string("docker_container", 255);
table.integer("proxy_id").unsigned()
.references("id").inTable("proxy");
table.boolean("expiry_notification").defaultTo(true);
table.text("mqtt_topic");
table.string("mqtt_success_message", 255);
table.string("mqtt_username", 255);
table.string("mqtt_password", 255);
table.string("database_connection_string", 2000);
table.text("database_query");
table.string("auth_method", 250);
table.text("auth_domain");
table.text("auth_workstation");
table.string("grpc_url", 255).defaultTo(null);
table.text("grpc_protobuf").defaultTo(null);
table.text("grpc_body").defaultTo(null);
table.text("grpc_metadata").defaultTo(null);
table.text("grpc_method").defaultTo(null);
table.text("grpc_service_name").defaultTo(null);
table.boolean("grpc_enable_tls").notNullable().defaultTo(false);
table.string("radius_username", 255);
table.string("radius_password", 255);
table.string("radius_calling_station_id", 50);
table.string("radius_called_station_id", 50);
table.string("radius_secret", 255);
table.integer("resend_interval").notNullable().defaultTo(0);
table.integer("packet_size").notNullable().defaultTo(56);
table.string("game", 255);
});
// heartbeat
await knex.schema.createTable("heartbeat", (table) => {
table.increments("id");
table.boolean("important").notNullable().defaultTo(false);
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.smallint("status").notNullable();
table.text("msg");
table.datetime("time").notNullable();
table.integer("ping");
table.integer("duration").notNullable().defaultTo(0);
table.integer("down_count").notNullable().defaultTo(0);
table.index("important");
table.index([ "monitor_id", "time" ], "monitor_time_index");
table.index("monitor_id");
table.index([ "monitor_id", "important", "time" ], "monitor_important_time_index");
});
// incident
await knex.schema.createTable("incident", (table) => {
table.increments("id");
table.string("title", 255).notNullable();
table.text("content", 255).notNullable();
table.string("style", 30).notNullable().defaultTo("warning");
table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
table.datetime("last_updated_date");
table.boolean("pin").notNullable().defaultTo(true);
table.boolean("active").notNullable().defaultTo(true);
table.integer("status_page_id").unsigned();
});
// maintenance
await knex.schema.createTable("maintenance", (table) => {
table.increments("id");
table.string("title", 150).notNullable();
table.text("description").notNullable();
table.integer("user_id").unsigned()
.references("id").inTable("user")
.onDelete("SET NULL")
.onUpdate("CASCADE");
table.boolean("active").notNullable().defaultTo(true);
table.string("strategy", 50).notNullable().defaultTo("single");
table.datetime("start_date");
table.datetime("end_date");
table.time("start_time");
table.time("end_time");
table.string("weekdays", 250).defaultTo("[]");
table.text("days_of_month").defaultTo("[]");
table.integer("interval_day");
table.index("active");
table.index([ "strategy", "active" ], "manual_active");
table.index("user_id", "maintenance_user_id");
});
// status_page
await knex.schema.createTable("status_page", (table) => {
table.increments("id");
table.string("slug", 255).notNullable().unique().collate("utf8_general_ci");
table.string("title", 255).notNullable();
table.text("description");
table.string("icon", 255).notNullable();
table.string("theme", 30).notNullable();
table.boolean("published").notNullable().defaultTo(true);
table.boolean("search_engine_index").notNullable().defaultTo(true);
table.boolean("show_tags").notNullable().defaultTo(false);
table.string("password");
table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
table.datetime("modified_date").notNullable().defaultTo(knex.fn.now());
table.text("footer_text");
table.text("custom_css");
table.boolean("show_powered_by").notNullable().defaultTo(true);
table.string("google_analytics_tag_id");
});
// maintenance_status_page
await knex.schema.createTable("maintenance_status_page", (table) => {
table.increments("id");
table.integer("status_page_id").unsigned().notNullable()
.references("id").inTable("status_page")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("maintenance_id").unsigned().notNullable()
.references("id").inTable("maintenance")
.onDelete("CASCADE")
.onUpdate("CASCADE");
});
// maintenance_timeslot
await knex.schema.createTable("maintenance_timeslot", (table) => {
table.increments("id");
table.integer("maintenance_id").unsigned().notNullable()
.references("id").inTable("maintenance")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.datetime("start_date").notNullable();
table.datetime("end_date");
table.boolean("generated_next").defaultTo(false);
table.index("maintenance_id");
table.index([ "maintenance_id", "start_date", "end_date" ], "active_timeslot_index");
table.index("generated_next", "generated_next_index");
});
// monitor_group
await knex.schema.createTable("monitor_group", (table) => {
table.increments("id");
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("group_id").unsigned().notNullable()
.references("id").inTable("group")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("weight").notNullable().defaultTo(1000);
table.boolean("send_url").notNullable().defaultTo(false);
table.index([ "monitor_id", "group_id" ], "fk");
});
// monitor_maintenance
await knex.schema.createTable("monitor_maintenance", (table) => {
table.increments("id");
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("maintenance_id").unsigned().notNullable()
.references("id").inTable("maintenance")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.index("maintenance_id", "maintenance_id_index2");
table.index("monitor_id", "monitor_id_index");
});
// notification
await knex.schema.createTable("notification", (table) => {
table.increments("id");
table.string("name", 255);
table.string("config", 255); // TODO: should use TEXT!
table.boolean("active").notNullable().defaultTo(true);
table.integer("user_id").unsigned();
table.boolean("is_default").notNullable().defaultTo(false);
});
// monitor_notification
await knex.schema.createTable("monitor_notification", (table) => {
table.increments("id").unsigned(); // TODO: no auto increment????
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("notification_id").unsigned().notNullable()
.references("id").inTable("notification")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.index([ "monitor_id", "notification_id" ], "monitor_notification_index");
});
// tag
await knex.schema.createTable("tag", (table) => {
table.increments("id");
table.string("name", 255).notNullable();
table.string("color", 255).notNullable();
table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
});
// monitor_tag
await knex.schema.createTable("monitor_tag", (table) => {
table.increments("id");
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("tag_id").unsigned().notNullable()
.references("id").inTable("tag")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.text("value");
});
// monitor_tls_info
await knex.schema.createTable("monitor_tls_info", (table) => {
table.increments("id");
table.integer("monitor_id").unsigned().notNullable(); //TODO: no fk ?
table.text("info_json");
});
// notification_sent_history
await knex.schema.createTable("notification_sent_history", (table) => {
table.increments("id");
table.string("type", 50).notNullable();
table.integer("monitor_id").unsigned().notNullable();
table.integer("days").notNullable();
table.unique([ "type", "monitor_id", "days" ]);
table.index([ "type", "monitor_id", "days" ], "good_index");
});
// setting
await knex.schema.createTable("setting", (table) => {
table.increments("id");
table.string("key", 200).notNullable().unique().collate("utf8_general_ci");
table.text("value");
table.string("type", 20);
});
// status_page_cname
await knex.schema.createTable("status_page_cname", (table) => {
table.increments("id");
table.integer("status_page_id").unsigned()
.references("id").inTable("status_page")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.string("domain").notNullable().unique().collate("utf8_general_ci");
});
/*********************
* Converted Patch here
*********************/
// 2023-06-30-1348-http-body-encoding.js
// ALTER TABLE monitor ADD http_body_encoding VARCHAR(25);
// UPDATE monitor SET http_body_encoding = 'json' WHERE (type = 'http' or type = 'keyword') AND http_body_encoding IS NULL;
await knex.schema.table("monitor", function (table) {
table.string("http_body_encoding", 25);
});
await knex("monitor")
.where(function () {
this.where("type", "http").orWhere("type", "keyword");
})
.whereNull("http_body_encoding")
.update({
http_body_encoding: "json",
});
// 2023-06-30-1354-add-description-monitor.js
// ALTER TABLE monitor ADD description TEXT default null;
await knex.schema.table("monitor", function (table) {
table.text("description").defaultTo(null);
});
// 2023-06-30-1357-api-key-table.js
/*
CREATE TABLE [api_key] (
[id] INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
[key] VARCHAR(255) NOT NULL,
[name] VARCHAR(255) NOT NULL,
[user_id] INTEGER NOT NULL,
[created_date] DATETIME DEFAULT (DATETIME('now')) NOT NULL,
[active] BOOLEAN DEFAULT 1 NOT NULL,
[expires] DATETIME DEFAULT NULL,
CONSTRAINT FK_user FOREIGN KEY ([user_id]) REFERENCES [user]([id]) ON DELETE CASCADE ON UPDATE CASCADE
);
*/
await knex.schema.createTable("api_key", function (table) {
table.increments("id").primary();
table.string("key", 255).notNullable();
table.string("name", 255).notNullable();
table.integer("user_id").unsigned().notNullable()
.references("id").inTable("user")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.dateTime("created_date").defaultTo(knex.fn.now()).notNullable();
table.boolean("active").defaultTo(1).notNullable();
table.dateTime("expires").defaultTo(null);
});
// 2023-06-30-1400-monitor-tls.js
/*
ALTER TABLE monitor
ADD tls_ca TEXT default null;
ALTER TABLE monitor
ADD tls_cert TEXT default null;
ALTER TABLE monitor
ADD tls_key TEXT default null;
*/
await knex.schema.table("monitor", function (table) {
table.text("tls_ca").defaultTo(null);
table.text("tls_cert").defaultTo(null);
table.text("tls_key").defaultTo(null);
});
// 2023-06-30-1401-maintenance-cron.js
/*
-- 999 characters. https://stackoverflow.com/questions/46134830/maximum-length-for-cron-job
DROP TABLE maintenance_timeslot;
ALTER TABLE maintenance ADD cron TEXT;
ALTER TABLE maintenance ADD timezone VARCHAR(255);
ALTER TABLE maintenance ADD duration INTEGER;
*/
await knex.schema
.dropTableIfExists("maintenance_timeslot")
.table("maintenance", function (table) {
table.text("cron");
table.string("timezone", 255);
table.integer("duration");
});
// 2023-06-30-1413-add-parent-monitor.js.
/*
ALTER TABLE monitor
ADD parent INTEGER REFERENCES [monitor] ([id]) ON DELETE SET NULL ON UPDATE CASCADE;
*/
await knex.schema.table("monitor", function (table) {
table.integer("parent").unsigned()
.references("id").inTable("monitor")
.onDelete("SET NULL")
.onUpdate("CASCADE");
});
/*
patch-add-invert-keyword.sql
ALTER TABLE monitor
ADD invert_keyword BOOLEAN default 0 not null;
*/
await knex.schema.table("monitor", function (table) {
table.boolean("invert_keyword").defaultTo(0).notNullable();
});
/*
patch-added-json-query.sql
ALTER TABLE monitor
ADD json_path TEXT;
ALTER TABLE monitor
ADD expected_value VARCHAR(255);
*/
await knex.schema.table("monitor", function (table) {
table.text("json_path");
table.string("expected_value", 255);
});
/*
patch-added-kafka-producer.sql
ALTER TABLE monitor
ADD kafka_producer_topic VARCHAR(255);
ALTER TABLE monitor
ADD kafka_producer_brokers TEXT;
ALTER TABLE monitor
ADD kafka_producer_ssl INTEGER;
ALTER TABLE monitor
ADD kafka_producer_allow_auto_topic_creation VARCHAR(255);
ALTER TABLE monitor
ADD kafka_producer_sasl_options TEXT;
ALTER TABLE monitor
ADD kafka_producer_message TEXT;
*/
await knex.schema.table("monitor", function (table) {
table.string("kafka_producer_topic", 255);
table.text("kafka_producer_brokers");
table.integer("kafka_producer_ssl");
table.string("kafka_producer_allow_auto_topic_creation", 255);
table.text("kafka_producer_sasl_options");
table.text("kafka_producer_message");
});
/*
patch-add-certificate-expiry-status-page.sql
ALTER TABLE status_page
ADD show_certificate_expiry BOOLEAN default 0 NOT NULL;
*/
await knex.schema.table("status_page", function (table) {
table.boolean("show_certificate_expiry").defaultTo(0).notNullable();
});
/*
patch-monitor-oauth-cc.sql
ALTER TABLE monitor
ADD oauth_client_id TEXT default null;
ALTER TABLE monitor
ADD oauth_client_secret TEXT default null;
ALTER TABLE monitor
ADD oauth_token_url TEXT default null;
ALTER TABLE monitor
ADD oauth_scopes TEXT default null;
ALTER TABLE monitor
ADD oauth_auth_method TEXT default null;
*/
await knex.schema.table("monitor", function (table) {
table.text("oauth_client_id").defaultTo(null);
table.text("oauth_client_secret").defaultTo(null);
table.text("oauth_token_url").defaultTo(null);
table.text("oauth_scopes").defaultTo(null);
table.text("oauth_auth_method").defaultTo(null);
});
/*
patch-add-timeout-monitor.sql
ALTER TABLE monitor
ADD timeout DOUBLE default 0 not null;
*/
await knex.schema.table("monitor", function (table) {
table.double("timeout").defaultTo(0).notNullable();
});
/*
patch-add-gamedig-given-port.sql
ALTER TABLE monitor
ADD gamedig_given_port_only BOOLEAN default 1 not null;
*/
await knex.schema.table("monitor", function (table) {
table.boolean("gamedig_given_port_only").defaultTo(1).notNullable();
});
log.info("mariadb", "Created basic tables for MariaDB");
}
module.exports = {
createTables,
};
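Given the header's warning that new fields belong in `./db/knex_migrations`, `createTables()` reads as a one-shot bootstrap for an empty MariaDB schema. A caller would presumably guard it with an existence check along these lines (a sketch; the require path and the choice of `user` as the sentinel table are assumptions):

```js
const { R } = require("redbean-node");
const { createTables } = require("./knex_init_db"); // hypothetical path

async function initDatabaseIfNeeded() {
    // Assumes R is already connected; only build the base schema once.
    const exists = await R.knex.schema.hasTable("user");
    if (!exists) {
        await createTables();
    }
}
```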

View File

@@ -1,57 +0,0 @@
## Info
https://knexjs.org/guide/migrations.html#knexfile-in-other-languages
## Basic rules
- All tables must have a primary key named `id`
- Filename format: `YYYY-MM-DD-HHMM-patch-name.js`
- Avoid native SQL syntax, use knex methods, because Uptime Kuma supports multiple databases
## Template
Filename: YYYYMMDDHHMMSS_name.js
```js
exports.up = function(knex) {
};
exports.down = function(knex) {
};
// exports.config = { transaction: false };
```
## Example
Filename: 2023-06-30-1348-create-user-and-product.js
```js
exports.up = function(knex) {
return knex.schema
.createTable('user', function (table) {
table.increments('id');
table.string('first_name', 255).notNullable();
table.string('last_name', 255).notNullable();
})
.createTable('product', function (table) {
table.increments('id');
table.decimal('price').notNullable();
table.string('name', 1000).notNullable();
}).then(() => {
knex("products").insert([
{ price: 10, name: "Apple" },
{ price: 20, name: "Orange" },
]);
});
};
exports.down = function(knex) {
return knex.schema
.dropTable("product")
.dropTable("user");
};
```
https://knexjs.org/guide/migrations.html#transactions-in-migrations
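The README stops at the migration file itself. Migrations written this way are applied through knex's migration API; a minimal sketch (the connection settings and `directory` option are assumptions based on the folder layout above):

```js
const knex = require("knex")({
    client: "sqlite3", // or e.g. "mysql2" — Uptime Kuma supports multiple databases
    connection: { filename: "./data/kuma.db" },
    useNullAsDefault: true,
});

// Run every pending migration in ./db/knex_migrations, in filename order.
knex.migrate.latest({ directory: "./db/knex_migrations" })
    .then(() => knex.destroy());
```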

View File

@@ -1,3 +0,0 @@
# Don't create a new migration file here
Please go to ./db/knex_migrations/README.md

View File

@@ -1,7 +0,0 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE status_page
ADD show_certificate_expiry BOOLEAN default 0 NOT NULL;
COMMIT;

View File

@@ -1,7 +0,0 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD gamedig_given_port_only BOOLEAN default 1 not null;
COMMIT;

View File

@@ -1,6 +0,0 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD timeout DOUBLE default 0 not null;
COMMIT;

View File

@@ -1,19 +0,0 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD oauth_client_id TEXT default null;
ALTER TABLE monitor
ADD oauth_client_secret TEXT default null;
ALTER TABLE monitor
ADD oauth_token_url TEXT default null;
ALTER TABLE monitor
ADD oauth_scopes TEXT default null;
ALTER TABLE monitor
ADD oauth_auth_method TEXT default null;
COMMIT;

View File

@@ -0,0 +1,8 @@
# DON'T UPDATE TO alpine3.13, 1.14, see #41.
FROM node:16-alpine3.12
WORKDIR /app
# Install apprise, iputils for non-root ping, setpriv
RUN apk add --no-cache iputils setpriv dumb-init python3 py3-cryptography py3-pip py3-six py3-yaml py3-click py3-markdown py3-requests py3-requests-oauthlib git && \
pip3 --no-cache-dir install apprise==1.4.0 && \
rm -rf /root/.cache

View File

@@ -1,56 +1,28 @@
+# DON'T UPDATE TO node:14-bullseye-slim, see #372.
 # If the image changed, the second stage image should be changed too
-FROM node:20-bookworm-slim AS base2-slim
+FROM node:16-buster-slim
 ARG TARGETPLATFORM
 WORKDIR /app
-# Specify --no-install-recommends to skip unused dependencies, make the base much smaller!
-# apprise = for notifications (From testing repo)
-# sqlite3 = for debugging
-# iputils-ping = for ping
-# util-linux = for setpriv (Should be dropped in 2.0.0?)
-# dumb-init = avoid zombie processes (#480)
-# curl = for debugging
-# ca-certificates = keep the cert up-to-date
-# sudo = for start service nscd with non-root user
-# nscd = for better DNS caching
-RUN echo "deb http://deb.debian.org/debian testing main" >> /etc/apt/sources.list && \
-    apt update && \
-    apt --yes --no-install-recommends -t testing install apprise sqlite3 ca-certificates && \
-    apt --yes --no-install-recommends -t stable install \
-        iputils-ping \
-        util-linux \
-        dumb-init \
-        curl \
-        sudo \
-        nscd && \
+# Install Curl
+# Install Apprise, add sqlite3 cli for debugging in the future, iputils-ping for ping, util-linux for setpriv
+# Stupid python3 and python3-pip actually install a lot of useless things into Debian, specify --no-install-recommends to skip them, make the base even smaller than alpine!
+RUN apt-get update && \
+    apt-get --yes --no-install-recommends install python3 python3-pip python3-cryptography python3-six python3-yaml python3-click python3-markdown python3-requests python3-requests-oauthlib \
+        sqlite3 iputils-ping util-linux dumb-init git curl ca-certificates && \
+    pip3 --no-cache-dir install apprise==1.4.0 && \
     rm -rf /var/lib/apt/lists/* && \
     apt --yes autoremove
 # Install cloudflared
-RUN curl https://pkg.cloudflare.com/cloudflare-main.gpg --output /usr/share/keyrings/cloudflare-main.gpg && \
-    echo 'deb [signed-by=/usr/share/keyrings/cloudflare-main.gpg] https://pkg.cloudflare.com/cloudflared bullseye main' | tee /etc/apt/sources.list.d/cloudflared.list && \
-    apt update && \
-    apt install --yes --no-install-recommends -t stable cloudflared && \
+RUN set -eux && \
+    mkdir -p --mode=0755 /usr/share/keyrings && \
+    curl --fail --show-error --silent --location --insecure https://pkg.cloudflare.com/cloudflare-main.gpg --output /usr/share/keyrings/cloudflare-main.gpg && \
+    echo 'deb [signed-by=/usr/share/keyrings/cloudflare-main.gpg] https://pkg.cloudflare.com/cloudflared buster main' | tee /etc/apt/sources.list.d/cloudflared.list && \
+    apt-get update && \
+    apt-get install --yes --no-install-recommends cloudflared && \
     cloudflared version && \
     rm -rf /var/lib/apt/lists/* && \
     apt --yes autoremove
-# For nscd
-COPY ./docker/etc/nscd.conf /etc/nscd.conf
-COPY ./docker/etc/sudoers /etc/sudoers
-# Full Base Image
-# MariaDB, Chromium and fonts
-# Not working for armv7, so use the older version (10.5) of MariaDB from the debian repo
-# curl -LsS https://r.mariadb.com/downloads/mariadb_repo_setup | bash -s -- --mariadb-server-version="mariadb-11.1" && \
-FROM base2-slim AS base2
-ENV UPTIME_KUMA_ENABLE_EMBEDDED_MARIADB=1
-RUN apt update && \
-    apt --yes --no-install-recommends install chromium fonts-indic fonts-noto fonts-noto-cjk mariadb-server && \
-    apt --yes remove curl && \
-    rm -rf /var/lib/apt/lists/* && \
-    apt --yes autoremove && \
-    chown -R node:node /var/lib/mysql

View File

@@ -1,14 +0,0 @@
version: '3.8'
services:
uptime-kuma:
container_name: uptime-kuma-dev
image: louislam/uptime-kuma:nightly2
volumes:
#- ./data:/app/data
- ../server:/app/server
- ../db:/app/db
ports:
- "3001:3001" # <Host Port>:<Container Port>
- "3307:3306"

View File

@@ -1,15 +1,14 @@
-version: '3.8'
+# Simple docker-compose.yml
+# You can change your port or volume location
+version: '3.3'
 services:
   uptime-kuma:
-    image: louislam/uptime-kuma:2
+    image: louislam/uptime-kuma:1
     container_name: uptime-kuma
     volumes:
-      - uptime-kuma:/app/data
+      - ./uptime-kuma-data:/app/data
     ports:
-      - "3001:3001" # <Host Port>:<Container Port>
+      - 3001:3001 # <Host Port>:<Container Port>
     restart: always
-volumes:
-  uptime-kuma:

View File

@@ -1,8 +1,6 @@
-ARG BASE_IMAGE=louislam/uptime-kuma:base2
 ############################################
 # Build in Golang
-# Run npm run build-healthcheck-armv7 in the host first, otherwise it will be super slow where it is building the armv7 healthcheck
+# Run npm run build-healthcheck-armv7 in the host first, another it will be super slow where it is building the armv7 healthcheck
 # Check file: builder-go.dockerfile
 ############################################
 FROM louislam/uptime-kuma:builder-go AS build_healthcheck
@@ -10,47 +8,49 @@ FROM louislam/uptime-kuma:builder-go AS build_healthcheck
 ############################################
 # Build in Node.js
 ############################################
-FROM louislam/uptime-kuma:base2 AS build
-USER node
+FROM louislam/uptime-kuma:base-debian AS build
 WORKDIR /app
 ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
-COPY --chown=node:node .npmrc .npmrc
-COPY --chown=node:node package.json package.json
-COPY --chown=node:node package-lock.json package-lock.json
+COPY .npmrc .npmrc
+COPY package.json package.json
+COPY package-lock.json package-lock.json
 RUN npm ci --omit=dev
 COPY . .
-COPY --chown=node:node --from=build_healthcheck /app/extra/healthcheck /app/extra/healthcheck
+COPY --from=build_healthcheck /app/extra/healthcheck /app/extra/healthcheck
+RUN chmod +x /app/extra/entrypoint.sh
 ############################################
 # ⭐ Main Image
 ############################################
-FROM $BASE_IMAGE AS release
-USER node
+FROM louislam/uptime-kuma:base-debian AS release
 WORKDIR /app
 ENV UPTIME_KUMA_IS_CONTAINER=1
 # Copy app files from build layer
-COPY --chown=node:node --from=build /app /app
+COPY --from=build /app /app
 EXPOSE 3001
+VOLUME ["/app/data"]
 HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD extra/healthcheck
-ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+ENTRYPOINT ["/usr/bin/dumb-init", "--", "extra/entrypoint.sh"]
 CMD ["node", "server/server.js"]
 ############################################
 # Mark as Nightly
 ############################################
 FROM release AS nightly
-USER node
 RUN npm run mark-as-nightly
 ############################################
 # Build an image for testing pr
 ############################################
-FROM louislam/uptime-kuma:base2 AS pr-test2
+FROM louislam/uptime-kuma:base-debian AS pr-test
 WORKDIR /app
 ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
 ## Install Git
@@ -78,7 +78,7 @@ CMD ["npm", "run", "start-pr-test"]
 ############################################
 # Upload the artifact to Github
 ############################################
-FROM louislam/uptime-kuma:base2 AS upload-artifact
+FROM louislam/uptime-kuma:base-debian AS upload-artifact
 WORKDIR /
 RUN apt update && \
     apt --yes install curl file

docker/dockerfile-alpine Normal file
View File

@@ -0,0 +1,27 @@
FROM louislam/uptime-kuma:base-alpine AS build
WORKDIR /app
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
COPY .npmrc .npmrc
COPY package.json package.json
COPY package-lock.json package-lock.json
RUN npm ci --omit=dev
COPY . .
RUN chmod +x /app/extra/entrypoint.sh
FROM louislam/uptime-kuma:base-alpine AS release
WORKDIR /app
# Copy app files from build layer
COPY --from=build /app /app
EXPOSE 3001
VOLUME ["/app/data"]
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD node extra/healthcheck.js
ENTRYPOINT ["/usr/bin/dumb-init", "--", "extra/entrypoint.sh"]
CMD ["node", "server/server.js"]
FROM release AS nightly
RUN npm run mark-as-nightly

View File

@@ -1,90 +0,0 @@
#
# /etc/nscd.conf
#
# An example Name Service Cache config file. This file is needed by nscd.
#
# Legal entries are:
#
# logfile <file>
# debug-level <level>
# threads <initial #threads to use>
# max-threads <maximum #threads to use>
# server-user <user to run server as instead of root>
# server-user is ignored if nscd is started with -S parameters
# stat-user <user who is allowed to request statistics>
# reload-count unlimited|<number>
# paranoia <yes|no>
# restart-interval <time in seconds>
#
# enable-cache <service> <yes|no>
# positive-time-to-live <service> <time in seconds>
# negative-time-to-live <service> <time in seconds>
# suggested-size <service> <prime number>
# check-files <service> <yes|no>
# persistent <service> <yes|no>
# shared <service> <yes|no>
# max-db-size <service> <number bytes>
# auto-propagate <service> <yes|no>
#
# Currently supported cache names (services): passwd, group, hosts, services
#
# logfile /var/log/nscd.log
# threads 4
# max-threads 32
# server-user node
# stat-user somebody
debug-level 0
# reload-count 5
paranoia no
# restart-interval 3600
enable-cache passwd no
positive-time-to-live passwd 600
negative-time-to-live passwd 20
suggested-size passwd 211
check-files passwd yes
persistent passwd yes
shared passwd yes
max-db-size passwd 33554432
auto-propagate passwd yes
enable-cache group no
positive-time-to-live group 3600
negative-time-to-live group 60
suggested-size group 211
check-files group yes
persistent group yes
shared group yes
max-db-size group 33554432
auto-propagate group yes
enable-cache hosts yes
positive-time-to-live hosts 3600
negative-time-to-live hosts 20
suggested-size hosts 211
check-files hosts yes
persistent hosts yes
# Set shared to "no" to display stats in `nscd -g`
# Read more: https://stackoverflow.com/questions/40429245/nscdcentos7curl-0-dns-cache-hit-rate
shared hosts no
max-db-size hosts 33554432
enable-cache services no
positive-time-to-live services 28800
negative-time-to-live services 20
suggested-size services 211
check-files services yes
persistent services yes
shared services yes
max-db-size services 33554432
enable-cache netgroup no
positive-time-to-live netgroup 28800
negative-time-to-live netgroup 20
suggested-size netgroup 211
check-files netgroup yes
persistent netgroup yes
shared netgroup yes
max-db-size netgroup 33554432

View File

@@ -1,31 +0,0 @@
#
# This file MUST be edited with the 'visudo' command as root.
#
# Please consider adding local content in /etc/sudoers.d/ instead of
# directly modifying this file.
#
# See the man page for details on how to write a sudoers file.
#
Defaults env_reset
Defaults mail_badpass
Defaults secure_path="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
# Host alias specification
# User alias specification
# Cmnd alias specification
# User privilege specification
root ALL=(ALL:ALL) ALL
# Allow members of group sudo to execute any command
%sudo ALL=(ALL:ALL) ALL
# See sudoers(5) for more information on "#include" directives:
#includedir /etc/sudoers.d
# Allow `node` to control service (mainly for nscd)
node ALL=(root) NOPASSWD: /usr/sbin/nscdservice
node ALL=(root) NOPASSWD: /usr/sbin/service

View File

@@ -36,8 +36,6 @@ if (! exists) {
 /**
  * Commit updated files
  * @param {string} version Version to update to
- * @returns {void}
- * @throws Error committing files
  */
 function commit(version) {
     let msg = "Update to " + version;
@@ -57,7 +55,6 @@ function commit(version) {
 /**
  * Create a tag with the specified version
  * @param {string} version Tag to create
- * @returns {void}
  */
 function tag(version) {
     let res = childProcess.spawnSync("git", [ "tag", version ]);
@@ -71,7 +68,6 @@ function tag(version) {
  * Check if a tag exists for the specified version
  * @param {string} version Version to check
  * @returns {boolean} Does the tag already exist
- * @throws Version is not valid
 */
 function tagExists(version) {
     if (! version) {

View File

@@ -15,7 +15,6 @@ download(url);
 /**
  * Downloads the latest version of the dist from a GitHub release.
  * @param {string} url The URL to download from.
- * @returns {void}
  *
  * Generated by Trelent
 */

extra/entrypoint.sh Normal file
View File

@@ -0,0 +1,21 @@
#!/usr/bin/env sh
# set -e Exit the script if an error happens
set -e
PUID=${PUID=0}
PGID=${PGID=0}
files_ownership () {
# -h Changes the ownership of an encountered symbolic link and not that of the file or directory pointed to by the symbolic link.
# -R Recursively descends the specified directories
# -c Like verbose but report only when a change is made
chown -hRc "$PUID":"$PGID" /app/data
}
echo "==> Performing startup jobs and maintenance tasks"
files_ownership
echo "==> Starting application with user $PUID group $PGID"
# --clear-groups Clear supplementary groups.
exec setpriv --reuid "$PUID" --regid "$PGID" --clear-groups "$@"

View File

@@ -4,12 +4,12 @@ const fs = require("fs");
 * to avoid the runtime deprecation warning triggered for using `fs.rmdirSync` with `{ recursive: true }` in Node.js v16,
 * or the `recursive` property removing completely in the future Node.js version.
 * See the link below.
-*
 * @todo Once we drop the support for Node.js v14 (or at least versions before v14.14.0), we can safely replace this function with `fs.rmSync`, since `fs.rmSync` was add in Node.js v14.14.0 and currently we supports all the Node.js v14 versions that include the versions before the v14.14.0, and this function have almost the same signature with `fs.rmSync`.
 * @link https://nodejs.org/docs/latest-v16.x/api/deprecations.html#dep0147-fsrmdirpath--recursive-true- the deprecation infomation of `fs.rmdirSync`
 * @link https://nodejs.org/docs/latest-v16.x/api/fs.html#fsrmsyncpath-options the document of `fs.rmSync`
 * @param {fs.PathLike} path Valid types for path values in "fs".
-* @param {fs.RmDirOptions} options options for `fs.rmdirSync`, if `fs.rmSync` is available and property `recursive` is true, it will automatically have property `force` with value `true`.
+* @param {fs.RmDirOptions} [options] options for `fs.rmdirSync`, if `fs.rmSync` is available and property `recursive` is true, it will automatically have property `force` with value `true`.
-* @returns {void}
 */
 const rmSync = (path, options) => {
     if (typeof fs.rmSync === "function") {
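The hunk cuts off at the feature check. Based on the JSDoc above ("if `fs.rmSync` is available and property `recursive` is true, it will automatically have property `force` with value `true`"), the rest of the wrapper presumably branches like this (a reconstruction sketch, not the verbatim file):

```js
const fs = require("fs");

const rmSync = (path, options) => {
    if (typeof fs.rmSync === "function") {
        // Node.js >= 14.14.0: use fs.rmSync, forcing removal when recursive
        // so it mirrors fs.rmdirSync's old behavior.
        if (options && options.recursive) {
            options = { ...options, force: true };
        }
        return fs.rmSync(path, options);
    }
    // Older Node.js: fall back to the deprecated fs.rmdirSync.
    return fs.rmdirSync(path, options);
};

module.exports = rmSync; // the file is consumed as a default import elsewhere
```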

View File

@@ -5,15 +5,15 @@
 // curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh
 println("=====================");
-println("Uptime Kuma Install Script");
+println("Uptime Kuma Installer");
 println("=====================");
-println("Supported OS: Ubuntu >= 16.04, Debian and CentOS/RHEL 7/8");
+println("Supported OS: CentOS 7/8, Ubuntu >= 16.04 and Debian");
 println("---------------------------------------");
 println("This script is designed for Linux and basic usage.");
 println("For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation");
 println("---------------------------------------");
 println("");
-println("Local - Install Uptime Kuma on your current machine with git, Node.js and pm2");
+println("Local - Install Uptime Kuma in your current machine with git, Node.js 14 and pm2");
 println("Docker - Install Uptime Kuma Docker container");
 println("");
@@ -29,10 +29,14 @@ function checkNode() {
     bash("nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])')");
     println("Node Version: " ++ nodeVersion);
-    if (nodeVersion <= "12") {
+    if (nodeVersion < "12") {
         println("Error: Required Node.js 14");
         call("exit", "1");
     }
+    if (nodeVersion == "12") {
+        println("Warning: NodeJS " ++ nodeVersion ++ " is not tested.");
+    }
 }
 function deb() {
@@ -56,8 +60,8 @@ function deb() {
         bash("apt --yes install curl");
     }
-    println("Installing Node.js 16");
-    bash("curl -sL https://deb.nodesource.com/setup_16.x | bash - > log.txt");
+    println("Installing Node.js 14");
+    bash("curl -sL https://deb.nodesource.com/setup_14.x | bash - > log.txt");
     bash("apt --yes install nodejs");
     bash("node -v");
@@ -87,10 +91,6 @@ if (type == "local") {
     bash("os=$(head -n1 /etc/issue | cut -f 1 -d ' ')");
     if (os == "Ubuntu") {
         distribution = "ubuntu";
-        // Get ubuntu version
-        bash(". /etc/lsb-release");
-        version = DISTRIB_RELEASE;
     }
     if (os == "Debian") {
         distribution = "debian";
@@ -101,7 +101,6 @@ if (type == "local") {
     println("Your OS: " ++ os);
     println("Distribution: " ++ distribution);
-    println("Version: " ++ version);
     println("Arch: " ++ arch);
     if ("$3" != "") {
@@ -132,32 +131,15 @@ if (type == "local") {
         checkNode();
     } else {
-        bash("dnfCheck=$(dnf --version)");
-        // Use yum
-        if (dnfCheck == "") {
         bash("curlCheck=$(curl --version)");
         if (curlCheck == "") {
             println("Installing Curl");
             bash("yum -y -q install curl");
         }
-        println("Installing Node.js 16");
-        bash("curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt");
+        println("Installing Node.js 14");
+        bash("curl -sL https://rpm.nodesource.com/setup_14.x | bash - > log.txt");
         bash("yum install -y -q nodejs");
-        } else {
-            bash("curlCheck=$(curl --version)");
-            if (curlCheck == "") {
-                println("Installing Curl");
-                bash("dnf -y install curl");
-            }
-            println("Installing Node.js 16");
-            bash("curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt");
-            bash("dnf install -y nodejs");
-        }
         bash("node -v");
         bash("nodeCheckAgain=$(node -v)");
@@ -211,14 +193,6 @@ if (type == "local") {
         bash("pm2 startup");
     }
-    // Check again
-    bash("check=$(pm2 --version)");
-    if (check == "") {
-        println("Error: pm2 is not found!");
-        bash("exit 1");
-    }
     bash("mkdir -p $installPath");
     bash("cd $installPath");
     bash("git clone https://github.com/louislam/uptime-kuma.git .");

View File

@@ -12,7 +12,7 @@ const rl = readline.createInterface({
 });
 const main = async () => {
-    Database.initDataDir(args);
+    Database.init(args);
     await Database.connect();
     try {

View File

@@ -13,7 +13,7 @@
 const main = async () => {
     console.log("Connecting the database");
-    Database.initDataDir(args);
+    Database.init(args);
     await Database.connect(false, false, true);
     try {

View File

@@ -138,7 +138,7 @@ server.listen({
 /**
  * Get human readable request type from request code
  * @param {number} code Request code to translate
- * @returns {string|void} Human readable request type
+ * @returns {string} Human readable request type
 */
 function type(code) {
     for (let name in Packet.TYPE) {
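Only the loop header survives in the hunk, but the two `@returns` annotations pin down the body: it reverse-maps a numeric code to its name in `Packet.TYPE`, falling through to `undefined` when nothing matches, which is what the `{string|void}` annotation on the removed side captures. A sketch:

```js
function type(code) {
    for (let name in Packet.TYPE) {
        if (Packet.TYPE[name] === code) {
            return name; // e.g. a code of 1 maps back to "A"
        }
    }
    // Implicitly returns undefined for unknown codes, hence {string|void}.
}
```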

View File

@@ -7,17 +7,11 @@ class SimpleMqttServer {
     aedes = require("aedes")();
     server = require("net").createServer(this.aedes.handle);
-    /**
-     * @param {number} port Port to listen on
-     */
     constructor(port) {
         this.port = port;
     }
-    /**
-     * Start the MQTT server
-     * @returns {void}
-     */
+    /** Start the MQTT server */
     start() {
         this.server.listen(this.port, () => {
             console.log("server started and listening on port ", this.port);

View File

@@ -12,7 +12,6 @@ import rmSync from "../fs-rmSync.js";
 * created with this code if one does not already exist
 * @param {string} baseLang The second base language file to copy. This
 * will be ignored if set to "en" as en.js is copied by default
-* @returns {void}
 */
 function copyFiles(langCode, baseLang) {
     if (fs.existsSync("./languages")) {
@@ -34,8 +33,7 @@ function copyFiles(langCode, baseLang) {
 /**
 * Update the specified language file
 * @param {string} langCode Language code to update
-* @param {string} baseLangCode Second language to copy keys from
+* @param {string} baseLang Second language to copy keys from
-* @returns {void}
 */
 async function updateLanguage(langCode, baseLangCode) {
     const en = (await import("./languages/en.js")).default;

View File

@@ -39,8 +39,6 @@ if (! exists) {
/** /**
* Commit updated files * Commit updated files
* @param {string} version Version to update to * @param {string} version Version to update to
* @returns {void}
* @throws Error when committing files
*/ */
function commit(version) { function commit(version) {
let msg = "Update to " + version; let msg = "Update to " + version;
@@ -57,7 +55,6 @@ function commit(version) {
/** /**
* Create a tag with the specified version * Create a tag with the specified version
* @param {string} version Tag to create * @param {string} version Tag to create
* @returns {void}
*/ */
function tag(version) { function tag(version) {
let res = childProcess.spawnSync("git", [ "tag", version ]); let res = childProcess.spawnSync("git", [ "tag", version ]);
@@ -68,7 +65,6 @@ function tag(version) {
* Check if a tag exists for the specified version * Check if a tag exists for the specified version
* @param {string} version Version to check * @param {string} version Version to check
* @returns {boolean} Does the tag already exist * @returns {boolean} Does the tag already exist
* @throws Version is not valid
*/ */
function tagExists(version) { function tagExists(version) {
if (! version) { if (! version) {
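tagExists() above shells out to git via spawnSync. A hedged sketch of how such a check can be completed; the -l flag and the string comparison are assumptions beyond what the hunk shows:

const childProcess = require("child_process");

// Check if a git tag exists for the specified version.
// Throws when no version is given, matching the documented behaviour above.
function tagExists(version) {
    if (!version) {
        throw new Error("Version is not valid");
    }
    // `git tag -l v1.2.3` prints the tag name only if it exists
    const res = childProcess.spawnSync("git", [ "tag", "-l", version ]);
    return res.stdout.toString().trim() === version;
}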

View File

@@ -13,7 +13,6 @@ updateWiki(newVersion);
/** /**
* Update the wiki with new version number * Update the wiki with new version number
* @param {string} newVersion Version to update to * @param {string} newVersion Version to update to
* @returns {void}
*/ */
function updateWiki(newVersion) { function updateWiki(newVersion) {
const wikiDir = "./tmp/wiki"; const wikiDir = "./tmp/wiki";
@@ -47,7 +46,6 @@ function updateWiki(newVersion) {
/** /**
* Check if a directory exists and then delete it * Check if a directory exists and then delete it
* @param {string} dir Directory to delete * @param {string} dir Directory to delete
* @returns {void}
*/ */
function safeDelete(dir) { function safeDelete(dir) {
if (fs.existsSync(dir)) { if (fs.existsSync(dir)) {

View File

@@ -9,24 +9,8 @@
<meta name="theme-color" id="theme-color" content="" /> <meta name="theme-color" id="theme-color" content="" />
<meta name="description" content="Uptime Kuma monitoring tool" /> <meta name="description" content="Uptime Kuma monitoring tool" />
<title>Uptime Kuma</title> <title>Uptime Kuma</title>
<style>
.noscript-message {
font-size: 20px;
text-align: center;
padding: 10px;
max-width: 500px;
margin: 0 auto;
}
</style>
</head> </head>
<body> <body>
<noscript>
<div class="noscript-message">
Sorry, you don't seem to have JavaScript enabled or your browser
doesn't support it.<br />This website requires JavaScript to function.
Please enable JavaScript in your browser settings to continue.
</div>
</noscript>
<div id="app"></div> <div id="app"></div>
<script type="module" src="/src/main.js"></script> <script type="module" src="/src/main.js"></script>
</body> </body>

View File

@@ -3,15 +3,15 @@
# The command is working on Windows PowerShell and Docker for Windows only. # The command is working on Windows PowerShell and Docker for Windows only.
# curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh # curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh
"echo" "-e" "=====================" "echo" "-e" "====================="
"echo" "-e" "Uptime Kuma Install Script" "echo" "-e" "Uptime Kuma Installer"
"echo" "-e" "=====================" "echo" "-e" "====================="
"echo" "-e" "Supported OS: Ubuntu >= 16.04, Debian and CentOS/RHEL 7/8" "echo" "-e" "Supported OS: CentOS 7/8, Ubuntu >= 16.04 and Debian"
"echo" "-e" "---------------------------------------" "echo" "-e" "---------------------------------------"
"echo" "-e" "This script is designed for Linux and basic usage." "echo" "-e" "This script is designed for Linux and basic usage."
"echo" "-e" "For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation" "echo" "-e" "For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation"
"echo" "-e" "---------------------------------------" "echo" "-e" "---------------------------------------"
"echo" "-e" "" "echo" "-e" ""
"echo" "-e" "Local - Install Uptime Kuma on your current machine with git, Node.js and pm2" "echo" "-e" "Local - Install Uptime Kuma in your current machine with git, Node.js 14 and pm2"
"echo" "-e" "Docker - Install Uptime Kuma Docker container" "echo" "-e" "Docker - Install Uptime Kuma Docker container"
"echo" "-e" "" "echo" "-e" ""
if [ "$1" != "" ]; then if [ "$1" != "" ]; then
@@ -25,9 +25,12 @@ function checkNode {
nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])') nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])')
"echo" "-e" "Node Version: ""$nodeVersion" "echo" "-e" "Node Version: ""$nodeVersion"
_0="12" _0="12"
if [ $(($nodeVersion <= $_0)) == 1 ]; then if [ $(($nodeVersion < $_0)) == 1 ]; then
"echo" "-e" "Error: Required Node.js 14" "echo" "-e" "Error: Required Node.js 14"
"exit" "1" "exit" "1"
fi
if [ "$nodeVersion" == "12" ]; then
"echo" "-e" "Warning: NodeJS ""$nodeVersion"" is not tested."
fi fi
} }
function deb { function deb {
@@ -47,8 +50,8 @@ fi
"echo" "-e" "Installing Curl" "echo" "-e" "Installing Curl"
apt --yes install curl apt --yes install curl
fi fi
"echo" "-e" "Installing Node.js 16" "echo" "-e" "Installing Node.js 14"
curl -sL https://deb.nodesource.com/setup_16.x | bash - > log.txt curl -sL https://deb.nodesource.com/setup_14.x | bash - > log.txt
apt --yes install nodejs apt --yes install nodejs
node -v node -v
nodeCheckAgain=$(node -v) nodeCheckAgain=$(node -v)
@@ -73,9 +76,6 @@ if [ "$type" == "local" ]; then
os=$(head -n1 /etc/issue | cut -f 1 -d ' ') os=$(head -n1 /etc/issue | cut -f 1 -d ' ')
if [ "$os" == "Ubuntu" ]; then if [ "$os" == "Ubuntu" ]; then
distribution="ubuntu" distribution="ubuntu"
# Get ubuntu version
. /etc/lsb-release
version="$DISTRIB_RELEASE"
fi fi
if [ "$os" == "Debian" ]; then if [ "$os" == "Debian" ]; then
distribution="debian" distribution="debian"
@@ -85,7 +85,6 @@ fi
arch=$(uname -i) arch=$(uname -i)
"echo" "-e" "Your OS: ""$os" "echo" "-e" "Your OS: ""$os"
"echo" "-e" "Distribution: ""$distribution" "echo" "-e" "Distribution: ""$distribution"
"echo" "-e" "Version: ""$version"
"echo" "-e" "Arch: ""$arch" "echo" "-e" "Arch: ""$arch"
if [ "$3" != "" ]; then if [ "$3" != "" ]; then
port="$3" port="$3"
@@ -109,27 +108,14 @@ fi
if [ "$nodeCheck" != "" ]; then if [ "$nodeCheck" != "" ]; then
"checkNode" "checkNode"
else else
dnfCheck=$(dnf --version)
# Use yum
if [ "$dnfCheck" == "" ]; then
curlCheck=$(curl --version) curlCheck=$(curl --version)
if [ "$curlCheck" == "" ]; then if [ "$curlCheck" == "" ]; then
"echo" "-e" "Installing Curl" "echo" "-e" "Installing Curl"
yum -y -q install curl yum -y -q install curl
fi fi
"echo" "-e" "Installing Node.js 16" "echo" "-e" "Installing Node.js 14"
curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt curl -sL https://rpm.nodesource.com/setup_14.x | bash - > log.txt
yum install -y -q nodejs yum install -y -q nodejs
else
curlCheck=$(curl --version)
if [ "$curlCheck" == "" ]; then
"echo" "-e" "Installing Curl"
dnf -y install curl
fi
"echo" "-e" "Installing Node.js 16"
curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt
dnf install -y nodejs
fi
node -v node -v
nodeCheckAgain=$(node -v) nodeCheckAgain=$(node -v)
if [ "$nodeCheckAgain" == "" ]; then if [ "$nodeCheckAgain" == "" ]; then
@@ -175,12 +161,6 @@ fi
"echo" "-e" "Installing PM2" "echo" "-e" "Installing PM2"
npm install pm2 -g && pm2 install pm2-logrotate npm install pm2 -g && pm2 install pm2-logrotate
pm2 startup pm2 startup
fi
# Check again
check=$(pm2 --version)
if [ "$check" == "" ]; then
"echo" "-e" "Error: pm2 is not found!"
exit 1
fi fi
mkdir -p $installPath mkdir -p $installPath
cd $installPath cd $installPath
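The version gate above compares the major Node.js version against 12: one side of the diff rejects major versions below 12 and merely warns on 12, the other rejects 12 and below outright. The same check expressed in Node itself, as a sketch:

const major = Number(process.versions.node.split(".")[0]);

if (major <= 12) {
    console.error("Error: Required Node.js 14");
    process.exit(1);
}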

package-lock.json (generated; 3,442 lines changed): diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
{ {
"name": "uptime-kuma", "name": "uptime-kuma",
"version": "1.23.0-beta.1", "version": "1.22.1",
"license": "MIT", "license": "MIT",
"repository": { "repository": {
"type": "git", "type": "git",
@@ -29,14 +29,15 @@
"jest-backend": "cross-env TEST_BACKEND=1 jest --runInBand --detectOpenHandles --forceExit --config=./config/jest-backend.config.js", "jest-backend": "cross-env TEST_BACKEND=1 jest --runInBand --detectOpenHandles --forceExit --config=./config/jest-backend.config.js",
"tsc": "tsc", "tsc": "tsc",
"vite-preview-dist": "vite preview --host --config ./config/vite.config.js", "vite-preview-dist": "vite preview --host --config ./config/vite.config.js",
"build-docker": "npm run build && npm run build-docker-full && npm run build-docker-slim", "build-docker": "npm run build && npm run build-docker-debian && npm run build-docker-alpine",
"build-docker-base": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base2 --target base2 . --push", "build-docker-alpine-base": "docker buildx build -f docker/alpine-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base-alpine . --push",
"build-docker-base-slim": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base2-slim --target base2-slim . --push", "build-docker-debian-base": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base-debian . --push",
"build-docker-builder-go": "docker buildx build -f docker/builder-go.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:builder-go . --push", "build-docker-builder-go": "docker buildx build -f docker/builder-go.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:builder-go . --push",
"build-docker-slim": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2-slim -t louislam/uptime-kuma:$VERSION-slim --target release --build-arg BASE_IMAGE=louislam/uptime-kuma:base2-slim . --push", "build-docker-alpine": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:alpine -t louislam/uptime-kuma:1-alpine -t louislam/uptime-kuma:$VERSION-alpine --target release . --push",
"build-docker-full": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2 -t louislam/uptime-kuma:$VERSION --target release . --push", "build-docker-debian": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma -t louislam/uptime-kuma:1 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:debian -t louislam/uptime-kuma:1-debian -t louislam/uptime-kuma:$VERSION-debian --target release . --push",
"build-docker-nightly": "node ./extra/test-docker.js && npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly2 --target nightly . --push", "build-docker-nightly": "node ./extra/test-docker.js && npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly --target nightly . --push",
"build-docker-nightly-local": "npm run build && docker build -f docker/dockerfile -t louislam/uptime-kuma:nightly2 --target nightly .", "build-docker-nightly-alpine": "docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly-alpine --target nightly . --push",
"build-docker-nightly-amd64": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:nightly-amd64 --target nightly . --push --progress plain",
"build-docker-pr-test": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64 -t louislam/uptime-kuma:pr-test --target pr-test . --push", "build-docker-pr-test": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64 -t louislam/uptime-kuma:pr-test --target pr-test . --push",
"upload-artifacts": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:upload-artifact --build-arg VERSION --build-arg GITHUB_TOKEN --target upload-artifact . --progress plain", "upload-artifacts": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:upload-artifact --build-arg VERSION --build-arg GITHUB_TOKEN --target upload-artifact . --progress plain",
"setup": "git checkout 1.22.1 && npm ci --production && npm run download-dist", "setup": "git checkout 1.22.1 && npm ci --production && npm run download-dist",
@@ -45,16 +46,15 @@
"reset-password": "node extra/reset-password.js", "reset-password": "node extra/reset-password.js",
"remove-2fa": "node extra/remove-2fa.js", "remove-2fa": "node extra/remove-2fa.js",
"compile-install-script": "@powershell -NoProfile -ExecutionPolicy Unrestricted -Command ./extra/compile-install-script.ps1", "compile-install-script": "@powershell -NoProfile -ExecutionPolicy Unrestricted -Command ./extra/compile-install-script.ps1",
"test-install-script-rockylinux": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/rockylinux.dockerfile .",
"test-install-script-centos7": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/centos7.dockerfile .", "test-install-script-centos7": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/centos7.dockerfile .",
"test-install-script-debian": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/debian.dockerfile .", "test-install-script-alpine3": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/alpine3.dockerfile .",
"test-install-script-debian-buster": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/debian-buster.dockerfile .",
"test-install-script-ubuntu": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu.dockerfile .", "test-install-script-ubuntu": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu.dockerfile .",
"test-install-script-ubuntu1804": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu1804.dockerfile .",
"test-install-script-ubuntu1604": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu1604.dockerfile .", "test-install-script-ubuntu1604": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu1604.dockerfile .",
"test-nodejs16": "docker build --progress plain -f test/ubuntu-nodejs16.dockerfile .",
"simple-dns-server": "node extra/simple-dns-server.js", "simple-dns-server": "node extra/simple-dns-server.js",
"simple-mqtt-server": "node extra/simple-mqtt-server.js", "simple-mqtt-server": "node extra/simple-mqtt-server.js",
"update-language-files": "cd extra/update-language-files && node index.js && cross-env-shell eslint ../../src/languages/$npm_config_language.js --fix", "update-language-files": "cd extra/update-language-files && node index.js && cross-env-shell eslint ../../src/languages/$npm_config_language.js --fix",
"ncu-patch": "npm-check-updates -u -t patch",
"release-final": "node ./extra/test-docker.js && node extra/update-version.js && npm run build-docker && node ./extra/press-any-key.js && npm run upload-artifacts && node ./extra/update-wiki-version.js", "release-final": "node ./extra/test-docker.js && node extra/update-version.js && npm run build-docker && node ./extra/press-any-key.js && npm run upload-artifacts && node ./extra/update-wiki-version.js",
"release-beta": "node ./extra/test-docker.js && node extra/beta/update-version.js && npm run build && node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:beta . --target release --push && node ./extra/press-any-key.js && npm run upload-artifacts", "release-beta": "node ./extra/test-docker.js && node extra/beta/update-version.js && npm run build && node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:beta . --target release --push && node ./extra/press-any-key.js && npm run upload-artifacts",
"git-remove-tag": "git tag -d", "git-remove-tag": "git tag -d",
@@ -66,9 +66,7 @@
"cypress-open": "concurrently -k -r \"node test/prepare-test-server.js && node server/server.js --port=3002 --data-dir=./data/test/\" \"cypress open --config-file ./config/cypress.config.js\"", "cypress-open": "concurrently -k -r \"node test/prepare-test-server.js && node server/server.js --port=3002 --data-dir=./data/test/\" \"cypress open --config-file ./config/cypress.config.js\"",
"build-healthcheck-armv7": "cross-env GOOS=linux GOARCH=arm GOARM=7 go build -x -o ./extra/healthcheck-armv7 ./extra/healthcheck.go", "build-healthcheck-armv7": "cross-env GOOS=linux GOARCH=arm GOARM=7 go build -x -o ./extra/healthcheck-armv7 ./extra/healthcheck.go",
"deploy-demo-server": "node extra/deploy-demo-server.js", "deploy-demo-server": "node extra/deploy-demo-server.js",
"sort-contributors": "node extra/sort-contributors.js", "sort-contributors": "node extra/sort-contributors.js"
"quick-run-nightly": "docker run --rm --env NODE_ENV=development -p 3001:3001 louislam/uptime-kuma:nightly2",
"start-dev-container": "cd docker && docker-compose -f docker-compose-dev.yml up --force-recreate"
}, },
"dependencies": { "dependencies": {
"@grpc/grpc-js": "~1.7.3", "@grpc/grpc-js": "~1.7.3",
@@ -99,13 +97,11 @@
"http-proxy-agent": "~5.0.0", "http-proxy-agent": "~5.0.0",
"https-proxy-agent": "~5.0.1", "https-proxy-agent": "~5.0.1",
"iconv-lite": "~0.6.3", "iconv-lite": "~0.6.3",
"isomorphic-ws": "^5.0.0",
"jsesc": "~3.0.2", "jsesc": "~3.0.2",
"jsonata": "^2.0.3", "jsonata": "^2.0.3",
"jsonwebtoken": "~9.0.0", "jsonwebtoken": "~9.0.0",
"jwt-decode": "~3.1.2", "jwt-decode": "~3.1.2",
"kafkajs": "^2.2.4", "kafkajs": "^2.2.4",
"knex": "^2.4.2",
"limiter": "~2.1.0", "limiter": "~2.1.0",
"liquidjs": "^10.7.0", "liquidjs": "^10.7.0",
"mongodb": "~4.14.0", "mongodb": "~4.14.0",
@@ -116,9 +112,7 @@
"node-cloudflared-tunnel": "~1.0.9", "node-cloudflared-tunnel": "~1.0.9",
"node-radius-client": "~1.0.0", "node-radius-client": "~1.0.0",
"nodemailer": "~6.6.5", "nodemailer": "~6.6.5",
"nostr-tools": "^1.13.1",
"notp": "~2.0.3", "notp": "~2.0.3",
"openid-client": "^5.4.2",
"password-hash": "~1.2.2", "password-hash": "~1.2.2",
"pg": "~8.8.0", "pg": "~8.8.0",
"pg-connection-string": "~2.5.0", "pg-connection-string": "~2.5.0",
@@ -127,6 +121,7 @@
"prometheus-api-metrics": "~3.2.1", "prometheus-api-metrics": "~3.2.1",
"protobufjs": "~7.2.4", "protobufjs": "~7.2.4",
"qs": "~6.10.4", "qs": "~6.10.4",
"queue": "~7.0.0",
"redbean-node": "~0.3.0", "redbean-node": "~0.3.0",
"redis": "~4.5.1", "redis": "~4.5.1",
"semver": "~7.5.4", "semver": "~7.5.4",
@@ -135,8 +130,7 @@
"socks-proxy-agent": "6.1.1", "socks-proxy-agent": "6.1.1",
"tar": "~6.1.11", "tar": "~6.1.11",
"tcp-ping": "~0.1.1", "tcp-ping": "~0.1.1",
"thirty-two": "~1.0.2", "thirty-two": "~1.0.2"
"ws": "^8.13.0"
}, },
"devDependencies": { "devDependencies": {
"@actions/github": "~5.0.1", "@actions/github": "~5.0.1",
@@ -153,6 +147,7 @@
"@vue/compiler-sfc": "~3.3.4", "@vue/compiler-sfc": "~3.3.4",
"@vuepic/vue-datepicker": "~3.4.8", "@vuepic/vue-datepicker": "~3.4.8",
"aedes": "^0.46.3", "aedes": "^0.46.3",
"babel-plugin-rewire": "~1.2.0",
"bootstrap": "5.1.3", "bootstrap": "5.1.3",
"chart.js": "~4.2.1", "chart.js": "~4.2.1",
"chartjs-adapter-dayjs-4": "~1.0.4", "chartjs-adapter-dayjs-4": "~1.0.4",
@@ -165,7 +160,6 @@
"dns2": "~2.0.1", "dns2": "~2.0.1",
"dompurify": "~2.4.3", "dompurify": "~2.4.3",
"eslint": "~8.14.0", "eslint": "~8.14.0",
"eslint-plugin-jsdoc": "^46.4.6",
"eslint-plugin-vue": "~8.7.1", "eslint-plugin-vue": "~8.7.1",
"favico.js": "~0.3.10", "favico.js": "~0.3.10",
"jest": "~29.6.1", "jest": "~29.6.1",

View File

@@ -9,9 +9,9 @@ const dayjs = require("dayjs");
/** /**
* Login to web app * Login to web app
* @param {string} username Username to login with * @param {string} username
* @param {string} password Password to login with * @param {string} password
* @returns {Promise<(Bean|null)>} User or null if login failed * @returns {Promise<(Bean|null)>}
*/ */
exports.login = async function (username, password) { exports.login = async function (username, password) {
if (typeof username !== "string" || typeof password !== "string") { if (typeof username !== "string" || typeof password !== "string") {
@@ -39,7 +39,6 @@ exports.login = async function (username, password) {
/** /**
* Validate a provided API key * Validate a provided API key
* @param {string} key API key to verify * @param {string} key API key to verify
* @returns {boolean} API is ok?
*/ */
async function verifyAPIKey(key) { async function verifyAPIKey(key) {
if (typeof key !== "string") { if (typeof key !== "string") {
@@ -74,10 +73,9 @@ async function verifyAPIKey(key) {
/** /**
* Custom authorizer for express-basic-auth * Custom authorizer for express-basic-auth
* @param {string} username Username to login with * @param {string} username
* @param {string} password Password to login with * @param {string} password
* @param {authCallback} callback Callback to handle login result * @param {authCallback} callback
* @returns {void}
*/ */
function apiAuthorizer(username, password, callback) { function apiAuthorizer(username, password, callback) {
// API Rate Limit // API Rate Limit
@@ -101,10 +99,9 @@ function apiAuthorizer(username, password, callback) {
/** /**
* Custom authorizer for express-basic-auth * Custom authorizer for express-basic-auth
* @param {string} username Username to login with * @param {string} username
* @param {string} password Password to login with * @param {string} password
* @param {authCallback} callback Callback to handle login result * @param {authCallback} callback
* @returns {void}
*/ */
function userAuthorizer(username, password, callback) { function userAuthorizer(username, password, callback) {
// Login Rate Limit // Login Rate Limit
@@ -129,8 +126,7 @@ function userAuthorizer(username, password, callback) {
* Use basic auth if auth is not disabled * Use basic auth if auth is not disabled
* @param {express.Request} req Express request object * @param {express.Request} req Express request object
* @param {express.Response} res Express response object * @param {express.Response} res Express response object
* @param {express.NextFunction} next Next handler in chain * @param {express.NextFunction} next
* @returns {void}
*/ */
exports.basicAuth = async function (req, res, next) { exports.basicAuth = async function (req, res, next) {
const middleware = basicAuth({ const middleware = basicAuth({
@@ -152,8 +148,7 @@ exports.basicAuth = async function (req, res, next) {
* Use API Key if API keys are enabled, else use basic auth * Use API Key if API keys are enabled, else use basic auth
* @param {express.Request} req Express request object * @param {express.Request} req Express request object
* @param {express.Response} res Express response object * @param {express.Response} res Express response object
* @param {express.NextFunction} next Next handler in chain * @param {express.NextFunction} next
* @returns {void}
*/ */
exports.apiAuth = async function (req, res, next) { exports.apiAuth = async function (req, res, next) {
if (!await Settings.get("disableAuth")) { if (!await Settings.get("disableAuth")) {

View File

@@ -15,7 +15,6 @@ class CacheableDnsHttpAgent {
/** /**
* Register/Disable cacheable to global agents * Register/Disable cacheable to global agents
* @returns {void}
*/ */
static async update() { static async update() {
log.debug("CacheableDnsHttpAgent", "update"); log.debug("CacheableDnsHttpAgent", "update");
@@ -41,15 +40,14 @@ class CacheableDnsHttpAgent {
/** /**
* Attach cacheable to HTTP agent * Attach cacheable to HTTP agent
* @param {http.Agent} agent Agent to install * @param {http.Agent} agent Agent to install
* @returns {void}
*/ */
static install(agent) { static install(agent) {
this.cacheable.install(agent); this.cacheable.install(agent);
} }
/** /**
* @param {https.AgentOptions} agentOptions Options to pass to HTTPS agent * @var {https.AgentOptions} agentOptions
* @returns {https.Agent} The new HTTPS agent * @return {https.Agent}
*/ */
static getHttpsAgent(agentOptions) { static getHttpsAgent(agentOptions) {
if (!this.enable) { if (!this.enable) {
@@ -65,8 +63,8 @@ class CacheableDnsHttpAgent {
} }
/** /**
* @param {http.AgentOptions} agentOptions Options to pass to the HTTP agent * @var {http.AgentOptions} agentOptions
* @returns {https.Agents} The new HTTP agent * @return {https.Agents}
*/ */
static getHttpAgent(agentOptions) { static getHttpAgent(agentOptions) {
if (!this.enable) { if (!this.enable) {
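getHttpsAgent()/getHttpAgent() above hand newly created agents to a DNS cache via install(). A sketch using the cacheable-lookup package, whose install() matches the usage shown; the agent options passed are illustrative:

const https = require("https");
const CacheableLookup = require("cacheable-lookup");

const cacheable = new CacheableLookup();

// Create an HTTPS agent with cached DNS lookups (sketch).
function getHttpsAgent(agentOptions) {
    const agent = new https.Agent(agentOptions);
    cacheable.install(agent); // route the agent's DNS lookups through the cache
    return agent;
}

const agent = getHttpsAgent({ keepAlive: true });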

View File

@@ -12,7 +12,7 @@ const checkVersion = require("./check-version");
/** /**
* Send list of notification providers to client * Send list of notification providers to client
* @param {Socket} socket Socket.io socket instance * @param {Socket} socket Socket.io socket instance
* @returns {Promise<Bean[]>} List of notifications * @returns {Promise<Bean[]>}
*/ */
async function sendNotificationList(socket) { async function sendNotificationList(socket) {
const timeLogger = new TimeLogger(); const timeLogger = new TimeLogger();
@@ -40,8 +40,8 @@ async function sendNotificationList(socket) {
* Send Heartbeat History list to socket * Send Heartbeat History list to socket
* @param {Socket} socket Socket.io instance * @param {Socket} socket Socket.io instance
* @param {number} monitorID ID of monitor to send heartbeat history * @param {number} monitorID ID of monitor to send heartbeat history
* @param {boolean} toUser True = send to all browsers with the same user id, False = send to the current browser only * @param {boolean} [toUser=false] True = send to all browsers with the same user id, False = send to the current browser only
* @param {boolean} overwrite Overwrite client-side's heartbeat list * @param {boolean} [overwrite=false] Overwrite client-side's heartbeat list
* @returns {Promise<void>} * @returns {Promise<void>}
*/ */
async function sendHeartbeatList(socket, monitorID, toUser = false, overwrite = false) { async function sendHeartbeatList(socket, monitorID, toUser = false, overwrite = false) {
@@ -71,8 +71,8 @@ async function sendHeartbeatList(socket, monitorID, toUser = false, overwrite =
* Important heartbeat list (aka event list) * Important heartbeat list (aka event list)
* @param {Socket} socket Socket.io instance * @param {Socket} socket Socket.io instance
* @param {number} monitorID ID of monitor to send heartbeat history * @param {number} monitorID ID of monitor to send heartbeat history
* @param {boolean} toUser True = send to all browsers with the same user id, False = send to the current browser only * @param {boolean} [toUser=false] True = send to all browsers with the same user id, False = send to the current browser only
* @param {boolean} overwrite Overwrite client-side's heartbeat list * @param {boolean} [overwrite=false] Overwrite client-side's heartbeat list
* @returns {Promise<void>} * @returns {Promise<void>}
*/ */
async function sendImportantHeartbeatList(socket, monitorID, toUser = false, overwrite = false) { async function sendImportantHeartbeatList(socket, monitorID, toUser = false, overwrite = false) {
@@ -100,7 +100,7 @@ async function sendImportantHeartbeatList(socket, monitorID, toUser = false, ove
/** /**
* Emit proxy list to client * Emit proxy list to client
* @param {Socket} socket Socket.io socket instance * @param {Socket} socket Socket.io socket instance
* @returns {Promise<Bean[]>} List of proxies * @return {Promise<Bean[]>}
*/ */
async function sendProxyList(socket) { async function sendProxyList(socket) {
const timeLogger = new TimeLogger(); const timeLogger = new TimeLogger();
@@ -141,7 +141,7 @@ async function sendAPIKeyList(socket) {
/** /**
* Emits the version information to the client. * Emits the version information to the client.
* @param {Socket} socket Socket.io socket instance * @param {Socket} socket Socket.io socket instance
* @param {boolean} hideVersion Should we hide the version information in the response? * @param {boolean} hideVersion
* @returns {Promise<void>} * @returns {Promise<void>}
*/ */
async function sendInfo(socket, hideVersion = false) { async function sendInfo(socket, hideVersion = false) {
@@ -165,7 +165,7 @@ async function sendInfo(socket, hideVersion = false) {
/** /**
* Send list of docker hosts to client * Send list of docker hosts to client
* @param {Socket} socket Socket.io socket instance * @param {Socket} socket Socket.io socket instance
* @returns {Promise<Bean[]>} List of docker hosts * @returns {Promise<Bean[]>}
*/ */
async function sendDockerHostList(socket) { async function sendDockerHostList(socket) {
const timeLogger = new TimeLogger(); const timeLogger = new TimeLogger();
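Each send*List helper above follows one pattern: query beans, serialize them, emit on the socket, return the list. A generic hedged sketch; the event name, the query, and the toJSON() model method are assumptions based on the hunks above:

const { R } = require("redbean-node");

// Emit a list of rows to one socket.io client and return it (sketch).
async function sendProxyList(socket) {
    // socket.userID is assumed to identify the logged-in user, as in the repo
    const list = await R.find("proxy", " user_id = ? ", [ socket.userID ]);
    socket.emit("proxyList", list.map(bean => bean.toJSON()));
    return list;
}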

View File

@@ -3,9 +3,6 @@ const { R } = require("redbean-node");
const { setSetting, setting } = require("./util-server"); const { setSetting, setting } = require("./util-server");
const { log, sleep } = require("../src/util"); const { log, sleep } = require("../src/util");
const knex = require("knex"); const knex = require("knex");
const path = require("path");
const { EmbeddedMariaDB } = require("./embedded-mariadb");
const mysql = require("mysql2/promise");
/** /**
* Database & App Data Folder * Database & App Data Folder
@@ -26,9 +23,7 @@ class Database {
static screenshotDir; static screenshotDir;
static sqlitePath; static path;
static dockerTLSDir;
/** /**
* @type {boolean} * @type {boolean}
@@ -36,13 +31,11 @@ class Database {
static patched = false; static patched = false;
/** /**
* SQLite only
* Add patch filename in key * Add patch filename in key
* Values: * Values:
* true: Add it regardless of order * true: Add it regardless of order
* false: Do nothing * false: Do nothing
* { parents: []}: Need parents before add it * { parents: []}: Need parents before add it
* @deprecated
*/ */
static patchList = { static patchList = {
"patch-setting-value-type.sql": true, "patch-setting-value-type.sql": true,
@@ -81,10 +74,6 @@ class Database {
"patch-add-invert-keyword.sql": true, "patch-add-invert-keyword.sql": true,
"patch-added-json-query.sql": true, "patch-added-json-query.sql": true,
"patch-added-kafka-producer.sql": true, "patch-added-kafka-producer.sql": true,
"patch-add-certificate-expiry-status-page.sql": true,
"patch-monitor-oauth-cc.sql": true,
"patch-add-timeout-monitor.sql": true,
"patch-add-gamedig-given-port.sql": true, // The last file so far converted to a knex migration file
}; };
/** /**
@@ -95,110 +84,53 @@ class Database {
static noReject = true; static noReject = true;
static dbConfig = {};
static knexMigrationsPath = "./db/knex_migrations";
/** /**
* Initialize the data directory * Initialize the database
* @param {object} args Arguments to initialize DB with * @param {Object} args Arguments to initialize DB with
* @returns {void}
*/ */
static initDataDir(args) { static init(args) {
// Data Directory (must end with "/") // Data Directory (must end with "/")
Database.dataDir = process.env.DATA_DIR || args["data-dir"] || "./data/"; Database.dataDir = process.env.DATA_DIR || args["data-dir"] || "./data/";
Database.sqlitePath = path.join(Database.dataDir, "kuma.db"); Database.path = Database.dataDir + "kuma.db";
if (! fs.existsSync(Database.dataDir)) { if (! fs.existsSync(Database.dataDir)) {
fs.mkdirSync(Database.dataDir, { recursive: true }); fs.mkdirSync(Database.dataDir, { recursive: true });
} }
Database.uploadDir = path.join(Database.dataDir, "upload/"); Database.uploadDir = Database.dataDir + "upload/";
if (! fs.existsSync(Database.uploadDir)) { if (! fs.existsSync(Database.uploadDir)) {
fs.mkdirSync(Database.uploadDir, { recursive: true }); fs.mkdirSync(Database.uploadDir, { recursive: true });
} }
// Create screenshot dir // Create screenshot dir
Database.screenshotDir = path.join(Database.dataDir, "screenshots/"); Database.screenshotDir = Database.dataDir + "screenshots/";
if (! fs.existsSync(Database.screenshotDir)) { if (! fs.existsSync(Database.screenshotDir)) {
fs.mkdirSync(Database.screenshotDir, { recursive: true }); fs.mkdirSync(Database.screenshotDir, { recursive: true });
} }
Database.dockerTLSDir = path.join(Database.dataDir, "docker-tls/");
if (! fs.existsSync(Database.dockerTLSDir)) {
fs.mkdirSync(Database.dockerTLSDir, { recursive: true });
}
log.info("db", `Data Dir: ${Database.dataDir}`); log.info("db", `Data Dir: ${Database.dataDir}`);
} }
/**
*
*/
static readDBConfig() {
let dbConfig;
let dbConfigString = fs.readFileSync(path.join(Database.dataDir, "db-config.json")).toString("utf-8");
dbConfig = JSON.parse(dbConfigString);
if (typeof dbConfig !== "object") {
throw new Error("Invalid db-config.json, it must be an object");
}
if (typeof dbConfig.type !== "string") {
throw new Error("Invalid db-config.json, type must be a string");
}
return dbConfig;
}
/**
* @param dbConfig
*/
static writeDBConfig(dbConfig) {
fs.writeFileSync(path.join(Database.dataDir, "db-config.json"), JSON.stringify(dbConfig, null, 4));
}
/** /**
* Connect to the database * Connect to the database
* @param {boolean} testMode Should the connection be * @param {boolean} [testMode=false] Should the connection be
* started in test mode? * started in test mode?
* @param {boolean} autoloadModels Should models be * @param {boolean} [autoloadModels=true] Should models be
* automatically loaded? * automatically loaded?
* @param {boolean} noLog Should logs not be output? * @param {boolean} [noLog=false] Should logs not be output?
* @returns {Promise<void>} * @returns {Promise<void>}
*/ */
static async connect(testMode = false, autoloadModels = true, noLog = false) { static async connect(testMode = false, autoloadModels = true, noLog = false) {
const acquireConnectionTimeout = 120 * 1000; const acquireConnectionTimeout = 120 * 1000;
let dbConfig;
try {
dbConfig = this.readDBConfig();
Database.dbConfig = dbConfig;
} catch (err) {
log.warn("db", err.message);
dbConfig = {
type: "sqlite",
};
}
let config = {};
log.info("db", `Database Type: ${dbConfig.type}`);
if (dbConfig.type === "sqlite") {
if (! fs.existsSync(Database.sqlitePath)) {
log.info("server", "Copying Database");
fs.copyFileSync(Database.templatePath, Database.sqlitePath);
}
const Dialect = require("knex/lib/dialects/sqlite3/index.js"); const Dialect = require("knex/lib/dialects/sqlite3/index.js");
Dialect.prototype._driver = () => require("@louislam/sqlite3"); Dialect.prototype._driver = () => require("@louislam/sqlite3");
config = { const knexInstance = knex({
client: Dialect, client: Dialect,
connection: { connection: {
filename: Database.sqlitePath, filename: Database.path,
acquireConnectionTimeout: acquireConnectionTimeout, acquireConnectionTimeout: acquireConnectionTimeout,
}, },
useNullAsDefault: true, useNullAsDefault: true,
@@ -209,59 +141,8 @@ class Database {
propagateCreateError: false, propagateCreateError: false,
acquireTimeoutMillis: acquireConnectionTimeout, acquireTimeoutMillis: acquireConnectionTimeout,
} }
};
} else if (dbConfig.type === "mariadb") {
if (!/^\w+$/.test(dbConfig.dbName)) {
throw Error("Invalid database name. A database name can only consist of letters, numbers and underscores");
}
const connection = await mysql.createConnection({
host: dbConfig.hostname,
port: dbConfig.port,
user: dbConfig.username,
password: dbConfig.password,
}); });
await connection.execute("CREATE DATABASE IF NOT EXISTS " + dbConfig.dbName + " CHARACTER SET utf8mb4");
connection.end();
config = {
client: "mysql2",
connection: {
host: dbConfig.hostname,
port: dbConfig.port,
user: dbConfig.username,
password: dbConfig.password,
database: dbConfig.dbName,
}
};
} else if (dbConfig.type === "embedded-mariadb") {
let embeddedMariaDB = EmbeddedMariaDB.getInstance();
await embeddedMariaDB.start();
log.info("mariadb", "Embedded MariaDB started");
config = {
client: "mysql2",
connection: {
socketPath: embeddedMariaDB.socketPath,
user: "node",
database: "kuma",
}
};
} else {
throw new Error("Unknown Database type: " + dbConfig.type);
}
// Set to utf8mb4 for MariaDB
if (dbConfig.type.endsWith("mariadb")) {
config.pool = {
afterCreate(conn, done) {
conn.query("SET CHARACTER SET utf8mb4;", (err) => done(err, conn));
},
};
}
const knexInstance = knex(config);
R.setup(knexInstance); R.setup(knexInstance);
if (process.env.SQL_LOG === "1") { if (process.env.SQL_LOG === "1") {
@@ -275,18 +156,6 @@ class Database {
await R.autoloadModels("./server/model"); await R.autoloadModels("./server/model");
} }
if (dbConfig.type === "sqlite") {
await this.initSQLite(testMode, noLog);
} else if (dbConfig.type.endsWith("mariadb")) {
await this.initMariaDB();
}
}
/**
* @param testMode
* @param noLog
*/
static async initSQLite(testMode, noLog) {
await R.exec("PRAGMA foreign_keys = ON"); await R.exec("PRAGMA foreign_keys = ON");
if (testMode) { if (testMode) {
// Change to MEMORY // Change to MEMORY
@@ -311,56 +180,8 @@ class Database {
} }
} }
/** /** Patch the database */
*
*/
static async initMariaDB() {
log.debug("db", "Checking if MariaDB database exists...");
let hasTable = await R.hasTable("docker_host");
if (!hasTable) {
const { createTables } = require("../db/knex_init_db");
await createTables();
} else {
log.debug("db", "MariaDB database already exists");
}
}
/**
* Patch the database
* @returns {void}
*/
static async patch() { static async patch() {
// Still need to keep this for old versions of Uptime Kuma
if (Database.dbConfig.type === "sqlite") {
await this.patchSqlite();
}
// Using knex migrations
// https://knexjs.org/guide/migrations.html
// https://gist.github.com/NigelEarle/70db130cc040cc2868555b29a0278261
try {
await R.knex.migrate.latest({
directory: Database.knexMigrationsPath,
});
} catch (e) {
log.error("db", "Database migration failed");
throw e;
}
}
/**
* @returns {Promise<void>}
*/
static async rollbackLatestPatch() {
}
/**
* Patch the database for SQLite
* @deprecated
*/
static async patchSqlite() {
let version = parseInt(await setting("database_version")); let version = parseInt(await setting("database_version"));
if (! version) { if (! version) {
@@ -380,7 +201,7 @@ class Database {
// Try catch anything here // Try catch anything here
try { try {
for (let i = version + 1; i <= this.latestVersion; i++) { for (let i = version + 1; i <= this.latestVersion; i++) {
const sqlFile = `./db/old_migrations/patch${i}.sql`; const sqlFile = `./db/patch${i}.sql`;
log.info("db", `Patching ${sqlFile}`); log.info("db", `Patching ${sqlFile}`);
await Database.importSQLFile(sqlFile); await Database.importSQLFile(sqlFile);
log.info("db", `Patched ${sqlFile}`); log.info("db", `Patched ${sqlFile}`);
@@ -397,18 +218,17 @@ class Database {
} }
} }
await this.patchSqlite2(); await this.patch2();
await this.migrateNewStatusPage(); await this.migrateNewStatusPage();
} }
/** /**
* Patch DB using new process * Patch DB using new process
* Call it from patch() only * Call it from patch() only
* @deprecated
* @private * @private
* @returns {Promise<void>} * @returns {Promise<void>}
*/ */
static async patchSqlite2() { static async patch2() {
log.info("db", "Database Patch 2.0 Process"); log.info("db", "Database Patch 2.0 Process");
let databasePatchedFiles = await setting("databasePatchedFiles"); let databasePatchedFiles = await setting("databasePatchedFiles");
@@ -442,7 +262,6 @@ class Database {
} }
/** /**
* SQlite only
* Migrate status page value in setting to "status_page" table * Migrate status page value in setting to "status_page" table
* @returns {Promise<void>} * @returns {Promise<void>}
*/ */
@@ -514,8 +333,8 @@ class Database {
* Patch database using new patching process * Patch database using new patching process
* Used in patch2() only * Used in patch2() only
* @private * @private
* @param {string} sqlFilename Name of SQL file to load * @param sqlFilename
* @param {object} databasePatchedFiles Patch status of database files * @param databasePatchedFiles
* @returns {Promise<void>} * @returns {Promise<void>}
*/ */
static async patch2Recursion(sqlFilename, databasePatchedFiles) { static async patch2Recursion(sqlFilename, databasePatchedFiles) {
@@ -539,7 +358,7 @@ class Database {
log.info("db", sqlFilename + " is patching"); log.info("db", sqlFilename + " is patching");
this.patched = true; this.patched = true;
await this.importSQLFile("./db/old_migrations/" + sqlFilename); await this.importSQLFile("./db/" + sqlFilename);
databasePatchedFiles[sqlFilename] = true; databasePatchedFiles[sqlFilename] = true;
log.info("db", sqlFilename + " was patched successfully"); log.info("db", sqlFilename + " was patched successfully");
@@ -550,7 +369,7 @@ class Database {
/** /**
* Load an SQL file and execute it * Load an SQL file and execute it
* @param {string} filename Filename of SQL file to import * @param filename Filename of SQL file to import
* @returns {Promise<void>} * @returns {Promise<void>}
*/ */
static async importSQLFile(filename) { static async importSQLFile(filename) {
@@ -584,7 +403,7 @@ class Database {
/** /**
* Acquire a direct connection to the database * Acquire a direct connection to the database
* @returns {any} Database connection * @returns {any}
*/ */
static getBetterSQLite3Database() { static getBetterSQLite3Database() {
return R.knex.client.acquireConnection(); return R.knex.client.acquireConnection();
@@ -621,13 +440,10 @@ class Database {
process.removeListener("unhandledRejection", listener); process.removeListener("unhandledRejection", listener);
} }
/** /** Get the size of the database */
* Get the size of the database
* @returns {number} Size of database
*/
static getSize() { static getSize() {
log.debug("db", "Database.getSize()"); log.debug("db", "Database.getSize()");
let stats = fs.statSync(Database.sqlitePath); let stats = fs.statSync(Database.path);
log.debug("db", stats); log.debug("db", stats);
return stats.size; return stats.size;
} }
@@ -639,18 +455,6 @@ class Database {
static async shrink() { static async shrink() {
await R.exec("VACUUM"); await R.exec("VACUUM");
} }
/**
*
*/
static sqlHourOffset() {
if (this.dbConfig.client === "sqlite3") {
return "DATETIME('now', ? || ' hours')";
} else {
return "DATE_ADD(NOW(), INTERVAL ? HOUR)";
}
}
} }
module.exports = Database; module.exports = Database;
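On one side of this diff, readDBConfig()/writeDBConfig() persist the selected backend in data/db-config.json. A standalone sketch of that round trip; the file name and validation follow the hunk above, while the dataDir constant is illustrative:

const fs = require("fs");
const path = require("path");

const dataDir = "./data/"; // illustrative; the class resolves this from args/env

function readDBConfig() {
    const raw = fs.readFileSync(path.join(dataDir, "db-config.json"), "utf-8");
    const dbConfig = JSON.parse(raw);
    if (typeof dbConfig !== "object" || typeof dbConfig.type !== "string") {
        throw new Error("Invalid db-config.json");
    }
    return dbConfig;
}

function writeDBConfig(dbConfig) {
    fs.writeFileSync(path.join(dataDir, "db-config.json"), JSON.stringify(dbConfig, null, 4));
}

writeDBConfig({ type: "sqlite" });
console.log(readDBConfig().type); // "sqlite"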

View File

@@ -2,22 +2,14 @@ const axios = require("axios");
const { R } = require("redbean-node"); const { R } = require("redbean-node");
const version = require("../package.json").version; const version = require("../package.json").version;
const https = require("https"); const https = require("https");
const fs = require("fs");
const path = require("path");
const Database = require("./database");
class DockerHost { class DockerHost {
static CertificateFileNameCA = "ca.pem";
static CertificateFileNameCert = "cert.pem";
static CertificateFileNameKey = "key.pem";
/** /**
* Save a docker host * Save a docker host
* @param {object} dockerHost Docker host to save * @param {Object} dockerHost Docker host to save
* @param {?number} dockerHostID ID of the docker host to update * @param {?number} dockerHostID ID of the docker host to update
* @param {number} userID ID of the user who adds the docker host * @param {number} userID ID of the user who adds the docker host
* @returns {Promise<Bean>} Updated docker host * @returns {Promise<Bean>}
*/ */
static async save(dockerHost, dockerHostID, userID) { static async save(dockerHost, dockerHostID, userID) {
let bean; let bean;
@@ -64,7 +56,7 @@ class DockerHost {
/** /**
* Fetches the number of containers on the Docker host * Fetches the number of containers on the Docker host
* @param {object} dockerHost Docker host to check for * @param {Object} dockerHost Docker host to check for
* @returns {number} Total number of containers on the host * @returns {number} Total number of containers on the host
*/ */
static async testDockerHost(dockerHost) { static async testDockerHost(dockerHost) {
@@ -74,6 +66,10 @@ class DockerHost {
"Accept": "*/*", "Accept": "*/*",
"User-Agent": "Uptime-Kuma/" + version "User-Agent": "Uptime-Kuma/" + version
}, },
httpsAgent: new https.Agent({
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
rejectUnauthorized: false,
}),
}; };
if (dockerHost.dockerType === "socket") { if (dockerHost.dockerType === "socket") {
@@ -81,7 +77,6 @@ class DockerHost {
} else if (dockerHost.dockerType === "tcp") { } else if (dockerHost.dockerType === "tcp") {
options.baseURL = DockerHost.patchDockerURL(dockerHost.dockerDaemon); options.baseURL = DockerHost.patchDockerURL(dockerHost.dockerDaemon);
} }
options.httpsAgent = new https.Agent(DockerHost.getHttpsAgentOptions(dockerHost.dockerType, options.baseURL));
let res = await axios.request(options); let res = await axios.request(options);
@@ -108,8 +103,6 @@ class DockerHost {
/** /**
* Since axios 0.27.X, it does not accept `tcp://` protocol. * Since axios 0.27.X, it does not accept `tcp://` protocol.
* Change it to `http://` on the fly in order to fix it. (https://github.com/louislam/uptime-kuma/issues/2165) * Change it to `http://` on the fly in order to fix it. (https://github.com/louislam/uptime-kuma/issues/2165)
* @param {any} url URL to fix
* @returns {any} URL with tcp:// replaced by http://
*/ */
static patchDockerURL(url) { static patchDockerURL(url) {
if (typeof url === "string") { if (typeof url === "string") {
@@ -118,52 +111,6 @@ class DockerHost {
} }
return url; return url;
} }
/**
* Returns HTTPS agent options with client side TLS parameters if certificate files
* for the given host are available under a predefined directory path.
*
* The base path where certificates are looked for can be set with the
* 'DOCKER_TLS_DIR_PATH' environment variable or defaults to 'data/docker-tls/'.
*
* If a directory in this path exists with a name matching the FQDN of the docker host
* (e.g. the FQDN of 'https://example.com:2376' is 'example.com' so the directory
* 'data/docker-tls/example.com/' would be searched for certificate files),
* then 'ca.pem', 'key.pem' and 'cert.pem' files are included in the agent options.
* File names can also be overridden via 'DOCKER_TLS_FILE_NAME_(CA|KEY|CERT)'.
* @param {string} dockerType i.e. "tcp" or "socket"
* @param {string} url The docker host URL rewritten to https://
* @returns {object} HTTP agent options
*/
static getHttpsAgentOptions(dockerType, url) {
let baseOptions = {
maxCachedSessions: 0,
rejectUnauthorized: true
};
let certOptions = {};
let dirName = (new URL(url)).hostname;
let caPath = path.join(Database.dockerTLSDir, dirName, DockerHost.CertificateFileNameCA);
let certPath = path.join(Database.dockerTLSDir, dirName, DockerHost.CertificateFileNameCert);
let keyPath = path.join(Database.dockerTLSDir, dirName, DockerHost.CertificateFileNameKey);
if (dockerType === "tcp" && fs.existsSync(caPath) && fs.existsSync(certPath) && fs.existsSync(keyPath)) {
let ca = fs.readFileSync(caPath);
let key = fs.readFileSync(keyPath);
let cert = fs.readFileSync(certPath);
certOptions = {
ca,
key,
cert
};
}
return {
...baseOptions,
...certOptions
};
}
} }
module.exports = { module.exports = {
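The removed getHttpsAgentOptions() loads ca.pem, key.pem and cert.pem from a directory named after the host's FQDN and merges them into https.Agent options. A reduced sketch of that idea with illustrative paths:

const fs = require("fs");
const path = require("path");
const https = require("https");

// Build https.Agent options, adding client TLS material if present (sketch).
function getHttpsAgentOptions(url, tlsDir) {
    const options = { maxCachedSessions: 0, rejectUnauthorized: true };
    const dir = path.join(tlsDir, new URL(url).hostname);
    const files = [ "ca.pem", "key.pem", "cert.pem" ].map(f => path.join(dir, f));
    if (files.every(f => fs.existsSync(f))) {
        const [ ca, key, cert ] = files.map(f => fs.readFileSync(f));
        Object.assign(options, { ca, key, cert });
    }
    return options;
}

const agent = new https.Agent(getHttpsAgentOptions("https://example.com:2376", "./data/docker-tls/"));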

View File

@@ -1,168 +0,0 @@
const { log } = require("../src/util");
const childProcess = require("child_process");
const fs = require("fs");
const mysql = require("mysql2");
/**
* It is only used inside the docker container
*/
class EmbeddedMariaDB {
static instance = null;
exec = "mariadbd";
mariadbDataDir = "/app/data/mariadb";
runDir = "/app/data/run/mariadb";
socketPath = this.runDir + "/mysqld.sock";
childProcess = null;
running = false;
started = false;
/**
* @returns {EmbeddedMariaDB}
*/
static getInstance() {
if (!EmbeddedMariaDB.instance) {
EmbeddedMariaDB.instance = new EmbeddedMariaDB();
}
return EmbeddedMariaDB.instance;
}
/**
*
*/
static hasInstance() {
return !!EmbeddedMariaDB.instance;
}
/**
*
*/
start() {
if (this.childProcess) {
log.info("mariadb", "Already started");
return;
}
this.initDB();
this.running = true;
log.info("mariadb", "Starting Embedded MariaDB");
this.childProcess = childProcess.spawn(this.exec, [
"--user=node",
"--datadir=" + this.mariadbDataDir,
`--socket=${this.socketPath}`,
`--pid-file=${this.runDir}/mysqld.pid`,
]);
this.childProcess.on("close", (code) => {
this.running = false;
this.childProcess = null;
this.started = false;
log.info("mariadb", "Stopped Embedded MariaDB: " + code);
if (code !== 0) {
log.info("mariadb", "Try to restart Embedded MariaDB as it is not stopped by user");
this.start();
}
});
this.childProcess.on("error", (err) => {
if (err.code === "ENOENT") {
log.error("mariadb", `Embedded MariaDB: ${this.exec} is not found`);
} else {
log.error("mariadb", err);
}
});
let handler = (data) => {
log.debug("mariadb", data.toString("utf-8"));
if (data.toString("utf-8").includes("ready for connections")) {
this.initDBAfterStarted();
}
};
this.childProcess.stdout.on("data", handler);
this.childProcess.stderr.on("data", handler);
return new Promise((resolve) => {
let interval = setInterval(() => {
if (this.started) {
clearInterval(interval);
resolve();
} else {
log.info("mariadb", "Waiting for Embedded MariaDB to start...");
}
}, 1000);
});
}
/**
*
*/
stop() {
if (this.childProcess) {
this.childProcess.kill("SIGINT");
this.childProcess = null;
}
}
/**
*
*/
initDB() {
if (!fs.existsSync(this.mariadbDataDir)) {
log.info("mariadb", `Embedded MariaDB: ${this.mariadbDataDir} is not found, create one now.`);
fs.mkdirSync(this.mariadbDataDir, {
recursive: true,
});
let result = childProcess.spawnSync("mysql_install_db", [
"--user=node",
"--ldata=" + this.mariadbDataDir,
]);
if (result.status !== 0) {
let error = result.stderr.toString("utf-8");
log.error("mariadb", error);
return;
} else {
log.info("mariadb", "Embedded MariaDB: mysql_install_db done:" + result.stdout.toString("utf-8"));
}
}
if (!fs.existsSync(this.runDir)) {
log.info("mariadb", `Embedded MariaDB: ${this.runDir} is not found, create one now.`);
fs.mkdirSync(this.runDir, {
recursive: true,
});
}
}
/**
*
*/
async initDBAfterStarted() {
const connection = mysql.createConnection({
socketPath: this.socketPath,
user: "node",
});
let result = await connection.execute("CREATE DATABASE IF NOT EXISTS `kuma`");
log.debug("mariadb", "CREATE DATABASE: " + JSON.stringify(result));
log.info("mariadb", "Embedded MariaDB is ready for connections");
this.started = true;
}
}
module.exports = {
EmbeddedMariaDB,
};
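start() above resolves only after a stdout/stderr watcher sees the server report readiness, using a polling promise. That pattern in isolation, as a sketch with a timer standing in for the "ready for connections" log line:

// Resolve once `isReady()` returns true, polling every second (sketch).
function waitUntilReady(isReady, intervalMs = 1000) {
    return new Promise((resolve) => {
        const interval = setInterval(() => {
            if (isReady()) {
                clearInterval(interval);
                resolve();
            } else {
                console.log("Waiting for Embedded MariaDB to start...");
            }
        }, intervalMs);
    });
}

let started = false;
setTimeout(() => { started = true; }, 3000); // stands in for the readiness log line
waitUntilReady(() => started).then(() => console.log("ready"));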

View File

@@ -3,8 +3,8 @@ const jsesc = require("jsesc");
/** /**
* Returns a string that represents the javascript that is required to insert the Google Analytics scripts * Returns a string that represents the javascript that is required to insert the Google Analytics scripts
* into a webpage. * into a webpage.
* @param {string} tagId Google UA/G/AW/DC Property ID to use with the Google Analytics script. * @param tagId Google UA/G/AW/DC Property ID to use with the Google Analytics script.
* @returns {string} HTML script tags to inject into page * @returns {string}
*/ */
function getGoogleAnalyticsScript(tagId) { function getGoogleAnalyticsScript(tagId) {
let escapedTagId = jsesc(tagId, { isScriptContext: true }); let escapedTagId = jsesc(tagId, { isScriptContext: true });
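jsesc with isScriptContext: true escapes the tag ID so it cannot break out of the inline script tag. A small sketch of the function documented above; the surrounding gtag HTML is abbreviated from the standard snippet:

const jsesc = require("jsesc");

function getGoogleAnalyticsScript(tagId) {
    const escapedTagId = jsesc(tagId, { isScriptContext: true });
    return `<script async src="https://www.googletagmanager.com/gtag/js?id=${escapedTagId}"></script>`
        + `<script>window.dataLayer = window.dataLayer || []; function gtag(){dataLayer.push(arguments);}`
        + `gtag('js', new Date()); gtag('config', '${escapedTagId}');</script>`;
}

console.log(getGoogleAnalyticsScript("G-XXXX"));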

View File

@@ -10,7 +10,7 @@ let ImageDataURI = (() => {
/** /**
* Decode the data:image/ URI * Decode the data:image/ URI
* @param {string} dataURI data:image/ URI to decode * @param {string} dataURI data:image/ URI to decode
* @returns {?object} An object with properties "imageType" and "dataBase64". * @returns {?Object} An object with properties "imageType" and "dataBase64".
* The former is the image type, e.g., "png", and the latter is a base64 * The former is the image type, e.g., "png", and the latter is a base64
* encoded string of the image's binary data. If it fails to parse, returns * encoded string of the image's binary data. If it fails to parse, returns
* null instead of an object. * null instead of an object.
@@ -52,8 +52,8 @@ let ImageDataURI = (() => {
/** /**
* Write data URI to file * Write data URI to file
* @param {string} dataURI data:image/ URI * @param {string} dataURI data:image/ URI
* @param {string} filePath Path to write file to * @param {string} [filePath] Path to write file to
* @returns {Promise<string|void>} Write file error * @returns {Promise<string>}
*/ */
function outputFile(dataURI, filePath) { function outputFile(dataURI, filePath) {
filePath = filePath || "./"; filePath = filePath || "./";

View File

@@ -39,10 +39,7 @@ const initBackgroundJobs = async function () {
}; };
/** /** Stop all background jobs if running */
* Stop all background jobs if running
* @returns {void}
*/
const stopBackgroundJobs = function () { const stopBackgroundJobs = function () {
for (const job of jobs) { for (const job of jobs) {
if (job.croner) { if (job.croner) {

View File

@@ -1,13 +1,12 @@
const { R } = require("redbean-node"); const { R } = require("redbean-node");
const { log } = require("../../src/util"); const { log } = require("../../src/util");
const { setSetting, setting } = require("../util-server"); const { setSetting, setting } = require("../util-server");
const Database = require("../database");
const DEFAULT_KEEP_PERIOD = 180; const DEFAULT_KEEP_PERIOD = 180;
/** /**
* Clears old data from the heartbeat table of the database. * Clears old data from the heartbeat table of the database.
* @returns {Promise<void>} A promise that resolves when the data has been cleared. * @return {Promise<void>} A promise that resolves when the data has been cleared.
*/ */
const clearOldData = async () => { const clearOldData = async () => {
@@ -35,12 +34,10 @@ const clearOldData = async () => {
log.debug("clearOldData", `Clearing Data older than ${parsedPeriod} days...`); log.debug("clearOldData", `Clearing Data older than ${parsedPeriod} days...`);
const sqlHourOffset = Database.sqlHourOffset();
try { try {
await R.exec( await R.exec(
"DELETE FROM heartbeat WHERE time < " + sqlHourOffset, "DELETE FROM heartbeat WHERE time < DATETIME('now', '-' || ? || ' days') ",
[ parsedPeriod * -24 ] [ parsedPeriod ]
); );
await R.exec("PRAGMA optimize;"); await R.exec("PRAGMA optimize;");

View File

@@ -1,19 +1,13 @@
const { R } = require("redbean-node"); const { R } = require("redbean-node");
const { log } = require("../../src/util"); const { log } = require("../../src/util");
const Database = require("../database");
/** /**
* Run incremental_vacuum and checkpoint the WAL. * Run incremental_vacuum and checkpoint the WAL.
* @returns {Promise<void>} A promise that resolves when the process is finished. * @return {Promise<void>} A promise that resolves when the process is finished.
*/ */
const incrementalVacuum = async () => { const incrementalVacuum = async () => {
try { try {
if (Database.dbConfig.type !== "sqlite") {
log.debug("incrementalVacuum", "Skipping incremental_vacuum, not using SQLite.");
return;
}
log.debug("incrementalVacuum", "Running incremental_vacuum and wal_checkpoint(PASSIVE)..."); log.debug("incrementalVacuum", "Running incremental_vacuum and wal_checkpoint(PASSIVE)...");
await R.exec("PRAGMA incremental_vacuum(200)"); await R.exec("PRAGMA incremental_vacuum(200)");
await R.exec("PRAGMA wal_checkpoint(PASSIVE)"); await R.exec("PRAGMA wal_checkpoint(PASSIVE)");

View File

@@ -19,7 +19,7 @@ class APIKey extends BeanModel {
/** /**
* Returns an object that is ready to parse to JSON * Returns an object that is ready to parse to JSON
* @returns {object} Object ready to parse * @returns {Object}
*/ */
toJSON() { toJSON() {
return { return {
@@ -37,7 +37,7 @@ class APIKey extends BeanModel {
/** /**
* Returns an object that is ready to parse to JSON with sensitive fields * Returns an object that is ready to parse to JSON with sensitive fields
* removed * removed
* @returns {object} Object ready to parse * @returns {Object}
*/ */
toPublicJSON() { toPublicJSON() {
return { return {
@@ -53,9 +53,9 @@ class APIKey extends BeanModel {
/** /**
* Create a new API Key and store it in the database * Create a new API Key and store it in the database
* @param {object} key Object sent by client * @param {Object} key Object sent by client
* @param {int} userID ID of socket user * @param {int} userID ID of socket user
* @returns {Promise<bean>} API key * @returns {Promise<bean>}
*/ */
static async save(key, userID) { static async save(key, userID) {
let bean; let bean;

View File

@@ -3,7 +3,7 @@ const { BeanModel } = require("redbean-node/dist/bean-model");
class DockerHost extends BeanModel { class DockerHost extends BeanModel {
/** /**
* Returns an object that is ready to parse to JSON * Returns an object that is ready to parse to JSON
* @returns {object} Object ready to parse * @returns {Object}
*/ */
toJSON() { toJSON() {
return { return {

View File

@@ -4,19 +4,17 @@ const { R } = require("redbean-node");
class Group extends BeanModel { class Group extends BeanModel {
/** /**
* Return an object that is ready to parse to JSON for public. Only show * Return an object that is ready to parse to JSON for public.
* necessary data to the public * Only show necessary data to the public
* @param {boolean} showTags Should the JSON include monitor tags * @param {boolean} [showTags=false] Should the JSON include monitor tags
* @param {boolean} certExpiry Should JSON include info about * @returns {Object}
* certificate expiry?
* @returns {object} Object ready to parse
*/ */
async toPublicJSON(showTags = false, certExpiry = false) { async toPublicJSON(showTags = false) {
let monitorBeanList = await this.getMonitorList(); let monitorBeanList = await this.getMonitorList();
let monitorList = []; let monitorList = [];
for (let bean of monitorBeanList) { for (let bean of monitorBeanList) {
monitorList.push(await bean.toPublicJSON(showTags, certExpiry)); monitorList.push(await bean.toPublicJSON(showTags));
} }
return { return {
@@ -29,7 +27,7 @@ class Group extends BeanModel {
/** /**
* Get all monitors * Get all monitors
* @returns {Bean[]} List of monitors * @returns {Bean[]}
*/ */
async getMonitorList() { async getMonitorList() {
return R.convertToBeans("monitor", await R.getAll(` return R.convertToBeans("monitor", await R.getAll(`

Some files were not shown because too many files have changed in this diff.