Mirror of https://github.com/louislam/uptime-kuma.git (synced 2025-09-18 09:26:56 +08:00)

Compare commits (32 commits): fix-check-... to 2.0-last-p...
Commits in this comparison:

3ed30dc4b2, ab398b9641, 65b49384e0, e91b2efe9a, 5e55215c9c, 93cc21271f, 8a4e295882, fe91ffcc9d,
4632030a5e, 776f4f2d5f, 7562212483, c86b12d5d2, 19a9735234, 0f646e634e, 03e507a4e1, ed5963deb7,
9ff9a9edcc, d7c3c40d74, 344fd52501, 6437b9afab, da8da0bf59, 6be297fd46, 31ce34da77, 7f1042976b,
51892c789a, 67ad0f79b3, 7046a2e0f6, 59e7607e1a, 0f3c727aa4, 696d902983, 124effb552, 2dfa6886b4
.github/workflows (GitHub Actions validation workflow)
@@ -1,4 +1,4 @@
-name: validate
+name: json-yaml-validate
 on:
   push:
     branches:
@@ -25,19 +25,3 @@ jobs:
       with:
         comment: "true" # enable comment mode
         exclude_file: ".github/config/exclude.txt" # gitignore style file for exclusions
-
-  # General validations
-  validate:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - name: Use Node.js 20
-        uses: actions/setup-node@v4
-        with:
-          node-version: 20
-
-      - name: Validate language JSON files
-        run: node ./extra/check-lang-json.js
-
-      - name: Validate knex migrations filename
-        run: node ./extra/check-knex-filenames.mjs
Deleted: knex migration (db/knex_migrations) that widened monitor_tls_info.info_json to LONGTEXT
@@ -1,13 +0,0 @@
-// Update info_json column to LONGTEXT mainly for MariaDB
-exports.up = function (knex) {
-    return knex.schema
-        .alterTable("monitor_tls_info", function (table) {
-            table.text("info_json", "longtext").alter();
-        });
-};
-
-exports.down = function (knex) {
-    return knex.schema.alterTable("monitor_tls_info", function (table) {
-        table.text("info_json", "text").alter();
-    });
-};
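For context on the deleted migration: with Knex's schema builder, table.text("info_json", "longtext").alter() only maps to a LONGTEXT column on MySQL/MariaDB (the size hint is ignored elsewhere), which is why the comment singles out MariaDB. A minimal sketch of applying migrations from that directory programmatically; the connection settings below are placeholders, not values from this repository:

const knex = require("knex")({
    client: "mysql2",
    connection: {
        host: "127.0.0.1", // placeholder credentials, adjust for your setup
        user: "kuma",
        password: "secret",
        database: "kuma",
    },
    migrations: { directory: "./db/knex_migrations" },
});

(async () => {
    // Runs every pending migration's exports.up(); knex.migrate.rollback() would call exports.down().
    const [ batch, applied ] = await knex.migrate.latest();
    console.log(`Batch ${batch} applied:`, applied);
    await knex.destroy();
})();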
docker/debian-base.dockerfile
@@ -1,13 +1,3 @@
-# Download Apprise deb package
-FROM node:20-bookworm-slim AS download-apprise
-WORKDIR /app
-COPY ./extra/download-apprise.mjs ./download-apprise.mjs
-RUN apt update && \
-    apt --yes --no-install-recommends install curl && \
-    npm install cheerio semver && \
-    node ./download-apprise.mjs
-
-# Base Image (Slim)
 # If the image changed, the second stage image should be changed too
 FROM node:20-bookworm-slim AS base2-slim
 ARG TARGETPLATFORM
@@ -37,9 +27,8 @@ RUN apt update && \
 # apprise = for notifications (Install from the deb package, as the stable one is too old) (workaround for #4867)
 # Switching to testing repo is no longer working, as the testing repo is not bookworm anymore.
 # python3-paho-mqtt (#4859)
-# TODO: no idea how to delete the deb file after installation as it becomes a layer already
-COPY --from=download-apprise /app/apprise.deb ./apprise.deb
-RUN apt update && \
+RUN curl http://ftp.debian.org/debian/pool/main/a/apprise/apprise_1.8.0-2_all.deb --output apprise.deb && \
+    apt update && \
     apt --yes --no-install-recommends install ./apprise.deb python3-paho-mqtt && \
     rm -rf /var/lib/apt/lists/* && \
     rm -f apprise.deb && \
docker/dockerfile
@@ -27,6 +27,7 @@ RUN mkdir ./data
 # ⭐ Main Image
 ############################################
 FROM $BASE_IMAGE AS release
+USER node
 WORKDIR /app
 
 LABEL org.opencontainers.image.source="https://github.com/louislam/uptime-kuma"
@@ -45,7 +46,6 @@ CMD ["node", "server/server.js"]
 # Rootless Image
 ############################################
 FROM release AS rootless
-USER node
 
 ############################################
 # Mark as Nightly
Deleted: extra/check-knex-filenames.mjs
@@ -1,72 +0,0 @@
-import fs from "fs";
-const dir = "./db/knex_migrations";
-
-// Get the file list (ending with .js) from the directory
-const files = fs.readdirSync(dir).filter((file) => file !== "README.md");
-
-// They are wrong, but they had been merged, so allowed.
-const exceptionList = [
-    "2024-08-24-000-add-cache-bust.js",
-    "2024-10-1315-rabbitmq-monitor.js",
-];
-
-// Correct format: YYYY-MM-DD-HHmm-description.js
-
-for (const file of files) {
-    if (exceptionList.includes(file)) {
-        continue;
-    }
-
-    // Check ending with .js
-    if (!file.endsWith(".js")) {
-        console.error(`It should end with .js: ${file}`);
-        process.exit(1);
-    }
-
-    const parts = file.split("-");
-
-    // Should be at least 5 parts
-    if (parts.length < 5) {
-        console.error(`Invalid format: ${file}`);
-        process.exit(1);
-    }
-
-    // First part should be a year >= 2024
-    const year = parseInt(parts[0], 10);
-    if (isNaN(year) || year < 2023) {
-        console.error(`Invalid year: ${file}`);
-        process.exit(1);
-    }
-
-    // Second part should be a month
-    const month = parseInt(parts[1], 10);
-    if (isNaN(month) || month < 1 || month > 12) {
-        console.error(`Invalid month: ${file}`);
-        process.exit(1);
-    }
-
-    // Third part should be a day
-    const day = parseInt(parts[2], 10);
-    if (isNaN(day) || day < 1 || day > 31) {
-        console.error(`Invalid day: ${file}`);
-        process.exit(1);
-    }
-
-    // Fourth part should be HHmm
-    const time = parts[3];
-
-    // Check length is 4
-    if (time.length !== 4) {
-        console.error(`Invalid time: ${file}`);
-        process.exit(1);
-    }
-
-    const hour = parseInt(time.substring(0, 2), 10);
-    const minute = parseInt(time.substring(2), 10);
-    if (isNaN(hour) || hour < 0 || hour > 23 || isNaN(minute) || minute < 0 || minute > 59) {
-        console.error(`Invalid time: ${file}`);
-        process.exit(1);
-    }
-}
-
-console.log("All knex filenames are correct.");
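The removed script accepted only filenames of the form YYYY-MM-DD-HHmm-description.js, apart from the two grandfathered exceptions. As a rough single-filename equivalent, here is a regex sketch of my own (slightly stricter than the script, which splits on "-" and range-checks each numeric part):

// Sketch only, not the deleted script's exact logic.
const pattern = /^\d{4}-\d{2}-\d{2}-\d{4}-.+\.js$/;

console.log(pattern.test("2024-10-31-1315-rabbitmq-monitor.js")); // true
console.log(pattern.test("2024-10-1315-rabbitmq-monitor.js"));    // false (missing day; this one was on the exception list)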
Deleted: extra/check-lang-json.js
@@ -1,27 +0,0 @@
-// For #5231
-
-const fs = require("fs");
-
-let path = "./src/lang";
-
-// list directories in the lang directory
-let jsonFileList = fs.readdirSync(path);
-
-for (let jsonFile of jsonFileList) {
-    if (!jsonFile.endsWith(".json")) {
-        continue;
-    }
-
-    let jsonPath = path + "/" + jsonFile;
-    let originalContent = fs.readFileSync(jsonPath, "utf8");
-    let langData = JSON.parse(originalContent);
-
-    let formattedContent = JSON.stringify(langData, null, 4) + "\n";
-
-    if (originalContent !== formattedContent) {
-        console.error(`File ${jsonFile} is not formatted correctly.`);
-        process.exit(1);
-    }
-}
-
-console.log("All lang json files are formatted correctly.");
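The removed check was purely about formatting: every src/lang/*.json file had to match JSON.stringify(data, null, 4) plus a trailing newline, byte for byte. A small sketch of normalising one file into that shape; the filename is only an example:

const fs = require("fs");

// Rewrite a language file into the canonical form the removed check expected:
// 4-space indentation and a single trailing newline.
const jsonPath = "./src/lang/en.json";
const langData = JSON.parse(fs.readFileSync(jsonPath, "utf8"));
fs.writeFileSync(jsonPath, JSON.stringify(langData, null, 4) + "\n");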
Deleted: extra/download-apprise.mjs
@@ -1,57 +0,0 @@
-// Go to http://ftp.debian.org/debian/pool/main/a/apprise/ using fetch api, where it is a apache directory listing page
-// Use cheerio to parse the html and get the latest version of Apprise
-// call curl to download the latest version of Apprise
-// Target file: the latest version of Apprise, which the format is apprise_{VERSION}_all.deb
-
-import * as cheerio from "cheerio";
-import semver from "semver";
-import * as childProcess from "child_process";
-
-const baseURL = "http://ftp.debian.org/debian/pool/main/a/apprise/";
-const response = await fetch(baseURL);
-
-if (!response.ok) {
-    throw new Error("Failed to fetch page of Apprise Debian repository.");
-}
-
-const html = await response.text();
-
-const $ = cheerio.load(html);
-
-// Get all the links in the page
-const linkElements = $("a");
-
-// Filter the links which match apprise_{VERSION}_all.deb
-const links = [];
-const pattern = /apprise_(.*?)_all.deb/;
-
-for (let i = 0; i < linkElements.length; i++) {
-    const link = linkElements[i];
-    if (link.attribs.href.match(pattern) && !link.attribs.href.includes("~")) {
-        links.push({
-            filename: link.attribs.href,
-            version: link.attribs.href.match(pattern)[1],
-        });
-    }
-}
-
-console.log(links);
-
-// semver compare and download
-let latestLink = {
-    filename: "",
-    version: "0.0.0",
-};
-
-for (const link of links) {
-    if (semver.gt(link.version, latestLink.version)) {
-        latestLink = link;
-    }
-}
-
-const downloadURL = baseURL + latestLink.filename;
-console.log(`Downloading ${downloadURL}...`);
-let result = childProcess.spawnSync("curl", [ downloadURL, "--output", "apprise.deb" ]);
-console.log(result.stdout?.toString());
-console.error(result.stderr?.toString());
-process.exit(result.status !== null ? result.status : 1);
package-lock.json: 4188 lines changed (generated file; diff suppressed because it is too large)
package.json
@@ -38,8 +38,8 @@
         "build-docker-base": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base2 --target base2 . --push",
         "build-docker-base-slim": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base2-slim --target base2-slim . --push",
         "build-docker-builder-go": "docker buildx build -f docker/builder-go.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:builder-go . --push",
-        "build-docker-slim": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:next-slim -t louislam/uptime-kuma:2-slim -t louislam/uptime-kuma:$VERSION-slim --target release --build-arg BASE_IMAGE=louislam/uptime-kuma:base2-slim . --push",
+        "build-docker-slim": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2-slim -t louislam/uptime-kuma:$VERSION-slim --target release --build-arg BASE_IMAGE=louislam/uptime-kuma:base2-slim . --push",
-        "build-docker-full": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:next -t louislam/uptime-kuma:2 -t louislam/uptime-kuma:$VERSION --target release . --push",
+        "build-docker-full": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2 -t louislam/uptime-kuma:$VERSION --target release . --push",
         "build-docker-nightly": "node ./extra/test-docker.js && npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly2 --target nightly . --push",
         "build-docker-slim-rootless": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2-slim-rootless -t louislam/uptime-kuma:$VERSION-slim-rootless --target rootless --build-arg BASE_IMAGE=louislam/uptime-kuma:base2-slim . --push",
         "build-docker-full-rootless": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:2-rootless -t louislam/uptime-kuma:$VERSION-rootless --target rootless . --push",
server/database.js
@@ -9,7 +9,6 @@ const mysql = require("mysql2/promise");
 const { Settings } = require("./settings");
 const { UptimeCalculator } = require("./uptime-calculator");
 const dayjs = require("dayjs");
-const { SimpleMigrationServer } = require("./utils/simple-migration-server");
 
 /**
  * Database & App Data Folder
@@ -383,11 +382,9 @@ class Database {
 
     /**
      * Patch the database
-     * @param {number} port Start the migration server for aggregate tables on this port if provided
-     * @param {string} hostname Start the migration server for aggregate tables on this hostname if provided
      * @returns {Promise<void>}
      */
-    static async patch(port = undefined, hostname = undefined) {
+    static async patch() {
         // Still need to keep this for old versions of Uptime Kuma
         if (Database.dbConfig.type === "sqlite") {
             await this.patchSqlite();
@@ -412,7 +409,7 @@ class Database {
                 await R.exec("PRAGMA foreign_keys = ON");
             }
 
-            await this.migrateAggregateTable(port, hostname);
+            await this.migrateAggregateTable();
 
         } catch (e) {
             // Allow missing patch files for downgrade or testing pr.
@@ -738,11 +735,9 @@ class Database {
      * Normally, it should be in transaction, but UptimeCalculator wasn't designed to be in transaction before that.
      * I don't want to heavily modify the UptimeCalculator, so it is not in transaction.
      * Run `npm run reset-migrate-aggregate-table-state` to reset, in case the migration is interrupted.
-     * @param {number} port Start the migration server on this port if provided
-     * @param {string} hostname Start the migration server on this hostname if provided
      * @returns {Promise<void>}
      */
-    static async migrateAggregateTable(port, hostname = undefined) {
+    static async migrateAggregateTable() {
         log.debug("db", "Enter Migrate Aggregate Table function");
 
         // Add a setting for 2.0.0-dev users to skip this migration
@@ -763,17 +758,7 @@ class Database {
             throw new Error("Aggregate table migration is already in progress");
         }
 
-        /**
-         * Start migration server for displaying the migration status
-         * @type {SimpleMigrationServer}
-         */
-        let migrationServer;
-        let msg;
-
-        if (port) {
-            migrationServer = new SimpleMigrationServer();
-            await migrationServer.start(port, hostname);
-        }
+        await Settings.set("migrateAggregateTableState", "migrating");
 
         log.info("db", "Migrating Aggregate Table");
 
@@ -792,13 +777,10 @@ class Database {
             let count = countResult.count;
             if (count > 0) {
                 log.warn("db", `Aggregate table ${table} is not empty, migration will not be started (Maybe you were using 2.0.0-dev?)`);
-                await migrationServer?.stop();
                 return;
             }
         }
 
-        await Settings.set("migrateAggregateTableState", "migrating");
-
         let progressPercent = 0;
         let part = 100 / monitors.length;
         let i = 1;
@@ -829,9 +811,7 @@ class Database {
             `, [ monitor.monitor_id, date.date ]);
 
             if (heartbeats.length > 0) {
-                msg = `[DON'T STOP] Migrating monitor data ${monitor.monitor_id} - ${date.date} [${progressPercent.toFixed(2)}%][${i}/${monitors.length}]`;
-                log.info("db", msg);
-                migrationServer?.update(msg);
+                log.info("db", `[DON'T STOP] Migrating monitor data ${monitor.monitor_id} - ${date.date} [${progressPercent.toFixed(2)}%][${i}/${monitors.length}]`);
             }
 
             for (let heartbeat of heartbeats) {
@@ -849,13 +829,9 @@ class Database {
             i++;
         }
 
-        msg = "Clearing non-important heartbeats";
-        log.info("db", msg);
-        migrationServer?.update(msg);
         await Database.clearHeartbeatData(true);
 
         await Settings.set("migrateAggregateTableState", "migrated");
-        await migrationServer?.stop();
 
         if (monitors.length > 0) {
             log.info("db", "Aggregate Table Migration Completed");
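With the migration status server gone, the remaining visible signals during the aggregate-table migration are the log lines and the migrateAggregateTableState setting, which the code above sets to "migrating" and then "migrated". A sketch for inspecting that state from server-side code; it assumes the Settings helper shown in the diff also exposes a matching get() and that the database connection is already initialised:

// Hypothetical one-off check, meant to run inside an initialised Uptime Kuma server context.
const { Settings } = require("./server/settings");

async function printMigrationState() {
    // "migrating" while the migration runs, "migrated" once it finishes;
    // `npm run reset-migrate-aggregate-table-state` resets it after an interrupted run.
    const state = await Settings.get("migrateAggregateTableState");
    console.log("migrateAggregateTableState =", state);
}

module.exports = { printMigrationState };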
server/server.js
@@ -1716,7 +1716,7 @@ async function initDatabase(testMode = false) {
     log.info("server", "Connected to the database");
 
     // Patch the database
-    await Database.patch(port, hostname);
+    await Database.patch();
 
     let jwtSecretBean = await R.findOne("setting", " `key` = ? ", [
         "jwtSecret",
Deleted: server/utils/simple-migration-server.js
@@ -1,84 +0,0 @@
-const express = require("express");
-const http = require("node:http");
-const { log } = require("../../src/util");
-
-/**
- * SimpleMigrationServer
- * For displaying the migration status of the server
- * Also, it is used to let Docker healthcheck know the status of the server, as the main server is not started yet, healthcheck will think the server is down incorrectly.
- */
-class SimpleMigrationServer {
-    /**
-     * Express app instance
-     * @type {?Express}
-     */
-    app;
-
-    /**
-     * Server instance
-     * @type {?Server}
-     */
-    server;
-
-    /**
-     * Response object
-     * @type {?Response}
-     */
-    response;
-
-    /**
-     * Start the server
-     * @param {number} port Port
-     * @param {string} hostname Hostname
-     * @returns {Promise<void>}
-     */
-    start(port, hostname) {
-        this.app = express();
-        this.server = http.createServer(this.app);
-
-        this.app.get("/", (req, res) => {
-            res.set("Content-Type", "text/plain");
-            res.write("Migration is in progress, listening message...\n");
-            if (this.response) {
-                this.response.write("Disconnected\n");
-                this.response.end();
-            }
-            this.response = res;
-            // never ending response
-        });
-
-        return new Promise((resolve) => {
-            this.server.listen(port, hostname, () => {
-                if (hostname) {
-                    log.info("migration", `Migration server is running on http://${hostname}:${port}`);
-                } else {
-                    log.info("migration", `Migration server is running on http://localhost:${port}`);
-                }
-                resolve();
-            });
-        });
-    }
-
-    /**
-     * Update the message
-     * @param {string} msg Message to update
-     * @returns {void}
-     */
-    update(msg) {
-        this.response?.write(msg + "\n");
-    }
-
-    /**
-     * Stop the server
-     * @returns {Promise<void>}
-     */
-    async stop() {
-        this.response?.write("Finished, please refresh this page.\n");
-        this.response?.end();
-        await this.server?.close();
-    }
-}
-
-module.exports = {
-    SimpleMigrationServer,
-};
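Before its removal, this server answered every request to "/" with a never-ending text/plain response and pushed one extra line per update() call; that open response is also what kept the Docker healthcheck satisfied while the main server was not yet started. A client-side sketch of following that stream, with port 3001 as an assumed example value (the real port came from the caller):

const http = require("node:http");

// Follow the old migration status stream; each server-side update() arrived here
// as one more line, until stop() wrote "Finished, please refresh this page." and ended it.
http.get("http://localhost:3001/", (res) => {
    res.setEncoding("utf8");
    res.on("data", (chunk) => process.stdout.write(chunk));
    res.on("end", () => console.log("(stream closed)"));
});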