Merge branch 'master' of https://github.com/louislam/uptime-kuma into louislam-master

Commit by DeeJayPee on 2021-10-05 19:57:27 +02:00
133 changed files with 15748 additions and 5164 deletions

server/check-version.js

@@ -18,11 +18,10 @@ exports.startInterval = () => {
// For debug
if (process.env.TEST_CHECK_VERSION === "1") {
res.data.version = "1000.0.0"
res.data.version = "1000.0.0";
}
exports.latestVersion = res.data.version;
console.log("Latest Version: " + exports.latestVersion);
} catch (_) { }
};

server/database.js

@@ -3,11 +3,25 @@ const { R } = require("redbean-node");
const { setSetting, setting } = require("./util-server");
const { debug, sleep } = require("../src/util");
const dayjs = require("dayjs");
const knex = require("knex");
/**
* Database & App Data Folder
*/
class Database {
static templatePath = "./db/kuma.db";
/**
* Data Dir (Default: ./data)
*/
static dataDir;
/**
* User Upload Dir (Default: ./data/upload)
*/
static uploadDir;
static path;
/**
@@ -31,39 +45,70 @@ class Database {
"patch-setting-value-type.sql": true,
"patch-improve-performance.sql": true,
"patch-2fa.sql": true,
"patch-add-retry-interval-monitor.sql": true,
"patch-incident-table.sql": true,
"patch-group-table.sql": true,
"patch-monitor-push_token.sql": true,
}
/**
* The final version should be 10 after the tag feature is merged
* @deprecated Use patchList for any new feature
*/
static latestVersion = 9;
static latestVersion = 10;
static noReject = true;
static init(args) {
// Data Directory (must end with "/")
Database.dataDir = process.env.DATA_DIR || args["data-dir"] || "./data/";
Database.path = Database.dataDir + "kuma.db";
if (! fs.existsSync(Database.dataDir)) {
fs.mkdirSync(Database.dataDir, { recursive: true });
}
Database.uploadDir = Database.dataDir + "upload/";
if (! fs.existsSync(Database.uploadDir)) {
fs.mkdirSync(Database.uploadDir, { recursive: true });
}
console.log(`Data Dir: ${Database.dataDir}`);
}
static async connect() {
const acquireConnectionTimeout = 120 * 1000;
R.setup("sqlite", {
filename: Database.path,
const Dialect = require("knex/lib/dialects/sqlite3/index.js");
Dialect.prototype._driver = () => require("@louislam/sqlite3");
const knexInstance = knex({
client: Dialect,
connection: {
filename: Database.path,
acquireConnectionTimeout: acquireConnectionTimeout,
},
useNullAsDefault: true,
acquireConnectionTimeout: acquireConnectionTimeout,
}, {
min: 1,
max: 1,
idleTimeoutMillis: 120 * 1000,
propagateCreateError: false,
acquireTimeoutMillis: acquireConnectionTimeout,
pool: {
min: 1,
max: 1,
idleTimeoutMillis: 120 * 1000,
propagateCreateError: false,
acquireTimeoutMillis: acquireConnectionTimeout,
}
});
R.setup(knexInstance);
if (process.env.SQL_LOG === "1") {
R.debug(true);
}
// Auto map the model to a bean object
R.freeze(true)
R.freeze(true);
await R.autoloadModels("./server/model");
await R.exec("PRAGMA foreign_keys = ON");
// Change to WAL
await R.exec("PRAGMA journal_mode = WAL");
await R.exec("PRAGMA cache_size = -12000");
@@ -71,6 +116,7 @@ class Database {
console.log("SQLite config:");
console.log(await R.getAll("PRAGMA journal_mode"));
console.log(await R.getAll("PRAGMA cache_size"));
console.log("SQLite Version: " + await R.getCell("SELECT sqlite_version()"));
}
static async patch() {
@@ -88,7 +134,7 @@ class Database {
} else if (version > this.latestVersion) {
console.info("Warning: Database version is newer than expected");
} else {
console.info("Database patch is needed")
console.info("Database patch is needed");
this.backup(version);
@@ -103,11 +149,12 @@ class Database {
}
} catch (ex) {
await Database.close();
this.restore();
console.error(ex)
console.error("Start Uptime-Kuma failed due to patch db failed")
console.error("Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues")
console.error(ex);
console.error("Start Uptime-Kuma failed due to patch db failed");
console.error("Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
this.restore();
process.exit(1);
}
}
@@ -132,7 +179,7 @@ class Database {
try {
for (let sqlFilename in this.patchList) {
await this.patch2Recursion(sqlFilename, databasePatchedFiles)
await this.patch2Recursion(sqlFilename, databasePatchedFiles);
}
if (this.patched) {
@@ -141,11 +188,13 @@ class Database {
} catch (ex) {
await Database.close();
this.restore();
console.error(ex)
console.error(ex);
console.error("Start Uptime-Kuma failed due to patch db failed");
console.error("Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
this.restore();
process.exit(1);
}
@@ -185,7 +234,7 @@ class Database {
console.log(sqlFilename + " is patched successfully");
} else {
console.log(sqlFilename + " is already patched, skip");
debug(sqlFilename + " is already patched, skip");
}
}
@@ -203,12 +252,12 @@ class Database {
// Remove all comments (--)
let lines = text.split("\n");
lines = lines.filter((line) => {
return ! line.startsWith("--")
return ! line.startsWith("--");
});
// Split statements by semicolon
// Filter out empty line
text = lines.join("\n")
text = lines.join("\n");
let statements = text.split(";")
.map((statement) => {
@@ -216,7 +265,7 @@ class Database {
})
.filter((statement) => {
return statement !== "";
})
});
for (let statement of statements) {
await R.exec(statement);
@@ -262,7 +311,7 @@ class Database {
*/
static backup(version) {
if (! this.backupPath) {
console.info("Backup the db")
console.info("Backup the db");
this.backupPath = this.dataDir + "kuma.db.bak" + version;
fs.copyFileSync(Database.path, this.backupPath);
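
For reference, a minimal sketch of the connection setup introduced above: redbean-node accepts a prebuilt knex instance, and knex's sqlite3 dialect is pointed at the @louislam/sqlite3 driver. The function name connectSqlite and the dbPath argument are placeholders, not part of the codebase.

// Illustrative sketch of the new connect() wiring; connectSqlite/dbPath are placeholder names.
const knex = require("knex");
const { R } = require("redbean-node");

async function connectSqlite(dbPath) {
    // Point knex's sqlite3 dialect at the @louislam/sqlite3 driver, as the diff does.
    const Dialect = require("knex/lib/dialects/sqlite3/index.js");
    Dialect.prototype._driver = () => require("@louislam/sqlite3");

    const knexInstance = knex({
        client: Dialect,
        connection: { filename: dbPath },
        useNullAsDefault: true,
        // A single pooled connection keeps SQLite writes serialized.
        pool: { min: 1, max: 1 },
    });

    // redbean-node can be handed the knex instance directly.
    R.setup(knexInstance);
    await R.exec("PRAGMA journal_mode = WAL");
}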

server/image-data-uri.js (new file, 57 lines)

@@ -0,0 +1,57 @@
/*
From https://github.com/DiegoZoracKy/image-data-uri/blob/master/lib/image-data-uri.js
Modified with 0 dependencies
*/
let fs = require("fs");
let ImageDataURI = (() => {
function decode(dataURI) {
if (!/data:image\//.test(dataURI)) {
console.log("ImageDataURI :: Error :: It seems that it is not an Image Data URI. Couldn't match \"data:image/\"");
return null;
}
let regExMatches = dataURI.match("data:(image/.*);base64,(.*)");
return {
imageType: regExMatches[1],
dataBase64: regExMatches[2],
dataBuffer: new Buffer(regExMatches[2], "base64")
};
}
function encode(data, mediaType) {
if (!data || !mediaType) {
console.log("ImageDataURI :: Error :: Missing some of the required params: data, mediaType ");
return null;
}
mediaType = (/\//.test(mediaType)) ? mediaType : "image/" + mediaType;
let dataBase64 = (Buffer.isBuffer(data)) ? data.toString("base64") : new Buffer(data).toString("base64");
let dataImgBase64 = "data:" + mediaType + ";base64," + dataBase64;
return dataImgBase64;
}
function outputFile(dataURI, filePath) {
filePath = filePath || "./";
return new Promise((resolve, reject) => {
let imageDecoded = decode(dataURI);
fs.writeFile(filePath, imageDecoded.dataBuffer, err => {
if (err) {
return reject("ImageDataURI :: Error :: " + JSON.stringify(err, null, 4));
}
resolve(filePath);
});
});
}
return {
decode: decode,
encode: encode,
outputFile: outputFile,
};
})();
module.exports = ImageDataURI;
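
A short usage sketch of this vendored helper, assuming it is required from the same directory; the data URI and output path below are placeholders.

const ImageDataURI = require("./image-data-uri");

// Encode a Buffer into a data URI, then decode it back.
const dataUri = ImageDataURI.encode(Buffer.from("hello"), "png"); // "data:image/png;base64,aGVsbG8="
const parsed = ImageDataURI.decode(dataUri);
console.log(parsed.imageType);               // "image/png"
console.log(parsed.dataBuffer.toString());   // "hello"

// Write a data URI to disk; the promise resolves with the file path.
ImageDataURI.outputFile(dataUri, "./data/upload/logo.png")
    .then((filePath) => console.log("written to " + filePath))
    .catch(console.error);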

server/model/group.js (new file, 34 lines)

@@ -0,0 +1,34 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
const { R } = require("redbean-node");
class Group extends BeanModel {
async toPublicJSON() {
let monitorBeanList = await this.getMonitorList();
let monitorList = [];
for (let bean of monitorBeanList) {
monitorList.push(await bean.toPublicJSON());
}
return {
id: this.id,
name: this.name,
weight: this.weight,
monitorList,
};
}
async getMonitorList() {
return R.convertToBeans("monitor", await R.getAll(`
SELECT monitor.* FROM monitor, monitor_group
WHERE monitor.id = monitor_group.monitor_id
AND group_id = ?
ORDER BY monitor_group.weight
`, [
this.id,
]));
}
}
module.exports = Group;
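
For reference, the shape toPublicJSON() produces for the status page; the values below are made-up examples.

const examplePublicGroup = {
    id: 1,
    name: "Core Services",
    weight: 1,
    monitorList: [
        // Monitor.toPublicJSON() below only exposes id and name
        { id: 3, name: "Website" },
    ],
};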

server/model/heartbeat.js

@@ -1,8 +1,8 @@
const dayjs = require("dayjs");
const utc = require("dayjs/plugin/utc")
let timezone = require("dayjs/plugin/timezone")
dayjs.extend(utc)
dayjs.extend(timezone)
const utc = require("dayjs/plugin/utc");
let timezone = require("dayjs/plugin/timezone");
dayjs.extend(utc);
dayjs.extend(timezone);
const { BeanModel } = require("redbean-node/dist/bean-model");
/**
@@ -13,6 +13,15 @@ const { BeanModel } = require("redbean-node/dist/bean-model");
*/
class Heartbeat extends BeanModel {
toPublicJSON() {
return {
status: this.status,
time: this.time,
msg: "", // Hide for public
ping: this.ping,
};
}
toJSON() {
return {
monitorID: this.monitor_id,

server/model/incident.js (new file, 18 lines)

@@ -0,0 +1,18 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
class Incident extends BeanModel {
toPublicJSON() {
return {
id: this.id,
style: this.style,
title: this.title,
content: this.content,
pin: this.pin,
createdDate: this.createdDate,
lastUpdatedDate: this.lastUpdatedDate,
};
}
}
module.exports = Incident;

server/model/monitor.js

@@ -1,16 +1,16 @@
const https = require("https");
const dayjs = require("dayjs");
const utc = require("dayjs/plugin/utc")
let timezone = require("dayjs/plugin/timezone")
dayjs.extend(utc)
dayjs.extend(timezone)
const utc = require("dayjs/plugin/utc");
let timezone = require("dayjs/plugin/timezone");
dayjs.extend(utc);
dayjs.extend(timezone);
const axios = require("axios");
const { Prometheus } = require("../prometheus");
const { debug, UP, DOWN, PENDING, flipStatus, TimeLogger } = require("../../src/util");
const { tcping, ping, dnsResolve, checkCertificate, checkStatusCode, getTotalClientInRoom } = require("../util-server");
const { R } = require("redbean-node");
const { BeanModel } = require("redbean-node/dist/bean-model");
const { Notification } = require("../notification")
const { Notification } = require("../notification");
const version = require("../../package.json").version;
/**
@@ -20,18 +20,35 @@ const version = require("../../package.json").version;
* 2 = PENDING
*/
class Monitor extends BeanModel {
/**
* Return an object that is ready to parse to JSON for the public
* Only show necessary data to the public
*/
async toPublicJSON() {
return {
id: this.id,
name: this.name,
};
}
/**
* Return an object that is ready to parse to JSON
*/
async toJSON() {
let notificationIDList = {};
let list = await R.find("monitor_notification", " monitor_id = ? ", [
this.id,
])
]);
for (let bean of list) {
notificationIDList[bean.notification_id] = true;
}
const tags = await R.getAll("SELECT mt.*, tag.name, tag.color FROM monitor_tag mt JOIN tag ON mt.tag_id = tag.id WHERE mt.monitor_id = ?", [this.id]);
return {
id: this.id,
name: this.name,
@@ -43,6 +60,7 @@ class Monitor extends BeanModel {
active: this.active,
type: this.type,
interval: this.interval,
retryInterval: this.retryInterval,
keyword: this.keyword,
ignoreTls: this.getIgnoreTls(),
upsideDown: this.isUpsideDown(),
@@ -51,7 +69,9 @@ class Monitor extends BeanModel {
dns_resolve_type: this.dns_resolve_type,
dns_resolve_server: this.dns_resolve_server,
dns_last_result: this.dns_last_result,
pushToken: this.pushToken,
notificationIDList,
tags: tags,
};
}
@@ -60,7 +80,7 @@ class Monitor extends BeanModel {
* @returns {boolean}
*/
getIgnoreTls() {
return Boolean(this.ignoreTls)
return Boolean(this.ignoreTls);
}
/**
@@ -90,12 +110,12 @@ class Monitor extends BeanModel {
if (! previousBeat) {
previousBeat = await R.findOne("heartbeat", " monitor_id = ? ORDER BY time DESC", [
this.id,
])
]);
}
const isFirstBeat = !previousBeat;
let bean = R.dispense("heartbeat")
let bean = R.dispense("heartbeat");
bean.monitor_id = this.id;
bean.time = R.isoDateTime(dayjs.utc());
bean.status = DOWN;
@@ -131,7 +151,7 @@ class Monitor extends BeanModel {
return checkStatusCode(status, this.getAcceptedStatuscodes());
},
});
bean.msg = `${res.status} - ${res.statusText}`
bean.msg = `${res.status} - ${res.statusText}`;
bean.ping = dayjs().valueOf() - startTime;
// Check certificate if https is used
@@ -141,12 +161,12 @@ class Monitor extends BeanModel {
tlsInfo = await this.updateTlsInfo(checkCertificate(res));
} catch (e) {
if (e.message !== "No TLS certificate in response") {
console.error(e.message)
console.error(e.message);
}
}
}
debug("Cert Info Query Time: " + (dayjs().valueOf() - certInfoStartTime) + "ms")
debug("Cert Info Query Time: " + (dayjs().valueOf() - certInfoStartTime) + "ms");
if (this.type === "http") {
bean.status = UP;
@@ -156,26 +176,26 @@ class Monitor extends BeanModel {
// Convert to string for object/array
if (typeof data !== "string") {
data = JSON.stringify(data)
data = JSON.stringify(data);
}
if (data.includes(this.keyword)) {
bean.msg += ", keyword is found"
bean.msg += ", keyword is found";
bean.status = UP;
} else {
throw new Error(bean.msg + ", but keyword is not found")
throw new Error(bean.msg + ", but keyword is not found");
}
}
} else if (this.type === "port") {
bean.ping = await tcping(this.hostname, this.port);
bean.msg = ""
bean.msg = "";
bean.status = UP;
} else if (this.type === "ping") {
bean.ping = await ping(this.hostname);
bean.msg = ""
bean.msg = "";
bean.status = UP;
} else if (this.type === "dns") {
let startTime = dayjs().valueOf();
@@ -195,7 +215,7 @@ class Monitor extends BeanModel {
dnsRes.forEach(record => {
dnsMessage += `Hostname: ${record.exchange} - Priority: ${record.priority} | `;
});
dnsMessage = dnsMessage.slice(0, -2)
dnsMessage = dnsMessage.slice(0, -2);
} else if (this.dns_resolve_type == "NS") {
dnsMessage += "Servers: ";
dnsMessage += dnsRes.join(" | ");
@@ -205,7 +225,7 @@ class Monitor extends BeanModel {
dnsRes.forEach(record => {
dnsMessage += `Name: ${record.name} | Port: ${record.port} | Priority: ${record.priority} | Weight: ${record.weight} | `;
});
dnsMessage = dnsMessage.slice(0, -2)
dnsMessage = dnsMessage.slice(0, -2);
}
if (this.dnsLastResult !== dnsMessage) {
@@ -217,6 +237,28 @@ class Monitor extends BeanModel {
bean.msg = dnsMessage;
bean.status = UP;
} else if (this.type === "push") { // Type: Push
const time = R.isoDateTime(dayjs.utc().subtract(this.interval, "second"));
let heartbeatCount = await R.count("heartbeat", " monitor_id = ? AND time > ? ", [
this.id,
time
]);
debug("heartbeatCount" + heartbeatCount + " " + time);
if (heartbeatCount <= 0) {
throw new Error("No heartbeat in the time window");
} else {
// No need to insert successful heartbeat for push type, so end here
retries = 0;
this.heartbeatInterval = setTimeout(beat, this.interval * 1000);
return;
}
} else {
bean.msg = "Unknown Monitor Type";
bean.status = PENDING;
}
if (this.isUpsideDown()) {
@@ -244,6 +286,8 @@ class Monitor extends BeanModel {
}
}
let beatInterval = this.interval;
// * ? -> ANY STATUS = important [isFirstBeat]
// UP -> PENDING = not important
// * UP -> DOWN = important
@@ -268,20 +312,20 @@ class Monitor extends BeanModel {
if (!isFirstBeat || bean.status === DOWN) {
let notificationList = await R.getAll("SELECT notification.* FROM notification, monitor_notification WHERE monitor_id = ? AND monitor_notification.notification_id = notification.id ", [
this.id,
])
]);
let text;
if (bean.status === UP) {
text = "✅ Up"
text = "✅ Up";
} else {
text = "🔴 Down"
text = "🔴 Down";
}
let msg = `[${this.name}] [${text}] ${bean.msg}`;
for (let notification of notificationList) {
try {
await Notification.send(JSON.parse(notification.config), msg, await this.toJSON(), bean.toJSON())
await Notification.send(JSON.parse(notification.config), msg, await this.toJSON(), bean.toJSON());
} catch (e) {
console.error("Cannot send notification to " + notification.name);
console.log(e);
@@ -294,15 +338,18 @@ class Monitor extends BeanModel {
}
if (bean.status === UP) {
console.info(`Monitor #${this.id} '${this.name}': Successful Response: ${bean.ping} ms | Interval: ${this.interval} seconds | Type: ${this.type}`)
console.info(`Monitor #${this.id} '${this.name}': Successful Response: ${bean.ping} ms | Interval: ${beatInterval} seconds | Type: ${this.type}`);
} else if (bean.status === PENDING) {
console.warn(`Monitor #${this.id} '${this.name}': Pending: ${bean.msg} | Max retries: ${this.maxretries} | Type: ${this.type}`)
if (this.retryInterval > 0) {
beatInterval = this.retryInterval;
}
console.warn(`Monitor #${this.id} '${this.name}': Pending: ${bean.msg} | Max retries: ${this.maxretries} | Retry: ${retries} | Retry Interval: ${beatInterval} seconds | Type: ${this.type}`);
} else {
console.warn(`Monitor #${this.id} '${this.name}': Failing: ${bean.msg} | Type: ${this.type}`)
console.warn(`Monitor #${this.id} '${this.name}': Failing: ${bean.msg} | Interval: ${beatInterval} seconds | Type: ${this.type}`);
}
io.to(this.user_id).emit("heartbeat", bean.toJSON());
Monitor.sendStats(io, this.id, this.user_id)
Monitor.sendStats(io, this.id, this.user_id);
await R.store(bean);
prometheus.update(bean, tlsInfo);
@@ -310,12 +357,19 @@ class Monitor extends BeanModel {
previousBeat = bean;
if (! this.isStop) {
this.heartbeatInterval = setTimeout(beat, this.interval * 1000);
this.heartbeatInterval = setTimeout(beat, beatInterval * 1000);
}
}
};
beat();
// Delay Push Type
if (this.type === "push") {
setTimeout(() => {
beat();
}, this.interval * 1000);
} else {
beat();
}
}
stop() {
@@ -406,7 +460,7 @@ class Monitor extends BeanModel {
* https://www.uptrends.com/support/kb/reporting/calculation-of-uptime-and-downtime
* @param duration : int Hours
*/
static async sendUptime(duration, io, monitorID, userID) {
static async calcUptime(duration, monitorID) {
const timeLogger = new TimeLogger();
const startTime = R.isoDateTime(dayjs.utc().subtract(duration, "hour"));
@@ -459,12 +513,21 @@ class Monitor extends BeanModel {
} else {
// Handle new monitor with only one beat, because the beat's duration = 0
let status = parseInt(await R.getCell("SELECT `status` FROM heartbeat WHERE monitor_id = ?", [ monitorID ]));
console.log("here???" + status);
if (status === UP) {
uptime = 1;
}
}
return uptime;
}
/**
* Send Uptime
* @param duration : int Hours
*/
static async sendUptime(duration, io, monitorID, userID) {
const uptime = await this.calcUptime(duration, monitorID);
io.to(userID).emit("uptime", monitorID, duration, uptime);
}
}
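
For reference, a minimal sketch of the refactored uptime helpers: calcUptime() now does the calculation so the status page router can reuse it, while sendUptime() only emits the result. The require path and variable names are placeholders.

// Illustrative only; the require path depends on where this runs from.
const Monitor = require("./server/model/monitor");

async function exampleUptime(io, monitorID, userID) {
    // Ratio between 0 and 1 for the last 24 hours.
    const uptime24h = await Monitor.calcUptime(24, monitorID);
    console.log(`24h uptime: ${(uptime24h * 100).toFixed(2)}%`);

    // Emits ("uptime", monitorID, 24, uptime) to the user's socket.io room.
    await Monitor.sendUptime(24, io, monitorID, userID);
}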

server/model/tag.js (new file, 13 lines)

@@ -0,0 +1,13 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
class Tag extends BeanModel {
toJSON() {
return {
id: this._id,
name: this._name,
color: this._color,
};
}
}
module.exports = Tag;

server/modules/apicache/apicache.js (new file)

@@ -0,0 +1,749 @@
let url = require("url");
let MemoryCache = require("./memory-cache");
let t = {
ms: 1,
second: 1000,
minute: 60000,
hour: 3600000,
day: 3600000 * 24,
week: 3600000 * 24 * 7,
month: 3600000 * 24 * 30,
};
let instances = [];
let matches = function (a) {
return function (b) {
return a === b;
};
};
let doesntMatch = function (a) {
return function (b) {
return !matches(a)(b);
};
};
let logDuration = function (d, prefix) {
let str = d > 1000 ? (d / 1000).toFixed(2) + "sec" : d + "ms";
return "\x1b[33m- " + (prefix ? prefix + " " : "") + str + "\x1b[0m";
};
function getSafeHeaders(res) {
return res.getHeaders ? res.getHeaders() : res._headers;
}
function ApiCache() {
let memCache = new MemoryCache();
let globalOptions = {
debug: false,
defaultDuration: 3600000,
enabled: true,
appendKey: [],
jsonp: false,
redisClient: false,
headerBlacklist: [],
statusCodes: {
include: [],
exclude: [],
},
events: {
expire: undefined,
},
headers: {
// 'cache-control': 'no-cache' // example of header overwrite
},
trackPerformance: false,
respectCacheControl: false,
};
let middlewareOptions = [];
let instance = this;
let index = null;
let timers = {};
let performanceArray = []; // for tracking cache hit rate
instances.push(this);
this.id = instances.length;
function debug(a, b, c, d) {
let arr = ["\x1b[36m[apicache]\x1b[0m", a, b, c, d].filter(function (arg) {
return arg !== undefined;
});
let debugEnv = process.env.DEBUG && process.env.DEBUG.split(",").indexOf("apicache") !== -1;
return (globalOptions.debug || debugEnv) && console.log.apply(null, arr);
}
function shouldCacheResponse(request, response, toggle) {
let opt = globalOptions;
let codes = opt.statusCodes;
if (!response) {
return false;
}
if (toggle && !toggle(request, response)) {
return false;
}
if (codes.exclude && codes.exclude.length && codes.exclude.indexOf(response.statusCode) !== -1) {
return false;
}
if (codes.include && codes.include.length && codes.include.indexOf(response.statusCode) === -1) {
return false;
}
return true;
}
function addIndexEntries(key, req) {
let groupName = req.apicacheGroup;
if (groupName) {
debug("group detected \"" + groupName + "\"");
let group = (index.groups[groupName] = index.groups[groupName] || []);
group.unshift(key);
}
index.all.unshift(key);
}
function filterBlacklistedHeaders(headers) {
return Object.keys(headers)
.filter(function (key) {
return globalOptions.headerBlacklist.indexOf(key) === -1;
})
.reduce(function (acc, header) {
acc[header] = headers[header];
return acc;
}, {});
}
function createCacheObject(status, headers, data, encoding) {
return {
status: status,
headers: filterBlacklistedHeaders(headers),
data: data,
encoding: encoding,
timestamp: new Date().getTime() / 1000, // seconds since epoch. This is used to properly decrement max-age headers in cached responses.
};
}
function cacheResponse(key, value, duration) {
let redis = globalOptions.redisClient;
let expireCallback = globalOptions.events.expire;
if (redis && redis.connected) {
try {
redis.hset(key, "response", JSON.stringify(value));
redis.hset(key, "duration", duration);
redis.expire(key, duration / 1000, expireCallback || function () {});
} catch (err) {
debug("[apicache] error in redis.hset()");
}
} else {
memCache.add(key, value, duration, expireCallback);
}
// add automatic cache clearing from duration, includes max limit on setTimeout
timers[key] = setTimeout(function () {
instance.clear(key, true);
}, Math.min(duration, 2147483647));
}
function accumulateContent(res, content) {
if (content) {
if (typeof content == "string") {
res._apicache.content = (res._apicache.content || "") + content;
} else if (Buffer.isBuffer(content)) {
let oldContent = res._apicache.content;
if (typeof oldContent === "string") {
oldContent = !Buffer.from ? new Buffer(oldContent) : Buffer.from(oldContent);
}
if (!oldContent) {
oldContent = !Buffer.alloc ? new Buffer(0) : Buffer.alloc(0);
}
res._apicache.content = Buffer.concat(
[oldContent, content],
oldContent.length + content.length
);
} else {
res._apicache.content = content;
}
}
}
function makeResponseCacheable(req, res, next, key, duration, strDuration, toggle) {
// monkeypatch res.end to create cache object
res._apicache = {
write: res.write,
writeHead: res.writeHead,
end: res.end,
cacheable: true,
content: undefined,
};
// append header overwrites if applicable
Object.keys(globalOptions.headers).forEach(function (name) {
res.setHeader(name, globalOptions.headers[name]);
});
res.writeHead = function () {
// add cache control headers
if (!globalOptions.headers["cache-control"]) {
if (shouldCacheResponse(req, res, toggle)) {
res.setHeader("cache-control", "max-age=" + (duration / 1000).toFixed(0));
} else {
res.setHeader("cache-control", "no-cache, no-store, must-revalidate");
}
}
res._apicache.headers = Object.assign({}, getSafeHeaders(res));
return res._apicache.writeHead.apply(this, arguments);
};
// patch res.write
res.write = function (content) {
accumulateContent(res, content);
return res._apicache.write.apply(this, arguments);
};
// patch res.end
res.end = function (content, encoding) {
if (shouldCacheResponse(req, res, toggle)) {
accumulateContent(res, content);
if (res._apicache.cacheable && res._apicache.content) {
addIndexEntries(key, req);
let headers = res._apicache.headers || getSafeHeaders(res);
let cacheObject = createCacheObject(
res.statusCode,
headers,
res._apicache.content,
encoding
);
cacheResponse(key, cacheObject, duration);
// display log entry
let elapsed = new Date() - req.apicacheTimer;
debug("adding cache entry for \"" + key + "\" @ " + strDuration, logDuration(elapsed));
debug("_apicache.headers: ", res._apicache.headers);
debug("res.getHeaders(): ", getSafeHeaders(res));
debug("cacheObject: ", cacheObject);
}
}
return res._apicache.end.apply(this, arguments);
};
next();
}
function sendCachedResponse(request, response, cacheObject, toggle, next, duration) {
if (toggle && !toggle(request, response)) {
return next();
}
let headers = getSafeHeaders(response);
// Modified by @louislam, removed Cache-control, since I don't need client side cache!
// Original Source: https://github.com/kwhitley/apicache/blob/0d5686cc21fad353c6dddee646288c2fca3e4f50/src/apicache.js#L254
Object.assign(headers, filterBlacklistedHeaders(cacheObject.headers || {}));
// only embed apicache headers when not in production environment
if (process.env.NODE_ENV !== "production") {
Object.assign(headers, {
"apicache-store": globalOptions.redisClient ? "redis" : "memory",
"apicache-version": "1.6.2-modified",
});
}
// unstringify buffers
let data = cacheObject.data;
if (data && data.type === "Buffer") {
data =
typeof data.data === "number" ? new Buffer.alloc(data.data) : new Buffer.from(data.data);
}
// test Etag against If-None-Match for 304
let cachedEtag = cacheObject.headers.etag;
let requestEtag = request.headers["if-none-match"];
if (requestEtag && cachedEtag === requestEtag) {
response.writeHead(304, headers);
return response.end();
}
response.writeHead(cacheObject.status || 200, headers);
return response.end(data, cacheObject.encoding);
}
function syncOptions() {
for (let i in middlewareOptions) {
Object.assign(middlewareOptions[i].options, globalOptions, middlewareOptions[i].localOptions);
}
}
this.clear = function (target, isAutomatic) {
let group = index.groups[target];
let redis = globalOptions.redisClient;
if (group) {
debug("clearing group \"" + target + "\"");
group.forEach(function (key) {
debug("clearing cached entry for \"" + key + "\"");
clearTimeout(timers[key]);
delete timers[key];
if (!globalOptions.redisClient) {
memCache.delete(key);
} else {
try {
redis.del(key);
} catch (err) {
console.log("[apicache] error in redis.del(\"" + key + "\")");
}
}
index.all = index.all.filter(doesntMatch(key));
});
delete index.groups[target];
} else if (target) {
debug("clearing " + (isAutomatic ? "expired" : "cached") + " entry for \"" + target + "\"");
clearTimeout(timers[target]);
delete timers[target];
// clear actual cached entry
if (!redis) {
memCache.delete(target);
} else {
try {
redis.del(target);
} catch (err) {
console.log("[apicache] error in redis.del(\"" + target + "\")");
}
}
// remove from global index
index.all = index.all.filter(doesntMatch(target));
// remove target from each group that it may exist in
Object.keys(index.groups).forEach(function (groupName) {
index.groups[groupName] = index.groups[groupName].filter(doesntMatch(target));
// delete group if now empty
if (!index.groups[groupName].length) {
delete index.groups[groupName];
}
});
} else {
debug("clearing entire index");
if (!redis) {
memCache.clear();
} else {
// clear redis keys one by one from internal index to prevent clearing non-apicache entries
index.all.forEach(function (key) {
clearTimeout(timers[key]);
delete timers[key];
try {
redis.del(key);
} catch (err) {
console.log("[apicache] error in redis.del(\"" + key + "\")");
}
});
}
this.resetIndex();
}
return this.getIndex();
};
function parseDuration(duration, defaultDuration) {
if (typeof duration === "number") {
return duration;
}
if (typeof duration === "string") {
let split = duration.match(/^([\d\.,]+)\s?(\w+)$/);
if (split.length === 3) {
let len = parseFloat(split[1]);
let unit = split[2].replace(/s$/i, "").toLowerCase();
if (unit === "m") {
unit = "ms";
}
return (len || 1) * (t[unit] || 0);
}
}
return defaultDuration;
}
this.getDuration = function (duration) {
return parseDuration(duration, globalOptions.defaultDuration);
};
/**
* Return cache performance statistics (hit rate). Suitable for putting into a route:
* <code>
* app.get('/api/cache/performance', (req, res) => {
* res.json(apicache.getPerformance())
* })
* </code>
*/
this.getPerformance = function () {
return performanceArray.map(function (p) {
return p.report();
});
};
this.getIndex = function (group) {
if (group) {
return index.groups[group];
} else {
return index;
}
};
this.middleware = function cache(strDuration, middlewareToggle, localOptions) {
let duration = instance.getDuration(strDuration);
let opt = {};
middlewareOptions.push({
options: opt,
});
let options = function (localOptions) {
if (localOptions) {
middlewareOptions.find(function (middleware) {
return middleware.options === opt;
}).localOptions = localOptions;
}
syncOptions();
return opt;
};
options(localOptions);
/**
* A Function for non tracking performance
*/
function NOOPCachePerformance() {
this.report = this.hit = this.miss = function () {}; // noop;
}
/**
* A function for tracking and reporting hit rate. These statistics are returned by the getPerformance() call above.
*/
function CachePerformance() {
/**
* Tracks the hit rate for the last 100 requests.
* If there have been fewer than 100 requests, the hit rate just considers the requests that have happened.
*/
this.hitsLast100 = new Uint8Array(100 / 4); // each hit is 2 bits
/**
* Tracks the hit rate for the last 1000 requests.
* If there have been fewer than 1000 requests, the hit rate just considers the requests that have happened.
*/
this.hitsLast1000 = new Uint8Array(1000 / 4); // each hit is 2 bits
/**
* Tracks the hit rate for the last 10000 requests.
* If there have been fewer than 10000 requests, the hit rate just considers the requests that have happened.
*/
this.hitsLast10000 = new Uint8Array(10000 / 4); // each hit is 2 bits
/**
* Tracks the hit rate for the last 100000 requests.
* If there have been fewer than 100000 requests, the hit rate just considers the requests that have happened.
*/
this.hitsLast100000 = new Uint8Array(100000 / 4); // each hit is 2 bits
/**
* The number of calls that have passed through the middleware since the server started.
*/
this.callCount = 0;
/**
* The total number of hits since the server started
*/
this.hitCount = 0;
/**
* The key from the last cache hit. This is useful in identifying which route these statistics apply to.
*/
this.lastCacheHit = null;
/**
* The key from the last cache miss. This is useful in identifying which route these statistics apply to.
*/
this.lastCacheMiss = null;
/**
* Return performance statistics
*/
this.report = function () {
return {
lastCacheHit: this.lastCacheHit,
lastCacheMiss: this.lastCacheMiss,
callCount: this.callCount,
hitCount: this.hitCount,
missCount: this.callCount - this.hitCount,
hitRate: this.callCount == 0 ? null : this.hitCount / this.callCount,
hitRateLast100: this.hitRate(this.hitsLast100),
hitRateLast1000: this.hitRate(this.hitsLast1000),
hitRateLast10000: this.hitRate(this.hitsLast10000),
hitRateLast100000: this.hitRate(this.hitsLast100000),
};
};
/**
* Computes a cache hit rate from an array of hits and misses.
* @param {Uint8Array} array An array representing hits and misses.
* @returns a number between 0 and 1, or null if the array has no hits or misses
*/
this.hitRate = function (array) {
let hits = 0;
let misses = 0;
for (let i = 0; i < array.length; i++) {
let n8 = array[i];
for (let j = 0; j < 4; j++) {
switch (n8 & 3) {
case 1:
hits++;
break;
case 2:
misses++;
break;
}
n8 >>= 2;
}
}
let total = hits + misses;
if (total == 0) {
return null;
}
return hits / total;
};
/**
* Record a hit or miss in the given array. It will be recorded at a position determined
* by the current value of the callCount variable.
* @param {Uint8Array} array An array representing hits and misses.
* @param {boolean} hit true for a hit, false for a miss
* Each element in the array is 8 bits, and encodes 4 hit/miss records.
* Each hit or miss is encoded as two bits as follows:
* 00 means no hit or miss has been recorded in these bits
* 01 encodes a hit
* 10 encodes a miss
*/
this.recordHitInArray = function (array, hit) {
let arrayIndex = ~~(this.callCount / 4) % array.length;
let bitOffset = (this.callCount % 4) * 2; // 2 bits per record, 4 records per uint8 array element
let clearMask = ~(3 << bitOffset);
let record = (hit ? 1 : 2) << bitOffset;
array[arrayIndex] = (array[arrayIndex] & clearMask) | record;
};
/**
* Records the hit or miss in the tracking arrays and increments the call count.
* @param {boolean} hit true records a hit, false records a miss
*/
this.recordHit = function (hit) {
this.recordHitInArray(this.hitsLast100, hit);
this.recordHitInArray(this.hitsLast1000, hit);
this.recordHitInArray(this.hitsLast10000, hit);
this.recordHitInArray(this.hitsLast100000, hit);
if (hit) {
this.hitCount++;
}
this.callCount++;
};
/**
* Records a hit event, setting lastCacheHit to the given key
* @param {string} key The key that had the cache hit
*/
this.hit = function (key) {
this.recordHit(true);
this.lastCacheHit = key;
};
/**
* Records a miss event, setting lastCacheMiss to the given key
* @param {string} key The key that had the cache miss
*/
this.miss = function (key) {
this.recordHit(false);
this.lastCacheMiss = key;
};
}
let perf = globalOptions.trackPerformance ? new CachePerformance() : new NOOPCachePerformance();
performanceArray.push(perf);
let cache = function (req, res, next) {
function bypass() {
debug("bypass detected, skipping cache.");
return next();
}
// initial bypass chances
if (!opt.enabled) {
return bypass();
}
if (
req.headers["x-apicache-bypass"] ||
req.headers["x-apicache-force-fetch"] ||
(opt.respectCacheControl && req.headers["cache-control"] == "no-cache")
) {
return bypass();
}
// REMOVED IN 0.11.1 TO CORRECT MIDDLEWARE TOGGLE EXECUTE ORDER
// if (typeof middlewareToggle === 'function') {
// if (!middlewareToggle(req, res)) return bypass()
// } else if (middlewareToggle !== undefined && !middlewareToggle) {
// return bypass()
// }
// embed timer
req.apicacheTimer = new Date();
// In Express 4.x the url is ambiguous based on where a router is mounted. originalUrl will give the full URL
let key = req.originalUrl || req.url;
// Remove querystring from key if jsonp option is enabled
if (opt.jsonp) {
key = url.parse(key).pathname;
}
// add appendKey (either custom function or response path)
if (typeof opt.appendKey === "function") {
key += "$$appendKey=" + opt.appendKey(req, res);
} else if (opt.appendKey.length > 0) {
let appendKey = req;
for (let i = 0; i < opt.appendKey.length; i++) {
appendKey = appendKey[opt.appendKey[i]];
}
key += "$$appendKey=" + appendKey;
}
// attempt cache hit
let redis = opt.redisClient;
let cached = !redis ? memCache.getValue(key) : null;
// send if cache hit from memory-cache
if (cached) {
let elapsed = new Date() - req.apicacheTimer;
debug("sending cached (memory-cache) version of", key, logDuration(elapsed));
perf.hit(key);
return sendCachedResponse(req, res, cached, middlewareToggle, next, duration);
}
// send if cache hit from redis
if (redis && redis.connected) {
try {
redis.hgetall(key, function (err, obj) {
if (!err && obj && obj.response) {
let elapsed = new Date() - req.apicacheTimer;
debug("sending cached (redis) version of", key, logDuration(elapsed));
perf.hit(key);
return sendCachedResponse(
req,
res,
JSON.parse(obj.response),
middlewareToggle,
next,
duration
);
} else {
perf.miss(key);
return makeResponseCacheable(
req,
res,
next,
key,
duration,
strDuration,
middlewareToggle
);
}
});
} catch (err) {
// bypass redis on error
perf.miss(key);
return makeResponseCacheable(req, res, next, key, duration, strDuration, middlewareToggle);
}
} else {
perf.miss(key);
return makeResponseCacheable(req, res, next, key, duration, strDuration, middlewareToggle);
}
};
cache.options = options;
return cache;
};
this.options = function (options) {
if (options) {
Object.assign(globalOptions, options);
syncOptions();
if ("defaultDuration" in options) {
// Convert the default duration to a number in milliseconds (if needed)
globalOptions.defaultDuration = parseDuration(globalOptions.defaultDuration, 3600000);
}
if (globalOptions.trackPerformance) {
debug("WARNING: using trackPerformance flag can cause high memory usage!");
}
return this;
} else {
return globalOptions;
}
};
this.resetIndex = function () {
index = {
all: [],
groups: {},
};
};
this.newInstance = function (config) {
let instance = new ApiCache();
if (config) {
instance.options(config);
}
return instance;
};
this.clone = function () {
return this.newInstance(this.options());
};
// initialize index
this.resetIndex();
}
module.exports = new ApiCache();
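
A short sketch of how this vendored middleware is meant to be used with Express, mirroring the routes later in this diff; the app and route here are placeholders.

const express = require("express");
const apicache = require("./apicache");

const app = express();
const cache = apicache.middleware;

// Cache responses in memory for 5 minutes; duration strings such as
// "30 seconds" or "1 hour" are handled by parseDuration().
app.get("/api/status-page/monitor-list", cache("5 minutes"), (req, res) => {
    res.json([]);
});

// Clear every cached entry, e.g. after the status page config changes.
apicache.clear();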

server/modules/apicache/index.js (new file)

@@ -0,0 +1,14 @@
const apicache = require("./apicache");
apicache.options({
headerBlacklist: [
"cache-control"
],
headers: {
// Disable client side cache, only server side cache.
// BUG! Not working for the second request
"cache-control": "no-cache",
},
});
module.exports = apicache;

server/modules/apicache/memory-cache.js (new file)

@@ -0,0 +1,59 @@
function MemoryCache() {
this.cache = {};
this.size = 0;
}
MemoryCache.prototype.add = function (key, value, time, timeoutCallback) {
let old = this.cache[key];
let instance = this;
let entry = {
value: value,
expire: time + Date.now(),
timeout: setTimeout(function () {
instance.delete(key);
return timeoutCallback && typeof timeoutCallback === "function" && timeoutCallback(value, key);
}, time)
};
this.cache[key] = entry;
this.size = Object.keys(this.cache).length;
return entry;
};
MemoryCache.prototype.delete = function (key) {
let entry = this.cache[key];
if (entry) {
clearTimeout(entry.timeout);
}
delete this.cache[key];
this.size = Object.keys(this.cache).length;
return null;
};
MemoryCache.prototype.get = function (key) {
let entry = this.cache[key];
return entry;
};
MemoryCache.prototype.getValue = function (key) {
let entry = this.get(key);
return entry && entry.value;
};
MemoryCache.prototype.clear = function () {
Object.keys(this.cache).forEach(function (key) {
this.delete(key);
}, this);
return true;
};
module.exports = MemoryCache;
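
A short usage sketch of MemoryCache on its own; the key, value, and timeout below are arbitrary.

const MemoryCache = require("./memory-cache");

const cache = new MemoryCache();

// Keep a value for 5 seconds; the callback fires when the entry expires.
cache.add("greeting", "hello", 5000, (value, key) => {
    console.log(key + " expired");
});

console.log(cache.getValue("greeting"));   // "hello"
console.log(cache.size);                   // 1

cache.delete("greeting");                  // also clears the pending expiry timer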

server/notification-providers/discord.js

@@ -62,6 +62,11 @@ class Discord extends NotificationProvider {
],
}],
}
if (notification.discordPrefixMessage) {
discorddowndata.content = notification.discordPrefixMessage;
}
await axios.post(notification.discordWebhookUrl, discorddowndata)
return okMsg;
@@ -92,6 +97,11 @@ class Discord extends NotificationProvider {
],
}],
}
if (notification.discordPrefixMessage) {
discordupdata.content = notification.discordPrefixMessage;
}
await axios.post(notification.discordWebhookUrl, discordupdata)
return okMsg;
}
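
For reference, a Discord notification config using the new optional field; the webhook URL is a placeholder and only the fields touched by this diff are shown.

// Placeholder values for illustration only.
const discordNotification = {
    discordWebhookUrl: "https://discord.com/api/webhooks/...",
    // New in this diff: prepended as the message `content` on both up and down embeds.
    discordPrefixMessage: "@here",
};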

server/notification-providers/teams.js (new file)

@@ -0,0 +1,124 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
const { DOWN, UP } = require("../../src/util");
class Teams extends NotificationProvider {
name = "teams";
_statusMessageFactory = (status, monitorName) => {
if (status === DOWN) {
return `🔴 Application [${monitorName}] went down`;
} else if (status === UP) {
return `✅ Application [${monitorName}] is back online`;
}
return "Notification";
};
_getThemeColor = (status) => {
if (status === DOWN) {
return "ff0000";
}
if (status === UP) {
return "00e804";
}
return "008cff";
};
_notificationPayloadFactory = ({
status,
monitorMessage,
monitorName,
monitorUrl,
}) => {
const notificationMessage = this._statusMessageFactory(
status,
monitorName
);
const facts = [];
if (monitorName) {
facts.push({
name: "Monitor",
value: monitorName,
});
}
if (monitorUrl) {
facts.push({
name: "URL",
value: monitorUrl,
});
}
return {
"@context": "https://schema.org/extensions",
"@type": "MessageCard",
themeColor: this._getThemeColor(status),
summary: notificationMessage,
sections: [
{
activityImage:
"https://raw.githubusercontent.com/louislam/uptime-kuma/master/public/icon.png",
activityTitle: "**Uptime Kuma**",
},
{
activityTitle: notificationMessage,
},
{
activityTitle: "**Description**",
text: monitorMessage,
facts,
},
],
};
};
_sendNotification = async (webhookUrl, payload) => {
await axios.post(webhookUrl, payload);
};
_handleGeneralNotification = (webhookUrl, msg) => {
const payload = this._notificationPayloadFactory({
monitorMessage: msg
});
return this._sendNotification(webhookUrl, payload);
};
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully. ";
try {
if (heartbeatJSON == null) {
await this._handleGeneralNotification(notification.webhookUrl, msg);
return okMsg;
}
let url;
if (monitorJSON["type"] === "port") {
url = monitorJSON["hostname"];
if (monitorJSON["port"]) {
url += ":" + monitorJSON["port"];
}
} else {
url = monitorJSON["url"];
}
const payload = this._notificationPayloadFactory({
monitorMessage: heartbeatJSON.msg,
monitorName: monitorJSON.name,
monitorUrl: url,
status: heartbeatJSON.status,
});
await this._sendNotification(notification.webhookUrl, payload);
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
}
module.exports = Teams;
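
For reference, a sketch of calling the new provider directly; the webhook URL is a placeholder, the require paths assume the provider's own directory, and the monitor/heartbeat objects only carry the fields send() reads.

const Teams = require("./teams");
const { DOWN } = require("../../src/util");

const teams = new Teams();

teams.send(
    { webhookUrl: "https://example.webhook.office.com/..." },           // placeholder URL
    "[My Monitor] [🔴 Down] timeout of 48000ms exceeded",
    { type: "http", name: "My Monitor", url: "https://example.com" },   // monitorJSON
    { status: DOWN, msg: "timeout of 48000ms exceeded" }                // heartbeatJSON
).then(console.log).catch(console.error);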

server/notification.js

@@ -13,6 +13,7 @@ const RocketChat = require("./notification-providers/rocket-chat");
const Signal = require("./notification-providers/signal");
const Slack = require("./notification-providers/slack");
const SMTP = require("./notification-providers/smtp");
const Teams = require("./notification-providers/teams");
const Telegram = require("./notification-providers/telegram");
const Webhook = require("./notification-providers/webhook");
@@ -28,6 +29,7 @@ class Notification {
const list = [
new Apprise(),
new Discord(),
new Teams(),
new Gotify(),
new Line(),
new LunaSea(),

server/prometheus.js

@@ -59,7 +59,7 @@ class Prometheus {
}
try {
monitor_cert_days_remaining.set(this.monitorLabelValues, tlsInfo.daysRemaining)
monitor_cert_days_remaining.set(this.monitorLabelValues, tlsInfo.certInfo.daysRemaining)
} catch (e) {
console.error(e)
}

server/routers/api-router.js (new file)

@@ -0,0 +1,191 @@
let express = require("express");
const { allowDevAllOrigin, getSettings, setting } = require("../util-server");
const { R } = require("redbean-node");
const server = require("../server");
const apicache = require("../modules/apicache");
const Monitor = require("../model/monitor");
const dayjs = require("dayjs");
const { UP } = require("../../src/util");
let router = express.Router();
let cache = apicache.middleware;
let io = server.io;
router.get("/api/entry-page", async (_, response) => {
allowDevAllOrigin(response);
response.json(server.entryPage);
});
router.get("/api/push/:pushToken", async (request, response) => {
try {
let pushToken = request.params.pushToken;
let msg = request.query.msg || "OK";
let ping = request.query.ping;
let monitor = await R.findOne("monitor", " push_token = ? AND active = 1 ", [
pushToken
]);
if (! monitor) {
throw new Error("Monitor not found or not active.");
}
let bean = R.dispense("heartbeat");
bean.monitor_id = monitor.id;
bean.time = R.isoDateTime(dayjs.utc());
bean.status = UP;
bean.msg = msg;
bean.ping = ping;
await R.store(bean);
io.to(monitor.user_id).emit("heartbeat", bean.toJSON());
Monitor.sendStats(io, monitor.id, monitor.user_id);
response.json({
ok: true,
});
} catch (e) {
response.json({
ok: false,
msg: e.message
});
}
});
// Status Page Config
router.get("/api/status-page/config", async (_request, response) => {
allowDevAllOrigin(response);
let config = await getSettings("statusPage");
if (! config.statusPageTheme) {
config.statusPageTheme = "light";
}
if (! config.statusPagePublished) {
config.statusPagePublished = true;
}
if (! config.title) {
config.title = "Uptime Kuma";
}
response.json(config);
});
// Status Page - Get the current Incident
// Can fetch only if published
router.get("/api/status-page/incident", async (_, response) => {
allowDevAllOrigin(response);
try {
await checkPublished();
let incident = await R.findOne("incident", " pin = 1 AND active = 1");
if (incident) {
incident = incident.toPublicJSON();
}
response.json({
ok: true,
incident,
});
} catch (error) {
send403(response, error.message);
}
});
// Status Page - Monitor List
// Can fetch only if published
router.get("/api/status-page/monitor-list", cache("5 minutes"), async (_request, response) => {
allowDevAllOrigin(response);
try {
await checkPublished();
const publicGroupList = [];
let list = await R.find("group", " public = 1 ORDER BY weight ");
for (let groupBean of list) {
publicGroupList.push(await groupBean.toPublicJSON());
}
response.json(publicGroupList);
} catch (error) {
send403(response, error.message);
}
});
// Status Page Polling Data
// Can fetch only if published
router.get("/api/status-page/heartbeat", cache("5 minutes"), async (_request, response) => {
allowDevAllOrigin(response);
try {
await checkPublished();
let heartbeatList = {};
let uptimeList = {};
let monitorIDList = await R.getCol(`
SELECT monitor_group.monitor_id FROM monitor_group, \`group\`
WHERE monitor_group.group_id = \`group\`.id
AND public = 1
`);
for (let monitorID of monitorIDList) {
let list = await R.getAll(`
SELECT * FROM heartbeat
WHERE monitor_id = ?
ORDER BY time DESC
LIMIT 50
`, [
monitorID,
]);
list = R.convertToBeans("heartbeat", list);
heartbeatList[monitorID] = list.reverse().map(row => row.toPublicJSON());
const type = 24;
uptimeList[`${monitorID}_${type}`] = await Monitor.calcUptime(type, monitorID);
}
response.json({
heartbeatList,
uptimeList
});
} catch (error) {
send403(response, error.message);
}
});
async function checkPublished() {
if (! await isPublished()) {
throw new Error("The status page is not published");
}
}
/**
* Default is published
* @returns {Promise<boolean>}
*/
async function isPublished() {
const value = await setting("statusPagePublished");
if (value === null) {
return true;
}
return value;
}
function send403(res, msg = "") {
res.status(403).json({
"status": "fail",
"msg": msg,
});
}
module.exports = router;
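
For reference, what a push-type monitor's client side might look like against the new /api/push/:pushToken route; the base URL and token are placeholders.

const axios = require("axios");

// Call this at the end of every successful job run.
async function reportHeartbeat() {
    const res = await axios.get("http://localhost:3001/api/push/REPLACE_WITH_PUSH_TOKEN", {
        params: {
            msg: "OK",   // optional, defaults to "OK"
            ping: 42,    // optional latency in ms
        },
    });
    console.log(res.data);   // { ok: true } or { ok: false, msg: "..." }
}

reportHeartbeat().catch(console.error);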

File diff suppressed because it is too large.

server/socket-handlers/status-page-socket-handler.js (new file)

@@ -0,0 +1,161 @@
const { R } = require("redbean-node");
const { checkLogin, setSettings } = require("../util-server");
const dayjs = require("dayjs");
const { debug } = require("../../src/util");
const ImageDataURI = require("../image-data-uri");
const Database = require("../database");
const apicache = require("../modules/apicache");
module.exports.statusPageSocketHandler = (socket) => {
// Post or edit incident
socket.on("postIncident", async (incident, callback) => {
try {
checkLogin(socket);
await R.exec("UPDATE incident SET pin = 0 ");
let incidentBean;
if (incident.id) {
incidentBean = await R.findOne("incident", " id = ?", [
incident.id
]);
}
if (incidentBean == null) {
incidentBean = R.dispense("incident");
}
incidentBean.title = incident.title;
incidentBean.content = incident.content;
incidentBean.style = incident.style;
incidentBean.pin = true;
if (incident.id) {
incidentBean.lastUpdatedDate = R.isoDateTime(dayjs.utc());
} else {
incidentBean.createdDate = R.isoDateTime(dayjs.utc());
}
await R.store(incidentBean);
callback({
ok: true,
incident: incidentBean.toPublicJSON(),
});
} catch (error) {
callback({
ok: false,
msg: error.message,
});
}
});
socket.on("unpinIncident", async (callback) => {
try {
checkLogin(socket);
await R.exec("UPDATE incident SET pin = 0 WHERE pin = 1");
callback({
ok: true,
});
} catch (error) {
callback({
ok: false,
msg: error.message,
});
}
});
// Save Status Page
// imgDataUrl Only Accept PNG!
socket.on("saveStatusPage", async (config, imgDataUrl, publicGroupList, callback) => {
try {
checkLogin(socket);
apicache.clear();
const header = "data:image/png;base64,";
// Check logo format
// If is image data url, convert to png file
// Else assume it is a url, nothing to do
if (imgDataUrl.startsWith("data:")) {
if (! imgDataUrl.startsWith(header)) {
throw new Error("Only allowed PNG logo.");
}
// Convert to file
await ImageDataURI.outputFile(imgDataUrl, Database.uploadDir + "logo.png");
config.logo = "/upload/logo.png?t=" + Date.now();
} else {
config.icon = imgDataUrl;
}
// Save Config
await setSettings("statusPage", config);
// Save Public Group List
const groupIDList = [];
let groupOrder = 1;
for (let group of publicGroupList) {
let groupBean;
if (group.id) {
groupBean = await R.findOne("group", " id = ? AND public = 1 ", [
group.id
]);
} else {
groupBean = R.dispense("group");
}
groupBean.name = group.name;
groupBean.public = true;
groupBean.weight = groupOrder++;
await R.store(groupBean);
await R.exec("DELETE FROM monitor_group WHERE group_id = ? ", [
groupBean.id
]);
let monitorOrder = 1;
console.log(group.monitorList);
for (let monitor of group.monitorList) {
let relationBean = R.dispense("monitor_group");
relationBean.weight = monitorOrder++;
relationBean.group_id = groupBean.id;
relationBean.monitor_id = monitor.id;
await R.store(relationBean);
}
groupIDList.push(groupBean.id);
group.id = groupBean.id;
}
// Delete groups that are not in the list
debug("Delete groups that not in the list");
const slots = groupIDList.map(() => "?").join(",");
await R.exec(`DELETE FROM \`group\` WHERE id NOT IN (${slots})`, groupIDList);
callback({
ok: true,
publicGroupList,
});
} catch (error) {
console.log(error);
callback({
ok: false,
msg: error.message,
});
}
});
};
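
For reference, the client-side emit that the "saveStatusPage" handler above expects, sketched with placeholder values; the socket is assumed to belong to an authenticated socket.io-client session.

const { io } = require("socket.io-client");
const socket = io("http://localhost:3001");   // must belong to a logged-in session

socket.emit("saveStatusPage",
    { title: "Uptime Kuma", statusPageTheme: "light", statusPagePublished: true },   // config
    "data:image/png;base64,iVBORw0KGgo=",   // imgDataUrl: PNG data URI, or a plain URL to keep an external logo
    [
        // publicGroupList: groups without an id are created, monitors are re-linked in order
        { name: "Core Services", monitorList: [ { id: 3 } ] },
    ],
    (res) => {
        if (res.ok) {
            console.log("saved", res.publicGroupList);
        } else {
            console.error(res.msg);
        }
    }
);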

server/util-server.js

@@ -5,6 +5,7 @@ const { debug } = require("../src/util");
const passwordHash = require("./password-hash");
const dayjs = require("dayjs");
const { Resolver } = require("dns");
const child_process = require("child_process");
/**
* Init or reset JWT secret
@@ -23,7 +24,7 @@ exports.initJWTSecret = async () => {
jwtSecretBean.value = passwordHash.generate(dayjs() + "");
await R.store(jwtSecretBean);
return jwtSecretBean;
}
};
exports.tcping = function (hostname, port) {
return new Promise((resolve, reject) => {
@@ -44,7 +45,7 @@ exports.tcping = function (hostname, port) {
resolve(Math.round(data.max));
});
});
}
};
exports.ping = async (hostname) => {
try {
@@ -57,7 +58,7 @@ exports.ping = async (hostname) => {
throw e;
}
}
}
};
exports.pingAsync = function (hostname, ipv6 = false) {
return new Promise((resolve, reject) => {
@@ -69,13 +70,13 @@ exports.pingAsync = function (hostname, ipv6 = false) {
if (err) {
reject(err);
} else if (ms === null) {
reject(new Error(stdout))
reject(new Error(stdout));
} else {
resolve(Math.round(ms))
resolve(Math.round(ms));
}
});
});
}
};
exports.dnsResolve = function (hostname, resolver_server, rrtype) {
const resolver = new Resolver();
@@ -98,8 +99,8 @@ exports.dnsResolve = function (hostname, resolver_server, rrtype) {
}
});
}
})
}
});
};
exports.setting = async function (key) {
let value = await R.getCell("SELECT `value` FROM setting WHERE `key` = ? ", [
@@ -108,29 +109,29 @@ exports.setting = async function (key) {
try {
const v = JSON.parse(value);
debug(`Get Setting: ${key}: ${v}`)
debug(`Get Setting: ${key}: ${v}`);
return v;
} catch (e) {
return value;
}
}
};
exports.setSetting = async function (key, value) {
let bean = await R.findOne("setting", " `key` = ? ", [
key,
])
]);
if (!bean) {
bean = R.dispense("setting")
bean = R.dispense("setting");
bean.key = key;
}
bean.value = JSON.stringify(value);
await R.store(bean)
}
await R.store(bean);
};
exports.getSettings = async function (type) {
let list = await R.getAll("SELECT `key`, `value` FROM setting WHERE `type` = ? ", [
type,
])
]);
let result = {};
@@ -143,7 +144,7 @@ exports.getSettings = async function (type) {
}
return result;
}
};
exports.setSettings = async function (type, data) {
let keyList = Object.keys(data);
@@ -163,12 +164,12 @@ exports.setSettings = async function (type, data) {
if (bean.type === type) {
bean.value = JSON.stringify(data[key]);
promiseList.push(R.store(bean))
promiseList.push(R.store(bean));
}
}
await Promise.all(promiseList);
}
};
// ssl-checker by @dyaa
// param: res - response object from axios
@@ -185,40 +186,44 @@ const getDaysRemaining = (validFrom, validTo) => {
return daysRemaining;
};
exports.checkCertificate = function (res) {
const {
valid_from,
valid_to,
subjectaltname,
issuer,
fingerprint,
} = res.request.res.socket.getPeerCertificate(false);
// Fix certificate Info for display
// param: info - the chain obtained from getPeerCertificate()
const parseCertificateInfo = function (info) {
let link = info;
if (!valid_from || !valid_to || !subjectaltname) {
throw {
message: "No TLS certificate in response",
};
while (link) {
if (!link.valid_from || !link.valid_to) {
break;
}
link.validTo = new Date(link.valid_to);
link.validFor = link.subjectaltname?.replace(/DNS:|IP Address:/g, "").split(", ");
link.daysRemaining = getDaysRemaining(new Date(), link.validTo);
// Move up the chain until loop is encountered
if (link.issuerCertificate == null) {
break;
} else if (link.fingerprint == link.issuerCertificate.fingerprint) {
link.issuerCertificate = null;
break;
} else {
link = link.issuerCertificate;
}
}
return info;
};
exports.checkCertificate = function (res) {
const info = res.request.res.socket.getPeerCertificate(true);
const valid = res.request.res.socket.authorized || false;
const validTo = new Date(valid_to);
const validFor = subjectaltname
.replace(/DNS:|IP Address:/g, "")
.split(", ");
const daysRemaining = getDaysRemaining(new Date(), validTo);
const parsedInfo = parseCertificateInfo(info);
return {
valid,
validFor,
validTo,
daysRemaining,
issuer,
fingerprint,
valid: valid,
certInfo: parsedInfo
};
}
};
// Check if the provided status code is within the accepted ranges
// Param: status - the status code to check
@@ -247,7 +252,7 @@ exports.checkStatusCode = function (status, accepted_codes) {
}
return false;
}
};
exports.getTotalClientInRoom = (io, roomName) => {
@@ -270,14 +275,40 @@ exports.getTotalClientInRoom = (io, roomName) => {
} else {
return 0;
}
}
};
exports.genSecret = () => {
let secret = "";
let chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
let charsLength = chars.length;
for ( let i = 0; i < 64; i++ ) {
secret += chars.charAt(Math.floor(Math.random() * charsLength));
exports.allowDevAllOrigin = (res) => {
if (process.env.NODE_ENV === "development") {
exports.allowAllOrigin(res);
}
return secret;
}
};
exports.allowAllOrigin = (res) => {
res.header("Access-Control-Allow-Origin", "*");
res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
};
exports.checkLogin = (socket) => {
if (! socket.userID) {
throw new Error("You are not logged in.");
}
};
exports.startUnitTest = async () => {
console.log("Starting unit test...");
const npm = /^win/.test(process.platform) ? "npm.cmd" : "npm";
const child = child_process.spawn(npm, ["run", "jest"]);
child.stdout.on("data", (data) => {
console.log(data.toString());
});
child.stderr.on("data", (data) => {
console.log(data.toString());
});
child.on("close", function (code) {
console.log("Jest exit code: " + code);
process.exit(code);
});
};
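
Finally, a sketch of consuming the reworked checkCertificate() result above: certInfo is now the head of a linked certificate chain in which each link carries its own validTo/validFor/daysRemaining, and issuerCertificate is nulled once the chain loops. The function name below is illustrative.

// Illustrative consumer of the new { valid, certInfo } shape.
function logCertChain(tlsInfo) {
    console.log("valid:", tlsInfo.valid);

    let link = tlsInfo.certInfo;
    while (link && link.daysRemaining !== undefined) {
        console.log(`expires in ${link.daysRemaining} days, valid for: ${link.validFor}`);
        // parseCertificateInfo() sets issuerCertificate to null at the end of the chain.
        link = link.issuerCertificate;
    }
}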