Merge branch 'master' into import-export

# Conflicts:
#	server/server.js
LouisLam
2021-09-23 17:20:13 +08:00
75 changed files with 4048 additions and 914 deletions


@@ -18,7 +18,7 @@ exports.startInterval = () => {
// For debug
if (process.env.TEST_CHECK_VERSION === "1") {
res.data.version = "1000.0.0"
res.data.version = "1000.0.0";
}
exports.latestVersion = res.data.version;


@@ -3,11 +3,25 @@ const { R } = require("redbean-node");
const { setSetting, setting } = require("./util-server");
const { debug, sleep } = require("../src/util");
const dayjs = require("dayjs");
const knex = require("knex");
/**
* Database & App Data Folder
*/
class Database {
static templatePath = "./db/kuma.db";
/**
* Data Dir (Default: ./data)
*/
static dataDir;
/**
* User Upload Dir (Default: ./data/upload)
*/
static uploadDir;
static path;
/**
@@ -32,6 +46,8 @@ class Database {
"patch-improve-performance.sql": true,
"patch-2fa.sql": true,
"patch-add-retry-interval-monitor.sql": true,
"patch-incident-table.sql": true,
"patch-group-table.sql": true,
}
/**
@@ -42,27 +58,53 @@ class Database {
static noReject = true;
static init(args) {
// Data Directory (must end with "/")
Database.dataDir = process.env.DATA_DIR || args["data-dir"] || "./data/";
Database.path = Database.dataDir + "kuma.db";
if (! fs.existsSync(Database.dataDir)) {
fs.mkdirSync(Database.dataDir, { recursive: true });
}
Database.uploadDir = Database.dataDir + "upload/";
if (! fs.existsSync(Database.uploadDir)) {
fs.mkdirSync(Database.uploadDir, { recursive: true });
}
console.log(`Data Dir: ${Database.dataDir}`);
}
static async connect() {
const acquireConnectionTimeout = 120 * 1000;
R.setup("sqlite", {
filename: Database.path,
const Dialect = require("knex/lib/dialects/sqlite3/index.js");
Dialect.prototype._driver = () => require("@louislam/sqlite3");
const knexInstance = knex({
client: Dialect,
connection: {
filename: Database.path,
acquireConnectionTimeout: acquireConnectionTimeout,
},
useNullAsDefault: true,
acquireConnectionTimeout: acquireConnectionTimeout,
}, {
min: 1,
max: 1,
idleTimeoutMillis: 120 * 1000,
propagateCreateError: false,
acquireTimeoutMillis: acquireConnectionTimeout,
pool: {
min: 1,
max: 1,
idleTimeoutMillis: 120 * 1000,
propagateCreateError: false,
acquireTimeoutMillis: acquireConnectionTimeout,
}
});
R.setup(knexInstance);
if (process.env.SQL_LOG === "1") {
R.debug(true);
}
// Auto map the model to a bean object
R.freeze(true)
R.freeze(true);
await R.autoloadModels("./server/model");
// Change to WAL
@@ -72,6 +114,7 @@ class Database {
console.log("SQLite config:");
console.log(await R.getAll("PRAGMA journal_mode"));
console.log(await R.getAll("PRAGMA cache_size"));
console.log("SQLite Version: " + await R.getCell("SELECT sqlite_version()"));
}
static async patch() {
@@ -89,7 +132,7 @@ class Database {
} else if (version > this.latestVersion) {
console.info("Warning: Database version is newer than expected");
} else {
console.info("Database patch is needed")
console.info("Database patch is needed");
this.backup(version);
@@ -104,11 +147,12 @@ class Database {
}
} catch (ex) {
await Database.close();
this.restore();
console.error(ex)
console.error("Start Uptime-Kuma failed due to patch db failed")
console.error("Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues")
console.error(ex);
console.error("Start Uptime-Kuma failed due to patch db failed");
console.error("Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
this.restore();
process.exit(1);
}
}
@@ -133,7 +177,7 @@ class Database {
try {
for (let sqlFilename in this.patchList) {
await this.patch2Recursion(sqlFilename, databasePatchedFiles)
await this.patch2Recursion(sqlFilename, databasePatchedFiles);
}
if (this.patched) {
@@ -142,11 +186,13 @@ class Database {
} catch (ex) {
await Database.close();
this.restore();
console.error(ex)
console.error(ex);
console.error("Start Uptime-Kuma failed due to patch db failed");
console.error("Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
this.restore();
process.exit(1);
}
@@ -186,7 +232,7 @@ class Database {
console.log(sqlFilename + " is patched successfully");
} else {
console.log(sqlFilename + " is already patched, skip");
debug(sqlFilename + " is already patched, skip");
}
}
@@ -204,12 +250,12 @@ class Database {
// Remove all comments (--)
let lines = text.split("\n");
lines = lines.filter((line) => {
return ! line.startsWith("--")
return ! line.startsWith("--");
});
// Split statements by semicolon
// Filter out empty line
text = lines.join("\n")
text = lines.join("\n");
let statements = text.split(";")
.map((statement) => {
@@ -217,7 +263,7 @@ class Database {
})
.filter((statement) => {
return statement !== "";
})
});
for (let statement of statements) {
await R.exec(statement);
@@ -263,7 +309,7 @@ class Database {
*/
static backup(version) {
if (! this.backupPath) {
console.info("Backup the db")
console.info("Backup the db");
this.backupPath = this.dataDir + "kuma.db.bak" + version;
fs.copyFileSync(Database.path, this.backupPath);
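For context on the connect() change above: instead of R.setup("sqlite", ...), the server now builds its own knex instance whose SQLite dialect driver is swapped for @louislam/sqlite3, and hands that instance to redbean-node. A minimal standalone sketch of the same pattern (the filename and pool numbers mirror the diff; treat it as an illustration, not the final class code):

// Minimal sketch of the new connection setup, assuming knex and
// @louislam/sqlite3 are installed (values mirror the diff above).
const { R } = require("redbean-node");
const knex = require("knex");

const Dialect = require("knex/lib/dialects/sqlite3/index.js");
Dialect.prototype._driver = () => require("@louislam/sqlite3"); // use the patched sqlite3 build

const knexInstance = knex({
    client: Dialect,
    connection: { filename: "./data/kuma.db" },
    useNullAsDefault: true,
    pool: { min: 1, max: 1 },       // SQLite: keep a single connection
});

R.setup(knexInstance); // redbean-node accepts a pre-built knex instance, as the diff shows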

server/image-data-uri.js Normal file

@@ -0,0 +1,57 @@
/*
From https://github.com/DiegoZoracKy/image-data-uri/blob/master/lib/image-data-uri.js
Modified with 0 dependencies
*/
let fs = require("fs");
let ImageDataURI = (() => {
function decode(dataURI) {
if (!/data:image\//.test(dataURI)) {
console.log("ImageDataURI :: Error :: It seems that it is not an Image Data URI. Couldn't match \"data:image/\"");
return null;
}
let regExMatches = dataURI.match("data:(image/.*);base64,(.*)");
return {
imageType: regExMatches[1],
dataBase64: regExMatches[2],
dataBuffer: new Buffer(regExMatches[2], "base64")
};
}
function encode(data, mediaType) {
if (!data || !mediaType) {
console.log("ImageDataURI :: Error :: Missing some of the required params: data, mediaType ");
return null;
}
mediaType = (/\//.test(mediaType)) ? mediaType : "image/" + mediaType;
let dataBase64 = (Buffer.isBuffer(data)) ? data.toString("base64") : new Buffer(data).toString("base64");
let dataImgBase64 = "data:" + mediaType + ";base64," + dataBase64;
return dataImgBase64;
}
function outputFile(dataURI, filePath) {
filePath = filePath || "./";
return new Promise((resolve, reject) => {
let imageDecoded = decode(dataURI);
fs.writeFile(filePath, imageDecoded.dataBuffer, err => {
if (err) {
return reject("ImageDataURI :: Error :: " + JSON.stringify(err, null, 4));
}
resolve(filePath);
});
});
}
return {
decode: decode,
encode: encode,
outputFile: outputFile,
};
})();
module.exports = ImageDataURI;
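A short usage sketch for the helper above; encode, decode and outputFile are the three functions the module exports, and the input path is hypothetical:

// Usage sketch for server/image-data-uri.js (logo.png is a made-up input file).
const fs = require("fs");
const ImageDataURI = require("./server/image-data-uri");

const buffer = fs.readFileSync("./logo.png");
const dataURI = ImageDataURI.encode(buffer, "png");   // "data:image/png;base64,...."
const decoded = ImageDataURI.decode(dataURI);         // { imageType, dataBase64, dataBuffer }

ImageDataURI.outputFile(dataURI, "./data/upload/logo.png")
    .then((filePath) => console.log("Written to " + filePath));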

server/model/group.js Normal file

@@ -0,0 +1,34 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
const { R } = require("redbean-node");
class Group extends BeanModel {
async toPublicJSON() {
let monitorBeanList = await this.getMonitorList();
let monitorList = [];
for (let bean of monitorBeanList) {
monitorList.push(await bean.toPublicJSON());
}
return {
id: this.id,
name: this.name,
weight: this.weight,
monitorList,
};
}
async getMonitorList() {
return R.convertToBeans("monitor", await R.getAll(`
SELECT monitor.* FROM monitor, monitor_group
WHERE monitor.id = monitor_group.monitor_id
AND group_id = ?
ORDER BY monitor_group.weight
`, [
this.id,
]));
}
}
module.exports = Group;
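Group.toPublicJSON() nests the output of each monitor's toPublicJSON(), which later in this commit exposes only id and name, so the status-page payload looks roughly like this (values are illustrative only):

// Illustrative shape only; real values come from the database.
const exampleGroup = {
    id: 1,                          // group id
    name: "Core Services",          // group name shown on the status page
    weight: 1,
    monitorList: [                  // each entry comes from Monitor.toPublicJSON()
        { id: 3, name: "Website" },
        { id: 7, name: "API" },
    ],
};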


@@ -1,8 +1,8 @@
const dayjs = require("dayjs");
const utc = require("dayjs/plugin/utc")
let timezone = require("dayjs/plugin/timezone")
dayjs.extend(utc)
dayjs.extend(timezone)
const utc = require("dayjs/plugin/utc");
let timezone = require("dayjs/plugin/timezone");
dayjs.extend(utc);
dayjs.extend(timezone);
const { BeanModel } = require("redbean-node/dist/bean-model");
/**
@@ -13,6 +13,15 @@ const { BeanModel } = require("redbean-node/dist/bean-model");
*/
class Heartbeat extends BeanModel {
toPublicJSON() {
return {
status: this.status,
time: this.time,
msg: "", // Hide for public
ping: this.ping,
};
}
toJSON() {
return {
monitorID: this.monitor_id,

server/model/incident.js Normal file

@@ -0,0 +1,18 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
class Incident extends BeanModel {
toPublicJSON() {
return {
id: this.id,
style: this.style,
title: this.title,
content: this.content,
pin: this.pin,
createdDate: this.createdDate,
lastUpdatedDate: this.lastUpdatedDate,
};
}
}
module.exports = Incident;


@@ -1,16 +1,16 @@
const https = require("https");
const dayjs = require("dayjs");
const utc = require("dayjs/plugin/utc")
let timezone = require("dayjs/plugin/timezone")
dayjs.extend(utc)
dayjs.extend(timezone)
const utc = require("dayjs/plugin/utc");
let timezone = require("dayjs/plugin/timezone");
dayjs.extend(utc);
dayjs.extend(timezone);
const axios = require("axios");
const { Prometheus } = require("../prometheus");
const { debug, UP, DOWN, PENDING, flipStatus, TimeLogger } = require("../../src/util");
const { tcping, ping, dnsResolve, checkCertificate, checkStatusCode, getTotalClientInRoom } = require("../util-server");
const { R } = require("redbean-node");
const { BeanModel } = require("redbean-node/dist/bean-model");
const { Notification } = require("../notification")
const { Notification } = require("../notification");
const version = require("../../package.json").version;
/**
@@ -20,13 +20,28 @@ const version = require("../../package.json").version;
* 2 = PENDING
*/
class Monitor extends BeanModel {
/**
* Return an object that is ready to be parsed to JSON for the public
* Only show necessary data to the public
*/
async toPublicJSON() {
return {
id: this.id,
name: this.name,
};
}
/**
* Return an object that is ready to be parsed to JSON
*/
async toJSON() {
let notificationIDList = {};
let list = await R.find("monitor_notification", " monitor_id = ? ", [
this.id,
])
]);
for (let bean of list) {
notificationIDList[bean.notification_id] = true;
@@ -64,7 +79,7 @@ class Monitor extends BeanModel {
* @returns {boolean}
*/
getIgnoreTls() {
return Boolean(this.ignoreTls)
return Boolean(this.ignoreTls);
}
/**
@@ -94,12 +109,12 @@ class Monitor extends BeanModel {
if (! previousBeat) {
previousBeat = await R.findOne("heartbeat", " monitor_id = ? ORDER BY time DESC", [
this.id,
])
]);
}
const isFirstBeat = !previousBeat;
let bean = R.dispense("heartbeat")
let bean = R.dispense("heartbeat");
bean.monitor_id = this.id;
bean.time = R.isoDateTime(dayjs.utc());
bean.status = DOWN;
@@ -135,7 +150,7 @@ class Monitor extends BeanModel {
return checkStatusCode(status, this.getAcceptedStatuscodes());
},
});
bean.msg = `${res.status} - ${res.statusText}`
bean.msg = `${res.status} - ${res.statusText}`;
bean.ping = dayjs().valueOf() - startTime;
// Check certificate if https is used
@@ -145,12 +160,12 @@ class Monitor extends BeanModel {
tlsInfo = await this.updateTlsInfo(checkCertificate(res));
} catch (e) {
if (e.message !== "No TLS certificate in response") {
console.error(e.message)
console.error(e.message);
}
}
}
debug("Cert Info Query Time: " + (dayjs().valueOf() - certInfoStartTime) + "ms")
debug("Cert Info Query Time: " + (dayjs().valueOf() - certInfoStartTime) + "ms");
if (this.type === "http") {
bean.status = UP;
@@ -160,26 +175,26 @@ class Monitor extends BeanModel {
// Convert to string for object/array
if (typeof data !== "string") {
data = JSON.stringify(data)
data = JSON.stringify(data);
}
if (data.includes(this.keyword)) {
bean.msg += ", keyword is found"
bean.msg += ", keyword is found";
bean.status = UP;
} else {
throw new Error(bean.msg + ", but keyword is not found")
throw new Error(bean.msg + ", but keyword is not found");
}
}
} else if (this.type === "port") {
bean.ping = await tcping(this.hostname, this.port);
bean.msg = ""
bean.msg = "";
bean.status = UP;
} else if (this.type === "ping") {
bean.ping = await ping(this.hostname);
bean.msg = ""
bean.msg = "";
bean.status = UP;
} else if (this.type === "dns") {
let startTime = dayjs().valueOf();
@@ -199,7 +214,7 @@ class Monitor extends BeanModel {
dnsRes.forEach(record => {
dnsMessage += `Hostname: ${record.exchange} - Priority: ${record.priority} | `;
});
dnsMessage = dnsMessage.slice(0, -2)
dnsMessage = dnsMessage.slice(0, -2);
} else if (this.dns_resolve_type == "NS") {
dnsMessage += "Servers: ";
dnsMessage += dnsRes.join(" | ");
@@ -209,7 +224,7 @@ class Monitor extends BeanModel {
dnsRes.forEach(record => {
dnsMessage += `Name: ${record.name} | Port: ${record.port} | Priority: ${record.priority} | Weight: ${record.weight} | `;
});
dnsMessage = dnsMessage.slice(0, -2)
dnsMessage = dnsMessage.slice(0, -2);
}
if (this.dnsLastResult !== dnsMessage) {
@@ -272,20 +287,20 @@ class Monitor extends BeanModel {
if (!isFirstBeat || bean.status === DOWN) {
let notificationList = await R.getAll("SELECT notification.* FROM notification, monitor_notification WHERE monitor_id = ? AND monitor_notification.notification_id = notification.id ", [
this.id,
])
]);
let text;
if (bean.status === UP) {
text = "✅ Up"
text = "✅ Up";
} else {
text = "🔴 Down"
text = "🔴 Down";
}
let msg = `[${this.name}] [${text}] ${bean.msg}`;
for (let notification of notificationList) {
try {
await Notification.send(JSON.parse(notification.config), msg, await this.toJSON(), bean.toJSON())
await Notification.send(JSON.parse(notification.config), msg, await this.toJSON(), bean.toJSON());
} catch (e) {
console.error("Cannot send notification to " + notification.name);
console.log(e);
@@ -300,18 +315,18 @@ class Monitor extends BeanModel {
let beatInterval = this.interval;
if (bean.status === UP) {
console.info(`Monitor #${this.id} '${this.name}': Successful Response: ${bean.ping} ms | Interval: ${beatInterval} seconds | Type: ${this.type}`)
console.info(`Monitor #${this.id} '${this.name}': Successful Response: ${bean.ping} ms | Interval: ${beatInterval} seconds | Type: ${this.type}`);
} else if (bean.status === PENDING) {
if (this.retryInterval !== this.interval) {
beatInterval = this.retryInterval;
}
console.warn(`Monitor #${this.id} '${this.name}': Pending: ${bean.msg} | Max retries: ${this.maxretries} | Retry: ${retries} | Retry Interval: ${beatInterval} seconds | Type: ${this.type}`)
console.warn(`Monitor #${this.id} '${this.name}': Pending: ${bean.msg} | Max retries: ${this.maxretries} | Retry: ${retries} | Retry Interval: ${beatInterval} seconds | Type: ${this.type}`);
} else {
console.warn(`Monitor #${this.id} '${this.name}': Failing: ${bean.msg} | Interval: ${beatInterval} seconds | Type: ${this.type}`)
console.warn(`Monitor #${this.id} '${this.name}': Failing: ${bean.msg} | Interval: ${beatInterval} seconds | Type: ${this.type}`);
}
io.to(this.user_id).emit("heartbeat", bean.toJSON());
Monitor.sendStats(io, this.id, this.user_id)
Monitor.sendStats(io, this.id, this.user_id);
await R.store(bean);
prometheus.update(bean, tlsInfo);
@@ -322,7 +337,7 @@ class Monitor extends BeanModel {
this.heartbeatInterval = setTimeout(beat, beatInterval * 1000);
}
}
};
beat();
}
@@ -415,7 +430,7 @@ class Monitor extends BeanModel {
* https://www.uptrends.com/support/kb/reporting/calculation-of-uptime-and-downtime
* @param duration : int Hours
*/
static async sendUptime(duration, io, monitorID, userID) {
static async calcUptime(duration, monitorID) {
const timeLogger = new TimeLogger();
const startTime = R.isoDateTime(dayjs.utc().subtract(duration, "hour"));
@@ -468,12 +483,21 @@ class Monitor extends BeanModel {
} else {
// Handle new monitor with only one beat, because the beat's duration = 0
let status = parseInt(await R.getCell("SELECT `status` FROM heartbeat WHERE monitor_id = ?", [ monitorID ]));
console.log("here???" + status);
if (status === UP) {
uptime = 1;
}
}
return uptime;
}
/**
* Send Uptime
* @param duration : int Hours
*/
static async sendUptime(duration, io, monitorID, userID) {
const uptime = await this.calcUptime(duration, monitorID);
io.to(userID).emit("uptime", monitorID, duration, uptime);
}
}
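The sendUptime/calcUptime split above exists so the uptime ratio can be computed without a socket connection, which is exactly what the new status-page API needs. A small sketch of both call paths (monitorID is a placeholder):

// Sketch of the two call paths after the refactor (monitorID is hypothetical).
const Monitor = require("./server/model/monitor");

async function logUptime(monitorID) {
    // Pure calculation, no socket.io involved - this is what the status-page router uses.
    const uptime = await Monitor.calcUptime(24, monitorID);   // ratio between 0 and 1
    console.log(`24h uptime: ${(uptime * 100).toFixed(2)}%`);
}

// The socket path is now just a thin wrapper, as in the diff:
//   static async sendUptime(duration, io, monitorID, userID) {
//       const uptime = await this.calcUptime(duration, monitorID);
//       io.to(userID).emit("uptime", monitorID, duration, uptime);
//   }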


@@ -0,0 +1,749 @@
let url = require("url");
let MemoryCache = require("./memory-cache");
let t = {
ms: 1,
second: 1000,
minute: 60000,
hour: 3600000,
day: 3600000 * 24,
week: 3600000 * 24 * 7,
month: 3600000 * 24 * 30,
};
let instances = [];
let matches = function (a) {
return function (b) {
return a === b;
};
};
let doesntMatch = function (a) {
return function (b) {
return !matches(a)(b);
};
};
let logDuration = function (d, prefix) {
let str = d > 1000 ? (d / 1000).toFixed(2) + "sec" : d + "ms";
return "\x1b[33m- " + (prefix ? prefix + " " : "") + str + "\x1b[0m";
};
function getSafeHeaders(res) {
return res.getHeaders ? res.getHeaders() : res._headers;
}
function ApiCache() {
let memCache = new MemoryCache();
let globalOptions = {
debug: false,
defaultDuration: 3600000,
enabled: true,
appendKey: [],
jsonp: false,
redisClient: false,
headerBlacklist: [],
statusCodes: {
include: [],
exclude: [],
},
events: {
expire: undefined,
},
headers: {
// 'cache-control': 'no-cache' // example of header overwrite
},
trackPerformance: false,
respectCacheControl: false,
};
let middlewareOptions = [];
let instance = this;
let index = null;
let timers = {};
let performanceArray = []; // for tracking cache hit rate
instances.push(this);
this.id = instances.length;
function debug(a, b, c, d) {
let arr = ["\x1b[36m[apicache]\x1b[0m", a, b, c, d].filter(function (arg) {
return arg !== undefined;
});
let debugEnv = process.env.DEBUG && process.env.DEBUG.split(",").indexOf("apicache") !== -1;
return (globalOptions.debug || debugEnv) && console.log.apply(null, arr);
}
function shouldCacheResponse(request, response, toggle) {
let opt = globalOptions;
let codes = opt.statusCodes;
if (!response) {
return false;
}
if (toggle && !toggle(request, response)) {
return false;
}
if (codes.exclude && codes.exclude.length && codes.exclude.indexOf(response.statusCode) !== -1) {
return false;
}
if (codes.include && codes.include.length && codes.include.indexOf(response.statusCode) === -1) {
return false;
}
return true;
}
function addIndexEntries(key, req) {
let groupName = req.apicacheGroup;
if (groupName) {
debug("group detected \"" + groupName + "\"");
let group = (index.groups[groupName] = index.groups[groupName] || []);
group.unshift(key);
}
index.all.unshift(key);
}
function filterBlacklistedHeaders(headers) {
return Object.keys(headers)
.filter(function (key) {
return globalOptions.headerBlacklist.indexOf(key) === -1;
})
.reduce(function (acc, header) {
acc[header] = headers[header];
return acc;
}, {});
}
function createCacheObject(status, headers, data, encoding) {
return {
status: status,
headers: filterBlacklistedHeaders(headers),
data: data,
encoding: encoding,
timestamp: new Date().getTime() / 1000, // seconds since epoch. This is used to properly decrement max-age headers in cached responses.
};
}
function cacheResponse(key, value, duration) {
let redis = globalOptions.redisClient;
let expireCallback = globalOptions.events.expire;
if (redis && redis.connected) {
try {
redis.hset(key, "response", JSON.stringify(value));
redis.hset(key, "duration", duration);
redis.expire(key, duration / 1000, expireCallback || function () {});
} catch (err) {
debug("[apicache] error in redis.hset()");
}
} else {
memCache.add(key, value, duration, expireCallback);
}
// add automatic cache clearing from duration, includes max limit on setTimeout
timers[key] = setTimeout(function () {
instance.clear(key, true);
}, Math.min(duration, 2147483647));
}
function accumulateContent(res, content) {
if (content) {
if (typeof content == "string") {
res._apicache.content = (res._apicache.content || "") + content;
} else if (Buffer.isBuffer(content)) {
let oldContent = res._apicache.content;
if (typeof oldContent === "string") {
oldContent = !Buffer.from ? new Buffer(oldContent) : Buffer.from(oldContent);
}
if (!oldContent) {
oldContent = !Buffer.alloc ? new Buffer(0) : Buffer.alloc(0);
}
res._apicache.content = Buffer.concat(
[oldContent, content],
oldContent.length + content.length
);
} else {
res._apicache.content = content;
}
}
}
function makeResponseCacheable(req, res, next, key, duration, strDuration, toggle) {
// monkeypatch res.end to create cache object
res._apicache = {
write: res.write,
writeHead: res.writeHead,
end: res.end,
cacheable: true,
content: undefined,
};
// append header overwrites if applicable
Object.keys(globalOptions.headers).forEach(function (name) {
res.setHeader(name, globalOptions.headers[name]);
});
res.writeHead = function () {
// add cache control headers
if (!globalOptions.headers["cache-control"]) {
if (shouldCacheResponse(req, res, toggle)) {
res.setHeader("cache-control", "max-age=" + (duration / 1000).toFixed(0));
} else {
res.setHeader("cache-control", "no-cache, no-store, must-revalidate");
}
}
res._apicache.headers = Object.assign({}, getSafeHeaders(res));
return res._apicache.writeHead.apply(this, arguments);
};
// patch res.write
res.write = function (content) {
accumulateContent(res, content);
return res._apicache.write.apply(this, arguments);
};
// patch res.end
res.end = function (content, encoding) {
if (shouldCacheResponse(req, res, toggle)) {
accumulateContent(res, content);
if (res._apicache.cacheable && res._apicache.content) {
addIndexEntries(key, req);
let headers = res._apicache.headers || getSafeHeaders(res);
let cacheObject = createCacheObject(
res.statusCode,
headers,
res._apicache.content,
encoding
);
cacheResponse(key, cacheObject, duration);
// display log entry
let elapsed = new Date() - req.apicacheTimer;
debug("adding cache entry for \"" + key + "\" @ " + strDuration, logDuration(elapsed));
debug("_apicache.headers: ", res._apicache.headers);
debug("res.getHeaders(): ", getSafeHeaders(res));
debug("cacheObject: ", cacheObject);
}
}
return res._apicache.end.apply(this, arguments);
};
next();
}
function sendCachedResponse(request, response, cacheObject, toggle, next, duration) {
if (toggle && !toggle(request, response)) {
return next();
}
let headers = getSafeHeaders(response);
// Modified by @louislam, removed Cache-control, since I don't need client side cache!
// Original Source: https://github.com/kwhitley/apicache/blob/0d5686cc21fad353c6dddee646288c2fca3e4f50/src/apicache.js#L254
Object.assign(headers, filterBlacklistedHeaders(cacheObject.headers || {}));
// only embed apicache headers when not in production environment
if (process.env.NODE_ENV !== "production") {
Object.assign(headers, {
"apicache-store": globalOptions.redisClient ? "redis" : "memory",
"apicache-version": "1.6.2-modified",
});
}
// unstringify buffers
let data = cacheObject.data;
if (data && data.type === "Buffer") {
data =
typeof data.data === "number" ? new Buffer.alloc(data.data) : new Buffer.from(data.data);
}
// test Etag against If-None-Match for 304
let cachedEtag = cacheObject.headers.etag;
let requestEtag = request.headers["if-none-match"];
if (requestEtag && cachedEtag === requestEtag) {
response.writeHead(304, headers);
return response.end();
}
response.writeHead(cacheObject.status || 200, headers);
return response.end(data, cacheObject.encoding);
}
function syncOptions() {
for (let i in middlewareOptions) {
Object.assign(middlewareOptions[i].options, globalOptions, middlewareOptions[i].localOptions);
}
}
this.clear = function (target, isAutomatic) {
let group = index.groups[target];
let redis = globalOptions.redisClient;
if (group) {
debug("clearing group \"" + target + "\"");
group.forEach(function (key) {
debug("clearing cached entry for \"" + key + "\"");
clearTimeout(timers[key]);
delete timers[key];
if (!globalOptions.redisClient) {
memCache.delete(key);
} else {
try {
redis.del(key);
} catch (err) {
console.log("[apicache] error in redis.del(\"" + key + "\")");
}
}
index.all = index.all.filter(doesntMatch(key));
});
delete index.groups[target];
} else if (target) {
debug("clearing " + (isAutomatic ? "expired" : "cached") + " entry for \"" + target + "\"");
clearTimeout(timers[target]);
delete timers[target];
// clear actual cached entry
if (!redis) {
memCache.delete(target);
} else {
try {
redis.del(target);
} catch (err) {
console.log("[apicache] error in redis.del(\"" + target + "\")");
}
}
// remove from global index
index.all = index.all.filter(doesntMatch(target));
// remove target from each group that it may exist in
Object.keys(index.groups).forEach(function (groupName) {
index.groups[groupName] = index.groups[groupName].filter(doesntMatch(target));
// delete group if now empty
if (!index.groups[groupName].length) {
delete index.groups[groupName];
}
});
} else {
debug("clearing entire index");
if (!redis) {
memCache.clear();
} else {
// clear redis keys one by one from internal index to prevent clearing non-apicache entries
index.all.forEach(function (key) {
clearTimeout(timers[key]);
delete timers[key];
try {
redis.del(key);
} catch (err) {
console.log("[apicache] error in redis.del(\"" + key + "\")");
}
});
}
this.resetIndex();
}
return this.getIndex();
};
function parseDuration(duration, defaultDuration) {
if (typeof duration === "number") {
return duration;
}
if (typeof duration === "string") {
let split = duration.match(/^([\d\.,]+)\s?(\w+)$/);
if (split.length === 3) {
let len = parseFloat(split[1]);
let unit = split[2].replace(/s$/i, "").toLowerCase();
if (unit === "m") {
unit = "ms";
}
return (len || 1) * (t[unit] || 0);
}
}
return defaultDuration;
}
this.getDuration = function (duration) {
return parseDuration(duration, globalOptions.defaultDuration);
};
/**
* Return cache performance statistics (hit rate). Suitable for putting into a route:
* <code>
* app.get('/api/cache/performance', (req, res) => {
* res.json(apicache.getPerformance())
* })
* </code>
*/
this.getPerformance = function () {
return performanceArray.map(function (p) {
return p.report();
});
};
this.getIndex = function (group) {
if (group) {
return index.groups[group];
} else {
return index;
}
};
this.middleware = function cache(strDuration, middlewareToggle, localOptions) {
let duration = instance.getDuration(strDuration);
let opt = {};
middlewareOptions.push({
options: opt,
});
let options = function (localOptions) {
if (localOptions) {
middlewareOptions.find(function (middleware) {
return middleware.options === opt;
}).localOptions = localOptions;
}
syncOptions();
return opt;
};
options(localOptions);
/**
* A no-op stand-in used when performance tracking is disabled
*/
function NOOPCachePerformance() {
this.report = this.hit = this.miss = function () {}; // noop;
}
/**
* A function for tracking and reporting hit rate. These statistics are returned by the getPerformance() call above.
*/
function CachePerformance() {
/**
* Tracks the hit rate for the last 100 requests.
* If there have been fewer than 100 requests, the hit rate just considers the requests that have happened.
*/
this.hitsLast100 = new Uint8Array(100 / 4); // each hit is 2 bits
/**
* Tracks the hit rate for the last 1000 requests.
* If there have been fewer than 1000 requests, the hit rate just considers the requests that have happened.
*/
this.hitsLast1000 = new Uint8Array(1000 / 4); // each hit is 2 bits
/**
* Tracks the hit rate for the last 10000 requests.
* If there have been fewer than 10000 requests, the hit rate just considers the requests that have happened.
*/
this.hitsLast10000 = new Uint8Array(10000 / 4); // each hit is 2 bits
/**
* Tracks the hit rate for the last 100000 requests.
* If there have been fewer than 100000 requests, the hit rate just considers the requests that have happened.
*/
this.hitsLast100000 = new Uint8Array(100000 / 4); // each hit is 2 bits
/**
* The number of calls that have passed through the middleware since the server started.
*/
this.callCount = 0;
/**
* The total number of hits since the server started
*/
this.hitCount = 0;
/**
* The key from the last cache hit. This is useful in identifying which route these statistics apply to.
*/
this.lastCacheHit = null;
/**
* The key from the last cache miss. This is useful in identifying which route these statistics apply to.
*/
this.lastCacheMiss = null;
/**
* Return performance statistics
*/
this.report = function () {
return {
lastCacheHit: this.lastCacheHit,
lastCacheMiss: this.lastCacheMiss,
callCount: this.callCount,
hitCount: this.hitCount,
missCount: this.callCount - this.hitCount,
hitRate: this.callCount == 0 ? null : this.hitCount / this.callCount,
hitRateLast100: this.hitRate(this.hitsLast100),
hitRateLast1000: this.hitRate(this.hitsLast1000),
hitRateLast10000: this.hitRate(this.hitsLast10000),
hitRateLast100000: this.hitRate(this.hitsLast100000),
};
};
/**
* Computes a cache hit rate from an array of hits and misses.
* @param {Uint8Array} array An array representing hits and misses.
* @returns a number between 0 and 1, or null if the array has no hits or misses
*/
this.hitRate = function (array) {
let hits = 0;
let misses = 0;
for (let i = 0; i < array.length; i++) {
let n8 = array[i];
for (let j = 0; j < 4; j++) {
switch (n8 & 3) {
case 1:
hits++;
break;
case 2:
misses++;
break;
}
n8 >>= 2;
}
}
let total = hits + misses;
if (total == 0) {
return null;
}
return hits / total;
};
/**
* Record a hit or miss in the given array. It will be recorded at a position determined
* by the current value of the callCount variable.
* @param {Uint8Array} array An array representing hits and misses.
* @param {boolean} hit true for a hit, false for a miss
* Each element in the array is 8 bits, and encodes 4 hit/miss records.
* Each hit or miss is encoded as two bits as follows:
* 00 means no hit or miss has been recorded in these bits
* 01 encodes a hit
* 10 encodes a miss
*/
this.recordHitInArray = function (array, hit) {
let arrayIndex = ~~(this.callCount / 4) % array.length;
let bitOffset = (this.callCount % 4) * 2; // 2 bits per record, 4 records per uint8 array element
let clearMask = ~(3 << bitOffset);
let record = (hit ? 1 : 2) << bitOffset;
array[arrayIndex] = (array[arrayIndex] & clearMask) | record;
};
/**
* Records the hit or miss in the tracking arrays and increments the call count.
* @param {boolean} hit true records a hit, false records a miss
*/
this.recordHit = function (hit) {
this.recordHitInArray(this.hitsLast100, hit);
this.recordHitInArray(this.hitsLast1000, hit);
this.recordHitInArray(this.hitsLast10000, hit);
this.recordHitInArray(this.hitsLast100000, hit);
if (hit) {
this.hitCount++;
}
this.callCount++;
};
/**
* Records a hit event, setting lastCacheHit to the given key
* @param {string} key The key that had the cache hit
*/
this.hit = function (key) {
this.recordHit(true);
this.lastCacheHit = key;
};
/**
* Records a miss event, setting lastCacheMiss to the given key
* @param {string} key The key that had the cache miss
*/
this.miss = function (key) {
this.recordHit(false);
this.lastCacheMiss = key;
};
}
let perf = globalOptions.trackPerformance ? new CachePerformance() : new NOOPCachePerformance();
performanceArray.push(perf);
let cache = function (req, res, next) {
function bypass() {
debug("bypass detected, skipping cache.");
return next();
}
// initial bypass chances
if (!opt.enabled) {
return bypass();
}
if (
req.headers["x-apicache-bypass"] ||
req.headers["x-apicache-force-fetch"] ||
(opt.respectCacheControl && req.headers["cache-control"] == "no-cache")
) {
return bypass();
}
// REMOVED IN 0.11.1 TO CORRECT MIDDLEWARE TOGGLE EXECUTE ORDER
// if (typeof middlewareToggle === 'function') {
// if (!middlewareToggle(req, res)) return bypass()
// } else if (middlewareToggle !== undefined && !middlewareToggle) {
// return bypass()
// }
// embed timer
req.apicacheTimer = new Date();
// In Express 4.x the url is ambiguous based on where a router is mounted. originalUrl will give the full Url
let key = req.originalUrl || req.url;
// Remove querystring from key if jsonp option is enabled
if (opt.jsonp) {
key = url.parse(key).pathname;
}
// add appendKey (either custom function or response path)
if (typeof opt.appendKey === "function") {
key += "$$appendKey=" + opt.appendKey(req, res);
} else if (opt.appendKey.length > 0) {
let appendKey = req;
for (let i = 0; i < opt.appendKey.length; i++) {
appendKey = appendKey[opt.appendKey[i]];
}
key += "$$appendKey=" + appendKey;
}
// attempt cache hit
let redis = opt.redisClient;
let cached = !redis ? memCache.getValue(key) : null;
// send if cache hit from memory-cache
if (cached) {
let elapsed = new Date() - req.apicacheTimer;
debug("sending cached (memory-cache) version of", key, logDuration(elapsed));
perf.hit(key);
return sendCachedResponse(req, res, cached, middlewareToggle, next, duration);
}
// send if cache hit from redis
if (redis && redis.connected) {
try {
redis.hgetall(key, function (err, obj) {
if (!err && obj && obj.response) {
let elapsed = new Date() - req.apicacheTimer;
debug("sending cached (redis) version of", key, logDuration(elapsed));
perf.hit(key);
return sendCachedResponse(
req,
res,
JSON.parse(obj.response),
middlewareToggle,
next,
duration
);
} else {
perf.miss(key);
return makeResponseCacheable(
req,
res,
next,
key,
duration,
strDuration,
middlewareToggle
);
}
});
} catch (err) {
// bypass redis on error
perf.miss(key);
return makeResponseCacheable(req, res, next, key, duration, strDuration, middlewareToggle);
}
} else {
perf.miss(key);
return makeResponseCacheable(req, res, next, key, duration, strDuration, middlewareToggle);
}
};
cache.options = options;
return cache;
};
this.options = function (options) {
if (options) {
Object.assign(globalOptions, options);
syncOptions();
if ("defaultDuration" in options) {
// Convert the default duration to a number in milliseconds (if needed)
globalOptions.defaultDuration = parseDuration(globalOptions.defaultDuration, 3600000);
}
if (globalOptions.trackPerformance) {
debug("WARNING: using trackPerformance flag can cause high memory usage!");
}
return this;
} else {
return globalOptions;
}
};
this.resetIndex = function () {
index = {
all: [],
groups: {},
};
};
this.newInstance = function (config) {
let instance = new ApiCache();
if (config) {
instance.options(config);
}
return instance;
};
this.clone = function () {
return this.newInstance(this.options());
};
// initialize index
this.resetIndex();
}
module.exports = new ApiCache();
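CachePerformance above packs four hit/miss records into every byte of its Uint8Array buffers, two bits per record (01 = hit, 10 = miss, 00 = not yet recorded). A standalone sketch of that packing, to make the bit arithmetic concrete:

// Standalone sketch of the 2-bit packing used by CachePerformance.recordHitInArray.
function recordHitInArray(array, callCount, hit) {
    const arrayIndex = ~~(callCount / 4) % array.length; // 4 records per byte
    const bitOffset = (callCount % 4) * 2;               // 2 bits per record
    const clearMask = ~(3 << bitOffset);                 // zero out the old record
    const record = (hit ? 1 : 2) << bitOffset;           // 01 = hit, 10 = miss
    array[arrayIndex] = (array[arrayIndex] & clearMask) | record;
}

const last100 = new Uint8Array(100 / 4);   // 25 bytes = 100 records
recordHitInArray(last100, 0, true);        // hit  -> byte 0 = 0b00000001
recordHitInArray(last100, 1, false);       // miss -> byte 0 = 0b00001001
console.log(last100[0].toString(2));       // prints "1001"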


@@ -0,0 +1,14 @@
const apicache = require("./apicache");
apicache.options({
headerBlacklist: [
"cache-control"
],
headers: {
// Disable client side cache, only server side cache.
// BUG! Not working for the second request
"cache-control": "no-cache",
},
});
module.exports = apicache;


@@ -0,0 +1,59 @@
function MemoryCache() {
this.cache = {};
this.size = 0;
}
MemoryCache.prototype.add = function (key, value, time, timeoutCallback) {
let old = this.cache[key];
let instance = this;
let entry = {
value: value,
expire: time + Date.now(),
timeout: setTimeout(function () {
instance.delete(key);
return timeoutCallback && typeof timeoutCallback === "function" && timeoutCallback(value, key);
}, time)
};
this.cache[key] = entry;
this.size = Object.keys(this.cache).length;
return entry;
};
MemoryCache.prototype.delete = function (key) {
let entry = this.cache[key];
if (entry) {
clearTimeout(entry.timeout);
}
delete this.cache[key];
this.size = Object.keys(this.cache).length;
return null;
};
MemoryCache.prototype.get = function (key) {
let entry = this.cache[key];
return entry;
};
MemoryCache.prototype.getValue = function (key) {
let entry = this.get(key);
return entry && entry.value;
};
MemoryCache.prototype.clear = function () {
Object.keys(this.cache).forEach(function (key) {
this.delete(key);
}, this);
return true;
};
module.exports = MemoryCache;
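A quick usage sketch of the in-memory store above; durations are in milliseconds, and the require path is an assumption based on how apicache.js loads the module:

// Usage sketch; the module path is assumed from the surrounding files.
const MemoryCache = require("./server/modules/apicache/memory-cache");

const cache = new MemoryCache();
cache.add("greeting", "hello", 5000, (value, key) => {
    console.log(key + " expired");          // fires roughly 5 seconds later
});
console.log(cache.getValue("greeting"));    // "hello"
console.log(cache.size);                    // 1
cache.delete("greeting");                   // also clears the pending expiry timeout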


@@ -0,0 +1,150 @@
let express = require("express");
const { allowDevAllOrigin, getSettings, setting } = require("../util-server");
const { R } = require("redbean-node");
const server = require("../server");
const apicache = require("../modules/apicache");
const Monitor = require("../model/monitor");
let router = express.Router();
let cache = apicache.middleware;
router.get("/api/entry-page", async (_, response) => {
allowDevAllOrigin(response);
response.json(server.entryPage);
});
// Status Page Config
router.get("/api/status-page/config", async (_request, response) => {
allowDevAllOrigin(response);
let config = await getSettings("statusPage");
if (! config.statusPageTheme) {
config.statusPageTheme = "light";
}
if (! config.statusPagePublished) {
config.statusPagePublished = true;
}
if (! config.title) {
config.title = "Uptime Kuma";
}
response.json(config);
});
// Status Page - Get the current Incident
// Can fetch only if published
router.get("/api/status-page/incident", async (_, response) => {
allowDevAllOrigin(response);
try {
await checkPublished();
let incident = await R.findOne("incident", " pin = 1 AND active = 1");
if (incident) {
incident = incident.toPublicJSON();
}
response.json({
ok: true,
incident,
});
} catch (error) {
send403(response, error.message);
}
});
// Status Page - Monitor List
// Can fetch only if published
router.get("/api/status-page/monitor-list", cache("5 minutes"), async (_request, response) => {
allowDevAllOrigin(response);
try {
await checkPublished();
const publicGroupList = [];
let list = await R.find("group", " public = 1 ORDER BY weight ");
for (let groupBean of list) {
publicGroupList.push(await groupBean.toPublicJSON());
}
response.json(publicGroupList);
} catch (error) {
send403(response, error.message);
}
});
// Status Page Polling Data
// Can fetch only if published
router.get("/api/status-page/heartbeat", cache("5 minutes"), async (_request, response) => {
allowDevAllOrigin(response);
try {
await checkPublished();
let heartbeatList = {};
let uptimeList = {};
let monitorIDList = await R.getCol(`
SELECT monitor_group.monitor_id FROM monitor_group, \`group\`
WHERE monitor_group.group_id = \`group\`.id
AND public = 1
`);
for (let monitorID of monitorIDList) {
let list = await R.getAll(`
SELECT * FROM heartbeat
WHERE monitor_id = ?
ORDER BY time DESC
LIMIT 50
`, [
monitorID,
]);
list = R.convertToBeans("heartbeat", list);
heartbeatList[monitorID] = list.reverse().map(row => row.toPublicJSON());
const type = 24;
uptimeList[`${monitorID}_${type}`] = await Monitor.calcUptime(type, monitorID);
}
response.json({
heartbeatList,
uptimeList
});
} catch (error) {
send403(response, error.message);
}
});
async function checkPublished() {
if (! await isPublished()) {
throw new Error("The status page is not published");
}
}
/**
* Default is published
* @returns {Promise<boolean>}
*/
async function isPublished() {
const value = await setting("statusPagePublished");
if (value === null) {
return true;
}
return value;
}
function send403(res, msg = "") {
res.status(403).json({
"status": "fail",
"msg": msg,
});
}
module.exports = router;
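For reference, the three public endpoints added above can be exercised without logging in once the status page is published. The sketch below assumes the default port 3001 from server.js; axios is already a project dependency:

// Sketch of hitting the new public status-page endpoints (default host/port assumed).
const axios = require("axios");

async function demoStatusPage() {
    const base = "http://localhost:3001";

    const config = await axios.get(base + "/api/status-page/config");
    const groups = await axios.get(base + "/api/status-page/monitor-list"); // cached for 5 minutes
    const beats  = await axios.get(base + "/api/status-page/heartbeat");    // cached for 5 minutes

    console.log(config.data.title);                      // e.g. "Uptime Kuma"
    console.log(groups.data.length + " public groups");
    console.log(Object.keys(beats.data.heartbeatList));  // monitor IDs with recent beats
}

demoStatusPage().catch(console.error);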


@@ -8,12 +8,12 @@ console.log("Node Env: " + process.env.NODE_ENV);
const { sleep, debug, TimeLogger, getRandomInt } = require("../src/util");
console.log("Importing Node libraries")
console.log("Importing Node libraries");
const fs = require("fs");
const http = require("http");
const https = require("https");
console.log("Importing 3rd-party libraries")
console.log("Importing 3rd-party libraries");
debug("Importing express");
const express = require("express");
debug("Importing socket.io");
@@ -35,7 +35,7 @@ console.log("Importing this project modules");
debug("Importing Monitor");
const Monitor = require("./model/monitor");
debug("Importing Settings");
const { getSettings, setSettings, setting, initJWTSecret, genSecret } = require("./util-server");
const { getSettings, setSettings, setting, initJWTSecret, genSecret, allowDevAllOrigin, checkLogin } = require("./util-server");
debug("Importing Notification");
const { Notification } = require("./notification");
@@ -62,13 +62,6 @@ const port = parseInt(process.env.PORT || args.port || 3001);
const sslKey = process.env.SSL_KEY || args["ssl-key"] || undefined;
const sslCert = process.env.SSL_CERT || args["ssl-cert"] || undefined;
// Demo Mode?
const demoMode = args["demo"] || false;
if (demoMode) {
console.log("==== Demo Mode ====");
}
// Data Directory (must end with "/")
Database.dataDir = process.env.DATA_DIR || args["data-dir"] || "./data/";
Database.path = Database.dataDir + "kuma.db";
@@ -77,7 +70,7 @@ if (! fs.existsSync(Database.dataDir)) {
}
console.log(`Data Dir: ${Database.dataDir}`);
console.log("Creating express and socket.io instance")
console.log("Creating express and socket.io instance");
const app = express();
let server;
@@ -98,6 +91,7 @@ module.exports.io = io;
// Must be after io instantiation
const { sendNotificationList, sendHeartbeatList, sendImportantHeartbeatList } = require("./client");
const { statusPageSocketHandler } = require("./socket-handlers/status-page-socket-handler");
app.use(express.json());
@@ -131,12 +125,19 @@ let needSetup = false;
*/
let indexHTML = fs.readFileSync("./dist/index.html").toString();
exports.entryPage = "dashboard";
(async () => {
Database.init(args);
await initDatabase();
console.log("Adding route")
exports.entryPage = await setting("entryPage");
console.log("Adding route");
// ***************************
// Normal Router here
// ***************************
// Robots.txt
app.get("/robots.txt", async (_request, response) => {
@@ -156,28 +157,39 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
app.use("/", express.static("dist"));
// ./data/upload
app.use("/upload", express.static(Database.uploadDir));
app.get("/.well-known/change-password", async (_, response) => {
response.redirect("https://github.com/louislam/uptime-kuma/wiki/Reset-Password-via-CLI");
});
// Universal Route Handler, must be at the end
// API Router
const apiRouter = require("./routers/api-router");
app.use(apiRouter);
// Universal Route Handler, must be at the end of all express routes.
app.get("*", async (_request, response) => {
response.send(indexHTML);
if (_request.originalUrl.startsWith("/upload/")) {
response.status(404).send("File not found.");
} else {
response.send(indexHTML);
}
});
console.log("Adding socket handler")
console.log("Adding socket handler");
io.on("connection", async (socket) => {
socket.emit("info", {
version: checkVersion.version,
latestVersion: checkVersion.latestVersion,
})
});
totalClient++;
if (needSetup) {
console.log("Redirect to setup page")
socket.emit("setup")
console.log("Redirect to setup page");
socket.emit("setup");
}
socket.on("disconnect", () => {
@@ -185,7 +197,7 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
});
// ***************************
// Public API
// Public Socket API
// ***************************
socket.on("loginByToken", async (token, callback) => {
@@ -193,44 +205,44 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
try {
let decoded = jwt.verify(token, jwtSecret);
console.log("Username from JWT: " + decoded.username)
console.log("Username from JWT: " + decoded.username);
let user = await R.findOne("user", " username = ? AND active = 1 ", [
decoded.username,
])
]);
if (user) {
debug("afterLogin")
debug("afterLogin");
afterLogin(socket, user)
afterLogin(socket, user);
debug("afterLogin ok")
debug("afterLogin ok");
callback({
ok: true,
})
});
} else {
callback({
ok: false,
msg: "The user is inactive or deleted.",
})
});
}
} catch (error) {
callback({
ok: false,
msg: "Invalid token.",
})
});
}
});
socket.on("login", async (data, callback) => {
console.log("Login")
console.log("Login");
let user = await login(data.username, data.password)
let user = await login(data.username, data.password);
if (user) {
afterLogin(socket, user)
afterLogin(socket, user);
if (user.twofaStatus == 0) {
callback({
@@ -238,13 +250,13 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
token: jwt.sign({
username: data.username,
}, jwtSecret),
})
});
}
if (user.twofaStatus == 1 && !data.token) {
callback({
tokenRequired: true,
})
});
}
if (data.token) {
@@ -256,39 +268,39 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
token: jwt.sign({
username: data.username,
}, jwtSecret),
})
});
} else {
callback({
ok: false,
msg: "Invalid Token!",
})
});
}
}
} else {
callback({
ok: false,
msg: "Incorrect username or password.",
})
});
}
});
socket.on("logout", async (callback) => {
socket.leave(socket.userID)
socket.leave(socket.userID);
socket.userID = null;
callback();
});
socket.on("prepare2FA", async (callback) => {
try {
checkLogin(socket)
checkLogin(socket);
let user = await R.findOne("user", " id = ? AND active = 1 ", [
socket.userID,
])
]);
if (user.twofa_status == 0) {
let newSecret = await genSecret()
let newSecret = await genSecret();
let encodedSecret = base32.encode(newSecret);
let uri = `otpauth://totp/Uptime%20Kuma:${user.username}?secret=${encodedSecret}`;
@@ -300,24 +312,24 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
callback({
ok: true,
uri: uri,
})
});
} else {
callback({
ok: false,
msg: "2FA is already enabled.",
})
});
}
} catch (error) {
callback({
ok: false,
msg: "Error while trying to prepare 2FA.",
})
});
}
});
socket.on("save2FA", async (callback) => {
try {
checkLogin(socket)
checkLogin(socket);
await R.exec("UPDATE `user` SET twofa_status = 1 WHERE id = ? ", [
socket.userID,
@@ -326,18 +338,18 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
callback({
ok: true,
msg: "2FA Enabled.",
})
});
} catch (error) {
callback({
ok: false,
msg: "Error while trying to change 2FA.",
})
});
}
});
socket.on("disable2FA", async (callback) => {
try {
checkLogin(socket)
checkLogin(socket);
await R.exec("UPDATE `user` SET twofa_status = 0 WHERE id = ? ", [
socket.userID,
@@ -346,19 +358,19 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
callback({
ok: true,
msg: "2FA Disabled.",
})
});
} catch (error) {
callback({
ok: false,
msg: "Error while trying to change 2FA.",
})
});
}
});
socket.on("verifyToken", async (token, callback) => {
let user = await R.findOne("user", " id = ? AND active = 1 ", [
socket.userID,
])
]);
let verify = notp.totp.verify(token, user.twofa_secret);
@@ -366,40 +378,40 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
callback({
ok: true,
valid: true,
})
});
} else {
callback({
ok: false,
msg: "Invalid Token.",
valid: false,
})
});
}
});
socket.on("twoFAStatus", async (callback) => {
checkLogin(socket)
checkLogin(socket);
try {
let user = await R.findOne("user", " id = ? AND active = 1 ", [
socket.userID,
])
]);
if (user.twofa_status == 1) {
callback({
ok: true,
status: true,
})
});
} else {
callback({
ok: true,
status: false,
})
});
}
} catch (error) {
callback({
ok: false,
msg: "Error while trying to get 2FA status.",
})
});
}
});
@@ -410,13 +422,13 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
socket.on("setup", async (username, password, callback) => {
try {
if ((await R.count("user")) !== 0) {
throw new Error("Uptime Kuma has been setup. If you want to setup again, please delete the database.")
throw new Error("Uptime Kuma has been setup. If you want to setup again, please delete the database.");
}
let user = R.dispense("user")
let user = R.dispense("user");
user.username = username;
user.password = passwordHash.generate(password)
await R.store(user)
user.password = passwordHash.generate(password);
await R.store(user);
needSetup = false;
@@ -440,8 +452,8 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
// Add a new monitor
socket.on("add", async (monitor, callback) => {
try {
checkLogin(socket)
let bean = R.dispense("monitor")
checkLogin(socket);
let bean = R.dispense("monitor");
let notificationIDList = monitor.notificationIDList;
delete monitor.notificationIDList;
@@ -449,11 +461,11 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
monitor.accepted_statuscodes_json = JSON.stringify(monitor.accepted_statuscodes);
delete monitor.accepted_statuscodes;
bean.import(monitor)
bean.user_id = socket.userID
await R.store(bean)
bean.import(monitor);
bean.user_id = socket.userID;
await R.store(bean);
await updateMonitorNotification(bean.id, notificationIDList)
await updateMonitorNotification(bean.id, notificationIDList);
await startMonitor(socket.userID, bean.id);
await sendMonitorList(socket);
@@ -475,18 +487,18 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
// Edit a monitor
socket.on("editMonitor", async (monitor, callback) => {
try {
checkLogin(socket)
checkLogin(socket);
let bean = await R.findOne("monitor", " id = ? ", [ monitor.id ])
let bean = await R.findOne("monitor", " id = ? ", [ monitor.id ]);
if (bean.user_id !== socket.userID) {
throw new Error("Permission denied.")
throw new Error("Permission denied.");
}
bean.name = monitor.name
bean.type = monitor.type
bean.url = monitor.url
bean.interval = monitor.interval
bean.name = monitor.name;
bean.type = monitor.type;
bean.url = monitor.url;
bean.interval = monitor.interval;
bean.retryInterval = monitor.retryInterval;
bean.hostname = monitor.hostname;
bean.maxretries = monitor.maxretries;
@@ -499,12 +511,12 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
bean.dns_resolve_type = monitor.dns_resolve_type;
bean.dns_resolve_server = monitor.dns_resolve_server;
await R.store(bean)
await R.store(bean);
await updateMonitorNotification(bean.id, monitor.notificationIDList)
await updateMonitorNotification(bean.id, monitor.notificationIDList);
if (bean.active) {
await restartMonitor(socket.userID, bean.id)
await restartMonitor(socket.userID, bean.id);
}
await sendMonitorList(socket);
@@ -516,7 +528,7 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
});
} catch (e) {
console.error(e)
console.error(e);
callback({
ok: false,
msg: e.message,
@@ -526,13 +538,13 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
socket.on("getMonitorList", async (callback) => {
try {
checkLogin(socket)
checkLogin(socket);
await sendMonitorList(socket);
callback({
ok: true,
});
} catch (e) {
console.error(e)
console.error(e);
callback({
ok: false,
msg: e.message,
@@ -542,14 +554,14 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
socket.on("getMonitor", async (monitorID, callback) => {
try {
checkLogin(socket)
checkLogin(socket);
console.log(`Get Monitor: ${monitorID} User ID: ${socket.userID}`)
console.log(`Get Monitor: ${monitorID} User ID: ${socket.userID}`);
let bean = await R.findOne("monitor", " id = ? AND user_id = ? ", [
monitorID,
socket.userID,
])
]);
callback({
ok: true,
@@ -567,7 +579,7 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
// Start or Resume the monitor
socket.on("resumeMonitor", async (monitorID, callback) => {
try {
checkLogin(socket)
checkLogin(socket);
await startMonitor(socket.userID, monitorID);
await sendMonitorList(socket);
@@ -586,8 +598,8 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
socket.on("pauseMonitor", async (monitorID, callback) => {
try {
checkLogin(socket)
await pauseMonitor(socket.userID, monitorID)
checkLogin(socket);
await pauseMonitor(socket.userID, monitorID);
await sendMonitorList(socket);
callback({
@@ -605,13 +617,13 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
socket.on("deleteMonitor", async (monitorID, callback) => {
try {
checkLogin(socket)
checkLogin(socket);
console.log(`Delete Monitor: ${monitorID} User ID: ${socket.userID}`)
console.log(`Delete Monitor: ${monitorID} User ID: ${socket.userID}`);
if (monitorID in monitorList) {
monitorList[monitorID].stop();
delete monitorList[monitorID]
delete monitorList[monitorID];
}
await R.exec("DELETE FROM monitor WHERE id = ? AND user_id = ? ", [
@@ -636,9 +648,9 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
socket.on("getTags", async (callback) => {
try {
checkLogin(socket)
checkLogin(socket);
const list = await R.findAll("tag")
const list = await R.findAll("tag");
callback({
ok: true,
@@ -655,12 +667,12 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
socket.on("addTag", async (tag, callback) => {
try {
checkLogin(socket)
checkLogin(socket);
let bean = R.dispense("tag")
bean.name = tag.name
bean.color = tag.color
await R.store(bean)
let bean = R.dispense("tag");
bean.name = tag.name;
bean.color = tag.color;
await R.store(bean);
callback({
ok: true,
@@ -677,12 +689,12 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
socket.on("editTag", async (tag, callback) => {
try {
checkLogin(socket)
checkLogin(socket);
let bean = await R.findOne("monitor", " id = ? ", [ tag.id ])
bean.name = tag.name
bean.color = tag.color
await R.store(bean)
let bean = await R.findOne("monitor", " id = ? ", [ tag.id ]);
bean.name = tag.name;
bean.color = tag.color;
await R.store(bean);
callback({
ok: true,
@@ -699,9 +711,9 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
socket.on("deleteTag", async (tagID, callback) => {
try {
checkLogin(socket)
checkLogin(socket);
await R.exec("DELETE FROM tag WHERE id = ? ", [ tagID ])
await R.exec("DELETE FROM tag WHERE id = ? ", [ tagID ]);
callback({
ok: true,
@@ -718,13 +730,13 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
socket.on("addMonitorTag", async (tagID, monitorID, value, callback) => {
try {
checkLogin(socket)
checkLogin(socket);
await R.exec("INSERT INTO monitor_tag (tag_id, monitor_id, value) VALUES (?, ?, ?)", [
tagID,
monitorID,
value,
])
]);
callback({
ok: true,
@@ -741,13 +753,13 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
socket.on("editMonitorTag", async (tagID, monitorID, value, callback) => {
try {
checkLogin(socket)
checkLogin(socket);
await R.exec("UPDATE monitor_tag SET value = ? WHERE tag_id = ? AND monitor_id = ?", [
value,
tagID,
monitorID,
])
]);
callback({
ok: true,
@@ -764,13 +776,13 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
socket.on("deleteMonitorTag", async (tagID, monitorID, value, callback) => {
try {
checkLogin(socket)
checkLogin(socket);
await R.exec("DELETE FROM monitor_tag WHERE tag_id = ? AND monitor_id = ? AND value = ?", [
tagID,
monitorID,
value,
])
]);
// Cleanup unused Tags
await R.exec("delete from tag where ( select count(*) from monitor_tag mt where tag.id = mt.tag_id ) = 0");
@@ -790,15 +802,15 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
socket.on("changePassword", async (password, callback) => {
try {
checkLogin(socket)
checkLogin(socket);
if (! password.currentPassword) {
throw new Error("Invalid new password")
throw new Error("Invalid new password");
}
let user = await R.findOne("user", " id = ? AND active = 1 ", [
socket.userID,
])
]);
if (user && passwordHash.verify(password.currentPassword, user.password)) {
@@ -807,9 +819,9 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
callback({
ok: true,
msg: "Password has been updated successfully.",
})
});
} else {
throw new Error("Incorrect current password")
throw new Error("Incorrect current password");
}
} catch (e) {
@@ -822,7 +834,7 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
socket.on("getSettings", async (callback) => {
try {
checkLogin(socket)
checkLogin(socket);
callback({
ok: true,
@@ -839,9 +851,10 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
socket.on("setSettings", async (data, callback) => {
try {
checkLogin(socket)
checkLogin(socket);
await setSettings("general", data)
await setSettings("general", data);
exports.entryPage = data.entryPage;
callback({
ok: true,
@@ -859,10 +872,10 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
// Add or Edit
socket.on("addNotification", async (notification, notificationID, callback) => {
try {
checkLogin(socket)
checkLogin(socket);
let notificationBean = await Notification.save(notification, notificationID, socket.userID)
await sendNotificationList(socket)
let notificationBean = await Notification.save(notification, notificationID, socket.userID);
await sendNotificationList(socket);
callback({
ok: true,
@@ -880,10 +893,10 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
socket.on("deleteNotification", async (notificationID, callback) => {
try {
checkLogin(socket)
checkLogin(socket);
await Notification.delete(notificationID, socket.userID)
await sendNotificationList(socket)
await Notification.delete(notificationID, socket.userID);
await sendNotificationList(socket);
callback({
ok: true,
@@ -900,9 +913,9 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
socket.on("testNotification", async (notification, callback) => {
try {
checkLogin(socket)
checkLogin(socket);
let msg = await Notification.send(notification, notification.name + " Testing")
let msg = await Notification.send(notification, notification.name + " Testing");
callback({
ok: true,
@@ -910,7 +923,7 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
});
} catch (e) {
console.error(e)
console.error(e);
callback({
ok: false,
@@ -921,7 +934,7 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
socket.on("checkApprise", async (callback) => {
try {
checkLogin(socket)
checkLogin(socket);
callback(Notification.checkApprise());
} catch (e) {
callback(false);
@@ -945,8 +958,8 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
if (importHandle == "overwrite") {
// Stop every monitor first so that no heartbeat runs while importing
for (let id in monitorList) {
let monitor = monitorList[id]
await monitor.stop()
let monitor = monitorList[id];
await monitor.stop();
}
await R.exec("DELETE FROM heartbeat");
await R.exec("DELETE FROM monitor_notification");
@@ -968,7 +981,7 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
if ((importHandle == "skip" && notificationNameListString.includes(notificationListData[i].name) == false) || importHandle == "keep" || importHandle == "overwrite") {
let notification = JSON.parse(notificationListData[i].config);
await Notification.save(notification, null, socket.userID)
await Notification.save(notification, null, socket.userID);
}
}
@@ -1018,9 +1031,9 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
dns_resolve_type: monitorListData[i].dns_resolve_type,
dns_resolve_server: monitorListData[i].dns_resolve_server,
notificationIDList: {},
}
};
let bean = R.dispense("monitor")
let bean = R.dispense("monitor");
let notificationIDList = monitor.notificationIDList;
delete monitor.notificationIDList;
@@ -1028,9 +1041,9 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
monitor.accepted_statuscodes_json = JSON.stringify(monitor.accepted_statuscodes);
delete monitor.accepted_statuscodes;
bean.import(monitor)
bean.user_id = socket.userID
await R.store(bean)
bean.import(monitor);
bean.user_id = socket.userID;
await R.store(bean);
// Only for backup files of version 1.7.0 or higher, since that is when the tag feature was introduced
if (version >= 170) {
@@ -1078,7 +1091,7 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
}
}
await sendNotificationList(socket)
await sendNotificationList(socket);
await sendMonitorList(socket);
}
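
Taken together, the import hunks above reduce the three import modes to a single rule: "overwrite" and "keep" always import an item, while "skip" imports it only when its name is not already present. A hypothetical helper restating that check (name and parameters are placeholders, for illustration only):

// Hypothetical restatement of the import-mode check shown above.
function shouldImport(importHandle, existingNames, name) {
    return importHandle === "overwrite"
        || importHandle === "keep"
        || (importHandle === "skip" && !existingNames.includes(name));
}
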
@@ -1097,9 +1110,9 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
socket.on("clearEvents", async (monitorID, callback) => {
try {
checkLogin(socket)
checkLogin(socket);
console.log(`Clear Events Monitor: ${monitorID} User ID: ${socket.userID}`)
console.log(`Clear Events Monitor: ${monitorID} User ID: ${socket.userID}`);
await R.exec("UPDATE heartbeat SET msg = ?, important = ? WHERE monitor_id = ? ", [
"",
@@ -1123,9 +1136,9 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
socket.on("clearHeartbeats", async (monitorID, callback) => {
try {
checkLogin(socket)
checkLogin(socket);
console.log(`Clear Heartbeats Monitor: ${monitorID} User ID: ${socket.userID}`)
console.log(`Clear Heartbeats Monitor: ${monitorID} User ID: ${socket.userID}`);
await R.exec("DELETE FROM heartbeat WHERE monitor_id = ?", [
monitorID
@@ -1147,9 +1160,9 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
socket.on("clearStatistics", async (callback) => {
try {
checkLogin(socket)
checkLogin(socket);
console.log(`Clear Statistics User ID: ${socket.userID}`)
console.log(`Clear Statistics User ID: ${socket.userID}`);
await R.exec("DELETE FROM heartbeat");
@@ -1165,24 +1178,27 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
}
});
debug("added all socket handlers")
// Status Page Socket Handler for admin only
statusPageSocketHandler(socket);
debug("added all socket handlers");
// ***************************
// Anything further should be done here, after all socket handlers have been added
// ***************************
debug("check auto login")
debug("check auto login");
if (await setting("disableAuth")) {
console.log("Disabled Auth: auto login to admin")
afterLogin(socket, await R.findOne("user"))
socket.emit("autoLogin")
console.log("Disabled Auth: auto login to admin");
afterLogin(socket, await R.findOne("user"));
socket.emit("autoLogin");
} else {
debug("need auth")
debug("need auth");
}
});
console.log("Init the server")
console.log("Init the server");
server.once("error", async (err) => {
console.error("Cannot listen: " + err.message);
@@ -1204,14 +1220,14 @@ let indexHTML = fs.readFileSync("./dist/index.html").toString();
async function updateMonitorNotification(monitorID, notificationIDList) {
await R.exec("DELETE FROM monitor_notification WHERE monitor_id = ? ", [
monitorID,
])
]);
for (let notificationID in notificationIDList) {
if (notificationIDList[notificationID]) {
let relation = R.dispense("monitor_notification");
relation.monitor_id = monitorID;
relation.notification_id = notificationID;
await R.store(relation)
await R.store(relation);
}
}
}
@@ -1220,7 +1236,7 @@ async function checkOwner(userID, monitorID) {
let row = await R.getRow("SELECT id FROM monitor WHERE id = ? AND user_id = ? ", [
monitorID,
userID,
])
]);
if (! row) {
throw new Error("You do not own this monitor.");
@@ -1229,16 +1245,16 @@ async function checkOwner(userID, monitorID) {
async function sendMonitorList(socket) {
let list = await getMonitorJSONList(socket.userID);
io.to(socket.userID).emit("monitorList", list)
io.to(socket.userID).emit("monitorList", list);
return list;
}
async function afterLogin(socket, user) {
socket.userID = user.id;
socket.join(user.id)
socket.join(user.id);
let monitorList = await sendMonitorList(socket)
sendNotificationList(socket)
let monitorList = await sendMonitorList(socket);
sendNotificationList(socket);
await sleep(500);
@@ -1251,7 +1267,7 @@ async function afterLogin(socket, user) {
}
for (let monitorID in monitorList) {
await Monitor.sendStats(io, monitorID, user.id)
await Monitor.sendStats(io, monitorID, user.id);
}
}
@@ -1260,7 +1276,7 @@ async function getMonitorJSONList(userID) {
let monitorList = await R.find("monitor", " user_id = ? ORDER BY weight DESC, name", [
userID,
])
]);
for (let monitor of monitorList) {
result[monitor.id] = await monitor.toJSON();
@@ -1269,24 +1285,18 @@ async function getMonitorJSONList(userID) {
return result;
}
function checkLogin(socket) {
if (! socket.userID) {
throw new Error("You are not logged in.");
}
}
async function initDatabase() {
if (! fs.existsSync(Database.path)) {
console.log("Copying Database")
console.log("Copying Database");
fs.copyFileSync(Database.templatePath, Database.path);
}
console.log("Connecting to Database")
console.log("Connecting to Database");
await Database.connect();
console.log("Connected")
console.log("Connected");
// Patch the database
await Database.patch()
await Database.patch();
let jwtSecretBean = await R.findOne("setting", " `key` = ? ", [
"jwtSecret",
@@ -1302,7 +1312,7 @@ async function initDatabase() {
// If there is no record in the user table, this is a new Uptime Kuma instance that needs setup
if ((await R.count("user")) === 0) {
console.log("No user, need setup")
console.log("No user, need setup");
needSetup = true;
}
@@ -1310,9 +1320,9 @@ async function initDatabase() {
}
async function startMonitor(userID, monitorID) {
await checkOwner(userID, monitorID)
await checkOwner(userID, monitorID);
console.log(`Resume Monitor: ${monitorID} User ID: ${userID}`)
console.log(`Resume Monitor: ${monitorID} User ID: ${userID}`);
await R.exec("UPDATE monitor SET active = 1 WHERE id = ? AND user_id = ? ", [
monitorID,
@@ -1321,24 +1331,24 @@ async function startMonitor(userID, monitorID) {
let monitor = await R.findOne("monitor", " id = ? ", [
monitorID,
])
]);
if (monitor.id in monitorList) {
monitorList[monitor.id].stop();
}
monitorList[monitor.id] = monitor;
monitor.start(io)
monitor.start(io);
}
async function restartMonitor(userID, monitorID) {
return await startMonitor(userID, monitorID)
return await startMonitor(userID, monitorID);
}
async function pauseMonitor(userID, monitorID) {
await checkOwner(userID, monitorID)
await checkOwner(userID, monitorID);
console.log(`Pause Monitor: ${monitorID} User ID: ${userID}`)
console.log(`Pause Monitor: ${monitorID} User ID: ${userID}`);
await R.exec("UPDATE monitor SET active = 0 WHERE id = ? AND user_id = ? ", [
monitorID,
@@ -1354,7 +1364,7 @@ async function pauseMonitor(userID, monitorID) {
* Resume active monitors
*/
async function startMonitors() {
let list = await R.find("monitor", " active = 1 ")
let list = await R.find("monitor", " active = 1 ");
for (let monitor of list) {
monitorList[monitor.id] = monitor;
@@ -1371,10 +1381,10 @@ async function shutdownFunction(signal) {
console.log("Shutdown requested");
console.log("Called signal: " + signal);
console.log("Stopping all monitors")
console.log("Stopping all monitors");
for (let id in monitorList) {
let monitor = monitorList[id]
monitor.stop()
let monitor = monitorList[id];
monitor.stop();
}
await sleep(2000);
await Database.close();
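
shutdownFunction receives the signal name, stops every running monitor, waits two seconds so in-flight work can settle, and closes the database. A minimal sketch of how such a handler could be attached to process signals (the direct process.on wiring and exit code are assumptions for illustration; server.js may register it through a graceful-shutdown helper instead):

// Illustrative signal wiring for shutdownFunction (assumed, simplified).
for (const signal of ["SIGINT", "SIGTERM"]) {
    process.on(signal, async () => {
        await shutdownFunction(signal);
        process.exit(0);
    });
}
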

View File

@@ -0,0 +1,161 @@
const { R } = require("redbean-node");
const { checkLogin, setSettings } = require("../util-server");
const dayjs = require("dayjs");
const { debug } = require("../../src/util");
const ImageDataURI = require("../image-data-uri");
const Database = require("../database");
const apicache = require("../modules/apicache");
module.exports.statusPageSocketHandler = (socket) => {
// Post or edit incident
socket.on("postIncident", async (incident, callback) => {
try {
checkLogin(socket);
await R.exec("UPDATE incident SET pin = 0 ");
let incidentBean;
if (incident.id) {
incidentBean = await R.findOne("incident", " id = ?", [
incident.id
]);
}
if (incidentBean == null) {
incidentBean = R.dispense("incident");
}
incidentBean.title = incident.title;
incidentBean.content = incident.content;
incidentBean.style = incident.style;
incidentBean.pin = true;
if (incident.id) {
incidentBean.lastUpdatedDate = R.isoDateTime(dayjs.utc());
} else {
incidentBean.createdDate = R.isoDateTime(dayjs.utc());
}
await R.store(incidentBean);
callback({
ok: true,
incident: incidentBean.toPublicJSON(),
});
} catch (error) {
callback({
ok: false,
msg: error.message,
});
}
});
socket.on("unpinIncident", async (callback) => {
try {
checkLogin(socket);
await R.exec("UPDATE incident SET pin = 0 WHERE pin = 1");
callback({
ok: true,
});
} catch (error) {
callback({
ok: false,
msg: error.message,
});
}
});
// Save Status Page
// imgDataUrl only accepts PNG
socket.on("saveStatusPage", async (config, imgDataUrl, publicGroupList, callback) => {
try {
checkLogin(socket);
apicache.clear();
const header = "data:image/png;base64,";
// Check the logo format
// If it is an image data URL, convert it to a PNG file
// Otherwise assume it is a URL and leave it unchanged
if (imgDataUrl.startsWith("data:")) {
if (! imgDataUrl.startsWith(header)) {
throw new Error("Only allowed PNG logo.");
}
// Convert to file
await ImageDataURI.outputFile(imgDataUrl, Database.uploadDir + "logo.png");
config.logo = "/upload/logo.png?t=" + Date.now();
} else {
config.icon = imgDataUrl;
}
// Save Config
await setSettings("statusPage", config);
// Save Public Group List
const groupIDList = [];
let groupOrder = 1;
for (let group of publicGroupList) {
let groupBean;
if (group.id) {
groupBean = await R.findOne("group", " id = ? AND public = 1 ", [
group.id
]);
} else {
groupBean = R.dispense("group");
}
groupBean.name = group.name;
groupBean.public = true;
groupBean.weight = groupOrder++;
await R.store(groupBean);
await R.exec("DELETE FROM monitor_group WHERE group_id = ? ", [
groupBean.id
]);
let monitorOrder = 1;
console.log(group.monitorList);
for (let monitor of group.monitorList) {
let relationBean = R.dispense("monitor_group");
relationBean.weight = monitorOrder++;
relationBean.group_id = groupBean.id;
relationBean.monitor_id = monitor.id;
await R.store(relationBean);
}
groupIDList.push(groupBean.id);
group.id = groupBean.id;
}
// Delete groups that are not in the list
debug("Delete groups that are not in the list");
const slots = groupIDList.map(() => "?").join(",");
await R.exec(`DELETE FROM \`group\` WHERE id NOT IN (${slots})`, groupIDList);
callback({
ok: true,
publicGroupList,
});
} catch (error) {
console.log(error);
callback({
ok: false,
msg: error.message,
});
}
});
};
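
The new module exports a single registration function, and server.js calls it once per connected socket after the other handlers (the statusPageSocketHandler(socket) call shown earlier). A minimal sketch of that wiring, with the require path assumed for illustration:

// Sketch of the registration site; the require path is an assumption.
const { statusPageSocketHandler } = require("./socket-handlers/status-page-socket-handler");

io.on("connection", (socket) => {
    // ... monitor, notification and settings handlers ...
    statusPageSocketHandler(socket);
});
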

View File

@@ -23,7 +23,7 @@ exports.initJWTSecret = async () => {
jwtSecretBean.value = passwordHash.generate(dayjs() + "");
await R.store(jwtSecretBean);
return jwtSecretBean;
}
};
exports.tcping = function (hostname, port) {
return new Promise((resolve, reject) => {
@@ -44,7 +44,7 @@ exports.tcping = function (hostname, port) {
resolve(Math.round(data.max));
});
});
}
};
exports.ping = async (hostname) => {
try {
@@ -57,7 +57,7 @@ exports.ping = async (hostname) => {
throw e;
}
}
}
};
exports.pingAsync = function (hostname, ipv6 = false) {
return new Promise((resolve, reject) => {
@@ -69,13 +69,13 @@ exports.pingAsync = function (hostname, ipv6 = false) {
if (err) {
reject(err);
} else if (ms === null) {
reject(new Error(stdout))
reject(new Error(stdout));
} else {
resolve(Math.round(ms))
resolve(Math.round(ms));
}
});
});
}
};
exports.dnsResolve = function (hostname, resolver_server, rrtype) {
const resolver = new Resolver();
@@ -98,8 +98,8 @@ exports.dnsResolve = function (hostname, resolver_server, rrtype) {
}
});
}
})
}
});
};
exports.setting = async function (key) {
let value = await R.getCell("SELECT `value` FROM setting WHERE `key` = ? ", [
@@ -108,29 +108,29 @@ exports.setting = async function (key) {
try {
const v = JSON.parse(value);
debug(`Get Setting: ${key}: ${v}`)
debug(`Get Setting: ${key}: ${v}`);
return v;
} catch (e) {
return value;
}
}
};
exports.setSetting = async function (key, value) {
let bean = await R.findOne("setting", " `key` = ? ", [
key,
])
]);
if (!bean) {
bean = R.dispense("setting")
bean = R.dispense("setting");
bean.key = key;
}
bean.value = JSON.stringify(value);
await R.store(bean)
}
await R.store(bean);
};
exports.getSettings = async function (type) {
let list = await R.getAll("SELECT `key`, `value` FROM setting WHERE `type` = ? ", [
type,
])
]);
let result = {};
@@ -143,7 +143,7 @@ exports.getSettings = async function (type) {
}
return result;
}
};
exports.setSettings = async function (type, data) {
let keyList = Object.keys(data);
@@ -163,12 +163,12 @@ exports.setSettings = async function (type, data) {
if (bean.type === type) {
bean.value = JSON.stringify(data[key]);
promiseList.push(R.store(bean))
promiseList.push(R.store(bean));
}
}
await Promise.all(promiseList);
}
};
// ssl-checker by @dyaa
// param: res - response object from axios
@@ -218,7 +218,7 @@ exports.checkCertificate = function (res) {
issuer,
fingerprint,
};
}
};
// Check if the provided status code is within the accepted ranges
// Param: status - the status code to check
@@ -247,7 +247,7 @@ exports.checkStatusCode = function (status, accepted_codes) {
}
return false;
}
};
exports.getTotalClientInRoom = (io, roomName) => {
@@ -270,7 +270,7 @@ exports.getTotalClientInRoom = (io, roomName) => {
} else {
return 0;
}
}
};
exports.genSecret = () => {
let secret = "";
@@ -280,4 +280,21 @@ exports.genSecret = () => {
secret += chars.charAt(Math.floor(Math.random() * charsLength));
}
return secret;
}
};
exports.allowDevAllOrigin = (res) => {
if (process.env.NODE_ENV === "development") {
exports.allowAllOrigin(res);
}
};
exports.allowAllOrigin = (res) => {
res.header("Access-Control-Allow-Origin", "*");
res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
};
exports.checkLogin = (socket) => {
if (! socket.userID) {
throw new Error("You are not logged in.");
}
};
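
checkLogin throws when socket.userID is unset, so handlers call it first and let their try/catch turn the error into an { ok: false } reply, while allowDevAllOrigin only adds the permissive CORS headers when NODE_ENV is "development". A compact usage sketch (the event name, route, and Express app instance are placeholders):

// Placeholder event and route names; shows how the helpers are consumed.
socket.on("someProtectedEvent", async (callback) => {
    try {
        checkLogin(socket); // throws if the socket has not logged in
        callback({ ok: true });
    } catch (e) {
        callback({ ok: false, msg: e.message });
    }
});

app.get("/api/some-endpoint", (request, response) => {
    allowDevAllOrigin(response); // CORS headers only in development
    response.json({ ok: true });
});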