Merge branch 'master' of https://github.com/louislam/uptime-kuma into status-page-expiry
@@ -141,12 +141,21 @@ async function sendAPIKeyList(socket) {
/**
 * Emits the version information to the client.
 * @param {Socket} socket Socket.io socket instance
+ * @param {boolean} hideVersion
 * @returns {Promise<void>}
 */
-async function sendInfo(socket) {
+async function sendInfo(socket, hideVersion = false) {
+    let version;
+    let latestVersion;
+
+    if (!hideVersion) {
+        version = checkVersion.version;
+        latestVersion = checkVersion.latestVersion;
+    }
+
    socket.emit("info", {
-        version: checkVersion.version,
-        latestVersion: checkVersion.latestVersion,
+        version,
+        latestVersion,
        primaryBaseURL: await setting("primaryBaseURL"),
        serverTimezone: await server.getTimezone(),
        serverTimezoneOffset: server.getTimezoneOffset(),
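The connection and afterLogin hunks further down call this in both modes: sendInfo(socket, true) on a raw connection, so no version is disclosed before authentication, and sendInfo(socket) again after login. A minimal client-side sketch, assuming a Socket.io client and the default port 3001, of what that looks like:

    // Sketch only: observing the "info" event before and after login.
    const { io } = require("socket.io-client");
    const socket = io("http://localhost:3001");

    socket.on("info", (info) => {
        // Before login: version and latestVersion are undefined (hideVersion = true).
        // After login: a second "info" event carries the real values.
        console.log("version:", info.version, "latestVersion:", info.latestVersion);
    });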
@@ -1,4 +1,5 @@
-const args = require("args-parser")(process.argv);
+// Interop with browser
+const args = (typeof process !== "undefined") ? require("args-parser")(process.argv) : {};
const demoMode = args["demo"] || false;

const badgeConstants = {
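The ternary guard exists because this module is evidently also pulled into the browser bundle, where process is undefined. A tiny standalone sketch of the same idea (illustrative only, not Uptime Kuma code):

    // In Node.js, CLI flags are parsed as before; in a browser bundle they fall back to {}.
    function parseArgs() {
        if (typeof process === "undefined") {
            return {};                                   // browser: no CLI arguments
        }
        return require("args-parser")(process.argv);     // Node.js: e.g. --demo
    }
    const demoMode = parseArgs()["demo"] || false;       // stays false in the browser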
@@ -73,6 +73,7 @@ class Database {
        "patch-add-parent-monitor.sql": true,
        "patch-add-invert-keyword.sql": true,
        "patch-added-json-query.sql": true,
+        "patch-added-kafka-producer.sql": true,
        "patch-add-certificate-expiry-status-page.sql": true,
    };

@@ -6,7 +6,7 @@ const { log, UP, DOWN, PENDING, MAINTENANCE, flipStatus, TimeLogger, MAX_INTERVA
    SQL_DATETIME_FORMAT
} = require("../../src/util");
const { tcping, ping, dnsResolve, checkCertificate, checkStatusCode, getTotalClientInRoom, setting, mssqlQuery, postgresQuery, mysqlQuery, mqttAsync, setSetting, httpNtlm, radius, grpcQuery,
-    redisPingAsync, mongodbPing,
+    redisPingAsync, mongodbPing, kafkaProducerAsync
} = require("../util-server");
const { R } = require("redbean-node");
const { BeanModel } = require("redbean-node/dist/bean-model");
@@ -137,6 +137,11 @@ class Monitor extends BeanModel {
            httpBodyEncoding: this.httpBodyEncoding,
            jsonPath: this.jsonPath,
            expectedValue: this.expectedValue,
+            kafkaProducerTopic: this.kafkaProducerTopic,
+            kafkaProducerBrokers: JSON.parse(this.kafkaProducerBrokers),
+            kafkaProducerSsl: this.kafkaProducerSsl === "1" && true || false,
+            kafkaProducerAllowAutoTopicCreation: this.kafkaProducerAllowAutoTopicCreation === "1" && true || false,
+            kafkaProducerMessage: this.kafkaProducerMessage,
            screenshot,
        };

@@ -161,6 +166,7 @@ class Monitor extends BeanModel {
            tlsCa: this.tlsCa,
            tlsCert: this.tlsCert,
            tlsKey: this.tlsKey,
+            kafkaProducerSaslOptions: JSON.parse(this.kafkaProducerSaslOptions),
        };
    }

@@ -175,7 +181,7 @@ class Monitor extends BeanModel {
    async isActive() {
        const parentActive = await Monitor.isParentActive(this.id);

-        return this.active && parentActive;
+        return (this.active === 1) && parentActive;
    }

    /**
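The strict comparison matters because the active flag comes back from the database as the stored 0/1 value, so the old expression could evaluate to a raw number instead of a boolean; the comparison against the string "1" on the kafkaProducer flags in toJSON above appears to serve the same purpose for text-stored flags. A quick sketch of the difference:

    // Illustrative only: `active` is stored as 0/1 in the database.
    const parentActive = true;

    console.log(0 && parentActive);         // 0     (old expression leaks a number)
    console.log(1 && parentActive);         // true

    console.log((0 === 1) && parentActive); // false (new expression is always a boolean)
    console.log((1 === 1) && parentActive); // true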
@@ -825,6 +831,24 @@ class Monitor extends BeanModel {
                    bean.ping = dayjs().valueOf() - startTime;
                }

+            } else if (this.type === "kafka-producer") {
+                let startTime = dayjs().valueOf();
+
+                bean.msg = await kafkaProducerAsync(
+                    JSON.parse(this.kafkaProducerBrokers),
+                    this.kafkaProducerTopic,
+                    this.kafkaProducerMessage,
+                    {
+                        allowAutoTopicCreation: this.kafkaProducerAllowAutoTopicCreation,
+                        ssl: this.kafkaProducerSsl,
+                        clientId: `Uptime-Kuma/${version}`,
+                        interval: this.interval,
+                    },
+                    JSON.parse(this.kafkaProducerSaslOptions),
+                );
+                bean.status = UP;
+                bean.ping = dayjs().valueOf() - startTime;
+
            } else {
                throw new Error("Unknown Monitor Type");
            }
@@ -27,6 +27,11 @@ class Slack extends NotificationProvider {

    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let okMsg = "Sent Successfully.";
+
+        if (notification.slackchannelnotify) {
+            msg += " <!channel>";
+        }
+
        try {
            if (heartbeatJSON == null) {
                let data = {
@@ -53,7 +58,7 @@ class Slack extends NotificationProvider {
                        "type": "header",
                        "text": {
                            "type": "plain_text",
-                            "text": "Uptime Kuma Alert",
+                            "text": textMsg,
                        },
                    },
                    {
server/notification-providers/smsc.js (new file, 42 lines)
@@ -0,0 +1,42 @@
+const NotificationProvider = require("./notification-provider");
+const axios = require("axios");
+
+class SMSC extends NotificationProvider {
+    name = "smsc";
+
+    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
+        let okMsg = "Sent Successfully.";
+        try {
+            let config = {
+                headers: {
+                    "Content-Type": "application/json",
+                    "Accept": "text/json",
+                }
+            };
+
+            let getArray = [
+                "fmt=3",
+                "translit=" + notification.smscTranslit,
+                "login=" + notification.smscLogin,
+                "psw=" + notification.smscPassword,
+                "phones=" + notification.smscToNumber,
+                "mes=" + encodeURIComponent(msg.replace(/[^\x00-\x7F]/g, "")),
+            ];
+            if (notification.smscSenderName !== "") {
+                getArray.push("sender=" + notification.smscSenderName);
+            }
+
+            let resp = await axios.get("https://smsc.kz/sys/send.php?" + getArray.join("&"), config);
+            if (resp.data.id === undefined) {
+                let error = `Something gone wrong. Api returned code ${resp.data.error_code}: ${resp.data.error}`;
+                this.throwGeneralAxiosError(error);
+            }
+
+            return okMsg;
+        } catch (error) {
+            this.throwGeneralAxiosError(error);
+        }
+    }
+}
+
+module.exports = SMSC;
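For context, a hedged sketch of the query string this provider builds; the smsc* field names come from the notification form handled above, and the placeholder values are illustrative, not real credentials. Per the code, fmt=3 appears to request a JSON-style response (matching the Accept: text/json header and the resp.data.id check):

    const notification = {
        smscLogin: "my-login",
        smscPassword: "my-password",
        smscToNumber: "77001234567",
        smscSenderName: "",           // optional; skipped when empty
        smscTranslit: 0,
    };
    const msg = "Uptime Kuma alert ✔";    // non-ASCII characters are stripped below

    const getArray = [
        "fmt=3",
        "translit=" + notification.smscTranslit,
        "login=" + notification.smscLogin,
        "psw=" + notification.smscPassword,
        "phones=" + notification.smscToNumber,
        "mes=" + encodeURIComponent(msg.replace(/[^\x00-\x7F]/g, "")),
    ];
    if (notification.smscSenderName !== "") {
        getArray.push("sender=" + notification.smscSenderName);
    }
    console.log("https://smsc.kz/sys/send.php?" + getArray.join("&"));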
@@ -6,6 +6,7 @@ const AliyunSms = require("./notification-providers/aliyun-sms");
const Apprise = require("./notification-providers/apprise");
const Bark = require("./notification-providers/bark");
const ClickSendSMS = require("./notification-providers/clicksendsms");
+const SMSC = require("./notification-providers/smsc");
const DingDing = require("./notification-providers/dingding");
const Discord = require("./notification-providers/discord");
const Feishu = require("./notification-providers/feishu");
@@ -68,6 +69,7 @@ class Notification {
            new Apprise(),
            new Bark(),
            new ClickSendSMS(),
+            new SMSC(),
            new DingDing(),
            new Discord(),
            new Feishu(),
@@ -442,7 +442,7 @@ router.get("/api/badge/:id/cert-exp", cache("5 minutes"), async (request, respon
        if (!tlsInfo.valid) {
            // return a "Bad Cert" badge in naColor (grey), when cert is not valid
            badgeValues.message = "Bad Cert";
-            badgeValues.color = badgeConstants.downColor;
+            badgeValues.color = downColor;
        } else {
            const daysRemaining = parseInt(overrideValue ?? tlsInfo.certInfo.daysRemaining);

@@ -15,18 +15,25 @@ dayjs.extend(require("dayjs/plugin/customParseFormat"));
require("dotenv").config();

// Check Node.js Version
-const nodeVersion = parseInt(process.versions.node.split(".")[0]);
-const requiredVersion = 14;
+const nodeVersion = process.versions.node;
+
+// Get the required Node.js version from package.json
+const requiredNodeVersions = require("../package.json").engines.node;
+const bannedNodeVersions = " < 14 || 20.0.* || 20.1.* || 20.2.* || 20.3.* ";
console.log(`Your Node.js version: ${nodeVersion}`);

-// See more: https://github.com/louislam/uptime-kuma/issues/3138
-if (nodeVersion >= 20) {
-    console.warn("\x1b[31m%s\x1b[0m", "Warning: Uptime Kuma is currently not stable on Node.js >= 20, please use Node.js 18.");
+const semver = require("semver");
+const requiredNodeVersionsComma = requiredNodeVersions.split("||").map((version) => version.trim()).join(", ");
+
+// Exit Uptime Kuma immediately if the Node.js version is banned
+if (semver.satisfies(nodeVersion, bannedNodeVersions)) {
+    console.error("\x1b[31m%s\x1b[0m", `Error: Your Node.js version: ${nodeVersion} is not supported, please upgrade your Node.js to ${requiredNodeVersionsComma}.`);
+    process.exit(-1);
}

-if (nodeVersion < requiredVersion) {
-    console.error(`Error: Your Node.js version is not supported, please upgrade to Node.js >= ${requiredVersion}.`);
-    process.exit(-1);
+// Warning if the Node.js version is not in the support list, but it maybe still works
+if (!semver.satisfies(nodeVersion, requiredNodeVersions)) {
+    console.warn("\x1b[31m%s\x1b[0m", `Warning: Your Node.js version: ${nodeVersion} is not officially supported, please upgrade your Node.js to ${requiredNodeVersionsComma}.`);
}

const args = require("args-parser")(process.argv);
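A minimal sketch, assuming the semver package the new code requires, of how the banned range behaves; the engines.node range itself is whatever package.json declares, so it is not reproduced here:

    const semver = require("semver");
    const bannedNodeVersions = " < 14 || 20.0.* || 20.1.* || 20.2.* || 20.3.* ";

    console.log(semver.satisfies("18.17.1", bannedNodeVersions)); // false -> startup continues
    console.log(semver.satisfies("20.2.5", bannedNodeVersions));  // true  -> process.exit(-1)
    console.log(semver.satisfies("13.14.0", bannedNodeVersions)); // true  -> process.exit(-1)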
@@ -263,7 +270,7 @@ let needSetup = false;
    log.info("server", "Adding socket handler");
    io.on("connection", async (socket) => {

-        sendInfo(socket);
+        sendInfo(socket, true);

        if (needSetup) {
            log.info("server", "Redirect to setup page");
@@ -636,6 +643,9 @@ let needSetup = false;
            monitor.accepted_statuscodes_json = JSON.stringify(monitor.accepted_statuscodes);
            delete monitor.accepted_statuscodes;

+            monitor.kafkaProducerBrokers = JSON.stringify(monitor.kafkaProducerBrokers);
+            monitor.kafkaProducerSaslOptions = JSON.stringify(monitor.kafkaProducerSaslOptions);
+
            bean.import(monitor);
            bean.user_id = socket.userID;

@@ -750,6 +760,11 @@ let needSetup = false;
            bean.httpBodyEncoding = monitor.httpBodyEncoding;
            bean.expectedValue = monitor.expectedValue;
            bean.jsonPath = monitor.jsonPath;
+            bean.kafkaProducerTopic = monitor.kafkaProducerTopic;
+            bean.kafkaProducerBrokers = JSON.stringify(monitor.kafkaProducerBrokers);
+            bean.kafkaProducerAllowAutoTopicCreation = monitor.kafkaProducerAllowAutoTopicCreation;
+            bean.kafkaProducerSaslOptions = JSON.stringify(monitor.kafkaProducerSaslOptions);
+            bean.kafkaProducerMessage = monitor.kafkaProducerMessage;

            bean.validate();

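The UI submits kafkaProducerBrokers and kafkaProducerSaslOptions as structured values; the add and edit handlers stringify them for storage (presumably into plain text columns), and Monitor.toJSON() and the kafka-producer check above parse them back. A small sketch of that round trip, with placeholder values and no database involved:

    const fromClient = {
        kafkaProducerBrokers: [ "localhost:9092", "localhost:9093" ],
        kafkaProducerSaslOptions: { mechanism: "None" },
    };

    // What the handlers store on the monitor row:
    const stored = {
        kafkaProducerBrokers: JSON.stringify(fromClient.kafkaProducerBrokers),
        kafkaProducerSaslOptions: JSON.stringify(fromClient.kafkaProducerSaslOptions),
    };

    // What toJSON() and the check recover before use:
    console.log(JSON.parse(stored.kafkaProducerBrokers));      // [ 'localhost:9092', 'localhost:9093' ]
    console.log(JSON.parse(stored.kafkaProducerSaslOptions));  // { mechanism: 'None' }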
@@ -1651,6 +1666,7 @@ async function afterLogin(socket, user) {
    socket.join(user.id);

    let monitorList = await server.sendMonitorList(socket);
+    sendInfo(socket);
    server.sendMaintenanceList(socket);
    sendNotificationList(socket);
    sendProxyList(socket);
@@ -10,7 +10,7 @@ const util = require("util");
const { CacheableDnsHttpAgent } = require("./cacheable-dns-http-agent");
const { Settings } = require("./settings");
const dayjs = require("dayjs");
-// DO NOT IMPORT HERE IF THE MODULES USED `UptimeKumaServer.getInstance()`
+// DO NOT IMPORT HERE IF THE MODULES USED `UptimeKumaServer.getInstance()`, put at the bottom of this file instead.

/**
 * `module.exports` (alias: `server`) should be inside this class, in order to avoid circular dependency issue.
@@ -249,9 +249,9 @@ class UptimeKumaServer {

            return (typeof forwardedFor === "string" ? forwardedFor.split(",")[0].trim() : null)
                || socket.client.conn.request.headers["x-real-ip"]
-                || clientIP.replace(/^.*:/, "");
+                || clientIP.replace(/^::ffff:/, "");
        } else {
-            return clientIP.replace(/^.*:/, "");
+            return clientIP.replace(/^::ffff:/, "");
        }
    }

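The old greedy regex stripped everything up to the last colon, which works for IPv4-mapped addresses but destroys genuine IPv6 client addresses; the new pattern only removes the ::ffff: prefix. A quick string-level sketch:

    const mapped = "::ffff:203.0.113.5";   // IPv4-mapped IPv6, as Node sockets often report
    const realV6 = "2001:db8::1";          // a genuine IPv6 client

    console.log(mapped.replace(/^.*:/, ""));     // "203.0.113.5"   (old regex, fine here)
    console.log(realV6.replace(/^.*:/, ""));     // "1"             (old regex mangles IPv6)

    console.log(mapped.replace(/^::ffff:/, "")); // "203.0.113.5"   (new regex)
    console.log(realV6.replace(/^::ffff:/, "")); // "2001:db8::1"   (new regex leaves IPv6 intact)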
@@ -262,13 +262,43 @@ class UptimeKumaServer {
     * @returns {Promise<string>}
     */
    async getTimezone() {
+        // From process.env.TZ
+        try {
+            if (process.env.TZ) {
+                this.checkTimezone(process.env.TZ);
+                return process.env.TZ;
+            }
+        } catch (e) {
+            log.warn("timezone", e.message + " in process.env.TZ");
+        }
+
        let timezone = await Settings.get("serverTimezone");
-        if (timezone) {
-            return timezone;
-        } else if (process.env.TZ) {
-            return process.env.TZ;
-        } else {
-            return dayjs.tz.guess();
+
+        // From Settings
+        try {
+            log.debug("timezone", "Using timezone from settings: " + timezone);
+            if (timezone) {
+                this.checkTimezone(timezone);
+                return timezone;
+            }
+        } catch (e) {
+            log.warn("timezone", e.message + " in settings");
+        }
+
+        // Guess
+        try {
+            let guess = dayjs.tz.guess();
+            log.debug("timezone", "Guessing timezone: " + guess);
+            if (guess) {
+                this.checkTimezone(guess);
+                return guess;
+            } else {
+                return "UTC";
+            }
+        } catch (e) {
+            // Guess failed, fall back to UTC
+            log.debug("timezone", "Guessed an invalid timezone. Use UTC as fallback");
+            return "UTC";
+        }
        }
    }

@@ -280,11 +310,24 @@ class UptimeKumaServer {
        return dayjs().format("Z");
    }

+    /**
+     * Throw an error if the timezone is invalid
+     * @param timezone
+     */
+    checkTimezone(timezone) {
+        try {
+            dayjs.utc("2013-11-18 11:55").tz(timezone).format();
+        } catch (e) {
+            throw new Error("Invalid timezone:" + timezone);
+        }
+    }
+
    /**
     * Set the current server timezone and environment variables
     * @param {string} timezone
     */
    async setTimezone(timezone) {
+        this.checkTimezone(timezone);
        await Settings.set("serverTimezone", timezone, "general");
        process.env.TZ = timezone;
        dayjs.tz.setDefault(timezone);
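Taken together, getTimezone() now resolves the zone in order: a validated TZ environment variable, then the serverTimezone setting, then a dayjs guess, and finally "UTC"; setTimezone() validates before persisting. A small standalone sketch of the validation both rely on, assuming dayjs with its utc and timezone plugins as the server loads them:

    const dayjs = require("dayjs");
    dayjs.extend(require("dayjs/plugin/utc"));
    dayjs.extend(require("dayjs/plugin/timezone"));

    function checkTimezone(timezone) {
        try {
            dayjs.utc("2013-11-18 11:55").tz(timezone).format();
        } catch (e) {
            throw new Error("Invalid timezone:" + timezone);
        }
    }

    checkTimezone("Asia/Hong_Kong");    // ok
    try {
        checkTimezone("Not/AZone");     // Intl rejects the name, so dayjs throws
    } catch (e) {
        console.log(e.message);         // "Invalid timezone:Not/AZone"
    }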
@@ -300,6 +343,5 @@ module.exports = {
    UptimeKumaServer
};

-// Must be at the end
-const { MonitorType } = require("./monitor-types/monitor-type");
+// Must be at the end to avoid circular dependencies
const { RealBrowserMonitorType } = require("./monitor-types/real-browser-monitor-type");
@@ -28,8 +28,11 @@ const {
} = require("node-radius-utils");
const dayjs = require("dayjs");

-const isWindows = process.platform === /^win/.test(process.platform);
+// SASLOptions used in JSDoc
+// eslint-disable-next-line no-unused-vars
+const { Kafka, SASLOptions } = require("kafkajs");

+const isWindows = process.platform === /^win/.test(process.platform);
/**
 * Init or reset JWT secret
 * @returns {Promise<Bean>}
@@ -196,6 +199,94 @@ exports.mqttAsync = function (hostname, topic, okMessage, options = {}) {
    });
};

+/**
+ * Monitor Kafka using Producer
+ * @param {string} topic Topic name to produce into
+ * @param {string} message Message to produce
+ * @param {Object} [options={interval = 20, allowAutoTopicCreation = false, ssl = false, clientId = "Uptime-Kuma"}]
+ * Kafka client options. Contains ssl, clientId, allowAutoTopicCreation and
+ * interval (interval defaults to 20, allowAutoTopicCreation defaults to false, clientId defaults to "Uptime-Kuma"
+ * and ssl defaults to false)
+ * @param {string[]} brokers List of kafka brokers to connect, host and port joined by ':'
+ * @param {SASLOptions} [saslOptions={}] Options for kafka client Authentication (SASL) (defaults to
+ * {})
+ * @returns {Promise<string>}
+ */
+exports.kafkaProducerAsync = function (brokers, topic, message, options = {}, saslOptions = {}) {
+    return new Promise((resolve, reject) => {
+        const { interval = 20, allowAutoTopicCreation = false, ssl = false, clientId = "Uptime-Kuma" } = options;
+
+        let connectedToKafka = false;
+
+        const timeoutID = setTimeout(() => {
+            log.debug("kafkaProducer", "KafkaProducer timeout triggered");
+            connectedToKafka = true;
+            reject(new Error("Timeout"));
+        }, interval * 1000 * 0.8);
+
+        if (saslOptions.mechanism === "None") {
+            saslOptions = undefined;
+        }
+
+        let client = new Kafka({
+            brokers: brokers,
+            clientId: clientId,
+            sasl: saslOptions,
+            retry: {
+                retries: 0,
+            },
+            ssl: ssl,
+        });
+
+        let producer = client.producer({
+            allowAutoTopicCreation: allowAutoTopicCreation,
+            retry: {
+                retries: 0,
+            }
+        });
+
+        producer.connect().then(
+            () => {
+                try {
+                    producer.send({
+                        topic: topic,
+                        messages: [{
+                            value: message,
+                        }],
+                    });
+                    connectedToKafka = true;
+                    clearTimeout(timeoutID);
+                    resolve("Message sent successfully");
+                } catch (e) {
+                    connectedToKafka = true;
+                    producer.disconnect();
+                    clearTimeout(timeoutID);
+                    reject(new Error("Error sending message: " + e.message));
+                }
+            }
+        ).catch(
+            (e) => {
+                connectedToKafka = true;
+                producer.disconnect();
+                clearTimeout(timeoutID);
+                reject(new Error("Error in producer connection: " + e.message));
+            }
+        );
+
+        producer.on("producer.network.request_timeout", (_) => {
+            clearTimeout(timeoutID);
+            reject(new Error("producer.network.request_timeout"));
+        });
+
+        producer.on("producer.disconnect", (_) => {
+            if (!connectedToKafka) {
+                clearTimeout(timeoutID);
+                reject(new Error("producer.disconnect"));
+            }
+        });
+    });
+};
+
/**
 * Use NTLM Auth for a http request.
 * @param {Object} options The http request options
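A hedged usage sketch of the new helper; the broker address, topic, and credentials are placeholders, and the require path assumes the repository root as the working directory. Note that the rejection timeout is derived from the interval option (80 percent of it, in seconds):

    const { kafkaProducerAsync } = require("./server/util-server");

    async function demo() {
        try {
            const msg = await kafkaProducerAsync(
                [ "localhost:9092" ],                 // brokers: host:port strings
                "uptime-kuma-heartbeat",              // topic (placeholder)
                JSON.stringify({ status: "up" }),     // message payload
                {
                    allowAutoTopicCreation: false,
                    ssl: false,
                    clientId: "Uptime-Kuma/demo",
                    interval: 20,                     // promise rejects after 20 * 0.8 = 16 s
                },
                { mechanism: "None" },                // treated as "no SASL" by the helper
            );
            console.log(msg);                         // "Message sent successfully"
        } catch (e) {
            console.error(e.message);                 // "Timeout", connection errors, etc.
        }
    }

    demo();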