Compare commits

..

31 Commits

Author | SHA1 | Message | Date
Louis Lam | c742153d4d | Merge pull request #1449 from mhkarimi1383/ansible-unofficial (making it functional when you have a minimal host) | 2022-04-04 14:16:25 +08:00
Muhammed Hussein Karimi | 4aa4c5b853 | update readme for new tag | 2022-04-04 06:24:48 +04:30
Muhammed Hussein Karimi | 2e7231edd1 | making it functional when you have a minimal host | 2022-04-04 06:16:59 +04:30
Louis Lam | c25b4cf9c8 | Merge pull request #1188 from mhkarimi1383/patch-1 (FIX: [ansible]logging volume not working) | 2022-01-18 00:03:40 +08:00
Louis Lam | a5d2dbf620 | Merge pull request #1189 from mhkarimi1383/patch-2 (always recreate uptime kuma containers) | 2022-01-18 00:03:29 +08:00
Muhammed Hussein karimi | 10220ec5bc | always recreate uptime kuma containers (this will fix the problem when you change nginx configuration and containers are not reloaded) | 2022-01-15 01:08:50 +03:30
Muhammed Hussein karimi | a6de002eda | FIX: logging volume not working (fix path for logging in nginx ansible role) | 2022-01-15 00:56:57 +03:30
Louis Lam | df4c354e46 | Merge pull request #1058 from mhkarimi1383/patch-1 (fix: nginx role should run before uptime-kuma role) | 2021-12-15 00:51:24 +08:00
Muhammed Hussein karimi | 7af628211e | fix: nginx role should run before uptime-kuma role (this is required because of configuration files should be present.) | 2021-12-14 20:17:40 +03:30
Louis Lam | bb43dc2825 | only for ansible | 2021-12-14 18:46:34 +08:00
Louis Lam | a1b20698be | Merge branch 'mhkarimi1383_master' into ansible-unofficial | 2021-12-14 18:45:46 +08:00
Muhammed Hussein Karimi | 2c0e22ad31 | some minor features and fixes for ansible role | 2021-12-14 01:14:44 +03:30
Muhammed Hussein karimi | f05651d235 | Merge branch 'louislam:master' into master | 2021-12-14 01:02:30 +03:30
Muhammed Hussein Karimi | 8f7ca1f4db | I forgot to commit this part :) | 2021-11-08 21:06:58 +03:30
Muhammed Hussein Karimi | 9a36e227a3 | docker-compose is made in one, volume for error & access log, better tagging for docker | 2021-11-08 21:01:52 +03:30
Muhammed Hussein Karimi | 4ccff95d9c | add some information to readme (It should be final) | 2021-10-24 11:42:09 +03:30
Muhammed Hussein Karimi | 17a572112c | better readme | 2021-10-23 23:04:47 +03:30
Muhammed Hussein Karimi | 91649e7956 | change docker compose version | 2021-10-23 22:50:02 +03:30
Muhammed Hussein Karimi | c42c985e9e | installing docker python library (see https://github.com/geerlingguy/ansible-role-docker#use-with-ansible-and-docker-python-library) | 2021-10-22 21:30:56 +03:30
Muhammed Hussein Karimi | 177a9598ea | better ssl handling | 2021-10-22 21:08:19 +03:30
Muhammed Hussein Karimi | 133def93fe | typo fix | 2021-10-22 20:56:58 +03:30
Muhammed Hussein Karimi | 90ebf4f66c | using variables for domain and guid to run added to readme | 2021-10-22 20:53:28 +03:30
Muhammed Hussein Karimi | e0e5f3518a | ansible requirements added | 2021-10-22 20:40:01 +03:30
Muhammed Hussein Karimi | a81cc92b07 | Fix name for docker_compose ansible module (to make it working with newer ansible) | 2021-10-22 20:39:43 +03:30
Muhammed Hussein karimi | d6f79ee80b | Using geerlingguy ansible role for docker compose | 2021-10-22 20:18:51 +03:30
Muhammed Hussein karimi | f0632f32ee | remove docker role | 2021-10-22 20:17:34 +03:30
Muhammed Hussein karimi | 97fe7c001c | Uaing alpine image | 2021-10-12 19:35:25 +03:30
Muhammed Hussein karimi | de6437e494 | Apply suggestion (nginx version and config) (Co-authored-by: Adam Stachowicz <saibamenppl@gmail.com>) | 2021-10-12 08:08:07 +03:30
Muhammed Hussein karimi | 5db728841b | somthing was missing in nginx tasks | 2021-10-12 07:57:44 +03:30
Muhammed Hussein karimi | 166c4d6b5f | Update README.md | 2021-10-11 23:51:17 +03:30
Muhammed Hussein Karimi | 12d3aeb0cd | ansible playbook added (this playbook will install docker, then install uptime kuma using docker, and install and configure nginx with ssl) | 2021-10-11 23:48:01 +03:30
358 changed files with 261 additions and 71326 deletions


@@ -1,50 +0,0 @@
/.idea
/node_modules
/data
/cypress
/out
/test
/kubernetes
/.do
**/.dockerignore
/private
**/.git
**/.gitignore
**/docker-compose*
**/[Dd]ockerfile*
LICENSE
README.md
.editorconfig
.vscode
.eslint*
.stylelint*
/.github
yarn.lock
app.json
CODE_OF_CONDUCT.md
CONTRIBUTING.md
CNAME
install.sh
SECURITY.md
tsconfig.json
.env
/tmp
/babel.config.js
/ecosystem.config.js
### .gitignore content (commented rules are duplicated)
#node_modules
.DS_Store
#dist
dist-ssr
*.local
#.idea
#/data
#!/data/.gitkeep
#.vscode
### End of .gitignore content


@@ -1,21 +0,0 @@
root = true
[*]
indent_style = space
indent_size = 4
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
[*.md]
trim_trailing_whitespace = false
[*.yaml]
indent_size = 2
[*.yml]
indent_size = 2
[*.vue]
trim_trailing_whitespace = false


@@ -1,127 +0,0 @@
module.exports = {
ignorePatterns: [
"test/*",
"server/modules/apicache/*",
"src/util.js"
],
root: true,
env: {
browser: true,
commonjs: true,
es2020: true,
node: true,
},
extends: [
"eslint:recommended",
"plugin:vue/vue3-recommended",
],
parser: "vue-eslint-parser",
parserOptions: {
parser: "@babel/eslint-parser",
sourceType: "module",
requireConfigFile: false,
},
rules: {
"yoda": "error",
eqeqeq: [ "warn", "smart" ],
"linebreak-style": [ "error", "unix" ],
"camelcase": [ "warn", {
"properties": "never",
"ignoreImports": true
}],
"no-unused-vars": [ "warn", {
"args": "none"
}],
indent: [
"error",
4,
{
ignoredNodes: [ "TemplateLiteral" ],
SwitchCase: 1,
},
],
quotes: [ "error", "double" ],
semi: "error",
"vue/html-indent": [ "error", 4 ], // default: 2
"vue/max-attributes-per-line": "off",
"vue/singleline-html-element-content-newline": "off",
"vue/html-self-closing": "off",
"vue/require-component-is": "off", // not allow is="style" https://github.com/vuejs/eslint-plugin-vue/issues/462#issuecomment-430234675
"vue/attribute-hyphenation": "off", // This change noNL to "no-n-l" unexpectedly
"vue/multi-word-component-names": "off",
"no-multi-spaces": [ "error", {
ignoreEOLComments: true,
}],
"array-bracket-spacing": [ "warn", "always", {
"singleValue": true,
"objectsInArrays": false,
"arraysInArrays": false
}],
"space-before-function-paren": [ "error", {
"anonymous": "always",
"named": "never",
"asyncArrow": "always"
}],
"curly": "error",
"object-curly-spacing": [ "error", "always" ],
"object-curly-newline": "off",
"object-property-newline": "error",
"comma-spacing": "error",
"brace-style": "error",
"no-var": "error",
"key-spacing": "warn",
"keyword-spacing": "warn",
"space-infix-ops": "warn",
"arrow-spacing": "warn",
"no-trailing-spaces": "error",
"no-constant-condition": [ "error", {
"checkLoops": false,
}],
"space-before-blocks": "warn",
//'no-console': 'warn',
"no-extra-boolean-cast": "off",
"no-multiple-empty-lines": [ "warn", {
"max": 1,
"maxBOF": 0,
}],
"lines-between-class-members": [ "warn", "always", {
exceptAfterSingleLine: true,
}],
"no-unneeded-ternary": "error",
"array-bracket-newline": [ "error", "consistent" ],
"eol-last": [ "error", "always" ],
//'prefer-template': 'error',
"comma-dangle": [ "warn", "only-multiline" ],
"no-empty": [ "error", {
"allowEmptyCatch": true
}],
"no-control-regex": "off",
"one-var": [ "error", "never" ],
"max-statements-per-line": [ "error", { "max": 1 }]
},
"overrides": [
{
"files": [ "src/languages/*.js", "src/icon.js" ],
"rules": {
"comma-dangle": [ "error", "always-multiline" ],
}
},
// Override for jest puppeteer
{
"files": [
"**/*.spec.js",
"**/*.spec.jsx"
],
env: {
jest: true,
},
globals: {
page: true,
browser: true,
context: true,
jestPuppeteer: true,
},
}
]
};

.github/FUNDING.yml (vendored, 12 changed lines)

@@ -1,12 +0,0 @@
# These are supported funding model platforms
github: louislam # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
#patreon: # Replace with a single Patreon username
open_collective: uptime-kuma # Replace with a single Open Collective username
#ko_fi: # Replace with a single Ko-fi username
#tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
#community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
#liberapay: # Replace with a single Liberapay username
#issuehunt: # Replace with a single IssueHunt username
#otechie: # Replace with a single Otechie username
#custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']


@@ -1,68 +0,0 @@
name: "❓ Ask for help"
description: "Submit any question related to Uptime Kuma"
#title: "[Help] "
labels: [help]
body:
- type: checkboxes
id: no-duplicate-issues
attributes:
label: "⚠️ Please verify that this bug has NOT been raised before."
description: "Search in the issues sections by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=)"
options:
- label: "I checked and didn't find similar issue"
required: true
- type: checkboxes
attributes:
label: "🛡️ Security Policy"
description: Please review the security policy before reporting security related issues/bugs.
options:
- label: I agree to have read this project [Security Policy](https://github.com/louislam/uptime-kuma/security/policy)
required: true
- type: textarea
id: steps-to-reproduce
validations:
required: true
attributes:
label: "📝 Describe your problem"
description: "Please walk us through it step by step."
placeholder: "Describe what are you asking for..."
- type: input
id: uptime-kuma-version
attributes:
label: "🐻 Uptime-Kuma Version"
description: "Which version of Uptime-Kuma are you running? Please do NOT provide the docker tag such as latest or 1"
placeholder: "Ex. 1.10.0"
validations:
required: true
- type: input
id: operating-system
attributes:
label: "💻 Operating System and Arch"
description: "Which OS is your server/device running on?"
placeholder: "Ex. Ubuntu 20.04 x86"
validations:
required: true
- type: input
id: browser-vendor
attributes:
label: "🌐 Browser"
description: "Which browser are you running on?"
placeholder: "Ex. Google Chrome 95.0.4638.69"
validations:
required: true
- type: input
id: docker-version
attributes:
label: "🐋 Docker Version"
description: "If running with Docker, which version are you running?"
placeholder: "Ex. Docker 20.10.9 / K8S / Podman"
validations:
required: false
- type: input
id: nodejs-version
attributes:
label: "🟩 NodeJS Version"
description: "If running with Node.js? which version are you running?"
placeholder: "Ex. 14.18.0"
validations:
required: false


@@ -1,99 +0,0 @@
name: "🐛 Bug Report"
description: "Submit a bug report to help us improve"
#title: "[Bug] "
labels: [bug]
body:
- type: checkboxes
id: no-duplicate-issues
attributes:
label: "⚠️ Please verify that this bug has NOT been raised before."
description: "Search in the issues sections by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=)"
options:
- label: "I checked and didn't find similar issue"
required: true
- type: checkboxes
attributes:
label: "🛡️ Security Policy"
description: Please review the security policy before reporting security related issues/bugs.
options:
- label: I agree to have read this project [Security Policy](https://github.com/louislam/uptime-kuma/security/policy)
required: true
- type: textarea
id: description
validations:
required: false
attributes:
label: "Description"
description: "You could also upload screenshots"
- type: textarea
id: steps-to-reproduce
validations:
required: true
attributes:
label: "👟 Reproduction steps"
description: "How do you trigger this bug? Please walk us through it step by step."
placeholder: "..."
- type: textarea
id: expected-behavior
validations:
required: true
attributes:
label: "👀 Expected behavior"
description: "What did you think would happen?"
placeholder: "..."
- type: textarea
id: actual-behavior
validations:
required: true
attributes:
label: "😓 Actual Behavior"
description: "What actually happen?"
placeholder: "..."
- type: input
id: uptime-kuma-version
attributes:
label: "🐻 Uptime-Kuma Version"
description: "Which version of Uptime-Kuma are you running? Please do NOT provide the docker tag such as latest or 1"
placeholder: "Ex. 1.10.0"
validations:
required: true
- type: input
id: operating-system
attributes:
label: "💻 Operating System and Arch"
description: "Which OS is your server/device running on?"
placeholder: "Ex. Ubuntu 20.04 x86"
validations:
required: true
- type: input
id: browser-vendor
attributes:
label: "🌐 Browser"
description: "Which browser are you running on?"
placeholder: "Ex. Google Chrome 95.0.4638.69"
validations:
required: true
- type: input
id: docker-version
attributes:
label: "🐋 Docker Version"
description: "If running with Docker, which version are you running?"
placeholder: "Ex. Docker 20.10.9 / K8S / Podman"
validations:
required: false
- type: input
id: nodejs-version
attributes:
label: "🟩 NodeJS Version"
description: "If running with Node.js? which version are you running?"
placeholder: "Ex. 14.18.0"
validations:
required: false
- type: textarea
id: logs
attributes:
label: "📝 Relevant log output"
description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
render: shell
validations:
required: false


@@ -1,59 +0,0 @@
name: 🚀 Feature Request
description: "Submit a proposal for a new feature"
#title: "[Feature] "
labels: [feature-request]
body:
- type: checkboxes
id: no-duplicate-issues
attributes:
label: "⚠️ Please verify that this feature request has NOT been suggested before."
description: "Search in the issues sections by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=)"
options:
- label: "I checked and didn't find similar feature request"
required: true
- type: dropdown
id: feature-area
attributes:
label: "🏷️ Feature Request Type"
description: "What kind of feature request is this?"
multiple: true
options:
- API
- New Notification
- New Monitor
- UI Feature
- Other
validations:
required: true
- type: textarea
id: feature-description
validations:
required: true
attributes:
label: "🔖 Feature description"
description: "A clear and concise description of what the feature request is."
placeholder: "You should add ..."
- type: textarea
id: solution
validations:
required: true
attributes:
label: "✔️ Solution"
description: "A clear and concise description of what you want to happen."
placeholder: "In my use-case, ..."
- type: textarea
id: alternatives
validations:
required: false
attributes:
label: "❓ Alternatives"
description: "A clear and concise description of any alternative solutions or features you've considered."
placeholder: "I have considered ..."
- type: textarea
id: additional-context
validations:
required: false
attributes:
label: "📝 Additional Context"
description: "Add any other context or screenshots about the feature request here."
placeholder: "..."


@@ -1,35 +0,0 @@
⚠️⚠️⚠️ Since we do not accept all types of pull requests and do not want to waste your time. Please be sure that you have read pull request rules:
https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md#can-i-create-a-pull-request-for-uptime-kuma
Tick the checkbox if you understand [x]:
- [ ] I have read and understand the pull request rules.
# Description
Fixes #(issue)
## Type of change
Please delete any options that are not relevant.
- Bug fix (non-breaking change which fixes an issue)
- User interface (UI)
- New feature (non-breaking change which adds functionality)
- Breaking change (fix or feature that would cause existing functionality to not work as expected)
- Translation update
- Other
- This change requires a documentation update
## Checklist
- [ ] My code follows the style guidelines of this project
- [ ] I ran ESLint and other linters for modified files
- [ ] I have performed a self-review of my own code and tested it
- [ ] I have commented my code, particularly in hard-to-understand areas
(including JSDoc for methods)
- [ ] My changes generate no new warnings
- [ ] My code needed automated testing. I have added them (this is optional task)
## Screenshots (if any)
Please do not use any external image service. Instead, just paste in or drag and drop the image here, and it will be uploaded automatically.


@@ -1,68 +0,0 @@
# This workflow will do a clean install of node dependencies, cache/restore them, build the source code and run tests across different versions of node
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions
name: Auto Test
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
auto-test:
needs: [ check-linters ]
runs-on: ${{ matrix.os }}
timeout-minutes: 15
strategy:
matrix:
os: [macos-latest, ubuntu-latest, windows-latest]
node: [ 14, 16, 17, 18 ]
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/
steps:
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v3
- name: Use Node.js ${{ matrix.node }}
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node }}
cache: 'npm'
- run: npm install
- run: npm run build
- run: npm test
env:
HEADLESS_TEST: 1
JUST_FOR_TEST: ${{ secrets.JUST_FOR_TEST }}
check-linters:
runs-on: ubuntu-latest
steps:
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v3
- name: Use Node.js 14
uses: actions/setup-node@v3
with:
node-version: 14
cache: 'npm'
- run: npm install
- run: npm run lint
e2e-tests:
needs: [ check-linters ]
runs-on: ubuntu-latest
steps:
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v3
- name: Use Node.js 14
uses: actions/setup-node@v3
with:
node-version: 14
cache: 'npm'
- run: npm install
- run: npm run build
- run: npm run cy:test


@@ -1,26 +0,0 @@
name: Close Incorrect Issue
on:
issues:
types: [opened]
jobs:
close-incorrect-issue:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest]
node-version: [16.x]
steps:
- uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v2
with:
node-version: ${{ matrix.node-version }}
cache: 'npm'
- run: npm ci
- run: node extra/close-incorrect-issue.js ${{ secrets.GITHUB_TOKEN }} ${{ github.event.issue.number }} ${{ github.event.issue.user.login }}


@@ -1,24 +0,0 @@
name: 'Automatically close stale issues and PRs'
on:
workflow_dispatch:
schedule:
- cron: '0 */6 * * *'
#Run every 6 hours
jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v5
with:
stale-issue-message: 'We are clearing up our old issues and your ticket has been open for 3 months with no activity. Remove stale label or comment or this will be closed in 2 days.'
stale-pr-message: 'We are clearing up our old Pull Requests and yours has been open for 3 months with no activity. Remove stale label or comment or this will be closed in 2 days.'
close-issue-message: 'This issue was closed because it has been stalled for 2 days with no activity.'
close-pr-message: 'This PR was closed because it has been stalled for 2 days with no activity.'
days-before-stale: 90
days-before-close: 2
exempt-issue-labels: 'News,Medium,High,discussion,bug,doc,feature-request'
exempt-pr-labels: 'awaiting-approval,work-in-progress,enhancement,feature-request'
exempt-issue-assignees: 'louislam'
exempt-pr-assignees: 'louislam'
operations-per-run: 200

.gitignore (vendored, 18 changed lines)

@@ -1,18 +0,0 @@
node_modules
.DS_Store
dist
dist-ssr
*.local
.idea
/data
!/data/.gitkeep
.vscode
/private
/out
/tmp
.env
cypress/videos
cypress/screenshots

.npmrc (1 changed line)

@@ -1 +0,0 @@
legacy-peer-deps=true


@@ -1,14 +0,0 @@
{
"extends": "stylelint-config-standard",
"customSyntax": "postcss-html",
"rules": {
"indentation": 4,
"no-descending-specificity": null,
"selector-list-comma-newline-after": null,
"declaration-empty-line-before": null,
"alpha-value-notation": "number",
"color-function-notation": "legacy",
"shorthand-property-no-redundant-values": null,
"color-hex-length": null,
}
}

CNAME (1 changed line)

@@ -1 +0,0 @@
git.kuma.pet


@@ -1,128 +0,0 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
uptime@kuma.pet.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.


@@ -1,265 +0,0 @@
# Project Info
First of all, thank you everyone who made pull requests for Uptime Kuma, I never thought GitHub Community can be that nice! And also because of this, I also never thought other people actually read my code and edit my code. It is not structured and commented so well, lol. Sorry about that.
The project was created with vite.js (vue3). Then I created a subdirectory called "server" for server part. Both frontend and backend share the same package.json.
The frontend code build into "dist" directory. The server (express.js) exposes the "dist" directory as root of the endpoint. This is how production is working.
## Key Technical Skills
- Node.js (You should know what are promise, async/await and arrow function etc.)
- Socket.io
- SCSS
- Vue.js
- Bootstrap
- SQLite
## Directories
- data (App data)
- dist (Frontend build)
- extra (Extra useful scripts)
- public (Frontend resources for dev only)
- server (Server source code)
- src (Frontend source code)
- test (unit test)
## Can I create a pull request for Uptime Kuma?
Yes or no, it depends on what you will try to do. Since I don't want to waste your time, be sure to **create an empty draft pull request or open an issue, so we can discuss first**. Especially for a large pull request or you don't know it will be merged or not.
Here are some references:
✅ Usually Accept:
- Bug/Security fix
- Translations
- Adding notification providers
⚠️ Discussion First
- Large pull requests
- New features
❌ Won't Merge
- Do not pass auto test
- Any breaking changes
- Duplicated pull request
- Buggy
- UI/UX is not close to Uptime Kuma
- Existing logic is completely modified or deleted for no reason
- A function that is completely out of scope
- Unnecessary large code changes (Hard to review, causes code conflicts to other pull requests)
I will mark your pull request in the [milestones](https://github.com/louislam/uptime-kuma/milestones), if I am plan to review and merge it.
Also, please don't rush or ask for ETA, because I have to understand the pull request, make sure it is no breaking changes and stick to my vision of this project, especially for large pull requests.
### Recommended Pull Request Guideline
Before deep into coding, discussion first is preferred. Creating an empty pull request for discussion would be recommended.
1. Fork the project
1. Clone your fork repo to local
1. Create a new branch
1. Create an empty commit
`git commit -m "[empty commit] pull request for <YOUR TASK NAME>" --allow-empty`
1. Push to your fork repo
1. Create a pull request: https://github.com/louislam/uptime-kuma/compare
1. Write a proper description
1. Click "Change to draft"
1. Discussion
## Project Styles
I personally do not like something need to learn so much and need to config so much before you can finally start the app.
- Easy to install for non-Docker users, no native build dependency is needed (at least for x86_64), no extra config, no extra effort to get it run
- Single container for Docker users, no very complex docker-compose file. Just map the volume and expose the port, then good to go
- Settings should be configurable in the frontend. Environment variable is not encouraged, unless it is related to startup such as `DATA_DIR`.
- Easy to use
- The web UI styling should be consistent and nice.
## Coding Styles
- 4 spaces indentation
- Follow `.editorconfig`
- Follow ESLint
- Methods and functions should be documented with JSDoc
## Name convention
- Javascript/Typescript: camelCaseType
- SQLite: snake_case (Underscore)
- CSS/SCSS: kebab-case (Dash)
## Tools
- Node.js >= 14
- NPM >= 8.5
- Git
- IDE that supports ESLint and EditorConfig (I am using IntelliJ IDEA)
- A SQLite GUI tool (SQLite Expert Personal is suggested)
## Install dependencies
```bash
npm ci
```
## Dev Server
(2022-04-26 Update)
We can start the frontend dev server and the backend dev server in one command.
Port `3000` and port `3001` will be used.
```bash
npm run dev
```
## Backend Server
It binds to `0.0.0.0:3001` by default.
It is mainly a socket.io app + express.js.
express.js is used for:
- entry point such as redirecting to a status page or the dashboard
- serving the frontend built files (index.html, .js and .css etc.)
- serving internal APIs of status page
### Structure in /server/
- model/ (Object model, auto mapping to the database table name)
- modules/ (Modified 3rd-party modules)
- notification-providers/ (individual notification logic)
- routers/ (Express Routers)
- socket-handler (Socket.io Handlers)
- server.js (Server entry point and main logic)
## Frontend Dev Server
It binds to `0.0.0.0:3000` by default. Frontend dev server is used for development only.
For production, it is not used. It will be compiled to `dist` directory instead.
You can use Vue.js devtools Chrome extension for debugging.
### Build the frontend
```bash
npm run build
```
### Frontend Details
Uptime Kuma Frontend is a single page application (SPA). Most paths are handled by Vue Router.
The router is in `src/router.js`
As you can see, most data in frontend is stored in root level, even though you changed the current router to any other pages.
The data and socket logic are in `src/mixins/socket.js`.
## Database Migration
1. Create `patch-{name}.sql` in `./db/`
2. Add your patch filename in the `patchList` list in `./server/database.js`
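A minimal sketch of what such a patch could look like, assuming a hypothetical new column (the file, table, and column names below are illustrative only; the patch follows the same `BEGIN TRANSACTION` / `COMMIT` pattern as the existing `db/patch-*.sql` files):
```bash
# Hypothetical example for step 1: create db/patch-monitor-example-flag.sql
cat > db/patch-monitor-example-flag.sql << 'EOF'
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
    ADD example_flag BOOLEAN default 0 not null;
COMMIT;
EOF
# Step 2: register "patch-monitor-example-flag.sql" in the patchList in ./server/database.js
```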
## Unit Test
It is an end-to-end testing. It is using Jest and Puppeteer.
```bash
npm run build
npm test
```
By default, the Chromium window will be shown up during the test. Specifying `HEADLESS_TEST=1` for terminal environments.
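For example, on a terminal-only machine (a minimal sketch; `HEADLESS_TEST=1` mirrors the variable used by the Auto Test workflow):
```bash
# Build the frontend, then run the Jest + Puppeteer tests without opening a Chromium window
npm run build
HEADLESS_TEST=1 npm test
```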
## Dependencies
Both frontend and backend share the same package.json. However, the frontend dependencies are eventually not used in the production environment, because it is usually also baked into dist files. So:
- Frontend dependencies = "devDependencies"
- Examples: vue, chart.js
- Backend dependencies = "dependencies"
- Examples: socket.io, sqlite3
- Development dependencies = "devDependencies"
- Examples: eslint, sass
### Update Dependencies
Install `ncu`
https://github.com/raineorshine/npm-check-updates
```bash
ncu -u -t patch
npm install
```
Since previously updating Vite 2.5.10 to 2.6.0 broke the application completely, from now on, it should update patch release version only.
Patch release = the third digit ([Semantic Versioning](https://semver.org/))
## Translations
Please read: https://github.com/louislam/uptime-kuma/tree/master/src/languages
## Wiki
Since there is no way to make a pull request to wiki's repo, I have set up another repo to do that.
https://github.com/louislam/uptime-kuma-wiki
## Maintainer
Check the latest issues and pull requests:
https://github.com/louislam/uptime-kuma/issues?q=sort%3Aupdated-desc
### Release Procedures
1. Draft a release note
2. Make sure the repo is cleared
3. `npm run release-final` with env vars: `VERSION` and `GITHUB_TOKEN`
4. Wait until the `Press any key to continue`
5. `git push`
6. Publish the release note as 1.X.X
7. Press any key to continue
8. SSH to demo site server and update to 1.X.X
Checking:
- Check all tags is fine on https://hub.docker.com/r/louislam/uptime-kuma/tags
- Try the Docker image with tag 1.X.X (Clean install / amd64 / arm64 / armv7)
- Try clean installation with Node.js
### Release Beta Procedures
1. Draft a release note, check "This is a pre-release"
2. Make sure the repo is cleared
3. `npm run release-beta` with env vars: `VERSION` and `GITHUB_TOKEN`
4. Wait until the `Press any key to continue`
5. Publish the release note as 1.X.X-beta.X
6. Press any key to continue
### Release Wiki
#### Setup Repo
```bash
git clone https://github.com/louislam/uptime-kuma-wiki.git
cd uptime-kuma-wiki
git remote add production https://github.com/louislam/uptime-kuma.wiki.git
```
#### Push to Production Wiki
```bash
git pull
git push production master
```

LICENSE (21 changed lines)

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2021 Louis Lam
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

README.md (179 changed lines)

@@ -1,179 +0,0 @@
# Uptime Kuma
<a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/stars/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/pulls/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/v/louislam/uptime-kuma/latest?label=docker%20image%20ver." /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/last-commit/louislam/uptime-kuma" /></a> <a target="_blank" href="https://opencollective.com/uptime-kuma"><img src="https://opencollective.com/uptime-kuma/total/badge.svg?label=Open%20Collective%20Backers&color=brightgreen" /></a>
[![GitHub Sponsors](https://img.shields.io/github/sponsors/louislam?label=GitHub%20Sponsors)](https://github.com/sponsors/louislam)
<div align="center" width="100%">
<img src="./public/icon.svg" width="128" alt="" />
</div>
It is a self-hosted monitoring tool like "Uptime Robot".
<img src="https://uptime.kuma.pet/img/dark.jpg" width="700" alt="" />
## 🥔 Live Demo
Try it!
- Tokyo Demo Server: https://demo.uptime.kuma.pet (Sponsored by [Uptime Kuma Sponsors](https://github.com/louislam/uptime-kuma#%EF%B8%8F-sponsors))
- Europe Demo Server: https://demo.uptime-kuma.karimi.dev:27000 (Provided by [@mhkarimi1383](https://github.com/mhkarimi1383))
It is a temporary live demo, all data will be deleted after 10 minutes. Use the one that is closer to you, but I suggest that you should install and try it out for the best demo experience.
## ⭐ Features
* Monitoring uptime for HTTP(s) / TCP / HTTP(s) Keyword / Ping / DNS Record / Push / Steam Game Server / Docker Containers.
* Fancy, Reactive, Fast UI/UX.
* Notifications via Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP), and [90+ notification services, click here for the full list](https://github.com/louislam/uptime-kuma/tree/master/src/components/notifications).
* 20 second intervals.
* [Multi Languages](https://github.com/louislam/uptime-kuma/tree/master/src/languages)
* Multiple Status Pages
* Map Status Page to Domain
* Ping Chart
* Certificate Info
* Proxy Support
* 2FA available
## 🔧 How to Install
### 🐳 Docker
```bash
docker run -d --restart=always -p 3001:3001 -v uptime-kuma:/app/data --name uptime-kuma louislam/uptime-kuma:1
```
⚠️ Please use a **local volume** only. Other types such as NFS are not supported.
Browse to http://localhost:3001 after starting.
### 💪🏻 Non-Docker
Required Tools:
- [Node.js](https://nodejs.org/en/download/) >= 14
- [Git](https://git-scm.com/downloads)
- [pm2](https://pm2.keymetrics.io/) - For run in background
```bash
# Update your npm to the latest version
npm install npm -g
git clone https://github.com/louislam/uptime-kuma.git
cd uptime-kuma
npm run setup
# Option 1. Try it
node server/server.js
# (Recommended) Option 2. Run in background using PM2
# Install PM2 if you don't have it:
npm install pm2 -g && pm2 install pm2-logrotate
# Start Server
pm2 start server/server.js --name uptime-kuma
```
Browse to http://localhost:3001 after starting.
More useful PM2 Commands
```bash
# If you want to see the current console output
pm2 monit
# If you want to add it to startup
pm2 save && pm2 startup
```
### Advanced Installation
If you need more options or need to browse via a reverse proxy, please read:
https://github.com/louislam/uptime-kuma/wiki/%F0%9F%94%A7-How-to-Install
## 🆙 How to Update
Please read:
https://github.com/louislam/uptime-kuma/wiki/%F0%9F%86%99-How-to-Update
## 🆕 What's Next?
I will mark requests/issues to the next milestone.
https://github.com/louislam/uptime-kuma/milestones
Project Plan:
https://github.com/users/louislam/projects/4/views/1
## ❤️ Sponsors
Thank you so much! (GitHub Sponsors will be updated manually. OpenCollective sponsors will be updated automatically, the list will be cached by GitHub though. It may need some time to be updated)
<img src="https://uptime.kuma.pet/sponsors?v=6" alt />
## 🖼 More Screenshots
Light Mode:
<img src="https://uptime.kuma.pet/img/light.jpg" width="512" alt="" />
Status Page:
<img src="https://user-images.githubusercontent.com/1336778/134628766-a3fe0981-0926-4285-ab46-891a21c3e4cb.png" width="512" alt="" />
Settings Page:
<img src="https://louislam.net/uptimekuma/2.jpg" width="400" alt="" />
Telegram Notification Sample:
<img src="https://louislam.net/uptimekuma/3.jpg" width="400" alt="" />
## Motivation
* I was looking for a self-hosted monitoring tool like "Uptime Robot", but it is hard to find a suitable one. One of the close ones is statping. Unfortunately, it is not stable and no longer maintained.
* Want to build a fancy UI.
* Learn Vue 3 and vite.js.
* Show the power of Bootstrap 5.
* Try to use WebSocket with SPA instead of REST API.
* Deploy my first Docker image to Docker Hub.
If you love this project, please consider giving me a ⭐.
## 🗣️ Discussion
### Issues Page
You can discuss or ask for help in [issues](https://github.com/louislam/uptime-kuma/issues).
### Subreddit
My Reddit account: [u/louislamlam](https://reddit.com/u/louislamlam).
You can mention me if you ask a question on Reddit.
[r/Uptime kuma](https://www.reddit.com/r/UptimeKuma/)
## Contribute
### Test Pull Requests
There are a lot of pull requests right now, but I don't have time to test them all.
If you want to help, you can check this:
https://github.com/louislam/uptime-kuma/wiki/Test-Pull-Requests
### Test Beta Version
Check out the latest beta release here: https://github.com/louislam/uptime-kuma/releases
### Bug Reports / Feature Requests
If you want to report a bug or request a new feature, feel free to open a [new issue](https://github.com/louislam/uptime-kuma/issues).
### Translations
If you want to translate Uptime Kuma into your language, please read: https://github.com/louislam/uptime-kuma/tree/master/src/languages
Feel free to correct my grammar in this README, source code, or wiki, as my mother language is not English and my grammar is not that great.
### Create Pull Requests
If you want to modify Uptime Kuma, please read this guide and follow the rules here: https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md


@@ -1,25 +0,0 @@
# Security Policy
## Reporting a Vulnerability
Please report security issues to uptime@kuma.pet.
Do not use the issue tracker or discuss it in the public as it will cause more damage.
## Supported Versions
### Uptime Kuma Versions
You should use or upgrade to the latest version of Uptime Kuma. All `1.X.X` versions are upgradable to the latest version.
### Upgradable Docker Tags
| Tag | Supported |
| ------- | ------------------ |
| 1 | :white_check_mark: |
| 1-debian | :white_check_mark: |
| latest | :white_check_mark: |
| debian | :white_check_mark: |
| 1-alpine | ⚠️ Deprecated |
| alpine | ⚠️ Deprecated |
| All other tags | ❌ |

ansible/.gitignore (vendored, new file, 1 changed line)

@@ -0,0 +1 @@
roles/nginx/files/ssl/*

ansible/README.md (new file, 39 changed lines)

@@ -0,0 +1,39 @@
# Ansible Playbook to install uptime kuma using docker
This playbook comes with four tags
1. requirements (will install anything needed to make next parts working)
2. docker (to install docker)
3. nginx (to install nginx using docker with ssl)
4. uptime kuma (to install uptime kuma using docker)
To see more info see docker-compose, tasks and config files
I will try to make this readme better
## To run it
1. install ansible see [here](https://docs.ansible.com/ansible/latest/installation_guide/intro_installation.html)
2. run `ansible-galaxy install -r ansible-requirements.yml` to get requirements
3. prepare inventory hosts
4. put your certificates in files section in nginx role with this structure below:
```
ansible -> roles -> nginx -> files -> ssl -> <uptime kuma domain>.fullchain.pem
ansible -> roles -> nginx -> files -> ssl -> <uptime kuma domain>.privkey.pem
```
5. to run playbook
```bash
ansible-playbook ./playbook.yml -i <your inventory path> -e "kuma_domain=<uptime kuma domain>" -e "kuma_image_os=<alpine or debian>" -e "kuma_image_version=<version>"
```
you can use other ansible playbook options too
> Note: Replace `<uptime kuma domain>` with your desired domain for uptime kuma
> replace `<version>` with a version from https://github.com/louislam/uptime-kuma/releases
> replace `<alpine or debian>` with one of options
> `-e "kuma_image_os=<alpine or debian>" -e "kuma_image_version=<version>"` is not required and you can remove this part or change only one of them (kuma_image_os is debian & kuma_image_version is 1 by default)
> If you are not using root user as your ansible_user use -bK option to become root
> instead of `-e "kuma_image_os=<alpine or debian>" -e "kuma_image_version=<version>"` You can use `-e kuma_tag=<uptime kuma full tag>` and replace `<uptime kuma full tag>` with your desired tag (e.g. `latest`)
> you can also create a yaml file with variables that you want to set & use it (also: ansible-vars)
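
Putting the steps above together, a hypothetical end-to-end run could look like this sketch (the domain, inventory path, and certificate source paths are placeholders, not values required by the playbook):
```bash
cd ansible

# Install the Galaxy requirements (step 2)
ansible-galaxy install -r ansible-requirements.yml

# Place the certificates where the nginx role expects them (step 4); placeholder domain
cp /path/to/fullchain.pem roles/nginx/files/ssl/status.example.com.fullchain.pem
cp /path/to/privkey.pem   roles/nginx/files/ssl/status.example.com.privkey.pem

# Run the playbook (step 5); add -bK if ansible_user is not root
ansible-playbook ./playbook.yml -i ./inventory/hosts \
    -e "kuma_domain=status.example.com" \
    -e "kuma_image_os=debian" -e "kuma_image_version=1"
```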


@@ -0,0 +1,6 @@
roles:
- src: geerlingguy.docker
- src: geerlingguy.pip
collections:
- name: community.docker

ansible/playbook.yml (new file, 20 changed lines)

@@ -0,0 +1,20 @@
- name: install uptime kuma with nginx connected
hosts: all
vars:
pip_install_packages:
- name: wheel
- name: pip
state: latest
- name: setuptools
- name: cffi
- name: docker
- name: dockerpty
docker_compose_version: "v2.0.1"
roles:
- {role: requirements, tags: ["docker", "requirements"]}
- {role: geerlingguy.docker, tags: ["docker"]}
- {role: geerlingguy.pip, tags: ["docker"]}
- {role: nginx, tags: ["nginx"]}
- {role: uptime-kuma, tags: ["kuma"]}


@@ -0,0 +1,22 @@
- name: Ensure Volumes & Files directories exists
file:
dest: "{{item}}"
state: directory
loop:
- /compose
- /compose/volumes
- /compose/volumes/nginx
- /compose/volumes/nginx/log/{{ kuma_domain }}
- name: Ensure nginx SSL certificates exist
copy:
src: ssl
dest: /compose/volumes/nginx
mode: 'preserve'
group: root
owner: root
- name: Ensure config files are updated
template:
src: "nginx.conf"
dest: /compose/volumes/nginx/nginx.conf


@@ -0,0 +1,88 @@
user nginx;
worker_processes auto;
pid /var/run/nginx.pid;
error_log /var/log/nginx/error.log;
events {
worker_connections 2048;
}
http {
sendfile on;
tcp_nopush on;
tcp_nodelay on;
keepalive_timeout 65;
types_hash_max_size 2048;
server_tokens off;
default_type application/octet-stream;
### SSL Settings for all servers (https://ssl-config.mozilla.org/#server=nginx&server-version=1.17.2&config=intermediate)
# certs sent to the client in SERVER HELLO are concatenated in ssl_certificate
ssl_certificate /etc/nginx/ssl/{{ kuma_domain }}.fullchain.pem;
ssl_certificate_key /etc/nginx/ssl/{{ kuma_domain }}.privkey.pem;
ssl_session_timeout 1d;
ssl_session_cache shared:MozSSL:10m; # about 40000 sessions
ssl_session_tickets off;
# intermediate configuration
ssl_protocols TLSv1.2 TLSv1.3;
ssl_ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384;
ssl_prefer_server_ciphers off;
# curl https://ssl-config.mozilla.org/ffdhe2048.txt > /etc/nginx/ssl/dhparam.pem (TODO: check if it's secure to use others DH parameters!)
# openssl dhparam -out /etc/nginx/ssl/dhparam.pem 4096
ssl_dhparam /etc/nginx/ssl/dhparam.pem;
# HSTS (ngx_http_headers_module is required) (63072000 seconds)
add_header Strict-Transport-Security "max-age=63072000" always;
# OCSP stapling
ssl_stapling on;
ssl_stapling_verify on;
log_format main '$remote_addr - $remote_user [$time_local] "$request_method $scheme://$host$request_uri $server_protocol" $status $body_bytes_sent '
'"$http_referer" "$http_user_agent" $request_time $upstream_response_time UPA:$upstream_addr BYS:$bytes_sent BYR:$request_length';
access_log /var/log/nginx/access.log main;
### Set additional headers to be send to upstream
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
# Remove Headers that gonna be sent to client
proxy_hide_header X-Powered-By;
proxy_hide_header Server;
# Redirect HTTP request to HTTPS
server {
listen 80 default_server;
server_name {{ kuma_domain }};
return 302 https://$host$request_uri;
}
server {
server_name {{ kuma_domain }};
listen 443 ssl http2 default_server;
access_log /var/log/nginx/{{ kuma_domain }}/access.log main;
error_log /var/log/nginx/{{ kuma_domain }}/error.log;
location / {
# rewrite ^/(.*)/$ /$1 permanent;
### redirect urls with trailing slash to non-trailing slash
# https://serverfault.dev/questions/597302/removing-the-trailing-slash-from-a-url-with-nginx
# location ~ (?<no_slash>.+)/$ {
# return 302 https://$host$no_slash;
# }
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_pass http://uptime-kuma:3001/;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
}
}
}


@@ -0,0 +1,11 @@
---
- name: Ensure {{inventory_hostname}} is set as hostname
hostname:
name: "{{inventory_hostname}}"
tags: ["hostname"]
- include_tasks: setup-RedHat.yml
when: ansible_os_family == 'RedHat'
- include_tasks: setup-Debian.yml
when: ansible_os_family == 'Debian'


@@ -0,0 +1,9 @@
- name: Ensure packages for some requirements are installed
apt:
pkg:
- libffi-dev
- libzbar-dev
- libzbar0
- python3-docopt
update_cache: yes
state: present


@@ -0,0 +1,9 @@
- name: Ensure packages for some requirements are installed
dnf:
name:
- libffi-devel
- zbar-devel
- zbar
- python3-docopt
update_cache: yes
state: present


@@ -0,0 +1,4 @@
---
kuma_image_version: '1'
kuma_image_os: 'debian'
kuma_tag: "{{kuma_image_version}}-{{kuma_image_os}}"


@@ -0,0 +1,23 @@
- name: Ensure Volumes & Files directories exists
file:
dest: "{{item}}"
state: directory
loop:
- /compose
- /compose/kuma
- /compose/volumes
- /compose/volumes/kuma
- name: Ensure docker-compose file has been updated
template:
src: "{{item}}"
dest: /compose/kuma/
loop:
- docker-compose.yml
- name: Ensure uptime-kuma is up
community.docker.docker_compose:
state: present
project_src: /compose/kuma
pull: yes
recreate: always


@@ -0,0 +1,29 @@
version: '3.3'
services:
uptime-kuma:
restart: always
networks:
- uptime-kuma
expose:
- 3001
volumes:
- '/compose/volumes/uptime-kuma:/app/data'
container_name: uptime-kuma
image: 'louislam/uptime-kuma:{{kuma_tag}}'
nginx:
ports:
- 443:443
- 80:80
networks:
- uptime-kuma
depends_on:
- uptime-kuma
restart: always
image: nginx:stable-alpine
volumes:
- '/compose/volumes/nginx/:/etc/nginx/'
- '/compose/volumes/nginx/log/{{ kuma_domain }}:/var/log/nginx/{{ kuma_domain }}/'
networks:
uptime-kuma:


@@ -1,11 +0,0 @@
const config = {};
if (process.env.TEST_FRONTEND) {
config.presets = [ "@babel/preset-env" ];
}
if (process.env.TEST_BACKEND) {
config.plugins = [ "babel-plugin-rewire" ];
}
module.exports = config;


@@ -1,28 +0,0 @@
const { defineConfig } = require("cypress");
module.exports = defineConfig({
projectId: "vyjuem",
e2e: {
experimentalStudio: true,
setupNodeEvents(on, config) {
},
fixturesFolder: "test/cypress/fixtures",
screenshotsFolder: "test/cypress/screenshots",
videosFolder: "test/cypress/videos",
downloadsFolder: "test/cypress/downloads",
supportFile: "test/cypress/support/e2e.js",
baseUrl: "http://localhost:3002",
defaultCommandTimeout: 10000,
pageLoadTimeout: 60000,
viewportWidth: 1920,
viewportHeight: 1080,
specPattern: [
"test/cypress/e2e/setup.cy.js",
"test/cypress/e2e/**/*.js"
],
},
env: {
baseUrl: "http://localhost:3002",
},
});


@@ -1,5 +0,0 @@
module.exports = {
"rootDir": "..",
"testRegex": "./test/backend.spec.js",
};


@@ -1,53 +0,0 @@
import legacy from "@vitejs/plugin-legacy";
import vue from "@vitejs/plugin-vue";
import { defineConfig } from "vite";
import visualizer from "rollup-plugin-visualizer";
import viteCompression from "vite-plugin-compression";
const postCssScss = require("postcss-scss");
const postcssRTLCSS = require("postcss-rtlcss");
const viteCompressionFilter = /\.(js|mjs|json|css|html|svg)$/i;
// https://vitejs.dev/config/
export default defineConfig({
server: {
port: 3000,
},
define: {
"FRONTEND_VERSION": JSON.stringify(process.env.npm_package_version),
},
plugins: [
vue(),
legacy({
targets: [ "since 2015" ],
}),
visualizer({
filename: "tmp/dist-stats.html"
}),
viteCompression({
algorithm: "gzip",
filter: viteCompressionFilter,
}),
viteCompression({
algorithm: "brotliCompress",
filter: viteCompressionFilter,
}),
],
css: {
postcss: {
"parser": postCssScss,
"map": false,
"plugins": [ postcssRTLCSS ]
}
},
build: {
rollupOptions: {
output: {
manualChunks(id, { getModuleInfo, getModuleIds }) {
}
}
},
}
});


Binary file not shown.


@@ -1,7 +0,0 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE user
ADD twofa_last_token VARCHAR(6);
COMMIT;


@@ -1,10 +0,0 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE user
ADD twofa_secret VARCHAR(64);
ALTER TABLE user
ADD twofa_status BOOLEAN default 0 NOT NULL;
COMMIT;


@@ -1,5 +0,0 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor_group
ADD send_url BOOLEAN DEFAULT 0 NOT NULL;
COMMIT;


@@ -1,18 +0,0 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
CREATE TABLE docker_host (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
user_id INT NOT NULL,
docker_daemon VARCHAR(255),
docker_type VARCHAR(255),
name VARCHAR(255)
);
ALTER TABLE monitor
ADD docker_host INTEGER REFERENCES docker_host(id);
ALTER TABLE monitor
ADD docker_container VARCHAR(255);
COMMIT;


@@ -1,18 +0,0 @@
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD auth_method VARCHAR(250);
ALTER TABLE monitor
ADD auth_domain TEXT;
ALTER TABLE monitor
ADD auth_workstation TEXT;
COMMIT;
BEGIN TRANSACTION;
UPDATE monitor
SET auth_method = 'basic'
WHERE basic_auth_user is not null;
COMMIT;


@@ -1,18 +0,0 @@
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD radius_username VARCHAR(255);
ALTER TABLE monitor
ADD radius_password VARCHAR(255);
ALTER TABLE monitor
ADD radius_calling_station_id VARCHAR(50);
ALTER TABLE monitor
ADD radius_called_station_id VARCHAR(50);
ALTER TABLE monitor
ADD radius_secret VARCHAR(255);
COMMIT


@@ -1,7 +0,0 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD retry_interval INTEGER default 0 not null;
COMMIT;


@@ -1,10 +0,0 @@
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD database_connection_string VARCHAR(2000);
ALTER TABLE monitor
ADD database_query TEXT;
COMMIT


@@ -1,16 +0,0 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD mqtt_topic TEXT;
ALTER TABLE monitor
ADD mqtt_success_message VARCHAR(255);
ALTER TABLE monitor
ADD mqtt_username VARCHAR(255);
ALTER TABLE monitor
ADD mqtt_password VARCHAR(255);
COMMIT;


@@ -1,30 +0,0 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
create table `group`
(
id INTEGER not null
constraint group_pk
primary key autoincrement,
name VARCHAR(255) not null,
created_date DATETIME default (DATETIME('now')) not null,
public BOOLEAN default 0 not null,
active BOOLEAN default 1 not null,
weight BOOLEAN NOT NULL DEFAULT 1000
);
CREATE TABLE [monitor_group]
(
[id] INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
[monitor_id] INTEGER NOT NULL REFERENCES [monitor] ([id]) ON DELETE CASCADE ON UPDATE CASCADE,
[group_id] INTEGER NOT NULL REFERENCES [group] ([id]) ON DELETE CASCADE ON UPDATE CASCADE,
weight BOOLEAN NOT NULL DEFAULT 1000
);
CREATE INDEX [fk]
ON [monitor_group] (
[monitor_id],
[group_id]);
COMMIT;


@@ -1,13 +0,0 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD method TEXT default 'GET' not null;
ALTER TABLE monitor
ADD body TEXT default null;
ALTER TABLE monitor
ADD headers TEXT default null;
COMMIT;


@@ -1,10 +0,0 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
-- For sendHeartbeatList
CREATE INDEX monitor_time_index ON heartbeat (monitor_id, time);
-- For sendImportantHeartbeatList
CREATE INDEX monitor_important_time_index ON heartbeat (monitor_id, important,time);
COMMIT;


@@ -1,18 +0,0 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
create table incident
(
id INTEGER not null
constraint incident_pk
primary key autoincrement,
title VARCHAR(255) not null,
content TEXT not null,
style VARCHAR(30) default 'warning' not null,
created_date DATETIME default (DATETIME('now')) not null,
last_updated_date DATETIME,
pin BOOLEAN default 1 not null,
active BOOLEAN default 1 not null
);
COMMIT;


@@ -1,10 +0,0 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD resend_interval INTEGER default 0 not null;
ALTER TABLE heartbeat
ADD down_count INTEGER default 0 not null;
COMMIT;


@@ -1,10 +0,0 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD basic_auth_user TEXT default null;
ALTER TABLE monitor
ADD basic_auth_pass TEXT default null;
COMMIT;


@@ -1,7 +0,0 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD expiry_notification BOOLEAN default 1;
COMMIT;


@@ -1,7 +0,0 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD push_token VARCHAR(20) DEFAULT NULL;
COMMIT;


@@ -1,18 +0,0 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
CREATE TABLE [notification_sent_history] (
[id] INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
[type] VARCHAR(50) NOT NULL,
[monitor_id] INTEGER NOT NULL,
[days] INTEGER NOT NULL,
UNIQUE([type], [monitor_id], [days])
);
CREATE INDEX [good_index] ON [notification_sent_history] (
[type],
[monitor_id],
[days]
);
COMMIT;


@@ -1,23 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
CREATE TABLE proxy (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
user_id INT NOT NULL,
protocol VARCHAR(10) NOT NULL,
host VARCHAR(255) NOT NULL,
port SMALLINT NOT NULL,
auth BOOLEAN NOT NULL,
username VARCHAR(255) NULL,
password VARCHAR(255) NULL,
active BOOLEAN NOT NULL DEFAULT 1,
'default' BOOLEAN NOT NULL DEFAULT 0,
created_date DATETIME DEFAULT (DATETIME('now')) NOT NULL
);
ALTER TABLE monitor ADD COLUMN proxy_id INTEGER REFERENCES proxy(id);
CREATE INDEX proxy_id ON monitor (proxy_id);
CREATE INDEX proxy_user_id ON proxy (user_id);
COMMIT;


@@ -1,22 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
-- Generated by Intellij IDEA
create table setting_dg_tmp
(
id INTEGER
primary key autoincrement,
key VARCHAR(200) not null
unique,
value TEXT,
type VARCHAR(20)
);
insert into setting_dg_tmp(id, key, value, type) select id, key, value, type from setting;
drop table setting;
alter table setting_dg_tmp rename to setting;
COMMIT;


@@ -1,6 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE status_page ADD footer_text TEXT;
ALTER TABLE status_page ADD custom_css TEXT;
ALTER TABLE status_page ADD show_powered_by BOOLEAN NOT NULL DEFAULT 1;
COMMIT;


@@ -1,31 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
CREATE TABLE [status_page](
[id] INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
[slug] VARCHAR(255) NOT NULL UNIQUE,
[title] VARCHAR(255) NOT NULL,
[description] TEXT,
[icon] VARCHAR(255) NOT NULL,
[theme] VARCHAR(30) NOT NULL,
[published] BOOLEAN NOT NULL DEFAULT 1,
[search_engine_index] BOOLEAN NOT NULL DEFAULT 1,
[show_tags] BOOLEAN NOT NULL DEFAULT 0,
[password] VARCHAR,
[created_date] DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
[modified_date] DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE UNIQUE INDEX [slug] ON [status_page]([slug]);
CREATE TABLE [status_page_cname](
[id] INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
[status_page_id] INTEGER NOT NULL REFERENCES [status_page]([id]) ON DELETE CASCADE ON UPDATE CASCADE,
[domain] VARCHAR NOT NULL UNIQUE
);
ALTER TABLE incident ADD status_page_id INTEGER;
ALTER TABLE [group] ADD status_page_id INTEGER;
COMMIT;


@@ -1,37 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
-- Change Monitor.created_date from "TIMESTAMP" to "DATETIME"
-- SQL Generated by Intellij Idea
PRAGMA foreign_keys=off;
BEGIN TRANSACTION;
create table monitor_dg_tmp
(
id INTEGER not null
primary key autoincrement,
name VARCHAR(150),
active BOOLEAN default 1 not null,
user_id INTEGER
references user
on update cascade on delete set null,
interval INTEGER default 20 not null,
url TEXT,
type VARCHAR(20),
weight INTEGER default 2000,
hostname VARCHAR(255),
port INTEGER,
created_date DATETIME,
keyword VARCHAR(255)
);
insert into monitor_dg_tmp(id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword) select id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword from monitor;
drop table monitor;
alter table monitor_dg_tmp rename to monitor;
create index user_id on monitor (user_id);
COMMIT;
PRAGMA foreign_keys=on;
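SQLite's ALTER TABLE cannot change an existing column's type, which is why this patch (and the similar ones that follow) turns foreign keys off, copies every row into a freshly defined temporary table, drops the original, renames the copy back, and recreates the index. A minimal sketch of driving the same rebuild pattern from Node, again assuming better-sqlite3 and an illustrative cut-down schema:

const Database = require("better-sqlite3");
const db = new Database("./data/kuma.db");   // hypothetical path

db.pragma("foreign_keys = OFF");
db.exec(`
    BEGIN TRANSACTION;
    CREATE TABLE monitor_dg_tmp (id INTEGER PRIMARY KEY AUTOINCREMENT, name VARCHAR(150), user_id INTEGER, created_date DATETIME);
    INSERT INTO monitor_dg_tmp (id, name, user_id, created_date) SELECT id, name, user_id, created_date FROM monitor;
    DROP TABLE monitor;
    ALTER TABLE monitor_dg_tmp RENAME TO monitor;
    CREATE INDEX user_id ON monitor (user_id);   -- indexes are lost with the old table, so recreate them
    COMMIT;
`);
db.pragma("foreign_keys = ON");
db.close();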


@@ -1,19 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
CREATE TABLE tag (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
name VARCHAR(255) NOT NULL,
color VARCHAR(255) NOT NULL,
created_date DATETIME DEFAULT (DATETIME('now')) NOT NULL
);
CREATE TABLE monitor_tag (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
monitor_id INTEGER NOT NULL,
tag_id INTEGER NOT NULL,
value TEXT,
CONSTRAINT FK_tag FOREIGN KEY (tag_id) REFERENCES tag(id) ON DELETE CASCADE ON UPDATE CASCADE,
CONSTRAINT FK_monitor FOREIGN KEY (monitor_id) REFERENCES monitor(id) ON DELETE CASCADE ON UPDATE CASCADE
);
CREATE INDEX monitor_tag_monitor_id_index ON monitor_tag (monitor_id);
CREATE INDEX monitor_tag_tag_id_index ON monitor_tag (tag_id);


@@ -1,9 +0,0 @@
BEGIN TRANSACTION;
CREATE TABLE monitor_tls_info (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
monitor_id INTEGER NOT NULL,
info_json TEXT
);
COMMIT;


@@ -1,37 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
-- Add maxretries column to monitor
PRAGMA foreign_keys=off;
BEGIN TRANSACTION;
create table monitor_dg_tmp
(
id INTEGER not null
primary key autoincrement,
name VARCHAR(150),
active BOOLEAN default 1 not null,
user_id INTEGER
references user
on update cascade on delete set null,
interval INTEGER default 20 not null,
url TEXT,
type VARCHAR(20),
weight INTEGER default 2000,
hostname VARCHAR(255),
port INTEGER,
created_date DATETIME,
keyword VARCHAR(255),
maxretries INTEGER NOT NULL DEFAULT 0
);
insert into monitor_dg_tmp(id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword) select id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword from monitor;
drop table monitor;
alter table monitor_dg_tmp rename to monitor;
create index user_id on monitor (user_id);
COMMIT;
PRAGMA foreign_keys=on;


@@ -1,40 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
-- OK... something did go seriously wrong: the maxretries column was missing.
-- Developers should patch the db manually if the maxretries column is missing.
PRAGMA foreign_keys=off;
BEGIN TRANSACTION;
create table monitor_dg_tmp
(
id INTEGER not null
primary key autoincrement,
name VARCHAR(150),
active BOOLEAN default 1 not null,
user_id INTEGER
references user
on update cascade on delete set null,
interval INTEGER default 20 not null,
url TEXT,
type VARCHAR(20),
weight INTEGER default 2000,
hostname VARCHAR(255),
port INTEGER,
created_date DATETIME,
keyword VARCHAR(255),
maxretries INTEGER NOT NULL DEFAULT 0,
ignore_tls BOOLEAN default 0 not null,
upside_down BOOLEAN default 0 not null
);
insert into monitor_dg_tmp(id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword, maxretries) select id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword, maxretries from monitor;
drop table monitor;
alter table monitor_dg_tmp rename to monitor;
create index user_id on monitor (user_id);
COMMIT;
PRAGMA foreign_keys=on;


@@ -1,70 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
PRAGMA foreign_keys = off;
BEGIN TRANSACTION;
create table monitor_dg_tmp (
id INTEGER not null primary key autoincrement,
name VARCHAR(150),
active BOOLEAN default 1 not null,
user_id INTEGER references user on update cascade on delete
set
null,
interval INTEGER default 20 not null,
url TEXT,
type VARCHAR(20),
weight INTEGER default 2000,
hostname VARCHAR(255),
port INTEGER,
created_date DATETIME default (DATETIME('now')) not null,
keyword VARCHAR(255),
maxretries INTEGER NOT NULL DEFAULT 0,
ignore_tls BOOLEAN default 0 not null,
upside_down BOOLEAN default 0 not null
);
insert into
monitor_dg_tmp(
id,
name,
active,
user_id,
interval,
url,
type,
weight,
hostname,
port,
keyword,
maxretries,
ignore_tls,
upside_down
)
select
id,
name,
active,
user_id,
interval,
url,
type,
weight,
hostname,
port,
keyword,
maxretries,
ignore_tls,
upside_down
from
monitor;
drop table monitor;
alter table
monitor_dg_tmp rename to monitor;
create index user_id on monitor (user_id);
COMMIT;
PRAGMA foreign_keys = on;


@@ -1,74 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
PRAGMA foreign_keys = off;
BEGIN TRANSACTION;
create table monitor_dg_tmp (
id INTEGER not null primary key autoincrement,
name VARCHAR(150),
active BOOLEAN default 1 not null,
user_id INTEGER references user on update cascade on delete
set
null,
interval INTEGER default 20 not null,
url TEXT,
type VARCHAR(20),
weight INTEGER default 2000,
hostname VARCHAR(255),
port INTEGER,
created_date DATETIME default (DATETIME('now')) not null,
keyword VARCHAR(255),
maxretries INTEGER NOT NULL DEFAULT 0,
ignore_tls BOOLEAN default 0 not null,
upside_down BOOLEAN default 0 not null,
maxredirects INTEGER default 10 not null,
accepted_statuscodes_json TEXT default '["200-299"]' not null
);
insert into
monitor_dg_tmp(
id,
name,
active,
user_id,
interval,
url,
type,
weight,
hostname,
port,
created_date,
keyword,
maxretries,
ignore_tls,
upside_down
)
select
id,
name,
active,
user_id,
interval,
url,
type,
weight,
hostname,
port,
created_date,
keyword,
maxretries,
ignore_tls,
upside_down
from
monitor;
drop table monitor;
alter table
monitor_dg_tmp rename to monitor;
create index user_id on monitor (user_id);
COMMIT;
PRAGMA foreign_keys = on;


@@ -1,10 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD dns_resolve_type VARCHAR(5);
ALTER TABLE monitor
ADD dns_resolve_server VARCHAR(255);
COMMIT;


@@ -1,7 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD dns_last_result VARCHAR(255);
COMMIT;


@@ -1,7 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE notification
ADD is_default BOOLEAN default 0 NOT NULL;
COMMIT;


@@ -1,8 +0,0 @@
# DON'T UPDATE TO alpine3.13, 1.14, see #41.
FROM node:16-alpine3.12
WORKDIR /app
# Install apprise, iputils for non-root ping, setpriv
RUN apk add --no-cache iputils setpriv dumb-init python3 py3-cryptography py3-pip py3-six py3-yaml py3-click py3-markdown py3-requests py3-requests-oauthlib && \
pip3 --no-cache-dir install apprise==1.0.0 && \
rm -rf /root/.cache


@@ -1,28 +0,0 @@
# DON'T UPDATE TO node:14-bullseye-slim, see #372.
# If the image changed, the second stage image should be changed too
FROM node:16-buster-slim
ARG TARGETPLATFORM
WORKDIR /app
# Install Curl
# Install Apprise, add sqlite3 cli for debugging in the future, iputils-ping for ping, util-linux for setpriv
# python3 and python3-pip pull a lot of unneeded packages into Debian; specify --no-install-recommends to skip them and make the base image even smaller than the Alpine one!
RUN apt update && \
apt --yes --no-install-recommends install python3 python3-pip python3-cryptography python3-six python3-yaml python3-click python3-markdown python3-requests python3-requests-oauthlib \
sqlite3 iputils-ping util-linux dumb-init && \
pip3 --no-cache-dir install apprise==1.0.0 && \
rm -rf /var/lib/apt/lists/* && \
apt --yes autoremove
# Install cloudflared
# dpkg --add-architecture arm: cloudflared does not provide an armhf build, so this is a workaround. Read more: https://github.com/cloudflare/cloudflared/issues/583
COPY extra/download-cloudflared.js ./extra/download-cloudflared.js
RUN node ./extra/download-cloudflared.js $TARGETPLATFORM && \
dpkg --add-architecture arm && \
apt update && \
apt --yes --no-install-recommends install ./cloudflared.deb && \
rm -rf /var/lib/apt/lists/* && \
rm -f cloudflared.deb && \
apt --yes autoremove


@@ -1,14 +0,0 @@
# Simple docker-compose.yml
# You can change your port or volume location
version: '3.3'
services:
uptime-kuma:
image: louislam/uptime-kuma:1
container_name: uptime-kuma
volumes:
- ./uptime-kuma-data:/app/data
ports:
- 3001:3001 # <Host Port>:<Container Port>
restart: always
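After bringing this file up with docker compose, the app should answer on the mapped host port. A tiny sanity check, assuming the default 3001:3001 mapping was kept:

const http = require("http");

http.get("http://localhost:3001", (res) => {
    // Any 2xx or a redirect means the app is up; healthcheck.js further down in this diff applies the same idea.
    console.log("Uptime Kuma answered with HTTP " + res.statusCode);
    process.exit(res.statusCode < 400 ? 0 : 1);
}).on("error", (err) => {
    console.error("Not reachable yet: " + err.message);
    process.exit(1);
});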


@@ -1,82 +0,0 @@
FROM louislam/uptime-kuma:base-debian AS build
WORKDIR /app
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
COPY . .
RUN npm ci --production && \
chmod +x /app/extra/entrypoint.sh
FROM louislam/uptime-kuma:base-debian AS release
WORKDIR /app
# Copy app files from build layer
COPY --from=build /app /app
EXPOSE 3001
VOLUME ["/app/data"]
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD node extra/healthcheck.js
ENTRYPOINT ["/usr/bin/dumb-init", "--", "extra/entrypoint.sh"]
CMD ["node", "server/server.js"]
FROM release AS nightly
RUN npm run mark-as-nightly
# Build an image for testing pr
FROM louislam/uptime-kuma:base-debian AS pr-test
WORKDIR /app
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
## Install Git
RUN apt update \
&& apt --yes --no-install-recommends install curl \
&& curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg \
&& chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg \
&& echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | tee /etc/apt/sources.list.d/github-cli.list > /dev/null \
&& apt update \
&& apt --yes --no-install-recommends install git
## Empty the directory, because we have to clone the Git repo.
RUN rm -rf ./* && chown node /app
USER node
RUN git config --global user.email "no-reply@no-reply.com"
RUN git config --global user.name "PR Tester"
RUN git clone https://github.com/louislam/uptime-kuma.git .
RUN npm ci
EXPOSE 3000 3001
VOLUME ["/app/data"]
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD node extra/healthcheck.js
CMD ["npm", "run", "start-pr-test"]
# Upload the artifact to Github
FROM louislam/uptime-kuma:base-debian AS upload-artifact
WORKDIR /
RUN apt update && \
apt --yes install curl file
COPY --from=build /app /app
ARG VERSION
ARG GITHUB_TOKEN
ARG TARGETARCH
ARG PLATFORM=debian
ARG FILE=$PLATFORM-$TARGETARCH-$VERSION.tar.gz
ARG DIST=dist.tar.gz
RUN chmod +x /app/extra/upload-github-release-asset.sh
# Full Build
# RUN tar -zcvf $FILE app
# RUN /app/extra/upload-github-release-asset.sh github_api_token=$GITHUB_TOKEN owner=louislam repo=uptime-kuma tag=$VERSION filename=$FILE
# Dist only
RUN cd /app && tar -zcvf $DIST dist
RUN /app/extra/upload-github-release-asset.sh github_api_token=$GITHUB_TOKEN owner=louislam repo=uptime-kuma tag=$VERSION filename=/app/$DIST


@@ -1,25 +0,0 @@
FROM louislam/uptime-kuma:base-alpine AS build
WORKDIR /app
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
COPY . .
RUN npm ci --production && \
chmod +x /app/extra/entrypoint.sh
FROM louislam/uptime-kuma:base-alpine AS release
WORKDIR /app
# Copy app files from build layer
COPY --from=build /app /app
EXPOSE 3001
VOLUME ["/app/data"]
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD node extra/healthcheck.js
ENTRYPOINT ["/usr/bin/dumb-init", "--", "extra/entrypoint.sh"]
CMD ["node", "server/server.js"]
FROM release AS nightly
RUN npm run mark-as-nightly


@@ -1,6 +0,0 @@
module.exports = {
apps: [{
name: "uptime-kuma",
script: "./server/server.js",
}]
};
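This PM2 ecosystem file describes the app for "pm2 start ecosystem.config.js". The installer script later in this diff starts the server through the pm2 CLI directly, but the same file can also be loaded through PM2's programmatic API; a hedged sketch, assuming the pm2 package is installed:

const pm2 = require("pm2");

pm2.connect((err) => {
    if (err) {
        console.error(err);
        process.exit(1);
    }
    // Start the "uptime-kuma" app defined in the ecosystem file above.
    pm2.start("ecosystem.config.js", (startErr) => {
        pm2.disconnect();   // detach from the PM2 daemon; the app keeps running under PM2
        if (startErr) {
            throw startErr;
        }
    });
});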


@@ -1,66 +0,0 @@
const pkg = require("../../package.json");
const fs = require("fs");
const childProcess = require("child_process");
const util = require("../../src/util");
util.polyfill();
const version = process.env.VERSION;
console.log("Beta Version: " + version);
if (!version || !version.includes("-beta.")) {
console.error("invalid version, beta version only");
process.exit(1);
}
const exists = tagExists(version);
if (! exists) {
// Process package.json
pkg.version = version;
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n");
// Also update package-lock.json
childProcess.spawnSync("npm", [ "install" ]);
commit(version);
tag(version);
} else {
console.log("version tag exists, please delete the tag or use another tag");
process.exit(1);
}
function commit(version) {
let msg = "Update to " + version;
let res = childProcess.spawnSync("git", [ "commit", "-m", msg, "-a" ]);
let stdout = res.stdout.toString().trim();
console.log(stdout);
if (stdout.includes("no changes added to commit")) {
throw new Error("commit error");
}
res = childProcess.spawnSync("git", [ "push", "origin", "master" ]);
console.log(res.stdout.toString().trim());
}
function tag(version) {
let res = childProcess.spawnSync("git", [ "tag", version ]);
console.log(res.stdout.toString().trim());
res = childProcess.spawnSync("git", [ "push", "origin", version ]);
console.log(res.stdout.toString().trim());
}
function tagExists(version) {
if (! version) {
throw new Error("invalid version");
}
let res = childProcess.spawnSync("git", [ "tag", "-l", version ]);
return res.stdout.toString().trim() === version;
}


@@ -1,33 +0,0 @@
const childProcess = require("child_process");
if (!process.env.UPTIME_KUMA_GH_REPO) {
console.error("Please set the repo in the environment variable 'UPTIME_KUMA_GH_REPO' (e.g. mhkarimi1383:goalert-notification)");
process.exit(1);
}
let inputArray = process.env.UPTIME_KUMA_GH_REPO.split(":");
if (inputArray.length !== 2) {
console.error("Invalid format. Please set the repo in the environment variable 'UPTIME_KUMA_GH_REPO' (e.g. mhkarimi1383:goalert-notification)");
process.exit(1);
}
let name = inputArray[0];
let branch = inputArray[1];
console.log("Checkout pr");
// Checkout the pr
let result = childProcess.spawnSync("git", [ "remote", "add", name, `https://github.com/${name}/uptime-kuma` ]);
console.log(result.stdout.toString());
console.error(result.stderr.toString());
result = childProcess.spawnSync("git", [ "fetch", name, branch ]);
console.log(result.stdout.toString());
console.error(result.stderr.toString());
result = childProcess.spawnSync("git", [ "checkout", `${name}/${branch}`, "--force" ]);
console.log(result.stdout.toString());
console.error(result.stderr.toString());
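The helper above is driven entirely by the UPTIME_KUMA_GH_REPO variable. A sketch of invoking it from another Node script; the file name extra/checkout-pr.js is an assumption, since the path is not shown in this diff:

const { execFileSync } = require("child_process");

// Hypothetical path to the helper; adjust to wherever it lives in the repo.
execFileSync("node", [ "extra/checkout-pr.js" ], {
    stdio: "inherit",
    env: { ...process.env, UPTIME_KUMA_GH_REPO: "mhkarimi1383:goalert-notification" },
});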


@@ -1,57 +0,0 @@
const github = require("@actions/github");
(async () => {
try {
const token = process.argv[2];
const issueNumber = process.argv[3];
const username = process.argv[4];
const client = github.getOctokit(token).rest;
const issue = {
owner: "louislam",
repo: "uptime-kuma",
number: issueNumber,
};
const labels = (
await client.issues.listLabelsOnIssue({
owner: issue.owner,
repo: issue.repo,
issue_number: issue.number
})
).data.map(({ name }) => name);
if (labels.length === 0) {
console.log("Bad format here");
await client.issues.addLabels({
owner: issue.owner,
repo: issue.repo,
issue_number: issue.number,
labels: [ "invalid-format" ]
});
// Add the issue closing comment
await client.issues.createComment({
owner: issue.owner,
repo: issue.repo,
issue_number: issue.number,
body: `@${username}: Hello! :wave:\n\nThis issue is being automatically closed because it does not follow the issue template. Please DO NOT open a blank issue.`
});
// Close the issue
await client.issues.update({
owner: issue.owner,
repo: issue.repo,
issue_number: issue.number,
state: "closed"
});
} else {
console.log("Pass!");
}
} catch (e) {
console.log(e);
}
})();


@@ -1,2 +0,0 @@
# Must enable File Sharing in Docker Desktop
docker run -it --rm -v ${pwd}:/app louislam/batsh /usr/bin/batsh bash --output ./install.sh ./extra/install.batsh


@@ -1,44 +0,0 @@
//
const http = require("https"); // the "https" module is needed because the cloudflared release URL is https
const fs = require("fs");
const platform = process.argv[2];
if (!platform) {
console.error("No platform??");
process.exit(1);
}
let arch = null;
if (platform === "linux/amd64") {
arch = "amd64";
} else if (platform === "linux/arm64") {
arch = "arm64";
} else if (platform === "linux/arm/v7") {
arch = "arm";
} else {
console.error("Invalid platform?? " + platform);
process.exit(1); // without this the script would try to download "cloudflared-linux-null.deb"
}
const file = fs.createWriteStream("cloudflared.deb");
get("https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-" + arch + ".deb");
function get(url) {
http.get(url, function (res) {
if (res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {
console.log("Redirect to " + res.headers.location);
get(res.headers.location);
} else if (res.statusCode >= 200 && res.statusCode < 300) {
res.pipe(file);
res.on("end", function () {
console.log("Downloaded");
});
} else {
console.error(res.statusCode);
process.exit(1);
}
});
}


@@ -1,66 +0,0 @@
console.log("Downloading dist");
const https = require("https");
const tar = require("tar");
const packageJSON = require("../package.json");
const fs = require("fs");
const rmSync = require("./fs-rmSync.js");
const version = packageJSON.version;
const filename = "dist.tar.gz";
const url = `https://github.com/louislam/uptime-kuma/releases/download/${version}/${filename}`;
download(url);
/**
* Downloads the latest version of the dist from a GitHub release.
* @param {string} url The URL to download from.
*
* Generated by Trelent
*/
function download(url) {
console.log(url);
https.get(url, (response) => {
if (response.statusCode === 200) {
console.log("Extracting dist...");
if (fs.existsSync("./dist")) {
if (fs.existsSync("./dist-backup")) {
rmSync("./dist-backup", {
recursive: true
});
}
fs.renameSync("./dist", "./dist-backup");
}
const tarStream = tar.x({
cwd: "./",
});
tarStream.on("close", () => {
if (fs.existsSync("./dist-backup")) {
rmSync("./dist-backup", {
recursive: true
});
}
console.log("Done");
});
tarStream.on("error", () => {
if (fs.existsSync("./dist-backup")) {
fs.renameSync("./dist-backup", "./dist");
}
console.error("Error from tarStream");
});
response.pipe(tarStream);
} else if (response.statusCode === 302) {
download(response.headers.location);
} else {
console.log("dist not found");
}
});
}


@@ -1,21 +0,0 @@
#!/usr/bin/env sh
# set -e Exit the script if an error happens
set -e
PUID=${PUID=0}
PGID=${PGID=0}
files_ownership () {
# -h Changes the ownership of an encountered symbolic link and not that of the file or directory pointed to by the symbolic link.
# -R Recursively descends the specified directories
# -c Like verbose but report only when a change is made
chown -hRc "$PUID":"$PGID" /app/data
}
echo "==> Performing startup jobs and maintenance tasks"
files_ownership
echo "==> Starting application with user $PUID group $PGID"
# --clear-groups Clear supplementary groups.
exec setpriv --reuid "$PUID" --regid "$PGID" --clear-groups "$@"


@@ -1,19 +0,0 @@
#!/usr/bin/env node
const childProcess = require("child_process");
let env = process.env;
let cmd = process.argv[2];
let args = process.argv.slice(3);
let replacedArgs = [];
for (let arg of args) {
for (let key in env) {
arg = arg.replaceAll(`$${key}`, env[key]);
}
replacedArgs.push(arg);
}
let child = childProcess.spawn(cmd, replacedArgs);
child.stdout.pipe(process.stdout);
child.stderr.pipe(process.stderr);
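The wrapper above does one thing to each argument: it replaces every $NAME with the matching environment value before spawning the real command. The core step in isolation (String.prototype.replaceAll needs Node 15+, just like the wrapper itself):

// What happens to a single argument such as "--port=$PORT" when PORT=3001 is set:
let sampleEnv = { PORT: "3001" };
let sampleArg = "--port=$PORT";
for (let key in sampleEnv) {
    sampleArg = sampleArg.replaceAll(`$${key}`, sampleEnv[key]);
}
console.log(sampleArg);   // prints: --port=3001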


@@ -1,23 +0,0 @@
const fs = require("fs");
/**
* Detect if `fs.rmSync` is available
* to avoid the runtime deprecation warning triggered by using `fs.rmdirSync` with `{ recursive: true }` in Node.js v16,
* or the `recursive` option being removed from it completely in a future Node.js version.
* See the links below.
*
* @todo Once we drop support for Node.js v14 (or at least versions before v14.14.0), we can safely replace this function with `fs.rmSync`, since `fs.rmSync` was added in Node.js v14.14.0, we currently still support Node.js v14 versions older than v14.14.0, and this function has almost the same signature as `fs.rmSync`.
* @link https://nodejs.org/docs/latest-v16.x/api/deprecations.html#dep0147-fsrmdirpath--recursive-true- the deprecation information of `fs.rmdirSync`
* @link https://nodejs.org/docs/latest-v16.x/api/fs.html#fsrmsyncpath-options the documentation of `fs.rmSync`
* @param {fs.PathLike} path Valid types for path values in "fs".
* @param {fs.RmDirOptions} [options] Options for `fs.rmdirSync`; if `fs.rmSync` is available and `recursive` is true, `force: true` is added automatically.
*/
const rmSync = (path, options) => {
if (typeof fs.rmSync === "function") {
if (options && options.recursive) {
options.force = true;
}
return fs.rmSync(path, options);
}
return fs.rmdirSync(path, options);
};
module.exports = rmSync;
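Usage of the helper, matching how the language-file updater later in this diff calls it:

const rmSync = require("./fs-rmSync.js");

// Removes a directory tree on Node 14 (fs.rmdirSync fallback) and on Node 16+ (fs.rmSync) alike.
rmSync("./languages", { recursive: true });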


@@ -1,50 +0,0 @@
/*
* This script should be run after a period of time (180s), because the server may need some time to prepare.
*/
const { FBSD } = require("../server/util-server");
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
let client;
const sslKey = process.env.UPTIME_KUMA_SSL_KEY || process.env.SSL_KEY || undefined;
const sslCert = process.env.UPTIME_KUMA_SSL_CERT || process.env.SSL_CERT || undefined;
if (sslKey && sslCert) {
client = require("https");
} else {
client = require("http");
}
// If host is omitted, the server will accept connections on the unspecified IPv6 address (::) when IPv6 is available and the unspecified IPv4 address (0.0.0.0) otherwise.
// Dual-stack support for (::)
let hostname = process.env.UPTIME_KUMA_HOST;
// Also read HOST if not *BSD, as HOST is a system environment variable in FreeBSD
if (!hostname && !FBSD) {
hostname = process.env.HOST;
}
const port = parseInt(process.env.UPTIME_KUMA_PORT || process.env.PORT || 3001);
let options = {
host: hostname || "127.0.0.1",
port: port,
timeout: 28 * 1000,
};
let request = client.request(options, (res) => {
console.log(`Health Check OK [Res Code: ${res.statusCode}]`);
if (res.statusCode === 302) {
process.exit(0);
} else {
process.exit(1);
}
});
request.on("error", function (err) {
console.error("Health Check ERROR");
process.exit(1);
});
request.end();
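The Dockerfiles above wire this file in as "HEALTHCHECK ... CMD node extra/healthcheck.js". It can also be exercised by hand against a running instance, for example:

const { spawnSync } = require("child_process");

const result = spawnSync("node", [ "extra/healthcheck.js" ], {
    stdio: "inherit",
    env: { ...process.env, UPTIME_KUMA_HOST: "127.0.0.1", UPTIME_KUMA_PORT: "3001" },
});
process.exit(result.status ?? 1);   // 0 = healthy, 1 = unhealthy, mirroring the exit codes above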


@@ -1,245 +0,0 @@
// install.sh is generated by ./extra/install.batsh, do not modify it directly.
// "npm run compile-install-script" to compile install.sh
// The command works on Windows PowerShell with Docker for Windows only.
// curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh
println("=====================");
println("Uptime Kuma Installer");
println("=====================");
println("Supported OS: CentOS 7/8, Ubuntu >= 16.04 and Debian");
println("---------------------------------------");
println("This script is designed for Linux and basic usage.");
println("For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation");
println("---------------------------------------");
println("");
println("Local - Install Uptime Kuma in your current machine with git, Node.js 14 and pm2");
println("Docker - Install Uptime Kuma Docker container");
println("");
if ("$1" != "") {
type = "$1";
} else {
call("read", "-p", "Which installation method do you prefer? [DOCKER/local]: ", "type");
}
defaultPort = "3001";
function checkNode() {
bash("nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])')");
println("Node Version: " ++ nodeVersion);
if (nodeVersion < "12") {
println("Error: Node.js 14 is required");
call("exit", "1");
}
if (nodeVersion == "12") {
println("Warning: NodeJS " ++ nodeVersion ++ " is not tested.");
}
}
function deb() {
bash("nodeCheck=$(node -v)");
bash("apt --yes update");
if (nodeCheck != "") {
checkNode();
} else {
// Old nodejs binary name is "nodejs"
bash("check=$(nodejs --version)");
if (check != "") {
println("Error: the 'node' command was not found, but 'nodejs' was. Your Node.js is probably too old.");
bash("exit 1");
}
bash("curlCheck=$(curl --version)");
if (curlCheck == "") {
println("Installing Curl");
bash("apt --yes install curl");
}
println("Installing Node.js 14");
bash("curl -sL https://deb.nodesource.com/setup_14.x | bash - > log.txt");
bash("apt --yes install nodejs");
bash("node -v");
bash("nodeCheckAgain=$(node -v)");
if (nodeCheckAgain == "") {
println("Error during Node.js installation");
bash("exit 1");
}
}
bash("check=$(git --version)");
if (check == "") {
println("Installing Git");
bash("apt --yes install git");
}
}
if (type == "local") {
defaultInstallPath = "/opt/uptime-kuma";
if (exists("/etc/redhat-release")) {
os = call("cat", "/etc/redhat-release");
distribution = "rhel";
} else if (exists("/etc/issue")) {
bash("os=$(head -n1 /etc/issue | cut -f 1 -d ' ')");
if (os == "Ubuntu") {
distribution = "ubuntu";
}
if (os == "Debian") {
distribution = "debian";
}
}
bash("arch=$(uname -i)");
println("Your OS: " ++ os);
println("Distribution: " ++ distribution);
println("Arch: " ++ arch);
if ("$3" != "") {
port = "$3";
} else {
call("read", "-p", "Listening Port [$defaultPort]: ", "port");
if (port == "") {
port = defaultPort;
}
}
if ("$2" != "") {
installPath = "$2";
} else {
call("read", "-p", "Installation Path [$defaultInstallPath]: ", "installPath");
if (installPath == "") {
installPath = defaultInstallPath;
}
}
// CentOS
if (distribution == "rhel") {
bash("nodeCheck=$(node -v)");
if (nodeCheck != "") {
checkNode();
} else {
bash("curlCheck=$(curl --version)");
if (curlCheck == "") {
println("Installing Curl");
bash("yum -y -q install curl");
}
println("Installing Node.js 14");
bash("curl -sL https://rpm.nodesource.com/setup_14.x | bash - > log.txt");
bash("yum install -y -q nodejs");
bash("node -v");
bash("nodeCheckAgain=$(node -v)");
if (nodeCheckAgain == "") {
println("Error during Node.js installation");
bash("exit 1");
}
}
bash("check=$(git --version)");
if (check == "") {
println("Installing Git");
bash("yum -y -q install git");
}
// Ubuntu
} else if (distribution == "ubuntu") {
deb();
// Debian
} else if (distribution == "debian") {
deb();
} else {
// Unknown distribution
error = 0;
bash("check=$(git --version)");
if (check == "") {
error = 1;
println("Error: git is missing");
}
bash("check=$(node -v)");
if (check == "") {
error = 1;
println("Error: node is missing");
}
if (error > 0) {
println("Please install the missing software listed above");
bash("exit 1");
}
}
bash("check=$(pm2 --version)");
if (check == "") {
println("Installing PM2");
bash("npm install pm2 -g && pm2 install pm2-logrotate");
bash("pm2 startup");
}
bash("mkdir -p $installPath");
bash("cd $installPath");
bash("git clone https://github.com/louislam/uptime-kuma.git .");
bash("npm run setup");
bash("pm2 start server/server.js --name uptime-kuma -- --port=$port");
} else {
defaultVolume = "uptime-kuma";
bash("check=$(docker -v)");
if (check == "") {
println("Error: docker is not found!");
bash("exit 1");
}
bash("check=$(docker info)");
bash("if [[ \"$check\" == *\"Is the docker daemon running\"* ]]; then
\"echo\" \"Error: docker is not running\"
\"exit\" \"1\"
fi");
if ("$3" != "") {
port = "$3";
} else {
call("read", "-p", "Expose Port [$defaultPort]: ", "port");
if (port == "") {
port = defaultPort;
}
}
if ("$2" != "") {
volume = "$2";
} else {
call("read", "-p", "Volume Name [$defaultVolume]: ", "volume");
if (volume == "") {
volume = defaultVolume;
}
}
println("Port: $port");
println("Volume: $volume");
bash("docker volume create $volume");
bash("docker run -d --restart=always -p $port:3001 -v $volume:/app/data --name uptime-kuma louislam/uptime-kuma:1");
}
println("http://localhost:$port");


@@ -1,24 +0,0 @@
const pkg = require("../package.json");
const fs = require("fs");
const util = require("../src/util");
util.polyfill();
const oldVersion = pkg.version;
const newVersion = oldVersion + "-nightly";
console.log("Old Version: " + oldVersion);
console.log("New Version: " + newVersion);
if (newVersion) {
// Process package.json
pkg.version = newVersion;
pkg.scripts.setup = pkg.scripts.setup.replaceAll(oldVersion, newVersion);
pkg.scripts["build-docker"] = pkg.scripts["build-docker"].replaceAll(oldVersion, newVersion);
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n");
// Process README.md
if (fs.existsSync("README.md")) {
fs.writeFileSync("README.md", fs.readFileSync("README.md", "utf8").replaceAll(oldVersion, newVersion));
}
}


@@ -1,6 +0,0 @@
console.log("Git Push and Publish the release note on github, then press any key to continue");
process.stdin.setRawMode(true);
process.stdin.resume();
process.stdin.on("data", process.exit.bind(process, 0));


@@ -1,60 +0,0 @@
console.log("== Uptime Kuma Remove 2FA Tool ==");
console.log("Loading the database");
const Database = require("../server/database");
const { R } = require("redbean-node");
const readline = require("readline");
const TwoFA = require("../server/2fa");
const args = require("args-parser")(process.argv);
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout
});
const main = async () => {
Database.init(args);
await Database.connect();
try {
// No need to actually reset the password for testing, just make sure no connection problem. It is ok for now.
if (!process.env.TEST_BACKEND) {
const user = await R.findOne("user");
if (! user) {
throw new Error("user not found, have you installed?");
}
console.log("Found user: " + user.username);
let ans = await question("Are you sure you want to remove 2FA? [y/N]");
if (ans.toLowerCase() === "y") {
await TwoFA.disable2FA(user.id);
console.log("2FA has been removed successfully.");
}
}
} catch (e) {
console.error("Error: " + e.message);
}
await Database.close();
rl.close();
console.log("Finished.");
};
function question(question) {
return new Promise((resolve) => {
rl.question(question, (answer) => {
resolve(answer);
});
});
}
if (!process.env.TEST_BACKEND) {
main();
}
module.exports = {
main,
};


@@ -1,70 +0,0 @@
console.log("== Uptime Kuma Reset Password Tool ==");
const Database = require("../server/database");
const { R } = require("redbean-node");
const readline = require("readline");
const { initJWTSecret } = require("../server/util-server");
const User = require("../server/model/user");
const args = require("args-parser")(process.argv);
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout
});
const main = async () => {
console.log("Connecting the database");
Database.init(args);
await Database.connect(false, false, true);
try {
// No need to actually reset the password for testing, just make sure no connection problem. It is ok for now.
if (!process.env.TEST_BACKEND) {
const user = await R.findOne("user");
if (! user) {
throw new Error("user not found, have you installed?");
}
console.log("Found user: " + user.username);
while (true) {
let password = await question("New Password: ");
let confirmPassword = await question("Confirm New Password: ");
if (password === confirmPassword) {
await User.resetPassword(user.id, password);
// Reset all sessions by reset jwt secret
await initJWTSecret();
break;
} else {
console.log("Passwords do not match, please try again.");
}
}
console.log("Password reset successfully.");
}
} catch (e) {
console.error("Error: " + e.message);
}
await Database.close();
rl.close();
console.log("Finished.");
};
function question(question) {
return new Promise((resolve) => {
rl.question(question, (answer) => {
resolve(answer);
});
});
}
if (!process.env.TEST_BACKEND) {
main();
}
module.exports = {
main,
};


@@ -1,144 +0,0 @@
/*
* Simple DNS Server
* For testing DNS monitoring type, dev only
*/
const dns2 = require("dns2");
const { Packet } = dns2;
const server = dns2.createServer({
udp: true
});
server.on("request", (request, send, rinfo) => {
for (let question of request.questions) {
console.log(question.name, type(question.type), question.class);
const response = Packet.createResponseFromRequest(request);
if (question.name === "existing.com") {
if (question.type === Packet.TYPE.A) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
address: "1.2.3.4"
});
} else if (question.type === Packet.TYPE.AAAA) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
address: "fe80::::1234:5678:abcd:ef00",
});
} else if (question.type === Packet.TYPE.CNAME) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
domain: "cname1.existing.com",
});
} else if (question.type === Packet.TYPE.MX) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
exchange: "mx1.existing.com",
priority: 5
});
} else if (question.type === Packet.TYPE.NS) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
ns: "ns1.existing.com",
});
} else if (question.type === Packet.TYPE.SOA) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
primary: "existing.com",
admin: "admin@existing.com",
serial: 2021082701,
refresh: 300,
retry: 3,
expiration: 10,
minimum: 10,
});
} else if (question.type === Packet.TYPE.SRV) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
priority: 5,
weight: 5,
port: 8080,
target: "srv1.existing.com",
});
} else if (question.type === Packet.TYPE.TXT) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
data: "#v=spf1 include:_spf.existing.com ~all",
});
} else if (question.type === Packet.TYPE.CAA) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
flags: 0,
tag: "issue",
value: "ca.existing.com",
});
}
}
if (question.name === "4.3.2.1.in-addr.arpa") {
if (question.type === Packet.TYPE.PTR) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
domain: "ptr1.existing.com",
});
}
}
send(response);
}
});
server.on("listening", () => {
console.log("Listening");
console.log(server.addresses());
});
server.on("close", () => {
console.log("server closed");
});
server.listen({
udp: 5300
});
function type(code) {
for (let name in Packet.TYPE) {
if (Packet.TYPE[name] === code) {
return name;
}
}
}
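The test server above answers only for "existing.com" (and the matching PTR name) and listens on UDP port 5300, so it can be queried with Node's built-in resolver while it is running:

const { Resolver } = require("dns").promises;

const resolver = new Resolver();
resolver.setServers([ "127.0.0.1:5300" ]);   // point at the dev DNS server started above

resolver.resolve4("existing.com")
    .then((addresses) => console.log(addresses))   // expected: [ '1.2.3.4' ]
    .catch((err) => console.error(err));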


@@ -1,50 +0,0 @@
const { log } = require("../src/util");
const mqttUsername = "louis1";
const mqttPassword = "!@#$LLam";
class SimpleMqttServer {
aedes = require("aedes")();
server = require("net").createServer(this.aedes.handle);
constructor(port) {
this.port = port;
}
start() {
this.server.listen(this.port, () => {
console.log("server started and listening on port ", this.port);
});
}
}
let server1 = new SimpleMqttServer(10000);
server1.aedes.authenticate = function (client, username, password, callback) {
if (username && password) {
console.log(password.toString("utf-8"));
callback(null, username === mqttUsername && password.toString("utf-8") === mqttPassword);
} else {
callback(null, false);
}
};
server1.aedes.on("subscribe", (subscriptions, client) => {
console.log(subscriptions);
for (let s of subscriptions) {
if (s.topic === "test") {
server1.aedes.publish({
topic: "test",
payload: Buffer.from("ok"),
}, (error) => {
if (error) {
log.error("mqtt_server", error);
}
});
}
}
});
server1.start();
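The test broker above listens on port 10000, requires the username and password defined at the top of the file, and publishes "ok" on the "test" topic as soon as a client subscribes. A quick client to verify that behaviour, assuming the "mqtt" client package:

const mqtt = require("mqtt");

const client = mqtt.connect("mqtt://localhost:10000", {
    username: "louis1",
    password: "!@#$LLam",
});
client.on("connect", () => client.subscribe("test"));
client.on("message", (topic, payload) => {
    console.log(topic, payload.toString());   // expected: test ok
    client.end();
});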


@@ -1,3 +0,0 @@
package-lock.json
test.js
languages/


@@ -1,96 +0,0 @@
// Need to use ES6 to read language files
import fs from "fs";
import util from "util";
import rmSync from "../fs-rmSync.js";
/**
* Copy across the required language files
* Creates a local directory (./languages) and copies the required files
* into it.
* @param {string} langCode Code of language to update. A file will be
* created with this code if one does not already exist
* @param {string} baseLang The second base language file to copy. This
* will be ignored if set to "en" as en.js is copied by default
*/
function copyFiles(langCode, baseLang) {
if (fs.existsSync("./languages")) {
rmSync("./languages", { recursive: true });
}
fs.mkdirSync("./languages");
if (!fs.existsSync(`../../src/languages/${langCode}.js`)) {
fs.closeSync(fs.openSync(`./languages/${langCode}.js`, "a"));
} else {
fs.copyFileSync(`../../src/languages/${langCode}.js`, `./languages/${langCode}.js`);
}
fs.copyFileSync("../../src/languages/en.js", "./languages/en.js");
if (baseLang !== "en") {
fs.copyFileSync(`../../src/languages/${baseLang}.js`, `./languages/${baseLang}.js`);
}
}
/**
* Update the specified language file
* @param {string} langCode Language code to update
* @param {string} baseLangCode Code of the second language to copy keys from
*/
async function updateLanguage(langCode, baseLangCode) {
const en = (await import("./languages/en.js")).default;
const baseLang = (await import(`./languages/${baseLangCode}.js`)).default;
let file = langCode + ".js";
console.log("Processing " + file);
const lang = await import("./languages/" + file);
let obj;
if (lang.default) {
obj = lang.default;
} else {
console.log("Empty file");
obj = {
languageName: "<Your Language name in your language (not in English)>"
};
}
// En first
for (const key in en) {
if (! obj[key]) {
obj[key] = en[key];
}
}
if (baseLang !== en) {
// Base second
for (const key in baseLang) {
if (! obj[key]) {
obj[key] = key;
}
}
}
const code = "export default " + util.inspect(obj, {
depth: null,
});
fs.writeFileSync(`../../src/languages/${file}`, code);
}
// Get command line arguments
const baseLangCode = process.env.npm_config_baselang || "en";
const langCode = process.env.npm_config_language;
// We need the file to edit
if (langCode == null) {
throw new Error("Argument --language=<code> must be provided");
}
console.log("Base Lang: " + baseLangCode);
console.log("Updating: " + langCode);
copyFiles(langCode, baseLangCode);
await updateLanguage(langCode, baseLangCode);
rmSync("./languages", { recursive: true });
console.log("Done. Fixing formatting by ESLint...");


@@ -1,12 +0,0 @@
{
"name": "update-language-files",
"type": "module",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "",
"license": "ISC"
}


@@ -1,75 +0,0 @@
const pkg = require("../package.json");
const fs = require("fs");
const childProcess = require("child_process");
const util = require("../src/util");
util.polyfill();
const newVersion = process.env.VERSION;
console.log("New Version: " + newVersion);
if (! newVersion) {
console.error("invalid version");
process.exit(1);
}
const exists = tagExists(newVersion);
if (! exists) {
// Process package.json
pkg.version = newVersion;
// Replace the version: https://regex101.com/r/hmj2Bc/1
pkg.scripts.setup = pkg.scripts.setup.replace(/(git checkout )([^\s]+)/, `$1${newVersion}`);
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n");
// Also update package-lock.json
childProcess.spawnSync("npm", [ "install" ]);
commit(newVersion);
tag(newVersion);
} else {
console.log("version exists");
}
/**
* Updates the version number in package.json and commits it to git.
* @param {string} version - The new version number
*
* Generated by Trelent
*/
function commit(version) {
let msg = "Update to " + version;
let res = childProcess.spawnSync("git", [ "commit", "-m", msg, "-a" ]);
let stdout = res.stdout.toString().trim();
console.log(stdout);
if (stdout.includes("no changes added to commit")) {
throw new Error("commit error");
}
}
function tag(version) {
let res = childProcess.spawnSync("git", [ "tag", version ]);
console.log(res.stdout.toString().trim());
}
/**
* Checks if a given version is already tagged in the git repository.
* @param {string} version - The version to check for.
*
* Generated by Trelent
*/
function tagExists(version) {
if (! version) {
throw new Error("invalid version");
}
let res = childProcess.spawnSync("git", [ "tag", "-l", version ]);
return res.stdout.toString().trim() === version;
}
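The version bump above hinges on the regex linked in the comment: it swaps whatever follows "git checkout " in the package.json setup script for the new tag. The replacement in isolation, on an illustrative setup string:

const setup = "git checkout 1.14.0 && npm ci --production && npm run download-dist";   // illustrative value
const nextVersion = "1.15.0";                                                          // illustrative value
console.log(setup.replace(/(git checkout )([^\s]+)/, `$1${nextVersion}`));
// -> git checkout 1.15.0 && npm ci --production && npm run download-dist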


@@ -1,48 +0,0 @@
const childProcess = require("child_process");
const fs = require("fs");
const newVersion = process.env.VERSION;
if (!newVersion) {
console.log("Missing version");
process.exit(1);
}
updateWiki(newVersion);
function updateWiki(newVersion) {
const wikiDir = "./tmp/wiki";
const howToUpdateFilename = "./tmp/wiki/🆙-How-to-Update.md";
safeDelete(wikiDir);
childProcess.spawnSync("git", [ "clone", "https://github.com/louislam/uptime-kuma.wiki.git", wikiDir ]);
let content = fs.readFileSync(howToUpdateFilename).toString();
// Replace the version: https://regex101.com/r/hmj2Bc/1
content = content.replace(/(git checkout )([^\s]+)/, `$1${newVersion}`);
fs.writeFileSync(howToUpdateFilename, content);
childProcess.spawnSync("git", [ "add", "-A" ], {
cwd: wikiDir,
});
childProcess.spawnSync("git", [ "commit", "-m", `Update to ${newVersion}` ], {
cwd: wikiDir,
});
console.log("Pushing to Github");
childProcess.spawnSync("git", [ "push" ], {
cwd: wikiDir,
});
safeDelete(wikiDir);
}
function safeDelete(dir) {
if (fs.existsSync(dir)) {
fs.rmSync(dir, {
recursive: true,
});
}
}


@@ -1,64 +0,0 @@
#!/usr/bin/env bash
#
# Author: Stefan Buck
# License: MIT
# https://gist.github.com/stefanbuck/ce788fee19ab6eb0b4447a85fc99f447
#
#
# This script accepts the following parameters:
#
# * owner
# * repo
# * tag
# * filename
# * github_api_token
#
# Script to upload a release asset using the GitHub API v3.
#
# Example:
#
# upload-github-release-asset.sh github_api_token=TOKEN owner=stefanbuck repo=playground tag=v0.1.0 filename=./build.zip
#
# Check dependencies.
set -e
xargs=$(which gxargs || which xargs)
# Validate settings.
[ "$TRACE" ] && set -x
CONFIG=$@
for line in $CONFIG; do
eval "$line"
done
# Define variables.
GH_API="https://api.github.com"
GH_REPO="$GH_API/repos/$owner/$repo"
GH_TAGS="$GH_REPO/releases/tags/$tag"
AUTH="Authorization: token $github_api_token"
WGET_ARGS="--content-disposition --auth-no-challenge --no-cookie"
CURL_ARGS="-LJO#"
if [[ "$tag" == 'LATEST' ]]; then
GH_TAGS="$GH_REPO/releases/latest"
fi
# Validate token.
curl -o /dev/null -sH "$AUTH" $GH_REPO || { echo "Error: Invalid repo, token or network issue!"; exit 1; }
# Read asset tags.
response=$(curl -sH "$AUTH" $GH_TAGS)
# Get ID of the asset based on given filename.
eval $(echo "$response" | grep -m 1 "id.:" | grep -w id | tr : = | tr -cd '[[:alnum:]]=')
[ "$id" ] || { echo "Error: Failed to get release id for tag: $tag"; echo "$response" | awk 'length($0)<100' >&2; exit 1; }
# Upload asset
echo "Uploading asset... "
# Construct url
GH_ASSET="https://uploads.github.com/repos/$owner/$repo/releases/$id/assets?name=$(basename $filename)"
curl "$GITHUB_OAUTH_BASIC" --data-binary @"$filename" -H "Authorization: token $github_api_token" -H "Content-Type: application/octet-stream" $GH_ASSET


@@ -1,17 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<link rel="apple-touch-icon" sizes="180x180" href="/apple-touch-icon.png">
<link rel="icon" type="image/svg+xml" href="/icon.svg" />
<link rel="manifest" href="/manifest.json" />
<meta name="theme-color" id="theme-color" content="" />
<meta name="description" content="Uptime Kuma monitoring tool" />
<title>Uptime Kuma</title>
</head>
<body>
<div id="app"></div>
<script type="module" src="/src/main.js"></script>
</body>
</html>

Some files were not shown because too many files have changed in this diff.