Compare commits

..

15 Commits

Author SHA1 Message Date
Niklas Meyer
640f535e99 Merge pull request #5019 from mailcow/staging
2023-01a
2023-01-25 16:29:22 +01:00
Niklas Meyer
05d1a974eb Merge pull request #5003 from mailcow/feat/acme-skip-ip-check
[Acme] Implemented IP Check Bypass properly
2023-01-25 16:10:11 +01:00
Niklas Meyer
99e38d81b1 Removed Integration Tests 2023-01-25 16:09:15 +01:00
FreddleSpl0it
ed7b384e24 [Web] fix queue btn showing undefined 2023-01-25 09:34:12 +01:00
FreddleSpl0it
5439ea1010 Merge branch 'staging' of https://github.com/mailcow/mailcow-dockerized into staging 2023-01-25 09:32:27 +01:00
FreddleSpl0it
b719982504 partial rollback of dockerapi 2023-01-25 09:31:22 +01:00
milkmaker
8281d3fa55 [Web] Updated lang.da-dk.json (#5020)
Co-authored-by: osos <osos@openeyes.dk>
2023-01-24 20:18:17 +01:00
FreddleSpl0it
9ba65a572e [Web] add missing template var for dadmins 2023-01-24 10:13:30 +01:00
FreddleSpl0it
afddcf7f3b replace nullnull.org with fuzzy.mailcow.email 2023-01-24 09:49:49 +01:00
Niklas Meyer
294569f5c9 Merge pull request #5015 from mailcow/feat/nc-install-fix
Fix nextcloud install
2023-01-22 16:17:18 +01:00
Peter
ef6452cf55 Fix installation of nextcloud 2023-01-22 15:06:36 +01:00
renovate[bot]
9af40eba10 Update dependency nextcloud/server to v25.0.3 (#4996)
Signed-off-by: milkmaker <milkmaker@mailcow.de>
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-01-20 15:37:12 +01:00
renovate[bot]
1b3a13ca19 Update alpine Docker tag to v3.17 (#4997)
Signed-off-by: milkmaker <milkmaker@mailcow.de>
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-01-20 15:36:52 +01:00
Patrick Schult
71cc607de6 Merge pull request #5006 from mailcow/staging
Revert Docker Compose detection commits
2023-01-19 16:04:50 +01:00
DerLinkman
5c57df4669 [Acme] Implemented IP Check Bypass properly 2023-01-16 10:10:20 +01:00
17 changed files with 327 additions and 484 deletions

View File

@@ -1,63 +0,0 @@
-name: mailcow Integration Tests
-on:
-  push:
-    branches: [ "master", "staging" ]
-  workflow_dispatch:
-
-permissions:
-  contents: read
-
-jobs:
-  integration_tests:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Setup Ansible
-        run: |
-          export DEBIAN_FRONTEND=noninteractive
-          sudo apt-get update
-          sudo apt-get install python3 python3-pip git
-          sudo pip3 install ansible
-      - name: Prepair Test Environment
-        run: |
-          git clone https://github.com/mailcow/mailcow-integration-tests.git --branch $(curl -sL https://api.github.com/repos/mailcow/mailcow-integration-tests/releases/latest | jq -r '.tag_name') --single-branch .
-          ./fork_check.sh
-          ./ci.sh
-          ./ci-pip-requirements.sh
-        env:
-          VAULT_PW: ${{ secrets.MAILCOW_TESTS_VAULT_PW }}
-          VAULT_FILE: ${{ secrets.MAILCOW_TESTS_VAULT_FILE }}
-      - name: Start Integration Test Server
-        run: |
-          ./fork_check.sh
-          ansible-playbook mailcow-start-server.yml --diff
-        env:
-          PY_COLORS: '1'
-          ANSIBLE_FORCE_COLOR: '1'
-          ANSIBLE_HOST_KEY_CHECKING: 'false'
-      - name: Setup Integration Test Server
-        run: |
-          ./fork_check.sh
-          sleep 30
-          ansible-playbook mailcow-setup-server.yml --private-key id_ssh_rsa --diff
-        env:
-          PY_COLORS: '1'
-          ANSIBLE_FORCE_COLOR: '1'
-          ANSIBLE_HOST_KEY_CHECKING: 'false'
-      - name: Run Integration Tests
-        run: |
-          ./fork_check.sh
-          ansible-playbook mailcow-integration-tests.yml --private-key id_ssh_rsa --diff
-        env:
-          PY_COLORS: '1'
-          ANSIBLE_FORCE_COLOR: '1'
-          ANSIBLE_HOST_KEY_CHECKING: 'false'
-      - name: Delete Integration Test Server
-        if: always()
-        run: |
-          ./fork_check.sh
-          ansible-playbook mailcow-delete-server.yml --diff
-        env:
-          PY_COLORS: '1'
-          ANSIBLE_FORCE_COLOR: '1'
-          ANSIBLE_HOST_KEY_CHECKING: 'false'

View File

@@ -1,6 +1,5 @@
 # mailcow: dockerized - 🐮 + 🐋 = 💕
-[![Mailcow Integration Tests](https://github.com/mailcow/mailcow-dockerized/actions/workflows/integration_tests.yml/badge.svg?branch=master)](https://github.com/mailcow/mailcow-dockerized/actions/workflows/integration_tests.yml)
 [![Translation status](https://translate.mailcow.email/widgets/mailcow-dockerized/-/translation/svg-badge.svg)](https://translate.mailcow.email/engage/mailcow-dockerized/)
 [![Twitter URL](https://img.shields.io/twitter/url/https/twitter.com/mailcow_email.svg?style=social&label=Follow%20%40mailcow_email)](https://twitter.com/mailcow_email)

View File

@@ -213,11 +213,13 @@ while true; do
 done
 ADDITIONAL_WC_ARR+=('autodiscover' 'autoconfig')

+if [[ ${SKIP_IP_CHECK} != "y" ]]; then
 # Start IP detection
 log_f "Detecting IP addresses..."
 IPV4=$(get_ipv4)
 IPV6=$(get_ipv6)
 log_f "OK: ${IPV4}, ${IPV6:-"0000:0000:0000:0000:0000:0000:0000:0000"}"
+fi

 #########################################
 # IP and webroot challenge verification #

View File

@@ -13,6 +13,7 @@ RUN apk add --update --no-cache python3 \
     fastapi \
     uvicorn \
     aiodocker \
+    docker \
     redis
 COPY docker-entrypoint.sh /app/
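Note: the added docker package is the synchronous Docker SDK for Python. A minimal sketch (illustrative, not taken from this changeset) of how such a client is typically used for the container actions that the dockerapi diff below rolls back to; the socket path, helper names and commands are assumptions.

import json
import docker

client = docker.DockerClient(base_url='unix://var/run/docker.sock', version='auto')

def restart_container(container_id):
    # an id filter returns at most one container
    for container in client.containers.list(all=True, filters={"id": container_id}):
        container.restart()
    return json.dumps({'type': 'success', 'msg': 'command completed successfully'})

def run_in_container(container_id, cmd, user='nobody'):
    # exec_run returns an ExecResult with .exit_code and .output
    for container in client.containers.list(filters={"id": container_id}):
        result = container.exec_run(cmd, user=user)
        if result.exit_code == 0:
            return result.output.decode('utf-8')
    return ''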

View File

@@ -1,5 +1,6 @@
 from fastapi import FastAPI, Response, Request
 import aiodocker
+import docker
 import psutil
 import sys
 import re
@@ -9,11 +10,38 @@ import json
 import asyncio
 import redis
 from datetime import datetime
+import logging
+from logging.config import dictConfig
+
+log_config = {
+  "version": 1,
+  "disable_existing_loggers": False,
+  "formatters": {
+    "default": {
+      "()": "uvicorn.logging.DefaultFormatter",
+      "fmt": "%(levelprefix)s %(asctime)s %(message)s",
+      "datefmt": "%Y-%m-%d %H:%M:%S",
+    },
+  },
+  "handlers": {
+    "default": {
+      "formatter": "default",
+      "class": "logging.StreamHandler",
+      "stream": "ext://sys.stderr",
+    },
+  },
+  "loggers": {
+    "api-logger": {"handlers": ["default"], "level": "INFO"},
+  },
+}
+dictConfig(log_config)

 containerIds_to_update = []
 host_stats_isUpdating = False
 app = FastAPI()
+logger = logging.getLogger('api-logger')

 @app.get("/host/stats")
@@ -21,18 +49,15 @@ async def get_host_update_stats():
   global host_stats_isUpdating

   if host_stats_isUpdating == False:
-    print("start host stats task")
     asyncio.create_task(get_host_stats())
     host_stats_isUpdating = True

   while True:
     if redis_client.exists('host_stats'):
       break
-    print("wait for host_stats results")
     await asyncio.sleep(1.5)

-  print("host stats pulled")
   stats = json.loads(redis_client.get('host_stats'))
   return Response(content=json.dumps(stats, indent=4), media_type="application/json")
@@ -106,14 +131,14 @@ async def post_containers(container_id : str, post_action : str, request: Reques
     else:
       api_call_method_name = '__'.join(['container_post', str(post_action) ])

-    docker_utils = DockerUtils(async_docker_client)
+    docker_utils = DockerUtils(sync_docker_client)
     api_call_method = getattr(docker_utils, api_call_method_name, lambda container_id: Response(content=json.dumps({'type': 'danger', 'msg':'container_post - unknown api call' }, indent=4), media_type="application/json"))

-    print("api call: %s, container_id: %s" % (api_call_method_name, container_id))
-    return await api_call_method(container_id, request_json)
+    logger.info("api call: %s, container_id: %s" % (api_call_method_name, container_id))
+    return api_call_method(container_id, request_json)
   except Exception as e:
-    print("error - container_post: %s" % str(e))
+    logger.error("error - container_post: %s" % str(e))

     res = {
       "type": "danger",
       "msg": str(e)
@@ -152,398 +177,289 @@ class DockerUtils:
self.docker_client = docker_client self.docker_client = docker_client
# api call: container_post - post_action: stop # api call: container_post - post_action: stop
async def container_post__stop(self, container_id, request_json): def container_post__stop(self, container_id, request_json):
for container in (await self.docker_client.containers.list()): for container in self.docker_client.containers.list(all=True, filters={"id": container_id}):
if container._id == container_id: container.stop()
await container.stop()
res = {
'type': 'success',
'msg': 'command completed successfully'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
res = { 'type': 'success', 'msg': 'command completed successfully'}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: start # api call: container_post - post_action: start
async def container_post__start(self, container_id, request_json): def container_post__start(self, container_id, request_json):
for container in (await self.docker_client.containers.list()): for container in self.docker_client.containers.list(all=True, filters={"id": container_id}):
if container._id == container_id: container.start()
await container.start()
res = { res = { 'type': 'success', 'msg': 'command completed successfully'}
'type': 'success',
'msg': 'command completed successfully'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json") return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: restart # api call: container_post - post_action: restart
async def container_post__restart(self, container_id, request_json): def container_post__restart(self, container_id, request_json):
for container in (await self.docker_client.containers.list()): for container in self.docker_client.containers.list(all=True, filters={"id": container_id}):
if container._id == container_id: container.restart()
await container.restart()
res = { res = { 'type': 'success', 'msg': 'command completed successfully'}
'type': 'success',
'msg': 'command completed successfully'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json") return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: top # api call: container_post - post_action: top
async def container_post__top(self, container_id, request_json): def container_post__top(self, container_id, request_json):
for container in (await self.docker_client.containers.list()): for container in self.docker_client.containers.list(all=True, filters={"id": container_id}):
if container._id == container_id: res = { 'type': 'success', 'msg': container.top()}
ps_exec = await container.exec("ps")
async with ps_exec.start(detach=False) as stream:
ps_return = await stream.read_out()
exec_details = await ps_exec.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
res = {
'type': 'success',
'msg': ps_return.data.decode('utf-8')
}
return Response(content=json.dumps(res, indent=4), media_type="application/json") return Response(content=json.dumps(res, indent=4), media_type="application/json")
else: # api call: container_post - post_action: stats
res = { def container_post__stats(self, container_id, request_json):
'type': 'danger', for container in self.docker_client.containers.list(all=True, filters={"id": container_id}):
'msg': '' for stat in container.stats(decode=True, stream=True):
} res = { 'type': 'success', 'msg': stat}
return Response(content=json.dumps(res, indent=4), media_type="application/json") return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: exec - cmd: mailq - task: delete # api call: container_post - post_action: exec - cmd: mailq - task: delete
async def container_post__exec__mailq__delete(self, container_id, request_json): def container_post__exec__mailq__delete(self, container_id, request_json):
if 'items' in request_json: if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$") r = re.compile("^[0-9a-fA-F]+$")
filtered_qids = filter(r.match, request_json['items']) filtered_qids = filter(r.match, request_json['items'])
if filtered_qids: if filtered_qids:
flagged_qids = ['-d %s' % i for i in filtered_qids] flagged_qids = ['-d %s' % i for i in filtered_qids]
sanitized_string = str(' '.join(flagged_qids)) sanitized_string = str(' '.join(flagged_qids));
for container in self.docker_client.containers.list(filters={"id": container_id}):
postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
return exec_run_handler('generic', postsuper_r)
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
return await exec_run_handler('generic', postsuper_r_exec)
# api call: container_post - post_action: exec - cmd: mailq - task: hold # api call: container_post - post_action: exec - cmd: mailq - task: hold
async def container_post__exec__mailq__hold(self, container_id, request_json): def container_post__exec__mailq__hold(self, container_id, request_json):
if 'items' in request_json: if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$") r = re.compile("^[0-9a-fA-F]+$")
filtered_qids = filter(r.match, request_json['items']) filtered_qids = filter(r.match, request_json['items'])
if filtered_qids: if filtered_qids:
flagged_qids = ['-h %s' % i for i in filtered_qids] flagged_qids = ['-h %s' % i for i in filtered_qids]
sanitized_string = str(' '.join(flagged_qids)) sanitized_string = str(' '.join(flagged_qids));
for container in self.docker_client.containers.list(filters={"id": container_id}):
for container in (await self.docker_client.containers.list()): postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
if container._id == container_id: return exec_run_handler('generic', postsuper_r)
postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
return await exec_run_handler('generic', postsuper_r_exec)
# api call: container_post - post_action: exec - cmd: mailq - task: cat # api call: container_post - post_action: exec - cmd: mailq - task: cat
async def container_post__exec__mailq__cat(self, container_id, request_json): def container_post__exec__mailq__cat(self, container_id, request_json):
if 'items' in request_json: if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$") r = re.compile("^[0-9a-fA-F]+$")
filtered_qids = filter(r.match, request_json['items']) filtered_qids = filter(r.match, request_json['items'])
if filtered_qids: if filtered_qids:
sanitized_string = str(' '.join(filtered_qids)) sanitized_string = str(' '.join(filtered_qids));
for container in (await self.docker_client.containers.list()): for container in self.docker_client.containers.list(filters={"id": container_id}):
if container._id == container_id: postcat_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postcat -q " + sanitized_string], user='postfix')
postcat_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postcat -q " + sanitized_string], user='postfix') if not postcat_return:
return await exec_run_handler('utf8_text_only', postcat_exec) postcat_return = 'err: invalid'
return exec_run_handler('utf8_text_only', postcat_return)
# api call: container_post - post_action: exec - cmd: mailq - task: unhold # api call: container_post - post_action: exec - cmd: mailq - task: unhold
async def container_post__exec__mailq__unhold(self, container_id, request_json): def container_post__exec__mailq__unhold(self, container_id, request_json):
if 'items' in request_json: if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$") r = re.compile("^[0-9a-fA-F]+$")
filtered_qids = filter(r.match, request_json['items']) filtered_qids = filter(r.match, request_json['items'])
if filtered_qids: if filtered_qids:
flagged_qids = ['-H %s' % i for i in filtered_qids] flagged_qids = ['-H %s' % i for i in filtered_qids]
sanitized_string = str(' '.join(flagged_qids)) sanitized_string = str(' '.join(flagged_qids));
for container in self.docker_client.containers.list(filters={"id": container_id}):
for container in (await self.docker_client.containers.list()): postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
if container._id == container_id: return exec_run_handler('generic', postsuper_r)
postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
return await exec_run_handler('generic', postsuper_r_exec)
# api call: container_post - post_action: exec - cmd: mailq - task: deliver # api call: container_post - post_action: exec - cmd: mailq - task: deliver
async def container_post__exec__mailq__deliver(self, container_id, request_json): def container_post__exec__mailq__deliver(self, container_id, request_json):
if 'items' in request_json: if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$") r = re.compile("^[0-9a-fA-F]+$")
filtered_qids = filter(r.match, request_json['items']) filtered_qids = filter(r.match, request_json['items'])
if filtered_qids: if filtered_qids:
flagged_qids = ['-i %s' % i for i in filtered_qids] flagged_qids = ['-i %s' % i for i in filtered_qids]
for container in self.docker_client.containers.list(filters={"id": container_id}):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
for i in flagged_qids: for i in flagged_qids:
postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postqueue " + i], user='postfix') postqueue_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postqueue " + i], user='postfix')
async with postsuper_r_exec.start(detach=False) as stream:
postsuper_r_return = await stream.read_out()
# todo: check each exit code # todo: check each exit code
res = { res = { 'type': 'success', 'msg': 'Scheduled immediate delivery'}
'type': 'success',
'msg': 'Scheduled immediate delivery'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json") return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: exec - cmd: mailq - task: list # api call: container_post - post_action: exec - cmd: mailq - task: list
async def container_post__exec__mailq__list(self, container_id, request_json): def container_post__exec__mailq__list(self, container_id, request_json):
for container in (await self.docker_client.containers.list()): for container in self.docker_client.containers.list(filters={"id": container_id}):
if container._id == container_id: mailq_return = container.exec_run(["/usr/sbin/postqueue", "-j"], user='postfix')
mailq_exec = await container.exec(["/usr/sbin/postqueue", "-j"], user='postfix') return exec_run_handler('utf8_text_only', mailq_return)
return await exec_run_handler('utf8_text_only', mailq_exec)
# api call: container_post - post_action: exec - cmd: mailq - task: flush # api call: container_post - post_action: exec - cmd: mailq - task: flush
async def container_post__exec__mailq__flush(self, container_id, request_json): def container_post__exec__mailq__flush(self, container_id, request_json):
for container in (await self.docker_client.containers.list()): for container in self.docker_client.containers.list(filters={"id": container_id}):
if container._id == container_id: postqueue_r = container.exec_run(["/usr/sbin/postqueue", "-f"], user='postfix')
postsuper_r_exec = await container.exec(["/usr/sbin/postqueue", "-f"], user='postfix') return exec_run_handler('generic', postqueue_r)
return await exec_run_handler('generic', postsuper_r_exec)
# api call: container_post - post_action: exec - cmd: mailq - task: super_delete # api call: container_post - post_action: exec - cmd: mailq - task: super_delete
async def container_post__exec__mailq__super_delete(self, container_id, request_json): def container_post__exec__mailq__super_delete(self, container_id, request_json):
for container in (await self.docker_client.containers.list()): for container in self.docker_client.containers.list(filters={"id": container_id}):
if container._id == container_id: postsuper_r = container.exec_run(["/usr/sbin/postsuper", "-d", "ALL"])
postsuper_r_exec = await container.exec(["/usr/sbin/postsuper", "-d", "ALL"]) return exec_run_handler('generic', postsuper_r)
return await exec_run_handler('generic', postsuper_r_exec)
# api call: container_post - post_action: exec - cmd: system - task: fts_rescan # api call: container_post - post_action: exec - cmd: system - task: fts_rescan
async def container_post__exec__system__fts_rescan(self, container_id, request_json): def container_post__exec__system__fts_rescan(self, container_id, request_json):
if 'username' in request_json: if 'username' in request_json:
for container in (await self.docker_client.containers.list()): for container in self.docker_client.containers.list(filters={"id": container_id}):
if container._id == container_id: rescan_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -u '" + request_json['username'].replace("'", "'\\''") + "'"], user='vmail')
rescan_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -u '" + request_json['username'].replace("'", "'\\''") + "'"], user='vmail') if rescan_return.exit_code == 0:
async with rescan_exec.start(detach=False) as stream: res = { 'type': 'success', 'msg': 'fts_rescan: rescan triggered'}
rescan_return = await stream.read_out()
exec_details = await rescan_exec.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
res = {
'type': 'success',
'msg': 'fts_rescan: rescan triggered'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json") return Response(content=json.dumps(res, indent=4), media_type="application/json")
else: else:
res = { res = { 'type': 'warning', 'msg': 'fts_rescan error'}
'type': 'warning',
'msg': 'fts_rescan error'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json") return Response(content=json.dumps(res, indent=4), media_type="application/json")
if 'all' in request_json: if 'all' in request_json:
for container in (await self.docker_client.containers.list()): for container in self.docker_client.containers.list(filters={"id": container_id}):
if container._id == container_id: rescan_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -A"], user='vmail')
rescan_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -A"], user='vmail') if rescan_return.exit_code == 0:
async with rescan_exec.start(detach=False) as stream: res = { 'type': 'success', 'msg': 'fts_rescan: rescan triggered'}
rescan_return = await stream.read_out()
exec_details = await rescan_exec.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
res = {
'type': 'success',
'msg': 'fts_rescan: rescan triggered'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json") return Response(content=json.dumps(res, indent=4), media_type="application/json")
else: else:
res = { res = { 'type': 'warning', 'msg': 'fts_rescan error'}
'type': 'warning',
'msg': 'fts_rescan error'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json") return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: exec - cmd: system - task: df # api call: container_post - post_action: exec - cmd: system - task: df
async def container_post__exec__system__df(self, container_id, request_json): def container_post__exec__system__df(self, container_id, request_json):
if 'dir' in request_json: if 'dir' in request_json:
for container in (await self.docker_client.containers.list()): for container in self.docker_client.containers.list(filters={"id": container_id}):
if container._id == container_id: df_return = container.exec_run(["/bin/bash", "-c", "/bin/df -H '" + request_json['dir'].replace("'", "'\\''") + "' | /usr/bin/tail -n1 | /usr/bin/tr -s [:blank:] | /usr/bin/tr ' ' ','"], user='nobody')
df_exec = await container.exec(["/bin/bash", "-c", "/bin/df -H '" + request_json['dir'].replace("'", "'\\''") + "' | /usr/bin/tail -n1 | /usr/bin/tr -s [:blank:] | /usr/bin/tr ' ' ','"], user='nobody') if df_return.exit_code == 0:
async with df_exec.start(detach=False) as stream: return df_return.output.decode('utf-8').rstrip()
df_return = await stream.read_out()
print(df_return)
print(await df_exec.inspect())
exec_details = await df_exec.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
return df_return.data.decode('utf-8').rstrip()
else: else:
return "0,0,0,0,0,0" return "0,0,0,0,0,0"
# api call: container_post - post_action: exec - cmd: system - task: mysql_upgrade # api call: container_post - post_action: exec - cmd: system - task: mysql_upgrade
async def container_post__exec__system__mysql_upgrade(self, container_id, request_json): def container_post__exec__system__mysql_upgrade(self, container_id, request_json):
for container in (await self.docker_client.containers.list()): for container in self.docker_client.containers.list(filters={"id": container_id}):
if container._id == container_id: sql_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/mysql_upgrade -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "'\n"], user='mysql')
sql_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/mysql_upgrade -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "'\n"], user='mysql') if sql_return.exit_code == 0:
async with sql_exec.start(detach=False) as stream:
sql_return = await stream.read_out()
exec_details = await sql_exec.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
matched = False matched = False
for line in sql_return.data.decode('utf-8').split("\n"): for line in sql_return.output.decode('utf-8').split("\n"):
if 'is already upgraded to' in line: if 'is already upgraded to' in line:
matched = True matched = True
if matched: if matched:
res = { res = { 'type': 'success', 'msg':'mysql_upgrade: already upgraded', 'text': sql_return.output.decode('utf-8')}
'type': 'success',
'msg': 'mysql_upgrade: already upgraded',
'text': sql_return.data.decode('utf-8')
}
return Response(content=json.dumps(res, indent=4), media_type="application/json") return Response(content=json.dumps(res, indent=4), media_type="application/json")
else: else:
await container.restart() container.restart()
res = { res = { 'type': 'warning', 'msg':'mysql_upgrade: upgrade was applied', 'text': sql_return.output.decode('utf-8')}
'type': 'warning',
'msg': 'mysql_upgrade: upgrade was applied',
'text': sql_return.data.decode('utf-8')
}
return Response(content=json.dumps(res, indent=4), media_type="application/json") return Response(content=json.dumps(res, indent=4), media_type="application/json")
else: else:
res = { res = { 'type': 'error', 'msg': 'mysql_upgrade: error running command', 'text': sql_return.output.decode('utf-8')}
'type': 'error',
'msg': 'mysql_upgrade: error running command',
'text': sql_return.data.decode('utf-8')
}
return Response(content=json.dumps(res, indent=4), media_type="application/json") return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: exec - cmd: system - task: mysql_tzinfo_to_sql # api call: container_post - post_action: exec - cmd: system - task: mysql_tzinfo_to_sql
async def container_post__exec__system__mysql_tzinfo_to_sql(self, container_id, request_json): def container_post__exec__system__mysql_tzinfo_to_sql(self, container_id, request_json):
for container in (await self.docker_client.containers.list()): for container in self.docker_client.containers.list(filters={"id": container_id}):
if container._id == container_id: sql_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/mysql_tzinfo_to_sql /usr/share/zoneinfo | /bin/sed 's/Local time zone must be set--see zic manual page/FCTY/' | /usr/bin/mysql -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "' mysql \n"], user='mysql')
sql_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/mysql_tzinfo_to_sql /usr/share/zoneinfo | /bin/sed 's/Local time zone must be set--see zic manual page/FCTY/' | /usr/bin/mysql -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "' mysql \n"], user='mysql') if sql_return.exit_code == 0:
async with sql_exec.start(detach=False) as stream: res = { 'type': 'info', 'msg': 'mysql_tzinfo_to_sql: command completed successfully', 'text': sql_return.output.decode('utf-8')}
sql_return = await stream.read_out()
exec_details = await sql_exec.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
res = {
'type': 'info',
'msg': 'mysql_tzinfo_to_sql: command completed successfully',
'text': sql_return.data.decode('utf-8')
}
return Response(content=json.dumps(res, indent=4), media_type="application/json") return Response(content=json.dumps(res, indent=4), media_type="application/json")
else: else:
res = { res = { 'type': 'error', 'msg': 'mysql_tzinfo_to_sql: error running command', 'text': sql_return.output.decode('utf-8')}
'type': 'error',
'msg': 'mysql_tzinfo_to_sql: error running command',
'text': sql_return.data.decode('utf-8')
}
return Response(content=json.dumps(res, indent=4), media_type="application/json") return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: exec - cmd: reload - task: dovecot # api call: container_post - post_action: exec - cmd: reload - task: dovecot
async def container_post__exec__reload__dovecot(self, container_id, request_json): def container_post__exec__reload__dovecot(self, container_id, request_json):
for container in (await self.docker_client.containers.list()): for container in self.docker_client.containers.list(filters={"id": container_id}):
if container._id == container_id: reload_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/dovecot reload"])
reload_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/dovecot reload"]) return exec_run_handler('generic', reload_return)
return await exec_run_handler('generic', reload_exec)
# api call: container_post - post_action: exec - cmd: reload - task: postfix # api call: container_post - post_action: exec - cmd: reload - task: postfix
async def container_post__exec__reload__postfix(self, container_id, request_json): def container_post__exec__reload__postfix(self, container_id, request_json):
for container in (await self.docker_client.containers.list()): for container in self.docker_client.containers.list(filters={"id": container_id}):
if container._id == container_id: reload_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postfix reload"])
reload_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postfix reload"]) return exec_run_handler('generic', reload_return)
return await exec_run_handler('generic', reload_exec)
# api call: container_post - post_action: exec - cmd: reload - task: nginx # api call: container_post - post_action: exec - cmd: reload - task: nginx
async def container_post__exec__reload__nginx(self, container_id, request_json): def container_post__exec__reload__nginx(self, container_id, request_json):
for container in (await self.docker_client.containers.list()): for container in self.docker_client.containers.list(filters={"id": container_id}):
if container._id == container_id: reload_return = container.exec_run(["/bin/sh", "-c", "/usr/sbin/nginx -s reload"])
reload_exec = await container.exec(["/bin/sh", "-c", "/usr/sbin/nginx -s reload"]) return exec_run_handler('generic', reload_return)
return await exec_run_handler('generic', reload_exec)
# api call: container_post - post_action: exec - cmd: sieve - task: list # api call: container_post - post_action: exec - cmd: sieve - task: list
async def container_post__exec__sieve__list(self, container_id, request_json): def container_post__exec__sieve__list(self, container_id, request_json):
if 'username' in request_json: if 'username' in request_json:
for container in (await self.docker_client.containers.list()): for container in self.docker_client.containers.list(filters={"id": container_id}):
if container._id == container_id: sieve_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm sieve list -u '" + request_json['username'].replace("'", "'\\''") + "'"])
sieve_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/doveadm sieve list -u '" + request_json['username'].replace("'", "'\\''") + "'"]) return exec_run_handler('utf8_text_only', sieve_return)
return await exec_run_handler('utf8_text_only', sieve_exec)
# api call: container_post - post_action: exec - cmd: sieve - task: print # api call: container_post - post_action: exec - cmd: sieve - task: print
async def container_post__exec__sieve__print(self, container_id, request_json): def container_post__exec__sieve__print(self, container_id, request_json):
if 'username' in request_json and 'script_name' in request_json: if 'username' in request.json and 'script_name' in request_json:
for container in (await self.docker_client.containers.list()): for container in self.docker_client.containers.list(filters={"id": container_id}):
if container._id == container_id:
cmd = ["/bin/bash", "-c", "/usr/bin/doveadm sieve get -u '" + request_json['username'].replace("'", "'\\''") + "' '" + request_json['script_name'].replace("'", "'\\''") + "'"] cmd = ["/bin/bash", "-c", "/usr/bin/doveadm sieve get -u '" + request_json['username'].replace("'", "'\\''") + "' '" + request_json['script_name'].replace("'", "'\\''") + "'"]
sieve_exec = await container.exec(cmd) sieve_return = container.exec_run(cmd)
return await exec_run_handler('utf8_text_only', sieve_exec) return exec_run_handler('utf8_text_only', sieve_return)
# api call: container_post - post_action: exec - cmd: maildir - task: cleanup # api call: container_post - post_action: exec - cmd: maildir - task: cleanup
async def container_post__exec__maildir__cleanup(self, container_id, request_json): def container_post__exec__maildir__cleanup(self, container_id, request_json):
if 'maildir' in request_json: if 'maildir' in request_json:
for container in (await self.docker_client.containers.list()): for container in self.docker_client.containers.list(filters={"id": container_id}):
if container._id == container_id:
sane_name = re.sub(r'\W+', '', request_json['maildir']) sane_name = re.sub(r'\W+', '', request_json['maildir'])
cmd = ["/bin/bash", "-c", "if [[ -d '/var/vmail/" + request_json['maildir'].replace("'", "'\\''") + "' ]]; then /bin/mv '/var/vmail/" + request_json['maildir'].replace("'", "'\\''") + "' '/var/vmail/_garbage/" + str(int(time.time())) + "_" + sane_name + "'; fi"] cmd = ["/bin/bash", "-c", "if [[ -d '/var/vmail/" + request_json['maildir'].replace("'", "'\\''") + "' ]]; then /bin/mv '/var/vmail/" + request_json['maildir'].replace("'", "'\\''") + "' '/var/vmail/_garbage/" + str(int(time.time())) + "_" + sane_name + "'; fi"]
maildir_cleanup_exec = await container.exec(cmd, user='vmail') maildir_cleanup = container.exec_run(cmd, user='vmail')
return await exec_run_handler('generic', maildir_cleanup_exec) return exec_run_handler('generic', maildir_cleanup)
# api call: container_post - post_action: exec - cmd: rspamd - task: worker_password # api call: container_post - post_action: exec - cmd: rspamd - task: worker_password
async def container_post__exec__rspamd__worker_password(self, container_id, request_json): def container_post__exec__rspamd__worker_password(self, container_id, request_json):
if 'raw' in request_json: if 'raw' in request_json:
for container in (await self.docker_client.containers.list()): for container in self.docker_client.containers.list(filters={"id": container_id}):
if container._id == container_id: cmd = "/usr/bin/rspamadm pw -e -p '" + request_json['raw'].replace("'", "'\\''") + "' 2> /dev/null"
cmd_response = exec_cmd_container(container, cmd, user="_rspamd")
cmd = "./set_worker_password.sh '" + request_json['raw'].replace("'", "'\\''") + "' 2> /dev/null"
rspamd_password_exec = await container.exec(cmd, user='_rspamd')
async with rspamd_password_exec.start(detach=False) as stream:
rspamd_password_return = await stream.read_out()
matched = False matched = False
if "OK" in rspamd_password_return.data.decode('utf-8'): for line in cmd_response.split("\n"):
if '$2$' in line:
hash = line.strip()
hash_out = re.search('\$2\$.+$', hash).group(0)
rspamd_passphrase_hash = re.sub('[^0-9a-zA-Z\$]+', '', hash_out.rstrip())
rspamd_password_filename = "/etc/rspamd/override.d/worker-controller-password.inc"
cmd = '''/bin/echo 'enable_password = "%s";' > %s && cat %s''' % (rspamd_passphrase_hash, rspamd_password_filename, rspamd_password_filename)
cmd_response = exec_cmd_container(container, cmd, user="_rspamd")
if rspamd_passphrase_hash.startswith("$2$") and rspamd_passphrase_hash in cmd_response:
container.restart()
matched = True matched = True
await container.restart()
if matched: if matched:
res = { res = { 'type': 'success', 'msg': 'command completed successfully' }
'type': 'success', logger.info('success changing Rspamd password')
'msg': 'command completed successfully'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json") return Response(content=json.dumps(res, indent=4), media_type="application/json")
else: else:
res = { logger.error('failed changing Rspamd password')
'type': 'danger', res = { 'type': 'danger', 'msg': 'command did not complete' }
'msg': 'command did not complete'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json") return Response(content=json.dumps(res, indent=4), media_type="application/json")
def exec_cmd_container(container, cmd, user, timeout=2, shell_cmd="/bin/bash"):
async def exec_run_handler(type, exec_obj): def recv_socket_data(c_socket, timeout):
async with exec_obj.start(detach=False) as stream: c_socket.setblocking(0)
exec_return = await stream.read_out() total_data=[]
data=''
if exec_return == None: begin=time.time()
exec_return = "" while True:
if total_data and time.time()-begin > timeout:
break
elif time.time()-begin > timeout*2:
break
try:
data = c_socket.recv(8192)
if data:
total_data.append(data.decode('utf-8'))
#change the beginning time for measurement
begin=time.time()
else: else:
exec_return = exec_return.data.decode('utf-8') #sleep for sometime to indicate a gap
time.sleep(0.1)
break
except:
pass
return ''.join(total_data)
try :
socket = container.exec_run([shell_cmd], stdin=True, socket=True, user=user).output._sock
if not cmd.endswith("\n"):
cmd = cmd + "\n"
socket.send(cmd.encode('utf-8'))
data = recv_socket_data(socket, timeout)
socket.close()
return data
except Exception as e:
logger.error("error - exec_cmd_container: %s" % str(e))
traceback.print_exc(file=sys.stdout)
def exec_run_handler(type, output):
if type == 'generic': if type == 'generic':
exec_details = await exec_obj.inspect() if output.exit_code == 0:
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0: res = { 'type': 'success', 'msg': 'command completed successfully' }
res = {
"type": "success",
"msg": "command completed successfully"
}
return Response(content=json.dumps(res, indent=4), media_type="application/json") return Response(content=json.dumps(res, indent=4), media_type="application/json")
else: else:
res = { res = { 'type': 'danger', 'msg': 'command failed: ' + output.output.decode('utf-8') }
"type": "success",
"msg": "'command failed: " + exec_return
}
return Response(content=json.dumps(res, indent=4), media_type="application/json") return Response(content=json.dumps(res, indent=4), media_type="application/json")
if type == 'utf8_text_only': if type == 'utf8_text_only':
return Response(content=exec_return, media_type="text/plain") return Response(content=output.output.decode('utf-8'), media_type="text/plain")
async def get_host_stats(wait=5): async def get_host_stats(wait=5):
global host_stats_isUpdating global host_stats_isUpdating
@@ -570,12 +486,10 @@ async def get_host_stats(wait=5):
         "type": "danger",
         "msg": str(e)
       }
-      print(json.dumps(res, indent=4))

     await asyncio.sleep(wait)
   host_stats_isUpdating = False

 async def get_container_stats(container_id, wait=5, stop=False):
   global containerIds_to_update
@@ -598,13 +512,11 @@ async def get_container_stats(container_id, wait=5, stop=False):
         "type": "danger",
         "msg": str(e)
       }
-      print(json.dumps(res, indent=4))
   else:
     res = {
       "type": "danger",
       "msg": "no or invalid id defined"
     }
-    print(json.dumps(res, indent=4))

   await asyncio.sleep(wait)
   if stop == True:
@@ -615,9 +527,13 @@ async def get_container_stats(container_id, wait=5, stop=False):
     await get_container_stats(container_id, wait=0, stop=True)

 if os.environ['REDIS_SLAVEOF_IP'] != "":
   redis_client = redis.Redis(host=os.environ['REDIS_SLAVEOF_IP'], port=os.environ['REDIS_SLAVEOF_PORT'], db=0)
 else:
   redis_client = redis.Redis(host='redis-mailcow', port=6379, db=0)

+sync_docker_client = docker.DockerClient(base_url='unix://var/run/docker.sock', version='auto')
 async_docker_client = aiodocker.Docker(url='unix:///var/run/docker.sock')

+logger.info('DockerApi started')
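Note: the rolled-back dockerapi also introduces an exec_cmd_container helper that attaches a socket to an interactive shell inside a container and reads the reply with a timeout (used for the Rspamd worker password change in the hunk above). A condensed sketch of that pattern, with the non-blocking read loop and error handling simplified; the function name and timeout value are assumptions.

import time

def exec_shell_command(container, cmd, user, timeout=2, shell_cmd="/bin/bash"):
    # attach a raw socket to an interactive shell inside the container
    sock = container.exec_run([shell_cmd], stdin=True, socket=True, user=user).output._sock
    if not cmd.endswith("\n"):
        cmd += "\n"
    sock.send(cmd.encode('utf-8'))
    sock.setblocking(False)
    chunks = []
    begin = time.time()
    while time.time() - begin < timeout:
        try:
            data = sock.recv(8192)
            if data:
                chunks.append(data.decode('utf-8'))
            else:
                break
        except BlockingIOError:
            time.sleep(0.1)  # nothing to read yet, wait a bit
    sock.close()
    return ''.join(chunks)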

View File

@@ -175,7 +175,7 @@ BAD_SUBJECT_00 {
   type = "header";
   header = "subject";
   regexp = true;
-  map = "http://nullnull.org/bad-subject-regex.txt";
+  map = "http://fuzzy.mailcow.email/bad-subject-regex.txt";
   score = 6.0;
   symbols_set = ["BAD_SUBJECT_00"];
 }

View File

@@ -2067,16 +2067,13 @@ jQuery(function($){
         '<a href="#" data-action="delete_selected" data-id="single-syncjob" data-api-url="delete/syncjob" data-item="' + item.id + '" class="btn btn-sm btn-xs-half btn-danger"><i class="bi bi-trash"></i> ' + lang.remove + '</a>' +
         '</div>';
       item.chkbox = '<input type="checkbox" data-id="syncjob" name="multi_select" value="' + item.id + '" />';
-      if (item.is_running == 1 && item.active == 1) {
+      if (item.is_running == 1) {
         item.is_running = '<span id="active-script" class="badge fs-6 bg-success">' + lang.running + '</span>';
-      } else if (item.is_running == 0 && item.active == 1) {
-        item.is_running = '<span id="inactive-script" class="badge fs-6 bg-warning">' + lang.waiting + '</span>';
       } else {
-        item.is_running = '<span id="inactive-script" class="badge fs-6 bg-danger">' + lang.inactive + '</span>';
+        item.is_running = '<span id="inactive-script" class="badge fs-6 bg-warning">' + lang.waiting + '</span>';
       }
-      if (!item.last_run) {
-        item.last_run = lang.never;
+      if (!item.last_run > 0) {
+        item.last_run = lang.waiting;
       }
       if (item.success == null) {
         item.success = '-';
@@ -2145,17 +2142,19 @@ jQuery(function($){
         data: 'log',
         defaultContent: ''
       },
+      {
+        title: lang.active,
+        data: 'active',
+        defaultContent: '',
+        render: function (data, type) {
+          return 1==data?'<i class="bi bi-check-lg"></i>':0==data&&'<i class="bi bi-x-lg"></i>';
+        }
+      },
       {
         title: lang.status,
         data: 'is_running',
         defaultContent: ''
       },
-      {
-        title: lang.encryption,
-        data: 'enc1',
-        defaultContent: '',
-        className: 'none'
-      },
       {
         title: lang.excludes,
         data: 'exclude',

View File

@@ -21,7 +21,6 @@ jQuery(function($){
         url: '/api/v1/get/postcat/' + button.data('queue-id'),
         dataType: 'text',
         complete: function (data) {
-          console.log(data);
           $('#queue_msg_content').text(data.responseText);
         }
       });
@@ -54,7 +53,7 @@ jQuery(function($){
         });
         item.recipients = rcpts.join('<hr style="margin:1px!important">');
         item.action = '<div class="btn-group">' +
-          '<a href="#" data-bs-toggle="modal" data-bs-target="#showQueuedMsg" data-queue-id="' + encodeURI(item.queue_id) + '" class="btn btn-xs btn-secondary">' + lang.queue_show_message + '</a>' +
+          '<a href="#" data-bs-toggle="modal" data-bs-target="#showQueuedMsg" data-queue-id="' + encodeURI(item.queue_id) + '" class="btn btn-xs btn-secondary">' + lang.show_message + '</a>' +
           '</div>';
       });
       return data;

View File

@@ -127,13 +127,6 @@ jQuery(function($){
     }
   }

-  $(".refresh_table").on('click', function(e) {
-    e.preventDefault();
-    var table_name = $(this).data('table');
-    if ($.fn.DataTable.isDataTable('#' + table_name))
-      $('#' + table_name).DataTable().ajax.reload();
-  });

   function draw_tla_table() {
     // just recalc width if instance already exists
     if ($.fn.DataTable.isDataTable('#tla_table') ) {
@@ -240,6 +233,7 @@ jQuery(function($){
         type: "GET",
         url: '/api/v1/get/syncjobs/' + encodeURIComponent(mailcow_cc_username) + '/no_log',
         dataSrc: function(data){
+          console.log(data);
           $.each(data, function (i, item) {
             item.user1 = escapeHtml(item.user1);
             item.log = '<a href="#syncjobLogModal" data-bs-toggle="modal" data-syncjob-id="' + item.id + '">' + lang.open_logs + '</a>'
@@ -260,15 +254,13 @@ jQuery(function($){
             item.action = '<span>-</span>';
             item.chkbox = '<input type="checkbox" disabled />';
           }
-          if (item.is_running == 1 && item.active == 1) {
+          if (item.is_running == 1) {
             item.is_running = '<span id="active-script" class="badge fs-6 bg-success">' + lang.running + '</span>';
-          } else if (item.is_running == 0 && item.active == 1) {
-            item.is_running = '<span id="inactive-script" class="badge fs-6 bg-warning">' + lang.waiting + '</span>';
           } else {
-            item.is_running = '<span id="disabled-script" class="badge fs-6 bg-danger">' + lang.inactive + '</span>';
+            item.is_running = '<span id="inactive-script" class="badge fs-6 bg-warning">' + lang.waiting + '</span>';
           }
-          if (!item.last_run) {
-            item.last_run = lang.never;
+          if (!item.last_run > 0) {
+            item.last_run = lang.waiting;
           }
           if (item.success == null) {
             item.success = '-';
@@ -337,6 +329,14 @@ jQuery(function($){
           data: 'log',
           defaultContent: ''
         },
+        {
+          title: lang.active,
+          data: 'active',
+          defaultContent: '',
+          render: function (data, type) {
+            return 1==data?'<i class="bi bi-check-lg"></i>':0==data&&'<i class="bi bi-x-lg"></i>'
+          }
+        },
         {
           title: lang.status,
           data: 'is_running',
@@ -346,20 +346,17 @@ jQuery(function($){
         {
           title: lang.encryption,
           data: 'enc1',
-          defaultContent: '',
-          className: 'none'
+          defaultContent: ''
         },
         {
           title: lang.excludes,
           data: 'exclude',
-          defaultContent: '',
-          className: 'none'
+          defaultContent: ''
         },
         {
           title: lang.interval + " (min)",
           data: 'mins_interval',
-          defaultContent: '',
-          className: 'none'
+          defaultContent: ''
         },
         {
           title: lang.action,

View File

@@ -1,6 +1,6 @@
 {
   "acl": {
-    "alias_domains": "Tilføj kældenavn domæner",
+    "alias_domains": "Tilføj domænealias",
     "app_passwds": "Administrer app-adgangskoder",
     "bcc_maps": "BCC kort",
     "delimiter_action": "Afgrænsning handling",
@@ -22,9 +22,9 @@
     "spam_alias": "Midlertidige aliasser",
     "spam_policy": "Sortliste / hvidliste",
     "spam_score": "Spam-score",
-    "syncjobs": "Synkroniser job",
+    "syncjobs": "Synkroniserings job",
     "tls_policy": "TLS politik",
-    "unlimited_quota": "Ubegrænset quote for mailbokse",
+    "unlimited_quota": "Ubegrænset plads for mailbokse",
     "domain_desc": "Skift domæne beskrivelse"
   },
   "add": {

View File

@@ -771,7 +771,6 @@
     "edit": "Bearbeiten",
     "empty": "Keine Einträge vorhanden",
     "enable_x": "Aktivieren",
-    "encryption": "Verschlüsselung",
     "excludes": "Ausschlüsse",
     "filter_table": "Filtern",
     "filters": "Filter",
@@ -1176,7 +1175,6 @@
     "recent_successful_connections": "Kürzlich erfolgreiche Verbindungen",
     "remove": "Entfernen",
     "running": "Wird ausgeführt",
-    "inactive": "Inaktiv",
     "save": "Änderungen speichern",
     "save_changes": "Änderungen speichern",
     "sender_acl_disabled": "<span class=\"badge fs-6 bg-danger\">Absenderprüfung deaktiviert</span>",
@@ -1222,7 +1220,7 @@
     "user_settings": "Benutzereinstellungen",
     "username": "Benutzername",
     "verify": "Verifizieren",
-    "waiting": "Wartend",
+    "waiting": "Warte auf Ausführung",
     "week": "Woche",
     "weekly": "Wöchentlich",
     "weeks": "Wochen",

View File

@@ -779,7 +779,6 @@
     "edit": "Edit",
     "empty": "No results",
     "enable_x": "Enable",
-    "encryption": "Encryption",
     "excludes": "Excludes",
     "filter_table": "Filter table",
     "filters": "Filters",
@@ -1144,7 +1143,6 @@
     "hour": "hour",
     "hourly": "Hourly",
     "hours": "hours",
-    "inactive": "Inactive",
     "in_use": "Used",
     "interval": "Interval",
     "is_catch_all": "Catch-all for domain/s",

View File

@@ -4,11 +4,7 @@
 <button class="btn d-md-none flex-grow-1 text-start" data-bs-target="#collapse-tab-Syncjobs" data-bs-toggle="collapse" aria-controls="collapse-tab-Syncjobs">
   {{ lang.user.sync_jobs }}
 </button>
-<span class="d-none d-md-block">{{ lang.user.sync_jobs }} <span class="badge bg-info table-lines"></span></span>
-<div class="btn-group ms-auto d-flex">
-  <button class="btn btn-xs btn-secondary refresh_table" data-draw="draw_sync_job_table" data-table="sync_job_table">{{ lang.admin.refresh }}</button>
-</div>
+<span class="d-none d-md-block">{{ lang.user.sync_jobs }}
 </div>
 <div id="collapse-tab-Syncjobs" class="card-body collapse" data-bs-parent="#user-content">
   <div class="mass-actions-user mb-4">

View File

@@ -20,6 +20,7 @@ if (isset($_SESSION['mailcow_cc_role']) && $_SESSION['mailcow_cc_role'] == 'doma
     'tfa_data' => $tfa_data,
     'fido2_data' => $fido2_data,
     'lang_user' => json_encode($lang['user']),
+    'lang_datatables' => json_encode($lang['datatables']),
   ];
 }
 elseif (isset($_SESSION['mailcow_cc_role']) && $_SESSION['mailcow_cc_role'] == 'user') {

View File

@@ -389,7 +389,7 @@ services:
   acme-mailcow:
     depends_on:
       - nginx-mailcow
-    image: mailcow/acme:1.83
+    image: mailcow/acme:1.84
     dns:
       - ${IPV4_NETWORK:-172.22.1}.254
     environment:
@@ -510,7 +510,7 @@ services:
       - watchdog

   dockerapi-mailcow:
-    image: mailcow/dockerapi:2.0
+    image: mailcow/dockerapi:2.01
     security_opt:
       - label=disable
     restart: always

View File

@@ -26,6 +26,6 @@ services:
       - /var/run/mysqld/mysqld.sock:/var/run/mysqld/mysqld.sock

   mysql-mailcow:
-    image: alpine:3.10
+    image: alpine:3.17
     command: /bin/true
     restart: "no"

View File

@@ -1,6 +1,6 @@
 #!/usr/bin/env bash
 # renovate: datasource=github-releases depName=nextcloud/server versioning=semver extractVersion=^v(?<version>.*)$
-NEXTCLOUD_VERSION=25.0.2
+NEXTCLOUD_VERSION=25.0.3

 for bin in curl dirmngr; do
   if [[ -z $(which ${bin}) ]]; then echo "Cannot find ${bin}, exiting..."; exit 1; fi
@@ -46,22 +46,22 @@ if [[ ${NC_PURGE} == "y" ]]; then
echo -e "\033[33mDetecting Database information...\033[0m" echo -e "\033[33mDetecting Database information...\033[0m"
if [[ $(docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "Show databases" | grep "nextcloud") ]]; then if [[ $(docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "Show databases" | grep "nextcloud") ]]; then
echo -e "\033[32mFound seperate nextcloud Database (newer scheme)!\033[0m" echo -e "\033[32mFound seperate Nextcloud database (newer scheme)!\033[0m"
echo -e "\033[31mPurging...\033[0m" echo -e "\033[31mPurging...\033[0m"
docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "DROP DATABASE nextcloud;" > /dev/null docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "DROP DATABASE nextcloud;" > /dev/null
docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "DROP USER 'nextcloud'@'%';" > /dev/null docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "DROP USER 'nextcloud'@'%';" > /dev/null
elif [[ $(docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} mailcow -e "SHOW TABLES LIKE 'oc_%'") && $? -eq 0 ]]; then elif [[ $(docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} mailcow -e "SHOW TABLES LIKE 'oc_%'") && $? -eq 0 ]]; then
echo -e "\033[32mFound nextcloud (oc) tables inside of mailcow Database (old scheme)!\033[0m" echo -e "\033[32mFound Nextcloud (oc) tables inside of mailcow database (old scheme)!\033[0m"
echo -e "\033[31mPurging...\033[0m" echo -e "\033[31mPurging...\033[0m"
docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e \ docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e \
"$(docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "SELECT IFNULL(GROUP_CONCAT('DROP TABLE ', TABLE_SCHEMA, '.', TABLE_NAME SEPARATOR ';'),'SELECT NULL;') FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME LIKE 'oc_%' AND TABLE_SCHEMA = '${DBNAME}';" -BN)" > /dev/null "$(docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "SELECT IFNULL(GROUP_CONCAT('DROP TABLE ', TABLE_SCHEMA, '.', TABLE_NAME SEPARATOR ';'),'SELECT NULL;') FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME LIKE 'oc_%' AND TABLE_SCHEMA = '${DBNAME}';" -BN)" > /dev/null
elif [[ $(docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} mailcow -e "SHOW TABLES LIKE 'nc_%'") && $? -eq 0 ]]; then elif [[ $(docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} mailcow -e "SHOW TABLES LIKE 'nc_%'") && $? -eq 0 ]]; then
echo -e "\033[32mFound nextcloud (nc) tables inside of mailcow Database (old scheme)!\033[0m" echo -e "\033[32mFound Nextcloud (nc) tables inside of mailcow database (old scheme)!\033[0m"
echo -e "\033[31mPurging...\033[0m" echo -e "\033[31mPurging...\033[0m"
docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e \ docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e \
"$(docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "SELECT IFNULL(GROUP_CONCAT('DROP TABLE ', TABLE_SCHEMA, '.', TABLE_NAME SEPARATOR ';'),'SELECT NULL;') FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME LIKE 'nc_%' AND TABLE_SCHEMA = '${DBNAME}';" -BN)" > /dev/null "$(docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "SELECT IFNULL(GROUP_CONCAT('DROP TABLE ', TABLE_SCHEMA, '.', TABLE_NAME SEPARATOR ';'),'SELECT NULL;') FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME LIKE 'nc_%' AND TABLE_SCHEMA = '${DBNAME}';" -BN)" > /dev/null
else else
echo -e "\033[31mError: No Nextcloud Databases/Tables found!" echo -e "\033[31mError: No Nextcloud databases/tables found!"
echo -e "\033[33mNot purging anything...\033[0m" echo -e "\033[33mNot purging anything...\033[0m"
exit 1 exit 1
fi fi
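The purge branch above has to recognise two historical layouts: newer installs use a dedicated nextcloud database, while older ones created oc_- or nc_-prefixed tables inside the mailcow database. A condensed sketch of that decision tree (container lookup and ${DBROOT} as in the script; the echo messages are placeholders):

    MYSQL=$(docker ps -qf name=mysql-mailcow)
    if docker exec "${MYSQL}" mysql -uroot -p"${DBROOT}" -e "SHOW DATABASES" | grep -q "nextcloud"; then
      echo "separate nextcloud database (newer scheme)"
    elif docker exec "${MYSQL}" mysql -uroot -p"${DBROOT}" mailcow -e "SHOW TABLES LIKE 'oc_%'" | grep -q "oc_"; then
      echo "oc_ tables inside the mailcow database (old scheme)"
    elif docker exec "${MYSQL}" mysql -uroot -p"${DBROOT}" mailcow -e "SHOW TABLES LIKE 'nc_%'" | grep -q "nc_"; then
      echo "nc_ tables inside the mailcow database (old scheme)"
    else
      echo "no Nextcloud data found"
    fi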
@@ -80,10 +80,10 @@ EOF
   docker restart $(docker ps -aqf name=nginx-mailcow)
-  echo -e "\033[32mNextcloud has been sucessfully uninstalled!\033[0m"
+  echo -e "\033[32mNextcloud has been uninstalled sucessfully!\033[0m"
 elif [[ ${NC_UPDATE} == "y" ]]; then
-  read -r -p "Are you sure you want to update Nextcloud (with nextclouds own updater)? [y/N] " response
+  read -r -p "Are you sure you want to update Nextcloud (with Nextclouds own updater)? [y/N] " response
   response=${response,,}
   if [[ ! "$response" =~ ^(yes|y)$ ]]; then
     echo "OK, aborting."
@@ -118,18 +118,18 @@ elif [[ ${NC_INSTALL} == "y" ]]; then
&& mkdir -p ./data/web/nextcloud/data \ && mkdir -p ./data/web/nextcloud/data \
&& chmod +x ./data/web/nextcloud/occ && chmod +x ./data/web/nextcloud/occ
echo -e "\033[33mCreating Nextcloud Database...\033[0m" echo -e "\033[33mCreating 'nextcloud' database...\033[0m"
NC_DBPASS=$(</dev/urandom tr -dc A-Za-z0-9 | head -c 28) NC_DBPASS=$(</dev/urandom tr -dc A-Za-z0-9 | head -c 28)
NC_DBUSER=nextcloud NC_DBUSER=nextcloud
NC_DBNAME=nextcloud NC_DBNAME=nextcloud
echo -ne "[1/3] Creating nextcloud Database" echo -ne "[1/3] Creating 'nextcloud' database"
docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "CREATE DATABASE ${NC_DBNAME};" docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "CREATE DATABASE ${NC_DBNAME};"
sleep 2 sleep 2
echo -ne "\r[2/3] Creating nextcloud Database user" echo -ne "\r[2/3] Creating 'nextcloud' database user"
docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "CREATE USER '${NC_DBUSER}'@'%' IDENTIFIED BY '${NC_DBPASS}';" docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "CREATE USER '${NC_DBUSER}'@'%' IDENTIFIED BY '${NC_DBPASS}';"
sleep 2 sleep 2
echo -ne "\r[3/3] Granting nextcloud user all permissions on database nextcloud" echo -ne "\r[3/3] Granting 'nextcloud' user all permissions on database 'nextcloud'"
docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "GRANT ALL PRIVILEGES ON ${NC_DBNAME}.* TO '${NC_DBUSER}'@'%';" docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "GRANT ALL PRIVILEGES ON ${NC_DBNAME}.* TO '${NC_DBUSER}'@'%';"
sleep 2 sleep 2
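After the three steps above, the new role should hold privileges on the nextcloud schema only. A quick hedged verification (same container lookup as the script; exact output wording varies by MariaDB version):

    docker exec -it $(docker ps -qf name=mysql-mailcow) mysql -uroot -p${DBROOT} \
      -e "SHOW GRANTS FOR 'nextcloud'@'%';"
    # expected to include: GRANT ALL PRIVILEGES ON `nextcloud`.* TO `nextcloud`@`%`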
@@ -140,7 +140,7 @@ elif [[ ${NC_INSTALL} == "y" ]]; then
echo -ne "[1/4] Setting correct permissions for www-data" echo -ne "[1/4] Setting correct permissions for www-data"
docker exec -it $(docker ps -f name=php-fpm-mailcow -q) /bin/bash -c "chown -R www-data:www-data /web/nextcloud" docker exec -it $(docker ps -f name=php-fpm-mailcow -q) /bin/bash -c "chown -R www-data:www-data /web/nextcloud"
sleep 2 sleep 2
echo -ne "\r[2/4] Running occ maintenance:install to install nextcloud" echo -ne "\r[2/4] Running occ maintenance:install to install Nextcloud"
docker exec -it -u www-data $(docker ps -f name=php-fpm-mailcow -q) /web/nextcloud/occ --no-warnings maintenance:install \ docker exec -it -u www-data $(docker ps -f name=php-fpm-mailcow -q) /web/nextcloud/occ --no-warnings maintenance:install \
--database mysql \ --database mysql \
--database-host mysql \ --database-host mysql \
@@ -149,9 +149,9 @@ elif [[ ${NC_INSTALL} == "y" ]]; then
     --database-pass ${NC_DBPASS} \
     --admin-user admin \
     --admin-pass ${ADMIN_NC_PASS} \
-    --data-dir /web/nextcloud/data 2>&1 /dev/null
-  echo -ne "\r[3/4] Setting custom parameters inside the nextcloud config file"
+    --data-dir /web/nextcloud/data > /dev/null 2>&1
+  echo -ne "\r[3/4] Setting custom parameters inside the Nextcloud config file"
   echo ""
   docker exec -it -u www-data $(docker ps -f name=php-fpm-mailcow -q) bash -c "/web/nextcloud/occ --no-warnings config:system:set redis host --value=redis --type=string; \
   /web/nextcloud/occ --no-warnings config:system:set redis port --value=6379 --type=integer; \
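The functional fix in this hunk is the redirection: the old 2>&1 /dev/null merely duplicated stderr onto stdout and handed /dev/null to occ as a stray positional argument, while > /dev/null 2>&1 actually discards both streams. A minimal illustration:

    ls -l 2>&1 /dev/null     # old form: /dev/null is treated as an argument, output still appears
    ls -l > /dev/null 2>&1   # fixed form: stdout goes to /dev/null, then stderr follows it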
@@ -178,7 +178,7 @@ elif [[ ${NC_INSTALL} == "y" ]]; then
   #/web/nextcloud/occ --no-warnings config:system:set user_backends 0 arguments 0 --value={dovecot:143/imap/tls/novalidate-cert}; \
   #/web/nextcloud/occ --no-warnings config:system:set user_backends 0 class --value=OC_User_IMAP; \
-  echo -e "\r[4/4] Enabling NGINX Configuration"
+  echo -e "\r[4/4] Enabling Nginx Configuration"
   cp ./data/assets/nextcloud/nextcloud.conf ./data/conf/nginx/
   sed -i "s/NC_SUBD/${NC_SUBD}/g" ./data/conf/nginx/nextcloud.conf
   sleep 2
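Enabling the site is a copy of the shipped template plus a placeholder substitution; once nginx has been restarted, the result can be sanity-checked from inside the container. A small sketch (illustrative only):

    grep "NC_SUBD" ./data/conf/nginx/nextcloud.conf || echo "placeholder replaced"   # should find no leftover NC_SUBD
    docker exec $(docker ps -qf name=nginx-mailcow) nginx -t                         # verify the generated configuration parses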
@@ -193,11 +193,11 @@ elif [[ ${NC_INSTALL} == "y" ]]; then
echo "* INSTALL DATE: $(date +%Y-%m-%d_%H-%M-%S) *" echo "* INSTALL DATE: $(date +%Y-%m-%d_%H-%M-%S) *"
echo "******************************************" echo "******************************************"
echo "" echo ""
echo -e "\033[36mDatabase Name: ${NC_DBNAME}\033[0m" echo -e "\033[36mDatabase name: ${NC_DBNAME}\033[0m"
echo -e "\033[36mDatabase User: ${NC_DBUSER}\033[0m" echo -e "\033[36mDatabase user: ${NC_DBUSER}\033[0m"
echo -e "\033[36mDatabase Password: ${NC_DBPASS}\033[0m" echo -e "\033[36mDatabase password: ${NC_DBPASS}\033[0m"
echo "" echo ""
echo -e "\033[31mUI Admin Password: ${ADMIN_NC_PASS}\033[0m" echo -e "\033[31mUI admin password: ${ADMIN_NC_PASS}\033[0m"
echo "" echo ""