Compare commits
19 Commits
Author | SHA1 | Date
---|---|---
 | 640f535e99 | 
 | 05d1a974eb | 
 | 99e38d81b1 | 
 | ed7b384e24 | 
 | 5439ea1010 | 
 | b719982504 | 
 | 8281d3fa55 | 
 | 9ba65a572e | 
 | afddcf7f3b | 
 | 294569f5c9 | 
 | ef6452cf55 | 
 | 9af40eba10 | 
 | 1b3a13ca19 | 
 | 71cc607de6 | 
 | 2ebd8345df | 
 | f5baeb31c1 | 
 | 5abda44bc6 | 
 | 520d070081 | 
 | 5c57df4669 | 
.github/workflows/integration_tests.yml (vendored, 63 changed lines)
@@ -1,63 +0,0 @@
name: mailcow Integration Tests

on:
  push:
    branches: [ "master", "staging" ]
  workflow_dispatch:

permissions:
  contents: read

jobs:
  integration_tests:
    runs-on: ubuntu-latest
    steps:
      - name: Setup Ansible
        run: |
          export DEBIAN_FRONTEND=noninteractive
          sudo apt-get update
          sudo apt-get install python3 python3-pip git
          sudo pip3 install ansible
      - name: Prepair Test Environment
        run: |
          git clone https://github.com/mailcow/mailcow-integration-tests.git --branch $(curl -sL https://api.github.com/repos/mailcow/mailcow-integration-tests/releases/latest | jq -r '.tag_name') --single-branch .
          ./fork_check.sh
          ./ci.sh
          ./ci-pip-requirements.sh
        env:
          VAULT_PW: ${{ secrets.MAILCOW_TESTS_VAULT_PW }}
          VAULT_FILE: ${{ secrets.MAILCOW_TESTS_VAULT_FILE }}
      - name: Start Integration Test Server
        run: |
          ./fork_check.sh
          ansible-playbook mailcow-start-server.yml --diff
        env:
          PY_COLORS: '1'
          ANSIBLE_FORCE_COLOR: '1'
          ANSIBLE_HOST_KEY_CHECKING: 'false'
      - name: Setup Integration Test Server
        run: |
          ./fork_check.sh
          sleep 30
          ansible-playbook mailcow-setup-server.yml --private-key id_ssh_rsa --diff
        env:
          PY_COLORS: '1'
          ANSIBLE_FORCE_COLOR: '1'
          ANSIBLE_HOST_KEY_CHECKING: 'false'
      - name: Run Integration Tests
        run: |
          ./fork_check.sh
          ansible-playbook mailcow-integration-tests.yml --private-key id_ssh_rsa --diff
        env:
          PY_COLORS: '1'
          ANSIBLE_FORCE_COLOR: '1'
          ANSIBLE_HOST_KEY_CHECKING: 'false'
      - name: Delete Integration Test Server
        if: always()
        run: |
          ./fork_check.sh
          ansible-playbook mailcow-delete-server.yml --diff
        env:
          PY_COLORS: '1'
          ANSIBLE_FORCE_COLOR: '1'
          ANSIBLE_HOST_KEY_CHECKING: 'false'
@@ -1,6 +1,5 @@
# mailcow: dockerized - 🐮 + 🐋 = 💕

[](https://github.com/mailcow/mailcow-dockerized/actions/workflows/integration_tests.yml)
[](https://translate.mailcow.email/engage/mailcow-dockerized/)
[](https://twitter.com/mailcow_email)
@@ -213,11 +213,13 @@ while true; do
done
ADDITIONAL_WC_ARR+=('autodiscover' 'autoconfig')

if [[ ${SKIP_IP_CHECK} != "y" ]]; then
  # Start IP detection
  log_f "Detecting IP addresses..."
  IPV4=$(get_ipv4)
  IPV6=$(get_ipv6)
  log_f "OK: ${IPV4}, ${IPV6:-"0000:0000:0000:0000:0000:0000:0000:0000"}"
fi

#########################################
# IP and webroot challenge verification #
@@ -13,6 +13,7 @@ RUN apk add --update --no-cache python3 \
    fastapi \
    uvicorn \
    aiodocker \
    docker \
    redis

COPY docker-entrypoint.sh /app/
@@ -1,5 +1,6 @@
from fastapi import FastAPI, Response, Request
import aiodocker
import docker
import psutil
import sys
import re
@@ -9,11 +10,38 @@ import json
import asyncio
import redis
from datetime import datetime
import logging
from logging.config import dictConfig


log_config = {
  "version": 1,
  "disable_existing_loggers": False,
  "formatters": {
    "default": {
      "()": "uvicorn.logging.DefaultFormatter",
      "fmt": "%(levelprefix)s %(asctime)s %(message)s",
      "datefmt": "%Y-%m-%d %H:%M:%S",
    },
  },
  "handlers": {
    "default": {
      "formatter": "default",
      "class": "logging.StreamHandler",
      "stream": "ext://sys.stderr",
    },
  },
  "loggers": {
    "api-logger": {"handlers": ["default"], "level": "INFO"},
  },
}
dictConfig(log_config)

containerIds_to_update = []
host_stats_isUpdating = False
app = FastAPI()
logger = logging.getLogger('api-logger')


@app.get("/host/stats")
@@ -21,18 +49,15 @@ async def get_host_update_stats():
  global host_stats_isUpdating

  if host_stats_isUpdating == False:
    print("start host stats task")
    asyncio.create_task(get_host_stats())
    host_stats_isUpdating = True

  while True:
    if redis_client.exists('host_stats'):
      break
    print("wait for host_stats results")
    await asyncio.sleep(1.5)

  print("host stats pulled")
  stats = json.loads(redis_client.get('host_stats'))
  return Response(content=json.dumps(stats, indent=4), media_type="application/json")
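The /host/stats handler above starts a background task on the first request and then polls Redis until that task has written a host_stats key. A minimal, self-contained sketch of the same pattern (illustrative only; collect_stats() and the localhost Redis address are stand-ins, not mailcow code):

```python
# Sketch of the "start a background refresh task, then poll the cache" pattern.
import asyncio
import json
import redis

redis_client = redis.Redis(host="localhost", port=6379, db=0)  # assumed address
stats_is_updating = False

async def collect_stats():
    # hypothetical producer: publish something into the cache after a short delay
    await asyncio.sleep(1)
    redis_client.set("host_stats", json.dumps({"uptime_seconds": 123}))

async def get_stats():
    global stats_is_updating
    if not stats_is_updating:
        asyncio.create_task(collect_stats())  # fire-and-forget refresh
        stats_is_updating = True
    while not redis_client.exists("host_stats"):
        await asyncio.sleep(1.5)              # wait for the task to publish
    return json.loads(redis_client.get("host_stats"))

if __name__ == "__main__":
    print(asyncio.run(get_stats()))
```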
@@ -106,14 +131,14 @@ async def post_containers(container_id : str, post_action : str, request: Reques
else:
api_call_method_name = '__'.join(['container_post', str(post_action) ])

docker_utils = DockerUtils(async_docker_client)
docker_utils = DockerUtils(sync_docker_client)
api_call_method = getattr(docker_utils, api_call_method_name, lambda container_id: Response(content=json.dumps({'type': 'danger', 'msg':'container_post - unknown api call' }, indent=4), media_type="application/json"))

print("api call: %s, container_id: %s" % (api_call_method_name, container_id))
return await api_call_method(container_id, request_json)
logger.info("api call: %s, container_id: %s" % (api_call_method_name, container_id))
return api_call_method(container_id, request_json)
except Exception as e:
print("error - container_post: %s" % str(e))
logger.error("error - container_post: %s" % str(e))
res = {
"type": "danger",
"msg": str(e)
@@ -152,398 +177,289 @@ class DockerUtils:
self.docker_client = docker_client

# api call: container_post - post_action: stop
async def container_post__stop(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
await container.stop()
res = {
'type': 'success',
'msg': 'command completed successfully'
}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
def container_post__stop(self, container_id, request_json):
for container in self.docker_client.containers.list(all=True, filters={"id": container_id}):
container.stop()

res = { 'type': 'success', 'msg': 'command completed successfully'}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
# api call: container_post - post_action: start
async def container_post__start(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
await container.start()
res = {
'type': 'success',
'msg': 'command completed successfully'
}
def container_post__start(self, container_id, request_json):
for container in self.docker_client.containers.list(all=True, filters={"id": container_id}):
container.start()

res = { 'type': 'success', 'msg': 'command completed successfully'}
return Response(content=json.dumps(res, indent=4), media_type="application/json")

# api call: container_post - post_action: restart
async def container_post__restart(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
await container.restart()
res = {
'type': 'success',
'msg': 'command completed successfully'
}
def container_post__restart(self, container_id, request_json):
for container in self.docker_client.containers.list(all=True, filters={"id": container_id}):
container.restart()

res = { 'type': 'success', 'msg': 'command completed successfully'}
return Response(content=json.dumps(res, indent=4), media_type="application/json")

# api call: container_post - post_action: top
async def container_post__top(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
ps_exec = await container.exec("ps")
async with ps_exec.start(detach=False) as stream:
ps_return = await stream.read_out()

exec_details = await ps_exec.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
res = {
'type': 'success',
'msg': ps_return.data.decode('utf-8')
}
def container_post__top(self, container_id, request_json):
for container in self.docker_client.containers.list(all=True, filters={"id": container_id}):
res = { 'type': 'success', 'msg': container.top()}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
'type': 'danger',
'msg': ''
}
# api call: container_post - post_action: stats
def container_post__stats(self, container_id, request_json):
for container in self.docker_client.containers.list(all=True, filters={"id": container_id}):
for stat in container.stats(decode=True, stream=True):
res = { 'type': 'success', 'msg': stat}
return Response(content=json.dumps(res, indent=4), media_type="application/json")

# api call: container_post - post_action: exec - cmd: mailq - task: delete
async def container_post__exec__mailq__delete(self, container_id, request_json):
def container_post__exec__mailq__delete(self, container_id, request_json):
if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$")
filtered_qids = filter(r.match, request_json['items'])
if filtered_qids:
flagged_qids = ['-d %s' % i for i in filtered_qids]
sanitized_string = str(' '.join(flagged_qids))
sanitized_string = str(' '.join(flagged_qids));
for container in self.docker_client.containers.list(filters={"id": container_id}):
postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
return exec_run_handler('generic', postsuper_r)

for container in (await self.docker_client.containers.list()):
if container._id == container_id:
postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
return await exec_run_handler('generic', postsuper_r_exec)

# api call: container_post - post_action: exec - cmd: mailq - task: hold
async def container_post__exec__mailq__hold(self, container_id, request_json):
def container_post__exec__mailq__hold(self, container_id, request_json):
if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$")
filtered_qids = filter(r.match, request_json['items'])
if filtered_qids:
flagged_qids = ['-h %s' % i for i in filtered_qids]
sanitized_string = str(' '.join(flagged_qids))

for container in (await self.docker_client.containers.list()):
if container._id == container_id:
postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
return await exec_run_handler('generic', postsuper_r_exec)
sanitized_string = str(' '.join(flagged_qids));
for container in self.docker_client.containers.list(filters={"id": container_id}):
postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
return exec_run_handler('generic', postsuper_r)

# api call: container_post - post_action: exec - cmd: mailq - task: cat
async def container_post__exec__mailq__cat(self, container_id, request_json):
def container_post__exec__mailq__cat(self, container_id, request_json):
if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$")
filtered_qids = filter(r.match, request_json['items'])
if filtered_qids:
sanitized_string = str(' '.join(filtered_qids))
sanitized_string = str(' '.join(filtered_qids));

for container in (await self.docker_client.containers.list()):
if container._id == container_id:
postcat_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postcat -q " + sanitized_string], user='postfix')
return await exec_run_handler('utf8_text_only', postcat_exec)
for container in self.docker_client.containers.list(filters={"id": container_id}):
postcat_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postcat -q " + sanitized_string], user='postfix')
if not postcat_return:
postcat_return = 'err: invalid'
return exec_run_handler('utf8_text_only', postcat_return)

# api call: container_post - post_action: exec - cmd: mailq - task: unhold
async def container_post__exec__mailq__unhold(self, container_id, request_json):
def container_post__exec__mailq__unhold(self, container_id, request_json):
if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$")
filtered_qids = filter(r.match, request_json['items'])
if filtered_qids:
flagged_qids = ['-H %s' % i for i in filtered_qids]
sanitized_string = str(' '.join(flagged_qids))

for container in (await self.docker_client.containers.list()):
if container._id == container_id:
postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
return await exec_run_handler('generic', postsuper_r_exec)

sanitized_string = str(' '.join(flagged_qids));
for container in self.docker_client.containers.list(filters={"id": container_id}):
postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string])
return exec_run_handler('generic', postsuper_r)

# api call: container_post - post_action: exec - cmd: mailq - task: deliver
async def container_post__exec__mailq__deliver(self, container_id, request_json):
def container_post__exec__mailq__deliver(self, container_id, request_json):
if 'items' in request_json:
r = re.compile("^[0-9a-fA-F]+$")
filtered_qids = filter(r.match, request_json['items'])
if filtered_qids:
flagged_qids = ['-i %s' % i for i in filtered_qids]

for container in (await self.docker_client.containers.list()):
if container._id == container_id:
for container in self.docker_client.containers.list(filters={"id": container_id}):
for i in flagged_qids:
postsuper_r_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postqueue " + i], user='postfix')
async with postsuper_r_exec.start(detach=False) as stream:
postsuper_r_return = await stream.read_out()
postqueue_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postqueue " + i], user='postfix')
# todo: check each exit code
res = {
'type': 'success',
'msg': 'Scheduled immediate delivery'
}
res = { 'type': 'success', 'msg': 'Scheduled immediate delivery'}
return Response(content=json.dumps(res, indent=4), media_type="application/json")

# api call: container_post - post_action: exec - cmd: mailq - task: list
async def container_post__exec__mailq__list(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
mailq_exec = await container.exec(["/usr/sbin/postqueue", "-j"], user='postfix')
return await exec_run_handler('utf8_text_only', mailq_exec)

def container_post__exec__mailq__list(self, container_id, request_json):
for container in self.docker_client.containers.list(filters={"id": container_id}):
mailq_return = container.exec_run(["/usr/sbin/postqueue", "-j"], user='postfix')
return exec_run_handler('utf8_text_only', mailq_return)
# api call: container_post - post_action: exec - cmd: mailq - task: flush
async def container_post__exec__mailq__flush(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
postsuper_r_exec = await container.exec(["/usr/sbin/postqueue", "-f"], user='postfix')
return await exec_run_handler('generic', postsuper_r_exec)

def container_post__exec__mailq__flush(self, container_id, request_json):
for container in self.docker_client.containers.list(filters={"id": container_id}):
postqueue_r = container.exec_run(["/usr/sbin/postqueue", "-f"], user='postfix')
return exec_run_handler('generic', postqueue_r)
# api call: container_post - post_action: exec - cmd: mailq - task: super_delete
async def container_post__exec__mailq__super_delete(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
postsuper_r_exec = await container.exec(["/usr/sbin/postsuper", "-d", "ALL"])
return await exec_run_handler('generic', postsuper_r_exec)

def container_post__exec__mailq__super_delete(self, container_id, request_json):
for container in self.docker_client.containers.list(filters={"id": container_id}):
postsuper_r = container.exec_run(["/usr/sbin/postsuper", "-d", "ALL"])
return exec_run_handler('generic', postsuper_r)
# api call: container_post - post_action: exec - cmd: system - task: fts_rescan
async def container_post__exec__system__fts_rescan(self, container_id, request_json):
def container_post__exec__system__fts_rescan(self, container_id, request_json):
if 'username' in request_json:
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
rescan_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -u '" + request_json['username'].replace("'", "'\\''") + "'"], user='vmail')
async with rescan_exec.start(detach=False) as stream:
rescan_return = await stream.read_out()

exec_details = await rescan_exec.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
res = {
'type': 'success',
'msg': 'fts_rescan: rescan triggered'
}
for container in self.docker_client.containers.list(filters={"id": container_id}):
rescan_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -u '" + request_json['username'].replace("'", "'\\''") + "'"], user='vmail')
if rescan_return.exit_code == 0:
res = { 'type': 'success', 'msg': 'fts_rescan: rescan triggered'}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
'type': 'warning',
'msg': 'fts_rescan error'
}
res = { 'type': 'warning', 'msg': 'fts_rescan error'}
return Response(content=json.dumps(res, indent=4), media_type="application/json")

if 'all' in request_json:
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
rescan_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -A"], user='vmail')
async with rescan_exec.start(detach=False) as stream:
rescan_return = await stream.read_out()

exec_details = await rescan_exec.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
res = {
'type': 'success',
'msg': 'fts_rescan: rescan triggered'
}
for container in self.docker_client.containers.list(filters={"id": container_id}):
rescan_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -A"], user='vmail')
if rescan_return.exit_code == 0:
res = { 'type': 'success', 'msg': 'fts_rescan: rescan triggered'}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
'type': 'warning',
'msg': 'fts_rescan error'
}
res = { 'type': 'warning', 'msg': 'fts_rescan error'}
return Response(content=json.dumps(res, indent=4), media_type="application/json")

# api call: container_post - post_action: exec - cmd: system - task: df
async def container_post__exec__system__df(self, container_id, request_json):
def container_post__exec__system__df(self, container_id, request_json):
if 'dir' in request_json:
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
df_exec = await container.exec(["/bin/bash", "-c", "/bin/df -H '" + request_json['dir'].replace("'", "'\\''") + "' | /usr/bin/tail -n1 | /usr/bin/tr -s [:blank:] | /usr/bin/tr ' ' ','"], user='nobody')
async with df_exec.start(detach=False) as stream:
df_return = await stream.read_out()

print(df_return)
print(await df_exec.inspect())
exec_details = await df_exec.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
return df_return.data.decode('utf-8').rstrip()
for container in self.docker_client.containers.list(filters={"id": container_id}):
df_return = container.exec_run(["/bin/bash", "-c", "/bin/df -H '" + request_json['dir'].replace("'", "'\\''") + "' | /usr/bin/tail -n1 | /usr/bin/tr -s [:blank:] | /usr/bin/tr ' ' ','"], user='nobody')
if df_return.exit_code == 0:
return df_return.output.decode('utf-8').rstrip()
else:
return "0,0,0,0,0,0"

# api call: container_post - post_action: exec - cmd: system - task: mysql_upgrade
async def container_post__exec__system__mysql_upgrade(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
sql_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/mysql_upgrade -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "'\n"], user='mysql')
async with sql_exec.start(detach=False) as stream:
sql_return = await stream.read_out()

exec_details = await sql_exec.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
def container_post__exec__system__mysql_upgrade(self, container_id, request_json):
for container in self.docker_client.containers.list(filters={"id": container_id}):
sql_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/mysql_upgrade -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "'\n"], user='mysql')
if sql_return.exit_code == 0:
matched = False
for line in sql_return.data.decode('utf-8').split("\n"):
for line in sql_return.output.decode('utf-8').split("\n"):
if 'is already upgraded to' in line:
matched = True
if matched:
res = {
'type': 'success',
'msg': 'mysql_upgrade: already upgraded',
'text': sql_return.data.decode('utf-8')
}
res = { 'type': 'success', 'msg':'mysql_upgrade: already upgraded', 'text': sql_return.output.decode('utf-8')}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
await container.restart()
res = {
'type': 'warning',
'msg': 'mysql_upgrade: upgrade was applied',
'text': sql_return.data.decode('utf-8')
}
container.restart()
res = { 'type': 'warning', 'msg':'mysql_upgrade: upgrade was applied', 'text': sql_return.output.decode('utf-8')}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
'type': 'error',
'msg': 'mysql_upgrade: error running command',
'text': sql_return.data.decode('utf-8')
}
res = { 'type': 'error', 'msg': 'mysql_upgrade: error running command', 'text': sql_return.output.decode('utf-8')}
return Response(content=json.dumps(res, indent=4), media_type="application/json")

# api call: container_post - post_action: exec - cmd: system - task: mysql_tzinfo_to_sql
async def container_post__exec__system__mysql_tzinfo_to_sql(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
sql_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/mysql_tzinfo_to_sql /usr/share/zoneinfo | /bin/sed 's/Local time zone must be set--see zic manual page/FCTY/' | /usr/bin/mysql -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "' mysql \n"], user='mysql')
async with sql_exec.start(detach=False) as stream:
sql_return = await stream.read_out()

exec_details = await sql_exec.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
res = {
'type': 'info',
'msg': 'mysql_tzinfo_to_sql: command completed successfully',
'text': sql_return.data.decode('utf-8')
}
def container_post__exec__system__mysql_tzinfo_to_sql(self, container_id, request_json):
for container in self.docker_client.containers.list(filters={"id": container_id}):
sql_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/mysql_tzinfo_to_sql /usr/share/zoneinfo | /bin/sed 's/Local time zone must be set--see zic manual page/FCTY/' | /usr/bin/mysql -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "' mysql \n"], user='mysql')
if sql_return.exit_code == 0:
res = { 'type': 'info', 'msg': 'mysql_tzinfo_to_sql: command completed successfully', 'text': sql_return.output.decode('utf-8')}
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
'type': 'error',
'msg': 'mysql_tzinfo_to_sql: error running command',
'text': sql_return.data.decode('utf-8')
}
res = { 'type': 'error', 'msg': 'mysql_tzinfo_to_sql: error running command', 'text': sql_return.output.decode('utf-8')}
return Response(content=json.dumps(res, indent=4), media_type="application/json")

# api call: container_post - post_action: exec - cmd: reload - task: dovecot
async def container_post__exec__reload__dovecot(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
reload_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/dovecot reload"])
return await exec_run_handler('generic', reload_exec)

def container_post__exec__reload__dovecot(self, container_id, request_json):
for container in self.docker_client.containers.list(filters={"id": container_id}):
reload_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/dovecot reload"])
return exec_run_handler('generic', reload_return)
# api call: container_post - post_action: exec - cmd: reload - task: postfix
async def container_post__exec__reload__postfix(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
reload_exec = await container.exec(["/bin/bash", "-c", "/usr/sbin/postfix reload"])
return await exec_run_handler('generic', reload_exec)

def container_post__exec__reload__postfix(self, container_id, request_json):
for container in self.docker_client.containers.list(filters={"id": container_id}):
reload_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postfix reload"])
return exec_run_handler('generic', reload_return)
# api call: container_post - post_action: exec - cmd: reload - task: nginx
async def container_post__exec__reload__nginx(self, container_id, request_json):
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
reload_exec = await container.exec(["/bin/sh", "-c", "/usr/sbin/nginx -s reload"])
return await exec_run_handler('generic', reload_exec)

def container_post__exec__reload__nginx(self, container_id, request_json):
for container in self.docker_client.containers.list(filters={"id": container_id}):
reload_return = container.exec_run(["/bin/sh", "-c", "/usr/sbin/nginx -s reload"])
return exec_run_handler('generic', reload_return)
# api call: container_post - post_action: exec - cmd: sieve - task: list
async def container_post__exec__sieve__list(self, container_id, request_json):
def container_post__exec__sieve__list(self, container_id, request_json):
if 'username' in request_json:
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
sieve_exec = await container.exec(["/bin/bash", "-c", "/usr/bin/doveadm sieve list -u '" + request_json['username'].replace("'", "'\\''") + "'"])
return await exec_run_handler('utf8_text_only', sieve_exec)

for container in self.docker_client.containers.list(filters={"id": container_id}):
sieve_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm sieve list -u '" + request_json['username'].replace("'", "'\\''") + "'"])
return exec_run_handler('utf8_text_only', sieve_return)
# api call: container_post - post_action: exec - cmd: sieve - task: print
async def container_post__exec__sieve__print(self, container_id, request_json):
if 'username' in request_json and 'script_name' in request_json:
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
def container_post__exec__sieve__print(self, container_id, request_json):
if 'username' in request.json and 'script_name' in request_json:
for container in self.docker_client.containers.list(filters={"id": container_id}):
cmd = ["/bin/bash", "-c", "/usr/bin/doveadm sieve get -u '" + request_json['username'].replace("'", "'\\''") + "' '" + request_json['script_name'].replace("'", "'\\''") + "'"]
sieve_exec = await container.exec(cmd)
return await exec_run_handler('utf8_text_only', sieve_exec)

sieve_return = container.exec_run(cmd)
return exec_run_handler('utf8_text_only', sieve_return)
# api call: container_post - post_action: exec - cmd: maildir - task: cleanup
async def container_post__exec__maildir__cleanup(self, container_id, request_json):
def container_post__exec__maildir__cleanup(self, container_id, request_json):
if 'maildir' in request_json:
for container in (await self.docker_client.containers.list()):
if container._id == container_id:
for container in self.docker_client.containers.list(filters={"id": container_id}):
sane_name = re.sub(r'\W+', '', request_json['maildir'])
cmd = ["/bin/bash", "-c", "if [[ -d '/var/vmail/" + request_json['maildir'].replace("'", "'\\''") + "' ]]; then /bin/mv '/var/vmail/" + request_json['maildir'].replace("'", "'\\''") + "' '/var/vmail/_garbage/" + str(int(time.time())) + "_" + sane_name + "'; fi"]
maildir_cleanup_exec = await container.exec(cmd, user='vmail')
return await exec_run_handler('generic', maildir_cleanup_exec)

maildir_cleanup = container.exec_run(cmd, user='vmail')
return exec_run_handler('generic', maildir_cleanup)
# api call: container_post - post_action: exec - cmd: rspamd - task: worker_password
async def container_post__exec__rspamd__worker_password(self, container_id, request_json):
def container_post__exec__rspamd__worker_password(self, container_id, request_json):
if 'raw' in request_json:
for container in (await self.docker_client.containers.list()):
if container._id == container_id:

cmd = "./set_worker_password.sh '" + request_json['raw'].replace("'", "'\\''") + "' 2> /dev/null"
rspamd_password_exec = await container.exec(cmd, user='_rspamd')
async with rspamd_password_exec.start(detach=False) as stream:
rspamd_password_return = await stream.read_out()
for container in self.docker_client.containers.list(filters={"id": container_id}):
cmd = "/usr/bin/rspamadm pw -e -p '" + request_json['raw'].replace("'", "'\\''") + "' 2> /dev/null"
cmd_response = exec_cmd_container(container, cmd, user="_rspamd")

matched = False
if "OK" in rspamd_password_return.data.decode('utf-8'):
for line in cmd_response.split("\n"):
if '$2$' in line:
hash = line.strip()
hash_out = re.search('\$2\$.+$', hash).group(0)
rspamd_passphrase_hash = re.sub('[^0-9a-zA-Z\$]+', '', hash_out.rstrip())
rspamd_password_filename = "/etc/rspamd/override.d/worker-controller-password.inc"
cmd = '''/bin/echo 'enable_password = "%s";' > %s && cat %s''' % (rspamd_passphrase_hash, rspamd_password_filename, rspamd_password_filename)
cmd_response = exec_cmd_container(container, cmd, user="_rspamd")
if rspamd_passphrase_hash.startswith("$2$") and rspamd_passphrase_hash in cmd_response:
container.restart()
matched = True
await container.restart()

if matched:
res = {
'type': 'success',
'msg': 'command completed successfully'
}
res = { 'type': 'success', 'msg': 'command completed successfully' }
logger.info('success changing Rspamd password')
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
'type': 'danger',
'msg': 'command did not complete'
}
logger.error('failed changing Rspamd password')
res = { 'type': 'danger', 'msg': 'command did not complete' }
return Response(content=json.dumps(res, indent=4), media_type="application/json")

def exec_cmd_container(container, cmd, user, timeout=2, shell_cmd="/bin/bash"):

async def exec_run_handler(type, exec_obj):
async with exec_obj.start(detach=False) as stream:
exec_return = await stream.read_out()

if exec_return == None:
exec_return = ""
def recv_socket_data(c_socket, timeout):
c_socket.setblocking(0)
total_data=[]
data=''
begin=time.time()
while True:
if total_data and time.time()-begin > timeout:
break
elif time.time()-begin > timeout*2:
break
try:
data = c_socket.recv(8192)
if data:
total_data.append(data.decode('utf-8'))
#change the beginning time for measurement
begin=time.time()
else:
exec_return = exec_return.data.decode('utf-8')
#sleep for sometime to indicate a gap
time.sleep(0.1)
break
except:
pass
return ''.join(total_data)

try :
socket = container.exec_run([shell_cmd], stdin=True, socket=True, user=user).output._sock
if not cmd.endswith("\n"):
cmd = cmd + "\n"
socket.send(cmd.encode('utf-8'))
data = recv_socket_data(socket, timeout)
socket.close()
return data
except Exception as e:
logger.error("error - exec_cmd_container: %s" % str(e))
traceback.print_exc(file=sys.stdout)
def exec_run_handler(type, output):
if type == 'generic':
exec_details = await exec_obj.inspect()
if exec_details["ExitCode"] == None or exec_details["ExitCode"] == 0:
res = {
"type": "success",
"msg": "command completed successfully"
}
if output.exit_code == 0:
res = { 'type': 'success', 'msg': 'command completed successfully' }
return Response(content=json.dumps(res, indent=4), media_type="application/json")
else:
res = {
"type": "success",
"msg": "'command failed: " + exec_return
}
res = { 'type': 'danger', 'msg': 'command failed: ' + output.output.decode('utf-8') }
return Response(content=json.dumps(res, indent=4), media_type="application/json")
if type == 'utf8_text_only':
return Response(content=exec_return, media_type="text/plain")
return Response(content=output.output.decode('utf-8'), media_type="text/plain")

async def get_host_stats(wait=5):
global host_stats_isUpdating
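The new exec_cmd_container() and recv_socket_data() shown above run a command through an interactive shell attached over a raw socket and read the reply in non-blocking mode until a short quiet period passes. A condensed sketch of that flow, assuming a docker SDK Container object (simplified from the code above, not a drop-in replacement):

```python
# Condensed sketch of the socket-based exec pattern; `container` is assumed to be
# a docker SDK Container object (docker.models.containers.Container).
import time

def exec_cmd_container(container, cmd, user, timeout=2, shell_cmd="/bin/bash"):
    # start an interactive shell and grab the raw socket behind it
    sock = container.exec_run([shell_cmd], stdin=True, socket=True, user=user).output._sock
    if not cmd.endswith("\n"):
        cmd += "\n"
    sock.send(cmd.encode("utf-8"))

    sock.setblocking(False)
    chunks = []
    begin = time.time()
    while True:
        # stop once data has arrived and the quiet period exceeds the timeout,
        # or unconditionally after twice the timeout
        if chunks and time.time() - begin > timeout:
            break
        if time.time() - begin > timeout * 2:
            break
        try:
            data = sock.recv(8192)
            if data:
                chunks.append(data.decode("utf-8"))
                begin = time.time()  # reset the quiet-period timer
            else:
                time.sleep(0.1)
                break
        except BlockingIOError:
            pass  # nothing to read yet
    sock.close()
    return "".join(chunks)
```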
@@ -570,12 +486,10 @@ async def get_host_stats(wait=5):
"type": "danger",
"msg": str(e)
}
print(json.dumps(res, indent=4))

await asyncio.sleep(wait)
host_stats_isUpdating = False


async def get_container_stats(container_id, wait=5, stop=False):
global containerIds_to_update
@@ -598,13 +512,11 @@ async def get_container_stats(container_id, wait=5, stop=False):
"type": "danger",
"msg": str(e)
}
print(json.dumps(res, indent=4))
else:
res = {
"type": "danger",
"msg": "no or invalid id defined"
}
print(json.dumps(res, indent=4))

await asyncio.sleep(wait)
if stop == True:
@@ -615,9 +527,13 @@ async def get_container_stats(container_id, wait=5, stop=False):
await get_container_stats(container_id, wait=0, stop=True)


if os.environ['REDIS_SLAVEOF_IP'] != "":
redis_client = redis.Redis(host=os.environ['REDIS_SLAVEOF_IP'], port=os.environ['REDIS_SLAVEOF_PORT'], db=0)
else:
redis_client = redis.Redis(host='redis-mailcow', port=6379, db=0)

sync_docker_client = docker.DockerClient(base_url='unix://var/run/docker.sock', version='auto')
async_docker_client = aiodocker.Docker(url='unix:///var/run/docker.sock')

logger.info('DockerApi started')
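Taken together, the changes above swap the aiodocker coroutines for the synchronous docker SDK client: every handler now looks the container up with an id filter on containers.list() and calls exec_run(), whose result carries exit_code and output. A minimal sketch of that lookup pattern (illustrative only, not part of the repository):

```python
# Minimal sketch of the container lookup / exec_run pattern used by the new handlers.
import docker

client = docker.DockerClient(base_url="unix://var/run/docker.sock", version="auto")

def restart_container(container_id: str) -> dict:
    # the id filter matches at most one container, so this loop runs zero or one time
    for container in client.containers.list(all=True, filters={"id": container_id}):
        container.restart()
    return {"type": "success", "msg": "command completed successfully"}

def run_in_container(container_id: str, cmd: list) -> str:
    for container in client.containers.list(filters={"id": container_id}):
        result = container.exec_run(cmd)  # ExecResult with .exit_code and .output
        if result.exit_code == 0:
            return result.output.decode("utf-8")
    return ""
```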
@@ -175,7 +175,7 @@ BAD_SUBJECT_00 {
type = "header";
header = "subject";
regexp = true;
map = "http://nullnull.org/bad-subject-regex.txt";
map = "http://fuzzy.mailcow.email/bad-subject-regex.txt";
score = 6.0;
symbols_set = ["BAD_SUBJECT_00"];
}
@@ -21,7 +21,6 @@ jQuery(function($){
url: '/api/v1/get/postcat/' + button.data('queue-id'),
dataType: 'text',
complete: function (data) {
console.log(data);
$('#queue_msg_content').text(data.responseText);
}
});
@@ -54,7 +53,7 @@ jQuery(function($){
});
item.recipients = rcpts.join('<hr style="margin:1px!important">');
item.action = '<div class="btn-group">' +
'<a href="#" data-bs-toggle="modal" data-bs-target="#showQueuedMsg" data-queue-id="' + encodeURI(item.queue_id) + '" class="btn btn-xs btn-secondary">' + lang.queue_show_message + '</a>' +
'<a href="#" data-bs-toggle="modal" data-bs-target="#showQueuedMsg" data-queue-id="' + encodeURI(item.queue_id) + '" class="btn btn-xs btn-secondary">' + lang.show_message + '</a>' +
'</div>';
});
return data;
@@ -1,6 +1,6 @@
{
"acl": {
"alias_domains": "Tilføj kældenavn domæner",
"alias_domains": "Tilføj domænealias",
"app_passwds": "Administrer app-adgangskoder",
"bcc_maps": "BCC kort",
"delimiter_action": "Afgrænsning handling",
@@ -22,9 +22,9 @@
"spam_alias": "Midlertidige aliasser",
"spam_policy": "Sortliste / hvidliste",
"spam_score": "Spam-score",
"syncjobs": "Synkroniser job",
"syncjobs": "Synkroniserings job",
"tls_policy": "TLS politik",
"unlimited_quota": "Ubegrænset quote for mailbokse",
"unlimited_quota": "Ubegrænset plads for mailbokse",
"domain_desc": "Skift domæne beskrivelse"
},
"add": {
@@ -20,6 +20,7 @@ if (isset($_SESSION['mailcow_cc_role']) && $_SESSION['mailcow_cc_role'] == 'doma
'tfa_data' => $tfa_data,
'fido2_data' => $fido2_data,
'lang_user' => json_encode($lang['user']),
'lang_datatables' => json_encode($lang['datatables']),
];
}
elseif (isset($_SESSION['mailcow_cc_role']) && $_SESSION['mailcow_cc_role'] == 'user') {
@@ -389,7 +389,7 @@ services:
  acme-mailcow:
    depends_on:
      - nginx-mailcow
    image: mailcow/acme:1.83
    image: mailcow/acme:1.84
    dns:
      - ${IPV4_NETWORK:-172.22.1}.254
    environment:
@@ -510,11 +510,10 @@ services:
      - watchdog

  dockerapi-mailcow:
    image: mailcow/dockerapi:2.0
    image: mailcow/dockerapi:2.01
    security_opt:
      - label=disable
    restart: always
    oom_kill_disable: true
    dns:
      - ${IPV4_NETWORK:-172.22.1}.254
    environment:
@@ -25,9 +25,8 @@ for bin in openssl curl docker git awk sha1sum; do
if [[ -z $(which ${bin}) ]]; then echo "Cannot find ${bin}, exiting..."; exit 1; fi
done

if command -v docker compose > /dev/null 2>&1; then
version=$(docker compose version --short)
if [[ $version =~ ^2\.([0-9]+)\.([0-9]+) ]]; then
if docker compose > /dev/null 2>&1; then
if docker compose version --short | grep "^2." > /dev/null 2>&1; then
COMPOSE_VERSION=native
echo -e "\e[31mFound Docker Compose Plugin (native).\e[0m"
echo -e "\e[31mSetting the DOCKER_COMPOSE_VERSION Variable to native\e[0m"
@@ -35,12 +34,12 @@ if command -v docker compose > /dev/null 2>&1; then
echo -e "\e[33mNotice: You´ll have to update this Compose Version via your Package Manager manually!\e[0m"
else
echo -e "\e[31mCannot find Docker Compose with a Version Higher than 2.X.X.\e[0m"
echo -e "\e[31mPlease update/install manually regarding to this doc site: https://mailcow.github.io/mailcow-dockerized-docs/i_u_m/i_u_m_install/\e[0m"
echo -e "\e[31mPlease update/install it manually regarding to this doc site: https://mailcow.github.io/mailcow-dockerized-docs/i_u_m/i_u_m_install/\e[0m"
exit 1
fi
elif command -v docker-compose > /dev/null 2>&1; then
version=$(docker-compose version --short)
if [[ $version =~ ^2\.([0-9]+)\.([0-9]+) ]]; then
elif docker-compose > /dev/null 2>&1; then
if ! [[ $(alias docker-compose 2> /dev/null) ]] ; then
if docker-compose version --short | grep "^2." > /dev/null 2>&1; then
COMPOSE_VERSION=standalone
echo -e "\e[31mFound Docker Compose Standalone.\e[0m"
echo -e "\e[31mSetting the DOCKER_COMPOSE_VERSION Variable to standalone\e[0m"
@@ -51,9 +50,11 @@ elif command -v docker-compose > /dev/null 2>&1; then
echo -e "\e[31mPlease update/install manually regarding to this doc site: https://mailcow.github.io/mailcow-dockerized-docs/i_u_m/i_u_m_install/\e[0m"
exit 1
fi
fi

else
echo -e "\e[31mCannot find Docker Compose.\e[0m"
echo -e "\e[31mPlease install it manually regarding to this doc site: https://mailcow.github.io/mailcow-dockerized-docs/i_u_m/i_u_m_install/\e[0m"
echo -e "\e[31mPlease install it regarding to this doc site: https://mailcow.github.io/mailcow-dockerized-docs/i_u_m/i_u_m_install/\e[0m"
exit 1
fi

@@ -457,6 +458,15 @@ case ${git_branch} in
mailcow_last_git_version=""
;;
esac
# if [ ${git_branch} == "master" ]; then
# mailcow_git_version=$(git describe --tags `git rev-list --tags --max-count=1`)
# elif [ ${git_branch} == "nightly" ]; then
# mailcow_git_version=$(git rev-parse --short $(git rev-parse @{upstream}))
# mailcow_last_git_version=""
# else
# mailcow_git_version=$(git rev-parse --short HEAD)
# mailcow_last_git_version=""
# fi

if [[ $SKIP_BRANCH != "y" ]]; then
mailcow_git_commit=$(git rev-parse origin/${git_branch})
@@ -26,6 +26,6 @@ services:
      - /var/run/mysqld/mysqld.sock:/var/run/mysqld/mysqld.sock

  mysql-mailcow:
    image: alpine:3.10
    image: alpine:3.17
    command: /bin/true
    restart: "no"
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
# renovate: datasource=github-releases depName=nextcloud/server versioning=semver extractVersion=^v(?<version>.*)$
NEXTCLOUD_VERSION=25.0.2
NEXTCLOUD_VERSION=25.0.3

for bin in curl dirmngr; do
if [[ -z $(which ${bin}) ]]; then echo "Cannot find ${bin}, exiting..."; exit 1; fi
@@ -46,22 +46,22 @@ if [[ ${NC_PURGE} == "y" ]]; then

echo -e "\033[33mDetecting Database information...\033[0m"
if [[ $(docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "Show databases" | grep "nextcloud") ]]; then
echo -e "\033[32mFound seperate nextcloud Database (newer scheme)!\033[0m"
echo -e "\033[32mFound seperate Nextcloud database (newer scheme)!\033[0m"
echo -e "\033[31mPurging...\033[0m"
docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "DROP DATABASE nextcloud;" > /dev/null
docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "DROP USER 'nextcloud'@'%';" > /dev/null
elif [[ $(docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} mailcow -e "SHOW TABLES LIKE 'oc_%'") && $? -eq 0 ]]; then
echo -e "\033[32mFound nextcloud (oc) tables inside of mailcow Database (old scheme)!\033[0m"
echo -e "\033[32mFound Nextcloud (oc) tables inside of mailcow database (old scheme)!\033[0m"
echo -e "\033[31mPurging...\033[0m"
docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e \
"$(docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "SELECT IFNULL(GROUP_CONCAT('DROP TABLE ', TABLE_SCHEMA, '.', TABLE_NAME SEPARATOR ';'),'SELECT NULL;') FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME LIKE 'oc_%' AND TABLE_SCHEMA = '${DBNAME}';" -BN)" > /dev/null
elif [[ $(docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} mailcow -e "SHOW TABLES LIKE 'nc_%'") && $? -eq 0 ]]; then
echo -e "\033[32mFound nextcloud (nc) tables inside of mailcow Database (old scheme)!\033[0m"
echo -e "\033[32mFound Nextcloud (nc) tables inside of mailcow database (old scheme)!\033[0m"
echo -e "\033[31mPurging...\033[0m"
docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e \
"$(docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "SELECT IFNULL(GROUP_CONCAT('DROP TABLE ', TABLE_SCHEMA, '.', TABLE_NAME SEPARATOR ';'),'SELECT NULL;') FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME LIKE 'nc_%' AND TABLE_SCHEMA = '${DBNAME}';" -BN)" > /dev/null
else
echo -e "\033[31mError: No Nextcloud Databases/Tables found!"
echo -e "\033[31mError: No Nextcloud databases/tables found!"
echo -e "\033[33mNot purging anything...\033[0m"
exit 1
fi
@@ -80,10 +80,10 @@ EOF

docker restart $(docker ps -aqf name=nginx-mailcow)

echo -e "\033[32mNextcloud has been sucessfully uninstalled!\033[0m"
echo -e "\033[32mNextcloud has been uninstalled sucessfully!\033[0m"

elif [[ ${NC_UPDATE} == "y" ]]; then
read -r -p "Are you sure you want to update Nextcloud (with nextclouds own updater)? [y/N] " response
read -r -p "Are you sure you want to update Nextcloud (with Nextclouds own updater)? [y/N] " response
response=${response,,}
if [[ ! "$response" =~ ^(yes|y)$ ]]; then
echo "OK, aborting."
@@ -118,18 +118,18 @@ elif [[ ${NC_INSTALL} == "y" ]]; then
&& mkdir -p ./data/web/nextcloud/data \
&& chmod +x ./data/web/nextcloud/occ

echo -e "\033[33mCreating Nextcloud Database...\033[0m"
echo -e "\033[33mCreating 'nextcloud' database...\033[0m"
NC_DBPASS=$(</dev/urandom tr -dc A-Za-z0-9 | head -c 28)
NC_DBUSER=nextcloud
NC_DBNAME=nextcloud

echo -ne "[1/3] Creating nextcloud Database"
echo -ne "[1/3] Creating 'nextcloud' database"
docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "CREATE DATABASE ${NC_DBNAME};"
sleep 2
echo -ne "\r[2/3] Creating nextcloud Database user"
echo -ne "\r[2/3] Creating 'nextcloud' database user"
docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "CREATE USER '${NC_DBUSER}'@'%' IDENTIFIED BY '${NC_DBPASS}';"
sleep 2
echo -ne "\r[3/3] Granting nextcloud user all permissions on database nextcloud"
echo -ne "\r[3/3] Granting 'nextcloud' user all permissions on database 'nextcloud'"
docker exec -it $(docker ps -f name=mysql-mailcow -q) mysql -uroot -p${DBROOT} -e "GRANT ALL PRIVILEGES ON ${NC_DBNAME}.* TO '${NC_DBUSER}'@'%';"
sleep 2

@@ -140,7 +140,7 @@ elif [[ ${NC_INSTALL} == "y" ]]; then
echo -ne "[1/4] Setting correct permissions for www-data"
docker exec -it $(docker ps -f name=php-fpm-mailcow -q) /bin/bash -c "chown -R www-data:www-data /web/nextcloud"
sleep 2
echo -ne "\r[2/4] Running occ maintenance:install to install nextcloud"
echo -ne "\r[2/4] Running occ maintenance:install to install Nextcloud"
docker exec -it -u www-data $(docker ps -f name=php-fpm-mailcow -q) /web/nextcloud/occ --no-warnings maintenance:install \
--database mysql \
--database-host mysql \
@@ -149,9 +149,9 @@ elif [[ ${NC_INSTALL} == "y" ]]; then
--database-pass ${NC_DBPASS} \
--admin-user admin \
--admin-pass ${ADMIN_NC_PASS} \
--data-dir /web/nextcloud/data 2>&1 /dev/null
--data-dir /web/nextcloud/data > /dev/null 2>&1

echo -ne "\r[3/4] Setting custom parameters inside the nextcloud config file"
echo -ne "\r[3/4] Setting custom parameters inside the Nextcloud config file"
echo ""
docker exec -it -u www-data $(docker ps -f name=php-fpm-mailcow -q) bash -c "/web/nextcloud/occ --no-warnings config:system:set redis host --value=redis --type=string; \
/web/nextcloud/occ --no-warnings config:system:set redis port --value=6379 --type=integer; \
@@ -178,7 +178,7 @@ elif [[ ${NC_INSTALL} == "y" ]]; then
#/web/nextcloud/occ --no-warnings config:system:set user_backends 0 arguments 0 --value={dovecot:143/imap/tls/novalidate-cert}; \
#/web/nextcloud/occ --no-warnings config:system:set user_backends 0 class --value=OC_User_IMAP; \

echo -e "\r[4/4] Enabling NGINX Configuration"
echo -e "\r[4/4] Enabling Nginx Configuration"
cp ./data/assets/nextcloud/nextcloud.conf ./data/conf/nginx/
sed -i "s/NC_SUBD/${NC_SUBD}/g" ./data/conf/nginx/nextcloud.conf
sleep 2
@@ -193,11 +193,11 @@ elif [[ ${NC_INSTALL} == "y" ]]; then
echo "* INSTALL DATE: $(date +%Y-%m-%d_%H-%M-%S) *"
echo "******************************************"
echo ""
echo -e "\033[36mDatabase Name: ${NC_DBNAME}\033[0m"
echo -e "\033[36mDatabase User: ${NC_DBUSER}\033[0m"
echo -e "\033[36mDatabase Password: ${NC_DBPASS}\033[0m"
echo -e "\033[36mDatabase name: ${NC_DBNAME}\033[0m"
echo -e "\033[36mDatabase user: ${NC_DBUSER}\033[0m"
echo -e "\033[36mDatabase password: ${NC_DBPASS}\033[0m"
echo ""
echo -e "\033[31mUI Admin Password: ${ADMIN_NC_PASS}\033[0m"
echo -e "\033[31mUI admin password: ${ADMIN_NC_PASS}\033[0m"
echo ""
update.sh (27 changed lines)
@@ -177,35 +177,38 @@ remove_obsolete_nginx_ports() {
detect_docker_compose_command(){
if ! [ "${DOCKER_COMPOSE_VERSION}" == "native" ] && ! [ "${DOCKER_COMPOSE_VERSION}" == "standalone" ]; then
if command -v docker compose > /dev/null 2>&1; then
version=$(docker compose version --short)
if [[ $version =~ ^2\.([0-9]+)\.([0-9]+) ]]; then
COMPOSE_VERSION=native
if docker compose > /dev/null 2>&1; then
if docker compose version --short | grep "2." > /dev/null 2>&1; then
DOCKER_COMPOSE_VERSION=native
COMPOSE_COMMAND="docker compose"
echo -e "\e[31mFound Docker Compose Plugin (native).\e[0m"
echo -e "\e[31mSetting the DOCKER_COMPOSE_VERSION Variable to native\e[0m"
sleep 2
echo -e "\e[33mNotice: You´ll have to update this Compose Version via your Package Manager manually!\e[0m"
echo -e "\e[33mNotice: You'll have to update this Compose Version via your Package Manager manually!\e[0m"
else
echo -e "\e[31mCannot find Docker Compose with a Version Higher than 2.X.X.\e[0m"
echo -e "\e[31mPlease update/install manually regarding to this doc site: https://mailcow.github.io/mailcow-dockerized-docs/i_u_m/i_u_m_install/\e[0m"
echo -e "\e[31mPlease update/install it manually regarding to this doc site: https://mailcow.github.io/mailcow-dockerized-docs/i_u_m/i_u_m_install/\e[0m"
exit 1
fi
elif command -v docker-compose > /dev/null 2>&1; then
version=$(docker-compose version --short)
if [[ $version =~ ^2\.([0-9]+)\.([0-9]+) ]]; then
COMPOSE_VERSION=standalone
elif docker-compose > /dev/null 2>&1; then
if ! [[ $(alias docker-compose 2> /dev/null) ]] ; then
if docker-compose version --short | grep "^2." > /dev/null 2>&1; then
DOCKER_COMPOSE_VERSION=standalone
COMPOSE_COMMAND="docker-compose"
echo -e "\e[31mFound Docker Compose Standalone.\e[0m"
echo -e "\e[31mSetting the DOCKER_COMPOSE_VERSION Variable to standalone\e[0m"
sleep 2
echo -e "\e[33mNotice: For an automatic update of docker-compose please use the update_compose.sh scripts located at the helper-scripts folder.\e[0m"
else
echo -e "\e[31mCannot find Docker Compose with a Version Higher than 2.X.X.\e[0m"
echo -e "\e[31mPlease update/install manually regarding to this doc site: https://mailcow.github.io/mailcow-dockerized-docs/i_u_m/i_u_m_install/\e[0m"
echo -e "\e[31mPlease update/install regarding to this doc site: https://mailcow.github.io/mailcow-dockerized-docs/i_u_m/i_u_m_install/\e[0m"
exit 1
fi
fi

else
echo -e "\e[31mCannot find Docker Compose.\e[0m"
echo -e "\e[31mPlease install it manually regarding to this doc site: https://mailcow.github.io/mailcow-dockerized-docs/i_u_m/i_u_m_install/\e[0m"
echo -e "\e[31mPlease install it regarding to this doc site: https://mailcow.github.io/mailcow-dockerized-docs/i_u_m/i_u_m_install/\e[0m"
exit 1
fi
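Both this update.sh hunk and the similar change in the earlier setup script replace the bash regex match on the Compose version with a grep on the output of docker compose version --short (plugin) or docker-compose version --short (standalone), requiring a 2.x release either way. The same decision, sketched in Python purely for illustration (not part of either script):

```python
# Illustrative sketch: prefer the compose plugin, fall back to the standalone
# binary, and accept only a 2.x version, mirroring the shell logic above.
import shutil
import subprocess

def detect_compose():
    candidates = [("native", ["docker", "compose"]), ("standalone", ["docker-compose"])]
    for flavour, base_cmd in candidates:
        if shutil.which(base_cmd[0]) is None:
            continue  # binary not installed at all
        try:
            version = subprocess.run(base_cmd + ["version", "--short"],
                                     capture_output=True, text=True, check=True).stdout.strip()
        except (OSError, subprocess.CalledProcessError):
            continue  # plugin or binary missing or not working
        if version.startswith("2."):
            return flavour, " ".join(base_cmd)  # e.g. ("native", "docker compose")
    return None, None

if __name__ == "__main__":
    print(detect_compose())
```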