Merge remote-tracking branch 'origin/nightly' into feature/sso

FreddleSpl0it
2023-07-07 09:28:05 +02:00
58 changed files with 616 additions and 310 deletions


@@ -1,4 +1,4 @@
FROM clamav/clamav:1.0_base
FROM clamav/clamav:1.0.1-1_base
LABEL maintainer "André Peters <andre.peters@servercow.de>"


@@ -9,6 +9,7 @@ import os
import json
import asyncio
import redis
import platform
from datetime import datetime
import logging
from logging.config import dictConfig
@@ -370,7 +371,7 @@ class DockerUtils:
return exec_run_handler('utf8_text_only', sieve_return)
# api call: container_post - post_action: exec - cmd: sieve - task: print
def container_post__exec__sieve__print(self, container_id, request_json):
if 'username' in request.json and 'script_name' in request_json:
if 'username' in request_json and 'script_name' in request_json:
for container in self.docker_client.containers.list(filters={"id": container_id}):
cmd = ["/bin/bash", "-c", "/usr/bin/doveadm sieve get -u '" + request_json['username'].replace("'", "'\\''") + "' '" + request_json['script_name'].replace("'", "'\\''") + "'"]
sieve_return = container.exec_run(cmd)
@@ -380,7 +381,15 @@ class DockerUtils:
if 'maildir' in request_json:
for container in self.docker_client.containers.list(filters={"id": container_id}):
sane_name = re.sub(r'\W+', '', request_json['maildir'])
cmd = ["/bin/bash", "-c", "if [[ -d '/var/vmail/" + request_json['maildir'].replace("'", "'\\''") + "' ]]; then /bin/mv '/var/vmail/" + request_json['maildir'].replace("'", "'\\''") + "' '/var/vmail/_garbage/" + str(int(time.time())) + "_" + sane_name + "'; fi"]
vmail_name = request_json['maildir'].replace("'", "'\\''")
cmd_vmail = "if [[ -d '/var/vmail/" + vmail_name + "' ]]; then /bin/mv '/var/vmail/" + vmail_name + "' '/var/vmail/_garbage/" + str(int(time.time())) + "_" + sane_name + "'; fi"
index_name = request_json['maildir'].split("/")
if len(index_name) > 1:
index_name = index_name[1].replace("'", "'\\''") + "@" + index_name[0].replace("'", "'\\''")
cmd_vmail_index = "if [[ -d '/var/vmail_index/" + index_name + "' ]]; then /bin/mv '/var/vmail_index/" + index_name + "' '/var/vmail/_garbage/" + str(int(time.time())) + "_" + sane_name + "_index'; fi"
cmd = ["/bin/bash", "-c", cmd_vmail + " && " + cmd_vmail_index]
else:
cmd = ["/bin/bash", "-c", cmd_vmail]
maildir_cleanup = container.exec_run(cmd, user='vmail')
return exec_run_handler('generic', maildir_cleanup)
# api call: container_post - post_action: exec - cmd: rspamd - task: worker_password
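Note: the reworked maildir cleanup above derives two paths from the same maildir value: the mail directory under /var/vmail/<domain>/<user> and the Dovecot index directory under /var/vmail_index/<user>@<domain>, both moved into _garbage with a timestamped, sanitized name. A minimal standalone sketch of that quoting and path-splitting logic, using a hypothetical build_cleanup_commands helper outside of dockerapi:

import re
import time

def build_cleanup_commands(maildir: str) -> list[str]:
    """Sketch of the shell-command construction above (not mailcow's API)."""
    quote = lambda s: s.replace("'", "'\\''")            # escape single quotes for bash
    sane_name = re.sub(r'\W+', '', maildir)              # safe suffix for the _garbage folder
    vmail_name = quote(maildir)
    stamp = str(int(time.time()))
    cmds = ["if [[ -d '/var/vmail/" + vmail_name + "' ]]; then /bin/mv '/var/vmail/" + vmail_name +
            "' '/var/vmail/_garbage/" + stamp + "_" + sane_name + "'; fi"]
    parts = maildir.split("/")
    if len(parts) > 1:                                    # "domain/user" -> index dir "user@domain"
        index_name = quote(parts[1]) + "@" + quote(parts[0])
        cmds.append("if [[ -d '/var/vmail_index/" + index_name + "' ]]; then /bin/mv '/var/vmail_index/" +
                    index_name + "' '/var/vmail/_garbage/" + stamp + "_" + sane_name + "_index'; fi")
    return cmds

print(" && ".join(build_cleanup_commands("example.org/alice")))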
@@ -477,7 +486,8 @@ async def get_host_stats(wait=5):
"swap": psutil.swap_memory()
},
"uptime": time.time() - psutil.boot_time(),
"system_time": system_time.strftime("%d.%m.%Y %H:%M:%S")
"system_time": system_time.strftime("%d.%m.%Y %H:%M:%S"),
"architecture": platform.machine()
}
redis_client.set('host_stats', json.dumps(host_stats), ex=10)
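Note: the hunk above adds platform.machine() to the host stats that dockerapi caches in Redis for ten seconds. A trimmed, self-contained sketch of that payload (memory fields omitted; assumes psutil and redis are installed and a local Redis is reachable):

import json
import platform
import time
from datetime import datetime

import psutil
import redis

redis_client = redis.Redis(host="localhost", decode_responses=True)   # assumption: local Redis

host_stats = {
    "uptime": time.time() - psutil.boot_time(),
    "system_time": datetime.now().strftime("%d.%m.%Y %H:%M:%S"),
    "architecture": platform.machine(),                # new field, e.g. "x86_64" or "aarch64"
}
redis_client.set("host_stats", json.dumps(host_stats), ex=10)          # cache for 10 seconds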


@@ -21,6 +21,7 @@ RUN groupadd -g 5000 vmail \
&& touch /etc/default/locale \
&& apt-get update \
&& apt-get -y --no-install-recommends install \
build-essential \
apt-transport-https \
ca-certificates \
cpanminus \
@@ -61,6 +62,7 @@ RUN groupadd -g 5000 vmail \
libproc-processtable-perl \
libreadonly-perl \
libregexp-common-perl \
libssl-dev \
libsys-meminfo-perl \
libterm-readkey-perl \
libtest-deep-perl \
@@ -112,6 +114,8 @@ RUN groupadd -g 5000 vmail \
&& apt-get autoclean \
&& rm -rf /var/lib/apt/lists/* \
&& rm -rf /tmp/* /var/tmp/* /root/.cache/
# imapsync dependencies
RUN cpan Crypt::OpenSSL::PKCS12
COPY trim_logs.sh /usr/local/bin/trim_logs.sh
COPY clean_q_aged.sh /usr/local/bin/clean_q_aged.sh


@@ -8492,6 +8492,7 @@ sub xoauth2
require HTML::Entities ;
require JSON ;
require JSON::WebToken::Crypt::RSA ;
require Crypt::OpenSSL::PKCS12;
require Crypt::OpenSSL::RSA ;
require Encode::Byte ;
require IO::Socket::SSL ;
@@ -8532,8 +8533,9 @@ sub xoauth2
$sync->{ debug } and myprint( "Service account: $iss\nKey file: $keyfile\nKey password: $keypass\n");
# Get private key from p12 file (would be better in perl...)
$key = `openssl pkcs12 -in "$keyfile" -nodes -nocerts -passin pass:$keypass -nomacver`;
# Get private key from p12 file
my $pkcs12 = Crypt::OpenSSL::PKCS12->new_from_file($keyfile);
$key = $pkcs12->private_key($keypass);
$sync->{ debug } and myprint( "Private key:\n$key\n");
}
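Note: the imapsync change above swaps the `openssl pkcs12` shell-out for Crypt::OpenSSL::PKCS12, so the key password no longer appears on a command line and the key is parsed in-process. The same idea, illustrated in Python with the cryptography package (an illustration of the approach only, not imapsync code; file name and password are placeholders):

# In-process PKCS#12 parsing instead of shelling out to `openssl pkcs12`.
from cryptography.hazmat.primitives.serialization import (
    pkcs12, Encoding, PrivateFormat, NoEncryption,
)

with open("service-account.p12", "rb") as fh:          # hypothetical key file
    key, cert, extra_certs = pkcs12.load_key_and_certificates(fh.read(), b"notasecret")

pem_key = key.private_bytes(Encoding.PEM, PrivateFormat.PKCS8, NoEncryption())
print(pem_key.decode())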


@@ -64,28 +64,40 @@ def refreshF2boptions():
global f2boptions
global quit_now
global exit_code
f2boptions = {}
if not r.get('F2B_OPTIONS'):
f2boptions = {}
f2boptions['ban_time'] = int
f2boptions['max_attempts'] = int
f2boptions['retry_window'] = int
f2boptions['netban_ipv4'] = int
f2boptions['netban_ipv6'] = int
f2boptions['ban_time'] = r.get('F2B_BAN_TIME') or 1800
f2boptions['max_attempts'] = r.get('F2B_MAX_ATTEMPTS') or 10
f2boptions['retry_window'] = r.get('F2B_RETRY_WINDOW') or 600
f2boptions['netban_ipv4'] = r.get('F2B_NETBAN_IPV4') or 32
f2boptions['netban_ipv6'] = r.get('F2B_NETBAN_IPV6') or 128
r.set('F2B_OPTIONS', json.dumps(f2boptions, ensure_ascii=False))
f2boptions['ban_time'] = r.get('F2B_BAN_TIME')
f2boptions['max_ban_time'] = r.get('F2B_MAX_BAN_TIME')
f2boptions['ban_time_increment'] = r.get('F2B_BAN_TIME_INCREMENT')
f2boptions['max_attempts'] = r.get('F2B_MAX_ATTEMPTS')
f2boptions['retry_window'] = r.get('F2B_RETRY_WINDOW')
f2boptions['netban_ipv4'] = r.get('F2B_NETBAN_IPV4')
f2boptions['netban_ipv6'] = r.get('F2B_NETBAN_IPV6')
else:
try:
f2boptions = {}
f2boptions = json.loads(r.get('F2B_OPTIONS'))
except ValueError:
print('Error loading F2B options: F2B_OPTIONS is not json')
quit_now = True
exit_code = 2
verifyF2boptions(f2boptions)
r.set('F2B_OPTIONS', json.dumps(f2boptions, ensure_ascii=False))
def verifyF2boptions(f2boptions):
verifyF2boption(f2boptions,'ban_time', 1800)
verifyF2boption(f2boptions,'max_ban_time', 10000)
verifyF2boption(f2boptions,'ban_time_increment', True)
verifyF2boption(f2boptions,'max_attempts', 10)
verifyF2boption(f2boptions,'retry_window', 600)
verifyF2boption(f2boptions,'netban_ipv4', 32)
verifyF2boption(f2boptions,'netban_ipv6', 128)
def verifyF2boption(f2boptions, f2boption, f2bdefault):
f2boptions[f2boption] = f2boptions[f2boption] if f2boption in f2boptions and f2boptions[f2boption] is not None else f2bdefault
def refreshF2bregex():
global f2bregex
global quit_now
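Note: the verifyF2boption helper introduced above fills in any option that is missing or None with its default, so older F2B_OPTIONS blobs already stored in Redis keep working after the new max_ban_time and ban_time_increment keys are added. A small self-contained demonstration of that defaulting rule:

# Demo of the defaulting rule in verifyF2boption above.
def verifyF2boption(f2boptions, f2boption, f2bdefault):
    f2boptions[f2boption] = f2boptions[f2boption] if f2boption in f2boptions and f2boptions[f2boption] is not None else f2bdefault

opts = {'ban_time': 3600, 'netban_ipv6': None}   # an old options blob without the new keys
verifyF2boption(opts, 'ban_time', 1800)          # present -> kept at 3600
verifyF2boption(opts, 'netban_ipv6', 128)        # present but None -> reset to 128
verifyF2boption(opts, 'max_ban_time', 10000)     # missing -> filled with default
print(opts)  # {'ban_time': 3600, 'netban_ipv6': 128, 'max_ban_time': 10000}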
@@ -147,6 +159,7 @@ def ban(address):
global lock
refreshF2boptions()
BAN_TIME = int(f2boptions['ban_time'])
BAN_TIME_INCREMENT = bool(f2boptions['ban_time_increment'])
MAX_ATTEMPTS = int(f2boptions['max_attempts'])
RETRY_WINDOW = int(f2boptions['retry_window'])
NETBAN_IPV4 = '/' + str(f2boptions['netban_ipv4'])
@@ -174,20 +187,16 @@ def ban(address):
net = ipaddress.ip_network((address + (NETBAN_IPV4 if type(ip) is ipaddress.IPv4Address else NETBAN_IPV6)), strict=False)
net = str(net)
if not net in bans or time.time() - bans[net]['last_attempt'] > RETRY_WINDOW:
bans[net] = { 'attempts': 0 }
active_window = RETRY_WINDOW
else:
active_window = time.time() - bans[net]['last_attempt']
if not net in bans:
bans[net] = {'attempts': 0, 'last_attempt': 0, 'ban_counter': 0}
bans[net]['attempts'] += 1
bans[net]['last_attempt'] = time.time()
active_window = time.time() - bans[net]['last_attempt']
if bans[net]['attempts'] >= MAX_ATTEMPTS:
cur_time = int(round(time.time()))
logCrit('Banning %s for %d minutes' % (net, BAN_TIME / 60))
NET_BAN_TIME = BAN_TIME if not BAN_TIME_INCREMENT else BAN_TIME * 2 ** bans[net]['ban_counter']
logCrit('Banning %s for %d minutes' % (net, NET_BAN_TIME / 60 ))
if type(ip) is ipaddress.IPv4Address:
with lock:
chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), 'MAILCOW')
@@ -206,7 +215,7 @@ def ban(address):
rule.target = target
if rule not in chain.rules:
chain.insert_rule(rule)
r.hset('F2B_ACTIVE_BANS', '%s' % net, cur_time + BAN_TIME)
r.hset('F2B_ACTIVE_BANS', '%s' % net, cur_time + NET_BAN_TIME)
else:
logWarn('%d more attempts in the next %d seconds until %s is banned' % (MAX_ATTEMPTS - bans[net]['attempts'], RETRY_WINDOW, net))
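Note: with ban_time_increment enabled, the effective ban length computed above doubles with every previous ban of the same network: NET_BAN_TIME = BAN_TIME * 2 ** ban_counter. A short sketch of that progression, using the 1800-second default from verifyF2boptions:

# Exponential ban time as used in ban() above (1800 s base from the defaults).
BAN_TIME = 1800
BAN_TIME_INCREMENT = True

def net_ban_time(ban_counter: int) -> int:
    return BAN_TIME if not BAN_TIME_INCREMENT else BAN_TIME * 2 ** ban_counter

for counter in range(4):
    print(counter, net_ban_time(counter))   # 1800, 3600, 7200, 14400 seconds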
@@ -238,7 +247,8 @@ def unban(net):
r.hdel('F2B_ACTIVE_BANS', '%s' % net)
r.hdel('F2B_QUEUE_UNBAN', '%s' % net)
if net in bans:
del bans[net]
bans[net]['attempts'] = 0
bans[net]['ban_counter'] += 1
def permBan(net, unban=False):
global lock
@@ -332,7 +342,7 @@ def watch():
logWarn('%s matched rule id %s (%s)' % (addr, rule_id, item['data']))
ban(addr)
except Exception as ex:
logWarn('Error reading log line from pubsub')
logWarn('Error reading log line from pubsub: %s' % ex)
quit_now = True
exit_code = 2
@@ -359,21 +369,30 @@ def snat4(snat_target):
chain = iptc.Chain(table, 'POSTROUTING')
table.autocommit = False
new_rule = get_snat4_rule()
for position, rule in enumerate(chain.rules):
match = all((
new_rule.get_src() == rule.get_src(),
new_rule.get_dst() == rule.get_dst(),
new_rule.target.parameters == rule.target.parameters,
new_rule.target.name == rule.target.name
))
if position == 0:
if not match:
logInfo(f'Added POSTROUTING rule for source network {new_rule.src} to SNAT target {snat_target}')
chain.insert_rule(new_rule)
else:
if match:
logInfo(f'Remove rule for source network {new_rule.src} to SNAT target {snat_target} from POSTROUTING chain at position {position}')
chain.delete_rule(rule)
if not chain.rules:
# if there are no rules in the chain, insert the new rule directly
logInfo(f'Added POSTROUTING rule for source network {new_rule.src} to SNAT target {snat_target}')
chain.insert_rule(new_rule)
else:
for position, rule in enumerate(chain.rules):
if not hasattr(rule.target, 'parameter'):
continue
match = all((
new_rule.get_src() == rule.get_src(),
new_rule.get_dst() == rule.get_dst(),
new_rule.target.parameters == rule.target.parameters,
new_rule.target.name == rule.target.name
))
if position == 0:
if not match:
logInfo(f'Added POSTROUTING rule for source network {new_rule.src} to SNAT target {snat_target}')
chain.insert_rule(new_rule)
else:
if match:
logInfo(f'Remove rule for source network {new_rule.src} to SNAT target {snat_target} from POSTROUTING chain at position {position}')
chain.delete_rule(rule)
table.commit()
table.autocommit = True
except:
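Note: the reworked snat4 loop above keeps exactly one matching SNAT rule at the top of POSTROUTING: an empty chain gets the rule inserted directly, a non-matching position 0 gets the rule inserted above it, and matching duplicates further down are deleted. A rough sketch of that invariant on plain data, with a hypothetical list of strings standing in for python-iptables rule objects and the tuple comparison:

# Rough sketch of the "one rule, at position 0" invariant enforced above.
def reconcile(rules: list[str], wanted: str) -> list[str]:
    if not rules:                       # empty chain: insert the rule directly
        return [wanted]
    out = []
    for position, rule in enumerate(rules):
        if position == 0 and rule != wanted:
            out.append(wanted)          # make sure the wanted rule sits at the top
        if position != 0 and rule == wanted:
            continue                    # drop duplicates further down the chain
        out.append(rule)
    return out

print(reconcile([], "SNAT"))                    # ['SNAT']
print(reconcile(["other", "SNAT"], "SNAT"))     # ['SNAT', 'other']  (duplicate removed)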
@@ -418,6 +437,8 @@ def autopurge():
time.sleep(10)
refreshF2boptions()
BAN_TIME = int(f2boptions['ban_time'])
MAX_BAN_TIME = int(f2boptions['max_ban_time'])
BAN_TIME_INCREMENT = bool(f2boptions['ban_time_increment'])
MAX_ATTEMPTS = int(f2boptions['max_attempts'])
QUEUE_UNBAN = r.hgetall('F2B_QUEUE_UNBAN')
if QUEUE_UNBAN:
@@ -425,7 +446,9 @@ def autopurge():
unban(str(net))
for net in bans.copy():
if bans[net]['attempts'] >= MAX_ATTEMPTS:
if time.time() - bans[net]['last_attempt'] > BAN_TIME:
NET_BAN_TIME = BAN_TIME if not BAN_TIME_INCREMENT else BAN_TIME * 2 ** bans[net]['ban_counter']
TIME_SINCE_LAST_ATTEMPT = time.time() - bans[net]['last_attempt']
if TIME_SINCE_LAST_ATTEMPT > NET_BAN_TIME or TIME_SINCE_LAST_ATTEMPT > MAX_BAN_TIME:
unban(net)
def isIpNetwork(address):
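Note: autopurge now unbans a network once the time since its last attempt exceeds either its (possibly doubled) NET_BAN_TIME or the global max_ban_time ceiling, so incremented bans cannot grow without bound. A sketch of that check, using the defaults from verifyF2boptions above:

# Sketch of the autopurge expiry check (defaults: ban_time=1800, max_ban_time=10000).
import time

def should_unban(last_attempt: float, ban_counter: int,
                 ban_time: int = 1800, max_ban_time: int = 10000,
                 ban_time_increment: bool = True) -> bool:
    net_ban_time = ban_time if not ban_time_increment else ban_time * 2 ** ban_counter
    since_last = time.time() - last_attempt
    return since_last > net_ban_time or since_last > max_ban_time

# A network on its 4th ban (counter=3) has a net_ban_time of 14400 s,
# but the 10000 s max_ban_time still releases it after roughly 2.8 hours.
print(should_unban(time.time() - 12000, ban_counter=3))   # True (12000 > 10000)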


@@ -1,4 +1,4 @@
FROM php:8.1-fpm-alpine3.17
FROM php:8.2-fpm-alpine3.17
LABEL maintainer "Andre Peters <andre.peters@servercow.de>"
# renovate: datasource=github-tags depName=krakjoe/apcu versioning=semver-coerced
@@ -12,7 +12,7 @@ ARG MEMCACHED_PECL_VERSION=3.2.0
# renovate: datasource=github-tags depName=phpredis/phpredis versioning=semver-coerced
ARG REDIS_PECL_VERSION=5.3.7
# renovate: datasource=github-tags depName=composer/composer versioning=semver-coerced
ARG COMPOSER_VERSION=2.5.1
ARG COMPOSER_VERSION=2.5.5
RUN apk add -U --no-cache autoconf \
aspell-dev \
@@ -52,6 +52,7 @@ RUN apk add -U --no-cache autoconf \
libxpm-dev \
libzip \
libzip-dev \
linux-headers \
make \
mysql-client \
openldap-dev \
@@ -75,7 +76,7 @@ RUN apk add -U --no-cache autoconf \
--with-webp \
--with-xpm \
--with-avif \
&& docker-php-ext-install -j 4 exif gd gettext intl ldap opcache pcntl pdo pdo_mysql pspell soap sockets zip bcmath gmp \
&& docker-php-ext-install -j 4 exif gd gettext intl ldap opcache pcntl pdo pdo_mysql pspell soap sockets sysvsem zip bcmath gmp \
&& docker-php-ext-configure imap --with-imap --with-imap-ssl \
&& docker-php-ext-install -j 4 imap \
&& curl --silent --show-error https://getcomposer.org/installer | php -- --version=${COMPOSER_VERSION} \
@@ -99,6 +100,7 @@ RUN apk add -U --no-cache autoconf \
libxml2-dev \
libxpm-dev \
libzip-dev \
linux-headers \
make \
openldap-dev \
pcre-dev \