Compare commits

..

1 Commits

Author SHA1 Message Date
missytake
84e731034a tests: make sure chatmail-metadata was started
fix a flaky test: https://github.com/chatmail/relay/pull/856#issuecomment-3919881473
since #856 chatmail-metadata is restarted every 5 second, if it didn't come up after that, the failure likely sits deeper.
2026-03-04 18:03:48 +01:00
53 changed files with 529 additions and 1478 deletions

View File

@@ -1,32 +1,21 @@
name: Run unit-tests and container-based deploy+test verification
name: CI
on:
# Triggers when a PR is merged into main or a direct push occurs
push:
branches: [ "main" ]
# Triggers for any PR (and its subsequent commits) targeting the main branch
pull_request:
branches: [ "main" ]
# Newest push wins: Prevents multiple runs from clashing and wasting runner efforts
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
push:
jobs:
tox:
name: isolated chatmaild tests
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v4
# Checkout pull request HEAD commit instead of merge commit
# Otherwise `test_deployed_state` will be unhappy.
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: download filtermail
run: curl -L https://github.com/chatmail/filtermail/releases/download/v0.6.1/filtermail-x86_64 -o /usr/local/bin/filtermail && chmod +x /usr/local/bin/filtermail
run: curl -L https://github.com/chatmail/filtermail/releases/download/v0.5.1/filtermail-x86_64 -o /usr/local/bin/filtermail && chmod +x /usr/local/bin/filtermail
- name: run chatmaild tests
working-directory: chatmaild
run: pipx run tox
@@ -35,9 +24,7 @@ jobs:
name: deploy-chatmail tests
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
with:
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/checkout@v4
- name: initenv
run: scripts/initenv.sh
@@ -51,23 +38,5 @@ jobs:
- name: run deploy-chatmail offline tests
run: pytest --pyargs cmdeploy
lxc-test:
name: LXC deploy and test
uses: chatmail/cmlxc/.github/workflows/lxc-test.yml@v0.10.0
with:
cmlxc_commands: |
cmlxc init
# single cmdeploy relay test
cmlxc -v deploy-cmdeploy --source ./repo cm0
cmlxc -v test-mini cm0
cmlxc -v test-cmdeploy cm0
# cross cmdeploy relay test
cmlxc -v deploy-cmdeploy --source ./repo --ipv4-only cm1
cmlxc -v test-cmdeploy cm0 cm1
# cross cmdeploy/madmail relay tests
cmlxc -v deploy-madmail mad0
cmlxc -v test-cmdeploy cm0 mad0
cmlxc -v test-mini cm0 mad0
cmlxc -v test-mini mad0 cm0
# all other cmdeploy commands require a staging server
# see https://github.com/deltachat/chatmail/issues/100

View File

@@ -0,0 +1,105 @@
name: deploy on staging-ipv4.testrun.org, and run tests
on:
push:
branches:
- main
pull_request:
paths-ignore:
- 'scripts/**'
- '**/README.md'
- 'CHANGELOG.md'
- 'LICENSE'
jobs:
deploy:
name: deploy on staging-ipv4.testrun.org, and run tests
runs-on: ubuntu-latest
timeout-minutes: 30
environment:
name: staging-ipv4.testrun.org
url: https://staging-ipv4.testrun.org/
concurrency: staging-ipv4.testrun.org
steps:
- uses: actions/checkout@v4
- name: prepare SSH
run: |
mkdir ~/.ssh
echo "${{ secrets.STAGING_SSH_KEY }}" >> ~/.ssh/id_ed25519
chmod 600 ~/.ssh/id_ed25519
ssh-keyscan staging-ipv4.testrun.org > ~/.ssh/known_hosts
# save previous acme & dkim state
rsync -avz root@staging-ipv4.testrun.org:/var/lib/acme acme-ipv4 || true
rsync -avz root@staging-ipv4.testrun.org:/etc/dkimkeys dkimkeys-ipv4 || true
# store previous acme & dkim state on ns.testrun.org, if it contains useful certs
if [ -f dkimkeys-ipv4/dkimkeys/opendkim.private ]; then rsync -avz -e "ssh -o StrictHostKeyChecking=accept-new" dkimkeys-ipv4 root@ns.testrun.org:/tmp/ || true; fi
if [ "$(ls -A acme-ipv4/acme/certs)" ]; then rsync -avz -e "ssh -o StrictHostKeyChecking=accept-new" acme-ipv4 root@ns.testrun.org:/tmp/ || true; fi
# make sure CAA record isn't set
scp -o StrictHostKeyChecking=accept-new .github/workflows/staging-ipv4.testrun.org-default.zone root@ns.testrun.org:/etc/nsd/staging-ipv4.testrun.org.zone
ssh root@ns.testrun.org sed -i '/CAA/d' /etc/nsd/staging-ipv4.testrun.org.zone
ssh root@ns.testrun.org nsd-checkzone staging-ipv4.testrun.org /etc/nsd/staging-ipv4.testrun.org.zone
ssh root@ns.testrun.org systemctl reload nsd
- name: rebuild staging-ipv4.testrun.org to have a clean VPS
run: |
curl -X POST \
-H "Authorization: Bearer ${{ secrets.HETZNER_API_TOKEN }}" \
-H "Content-Type: application/json" \
-d '{"image":"debian-12"}' \
"https://api.hetzner.cloud/v1/servers/${{ secrets.STAGING_IPV4_SERVER_ID }}/actions/rebuild"
- run: scripts/initenv.sh
- name: append venv/bin to PATH
run: echo venv/bin >>$GITHUB_PATH
- name: upload TLS cert after rebuilding
run: |
echo " --- wait until staging-ipv4.testrun.org VPS is rebuilt --- "
rm ~/.ssh/known_hosts
while ! ssh -o ConnectTimeout=180 -o StrictHostKeyChecking=accept-new -v root@staging-ipv4.testrun.org id -u ; do sleep 1 ; done
ssh -o StrictHostKeyChecking=accept-new -v root@staging-ipv4.testrun.org id -u
# download acme & dkim state from ns.testrun.org
rsync -e "ssh -o StrictHostKeyChecking=accept-new" -avz root@ns.testrun.org:/tmp/acme-ipv4/acme acme-restore || true
rsync -avz root@ns.testrun.org:/tmp/dkimkeys-ipv4/dkimkeys dkimkeys-restore || true
# restore acme & dkim state to staging2.testrun.org
rsync -avz acme-restore/acme root@staging-ipv4.testrun.org:/var/lib/ || true
rsync -avz dkimkeys-restore/dkimkeys root@staging-ipv4.testrun.org:/etc/ || true
ssh -o StrictHostKeyChecking=accept-new -v root@staging-ipv4.testrun.org chown root:root -R /var/lib/acme || true
- name: run deploy-chatmail offline tests
run: pytest --pyargs cmdeploy
- name: setup dependencies
run: |
ssh root@staging-ipv4.testrun.org apt update
ssh root@staging-ipv4.testrun.org apt install -y git python3.11-venv python3-dev gcc
ssh root@staging-ipv4.testrun.org git clone https://github.com/chatmail/relay
ssh root@staging-ipv4.testrun.org "cd relay && git checkout " ${{ github.head_ref }}
ssh root@staging-ipv4.testrun.org "cd relay && scripts/initenv.sh"
- name: initialize config
run: |
ssh root@staging-ipv4.testrun.org "cd relay && scripts/cmdeploy init staging-ipv4.testrun.org"
ssh root@staging-ipv4.testrun.org "sed -i 's#disable_ipv6 = False#disable_ipv6 = True#' relay/chatmail.ini"
ssh root@staging-ipv4.testrun.org "sed -i 's/#\s*mtail_address/mtail_address/' relay/chatmail.ini"
- run: ssh root@staging-ipv4.testrun.org "cd relay && scripts/cmdeploy run --verbose --skip-dns-check --ssh-host localhost"
- name: set DNS entries
run: |
ssh root@staging-ipv4.testrun.org chown opendkim:opendkim -R /etc/dkimkeys
ssh root@staging-ipv4.testrun.org "cd relay && scripts/cmdeploy dns --zonefile staging-generated.zone --ssh-host localhost"
ssh root@staging-ipv4.testrun.org cat relay/staging-generated.zone >> .github/workflows/staging-ipv4.testrun.org-default.zone
cat .github/workflows/staging-ipv4.testrun.org-default.zone
scp .github/workflows/staging-ipv4.testrun.org-default.zone root@ns.testrun.org:/etc/nsd/staging-ipv4.testrun.org.zone
ssh root@ns.testrun.org nsd-checkzone staging-ipv4.testrun.org /etc/nsd/staging-ipv4.testrun.org.zone
ssh root@ns.testrun.org systemctl reload nsd
- name: cmdeploy test
run: ssh root@staging-ipv4.testrun.org "cd relay && CHATMAIL_DOMAIN2=ci-chatmail.testrun.org scripts/cmdeploy test --slow --ssh-host localhost"
- name: cmdeploy dns
run: ssh root@staging-ipv4.testrun.org "cd relay && scripts/cmdeploy dns -v --ssh-host localhost"

98
.github/workflows/test-and-deploy.yaml vendored Normal file
View File

@@ -0,0 +1,98 @@
name: deploy on staging2.testrun.org, and run tests
on:
push:
branches:
- main
pull_request:
paths-ignore:
- 'scripts/**'
- '**/README.md'
- 'CHANGELOG.md'
- 'LICENSE'
jobs:
deploy:
name: deploy on staging2.testrun.org, and run tests
runs-on: ubuntu-latest
timeout-minutes: 30
environment:
name: staging2.testrun.org
url: https://staging2.testrun.org/
concurrency: staging2.testrun.org
steps:
- uses: actions/checkout@v4
- name: prepare SSH
run: |
mkdir ~/.ssh
echo "${{ secrets.STAGING_SSH_KEY }}" >> ~/.ssh/id_ed25519
chmod 600 ~/.ssh/id_ed25519
ssh-keyscan staging2.testrun.org > ~/.ssh/known_hosts
# save previous acme & dkim state
rsync -avz root@staging2.testrun.org:/var/lib/acme . || true
rsync -avz root@staging2.testrun.org:/etc/dkimkeys . || true
# store previous acme & dkim state on ns.testrun.org, if it contains useful certs
if [ -f dkimkeys/opendkim.private ]; then rsync -avz -e "ssh -o StrictHostKeyChecking=accept-new" dkimkeys root@ns.testrun.org:/tmp/ || true; fi
if [ "$(ls -A acme/certs)" ]; then rsync -avz -e "ssh -o StrictHostKeyChecking=accept-new" acme root@ns.testrun.org:/tmp/ || true; fi
# make sure CAA record isn't set
scp -o StrictHostKeyChecking=accept-new .github/workflows/staging.testrun.org-default.zone root@ns.testrun.org:/etc/nsd/staging2.testrun.org.zone
ssh root@ns.testrun.org sed -i '/CAA/d' /etc/nsd/staging2.testrun.org.zone
ssh root@ns.testrun.org nsd-checkzone staging2.testrun.org /etc/nsd/staging2.testrun.org.zone
ssh root@ns.testrun.org systemctl reload nsd
- name: rebuild staging2.testrun.org to have a clean VPS
run: |
curl -X POST \
-H "Authorization: Bearer ${{ secrets.HETZNER_API_TOKEN }}" \
-H "Content-Type: application/json" \
-d '{"image":"debian-12"}' \
"https://api.hetzner.cloud/v1/servers/${{ secrets.STAGING_SERVER_ID }}/actions/rebuild"
- run: scripts/initenv.sh
- name: append venv/bin to PATH
run: echo venv/bin >>$GITHUB_PATH
- name: upload TLS cert after rebuilding
run: |
echo " --- wait until staging2.testrun.org VPS is rebuilt --- "
rm ~/.ssh/known_hosts
while ! ssh -o ConnectTimeout=180 -o StrictHostKeyChecking=accept-new -v root@staging2.testrun.org id -u ; do sleep 1 ; done
ssh -o StrictHostKeyChecking=accept-new -v root@staging2.testrun.org id -u
# download acme & dkim state from ns.testrun.org
rsync -e "ssh -o StrictHostKeyChecking=accept-new" -avz root@ns.testrun.org:/tmp/acme acme-restore || true
rsync -avz root@ns.testrun.org:/tmp/dkimkeys dkimkeys-restore || true
# restore acme & dkim state to staging2.testrun.org
rsync -avz acme-restore/acme root@staging2.testrun.org:/var/lib/ || true
rsync -avz dkimkeys-restore/dkimkeys root@staging2.testrun.org:/etc/ || true
ssh -o StrictHostKeyChecking=accept-new -v root@staging2.testrun.org chown root:root -R /var/lib/acme || true
- name: add hpk42 key to staging server
run: ssh root@staging2.testrun.org 'curl -s https://github.com/hpk42.keys >> .ssh/authorized_keys'
- name: run deploy-chatmail offline tests
run: pytest --pyargs cmdeploy
- run: |
cmdeploy init staging2.testrun.org
sed -i 's/#\s*mtail_address/mtail_address/' chatmail.ini
- run: cmdeploy run --verbose --skip-dns-check
- name: set DNS entries
run: |
ssh -o StrictHostKeyChecking=accept-new root@staging2.testrun.org chown opendkim:opendkim -R /etc/dkimkeys
cmdeploy dns --zonefile staging-generated.zone --verbose
cat staging-generated.zone >> .github/workflows/staging.testrun.org-default.zone
cat .github/workflows/staging.testrun.org-default.zone
scp .github/workflows/staging.testrun.org-default.zone root@ns.testrun.org:/etc/nsd/staging2.testrun.org.zone
ssh root@ns.testrun.org nsd-checkzone staging2.testrun.org /etc/nsd/staging2.testrun.org.zone
ssh root@ns.testrun.org systemctl reload nsd
- name: cmdeploy test
run: CHATMAIL_DOMAIN2=ci-chatmail.testrun.org cmdeploy test --slow
- name: cmdeploy dns
run: cmdeploy dns -v

View File

@@ -1,20 +1,5 @@
# Changelog for chatmail deployment
## Unreleased
### Features
- Add per-user quota-triggered cleanup (`chatmail-quota-expire`).
When a mailbox exceeds the configured ``max_mailbox_size``,
Dovecot runs the new script which removes the oldest
messages until usage drops to a safe level.
No operator action is required after upgrading;
existing over-quota mailboxes start receiving mail
again immediately and are cleaned up automatically.
The daily `chatmail-expire` timer continues to handle
deletion of old messages, large messages,
and inactive user mailboxes.
## 1.9.0 2025-12-18
### Documentation

View File

@@ -6,7 +6,10 @@ build-backend = "setuptools.build_meta"
name = "chatmaild"
version = "0.3"
dependencies = [
"aiosmtpd",
"iniconfig",
"deltachat-rpc-server",
"deltachat-rpc-client",
"filelock",
"requests",
"crypt-r >= 3.13.1 ; python_version >= '3.11'",
@@ -21,8 +24,8 @@ where = ['src']
[project.scripts]
doveauth = "chatmaild.doveauth:main"
chatmail-metadata = "chatmaild.metadata:main"
chatmail-metrics = "chatmaild.metrics:main"
chatmail-expire = "chatmaild.expire:main"
chatmail-quota-expire = "chatmaild.quota_expire:main"
chatmail-fsreport = "chatmaild.fsreport:main"
lastlogin = "chatmaild.lastlogin:main"
turnserver = "chatmaild.turnserver:main"
@@ -68,7 +71,6 @@ commands =
deps = pytest
pdbpp
pytest-localserver
aiosmtpd
execnet
commands = pytest -v -rsXx {posargs}
"""

View File

@@ -38,9 +38,6 @@ class Config:
self.filtermail_smtp_port_incoming = int(
params.get("filtermail_smtp_port_incoming", "10081")
)
self.filtermail_http_port_incoming = int(
params.get("filtermail_http_port_incoming", "10082")
)
self.postfix_reinject_port = int(params.get("postfix_reinject_port", "10025"))
self.postfix_reinject_port_incoming = int(
params.get("postfix_reinject_port_incoming", "10026")
@@ -95,11 +92,6 @@ class Config:
# old unused option (except for first migration from sqlite to maildir store)
self.passdb_path = Path(params.get("passdb_path", "/home/vmail/passdb.sqlite"))
@property
def max_mailbox_size_mb(self):
"""Return max_mailbox_size as an integer in megabytes."""
return parse_size_mb(self.max_mailbox_size)
def _getbytefile(self):
return open(self._inipath, "rb")
@@ -113,16 +105,6 @@ class Config:
return User(maildir, addr, password_path, uid="vmail", gid="vmail")
def parse_size_mb(limit):
"""Parse a size string like ``500M`` or ``2G`` and return megabytes."""
value = limit.strip().upper().rstrip("B")
if value.endswith("G"):
return int(value[:-1]) * 1024
if value.endswith("M"):
return int(value[:-1])
return int(value)
def write_initial_config(inipath, mail_domain, overrides):
"""Write out default config file, using the specified config value overrides."""
content = get_default_config_content(mail_domain, **overrides)

View File

@@ -1,11 +1,8 @@
import json
import logging
import os
import re
import sys
import filelock
try:
import crypt_r
except ImportError:
@@ -16,7 +13,6 @@ from .dictproxy import DictProxy
from .migrate_db import migrate_from_db_to_maildir
NOCREATE_FILE = "/etc/chatmail-nocreate"
VALID_LOCALPART_RE = re.compile(r"^[a-z0-9._-]+$")
def encrypt_password(password: str):
@@ -56,10 +52,6 @@ def is_allowed_to_create(config: Config, user, cleartext_password) -> bool:
)
return False
if not VALID_LOCALPART_RE.match(localpart):
logging.warning("localpart %r contains invalid characters", localpart)
return False
return True
@@ -148,13 +140,8 @@ class AuthDictProxy(DictProxy):
if not is_allowed_to_create(self.config, addr, cleartext_password):
return
lock = filelock.FileLock(str(user.password_path) + ".lock", timeout=5)
with lock:
userdata = user.get_userdb_dict()
if userdata:
return userdata
user.set_password(encrypt_password(cleartext_password))
print(f"Created address: {addr}", file=sys.stderr)
user.set_password(encrypt_password(cleartext_password))
print(f"Created address: {addr}", file=sys.stderr)
return user.get_userdb_dict()

View File

@@ -13,20 +13,9 @@ to show storage summaries only for first 1000 mailboxes
python -m chatmaild.fsreport /path/to/chatmail.ini --maxnum 1000
to write Prometheus textfile for node_exporter
python -m chatmaild.fsreport --textfile /var/lib/prometheus/node-exporter/
writes to /var/lib/prometheus/node-exporter/fsreport.prom
to also write legacy metrics.py style output (default: /var/www/html/metrics):
python -m chatmaild.fsreport --textfile /var/lib/prometheus/node-exporter/ --legacy-metrics
"""
import os
import tempfile
from argparse import ArgumentParser
from datetime import datetime
@@ -59,19 +48,7 @@ class Report:
self.num_ci_logins = self.num_all_logins = 0
self.login_buckets = {x: 0 for x in (1, 10, 30, 40, 80, 100, 150)}
KiB = 1024
MiB = 1024 * KiB
self.message_size_thresholds = (
0,
100 * KiB,
MiB // 2,
1 * MiB,
2 * MiB,
5 * MiB,
10 * MiB,
)
self.message_buckets = {x: 0 for x in self.message_size_thresholds}
self.message_count_buckets = {x: 0 for x in self.message_size_thresholds}
self.message_buckets = {x: 0 for x in (0, 160000, 500000, 2000000)}
def process_mailbox_stat(self, mailbox):
# categorize login times
@@ -91,10 +68,9 @@ class Report:
for size in self.message_buckets:
for msg in mailbox.messages:
if msg.size >= size:
if self.mdir and f"/{self.mdir}/" not in msg.path:
if self.mdir and not msg.relpath.startswith(self.mdir):
continue
self.message_buckets[size] += msg.size
self.message_count_buckets[size] += 1
self.size_messages += sum(entry.size for entry in mailbox.messages)
self.size_extra += sum(entry.size for entry in mailbox.extrafiles)
@@ -117,10 +93,9 @@ class Report:
pref = f"[{self.mdir}] " if self.mdir else ""
for minsize, sumsize in self.message_buckets.items():
count = self.message_count_buckets[minsize]
percent = (sumsize / all_messages * 100) if all_messages else 0
print(
f"{pref}larger than {HSize(minsize)}: {HSize(sumsize)} ({percent:.2f}%), {count} msgs"
f"{pref}larger than {HSize(minsize)}: {HSize(sumsize)} ({percent:.2f}%)"
)
user_logins = self.num_all_logins - self.num_ci_logins
@@ -136,75 +111,6 @@ class Report:
for days, active in self.login_buckets.items():
print(f"last {days:3} days: {HSize(active)} {p(active)}")
def _write_atomic(self, filepath, content):
"""Atomically write content to filepath via tmp+rename."""
dirpath = os.path.dirname(os.path.abspath(filepath))
fd, tmppath = tempfile.mkstemp(dir=dirpath, suffix=".tmp")
try:
with os.fdopen(fd, "w") as f:
f.write(content)
os.chmod(tmppath, 0o644)
os.rename(tmppath, filepath)
except BaseException:
try:
os.unlink(tmppath)
except OSError:
pass
raise
def dump_textfile(self, filepath):
"""Dump metrics in Prometheus exposition format."""
lines = []
lines.append("# HELP chatmail_storage_bytes Mailbox storage in bytes.")
lines.append("# TYPE chatmail_storage_bytes gauge")
lines.append(f'chatmail_storage_bytes{{kind="messages"}} {self.size_messages}')
lines.append(f'chatmail_storage_bytes{{kind="extra"}} {self.size_extra}')
total = self.size_extra + self.size_messages
lines.append(f'chatmail_storage_bytes{{kind="total"}} {total}')
lines.append("# HELP chatmail_messages_bytes Sum of msg bytes >= threshold.")
lines.append("# TYPE chatmail_messages_bytes gauge")
for minsize, sumsize in self.message_buckets.items():
lines.append(f'chatmail_messages_bytes{{min_size="{minsize}"}} {sumsize}')
lines.append("# HELP chatmail_messages_count Number of msgs >= size threshold.")
lines.append("# TYPE chatmail_messages_count gauge")
for minsize, count in self.message_count_buckets.items():
lines.append(f'chatmail_messages_count{{min_size="{minsize}"}} {count}')
lines.append("# HELP chatmail_accounts Number of accounts.")
lines.append("# TYPE chatmail_accounts gauge")
user_logins = self.num_all_logins - self.num_ci_logins
lines.append(f'chatmail_accounts{{kind="all"}} {self.num_all_logins}')
lines.append(f'chatmail_accounts{{kind="ci"}} {self.num_ci_logins}')
lines.append(f'chatmail_accounts{{kind="user"}} {user_logins}')
lines.append(
"# HELP chatmail_accounts_active Non-CI accounts active within N days."
)
lines.append("# TYPE chatmail_accounts_active gauge")
for days, active in self.login_buckets.items():
lines.append(f'chatmail_accounts_active{{days="{days}"}} {active}')
self._write_atomic(filepath, "\n".join(lines) + "\n")
def dump_compat_textfile(self, filepath):
"""Dump legacy metrics.py style metrics."""
user_logins = self.num_all_logins - self.num_ci_logins
lines = [
"# HELP total number of accounts",
"# TYPE accounts gauge",
f"accounts {self.num_all_logins}",
"# HELP number of CI accounts",
"# TYPE ci_accounts gauge",
f"ci_accounts {self.num_ci_logins}",
"# HELP number of non-CI accounts",
"# TYPE nonci_accounts gauge",
f"nonci_accounts {user_logins}",
]
self._write_atomic(filepath, "\n".join(lines) + "\n")
def main(args=None):
"""Report about filesystem storage usage of all mailboxes and messages"""
@@ -221,21 +127,19 @@ def main(args=None):
"--days",
default=0,
action="store",
help="assume date to be DAYS older than now",
help="assume date to be days older than now",
)
parser.add_argument(
"--min-login-age",
default=0,
metavar="DAYS",
dest="min_login_age",
action="store",
help="only sum up message size if last login is at least DAYS days old",
help="only sum up message size if last login is at least min-login-age days old",
)
parser.add_argument(
"--mdir",
metavar="{cur,new,tmp}",
action="store",
help="only consider messages in specified Maildir subdirectory for summary",
help="only consider 'cur' or 'new' or 'tmp' messages for summary",
)
parser.add_argument(
@@ -244,21 +148,6 @@ def main(args=None):
action="store",
help="maximum number of mailboxes to iterate on",
)
parser.add_argument(
"--textfile",
metavar="PATH",
default=None,
help="write Prometheus textfile to PATH (directory or file); "
"if PATH is a directory, writes 'fsreport.prom' inside it",
)
parser.add_argument(
"--legacy-metrics",
metavar="FILENAME",
nargs="?",
const="/var/www/html/metrics",
default=None,
help="write legacy metrics.py textfile (default: /var/www/html/metrics)",
)
args = parser.parse_args(args)
@@ -272,15 +161,7 @@ def main(args=None):
rep = Report(now=now, min_login_age=int(args.min_login_age), mdir=args.mdir)
for mbox in iter_mailboxes(str(config.mailboxes_dir), maxnum=maxnum):
rep.process_mailbox_stat(mbox)
if args.textfile:
path = args.textfile
if os.path.isdir(path):
path = os.path.join(path, "fsreport.prom")
rep.dump_textfile(path)
if args.legacy_metrics:
rep.dump_compat_textfile(args.legacy_metrics)
if not args.textfile and not args.legacy_metrics:
rep.dump_summary()
rep.dump_summary()
if __name__ == "__main__":

View File

@@ -101,11 +101,7 @@ class MetadataDictProxy(DictProxy):
# Handle `GETMETADATA "" /shared/vendor/deltachat/irohrelay`
return f"O{self.iroh_relay}\n"
elif keyname == "vendor/vendor.dovecot/pvt/server/vendor/deltachat/turn":
try:
res = turn_credentials()
except Exception:
logging.exception("failed to get TURN credentials")
return "N\n"
res = turn_credentials()
port = 3478
return f"O{self.turn_hostname}:{port}:{res}\n"

View File

@@ -0,0 +1,32 @@
#!/usr/bin/env python3
import sys
from pathlib import Path
def main(vmail_dir=None):
if vmail_dir is None:
vmail_dir = sys.argv[1]
accounts = 0
ci_accounts = 0
for path in Path(vmail_dir).iterdir():
if not path.joinpath("cur").is_dir():
continue
accounts += 1
if path.name[:3] in ("ci-", "ac_"):
ci_accounts += 1
print("# HELP total number of accounts")
print("# TYPE accounts gauge")
print(f"accounts {accounts}")
print("# HELP number of CI accounts")
print("# TYPE ci_accounts gauge")
print(f"ci_accounts {ci_accounts}")
print("# HELP number of non-CI accounts")
print("# TYPE nonci_accounts gauge")
print(f"nonci_accounts {accounts - ci_accounts}")
if __name__ == "__main__":
main()

View File

@@ -2,8 +2,8 @@
"""CGI script for creating new accounts."""
import ipaddress
import json
import random
import secrets
import string
from urllib.parse import quote
@@ -15,25 +15,13 @@ ALPHANUMERIC = string.ascii_lowercase + string.digits
ALPHANUMERIC_PUNCT = string.ascii_letters + string.digits + string.punctuation
def wrap_ip(host):
if host.startswith("[") and host.endswith("]"):
return host
try:
ipaddress.ip_address(host)
return f"[{host}]"
except ValueError:
return host
def create_newemail_dict(config: Config):
user = "".join(
secrets.choice(ALPHANUMERIC) for _ in range(config.username_max_length)
)
user = "".join(random.choices(ALPHANUMERIC, k=config.username_max_length))
password = "".join(
secrets.choice(ALPHANUMERIC_PUNCT)
for _ in range(config.password_min_length + 3)
)
return dict(email=f"{user}@{wrap_ip(config.mail_domain)}", password=f"{password}")
return dict(email=f"{user}@{config.mail_domain}", password=f"{password}")
def create_dclogin_url(email, password):

View File

@@ -1,90 +0,0 @@
"""
Quota-triggered per-user mailbox cleanup.
Dovecot calls this script via ``quota_warning``
when a user crosses the quota threshold.
The script removes oldest messages first
to keep the mailbox under a specified target size.
Usage::
chatmail-quota-expire <target_mb> <mailbox_path>
"""
import sys
from argparse import ArgumentParser
from pathlib import Path
from chatmaild.expire import get_file_entry, os_listdir_if_exists
def scan_mailbox_messages(mailbox_dir):
"""Collect FileEntry items from top-level cur/new/tmp only."""
mbox = Path(mailbox_dir)
messages = []
for sub in ("cur", "new", "tmp"):
for name in os_listdir_if_exists(mbox / sub):
if entry := get_file_entry(str(mbox / sub / name)):
messages.append(entry)
return messages
def expire_to_target(mailbox_dir, target_bytes):
"""Remove oldest files until total size <= *target_bytes*.
Returns ``(removed_count, cache_bytes)`` where *cache_bytes*
is the size of the deleted ``dovecot.index.cache`` file
(0 when the file did not exist).
"""
mbox = Path(mailbox_dir)
messages = scan_mailbox_messages(mbox)
total_size = sum(m.size for m in messages)
removed = 0
for count, entry in enumerate(sorted(messages, key=lambda m: m.mtime), 1):
if total_size <= target_bytes:
break
Path(entry.path).unlink(missing_ok=True)
total_size -= entry.size
removed = count
(mbox / "maildirsize").unlink(missing_ok=True)
cache = mbox / "dovecot.index.cache"
try:
cache_bytes = cache.stat().st_size
except FileNotFoundError:
cache_bytes = 0
cache.unlink(missing_ok=True)
return removed, cache_bytes
def main(args=None):
"""Remove mailbox messages to stay within a megabyte target."""
parser = ArgumentParser(description=main.__doc__)
parser.add_argument(
"target_mb",
type=int,
help="target mailbox size in megabytes",
)
parser.add_argument(
"mailbox_path",
help="path to a user mailbox",
)
args = parser.parse_args(args)
target_bytes = args.target_mb * 1024 * 1024
removed_count, cache_bytes = expire_to_target(args.mailbox_path, target_bytes)
if removed_count:
user = Path(args.mailbox_path).name
cache_mb = cache_bytes / 1024 / 1024
print(
f"quota-expire: removed {removed_count} message(s) from {user}"
f" cache={cache_mb:.1f}MB",
file=sys.stderr,
)
return 0
if __name__ == "__main__":
sys.exit(main())

View File

@@ -1,6 +1,6 @@
import pytest
from chatmaild.config import parse_size_mb, read_config
from chatmaild.config import read_config
def test_read_config_basic(example_config):
@@ -121,17 +121,3 @@ def test_config_tls_external_bad_format(make_config):
"tls_external_cert_and_key": "/only/one/path.pem",
},
)
def test_parse_size_mb():
assert parse_size_mb("500M") == 500
assert parse_size_mb("2G") == 2048
assert parse_size_mb(" 1g ") == 1024
assert parse_size_mb("100MB") == 100
assert parse_size_mb("256") == 256
def test_max_mailbox_size_mb(make_config):
config = make_config("chat.example.org")
assert config.max_mailbox_size == "500M"
assert config.max_mailbox_size_mb == 500

View File

@@ -120,60 +120,6 @@ def test_handle_dovecot_protocol_iterate(gencreds, example_config):
assert not lines[2]
def test_invalid_localpart_characters(make_config):
"""Test that is_allowed_to_create rejects localparts with invalid characters."""
config = make_config("chat.example.org", {"username_min_length": "3"})
password = "zequ0Aimuchoodaechik"
domain = config.mail_domain
# valid localparts
assert is_allowed_to_create(config, f"abc123@{domain}", password)
assert is_allowed_to_create(config, f"a.b-c_d@{domain}", password)
# uppercase rejected
assert not is_allowed_to_create(config, f"Abc123@{domain}", password)
assert not is_allowed_to_create(config, f"ABCDEFG@{domain}", password)
# spaces and special chars rejected
assert not is_allowed_to_create(config, f"a b cde@{domain}", password)
assert not is_allowed_to_create(config, f"abc+def@{domain}", password)
assert not is_allowed_to_create(config, f"abc!def@{domain}", password)
assert not is_allowed_to_create(config, f"ab@cdef@{domain}", password)
assert not is_allowed_to_create(config, f"abc/def@{domain}", password)
assert not is_allowed_to_create(config, f"abc\\def@{domain}", password)
def test_concurrent_creation_same_account(dictproxy):
"""Test that concurrent creation of the same account doesn't corrupt password."""
addr = "racetest1@chat.example.org"
password = "zequ0Aimuchoodaechik"
num_threads = 10
results = queue.Queue()
def create():
try:
res = dictproxy.lookup_passdb(addr, password)
results.put(("ok", res))
except Exception:
results.put(("err", traceback.format_exc()))
threads = [threading.Thread(target=create, daemon=True) for _ in range(num_threads)]
for t in threads:
t.start()
for t in threads:
t.join(timeout=10)
passwords_seen = set()
for _ in range(num_threads):
status, res = results.get()
if status == "err":
pytest.fail(f"concurrent creation failed\n{res}")
passwords_seen.add(res["password"])
# all threads must see the same password hash
assert len(passwords_seen) == 1
def test_50_concurrent_lookups_different_accounts(gencreds, dictproxy):
num_threads = 50
req_per_thread = 5

View File

@@ -112,43 +112,6 @@ def test_report(mbox1, example_config):
report_main(args)
def test_report_mdir_filters_by_path(mbox1, example_config):
"""Test that Report with mdir='cur' only counts messages in cur/ subdirectory."""
from chatmaild.fsreport import Report
now = datetime.utcnow().timestamp()
# Set password mtime to old enough so min_login_age check passes
password = Path(mbox1.basedir).joinpath("password")
old_time = now - 86400 * 10 # 10 days ago
os.utime(password, (old_time, old_time))
# Reload mailbox with updated mtime
from chatmaild.expire import MailboxStat
mbox = MailboxStat(mbox1.basedir)
# Report without mdir — should count all messages
rep_all = Report(now=now, min_login_age=1, mdir=None)
rep_all.process_mailbox_stat(mbox)
total_all = rep_all.message_buckets[0]
# Report with mdir='cur' — should only count cur/ messages
rep_cur = Report(now=now, min_login_age=1, mdir="cur")
rep_cur.process_mailbox_stat(mbox)
total_cur = rep_cur.message_buckets[0]
# Report with mdir='new' — should only count new/ messages
rep_new = Report(now=now, min_login_age=1, mdir="new")
rep_new.process_mailbox_stat(mbox)
total_new = rep_new.message_buckets[0]
# cur has 500-byte msg, new has 600-byte msg (from fill_mbox)
assert total_cur == 500
assert total_new == 600
assert total_all == 500 + 600
def test_expiry_cli_basic(example_config, mbox1):
args = (str(example_config._inipath),)
expiry_main(args)

View File

@@ -314,51 +314,6 @@ def test_persistent_queue_items(tmp_path, testaddr, token):
assert not queue_item < item2 and not item2 < queue_item
def test_turn_credentials_exception_returns_N(notifier, metadata, monkeypatch):
"""Test that turn_credentials() failure returns N\\n instead of crashing."""
import chatmaild.metadata
dictproxy = MetadataDictProxy(
notifier=notifier,
metadata=metadata,
turn_hostname="turn.example.org",
)
def mock_turn_credentials():
raise ConnectionRefusedError("socket not available")
monkeypatch.setattr(chatmaild.metadata, "turn_credentials", mock_turn_credentials)
transactions = {}
res = dictproxy.handle_dovecot_request(
"Lshared/0123/vendor/vendor.dovecot/pvt/server/vendor/deltachat/turn"
"\tuser@example.org",
transactions,
)
assert res == "N\n"
def test_turn_credentials_success(notifier, metadata, monkeypatch):
"""Test that valid turn_credentials() returns TURN URI."""
import chatmaild.metadata
dictproxy = MetadataDictProxy(
notifier=notifier,
metadata=metadata,
turn_hostname="turn.example.org",
)
monkeypatch.setattr(chatmaild.metadata, "turn_credentials", lambda: "user:pass")
transactions = {}
res = dictproxy.handle_dovecot_request(
"Lshared/0123/vendor/vendor.dovecot/pvt/server/vendor/deltachat/turn"
"\tuser@example.org",
transactions,
)
assert res == "Oturn.example.org:3478:user:pass\n"
def test_iroh_relay(dictproxy):
rfile = io.BytesIO(
b"\n".join(

View File

@@ -0,0 +1,24 @@
from chatmaild.metrics import main
def test_main(tmp_path, capsys):
paths = []
for x in ("ci-asllkj", "ac_12l3kj", "qweqwe", "ci-l1k2j31l2k3"):
p = tmp_path.joinpath(x)
p.mkdir()
p.joinpath("cur").mkdir()
paths.append(p)
tmp_path.joinpath("nomailbox").mkdir()
main(tmp_path)
out, _ = capsys.readouterr()
d = {}
for line in out.split("\n"):
if line.strip() and not line.startswith("#"):
name, num = line.split()
d[name] = int(num)
assert d["accounts"] == 4
assert d["ci_accounts"] == 3
assert d["nonci_accounts"] == 1

View File

@@ -19,12 +19,6 @@ def test_create_newemail_dict(example_config):
assert ac1["password"] != ac2["password"]
def test_create_newemail_dict_ip(make_config):
config = make_config("1.2.3.4")
ac = create_newemail_dict(config)
assert ac["email"].endswith("@[1.2.3.4]")
def test_create_dclogin_url():
url = create_dclogin_url("user@example.org", "p@ss w+rd")
assert url.startswith("dclogin:")

View File

@@ -1,73 +0,0 @@
import os
import time
from chatmaild.quota_expire import expire_to_target, main, scan_mailbox_messages
MB = 1024 * 1024
def _create_message(basedir, relpath, size, days_old=0):
path = basedir / relpath
path.parent.mkdir(parents=True, exist_ok=True)
path.write_bytes(b"x" * size)
mtime = time.time() - days_old * 86400
os.utime(path, (mtime, mtime))
return path
def test_scan_cur_new_tmp(tmp_path):
_create_message(tmp_path, "cur/msg1", 100)
_create_message(tmp_path, "new/msg2", 200)
_create_message(tmp_path, "tmp/msg3", 300)
assert len(scan_mailbox_messages(str(tmp_path))) == 3
def test_scan_ignores_subfolders(tmp_path):
_create_message(tmp_path, "cur/a", 10)
_create_message(tmp_path, ".DeltaChat/cur/b", 20)
assert len(scan_mailbox_messages(str(tmp_path))) == 1
def test_removes_to_target(tmp_path):
for i in range(15):
_create_message(tmp_path, f"cur/msg{i:02d}", MB, days_old=i + 1)
removed, _ = expire_to_target(str(tmp_path), 10 * MB)
assert removed == 5
assert len(scan_mailbox_messages(str(tmp_path))) == 10
def test_removes_oldest_first(tmp_path):
_create_message(tmp_path, "cur/old_small", MB, days_old=30)
_create_message(tmp_path, "cur/new_huge", 10 * MB, days_old=1)
# the 10MB file is kept, the 1MB file is removed because it's older
removed, _ = expire_to_target(str(tmp_path), 10 * MB)
assert removed == 1
assert not (tmp_path / "cur/old_small").exists()
assert (tmp_path / "cur/new_huge").exists()
def test_exact_limit(tmp_path):
_create_message(tmp_path, "cur/msg1", 5 * MB)
removed, _ = expire_to_target(str(tmp_path), 5 * MB)
assert removed == 0
def test_removes_stale_caches(tmp_path):
_create_message(tmp_path, "cur/msg1", 2 * MB, days_old=5)
(tmp_path / "maildirsize").write_text("x")
(tmp_path / "dovecot.index.cache").write_bytes(b"y" * 4096)
removed, cache_bytes = expire_to_target(str(tmp_path), MB)
assert removed == 1
assert cache_bytes == 4096
assert not (tmp_path / "maildirsize").exists()
assert not (tmp_path / "dovecot.index.cache").exists()
def test_logging_output_is_mtail_compatible(tmp_path, capsys):
mbox = tmp_path / "user@example.org"
_create_message(mbox, "cur/msg1", 2 * MB, days_old=5)
(mbox / "dovecot.index.cache").write_bytes(b"c" * 2 * MB)
main([str(1), str(mbox)])
_, err = capsys.readouterr()
assert "quota-expire: removed 1 message(s) from user@example.org" in err
assert "cache=2.0MB" in err

View File

@@ -1,73 +0,0 @@
import socket
import threading
import time
from unittest.mock import patch
import pytest
from chatmaild.turnserver import turn_credentials
SOCKET_PATH = "/run/chatmail-turn/turn.socket"
@pytest.fixture
def turn_socket(tmp_path):
"""Create a real Unix socket server at a temp path."""
sock_path = str(tmp_path / "turn.socket")
server = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
server.bind(sock_path)
server.listen(1)
yield sock_path, server
server.close()
def _call_turn_credentials(sock_path):
"""Call turn_credentials but connect to sock_path instead of hardcoded path."""
original_connect = socket.socket.connect
def patched_connect(self, address):
if address == SOCKET_PATH:
address = sock_path
return original_connect(self, address)
with patch.object(socket.socket, "connect", patched_connect):
return turn_credentials()
def test_turn_credentials_timeout(turn_socket):
"""Server accepts but never responds — must raise socket.timeout."""
sock_path, server = turn_socket
def accept_and_hang():
conn, _ = server.accept()
time.sleep(30)
conn.close()
t = threading.Thread(target=accept_and_hang, daemon=True)
t.start()
with pytest.raises(socket.timeout):
_call_turn_credentials(sock_path)
def test_turn_credentials_connection_refused(tmp_path):
"""Socket file doesn't exist — must raise ConnectionRefusedError or FileNotFoundError."""
missing = str(tmp_path / "nonexistent.socket")
with pytest.raises((ConnectionRefusedError, FileNotFoundError)):
_call_turn_credentials(missing)
def test_turn_credentials_success(turn_socket):
"""Server responds with credentials — must return stripped string."""
sock_path, server = turn_socket
def respond():
conn, _ = server.accept()
conn.sendall(b"testuser:testpass\n")
conn.close()
t = threading.Thread(target=respond, daemon=True)
t.start()
result = _call_turn_credentials(sock_path)
assert result == "testuser:testpass"

View File

@@ -4,7 +4,6 @@ import socket
def turn_credentials() -> str:
with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as client_socket:
client_socket.settimeout(5)
client_socket.connect("/run/chatmail-turn/turn.socket")
with client_socket.makefile("rb") as file:
return file.readline().decode("utf-8").strip()

View File

@@ -10,6 +10,7 @@ dependencies = [
"pillow",
"qrcode",
"markdown",
"pytest",
"setuptools>=68",
"termcolor",
"build",
@@ -20,7 +21,6 @@ dependencies = [
"execnet",
"imap_tools",
"deltachat-rpc-client",
"deltachat-rpc-server",
]
[project.scripts]

View File

@@ -67,7 +67,7 @@ class AcmetoolDeployer(Deployer):
)
files.template(
src=importlib.resources.files(__package__).joinpath("desired.yaml.j2"),
dest=f"/var/lib/acme/desired/{self.domains[0]}", # 0 is mailhost TLD
dest=f"/var/lib/acme/desired/{self.domains[0]}", # 0 is mailhost TLD
user="root",
group="root",
mode="644",

View File

@@ -1,10 +1,7 @@
import importlib.resources
import io
import os
from contextlib import contextmanager
from pyinfra import host
from pyinfra.facts.server import Command
from pyinfra.operations import files, server, systemd
@@ -13,39 +10,6 @@ def has_systemd():
return os.path.isdir("/run/systemd/system")
def is_in_container() -> bool:
"""Return True if running inside a container (Docker, LXC, etc.)."""
return (
host.get_fact(
Command,
"systemd-detect-virt --container --quiet 2>/dev/null && echo yes || true",
)
== "yes"
)
@contextmanager
def blocked_service_startup():
"""Prevent services from auto-starting during package installation.
Installs a ``/usr/sbin/policy-rc.d`` that exits 101, blocking any
service from being started by the package manager. This avoids bind
conflicts and CPU/RAM spikes during initial setup. The file is removed
when the context exits.
"""
# For documentation about policy-rc.d, see:
# https://people.debian.org/~hmh/invokerc.d-policyrc.d-specification.txt
files.put(
src=get_resource("policy-rc.d"),
dest="/usr/sbin/policy-rc.d",
user="root",
group="root",
mode="755",
)
yield
files.file("/usr/sbin/policy-rc.d", present=False)
def get_resource(arg, pkg=__package__):
return importlib.resources.files(pkg).joinpath(arg)

View File

@@ -0,0 +1,32 @@
;
; Required DNS entries for chatmail servers
;
{% if A %}
{{ mail_domain }}. A {{ A }}
{% endif %}
{% if AAAA %}
{{ mail_domain }}. AAAA {{ AAAA }}
{% endif %}
{{ mail_domain }}. MX 10 {{ mail_domain }}.
{% if strict_tls %}
_mta-sts.{{ mail_domain }}. TXT "v=STSv1; id={{ sts_id }}"
mta-sts.{{ mail_domain }}. CNAME {{ mail_domain }}.
{% endif %}
www.{{ mail_domain }}. CNAME {{ mail_domain }}.
{{ dkim_entry }}
;
; Recommended DNS entries for interoperability and security-hardening
;
{{ mail_domain }}. TXT "v=spf1 a ~all"
_dmarc.{{ mail_domain }}. TXT "v=DMARC1;p=reject;adkim=s;aspf=s"
{% if acme_account_url %}
{{ mail_domain }}. CAA 0 issue "letsencrypt.org;accounturi={{ acme_account_url }}"
{% endif %}
_adsp._domainkey.{{ mail_domain }}. TXT "dkim=discardable"
_submission._tcp.{{ mail_domain }}. SRV 0 1 587 {{ mail_domain }}.
_submissions._tcp.{{ mail_domain }}. SRV 0 1 465 {{ mail_domain }}.
_imap._tcp.{{ mail_domain }}. SRV 0 1 143 {{ mail_domain }}.
_imaps._tcp.{{ mail_domain }}. SRV 0 1 993 {{ mail_domain }}.

View File

@@ -108,7 +108,10 @@ def run_cmd(args, out):
pyinf = "pyinfra --dry" if args.dry_run else "pyinfra"
cmd = f"{pyinf} --ssh-user root {ssh_host} {deploy_path} -y"
if ssh_host == "localhost":
if ssh_host in ["localhost", "@docker"]:
if ssh_host == "@docker":
env["CHATMAIL_NOPORTCHECK"] = "True"
env["CHATMAIL_NOSYSCTL"] = "True"
cmd = f"{pyinf} @local {deploy_path} -y"
if version.parse(pyinfra.__version__) < version.parse("3"):
@@ -116,18 +119,24 @@ def run_cmd(args, out):
return 1
try:
out.check_call(cmd, env=env)
retcode = out.check_call(cmd, env=env)
if args.website_only:
out.green("Website deployment completed.")
if retcode == 0:
out.green("Website deployment completed.")
else:
out.red("Website deployment failed.")
elif retcode == 0:
out.green("Deploy completed, call `cmdeploy dns` next.")
elif not args.dns_check_disabled and strict_tls and not remote_data["acme_account_url"]:
out.red("Deploy completed but letsencrypt not configured")
out.red("Run 'cmdeploy run' again")
retcode = 0
else:
out.green("Deploy completed, call `cmdeploy dns` next.")
return 0
out.red("Deploy failed")
except subprocess.CalledProcessError:
out.red("Deploy failed")
return 1
retcode = 1
return retcode
def dns_cmd_options(parser):
@@ -207,7 +216,6 @@ def test_cmd(args, out):
"""Run local and online tests for chatmail deployment."""
env = os.environ.copy()
env["CHATMAIL_INI"] = str(args.inipath.absolute())
if args.ssh_host:
env["CHATMAIL_SSH"] = args.ssh_host
@@ -314,7 +322,7 @@ def add_ssh_host_option(parser):
parser.add_argument(
"--ssh-host",
dest="ssh_host",
help="Run commands on 'localhost' or on a specific SSH host "
help="Run commands on 'localhost', via '@docker', or on a specific SSH host "
"instead of chatmail.ini's mail_domain.",
)
@@ -376,7 +384,9 @@ def get_parser():
def get_sshexec(ssh_host: str, verbose=True):
if ssh_host in ["localhost", "@local"]:
return LocalExec(verbose)
return LocalExec(verbose, docker=False)
elif ssh_host == "@docker":
return LocalExec(verbose, docker=True)
if verbose:
print(f"[ssh] login to {ssh_host}")
return SSHExec(ssh_host, verbose=verbose)

View File

@@ -2,10 +2,11 @@
Chat Mail pyinfra deploy.
"""
import os
import shutil
import subprocess
import sys
from io import BytesIO, StringIO
from io import StringIO
from pathlib import Path
from chatmaild.config import read_config
@@ -23,11 +24,9 @@ from .basedeploy import (
Deployer,
Deployment,
activate_remote_units,
blocked_service_startup,
configure_remote_units,
get_resource,
has_systemd,
is_in_container,
)
from .dovecot.deployer import DovecotDeployer
from .external.deployer import ExternalTlsDeployer
@@ -124,6 +123,7 @@ def _install_remote_venv_with_chatmaild() -> None:
def _configure_remote_venv_with_chatmaild(config) -> None:
remote_base_dir = "/usr/local/lib/chatmaild"
remote_venv_dir = f"{remote_base_dir}/venv"
remote_chatmail_inipath = f"{remote_base_dir}/chatmail.ini"
root_owned = dict(user="root", group="root", mode="644")
@@ -134,13 +134,16 @@ def _configure_remote_venv_with_chatmaild(config) -> None:
**root_owned,
)
files.file(
path="/etc/cron.d/chatmail-metrics",
present=False,
)
files.file(
path="/var/www/html/metrics",
present=False,
files.template(
src=get_resource("metrics.cron.j2"),
dest="/etc/cron.d/chatmail-metrics",
user="root",
group="root",
mode="644",
config={
"mailboxes_dir": config.mailboxes_dir,
"execpath": f"{remote_venv_dir}/bin/chatmail-metrics",
},
)
@@ -150,16 +153,33 @@ class UnboundDeployer(Deployer):
self.need_restart = False
def install(self):
# Run local DNS resolver `unbound`. `resolvconf` takes care of
# setting up /etc/resolv.conf to use 127.0.0.1 as the resolver.
# Run local DNS resolver `unbound`.
# `resolvconf` takes care of setting up /etc/resolv.conf
# to use 127.0.0.1 as the resolver.
# On an IPv4-only system, if unbound is started but not configured,
# it causes subsequent steps to fail to resolve hosts.
with blocked_service_startup():
apt.packages(
name="Install unbound",
packages=["unbound", "unbound-anchor", "dnsutils", "resolvconf"],
)
#
# On an IPv4-only system, if unbound is started but not
# configured, it causes subsequent steps to fail to resolve hosts.
# Here, we use policy-rc.d to prevent unbound from starting up
# on initial install. Later, we will configure it and start it.
#
# For documentation about policy-rc.d, see:
# https://people.debian.org/~hmh/invokerc.d-policyrc.d-specification.txt
#
files.put(
src=get_resource("policy-rc.d"),
dest="/usr/sbin/policy-rc.d",
user="root",
group="root",
mode="755",
)
apt.packages(
name="Install unbound",
packages=["unbound", "unbound-anchor", "dnsutils"],
)
files.file("/usr/sbin/policy-rc.d", present=False)
def configure(self):
server.shell(
@@ -251,9 +271,6 @@ class WebsiteDeployer(Deployer):
# if www_folder is a hugo page, build it
if build_dir:
www_path = build_webpages(src_dir, build_dir, self.config)
if www_path is None:
logger.warning("Web page build failed, skipping website deployment")
return
# if it is not a hugo page, upload it as is
files.rsync(
f"{www_path}/", "/var/www/html", flags=["-avz", "--chown=www-data"]
@@ -320,12 +337,12 @@ class TurnDeployer(Deployer):
def install(self):
(url, sha256sum) = {
"x86_64": (
"https://github.com/chatmail/chatmail-turn/releases/download/v0.4/chatmail-turn-x86_64-linux",
"1ec1f5c50122165e858a5a91bcba9037a28aa8cb8b64b8db570aa457c6141a8a",
"https://github.com/chatmail/chatmail-turn/releases/download/v0.3/chatmail-turn-x86_64-linux",
"841e527c15fdc2940b0469e206188ea8f0af48533be12ecb8098520f813d41e4",
),
"aarch64": (
"https://github.com/chatmail/chatmail-turn/releases/download/v0.4/chatmail-turn-aarch64-linux",
"0fb3e792419494e21ecad536464929dba706bb2c88884ed8f1788141d26fc756",
"https://github.com/chatmail/chatmail-turn/releases/download/v0.3/chatmail-turn-aarch64-linux",
"a5fc2d06d937b56a34e098d2cd72a82d3e89967518d159bf246dc69b65e81b42",
),
}[host.get_fact(facts.server.Arch)]
@@ -458,19 +475,10 @@ class ChatmailDeployer(Deployer):
("iroh", None, None),
]
def __init__(self, config):
self.config = config
self.mail_domain = config.mail_domain
def __init__(self, mail_domain):
self.mail_domain = mail_domain
def install(self):
files.put(
name="Disable installing recommended packages globally",
src=BytesIO(b'APT::Install-Recommends "false";\n'),
dest="/etc/apt/apt.conf.d/00InstallRecommends",
user="root",
group="root",
mode="644",
)
apt.update(name="apt update", cache_time=24 * 3600)
apt.upgrade(name="upgrade apt packages", auto_remove=True)
@@ -483,18 +491,12 @@ class ChatmailDeployer(Deployer):
name="Install rsync",
packages=["rsync"],
)
def configure(self):
# metadata crashes if the mailboxes dir does not exist
files.directory(
name="Ensure vmail mailbox directory exists",
path=str(self.config.mailboxes_dir),
user="vmail",
group="vmail",
mode="700",
present=True,
apt.packages(
name="Ensure cron is installed",
packages=["cron"],
)
def configure(self):
# This file is used by auth proxy.
# https://wiki.debian.org/EtcMailName
server.shell(
@@ -584,7 +586,7 @@ def deploy_chatmail(config_path: Path, disable_mail: bool, website_only: bool) -
Out().red(f"Deploy failed: mtail_address {config.mtail_address} is not available (VPN up?).\n")
exit(1)
if not is_in_container():
if not os.environ.get("CHATMAIL_NOPORTCHECK"):
port_services = [
(["master", "smtpd"], 25),
("unbound", 53),
@@ -624,7 +626,7 @@ def deploy_chatmail(config_path: Path, disable_mail: bool, website_only: bool) -
tls_deployer = get_tls_deployer(config, mail_domain)
all_deployers = [
ChatmailDeployer(config),
ChatmailDeployer(mail_domain),
LegacyRemoveDeployer(),
FiltermailDeployer(),
JournaldDeployer(),

View File

@@ -1,22 +1,11 @@
import datetime
import importlib
from jinja2 import Template
from . import remote
def parse_zone_records(text):
"""Yield ``(name, ttl, rtype, rdata)`` from standard BIND-format text."""
for raw_line in text.splitlines():
line = raw_line.strip()
if not line or line.startswith(";"):
continue
try:
name, ttl, _in, rtype, rdata = line.split(None, 4)
except ValueError:
raise ValueError(f"Bad zone record line: {line!r}") from None
name = name.rstrip(".")
yield name, ttl, rtype.upper(), rdata
def get_initial_remote_data(sshexec, mail_domain):
return sshexec.logged(
call=remote.rdns.perform_initial_checks, kwargs=dict(mail_domain=mail_domain)
@@ -42,39 +31,13 @@ def get_filled_zone_file(remote_data):
if not sts_id:
remote_data["sts_id"] = datetime.datetime.now().strftime("%Y%m%d%H%M")
d = remote_data["mail_domain"]
def append_record(name, rtype, rdata, ttl=3600):
lines.append(f"{name:<40} {ttl:<6} IN {rtype:<5} {rdata}")
lines = ["; Required DNS entries"]
if remote_data.get("A"):
append_record(f"{d}.", "A", remote_data["A"])
if remote_data.get("AAAA"):
append_record(f"{d}.", "AAAA", remote_data["AAAA"])
append_record(f"{d}.", "MX", f"10 {d}.")
if remote_data.get("strict_tls"):
append_record(f"_mta-sts.{d}.", "TXT", f'"v=STSv1; id={remote_data["sts_id"]}"')
append_record(f"mta-sts.{d}.", "CNAME", f"{d}.")
append_record(f"www.{d}.", "CNAME", f"{d}.")
lines.append(remote_data["dkim_entry"])
template = importlib.resources.files(__package__).joinpath("chatmail.zone.j2")
content = template.read_text()
zonefile = Template(content).render(**remote_data)
lines = [x.strip() for x in zonefile.split("\n") if x.strip()]
lines.append("")
lines.append("; Recommended DNS entries")
append_record(f"{d}.", "TXT", '"v=spf1 a ~all"')
append_record(f"_dmarc.{d}.", "TXT", '"v=DMARC1;p=reject;adkim=s;aspf=s"')
if remote_data.get("acme_account_url"):
append_record(
f"{d}.",
"CAA",
f'0 issue "letsencrypt.org;accounturi={remote_data["acme_account_url"]}"',
)
append_record(f"_adsp._domainkey.{d}.", "TXT", '"dkim=discardable"')
append_record(f"_submission._tcp.{d}.", "SRV", f"0 1 587 {d}.")
append_record(f"_submissions._tcp.{d}.", "SRV", f"0 1 465 {d}.")
append_record(f"_imap._tcp.{d}.", "SRV", f"0 1 143 {d}.")
append_record(f"_imaps._tcp.{d}.", "SRV", f"0 1 993 {d}.")
lines.append("")
return "\n".join(lines)
zonefile = "\n".join(lines)
return zonefile
def check_full_zone(sshexec, remote_data, out, zonefile) -> int:
@@ -95,8 +58,7 @@ def check_full_zone(sshexec, remote_data, out, zonefile) -> int:
returncode = 1
if remote_data.get("dkim_entry") in required_diff:
out(
"If the DKIM entry above does not work with your DNS provider,"
" you can try this one:\n"
"If the DKIM entry above does not work with your DNS provider, you can try this one:\n"
)
out(remote_data.get("web_dkim_entry") + "\n")
if recommended_diff:

View File

@@ -1,33 +1,19 @@
import io
import urllib.request
import os
from chatmaild.config import Config
from pyinfra import host
from pyinfra.facts.deb import DebPackages
from pyinfra.facts.server import Arch, Command, Sysctl
from pyinfra.facts.server import Arch, Sysctl
from pyinfra.facts.systemd import SystemdEnabled
from pyinfra.operations import apt, files, server, systemd
from cmdeploy.basedeploy import (
Deployer,
activate_remote_units,
blocked_service_startup,
configure_remote_units,
get_resource,
is_in_container,
has_systemd,
)
DOVECOT_ARCHIVE_VERSION = "2.3.21+dfsg1-3"
DOVECOT_PACKAGE_VERSION = f"1:{DOVECOT_ARCHIVE_VERSION}"
DOVECOT_SHA256 = {
("core", "amd64"): "dd060706f52a306fa863d874717210b9fe10536c824afe1790eec247ded5b27d",
("core", "arm64"): "e7548e8a82929722e973629ecc40fcfa886894cef3db88f23535149e7f730dc9",
("imapd", "amd64"): "8d8dc6fc00bbb6cdb25d345844f41ce2f1c53f764b79a838eb2a03103eebfa86",
("imapd", "arm64"): "178fa877ddd5df9930e8308b518f4b07df10e759050725f8217a0c1fb3fd707f",
("lmtpd", "amd64"): "2f69ba5e35363de50962d42cccbfe4ed8495265044e244007d7ccddad77513ab",
("lmtpd", "arm64"): "89f52fb36524f5877a177dff4a713ba771fd3f91f22ed0af7238d495e143b38f",
}
class DovecotDeployer(Deployer):
daemon_reload = False
@@ -39,64 +25,23 @@ class DovecotDeployer(Deployer):
def install(self):
arch = host.get_fact(Arch)
with blocked_service_startup():
debs = []
for pkg in ("core", "imapd", "lmtpd"):
deb, changed = _download_dovecot_package(pkg, arch)
self.need_restart |= changed
if deb:
debs.append(deb)
if debs:
deb_list = " ".join(debs)
# First dpkg may fail on missing dependencies (stderr suppressed);
# apt-get --fix-broken pulls them in, then dpkg retries cleanly.
server.shell(
name="Install dovecot packages",
commands=[
f"dpkg --force-confdef --force-confold -i {deb_list} 2> /dev/null || true",
"DEBIAN_FRONTEND=noninteractive apt-get -y --fix-broken install",
f"dpkg --force-confdef --force-confold -i {deb_list}",
],
)
self.need_restart = True
files.put(
name="Pin dovecot packages to block Debian dist-upgrades",
src=io.StringIO(
"Package: dovecot-*\n"
"Pin: version *\n"
"Pin-Priority: -1\n"
),
dest="/etc/apt/preferences.d/pin-dovecot",
user="root",
group="root",
mode="644",
)
if has_systemd() and "dovecot.service" in host.get_fact(SystemdEnabled):
return # already installed and running
_install_dovecot_package("core", arch)
_install_dovecot_package("imapd", arch)
_install_dovecot_package("lmtpd", arch)
def configure(self):
configure_remote_units(self.config.mail_domain, self.units)
config_restart, self.daemon_reload = _configure_dovecot(self.config)
self.need_restart |= config_restart
self.need_restart, self.daemon_reload = _configure_dovecot(self.config)
def activate(self):
activate_remote_units(self.units)
# Detect stale binary: package installed but service still runs old (deleted) binary.
if not self.disable_mail and not self.need_restart:
stale = host.get_fact(
Command,
'pid=$(systemctl show -p MainPID --value dovecot.service 2>/dev/null);'
' [ "${pid:-0}" != "0" ] && readlink "/proc/$pid/exe" 2>/dev/null | grep -q "(deleted)"'
" && echo STALE || true",
)
if stale == "STALE":
self.need_restart = True
restart = False if self.disable_mail else self.need_restart
systemd.service(
name="Disable dovecot for now"
if self.disable_mail
else "Start and enable Dovecot",
name="Disable dovecot for now" if self.disable_mail else "Start and enable Dovecot",
service="dovecot.service",
running=False if self.disable_mail else True,
enabled=False if self.disable_mail else True,
@@ -106,49 +51,41 @@ class DovecotDeployer(Deployer):
self.need_restart = False
def _pick_url(primary, fallback):
try:
req = urllib.request.Request(primary, method="HEAD")
urllib.request.urlopen(req, timeout=10)
return primary
except Exception:
return fallback
def _download_dovecot_package(package: str, arch: str) -> tuple[str | None, bool]:
"""Download a dovecot .deb if needed, return (path, changed)."""
def _install_dovecot_package(package: str, arch: str):
arch = "amd64" if arch == "x86_64" else arch
arch = "arm64" if arch == "aarch64" else arch
url = f"https://download.delta.chat/dovecot/dovecot-{package}_2.3.21%2Bdfsg1-3_{arch}.deb"
deb_filename = "/root/" + url.split("/")[-1]
pkg_name = f"dovecot-{package}"
sha256 = DOVECOT_SHA256.get((package, arch))
if sha256 is None:
op = apt.packages(packages=[pkg_name])
return None, bool(getattr(op, "changed", False))
installed_versions = host.get_fact(DebPackages).get(pkg_name, [])
if DOVECOT_PACKAGE_VERSION in installed_versions:
return None, False
url_version = DOVECOT_ARCHIVE_VERSION.replace("+", "%2B")
deb_base = f"{pkg_name}_{url_version}_{arch}.deb"
primary_url = f"https://download.delta.chat/dovecot/{deb_base}"
fallback_url = f"https://github.com/chatmail/dovecot/releases/download/upstream%2F{url_version}/{deb_base}"
url = _pick_url(primary_url, fallback_url)
deb_filename = f"/root/{deb_base}"
match (package, arch):
case ("core", "amd64"):
sha256 = "dd060706f52a306fa863d874717210b9fe10536c824afe1790eec247ded5b27d"
case ("core", "arm64"):
sha256 = "e7548e8a82929722e973629ecc40fcfa886894cef3db88f23535149e7f730dc9"
case ("imapd", "amd64"):
sha256 = "8d8dc6fc00bbb6cdb25d345844f41ce2f1c53f764b79a838eb2a03103eebfa86"
case ("imapd", "arm64"):
sha256 = "178fa877ddd5df9930e8308b518f4b07df10e759050725f8217a0c1fb3fd707f"
case ("lmtpd", "amd64"):
sha256 = "2f69ba5e35363de50962d42cccbfe4ed8495265044e244007d7ccddad77513ab"
case ("lmtpd", "arm64"):
sha256 = "89f52fb36524f5877a177dff4a713ba771fd3f91f22ed0af7238d495e143b38f"
case _:
apt.packages(packages=[f"dovecot-{package}"])
return
files.download(
name=f"Download {pkg_name}",
name=f"Download dovecot-{package}",
src=url,
dest=deb_filename,
sha256sum=sha256,
cache_time=60 * 60 * 24 * 365 * 10, # never redownload the package
)
return deb_filename, True
apt.deb(name=f"Install dovecot-{package}", src=deb_filename)
def _configure_dovecot(config: Config, debug: bool = False) -> tuple[bool, bool]:
def _configure_dovecot(config: Config, debug: bool = False) -> (bool, bool):
"""Configures Dovecot IMAP server."""
need_restart = False
daemon_reload = False
@@ -183,25 +120,19 @@ def _configure_dovecot(config: Config, debug: bool = False) -> tuple[bool, bool]
# as per https://doc.dovecot.org/2.3/configuration_manual/os/
# it is recommended to set the following inotify limits
can_modify = not is_in_container()
for name in ("max_user_instances", "max_user_watches"):
key = f"fs.inotify.{name}"
value = host.get_fact(Sysctl)[key]
if value > 65534:
continue
if not can_modify:
print(
"\n!!!! refusing to attempt sysctl setting in containers\n"
f"!!!! dovecot: sysctl {key!r}={value}, should be >65534 for production setups\n"
"!!!!"
if not os.environ.get("CHATMAIL_NOSYSCTL"):
for name in ("max_user_instances", "max_user_watches"):
key = f"fs.inotify.{name}"
if host.get_fact(Sysctl)[key] > 65535:
# Skip updating limits if already sufficient
# (enables running in incus containers where sysctl readonly)
continue
server.sysctl(
name=f"Change {key}",
key=key,
value=65535,
persist=True,
)
continue
server.sysctl(
name=f"Change {key}",
key=key,
value=65535,
persist=True,
)
timezone_env = files.line(
name="Set TZ environment variable",

View File

@@ -133,11 +133,6 @@ protocol lmtp {
# mail_lua and push_notification_lua are needed for Lua push notification handler.
# <https://doc.dovecot.org/2.3/configuration_manual/push_notification/#configuration>
mail_plugins = $mail_plugins mail_lua notify push_notification push_notification_lua
# Disable fsync for LMTP. May lose delivered message,
# but unlikely to cause problems with multiple relays.
# https://doc.dovecot.org/2.3/admin_manual/mailbox_formats/#fsyncing
mail_fsync = never
}
plugin {
@@ -149,26 +144,12 @@ plugin {
}
plugin {
# for now we define static quota-rules for all users
quota = maildir:User quota
quota_rule = *:storage={{ config.max_mailbox_size }}
quota_max_mail_size={{ config.max_message_size }}
quota_grace = 0
# Inflate the dovecot-visible quota so that Delta Chat clients
# (which warn at 80% of the IMAP-reported limit) never see
# quota warnings -- expire kicks in well before that point.
quota_rule = *:storage={{ config.max_mailbox_size_mb * 140 // 100 }}M
# Trigger when usage reaches the configured max_mailbox_size
# (72% of inflated = ~100% of configured), then expire oldest
# messages down to 80% of the configured max_mailbox_size.
quota_warning = storage=72%% quota-warning {{ config.max_mailbox_size_mb * 80 // 100 }} {{ config.mailboxes_dir }}/%u
}
service quota-warning {
executable = script /usr/local/lib/chatmaild/venv/bin/chatmail-quota-expire
user = vmail
unix_listener quota-warning {
}
# quota_over_flag_value = TRUE
}
# push_notification configuration
@@ -271,9 +252,6 @@ protocol imap {
# sort -sn <(sed 's/ / C: /' *.in) <(sed 's/ / S: /' cat *.out)
rawlog_dir = %h
# Disable fsync for IMAP. May lose IMAP changes like setting flags.
mail_fsync = never
}
{% endif %}

View File

@@ -14,10 +14,10 @@ class FiltermailDeployer(Deployer):
def install(self):
arch = host.get_fact(facts.server.Arch)
url = f"https://github.com/chatmail/filtermail/releases/download/v0.6.1/filtermail-{arch}"
url = f"https://github.com/chatmail/filtermail/releases/download/v0.5.1/filtermail-{arch}"
sha256sum = {
"x86_64": "48b3fb80c092d00b9b0a0ef77a8673496da3b9aed5ec1851e1df936d5589d62f",
"aarch64": "c65bd5f45df187d3d65d6965a285583a3be0f44a6916ff12909ff9a8d702c22e",
"x86_64": "adce2ddb461c5fd744df699f3b0b3c33b6d52413c641f18695b93826e5e0d234",
"aarch64": "b51cf4248c6c443308f21b1811da1cc919b98b719a2138f4b60940ea093a5422",
}[arch]
self.need_restart |= files.download(
name="Download filtermail",

View File

@@ -0,0 +1 @@
*/5 * * * * root {{ config.execpath }} {{ config.mailboxes_dir }} >/var/www/html/metrics

View File

@@ -78,11 +78,3 @@ counter rejected_unencrypted_mail_count
/Rejected unencrypted mail/ {
rejected_unencrypted_mail_count++
}
counter quota_expire_runs
counter quota_expire_removed_files
/quota-expire: removed (?P<count>\d+) message\(s\)/ {
quota_expire_runs++
quota_expire_removed_files += $count
}

View File

@@ -54,7 +54,7 @@ http {
include /etc/nginx/mime.types;
default_type application/octet-stream;
ssl_protocols TLSv1.2 TLSv1.3;
ssl_protocols TLSv1 TLSv1.1 TLSv1.2 TLSv1.3;
ssl_prefer_server_ciphers on;
ssl_certificate {{ config.tls_cert_path }};
ssl_certificate_key {{ config.tls_key_path }};
@@ -73,16 +73,16 @@ http {
access_log syslog:server=unix:/dev/log,facility=local7;
location /mxdeliv/ {
proxy_pass http://127.0.0.1:{{ config.filtermail_http_port_incoming }};
}
location / {
# First attempt to serve request as file, then
# as directory, then fall back to displaying a 404.
try_files $uri $uri/ =404;
}
location /metrics {
default_type text/plain;
}
location /new {
{% if config.tls_cert_mode != "self" %}
if ($request_method = GET) {

View File

@@ -103,13 +103,6 @@ class OpendkimDeployer(Deployer):
)
need_restart |= service_file.changed
files.file(
name="chown opendkim: /etc/dkimkeys/opendkim.private",
path="/etc/dkimkeys/opendkim.private",
user="opendkim",
group="opendkim",
)
self.need_restart = need_restart
def activate(self):

View File

@@ -97,9 +97,7 @@ class PostfixDeployer(Deployer):
server.shell(
name="Validate postfix configuration",
# Extract stderr and quit with error if non-zero
commands=[
"""bash -c 'w=$(postconf 2>&1 >/dev/null); [[ -z "$w" ]] || { echo "$w"; false; }'"""
],
commands=["""bash -c 'w=$(postconf 2>&1 >/dev/null); [[ -z "$w" ]] || { echo "$w"; false; }'"""],
)
self.need_restart = need_restart

View File

@@ -20,7 +20,7 @@ smtpd_tls_key_file={{ config.tls_key_path }}
smtpd_tls_security_level=may
smtp_tls_CApath=/etc/ssl/certs
smtp_tls_security_level=verify
smtp_tls_security_level={{ "verify" if config.tls_cert_mode == "acme" else "encrypt" }}
# Send SNI extension when connecting to other servers.
# <https://www.postfix.org/postconf.5.html#smtp_tls_servername>
smtp_tls_servername = hostname
@@ -88,22 +88,6 @@ inet_protocols = ipv4
inet_protocols = all
{% endif %}
# Postfix does not try IPv4 and IPv6 connections
# concurrently as of version 3.7.11.
#
# When relay has both A (IPv4) and AAAA (IPv6) records,
# but broken IPv6 connectivity,
# every second message is delayed by the connection timeout
# <https://www.postfix.org/postconf.5.html#smtp_connect_timeout>
# which defaults to 30 seconds. Reducing timeouts is not a solution
# as this will result in a failure to connect to slow servers.
#
# As a workaround we always prefer IPv4 when it is available.
#
# The setting is documented at
# <https://www.postfix.org/postconf.5.html#smtp_address_preference>
smtp_address_preference=ipv4
virtual_transport = lmtp:unix:private/dovecot-lmtp
virtual_mailbox_domains = {{ config.mail_domain }}
lmtp_header_checks = regexp:/etc/postfix/lmtp_header_cleanup

View File

@@ -53,14 +53,13 @@ def get_dkim_entry(mail_domain, pre_command, dkim_selector):
print=log_progress,
)
except CalledProcessError:
return None, None
return
dkim_value_raw = f"v=DKIM1;k=rsa;p={dkim_pubkey};s=email;t=s"
dkim_value = '" "'.join(re.findall(".{1,255}", dkim_value_raw))
web_dkim_value = "".join(re.findall(".{1,255}", dkim_value_raw))
name = f"{dkim_selector}._domainkey.{mail_domain}."
return (
f'{name:<40} 3600 IN TXT "{dkim_value}"',
f'{name:<40} 3600 IN TXT "{web_dkim_value}"',
f'{dkim_selector}._domainkey.{mail_domain}. TXT "{dkim_value}"',
f'{dkim_selector}._domainkey.{mail_domain}. TXT "{web_dkim_value}"',
)
@@ -95,7 +94,7 @@ def check_zonefile(zonefile, verbose=True):
if not zf_line.strip() or zf_line.startswith(";"):
continue
print(f"dns-checking {zf_line!r}") if verbose else log_progress("")
zf_domain, _ttl, _in, zf_typ, zf_value = zf_line.split(None, 4)
zf_domain, zf_typ, zf_value = zf_line.split(maxsplit=2)
zf_domain = zf_domain.rstrip(".")
zf_value = zf_value.strip()
query_value = query_dns(zf_typ, zf_domain)

View File

@@ -40,5 +40,5 @@ def dovecot_recalc_quota(user):
#
for line in output.split("\n"):
parts = line.split()
if len(parts) >= 6 and parts[2] == "STORAGE":
if parts[2] == "STORAGE":
return dict(value=int(parts[3]), limit=int(parts[4]), percent=int(parts[5]))

View File

@@ -18,8 +18,6 @@ def openssl_selfsigned_args(domain, cert_path, key_path, days=36500):
"-keyout", str(key_path),
"-out", str(cert_path),
"-subj", f"/CN={domain}",
# Mark as end-entity cert so it cannot be used as a CA to sign others.
"-addext", "basicConstraints=critical,CA:FALSE",
"-addext", "extendedKeyUsage=serverAuth,clientAuth",
"-addext",
f"subjectAltName=DNS:{domain},DNS:www.{domain},DNS:mta-sts.{domain}",

View File

@@ -5,5 +5,5 @@ After=network.target
[Service]
Type=oneshot
User=vmail
ExecStart=/usr/local/lib/chatmaild/venv/bin/chatmail-fsreport /usr/local/lib/chatmaild/chatmail.ini
ExecStart=/usr/local/lib/chatmaild/venv/bin/chatmail-fsreport /usr/local/lib/chatmaild/chatmail.ini

View File

@@ -87,8 +87,9 @@ class SSHExec:
class LocalExec:
FuncError = FuncError
def __init__(self, verbose=False):
def __init__(self, verbose=False, docker=False):
self.verbose = verbose
self.docker = docker
def __call__(self, call, kwargs=None, log_callback=None):
if kwargs is None:
@@ -100,6 +101,10 @@ class LocalExec:
if not title:
title = call.__name__
where = "locally"
if self.docker:
if call == remote.rdns.perform_initial_checks:
kwargs["pre_command"] = "docker exec chatmail "
where = "in docker"
if self.verbose:
print_stderr(f"Running {where}: {title}(**{kwargs})")
return self(call, kwargs, log_callback=print_stderr)

View File

@@ -1,18 +1,17 @@
; Required DNS entries
zftest.testrun.org. 3600 IN A 135.181.204.127
zftest.testrun.org. 3600 IN AAAA 2a01:4f9:c012:52f4::1
zftest.testrun.org. 3600 IN MX 10 zftest.testrun.org.
_mta-sts.zftest.testrun.org. 3600 IN TXT "v=STSv1; id=202403211706"
mta-sts.zftest.testrun.org. 3600 IN CNAME zftest.testrun.org.
www.zftest.testrun.org. 3600 IN CNAME zftest.testrun.org.
opendkim._domainkey.zftest.testrun.org. 3600 IN TXT "v=DKIM1;k=rsa;p=MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAoYt82CVUyz2ouaqjX2kB+5J80knAyoOU3MGU5aWppmwUwwTvj/oSTSpkc5JMtVTRmKKr8NUDWAL1Yw7dfGqqPHdHfwwjS3BIvDzYx+hzgtz62RnfNgV+/2MAoNpfX7cAFIHdRzEHNtwugc3RDLquqPoupAE3Y2YRw2T5zG5fILh4vwIcJZL5Uq6B92j8wwJqOex" "33n+vm1NKQ9rxo/UsHAmZlJzpooXcG/4igTBxJyJlamVSRR6N7Nul1v//YJb7J6v2o0iPHW6uE0StzKaPPNC2IVosSRFbD9H2oqppltptFSNPlI0E+t0JBWHem6YK7xcugiO3ImMCaaU8g6Jt/wIDAQAB;s=email;t=s"
; Required DNS entries for chatmail servers
zftest.testrun.org. A 135.181.204.127
zftest.testrun.org. AAAA 2a01:4f9:c012:52f4::1
zftest.testrun.org. MX 10 zftest.testrun.org.
_mta-sts.zftest.testrun.org. TXT "v=STSv1; id=202403211706"
mta-sts.zftest.testrun.org. CNAME zftest.testrun.org.
www.zftest.testrun.org. CNAME zftest.testrun.org.
opendkim._domainkey.zftest.testrun.org. TXT "v=DKIM1;k=rsa;p=MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAoYt82CVUyz2ouaqjX2kB+5J80knAyoOU3MGU5aWppmwUwwTvj/oSTSpkc5JMtVTRmKKr8NUDWAL1Yw7dfGqqPHdHfwwjS3BIvDzYx+hzgtz62RnfNgV+/2MAoNpfX7cAFIHdRzEHNtwugc3RDLquqPoupAE3Y2YRw2T5zG5fILh4vwIcJZL5Uq6B92j8wwJqOex" "33n+vm1NKQ9rxo/UsHAmZlJzpooXcG/4igTBxJyJlamVSRR6N7Nul1v//YJb7J6v2o0iPHW6uE0StzKaPPNC2IVosSRFbD9H2oqppltptFSNPlI0E+t0JBWHem6YK7xcugiO3ImMCaaU8g6Jt/wIDAQAB;s=email;t=s"
; Recommended DNS entries
zftest.testrun.org. 3600 IN TXT "v=spf1 a ~all"
_dmarc.zftest.testrun.org. 3600 IN TXT "v=DMARC1;p=reject;adkim=s;aspf=s"
zftest.testrun.org. 3600 IN CAA 0 issue "letsencrypt.org;accounturi=https://acme-v02.api.letsencrypt.org/acme/acct/1371472956"
_adsp._domainkey.zftest.testrun.org. 3600 IN TXT "dkim=discardable"
_submission._tcp.zftest.testrun.org. 3600 IN SRV 0 1 587 zftest.testrun.org.
_submissions._tcp.zftest.testrun.org. 3600 IN SRV 0 1 465 zftest.testrun.org.
_imap._tcp.zftest.testrun.org. 3600 IN SRV 0 1 143 zftest.testrun.org.
_imaps._tcp.zftest.testrun.org. 3600 IN SRV 0 1 993 zftest.testrun.org.
_submission._tcp.zftest.testrun.org. SRV 0 1 587 zftest.testrun.org.
_submissions._tcp.zftest.testrun.org. SRV 0 1 465 zftest.testrun.org.
_imap._tcp.zftest.testrun.org. SRV 0 1 143 zftest.testrun.org.
_imaps._tcp.zftest.testrun.org. SRV 0 1 993 zftest.testrun.org.
zftest.testrun.org. CAA 0 issue "letsencrypt.org;accounturi=https://acme-v02.api.letsencrypt.org/acme/acct/1371472956"
zftest.testrun.org. TXT "v=spf1 a:zftest.testrun.org ~all"
_dmarc.zftest.testrun.org. TXT "v=DMARC1;p=reject;adkim=s;aspf=s"
_adsp._domainkey.zftest.testrun.org. TXT "dkim=discardable"

View File

@@ -1,3 +1,4 @@
import time
def test_tls_imap(benchmark, imap):
def imap_connect():
imap.connect()

View File

@@ -89,9 +89,7 @@ def test_concurrent_logins_same_account(
assert login_results.get()
def test_no_vrfy(cmfactory, chatmail_config):
ac = cmfactory.get_online_account()
addr = ac.get_config("addr")
def test_no_vrfy(chatmail_config):
domain = chatmail_config.mail_domain
s = smtplib.SMTP(domain)
@@ -100,7 +98,7 @@ def test_no_vrfy(cmfactory, chatmail_config):
s.putcmd("vrfy", f"wrongaddress@{chatmail_config.mail_domain}")
result = s.getreply()
print(result)
s.putcmd("vrfy", addr)
s.putcmd("vrfy", f"echo@{chatmail_config.mail_domain}")
result2 = s.getreply()
print(result2)
assert result[0] == result2[0] == 252

View File

@@ -71,44 +71,6 @@ class TestSSHExecutor:
assert (now - since_date).total_seconds() < 60 * 60 * 51
def test_dovecot_main_process_matches_installed_binary(sshdomain):
    """Detect a stale dovecot left running after a package upgrade.

    Via ssh, compares the running dovecot.service main process against the
    binary installed on disk: the process must exist, its executable must not
    be deleted, and the systemd StatusText version must match
    ``dovecot --version``.
    """
    sshexec = get_sshexec(sshdomain)
    # MainPID of the running service; 0 means systemd has no main process.
    main_pid = int(
        sshexec(
            call=remote.rshell.shell,
            kwargs=dict(
                command="timeout 10 systemctl show -p MainPID --value dovecot.service"
            ),
        ).strip()
    )
    assert main_pid != 0, "dovecot.service MainPID is 0 -- service not running?"
    # /proc/<pid>/exe resolves to "... (deleted)" if the binary was replaced
    # on disk while the old process keeps running.
    exe = sshexec(
        call=remote.rshell.shell,
        kwargs=dict(command=f"timeout 10 readlink /proc/{main_pid}/exe"),
    ).strip()
    status_text = sshexec(
        call=remote.rshell.shell,
        kwargs=dict(
            command="timeout 10 systemctl show -p StatusText --value dovecot.service"
        ),
    ).strip()
    installed_version = sshexec(
        call=remote.rshell.shell, kwargs=dict(command="timeout 10 dovecot --version")
    ).strip()
    assert not exe.endswith("(deleted)"), (
        f"running dovecot binary was deleted (stale after upgrade): {exe}"
    )
    # Dovecot reports "v<version>" (optionally followed by more text) as
    # its systemd status line.
    expected_status_text = f"v{installed_version}"
    assert status_text == expected_status_text or status_text.startswith(
        f"{expected_status_text} "
    ), (
        f"dovecot status version mismatch: "
        f"StatusText={status_text!r}, installed={installed_version!r}"
    )
def test_timezone_env(remote):
for line in remote.iter_output("env"):
print(line)

View File

@@ -6,8 +6,8 @@ import imap_tools
import pytest
import requests
from cmdeploy.cmdeploy import get_sshexec
from cmdeploy.remote import rshell
from cmdeploy.cmdeploy import get_sshexec
@pytest.fixture
@@ -88,12 +88,9 @@ class TestEndToEndDeltaChat:
return int(float(number) * units[unit])
quota = parse_size_limit(chatmail_config.max_mailbox_size)
# Dovecot quota is inflated to 140% of the configured limit
# so that quota-expire keeps users below the warning threshold.
dovecot_quota = quota * 140 // 100
lp.sec(f"filling remote inbox for {user}")
fn = f"7743102289.M843172P2484002.c20,S={dovecot_quota},W=2398:2,"
fn = f"7743102289.M843172P2484002.c20,S={quota},W=2398:2,"
path = chatmail_config.mailboxes_dir.joinpath(user, "cur", fn)
sshexec = get_sshexec(sshdomain)
sshexec(call=rshell.write_numbytes, kwargs=dict(path=str(path), num=120))

View File

@@ -1,5 +1,4 @@
import imaplib
import ipaddress
import itertools
import os
import random
@@ -15,14 +14,6 @@ from chatmaild.config import read_config
conftestdir = Path(__file__).parent
def _is_ip(domain):
try:
ipaddress.ip_address(domain)
return True
except ValueError:
return False
def pytest_addoption(parser):
parser.addoption(
"--slow", action="store_true", default=False, help="also run slow tests"
@@ -44,11 +35,6 @@ def pytest_runtest_setup(item):
def _get_chatmail_config():
inipath = os.environ.get("CHATMAIL_INI")
if inipath:
path = Path(inipath).resolve()
return read_config(path), path
current = Path().resolve()
while 1:
path = current.joinpath("chatmail.ini").resolve()
@@ -291,7 +277,6 @@ def gencreds(chatmail_config):
def gen(domain=None):
domain = domain if domain else chatmail_config.mail_domain
addr_domain = f"[{domain}]" if _is_ip(domain) else domain
while 1:
num = next(count)
alphanumeric = "abcdefghijklmnopqrstuvwxyz1234567890"
@@ -305,7 +290,7 @@ def gencreds(chatmail_config):
password = "".join(
random.choices(alphanumeric, k=chatmail_config.password_min_length)
)
yield f"{user}@{addr_domain}", f"{password}"
yield f"{user}@{domain}", f"{password}"
return lambda domain=None: next(gen(domain))
@@ -354,22 +339,9 @@ class ChatmailACFactory:
accounts = []
for _ in range(num):
account = self.dc.add_account()
addr, password = self.gencreds(domain)
if _is_ip(domain):
# Use DCLOGIN scheme with explicit server hosts,
# matching how madmail presents its addresses to users.
qr = (
f"dclogin:{addr}"
f"?p={password}&v=1"
f"&ih={domain}&ip=993"
f"&sh={domain}&sp=465"
f"&ic=3&ss=default"
)
future = account.add_transport_from_qr.future(qr)
else:
future = account.add_or_update_transport.future(
self._make_transport(domain)
)
future = account.add_or_update_transport.future(
self._make_transport(domain)
)
futures.append(future)
# ensure messages stay in INBOX so that they can be
@@ -416,15 +388,12 @@ def cmfactory(rpc, gencreds, maildomain, chatmail_config):
@pytest.fixture
def remote(sshdomain):
r = Remote(sshdomain)
yield r
r.close()
return Remote(sshdomain)
class Remote:
def __init__(self, sshdomain):
self.sshdomain = sshdomain
self._procs = []
def iter_output(self, logcmd="", ready=None):
getjournal = "journalctl -f" if not logcmd else logcmd
@@ -434,32 +403,19 @@ class Remote:
case "localhost": command = []
case _: command = ["ssh", f"root@{self.sshdomain}"]
[command.append(arg) for arg in getjournal.split()]
popen = subprocess.Popen(
self.popen = subprocess.Popen(
command,
stdin=subprocess.DEVNULL,
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL,
)
self._procs.append(popen)
try:
while 1:
line = popen.stdout.readline()
res = line.decode().strip().lower()
if not res:
break
if ready is not None:
ready()
ready = None
yield res
finally:
popen.terminate()
popen.wait()
def close(self):
while self._procs:
proc = self._procs.pop()
proc.kill()
proc.wait()
while 1:
line = self.popen.stdout.readline()
res = line.decode().strip().lower()
if not res:
break
if ready is not None:
ready()
ready = None
yield res
@pytest.fixture

View File

@@ -23,19 +23,15 @@ class TestCmdline:
run = parser.parse_args(["run"])
assert init and run
def test_init_not_overwrite(self, capsys, tmp_path, monkeypatch):
monkeypatch.delenv("CHATMAIL_INI", raising=False)
inipath = tmp_path / "chatmail.ini"
args = ["init", "--config", str(inipath), "chat.example.org"]
assert main(args) == 0
def test_init_not_overwrite(self, capsys):
assert main(["init", "chat.example.org"]) == 0
capsys.readouterr()
assert main(args) == 1
assert main(["init", "chat.example.org"]) == 1
out, err = capsys.readouterr()
assert "path exists" in out.lower()
args.insert(1, "--force")
assert main(args) == 0
assert main(["init", "chat.example.org", "--force"]) == 0
out, err = capsys.readouterr()
assert "deleting config file" in out.lower()

View File

@@ -3,7 +3,7 @@ from copy import deepcopy
import pytest
from cmdeploy import remote
from cmdeploy.dns import check_full_zone, check_initial_remote_data, parse_zone_records
from cmdeploy.dns import check_full_zone, check_initial_remote_data
@pytest.fixture
@@ -60,29 +60,6 @@ def mockdns(request, mockdns_base, mockdns_expected):
return mockdns_base
class TestGetDkimEntry:
    """get_dkim_entry must always yield a 2-tuple, even when openssl fails."""

    def test_dkim_entry_returns_tuple_on_success(self, mockdns):
        entry, web_entry = remote.rdns.get_dkim_entry(
            "some.domain", "", dkim_selector="opendkim"
        )
        # May return None,None if openssl not available, but should never crash
        if entry is not None:
            assert "opendkim._domainkey.some.domain" in entry
            assert "opendkim._domainkey.some.domain" in web_entry

    def test_dkim_entry_returns_none_tuple_on_error(self, monkeypatch):
        """CalledProcessError must return (None, None), not bare None."""
        from subprocess import CalledProcessError

        # Simulate the shell helper failing (e.g. openssl missing).
        def failing_shell(command, fail_ok=False, print=print):
            raise CalledProcessError(1, command)

        monkeypatch.setattr(remote.rdns, "shell", failing_shell)
        result = remote.rdns.get_dkim_entry("some.domain", "", dkim_selector="opendkim")
        assert result == (None, None)
        assert result[0] is None and result[1] is None
class TestPerformInitialChecks:
def test_perform_initial_checks_ok1(self, mockdns, mockdns_expected):
remote_data = remote.rdns.perform_initial_checks("some.domain")
@@ -125,49 +102,18 @@ class TestPerformInitialChecks:
assert not l
def test_parse_zone_records():
    """parse_zone_records yields (name, ttl, type, rdata) tuples: comment
    lines are skipped, the trailing dot is stripped from names, and
    multi-word rdata (MX priority+host, split TXT strings) stays intact."""
    text = """
; This is a comment
some.domain. 3600 IN A 1.1.1.1
; Another comment
www.some.domain. 3600 IN CNAME some.domain.
; Multi-word rdata
some.domain. 3600 IN MX 10 mail.some.domain.
; DKIM record (single line, multi-word TXT rdata)
dkim._domainkey.some.domain. 3600 IN TXT "v=DKIM1;k=rsa;p=MIIBIjANBgkqhkiG" "9w0BAQEFAAOCAQ8AMIIBCgKCAQEA"
; Another TXT record
_dmarc.some.domain. 3600 IN TXT "v=DMARC1;p=reject"
"""
    records = list(parse_zone_records(text))
    assert records == [
        ("some.domain", "3600", "A", "1.1.1.1"),
        ("www.some.domain", "3600", "CNAME", "some.domain."),
        ("some.domain", "3600", "MX", "10 mail.some.domain."),
        (
            "dkim._domainkey.some.domain",
            "3600",
            "TXT",
            '"v=DKIM1;k=rsa;p=MIIBIjANBgkqhkiG" "9w0BAQEFAAOCAQ8AMIIBCgKCAQEA"',
        ),
        ("_dmarc.some.domain", "3600", "TXT", '"v=DMARC1;p=reject"'),
    ]
def test_parse_zone_records_invalid_line():
    """A line that does not parse as a zone record must raise ValueError."""
    with pytest.raises(ValueError, match="Bad zone record line"):
        list(parse_zone_records("invalid line"))
def parse_zonefile_into_dict(zonefile, mockdns_base, only_required=False):
if only_required:
zonefile = zonefile.split("; Recommended")[0]
for name, ttl, rtype, rdata in parse_zone_records(zonefile):
mockdns_base.setdefault(rtype, {})[name] = rdata
for zf_line in zonefile.split("\n"):
if zf_line.startswith("#"):
if "Recommended" in zf_line and only_required:
return
continue
if not zf_line.strip():
continue
zf_domain, zf_typ, zf_value = zf_line.split(maxsplit=2)
zf_domain = zf_domain.rstrip(".")
zf_value = zf_value.strip()
mockdns_base.setdefault(zf_typ, {})[zf_domain] = zf_value
class MockSSHExec:

View File

@@ -1,238 +0,0 @@
from contextlib import nullcontext
from types import SimpleNamespace
import pytest
from pyinfra.facts.deb import DebPackages
from cmdeploy.dovecot import deployer as dovecot_deployer
def make_host(*fact_pairs):
    """Create a stand-in pyinfra host whose ``get_fact`` serves canned values.

    Args:
        *fact_pairs: ``(fact_class, fact_value)`` tuples to register.

    Returns:
        SimpleNamespace exposing ``get_fact``; requesting an unregistered
        fact class raises LookupError naming the registered classes.
    """
    registry = dict(fact_pairs)

    def get_fact(cls):
        # Known fact class: hand back the canned value directly.
        if cls in registry:
            return registry[cls]
        # Unknown fact class: fail loudly with the list of what IS registered,
        # so a test failure points at the missing registration immediately.
        registered = ", ".join(c.__name__ for c in registry)
        raise LookupError(
            f"unexpected get_fact({cls.__name__}); "
            f"only registered: {registered}"
        )

    return SimpleNamespace(get_fact=get_fact)
@pytest.fixture
def deployer():
    """DovecotDeployer wired to a minimal stub config (only mail_domain set),
    with mail delivery enabled."""
    return dovecot_deployer.DovecotDeployer(
        SimpleNamespace(mail_domain="chat.example.org"),
        disable_mail=False,
    )
@pytest.fixture
def patch_blocked(monkeypatch):
    """Neutralize blocked_service_startup by replacing it with nullcontext,
    so install() runs without actually blocking service startup."""
    monkeypatch.setattr(dovecot_deployer, "blocked_service_startup", nullcontext)
@pytest.fixture
def mock_files_put(monkeypatch):
    """Stub pyinfra files.put to report no change without touching disk."""
    monkeypatch.setattr(
        dovecot_deployer.files,
        "put",
        lambda **kwargs: SimpleNamespace(changed=False),
    )
@pytest.fixture
def track_shell(monkeypatch):
    """Record the kwargs of every server.shell() call; each stubbed call
    reports changed=False. Returns the list of recorded kwargs dicts."""
    calls = []
    monkeypatch.setattr(
        dovecot_deployer.server,
        "shell",
        # append() returns None, so `or` falls through to the stub result.
        lambda **kwargs: calls.append(kwargs) or SimpleNamespace(changed=False),
    )
    return calls
def test_download_dovecot_package_skips_epoch_matched_install(monkeypatch):
    """When dovecot-core is already installed at DOVECOT_PACKAGE_VERSION,
    _download_dovecot_package must return (None, False) and never download."""
    epoch_version = dovecot_deployer.DOVECOT_PACKAGE_VERSION
    downloads = []
    # Fake host reports the target version as already installed.
    monkeypatch.setattr(
        dovecot_deployer,
        "host",
        make_host((DebPackages, {"dovecot-core": [epoch_version]})),
    )
    monkeypatch.setattr(
        dovecot_deployer,
        "_pick_url",
        lambda primary, fallback: primary,
    )
    # Capture any download attempt so the no-download assertion is exact.
    monkeypatch.setattr(
        dovecot_deployer.files,
        "download",
        lambda **kwargs: downloads.append(kwargs),
    )
    deb, changed = dovecot_deployer._download_dovecot_package("core", "amd64")
    assert deb is None, f"expected no deb path when version matches, got {deb!r}"
    assert changed is False, "should not flag changed when version already installed"
    assert downloads == [], "should not download when version already installed"
def test_download_dovecot_package_uses_archive_version_for_url_and_filename(
    monkeypatch,
):
    """The .deb path must be built from DOVECOT_ARCHIVE_VERSION (with '+'
    URL-encoded as %2B), never from the epoch-prefixed package version."""
    downloads = []
    # Fake host reports no dovecot packages installed at all.
    monkeypatch.setattr(
        dovecot_deployer,
        "host",
        make_host((DebPackages, {})),
    )
    monkeypatch.setattr(
        dovecot_deployer,
        "_pick_url",
        lambda primary, fallback: primary,
    )
    monkeypatch.setattr(
        dovecot_deployer.files,
        "download",
        lambda **kwargs: downloads.append(kwargs),
    )
    deb, changed = dovecot_deployer._download_dovecot_package("core", "amd64")
    archive_version = dovecot_deployer.DOVECOT_ARCHIVE_VERSION.replace("+", "%2B")
    expected_deb = f"/root/dovecot-core_{archive_version}_amd64.deb"
    # Verify the returned path uses archive version, not package version (with epoch)
    assert changed is True, "should flag changed when package not yet installed"
    assert deb == expected_deb, f"deb path mismatch: {deb!r} != {expected_deb!r}"
    assert dovecot_deployer.DOVECOT_PACKAGE_VERSION not in deb, (
        f"deb path should use archive version (no epoch), got {deb!r}"
    )
    assert len(downloads) == 1, "files.download should be called exactly once"
def test_install_skips_dpkg_path_when_epoch_matched_packages_present(
    deployer, patch_blocked, mock_files_put, track_shell, monkeypatch
):
    """install() must be a no-op (no download, no dpkg, no restart flag)
    when core/imapd/lmtpd are all installed at DOVECOT_PACKAGE_VERSION."""
    monkeypatch.setattr(
        dovecot_deployer,
        "host",
        make_host(
            (
                dovecot_deployer.DebPackages,
                {
                    "dovecot-core": [dovecot_deployer.DOVECOT_PACKAGE_VERSION],
                    "dovecot-imapd": [dovecot_deployer.DOVECOT_PACKAGE_VERSION],
                    "dovecot-lmtpd": [dovecot_deployer.DOVECOT_PACKAGE_VERSION],
                },
            ),
            (dovecot_deployer.Arch, "x86_64"),
        ),
    )
    downloads = []
    monkeypatch.setattr(
        dovecot_deployer.files,
        "download",
        lambda **kwargs: downloads.append(kwargs),
    )
    deployer.install()
    assert downloads == [], "should not download when all packages epoch-matched"
    assert track_shell == [], "should not run dpkg when all packages epoch-matched"
    assert deployer.need_restart is False, (
        "need_restart should be False when nothing changed"
    )
def test_install_unsupported_arch_falls_back_to_apt(
    deployer, patch_blocked, mock_files_put, track_shell, monkeypatch
):
    """On an arch with no prebuilt .debs, install() must apt-install
    core/imapd/lmtpd and set need_restart if any apt call reports a change."""
    # For unsupported architectures, all fact lookups return the arch string.
    monkeypatch.setattr(
        dovecot_deployer,
        "host",
        SimpleNamespace(get_fact=lambda cls: "riscv64"),
    )
    apt_calls = []

    # Mirrors apt.packages() return value: OperationMeta with .changed property.
    # Only lmtpd triggers a change to verify |= accumulation of changed flags.
    def fake_apt(**kwargs):
        apt_calls.append(kwargs)
        changed = "lmtpd" in kwargs["packages"][0]
        return SimpleNamespace(changed=changed)

    monkeypatch.setattr(dovecot_deployer.apt, "packages", fake_apt)
    deployer.install()
    actual_pkgs = [c["packages"] for c in apt_calls]
    assert actual_pkgs == [["dovecot-core"], ["dovecot-imapd"], ["dovecot-lmtpd"]], (
        f"expected apt install of core/imapd/lmtpd, got {actual_pkgs}"
    )
    assert track_shell == [], "should not run dpkg for unsupported arch"
    assert deployer.need_restart is True, (
        "need_restart should be True when apt installed a package"
    )
def test_install_runs_dpkg_when_packages_need_download(
    deployer, patch_blocked, mock_files_put, track_shell, monkeypatch
):
    """With no dovecot packages installed, install() downloads the .debs and
    issues one server.shell() with dpkg -i / fix-broken / dpkg -i commands,
    then flags need_restart."""
    monkeypatch.setattr(
        dovecot_deployer,
        "host",
        make_host(
            (dovecot_deployer.DebPackages, {}),
            (dovecot_deployer.Arch, "x86_64"),
        ),
    )
    monkeypatch.setattr(
        dovecot_deployer,
        "_pick_url",
        lambda primary, fallback: primary,
    )
    # Every download reports a change, forcing the dpkg install path.
    monkeypatch.setattr(
        dovecot_deployer.files,
        "download",
        lambda **kwargs: SimpleNamespace(changed=True),
    )
    deployer.install()
    assert len(track_shell) == 1, (
        f"expected one server.shell() call for dpkg install, got {len(track_shell)}"
    )
    cmds = track_shell[0]["commands"]
    assert len(cmds) == 3, f"expected 3 dpkg/apt commands, got: {cmds}"
    assert cmds[0].startswith("dpkg --force-confdef --force-confold -i ")
    assert "apt-get -y --fix-broken install" in cmds[1]
    assert cmds[2].startswith("dpkg --force-confdef --force-confold -i ")
    assert deployer.need_restart is True, (
        "need_restart should be True after dpkg install"
    )
def test_pick_url_falls_back_on_primary_error(monkeypatch):
    """_pick_url must return the fallback URL when probing the primary raises."""

    # urlopen stub matching the (request, timeout) call signature _pick_url uses.
    def raise_error(req, timeout):
        raise OSError("connection timeout")

    monkeypatch.setattr(dovecot_deployer.urllib.request, "urlopen", raise_error)
    result = dovecot_deployer._pick_url("http://primary", "http://fallback")
    assert result == "http://fallback", (
        f"should fall back when primary fails, got {result!r}"
    )

View File

@@ -1,68 +0,0 @@
from unittest.mock import patch
from cmdeploy.remote.rshell import dovecot_recalc_quota
def test_dovecot_recalc_quota_normal_output():
    """Normal doveadm output returns parsed dict."""
    # Header plus STORAGE/MESSAGE rows, as `doveadm quota get` prints them;
    # only the STORAGE row should be parsed into the result.
    normal_output = (
        "Quota name Type Value Limit %\n"
        "User quota STORAGE 5 102400 0\n"
        "User quota MESSAGE 2 - 0\n"
    )
    with patch("cmdeploy.remote.rshell.shell", return_value=normal_output):
        result = dovecot_recalc_quota("user@example.org")
    # shell is called twice (recalc + get), patch returns same for both
    assert result == {"value": 5, "limit": 102400, "percent": 0}
def test_dovecot_recalc_quota_empty_output():
    """Empty doveadm output (trailing newline) must not IndexError."""
    # NOTE: the original kept a call_count list that was incremented but
    # never asserted; removed as dead code.

    def mock_shell(cmd):
        # `doveadm ... recalc` returns nothing; `quota get` yields only
        # empty lines, which the parser must skip without crashing.
        if "recalc" in cmd:
            return ""
        return "\n\n"

    with patch("cmdeploy.remote.rshell.shell", side_effect=mock_shell):
        result = dovecot_recalc_quota("user@example.org")
    assert result is None
def test_dovecot_recalc_quota_malformed_output():
    """Malformed output with too few columns must not crash."""
    # NOTE: the original kept a call_count list that was incremented but
    # never asserted; removed as dead code.

    def mock_shell(cmd):
        if "recalc" in cmd:
            return ""
        # partial line, fewer than 6 parts
        return "Quota name\nUser quota STORAGE\n"

    with patch("cmdeploy.remote.rshell.shell", side_effect=mock_shell):
        result = dovecot_recalc_quota("user@example.org")
    assert result is None
def test_dovecot_recalc_quota_header_only():
    """Only header line, no data rows."""
    # NOTE: the original kept a call_count list that was incremented but
    # never asserted; removed as dead code.

    def mock_shell(cmd):
        if "recalc" in cmd:
            return ""
        # Header splits into 6 parts but parts[2] is "Value", not "STORAGE",
        # so no quota row is found and None must be returned.
        return "Quota name Type Value Limit %\n"

    with patch("cmdeploy.remote.rshell.shell", side_effect=mock_shell):
        result = dovecot_recalc_quota("user@example.org")
    assert result is None

View File

@@ -102,20 +102,17 @@ short overview of ``chatmaild`` services:
Apple/Google/Huawei.
- `chatmail-expire <https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/expire.py>`_
deletes old messages, large messages, and entire mailboxes
of users who have not logged in for longer than
``delete_inactive_users_after`` days.
- ``chatmail-quota-expire``
is called by Dovecot's ``quota_warning`` mechanism when a
mailbox exceeds ``max_mailbox_size``.
It removes the oldest messages
until usage drops to a safe level.
deletes users if they have not logged in for a longer while.
The timeframe can be configured in ``chatmail.ini``.
- `lastlogin <https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/lastlogin.py>`_
is contacted by Dovecot when a user logs in and stores the date of
the login.
- `metrics <https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/metrics.py>`_
collects some metrics and displays them at
``https://example.org/metrics``.
``www/``
~~~~~~~~~
@@ -145,9 +142,11 @@ Chatmail relay dependency diagram
nginx-internal --- autoconfig.xml;
certs-nginx[("`TLS certs
/var/lib/acme`")] --> nginx-internal;
systemd-timer --- chatmail-metrics;
systemd-timer --- acmetool;
systemd-timer --- chatmail-expire-daily;
systemd-timer --- chatmail-fsreport-daily;
chatmail-metrics --- website;
acmetool --> certs[("`TLS certs
/var/lib/acme`")];
nginx-external --- |993|dovecot;
@@ -163,8 +162,6 @@ Chatmail relay dependency diagram
/home/vmail/.../user"];
dovecot --- |lastlogin.socket|lastlogin;
dovecot --- chatmail-metadata;
dovecot --- |quota-warning|chatmail-quota-expire;
chatmail-quota-expire --- maildir;
lastlogin --- maildir;
doveauth --- maildir;
chatmail-expire-daily --- maildir;