mirror of
https://github.com/chatmail/relay.git
synced 2026-05-11 08:24:37 +00:00
Compare commits
4 Commits
1.8.0
...
cmdeploy-p
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
5399ea1f59 | ||
|
|
f7d0a9150d | ||
|
|
7023612a8b | ||
|
|
fdabed5c67 |
4
.github/ISSUE_TEMPLATE/config.yml
vendored
4
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -1 +1,5 @@
|
|||||||
blank_issues_enabled: true
|
blank_issues_enabled: true
|
||||||
|
contact_links:
|
||||||
|
- name: Mutual Help Chat Group
|
||||||
|
url: https://i.delta.chat/#6CBFF8FFD505C0FDEA20A66674F2916EA8FBEE99&a=invitebot%40nine.testrun.org&g=Chatmail%20Mutual%20Help&x=7sFF7Ik50pWv6J1z7RVC5527&i=X69wTFfvCfs3d-JzqP0kVA3i&s=ibp-447dU-wUq-52QanwAtWc
|
||||||
|
about: If you have troubles setting up the relay server, feel free to ask here.
|
||||||
|
|||||||
53
.github/workflows/docs-preview.yaml
vendored
53
.github/workflows/docs-preview.yaml
vendored
@@ -1,53 +0,0 @@
|
|||||||
name: documentation preview
|
|
||||||
|
|
||||||
on:
|
|
||||||
pull_request:
|
|
||||||
paths:
|
|
||||||
- 'doc/**'
|
|
||||||
- 'scripts/build-docs.sh'
|
|
||||||
- '.github/workflows/docs-preview.yaml'
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
scripts:
|
|
||||||
name: build
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
environment:
|
|
||||||
name: 'staging.chatmail.at/doc/relay/'
|
|
||||||
url: https://staging.chatmail.at/doc/relay/${{ steps.prepare.outputs.prid }}
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: initenv
|
|
||||||
run: scripts/initenv.sh
|
|
||||||
|
|
||||||
- name: append venv/bin to PATH
|
|
||||||
run: echo `pwd`/venv/bin >>$GITHUB_PATH
|
|
||||||
|
|
||||||
- name: build documentation
|
|
||||||
working-directory: doc
|
|
||||||
run: sphinx-build source build
|
|
||||||
|
|
||||||
- name: build documentation second time (for TOC)
|
|
||||||
working-directory: doc
|
|
||||||
run: sphinx-build source build
|
|
||||||
|
|
||||||
- name: Get Pullrequest ID
|
|
||||||
id: prepare
|
|
||||||
run: |
|
|
||||||
export PULLREQUEST_ID=$(echo "${{ github.ref }}" | cut -d "/" -f3)
|
|
||||||
echo "prid=$PULLREQUEST_ID" >> $GITHUB_OUTPUT
|
|
||||||
if [ $(expr length "${{ secrets.USERNAME }}") -gt "1" ]; then echo "uploadtoserver=true" >> $GITHUB_OUTPUT; fi
|
|
||||||
- run: |
|
|
||||||
echo "baseurl: /${{ steps.prepare.outputs.prid }}" >> _config.yml
|
|
||||||
|
|
||||||
- name: Upload preview
|
|
||||||
run: |
|
|
||||||
mkdir -p "$HOME/.ssh"
|
|
||||||
echo "${{ secrets.CHATMAIL_STAGING_SSHKEY }}" > "$HOME/.ssh/key"
|
|
||||||
chmod 600 "$HOME/.ssh/key"
|
|
||||||
rsync -rILvh -e "ssh -i $HOME/.ssh/key -o StrictHostKeyChecking=no" $GITHUB_WORKSPACE/doc/build/ "${{ secrets.USERNAME }}@chatmail.at:/var/www/html/staging.chatmail.at/doc/relay/${{ steps.prepare.outputs.prid }}/"
|
|
||||||
|
|
||||||
- name: check links
|
|
||||||
working-directory: doc
|
|
||||||
run: sphinx-build --builder linkcheck source build
|
|
||||||
|
|
||||||
47
.github/workflows/docs.yaml
vendored
47
.github/workflows/docs.yaml
vendored
@@ -1,47 +0,0 @@
|
|||||||
name: build and upload documentation
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- main
|
|
||||||
- 'missytake/docs-ci'
|
|
||||||
paths:
|
|
||||||
- 'doc/**'
|
|
||||||
- 'scripts/build-docs.sh'
|
|
||||||
- '.github/workflows/docs.yaml'
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
scripts:
|
|
||||||
name: build
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
environment:
|
|
||||||
name: 'chatmail.at/doc/relay/'
|
|
||||||
url: https://chatmail.at/doc/relay/
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: initenv
|
|
||||||
run: scripts/initenv.sh
|
|
||||||
|
|
||||||
- name: append venv/bin to PATH
|
|
||||||
run: echo `pwd`/venv/bin >>$GITHUB_PATH
|
|
||||||
|
|
||||||
- name: build documentation
|
|
||||||
working-directory: doc
|
|
||||||
run: sphinx-build source build
|
|
||||||
|
|
||||||
- name: build documentation second time (for TOC)
|
|
||||||
working-directory: doc
|
|
||||||
run: sphinx-build source build
|
|
||||||
|
|
||||||
- name: check links
|
|
||||||
working-directory: doc
|
|
||||||
run: sphinx-build --builder linkcheck source build
|
|
||||||
|
|
||||||
- name: upload documentation
|
|
||||||
run: |
|
|
||||||
mkdir -p "$HOME/.ssh"
|
|
||||||
echo "${{ secrets.CHATMAIL_STAGING_SSHKEY }}" > "$HOME/.ssh/key"
|
|
||||||
chmod 600 "$HOME/.ssh/key"
|
|
||||||
rsync -rILvh -e "ssh -i $HOME/.ssh/key -o StrictHostKeyChecking=no" $GITHUB_WORKSPACE/doc/build/ "${{ secrets.USERNAME }}@chatmail.at:/var/www/html/chatmail.at/doc/relay/"
|
|
||||||
|
|
||||||
@@ -16,14 +16,11 @@ jobs:
|
|||||||
name: deploy on staging-ipv4.testrun.org, and run tests
|
name: deploy on staging-ipv4.testrun.org, and run tests
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 30
|
timeout-minutes: 30
|
||||||
environment:
|
|
||||||
name: staging-ipv4.testrun.org
|
|
||||||
url: https://staging-ipv4.testrun.org/
|
|
||||||
concurrency:
|
concurrency:
|
||||||
group: ci-ipv4-${{ github.workflow }}-${{ github.ref }}
|
group: ci-ipv4-${{ github.workflow }}-${{ github.ref }}
|
||||||
cancel-in-progress: ${{ !contains(github.ref, '$GITHUB_REF') }}
|
cancel-in-progress: ${{ !contains(github.ref, '$GITHUB_REF') }}
|
||||||
steps:
|
steps:
|
||||||
- uses: jsok/serialize-workflow-action@515cd04c46d7ea7435c4a22a3b4419127afdefe9
|
- uses: jsok/serialize-workflow-action@v1
|
||||||
with:
|
with:
|
||||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
@@ -80,7 +77,7 @@ jobs:
|
|||||||
cmdeploy init staging-ipv4.testrun.org
|
cmdeploy init staging-ipv4.testrun.org
|
||||||
sed -i 's#disable_ipv6 = False#disable_ipv6 = True#' chatmail.ini
|
sed -i 's#disable_ipv6 = False#disable_ipv6 = True#' chatmail.ini
|
||||||
|
|
||||||
- run: cmdeploy run --verbose --skip-dns-check
|
- run: cmdeploy run
|
||||||
|
|
||||||
- name: set DNS entries
|
- name: set DNS entries
|
||||||
run: |
|
run: |
|
||||||
|
|||||||
10
.github/workflows/test-and-deploy.yaml
vendored
10
.github/workflows/test-and-deploy.yaml
vendored
@@ -16,14 +16,11 @@ jobs:
|
|||||||
name: deploy on staging2.testrun.org, and run tests
|
name: deploy on staging2.testrun.org, and run tests
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 30
|
timeout-minutes: 30
|
||||||
environment:
|
|
||||||
name: staging2.testrun.org
|
|
||||||
url: https://staging2.testrun.org/
|
|
||||||
concurrency:
|
concurrency:
|
||||||
group: ci-${{ github.workflow }}-${{ github.ref }}
|
group: ci-${{ github.workflow }}-${{ github.ref }}
|
||||||
cancel-in-progress: ${{ !contains(github.ref, '$GITHUB_REF') }}
|
cancel-in-progress: ${{ !contains(github.ref, '$GITHUB_REF') }}
|
||||||
steps:
|
steps:
|
||||||
- uses: jsok/serialize-workflow-action@515cd04c46d7ea7435c4a22a3b4419127afdefe9
|
- uses: jsok/serialize-workflow-action@v1
|
||||||
with:
|
with:
|
||||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
@@ -73,15 +70,12 @@ jobs:
|
|||||||
rsync -avz dkimkeys-restore/dkimkeys root@staging2.testrun.org:/etc/ || true
|
rsync -avz dkimkeys-restore/dkimkeys root@staging2.testrun.org:/etc/ || true
|
||||||
ssh -o StrictHostKeyChecking=accept-new -v root@staging2.testrun.org chown root:root -R /var/lib/acme || true
|
ssh -o StrictHostKeyChecking=accept-new -v root@staging2.testrun.org chown root:root -R /var/lib/acme || true
|
||||||
|
|
||||||
- name: add hpk42 key to staging server
|
|
||||||
run: ssh root@staging2.testrun.org 'curl -s https://github.com/hpk42.keys >> .ssh/authorized_keys'
|
|
||||||
|
|
||||||
- name: run deploy-chatmail offline tests
|
- name: run deploy-chatmail offline tests
|
||||||
run: pytest --pyargs cmdeploy
|
run: pytest --pyargs cmdeploy
|
||||||
|
|
||||||
- run: cmdeploy init staging2.testrun.org
|
- run: cmdeploy init staging2.testrun.org
|
||||||
|
|
||||||
- run: cmdeploy run --verbose --skip-dns-check
|
- run: cmdeploy run --verbose
|
||||||
|
|
||||||
- name: set DNS entries
|
- name: set DNS entries
|
||||||
run: |
|
run: |
|
||||||
|
|||||||
50
ARCHITECTURE.md
Normal file
50
ARCHITECTURE.md
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
This diagram shows components of the chatmail server; this is a draft
|
||||||
|
overview as of mid-August 2025:
|
||||||
|
|
||||||
|
```mermaid
|
||||||
|
graph LR;
|
||||||
|
cmdeploy --- sshd;
|
||||||
|
letsencrypt --- |80|acmetool-redirector;
|
||||||
|
acmetool-redirector --- |443|nginx-right(["`nginx
|
||||||
|
(external)`"]);
|
||||||
|
nginx-external --- |465|postfix;
|
||||||
|
nginx-external(["`nginx
|
||||||
|
(external)`"]) --- |8443|nginx-internal["`nginx
|
||||||
|
(internal)`"];
|
||||||
|
nginx-internal --- website["`Website
|
||||||
|
/var/www/html`"];
|
||||||
|
nginx-internal --- newemail.py;
|
||||||
|
nginx-internal --- autoconfig.xml;
|
||||||
|
certs-nginx[("`TLS certs
|
||||||
|
/var/lib/acme`")] --> nginx-internal;
|
||||||
|
cron --- chatmail-metrics;
|
||||||
|
cron --- acmetool;
|
||||||
|
cron --- expunge;
|
||||||
|
chatmail-metrics --- website;
|
||||||
|
acmetool --> certs[("`TLS certs
|
||||||
|
/var/lib/acme`")];
|
||||||
|
nginx-external --- |993|dovecot;
|
||||||
|
autoconfig.xml --- postfix;
|
||||||
|
autoconfig.xml --- dovecot;
|
||||||
|
postfix --- echobot;
|
||||||
|
postfix --- |10080,10081|filtermail;
|
||||||
|
postfix --- users["`User data
|
||||||
|
home/vmail/mail`"];
|
||||||
|
postfix --- |doveauth.socket|doveauth;
|
||||||
|
dovecot --- |doveauth.socket|doveauth;
|
||||||
|
dovecot --- users;
|
||||||
|
dovecot --- |metadata.socket|chatmail-metadata;
|
||||||
|
doveauth --- users;
|
||||||
|
expunge --- users;
|
||||||
|
chatmail-metadata --- iroh-relay;
|
||||||
|
certs-nginx --> postfix;
|
||||||
|
certs-nginx --> dovecot;
|
||||||
|
style certs fill:#ff6;
|
||||||
|
style certs-nginx fill:#ff6;
|
||||||
|
style nginx-external fill:#fc9;
|
||||||
|
style nginx-right fill:#fc9;
|
||||||
|
```
|
||||||
|
|
||||||
|
The edges in this graph should not be taken too literally; they
|
||||||
|
reflect some sort of communication path or dependency relationship
|
||||||
|
between components of the chatmail server.
|
||||||
76
CHANGELOG.md
76
CHANGELOG.md
@@ -2,100 +2,24 @@
|
|||||||
|
|
||||||
## untagged
|
## untagged
|
||||||
|
|
||||||
- Add imap_compress option to chatmail.ini
|
|
||||||
([#760](https://github.com/chatmail/relay/pull/760))
|
|
||||||
|
|
||||||
- Remove echobot from relays
|
|
||||||
([#753](https://github.com/chatmail/relay/pull/753))
|
|
||||||
|
|
||||||
- Fix `cmdeploy webdev`
|
|
||||||
([#743](https://github.com/chatmail/relay/pull/743))
|
|
||||||
|
|
||||||
- Add robots.txt to exclude all web crawlers
|
|
||||||
([#732](https://github.com/chatmail/relay/pull/732))
|
|
||||||
|
|
||||||
- acmetool: accept new Let's Encrypt ToS: https://letsencrypt.org/documents/LE-SA-v1.6-August-18-2025.pdf
|
|
||||||
([#729](https://github.com/chatmail/relay/pull/729))
|
|
||||||
|
|
||||||
- Organized cmdeploy into install, configure, and activate stages
|
|
||||||
([#695](https://github.com/chatmail/relay/pull/695))
|
|
||||||
|
|
||||||
- docs: move readme.md docs to sphinx documentation rendered at https://chatmail.at/doc/relay
|
|
||||||
([#711](https://github.com/chatmail/relay/pull/711))
|
|
||||||
|
|
||||||
- acmetool: replace cronjob with a systemd timer
|
|
||||||
([#719](https://github.com/chatmail/relay/pull/719))
|
|
||||||
|
|
||||||
- remove xstore@testrun.org from default passthrough recipients
|
|
||||||
([#722](https://github.com/chatmail/relay/pull/722))
|
|
||||||
|
|
||||||
- don't deploy the website if there are merge conflicts in the www folder
|
|
||||||
([#714](https://github.com/chatmail/relay/pull/714))
|
|
||||||
|
|
||||||
- acmetool: use ECDSA keys instead of RSA
|
|
||||||
([#689](https://github.com/chatmail/relay/pull/689))
|
|
||||||
|
|
||||||
- Require TLS 1.2 for outgoing SMTP connections
|
|
||||||
([#685](https://github.com/chatmail/relay/pull/685), [#730](https://github.com/chatmail/relay/pull/730))
|
|
||||||
|
|
||||||
- require STARTTLS for incoming port 25 connections
|
|
||||||
([#684](https://github.com/chatmail/relay/pull/684), [#730](https://github.com/chatmail/relay/pull/730))
|
|
||||||
|
|
||||||
- filtermail: run CPU-intensive handle_DATA in a thread pool executor
|
|
||||||
([#676](https://github.com/chatmail/relay/pull/676))
|
|
||||||
|
|
||||||
- don't use the complicated logging module in filtermail to exclude a potential source of errors.
|
|
||||||
([#674](https://github.com/chatmail/relay/pull/674))
|
|
||||||
|
|
||||||
- Specify nginx.conf to only handle `mail_domain`, www, and mta-sts domains
|
|
||||||
([#636](https://github.com/chatmail/relay/pull/636))
|
|
||||||
|
|
||||||
- Setup TURN server
|
|
||||||
([#621](https://github.com/chatmail/relay/pull/621))
|
|
||||||
|
|
||||||
- cmdeploy: make --ssh-host work with localhost
|
- cmdeploy: make --ssh-host work with localhost
|
||||||
([#659](https://github.com/chatmail/relay/pull/659))
|
([#659](https://github.com/chatmail/relay/pull/659))
|
||||||
|
|
||||||
- Update iroh-relay to 0.35.0
|
- Update iroh-relay to 0.35.0
|
||||||
([#650](https://github.com/chatmail/relay/pull/650))
|
([#650](https://github.com/chatmail/relay/pull/650))
|
||||||
|
|
||||||
- filtermail: accept mails from Protonmail
|
|
||||||
([#616](https://github.com/chatmail/relay/pull/616))
|
|
||||||
|
|
||||||
- Ignore all RCPT TO: parameters
|
- Ignore all RCPT TO: parameters
|
||||||
([#651](https://github.com/chatmail/relay/pull/651))
|
([#651](https://github.com/chatmail/relay/pull/651))
|
||||||
|
|
||||||
- Increase opendkim DNS Timeout from 5 to 60 seconds
|
|
||||||
([#672](https://github.com/chatmail/relay/pull/672))
|
|
||||||
|
|
||||||
- Add config parameter for Let's Encrypt ACME email
|
|
||||||
([#663](https://github.com/chatmail/relay/pull/663))
|
|
||||||
|
|
||||||
- Use max username length in newemail.py, not min
|
- Use max username length in newemail.py, not min
|
||||||
([#648](https://github.com/chatmail/relay/pull/648))
|
([#648](https://github.com/chatmail/relay/pull/648))
|
||||||
|
|
||||||
- Add startup for `fcgiwrap.service` because sometimes it did not start automatically.
|
|
||||||
([#657](https://github.com/chatmail/relay/pull/657))
|
|
||||||
|
|
||||||
- Add `cmdeploy init --force` command for recreating chatmail.ini
|
|
||||||
([#656](https://github.com/chatmail/relay/pull/656))
|
|
||||||
|
|
||||||
- Increase maxproc for reinjecting ports from 10 to 100
|
- Increase maxproc for reinjecting ports from 10 to 100
|
||||||
([#646](https://github.com/chatmail/relay/pull/646))
|
([#646](https://github.com/chatmail/relay/pull/646))
|
||||||
|
|
||||||
- Allow ports 143 and 993 to be used by `dovecot` process
|
- Allow ports 143 and 993 to be used by `dovecot` process
|
||||||
([#639](https://github.com/chatmail/relay/pull/639))
|
([#639](https://github.com/chatmail/relay/pull/639))
|
||||||
|
|
||||||
- Add `--skip-dns-check` argument to `cmdeploy run` command, which disables DNS record checking before installation.
|
|
||||||
([#661](https://github.com/chatmail/relay/pull/661))
|
|
||||||
|
|
||||||
- Rework expiry of message files and mailboxes in Python
|
|
||||||
to only do a single iteration over sometimes millions of messages
|
|
||||||
instead of doing "find" commands that iterate 9 times over the messages.
|
|
||||||
Provide an "fsreport" CLI for more fine grained analysis of message files.
|
|
||||||
([#637](https://github.com/chatmail/relay/pull/637))
|
|
||||||
|
|
||||||
|
|
||||||
## 1.7.0 2025-09-11
|
## 1.7.0 2025-09-11
|
||||||
|
|
||||||
- Make www upload path configurable
|
- Make www upload path configurable
|
||||||
|
|||||||
479
README.md
479
README.md
@@ -1,20 +1,479 @@
|
|||||||
|
|
||||||
# Chatmail relays for end-to-end encrypted email
|
<img width="800px" src="www/src/collage-top.png"/>
|
||||||
|
|
||||||
|
# Chatmail relays for end-to-end encrypted e-mail
|
||||||
|
|
||||||
Chatmail relay servers are interoperable Mail Transport Agents (MTAs) designed for:
|
Chatmail relay servers are interoperable Mail Transport Agents (MTAs) designed for:
|
||||||
|
|
||||||
- **Zero State:** no private data or metadata collected, messages are auto-deleted, low disk usage
|
- **Convenience:** Low friction instant onboarding
|
||||||
|
|
||||||
- **Instant/Realtime:** sub-second message delivery, realtime P2P
|
- **Privacy:** No name, phone numbers, email required or collected
|
||||||
streaming, privacy-preserving Push Notifications for Apple, Google, and Huawei;
|
|
||||||
|
|
||||||
- **Security Enforcement**: only strict TLS, DKIM and OpenPGP with minimized metadata accepted
|
- **End-to-End Encryption enforced**: only OpenPGP messages with metadata minimization allowed
|
||||||
|
|
||||||
- **Reliable Federation and Decentralization:** No spam or IP reputation checks, federating
|
- **Instant:** Privacy-preserving Push Notifications for Apple, Google, and Huawei
|
||||||
depends on established IETF standards and protocols.
|
|
||||||
|
|
||||||
This repository contains everything needed to setup a ready-to-use chatmail relay on an ssh-reachable host.
|
- **Speed:** Message delivery in half a second, with optional P2P realtime connections
|
||||||
For getting started and more information please refer to the web version of this repositories' documentation at
|
|
||||||
|
|
||||||
[https://chatmail.at/doc/relay](https://chatmail.at/doc/relay)
|
- **Transport Security:** Strict TLS and DKIM enforced
|
||||||
|
|
||||||
|
- **Reliability:** No spam or IP reputation checks; rate-limits are suitable for realtime chats
|
||||||
|
|
||||||
|
- **Efficiency:** Messages are only stored for transit and removed automatically
|
||||||
|
|
||||||
|
This repository contains everything needed to setup a ready-to-use chatmail relay
|
||||||
|
comprised of a minimal setup of the battle-tested
|
||||||
|
[Postfix SMTP](https://www.postfix.org) and [Dovecot IMAP](https://www.dovecot.org) MTAs/MDAs.
|
||||||
|
|
||||||
|
The automated setup is designed and optimized for providing chatmail addresses
|
||||||
|
for immediate permission-free onboarding through chat apps and bots.
|
||||||
|
Chatmail addresses are automatically created at first login,
|
||||||
|
after which the initially specified password is required
|
||||||
|
for sending and receiving messages through them.
|
||||||
|
|
||||||
|
Please see [this list of known apps and client projects](https://chatmail.at/clients.html)
|
||||||
|
and [this list of known public 3rd party chatmail relay servers](https://chatmail.at/relays).
|
||||||
|
|
||||||
|
|
||||||
|
## Minimal requirements, Prerequisites
|
||||||
|
|
||||||
|
You will need the following:
|
||||||
|
|
||||||
|
- Control over a domain through a DNS provider of your choice.
|
||||||
|
|
||||||
|
- A Debian 12 server with reachable SMTP/SUBMISSIONS/IMAPS/HTTPS ports.
|
||||||
|
IPv6 is encouraged if available.
|
||||||
|
Chatmail relay servers only require 1GB RAM, one CPU, and perhaps 10GB storage for a
|
||||||
|
few thousand active chatmail addresses.
|
||||||
|
|
||||||
|
- Key-based SSH authentication to the root user.
|
||||||
|
You must add a passphrase-protected private key to your local ssh-agent
|
||||||
|
because you can't type in your passphrase during deployment.
|
||||||
|
(An ed25519 private key is required due to an [upstream bug in paramiko](https://github.com/paramiko/paramiko/issues/2191))
|
||||||
|
|
||||||
|
|
||||||
|
## Getting started
|
||||||
|
|
||||||
|
We use `chat.example.org` as the chatmail domain in the following steps.
|
||||||
|
Please substitute it with your own domain.
|
||||||
|
|
||||||
|
1. Setup the initial DNS records.
|
||||||
|
The following is an example in the familiar BIND zone file format with
|
||||||
|
a TTL of 1 hour (3600 seconds).
|
||||||
|
Please substitute your domain and IP addresses.
|
||||||
|
|
||||||
|
```
|
||||||
|
chat.example.com. 3600 IN A 198.51.100.5
|
||||||
|
chat.example.com. 3600 IN AAAA 2001:db8::5
|
||||||
|
www.chat.example.com. 3600 IN CNAME chat.example.com.
|
||||||
|
mta-sts.chat.example.com. 3600 IN CNAME chat.example.com.
|
||||||
|
```
|
||||||
|
|
||||||
|
2. On your local PC, clone the repository and bootstrap the Python virtualenv.
|
||||||
|
|
||||||
|
```
|
||||||
|
git clone https://github.com/chatmail/relay
|
||||||
|
cd relay
|
||||||
|
scripts/initenv.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
3. On your local PC, create chatmail configuration file `chatmail.ini`:
|
||||||
|
|
||||||
|
```
|
||||||
|
scripts/cmdeploy init chat.example.org # <-- use your domain
|
||||||
|
```
|
||||||
|
|
||||||
|
4. Verify that SSH root login to your remote server works:
|
||||||
|
|
||||||
|
```
|
||||||
|
ssh root@chat.example.org # <-- use your domain
|
||||||
|
```
|
||||||
|
|
||||||
|
5. From your local PC, deploy the remote chatmail relay server:
|
||||||
|
|
||||||
|
```
|
||||||
|
scripts/cmdeploy run
|
||||||
|
```
|
||||||
|
This script will also check that you have all necessary DNS records.
|
||||||
|
If DNS records are missing, it will recommend
|
||||||
|
which you should configure at your DNS provider
|
||||||
|
(it can take some time until they are public).
|
||||||
|
|
||||||
|
### Other helpful commands
|
||||||
|
|
||||||
|
To check the status of your remotely running chatmail service:
|
||||||
|
|
||||||
|
```
|
||||||
|
scripts/cmdeploy status
|
||||||
|
```
|
||||||
|
|
||||||
|
To display and check all recommended DNS records:
|
||||||
|
|
||||||
|
```
|
||||||
|
scripts/cmdeploy dns
|
||||||
|
```
|
||||||
|
|
||||||
|
To test whether your chatmail service is working correctly:
|
||||||
|
|
||||||
|
```
|
||||||
|
scripts/cmdeploy test
|
||||||
|
```
|
||||||
|
|
||||||
|
To measure the performance of your chatmail service:
|
||||||
|
|
||||||
|
```
|
||||||
|
scripts/cmdeploy bench
|
||||||
|
```
|
||||||
|
|
||||||
|
## Overview of this repository
|
||||||
|
|
||||||
|
This repository has four directories:
|
||||||
|
|
||||||
|
- [cmdeploy](https://github.com/chatmail/relay/tree/main/cmdeploy)
|
||||||
|
is a collection of configuration files
|
||||||
|
and a [pyinfra](https://pyinfra.com)-based deployment script.
|
||||||
|
|
||||||
|
- [chatmaild](https://github.com/chatmail/relay/tree/main/chatmaild)
|
||||||
|
is a Python package containing several small services
|
||||||
|
which handle authentication,
|
||||||
|
trigger push notifications on new messages,
|
||||||
|
ensure that outbound mails are encrypted,
|
||||||
|
delete inactive users,
|
||||||
|
and some other minor things.
|
||||||
|
chatmaild can also be installed as a stand-alone Python package.
|
||||||
|
|
||||||
|
- [www](https://github.com/chatmail/relay/tree/main/www)
|
||||||
|
contains the html, css, and markdown files
|
||||||
|
which make up a chatmail relay's web page.
|
||||||
|
Edit them before deploying to make your chatmail relay stand out.
|
||||||
|
|
||||||
|
- [scripts](https://github.com/chatmail/relay/tree/main/scripts)
|
||||||
|
offers two convenience tools for beginners;
|
||||||
|
`initenv.sh` installs the necessary dependencies to a local virtual environment,
|
||||||
|
and the `scripts/cmdeploy` script enables you
|
||||||
|
to run the `cmdeploy` command line tool in the local virtual environment.
|
||||||
|
|
||||||
|
### cmdeploy
|
||||||
|
|
||||||
|
The `cmdeploy/src/cmdeploy/cmdeploy.py` command line tool
|
||||||
|
helps with setting up and managing the chatmail service.
|
||||||
|
`cmdeploy init` creates the `chatmail.ini` config file.
|
||||||
|
`cmdeploy run` uses a [pyinfra](https://pyinfra.com/)-based [`script`](cmdeploy/src/cmdeploy/__init__.py)
|
||||||
|
to automatically install or upgrade all chatmail components on a relay,
|
||||||
|
according to the `chatmail.ini` config.
|
||||||
|
|
||||||
|
The components of chatmail are:
|
||||||
|
|
||||||
|
- [Postfix SMTP MTA](https://www.postfix.org) accepts and relays messages
|
||||||
|
(both from your users and from the wider e-mail MTA network)
|
||||||
|
|
||||||
|
- [Dovecot IMAP MDA](https://www.dovecot.org) stores messages for your users until they download them
|
||||||
|
|
||||||
|
- [Nginx](https://nginx.org/) shows the web page with your privacy policy and additional information
|
||||||
|
|
||||||
|
- [acmetool](https://hlandau.github.io/acmetool/) manages TLS certificates for Dovecot, Postfix, and Nginx
|
||||||
|
|
||||||
|
- [OpenDKIM](http://www.opendkim.org/) for signing messages with DKIM and rejecting inbound messages without DKIM
|
||||||
|
|
||||||
|
- [mtail](https://google.github.io/mtail/) for collecting anonymized metrics in case you have monitoring
|
||||||
|
|
||||||
|
- [Iroh relay](https://www.iroh.computer/docs/concepts/relay)
|
||||||
|
which helps client devices to establish Peer-to-Peer connections
|
||||||
|
|
||||||
|
- and the chatmaild services, explained in the next section:
|
||||||
|
|
||||||
|
### chatmaild
|
||||||
|
|
||||||
|
`chatmaild` implements various systemd-controlled services
|
||||||
|
that integrate with Dovecot and Postfix to achieve instant-onboarding and
|
||||||
|
only relaying OpenPGP end-to-end messages encrypted messages.
|
||||||
|
A short overview of `chatmaild` services:
|
||||||
|
|
||||||
|
- [`doveauth`](https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/doveauth.py)
|
||||||
|
implements create-on-login address semantics and is used
|
||||||
|
by Dovecot during IMAP login and by Postfix during SMTP/SUBMISSION login
|
||||||
|
which in turn uses [Dovecot SASL](https://doc.dovecot.org/configuration_manual/authentication/dict/#complete-example-for-authenticating-via-a-unix-socket)
|
||||||
|
to authenticate logins.
|
||||||
|
|
||||||
|
- [`filtermail`](https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/filtermail.py)
|
||||||
|
prevents unencrypted email from leaving or entering the chatmail service
|
||||||
|
and is integrated into Postfix's outbound and inbound mail pipelines.
|
||||||
|
|
||||||
|
- [`chatmail-metadata`](https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/metadata.py) is contacted by a
|
||||||
|
[Dovecot lua script](https://github.com/chatmail/relay/blob/main/cmdeploy/src/cmdeploy/dovecot/push_notification.lua)
|
||||||
|
to store user-specific relay-side config.
|
||||||
|
On new messages,
|
||||||
|
it [passes the user's push notification token](https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/notifier.py)
|
||||||
|
to [notifications.delta.chat](https://delta.chat/help#instant-delivery)
|
||||||
|
so the push notifications on the user's phone can be triggered
|
||||||
|
by Apple/Google/Huawei.
|
||||||
|
|
||||||
|
- [`delete_inactive_users`](https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/delete_inactive_users.py)
|
||||||
|
deletes users if they have not logged in for a very long time.
|
||||||
|
The timeframe can be configured in `chatmail.ini`.
|
||||||
|
|
||||||
|
- [`lastlogin`](https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/lastlogin.py)
|
||||||
|
is contacted by Dovecot when a user logs in
|
||||||
|
and stores the date of the login.
|
||||||
|
|
||||||
|
- [`echobot`](https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/echo.py)
|
||||||
|
is a small bot for test purposes.
|
||||||
|
It simply echoes back messages from users.
|
||||||
|
|
||||||
|
- [`chatmail-metrics`](https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/metrics.py)
|
||||||
|
collects some metrics and displays them at `https://example.org/metrics`.
|
||||||
|
|
||||||
|
### Home page and getting started for users
|
||||||
|
|
||||||
|
`cmdeploy run` also creates default static web pages and deploys them
|
||||||
|
to a Nginx web server with:
|
||||||
|
|
||||||
|
- a default `index.html` along with a QR code that users can click to
|
||||||
|
create an address on your chatmail relay
|
||||||
|
|
||||||
|
- a default `info.html` that is linked from the home page
|
||||||
|
|
||||||
|
- a default `policy.html` that is linked from the home page
|
||||||
|
|
||||||
|
All `.html` files are generated
|
||||||
|
by the according markdown `.md` file in the `www/src` directory.
|
||||||
|
|
||||||
|
|
||||||
|
### Refining the web pages
|
||||||
|
|
||||||
|
```
|
||||||
|
scripts/cmdeploy webdev
|
||||||
|
```
|
||||||
|
|
||||||
|
This starts a local live development cycle for chatmail web pages:
|
||||||
|
|
||||||
|
- uses the `www/src/page-layout.html` file for producing static
|
||||||
|
HTML pages from `www/src/*.md` files
|
||||||
|
|
||||||
|
- continously builds the web presence reading files from `www/src` directory
|
||||||
|
and generating HTML files and copying assets to the `www/build` directory.
|
||||||
|
|
||||||
|
- Starts a browser window automatically where you can "refresh" as needed.
|
||||||
|
|
||||||
|
#### Custom web pages
|
||||||
|
|
||||||
|
You can skip uploading a web page
|
||||||
|
by setting `www_folder=disabled` in `chatmail.ini`.
|
||||||
|
|
||||||
|
If you want to manage your web pages outside this git repository,
|
||||||
|
you can set `www_folder` in `chatmail.ini` to a custom directory on your computer.
|
||||||
|
`cmdeploy run` will upload it as the server's home page,
|
||||||
|
and if it contains a `src/index.md` file,
|
||||||
|
will build it with hugo.
|
||||||
|
|
||||||
|
|
||||||
|
## Mailbox directory layout
|
||||||
|
|
||||||
|
Fresh chatmail addresses have a mailbox directory that contains:
|
||||||
|
|
||||||
|
- a `password` file with the salted password required for authenticating
|
||||||
|
whether a login may use the address to send/receive messages.
|
||||||
|
If you modify the password file manually, you effectively block the user.
|
||||||
|
|
||||||
|
- `enforceE2EEincoming` is a default-created file with each address.
|
||||||
|
If present the file indicates that this chatmail address rejects incoming cleartext messages.
|
||||||
|
If absent the address accepts incoming cleartext messages.
|
||||||
|
|
||||||
|
- `dovecot*`, `cur`, `new` and `tmp` represent IMAP/mailbox state.
|
||||||
|
If the address is only used by one device, the Maildir directories
|
||||||
|
will typically be empty unless the user of that address hasn't been online
|
||||||
|
for a while.
|
||||||
|
|
||||||
|
|
||||||
|
## Emergency Commands to disable automatic address creation
|
||||||
|
|
||||||
|
If you need to stop address creation,
|
||||||
|
e.g. because some script is wildly creating addresses,
|
||||||
|
login with ssh and run:
|
||||||
|
|
||||||
|
```
|
||||||
|
touch /etc/chatmail-nocreate
|
||||||
|
```
|
||||||
|
|
||||||
|
Chatmail address creation will be denied while this file is present.
|
||||||
|
|
||||||
|
### Ports
|
||||||
|
|
||||||
|
[Postfix](http://www.postfix.org/) listens on ports 25 (SMTP) and 587 (SUBMISSION) and 465 (SUBMISSIONS).
|
||||||
|
[Dovecot](https://www.dovecot.org/) listens on ports 143 (IMAP) and 993 (IMAPS).
|
||||||
|
[Nginx](https://www.nginx.com/) listens on port 8443 (HTTPS-ALT) and 443 (HTTPS).
|
||||||
|
Port 443 multiplexes HTTPS, IMAP and SMTP using ALPN to redirect connections to ports 8443, 465 or 993.
|
||||||
|
[acmetool](https://hlandau.github.io/acmetool/) listens on port 80 (HTTP).
|
||||||
|
|
||||||
|
chatmail-core based apps will, however, discover all ports and configurations
|
||||||
|
automatically by reading the [autoconfig XML file](https://www.ietf.org/archive/id/draft-bucksch-autoconfig-00.html) from the chatmail relay server.
|
||||||
|
|
||||||
|
## Email authentication
|
||||||
|
|
||||||
|
Chatmail relays enforce [DKIM](https://www.rfc-editor.org/rfc/rfc6376)
|
||||||
|
to authenticate incoming emails.
|
||||||
|
Incoming emails must have a valid DKIM signature with
|
||||||
|
Signing Domain Identifier (SDID, `d=` parameter in the DKIM-Signature header)
|
||||||
|
equal to the `From:` header domain.
|
||||||
|
This property is checked by OpenDKIM screen policy script
|
||||||
|
before validating the signatures.
|
||||||
|
This correpsonds to strict [DMARC](https://www.rfc-editor.org/rfc/rfc7489) alignment (`adkim=s`),
|
||||||
|
but chatmail does not rely on DMARC and does not consult the sender policy published in DMARC records.
|
||||||
|
Other legacy authentication mechanisms such as [iprev](https://www.rfc-editor.org/rfc/rfc8601#section-2.7.3)
|
||||||
|
and [SPF](https://www.rfc-editor.org/rfc/rfc7208) are also not taken into account.
|
||||||
|
If there is no valid DKIM signature on the incoming email,
|
||||||
|
the sender receives a "5.7.1 No valid DKIM signature found" error.
|
||||||
|
|
||||||
|
Outgoing emails must be sent over authenticated connection
|
||||||
|
with envelope MAIL FROM (return path) corresponding to the login.
|
||||||
|
This is ensured by Postfix which maps login username
|
||||||
|
to MAIL FROM with
|
||||||
|
[`smtpd_sender_login_maps`](https://www.postfix.org/postconf.5.html#smtpd_sender_login_maps)
|
||||||
|
and rejects incorrectly authenticated emails with [`reject_sender_login_mismatch`](reject_sender_login_mismatch) policy.
|
||||||
|
`From:` header must correspond to envelope MAIL FROM,
|
||||||
|
this is ensured by `filtermail` proxy.
|
||||||
|
|
||||||
|
## TLS requirements
|
||||||
|
|
||||||
|
Postfix is configured to require valid TLS
|
||||||
|
by setting [`smtp_tls_security_level`](https://www.postfix.org/postconf.5.html#smtp_tls_security_level) to `verify`.
|
||||||
|
If emails don't arrive at your chatmail relay server,
|
||||||
|
the problem is likely that your relay does not have a valid TLS certificate.
|
||||||
|
|
||||||
|
You can test it by resolving `MX` records of your relay domain
|
||||||
|
and then connecting to MX relays (e.g `mx.example.org`) with
|
||||||
|
`openssl s_client -connect mx.example.org:25 -verify_hostname mx.example.org -verify_return_error -starttls smtp`
|
||||||
|
from the host that has open port 25 to verify that certificate is valid.
|
||||||
|
|
||||||
|
When providing a TLS certificate to your chatmail relay server,
|
||||||
|
make sure to provide the full certificate chain
|
||||||
|
and not just the last certificate.
|
||||||
|
|
||||||
|
If you are running an Exim server and don't see incoming connections
|
||||||
|
from a chatmail relay server in the logs,
|
||||||
|
make sure `smtp_no_mail` log item is enabled in the config
|
||||||
|
with `log_selector = +smtp_no_mail`.
|
||||||
|
By default Exim does not log sessions that are closed
|
||||||
|
before sending the `MAIL` command.
|
||||||
|
This happens if the certificate is not recognized as valid by Postfix,
|
||||||
|
so you might think that connection is not established
|
||||||
|
while actually it is a problem with your TLS certificate.
|
||||||
|
|
||||||
|
## Migrating a chatmail relay to a new host
|
||||||
|
|
||||||
|
If you want to migrate chatmail relay from an old machine
|
||||||
|
to a new machine,
|
||||||
|
you can use these steps.
|
||||||
|
They were tested with a Linux laptop;
|
||||||
|
you might need to adjust some of the steps to your environment.
|
||||||
|
|
||||||
|
Let's assume that your `mail_domain` is `mail.example.org`,
|
||||||
|
all involved machines run Debian 12,
|
||||||
|
your old site's IP address is `13.37.13.37`,
|
||||||
|
and your new site's IP address is `13.12.23.42`.
|
||||||
|
|
||||||
|
Note, you should lower the TTLs of your DNS records to a value
|
||||||
|
such as 300 (5 minutes) so the migration happens as smoothly as possible.
|
||||||
|
|
||||||
|
During the guide you might get a warning about changed SSH Host keys;
|
||||||
|
in this case, just run `ssh-keygen -R "mail.example.org"` as recommended.
|
||||||
|
|
||||||
|
1. First, disable mail services on the old site.
|
||||||
|
|
||||||
|
```
|
||||||
|
cmdeploy run --disable-mail --ssh-host 13.37.13.37
|
||||||
|
```
|
||||||
|
|
||||||
|
Now your users will notice the migration
|
||||||
|
and will not be able to send or receive messages
|
||||||
|
until the migration is completed.
|
||||||
|
|
||||||
|
2. Now we want to copy `/home/vmail`, `/var/lib/acme`, `/etc/dkimkeys`, `/run/echobot`, and `/var/spool/postfix` to the new site.
|
||||||
|
Login to the old site while forwarding your SSH agent
|
||||||
|
so you can copy directly from the old to the new site with your SSH key:
|
||||||
|
```
|
||||||
|
ssh -A root@13.37.13.37
|
||||||
|
tar c - /home/vmail/mail /var/lib/acme /etc/dkimkeys /run/echobot /var/spool/postfix | ssh root@13.12.23.42 "tar x -C /"
|
||||||
|
```
|
||||||
|
|
||||||
|
This transfers all addresses, the TLS certificate, DKIM keys (so DKIM DNS record remains valid), and the echobot's password so it continues to function.
|
||||||
|
It also preserves the Postfix mail spool so any messages pending delivery will still be delivered.
|
||||||
|
|
||||||
|
3. Install chatmail on the new machine:
|
||||||
|
|
||||||
|
```
|
||||||
|
cmdeploy run --disable-mail --ssh-host 13.12.23.42
|
||||||
|
```
|
||||||
|
Postfix and Dovecot are disabled for now; we will enable them later.
|
||||||
|
We first need to make the new site fully operational.
|
||||||
|
|
||||||
|
4. On the new site, run the following to ensure the ownership is correct in case UIDs/GIDs changed:
|
||||||
|
|
||||||
|
```
|
||||||
|
chown root: -R /var/lib/acme
|
||||||
|
chown opendkim: -R /etc/dkimkeys
|
||||||
|
chown vmail: -R /home/vmail/mail
|
||||||
|
chown echobot: -R /run/echobot
|
||||||
|
```
|
||||||
|
|
||||||
|
5. Now, update DNS entries.
|
||||||
|
|
||||||
|
If other MTAs try to deliver messages to your chatmail domain they may fail intermittently,
|
||||||
|
as DNS catches up with the new site settings
|
||||||
|
but normally will retry delivering messages
|
||||||
|
for at least a week, so messages will not be lost.
|
||||||
|
|
||||||
|
6. Finally, you can execute `cmdeploy run --ssh-host 13.12.23.42` to turn on chatmail on the new relay.
|
||||||
|
Your users will be able to use the chatmail relay as soon as the DNS changes have propagated.
|
||||||
|
Voilà!
|
||||||
|
|
||||||
|
## Setting up a reverse proxy
|
||||||
|
|
||||||
|
A chatmail relay MTA does not track or depend on the client IP address
|
||||||
|
for its operation, so it can be run behind a reverse proxy.
|
||||||
|
This will not even affect incoming mail authentication
|
||||||
|
as DKIM only checks the cryptographic signature
|
||||||
|
of the message and does not use the IP address as the input.
|
||||||
|
|
||||||
|
For example, you may want to self-host your chatmail relay
|
||||||
|
and only use hosted VPS to provide a public IP address
|
||||||
|
for client connections and incoming mail.
|
||||||
|
You can connect chatmail relay to VPS
|
||||||
|
using a tunnel protocol
|
||||||
|
such as [WireGuard](https://www.wireguard.com/)
|
||||||
|
and set up a reverse proxy on a VPS
|
||||||
|
to forward connections to the chatmail relay
|
||||||
|
over the tunnel.
|
||||||
|
You can also set up multiple reverse proxies
|
||||||
|
for your chatmail relay in different networks
|
||||||
|
to ensure your relay is reachable even when
|
||||||
|
one of the IPs becomes inaccessible due to
|
||||||
|
hosting or routing problems.
|
||||||
|
|
||||||
|
Note that your chatmail relay still needs
|
||||||
|
to be able to make outgoing connections on port 25
|
||||||
|
to send messages outside.
|
||||||
|
|
||||||
|
To set up a reverse proxy
|
||||||
|
(or rather Destination NAT, DNAT)
|
||||||
|
for your chatmail relay, run:
|
||||||
|
|
||||||
|
```
|
||||||
|
scripts/cmdeploy proxy <proxy_ip_address> --relay-ipv4 <relay_ipv4_address> --relay-ipv6 <relay_ipv6_address>
|
||||||
|
```
|
||||||
|
|
||||||
|
Once proxy relay is set up,
|
||||||
|
you can add its IP address to the DNS,
|
||||||
|
or distribute it as you wish.
|
||||||
|
|
||||||
|
## Neighbors and Acquaintances
|
||||||
|
|
||||||
|
Here are some related projects that you may be interested in:
|
||||||
|
|
||||||
|
- [Mox](https://github.com/mjl-/mox): A Golang email server. [Work is in
|
||||||
|
progress](https://github.com/mjl-/mox/issues/251) to modify it to support all
|
||||||
|
of the features and configuration settings required to operate as a chatmail
|
||||||
|
relay.
|
||||||
|
- [Maddy-Chatmail](https://github.com/sadraiiali/maddy_chatmail): a plugin for the
|
||||||
|
[Maddy email server](https://maddy.email/) which aims to implement the
|
||||||
|
chatmail relay features and configuration options.
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
|
|||||||
|
|
||||||
[project]
|
[project]
|
||||||
name = "chatmaild"
|
name = "chatmaild"
|
||||||
version = "0.3"
|
version = "0.2"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"aiosmtpd",
|
"aiosmtpd",
|
||||||
"iniconfig",
|
"iniconfig",
|
||||||
@@ -25,11 +25,10 @@ where = ['src']
|
|||||||
doveauth = "chatmaild.doveauth:main"
|
doveauth = "chatmaild.doveauth:main"
|
||||||
chatmail-metadata = "chatmaild.metadata:main"
|
chatmail-metadata = "chatmaild.metadata:main"
|
||||||
filtermail = "chatmaild.filtermail:main"
|
filtermail = "chatmaild.filtermail:main"
|
||||||
|
echobot = "chatmaild.echo:main"
|
||||||
chatmail-metrics = "chatmaild.metrics:main"
|
chatmail-metrics = "chatmaild.metrics:main"
|
||||||
chatmail-expire = "chatmaild.expire:main"
|
delete_inactive_users = "chatmaild.delete_inactive_users:main"
|
||||||
chatmail-fsreport = "chatmaild.fsreport:main"
|
|
||||||
lastlogin = "chatmaild.lastlogin:main"
|
lastlogin = "chatmaild.lastlogin:main"
|
||||||
turnserver = "chatmaild.turnserver:main"
|
|
||||||
|
|
||||||
[project.entry-points.pytest11]
|
[project.entry-points.pytest11]
|
||||||
"chatmaild.testplugin" = "chatmaild.tests.plugin"
|
"chatmaild.testplugin" = "chatmaild.tests.plugin"
|
||||||
@@ -71,7 +70,5 @@ commands =
|
|||||||
[testenv]
|
[testenv]
|
||||||
deps = pytest
|
deps = pytest
|
||||||
pdbpp
|
pdbpp
|
||||||
pytest-localserver
|
|
||||||
execnet
|
|
||||||
commands = pytest -v -rsXx {posargs}
|
commands = pytest -v -rsXx {posargs}
|
||||||
"""
|
"""
|
||||||
|
|||||||
@@ -4,6 +4,8 @@ import iniconfig
|
|||||||
|
|
||||||
from chatmaild.user import User
|
from chatmaild.user import User
|
||||||
|
|
||||||
|
echobot_password_path = Path("/run/echobot/password")
|
||||||
|
|
||||||
|
|
||||||
def read_config(inipath):
|
def read_config(inipath):
|
||||||
assert Path(inipath).exists(), inipath
|
assert Path(inipath).exists(), inipath
|
||||||
@@ -42,9 +44,7 @@ class Config:
|
|||||||
)
|
)
|
||||||
self.mtail_address = params.get("mtail_address")
|
self.mtail_address = params.get("mtail_address")
|
||||||
self.disable_ipv6 = params.get("disable_ipv6", "false").lower() == "true"
|
self.disable_ipv6 = params.get("disable_ipv6", "false").lower() == "true"
|
||||||
self.acme_email = params.get("acme_email", "")
|
|
||||||
self.imap_rawlog = params.get("imap_rawlog", "false").lower() == "true"
|
self.imap_rawlog = params.get("imap_rawlog", "false").lower() == "true"
|
||||||
self.imap_compress = params.get("imap_compress", "false").lower() == "true"
|
|
||||||
if "iroh_relay" not in params:
|
if "iroh_relay" not in params:
|
||||||
self.iroh_relay = "https://" + params["mail_domain"]
|
self.iroh_relay = "https://" + params["mail_domain"]
|
||||||
self.enable_iroh_relay = True
|
self.enable_iroh_relay = True
|
||||||
@@ -71,7 +71,10 @@ class Config:
|
|||||||
raise ValueError(f"invalid address {addr!r}")
|
raise ValueError(f"invalid address {addr!r}")
|
||||||
|
|
||||||
maildir = self.mailboxes_dir.joinpath(addr)
|
maildir = self.mailboxes_dir.joinpath(addr)
|
||||||
password_path = maildir.joinpath("password")
|
if addr.startswith("echo@"):
|
||||||
|
password_path = echobot_password_path
|
||||||
|
else:
|
||||||
|
password_path = maildir.joinpath("password")
|
||||||
|
|
||||||
return User(maildir, addr, password_path, uid="vmail", gid="vmail")
|
return User(maildir, addr, password_path, uid="vmail", gid="vmail")
|
||||||
|
|
||||||
|
|||||||
31
chatmaild/src/chatmaild/delete_inactive_users.py
Normal file
31
chatmaild/src/chatmaild/delete_inactive_users.py
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
"""
|
||||||
|
Remove inactive users
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
|
||||||
|
from .config import read_config
|
||||||
|
|
||||||
|
|
||||||
|
def delete_inactive_users(config):
|
||||||
|
cutoff_date = time.time() - config.delete_inactive_users_after * 86400
|
||||||
|
for addr in os.listdir(config.mailboxes_dir):
|
||||||
|
try:
|
||||||
|
user = config.get_user(addr)
|
||||||
|
except ValueError:
|
||||||
|
continue
|
||||||
|
|
||||||
|
read_timestamp = user.get_last_login_timestamp()
|
||||||
|
if read_timestamp and read_timestamp < cutoff_date:
|
||||||
|
path = config.mailboxes_dir.joinpath(addr)
|
||||||
|
assert path == user.maildir
|
||||||
|
shutil.rmtree(path, ignore_errors=True)
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
(cfgpath,) = sys.argv[1:]
|
||||||
|
config = read_config(cfgpath)
|
||||||
|
delete_inactive_users(config)
|
||||||
@@ -40,6 +40,10 @@ def is_allowed_to_create(config: Config, user, cleartext_password) -> bool:
|
|||||||
return False
|
return False
|
||||||
localpart, domain = parts
|
localpart, domain = parts
|
||||||
|
|
||||||
|
if localpart == "echo":
|
||||||
|
# echobot account should not be created in the database
|
||||||
|
return False
|
||||||
|
|
||||||
if (
|
if (
|
||||||
len(localpart) > config.username_max_length
|
len(localpart) > config.username_max_length
|
||||||
or len(localpart) < config.username_min_length
|
or len(localpart) < config.username_min_length
|
||||||
|
|||||||
109
chatmaild/src/chatmaild/echo.py
Normal file
109
chatmaild/src/chatmaild/echo.py
Normal file
@@ -0,0 +1,109 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""Advanced echo bot example.
|
||||||
|
|
||||||
|
it will echo back any message that has non-empty text and also supports the /help command.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from deltachat_rpc_client import Bot, DeltaChat, EventType, Rpc, events
|
||||||
|
|
||||||
|
from chatmaild.config import echobot_password_path, read_config
|
||||||
|
from chatmaild.doveauth import encrypt_password
|
||||||
|
from chatmaild.newemail import create_newemail_dict
|
||||||
|
|
||||||
|
hooks = events.HookCollection()
|
||||||
|
|
||||||
|
|
||||||
|
@hooks.on(events.RawEvent)
|
||||||
|
def log_event(event):
|
||||||
|
if event.kind == EventType.INFO:
|
||||||
|
logging.info(event.msg)
|
||||||
|
elif event.kind == EventType.WARNING:
|
||||||
|
logging.warning(event.msg)
|
||||||
|
|
||||||
|
|
||||||
|
@hooks.on(events.RawEvent(EventType.ERROR))
|
||||||
|
def log_error(event):
|
||||||
|
logging.error("%s", event.msg)
|
||||||
|
|
||||||
|
|
||||||
|
@hooks.on(events.MemberListChanged)
|
||||||
|
def on_memberlist_changed(event):
|
||||||
|
logging.info(
|
||||||
|
"member %s was %s", event.member, "added" if event.member_added else "removed"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@hooks.on(events.GroupImageChanged)
|
||||||
|
def on_group_image_changed(event):
|
||||||
|
logging.info("group image %s", "deleted" if event.image_deleted else "changed")
|
||||||
|
|
||||||
|
|
||||||
|
@hooks.on(events.GroupNameChanged)
|
||||||
|
def on_group_name_changed(event):
|
||||||
|
logging.info(f"group name changed, old name: {event.old_name}")
|
||||||
|
|
||||||
|
|
||||||
|
@hooks.on(events.NewMessage(func=lambda e: not e.command))
|
||||||
|
def echo(event):
|
||||||
|
snapshot = event.message_snapshot
|
||||||
|
if snapshot.is_info:
|
||||||
|
# Ignore info messages
|
||||||
|
return
|
||||||
|
if snapshot.text or snapshot.file:
|
||||||
|
snapshot.chat.send_message(text=snapshot.text, file=snapshot.file)
|
||||||
|
|
||||||
|
|
||||||
|
@hooks.on(events.NewMessage(command="/help"))
|
||||||
|
def help_command(event):
|
||||||
|
snapshot = event.message_snapshot
|
||||||
|
snapshot.chat.send_text("Send me any message and I will echo it back")
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
logging.basicConfig(level=logging.INFO)
|
||||||
|
path = os.environ.get("PATH")
|
||||||
|
venv_path = sys.argv[0].strip("echobot")
|
||||||
|
os.environ["PATH"] = path + ":" + venv_path
|
||||||
|
with Rpc() as rpc:
|
||||||
|
deltachat = DeltaChat(rpc)
|
||||||
|
system_info = deltachat.get_system_info()
|
||||||
|
logging.info(f"Running deltachat core {system_info.deltachat_core_version}")
|
||||||
|
|
||||||
|
accounts = deltachat.get_all_accounts()
|
||||||
|
account = accounts[0] if accounts else deltachat.add_account()
|
||||||
|
|
||||||
|
bot = Bot(account, hooks)
|
||||||
|
|
||||||
|
config = read_config(sys.argv[1])
|
||||||
|
addr = "echo@" + config.mail_domain
|
||||||
|
|
||||||
|
# Create password file
|
||||||
|
if bot.is_configured():
|
||||||
|
password = bot.account.get_config("mail_pw")
|
||||||
|
else:
|
||||||
|
password = create_newemail_dict(config)["password"]
|
||||||
|
|
||||||
|
echobot_password_path.write_text(encrypt_password(password))
|
||||||
|
# Give the user which doveauth runs as access to the password file.
|
||||||
|
subprocess.check_call(
|
||||||
|
["/usr/bin/setfacl", "-m", "user:vmail:r", echobot_password_path],
|
||||||
|
)
|
||||||
|
|
||||||
|
if not bot.is_configured():
|
||||||
|
bot.configure(addr, password)
|
||||||
|
|
||||||
|
# write invite link to working directory
|
||||||
|
invitelink = bot.account.get_qr_code()
|
||||||
|
Path("invite-link.txt").write_text(invitelink)
|
||||||
|
|
||||||
|
bot.run_forever()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
@@ -1,218 +0,0 @@
|
|||||||
"""
|
|
||||||
Expire old messages and addresses.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
import os
|
|
||||||
import shutil
|
|
||||||
import sys
|
|
||||||
import time
|
|
||||||
from argparse import ArgumentParser
|
|
||||||
from collections import namedtuple
|
|
||||||
from datetime import datetime
|
|
||||||
from stat import S_ISREG
|
|
||||||
|
|
||||||
from chatmaild.config import read_config
|
|
||||||
|
|
||||||
FileEntry = namedtuple("FileEntry", ("relpath", "mtime", "size"))
|
|
||||||
|
|
||||||
|
|
||||||
def iter_mailboxes(basedir, maxnum):
|
|
||||||
if not os.path.exists(basedir):
|
|
||||||
print_info(f"no mailboxes found at: {basedir}")
|
|
||||||
return
|
|
||||||
|
|
||||||
for name in os_listdir_if_exists(basedir)[:maxnum]:
|
|
||||||
if "@" in name:
|
|
||||||
yield MailboxStat(basedir + "/" + name)
|
|
||||||
|
|
||||||
|
|
||||||
def get_file_entry(path):
|
|
||||||
"""return a FileEntry or None if the path does not exist or is not a regular file."""
|
|
||||||
try:
|
|
||||||
st = os.stat(path)
|
|
||||||
except FileNotFoundError:
|
|
||||||
return None
|
|
||||||
if not S_ISREG(st.st_mode):
|
|
||||||
return None
|
|
||||||
return FileEntry(path, st.st_mtime, st.st_size)
|
|
||||||
|
|
||||||
|
|
||||||
def os_listdir_if_exists(path):
|
|
||||||
"""return a list of names obtained from os.listdir or an empty list if the path does not exist."""
|
|
||||||
try:
|
|
||||||
return os.listdir(path)
|
|
||||||
except FileNotFoundError:
|
|
||||||
return []
|
|
||||||
|
|
||||||
|
|
||||||
class MailboxStat:
|
|
||||||
last_login = None
|
|
||||||
|
|
||||||
def __init__(self, basedir):
|
|
||||||
self.basedir = str(basedir)
|
|
||||||
# all detected messages in cur/new/tmp folders
|
|
||||||
self.messages = []
|
|
||||||
|
|
||||||
# all detected files in mailbox top dir
|
|
||||||
self.extrafiles = []
|
|
||||||
|
|
||||||
# scan all relevant files (without recursion)
|
|
||||||
old_cwd = os.getcwd()
|
|
||||||
try:
|
|
||||||
os.chdir(self.basedir)
|
|
||||||
except FileNotFoundError:
|
|
||||||
return
|
|
||||||
for name in os_listdir_if_exists("."):
|
|
||||||
if name in ("cur", "new", "tmp"):
|
|
||||||
for msg_name in os_listdir_if_exists(name):
|
|
||||||
entry = get_file_entry(f"{name}/{msg_name}")
|
|
||||||
if entry is not None:
|
|
||||||
self.messages.append(entry)
|
|
||||||
|
|
||||||
else:
|
|
||||||
entry = get_file_entry(name)
|
|
||||||
if entry is not None:
|
|
||||||
self.extrafiles.append(entry)
|
|
||||||
if name == "password":
|
|
||||||
self.last_login = entry.mtime
|
|
||||||
self.extrafiles.sort(key=lambda x: -x.size)
|
|
||||||
os.chdir(old_cwd)
|
|
||||||
|
|
||||||
|
|
||||||
def print_info(msg):
|
|
||||||
print(msg, file=sys.stderr)
|
|
||||||
|
|
||||||
|
|
||||||
class Expiry:
|
|
||||||
def __init__(self, config, dry, now, verbose):
|
|
||||||
self.config = config
|
|
||||||
self.dry = dry
|
|
||||||
self.now = now
|
|
||||||
self.verbose = verbose
|
|
||||||
self.del_mboxes = 0
|
|
||||||
self.all_mboxes = 0
|
|
||||||
self.del_files = 0
|
|
||||||
self.all_files = 0
|
|
||||||
self.start = time.time()
|
|
||||||
|
|
||||||
def remove_mailbox(self, mboxdir):
|
|
||||||
if self.verbose:
|
|
||||||
print_info(f"removing {mboxdir}")
|
|
||||||
if not self.dry:
|
|
||||||
shutil.rmtree(mboxdir)
|
|
||||||
self.del_mboxes += 1
|
|
||||||
|
|
||||||
def remove_file(self, path, mtime=None):
|
|
||||||
if self.verbose:
|
|
||||||
if mtime is not None:
|
|
||||||
date = datetime.fromtimestamp(mtime).strftime("%b %d")
|
|
||||||
print_info(f"removing {date} {path}")
|
|
||||||
else:
|
|
||||||
print_info(f"removing {path}")
|
|
||||||
if not self.dry:
|
|
||||||
try:
|
|
||||||
os.unlink(path)
|
|
||||||
except FileNotFoundError:
|
|
||||||
print_info(f"file not found/vanished {path}")
|
|
||||||
self.del_files += 1
|
|
||||||
|
|
||||||
def process_mailbox_stat(self, mbox):
|
|
||||||
cutoff_without_login = (
|
|
||||||
self.now - int(self.config.delete_inactive_users_after) * 86400
|
|
||||||
)
|
|
||||||
cutoff_mails = self.now - int(self.config.delete_mails_after) * 86400
|
|
||||||
cutoff_large_mails = self.now - int(self.config.delete_large_after) * 86400
|
|
||||||
|
|
||||||
self.all_mboxes += 1
|
|
||||||
changed = False
|
|
||||||
if mbox.last_login and mbox.last_login < cutoff_without_login:
|
|
||||||
self.remove_mailbox(mbox.basedir)
|
|
||||||
return
|
|
||||||
|
|
||||||
# all to-be-removed files are relative to the mailbox basedir
|
|
||||||
try:
|
|
||||||
os.chdir(mbox.basedir)
|
|
||||||
except FileNotFoundError:
|
|
||||||
print_info(f"mailbox not found/vanished {mbox.basedir}")
|
|
||||||
return
|
|
||||||
|
|
||||||
mboxname = os.path.basename(mbox.basedir)
|
|
||||||
if self.verbose:
|
|
||||||
date = datetime.fromtimestamp(mbox.last_login) if mbox.last_login else None
|
|
||||||
if date:
|
|
||||||
print_info(f"checking mailbox {date.strftime('%b %d')} {mboxname}")
|
|
||||||
else:
|
|
||||||
print_info(f"checking mailbox (no last_login) {mboxname}")
|
|
||||||
self.all_files += len(mbox.messages)
|
|
||||||
for message in mbox.messages:
|
|
||||||
if message.mtime < cutoff_mails:
|
|
||||||
self.remove_file(message.relpath, mtime=message.mtime)
|
|
||||||
elif message.size > 200000 and message.mtime < cutoff_large_mails:
|
|
||||||
# we only remove noticed large files (not unnoticed ones in new/)
|
|
||||||
if message.relpath.startswith("cur/"):
|
|
||||||
self.remove_file(message.relpath, mtime=message.mtime)
|
|
||||||
else:
|
|
||||||
continue
|
|
||||||
changed = True
|
|
||||||
if changed:
|
|
||||||
self.remove_file("maildirsize")
|
|
||||||
|
|
||||||
def get_summary(self):
|
|
||||||
return (
|
|
||||||
f"Removed {self.del_mboxes} out of {self.all_mboxes} mailboxes "
|
|
||||||
f"and {self.del_files} out of {self.all_files} files in existing mailboxes "
|
|
||||||
f"in {time.time() - self.start:2.2f} seconds"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def main(args=None):
|
|
||||||
"""Expire mailboxes and messages according to chatmail config"""
|
|
||||||
parser = ArgumentParser(description=main.__doc__)
|
|
||||||
ini = "/usr/local/lib/chatmaild/chatmail.ini"
|
|
||||||
parser.add_argument(
|
|
||||||
"chatmail_ini",
|
|
||||||
action="store",
|
|
||||||
nargs="?",
|
|
||||||
help=f"path pointing to chatmail.ini file, default: {ini}",
|
|
||||||
default=ini,
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
"--days", action="store", help="assume date to be days older than now"
|
|
||||||
)
|
|
||||||
|
|
||||||
parser.add_argument(
|
|
||||||
"--maxnum",
|
|
||||||
default=None,
|
|
||||||
action="store",
|
|
||||||
help="maximum number of mailboxes to iterate on",
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
"-v",
|
|
||||||
dest="verbose",
|
|
||||||
action="store_true",
|
|
||||||
help="print out removed files and mailboxes",
|
|
||||||
)
|
|
||||||
|
|
||||||
parser.add_argument(
|
|
||||||
"--remove",
|
|
||||||
dest="remove",
|
|
||||||
action="store_true",
|
|
||||||
help="actually remove all expired files and dirs",
|
|
||||||
)
|
|
||||||
args = parser.parse_args(args)
|
|
||||||
|
|
||||||
config = read_config(args.chatmail_ini)
|
|
||||||
now = datetime.utcnow().timestamp()
|
|
||||||
if args.days:
|
|
||||||
now = now - 86400 * int(args.days)
|
|
||||||
|
|
||||||
maxnum = int(args.maxnum) if args.maxnum else None
|
|
||||||
exp = Expiry(config, dry=not args.remove, now=now, verbose=args.verbose)
|
|
||||||
for mailbox in iter_mailboxes(str(config.mailboxes_dir), maxnum=maxnum):
|
|
||||||
exp.process_mailbox_stat(mailbox)
|
|
||||||
print(exp.get_summary())
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main(sys.argv[1:])
|
|
||||||
@@ -2,6 +2,7 @@
|
|||||||
import asyncio
|
import asyncio
|
||||||
import base64
|
import base64
|
||||||
import binascii
|
import binascii
|
||||||
|
import logging
|
||||||
import sys
|
import sys
|
||||||
import time
|
import time
|
||||||
from email import policy
|
from email import policy
|
||||||
@@ -82,14 +83,8 @@ def check_openpgp_payload(payload: bytes):
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
def check_armored_payload(payload: str, outgoing: bool):
|
def check_armored_payload(payload: str):
|
||||||
"""Check the armored PGP message for invalid content.
|
prefix = "-----BEGIN PGP MESSAGE-----\r\n\r\n"
|
||||||
|
|
||||||
:param payload: the armored PGP message
|
|
||||||
:param outgoing: whether the message is outgoing or incoming
|
|
||||||
:return: whether the message is a valid PGP message
|
|
||||||
"""
|
|
||||||
prefix = "-----BEGIN PGP MESSAGE-----\r\n"
|
|
||||||
if not payload.startswith(prefix):
|
if not payload.startswith(prefix):
|
||||||
return False
|
return False
|
||||||
payload = payload.removeprefix(prefix)
|
payload = payload.removeprefix(prefix)
|
||||||
@@ -101,16 +96,6 @@ def check_armored_payload(payload: str, outgoing: bool):
|
|||||||
return False
|
return False
|
||||||
payload = payload.removesuffix(suffix)
|
payload = payload.removesuffix(suffix)
|
||||||
|
|
||||||
version_comment = "Version: "
|
|
||||||
if payload.startswith(version_comment):
|
|
||||||
if outgoing: # Disallow comments in outgoing messages
|
|
||||||
return False
|
|
||||||
# Remove comments from incoming messages
|
|
||||||
payload = payload.partition("\r\n")[2]
|
|
||||||
|
|
||||||
while payload.startswith("\r\n"):
|
|
||||||
payload = payload.removeprefix("\r\n")
|
|
||||||
|
|
||||||
# Remove CRC24.
|
# Remove CRC24.
|
||||||
payload = payload.rpartition("=")[0]
|
payload = payload.rpartition("=")[0]
|
||||||
|
|
||||||
@@ -146,7 +131,7 @@ def is_securejoin(message):
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
def check_encrypted(message, outgoing=True):
|
def check_encrypted(message):
|
||||||
"""Check that the message is an OpenPGP-encrypted message.
|
"""Check that the message is an OpenPGP-encrypted message.
|
||||||
|
|
||||||
MIME structure of the message must correspond to <https://www.rfc-editor.org/rfc/rfc3156>.
|
MIME structure of the message must correspond to <https://www.rfc-editor.org/rfc/rfc3156>.
|
||||||
@@ -173,7 +158,7 @@ def check_encrypted(message, outgoing=True):
|
|||||||
if part.get_content_type() != "application/octet-stream":
|
if part.get_content_type() != "application/octet-stream":
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if not check_armored_payload(part.get_payload(), outgoing=outgoing):
|
if not check_armored_payload(part.get_payload()):
|
||||||
return False
|
return False
|
||||||
else:
|
else:
|
||||||
return False
|
return False
|
||||||
@@ -227,7 +212,7 @@ class OutgoingBeforeQueueHandler:
|
|||||||
self.send_rate_limiter = SendRateLimiter()
|
self.send_rate_limiter = SendRateLimiter()
|
||||||
|
|
||||||
async def handle_MAIL(self, server, session, envelope, address, mail_options):
|
async def handle_MAIL(self, server, session, envelope, address, mail_options):
|
||||||
log_info(f"handle_MAIL from {address}")
|
logging.info(f"handle_MAIL from {address}")
|
||||||
envelope.mail_from = address
|
envelope.mail_from = address
|
||||||
max_sent = self.config.max_user_send_per_minute
|
max_sent = self.config.max_user_send_per_minute
|
||||||
if not self.send_rate_limiter.is_sending_allowed(address, max_sent):
|
if not self.send_rate_limiter.is_sending_allowed(address, max_sent):
|
||||||
@@ -240,15 +225,11 @@ class OutgoingBeforeQueueHandler:
|
|||||||
return "250 OK"
|
return "250 OK"
|
||||||
|
|
||||||
async def handle_DATA(self, server, session, envelope):
|
async def handle_DATA(self, server, session, envelope):
|
||||||
loop = asyncio.get_running_loop()
|
logging.info("handle_DATA before-queue")
|
||||||
return await loop.run_in_executor(None, self.sync_handle_DATA, envelope)
|
|
||||||
|
|
||||||
def sync_handle_DATA(self, envelope):
|
|
||||||
log_info("handle_DATA before-queue")
|
|
||||||
error = self.check_DATA(envelope)
|
error = self.check_DATA(envelope)
|
||||||
if error:
|
if error:
|
||||||
return error
|
return error
|
||||||
log_info("re-injecting the mail that passed checks")
|
logging.info("re-injecting the mail that passed checks")
|
||||||
client = SMTPClient("localhost", self.config.postfix_reinject_port)
|
client = SMTPClient("localhost", self.config.postfix_reinject_port)
|
||||||
client.sendmail(
|
client.sendmail(
|
||||||
envelope.mail_from, envelope.rcpt_tos, envelope.original_content
|
envelope.mail_from, envelope.rcpt_tos, envelope.original_content
|
||||||
@@ -257,10 +238,10 @@ class OutgoingBeforeQueueHandler:
|
|||||||
|
|
||||||
def check_DATA(self, envelope):
|
def check_DATA(self, envelope):
|
||||||
"""the central filtering function for e-mails."""
|
"""the central filtering function for e-mails."""
|
||||||
log_info(f"Processing DATA message from {envelope.mail_from}")
|
logging.info(f"Processing DATA message from {envelope.mail_from}")
|
||||||
|
|
||||||
message = BytesParser(policy=policy.default).parsebytes(envelope.content)
|
message = BytesParser(policy=policy.default).parsebytes(envelope.content)
|
||||||
mail_encrypted = check_encrypted(message, outgoing=True)
|
mail_encrypted = check_encrypted(message)
|
||||||
|
|
||||||
_, from_addr = parseaddr(message.get("from").strip())
|
_, from_addr = parseaddr(message.get("from").strip())
|
||||||
|
|
||||||
@@ -297,15 +278,11 @@ class IncomingBeforeQueueHandler:
|
|||||||
self.config = config
|
self.config = config
|
||||||
|
|
||||||
async def handle_DATA(self, server, session, envelope):
|
async def handle_DATA(self, server, session, envelope):
|
||||||
loop = asyncio.get_running_loop()
|
logging.info("handle_DATA before-queue")
|
||||||
return await loop.run_in_executor(None, self.sync_handle_DATA, envelope)
|
|
||||||
|
|
||||||
def sync_handle_DATA(self, envelope):
|
|
||||||
log_info("handle_DATA before-queue")
|
|
||||||
error = self.check_DATA(envelope)
|
error = self.check_DATA(envelope)
|
||||||
if error:
|
if error:
|
||||||
return error
|
return error
|
||||||
log_info("re-injecting the mail that passed checks")
|
logging.info("re-injecting the mail that passed checks")
|
||||||
|
|
||||||
# the smtp daemon on reinject_port_incoming gives it to dkim milter
|
# the smtp daemon on reinject_port_incoming gives it to dkim milter
|
||||||
# which looks at source address to determine whether to verify or sign
|
# which looks at source address to determine whether to verify or sign
|
||||||
@@ -321,10 +298,10 @@ class IncomingBeforeQueueHandler:
|
|||||||
|
|
||||||
def check_DATA(self, envelope):
|
def check_DATA(self, envelope):
|
||||||
"""the central filtering function for e-mails."""
|
"""the central filtering function for e-mails."""
|
||||||
log_info(f"Processing DATA message from {envelope.mail_from}")
|
logging.info(f"Processing DATA message from {envelope.mail_from}")
|
||||||
|
|
||||||
message = BytesParser(policy=policy.default).parsebytes(envelope.content)
|
message = BytesParser(policy=policy.default).parsebytes(envelope.content)
|
||||||
mail_encrypted = check_encrypted(message, outgoing=False)
|
mail_encrypted = check_encrypted(message)
|
||||||
|
|
||||||
if mail_encrypted or is_securejoin(message):
|
if mail_encrypted or is_securejoin(message):
|
||||||
print("Incoming: Filtering encrypted mail.", file=sys.stderr)
|
print("Incoming: Filtering encrypted mail.", file=sys.stderr)
|
||||||
@@ -363,19 +340,16 @@ class SendRateLimiter:
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
def log_info(msg):
|
|
||||||
print(msg, file=sys.stderr)
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
args = sys.argv[1:]
|
args = sys.argv[1:]
|
||||||
assert len(args) == 2
|
assert len(args) == 2
|
||||||
config = read_config(args[0])
|
config = read_config(args[0])
|
||||||
mode = args[1]
|
mode = args[1]
|
||||||
|
logging.basicConfig(level=logging.WARN)
|
||||||
loop = asyncio.new_event_loop()
|
loop = asyncio.new_event_loop()
|
||||||
asyncio.set_event_loop(loop)
|
asyncio.set_event_loop(loop)
|
||||||
assert mode in ["incoming", "outgoing"]
|
assert mode in ["incoming", "outgoing"]
|
||||||
task = asyncmain_beforequeue(config, mode)
|
task = asyncmain_beforequeue(config, mode)
|
||||||
loop.create_task(task)
|
loop.create_task(task)
|
||||||
log_info("entering serving loop")
|
logging.info("entering serving loop")
|
||||||
loop.run_forever()
|
loop.run_forever()
|
||||||
|
|||||||
@@ -1,168 +0,0 @@
|
|||||||
"""
|
|
||||||
command line tool to analyze mailbox message storage
|
|
||||||
|
|
||||||
example invocation:
|
|
||||||
|
|
||||||
python -m chatmaild.fsreport /path/to/chatmail.ini
|
|
||||||
|
|
||||||
to show storage summaries for all "cur" folders
|
|
||||||
|
|
||||||
python -m chatmaild.fsreport /path/to/chatmail.ini --mdir cur
|
|
||||||
|
|
||||||
to show storage summaries only for first 1000 mailboxes
|
|
||||||
|
|
||||||
python -m chatmaild.fsreport /path/to/chatmail.ini --maxnum 1000
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
import os
|
|
||||||
from argparse import ArgumentParser
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
from chatmaild.config import read_config
|
|
||||||
from chatmaild.expire import iter_mailboxes
|
|
||||||
|
|
||||||
DAYSECONDS = 24 * 60 * 60
|
|
||||||
MONTHSECONDS = DAYSECONDS * 30
|
|
||||||
|
|
||||||
|
|
||||||
def HSize(size: int):
|
|
||||||
"""Format a size integer as a Human-readable string Kilobyte, Megabyte or Gigabyte"""
|
|
||||||
if size < 10000:
|
|
||||||
return f"{size / 1000:5.2f}K"
|
|
||||||
if size < 1000 * 1000:
|
|
||||||
return f"{size / 1000:5.0f}K"
|
|
||||||
if size < 1000 * 1000 * 1000:
|
|
||||||
return f"{int(size / 1000000):5.0f}M"
|
|
||||||
return f"{size / 1000000000:5.2f}G"
|
|
||||||
|
|
||||||
|
|
||||||
class Report:
|
|
||||||
def __init__(self, now, min_login_age, mdir):
|
|
||||||
self.size_extra = 0
|
|
||||||
self.size_messages = 0
|
|
||||||
self.now = now
|
|
||||||
self.min_login_age = min_login_age
|
|
||||||
self.mdir = mdir
|
|
||||||
|
|
||||||
self.num_ci_logins = self.num_all_logins = 0
|
|
||||||
self.login_buckets = {x: 0 for x in (1, 10, 30, 40, 80, 100, 150)}
|
|
||||||
|
|
||||||
self.message_buckets = {x: 0 for x in (0, 160000, 500000, 2000000)}
|
|
||||||
|
|
||||||
def process_mailbox_stat(self, mailbox):
|
|
||||||
# categorize login times
|
|
||||||
last_login = mailbox.last_login
|
|
||||||
if last_login:
|
|
||||||
self.num_all_logins += 1
|
|
||||||
if os.path.basename(mailbox.basedir)[:3] == "ci-":
|
|
||||||
self.num_ci_logins += 1
|
|
||||||
else:
|
|
||||||
for days in self.login_buckets:
|
|
||||||
if last_login >= self.now - days * DAYSECONDS:
|
|
||||||
self.login_buckets[days] += 1
|
|
||||||
|
|
||||||
cutoff_login_date = self.now - self.min_login_age * DAYSECONDS
|
|
||||||
if last_login and last_login <= cutoff_login_date:
|
|
||||||
# categorize message sizes
|
|
||||||
for size in self.message_buckets:
|
|
||||||
for msg in mailbox.messages:
|
|
||||||
if msg.size >= size:
|
|
||||||
if self.mdir and not msg.relpath.startswith(self.mdir):
|
|
||||||
continue
|
|
||||||
self.message_buckets[size] += msg.size
|
|
||||||
|
|
||||||
self.size_messages += sum(entry.size for entry in mailbox.messages)
|
|
||||||
self.size_extra += sum(entry.size for entry in mailbox.extrafiles)
|
|
||||||
|
|
||||||
def dump_summary(self):
|
|
||||||
all_messages = self.size_messages
|
|
||||||
print()
|
|
||||||
print("## Mailbox storage use analysis")
|
|
||||||
print(f"Mailbox data total size: {HSize(self.size_extra + all_messages)}")
|
|
||||||
print(f"Messages total size : {HSize(all_messages)}")
|
|
||||||
try:
|
|
||||||
percent = self.size_extra / (self.size_extra + all_messages) * 100
|
|
||||||
except ZeroDivisionError:
|
|
||||||
percent = 100
|
|
||||||
print(f"Extra files : {HSize(self.size_extra)} ({percent:.2f}%)")
|
|
||||||
|
|
||||||
print()
|
|
||||||
if self.min_login_age:
|
|
||||||
print(f"### Message storage for {self.min_login_age} days old logins")
|
|
||||||
|
|
||||||
pref = f"[{self.mdir}] " if self.mdir else ""
|
|
||||||
for minsize, sumsize in self.message_buckets.items():
|
|
||||||
percent = (sumsize / all_messages * 100) if all_messages else 0
|
|
||||||
print(
|
|
||||||
f"{pref}larger than {HSize(minsize)}: {HSize(sumsize)} ({percent:.2f}%)"
|
|
||||||
)
|
|
||||||
|
|
||||||
user_logins = self.num_all_logins - self.num_ci_logins
|
|
||||||
|
|
||||||
def p(num):
|
|
||||||
return f"({num / user_logins * 100:2.2f}%)" if user_logins else "100%"
|
|
||||||
|
|
||||||
print()
|
|
||||||
print(f"## Login stats, from date reference {datetime.fromtimestamp(self.now)}")
|
|
||||||
print(f"all: {HSize(self.num_all_logins)}")
|
|
||||||
print(f"non-ci: {HSize(user_logins)}")
|
|
||||||
print(f"ci: {HSize(self.num_ci_logins)}")
|
|
||||||
for days, active in self.login_buckets.items():
|
|
||||||
print(f"last {days:3} days: {HSize(active)} {p(active)}")
|
|
||||||
|
|
||||||
|
|
||||||
def main(args=None):
|
|
||||||
"""Report about filesystem storage usage of all mailboxes and messages"""
|
|
||||||
parser = ArgumentParser(description=main.__doc__)
|
|
||||||
ini = "/usr/local/lib/chatmaild/chatmail.ini"
|
|
||||||
parser.add_argument(
|
|
||||||
"chatmail_ini",
|
|
||||||
action="store",
|
|
||||||
nargs="?",
|
|
||||||
help=f"path pointing to chatmail.ini file, default: {ini}",
|
|
||||||
default=ini,
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
"--days",
|
|
||||||
default=0,
|
|
||||||
action="store",
|
|
||||||
help="assume date to be days older than now",
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
"--min-login-age",
|
|
||||||
default=0,
|
|
||||||
dest="min_login_age",
|
|
||||||
action="store",
|
|
||||||
help="only sum up message size if last login is at least min-login-age days old",
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
"--mdir",
|
|
||||||
action="store",
|
|
||||||
help="only consider 'cur' or 'new' or 'tmp' messages for summary",
|
|
||||||
)
|
|
||||||
|
|
||||||
parser.add_argument(
|
|
||||||
"--maxnum",
|
|
||||||
default=None,
|
|
||||||
action="store",
|
|
||||||
help="maximum number of mailboxes to iterate on",
|
|
||||||
)
|
|
||||||
|
|
||||||
args = parser.parse_args(args)
|
|
||||||
|
|
||||||
config = read_config(args.chatmail_ini)
|
|
||||||
|
|
||||||
now = datetime.utcnow().timestamp()
|
|
||||||
if args.days:
|
|
||||||
now = now - 86400 * int(args.days)
|
|
||||||
|
|
||||||
maxnum = int(args.maxnum) if args.maxnum else None
|
|
||||||
rep = Report(now=now, min_login_age=int(args.min_login_age), mdir=args.mdir)
|
|
||||||
for mbox in iter_mailboxes(str(config.mailboxes_dir), maxnum=maxnum):
|
|
||||||
rep.process_mailbox_stat(mbox)
|
|
||||||
rep.dump_summary()
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
||||||
@@ -43,10 +43,7 @@ passthrough_senders =
|
|||||||
|
|
||||||
# list of e-mail recipients for which to accept outbound un-encrypted mails
|
# list of e-mail recipients for which to accept outbound un-encrypted mails
|
||||||
# (space-separated, item may start with "@" to whitelist whole recipient domains)
|
# (space-separated, item may start with "@" to whitelist whole recipient domains)
|
||||||
passthrough_recipients = echo@{mail_domain}
|
passthrough_recipients = xstore@testrun.org echo@{mail_domain}
|
||||||
|
|
||||||
# path to www directory - documented here: https://chatmail.at/doc/relay/getting_started.html#custom-web-pages
|
|
||||||
#www_folder = www
|
|
||||||
|
|
||||||
#
|
#
|
||||||
# Deployment Details
|
# Deployment Details
|
||||||
@@ -63,9 +60,6 @@ postfix_reinject_port_incoming = 10026
|
|||||||
# if set to "True" IPv6 is disabled
|
# if set to "True" IPv6 is disabled
|
||||||
disable_ipv6 = False
|
disable_ipv6 = False
|
||||||
|
|
||||||
# Your email adress, which will be used in acmetool to manage Let's Encrypt SSL certificates
|
|
||||||
acme_email =
|
|
||||||
|
|
||||||
# Defaults to https://iroh.{{mail_domain}} and running `iroh-relay` on the chatmail
|
# Defaults to https://iroh.{{mail_domain}} and running `iroh-relay` on the chatmail
|
||||||
# service.
|
# service.
|
||||||
# If you set it to anything else, the service will be disabled
|
# If you set it to anything else, the service will be disabled
|
||||||
@@ -99,12 +93,6 @@ acme_email =
|
|||||||
# so use this option with caution on production servers.
|
# so use this option with caution on production servers.
|
||||||
imap_rawlog = false
|
imap_rawlog = false
|
||||||
|
|
||||||
# set to true if you want to enable the IMAP COMPRESS Extension,
|
|
||||||
# which allows IMAP connections to be efficiently compressed.
|
|
||||||
# WARNING: Enabling this makes it impossible to hibernate IMAP
|
|
||||||
# processes which will result in much higher memory/RAM usage.
|
|
||||||
imap_compress = false
|
|
||||||
|
|
||||||
|
|
||||||
#
|
#
|
||||||
# Privacy Policy
|
# Privacy Policy
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
|
|
||||||
[privacy]
|
[privacy]
|
||||||
|
|
||||||
passthrough_recipients = privacy@testrun.org echo@{mail_domain}
|
passthrough_recipients = privacy@testrun.org xstore@testrun.org echo@{mail_domain}
|
||||||
|
|
||||||
privacy_postal =
|
privacy_postal =
|
||||||
Merlinux GmbH, Represented by the managing director H. Krekel,
|
Merlinux GmbH, Represented by the managing director H. Krekel,
|
||||||
|
|||||||
@@ -7,7 +7,6 @@ from .config import read_config
|
|||||||
from .dictproxy import DictProxy
|
from .dictproxy import DictProxy
|
||||||
from .filedict import FileDict
|
from .filedict import FileDict
|
||||||
from .notifier import Notifier
|
from .notifier import Notifier
|
||||||
from .turnserver import turn_credentials
|
|
||||||
|
|
||||||
|
|
||||||
def _is_valid_token_timestamp(timestamp, now):
|
def _is_valid_token_timestamp(timestamp, now):
|
||||||
@@ -76,12 +75,11 @@ class Metadata:
|
|||||||
|
|
||||||
|
|
||||||
class MetadataDictProxy(DictProxy):
|
class MetadataDictProxy(DictProxy):
|
||||||
def __init__(self, notifier, metadata, iroh_relay=None, turn_hostname=None):
|
def __init__(self, notifier, metadata, iroh_relay=None):
|
||||||
super().__init__()
|
super().__init__()
|
||||||
self.notifier = notifier
|
self.notifier = notifier
|
||||||
self.metadata = metadata
|
self.metadata = metadata
|
||||||
self.iroh_relay = iroh_relay
|
self.iroh_relay = iroh_relay
|
||||||
self.turn_hostname = turn_hostname
|
|
||||||
|
|
||||||
def handle_lookup(self, parts):
|
def handle_lookup(self, parts):
|
||||||
# Lpriv/43f5f508a7ea0366dff30200c15250e3/devicetoken\tlkj123poi@c2.testrun.org
|
# Lpriv/43f5f508a7ea0366dff30200c15250e3/devicetoken\tlkj123poi@c2.testrun.org
|
||||||
@@ -100,11 +98,6 @@ class MetadataDictProxy(DictProxy):
|
|||||||
):
|
):
|
||||||
# Handle `GETMETADATA "" /shared/vendor/deltachat/irohrelay`
|
# Handle `GETMETADATA "" /shared/vendor/deltachat/irohrelay`
|
||||||
return f"O{self.iroh_relay}\n"
|
return f"O{self.iroh_relay}\n"
|
||||||
elif keyname == "vendor/vendor.dovecot/pvt/server/vendor/deltachat/turn":
|
|
||||||
res = turn_credentials()
|
|
||||||
port = 3478
|
|
||||||
return f"O{self.turn_hostname}:{port}:{res}\n"
|
|
||||||
|
|
||||||
logging.warning(f"lookup ignored: {parts!r}")
|
logging.warning(f"lookup ignored: {parts!r}")
|
||||||
return "N\n"
|
return "N\n"
|
||||||
|
|
||||||
@@ -128,7 +121,6 @@ def main():
|
|||||||
|
|
||||||
config = read_config(config_path)
|
config = read_config(config_path)
|
||||||
iroh_relay = config.iroh_relay
|
iroh_relay = config.iroh_relay
|
||||||
mail_domain = config.mail_domain
|
|
||||||
|
|
||||||
vmail_dir = config.mailboxes_dir
|
vmail_dir = config.mailboxes_dir
|
||||||
if not vmail_dir.exists():
|
if not vmail_dir.exists():
|
||||||
@@ -142,10 +134,7 @@ def main():
|
|||||||
notifier.start_notification_threads(metadata.remove_token_from_addr)
|
notifier.start_notification_threads(metadata.remove_token_from_addr)
|
||||||
|
|
||||||
dictproxy = MetadataDictProxy(
|
dictproxy = MetadataDictProxy(
|
||||||
notifier=notifier,
|
notifier=notifier, metadata=metadata, iroh_relay=iroh_relay
|
||||||
metadata=metadata,
|
|
||||||
iroh_relay=iroh_relay,
|
|
||||||
turn_hostname=mail_domain,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
dictproxy.serve_forever_from_socket(socket)
|
dictproxy.serve_forever_from_socket(socket)
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
import time
|
import time
|
||||||
|
|
||||||
|
from chatmaild.delete_inactive_users import delete_inactive_users
|
||||||
from chatmaild.doveauth import AuthDictProxy
|
from chatmaild.doveauth import AuthDictProxy
|
||||||
from chatmaild.expire import main as main_expire
|
|
||||||
|
|
||||||
|
|
||||||
def test_login_timestamps(example_config):
|
def test_login_timestamps(example_config):
|
||||||
@@ -45,12 +45,7 @@ def test_delete_inactive_users(example_config):
|
|||||||
for addr in to_remove:
|
for addr in to_remove:
|
||||||
assert example_config.get_user(addr).maildir.exists()
|
assert example_config.get_user(addr).maildir.exists()
|
||||||
|
|
||||||
main_expire(
|
delete_inactive_users(example_config)
|
||||||
args=[
|
|
||||||
"--remove",
|
|
||||||
str(example_config._inipath),
|
|
||||||
]
|
|
||||||
)
|
|
||||||
|
|
||||||
for p in example_config.mailboxes_dir.iterdir():
|
for p in example_config.mailboxes_dir.iterdir():
|
||||||
assert not p.name.startswith("old")
|
assert not p.name.startswith("old")
|
||||||
|
|||||||
@@ -1,150 +0,0 @@
|
|||||||
import os
|
|
||||||
import random
|
|
||||||
from datetime import datetime
|
|
||||||
from fnmatch import fnmatch
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
from chatmaild.expire import (
|
|
||||||
FileEntry,
|
|
||||||
MailboxStat,
|
|
||||||
get_file_entry,
|
|
||||||
iter_mailboxes,
|
|
||||||
os_listdir_if_exists,
|
|
||||||
)
|
|
||||||
from chatmaild.expire import main as expiry_main
|
|
||||||
from chatmaild.fsreport import main as report_main
|
|
||||||
|
|
||||||
|
|
||||||
def fill_mbox(basedir):
|
|
||||||
basedir1 = basedir.joinpath("mailbox1@example.org")
|
|
||||||
basedir1.mkdir()
|
|
||||||
password = basedir1.joinpath("password")
|
|
||||||
password.write_text("xxx")
|
|
||||||
basedir1.joinpath("maildirsize").write_text("xxx")
|
|
||||||
|
|
||||||
garbagedir = basedir1.joinpath("garbagedir")
|
|
||||||
garbagedir.mkdir()
|
|
||||||
|
|
||||||
create_new_messages(basedir1, ["cur/msg1"], size=500)
|
|
||||||
create_new_messages(basedir1, ["new/msg2"], size=600)
|
|
||||||
return basedir1
|
|
||||||
|
|
||||||
|
|
||||||
def create_new_messages(basedir, relpaths, size=1000, days=0):
|
|
||||||
now = datetime.utcnow().timestamp()
|
|
||||||
|
|
||||||
for relpath in relpaths:
|
|
||||||
msg_path = Path(basedir).joinpath(relpath)
|
|
||||||
msg_path.parent.mkdir(parents=True, exist_ok=True)
|
|
||||||
msg_path.write_text("x" * size)
|
|
||||||
# accessed now, modified N days ago
|
|
||||||
os.utime(msg_path, (now, now - days * 86400))
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def mbox1(example_config):
|
|
||||||
basedir1 = fill_mbox(example_config.mailboxes_dir)
|
|
||||||
return MailboxStat(basedir1)
|
|
||||||
|
|
||||||
|
|
||||||
def test_filentry_ordering(tmp_path):
|
|
||||||
l = [FileEntry(f"x{i}", size=i + 10, mtime=1000 - i) for i in range(10)]
|
|
||||||
sorted = list(l)
|
|
||||||
random.shuffle(l)
|
|
||||||
l.sort(key=lambda x: x.size)
|
|
||||||
assert l == sorted
|
|
||||||
|
|
||||||
|
|
||||||
def test_no_mailbxoes(tmp_path, capsys):
|
|
||||||
assert [] == list(iter_mailboxes(str(tmp_path.joinpath("notexists")), maxnum=10))
|
|
||||||
out, err = capsys.readouterr()
|
|
||||||
assert "no mailboxes" in err
|
|
||||||
|
|
||||||
|
|
||||||
def test_stats_mailbox(mbox1):
|
|
||||||
password = Path(mbox1.basedir).joinpath("password")
|
|
||||||
assert mbox1.last_login == password.stat().st_mtime
|
|
||||||
assert len(mbox1.messages) == 2
|
|
||||||
|
|
||||||
msgs = list(sorted(mbox1.messages, key=lambda x: x.size))
|
|
||||||
assert len(msgs) == 2
|
|
||||||
assert msgs[0].size == 500 # cur
|
|
||||||
assert msgs[1].size == 600 # new
|
|
||||||
|
|
||||||
create_new_messages(mbox1.basedir, ["large-extra"], size=1000)
|
|
||||||
create_new_messages(mbox1.basedir, ["index-something"], size=3)
|
|
||||||
mbox2 = MailboxStat(mbox1.basedir)
|
|
||||||
assert len(mbox2.extrafiles) == 4
|
|
||||||
assert mbox2.extrafiles[0].size == 1000
|
|
||||||
|
|
||||||
# cope well with mailbox dirs that have no password (for whatever reason)
|
|
||||||
Path(mbox1.basedir).joinpath("password").unlink()
|
|
||||||
mbox3 = MailboxStat(mbox1.basedir)
|
|
||||||
assert mbox3.last_login is None
|
|
||||||
|
|
||||||
|
|
||||||
def test_report_no_mailboxes(example_config):
|
|
||||||
args = (str(example_config._inipath),)
|
|
||||||
report_main(args)
|
|
||||||
|
|
||||||
|
|
||||||
def test_report(mbox1, example_config):
|
|
||||||
args = (str(example_config._inipath),)
|
|
||||||
report_main(args)
|
|
||||||
args = list(args) + "--days 1".split()
|
|
||||||
report_main(args)
|
|
||||||
args = list(args) + "--min-login-age 1".split()
|
|
||||||
report_main(args)
|
|
||||||
args = list(args) + "--mdir cur".split()
|
|
||||||
report_main(args)
|
|
||||||
|
|
||||||
|
|
||||||
def test_expiry_cli_basic(example_config, mbox1):
|
|
||||||
args = (str(example_config._inipath),)
|
|
||||||
expiry_main(args)
|
|
||||||
|
|
||||||
|
|
||||||
def test_expiry_cli_old_files(capsys, example_config, mbox1):
|
|
||||||
relpaths_old = ["cur/msg_old1", "cur/msg_old1"]
|
|
||||||
cutoff_days = int(example_config.delete_mails_after) + 1
|
|
||||||
create_new_messages(mbox1.basedir, relpaths_old, size=1000, days=cutoff_days)
|
|
||||||
|
|
||||||
relpaths_large = ["cur/msg_old_large1", "new/msg_old_large2"]
|
|
||||||
cutoff_days = int(example_config.delete_large_after) + 1
|
|
||||||
create_new_messages(
|
|
||||||
mbox1.basedir, relpaths_large, size=1000 * 300, days=cutoff_days
|
|
||||||
)
|
|
||||||
|
|
||||||
create_new_messages(mbox1.basedir, ["cur/shouldstay"], size=1000 * 300, days=1)
|
|
||||||
|
|
||||||
args = str(example_config._inipath), "--remove", "-v"
|
|
||||||
expiry_main(args)
|
|
||||||
out, err = capsys.readouterr()
|
|
||||||
|
|
||||||
allpaths = relpaths_old + relpaths_large + ["maildirsize"]
|
|
||||||
for path in allpaths:
|
|
||||||
for line in err.split("\n"):
|
|
||||||
if fnmatch(line, f"removing*{path}"):
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
if path != "new/msg_old_large2":
|
|
||||||
pytest.fail(f"failed to remove {path}\n{err}")
|
|
||||||
|
|
||||||
assert "shouldstay" not in err
|
|
||||||
|
|
||||||
|
|
||||||
def test_get_file_entry(tmp_path):
|
|
||||||
assert get_file_entry(str(tmp_path.joinpath("123123"))) is None
|
|
||||||
p = tmp_path.joinpath("x")
|
|
||||||
p.write_text("hello")
|
|
||||||
entry = get_file_entry(str(p))
|
|
||||||
assert entry.size == 5
|
|
||||||
assert entry.mtime
|
|
||||||
|
|
||||||
|
|
||||||
def test_os_listdir_if_exists(tmp_path):
|
|
||||||
tmp_path.joinpath("x").write_text("hello")
|
|
||||||
assert len(os_listdir_if_exists(str(tmp_path))) == 1
|
|
||||||
assert len(os_listdir_if_exists(str(tmp_path.joinpath("123123")))) == 0
|
|
||||||
@@ -241,9 +241,8 @@ def test_cleartext_passthrough_senders(gencreds, handler, maildata):
|
|||||||
|
|
||||||
|
|
||||||
def test_check_armored_payload():
|
def test_check_armored_payload():
|
||||||
prefix = "-----BEGIN PGP MESSAGE-----\r\n"
|
payload = """-----BEGIN PGP MESSAGE-----\r
|
||||||
comment = "Version: ProtonMail\r\n"
|
\r
|
||||||
payload = """\r
|
|
||||||
wU4DSqFx0d1yqAoSAQdAYkX/ZN/Az4B0k7X47zKyWrXxlDEdS3WOy0Yf2+GJTFgg\r
|
wU4DSqFx0d1yqAoSAQdAYkX/ZN/Az4B0k7X47zKyWrXxlDEdS3WOy0Yf2+GJTFgg\r
|
||||||
Zk5ql0mLG8Ze+ZifCS0XMO4otlemSyJ0K1ZPdFMGzUDBTgNqzkFabxXoXRIBB0AM\r
|
Zk5ql0mLG8Ze+ZifCS0XMO4otlemSyJ0K1ZPdFMGzUDBTgNqzkFabxXoXRIBB0AM\r
|
||||||
755wlX41X6Ay3KhnwBq7yEqSykVH6F3x11iHPKraLCAGZoaS8bKKNy/zg5slda1X\r
|
755wlX41X6Ay3KhnwBq7yEqSykVH6F3x11iHPKraLCAGZoaS8bKKNy/zg5slda1X\r
|
||||||
@@ -279,25 +278,16 @@ UN4fiB0KR9JyG2ayUdNJVkXZSZLnHyRgiaadlpUo16LVvw==\r
|
|||||||
\r
|
\r
|
||||||
"""
|
"""
|
||||||
|
|
||||||
commented_payload = prefix + comment + payload
|
assert check_armored_payload(payload) == True
|
||||||
assert check_armored_payload(commented_payload, outgoing=False) == True
|
|
||||||
assert check_armored_payload(commented_payload, outgoing=True) == False
|
|
||||||
|
|
||||||
payload = prefix + payload
|
|
||||||
assert check_armored_payload(payload, outgoing=False) == True
|
|
||||||
assert check_armored_payload(payload, outgoing=True) == True
|
|
||||||
|
|
||||||
payload = payload.removesuffix("\r\n")
|
payload = payload.removesuffix("\r\n")
|
||||||
assert check_armored_payload(payload, outgoing=False) == True
|
assert check_armored_payload(payload) == True
|
||||||
assert check_armored_payload(payload, outgoing=True) == True
|
|
||||||
|
|
||||||
payload = payload.removesuffix("\r\n")
|
payload = payload.removesuffix("\r\n")
|
||||||
assert check_armored_payload(payload, outgoing=False) == True
|
assert check_armored_payload(payload) == True
|
||||||
assert check_armored_payload(payload, outgoing=True) == True
|
|
||||||
|
|
||||||
payload = payload.removesuffix("\r\n")
|
payload = payload.removesuffix("\r\n")
|
||||||
assert check_armored_payload(payload, outgoing=False) == True
|
assert check_armored_payload(payload) == True
|
||||||
assert check_armored_payload(payload, outgoing=True) == True
|
|
||||||
|
|
||||||
payload = """-----BEGIN PGP MESSAGE-----\r
|
payload = """-----BEGIN PGP MESSAGE-----\r
|
||||||
\r
|
\r
|
||||||
@@ -305,8 +295,7 @@ HELLOWORLD
|
|||||||
-----END PGP MESSAGE-----\r
|
-----END PGP MESSAGE-----\r
|
||||||
\r
|
\r
|
||||||
"""
|
"""
|
||||||
assert check_armored_payload(payload, outgoing=False) == False
|
assert check_armored_payload(payload) == False
|
||||||
assert check_armored_payload(payload, outgoing=True) == False
|
|
||||||
|
|
||||||
payload = """-----BEGIN PGP MESSAGE-----\r
|
payload = """-----BEGIN PGP MESSAGE-----\r
|
||||||
\r
|
\r
|
||||||
@@ -314,8 +303,7 @@ HELLOWORLD
|
|||||||
-----END PGP MESSAGE-----\r
|
-----END PGP MESSAGE-----\r
|
||||||
\r
|
\r
|
||||||
"""
|
"""
|
||||||
assert check_armored_payload(payload, outgoing=False) == False
|
assert check_armored_payload(payload) == False
|
||||||
assert check_armored_payload(payload, outgoing=True) == False
|
|
||||||
|
|
||||||
# Test payload using partial body length
|
# Test payload using partial body length
|
||||||
# as generated by GopenPGP.
|
# as generated by GopenPGP.
|
||||||
@@ -357,5 +345,4 @@ myLbG7cJB787QjplEyVe2P/JBO6xYvbkJLf9Q+HaviTO25rugRSrYsoKMDfO8VlQ\r
|
|||||||
=6iHb\r
|
=6iHb\r
|
||||||
-----END PGP MESSAGE-----\r
|
-----END PGP MESSAGE-----\r
|
||||||
"""
|
"""
|
||||||
assert check_armored_payload(payload, outgoing=False) == True
|
assert check_armored_payload(payload) == True
|
||||||
assert check_armored_payload(payload, outgoing=True) == True
|
|
||||||
|
|||||||
@@ -1,78 +0,0 @@
|
|||||||
import smtplib
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def smtpserver():
|
|
||||||
from pytest_localserver import smtp
|
|
||||||
|
|
||||||
server = smtp.Server("127.0.0.1")
|
|
||||||
server.start()
|
|
||||||
yield server
|
|
||||||
server.stop()
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def make_popen(request):
|
|
||||||
def popen(cmdargs, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kw):
|
|
||||||
p = subprocess.Popen(
|
|
||||||
cmdargs,
|
|
||||||
stdout=subprocess.PIPE,
|
|
||||||
stderr=subprocess.PIPE,
|
|
||||||
)
|
|
||||||
|
|
||||||
def fin():
|
|
||||||
p.terminate()
|
|
||||||
out, err = p.communicate()
|
|
||||||
print(out.decode("ascii"))
|
|
||||||
print(err.decode("ascii"), file=sys.stderr)
|
|
||||||
|
|
||||||
request.addfinalizer(fin)
|
|
||||||
return p
|
|
||||||
|
|
||||||
return popen
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize("filtermail_mode", ["outgoing", "incoming"])
|
|
||||||
def test_one_mail(
|
|
||||||
make_config, make_popen, smtpserver, maildata, filtermail_mode, monkeypatch
|
|
||||||
):
|
|
||||||
monkeypatch.setenv("PYTHONUNBUFFERED", "1")
|
|
||||||
smtp_inject_port = 20025
|
|
||||||
if filtermail_mode == "outgoing":
|
|
||||||
settings = dict(
|
|
||||||
postfix_reinject_port=smtpserver.port,
|
|
||||||
filtermail_smtp_port=smtp_inject_port,
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
settings = dict(
|
|
||||||
postfix_reinject_port_incoming=smtpserver.port,
|
|
||||||
filtermail_smtp_port_incoming=smtp_inject_port,
|
|
||||||
)
|
|
||||||
|
|
||||||
config = make_config("example.org", settings=settings)
|
|
||||||
path = str(config._inipath)
|
|
||||||
|
|
||||||
popen = make_popen(["filtermail", path, filtermail_mode])
|
|
||||||
line = popen.stderr.readline().strip()
|
|
||||||
if b"loop" not in line:
|
|
||||||
print(line.decode("ascii"), file=sys.stderr)
|
|
||||||
pytest.fail("starting filtermail failed")
|
|
||||||
|
|
||||||
addr = f"user1@{config.mail_domain}"
|
|
||||||
config.get_user(addr).set_password("l1k2j3l1k2j3l")
|
|
||||||
|
|
||||||
# send encrypted mail
|
|
||||||
data = str(maildata("encrypted.eml", from_addr=addr, to_addr=addr))
|
|
||||||
client = smtplib.SMTP("localhost", smtp_inject_port)
|
|
||||||
client.sendmail(addr, [addr], data)
|
|
||||||
assert len(smtpserver.outbox) == 1
|
|
||||||
|
|
||||||
# send un-encrypted mail that errors
|
|
||||||
data = str(maildata("fake-encrypted.eml", from_addr=addr, to_addr=addr))
|
|
||||||
with pytest.raises(smtplib.SMTPDataError) as e:
|
|
||||||
client.sendmail(addr, [addr], data)
|
|
||||||
assert e.value.smtp_code == 523
|
|
||||||
@@ -36,3 +36,29 @@ def test_handle_dovecot_request_last_login(testaddr, example_config):
|
|||||||
res = dictproxy.handle_dovecot_request(msg, dictproxy_transactions)
|
res = dictproxy.handle_dovecot_request(msg, dictproxy_transactions)
|
||||||
assert res == "O\n"
|
assert res == "O\n"
|
||||||
assert len(dictproxy_transactions) == 0
|
assert len(dictproxy_transactions) == 0
|
||||||
|
|
||||||
|
|
||||||
|
def test_handle_dovecot_request_last_login_echobot(example_config):
|
||||||
|
dictproxy = LastLoginDictProxy(config=example_config)
|
||||||
|
|
||||||
|
authproxy = AuthDictProxy(config=example_config)
|
||||||
|
testaddr = f"echo@{example_config.mail_domain}"
|
||||||
|
authproxy.lookup_passdb(testaddr, "ignore")
|
||||||
|
user = dictproxy.config.get_user(testaddr)
|
||||||
|
|
||||||
|
transactions = {}
|
||||||
|
|
||||||
|
# set last-login info for user
|
||||||
|
tx = "1111"
|
||||||
|
msg = f"B{tx}\t{testaddr}"
|
||||||
|
res = dictproxy.handle_dovecot_request(msg, transactions)
|
||||||
|
assert not res
|
||||||
|
assert transactions == {tx: dict(addr=testaddr, res="O\n")}
|
||||||
|
|
||||||
|
timestamp = int(time.time())
|
||||||
|
msg = f"S{tx}\tshared/last-login/{testaddr}\t{timestamp}"
|
||||||
|
res = dictproxy.handle_dovecot_request(msg, transactions)
|
||||||
|
assert not res
|
||||||
|
assert len(transactions) == 1
|
||||||
|
read_timestamp = user.get_last_login_timestamp()
|
||||||
|
assert read_timestamp is None
|
||||||
|
|||||||
@@ -1,9 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
import socket
|
|
||||||
|
|
||||||
|
|
||||||
def turn_credentials() -> str:
|
|
||||||
with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as client_socket:
|
|
||||||
client_socket.connect("/run/chatmail-turn/turn.socket")
|
|
||||||
with client_socket.makefile("rb") as file:
|
|
||||||
return file.readline().decode("utf-8").strip()
|
|
||||||
869
cmdeploy/src/cmdeploy/__init__.py
Normal file
869
cmdeploy/src/cmdeploy/__init__.py
Normal file
@@ -0,0 +1,869 @@
|
|||||||
|
"""
|
||||||
|
Chat Mail pyinfra deploy.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import importlib.resources
|
||||||
|
import io
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
from io import StringIO
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from chatmaild.config import Config, read_config
|
||||||
|
from pyinfra import facts, host, logger
|
||||||
|
from pyinfra.api import FactBase
|
||||||
|
from pyinfra.facts.files import File, Sha256File
|
||||||
|
from pyinfra.facts.server import Sysctl
|
||||||
|
from pyinfra.facts.systemd import SystemdEnabled
|
||||||
|
from pyinfra.operations import apt, files, pip, server, systemd
|
||||||
|
|
||||||
|
from .acmetool import deploy_acmetool
|
||||||
|
|
||||||
|
|
||||||
|
class Port(FactBase):
|
||||||
|
"""
|
||||||
|
Returns the process occuping a port.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def command(self, port: int) -> str:
|
||||||
|
return (
|
||||||
|
"ss -lptn 'src :%d' | awk 'NR>1 {print $6,$7}' | sed 's/users:((\"//;s/\".*//'"
|
||||||
|
% (port,)
|
||||||
|
)
|
||||||
|
|
||||||
|
def process(self, output: [str]) -> str:
|
||||||
|
return output[0]
|
||||||
|
|
||||||
|
|
||||||
|
def _build_chatmaild(dist_dir) -> None:
|
||||||
|
dist_dir = Path(dist_dir).resolve()
|
||||||
|
if dist_dir.exists():
|
||||||
|
shutil.rmtree(dist_dir)
|
||||||
|
dist_dir.mkdir()
|
||||||
|
subprocess.check_output(
|
||||||
|
[sys.executable, "-m", "build", "-n"]
|
||||||
|
+ ["--sdist", "chatmaild", "--outdir", str(dist_dir)]
|
||||||
|
)
|
||||||
|
entries = list(dist_dir.iterdir())
|
||||||
|
assert len(entries) == 1
|
||||||
|
return entries[0]
|
||||||
|
|
||||||
|
|
||||||
|
def remove_legacy_artifacts():
|
||||||
|
# disable legacy doveauth-dictproxy.service
|
||||||
|
if host.get_fact(SystemdEnabled).get("doveauth-dictproxy.service"):
|
||||||
|
systemd.service(
|
||||||
|
name="Disable legacy doveauth-dictproxy.service",
|
||||||
|
service="doveauth-dictproxy.service",
|
||||||
|
running=False,
|
||||||
|
enabled=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _install_remote_venv_with_chatmaild(config) -> None:
|
||||||
|
remove_legacy_artifacts()
|
||||||
|
dist_file = _build_chatmaild(dist_dir=Path("chatmaild/dist"))
|
||||||
|
remote_base_dir = "/usr/local/lib/chatmaild"
|
||||||
|
remote_dist_file = f"{remote_base_dir}/dist/{dist_file.name}"
|
||||||
|
remote_venv_dir = f"{remote_base_dir}/venv"
|
||||||
|
remote_chatmail_inipath = f"{remote_base_dir}/chatmail.ini"
|
||||||
|
root_owned = dict(user="root", group="root", mode="644")
|
||||||
|
|
||||||
|
apt.packages(
|
||||||
|
name="apt install python3-virtualenv",
|
||||||
|
packages=["python3-virtualenv"],
|
||||||
|
)
|
||||||
|
|
||||||
|
files.put(
|
||||||
|
name="Upload chatmaild source package",
|
||||||
|
src=dist_file.open("rb"),
|
||||||
|
dest=remote_dist_file,
|
||||||
|
create_remote_dir=True,
|
||||||
|
**root_owned,
|
||||||
|
)
|
||||||
|
|
||||||
|
files.put(
|
||||||
|
name=f"Upload {remote_chatmail_inipath}",
|
||||||
|
src=config._getbytefile(),
|
||||||
|
dest=remote_chatmail_inipath,
|
||||||
|
**root_owned,
|
||||||
|
)
|
||||||
|
|
||||||
|
pip.virtualenv(
|
||||||
|
name=f"chatmaild virtualenv {remote_venv_dir}",
|
||||||
|
path=remote_venv_dir,
|
||||||
|
always_copy=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
apt.packages(
|
||||||
|
name="install gcc and headers to build crypt_r source package",
|
||||||
|
packages=["gcc", "python3-dev"],
|
||||||
|
)
|
||||||
|
|
||||||
|
server.shell(
|
||||||
|
name=f"forced pip-install {dist_file.name}",
|
||||||
|
commands=[
|
||||||
|
f"{remote_venv_dir}/bin/pip install --force-reinstall {remote_dist_file}"
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
files.template(
|
||||||
|
src=importlib.resources.files(__package__).joinpath("metrics.cron.j2"),
|
||||||
|
dest="/etc/cron.d/chatmail-metrics",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
config={
|
||||||
|
"mailboxes_dir": config.mailboxes_dir,
|
||||||
|
"execpath": f"{remote_venv_dir}/bin/chatmail-metrics",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
# install systemd units
|
||||||
|
for fn in (
|
||||||
|
"doveauth",
|
||||||
|
"filtermail",
|
||||||
|
"filtermail-incoming",
|
||||||
|
"echobot",
|
||||||
|
"chatmail-metadata",
|
||||||
|
"lastlogin",
|
||||||
|
):
|
||||||
|
execpath = fn if fn != "filtermail-incoming" else "filtermail"
|
||||||
|
params = dict(
|
||||||
|
execpath=f"{remote_venv_dir}/bin/{execpath}",
|
||||||
|
config_path=remote_chatmail_inipath,
|
||||||
|
remote_venv_dir=remote_venv_dir,
|
||||||
|
mail_domain=config.mail_domain,
|
||||||
|
)
|
||||||
|
source_path = importlib.resources.files(__package__).joinpath(
|
||||||
|
"service", f"{fn}.service.f"
|
||||||
|
)
|
||||||
|
content = source_path.read_text().format(**params).encode()
|
||||||
|
|
||||||
|
files.put(
|
||||||
|
name=f"Upload {fn}.service",
|
||||||
|
src=io.BytesIO(content),
|
||||||
|
dest=f"/etc/systemd/system/{fn}.service",
|
||||||
|
**root_owned,
|
||||||
|
)
|
||||||
|
systemd.service(
|
||||||
|
name=f"Setup {fn} service",
|
||||||
|
service=f"{fn}.service",
|
||||||
|
running=True,
|
||||||
|
enabled=True,
|
||||||
|
restarted=True,
|
||||||
|
daemon_reload=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _configure_opendkim(domain: str, dkim_selector: str = "dkim") -> bool:
|
||||||
|
"""Configures OpenDKIM"""
|
||||||
|
need_restart = False
|
||||||
|
|
||||||
|
main_config = files.template(
|
||||||
|
src=importlib.resources.files(__package__).joinpath("opendkim/opendkim.conf"),
|
||||||
|
dest="/etc/opendkim.conf",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
config={"domain_name": domain, "opendkim_selector": dkim_selector},
|
||||||
|
)
|
||||||
|
need_restart |= main_config.changed
|
||||||
|
|
||||||
|
screen_script = files.put(
|
||||||
|
src=importlib.resources.files(__package__).joinpath("opendkim/screen.lua"),
|
||||||
|
dest="/etc/opendkim/screen.lua",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
need_restart |= screen_script.changed
|
||||||
|
|
||||||
|
final_script = files.put(
|
||||||
|
src=importlib.resources.files(__package__).joinpath("opendkim/final.lua"),
|
||||||
|
dest="/etc/opendkim/final.lua",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
need_restart |= final_script.changed
|
||||||
|
|
||||||
|
files.directory(
|
||||||
|
name="Add opendkim directory to /etc",
|
||||||
|
path="/etc/opendkim",
|
||||||
|
user="opendkim",
|
||||||
|
group="opendkim",
|
||||||
|
mode="750",
|
||||||
|
present=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
keytable = files.template(
|
||||||
|
src=importlib.resources.files(__package__).joinpath("opendkim/KeyTable"),
|
||||||
|
dest="/etc/dkimkeys/KeyTable",
|
||||||
|
user="opendkim",
|
||||||
|
group="opendkim",
|
||||||
|
mode="644",
|
||||||
|
config={"domain_name": domain, "opendkim_selector": dkim_selector},
|
||||||
|
)
|
||||||
|
need_restart |= keytable.changed
|
||||||
|
|
||||||
|
signing_table = files.template(
|
||||||
|
src=importlib.resources.files(__package__).joinpath("opendkim/SigningTable"),
|
||||||
|
dest="/etc/dkimkeys/SigningTable",
|
||||||
|
user="opendkim",
|
||||||
|
group="opendkim",
|
||||||
|
mode="644",
|
||||||
|
config={"domain_name": domain, "opendkim_selector": dkim_selector},
|
||||||
|
)
|
||||||
|
need_restart |= signing_table.changed
|
||||||
|
files.directory(
|
||||||
|
name="Add opendkim socket directory to /var/spool/postfix",
|
||||||
|
path="/var/spool/postfix/opendkim",
|
||||||
|
user="opendkim",
|
||||||
|
group="opendkim",
|
||||||
|
mode="750",
|
||||||
|
present=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
apt.packages(
|
||||||
|
name="apt install opendkim opendkim-tools",
|
||||||
|
packages=["opendkim", "opendkim-tools"],
|
||||||
|
)
|
||||||
|
|
||||||
|
if not host.get_fact(File, f"/etc/dkimkeys/{dkim_selector}.private"):
|
||||||
|
server.shell(
|
||||||
|
name="Generate OpenDKIM domain keys",
|
||||||
|
commands=[
|
||||||
|
f"/usr/sbin/opendkim-genkey -D /etc/dkimkeys -d {domain} -s {dkim_selector}"
|
||||||
|
],
|
||||||
|
_use_su_login=True,
|
||||||
|
_su_user="opendkim",
|
||||||
|
)
|
||||||
|
|
||||||
|
service_file = files.put(
|
||||||
|
name="Configure opendkim to restart once a day",
|
||||||
|
src=importlib.resources.files(__package__).joinpath("opendkim/systemd.conf"),
|
||||||
|
dest="/etc/systemd/system/opendkim.service.d/10-prevent-memory-leak.conf",
|
||||||
|
)
|
||||||
|
need_restart |= service_file.changed
|
||||||
|
|
||||||
|
return need_restart
|
||||||
|
|
||||||
|
|
||||||
|
def _uninstall_mta_sts_daemon() -> None:
|
||||||
|
# Remove configuration.
|
||||||
|
files.file("/etc/mta-sts-daemon.yml", present=False)
|
||||||
|
|
||||||
|
files.directory("/usr/local/lib/postfix-mta-sts-resolver", present=False)
|
||||||
|
|
||||||
|
files.file("/etc/systemd/system/mta-sts-daemon.service", present=False)
|
||||||
|
|
||||||
|
systemd.service(
|
||||||
|
name="Stop MTA-STS daemon",
|
||||||
|
service="mta-sts-daemon.service",
|
||||||
|
daemon_reload=True,
|
||||||
|
running=False,
|
||||||
|
enabled=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _configure_postfix(config: Config, debug: bool = False) -> bool:
|
||||||
|
"""Configures Postfix SMTP server."""
|
||||||
|
need_restart = False
|
||||||
|
|
||||||
|
main_config = files.template(
|
||||||
|
src=importlib.resources.files(__package__).joinpath("postfix/main.cf.j2"),
|
||||||
|
dest="/etc/postfix/main.cf",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
config=config,
|
||||||
|
disable_ipv6=config.disable_ipv6,
|
||||||
|
)
|
||||||
|
need_restart |= main_config.changed
|
||||||
|
|
||||||
|
master_config = files.template(
|
||||||
|
src=importlib.resources.files(__package__).joinpath("postfix/master.cf.j2"),
|
||||||
|
dest="/etc/postfix/master.cf",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
debug=debug,
|
||||||
|
config=config,
|
||||||
|
)
|
||||||
|
need_restart |= master_config.changed
|
||||||
|
|
||||||
|
header_cleanup = files.put(
|
||||||
|
src=importlib.resources.files(__package__).joinpath(
|
||||||
|
"postfix/submission_header_cleanup"
|
||||||
|
),
|
||||||
|
dest="/etc/postfix/submission_header_cleanup",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
need_restart |= header_cleanup.changed
|
||||||
|
|
||||||
|
# Login map that 1:1 maps email address to login.
|
||||||
|
login_map = files.put(
|
||||||
|
src=importlib.resources.files(__package__).joinpath("postfix/login_map"),
|
||||||
|
dest="/etc/postfix/login_map",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
need_restart |= login_map.changed
|
||||||
|
|
||||||
|
return need_restart
|
||||||
|
|
||||||
|
|
||||||
|
def _install_dovecot_package(package: str, arch: str):
|
||||||
|
arch = "amd64" if arch == "x86_64" else arch
|
||||||
|
arch = "arm64" if arch == "aarch64" else arch
|
||||||
|
url = f"https://download.delta.chat/dovecot/dovecot-{package}_2.3.21%2Bdfsg1-3_{arch}.deb"
|
||||||
|
deb_filename = "/root/" + url.split("/")[-1]
|
||||||
|
|
||||||
|
match (package, arch):
|
||||||
|
case ("core", "amd64"):
|
||||||
|
sha256 = "43f593332e22ac7701c62d58b575d2ca409e0f64857a2803be886c22860f5587"
|
||||||
|
case ("core", "arm64"):
|
||||||
|
sha256 = "4d21eba1a83f51c100f08f2e49f0c9f8f52f721ebc34f75018e043306da993a7"
|
||||||
|
case ("imapd", "amd64"):
|
||||||
|
sha256 = "8d8dc6fc00bbb6cdb25d345844f41ce2f1c53f764b79a838eb2a03103eebfa86"
|
||||||
|
case ("imapd", "arm64"):
|
||||||
|
sha256 = "178fa877ddd5df9930e8308b518f4b07df10e759050725f8217a0c1fb3fd707f"
|
||||||
|
case ("lmtpd", "amd64"):
|
||||||
|
sha256 = "2f69ba5e35363de50962d42cccbfe4ed8495265044e244007d7ccddad77513ab"
|
||||||
|
case ("lmtpd", "arm64"):
|
||||||
|
sha256 = "89f52fb36524f5877a177dff4a713ba771fd3f91f22ed0af7238d495e143b38f"
|
||||||
|
case _:
|
||||||
|
apt.packages(packages=[f"dovecot-{package}"])
|
||||||
|
return
|
||||||
|
|
||||||
|
files.download(
|
||||||
|
name=f"Download dovecot-{package}",
|
||||||
|
src=url,
|
||||||
|
dest=deb_filename,
|
||||||
|
sha256sum=sha256,
|
||||||
|
cache_time=60 * 60 * 24 * 365 * 10, # never redownload the package
|
||||||
|
)
|
||||||
|
|
||||||
|
apt.deb(name=f"Install dovecot-{package}", src=deb_filename)
|
||||||
|
|
||||||
|
|
||||||
|
def _configure_dovecot(config: Config, debug: bool = False) -> bool:
|
||||||
|
"""Configures Dovecot IMAP server."""
|
||||||
|
need_restart = False
|
||||||
|
|
||||||
|
main_config = files.template(
|
||||||
|
src=importlib.resources.files(__package__).joinpath("dovecot/dovecot.conf.j2"),
|
||||||
|
dest="/etc/dovecot/dovecot.conf",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
config=config,
|
||||||
|
debug=debug,
|
||||||
|
disable_ipv6=config.disable_ipv6,
|
||||||
|
)
|
||||||
|
need_restart |= main_config.changed
|
||||||
|
auth_config = files.put(
|
||||||
|
src=importlib.resources.files(__package__).joinpath("dovecot/auth.conf"),
|
||||||
|
dest="/etc/dovecot/auth.conf",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
need_restart |= auth_config.changed
|
||||||
|
lua_push_notification_script = files.put(
|
||||||
|
src=importlib.resources.files(__package__).joinpath(
|
||||||
|
"dovecot/push_notification.lua"
|
||||||
|
),
|
||||||
|
dest="/etc/dovecot/push_notification.lua",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
need_restart |= lua_push_notification_script.changed
|
||||||
|
|
||||||
|
files.template(
|
||||||
|
src=importlib.resources.files(__package__).joinpath("dovecot/expunge.cron.j2"),
|
||||||
|
dest="/etc/cron.d/expunge",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
config=config,
|
||||||
|
)
|
||||||
|
|
||||||
|
# as per https://doc.dovecot.org/configuration_manual/os/
|
||||||
|
# it is recommended to set the following inotify limits
|
||||||
|
for name in ("max_user_instances", "max_user_watches"):
|
||||||
|
key = f"fs.inotify.{name}"
|
||||||
|
if host.get_fact(Sysctl)[key] > 65535:
|
||||||
|
# Skip updating limits if already sufficient
|
||||||
|
# (enables running in incus containers where sysctl readonly)
|
||||||
|
continue
|
||||||
|
server.sysctl(
|
||||||
|
name=f"Change {key}",
|
||||||
|
key=key,
|
||||||
|
value=65535,
|
||||||
|
persist=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
timezone_env = files.line(
|
||||||
|
name="Set TZ environment variable",
|
||||||
|
path="/etc/environment",
|
||||||
|
line="TZ=:/etc/localtime",
|
||||||
|
)
|
||||||
|
need_restart |= timezone_env.changed
|
||||||
|
|
||||||
|
return need_restart
|
||||||
|
|
||||||
|
|
||||||
|
def _configure_nginx(config: Config, debug: bool = False) -> bool:
|
||||||
|
"""Configures nginx HTTP server."""
|
||||||
|
need_restart = False
|
||||||
|
|
||||||
|
main_config = files.template(
|
||||||
|
src=importlib.resources.files(__package__).joinpath("nginx/nginx.conf.j2"),
|
||||||
|
dest="/etc/nginx/nginx.conf",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
config={"domain_name": config.mail_domain},
|
||||||
|
disable_ipv6=config.disable_ipv6,
|
||||||
|
)
|
||||||
|
need_restart |= main_config.changed
|
||||||
|
|
||||||
|
autoconfig = files.template(
|
||||||
|
src=importlib.resources.files(__package__).joinpath("nginx/autoconfig.xml.j2"),
|
||||||
|
dest="/var/www/html/.well-known/autoconfig/mail/config-v1.1.xml",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
config={"domain_name": config.mail_domain},
|
||||||
|
)
|
||||||
|
need_restart |= autoconfig.changed
|
||||||
|
|
||||||
|
mta_sts_config = files.template(
|
||||||
|
src=importlib.resources.files(__package__).joinpath("nginx/mta-sts.txt.j2"),
|
||||||
|
dest="/var/www/html/.well-known/mta-sts.txt",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
config={"domain_name": config.mail_domain},
|
||||||
|
)
|
||||||
|
need_restart |= mta_sts_config.changed
|
||||||
|
|
||||||
|
# install CGI newemail script
|
||||||
|
#
|
||||||
|
cgi_dir = "/usr/lib/cgi-bin"
|
||||||
|
files.directory(
|
||||||
|
name=f"Ensure {cgi_dir} exists",
|
||||||
|
path=cgi_dir,
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
)
|
||||||
|
|
||||||
|
files.put(
|
||||||
|
name="Upload cgi newemail.py script",
|
||||||
|
src=importlib.resources.files("chatmaild").joinpath("newemail.py").open("rb"),
|
||||||
|
dest=f"{cgi_dir}/newemail.py",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="755",
|
||||||
|
)
|
||||||
|
|
||||||
|
return need_restart
|
||||||
|
|
||||||
|
|
||||||
|
def _remove_rspamd() -> None:
|
||||||
|
"""Remove rspamd"""
|
||||||
|
apt.packages(name="Remove rspamd", packages="rspamd", present=False)
|
||||||
|
|
||||||
|
|
||||||
|
def check_config(config):
|
||||||
|
mail_domain = config.mail_domain
|
||||||
|
if mail_domain != "testrun.org" and not mail_domain.endswith(".testrun.org"):
|
||||||
|
blocked_words = "merlinux schmieder testrun.org".split()
|
||||||
|
for key in config.__dict__:
|
||||||
|
value = config.__dict__[key]
|
||||||
|
if key.startswith("privacy") and any(
|
||||||
|
x in str(value) for x in blocked_words
|
||||||
|
):
|
||||||
|
raise ValueError(
|
||||||
|
f"please set your own privacy contacts/addresses in {config._inipath}"
|
||||||
|
)
|
||||||
|
return config
|
||||||
|
|
||||||
|
|
||||||
|
def deploy_mtail(config):
|
||||||
|
# Uninstall mtail package, we are going to install a static binary.
|
||||||
|
apt.packages(name="Uninstall mtail", packages=["mtail"], present=False)
|
||||||
|
|
||||||
|
(url, sha256sum) = {
|
||||||
|
"x86_64": (
|
||||||
|
"https://github.com/google/mtail/releases/download/v3.0.8/mtail_3.0.8_linux_amd64.tar.gz",
|
||||||
|
"123c2ee5f48c3eff12ebccee38befd2233d715da736000ccde49e3d5607724e4",
|
||||||
|
),
|
||||||
|
"aarch64": (
|
||||||
|
"https://github.com/google/mtail/releases/download/v3.0.8/mtail_3.0.8_linux_arm64.tar.gz",
|
||||||
|
"aa04811c0929b6754408676de520e050c45dddeb3401881888a092c9aea89cae",
|
||||||
|
),
|
||||||
|
}[host.get_fact(facts.server.Arch)]
|
||||||
|
|
||||||
|
server.shell(
|
||||||
|
name="Download mtail",
|
||||||
|
commands=[
|
||||||
|
f"(echo '{sha256sum} /usr/local/bin/mtail' | sha256sum -c) || (curl -L {url} | gunzip | tar -x -f - mtail -O >/usr/local/bin/mtail.new && mv /usr/local/bin/mtail.new /usr/local/bin/mtail)",
|
||||||
|
"chmod 755 /usr/local/bin/mtail",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
# Using our own systemd unit instead of `/usr/lib/systemd/system/mtail.service`.
|
||||||
|
# This allows to read from journalctl instead of log files.
|
||||||
|
files.template(
|
||||||
|
src=importlib.resources.files(__package__).joinpath("mtail/mtail.service.j2"),
|
||||||
|
dest="/etc/systemd/system/mtail.service",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
address=config.mtail_address or "127.0.0.1",
|
||||||
|
port=3903,
|
||||||
|
)
|
||||||
|
|
||||||
|
mtail_conf = files.put(
|
||||||
|
name="Mtail configuration",
|
||||||
|
src=importlib.resources.files(__package__).joinpath(
|
||||||
|
"mtail/delivered_mail.mtail"
|
||||||
|
),
|
||||||
|
dest="/etc/mtail/delivered_mail.mtail",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
|
||||||
|
systemd.service(
|
||||||
|
name="Start and enable mtail",
|
||||||
|
service="mtail.service",
|
||||||
|
running=bool(config.mtail_address),
|
||||||
|
enabled=bool(config.mtail_address),
|
||||||
|
restarted=mtail_conf.changed,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def deploy_iroh_relay(config) -> None:
|
||||||
|
(url, sha256sum) = {
|
||||||
|
"x86_64": (
|
||||||
|
"https://github.com/n0-computer/iroh/releases/download/v0.35.0/iroh-relay-v0.35.0-x86_64-unknown-linux-musl.tar.gz",
|
||||||
|
"45c81199dbd70f8c4c30fef7f3b9727ca6e3cea8f2831333eeaf8aa71bf0fac1",
|
||||||
|
),
|
||||||
|
"aarch64": (
|
||||||
|
"https://github.com/n0-computer/iroh/releases/download/v0.35.0/iroh-relay-v0.35.0-aarch64-unknown-linux-musl.tar.gz",
|
||||||
|
"f8ef27631fac213b3ef668d02acd5b3e215292746a3fc71d90c63115446008b1",
|
||||||
|
),
|
||||||
|
}[host.get_fact(facts.server.Arch)]
|
||||||
|
|
||||||
|
apt.packages(
|
||||||
|
name="Install curl",
|
||||||
|
packages=["curl"],
|
||||||
|
)
|
||||||
|
|
||||||
|
need_restart = False
|
||||||
|
|
||||||
|
existing_sha256sum = host.get_fact(Sha256File, "/usr/local/bin/iroh-relay")
|
||||||
|
if existing_sha256sum != sha256sum:
|
||||||
|
server.shell(
|
||||||
|
name="Download iroh-relay",
|
||||||
|
commands=[
|
||||||
|
f"(curl -L {url} | gunzip | tar -x -f - ./iroh-relay -O >/usr/local/bin/iroh-relay.new && (echo '{sha256sum} /usr/local/bin/iroh-relay.new' | sha256sum -c) && mv /usr/local/bin/iroh-relay.new /usr/local/bin/iroh-relay)",
|
||||||
|
"chmod 755 /usr/local/bin/iroh-relay",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
need_restart = True
|
||||||
|
|
||||||
|
systemd_unit = files.put(
|
||||||
|
name="Upload iroh-relay systemd unit",
|
||||||
|
src=importlib.resources.files(__package__).joinpath("iroh-relay.service"),
|
||||||
|
dest="/etc/systemd/system/iroh-relay.service",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
need_restart |= systemd_unit.changed
|
||||||
|
|
||||||
|
iroh_config = files.put(
|
||||||
|
name="Upload iroh-relay config",
|
||||||
|
src=importlib.resources.files(__package__).joinpath("iroh-relay.toml"),
|
||||||
|
dest="/etc/iroh-relay.toml",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
need_restart |= iroh_config.changed
|
||||||
|
|
||||||
|
systemd.service(
|
||||||
|
name="Start and enable iroh-relay",
|
||||||
|
service="iroh-relay.service",
|
||||||
|
running=True,
|
||||||
|
enabled=config.enable_iroh_relay,
|
||||||
|
restarted=need_restart,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def deploy_chatmail(config_path: Path, disable_mail: bool) -> None:
|
||||||
|
"""Deploy a chat-mail instance.
|
||||||
|
|
||||||
|
:param config_path: path to chatmail.ini
|
||||||
|
:param disable_mail: whether to disable postfix & dovecot
|
||||||
|
"""
|
||||||
|
config = read_config(config_path)
|
||||||
|
check_config(config)
|
||||||
|
mail_domain = config.mail_domain
|
||||||
|
|
||||||
|
from .www import build_webpages, get_paths
|
||||||
|
|
||||||
|
server.group(name="Create vmail group", group="vmail", system=True)
|
||||||
|
server.user(name="Create vmail user", user="vmail", group="vmail", system=True)
|
||||||
|
server.group(name="Create opendkim group", group="opendkim", system=True)
|
||||||
|
server.user(
|
||||||
|
name="Create opendkim user",
|
||||||
|
user="opendkim",
|
||||||
|
groups=["opendkim"],
|
||||||
|
system=True,
|
||||||
|
)
|
||||||
|
server.user(
|
||||||
|
name="Add postfix user to opendkim group for socket access",
|
||||||
|
user="postfix",
|
||||||
|
groups=["opendkim"],
|
||||||
|
system=True,
|
||||||
|
)
|
||||||
|
server.user(name="Create echobot user", user="echobot", system=True)
|
||||||
|
server.user(name="Create iroh user", user="iroh", system=True)
|
||||||
|
|
||||||
|
# Add our OBS repository for dovecot_no_delay
|
||||||
|
files.put(
|
||||||
|
name="Add Deltachat OBS GPG key to apt keyring",
|
||||||
|
src=importlib.resources.files(__package__).joinpath("obs-home-deltachat.gpg"),
|
||||||
|
dest="/etc/apt/keyrings/obs-home-deltachat.gpg",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
|
||||||
|
files.line(
|
||||||
|
name="Add DeltaChat OBS home repository to sources.list",
|
||||||
|
path="/etc/apt/sources.list",
|
||||||
|
line="deb [signed-by=/etc/apt/keyrings/obs-home-deltachat.gpg] https://download.opensuse.org/repositories/home:/deltachat/Debian_12/ ./",
|
||||||
|
escape_regex_characters=True,
|
||||||
|
present=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
if host.get_fact(Port, port=53) != "unbound":
|
||||||
|
files.line(
|
||||||
|
name="Add 9.9.9.9 to resolv.conf",
|
||||||
|
path="/etc/resolv.conf",
|
||||||
|
line="nameserver 9.9.9.9",
|
||||||
|
)
|
||||||
|
apt.update(name="apt update", cache_time=24 * 3600)
|
||||||
|
apt.upgrade(name="upgrade apt packages", auto_remove=True)
|
||||||
|
|
||||||
|
apt.packages(
|
||||||
|
name="Install rsync",
|
||||||
|
packages=["rsync"],
|
||||||
|
)
|
||||||
|
|
||||||
|
# Run local DNS resolver `unbound`.
|
||||||
|
# `resolvconf` takes care of setting up /etc/resolv.conf
|
||||||
|
# to use 127.0.0.1 as the resolver.
|
||||||
|
from cmdeploy.cmdeploy import Out
|
||||||
|
|
||||||
|
port_services = [
|
||||||
|
(["master", "smtpd"], 25),
|
||||||
|
("unbound", 53),
|
||||||
|
("acmetool", 80),
|
||||||
|
(["imap-login", "dovecot"], 143),
|
||||||
|
("nginx", 443),
|
||||||
|
(["master", "smtpd"], 465),
|
||||||
|
(["master", "smtpd"], 587),
|
||||||
|
(["imap-login", "dovecot"], 993),
|
||||||
|
("iroh-relay", 3340),
|
||||||
|
("nginx", 8443),
|
||||||
|
(["master", "smtpd"], config.postfix_reinject_port),
|
||||||
|
(["master", "smtpd"], config.postfix_reinject_port_incoming),
|
||||||
|
("filtermail", config.filtermail_smtp_port),
|
||||||
|
("filtermail", config.filtermail_smtp_port_incoming),
|
||||||
|
]
|
||||||
|
for service, port in port_services:
|
||||||
|
print(f"Checking if port {port} is available for {service}...")
|
||||||
|
running_service = host.get_fact(Port, port=port)
|
||||||
|
if running_service:
|
||||||
|
if running_service not in service:
|
||||||
|
Out().red(
|
||||||
|
f"Deploy failed: port {port} is occupied by: {running_service}"
|
||||||
|
)
|
||||||
|
exit(1)
|
||||||
|
|
||||||
|
apt.packages(
|
||||||
|
name="Install unbound",
|
||||||
|
packages=["unbound", "unbound-anchor", "dnsutils"],
|
||||||
|
)
|
||||||
|
server.shell(
|
||||||
|
name="Generate root keys for validating DNSSEC",
|
||||||
|
commands=[
|
||||||
|
"unbound-anchor -a /var/lib/unbound/root.key || true",
|
||||||
|
"systemctl reset-failed unbound.service",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
systemd.service(
|
||||||
|
name="Start and enable unbound",
|
||||||
|
service="unbound.service",
|
||||||
|
running=True,
|
||||||
|
enabled=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
deploy_iroh_relay(config)
|
||||||
|
|
||||||
|
# Deploy acmetool to have TLS certificates.
|
||||||
|
tls_domains = [mail_domain, f"mta-sts.{mail_domain}", f"www.{mail_domain}"]
|
||||||
|
deploy_acmetool(
|
||||||
|
domains=tls_domains,
|
||||||
|
)
|
||||||
|
|
||||||
|
apt.packages(
|
||||||
|
# required for setfacl for echobot
|
||||||
|
name="Install acl",
|
||||||
|
packages="acl",
|
||||||
|
)
|
||||||
|
|
||||||
|
apt.packages(
|
||||||
|
name="Install Postfix",
|
||||||
|
packages="postfix",
|
||||||
|
)
|
||||||
|
|
||||||
|
if not "dovecot.service" in host.get_fact(SystemdEnabled):
|
||||||
|
_install_dovecot_package("core", host.get_fact(facts.server.Arch))
|
||||||
|
_install_dovecot_package("imapd", host.get_fact(facts.server.Arch))
|
||||||
|
_install_dovecot_package("lmtpd", host.get_fact(facts.server.Arch))
|
||||||
|
|
||||||
|
apt.packages(
|
||||||
|
name="Install nginx",
|
||||||
|
packages=["nginx", "libnginx-mod-stream"],
|
||||||
|
)
|
||||||
|
|
||||||
|
apt.packages(
|
||||||
|
name="Install fcgiwrap",
|
||||||
|
packages=["fcgiwrap"],
|
||||||
|
)
|
||||||
|
|
||||||
|
www_path, src_dir, build_dir = get_paths(config)
|
||||||
|
# if www_folder was set to a non-existing folder, skip upload
|
||||||
|
if not www_path.is_dir():
|
||||||
|
logger.warning("Building web pages is disabled in chatmail.ini, skipping")
|
||||||
|
else:
|
||||||
|
# if www_folder is a hugo page, build it
|
||||||
|
if build_dir:
|
||||||
|
www_path = build_webpages(src_dir, build_dir, config)
|
||||||
|
# if it is not a hugo page, upload it as is
|
||||||
|
files.rsync(f"{www_path}/", "/var/www/html", flags=["-avz"])
|
||||||
|
|
||||||
|
_install_remote_venv_with_chatmaild(config)
|
||||||
|
debug = False
|
||||||
|
dovecot_need_restart = _configure_dovecot(config, debug=debug)
|
||||||
|
postfix_need_restart = _configure_postfix(config, debug=debug)
|
||||||
|
nginx_need_restart = _configure_nginx(config)
|
||||||
|
_uninstall_mta_sts_daemon()
|
||||||
|
|
||||||
|
_remove_rspamd()
|
||||||
|
opendkim_need_restart = _configure_opendkim(mail_domain, "opendkim")
|
||||||
|
|
||||||
|
systemd.service(
|
||||||
|
name="Start and enable OpenDKIM",
|
||||||
|
service="opendkim.service",
|
||||||
|
running=True,
|
||||||
|
enabled=True,
|
||||||
|
daemon_reload=opendkim_need_restart,
|
||||||
|
restarted=opendkim_need_restart,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Dovecot should be started before Postfix
|
||||||
|
# because it creates authentication socket
|
||||||
|
# required by Postfix.
|
||||||
|
systemd.service(
|
||||||
|
name="disable dovecot for now" if disable_mail else "Start and enable Dovecot",
|
||||||
|
service="dovecot.service",
|
||||||
|
running=False if disable_mail else True,
|
||||||
|
enabled=False if disable_mail else True,
|
||||||
|
restarted=dovecot_need_restart if not disable_mail else False,
|
||||||
|
)
|
||||||
|
|
||||||
|
systemd.service(
|
||||||
|
name="disable postfix for now" if disable_mail else "Start and enable Postfix",
|
||||||
|
service="postfix.service",
|
||||||
|
running=False if disable_mail else True,
|
||||||
|
enabled=False if disable_mail else True,
|
||||||
|
restarted=postfix_need_restart if not disable_mail else False,
|
||||||
|
)
|
||||||
|
|
||||||
|
systemd.service(
|
||||||
|
name="Start and enable nginx",
|
||||||
|
service="nginx.service",
|
||||||
|
running=True,
|
||||||
|
enabled=True,
|
||||||
|
restarted=nginx_need_restart,
|
||||||
|
)
|
||||||
|
|
||||||
|
systemd.service(
|
||||||
|
name="Restart echobot if postfix and dovecot were just started",
|
||||||
|
service="echobot.service",
|
||||||
|
restarted=postfix_need_restart and dovecot_need_restart,
|
||||||
|
)
|
||||||
|
|
||||||
|
# This file is used by auth proxy.
|
||||||
|
# https://wiki.debian.org/EtcMailName
|
||||||
|
server.shell(
|
||||||
|
name="Setup /etc/mailname",
|
||||||
|
commands=[f"echo {mail_domain} >/etc/mailname; chmod 644 /etc/mailname"],
|
||||||
|
)
|
||||||
|
|
||||||
|
journald_conf = files.put(
|
||||||
|
name="Configure journald",
|
||||||
|
src=importlib.resources.files(__package__).joinpath("journald.conf"),
|
||||||
|
dest="/etc/systemd/journald.conf",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
systemd.service(
|
||||||
|
name="Start and enable journald",
|
||||||
|
service="systemd-journald.service",
|
||||||
|
running=True,
|
||||||
|
enabled=True,
|
||||||
|
restarted=journald_conf.changed,
|
||||||
|
)
|
||||||
|
files.directory(
|
||||||
|
name="Ensure old logs on disk are deleted",
|
||||||
|
path="/var/log/journal/",
|
||||||
|
present=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
apt.packages(
|
||||||
|
name="Ensure cron is installed",
|
||||||
|
packages=["cron"],
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
git_hash = subprocess.check_output(["git", "rev-parse", "HEAD"]).decode()
|
||||||
|
except Exception:
|
||||||
|
git_hash = "unknown\n"
|
||||||
|
try:
|
||||||
|
git_diff = subprocess.check_output(["git", "diff"]).decode()
|
||||||
|
except Exception:
|
||||||
|
git_diff = ""
|
||||||
|
files.put(
|
||||||
|
name="Upload chatmail relay git commiit hash",
|
||||||
|
src=StringIO(git_hash + git_diff),
|
||||||
|
dest="/etc/chatmail-version",
|
||||||
|
mode="700",
|
||||||
|
)
|
||||||
|
|
||||||
|
deploy_mtail(config)
|
||||||
@@ -2,127 +2,66 @@ import importlib.resources
|
|||||||
|
|
||||||
from pyinfra.operations import apt, files, server, systemd
|
from pyinfra.operations import apt, files, server, systemd
|
||||||
|
|
||||||
from ..basedeploy import Deployer
|
|
||||||
|
|
||||||
|
def deploy_acmetool(email="", domains=[]):
|
||||||
|
"""Deploy acmetool."""
|
||||||
|
apt.packages(
|
||||||
|
name="Install acmetool",
|
||||||
|
packages=["acmetool"],
|
||||||
|
)
|
||||||
|
|
||||||
class AcmetoolDeployer(Deployer):
|
files.put(
|
||||||
def __init__(self, email, domains):
|
src=importlib.resources.files(__package__).joinpath("acmetool.cron").open("rb"),
|
||||||
self.domains = domains
|
dest="/etc/cron.d/acmetool",
|
||||||
self.email = email
|
user="root",
|
||||||
self.need_restart_redirector = False
|
group="root",
|
||||||
self.need_restart_reconcile_service = False
|
mode="644",
|
||||||
self.need_restart_reconcile_timer = False
|
)
|
||||||
|
|
||||||
def install(self):
|
files.put(
|
||||||
apt.packages(
|
src=importlib.resources.files(__package__).joinpath("acmetool.hook").open("rb"),
|
||||||
name="Install acmetool",
|
dest="/usr/lib/acme/hooks/nginx",
|
||||||
packages=["acmetool"],
|
user="root",
|
||||||
)
|
group="root",
|
||||||
|
mode="744",
|
||||||
|
)
|
||||||
|
|
||||||
files.file(
|
files.template(
|
||||||
name="Remove old acmetool cronjob, it is replaced with systemd timer.",
|
src=importlib.resources.files(__package__).joinpath("response-file.yaml.j2"),
|
||||||
path="/etc/cron.d/acmetool",
|
dest="/var/lib/acme/conf/responses",
|
||||||
present=False,
|
user="root",
|
||||||
)
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
email=email,
|
||||||
|
)
|
||||||
|
|
||||||
files.put(
|
files.template(
|
||||||
name="Install acmetool hook.",
|
src=importlib.resources.files(__package__).joinpath("target.yaml.j2"),
|
||||||
src=importlib.resources.files(__package__)
|
dest="/var/lib/acme/conf/target",
|
||||||
.joinpath("acmetool.hook")
|
user="root",
|
||||||
.open("rb"),
|
group="root",
|
||||||
dest="/etc/acme/hooks/nginx",
|
mode="644",
|
||||||
user="root",
|
)
|
||||||
group="root",
|
|
||||||
mode="755",
|
|
||||||
)
|
|
||||||
files.file(
|
|
||||||
name="Remove acmetool hook from the wrong location where it was previously installed.",
|
|
||||||
path="/usr/lib/acme/hooks/nginx",
|
|
||||||
present=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
def configure(self):
|
service_file = files.put(
|
||||||
files.template(
|
src=importlib.resources.files(__package__).joinpath(
|
||||||
src=importlib.resources.files(__package__).joinpath(
|
"acmetool-redirector.service"
|
||||||
"response-file.yaml.j2"
|
),
|
||||||
),
|
dest="/etc/systemd/system/acmetool-redirector.service",
|
||||||
dest="/var/lib/acme/conf/responses",
|
user="root",
|
||||||
user="root",
|
group="root",
|
||||||
group="root",
|
mode="644",
|
||||||
mode="644",
|
)
|
||||||
email=self.email,
|
|
||||||
)
|
|
||||||
|
|
||||||
files.template(
|
systemd.service(
|
||||||
src=importlib.resources.files(__package__).joinpath("target.yaml.j2"),
|
name="Setup acmetool-redirector service",
|
||||||
dest="/var/lib/acme/conf/target",
|
service="acmetool-redirector.service",
|
||||||
user="root",
|
running=True,
|
||||||
group="root",
|
enabled=True,
|
||||||
mode="644",
|
restarted=service_file.changed,
|
||||||
)
|
)
|
||||||
|
|
||||||
service_file = files.put(
|
server.shell(
|
||||||
src=importlib.resources.files(__package__).joinpath(
|
name=f"Request certificate for: {', '.join(domains)}",
|
||||||
"acmetool-redirector.service"
|
commands=[f"acmetool want --xlog.severity=debug {' '.join(domains)}"],
|
||||||
),
|
)
|
||||||
dest="/etc/systemd/system/acmetool-redirector.service",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
self.need_restart_redirector = service_file.changed
|
|
||||||
|
|
||||||
reconcile_service_file = files.put(
|
|
||||||
src=importlib.resources.files(__package__).joinpath(
|
|
||||||
"acmetool-reconcile.service"
|
|
||||||
),
|
|
||||||
dest="/etc/systemd/system/acmetool-reconcile.service",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
self.need_restart_reconcile_service = reconcile_service_file.changed
|
|
||||||
|
|
||||||
reconcile_timer_file = files.put(
|
|
||||||
src=importlib.resources.files(__package__).joinpath(
|
|
||||||
"acmetool-reconcile.timer"
|
|
||||||
),
|
|
||||||
dest="/etc/systemd/system/acmetool-reconcile.timer",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
self.need_restart_reconcile_timer = reconcile_timer_file.changed
|
|
||||||
|
|
||||||
def activate(self):
|
|
||||||
systemd.service(
|
|
||||||
name="Setup acmetool-redirector service",
|
|
||||||
service="acmetool-redirector.service",
|
|
||||||
running=True,
|
|
||||||
enabled=True,
|
|
||||||
restarted=self.need_restart_redirector,
|
|
||||||
)
|
|
||||||
self.need_restart_redirector = False
|
|
||||||
|
|
||||||
systemd.service(
|
|
||||||
name="Setup acmetool-reconcile service",
|
|
||||||
service="acmetool-reconcile.service",
|
|
||||||
running=False,
|
|
||||||
enabled=False,
|
|
||||||
daemon_reload=self.need_restart_reconcile_service,
|
|
||||||
)
|
|
||||||
self.need_restart_reconcile_service = False
|
|
||||||
|
|
||||||
systemd.service(
|
|
||||||
name="Setup acmetool-reconcile timer",
|
|
||||||
service="acmetool-reconcile.timer",
|
|
||||||
running=True,
|
|
||||||
enabled=True,
|
|
||||||
daemon_reload=self.need_restart_reconcile_timer,
|
|
||||||
)
|
|
||||||
self.need_restart_reconcile_timer = False
|
|
||||||
|
|
||||||
server.shell(
|
|
||||||
name=f"Request certificate for: {', '.join(self.domains)}",
|
|
||||||
commands=[f"acmetool want --xlog.severity=debug {' '.join(self.domains)}"],
|
|
||||||
)
|
|
||||||
|
|||||||
@@ -1,8 +0,0 @@
|
|||||||
[Unit]
|
|
||||||
Description=Renew TLS certificates with acmetool
|
|
||||||
After=network.target
|
|
||||||
|
|
||||||
[Service]
|
|
||||||
Type=oneshot
|
|
||||||
ExecStart=/usr/bin/acmetool --batch reconcile
|
|
||||||
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
[Unit]
|
|
||||||
Description=Renew TLS certificates with acmetool
|
|
||||||
|
|
||||||
[Timer]
|
|
||||||
OnCalendar=*-*-* 16:20:00
|
|
||||||
|
|
||||||
[Install]
|
|
||||||
WantedBy=timers.target
|
|
||||||
4
cmdeploy/src/cmdeploy/acmetool/acmetool.cron
Normal file
4
cmdeploy/src/cmdeploy/acmetool/acmetool.cron
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
SHELL=/bin/sh
|
||||||
|
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin
|
||||||
|
MAILTO=root
|
||||||
|
20 16 * * * root /usr/bin/acmetool --batch reconcile && systemctl reload dovecot && systemctl reload postfix && systemctl reload nginx
|
||||||
@@ -1,2 +1,2 @@
|
|||||||
"acme-enter-email": "{{ email }}"
|
"acme-enter-email": "{{ email }}"
|
||||||
"acme-agreement:https://letsencrypt.org/documents/LE-SA-v1.6-August-18-2025.pdf": true
|
"acme-agreement:https://letsencrypt.org/documents/LE-SA-v1.5-February-24-2025.pdf": true
|
||||||
|
|||||||
@@ -1,8 +1,7 @@
|
|||||||
request:
|
request:
|
||||||
provider: https://acme-v02.api.letsencrypt.org/directory
|
provider: https://acme-v02.api.letsencrypt.org/directory
|
||||||
key:
|
key:
|
||||||
type: ecdsa
|
type: rsa
|
||||||
ecdsa-curve: nistp256
|
|
||||||
challenge:
|
challenge:
|
||||||
webroot-paths:
|
webroot-paths:
|
||||||
- /var/www/html/.well-known/acme-challenge
|
- /var/www/html/.well-known/acme-challenge
|
||||||
|
|||||||
@@ -1,112 +0,0 @@
|
|||||||
import importlib.resources
|
|
||||||
import io
|
|
||||||
import os
|
|
||||||
|
|
||||||
from pyinfra.operations import files, server, systemd
|
|
||||||
|
|
||||||
|
|
||||||
def get_resource(arg, pkg=__package__):
|
|
||||||
return importlib.resources.files(pkg).joinpath(arg)
|
|
||||||
|
|
||||||
|
|
||||||
def configure_remote_units(mail_domain, units) -> None:
|
|
||||||
remote_base_dir = "/usr/local/lib/chatmaild"
|
|
||||||
remote_venv_dir = f"{remote_base_dir}/venv"
|
|
||||||
remote_chatmail_inipath = f"{remote_base_dir}/chatmail.ini"
|
|
||||||
root_owned = dict(user="root", group="root", mode="644")
|
|
||||||
|
|
||||||
# install systemd units
|
|
||||||
for fn in units:
|
|
||||||
execpath = fn if fn != "filtermail-incoming" else "filtermail"
|
|
||||||
params = dict(
|
|
||||||
execpath=f"{remote_venv_dir}/bin/{execpath}",
|
|
||||||
config_path=remote_chatmail_inipath,
|
|
||||||
remote_venv_dir=remote_venv_dir,
|
|
||||||
mail_domain=mail_domain,
|
|
||||||
)
|
|
||||||
|
|
||||||
basename = fn if "." in fn else f"{fn}.service"
|
|
||||||
|
|
||||||
source_path = get_resource(f"service/{basename}.f")
|
|
||||||
content = source_path.read_text().format(**params).encode()
|
|
||||||
|
|
||||||
files.put(
|
|
||||||
name=f"Upload {basename}",
|
|
||||||
src=io.BytesIO(content),
|
|
||||||
dest=f"/etc/systemd/system/{basename}",
|
|
||||||
**root_owned,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def activate_remote_units(units) -> None:
|
|
||||||
# activate systemd units
|
|
||||||
for fn in units:
|
|
||||||
basename = fn if "." in fn else f"{fn}.service"
|
|
||||||
|
|
||||||
if fn == "chatmail-expire" or fn == "chatmail-fsreport":
|
|
||||||
# don't auto-start but let the corresponding timer trigger execution
|
|
||||||
enabled = False
|
|
||||||
else:
|
|
||||||
enabled = True
|
|
||||||
systemd.service(
|
|
||||||
name=f"Setup {basename}",
|
|
||||||
service=basename,
|
|
||||||
running=enabled,
|
|
||||||
enabled=enabled,
|
|
||||||
restarted=enabled,
|
|
||||||
daemon_reload=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class Deployment:
|
|
||||||
def install(self, deployer):
|
|
||||||
# optional 'required_users' contains a list of (user, group, secondary-group-list) tuples.
|
|
||||||
# If the group is None, no group is created corresponding to that user.
|
|
||||||
# If the secondary group list is not None, all listed groups are created as well.
|
|
||||||
required_users = getattr(deployer, "required_users", [])
|
|
||||||
for user, group, groups in required_users:
|
|
||||||
if group is not None:
|
|
||||||
server.group(
|
|
||||||
name="Create {} group".format(group), group=group, system=True
|
|
||||||
)
|
|
||||||
if groups is not None:
|
|
||||||
for group2 in groups:
|
|
||||||
server.group(
|
|
||||||
name="Create {} group".format(group2), group=group2, system=True
|
|
||||||
)
|
|
||||||
server.user(
|
|
||||||
name="Create {} user".format(user),
|
|
||||||
user=user,
|
|
||||||
group=group,
|
|
||||||
groups=groups,
|
|
||||||
system=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
deployer.install()
|
|
||||||
|
|
||||||
def configure(self, deployer):
|
|
||||||
deployer.configure()
|
|
||||||
|
|
||||||
def activate(self, deployer):
|
|
||||||
deployer.activate()
|
|
||||||
|
|
||||||
def perform_stages(self, deployers):
|
|
||||||
default_stages = "install,configure,activate"
|
|
||||||
stages = os.getenv("CMDEPLOY_STAGES", default_stages).split(",")
|
|
||||||
|
|
||||||
for stage in stages:
|
|
||||||
for deployer in deployers:
|
|
||||||
getattr(self, stage)(deployer)
|
|
||||||
|
|
||||||
|
|
||||||
class Deployer:
|
|
||||||
need_restart = False
|
|
||||||
|
|
||||||
def install(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def configure(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def activate(self):
|
|
||||||
pass
|
|
||||||
@@ -19,7 +19,7 @@ from packaging import version
|
|||||||
from termcolor import colored
|
from termcolor import colored
|
||||||
|
|
||||||
from . import dns, remote
|
from . import dns, remote
|
||||||
from .sshexec import LocalExec, SSHExec
|
from .sshexec import SSHExec
|
||||||
|
|
||||||
#
|
#
|
||||||
# cmdeploy sub commands and options
|
# cmdeploy sub commands and options
|
||||||
@@ -32,30 +32,17 @@ def init_cmd_options(parser):
|
|||||||
action="store",
|
action="store",
|
||||||
help="fully qualified DNS domain name for your chatmail instance",
|
help="fully qualified DNS domain name for your chatmail instance",
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
|
||||||
"--force",
|
|
||||||
dest="recreate_ini",
|
|
||||||
action="store_true",
|
|
||||||
help="force reacreate ini file",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def init_cmd(args, out):
|
def init_cmd(args, out):
|
||||||
"""Initialize chatmail config file."""
|
"""Initialize chatmail config file."""
|
||||||
mail_domain = args.chatmail_domain
|
mail_domain = args.chatmail_domain
|
||||||
inipath = args.inipath
|
|
||||||
if args.inipath.exists():
|
if args.inipath.exists():
|
||||||
if not args.recreate_ini:
|
print(f"Path exists, not modifying: {args.inipath}")
|
||||||
print(f"[WARNING] Path exists, not modifying: {inipath}")
|
return 1
|
||||||
return 1
|
else:
|
||||||
else:
|
write_initial_config(args.inipath, mail_domain, overrides={})
|
||||||
print(
|
out.green(f"created config file for {mail_domain} in {args.inipath}")
|
||||||
f"[WARNING] Force argument was provided, deleting config file: {inipath}"
|
|
||||||
)
|
|
||||||
inipath.unlink()
|
|
||||||
|
|
||||||
write_initial_config(inipath, mail_domain, overrides={})
|
|
||||||
out.green(f"created config file for {mail_domain} in {inipath}")
|
|
||||||
|
|
||||||
|
|
||||||
def run_cmd_options(parser):
|
def run_cmd_options(parser):
|
||||||
@@ -72,12 +59,10 @@ def run_cmd_options(parser):
|
|||||||
help="install/upgrade the server, but disable postfix & dovecot for now",
|
help="install/upgrade the server, but disable postfix & dovecot for now",
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--skip-dns-check",
|
"--ssh-host",
|
||||||
dest="dns_check_disabled",
|
dest="ssh_host",
|
||||||
action="store_true",
|
help="Deploy to 'localhost' or to a specific SSH host",
|
||||||
help="disable checks nslookup for dns",
|
|
||||||
)
|
)
|
||||||
add_ssh_host_option(parser)
|
|
||||||
|
|
||||||
|
|
||||||
def run_cmd(args, out):
|
def run_cmd(args, out):
|
||||||
@@ -86,20 +71,19 @@ def run_cmd(args, out):
|
|||||||
ssh_host = args.ssh_host if args.ssh_host else args.config.mail_domain
|
ssh_host = args.ssh_host if args.ssh_host else args.config.mail_domain
|
||||||
sshexec = get_sshexec(ssh_host)
|
sshexec = get_sshexec(ssh_host)
|
||||||
require_iroh = args.config.enable_iroh_relay
|
require_iroh = args.config.enable_iroh_relay
|
||||||
if not args.dns_check_disabled:
|
remote_data = dns.get_initial_remote_data(sshexec, args.config.mail_domain)
|
||||||
remote_data = dns.get_initial_remote_data(sshexec, args.config.mail_domain)
|
if not dns.check_initial_remote_data(remote_data, print=out.red):
|
||||||
if not dns.check_initial_remote_data(remote_data, print=out.red):
|
return 1
|
||||||
return 1
|
|
||||||
|
|
||||||
env = os.environ.copy()
|
env = os.environ.copy()
|
||||||
env["CHATMAIL_INI"] = args.inipath
|
env["CHATMAIL_INI"] = args.inipath
|
||||||
env["CHATMAIL_DISABLE_MAIL"] = "True" if args.disable_mail else ""
|
env["CHATMAIL_DISABLE_MAIL"] = "True" if args.disable_mail else ""
|
||||||
env["CHATMAIL_REQUIRE_IROH"] = "True" if require_iroh else ""
|
env["CHATMAIL_REQUIRE_IROH"] = "True" if require_iroh else ""
|
||||||
deploy_path = importlib.resources.files(__package__).joinpath("run.py").resolve()
|
deploy_path = importlib.resources.files(__package__).joinpath("deploy.py").resolve()
|
||||||
pyinf = "pyinfra --dry" if args.dry_run else "pyinfra"
|
pyinf = "pyinfra --dry" if args.dry_run else "pyinfra"
|
||||||
|
|
||||||
cmd = f"{pyinf} --ssh-user root {ssh_host} {deploy_path} -y"
|
cmd = f"{pyinf} --ssh-user root {ssh_host} {deploy_path} -y"
|
||||||
if ssh_host in ["localhost", "@docker"]:
|
if ssh_host == "localhost":
|
||||||
cmd = f"{pyinf} @local {deploy_path} -y"
|
cmd = f"{pyinf} @local {deploy_path} -y"
|
||||||
|
|
||||||
if version.parse(pyinfra.__version__) < version.parse("3"):
|
if version.parse(pyinfra.__version__) < version.parse("3"):
|
||||||
@@ -109,6 +93,14 @@ def run_cmd(args, out):
|
|||||||
try:
|
try:
|
||||||
retcode = out.check_call(cmd, env=env)
|
retcode = out.check_call(cmd, env=env)
|
||||||
if retcode == 0:
|
if retcode == 0:
|
||||||
|
print("\nYou can try out the relay by talking to this echo bot: ")
|
||||||
|
sshexec = SSHExec(args.config.mail_domain, verbose=args.verbose)
|
||||||
|
print(
|
||||||
|
sshexec(
|
||||||
|
call=remote.rshell.shell,
|
||||||
|
kwargs=dict(command="cat /var/lib/echobot/invite-link.txt"),
|
||||||
|
)
|
||||||
|
)
|
||||||
out.green("Deploy completed, call `cmdeploy dns` next.")
|
out.green("Deploy completed, call `cmdeploy dns` next.")
|
||||||
elif not remote_data["acme_account_url"]:
|
elif not remote_data["acme_account_url"]:
|
||||||
out.red("Deploy completed but letsencrypt not configured")
|
out.red("Deploy completed but letsencrypt not configured")
|
||||||
@@ -130,7 +122,11 @@ def dns_cmd_options(parser):
|
|||||||
default=None,
|
default=None,
|
||||||
help="write out a zonefile",
|
help="write out a zonefile",
|
||||||
)
|
)
|
||||||
add_ssh_host_option(parser)
|
parser.add_argument(
|
||||||
|
"--ssh-host",
|
||||||
|
dest="ssh_host",
|
||||||
|
help="Run the DNS queries on 'localhost' or on a specific SSH host",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def dns_cmd(args, out):
|
def dns_cmd(args, out):
|
||||||
@@ -162,15 +158,10 @@ def dns_cmd(args, out):
|
|||||||
return retcode
|
return retcode
|
||||||
|
|
||||||
|
|
||||||
def status_cmd_options(parser):
|
|
||||||
add_ssh_host_option(parser)
|
|
||||||
|
|
||||||
|
|
||||||
def status_cmd(args, out):
|
def status_cmd(args, out):
|
||||||
"""Display status for online chatmail instance."""
|
"""Display status for online chatmail instance."""
|
||||||
|
|
||||||
ssh_host = args.ssh_host if args.ssh_host else args.config.mail_domain
|
sshexec = args.get_sshexec()
|
||||||
sshexec = get_sshexec(ssh_host, verbose=args.verbose)
|
|
||||||
|
|
||||||
out.green(f"chatmail domain: {args.config.mail_domain}")
|
out.green(f"chatmail domain: {args.config.mail_domain}")
|
||||||
if args.config.privacy_mail:
|
if args.config.privacy_mail:
|
||||||
@@ -217,6 +208,61 @@ def test_cmd(args, out):
|
|||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
|
||||||
|
def proxy_cmd_options(parser: argparse.ArgumentParser):
|
||||||
|
parser.add_argument(
|
||||||
|
"ip_address",
|
||||||
|
help="specify a server to deploy to; can also be an inventory.py file",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--relay-ipv4",
|
||||||
|
dest="relay_ipv4",
|
||||||
|
help="The ipv4 address of the relay you want to forward traffic to",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--relay-ipv6",
|
||||||
|
dest="relay_ipv6",
|
||||||
|
help="The ipv6 address of the relay you want to forward traffic to",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--dry-run",
|
||||||
|
dest="dry_run",
|
||||||
|
action="store_true",
|
||||||
|
help="don't actually modify the server",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def proxy_cmd(args, out):
|
||||||
|
"""Deploy reverse proxy on a second server."""
|
||||||
|
env = os.environ.copy()
|
||||||
|
env["RELAY_IPV4"] = args.relay_ipv4
|
||||||
|
env["RELAY_IPV6"] = args.relay_ipv6
|
||||||
|
deploy_path = importlib.resources.files(__package__).joinpath("proxy-deploy.py").resolve()
|
||||||
|
pyinf = "pyinfra --dry" if args.dry_run else "pyinfra"
|
||||||
|
|
||||||
|
sshexec = args.get_sshexec()
|
||||||
|
# :todo make sure relay is deployed to args.relay_ipv4 and args.relay_ipv6
|
||||||
|
|
||||||
|
# abort if IP address == the chatmail relay itself: if port 22 is open AND /etc/chatmail-version exists
|
||||||
|
if sshexec.logged(call=remote.rshell.get_port_service, args=[22]):
|
||||||
|
if sshexec.logged(call=remote.rshell.chatmail_version):
|
||||||
|
out.red("Can not deploy proxy on the chatmail relay itself, use another server")
|
||||||
|
return 1
|
||||||
|
cmd = f"{pyinf} --ssh-user root {args.ip_address} {deploy_path} -y"
|
||||||
|
out.check_call(cmd, env=env) # during first try, only set SSH port to 2222
|
||||||
|
|
||||||
|
cmd = f"{pyinf} --ssh-port 2222 --ssh-user root {args.ip_address} {deploy_path} -y"
|
||||||
|
try:
|
||||||
|
retcode = out.check_call(cmd, env=env)
|
||||||
|
if retcode == 0:
|
||||||
|
out.green("Reverse proxy deployed - you can distribute the IP address now.")
|
||||||
|
else:
|
||||||
|
out.red("Deploying reverse proxy failed")
|
||||||
|
except subprocess.CalledProcessError:
|
||||||
|
out.red("Deploying reverse proxy failed")
|
||||||
|
retcode = 1
|
||||||
|
return retcode
|
||||||
|
|
||||||
|
|
||||||
def fmt_cmd_options(parser):
|
def fmt_cmd_options(parser):
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--check",
|
"--check",
|
||||||
@@ -229,12 +275,7 @@ def fmt_cmd_options(parser):
|
|||||||
def fmt_cmd(args, out):
|
def fmt_cmd(args, out):
|
||||||
"""Run formattting fixes on all chatmail source code."""
|
"""Run formattting fixes on all chatmail source code."""
|
||||||
|
|
||||||
chatmaild_dir = importlib.resources.files("chatmaild").resolve()
|
sources = [str(importlib.resources.files(x)) for x in ("chatmaild", "cmdeploy")]
|
||||||
cmdeploy_dir = chatmaild_dir.joinpath(
|
|
||||||
"..", "..", "..", "cmdeploy", "src", "cmdeploy"
|
|
||||||
).resolve()
|
|
||||||
sources = [str(chatmaild_dir), str(cmdeploy_dir)]
|
|
||||||
|
|
||||||
format_args = [shutil.which("ruff"), "format"]
|
format_args = [shutil.which("ruff"), "format"]
|
||||||
check_args = [shutil.which("ruff"), "check"]
|
check_args = [shutil.which("ruff"), "check"]
|
||||||
|
|
||||||
@@ -300,15 +341,6 @@ class Out:
|
|||||||
return proc.returncode
|
return proc.returncode
|
||||||
|
|
||||||
|
|
||||||
def add_ssh_host_option(parser):
|
|
||||||
parser.add_argument(
|
|
||||||
"--ssh-host",
|
|
||||||
dest="ssh_host",
|
|
||||||
help="Run commands on 'localhost', via '@docker', or on a specific SSH host "
|
|
||||||
"instead of chatmail.ini's mail_domain.",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def add_config_option(parser):
|
def add_config_option(parser):
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--config",
|
"--config",
|
||||||
@@ -366,9 +398,7 @@ def get_parser():
|
|||||||
|
|
||||||
def get_sshexec(ssh_host: str, verbose=True):
|
def get_sshexec(ssh_host: str, verbose=True):
|
||||||
if ssh_host in ["localhost", "@local"]:
|
if ssh_host in ["localhost", "@local"]:
|
||||||
return LocalExec(verbose, docker=False)
|
return "localhost"
|
||||||
elif ssh_host == "@docker":
|
|
||||||
return LocalExec(verbose, docker=True)
|
|
||||||
if verbose:
|
if verbose:
|
||||||
print(f"[ssh] login to {ssh_host}")
|
print(f"[ssh] login to {ssh_host}")
|
||||||
return SSHExec(ssh_host, verbose=verbose)
|
return SSHExec(ssh_host, verbose=verbose)
|
||||||
|
|||||||
@@ -3,9 +3,7 @@ import os
|
|||||||
|
|
||||||
import pyinfra
|
import pyinfra
|
||||||
|
|
||||||
# pyinfra runs this module as a python file and not as a module so
|
from cmdeploy import deploy_chatmail
|
||||||
# import paths must be absolute
|
|
||||||
from cmdeploy.deployers import deploy_chatmail
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
@@ -1,581 +0,0 @@
|
|||||||
"""
|
|
||||||
Chat Mail pyinfra deploy.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import shutil
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
from io import StringIO
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
from chatmaild.config import read_config
|
|
||||||
from pyinfra import facts, host, logger
|
|
||||||
from pyinfra.api import FactBase
|
|
||||||
from pyinfra.facts.files import Sha256File
|
|
||||||
from pyinfra.facts.systemd import SystemdEnabled
|
|
||||||
from pyinfra.operations import apt, files, pip, server, systemd
|
|
||||||
|
|
||||||
from cmdeploy.cmdeploy import Out
|
|
||||||
|
|
||||||
from .acmetool import AcmetoolDeployer
|
|
||||||
from .basedeploy import (
|
|
||||||
Deployer,
|
|
||||||
Deployment,
|
|
||||||
activate_remote_units,
|
|
||||||
configure_remote_units,
|
|
||||||
get_resource,
|
|
||||||
)
|
|
||||||
from .dovecot.deployer import DovecotDeployer
|
|
||||||
from .mtail.deployer import MtailDeployer
|
|
||||||
from .nginx.deployer import NginxDeployer
|
|
||||||
from .opendkim.deployer import OpendkimDeployer
|
|
||||||
from .postfix.deployer import PostfixDeployer
|
|
||||||
from .www import build_webpages, find_merge_conflict, get_paths
|
|
||||||
|
|
||||||
|
|
||||||
class Port(FactBase):
|
|
||||||
"""
|
|
||||||
Returns the process occuping a port.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def command(self, port: int) -> str:
|
|
||||||
return (
|
|
||||||
"ss -lptn 'src :%d' | awk 'NR>1 {print $6,$7}' | sed 's/users:((\"//;s/\".*//'"
|
|
||||||
% (port,)
|
|
||||||
)
|
|
||||||
|
|
||||||
def process(self, output: [str]) -> str:
|
|
||||||
return output[0]
|
|
||||||
|
|
||||||
|
|
||||||
def _build_chatmaild(dist_dir) -> None:
|
|
||||||
dist_dir = Path(dist_dir).resolve()
|
|
||||||
if dist_dir.exists():
|
|
||||||
shutil.rmtree(dist_dir)
|
|
||||||
dist_dir.mkdir()
|
|
||||||
subprocess.check_output(
|
|
||||||
[sys.executable, "-m", "build", "-n"]
|
|
||||||
+ ["--sdist", "chatmaild", "--outdir", str(dist_dir)]
|
|
||||||
)
|
|
||||||
entries = list(dist_dir.iterdir())
|
|
||||||
assert len(entries) == 1
|
|
||||||
return entries[0]
|
|
||||||
|
|
||||||
|
|
||||||
def remove_legacy_artifacts():
|
|
||||||
# disable legacy doveauth-dictproxy.service
|
|
||||||
if host.get_fact(SystemdEnabled).get("doveauth-dictproxy.service"):
|
|
||||||
systemd.service(
|
|
||||||
name="Disable legacy doveauth-dictproxy.service",
|
|
||||||
service="doveauth-dictproxy.service",
|
|
||||||
running=False,
|
|
||||||
enabled=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _install_remote_venv_with_chatmaild() -> None:
|
|
||||||
remove_legacy_artifacts()
|
|
||||||
dist_file = _build_chatmaild(dist_dir=Path("chatmaild/dist"))
|
|
||||||
remote_base_dir = "/usr/local/lib/chatmaild"
|
|
||||||
remote_dist_file = f"{remote_base_dir}/dist/{dist_file.name}"
|
|
||||||
remote_venv_dir = f"{remote_base_dir}/venv"
|
|
||||||
root_owned = dict(user="root", group="root", mode="644")
|
|
||||||
|
|
||||||
apt.packages(
|
|
||||||
name="apt install python3-virtualenv",
|
|
||||||
packages=["python3-virtualenv"],
|
|
||||||
)
|
|
||||||
|
|
||||||
files.put(
|
|
||||||
name="Upload chatmaild source package",
|
|
||||||
src=dist_file.open("rb"),
|
|
||||||
dest=remote_dist_file,
|
|
||||||
create_remote_dir=True,
|
|
||||||
**root_owned,
|
|
||||||
)
|
|
||||||
|
|
||||||
pip.virtualenv(
|
|
||||||
name=f"chatmaild virtualenv {remote_venv_dir}",
|
|
||||||
path=remote_venv_dir,
|
|
||||||
always_copy=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
apt.packages(
|
|
||||||
name="install gcc and headers to build crypt_r source package",
|
|
||||||
packages=["gcc", "python3-dev"],
|
|
||||||
)
|
|
||||||
|
|
||||||
server.shell(
|
|
||||||
name=f"forced pip-install {dist_file.name}",
|
|
||||||
commands=[
|
|
||||||
f"{remote_venv_dir}/bin/pip install --force-reinstall {remote_dist_file}"
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _configure_remote_venv_with_chatmaild(config) -> None:
|
|
||||||
remote_base_dir = "/usr/local/lib/chatmaild"
|
|
||||||
remote_venv_dir = f"{remote_base_dir}/venv"
|
|
||||||
remote_chatmail_inipath = f"{remote_base_dir}/chatmail.ini"
|
|
||||||
root_owned = dict(user="root", group="root", mode="644")
|
|
||||||
|
|
||||||
files.put(
|
|
||||||
name=f"Upload {remote_chatmail_inipath}",
|
|
||||||
src=config._getbytefile(),
|
|
||||||
dest=remote_chatmail_inipath,
|
|
||||||
**root_owned,
|
|
||||||
)
|
|
||||||
|
|
||||||
files.template(
|
|
||||||
src=get_resource("metrics.cron.j2"),
|
|
||||||
dest="/etc/cron.d/chatmail-metrics",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
config={
|
|
||||||
"mailboxes_dir": config.mailboxes_dir,
|
|
||||||
"execpath": f"{remote_venv_dir}/bin/chatmail-metrics",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class UnboundDeployer(Deployer):
|
|
||||||
def install(self):
|
|
||||||
# Run local DNS resolver `unbound`.
|
|
||||||
# `resolvconf` takes care of setting up /etc/resolv.conf
|
|
||||||
# to use 127.0.0.1 as the resolver.
|
|
||||||
|
|
||||||
#
|
|
||||||
# On an IPv4-only system, if unbound is started but not
|
|
||||||
# configured, it causes subsequent steps to fail to resolve hosts.
|
|
||||||
# Here, we use policy-rc.d to prevent unbound from starting up
|
|
||||||
# on initial install. Later, we will configure it and start it.
|
|
||||||
#
|
|
||||||
# For documentation about policy-rc.d, see:
|
|
||||||
# https://people.debian.org/~hmh/invokerc.d-policyrc.d-specification.txt
|
|
||||||
#
|
|
||||||
files.put(
|
|
||||||
src=get_resource("policy-rc.d"),
|
|
||||||
dest="/usr/sbin/policy-rc.d",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="755",
|
|
||||||
)
|
|
||||||
|
|
||||||
apt.packages(
|
|
||||||
name="Install unbound",
|
|
||||||
packages=["unbound", "unbound-anchor", "dnsutils"],
|
|
||||||
)
|
|
||||||
|
|
||||||
files.file("/usr/sbin/policy-rc.d", present=False)
|
|
||||||
|
|
||||||
def configure(self):
|
|
||||||
server.shell(
|
|
||||||
name="Generate root keys for validating DNSSEC",
|
|
||||||
commands=[
|
|
||||||
"unbound-anchor -a /var/lib/unbound/root.key || true",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
def activate(self):
|
|
||||||
server.shell(
|
|
||||||
name="Generate root keys for validating DNSSEC",
|
|
||||||
commands=[
|
|
||||||
"systemctl reset-failed unbound.service",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
systemd.service(
|
|
||||||
name="Start and enable unbound",
|
|
||||||
service="unbound.service",
|
|
||||||
running=True,
|
|
||||||
enabled=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class MtastsDeployer(Deployer):
|
|
||||||
def configure(self):
|
|
||||||
# Remove configuration.
|
|
||||||
files.file("/etc/mta-sts-daemon.yml", present=False)
|
|
||||||
files.directory("/usr/local/lib/postfix-mta-sts-resolver", present=False)
|
|
||||||
files.file("/etc/systemd/system/mta-sts-daemon.service", present=False)
|
|
||||||
|
|
||||||
def activate(self):
|
|
||||||
systemd.service(
|
|
||||||
name="Stop MTA-STS daemon",
|
|
||||||
service="mta-sts-daemon.service",
|
|
||||||
daemon_reload=True,
|
|
||||||
running=False,
|
|
||||||
enabled=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class WebsiteDeployer(Deployer):
|
|
||||||
def __init__(self, config):
|
|
||||||
self.config = config
|
|
||||||
|
|
||||||
def install(self):
|
|
||||||
files.directory(
|
|
||||||
name="Ensure /var/www exists",
|
|
||||||
path="/var/www",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="755",
|
|
||||||
present=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
def configure(self):
|
|
||||||
www_path, src_dir, build_dir = get_paths(self.config)
|
|
||||||
# if www_folder was set to a non-existing folder, skip upload
|
|
||||||
if not www_path.is_dir():
|
|
||||||
logger.warning("Building web pages is disabled in chatmail.ini, skipping")
|
|
||||||
elif (path := find_merge_conflict(src_dir)) is not None:
|
|
||||||
logger.warning(
|
|
||||||
f"Merge conflict found in {path}, skipping website deployment. Fix merge conflict if you want to upload your web page."
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
# if www_folder is a hugo page, build it
|
|
||||||
if build_dir:
|
|
||||||
www_path = build_webpages(src_dir, build_dir, self.config)
|
|
||||||
# if it is not a hugo page, upload it as is
|
|
||||||
files.rsync(
|
|
||||||
f"{www_path}/", "/var/www/html", flags=["-avz", "--chown=www-data"]
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class LegacyRemoveDeployer(Deployer):
|
|
||||||
def install(self):
|
|
||||||
apt.packages(name="Remove rspamd", packages="rspamd", present=False)
|
|
||||||
|
|
||||||
# remove historic expunge script
|
|
||||||
# which is now implemented through a systemd timer (chatmail-expire)
|
|
||||||
files.file(
|
|
||||||
path="/etc/cron.d/expunge",
|
|
||||||
present=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Remove OBS repository key that is no longer used.
|
|
||||||
files.file("/etc/apt/keyrings/obs-home-deltachat.gpg", present=False)
|
|
||||||
files.line(
|
|
||||||
name="Remove DeltaChat OBS home repository from sources.list",
|
|
||||||
path="/etc/apt/sources.list",
|
|
||||||
line="deb [signed-by=/etc/apt/keyrings/obs-home-deltachat.gpg] https://download.opensuse.org/repositories/home:/deltachat/Debian_12/ ./",
|
|
||||||
escape_regex_characters=True,
|
|
||||||
present=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
# prior relay versions used filelogging
|
|
||||||
files.directory(
|
|
||||||
name="Ensure old logs on disk are deleted",
|
|
||||||
path="/var/log/journal/",
|
|
||||||
present=False,
|
|
||||||
)
|
|
||||||
# remove echobot if it is still running
|
|
||||||
if host.get_fact(SystemdEnabled).get("echobot.service"):
|
|
||||||
systemd.service(
|
|
||||||
name="Disable echobot.service",
|
|
||||||
service="echobot.service",
|
|
||||||
running=False,
|
|
||||||
enabled=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def check_config(config):
|
|
||||||
mail_domain = config.mail_domain
|
|
||||||
if mail_domain != "testrun.org" and not mail_domain.endswith(".testrun.org"):
|
|
||||||
blocked_words = "merlinux schmieder testrun.org".split()
|
|
||||||
for key in config.__dict__:
|
|
||||||
value = config.__dict__[key]
|
|
||||||
if key.startswith("privacy") and any(
|
|
||||||
x in str(value) for x in blocked_words
|
|
||||||
):
|
|
||||||
raise ValueError(
|
|
||||||
f"please set your own privacy contacts/addresses in {config._inipath}"
|
|
||||||
)
|
|
||||||
return config
|
|
||||||
|
|
||||||
|
|
||||||
class TurnDeployer(Deployer):
|
|
||||||
def __init__(self, mail_domain):
|
|
||||||
self.mail_domain = mail_domain
|
|
||||||
self.units = ["turnserver"]
|
|
||||||
|
|
||||||
def install(self):
|
|
||||||
(url, sha256sum) = {
|
|
||||||
"x86_64": (
|
|
||||||
"https://github.com/chatmail/chatmail-turn/releases/download/v0.3/chatmail-turn-x86_64-linux",
|
|
||||||
"841e527c15fdc2940b0469e206188ea8f0af48533be12ecb8098520f813d41e4",
|
|
||||||
),
|
|
||||||
"aarch64": (
|
|
||||||
"https://github.com/chatmail/chatmail-turn/releases/download/v0.3/chatmail-turn-aarch64-linux",
|
|
||||||
"a5fc2d06d937b56a34e098d2cd72a82d3e89967518d159bf246dc69b65e81b42",
|
|
||||||
),
|
|
||||||
}[host.get_fact(facts.server.Arch)]
|
|
||||||
|
|
||||||
existing_sha256sum = host.get_fact(Sha256File, "/usr/local/bin/chatmail-turn")
|
|
||||||
if existing_sha256sum != sha256sum:
|
|
||||||
server.shell(
|
|
||||||
name="Download chatmail-turn",
|
|
||||||
commands=[
|
|
||||||
f"(curl -L {url} >/usr/local/bin/chatmail-turn.new && (echo '{sha256sum} /usr/local/bin/chatmail-turn.new' | sha256sum -c) && mv /usr/local/bin/chatmail-turn.new /usr/local/bin/chatmail-turn)",
|
|
||||||
"chmod 755 /usr/local/bin/chatmail-turn",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
def configure(self):
|
|
||||||
configure_remote_units(self.mail_domain, self.units)
|
|
||||||
|
|
||||||
def activate(self):
|
|
||||||
activate_remote_units(self.units)
|
|
||||||
|
|
||||||
|
|
||||||
class IrohDeployer(Deployer):
|
|
||||||
def __init__(self, enable_iroh_relay):
|
|
||||||
self.enable_iroh_relay = enable_iroh_relay
|
|
||||||
|
|
||||||
def install(self):
|
|
||||||
(url, sha256sum) = {
|
|
||||||
"x86_64": (
|
|
||||||
"https://github.com/n0-computer/iroh/releases/download/v0.35.0/iroh-relay-v0.35.0-x86_64-unknown-linux-musl.tar.gz",
|
|
||||||
"45c81199dbd70f8c4c30fef7f3b9727ca6e3cea8f2831333eeaf8aa71bf0fac1",
|
|
||||||
),
|
|
||||||
"aarch64": (
|
|
||||||
"https://github.com/n0-computer/iroh/releases/download/v0.35.0/iroh-relay-v0.35.0-aarch64-unknown-linux-musl.tar.gz",
|
|
||||||
"f8ef27631fac213b3ef668d02acd5b3e215292746a3fc71d90c63115446008b1",
|
|
||||||
),
|
|
||||||
}[host.get_fact(facts.server.Arch)]
|
|
||||||
|
|
||||||
existing_sha256sum = host.get_fact(Sha256File, "/usr/local/bin/iroh-relay")
|
|
||||||
if existing_sha256sum != sha256sum:
|
|
||||||
server.shell(
|
|
||||||
name="Download iroh-relay",
|
|
||||||
commands=[
|
|
||||||
f"(curl -L {url} | gunzip | tar -x -f - ./iroh-relay -O >/usr/local/bin/iroh-relay.new && (echo '{sha256sum} /usr/local/bin/iroh-relay.new' | sha256sum -c) && mv /usr/local/bin/iroh-relay.new /usr/local/bin/iroh-relay)",
|
|
||||||
"chmod 755 /usr/local/bin/iroh-relay",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
self.need_restart = True
|
|
||||||
|
|
||||||
def configure(self):
|
|
||||||
systemd_unit = files.put(
|
|
||||||
name="Upload iroh-relay systemd unit",
|
|
||||||
src=get_resource("iroh-relay.service"),
|
|
||||||
dest="/etc/systemd/system/iroh-relay.service",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
self.need_restart |= systemd_unit.changed
|
|
||||||
|
|
||||||
iroh_config = files.put(
|
|
||||||
name="Upload iroh-relay config",
|
|
||||||
src=get_resource("iroh-relay.toml"),
|
|
||||||
dest="/etc/iroh-relay.toml",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
self.need_restart |= iroh_config.changed
|
|
||||||
|
|
||||||
def activate(self):
|
|
||||||
systemd.service(
|
|
||||||
name="Start and enable iroh-relay",
|
|
||||||
service="iroh-relay.service",
|
|
||||||
running=True,
|
|
||||||
enabled=self.enable_iroh_relay,
|
|
||||||
restarted=self.need_restart,
|
|
||||||
)
|
|
||||||
self.need_restart = False
|
|
||||||
|
|
||||||
|
|
||||||
class JournaldDeployer(Deployer):
|
|
||||||
def configure(self):
|
|
||||||
journald_conf = files.put(
|
|
||||||
name="Configure journald",
|
|
||||||
src=get_resource("journald.conf"),
|
|
||||||
dest="/etc/systemd/journald.conf",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
self.need_restart = journald_conf.changed
|
|
||||||
|
|
||||||
def activate(self):
|
|
||||||
systemd.service(
|
|
||||||
name="Start and enable journald",
|
|
||||||
service="systemd-journald.service",
|
|
||||||
running=True,
|
|
||||||
enabled=True,
|
|
||||||
restarted=self.need_restart,
|
|
||||||
)
|
|
||||||
self.need_restart = False
|
|
||||||
|
|
||||||
|
|
||||||
class ChatmailVenvDeployer(Deployer):
|
|
||||||
def __init__(self, config):
|
|
||||||
self.config = config
|
|
||||||
self.units = (
|
|
||||||
"filtermail",
|
|
||||||
"filtermail-incoming",
|
|
||||||
"chatmail-metadata",
|
|
||||||
"lastlogin",
|
|
||||||
"chatmail-expire",
|
|
||||||
"chatmail-expire.timer",
|
|
||||||
"chatmail-fsreport",
|
|
||||||
"chatmail-fsreport.timer",
|
|
||||||
)
|
|
||||||
|
|
||||||
def install(self):
|
|
||||||
_install_remote_venv_with_chatmaild()
|
|
||||||
|
|
||||||
def configure(self):
|
|
||||||
_configure_remote_venv_with_chatmaild(self.config)
|
|
||||||
configure_remote_units(self.config.mail_domain, self.units)
|
|
||||||
|
|
||||||
def activate(self):
|
|
||||||
activate_remote_units(self.units)
|
|
||||||
|
|
||||||
|
|
||||||
class ChatmailDeployer(Deployer):
|
|
||||||
required_users = [
|
|
||||||
("vmail", "vmail", None),
|
|
||||||
("echobot", None, None),
|
|
||||||
("iroh", None, None),
|
|
||||||
]
|
|
||||||
|
|
||||||
def __init__(self, mail_domain):
|
|
||||||
self.mail_domain = mail_domain
|
|
||||||
|
|
||||||
def install(self):
|
|
||||||
apt.update(name="apt update", cache_time=24 * 3600)
|
|
||||||
apt.upgrade(name="upgrade apt packages", auto_remove=True)
|
|
||||||
|
|
||||||
apt.packages(
|
|
||||||
name="Install curl",
|
|
||||||
packages=["curl"],
|
|
||||||
)
|
|
||||||
|
|
||||||
apt.packages(
|
|
||||||
name="Install rsync",
|
|
||||||
packages=["rsync"],
|
|
||||||
)
|
|
||||||
apt.packages(
|
|
||||||
name="Ensure cron is installed",
|
|
||||||
packages=["cron"],
|
|
||||||
)
|
|
||||||
|
|
||||||
def configure(self):
|
|
||||||
# This file is used by auth proxy.
|
|
||||||
# https://wiki.debian.org/EtcMailName
|
|
||||||
server.shell(
|
|
||||||
name="Setup /etc/mailname",
|
|
||||||
commands=[
|
|
||||||
f"echo {self.mail_domain} >/etc/mailname; chmod 644 /etc/mailname"
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class FcgiwrapDeployer(Deployer):
|
|
||||||
def install(self):
|
|
||||||
apt.packages(
|
|
||||||
name="Install fcgiwrap",
|
|
||||||
packages=["fcgiwrap"],
|
|
||||||
)
|
|
||||||
|
|
||||||
def activate(self):
|
|
||||||
systemd.service(
|
|
||||||
name="Start and enable fcgiwrap",
|
|
||||||
service="fcgiwrap.service",
|
|
||||||
running=True,
|
|
||||||
enabled=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class GithashDeployer(Deployer):
|
|
||||||
def activate(self):
|
|
||||||
try:
|
|
||||||
git_hash = subprocess.check_output(["git", "rev-parse", "HEAD"]).decode()
|
|
||||||
except Exception:
|
|
||||||
git_hash = "unknown\n"
|
|
||||||
try:
|
|
||||||
git_diff = subprocess.check_output(["git", "diff"]).decode()
|
|
||||||
except Exception:
|
|
||||||
git_diff = ""
|
|
||||||
files.put(
|
|
||||||
name="Upload chatmail relay git commiit hash",
|
|
||||||
src=StringIO(git_hash + git_diff),
|
|
||||||
dest="/etc/chatmail-version",
|
|
||||||
mode="700",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def deploy_chatmail(config_path: Path, disable_mail: bool) -> None:
|
|
||||||
"""Deploy a chat-mail instance.
|
|
||||||
|
|
||||||
:param config_path: path to chatmail.ini
|
|
||||||
:param disable_mail: whether to disable postfix & dovecot
|
|
||||||
"""
|
|
||||||
config = read_config(config_path)
|
|
||||||
check_config(config)
|
|
||||||
mail_domain = config.mail_domain
|
|
||||||
|
|
||||||
if host.get_fact(Port, port=53) != "unbound":
|
|
||||||
files.line(
|
|
||||||
name="Add 9.9.9.9 to resolv.conf",
|
|
||||||
path="/etc/resolv.conf",
|
|
||||||
line="nameserver 9.9.9.9",
|
|
||||||
)
|
|
||||||
|
|
||||||
port_services = [
|
|
||||||
(["master", "smtpd"], 25),
|
|
||||||
("unbound", 53),
|
|
||||||
("acmetool", 80),
|
|
||||||
(["imap-login", "dovecot"], 143),
|
|
||||||
("nginx", 443),
|
|
||||||
(["master", "smtpd"], 465),
|
|
||||||
(["master", "smtpd"], 587),
|
|
||||||
(["imap-login", "dovecot"], 993),
|
|
||||||
("iroh-relay", 3340),
|
|
||||||
("nginx", 8443),
|
|
||||||
(["master", "smtpd"], config.postfix_reinject_port),
|
|
||||||
(["master", "smtpd"], config.postfix_reinject_port_incoming),
|
|
||||||
("filtermail", config.filtermail_smtp_port),
|
|
||||||
("filtermail", config.filtermail_smtp_port_incoming),
|
|
||||||
]
|
|
||||||
for service, port in port_services:
|
|
||||||
print(f"Checking if port {port} is available for {service}...")
|
|
||||||
running_service = host.get_fact(Port, port=port)
|
|
||||||
if running_service:
|
|
||||||
if running_service not in service:
|
|
||||||
Out().red(
|
|
||||||
f"Deploy failed: port {port} is occupied by: {running_service}"
|
|
||||||
)
|
|
||||||
exit(1)
|
|
||||||
|
|
||||||
tls_domains = [mail_domain, f"mta-sts.{mail_domain}", f"www.{mail_domain}"]
|
|
||||||
|
|
||||||
all_deployers = [
|
|
||||||
ChatmailDeployer(mail_domain),
|
|
||||||
LegacyRemoveDeployer(),
|
|
||||||
JournaldDeployer(),
|
|
||||||
UnboundDeployer(),
|
|
||||||
TurnDeployer(mail_domain),
|
|
||||||
IrohDeployer(config.enable_iroh_relay),
|
|
||||||
AcmetoolDeployer(config.acme_email, tls_domains),
|
|
||||||
WebsiteDeployer(config),
|
|
||||||
ChatmailVenvDeployer(config),
|
|
||||||
MtastsDeployer(),
|
|
||||||
OpendkimDeployer(mail_domain),
|
|
||||||
# Dovecot should be started before Postfix
|
|
||||||
# because it creates authentication socket
|
|
||||||
# required by Postfix.
|
|
||||||
DovecotDeployer(config, disable_mail),
|
|
||||||
PostfixDeployer(config, disable_mail),
|
|
||||||
FcgiwrapDeployer(),
|
|
||||||
NginxDeployer(config),
|
|
||||||
MtailDeployer(config.mtail_address),
|
|
||||||
GithashDeployer(),
|
|
||||||
]
|
|
||||||
|
|
||||||
Deployment().perform_stages(all_deployers)
|
|
||||||
@@ -7,9 +7,13 @@ from . import remote
|
|||||||
|
|
||||||
|
|
||||||
def get_initial_remote_data(sshexec, mail_domain):
|
def get_initial_remote_data(sshexec, mail_domain):
|
||||||
return sshexec.logged(
|
if sshexec == "localhost":
|
||||||
call=remote.rdns.perform_initial_checks, kwargs=dict(mail_domain=mail_domain)
|
result = remote.rdns.perform_initial_checks(mail_domain)
|
||||||
)
|
else:
|
||||||
|
result = sshexec.logged(
|
||||||
|
call=remote.rdns.perform_initial_checks, kwargs=dict(mail_domain=mail_domain)
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
def check_initial_remote_data(remote_data, *, print=print):
|
def check_initial_remote_data(remote_data, *, print=print):
|
||||||
@@ -44,10 +48,14 @@ def check_full_zone(sshexec, remote_data, out, zonefile) -> int:
|
|||||||
"""Check existing DNS records, optionally write them to zone file
|
"""Check existing DNS records, optionally write them to zone file
|
||||||
and return (exitcode, remote_data) tuple."""
|
and return (exitcode, remote_data) tuple."""
|
||||||
|
|
||||||
required_diff, recommended_diff = sshexec.logged(
|
if sshexec == "localhost":
|
||||||
remote.rdns.check_zonefile,
|
required_diff, recommended_diff = remote.rdns.check_zonefile(
|
||||||
kwargs=dict(zonefile=zonefile, verbose=False),
|
zonefile=zonefile, verbose=False
|
||||||
)
|
)
|
||||||
|
else:
|
||||||
|
required_diff, recommended_diff = sshexec.logged(
|
||||||
|
remote.rdns.check_zonefile, kwargs=dict(zonefile=zonefile, verbose=False),
|
||||||
|
)
|
||||||
|
|
||||||
returncode = 0
|
returncode = 0
|
||||||
if required_diff:
|
if required_diff:
|
||||||
|
|||||||
@@ -1,137 +0,0 @@
|
|||||||
from chatmaild.config import Config
|
|
||||||
from pyinfra import host
|
|
||||||
from pyinfra.facts.server import Arch, Sysctl
|
|
||||||
from pyinfra.facts.systemd import SystemdEnabled
|
|
||||||
from pyinfra.operations import apt, files, server, systemd
|
|
||||||
|
|
||||||
from cmdeploy.basedeploy import (
|
|
||||||
Deployer,
|
|
||||||
activate_remote_units,
|
|
||||||
configure_remote_units,
|
|
||||||
get_resource,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class DovecotDeployer(Deployer):
|
|
||||||
def __init__(self, config, disable_mail):
|
|
||||||
self.config = config
|
|
||||||
self.disable_mail = disable_mail
|
|
||||||
self.units = ["doveauth"]
|
|
||||||
|
|
||||||
def install(self):
|
|
||||||
arch = host.get_fact(Arch)
|
|
||||||
if not "dovecot.service" in host.get_fact(SystemdEnabled):
|
|
||||||
_install_dovecot_package("core", arch)
|
|
||||||
_install_dovecot_package("imapd", arch)
|
|
||||||
_install_dovecot_package("lmtpd", arch)
|
|
||||||
|
|
||||||
def configure(self):
|
|
||||||
configure_remote_units(self.config.mail_domain, self.units)
|
|
||||||
self.need_restart = _configure_dovecot(self.config)
|
|
||||||
|
|
||||||
def activate(self):
|
|
||||||
activate_remote_units(self.units)
|
|
||||||
|
|
||||||
restart = False if self.disable_mail else self.need_restart
|
|
||||||
|
|
||||||
systemd.service(
|
|
||||||
name="disable dovecot for now"
|
|
||||||
if self.disable_mail
|
|
||||||
else "Start and enable Dovecot",
|
|
||||||
service="dovecot.service",
|
|
||||||
running=False if self.disable_mail else True,
|
|
||||||
enabled=False if self.disable_mail else True,
|
|
||||||
restarted=restart,
|
|
||||||
)
|
|
||||||
self.need_restart = False
|
|
||||||
|
|
||||||
|
|
||||||
def _install_dovecot_package(package: str, arch: str):
|
|
||||||
arch = "amd64" if arch == "x86_64" else arch
|
|
||||||
arch = "arm64" if arch == "aarch64" else arch
|
|
||||||
url = f"https://download.delta.chat/dovecot/dovecot-{package}_2.3.21%2Bdfsg1-3_{arch}.deb"
|
|
||||||
deb_filename = "/root/" + url.split("/")[-1]
|
|
||||||
|
|
||||||
match (package, arch):
|
|
||||||
case ("core", "amd64"):
|
|
||||||
sha256 = "dd060706f52a306fa863d874717210b9fe10536c824afe1790eec247ded5b27d"
|
|
||||||
case ("core", "arm64"):
|
|
||||||
sha256 = "e7548e8a82929722e973629ecc40fcfa886894cef3db88f23535149e7f730dc9"
|
|
||||||
case ("imapd", "amd64"):
|
|
||||||
sha256 = "8d8dc6fc00bbb6cdb25d345844f41ce2f1c53f764b79a838eb2a03103eebfa86"
|
|
||||||
case ("imapd", "arm64"):
|
|
||||||
sha256 = "178fa877ddd5df9930e8308b518f4b07df10e759050725f8217a0c1fb3fd707f"
|
|
||||||
case ("lmtpd", "amd64"):
|
|
||||||
sha256 = "2f69ba5e35363de50962d42cccbfe4ed8495265044e244007d7ccddad77513ab"
|
|
||||||
case ("lmtpd", "arm64"):
|
|
||||||
sha256 = "89f52fb36524f5877a177dff4a713ba771fd3f91f22ed0af7238d495e143b38f"
|
|
||||||
case _:
|
|
||||||
apt.packages(packages=[f"dovecot-{package}"])
|
|
||||||
return
|
|
||||||
|
|
||||||
files.download(
|
|
||||||
name=f"Download dovecot-{package}",
|
|
||||||
src=url,
|
|
||||||
dest=deb_filename,
|
|
||||||
sha256sum=sha256,
|
|
||||||
cache_time=60 * 60 * 24 * 365 * 10, # never redownload the package
|
|
||||||
)
|
|
||||||
|
|
||||||
apt.deb(name=f"Install dovecot-{package}", src=deb_filename)
|
|
||||||
|
|
||||||
|
|
||||||
def _configure_dovecot(config: Config, debug: bool = False) -> bool:
|
|
||||||
"""Configures Dovecot IMAP server."""
|
|
||||||
need_restart = False
|
|
||||||
|
|
||||||
main_config = files.template(
|
|
||||||
src=get_resource("dovecot/dovecot.conf.j2"),
|
|
||||||
dest="/etc/dovecot/dovecot.conf",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
config=config,
|
|
||||||
debug=debug,
|
|
||||||
disable_ipv6=config.disable_ipv6,
|
|
||||||
)
|
|
||||||
need_restart |= main_config.changed
|
|
||||||
auth_config = files.put(
|
|
||||||
src=get_resource("dovecot/auth.conf"),
|
|
||||||
dest="/etc/dovecot/auth.conf",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
need_restart |= auth_config.changed
|
|
||||||
lua_push_notification_script = files.put(
|
|
||||||
src=get_resource("dovecot/push_notification.lua"),
|
|
||||||
dest="/etc/dovecot/push_notification.lua",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
need_restart |= lua_push_notification_script.changed
|
|
||||||
|
|
||||||
# as per https://doc.dovecot.org/configuration_manual/os/
|
|
||||||
# it is recommended to set the following inotify limits
|
|
||||||
for name in ("max_user_instances", "max_user_watches"):
|
|
||||||
key = f"fs.inotify.{name}"
|
|
||||||
if host.get_fact(Sysctl)[key] > 65535:
|
|
||||||
# Skip updating limits if already sufficient
|
|
||||||
# (enables running in incus containers where sysctl readonly)
|
|
||||||
continue
|
|
||||||
server.sysctl(
|
|
||||||
name=f"Change {key}",
|
|
||||||
key=key,
|
|
||||||
value=65535,
|
|
||||||
persist=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
timezone_env = files.line(
|
|
||||||
name="Set TZ environment variable",
|
|
||||||
path="/etc/environment",
|
|
||||||
line="TZ=:/etc/localtime",
|
|
||||||
)
|
|
||||||
need_restart |= timezone_env.changed
|
|
||||||
|
|
||||||
return need_restart
|
|
||||||
@@ -70,12 +70,6 @@ userdb {
|
|||||||
# Mailboxes are stored in the "mail" directory of the vmail user home.
|
# Mailboxes are stored in the "mail" directory of the vmail user home.
|
||||||
mail_location = maildir:{{ config.mailboxes_dir }}/%u
|
mail_location = maildir:{{ config.mailboxes_dir }}/%u
|
||||||
|
|
||||||
# index/cache files are not very useful for chatmail relay operations
|
|
||||||
# but it's not clear how to disable them completely.
|
|
||||||
# According to https://doc.dovecot.org/2.3/settings/advanced/#core_setting-mail_cache_max_size
|
|
||||||
# if the cache file becomes larger than the specified size, it is truncated by dovecot
|
|
||||||
mail_cache_max_size = 500K
|
|
||||||
|
|
||||||
namespace inbox {
|
namespace inbox {
|
||||||
inbox = yes
|
inbox = yes
|
||||||
|
|
||||||
@@ -113,7 +107,7 @@ mail_attribute_dict = proxy:/run/chatmail-metadata/metadata.socket:metadata
|
|||||||
# `imap_zlib` enables IMAP COMPRESS (RFC 4978).
|
# `imap_zlib` enables IMAP COMPRESS (RFC 4978).
|
||||||
# <https://datatracker.ietf.org/doc/html/rfc4978.html>
|
# <https://datatracker.ietf.org/doc/html/rfc4978.html>
|
||||||
protocol imap {
|
protocol imap {
|
||||||
mail_plugins = $mail_plugins imap_quota last_login {% if config.imap_compress %}imap_zlib{% endif %}
|
mail_plugins = $mail_plugins imap_zlib imap_quota last_login
|
||||||
imap_metadata = yes
|
imap_metadata = yes
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -252,28 +246,3 @@ protocol imap {
|
|||||||
rawlog_dir = %h
|
rawlog_dir = %h
|
||||||
}
|
}
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
{% if not config.imap_compress %}
|
|
||||||
# Hibernate IDLE users to save memory and CPU resources
|
|
||||||
# NOTE: this will have no effect if imap_zlib plugin is used
|
|
||||||
imap_hibernate_timeout = 30s
|
|
||||||
service imap {
|
|
||||||
# Note that this change will allow any process running as
|
|
||||||
# $default_internal_user (dovecot) to access mails as any other user.
|
|
||||||
# This may be insecure in some installations, which is why this isn't
|
|
||||||
# done by default.
|
|
||||||
unix_listener imap-master {
|
|
||||||
user = $default_internal_user
|
|
||||||
}
|
|
||||||
}
|
|
||||||
# The following is the default already in v2.3.1+:
|
|
||||||
service imap {
|
|
||||||
extra_groups = $default_internal_group
|
|
||||||
}
|
|
||||||
service imap-hibernate {
|
|
||||||
unix_listener imap-hibernate {
|
|
||||||
mode = 0660
|
|
||||||
group = $default_internal_group
|
|
||||||
}
|
|
||||||
}
|
|
||||||
{% endif %}
|
|
||||||
|
|||||||
14
cmdeploy/src/cmdeploy/dovecot/expunge.cron.j2
Normal file
14
cmdeploy/src/cmdeploy/dovecot/expunge.cron.j2
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
# delete already seen big mails after 7 days, in the INBOX
|
||||||
|
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/cur/*' -mtime +{{ config.delete_large_after }} -size +200k -type f -delete
|
||||||
|
# delete all mails after {{ config.delete_mails_after }} days, in the Inbox
|
||||||
|
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/cur/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
||||||
|
# or in any IMAP subfolder
|
||||||
|
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/.*/cur/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
||||||
|
# even if they are unseen
|
||||||
|
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/new/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
||||||
|
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/.*/new/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
||||||
|
# or only temporary (but then they shouldn't be around after {{ config.delete_mails_after }} days anyway).
|
||||||
|
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/tmp/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
||||||
|
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/.*/tmp/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
||||||
|
3 0 * * * vmail find {{ config.mailboxes_dir }} -name 'maildirsize' -type f -delete
|
||||||
|
4 0 * * * vmail /usr/local/lib/chatmaild/venv/bin/delete_inactive_users /usr/local/lib/chatmaild/chatmail.ini
|
||||||
@@ -1,11 +1,5 @@
|
|||||||
enable_relay = true
|
enable_relay = true
|
||||||
http_bind_addr = "[::]:3340"
|
http_bind_addr = "[::]:3340"
|
||||||
|
enable_stun = true
|
||||||
# Disable built-in STUN server in iroh-relay 0.35
|
|
||||||
# as we deploy our own TURN server instead.
|
|
||||||
# STUN server is going to be removed in iroh-relay 1.0
|
|
||||||
# and this line can be removed after upgrade.
|
|
||||||
enable_stun = false
|
|
||||||
|
|
||||||
enable_metrics = false
|
enable_metrics = false
|
||||||
metrics_bind_addr = "127.0.0.1:9092"
|
metrics_bind_addr = "127.0.0.1:9092"
|
||||||
|
|||||||
@@ -1,68 +0,0 @@
|
|||||||
from pyinfra import facts, host
|
|
||||||
from pyinfra.operations import apt, files, server, systemd
|
|
||||||
|
|
||||||
from cmdeploy.basedeploy import (
|
|
||||||
Deployer,
|
|
||||||
get_resource,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class MtailDeployer(Deployer):
|
|
||||||
def __init__(self, mtail_address):
|
|
||||||
self.mtail_address = mtail_address
|
|
||||||
|
|
||||||
def install(self):
|
|
||||||
# Uninstall mtail package to install a static binary.
|
|
||||||
apt.packages(name="Uninstall mtail", packages=["mtail"], present=False)
|
|
||||||
|
|
||||||
(url, sha256sum) = {
|
|
||||||
"x86_64": (
|
|
||||||
"https://github.com/google/mtail/releases/download/v3.0.8/mtail_3.0.8_linux_amd64.tar.gz",
|
|
||||||
"123c2ee5f48c3eff12ebccee38befd2233d715da736000ccde49e3d5607724e4",
|
|
||||||
),
|
|
||||||
"aarch64": (
|
|
||||||
"https://github.com/google/mtail/releases/download/v3.0.8/mtail_3.0.8_linux_arm64.tar.gz",
|
|
||||||
"aa04811c0929b6754408676de520e050c45dddeb3401881888a092c9aea89cae",
|
|
||||||
),
|
|
||||||
}[host.get_fact(facts.server.Arch)]
|
|
||||||
|
|
||||||
server.shell(
|
|
||||||
name="Download mtail",
|
|
||||||
commands=[
|
|
||||||
f"(echo '{sha256sum} /usr/local/bin/mtail' | sha256sum -c) || (curl -L {url} | gunzip | tar -x -f - mtail -O >/usr/local/bin/mtail.new && mv /usr/local/bin/mtail.new /usr/local/bin/mtail)",
|
|
||||||
"chmod 755 /usr/local/bin/mtail",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
def configure(self):
|
|
||||||
# Using our own systemd unit instead of `/usr/lib/systemd/system/mtail.service`.
|
|
||||||
# This allows to read from journalctl instead of log files.
|
|
||||||
files.template(
|
|
||||||
src=get_resource("mtail/mtail.service.j2"),
|
|
||||||
dest="/etc/systemd/system/mtail.service",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
address=self.mtail_address or "127.0.0.1",
|
|
||||||
port=3903,
|
|
||||||
)
|
|
||||||
|
|
||||||
mtail_conf = files.put(
|
|
||||||
name="Mtail configuration",
|
|
||||||
src=get_resource("mtail/delivered_mail.mtail"),
|
|
||||||
dest="/etc/mtail/delivered_mail.mtail",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
self.need_restart = mtail_conf.changed
|
|
||||||
|
|
||||||
def activate(self):
|
|
||||||
systemd.service(
|
|
||||||
name="Start and enable mtail",
|
|
||||||
service="mtail.service",
|
|
||||||
running=bool(self.mtail_address),
|
|
||||||
enabled=bool(self.mtail_address),
|
|
||||||
restarted=self.need_restart,
|
|
||||||
)
|
|
||||||
self.need_restart = False
|
|
||||||
@@ -1,117 +0,0 @@
|
|||||||
from chatmaild.config import Config
|
|
||||||
from pyinfra.operations import apt, files, systemd
|
|
||||||
|
|
||||||
from cmdeploy.basedeploy import (
|
|
||||||
Deployer,
|
|
||||||
get_resource,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class NginxDeployer(Deployer):
|
|
||||||
def __init__(self, config):
|
|
||||||
self.config = config
|
|
||||||
|
|
||||||
def install(self):
|
|
||||||
#
|
|
||||||
# If we allow nginx to start up on install, it will grab port
|
|
||||||
# 80, which then will block acmetool from listening on the port.
|
|
||||||
# That in turn prevents getting certificates, which then causes
|
|
||||||
# an error when we try to start nginx on the custom config
|
|
||||||
# that leaves port 80 open but also requires certificates to
|
|
||||||
# be present. To avoid getting into that interlocking mess,
|
|
||||||
# we use policy-rc.d to prevent nginx from starting up when it
|
|
||||||
# is installed.
|
|
||||||
#
|
|
||||||
# This approach allows us to avoid performing any explicit
|
|
||||||
# systemd operations during the install stage (as opposed to
|
|
||||||
# allowing it to start and then forcing it to stop), which allows
|
|
||||||
# the install stage to run in non-systemd environments like a
|
|
||||||
# container image build.
|
|
||||||
#
|
|
||||||
# For documentation about policy-rc.d, see:
|
|
||||||
# https://people.debian.org/~hmh/invokerc.d-policyrc.d-specification.txt
|
|
||||||
#
|
|
||||||
files.put(
|
|
||||||
src=get_resource("policy-rc.d"),
|
|
||||||
dest="/usr/sbin/policy-rc.d",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="755",
|
|
||||||
)
|
|
||||||
|
|
||||||
apt.packages(
|
|
||||||
name="Install nginx",
|
|
||||||
packages=["nginx", "libnginx-mod-stream"],
|
|
||||||
)
|
|
||||||
|
|
||||||
files.file("/usr/sbin/policy-rc.d", present=False)
|
|
||||||
|
|
||||||
def configure(self):
|
|
||||||
self.need_restart = _configure_nginx(self.config)
|
|
||||||
|
|
||||||
def activate(self):
|
|
||||||
systemd.service(
|
|
||||||
name="Start and enable nginx",
|
|
||||||
service="nginx.service",
|
|
||||||
running=True,
|
|
||||||
enabled=True,
|
|
||||||
restarted=self.need_restart,
|
|
||||||
)
|
|
||||||
self.need_restart = False
|
|
||||||
|
|
||||||
|
|
||||||
def _configure_nginx(config: Config, debug: bool = False) -> bool:
|
|
||||||
"""Configures nginx HTTP server."""
|
|
||||||
need_restart = False
|
|
||||||
|
|
||||||
main_config = files.template(
|
|
||||||
src=get_resource("nginx/nginx.conf.j2"),
|
|
||||||
dest="/etc/nginx/nginx.conf",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
config={"domain_name": config.mail_domain},
|
|
||||||
disable_ipv6=config.disable_ipv6,
|
|
||||||
)
|
|
||||||
need_restart |= main_config.changed
|
|
||||||
|
|
||||||
autoconfig = files.template(
|
|
||||||
src=get_resource("nginx/autoconfig.xml.j2"),
|
|
||||||
dest="/var/www/html/.well-known/autoconfig/mail/config-v1.1.xml",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
config={"domain_name": config.mail_domain},
|
|
||||||
)
|
|
||||||
need_restart |= autoconfig.changed
|
|
||||||
|
|
||||||
mta_sts_config = files.template(
|
|
||||||
src=get_resource("nginx/mta-sts.txt.j2"),
|
|
||||||
dest="/var/www/html/.well-known/mta-sts.txt",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
config={"domain_name": config.mail_domain},
|
|
||||||
)
|
|
||||||
need_restart |= mta_sts_config.changed
|
|
||||||
|
|
||||||
# install CGI newemail script
|
|
||||||
#
|
|
||||||
cgi_dir = "/usr/lib/cgi-bin"
|
|
||||||
files.directory(
|
|
||||||
name=f"Ensure {cgi_dir} exists",
|
|
||||||
path=cgi_dir,
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
)
|
|
||||||
|
|
||||||
files.put(
|
|
||||||
name="Upload cgi newemail.py script",
|
|
||||||
src=get_resource("newemail.py", pkg="chatmaild").open("rb"),
|
|
||||||
dest=f"{cgi_dir}/newemail.py",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="755",
|
|
||||||
)
|
|
||||||
|
|
||||||
return need_restart
|
|
||||||
@@ -66,7 +66,7 @@ http {
|
|||||||
|
|
||||||
index index.html index.htm;
|
index index.html index.htm;
|
||||||
|
|
||||||
server_name {{ config.domain_name }} www.{{ config.domain_name }} mta-sts.{{ config.domain_name }};
|
server_name _;
|
||||||
|
|
||||||
access_log syslog:server=unix:/dev/log,facility=local7;
|
access_log syslog:server=unix:/dev/log,facility=local7;
|
||||||
|
|
||||||
|
|||||||
BIN
cmdeploy/src/cmdeploy/obs-home-deltachat.gpg
Normal file
BIN
cmdeploy/src/cmdeploy/obs-home-deltachat.gpg
Normal file
Binary file not shown.
@@ -1,123 +0,0 @@
|
|||||||
"""
|
|
||||||
Installs OpenDKIM
|
|
||||||
"""
|
|
||||||
|
|
||||||
from pyinfra import host
|
|
||||||
from pyinfra.facts.files import File
|
|
||||||
from pyinfra.operations import apt, files, server, systemd
|
|
||||||
|
|
||||||
from cmdeploy.basedeploy import Deployer, get_resource
|
|
||||||
|
|
||||||
|
|
||||||
class OpendkimDeployer(Deployer):
|
|
||||||
required_users = [("opendkim", None, ["opendkim"])]
|
|
||||||
|
|
||||||
def __init__(self, mail_domain):
|
|
||||||
self.mail_domain = mail_domain
|
|
||||||
|
|
||||||
def install(self):
|
|
||||||
apt.packages(
|
|
||||||
name="apt install opendkim opendkim-tools",
|
|
||||||
packages=["opendkim", "opendkim-tools"],
|
|
||||||
)
|
|
||||||
|
|
||||||
def configure(self):
|
|
||||||
domain = self.mail_domain
|
|
||||||
dkim_selector = "opendkim"
|
|
||||||
"""Configures OpenDKIM"""
|
|
||||||
need_restart = False
|
|
||||||
|
|
||||||
main_config = files.template(
|
|
||||||
src=get_resource("opendkim/opendkim.conf"),
|
|
||||||
dest="/etc/opendkim.conf",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
config={"domain_name": domain, "opendkim_selector": dkim_selector},
|
|
||||||
)
|
|
||||||
need_restart |= main_config.changed
|
|
||||||
|
|
||||||
screen_script = files.put(
|
|
||||||
src=get_resource("opendkim/screen.lua"),
|
|
||||||
dest="/etc/opendkim/screen.lua",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
need_restart |= screen_script.changed
|
|
||||||
|
|
||||||
final_script = files.put(
|
|
||||||
src=get_resource("opendkim/final.lua"),
|
|
||||||
dest="/etc/opendkim/final.lua",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
need_restart |= final_script.changed
|
|
||||||
|
|
||||||
files.directory(
|
|
||||||
name="Add opendkim directory to /etc",
|
|
||||||
path="/etc/opendkim",
|
|
||||||
user="opendkim",
|
|
||||||
group="opendkim",
|
|
||||||
mode="750",
|
|
||||||
present=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
keytable = files.template(
|
|
||||||
src=get_resource("opendkim/KeyTable"),
|
|
||||||
dest="/etc/dkimkeys/KeyTable",
|
|
||||||
user="opendkim",
|
|
||||||
group="opendkim",
|
|
||||||
mode="644",
|
|
||||||
config={"domain_name": domain, "opendkim_selector": dkim_selector},
|
|
||||||
)
|
|
||||||
need_restart |= keytable.changed
|
|
||||||
|
|
||||||
signing_table = files.template(
|
|
||||||
src=get_resource("opendkim/SigningTable"),
|
|
||||||
dest="/etc/dkimkeys/SigningTable",
|
|
||||||
user="opendkim",
|
|
||||||
group="opendkim",
|
|
||||||
mode="644",
|
|
||||||
config={"domain_name": domain, "opendkim_selector": dkim_selector},
|
|
||||||
)
|
|
||||||
need_restart |= signing_table.changed
|
|
||||||
files.directory(
|
|
||||||
name="Add opendkim socket directory to /var/spool/postfix",
|
|
||||||
path="/var/spool/postfix/opendkim",
|
|
||||||
user="opendkim",
|
|
||||||
group="opendkim",
|
|
||||||
mode="750",
|
|
||||||
present=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
if not host.get_fact(File, f"/etc/dkimkeys/{dkim_selector}.private"):
|
|
||||||
server.shell(
|
|
||||||
name="Generate OpenDKIM domain keys",
|
|
||||||
commands=[
|
|
||||||
f"/usr/sbin/opendkim-genkey -D /etc/dkimkeys -d {domain} -s {dkim_selector}"
|
|
||||||
],
|
|
||||||
_use_su_login=True,
|
|
||||||
_su_user="opendkim",
|
|
||||||
)
|
|
||||||
|
|
||||||
service_file = files.put(
|
|
||||||
name="Configure opendkim to restart once a day",
|
|
||||||
src=get_resource("opendkim/systemd.conf"),
|
|
||||||
dest="/etc/systemd/system/opendkim.service.d/10-prevent-memory-leak.conf",
|
|
||||||
)
|
|
||||||
need_restart |= service_file.changed
|
|
||||||
|
|
||||||
self.need_restart = need_restart
|
|
||||||
|
|
||||||
def activate(self):
|
|
||||||
systemd.service(
|
|
||||||
name="Start and enable OpenDKIM",
|
|
||||||
service="opendkim.service",
|
|
||||||
running=True,
|
|
||||||
enabled=True,
|
|
||||||
daemon_reload=self.need_restart,
|
|
||||||
restarted=self.need_restart,
|
|
||||||
)
|
|
||||||
self.need_restart = False
|
|
||||||
@@ -9,10 +9,9 @@ if nsigs == nil then
|
|||||||
return nil
|
return nil
|
||||||
end
|
end
|
||||||
|
|
||||||
local valid = false
|
|
||||||
for i = 1, nsigs do
|
for i = 1, nsigs do
|
||||||
sig = odkim.get_sighandle(ctx, i - 1)
|
sig = odkim.get_sighandle(ctx, i - 1)
|
||||||
sigres = odkim.sig_result(sig)
|
sigres = odkim.sig_result(sig)
|
||||||
|
|
||||||
-- All signatures that do not correspond to From:
|
-- All signatures that do not correspond to From:
|
||||||
-- were ignored in screen.lua and return sigres -1.
|
-- were ignored in screen.lua and return sigres -1.
|
||||||
@@ -20,19 +19,10 @@ for i = 1, nsigs do
|
|||||||
-- Any valid signature that was not ignored like this
|
-- Any valid signature that was not ignored like this
|
||||||
-- means the message is acceptable.
|
-- means the message is acceptable.
|
||||||
if sigres == 0 then
|
if sigres == 0 then
|
||||||
valid = true
|
return nil
|
||||||
end
|
end
|
||||||
end
|
|
||||||
|
|
||||||
if valid then
|
|
||||||
-- Strip all DKIM-Signature headers after successful validation
|
|
||||||
-- Delete in reverse order to avoid index shifting.
|
|
||||||
for i = nsigs, 1, -1 do
|
|
||||||
odkim.del_header(ctx, "DKIM-Signature", i)
|
|
||||||
end
|
|
||||||
else
|
|
||||||
odkim.set_reply(ctx, "554", "5.7.1", "No valid DKIM signature found")
|
|
||||||
odkim.set_result(ctx, SMFIS_REJECT)
|
|
||||||
end
|
end
|
||||||
|
|
||||||
|
odkim.set_reply(ctx, "554", "5.7.1", "No valid DKIM signature found")
|
||||||
|
odkim.set_result(ctx, SMFIS_REJECT)
|
||||||
return nil
|
return nil
|
||||||
|
|||||||
@@ -13,7 +13,6 @@ OversignHeaders From
|
|||||||
On-BadSignature reject
|
On-BadSignature reject
|
||||||
On-KeyNotFound reject
|
On-KeyNotFound reject
|
||||||
On-NoSignature reject
|
On-NoSignature reject
|
||||||
DNSTimeout 60
|
|
||||||
|
|
||||||
# Signing domain, selector, and key (required). For example, perform signing
|
# Signing domain, selector, and key (required). For example, perform signing
|
||||||
# for domain "example.com" with selector "2020" (2020._domainkey.example.com),
|
# for domain "example.com" with selector "2020" (2020._domainkey.example.com),
|
||||||
|
|||||||
@@ -1,3 +0,0 @@
|
|||||||
#!/bin/sh
|
|
||||||
echo "All runlevel operations denied by policy" >&2
|
|
||||||
exit 101
|
|
||||||
@@ -1,77 +0,0 @@
|
|||||||
from pyinfra.operations import apt, files, systemd
|
|
||||||
|
|
||||||
from cmdeploy.basedeploy import Deployer, get_resource
|
|
||||||
|
|
||||||
|
|
||||||
class PostfixDeployer(Deployer):
|
|
||||||
required_users = [("postfix", None, ["opendkim"])]
|
|
||||||
|
|
||||||
def __init__(self, config, disable_mail):
|
|
||||||
self.config = config
|
|
||||||
self.disable_mail = disable_mail
|
|
||||||
|
|
||||||
def install(self):
|
|
||||||
apt.packages(
|
|
||||||
name="Install Postfix",
|
|
||||||
packages="postfix",
|
|
||||||
)
|
|
||||||
|
|
||||||
def configure(self):
|
|
||||||
config = self.config
|
|
||||||
need_restart = False
|
|
||||||
|
|
||||||
main_config = files.template(
|
|
||||||
src=get_resource("postfix/main.cf.j2"),
|
|
||||||
dest="/etc/postfix/main.cf",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
config=config,
|
|
||||||
disable_ipv6=config.disable_ipv6,
|
|
||||||
)
|
|
||||||
need_restart |= main_config.changed
|
|
||||||
|
|
||||||
master_config = files.template(
|
|
||||||
src=get_resource("postfix/master.cf.j2"),
|
|
||||||
dest="/etc/postfix/master.cf",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
debug=False,
|
|
||||||
config=config,
|
|
||||||
)
|
|
||||||
need_restart |= master_config.changed
|
|
||||||
|
|
||||||
header_cleanup = files.put(
|
|
||||||
src=get_resource("postfix/submission_header_cleanup"),
|
|
||||||
dest="/etc/postfix/submission_header_cleanup",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
need_restart |= header_cleanup.changed
|
|
||||||
|
|
||||||
# Login map that 1:1 maps email address to login.
|
|
||||||
login_map = files.put(
|
|
||||||
src=get_resource("postfix/login_map"),
|
|
||||||
dest="/etc/postfix/login_map",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
need_restart |= login_map.changed
|
|
||||||
self.need_restart = need_restart
|
|
||||||
|
|
||||||
def activate(self):
|
|
||||||
restart = False if self.disable_mail else self.need_restart
|
|
||||||
|
|
||||||
systemd.service(
|
|
||||||
name="disable postfix for now"
|
|
||||||
if self.disable_mail
|
|
||||||
else "Start and enable Postfix",
|
|
||||||
service="postfix.service",
|
|
||||||
running=False if self.disable_mail else True,
|
|
||||||
enabled=False if self.disable_mail else True,
|
|
||||||
restarted=restart,
|
|
||||||
)
|
|
||||||
self.need_restart = False
|
|
||||||
@@ -26,8 +26,7 @@ smtp_tls_security_level=verify
|
|||||||
smtp_tls_servername = hostname
|
smtp_tls_servername = hostname
|
||||||
smtp_tls_session_cache_database = btree:${data_directory}/smtp_scache
|
smtp_tls_session_cache_database = btree:${data_directory}/smtp_scache
|
||||||
smtp_tls_policy_maps = inline:{nauta.cu=may}
|
smtp_tls_policy_maps = inline:{nauta.cu=may}
|
||||||
smtp_tls_protocols = >=TLSv1.2
|
smtpd_tls_protocols = >=TLSv1.2
|
||||||
smtp_tls_mandatory_protocols = >=TLSv1.2
|
|
||||||
|
|
||||||
# Disable anonymous cipher suites
|
# Disable anonymous cipher suites
|
||||||
# and known insecure algorithms.
|
# and known insecure algorithms.
|
||||||
|
|||||||
@@ -14,8 +14,6 @@ smtp inet n - y - - smtpd -v
|
|||||||
{%- else %}
|
{%- else %}
|
||||||
smtp inet n - y - - smtpd
|
smtp inet n - y - - smtpd
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
-o smtpd_tls_security_level=encrypt
|
|
||||||
-o smtpd_tls_mandatory_protocols=>=TLSv1.2
|
|
||||||
-o smtpd_proxy_filter=127.0.0.1:{{ config.filtermail_smtp_port_incoming }}
|
-o smtpd_proxy_filter=127.0.0.1:{{ config.filtermail_smtp_port_incoming }}
|
||||||
submission inet n - y - 5000 smtpd
|
submission inet n - y - 5000 smtpd
|
||||||
-o syslog_name=postfix/submission
|
-o syslog_name=postfix/submission
|
||||||
|
|||||||
19
cmdeploy/src/cmdeploy/proxy-deploy.py
Normal file
19
cmdeploy/src/cmdeploy/proxy-deploy.py
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
import os
|
||||||
|
|
||||||
|
import pyinfra
|
||||||
|
from pyinfra import host
|
||||||
|
|
||||||
|
from proxy import configure_ssh, configure_proxy
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
ipv4_relay = os.getenv("IPV4_RELAY")
|
||||||
|
ipv6_relay = os.getenv("IPV6_RELAY")
|
||||||
|
|
||||||
|
configure_ssh()
|
||||||
|
if host.data.get("ssh_port") not in (None, 22):
|
||||||
|
configure_proxy(ipv4_relay, ipv6_relay)
|
||||||
|
|
||||||
|
|
||||||
|
if pyinfra.is_cli:
|
||||||
|
main()
|
||||||
63
cmdeploy/src/cmdeploy/proxy.py
Normal file
63
cmdeploy/src/cmdeploy/proxy.py
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
import importlib
|
||||||
|
|
||||||
|
from pyinfra import host
|
||||||
|
from pyinfra.operations import files, server, apt, systemd
|
||||||
|
|
||||||
|
def configure_ssh():
|
||||||
|
files.replace(
|
||||||
|
name="Configure sshd to use port 2222",
|
||||||
|
path="/etc/ssh/sshd_config",
|
||||||
|
text="Port 22\n",
|
||||||
|
replace="Port 2222\n",
|
||||||
|
)
|
||||||
|
systemd.service(
|
||||||
|
name="apply SSH config",
|
||||||
|
service="ssh",
|
||||||
|
reloaded=True,
|
||||||
|
)
|
||||||
|
apt.update()
|
||||||
|
|
||||||
|
|
||||||
|
def configure_proxy(ipv4_relay, ipv6_relay):
|
||||||
|
files.put(
|
||||||
|
name="Configure nftables",
|
||||||
|
src=importlib.resources.files(__package__).joinpath("proxy_files/nftables.conf.j2"),
|
||||||
|
dest="/etc/nftables.conf",
|
||||||
|
ipv4_address=ipv4_relay, # :todo what if only one of them is specified?
|
||||||
|
ipv6_address=ipv6_relay,
|
||||||
|
)
|
||||||
|
|
||||||
|
server.sysctl(name="enable IPv4 forwarding", key="net.ipv4.ip_forward", value=1, persist=True)
|
||||||
|
|
||||||
|
server.sysctl(
|
||||||
|
name="enable IPv6 forwarding",
|
||||||
|
key="net.ipv6.conf.all.forwarding",
|
||||||
|
value=1,
|
||||||
|
persist=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
server.shell(
|
||||||
|
name="apply forwarding configuration",
|
||||||
|
commands=[
|
||||||
|
"sysctl -p",
|
||||||
|
"nft -f /etc/nftables.conf",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
if host.data.get("floating_ips"):
|
||||||
|
i = 0
|
||||||
|
for floating_ip in host.data.get("floating_ips"):
|
||||||
|
i += 1
|
||||||
|
files.template(
|
||||||
|
name="Add floating IPs",
|
||||||
|
src="servers/proxy-nine/files/60-floating.ip.cfg.j2",
|
||||||
|
dest=f"/etc/network/interfaces.d/{59 + i}-floating.ip.cfg",
|
||||||
|
ip_address=floating_ip,
|
||||||
|
i=i,
|
||||||
|
)
|
||||||
|
|
||||||
|
systemd.service(
|
||||||
|
name="apply floating IPs",
|
||||||
|
service="networking",
|
||||||
|
restarted=True,
|
||||||
|
)
|
||||||
4
cmdeploy/src/cmdeploy/proxy_files/60-floating.ip.cfg.j2
Normal file
4
cmdeploy/src/cmdeploy/proxy_files/60-floating.ip.cfg.j2
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
auto eth0:{{ i }}
|
||||||
|
iface eth0:{{ i }} inet static
|
||||||
|
address {{ ip_address }}
|
||||||
|
netmask 32
|
||||||
67
cmdeploy/src/cmdeploy/proxy_files/nftables.conf.j2
Normal file
67
cmdeploy/src/cmdeploy/proxy_files/nftables.conf.j2
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
#!/usr/sbin/nft -f
|
||||||
|
|
||||||
|
flush ruleset
|
||||||
|
|
||||||
|
define wan = eth0
|
||||||
|
|
||||||
|
# which ports to proxy
|
||||||
|
define ports = { smtp, http, https, imap, imaps, submission, submissions }
|
||||||
|
|
||||||
|
# the host we want to proxy to
|
||||||
|
define ipv4_address = {{ ipv4_address }}
|
||||||
|
define ipv6_address = [{{ ipv6_address }}]
|
||||||
|
|
||||||
|
table ip nat {
|
||||||
|
chain prerouting {
|
||||||
|
type nat hook prerouting priority dstnat; policy accept;
|
||||||
|
iif $wan tcp dport $ports dnat to $ipv4_address
|
||||||
|
}
|
||||||
|
|
||||||
|
chain postrouting {
|
||||||
|
type nat hook postrouting priority 0;
|
||||||
|
|
||||||
|
oifname $wan masquerade
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
table ip6 nat {
|
||||||
|
chain prerouting {
|
||||||
|
type nat hook prerouting priority dstnat; policy accept;
|
||||||
|
iif $wan tcp dport $ports dnat to $ipv6_address
|
||||||
|
}
|
||||||
|
|
||||||
|
chain postrouting {
|
||||||
|
type nat hook postrouting priority 0;
|
||||||
|
|
||||||
|
oifname $wan masquerade
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
table inet filter {
|
||||||
|
chain input {
|
||||||
|
type filter hook input priority filter; policy drop;
|
||||||
|
|
||||||
|
# Accept ICMP.
|
||||||
|
# It is especially important to accept ICMPv6 ND messages,
|
||||||
|
# otherwise IPv6 connectivity breaks.
|
||||||
|
icmp type { echo-request } accept
|
||||||
|
icmpv6 type { echo-request, nd-neighbor-solicit, nd-router-advert, nd-neighbor-advert } accept
|
||||||
|
|
||||||
|
# Allow incoming SSH connections.
|
||||||
|
tcp dport { 22, 2222 } accept
|
||||||
|
# Allow incoming shadowsocks connections.
|
||||||
|
tcp dport { 8388 } accept
|
||||||
|
|
||||||
|
ct state established accept
|
||||||
|
}
|
||||||
|
chain forward {
|
||||||
|
type filter hook forward priority filter; policy drop;
|
||||||
|
|
||||||
|
ct state established accept
|
||||||
|
ip daddr $ipv4_address counter accept
|
||||||
|
ip6 daddr $ipv6_address counter accept
|
||||||
|
}
|
||||||
|
chain output {
|
||||||
|
type filter hook output priority filter;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -12,7 +12,7 @@ All functions of this module
|
|||||||
|
|
||||||
import re
|
import re
|
||||||
|
|
||||||
from .rshell import CalledProcessError, log_progress, shell
|
from .rshell import CalledProcessError, shell, log_progress
|
||||||
|
|
||||||
|
|
||||||
def perform_initial_checks(mail_domain, pre_command=""):
|
def perform_initial_checks(mail_domain, pre_command=""):
|
||||||
@@ -26,9 +26,7 @@ def perform_initial_checks(mail_domain, pre_command=""):
|
|||||||
WWW = query_dns("CNAME", f"www.{mail_domain}")
|
WWW = query_dns("CNAME", f"www.{mail_domain}")
|
||||||
|
|
||||||
res = dict(mail_domain=mail_domain, A=A, AAAA=AAAA, MTA_STS=MTA_STS, WWW=WWW)
|
res = dict(mail_domain=mail_domain, A=A, AAAA=AAAA, MTA_STS=MTA_STS, WWW=WWW)
|
||||||
res["acme_account_url"] = shell(
|
res["acme_account_url"] = shell(pre_command + "acmetool account-url", fail_ok=True, print=log_progress)
|
||||||
pre_command + "acmetool account-url", fail_ok=True, print=log_progress
|
|
||||||
)
|
|
||||||
res["dkim_entry"], res["web_dkim_entry"] = get_dkim_entry(
|
res["dkim_entry"], res["web_dkim_entry"] = get_dkim_entry(
|
||||||
mail_domain, pre_command, dkim_selector="opendkim"
|
mail_domain, pre_command, dkim_selector="opendkim"
|
||||||
)
|
)
|
||||||
@@ -37,10 +35,7 @@ def perform_initial_checks(mail_domain, pre_command=""):
|
|||||||
return res
|
return res
|
||||||
|
|
||||||
# parse out sts-id if exists, example: "v=STSv1; id=2090123"
|
# parse out sts-id if exists, example: "v=STSv1; id=2090123"
|
||||||
mta_sts_txt = query_dns("TXT", f"_mta-sts.{mail_domain}")
|
parts = query_dns("TXT", f"_mta-sts.{mail_domain}").split("id=")
|
||||||
if not mta_sts_txt:
|
|
||||||
return res
|
|
||||||
parts = mta_sts_txt.split("id=")
|
|
||||||
res["sts_id"] = parts[1].rstrip('"') if len(parts) == 2 else ""
|
res["sts_id"] = parts[1].rstrip('"') if len(parts) == 2 else ""
|
||||||
return res
|
return res
|
||||||
|
|
||||||
@@ -50,7 +45,7 @@ def get_dkim_entry(mail_domain, pre_command, dkim_selector):
|
|||||||
dkim_pubkey = shell(
|
dkim_pubkey = shell(
|
||||||
f"{pre_command}openssl rsa -in /etc/dkimkeys/{dkim_selector}.private "
|
f"{pre_command}openssl rsa -in /etc/dkimkeys/{dkim_selector}.private "
|
||||||
"-pubout 2>/dev/null | awk '/-/{next}{printf(\"%s\",$0)}'",
|
"-pubout 2>/dev/null | awk '/-/{next}{printf(\"%s\",$0)}'",
|
||||||
print=log_progress,
|
print=log_progress
|
||||||
)
|
)
|
||||||
except CalledProcessError:
|
except CalledProcessError:
|
||||||
return
|
return
|
||||||
@@ -67,9 +62,9 @@ def query_dns(typ, domain):
|
|||||||
# Get autoritative nameserver from the SOA record.
|
# Get autoritative nameserver from the SOA record.
|
||||||
soa_answers = [
|
soa_answers = [
|
||||||
x.split()
|
x.split()
|
||||||
for x in shell(
|
for x in shell(f"dig -r -q {domain} -t SOA +noall +authority +answer", print=log_progress).split(
|
||||||
f"dig -r -q {domain} -t SOA +noall +authority +answer", print=log_progress
|
"\n"
|
||||||
).split("\n")
|
)
|
||||||
]
|
]
|
||||||
soa = [a for a in soa_answers if len(a) >= 3 and a[3] == "SOA"]
|
soa = [a for a in soa_answers if len(a) >= 3 and a[3] == "SOA"]
|
||||||
if not soa:
|
if not soa:
|
||||||
@@ -78,7 +73,9 @@ def query_dns(typ, domain):
|
|||||||
|
|
||||||
# Query authoritative nameserver directly to bypass DNS cache.
|
# Query authoritative nameserver directly to bypass DNS cache.
|
||||||
res = shell(f"dig @{ns} -r -q {domain} -t {typ} +short", print=log_progress)
|
res = shell(f"dig @{ns} -r -q {domain} -t {typ} +short", print=log_progress)
|
||||||
return next((line for line in res.split("\n") if not line.startswith(";")), "")
|
if res:
|
||||||
|
return res.split("\n")[0]
|
||||||
|
return ""
|
||||||
|
|
||||||
|
|
||||||
def check_zonefile(zonefile, verbose=True):
|
def check_zonefile(zonefile, verbose=True):
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
import sys
|
import sys
|
||||||
|
|
||||||
from subprocess import DEVNULL, CalledProcessError, check_output
|
from subprocess import DEVNULL, CalledProcessError, check_output
|
||||||
|
|
||||||
|
|
||||||
@@ -20,6 +21,20 @@ def shell(command, fail_ok=False, print=print):
|
|||||||
return ""
|
return ""
|
||||||
|
|
||||||
|
|
||||||
|
def get_port_service(port: int) -> str:
|
||||||
|
return shell(
|
||||||
|
"ss -lptn 'src :%d' | awk 'NR>1 {print $6,$7}' | sed 's/users:((\"//;s/\".*//'"
|
||||||
|
% (port,)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def chatmail_version():
|
||||||
|
version = shell("cat /etc/chatmail-version")
|
||||||
|
if "cat: /etc/chatmail-version:" in version:
|
||||||
|
version = None
|
||||||
|
return version
|
||||||
|
|
||||||
|
|
||||||
def get_systemd_running():
|
def get_systemd_running():
|
||||||
lines = shell("systemctl --type=service --state=running").split("\n")
|
lines = shell("systemctl --type=service --state=running").split("\n")
|
||||||
return [line for line in lines if line.startswith(" ")]
|
return [line for line in lines if line.startswith(" ")]
|
||||||
|
|||||||
@@ -1,9 +0,0 @@
|
|||||||
[Unit]
|
|
||||||
Description=chatmail mail storage expiration job
|
|
||||||
After=network.target
|
|
||||||
|
|
||||||
[Service]
|
|
||||||
Type=oneshot
|
|
||||||
User=vmail
|
|
||||||
ExecStart=/usr/local/lib/chatmaild/venv/bin/chatmail-expire /usr/local/lib/chatmaild/chatmail.ini -v --remove
|
|
||||||
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
[Unit]
|
|
||||||
Description=Run Daily chatmail-expire job
|
|
||||||
|
|
||||||
[Timer]
|
|
||||||
OnCalendar=*-*-* 00:02:00
|
|
||||||
|
|
||||||
[Install]
|
|
||||||
WantedBy=timers.target
|
|
||||||
@@ -1,9 +0,0 @@
|
|||||||
[Unit]
|
|
||||||
Description=chatmail file system storage reporting job
|
|
||||||
After=network.target
|
|
||||||
|
|
||||||
[Service]
|
|
||||||
Type=oneshot
|
|
||||||
User=vmail
|
|
||||||
ExecStart=/usr/local/lib/chatmaild/venv/bin/chatmail-fsreport /usr/local/lib/chatmaild/chatmail.ini
|
|
||||||
|
|
||||||
@@ -1,9 +0,0 @@
|
|||||||
[Unit]
|
|
||||||
Description=Run Daily Chatmail fsreport Job
|
|
||||||
|
|
||||||
[Timer]
|
|
||||||
OnCalendar=*-*-* 08:02:00
|
|
||||||
Persistent=true
|
|
||||||
|
|
||||||
[Install]
|
|
||||||
WantedBy=timers.target
|
|
||||||
67
cmdeploy/src/cmdeploy/service/echobot.service.f
Normal file
67
cmdeploy/src/cmdeploy/service/echobot.service.f
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
[Unit]
|
||||||
|
Description=Chatmail echo bot for testing it works
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
ExecStart={execpath} {config_path}
|
||||||
|
Environment="PATH={remote_venv_dir}:$PATH"
|
||||||
|
Restart=always
|
||||||
|
RestartSec=30
|
||||||
|
|
||||||
|
User=echobot
|
||||||
|
Group=echobot
|
||||||
|
|
||||||
|
# Create /var/lib/echobot
|
||||||
|
StateDirectory=echobot
|
||||||
|
|
||||||
|
# Create /run/echobot
|
||||||
|
#
|
||||||
|
# echobot stores /run/echobot/password
|
||||||
|
# with a password there, which doveauth then reads.
|
||||||
|
RuntimeDirectory=echobot
|
||||||
|
|
||||||
|
WorkingDirectory=/var/lib/echobot
|
||||||
|
|
||||||
|
# Apply security restrictions suggested by
|
||||||
|
# systemd-analyze security echobot.service
|
||||||
|
CapabilityBoundingSet=
|
||||||
|
LockPersonality=true
|
||||||
|
MemoryDenyWriteExecute=true
|
||||||
|
NoNewPrivileges=true
|
||||||
|
PrivateDevices=true
|
||||||
|
PrivateMounts=true
|
||||||
|
PrivateTmp=true
|
||||||
|
|
||||||
|
# We need to know about doveauth user to give it access to /run/echobot/password
|
||||||
|
PrivateUsers=false
|
||||||
|
|
||||||
|
ProtectClock=true
|
||||||
|
ProtectControlGroups=true
|
||||||
|
ProtectHostname=true
|
||||||
|
ProtectKernelLogs=true
|
||||||
|
ProtectKernelModules=true
|
||||||
|
ProtectKernelTunables=true
|
||||||
|
ProtectProc=noaccess
|
||||||
|
|
||||||
|
# Should be "strict", but we currently write /accounts folder in a protected path
|
||||||
|
ProtectSystem=full
|
||||||
|
|
||||||
|
RemoveIPC=true
|
||||||
|
RestrictAddressFamilies=AF_INET AF_INET6
|
||||||
|
RestrictNamespaces=true
|
||||||
|
RestrictRealtime=true
|
||||||
|
RestrictSUIDSGID=true
|
||||||
|
SystemCallArchitectures=native
|
||||||
|
SystemCallFilter=~@clock
|
||||||
|
SystemCallFilter=~@cpu-emulation
|
||||||
|
SystemCallFilter=~@debug
|
||||||
|
SystemCallFilter=~@module
|
||||||
|
SystemCallFilter=~@mount
|
||||||
|
SystemCallFilter=~@obsolete
|
||||||
|
SystemCallFilter=~@raw-io
|
||||||
|
SystemCallFilter=~@reboot
|
||||||
|
SystemCallFilter=~@resources
|
||||||
|
SystemCallFilter=~@swap
|
||||||
|
UMask=0077
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
[Unit]
|
|
||||||
Description=A wrapper for the TURN server
|
|
||||||
After=network.target
|
|
||||||
|
|
||||||
[Service]
|
|
||||||
Type=simple
|
|
||||||
Restart=always
|
|
||||||
ExecStart=/usr/local/bin/chatmail-turn --realm {mail_domain} --socket /run/chatmail-turn/turn.socket
|
|
||||||
|
|
||||||
# Create /run/chatmail-turn
|
|
||||||
RuntimeDirectory=chatmail-turn
|
|
||||||
User=vmail
|
|
||||||
Group=vmail
|
|
||||||
|
|
||||||
[Install]
|
|
||||||
WantedBy=multi-user.target
|
|
||||||
@@ -82,19 +82,3 @@ class SSHExec:
|
|||||||
res = self(call, kwargs, log_callback=remote.rshell.log_progress)
|
res = self(call, kwargs, log_callback=remote.rshell.log_progress)
|
||||||
print_stderr()
|
print_stderr()
|
||||||
return res
|
return res
|
||||||
|
|
||||||
|
|
||||||
class LocalExec:
|
|
||||||
def __init__(self, verbose=False, docker=False):
|
|
||||||
self.verbose = verbose
|
|
||||||
self.docker = docker
|
|
||||||
|
|
||||||
def logged(self, call, kwargs: dict):
|
|
||||||
where = "locally"
|
|
||||||
if self.docker:
|
|
||||||
if call == remote.rdns.perform_initial_checks:
|
|
||||||
kwargs["pre_command"] = "docker exec chatmail "
|
|
||||||
where = "in docker"
|
|
||||||
if self.verbose:
|
|
||||||
print(f"Running {where}: {call.__name__}(**{kwargs})")
|
|
||||||
return call(**kwargs)
|
|
||||||
|
|||||||
@@ -37,7 +37,7 @@ class TestDC:
|
|||||||
|
|
||||||
def test_ping_pong(self, benchmark, cmfactory):
|
def test_ping_pong(self, benchmark, cmfactory):
|
||||||
ac1, ac2 = cmfactory.get_online_accounts(2)
|
ac1, ac2 = cmfactory.get_online_accounts(2)
|
||||||
chat = cmfactory.get_accepted_chat(ac1, ac2)
|
chat = cmfactory.get_protected_chat(ac1, ac2)
|
||||||
|
|
||||||
def dc_ping_pong():
|
def dc_ping_pong():
|
||||||
chat.send_text("ping")
|
chat.send_text("ping")
|
||||||
@@ -49,7 +49,7 @@ class TestDC:
|
|||||||
|
|
||||||
def test_send_10_receive_10(self, benchmark, cmfactory, lp):
|
def test_send_10_receive_10(self, benchmark, cmfactory, lp):
|
||||||
ac1, ac2 = cmfactory.get_online_accounts(2)
|
ac1, ac2 = cmfactory.get_online_accounts(2)
|
||||||
chat = cmfactory.get_accepted_chat(ac1, ac2)
|
chat = cmfactory.get_protected_chat(ac1, ac2)
|
||||||
|
|
||||||
def dc_send_10_receive_10():
|
def dc_send_10_receive_10():
|
||||||
for i in range(10):
|
for i in range(10):
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
import queue
|
import queue
|
||||||
import smtplib
|
import socket
|
||||||
import threading
|
import threading
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
@@ -91,23 +91,25 @@ def test_concurrent_logins_same_account(
|
|||||||
|
|
||||||
def test_no_vrfy(chatmail_config):
|
def test_no_vrfy(chatmail_config):
|
||||||
domain = chatmail_config.mail_domain
|
domain = chatmail_config.mail_domain
|
||||||
|
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||||
s = smtplib.SMTP(domain)
|
sock.settimeout(10)
|
||||||
s.starttls()
|
try:
|
||||||
|
sock.connect((domain, 25))
|
||||||
s.putcmd("vrfy", f"wrongaddress@{chatmail_config.mail_domain}")
|
except socket.timeout:
|
||||||
result = s.getreply()
|
pytest.skip(f"port 25 not reachable for {domain}")
|
||||||
|
banner = sock.recv(1024)
|
||||||
|
print(banner)
|
||||||
|
sock.send(b"VRFY wrongaddress@%s\r\n" % (chatmail_config.mail_domain.encode(),))
|
||||||
|
result = sock.recv(1024)
|
||||||
print(result)
|
print(result)
|
||||||
s.putcmd("vrfy", f"echo@{chatmail_config.mail_domain}")
|
sock.send(b"VRFY echo@%s\r\n" % (chatmail_config.mail_domain.encode(),))
|
||||||
result2 = s.getreply()
|
result2 = sock.recv(1024)
|
||||||
print(result2)
|
print(result2)
|
||||||
assert result[0] == result2[0] == 252
|
assert result[0:10] == result2[0:10]
|
||||||
assert result[1][0:6] == result2[1][0:6] == b"2.0.0 "
|
sock.send(b"VRFY wrongaddress\r\n")
|
||||||
s.putcmd("vrfy", "wrongaddress")
|
result = sock.recv(1024)
|
||||||
result = s.getreply()
|
|
||||||
print(result)
|
print(result)
|
||||||
s.putcmd("vrfy", "echo")
|
sock.send(b"VRFY echo\r\n")
|
||||||
result2 = s.getreply()
|
result2 = sock.recv(1024)
|
||||||
print(result2)
|
print(result2)
|
||||||
assert result[0] == result2[0] == 252
|
assert result[0:10] == result2[0:10] == b"252 2.0.0 "
|
||||||
assert result[1][0:6] == result2[1][0:6] == b"2.0.0 "
|
|
||||||
|
|||||||
@@ -2,7 +2,6 @@ import datetime
|
|||||||
import smtplib
|
import smtplib
|
||||||
import socket
|
import socket
|
||||||
import subprocess
|
import subprocess
|
||||||
import time
|
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
@@ -32,8 +31,7 @@ class TestSSHExecutor:
|
|||||||
)
|
)
|
||||||
out, err = capsys.readouterr()
|
out, err = capsys.readouterr()
|
||||||
assert err.startswith("Collecting")
|
assert err.startswith("Collecting")
|
||||||
# XXX could not figure out how capturing can be made to work properly
|
#assert err.endswith("....\n")
|
||||||
# assert err.endswith("....\n")
|
|
||||||
assert err.count("\n") == 1
|
assert err.count("\n") == 1
|
||||||
|
|
||||||
sshexec.verbose = True
|
sshexec.verbose = True
|
||||||
@@ -42,8 +40,7 @@ class TestSSHExecutor:
|
|||||||
)
|
)
|
||||||
out, err = capsys.readouterr()
|
out, err = capsys.readouterr()
|
||||||
lines = err.split("\n")
|
lines = err.split("\n")
|
||||||
# XXX could not figure out how capturing can be made to work properly
|
#assert len(lines) > 4
|
||||||
# assert len(lines) > 4
|
|
||||||
assert remote.rdns.perform_initial_checks.__doc__ in lines[0]
|
assert remote.rdns.perform_initial_checks.__doc__ in lines[0]
|
||||||
|
|
||||||
def test_exception(self, sshexec, capsys):
|
def test_exception(self, sshexec, capsys):
|
||||||
@@ -72,7 +69,7 @@ def test_timezone_env(remote):
|
|||||||
for line in remote.iter_output("env"):
|
for line in remote.iter_output("env"):
|
||||||
print(line)
|
print(line)
|
||||||
if line == "tz=:/etc/localtime":
|
if line == "tz=:/etc/localtime":
|
||||||
return
|
return True
|
||||||
pytest.fail("TZ is not set")
|
pytest.fail("TZ is not set")
|
||||||
|
|
||||||
|
|
||||||
@@ -143,23 +140,12 @@ def test_reject_missing_dkim(cmsetup, maildata, from_addr):
|
|||||||
"encrypted.eml", from_addr=from_addr, to_addr=recipient.addr
|
"encrypted.eml", from_addr=from_addr, to_addr=recipient.addr
|
||||||
).as_string()
|
).as_string()
|
||||||
conn = smtplib.SMTP(cmsetup.maildomain, 25, timeout=10)
|
conn = smtplib.SMTP(cmsetup.maildomain, 25, timeout=10)
|
||||||
conn.starttls()
|
|
||||||
|
|
||||||
with conn as s:
|
with conn as s:
|
||||||
with pytest.raises(smtplib.SMTPDataError, match="No valid DKIM signature"):
|
with pytest.raises(smtplib.SMTPDataError, match="No valid DKIM signature"):
|
||||||
s.sendmail(from_addr=from_addr, to_addrs=recipient.addr, msg=msg)
|
s.sendmail(from_addr=from_addr, to_addrs=recipient.addr, msg=msg)
|
||||||
|
|
||||||
|
|
||||||
def try_n_times(n, f):
|
|
||||||
for _ in range(n - 1):
|
|
||||||
try:
|
|
||||||
return f()
|
|
||||||
except Exception:
|
|
||||||
time.sleep(1)
|
|
||||||
|
|
||||||
return f()
|
|
||||||
|
|
||||||
|
|
||||||
def test_rewrite_subject(cmsetup, maildata):
|
def test_rewrite_subject(cmsetup, maildata):
|
||||||
"""Test that subject gets replaced with [...]."""
|
"""Test that subject gets replaced with [...]."""
|
||||||
user1, user2 = cmsetup.gen_users(2)
|
user1, user2 = cmsetup.gen_users(2)
|
||||||
@@ -172,8 +158,7 @@ def test_rewrite_subject(cmsetup, maildata):
|
|||||||
).as_string()
|
).as_string()
|
||||||
user1.smtp.sendmail(from_addr=user1.addr, to_addrs=[user2.addr], msg=sent_msg)
|
user1.smtp.sendmail(from_addr=user1.addr, to_addrs=[user2.addr], msg=sent_msg)
|
||||||
|
|
||||||
# The message may need some time to get delivered by postfix.
|
messages = user2.imap.fetch_all_messages()
|
||||||
messages = try_n_times(5, user2.imap.fetch_all_messages)
|
|
||||||
assert len(messages) == 1
|
assert len(messages) == 1
|
||||||
rcvd_msg = messages[0]
|
rcvd_msg = messages[0]
|
||||||
assert "Subject: [...]" not in sent_msg
|
assert "Subject: [...]" not in sent_msg
|
||||||
@@ -224,14 +209,8 @@ def test_expunged(remote, chatmail_config):
|
|||||||
|
|
||||||
|
|
||||||
def test_deployed_state(remote):
|
def test_deployed_state(remote):
|
||||||
try:
|
git_hash = subprocess.check_output(["git", "rev-parse", "HEAD"]).decode()
|
||||||
git_hash = subprocess.check_output(["git", "rev-parse", "HEAD"]).decode()
|
git_diff = subprocess.check_output(["git", "diff"]).decode()
|
||||||
except Exception:
|
|
||||||
git_hash = "unknown\n"
|
|
||||||
try:
|
|
||||||
git_diff = subprocess.check_output(["git", "diff"]).decode()
|
|
||||||
except Exception:
|
|
||||||
git_diff = ""
|
|
||||||
git_status = [git_hash.strip()]
|
git_status = [git_hash.strip()]
|
||||||
for line in git_diff.splitlines():
|
for line in git_diff.splitlines():
|
||||||
git_status.append(line.strip().lower())
|
git_status.append(line.strip().lower())
|
||||||
|
|||||||
@@ -56,7 +56,7 @@ class TestEndToEndDeltaChat:
|
|||||||
"""Test that a DC account can send a message to a second DC account
|
"""Test that a DC account can send a message to a second DC account
|
||||||
on the same chat-mail instance."""
|
on the same chat-mail instance."""
|
||||||
ac1, ac2 = cmfactory.get_online_accounts(2)
|
ac1, ac2 = cmfactory.get_online_accounts(2)
|
||||||
chat = cmfactory.get_accepted_chat(ac1, ac2)
|
chat = cmfactory.get_protected_chat(ac1, ac2)
|
||||||
chat.send_text("message0")
|
chat.send_text("message0")
|
||||||
|
|
||||||
lp.sec("wait for ac2 to receive message")
|
lp.sec("wait for ac2 to receive message")
|
||||||
@@ -70,7 +70,7 @@ class TestEndToEndDeltaChat:
|
|||||||
before quota is exceeded, and thus depends on the speed of the upload.
|
before quota is exceeded, and thus depends on the speed of the upload.
|
||||||
"""
|
"""
|
||||||
ac1, ac2 = cmfactory.get_online_accounts(2)
|
ac1, ac2 = cmfactory.get_online_accounts(2)
|
||||||
chat = cmfactory.get_accepted_chat(ac1, ac2)
|
chat = cmfactory.get_protected_chat(ac1, ac2)
|
||||||
|
|
||||||
user = ac2.get_config("configured_addr")
|
user = ac2.get_config("configured_addr")
|
||||||
|
|
||||||
@@ -153,10 +153,29 @@ def test_hide_senders_ip_address(cmfactory):
|
|||||||
assert ipaddress.ip_address(public_ip)
|
assert ipaddress.ip_address(public_ip)
|
||||||
|
|
||||||
user1, user2 = cmfactory.get_online_accounts(2)
|
user1, user2 = cmfactory.get_online_accounts(2)
|
||||||
chat = cmfactory.get_accepted_chat(user1, user2)
|
chat = cmfactory.get_protected_chat(user1, user2)
|
||||||
|
|
||||||
chat.send_text("testing submission header cleanup")
|
chat.send_text("testing submission header cleanup")
|
||||||
user2._evtracker.wait_next_incoming_message()
|
user2._evtracker.wait_next_incoming_message()
|
||||||
user2.direct_imap.select_folder("Inbox")
|
user2.direct_imap.select_folder("Inbox")
|
||||||
msg = user2.direct_imap.get_all_messages()[0]
|
msg = user2.direct_imap.get_all_messages()[0]
|
||||||
assert public_ip not in msg.obj.as_string()
|
assert public_ip not in msg.obj.as_string()
|
||||||
|
|
||||||
|
|
||||||
|
def test_echobot(cmfactory, chatmail_config, lp, sshdomain):
|
||||||
|
ac = cmfactory.get_online_accounts(1)[0]
|
||||||
|
|
||||||
|
# establish contact with echobot
|
||||||
|
sshexec = SSHExec(sshdomain)
|
||||||
|
command = "cat /var/lib/echobot/invite-link.txt"
|
||||||
|
echo_invite_link = sshexec(call=rshell.shell, kwargs=dict(command=command))
|
||||||
|
chat = ac.qr_setup_contact(echo_invite_link)
|
||||||
|
ac._evtracker.wait_securejoin_joiner_progress(1000)
|
||||||
|
|
||||||
|
# send message and check it gets replied back
|
||||||
|
lp.sec("Send message to echobot")
|
||||||
|
text = "hi, I hope you text me back"
|
||||||
|
chat.send_text(text)
|
||||||
|
lp.sec("Wait for reply from echobot")
|
||||||
|
reply = ac._evtracker.wait_next_incoming_message()
|
||||||
|
assert reply.text == text
|
||||||
|
|||||||
@@ -1,49 +0,0 @@
|
|||||||
import os
|
|
||||||
|
|
||||||
from cmdeploy.cmdeploy import main
|
|
||||||
|
|
||||||
|
|
||||||
def test_status_cmd(chatmail_config, capsys, request):
|
|
||||||
os.chdir(request.config.invocation_params.dir)
|
|
||||||
assert main(["status"]) == 0
|
|
||||||
status_out = capsys.readouterr()
|
|
||||||
print(status_out.out)
|
|
||||||
|
|
||||||
assert len(status_out.out.splitlines()) > 5
|
|
||||||
|
|
||||||
"""
|
|
||||||
don't test actual server state:
|
|
||||||
|
|
||||||
services = [
|
|
||||||
"acmetool-redirector",
|
|
||||||
"chatmail-metadata",
|
|
||||||
"doveauth",
|
|
||||||
"dovecot",
|
|
||||||
"fcgiwrap",
|
|
||||||
"filtermail-incoming",
|
|
||||||
"filtermail",
|
|
||||||
"lastlogin",
|
|
||||||
"nginx",
|
|
||||||
"opendkim",
|
|
||||||
"postfix@-",
|
|
||||||
"systemd-journald",
|
|
||||||
"turnserver",
|
|
||||||
"unbound",
|
|
||||||
]
|
|
||||||
not_running = []
|
|
||||||
for service in services:
|
|
||||||
active = False
|
|
||||||
for line in status_out:
|
|
||||||
if service in line:
|
|
||||||
active = True
|
|
||||||
if not "loaded" in line:
|
|
||||||
active = False
|
|
||||||
if not "active" in line:
|
|
||||||
active = False
|
|
||||||
if not "running" in line:
|
|
||||||
active = False
|
|
||||||
break
|
|
||||||
if not active:
|
|
||||||
not_running.append(service)
|
|
||||||
assert not_running == []
|
|
||||||
"""
|
|
||||||
@@ -26,15 +26,10 @@ class TestCmdline:
|
|||||||
def test_init_not_overwrite(self, capsys):
|
def test_init_not_overwrite(self, capsys):
|
||||||
assert main(["init", "chat.example.org"]) == 0
|
assert main(["init", "chat.example.org"]) == 0
|
||||||
capsys.readouterr()
|
capsys.readouterr()
|
||||||
|
|
||||||
assert main(["init", "chat.example.org"]) == 1
|
assert main(["init", "chat.example.org"]) == 1
|
||||||
out, err = capsys.readouterr()
|
out, err = capsys.readouterr()
|
||||||
assert "path exists" in out.lower()
|
assert "path exists" in out.lower()
|
||||||
|
|
||||||
assert main(["init", "chat.example.org", "--force"]) == 0
|
|
||||||
out, err = capsys.readouterr()
|
|
||||||
assert "deleting config file" in out.lower()
|
|
||||||
|
|
||||||
|
|
||||||
def test_www_folder(example_config, tmp_path):
|
def test_www_folder(example_config, tmp_path):
|
||||||
reporoot = importlib.resources.files(__package__).joinpath("../../../../").resolve()
|
reporoot = importlib.resources.files(__package__).joinpath("../../../../").resolve()
|
||||||
|
|||||||
@@ -1,5 +1,3 @@
|
|||||||
from copy import deepcopy
|
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from cmdeploy import remote
|
from cmdeploy import remote
|
||||||
@@ -10,65 +8,38 @@ from cmdeploy.dns import check_full_zone, check_initial_remote_data
|
|||||||
def mockdns_base(monkeypatch):
|
def mockdns_base(monkeypatch):
|
||||||
qdict = {}
|
qdict = {}
|
||||||
|
|
||||||
def shell(command, fail_ok=False, print=print):
|
def query_dns(typ, domain):
|
||||||
if command.startswith("dig"):
|
try:
|
||||||
if command == "dig":
|
return qdict[typ][domain]
|
||||||
return "."
|
except KeyError:
|
||||||
if "SOA" in command:
|
return ""
|
||||||
return (
|
|
||||||
"delta.chat. 21600 IN SOA ns1.first-ns.de. dns.hetzner.com."
|
|
||||||
" 2025102800 14400 1800 604800 3600"
|
|
||||||
)
|
|
||||||
command_chunks = command.split()
|
|
||||||
domain, typ = command_chunks[4], command_chunks[6]
|
|
||||||
try:
|
|
||||||
return qdict[typ][domain]
|
|
||||||
except KeyError:
|
|
||||||
return ""
|
|
||||||
return remote.rshell.shell(command=command, fail_ok=fail_ok, print=print)
|
|
||||||
|
|
||||||
monkeypatch.setattr(remote.rdns, shell.__name__, shell)
|
monkeypatch.setattr(remote.rdns, query_dns.__name__, query_dns)
|
||||||
return qdict
|
return qdict
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def mockdns_expected():
|
def mockdns(mockdns_base):
|
||||||
return {
|
mockdns_base.update(
|
||||||
"A": {"some.domain": "1.1.1.1"},
|
{
|
||||||
"AAAA": {"some.domain": "fde5:cd7a:9e1c:3240:5a99:936f:cdac:53ae"},
|
"A": {"some.domain": "1.1.1.1"},
|
||||||
"CNAME": {
|
"AAAA": {"some.domain": "fde5:cd7a:9e1c:3240:5a99:936f:cdac:53ae"},
|
||||||
"mta-sts.some.domain": "some.domain.",
|
"CNAME": {
|
||||||
"www.some.domain": "some.domain.",
|
"mta-sts.some.domain": "some.domain.",
|
||||||
},
|
"www.some.domain": "some.domain.",
|
||||||
}
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
@pytest.fixture(params=["plain", "with-dns-comments"])
|
|
||||||
def mockdns(request, mockdns_base, mockdns_expected):
|
|
||||||
mockdns_base.update(deepcopy(mockdns_expected))
|
|
||||||
match request.param:
|
|
||||||
case "plain":
|
|
||||||
pass
|
|
||||||
case "with-dns-comments":
|
|
||||||
for typ, data in mockdns_base.items():
|
|
||||||
for host, result in data.items():
|
|
||||||
mockdns_base[typ][host] = (
|
|
||||||
";; some unsuccessful attempt result\n"
|
|
||||||
"; and another with a single semicolon\n"
|
|
||||||
f"{result}"
|
|
||||||
)
|
|
||||||
return mockdns_base
|
return mockdns_base
|
||||||
|
|
||||||
|
|
||||||
class TestPerformInitialChecks:
|
class TestPerformInitialChecks:
|
||||||
def test_perform_initial_checks_ok1(self, mockdns, mockdns_expected):
|
def test_perform_initial_checks_ok1(self, mockdns):
|
||||||
remote_data = remote.rdns.perform_initial_checks("some.domain")
|
remote_data = remote.rdns.perform_initial_checks("some.domain")
|
||||||
assert remote_data["A"] == mockdns_expected["A"]["some.domain"]
|
assert remote_data["A"] == mockdns["A"]["some.domain"]
|
||||||
assert remote_data["AAAA"] == mockdns_expected["AAAA"]["some.domain"]
|
assert remote_data["AAAA"] == mockdns["AAAA"]["some.domain"]
|
||||||
assert (
|
assert remote_data["MTA_STS"] == mockdns["CNAME"]["mta-sts.some.domain"]
|
||||||
remote_data["MTA_STS"] == mockdns_expected["CNAME"]["mta-sts.some.domain"]
|
assert remote_data["WWW"] == mockdns["CNAME"]["www.some.domain"]
|
||||||
)
|
|
||||||
assert remote_data["WWW"] == mockdns_expected["CNAME"]["www.some.domain"]
|
|
||||||
|
|
||||||
@pytest.mark.parametrize("drop", ["A", "AAAA"])
|
@pytest.mark.parametrize("drop", ["A", "AAAA"])
|
||||||
def test_perform_initial_checks_with_one_of_A_AAAA(self, mockdns, drop):
|
def test_perform_initial_checks_with_one_of_A_AAAA(self, mockdns, drop):
|
||||||
|
|||||||
@@ -1,6 +1,5 @@
|
|||||||
import hashlib
|
import hashlib
|
||||||
import importlib.resources
|
import importlib.resources
|
||||||
import re
|
|
||||||
import time
|
import time
|
||||||
import traceback
|
import traceback
|
||||||
import webbrowser
|
import webbrowser
|
||||||
@@ -12,10 +11,6 @@ from jinja2 import Template
|
|||||||
|
|
||||||
from .genqr import gen_qr_png_data
|
from .genqr import gen_qr_png_data
|
||||||
|
|
||||||
_MERGE_CONFLICT_RE = re.compile(
|
|
||||||
r"^<<<<<<<.+^=======.+^>>>>>>>", re.DOTALL | re.MULTILINE
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def snapshot_dir_stats(somedir):
|
def snapshot_dir_stats(somedir):
|
||||||
d = {}
|
d = {}
|
||||||
@@ -121,17 +116,6 @@ def _build_webpages(src_dir, build_dir, config):
|
|||||||
return build_dir
|
return build_dir
|
||||||
|
|
||||||
|
|
||||||
def find_merge_conflict(src_dir) -> Path:
|
|
||||||
assert src_dir.exists(), src_dir
|
|
||||||
result = None
|
|
||||||
for path in src_dir.iterdir():
|
|
||||||
if path.suffix in [".css", ".html", ".md"]:
|
|
||||||
if _MERGE_CONFLICT_RE.search(path.read_text()):
|
|
||||||
result = path
|
|
||||||
break
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
path = importlib.resources.files(__package__)
|
path = importlib.resources.files(__package__)
|
||||||
reporoot = path.joinpath("../../../").resolve()
|
reporoot = path.joinpath("../../../").resolve()
|
||||||
@@ -140,34 +124,34 @@ def main():
|
|||||||
config.webdev = True
|
config.webdev = True
|
||||||
assert config.mail_domain
|
assert config.mail_domain
|
||||||
|
|
||||||
|
# start web page generation, open a browser and wait for changes
|
||||||
www_path, src_path, build_dir = get_paths(config)
|
www_path, src_path, build_dir = get_paths(config)
|
||||||
build_dir = build_webpages(src_path, build_dir, config)
|
build_dir = build_webpages(src_path, build_dir, config)
|
||||||
index_path = build_dir.joinpath("index.html")
|
index_path = build_dir.joinpath("index.html")
|
||||||
webbrowser.open(str(index_path))
|
webbrowser.open(str(index_path))
|
||||||
|
|
||||||
print(f"\nOpened URL: file://{index_path.resolve()}\n")
|
|
||||||
print(f"Watching {src_path} directory for changes...")
|
|
||||||
|
|
||||||
stats = snapshot_dir_stats(src_path)
|
stats = snapshot_dir_stats(src_path)
|
||||||
|
print(f"\nOpened URL: file://{index_path.resolve()}\n")
|
||||||
|
print(f"watching {src_path} directory for changes")
|
||||||
|
|
||||||
changenum = 0
|
changenum = 0
|
||||||
debounce_time = 0.5 # wait 0.5s after detecting a change
|
count = 0
|
||||||
|
|
||||||
while True:
|
while True:
|
||||||
time.sleep(1)
|
|
||||||
newstats = snapshot_dir_stats(src_path)
|
newstats = snapshot_dir_stats(src_path)
|
||||||
|
if newstats == stats and count % 60 != 0:
|
||||||
|
count += 1
|
||||||
|
time.sleep(1.0)
|
||||||
|
continue
|
||||||
|
|
||||||
if newstats != stats:
|
for key in newstats:
|
||||||
changed_files = [f for f in newstats if stats.get(f) != newstats[f]]
|
if stats[key] != newstats[key]:
|
||||||
for f in changed_files:
|
print(f"*** CHANGED: {key}")
|
||||||
print(f"*** CHANGED: {f}")
|
changenum += 1
|
||||||
|
|
||||||
stats = newstats
|
stats = newstats
|
||||||
changenum += 1
|
build_webpages(src_path, build_dir, config)
|
||||||
build_webpages(src_path, build_dir, config)
|
print(f"[{changenum}] regenerated web pages at: {index_path}")
|
||||||
print(f"[{changenum}] regenerated web pages at: {index_path}")
|
print(f"URL: file://{index_path.resolve()}\n\n")
|
||||||
print(f"URL: file://{index_path.resolve()}\n\n")
|
count = 0
|
||||||
|
|
||||||
time.sleep(debounce_time) # simple debounce
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
|||||||
24
doc/Makefile
24
doc/Makefile
@@ -1,24 +0,0 @@
|
|||||||
# Minimal makefile for Sphinx documentation
|
|
||||||
#
|
|
||||||
|
|
||||||
# You can set these variables from the command line, and also
|
|
||||||
# from the environment for the first two.
|
|
||||||
SPHINXOPTS ?=
|
|
||||||
SPHINXBUILD ?= sphinx-build
|
|
||||||
SOURCEDIR = source
|
|
||||||
BUILDDIR = build
|
|
||||||
|
|
||||||
# Put it first so that "make" without argument is like "make help".
|
|
||||||
help:
|
|
||||||
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
|
||||||
|
|
||||||
auto:
|
|
||||||
sphinx-autobuild "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
|
||||||
|
|
||||||
.PHONY: help Makefile auto
|
|
||||||
|
|
||||||
# Catch-all target: route all unknown targets to Sphinx using the new
|
|
||||||
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
|
|
||||||
%: Makefile
|
|
||||||
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
|
||||||
|
|
||||||
@@ -1,17 +0,0 @@
|
|||||||
|
|
||||||
|
|
||||||
## Building the documentation
|
|
||||||
|
|
||||||
You can use the `make` command and `make html` to build web pages.
|
|
||||||
|
|
||||||
You need a Python environment where the following install was excuted:
|
|
||||||
|
|
||||||
pip install sphinx-build furo sphinx-autobuild
|
|
||||||
|
|
||||||
To develop/change documentation, you can then do:
|
|
||||||
|
|
||||||
make auto
|
|
||||||
|
|
||||||
A page will open at https://127.0.0.1:8000/ serving the docs and it will
|
|
||||||
react to changes to source files pretty fast.
|
|
||||||
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:svg="http://www.w3.org/2000/svg" id="svg4" width="145" height="145" version="1.1"><g id="text2" aria-label="@" style="font-size:144px;font-family:Arial" transform="matrix(1.0934997,0,0,1.0934997,-6.7787266,-6.7787281)"><path id="path347" d="m 79.927878,94.422406 c -2.704286,3.120332 -5.741407,5.637394 -9.111364,7.551194 -3.328352,1.87221 -6.677506,2.80831 -10.047463,2.80831 -3.702792,0 -7.301573,-1.08172 -10.796342,-3.24515 -3.49477,-2.163426 -6.344671,-5.491779 -8.549704,-9.985058 -2.163429,-4.493275 -3.245144,-9.423397 -3.245144,-14.790365 0,-6.615099 1.684978,-13.230199 5.054935,-19.845299 3.411561,-6.656705 7.634407,-11.649233 12.66854,-14.977585 5.034133,-3.328352 9.92265,-4.992528 14.665552,-4.992528 3.619583,0 7.072748,0.956901 10.359496,2.870704 3.286748,1.872198 6.115847,4.742902 8.487297,8.612111 l 2.121825,-9.673023 h 11.170784 l -8.986557,41.87483 c -1.248129,5.824616 -1.872194,9.048957 -1.872194,9.673023 0,1.123319 0.416044,2.101022 1.248132,2.93311 0.873692,0.790484 1.913802,1.185726 3.120332,1.185726 2.20503,0 5.096537,-1.268934 8.674517,-3.806803 4.7429,-3.328352 8.4873,-7.780023 11.23319,-13.355013 2.78749,-5.616594 4.18124,-11.399606 4.18124,-17.349035 0,-6.947935 -1.78899,-13.438222 -5.36697,-19.47086 -3.53637,-6.032638 -8.84094,-10.858749 -15.913687,-14.478332 -7.03114,-3.619583 -14.811161,-5.429374 -23.340064,-5.429374 -9.73543,0 -18.638772,2.288242 -26.710026,6.864726 -8.029649,4.534879 -14.27031,11.06677 -18.721981,19.595673 -4.410066,8.487298 -6.615099,17.598662 -6.615099,27.334092 0,10.193078 2.205033,18.971607 6.615099,26.33559 2.290454,3.78888 -7.136335,18.96983 -3.810585,21.73443 3.138096,2.60861 18.971963,-7.14297 23.031819,-5.44631 8.404089,3.53637 17.702673,5.30456 27.895752,5.30456 10.90035,0 20.032515,-1.83059 27.396492,-5.49178 7.36399,-3.66119 12.87657,-8.11286 16.53776,-13.35501 l 9.29559,4 c -2.12183,4.36846 -3.76221,4.82013 -8.92116,9.35501 -5.15895,4.53488 -11.2956,8.11286 
-18.40995,10.73393 -7.114346,2.66268 -15.684851,3.99402 -25.711512,3.99402 -9.236177,0 -17.76508,-1.18572 -25.586707,-3.55717 -7.780023,-2.37145 -29.296198,9.26152 -34.78798,4.47701 -5.49178,-4.7429 5.248856,-25.42482 2.461361,-31.62388 -3.49477,-7.863231 -5.242155,-16.350531 -5.242155,-25.461894 0,-10.151474 2.08022,-19.824498 6.240661,-29.019071 5.075736,-11.274793 12.273297,-19.907706 21.592683,-25.898739 9.360991,-5.991034 20.69819,-8.986551 34.011599,-8.986551 10.317891,0 19.574873,2.121824 27.77093,6.365473 8.23767,4.202045 14.72796,10.484309 19.47086,18.846794 4.03563,7.197561 6.05344,15.019189 6.05344,23.464883 0,12.065277 -4.24365,22.77841 -12.73094,32.1394 -7.572,8.404095 -15.85128,12.606135 -24.837827,12.606135 -2.870704,0 -5.200551,-0.43684 -6.98954,-1.31053 -1.747385,-0.8737 -3.037121,-2.12183 -3.869209,-3.744402 -0.540857,-1.040114 -0.936099,-2.829105 -1.185726,-5.366972 z M 49.723082,77.510217 c 0,5.699803 1.352143,10.130671 4.05643,13.292606 2.704286,3.161935 5.803814,4.742902 9.298583,4.742902 2.329847,0 4.784506,-0.686473 7.363979,-2.059418 2.579473,-1.41455 5.034133,-3.49477 7.363979,-6.240661 2.371451,-2.74589 4.306056,-6.219857 5.803815,-10.421902 1.497759,-4.243649 2.246638,-8.487298 2.246638,-12.730947 0,-5.658198 -1.41455,-10.047462 -4.243649,-13.167793 -2.787495,-3.12033 -6.199056,-4.680495 -10.234683,-4.680495 -2.662682,0 -5.179749,0.686473 -7.5512,2.059418 -2.329846,1.331341 -4.597286,3.494769 -6.802319,6.490286 -2.205033,2.995517 -3.97322,6.635903 -5.304561,10.921156 -1.331341,4.285253 -1.997012,8.216869 -1.997012,11.794848 z" style="stroke-width:.887561"/></g></svg>
|
|
||||||
|
Before Width: | Height: | Size: 3.5 KiB |
@@ -1,21 +0,0 @@
|
|||||||
/* Tweak how the sidebar logo is presented */
|
|
||||||
.sidebar-logo {
|
|
||||||
width: 70%;
|
|
||||||
}
|
|
||||||
.sidebar-brand {
|
|
||||||
padding: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* The landing pages' sidebar-in-content highlights */
|
|
||||||
#features ul {
|
|
||||||
padding-left: 1rem;
|
|
||||||
list-style: none;
|
|
||||||
}
|
|
||||||
#features ul li {
|
|
||||||
margin-bottom: 0;
|
|
||||||
}
|
|
||||||
@media (min-width: 46em) {
|
|
||||||
#features {
|
|
||||||
width: 50%;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,41 +0,0 @@
|
|||||||
# Configuration file for the Sphinx documentation builder.
|
|
||||||
#
|
|
||||||
# For the full list of built-in configuration values, see the documentation:
|
|
||||||
# https://www.sphinx-doc.org/en/master/usage/configuration.html
|
|
||||||
|
|
||||||
# -- Project information -----------------------------------------------------
|
|
||||||
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
|
|
||||||
|
|
||||||
project = 'chatmail relay documentation'
|
|
||||||
copyright = '2025, chatmail collective'
|
|
||||||
author = 'chatmail collective'
|
|
||||||
|
|
||||||
# -- General configuration ---------------------------------------------------
|
|
||||||
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
|
|
||||||
|
|
||||||
extensions = [
|
|
||||||
#'sphinx.ext.autodoc',
|
|
||||||
#'sphinx.ext.viewdoc',
|
|
||||||
'sphinxcontrib.mermaid',
|
|
||||||
]
|
|
||||||
|
|
||||||
templates_path = ['_templates']
|
|
||||||
exclude_patterns = []
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# -- Options for HTML output -------------------------------------------------
|
|
||||||
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
|
|
||||||
|
|
||||||
html_theme = 'furo'
|
|
||||||
html_static_path = ['_static']
|
|
||||||
html_css_files = [
|
|
||||||
"custom.css",
|
|
||||||
]
|
|
||||||
|
|
||||||
html_title = "chatmail relay documentation"
|
|
||||||
#html_short_title = f"chatmail-{release}"
|
|
||||||
|
|
||||||
html_logo = "_static/chatmail.svg"
|
|
||||||
|
|
||||||
|
|
||||||
@@ -1,61 +0,0 @@
|
|||||||
|
|
||||||
|
|
||||||
Frequently asked questions
|
|
||||||
===========================
|
|
||||||
|
|
||||||
What is the difference between chatmail relays and classic email servers?
|
|
||||||
--------------------------------------------------------------------------
|
|
||||||
|
|
||||||
A chatmail relay is a minimal Mail Transport Agent (MTA) setup that
|
|
||||||
goes beyond what classic email servers offer:
|
|
||||||
|
|
||||||
- **Zero State:** no private data or metadata collected, messages are auto-deleted, low disk usage
|
|
||||||
|
|
||||||
- **Instant/Realtime:** sub-second message delivery, realtime P2P
|
|
||||||
streaming, privacy-preserving Push Notifications for Apple, Google, and `Ubuntu Touch <https://docs.ubports.com/en/latest/appdev/guides/pushnotifications.html>`_;
|
|
||||||
|
|
||||||
- **Security Enforcement**: only strict TLS, DKIM and OpenPGP with minimized metadata accepted
|
|
||||||
|
|
||||||
- **Reliable Federation and Decentralization:** No spam or IP reputation checks, federating
|
|
||||||
depends on established IETF standards and protocols.
|
|
||||||
|
|
||||||
|
|
||||||
How about interoperability with classic email servers?
|
|
||||||
-------------------------------------------------------
|
|
||||||
|
|
||||||
Generally, chatmail relays interoperate well with classic email servers.
|
|
||||||
However, some chatmail relays may be blocked by Big-Tech email
|
|
||||||
providers that use intransparent and proprietary techniques for scanning
|
|
||||||
and looking at cleartext email messages between users, or because they
|
|
||||||
use questionable IP-reputation systems that break interoperability.
|
|
||||||
|
|
||||||
**Chatmail relays instead use and require strong cryptography, allowing
|
|
||||||
anyone to participate, without having to submit to Big-Tech
|
|
||||||
restrictions.**
|
|
||||||
|
|
||||||
.. _selfhosted:
|
|
||||||
|
|
||||||
How are chatmail relays run? Can I run one myself?
|
|
||||||
--------------------------------------------------
|
|
||||||
|
|
||||||
Chatmail relays are designed to be very cheap to run, and are generally
|
|
||||||
self-funded by respective operators. All chatmail relays are
|
|
||||||
automatically deployed and updated using `the chatmail relay
|
|
||||||
repository <https://github.com/chatmail/relay>`__. Chatmail relays are
|
|
||||||
composed of proven standard email server components, Postfix and
|
|
||||||
Dovecot, and are configured to run unattended without much maintenance
|
|
||||||
effort. Chatmail relays happily run on low-end hardware like a Raspberry
|
|
||||||
Pi.
|
|
||||||
|
|
||||||
|
|
||||||
How trustable are chatmail relays?
|
|
||||||
----------------------------------
|
|
||||||
|
|
||||||
Chatmail relays enforce end-to-end encryption,
|
|
||||||
and chatmail clients like `Delta Chat <https://delta.chat>`_
|
|
||||||
enforce end-to-end encryption on their own.
|
|
||||||
|
|
||||||
The end-to-end encryption protection includes attached media, user
|
|
||||||
display names, avatars and group names. What is visible to operators is:
|
|
||||||
message date, sender and receiver addresses.
|
|
||||||
Please see the `Delta Chat FAQ on encryption and security <https://delta.chat/en/help#e2ee>`_ for further info.
|
|
||||||
@@ -1,169 +0,0 @@
|
|||||||
Setting up a chatmail relay
|
|
||||||
===========================
|
|
||||||
|
|
||||||
This section contains everything needed to setup a ready-to-use chatmail relay.
|
|
||||||
The automated setup is designed and optimized for providing chatmail
|
|
||||||
addresses for immediate permission-free onboarding through chat apps and bots.
|
|
||||||
Chatmail addresses are automatically created at first login,
|
|
||||||
after which the initially specified password is required
|
|
||||||
for sending and receiving messages through them.
|
|
||||||
|
|
||||||
|
|
||||||
Minimal requirements and prerequisites
|
|
||||||
--------------------------------------
|
|
||||||
|
|
||||||
You will need the following:
|
|
||||||
|
|
||||||
- Control over a domain through a DNS provider of your choice.
|
|
||||||
|
|
||||||
- A Debian 12 server with reachable SMTP/SUBMISSIONS/IMAPS/HTTPS ports.
|
|
||||||
IPv6 is encouraged if available. Chatmail relay servers only require
|
|
||||||
1GB RAM, one CPU, and perhaps 10GB storage for a few thousand active
|
|
||||||
chatmail addresses.
|
|
||||||
|
|
||||||
- Key-based SSH authentication to the root user. You must add a
|
|
||||||
passphrase-protected private key to your local ssh-agent because you
|
|
||||||
can’t type in your passphrase during deployment. (An ed25519 private
|
|
||||||
key is required due to an `upstream bug in
|
|
||||||
paramiko <https://github.com/paramiko/paramiko/issues/2191>`_)
|
|
||||||
|
|
||||||
|
|
||||||
Setup with ``scripts/cmdeploy``
|
|
||||||
-------------------------------------
|
|
||||||
|
|
||||||
We use ``chat.example.org`` as the chatmail domain in the following
|
|
||||||
steps. Please substitute it with your own domain.
|
|
||||||
|
|
||||||
1. Setup the initial DNS records. The following is an example in the
|
|
||||||
familiar BIND zone file format with a TTL of 1 hour (3600 seconds).
|
|
||||||
Please substitute your domain and IP addresses.
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
chat.example.com. 3600 IN A 198.51.100.5
|
|
||||||
chat.example.com. 3600 IN AAAA 2001:db8::5
|
|
||||||
www.chat.example.com. 3600 IN CNAME chat.example.com.
|
|
||||||
mta-sts.chat.example.com. 3600 IN CNAME chat.example.com.
|
|
||||||
|
|
||||||
2. On your local PC, clone the repository and bootstrap the Python
|
|
||||||
virtualenv.
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
git clone https://github.com/chatmail/relay
|
|
||||||
cd relay
|
|
||||||
scripts/initenv.sh
|
|
||||||
|
|
||||||
3. On your local PC, create chatmail configuration file
|
|
||||||
``chatmail.ini``:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
scripts/cmdeploy init chat.example.org # <-- use your domain
|
|
||||||
|
|
||||||
4. Verify that SSH root login to your remote server works:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
ssh root@chat.example.org # <-- use your domain
|
|
||||||
|
|
||||||
5. From your local PC, deploy the remote chatmail relay server:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
scripts/cmdeploy run
|
|
||||||
|
|
||||||
This script will also check that you have all necessary DNS records.
|
|
||||||
If DNS records are missing, it will recommend which you should
|
|
||||||
configure at your DNS provider (it can take some time until they are
|
|
||||||
public).
|
|
||||||
|
|
||||||
Other helpful commands
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
To check the status of your remotely running chatmail service:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
scripts/cmdeploy status
|
|
||||||
|
|
||||||
To display and check all recommended DNS records:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
scripts/cmdeploy dns
|
|
||||||
|
|
||||||
To test whether your chatmail service is working correctly:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
scripts/cmdeploy test
|
|
||||||
|
|
||||||
To measure the performance of your chatmail service:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
scripts/cmdeploy bench
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
Modifying the home page
|
|
||||||
-----------------------
|
|
||||||
|
|
||||||
``cmdeploy run`` also creates default static web pages and deploys them
|
|
||||||
to a Nginx web server with:
|
|
||||||
|
|
||||||
- a default ``index.html`` along with a QR code that users can click to
|
|
||||||
create an address on your chatmail relay
|
|
||||||
|
|
||||||
- a default ``info.html`` that is linked from the home page
|
|
||||||
|
|
||||||
- a default ``policy.html`` that is linked from the home page
|
|
||||||
|
|
||||||
All ``.html`` files are generated by the according markdown ``.md`` file
|
|
||||||
in the ``www/src`` directory.
|
|
||||||
|
|
||||||
Refining the web pages
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
scripts/cmdeploy webdev
|
|
||||||
|
|
||||||
This starts a local live development cycle for chatmail web pages:
|
|
||||||
|
|
||||||
- uses the ``www/src/page-layout.html`` file for producing static HTML
|
|
||||||
pages from ``www/src/*.md`` files
|
|
||||||
|
|
||||||
- continously builds the web presence reading files from ``www/src``
|
|
||||||
directory and generating HTML files and copying assets to the
|
|
||||||
``www/build`` directory.
|
|
||||||
|
|
||||||
- Starts a browser window automatically where you can “refresh” as
|
|
||||||
needed.
|
|
||||||
|
|
||||||
Custom web pages
|
|
||||||
----------------
|
|
||||||
|
|
||||||
You can skip uploading a web page by setting ``www_folder=disabled`` in
|
|
||||||
``chatmail.ini``.
|
|
||||||
|
|
||||||
If you want to manage your web pages outside this git repository, you
|
|
||||||
can set ``www_folder`` in ``chatmail.ini`` to a custom directory on your
|
|
||||||
computer. ``cmdeploy run`` will upload it as the server’s home page, and
|
|
||||||
if it contains a ``src/index.md`` file, will build it with hugo.
|
|
||||||
|
|
||||||
|
|
||||||
Disable automatic address creation
|
|
||||||
--------------------------------------------------------
|
|
||||||
|
|
||||||
If you need to stop address creation, e.g. because some script is wildly
|
|
||||||
creating addresses, login with ssh and run:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
touch /etc/chatmail-nocreate
|
|
||||||
|
|
||||||
Chatmail address creation will be denied while this file is present.
|
|
||||||
|
|
||||||
|
|
||||||
@@ -1,20 +0,0 @@
|
|||||||
*******************************************
|
|
||||||
chatmail relay documentation
|
|
||||||
*******************************************
|
|
||||||
|
|
||||||
.. image:: ../../www/src/collage-top.png
|
|
||||||
:target: https://testrun.org
|
|
||||||
|
|
||||||
This documentation details how to setup, maintain and understand `chatmail <https://chatmail.at>`_ relays.
|
|
||||||
|
|
||||||
Contributions and feedback welcome through the https://github.com/chatmail/relay repository.
|
|
||||||
|
|
||||||
.. toctree::
|
|
||||||
:maxdepth: 5
|
|
||||||
|
|
||||||
getting_started
|
|
||||||
proxy
|
|
||||||
migrate
|
|
||||||
overview
|
|
||||||
related
|
|
||||||
faq
|
|
||||||
@@ -1,72 +0,0 @@
|
|||||||
|
|
||||||
Migrating to a new host
|
|
||||||
-----------------------
|
|
||||||
|
|
||||||
If you want to migrate chatmail relay from an old machine to a new
|
|
||||||
machine, you can use these steps. They were tested with a Linux laptop;
|
|
||||||
you might need to adjust some of the steps to your environment.
|
|
||||||
|
|
||||||
Let’s assume that your ``mail_domain`` is ``mail.example.org``, all
|
|
||||||
involved machines run Debian 12, your old site’s IP address is
|
|
||||||
``13.37.13.37``, and your new site’s IP address is ``13.12.23.42``.
|
|
||||||
|
|
||||||
Note, you should lower the TTLs of your DNS records to a value such as
|
|
||||||
300 (5 minutes) so the migration happens as smoothly as possible.
|
|
||||||
|
|
||||||
During the guide you might get a warning about changed SSH Host keys; in
|
|
||||||
this case, just run ``ssh-keygen -R "mail.example.org"`` as recommended.
|
|
||||||
|
|
||||||
1. First, disable mail services on the old site.
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
cmdeploy run --disable-mail --ssh-host 13.37.13.37
|
|
||||||
|
|
||||||
Now your users will notice the migration and will not be able to send
|
|
||||||
or receive messages until the migration is completed.
|
|
||||||
|
|
||||||
2. Now we want to copy ``/home/vmail``, ``/var/lib/acme``,
|
|
||||||
``/etc/dkimkeys``, and ``/var/spool/postfix`` to
|
|
||||||
the new site. Login to the old site while forwarding your SSH agent
|
|
||||||
so you can copy directly from the old to the new site with your SSH
|
|
||||||
key:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
ssh -A root@13.37.13.37
|
|
||||||
tar c - /home/vmail/mail /var/lib/acme /etc/dkimkeys /var/spool/postfix | ssh root@13.12.23.42 "tar x -C /"
|
|
||||||
|
|
||||||
This transfers all addresses, the TLS certificate,
|
|
||||||
and DKIM keys (so DKIM DNS record remains valid).
|
|
||||||
It also preserves the Postfix mail spool so any messages
|
|
||||||
pending delivery will still be delivered.
|
|
||||||
|
|
||||||
3. Install chatmail on the new machine:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
cmdeploy run --disable-mail --ssh-host 13.12.23.42
|
|
||||||
|
|
||||||
Postfix and Dovecot are disabled for now; we will enable them later.
|
|
||||||
We first need to make the new site fully operational.
|
|
||||||
|
|
||||||
4. On the new site, run the following to ensure the ownership is correct
|
|
||||||
in case UIDs/GIDs changed:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
chown root: -R /var/lib/acme
|
|
||||||
chown opendkim: -R /etc/dkimkeys
|
|
||||||
chown vmail: -R /home/vmail/mail
|
|
||||||
|
|
||||||
5. Now, update DNS entries.
|
|
||||||
|
|
||||||
If other MTAs try to deliver messages to your chatmail domain they
|
|
||||||
may fail intermittently, as DNS catches up with the new site settings
|
|
||||||
but normally will retry delivering messages for at least a week, so
|
|
||||||
messages will not be lost.
|
|
||||||
|
|
||||||
6. Finally, you can execute ``cmdeploy run --ssh-host 13.12.23.42`` to
|
|
||||||
turn on chatmail on the new relay. Your users will be able to use the
|
|
||||||
chatmail relay as soon as the DNS changes have propagated. Voilà!
|
|
||||||
|
|
||||||
@@ -1,370 +0,0 @@
|
|||||||
|
|
||||||
Technical overview
|
|
||||||
======================
|
|
||||||
|
|
||||||
|
|
||||||
Directories of the relay repository
|
|
||||||
-----------------------------------
|
|
||||||
|
|
||||||
The `chatmail relay repository <https://github.com/chatmail/relay/tree/main/>`_
|
|
||||||
has four main directories.
|
|
||||||
|
|
||||||
``scripts/``
|
|
||||||
~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
`scripts <https://github.com/chatmail/relay/tree/main/scripts>`_
|
|
||||||
offers two convenience tools for beginners:
|
|
||||||
|
|
||||||
- ``initenv.sh`` installs a local virtualenv Python environment and
|
|
||||||
installs necessary dependencies
|
|
||||||
|
|
||||||
- ``scripts/cmdeploy`` script enables you to run the ``cmdeploy``
|
|
||||||
command line tool in the local Python virtual environment.
|
|
||||||
|
|
||||||
|
|
||||||
``cmdeploy/``
|
|
||||||
~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
The ``cmdeploy`` directory contains the Python package and command line tool
|
|
||||||
to setup a chatmail relay remotely via SSH:
|
|
||||||
|
|
||||||
- ``cmdeploy init`` creates the ``chatmail.ini`` config file locally.
|
|
||||||
|
|
||||||
- ``cmdeploy run`` under the hood uses pyinfra_
|
|
||||||
to automatically install or upgrade all chatmail components on a relay,
|
|
||||||
according to the local ``chatmail.ini`` config.
|
|
||||||
|
|
||||||
The deployed system components of a chatmail relay are:
|
|
||||||
|
|
||||||
- Postfix_ is the Mail Transport Agent (MTA) and
|
|
||||||
accepts messages from, and sends messages to, the wider email MTA network
|
|
||||||
|
|
||||||
- Dovecot_ is the Mail Delivery Agent (MDA) and
|
|
||||||
stores messages for users until they download them
|
|
||||||
|
|
||||||
- Nginx_ shows the web page with privacy policy and additional information
|
|
||||||
|
|
||||||
- `acmetool <https://hlandau.github.io/acmetool/>`_ manages TLS
|
|
||||||
certificates for Dovecot, Postfix, and Nginx
|
|
||||||
|
|
||||||
- `OpenDKIM <http://www.opendkim.org/>`_ for signing messages with
|
|
||||||
DKIM and rejecting inbound messages without DKIM
|
|
||||||
|
|
||||||
- `mtail <https://google.github.io/mtail/>`_ for collecting anonymized
|
|
||||||
metrics in case you have monitoring
|
|
||||||
|
|
||||||
- `Iroh relay <https://www.iroh.computer/docs/concepts/relay>`_ which
|
|
||||||
helps client devices to establish Peer-to-Peer connections
|
|
||||||
|
|
||||||
- `TURN <https://github.com/chatmail/chatmail-turn>`_ to enable relay
|
|
||||||
users to start webRTC calls even if a p2p connection can’t be
|
|
||||||
established
|
|
||||||
|
|
||||||
- and the chatmaild services, explained in the next section:
|
|
||||||
|
|
||||||
|
|
||||||
``chatmaild/``
|
|
||||||
~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
`chatmaild <https://github.com/chatmail/relay/tree/main/chatmaild>`_
|
|
||||||
is a Python package containing several small services which handle
|
|
||||||
authentication, trigger push notifications on new messages, ensure
|
|
||||||
that outbound mails are encrypted, delete inactive users, and some
|
|
||||||
other minor things. chatmaild can also be installed as a stand-alone
|
|
||||||
Python package.
|
|
||||||
|
|
||||||
``chatmaild`` implements various systemd-controlled services
|
|
||||||
that integrate with Dovecot and Postfix to achieve instant-onboarding
|
|
||||||
and only relaying OpenPGP end-to-end messages encrypted messages. A
|
|
||||||
short overview of ``chatmaild`` services:
|
|
||||||
|
|
||||||
- `doveauth <https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/doveauth.py>`_
|
|
||||||
implements create-on-login address semantics and is used by Dovecot
|
|
||||||
during IMAP login and by Postfix during SMTP/SUBMISSION login which
|
|
||||||
in turn uses `Dovecot SASL
|
|
||||||
<https://doc.dovecot.org/2.3/configuration_manual/authentication/dict/#complete-example-for-authenticating-via-a-unix-socket>`_
|
|
||||||
to authenticate logins.
|
|
||||||
|
|
||||||
- `filtermail <https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/filtermail.py>`_
|
|
||||||
prevents unencrypted email from leaving or entering the chatmail
|
|
||||||
service and is integrated into Postfix’s outbound and inbound mail
|
|
||||||
pipelines.
|
|
||||||
|
|
||||||
- `chatmail-metadata <https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/metadata.py>`_
|
|
||||||
is contacted by a `Dovecot lua
|
|
||||||
script <https://github.com/chatmail/relay/blob/main/cmdeploy/src/cmdeploy/dovecot/push_notification.lua>`_
|
|
||||||
to store user-specific relay-side config. On new messages, it `passes
|
|
||||||
the user’s push notification
|
|
||||||
token <https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/notifier.py>`_
|
|
||||||
to
|
|
||||||
`notifications.delta.chat <https://delta.chat/en/help#instant-delivery>`_
|
|
||||||
so the push notifications on the user’s phone can be triggered by
|
|
||||||
Apple/Google/Huawei.
|
|
||||||
|
|
||||||
- `chatmail-expire <https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/expire.py>`_
|
|
||||||
deletes users if they have not logged in for a longer while.
|
|
||||||
The timeframe can be configured in ``chatmail.ini``.
|
|
||||||
|
|
||||||
- `lastlogin <https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/lastlogin.py>`_
|
|
||||||
is contacted by Dovecot when a user logs in and stores the date of
|
|
||||||
the login.
|
|
||||||
|
|
||||||
- `metrics <https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/metrics.py>`_
|
|
||||||
collects some metrics and displays them at
|
|
||||||
``https://example.org/metrics``.
|
|
||||||
|
|
||||||
``www/``
|
|
||||||
~~~~~~~~~
|
|
||||||
|
|
||||||
`www <https://github.com/chatmail/relay/tree/main/www>`_ contains
|
|
||||||
the html, css, and markdown files which make up a chatmail relay’s
|
|
||||||
web page. Edit them before deploying to make your chatmail relay
|
|
||||||
stand out.
|
|
||||||
|
|
||||||
|
|
||||||
Chatmail relay dependency diagram
|
|
||||||
---------------------------------
|
|
||||||
|
|
||||||
.. mermaid::
|
|
||||||
:caption: This diagram shows relay components and dependencies/communication paths.
|
|
||||||
|
|
||||||
graph LR;
|
|
||||||
letsencrypt --- |80|acmetool-redirector;
|
|
||||||
acmetool-redirector --- |443|nginx-right(["`nginx
|
|
||||||
(external)`"]);
|
|
||||||
nginx-external --- |465|postfix;
|
|
||||||
nginx-external(["`nginx
|
|
||||||
(external)`"]) --- |8443|nginx-internal["`nginx
|
|
||||||
(internal)`"];
|
|
||||||
nginx-internal --- website["`Website
|
|
||||||
/var/www/html`"];
|
|
||||||
nginx-internal --- newemail.py;
|
|
||||||
nginx-internal --- autoconfig.xml;
|
|
||||||
certs-nginx[("`TLS certs
|
|
||||||
/var/lib/acme`")] --> nginx-internal;
|
|
||||||
systemd-timer --- chatmail-metrics;
|
|
||||||
systemd-timer --- acmetool;
|
|
||||||
systemd-timer --- chatmail-expire-daily;
|
|
||||||
systemd-timer --- chatmail-fsreport-daily;
|
|
||||||
chatmail-metrics --- website;
|
|
||||||
acmetool --> certs[("`TLS certs
|
|
||||||
/var/lib/acme`")];
|
|
||||||
nginx-external --- |993|dovecot;
|
|
||||||
postfix --- |SASL|dovecot;
|
|
||||||
autoconfig.xml --- postfix;
|
|
||||||
autoconfig.xml --- dovecot;
|
|
||||||
postfix --- |10080|filtermail-outgoing;
|
|
||||||
postfix --- |10081|filtermail-incoming;
|
|
||||||
filtermail-outgoing --- |10025 reinject|postfix;
|
|
||||||
filtermail-incoming --- |10026 reinject|postfix;
|
|
||||||
dovecot --- |doveauth.socket|doveauth;
|
|
||||||
dovecot --- |message delivery|maildir["maildir
|
|
||||||
/home/vmail/.../user"];
|
|
||||||
dovecot --- |lastlogin.socket|lastlogin;
|
|
||||||
dovecot --- chatmail-metadata;
|
|
||||||
lastlogin --- maildir;
|
|
||||||
doveauth --- maildir;
|
|
||||||
chatmail-expire-daily --- maildir;
|
|
||||||
chatmail-fsreport-daily --- maildir;
|
|
||||||
chatmail-metadata --- iroh-relay;
|
|
||||||
chatmail-metadata --- |encrypted device token| notifications.delta.chat;
|
|
||||||
certs-nginx --> postfix;
|
|
||||||
certs-nginx --> dovecot;
|
|
||||||
style certs fill:#ff6;
|
|
||||||
style website fill:#ff6;
|
|
||||||
style maildir fill:#ff6;
|
|
||||||
style certs-nginx fill:#ff6;
|
|
||||||
style nginx-external fill:#f66;
|
|
||||||
style nginx-right fill:#f66;
|
|
||||||
style postfix fill:#f66;
|
|
||||||
style dovecot fill:#f66;
|
|
||||||
style notification-proxy fill:#f66;
|
|
||||||
|
|
||||||
Message between users on the same relay
|
|
||||||
---------------------------------------
|
|
||||||
|
|
||||||
.. mermaid::
|
|
||||||
:caption: This diagram shows the path a non-federated message takes.
|
|
||||||
|
|
||||||
graph LR;
|
|
||||||
sender --> |465|smtps/smtpd;
|
|
||||||
sender --> |587|submission/smtpd;
|
|
||||||
smtps/smtpd --> |10080|filtermail;
|
|
||||||
submission/smtpd --> |10080|filtermail;
|
|
||||||
filtermail --> |10025|smtpd_reinject;
|
|
||||||
smtpd_reinject --> cleanup;
|
|
||||||
cleanup --> qmgr;
|
|
||||||
qmgr --> smtpd_accepts_message;
|
|
||||||
qmgr --> |lmtp|dovecot;
|
|
||||||
dovecot --> recipient;
|
|
||||||
dovecot --> sender's_other_devices;
|
|
||||||
|
|
||||||
Operational details of a chatmail relay
|
|
||||||
----------------------------------------
|
|
||||||
|
|
||||||
Mailbox directory layout
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
Fresh chatmail addresses have a mailbox directory that contains:
|
|
||||||
|
|
||||||
- a ``password`` file with the salted password required for
|
|
||||||
authenticating whether a login may use the address to send/receive
|
|
||||||
messages. If you modify the password file manually, you effectively
|
|
||||||
block the user.
|
|
||||||
|
|
||||||
- ``enforceE2EEincoming`` is a default-created file with each address.
|
|
||||||
If present the file indicates that this chatmail address rejects
|
|
||||||
incoming cleartext messages. If absent the address accepts incoming
|
|
||||||
cleartext messages.
|
|
||||||
|
|
||||||
- ``dovecot*``, ``cur``, ``new`` and ``tmp`` represent IMAP/mailbox
|
|
||||||
state. If the address is only used by one device, the Maildir
|
|
||||||
directories will typically be empty unless the user of that address
|
|
||||||
hasn’t been online for a while.
|
|
||||||
|
|
||||||
Active ports
|
|
||||||
~~~~~~~~~~~~
|
|
||||||
|
|
||||||
Postfix_ listens on ports
|
|
||||||
|
|
||||||
- 25 (SMTP)
|
|
||||||
|
|
||||||
- 587 (SUBMISSION) and
|
|
||||||
|
|
||||||
- 465 (SUBMISSIONS)
|
|
||||||
|
|
||||||
Dovecot_ listens on ports
|
|
||||||
|
|
||||||
- 143 (IMAP) and
|
|
||||||
|
|
||||||
- 993 (IMAPS)
|
|
||||||
|
|
||||||
Nginx_ listens on port
|
|
||||||
|
|
||||||
- 8443 (HTTPS-ALT) and
|
|
||||||
|
|
||||||
- 443 (HTTPS) which multiplexes HTTPS, IMAP and SMTP using ALPN
|
|
||||||
to redirect connections to ports 8443, 465 or 993.
|
|
||||||
|
|
||||||
`acmetool <https://hlandau.github.io/acmetool/>`_ listens on port:
|
|
||||||
|
|
||||||
- 80 (HTTP).
|
|
||||||
|
|
||||||
`chatmail-turn <https://github.com/chatmail/chatmail-turn>`_ listens on port
|
|
||||||
|
|
||||||
- 3478 UDP (STUN/TURN), and temporarily opens further UDP ports
|
|
||||||
when users request them. UDP port range is not restricted, any free port
|
|
||||||
may be allocated.
|
|
||||||
|
|
||||||
chatmail-core based apps will, however, discover all ports and
|
|
||||||
configurations automatically by reading the `autoconfig XML
|
|
||||||
file <https://www.ietf.org/archive/id/draft-bucksch-autoconfig-00.html>`_
|
|
||||||
from the chatmail relay server.
|
|
||||||
|
|
||||||
Email domain authentication (DKIM)
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
Chatmail relays enforce :rfc:`DKIM <6376>` to authenticate incoming emails.
|
|
||||||
Incoming emails must have a valid DKIM signature with
|
|
||||||
Signing Domain Identifier (SDID, ``d=`` parameter in the DKIM-Signature
|
|
||||||
header) equal to the ``From:`` header domain. This property is checked
|
|
||||||
by OpenDKIM screen policy script before validating the signatures. This
|
|
||||||
corresponds to strict :rfc:`DMARC <7489>` alignment (``adkim=s``).
|
|
||||||
If there is no valid DKIM signature on the incoming email, the
|
|
||||||
sender receives a “5.7.1 No valid DKIM signature found” error.
|
|
||||||
After validating the DKIM signature,
|
|
||||||
the `final.lua` script strips all ``OpenDKIM:`` headers to reduce message size on disc.
|
|
||||||
|
|
||||||
Note that chatmail relays
|
|
||||||
|
|
||||||
- do **not** rely on DMARC and do not consult the sender policy published in DMARC records;
|
|
||||||
|
|
||||||
- do **not** rely on legacy authentication mechanisms such as
|
|
||||||
:rfc:`iprev <8601#section-2.7.3>` and :rfc:`SPF <7208>`.
|
|
||||||
Any IP address is accepted if the DKIM signature was valid.
|
|
||||||
|
|
||||||
Outgoing emails must be sent over authenticated connection with envelope
|
|
||||||
``MAIL FROM`` (return path) corresponding to the login.
|
|
||||||
This is ensured by Postfix which maps login username to ``MAIL FROM`` with
|
|
||||||
`smtpd_sender_login_maps <https://www.postfix.org/postconf.5.html#smtpd_sender_login_maps>`_
|
|
||||||
and rejects incorrectly authenticated emails with
|
|
||||||
`reject_sender_login_mismatch <https://www.postfix.org/postconf.5.html#reject_sender_login_mismatch>`_ policy.
|
|
||||||
``From:`` header must correspond to envelope ``MAIL FROM``, this is
|
|
||||||
ensured by ``filtermail`` proxy.
|
|
||||||
|
|
||||||
TLS requirements
|
|
||||||
~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
Postfix is configured to require valid TLS by setting
|
|
||||||
`smtp_tls_security_level <https://www.postfix.org/postconf.5.html#smtp_tls_security_level>`_
|
|
||||||
to ``verify``. If emails don’t arrive at your chatmail relay server, the
|
|
||||||
problem is likely that your relay does not have a valid TLS certificate.
|
|
||||||
|
|
||||||
You can test it by resolving ``MX`` records of your relay domain and
|
|
||||||
then connecting to MX relays (e.g ``mx.example.org``) with
|
|
||||||
``openssl s_client -connect mx.example.org:25 -verify_hostname mx.example.org -verify_return_error -starttls smtp``
|
|
||||||
from the host that has open port 25 to verify that certificate is valid.
|
|
||||||
|
|
||||||
When providing a TLS certificate to your chatmail relay server, make
|
|
||||||
sure to provide the full certificate chain and not just the last
|
|
||||||
certificate.
|
|
||||||
|
|
||||||
If you are running an Exim server and don’t see incoming connections
|
|
||||||
from a chatmail relay server in the logs, make sure ``smtp_no_mail`` log
|
|
||||||
item is enabled in the config with ``log_selector = +smtp_no_mail``. By
|
|
||||||
default Exim does not log sessions that are closed before sending the
|
|
||||||
``MAIL`` command. This happens if certificate is not recognized as valid
|
|
||||||
by Postfix, so you might think that connection is not established while
|
|
||||||
actually it is a problem with your TLS certificate.
|
|
||||||
|
|
||||||
|
|
||||||
.. _dovecot: https://dovecot.org
|
|
||||||
.. _postfix: https://www.postfix.org
|
|
||||||
.. _nginx: https://nginx.org
|
|
||||||
.. _pyinfra: https://pyinfra.com
|
|
||||||
|
|
||||||
|
|
||||||
Architecture of cmdeploy
|
|
||||||
------------------------
|
|
||||||
|
|
||||||
cmdeploy is a Python program that uses the pyinfra library to deploy
|
|
||||||
chatmail relays, with all the necessary software, configuration, and
|
|
||||||
services. The deployment process performs three primary types of
|
|
||||||
operation:
|
|
||||||
|
|
||||||
1. Installation of software, universal across all deployments.
|
|
||||||
2. Configuration of software, with deploy-specific variations.
|
|
||||||
3. Activation of services.
|
|
||||||
|
|
||||||
The process is implemented through a family of "deployer" objects
|
|
||||||
which all derive from a common ``Deployer`` base class, defined in
|
|
||||||
cmdeploy/src/cmdeploy/deployer.py. Each object provides
|
|
||||||
implementation methods for the three stages -- install, configure, and
|
|
||||||
activate. The top-level procedure in ``deploy_chatmail()`` calls
|
|
||||||
these methods for all the deployer objects, via the
|
|
||||||
``Deployment.perform_stages()`` method, also defined in deployer.py.
|
|
||||||
This first calls all the install methods, then the configure methods,
|
|
||||||
then the activate methods.
|
|
||||||
|
|
||||||
The ``Deployment`` class also implements support for a CMDEPLOY_STAGES
|
|
||||||
environment variable, which allows limiting the process to specific
|
|
||||||
stages. Note that some deployers are stateful between the stages
|
|
||||||
(this is one reason why they are implemented as objects), and that
|
|
||||||
state will not get propagated between stages when run in separate
|
|
||||||
invocations of cmdeploy. This environment variable is intended for
|
|
||||||
use in future revisions to support building Docker images with
|
|
||||||
software pre-installed, and configuration of containers at run time
|
|
||||||
from environment variables.
|
|
||||||
|
|
||||||
The, ``install()`` methods for the deployer classes should use 'self'
|
|
||||||
as little as possible, preferably not at all. In particular,
|
|
||||||
``install()`` methods should never depend on "config" data, such as
|
|
||||||
the config dictionary in ``self.config`` or specific values like
|
|
||||||
``self.mail_domain``. This ensures that these methods can be used to
|
|
||||||
perform generic installation operations that are applicable across
|
|
||||||
multiple relay deployments, and therefore can be called in the process
|
|
||||||
of building a general-purpose container image.
|
|
||||||
|
|
||||||
Operations that start services for systemd-based deployments should
|
|
||||||
only be called from the ``activate_impl()`` methods. These methods
|
|
||||||
will not be called in non-systemd container environments.
|
|
||||||
@@ -1,175 +0,0 @@
|
|||||||
|
|
||||||
Setting up a reverse proxy
|
|
||||||
--------------------------
|
|
||||||
|
|
||||||
A chatmail relay MTA does not track or depend on the client IP address
|
|
||||||
for its operation, so it can be run behind a reverse proxy. This will
|
|
||||||
not even affect incoming mail authentication as DKIM only checks the
|
|
||||||
cryptographic signature of the message and does not use the IP address
|
|
||||||
as the input.
|
|
||||||
|
|
||||||
For example, you may want to self-host your chatmail relay and only use
|
|
||||||
hosted VPS to provide a public IP address for client connections and
|
|
||||||
incoming mail. You can connect chatmail relay to VPS using a tunnel
|
|
||||||
protocol such as `WireGuard <https://www.wireguard.com/>`_ and setup a
|
|
||||||
reverse proxy on a VPS to forward connections to the chatmail relay over
|
|
||||||
the tunnel. You can also setup multiple reverse proxies for your
|
|
||||||
chatmail relay in different networks to ensure your relay is reachable
|
|
||||||
even when one of the IPs becomes inaccessible due to hosting or routing
|
|
||||||
problems.
|
|
||||||
|
|
||||||
Note that your chatmail relay still needs to be able to make outgoing
|
|
||||||
connections on port 25 to send messages outside.
|
|
||||||
|
|
||||||
To setup a reverse proxy (or rather Destination NAT, DNAT) for your
|
|
||||||
chatmail relay, follow these instructions:
|
|
||||||
|
|
||||||
Linux
|
|
||||||
^^^^^
|
|
||||||
|
|
||||||
Put the following configuration in
|
|
||||||
``/etc/nftables.conf``:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
#!/usr/sbin/nft -f
|
|
||||||
|
|
||||||
flush ruleset
|
|
||||||
|
|
||||||
define wan = eth0
|
|
||||||
|
|
||||||
# Which ports to proxy.
|
|
||||||
#
|
|
||||||
# Note that SSH is not proxied
|
|
||||||
# so it is possible to log into the proxy server
|
|
||||||
# and not the original one.
|
|
||||||
define ports = { smtp, http, https, imap, imaps, submission, submissions }
|
|
||||||
|
|
||||||
# The host we want to proxy to.
|
|
||||||
define ipv4_address = AAA.BBB.CCC.DDD
|
|
||||||
define ipv6_address = [XXX::1]
|
|
||||||
|
|
||||||
table ip nat {
|
|
||||||
chain prerouting {
|
|
||||||
type nat hook prerouting priority dstnat; policy accept;
|
|
||||||
iif $wan tcp dport $ports dnat to $ipv4_address
|
|
||||||
}
|
|
||||||
|
|
||||||
chain postrouting {
|
|
||||||
type nat hook postrouting priority 0;
|
|
||||||
|
|
||||||
oifname $wan masquerade
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
table ip6 nat {
|
|
||||||
chain prerouting {
|
|
||||||
type nat hook prerouting priority dstnat; policy accept;
|
|
||||||
iif $wan tcp dport $ports dnat to $ipv6_address
|
|
||||||
}
|
|
||||||
|
|
||||||
chain postrouting {
|
|
||||||
type nat hook postrouting priority 0;
|
|
||||||
|
|
||||||
oifname $wan masquerade
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
table inet filter {
|
|
||||||
chain input {
|
|
||||||
type filter hook input priority filter; policy drop;
|
|
||||||
|
|
||||||
# Accept ICMP.
|
|
||||||
# It is especially important to accept ICMPv6 ND messages,
|
|
||||||
# otherwise IPv6 connectivity breaks.
|
|
||||||
icmp type { echo-request } accept
|
|
||||||
icmpv6 type { echo-request, nd-neighbor-solicit, nd-router-advert, nd-neighbor-advert } accept
|
|
||||||
|
|
||||||
# Allow incoming SSH connections.
|
|
||||||
tcp dport { ssh } accept
|
|
||||||
|
|
||||||
ct state established accept
|
|
||||||
}
|
|
||||||
chain forward {
|
|
||||||
type filter hook forward priority filter; policy drop;
|
|
||||||
|
|
||||||
ct state established accept
|
|
||||||
ip daddr $ipv4_address counter accept
|
|
||||||
ip6 daddr $ipv6_address counter accept
|
|
||||||
}
|
|
||||||
chain output {
|
|
||||||
type filter hook output priority filter;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Run ``systemctl enable nftables.service`` to ensure configuration is
|
|
||||||
reloaded when the proxy relay reboots.
|
|
||||||
|
|
||||||
Uncomment in ``/etc/sysctl.conf`` the following two lines:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
net.ipv4.ip_forward=1
|
|
||||||
net.ipv6.conf.all.forwarding=1
|
|
||||||
|
|
||||||
Then reboot the relay or do ``sysctl -p`` and
|
|
||||||
``nft -f /etc/nftables.conf``.
|
|
||||||
|
|
||||||
FreeBSD / pf
|
|
||||||
^^^^^^^^^^^^
|
|
||||||
|
|
||||||
Put the following configuration in
|
|
||||||
``/etc/pf.conf``:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
ext_if = "em0"
|
|
||||||
forward_ports = "{ 25, 80, 143, 443, 465, 587, 993 }"
|
|
||||||
chatmail_ipv4 = "AAA.BBB.CCC.DDD"
|
|
||||||
icmp_types = "{ echoreq, echorep, unreach, timex }"
|
|
||||||
chatmail_ipv6 = "XXX::1"
|
|
||||||
icmp6_types = "{ echorep, echoreq, neighbradv, neighbrsol, routeradv, routersol, unreach, toobig, timex }"
|
|
||||||
|
|
||||||
set skip on lo0
|
|
||||||
|
|
||||||
nat on $ext_if inet from any to any -> ($ext_if:0)
|
|
||||||
nat on $ext_if inet6 from any to any -> ($ext_if:0)
|
|
||||||
|
|
||||||
# Define the redirect rules
|
|
||||||
rdr on $ext_if inet proto tcp from any to ($ext_if:0) port $forward_ports -> $chatmail_ipv4
|
|
||||||
rdr on $ext_if inet6 proto tcp from any to ($ext_if:0) port $forward_ports -> $chatmail_ipv6
|
|
||||||
|
|
||||||
# Accept the incoming traffic to the specified ports we will NAT redirect
|
|
||||||
pass in quick on $ext_if inet proto tcp from any to any port $forward_ports flags S/SA modulate state
|
|
||||||
pass in quick on $ext_if inet6 proto tcp from any to any port $forward_ports flags S/SA modulate state
|
|
||||||
|
|
||||||
# Allow incoming SSH for host mgmt
|
|
||||||
pass in quick on $ext_if proto tcp from any to ($ext_if) port 22 flags S/SA modulate state
|
|
||||||
|
|
||||||
# Allow ICMP
|
|
||||||
pass in quick on $ext_if inet proto icmp all icmp-type $icmp_types keep state
|
|
||||||
pass in quick on $ext_if inet6 proto ipv6-icmp all icmp6-type $icmp6_types keep state
|
|
||||||
|
|
||||||
# Allow traffic from anyone to go through the NAT
|
|
||||||
pass on $ext_if inet proto tcp from any to $chatmail_ipv4 flags S/SA modulate state
|
|
||||||
pass on $ext_if inet6 proto tcp from any to $chatmail_ipv6 flags S/SA modulate state
|
|
||||||
|
|
||||||
# Default allow out
|
|
||||||
pass out quick on $ext_if from any to any
|
|
||||||
|
|
||||||
# Default block
|
|
||||||
block drop in log all
|
|
||||||
|
|
||||||
Insert into ``/etc/sysctl.conf.local`` the following two lines:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
net.inet.ip.forwarding=1
|
|
||||||
net.inet6.ip6.forwarding=1
|
|
||||||
|
|
||||||
Activate the sysctls with ``service sysctl onestart``.
|
|
||||||
Enable the pf firewall with ``service pf enable``.
|
|
||||||
Apply the firewall rules with ``service pf start`` or ``pfctl -f /etc/pf.conf``.
|
|
||||||
Note, enabling the firewall may interrupt your SSH session, but you can reconnect.
|
|
||||||
|
|
||||||
Once proxy relay is set up, you can add its IP address to the DNS.
|
|
||||||
@@ -1,20 +0,0 @@
|
|||||||
|
|
||||||
Community developments
|
|
||||||
======================
|
|
||||||
|
|
||||||
Active development takes place in the `chatmail/relay github repository <https://github.com/chatmail/relay>`_.
|
|
||||||
|
|
||||||
You can check out the `'chatmail' tag in the support.delta.chat forum <https://support.delta.chat/tag/chatmail>`_
|
|
||||||
and ask to get added to a non-public support chat for debugging issues.
|
|
||||||
|
|
||||||
We know of two work-in-progress alternative implementation efforts:
|
|
||||||
|
|
||||||
- `Mox <https://github.com/mjl-/mox>`_: A Golang email server. `Work
|
|
||||||
is in progress <https://github.com/mjl-/mox/issues/251>`_ to modify
|
|
||||||
it to support all of the features and configuration settings required
|
|
||||||
to operate as a chatmail relay.
|
|
||||||
|
|
||||||
- `Maddy-Chatmail <https://github.com/sadraiiali/maddy_chatmail>`_: a
|
|
||||||
plugin for the `Maddy email server <https://maddy.email/>`_ which
|
|
||||||
aims to implement the chatmail relay features and configuration
|
|
||||||
options.
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
#!/bin/sh
|
|
||||||
#
|
|
||||||
# Wrapper for building the docs
|
|
||||||
set -e
|
|
||||||
. venv/bin/activate
|
|
||||||
cd doc/
|
|
||||||
make html
|
|
||||||
@@ -22,4 +22,3 @@ python3 -m venv --upgrade-deps venv
|
|||||||
|
|
||||||
venv/bin/pip install -e chatmaild
|
venv/bin/pip install -e chatmaild
|
||||||
venv/bin/pip install -e cmdeploy
|
venv/bin/pip install -e cmdeploy
|
||||||
venv/bin/pip install sphinx sphinxcontrib-mermaid sphinx-autobuild furo # for building the docs
|
|
||||||
|
|||||||
@@ -1,2 +0,0 @@
|
|||||||
User-agent: *
|
|
||||||
Disallow: /
|
|
||||||
Reference in New Issue
Block a user