mirror of
https://github.com/chatmail/relay.git
synced 2026-05-10 16:04:37 +00:00
Compare commits
144 Commits
docker-tes
...
tmpfs-inde
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
151d6ef445 | ||
|
|
27443ca044 | ||
|
|
be35244371 | ||
|
|
f7f2c9600d | ||
|
|
dfcaf415b1 | ||
|
|
c0718325ef | ||
|
|
7d72b0e592 | ||
|
|
8f1e23d98e | ||
|
|
56aaf2649b | ||
|
|
2660b4d24c | ||
|
|
ea60ecfb57 | ||
|
|
2a3a224cc2 | ||
|
|
e42139e97b | ||
|
|
65b660c413 | ||
|
|
dd2beb226a | ||
|
|
9c7508cc33 | ||
|
|
ab3492d9a1 | ||
|
|
032faf0a94 | ||
|
|
c45fe03652 | ||
|
|
08bf4c234b | ||
|
|
2d0ccdb4a3 | ||
|
|
3abba6f2fa | ||
|
|
f9aaeb0f42 | ||
|
|
e0c44bf04f | ||
|
|
8ff53d12cb | ||
|
|
0aa0324c81 | ||
|
|
bfcfc9b090 | ||
|
|
e101c36ab4 | ||
|
|
be7aa21039 | ||
|
|
4906b82e44 | ||
|
|
5d49b4c0fd | ||
|
|
56c8f9faae | ||
|
|
203a7da3f4 | ||
|
|
a1667ca54d | ||
|
|
6401bbb32c | ||
|
|
325cc7a7b4 | ||
|
|
c2acbad802 | ||
|
|
0e7ab96dc8 | ||
|
|
d1f9523836 | ||
|
|
bcf2fdb5d0 | ||
|
|
77a6f49c9b | ||
|
|
99630e4d1b | ||
|
|
2f8199a7c6 | ||
|
|
4eeead2826 | ||
|
|
0d890274fd | ||
|
|
7191329a9f | ||
|
|
1ae4c8451a | ||
|
|
f04a624e19 | ||
|
|
24e3f33acd | ||
|
|
610843a44a | ||
|
|
966754a346 | ||
|
|
87153667ed | ||
|
|
abe0cb5d08 | ||
|
|
8c8c37c822 | ||
|
|
e7bed4d2a1 | ||
|
|
df21076e9b | ||
|
|
70da217442 | ||
|
|
40fd62c562 | ||
|
|
d76b33def1 | ||
|
|
bab3de9768 | ||
|
|
49c66116bf | ||
|
|
9bf99cc8a9 | ||
|
|
1188aed061 | ||
|
|
e15b8ebf11 | ||
|
|
c84ddf69e8 | ||
|
|
96fc3d9ff6 | ||
|
|
4b5e8feb96 | ||
|
|
c98853570b | ||
|
|
bad356503e | ||
|
|
dba48e88d1 | ||
|
|
3ae8834cbe | ||
|
|
81391f4066 | ||
|
|
55cfd00505 | ||
|
|
b000213c68 | ||
|
|
51d16b6bb8 | ||
|
|
2beba8c455 | ||
|
|
33c67d22fa | ||
|
|
166bf68915 | ||
|
|
abb70a6b14 | ||
|
|
96108bbaba | ||
|
|
8f68672e31 | ||
|
|
9e6e3af534 | ||
|
|
fa5a6a64b3 | ||
|
|
6b7c002e24 | ||
|
|
4b2f98788d | ||
|
|
13faa42abd | ||
|
|
7c12136991 | ||
|
|
3637bba5dc | ||
|
|
e2b157bd96 | ||
|
|
83abb3a3e1 | ||
|
|
2e3e3101b6 | ||
|
|
213d68ed02 | ||
|
|
68cc6676ef | ||
|
|
14ca95d25a | ||
|
|
3524b055db | ||
|
|
7b16f1330d | ||
|
|
7a907b138c | ||
|
|
0ff0159a89 | ||
|
|
81d2bf89c7 | ||
|
|
514a911529 | ||
|
|
fc7240a1ad | ||
|
|
bdcccd858c | ||
|
|
af30d2b55d | ||
|
|
5664b97db4 | ||
|
|
81364bd523 | ||
|
|
3c3e54fceb | ||
|
|
ae96b752a3 | ||
|
|
33b69fac95 | ||
|
|
165dc10f59 | ||
|
|
3df3c031d4 | ||
|
|
5515dc4c4b | ||
|
|
50b986a265 | ||
|
|
f24bc99c6f | ||
|
|
a0ebb2bdbc | ||
|
|
132bdcb5e5 | ||
|
|
7d593841bb | ||
|
|
83e7caeaf8 | ||
|
|
1cff4a94f1 | ||
|
|
ded9dd470d | ||
|
|
b94ad729fd | ||
|
|
b60267f37f | ||
|
|
a0aa2912dd | ||
|
|
76108c1c03 | ||
|
|
61b8dc4637 | ||
|
|
d42f579291 | ||
|
|
dd3cf4d449 | ||
|
|
7361cc9350 | ||
|
|
00f199816d | ||
|
|
8d7e1dad0e | ||
|
|
c0da7bb3bf | ||
|
|
863ded6480 | ||
|
|
d75321b355 | ||
|
|
9148b16d81 | ||
|
|
fa9aa5b015 | ||
|
|
0155f32df6 | ||
|
|
9ddd5d8b2b | ||
|
|
4cfe228a1f | ||
|
|
741a20450c | ||
|
|
b7fadcd4be | ||
|
|
7db26f33d9 | ||
|
|
2b90f7db37 | ||
|
|
e37dd5153a | ||
|
|
f21e4ff55b | ||
|
|
21258a267a |
4
.github/ISSUE_TEMPLATE/config.yml
vendored
4
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -1,5 +1 @@
|
|||||||
blank_issues_enabled: true
|
blank_issues_enabled: true
|
||||||
contact_links:
|
|
||||||
- name: Mutual Help Chat Group
|
|
||||||
url: https://i.delta.chat/#6CBFF8FFD505C0FDEA20A66674F2916EA8FBEE99&a=invitebot%40nine.testrun.org&g=Chatmail%20Mutual%20Help&x=7sFF7Ik50pWv6J1z7RVC5527&i=X69wTFfvCfs3d-JzqP0kVA3i&s=ibp-447dU-wUq-52QanwAtWc
|
|
||||||
about: If you have troubles setting up the relay server, feel free to ask here.
|
|
||||||
|
|||||||
3
.github/workflows/ci.yaml
vendored
3
.github/workflows/ci.yaml
vendored
@@ -14,7 +14,8 @@ jobs:
|
|||||||
# Otherwise `test_deployed_state` will be unhappy.
|
# Otherwise `test_deployed_state` will be unhappy.
|
||||||
with:
|
with:
|
||||||
ref: ${{ github.event.pull_request.head.sha }}
|
ref: ${{ github.event.pull_request.head.sha }}
|
||||||
|
- name: download filtermail
|
||||||
|
run: curl -L https://github.com/chatmail/filtermail/releases/download/v0.2.0/filtermail-x86_64-musl -o /usr/local/bin/filtermail && chmod +x /usr/local/bin/filtermail
|
||||||
- name: run chatmaild tests
|
- name: run chatmaild tests
|
||||||
working-directory: chatmaild
|
working-directory: chatmaild
|
||||||
run: pipx run tox
|
run: pipx run tox
|
||||||
|
|||||||
53
.github/workflows/docs-preview.yaml
vendored
Normal file
53
.github/workflows/docs-preview.yaml
vendored
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
name: documentation preview
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- 'doc/**'
|
||||||
|
- 'scripts/build-docs.sh'
|
||||||
|
- '.github/workflows/docs-preview.yaml'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
scripts:
|
||||||
|
name: build
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
environment:
|
||||||
|
name: 'staging.chatmail.at/doc/relay/'
|
||||||
|
url: https://staging.chatmail.at/doc/relay/${{ steps.prepare.outputs.prid }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: initenv
|
||||||
|
run: scripts/initenv.sh
|
||||||
|
|
||||||
|
- name: append venv/bin to PATH
|
||||||
|
run: echo `pwd`/venv/bin >>$GITHUB_PATH
|
||||||
|
|
||||||
|
- name: build documentation
|
||||||
|
working-directory: doc
|
||||||
|
run: sphinx-build source build
|
||||||
|
|
||||||
|
- name: build documentation second time (for TOC)
|
||||||
|
working-directory: doc
|
||||||
|
run: sphinx-build source build
|
||||||
|
|
||||||
|
- name: Get Pullrequest ID
|
||||||
|
id: prepare
|
||||||
|
run: |
|
||||||
|
export PULLREQUEST_ID=$(echo "${{ github.ref }}" | cut -d "/" -f3)
|
||||||
|
echo "prid=$PULLREQUEST_ID" >> $GITHUB_OUTPUT
|
||||||
|
if [ $(expr length "${{ secrets.USERNAME }}") -gt "1" ]; then echo "uploadtoserver=true" >> $GITHUB_OUTPUT; fi
|
||||||
|
- run: |
|
||||||
|
echo "baseurl: /${{ steps.prepare.outputs.prid }}" >> _config.yml
|
||||||
|
|
||||||
|
- name: Upload preview
|
||||||
|
run: |
|
||||||
|
mkdir -p "$HOME/.ssh"
|
||||||
|
echo "${{ secrets.CHATMAIL_STAGING_SSHKEY }}" > "$HOME/.ssh/key"
|
||||||
|
chmod 600 "$HOME/.ssh/key"
|
||||||
|
rsync -rILvh -e "ssh -i $HOME/.ssh/key -o StrictHostKeyChecking=no" $GITHUB_WORKSPACE/doc/build/ "${{ secrets.USERNAME }}@chatmail.at:/var/www/html/staging.chatmail.at/doc/relay/${{ steps.prepare.outputs.prid }}/"
|
||||||
|
|
||||||
|
- name: check links
|
||||||
|
working-directory: doc
|
||||||
|
run: sphinx-build --builder linkcheck source build
|
||||||
|
|
||||||
47
.github/workflows/docs.yaml
vendored
Normal file
47
.github/workflows/docs.yaml
vendored
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
name: build and upload documentation
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
- 'missytake/docs-ci'
|
||||||
|
paths:
|
||||||
|
- 'doc/**'
|
||||||
|
- 'scripts/build-docs.sh'
|
||||||
|
- '.github/workflows/docs.yaml'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
scripts:
|
||||||
|
name: build
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
environment:
|
||||||
|
name: 'chatmail.at/doc/relay/'
|
||||||
|
url: https://chatmail.at/doc/relay/
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: initenv
|
||||||
|
run: scripts/initenv.sh
|
||||||
|
|
||||||
|
- name: append venv/bin to PATH
|
||||||
|
run: echo `pwd`/venv/bin >>$GITHUB_PATH
|
||||||
|
|
||||||
|
- name: build documentation
|
||||||
|
working-directory: doc
|
||||||
|
run: sphinx-build source build
|
||||||
|
|
||||||
|
- name: build documentation second time (for TOC)
|
||||||
|
working-directory: doc
|
||||||
|
run: sphinx-build source build
|
||||||
|
|
||||||
|
- name: check links
|
||||||
|
working-directory: doc
|
||||||
|
run: sphinx-build --builder linkcheck source build
|
||||||
|
|
||||||
|
- name: upload documentation
|
||||||
|
run: |
|
||||||
|
mkdir -p "$HOME/.ssh"
|
||||||
|
echo "${{ secrets.CHATMAIL_STAGING_SSHKEY }}" > "$HOME/.ssh/key"
|
||||||
|
chmod 600 "$HOME/.ssh/key"
|
||||||
|
rsync -rILvh -e "ssh -i $HOME/.ssh/key -o StrictHostKeyChecking=no" $GITHUB_WORKSPACE/doc/build/ "${{ secrets.USERNAME }}@chatmail.at:/var/www/html/chatmail.at/doc/relay/"
|
||||||
|
|
||||||
13
.github/workflows/test-and-deploy-ipv4only.yaml
vendored
13
.github/workflows/test-and-deploy-ipv4only.yaml
vendored
@@ -16,13 +16,11 @@ jobs:
|
|||||||
name: deploy on staging-ipv4.testrun.org, and run tests
|
name: deploy on staging-ipv4.testrun.org, and run tests
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 30
|
timeout-minutes: 30
|
||||||
concurrency:
|
environment:
|
||||||
group: ci-ipv4-${{ github.workflow }}-${{ github.ref }}
|
name: staging-ipv4.testrun.org
|
||||||
cancel-in-progress: ${{ !contains(github.ref, '$GITHUB_REF') }}
|
url: https://staging-ipv4.testrun.org/
|
||||||
|
concurrency: staging-ipv4.testrun.org
|
||||||
steps:
|
steps:
|
||||||
- uses: jsok/serialize-workflow-action@v1
|
|
||||||
with:
|
|
||||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: prepare SSH
|
- name: prepare SSH
|
||||||
@@ -76,6 +74,7 @@ jobs:
|
|||||||
- run: |
|
- run: |
|
||||||
cmdeploy init staging-ipv4.testrun.org
|
cmdeploy init staging-ipv4.testrun.org
|
||||||
sed -i 's#disable_ipv6 = False#disable_ipv6 = True#' chatmail.ini
|
sed -i 's#disable_ipv6 = False#disable_ipv6 = True#' chatmail.ini
|
||||||
|
sed -i 's/#\s*mtail_address/mtail_address/' chatmail.ini
|
||||||
|
|
||||||
- run: cmdeploy run --verbose --skip-dns-check
|
- run: cmdeploy run --verbose --skip-dns-check
|
||||||
|
|
||||||
@@ -90,7 +89,7 @@ jobs:
|
|||||||
ssh root@ns.testrun.org systemctl reload nsd
|
ssh root@ns.testrun.org systemctl reload nsd
|
||||||
|
|
||||||
- name: cmdeploy test
|
- name: cmdeploy test
|
||||||
run: CHATMAIL_DOMAIN2=nine.testrun.org cmdeploy test --slow
|
run: CHATMAIL_DOMAIN2=ci-chatmail.testrun.org cmdeploy test --slow
|
||||||
|
|
||||||
- name: cmdeploy dns
|
- name: cmdeploy dns
|
||||||
run: cmdeploy dns -v
|
run: cmdeploy dns -v
|
||||||
|
|||||||
19
.github/workflows/test-and-deploy.yaml
vendored
19
.github/workflows/test-and-deploy.yaml
vendored
@@ -16,13 +16,11 @@ jobs:
|
|||||||
name: deploy on staging2.testrun.org, and run tests
|
name: deploy on staging2.testrun.org, and run tests
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 30
|
timeout-minutes: 30
|
||||||
concurrency:
|
environment:
|
||||||
group: ci-${{ github.workflow }}-${{ github.ref }}
|
name: staging2.testrun.org
|
||||||
cancel-in-progress: ${{ !contains(github.ref, '$GITHUB_REF') }}
|
url: https://staging2.testrun.org/
|
||||||
|
concurrency: staging2.testrun.org
|
||||||
steps:
|
steps:
|
||||||
- uses: jsok/serialize-workflow-action@v1
|
|
||||||
with:
|
|
||||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: prepare SSH
|
- name: prepare SSH
|
||||||
@@ -70,10 +68,15 @@ jobs:
|
|||||||
rsync -avz dkimkeys-restore/dkimkeys root@staging2.testrun.org:/etc/ || true
|
rsync -avz dkimkeys-restore/dkimkeys root@staging2.testrun.org:/etc/ || true
|
||||||
ssh -o StrictHostKeyChecking=accept-new -v root@staging2.testrun.org chown root:root -R /var/lib/acme || true
|
ssh -o StrictHostKeyChecking=accept-new -v root@staging2.testrun.org chown root:root -R /var/lib/acme || true
|
||||||
|
|
||||||
|
- name: add hpk42 key to staging server
|
||||||
|
run: ssh root@staging2.testrun.org 'curl -s https://github.com/hpk42.keys >> .ssh/authorized_keys'
|
||||||
|
|
||||||
- name: run deploy-chatmail offline tests
|
- name: run deploy-chatmail offline tests
|
||||||
run: pytest --pyargs cmdeploy
|
run: pytest --pyargs cmdeploy
|
||||||
|
|
||||||
- run: cmdeploy init staging2.testrun.org
|
- run: |
|
||||||
|
cmdeploy init staging2.testrun.org
|
||||||
|
sed -i 's/#\s*mtail_address/mtail_address/' chatmail.ini
|
||||||
|
|
||||||
- run: cmdeploy run --verbose --skip-dns-check
|
- run: cmdeploy run --verbose --skip-dns-check
|
||||||
|
|
||||||
@@ -88,7 +91,7 @@ jobs:
|
|||||||
ssh root@ns.testrun.org systemctl reload nsd
|
ssh root@ns.testrun.org systemctl reload nsd
|
||||||
|
|
||||||
- name: cmdeploy test
|
- name: cmdeploy test
|
||||||
run: CHATMAIL_DOMAIN2=nine.testrun.org cmdeploy test --slow
|
run: CHATMAIL_DOMAIN2=ci-chatmail.testrun.org cmdeploy test --slow
|
||||||
|
|
||||||
- name: cmdeploy dns
|
- name: cmdeploy dns
|
||||||
run: cmdeploy dns -v
|
run: cmdeploy dns -v
|
||||||
|
|||||||
6
.gitignore
vendored
6
.gitignore
vendored
@@ -164,9 +164,3 @@ cython_debug/
|
|||||||
#.idea/
|
#.idea/
|
||||||
|
|
||||||
chatmail.zone
|
chatmail.zone
|
||||||
|
|
||||||
# docker
|
|
||||||
/data/
|
|
||||||
/custom/
|
|
||||||
docker-compose.yaml
|
|
||||||
.env
|
|
||||||
|
|||||||
@@ -1,50 +0,0 @@
|
|||||||
This diagram shows components of the chatmail server; this is a draft
|
|
||||||
overview as of mid-August 2025:
|
|
||||||
|
|
||||||
```mermaid
|
|
||||||
graph LR;
|
|
||||||
cmdeploy --- sshd;
|
|
||||||
letsencrypt --- |80|acmetool-redirector;
|
|
||||||
acmetool-redirector --- |443|nginx-right(["`nginx
|
|
||||||
(external)`"]);
|
|
||||||
nginx-external --- |465|postfix;
|
|
||||||
nginx-external(["`nginx
|
|
||||||
(external)`"]) --- |8443|nginx-internal["`nginx
|
|
||||||
(internal)`"];
|
|
||||||
nginx-internal --- website["`Website
|
|
||||||
/var/www/html`"];
|
|
||||||
nginx-internal --- newemail.py;
|
|
||||||
nginx-internal --- autoconfig.xml;
|
|
||||||
certs-nginx[("`TLS certs
|
|
||||||
/var/lib/acme`")] --> nginx-internal;
|
|
||||||
cron --- chatmail-metrics;
|
|
||||||
cron --- acmetool;
|
|
||||||
cron --- expunge;
|
|
||||||
chatmail-metrics --- website;
|
|
||||||
acmetool --> certs[("`TLS certs
|
|
||||||
/var/lib/acme`")];
|
|
||||||
nginx-external --- |993|dovecot;
|
|
||||||
autoconfig.xml --- postfix;
|
|
||||||
autoconfig.xml --- dovecot;
|
|
||||||
postfix --- echobot;
|
|
||||||
postfix --- |10080,10081|filtermail;
|
|
||||||
postfix --- users["`User data
|
|
||||||
home/vmail/mail`"];
|
|
||||||
postfix --- |doveauth.socket|doveauth;
|
|
||||||
dovecot --- |doveauth.socket|doveauth;
|
|
||||||
dovecot --- users;
|
|
||||||
dovecot --- |metadata.socket|chatmail-metadata;
|
|
||||||
doveauth --- users;
|
|
||||||
expunge --- users;
|
|
||||||
chatmail-metadata --- iroh-relay;
|
|
||||||
certs-nginx --> postfix;
|
|
||||||
certs-nginx --> dovecot;
|
|
||||||
style certs fill:#ff6;
|
|
||||||
style certs-nginx fill:#ff6;
|
|
||||||
style nginx-external fill:#fc9;
|
|
||||||
style nginx-right fill:#fc9;
|
|
||||||
```
|
|
||||||
|
|
||||||
The edges in this graph should not be taken too literally; they
|
|
||||||
reflect some sort of communication path or dependency relationship
|
|
||||||
between components of the chatmail server.
|
|
||||||
92
CHANGELOG.md
92
CHANGELOG.md
@@ -1,6 +1,80 @@
|
|||||||
# Changelog for chatmail deployment
|
# Changelog for chatmail deployment
|
||||||
|
|
||||||
## untagged
|
## 1.9.0 2025-12-18
|
||||||
|
|
||||||
|
### Documentation
|
||||||
|
|
||||||
|
- Add RELEASE.md and CONTRIBUTING.md
|
||||||
|
- README update, mention Chatmail Cookbook project
|
||||||
|
|
||||||
|
### Bug Fixes
|
||||||
|
|
||||||
|
- Expire messages also from IMAP subfolders
|
||||||
|
- Use absolute path instead of relative path in message expiration script
|
||||||
|
- Restart Postfix and Dovecot automatically on failure
|
||||||
|
- acmetool: Use a fixed name and `reconcile` instead of `want`
|
||||||
|
|
||||||
|
### Features
|
||||||
|
|
||||||
|
- Report DKIM error code in SMTP response
|
||||||
|
- Remove development notice from the web pages
|
||||||
|
|
||||||
|
### Miscellaneous Tasks
|
||||||
|
|
||||||
|
- Update the heading in the CHANGELOG.md
|
||||||
|
- Setup git-cliff
|
||||||
|
- Run tests against ci-chatmail.testrun.org instead of nine.testrun.org
|
||||||
|
- Cleanup remaining echobot code, remove echobot user from deployment and passthrough recipients
|
||||||
|
|
||||||
|
## 1.8.0 2025-12-12
|
||||||
|
|
||||||
|
- Add imap_compress option to chatmail.ini
|
||||||
|
([#760](https://github.com/chatmail/relay/pull/760))
|
||||||
|
|
||||||
|
- Remove echobot from relays
|
||||||
|
([#753](https://github.com/chatmail/relay/pull/753))
|
||||||
|
|
||||||
|
- Fix `cmdeploy webdev`
|
||||||
|
([#743](https://github.com/chatmail/relay/pull/743))
|
||||||
|
|
||||||
|
- Add robots.txt to exclude all web crawlers
|
||||||
|
([#732](https://github.com/chatmail/relay/pull/732))
|
||||||
|
|
||||||
|
- acmetool: accept new Let's Encrypt ToS: https://letsencrypt.org/documents/LE-SA-v1.6-August-18-2025.pdf
|
||||||
|
([#729](https://github.com/chatmail/relay/pull/729))
|
||||||
|
|
||||||
|
- Organized cmdeploy into install, configure, and activate stages
|
||||||
|
([#695](https://github.com/chatmail/relay/pull/695))
|
||||||
|
|
||||||
|
- docs: move readme.md docs to sphinx documentation rendered at https://chatmail.at/doc/relay
|
||||||
|
([#711](https://github.com/chatmail/relay/pull/711))
|
||||||
|
|
||||||
|
- acmetool: replace cronjob with a systemd timer
|
||||||
|
([#719](https://github.com/chatmail/relay/pull/719))
|
||||||
|
|
||||||
|
- remove xstore@testrun.org from default passthrough recipients
|
||||||
|
([#722](https://github.com/chatmail/relay/pull/722))
|
||||||
|
|
||||||
|
- don't deploy the website if there are merge conflicts in the www folder
|
||||||
|
([#714](https://github.com/chatmail/relay/pull/714))
|
||||||
|
|
||||||
|
- acmetool: use ECDSA keys instead of RSA
|
||||||
|
([#689](https://github.com/chatmail/relay/pull/689))
|
||||||
|
|
||||||
|
- Require TLS 1.2 for outgoing SMTP connections
|
||||||
|
([#685](https://github.com/chatmail/relay/pull/685), [#730](https://github.com/chatmail/relay/pull/730))
|
||||||
|
|
||||||
|
- require STARTTLS for incoming port 25 connections
|
||||||
|
([#684](https://github.com/chatmail/relay/pull/684), [#730](https://github.com/chatmail/relay/pull/730))
|
||||||
|
|
||||||
|
- filtermail: run CPU-intensive handle_DATA in a thread pool executor
|
||||||
|
([#676](https://github.com/chatmail/relay/pull/676))
|
||||||
|
|
||||||
|
- don't use the complicated logging module in filtermail to exclude a potential source of errors.
|
||||||
|
([#674](https://github.com/chatmail/relay/pull/674))
|
||||||
|
|
||||||
|
- Specify nginx.conf to only handle `mail_domain`, www, and mta-sts domains
|
||||||
|
([#636](https://github.com/chatmail/relay/pull/636))
|
||||||
|
|
||||||
- Setup TURN server
|
- Setup TURN server
|
||||||
([#621](https://github.com/chatmail/relay/pull/621))
|
([#621](https://github.com/chatmail/relay/pull/621))
|
||||||
@@ -12,11 +86,14 @@
|
|||||||
([#650](https://github.com/chatmail/relay/pull/650))
|
([#650](https://github.com/chatmail/relay/pull/650))
|
||||||
|
|
||||||
- filtermail: accept mails from Protonmail
|
- filtermail: accept mails from Protonmail
|
||||||
([#616](https://github.com/chatmail/relay/pull/655))
|
([#616](https://github.com/chatmail/relay/pull/616))
|
||||||
|
|
||||||
- Ignore all RCPT TO: parameters
|
- Ignore all RCPT TO: parameters
|
||||||
([#651](https://github.com/chatmail/relay/pull/651))
|
([#651](https://github.com/chatmail/relay/pull/651))
|
||||||
|
|
||||||
|
- Increase opendkim DNS Timeout from 5 to 60 seconds
|
||||||
|
([#672](https://github.com/chatmail/relay/pull/672))
|
||||||
|
|
||||||
- Add config parameter for Let's Encrypt ACME email
|
- Add config parameter for Let's Encrypt ACME email
|
||||||
([#663](https://github.com/chatmail/relay/pull/663))
|
([#663](https://github.com/chatmail/relay/pull/663))
|
||||||
|
|
||||||
@@ -38,13 +115,12 @@
|
|||||||
- Add `--skip-dns-check` argument to `cmdeploy run` command, which disables DNS record checking before installation.
|
- Add `--skip-dns-check` argument to `cmdeploy run` command, which disables DNS record checking before installation.
|
||||||
([#661](https://github.com/chatmail/relay/pull/661))
|
([#661](https://github.com/chatmail/relay/pull/661))
|
||||||
|
|
||||||
- Add installation via docker compose (MVP 1). The instructions, known issues and limitations are located in `/docs`
|
- Rework expiry of message files and mailboxes in Python
|
||||||
([#614](https://github.com/chatmail/relay/pull/614))
|
to only do a single iteration over sometimes millions of messages
|
||||||
|
instead of doing "find" commands that iterate 9 times over the messages.
|
||||||
|
Provide an "fsreport" CLI for more fine grained analysis of message files.
|
||||||
|
([#637](https://github.com/chatmail/relay/pull/637))
|
||||||
|
|
||||||
- Add configuration parameters
|
|
||||||
([#614](https://github.com/chatmail/relay/pull/614)):
|
|
||||||
- `change_kernel_settings` - Whether to change kernel parameters during installation (default: `True`)
|
|
||||||
- `fs_inotify_max_user_instances_and_watchers` - Value for kernel parameters `fs.inotify.max_user_instances` and `fs.inotify.max_user_watches` (default: `65535`)
|
|
||||||
|
|
||||||
## 1.7.0 2025-09-11
|
## 1.7.0 2025-09-11
|
||||||
|
|
||||||
|
|||||||
7
CONTRIBUTING.md
Normal file
7
CONTRIBUTING.md
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
# Contributing to the chatmail relay
|
||||||
|
|
||||||
|
Commit messages follow the [Conventional Commits] notation.
|
||||||
|
We use [git-cliff] to generate the changelog from commit messages before the release.
|
||||||
|
|
||||||
|
[Conventional Commits]: https://www.conventionalcommits.org/
|
||||||
|
[git-cliff]: https://git-cliff.org/
|
||||||
565
README.md
565
README.md
@@ -1,565 +1,20 @@
|
|||||||
|
|
||||||
<img width="800px" src="www/src/collage-top.png"/>
|
# Chatmail relays for end-to-end encrypted email
|
||||||
|
|
||||||
# Chatmail relays for end-to-end encrypted e-mail
|
|
||||||
|
|
||||||
Chatmail relay servers are interoperable Mail Transport Agents (MTAs) designed for:
|
Chatmail relay servers are interoperable Mail Transport Agents (MTAs) designed for:
|
||||||
|
|
||||||
- **Convenience:** Low friction instant onboarding
|
- **Zero State:** no private data or metadata collected, messages are auto-deleted, low disk usage
|
||||||
|
|
||||||
- **Privacy:** No name, phone numbers, email required or collected
|
- **Instant/Realtime:** sub-second message delivery, realtime P2P
|
||||||
|
streaming, privacy-preserving Push Notifications for Apple, Google, and Huawei;
|
||||||
|
|
||||||
- **End-to-End Encryption enforced**: only OpenPGP messages with metadata minimization allowed
|
- **Security Enforcement**: only strict TLS, DKIM and OpenPGP with minimized metadata accepted
|
||||||
|
|
||||||
- **Instant:** Privacy-preserving Push Notifications for Apple, Google, and Huawei
|
- **Reliable Federation and Decentralization:** No spam or IP reputation checks, federating
|
||||||
|
depends on established IETF standards and protocols.
|
||||||
|
|
||||||
- **Speed:** Message delivery in half a second, with optional P2P realtime connections
|
This repository contains everything needed to setup a ready-to-use chatmail relay on an ssh-reachable host.
|
||||||
|
For getting started and more information please refer to the web version of this repositories' documentation at
|
||||||
|
|
||||||
- **Transport Security:** Strict TLS and DKIM enforced
|
[https://chatmail.at/doc/relay](https://chatmail.at/doc/relay)
|
||||||
|
|
||||||
- **Reliability:** No spam or IP reputation checks; rate-limits are suitable for realtime chats
|
|
||||||
|
|
||||||
- **Efficiency:** Messages are only stored for transit and removed automatically
|
|
||||||
|
|
||||||
This repository contains everything needed to setup a ready-to-use chatmail relay
|
|
||||||
comprised of a minimal setup of the battle-tested
|
|
||||||
[Postfix SMTP](https://www.postfix.org) and [Dovecot IMAP](https://www.dovecot.org) MTAs/MDAs.
|
|
||||||
|
|
||||||
The automated setup is designed and optimized for providing chatmail addresses
|
|
||||||
for immediate permission-free onboarding through chat apps and bots.
|
|
||||||
Chatmail addresses are automatically created at first login,
|
|
||||||
after which the initially specified password is required
|
|
||||||
for sending and receiving messages through them.
|
|
||||||
|
|
||||||
Please see [this list of known apps and client projects](https://chatmail.at/clients.html)
|
|
||||||
and [this list of known public 3rd party chatmail relay servers](https://chatmail.at/relays).
|
|
||||||
|
|
||||||
|
|
||||||
## Minimal requirements, Prerequisites
|
|
||||||
|
|
||||||
You will need the following:
|
|
||||||
|
|
||||||
- Control over a domain through a DNS provider of your choice.
|
|
||||||
|
|
||||||
- A Debian 12 server with reachable SMTP/SUBMISSIONS/IMAPS/HTTPS ports.
|
|
||||||
IPv6 is encouraged if available.
|
|
||||||
Chatmail relay servers only require 1GB RAM, one CPU, and perhaps 10GB storage for a
|
|
||||||
few thousand active chatmail addresses.
|
|
||||||
|
|
||||||
- Key-based SSH authentication to the root user.
|
|
||||||
You must add a passphrase-protected private key to your local ssh-agent
|
|
||||||
because you can't type in your passphrase during deployment.
|
|
||||||
(An ed25519 private key is required due to an [upstream bug in paramiko](https://github.com/paramiko/paramiko/issues/2191))
|
|
||||||
|
|
||||||
|
|
||||||
## Getting started
|
|
||||||
|
|
||||||
We use `chat.example.org` as the chatmail domain in the following steps.
|
|
||||||
Please substitute it with your own domain.
|
|
||||||
|
|
||||||
1. Setup the initial DNS records.
|
|
||||||
The following is an example in the familiar BIND zone file format with
|
|
||||||
a TTL of 1 hour (3600 seconds).
|
|
||||||
Please substitute your domain and IP addresses.
|
|
||||||
|
|
||||||
```
|
|
||||||
chat.example.com. 3600 IN A 198.51.100.5
|
|
||||||
chat.example.com. 3600 IN AAAA 2001:db8::5
|
|
||||||
www.chat.example.com. 3600 IN CNAME chat.example.com.
|
|
||||||
mta-sts.chat.example.com. 3600 IN CNAME chat.example.com.
|
|
||||||
```
|
|
||||||
|
|
||||||
2. On your local PC, clone the repository and bootstrap the Python virtualenv.
|
|
||||||
|
|
||||||
```
|
|
||||||
git clone https://github.com/chatmail/relay
|
|
||||||
cd relay
|
|
||||||
```
|
|
||||||
|
|
||||||
### Manual installation
|
|
||||||
1. On your local PC, create chatmail configuration file `chatmail.ini`:
|
|
||||||
|
|
||||||
```
|
|
||||||
scripts/initenv.sh
|
|
||||||
scripts/cmdeploy init chat.example.org # <-- use your domain
|
|
||||||
```
|
|
||||||
|
|
||||||
2. Verify that SSH root login to your remote server works:
|
|
||||||
|
|
||||||
```
|
|
||||||
ssh root@chat.example.org # <-- use your domain
|
|
||||||
```
|
|
||||||
|
|
||||||
3. From your local PC, deploy the remote chatmail relay server:
|
|
||||||
|
|
||||||
```
|
|
||||||
scripts/cmdeploy run
|
|
||||||
```
|
|
||||||
This script will also check that you have all necessary DNS records.
|
|
||||||
If DNS records are missing, it will recommend
|
|
||||||
which you should configure at your DNS provider
|
|
||||||
(it can take some time until they are public).
|
|
||||||
|
|
||||||
### Docker installation
|
|
||||||
|
|
||||||
We have experimental support for [docker compose](./docs/DOCKER_INSTALLATION_EN.md),
|
|
||||||
but it is not covered by automated tests yet,
|
|
||||||
so don't expect everything to work.
|
|
||||||
|
|
||||||
### Other helpful commands
|
|
||||||
|
|
||||||
To check the status of your remotely running chatmail service:
|
|
||||||
|
|
||||||
```
|
|
||||||
scripts/cmdeploy status
|
|
||||||
```
|
|
||||||
|
|
||||||
To display and check all recommended DNS records:
|
|
||||||
|
|
||||||
```
|
|
||||||
scripts/cmdeploy dns
|
|
||||||
```
|
|
||||||
|
|
||||||
To test whether your chatmail service is working correctly:
|
|
||||||
|
|
||||||
```
|
|
||||||
scripts/cmdeploy test
|
|
||||||
```
|
|
||||||
|
|
||||||
To measure the performance of your chatmail service:
|
|
||||||
|
|
||||||
```
|
|
||||||
scripts/cmdeploy bench
|
|
||||||
```
|
|
||||||
|
|
||||||
## Overview of this repository
|
|
||||||
|
|
||||||
This repository has four directories:
|
|
||||||
|
|
||||||
- [cmdeploy](https://github.com/chatmail/relay/tree/main/cmdeploy)
|
|
||||||
is a collection of configuration files
|
|
||||||
and a [pyinfra](https://pyinfra.com)-based deployment script.
|
|
||||||
|
|
||||||
- [chatmaild](https://github.com/chatmail/relay/tree/main/chatmaild)
|
|
||||||
is a Python package containing several small services
|
|
||||||
which handle authentication,
|
|
||||||
trigger push notifications on new messages,
|
|
||||||
ensure that outbound mails are encrypted,
|
|
||||||
delete inactive users,
|
|
||||||
and some other minor things.
|
|
||||||
chatmaild can also be installed as a stand-alone Python package.
|
|
||||||
|
|
||||||
- [www](https://github.com/chatmail/relay/tree/main/www)
|
|
||||||
contains the html, css, and markdown files
|
|
||||||
which make up a chatmail relay's web page.
|
|
||||||
Edit them before deploying to make your chatmail relay stand out.
|
|
||||||
|
|
||||||
- [scripts](https://github.com/chatmail/relay/tree/main/scripts)
|
|
||||||
offers two convenience tools for beginners;
|
|
||||||
`initenv.sh` installs the necessary dependencies to a local virtual environment,
|
|
||||||
and the `scripts/cmdeploy` script enables you
|
|
||||||
to run the `cmdeploy` command line tool in the local virtual environment.
|
|
||||||
|
|
||||||
### cmdeploy
|
|
||||||
|
|
||||||
The `cmdeploy/src/cmdeploy/cmdeploy.py` command line tool
|
|
||||||
helps with setting up and managing the chatmail service.
|
|
||||||
`cmdeploy init` creates the `chatmail.ini` config file.
|
|
||||||
`cmdeploy run` uses a [pyinfra](https://pyinfra.com/)-based [`script`](cmdeploy/src/cmdeploy/__init__.py)
|
|
||||||
to automatically install or upgrade all chatmail components on a relay,
|
|
||||||
according to the `chatmail.ini` config.
|
|
||||||
|
|
||||||
The components of chatmail are:
|
|
||||||
|
|
||||||
- [Postfix SMTP MTA](https://www.postfix.org) accepts and relays messages
|
|
||||||
(both from your users and from the wider e-mail MTA network)
|
|
||||||
|
|
||||||
- [Dovecot IMAP MDA](https://www.dovecot.org) stores messages for your users until they download them
|
|
||||||
|
|
||||||
- [Nginx](https://nginx.org/) shows the web page with your privacy policy and additional information
|
|
||||||
|
|
||||||
- [acmetool](https://hlandau.github.io/acmetool/) manages TLS certificates for Dovecot, Postfix, and Nginx
|
|
||||||
|
|
||||||
- [OpenDKIM](http://www.opendkim.org/) for signing messages with DKIM and rejecting inbound messages without DKIM
|
|
||||||
|
|
||||||
- [mtail](https://google.github.io/mtail/) for collecting anonymized metrics in case you have monitoring
|
|
||||||
|
|
||||||
- [Iroh relay](https://www.iroh.computer/docs/concepts/relay)
|
|
||||||
which helps client devices to establish Peer-to-Peer connections
|
|
||||||
|
|
||||||
- and the chatmaild services, explained in the next section:
|
|
||||||
|
|
||||||
### chatmaild
|
|
||||||
|
|
||||||
`chatmaild` implements various systemd-controlled services
|
|
||||||
that integrate with Dovecot and Postfix to achieve instant-onboarding and
|
|
||||||
only relaying OpenPGP end-to-end messages encrypted messages.
|
|
||||||
A short overview of `chatmaild` services:
|
|
||||||
|
|
||||||
- [`doveauth`](https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/doveauth.py)
|
|
||||||
implements create-on-login address semantics and is used
|
|
||||||
by Dovecot during IMAP login and by Postfix during SMTP/SUBMISSION login
|
|
||||||
which in turn uses [Dovecot SASL](https://doc.dovecot.org/configuration_manual/authentication/dict/#complete-example-for-authenticating-via-a-unix-socket)
|
|
||||||
to authenticate logins.
|
|
||||||
|
|
||||||
- [`filtermail`](https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/filtermail.py)
|
|
||||||
prevents unencrypted email from leaving or entering the chatmail service
|
|
||||||
and is integrated into Postfix's outbound and inbound mail pipelines.
|
|
||||||
|
|
||||||
- [`chatmail-metadata`](https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/metadata.py) is contacted by a
|
|
||||||
[Dovecot lua script](https://github.com/chatmail/relay/blob/main/cmdeploy/src/cmdeploy/dovecot/push_notification.lua)
|
|
||||||
to store user-specific relay-side config.
|
|
||||||
On new messages,
|
|
||||||
it [passes the user's push notification token](https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/notifier.py)
|
|
||||||
to [notifications.delta.chat](https://delta.chat/help#instant-delivery)
|
|
||||||
so the push notifications on the user's phone can be triggered
|
|
||||||
by Apple/Google/Huawei.
|
|
||||||
|
|
||||||
- [`delete_inactive_users`](https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/delete_inactive_users.py)
|
|
||||||
deletes users if they have not logged in for a very long time.
|
|
||||||
The timeframe can be configured in `chatmail.ini`.
|
|
||||||
|
|
||||||
- [`lastlogin`](https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/lastlogin.py)
|
|
||||||
is contacted by Dovecot when a user logs in
|
|
||||||
and stores the date of the login.
|
|
||||||
|
|
||||||
- [`echobot`](https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/echo.py)
|
|
||||||
is a small bot for test purposes.
|
|
||||||
It simply echoes back messages from users.
|
|
||||||
|
|
||||||
- [`chatmail-metrics`](https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/metrics.py)
|
|
||||||
collects some metrics and displays them at `https://example.org/metrics`.
|
|
||||||
|
|
||||||
### Home page and getting started for users
|
|
||||||
|
|
||||||
`cmdeploy run` also creates default static web pages and deploys them
|
|
||||||
to an Nginx web server with:
|
|
||||||
|
|
||||||
- a default `index.html` along with a QR code that users can click to
|
|
||||||
create an address on your chatmail relay
|
|
||||||
|
|
||||||
- a default `info.html` that is linked from the home page
|
|
||||||
|
|
||||||
- a default `policy.html` that is linked from the home page
|
|
||||||
|
|
||||||
All `.html` files are generated
|
|
||||||
from the corresponding markdown `.md` file in the `www/src` directory.
|
|
||||||
|
|
||||||
|
|
||||||
### Refining the web pages
|
|
||||||
|
|
||||||
```
|
|
||||||
scripts/cmdeploy webdev
|
|
||||||
```
|
|
||||||
|
|
||||||
This starts a local live development cycle for chatmail web pages:
|
|
||||||
|
|
||||||
- uses the `www/src/page-layout.html` file for producing static
|
|
||||||
HTML pages from `www/src/*.md` files
|
|
||||||
|
|
||||||
- continuously builds the web presence reading files from `www/src` directory
|
|
||||||
and generating HTML files and copying assets to the `www/build` directory.
|
|
||||||
|
|
||||||
- Starts a browser window automatically where you can "refresh" as needed.
|
|
||||||
|
|
||||||
#### Custom web pages
|
|
||||||
|
|
||||||
You can skip uploading a web page
|
|
||||||
by setting `www_folder=disabled` in `chatmail.ini`.
|
|
||||||
|
|
||||||
If you want to manage your web pages outside this git repository,
|
|
||||||
you can set `www_folder` in `chatmail.ini` to a custom directory on your computer.
|
|
||||||
`cmdeploy run` will upload it as the server's home page,
|
|
||||||
and if it contains a `src/index.md` file,
|
|
||||||
will build it with hugo.
|
|
||||||
|
|
||||||
|
|
||||||
## Mailbox directory layout
|
|
||||||
|
|
||||||
Fresh chatmail addresses have a mailbox directory that contains:
|
|
||||||
|
|
||||||
- a `password` file with the salted password required for authenticating
|
|
||||||
whether a login may use the address to send/receive messages.
|
|
||||||
If you modify the password file manually, you effectively block the user.
|
|
||||||
|
|
||||||
- `enforceE2EEincoming` is a default-created file with each address.
|
|
||||||
If present the file indicates that this chatmail address rejects incoming cleartext messages.
|
|
||||||
If absent the address accepts incoming cleartext messages.
|
|
||||||
|
|
||||||
- `dovecot*`, `cur`, `new` and `tmp` represent IMAP/mailbox state.
|
|
||||||
If the address is only used by one device, the Maildir directories
|
|
||||||
will typically be empty unless the user of that address hasn't been online
|
|
||||||
for a while.
|
|
||||||
|
|
||||||
|
|
||||||
## Emergency Commands to disable automatic address creation
|
|
||||||
|
|
||||||
If you need to stop address creation,
|
|
||||||
e.g. because some script is wildly creating addresses,
|
|
||||||
login with ssh and run:
|
|
||||||
|
|
||||||
```
|
|
||||||
touch /etc/chatmail-nocreate
|
|
||||||
```
|
|
||||||
|
|
||||||
Chatmail address creation will be denied while this file is present.
|
|
||||||
|
|
||||||
### Ports
|
|
||||||
|
|
||||||
[Postfix](http://www.postfix.org/) listens on ports 25 (SMTP) and 587 (SUBMISSION) and 465 (SUBMISSIONS).
|
|
||||||
[Dovecot](https://www.dovecot.org/) listens on ports 143 (IMAP) and 993 (IMAPS).
|
|
||||||
[Nginx](https://www.nginx.com/) listens on port 8443 (HTTPS-ALT) and 443 (HTTPS).
|
|
||||||
Port 443 multiplexes HTTPS, IMAP and SMTP using ALPN to redirect connections to ports 8443, 465 or 993.
|
|
||||||
[acmetool](https://hlandau.github.io/acmetool/) listens on port 80 (HTTP).
|
|
||||||
|
|
||||||
chatmail-core based apps will, however, discover all ports and configurations
|
|
||||||
automatically by reading the [autoconfig XML file](https://www.ietf.org/archive/id/draft-bucksch-autoconfig-00.html) from the chatmail relay server.
|
|
||||||
|
|
||||||
## Email authentication
|
|
||||||
|
|
||||||
Chatmail relays enforce [DKIM](https://www.rfc-editor.org/rfc/rfc6376)
|
|
||||||
to authenticate incoming emails.
|
|
||||||
Incoming emails must have a valid DKIM signature with
|
|
||||||
Signing Domain Identifier (SDID, `d=` parameter in the DKIM-Signature header)
|
|
||||||
equal to the `From:` header domain.
|
|
||||||
This property is checked by OpenDKIM screen policy script
|
|
||||||
before validating the signatures.
|
|
||||||
This corresponds to strict [DMARC](https://www.rfc-editor.org/rfc/rfc7489) alignment (`adkim=s`),
|
|
||||||
but chatmail does not rely on DMARC and does not consult the sender policy published in DMARC records.
|
|
||||||
Other legacy authentication mechanisms such as [iprev](https://www.rfc-editor.org/rfc/rfc8601#section-2.7.3)
|
|
||||||
and [SPF](https://www.rfc-editor.org/rfc/rfc7208) are also not taken into account.
|
|
||||||
If there is no valid DKIM signature on the incoming email,
|
|
||||||
the sender receives a "5.7.1 No valid DKIM signature found" error.
|
|
||||||
|
|
||||||
Outgoing emails must be sent over an authenticated connection
|
|
||||||
with envelope MAIL FROM (return path) corresponding to the login.
|
|
||||||
This is ensured by Postfix which maps login username
|
|
||||||
to MAIL FROM with
|
|
||||||
[`smtpd_sender_login_maps`](https://www.postfix.org/postconf.5.html#smtpd_sender_login_maps)
|
|
||||||
and rejects incorrectly authenticated emails with the [`reject_sender_login_mismatch`](https://www.postfix.org/postconf.5.html#smtpd_sender_restrictions) policy.
|
|
||||||
`From:` header must correspond to envelope MAIL FROM,
|
|
||||||
this is ensured by `filtermail` proxy.
|
|
||||||
|
|
||||||
## TLS requirements
|
|
||||||
|
|
||||||
Postfix is configured to require valid TLS
|
|
||||||
by setting [`smtp_tls_security_level`](https://www.postfix.org/postconf.5.html#smtp_tls_security_level) to `verify`.
|
|
||||||
If emails don't arrive at your chatmail relay server,
|
|
||||||
the problem is likely that your relay does not have a valid TLS certificate.
|
|
||||||
|
|
||||||
You can test it by resolving `MX` records of your relay domain
|
|
||||||
and then connecting to MX relays (e.g. `mx.example.org`) with
|
|
||||||
`openssl s_client -connect mx.example.org:25 -verify_hostname mx.example.org -verify_return_error -starttls smtp`
|
|
||||||
from the host that has open port 25 to verify that certificate is valid.
|
|
||||||
|
|
||||||
When providing a TLS certificate to your chatmail relay server,
|
|
||||||
make sure to provide the full certificate chain
|
|
||||||
and not just the last certificate.
|
|
||||||
|
|
||||||
If you are running an Exim server and don't see incoming connections
|
|
||||||
from a chatmail relay server in the logs,
|
|
||||||
make sure `smtp_no_mail` log item is enabled in the config
|
|
||||||
with `log_selector = +smtp_no_mail`.
|
|
||||||
By default Exim does not log sessions that are closed
|
|
||||||
before sending the `MAIL` command.
|
|
||||||
This happens if the certificate is not recognized as valid by Postfix,
|
|
||||||
so you might think that connection is not established
|
|
||||||
while actually it is a problem with your TLS certificate.
|
|
||||||
|
|
||||||
## Migrating a chatmail relay to a new host
|
|
||||||
|
|
||||||
If you want to migrate chatmail relay from an old machine
|
|
||||||
to a new machine,
|
|
||||||
you can use these steps.
|
|
||||||
They were tested with a Linux laptop;
|
|
||||||
you might need to adjust some of the steps to your environment.
|
|
||||||
|
|
||||||
Let's assume that your `mail_domain` is `mail.example.org`,
|
|
||||||
all involved machines run Debian 12,
|
|
||||||
your old site's IP address is `13.37.13.37`,
|
|
||||||
and your new site's IP address is `13.12.23.42`.
|
|
||||||
|
|
||||||
Note, you should lower the TTLs of your DNS records to a value
|
|
||||||
such as 300 (5 minutes) so the migration happens as smoothly as possible.
|
|
||||||
|
|
||||||
During the guide you might get a warning about changed SSH Host keys;
|
|
||||||
in this case, just run `ssh-keygen -R "mail.example.org"` as recommended.
|
|
||||||
|
|
||||||
1. First, disable mail services on the old site.
|
|
||||||
|
|
||||||
```
|
|
||||||
cmdeploy run --disable-mail --ssh-host 13.37.13.37
|
|
||||||
```
|
|
||||||
|
|
||||||
Now your users will notice the migration
|
|
||||||
and will not be able to send or receive messages
|
|
||||||
until the migration is completed.
|
|
||||||
|
|
||||||
2. Now we want to copy `/home/vmail`, `/var/lib/acme`, `/etc/dkimkeys`, `/run/echobot`, and `/var/spool/postfix` to the new site.
|
|
||||||
Login to the old site while forwarding your SSH agent
|
|
||||||
so you can copy directly from the old to the new site with your SSH key:
|
|
||||||
```
|
|
||||||
ssh -A root@13.37.13.37
|
|
||||||
tar c - /home/vmail/mail /var/lib/acme /etc/dkimkeys /run/echobot /var/spool/postfix | ssh root@13.12.23.42 "tar x -C /"
|
|
||||||
```
|
|
||||||
|
|
||||||
This transfers all addresses, the TLS certificate, DKIM keys (so DKIM DNS record remains valid), and the echobot's password so it continues to function.
|
|
||||||
It also preserves the Postfix mail spool so any messages pending delivery will still be delivered.
|
|
||||||
|
|
||||||
3. Install chatmail on the new machine:
|
|
||||||
|
|
||||||
```
|
|
||||||
cmdeploy run --disable-mail --ssh-host 13.12.23.42
|
|
||||||
```
|
|
||||||
Postfix and Dovecot are disabled for now; we will enable them later.
|
|
||||||
We first need to make the new site fully operational.
|
|
||||||
|
|
||||||
4. On the new site, run the following to ensure the ownership is correct in case UIDs/GIDs changed:
|
|
||||||
|
|
||||||
```
|
|
||||||
chown root: -R /var/lib/acme
|
|
||||||
chown opendkim: -R /etc/dkimkeys
|
|
||||||
chown vmail: -R /home/vmail/mail
|
|
||||||
chown echobot: -R /run/echobot
|
|
||||||
```
|
|
||||||
|
|
||||||
5. Now, update DNS entries.
|
|
||||||
|
|
||||||
If other MTAs try to deliver messages to your chatmail domain they may fail intermittently,
|
|
||||||
as DNS catches up with the new site settings
|
|
||||||
but normally will retry delivering messages
|
|
||||||
for at least a week, so messages will not be lost.
|
|
||||||
|
|
||||||
6. Finally, you can execute `cmdeploy run --ssh-host 13.12.23.42` to turn on chatmail on the new relay.
|
|
||||||
Your users will be able to use the chatmail relay as soon as the DNS changes have propagated.
|
|
||||||
Voilà!
|
|
||||||
|
|
||||||
## Setting up a reverse proxy
|
|
||||||
|
|
||||||
A chatmail relay MTA does not track or depend on the client IP address
|
|
||||||
for its operation, so it can be run behind a reverse proxy.
|
|
||||||
This will not even affect incoming mail authentication
|
|
||||||
as DKIM only checks the cryptographic signature
|
|
||||||
of the message and does not use the IP address as the input.
|
|
||||||
|
|
||||||
For example, you may want to self-host your chatmail relay
|
|
||||||
and only use hosted VPS to provide a public IP address
|
|
||||||
for client connections and incoming mail.
|
|
||||||
You can connect chatmail relay to VPS
|
|
||||||
using a tunnel protocol
|
|
||||||
such as [WireGuard](https://www.wireguard.com/)
|
|
||||||
and set up a reverse proxy on a VPS
|
|
||||||
to forward connections to the chatmail relay
|
|
||||||
over the tunnel.
|
|
||||||
You can also set up multiple reverse proxies
|
|
||||||
for your chatmail relay in different networks
|
|
||||||
to ensure your relay is reachable even when
|
|
||||||
one of the IPs becomes inaccessible due to
|
|
||||||
hosting or routing problems.
|
|
||||||
|
|
||||||
Note that your chatmail relay still needs
|
|
||||||
to be able to make outgoing connections on port 25
|
|
||||||
to send messages outside.
|
|
||||||
|
|
||||||
To set up a reverse proxy
|
|
||||||
(or rather Destination NAT, DNAT)
|
|
||||||
for your chatmail relay,
|
|
||||||
put the following configuration in `/etc/nftables.conf`:
|
|
||||||
```
|
|
||||||
#!/usr/sbin/nft -f
|
|
||||||
|
|
||||||
flush ruleset
|
|
||||||
|
|
||||||
define wan = eth0
|
|
||||||
|
|
||||||
# Which ports to proxy.
|
|
||||||
#
|
|
||||||
# Note that SSH is not proxied
|
|
||||||
# so it is possible to log into the proxy server
|
|
||||||
# and not the original one.
|
|
||||||
define ports = { smtp, http, https, imap, imaps, submission, submissions }
|
|
||||||
|
|
||||||
# The host we want to proxy to.
|
|
||||||
define ipv4_address = AAA.BBB.CCC.DDD
|
|
||||||
define ipv6_address = [XXX::1]
|
|
||||||
|
|
||||||
table ip nat {
|
|
||||||
chain prerouting {
|
|
||||||
type nat hook prerouting priority dstnat; policy accept;
|
|
||||||
iif $wan tcp dport $ports dnat to $ipv4_address
|
|
||||||
}
|
|
||||||
|
|
||||||
chain postrouting {
|
|
||||||
type nat hook postrouting priority 0;
|
|
||||||
|
|
||||||
oifname $wan masquerade
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
table ip6 nat {
|
|
||||||
chain prerouting {
|
|
||||||
type nat hook prerouting priority dstnat; policy accept;
|
|
||||||
iif $wan tcp dport $ports dnat to $ipv6_address
|
|
||||||
}
|
|
||||||
|
|
||||||
chain postrouting {
|
|
||||||
type nat hook postrouting priority 0;
|
|
||||||
|
|
||||||
oifname $wan masquerade
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
table inet filter {
|
|
||||||
chain input {
|
|
||||||
type filter hook input priority filter; policy drop;
|
|
||||||
|
|
||||||
# Accept ICMP.
|
|
||||||
# It is especially important to accept ICMPv6 ND messages,
|
|
||||||
# otherwise IPv6 connectivity breaks.
|
|
||||||
icmp type { echo-request } accept
|
|
||||||
icmpv6 type { echo-request, nd-neighbor-solicit, nd-router-advert, nd-neighbor-advert } accept
|
|
||||||
|
|
||||||
# Allow incoming SSH connections.
|
|
||||||
tcp dport { ssh } accept
|
|
||||||
|
|
||||||
ct state established accept
|
|
||||||
}
|
|
||||||
chain forward {
|
|
||||||
type filter hook forward priority filter; policy drop;
|
|
||||||
|
|
||||||
ct state established accept
|
|
||||||
ip daddr $ipv4_address counter accept
|
|
||||||
ip6 daddr $ipv6_address counter accept
|
|
||||||
}
|
|
||||||
chain output {
|
|
||||||
type filter hook output priority filter;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Run `systemctl enable nftables.service`
|
|
||||||
to ensure configuration is reloaded when the proxy relay reboots.
|
|
||||||
|
|
||||||
Uncomment in `/etc/sysctl.conf` the following two lines:
|
|
||||||
|
|
||||||
```
|
|
||||||
net.ipv4.ip_forward=1
|
|
||||||
net.ipv6.conf.all.forwarding=1
|
|
||||||
```
|
|
||||||
|
|
||||||
Then reboot the relay or do `sysctl -p` and `nft -f /etc/nftables.conf`.
|
|
||||||
|
|
||||||
Once the proxy relay is set up,
|
|
||||||
you can add its IP address to the DNS.
|
|
||||||
|
|
||||||
## Neighbors and Acquaintances
|
|
||||||
|
|
||||||
Here are some related projects that you may be interested in:
|
|
||||||
|
|
||||||
- [Mox](https://github.com/mjl-/mox): A Golang email server. [Work is in
|
|
||||||
progress](https://github.com/mjl-/mox/issues/251) to modify it to support all
|
|
||||||
of the features and configuration settings required to operate as a chatmail
|
|
||||||
relay.
|
|
||||||
- [Maddy-Chatmail](https://github.com/sadraiiali/maddy_chatmail): a plugin for the
|
|
||||||
[Maddy email server](https://maddy.email/) which aims to implement the
|
|
||||||
chatmail relay features and configuration options.
|
|
||||||
|
|||||||
15
RELEASE.md
Normal file
15
RELEASE.md
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
# Releasing a new version of chatmail relay
|
||||||
|
|
||||||
|
For example, to release version 1.9.0 of chatmail relay, do the following steps.
|
||||||
|
|
||||||
|
1. Update the changelog: `git cliff --unreleased --tag 1.9.0 --prepend CHANGELOG.md` or `git cliff -u -t 1.9.0 -p CHANGELOG.md`.
|
||||||
|
|
||||||
|
2. Open the changelog in the editor, edit it if required.
|
||||||
|
|
||||||
|
3. Commit the changes to the changelog with a commit message `chore(release): prepare for 1.9.0`.
|
||||||
|
|
||||||
|
4. Tag the release: `git tag --annotate 1.9.0`.
|
||||||
|
|
||||||
|
5. Push the release tag: `git push origin 1.9.0`.
|
||||||
|
|
||||||
|
6. Create a GitHub release: `gh release create 1.9.0`.
|
||||||
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
|
|||||||
|
|
||||||
[project]
|
[project]
|
||||||
name = "chatmaild"
|
name = "chatmaild"
|
||||||
version = "0.2"
|
version = "0.3"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"aiosmtpd",
|
"aiosmtpd",
|
||||||
"iniconfig",
|
"iniconfig",
|
||||||
@@ -24,10 +24,9 @@ where = ['src']
|
|||||||
[project.scripts]
|
[project.scripts]
|
||||||
doveauth = "chatmaild.doveauth:main"
|
doveauth = "chatmaild.doveauth:main"
|
||||||
chatmail-metadata = "chatmaild.metadata:main"
|
chatmail-metadata = "chatmaild.metadata:main"
|
||||||
filtermail = "chatmaild.filtermail:main"
|
|
||||||
echobot = "chatmaild.echo:main"
|
|
||||||
chatmail-metrics = "chatmaild.metrics:main"
|
chatmail-metrics = "chatmaild.metrics:main"
|
||||||
delete_inactive_users = "chatmaild.delete_inactive_users:main"
|
chatmail-expire = "chatmaild.expire:main"
|
||||||
|
chatmail-fsreport = "chatmaild.fsreport:main"
|
||||||
lastlogin = "chatmaild.lastlogin:main"
|
lastlogin = "chatmaild.lastlogin:main"
|
||||||
turnserver = "chatmaild.turnserver:main"
|
turnserver = "chatmaild.turnserver:main"
|
||||||
|
|
||||||
@@ -71,5 +70,7 @@ commands =
|
|||||||
[testenv]
|
[testenv]
|
||||||
deps = pytest
|
deps = pytest
|
||||||
pdbpp
|
pdbpp
|
||||||
|
pytest-localserver
|
||||||
|
execnet
|
||||||
commands = pytest -v -rsXx {posargs}
|
commands = pytest -v -rsXx {posargs}
|
||||||
"""
|
"""
|
||||||
|
|||||||
@@ -4,8 +4,6 @@ import iniconfig
|
|||||||
|
|
||||||
from chatmaild.user import User
|
from chatmaild.user import User
|
||||||
|
|
||||||
echobot_password_path = Path("/run/echobot/password")
|
|
||||||
|
|
||||||
|
|
||||||
def read_config(inipath):
|
def read_config(inipath):
|
||||||
assert Path(inipath).exists(), inipath
|
assert Path(inipath).exists(), inipath
|
||||||
@@ -22,7 +20,8 @@ class Config:
|
|||||||
def __init__(self, inipath, params):
|
def __init__(self, inipath, params):
|
||||||
self._inipath = inipath
|
self._inipath = inipath
|
||||||
self.mail_domain = params["mail_domain"]
|
self.mail_domain = params["mail_domain"]
|
||||||
self.max_user_send_per_minute = int(params["max_user_send_per_minute"])
|
self.max_user_send_per_minute = int(params.get("max_user_send_per_minute", 60))
|
||||||
|
self.max_user_send_burst_size = int(params.get("max_user_send_burst_size", 10))
|
||||||
self.max_mailbox_size = params["max_mailbox_size"]
|
self.max_mailbox_size = params["max_mailbox_size"]
|
||||||
self.max_message_size = int(params.get("max_message_size", "31457280"))
|
self.max_message_size = int(params.get("max_message_size", "31457280"))
|
||||||
self.delete_mails_after = params["delete_mails_after"]
|
self.delete_mails_after = params["delete_mails_after"]
|
||||||
@@ -34,24 +33,19 @@ class Config:
|
|||||||
self.passthrough_senders = params["passthrough_senders"].split()
|
self.passthrough_senders = params["passthrough_senders"].split()
|
||||||
self.passthrough_recipients = params["passthrough_recipients"].split()
|
self.passthrough_recipients = params["passthrough_recipients"].split()
|
||||||
self.www_folder = params.get("www_folder", "")
|
self.www_folder = params.get("www_folder", "")
|
||||||
self.filtermail_smtp_port = int(params["filtermail_smtp_port"])
|
self.filtermail_smtp_port = int(params.get("filtermail_smtp_port", "10080"))
|
||||||
self.filtermail_smtp_port_incoming = int(
|
self.filtermail_smtp_port_incoming = int(
|
||||||
params["filtermail_smtp_port_incoming"]
|
params.get("filtermail_smtp_port_incoming", "10081")
|
||||||
)
|
)
|
||||||
self.postfix_reinject_port = int(params["postfix_reinject_port"])
|
self.postfix_reinject_port = int(params.get("postfix_reinject_port", "10025"))
|
||||||
self.postfix_reinject_port_incoming = int(
|
self.postfix_reinject_port_incoming = int(
|
||||||
params["postfix_reinject_port_incoming"]
|
params.get("postfix_reinject_port_incoming", "10026")
|
||||||
)
|
)
|
||||||
self.mtail_address = params.get("mtail_address")
|
self.mtail_address = params.get("mtail_address")
|
||||||
self.disable_ipv6 = params.get("disable_ipv6", "false").lower() == "true"
|
self.disable_ipv6 = params.get("disable_ipv6", "false").lower() == "true"
|
||||||
self.acme_email = params.get("acme_email", "")
|
self.acme_email = params.get("acme_email", "")
|
||||||
self.change_kernel_settings = (
|
|
||||||
params.get("change_kernel_settings", "true").lower() == "true"
|
|
||||||
)
|
|
||||||
self.fs_inotify_max_user_instances_and_watchers = int(
|
|
||||||
params["fs_inotify_max_user_instances_and_watchers"]
|
|
||||||
)
|
|
||||||
self.imap_rawlog = params.get("imap_rawlog", "false").lower() == "true"
|
self.imap_rawlog = params.get("imap_rawlog", "false").lower() == "true"
|
||||||
|
self.imap_compress = params.get("imap_compress", "false").lower() == "true"
|
||||||
if "iroh_relay" not in params:
|
if "iroh_relay" not in params:
|
||||||
self.iroh_relay = "https://" + params["mail_domain"]
|
self.iroh_relay = "https://" + params["mail_domain"]
|
||||||
self.enable_iroh_relay = True
|
self.enable_iroh_relay = True
|
||||||
@@ -62,6 +56,7 @@ class Config:
|
|||||||
self.privacy_mail = params.get("privacy_mail")
|
self.privacy_mail = params.get("privacy_mail")
|
||||||
self.privacy_pdo = params.get("privacy_pdo")
|
self.privacy_pdo = params.get("privacy_pdo")
|
||||||
self.privacy_supervisor = params.get("privacy_supervisor")
|
self.privacy_supervisor = params.get("privacy_supervisor")
|
||||||
|
self.tmpfs_index = params.get("tmpfs_index", "false").lower() == "true"
|
||||||
|
|
||||||
# deprecated option
|
# deprecated option
|
||||||
mbdir = params.get("mailboxes_dir", f"/home/vmail/mail/{self.mail_domain}")
|
mbdir = params.get("mailboxes_dir", f"/home/vmail/mail/{self.mail_domain}")
|
||||||
@@ -78,10 +73,7 @@ class Config:
|
|||||||
raise ValueError(f"invalid address {addr!r}")
|
raise ValueError(f"invalid address {addr!r}")
|
||||||
|
|
||||||
maildir = self.mailboxes_dir.joinpath(addr)
|
maildir = self.mailboxes_dir.joinpath(addr)
|
||||||
if addr.startswith("echo@"):
|
password_path = maildir.joinpath("password")
|
||||||
password_path = echobot_password_path
|
|
||||||
else:
|
|
||||||
password_path = maildir.joinpath("password")
|
|
||||||
|
|
||||||
return User(maildir, addr, password_path, uid="vmail", gid="vmail")
|
return User(maildir, addr, password_path, uid="vmail", gid="vmail")
|
||||||
|
|
||||||
@@ -120,10 +112,10 @@ def get_default_config_content(mail_domain, **overrides):
|
|||||||
|
|
||||||
if mail_domain.endswith(".testrun.org"):
|
if mail_domain.endswith(".testrun.org"):
|
||||||
override_inipath = inidir.joinpath("override-testrun.ini")
|
override_inipath = inidir.joinpath("override-testrun.ini")
|
||||||
privacy = iniconfig.IniConfig(override_inipath)["privacy"]
|
params = iniconfig.IniConfig(override_inipath)["params"]
|
||||||
lines = []
|
lines = []
|
||||||
for line in content.split("\n"):
|
for line in content.split("\n"):
|
||||||
for key, value in privacy.items():
|
for key, value in params.items():
|
||||||
value_lines = value.format(mail_domain=mail_domain).strip().split("\n")
|
value_lines = value.format(mail_domain=mail_domain).strip().split("\n")
|
||||||
if not line.startswith(f"{key} =") or not value_lines:
|
if not line.startswith(f"{key} =") or not value_lines:
|
||||||
continue
|
continue
|
||||||
|
|||||||
@@ -1,31 +0,0 @@
|
|||||||
"""
|
|
||||||
Remove inactive users
|
|
||||||
"""
|
|
||||||
|
|
||||||
import os
|
|
||||||
import shutil
|
|
||||||
import sys
|
|
||||||
import time
|
|
||||||
|
|
||||||
from .config import read_config
|
|
||||||
|
|
||||||
|
|
||||||
def delete_inactive_users(config):
|
|
||||||
cutoff_date = time.time() - config.delete_inactive_users_after * 86400
|
|
||||||
for addr in os.listdir(config.mailboxes_dir):
|
|
||||||
try:
|
|
||||||
user = config.get_user(addr)
|
|
||||||
except ValueError:
|
|
||||||
continue
|
|
||||||
|
|
||||||
read_timestamp = user.get_last_login_timestamp()
|
|
||||||
if read_timestamp and read_timestamp < cutoff_date:
|
|
||||||
path = config.mailboxes_dir.joinpath(addr)
|
|
||||||
assert path == user.maildir
|
|
||||||
shutil.rmtree(path, ignore_errors=True)
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
(cfgpath,) = sys.argv[1:]
|
|
||||||
config = read_config(cfgpath)
|
|
||||||
delete_inactive_users(config)
|
|
||||||
@@ -22,7 +22,7 @@ class DictProxy:
|
|||||||
wfile.flush()
|
wfile.flush()
|
||||||
|
|
||||||
def handle_dovecot_request(self, msg, transactions):
|
def handle_dovecot_request(self, msg, transactions):
|
||||||
# see https://doc.dovecot.org/developer_manual/design/dict_protocol/#dovecot-dict-protocol
|
# see https://doc.dovecot.org/2.3/developer_manual/design/dict_protocol/#dovecot-dict-protocol
|
||||||
short_command = msg[0]
|
short_command = msg[0]
|
||||||
parts = msg[1:].split("\t")
|
parts = msg[1:].split("\t")
|
||||||
|
|
||||||
|
|||||||
@@ -16,7 +16,7 @@ NOCREATE_FILE = "/etc/chatmail-nocreate"
|
|||||||
|
|
||||||
|
|
||||||
def encrypt_password(password: str):
|
def encrypt_password(password: str):
|
||||||
# https://doc.dovecot.org/configuration_manual/authentication/password_schemes/
|
# https://doc.dovecot.org/2.3/configuration_manual/authentication/password_schemes/
|
||||||
passhash = crypt_r.crypt(password, crypt_r.METHOD_SHA512)
|
passhash = crypt_r.crypt(password, crypt_r.METHOD_SHA512)
|
||||||
return "{SHA512-CRYPT}" + passhash
|
return "{SHA512-CRYPT}" + passhash
|
||||||
|
|
||||||
@@ -40,10 +40,6 @@ def is_allowed_to_create(config: Config, user, cleartext_password) -> bool:
|
|||||||
return False
|
return False
|
||||||
localpart, domain = parts
|
localpart, domain = parts
|
||||||
|
|
||||||
if localpart == "echo":
|
|
||||||
# echobot account should not be created in the database
|
|
||||||
return False
|
|
||||||
|
|
||||||
if (
|
if (
|
||||||
len(localpart) > config.username_max_length
|
len(localpart) > config.username_max_length
|
||||||
or len(localpart) < config.username_min_length
|
or len(localpart) < config.username_min_length
|
||||||
|
|||||||
@@ -1,109 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""Advanced echo bot example.
|
|
||||||
|
|
||||||
it will echo back any message that has non-empty text and also supports the /help command.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
from deltachat_rpc_client import Bot, DeltaChat, EventType, Rpc, events
|
|
||||||
|
|
||||||
from chatmaild.config import echobot_password_path, read_config
|
|
||||||
from chatmaild.doveauth import encrypt_password
|
|
||||||
from chatmaild.newemail import create_newemail_dict
|
|
||||||
|
|
||||||
hooks = events.HookCollection()
|
|
||||||
|
|
||||||
|
|
||||||
@hooks.on(events.RawEvent)
|
|
||||||
def log_event(event):
|
|
||||||
if event.kind == EventType.INFO:
|
|
||||||
logging.info(event.msg)
|
|
||||||
elif event.kind == EventType.WARNING:
|
|
||||||
logging.warning(event.msg)
|
|
||||||
|
|
||||||
|
|
||||||
@hooks.on(events.RawEvent(EventType.ERROR))
|
|
||||||
def log_error(event):
|
|
||||||
logging.error("%s", event.msg)
|
|
||||||
|
|
||||||
|
|
||||||
@hooks.on(events.MemberListChanged)
|
|
||||||
def on_memberlist_changed(event):
|
|
||||||
logging.info(
|
|
||||||
"member %s was %s", event.member, "added" if event.member_added else "removed"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@hooks.on(events.GroupImageChanged)
|
|
||||||
def on_group_image_changed(event):
|
|
||||||
logging.info("group image %s", "deleted" if event.image_deleted else "changed")
|
|
||||||
|
|
||||||
|
|
||||||
@hooks.on(events.GroupNameChanged)
|
|
||||||
def on_group_name_changed(event):
|
|
||||||
logging.info(f"group name changed, old name: {event.old_name}")
|
|
||||||
|
|
||||||
|
|
||||||
@hooks.on(events.NewMessage(func=lambda e: not e.command))
|
|
||||||
def echo(event):
|
|
||||||
snapshot = event.message_snapshot
|
|
||||||
if snapshot.is_info:
|
|
||||||
# Ignore info messages
|
|
||||||
return
|
|
||||||
if snapshot.text or snapshot.file:
|
|
||||||
snapshot.chat.send_message(text=snapshot.text, file=snapshot.file)
|
|
||||||
|
|
||||||
|
|
||||||
@hooks.on(events.NewMessage(command="/help"))
|
|
||||||
def help_command(event):
|
|
||||||
snapshot = event.message_snapshot
|
|
||||||
snapshot.chat.send_text("Send me any message and I will echo it back")
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
logging.basicConfig(level=logging.INFO)
|
|
||||||
path = os.environ.get("PATH")
|
|
||||||
venv_path = sys.argv[0].strip("echobot")
|
|
||||||
os.environ["PATH"] = path + ":" + venv_path
|
|
||||||
with Rpc() as rpc:
|
|
||||||
deltachat = DeltaChat(rpc)
|
|
||||||
system_info = deltachat.get_system_info()
|
|
||||||
logging.info(f"Running deltachat core {system_info.deltachat_core_version}")
|
|
||||||
|
|
||||||
accounts = deltachat.get_all_accounts()
|
|
||||||
account = accounts[0] if accounts else deltachat.add_account()
|
|
||||||
|
|
||||||
bot = Bot(account, hooks)
|
|
||||||
|
|
||||||
config = read_config(sys.argv[1])
|
|
||||||
addr = "echo@" + config.mail_domain
|
|
||||||
|
|
||||||
# Create password file
|
|
||||||
if bot.is_configured():
|
|
||||||
password = bot.account.get_config("mail_pw")
|
|
||||||
else:
|
|
||||||
password = create_newemail_dict(config)["password"]
|
|
||||||
|
|
||||||
echobot_password_path.write_text(encrypt_password(password))
|
|
||||||
# Give the user which doveauth runs as access to the password file.
|
|
||||||
subprocess.check_call(
|
|
||||||
["/usr/bin/setfacl", "-m", "user:vmail:r", echobot_password_path],
|
|
||||||
)
|
|
||||||
|
|
||||||
if not bot.is_configured():
|
|
||||||
bot.configure(addr, password)
|
|
||||||
|
|
||||||
# write invite link to working directory
|
|
||||||
invitelink = bot.account.get_qr_code()
|
|
||||||
Path("invite-link.txt").write_text(invitelink)
|
|
||||||
|
|
||||||
bot.run_forever()
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
||||||
216
chatmaild/src/chatmaild/expire.py
Normal file
216
chatmaild/src/chatmaild/expire.py
Normal file
@@ -0,0 +1,216 @@
|
|||||||
|
"""
|
||||||
|
Expire old messages and addresses.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
from argparse import ArgumentParser
|
||||||
|
from collections import namedtuple
|
||||||
|
from datetime import datetime
|
||||||
|
from stat import S_ISREG
|
||||||
|
|
||||||
|
from chatmaild.config import read_config
|
||||||
|
|
||||||
|
# (path, mtime, size) record for a single regular file found during scanning
FileEntry = namedtuple("FileEntry", ("path", "mtime", "size"))


def iter_mailboxes(basedir, maxnum, tmpfs_index):
    """Yield a MailboxStat for up to ``maxnum`` mailbox directories below basedir."""
    if not os.path.exists(basedir):
        print_info(f"no mailboxes found at: {basedir}")
        return
    for dirname in os_listdir_if_exists(basedir)[:maxnum]:
        # mailbox directories are named after the e-mail address they serve
        if "@" not in dirname:
            continue
        yield MailboxStat(basedir + "/" + dirname, dirname, tmpfs_index)


def get_file_entry(path):
    """Return a FileEntry for ``path``, or None if it is missing or not a regular file."""
    try:
        st = os.stat(path)
    except FileNotFoundError:
        # the file may have been delivered/expunged concurrently
        return None
    return FileEntry(path, st.st_mtime, st.st_size) if S_ISREG(st.st_mode) else None
|
||||||
|
|
||||||
|
|
||||||
|
def os_listdir_if_exists(path):
    """return a list of names obtained from os.listdir or an empty list if the path does not exist."""
    try:
        return os.listdir(path)
    except FileNotFoundError:
        # a mailbox or folder may vanish concurrently (e.g. removed by expiry);
        # treat it as simply empty
        return []
|
||||||
|
|
||||||
|
|
||||||
|
class MailboxStat:
    """Filesystem scan result for one mailbox (maildir) directory."""

    # mtime of the "password" file, used as a proxy for the user's last login;
    # stays None when no password file is found during scanning
    last_login = None

    def __init__(self, basedir, name, tmpfs_index):
        # basedir: filesystem path of the mailbox directory
        self.basedir = str(basedir)
        # name: the directory name (the e-mail address of the mailbox)
        self.name = name
        # FileEntry items found inside cur/new/tmp maildir folders
        self.messages = []
        # FileEntry items for everything else (index files, password file, ...)
        self.extrafiles = []
        self.scandir(self.basedir)
        if tmpfs_index:
            # index files may additionally live on tmpfs under /dev/shm/<name>
            self.scandir("/dev/shm/" + name)

    def scandir(self, folderdir):
        """Recursively collect file entries below ``folderdir``."""
        for name in os_listdir_if_exists(folderdir):
            path = f"{folderdir}/{name}"
            if name in ("cur", "new", "tmp"):
                # maildir message folders: every regular file is a message
                for msg_name in os_listdir_if_exists(path):
                    entry = get_file_entry(f"{path}/{msg_name}")
                    if entry is not None:
                        self.messages.append(entry)
            elif os.path.isdir(path):
                # recurse into sub-directories (e.g. IMAP folders)
                self.scandir(path)
            else:
                entry = get_file_entry(path)
                if entry is not None:
                    self.extrafiles.append(entry)
                    if name == "password":
                        # password file mtime approximates the last login time
                        self.last_login = entry.mtime
        # largest extra files first
        self.extrafiles.sort(key=lambda x: -x.size)
|
||||||
|
|
||||||
|
|
||||||
|
def print_info(msg):
    """Write an informational message to stderr."""
    print(msg, file=sys.stderr)
|
||||||
|
|
||||||
|
|
||||||
|
class Expiry:
    """Removes expired mailboxes and messages according to config cutoffs."""

    def __init__(self, config, dry, now, verbose):
        self.config = config
        # dry-run mode: report what would be removed without touching the fs
        self.dry = dry
        # reference timestamp used for all cutoff computations
        self.now = now
        self.verbose = verbose
        # counters for the final summary (updated also in dry-run mode)
        self.del_mboxes = 0
        self.all_mboxes = 0
        self.del_files = 0
        self.all_files = 0
        self.start = time.time()

    def remove_mailbox(self, mboxdir, name):
        """Remove a whole mailbox directory (and its tmpfs index dir, if enabled)."""
        if self.verbose:
            print_info(f"removing {mboxdir}")
        if not self.dry:
            shutil.rmtree(mboxdir)
            if self.config.tmpfs_index:
                # NOTE(review): raises FileNotFoundError when /dev/shm/<name>
                # does not exist -- confirm every mailbox gets a tmpfs dir
                shutil.rmtree("/dev/shm/" + name)
        self.del_mboxes += 1

    def remove_file(self, path, mtime=None):
        """Remove a single file; ``mtime`` is only used for verbose output."""
        if self.verbose:
            if mtime is not None:
                date = datetime.fromtimestamp(mtime).strftime("%b %d")
                print_info(f"removing {date} {path}")
            else:
                print_info(f"removing {path}")
        if not self.dry:
            try:
                os.unlink(path)
            except FileNotFoundError:
                # best effort: the file may have vanished concurrently
                print_info(f"file not found/vanished {path}")
        self.del_files += 1

    def process_mailbox_stat(self, mbox):
        """Apply expiry rules to a single scanned mailbox."""
        # users inactive longer than this lose their whole mailbox
        cutoff_without_login = (
            self.now - int(self.config.delete_inactive_users_after) * 86400
        )
        # messages older than this are removed
        cutoff_mails = self.now - int(self.config.delete_mails_after) * 86400
        # large messages (>200KB) in cur/ are removed earlier
        cutoff_large_mails = self.now - int(self.config.delete_large_after) * 86400

        self.all_mboxes += 1
        changed = False
        if mbox.last_login and mbox.last_login < cutoff_without_login:
            self.remove_mailbox(mbox.basedir, mbox.name)
            return

        mboxname = os.path.basename(mbox.basedir)
        if self.verbose:
            date = datetime.fromtimestamp(mbox.last_login) if mbox.last_login else None
            if date:
                print_info(f"checking mailbox {date.strftime('%b %d')} {mboxname}")
            else:
                print_info(f"checking mailbox (no last_login) {mboxname}")
        self.all_files += len(mbox.messages)
        for message in mbox.messages:
            if message.mtime < cutoff_mails:
                self.remove_file(message.path, mtime=message.mtime)
            elif message.size > 200000 and message.mtime < cutoff_large_mails:
                # we only remove noticed large files (not unnoticed ones in new/)
                parts = message.path.split("/")
                if len(parts) >= 2 and parts[-2] == "cur":
                    self.remove_file(message.path, mtime=message.mtime)
            else:
                continue
            changed = True
        if changed:
            # NOTE(review): maildirsize presumably caches dovecot quota usage;
            # removing it forces recalculation after deletions -- confirm
            self.remove_file(f"{mbox.basedir}/maildirsize")
        for file in mbox.extrafiles:
            # oversized dovecot index files are removed; presumably dovecot
            # rebuilds them on next access -- confirm
            if "dovecot.index" in file.path.split("/")[-1] and file.size > 500 * 1024:
                self.remove_file(file.path)

    def get_summary(self):
        """Return a one-line human-readable summary of this run."""
        return (
            f"Removed {self.del_mboxes} out of {self.all_mboxes} mailboxes "
            f"and {self.del_files} out of {self.all_files} files in existing mailboxes "
            f"in {time.time() - self.start:2.2f} seconds"
        )
|
||||||
|
|
||||||
|
|
||||||
|
def main(args=None):
    """Expire mailboxes and messages according to chatmail config"""
    parser = ArgumentParser(description=main.__doc__)
    ini = "/usr/local/lib/chatmaild/chatmail.ini"
    parser.add_argument(
        "chatmail_ini",
        action="store",
        nargs="?",
        help=f"path pointing to chatmail.ini file, default: {ini}",
        default=ini,
    )
    parser.add_argument(
        "--days", action="store", help="assume date to be days older than now"
    )

    parser.add_argument(
        "--maxnum",
        default=None,
        action="store",
        help="maximum number of mailboxes to iterate on",
    )
    parser.add_argument(
        "-v",
        dest="verbose",
        action="store_true",
        help="print out removed files and mailboxes",
    )

    parser.add_argument(
        "--remove",
        dest="remove",
        action="store_true",
        help="actually remove all expired files and dirs",
    )
    args = parser.parse_args(args)

    config = read_config(args.chatmail_ini)
    # NOTE(review): datetime.utcnow() is deprecated since Python 3.12;
    # datetime.now(timezone.utc) would be the modern equivalent
    now = datetime.utcnow().timestamp()
    if args.days:
        # shift the reference date into the past (useful for testing)
        now = now - 86400 * int(args.days)

    maxnum = int(args.maxnum) if args.maxnum else None
    # dry-run unless --remove was given explicitly
    exp = Expiry(config, dry=not args.remove, now=now, verbose=args.verbose)
    for mailbox in iter_mailboxes(
        str(config.mailboxes_dir), maxnum, config.tmpfs_index
    ):
        exp.process_mailbox_stat(mailbox)
    print(exp.get_summary())


if __name__ == "__main__":
    main(sys.argv[1:])
|
||||||
@@ -1,372 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
import asyncio
|
|
||||||
import base64
|
|
||||||
import binascii
|
|
||||||
import logging
|
|
||||||
import sys
|
|
||||||
import time
|
|
||||||
from email import policy
|
|
||||||
from email.parser import BytesParser
|
|
||||||
from email.utils import parseaddr
|
|
||||||
from smtplib import SMTP as SMTPClient
|
|
||||||
|
|
||||||
from aiosmtpd.controller import Controller
|
|
||||||
from aiosmtpd.smtp import SMTP
|
|
||||||
|
|
||||||
from .config import read_config
|
|
||||||
|
|
||||||
ENCRYPTION_NEEDED_523 = "523 Encryption Needed: Invalid Unencrypted Mail"
|
|
||||||
|
|
||||||
|
|
||||||
def check_openpgp_payload(payload: bytes):
    """Checks the OpenPGP payload.

    OpenPGP payload must consist only of PKESK and SKESK packets
    terminated by a single SEIPD packet.

    Returns True if OpenPGP payload is correct,
    False otherwise.

    May raise IndexError while trying to read OpenPGP packet header
    if it is truncated.
    """
    i = 0
    while i < len(payload):
        # Only OpenPGP format is allowed.
        # (new-format packet headers have the two top bits set)
        if payload[i] & 0xC0 != 0xC0:
            return False

        packet_type_id = payload[i] & 0x3F
        i += 1

        # Skip over partial body length octets (values 224..254),
        # each announcing a power-of-two sized chunk.
        while payload[i] >= 224 and payload[i] < 255:
            # Partial body length.
            partial_length = 1 << (payload[i] & 0x1F)
            i += 1 + partial_length

        if payload[i] < 192:
            # One-octet length.
            body_len = payload[i]
            i += 1
        elif payload[i] < 224:
            # Two-octet length.
            body_len = ((payload[i] - 192) << 8) + payload[i + 1] + 192
            i += 2
        elif payload[i] == 255:
            # Five-octet length.
            body_len = (
                (payload[i + 1] << 24)
                | (payload[i + 2] << 16)
                | (payload[i + 3] << 8)
                | payload[i + 4]
            )
            i += 5
        else:
            # Impossible, partial body length was processed above.
            return False

        # advance past the packet body
        i += body_len

        if i == len(payload):
            # Last packet should be
            # Symmetrically Encrypted and Integrity Protected Data Packet (SEIPD)
            #
            # This is the only place where this function may return `True`.
            return packet_type_id == 18
        elif packet_type_id not in [1, 3]:
            # All packets except the last one must be either
            # Public-Key Encrypted Session Key Packet (PKESK)
            # or
            # Symmetric-Key Encrypted Session Key Packet (SKESK)
            return False

    # Reached only when a packet body overran the end of the payload.
    return False
|
|
||||||
|
|
||||||
|
|
||||||
def check_armored_payload(payload: str, outgoing: bool):
    """Check the armored PGP message for invalid content.

    :param payload: the armored PGP message
    :param outgoing: whether the message is outgoing or incoming
    :return: whether the message is a valid PGP message
    """
    prefix = "-----BEGIN PGP MESSAGE-----\r\n"
    if not payload.startswith(prefix):
        return False
    payload = payload.removeprefix(prefix)

    # strip all trailing CRLFs so the END marker check works
    while payload.endswith("\r\n"):
        payload = payload.removesuffix("\r\n")
    suffix = "-----END PGP MESSAGE-----"
    if not payload.endswith(suffix):
        return False
    payload = payload.removesuffix(suffix)

    # Disallow comments in outgoing messages
    version_comment = "Version: "
    if payload.startswith(version_comment):
        version_line = payload.splitlines()[0]
        payload = payload.removeprefix(version_line)
        # outgoing mail must not carry a Version: armor header at all
        if outgoing:
            return False

    # skip the blank line(s) separating armor headers from the base64 body
    while payload.startswith("\r\n"):
        payload = payload.removeprefix("\r\n")

    # Remove CRC24.
    # NOTE(review): cuts at the last "=" which normally starts the CRC line;
    # when no CRC line is present this may cut base64 padding instead and
    # b64decode below rejects the result -- confirm intended strictness
    payload = payload.rpartition("=")[0]

    try:
        payload = base64.b64decode(payload)
    except binascii.Error:
        return False

    try:
        return check_openpgp_payload(payload)
    except IndexError:
        # truncated packet header inside the decoded payload
        return False
|
|
||||||
|
|
||||||
|
|
||||||
def is_securejoin(message):
    """Return True if the message is a cleartext securejoin handshake request.

    Such a message carries a Secure-Join header with value "vc-request" or
    "vg-request" and consists of exactly one non-multipart text/plain part
    whose body repeats the same secure-join request line.
    """
    if message.get("secure-join") not in ("vc-request", "vg-request"):
        return False
    if not message.is_multipart():
        return False
    seen = 0
    for part in message.iter_parts():
        seen += 1
        # more than one part, or a nested multipart, disqualifies the message
        if seen > 1 or part.is_multipart():
            return False
        if part.get_content_type() != "text/plain":
            return False
        body = part.get_payload().strip().lower()
        if body not in ("secure-join: vc-request", "secure-join: vg-request"):
            return False
    return True
|
|
||||||
|
|
||||||
|
|
||||||
def check_encrypted(message, outgoing=True):
    """Check that the message is an OpenPGP-encrypted message.

    MIME structure of the message must correspond to <https://www.rfc-editor.org/rfc/rfc3156>.
    """
    if not message.is_multipart():
        return False
    if message.get_content_type() != "multipart/encrypted":
        return False
    parts_count = 0
    for part in message.iter_parts():
        # We explicitly check Content-Type of each part later,
        # but this is to be absolutely sure `get_payload()` returns string and not list.
        if part.is_multipart():
            return False

        if parts_count == 0:
            # first part: the RFC 3156 control part
            if part.get_content_type() != "application/pgp-encrypted":
                return False

            payload = part.get_payload()
            if payload.strip() != "Version: 1":
                return False
        elif parts_count == 1:
            # second part: the ASCII-armored ciphertext
            if part.get_content_type() != "application/octet-stream":
                return False

            if not check_armored_payload(part.get_payload(), outgoing=outgoing):
                return False
        else:
            # more than two parts is not a valid multipart/encrypted structure
            return False
        parts_count += 1
    # NOTE(review): a multipart/encrypted message with fewer than two parts
    # also reaches this True -- confirm callers rely on upstream MIME checks
    return True
|
|
||||||
|
|
||||||
|
|
||||||
async def asyncmain_beforequeue(config, mode):
    """Start the before-queue SMTP filter.

    :param config: chatmail configuration object
    :param mode: "outgoing" or "incoming"; selects the listening port and handler
    """
    if mode == "outgoing":
        port = config.filtermail_smtp_port
        handler = OutgoingBeforeQueueHandler(config)
    else:
        port = config.filtermail_smtp_port_incoming
        handler = IncomingBeforeQueueHandler(config)
    # NOTE(review): Controller.start() is expected to run the SMTP server in
    # the background; the caller's event loop keeps running -- confirm with
    # the aiosmtpd Controller documentation
    HackedController(
        handler,
        hostname="127.0.0.1",
        port=port,
        data_size_limit=config.max_message_size,
    ).start()
|
|
||||||
|
|
||||||
|
|
||||||
def recipient_matches_passthrough(recipient, passthrough_recipients):
    """Return True if ``recipient`` may receive unencrypted mail.

    :param recipient: the envelope recipient address
    :param passthrough_recipients: iterable of allowed entries; an entry
        starting with "@" whitelists the whole recipient domain
    """
    for addr in passthrough_recipients:
        if recipient == addr:
            return True
        # BUGFIX: use startswith instead of addr[0] so an empty entry
        # (e.g. from splitting a blank config value) cannot raise IndexError
        if addr.startswith("@") and recipient.endswith(addr):
            return True
    return False
|
|
||||||
|
|
||||||
|
|
||||||
class HackedController(Controller):
    """aiosmtpd Controller that builds our RCPT-parameter-discarding SMTP server."""

    def factory(self):
        # build the SMTP server with the subclass below instead of aiosmtpd's SMTP
        return SMTPDiscardRCPTO_options(self.handler, **self.SMTP_kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
class SMTPDiscardRCPTO_options(SMTP):
    """SMTP server that accepts and discards all RCPT TO ESMTP parameters."""

    def _getparams(self, params):
        # Ignore RCPT TO parameters.
        #
        # Otherwise parameters such as `ORCPT=...`
        # or `NOTIFY=DELAY,FAILURE` (generated by Stalwart)
        # make aiosmtpd reject the message here:
        # <https://github.com/aio-libs/aiosmtpd/blob/98f578389ae86e5345cc343fa4e5a17b21d9c96d/aiosmtpd/smtp.py#L1379-L1384>
        return {}
|
|
||||||
|
|
||||||
|
|
||||||
class OutgoingBeforeQueueHandler:
    """aiosmtpd handler that filters outgoing mail before Postfix queues it."""

    def __init__(self, config):
        self.config = config
        # per-sender sliding-window rate limiter
        self.send_rate_limiter = SendRateLimiter()

    async def handle_MAIL(self, server, session, envelope, address, mail_options):
        """Record the sender, enforce the send rate limit and a basic address check."""
        logging.info(f"handle_MAIL from {address}")
        envelope.mail_from = address
        max_sent = self.config.max_user_send_per_minute
        if not self.send_rate_limiter.is_sending_allowed(address, max_sent):
            return f"450 4.7.1: Too much mail from {address}"

        # the envelope sender must look like localpart@domain
        parts = envelope.mail_from.split("@")
        if len(parts) != 2:
            return f"500 Invalid from address <{envelope.mail_from!r}>"

        return "250 OK"

    async def handle_DATA(self, server, session, envelope):
        """Filter the message; re-inject accepted mail into Postfix."""
        logging.info("handle_DATA before-queue")
        error = self.check_DATA(envelope)
        if error:
            return error
        logging.info("re-injecting the mail that passed checks")
        client = SMTPClient("localhost", self.config.postfix_reinject_port)
        client.sendmail(
            envelope.mail_from, envelope.rcpt_tos, envelope.original_content
        )
        return "250 OK"

    def check_DATA(self, envelope):
        """the central filtering function for e-mails.

        Returns an SMTP error string to reject the message, or None to accept it.
        """
        logging.info(f"Processing DATA message from {envelope.mail_from}")

        message = BytesParser(policy=policy.default).parsebytes(envelope.content)
        mail_encrypted = check_encrypted(message, outgoing=True)

        _, from_addr = parseaddr(message.get("from").strip())

        # the From: header must match the envelope sender (no spoofing)
        if envelope.mail_from.lower() != from_addr.lower():
            return f"500 Invalid FROM <{from_addr!r}> for <{envelope.mail_from!r}>"

        if mail_encrypted or is_securejoin(message):
            print("Outgoing: Filtering encrypted mail.", file=sys.stderr)
            return

        print("Outgoing: Filtering unencrypted mail.", file=sys.stderr)

        # configured senders may send cleartext to anyone
        if envelope.mail_from in self.config.passthrough_senders:
            return

        # allow self-sent Autocrypt Setup Message
        if envelope.rcpt_tos == [from_addr]:
            if message.get("subject") == "Autocrypt Setup Message":
                if message.get_content_type() == "multipart/mixed":
                    return

        passthrough_recipients = self.config.passthrough_recipients

        for recipient in envelope.rcpt_tos:
            if recipient_matches_passthrough(recipient, passthrough_recipients):
                continue

            # at least one recipient is not whitelisted: reject the whole mail
            print("Rejected unencrypted mail.", file=sys.stderr)
            return ENCRYPTION_NEEDED_523
|
|
||||||
|
|
||||||
|
|
||||||
class IncomingBeforeQueueHandler:
    """aiosmtpd handler that filters incoming mail before Postfix queues it."""

    def __init__(self, config):
        self.config = config

    async def handle_DATA(self, server, session, envelope):
        """Filter the message; re-inject accepted mail into Postfix."""
        logging.info("handle_DATA before-queue")
        error = self.check_DATA(envelope)
        if error:
            return error
        logging.info("re-injecting the mail that passed checks")

        # the smtp daemon on reinject_port_incoming gives it to dkim milter
        # which looks at source address to determine whether to verify or sign
        client = SMTPClient(
            "localhost",
            self.config.postfix_reinject_port_incoming,
            source_address=("127.0.0.2", 0),
        )
        client.sendmail(
            envelope.mail_from, envelope.rcpt_tos, envelope.original_content
        )
        return "250 OK"

    def check_DATA(self, envelope):
        """the central filtering function for e-mails.

        Returns an SMTP error string to reject the message, or None to accept it.
        """
        logging.info(f"Processing DATA message from {envelope.mail_from}")

        message = BytesParser(policy=policy.default).parsebytes(envelope.content)
        mail_encrypted = check_encrypted(message, outgoing=False)

        if mail_encrypted or is_securejoin(message):
            print("Incoming: Filtering encrypted mail.", file=sys.stderr)
            return

        print("Incoming: Filtering unencrypted mail.", file=sys.stderr)

        # we want cleartext mailer-daemon messages to pass through
        # chatmail core will typically not display them as normal messages
        if message.get("auto-submitted"):
            _, from_addr = parseaddr(message.get("from").strip())
            if from_addr.lower().startswith("mailer-daemon@"):
                if message.get_content_type() == "multipart/report":
                    return

        for recipient in envelope.rcpt_tos:
            user = self.config.get_user(recipient)
            # unknown users and users accepting cleartext are fine
            if user is None or user.is_incoming_cleartext_ok():
                continue

            # at least one recipient insists on encryption: reject
            print("Rejected unencrypted mail.", file=sys.stderr)
            return ENCRYPTION_NEEDED_523
|
|
||||||
|
|
||||||
|
|
||||||
class SendRateLimiter:
    """Tracks per-sender timestamps to enforce a per-minute send limit."""

    def __init__(self):
        # maps sender address -> list of send timestamps within the last minute
        self.addr2timestamps = {}

    def is_sending_allowed(self, mail_from, max_send_per_minute):
        """Return True (and record the send) if ``mail_from`` is under the limit.

        Timestamps older than 60 seconds are pruned from the sliding window
        before the limit is checked.
        """
        last = self.addr2timestamps.setdefault(mail_from, [])
        now = time.time()
        # drop entries outside the sliding one-minute window
        last[:] = [ts for ts in last if ts >= (now - 60)]
        # BUGFIX: strict comparison -- the previous "<=" admitted one send
        # more than max_send_per_minute within the window
        if len(last) < max_send_per_minute:
            last.append(now)
            return True
        return False
|
|
||||||
|
|
||||||
|
|
||||||
def main():
    """Entry point: run the before-queue filter.

    Expects two CLI arguments: the chatmail.ini path and the mode
    ("incoming" or "outgoing").
    """
    args = sys.argv[1:]
    assert len(args) == 2
    config = read_config(args[0])
    mode = args[1]
    logging.basicConfig(level=logging.WARN)
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    assert mode in ["incoming", "outgoing"]
    # schedule the server startup coroutine, then serve forever
    task = asyncmain_beforequeue(config, mode)
    loop.create_task(task)
    logging.info("entering serving loop")
    loop.run_forever()
|
|
||||||
168
chatmaild/src/chatmaild/fsreport.py
Normal file
168
chatmaild/src/chatmaild/fsreport.py
Normal file
@@ -0,0 +1,168 @@
|
|||||||
|
"""
|
||||||
|
command line tool to analyze mailbox message storage
|
||||||
|
|
||||||
|
example invocation:
|
||||||
|
|
||||||
|
python -m chatmaild.fsreport /path/to/chatmail.ini
|
||||||
|
|
||||||
|
to show storage summaries for all "cur" folders
|
||||||
|
|
||||||
|
python -m chatmaild.fsreport /path/to/chatmail.ini --mdir cur
|
||||||
|
|
||||||
|
to show storage summaries only for first 1000 mailboxes
|
||||||
|
|
||||||
|
python -m chatmaild.fsreport /path/to/chatmail.ini --maxnum 1000
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
from argparse import ArgumentParser
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from chatmaild.config import read_config
|
||||||
|
from chatmaild.expire import iter_mailboxes
|
||||||
|
|
||||||
|
DAYSECONDS = 24 * 60 * 60
|
||||||
|
MONTHSECONDS = DAYSECONDS * 30
|
||||||
|
|
||||||
|
|
||||||
|
def HSize(size: int):
    """Format a size integer as a Human-readable string Kilobyte, Megabyte or Gigabyte"""
    kilo = 1000
    mega = kilo * 1000
    giga = mega * 1000
    if size < 10 * kilo:
        # small values keep two decimals for precision
        return f"{size / kilo:5.2f}K"
    elif size < mega:
        return f"{size / kilo:5.0f}K"
    elif size < giga:
        return f"{int(size / mega):5.0f}M"
    return f"{size / giga:5.2f}G"
|
||||||
|
|
||||||
|
|
||||||
|
class Report:
    """Accumulates login-age and message-size statistics over scanned mailboxes."""

    def __init__(self, now, min_login_age, mdir):
        # total bytes in non-message files (indexes, password, ...)
        self.size_extra = 0
        # total bytes in maildir messages
        self.size_messages = 0
        # reference timestamp for all age computations
        self.now = now
        # only count message sizes for logins at least this many days old
        self.min_login_age = min_login_age
        # optional maildir folder filter ("cur", "new" or "tmp")
        self.mdir = mdir

        self.num_ci_logins = self.num_all_logins = 0
        # days-since-login histogram buckets
        self.login_buckets = {x: 0 for x in (1, 10, 30, 40, 80, 100, 150)}

        # minimum-size -> accumulated bytes buckets
        self.message_buckets = {x: 0 for x in (0, 160000, 500000, 2000000)}

    def process_mailbox_stat(self, mailbox):
        """Fold one MailboxStat into the report counters."""
        # categorize login times
        last_login = mailbox.last_login
        if last_login:
            self.num_all_logins += 1
            # CI test accounts are tracked separately
            if os.path.basename(mailbox.basedir)[:3] == "ci-":
                self.num_ci_logins += 1
            else:
                for days in self.login_buckets:
                    if last_login >= self.now - days * DAYSECONDS:
                        self.login_buckets[days] += 1

        cutoff_login_date = self.now - self.min_login_age * DAYSECONDS
        if last_login and last_login <= cutoff_login_date:
            # categorize message sizes
            for size in self.message_buckets:
                for msg in mailbox.messages:
                    if msg.size >= size:
                        if self.mdir:
                            # BUGFIX: FileEntry has no ``relpath`` attribute
                            # (expire.FileEntry fields are path/mtime/size), so
                            # the previous ``msg.relpath.startswith(self.mdir)``
                            # raised AttributeError whenever --mdir was given.
                            # Derive the maildir folder from the path instead.
                            folder = msg.path.rsplit("/", 2)[-2]
                            if folder != self.mdir:
                                continue
                        self.message_buckets[size] += msg.size

        self.size_messages += sum(entry.size for entry in mailbox.messages)
        self.size_extra += sum(entry.size for entry in mailbox.extrafiles)

    def dump_summary(self):
        """Print the storage and login statistics to stdout."""
        all_messages = self.size_messages
        print()
        print("## Mailbox storage use analysis")
        print(f"Mailbox data total size: {HSize(self.size_extra + all_messages)}")
        print(f"Messages total size : {HSize(all_messages)}")
        try:
            percent = self.size_extra / (self.size_extra + all_messages) * 100
        except ZeroDivisionError:
            # no data at all: report extra files as 100%
            percent = 100
        print(f"Extra files : {HSize(self.size_extra)} ({percent:.2f}%)")

        print()
        if self.min_login_age:
            print(f"### Message storage for {self.min_login_age} days old logins")

        pref = f"[{self.mdir}] " if self.mdir else ""
        for minsize, sumsize in self.message_buckets.items():
            percent = (sumsize / all_messages * 100) if all_messages else 0
            print(
                f"{pref}larger than {HSize(minsize)}: {HSize(sumsize)} ({percent:.2f}%)"
            )

        user_logins = self.num_all_logins - self.num_ci_logins

        def p(num):
            # percentage of non-CI logins, guarding against division by zero
            return f"({num / user_logins * 100:2.2f}%)" if user_logins else "100%"

        print()
        print(f"## Login stats, from date reference {datetime.fromtimestamp(self.now)}")
        print(f"all: {HSize(self.num_all_logins)}")
        print(f"non-ci: {HSize(user_logins)}")
        print(f"ci: {HSize(self.num_ci_logins)}")
        for days, active in self.login_buckets.items():
            print(f"last {days:3} days: {HSize(active)} {p(active)}")
|
||||||
|
|
||||||
|
|
||||||
|
def main(args=None):
    """Report about filesystem storage usage of all mailboxes and messages"""
    parser = ArgumentParser(description=main.__doc__)
    ini = "/usr/local/lib/chatmaild/chatmail.ini"
    parser.add_argument(
        "chatmail_ini",
        action="store",
        nargs="?",
        help=f"path pointing to chatmail.ini file, default: {ini}",
        default=ini,
    )
    parser.add_argument(
        "--days",
        default=0,
        action="store",
        help="assume date to be days older than now",
    )
    parser.add_argument(
        "--min-login-age",
        default=0,
        dest="min_login_age",
        action="store",
        help="only sum up message size if last login is at least min-login-age days old",
    )
    parser.add_argument(
        "--mdir",
        action="store",
        help="only consider 'cur' or 'new' or 'tmp' messages for summary",
    )

    parser.add_argument(
        "--maxnum",
        default=None,
        action="store",
        help="maximum number of mailboxes to iterate on",
    )

    args = parser.parse_args(args)

    config = read_config(args.chatmail_ini)

    # NOTE(review): datetime.utcnow() is deprecated since Python 3.12;
    # datetime.now(timezone.utc) would be the modern equivalent
    now = datetime.utcnow().timestamp()
    if args.days:
        # shift the reference date into the past (useful for testing)
        now = now - 86400 * int(args.days)

    maxnum = int(args.maxnum) if args.maxnum else None
    rep = Report(now=now, min_login_age=int(args.min_login_age), mdir=args.mdir)
    for mbox in iter_mailboxes(str(config.mailboxes_dir), maxnum, config.tmpfs_index):
        rep.process_mailbox_stat(mbox)
    rep.dump_summary()


if __name__ == "__main__":
    main()
|
||||||
@@ -11,11 +11,14 @@ mail_domain = {mail_domain}
|
|||||||
# Restrictions on user addresses
|
# Restrictions on user addresses
|
||||||
#
|
#
|
||||||
|
|
||||||
# how many mails a user can send out per minute
|
# email sending rate per user and minute
|
||||||
max_user_send_per_minute = 60
|
max_user_send_per_minute = 60
|
||||||
|
|
||||||
|
# per-user max burst size for sending rate limiting (GCRA bucket capacity)
|
||||||
|
max_user_send_burst_size = 10
|
||||||
|
|
||||||
# maximum mailbox size of a chatmail address
|
# maximum mailbox size of a chatmail address
|
||||||
max_mailbox_size = 100M
|
max_mailbox_size = 500M
|
||||||
|
|
||||||
# maximum message size for an e-mail in bytes
|
# maximum message size for an e-mail in bytes
|
||||||
max_message_size = 31457280
|
max_message_size = 31457280
|
||||||
@@ -43,9 +46,12 @@ passthrough_senders =
|
|||||||
|
|
||||||
# list of e-mail recipients for which to accept outbound un-encrypted mails
|
# list of e-mail recipients for which to accept outbound un-encrypted mails
|
||||||
# (space-separated, item may start with "@" to whitelist whole recipient domains)
|
# (space-separated, item may start with "@" to whitelist whole recipient domains)
|
||||||
passthrough_recipients = xstore@testrun.org echo@{mail_domain}
|
passthrough_recipients =
|
||||||
|
|
||||||
# path to www directory - documented here: https://github.com/chatmail/relay/#custom-web-pages
|
# store index files in tmpfs (good for disk size and I/O, bad for ram)
|
||||||
|
tmpfs_index = false
|
||||||
|
|
||||||
|
# path to www directory - documented here: https://chatmail.at/doc/relay/getting_started.html#custom-web-pages
|
||||||
#www_folder = www
|
#www_folder = www
|
||||||
|
|
||||||
#
|
#
|
||||||
@@ -66,16 +72,6 @@ disable_ipv6 = False
|
|||||||
# Your email address, which will be used in acmetool to manage Let's Encrypt SSL certificates
|
# Your email address, which will be used in acmetool to manage Let's Encrypt SSL certificates
|
||||||
acme_email =
|
acme_email =
|
||||||
|
|
||||||
#
|
|
||||||
# Kernel settings
|
|
||||||
#
|
|
||||||
|
|
||||||
# if you set "True", the kernel settings will be configured according to the values below
|
|
||||||
change_kernel_settings = True
|
|
||||||
|
|
||||||
# change fs.inotify.max_user_instances and fs.inotify.max_user_watches kernel settings
|
|
||||||
fs_inotify_max_user_instances_and_watchers = 65535
|
|
||||||
|
|
||||||
# Defaults to https://iroh.{{mail_domain}} and running `iroh-relay` on the chatmail
|
# Defaults to https://iroh.{{mail_domain}} and running `iroh-relay` on the chatmail
|
||||||
# service.
|
# service.
|
||||||
# If you set it to anything else, the service will be disabled
|
# If you set it to anything else, the service will be disabled
|
||||||
@@ -109,6 +105,12 @@ fs_inotify_max_user_instances_and_watchers = 65535
|
|||||||
# so use this option with caution on production servers.
|
# so use this option with caution on production servers.
|
||||||
imap_rawlog = false
|
imap_rawlog = false
|
||||||
|
|
||||||
|
# set to true if you want to enable the IMAP COMPRESS Extension,
|
||||||
|
# which allows IMAP connections to be efficiently compressed.
|
||||||
|
# WARNING: Enabling this makes it impossible to hibernate IMAP
|
||||||
|
# processes which will result in much higher memory/RAM usage.
|
||||||
|
imap_compress = false
|
||||||
|
|
||||||
|
|
||||||
#
|
#
|
||||||
# Privacy Policy
|
# Privacy Policy
|
||||||
|
|||||||
@@ -1,7 +1,8 @@
|
|||||||
|
[params]
|
||||||
|
|
||||||
[privacy]
|
tmpfs_index = true
|
||||||
|
|
||||||
passthrough_recipients = privacy@testrun.org xstore@testrun.org echo@{mail_domain}
|
passthrough_recipients = privacy@testrun.org echo@{mail_domain}
|
||||||
|
|
||||||
privacy_postal =
|
privacy_postal =
|
||||||
Merlinux GmbH, Represented by the managing director H. Krekel,
|
Merlinux GmbH, Represented by the managing director H. Krekel,
|
||||||
|
|||||||
@@ -13,8 +13,6 @@ class LastLoginDictProxy(DictProxy):
|
|||||||
keyname = parts[1].split("/")
|
keyname = parts[1].split("/")
|
||||||
value = parts[2] if len(parts) > 2 else ""
|
value = parts[2] if len(parts) > 2 else ""
|
||||||
if keyname[0] == "shared" and keyname[1] == "last-login":
|
if keyname[0] == "shared" and keyname[1] == "last-login":
|
||||||
if addr.startswith("echo@"):
|
|
||||||
return True
|
|
||||||
addr = keyname[2]
|
addr = keyname[2]
|
||||||
timestamp = int(value)
|
timestamp = int(value)
|
||||||
user = self.config.get_user(addr)
|
user = self.config.get_user(addr)
|
||||||
|
|||||||
@@ -33,7 +33,7 @@ def test_read_config_testrun(make_config):
|
|||||||
assert config.filtermail_smtp_port == 10080
|
assert config.filtermail_smtp_port == 10080
|
||||||
assert config.postfix_reinject_port == 10025
|
assert config.postfix_reinject_port == 10025
|
||||||
assert config.max_user_send_per_minute == 60
|
assert config.max_user_send_per_minute == 60
|
||||||
assert config.max_mailbox_size == "100M"
|
assert config.max_mailbox_size == "500M"
|
||||||
assert config.delete_mails_after == "20"
|
assert config.delete_mails_after == "20"
|
||||||
assert config.delete_large_after == "7"
|
assert config.delete_large_after == "7"
|
||||||
assert config.username_min_length == 9
|
assert config.username_min_length == 9
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
import time
|
import time
|
||||||
|
|
||||||
from chatmaild.delete_inactive_users import delete_inactive_users
|
|
||||||
from chatmaild.doveauth import AuthDictProxy
|
from chatmaild.doveauth import AuthDictProxy
|
||||||
|
from chatmaild.expire import main as main_expire
|
||||||
|
|
||||||
|
|
||||||
def test_login_timestamps(example_config):
|
def test_login_timestamps(example_config):
|
||||||
@@ -45,7 +45,12 @@ def test_delete_inactive_users(example_config):
|
|||||||
for addr in to_remove:
|
for addr in to_remove:
|
||||||
assert example_config.get_user(addr).maildir.exists()
|
assert example_config.get_user(addr).maildir.exists()
|
||||||
|
|
||||||
delete_inactive_users(example_config)
|
main_expire(
|
||||||
|
args=[
|
||||||
|
"--remove",
|
||||||
|
str(example_config._inipath),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
for p in example_config.mailboxes_dir.iterdir():
|
for p in example_config.mailboxes_dir.iterdir():
|
||||||
assert not p.name.startswith("old")
|
assert not p.name.startswith("old")
|
||||||
|
|||||||
167
chatmaild/src/chatmaild/tests/test_expire.py
Normal file
167
chatmaild/src/chatmaild/tests/test_expire.py
Normal file
@@ -0,0 +1,167 @@
|
|||||||
|
import os
|
||||||
|
import random
|
||||||
|
from datetime import datetime
|
||||||
|
from fnmatch import fnmatch
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from chatmaild.expire import (
|
||||||
|
FileEntry,
|
||||||
|
MailboxStat,
|
||||||
|
get_file_entry,
|
||||||
|
iter_mailboxes,
|
||||||
|
os_listdir_if_exists,
|
||||||
|
)
|
||||||
|
from chatmaild.expire import main as expiry_main
|
||||||
|
from chatmaild.fsreport import main as report_main
|
||||||
|
|
||||||
|
|
||||||
|
def fill_mbox(folderdir):
|
||||||
|
password = folderdir.joinpath("password")
|
||||||
|
password.write_text("xxx")
|
||||||
|
folderdir.joinpath("maildirsize").write_text("xxx")
|
||||||
|
|
||||||
|
garbagedir = folderdir.joinpath("garbagedir")
|
||||||
|
garbagedir.mkdir()
|
||||||
|
garbagedir.joinpath("bimbum").write_text("hello")
|
||||||
|
|
||||||
|
create_new_messages(folderdir, ["cur/msg1"], size=500)
|
||||||
|
create_new_messages(folderdir, ["new/msg2"], size=600)
|
||||||
|
|
||||||
|
|
||||||
|
def create_new_messages(basedir, relpaths, size=1000, days=0):
|
||||||
|
now = datetime.utcnow().timestamp()
|
||||||
|
|
||||||
|
for relpath in relpaths:
|
||||||
|
msg_path = Path(basedir).joinpath(relpath)
|
||||||
|
msg_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
msg_path.write_text("x" * size)
|
||||||
|
# accessed now, modified N days ago
|
||||||
|
os.utime(msg_path, (now, now - days * 86400))
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def mbox1(example_config):
|
||||||
|
addr = "mailbox1@example.org"
|
||||||
|
mboxdir = example_config.mailboxes_dir.joinpath(addr)
|
||||||
|
mboxdir.mkdir()
|
||||||
|
fill_mbox(mboxdir)
|
||||||
|
return MailboxStat(mboxdir, addr, False)
|
||||||
|
|
||||||
|
|
||||||
|
def test_deltachat_folder(example_config):
|
||||||
|
"""Test old setups that might have a .DeltaChat folder where messages also need to get removed."""
|
||||||
|
addr = "mailbox1@example.org"
|
||||||
|
mboxdir = example_config.mailboxes_dir.joinpath(addr)
|
||||||
|
mboxdir.mkdir()
|
||||||
|
mbox2dir = mboxdir.joinpath(".DeltaChat")
|
||||||
|
mbox2dir.mkdir()
|
||||||
|
fill_mbox(mbox2dir)
|
||||||
|
mb = MailboxStat(mboxdir, addr, False)
|
||||||
|
assert len(mb.messages) == 2
|
||||||
|
|
||||||
|
|
||||||
|
def test_filentry_ordering(tmp_path):
|
||||||
|
l = [FileEntry(f"x{i}", size=i + 10, mtime=1000 - i) for i in range(10)]
|
||||||
|
sorted = list(l)
|
||||||
|
random.shuffle(l)
|
||||||
|
l.sort(key=lambda x: x.size)
|
||||||
|
assert l == sorted
|
||||||
|
|
||||||
|
|
||||||
|
def test_no_mailbxoes(tmp_path, capsys):
|
||||||
|
assert [] == list(
|
||||||
|
iter_mailboxes(
|
||||||
|
str(tmp_path.joinpath("notexists")), maxnum=10, tmpfs_index=False
|
||||||
|
)
|
||||||
|
)
|
||||||
|
out, err = capsys.readouterr()
|
||||||
|
assert "no mailboxes" in err
|
||||||
|
|
||||||
|
|
||||||
|
def test_stats_mailbox(mbox1):
|
||||||
|
password = Path(mbox1.basedir).joinpath("password")
|
||||||
|
assert mbox1.last_login == password.stat().st_mtime
|
||||||
|
assert len(mbox1.messages) == 2
|
||||||
|
|
||||||
|
msgs = list(sorted(mbox1.messages, key=lambda x: x.size))
|
||||||
|
assert len(msgs) == 2
|
||||||
|
assert msgs[0].size == 500 # cur
|
||||||
|
assert msgs[1].size == 600 # new
|
||||||
|
|
||||||
|
create_new_messages(mbox1.basedir, ["large-extra"], size=1000)
|
||||||
|
create_new_messages(mbox1.basedir, ["index-something"], size=3)
|
||||||
|
mbox2 = MailboxStat(mbox1.basedir, mbox1.name, False)
|
||||||
|
assert len(mbox2.extrafiles) == 5
|
||||||
|
assert mbox2.extrafiles[0].size == 1000
|
||||||
|
|
||||||
|
# cope well with mailbox dirs that have no password (for whatever reason)
|
||||||
|
Path(mbox1.basedir).joinpath("password").unlink()
|
||||||
|
mbox3 = MailboxStat(mbox1.basedir, mbox1.name, False)
|
||||||
|
assert mbox3.last_login is None
|
||||||
|
|
||||||
|
|
||||||
|
def test_report_no_mailboxes(example_config):
|
||||||
|
args = (str(example_config._inipath),)
|
||||||
|
report_main(args)
|
||||||
|
|
||||||
|
|
||||||
|
def test_report(mbox1, example_config):
|
||||||
|
args = (str(example_config._inipath),)
|
||||||
|
report_main(args)
|
||||||
|
args = list(args) + "--days 1".split()
|
||||||
|
report_main(args)
|
||||||
|
args = list(args) + "--min-login-age 1".split()
|
||||||
|
report_main(args)
|
||||||
|
args = list(args) + "--mdir cur".split()
|
||||||
|
report_main(args)
|
||||||
|
|
||||||
|
|
||||||
|
def test_expiry_cli_basic(example_config, mbox1):
|
||||||
|
args = (str(example_config._inipath),)
|
||||||
|
expiry_main(args)
|
||||||
|
|
||||||
|
|
||||||
|
def test_expiry_cli_old_files(capsys, example_config, mbox1):
|
||||||
|
relpaths_old = ["cur/msg_old1", "cur/msg_old1"]
|
||||||
|
cutoff_days = int(example_config.delete_mails_after) + 1
|
||||||
|
create_new_messages(mbox1.basedir, relpaths_old, size=1000, days=cutoff_days)
|
||||||
|
|
||||||
|
relpaths_large = ["cur/msg_old_large1", "new/msg_old_large2"]
|
||||||
|
cutoff_days = int(example_config.delete_large_after) + 1
|
||||||
|
create_new_messages(
|
||||||
|
mbox1.basedir, relpaths_large, size=1000 * 300, days=cutoff_days
|
||||||
|
)
|
||||||
|
|
||||||
|
create_new_messages(mbox1.basedir, ["cur/shouldstay"], size=1000 * 300, days=1)
|
||||||
|
|
||||||
|
args = str(example_config._inipath), "--remove", "-v"
|
||||||
|
expiry_main(args)
|
||||||
|
out, err = capsys.readouterr()
|
||||||
|
|
||||||
|
allpaths = relpaths_old + relpaths_large + ["maildirsize"]
|
||||||
|
for path in allpaths:
|
||||||
|
for line in err.split("\n"):
|
||||||
|
if fnmatch(line, f"removing*{path}"):
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
if path != "new/msg_old_large2":
|
||||||
|
pytest.fail(f"failed to remove {path}\n{err}")
|
||||||
|
|
||||||
|
assert "shouldstay" not in err
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_file_entry(tmp_path):
|
||||||
|
assert get_file_entry(str(tmp_path.joinpath("123123"))) is None
|
||||||
|
p = tmp_path.joinpath("x")
|
||||||
|
p.write_text("hello")
|
||||||
|
entry = get_file_entry(str(p))
|
||||||
|
assert entry.size == 5
|
||||||
|
assert entry.mtime
|
||||||
|
|
||||||
|
|
||||||
|
def test_os_listdir_if_exists(tmp_path):
|
||||||
|
tmp_path.joinpath("x").write_text("hello")
|
||||||
|
assert len(os_listdir_if_exists(str(tmp_path))) == 1
|
||||||
|
assert len(os_listdir_if_exists(str(tmp_path.joinpath("123123")))) == 0
|
||||||
@@ -1,361 +0,0 @@
|
|||||||
import pytest
|
|
||||||
|
|
||||||
from chatmaild.filtermail import (
|
|
||||||
IncomingBeforeQueueHandler,
|
|
||||||
OutgoingBeforeQueueHandler,
|
|
||||||
SendRateLimiter,
|
|
||||||
check_armored_payload,
|
|
||||||
check_encrypted,
|
|
||||||
is_securejoin,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def maildomain():
|
|
||||||
# let's not depend on a real chatmail instance for the offline tests below
|
|
||||||
return "chatmail.example.org"
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def handler(make_config, maildomain):
|
|
||||||
config = make_config(maildomain)
|
|
||||||
return OutgoingBeforeQueueHandler(config)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def inhandler(make_config, maildomain):
|
|
||||||
config = make_config(maildomain)
|
|
||||||
return IncomingBeforeQueueHandler(config)
|
|
||||||
|
|
||||||
|
|
||||||
def test_reject_forged_from(maildata, gencreds, handler):
|
|
||||||
class env:
|
|
||||||
mail_from = gencreds()[0]
|
|
||||||
rcpt_tos = [gencreds()[0]]
|
|
||||||
|
|
||||||
# test that the filter lets good mail through
|
|
||||||
to_addr = gencreds()[0]
|
|
||||||
env.content = maildata(
|
|
||||||
"encrypted.eml", from_addr=env.mail_from, to_addr=to_addr
|
|
||||||
).as_bytes()
|
|
||||||
|
|
||||||
assert not handler.check_DATA(envelope=env)
|
|
||||||
|
|
||||||
# test that the filter rejects forged mail
|
|
||||||
env.content = maildata(
|
|
||||||
"encrypted.eml", from_addr="forged@c3.testrun.org", to_addr=to_addr
|
|
||||||
).as_bytes()
|
|
||||||
error = handler.check_DATA(envelope=env)
|
|
||||||
assert "500" in error
|
|
||||||
|
|
||||||
|
|
||||||
def test_filtermail_no_encryption_detection(maildata):
|
|
||||||
msg = maildata(
|
|
||||||
"plain.eml", from_addr="some@example.org", to_addr="other@example.org"
|
|
||||||
)
|
|
||||||
assert not check_encrypted(msg)
|
|
||||||
|
|
||||||
# https://xkcd.com/1181/
|
|
||||||
msg = maildata(
|
|
||||||
"fake-encrypted.eml", from_addr="some@example.org", to_addr="other@example.org"
|
|
||||||
)
|
|
||||||
assert not check_encrypted(msg)
|
|
||||||
|
|
||||||
|
|
||||||
def test_filtermail_securejoin_detection(maildata):
|
|
||||||
msg = maildata(
|
|
||||||
"securejoin-vc.eml", from_addr="some@example.org", to_addr="other@example.org"
|
|
||||||
)
|
|
||||||
assert is_securejoin(msg)
|
|
||||||
|
|
||||||
msg = maildata(
|
|
||||||
"securejoin-vc-fake.eml",
|
|
||||||
from_addr="some@example.org",
|
|
||||||
to_addr="other@example.org",
|
|
||||||
)
|
|
||||||
assert not is_securejoin(msg)
|
|
||||||
|
|
||||||
|
|
||||||
def test_filtermail_encryption_detection(maildata):
|
|
||||||
msg = maildata(
|
|
||||||
"encrypted.eml",
|
|
||||||
from_addr="1@example.org",
|
|
||||||
to_addr="2@example.org",
|
|
||||||
subject="Subject does not matter, will be replaced anyway",
|
|
||||||
)
|
|
||||||
assert check_encrypted(msg)
|
|
||||||
|
|
||||||
|
|
||||||
def test_filtermail_no_literal_packets(maildata):
|
|
||||||
"""Test that literal OpenPGP packet is not considered an encrypted mail."""
|
|
||||||
msg = maildata("literal.eml", from_addr="1@example.org", to_addr="2@example.org")
|
|
||||||
assert not check_encrypted(msg)
|
|
||||||
|
|
||||||
|
|
||||||
def test_filtermail_unencrypted_mdn(maildata, gencreds):
|
|
||||||
"""Unencrypted MDNs should not pass."""
|
|
||||||
from_addr = gencreds()[0]
|
|
||||||
to_addr = gencreds()[0] + ".other"
|
|
||||||
msg = maildata("mdn.eml", from_addr=from_addr, to_addr=to_addr)
|
|
||||||
|
|
||||||
assert not check_encrypted(msg)
|
|
||||||
|
|
||||||
|
|
||||||
def test_send_rate_limiter():
|
|
||||||
limiter = SendRateLimiter()
|
|
||||||
for i in range(100):
|
|
||||||
if limiter.is_sending_allowed("some@example.org", 10):
|
|
||||||
if i <= 10:
|
|
||||||
continue
|
|
||||||
pytest.fail("limiter didn't work")
|
|
||||||
else:
|
|
||||||
assert i == 11
|
|
||||||
break
|
|
||||||
|
|
||||||
|
|
||||||
def test_cleartext_excempt_privacy(maildata, gencreds, handler):
|
|
||||||
from_addr = gencreds()[0]
|
|
||||||
to_addr = "privacy@testrun.org"
|
|
||||||
handler.config.passthrough_recipients = [to_addr]
|
|
||||||
false_to = "privacy@something.org"
|
|
||||||
|
|
||||||
msg = maildata("plain.eml", from_addr=from_addr, to_addr=to_addr)
|
|
||||||
|
|
||||||
class env:
|
|
||||||
mail_from = from_addr
|
|
||||||
rcpt_tos = [to_addr]
|
|
||||||
content = msg.as_bytes()
|
|
||||||
|
|
||||||
# assert that None/no error is returned
|
|
||||||
assert not handler.check_DATA(envelope=env)
|
|
||||||
|
|
||||||
class env2:
|
|
||||||
mail_from = from_addr
|
|
||||||
rcpt_tos = [to_addr, false_to]
|
|
||||||
content = msg.as_bytes()
|
|
||||||
|
|
||||||
assert "523" in handler.check_DATA(envelope=env2)
|
|
||||||
|
|
||||||
|
|
||||||
def test_cleartext_self_send_autocrypt_setup_message(maildata, gencreds, handler):
|
|
||||||
from_addr = gencreds()[0]
|
|
||||||
to_addr = from_addr
|
|
||||||
|
|
||||||
msg = maildata("asm.eml", from_addr=from_addr, to_addr=to_addr)
|
|
||||||
|
|
||||||
class env:
|
|
||||||
mail_from = from_addr
|
|
||||||
rcpt_tos = [to_addr]
|
|
||||||
content = msg.as_bytes()
|
|
||||||
|
|
||||||
assert not handler.check_DATA(envelope=env)
|
|
||||||
|
|
||||||
|
|
||||||
def test_cleartext_send_fails(maildata, gencreds, handler):
|
|
||||||
from_addr = gencreds()[0]
|
|
||||||
to_addr = gencreds()[0]
|
|
||||||
|
|
||||||
msg = maildata("plain.eml", from_addr=from_addr, to_addr=to_addr)
|
|
||||||
|
|
||||||
class env:
|
|
||||||
mail_from = from_addr
|
|
||||||
rcpt_tos = [to_addr]
|
|
||||||
content = msg.as_bytes()
|
|
||||||
|
|
||||||
res = handler.check_DATA(envelope=env)
|
|
||||||
assert "523 Encryption Needed" in res
|
|
||||||
|
|
||||||
|
|
||||||
def test_cleartext_incoming_fails(maildata, gencreds, inhandler):
|
|
||||||
from_addr = gencreds()[0]
|
|
||||||
to_addr, password = gencreds()
|
|
||||||
|
|
||||||
msg = maildata("plain.eml", from_addr=from_addr, to_addr=to_addr)
|
|
||||||
|
|
||||||
class env:
|
|
||||||
mail_from = from_addr
|
|
||||||
rcpt_tos = [to_addr]
|
|
||||||
content = msg.as_bytes()
|
|
||||||
|
|
||||||
user = inhandler.config.get_user(to_addr)
|
|
||||||
user.set_password(password)
|
|
||||||
res = inhandler.check_DATA(envelope=env)
|
|
||||||
assert "523 Encryption Needed" in res
|
|
||||||
|
|
||||||
user.allow_incoming_cleartext()
|
|
||||||
assert not inhandler.check_DATA(envelope=env)
|
|
||||||
|
|
||||||
|
|
||||||
def test_cleartext_incoming_mailer_daemon(maildata, gencreds, inhandler):
|
|
||||||
from_addr = "mailer-daemon@example.org"
|
|
||||||
to_addr = gencreds()[0]
|
|
||||||
|
|
||||||
msg = maildata("mailer-daemon.eml", from_addr=from_addr, to_addr=to_addr)
|
|
||||||
|
|
||||||
class env:
|
|
||||||
mail_from = from_addr
|
|
||||||
rcpt_tos = [to_addr]
|
|
||||||
content = msg.as_bytes()
|
|
||||||
|
|
||||||
assert not inhandler.check_DATA(envelope=env)
|
|
||||||
|
|
||||||
|
|
||||||
def test_cleartext_passthrough_domains(maildata, gencreds, handler):
|
|
||||||
from_addr = gencreds()[0]
|
|
||||||
to_addr = "privacy@x.y.z"
|
|
||||||
handler.config.passthrough_recipients = ["@x.y.z"]
|
|
||||||
false_to = "something@x.y"
|
|
||||||
|
|
||||||
msg = maildata("plain.eml", from_addr=from_addr, to_addr=to_addr)
|
|
||||||
|
|
||||||
class env:
|
|
||||||
mail_from = from_addr
|
|
||||||
rcpt_tos = [to_addr]
|
|
||||||
content = msg.as_bytes()
|
|
||||||
|
|
||||||
# assert that None/no error is returned
|
|
||||||
assert not handler.check_DATA(envelope=env)
|
|
||||||
|
|
||||||
class env2:
|
|
||||||
mail_from = from_addr
|
|
||||||
rcpt_tos = [to_addr, false_to]
|
|
||||||
content = msg.as_bytes()
|
|
||||||
|
|
||||||
assert "523" in handler.check_DATA(envelope=env2)
|
|
||||||
|
|
||||||
|
|
||||||
def test_cleartext_passthrough_senders(gencreds, handler, maildata):
|
|
||||||
acc1 = gencreds()[0]
|
|
||||||
to_addr = "recipient@something.org"
|
|
||||||
handler.config.passthrough_senders = [acc1]
|
|
||||||
|
|
||||||
msg = maildata("plain.eml", from_addr=acc1, to_addr=to_addr)
|
|
||||||
|
|
||||||
class env:
|
|
||||||
mail_from = acc1
|
|
||||||
rcpt_tos = to_addr
|
|
||||||
content = msg.as_bytes()
|
|
||||||
|
|
||||||
# assert that None/no error is returned
|
|
||||||
assert not handler.check_DATA(envelope=env)
|
|
||||||
|
|
||||||
|
|
||||||
def test_check_armored_payload():
|
|
||||||
prefix = "-----BEGIN PGP MESSAGE-----\r\n"
|
|
||||||
comment = "Version: ProtonMail\r\n"
|
|
||||||
payload = """\r
|
|
||||||
wU4DSqFx0d1yqAoSAQdAYkX/ZN/Az4B0k7X47zKyWrXxlDEdS3WOy0Yf2+GJTFgg\r
|
|
||||||
Zk5ql0mLG8Ze+ZifCS0XMO4otlemSyJ0K1ZPdFMGzUDBTgNqzkFabxXoXRIBB0AM\r
|
|
||||||
755wlX41X6Ay3KhnwBq7yEqSykVH6F3x11iHPKraLCAGZoaS8bKKNy/zg5slda1X\r
|
|
||||||
pt14b4aC1VwtSnYhcRRELNLD/wE2TFif+g7poMmFY50VyMPLYjVP96Z5QCT4+z4H\r
|
|
||||||
Ikh/pRRN8S3JNMrRJHc6prooSJmLcx47Y5un7VFy390MsJ+LiUJuQMDdYWRAinfs\r
|
|
||||||
Ebm89Ezjm7F03qbFPXE0X4ZNzVXS/eKO0uhJQdiov/vmbn41rNtHmNpqjaO0vi5+\r
|
|
||||||
sS9tR7yDUrIXiCUCN78eBLVioxtktsPZm5cDORbQWzv+7nmCEz9/JowCUcBVdCGn\r
|
|
||||||
1ofOaH82JCAX/cRx08pLaDNj6iolVBsi56Dd+2bGxJOZOG2AMcEyz0pXY0dOAJCD\r
|
|
||||||
iUThcQeGIdRnU3j8UBcnIEsjLu2+C+rrwMZQESMWKnJ0rnqTk0pK5kXScr6F/L0L\r
|
|
||||||
UE49ccIexNm3xZvYr5drszr6wz3Tv5fdue87P4etBt90gF/Vzknck+g1LLlkzZkp\r
|
|
||||||
d8dI0k2tOSPjUbDPnSy1x+X73WGpPZmj0kWT+RGvq0nH6UkJj3AQTG2qf1T8jK+3\r
|
|
||||||
rTp3LR9vDkMwDjX4R8SA9c0wdnUzzr79OYQC9lTnzcx+fM6BBmgQ2GrS33jaFLp7\r
|
|
||||||
L6/DFpCl5zhnPjM/2dKvMkw/Kd6XS/vjwsO405FQdjSDiQEEAZA+ZvAfcjdccbbU\r
|
|
||||||
yCO+x0QNdeBsufDVnh3xvzuWy4CICdTQT4s1AWRPCzjOj+SGmx5WqCLWfsd8Ma0+\r
|
|
||||||
w/C7SfTYu1FDQILLM+llpq1M/9GPley4QZ8JQjo262AyPXsPF/OW48uuZz0Db1xT\r
|
|
||||||
Yh4iHBztj4VSdy7l2+IyaIf7cnL4EEBFxv/MwmVDXvDlxyvfAfIsd3D9SvJESzKZ\r
|
|
||||||
VWDYwaocgeCN+ojKu1p885lu1EfRbX3fr3YO02K5/c2JYDkc0Py0W3wUP/J1XUax\r
|
|
||||||
pbKpzwlkxEgtmzsGqsOfMJqBV3TNDrOA2uBsa+uBqP5MGYLZ49S/4v/bW9I01Cr1\r
|
|
||||||
D2ZkV510Y1Vgo66WlP8mRqOTyt/5WRhPD+MxXdk67BNN/PmO6tMlVoJDuk+XwWPR\r
|
|
||||||
t2TvNaND/yabT9eYI55Og4fzKD6RIjouUX8DvKLkm+7aXxVs2uuLQ3Jco3O82z55\r
|
|
||||||
dbShU1jYsrw9oouXUz06MHPbkdhNbF/2hfhZ2qA31sNeovJw65iUv7sDKX3LVWgJ\r
|
|
||||||
10jlywcDwqlU8CO7WC9lGixYTbnOkYZpXCGEl8e6Jbs79l42YFo4ogYpFK1NXFhV\r
|
|
||||||
kOXRmDf/wmfj+c/ld3L2PkvwlgofhCudOQknZbo3ub1gjiTn7L+lMGHIj/3suMIl\r
|
|
||||||
ID4EUxAXScIM1ZEz2fjtW5jATlqYcLjLTbf/olw6HFyPNH+9IssqXeZNKnGwPUB9\r
|
|
||||||
3lTXsg0tpzl+x7F/2WjEw1DSNhjC0KnHt1vEYNMkUGDGFdN9y3ERLqX/FIgiASUb\r
|
|
||||||
bTvAVupnAK3raBezGmhrs6LsQtLS9P0VvQiLU3uDhMqw8Z4SISLpcD+NnVBHzQqm\r
|
|
||||||
6W5Qn/8xsCL6av18yUVTi2G3igt3QCNoYx9evt2ZcIkNoyyagUVjfZe5GHXh8Dnz\r
|
|
||||||
GaBXW/hg3HlXLRGaQu4RYCzBMJILcO25OhZOg6jbkCLiEexQlm2e9krB5cXR49Al\r
|
|
||||||
UN4fiB0KR9JyG2ayUdNJVkXZSZLnHyRgiaadlpUo16LVvw==\r
|
|
||||||
=b5Kp\r
|
|
||||||
-----END PGP MESSAGE-----\r
|
|
||||||
\r
|
|
||||||
\r
|
|
||||||
"""
|
|
||||||
|
|
||||||
commented_payload = prefix + comment + payload
|
|
||||||
assert check_armored_payload(commented_payload, outgoing=False) == True
|
|
||||||
assert check_armored_payload(commented_payload, outgoing=True) == False
|
|
||||||
|
|
||||||
payload = prefix + payload
|
|
||||||
assert check_armored_payload(payload, outgoing=False) == True
|
|
||||||
assert check_armored_payload(payload, outgoing=True) == True
|
|
||||||
|
|
||||||
payload = payload.removesuffix("\r\n")
|
|
||||||
assert check_armored_payload(payload, outgoing=False) == True
|
|
||||||
assert check_armored_payload(payload, outgoing=True) == True
|
|
||||||
|
|
||||||
payload = payload.removesuffix("\r\n")
|
|
||||||
assert check_armored_payload(payload, outgoing=False) == True
|
|
||||||
assert check_armored_payload(payload, outgoing=True) == True
|
|
||||||
|
|
||||||
payload = payload.removesuffix("\r\n")
|
|
||||||
assert check_armored_payload(payload, outgoing=False) == True
|
|
||||||
assert check_armored_payload(payload, outgoing=True) == True
|
|
||||||
|
|
||||||
payload = """-----BEGIN PGP MESSAGE-----\r
|
|
||||||
\r
|
|
||||||
HELLOWORLD
|
|
||||||
-----END PGP MESSAGE-----\r
|
|
||||||
\r
|
|
||||||
"""
|
|
||||||
assert check_armored_payload(payload, outgoing=False) == False
|
|
||||||
assert check_armored_payload(payload, outgoing=True) == False
|
|
||||||
|
|
||||||
payload = """-----BEGIN PGP MESSAGE-----\r
|
|
||||||
\r
|
|
||||||
=njUN
|
|
||||||
-----END PGP MESSAGE-----\r
|
|
||||||
\r
|
|
||||||
"""
|
|
||||||
assert check_armored_payload(payload, outgoing=False) == False
|
|
||||||
assert check_armored_payload(payload, outgoing=True) == False
|
|
||||||
|
|
||||||
# Test payload using partial body length
|
|
||||||
# as generated by GopenPGP.
|
|
||||||
payload = """-----BEGIN PGP MESSAGE-----\r
|
|
||||||
\r
|
|
||||||
wV4DdCVjRfOT3TQSAQdAY5+pjT6mlCxPGdR3be4w7oJJRUGIPI/Vnh+mJxGSm34w\r
|
|
||||||
LNlVc89S1g22uQYFif2sUJsQWbpoHpNkuWpkSgOaHmNvrZiY/YU5iv+cZ3LbmtUG\r
|
|
||||||
0uoBisSHh9O1c+5sYZSbrvYZ1NOwlD7Fv/U5/Mw4E5+CjxfdgNGp5o3DDddzPK78\r
|
|
||||||
jseDhdSXxnaiIJC93hxNX6R1RPt3G2gukyzx69wciPQShcF8zf3W3o75Ed7B8etV\r
|
|
||||||
QEeB16xzdFhKa9JxdjTu3osgCs21IO7wpcFkjc7nZzlW6jPnELJJaNmv4yOOCjMp\r
|
|
||||||
6YAkaN/BkL+jHTznHDuDsT5ilnTXpwHDU1Cm9PIx/KFcNCQnIB+2DcdIHPHUH1ci\r
|
|
||||||
jvqoeXAVWjKXEjS7PqPFuP/xGbrWG2ugs+toXJOKbgRkExvKs1dwPFKrgghvCVbW\r
|
|
||||||
AcKejQKAPArLwpkA7aD875TZQShvGt74fNs45XBlGOYOnNOAJ1KAmzrXLIDViyyB\r
|
|
||||||
kDsmTBk785xofuCkjBpXSe6vsMprPzCteDfaUibh8FHeJjucxPerwuOPEmnogNaf\r
|
|
||||||
YyL4+iy8H8I9/p7pmUqILprxTG0jTOtlk0bTVzeiF56W1xbtSEMuOo4oFbQTyOM2\r
|
|
||||||
bKXaYo774Jm+rRtKAnnI2dtf9RpK19cog6YNzfYjesLKbXDsPZbN5rmwyFiCvvxC\r
|
|
||||||
kQ6JLob+B2fPdY2gzy7LypxktS8Zi1HJcWDHJGVmQodaDLqKUObb4M26bXDe6oxI\r
|
|
||||||
NS8PJz5exVbM3KhZnUOEn6PJRBBf5a/ZqxlhZPcQo/oBuhKpBRpO5kSDwPIUByu3\r
|
|
||||||
UlXLSkpMqe9pUarAOEuQjfl2RVY7U+RrQYp4YP5keMO+i8NCefAFbowTTufO1JIq\r
|
|
||||||
2nVgCi/QVnxZyEc9OYt/8AE3g4cdojE+vsSDifZLSWYIetpfrohHv3dT3StD1QRG\r
|
|
||||||
0QE6qq6oKpg/IL0cjvuX4c7a7bslv2fXp8t75y37RU6253qdIebhxc/cRhPbc/yu\r
|
|
||||||
p0YLyD4SrvKTLP2ZV95jT4IPEpqm4AN3QmiOzdtqR2gLyb62L8QfqI/FdwsIiRiM\r
|
|
||||||
hqydwoqt/lfSqG1WKPh+6EkMkH+TDiCC1BQdbN1MNcyUtcjb35PR2c8Ld2TF3guA\r
|
|
||||||
jLIqMt/Vb7hBoMb2FcsOYY25ka9oV62OwgKWLXnFzk+modMR5fzb4kxVVAYEqP+D\r
|
|
||||||
T5KO1Vs76v1fyPGOq6BbBCvLwTqe/e6IZInJles4v5jrhnLcGKmNGivCUDe6X6NY\r
|
|
||||||
UKNt5RsZllwDQpaAb5dMNhyrk8SgIE7TBI7rvqIdUCE52Vy+0JDxFg5olRpFUfO6\r
|
|
||||||
/MyTW3Yo/ekk/npHr7iYYqJTCc21bDGLWQcIo/XO7WPxrKNWGBNPFnkRdw0MaKr4\r
|
|
||||||
+cEM3V8NFnSEpC12xA+RX/CezuJtwXZK5MpG76eYqMO6qyC+c25YcFecEufDZDxx\r
|
|
||||||
ZLqRszVRyxyWPtk/oIeQK2v9wOqY6N9/ff01gHz69vqYqN5bUw/QKZsmx1zW+gPw\r
|
|
||||||
6x2tDK2BHeYl182gCbhlKISRFwCtbjqZSkiKWao/VtygHkw0fK34avJuyQ/X9YaN\r
|
|
||||||
BRy+7Lf3VA53pnB5WJ1xwRXN8VDvmZeXzv2krHveCMemj0OjnRoCLu117xN0A5m9\r
|
|
||||||
Fm/RoDix5PolDHtWTtr2m1n2hp2LHnj8at9lFEd0SKhAYHVL9KjzycwWODZRXt+x\r
|
|
||||||
zGDDuooEeTvdY5NLyKcl4gETz1ZP4Ez5jGGjhPSwSpq1mU7UaJ9ZXXdr4KHyifW6\r
|
|
||||||
ggNzNsGhXTap7IWZpTtqXABydfiBshmH2NjqtNDwBweJVSgP10+r0WhMWlaZs6xl\r
|
|
||||||
V3o5yskJt6GlkwpJxZrTvN6Tiww/eW7HFV6NGf7IRSWY5tJc/iA7/92tOmkdvJ1q\r
|
|
||||||
myLbG7cJB787QjplEyVe2P/JBO6xYvbkJLf9Q+HaviTO25rugRSrYsoKMDfO8VlQ\r
|
|
||||||
1CcnTPVtApPZJEQzAWJEgVAM8uIlkqWJJMgyWT34sTkdBeCUFGloXQFs9Yxd0AGf\r
|
|
||||||
/zHEkYZSTKpVSvAIGu4=\r
|
|
||||||
=6iHb\r
|
|
||||||
-----END PGP MESSAGE-----\r
|
|
||||||
"""
|
|
||||||
assert check_armored_payload(payload, outgoing=False) == True
|
|
||||||
assert check_armored_payload(payload, outgoing=True) == True
|
|
||||||
78
chatmaild/src/chatmaild/tests/test_filtermail_blackbox.py
Normal file
78
chatmaild/src/chatmaild/tests/test_filtermail_blackbox.py
Normal file
@@ -0,0 +1,78 @@
|
|||||||
|
import smtplib
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def smtpserver():
|
||||||
|
from pytest_localserver import smtp
|
||||||
|
|
||||||
|
server = smtp.Server("127.0.0.1")
|
||||||
|
server.start()
|
||||||
|
yield server
|
||||||
|
server.stop()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def make_popen(request):
|
||||||
|
def popen(cmdargs, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kw):
|
||||||
|
p = subprocess.Popen(
|
||||||
|
cmdargs,
|
||||||
|
stdout=subprocess.PIPE,
|
||||||
|
stderr=subprocess.PIPE,
|
||||||
|
)
|
||||||
|
|
||||||
|
def fin():
|
||||||
|
p.terminate()
|
||||||
|
out, err = p.communicate()
|
||||||
|
print(out.decode("ascii"))
|
||||||
|
print(err.decode("ascii"), file=sys.stderr)
|
||||||
|
|
||||||
|
request.addfinalizer(fin)
|
||||||
|
return p
|
||||||
|
|
||||||
|
return popen
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("filtermail_mode", ["outgoing", "incoming"])
|
||||||
|
def test_one_mail(
|
||||||
|
make_config, make_popen, smtpserver, maildata, filtermail_mode, monkeypatch
|
||||||
|
):
|
||||||
|
monkeypatch.setenv("PYTHONUNBUFFERED", "1")
|
||||||
|
smtp_inject_port = 20025
|
||||||
|
if filtermail_mode == "outgoing":
|
||||||
|
settings = dict(
|
||||||
|
postfix_reinject_port=smtpserver.port,
|
||||||
|
filtermail_smtp_port=smtp_inject_port,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
settings = dict(
|
||||||
|
postfix_reinject_port_incoming=smtpserver.port,
|
||||||
|
filtermail_smtp_port_incoming=smtp_inject_port,
|
||||||
|
)
|
||||||
|
|
||||||
|
config = make_config("example.org", settings=settings)
|
||||||
|
path = str(config._inipath)
|
||||||
|
|
||||||
|
popen = make_popen(["filtermail", path, filtermail_mode])
|
||||||
|
line = popen.stderr.readline().strip()
|
||||||
|
if b"loop" not in line:
|
||||||
|
print(line.decode("ascii"), file=sys.stderr)
|
||||||
|
pytest.fail("starting filtermail failed")
|
||||||
|
|
||||||
|
addr = f"user1@{config.mail_domain}"
|
||||||
|
config.get_user(addr).set_password("l1k2j3l1k2j3l")
|
||||||
|
|
||||||
|
# send encrypted mail
|
||||||
|
data = str(maildata("encrypted.eml", from_addr=addr, to_addr=addr))
|
||||||
|
client = smtplib.SMTP("localhost", smtp_inject_port)
|
||||||
|
client.sendmail(addr, [addr], data)
|
||||||
|
assert len(smtpserver.outbox) == 1
|
||||||
|
|
||||||
|
# send un-encrypted mail that errors
|
||||||
|
data = str(maildata("fake-encrypted.eml", from_addr=addr, to_addr=addr))
|
||||||
|
with pytest.raises(smtplib.SMTPDataError) as e:
|
||||||
|
client.sendmail(addr, [addr], data)
|
||||||
|
assert e.value.smtp_code == 523
|
||||||
@@ -36,29 +36,3 @@ def test_handle_dovecot_request_last_login(testaddr, example_config):
|
|||||||
res = dictproxy.handle_dovecot_request(msg, dictproxy_transactions)
|
res = dictproxy.handle_dovecot_request(msg, dictproxy_transactions)
|
||||||
assert res == "O\n"
|
assert res == "O\n"
|
||||||
assert len(dictproxy_transactions) == 0
|
assert len(dictproxy_transactions) == 0
|
||||||
|
|
||||||
|
|
||||||
def test_handle_dovecot_request_last_login_echobot(example_config):
|
|
||||||
dictproxy = LastLoginDictProxy(config=example_config)
|
|
||||||
|
|
||||||
authproxy = AuthDictProxy(config=example_config)
|
|
||||||
testaddr = f"echo@{example_config.mail_domain}"
|
|
||||||
authproxy.lookup_passdb(testaddr, "ignore")
|
|
||||||
user = dictproxy.config.get_user(testaddr)
|
|
||||||
|
|
||||||
transactions = {}
|
|
||||||
|
|
||||||
# set last-login info for user
|
|
||||||
tx = "1111"
|
|
||||||
msg = f"B{tx}\t{testaddr}"
|
|
||||||
res = dictproxy.handle_dovecot_request(msg, transactions)
|
|
||||||
assert not res
|
|
||||||
assert transactions == {tx: dict(addr=testaddr, res="O\n")}
|
|
||||||
|
|
||||||
timestamp = int(time.time())
|
|
||||||
msg = f"S{tx}\tshared/last-login/{testaddr}\t{timestamp}"
|
|
||||||
res = dictproxy.handle_dovecot_request(msg, transactions)
|
|
||||||
assert not res
|
|
||||||
assert len(transactions) == 1
|
|
||||||
read_timestamp = user.get_last_login_timestamp()
|
|
||||||
assert read_timestamp is None
|
|
||||||
|
|||||||
@@ -6,4 +6,4 @@ def turn_credentials() -> str:
|
|||||||
with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as client_socket:
|
with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as client_socket:
|
||||||
client_socket.connect("/run/chatmail-turn/turn.socket")
|
client_socket.connect("/run/chatmail-turn/turn.socket")
|
||||||
with client_socket.makefile("rb") as file:
|
with client_socket.makefile("rb") as file:
|
||||||
return file.readline().decode("utf-8")
|
return file.readline().decode("utf-8").strip()
|
||||||
|
|||||||
@@ -19,7 +19,7 @@ class User:
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def can_track(self):
|
def can_track(self):
|
||||||
return "@" in self.addr and not self.addr.startswith("echo@")
|
return "@" in self.addr
|
||||||
|
|
||||||
def get_userdb_dict(self):
|
def get_userdb_dict(self):
|
||||||
"""Return a non-empty dovecot 'userdb' style dict
|
"""Return a non-empty dovecot 'userdb' style dict
|
||||||
@@ -55,11 +55,9 @@ class User:
|
|||||||
try:
|
try:
|
||||||
write_bytes_atomic(self.password_path, password)
|
write_bytes_atomic(self.password_path, password)
|
||||||
except PermissionError:
|
except PermissionError:
|
||||||
if not self.addr.startswith("echo@"):
|
logging.error(f"could not write password for: {self.addr}")
|
||||||
logging.error(f"could not write password for: {self.addr}")
|
raise
|
||||||
raise
|
self.enforce_E2EE_path.touch()
|
||||||
if not self.addr.startswith("echo@"):
|
|
||||||
self.enforce_E2EE_path.touch()
|
|
||||||
|
|
||||||
def set_last_login_timestamp(self, timestamp):
|
def set_last_login_timestamp(self, timestamp):
|
||||||
"""Track login time with daily granularity
|
"""Track login time with daily granularity
|
||||||
|
|||||||
94
cliff.toml
Normal file
94
cliff.toml
Normal file
@@ -0,0 +1,94 @@
|
|||||||
|
# git-cliff ~ configuration file
|
||||||
|
# https://git-cliff.org/docs/configuration
|
||||||
|
|
||||||
|
|
||||||
|
[changelog]
|
||||||
|
# A Tera template to be rendered for each release in the changelog.
|
||||||
|
# See https://keats.github.io/tera/docs/#introduction
|
||||||
|
body = """
|
||||||
|
{% if version %}\
|
||||||
|
## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
|
||||||
|
{% else %}\
|
||||||
|
## [unreleased]
|
||||||
|
{% endif %}\
|
||||||
|
{% for group, commits in commits | group_by(attribute="group") %}
|
||||||
|
### {{ group | striptags | trim | upper_first }}
|
||||||
|
{% for commit in commits %}
|
||||||
|
- {% if commit.scope %}*({{ commit.scope }})* {% endif %}\
|
||||||
|
{% if commit.breaking %}[**breaking**] {% endif %}\
|
||||||
|
{{ commit.message | upper_first }}\
|
||||||
|
{% endfor %}
|
||||||
|
{% endfor %}
|
||||||
|
"""
|
||||||
|
# Remove leading and trailing whitespaces from the changelog's body.
|
||||||
|
trim = true
|
||||||
|
# Render body even when there are no releases to process.
|
||||||
|
render_always = true
|
||||||
|
# An array of regex based postprocessors to modify the changelog.
|
||||||
|
postprocessors = [
|
||||||
|
# Replace the placeholder <REPO> with a URL.
|
||||||
|
#{ pattern = '<REPO>', replace = "https://github.com/orhun/git-cliff" },
|
||||||
|
]
|
||||||
|
# render body even when there are no releases to process
|
||||||
|
# render_always = true
|
||||||
|
# output file path
|
||||||
|
# output = "test.md"
|
||||||
|
|
||||||
|
[git]
|
||||||
|
# Parse commits according to the conventional commits specification.
|
||||||
|
# See https://www.conventionalcommits.org
|
||||||
|
conventional_commits = true
|
||||||
|
# Exclude commits that do not match the conventional commits specification.
|
||||||
|
filter_unconventional = true
|
||||||
|
# Require all commits to be conventional.
|
||||||
|
# Takes precedence over filter_unconventional.
|
||||||
|
require_conventional = false
|
||||||
|
# Split commits on newlines, treating each line as an individual commit.
|
||||||
|
split_commits = false
|
||||||
|
# An array of regex based parsers to modify commit messages prior to further processing.
|
||||||
|
commit_preprocessors = [
|
||||||
|
# Replace issue numbers with link templates to be updated in `changelog.postprocessors`.
|
||||||
|
#{ pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](<REPO>/issues/${2}))"},
|
||||||
|
# Check spelling of the commit message using https://github.com/crate-ci/typos.
|
||||||
|
# If the spelling is incorrect, it will be fixed automatically.
|
||||||
|
#{ pattern = '.*', replace_command = 'typos --write-changes -' },
|
||||||
|
]
|
||||||
|
# Prevent commits that are breaking from being excluded by commit parsers.
|
||||||
|
protect_breaking_commits = false
|
||||||
|
# An array of regex based parsers for extracting data from the commit message.
|
||||||
|
# Assigns commits to groups.
|
||||||
|
# Optionally sets the commit's scope and can decide to exclude commits from further processing.
|
||||||
|
commit_parsers = [
|
||||||
|
{ message = "^feat", group = "Features" },
|
||||||
|
{ message = "^fix", group = "Bug Fixes" },
|
||||||
|
{ message = "^docs", group = "Documentation" },
|
||||||
|
{ message = "^perf", group = "Performance" },
|
||||||
|
{ message = "^refactor", group = "Refactor" },
|
||||||
|
{ message = "^style", group = "Styling" },
|
||||||
|
{ message = "^test", group = "Testing" },
|
||||||
|
{ message = "^chore\\(release\\): prepare for", skip = true },
|
||||||
|
{ message = "^chore\\(deps.*\\)", skip = true },
|
||||||
|
{ message = "^chore\\(pr\\)", skip = true },
|
||||||
|
{ message = "^chore\\(pull\\)", skip = true },
|
||||||
|
{ message = "^chore|^ci", group = "Miscellaneous Tasks" },
|
||||||
|
{ body = ".*security", group = "Security" },
|
||||||
|
{ message = "^revert", group = "Revert" },
|
||||||
|
{ message = ".*", group = "Other" },
|
||||||
|
]
|
||||||
|
# Exclude commits that are not matched by any commit parser.
|
||||||
|
filter_commits = false
|
||||||
|
# Fail on a commit that is not matched by any commit parser.
|
||||||
|
fail_on_unmatched_commit = false
|
||||||
|
# An array of link parsers for extracting external references, and turning them into URLs, using regex.
|
||||||
|
link_parsers = []
|
||||||
|
# Include only the tags that belong to the current branch.
|
||||||
|
use_branch_tags = false
|
||||||
|
# Order releases topologically instead of chronologically.
|
||||||
|
topo_order = false
|
||||||
|
# Order commits topologically instead of chronologically.
|
||||||
|
topo_order_commits = true
|
||||||
|
# Order of commits in each group/release within the changelog.
|
||||||
|
# Allowed values: newest, oldest
|
||||||
|
sort_commits = "oldest"
|
||||||
|
# Process submodules commits
|
||||||
|
recurse_submodules = false
|
||||||
@@ -1,931 +0,0 @@
|
|||||||
"""
|
|
||||||
Chat Mail pyinfra deploy.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import importlib.resources
|
|
||||||
import io
|
|
||||||
import shutil
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
from io import StringIO
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
from chatmaild.config import Config, read_config
|
|
||||||
from pyinfra import facts, host, logger
|
|
||||||
from pyinfra.api import FactBase
|
|
||||||
from pyinfra.facts.files import File, Sha256File
|
|
||||||
from pyinfra.facts.server import Sysctl
|
|
||||||
from pyinfra.facts.systemd import SystemdEnabled
|
|
||||||
from pyinfra.operations import apt, files, pip, server, systemd
|
|
||||||
|
|
||||||
from .acmetool import deploy_acmetool
|
|
||||||
|
|
||||||
|
|
||||||
class Port(FactBase):
|
|
||||||
"""
|
|
||||||
Returns the process occuping a port.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def command(self, port: int) -> str:
|
|
||||||
return (
|
|
||||||
"ss -lptn 'src :%d' | awk 'NR>1 {print $6,$7}' | sed 's/users:((\"//;s/\".*//'"
|
|
||||||
% (port,)
|
|
||||||
)
|
|
||||||
|
|
||||||
def process(self, output: [str]) -> str:
|
|
||||||
return output[0]
|
|
||||||
|
|
||||||
|
|
||||||
def _build_chatmaild(dist_dir) -> None:
|
|
||||||
dist_dir = Path(dist_dir).resolve()
|
|
||||||
if dist_dir.exists():
|
|
||||||
shutil.rmtree(dist_dir)
|
|
||||||
dist_dir.mkdir()
|
|
||||||
subprocess.check_output(
|
|
||||||
[sys.executable, "-m", "build", "-n"]
|
|
||||||
+ ["--sdist", "chatmaild", "--outdir", str(dist_dir)]
|
|
||||||
)
|
|
||||||
entries = list(dist_dir.iterdir())
|
|
||||||
assert len(entries) == 1
|
|
||||||
return entries[0]
|
|
||||||
|
|
||||||
|
|
||||||
def remove_legacy_artifacts():
|
|
||||||
# disable legacy doveauth-dictproxy.service
|
|
||||||
if host.get_fact(SystemdEnabled).get("doveauth-dictproxy.service"):
|
|
||||||
systemd.service(
|
|
||||||
name="Disable legacy doveauth-dictproxy.service",
|
|
||||||
service="doveauth-dictproxy.service",
|
|
||||||
running=False,
|
|
||||||
enabled=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _install_remote_venv_with_chatmaild(config) -> None:
|
|
||||||
remove_legacy_artifacts()
|
|
||||||
dist_file = _build_chatmaild(dist_dir=Path("chatmaild/dist"))
|
|
||||||
remote_base_dir = "/usr/local/lib/chatmaild"
|
|
||||||
remote_dist_file = f"{remote_base_dir}/dist/{dist_file.name}"
|
|
||||||
remote_venv_dir = f"{remote_base_dir}/venv"
|
|
||||||
remote_chatmail_inipath = f"{remote_base_dir}/chatmail.ini"
|
|
||||||
root_owned = dict(user="root", group="root", mode="644")
|
|
||||||
|
|
||||||
apt.packages(
|
|
||||||
name="apt install python3-virtualenv",
|
|
||||||
packages=["python3-virtualenv"],
|
|
||||||
)
|
|
||||||
|
|
||||||
files.put(
|
|
||||||
name="Upload chatmaild source package",
|
|
||||||
src=dist_file.open("rb"),
|
|
||||||
dest=remote_dist_file,
|
|
||||||
create_remote_dir=True,
|
|
||||||
**root_owned,
|
|
||||||
)
|
|
||||||
|
|
||||||
files.put(
|
|
||||||
name=f"Upload {remote_chatmail_inipath}",
|
|
||||||
src=config._getbytefile(),
|
|
||||||
dest=remote_chatmail_inipath,
|
|
||||||
**root_owned,
|
|
||||||
)
|
|
||||||
|
|
||||||
pip.virtualenv(
|
|
||||||
name=f"chatmaild virtualenv {remote_venv_dir}",
|
|
||||||
path=remote_venv_dir,
|
|
||||||
always_copy=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
apt.packages(
|
|
||||||
name="install gcc and headers to build crypt_r source package",
|
|
||||||
packages=["gcc", "python3-dev"],
|
|
||||||
)
|
|
||||||
|
|
||||||
server.shell(
|
|
||||||
name=f"forced pip-install {dist_file.name}",
|
|
||||||
commands=[
|
|
||||||
f"{remote_venv_dir}/bin/pip install --force-reinstall {remote_dist_file}"
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
files.template(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("metrics.cron.j2"),
|
|
||||||
dest="/etc/cron.d/chatmail-metrics",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
config={
|
|
||||||
"mailboxes_dir": config.mailboxes_dir,
|
|
||||||
"execpath": f"{remote_venv_dir}/bin/chatmail-metrics",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
# install systemd units
|
|
||||||
for fn in (
|
|
||||||
"doveauth",
|
|
||||||
"filtermail",
|
|
||||||
"filtermail-incoming",
|
|
||||||
"echobot",
|
|
||||||
"chatmail-metadata",
|
|
||||||
"lastlogin",
|
|
||||||
"turnserver",
|
|
||||||
):
|
|
||||||
execpath = fn if fn != "filtermail-incoming" else "filtermail"
|
|
||||||
params = dict(
|
|
||||||
execpath=f"{remote_venv_dir}/bin/{execpath}",
|
|
||||||
config_path=remote_chatmail_inipath,
|
|
||||||
remote_venv_dir=remote_venv_dir,
|
|
||||||
mail_domain=config.mail_domain,
|
|
||||||
)
|
|
||||||
source_path = importlib.resources.files(__package__).joinpath(
|
|
||||||
"service", f"{fn}.service.f"
|
|
||||||
)
|
|
||||||
content = source_path.read_text().format(**params).encode()
|
|
||||||
|
|
||||||
files.put(
|
|
||||||
name=f"Upload {fn}.service",
|
|
||||||
src=io.BytesIO(content),
|
|
||||||
dest=f"/etc/systemd/system/{fn}.service",
|
|
||||||
**root_owned,
|
|
||||||
)
|
|
||||||
systemd.service(
|
|
||||||
name=f"Setup {fn} service",
|
|
||||||
service=f"{fn}.service",
|
|
||||||
running=True,
|
|
||||||
enabled=True,
|
|
||||||
restarted=True,
|
|
||||||
daemon_reload=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _configure_opendkim(domain: str, dkim_selector: str = "dkim") -> bool:
|
|
||||||
"""Configures OpenDKIM"""
|
|
||||||
need_restart = False
|
|
||||||
|
|
||||||
main_config = files.template(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("opendkim/opendkim.conf"),
|
|
||||||
dest="/etc/opendkim.conf",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
config={"domain_name": domain, "opendkim_selector": dkim_selector},
|
|
||||||
)
|
|
||||||
need_restart |= main_config.changed
|
|
||||||
|
|
||||||
screen_script = files.put(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("opendkim/screen.lua"),
|
|
||||||
dest="/etc/opendkim/screen.lua",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
need_restart |= screen_script.changed
|
|
||||||
|
|
||||||
final_script = files.put(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("opendkim/final.lua"),
|
|
||||||
dest="/etc/opendkim/final.lua",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
need_restart |= final_script.changed
|
|
||||||
|
|
||||||
files.directory(
|
|
||||||
name="Add opendkim directory to /etc",
|
|
||||||
path="/etc/opendkim",
|
|
||||||
user="opendkim",
|
|
||||||
group="opendkim",
|
|
||||||
mode="750",
|
|
||||||
present=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
keytable = files.template(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("opendkim/KeyTable"),
|
|
||||||
dest="/etc/dkimkeys/KeyTable",
|
|
||||||
user="opendkim",
|
|
||||||
group="opendkim",
|
|
||||||
mode="644",
|
|
||||||
config={"domain_name": domain, "opendkim_selector": dkim_selector},
|
|
||||||
)
|
|
||||||
need_restart |= keytable.changed
|
|
||||||
|
|
||||||
signing_table = files.template(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("opendkim/SigningTable"),
|
|
||||||
dest="/etc/dkimkeys/SigningTable",
|
|
||||||
user="opendkim",
|
|
||||||
group="opendkim",
|
|
||||||
mode="644",
|
|
||||||
config={"domain_name": domain, "opendkim_selector": dkim_selector},
|
|
||||||
)
|
|
||||||
need_restart |= signing_table.changed
|
|
||||||
files.directory(
|
|
||||||
name="Add opendkim socket directory to /var/spool/postfix",
|
|
||||||
path="/var/spool/postfix/opendkim",
|
|
||||||
user="opendkim",
|
|
||||||
group="opendkim",
|
|
||||||
mode="750",
|
|
||||||
present=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
apt.packages(
|
|
||||||
name="apt install opendkim opendkim-tools",
|
|
||||||
packages=["opendkim", "opendkim-tools"],
|
|
||||||
)
|
|
||||||
|
|
||||||
if not host.get_fact(File, f"/etc/dkimkeys/{dkim_selector}.private"):
|
|
||||||
server.shell(
|
|
||||||
name="Generate OpenDKIM domain keys",
|
|
||||||
commands=[
|
|
||||||
f"/usr/sbin/opendkim-genkey -D /etc/dkimkeys -d {domain} -s {dkim_selector}"
|
|
||||||
],
|
|
||||||
_use_su_login=True,
|
|
||||||
_su_user="opendkim",
|
|
||||||
)
|
|
||||||
|
|
||||||
service_file = files.put(
|
|
||||||
name="Configure opendkim to restart once a day",
|
|
||||||
src=importlib.resources.files(__package__).joinpath("opendkim/systemd.conf"),
|
|
||||||
dest="/etc/systemd/system/opendkim.service.d/10-prevent-memory-leak.conf",
|
|
||||||
)
|
|
||||||
need_restart |= service_file.changed
|
|
||||||
|
|
||||||
return need_restart
|
|
||||||
|
|
||||||
|
|
||||||
def _uninstall_mta_sts_daemon() -> None:
|
|
||||||
# Remove configuration.
|
|
||||||
files.file("/etc/mta-sts-daemon.yml", present=False)
|
|
||||||
|
|
||||||
files.directory("/usr/local/lib/postfix-mta-sts-resolver", present=False)
|
|
||||||
|
|
||||||
files.file("/etc/systemd/system/mta-sts-daemon.service", present=False)
|
|
||||||
|
|
||||||
systemd.service(
|
|
||||||
name="Stop MTA-STS daemon",
|
|
||||||
service="mta-sts-daemon.service",
|
|
||||||
daemon_reload=True,
|
|
||||||
running=False,
|
|
||||||
enabled=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _configure_postfix(config: Config, debug: bool = False) -> bool:
|
|
||||||
"""Configures Postfix SMTP server."""
|
|
||||||
need_restart = False
|
|
||||||
|
|
||||||
main_config = files.template(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("postfix/main.cf.j2"),
|
|
||||||
dest="/etc/postfix/main.cf",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
config=config,
|
|
||||||
disable_ipv6=config.disable_ipv6,
|
|
||||||
)
|
|
||||||
need_restart |= main_config.changed
|
|
||||||
|
|
||||||
master_config = files.template(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("postfix/master.cf.j2"),
|
|
||||||
dest="/etc/postfix/master.cf",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
debug=debug,
|
|
||||||
config=config,
|
|
||||||
)
|
|
||||||
need_restart |= master_config.changed
|
|
||||||
|
|
||||||
header_cleanup = files.put(
|
|
||||||
src=importlib.resources.files(__package__).joinpath(
|
|
||||||
"postfix/submission_header_cleanup"
|
|
||||||
),
|
|
||||||
dest="/etc/postfix/submission_header_cleanup",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
need_restart |= header_cleanup.changed
|
|
||||||
|
|
||||||
# Login map that 1:1 maps email address to login.
|
|
||||||
login_map = files.put(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("postfix/login_map"),
|
|
||||||
dest="/etc/postfix/login_map",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
need_restart |= login_map.changed
|
|
||||||
|
|
||||||
return need_restart
|
|
||||||
|
|
||||||
|
|
||||||
def _install_dovecot_package(package: str, arch: str):
|
|
||||||
arch = "amd64" if arch == "x86_64" else arch
|
|
||||||
arch = "arm64" if arch == "aarch64" else arch
|
|
||||||
url = f"https://download.delta.chat/dovecot/dovecot-{package}_2.3.21%2Bdfsg1-3_{arch}.deb"
|
|
||||||
deb_filename = "/root/" + url.split("/")[-1]
|
|
||||||
|
|
||||||
match (package, arch):
|
|
||||||
case ("core", "amd64"):
|
|
||||||
sha256 = "43f593332e22ac7701c62d58b575d2ca409e0f64857a2803be886c22860f5587"
|
|
||||||
case ("core", "arm64"):
|
|
||||||
sha256 = "4d21eba1a83f51c100f08f2e49f0c9f8f52f721ebc34f75018e043306da993a7"
|
|
||||||
case ("imapd", "amd64"):
|
|
||||||
sha256 = "8d8dc6fc00bbb6cdb25d345844f41ce2f1c53f764b79a838eb2a03103eebfa86"
|
|
||||||
case ("imapd", "arm64"):
|
|
||||||
sha256 = "178fa877ddd5df9930e8308b518f4b07df10e759050725f8217a0c1fb3fd707f"
|
|
||||||
case ("lmtpd", "amd64"):
|
|
||||||
sha256 = "2f69ba5e35363de50962d42cccbfe4ed8495265044e244007d7ccddad77513ab"
|
|
||||||
case ("lmtpd", "arm64"):
|
|
||||||
sha256 = "89f52fb36524f5877a177dff4a713ba771fd3f91f22ed0af7238d495e143b38f"
|
|
||||||
case _:
|
|
||||||
apt.packages(packages=[f"dovecot-{package}"])
|
|
||||||
return
|
|
||||||
|
|
||||||
files.download(
|
|
||||||
name=f"Download dovecot-{package}",
|
|
||||||
src=url,
|
|
||||||
dest=deb_filename,
|
|
||||||
sha256sum=sha256,
|
|
||||||
cache_time=60 * 60 * 24 * 365 * 10, # never redownload the package
|
|
||||||
)
|
|
||||||
|
|
||||||
apt.deb(name=f"Install dovecot-{package}", src=deb_filename)
|
|
||||||
|
|
||||||
|
|
||||||
def _configure_dovecot(config: Config, debug: bool = False) -> bool:
|
|
||||||
"""Configures Dovecot IMAP server."""
|
|
||||||
need_restart = False
|
|
||||||
|
|
||||||
main_config = files.template(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("dovecot/dovecot.conf.j2"),
|
|
||||||
dest="/etc/dovecot/dovecot.conf",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
config=config,
|
|
||||||
debug=debug,
|
|
||||||
disable_ipv6=config.disable_ipv6,
|
|
||||||
)
|
|
||||||
need_restart |= main_config.changed
|
|
||||||
auth_config = files.put(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("dovecot/auth.conf"),
|
|
||||||
dest="/etc/dovecot/auth.conf",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
need_restart |= auth_config.changed
|
|
||||||
lua_push_notification_script = files.put(
|
|
||||||
src=importlib.resources.files(__package__).joinpath(
|
|
||||||
"dovecot/push_notification.lua"
|
|
||||||
),
|
|
||||||
dest="/etc/dovecot/push_notification.lua",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
need_restart |= lua_push_notification_script.changed
|
|
||||||
|
|
||||||
files.template(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("dovecot/expunge.cron.j2"),
|
|
||||||
dest="/etc/cron.d/expunge",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
config=config,
|
|
||||||
)
|
|
||||||
|
|
||||||
# as per https://doc.dovecot.org/2.3/configuration_manual/os/
|
|
||||||
# it is recommended to set the following inotify limits
|
|
||||||
if config.change_kernel_settings:
|
|
||||||
for name in ("max_user_instances", "max_user_watches"):
|
|
||||||
key = f"fs.inotify.{name}"
|
|
||||||
if host.get_fact(Sysctl)[key] == config.fs_inotify_max_user_instances_and_watchers:
|
|
||||||
# Skip updating limits if already sufficient
|
|
||||||
# (enables running in incus containers where sysctl readonly)
|
|
||||||
continue
|
|
||||||
server.sysctl(
|
|
||||||
name=f"Change {key}",
|
|
||||||
key=key,
|
|
||||||
value=config.fs_inotify_max_user_instances_and_watchers,
|
|
||||||
persist=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
timezone_env = files.line(
|
|
||||||
name="Set TZ environment variable",
|
|
||||||
path="/etc/environment",
|
|
||||||
line="TZ=:/etc/localtime",
|
|
||||||
)
|
|
||||||
need_restart |= timezone_env.changed
|
|
||||||
|
|
||||||
return need_restart
|
|
||||||
|
|
||||||
|
|
||||||
def _configure_nginx(config: Config, debug: bool = False) -> bool:
|
|
||||||
"""Configures nginx HTTP server."""
|
|
||||||
need_restart = False
|
|
||||||
|
|
||||||
main_config = files.template(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("nginx/nginx.conf.j2"),
|
|
||||||
dest="/etc/nginx/nginx.conf",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
config={"domain_name": config.mail_domain},
|
|
||||||
disable_ipv6=config.disable_ipv6,
|
|
||||||
)
|
|
||||||
need_restart |= main_config.changed
|
|
||||||
|
|
||||||
autoconfig = files.template(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("nginx/autoconfig.xml.j2"),
|
|
||||||
dest="/var/www/html/.well-known/autoconfig/mail/config-v1.1.xml",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
config={"domain_name": config.mail_domain},
|
|
||||||
)
|
|
||||||
need_restart |= autoconfig.changed
|
|
||||||
|
|
||||||
mta_sts_config = files.template(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("nginx/mta-sts.txt.j2"),
|
|
||||||
dest="/var/www/html/.well-known/mta-sts.txt",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
config={"domain_name": config.mail_domain},
|
|
||||||
)
|
|
||||||
need_restart |= mta_sts_config.changed
|
|
||||||
|
|
||||||
# install CGI newemail script
|
|
||||||
#
|
|
||||||
cgi_dir = "/usr/lib/cgi-bin"
|
|
||||||
files.directory(
|
|
||||||
name=f"Ensure {cgi_dir} exists",
|
|
||||||
path=cgi_dir,
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
)
|
|
||||||
|
|
||||||
files.put(
|
|
||||||
name="Upload cgi newemail.py script",
|
|
||||||
src=importlib.resources.files("chatmaild").joinpath("newemail.py").open("rb"),
|
|
||||||
dest=f"{cgi_dir}/newemail.py",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="755",
|
|
||||||
)
|
|
||||||
|
|
||||||
return need_restart
|
|
||||||
|
|
||||||
|
|
||||||
def _remove_rspamd() -> None:
|
|
||||||
"""Remove rspamd"""
|
|
||||||
apt.packages(name="Remove rspamd", packages="rspamd", present=False)
|
|
||||||
|
|
||||||
|
|
||||||
def check_config(config):
|
|
||||||
mail_domain = config.mail_domain
|
|
||||||
if mail_domain != "testrun.org" and not mail_domain.endswith(".testrun.org"):
|
|
||||||
blocked_words = "merlinux schmieder testrun.org".split()
|
|
||||||
for key in config.__dict__:
|
|
||||||
value = config.__dict__[key]
|
|
||||||
if key.startswith("privacy") and any(
|
|
||||||
x in str(value) for x in blocked_words
|
|
||||||
):
|
|
||||||
raise ValueError(
|
|
||||||
f"please set your own privacy contacts/addresses in {config._inipath}"
|
|
||||||
)
|
|
||||||
return config
|
|
||||||
|
|
||||||
|
|
||||||
def deploy_turn_server(config):
|
|
||||||
(url, sha256sum) = {
|
|
||||||
"x86_64": (
|
|
||||||
"https://github.com/chatmail/chatmail-turn/releases/download/v0.3/chatmail-turn-x86_64-linux",
|
|
||||||
"841e527c15fdc2940b0469e206188ea8f0af48533be12ecb8098520f813d41e4",
|
|
||||||
),
|
|
||||||
"aarch64": (
|
|
||||||
"https://github.com/chatmail/chatmail-turn/releases/download/v0.3/chatmail-turn-aarch64-linux",
|
|
||||||
"a5fc2d06d937b56a34e098d2cd72a82d3e89967518d159bf246dc69b65e81b42",
|
|
||||||
),
|
|
||||||
}[host.get_fact(facts.server.Arch)]
|
|
||||||
|
|
||||||
need_restart = False
|
|
||||||
|
|
||||||
existing_sha256sum = host.get_fact(Sha256File, "/usr/local/bin/chatmail-turn")
|
|
||||||
if existing_sha256sum != sha256sum:
|
|
||||||
server.shell(
|
|
||||||
name="Download chatmail-turn",
|
|
||||||
commands=[
|
|
||||||
f"(curl -L {url} >/usr/local/bin/chatmail-turn.new && (echo '{sha256sum} /usr/local/bin/chatmail-turn.new' | sha256sum -c) && mv /usr/local/bin/chatmail-turn.new /usr/local/bin/chatmail-turn)",
|
|
||||||
"chmod 755 /usr/local/bin/chatmail-turn",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
need_restart = True
|
|
||||||
|
|
||||||
source_path = importlib.resources.files(__package__).joinpath(
|
|
||||||
"service", "turnserver.service.f"
|
|
||||||
)
|
|
||||||
content = source_path.read_text().format(mail_domain=config.mail_domain).encode()
|
|
||||||
|
|
||||||
systemd_unit = files.put(
|
|
||||||
name="Upload turnserver.service",
|
|
||||||
src=io.BytesIO(content),
|
|
||||||
dest="/etc/systemd/system/turnserver.service",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
need_restart |= systemd_unit.changed
|
|
||||||
|
|
||||||
systemd.service(
|
|
||||||
name="Setup turnserver service",
|
|
||||||
service="turnserver.service",
|
|
||||||
running=True,
|
|
||||||
enabled=True,
|
|
||||||
restarted=need_restart,
|
|
||||||
daemon_reload=systemd_unit.changed,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def deploy_mtail(config):
|
|
||||||
# Uninstall mtail package, we are going to install a static binary.
|
|
||||||
apt.packages(name="Uninstall mtail", packages=["mtail"], present=False)
|
|
||||||
|
|
||||||
(url, sha256sum) = {
|
|
||||||
"x86_64": (
|
|
||||||
"https://github.com/google/mtail/releases/download/v3.0.8/mtail_3.0.8_linux_amd64.tar.gz",
|
|
||||||
"123c2ee5f48c3eff12ebccee38befd2233d715da736000ccde49e3d5607724e4",
|
|
||||||
),
|
|
||||||
"aarch64": (
|
|
||||||
"https://github.com/google/mtail/releases/download/v3.0.8/mtail_3.0.8_linux_arm64.tar.gz",
|
|
||||||
"aa04811c0929b6754408676de520e050c45dddeb3401881888a092c9aea89cae",
|
|
||||||
),
|
|
||||||
}[host.get_fact(facts.server.Arch)]
|
|
||||||
|
|
||||||
server.shell(
|
|
||||||
name="Download mtail",
|
|
||||||
commands=[
|
|
||||||
f"(echo '{sha256sum} /usr/local/bin/mtail' | sha256sum -c) || (curl -L {url} | gunzip | tar -x -f - mtail -O >/usr/local/bin/mtail.new && mv /usr/local/bin/mtail.new /usr/local/bin/mtail)",
|
|
||||||
"chmod 755 /usr/local/bin/mtail",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
# Using our own systemd unit instead of `/usr/lib/systemd/system/mtail.service`.
|
|
||||||
# This allows to read from journalctl instead of log files.
|
|
||||||
files.template(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("mtail/mtail.service.j2"),
|
|
||||||
dest="/etc/systemd/system/mtail.service",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
address=config.mtail_address or "127.0.0.1",
|
|
||||||
port=3903,
|
|
||||||
)
|
|
||||||
|
|
||||||
mtail_conf = files.put(
|
|
||||||
name="Mtail configuration",
|
|
||||||
src=importlib.resources.files(__package__).joinpath(
|
|
||||||
"mtail/delivered_mail.mtail"
|
|
||||||
),
|
|
||||||
dest="/etc/mtail/delivered_mail.mtail",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
|
|
||||||
systemd.service(
|
|
||||||
name="Start and enable mtail",
|
|
||||||
service="mtail.service",
|
|
||||||
running=bool(config.mtail_address),
|
|
||||||
enabled=bool(config.mtail_address),
|
|
||||||
restarted=mtail_conf.changed,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def deploy_iroh_relay(config) -> None:
|
|
||||||
(url, sha256sum) = {
|
|
||||||
"x86_64": (
|
|
||||||
"https://github.com/n0-computer/iroh/releases/download/v0.35.0/iroh-relay-v0.35.0-x86_64-unknown-linux-musl.tar.gz",
|
|
||||||
"45c81199dbd70f8c4c30fef7f3b9727ca6e3cea8f2831333eeaf8aa71bf0fac1",
|
|
||||||
),
|
|
||||||
"aarch64": (
|
|
||||||
"https://github.com/n0-computer/iroh/releases/download/v0.35.0/iroh-relay-v0.35.0-aarch64-unknown-linux-musl.tar.gz",
|
|
||||||
"f8ef27631fac213b3ef668d02acd5b3e215292746a3fc71d90c63115446008b1",
|
|
||||||
),
|
|
||||||
}[host.get_fact(facts.server.Arch)]
|
|
||||||
|
|
||||||
apt.packages(
|
|
||||||
name="Install curl",
|
|
||||||
packages=["curl"],
|
|
||||||
)
|
|
||||||
|
|
||||||
need_restart = False
|
|
||||||
|
|
||||||
existing_sha256sum = host.get_fact(Sha256File, "/usr/local/bin/iroh-relay")
|
|
||||||
if existing_sha256sum != sha256sum:
|
|
||||||
server.shell(
|
|
||||||
name="Download iroh-relay",
|
|
||||||
commands=[
|
|
||||||
f"(curl -L {url} | gunzip | tar -x -f - ./iroh-relay -O >/usr/local/bin/iroh-relay.new && (echo '{sha256sum} /usr/local/bin/iroh-relay.new' | sha256sum -c) && mv /usr/local/bin/iroh-relay.new /usr/local/bin/iroh-relay)",
|
|
||||||
"chmod 755 /usr/local/bin/iroh-relay",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
need_restart = True
|
|
||||||
|
|
||||||
systemd_unit = files.put(
|
|
||||||
name="Upload iroh-relay systemd unit",
|
|
||||||
src=importlib.resources.files(__package__).joinpath("iroh-relay.service"),
|
|
||||||
dest="/etc/systemd/system/iroh-relay.service",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
need_restart |= systemd_unit.changed
|
|
||||||
|
|
||||||
iroh_config = files.put(
|
|
||||||
name="Upload iroh-relay config",
|
|
||||||
src=importlib.resources.files(__package__).joinpath("iroh-relay.toml"),
|
|
||||||
dest="/etc/iroh-relay.toml",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
need_restart |= iroh_config.changed
|
|
||||||
|
|
||||||
systemd.service(
|
|
||||||
name="Start and enable iroh-relay",
|
|
||||||
service="iroh-relay.service",
|
|
||||||
running=True,
|
|
||||||
enabled=config.enable_iroh_relay,
|
|
||||||
restarted=need_restart,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def deploy_chatmail(config_path: Path, disable_mail: bool) -> None:
|
|
||||||
"""Deploy a chat-mail instance.
|
|
||||||
|
|
||||||
:param config_path: path to chatmail.ini
|
|
||||||
:param disable_mail: whether to disable postfix & dovecot
|
|
||||||
"""
|
|
||||||
config = read_config(config_path)
|
|
||||||
check_config(config)
|
|
||||||
mail_domain = config.mail_domain
|
|
||||||
|
|
||||||
from .www import build_webpages, get_paths
|
|
||||||
|
|
||||||
server.group(name="Create vmail group", group="vmail", system=True)
|
|
||||||
server.user(name="Create vmail user", user="vmail", group="vmail", system=True)
|
|
||||||
server.group(name="Create opendkim group", group="opendkim", system=True)
|
|
||||||
server.user(
|
|
||||||
name="Create opendkim user",
|
|
||||||
user="opendkim",
|
|
||||||
groups=["opendkim"],
|
|
||||||
system=True,
|
|
||||||
)
|
|
||||||
server.user(
|
|
||||||
name="Add postfix user to opendkim group for socket access",
|
|
||||||
user="postfix",
|
|
||||||
groups=["opendkim"],
|
|
||||||
system=True,
|
|
||||||
)
|
|
||||||
server.user(name="Create echobot user", user="echobot", system=True)
|
|
||||||
server.user(name="Create iroh user", user="iroh", system=True)
|
|
||||||
|
|
||||||
# Add our OBS repository for dovecot_no_delay
|
|
||||||
files.put(
|
|
||||||
name="Add Deltachat OBS GPG key to apt keyring",
|
|
||||||
src=importlib.resources.files(__package__).joinpath("obs-home-deltachat.gpg"),
|
|
||||||
dest="/etc/apt/keyrings/obs-home-deltachat.gpg",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
|
|
||||||
files.line(
|
|
||||||
name="Add DeltaChat OBS home repository to sources.list",
|
|
||||||
path="/etc/apt/sources.list",
|
|
||||||
line="deb [signed-by=/etc/apt/keyrings/obs-home-deltachat.gpg] https://download.opensuse.org/repositories/home:/deltachat/Debian_12/ ./",
|
|
||||||
escape_regex_characters=True,
|
|
||||||
present=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
if host.get_fact(Port, port=53) != "unbound":
|
|
||||||
files.line(
|
|
||||||
name="Add 9.9.9.9 to resolv.conf",
|
|
||||||
path="/etc/resolv.conf",
|
|
||||||
line="nameserver 9.9.9.9",
|
|
||||||
)
|
|
||||||
apt.update(name="apt update", cache_time=24 * 3600)
|
|
||||||
apt.upgrade(name="upgrade apt packages", auto_remove=True)
|
|
||||||
|
|
||||||
apt.packages(
|
|
||||||
name="Install rsync",
|
|
||||||
packages=["rsync"],
|
|
||||||
)
|
|
||||||
|
|
||||||
deploy_turn_server(config)
|
|
||||||
|
|
||||||
# Run local DNS resolver `unbound`.
|
|
||||||
# `resolvconf` takes care of setting up /etc/resolv.conf
|
|
||||||
# to use 127.0.0.1 as the resolver.
|
|
||||||
from cmdeploy.cmdeploy import Out
|
|
||||||
|
|
||||||
port_services = [
|
|
||||||
(["master", "smtpd"], 25),
|
|
||||||
("unbound", 53),
|
|
||||||
("acmetool", 80),
|
|
||||||
(["imap-login", "dovecot"], 143),
|
|
||||||
("nginx", 443),
|
|
||||||
(["master", "smtpd"], 465),
|
|
||||||
(["master", "smtpd"], 587),
|
|
||||||
(["imap-login", "dovecot"], 993),
|
|
||||||
("iroh-relay", 3340),
|
|
||||||
("nginx", 8443),
|
|
||||||
(["master", "smtpd"], config.postfix_reinject_port),
|
|
||||||
(["master", "smtpd"], config.postfix_reinject_port_incoming),
|
|
||||||
("filtermail", config.filtermail_smtp_port),
|
|
||||||
("filtermail", config.filtermail_smtp_port_incoming),
|
|
||||||
]
|
|
||||||
for service, port in port_services:
|
|
||||||
print(f"Checking if port {port} is available for {service}...")
|
|
||||||
running_service = host.get_fact(Port, port=port)
|
|
||||||
if running_service:
|
|
||||||
if running_service not in service:
|
|
||||||
Out().red(
|
|
||||||
f"Deploy failed: port {port} is occupied by: {running_service}"
|
|
||||||
)
|
|
||||||
exit(1)
|
|
||||||
|
|
||||||
apt.packages(
|
|
||||||
name="Install unbound",
|
|
||||||
packages=["unbound", "unbound-anchor", "dnsutils"],
|
|
||||||
)
|
|
||||||
server.shell(
|
|
||||||
name="Generate root keys for validating DNSSEC",
|
|
||||||
commands=[
|
|
||||||
"unbound-anchor -a /var/lib/unbound/root.key || true",
|
|
||||||
"systemctl reset-failed unbound.service",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
systemd.service(
|
|
||||||
name="Start and enable unbound",
|
|
||||||
service="unbound.service",
|
|
||||||
running=True,
|
|
||||||
enabled=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
deploy_iroh_relay(config)
|
|
||||||
|
|
||||||
# Deploy acmetool to have TLS certificates.
|
|
||||||
tls_domains = [mail_domain, f"mta-sts.{mail_domain}", f"www.{mail_domain}"]
|
|
||||||
deploy_acmetool(
|
|
||||||
email=config.acme_email,
|
|
||||||
domains=tls_domains,
|
|
||||||
)
|
|
||||||
|
|
||||||
apt.packages(
|
|
||||||
# required for setfacl for echobot
|
|
||||||
name="Install acl",
|
|
||||||
packages="acl",
|
|
||||||
)
|
|
||||||
|
|
||||||
apt.packages(
|
|
||||||
name="Install Postfix",
|
|
||||||
packages="postfix",
|
|
||||||
)
|
|
||||||
|
|
||||||
if not "dovecot.service" in host.get_fact(SystemdEnabled):
|
|
||||||
_install_dovecot_package("core", host.get_fact(facts.server.Arch))
|
|
||||||
_install_dovecot_package("imapd", host.get_fact(facts.server.Arch))
|
|
||||||
_install_dovecot_package("lmtpd", host.get_fact(facts.server.Arch))
|
|
||||||
|
|
||||||
apt.packages(
|
|
||||||
name="Install nginx",
|
|
||||||
packages=["nginx", "libnginx-mod-stream"],
|
|
||||||
)
|
|
||||||
|
|
||||||
apt.packages(
|
|
||||||
name="Install fcgiwrap",
|
|
||||||
packages=["fcgiwrap"],
|
|
||||||
)
|
|
||||||
|
|
||||||
www_path, src_dir, build_dir = get_paths(config)
|
|
||||||
# if www_folder was set to a non-existing folder, skip upload
|
|
||||||
if not www_path.is_dir():
|
|
||||||
logger.warning("Building web pages is disabled in chatmail.ini, skipping")
|
|
||||||
else:
|
|
||||||
# if www_folder is a hugo page, build it
|
|
||||||
if build_dir:
|
|
||||||
www_path = build_webpages(src_dir, build_dir, config)
|
|
||||||
# if it is not a hugo page, upload it as is
|
|
||||||
files.rsync(f"{www_path}/", "/var/www/html", flags=["-avz", "--chown=www-data"])
|
|
||||||
|
|
||||||
_install_remote_venv_with_chatmaild(config)
|
|
||||||
debug = False
|
|
||||||
dovecot_need_restart = _configure_dovecot(config, debug=debug)
|
|
||||||
postfix_need_restart = _configure_postfix(config, debug=debug)
|
|
||||||
nginx_need_restart = _configure_nginx(config)
|
|
||||||
_uninstall_mta_sts_daemon()
|
|
||||||
|
|
||||||
_remove_rspamd()
|
|
||||||
opendkim_need_restart = _configure_opendkim(mail_domain, "opendkim")
|
|
||||||
|
|
||||||
systemd.service(
|
|
||||||
name="Start and enable OpenDKIM",
|
|
||||||
service="opendkim.service",
|
|
||||||
running=True,
|
|
||||||
enabled=True,
|
|
||||||
daemon_reload=opendkim_need_restart,
|
|
||||||
restarted=opendkim_need_restart,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Dovecot should be started before Postfix
|
|
||||||
# because it creates authentication socket
|
|
||||||
# required by Postfix.
|
|
||||||
systemd.service(
|
|
||||||
name="disable dovecot for now" if disable_mail else "Start and enable Dovecot",
|
|
||||||
service="dovecot.service",
|
|
||||||
running=False if disable_mail else True,
|
|
||||||
enabled=False if disable_mail else True,
|
|
||||||
restarted=dovecot_need_restart if not disable_mail else False,
|
|
||||||
)
|
|
||||||
|
|
||||||
systemd.service(
|
|
||||||
name="disable postfix for now" if disable_mail else "Start and enable Postfix",
|
|
||||||
service="postfix.service",
|
|
||||||
running=False if disable_mail else True,
|
|
||||||
enabled=False if disable_mail else True,
|
|
||||||
restarted=postfix_need_restart if not disable_mail else False,
|
|
||||||
)
|
|
||||||
|
|
||||||
systemd.service(
|
|
||||||
name="Start and enable nginx",
|
|
||||||
service="nginx.service",
|
|
||||||
running=True,
|
|
||||||
enabled=True,
|
|
||||||
restarted=nginx_need_restart,
|
|
||||||
)
|
|
||||||
|
|
||||||
systemd.service(
|
|
||||||
name="Start and enable fcgiwrap",
|
|
||||||
service="fcgiwrap.service",
|
|
||||||
running=True,
|
|
||||||
enabled=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
systemd.service(
|
|
||||||
name="Restart echobot if postfix and dovecot were just started",
|
|
||||||
service="echobot.service",
|
|
||||||
restarted=postfix_need_restart and dovecot_need_restart,
|
|
||||||
)
|
|
||||||
|
|
||||||
# This file is used by auth proxy.
|
|
||||||
# https://wiki.debian.org/EtcMailName
|
|
||||||
server.shell(
|
|
||||||
name="Setup /etc/mailname",
|
|
||||||
commands=[f"echo {mail_domain} >/etc/mailname; chmod 644 /etc/mailname"],
|
|
||||||
)
|
|
||||||
|
|
||||||
journald_conf = files.put(
|
|
||||||
name="Configure journald",
|
|
||||||
src=importlib.resources.files(__package__).joinpath("journald.conf"),
|
|
||||||
dest="/etc/systemd/journald.conf",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
systemd.service(
|
|
||||||
name="Start and enable journald",
|
|
||||||
service="systemd-journald.service",
|
|
||||||
running=True,
|
|
||||||
enabled=True,
|
|
||||||
restarted=journald_conf.changed,
|
|
||||||
)
|
|
||||||
files.directory(
|
|
||||||
name="Ensure old logs on disk are deleted",
|
|
||||||
path="/var/log/journal/",
|
|
||||||
present=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
apt.packages(
|
|
||||||
name="Ensure cron is installed",
|
|
||||||
packages=["cron"],
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
git_hash = subprocess.check_output(["git", "rev-parse", "HEAD"]).decode()
|
|
||||||
except Exception:
|
|
||||||
git_hash = "unknown\n"
|
|
||||||
try:
|
|
||||||
git_diff = subprocess.check_output(["git", "diff"]).decode()
|
|
||||||
except Exception:
|
|
||||||
git_diff = ""
|
|
||||||
files.put(
|
|
||||||
name="Upload chatmail relay git commiit hash",
|
|
||||||
src=StringIO(git_hash + git_diff),
|
|
||||||
dest="/etc/chatmail-version",
|
|
||||||
mode="700",
|
|
||||||
)
|
|
||||||
|
|
||||||
deploy_mtail(config)
|
|
||||||
@@ -2,66 +2,140 @@ import importlib.resources
|
|||||||
|
|
||||||
from pyinfra.operations import apt, files, server, systemd
|
from pyinfra.operations import apt, files, server, systemd
|
||||||
|
|
||||||
|
from ..basedeploy import Deployer
|
||||||
|
|
||||||
def deploy_acmetool(email="", domains=[]):
|
|
||||||
"""Deploy acmetool."""
|
|
||||||
apt.packages(
|
|
||||||
name="Install acmetool",
|
|
||||||
packages=["acmetool"],
|
|
||||||
)
|
|
||||||
|
|
||||||
files.put(
|
class AcmetoolDeployer(Deployer):
|
||||||
src=importlib.resources.files(__package__).joinpath("acmetool.cron").open("rb"),
|
def __init__(self, email, domains):
|
||||||
dest="/etc/cron.d/acmetool",
|
self.domains = domains
|
||||||
user="root",
|
self.email = email
|
||||||
group="root",
|
self.need_restart_redirector = False
|
||||||
mode="644",
|
self.need_restart_reconcile_service = False
|
||||||
)
|
self.need_restart_reconcile_timer = False
|
||||||
|
|
||||||
files.put(
|
def install(self):
|
||||||
src=importlib.resources.files(__package__).joinpath("acmetool.hook").open("rb"),
|
apt.packages(
|
||||||
dest="/usr/lib/acme/hooks/nginx",
|
name="Install acmetool",
|
||||||
user="root",
|
packages=["acmetool"],
|
||||||
group="root",
|
)
|
||||||
mode="744",
|
|
||||||
)
|
|
||||||
|
|
||||||
files.template(
|
files.file(
|
||||||
src=importlib.resources.files(__package__).joinpath("response-file.yaml.j2"),
|
name="Remove old acmetool cronjob, it is replaced with systemd timer.",
|
||||||
dest="/var/lib/acme/conf/responses",
|
path="/etc/cron.d/acmetool",
|
||||||
user="root",
|
present=False,
|
||||||
group="root",
|
)
|
||||||
mode="644",
|
|
||||||
email=email,
|
|
||||||
)
|
|
||||||
|
|
||||||
files.template(
|
files.put(
|
||||||
src=importlib.resources.files(__package__).joinpath("target.yaml.j2"),
|
name="Install acmetool hook.",
|
||||||
dest="/var/lib/acme/conf/target",
|
src=importlib.resources.files(__package__)
|
||||||
user="root",
|
.joinpath("acmetool.hook")
|
||||||
group="root",
|
.open("rb"),
|
||||||
mode="644",
|
dest="/etc/acme/hooks/nginx",
|
||||||
)
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="755",
|
||||||
|
)
|
||||||
|
files.file(
|
||||||
|
name="Remove acmetool hook from the wrong location where it was previously installed.",
|
||||||
|
path="/usr/lib/acme/hooks/nginx",
|
||||||
|
present=False,
|
||||||
|
)
|
||||||
|
|
||||||
service_file = files.put(
|
def configure(self):
|
||||||
src=importlib.resources.files(__package__).joinpath(
|
files.template(
|
||||||
"acmetool-redirector.service"
|
src=importlib.resources.files(__package__).joinpath(
|
||||||
),
|
"response-file.yaml.j2"
|
||||||
dest="/etc/systemd/system/acmetool-redirector.service",
|
),
|
||||||
user="root",
|
dest="/var/lib/acme/conf/responses",
|
||||||
group="root",
|
user="root",
|
||||||
mode="644",
|
group="root",
|
||||||
)
|
mode="644",
|
||||||
|
email=self.email,
|
||||||
|
)
|
||||||
|
|
||||||
systemd.service(
|
files.template(
|
||||||
name="Setup acmetool-redirector service",
|
src=importlib.resources.files(__package__).joinpath("target.yaml.j2"),
|
||||||
service="acmetool-redirector.service",
|
dest="/var/lib/acme/conf/target",
|
||||||
running=True,
|
user="root",
|
||||||
enabled=True,
|
group="root",
|
||||||
restarted=service_file.changed,
|
mode="644",
|
||||||
)
|
)
|
||||||
|
|
||||||
server.shell(
|
server.shell(
|
||||||
name=f"Request certificate for: {', '.join(domains)}",
|
name=f"Remove old acmetool desired files for {self.domains[0]}",
|
||||||
commands=[f"acmetool want --xlog.severity=debug {' '.join(domains)}"],
|
commands=[f"rm -f /var/lib/acme/desired/{self.domains[0]}-*"],
|
||||||
)
|
)
|
||||||
|
files.template(
|
||||||
|
src=importlib.resources.files(__package__).joinpath("desired.yaml.j2"),
|
||||||
|
dest=f"/var/lib/acme/desired/{self.domains[0]}", # 0 is mailhost TLD
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
domains=self.domains,
|
||||||
|
)
|
||||||
|
|
||||||
|
service_file = files.put(
|
||||||
|
src=importlib.resources.files(__package__).joinpath(
|
||||||
|
"acmetool-redirector.service"
|
||||||
|
),
|
||||||
|
dest="/etc/systemd/system/acmetool-redirector.service",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
self.need_restart_redirector = service_file.changed
|
||||||
|
|
||||||
|
reconcile_service_file = files.put(
|
||||||
|
src=importlib.resources.files(__package__).joinpath(
|
||||||
|
"acmetool-reconcile.service"
|
||||||
|
),
|
||||||
|
dest="/etc/systemd/system/acmetool-reconcile.service",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
self.need_restart_reconcile_service = reconcile_service_file.changed
|
||||||
|
|
||||||
|
reconcile_timer_file = files.put(
|
||||||
|
src=importlib.resources.files(__package__).joinpath(
|
||||||
|
"acmetool-reconcile.timer"
|
||||||
|
),
|
||||||
|
dest="/etc/systemd/system/acmetool-reconcile.timer",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
self.need_restart_reconcile_timer = reconcile_timer_file.changed
|
||||||
|
|
||||||
|
def activate(self):
|
||||||
|
systemd.service(
|
||||||
|
name="Setup acmetool-redirector service",
|
||||||
|
service="acmetool-redirector.service",
|
||||||
|
running=True,
|
||||||
|
enabled=True,
|
||||||
|
restarted=self.need_restart_redirector,
|
||||||
|
)
|
||||||
|
self.need_restart_redirector = False
|
||||||
|
|
||||||
|
systemd.service(
|
||||||
|
name="Setup acmetool-reconcile service",
|
||||||
|
service="acmetool-reconcile.service",
|
||||||
|
running=False,
|
||||||
|
enabled=False,
|
||||||
|
daemon_reload=self.need_restart_reconcile_service,
|
||||||
|
)
|
||||||
|
self.need_restart_reconcile_service = False
|
||||||
|
|
||||||
|
systemd.service(
|
||||||
|
name="Setup acmetool-reconcile timer",
|
||||||
|
service="acmetool-reconcile.timer",
|
||||||
|
running=True,
|
||||||
|
enabled=True,
|
||||||
|
daemon_reload=self.need_restart_reconcile_timer,
|
||||||
|
)
|
||||||
|
self.need_restart_reconcile_timer = False
|
||||||
|
|
||||||
|
server.shell(
|
||||||
|
name=f"Reconcile certificates for: {', '.join(self.domains)}",
|
||||||
|
commands=["acmetool --batch --xlog.severity=debug reconcile"],
|
||||||
|
)
|
||||||
|
|||||||
@@ -0,0 +1,8 @@
|
|||||||
|
[Unit]
|
||||||
|
Description=Renew TLS certificates with acmetool
|
||||||
|
After=network.target
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
Type=oneshot
|
||||||
|
ExecStart=/usr/bin/acmetool --batch reconcile
|
||||||
|
|
||||||
8
cmdeploy/src/cmdeploy/acmetool/acmetool-reconcile.timer
Normal file
8
cmdeploy/src/cmdeploy/acmetool/acmetool-reconcile.timer
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
[Unit]
|
||||||
|
Description=Renew TLS certificates with acmetool
|
||||||
|
|
||||||
|
[Timer]
|
||||||
|
OnCalendar=*-*-* 16:20:00
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=timers.target
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
SHELL=/bin/sh
|
|
||||||
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin
|
|
||||||
MAILTO=root
|
|
||||||
20 16 * * * root /usr/bin/acmetool --batch reconcile && systemctl reload dovecot && systemctl reload postfix && systemctl reload nginx
|
|
||||||
6
cmdeploy/src/cmdeploy/acmetool/desired.yaml.j2
Normal file
6
cmdeploy/src/cmdeploy/acmetool/desired.yaml.j2
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
satisfy:
|
||||||
|
names:
|
||||||
|
{%- for domain in domains %}
|
||||||
|
- {{ domain }}
|
||||||
|
{%- endfor %}
|
||||||
|
|
||||||
@@ -1,2 +1,2 @@
|
|||||||
"acme-enter-email": "{{ email }}"
|
"acme-enter-email": "{{ email }}"
|
||||||
"acme-agreement:https://letsencrypt.org/documents/LE-SA-v1.5-February-24-2025.pdf": true
|
"acme-agreement:https://letsencrypt.org/documents/LE-SA-v1.6-August-18-2025.pdf": true
|
||||||
|
|||||||
@@ -1,7 +1,8 @@
|
|||||||
request:
|
request:
|
||||||
provider: https://acme-v02.api.letsencrypt.org/directory
|
provider: https://acme-v02.api.letsencrypt.org/directory
|
||||||
key:
|
key:
|
||||||
type: rsa
|
type: ecdsa
|
||||||
|
ecdsa-curve: nistp256
|
||||||
challenge:
|
challenge:
|
||||||
webroot-paths:
|
webroot-paths:
|
||||||
- /var/www/html/.well-known/acme-challenge
|
- /var/www/html/.well-known/acme-challenge
|
||||||
|
|||||||
111
cmdeploy/src/cmdeploy/basedeploy.py
Normal file
111
cmdeploy/src/cmdeploy/basedeploy.py
Normal file
@@ -0,0 +1,111 @@
|
|||||||
|
import importlib.resources
|
||||||
|
import io
|
||||||
|
import os
|
||||||
|
|
||||||
|
from pyinfra.operations import files, server, systemd
|
||||||
|
|
||||||
|
|
||||||
|
def get_resource(arg, pkg=__package__):
|
||||||
|
return importlib.resources.files(pkg).joinpath(arg)
|
||||||
|
|
||||||
|
|
||||||
|
def configure_remote_units(mail_domain, units) -> None:
|
||||||
|
remote_base_dir = "/usr/local/lib/chatmaild"
|
||||||
|
remote_venv_dir = f"{remote_base_dir}/venv"
|
||||||
|
remote_chatmail_inipath = f"{remote_base_dir}/chatmail.ini"
|
||||||
|
root_owned = dict(user="root", group="root", mode="644")
|
||||||
|
|
||||||
|
# install systemd units
|
||||||
|
for fn in units:
|
||||||
|
params = dict(
|
||||||
|
execpath=f"{remote_venv_dir}/bin/{fn}",
|
||||||
|
config_path=remote_chatmail_inipath,
|
||||||
|
remote_venv_dir=remote_venv_dir,
|
||||||
|
mail_domain=mail_domain,
|
||||||
|
)
|
||||||
|
|
||||||
|
basename = fn if "." in fn else f"{fn}.service"
|
||||||
|
|
||||||
|
source_path = get_resource(f"service/{basename}.f")
|
||||||
|
content = source_path.read_text().format(**params).encode()
|
||||||
|
|
||||||
|
files.put(
|
||||||
|
name=f"Upload {basename}",
|
||||||
|
src=io.BytesIO(content),
|
||||||
|
dest=f"/etc/systemd/system/{basename}",
|
||||||
|
**root_owned,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def activate_remote_units(units) -> None:
|
||||||
|
# activate systemd units
|
||||||
|
for fn in units:
|
||||||
|
basename = fn if "." in fn else f"{fn}.service"
|
||||||
|
|
||||||
|
if fn == "chatmail-expire" or fn == "chatmail-fsreport":
|
||||||
|
# don't auto-start but let the corresponding timer trigger execution
|
||||||
|
enabled = False
|
||||||
|
else:
|
||||||
|
enabled = True
|
||||||
|
systemd.service(
|
||||||
|
name=f"Setup {basename}",
|
||||||
|
service=basename,
|
||||||
|
running=enabled,
|
||||||
|
enabled=enabled,
|
||||||
|
restarted=enabled,
|
||||||
|
daemon_reload=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class Deployment:
|
||||||
|
def install(self, deployer):
|
||||||
|
# optional 'required_users' contains a list of (user, group, secondary-group-list) tuples.
|
||||||
|
# If the group is None, no group is created corresponding to that user.
|
||||||
|
# If the secondary group list is not None, all listed groups are created as well.
|
||||||
|
required_users = getattr(deployer, "required_users", [])
|
||||||
|
for user, group, groups in required_users:
|
||||||
|
if group is not None:
|
||||||
|
server.group(
|
||||||
|
name="Create {} group".format(group), group=group, system=True
|
||||||
|
)
|
||||||
|
if groups is not None:
|
||||||
|
for group2 in groups:
|
||||||
|
server.group(
|
||||||
|
name="Create {} group".format(group2), group=group2, system=True
|
||||||
|
)
|
||||||
|
server.user(
|
||||||
|
name="Create {} user".format(user),
|
||||||
|
user=user,
|
||||||
|
group=group,
|
||||||
|
groups=groups,
|
||||||
|
system=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
deployer.install()
|
||||||
|
|
||||||
|
def configure(self, deployer):
|
||||||
|
deployer.configure()
|
||||||
|
|
||||||
|
def activate(self, deployer):
|
||||||
|
deployer.activate()
|
||||||
|
|
||||||
|
def perform_stages(self, deployers):
|
||||||
|
default_stages = "install,configure,activate"
|
||||||
|
stages = os.getenv("CMDEPLOY_STAGES", default_stages).split(",")
|
||||||
|
|
||||||
|
for stage in stages:
|
||||||
|
for deployer in deployers:
|
||||||
|
getattr(self, stage)(deployer)
|
||||||
|
|
||||||
|
|
||||||
|
class Deployer:
|
||||||
|
need_restart = False
|
||||||
|
|
||||||
|
def install(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def configure(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def activate(self):
|
||||||
|
pass
|
||||||
@@ -19,7 +19,7 @@ from packaging import version
|
|||||||
from termcolor import colored
|
from termcolor import colored
|
||||||
|
|
||||||
from . import dns, remote
|
from . import dns, remote
|
||||||
from .sshexec import SSHExec, LocalExec
|
from .sshexec import LocalExec, SSHExec
|
||||||
|
|
||||||
#
|
#
|
||||||
# cmdeploy sub commands and options
|
# cmdeploy sub commands and options
|
||||||
@@ -71,6 +71,11 @@ def run_cmd_options(parser):
|
|||||||
action="store_true",
|
action="store_true",
|
||||||
help="install/upgrade the server, but disable postfix & dovecot for now",
|
help="install/upgrade the server, but disable postfix & dovecot for now",
|
||||||
)
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--website-only",
|
||||||
|
action="store_true",
|
||||||
|
help="only update/deploy the website, skipping full server upgrade/deployment, useful when you only changed/updated the web pages and don't need to re-run a full server upgrade",
|
||||||
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--skip-dns-check",
|
"--skip-dns-check",
|
||||||
dest="dns_check_disabled",
|
dest="dns_check_disabled",
|
||||||
@@ -93,9 +98,10 @@ def run_cmd(args, out):
|
|||||||
|
|
||||||
env = os.environ.copy()
|
env = os.environ.copy()
|
||||||
env["CHATMAIL_INI"] = args.inipath
|
env["CHATMAIL_INI"] = args.inipath
|
||||||
|
env["CHATMAIL_WEBSITE_ONLY"] = "True" if args.website_only else ""
|
||||||
env["CHATMAIL_DISABLE_MAIL"] = "True" if args.disable_mail else ""
|
env["CHATMAIL_DISABLE_MAIL"] = "True" if args.disable_mail else ""
|
||||||
env["CHATMAIL_REQUIRE_IROH"] = "True" if require_iroh else ""
|
env["CHATMAIL_REQUIRE_IROH"] = "True" if require_iroh else ""
|
||||||
deploy_path = importlib.resources.files(__package__).joinpath("deploy.py").resolve()
|
deploy_path = importlib.resources.files(__package__).joinpath("run.py").resolve()
|
||||||
pyinf = "pyinfra --dry" if args.dry_run else "pyinfra"
|
pyinf = "pyinfra --dry" if args.dry_run else "pyinfra"
|
||||||
|
|
||||||
cmd = f"{pyinf} --ssh-user root {ssh_host} {deploy_path} -y"
|
cmd = f"{pyinf} --ssh-user root {ssh_host} {deploy_path} -y"
|
||||||
@@ -108,19 +114,12 @@ def run_cmd(args, out):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
retcode = out.check_call(cmd, env=env)
|
retcode = out.check_call(cmd, env=env)
|
||||||
if retcode == 0:
|
if args.website_only:
|
||||||
if not args.disable_mail:
|
if retcode == 0:
|
||||||
print("\nYou can try out the relay by talking to this echo bot: ")
|
out.green("Website deployment completed.")
|
||||||
sshexec = SSHExec(args.config.mail_domain, verbose=args.verbose)
|
else:
|
||||||
print(
|
out.red("Website deployment failed.")
|
||||||
sshexec(
|
elif retcode == 0:
|
||||||
call=remote.rshell.shell,
|
|
||||||
kwargs=dict(command="cat /var/lib/echobot/invite-link.txt"),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
server_deployed_message = f"Chatmail server started: https://{args.config.mail_domain}/"
|
|
||||||
delimiter_line = "=" * len(server_deployed_message)
|
|
||||||
out.green(f"{delimiter_line}\n{server_deployed_message}\n{delimiter_line}")
|
|
||||||
out.green("Deploy completed, call `cmdeploy dns` next.")
|
out.green("Deploy completed, call `cmdeploy dns` next.")
|
||||||
elif not remote_data["acme_account_url"]:
|
elif not remote_data["acme_account_url"]:
|
||||||
out.red("Deploy completed but letsencrypt not configured")
|
out.red("Deploy completed but letsencrypt not configured")
|
||||||
@@ -174,10 +173,15 @@ def dns_cmd(args, out):
|
|||||||
return retcode
|
return retcode
|
||||||
|
|
||||||
|
|
||||||
|
def status_cmd_options(parser):
|
||||||
|
add_ssh_host_option(parser)
|
||||||
|
|
||||||
|
|
||||||
def status_cmd(args, out):
|
def status_cmd(args, out):
|
||||||
"""Display status for online chatmail instance."""
|
"""Display status for online chatmail instance."""
|
||||||
|
|
||||||
sshexec = args.get_sshexec()
|
ssh_host = args.ssh_host if args.ssh_host else args.config.mail_domain
|
||||||
|
sshexec = get_sshexec(ssh_host, verbose=args.verbose)
|
||||||
|
|
||||||
out.green(f"chatmail domain: {args.config.mail_domain}")
|
out.green(f"chatmail domain: {args.config.mail_domain}")
|
||||||
if args.config.privacy_mail:
|
if args.config.privacy_mail:
|
||||||
@@ -236,7 +240,12 @@ def fmt_cmd_options(parser):
|
|||||||
def fmt_cmd(args, out):
|
def fmt_cmd(args, out):
|
||||||
"""Run formattting fixes on all chatmail source code."""
|
"""Run formattting fixes on all chatmail source code."""
|
||||||
|
|
||||||
sources = [str(importlib.resources.files(x)) for x in ("chatmaild", "cmdeploy")]
|
chatmaild_dir = importlib.resources.files("chatmaild").resolve()
|
||||||
|
cmdeploy_dir = chatmaild_dir.joinpath(
|
||||||
|
"..", "..", "..", "cmdeploy", "src", "cmdeploy"
|
||||||
|
).resolve()
|
||||||
|
sources = [str(chatmaild_dir), str(cmdeploy_dir)]
|
||||||
|
|
||||||
format_args = [shutil.which("ruff"), "format"]
|
format_args = [shutil.which("ruff"), "format"]
|
||||||
check_args = [shutil.which("ruff"), "check"]
|
check_args = [shutil.which("ruff"), "check"]
|
||||||
|
|
||||||
@@ -307,7 +316,7 @@ def add_ssh_host_option(parser):
|
|||||||
"--ssh-host",
|
"--ssh-host",
|
||||||
dest="ssh_host",
|
dest="ssh_host",
|
||||||
help="Run commands on 'localhost', via '@docker', or on a specific SSH host "
|
help="Run commands on 'localhost', via '@docker', or on a specific SSH host "
|
||||||
"instead of chatmail.ini's mail_domain.",
|
"instead of chatmail.ini's mail_domain.",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
614
cmdeploy/src/cmdeploy/deployers.py
Normal file
614
cmdeploy/src/cmdeploy/deployers.py
Normal file
@@ -0,0 +1,614 @@
|
|||||||
|
"""
|
||||||
|
Chat Mail pyinfra deploy.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
from io import StringIO
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from chatmaild.config import read_config
|
||||||
|
from pyinfra import facts, host, logger
|
||||||
|
from pyinfra.api import FactBase
|
||||||
|
from pyinfra.facts.files import Sha256File
|
||||||
|
from pyinfra.facts.systemd import SystemdEnabled
|
||||||
|
from pyinfra.operations import apt, files, pip, server, systemd
|
||||||
|
|
||||||
|
from cmdeploy.cmdeploy import Out
|
||||||
|
|
||||||
|
from .acmetool import AcmetoolDeployer
|
||||||
|
from .basedeploy import (
|
||||||
|
Deployer,
|
||||||
|
Deployment,
|
||||||
|
activate_remote_units,
|
||||||
|
configure_remote_units,
|
||||||
|
get_resource,
|
||||||
|
)
|
||||||
|
from .dovecot.deployer import DovecotDeployer
|
||||||
|
from .filtermail.deployer import FiltermailDeployer
|
||||||
|
from .mtail.deployer import MtailDeployer
|
||||||
|
from .nginx.deployer import NginxDeployer
|
||||||
|
from .opendkim.deployer import OpendkimDeployer
|
||||||
|
from .postfix.deployer import PostfixDeployer
|
||||||
|
from .www import build_webpages, find_merge_conflict, get_paths
|
||||||
|
|
||||||
|
|
||||||
|
class Port(FactBase):
|
||||||
|
"""
|
||||||
|
Returns the process occuping a port.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def command(self, port: int) -> str:
|
||||||
|
return (
|
||||||
|
"ss -lptn 'src :%d' | awk 'NR>1 {print $6,$7}' | sed 's/users:((\"//;s/\".*//'"
|
||||||
|
% (port,)
|
||||||
|
)
|
||||||
|
|
||||||
|
def process(self, output: [str]) -> str:
|
||||||
|
return output[0]
|
||||||
|
|
||||||
|
|
||||||
|
def _build_chatmaild(dist_dir) -> None:
|
||||||
|
dist_dir = Path(dist_dir).resolve()
|
||||||
|
if dist_dir.exists():
|
||||||
|
shutil.rmtree(dist_dir)
|
||||||
|
dist_dir.mkdir()
|
||||||
|
subprocess.check_output(
|
||||||
|
[sys.executable, "-m", "build", "-n"]
|
||||||
|
+ ["--sdist", "chatmaild", "--outdir", str(dist_dir)]
|
||||||
|
)
|
||||||
|
entries = list(dist_dir.iterdir())
|
||||||
|
assert len(entries) == 1
|
||||||
|
return entries[0]
|
||||||
|
|
||||||
|
|
||||||
|
def remove_legacy_artifacts():
|
||||||
|
# disable legacy doveauth-dictproxy.service
|
||||||
|
if host.get_fact(SystemdEnabled).get("doveauth-dictproxy.service"):
|
||||||
|
systemd.service(
|
||||||
|
name="Disable legacy doveauth-dictproxy.service",
|
||||||
|
service="doveauth-dictproxy.service",
|
||||||
|
running=False,
|
||||||
|
enabled=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _install_remote_venv_with_chatmaild() -> None:
|
||||||
|
remove_legacy_artifacts()
|
||||||
|
dist_file = _build_chatmaild(dist_dir=Path("chatmaild/dist"))
|
||||||
|
remote_base_dir = "/usr/local/lib/chatmaild"
|
||||||
|
remote_dist_file = f"{remote_base_dir}/dist/{dist_file.name}"
|
||||||
|
remote_venv_dir = f"{remote_base_dir}/venv"
|
||||||
|
root_owned = dict(user="root", group="root", mode="644")
|
||||||
|
|
||||||
|
apt.packages(
|
||||||
|
name="apt install python3-virtualenv",
|
||||||
|
packages=["python3-virtualenv"],
|
||||||
|
)
|
||||||
|
|
||||||
|
files.put(
|
||||||
|
name="Upload chatmaild source package",
|
||||||
|
src=dist_file.open("rb"),
|
||||||
|
dest=remote_dist_file,
|
||||||
|
create_remote_dir=True,
|
||||||
|
**root_owned,
|
||||||
|
)
|
||||||
|
|
||||||
|
pip.virtualenv(
|
||||||
|
name=f"chatmaild virtualenv {remote_venv_dir}",
|
||||||
|
path=remote_venv_dir,
|
||||||
|
always_copy=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
apt.packages(
|
||||||
|
name="install gcc and headers to build crypt_r source package",
|
||||||
|
packages=["gcc", "python3-dev"],
|
||||||
|
)
|
||||||
|
|
||||||
|
server.shell(
|
||||||
|
name=f"forced pip-install {dist_file.name}",
|
||||||
|
commands=[
|
||||||
|
f"{remote_venv_dir}/bin/pip install --force-reinstall {remote_dist_file}"
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _configure_remote_venv_with_chatmaild(config) -> None:
|
||||||
|
remote_base_dir = "/usr/local/lib/chatmaild"
|
||||||
|
remote_venv_dir = f"{remote_base_dir}/venv"
|
||||||
|
remote_chatmail_inipath = f"{remote_base_dir}/chatmail.ini"
|
||||||
|
root_owned = dict(user="root", group="root", mode="644")
|
||||||
|
|
||||||
|
files.put(
|
||||||
|
name=f"Upload {remote_chatmail_inipath}",
|
||||||
|
src=config._getbytefile(),
|
||||||
|
dest=remote_chatmail_inipath,
|
||||||
|
**root_owned,
|
||||||
|
)
|
||||||
|
|
||||||
|
files.template(
|
||||||
|
src=get_resource("metrics.cron.j2"),
|
||||||
|
dest="/etc/cron.d/chatmail-metrics",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
config={
|
||||||
|
"mailboxes_dir": config.mailboxes_dir,
|
||||||
|
"execpath": f"{remote_venv_dir}/bin/chatmail-metrics",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class UnboundDeployer(Deployer):
|
||||||
|
def __init__(self, config):
|
||||||
|
self.config = config
|
||||||
|
self.need_restart = False
|
||||||
|
|
||||||
|
def install(self):
|
||||||
|
# Run local DNS resolver `unbound`.
|
||||||
|
# `resolvconf` takes care of setting up /etc/resolv.conf
|
||||||
|
# to use 127.0.0.1 as the resolver.
|
||||||
|
|
||||||
|
#
|
||||||
|
# On an IPv4-only system, if unbound is started but not
|
||||||
|
# configured, it causes subsequent steps to fail to resolve hosts.
|
||||||
|
# Here, we use policy-rc.d to prevent unbound from starting up
|
||||||
|
# on initial install. Later, we will configure it and start it.
|
||||||
|
#
|
||||||
|
# For documentation about policy-rc.d, see:
|
||||||
|
# https://people.debian.org/~hmh/invokerc.d-policyrc.d-specification.txt
|
||||||
|
#
|
||||||
|
files.put(
|
||||||
|
src=get_resource("policy-rc.d"),
|
||||||
|
dest="/usr/sbin/policy-rc.d",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="755",
|
||||||
|
)
|
||||||
|
|
||||||
|
apt.packages(
|
||||||
|
name="Install unbound",
|
||||||
|
packages=["unbound", "unbound-anchor", "dnsutils"],
|
||||||
|
)
|
||||||
|
|
||||||
|
files.file("/usr/sbin/policy-rc.d", present=False)
|
||||||
|
|
||||||
|
def configure(self):
|
||||||
|
server.shell(
|
||||||
|
name="Generate root keys for validating DNSSEC",
|
||||||
|
commands=[
|
||||||
|
"unbound-anchor -a /var/lib/unbound/root.key || true",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
if self.config.disable_ipv6:
|
||||||
|
files.directory(
|
||||||
|
path="/etc/unbound/unbound.conf.d",
|
||||||
|
present=True,
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="755",
|
||||||
|
)
|
||||||
|
conf = files.put(
|
||||||
|
src=get_resource("unbound/unbound.conf.j2"),
|
||||||
|
dest="/etc/unbound/unbound.conf.d/chatmail.conf",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
conf = files.file(
|
||||||
|
path="/etc/unbound/unbound.conf.d/chatmail.conf",
|
||||||
|
present=False,
|
||||||
|
)
|
||||||
|
self.need_restart |= conf.changed
|
||||||
|
|
||||||
|
def activate(self):
|
||||||
|
server.shell(
|
||||||
|
name="Generate root keys for validating DNSSEC",
|
||||||
|
commands=[
|
||||||
|
"systemctl reset-failed unbound.service",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
systemd.service(
|
||||||
|
name="Start and enable unbound",
|
||||||
|
service="unbound.service",
|
||||||
|
running=True,
|
||||||
|
enabled=True,
|
||||||
|
restarted=self.need_restart,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class MtastsDeployer(Deployer):
|
||||||
|
def configure(self):
|
||||||
|
# Remove configuration.
|
||||||
|
files.file("/etc/mta-sts-daemon.yml", present=False)
|
||||||
|
files.directory("/usr/local/lib/postfix-mta-sts-resolver", present=False)
|
||||||
|
files.file("/etc/systemd/system/mta-sts-daemon.service", present=False)
|
||||||
|
|
||||||
|
def activate(self):
|
||||||
|
systemd.service(
|
||||||
|
name="Stop MTA-STS daemon",
|
||||||
|
service="mta-sts-daemon.service",
|
||||||
|
daemon_reload=True,
|
||||||
|
running=False,
|
||||||
|
enabled=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class WebsiteDeployer(Deployer):
|
||||||
|
def __init__(self, config):
|
||||||
|
self.config = config
|
||||||
|
|
||||||
|
def install(self):
|
||||||
|
files.directory(
|
||||||
|
name="Ensure /var/www exists",
|
||||||
|
path="/var/www",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="755",
|
||||||
|
present=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
def configure(self):
|
||||||
|
www_path, src_dir, build_dir = get_paths(self.config)
|
||||||
|
# if www_folder was set to a non-existing folder, skip upload
|
||||||
|
if not www_path.is_dir():
|
||||||
|
logger.warning("Building web pages is disabled in chatmail.ini, skipping")
|
||||||
|
elif (path := find_merge_conflict(src_dir)) is not None:
|
||||||
|
logger.warning(
|
||||||
|
f"Merge conflict found in {path}, skipping website deployment. Fix merge conflict if you want to upload your web page."
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
# if www_folder is a hugo page, build it
|
||||||
|
if build_dir:
|
||||||
|
www_path = build_webpages(src_dir, build_dir, self.config)
|
||||||
|
# if it is not a hugo page, upload it as is
|
||||||
|
files.rsync(
|
||||||
|
f"{www_path}/", "/var/www/html", flags=["-avz", "--chown=www-data"]
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class LegacyRemoveDeployer(Deployer):
|
||||||
|
def install(self):
|
||||||
|
apt.packages(name="Remove rspamd", packages="rspamd", present=False)
|
||||||
|
|
||||||
|
# remove historic expunge script
|
||||||
|
# which is now implemented through a systemd timer (chatmail-expire)
|
||||||
|
files.file(
|
||||||
|
path="/etc/cron.d/expunge",
|
||||||
|
present=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Remove OBS repository key that is no longer used.
|
||||||
|
files.file("/etc/apt/keyrings/obs-home-deltachat.gpg", present=False)
|
||||||
|
files.line(
|
||||||
|
name="Remove DeltaChat OBS home repository from sources.list",
|
||||||
|
path="/etc/apt/sources.list",
|
||||||
|
line="deb [signed-by=/etc/apt/keyrings/obs-home-deltachat.gpg] https://download.opensuse.org/repositories/home:/deltachat/Debian_12/ ./",
|
||||||
|
escape_regex_characters=True,
|
||||||
|
present=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
# prior relay versions used filelogging
|
||||||
|
files.directory(
|
||||||
|
name="Ensure old logs on disk are deleted",
|
||||||
|
path="/var/log/journal/",
|
||||||
|
present=False,
|
||||||
|
)
|
||||||
|
# remove echobot if it is still running
|
||||||
|
if host.get_fact(SystemdEnabled).get("echobot.service"):
|
||||||
|
systemd.service(
|
||||||
|
name="Disable echobot.service",
|
||||||
|
service="echobot.service",
|
||||||
|
running=False,
|
||||||
|
enabled=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def check_config(config):
|
||||||
|
mail_domain = config.mail_domain
|
||||||
|
if mail_domain != "testrun.org" and not mail_domain.endswith(".testrun.org"):
|
||||||
|
blocked_words = "merlinux schmieder testrun.org".split()
|
||||||
|
for key in config.__dict__:
|
||||||
|
value = config.__dict__[key]
|
||||||
|
if key.startswith("privacy") and any(
|
||||||
|
x in str(value) for x in blocked_words
|
||||||
|
):
|
||||||
|
raise ValueError(
|
||||||
|
f"please set your own privacy contacts/addresses in {config._inipath}"
|
||||||
|
)
|
||||||
|
return config
|
||||||
|
|
||||||
|
|
||||||
|
class TurnDeployer(Deployer):
|
||||||
|
def __init__(self, mail_domain):
|
||||||
|
self.mail_domain = mail_domain
|
||||||
|
self.units = ["turnserver"]
|
||||||
|
|
||||||
|
def install(self):
|
||||||
|
(url, sha256sum) = {
|
||||||
|
"x86_64": (
|
||||||
|
"https://github.com/chatmail/chatmail-turn/releases/download/v0.3/chatmail-turn-x86_64-linux",
|
||||||
|
"841e527c15fdc2940b0469e206188ea8f0af48533be12ecb8098520f813d41e4",
|
||||||
|
),
|
||||||
|
"aarch64": (
|
||||||
|
"https://github.com/chatmail/chatmail-turn/releases/download/v0.3/chatmail-turn-aarch64-linux",
|
||||||
|
"a5fc2d06d937b56a34e098d2cd72a82d3e89967518d159bf246dc69b65e81b42",
|
||||||
|
),
|
||||||
|
}[host.get_fact(facts.server.Arch)]
|
||||||
|
|
||||||
|
existing_sha256sum = host.get_fact(Sha256File, "/usr/local/bin/chatmail-turn")
|
||||||
|
if existing_sha256sum != sha256sum:
|
||||||
|
server.shell(
|
||||||
|
name="Download chatmail-turn",
|
||||||
|
commands=[
|
||||||
|
f"(curl -L {url} >/usr/local/bin/chatmail-turn.new && (echo '{sha256sum} /usr/local/bin/chatmail-turn.new' | sha256sum -c) && mv /usr/local/bin/chatmail-turn.new /usr/local/bin/chatmail-turn)",
|
||||||
|
"chmod 755 /usr/local/bin/chatmail-turn",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
def configure(self):
|
||||||
|
configure_remote_units(self.mail_domain, self.units)
|
||||||
|
|
||||||
|
def activate(self):
|
||||||
|
activate_remote_units(self.units)
|
||||||
|
|
||||||
|
|
||||||
|
class IrohDeployer(Deployer):
|
||||||
|
def __init__(self, enable_iroh_relay):
|
||||||
|
self.enable_iroh_relay = enable_iroh_relay
|
||||||
|
|
||||||
|
def install(self):
|
||||||
|
(url, sha256sum) = {
|
||||||
|
"x86_64": (
|
||||||
|
"https://github.com/n0-computer/iroh/releases/download/v0.35.0/iroh-relay-v0.35.0-x86_64-unknown-linux-musl.tar.gz",
|
||||||
|
"45c81199dbd70f8c4c30fef7f3b9727ca6e3cea8f2831333eeaf8aa71bf0fac1",
|
||||||
|
),
|
||||||
|
"aarch64": (
|
||||||
|
"https://github.com/n0-computer/iroh/releases/download/v0.35.0/iroh-relay-v0.35.0-aarch64-unknown-linux-musl.tar.gz",
|
||||||
|
"f8ef27631fac213b3ef668d02acd5b3e215292746a3fc71d90c63115446008b1",
|
||||||
|
),
|
||||||
|
}[host.get_fact(facts.server.Arch)]
|
||||||
|
|
||||||
|
existing_sha256sum = host.get_fact(Sha256File, "/usr/local/bin/iroh-relay")
|
||||||
|
if existing_sha256sum != sha256sum:
|
||||||
|
server.shell(
|
||||||
|
name="Download iroh-relay",
|
||||||
|
commands=[
|
||||||
|
f"(curl -L {url} | gunzip | tar -x -f - ./iroh-relay -O >/usr/local/bin/iroh-relay.new && (echo '{sha256sum} /usr/local/bin/iroh-relay.new' | sha256sum -c) && mv /usr/local/bin/iroh-relay.new /usr/local/bin/iroh-relay)",
|
||||||
|
"chmod 755 /usr/local/bin/iroh-relay",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
self.need_restart = True
|
||||||
|
|
||||||
|
def configure(self):
|
||||||
|
systemd_unit = files.put(
|
||||||
|
name="Upload iroh-relay systemd unit",
|
||||||
|
src=get_resource("iroh-relay.service"),
|
||||||
|
dest="/etc/systemd/system/iroh-relay.service",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
self.need_restart |= systemd_unit.changed
|
||||||
|
|
||||||
|
iroh_config = files.put(
|
||||||
|
name="Upload iroh-relay config",
|
||||||
|
src=get_resource("iroh-relay.toml"),
|
||||||
|
dest="/etc/iroh-relay.toml",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
self.need_restart |= iroh_config.changed
|
||||||
|
|
||||||
|
def activate(self):
|
||||||
|
systemd.service(
|
||||||
|
name="Start and enable iroh-relay",
|
||||||
|
service="iroh-relay.service",
|
||||||
|
running=True,
|
||||||
|
enabled=self.enable_iroh_relay,
|
||||||
|
restarted=self.need_restart,
|
||||||
|
)
|
||||||
|
self.need_restart = False
|
||||||
|
|
||||||
|
|
||||||
|
class JournaldDeployer(Deployer):
|
||||||
|
def configure(self):
|
||||||
|
journald_conf = files.put(
|
||||||
|
name="Configure journald",
|
||||||
|
src=get_resource("journald.conf"),
|
||||||
|
dest="/etc/systemd/journald.conf",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
self.need_restart = journald_conf.changed
|
||||||
|
|
||||||
|
def activate(self):
|
||||||
|
systemd.service(
|
||||||
|
name="Start and enable journald",
|
||||||
|
service="systemd-journald.service",
|
||||||
|
running=True,
|
||||||
|
enabled=True,
|
||||||
|
restarted=self.need_restart,
|
||||||
|
)
|
||||||
|
self.need_restart = False
|
||||||
|
|
||||||
|
|
||||||
|
class ChatmailVenvDeployer(Deployer):
|
||||||
|
def __init__(self, config):
|
||||||
|
self.config = config
|
||||||
|
self.units = (
|
||||||
|
"chatmail-metadata",
|
||||||
|
"lastlogin",
|
||||||
|
"chatmail-expire",
|
||||||
|
"chatmail-expire.timer",
|
||||||
|
"chatmail-fsreport",
|
||||||
|
"chatmail-fsreport.timer",
|
||||||
|
)
|
||||||
|
|
||||||
|
def install(self):
|
||||||
|
_install_remote_venv_with_chatmaild()
|
||||||
|
|
||||||
|
def configure(self):
|
||||||
|
_configure_remote_venv_with_chatmaild(self.config)
|
||||||
|
configure_remote_units(self.config.mail_domain, self.units)
|
||||||
|
|
||||||
|
def activate(self):
|
||||||
|
activate_remote_units(self.units)
|
||||||
|
|
||||||
|
|
||||||
|
class ChatmailDeployer(Deployer):
|
||||||
|
required_users = [
|
||||||
|
("vmail", "vmail", None),
|
||||||
|
("iroh", None, None),
|
||||||
|
]
|
||||||
|
|
||||||
|
def __init__(self, mail_domain):
|
||||||
|
self.mail_domain = mail_domain
|
||||||
|
|
||||||
|
def install(self):
|
||||||
|
apt.update(name="apt update", cache_time=24 * 3600)
|
||||||
|
apt.upgrade(name="upgrade apt packages", auto_remove=True)
|
||||||
|
|
||||||
|
apt.packages(
|
||||||
|
name="Install curl",
|
||||||
|
packages=["curl"],
|
||||||
|
)
|
||||||
|
|
||||||
|
apt.packages(
|
||||||
|
name="Install rsync",
|
||||||
|
packages=["rsync"],
|
||||||
|
)
|
||||||
|
apt.packages(
|
||||||
|
name="Ensure cron is installed",
|
||||||
|
packages=["cron"],
|
||||||
|
)
|
||||||
|
|
||||||
|
def configure(self):
|
||||||
|
# This file is used by auth proxy.
|
||||||
|
# https://wiki.debian.org/EtcMailName
|
||||||
|
server.shell(
|
||||||
|
name="Setup /etc/mailname",
|
||||||
|
commands=[
|
||||||
|
f"echo {self.mail_domain} >/etc/mailname; chmod 644 /etc/mailname"
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class FcgiwrapDeployer(Deployer):
|
||||||
|
def install(self):
|
||||||
|
apt.packages(
|
||||||
|
name="Install fcgiwrap",
|
||||||
|
packages=["fcgiwrap"],
|
||||||
|
)
|
||||||
|
|
||||||
|
def activate(self):
|
||||||
|
systemd.service(
|
||||||
|
name="Start and enable fcgiwrap",
|
||||||
|
service="fcgiwrap.service",
|
||||||
|
running=True,
|
||||||
|
enabled=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class GithashDeployer(Deployer):
|
||||||
|
def activate(self):
|
||||||
|
try:
|
||||||
|
git_hash = subprocess.check_output(["git", "rev-parse", "HEAD"]).decode()
|
||||||
|
except Exception:
|
||||||
|
git_hash = "unknown\n"
|
||||||
|
try:
|
||||||
|
git_diff = subprocess.check_output(["git", "diff"]).decode()
|
||||||
|
except Exception:
|
||||||
|
git_diff = ""
|
||||||
|
files.put(
|
||||||
|
name="Upload chatmail relay git commit hash",
|
||||||
|
src=StringIO(git_hash + git_diff),
|
||||||
|
dest="/etc/chatmail-version",
|
||||||
|
mode="700",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def deploy_chatmail(config_path: Path, disable_mail: bool, website_only: bool) -> None:
|
||||||
|
"""Deploy a chat-mail instance.
|
||||||
|
|
||||||
|
:param config_path: path to chatmail.ini
|
||||||
|
:param disable_mail: whether to disable postfix & dovecot
|
||||||
|
:param website_only: if True, only deploy the website
|
||||||
|
"""
|
||||||
|
config = read_config(config_path)
|
||||||
|
check_config(config)
|
||||||
|
mail_domain = config.mail_domain
|
||||||
|
|
||||||
|
if website_only:
|
||||||
|
Deployment().perform_stages([WebsiteDeployer(config)])
|
||||||
|
return
|
||||||
|
|
||||||
|
if host.get_fact(Port, port=53) != "unbound":
|
||||||
|
files.line(
|
||||||
|
name="Add 9.9.9.9 to resolv.conf",
|
||||||
|
path="/etc/resolv.conf",
|
||||||
|
# Guard against resolv.conf missing a trailing newline (SolusVM bug).
|
||||||
|
line="\nnameserver 9.9.9.9",
|
||||||
|
)
|
||||||
|
|
||||||
|
port_services = [
|
||||||
|
(["master", "smtpd"], 25),
|
||||||
|
("unbound", 53),
|
||||||
|
("acmetool", 80),
|
||||||
|
(["imap-login", "dovecot"], 143),
|
||||||
|
("nginx", 443),
|
||||||
|
(["master", "smtpd"], 465),
|
||||||
|
(["master", "smtpd"], 587),
|
||||||
|
(["imap-login", "dovecot"], 993),
|
||||||
|
("iroh-relay", 3340),
|
||||||
|
("mtail", 3903),
|
||||||
|
("dovecot-stats", 3904),
|
||||||
|
("nginx", 8443),
|
||||||
|
(["master", "smtpd"], config.postfix_reinject_port),
|
||||||
|
(["master", "smtpd"], config.postfix_reinject_port_incoming),
|
||||||
|
("filtermail", config.filtermail_smtp_port),
|
||||||
|
("filtermail", config.filtermail_smtp_port_incoming),
|
||||||
|
]
|
||||||
|
for service, port in port_services:
|
||||||
|
print(f"Checking if port {port} is available for {service}...")
|
||||||
|
running_service = host.get_fact(Port, port=port)
|
||||||
|
if running_service:
|
||||||
|
if running_service not in service:
|
||||||
|
Out().red(
|
||||||
|
f"Deploy failed: port {port} is occupied by: {running_service}"
|
||||||
|
)
|
||||||
|
exit(1)
|
||||||
|
|
||||||
|
tls_domains = [mail_domain, f"mta-sts.{mail_domain}", f"www.{mail_domain}"]
|
||||||
|
|
||||||
|
all_deployers = [
|
||||||
|
ChatmailDeployer(mail_domain),
|
||||||
|
LegacyRemoveDeployer(),
|
||||||
|
FiltermailDeployer(),
|
||||||
|
JournaldDeployer(),
|
||||||
|
UnboundDeployer(config),
|
||||||
|
TurnDeployer(mail_domain),
|
||||||
|
IrohDeployer(config.enable_iroh_relay),
|
||||||
|
AcmetoolDeployer(config.acme_email, tls_domains),
|
||||||
|
WebsiteDeployer(config),
|
||||||
|
ChatmailVenvDeployer(config),
|
||||||
|
MtastsDeployer(),
|
||||||
|
OpendkimDeployer(mail_domain),
|
||||||
|
# Dovecot should be started before Postfix
|
||||||
|
# because it creates authentication socket
|
||||||
|
# required by Postfix.
|
||||||
|
DovecotDeployer(config, disable_mail),
|
||||||
|
PostfixDeployer(config, disable_mail),
|
||||||
|
FcgiwrapDeployer(),
|
||||||
|
NginxDeployer(config),
|
||||||
|
MtailDeployer(config.mtail_address),
|
||||||
|
GithashDeployer(),
|
||||||
|
]
|
||||||
|
|
||||||
|
Deployment().perform_stages(all_deployers)
|
||||||
@@ -45,7 +45,8 @@ def check_full_zone(sshexec, remote_data, out, zonefile) -> int:
|
|||||||
and return (exitcode, remote_data) tuple."""
|
and return (exitcode, remote_data) tuple."""
|
||||||
|
|
||||||
required_diff, recommended_diff = sshexec.logged(
|
required_diff, recommended_diff = sshexec.logged(
|
||||||
remote.rdns.check_zonefile, kwargs=dict(zonefile=zonefile, verbose=False),
|
remote.rdns.check_zonefile,
|
||||||
|
kwargs=dict(zonefile=zonefile, verbose=False),
|
||||||
)
|
)
|
||||||
|
|
||||||
returncode = 0
|
returncode = 0
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ iterate_prefix = userdb/
|
|||||||
|
|
||||||
default_pass_scheme = plain
|
default_pass_scheme = plain
|
||||||
# %E escapes characters " (double quote), ' (single quote) and \ (backslash) with \ (backslash).
|
# %E escapes characters " (double quote), ' (single quote) and \ (backslash) with \ (backslash).
|
||||||
# See <https://doc.dovecot.org/configuration_manual/config_file/config_variables/#modifiers>
|
# See <https://doc.dovecot.org/2.3/configuration_manual/config_file/config_variables/#modifiers>
|
||||||
# for documentation.
|
# for documentation.
|
||||||
#
|
#
|
||||||
# We escape user-provided input and use double quote as a separator.
|
# We escape user-provided input and use double quote as a separator.
|
||||||
|
|||||||
153
cmdeploy/src/cmdeploy/dovecot/deployer.py
Normal file
153
cmdeploy/src/cmdeploy/dovecot/deployer.py
Normal file
@@ -0,0 +1,153 @@
|
|||||||
|
from chatmaild.config import Config
|
||||||
|
from pyinfra import host
|
||||||
|
from pyinfra.facts.server import Arch, Sysctl
|
||||||
|
from pyinfra.facts.systemd import SystemdEnabled
|
||||||
|
from pyinfra.operations import apt, files, server, systemd
|
||||||
|
|
||||||
|
from cmdeploy.basedeploy import (
|
||||||
|
Deployer,
|
||||||
|
activate_remote_units,
|
||||||
|
configure_remote_units,
|
||||||
|
get_resource,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class DovecotDeployer(Deployer):
|
||||||
|
daemon_reload = False
|
||||||
|
|
||||||
|
def __init__(self, config, disable_mail):
|
||||||
|
self.config = config
|
||||||
|
self.disable_mail = disable_mail
|
||||||
|
self.units = ["doveauth"]
|
||||||
|
|
||||||
|
def install(self):
|
||||||
|
arch = host.get_fact(Arch)
|
||||||
|
if not "dovecot.service" in host.get_fact(SystemdEnabled):
|
||||||
|
_install_dovecot_package("core", arch)
|
||||||
|
_install_dovecot_package("imapd", arch)
|
||||||
|
_install_dovecot_package("lmtpd", arch)
|
||||||
|
|
||||||
|
def configure(self):
|
||||||
|
configure_remote_units(self.config.mail_domain, self.units)
|
||||||
|
self.need_restart, self.daemon_reload = _configure_dovecot(self.config)
|
||||||
|
|
||||||
|
def activate(self):
|
||||||
|
activate_remote_units(self.units)
|
||||||
|
|
||||||
|
restart = False if self.disable_mail else self.need_restart
|
||||||
|
|
||||||
|
systemd.service(
|
||||||
|
name="Disable dovecot for now" if self.disable_mail else "Start and enable Dovecot",
|
||||||
|
service="dovecot.service",
|
||||||
|
running=False if self.disable_mail else True,
|
||||||
|
enabled=False if self.disable_mail else True,
|
||||||
|
restarted=restart,
|
||||||
|
daemon_reload=self.daemon_reload,
|
||||||
|
)
|
||||||
|
self.need_restart = False
|
||||||
|
|
||||||
|
|
||||||
|
def _install_dovecot_package(package: str, arch: str):
|
||||||
|
arch = "amd64" if arch == "x86_64" else arch
|
||||||
|
arch = "arm64" if arch == "aarch64" else arch
|
||||||
|
url = f"https://download.delta.chat/dovecot/dovecot-{package}_2.3.21%2Bdfsg1-3_{arch}.deb"
|
||||||
|
deb_filename = "/root/" + url.split("/")[-1]
|
||||||
|
|
||||||
|
match (package, arch):
|
||||||
|
case ("core", "amd64"):
|
||||||
|
sha256 = "dd060706f52a306fa863d874717210b9fe10536c824afe1790eec247ded5b27d"
|
||||||
|
case ("core", "arm64"):
|
||||||
|
sha256 = "e7548e8a82929722e973629ecc40fcfa886894cef3db88f23535149e7f730dc9"
|
||||||
|
case ("imapd", "amd64"):
|
||||||
|
sha256 = "8d8dc6fc00bbb6cdb25d345844f41ce2f1c53f764b79a838eb2a03103eebfa86"
|
||||||
|
case ("imapd", "arm64"):
|
||||||
|
sha256 = "178fa877ddd5df9930e8308b518f4b07df10e759050725f8217a0c1fb3fd707f"
|
||||||
|
case ("lmtpd", "amd64"):
|
||||||
|
sha256 = "2f69ba5e35363de50962d42cccbfe4ed8495265044e244007d7ccddad77513ab"
|
||||||
|
case ("lmtpd", "arm64"):
|
||||||
|
sha256 = "89f52fb36524f5877a177dff4a713ba771fd3f91f22ed0af7238d495e143b38f"
|
||||||
|
case _:
|
||||||
|
apt.packages(packages=[f"dovecot-{package}"])
|
||||||
|
return
|
||||||
|
|
||||||
|
files.download(
|
||||||
|
name=f"Download dovecot-{package}",
|
||||||
|
src=url,
|
||||||
|
dest=deb_filename,
|
||||||
|
sha256sum=sha256,
|
||||||
|
cache_time=60 * 60 * 24 * 365 * 10, # never redownload the package
|
||||||
|
)
|
||||||
|
|
||||||
|
apt.deb(name=f"Install dovecot-{package}", src=deb_filename)
|
||||||
|
|
||||||
|
|
||||||
|
def _configure_dovecot(config: Config, debug: bool = False) -> (bool, bool):
|
||||||
|
"""Configures Dovecot IMAP server."""
|
||||||
|
need_restart = False
|
||||||
|
daemon_reload = False
|
||||||
|
|
||||||
|
main_config = files.template(
|
||||||
|
src=get_resource("dovecot/dovecot.conf.j2"),
|
||||||
|
dest="/etc/dovecot/dovecot.conf",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
config=config,
|
||||||
|
debug=debug,
|
||||||
|
disable_ipv6=config.disable_ipv6,
|
||||||
|
)
|
||||||
|
need_restart |= main_config.changed
|
||||||
|
auth_config = files.put(
|
||||||
|
src=get_resource("dovecot/auth.conf"),
|
||||||
|
dest="/etc/dovecot/auth.conf",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
need_restart |= auth_config.changed
|
||||||
|
lua_push_notification_script = files.put(
|
||||||
|
src=get_resource("dovecot/push_notification.lua"),
|
||||||
|
dest="/etc/dovecot/push_notification.lua",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
need_restart |= lua_push_notification_script.changed
|
||||||
|
|
||||||
|
# as per https://doc.dovecot.org/2.3/configuration_manual/os/
|
||||||
|
# it is recommended to set the following inotify limits
|
||||||
|
for name in ("max_user_instances", "max_user_watches"):
|
||||||
|
key = f"fs.inotify.{name}"
|
||||||
|
if host.get_fact(Sysctl)[key] > 65535:
|
||||||
|
# Skip updating limits if already sufficient
|
||||||
|
# (enables running in incus containers where sysctl readonly)
|
||||||
|
continue
|
||||||
|
server.sysctl(
|
||||||
|
name=f"Change {key}",
|
||||||
|
key=key,
|
||||||
|
value=65535,
|
||||||
|
persist=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
timezone_env = files.line(
|
||||||
|
name="Set TZ environment variable",
|
||||||
|
path="/etc/environment",
|
||||||
|
line="TZ=:/etc/localtime",
|
||||||
|
)
|
||||||
|
need_restart |= timezone_env.changed
|
||||||
|
|
||||||
|
restart_conf = files.put(
|
||||||
|
name="dovecot: restart automatically on failure",
|
||||||
|
src=get_resource("service/10_restart.conf"),
|
||||||
|
dest="/etc/systemd/system/dovecot.service.d/10_restart.conf",
|
||||||
|
)
|
||||||
|
daemon_reload |= restart_conf.changed
|
||||||
|
|
||||||
|
# Validate dovecot configuration before restart
|
||||||
|
if need_restart:
|
||||||
|
server.shell(
|
||||||
|
name="Validate dovecot configuration",
|
||||||
|
commands=["doveconf -n >/dev/null"],
|
||||||
|
)
|
||||||
|
|
||||||
|
return need_restart, daemon_reload
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
## Dovecot configuration file
|
## Dovecot configuration file
|
||||||
|
|
||||||
{% if disable_ipv6 %}
|
{% if disable_ipv6 %}
|
||||||
listen = *
|
listen = 0.0.0.0
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
protocols = imap lmtp
|
protocols = imap lmtp
|
||||||
@@ -26,7 +26,7 @@ default_client_limit = 20000
|
|||||||
# Increase number of logged in IMAP connections.
|
# Increase number of logged in IMAP connections.
|
||||||
# Each connection is handled by a separate `imap` process.
|
# Each connection is handled by a separate `imap` process.
|
||||||
# `imap` process should have `client_limit=1` as described in
|
# `imap` process should have `client_limit=1` as described in
|
||||||
# <https://doc.dovecot.org/configuration_manual/service_configuration/#service-limits>
|
# <https://doc.dovecot.org/2.3/configuration_manual/service_configuration/#service-limits>
|
||||||
# so each logged in IMAP session will need its own `imap` process.
|
# so each logged in IMAP session will need its own `imap` process.
|
||||||
#
|
#
|
||||||
# If this limit is reached,
|
# If this limit is reached,
|
||||||
@@ -44,11 +44,11 @@ mail_server_comment = Chatmail server
|
|||||||
|
|
||||||
# `zlib` enables compressing messages stored in the maildir.
|
# `zlib` enables compressing messages stored in the maildir.
|
||||||
# See
|
# See
|
||||||
# <https://doc.dovecot.org/configuration_manual/zlib_plugin/>
|
# <https://doc.dovecot.org/2.3/configuration_manual/zlib_plugin/>
|
||||||
# for documentation.
|
# for documentation.
|
||||||
#
|
#
|
||||||
# quota plugin documentation:
|
# quota plugin documentation:
|
||||||
# <https://doc.dovecot.org/configuration_manual/quota_plugin/>
|
# <https://doc.dovecot.org/2.3/configuration_manual/quota_plugin/>
|
||||||
mail_plugins = zlib quota
|
mail_plugins = zlib quota
|
||||||
|
|
||||||
imap_capability = +XDELTAPUSH XCHATMAIL
|
imap_capability = +XDELTAPUSH XCHATMAIL
|
||||||
@@ -68,7 +68,11 @@ userdb {
|
|||||||
##
|
##
|
||||||
|
|
||||||
# Mailboxes are stored in the "mail" directory of the vmail user home.
|
# Mailboxes are stored in the "mail" directory of the vmail user home.
|
||||||
|
{% if config.tmpfs_index %}
|
||||||
|
mail_location = maildir:{{ config.mailboxes_dir }}/%u:INDEX=/dev/shm/%u
|
||||||
|
{% else %}
|
||||||
mail_location = maildir:{{ config.mailboxes_dir }}/%u
|
mail_location = maildir:{{ config.mailboxes_dir }}/%u
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
namespace inbox {
|
namespace inbox {
|
||||||
inbox = yes
|
inbox = yes
|
||||||
@@ -107,7 +111,7 @@ mail_attribute_dict = proxy:/run/chatmail-metadata/metadata.socket:metadata
|
|||||||
# `imap_zlib` enables IMAP COMPRESS (RFC 4978).
|
# `imap_zlib` enables IMAP COMPRESS (RFC 4978).
|
||||||
# <https://datatracker.ietf.org/doc/html/rfc4978.html>
|
# <https://datatracker.ietf.org/doc/html/rfc4978.html>
|
||||||
protocol imap {
|
protocol imap {
|
||||||
mail_plugins = $mail_plugins imap_zlib imap_quota last_login
|
mail_plugins = $mail_plugins imap_quota last_login {% if config.imap_compress %}imap_zlib{% endif %}
|
||||||
imap_metadata = yes
|
imap_metadata = yes
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -119,13 +123,13 @@ plugin {
|
|||||||
|
|
||||||
protocol lmtp {
|
protocol lmtp {
|
||||||
# notify plugin is a dependency of push_notification plugin:
|
# notify plugin is a dependency of push_notification plugin:
|
||||||
# <https://doc.dovecot.org/settings/plugin/notify-plugin/>
|
# <https://doc.dovecot.org/2.3/settings/plugin/notify-plugin/>
|
||||||
#
|
#
|
||||||
# push_notification plugin documentation:
|
# push_notification plugin documentation:
|
||||||
# <https://doc.dovecot.org/configuration_manual/push_notification/>
|
# <https://doc.dovecot.org/2.3/configuration_manual/push_notification/>
|
||||||
#
|
#
|
||||||
# mail_lua and push_notification_lua are needed for Lua push notification handler.
|
# mail_lua and push_notification_lua are needed for Lua push notification handler.
|
||||||
# <https://doc.dovecot.org/configuration_manual/push_notification/#configuration>
|
# <https://doc.dovecot.org/2.3/configuration_manual/push_notification/#configuration>
|
||||||
mail_plugins = $mail_plugins mail_lua notify push_notification push_notification_lua
|
mail_plugins = $mail_plugins mail_lua notify push_notification push_notification_lua
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -148,7 +152,7 @@ plugin {
|
|||||||
|
|
||||||
# push_notification configuration
|
# push_notification configuration
|
||||||
plugin {
|
plugin {
|
||||||
# <https://doc.dovecot.org/configuration_manual/push_notification/#lua-lua>
|
# <https://doc.dovecot.org/2.3/configuration_manual/push_notification/#lua-lua>
|
||||||
push_notification_driver = lua:file=/etc/dovecot/push_notification.lua
|
push_notification_driver = lua:file=/etc/dovecot/push_notification.lua
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -162,6 +166,8 @@ service lmtp {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
lmtp_add_received_header = no
|
||||||
|
|
||||||
service auth {
|
service auth {
|
||||||
unix_listener /var/spool/postfix/private/auth {
|
unix_listener /var/spool/postfix/private/auth {
|
||||||
mode = 0660
|
mode = 0660
|
||||||
@@ -246,3 +252,181 @@ protocol imap {
|
|||||||
rawlog_dir = %h
|
rawlog_dir = %h
|
||||||
}
|
}
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
|
{% if not config.imap_compress %}
|
||||||
|
# Hibernate IDLE users to save memory and CPU resources
|
||||||
|
# NOTE: this will have no effect if imap_zlib plugin is used
|
||||||
|
imap_hibernate_timeout = 30s
|
||||||
|
service imap {
|
||||||
|
# Note that this change will allow any process running as
|
||||||
|
# $default_internal_user (dovecot) to access mails as any other user.
|
||||||
|
# This may be insecure in some installations, which is why this isn't
|
||||||
|
# done by default.
|
||||||
|
unix_listener imap-master {
|
||||||
|
user = $default_internal_user
|
||||||
|
}
|
||||||
|
}
|
||||||
|
# The following is the default already in v2.3.1+:
|
||||||
|
service imap {
|
||||||
|
extra_groups = $default_internal_group
|
||||||
|
}
|
||||||
|
service imap-hibernate {
|
||||||
|
unix_listener imap-hibernate {
|
||||||
|
mode = 0660
|
||||||
|
group = $default_internal_group
|
||||||
|
}
|
||||||
|
}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if config.mtail_address %}
|
||||||
|
#
|
||||||
|
# Dovecot Statistics
|
||||||
|
#
|
||||||
|
# OpenMetrics endpoint at http://{{- config.mtail_address}}:3904/metrics
|
||||||
|
service stats {
|
||||||
|
inet_listener http {
|
||||||
|
port = 3904
|
||||||
|
address = {{- config.mtail_address}}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# IMAP Command Metrics
|
||||||
|
# - Bytes in/out for compression efficiency analysis
|
||||||
|
# - Lock wait time for contention debugging
|
||||||
|
# - Grouped by command name and reply state
|
||||||
|
metric imap_command {
|
||||||
|
filter = event=imap_command_finished
|
||||||
|
fields = bytes_in bytes_out lock_wait_usecs running_usecs
|
||||||
|
group_by = cmd_name tagged_reply_state
|
||||||
|
}
|
||||||
|
|
||||||
|
# Duration buckets for latency histograms (base 10: 10us, 100us, 1ms, 10ms, 100ms, 1s, 10s, 100s)
|
||||||
|
metric imap_command_duration {
|
||||||
|
filter = event=imap_command_finished
|
||||||
|
group_by = cmd_name duration:exponential:1:8:10
|
||||||
|
}
|
||||||
|
|
||||||
|
# Slow command outliers (>1 second = 1000000 usecs)
|
||||||
|
# Useful for alerting without high cardinality
|
||||||
|
metric imap_command_slow {
|
||||||
|
filter = event=imap_command_finished AND duration>1000000 AND NOT cmd_name=IDLE
|
||||||
|
group_by = cmd_name
|
||||||
|
}
|
||||||
|
|
||||||
|
# IDLE-specific Metrics
|
||||||
|
|
||||||
|
metric imap_idle {
|
||||||
|
filter = event=imap_command_finished AND cmd_name=IDLE
|
||||||
|
fields = bytes_in bytes_out running_usecs
|
||||||
|
group_by = tagged_reply_state
|
||||||
|
}
|
||||||
|
|
||||||
|
metric imap_idle_duration {
|
||||||
|
filter = event=imap_command_finished AND cmd_name=IDLE
|
||||||
|
# Base 10: 100ms to 27h (covers short wakeups to long idle sessions)
|
||||||
|
group_by = duration:exponential:5:11:10
|
||||||
|
}
|
||||||
|
|
||||||
|
metric imap_idle_commands {
|
||||||
|
filter = event=imap_command_finished AND cmd_name=IDLE
|
||||||
|
group_by = tagged_reply_state
|
||||||
|
}
|
||||||
|
|
||||||
|
metric imap_idle_failed {
|
||||||
|
filter = event=imap_command_finished AND cmd_name=IDLE AND NOT tagged_reply_state=OK
|
||||||
|
}
|
||||||
|
|
||||||
|
# Hibernation Metrics (requires imap_hibernate_timeout)
|
||||||
|
|
||||||
|
metric imap_hibernated {
|
||||||
|
filter = event=imap_client_hibernated
|
||||||
|
}
|
||||||
|
|
||||||
|
metric imap_hibernated_failed {
|
||||||
|
filter = event=imap_client_hibernated AND error=*
|
||||||
|
}
|
||||||
|
|
||||||
|
metric imap_unhibernated {
|
||||||
|
filter = event=imap_client_unhibernated
|
||||||
|
fields = hibernation_usecs
|
||||||
|
}
|
||||||
|
|
||||||
|
metric imap_unhibernated_reason {
|
||||||
|
filter = event=imap_client_unhibernated
|
||||||
|
group_by = reason
|
||||||
|
fields = hibernation_usecs
|
||||||
|
}
|
||||||
|
|
||||||
|
metric imap_unhibernated_reason_sleep {
|
||||||
|
filter = event=imap_client_unhibernated
|
||||||
|
group_by = reason hibernation_usecs:exponential:4:8:10
|
||||||
|
}
|
||||||
|
|
||||||
|
metric imap_unhibernated_failed {
|
||||||
|
filter = event=imap_client_unhibernated AND error=*
|
||||||
|
}
|
||||||
|
|
||||||
|
# Hibernation duration buckets (how long clients stayed hibernated)
|
||||||
|
# Base 10: 100ms to 27h
|
||||||
|
metric imap_hibernation_duration {
|
||||||
|
filter = event=imap_client_unhibernated
|
||||||
|
group_by = reason duration:exponential:5:11:10
|
||||||
|
}
|
||||||
|
|
||||||
|
# Authentication / Login Metrics
|
||||||
|
|
||||||
|
metric auth_request {
|
||||||
|
filter = event=auth_request_finished
|
||||||
|
group_by = success
|
||||||
|
}
|
||||||
|
|
||||||
|
metric auth_request_duration {
|
||||||
|
filter = event=auth_request_finished
|
||||||
|
group_by = success duration:exponential:2:6:10
|
||||||
|
}
|
||||||
|
|
||||||
|
metric auth_failed {
|
||||||
|
filter = event=auth_request_finished AND success=no
|
||||||
|
}
|
||||||
|
|
||||||
|
# Passdb cache effectiveness
|
||||||
|
metric auth_passdb {
|
||||||
|
filter = event=auth_passdb_request_finished
|
||||||
|
group_by = result cache
|
||||||
|
}
|
||||||
|
|
||||||
|
# Master login (post-auth userdb lookup)
|
||||||
|
metric auth_master_login {
|
||||||
|
filter = event=auth_master_client_login_finished
|
||||||
|
}
|
||||||
|
|
||||||
|
metric auth_master_login_failed {
|
||||||
|
filter = event=auth_master_client_login_finished AND error=*
|
||||||
|
}
|
||||||
|
|
||||||
|
# Mail Delivery (LMTP) - affects IDLE wakeup latency
|
||||||
|
|
||||||
|
metric mail_delivery {
|
||||||
|
filter = event=mail_delivery_finished
|
||||||
|
}
|
||||||
|
|
||||||
|
metric mail_delivery_duration {
|
||||||
|
filter = event=mail_delivery_finished
|
||||||
|
group_by = duration:exponential:3:7:10
|
||||||
|
}
|
||||||
|
|
||||||
|
metric mail_delivery_failed {
|
||||||
|
filter = event=mail_delivery_finished AND error=*
|
||||||
|
}
|
||||||
|
|
||||||
|
# Connection Events
|
||||||
|
|
||||||
|
metric client_connected {
|
||||||
|
filter = event=client_connection_connected AND category="service:imap"
|
||||||
|
}
|
||||||
|
|
||||||
|
metric client_disconnected {
|
||||||
|
filter = event=client_connection_disconnected AND category="service:imap"
|
||||||
|
fields = bytes_in bytes_out
|
||||||
|
}
|
||||||
|
{% endif %}
|
||||||
|
|||||||
@@ -1,14 +0,0 @@
|
|||||||
# delete already seen big mails after 7 days, in the INBOX
|
|
||||||
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/cur/*' -mtime +{{ config.delete_large_after }} -size +200k -type f -delete
|
|
||||||
# delete all mails after {{ config.delete_mails_after }} days, in the Inbox
|
|
||||||
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/cur/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
|
||||||
# or in any IMAP subfolder
|
|
||||||
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/.*/cur/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
|
||||||
# even if they are unseen
|
|
||||||
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/new/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
|
||||||
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/.*/new/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
|
||||||
# or only temporary (but then they shouldn't be around after {{ config.delete_mails_after }} days anyway).
|
|
||||||
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/tmp/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
|
||||||
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/.*/tmp/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
|
||||||
3 0 * * * vmail find {{ config.mailboxes_dir }} -name 'maildirsize' -type f -delete
|
|
||||||
4 0 * * * vmail /usr/local/lib/chatmaild/venv/bin/delete_inactive_users /usr/local/lib/chatmaild/chatmail.ini
|
|
||||||
52
cmdeploy/src/cmdeploy/filtermail/deployer.py
Normal file
52
cmdeploy/src/cmdeploy/filtermail/deployer.py
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
from pyinfra import facts, host
|
||||||
|
from pyinfra.operations import files, systemd
|
||||||
|
|
||||||
|
from cmdeploy.basedeploy import Deployer, get_resource
|
||||||
|
|
||||||
|
|
||||||
|
class FiltermailDeployer(Deployer):
|
||||||
|
services = ["filtermail", "filtermail-incoming"]
|
||||||
|
bin_path = "/usr/local/bin/filtermail"
|
||||||
|
config_path = "/usr/local/lib/chatmaild/chatmail.ini"
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.need_restart = False
|
||||||
|
|
||||||
|
def install(self):
|
||||||
|
arch = host.get_fact(facts.server.Arch)
|
||||||
|
url = f"https://github.com/chatmail/filtermail/releases/download/v0.2.0/filtermail-{arch}-musl"
|
||||||
|
sha256sum = {
|
||||||
|
"x86_64": "1e5bbb646582cb16740c6dfbbca39edba492b78cc96ec9fa2528c612bb504edd",
|
||||||
|
"aarch64": "3564fba8605f8f9adfeefff3f4580533205da043f47c5968d0d10db17e50f44e",
|
||||||
|
}[arch]
|
||||||
|
self.need_restart |= files.download(
|
||||||
|
name="Download filtermail",
|
||||||
|
src=url,
|
||||||
|
sha256sum=sha256sum,
|
||||||
|
dest=self.bin_path,
|
||||||
|
mode="755",
|
||||||
|
).changed
|
||||||
|
|
||||||
|
def configure(self):
|
||||||
|
for service in self.services:
|
||||||
|
self.need_restart |= files.template(
|
||||||
|
src=get_resource(f"filtermail/{service}.service.j2"),
|
||||||
|
dest=f"/etc/systemd/system/{service}.service",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
bin_path=self.bin_path,
|
||||||
|
config_path=self.config_path,
|
||||||
|
).changed
|
||||||
|
|
||||||
|
def activate(self):
|
||||||
|
for service in self.services:
|
||||||
|
systemd.service(
|
||||||
|
name=f"Start and enable {service}",
|
||||||
|
service=f"{service}.service",
|
||||||
|
running=True,
|
||||||
|
enabled=True,
|
||||||
|
restarted=self.need_restart,
|
||||||
|
daemon_reload=True,
|
||||||
|
)
|
||||||
|
self.need_restart = False
|
||||||
@@ -2,11 +2,10 @@
|
|||||||
Description=Incoming Chatmail Postfix before queue filter
|
Description=Incoming Chatmail Postfix before queue filter
|
||||||
|
|
||||||
[Service]
|
[Service]
|
||||||
ExecStart={execpath} {config_path} incoming
|
ExecStart={{ bin_path }} {{ config_path }} incoming
|
||||||
Restart=always
|
Restart=always
|
||||||
RestartSec=30
|
RestartSec=30
|
||||||
User=vmail
|
User=vmail
|
||||||
|
|
||||||
[Install]
|
[Install]
|
||||||
WantedBy=multi-user.target
|
WantedBy=multi-user.target
|
||||||
|
|
||||||
@@ -2,7 +2,7 @@
|
|||||||
Description=Outgoing Chatmail Postfix before queue filter
|
Description=Outgoing Chatmail Postfix before queue filter
|
||||||
|
|
||||||
[Service]
|
[Service]
|
||||||
ExecStart={execpath} {config_path} outgoing
|
ExecStart={{ bin_path }} {{ config_path }} outgoing
|
||||||
Restart=always
|
Restart=always
|
||||||
RestartSec=30
|
RestartSec=30
|
||||||
User=vmail
|
User=vmail
|
||||||
@@ -44,21 +44,37 @@ counter warning_count
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
counter filtered_mail_count
|
counter filtered_outgoing_mail_count
|
||||||
|
|
||||||
counter encrypted_mail_count
|
counter outgoing_encrypted_mail_count
|
||||||
/Filtering encrypted mail\./ {
|
/Outgoing: Filtering encrypted mail\./ {
|
||||||
encrypted_mail_count++
|
outgoing_encrypted_mail_count++
|
||||||
filtered_mail_count++
|
filtered_outgoing_mail_count++
|
||||||
}
|
}
|
||||||
|
|
||||||
counter unencrypted_mail_count
|
counter outgoing_unencrypted_mail_count
|
||||||
/Filtering unencrypted mail\./ {
|
/Outgoing: Filtering unencrypted mail\./ {
|
||||||
unencrypted_mail_count++
|
outgoing_unencrypted_mail_count++
|
||||||
filtered_mail_count++
|
filtered_outgoing_mail_count++
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
counter filtered_incoming_mail_count
|
||||||
|
|
||||||
|
counter incoming_encrypted_mail_count
|
||||||
|
/Incoming: Filtering encrypted mail\./ {
|
||||||
|
incoming_encrypted_mail_count++
|
||||||
|
filtered_incoming_mail_count++
|
||||||
|
}
|
||||||
|
|
||||||
|
counter incoming_unencrypted_mail_count
|
||||||
|
/Incoming: Filtering unencrypted mail\./ {
|
||||||
|
incoming_unencrypted_mail_count++
|
||||||
|
filtered_incoming_mail_count++
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
counter rejected_unencrypted_mail_count
|
counter rejected_unencrypted_mail_count
|
||||||
/Rejected unencrypted mail\./ {
|
/Rejected unencrypted mail/ {
|
||||||
rejected_unencrypted_mail_count++
|
rejected_unencrypted_mail_count++
|
||||||
}
|
}
|
||||||
|
|||||||
68
cmdeploy/src/cmdeploy/mtail/deployer.py
Normal file
68
cmdeploy/src/cmdeploy/mtail/deployer.py
Normal file
@@ -0,0 +1,68 @@
|
|||||||
|
from pyinfra import facts, host
|
||||||
|
from pyinfra.operations import apt, files, server, systemd
|
||||||
|
|
||||||
|
from cmdeploy.basedeploy import (
|
||||||
|
Deployer,
|
||||||
|
get_resource,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class MtailDeployer(Deployer):
|
||||||
|
def __init__(self, mtail_address):
|
||||||
|
self.mtail_address = mtail_address
|
||||||
|
|
||||||
|
def install(self):
|
||||||
|
# Uninstall mtail package to install a static binary.
|
||||||
|
apt.packages(name="Uninstall mtail", packages=["mtail"], present=False)
|
||||||
|
|
||||||
|
(url, sha256sum) = {
|
||||||
|
"x86_64": (
|
||||||
|
"https://github.com/google/mtail/releases/download/v3.0.8/mtail_3.0.8_linux_amd64.tar.gz",
|
||||||
|
"123c2ee5f48c3eff12ebccee38befd2233d715da736000ccde49e3d5607724e4",
|
||||||
|
),
|
||||||
|
"aarch64": (
|
||||||
|
"https://github.com/google/mtail/releases/download/v3.0.8/mtail_3.0.8_linux_arm64.tar.gz",
|
||||||
|
"aa04811c0929b6754408676de520e050c45dddeb3401881888a092c9aea89cae",
|
||||||
|
),
|
||||||
|
}[host.get_fact(facts.server.Arch)]
|
||||||
|
|
||||||
|
server.shell(
|
||||||
|
name="Download mtail",
|
||||||
|
commands=[
|
||||||
|
f"(echo '{sha256sum} /usr/local/bin/mtail' | sha256sum -c) || (curl -L {url} | gunzip | tar -x -f - mtail -O >/usr/local/bin/mtail.new && mv /usr/local/bin/mtail.new /usr/local/bin/mtail)",
|
||||||
|
"chmod 755 /usr/local/bin/mtail",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
def configure(self):
|
||||||
|
# Using our own systemd unit instead of `/usr/lib/systemd/system/mtail.service`.
|
||||||
|
# This allows to read from journalctl instead of log files.
|
||||||
|
files.template(
|
||||||
|
src=get_resource("mtail/mtail.service.j2"),
|
||||||
|
dest="/etc/systemd/system/mtail.service",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
address=self.mtail_address or "127.0.0.1",
|
||||||
|
port=3903,
|
||||||
|
)
|
||||||
|
|
||||||
|
mtail_conf = files.put(
|
||||||
|
name="Mtail configuration",
|
||||||
|
src=get_resource("mtail/delivered_mail.mtail"),
|
||||||
|
dest="/etc/mtail/delivered_mail.mtail",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
self.need_restart = mtail_conf.changed
|
||||||
|
|
||||||
|
def activate(self):
|
||||||
|
systemd.service(
|
||||||
|
name="Start and enable mtail",
|
||||||
|
service="mtail.service",
|
||||||
|
running=bool(self.mtail_address),
|
||||||
|
enabled=bool(self.mtail_address),
|
||||||
|
restarted=self.need_restart,
|
||||||
|
)
|
||||||
|
self.need_restart = False
|
||||||
117
cmdeploy/src/cmdeploy/nginx/deployer.py
Normal file
117
cmdeploy/src/cmdeploy/nginx/deployer.py
Normal file
@@ -0,0 +1,117 @@
|
|||||||
|
from chatmaild.config import Config
|
||||||
|
from pyinfra.operations import apt, files, systemd
|
||||||
|
|
||||||
|
from cmdeploy.basedeploy import (
|
||||||
|
Deployer,
|
||||||
|
get_resource,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class NginxDeployer(Deployer):
|
||||||
|
def __init__(self, config):
|
||||||
|
self.config = config
|
||||||
|
|
||||||
|
def install(self):
|
||||||
|
#
|
||||||
|
# If we allow nginx to start up on install, it will grab port
|
||||||
|
# 80, which then will block acmetool from listening on the port.
|
||||||
|
# That in turn prevents getting certificates, which then causes
|
||||||
|
# an error when we try to start nginx on the custom config
|
||||||
|
# that leaves port 80 open but also requires certificates to
|
||||||
|
# be present. To avoid getting into that interlocking mess,
|
||||||
|
# we use policy-rc.d to prevent nginx from starting up when it
|
||||||
|
# is installed.
|
||||||
|
#
|
||||||
|
# This approach allows us to avoid performing any explicit
|
||||||
|
# systemd operations during the install stage (as opposed to
|
||||||
|
# allowing it to start and then forcing it to stop), which allows
|
||||||
|
# the install stage to run in non-systemd environments like a
|
||||||
|
# container image build.
|
||||||
|
#
|
||||||
|
# For documentation about policy-rc.d, see:
|
||||||
|
# https://people.debian.org/~hmh/invokerc.d-policyrc.d-specification.txt
|
||||||
|
#
|
||||||
|
files.put(
|
||||||
|
src=get_resource("policy-rc.d"),
|
||||||
|
dest="/usr/sbin/policy-rc.d",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="755",
|
||||||
|
)
|
||||||
|
|
||||||
|
apt.packages(
|
||||||
|
name="Install nginx",
|
||||||
|
packages=["nginx", "libnginx-mod-stream"],
|
||||||
|
)
|
||||||
|
|
||||||
|
files.file("/usr/sbin/policy-rc.d", present=False)
|
||||||
|
|
||||||
|
def configure(self):
|
||||||
|
self.need_restart = _configure_nginx(self.config)
|
||||||
|
|
||||||
|
def activate(self):
|
||||||
|
systemd.service(
|
||||||
|
name="Start and enable nginx",
|
||||||
|
service="nginx.service",
|
||||||
|
running=True,
|
||||||
|
enabled=True,
|
||||||
|
restarted=self.need_restart,
|
||||||
|
)
|
||||||
|
self.need_restart = False
|
||||||
|
|
||||||
|
|
||||||
|
def _configure_nginx(config: Config, debug: bool = False) -> bool:
|
||||||
|
"""Configures nginx HTTP server."""
|
||||||
|
need_restart = False
|
||||||
|
|
||||||
|
main_config = files.template(
|
||||||
|
src=get_resource("nginx/nginx.conf.j2"),
|
||||||
|
dest="/etc/nginx/nginx.conf",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
config={"domain_name": config.mail_domain},
|
||||||
|
disable_ipv6=config.disable_ipv6,
|
||||||
|
)
|
||||||
|
need_restart |= main_config.changed
|
||||||
|
|
||||||
|
autoconfig = files.template(
|
||||||
|
src=get_resource("nginx/autoconfig.xml.j2"),
|
||||||
|
dest="/var/www/html/.well-known/autoconfig/mail/config-v1.1.xml",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
config={"domain_name": config.mail_domain},
|
||||||
|
)
|
||||||
|
need_restart |= autoconfig.changed
|
||||||
|
|
||||||
|
mta_sts_config = files.template(
|
||||||
|
src=get_resource("nginx/mta-sts.txt.j2"),
|
||||||
|
dest="/var/www/html/.well-known/mta-sts.txt",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
config={"domain_name": config.mail_domain},
|
||||||
|
)
|
||||||
|
need_restart |= mta_sts_config.changed
|
||||||
|
|
||||||
|
# install CGI newemail script
|
||||||
|
#
|
||||||
|
cgi_dir = "/usr/lib/cgi-bin"
|
||||||
|
files.directory(
|
||||||
|
name=f"Ensure {cgi_dir} exists",
|
||||||
|
path=cgi_dir,
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
)
|
||||||
|
|
||||||
|
files.put(
|
||||||
|
name="Upload cgi newemail.py script",
|
||||||
|
src=get_resource("newemail.py", pkg="chatmaild").open("rb"),
|
||||||
|
dest=f"{cgi_dir}/newemail.py",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="755",
|
||||||
|
)
|
||||||
|
|
||||||
|
return need_restart
|
||||||
@@ -66,7 +66,7 @@ http {
|
|||||||
|
|
||||||
index index.html index.htm;
|
index index.html index.htm;
|
||||||
|
|
||||||
server_name _;
|
server_name {{ config.domain_name }} www.{{ config.domain_name }} mta-sts.{{ config.domain_name }};
|
||||||
|
|
||||||
access_log syslog:server=unix:/dev/log,facility=local7;
|
access_log syslog:server=unix:/dev/log,facility=local7;
|
||||||
|
|
||||||
|
|||||||
Binary file not shown.
123
cmdeploy/src/cmdeploy/opendkim/deployer.py
Normal file
123
cmdeploy/src/cmdeploy/opendkim/deployer.py
Normal file
@@ -0,0 +1,123 @@
|
|||||||
|
"""
|
||||||
|
Installs OpenDKIM
|
||||||
|
"""
|
||||||
|
|
||||||
|
from pyinfra import host
|
||||||
|
from pyinfra.facts.files import File
|
||||||
|
from pyinfra.operations import apt, files, server, systemd
|
||||||
|
|
||||||
|
from cmdeploy.basedeploy import Deployer, get_resource
|
||||||
|
|
||||||
|
|
||||||
|
class OpendkimDeployer(Deployer):
|
||||||
|
required_users = [("opendkim", None, ["opendkim"])]
|
||||||
|
|
||||||
|
def __init__(self, mail_domain):
|
||||||
|
self.mail_domain = mail_domain
|
||||||
|
|
||||||
|
def install(self):
|
||||||
|
apt.packages(
|
||||||
|
name="apt install opendkim opendkim-tools",
|
||||||
|
packages=["opendkim", "opendkim-tools"],
|
||||||
|
)
|
||||||
|
|
||||||
|
def configure(self):
|
||||||
|
domain = self.mail_domain
|
||||||
|
dkim_selector = "opendkim"
|
||||||
|
"""Configures OpenDKIM"""
|
||||||
|
need_restart = False
|
||||||
|
|
||||||
|
main_config = files.template(
|
||||||
|
src=get_resource("opendkim/opendkim.conf"),
|
||||||
|
dest="/etc/opendkim.conf",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
config={"domain_name": domain, "opendkim_selector": dkim_selector},
|
||||||
|
)
|
||||||
|
need_restart |= main_config.changed
|
||||||
|
|
||||||
|
screen_script = files.put(
|
||||||
|
src=get_resource("opendkim/screen.lua"),
|
||||||
|
dest="/etc/opendkim/screen.lua",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
need_restart |= screen_script.changed
|
||||||
|
|
||||||
|
final_script = files.put(
|
||||||
|
src=get_resource("opendkim/final.lua"),
|
||||||
|
dest="/etc/opendkim/final.lua",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
need_restart |= final_script.changed
|
||||||
|
|
||||||
|
files.directory(
|
||||||
|
name="Add opendkim directory to /etc",
|
||||||
|
path="/etc/opendkim",
|
||||||
|
user="opendkim",
|
||||||
|
group="opendkim",
|
||||||
|
mode="750",
|
||||||
|
present=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
keytable = files.template(
|
||||||
|
src=get_resource("opendkim/KeyTable"),
|
||||||
|
dest="/etc/dkimkeys/KeyTable",
|
||||||
|
user="opendkim",
|
||||||
|
group="opendkim",
|
||||||
|
mode="644",
|
||||||
|
config={"domain_name": domain, "opendkim_selector": dkim_selector},
|
||||||
|
)
|
||||||
|
need_restart |= keytable.changed
|
||||||
|
|
||||||
|
signing_table = files.template(
|
||||||
|
src=get_resource("opendkim/SigningTable"),
|
||||||
|
dest="/etc/dkimkeys/SigningTable",
|
||||||
|
user="opendkim",
|
||||||
|
group="opendkim",
|
||||||
|
mode="644",
|
||||||
|
config={"domain_name": domain, "opendkim_selector": dkim_selector},
|
||||||
|
)
|
||||||
|
need_restart |= signing_table.changed
|
||||||
|
files.directory(
|
||||||
|
name="Add opendkim socket directory to /var/spool/postfix",
|
||||||
|
path="/var/spool/postfix/opendkim",
|
||||||
|
user="opendkim",
|
||||||
|
group="opendkim",
|
||||||
|
mode="750",
|
||||||
|
present=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
if not host.get_fact(File, f"/etc/dkimkeys/{dkim_selector}.private"):
|
||||||
|
server.shell(
|
||||||
|
name="Generate OpenDKIM domain keys",
|
||||||
|
commands=[
|
||||||
|
f"/usr/sbin/opendkim-genkey -D /etc/dkimkeys -d {domain} -s {dkim_selector}"
|
||||||
|
],
|
||||||
|
_use_su_login=True,
|
||||||
|
_su_user="opendkim",
|
||||||
|
)
|
||||||
|
|
||||||
|
service_file = files.put(
|
||||||
|
name="Configure opendkim to restart once a day",
|
||||||
|
src=get_resource("opendkim/systemd.conf"),
|
||||||
|
dest="/etc/systemd/system/opendkim.service.d/10-prevent-memory-leak.conf",
|
||||||
|
)
|
||||||
|
need_restart |= service_file.changed
|
||||||
|
|
||||||
|
self.need_restart = need_restart
|
||||||
|
|
||||||
|
def activate(self):
|
||||||
|
systemd.service(
|
||||||
|
name="Start and enable OpenDKIM",
|
||||||
|
service="opendkim.service",
|
||||||
|
running=True,
|
||||||
|
enabled=True,
|
||||||
|
daemon_reload=self.need_restart,
|
||||||
|
restarted=self.need_restart,
|
||||||
|
)
|
||||||
|
self.need_restart = False
|
||||||
@@ -1,4 +1,5 @@
|
|||||||
if odkim.internal_ip(ctx) == 1 then
|
mtaname = odkim.get_mtasymbol(ctx, "{daemon_name}")
|
||||||
|
if mtaname == "ORIGINATING" then
|
||||||
-- Outgoing message will be signed,
|
-- Outgoing message will be signed,
|
||||||
-- no need to look for signatures.
|
-- no need to look for signatures.
|
||||||
return nil
|
return nil
|
||||||
@@ -9,9 +10,11 @@ if nsigs == nil then
|
|||||||
return nil
|
return nil
|
||||||
end
|
end
|
||||||
|
|
||||||
|
local valid = false
|
||||||
|
local error_msg = "No valid DKIM signature found."
|
||||||
for i = 1, nsigs do
|
for i = 1, nsigs do
|
||||||
sig = odkim.get_sighandle(ctx, i - 1)
|
sig = odkim.get_sighandle(ctx, i - 1)
|
||||||
sigres = odkim.sig_result(sig)
|
sigres = odkim.sig_result(sig)
|
||||||
|
|
||||||
-- All signatures that do not correspond to From:
|
-- All signatures that do not correspond to From:
|
||||||
-- were ignored in screen.lua and return sigres -1.
|
-- were ignored in screen.lua and return sigres -1.
|
||||||
@@ -19,10 +22,21 @@ for i = 1, nsigs do
|
|||||||
-- Any valid signature that was not ignored like this
|
-- Any valid signature that was not ignored like this
|
||||||
-- means the message is acceptable.
|
-- means the message is acceptable.
|
||||||
if sigres == 0 then
|
if sigres == 0 then
|
||||||
return nil
|
valid = true
|
||||||
end
|
else
|
||||||
|
error_msg = "DKIM signature is invalid, error code " .. tostring(sigres) .. ", search https://github.com/trusteddomainproject/OpenDKIM/blob/master/libopendkim/dkim.h#L108"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
if valid then
|
||||||
|
-- Strip all DKIM-Signature headers after successful validation
|
||||||
|
-- Delete in reverse order to avoid index shifting.
|
||||||
|
for i = nsigs, 1, -1 do
|
||||||
|
odkim.del_header(ctx, "DKIM-Signature", i)
|
||||||
|
end
|
||||||
|
else
|
||||||
|
odkim.set_reply(ctx, "554", "5.7.1", error_msg)
|
||||||
|
odkim.set_result(ctx, SMFIS_REJECT)
|
||||||
end
|
end
|
||||||
|
|
||||||
odkim.set_reply(ctx, "554", "5.7.1", "No valid DKIM signature found")
|
|
||||||
odkim.set_result(ctx, SMFIS_REJECT)
|
|
||||||
return nil
|
return nil
|
||||||
|
|||||||
@@ -13,6 +13,7 @@ OversignHeaders From
|
|||||||
On-BadSignature reject
|
On-BadSignature reject
|
||||||
On-KeyNotFound reject
|
On-KeyNotFound reject
|
||||||
On-NoSignature reject
|
On-NoSignature reject
|
||||||
|
DNSTimeout 60
|
||||||
|
|
||||||
# Signing domain, selector, and key (required). For example, perform signing
|
# Signing domain, selector, and key (required). For example, perform signing
|
||||||
# for domain "example.com" with selector "2020" (2020._domainkey.example.com),
|
# for domain "example.com" with selector "2020" (2020._domainkey.example.com),
|
||||||
@@ -64,3 +65,9 @@ PidFile /run/opendkim/opendkim.pid
|
|||||||
# The trust anchor enables DNSSEC. In Debian, the trust anchor file is provided
|
# The trust anchor enables DNSSEC. In Debian, the trust anchor file is provided
|
||||||
# by the package dns-root-data.
|
# by the package dns-root-data.
|
||||||
TrustAnchorFile /usr/share/dns/root.key
|
TrustAnchorFile /usr/share/dns/root.key
|
||||||
|
|
||||||
|
# Sign messages when `-o milter_macro_daemon_name=ORIGINATING` is set.
|
||||||
|
MTA ORIGINATING
|
||||||
|
|
||||||
|
# No hosts are treated as internal, ORIGINATING daemon name should be set explicitly.
|
||||||
|
InternalHosts -
|
||||||
|
|||||||
3
cmdeploy/src/cmdeploy/policy-rc.d
Executable file
3
cmdeploy/src/cmdeploy/policy-rc.d
Executable file
@@ -0,0 +1,3 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
echo "All runlevel operations denied by policy" >&2
|
||||||
|
exit 101
|
||||||
103
cmdeploy/src/cmdeploy/postfix/deployer.py
Normal file
103
cmdeploy/src/cmdeploy/postfix/deployer.py
Normal file
@@ -0,0 +1,103 @@
|
|||||||
|
from pyinfra.operations import apt, files, server, systemd
|
||||||
|
|
||||||
|
from cmdeploy.basedeploy import Deployer, get_resource
|
||||||
|
|
||||||
|
|
||||||
|
class PostfixDeployer(Deployer):
|
||||||
|
required_users = [("postfix", None, ["opendkim"])]
|
||||||
|
daemon_reload = False
|
||||||
|
|
||||||
|
def __init__(self, config, disable_mail):
|
||||||
|
self.config = config
|
||||||
|
self.disable_mail = disable_mail
|
||||||
|
|
||||||
|
def install(self):
|
||||||
|
apt.packages(
|
||||||
|
name="Install Postfix",
|
||||||
|
packages="postfix",
|
||||||
|
)
|
||||||
|
|
||||||
|
def configure(self):
|
||||||
|
config = self.config
|
||||||
|
need_restart = False
|
||||||
|
|
||||||
|
main_config = files.template(
|
||||||
|
src=get_resource("postfix/main.cf.j2"),
|
||||||
|
dest="/etc/postfix/main.cf",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
config=config,
|
||||||
|
disable_ipv6=config.disable_ipv6,
|
||||||
|
)
|
||||||
|
need_restart |= main_config.changed
|
||||||
|
|
||||||
|
master_config = files.template(
|
||||||
|
src=get_resource("postfix/master.cf.j2"),
|
||||||
|
dest="/etc/postfix/master.cf",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
debug=False,
|
||||||
|
config=config,
|
||||||
|
)
|
||||||
|
need_restart |= master_config.changed
|
||||||
|
|
||||||
|
header_cleanup = files.put(
|
||||||
|
src=get_resource("postfix/submission_header_cleanup"),
|
||||||
|
dest="/etc/postfix/submission_header_cleanup",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
need_restart |= header_cleanup.changed
|
||||||
|
|
||||||
|
lmtp_header_cleanup = files.put(
|
||||||
|
src=get_resource("postfix/lmtp_header_cleanup"),
|
||||||
|
dest="/etc/postfix/lmtp_header_cleanup",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
need_restart |= lmtp_header_cleanup.changed
|
||||||
|
|
||||||
|
# Login map that 1:1 maps email address to login.
|
||||||
|
login_map = files.put(
|
||||||
|
src=get_resource("postfix/login_map"),
|
||||||
|
dest="/etc/postfix/login_map",
|
||||||
|
user="root",
|
||||||
|
group="root",
|
||||||
|
mode="644",
|
||||||
|
)
|
||||||
|
need_restart |= login_map.changed
|
||||||
|
|
||||||
|
restart_conf = files.put(
|
||||||
|
name="postfix: restart automatically on failure",
|
||||||
|
src=get_resource("service/10_restart.conf"),
|
||||||
|
dest="/etc/systemd/system/postfix@.service.d/10_restart.conf",
|
||||||
|
)
|
||||||
|
self.daemon_reload = restart_conf.changed
|
||||||
|
|
||||||
|
# Validate postfix configuration before restart
|
||||||
|
if need_restart:
|
||||||
|
server.shell(
|
||||||
|
name="Validate postfix configuration",
|
||||||
|
# Extract stderr and quit with error if non-zero
|
||||||
|
commands=["""bash -c 'w=$(postconf 2>&1 >/dev/null); [[ -z "$w" ]] || { echo "$w"; false; }'"""],
|
||||||
|
)
|
||||||
|
self.need_restart = need_restart
|
||||||
|
|
||||||
|
def activate(self):
|
||||||
|
restart = False if self.disable_mail else self.need_restart
|
||||||
|
|
||||||
|
systemd.service(
|
||||||
|
name="disable postfix for now"
|
||||||
|
if self.disable_mail
|
||||||
|
else "Start and enable Postfix",
|
||||||
|
service="postfix.service",
|
||||||
|
running=False if self.disable_mail else True,
|
||||||
|
enabled=False if self.disable_mail else True,
|
||||||
|
restarted=restart,
|
||||||
|
daemon_reload=self.daemon_reload,
|
||||||
|
)
|
||||||
|
self.need_restart = False
|
||||||
2
cmdeploy/src/cmdeploy/postfix/lmtp_header_cleanup
Normal file
2
cmdeploy/src/cmdeploy/postfix/lmtp_header_cleanup
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
/^DKIM-Signature:/ IGNORE
|
||||||
|
/^Authentication-Results:/ IGNORE
|
||||||
@@ -26,7 +26,8 @@ smtp_tls_security_level=verify
|
|||||||
smtp_tls_servername = hostname
|
smtp_tls_servername = hostname
|
||||||
smtp_tls_session_cache_database = btree:${data_directory}/smtp_scache
|
smtp_tls_session_cache_database = btree:${data_directory}/smtp_scache
|
||||||
smtp_tls_policy_maps = inline:{nauta.cu=may}
|
smtp_tls_policy_maps = inline:{nauta.cu=may}
|
||||||
smtpd_tls_protocols = >=TLSv1.2
|
smtp_tls_protocols = >=TLSv1.2
|
||||||
|
smtp_tls_mandatory_protocols = >=TLSv1.2
|
||||||
|
|
||||||
# Disable anonymous cipher suites
|
# Disable anonymous cipher suites
|
||||||
# and known insecure algorithms.
|
# and known insecure algorithms.
|
||||||
@@ -63,7 +64,11 @@ alias_database = hash:/etc/aliases
|
|||||||
mydestination =
|
mydestination =
|
||||||
|
|
||||||
relayhost =
|
relayhost =
|
||||||
|
{% if disable_ipv6 %}
|
||||||
|
mynetworks = 127.0.0.0/8
|
||||||
|
{% else %}
|
||||||
mynetworks = 127.0.0.0/8 [::ffff:127.0.0.0]/104 [::1]/128
|
mynetworks = 127.0.0.0/8 [::ffff:127.0.0.0]/104 [::1]/128
|
||||||
|
{% endif %}
|
||||||
mailbox_size_limit = 0
|
mailbox_size_limit = 0
|
||||||
message_size_limit = {{config.max_message_size}}
|
message_size_limit = {{config.max_message_size}}
|
||||||
recipient_delimiter = +
|
recipient_delimiter = +
|
||||||
@@ -76,6 +81,7 @@ inet_protocols = all
|
|||||||
|
|
||||||
virtual_transport = lmtp:unix:private/dovecot-lmtp
|
virtual_transport = lmtp:unix:private/dovecot-lmtp
|
||||||
virtual_mailbox_domains = {{ config.mail_domain }}
|
virtual_mailbox_domains = {{ config.mail_domain }}
|
||||||
|
lmtp_header_checks = regexp:/etc/postfix/lmtp_header_cleanup
|
||||||
|
|
||||||
mua_client_restrictions = permit_sasl_authenticated, reject
|
mua_client_restrictions = permit_sasl_authenticated, reject
|
||||||
mua_sender_restrictions = reject_sender_login_mismatch, permit_sasl_authenticated, reject
|
mua_sender_restrictions = reject_sender_login_mismatch, permit_sasl_authenticated, reject
|
||||||
|
|||||||
@@ -14,6 +14,8 @@ smtp inet n - y - - smtpd -v
|
|||||||
{%- else %}
|
{%- else %}
|
||||||
smtp inet n - y - - smtpd
|
smtp inet n - y - - smtpd
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
|
-o smtpd_tls_security_level=encrypt
|
||||||
|
-o smtpd_tls_mandatory_protocols=>=TLSv1.2
|
||||||
-o smtpd_proxy_filter=127.0.0.1:{{ config.filtermail_smtp_port_incoming }}
|
-o smtpd_proxy_filter=127.0.0.1:{{ config.filtermail_smtp_port_incoming }}
|
||||||
submission inet n - y - 5000 smtpd
|
submission inet n - y - 5000 smtpd
|
||||||
-o syslog_name=postfix/submission
|
-o syslog_name=postfix/submission
|
||||||
@@ -29,7 +31,6 @@ submission inet n - y - 5000 smtpd
|
|||||||
-o smtpd_sender_restrictions=$mua_sender_restrictions
|
-o smtpd_sender_restrictions=$mua_sender_restrictions
|
||||||
-o smtpd_recipient_restrictions=
|
-o smtpd_recipient_restrictions=
|
||||||
-o smtpd_relay_restrictions=permit_sasl_authenticated,reject
|
-o smtpd_relay_restrictions=permit_sasl_authenticated,reject
|
||||||
-o milter_macro_daemon_name=ORIGINATING
|
|
||||||
-o smtpd_client_connection_count_limit=1000
|
-o smtpd_client_connection_count_limit=1000
|
||||||
-o smtpd_proxy_filter=127.0.0.1:{{ config.filtermail_smtp_port }}
|
-o smtpd_proxy_filter=127.0.0.1:{{ config.filtermail_smtp_port }}
|
||||||
smtps inet n - y - 5000 smtpd
|
smtps inet n - y - 5000 smtpd
|
||||||
@@ -47,7 +48,6 @@ smtps inet n - y - 5000 smtpd
|
|||||||
-o smtpd_recipient_restrictions=
|
-o smtpd_recipient_restrictions=
|
||||||
-o smtpd_relay_restrictions=permit_sasl_authenticated,reject
|
-o smtpd_relay_restrictions=permit_sasl_authenticated,reject
|
||||||
-o smtpd_client_connection_count_limit=1000
|
-o smtpd_client_connection_count_limit=1000
|
||||||
-o milter_macro_daemon_name=ORIGINATING
|
|
||||||
-o smtpd_proxy_filter=127.0.0.1:{{ config.filtermail_smtp_port }}
|
-o smtpd_proxy_filter=127.0.0.1:{{ config.filtermail_smtp_port }}
|
||||||
#628 inet n - y - - qmqpd
|
#628 inet n - y - - qmqpd
|
||||||
pickup unix n - y 60 1 pickup
|
pickup unix n - y 60 1 pickup
|
||||||
@@ -79,6 +79,7 @@ filter unix - n n - - lmtp
|
|||||||
# Local SMTP server for reinjecting outgoing filtered mail.
|
# Local SMTP server for reinjecting outgoing filtered mail.
|
||||||
127.0.0.1:{{ config.postfix_reinject_port }} inet n - n - 100 smtpd
|
127.0.0.1:{{ config.postfix_reinject_port }} inet n - n - 100 smtpd
|
||||||
-o syslog_name=postfix/reinject
|
-o syslog_name=postfix/reinject
|
||||||
|
-o milter_macro_daemon_name=ORIGINATING
|
||||||
-o smtpd_milters=unix:opendkim/opendkim.sock
|
-o smtpd_milters=unix:opendkim/opendkim.sock
|
||||||
-o cleanup_service_name=authclean
|
-o cleanup_service_name=authclean
|
||||||
|
|
||||||
|
|||||||
@@ -12,7 +12,7 @@ All functions of this module
|
|||||||
|
|
||||||
import re
|
import re
|
||||||
|
|
||||||
from .rshell import CalledProcessError, shell, log_progress
|
from .rshell import CalledProcessError, log_progress, shell
|
||||||
|
|
||||||
|
|
||||||
def perform_initial_checks(mail_domain, pre_command=""):
|
def perform_initial_checks(mail_domain, pre_command=""):
|
||||||
@@ -26,7 +26,9 @@ def perform_initial_checks(mail_domain, pre_command=""):
|
|||||||
WWW = query_dns("CNAME", f"www.{mail_domain}")
|
WWW = query_dns("CNAME", f"www.{mail_domain}")
|
||||||
|
|
||||||
res = dict(mail_domain=mail_domain, A=A, AAAA=AAAA, MTA_STS=MTA_STS, WWW=WWW)
|
res = dict(mail_domain=mail_domain, A=A, AAAA=AAAA, MTA_STS=MTA_STS, WWW=WWW)
|
||||||
res["acme_account_url"] = shell(pre_command + "acmetool account-url", fail_ok=True, print=log_progress)
|
res["acme_account_url"] = shell(
|
||||||
|
pre_command + "acmetool account-url", fail_ok=True, print=log_progress
|
||||||
|
)
|
||||||
res["dkim_entry"], res["web_dkim_entry"] = get_dkim_entry(
|
res["dkim_entry"], res["web_dkim_entry"] = get_dkim_entry(
|
||||||
mail_domain, pre_command, dkim_selector="opendkim"
|
mail_domain, pre_command, dkim_selector="opendkim"
|
||||||
)
|
)
|
||||||
@@ -35,7 +37,10 @@ def perform_initial_checks(mail_domain, pre_command=""):
|
|||||||
return res
|
return res
|
||||||
|
|
||||||
# parse out sts-id if exists, example: "v=STSv1; id=2090123"
|
# parse out sts-id if exists, example: "v=STSv1; id=2090123"
|
||||||
parts = query_dns("TXT", f"_mta-sts.{mail_domain}").split("id=")
|
mta_sts_txt = query_dns("TXT", f"_mta-sts.{mail_domain}")
|
||||||
|
if not mta_sts_txt:
|
||||||
|
return res
|
||||||
|
parts = mta_sts_txt.split("id=")
|
||||||
res["sts_id"] = parts[1].rstrip('"') if len(parts) == 2 else ""
|
res["sts_id"] = parts[1].rstrip('"') if len(parts) == 2 else ""
|
||||||
return res
|
return res
|
||||||
|
|
||||||
@@ -45,7 +50,7 @@ def get_dkim_entry(mail_domain, pre_command, dkim_selector):
|
|||||||
dkim_pubkey = shell(
|
dkim_pubkey = shell(
|
||||||
f"{pre_command}openssl rsa -in /etc/dkimkeys/{dkim_selector}.private "
|
f"{pre_command}openssl rsa -in /etc/dkimkeys/{dkim_selector}.private "
|
||||||
"-pubout 2>/dev/null | awk '/-/{next}{printf(\"%s\",$0)}'",
|
"-pubout 2>/dev/null | awk '/-/{next}{printf(\"%s\",$0)}'",
|
||||||
print=log_progress
|
print=log_progress,
|
||||||
)
|
)
|
||||||
except CalledProcessError:
|
except CalledProcessError:
|
||||||
return
|
return
|
||||||
@@ -62,9 +67,9 @@ def query_dns(typ, domain):
|
|||||||
# Get autoritative nameserver from the SOA record.
|
# Get autoritative nameserver from the SOA record.
|
||||||
soa_answers = [
|
soa_answers = [
|
||||||
x.split()
|
x.split()
|
||||||
for x in shell(f"dig -r -q {domain} -t SOA +noall +authority +answer", print=log_progress).split(
|
for x in shell(
|
||||||
"\n"
|
f"dig -r -q {domain} -t SOA +noall +authority +answer", print=log_progress
|
||||||
)
|
).split("\n")
|
||||||
]
|
]
|
||||||
soa = [a for a in soa_answers if len(a) >= 3 and a[3] == "SOA"]
|
soa = [a for a in soa_answers if len(a) >= 3 and a[3] == "SOA"]
|
||||||
if not soa:
|
if not soa:
|
||||||
@@ -73,9 +78,7 @@ def query_dns(typ, domain):
|
|||||||
|
|
||||||
# Query authoritative nameserver directly to bypass DNS cache.
|
# Query authoritative nameserver directly to bypass DNS cache.
|
||||||
res = shell(f"dig @{ns} -r -q {domain} -t {typ} +short", print=log_progress)
|
res = shell(f"dig @{ns} -r -q {domain} -t {typ} +short", print=log_progress)
|
||||||
if res:
|
return next((line for line in res.split("\n") if not line.startswith(";")), "")
|
||||||
return res.split("\n")[0]
|
|
||||||
return ""
|
|
||||||
|
|
||||||
|
|
||||||
def check_zonefile(zonefile, verbose=True):
|
def check_zonefile(zonefile, verbose=True):
|
||||||
|
|||||||
@@ -1,5 +1,4 @@
|
|||||||
import sys
|
import sys
|
||||||
|
|
||||||
from subprocess import DEVNULL, CalledProcessError, check_output
|
from subprocess import DEVNULL, CalledProcessError, check_output
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -3,7 +3,9 @@ import os
|
|||||||
|
|
||||||
import pyinfra
|
import pyinfra
|
||||||
|
|
||||||
from cmdeploy import deploy_chatmail
|
# pyinfra runs this module as a python file and not as a module so
|
||||||
|
# import paths must be absolute
|
||||||
|
from cmdeploy.deployers import deploy_chatmail
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
@@ -12,8 +14,9 @@ def main():
|
|||||||
importlib.resources.files("cmdeploy").joinpath("../../../chatmail.ini"),
|
importlib.resources.files("cmdeploy").joinpath("../../../chatmail.ini"),
|
||||||
)
|
)
|
||||||
disable_mail = bool(os.environ.get("CHATMAIL_DISABLE_MAIL"))
|
disable_mail = bool(os.environ.get("CHATMAIL_DISABLE_MAIL"))
|
||||||
|
website_only = bool(os.environ.get("CHATMAIL_WEBSITE_ONLY"))
|
||||||
|
|
||||||
deploy_chatmail(config_path, disable_mail)
|
deploy_chatmail(config_path, disable_mail, website_only)
|
||||||
|
|
||||||
|
|
||||||
if pyinfra.is_cli:
|
if pyinfra.is_cli:
|
||||||
3
cmdeploy/src/cmdeploy/service/10_restart.conf
Normal file
3
cmdeploy/src/cmdeploy/service/10_restart.conf
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
[Service]
|
||||||
|
Restart=always
|
||||||
|
RestartSec=30
|
||||||
9
cmdeploy/src/cmdeploy/service/chatmail-expire.service.f
Normal file
9
cmdeploy/src/cmdeploy/service/chatmail-expire.service.f
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
[Unit]
|
||||||
|
Description=chatmail mail storage expiration job
|
||||||
|
After=network.target
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
Type=oneshot
|
||||||
|
User=vmail
|
||||||
|
ExecStart=/usr/local/lib/chatmaild/venv/bin/chatmail-expire /usr/local/lib/chatmaild/chatmail.ini -v --remove
|
||||||
|
|
||||||
8
cmdeploy/src/cmdeploy/service/chatmail-expire.timer.f
Normal file
8
cmdeploy/src/cmdeploy/service/chatmail-expire.timer.f
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
[Unit]
|
||||||
|
Description=Run Daily chatmail-expire job
|
||||||
|
|
||||||
|
[Timer]
|
||||||
|
OnCalendar=*-*-* 00:02:00
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=timers.target
|
||||||
@@ -0,0 +1,9 @@
|
|||||||
|
[Unit]
|
||||||
|
Description=chatmail file system storage reporting job
|
||||||
|
After=network.target
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
Type=oneshot
|
||||||
|
User=vmail
|
||||||
|
ExecStart=/usr/local/lib/chatmaild/venv/bin/chatmail-fsreport /usr/local/lib/chatmaild/chatmail.ini
|
||||||
|
|
||||||
9
cmdeploy/src/cmdeploy/service/chatmail-fsreport.timer.f
Normal file
9
cmdeploy/src/cmdeploy/service/chatmail-fsreport.timer.f
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
[Unit]
|
||||||
|
Description=Run Daily Chatmail fsreport Job
|
||||||
|
|
||||||
|
[Timer]
|
||||||
|
OnCalendar=*-*-* 08:02:00
|
||||||
|
Persistent=true
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=timers.target
|
||||||
@@ -1,67 +0,0 @@
|
|||||||
[Unit]
|
|
||||||
Description=Chatmail echo bot for testing it works
|
|
||||||
|
|
||||||
[Service]
|
|
||||||
ExecStart={execpath} {config_path}
|
|
||||||
Environment="PATH={remote_venv_dir}:$PATH"
|
|
||||||
Restart=always
|
|
||||||
RestartSec=30
|
|
||||||
|
|
||||||
User=echobot
|
|
||||||
Group=echobot
|
|
||||||
|
|
||||||
# Create /var/lib/echobot
|
|
||||||
StateDirectory=echobot
|
|
||||||
|
|
||||||
# Create /run/echobot
|
|
||||||
#
|
|
||||||
# echobot stores /run/echobot/password
|
|
||||||
# with a password there, which doveauth then reads.
|
|
||||||
RuntimeDirectory=echobot
|
|
||||||
|
|
||||||
WorkingDirectory=/var/lib/echobot
|
|
||||||
|
|
||||||
# Apply security restrictions suggested by
|
|
||||||
# systemd-analyze security echobot.service
|
|
||||||
CapabilityBoundingSet=
|
|
||||||
LockPersonality=true
|
|
||||||
MemoryDenyWriteExecute=true
|
|
||||||
NoNewPrivileges=true
|
|
||||||
PrivateDevices=true
|
|
||||||
PrivateMounts=true
|
|
||||||
PrivateTmp=true
|
|
||||||
|
|
||||||
# We need to know about doveauth user to give it access to /run/echobot/password
|
|
||||||
PrivateUsers=false
|
|
||||||
|
|
||||||
ProtectClock=true
|
|
||||||
ProtectControlGroups=true
|
|
||||||
ProtectHostname=true
|
|
||||||
ProtectKernelLogs=true
|
|
||||||
ProtectKernelModules=true
|
|
||||||
ProtectKernelTunables=true
|
|
||||||
ProtectProc=noaccess
|
|
||||||
|
|
||||||
# Should be "strict", but we currently write /accounts folder in a protected path
|
|
||||||
ProtectSystem=full
|
|
||||||
|
|
||||||
RemoveIPC=true
|
|
||||||
RestrictAddressFamilies=AF_INET AF_INET6
|
|
||||||
RestrictNamespaces=true
|
|
||||||
RestrictRealtime=true
|
|
||||||
RestrictSUIDSGID=true
|
|
||||||
SystemCallArchitectures=native
|
|
||||||
SystemCallFilter=~@clock
|
|
||||||
SystemCallFilter=~@cpu-emulation
|
|
||||||
SystemCallFilter=~@debug
|
|
||||||
SystemCallFilter=~@module
|
|
||||||
SystemCallFilter=~@mount
|
|
||||||
SystemCallFilter=~@obsolete
|
|
||||||
SystemCallFilter=~@raw-io
|
|
||||||
SystemCallFilter=~@reboot
|
|
||||||
SystemCallFilter=~@resources
|
|
||||||
SystemCallFilter=~@swap
|
|
||||||
UMask=0077
|
|
||||||
|
|
||||||
[Install]
|
|
||||||
WantedBy=multi-user.target
|
|
||||||
@@ -93,7 +93,7 @@ class LocalExec:
|
|||||||
where = "locally"
|
where = "locally"
|
||||||
if self.docker:
|
if self.docker:
|
||||||
if call == remote.rdns.perform_initial_checks:
|
if call == remote.rdns.perform_initial_checks:
|
||||||
kwargs['pre_command'] = "docker exec chatmail "
|
kwargs["pre_command"] = "docker exec chatmail "
|
||||||
where = "in docker"
|
where = "in docker"
|
||||||
if self.verbose:
|
if self.verbose:
|
||||||
print(f"Running {where}: {call.__name__}(**{kwargs})")
|
print(f"Running {where}: {call.__name__}(**{kwargs})")
|
||||||
|
|||||||
@@ -37,7 +37,7 @@ class TestDC:
|
|||||||
|
|
||||||
def test_ping_pong(self, benchmark, cmfactory):
|
def test_ping_pong(self, benchmark, cmfactory):
|
||||||
ac1, ac2 = cmfactory.get_online_accounts(2)
|
ac1, ac2 = cmfactory.get_online_accounts(2)
|
||||||
chat = cmfactory.get_protected_chat(ac1, ac2)
|
chat = cmfactory.get_accepted_chat(ac1, ac2)
|
||||||
|
|
||||||
def dc_ping_pong():
|
def dc_ping_pong():
|
||||||
chat.send_text("ping")
|
chat.send_text("ping")
|
||||||
@@ -49,7 +49,7 @@ class TestDC:
|
|||||||
|
|
||||||
def test_send_10_receive_10(self, benchmark, cmfactory, lp):
|
def test_send_10_receive_10(self, benchmark, cmfactory, lp):
|
||||||
ac1, ac2 = cmfactory.get_online_accounts(2)
|
ac1, ac2 = cmfactory.get_online_accounts(2)
|
||||||
chat = cmfactory.get_protected_chat(ac1, ac2)
|
chat = cmfactory.get_accepted_chat(ac1, ac2)
|
||||||
|
|
||||||
def dc_send_10_receive_10():
|
def dc_send_10_receive_10():
|
||||||
for i in range(10):
|
for i in range(10):
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
import queue
|
import queue
|
||||||
import socket
|
import smtplib
|
||||||
import threading
|
import threading
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
@@ -91,25 +91,23 @@ def test_concurrent_logins_same_account(
|
|||||||
|
|
||||||
def test_no_vrfy(chatmail_config):
|
def test_no_vrfy(chatmail_config):
|
||||||
domain = chatmail_config.mail_domain
|
domain = chatmail_config.mail_domain
|
||||||
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
|
||||||
sock.settimeout(10)
|
s = smtplib.SMTP(domain)
|
||||||
try:
|
s.starttls()
|
||||||
sock.connect((domain, 25))
|
|
||||||
except socket.timeout:
|
s.putcmd("vrfy", f"wrongaddress@{chatmail_config.mail_domain}")
|
||||||
pytest.skip(f"port 25 not reachable for {domain}")
|
result = s.getreply()
|
||||||
banner = sock.recv(1024)
|
|
||||||
print(banner)
|
|
||||||
sock.send(b"VRFY wrongaddress@%s\r\n" % (chatmail_config.mail_domain.encode(),))
|
|
||||||
result = sock.recv(1024)
|
|
||||||
print(result)
|
print(result)
|
||||||
sock.send(b"VRFY echo@%s\r\n" % (chatmail_config.mail_domain.encode(),))
|
s.putcmd("vrfy", f"echo@{chatmail_config.mail_domain}")
|
||||||
result2 = sock.recv(1024)
|
result2 = s.getreply()
|
||||||
print(result2)
|
print(result2)
|
||||||
assert result[0:10] == result2[0:10]
|
assert result[0] == result2[0] == 252
|
||||||
sock.send(b"VRFY wrongaddress\r\n")
|
assert result[1][0:6] == result2[1][0:6] == b"2.0.0 "
|
||||||
result = sock.recv(1024)
|
s.putcmd("vrfy", "wrongaddress")
|
||||||
|
result = s.getreply()
|
||||||
print(result)
|
print(result)
|
||||||
sock.send(b"VRFY echo\r\n")
|
s.putcmd("vrfy", "echo")
|
||||||
result2 = sock.recv(1024)
|
result2 = s.getreply()
|
||||||
print(result2)
|
print(result2)
|
||||||
assert result[0:10] == result2[0:10] == b"252 2.0.0 "
|
assert result[0] == result2[0] == 252
|
||||||
|
assert result[1][0:6] == result2[1][0:6] == b"2.0.0 "
|
||||||
|
|||||||
@@ -10,37 +10,10 @@ from cmdeploy import remote
|
|||||||
from cmdeploy.sshexec import SSHExec
|
from cmdeploy.sshexec import SSHExec
|
||||||
|
|
||||||
|
|
||||||
class FuncError(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class DockerExec:
|
|
||||||
FuncError = FuncError
|
|
||||||
|
|
||||||
def __init__(self, pre_command):
|
|
||||||
self.pre_command = pre_command
|
|
||||||
|
|
||||||
def __call__(self, call, kwargs=None):
|
|
||||||
if kwargs is None:
|
|
||||||
kwargs = {}
|
|
||||||
return call(**kwargs)
|
|
||||||
|
|
||||||
def logged(self, call, kwargs):
|
|
||||||
title = call.__doc__
|
|
||||||
if not title:
|
|
||||||
title = call.__name__
|
|
||||||
print("[ssh] " + title)
|
|
||||||
return self(call, kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
class TestSSHExecutor:
|
class TestSSHExecutor:
|
||||||
@pytest.fixture(scope="class")
|
@pytest.fixture(scope="class")
|
||||||
def sshexec(self, sshdomain):
|
def sshexec(self, sshdomain):
|
||||||
try:
|
return SSHExec(sshdomain)
|
||||||
sshexec = SSHExec(sshdomain)
|
|
||||||
except FileNotFoundError:
|
|
||||||
sshexec = DockerExec("docker exec chatmail ")
|
|
||||||
return sshexec
|
|
||||||
|
|
||||||
def test_ls(self, sshexec):
|
def test_ls(self, sshexec):
|
||||||
out = sshexec(call=remote.rdns.shell, kwargs=dict(command="ls"))
|
out = sshexec(call=remote.rdns.shell, kwargs=dict(command="ls"))
|
||||||
@@ -54,15 +27,13 @@ class TestSSHExecutor:
|
|||||||
assert res["A"] or res["AAAA"]
|
assert res["A"] or res["AAAA"]
|
||||||
|
|
||||||
def test_logged(self, sshexec, maildomain, capsys):
|
def test_logged(self, sshexec, maildomain, capsys):
|
||||||
if isinstance(sshexec, DockerExec):
|
|
||||||
pytest.skip("This test only works via SSH")
|
|
||||||
sshexec.logged(
|
sshexec.logged(
|
||||||
remote.rdns.perform_initial_checks, kwargs=dict(mail_domain=maildomain)
|
remote.rdns.perform_initial_checks, kwargs=dict(mail_domain=maildomain)
|
||||||
)
|
)
|
||||||
out, err = capsys.readouterr()
|
out, err = capsys.readouterr()
|
||||||
assert err.startswith("Collecting")
|
assert err.startswith("Collecting")
|
||||||
# XXX could not figure out how capturing can be made to work properly
|
# XXX could not figure out how capturing can be made to work properly
|
||||||
#assert err.endswith("....\n")
|
# assert err.endswith("....\n")
|
||||||
assert err.count("\n") == 1
|
assert err.count("\n") == 1
|
||||||
|
|
||||||
sshexec.verbose = True
|
sshexec.verbose = True
|
||||||
@@ -72,7 +43,7 @@ class TestSSHExecutor:
|
|||||||
out, err = capsys.readouterr()
|
out, err = capsys.readouterr()
|
||||||
lines = err.split("\n")
|
lines = err.split("\n")
|
||||||
# XXX could not figure out how capturing can be made to work properly
|
# XXX could not figure out how capturing can be made to work properly
|
||||||
#assert len(lines) > 4
|
# assert len(lines) > 4
|
||||||
assert remote.rdns.perform_initial_checks.__doc__ in lines[0]
|
assert remote.rdns.perform_initial_checks.__doc__ in lines[0]
|
||||||
|
|
||||||
def test_exception(self, sshexec, capsys):
|
def test_exception(self, sshexec, capsys):
|
||||||
@@ -84,8 +55,6 @@ class TestSSHExecutor:
|
|||||||
except sshexec.FuncError as e:
|
except sshexec.FuncError as e:
|
||||||
assert "rdns.py" in str(e)
|
assert "rdns.py" in str(e)
|
||||||
assert "AssertionError" in str(e)
|
assert "AssertionError" in str(e)
|
||||||
except AssertionError:
|
|
||||||
assert isinstance(sshexec, DockerExec)
|
|
||||||
else:
|
else:
|
||||||
pytest.fail("didn't raise exception")
|
pytest.fail("didn't raise exception")
|
||||||
|
|
||||||
@@ -174,6 +143,7 @@ def test_reject_missing_dkim(cmsetup, maildata, from_addr):
|
|||||||
"encrypted.eml", from_addr=from_addr, to_addr=recipient.addr
|
"encrypted.eml", from_addr=from_addr, to_addr=recipient.addr
|
||||||
).as_string()
|
).as_string()
|
||||||
conn = smtplib.SMTP(cmsetup.maildomain, 25, timeout=10)
|
conn = smtplib.SMTP(cmsetup.maildomain, 25, timeout=10)
|
||||||
|
conn.starttls()
|
||||||
|
|
||||||
with conn as s:
|
with conn as s:
|
||||||
with pytest.raises(smtplib.SMTPDataError, match="No valid DKIM signature"):
|
with pytest.raises(smtplib.SMTPDataError, match="No valid DKIM signature"):
|
||||||
@@ -219,12 +189,14 @@ def test_exceed_rate_limit(cmsetup, gencreds, maildata, chatmail_config):
|
|||||||
mail = maildata(
|
mail = maildata(
|
||||||
"encrypted.eml", from_addr=user1.addr, to_addr=user2.addr
|
"encrypted.eml", from_addr=user1.addr, to_addr=user2.addr
|
||||||
).as_string()
|
).as_string()
|
||||||
for i in range(chatmail_config.max_user_send_per_minute + 5):
|
|
||||||
print("Sending mail", str(i))
|
start = time.time()
|
||||||
|
for i in range(chatmail_config.max_user_send_per_minute * 3):
|
||||||
|
print("Sending mail", str(i + 1), "at", time.time() - start, "s.")
|
||||||
try:
|
try:
|
||||||
user1.smtp.sendmail(user1.addr, [user2.addr], mail)
|
user1.smtp.sendmail(user1.addr, [user2.addr], mail)
|
||||||
except smtplib.SMTPException as e:
|
except smtplib.SMTPException as e:
|
||||||
if i < chatmail_config.max_user_send_per_minute:
|
if i < chatmail_config.max_user_send_burst_size:
|
||||||
pytest.fail(f"rate limit was exceeded too early with msg {i}")
|
pytest.fail(f"rate limit was exceeded too early with msg {i}")
|
||||||
outcome = e.recipients[user2.addr]
|
outcome = e.recipients[user2.addr]
|
||||||
assert outcome[0] == 450
|
assert outcome[0] == 450
|
||||||
@@ -254,8 +226,14 @@ def test_expunged(remote, chatmail_config):
|
|||||||
|
|
||||||
|
|
||||||
def test_deployed_state(remote):
|
def test_deployed_state(remote):
|
||||||
git_hash = subprocess.check_output(["git", "rev-parse", "HEAD"]).decode()
|
try:
|
||||||
git_diff = subprocess.check_output(["git", "diff"]).decode()
|
git_hash = subprocess.check_output(["git", "rev-parse", "HEAD"]).decode()
|
||||||
|
except Exception:
|
||||||
|
git_hash = "unknown\n"
|
||||||
|
try:
|
||||||
|
git_diff = subprocess.check_output(["git", "diff"]).decode()
|
||||||
|
except Exception:
|
||||||
|
git_diff = ""
|
||||||
git_status = [git_hash.strip()]
|
git_status = [git_hash.strip()]
|
||||||
for line in git_diff.splitlines():
|
for line in git_diff.splitlines():
|
||||||
git_status.append(line.strip().lower())
|
git_status.append(line.strip().lower())
|
||||||
|
|||||||
@@ -17,6 +17,7 @@ def imap_mailbox(cmfactory):
|
|||||||
password = ac1.get_config("mail_pw")
|
password = ac1.get_config("mail_pw")
|
||||||
mailbox = imap_tools.MailBox(user.split("@")[1])
|
mailbox = imap_tools.MailBox(user.split("@")[1])
|
||||||
mailbox.login(user, password)
|
mailbox.login(user, password)
|
||||||
|
mailbox.dc_ac = ac1
|
||||||
return mailbox
|
return mailbox
|
||||||
|
|
||||||
|
|
||||||
@@ -56,7 +57,7 @@ class TestEndToEndDeltaChat:
|
|||||||
"""Test that a DC account can send a message to a second DC account
|
"""Test that a DC account can send a message to a second DC account
|
||||||
on the same chat-mail instance."""
|
on the same chat-mail instance."""
|
||||||
ac1, ac2 = cmfactory.get_online_accounts(2)
|
ac1, ac2 = cmfactory.get_online_accounts(2)
|
||||||
chat = cmfactory.get_protected_chat(ac1, ac2)
|
chat = cmfactory.get_accepted_chat(ac1, ac2)
|
||||||
chat.send_text("message0")
|
chat.send_text("message0")
|
||||||
|
|
||||||
lp.sec("wait for ac2 to receive message")
|
lp.sec("wait for ac2 to receive message")
|
||||||
@@ -70,7 +71,7 @@ class TestEndToEndDeltaChat:
|
|||||||
before quota is exceeded, and thus depends on the speed of the upload.
|
before quota is exceeded, and thus depends on the speed of the upload.
|
||||||
"""
|
"""
|
||||||
ac1, ac2 = cmfactory.get_online_accounts(2)
|
ac1, ac2 = cmfactory.get_online_accounts(2)
|
||||||
chat = cmfactory.get_protected_chat(ac1, ac2)
|
chat = cmfactory.get_accepted_chat(ac1, ac2)
|
||||||
|
|
||||||
user = ac2.get_config("configured_addr")
|
user = ac2.get_config("configured_addr")
|
||||||
|
|
||||||
@@ -121,6 +122,28 @@ class TestEndToEndDeltaChat:
|
|||||||
assert ch.id >= 10
|
assert ch.id >= 10
|
||||||
ac1._evtracker.wait_securejoin_inviter_progress(1000)
|
ac1._evtracker.wait_securejoin_inviter_progress(1000)
|
||||||
|
|
||||||
|
def test_dkim_header_stripped(self, cmfactory, maildomain2, lp, imap_mailbox):
|
||||||
|
"""Test that if a DC address receives a message, it has no
|
||||||
|
DKIM-Signature and Authentication-Results headers."""
|
||||||
|
ac1 = cmfactory.new_online_configuring_account(cache=False)
|
||||||
|
cmfactory.switch_maildomain(maildomain2)
|
||||||
|
ac2 = cmfactory.new_online_configuring_account(cache=False)
|
||||||
|
cmfactory.bring_accounts_online()
|
||||||
|
chat = cmfactory.get_accepted_chat(ac1, imap_mailbox.dc_ac)
|
||||||
|
chat.send_text("message0")
|
||||||
|
chat2 = cmfactory.get_accepted_chat(ac2, imap_mailbox.dc_ac)
|
||||||
|
chat2.send_text("message1")
|
||||||
|
|
||||||
|
lp.sec("receive message with ac1...")
|
||||||
|
received = 0
|
||||||
|
while received < 2:
|
||||||
|
msgs = imap_mailbox.fetch()
|
||||||
|
for msg in msgs:
|
||||||
|
lp.sec(f"ac1 received msg from {msg.from_}")
|
||||||
|
received += 1
|
||||||
|
assert "authentication-results" not in msg.headers
|
||||||
|
assert "dkim-signature" not in msg.headers
|
||||||
|
|
||||||
def test_read_receipts_between_instances(self, cmfactory, lp, maildomain2):
|
def test_read_receipts_between_instances(self, cmfactory, lp, maildomain2):
|
||||||
ac1 = cmfactory.new_online_configuring_account(cache=False)
|
ac1 = cmfactory.new_online_configuring_account(cache=False)
|
||||||
cmfactory.switch_maildomain(maildomain2)
|
cmfactory.switch_maildomain(maildomain2)
|
||||||
@@ -153,29 +176,10 @@ def test_hide_senders_ip_address(cmfactory):
|
|||||||
assert ipaddress.ip_address(public_ip)
|
assert ipaddress.ip_address(public_ip)
|
||||||
|
|
||||||
user1, user2 = cmfactory.get_online_accounts(2)
|
user1, user2 = cmfactory.get_online_accounts(2)
|
||||||
chat = cmfactory.get_protected_chat(user1, user2)
|
chat = cmfactory.get_accepted_chat(user1, user2)
|
||||||
|
|
||||||
chat.send_text("testing submission header cleanup")
|
chat.send_text("testing submission header cleanup")
|
||||||
user2._evtracker.wait_next_incoming_message()
|
user2._evtracker.wait_next_incoming_message()
|
||||||
user2.direct_imap.select_folder("Inbox")
|
user2.direct_imap.select_folder("Inbox")
|
||||||
msg = user2.direct_imap.get_all_messages()[0]
|
msg = user2.direct_imap.get_all_messages()[0]
|
||||||
assert public_ip not in msg.obj.as_string()
|
assert public_ip not in msg.obj.as_string()
|
||||||
|
|
||||||
|
|
||||||
def test_echobot(cmfactory, chatmail_config, lp, sshdomain):
|
|
||||||
ac = cmfactory.get_online_accounts(1)[0]
|
|
||||||
|
|
||||||
# establish contact with echobot
|
|
||||||
sshexec = SSHExec(sshdomain)
|
|
||||||
command = "cat /var/lib/echobot/invite-link.txt"
|
|
||||||
echo_invite_link = sshexec(call=rshell.shell, kwargs=dict(command=command))
|
|
||||||
chat = ac.qr_setup_contact(echo_invite_link)
|
|
||||||
ac._evtracker.wait_securejoin_joiner_progress(1000)
|
|
||||||
|
|
||||||
# send message and check it gets replied back
|
|
||||||
lp.sec("Send message to echobot")
|
|
||||||
text = "hi, I hope you text me back"
|
|
||||||
chat.send_text(text)
|
|
||||||
lp.sec("Wait for reply from echobot")
|
|
||||||
reply = ac._evtracker.wait_next_incoming_message()
|
|
||||||
assert reply.text == text
|
|
||||||
|
|||||||
49
cmdeploy/src/cmdeploy/tests/online/test_3_status.py
Normal file
49
cmdeploy/src/cmdeploy/tests/online/test_3_status.py
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
import os
|
||||||
|
|
||||||
|
from cmdeploy.cmdeploy import main
|
||||||
|
|
||||||
|
|
||||||
|
def test_status_cmd(chatmail_config, capsys, request):
|
||||||
|
os.chdir(request.config.invocation_params.dir)
|
||||||
|
assert main(["status"]) == 0
|
||||||
|
status_out = capsys.readouterr()
|
||||||
|
print(status_out.out)
|
||||||
|
|
||||||
|
assert len(status_out.out.splitlines()) > 5
|
||||||
|
|
||||||
|
"""
|
||||||
|
don't test actual server state:
|
||||||
|
|
||||||
|
services = [
|
||||||
|
"acmetool-redirector",
|
||||||
|
"chatmail-metadata",
|
||||||
|
"doveauth",
|
||||||
|
"dovecot",
|
||||||
|
"fcgiwrap",
|
||||||
|
"filtermail-incoming",
|
||||||
|
"filtermail",
|
||||||
|
"lastlogin",
|
||||||
|
"nginx",
|
||||||
|
"opendkim",
|
||||||
|
"postfix@-",
|
||||||
|
"systemd-journald",
|
||||||
|
"turnserver",
|
||||||
|
"unbound",
|
||||||
|
]
|
||||||
|
not_running = []
|
||||||
|
for service in services:
|
||||||
|
active = False
|
||||||
|
for line in status_out:
|
||||||
|
if service in line:
|
||||||
|
active = True
|
||||||
|
if not "loaded" in line:
|
||||||
|
active = False
|
||||||
|
if not "active" in line:
|
||||||
|
active = False
|
||||||
|
if not "running" in line:
|
||||||
|
active = False
|
||||||
|
break
|
||||||
|
if not active:
|
||||||
|
not_running.append(service)
|
||||||
|
assert not_running == []
|
||||||
|
"""
|
||||||
@@ -337,14 +337,10 @@ class Remote:
|
|||||||
|
|
||||||
def iter_output(self, logcmd=""):
|
def iter_output(self, logcmd=""):
|
||||||
getjournal = "journalctl -f" if not logcmd else logcmd
|
getjournal = "journalctl -f" if not logcmd else logcmd
|
||||||
try:
|
self.popen = subprocess.Popen(
|
||||||
self.popen = subprocess.Popen(
|
["ssh", f"root@{self.sshdomain}", getjournal],
|
||||||
["ssh", f"root@{self.sshdomain}", getjournal],
|
stdout=subprocess.PIPE,
|
||||||
stdout=subprocess.PIPE,
|
)
|
||||||
)
|
|
||||||
except FileNotFoundError:
|
|
||||||
# inside docker container, run locally
|
|
||||||
self.popen = subprocess.Popen([getjournal], stdout=subprocess.PIPE)
|
|
||||||
while 1:
|
while 1:
|
||||||
line = self.popen.stdout.readline()
|
line = self.popen.stdout.readline()
|
||||||
res = line.decode().strip().lower()
|
res = line.decode().strip().lower()
|
||||||
|
|||||||
@@ -1,3 +1,5 @@
|
|||||||
|
from copy import deepcopy
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from cmdeploy import remote
|
from cmdeploy import remote
|
||||||
@@ -8,38 +10,65 @@ from cmdeploy.dns import check_full_zone, check_initial_remote_data
|
|||||||
def mockdns_base(monkeypatch):
|
def mockdns_base(monkeypatch):
|
||||||
qdict = {}
|
qdict = {}
|
||||||
|
|
||||||
def query_dns(typ, domain):
|
def shell(command, fail_ok=False, print=print):
|
||||||
try:
|
if command.startswith("dig"):
|
||||||
return qdict[typ][domain]
|
if command == "dig":
|
||||||
except KeyError:
|
return "."
|
||||||
return ""
|
if "SOA" in command:
|
||||||
|
return (
|
||||||
|
"delta.chat. 21600 IN SOA ns1.first-ns.de. dns.hetzner.com."
|
||||||
|
" 2025102800 14400 1800 604800 3600"
|
||||||
|
)
|
||||||
|
command_chunks = command.split()
|
||||||
|
domain, typ = command_chunks[4], command_chunks[6]
|
||||||
|
try:
|
||||||
|
return qdict[typ][domain]
|
||||||
|
except KeyError:
|
||||||
|
return ""
|
||||||
|
return remote.rshell.shell(command=command, fail_ok=fail_ok, print=print)
|
||||||
|
|
||||||
monkeypatch.setattr(remote.rdns, query_dns.__name__, query_dns)
|
monkeypatch.setattr(remote.rdns, shell.__name__, shell)
|
||||||
return qdict
|
return qdict
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def mockdns(mockdns_base):
|
def mockdns_expected():
|
||||||
mockdns_base.update(
|
return {
|
||||||
{
|
"A": {"some.domain": "1.1.1.1"},
|
||||||
"A": {"some.domain": "1.1.1.1"},
|
"AAAA": {"some.domain": "fde5:cd7a:9e1c:3240:5a99:936f:cdac:53ae"},
|
||||||
"AAAA": {"some.domain": "fde5:cd7a:9e1c:3240:5a99:936f:cdac:53ae"},
|
"CNAME": {
|
||||||
"CNAME": {
|
"mta-sts.some.domain": "some.domain.",
|
||||||
"mta-sts.some.domain": "some.domain.",
|
"www.some.domain": "some.domain.",
|
||||||
"www.some.domain": "some.domain.",
|
},
|
||||||
},
|
}
|
||||||
}
|
|
||||||
)
|
|
||||||
|
@pytest.fixture(params=["plain", "with-dns-comments"])
|
||||||
|
def mockdns(request, mockdns_base, mockdns_expected):
|
||||||
|
mockdns_base.update(deepcopy(mockdns_expected))
|
||||||
|
match request.param:
|
||||||
|
case "plain":
|
||||||
|
pass
|
||||||
|
case "with-dns-comments":
|
||||||
|
for typ, data in mockdns_base.items():
|
||||||
|
for host, result in data.items():
|
||||||
|
mockdns_base[typ][host] = (
|
||||||
|
";; some unsuccessful attempt result\n"
|
||||||
|
"; and another with a single semicolon\n"
|
||||||
|
f"{result}"
|
||||||
|
)
|
||||||
return mockdns_base
|
return mockdns_base
|
||||||
|
|
||||||
|
|
||||||
class TestPerformInitialChecks:
|
class TestPerformInitialChecks:
|
||||||
def test_perform_initial_checks_ok1(self, mockdns):
|
def test_perform_initial_checks_ok1(self, mockdns, mockdns_expected):
|
||||||
remote_data = remote.rdns.perform_initial_checks("some.domain")
|
remote_data = remote.rdns.perform_initial_checks("some.domain")
|
||||||
assert remote_data["A"] == mockdns["A"]["some.domain"]
|
assert remote_data["A"] == mockdns_expected["A"]["some.domain"]
|
||||||
assert remote_data["AAAA"] == mockdns["AAAA"]["some.domain"]
|
assert remote_data["AAAA"] == mockdns_expected["AAAA"]["some.domain"]
|
||||||
assert remote_data["MTA_STS"] == mockdns["CNAME"]["mta-sts.some.domain"]
|
assert (
|
||||||
assert remote_data["WWW"] == mockdns["CNAME"]["www.some.domain"]
|
remote_data["MTA_STS"] == mockdns_expected["CNAME"]["mta-sts.some.domain"]
|
||||||
|
)
|
||||||
|
assert remote_data["WWW"] == mockdns_expected["CNAME"]["www.some.domain"]
|
||||||
|
|
||||||
@pytest.mark.parametrize("drop", ["A", "AAAA"])
|
@pytest.mark.parametrize("drop", ["A", "AAAA"])
|
||||||
def test_perform_initial_checks_with_one_of_A_AAAA(self, mockdns, drop):
|
def test_perform_initial_checks_with_one_of_A_AAAA(self, mockdns, drop):
|
||||||
|
|||||||
4
cmdeploy/src/cmdeploy/unbound/unbound.conf.j2
Normal file
4
cmdeploy/src/cmdeploy/unbound/unbound.conf.j2
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
# Managed by cmdeploy: disable IPv6 in unbound.
|
||||||
|
server:
|
||||||
|
interface: 127.0.0.1
|
||||||
|
do-ip6: no
|
||||||
@@ -1,5 +1,6 @@
|
|||||||
import hashlib
|
import hashlib
|
||||||
import importlib.resources
|
import importlib.resources
|
||||||
|
import re
|
||||||
import time
|
import time
|
||||||
import traceback
|
import traceback
|
||||||
import webbrowser
|
import webbrowser
|
||||||
@@ -11,6 +12,10 @@ from jinja2 import Template
|
|||||||
|
|
||||||
from .genqr import gen_qr_png_data
|
from .genqr import gen_qr_png_data
|
||||||
|
|
||||||
|
_MERGE_CONFLICT_RE = re.compile(
|
||||||
|
r"^<<<<<<<.+^=======.+^>>>>>>>", re.DOTALL | re.MULTILINE
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def snapshot_dir_stats(somedir):
|
def snapshot_dir_stats(somedir):
|
||||||
d = {}
|
d = {}
|
||||||
@@ -116,6 +121,17 @@ def _build_webpages(src_dir, build_dir, config):
|
|||||||
return build_dir
|
return build_dir
|
||||||
|
|
||||||
|
|
||||||
|
def find_merge_conflict(src_dir) -> Path:
|
||||||
|
assert src_dir.exists(), src_dir
|
||||||
|
result = None
|
||||||
|
for path in src_dir.iterdir():
|
||||||
|
if path.suffix in [".css", ".html", ".md"]:
|
||||||
|
if _MERGE_CONFLICT_RE.search(path.read_text()):
|
||||||
|
result = path
|
||||||
|
break
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
path = importlib.resources.files(__package__)
|
path = importlib.resources.files(__package__)
|
||||||
reporoot = path.joinpath("../../../").resolve()
|
reporoot = path.joinpath("../../../").resolve()
|
||||||
@@ -124,34 +140,34 @@ def main():
|
|||||||
config.webdev = True
|
config.webdev = True
|
||||||
assert config.mail_domain
|
assert config.mail_domain
|
||||||
|
|
||||||
# start web page generation, open a browser and wait for changes
|
|
||||||
www_path, src_path, build_dir = get_paths(config)
|
www_path, src_path, build_dir = get_paths(config)
|
||||||
build_dir = build_webpages(src_path, build_dir, config)
|
build_dir = build_webpages(src_path, build_dir, config)
|
||||||
index_path = build_dir.joinpath("index.html")
|
index_path = build_dir.joinpath("index.html")
|
||||||
webbrowser.open(str(index_path))
|
webbrowser.open(str(index_path))
|
||||||
stats = snapshot_dir_stats(src_path)
|
|
||||||
print(f"\nOpened URL: file://{index_path.resolve()}\n")
|
print(f"\nOpened URL: file://{index_path.resolve()}\n")
|
||||||
print(f"watching {src_path} directory for changes")
|
print(f"Watching {src_path} directory for changes...")
|
||||||
|
|
||||||
|
stats = snapshot_dir_stats(src_path)
|
||||||
changenum = 0
|
changenum = 0
|
||||||
count = 0
|
debounce_time = 0.5 # wait 0.5s after detecting a change
|
||||||
|
|
||||||
while True:
|
while True:
|
||||||
|
time.sleep(1)
|
||||||
newstats = snapshot_dir_stats(src_path)
|
newstats = snapshot_dir_stats(src_path)
|
||||||
if newstats == stats and count % 60 != 0:
|
|
||||||
count += 1
|
|
||||||
time.sleep(1.0)
|
|
||||||
continue
|
|
||||||
|
|
||||||
for key in newstats:
|
if newstats != stats:
|
||||||
if stats[key] != newstats[key]:
|
changed_files = [f for f in newstats if stats.get(f) != newstats[f]]
|
||||||
print(f"*** CHANGED: {key}")
|
for f in changed_files:
|
||||||
changenum += 1
|
print(f"*** CHANGED: {f}")
|
||||||
|
|
||||||
stats = newstats
|
stats = newstats
|
||||||
build_webpages(src_path, build_dir, config)
|
changenum += 1
|
||||||
print(f"[{changenum}] regenerated web pages at: {index_path}")
|
build_webpages(src_path, build_dir, config)
|
||||||
print(f"URL: file://{index_path.resolve()}\n\n")
|
print(f"[{changenum}] regenerated web pages at: {index_path}")
|
||||||
count = 0
|
print(f"URL: file://{index_path.resolve()}\n\n")
|
||||||
|
|
||||||
|
time.sleep(debounce_time) # simple debounce
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
|||||||
24
doc/Makefile
Normal file
24
doc/Makefile
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
# Minimal makefile for Sphinx documentation
|
||||||
|
#
|
||||||
|
|
||||||
|
# You can set these variables from the command line, and also
|
||||||
|
# from the environment for the first two.
|
||||||
|
SPHINXOPTS ?=
|
||||||
|
SPHINXBUILD ?= sphinx-build
|
||||||
|
SOURCEDIR = source
|
||||||
|
BUILDDIR = build
|
||||||
|
|
||||||
|
# Put it first so that "make" without argument is like "make help".
|
||||||
|
help:
|
||||||
|
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||||
|
|
||||||
|
auto:
|
||||||
|
sphinx-autobuild "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||||
|
|
||||||
|
.PHONY: help Makefile auto
|
||||||
|
|
||||||
|
# Catch-all target: route all unknown targets to Sphinx using the new
|
||||||
|
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
|
||||||
|
%: Makefile
|
||||||
|
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||||
|
|
||||||
17
doc/README.md
Normal file
17
doc/README.md
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
|
||||||
|
|
||||||
|
## Building the documentation
|
||||||
|
|
||||||
|
You can use the `make` command and `make html` to build web pages.
|
||||||
|
|
||||||
|
You need a Python environment where the following install was excuted:
|
||||||
|
|
||||||
|
pip install furo sphinx-autobuild
|
||||||
|
|
||||||
|
To develop/change documentation, you can then do:
|
||||||
|
|
||||||
|
make auto
|
||||||
|
|
||||||
|
A page will open at https://127.0.0.1:8000/ serving the docs and it will
|
||||||
|
react to changes to source files pretty fast.
|
||||||
|
|
||||||
1
doc/source/_static/chatmail.svg
Normal file
1
doc/source/_static/chatmail.svg
Normal file
@@ -0,0 +1 @@
|
|||||||
|
<svg xmlns="http://www.w3.org/2000/svg" xmlns:svg="http://www.w3.org/2000/svg" id="svg4" width="145" height="145" version="1.1"><g id="text2" aria-label="@" style="font-size:144px;font-family:Arial" transform="matrix(1.0934997,0,0,1.0934997,-6.7787266,-6.7787281)"><path id="path347" d="m 79.927878,94.422406 c -2.704286,3.120332 -5.741407,5.637394 -9.111364,7.551194 -3.328352,1.87221 -6.677506,2.80831 -10.047463,2.80831 -3.702792,0 -7.301573,-1.08172 -10.796342,-3.24515 -3.49477,-2.163426 -6.344671,-5.491779 -8.549704,-9.985058 -2.163429,-4.493275 -3.245144,-9.423397 -3.245144,-14.790365 0,-6.615099 1.684978,-13.230199 5.054935,-19.845299 3.411561,-6.656705 7.634407,-11.649233 12.66854,-14.977585 5.034133,-3.328352 9.92265,-4.992528 14.665552,-4.992528 3.619583,0 7.072748,0.956901 10.359496,2.870704 3.286748,1.872198 6.115847,4.742902 8.487297,8.612111 l 2.121825,-9.673023 h 11.170784 l -8.986557,41.87483 c -1.248129,5.824616 -1.872194,9.048957 -1.872194,9.673023 0,1.123319 0.416044,2.101022 1.248132,2.93311 0.873692,0.790484 1.913802,1.185726 3.120332,1.185726 2.20503,0 5.096537,-1.268934 8.674517,-3.806803 4.7429,-3.328352 8.4873,-7.780023 11.23319,-13.355013 2.78749,-5.616594 4.18124,-11.399606 4.18124,-17.349035 0,-6.947935 -1.78899,-13.438222 -5.36697,-19.47086 -3.53637,-6.032638 -8.84094,-10.858749 -15.913687,-14.478332 -7.03114,-3.619583 -14.811161,-5.429374 -23.340064,-5.429374 -9.73543,0 -18.638772,2.288242 -26.710026,6.864726 -8.029649,4.534879 -14.27031,11.06677 -18.721981,19.595673 -4.410066,8.487298 -6.615099,17.598662 -6.615099,27.334092 0,10.193078 2.205033,18.971607 6.615099,26.33559 2.290454,3.78888 -7.136335,18.96983 -3.810585,21.73443 3.138096,2.60861 18.971963,-7.14297 23.031819,-5.44631 8.404089,3.53637 17.702673,5.30456 27.895752,5.30456 10.90035,0 20.032515,-1.83059 27.396492,-5.49178 7.36399,-3.66119 12.87657,-8.11286 16.53776,-13.35501 l 9.29559,4 c -2.12183,4.36846 -3.76221,4.82013 -8.92116,9.35501 -5.15895,4.53488 -11.2956,8.11286 
-18.40995,10.73393 -7.114346,2.66268 -15.684851,3.99402 -25.711512,3.99402 -9.236177,0 -17.76508,-1.18572 -25.586707,-3.55717 -7.780023,-2.37145 -29.296198,9.26152 -34.78798,4.47701 -5.49178,-4.7429 5.248856,-25.42482 2.461361,-31.62388 -3.49477,-7.863231 -5.242155,-16.350531 -5.242155,-25.461894 0,-10.151474 2.08022,-19.824498 6.240661,-29.019071 5.075736,-11.274793 12.273297,-19.907706 21.592683,-25.898739 9.360991,-5.991034 20.69819,-8.986551 34.011599,-8.986551 10.317891,0 19.574873,2.121824 27.77093,6.365473 8.23767,4.202045 14.72796,10.484309 19.47086,18.846794 4.03563,7.197561 6.05344,15.019189 6.05344,23.464883 0,12.065277 -4.24365,22.77841 -12.73094,32.1394 -7.572,8.404095 -15.85128,12.606135 -24.837827,12.606135 -2.870704,0 -5.200551,-0.43684 -6.98954,-1.31053 -1.747385,-0.8737 -3.037121,-2.12183 -3.869209,-3.744402 -0.540857,-1.040114 -0.936099,-2.829105 -1.185726,-5.366972 z M 49.723082,77.510217 c 0,5.699803 1.352143,10.130671 4.05643,13.292606 2.704286,3.161935 5.803814,4.742902 9.298583,4.742902 2.329847,0 4.784506,-0.686473 7.363979,-2.059418 2.579473,-1.41455 5.034133,-3.49477 7.363979,-6.240661 2.371451,-2.74589 4.306056,-6.219857 5.803815,-10.421902 1.497759,-4.243649 2.246638,-8.487298 2.246638,-12.730947 0,-5.658198 -1.41455,-10.047462 -4.243649,-13.167793 -2.787495,-3.12033 -6.199056,-4.680495 -10.234683,-4.680495 -2.662682,0 -5.179749,0.686473 -7.5512,2.059418 -2.329846,1.331341 -4.597286,3.494769 -6.802319,6.490286 -2.205033,2.995517 -3.97322,6.635903 -5.304561,10.921156 -1.331341,4.285253 -1.997012,8.216869 -1.997012,11.794848 z" style="stroke-width:.887561"/></g></svg>
|
||||||
|
After Width: | Height: | Size: 3.5 KiB |
21
doc/source/_static/custom.css
Normal file
21
doc/source/_static/custom.css
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
/* Tweak how the sidebar logo is presented */
|
||||||
|
.sidebar-logo {
|
||||||
|
width: 70%;
|
||||||
|
}
|
||||||
|
.sidebar-brand {
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* The landing pages' sidebar-in-content highlights */
|
||||||
|
#features ul {
|
||||||
|
padding-left: 1rem;
|
||||||
|
list-style: none;
|
||||||
|
}
|
||||||
|
#features ul li {
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
@media (min-width: 46em) {
|
||||||
|
#features {
|
||||||
|
width: 50%;
|
||||||
|
}
|
||||||
|
}
|
||||||
41
doc/source/conf.py
Normal file
41
doc/source/conf.py
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
# Configuration file for the Sphinx documentation builder.
|
||||||
|
#
|
||||||
|
# For the full list of built-in configuration values, see the documentation:
|
||||||
|
# https://www.sphinx-doc.org/en/master/usage/configuration.html
|
||||||
|
|
||||||
|
# -- Project information -----------------------------------------------------
|
||||||
|
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
|
||||||
|
|
||||||
|
project = 'chatmail relay documentation'
|
||||||
|
copyright = '2025, chatmail collective'
|
||||||
|
author = 'chatmail collective'
|
||||||
|
|
||||||
|
# -- General configuration ---------------------------------------------------
|
||||||
|
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
|
||||||
|
|
||||||
|
extensions = [
|
||||||
|
#'sphinx.ext.autodoc',
|
||||||
|
#'sphinx.ext.viewdoc',
|
||||||
|
'sphinxcontrib.mermaid',
|
||||||
|
]
|
||||||
|
|
||||||
|
templates_path = ['_templates']
|
||||||
|
exclude_patterns = []
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# -- Options for HTML output -------------------------------------------------
|
||||||
|
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
|
||||||
|
|
||||||
|
html_theme = 'furo'
|
||||||
|
html_static_path = ['_static']
|
||||||
|
html_css_files = [
|
||||||
|
"custom.css",
|
||||||
|
]
|
||||||
|
|
||||||
|
html_title = "chatmail relay documentation"
|
||||||
|
#html_short_title = f"chatmail-{release}"
|
||||||
|
|
||||||
|
html_logo = "_static/chatmail.svg"
|
||||||
|
|
||||||
|
|
||||||
61
doc/source/faq.rst
Normal file
61
doc/source/faq.rst
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
|
||||||
|
|
||||||
|
Frequently asked questions
|
||||||
|
===========================
|
||||||
|
|
||||||
|
What is the difference between chatmail relays and classic email servers?
|
||||||
|
--------------------------------------------------------------------------
|
||||||
|
|
||||||
|
A chatmail relay is a minimal Mail Transport Agent (MTA) setup that
|
||||||
|
goes beyond what classic email servers offer:
|
||||||
|
|
||||||
|
- **Zero State:** no private data or metadata collected, messages are auto-deleted, low disk usage
|
||||||
|
|
||||||
|
- **Instant/Realtime:** sub-second message delivery, realtime P2P
|
||||||
|
streaming, privacy-preserving Push Notifications for Apple, Google, and `Ubuntu Touch <https://docs.ubports.com/en/latest/appdev/guides/pushnotifications.html>`_;
|
||||||
|
|
||||||
|
- **Security Enforcement**: only strict TLS, DKIM and OpenPGP with minimized metadata accepted
|
||||||
|
|
||||||
|
- **Reliable Federation and Decentralization:** No spam or IP reputation checks, federating
|
||||||
|
depends on established IETF standards and protocols.
|
||||||
|
|
||||||
|
|
||||||
|
How about interoperability with classic email servers?
|
||||||
|
-------------------------------------------------------
|
||||||
|
|
||||||
|
Generally, chatmail relays interoperate well with classic email servers.
|
||||||
|
However, some chatmail relays may be blocked by Big-Tech email
|
||||||
|
providers that use intransparent and proprietary techniques for scanning
|
||||||
|
and looking at cleartext email messages between users, or because they
|
||||||
|
use questionable IP-reputation systems that break interoperability.
|
||||||
|
|
||||||
|
**Chatmail relays instead use and require strong cryptography, allowing
|
||||||
|
anyone to participate, without having to submit to Big-Tech
|
||||||
|
restrictions.**
|
||||||
|
|
||||||
|
.. _selfhosted:
|
||||||
|
|
||||||
|
How are chatmail relays run? Can I run one myself?
|
||||||
|
--------------------------------------------------
|
||||||
|
|
||||||
|
Chatmail relays are designed to be very cheap to run, and are generally
|
||||||
|
self-funded by respective operators. All chatmail relays are
|
||||||
|
automatically deployed and updated using `the chatmail relay
|
||||||
|
repository <https://github.com/chatmail/relay>`__. Chatmail relays are
|
||||||
|
composed of proven standard email server components, Postfix and
|
||||||
|
Dovecot, and are configured to run unattended without much maintenance
|
||||||
|
effort. Chatmail relays happily run on low-end hardware like a Raspberry
|
||||||
|
Pi.
|
||||||
|
|
||||||
|
|
||||||
|
How trustable are chatmail relays?
|
||||||
|
----------------------------------
|
||||||
|
|
||||||
|
Chatmail relays enforce end-to-end encryption,
|
||||||
|
and chatmail clients like `Delta Chat <https://delta.chat>`_
|
||||||
|
enforce end-to-end encryption on their own.
|
||||||
|
|
||||||
|
The end-to-end encryption protection includes attached media, user
|
||||||
|
display names, avatars and group names. What is visible to operators is:
|
||||||
|
message date, sender and receiver addresses.
|
||||||
|
Please see the `Delta Chat FAQ on encryption and security <https://delta.chat/en/help#e2ee>`_ for further info.
|
||||||
191
doc/source/getting_started.rst
Normal file
191
doc/source/getting_started.rst
Normal file
@@ -0,0 +1,191 @@
|
|||||||
|
Setting up a chatmail relay
|
||||||
|
===========================
|
||||||
|
|
||||||
|
This section contains everything needed to setup a ready-to-use chatmail relay.
|
||||||
|
The automated setup is designed and optimized for providing chatmail
|
||||||
|
addresses for immediate permission-free onboarding through chat apps and bots.
|
||||||
|
Chatmail addresses are automatically created at first login,
|
||||||
|
after which the initially specified password is required
|
||||||
|
for sending and receiving messages through them.
|
||||||
|
|
||||||
|
|
||||||
|
Minimal requirements and prerequisites
|
||||||
|
--------------------------------------
|
||||||
|
|
||||||
|
You will need the following:
|
||||||
|
|
||||||
|
- Control over a domain through a DNS provider of your choice.
|
||||||
|
|
||||||
|
- A Debian 12 **deployment server** with reachable SMTP/SUBMISSIONS/IMAPS/HTTPS ports.
|
||||||
|
IPv6 is encouraged if available. Chatmail relay servers only require
|
||||||
|
1GB RAM, one CPU, and perhaps 10GB storage for a few thousand active
|
||||||
|
chatmail addresses.
|
||||||
|
|
||||||
|
- A Linux or Unix **build machine** with key-based SSH access to the root
|
||||||
|
user of the deployment server.
|
||||||
|
You must add a passphrase-protected private key to your local ssh-agent because you
|
||||||
|
can’t type in your passphrase during deployment.
|
||||||
|
(An ed25519 private key is required due to an `upstream bug in
|
||||||
|
paramiko <https://github.com/paramiko/paramiko/issues/2191>`_)
|
||||||
|
|
||||||
|
|
||||||
|
Setup with ``scripts/cmdeploy``
|
||||||
|
-------------------------------------
|
||||||
|
|
||||||
|
We use ``chat.example.org`` as the chatmail domain in the following
|
||||||
|
steps. Please substitute it with your own domain.
|
||||||
|
|
||||||
|
1. Setup the initial DNS records for your deployment server.
|
||||||
|
The following is an example in the
|
||||||
|
familiar BIND zone file format with a TTL of 1 hour (3600 seconds).
|
||||||
|
Please substitute your domain and IP addresses.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
chat.example.org. 3600 IN A 198.51.100.5
|
||||||
|
chat.example.org. 3600 IN AAAA 2001:db8::5
|
||||||
|
www.chat.example.org. 3600 IN CNAME chat.example.org.
|
||||||
|
mta-sts.chat.example.org. 3600 IN CNAME chat.example.org.
|
||||||
|
|
||||||
|
2. On your local PC, clone the repository and bootstrap the Python
|
||||||
|
virtualenv.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
git clone https://github.com/chatmail/relay
|
||||||
|
cd relay
|
||||||
|
scripts/initenv.sh
|
||||||
|
|
||||||
|
3. On your local build machine (PC), create a chatmail configuration file
|
||||||
|
``chatmail.ini``:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
scripts/cmdeploy init chat.example.org # <-- use your domain
|
||||||
|
|
||||||
|
4. Verify that SSH root login to the deployment server server works:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
ssh root@chat.example.org # <-- use your domain
|
||||||
|
|
||||||
|
5. From your local build machine, setup and configure the remote deployment server:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
scripts/cmdeploy run
|
||||||
|
|
||||||
|
This script will also check that you have all necessary DNS records.
|
||||||
|
If DNS records are missing, it will recommend which you should
|
||||||
|
configure at your DNS provider (it can take some time until they are
|
||||||
|
public).
|
||||||
|
|
||||||
|
Other helpful commands
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
To check the status of your deployment server running the chatmail service:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
scripts/cmdeploy status
|
||||||
|
|
||||||
|
To display and check all recommended DNS records:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
scripts/cmdeploy dns
|
||||||
|
|
||||||
|
To test whether your chatmail service is working correctly:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
scripts/cmdeploy test
|
||||||
|
|
||||||
|
To measure the performance of your chatmail service:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
scripts/cmdeploy bench
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
Modifying the home page
|
||||||
|
-----------------------
|
||||||
|
|
||||||
|
``cmdeploy run`` also creates default static web pages and deploys them
|
||||||
|
to a Nginx web server with:
|
||||||
|
|
||||||
|
- a default ``index.html`` along with a QR code that users can click to
|
||||||
|
create an address on your chatmail relay
|
||||||
|
|
||||||
|
- a default ``info.html`` that is linked from the home page
|
||||||
|
|
||||||
|
- a default ``policy.html`` that is linked from the home page
|
||||||
|
|
||||||
|
All ``.html`` files are generated by the according markdown ``.md`` file
|
||||||
|
in the ``www/src`` directory.
|
||||||
|
|
||||||
|
Refining the web pages
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
scripts/cmdeploy webdev
|
||||||
|
|
||||||
|
This starts a local live development cycle for chatmail web pages:
|
||||||
|
|
||||||
|
- uses the ``www/src/page-layout.html`` file for producing static HTML
|
||||||
|
pages from ``www/src/*.md`` files
|
||||||
|
|
||||||
|
- continously builds the web presence reading files from ``www/src``
|
||||||
|
directory and generating HTML files and copying assets to the
|
||||||
|
``www/build`` directory.
|
||||||
|
|
||||||
|
- Starts a browser window automatically where you can “refresh” as
|
||||||
|
needed.
|
||||||
|
|
||||||
|
Custom web pages
|
||||||
|
----------------
|
||||||
|
|
||||||
|
You can skip uploading a web page by setting ``www_folder=disabled`` in
|
||||||
|
``chatmail.ini``.
|
||||||
|
|
||||||
|
If you want to manage your web pages outside this git repository, you
|
||||||
|
can set ``www_folder`` in ``chatmail.ini`` to a custom directory on your
|
||||||
|
computer. ``cmdeploy run`` will upload it as the server’s home page, and
|
||||||
|
if it contains a ``src/index.md`` file, will build it with hugo.
|
||||||
|
|
||||||
|
|
||||||
|
Disable automatic address creation
|
||||||
|
--------------------------------------------------------
|
||||||
|
|
||||||
|
If you need to stop address creation, e.g. because some script is wildly
|
||||||
|
creating addresses, login with ssh to the deployment machine and run:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
touch /etc/chatmail-nocreate
|
||||||
|
|
||||||
|
Chatmail address creation will be denied while this file is present.
|
||||||
|
|
||||||
|
|
||||||
|
Migrating to a new build machine
|
||||||
|
----------------------------------
|
||||||
|
|
||||||
|
To move or add a build machine,
|
||||||
|
clone the relay repository on the new build machine, and copy the ``chatmail.ini`` file from the old build machine.
|
||||||
|
Make sure ``rsync`` is installed, then initialize the environment:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
./scripts/initenv.sh
|
||||||
|
|
||||||
|
Run safety checks before a new deployment:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
./scripts/cmdeploy dns
|
||||||
|
./scripts/cmdeploy status
|
||||||
|
|
||||||
|
If you keep multiple build machines (ie laptop and desktop), keep ``chatmail.ini`` in sync between
|
||||||
|
them.
|
||||||
20
doc/source/index.rst
Normal file
20
doc/source/index.rst
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
*******************************************
|
||||||
|
chatmail relay documentation
|
||||||
|
*******************************************
|
||||||
|
|
||||||
|
.. image:: ../../www/src/collage-top.png
|
||||||
|
:target: https://testrun.org
|
||||||
|
|
||||||
|
This documentation details how to setup, maintain and understand `chatmail <https://chatmail.at>`_ relays.
|
||||||
|
|
||||||
|
Contributions and feedback welcome through the https://github.com/chatmail/relay repository.
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 5
|
||||||
|
|
||||||
|
getting_started
|
||||||
|
proxy
|
||||||
|
migrate
|
||||||
|
overview
|
||||||
|
related
|
||||||
|
faq
|
||||||
98
doc/source/migrate.rst
Normal file
98
doc/source/migrate.rst
Normal file
@@ -0,0 +1,98 @@
|
|||||||
|
|
||||||
|
Migrating to a new machine
|
||||||
|
===========================
|
||||||
|
|
||||||
|
This migration tutorial provides a step-wise approach
|
||||||
|
to safely migrate a chatmail relay from one remote machine to another.
|
||||||
|
|
||||||
|
Preliminary notes and assumptions
|
||||||
|
---------------------------------
|
||||||
|
|
||||||
|
- If the migration is a planned move,
|
||||||
|
it's recommended to lower the Time To Live (TTL) of your DNS records to a value such as 300 (5 minutes),
|
||||||
|
at best much earlier than the actual planned migration.
|
||||||
|
This speeds up propagation of DNS changes in the Internet after the migration is complete.
|
||||||
|
|
||||||
|
- The migration steps were tested with a Linux laptop; you might need to adjust some of the steps to your local environment.
|
||||||
|
|
||||||
|
- Your ``mail_domain`` is ``mail.example.org``.
|
||||||
|
|
||||||
|
- All remote machines run Debian 12.
|
||||||
|
|
||||||
|
- The old site’s IP version 4 address is ``$OLD_IP4``.
|
||||||
|
|
||||||
|
- The new site’s IP addresses are ``$NEW_IP4`` and ``$NEW_IPV6``.
|
||||||
|
|
||||||
|
|
||||||
|
The six steps to migrate
|
||||||
|
------------------------
|
||||||
|
|
||||||
|
Note that during some of the following steps you might get a warning about changed SSH Host keys;
|
||||||
|
in this case, just run ``ssh-keygen -R "mail.example.org"`` as recommended.
|
||||||
|
|
||||||
|
|
||||||
|
1. **Initially transfer mailboxes from old to new site.**
|
||||||
|
|
||||||
|
Login to old site, forwarding your ssh-agent with ``ssh -A``
|
||||||
|
to allow using ssh to directly copy files from old to new site.
|
||||||
|
::
|
||||||
|
|
||||||
|
ssh -A root@$OLD_IP4
|
||||||
|
tar c /home/vmail/mail | ssh root@$NEW_IP4 "tar x -C /"
|
||||||
|
|
||||||
|
|
||||||
|
2. **Pre-configure the new site but keep it inactive until step 6**
|
||||||
|
::
|
||||||
|
|
||||||
|
CMDEPLOY_STAGES=install,configure scripts/cmdeploy run --ssh-host $NEW_IP4
|
||||||
|
|
||||||
|
|
||||||
|
3. **It's getting serious: disable mail services on the old site.**
|
||||||
|
Users will not be able to send or receive messages until all steps are completed.
|
||||||
|
Other relays and mail servers will retry delivering messages from time to time,
|
||||||
|
so nothing is lost for users.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
scripts/cmdeploy run --disable-mail --ssh-host $OLD_IP4
|
||||||
|
|
||||||
|
|
||||||
|
4. **Final synchronization of TLS/DKIM secrets, mail queues and mailboxes.**
|
||||||
|
Again we use ssh-agent forwarding (``-A``) to allow transfering all important data directly
|
||||||
|
from the old to the new site.
|
||||||
|
::
|
||||||
|
|
||||||
|
ssh -A root@$OLD_IP4
|
||||||
|
tar c /var/lib/acme /etc/dkimkeys /var/spool/postfix | ssh root@$NEW_IP4 "tar x -C /"
|
||||||
|
rsync -azH /home/vmail/mail root@$NEW_IP4:/home/vmail/
|
||||||
|
|
||||||
|
Login to the new site and ensure file ownerships are correctly set:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
ssh root@$NEW_IP4
|
||||||
|
chown root: -R /var/lib/acme
|
||||||
|
chown opendkim: -R /etc/dkimkeys
|
||||||
|
chown vmail: -R /home/vmail/mail
|
||||||
|
|
||||||
|
|
||||||
|
5. **Update the DNS entries to point to the new site.**
|
||||||
|
You only need to change the ``A`` and ``AAAA`` records, for example:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
mail.example.org. IN A $NEW_IP4
|
||||||
|
mail.example.org. IN AAAA $NEW_IP6
|
||||||
|
|
||||||
|
|
||||||
|
6. **Activate chatmail relay on new site.**
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
CMDEPLOY_STAGES=activate scripts/cmdeploy run --ssh-host $NEW_IP4
|
||||||
|
|
||||||
|
Voilà!
|
||||||
|
Users will be able to use the relay as soon as the DNS changes have propagated.
|
||||||
|
If you have lowered the Time-to-Live for DNS records in step 1,
|
||||||
|
better use a higher value again (between 14400 and 86400 seconds) once you are sure everything works.
|
||||||
|
|
||||||
370
doc/source/overview.rst
Normal file
370
doc/source/overview.rst
Normal file
@@ -0,0 +1,370 @@
|
|||||||
|
|
||||||
|
Technical overview
|
||||||
|
======================
|
||||||
|
|
||||||
|
|
||||||
|
Directories of the relay repository
|
||||||
|
-----------------------------------
|
||||||
|
|
||||||
|
The `chatmail relay repository <https://github.com/chatmail/relay/tree/main/>`_
|
||||||
|
has four main directories.
|
||||||
|
|
||||||
|
``scripts/``
|
||||||
|
~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
`scripts <https://github.com/chatmail/relay/tree/main/scripts>`_
|
||||||
|
offers two convenience tools for beginners:
|
||||||
|
|
||||||
|
- ``initenv.sh`` installs a local virtualenv Python environment and
|
||||||
|
installs necessary dependencies
|
||||||
|
|
||||||
|
- ``scripts/cmdeploy`` script enables you to run the ``cmdeploy``
|
||||||
|
command line tool in the local Python virtual environment.
|
||||||
|
|
||||||
|
|
||||||
|
``cmdeploy/``
|
||||||
|
~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
The ``cmdeploy`` directory contains the Python package and command line tool
|
||||||
|
to setup a chatmail relay remotely via SSH:
|
||||||
|
|
||||||
|
- ``cmdeploy init`` creates the ``chatmail.ini`` config file locally.
|
||||||
|
|
||||||
|
- ``cmdeploy run`` under the hood uses pyinfra_
|
||||||
|
to automatically install or upgrade all chatmail components on a relay,
|
||||||
|
according to the local ``chatmail.ini`` config.
|
||||||
|
|
||||||
|
The deployed system components of a chatmail relay are:
|
||||||
|
|
||||||
|
- Postfix_ is the Mail Transport Agent (MTA) and
|
||||||
|
accepts messages from, and sends messages to, the wider email MTA network
|
||||||
|
|
||||||
|
- Dovecot_ is the Mail Delivery Agent (MDA) and
|
||||||
|
stores messages for users until they download them
|
||||||
|
|
||||||
|
- `filtermail <https://github.com/chatmail/filtermail>`_
|
||||||
|
prevents unencrypted email from leaving or entering the chatmail
|
||||||
|
service and is integrated into Postfix’s outbound and inbound mail
|
||||||
|
pipelines.
|
||||||
|
|
||||||
|
- Nginx_ shows the web page with privacy policy and additional information
|
||||||
|
|
||||||
|
- `acmetool <https://hlandau.github.io/acmetool/>`_ manages TLS
|
||||||
|
certificates for Dovecot, Postfix, and Nginx
|
||||||
|
|
||||||
|
- `OpenDKIM <http://www.opendkim.org/>`_ for signing messages with
|
||||||
|
DKIM and rejecting inbound messages without DKIM
|
||||||
|
|
||||||
|
- `mtail <https://google.github.io/mtail/>`_ for collecting anonymized
|
||||||
|
metrics in case you have monitoring
|
||||||
|
|
||||||
|
- `Iroh relay <https://www.iroh.computer/docs/concepts/relay>`_ which
|
||||||
|
helps client devices to establish Peer-to-Peer connections
|
||||||
|
|
||||||
|
- `TURN <https://github.com/chatmail/chatmail-turn>`_ to enable relay
|
||||||
|
users to start webRTC calls even if a p2p connection can’t be
|
||||||
|
established
|
||||||
|
|
||||||
|
- and the chatmaild services, explained in the next section:
|
||||||
|
|
||||||
|
|
||||||
|
``chatmaild/``
|
||||||
|
~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
`chatmaild <https://github.com/chatmail/relay/tree/main/chatmaild>`_
|
||||||
|
is a Python package containing several small services which handle
|
||||||
|
authentication, trigger push notifications on new messages, ensure
|
||||||
|
that outbound mails are encrypted, delete inactive users, and some
|
||||||
|
other minor things. chatmaild can also be installed as a stand-alone
|
||||||
|
Python package.
|
||||||
|
|
||||||
|
``chatmaild`` implements various systemd-controlled services
|
||||||
|
that integrate with Dovecot and Postfix to achieve instant-onboarding
|
||||||
|
and only relaying OpenPGP end-to-end messages encrypted messages. A
|
||||||
|
short overview of ``chatmaild`` services:
|
||||||
|
|
||||||
|
- `doveauth <https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/doveauth.py>`_
|
||||||
|
implements create-on-login address semantics and is used by Dovecot
|
||||||
|
during IMAP login and by Postfix during SMTP/SUBMISSION login which
|
||||||
|
in turn uses `Dovecot SASL
|
||||||
|
<https://doc.dovecot.org/2.3/configuration_manual/authentication/dict/#complete-example-for-authenticating-via-a-unix-socket>`_
|
||||||
|
to authenticate logins.
|
||||||
|
|
||||||
|
- `chatmail-metadata <https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/metadata.py>`_
|
||||||
|
is contacted by a `Dovecot lua
|
||||||
|
script <https://github.com/chatmail/relay/blob/main/cmdeploy/src/cmdeploy/dovecot/push_notification.lua>`_
|
||||||
|
to store user-specific relay-side config. On new messages, it `passes
|
||||||
|
the user’s push notification
|
||||||
|
token <https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/notifier.py>`_
|
||||||
|
to
|
||||||
|
`notifications.delta.chat <https://delta.chat/en/help#instant-delivery>`_
|
||||||
|
so the push notifications on the user’s phone can be triggered by
|
||||||
|
Apple/Google/Huawei.
|
||||||
|
|
||||||
|
- `chatmail-expire <https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/expire.py>`_
|
||||||
|
deletes users if they have not logged in for a longer while.
|
||||||
|
The timeframe can be configured in ``chatmail.ini``.
|
||||||
|
|
||||||
|
- `lastlogin <https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/lastlogin.py>`_
|
||||||
|
is contacted by Dovecot when a user logs in and stores the date of
|
||||||
|
the login.
|
||||||
|
|
||||||
|
- `metrics <https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/metrics.py>`_
|
||||||
|
collects some metrics and displays them at
|
||||||
|
``https://example.org/metrics``.
|
||||||
|
|
||||||
|
``www/``
|
||||||
|
~~~~~~~~~
|
||||||
|
|
||||||
|
`www <https://github.com/chatmail/relay/tree/main/www>`_ contains
|
||||||
|
the html, css, and markdown files which make up a chatmail relay’s
|
||||||
|
web page. Edit them before deploying to make your chatmail relay
|
||||||
|
stand out.
|
||||||
|
|
||||||
|
|
||||||
|
Chatmail relay dependency diagram
|
||||||
|
---------------------------------
|
||||||
|
|
||||||
|
.. mermaid::
|
||||||
|
:caption: This diagram shows relay components and dependencies/communication paths.
|
||||||
|
|
||||||
|
graph LR;
|
||||||
|
letsencrypt --- |80|acmetool-redirector;
|
||||||
|
acmetool-redirector --- |443|nginx-right(["`nginx
|
||||||
|
(external)`"]);
|
||||||
|
nginx-external --- |465|postfix;
|
||||||
|
nginx-external(["`nginx
|
||||||
|
(external)`"]) --- |8443|nginx-internal["`nginx
|
||||||
|
(internal)`"];
|
||||||
|
nginx-internal --- website["`Website
|
||||||
|
/var/www/html`"];
|
||||||
|
nginx-internal --- newemail.py;
|
||||||
|
nginx-internal --- autoconfig.xml;
|
||||||
|
certs-nginx[("`TLS certs
|
||||||
|
/var/lib/acme`")] --> nginx-internal;
|
||||||
|
systemd-timer --- chatmail-metrics;
|
||||||
|
systemd-timer --- acmetool;
|
||||||
|
systemd-timer --- chatmail-expire-daily;
|
||||||
|
systemd-timer --- chatmail-fsreport-daily;
|
||||||
|
chatmail-metrics --- website;
|
||||||
|
acmetool --> certs[("`TLS certs
|
||||||
|
/var/lib/acme`")];
|
||||||
|
nginx-external --- |993|dovecot;
|
||||||
|
postfix --- |SASL|dovecot;
|
||||||
|
autoconfig.xml --- postfix;
|
||||||
|
autoconfig.xml --- dovecot;
|
||||||
|
postfix --- |10080|filtermail-outgoing;
|
||||||
|
postfix --- |10081|filtermail-incoming;
|
||||||
|
filtermail-outgoing --- |10025 reinject|postfix;
|
||||||
|
filtermail-incoming --- |10026 reinject|postfix;
|
||||||
|
dovecot --- |doveauth.socket|doveauth;
|
||||||
|
dovecot --- |message delivery|maildir["maildir
|
||||||
|
/home/vmail/.../user"];
|
||||||
|
dovecot --- |lastlogin.socket|lastlogin;
|
||||||
|
dovecot --- chatmail-metadata;
|
||||||
|
lastlogin --- maildir;
|
||||||
|
doveauth --- maildir;
|
||||||
|
chatmail-expire-daily --- maildir;
|
||||||
|
chatmail-fsreport-daily --- maildir;
|
||||||
|
chatmail-metadata --- iroh-relay;
|
||||||
|
chatmail-metadata --- |encrypted device token| notifications.delta.chat;
|
||||||
|
certs-nginx --> postfix;
|
||||||
|
certs-nginx --> dovecot;
|
||||||
|
style certs fill:#ff6;
|
||||||
|
style website fill:#ff6;
|
||||||
|
style maildir fill:#ff6;
|
||||||
|
style certs-nginx fill:#ff6;
|
||||||
|
style nginx-external fill:#f66;
|
||||||
|
style nginx-right fill:#f66;
|
||||||
|
style postfix fill:#f66;
|
||||||
|
style dovecot fill:#f66;
|
||||||
|
style notification-proxy fill:#f66;
|
||||||
|
|
||||||
|
Message between users on the same relay
|
||||||
|
---------------------------------------
|
||||||
|
|
||||||
|
.. mermaid::
|
||||||
|
:caption: This diagram shows the path a non-federated message takes.
|
||||||
|
|
||||||
|
graph LR;
|
||||||
|
sender --> |465|smtps/smtpd;
|
||||||
|
sender --> |587|submission/smtpd;
|
||||||
|
smtps/smtpd --> |10080|filtermail;
|
||||||
|
submission/smtpd --> |10080|filtermail;
|
||||||
|
filtermail --> |10025|smtpd_reinject;
|
||||||
|
smtpd_reinject --> cleanup;
|
||||||
|
cleanup --> qmgr;
|
||||||
|
qmgr --> smtpd_accepts_message;
|
||||||
|
qmgr --> |lmtp|dovecot;
|
||||||
|
dovecot --> recipient;
|
||||||
|
dovecot --> sender's_other_devices;
|
||||||
|
|
||||||
|
Operational details of a chatmail relay
|
||||||
|
----------------------------------------
|
||||||
|
|
||||||
|
Mailbox directory layout
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Fresh chatmail addresses have a mailbox directory that contains:
|
||||||
|
|
||||||
|
- a ``password`` file with the salted password required for
|
||||||
|
authenticating whether a login may use the address to send/receive
|
||||||
|
messages. If you modify the password file manually, you effectively
|
||||||
|
block the user.
|
||||||
|
|
||||||
|
- ``enforceE2EEincoming`` is a default-created file with each address.
|
||||||
|
If present the file indicates that this chatmail address rejects
|
||||||
|
incoming cleartext messages. If absent the address accepts incoming
|
||||||
|
cleartext messages.
|
||||||
|
|
||||||
|
- ``dovecot*``, ``cur``, ``new`` and ``tmp`` represent IMAP/mailbox
|
||||||
|
state. If the address is only used by one device, the Maildir
|
||||||
|
directories will typically be empty unless the user of that address
|
||||||
|
hasn’t been online for a while.
|
||||||
|
|
||||||
|
Active ports
|
||||||
|
~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Postfix_ listens on ports
|
||||||
|
|
||||||
|
- 25 (SMTP)
|
||||||
|
|
||||||
|
- 587 (SUBMISSION) and
|
||||||
|
|
||||||
|
- 465 (SUBMISSIONS)
|
||||||
|
|
||||||
|
Dovecot_ listens on ports
|
||||||
|
|
||||||
|
- 143 (IMAP) and
|
||||||
|
|
||||||
|
- 993 (IMAPS)
|
||||||
|
|
||||||
|
Nginx_ listens on port
|
||||||
|
|
||||||
|
- 8443 (HTTPS-ALT) and
|
||||||
|
|
||||||
|
- 443 (HTTPS) which multiplexes HTTPS, IMAP and SMTP using ALPN
|
||||||
|
to redirect connections to ports 8443, 465 or 993.
|
||||||
|
|
||||||
|
`acmetool <https://hlandau.github.io/acmetool/>`_ listens on port:
|
||||||
|
|
||||||
|
- 80 (HTTP).
|
||||||
|
|
||||||
|
`chatmail-turn <https://github.com/chatmail/chatmail-turn>`_ listens on port
|
||||||
|
|
||||||
|
- 3478 UDP (STUN/TURN), and temporarily opens further UDP ports
|
||||||
|
when users request them. UDP port range is not restricted, any free port
|
||||||
|
may be allocated.
|
||||||
|
|
||||||
|
chatmail-core based apps will, however, discover all ports and
|
||||||
|
configurations automatically by reading the `autoconfig XML
|
||||||
|
file <https://www.ietf.org/archive/id/draft-bucksch-autoconfig-00.html>`_
|
||||||
|
from the chatmail relay server.
|
||||||
|
|
||||||
|
Email domain authentication (DKIM)
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Chatmail relays enforce :rfc:`DKIM <6376>` to authenticate incoming emails.
|
||||||
|
Incoming emails must have a valid DKIM signature with
|
||||||
|
Signing Domain Identifier (SDID, ``d=`` parameter in the DKIM-Signature
|
||||||
|
header) equal to the ``From:`` header domain. This property is checked
|
||||||
|
by OpenDKIM screen policy script before validating the signatures. This
|
||||||
|
corresponds to strict :rfc:`DMARC <7489>` alignment (``adkim=s``).
|
||||||
|
If there is no valid DKIM signature on the incoming email, the
|
||||||
|
sender receives a “5.7.1 No valid DKIM signature found” error.
|
||||||
|
After validating the DKIM signature,
|
||||||
|
the ``final.lua`` script strips all ``OpenDKIM:`` headers to reduce message size on disk.
|
||||||
|
|
||||||
|
Note that chatmail relays
|
||||||
|
|
||||||
|
- do **not** rely on DMARC and do not consult the sender policy published in DMARC records;
|
||||||
|
|
||||||
|
- do **not** rely on legacy authentication mechanisms such as
|
||||||
|
:rfc:`iprev <8601#section-2.7.3>` and :rfc:`SPF <7208>`.
|
||||||
|
Any IP address is accepted if the DKIM signature was valid.
|
||||||
|
|
||||||
|
Outgoing emails must be sent over authenticated connection with envelope
|
||||||
|
``MAIL FROM`` (return path) corresponding to the login.
|
||||||
|
This is ensured by Postfix which maps login username to ``MAIL FROM`` with
|
||||||
|
`smtpd_sender_login_maps <https://www.postfix.org/postconf.5.html#smtpd_sender_login_maps>`_
|
||||||
|
and rejects incorrectly authenticated emails with
|
||||||
|
`reject_sender_login_mismatch <https://www.postfix.org/postconf.5.html#reject_sender_login_mismatch>`_ policy.
|
||||||
|
``From:`` header must correspond to envelope ``MAIL FROM``, this is
|
||||||
|
ensured by ``filtermail`` proxy.
|
||||||
|
|
||||||
|
TLS requirements
|
||||||
|
~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Postfix is configured to require valid TLS by setting
|
||||||
|
`smtp_tls_security_level <https://www.postfix.org/postconf.5.html#smtp_tls_security_level>`_
|
||||||
|
to ``verify``. If emails don’t arrive at your chatmail relay server, the
|
||||||
|
problem is likely that your relay does not have a valid TLS certificate.
|
||||||
|
|
||||||
|
You can test it by resolving ``MX`` records of your relay domain and
|
||||||
|
then connecting to MX relays (e.g. ``mx.example.org``) with
|
||||||
|
``openssl s_client -connect mx.example.org:25 -verify_hostname mx.example.org -verify_return_error -starttls smtp``
|
||||||
|
from the host that has open port 25 to verify that the certificate is valid.
|
||||||
|
|
||||||
|
When providing a TLS certificate to your chatmail relay server, make
|
||||||
|
sure to provide the full certificate chain and not just the last
|
||||||
|
certificate.
|
||||||
|
|
||||||
|
If you are running an Exim server and don’t see incoming connections
|
||||||
|
from a chatmail relay server in the logs, make sure ``smtp_no_mail`` log
|
||||||
|
item is enabled in the config with ``log_selector = +smtp_no_mail``. By
|
||||||
|
default Exim does not log sessions that are closed before sending the
|
||||||
|
``MAIL`` command. This happens if the certificate is not recognized as valid
|
||||||
|
by Postfix, so you might think that the connection is not established while
|
||||||
|
actually it is a problem with your TLS certificate.
|
||||||
|
|
||||||
|
|
||||||
|
.. _dovecot: https://dovecot.org
|
||||||
|
.. _postfix: https://www.postfix.org
|
||||||
|
.. _nginx: https://nginx.org
|
||||||
|
.. _pyinfra: https://pyinfra.com
|
||||||
|
|
||||||
|
|
||||||
|
Architecture of cmdeploy
|
||||||
|
------------------------
|
||||||
|
|
||||||
|
cmdeploy is a Python program that uses the pyinfra library to deploy
|
||||||
|
chatmail relays, with all the necessary software, configuration, and
|
||||||
|
services. The deployment process performs three primary types of
|
||||||
|
operation:
|
||||||
|
|
||||||
|
1. Installation of software, universal across all deployments.
|
||||||
|
2. Configuration of software, with deploy-specific variations.
|
||||||
|
3. Activation of services.
|
||||||
|
|
||||||
|
The process is implemented through a family of "deployer" objects
|
||||||
|
which all derive from a common ``Deployer`` base class, defined in
|
||||||
|
cmdeploy/src/cmdeploy/deployer.py. Each object provides
|
||||||
|
implementation methods for the three stages -- install, configure, and
|
||||||
|
activate. The top-level procedure in ``deploy_chatmail()`` calls
|
||||||
|
these methods for all the deployer objects, via the
|
||||||
|
``Deployment.perform_stages()`` method, also defined in deployer.py.
|
||||||
|
This first calls all the install methods, then the configure methods,
|
||||||
|
then the activate methods.
|
||||||
|
|
||||||
|
The ``Deployment`` class also implements support for a CMDEPLOY_STAGES
|
||||||
|
environment variable, which allows limiting the process to specific
|
||||||
|
stages. Note that some deployers are stateful between the stages
|
||||||
|
(this is one reason why they are implemented as objects), and that
|
||||||
|
state will not get propagated between stages when run in separate
|
||||||
|
invocations of cmdeploy. This environment variable is intended for
|
||||||
|
use in future revisions to support building Docker images with
|
||||||
|
software pre-installed, and configuration of containers at run time
|
||||||
|
from environment variables.
|
||||||
|
|
||||||
|
The ``install()`` methods for the deployer classes should use 'self'
|
||||||
|
as little as possible, preferably not at all. In particular,
|
||||||
|
``install()`` methods should never depend on "config" data, such as
|
||||||
|
the config dictionary in ``self.config`` or specific values like
|
||||||
|
``self.mail_domain``. This ensures that these methods can be used to
|
||||||
|
perform generic installation operations that are applicable across
|
||||||
|
multiple relay deployments, and therefore can be called in the process
|
||||||
|
of building a general-purpose container image.
|
||||||
|
|
||||||
|
Operations that start services for systemd-based deployments should
|
||||||
|
only be called from the ``activate_impl()`` methods. These methods
|
||||||
|
will not be called in non-systemd container environments.
|
||||||
175
doc/source/proxy.rst
Normal file
175
doc/source/proxy.rst
Normal file
@@ -0,0 +1,175 @@
|
|||||||
|
|
||||||
|
Setting up a reverse proxy
|
||||||
|
--------------------------
|
||||||
|
|
||||||
|
A chatmail relay MTA does not track or depend on the client IP address
|
||||||
|
for its operation, so it can be run behind a reverse proxy. This will
|
||||||
|
not even affect incoming mail authentication as DKIM only checks the
|
||||||
|
cryptographic signature of the message and does not use the IP address
|
||||||
|
as the input.
|
||||||
|
|
||||||
|
For example, you may want to self-host your chatmail relay and only use
|
||||||
|
hosted VPS to provide a public IP address for client connections and
|
||||||
|
incoming mail. You can connect chatmail relay to VPS using a tunnel
|
||||||
|
protocol such as `WireGuard <https://www.wireguard.com/>`_ and setup a
|
||||||
|
reverse proxy on a VPS to forward connections to the chatmail relay over
|
||||||
|
the tunnel. You can also setup multiple reverse proxies for your
|
||||||
|
chatmail relay in different networks to ensure your relay is reachable
|
||||||
|
even when one of the IPs becomes inaccessible due to hosting or routing
|
||||||
|
problems.
|
||||||
|
|
||||||
|
Note that your chatmail relay still needs to be able to make outgoing
|
||||||
|
connections on port 25 to send messages outside.
|
||||||
|
|
||||||
|
To setup a reverse proxy (or rather Destination NAT, DNAT) for your
|
||||||
|
chatmail relay, follow these instructions:
|
||||||
|
|
||||||
|
Linux
|
||||||
|
^^^^^
|
||||||
|
|
||||||
|
Put the following configuration in
|
||||||
|
``/etc/nftables.conf``:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
#!/usr/sbin/nft -f
|
||||||
|
|
||||||
|
flush ruleset
|
||||||
|
|
||||||
|
define wan = eth0
|
||||||
|
|
||||||
|
# Which ports to proxy.
|
||||||
|
#
|
||||||
|
# Note that SSH is not proxied
|
||||||
|
# so it is possible to log into the proxy server
|
||||||
|
# and not the original one.
|
||||||
|
define ports = { smtp, http, https, imap, imaps, submission, submissions }
|
||||||
|
|
||||||
|
# The host we want to proxy to.
|
||||||
|
define ipv4_address = AAA.BBB.CCC.DDD
|
||||||
|
define ipv6_address = [XXX::1]
|
||||||
|
|
||||||
|
table ip nat {
|
||||||
|
chain prerouting {
|
||||||
|
type nat hook prerouting priority dstnat; policy accept;
|
||||||
|
iif $wan tcp dport $ports dnat to $ipv4_address
|
||||||
|
}
|
||||||
|
|
||||||
|
chain postrouting {
|
||||||
|
type nat hook postrouting priority 0;
|
||||||
|
|
||||||
|
oifname $wan masquerade
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
table ip6 nat {
|
||||||
|
chain prerouting {
|
||||||
|
type nat hook prerouting priority dstnat; policy accept;
|
||||||
|
iif $wan tcp dport $ports dnat to $ipv6_address
|
||||||
|
}
|
||||||
|
|
||||||
|
chain postrouting {
|
||||||
|
type nat hook postrouting priority 0;
|
||||||
|
|
||||||
|
oifname $wan masquerade
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
table inet filter {
|
||||||
|
chain input {
|
||||||
|
type filter hook input priority filter; policy drop;
|
||||||
|
|
||||||
|
# Accept ICMP.
|
||||||
|
# It is especially important to accept ICMPv6 ND messages,
|
||||||
|
# otherwise IPv6 connectivity breaks.
|
||||||
|
icmp type { echo-request } accept
|
||||||
|
icmpv6 type { echo-request, nd-neighbor-solicit, nd-router-advert, nd-neighbor-advert } accept
|
||||||
|
|
||||||
|
# Allow incoming SSH connections.
|
||||||
|
tcp dport { ssh } accept
|
||||||
|
|
||||||
|
ct state established accept
|
||||||
|
}
|
||||||
|
chain forward {
|
||||||
|
type filter hook forward priority filter; policy drop;
|
||||||
|
|
||||||
|
ct state established accept
|
||||||
|
ip daddr $ipv4_address counter accept
|
||||||
|
ip6 daddr $ipv6_address counter accept
|
||||||
|
}
|
||||||
|
chain output {
|
||||||
|
type filter hook output priority filter;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Run ``systemctl enable nftables.service`` to ensure configuration is
|
||||||
|
reloaded when the proxy relay reboots.
|
||||||
|
|
||||||
|
Uncomment in ``/etc/sysctl.conf`` the following two lines:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
net.ipv4.ip_forward=1
|
||||||
|
net.ipv6.conf.all.forwarding=1
|
||||||
|
|
||||||
|
Then reboot the relay or do ``sysctl -p`` and
|
||||||
|
``nft -f /etc/nftables.conf``.
|
||||||
|
|
||||||
|
FreeBSD / pf
|
||||||
|
^^^^^^^^^^^^
|
||||||
|
|
||||||
|
Put the following configuration in
|
||||||
|
``/etc/pf.conf``:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
ext_if = "em0"
|
||||||
|
forward_ports = "{ 25, 80, 143, 443, 465, 587, 993 }"
|
||||||
|
chatmail_ipv4 = "AAA.BBB.CCC.DDD"
|
||||||
|
icmp_types = "{ echoreq, echorep, unreach, timex }"
|
||||||
|
chatmail_ipv6 = "XXX::1"
|
||||||
|
icmp6_types = "{ echorep, echoreq, neighbradv, neighbrsol, routeradv, routersol, unreach, toobig, timex }"
|
||||||
|
|
||||||
|
set skip on lo0
|
||||||
|
|
||||||
|
nat on $ext_if inet from any to any -> ($ext_if:0)
|
||||||
|
nat on $ext_if inet6 from any to any -> ($ext_if:0)
|
||||||
|
|
||||||
|
# Define the redirect rules
|
||||||
|
rdr on $ext_if inet proto tcp from any to ($ext_if:0) port $forward_ports -> $chatmail_ipv4
|
||||||
|
rdr on $ext_if inet6 proto tcp from any to ($ext_if:0) port $forward_ports -> $chatmail_ipv6
|
||||||
|
|
||||||
|
# Accept the incoming traffic to the specified ports we will NAT redirect
|
||||||
|
pass in quick on $ext_if inet proto tcp from any to any port $forward_ports flags S/SA modulate state
|
||||||
|
pass in quick on $ext_if inet6 proto tcp from any to any port $forward_ports flags S/SA modulate state
|
||||||
|
|
||||||
|
# Allow incoming SSH for host mgmt
|
||||||
|
pass in quick on $ext_if proto tcp from any to ($ext_if) port 22 flags S/SA modulate state
|
||||||
|
|
||||||
|
# Allow ICMP
|
||||||
|
pass in quick on $ext_if inet proto icmp all icmp-type $icmp_types keep state
|
||||||
|
pass in quick on $ext_if inet6 proto ipv6-icmp all icmp6-type $icmp6_types keep state
|
||||||
|
|
||||||
|
# Allow traffic from anyone to go through the NAT
|
||||||
|
pass on $ext_if inet proto tcp from any to $chatmail_ipv4 flags S/SA modulate state
|
||||||
|
pass on $ext_if inet6 proto tcp from any to $chatmail_ipv6 flags S/SA modulate state
|
||||||
|
|
||||||
|
# Default allow out
|
||||||
|
pass out quick on $ext_if from any to any
|
||||||
|
|
||||||
|
# Default block
|
||||||
|
block drop in log all
|
||||||
|
|
||||||
|
Insert into ``/etc/sysctl.conf.local`` the following two lines:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
net.inet.ip.forwarding=1
|
||||||
|
net.inet6.ip6.forwarding=1
|
||||||
|
|
||||||
|
Activate the sysctls with ``service sysctl onestart``.
|
||||||
|
Enable the pf firewall with ``service pf enable``.
|
||||||
|
Apply the firewall rules with ``service pf start`` or ``pfctl -f /etc/pf.conf``.
|
||||||
|
Note, enabling the firewall may interrupt your SSH session, but you can reconnect.
|
||||||
|
|
||||||
|
Once the proxy relay is set up, you can add its IP address to the DNS.
|
||||||
27
doc/source/related.rst
Normal file
27
doc/source/related.rst
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
|
||||||
|
Community developments
|
||||||
|
======================
|
||||||
|
|
||||||
|
Active development takes place in the `chatmail/relay github repository <https://github.com/chatmail/relay>`_.
|
||||||
|
|
||||||
|
You can check out the `'chatmail' tag in the support.delta.chat forum <https://support.delta.chat/tag/chatmail>`_
|
||||||
|
and ask to get added to a non-public support chat for debugging issues.
|
||||||
|
|
||||||
|
We know of three work-in-progress alternative implementation efforts:
|
||||||
|
|
||||||
|
- `Mox <https://github.com/mjl-/mox>`_: A Golang email server. `Work
|
||||||
|
is in progress <https://github.com/mjl-/mox/issues/251>`_ to modify
|
||||||
|
it to support all of the features and configuration settings required
|
||||||
|
to operate as a chatmail relay.
|
||||||
|
|
||||||
|
- `Madmail <https://github.com/omidz4t/madmail>`_: an
|
||||||
|
experimental fork of `Maddy Mail Server <https://maddy.email/>`_ optimized
|
||||||
|
for chatmail deployments. It provides a single binary solution
|
||||||
|
for running a chatmail relay.
|
||||||
|
|
||||||
|
- `Chatmail Cookbook <https://github.com/feld/chatmail-cookbook>`_:
|
||||||
|
A Chef Cookbook implementing a relay server. The project follows the
|
||||||
|
official relay server software and configurations converted to a Chef
|
||||||
|
Cookbook with only minor differences. The cookbook uses DNS-01 for
|
||||||
|
certificate validation and additionally supports FreeBSD. It does not
|
||||||
|
require a Chef server to use.
|
||||||
@@ -1,83 +0,0 @@
|
|||||||
FROM jrei/systemd-debian:12 AS base
|
|
||||||
|
|
||||||
ENV LANG=en_US.UTF-8
|
|
||||||
|
|
||||||
RUN echo 'APT::Install-Recommends "0";' > /etc/apt/apt.conf.d/01norecommend && \
|
|
||||||
echo 'APT::Install-Suggests "0";' >> /etc/apt/apt.conf.d/01norecommend && \
|
|
||||||
apt-get update && \
|
|
||||||
apt-get install -y \
|
|
||||||
ca-certificates && \
|
|
||||||
DEBIAN_FRONTEND=noninteractive \
|
|
||||||
TZ=Europe/London \
|
|
||||||
apt-get install -y tzdata && \
|
|
||||||
apt-get install -y locales && \
|
|
||||||
sed -i -e "s/# $LANG.*/$LANG UTF-8/" /etc/locale.gen && \
|
|
||||||
dpkg-reconfigure --frontend=noninteractive locales && \
|
|
||||||
update-locale LANG=$LANG \
|
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
|
||||||
|
|
||||||
RUN apt-get update && \
|
|
||||||
apt-get install -y \
|
|
||||||
git \
|
|
||||||
python3 \
|
|
||||||
python3-venv \
|
|
||||||
python3-virtualenv \
|
|
||||||
gcc \
|
|
||||||
python3-dev \
|
|
||||||
opendkim \
|
|
||||||
opendkim-tools \
|
|
||||||
curl \
|
|
||||||
rsync \
|
|
||||||
unbound \
|
|
||||||
unbound-anchor \
|
|
||||||
dnsutils \
|
|
||||||
postfix \
|
|
||||||
acl \
|
|
||||||
nginx \
|
|
||||||
libnginx-mod-stream \
|
|
||||||
fcgiwrap \
|
|
||||||
cron \
|
|
||||||
&& for pkg in core imapd lmtpd; do \
|
|
||||||
case "$pkg" in \
|
|
||||||
core) sha256="43f593332e22ac7701c62d58b575d2ca409e0f64857a2803be886c22860f5587" ;; \
|
|
||||||
imapd) sha256="8d8dc6fc00bbb6cdb25d345844f41ce2f1c53f764b79a838eb2a03103eebfa86" ;; \
|
|
||||||
lmtpd) sha256="2f69ba5e35363de50962d42cccbfe4ed8495265044e244007d7ccddad77513ab" ;; \
|
|
||||||
esac; \
|
|
||||||
url="https://download.delta.chat/dovecot/dovecot-${pkg}_2.3.21%2Bdfsg1-3_amd64.deb"; \
|
|
||||||
file="/tmp/$(basename "$url")"; \
|
|
||||||
curl -fsSL "$url" -o "$file"; \
|
|
||||||
echo "$sha256 $file" | sha256sum -c -; \
|
|
||||||
apt-get install -y "$file"; \
|
|
||||||
rm -f "$file"; \
|
|
||||||
done \
|
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
|
||||||
|
|
||||||
WORKDIR /opt/chatmail
|
|
||||||
|
|
||||||
ARG SETUP_CHATMAIL_SERVICE_PATH=/lib/systemd/system/setup_chatmail.service
|
|
||||||
COPY ./files/setup_chatmail.service "$SETUP_CHATMAIL_SERVICE_PATH"
|
|
||||||
RUN ln -sf "$SETUP_CHATMAIL_SERVICE_PATH" "/etc/systemd/system/multi-user.target.wants/setup_chatmail.service"
|
|
||||||
|
|
||||||
COPY --chmod=555 ./files/setup_chatmail_docker.sh /setup_chatmail_docker.sh
|
|
||||||
COPY --chmod=555 ./files/update_ini.sh /update_ini.sh
|
|
||||||
COPY --chmod=555 ./files/entrypoint.sh /entrypoint.sh
|
|
||||||
|
|
||||||
## TODO: add git clone.
|
|
||||||
## Problem: how to correctly save only the required files inside the container.
|
|
||||||
# RUN git clone https://github.com/chatmail/relay.git -b master . \
|
|
||||||
# && ./scripts/initenv.sh
|
|
||||||
|
|
||||||
# EXPOSE 443 25 587 143 993
|
|
||||||
|
|
||||||
VOLUME ["/sys/fs/cgroup", "/home"]
|
|
||||||
|
|
||||||
STOPSIGNAL SIGRTMIN+3
|
|
||||||
|
|
||||||
ENTRYPOINT ["/entrypoint.sh"]
|
|
||||||
|
|
||||||
CMD [ "--default-standard-output=journal+console", \
|
|
||||||
"--default-standard-error=journal+console" ]
|
|
||||||
|
|
||||||
## TODO: Add installation and configuration of chatmaild inside the Dockerfile.
|
|
||||||
## This is required to ensure repeatable deployment.
|
|
||||||
## In the current MVP, the chatmaild server is updated on every container restart.
|
|
||||||
@@ -1,59 +0,0 @@
|
|||||||
services:
|
|
||||||
chatmail:
|
|
||||||
build:
|
|
||||||
context: ./docker
|
|
||||||
dockerfile: chatmail_relay.dockerfile
|
|
||||||
tags:
|
|
||||||
- chatmail-relay:latest
|
|
||||||
image: chatmail-relay:latest
|
|
||||||
restart: unless-stopped
|
|
||||||
container_name: chatmail
|
|
||||||
cgroup: host # required for systemd
|
|
||||||
tty: true # required for logs
|
|
||||||
tmpfs: # required for systemd
|
|
||||||
- /tmp
|
|
||||||
- /run
|
|
||||||
- /run/lock
|
|
||||||
logging:
|
|
||||||
driver: json-file
|
|
||||||
options:
|
|
||||||
max-size: "10m"
|
|
||||||
max-file: "3"
|
|
||||||
environment:
|
|
||||||
MAIL_DOMAIN: $MAIL_DOMAIN
|
|
||||||
CHANGE_KERNEL_SETTINGS: "False"
|
|
||||||
ACME_EMAIL: $ACME_EMAIL
|
|
||||||
# RECREATE_VENV: "false"
|
|
||||||
# MAX_MESSAGE_SIZE: "50M"
|
|
||||||
# DEBUG_COMMANDS_ENABLED: "true"
|
|
||||||
# FORCE_REINIT_INI_FILE: "true"
|
|
||||||
# USE_FOREIGN_CERT_MANAGER: "True"
|
|
||||||
# ENABLE_CERTS_MONITORING: "true"
|
|
||||||
# CERTS_MONITORING_TIMEOUT: 10
|
|
||||||
# IS_DEVELOPMENT_INSTANCE: "True"
|
|
||||||
ports:
|
|
||||||
- "80:80"
|
|
||||||
- "443:443"
|
|
||||||
- "25:25"
|
|
||||||
- "587:587"
|
|
||||||
- "143:143"
|
|
||||||
- "465:465"
|
|
||||||
- "993:993"
|
|
||||||
volumes:
|
|
||||||
## system
|
|
||||||
- /sys/fs/cgroup:/sys/fs/cgroup:rw # required for systemd
|
|
||||||
- ./:/opt/chatmail
|
|
||||||
|
|
||||||
## data
|
|
||||||
- ./data/chatmail:/home
|
|
||||||
- ./data/chatmail-dkimkeys:/etc/dkimkeys
|
|
||||||
- ./data/chatmail-echobot:/run/echobot
|
|
||||||
- ./data/chatmail-acme:/var/lib/acme
|
|
||||||
|
|
||||||
## custom resources
|
|
||||||
# - ./custom/www/src/index.md:/opt/chatmail/www/src/index.md
|
|
||||||
|
|
||||||
## debug
|
|
||||||
# - ./docker/files/setup_chatmail_docker.sh:/setup_chatmail_docker.sh
|
|
||||||
# - ./docker/files/entrypoint.sh:/entrypoint.sh
|
|
||||||
# - ./docker/files/update_ini.sh:/update_ini.sh
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
MAIL_DOMAIN="chat.example.com"
|
|
||||||
@@ -1,11 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
set -eo pipefail
|
|
||||||
|
|
||||||
unlink /etc/nginx/sites-enabled/default || true
|
|
||||||
|
|
||||||
SETUP_CHATMAIL_SERVICE_PATH="${SETUP_CHATMAIL_SERVICE_PATH:-/lib/systemd/system/setup_chatmail.service}"
|
|
||||||
|
|
||||||
env_vars=$(printenv | cut -d= -f1 | xargs)
|
|
||||||
sed -i "s|<envs_list>|$env_vars|g" $SETUP_CHATMAIL_SERVICE_PATH
|
|
||||||
|
|
||||||
exec /lib/systemd/systemd $@
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user