mirror of
https://github.com/chatmail/relay.git
synced 2026-05-10 16:04:37 +00:00
Compare commits
10 Commits
cmdeploy-p
...
reorgtest
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
77727e259e | ||
|
|
732fdb3dab | ||
|
|
fe648f4784 | ||
|
|
d43e046c5d | ||
|
|
3716f2e429 | ||
|
|
00b4c484ff | ||
|
|
0950d7ea8f | ||
|
|
7dd2d0b9b4 | ||
|
|
dd232689a7 | ||
|
|
c613ca24af |
4
.github/CODE_OF_CONDUCT.md
vendored
4
.github/CODE_OF_CONDUCT.md
vendored
@@ -1,4 +0,0 @@
|
|||||||
|
|
||||||
Please refer to
|
|
||||||
[Delta Chat community standards and practices](https://delta.chat/en/community-standards)
|
|
||||||
which also apply for all chatmail developments.
|
|
||||||
33
.github/ISSUE_TEMPLATE/bug_report.md
vendored
33
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -1,33 +0,0 @@
|
|||||||
---
|
|
||||||
name: Bug report
|
|
||||||
about: Report something that isn't working.
|
|
||||||
title: ''
|
|
||||||
assignees: ''
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
<!--
|
|
||||||
Please fill out as much of this form as you can (leaving out stuff that is not applicable is ok).
|
|
||||||
-->
|
|
||||||
|
|
||||||
- Server OS (Operating System) - preferably Debian 12:
|
|
||||||
- On which OS you run cmdeploy:
|
|
||||||
- chatmail/relay version: `git rev-parse HEAD`
|
|
||||||
|
|
||||||
## Expected behavior
|
|
||||||
|
|
||||||
*What did you try to achieve?*
|
|
||||||
|
|
||||||
## Actual behavior
|
|
||||||
|
|
||||||
*What happened instead?*
|
|
||||||
|
|
||||||
### Steps to reproduce the problem:
|
|
||||||
|
|
||||||
1.
|
|
||||||
2.
|
|
||||||
|
|
||||||
### Screenshots
|
|
||||||
|
|
||||||
### Logs
|
|
||||||
|
|
||||||
5
.github/ISSUE_TEMPLATE/config.yml
vendored
5
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -1,5 +0,0 @@
|
|||||||
blank_issues_enabled: true
|
|
||||||
contact_links:
|
|
||||||
- name: Mutual Help Chat Group
|
|
||||||
url: https://i.delta.chat/#6CBFF8FFD505C0FDEA20A66674F2916EA8FBEE99&a=invitebot%40nine.testrun.org&g=Chatmail%20Mutual%20Help&x=7sFF7Ik50pWv6J1z7RVC5527&i=X69wTFfvCfs3d-JzqP0kVA3i&s=ibp-447dU-wUq-52QanwAtWc
|
|
||||||
about: If you have troubles setting up the relay server, feel free to ask here.
|
|
||||||
38
.github/workflows/ci.yaml
vendored
38
.github/workflows/ci.yaml
vendored
@@ -6,36 +6,26 @@ on:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
tox:
|
tox:
|
||||||
name: isolated chatmaild tests
|
name: chatmail tests
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
# Checkout pull request HEAD commit instead of merge commit
|
|
||||||
# Otherwise `test_deployed_state` will be unhappy.
|
|
||||||
with:
|
|
||||||
ref: ${{ github.event.pull_request.head.sha }}
|
|
||||||
|
|
||||||
- name: run chatmaild tests
|
- name: run chatmaild tests
|
||||||
working-directory: chatmaild
|
working-directory: chatmaild
|
||||||
run: pipx run tox
|
run: pipx run tox
|
||||||
|
- name: run deploy-chatmail offline tests
|
||||||
|
working-directory: deploy-chatmail
|
||||||
|
run: pipx run tox
|
||||||
|
- name: run deploy-chatmail offline tests
|
||||||
|
working-directory: deploy-chatmail
|
||||||
|
run: pipx run tox
|
||||||
|
|
||||||
scripts:
|
scripts:
|
||||||
name: deploy-chatmail tests
|
name: chatmail script invocations
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
|
- name: run init.sh
|
||||||
- name: initenv
|
run: ./scripts/init.sh
|
||||||
run: scripts/initenv.sh
|
- name: run test.sh
|
||||||
|
run: ./scripts/test.sh
|
||||||
- name: append venv/bin to PATH
|
|
||||||
run: echo venv/bin >>$GITHUB_PATH
|
|
||||||
|
|
||||||
- name: run formatting checks
|
|
||||||
run: cmdeploy fmt -v
|
|
||||||
|
|
||||||
- name: run deploy-chatmail offline tests
|
|
||||||
run: pytest --pyargs cmdeploy
|
|
||||||
|
|
||||||
# all other cmdeploy commands require a staging server
|
|
||||||
# see https://github.com/deltachat/chatmail/issues/100
|
|
||||||
|
|||||||
@@ -1,20 +0,0 @@
|
|||||||
;; Zone file for staging-ipv4.testrun.org
|
|
||||||
|
|
||||||
$ORIGIN staging-ipv4.testrun.org.
|
|
||||||
$TTL 300
|
|
||||||
|
|
||||||
@ IN SOA ns.testrun.org. root.nine.testrun.org (
|
|
||||||
2023010101 ; Serial
|
|
||||||
7200 ; Refresh
|
|
||||||
3600 ; Retry
|
|
||||||
1209600 ; Expire
|
|
||||||
3600 ; Negative response caching TTL
|
|
||||||
)
|
|
||||||
|
|
||||||
;; Nameservers.
|
|
||||||
@ IN NS ns.testrun.org.
|
|
||||||
|
|
||||||
;; DNS records.
|
|
||||||
@ IN A 37.27.95.249
|
|
||||||
mta-sts.staging-ipv4.testrun.org. CNAME staging-ipv4.testrun.org.
|
|
||||||
www.staging-ipv4.testrun.org. CNAME staging-ipv4.testrun.org.
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
;; Zone file for staging2.testrun.org
|
|
||||||
|
|
||||||
$ORIGIN staging2.testrun.org.
|
|
||||||
$TTL 300
|
|
||||||
|
|
||||||
@ IN SOA ns.testrun.org. root.nine.testrun.org (
|
|
||||||
2023010101 ; Serial
|
|
||||||
7200 ; Refresh
|
|
||||||
3600 ; Retry
|
|
||||||
1209600 ; Expire
|
|
||||||
3600 ; Negative response caching TTL
|
|
||||||
)
|
|
||||||
|
|
||||||
;; Nameservers.
|
|
||||||
@ IN NS ns.testrun.org.
|
|
||||||
|
|
||||||
;; DNS records.
|
|
||||||
@ IN A 37.27.24.139
|
|
||||||
mta-sts.staging2.testrun.org. CNAME staging2.testrun.org.
|
|
||||||
www.staging2.testrun.org. CNAME staging2.testrun.org.
|
|
||||||
|
|
||||||
97
.github/workflows/test-and-deploy-ipv4only.yaml
vendored
97
.github/workflows/test-and-deploy-ipv4only.yaml
vendored
@@ -1,97 +0,0 @@
|
|||||||
name: deploy on staging-ipv4.testrun.org, and run tests
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- main
|
|
||||||
pull_request:
|
|
||||||
paths-ignore:
|
|
||||||
- 'scripts/**'
|
|
||||||
- '**/README.md'
|
|
||||||
- 'CHANGELOG.md'
|
|
||||||
- 'LICENSE'
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
deploy:
|
|
||||||
name: deploy on staging-ipv4.testrun.org, and run tests
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
timeout-minutes: 30
|
|
||||||
concurrency:
|
|
||||||
group: ci-ipv4-${{ github.workflow }}-${{ github.ref }}
|
|
||||||
cancel-in-progress: ${{ !contains(github.ref, '$GITHUB_REF') }}
|
|
||||||
steps:
|
|
||||||
- uses: jsok/serialize-workflow-action@v1
|
|
||||||
with:
|
|
||||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: prepare SSH
|
|
||||||
run: |
|
|
||||||
mkdir ~/.ssh
|
|
||||||
echo "${{ secrets.STAGING_SSH_KEY }}" >> ~/.ssh/id_ed25519
|
|
||||||
chmod 600 ~/.ssh/id_ed25519
|
|
||||||
ssh-keyscan staging-ipv4.testrun.org > ~/.ssh/known_hosts
|
|
||||||
# save previous acme & dkim state
|
|
||||||
rsync -avz root@staging-ipv4.testrun.org:/var/lib/acme acme-ipv4 || true
|
|
||||||
rsync -avz root@staging-ipv4.testrun.org:/etc/dkimkeys dkimkeys-ipv4 || true
|
|
||||||
# store previous acme & dkim state on ns.testrun.org, if it contains useful certs
|
|
||||||
if [ -f dkimkeys-ipv4/dkimkeys/opendkim.private ]; then rsync -avz -e "ssh -o StrictHostKeyChecking=accept-new" dkimkeys-ipv4 root@ns.testrun.org:/tmp/ || true; fi
|
|
||||||
if [ "$(ls -A acme-ipv4/acme/certs)" ]; then rsync -avz -e "ssh -o StrictHostKeyChecking=accept-new" acme-ipv4 root@ns.testrun.org:/tmp/ || true; fi
|
|
||||||
# make sure CAA record isn't set
|
|
||||||
scp -o StrictHostKeyChecking=accept-new .github/workflows/staging-ipv4.testrun.org-default.zone root@ns.testrun.org:/etc/nsd/staging-ipv4.testrun.org.zone
|
|
||||||
ssh root@ns.testrun.org sed -i '/CAA/d' /etc/nsd/staging-ipv4.testrun.org.zone
|
|
||||||
ssh root@ns.testrun.org nsd-checkzone staging-ipv4.testrun.org /etc/nsd/staging-ipv4.testrun.org.zone
|
|
||||||
ssh root@ns.testrun.org systemctl reload nsd
|
|
||||||
|
|
||||||
- name: rebuild staging-ipv4.testrun.org to have a clean VPS
|
|
||||||
run: |
|
|
||||||
curl -X POST \
|
|
||||||
-H "Authorization: Bearer ${{ secrets.HETZNER_API_TOKEN }}" \
|
|
||||||
-H "Content-Type: application/json" \
|
|
||||||
-d '{"image":"debian-12"}' \
|
|
||||||
"https://api.hetzner.cloud/v1/servers/${{ secrets.STAGING_IPV4_SERVER_ID }}/actions/rebuild"
|
|
||||||
|
|
||||||
- run: scripts/initenv.sh
|
|
||||||
|
|
||||||
- name: append venv/bin to PATH
|
|
||||||
run: echo venv/bin >>$GITHUB_PATH
|
|
||||||
|
|
||||||
- name: upload TLS cert after rebuilding
|
|
||||||
run: |
|
|
||||||
echo " --- wait until staging-ipv4.testrun.org VPS is rebuilt --- "
|
|
||||||
rm ~/.ssh/known_hosts
|
|
||||||
while ! ssh -o ConnectTimeout=180 -o StrictHostKeyChecking=accept-new -v root@staging-ipv4.testrun.org id -u ; do sleep 1 ; done
|
|
||||||
ssh -o StrictHostKeyChecking=accept-new -v root@staging-ipv4.testrun.org id -u
|
|
||||||
# download acme & dkim state from ns.testrun.org
|
|
||||||
rsync -e "ssh -o StrictHostKeyChecking=accept-new" -avz root@ns.testrun.org:/tmp/acme-ipv4/acme acme-restore || true
|
|
||||||
rsync -avz root@ns.testrun.org:/tmp/dkimkeys-ipv4/dkimkeys dkimkeys-restore || true
|
|
||||||
# restore acme & dkim state to staging2.testrun.org
|
|
||||||
rsync -avz acme-restore/acme root@staging-ipv4.testrun.org:/var/lib/ || true
|
|
||||||
rsync -avz dkimkeys-restore/dkimkeys root@staging-ipv4.testrun.org:/etc/ || true
|
|
||||||
ssh -o StrictHostKeyChecking=accept-new -v root@staging-ipv4.testrun.org chown root:root -R /var/lib/acme || true
|
|
||||||
|
|
||||||
- name: run deploy-chatmail offline tests
|
|
||||||
run: pytest --pyargs cmdeploy
|
|
||||||
|
|
||||||
- run: |
|
|
||||||
cmdeploy init staging-ipv4.testrun.org
|
|
||||||
sed -i 's#disable_ipv6 = False#disable_ipv6 = True#' chatmail.ini
|
|
||||||
|
|
||||||
- run: cmdeploy run
|
|
||||||
|
|
||||||
- name: set DNS entries
|
|
||||||
run: |
|
|
||||||
ssh -o StrictHostKeyChecking=accept-new -v root@staging-ipv4.testrun.org chown opendkim:opendkim -R /etc/dkimkeys
|
|
||||||
cmdeploy dns --zonefile staging-generated.zone
|
|
||||||
cat staging-generated.zone >> .github/workflows/staging-ipv4.testrun.org-default.zone
|
|
||||||
cat .github/workflows/staging-ipv4.testrun.org-default.zone
|
|
||||||
scp .github/workflows/staging-ipv4.testrun.org-default.zone root@ns.testrun.org:/etc/nsd/staging-ipv4.testrun.org.zone
|
|
||||||
ssh root@ns.testrun.org nsd-checkzone staging-ipv4.testrun.org /etc/nsd/staging-ipv4.testrun.org.zone
|
|
||||||
ssh root@ns.testrun.org systemctl reload nsd
|
|
||||||
|
|
||||||
- name: cmdeploy test
|
|
||||||
run: CHATMAIL_DOMAIN2=nine.testrun.org cmdeploy test --slow
|
|
||||||
|
|
||||||
- name: cmdeploy dns
|
|
||||||
run: cmdeploy dns -v
|
|
||||||
|
|
||||||
95
.github/workflows/test-and-deploy.yaml
vendored
95
.github/workflows/test-and-deploy.yaml
vendored
@@ -1,95 +0,0 @@
|
|||||||
name: deploy on staging2.testrun.org, and run tests
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- main
|
|
||||||
pull_request:
|
|
||||||
paths-ignore:
|
|
||||||
- 'scripts/**'
|
|
||||||
- '**/README.md'
|
|
||||||
- 'CHANGELOG.md'
|
|
||||||
- 'LICENSE'
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
deploy:
|
|
||||||
name: deploy on staging2.testrun.org, and run tests
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
timeout-minutes: 30
|
|
||||||
concurrency:
|
|
||||||
group: ci-${{ github.workflow }}-${{ github.ref }}
|
|
||||||
cancel-in-progress: ${{ !contains(github.ref, '$GITHUB_REF') }}
|
|
||||||
steps:
|
|
||||||
- uses: jsok/serialize-workflow-action@v1
|
|
||||||
with:
|
|
||||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: prepare SSH
|
|
||||||
run: |
|
|
||||||
mkdir ~/.ssh
|
|
||||||
echo "${{ secrets.STAGING_SSH_KEY }}" >> ~/.ssh/id_ed25519
|
|
||||||
chmod 600 ~/.ssh/id_ed25519
|
|
||||||
ssh-keyscan staging2.testrun.org > ~/.ssh/known_hosts
|
|
||||||
# save previous acme & dkim state
|
|
||||||
rsync -avz root@staging2.testrun.org:/var/lib/acme . || true
|
|
||||||
rsync -avz root@staging2.testrun.org:/etc/dkimkeys . || true
|
|
||||||
# store previous acme & dkim state on ns.testrun.org, if it contains useful certs
|
|
||||||
if [ -f dkimkeys/opendkim.private ]; then rsync -avz -e "ssh -o StrictHostKeyChecking=accept-new" dkimkeys root@ns.testrun.org:/tmp/ || true; fi
|
|
||||||
if [ "$(ls -A acme/certs)" ]; then rsync -avz -e "ssh -o StrictHostKeyChecking=accept-new" acme root@ns.testrun.org:/tmp/ || true; fi
|
|
||||||
# make sure CAA record isn't set
|
|
||||||
scp -o StrictHostKeyChecking=accept-new .github/workflows/staging.testrun.org-default.zone root@ns.testrun.org:/etc/nsd/staging2.testrun.org.zone
|
|
||||||
ssh root@ns.testrun.org sed -i '/CAA/d' /etc/nsd/staging2.testrun.org.zone
|
|
||||||
ssh root@ns.testrun.org nsd-checkzone staging2.testrun.org /etc/nsd/staging2.testrun.org.zone
|
|
||||||
ssh root@ns.testrun.org systemctl reload nsd
|
|
||||||
|
|
||||||
- name: rebuild staging2.testrun.org to have a clean VPS
|
|
||||||
run: |
|
|
||||||
curl -X POST \
|
|
||||||
-H "Authorization: Bearer ${{ secrets.HETZNER_API_TOKEN }}" \
|
|
||||||
-H "Content-Type: application/json" \
|
|
||||||
-d '{"image":"debian-12"}' \
|
|
||||||
"https://api.hetzner.cloud/v1/servers/${{ secrets.STAGING_SERVER_ID }}/actions/rebuild"
|
|
||||||
|
|
||||||
- run: scripts/initenv.sh
|
|
||||||
|
|
||||||
- name: append venv/bin to PATH
|
|
||||||
run: echo venv/bin >>$GITHUB_PATH
|
|
||||||
|
|
||||||
- name: upload TLS cert after rebuilding
|
|
||||||
run: |
|
|
||||||
echo " --- wait until staging2.testrun.org VPS is rebuilt --- "
|
|
||||||
rm ~/.ssh/known_hosts
|
|
||||||
while ! ssh -o ConnectTimeout=180 -o StrictHostKeyChecking=accept-new -v root@staging2.testrun.org id -u ; do sleep 1 ; done
|
|
||||||
ssh -o StrictHostKeyChecking=accept-new -v root@staging2.testrun.org id -u
|
|
||||||
# download acme & dkim state from ns.testrun.org
|
|
||||||
rsync -e "ssh -o StrictHostKeyChecking=accept-new" -avz root@ns.testrun.org:/tmp/acme acme-restore || true
|
|
||||||
rsync -avz root@ns.testrun.org:/tmp/dkimkeys dkimkeys-restore || true
|
|
||||||
# restore acme & dkim state to staging2.testrun.org
|
|
||||||
rsync -avz acme-restore/acme root@staging2.testrun.org:/var/lib/ || true
|
|
||||||
rsync -avz dkimkeys-restore/dkimkeys root@staging2.testrun.org:/etc/ || true
|
|
||||||
ssh -o StrictHostKeyChecking=accept-new -v root@staging2.testrun.org chown root:root -R /var/lib/acme || true
|
|
||||||
|
|
||||||
- name: run deploy-chatmail offline tests
|
|
||||||
run: pytest --pyargs cmdeploy
|
|
||||||
|
|
||||||
- run: cmdeploy init staging2.testrun.org
|
|
||||||
|
|
||||||
- run: cmdeploy run --verbose
|
|
||||||
|
|
||||||
- name: set DNS entries
|
|
||||||
run: |
|
|
||||||
ssh -o StrictHostKeyChecking=accept-new root@staging2.testrun.org chown opendkim:opendkim -R /etc/dkimkeys
|
|
||||||
cmdeploy dns --zonefile staging-generated.zone --verbose
|
|
||||||
cat staging-generated.zone >> .github/workflows/staging.testrun.org-default.zone
|
|
||||||
cat .github/workflows/staging.testrun.org-default.zone
|
|
||||||
scp .github/workflows/staging.testrun.org-default.zone root@ns.testrun.org:/etc/nsd/staging2.testrun.org.zone
|
|
||||||
ssh root@ns.testrun.org nsd-checkzone staging2.testrun.org /etc/nsd/staging2.testrun.org.zone
|
|
||||||
ssh root@ns.testrun.org systemctl reload nsd
|
|
||||||
|
|
||||||
- name: cmdeploy test
|
|
||||||
run: CHATMAIL_DOMAIN2=nine.testrun.org cmdeploy test --slow
|
|
||||||
|
|
||||||
- name: cmdeploy dns
|
|
||||||
run: cmdeploy dns -v
|
|
||||||
|
|
||||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -3,9 +3,6 @@ __pycache__/
|
|||||||
*.py[cod]
|
*.py[cod]
|
||||||
*$py.class
|
*$py.class
|
||||||
*.swp
|
*.swp
|
||||||
*qr-*.png
|
|
||||||
chatmail.ini
|
|
||||||
|
|
||||||
|
|
||||||
# C extensions
|
# C extensions
|
||||||
*.so
|
*.so
|
||||||
@@ -162,5 +159,3 @@ cython_debug/
|
|||||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||||
#.idea/
|
#.idea/
|
||||||
|
|
||||||
chatmail.zone
|
|
||||||
|
|||||||
@@ -1,50 +0,0 @@
|
|||||||
This diagram shows components of the chatmail server; this is a draft
|
|
||||||
overview as of mid-August 2025:
|
|
||||||
|
|
||||||
```mermaid
|
|
||||||
graph LR;
|
|
||||||
cmdeploy --- sshd;
|
|
||||||
letsencrypt --- |80|acmetool-redirector;
|
|
||||||
acmetool-redirector --- |443|nginx-right(["`nginx
|
|
||||||
(external)`"]);
|
|
||||||
nginx-external --- |465|postfix;
|
|
||||||
nginx-external(["`nginx
|
|
||||||
(external)`"]) --- |8443|nginx-internal["`nginx
|
|
||||||
(internal)`"];
|
|
||||||
nginx-internal --- website["`Website
|
|
||||||
/var/www/html`"];
|
|
||||||
nginx-internal --- newemail.py;
|
|
||||||
nginx-internal --- autoconfig.xml;
|
|
||||||
certs-nginx[("`TLS certs
|
|
||||||
/var/lib/acme`")] --> nginx-internal;
|
|
||||||
cron --- chatmail-metrics;
|
|
||||||
cron --- acmetool;
|
|
||||||
cron --- expunge;
|
|
||||||
chatmail-metrics --- website;
|
|
||||||
acmetool --> certs[("`TLS certs
|
|
||||||
/var/lib/acme`")];
|
|
||||||
nginx-external --- |993|dovecot;
|
|
||||||
autoconfig.xml --- postfix;
|
|
||||||
autoconfig.xml --- dovecot;
|
|
||||||
postfix --- echobot;
|
|
||||||
postfix --- |10080,10081|filtermail;
|
|
||||||
postfix --- users["`User data
|
|
||||||
home/vmail/mail`"];
|
|
||||||
postfix --- |doveauth.socket|doveauth;
|
|
||||||
dovecot --- |doveauth.socket|doveauth;
|
|
||||||
dovecot --- users;
|
|
||||||
dovecot --- |metadata.socket|chatmail-metadata;
|
|
||||||
doveauth --- users;
|
|
||||||
expunge --- users;
|
|
||||||
chatmail-metadata --- iroh-relay;
|
|
||||||
certs-nginx --> postfix;
|
|
||||||
certs-nginx --> dovecot;
|
|
||||||
style certs fill:#ff6;
|
|
||||||
style certs-nginx fill:#ff6;
|
|
||||||
style nginx-external fill:#fc9;
|
|
||||||
style nginx-right fill:#fc9;
|
|
||||||
```
|
|
||||||
|
|
||||||
The edges in this graph should not be taken too literally; they
|
|
||||||
reflect some sort of communication path or dependency relationship
|
|
||||||
between components of the chatmail server.
|
|
||||||
417
CHANGELOG.md
417
CHANGELOG.md
@@ -1,417 +0,0 @@
|
|||||||
# Changelog for chatmail deployment
|
|
||||||
|
|
||||||
## untagged
|
|
||||||
|
|
||||||
- cmdeploy: make --ssh-host work with localhost
|
|
||||||
([#659](https://github.com/chatmail/relay/pull/659))
|
|
||||||
|
|
||||||
- Update iroh-relay to 0.35.0
|
|
||||||
([#650](https://github.com/chatmail/relay/pull/650))
|
|
||||||
|
|
||||||
- Ignore all RCPT TO: parameters
|
|
||||||
([#651](https://github.com/chatmail/relay/pull/651))
|
|
||||||
|
|
||||||
- Use max username length in newemail.py, not min
|
|
||||||
([#648](https://github.com/chatmail/relay/pull/648))
|
|
||||||
|
|
||||||
- Increase maxproc for reinjecting ports from 10 to 100
|
|
||||||
([#646](https://github.com/chatmail/relay/pull/646))
|
|
||||||
|
|
||||||
- Allow ports 143 and 993 to be used by `dovecot` process
|
|
||||||
([#639](https://github.com/chatmail/relay/pull/639))
|
|
||||||
|
|
||||||
## 1.7.0 2025-09-11
|
|
||||||
|
|
||||||
- Make www upload path configurable
|
|
||||||
([#618](https://github.com/chatmail/relay/pull/618))
|
|
||||||
|
|
||||||
- Check whether GCC is installed in initenv.sh
|
|
||||||
([#608](https://github.com/chatmail/relay/pull/608))
|
|
||||||
|
|
||||||
- Expire push notification tokens after 90 days
|
|
||||||
([#583](https://github.com/chatmail/relay/pull/583))
|
|
||||||
|
|
||||||
- Use official `mtail` binary instead of `mtail` package
|
|
||||||
([#581](https://github.com/chatmail/relay/pull/581))
|
|
||||||
|
|
||||||
- dovecot: install from download.delta.chat instead of openSUSE Build Service
|
|
||||||
([#590](https://github.com/chatmail/relay/pull/590))
|
|
||||||
|
|
||||||
- Reconfigure Dovecot imap-login service to high-performance mode
|
|
||||||
([#578](https://github.com/chatmail/relay/pull/578))
|
|
||||||
|
|
||||||
- Set timezone to improve dovecot performance
|
|
||||||
([#584](https://github.com/chatmail/relay/pull/584))
|
|
||||||
|
|
||||||
- Increase nginx connection limits
|
|
||||||
([#576](https://github.com/chatmail/relay/pull/576))
|
|
||||||
|
|
||||||
- If `dns-utils` needs to be installed before cmdeploy run, apt update to make sure it works
|
|
||||||
([#560](https://github.com/chatmail/relay/pull/560))
|
|
||||||
|
|
||||||
- filtermail: respect config message size limit
|
|
||||||
([#572](https://github.com/chatmail/relay/pull/572))
|
|
||||||
|
|
||||||
- Don't deploy if one of the ports used for chatmail relay services is occupied by an unexpected process
|
|
||||||
([#568](https://github.com/chatmail/relay/pull/568))
|
|
||||||
|
|
||||||
- Add config value after how many days large files are deleted
|
|
||||||
([#555](https://github.com/chatmail/relay/pull/555))
|
|
||||||
|
|
||||||
- cmdeploy: push relay version to /etc/chatmail-version
|
|
||||||
([#573](https://github.com/chatmail/relay/pull/573))
|
|
||||||
|
|
||||||
- filtermail: allow partial body length in OpenPGP payloads
|
|
||||||
([#570](https://github.com/chatmail/relay/pull/570))
|
|
||||||
|
|
||||||
- chatmaild: allow echobot to receive unencrypted messages by default
|
|
||||||
([#556](https://github.com/chatmail/relay/pull/556))
|
|
||||||
|
|
||||||
|
|
||||||
## 1.6.0 2025-04-11
|
|
||||||
|
|
||||||
- Handle Port-25 connect errors more gracefully (common with VPNs)
|
|
||||||
([#552](https://github.com/chatmail/relay/pull/552))
|
|
||||||
|
|
||||||
- Avoid "acmetool not found" during initial run
|
|
||||||
([#550](https://github.com/chatmail/relay/pull/550))
|
|
||||||
|
|
||||||
- Fix timezone handling such that client/servers do not need to use
|
|
||||||
same timezone.
|
|
||||||
([#553](https://github.com/chatmail/relay/pull/553))
|
|
||||||
|
|
||||||
- Enforce end-to-end encryption for incoming messages.
|
|
||||||
New user address mailboxes now get a `enforceE2EEincoming` file
|
|
||||||
which prohibits incoming cleartext messages from other domains.
|
|
||||||
An outside MTA trying to submit a cleartext message will
|
|
||||||
get a "523 Encryption Needed" response, see RFC5248.
|
|
||||||
If the file does not exist (as it the case for all existing accounts)
|
|
||||||
incoming cleartext messages are accepted.
|
|
||||||
([#538](https://github.com/chatmail/server/pull/538))
|
|
||||||
|
|
||||||
- Enforce end-to-end encryption between local addresses
|
|
||||||
([#535](https://github.com/chatmail/server/pull/535))
|
|
||||||
|
|
||||||
- unbound: check that port 53 is not occupied by a different process
|
|
||||||
([#537](https://github.com/chatmail/server/pull/537))
|
|
||||||
|
|
||||||
- unbound: before unbound is there, use 9.9.9.9 for resolving
|
|
||||||
([#518](https://github.com/chatmail/relay/pull/518))
|
|
||||||
|
|
||||||
- Limit the bind for the HTTPS server on 8443 to 127.0.0.1
|
|
||||||
([#522](https://github.com/chatmail/server/pull/522))
|
|
||||||
([#532](https://github.com/chatmail/server/pull/532))
|
|
||||||
|
|
||||||
- Send SNI when connecting to outside servers
|
|
||||||
([#524](https://github.com/chatmail/server/pull/524))
|
|
||||||
|
|
||||||
- postfix master.cf: use 127.0.0.1 for consistency
|
|
||||||
([#544](https://github.com/chatmail/relay/pull/544))
|
|
||||||
|
|
||||||
- Pass through `original_content` instead of `content` in filtermail
|
|
||||||
([#509](https://github.com/chatmail/server/pull/509))
|
|
||||||
|
|
||||||
- Document TLS requirements in the readme
|
|
||||||
([#514](https://github.com/chatmail/server/pull/514))
|
|
||||||
|
|
||||||
- Remove cleanup service from submission ports
|
|
||||||
([#512](https://github.com/chatmail/server/pull/512))
|
|
||||||
|
|
||||||
- cmdeploy dovecot: delete big messages after 7 days
|
|
||||||
([#504](https://github.com/chatmail/server/pull/504))
|
|
||||||
|
|
||||||
- mtail: fix getting logs from STDIN
|
|
||||||
([#502](https://github.com/chatmail/server/pull/502))
|
|
||||||
|
|
||||||
- filtermail: don't require exactly 2 lines after openPGP payload
|
|
||||||
([#497](https://github.com/chatmail/server/pull/497))
|
|
||||||
|
|
||||||
- cmdeploy dns: offer alternative DKIM record format for some web interfaces
|
|
||||||
([#470](https://github.com/chatmail/server/pull/470))
|
|
||||||
|
|
||||||
- journald: remove old logs from disk
|
|
||||||
([#490](https://github.com/chatmail/server/pull/490))
|
|
||||||
|
|
||||||
- opendkim: restart once every day to mend RAM leaks
|
|
||||||
([#498](https://github.com/chatmail/server/pull/498)
|
|
||||||
|
|
||||||
- migration guide: let opendkim own the DKIM keys directory
|
|
||||||
([#468](https://github.com/chatmail/server/pull/468))
|
|
||||||
|
|
||||||
- improve secure-join message detection
|
|
||||||
([#473](https://github.com/chatmail/server/pull/473))
|
|
||||||
|
|
||||||
- use old crypt lib in python < 3.11
|
|
||||||
([#483](https://github.com/chatmail/server/pull/483))
|
|
||||||
|
|
||||||
- chatmaild: set umask to 0700 for doveauth + metadata
|
|
||||||
([#490](https://github.com/chatmail/server/pull/492))
|
|
||||||
|
|
||||||
- remove MTA-STS daemon
|
|
||||||
([#488](https://github.com/chatmail/server/pull/488))
|
|
||||||
|
|
||||||
- replace `Subject` with `[...]` for all outgoing mails.
|
|
||||||
([#481](https://github.com/chatmail/server/pull/481))
|
|
||||||
|
|
||||||
- opendkim: use su instead of sudo
|
|
||||||
([#491](https://github.com/chatmail/server/pull/491))
|
|
||||||
|
|
||||||
## 1.5.0 2024-12-20
|
|
||||||
|
|
||||||
- cmdeploy dns: always show recommended DNS records
|
|
||||||
([#463](https://github.com/chatmail/server/pull/463))
|
|
||||||
|
|
||||||
- add `--all` to `cmdeploy dns`
|
|
||||||
([#462](https://github.com/chatmail/server/pull/462))
|
|
||||||
|
|
||||||
- fix `_mta-sts` TXT DNS record
|
|
||||||
([#461](https://github.com/chatmail/server/pull/461)
|
|
||||||
|
|
||||||
- deploy `iroh-relay` and also update "realtime relay services" in privacy policy.
|
|
||||||
([#434](https://github.com/chatmail/server/pull/434))
|
|
||||||
([#451](https://github.com/chatmail/server/pull/451))
|
|
||||||
|
|
||||||
- add guide to migrate chatmail to a new server
|
|
||||||
([#429](https://github.com/chatmail/server/pull/429))
|
|
||||||
|
|
||||||
- disable anvil authentication penalty
|
|
||||||
([#414](https://github.com/chatmail/server/pull/444)
|
|
||||||
|
|
||||||
- increase `request_queue_size` for UNIX sockets to 1000.
|
|
||||||
([#437](https://github.com/chatmail/server/pull/437))
|
|
||||||
|
|
||||||
- add argument to `cmdeploy run` for specifying
|
|
||||||
a different SSH host than `mail_domain`
|
|
||||||
([#439](https://github.com/chatmail/server/pull/439))
|
|
||||||
|
|
||||||
- query autoritative nameserver to bypass DNS cache
|
|
||||||
([#424](https://github.com/chatmail/server/pull/424))
|
|
||||||
|
|
||||||
- add mtail support (new optional `mtail_address` ini value)
|
|
||||||
This defines the address on which [`mtail`](https://google.github.io/mtail/)
|
|
||||||
exposes its metrics collected from the logs.
|
|
||||||
If you want to collect the metrics with Prometheus,
|
|
||||||
setup a private network (e.g. WireGuard interface)
|
|
||||||
and assign an IP address from this network to the host.
|
|
||||||
If you do not plan to collect metrics,
|
|
||||||
keep this setting unset.
|
|
||||||
([#388](https://github.com/chatmail/server/pull/388))
|
|
||||||
|
|
||||||
- fix checking for required DNS records
|
|
||||||
([#412](https://github.com/chatmail/server/pull/412))
|
|
||||||
|
|
||||||
- add support for specifying whole domains for recipient passthrough list
|
|
||||||
([#408](https://github.com/chatmail/server/pull/408))
|
|
||||||
|
|
||||||
- add a paragraph about "account deletion" to info page
|
|
||||||
([#405](https://github.com/chatmail/server/pull/405))
|
|
||||||
|
|
||||||
- avoid nginx listening on ipv6 if v6 is dsiabled
|
|
||||||
([#402](https://github.com/chatmail/server/pull/402))
|
|
||||||
|
|
||||||
- refactor ssh-based execution to allow organizing remote functions in
|
|
||||||
modules.
|
|
||||||
([#396](https://github.com/chatmail/server/pull/396))
|
|
||||||
|
|
||||||
- trigger "apt upgrade" during "cmdeploy run"
|
|
||||||
([#398](https://github.com/chatmail/server/pull/398))
|
|
||||||
|
|
||||||
- drop hispanilandia passthrough address
|
|
||||||
([#401](https://github.com/chatmail/server/pull/401))
|
|
||||||
|
|
||||||
- set CAA record flags to 0
|
|
||||||
|
|
||||||
- add IMAP capabilities instead of overwriting them
|
|
||||||
([#413](https://github.com/chatmail/server/pull/413))
|
|
||||||
|
|
||||||
- fix OpenPGP payload check
|
|
||||||
([#435](https://github.com/chatmail/server/pull/435))
|
|
||||||
|
|
||||||
- fix Dovecot quota_max_mail_size to use max_message_size config value
|
|
||||||
([#438](https://github.com/chatmail/server/pull/438))
|
|
||||||
|
|
||||||
|
|
||||||
## 1.4.1 2024-07-31
|
|
||||||
|
|
||||||
- fix metadata dictproxy which would confuse transactions
|
|
||||||
resulting in missed notifications and other issues.
|
|
||||||
([#393](https://github.com/chatmail/server/pull/393))
|
|
||||||
([#394](https://github.com/chatmail/server/pull/394))
|
|
||||||
|
|
||||||
- add optional "imap_rawlog" config option. If true,
|
|
||||||
.in/.out files are created in user home dirs
|
|
||||||
containing the imap protocol messages.
|
|
||||||
([#389](https://github.com/chatmail/server/pull/389))
|
|
||||||
|
|
||||||
## 1.4.0 2024-07-28
|
|
||||||
|
|
||||||
- Add `disable_ipv6` config option to chatmail.ini.
|
|
||||||
Required if the server doesn't have IPv6 connectivity.
|
|
||||||
([#312](https://github.com/chatmail/server/pull/312))
|
|
||||||
|
|
||||||
- allow current K9/Thunderbird-mail releases to send encrypted messages
|
|
||||||
outside by accepting their localized "encrypted subject" strings.
|
|
||||||
([#370](https://github.com/chatmail/server/pull/370))
|
|
||||||
|
|
||||||
- Migrate and remove sqlite database in favor of password/lastlogin tracking
|
|
||||||
in a user's maildir.
|
|
||||||
([#379](https://github.com/chatmail/server/pull/379))
|
|
||||||
|
|
||||||
- Require pyinfra V3 installed on the client side,
|
|
||||||
run `./scripts/initenv.sh` to upgrade locally.
|
|
||||||
([#378](https://github.com/chatmail/server/pull/378))
|
|
||||||
|
|
||||||
- don't hardcode "/home/vmail" paths but rather set them
|
|
||||||
once in the config object and use it everywhere else,
|
|
||||||
thereby also improving testability.
|
|
||||||
([#351](https://github.com/chatmail/server/pull/351))
|
|
||||||
temporarily introduced obligatory "passdb_path" and "mailboxes_dir"
|
|
||||||
settings but they were removed/obsoleted in
|
|
||||||
([#380](https://github.com/chatmail/server/pull/380))
|
|
||||||
|
|
||||||
- BREAKING: new required chatmail.ini value 'delete_inactive_users_after = 100'
|
|
||||||
which removes users from database and mails after 100 days without any login.
|
|
||||||
([#350](https://github.com/chatmail/server/pull/350))
|
|
||||||
|
|
||||||
- Refine DNS checking to distinguish between "required" and "recommended" settings
|
|
||||||
([#372](https://github.com/chatmail/server/pull/372))
|
|
||||||
|
|
||||||
- reload nginx in the acmetool cronjob
|
|
||||||
([#360](https://github.com/chatmail/server/pull/360))
|
|
||||||
|
|
||||||
- remove checking of reverse-DNS PTR records. Chatmail-servers don't
|
|
||||||
depend on it and even in the wider e-mail system it's not common anymore.
|
|
||||||
If it's an issue, a chatmail operator can still care to properly set reverse DNS.
|
|
||||||
([#348](https://github.com/chatmail/server/pull/348))
|
|
||||||
|
|
||||||
- Make DNS-checking faster and more interactive, run it fully during "cmdeploy run",
|
|
||||||
also introducing a generic mechanism for rapid remote ssh-based python function execution.
|
|
||||||
([#346](https://github.com/chatmail/server/pull/346))
|
|
||||||
|
|
||||||
- Don't fix file ownership of /home/vmail
|
|
||||||
([#345](https://github.com/chatmail/server/pull/345))
|
|
||||||
|
|
||||||
- Support iterating over all users with doveadm commands
|
|
||||||
([#344](https://github.com/chatmail/server/pull/344))
|
|
||||||
|
|
||||||
- Test and fix for attempts to create inadmissible accounts
|
|
||||||
([#333](https://github.com/chatmail/server/pull/333))
|
|
||||||
|
|
||||||
- check that OpenPGP has only PKESK, SKESK and SEIPD packets
|
|
||||||
([#323](https://github.com/chatmail/server/pull/323),
|
|
||||||
[#324](https://github.com/chatmail/server/pull/324))
|
|
||||||
|
|
||||||
- improve filtermail checks for encrypted messages and drop support for unencrypted MDNs
|
|
||||||
([#320](https://github.com/chatmail/server/pull/320))
|
|
||||||
|
|
||||||
- replace `bash` with `/bin/sh`
|
|
||||||
([#334](https://github.com/chatmail/server/pull/334))
|
|
||||||
|
|
||||||
- Increase number of logged in IMAP sessions to 50000
|
|
||||||
([#335](https://github.com/chatmail/server/pull/335))
|
|
||||||
|
|
||||||
- filtermail: do not allow ASCII armor without actual payload
|
|
||||||
([#325](https://github.com/chatmail/server/pull/325))
|
|
||||||
|
|
||||||
- Remove sieve to enable hardlink deduplication in LMTP
|
|
||||||
([#343](https://github.com/chatmail/server/pull/343))
|
|
||||||
|
|
||||||
- dovecot: enable gzip compression on disk
|
|
||||||
([#341](https://github.com/chatmail/server/pull/341))
|
|
||||||
|
|
||||||
- DKIM-sign Content-Type and oversign all signed headers
|
|
||||||
([#296](https://github.com/chatmail/server/pull/296))
|
|
||||||
|
|
||||||
- Add nonci_accounts metric
|
|
||||||
([#347](https://github.com/chatmail/server/pull/347))
|
|
||||||
|
|
||||||
- doveauth: log when a new account is created
|
|
||||||
([#349](https://github.com/chatmail/server/pull/349))
|
|
||||||
|
|
||||||
- Multiplex HTTPS, IMAP and SMTP on port 443
|
|
||||||
([#357](https://github.com/chatmail/server/pull/357))
|
|
||||||
|
|
||||||
## 1.3.0 - 2024-06-06
|
|
||||||
|
|
||||||
- don't check necessary DNS records on cmdeploy init anymore
|
|
||||||
([#316](https://github.com/chatmail/server/pull/316))
|
|
||||||
|
|
||||||
- ensure cron and acl are installed
|
|
||||||
([#293](https://github.com/chatmail/server/pull/293),
|
|
||||||
[#310](https://github.com/chatmail/server/pull/310))
|
|
||||||
|
|
||||||
- change default for delete_mails_after from 40 to 20 days
|
|
||||||
([#300](https://github.com/chatmail/server/pull/300))
|
|
||||||
|
|
||||||
- save journald logs only to memory and save nginx logs to journald instead of file
|
|
||||||
([#299](https://github.com/chatmail/server/pull/299))
|
|
||||||
|
|
||||||
- fix writing of multiple obs repositories in `/etc/apt/sources.list`
|
|
||||||
([#290](https://github.com/chatmail/server/pull/290))
|
|
||||||
|
|
||||||
- metadata: add support for `/shared/vendor/deltachat/irohrelay`
|
|
||||||
([#284](https://github.com/chatmail/server/pull/284))
|
|
||||||
|
|
||||||
- Emit "XCHATMAIL" capability from IMAP server
|
|
||||||
([#278](https://github.com/chatmail/server/pull/278))
|
|
||||||
|
|
||||||
- Move echobot into `/var/lib/echobot`
|
|
||||||
([#281](https://github.com/chatmail/server/pull/281))
|
|
||||||
|
|
||||||
- Accept Let's Encrypt's new Terms of Service
|
|
||||||
([#276](https://github.com/chatmail/server/pull/276))
|
|
||||||
|
|
||||||
- Reload Dovecot and Postfix when TLS certificate updates
|
|
||||||
([#271](https://github.com/chatmail/server/pull/271))
|
|
||||||
|
|
||||||
- Use forked version of dovecot without hardcoded delays
|
|
||||||
([#270](https://github.com/chatmail/server/pull/270))
|
|
||||||
|
|
||||||
## 1.2.0 - 2024-04-04
|
|
||||||
|
|
||||||
- Install dig on the server to resolve DNS records
|
|
||||||
([#267](https://github.com/chatmail/server/pull/267))
|
|
||||||
|
|
||||||
- preserve notification order and exponentially backoff with
|
|
||||||
retries for tokens where we didn't get a successful return
|
|
||||||
([#265](https://github.com/chatmail/server/pull/265))
|
|
||||||
|
|
||||||
- Run chatmail-metadata and doveauth as vmail
|
|
||||||
([#261](https://github.com/chatmail/server/pull/261))
|
|
||||||
|
|
||||||
- Apply systemd restrictions to echobot
|
|
||||||
([#259](https://github.com/chatmail/server/pull/259))
|
|
||||||
|
|
||||||
- re-enable running the CI in pull requests, but not concurrently
|
|
||||||
([#258](https://github.com/chatmail/server/pull/258))
|
|
||||||
|
|
||||||
|
|
||||||
## 1.1.0 - 2024-03-28
|
|
||||||
|
|
||||||
### The changelog starts to record changes from March 15th, 2024
|
|
||||||
|
|
||||||
- Move systemd unit templates to cmdeploy package
|
|
||||||
([#255](https://github.com/chatmail/server/pull/255))
|
|
||||||
|
|
||||||
- Persist push tokens and support multiple devices per address
|
|
||||||
([#254](https://github.com/chatmail/server/pull/254))
|
|
||||||
|
|
||||||
- Avoid warning for regular doveauth protocol's hello message.
|
|
||||||
([#250](https://github.com/chatmail/server/pull/250))
|
|
||||||
|
|
||||||
- Fix various tests to pass again with "cmdeploy test".
|
|
||||||
([#245](https://github.com/chatmail/server/pull/245),
|
|
||||||
[#242](https://github.com/chatmail/server/pull/242))
|
|
||||||
|
|
||||||
- Ensure lets-encrypt certificates are reloaded after renewal
|
|
||||||
([#244](https://github.com/chatmail/server/pull/244))
|
|
||||||
|
|
||||||
- Persist tokens to avoid iOS users losing push-notifications when the
|
|
||||||
chatmail metadata service is restarted (happens regularly during deploys)
|
|
||||||
([#238](https://github.com/chatmail/server/pull/238))
|
|
||||||
|
|
||||||
- Fix failing sieve-script compile errors on incoming messages
|
|
||||||
([#237](https://github.com/chatmail/server/pull/237))
|
|
||||||
|
|
||||||
- Fix quota reporting after expunging of old mails
|
|
||||||
([#233](https://github.com/chatmail/server/pull/233))
|
|
||||||
21
LICENSE
21
LICENSE
@@ -1,21 +0,0 @@
|
|||||||
The MIT License (MIT)
|
|
||||||
|
|
||||||
Copyright (c) 2023, chatmail and delta chat teams
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
|
||||||
this software and associated documentation files (the "Software"), to deal in
|
|
||||||
the Software without restriction, including without limitation the rights to
|
|
||||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
|
||||||
of the Software, and to permit persons to whom the Software is furnished to do
|
|
||||||
so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
|
|
||||||
484
README.md
484
README.md
@@ -1,479 +1,59 @@
|
|||||||
|
# Chat Mail server configuration
|
||||||
|
|
||||||
<img width="800px" src="www/src/collage-top.png"/>
|
This repository setups a ready-to-go chatmail instance
|
||||||
|
|
||||||
# Chatmail relays for end-to-end encrypted e-mail
|
|
||||||
|
|
||||||
Chatmail relay servers are interoperable Mail Transport Agents (MTAs) designed for:
|
|
||||||
|
|
||||||
- **Convenience:** Low friction instant onboarding
|
|
||||||
|
|
||||||
- **Privacy:** No name, phone numbers, email required or collected
|
|
||||||
|
|
||||||
- **End-to-End Encryption enforced**: only OpenPGP messages with metadata minimization allowed
|
|
||||||
|
|
||||||
- **Instant:** Privacy-preserving Push Notifications for Apple, Google, and Huawei
|
|
||||||
|
|
||||||
- **Speed:** Message delivery in half a second, with optional P2P realtime connections
|
|
||||||
|
|
||||||
- **Transport Security:** Strict TLS and DKIM enforced
|
|
||||||
|
|
||||||
- **Reliability:** No spam or IP reputation checks; rate-limits are suitable for realtime chats
|
|
||||||
|
|
||||||
- **Efficiency:** Messages are only stored for transit and removed automatically
|
|
||||||
|
|
||||||
This repository contains everything needed to setup a ready-to-use chatmail relay
|
|
||||||
comprised of a minimal setup of the battle-tested
|
comprised of a minimal setup of the battle-tested
|
||||||
[Postfix SMTP](https://www.postfix.org) and [Dovecot IMAP](https://www.dovecot.org) MTAs/MDAs.
|
[postfix smtp server](https://www.postfix.org) and [dovecot imap server](https://www.dovecot.org).
|
||||||
|
|
||||||
The automated setup is designed and optimized for providing chatmail addresses
|
|
||||||
for immediate permission-free onboarding through chat apps and bots.
|
|
||||||
Chatmail addresses are automatically created at first login,
|
|
||||||
after which the initially specified password is required
|
|
||||||
for sending and receiving messages through them.
|
|
||||||
|
|
||||||
Please see [this list of known apps and client projects](https://chatmail.at/clients.html)
|
|
||||||
and [this list of known public 3rd party chatmail relay servers](https://chatmail.at/relays).
|
|
||||||
|
|
||||||
|
|
||||||
## Minimal requirements, Prerequisites
|
|
||||||
|
|
||||||
You will need the following:
|
|
||||||
|
|
||||||
- Control over a domain through a DNS provider of your choice.
|
|
||||||
|
|
||||||
- A Debian 12 server with reachable SMTP/SUBMISSIONS/IMAPS/HTTPS ports.
|
|
||||||
IPv6 is encouraged if available.
|
|
||||||
Chatmail relay servers only require 1GB RAM, one CPU, and perhaps 10GB storage for a
|
|
||||||
few thousand active chatmail addresses.
|
|
||||||
|
|
||||||
- Key-based SSH authentication to the root user.
|
|
||||||
You must add a passphrase-protected private key to your local ssh-agent
|
|
||||||
because you can't type in your passphrase during deployment.
|
|
||||||
(An ed25519 private key is required due to an [upstream bug in paramiko](https://github.com/paramiko/paramiko/issues/2191))
|
|
||||||
|
|
||||||
|
|
||||||
## Getting started
|
## Getting started
|
||||||
|
|
||||||
We use `chat.example.org` as the chatmail domain in the following steps.
|
1. prepare your local system:
|
||||||
Please substitute it with your own domain.
|
|
||||||
|
|
||||||
1. Setup the initial DNS records.
|
scripts/init.sh
|
||||||
The following is an example in the familiar BIND zone file format with
|
|
||||||
a TTL of 1 hour (3600 seconds).
|
|
||||||
Please substitute your domain and IP addresses.
|
|
||||||
|
|
||||||
```
|
2. set environment variable to the chatmail domain you want to setup:
|
||||||
chat.example.com. 3600 IN A 198.51.100.5
|
|
||||||
chat.example.com. 3600 IN AAAA 2001:db8::5
|
|
||||||
www.chat.example.com. 3600 IN CNAME chat.example.com.
|
|
||||||
mta-sts.chat.example.com. 3600 IN CNAME chat.example.com.
|
|
||||||
```
|
|
||||||
|
|
||||||
2. On your local PC, clone the repository and bootstrap the Python virtualenv.
|
export CHATMAIL_DOMAIN=c1.testrun.org # replace with your host
|
||||||
|
|
||||||
```
|
3. run the deploy of the chat mail instance:
|
||||||
git clone https://github.com/chatmail/relay
|
|
||||||
cd relay
|
|
||||||
scripts/initenv.sh
|
|
||||||
```
|
|
||||||
|
|
||||||
3. On your local PC, create chatmail configuration file `chatmail.ini`:
|
scripts/deploy.sh
|
||||||
|
|
||||||
```
|
|
||||||
scripts/cmdeploy init chat.example.org # <-- use your domain
|
|
||||||
```
|
|
||||||
|
|
||||||
4. Verify that SSH root login to your remote server works:
|
## Running tests and benchmarks (offline and online)
|
||||||
|
|
||||||
```
|
1. Set `CHATMAIL_SSH` so that `ssh root@$CHATMAIL_SSH` allows
|
||||||
ssh root@chat.example.org # <-- use your domain
|
to login to the chatmail instance server.
|
||||||
```
|
|
||||||
|
|
||||||
5. From your local PC, deploy the remote chatmail relay server:
|
2. To run local and online tests:
|
||||||
|
|
||||||
```
|
scripts/test.sh
|
||||||
scripts/cmdeploy run
|
|
||||||
```
|
|
||||||
This script will also check that you have all necessary DNS records.
|
|
||||||
If DNS records are missing, it will recommend
|
|
||||||
which you should configure at your DNS provider
|
|
||||||
(it can take some time until they are public).
|
|
||||||
|
|
||||||
### Other helpful commands
|
3. To run benchmarks against your chatmail instance:
|
||||||
|
|
||||||
To check the status of your remotely running chatmail service:
|
scripts/bench.sh
|
||||||
|
|
||||||
|
## Running tests (offline and online)
|
||||||
|
|
||||||
```
|
```
|
||||||
scripts/cmdeploy status
|
## Dovecot/Postfix configuration
|
||||||
```
|
|
||||||
|
|
||||||
To display and check all recommended DNS records:
|
|
||||||
|
|
||||||
```
|
|
||||||
scripts/cmdeploy dns
|
|
||||||
```
|
|
||||||
|
|
||||||
To test whether your chatmail service is working correctly:
|
|
||||||
|
|
||||||
```
|
|
||||||
scripts/cmdeploy test
|
|
||||||
```
|
|
||||||
|
|
||||||
To measure the performance of your chatmail service:
|
|
||||||
|
|
||||||
```
|
|
||||||
scripts/cmdeploy bench
|
|
||||||
```
|
|
||||||
|
|
||||||
## Overview of this repository
|
|
||||||
|
|
||||||
This repository has four directories:
|
|
||||||
|
|
||||||
- [cmdeploy](https://github.com/chatmail/relay/tree/main/cmdeploy)
|
|
||||||
is a collection of configuration files
|
|
||||||
and a [pyinfra](https://pyinfra.com)-based deployment script.
|
|
||||||
|
|
||||||
- [chatmaild](https://github.com/chatmail/relay/tree/main/chatmaild)
|
|
||||||
is a Python package containing several small services
|
|
||||||
which handle authentication,
|
|
||||||
trigger push notifications on new messages,
|
|
||||||
ensure that outbound mails are encrypted,
|
|
||||||
delete inactive users,
|
|
||||||
and some other minor things.
|
|
||||||
chatmaild can also be installed as a stand-alone Python package.
|
|
||||||
|
|
||||||
- [www](https://github.com/chatmail/relay/tree/main/www)
|
|
||||||
contains the html, css, and markdown files
|
|
||||||
which make up a chatmail relay's web page.
|
|
||||||
Edit them before deploying to make your chatmail relay stand out.
|
|
||||||
|
|
||||||
- [scripts](https://github.com/chatmail/relay/tree/main/scripts)
|
|
||||||
offers two convenience tools for beginners;
|
|
||||||
`initenv.sh` installs the necessary dependencies to a local virtual environment,
|
|
||||||
and the `scripts/cmdeploy` script enables you
|
|
||||||
to run the `cmdeploy` command line tool in the local virtual environment.
|
|
||||||
|
|
||||||
### cmdeploy
|
|
||||||
|
|
||||||
The `cmdeploy/src/cmdeploy/cmdeploy.py` command line tool
|
|
||||||
helps with setting up and managing the chatmail service.
|
|
||||||
`cmdeploy init` creates the `chatmail.ini` config file.
|
|
||||||
`cmdeploy run` uses a [pyinfra](https://pyinfra.com/)-based [`script`](cmdeploy/src/cmdeploy/__init__.py)
|
|
||||||
to automatically install or upgrade all chatmail components on a relay,
|
|
||||||
according to the `chatmail.ini` config.
|
|
||||||
|
|
||||||
The components of chatmail are:
|
|
||||||
|
|
||||||
- [Postfix SMTP MTA](https://www.postfix.org) accepts and relays messages
|
|
||||||
(both from your users and from the wider e-mail MTA network)
|
|
||||||
|
|
||||||
- [Dovecot IMAP MDA](https://www.dovecot.org) stores messages for your users until they download them
|
|
||||||
|
|
||||||
- [Nginx](https://nginx.org/) shows the web page with your privacy policy and additional information
|
|
||||||
|
|
||||||
- [acmetool](https://hlandau.github.io/acmetool/) manages TLS certificates for Dovecot, Postfix, and Nginx
|
|
||||||
|
|
||||||
- [OpenDKIM](http://www.opendkim.org/) for signing messages with DKIM and rejecting inbound messages without DKIM
|
|
||||||
|
|
||||||
- [mtail](https://google.github.io/mtail/) for collecting anonymized metrics in case you have monitoring
|
|
||||||
|
|
||||||
- [Iroh relay](https://www.iroh.computer/docs/concepts/relay)
|
|
||||||
which helps client devices to establish Peer-to-Peer connections
|
|
||||||
|
|
||||||
- and the chatmaild services, explained in the next section:
|
|
||||||
|
|
||||||
### chatmaild
|
|
||||||
|
|
||||||
`chatmaild` implements various systemd-controlled services
|
|
||||||
that integrate with Dovecot and Postfix to achieve instant-onboarding and
|
|
||||||
only relaying OpenPGP end-to-end encrypted messages.
|
|
||||||
A short overview of `chatmaild` services:
|
|
||||||
|
|
||||||
- [`doveauth`](https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/doveauth.py)
|
|
||||||
implements create-on-login address semantics and is used
|
|
||||||
by Dovecot during IMAP login and by Postfix during SMTP/SUBMISSION login
|
|
||||||
which in turn uses [Dovecot SASL](https://doc.dovecot.org/configuration_manual/authentication/dict/#complete-example-for-authenticating-via-a-unix-socket)
|
|
||||||
to authenticate logins.
|
|
||||||
|
|
||||||
- [`filtermail`](https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/filtermail.py)
|
|
||||||
prevents unencrypted email from leaving or entering the chatmail service
|
|
||||||
and is integrated into Postfix's outbound and inbound mail pipelines.
|
|
||||||
|
|
||||||
- [`chatmail-metadata`](https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/metadata.py) is contacted by a
|
|
||||||
[Dovecot lua script](https://github.com/chatmail/relay/blob/main/cmdeploy/src/cmdeploy/dovecot/push_notification.lua)
|
|
||||||
to store user-specific relay-side config.
|
|
||||||
On new messages,
|
|
||||||
it [passes the user's push notification token](https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/notifier.py)
|
|
||||||
to [notifications.delta.chat](https://delta.chat/help#instant-delivery)
|
|
||||||
so the push notifications on the user's phone can be triggered
|
|
||||||
by Apple/Google/Huawei.
|
|
||||||
|
|
||||||
- [`delete_inactive_users`](https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/delete_inactive_users.py)
|
|
||||||
deletes users if they have not logged in for a very long time.
|
|
||||||
The timeframe can be configured in `chatmail.ini`.
|
|
||||||
|
|
||||||
- [`lastlogin`](https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/lastlogin.py)
|
|
||||||
is contacted by Dovecot when a user logs in
|
|
||||||
and stores the date of the login.
|
|
||||||
|
|
||||||
- [`echobot`](https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/echo.py)
|
|
||||||
is a small bot for test purposes.
|
|
||||||
It simply echoes back messages from users.
|
|
||||||
|
|
||||||
- [`chatmail-metrics`](https://github.com/chatmail/relay/blob/main/chatmaild/src/chatmaild/metrics.py)
|
|
||||||
collects some metrics and displays them at `https://example.org/metrics`.
|
|
||||||
|
|
||||||
### Home page and getting started for users
|
|
||||||
|
|
||||||
`cmdeploy run` also creates default static web pages and deploys them
|
|
||||||
to a Nginx web server with:
|
|
||||||
|
|
||||||
- a default `index.html` along with a QR code that users can click to
|
|
||||||
create an address on your chatmail relay
|
|
||||||
|
|
||||||
- a default `info.html` that is linked from the home page
|
|
||||||
|
|
||||||
- a default `policy.html` that is linked from the home page
|
|
||||||
|
|
||||||
All `.html` files are generated
|
|
||||||
by the according markdown `.md` file in the `www/src` directory.
|
|
||||||
|
|
||||||
|
|
||||||
### Refining the web pages
|
|
||||||
|
|
||||||
```
|
|
||||||
scripts/cmdeploy webdev
|
|
||||||
```
|
|
||||||
|
|
||||||
This starts a local live development cycle for chatmail web pages:
|
|
||||||
|
|
||||||
- uses the `www/src/page-layout.html` file for producing static
|
|
||||||
HTML pages from `www/src/*.md` files
|
|
||||||
|
|
||||||
- continuously builds the web presence reading files from `www/src` directory
|
|
||||||
and generating HTML files and copying assets to the `www/build` directory.
|
|
||||||
|
|
||||||
- Starts a browser window automatically where you can "refresh" as needed.
|
|
||||||
|
|
||||||
#### Custom web pages
|
|
||||||
|
|
||||||
You can skip uploading a web page
|
|
||||||
by setting `www_folder=disabled` in `chatmail.ini`.
|
|
||||||
|
|
||||||
If you want to manage your web pages outside this git repository,
|
|
||||||
you can set `www_folder` in `chatmail.ini` to a custom directory on your computer.
|
|
||||||
`cmdeploy run` will upload it as the server's home page,
|
|
||||||
and if it contains a `src/index.md` file,
|
|
||||||
will build it with hugo.
|
|
||||||
|
|
||||||
|
|
||||||
## Mailbox directory layout
|
|
||||||
|
|
||||||
Fresh chatmail addresses have a mailbox directory that contains:
|
|
||||||
|
|
||||||
- a `password` file with the salted password required for authenticating
|
|
||||||
whether a login may use the address to send/receive messages.
|
|
||||||
If you modify the password file manually, you effectively block the user.
|
|
||||||
|
|
||||||
- `enforceE2EEincoming` is a default-created file with each address.
|
|
||||||
If present the file indicates that this chatmail address rejects incoming cleartext messages.
|
|
||||||
If absent the address accepts incoming cleartext messages.
|
|
||||||
|
|
||||||
- `dovecot*`, `cur`, `new` and `tmp` represent IMAP/mailbox state.
|
|
||||||
If the address is only used by one device, the Maildir directories
|
|
||||||
will typically be empty unless the user of that address hasn't been online
|
|
||||||
for a while.
|
|
||||||
|
|
||||||
|
|
||||||
## Emergency Commands to disable automatic address creation
|
|
||||||
|
|
||||||
If you need to stop address creation,
|
|
||||||
e.g. because some script is wildly creating addresses,
|
|
||||||
login with ssh and run:
|
|
||||||
|
|
||||||
```
|
|
||||||
touch /etc/chatmail-nocreate
|
|
||||||
```
|
|
||||||
|
|
||||||
Chatmail address creation will be denied while this file is present.
|
|
||||||
|
|
||||||
### Ports
|
### Ports
|
||||||
|
|
||||||
[Postfix](http://www.postfix.org/) listens on ports 25 (SMTP) and 587 (SUBMISSION) and 465 (SUBMISSIONS).
|
Postfix listens on ports 25 (smtp) and 587 (submission) and 465 (submissions).
|
||||||
[Dovecot](https://www.dovecot.org/) listens on ports 143 (IMAP) and 993 (IMAPS).
|
Dovecot listens on ports 143(imap) and 993 (imaps).
|
||||||
[Nginx](https://www.nginx.com/) listens on port 8443 (HTTPS-ALT) and 443 (HTTPS).
|
|
||||||
Port 443 multiplexes HTTPS, IMAP and SMTP using ALPN to redirect connections to ports 8443, 465 or 993.
|
|
||||||
[acmetool](https://hlandau.github.io/acmetool/) listens on port 80 (HTTP).
|
|
||||||
|
|
||||||
chatmail-core based apps will, however, discover all ports and configurations
|
## DNS
|
||||||
automatically by reading the [autoconfig XML file](https://www.ietf.org/archive/id/draft-bucksch-autoconfig-00.html) from the chatmail relay server.
|
|
||||||
|
|
||||||
## Email authentication
|
For DKIM you must add a DNS entry as found in /etc/opendkim/selector.txt on your chatmail instance.
|
||||||
|
The above `scripts/deploy.sh` prints out the DKIM selector and DNS entry you
|
||||||
|
need to setup with your DNS provider.
|
||||||
|
|
||||||
Chatmail relays enforce [DKIM](https://www.rfc-editor.org/rfc/rfc6376)
|
## Emergency Commands
|
||||||
to authenticate incoming emails.
|
|
||||||
Incoming emails must have a valid DKIM signature with
|
|
||||||
Signing Domain Identifier (SDID, `d=` parameter in the DKIM-Signature header)
|
|
||||||
equal to the `From:` header domain.
|
|
||||||
This property is checked by OpenDKIM screen policy script
|
|
||||||
before validating the signatures.
|
|
||||||
This corresponds to strict [DMARC](https://www.rfc-editor.org/rfc/rfc7489) alignment (`adkim=s`),
|
|
||||||
but chatmail does not rely on DMARC and does not consult the sender policy published in DMARC records.
|
|
||||||
Other legacy authentication mechanisms such as [iprev](https://www.rfc-editor.org/rfc/rfc8601#section-2.7.3)
|
|
||||||
and [SPF](https://www.rfc-editor.org/rfc/rfc7208) are also not taken into account.
|
|
||||||
If there is no valid DKIM signature on the incoming email,
|
|
||||||
the sender receives a "5.7.1 No valid DKIM signature found" error.
|
|
||||||
|
|
||||||
Outgoing emails must be sent over an authenticated connection
|
If you need to stop account creation,
|
||||||
with envelope MAIL FROM (return path) corresponding to the login.
|
e.g. because some script is wildly creating accounts,
|
||||||
This is ensured by Postfix which maps login username
|
just run `touch /tmp/nocreate`.
|
||||||
to MAIL FROM with
|
You can remove the file
|
||||||
[`smtpd_sender_login_maps`](https://www.postfix.org/postconf.5.html#smtpd_sender_login_maps)
|
as soon as the attacker was banned
|
||||||
and rejects incorrectly authenticated emails with [`reject_sender_login_mismatch`](reject_sender_login_mismatch) policy.
|
by different means.
|
||||||
`From:` header must correspond to envelope MAIL FROM,
|
|
||||||
this is ensured by `filtermail` proxy.
|
|
||||||
|
|
||||||
## TLS requirements
|
|
||||||
|
|
||||||
Postfix is configured to require valid TLS
|
|
||||||
by setting [`smtp_tls_security_level`](https://www.postfix.org/postconf.5.html#smtp_tls_security_level) to `verify`.
|
|
||||||
If emails don't arrive at your chatmail relay server,
|
|
||||||
the problem is likely that your relay does not have a valid TLS certificate.
|
|
||||||
|
|
||||||
You can test it by resolving `MX` records of your relay domain
|
|
||||||
and then connecting to MX relays (e.g `mx.example.org`) with
|
|
||||||
`openssl s_client -connect mx.example.org:25 -verify_hostname mx.example.org -verify_return_error -starttls smtp`
|
|
||||||
from the host that has open port 25 to verify that certificate is valid.
|
|
||||||
|
|
||||||
When providing a TLS certificate to your chatmail relay server,
|
|
||||||
make sure to provide the full certificate chain
|
|
||||||
and not just the last certificate.
|
|
||||||
|
|
||||||
If you are running an Exim server and don't see incoming connections
|
|
||||||
from a chatmail relay server in the logs,
|
|
||||||
make sure `smtp_no_mail` log item is enabled in the config
|
|
||||||
with `log_selector = +smtp_no_mail`.
|
|
||||||
By default Exim does not log sessions that are closed
|
|
||||||
before sending the `MAIL` command.
|
|
||||||
This happens if certificate is not recognized as valid by Postfix,
|
|
||||||
so you might think that connection is not established
|
|
||||||
while actually it is a problem with your TLS certificate.
|
|
||||||
|
|
||||||
## Migrating a chatmail relay to a new host
|
|
||||||
|
|
||||||
If you want to migrate chatmail relay from an old machine
|
|
||||||
to a new machine,
|
|
||||||
you can use these steps.
|
|
||||||
They were tested with a Linux laptop;
|
|
||||||
you might need to adjust some of the steps to your environment.
|
|
||||||
|
|
||||||
Let's assume that your `mail_domain` is `mail.example.org`,
|
|
||||||
all involved machines run Debian 12,
|
|
||||||
your old site's IP address is `13.37.13.37`,
|
|
||||||
and your new site's IP address is `13.12.23.42`.
|
|
||||||
|
|
||||||
Note, you should lower the TTLs of your DNS records to a value
|
|
||||||
such as 300 (5 minutes) so the migration happens as smoothly as possible.
|
|
||||||
|
|
||||||
During the guide you might get a warning about changed SSH Host keys;
|
|
||||||
in this case, just run `ssh-keygen -R "mail.example.org"` as recommended.
|
|
||||||
|
|
||||||
1. First, disable mail services on the old site.
|
|
||||||
|
|
||||||
```
|
|
||||||
cmdeploy run --disable-mail --ssh-host 13.37.13.37
|
|
||||||
```
|
|
||||||
|
|
||||||
Now your users will notice the migration
|
|
||||||
and will not be able to send or receive messages
|
|
||||||
until the migration is completed.
|
|
||||||
|
|
||||||
2. Now we want to copy `/home/vmail`, `/var/lib/acme`, `/etc/dkimkeys`, `/run/echobot`, and `/var/spool/postfix` to the new site.
|
|
||||||
Login to the old site while forwarding your SSH agent
|
|
||||||
so you can copy directly from the old to the new site with your SSH key:
|
|
||||||
```
|
|
||||||
ssh -A root@13.37.13.37
|
|
||||||
tar c - /home/vmail/mail /var/lib/acme /etc/dkimkeys /run/echobot /var/spool/postfix | ssh root@13.12.23.42 "tar x -C /"
|
|
||||||
```
|
|
||||||
|
|
||||||
This transfers all addresses, the TLS certificate, DKIM keys (so DKIM DNS record remains valid), and the echobot's password so it continues to function.
|
|
||||||
It also preserves the Postfix mail spool so any messages pending delivery will still be delivered.
|
|
||||||
|
|
||||||
3. Install chatmail on the new machine:
|
|
||||||
|
|
||||||
```
|
|
||||||
cmdeploy run --disable-mail --ssh-host 13.12.23.42
|
|
||||||
```
|
|
||||||
Postfix and Dovecot are disabled for now; we will enable them later.
|
|
||||||
We first need to make the new site fully operational.
|
|
||||||
|
|
||||||
3. On the new site, run the following to ensure the ownership is correct in case UIDs/GIDs changed:
|
|
||||||
|
|
||||||
```
|
|
||||||
chown root: -R /var/lib/acme
|
|
||||||
chown opendkim: -R /etc/dkimkeys
|
|
||||||
chown vmail: -R /home/vmail/mail
|
|
||||||
chown echobot: -R /run/echobot
|
|
||||||
```
|
|
||||||
|
|
||||||
4. Now, update DNS entries.
|
|
||||||
|
|
||||||
If other MTAs try to deliver messages to your chatmail domain they may fail intermittently,
|
|
||||||
as DNS catches up with the new site settings
|
|
||||||
but normally will retry delivering messages
|
|
||||||
for at least a week, so messages will not be lost.
|
|
||||||
|
|
||||||
5. Finally, you can execute `cmdeploy run --ssh-host 13.12.23.42` to turn on chatmail on the new relay.
|
|
||||||
Your users will be able to use the chatmail relay as soon as the DNS changes have propagated.
|
|
||||||
Voilà!
|
|
||||||
|
|
||||||
## Setting up a reverse proxy
|
|
||||||
|
|
||||||
A chatmail relay MTA does not track or depend on the client IP address
|
|
||||||
for its operation, so it can be run behind a reverse proxy.
|
|
||||||
This will not even affect incoming mail authentication
|
|
||||||
as DKIM only checks the cryptographic signature
|
|
||||||
of the message and does not use the IP address as the input.
|
|
||||||
|
|
||||||
For example, you may want to self-host your chatmail relay
|
|
||||||
and only use hosted VPS to provide a public IP address
|
|
||||||
for client connections and incoming mail.
|
|
||||||
You can connect chatmail relay to VPS
|
|
||||||
using a tunnel protocol
|
|
||||||
such as [WireGuard](https://www.wireguard.com/)
|
|
||||||
and setup a reverse proxy on a VPS
|
|
||||||
to forward connections to the chatmail relay
|
|
||||||
over the tunnel.
|
|
||||||
You can also setup multiple reverse proxies
|
|
||||||
for your chatmail relay in different networks
|
|
||||||
to ensure your relay is reachable even when
|
|
||||||
one of the IPs becomes inaccessible due to
|
|
||||||
hosting or routing problems.
|
|
||||||
|
|
||||||
Note that your chatmail relay still needs
|
|
||||||
to be able to make outgoing connections on port 25
|
|
||||||
to send messages outside.
|
|
||||||
|
|
||||||
To setup a reverse proxy
|
|
||||||
(or rather Destination NAT, DNAT)
|
|
||||||
for your chatmail relay, run:
|
|
||||||
|
|
||||||
```
|
|
||||||
scripts/cmdeploy proxy <proxy_ip_address> --relay-ipv4 <relay_ipv4_address> --relay-ipv6 <relay_ipv6_address>
|
|
||||||
```
|
|
||||||
|
|
||||||
Once proxy relay is set up,
|
|
||||||
you can add its IP address to the DNS,
|
|
||||||
or distribute it as you wish.
|
|
||||||
|
|
||||||
## Neighbors and Acquaintances
|
|
||||||
|
|
||||||
Here are some related projects that you may be interested in:
|
|
||||||
|
|
||||||
- [Mox](https://github.com/mjl-/mox): A Golang email server. [Work is in
|
|
||||||
progress](https://github.com/mjl-/mox/issues/251) to modify it to support all
|
|
||||||
of the features and configuration settings required to operate as a chatmail
|
|
||||||
relay.
|
|
||||||
- [Maddy-Chatmail](https://github.com/sadraiiali/maddy_chatmail): a plugin for the
|
|
||||||
[Maddy email server](https://maddy.email/) which aims to implement the
|
|
||||||
chatmail relay features and configuration options.
|
|
||||||
|
|||||||
@@ -1,3 +0,0 @@
|
|||||||
include src/chatmaild/ini/*.ini.f
|
|
||||||
include src/chatmaild/ini/*.ini
|
|
||||||
include src/chatmaild/tests/mail-data/*
|
|
||||||
@@ -1,56 +1,20 @@
|
|||||||
[build-system]
|
[build-system]
|
||||||
requires = ["setuptools>=61"]
|
requires = ["setuptools>=45"]
|
||||||
build-backend = "setuptools.build_meta"
|
build-backend = "setuptools.build_meta"
|
||||||
|
|
||||||
[project]
|
[project]
|
||||||
name = "chatmaild"
|
name = "chatmaild"
|
||||||
version = "0.2"
|
version = "0.1"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"aiosmtpd",
|
"aiosmtpd"
|
||||||
"iniconfig",
|
|
||||||
"deltachat-rpc-server",
|
|
||||||
"deltachat-rpc-client",
|
|
||||||
"filelock",
|
|
||||||
"requests",
|
|
||||||
"crypt-r >= 3.13.1 ; python_version >= '3.11'",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
[tool.setuptools]
|
|
||||||
include-package-data = true
|
|
||||||
|
|
||||||
[tool.setuptools.packages.find]
|
|
||||||
where = ['src']
|
|
||||||
|
|
||||||
[project.scripts]
|
[project.scripts]
|
||||||
doveauth = "chatmaild.doveauth:main"
|
doveauth-dictproxy = "chatmaild.dictproxy:main"
|
||||||
chatmail-metadata = "chatmaild.metadata:main"
|
|
||||||
filtermail = "chatmaild.filtermail:main"
|
filtermail = "chatmaild.filtermail:main"
|
||||||
echobot = "chatmaild.echo:main"
|
|
||||||
chatmail-metrics = "chatmaild.metrics:main"
|
|
||||||
delete_inactive_users = "chatmaild.delete_inactive_users:main"
|
|
||||||
lastlogin = "chatmaild.lastlogin:main"
|
|
||||||
|
|
||||||
[project.entry-points.pytest11]
|
|
||||||
"chatmaild.testplugin" = "chatmaild.tests.plugin"
|
|
||||||
|
|
||||||
[tool.pytest.ini_options]
|
[tool.pytest.ini_options]
|
||||||
addopts = "-v -ra --strict-markers"
|
addopts = "-v -ra --strict-markers"
|
||||||
log_format = "%(asctime)s %(levelname)s %(message)s"
|
|
||||||
log_date_format = "%Y-%m-%d %H:%M:%S"
|
|
||||||
log_level = "INFO"
|
|
||||||
|
|
||||||
[tool.ruff]
|
|
||||||
lint.select = [
|
|
||||||
"F", # Pyflakes
|
|
||||||
"I", # isort
|
|
||||||
|
|
||||||
"PLC", # Pylint Convention
|
|
||||||
"PLE", # Pylint Error
|
|
||||||
"PLW", # Pylint Warning
|
|
||||||
]
|
|
||||||
lint.ignore = [
|
|
||||||
"PLC0415" # import-outside-top-level
|
|
||||||
]
|
|
||||||
|
|
||||||
[tool.tox]
|
[tool.tox]
|
||||||
legacy_tox_ini = """
|
legacy_tox_ini = """
|
||||||
@@ -63,12 +27,14 @@ skipdist = True
|
|||||||
skip_install = True
|
skip_install = True
|
||||||
deps =
|
deps =
|
||||||
ruff
|
ruff
|
||||||
|
black
|
||||||
commands =
|
commands =
|
||||||
ruff format --quiet --diff src/
|
black --quiet --check --diff src/
|
||||||
ruff check src/
|
ruff src/
|
||||||
|
|
||||||
[testenv]
|
[testenv]
|
||||||
|
passenv = CHATMAIL_DOMAIN
|
||||||
deps = pytest
|
deps = pytest
|
||||||
pdbpp
|
pdbpp
|
||||||
commands = pytest -v -rsXx {posargs}
|
commands = pytest -v -rsXx {posargs: ../tests/chatmaild}
|
||||||
"""
|
"""
|
||||||
|
|||||||
@@ -1 +0,0 @@
|
|||||||
|
|
||||||
|
|||||||
@@ -1,133 +0,0 @@
|
|||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import iniconfig
|
|
||||||
|
|
||||||
from chatmaild.user import User
|
|
||||||
|
|
||||||
echobot_password_path = Path("/run/echobot/password")
|
|
||||||
|
|
||||||
|
|
||||||
def read_config(inipath):
|
|
||||||
assert Path(inipath).exists(), inipath
|
|
||||||
cfg = iniconfig.IniConfig(inipath)
|
|
||||||
params = cfg.sections["params"]
|
|
||||||
default_config_content = get_default_config_content(params["mail_domain"])
|
|
||||||
df_params = iniconfig.IniConfig("ini", data=default_config_content)["params"]
|
|
||||||
new_params = dict(df_params.items())
|
|
||||||
new_params.update(params)
|
|
||||||
return Config(inipath, params=new_params)
|
|
||||||
|
|
||||||
|
|
||||||
class Config:
|
|
||||||
def __init__(self, inipath, params):
|
|
||||||
self._inipath = inipath
|
|
||||||
self.mail_domain = params["mail_domain"]
|
|
||||||
self.max_user_send_per_minute = int(params["max_user_send_per_minute"])
|
|
||||||
self.max_mailbox_size = params["max_mailbox_size"]
|
|
||||||
self.max_message_size = int(params.get("max_message_size", "31457280"))
|
|
||||||
self.delete_mails_after = params["delete_mails_after"]
|
|
||||||
self.delete_large_after = params["delete_large_after"]
|
|
||||||
self.delete_inactive_users_after = int(params["delete_inactive_users_after"])
|
|
||||||
self.username_min_length = int(params["username_min_length"])
|
|
||||||
self.username_max_length = int(params["username_max_length"])
|
|
||||||
self.password_min_length = int(params["password_min_length"])
|
|
||||||
self.passthrough_senders = params["passthrough_senders"].split()
|
|
||||||
self.passthrough_recipients = params["passthrough_recipients"].split()
|
|
||||||
self.www_folder = params.get("www_folder", "")
|
|
||||||
self.filtermail_smtp_port = int(params["filtermail_smtp_port"])
|
|
||||||
self.filtermail_smtp_port_incoming = int(
|
|
||||||
params["filtermail_smtp_port_incoming"]
|
|
||||||
)
|
|
||||||
self.postfix_reinject_port = int(params["postfix_reinject_port"])
|
|
||||||
self.postfix_reinject_port_incoming = int(
|
|
||||||
params["postfix_reinject_port_incoming"]
|
|
||||||
)
|
|
||||||
self.mtail_address = params.get("mtail_address")
|
|
||||||
self.disable_ipv6 = params.get("disable_ipv6", "false").lower() == "true"
|
|
||||||
self.imap_rawlog = params.get("imap_rawlog", "false").lower() == "true"
|
|
||||||
if "iroh_relay" not in params:
|
|
||||||
self.iroh_relay = "https://" + params["mail_domain"]
|
|
||||||
self.enable_iroh_relay = True
|
|
||||||
else:
|
|
||||||
self.iroh_relay = params["iroh_relay"].strip()
|
|
||||||
self.enable_iroh_relay = False
|
|
||||||
self.privacy_postal = params.get("privacy_postal")
|
|
||||||
self.privacy_mail = params.get("privacy_mail")
|
|
||||||
self.privacy_pdo = params.get("privacy_pdo")
|
|
||||||
self.privacy_supervisor = params.get("privacy_supervisor")
|
|
||||||
|
|
||||||
# deprecated option
|
|
||||||
mbdir = params.get("mailboxes_dir", f"/home/vmail/mail/{self.mail_domain}")
|
|
||||||
self.mailboxes_dir = Path(mbdir.strip())
|
|
||||||
|
|
||||||
# old unused option (except for first migration from sqlite to maildir store)
|
|
||||||
self.passdb_path = Path(params.get("passdb_path", "/home/vmail/passdb.sqlite"))
|
|
||||||
|
|
||||||
def _getbytefile(self):
|
|
||||||
return open(self._inipath, "rb")
|
|
||||||
|
|
||||||
def get_user(self, addr) -> User:
|
|
||||||
if not addr or "@" not in addr or "/" in addr:
|
|
||||||
raise ValueError(f"invalid address {addr!r}")
|
|
||||||
|
|
||||||
maildir = self.mailboxes_dir.joinpath(addr)
|
|
||||||
if addr.startswith("echo@"):
|
|
||||||
password_path = echobot_password_path
|
|
||||||
else:
|
|
||||||
password_path = maildir.joinpath("password")
|
|
||||||
|
|
||||||
return User(maildir, addr, password_path, uid="vmail", gid="vmail")
|
|
||||||
|
|
||||||
|
|
||||||
def write_initial_config(inipath, mail_domain, overrides):
|
|
||||||
"""Write out default config file, using the specified config value overrides."""
|
|
||||||
content = get_default_config_content(mail_domain, **overrides)
|
|
||||||
inipath.write_text(content)
|
|
||||||
|
|
||||||
|
|
||||||
def get_default_config_content(mail_domain, **overrides):
|
|
||||||
from importlib.resources import files
|
|
||||||
|
|
||||||
inidir = files(__package__).joinpath("ini")
|
|
||||||
source_inipath = inidir.joinpath("chatmail.ini.f")
|
|
||||||
content = source_inipath.read_text().format(mail_domain=mail_domain)
|
|
||||||
|
|
||||||
# apply config overrides
|
|
||||||
new_lines = []
|
|
||||||
extra = overrides.copy()
|
|
||||||
for line in content.split("\n"):
|
|
||||||
new_line = line.strip()
|
|
||||||
if new_line and new_line[0] not in "#[":
|
|
||||||
name, value = map(str.strip, new_line.split("=", maxsplit=1))
|
|
||||||
value = extra.pop(name, value)
|
|
||||||
new_line = f"{name} = {value}"
|
|
||||||
new_lines.append(new_line)
|
|
||||||
|
|
||||||
for name, value in extra.items():
|
|
||||||
new_line = f"{name} = {value}"
|
|
||||||
new_lines.append(new_line)
|
|
||||||
|
|
||||||
content = "\n".join(new_lines)
|
|
||||||
|
|
||||||
# apply testrun privacy overrides
|
|
||||||
|
|
||||||
if mail_domain.endswith(".testrun.org"):
|
|
||||||
override_inipath = inidir.joinpath("override-testrun.ini")
|
|
||||||
privacy = iniconfig.IniConfig(override_inipath)["privacy"]
|
|
||||||
lines = []
|
|
||||||
for line in content.split("\n"):
|
|
||||||
for key, value in privacy.items():
|
|
||||||
value_lines = value.format(mail_domain=mail_domain).strip().split("\n")
|
|
||||||
if not line.startswith(f"{key} =") or not value_lines:
|
|
||||||
continue
|
|
||||||
if len(value_lines) == 1:
|
|
||||||
lines.append(f"{key} = {value}")
|
|
||||||
else:
|
|
||||||
lines.append(f"{key} =")
|
|
||||||
for vl in value_lines:
|
|
||||||
lines.append(f" {vl}")
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
lines.append(line)
|
|
||||||
content = "\n".join(lines)
|
|
||||||
return content
|
|
||||||
140
chatmaild/src/chatmaild/database.py
Normal file
140
chatmaild/src/chatmaild/database.py
Normal file
@@ -0,0 +1,140 @@
|
|||||||
|
import sqlite3
|
||||||
|
import contextlib
|
||||||
|
import time
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
|
class DBError(Exception):
|
||||||
|
"""error during an operation on the database."""
|
||||||
|
|
||||||
|
|
||||||
|
class Connection:
|
||||||
|
def __init__(self, sqlconn, write):
|
||||||
|
self._sqlconn = sqlconn
|
||||||
|
self._write = write
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
self._sqlconn.close()
|
||||||
|
|
||||||
|
def commit(self):
|
||||||
|
self._sqlconn.commit()
|
||||||
|
|
||||||
|
def rollback(self):
|
||||||
|
self._sqlconn.rollback()
|
||||||
|
|
||||||
|
def execute(self, query, params=()):
|
||||||
|
cur = self.cursor()
|
||||||
|
try:
|
||||||
|
cur.execute(query, params)
|
||||||
|
except sqlite3.IntegrityError as e:
|
||||||
|
raise DBError(e)
|
||||||
|
return cur
|
||||||
|
|
||||||
|
def cursor(self):
|
||||||
|
return self._sqlconn.cursor()
|
||||||
|
|
||||||
|
def create_user(self, addr: str, password: str):
|
||||||
|
"""Create a row in the users table."""
|
||||||
|
self.execute("PRAGMA foreign_keys=on")
|
||||||
|
q = """INSERT INTO users (addr, password, last_login)
|
||||||
|
VALUES (?, ?, ?)"""
|
||||||
|
self.execute(q, (addr, password, int(time.time())))
|
||||||
|
|
||||||
|
def get_user(self, addr: str) -> {}:
|
||||||
|
"""Get a row from the users table."""
|
||||||
|
q = "SELECT addr, password, last_login from users WHERE addr = ?"
|
||||||
|
row = self._sqlconn.execute(q, (addr,)).fetchone()
|
||||||
|
result = {}
|
||||||
|
if row:
|
||||||
|
result = dict(
|
||||||
|
user=row[0],
|
||||||
|
password=row[1],
|
||||||
|
last_login=row[2],
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
class Database:
|
||||||
|
def __init__(self, path: str):
|
||||||
|
self.path = Path(path)
|
||||||
|
self.ensure_tables()
|
||||||
|
|
||||||
|
def _get_connection(
|
||||||
|
self, write=False, transaction=False, closing=False
|
||||||
|
) -> Connection:
|
||||||
|
# we let the database serialize all writers at connection time
|
||||||
|
# to play it very safe (we don't have massive amounts of writes).
|
||||||
|
mode = "ro"
|
||||||
|
if write:
|
||||||
|
mode = "rw"
|
||||||
|
if not self.path.exists():
|
||||||
|
mode = "rwc"
|
||||||
|
uri = "file:%s?mode=%s" % (self.path, mode)
|
||||||
|
sqlconn = sqlite3.connect(
|
||||||
|
uri,
|
||||||
|
timeout=60,
|
||||||
|
isolation_level=None if transaction else "DEFERRED",
|
||||||
|
uri=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Enable Write-Ahead Logging to avoid readers blocking writers and vice versa.
|
||||||
|
if write:
|
||||||
|
sqlconn.execute("PRAGMA journal_mode=wal")
|
||||||
|
|
||||||
|
if transaction:
|
||||||
|
start_time = time.time()
|
||||||
|
while 1:
|
||||||
|
try:
|
||||||
|
sqlconn.execute("begin immediate")
|
||||||
|
break
|
||||||
|
except sqlite3.OperationalError:
|
||||||
|
# another thread may be writing, give it a chance to finish
|
||||||
|
time.sleep(0.1)
|
||||||
|
if time.time() - start_time > 5:
|
||||||
|
# if it takes this long, something is wrong
|
||||||
|
raise
|
||||||
|
conn = Connection(sqlconn, write=write)
|
||||||
|
if closing:
|
||||||
|
conn = contextlib.closing(conn)
|
||||||
|
return conn
|
||||||
|
|
||||||
|
@contextlib.contextmanager
|
||||||
|
def write_transaction(self):
|
||||||
|
conn = self._get_connection(closing=False, write=True, transaction=True)
|
||||||
|
try:
|
||||||
|
yield conn
|
||||||
|
except Exception:
|
||||||
|
conn.rollback()
|
||||||
|
conn.close()
|
||||||
|
raise
|
||||||
|
else:
|
||||||
|
conn.commit()
|
||||||
|
conn.close()
|
||||||
|
|
||||||
|
def read_connection(self, closing=True) -> Connection:
|
||||||
|
return self._get_connection(closing=closing, write=False)
|
||||||
|
|
||||||
|
def get_schema_version(self) -> int:
|
||||||
|
with self.read_connection() as conn:
|
||||||
|
dbversion = conn.execute("PRAGMA user_version").fetchone()[0]
|
||||||
|
return dbversion
|
||||||
|
|
||||||
|
CURRENT_DBVERSION = 1
|
||||||
|
|
||||||
|
def ensure_tables(self):
|
||||||
|
with self.write_transaction() as conn:
|
||||||
|
if self.get_schema_version() > 1:
|
||||||
|
raise DBError(
|
||||||
|
"version is %s; downgrading schema is not supported"
|
||||||
|
% (self.get_schema_version(),)
|
||||||
|
)
|
||||||
|
conn.execute(
|
||||||
|
"""
|
||||||
|
CREATE TABLE IF NOT EXISTS users (
|
||||||
|
addr TEXT PRIMARY KEY,
|
||||||
|
password TEXT,
|
||||||
|
last_login INTEGER
|
||||||
|
)
|
||||||
|
""",
|
||||||
|
)
|
||||||
|
conn.execute("PRAGMA user_version=%s" % (self.CURRENT_DBVERSION,))
|
||||||
@@ -1,31 +0,0 @@
|
|||||||
"""
|
|
||||||
Remove inactive users
|
|
||||||
"""
|
|
||||||
|
|
||||||
import os
|
|
||||||
import shutil
|
|
||||||
import sys
|
|
||||||
import time
|
|
||||||
|
|
||||||
from .config import read_config
|
|
||||||
|
|
||||||
|
|
||||||
def delete_inactive_users(config):
|
|
||||||
cutoff_date = time.time() - config.delete_inactive_users_after * 86400
|
|
||||||
for addr in os.listdir(config.mailboxes_dir):
|
|
||||||
try:
|
|
||||||
user = config.get_user(addr)
|
|
||||||
except ValueError:
|
|
||||||
continue
|
|
||||||
|
|
||||||
read_timestamp = user.get_last_login_timestamp()
|
|
||||||
if read_timestamp and read_timestamp < cutoff_date:
|
|
||||||
path = config.mailboxes_dir.joinpath(addr)
|
|
||||||
assert path == user.maildir
|
|
||||||
shutil.rmtree(path, ignore_errors=True)
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
(cfgpath,) = sys.argv[1:]
|
|
||||||
config = read_config(cfgpath)
|
|
||||||
delete_inactive_users(config)
|
|
||||||
@@ -1,98 +1,119 @@
|
|||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
from socketserver import StreamRequestHandler, ThreadingUnixStreamServer
|
import sys
|
||||||
|
import json
|
||||||
|
import crypt
|
||||||
|
from socketserver import (
|
||||||
|
UnixStreamServer,
|
||||||
|
StreamRequestHandler,
|
||||||
|
ThreadingMixIn,
|
||||||
|
)
|
||||||
|
import pwd
|
||||||
|
|
||||||
|
from .database import Database
|
||||||
|
|
||||||
|
NOCREATE_FILE = "/etc/chatmail-nocreate"
|
||||||
|
|
||||||
|
|
||||||
class DictProxy:
|
def encrypt_password(password: str):
|
||||||
def loop_forever(self, rfile, wfile):
|
# https://doc.dovecot.org/configuration_manual/authentication/password_schemes/
|
||||||
# Transaction storage is local to each handler loop.
|
passhash = crypt.crypt(password, crypt.METHOD_SHA512)
|
||||||
# Dovecot reuses transaction IDs across connections,
|
return "{SHA512-CRYPT}" + passhash
|
||||||
# starting transaction with the name `1`
|
|
||||||
# on two different connections to the same proxy sometimes.
|
|
||||||
transactions = {}
|
|
||||||
|
|
||||||
while True:
|
|
||||||
msg = rfile.readline().strip().decode()
|
|
||||||
if not msg:
|
|
||||||
break
|
|
||||||
|
|
||||||
res = self.handle_dovecot_request(msg, transactions)
|
def create_user(db, user, password):
|
||||||
if res:
|
if os.path.exists(NOCREATE_FILE):
|
||||||
wfile.write(res.encode("ascii"))
|
logging.warning(
|
||||||
wfile.flush()
|
f"Didn't create account: {NOCREATE_FILE} exists. Delete the file to enable account creation."
|
||||||
|
)
|
||||||
|
return
|
||||||
|
with db.write_transaction() as conn:
|
||||||
|
conn.create_user(user, password)
|
||||||
|
return dict(home=f"/home/vmail/{user}", uid="vmail", gid="vmail", password=password)
|
||||||
|
|
||||||
def handle_dovecot_request(self, msg, transactions):
|
|
||||||
# see https://doc.dovecot.org/developer_manual/design/dict_protocol/#dovecot-dict-protocol
|
def get_user_data(db, user):
|
||||||
short_command = msg[0]
|
with db.read_connection() as conn:
|
||||||
|
result = conn.get_user(user)
|
||||||
|
if result:
|
||||||
|
result["uid"] = "vmail"
|
||||||
|
result["gid"] = "vmail"
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def lookup_userdb(db, user):
|
||||||
|
return get_user_data(db, user)
|
||||||
|
|
||||||
|
|
||||||
|
def lookup_passdb(db, user, password):
|
||||||
|
userdata = get_user_data(db, user)
|
||||||
|
if not userdata:
|
||||||
|
return create_user(db, user, encrypt_password(password))
|
||||||
|
userdata["password"] = userdata["password"].strip()
|
||||||
|
return userdata
|
||||||
|
|
||||||
|
|
||||||
|
def handle_dovecot_request(msg, db, mail_domain):
|
||||||
|
print(f"received msg: {msg!r}", file=sys.stderr)
|
||||||
|
short_command = msg[0]
|
||||||
|
if short_command == "L": # LOOKUP
|
||||||
parts = msg[1:].split("\t")
|
parts = msg[1:].split("\t")
|
||||||
|
keyname, user = parts[:2]
|
||||||
|
namespace, type, *args = keyname.split("/")
|
||||||
|
reply_command = "F"
|
||||||
|
res = ""
|
||||||
|
if namespace == "shared":
|
||||||
|
if type == "userdb":
|
||||||
|
if user.endswith(f"@{mail_domain}"):
|
||||||
|
res = lookup_userdb(db, user)
|
||||||
|
if res:
|
||||||
|
reply_command = "O"
|
||||||
|
else:
|
||||||
|
reply_command = "N"
|
||||||
|
elif type == "passdb":
|
||||||
|
if user.endswith(f"@{mail_domain}"):
|
||||||
|
res = lookup_passdb(db, user, password=args[0])
|
||||||
|
if res:
|
||||||
|
reply_command = "O"
|
||||||
|
else:
|
||||||
|
reply_command = "N"
|
||||||
|
print(f"res: {res!r}", file=sys.stderr)
|
||||||
|
json_res = json.dumps(res) if res else ""
|
||||||
|
return f"{reply_command}{json_res}\n"
|
||||||
|
return None
|
||||||
|
|
||||||
if short_command == "L":
|
|
||||||
return self.handle_lookup(parts)
|
|
||||||
elif short_command == "I":
|
|
||||||
return self.handle_iterate(parts)
|
|
||||||
elif short_command == "H":
|
|
||||||
return # no version checking
|
|
||||||
|
|
||||||
if short_command not in ("BSC"):
|
class ThreadedUnixStreamServer(ThreadingMixIn, UnixStreamServer):
|
||||||
logging.warning(f"unknown dictproxy request: {msg!r}")
|
pass
|
||||||
return
|
|
||||||
|
|
||||||
transaction_id = parts[0]
|
|
||||||
|
|
||||||
if short_command == "B":
|
def main():
|
||||||
return self.handle_begin_transaction(transaction_id, parts, transactions)
|
socket = sys.argv[1]
|
||||||
elif short_command == "C":
|
passwd_entry = pwd.getpwnam(sys.argv[2])
|
||||||
return self.handle_commit_transaction(transaction_id, parts, transactions)
|
db = Database(sys.argv[3])
|
||||||
elif short_command == "S":
|
with open("/etc/mailname", "r") as fp:
|
||||||
addr = transactions[transaction_id]["addr"]
|
mail_domain = fp.read().strip()
|
||||||
if not self.handle_set(addr, parts):
|
|
||||||
transactions[transaction_id]["res"] = "F\n"
|
|
||||||
logging.error(f"dictproxy-set failed for {addr!r}: {msg!r}")
|
|
||||||
|
|
||||||
def handle_lookup(self, parts):
|
class Handler(StreamRequestHandler):
|
||||||
logging.warning(f"lookup ignored: {parts!r}")
|
def handle(self):
|
||||||
return "N\n"
|
while True:
|
||||||
|
msg = self.rfile.readline().strip().decode()
|
||||||
|
if not msg:
|
||||||
|
break
|
||||||
|
res = handle_dovecot_request(msg, db, mail_domain)
|
||||||
|
if res:
|
||||||
|
print(f"sending result: {res!r}", file=sys.stderr)
|
||||||
|
self.wfile.write(res.encode("ascii"))
|
||||||
|
self.wfile.flush()
|
||||||
|
|
||||||
def handle_iterate(self, parts):
|
try:
|
||||||
# Empty line means ITER_FINISHED.
|
os.unlink(socket)
|
||||||
# If we don't return empty line Dovecot will timeout.
|
except FileNotFoundError:
|
||||||
return "\n"
|
pass
|
||||||
|
|
||||||
def handle_begin_transaction(self, transaction_id, parts, transactions):
|
|
||||||
addr = parts[1]
|
|
||||||
transactions[transaction_id] = dict(addr=addr, res="O\n")
|
|
||||||
|
|
||||||
def handle_set(self, addr, parts):
|
|
||||||
# For documentation on key structure see
|
|
||||||
# https://github.com/dovecot/core/blob/main/src/lib-storage/mailbox-attribute.h
|
|
||||||
return False
|
|
||||||
|
|
||||||
def handle_commit_transaction(self, transaction_id, parts, transactions):
|
|
||||||
# return whatever "set" command(s) set as result.
|
|
||||||
return transactions.pop(transaction_id)["res"]
|
|
||||||
|
|
||||||
def serve_forever_from_socket(self, socket):
|
|
||||||
dictproxy = self
|
|
||||||
|
|
||||||
class Handler(StreamRequestHandler):
|
|
||||||
def handle(self):
|
|
||||||
try:
|
|
||||||
dictproxy.loop_forever(self.rfile, self.wfile)
|
|
||||||
except Exception:
|
|
||||||
logging.exception("Exception in the handler")
|
|
||||||
raise
|
|
||||||
|
|
||||||
|
with ThreadedUnixStreamServer(socket, Handler) as server:
|
||||||
|
os.chown(socket, uid=passwd_entry.pw_uid, gid=passwd_entry.pw_gid)
|
||||||
try:
|
try:
|
||||||
os.unlink(socket)
|
server.serve_forever()
|
||||||
except FileNotFoundError:
|
except KeyboardInterrupt:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
with CustomThreadingUnixStreamServer(socket, Handler) as server:
|
|
||||||
try:
|
|
||||||
server.serve_forever()
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class CustomThreadingUnixStreamServer(ThreadingUnixStreamServer):
|
|
||||||
request_queue_size = 1000
|
|
||||||
|
|||||||
10
chatmaild/src/chatmaild/doveauth-dictproxy.service
Normal file
10
chatmaild/src/chatmaild/doveauth-dictproxy.service
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
[Unit]
|
||||||
|
Description=Dict authentication proxy for dovecot
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
ExecStart=/usr/local/bin/doveauth-dictproxy /run/dovecot/doveauth.socket vmail /home/vmail/passdb.sqlite
|
||||||
|
Restart=always
|
||||||
|
RestartSec=30
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
@@ -1,160 +0,0 @@
|
|||||||
import json
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
|
|
||||||
try:
|
|
||||||
import crypt_r
|
|
||||||
except ImportError:
|
|
||||||
import crypt as crypt_r
|
|
||||||
|
|
||||||
from .config import Config, read_config
|
|
||||||
from .dictproxy import DictProxy
|
|
||||||
from .migrate_db import migrate_from_db_to_maildir
|
|
||||||
|
|
||||||
NOCREATE_FILE = "/etc/chatmail-nocreate"
|
|
||||||
|
|
||||||
|
|
||||||
def encrypt_password(password: str):
|
|
||||||
# https://doc.dovecot.org/configuration_manual/authentication/password_schemes/
|
|
||||||
passhash = crypt_r.crypt(password, crypt_r.METHOD_SHA512)
|
|
||||||
return "{SHA512-CRYPT}" + passhash
|
|
||||||
|
|
||||||
|
|
||||||
def is_allowed_to_create(config: Config, user, cleartext_password) -> bool:
|
|
||||||
"""Return True if user and password are admissable."""
|
|
||||||
if os.path.exists(NOCREATE_FILE):
|
|
||||||
logging.warning(f"blocked account creation because {NOCREATE_FILE!r} exists.")
|
|
||||||
return False
|
|
||||||
|
|
||||||
if len(cleartext_password) < config.password_min_length:
|
|
||||||
logging.warning(
|
|
||||||
"Password needs to be at least %s characters long",
|
|
||||||
config.password_min_length,
|
|
||||||
)
|
|
||||||
return False
|
|
||||||
|
|
||||||
parts = user.split("@")
|
|
||||||
if len(parts) != 2:
|
|
||||||
logging.warning(f"user {user!r} is not a proper e-mail address")
|
|
||||||
return False
|
|
||||||
localpart, domain = parts
|
|
||||||
|
|
||||||
if localpart == "echo":
|
|
||||||
# echobot account should not be created in the database
|
|
||||||
return False
|
|
||||||
|
|
||||||
if (
|
|
||||||
len(localpart) > config.username_max_length
|
|
||||||
or len(localpart) < config.username_min_length
|
|
||||||
):
|
|
||||||
logging.warning(
|
|
||||||
"localpart %s has to be between %s and %s chars long",
|
|
||||||
localpart,
|
|
||||||
config.username_min_length,
|
|
||||||
config.username_max_length,
|
|
||||||
)
|
|
||||||
return False
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
def split_and_unescape(s):
|
|
||||||
"""Split strings using double quote as a separator and backslash as escape character
|
|
||||||
into parts."""
|
|
||||||
|
|
||||||
out = ""
|
|
||||||
i = 0
|
|
||||||
while i < len(s):
|
|
||||||
c = s[i]
|
|
||||||
if c == "\\":
|
|
||||||
# Skip escape character.
|
|
||||||
i += 1
|
|
||||||
|
|
||||||
# This will raise IndexError if there is no character
|
|
||||||
# after escape character. This is expected
|
|
||||||
# as this is an invalid input.
|
|
||||||
out += s[i]
|
|
||||||
elif c == '"':
|
|
||||||
# Separator
|
|
||||||
yield out
|
|
||||||
out = ""
|
|
||||||
else:
|
|
||||||
out += c
|
|
||||||
i += 1
|
|
||||||
yield out
|
|
||||||
|
|
||||||
|
|
||||||
class AuthDictProxy(DictProxy):
|
|
||||||
def __init__(self, config):
|
|
||||||
super().__init__()
|
|
||||||
self.config = config
|
|
||||||
|
|
||||||
def handle_lookup(self, parts):
|
|
||||||
# Dovecot <2.3.17 has only one part,
|
|
||||||
# do not attempt to read any other parts for compatibility.
|
|
||||||
keyname = parts[0]
|
|
||||||
|
|
||||||
namespace, type, args = keyname.split("/", 2)
|
|
||||||
args = list(split_and_unescape(args))
|
|
||||||
|
|
||||||
config = self.config
|
|
||||||
reply_command = "F"
|
|
||||||
res = ""
|
|
||||||
if namespace == "shared":
|
|
||||||
if type == "userdb":
|
|
||||||
user = args[0]
|
|
||||||
if user.endswith(f"@{config.mail_domain}"):
|
|
||||||
res = self.lookup_userdb(user)
|
|
||||||
if res:
|
|
||||||
reply_command = "O"
|
|
||||||
else:
|
|
||||||
reply_command = "N"
|
|
||||||
elif type == "passdb":
|
|
||||||
user = args[1]
|
|
||||||
if user.endswith(f"@{config.mail_domain}"):
|
|
||||||
res = self.lookup_passdb(user, cleartext_password=args[0])
|
|
||||||
if res:
|
|
||||||
reply_command = "O"
|
|
||||||
else:
|
|
||||||
reply_command = "N"
|
|
||||||
json_res = json.dumps(res) if res else ""
|
|
||||||
return f"{reply_command}{json_res}\n"
|
|
||||||
|
|
||||||
def handle_iterate(self, parts):
|
|
||||||
# example: I0\t0\tshared/userdb/
|
|
||||||
if parts[2] == "shared/userdb/":
|
|
||||||
result = "".join(
|
|
||||||
f"Oshared/userdb/{user}\t\n" for user in self.iter_userdb()
|
|
||||||
)
|
|
||||||
return f"{result}\n"
|
|
||||||
|
|
||||||
def iter_userdb(self) -> list:
|
|
||||||
"""Get a list of all user addresses."""
|
|
||||||
return [x for x in os.listdir(self.config.mailboxes_dir) if "@" in x]
|
|
||||||
|
|
||||||
def lookup_userdb(self, addr):
|
|
||||||
return self.config.get_user(addr).get_userdb_dict()
|
|
||||||
|
|
||||||
def lookup_passdb(self, addr, cleartext_password):
|
|
||||||
user = self.config.get_user(addr)
|
|
||||||
userdata = user.get_userdb_dict()
|
|
||||||
if userdata:
|
|
||||||
return userdata
|
|
||||||
if not is_allowed_to_create(self.config, addr, cleartext_password):
|
|
||||||
return
|
|
||||||
|
|
||||||
user.set_password(encrypt_password(cleartext_password))
|
|
||||||
print(f"Created address: {addr}", file=sys.stderr)
|
|
||||||
return user.get_userdb_dict()
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
socket, cfgpath = sys.argv[1:]
|
|
||||||
config = read_config(cfgpath)
|
|
||||||
|
|
||||||
migrate_from_db_to_maildir(config)
|
|
||||||
|
|
||||||
dictproxy = AuthDictProxy(config=config)
|
|
||||||
|
|
||||||
dictproxy.serve_forever_from_socket(socket)
|
|
||||||
@@ -1,109 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""Advanced echo bot example.
|
|
||||||
|
|
||||||
it will echo back any message that has non-empty text and also supports the /help command.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
from deltachat_rpc_client import Bot, DeltaChat, EventType, Rpc, events
|
|
||||||
|
|
||||||
from chatmaild.config import echobot_password_path, read_config
|
|
||||||
from chatmaild.doveauth import encrypt_password
|
|
||||||
from chatmaild.newemail import create_newemail_dict
|
|
||||||
|
|
||||||
hooks = events.HookCollection()
|
|
||||||
|
|
||||||
|
|
||||||
@hooks.on(events.RawEvent)
|
|
||||||
def log_event(event):
|
|
||||||
if event.kind == EventType.INFO:
|
|
||||||
logging.info(event.msg)
|
|
||||||
elif event.kind == EventType.WARNING:
|
|
||||||
logging.warning(event.msg)
|
|
||||||
|
|
||||||
|
|
||||||
@hooks.on(events.RawEvent(EventType.ERROR))
|
|
||||||
def log_error(event):
|
|
||||||
logging.error("%s", event.msg)
|
|
||||||
|
|
||||||
|
|
||||||
@hooks.on(events.MemberListChanged)
|
|
||||||
def on_memberlist_changed(event):
|
|
||||||
logging.info(
|
|
||||||
"member %s was %s", event.member, "added" if event.member_added else "removed"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@hooks.on(events.GroupImageChanged)
|
|
||||||
def on_group_image_changed(event):
|
|
||||||
logging.info("group image %s", "deleted" if event.image_deleted else "changed")
|
|
||||||
|
|
||||||
|
|
||||||
@hooks.on(events.GroupNameChanged)
|
|
||||||
def on_group_name_changed(event):
|
|
||||||
logging.info(f"group name changed, old name: {event.old_name}")
|
|
||||||
|
|
||||||
|
|
||||||
@hooks.on(events.NewMessage(func=lambda e: not e.command))
|
|
||||||
def echo(event):
|
|
||||||
snapshot = event.message_snapshot
|
|
||||||
if snapshot.is_info:
|
|
||||||
# Ignore info messages
|
|
||||||
return
|
|
||||||
if snapshot.text or snapshot.file:
|
|
||||||
snapshot.chat.send_message(text=snapshot.text, file=snapshot.file)
|
|
||||||
|
|
||||||
|
|
||||||
@hooks.on(events.NewMessage(command="/help"))
|
|
||||||
def help_command(event):
|
|
||||||
snapshot = event.message_snapshot
|
|
||||||
snapshot.chat.send_text("Send me any message and I will echo it back")
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
logging.basicConfig(level=logging.INFO)
|
|
||||||
path = os.environ.get("PATH")
|
|
||||||
venv_path = sys.argv[0].strip("echobot")
|
|
||||||
os.environ["PATH"] = path + ":" + venv_path
|
|
||||||
with Rpc() as rpc:
|
|
||||||
deltachat = DeltaChat(rpc)
|
|
||||||
system_info = deltachat.get_system_info()
|
|
||||||
logging.info(f"Running deltachat core {system_info.deltachat_core_version}")
|
|
||||||
|
|
||||||
accounts = deltachat.get_all_accounts()
|
|
||||||
account = accounts[0] if accounts else deltachat.add_account()
|
|
||||||
|
|
||||||
bot = Bot(account, hooks)
|
|
||||||
|
|
||||||
config = read_config(sys.argv[1])
|
|
||||||
addr = "echo@" + config.mail_domain
|
|
||||||
|
|
||||||
# Create password file
|
|
||||||
if bot.is_configured():
|
|
||||||
password = bot.account.get_config("mail_pw")
|
|
||||||
else:
|
|
||||||
password = create_newemail_dict(config)["password"]
|
|
||||||
|
|
||||||
echobot_password_path.write_text(encrypt_password(password))
|
|
||||||
# Give the user which doveauth runs as access to the password file.
|
|
||||||
subprocess.check_call(
|
|
||||||
["/usr/bin/setfacl", "-m", "user:vmail:r", echobot_password_path],
|
|
||||||
)
|
|
||||||
|
|
||||||
if not bot.is_configured():
|
|
||||||
bot.configure(addr, password)
|
|
||||||
|
|
||||||
# write invite link to working directory
|
|
||||||
invitelink = bot.account.get_qr_code()
|
|
||||||
Path("invite-link.txt").write_text(invitelink)
|
|
||||||
|
|
||||||
bot.run_forever()
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
||||||
@@ -1,44 +0,0 @@
|
|||||||
import json
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
from contextlib import contextmanager
|
|
||||||
from random import randint
|
|
||||||
|
|
||||||
import filelock
|
|
||||||
|
|
||||||
|
|
||||||
class FileDict:
|
|
||||||
"""Concurrency-safe multi-reader/single-writer persistent dict."""
|
|
||||||
|
|
||||||
def __init__(self, path):
|
|
||||||
self.path = path
|
|
||||||
self.lock_path = path.with_name(path.name + ".lock")
|
|
||||||
|
|
||||||
@contextmanager
|
|
||||||
def modify(self):
|
|
||||||
# the OS will release the lock if the process dies,
|
|
||||||
# and the contextmanager will otherwise guarantee release
|
|
||||||
with filelock.FileLock(self.lock_path):
|
|
||||||
data = self.read()
|
|
||||||
yield data
|
|
||||||
write_path = self.path.with_name(self.path.name + ".tmp")
|
|
||||||
with write_path.open("w") as f:
|
|
||||||
json.dump(data, f)
|
|
||||||
os.rename(write_path, self.path)
|
|
||||||
|
|
||||||
def read(self):
|
|
||||||
try:
|
|
||||||
with self.path.open("r") as f:
|
|
||||||
return json.load(f)
|
|
||||||
except FileNotFoundError:
|
|
||||||
return {}
|
|
||||||
except Exception:
|
|
||||||
logging.warning(f"corrupt serialization state at: {self.path!r}")
|
|
||||||
return {}
|
|
||||||
|
|
||||||
|
|
||||||
def write_bytes_atomic(path, content):
|
|
||||||
rint = randint(0, 10000000)
|
|
||||||
tmp = path.with_name(path.name + f".tmp-{rint}")
|
|
||||||
tmp.write_bytes(content)
|
|
||||||
os.rename(tmp, path)
|
|
||||||
@@ -1,221 +1,52 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
import asyncio
|
import asyncio
|
||||||
import base64
|
|
||||||
import binascii
|
|
||||||
import logging
|
import logging
|
||||||
import sys
|
|
||||||
import time
|
import time
|
||||||
from email import policy
|
import sys
|
||||||
from email.parser import BytesParser
|
from email.parser import BytesParser
|
||||||
|
from email import policy
|
||||||
from email.utils import parseaddr
|
from email.utils import parseaddr
|
||||||
from smtplib import SMTP as SMTPClient
|
|
||||||
|
|
||||||
from aiosmtpd.controller import Controller
|
|
||||||
from aiosmtpd.smtp import SMTP
|
from aiosmtpd.smtp import SMTP
|
||||||
|
from aiosmtpd.controller import Controller
|
||||||
from .config import read_config
|
from smtplib import SMTP as SMTPClient
|
||||||
|
|
||||||
ENCRYPTION_NEEDED_523 = "523 Encryption Needed: Invalid Unencrypted Mail"
|
|
||||||
|
|
||||||
|
|
||||||
def check_openpgp_payload(payload: bytes):
|
|
||||||
"""Checks the OpenPGP payload.
|
|
||||||
|
|
||||||
OpenPGP payload must consist only of PKESK and SKESK packets
|
|
||||||
terminated by a single SEIPD packet.
|
|
||||||
|
|
||||||
Returns True if OpenPGP payload is correct,
|
|
||||||
False otherwise.
|
|
||||||
|
|
||||||
May raise IndexError while trying to read OpenPGP packet header
|
|
||||||
if it is truncated.
|
|
||||||
"""
|
|
||||||
i = 0
|
|
||||||
while i < len(payload):
|
|
||||||
# Only OpenPGP format is allowed.
|
|
||||||
if payload[i] & 0xC0 != 0xC0:
|
|
||||||
return False
|
|
||||||
|
|
||||||
packet_type_id = payload[i] & 0x3F
|
|
||||||
i += 1
|
|
||||||
|
|
||||||
while payload[i] >= 224 and payload[i] < 255:
|
|
||||||
# Partial body length.
|
|
||||||
partial_length = 1 << (payload[i] & 0x1F)
|
|
||||||
i += 1 + partial_length
|
|
||||||
|
|
||||||
if payload[i] < 192:
|
|
||||||
# One-octet length.
|
|
||||||
body_len = payload[i]
|
|
||||||
i += 1
|
|
||||||
elif payload[i] < 224:
|
|
||||||
# Two-octet length.
|
|
||||||
body_len = ((payload[i] - 192) << 8) + payload[i + 1] + 192
|
|
||||||
i += 2
|
|
||||||
elif payload[i] == 255:
|
|
||||||
# Five-octet length.
|
|
||||||
body_len = (
|
|
||||||
(payload[i + 1] << 24)
|
|
||||||
| (payload[i + 2] << 16)
|
|
||||||
| (payload[i + 3] << 8)
|
|
||||||
| payload[i + 4]
|
|
||||||
)
|
|
||||||
i += 5
|
|
||||||
else:
|
|
||||||
# Impossible, partial body length was processed above.
|
|
||||||
return False
|
|
||||||
|
|
||||||
i += body_len
|
|
||||||
|
|
||||||
if i == len(payload):
|
|
||||||
# Last packet should be
|
|
||||||
# Symmetrically Encrypted and Integrity Protected Data Packet (SEIPD)
|
|
||||||
#
|
|
||||||
# This is the only place where this function may return `True`.
|
|
||||||
return packet_type_id == 18
|
|
||||||
elif packet_type_id not in [1, 3]:
|
|
||||||
# All packets except the last one must be either
|
|
||||||
# Public-Key Encrypted Session Key Packet (PKESK)
|
|
||||||
# or
|
|
||||||
# Symmetric-Key Encrypted Session Key Packet (SKESK)
|
|
||||||
return False
|
|
||||||
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def check_armored_payload(payload: str):
|
|
||||||
prefix = "-----BEGIN PGP MESSAGE-----\r\n\r\n"
|
|
||||||
if not payload.startswith(prefix):
|
|
||||||
return False
|
|
||||||
payload = payload.removeprefix(prefix)
|
|
||||||
|
|
||||||
while payload.endswith("\r\n"):
|
|
||||||
payload = payload.removesuffix("\r\n")
|
|
||||||
suffix = "-----END PGP MESSAGE-----"
|
|
||||||
if not payload.endswith(suffix):
|
|
||||||
return False
|
|
||||||
payload = payload.removesuffix(suffix)
|
|
||||||
|
|
||||||
# Remove CRC24.
|
|
||||||
payload = payload.rpartition("=")[0]
|
|
||||||
|
|
||||||
try:
|
|
||||||
payload = base64.b64decode(payload)
|
|
||||||
except binascii.Error:
|
|
||||||
return False
|
|
||||||
|
|
||||||
try:
|
|
||||||
return check_openpgp_payload(payload)
|
|
||||||
except IndexError:
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def is_securejoin(message):
|
|
||||||
if message.get("secure-join") not in ["vc-request", "vg-request"]:
|
|
||||||
return False
|
|
||||||
if not message.is_multipart():
|
|
||||||
return False
|
|
||||||
parts_count = 0
|
|
||||||
for part in message.iter_parts():
|
|
||||||
parts_count += 1
|
|
||||||
if parts_count > 1:
|
|
||||||
return False
|
|
||||||
if part.is_multipart():
|
|
||||||
return False
|
|
||||||
if part.get_content_type() != "text/plain":
|
|
||||||
return False
|
|
||||||
|
|
||||||
payload = part.get_payload().strip().lower()
|
|
||||||
if payload not in ("secure-join: vc-request", "secure-join: vg-request"):
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
def check_encrypted(message):
|
def check_encrypted(message):
|
||||||
"""Check that the message is an OpenPGP-encrypted message.
|
"""Check that the message is an OpenPGP-encrypted message."""
|
||||||
|
|
||||||
MIME structure of the message must correspond to <https://www.rfc-editor.org/rfc/rfc3156>.
|
|
||||||
"""
|
|
||||||
if not message.is_multipart():
|
if not message.is_multipart():
|
||||||
return False
|
return False
|
||||||
|
if message.get("subject") != "...":
|
||||||
|
return False
|
||||||
if message.get_content_type() != "multipart/encrypted":
|
if message.get_content_type() != "multipart/encrypted":
|
||||||
return False
|
return False
|
||||||
parts_count = 0
|
parts_count = 0
|
||||||
for part in message.iter_parts():
|
for part in message.iter_parts():
|
||||||
# We explicitly check Content-Type of each part later,
|
|
||||||
# but this is to be absolutely sure `get_payload()` returns string and not list.
|
|
||||||
if part.is_multipart():
|
|
||||||
return False
|
|
||||||
|
|
||||||
if parts_count == 0:
|
if parts_count == 0:
|
||||||
if part.get_content_type() != "application/pgp-encrypted":
|
if part.get_content_type() != "application/pgp-encrypted":
|
||||||
return False
|
return False
|
||||||
|
|
||||||
payload = part.get_payload()
|
|
||||||
if payload.strip() != "Version: 1":
|
|
||||||
return False
|
|
||||||
elif parts_count == 1:
|
elif parts_count == 1:
|
||||||
if part.get_content_type() != "application/octet-stream":
|
if part.get_content_type() != "application/octet-stream":
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if not check_armored_payload(part.get_payload()):
|
|
||||||
return False
|
|
||||||
else:
|
else:
|
||||||
return False
|
return False
|
||||||
parts_count += 1
|
parts_count += 1
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
async def asyncmain_beforequeue(config, mode):
|
class SMTPController(Controller):
|
||||||
if mode == "outgoing":
|
|
||||||
port = config.filtermail_smtp_port
|
|
||||||
handler = OutgoingBeforeQueueHandler(config)
|
|
||||||
else:
|
|
||||||
port = config.filtermail_smtp_port_incoming
|
|
||||||
handler = IncomingBeforeQueueHandler(config)
|
|
||||||
HackedController(
|
|
||||||
handler,
|
|
||||||
hostname="127.0.0.1",
|
|
||||||
port=port,
|
|
||||||
data_size_limit=config.max_message_size,
|
|
||||||
).start()
|
|
||||||
|
|
||||||
|
|
||||||
def recipient_matches_passthrough(recipient, passthrough_recipients):
|
|
||||||
for addr in passthrough_recipients:
|
|
||||||
if recipient == addr:
|
|
||||||
return True
|
|
||||||
if addr[0] == "@" and recipient.endswith(addr):
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
class HackedController(Controller):
|
|
||||||
def factory(self):
|
def factory(self):
|
||||||
return SMTPDiscardRCPTO_options(self.handler, **self.SMTP_kwargs)
|
return SMTP(self.handler, **self.SMTP_kwargs)
|
||||||
|
|
||||||
|
|
||||||
class SMTPDiscardRCPTO_options(SMTP):
|
class BeforeQueueHandler:
|
||||||
def _getparams(self, params):
|
def __init__(self):
|
||||||
# Ignore RCPT TO parameters.
|
|
||||||
#
|
|
||||||
# Otherwise parameters such as `ORCPT=...`
|
|
||||||
# or `NOTIFY=DELAY,FAILURE` (generated by Stalwart)
|
|
||||||
# make aiosmtpd reject the message here:
|
|
||||||
# <https://github.com/aio-libs/aiosmtpd/blob/98f578389ae86e5345cc343fa4e5a17b21d9c96d/aiosmtpd/smtp.py#L1379-L1384>
|
|
||||||
return {}
|
|
||||||
|
|
||||||
|
|
||||||
class OutgoingBeforeQueueHandler:
|
|
||||||
def __init__(self, config):
|
|
||||||
self.config = config
|
|
||||||
self.send_rate_limiter = SendRateLimiter()
|
self.send_rate_limiter = SendRateLimiter()
|
||||||
|
|
||||||
async def handle_MAIL(self, server, session, envelope, address, mail_options):
|
async def handle_MAIL(self, server, session, envelope, address, mail_options):
|
||||||
logging.info(f"handle_MAIL from {address}")
|
logging.info(f"handle_MAIL from {address}")
|
||||||
envelope.mail_from = address
|
envelope.mail_from = address
|
||||||
max_sent = self.config.max_user_send_per_minute
|
if not self.send_rate_limiter.is_sending_allowed(address):
|
||||||
if not self.send_rate_limiter.is_sending_allowed(address, max_sent):
|
|
||||||
return f"450 4.7.1: Too much mail from {address}"
|
return f"450 4.7.1: Too much mail from {address}"
|
||||||
|
|
||||||
parts = envelope.mail_from.split("@")
|
parts = envelope.mail_from.split("@")
|
||||||
@@ -226,115 +57,59 @@ class OutgoingBeforeQueueHandler:
|
|||||||
|
|
||||||
async def handle_DATA(self, server, session, envelope):
|
async def handle_DATA(self, server, session, envelope):
|
||||||
logging.info("handle_DATA before-queue")
|
logging.info("handle_DATA before-queue")
|
||||||
error = self.check_DATA(envelope)
|
error = check_DATA(envelope)
|
||||||
if error:
|
if error:
|
||||||
return error
|
return error
|
||||||
logging.info("re-injecting the mail that passed checks")
|
logging.info("re-injecting the mail that passed checks")
|
||||||
client = SMTPClient("localhost", self.config.postfix_reinject_port)
|
client = SMTPClient("localhost", "10025")
|
||||||
client.sendmail(
|
client.sendmail(envelope.mail_from, envelope.rcpt_tos, envelope.content)
|
||||||
envelope.mail_from, envelope.rcpt_tos, envelope.original_content
|
|
||||||
)
|
|
||||||
return "250 OK"
|
return "250 OK"
|
||||||
|
|
||||||
def check_DATA(self, envelope):
|
|
||||||
"""the central filtering function for e-mails."""
|
|
||||||
logging.info(f"Processing DATA message from {envelope.mail_from}")
|
|
||||||
|
|
||||||
message = BytesParser(policy=policy.default).parsebytes(envelope.content)
|
async def asyncmain_beforequeue(port):
|
||||||
mail_encrypted = check_encrypted(message)
|
Controller(BeforeQueueHandler(), hostname="127.0.0.1", port=port).start()
|
||||||
|
|
||||||
_, from_addr = parseaddr(message.get("from").strip())
|
|
||||||
|
|
||||||
if envelope.mail_from.lower() != from_addr.lower():
|
|
||||||
return f"500 Invalid FROM <{from_addr!r}> for <{envelope.mail_from!r}>"
|
|
||||||
|
|
||||||
if mail_encrypted or is_securejoin(message):
|
|
||||||
print("Outgoing: Filtering encrypted mail.", file=sys.stderr)
|
|
||||||
return
|
|
||||||
|
|
||||||
print("Outgoing: Filtering unencrypted mail.", file=sys.stderr)
|
|
||||||
|
|
||||||
if envelope.mail_from in self.config.passthrough_senders:
|
|
||||||
return
|
|
||||||
|
|
||||||
# allow self-sent Autocrypt Setup Message
|
|
||||||
if envelope.rcpt_tos == [from_addr]:
|
|
||||||
if message.get("subject") == "Autocrypt Setup Message":
|
|
||||||
if message.get_content_type() == "multipart/mixed":
|
|
||||||
return
|
|
||||||
|
|
||||||
passthrough_recipients = self.config.passthrough_recipients
|
|
||||||
|
|
||||||
for recipient in envelope.rcpt_tos:
|
|
||||||
if recipient_matches_passthrough(recipient, passthrough_recipients):
|
|
||||||
continue
|
|
||||||
|
|
||||||
print("Rejected unencrypted mail.", file=sys.stderr)
|
|
||||||
return ENCRYPTION_NEEDED_523
|
|
||||||
|
|
||||||
|
|
||||||
class IncomingBeforeQueueHandler:
|
def check_DATA(envelope):
|
||||||
def __init__(self, config):
|
"""the central filtering function for e-mails."""
|
||||||
self.config = config
|
logging.info(f"Processing DATA message from {envelope.mail_from}")
|
||||||
|
|
||||||
async def handle_DATA(self, server, session, envelope):
|
message = BytesParser(policy=policy.default).parsebytes(envelope.content)
|
||||||
logging.info("handle_DATA before-queue")
|
mail_encrypted = check_encrypted(message)
|
||||||
error = self.check_DATA(envelope)
|
|
||||||
if error:
|
|
||||||
return error
|
|
||||||
logging.info("re-injecting the mail that passed checks")
|
|
||||||
|
|
||||||
# the smtp daemon on reinject_port_incoming gives it to dkim milter
|
_, from_addr = parseaddr(message.get("from").strip())
|
||||||
# which looks at source address to determine whether to verify or sign
|
logging.info(f"mime-from: {from_addr} envelope-from: {envelope.mail_from!r}")
|
||||||
client = SMTPClient(
|
if envelope.mail_from.lower() != from_addr.lower():
|
||||||
"localhost",
|
return f"500 Invalid FROM <{from_addr!r}> for <{envelope.mail_from!r}>"
|
||||||
self.config.postfix_reinject_port_incoming,
|
|
||||||
source_address=("127.0.0.2", 0),
|
|
||||||
)
|
|
||||||
client.sendmail(
|
|
||||||
envelope.mail_from, envelope.rcpt_tos, envelope.original_content
|
|
||||||
)
|
|
||||||
return "250 OK"
|
|
||||||
|
|
||||||
def check_DATA(self, envelope):
|
envelope_from_domain = from_addr.split("@").pop()
|
||||||
"""the central filtering function for e-mails."""
|
for recipient in envelope.rcpt_tos:
|
||||||
logging.info(f"Processing DATA message from {envelope.mail_from}")
|
if envelope.mail_from == recipient:
|
||||||
|
# Always allow sending emails to self.
|
||||||
|
continue
|
||||||
|
res = recipient.split("@")
|
||||||
|
if len(res) != 2:
|
||||||
|
return f"500 Invalid address <{recipient}>"
|
||||||
|
_recipient_addr, recipient_domain = res
|
||||||
|
|
||||||
message = BytesParser(policy=policy.default).parsebytes(envelope.content)
|
is_outgoing = recipient_domain != envelope_from_domain
|
||||||
mail_encrypted = check_encrypted(message)
|
if is_outgoing and not mail_encrypted:
|
||||||
|
is_securejoin = message.get("secure-join") in ["vc-request", "vg-request"]
|
||||||
if mail_encrypted or is_securejoin(message):
|
if not is_securejoin:
|
||||||
print("Incoming: Filtering encrypted mail.", file=sys.stderr)
|
return f"500 Invalid unencrypted mail to <{recipient}>"
|
||||||
return
|
|
||||||
|
|
||||||
print("Incoming: Filtering unencrypted mail.", file=sys.stderr)
|
|
||||||
|
|
||||||
# we want cleartext mailer-daemon messages to pass through
|
|
||||||
# chatmail core will typically not display them as normal messages
|
|
||||||
if message.get("auto-submitted"):
|
|
||||||
_, from_addr = parseaddr(message.get("from").strip())
|
|
||||||
if from_addr.lower().startswith("mailer-daemon@"):
|
|
||||||
if message.get_content_type() == "multipart/report":
|
|
||||||
return
|
|
||||||
|
|
||||||
for recipient in envelope.rcpt_tos:
|
|
||||||
user = self.config.get_user(recipient)
|
|
||||||
if user is None or user.is_incoming_cleartext_ok():
|
|
||||||
continue
|
|
||||||
|
|
||||||
print("Rejected unencrypted mail.", file=sys.stderr)
|
|
||||||
return ENCRYPTION_NEEDED_523
|
|
||||||
|
|
||||||
|
|
||||||
class SendRateLimiter:
|
class SendRateLimiter:
|
||||||
|
MAX_USER_SEND_PER_MINUTE = 80
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.addr2timestamps = {}
|
self.addr2timestamps = {}
|
||||||
|
|
||||||
def is_sending_allowed(self, mail_from, max_send_per_minute):
|
def is_sending_allowed(self, mail_from):
|
||||||
last = self.addr2timestamps.setdefault(mail_from, [])
|
last = self.addr2timestamps.setdefault(mail_from, [])
|
||||||
now = time.time()
|
now = time.time()
|
||||||
last[:] = [ts for ts in last if ts >= (now - 60)]
|
last[:] = [ts for ts in last if ts >= (now - 60)]
|
||||||
if len(last) <= max_send_per_minute:
|
if len(last) <= self.MAX_USER_SEND_PER_MINUTE:
|
||||||
last.append(now)
|
last.append(now)
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
@@ -342,14 +117,10 @@ class SendRateLimiter:
|
|||||||
|
|
||||||
def main():
|
def main():
|
||||||
args = sys.argv[1:]
|
args = sys.argv[1:]
|
||||||
assert len(args) == 2
|
assert len(args) == 1
|
||||||
config = read_config(args[0])
|
logging.basicConfig(level=logging.INFO)
|
||||||
mode = args[1]
|
|
||||||
logging.basicConfig(level=logging.WARN)
|
|
||||||
loop = asyncio.new_event_loop()
|
loop = asyncio.new_event_loop()
|
||||||
asyncio.set_event_loop(loop)
|
asyncio.set_event_loop(loop)
|
||||||
assert mode in ["incoming", "outgoing"]
|
task = asyncmain_beforequeue(port=int(args[0]))
|
||||||
task = asyncmain_beforequeue(config, mode)
|
|
||||||
loop.create_task(task)
|
loop.create_task(task)
|
||||||
logging.info("entering serving loop")
|
|
||||||
loop.run_forever()
|
loop.run_forever()
|
||||||
|
|||||||
10
chatmaild/src/chatmaild/filtermail.service
Normal file
10
chatmaild/src/chatmaild/filtermail.service
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
[Unit]
|
||||||
|
Description=Chatmail Postfix BeforeQeue filter
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
ExecStart=/usr/local/bin/filtermail 10080
|
||||||
|
Restart=always
|
||||||
|
RestartSec=30
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
@@ -1,111 +0,0 @@
|
|||||||
[params]
|
|
||||||
|
|
||||||
# mail domain (MUST be set to fully qualified chat mail domain)
|
|
||||||
mail_domain = {mail_domain}
|
|
||||||
|
|
||||||
#
|
|
||||||
# If you only do private test deploys, you don't need to modify any settings below
|
|
||||||
#
|
|
||||||
|
|
||||||
#
|
|
||||||
# Restrictions on user addresses
|
|
||||||
#
|
|
||||||
|
|
||||||
# how many mails a user can send out per minute
|
|
||||||
max_user_send_per_minute = 60
|
|
||||||
|
|
||||||
# maximum mailbox size of a chatmail address
|
|
||||||
max_mailbox_size = 100M
|
|
||||||
|
|
||||||
# maximum message size for an e-mail in bytes
|
|
||||||
max_message_size = 31457280
|
|
||||||
|
|
||||||
# days after which mails are unconditionally deleted
|
|
||||||
delete_mails_after = 20
|
|
||||||
|
|
||||||
# days after which large messages (>200k) are unconditionally deleted
|
|
||||||
delete_large_after = 7
|
|
||||||
|
|
||||||
# days after which users without a successful login are deleted (database and mails)
|
|
||||||
delete_inactive_users_after = 90
|
|
||||||
|
|
||||||
# minimum length a username must have
|
|
||||||
username_min_length = 9
|
|
||||||
|
|
||||||
# maximum length a username can have
|
|
||||||
username_max_length = 9
|
|
||||||
|
|
||||||
# minimum length a password must have
|
|
||||||
password_min_length = 9
|
|
||||||
|
|
||||||
# list of chatmail addresses which can send outbound un-encrypted mail
|
|
||||||
passthrough_senders =
|
|
||||||
|
|
||||||
# list of e-mail recipients for which to accept outbound un-encrypted mails
|
|
||||||
# (space-separated, item may start with "@" to whitelist whole recipient domains)
|
|
||||||
passthrough_recipients = xstore@testrun.org echo@{mail_domain}
|
|
||||||
|
|
||||||
#
|
|
||||||
# Deployment Details
|
|
||||||
#
|
|
||||||
|
|
||||||
# SMTP outgoing filtermail and reinjection
|
|
||||||
filtermail_smtp_port = 10080
|
|
||||||
postfix_reinject_port = 10025
|
|
||||||
|
|
||||||
# SMTP incoming filtermail and reinjection
|
|
||||||
filtermail_smtp_port_incoming = 10081
|
|
||||||
postfix_reinject_port_incoming = 10026
|
|
||||||
|
|
||||||
# if set to "True" IPv6 is disabled
|
|
||||||
disable_ipv6 = False
|
|
||||||
|
|
||||||
# Defaults to https://iroh.{{mail_domain}} and running `iroh-relay` on the chatmail
|
|
||||||
# service.
|
|
||||||
# If you set it to anything else, the service will be disabled
|
|
||||||
# and users will be directed to use the given iroh relay URL.
|
|
||||||
# Set it to empty string if you want users to use their default iroh relay.
|
|
||||||
# iroh_relay =
|
|
||||||
|
|
||||||
# Address on which `mtail` listens,
|
|
||||||
# e.g. 127.0.0.1 or some private network
|
|
||||||
# address like 192.168.10.1.
|
|
||||||
# You can point Prometheus
|
|
||||||
# or some other OpenMetrics-compatible
|
|
||||||
# collector to
|
|
||||||
# http://{{mtail_address}}:3903/metrics
|
|
||||||
# and display collected metrics with Grafana.
|
|
||||||
#
|
|
||||||
# WARNING: do not expose this service
|
|
||||||
# to the public IP address.
|
|
||||||
#
|
|
||||||
# `mtail is not running if the setting is not set.
|
|
||||||
|
|
||||||
# mtail_address = 127.0.0.1
|
|
||||||
|
|
||||||
#
|
|
||||||
# Debugging options
|
|
||||||
#
|
|
||||||
|
|
||||||
# set to True if you want to track imap protocol execution
|
|
||||||
# in per-maildir ".in/.out" files.
|
|
||||||
# Note that you need to manually cleanup these files
|
|
||||||
# so use this option with caution on production servers.
|
|
||||||
imap_rawlog = false
|
|
||||||
|
|
||||||
|
|
||||||
#
|
|
||||||
# Privacy Policy
|
|
||||||
#
|
|
||||||
|
|
||||||
# postal address of privacy contact
|
|
||||||
privacy_postal =
|
|
||||||
|
|
||||||
# email address of privacy contact
|
|
||||||
privacy_mail =
|
|
||||||
|
|
||||||
# postal address of the privacy data officer
|
|
||||||
privacy_pdo =
|
|
||||||
|
|
||||||
# postal address of the privacy supervisor
|
|
||||||
privacy_supervisor =
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
|
|
||||||
[privacy]
|
|
||||||
|
|
||||||
passthrough_recipients = privacy@testrun.org xstore@testrun.org echo@{mail_domain}
|
|
||||||
|
|
||||||
privacy_postal =
|
|
||||||
Merlinux GmbH, Represented by the managing director H. Krekel,
|
|
||||||
Reichgrafen Str. 20, 79102 Freiburg, Germany
|
|
||||||
|
|
||||||
privacy_mail = privacy@testrun.org
|
|
||||||
privacy_pdo =
|
|
||||||
Prof. Dr. Fabian Schmieder, lexICT UG (limited), Ostfeldstr. 49, 30559 Hannover.
|
|
||||||
You can contact him at *delta-privacy@merlinux.eu* (Keyword: DPO)
|
|
||||||
privacy_supervisor =
|
|
||||||
State Commissioner for Data Protection and Freedom of Information of
|
|
||||||
Baden-Württemberg in 70173 Stuttgart, Germany.
|
|
||||||
@@ -1,31 +0,0 @@
|
|||||||
import sys
|
|
||||||
|
|
||||||
from .config import read_config
|
|
||||||
from .dictproxy import DictProxy
|
|
||||||
|
|
||||||
|
|
||||||
class LastLoginDictProxy(DictProxy):
|
|
||||||
def __init__(self, config):
|
|
||||||
super().__init__()
|
|
||||||
self.config = config
|
|
||||||
|
|
||||||
def handle_set(self, addr, parts):
|
|
||||||
keyname = parts[1].split("/")
|
|
||||||
value = parts[2] if len(parts) > 2 else ""
|
|
||||||
if keyname[0] == "shared" and keyname[1] == "last-login":
|
|
||||||
if addr.startswith("echo@"):
|
|
||||||
return True
|
|
||||||
addr = keyname[2]
|
|
||||||
timestamp = int(value)
|
|
||||||
user = self.config.get_user(addr)
|
|
||||||
user.set_last_login_timestamp(timestamp)
|
|
||||||
return True
|
|
||||||
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
socket, config_path = sys.argv[1:]
|
|
||||||
config = read_config(config_path)
|
|
||||||
dictproxy = LastLoginDictProxy(config=config)
|
|
||||||
dictproxy.serve_forever_from_socket(socket)
|
|
||||||
@@ -1,140 +0,0 @@
|
|||||||
import logging
|
|
||||||
import sys
|
|
||||||
import time
|
|
||||||
from contextlib import contextmanager
|
|
||||||
|
|
||||||
from .config import read_config
|
|
||||||
from .dictproxy import DictProxy
|
|
||||||
from .filedict import FileDict
|
|
||||||
from .notifier import Notifier
|
|
||||||
|
|
||||||
|
|
||||||
def _is_valid_token_timestamp(timestamp, now):
|
|
||||||
# Token if invalid after 90 days
|
|
||||||
# or if the timestamp is in the future.
|
|
||||||
return timestamp > now - 3600 * 24 * 90 and timestamp < now + 60
|
|
||||||
|
|
||||||
|
|
||||||
class Metadata:
|
|
||||||
# each SETMETADATA on this key appends to dictionary
|
|
||||||
# mapping of unique device tokens
|
|
||||||
# which only ever get removed if the upstream indicates the token is invalid
|
|
||||||
DEVICETOKEN_KEY = "devicetoken"
|
|
||||||
|
|
||||||
def __init__(self, vmail_dir):
|
|
||||||
self.vmail_dir = vmail_dir
|
|
||||||
|
|
||||||
def get_metadata_dict(self, addr):
|
|
||||||
return FileDict(self.vmail_dir / addr / "metadata.json")
|
|
||||||
|
|
||||||
@contextmanager
|
|
||||||
def _modify_tokens(self, addr):
|
|
||||||
with self.get_metadata_dict(addr).modify() as data:
|
|
||||||
tokens = data.setdefault(self.DEVICETOKEN_KEY, {})
|
|
||||||
now = int(time.time())
|
|
||||||
if isinstance(tokens, list):
|
|
||||||
data[self.DEVICETOKEN_KEY] = tokens = {t: now for t in tokens}
|
|
||||||
|
|
||||||
expired_tokens = [
|
|
||||||
token
|
|
||||||
for token, timestamp in tokens.items()
|
|
||||||
if not _is_valid_token_timestamp(tokens[token], now)
|
|
||||||
]
|
|
||||||
for expired_token in expired_tokens:
|
|
||||||
del tokens[expired_token]
|
|
||||||
|
|
||||||
yield tokens
|
|
||||||
|
|
||||||
def add_token_to_addr(self, addr, token):
|
|
||||||
with self._modify_tokens(addr) as tokens:
|
|
||||||
tokens[token] = int(time.time())
|
|
||||||
|
|
||||||
def remove_token_from_addr(self, addr, token):
|
|
||||||
with self._modify_tokens(addr) as tokens:
|
|
||||||
if token in tokens:
|
|
||||||
del tokens[token]
|
|
||||||
|
|
||||||
def get_tokens_for_addr(self, addr):
|
|
||||||
mdict = self.get_metadata_dict(addr).read()
|
|
||||||
tokens = mdict.get(self.DEVICETOKEN_KEY, {})
|
|
||||||
|
|
||||||
now = int(time.time())
|
|
||||||
if isinstance(tokens, dict):
|
|
||||||
token_list = [
|
|
||||||
token
|
|
||||||
for token, timestamp in tokens.items()
|
|
||||||
if _is_valid_token_timestamp(timestamp, now)
|
|
||||||
]
|
|
||||||
if len(token_list) < len(tokens):
|
|
||||||
# Some tokens have expired, remove them.
|
|
||||||
with self._modify_tokens(addr) as _tokens:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
token_list = []
|
|
||||||
return token_list
|
|
||||||
|
|
||||||
|
|
||||||
class MetadataDictProxy(DictProxy):
|
|
||||||
def __init__(self, notifier, metadata, iroh_relay=None):
|
|
||||||
super().__init__()
|
|
||||||
self.notifier = notifier
|
|
||||||
self.metadata = metadata
|
|
||||||
self.iroh_relay = iroh_relay
|
|
||||||
|
|
||||||
def handle_lookup(self, parts):
|
|
||||||
# Lpriv/43f5f508a7ea0366dff30200c15250e3/devicetoken\tlkj123poi@c2.testrun.org
|
|
||||||
keyparts = parts[0].split("/", 2)
|
|
||||||
if keyparts[0] == "priv":
|
|
||||||
keyname = keyparts[2]
|
|
||||||
addr = parts[1]
|
|
||||||
if keyname == self.metadata.DEVICETOKEN_KEY:
|
|
||||||
res = " ".join(self.metadata.get_tokens_for_addr(addr))
|
|
||||||
return f"O{res}\n"
|
|
||||||
elif keyparts[0] == "shared":
|
|
||||||
keyname = keyparts[2]
|
|
||||||
if (
|
|
||||||
keyname == "vendor/vendor.dovecot/pvt/server/vendor/deltachat/irohrelay"
|
|
||||||
and self.iroh_relay
|
|
||||||
):
|
|
||||||
# Handle `GETMETADATA "" /shared/vendor/deltachat/irohrelay`
|
|
||||||
return f"O{self.iroh_relay}\n"
|
|
||||||
logging.warning(f"lookup ignored: {parts!r}")
|
|
||||||
return "N\n"
|
|
||||||
|
|
||||||
def handle_set(self, addr, parts):
|
|
||||||
# For documentation on key structure see
|
|
||||||
# https://github.com/dovecot/core/blob/main/src/lib-storage/mailbox-attribute.h
|
|
||||||
keyname = parts[1].split("/")
|
|
||||||
value = parts[2] if len(parts) > 2 else ""
|
|
||||||
if keyname[0] == "priv" and keyname[2] == self.metadata.DEVICETOKEN_KEY:
|
|
||||||
self.metadata.add_token_to_addr(addr, value)
|
|
||||||
return True
|
|
||||||
elif keyname[0] == "priv" and keyname[2] == "messagenew":
|
|
||||||
self.notifier.new_message_for_addr(addr, self.metadata)
|
|
||||||
return True
|
|
||||||
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
socket, config_path = sys.argv[1:]
|
|
||||||
|
|
||||||
config = read_config(config_path)
|
|
||||||
iroh_relay = config.iroh_relay
|
|
||||||
|
|
||||||
vmail_dir = config.mailboxes_dir
|
|
||||||
if not vmail_dir.exists():
|
|
||||||
logging.error("vmail dir does not exist: %r", vmail_dir)
|
|
||||||
return 1
|
|
||||||
|
|
||||||
queue_dir = vmail_dir / "pending_notifications"
|
|
||||||
queue_dir.mkdir(exist_ok=True)
|
|
||||||
metadata = Metadata(vmail_dir)
|
|
||||||
notifier = Notifier(queue_dir)
|
|
||||||
notifier.start_notification_threads(metadata.remove_token_from_addr)
|
|
||||||
|
|
||||||
dictproxy = MetadataDictProxy(
|
|
||||||
notifier=notifier, metadata=metadata, iroh_relay=iroh_relay
|
|
||||||
)
|
|
||||||
|
|
||||||
dictproxy.serve_forever_from_socket(socket)
|
|
||||||
@@ -1,32 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
import sys
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
|
|
||||||
def main(vmail_dir=None):
|
|
||||||
if vmail_dir is None:
|
|
||||||
vmail_dir = sys.argv[1]
|
|
||||||
|
|
||||||
accounts = 0
|
|
||||||
ci_accounts = 0
|
|
||||||
|
|
||||||
for path in Path(vmail_dir).iterdir():
|
|
||||||
if not path.joinpath("cur").is_dir():
|
|
||||||
continue
|
|
||||||
accounts += 1
|
|
||||||
if path.name[:3] in ("ci-", "ac_"):
|
|
||||||
ci_accounts += 1
|
|
||||||
|
|
||||||
print("# HELP total number of accounts")
|
|
||||||
print("# TYPE accounts gauge")
|
|
||||||
print(f"accounts {accounts}")
|
|
||||||
print("# HELP number of CI accounts")
|
|
||||||
print("# TYPE ci_accounts gauge")
|
|
||||||
print(f"ci_accounts {ci_accounts}")
|
|
||||||
print("# HELP number of non-CI accounts")
|
|
||||||
print("# TYPE nonci_accounts gauge")
|
|
||||||
print(f"nonci_accounts {accounts - ci_accounts}")
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
||||||
@@ -1,63 +0,0 @@
|
|||||||
"""
|
|
||||||
migration code from old sqlite databases into per-maildir "password" files
|
|
||||||
where mtime reflects and is updated to be the "last-login" time.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import sqlite3
|
|
||||||
import sys
|
|
||||||
|
|
||||||
from chatmaild.config import read_config
|
|
||||||
|
|
||||||
|
|
||||||
def get_all_rows(path):
|
|
||||||
assert path.exists()
|
|
||||||
uri = f"file:{path}?mode=ro"
|
|
||||||
sqlconn = sqlite3.connect(uri, timeout=60, isolation_level="DEFERRED", uri=True)
|
|
||||||
cur = sqlconn.cursor()
|
|
||||||
cur.execute("SELECT * from users")
|
|
||||||
rows = cur.fetchall()
|
|
||||||
sqlconn.close()
|
|
||||||
return rows
|
|
||||||
|
|
||||||
|
|
||||||
def migrate_from_db_to_maildir(config, chunking=10000):
    """Move password hashes (and last-login timestamps) from the legacy
    sqlite passdb into per-maildir "password" files, then retire the db.

    :param config: chatmail config providing ``passdb_path`` and ``get_user``.
    :param chunking: how many rows between progress log lines.
    """
    dbpath = config.passdb_path
    if not dbpath.exists():
        # nothing to migrate: fresh install, or migration already ran
        return

    all_rows = get_all_rows(dbpath)

    # don't transfer special/CI accounts
    rows = [row for row in all_rows if row[0][:3] not in ("ci-", "ac_")]

    logging.info(f"ignoring {len(all_rows) - len(rows)} CI accounts")
    logging.info(f"migrating {len(rows)} sqlite database passwords to user dirs")

    for i, row in enumerate(rows):
        # row layout: (addr, encrypted_password[, last_login_ts])
        user = config.get_user(row[0])
        user.set_password(row[1])

        if len(row) == 3 and row[2]:
            user.set_last_login_timestamp(int(row[2]))

        if i > 0 and i % chunking == 0:
            logging.info(f"migration-progress: {i} passwords transferred")

    logging.info("migration: all passwords migrated")
    # keep the old database around as "<name>.old" and remove sqlite side
    # files (names like "<name>-..." — presumably "-wal"/"-shm"; verify)
    oldpath = config.passdb_path.with_suffix(config.passdb_path.suffix + ".old")
    os.rename(config.passdb_path, oldpath)
    for sidepath in config.passdb_path.parent.iterdir():
        if sidepath.name.startswith(config.passdb_path.name + "-"):
            sidepath.unlink()
    logging.info(f"migration: moved database to {oldpath!r}")
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
    # usage: migrate_db <path-to-chatmail.ini>
    config = read_config(sys.argv[1])
    logging.basicConfig(level=logging.INFO)
    migrate_from_db_to_maildir(config)
|
|
||||||
@@ -1,36 +0,0 @@
|
|||||||
#!/usr/local/lib/chatmaild/venv/bin/python3

"""CGI script for creating new accounts."""

import json
import random
import secrets
import string

from chatmaild.config import Config, read_config

CONFIG_PATH = "/usr/local/lib/chatmaild/chatmail.ini"
ALPHANUMERIC = string.ascii_lowercase + string.digits
ALPHANUMERIC_PUNCT = string.ascii_letters + string.digits + string.punctuation


def create_newemail_dict(config: Config):
    """Return a dict with a freshly generated e-mail address and password.

    The username has exactly ``config.username_max_length`` lowercase
    alphanumeric characters; the password has
    ``config.password_min_length + 3`` characters drawn from letters,
    digits and punctuation.
    """
    # NOTE(review): the username is drawn from `random`, not `secrets` —
    # confirm unpredictability of addresses is not a requirement here.
    user = "".join(random.choices(ALPHANUMERIC, k=config.username_max_length))
    pw_length = config.password_min_length + 3
    password = "".join(
        secrets.choice(ALPHANUMERIC_PUNCT) for _ in range(pw_length)
    )
    return dict(email=f"{user}@{config.mail_domain}", password=f"{password}")


def print_new_account():
    """Emit a CGI response (headers + JSON body) with fresh credentials."""
    creds = create_newemail_dict(read_config(CONFIG_PATH))

    print("Content-Type: application/json")
    print("")
    print(json.dumps(creds))


if __name__ == "__main__":
    print_new_account()
|
|
||||||
@@ -1,171 +0,0 @@
|
|||||||
"""
|
|
||||||
This modules provides notification machinery for transmitting device tokens to
|
|
||||||
a central notification server which in turn contacts a phone provider's notification server
|
|
||||||
to trigger Delta Chat apps to retrieve messages and provide instant notifications to users.
|
|
||||||
|
|
||||||
The Notifier class arranges the queuing of tokens in separate PriorityQueues
|
|
||||||
from which NotifyThreads take and transmit them via HTTPS
|
|
||||||
to the `notifications.delta.chat` service.
|
|
||||||
The current lack of proper HTTP/2-support in Python leads us
|
|
||||||
to use multiple threads and connections to the Rust-implemented `notifications.delta.chat`
|
|
||||||
which itself uses HTTP/2 and thus only a single connection to phone-notification providers.
|
|
||||||
|
|
||||||
If a token fails to cause a successful notification
|
|
||||||
it is moved to a retry-number specific PriorityQueue
|
|
||||||
which handles all tokens that failed a particular number of times
|
|
||||||
and which are scheduled for retry using exponential back-off timing.
|
|
||||||
If a token notification would be scheduled more than DROP_DEADLINE seconds
|
|
||||||
after its first attempt, it is dropped with a log error.
|
|
||||||
|
|
||||||
Note that tokens are opaque to the notification machinery here
|
|
||||||
and are encrypted foreclosing all ability to distinguish
|
|
||||||
which device token ultimately goes to which phone-provider notification service,
|
|
||||||
or to understand the relation of "device tokens" and chatmail addresses.
|
|
||||||
The meaning and format of tokens is basically a matter of chatmail Core and
|
|
||||||
the `notification.delta.chat` service.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import math
|
|
||||||
import os
|
|
||||||
import time
|
|
||||||
from dataclasses import dataclass
|
|
||||||
from pathlib import Path
|
|
||||||
from queue import PriorityQueue
|
|
||||||
from threading import Thread
|
|
||||||
from uuid import uuid4
|
|
||||||
|
|
||||||
import requests
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
class PersistentQueueItem:
    """One pending notification token, backed by a file in the queue dir."""

    path: Path  # backing file; its presence means the item is pending
    addr: str
    start_ts: int  # unix time of the first delivery attempt
    token: str

    def delete(self):
        """Remove the backing file; calling twice is harmless."""
        self.path.unlink(missing_ok=True)

    @classmethod
    def create(cls, queue_dir, addr, start_ts, token):
        """Atomically persist a new item below ``queue_dir`` and return it."""
        start_ts = int(start_ts)
        final_path = queue_dir.joinpath(uuid4().hex)
        tmp_path = final_path.with_name(final_path.name + ".tmp")
        # write to a ".tmp" name first, then rename so readers never
        # observe a partially written file
        tmp_path.write_text(f"{addr}\n{start_ts}\n{token}")
        os.rename(tmp_path, final_path)
        return cls(final_path, addr, start_ts, token)

    @classmethod
    def read_from_path(cls, path):
        """Reconstruct an item from its on-disk representation."""
        # maxsplit=2: the token itself may contain newlines
        addr, start_ts, token = path.read_text().split("\n", maxsplit=2)
        return cls(path, addr, int(start_ts), token)

    def __lt__(self, other):
        # order by first-attempt time so PriorityQueue retries oldest first
        return self.start_ts < other.start_ts
|
|
||||||
|
|
||||||
|
|
||||||
class Notifier:
    """Queues device tokens and schedules delivery/retries for NotifyThreads."""

    URL = "https://notifications.delta.chat/notify"
    CONNECTION_TIMEOUT = 60.0  # seconds until http-request is given up
    BASE_DELAY = 8.0  # base seconds for exponential back-off delay
    DROP_DEADLINE = 5 * 60 * 60  # drop notifications after 5 hours

    def __init__(self, queue_dir):
        self.queue_dir = queue_dir
        # one queue per retry count; enough queues that the last possible
        # retry delay still fits inside DROP_DEADLINE
        num_queues = 1 + int(math.log(self.DROP_DEADLINE, self.BASE_DELAY))
        self.retry_queues = [PriorityQueue() for _ in range(num_queues)]

    def compute_delay(self, retry_num):
        """Exponential back-off; the very first attempt is immediate."""
        if retry_num == 0:
            return 0
        return self.BASE_DELAY**retry_num

    def new_message_for_addr(self, addr, metadata):
        """Persist and enqueue a notification for each token of ``addr``."""
        now = int(time.time())
        for token in metadata.get_tokens_for_addr(addr):
            item = PersistentQueueItem.create(self.queue_dir, addr, now, token)
            self.queue_for_retry(item)

    def requeue_persistent_queue_items(self):
        """Re-enqueue items persisted by a previous process run."""
        for queue_path in self.queue_dir.iterdir():
            item = None
            if not queue_path.name.endswith(".tmp"):
                try:
                    item = PersistentQueueItem.read_from_path(queue_path)
                except ValueError:
                    item = None
            if item is None:
                # leftover ".tmp" file or unparsable content
                logging.warning(f"removing spurious queue item: {queue_path!r}")
                queue_path.unlink()
            else:
                self.queue_for_retry(item)

    def queue_for_retry(self, queue_item, retry_num=0):
        """Schedule ``queue_item`` for attempt number ``retry_num``,
        dropping it if the retry budget or DROP_DEADLINE is exceeded."""
        when = int(time.time()) + self.compute_delay(retry_num)
        deadline = queue_item.start_ts + self.DROP_DEADLINE
        if retry_num >= len(self.retry_queues) or when > deadline:
            queue_item.delete()
            logging.error(f"notification exceeded deadline: {queue_item.token!r}")
            return

        self.retry_queues[retry_num].put((when, queue_item))

    def start_notification_threads(self, remove_token_from_addr):
        """Start worker threads per retry level; returns {retry_num: [threads]}."""
        self.requeue_persistent_queue_items()
        threads = {}
        for retry_num in range(len(self.retry_queues)):
            # use 4 threads for first-try tokens and less for subsequent tries
            pool = threads.setdefault(retry_num, [])
            for _ in range(4 if retry_num == 0 else 2):
                thread = NotifyThread(self, retry_num, remove_token_from_addr)
                pool.append(thread)
                thread.start()
        return threads
|
|
||||||
|
|
||||||
|
|
||||||
class NotifyThread(Thread):
    """Daemon worker that delivers queue items of one retry level over HTTPS."""

    def __init__(self, notifier, retry_num, remove_token_from_addr):
        super().__init__(daemon=True)
        self.notifier = notifier
        self.retry_num = retry_num
        self.remove_token_from_addr = remove_token_from_addr

    def stop(self):
        """Wake the thread with a (None, None) sentinel so run() terminates."""
        self.notifier.retry_queues[self.retry_num].put((None, None))

    def run(self):
        session = requests.Session()
        while self.retry_one(session):
            pass

    def retry_one(self, requests_session, sleep=time.sleep):
        """Take one item from our retry queue, wait until due, then send it.

        Returns False only when the stop-sentinel was received.
        """
        when, queue_item = self.notifier.retry_queues[self.retry_num].get()
        if when is None:
            return False
        pause = when - int(time.time())
        if pause > 0:
            sleep(pause)
        self.perform_request_to_notification_server(requests_session, queue_item)
        return True

    def perform_request_to_notification_server(self, requests_session, queue_item):
        """POST the token; on failure re-queue at the next retry level."""
        token = queue_item.token
        try:
            res = requests_session.post(
                self.notifier.URL, data=token, timeout=self.notifier.CONNECTION_TIMEOUT
            )
        except requests.exceptions.RequestException as e:
            res = e
        else:
            if res.status_code in (200, 410):
                if res.status_code == 410:
                    # 410 Gone: the token is dead; forget it server-side too
                    self.remove_token_from_addr(queue_item.addr, token)
                queue_item.delete()
                return

        logging.warning(f"Notification request failed: {res!r}")
        self.notifier.queue_for_retry(queue_item, retry_num=self.retry_num + 1)
|
|
||||||
@@ -1,56 +0,0 @@
|
|||||||
From: {from_addr}
|
|
||||||
To: {to_addr}
|
|
||||||
Autocrypt-Setup-Message: v1
|
|
||||||
Subject: Autocrypt Setup Message
|
|
||||||
Date: Tue, 22 Jan 2019 12:56:29 +0100
|
|
||||||
Content-type: multipart/mixed; boundary="Y6fyGi9SoGeH8WwRaEdC6bbBcYOedDzrQ"
|
|
||||||
|
|
||||||
--Y6fyGi9SoGeH8WwRaEdC6bbBcYOedDzrQ
|
|
||||||
Content-Type: text/plain
|
|
||||||
|
|
||||||
This message contains all information to transfer your Autocrypt
|
|
||||||
settings along with your secret key securely from your original
|
|
||||||
device.
|
|
||||||
|
|
||||||
To set up your new device for Autocrypt, please follow the
|
|
||||||
instructions that should be presented by your new device.
|
|
||||||
|
|
||||||
You can keep this message and use it as a backup for your secret
|
|
||||||
key. If you want to do this, you should write down the Setup Code
|
|
||||||
and store it securely.
|
|
||||||
--Y6fyGi9SoGeH8WwRaEdC6bbBcYOedDzrQ
|
|
||||||
Content-Type: application/autocrypt-setup
|
|
||||||
Content-Disposition: attachment; filename="autocrypt-setup-message.html"
|
|
||||||
|
|
||||||
<html><body>
|
|
||||||
<p>
|
|
||||||
This is the Autocrypt setup file used to transfer settings and
|
|
||||||
keys between clients. You can decrypt it using the Setup Code
|
|
||||||
presented on your old device, and then import the contained key
|
|
||||||
into your keyring.
|
|
||||||
</p>
|
|
||||||
|
|
||||||
<pre>
|
|
||||||
-----BEGIN PGP MESSAGE-----
|
|
||||||
Passphrase-Format: numeric9x4
|
|
||||||
Passphrase-Begin: 17
|
|
||||||
|
|
||||||
jA0EBwMCFAxADoCdzeX/0ukBlqI5+pfpKb751qd/7nLNbkpy3gVcaf1QwRPZYt40
|
|
||||||
Ynp08UqRQ2g48ZlnzHLSwlTGOPTuv2Jt8ka+pgZ45xzvJSG2gau03xP4VsC271kR
|
|
||||||
VmCjdb0Y6Rk96mAwfGzrkbaRQ9Z7fIoL866GOv6h9neiVIkp+JYlTV6ISD0ZQJ4Q
|
|
||||||
I6dOQkB/TWZyVjtiJDOQHdfNWliA6NtqaLq19wlu9L5xXjuNpY95KwR8EJXWe0+o
|
|
||||||
Y3d2U/KxOAkXKghP2Qg1GtlPVeGC5T4p03TGI6pzKT+kHX6Rrm9wK6sM9aTquMmF
|
|
||||||
Vok84Jg1DFnwivWC2RILR81rXi7k/+Y6MUbveFgJ9cQduqpxnmD7TjOblYu7M6zp
|
|
||||||
YGAUxh8DRKlIMn2QsA++DBYQ6ACZvwuY8qTDLkqPDo4WqM313dsMJbyGjDdVE7EM
|
|
||||||
PESS+RlABETpZXz8g/ycr6DIUNdlbPcmYlsBfHWDOuR2GFFTwmlv5slWS39dJv38
|
|
||||||
E0eIe1CwdxI801Se7t7dUUS/ZF8wb6GlmxOcqGbF8eko1Z0S64IAm7/h13MRQCxI
|
|
||||||
geQnHfGYVJ2FOimoCMEKwfa9x++RFTDW0u7spDC2uWvK/1viV8OfRppFhLr/kmKb
|
|
||||||
18lWXuAz80DAjUDUsVqEq2MvJBJGoCJUEyjuRsLkHYRM5jYk4v50LyyR0Om73nWF
|
|
||||||
nZBqmqNzdr7Xb9PHHdFhnEc0VvoYbrcM0RVYcEMW3YbmejM891j1d6Iv+/n/qND/
|
|
||||||
NdebGrfWJMmFLf/iEkzTZ3/v5inW9LpWoRc94ioCjJTaEo8Rib6ARRFaJVIsmNXi
|
|
||||||
YicFGO98D+zX+a2t9Yz6IpPajVslnOp6ScpmXgts/2XWD7oE+JgxSAqo/dLVsHgP
|
|
||||||
Ufo=
|
|
||||||
=pulM
|
|
||||||
-----END PGP MESSAGE-----
|
|
||||||
</pre></body></html>
|
|
||||||
--Y6fyGi9SoGeH8WwRaEdC6bbBcYOedDzrQ--
|
|
||||||
@@ -1,44 +0,0 @@
|
|||||||
From: {from_addr}
|
|
||||||
To: {to_addr}
|
|
||||||
Subject: ...
|
|
||||||
Date: Sun, 15 Oct 2023 16:43:21 +0000
|
|
||||||
Message-ID: <Mr.UVyJWZmkCKM.hGzNc6glBE_@c2.testrun.org>
|
|
||||||
In-Reply-To: <Mr.MvmCz-GQbi_.6FGRkhDf05c@c2.testrun.org>
|
|
||||||
References: <Mr.3gckbNy5bch.uK3Hd2Ws6-w@c2.testrun.org>
|
|
||||||
<Mr.MvmCz-GQbi_.6FGRkhDf05c@c2.testrun.org>
|
|
||||||
Chat-Version: 1.0
|
|
||||||
Autocrypt: addr={from_addr}; prefer-encrypt=mutual;
|
|
||||||
keydata=xjMEZSwWjhYJKwYBBAHaRw8BAQdAQBEhqeJh0GueHB6kF/DUQqYCxARNBVokg/AzT+7LqH
|
|
||||||
rNFzxiYXJiYXpAYzIudGVzdHJ1bi5vcmc+wosEEBYIADMCGQEFAmUsFo4CGwMECwkIBwYVCAkKCwID
|
|
||||||
FgIBFiEEFTfUNvVnY3b9F7yHnmme1PfUhX8ACgkQnmme1PfUhX9A4AEAnHWHp49eBCMHK5t66gYPiW
|
|
||||||
XQuB1mwUjzGfYWB+0RXUoA/0xcQ3FbUNlGKW7Blp6eMFfViv6Mv2d3kNSXACB6nmcMzjgEZSwWjhIK
|
|
||||||
KwYBBAGXVQEFAQEHQBpY5L2M1XHo0uxf8SX1wNLBp/OVvidoWHQF2Jz+kJsUAwEIB8J4BBgWCAAgBQ
|
|
||||||
JlLBaOAhsMFiEEFTfUNvVnY3b9F7yHnmme1PfUhX8ACgkQnmme1PfUhX/INgEA37AJaNvruYsJVanP
|
|
||||||
IXnYw4CKd55UAwl8Zcy+M2diAbkA/0fHHcGV4r78hpbbL1Os52DPOdqYQRauIeJUeG+G6bQO
|
|
||||||
MIME-Version: 1.0
|
|
||||||
Content-Type: multipart/encrypted; protocol="application/pgp-encrypted";
|
|
||||||
boundary="YFrteb74qSXmggbOxZL9dRnhymywAi"
|
|
||||||
|
|
||||||
|
|
||||||
--YFrteb74qSXmggbOxZL9dRnhymywAi
|
|
||||||
Content-Description: PGP/MIME version identification
|
|
||||||
Content-Type: application/pgp-encrypted
|
|
||||||
|
|
||||||
Version: 1
|
|
||||||
|
|
||||||
|
|
||||||
--YFrteb74qSXmggbOxZL9dRnhymywAi
|
|
||||||
Content-Description: OpenPGP encrypted message
|
|
||||||
Content-Disposition: inline; filename="encrypted.asc";
|
|
||||||
Content-Type: application/octet-stream; name="encrypted.asc"
|
|
||||||
|
|
||||||
-----BEGIN PGP MESSAGE-----
|
|
||||||
|
|
||||||
yxJiAAAAAABIZWxsbyB3b3JsZCE=
|
|
||||||
=1I/B
|
|
||||||
-----END PGP MESSAGE-----
|
|
||||||
|
|
||||||
|
|
||||||
--YFrteb74qSXmggbOxZL9dRnhymywAi--
|
|
||||||
|
|
||||||
|
|
||||||
@@ -1,46 +0,0 @@
|
|||||||
Date: Fri, 8 Jul 1994 09:21:47 -0400
|
|
||||||
From: Mail Delivery Subsystem <MAILER-DAEMON@example.org>
|
|
||||||
Subject: Returned mail: User unknown
|
|
||||||
To: <owner-ups-mib@CS.UTK.EDU>
|
|
||||||
Auto-Submitted: auto-replied
|
|
||||||
MIME-Version: 1.0
|
|
||||||
Content-Type: multipart/report; report-type=delivery-status;
|
|
||||||
boundary="JAA13167.773673707/CS.UTK.EDU"
|
|
||||||
|
|
||||||
--JAA13167.773673707/CS.UTK.EDU
|
|
||||||
content-type: text/plain; charset=us-ascii
|
|
||||||
|
|
||||||
----- The following addresses had delivery problems -----
|
|
||||||
<arathib@vnet.ibm.com> (unrecoverable error)
|
|
||||||
<wsnell@sdcc13.ucsd.edu> (unrecoverable error)
|
|
||||||
|
|
||||||
--JAA13167.773673707/CS.UTK.EDU
|
|
||||||
content-type: message/delivery-status
|
|
||||||
|
|
||||||
Reporting-MTA: dns; cs.utk.edu
|
|
||||||
|
|
||||||
Original-Recipient: rfc822;arathib@vnet.ibm.com
|
|
||||||
Final-Recipient: rfc822;arathib@vnet.ibm.com
|
|
||||||
Action: failed
|
|
||||||
Status: 5.0.0 (permanent failure)
|
|
||||||
Diagnostic-Code: smtp;
|
|
||||||
550 'arathib@vnet.IBM.COM' is not a registered gateway user
|
|
||||||
Remote-MTA: dns; vnet.ibm.com
|
|
||||||
|
|
||||||
Original-Recipient: rfc822;johnh@hpnjld.njd.hp.com
|
|
||||||
Final-Recipient: rfc822;johnh@hpnjld.njd.hp.com
|
|
||||||
Action: delayed
|
|
||||||
Status: 4.0.0 (hpnjld.njd.jp.com: host name lookup failure)
|
|
||||||
|
|
||||||
Original-Recipient: rfc822;wsnell@sdcc13.ucsd.edu
|
|
||||||
Final-Recipient: rfc822;wsnell@sdcc13.ucsd.edu
|
|
||||||
Action: failed
|
|
||||||
Status: 5.0.0
|
|
||||||
Diagnostic-Code: smtp; 550 user unknown
|
|
||||||
Remote-MTA: dns; sdcc13.ucsd.edu
|
|
||||||
|
|
||||||
--JAA13167.773673707/CS.UTK.EDU
|
|
||||||
content-type: message/rfc822
|
|
||||||
|
|
||||||
[original message goes here]
|
|
||||||
--JAA13167.773673707/CS.UTK.EDU--
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
Subject: Message from {from_addr}
|
|
||||||
From: <{from_addr}>
|
|
||||||
To: <{to_addr}>
|
|
||||||
Date: Sun, 15 Oct 2023 16:43:25 +0000
|
|
||||||
Message-ID: <Mr.78MWtlV7RAi.goCFzBhCYfy@c2.testrun.org>
|
|
||||||
Chat-Version: 1.0
|
|
||||||
Secure-Join: vc-request
|
|
||||||
Secure-Join-Invitenumber: RANDOM-TOKEN
|
|
||||||
MIME-Version: 1.0
|
|
||||||
Content-Type: multipart/mixed; boundary="Gl92xgZjOShJ5PGHntqYkoo2OK2Dvi"
|
|
||||||
|
|
||||||
|
|
||||||
--Gl92xgZjOShJ5PGHntqYkoo2OK2Dvi
|
|
||||||
Content-Type: text/plain; charset=utf-8
|
|
||||||
|
|
||||||
Buy viagra!
|
|
||||||
|
|
||||||
|
|
||||||
--Gl92xgZjOShJ5PGHntqYkoo2OK2Dvi--
|
|
||||||
|
|
||||||
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
Subject: Message from {from_addr}
|
|
||||||
From: <{from_addr}>
|
|
||||||
To: <{to_addr}>
|
|
||||||
Date: Sun, 15 Oct 2023 16:43:25 +0000
|
|
||||||
Message-ID: <Mr.78MWtlV7RAi.goCFzBhCYfy@c2.testrun.org>
|
|
||||||
Chat-Version: 1.0
|
|
||||||
Secure-Join: vc-request
|
|
||||||
Secure-Join-Invitenumber: RANDOM-TOKEN
|
|
||||||
MIME-Version: 1.0
|
|
||||||
Content-Type: multipart/mixed; boundary="Gl92xgZjOShJ5PGHntqYkoo2OK2Dvi"
|
|
||||||
|
|
||||||
|
|
||||||
--Gl92xgZjOShJ5PGHntqYkoo2OK2Dvi
|
|
||||||
Content-Type: text/plain; charset=utf-8
|
|
||||||
|
|
||||||
Secure-Join: vc-request
|
|
||||||
|
|
||||||
|
|
||||||
--Gl92xgZjOShJ5PGHntqYkoo2OK2Dvi--
|
|
||||||
|
|
||||||
|
|
||||||
@@ -1,97 +0,0 @@
|
|||||||
import importlib.resources
import itertools
import os
import random
from email import policy
from email.parser import BytesParser
from pathlib import Path

import pytest

from chatmaild.config import read_config, write_initial_config


@pytest.fixture
def make_config(tmp_path):
    """Factory fixture: build and read a chatmail config for a mail domain."""
    inipath = tmp_path.joinpath("chatmail.ini")

    def make_conf(mail_domain, settings=None):
        basedir = tmp_path.joinpath(f"vmail/{mail_domain}")
        basedir.mkdir(parents=True, exist_ok=True)
        # never mutate the caller's settings dict
        overrides = dict(settings) if settings else {}
        overrides["mailboxes_dir"] = str(basedir)
        write_initial_config(inipath, mail_domain, overrides=overrides)
        return read_config(inipath)

    return make_conf


@pytest.fixture
def example_config(make_config):
    """A ready-made config for the canonical test domain."""
    return make_config("chat.example.org")


@pytest.fixture
def maildomain(example_config):
    """Mail domain of the example config."""
    return example_config.mail_domain


@pytest.fixture
def testaddr(maildomain):
    """A fixed, dotted test address on the example domain."""
    return f"user.name@{maildomain}"


@pytest.fixture
def gencreds(maildomain):
    """Callable producing unique (address, password) credential pairs."""
    counter = itertools.count()
    next(counter)  # start account numbering at 1
    charset = "abcdefghijklmnopqrstuvwxyz1234567890"

    def make_pair(domain=None):
        domain = domain if domain else maildomain
        num = next(counter)
        randpart = "".join(random.choices(charset, k=10))
        # usernames are capped at 9 characters
        user = f"ac{num}_{randpart}"[:9]
        password = "".join(random.choices(charset, k=12))
        return f"{user}@{domain}", f"{password}"

    return make_pair


@pytest.fixture
def maildata(request):
    """Factory loading a template mail from mail-data and filling addresses."""
    try:
        datadir = importlib.resources.files(__package__).joinpath("mail-data")
    except TypeError:
        # in python3.9 or lower, the above doesn't work, so we get datadir this way:
        datadir = Path(os.getcwd()).joinpath("chatmaild/src/chatmaild/tests/mail-data")

    assert datadir.exists(), datadir

    def load(name, from_addr, to_addr, subject="[...]"):
        # Using `.read_bytes().decode()` instead of `.read_text()` to preserve newlines.
        raw = datadir.joinpath(name).read_bytes().decode()
        filled = raw.format(from_addr=from_addr, to_addr=to_addr, subject=subject)
        return BytesParser(policy=policy.SMTP).parsebytes(filled.encode())

    return load


@pytest.fixture
def mockout():
    """Collecting stand-in for the colored output helper."""

    class MockOut:
        captured_red = []
        captured_green = []
        captured_plain = []

        def red(self, msg):
            self.captured_red.append(msg)

        def green(self, msg):
            self.captured_green.append(msg)

        def __call__(self, msg):
            self.captured_plain.append(msg)

    return MockOut()
|
|
||||||
@@ -1,75 +0,0 @@
|
|||||||
import pytest

from chatmaild.config import read_config


def test_read_config_basic(example_config):
    """Fresh example config has empty privacy fields; edits are picked up on re-read."""
    assert example_config.mail_domain == "chat.example.org"
    assert not example_config.privacy_supervisor and not example_config.privacy_mail
    assert not example_config.privacy_pdo and not example_config.privacy_postal

    # edit the ini on disk (replace the default "60" send-rate with "37")
    # and verify a re-read reflects the change
    inipath = example_config._inipath
    inipath.write_text(inipath.read_text().replace("60", "37"))
    example_config = read_config(inipath)
    assert example_config.max_user_send_per_minute == 37
    assert example_config.mail_domain == "chat.example.org"


def test_read_config_basic_using_defaults(tmp_path, maildomain):
    """A minimal ini containing only mail_domain falls back to defaults."""
    inipath = tmp_path.joinpath("chatmail.ini")
    inipath.write_text(f"[params]\nmail_domain = {maildomain}")
    example_config = read_config(inipath)
    assert example_config.max_user_send_per_minute == 60
    assert example_config.filtermail_smtp_port_incoming == 10081


def test_read_config_testrun(make_config):
    """*.testrun.org domains get pre-filled privacy texts and standard settings."""
    config = make_config("something.testrun.org")
    assert config.mail_domain == "something.testrun.org"
    # privacy texts are multi-line for testrun domains
    assert len(config.privacy_postal.split("\n")) > 1
    assert len(config.privacy_supervisor.split("\n")) > 1
    assert len(config.privacy_pdo.split("\n")) > 1
    assert config.privacy_mail == "privacy@testrun.org"
    assert config.filtermail_smtp_port == 10080
    assert config.postfix_reinject_port == 10025
    assert config.max_user_send_per_minute == 60
    assert config.max_mailbox_size == "100M"
    assert config.delete_mails_after == "20"
    assert config.delete_large_after == "7"
    assert config.username_min_length == 9
    assert config.username_max_length == 9
    assert config.password_min_length == 9
    assert "privacy@testrun.org" in config.passthrough_recipients
    assert config.passthrough_senders == []


def test_config_userstate_paths(make_config, tmp_path):
    """get_user() maps addresses to maildir paths and rejects invalid/traversal input."""
    config = make_config("something.testrun.org")
    mailboxes_dir = config.mailboxes_dir
    passdb_path = config.passdb_path
    assert mailboxes_dir.name == "something.testrun.org"
    assert str(passdb_path) == "/home/vmail/passdb.sqlite"
    assert config.mail_domain == "something.testrun.org"
    # the maildir is only a computed path — nothing is created on disk yet
    path = config.get_user("user1@something.testrun.org").maildir
    assert not path.exists()
    assert path == mailboxes_dir.joinpath("user1@something.testrun.org")

    # empty, None and path-traversal addresses must all be rejected
    with pytest.raises(ValueError):
        config.get_user("")

    with pytest.raises(ValueError):
        config.get_user(None)

    with pytest.raises(ValueError):
        config.get_user("../some@something.testrun.org").maildir

    with pytest.raises(ValueError):
        config.get_user("..").maildir

    with pytest.raises(ValueError):
        config.get_user(".")


def test_config_max_message_size(make_config, tmp_path):
    """A max_message_size override is parsed into an integer."""
    config = make_config("something.testrun.org", dict(max_message_size="10000"))
    assert config.max_message_size == 10000
|
|
||||||
@@ -1,59 +0,0 @@
|
|||||||
import time

from chatmaild.delete_inactive_users import delete_inactive_users
from chatmaild.doveauth import AuthDictProxy


def test_login_timestamps(example_config):
    """Repeated same-day logins collapse to one stored timestamp value."""
    testaddr = "someuser@chat.example.org"
    user = example_config.get_user(testaddr)

    # password file needs to be set because it's mtime tracks last-login time
    user.set_password("1l2k3j1l2k3j123")
    for i in range(10):
        user.set_last_login_timestamp(86400 * 4 + i)
    # all ten writes within the same day read back as the day boundary
    assert user.get_last_login_timestamp() == 86400 * 4


def test_delete_inactive_users(example_config):
    """delete_inactive_users() removes stale maildirs and keeps recent ones intact."""
    new = time.time()
    # one second past the configured inactivity cutoff
    old = new - (example_config.delete_inactive_users_after * 86400) - 1
    dictproxy = AuthDictProxy(example_config)

    def create_user(addr, last_login):
        # register the account, materialize a maildir and stamp its last login
        dictproxy.lookup_passdb(addr, "q9mr3faue")
        user = example_config.get_user(addr)
        user.maildir.joinpath("cur").mkdir()
        user.maildir.joinpath("cur", "something").mkdir()
        user.set_last_login_timestamp(timestamp=last_login)

    # create some stale and some new accounts
    to_remove = []
    for i in range(150):
        addr = f"oldold{i:03}@chat.example.org"
        create_user(addr, last_login=old)
        to_remove.append(addr)

    remain = []
    for i in range(5):
        addr = f"newnew{i:03}@chat.example.org"
        create_user(addr, last_login=new)
        remain.append(addr)

    # check pre and post-conditions for delete_inactive_users()

    for addr in to_remove:
        assert example_config.get_user(addr).maildir.exists()

    delete_inactive_users(example_config)

    # no stale account directory may survive
    for p in example_config.mailboxes_dir.iterdir():
        assert not p.name.startswith("old")

    for addr in to_remove:
        assert not example_config.get_user(addr).maildir.exists()

    # recent accounts keep their maildir and password file
    for addr in remain:
        userdir = example_config.get_user(addr).maildir
        assert userdir.exists()
        assert userdir.joinpath("password").read_text()
|
|
||||||
@@ -1,150 +0,0 @@
|
|||||||
import io
|
|
||||||
import json
|
|
||||||
import queue
|
|
||||||
import threading
|
|
||||||
import traceback
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
import chatmaild.doveauth
|
|
||||||
from chatmaild.doveauth import (
|
|
||||||
AuthDictProxy,
|
|
||||||
is_allowed_to_create,
|
|
||||||
)
|
|
||||||
from chatmaild.newemail import create_newemail_dict
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def dictproxy(example_config):
|
|
||||||
return AuthDictProxy(config=example_config)
|
|
||||||
|
|
||||||
|
|
||||||
def test_basic(dictproxy, gencreds):
|
|
||||||
addr, password = gencreds()
|
|
||||||
dictproxy.lookup_passdb(addr, password)
|
|
||||||
data = dictproxy.lookup_userdb(addr)
|
|
||||||
assert data
|
|
||||||
data2 = dictproxy.lookup_passdb(addr, password)
|
|
||||||
assert data == data2
|
|
||||||
|
|
||||||
|
|
||||||
def test_iterate_addresses(dictproxy):
|
|
||||||
addresses = []
|
|
||||||
|
|
||||||
for i in range(10):
|
|
||||||
addresses.append(f"asdf1234{i}@chat.example.org")
|
|
||||||
dictproxy.lookup_passdb(addresses[-1], "q9mr3faue")
|
|
||||||
|
|
||||||
res = dictproxy.iter_userdb()
|
|
||||||
assert set(res) == set(addresses)
|
|
||||||
|
|
||||||
|
|
||||||
def test_invalid_username_length(example_config):
|
|
||||||
config = example_config
|
|
||||||
config.username_min_length = 6
|
|
||||||
config.username_max_length = 10
|
|
||||||
password = create_newemail_dict(config)["password"]
|
|
||||||
assert not is_allowed_to_create(config, f"a1234@{config.mail_domain}", password)
|
|
||||||
assert is_allowed_to_create(config, f"012345@{config.mail_domain}", password)
|
|
||||||
assert is_allowed_to_create(config, f"0123456@{config.mail_domain}", password)
|
|
||||||
assert is_allowed_to_create(config, f"0123456789@{config.mail_domain}", password)
|
|
||||||
assert not is_allowed_to_create(
|
|
||||||
config, f"0123456789x@{config.mail_domain}", password
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def test_dont_overwrite_password_on_wrong_login(dictproxy):
|
|
||||||
"""Test that logging in with a different password doesn't create a new user"""
|
|
||||||
res = dictproxy.lookup_passdb(
|
|
||||||
"newuser12@chat.example.org", "kajdlkajsldk12l3kj1983"
|
|
||||||
)
|
|
||||||
assert res["password"]
|
|
||||||
res2 = dictproxy.lookup_passdb("newuser12@chat.example.org", "kajdslqwe")
|
|
||||||
# this function always returns a password hash, which is actually compared by dovecot.
|
|
||||||
assert res["password"] == res2["password"]
|
|
||||||
|
|
||||||
|
|
||||||
def test_nocreate_file(monkeypatch, tmpdir, dictproxy):
|
|
||||||
p = tmpdir.join("nocreate")
|
|
||||||
p.write("")
|
|
||||||
monkeypatch.setattr(chatmaild.doveauth, "NOCREATE_FILE", str(p))
|
|
||||||
dictproxy.lookup_passdb("newuser12@chat.example.org", "zequ0Aimuchoodaechik")
|
|
||||||
assert not dictproxy.lookup_userdb("newuser12@chat.example.org")
|
|
||||||
|
|
||||||
|
|
||||||
def test_handle_dovecot_request(dictproxy):
|
|
||||||
transactions = {}
|
|
||||||
# Test that password can contain ", ', \ and /
|
|
||||||
msg = (
|
|
||||||
'Lshared/passdb/laksjdlaksjdlak\\\\sjdlk\\"12j\\\'3l1/k2j3123"'
|
|
||||||
"some42123@chat.example.org\tsome42123@chat.example.org"
|
|
||||||
)
|
|
||||||
res = dictproxy.handle_dovecot_request(msg, transactions)
|
|
||||||
assert res
|
|
||||||
assert res[0] == "O" and res.endswith("\n")
|
|
||||||
userdata = json.loads(res[1:].strip())
|
|
||||||
assert userdata["home"].endswith("chat.example.org/some42123@chat.example.org")
|
|
||||||
assert userdata["uid"] == userdata["gid"] == "vmail"
|
|
||||||
assert userdata["password"].startswith("{SHA512-CRYPT}")
|
|
||||||
|
|
||||||
|
|
||||||
def test_handle_dovecot_protocol_hello_is_skipped(example_config, caplog):
|
|
||||||
dictproxy = AuthDictProxy(config=example_config)
|
|
||||||
rfile = io.BytesIO(b"H3\t2\t0\t\tauth\n")
|
|
||||||
wfile = io.BytesIO()
|
|
||||||
dictproxy.loop_forever(rfile, wfile)
|
|
||||||
assert wfile.getvalue() == b""
|
|
||||||
assert not caplog.messages
|
|
||||||
|
|
||||||
|
|
||||||
def test_handle_dovecot_protocol_user_not_exists(example_config):
    """A userdb lookup for an unknown address yields the 'N' (not found) reply."""
    proxy = AuthDictProxy(config=example_config)
    request = (
        b"H3\t2\t0\t\tauth\nLshared/userdb/foobar@chat.example.org\tfoobar@chat.example.org\n"
    )
    out = io.BytesIO()
    proxy.loop_forever(io.BytesIO(request), out)
    assert out.getvalue() == b"N\n"
def test_handle_dovecot_protocol_iterate(gencreds, example_config):
    """An iterate request lists all known userdb entries, one per line."""
    proxy = AuthDictProxy(config=example_config)
    proxy.lookup_passdb("asdf00000@chat.example.org", "q9mr3faue")
    proxy.lookup_passdb("asdf11111@chat.example.org", "q9mr3faue")
    out = io.BytesIO()
    proxy.loop_forever(io.BytesIO(b"H3\t2\t0\t\tauth\nI0\t0\tshared/userdb/"), out)
    lines = out.getvalue().decode("ascii").split("\n")
    assert "Oshared/userdb/asdf00000@chat.example.org\t" in lines
    assert "Oshared/userdb/asdf11111@chat.example.org\t" in lines
    # the listing is terminated by an empty line
    assert not lines[2]
def test_50_concurrent_lookups_different_accounts(gencreds, dictproxy):
    """lookup_passdb must be safe to call from many threads at once."""
    num_threads, req_per_thread = 50, 5
    outcomes = queue.Queue()

    def worker():
        # each worker creates several fresh accounts; any traceback is
        # reported through the queue, a None signals success
        for _ in range(req_per_thread):
            addr, password = gencreds()
            try:
                dictproxy.lookup_passdb(addr, password)
            except Exception:
                outcomes.put(traceback.format_exc())
            else:
                outcomes.put(None)

    threads = [
        threading.Thread(target=worker, daemon=True) for _ in range(num_threads)
    ]
    print(f"created {num_threads} threads, starting them and waiting for results")
    for thread in threads:
        thread.start()

    for _ in range(num_threads * req_per_thread):
        failure = outcomes.get()
        if failure is not None:
            pytest.fail(f"concurrent lookup failed\n{failure}")
@@ -1,39 +0,0 @@
|
|||||||
import threading
|
|
||||||
|
|
||||||
from chatmaild.filedict import FileDict, write_bytes_atomic
|
|
||||||
|
|
||||||
|
|
||||||
def test_basic(tmp_path):
    """FileDict round-trips values through modify() and read()."""
    fdict = FileDict(tmp_path.joinpath("metadata"))
    assert fdict.read() == {}
    with fdict.modify() as data:
        data["devicetoken"] = [1, 2, 3]
        data["456"] = 4.2
    stored = fdict.read()
    assert stored["devicetoken"] == [1, 2, 3]
    assert stored["456"] == 4.2
def test_bad_marshal_file(tmp_path, caplog):
    """Unparseable on-disk content reads as empty and is logged as corrupt."""
    fdict = FileDict(tmp_path.joinpath("metadata"))
    fdict.path.write_bytes(b"l12k3l12k3l")
    assert fdict.read() == {}
    assert "corrupt" in caplog.records[0].msg
def test_write_bytes_atomic_concurrent(tmp_path):
    """Concurrent atomic writers must leave exactly one intact file behind.

    Each thread writes a distinct payload; whichever write lands last must
    fully replace the previous content, and no temp files may remain.
    """
    p = tmp_path.joinpath("somefile.ext")
    write_bytes_atomic(p, b"hello")

    threads = []
    for i in range(30):
        content = f"hello{i}".encode("ascii")
        # pass the payload via args= so each thread writes its own content;
        # a plain `lambda: write_bytes_atomic(p, content)` late-binds
        # `content` and threads may all write the final loop value instead
        t = threading.Thread(target=write_bytes_atomic, args=(p, content))
        t.start()
        threads.append(t)

    for t in threads:
        t.join()

    # some thread's payload won the race; the original content is gone
    assert p.read_text().strip() != "hello"
    # no leftover temporary files from the atomic-replace scheme
    assert len(list(p.parent.iterdir())) == 1
@@ -1,348 +0,0 @@
|
|||||||
import pytest
|
|
||||||
|
|
||||||
from chatmaild.filtermail import (
|
|
||||||
IncomingBeforeQueueHandler,
|
|
||||||
OutgoingBeforeQueueHandler,
|
|
||||||
SendRateLimiter,
|
|
||||||
check_armored_payload,
|
|
||||||
check_encrypted,
|
|
||||||
is_securejoin,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
def maildomain():
    """Static mail domain so offline tests need no real chatmail instance."""
    return "chatmail.example.org"
@pytest.fixture
def handler(make_config, maildomain):
    """Outgoing before-queue handler bound to a throwaway config."""
    return OutgoingBeforeQueueHandler(make_config(maildomain))
@pytest.fixture
def inhandler(make_config, maildomain):
    """Incoming before-queue handler bound to a throwaway config."""
    return IncomingBeforeQueueHandler(make_config(maildomain))
def test_reject_forged_from(maildata, gencreds, handler):
    """Mail whose From header differs from the envelope sender is rejected."""

    class env:
        mail_from = gencreds()[0]
        rcpt_tos = [gencreds()[0]]

    # a message whose From matches the envelope sender passes the filter
    to_addr = gencreds()[0]
    env.content = maildata(
        "encrypted.eml", from_addr=env.mail_from, to_addr=to_addr
    ).as_bytes()
    assert not handler.check_DATA(envelope=env)

    # a forged From header is rejected with a 5xx error
    env.content = maildata(
        "encrypted.eml", from_addr="forged@c3.testrun.org", to_addr=to_addr
    ).as_bytes()
    rejection = handler.check_DATA(envelope=env)
    assert "500" in rejection
def test_filtermail_no_encryption_detection(maildata):
    """Plaintext and pretend-encrypted mails are not detected as encrypted."""
    plain = maildata(
        "plain.eml", from_addr="some@example.org", to_addr="other@example.org"
    )
    assert not check_encrypted(plain)

    # https://xkcd.com/1181/
    fake = maildata(
        "fake-encrypted.eml", from_addr="some@example.org", to_addr="other@example.org"
    )
    assert not check_encrypted(fake)
def test_filtermail_securejoin_detection(maildata):
    """Only genuine securejoin vc-request messages are recognized."""
    genuine = maildata(
        "securejoin-vc.eml", from_addr="some@example.org", to_addr="other@example.org"
    )
    assert is_securejoin(genuine)

    fake = maildata(
        "securejoin-vc-fake.eml",
        from_addr="some@example.org",
        to_addr="other@example.org",
    )
    assert not is_securejoin(fake)
def test_filtermail_encryption_detection(maildata):
    """A properly OpenPGP-encrypted message is detected as encrypted."""
    encrypted = maildata(
        "encrypted.eml",
        from_addr="1@example.org",
        to_addr="2@example.org",
        subject="Subject does not matter, will be replaced anyway",
    )
    assert check_encrypted(encrypted)
def test_filtermail_no_literal_packets(maildata):
    """A bare OpenPGP literal packet does not count as an encrypted mail."""
    literal = maildata(
        "literal.eml", from_addr="1@example.org", to_addr="2@example.org"
    )
    assert not check_encrypted(literal)
def test_filtermail_unencrypted_mdn(maildata, gencreds):
    """Unencrypted read receipts (MDNs) are not treated as encrypted."""
    sender = gencreds()[0]
    receiver = gencreds()[0] + ".other"
    mdn = maildata("mdn.eml", from_addr=sender, to_addr=receiver)

    assert not check_encrypted(mdn)
def test_send_rate_limiter():
    """With a limit of 10, sending must be refused at the 12th attempt."""
    limiter = SendRateLimiter()
    for attempt in range(100):
        if not limiter.is_sending_allowed("some@example.org", 10):
            # attempts 0..10 were allowed; index 11 is the first refusal
            assert attempt == 11
            break
        if attempt > 10:
            pytest.fail("limiter didn't work")
def test_cleartext_excempt_privacy(maildata, gencreds, handler):
    """Cleartext passes only if every recipient is a passthrough address."""
    from_addr = gencreds()[0]
    to_addr = "privacy@testrun.org"
    handler.config.passthrough_recipients = [to_addr]
    false_to = "privacy@something.org"

    msg = maildata("plain.eml", from_addr=from_addr, to_addr=to_addr)

    class envelope_ok:
        mail_from = from_addr
        rcpt_tos = [to_addr]
        content = msg.as_bytes()

    # every recipient is passthrough -> accepted (None returned)
    assert not handler.check_DATA(envelope=envelope_ok)

    class envelope_mixed:
        mail_from = from_addr
        rcpt_tos = [to_addr, false_to]
        content = msg.as_bytes()

    # one non-passthrough recipient -> rejected with a 523 error
    assert "523" in handler.check_DATA(envelope=envelope_mixed)
def test_cleartext_self_send_autocrypt_setup_message(maildata, gencreds, handler):
    """A self-addressed Autocrypt Setup Message may go out unencrypted."""
    addr = gencreds()[0]
    msg = maildata("asm.eml", from_addr=addr, to_addr=addr)

    class envelope:
        mail_from = addr
        rcpt_tos = [addr]
        content = msg.as_bytes()

    assert not handler.check_DATA(envelope=envelope)
def test_cleartext_send_fails(maildata, gencreds, handler):
    """Ordinary cleartext mail between two users is refused with 523."""
    sender = gencreds()[0]
    receiver = gencreds()[0]
    msg = maildata("plain.eml", from_addr=sender, to_addr=receiver)

    class envelope:
        mail_from = sender
        rcpt_tos = [receiver]
        content = msg.as_bytes()

    rejection = handler.check_DATA(envelope=envelope)
    assert "523 Encryption Needed" in rejection
def test_cleartext_incoming_fails(maildata, gencreds, inhandler):
    """Incoming cleartext is refused unless the user opted in to receive it."""
    sender = gencreds()[0]
    receiver, password = gencreds()

    msg = maildata("plain.eml", from_addr=sender, to_addr=receiver)

    class envelope:
        mail_from = sender
        rcpt_tos = [receiver]
        content = msg.as_bytes()

    user = inhandler.config.get_user(receiver)
    user.set_password(password)
    rejection = inhandler.check_DATA(envelope=envelope)
    assert "523 Encryption Needed" in rejection

    # after opting in, the very same message is accepted
    user.allow_incoming_cleartext()
    assert not inhandler.check_DATA(envelope=envelope)
def test_cleartext_incoming_mailer_daemon(maildata, gencreds, inhandler):
    """Cleartext bounces from mailer-daemon are always let through."""
    daemon_addr = "mailer-daemon@example.org"
    receiver = gencreds()[0]

    msg = maildata("mailer-daemon.eml", from_addr=daemon_addr, to_addr=receiver)

    class envelope:
        mail_from = daemon_addr
        rcpt_tos = [receiver]
        content = msg.as_bytes()

    assert not inhandler.check_DATA(envelope=envelope)
def test_cleartext_passthrough_domains(maildata, gencreds, handler):
    """A passthrough entry starting with '@' whitelists a whole domain."""
    sender = gencreds()[0]
    to_addr = "privacy@x.y.z"
    handler.config.passthrough_recipients = ["@x.y.z"]
    false_to = "something@x.y"

    msg = maildata("plain.eml", from_addr=sender, to_addr=to_addr)

    class envelope_ok:
        mail_from = sender
        rcpt_tos = [to_addr]
        content = msg.as_bytes()

    # recipient domain is whitelisted -> accepted (None returned)
    assert not handler.check_DATA(envelope=envelope_ok)

    class envelope_mixed:
        mail_from = sender
        rcpt_tos = [to_addr, false_to]
        content = msg.as_bytes()

    # a recipient outside the whitelisted domain -> rejected with 523
    assert "523" in handler.check_DATA(envelope=envelope_mixed)
def test_cleartext_passthrough_senders(gencreds, handler, maildata):
    """A sender listed in passthrough_senders may send cleartext anywhere."""
    acc1 = gencreds()[0]
    to_addr = "recipient@something.org"
    handler.config.passthrough_senders = [acc1]

    msg = maildata("plain.eml", from_addr=acc1, to_addr=to_addr)

    class env:
        mail_from = acc1
        # rcpt_tos must be a list of addresses, consistent with every other
        # envelope in this file (it was a bare string before, which any
        # recipient iteration would walk character by character)
        rcpt_tos = [to_addr]
        content = msg.as_bytes()

    # assert that None/no error is returned
    assert not handler.check_DATA(envelope=env)
def test_check_armored_payload():
    """check_armored_payload accepts real ASCII-armored PGP messages only.

    Acceptance must be robust to stripping trailing blank lines one at a
    time; junk or checksum-only armor bodies must be rejected.  Uses plain
    truthy asserts instead of `== True`/`== False` comparisons (PEP 8 E712).
    """
    payload = """-----BEGIN PGP MESSAGE-----\r
\r
wU4DSqFx0d1yqAoSAQdAYkX/ZN/Az4B0k7X47zKyWrXxlDEdS3WOy0Yf2+GJTFgg\r
Zk5ql0mLG8Ze+ZifCS0XMO4otlemSyJ0K1ZPdFMGzUDBTgNqzkFabxXoXRIBB0AM\r
755wlX41X6Ay3KhnwBq7yEqSykVH6F3x11iHPKraLCAGZoaS8bKKNy/zg5slda1X\r
pt14b4aC1VwtSnYhcRRELNLD/wE2TFif+g7poMmFY50VyMPLYjVP96Z5QCT4+z4H\r
Ikh/pRRN8S3JNMrRJHc6prooSJmLcx47Y5un7VFy390MsJ+LiUJuQMDdYWRAinfs\r
Ebm89Ezjm7F03qbFPXE0X4ZNzVXS/eKO0uhJQdiov/vmbn41rNtHmNpqjaO0vi5+\r
sS9tR7yDUrIXiCUCN78eBLVioxtktsPZm5cDORbQWzv+7nmCEz9/JowCUcBVdCGn\r
1ofOaH82JCAX/cRx08pLaDNj6iolVBsi56Dd+2bGxJOZOG2AMcEyz0pXY0dOAJCD\r
iUThcQeGIdRnU3j8UBcnIEsjLu2+C+rrwMZQESMWKnJ0rnqTk0pK5kXScr6F/L0L\r
UE49ccIexNm3xZvYr5drszr6wz3Tv5fdue87P4etBt90gF/Vzknck+g1LLlkzZkp\r
d8dI0k2tOSPjUbDPnSy1x+X73WGpPZmj0kWT+RGvq0nH6UkJj3AQTG2qf1T8jK+3\r
rTp3LR9vDkMwDjX4R8SA9c0wdnUzzr79OYQC9lTnzcx+fM6BBmgQ2GrS33jaFLp7\r
L6/DFpCl5zhnPjM/2dKvMkw/Kd6XS/vjwsO405FQdjSDiQEEAZA+ZvAfcjdccbbU\r
yCO+x0QNdeBsufDVnh3xvzuWy4CICdTQT4s1AWRPCzjOj+SGmx5WqCLWfsd8Ma0+\r
w/C7SfTYu1FDQILLM+llpq1M/9GPley4QZ8JQjo262AyPXsPF/OW48uuZz0Db1xT\r
Yh4iHBztj4VSdy7l2+IyaIf7cnL4EEBFxv/MwmVDXvDlxyvfAfIsd3D9SvJESzKZ\r
VWDYwaocgeCN+ojKu1p885lu1EfRbX3fr3YO02K5/c2JYDkc0Py0W3wUP/J1XUax\r
pbKpzwlkxEgtmzsGqsOfMJqBV3TNDrOA2uBsa+uBqP5MGYLZ49S/4v/bW9I01Cr1\r
D2ZkV510Y1Vgo66WlP8mRqOTyt/5WRhPD+MxXdk67BNN/PmO6tMlVoJDuk+XwWPR\r
t2TvNaND/yabT9eYI55Og4fzKD6RIjouUX8DvKLkm+7aXxVs2uuLQ3Jco3O82z55\r
dbShU1jYsrw9oouXUz06MHPbkdhNbF/2hfhZ2qA31sNeovJw65iUv7sDKX3LVWgJ\r
10jlywcDwqlU8CO7WC9lGixYTbnOkYZpXCGEl8e6Jbs79l42YFo4ogYpFK1NXFhV\r
kOXRmDf/wmfj+c/ld3L2PkvwlgofhCudOQknZbo3ub1gjiTn7L+lMGHIj/3suMIl\r
ID4EUxAXScIM1ZEz2fjtW5jATlqYcLjLTbf/olw6HFyPNH+9IssqXeZNKnGwPUB9\r
3lTXsg0tpzl+x7F/2WjEw1DSNhjC0KnHt1vEYNMkUGDGFdN9y3ERLqX/FIgiASUb\r
bTvAVupnAK3raBezGmhrs6LsQtLS9P0VvQiLU3uDhMqw8Z4SISLpcD+NnVBHzQqm\r
6W5Qn/8xsCL6av18yUVTi2G3igt3QCNoYx9evt2ZcIkNoyyagUVjfZe5GHXh8Dnz\r
GaBXW/hg3HlXLRGaQu4RYCzBMJILcO25OhZOg6jbkCLiEexQlm2e9krB5cXR49Al\r
UN4fiB0KR9JyG2ayUdNJVkXZSZLnHyRgiaadlpUo16LVvw==\r
=b5Kp\r
-----END PGP MESSAGE-----\r
\r
\r
"""

    assert check_armored_payload(payload)

    # stripping trailing blank lines one by one must not change the verdict
    payload = payload.removesuffix("\r\n")
    assert check_armored_payload(payload)

    payload = payload.removesuffix("\r\n")
    assert check_armored_payload(payload)

    payload = payload.removesuffix("\r\n")
    assert check_armored_payload(payload)

    # armor whose body is not a valid base64/packet stream is rejected
    payload = """-----BEGIN PGP MESSAGE-----\r
\r
HELLOWORLD
-----END PGP MESSAGE-----\r
\r
"""
    assert not check_armored_payload(payload)

    # armor containing only a checksum-style "=" line is rejected
    payload = """-----BEGIN PGP MESSAGE-----\r
\r
=njUN
-----END PGP MESSAGE-----\r
\r
"""
    assert not check_armored_payload(payload)

    # Test payload using partial body length
    # as generated by GopenPGP.
    payload = """-----BEGIN PGP MESSAGE-----\r
\r
wV4DdCVjRfOT3TQSAQdAY5+pjT6mlCxPGdR3be4w7oJJRUGIPI/Vnh+mJxGSm34w\r
LNlVc89S1g22uQYFif2sUJsQWbpoHpNkuWpkSgOaHmNvrZiY/YU5iv+cZ3LbmtUG\r
0uoBisSHh9O1c+5sYZSbrvYZ1NOwlD7Fv/U5/Mw4E5+CjxfdgNGp5o3DDddzPK78\r
jseDhdSXxnaiIJC93hxNX6R1RPt3G2gukyzx69wciPQShcF8zf3W3o75Ed7B8etV\r
QEeB16xzdFhKa9JxdjTu3osgCs21IO7wpcFkjc7nZzlW6jPnELJJaNmv4yOOCjMp\r
6YAkaN/BkL+jHTznHDuDsT5ilnTXpwHDU1Cm9PIx/KFcNCQnIB+2DcdIHPHUH1ci\r
jvqoeXAVWjKXEjS7PqPFuP/xGbrWG2ugs+toXJOKbgRkExvKs1dwPFKrgghvCVbW\r
AcKejQKAPArLwpkA7aD875TZQShvGt74fNs45XBlGOYOnNOAJ1KAmzrXLIDViyyB\r
kDsmTBk785xofuCkjBpXSe6vsMprPzCteDfaUibh8FHeJjucxPerwuOPEmnogNaf\r
YyL4+iy8H8I9/p7pmUqILprxTG0jTOtlk0bTVzeiF56W1xbtSEMuOo4oFbQTyOM2\r
bKXaYo774Jm+rRtKAnnI2dtf9RpK19cog6YNzfYjesLKbXDsPZbN5rmwyFiCvvxC\r
kQ6JLob+B2fPdY2gzy7LypxktS8Zi1HJcWDHJGVmQodaDLqKUObb4M26bXDe6oxI\r
NS8PJz5exVbM3KhZnUOEn6PJRBBf5a/ZqxlhZPcQo/oBuhKpBRpO5kSDwPIUByu3\r
UlXLSkpMqe9pUarAOEuQjfl2RVY7U+RrQYp4YP5keMO+i8NCefAFbowTTufO1JIq\r
2nVgCi/QVnxZyEc9OYt/8AE3g4cdojE+vsSDifZLSWYIetpfrohHv3dT3StD1QRG\r
0QE6qq6oKpg/IL0cjvuX4c7a7bslv2fXp8t75y37RU6253qdIebhxc/cRhPbc/yu\r
p0YLyD4SrvKTLP2ZV95jT4IPEpqm4AN3QmiOzdtqR2gLyb62L8QfqI/FdwsIiRiM\r
hqydwoqt/lfSqG1WKPh+6EkMkH+TDiCC1BQdbN1MNcyUtcjb35PR2c8Ld2TF3guA\r
jLIqMt/Vb7hBoMb2FcsOYY25ka9oV62OwgKWLXnFzk+modMR5fzb4kxVVAYEqP+D\r
T5KO1Vs76v1fyPGOq6BbBCvLwTqe/e6IZInJles4v5jrhnLcGKmNGivCUDe6X6NY\r
UKNt5RsZllwDQpaAb5dMNhyrk8SgIE7TBI7rvqIdUCE52Vy+0JDxFg5olRpFUfO6\r
/MyTW3Yo/ekk/npHr7iYYqJTCc21bDGLWQcIo/XO7WPxrKNWGBNPFnkRdw0MaKr4\r
+cEM3V8NFnSEpC12xA+RX/CezuJtwXZK5MpG76eYqMO6qyC+c25YcFecEufDZDxx\r
ZLqRszVRyxyWPtk/oIeQK2v9wOqY6N9/ff01gHz69vqYqN5bUw/QKZsmx1zW+gPw\r
6x2tDK2BHeYl182gCbhlKISRFwCtbjqZSkiKWao/VtygHkw0fK34avJuyQ/X9YaN\r
BRy+7Lf3VA53pnB5WJ1xwRXN8VDvmZeXzv2krHveCMemj0OjnRoCLu117xN0A5m9\r
Fm/RoDix5PolDHtWTtr2m1n2hp2LHnj8at9lFEd0SKhAYHVL9KjzycwWODZRXt+x\r
zGDDuooEeTvdY5NLyKcl4gETz1ZP4Ez5jGGjhPSwSpq1mU7UaJ9ZXXdr4KHyifW6\r
ggNzNsGhXTap7IWZpTtqXABydfiBshmH2NjqtNDwBweJVSgP10+r0WhMWlaZs6xl\r
V3o5yskJt6GlkwpJxZrTvN6Tiww/eW7HFV6NGf7IRSWY5tJc/iA7/92tOmkdvJ1q\r
myLbG7cJB787QjplEyVe2P/JBO6xYvbkJLf9Q+HaviTO25rugRSrYsoKMDfO8VlQ\r
1CcnTPVtApPZJEQzAWJEgVAM8uIlkqWJJMgyWT34sTkdBeCUFGloXQFs9Yxd0AGf\r
/zHEkYZSTKpVSvAIGu4=\r
=6iHb\r
-----END PGP MESSAGE-----\r
"""
    assert check_armored_payload(payload)
@@ -1,64 +0,0 @@
|
|||||||
import time
|
|
||||||
|
|
||||||
from chatmaild.doveauth import AuthDictProxy
|
|
||||||
from chatmaild.lastlogin import (
|
|
||||||
LastLoginDictProxy,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def test_handle_dovecot_request_last_login(testaddr, example_config):
    """A B/S/C dict transaction records the day-rounded last-login time."""
    dictproxy = LastLoginDictProxy(config=example_config)

    authproxy = AuthDictProxy(config=example_config)
    authproxy.lookup_passdb(testaddr, "1l2k3j1l2k3jl123")

    transactions = {}

    # begin a transaction
    tx = "1111"
    res = dictproxy.handle_dovecot_request(f"B{tx}\t{testaddr}", transactions)
    assert not res
    assert transactions == {tx: dict(addr=testaddr, res="O\n")}

    # a SET inside the transaction stores the timestamp, rounded to days
    user = dictproxy.config.get_user(testaddr)
    timestamp = int(time.time())
    res = dictproxy.handle_dovecot_request(
        f"S{tx}\tshared/last-login/{testaddr}\t{timestamp}", transactions
    )
    assert not res
    assert len(transactions) == 1
    read_timestamp = user.get_last_login_timestamp()
    assert read_timestamp == timestamp // 86400 * 86400

    # committing the transaction acknowledges it and clears the state
    res = dictproxy.handle_dovecot_request(f"C{tx}", transactions)
    assert res == "O\n"
    assert len(transactions) == 0
def test_handle_dovecot_request_last_login_echobot(example_config):
    """Last-login updates for the echo bot account are ignored."""
    dictproxy = LastLoginDictProxy(config=example_config)

    authproxy = AuthDictProxy(config=example_config)
    echo_addr = f"echo@{example_config.mail_domain}"
    authproxy.lookup_passdb(echo_addr, "ignore")
    user = dictproxy.config.get_user(echo_addr)

    transactions = {}

    # begin a transaction for the echo bot address
    tx = "1111"
    res = dictproxy.handle_dovecot_request(f"B{tx}\t{echo_addr}", transactions)
    assert not res
    assert transactions == {tx: dict(addr=echo_addr, res="O\n")}

    # a SET is accepted on the protocol level ...
    timestamp = int(time.time())
    res = dictproxy.handle_dovecot_request(
        f"S{tx}\tshared/last-login/{echo_addr}\t{timestamp}", transactions
    )
    assert not res
    assert len(transactions) == 1
    # ... but nothing is recorded for the echo bot
    assert user.get_last_login_timestamp() is None
@@ -1,329 +0,0 @@
|
|||||||
import io
|
|
||||||
import time
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
import requests
|
|
||||||
|
|
||||||
from chatmaild.metadata import (
|
|
||||||
Metadata,
|
|
||||||
MetadataDictProxy,
|
|
||||||
)
|
|
||||||
from chatmaild.notifier import (
|
|
||||||
Notifier,
|
|
||||||
NotifyThread,
|
|
||||||
PersistentQueueItem,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
def notifier(metadata):
    """Notifier with a fresh pending-notifications queue dir under vmail."""
    queue_dir = metadata.vmail_dir.joinpath("pending_notifications")
    queue_dir.mkdir()
    return Notifier(queue_dir)
@pytest.fixture
def metadata(tmp_path):
    """Metadata store backed by a temporary vmail directory."""
    vmail_dir = tmp_path.joinpath("vmaildir")
    vmail_dir.mkdir()
    return Metadata(vmail_dir)
@pytest.fixture
def dictproxy(notifier, metadata):
    """MetadataDictProxy wired to the test notifier and metadata store."""
    return MetadataDictProxy(notifier=notifier, metadata=metadata)
@pytest.fixture
def testaddr2():
    """A second, distinct test address."""
    return "user2@example.org"
@pytest.fixture
def token():
    """A fixed device token used across the tests below."""
    return "01234"
def get_mocked_requests(statuslist):
    """Build a requests-like stub whose post() replays statuslist in order.

    Every call is recorded as a (url, data, timeout) tuple on the class-level
    ``requests`` list.  An Exception instance in statuslist is raised instead
    of being returned as a status code.
    """

    class ReqMock:
        requests = []

        def post(self, url, data, timeout):
            self.requests.append((url, data, timeout))
            outcome = statuslist.pop(0)
            if isinstance(outcome, Exception):
                raise outcome

            class Result:
                status_code = outcome

            return Result()

    return ReqMock()
def test_metadata_persistence(tmp_path, testaddr, testaddr2):
    """Two Metadata instances over the same dir see each other's tokens."""
    metadata1 = Metadata(tmp_path)
    metadata2 = Metadata(tmp_path)
    assert not metadata1.get_tokens_for_addr(testaddr)
    assert not metadata2.get_tokens_for_addr(testaddr)

    # writes through one instance are visible through the other
    metadata1.add_token_to_addr(testaddr, "01234")
    metadata1.add_token_to_addr(testaddr2, "456")
    assert metadata2.get_tokens_for_addr(testaddr) == ["01234"]
    assert metadata2.get_tokens_for_addr(testaddr2) == ["456"]

    # ... and so are removals
    metadata2.remove_token_from_addr(testaddr, "01234")
    assert not metadata1.get_tokens_for_addr(testaddr)
    assert metadata1.get_tokens_for_addr(testaddr2) == ["456"]
def test_remove_nonexisting(metadata, tmp_path, testaddr):
    """Removing an unknown token leaves existing tokens untouched."""
    metadata.add_token_to_addr(testaddr, "123")
    metadata.remove_token_from_addr(testaddr, "1l23k1l2k3")
    assert metadata.get_tokens_for_addr(testaddr) == ["123"]
def test_notifier_remove_without_set(metadata, testaddr):
    """Removing a token for an address that never had any is a no-op."""
    metadata.remove_token_from_addr(testaddr, "123")
    assert not metadata.get_tokens_for_addr(testaddr)
def test_handle_dovecot_request_lookup_fails(dictproxy, testaddr):
    """An unknown metadata key lookup yields the 'N' (not found) reply."""
    transactions = {}
    reply = dictproxy.handle_dovecot_request(
        f"Lpriv/123/chatmail\t{testaddr}", transactions
    )
    assert reply == "N\n"
def test_handle_dovecot_request_happy_path(dictproxy, testaddr, token):
    """Setting a devicetoken and a new-message event drives the notifier."""
    metadata = dictproxy.metadata
    notifier = dictproxy.notifier
    transactions = {}

    # set device token in a transaction
    tx = "1111"
    res = dictproxy.handle_dovecot_request(f"B{tx}\t{testaddr}", transactions)
    assert not res and not metadata.get_tokens_for_addr(testaddr)
    assert transactions == {tx: dict(addr=testaddr, res="O\n")}

    res = dictproxy.handle_dovecot_request(
        f"S{tx}\tpriv/guid00/devicetoken\t{token}", transactions
    )
    assert not res
    assert len(transactions) == 1
    assert metadata.get_tokens_for_addr(testaddr) == [token]

    res = dictproxy.handle_dovecot_request(f"C{tx}", transactions)
    assert res == "O\n"
    assert len(transactions) == 0
    assert metadata.get_tokens_for_addr(testaddr) == [token]

    # a messagenew event queues a pending notification for the token
    tx2 = "2222"
    assert dictproxy.handle_dovecot_request(f"B{tx2}\t{testaddr}", transactions) is None
    msg = f"S{tx2}\tpriv/guid00/messagenew"
    assert dictproxy.handle_dovecot_request(msg, transactions) is None
    queue_item = notifier.retry_queues[0].get()[1]
    assert queue_item.token == token
    assert dictproxy.handle_dovecot_request(f"C{tx2}", transactions) == "O\n"
    assert not transactions
    # the persistent queue file survives until delivery succeeds
    assert queue_item.path.exists()
def test_handle_dovecot_protocol_set_devicetoken(dictproxy):
    """A full HELLO/B/S/C exchange stores the device token."""
    protocol_lines = [
        b"HELLO",
        b"Btx00\tuser@example.org",
        b"Stx00\tpriv/guid00/devicetoken\t01234",
        b"Ctx00",
    ]
    wfile = io.BytesIO()
    dictproxy.loop_forever(io.BytesIO(b"\n".join(protocol_lines)), wfile)
    assert wfile.getvalue() == b"O\n"
    assert dictproxy.metadata.get_tokens_for_addr("user@example.org") == ["01234"]
def test_handle_dovecot_protocol_set_get_devicetoken(dictproxy):
    """A stored device token can be read back via a lookup request."""
    protocol_lines = [
        b"HELLO",
        b"Btx00\tuser@example.org",
        b"Stx00\tpriv/guid00/devicetoken\t01234",
        b"Ctx00",
    ]
    wfile = io.BytesIO()
    dictproxy.loop_forever(io.BytesIO(b"\n".join(protocol_lines)), wfile)
    assert dictproxy.metadata.get_tokens_for_addr("user@example.org") == ["01234"]
    assert wfile.getvalue() == b"O\n"

    # a subsequent lookup returns the stored token
    lookup = b"\n".join([b"HELLO", b"Lpriv/0123/devicetoken\tuser@example.org"])
    wfile = io.BytesIO()
    dictproxy.loop_forever(io.BytesIO(lookup), wfile)
    assert wfile.getvalue() == b"O01234\n"
def test_handle_dovecot_protocol_iterate(dictproxy):
    """Iterating over an empty private namespace yields an empty reply."""
    request = b"\n".join(
        [
            b"H",
            b"I9\t0\tpriv/5cbe730f146fea6535be0d003dd4fc98/\tci-2dzsrs@nine.testrun.org",
        ]
    )
    wfile = io.BytesIO()
    dictproxy.loop_forever(io.BytesIO(request), wfile)
    assert wfile.getvalue() == b"\n"
def test_notifier_thread_deletes_persistent_file(metadata, notifier, testaddr):
    """A successful push delivery keeps the token and empties the queue."""
    reqmock = get_mocked_requests([200])
    metadata.add_token_to_addr(testaddr, "01234")
    notifier.new_message_for_addr(testaddr, metadata)
    NotifyThread(notifier, 0, None).retry_one(reqmock)
    url, data, timeout = reqmock.requests[0]
    assert data == "01234"
    assert metadata.get_tokens_for_addr(testaddr) == ["01234"]
    # nothing remains on disk to requeue
    notifier.requeue_persistent_queue_items()
    assert notifier.retry_queues[0].qsize() == 0
@pytest.mark.parametrize("status", [requests.exceptions.RequestException(), 404, 500])
def test_notifier_thread_connection_failures(
    metadata, notifier, testaddr, status, caplog
):
    """test that tokens keep getting retried until they are given up."""
    metadata.add_token_to_addr(testaddr, "01234")
    notifier.new_message_for_addr(testaddr, metadata)
    notifier.NOTIFICATION_RETRY_DELAY = 5
    max_tries = len(notifier.retry_queues)
    for retry_num in range(max_tries):
        caplog.clear()
        reqmock = get_mocked_requests([status])
        sleep_calls = []
        NotifyThread(notifier, retry_num, None).retry_one(
            reqmock, sleep=sleep_calls.append
        )
        assert notifier.retry_queues[retry_num].qsize() == 0
        assert "request failed" in caplog.records[0].msg
        if retry_num > 0:
            # later retry stages wait before re-attempting delivery
            assert len(sleep_calls) == 1
        if retry_num + 1 < max_tries:
            # the item moved on to the next retry queue
            assert notifier.retry_queues[retry_num + 1].qsize() == 1
            assert len(caplog.records) == 1
        else:
            # final failure additionally logs the give-up ("deadline") message
            assert len(caplog.records) == 2
            assert "deadline" in caplog.records[1].msg
    # after the deadline, nothing is left to requeue from disk
    notifier.requeue_persistent_queue_items()
    assert notifier.retry_queues[0].qsize() == 0
def test_requeue_removes_tmp_files(notifier, metadata, testaddr, caplog):
    """Stale *.tmp files in the queue dir are logged and removed on requeue."""
    metadata.add_token_to_addr(testaddr, "01234")
    notifier.new_message_for_addr(testaddr, metadata)
    stale = notifier.queue_dir.joinpath("1203981203.tmp")
    stale.touch()
    fresh_notifier = notifier.__class__(notifier.queue_dir)
    fresh_notifier.requeue_persistent_queue_items()
    assert "spurious" in caplog.records[0].msg
    assert not stale.exists()
    # the genuine pending item was requeued and is immediately due
    assert fresh_notifier.retry_queues[0].qsize() == 1
    when, queue_item = fresh_notifier.retry_queues[0].get()
    assert when <= int(time.time())
    assert queue_item.addr == testaddr
def test_requeue_removes_invalid_files(notifier, metadata, testaddr, caplog):
    """Empty/invalid queue files are logged and removed on requeue."""
    metadata.add_token_to_addr(testaddr, "01234")
    notifier.new_message_for_addr(testaddr, metadata)
    # empty/invalid files should be ignored
    invalid = notifier.queue_dir.joinpath("1203981203")
    invalid.touch()
    fresh_notifier = notifier.__class__(notifier.queue_dir)
    fresh_notifier.requeue_persistent_queue_items()
    assert "spurious" in caplog.records[0].msg
    assert not invalid.exists()
    # the genuine pending item was requeued and is immediately due
    assert fresh_notifier.retry_queues[0].qsize() == 1
    when, queue_item = fresh_notifier.retry_queues[0].get()
    assert when <= int(time.time())
    assert queue_item.addr == testaddr
def test_start_and_stop_notification_threads(notifier, testaddr):
    """All spawned notification threads can be stopped and joined cleanly."""
    threads = notifier.start_notification_threads(None)
    for threadlist in threads.values():
        for thread in threadlist:
            thread.stop()
            thread.join()
def test_multi_device_notifier(metadata, notifier, testaddr):
    """Each registered device token gets its own push notification."""
    metadata.add_token_to_addr(testaddr, "01234")
    metadata.add_token_to_addr(testaddr, "56789")
    notifier.new_message_for_addr(testaddr, metadata)
    reqmock = get_mocked_requests([200, 200])
    NotifyThread(notifier, 0, None).retry_one(reqmock)
    NotifyThread(notifier, 0, None).retry_one(reqmock)
    assert notifier.retry_queues[0].qsize() == 0
    assert notifier.retry_queues[1].qsize() == 0
    # both tokens were posted, in registration order
    posted = [request[1] for request in reqmock.requests]
    assert posted == ["01234", "56789"]
    assert metadata.get_tokens_for_addr(testaddr) == ["01234", "56789"]
def test_notifier_thread_run_gone_removes_token(metadata, notifier, testaddr):
|
|
||||||
metadata.add_token_to_addr(testaddr, "01234")
|
|
||||||
metadata.add_token_to_addr(testaddr, "45678")
|
|
||||||
notifier.new_message_for_addr(testaddr, metadata)
|
|
||||||
|
|
||||||
reqmock = get_mocked_requests([410, 200])
|
|
||||||
NotifyThread(notifier, 0, metadata.remove_token_from_addr).retry_one(reqmock)
|
|
||||||
NotifyThread(notifier, 0, None).retry_one(reqmock)
|
|
||||||
url, data, timeout = reqmock.requests[0]
|
|
||||||
assert data == "01234"
|
|
||||||
url, data, timeout = reqmock.requests[1]
|
|
||||||
assert data == "45678"
|
|
||||||
assert metadata.get_tokens_for_addr(testaddr) == ["45678"]
|
|
||||||
assert notifier.retry_queues[0].qsize() == 0
|
|
||||||
assert notifier.retry_queues[1].qsize() == 0
|
|
||||||
|
|
||||||
|
|
||||||
def test_persistent_queue_items(tmp_path, testaddr, token):
|
|
||||||
queue_item = PersistentQueueItem.create(tmp_path, testaddr, 432, token)
|
|
||||||
assert queue_item.addr == testaddr
|
|
||||||
assert queue_item.start_ts == 432
|
|
||||||
assert queue_item.token == token
|
|
||||||
item2 = PersistentQueueItem.read_from_path(queue_item.path)
|
|
||||||
assert item2.addr == testaddr
|
|
||||||
assert item2.start_ts == 432
|
|
||||||
assert item2.token == token
|
|
||||||
assert item2 == queue_item
|
|
||||||
item2.delete()
|
|
||||||
assert not item2.path.exists()
|
|
||||||
assert not queue_item < item2 and not item2 < queue_item
|
|
||||||
|
|
||||||
|
|
||||||
def test_iroh_relay(dictproxy):
|
|
||||||
rfile = io.BytesIO(
|
|
||||||
b"\n".join(
|
|
||||||
[
|
|
||||||
b"H",
|
|
||||||
b"Lshared/0123/vendor/vendor.dovecot/pvt/server/vendor/deltachat/irohrelay\tuser@example.org",
|
|
||||||
]
|
|
||||||
)
|
|
||||||
)
|
|
||||||
wfile = io.BytesIO()
|
|
||||||
dictproxy.iroh_relay = "https://example.org/"
|
|
||||||
dictproxy.loop_forever(rfile, wfile)
|
|
||||||
assert wfile.getvalue() == b"Ohttps://example.org/\n"
|
|
||||||
@@ -1,24 +0,0 @@
|
|||||||
from chatmaild.metrics import main
|
|
||||||
|
|
||||||
|
|
||||||
def test_main(tmp_path, capsys):
|
|
||||||
paths = []
|
|
||||||
for x in ("ci-asllkj", "ac_12l3kj", "qweqwe", "ci-l1k2j31l2k3"):
|
|
||||||
p = tmp_path.joinpath(x)
|
|
||||||
p.mkdir()
|
|
||||||
p.joinpath("cur").mkdir()
|
|
||||||
paths.append(p)
|
|
||||||
|
|
||||||
tmp_path.joinpath("nomailbox").mkdir()
|
|
||||||
|
|
||||||
main(tmp_path)
|
|
||||||
out, _ = capsys.readouterr()
|
|
||||||
d = {}
|
|
||||||
for line in out.split("\n"):
|
|
||||||
if line.strip() and not line.startswith("#"):
|
|
||||||
name, num = line.split()
|
|
||||||
d[name] = int(num)
|
|
||||||
|
|
||||||
assert d["accounts"] == 4
|
|
||||||
assert d["ci_accounts"] == 3
|
|
||||||
assert d["nonci_accounts"] == 1
|
|
||||||
@@ -1,67 +0,0 @@
|
|||||||
import sqlite3
|
|
||||||
|
|
||||||
from chatmaild.migrate_db import migrate_from_db_to_maildir
|
|
||||||
|
|
||||||
|
|
||||||
def test_migration_not_exists(tmp_path, example_config):
|
|
||||||
example_config.passdb_path = tmp_path.joinpath("sqlite")
|
|
||||||
|
|
||||||
|
|
||||||
def test_migration(tmp_path, example_config, caplog):
|
|
||||||
passdb_path = tmp_path.joinpath("passdb.sqlite")
|
|
||||||
uri = f"file:{passdb_path}?mode=rwc"
|
|
||||||
sqlconn = sqlite3.connect(uri, timeout=60, uri=True)
|
|
||||||
sqlconn.execute(
|
|
||||||
"""
|
|
||||||
CREATE TABLE users (
|
|
||||||
addr TEXT PRIMARY KEY,
|
|
||||||
password TEXT,
|
|
||||||
last_login INTEGER
|
|
||||||
)
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
all = {}
|
|
||||||
|
|
||||||
for i in range(500):
|
|
||||||
values = (f"somsom{i:03}@example.org", f"passwo{i:03}", i * 86400)
|
|
||||||
sqlconn.execute(
|
|
||||||
"""
|
|
||||||
INSERT INTO users (addr, password, last_login)
|
|
||||||
VALUES (?, ?, ?)""",
|
|
||||||
values,
|
|
||||||
)
|
|
||||||
all[values[0]] = values[1:]
|
|
||||||
|
|
||||||
for i in range(500):
|
|
||||||
values = (f"pompom{i:03}@example.org", f"wopass{i:03}", "")
|
|
||||||
sqlconn.execute(
|
|
||||||
"""
|
|
||||||
INSERT INTO users (addr, password, last_login)
|
|
||||||
VALUES (?, ?, ?)""",
|
|
||||||
values,
|
|
||||||
)
|
|
||||||
all[values[0]] = values[1:]
|
|
||||||
|
|
||||||
sqlconn.commit()
|
|
||||||
sqlconn.close()
|
|
||||||
|
|
||||||
assert passdb_path.stat().st_size > 10000
|
|
||||||
|
|
||||||
example_config.passdb_path = passdb_path
|
|
||||||
|
|
||||||
assert not caplog.records
|
|
||||||
|
|
||||||
migrate_from_db_to_maildir(example_config, chunking=500)
|
|
||||||
assert len(caplog.records) > 3
|
|
||||||
|
|
||||||
for path in example_config.mailboxes_dir.iterdir():
|
|
||||||
if "@" not in path.name:
|
|
||||||
continue
|
|
||||||
password, last_login = all.pop(path.name)
|
|
||||||
user = example_config.get_user(path.name)
|
|
||||||
if last_login:
|
|
||||||
assert user.get_last_login_timestamp() == last_login
|
|
||||||
assert password == user.get_userdb_dict()["password"]
|
|
||||||
|
|
||||||
assert not all
|
|
||||||
assert not example_config.passdb_path.exists()
|
|
||||||
@@ -1,27 +0,0 @@
|
|||||||
import json
|
|
||||||
|
|
||||||
import chatmaild
|
|
||||||
from chatmaild.newemail import create_newemail_dict, print_new_account
|
|
||||||
|
|
||||||
|
|
||||||
def test_create_newemail_dict(example_config):
|
|
||||||
ac1 = create_newemail_dict(example_config)
|
|
||||||
assert "@" in ac1["email"]
|
|
||||||
assert len(ac1["password"]) >= 10
|
|
||||||
|
|
||||||
ac2 = create_newemail_dict(example_config)
|
|
||||||
|
|
||||||
assert ac1["email"] != ac2["email"]
|
|
||||||
assert ac1["password"] != ac2["password"]
|
|
||||||
|
|
||||||
|
|
||||||
def test_print_new_account(capsys, monkeypatch, maildomain, tmpdir, example_config):
|
|
||||||
monkeypatch.setattr(chatmaild.newemail, "CONFIG_PATH", str(example_config._inipath))
|
|
||||||
print_new_account()
|
|
||||||
out, err = capsys.readouterr()
|
|
||||||
lines = out.split("\n")
|
|
||||||
assert lines[0] == "Content-Type: application/json"
|
|
||||||
assert not lines[1]
|
|
||||||
dic = json.loads(lines[2])
|
|
||||||
assert dic["email"].endswith(f"@{example_config.mail_domain}")
|
|
||||||
assert len(dic["password"]) >= 10
|
|
||||||
@@ -1,56 +0,0 @@
|
|||||||
def test_login_timestamp(testaddr, example_config):
|
|
||||||
user = example_config.get_user(testaddr)
|
|
||||||
user.set_password("someeqkjwelkqwjleqwe")
|
|
||||||
user.set_last_login_timestamp(100000)
|
|
||||||
assert user.get_last_login_timestamp() == 86400
|
|
||||||
|
|
||||||
user.set_last_login_timestamp(200000)
|
|
||||||
assert user.get_last_login_timestamp() == 86400 * 2
|
|
||||||
|
|
||||||
|
|
||||||
def test_get_user_dict_not_set(testaddr, example_config, caplog):
|
|
||||||
user = example_config.get_user(testaddr)
|
|
||||||
assert not caplog.records
|
|
||||||
assert user.get_userdb_dict() == {}
|
|
||||||
assert len(caplog.records) == 0
|
|
||||||
|
|
||||||
user.set_password("")
|
|
||||||
assert user.get_userdb_dict() == {}
|
|
||||||
assert len(caplog.records) == 1
|
|
||||||
|
|
||||||
|
|
||||||
def test_get_user_dict(make_config, tmp_path):
|
|
||||||
config = make_config("something.testrun.org")
|
|
||||||
addr = "user1@something.org"
|
|
||||||
user = config.get_user(addr)
|
|
||||||
enc_password = "l1k2j31lk2j3l1k23j123"
|
|
||||||
user.set_password(enc_password)
|
|
||||||
data = user.get_userdb_dict()
|
|
||||||
assert addr in str(data["home"])
|
|
||||||
assert data["uid"] == "vmail"
|
|
||||||
assert data["gid"] == "vmail"
|
|
||||||
assert data["password"] == enc_password
|
|
||||||
|
|
||||||
|
|
||||||
def test_no_mailboxes_dir(testaddr, example_config, tmp_path):
|
|
||||||
p = tmp_path.joinpath("a", "mailboxes")
|
|
||||||
example_config.mailboxes_dir = p
|
|
||||||
|
|
||||||
user = example_config.get_user(testaddr)
|
|
||||||
user.set_password("someeqkjwelkqwjleqwe")
|
|
||||||
user.set_last_login_timestamp(100000)
|
|
||||||
assert user.get_last_login_timestamp() == 86400
|
|
||||||
|
|
||||||
|
|
||||||
def test_set_get_cleartext_flag(testaddr, example_config, tmp_path):
|
|
||||||
p = tmp_path.joinpath("a", "mailboxes")
|
|
||||||
example_config.mailboxes_dir = p
|
|
||||||
|
|
||||||
user = example_config.get_user(testaddr)
|
|
||||||
user.set_password("someeqkjwelkqwjleqwe")
|
|
||||||
user.set_last_login_timestamp(100000)
|
|
||||||
assert user.get_last_login_timestamp() == 86400
|
|
||||||
|
|
||||||
assert not user.is_incoming_cleartext_ok()
|
|
||||||
user.allow_incoming_cleartext()
|
|
||||||
assert user.is_incoming_cleartext_ok()
|
|
||||||
@@ -1,84 +0,0 @@
|
|||||||
import logging
|
|
||||||
import os
|
|
||||||
|
|
||||||
from chatmaild.filedict import write_bytes_atomic
|
|
||||||
|
|
||||||
|
|
||||||
def get_daytimestamp(timestamp) -> int:
|
|
||||||
return int(timestamp) // 86400 * 86400
|
|
||||||
|
|
||||||
|
|
||||||
class User:
|
|
||||||
def __init__(self, maildir, addr, password_path, uid, gid):
|
|
||||||
self.maildir = maildir
|
|
||||||
self.addr = addr
|
|
||||||
self.password_path = password_path
|
|
||||||
self.enforce_E2EE_path = maildir.joinpath("enforceE2EEincoming")
|
|
||||||
self.uid = uid
|
|
||||||
self.gid = gid
|
|
||||||
|
|
||||||
@property
|
|
||||||
def can_track(self):
|
|
||||||
return "@" in self.addr and not self.addr.startswith("echo@")
|
|
||||||
|
|
||||||
def get_userdb_dict(self):
|
|
||||||
"""Return a non-empty dovecot 'userdb' style dict
|
|
||||||
if the user has an existing non-empty password"""
|
|
||||||
try:
|
|
||||||
pw = self.password_path.read_text()
|
|
||||||
except FileNotFoundError:
|
|
||||||
return {}
|
|
||||||
|
|
||||||
if not pw:
|
|
||||||
logging.error(f"password is empty for: {self.addr}")
|
|
||||||
return {}
|
|
||||||
|
|
||||||
home = str(self.maildir)
|
|
||||||
return dict(addr=self.addr, home=home, uid=self.uid, gid=self.gid, password=pw)
|
|
||||||
|
|
||||||
def is_incoming_cleartext_ok(self):
|
|
||||||
return not self.enforce_E2EE_path.exists()
|
|
||||||
|
|
||||||
def allow_incoming_cleartext(self):
|
|
||||||
if self.enforce_E2EE_path.exists():
|
|
||||||
self.enforce_E2EE_path.unlink()
|
|
||||||
|
|
||||||
def set_password(self, enc_password):
|
|
||||||
"""Set the specified password for this user.
|
|
||||||
|
|
||||||
This method can be called concurrently
|
|
||||||
but there is no guarantee which of the password-set calls will win.
|
|
||||||
"""
|
|
||||||
self.maildir.mkdir(exist_ok=True, parents=True)
|
|
||||||
password = enc_password.encode("ascii")
|
|
||||||
|
|
||||||
try:
|
|
||||||
write_bytes_atomic(self.password_path, password)
|
|
||||||
except PermissionError:
|
|
||||||
if not self.addr.startswith("echo@"):
|
|
||||||
logging.error(f"could not write password for: {self.addr}")
|
|
||||||
raise
|
|
||||||
if not self.addr.startswith("echo@"):
|
|
||||||
self.enforce_E2EE_path.touch()
|
|
||||||
|
|
||||||
def set_last_login_timestamp(self, timestamp):
|
|
||||||
"""Track login time with daily granularity
|
|
||||||
to minimize touching files and to minimize metadata leakage."""
|
|
||||||
if not self.can_track:
|
|
||||||
return
|
|
||||||
try:
|
|
||||||
mtime = int(os.stat(self.password_path).st_mtime)
|
|
||||||
except FileNotFoundError:
|
|
||||||
logging.error(f"Can not get last login timestamp for {self.addr}")
|
|
||||||
return
|
|
||||||
|
|
||||||
timestamp = get_daytimestamp(timestamp)
|
|
||||||
if mtime != timestamp:
|
|
||||||
os.utime(self.password_path, (timestamp, timestamp))
|
|
||||||
|
|
||||||
def get_last_login_timestamp(self):
|
|
||||||
if self.can_track:
|
|
||||||
try:
|
|
||||||
return int(self.password_path.stat().st_mtime)
|
|
||||||
except FileNotFoundError:
|
|
||||||
pass
|
|
||||||
@@ -1,46 +0,0 @@
|
|||||||
[build-system]
|
|
||||||
requires = ["setuptools>=68"]
|
|
||||||
build-backend = "setuptools.build_meta"
|
|
||||||
|
|
||||||
[project]
|
|
||||||
name = "cmdeploy"
|
|
||||||
version = "0.2"
|
|
||||||
dependencies = [
|
|
||||||
"pyinfra>=3",
|
|
||||||
"pillow",
|
|
||||||
"qrcode",
|
|
||||||
"markdown",
|
|
||||||
"pytest",
|
|
||||||
"setuptools>=68",
|
|
||||||
"termcolor",
|
|
||||||
"build",
|
|
||||||
"tox",
|
|
||||||
"ruff",
|
|
||||||
"pytest",
|
|
||||||
"pytest-xdist",
|
|
||||||
"execnet",
|
|
||||||
"imap_tools",
|
|
||||||
]
|
|
||||||
|
|
||||||
[project.scripts]
|
|
||||||
cmdeploy = "cmdeploy.cmdeploy:main"
|
|
||||||
|
|
||||||
[project.entry-points.pytest11]
|
|
||||||
"chatmaild.testplugin" = "chatmaild.tests.plugin"
|
|
||||||
"cmdeploy.testplugin" = "cmdeploy.tests.plugin"
|
|
||||||
|
|
||||||
[tool.pytest.ini_options]
|
|
||||||
addopts = "-v -ra --strict-markers"
|
|
||||||
|
|
||||||
[tool.ruff]
|
|
||||||
lint.select = [
|
|
||||||
"F", # Pyflakes
|
|
||||||
"I", # isort
|
|
||||||
|
|
||||||
"PLC", # Pylint Convention
|
|
||||||
"PLE", # Pylint Error
|
|
||||||
"PLW", # Pylint Warning
|
|
||||||
]
|
|
||||||
lint.ignore = [
|
|
||||||
"PLC0415" # import-outside-top-level
|
|
||||||
]
|
|
||||||
@@ -1,869 +0,0 @@
|
|||||||
"""
|
|
||||||
Chat Mail pyinfra deploy.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import importlib.resources
|
|
||||||
import io
|
|
||||||
import shutil
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
from io import StringIO
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
from chatmaild.config import Config, read_config
|
|
||||||
from pyinfra import facts, host, logger
|
|
||||||
from pyinfra.api import FactBase
|
|
||||||
from pyinfra.facts.files import File, Sha256File
|
|
||||||
from pyinfra.facts.server import Sysctl
|
|
||||||
from pyinfra.facts.systemd import SystemdEnabled
|
|
||||||
from pyinfra.operations import apt, files, pip, server, systemd
|
|
||||||
|
|
||||||
from .acmetool import deploy_acmetool
|
|
||||||
|
|
||||||
|
|
||||||
class Port(FactBase):
|
|
||||||
"""
|
|
||||||
Returns the process occuping a port.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def command(self, port: int) -> str:
|
|
||||||
return (
|
|
||||||
"ss -lptn 'src :%d' | awk 'NR>1 {print $6,$7}' | sed 's/users:((\"//;s/\".*//'"
|
|
||||||
% (port,)
|
|
||||||
)
|
|
||||||
|
|
||||||
def process(self, output: [str]) -> str:
|
|
||||||
return output[0]
|
|
||||||
|
|
||||||
|
|
||||||
def _build_chatmaild(dist_dir) -> None:
|
|
||||||
dist_dir = Path(dist_dir).resolve()
|
|
||||||
if dist_dir.exists():
|
|
||||||
shutil.rmtree(dist_dir)
|
|
||||||
dist_dir.mkdir()
|
|
||||||
subprocess.check_output(
|
|
||||||
[sys.executable, "-m", "build", "-n"]
|
|
||||||
+ ["--sdist", "chatmaild", "--outdir", str(dist_dir)]
|
|
||||||
)
|
|
||||||
entries = list(dist_dir.iterdir())
|
|
||||||
assert len(entries) == 1
|
|
||||||
return entries[0]
|
|
||||||
|
|
||||||
|
|
||||||
def remove_legacy_artifacts():
|
|
||||||
# disable legacy doveauth-dictproxy.service
|
|
||||||
if host.get_fact(SystemdEnabled).get("doveauth-dictproxy.service"):
|
|
||||||
systemd.service(
|
|
||||||
name="Disable legacy doveauth-dictproxy.service",
|
|
||||||
service="doveauth-dictproxy.service",
|
|
||||||
running=False,
|
|
||||||
enabled=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _install_remote_venv_with_chatmaild(config) -> None:
|
|
||||||
remove_legacy_artifacts()
|
|
||||||
dist_file = _build_chatmaild(dist_dir=Path("chatmaild/dist"))
|
|
||||||
remote_base_dir = "/usr/local/lib/chatmaild"
|
|
||||||
remote_dist_file = f"{remote_base_dir}/dist/{dist_file.name}"
|
|
||||||
remote_venv_dir = f"{remote_base_dir}/venv"
|
|
||||||
remote_chatmail_inipath = f"{remote_base_dir}/chatmail.ini"
|
|
||||||
root_owned = dict(user="root", group="root", mode="644")
|
|
||||||
|
|
||||||
apt.packages(
|
|
||||||
name="apt install python3-virtualenv",
|
|
||||||
packages=["python3-virtualenv"],
|
|
||||||
)
|
|
||||||
|
|
||||||
files.put(
|
|
||||||
name="Upload chatmaild source package",
|
|
||||||
src=dist_file.open("rb"),
|
|
||||||
dest=remote_dist_file,
|
|
||||||
create_remote_dir=True,
|
|
||||||
**root_owned,
|
|
||||||
)
|
|
||||||
|
|
||||||
files.put(
|
|
||||||
name=f"Upload {remote_chatmail_inipath}",
|
|
||||||
src=config._getbytefile(),
|
|
||||||
dest=remote_chatmail_inipath,
|
|
||||||
**root_owned,
|
|
||||||
)
|
|
||||||
|
|
||||||
pip.virtualenv(
|
|
||||||
name=f"chatmaild virtualenv {remote_venv_dir}",
|
|
||||||
path=remote_venv_dir,
|
|
||||||
always_copy=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
apt.packages(
|
|
||||||
name="install gcc and headers to build crypt_r source package",
|
|
||||||
packages=["gcc", "python3-dev"],
|
|
||||||
)
|
|
||||||
|
|
||||||
server.shell(
|
|
||||||
name=f"forced pip-install {dist_file.name}",
|
|
||||||
commands=[
|
|
||||||
f"{remote_venv_dir}/bin/pip install --force-reinstall {remote_dist_file}"
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
files.template(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("metrics.cron.j2"),
|
|
||||||
dest="/etc/cron.d/chatmail-metrics",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
config={
|
|
||||||
"mailboxes_dir": config.mailboxes_dir,
|
|
||||||
"execpath": f"{remote_venv_dir}/bin/chatmail-metrics",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
# install systemd units
|
|
||||||
for fn in (
|
|
||||||
"doveauth",
|
|
||||||
"filtermail",
|
|
||||||
"filtermail-incoming",
|
|
||||||
"echobot",
|
|
||||||
"chatmail-metadata",
|
|
||||||
"lastlogin",
|
|
||||||
):
|
|
||||||
execpath = fn if fn != "filtermail-incoming" else "filtermail"
|
|
||||||
params = dict(
|
|
||||||
execpath=f"{remote_venv_dir}/bin/{execpath}",
|
|
||||||
config_path=remote_chatmail_inipath,
|
|
||||||
remote_venv_dir=remote_venv_dir,
|
|
||||||
mail_domain=config.mail_domain,
|
|
||||||
)
|
|
||||||
source_path = importlib.resources.files(__package__).joinpath(
|
|
||||||
"service", f"{fn}.service.f"
|
|
||||||
)
|
|
||||||
content = source_path.read_text().format(**params).encode()
|
|
||||||
|
|
||||||
files.put(
|
|
||||||
name=f"Upload {fn}.service",
|
|
||||||
src=io.BytesIO(content),
|
|
||||||
dest=f"/etc/systemd/system/{fn}.service",
|
|
||||||
**root_owned,
|
|
||||||
)
|
|
||||||
systemd.service(
|
|
||||||
name=f"Setup {fn} service",
|
|
||||||
service=f"{fn}.service",
|
|
||||||
running=True,
|
|
||||||
enabled=True,
|
|
||||||
restarted=True,
|
|
||||||
daemon_reload=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _configure_opendkim(domain: str, dkim_selector: str = "dkim") -> bool:
|
|
||||||
"""Configures OpenDKIM"""
|
|
||||||
need_restart = False
|
|
||||||
|
|
||||||
main_config = files.template(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("opendkim/opendkim.conf"),
|
|
||||||
dest="/etc/opendkim.conf",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
config={"domain_name": domain, "opendkim_selector": dkim_selector},
|
|
||||||
)
|
|
||||||
need_restart |= main_config.changed
|
|
||||||
|
|
||||||
screen_script = files.put(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("opendkim/screen.lua"),
|
|
||||||
dest="/etc/opendkim/screen.lua",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
need_restart |= screen_script.changed
|
|
||||||
|
|
||||||
final_script = files.put(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("opendkim/final.lua"),
|
|
||||||
dest="/etc/opendkim/final.lua",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
need_restart |= final_script.changed
|
|
||||||
|
|
||||||
files.directory(
|
|
||||||
name="Add opendkim directory to /etc",
|
|
||||||
path="/etc/opendkim",
|
|
||||||
user="opendkim",
|
|
||||||
group="opendkim",
|
|
||||||
mode="750",
|
|
||||||
present=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
keytable = files.template(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("opendkim/KeyTable"),
|
|
||||||
dest="/etc/dkimkeys/KeyTable",
|
|
||||||
user="opendkim",
|
|
||||||
group="opendkim",
|
|
||||||
mode="644",
|
|
||||||
config={"domain_name": domain, "opendkim_selector": dkim_selector},
|
|
||||||
)
|
|
||||||
need_restart |= keytable.changed
|
|
||||||
|
|
||||||
signing_table = files.template(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("opendkim/SigningTable"),
|
|
||||||
dest="/etc/dkimkeys/SigningTable",
|
|
||||||
user="opendkim",
|
|
||||||
group="opendkim",
|
|
||||||
mode="644",
|
|
||||||
config={"domain_name": domain, "opendkim_selector": dkim_selector},
|
|
||||||
)
|
|
||||||
need_restart |= signing_table.changed
|
|
||||||
files.directory(
|
|
||||||
name="Add opendkim socket directory to /var/spool/postfix",
|
|
||||||
path="/var/spool/postfix/opendkim",
|
|
||||||
user="opendkim",
|
|
||||||
group="opendkim",
|
|
||||||
mode="750",
|
|
||||||
present=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
apt.packages(
|
|
||||||
name="apt install opendkim opendkim-tools",
|
|
||||||
packages=["opendkim", "opendkim-tools"],
|
|
||||||
)
|
|
||||||
|
|
||||||
if not host.get_fact(File, f"/etc/dkimkeys/{dkim_selector}.private"):
|
|
||||||
server.shell(
|
|
||||||
name="Generate OpenDKIM domain keys",
|
|
||||||
commands=[
|
|
||||||
f"/usr/sbin/opendkim-genkey -D /etc/dkimkeys -d {domain} -s {dkim_selector}"
|
|
||||||
],
|
|
||||||
_use_su_login=True,
|
|
||||||
_su_user="opendkim",
|
|
||||||
)
|
|
||||||
|
|
||||||
service_file = files.put(
|
|
||||||
name="Configure opendkim to restart once a day",
|
|
||||||
src=importlib.resources.files(__package__).joinpath("opendkim/systemd.conf"),
|
|
||||||
dest="/etc/systemd/system/opendkim.service.d/10-prevent-memory-leak.conf",
|
|
||||||
)
|
|
||||||
need_restart |= service_file.changed
|
|
||||||
|
|
||||||
return need_restart
|
|
||||||
|
|
||||||
|
|
||||||
def _uninstall_mta_sts_daemon() -> None:
|
|
||||||
# Remove configuration.
|
|
||||||
files.file("/etc/mta-sts-daemon.yml", present=False)
|
|
||||||
|
|
||||||
files.directory("/usr/local/lib/postfix-mta-sts-resolver", present=False)
|
|
||||||
|
|
||||||
files.file("/etc/systemd/system/mta-sts-daemon.service", present=False)
|
|
||||||
|
|
||||||
systemd.service(
|
|
||||||
name="Stop MTA-STS daemon",
|
|
||||||
service="mta-sts-daemon.service",
|
|
||||||
daemon_reload=True,
|
|
||||||
running=False,
|
|
||||||
enabled=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _configure_postfix(config: Config, debug: bool = False) -> bool:
|
|
||||||
"""Configures Postfix SMTP server."""
|
|
||||||
need_restart = False
|
|
||||||
|
|
||||||
main_config = files.template(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("postfix/main.cf.j2"),
|
|
||||||
dest="/etc/postfix/main.cf",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
config=config,
|
|
||||||
disable_ipv6=config.disable_ipv6,
|
|
||||||
)
|
|
||||||
need_restart |= main_config.changed
|
|
||||||
|
|
||||||
master_config = files.template(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("postfix/master.cf.j2"),
|
|
||||||
dest="/etc/postfix/master.cf",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
debug=debug,
|
|
||||||
config=config,
|
|
||||||
)
|
|
||||||
need_restart |= master_config.changed
|
|
||||||
|
|
||||||
header_cleanup = files.put(
|
|
||||||
src=importlib.resources.files(__package__).joinpath(
|
|
||||||
"postfix/submission_header_cleanup"
|
|
||||||
),
|
|
||||||
dest="/etc/postfix/submission_header_cleanup",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
need_restart |= header_cleanup.changed
|
|
||||||
|
|
||||||
# Login map that 1:1 maps email address to login.
|
|
||||||
login_map = files.put(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("postfix/login_map"),
|
|
||||||
dest="/etc/postfix/login_map",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
need_restart |= login_map.changed
|
|
||||||
|
|
||||||
return need_restart
|
|
||||||
|
|
||||||
|
|
||||||
def _install_dovecot_package(package: str, arch: str):
|
|
||||||
arch = "amd64" if arch == "x86_64" else arch
|
|
||||||
arch = "arm64" if arch == "aarch64" else arch
|
|
||||||
url = f"https://download.delta.chat/dovecot/dovecot-{package}_2.3.21%2Bdfsg1-3_{arch}.deb"
|
|
||||||
deb_filename = "/root/" + url.split("/")[-1]
|
|
||||||
|
|
||||||
match (package, arch):
|
|
||||||
case ("core", "amd64"):
|
|
||||||
sha256 = "43f593332e22ac7701c62d58b575d2ca409e0f64857a2803be886c22860f5587"
|
|
||||||
case ("core", "arm64"):
|
|
||||||
sha256 = "4d21eba1a83f51c100f08f2e49f0c9f8f52f721ebc34f75018e043306da993a7"
|
|
||||||
case ("imapd", "amd64"):
|
|
||||||
sha256 = "8d8dc6fc00bbb6cdb25d345844f41ce2f1c53f764b79a838eb2a03103eebfa86"
|
|
||||||
case ("imapd", "arm64"):
|
|
||||||
sha256 = "178fa877ddd5df9930e8308b518f4b07df10e759050725f8217a0c1fb3fd707f"
|
|
||||||
case ("lmtpd", "amd64"):
|
|
||||||
sha256 = "2f69ba5e35363de50962d42cccbfe4ed8495265044e244007d7ccddad77513ab"
|
|
||||||
case ("lmtpd", "arm64"):
|
|
||||||
sha256 = "89f52fb36524f5877a177dff4a713ba771fd3f91f22ed0af7238d495e143b38f"
|
|
||||||
case _:
|
|
||||||
apt.packages(packages=[f"dovecot-{package}"])
|
|
||||||
return
|
|
||||||
|
|
||||||
files.download(
|
|
||||||
name=f"Download dovecot-{package}",
|
|
||||||
src=url,
|
|
||||||
dest=deb_filename,
|
|
||||||
sha256sum=sha256,
|
|
||||||
cache_time=60 * 60 * 24 * 365 * 10, # never redownload the package
|
|
||||||
)
|
|
||||||
|
|
||||||
apt.deb(name=f"Install dovecot-{package}", src=deb_filename)
|
|
||||||
|
|
||||||
|
|
||||||
def _configure_dovecot(config: Config, debug: bool = False) -> bool:
|
|
||||||
"""Configures Dovecot IMAP server."""
|
|
||||||
need_restart = False
|
|
||||||
|
|
||||||
main_config = files.template(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("dovecot/dovecot.conf.j2"),
|
|
||||||
dest="/etc/dovecot/dovecot.conf",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
config=config,
|
|
||||||
debug=debug,
|
|
||||||
disable_ipv6=config.disable_ipv6,
|
|
||||||
)
|
|
||||||
need_restart |= main_config.changed
|
|
||||||
auth_config = files.put(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("dovecot/auth.conf"),
|
|
||||||
dest="/etc/dovecot/auth.conf",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
need_restart |= auth_config.changed
|
|
||||||
lua_push_notification_script = files.put(
|
|
||||||
src=importlib.resources.files(__package__).joinpath(
|
|
||||||
"dovecot/push_notification.lua"
|
|
||||||
),
|
|
||||||
dest="/etc/dovecot/push_notification.lua",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
)
|
|
||||||
need_restart |= lua_push_notification_script.changed
|
|
||||||
|
|
||||||
files.template(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("dovecot/expunge.cron.j2"),
|
|
||||||
dest="/etc/cron.d/expunge",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
config=config,
|
|
||||||
)
|
|
||||||
|
|
||||||
# as per https://doc.dovecot.org/configuration_manual/os/
|
|
||||||
# it is recommended to set the following inotify limits
|
|
||||||
for name in ("max_user_instances", "max_user_watches"):
|
|
||||||
key = f"fs.inotify.{name}"
|
|
||||||
if host.get_fact(Sysctl)[key] > 65535:
|
|
||||||
# Skip updating limits if already sufficient
|
|
||||||
# (enables running in incus containers where sysctl readonly)
|
|
||||||
continue
|
|
||||||
server.sysctl(
|
|
||||||
name=f"Change {key}",
|
|
||||||
key=key,
|
|
||||||
value=65535,
|
|
||||||
persist=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
timezone_env = files.line(
|
|
||||||
name="Set TZ environment variable",
|
|
||||||
path="/etc/environment",
|
|
||||||
line="TZ=:/etc/localtime",
|
|
||||||
)
|
|
||||||
need_restart |= timezone_env.changed
|
|
||||||
|
|
||||||
return need_restart
|
|
||||||
|
|
||||||
|
|
||||||
def _configure_nginx(config: Config, debug: bool = False) -> bool:
|
|
||||||
"""Configures nginx HTTP server."""
|
|
||||||
need_restart = False
|
|
||||||
|
|
||||||
main_config = files.template(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("nginx/nginx.conf.j2"),
|
|
||||||
dest="/etc/nginx/nginx.conf",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
config={"domain_name": config.mail_domain},
|
|
||||||
disable_ipv6=config.disable_ipv6,
|
|
||||||
)
|
|
||||||
need_restart |= main_config.changed
|
|
||||||
|
|
||||||
autoconfig = files.template(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("nginx/autoconfig.xml.j2"),
|
|
||||||
dest="/var/www/html/.well-known/autoconfig/mail/config-v1.1.xml",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
config={"domain_name": config.mail_domain},
|
|
||||||
)
|
|
||||||
need_restart |= autoconfig.changed
|
|
||||||
|
|
||||||
mta_sts_config = files.template(
|
|
||||||
src=importlib.resources.files(__package__).joinpath("nginx/mta-sts.txt.j2"),
|
|
||||||
dest="/var/www/html/.well-known/mta-sts.txt",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="644",
|
|
||||||
config={"domain_name": config.mail_domain},
|
|
||||||
)
|
|
||||||
need_restart |= mta_sts_config.changed
|
|
||||||
|
|
||||||
# install CGI newemail script
|
|
||||||
#
|
|
||||||
cgi_dir = "/usr/lib/cgi-bin"
|
|
||||||
files.directory(
|
|
||||||
name=f"Ensure {cgi_dir} exists",
|
|
||||||
path=cgi_dir,
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
)
|
|
||||||
|
|
||||||
files.put(
|
|
||||||
name="Upload cgi newemail.py script",
|
|
||||||
src=importlib.resources.files("chatmaild").joinpath("newemail.py").open("rb"),
|
|
||||||
dest=f"{cgi_dir}/newemail.py",
|
|
||||||
user="root",
|
|
||||||
group="root",
|
|
||||||
mode="755",
|
|
||||||
)
|
|
||||||
|
|
||||||
return need_restart
|
|
||||||
|
|
||||||
|
|
||||||
def _remove_rspamd() -> None:
|
|
||||||
"""Remove rspamd"""
|
|
||||||
apt.packages(name="Remove rspamd", packages="rspamd", present=False)
|
|
||||||
|
|
||||||
|
|
||||||
def check_config(config):
|
|
||||||
mail_domain = config.mail_domain
|
|
||||||
if mail_domain != "testrun.org" and not mail_domain.endswith(".testrun.org"):
|
|
||||||
blocked_words = "merlinux schmieder testrun.org".split()
|
|
||||||
for key in config.__dict__:
|
|
||||||
value = config.__dict__[key]
|
|
||||||
if key.startswith("privacy") and any(
|
|
||||||
x in str(value) for x in blocked_words
|
|
||||||
):
|
|
||||||
raise ValueError(
|
|
||||||
f"please set your own privacy contacts/addresses in {config._inipath}"
|
|
||||||
)
|
|
||||||
return config
|
|
||||||
|
|
||||||
|
|
||||||
def deploy_mtail(config):
    """Install a static mtail binary, its config and systemd unit.

    mtail exports mail-delivery metrics scraped from the journal; the
    service only runs when ``config.mtail_address`` is set.
    """
    # Uninstall mtail package, we are going to install a static binary.
    apt.packages(name="Uninstall mtail", packages=["mtail"], present=False)

    # Per-architecture release tarball and its expected sha256.
    (url, sha256sum) = {
        "x86_64": (
            "https://github.com/google/mtail/releases/download/v3.0.8/mtail_3.0.8_linux_amd64.tar.gz",
            "123c2ee5f48c3eff12ebccee38befd2233d715da736000ccde49e3d5607724e4",
        ),
        "aarch64": (
            "https://github.com/google/mtail/releases/download/v3.0.8/mtail_3.0.8_linux_arm64.tar.gz",
            "aa04811c0929b6754408676de520e050c45dddeb3401881888a092c9aea89cae",
        ),
    }[host.get_fact(facts.server.Arch)]

    # Only download if the installed binary's checksum does not already
    # match; the download writes to a .new path and moves it into place.
    server.shell(
        name="Download mtail",
        commands=[
            f"(echo '{sha256sum} /usr/local/bin/mtail' | sha256sum -c) || (curl -L {url} | gunzip | tar -x -f - mtail -O >/usr/local/bin/mtail.new && mv /usr/local/bin/mtail.new /usr/local/bin/mtail)",
            "chmod 755 /usr/local/bin/mtail",
        ],
    )

    # Using our own systemd unit instead of `/usr/lib/systemd/system/mtail.service`.
    # This allows to read from journalctl instead of log files.
    files.template(
        src=importlib.resources.files(__package__).joinpath("mtail/mtail.service.j2"),
        dest="/etc/systemd/system/mtail.service",
        user="root",
        group="root",
        mode="644",
        address=config.mtail_address or "127.0.0.1",
        port=3903,
    )

    mtail_conf = files.put(
        name="Mtail configuration",
        src=importlib.resources.files(__package__).joinpath(
            "mtail/delivered_mail.mtail"
        ),
        dest="/etc/mtail/delivered_mail.mtail",
        user="root",
        group="root",
        mode="644",
    )

    # Service is only running/enabled when an mtail address is configured;
    # a changed config file triggers a restart.
    systemd.service(
        name="Start and enable mtail",
        service="mtail.service",
        running=bool(config.mtail_address),
        enabled=bool(config.mtail_address),
        restarted=mtail_conf.changed,
    )
|
|
||||||
|
|
||||||
|
|
||||||
def deploy_iroh_relay(config) -> None:
    """Install the iroh-relay binary, config and systemd unit.

    The binary is re-downloaded only when the sha256 of the installed file
    differs from the pinned release checksum; any change to binary, unit
    or config restarts the service.
    """
    # Per-architecture release tarball and its expected sha256.
    (url, sha256sum) = {
        "x86_64": (
            "https://github.com/n0-computer/iroh/releases/download/v0.35.0/iroh-relay-v0.35.0-x86_64-unknown-linux-musl.tar.gz",
            "45c81199dbd70f8c4c30fef7f3b9727ca6e3cea8f2831333eeaf8aa71bf0fac1",
        ),
        "aarch64": (
            "https://github.com/n0-computer/iroh/releases/download/v0.35.0/iroh-relay-v0.35.0-aarch64-unknown-linux-musl.tar.gz",
            "f8ef27631fac213b3ef668d02acd5b3e215292746a3fc71d90c63115446008b1",
        ),
    }[host.get_fact(facts.server.Arch)]

    # curl is needed for the download command below.
    apt.packages(
        name="Install curl",
        packages=["curl"],
    )

    need_restart = False

    # Skip the download when the installed binary already matches the
    # pinned checksum.
    existing_sha256sum = host.get_fact(Sha256File, "/usr/local/bin/iroh-relay")
    if existing_sha256sum != sha256sum:
        server.shell(
            name="Download iroh-relay",
            commands=[
                f"(curl -L {url} | gunzip | tar -x -f - ./iroh-relay -O >/usr/local/bin/iroh-relay.new && (echo '{sha256sum} /usr/local/bin/iroh-relay.new' | sha256sum -c) && mv /usr/local/bin/iroh-relay.new /usr/local/bin/iroh-relay)",
                "chmod 755 /usr/local/bin/iroh-relay",
            ],
        )
        need_restart = True

    systemd_unit = files.put(
        name="Upload iroh-relay systemd unit",
        src=importlib.resources.files(__package__).joinpath("iroh-relay.service"),
        dest="/etc/systemd/system/iroh-relay.service",
        user="root",
        group="root",
        mode="644",
    )
    need_restart |= systemd_unit.changed

    iroh_config = files.put(
        name="Upload iroh-relay config",
        src=importlib.resources.files(__package__).joinpath("iroh-relay.toml"),
        dest="/etc/iroh-relay.toml",
        user="root",
        group="root",
        mode="644",
    )
    need_restart |= iroh_config.changed

    # NOTE: the service is always started (`running=True`) but only
    # *enabled* at boot when config.enable_iroh_relay is set.
    systemd.service(
        name="Start and enable iroh-relay",
        service="iroh-relay.service",
        running=True,
        enabled=config.enable_iroh_relay,
        restarted=need_restart,
    )
|
|
||||||
|
|
||||||
|
|
||||||
def deploy_chatmail(config_path: Path, disable_mail: bool) -> None:
    """Deploy a chat-mail instance.

    Top-level pyinfra deploy: creates system users/groups, installs and
    configures unbound, acmetool, postfix, dovecot, opendkim, nginx,
    iroh-relay and mtail, uploads the web pages and records the deployed
    git revision. Operation order matters (e.g. dovecot before postfix).

    :param config_path: path to chatmail.ini
    :param disable_mail: whether to disable postfix & dovecot
    """
    config = read_config(config_path)
    check_config(config)
    mail_domain = config.mail_domain

    from .www import build_webpages, get_paths

    # System accounts used by the various services below.
    server.group(name="Create vmail group", group="vmail", system=True)
    server.user(name="Create vmail user", user="vmail", group="vmail", system=True)
    server.group(name="Create opendkim group", group="opendkim", system=True)
    server.user(
        name="Create opendkim user",
        user="opendkim",
        groups=["opendkim"],
        system=True,
    )
    server.user(
        name="Add postfix user to opendkim group for socket access",
        user="postfix",
        groups=["opendkim"],
        system=True,
    )
    server.user(name="Create echobot user", user="echobot", system=True)
    server.user(name="Create iroh user", user="iroh", system=True)

    # Add our OBS repository for dovecot_no_delay
    files.put(
        name="Add Deltachat OBS GPG key to apt keyring",
        src=importlib.resources.files(__package__).joinpath("obs-home-deltachat.gpg"),
        dest="/etc/apt/keyrings/obs-home-deltachat.gpg",
        user="root",
        group="root",
        mode="644",
    )

    # present=False: the repository line is being *removed* from
    # sources.list here (the key stays in the keyring above).
    files.line(
        name="Add DeltaChat OBS home repository to sources.list",
        path="/etc/apt/sources.list",
        line="deb [signed-by=/etc/apt/keyrings/obs-home-deltachat.gpg] https://download.opensuse.org/repositories/home:/deltachat/Debian_12/ ./",
        escape_regex_characters=True,
        present=False,
    )

    # Until unbound owns port 53, make sure a working resolver is set.
    if host.get_fact(Port, port=53) != "unbound":
        files.line(
            name="Add 9.9.9.9 to resolv.conf",
            path="/etc/resolv.conf",
            line="nameserver 9.9.9.9",
        )
    apt.update(name="apt update", cache_time=24 * 3600)
    apt.upgrade(name="upgrade apt packages", auto_remove=True)

    apt.packages(
        name="Install rsync",
        packages=["rsync"],
    )

    # Run local DNS resolver `unbound`.
    # `resolvconf` takes care of setting up /etc/resolv.conf
    # to use 127.0.0.1 as the resolver.
    from cmdeploy.cmdeploy import Out

    # Expected (process-name, port) pairs; a list of names means any of
    # them may legitimately own the port.
    port_services = [
        (["master", "smtpd"], 25),
        ("unbound", 53),
        ("acmetool", 80),
        (["imap-login", "dovecot"], 143),
        ("nginx", 443),
        (["master", "smtpd"], 465),
        (["master", "smtpd"], 587),
        (["imap-login", "dovecot"], 993),
        ("iroh-relay", 3340),
        ("nginx", 8443),
        (["master", "smtpd"], config.postfix_reinject_port),
        (["master", "smtpd"], config.postfix_reinject_port_incoming),
        ("filtermail", config.filtermail_smtp_port),
        ("filtermail", config.filtermail_smtp_port_incoming),
    ]
    # Abort early if any required port is held by a foreign service.
    for service, port in port_services:
        print(f"Checking if port {port} is available for {service}...")
        running_service = host.get_fact(Port, port=port)
        if running_service:
            if running_service not in service:
                Out().red(
                    f"Deploy failed: port {port} is occupied by: {running_service}"
                )
                exit(1)

    apt.packages(
        name="Install unbound",
        packages=["unbound", "unbound-anchor", "dnsutils"],
    )
    server.shell(
        name="Generate root keys for validating DNSSEC",
        commands=[
            "unbound-anchor -a /var/lib/unbound/root.key || true",
            "systemctl reset-failed unbound.service",
        ],
    )
    systemd.service(
        name="Start and enable unbound",
        service="unbound.service",
        running=True,
        enabled=True,
    )

    deploy_iroh_relay(config)

    # Deploy acmetool to have TLS certificates.
    tls_domains = [mail_domain, f"mta-sts.{mail_domain}", f"www.{mail_domain}"]
    deploy_acmetool(
        domains=tls_domains,
    )

    apt.packages(
        # required for setfacl for echobot
        name="Install acl",
        packages="acl",
    )

    apt.packages(
        name="Install Postfix",
        packages="postfix",
    )

    # Install the custom dovecot packages only on first deploy (when the
    # service is not yet enabled).
    if not "dovecot.service" in host.get_fact(SystemdEnabled):
        _install_dovecot_package("core", host.get_fact(facts.server.Arch))
        _install_dovecot_package("imapd", host.get_fact(facts.server.Arch))
        _install_dovecot_package("lmtpd", host.get_fact(facts.server.Arch))

    apt.packages(
        name="Install nginx",
        packages=["nginx", "libnginx-mod-stream"],
    )

    apt.packages(
        name="Install fcgiwrap",
        packages=["fcgiwrap"],
    )

    www_path, src_dir, build_dir = get_paths(config)
    # if www_folder was set to a non-existing folder, skip upload
    if not www_path.is_dir():
        logger.warning("Building web pages is disabled in chatmail.ini, skipping")
    else:
        # if www_folder is a hugo page, build it
        if build_dir:
            www_path = build_webpages(src_dir, build_dir, config)
        # if it is not a hugo page, upload it as is
        files.rsync(f"{www_path}/", "/var/www/html", flags=["-avz"])

    _install_remote_venv_with_chatmaild(config)
    debug = False
    # Each _configure_* helper reports whether its service needs a restart.
    dovecot_need_restart = _configure_dovecot(config, debug=debug)
    postfix_need_restart = _configure_postfix(config, debug=debug)
    nginx_need_restart = _configure_nginx(config)
    _uninstall_mta_sts_daemon()

    _remove_rspamd()
    opendkim_need_restart = _configure_opendkim(mail_domain, "opendkim")

    systemd.service(
        name="Start and enable OpenDKIM",
        service="opendkim.service",
        running=True,
        enabled=True,
        daemon_reload=opendkim_need_restart,
        restarted=opendkim_need_restart,
    )

    # Dovecot should be started before Postfix
    # because it creates authentication socket
    # required by Postfix.
    systemd.service(
        name="disable dovecot for now" if disable_mail else "Start and enable Dovecot",
        service="dovecot.service",
        running=False if disable_mail else True,
        enabled=False if disable_mail else True,
        restarted=dovecot_need_restart if not disable_mail else False,
    )

    systemd.service(
        name="disable postfix for now" if disable_mail else "Start and enable Postfix",
        service="postfix.service",
        running=False if disable_mail else True,
        enabled=False if disable_mail else True,
        restarted=postfix_need_restart if not disable_mail else False,
    )

    systemd.service(
        name="Start and enable nginx",
        service="nginx.service",
        running=True,
        enabled=True,
        restarted=nginx_need_restart,
    )

    systemd.service(
        name="Restart echobot if postfix and dovecot were just started",
        service="echobot.service",
        restarted=postfix_need_restart and dovecot_need_restart,
    )

    # This file is used by auth proxy.
    # https://wiki.debian.org/EtcMailName
    server.shell(
        name="Setup /etc/mailname",
        commands=[f"echo {mail_domain} >/etc/mailname; chmod 644 /etc/mailname"],
    )

    journald_conf = files.put(
        name="Configure journald",
        src=importlib.resources.files(__package__).joinpath("journald.conf"),
        dest="/etc/systemd/journald.conf",
        user="root",
        group="root",
        mode="644",
    )
    systemd.service(
        name="Start and enable journald",
        service="systemd-journald.service",
        running=True,
        enabled=True,
        restarted=journald_conf.changed,
    )
    files.directory(
        name="Ensure old logs on disk are deleted",
        path="/var/log/journal/",
        present=False,
    )

    apt.packages(
        name="Ensure cron is installed",
        packages=["cron"],
    )
    # Best-effort: record the deployed git revision + local diff; falls
    # back to "unknown" when not running from a git checkout.
    try:
        git_hash = subprocess.check_output(["git", "rev-parse", "HEAD"]).decode()
    except Exception:
        git_hash = "unknown\n"
    try:
        git_diff = subprocess.check_output(["git", "diff"]).decode()
    except Exception:
        git_diff = ""
    # NOTE(review): "commiit" typo in the operation name below — cosmetic
    # only (it is a display label), left unchanged here.
    files.put(
        name="Upload chatmail relay git commiit hash",
        src=StringIO(git_hash + git_diff),
        dest="/etc/chatmail-version",
        mode="700",
    )

    deploy_mtail(config)
|
|
||||||
@@ -1,67 +0,0 @@
|
|||||||
import importlib.resources
|
|
||||||
|
|
||||||
from pyinfra.operations import apt, files, server, systemd
|
|
||||||
|
|
||||||
|
|
||||||
def deploy_acmetool(email="", domains=None):
    """Deploy acmetool.

    Installs the acmetool package, uploads its cron job, nginx hook,
    response/target configuration and the acmetool-redirector systemd
    unit, then requests a certificate for ``domains``.

    :param email: Let's Encrypt account contact email (may be empty).
    :param domains: iterable of domain names to request a certificate for.
    """
    # Fix: avoid a mutable default argument (was `domains=[]`), a classic
    # Python pitfall; callers passing a list are unaffected.
    if domains is None:
        domains = []
    apt.packages(
        name="Install acmetool",
        packages=["acmetool"],
    )

    # Daily renewal cron job.
    files.put(
        src=importlib.resources.files(__package__).joinpath("acmetool.cron").open("rb"),
        dest="/etc/cron.d/acmetool",
        user="root",
        group="root",
        mode="644",
    )

    # Hook executed by acmetool after certificate changes.
    files.put(
        src=importlib.resources.files(__package__).joinpath("acmetool.hook").open("rb"),
        dest="/usr/lib/acme/hooks/nginx",
        user="root",
        group="root",
        mode="744",
    )

    # Pre-answers acmetool's interactive questions (account email, ToS).
    files.template(
        src=importlib.resources.files(__package__).joinpath("response-file.yaml.j2"),
        dest="/var/lib/acme/conf/responses",
        user="root",
        group="root",
        mode="644",
        email=email,
    )

    files.template(
        src=importlib.resources.files(__package__).joinpath("target.yaml.j2"),
        dest="/var/lib/acme/conf/target",
        user="root",
        group="root",
        mode="644",
    )

    service_file = files.put(
        src=importlib.resources.files(__package__).joinpath(
            "acmetool-redirector.service"
        ),
        dest="/etc/systemd/system/acmetool-redirector.service",
        user="root",
        group="root",
        mode="644",
    )

    systemd.service(
        name="Setup acmetool-redirector service",
        service="acmetool-redirector.service",
        running=True,
        enabled=True,
        restarted=service_file.changed,
    )

    server.shell(
        name=f"Request certificate for: {', '.join(domains)}",
        commands=[f"acmetool want --xlog.severity=debug {' '.join(domains)}"],
    )
|
|
||||||
@@ -1,11 +0,0 @@
|
|||||||
[Unit]
|
|
||||||
Description=acmetool HTTP redirector
|
|
||||||
|
|
||||||
[Service]
|
|
||||||
Type=notify
|
|
||||||
ExecStart=/usr/bin/acmetool redirector --service.uid=daemon
|
|
||||||
Restart=always
|
|
||||||
RestartSec=30
|
|
||||||
|
|
||||||
[Install]
|
|
||||||
WantedBy=multi-user.target
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
SHELL=/bin/sh
|
|
||||||
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin
|
|
||||||
MAILTO=root
|
|
||||||
20 16 * * * root /usr/bin/acmetool --batch reconcile && systemctl reload dovecot && systemctl reload postfix && systemctl reload nginx
|
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
"acme-enter-email": "{{ email }}"
|
|
||||||
"acme-agreement:https://letsencrypt.org/documents/LE-SA-v1.5-February-24-2025.pdf": true
|
|
||||||
@@ -1,30 +0,0 @@
|
|||||||
;
|
|
||||||
; Required DNS entries for chatmail servers
|
|
||||||
;
|
|
||||||
{% if A %}
|
|
||||||
{{ mail_domain }}. A {{ A }}
|
|
||||||
{% endif %}
|
|
||||||
{% if AAAA %}
|
|
||||||
{{ mail_domain }}. AAAA {{ AAAA }}
|
|
||||||
{% endif %}
|
|
||||||
{{ mail_domain }}. MX 10 {{ mail_domain }}.
|
|
||||||
_mta-sts.{{ mail_domain }}. TXT "v=STSv1; id={{ sts_id }}"
|
|
||||||
mta-sts.{{ mail_domain }}. CNAME {{ mail_domain }}.
|
|
||||||
www.{{ mail_domain }}. CNAME {{ mail_domain }}.
|
|
||||||
{{ dkim_entry }}
|
|
||||||
|
|
||||||
;
|
|
||||||
; Recommended DNS entries for interoperability and security-hardening
|
|
||||||
;
|
|
||||||
{{ mail_domain }}. TXT "v=spf1 a ~all"
|
|
||||||
_dmarc.{{ mail_domain }}. TXT "v=DMARC1;p=reject;adkim=s;aspf=s"
|
|
||||||
|
|
||||||
{% if acme_account_url %}
|
|
||||||
{{ mail_domain }}. CAA 0 issue "letsencrypt.org;accounturi={{ acme_account_url }}"
|
|
||||||
{% endif %}
|
|
||||||
_adsp._domainkey.{{ mail_domain }}. TXT "dkim=discardable"
|
|
||||||
|
|
||||||
_submission._tcp.{{ mail_domain }}. SRV 0 1 587 {{ mail_domain }}.
|
|
||||||
_submissions._tcp.{{ mail_domain }}. SRV 0 1 465 {{ mail_domain }}.
|
|
||||||
_imap._tcp.{{ mail_domain }}. SRV 0 1 143 {{ mail_domain }}.
|
|
||||||
_imaps._tcp.{{ mail_domain }}. SRV 0 1 993 {{ mail_domain }}.
|
|
||||||
@@ -1,437 +0,0 @@
|
|||||||
"""
|
|
||||||
Provides the `cmdeploy` entry point function,
|
|
||||||
along with command line option and subcommand parsing.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
import importlib.resources
|
|
||||||
import importlib.util
|
|
||||||
import os
|
|
||||||
import pathlib
|
|
||||||
import shutil
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import pyinfra
|
|
||||||
from chatmaild.config import read_config, write_initial_config
|
|
||||||
from packaging import version
|
|
||||||
from termcolor import colored
|
|
||||||
|
|
||||||
from . import dns, remote
|
|
||||||
from .sshexec import SSHExec
|
|
||||||
|
|
||||||
#
|
|
||||||
# cmdeploy sub commands and options
|
|
||||||
#
|
|
||||||
|
|
||||||
|
|
||||||
def init_cmd_options(parser):
    """Attach the positional chatmail-domain argument for `cmdeploy init`."""
    parser.add_argument(
        "chatmail_domain",
        action="store",
        help="fully qualified DNS domain name for your chatmail instance",
    )
|
|
||||||
|
|
||||||
|
|
||||||
def init_cmd(args, out):
    """Initialize chatmail config file."""
    mail_domain = args.chatmail_domain
    inipath = args.inipath
    if inipath.exists():
        # Never clobber an existing config; signal failure to the caller.
        print(f"Path exists, not modifying: {inipath}")
        return 1
    write_initial_config(inipath, mail_domain, overrides={})
    out.green(f"created config file for {mail_domain} in {inipath}")
|
|
||||||
|
|
||||||
def run_cmd_options(parser):
    """Attach the option flags for `cmdeploy run`."""
    flags = [
        (
            "--dry-run",
            dict(
                dest="dry_run",
                action="store_true",
                help="don't actually modify the server",
            ),
        ),
        (
            "--disable-mail",
            dict(
                dest="disable_mail",
                action="store_true",
                help="install/upgrade the server, but disable postfix & dovecot for now",
            ),
        ),
        (
            "--ssh-host",
            dict(
                dest="ssh_host",
                help="Deploy to 'localhost' or to a specific SSH host",
            ),
        ),
    ]
    for flag, kwargs in flags:
        parser.add_argument(flag, **kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
def run_cmd(args, out):
    """Deploy chatmail services on the remote server."""

    # Default the deploy target to the configured mail domain.
    ssh_host = args.ssh_host if args.ssh_host else args.config.mail_domain
    sshexec = get_sshexec(ssh_host)
    require_iroh = args.config.enable_iroh_relay
    # Gather DNS/remote facts first; abort if the basics are missing.
    remote_data = dns.get_initial_remote_data(sshexec, args.config.mail_domain)
    if not dns.check_initial_remote_data(remote_data, print=out.red):
        return 1

    # The pyinfra subprocess reads its parameters from the environment.
    env = os.environ.copy()
    env["CHATMAIL_INI"] = args.inipath
    env["CHATMAIL_DISABLE_MAIL"] = "True" if args.disable_mail else ""
    env["CHATMAIL_REQUIRE_IROH"] = "True" if require_iroh else ""
    deploy_path = importlib.resources.files(__package__).joinpath("deploy.py").resolve()
    pyinf = "pyinfra --dry" if args.dry_run else "pyinfra"

    cmd = f"{pyinf} --ssh-user root {ssh_host} {deploy_path} -y"
    if ssh_host == "localhost":
        # Local deploy uses pyinfra's @local connector instead of SSH.
        cmd = f"{pyinf} @local {deploy_path} -y"

    # Require pyinfra >= 3 before invoking it.
    if version.parse(pyinfra.__version__) < version.parse("3"):
        out.red("Please re-run scripts/initenv.sh to update pyinfra to version 3.")
        return 1

    try:
        retcode = out.check_call(cmd, env=env)
        if retcode == 0:
            print("\nYou can try out the relay by talking to this echo bot: ")
            sshexec = SSHExec(args.config.mail_domain, verbose=args.verbose)
            print(
                sshexec(
                    call=remote.rshell.shell,
                    kwargs=dict(command="cat /var/lib/echobot/invite-link.txt"),
                )
            )
            out.green("Deploy completed, call `cmdeploy dns` next.")
        elif not remote_data["acme_account_url"]:
            # First run without a letsencrypt account yet is not fatal.
            out.red("Deploy completed but letsencrypt not configured")
            out.red("Run 'cmdeploy run' again")
            retcode = 0
        else:
            out.red("Deploy failed")
    except subprocess.CalledProcessError:
        out.red("Deploy failed")
        retcode = 1
    return retcode
|
|
||||||
|
|
||||||
|
|
||||||
def dns_cmd_options(parser):
    """Attach the option flags for `cmdeploy dns`."""
    parser.add_argument(
        "--zonefile",
        dest="zonefile",
        type=pathlib.Path,
        default=None,
        help="write out a zonefile",
    )
    parser.add_argument(
        "--ssh-host",
        dest="ssh_host",
        help="Run the DNS queries on 'localhost' or on a specific SSH host",
    )
|
|
||||||
|
|
||||||
|
|
||||||
def dns_cmd(args, out):
    """Check DNS entries and optionally generate dns zone file."""
    # Default to querying the mail domain itself over SSH.
    ssh_host = args.ssh_host if args.ssh_host else args.config.mail_domain
    sshexec = get_sshexec(ssh_host, verbose=args.verbose)
    remote_data = dns.get_initial_remote_data(sshexec, args.config.mail_domain)
    if not remote_data:
        return 1

    # Both the ACME account URL and the DKIM record are produced by a
    # successful `cmdeploy run`; without them no zone can be generated.
    if not remote_data["acme_account_url"]:
        out.red("could not get letsencrypt account url, please run 'cmdeploy run'")
        return 1

    if not remote_data["dkim_entry"]:
        out.red("could not determine dkim_entry, please run 'cmdeploy run'")
        return 1

    zonefile = dns.get_filled_zone_file(remote_data)

    # --zonefile: write the zone to disk and skip the live check.
    if args.zonefile:
        args.zonefile.write_text(zonefile)
        out.green(f"DNS records successfully written to: {args.zonefile}")
        return 0

    # Otherwise verify the live DNS records against the expected zone.
    retcode = dns.check_full_zone(
        sshexec, remote_data=remote_data, zonefile=zonefile, out=out
    )
    return retcode
|
|
||||||
|
|
||||||
|
|
||||||
def status_cmd(args, out):
    """Display status for online chatmail instance."""

    # NOTE(review): `args` is an argparse Namespace and nothing visible in
    # this file attaches a `get_sshexec` method to it — confirm a caller
    # sets it, otherwise this raises AttributeError.
    sshexec = args.get_sshexec()

    out.green(f"chatmail domain: {args.config.mail_domain}")
    if args.config.privacy_mail:
        out.green("privacy settings: present")
    else:
        out.red("no privacy settings")

    # Print the remote helper's systemd unit status lines verbatim.
    for line in sshexec(remote.rshell.get_systemd_running):
        print(line)
|
|
||||||
|
|
||||||
|
|
||||||
def test_cmd_options(parser):
|
|
||||||
parser.add_argument(
|
|
||||||
"--slow",
|
|
||||||
dest="slow",
|
|
||||||
action="store_true",
|
|
||||||
help="also run slow tests",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def test_cmd(args, out):
    """Run local and online tests for chatmail deployment.

    This will automatically pip-install 'deltachat' if it's not available.
    """

    # Install the deltachat bindings on demand.
    if importlib.util.find_spec("deltachat") is None:
        out.check_call(f"{sys.executable} -m pip install deltachat")

    pytest_args = [
        shutil.which("pytest"),
        "cmdeploy/src/",
        "-n4",
        "-rs",
        "-x",
        "-v",
        "--durations=5",
    ]
    if args.slow:
        pytest_args.append("--slow")
    return out.run_ret(pytest_args)
|
|
||||||
|
|
||||||
|
|
||||||
def proxy_cmd_options(parser: argparse.ArgumentParser):
    """Attach the arguments and option flags for `cmdeploy proxy`."""
    parser.add_argument(
        "ip_address",
        help="specify a server to deploy to; can also be an inventory.py file",
    )
    parser.add_argument(
        "--relay-ipv4",
        dest="relay_ipv4",
        help="The ipv4 address of the relay you want to forward traffic to",
    )
    parser.add_argument(
        "--relay-ipv6",
        dest="relay_ipv6",
        help="The ipv6 address of the relay you want to forward traffic to",
    )
    parser.add_argument(
        "--dry-run",
        dest="dry_run",
        action="store_true",
        help="don't actually modify the server",
    )
|
|
||||||
|
|
||||||
|
|
||||||
def proxy_cmd(args, out):
    """Deploy reverse proxy on a second server.

    :param args: parsed CLI namespace (needs ip_address, relay_ipv4,
        relay_ipv6, dry_run and a `get_sshexec` accessor).
    :param out: Out printer for colored status messages.
    :returns: 0 on success, 1 on failure.
    """
    # Fix: --relay-ipv4/--relay-ipv6 are optional in the parser; assigning
    # None into the env dict used to crash with TypeError at subprocess
    # time. Fail early with a clear message instead.
    if not args.relay_ipv4 or not args.relay_ipv6:
        out.red("both --relay-ipv4 and --relay-ipv6 are required for 'cmdeploy proxy'")
        return 1

    env = os.environ.copy()
    env["RELAY_IPV4"] = args.relay_ipv4
    env["RELAY_IPV6"] = args.relay_ipv6
    deploy_path = importlib.resources.files(__package__).joinpath("proxy-deploy.py").resolve()
    pyinf = "pyinfra --dry" if args.dry_run else "pyinfra"

    sshexec = args.get_sshexec()
    # :todo make sure relay is deployed to args.relay_ipv4 and args.relay_ipv6

    # abort if IP address == the chatmail relay itself: if port 22 is open AND /etc/chatmail-version exists
    if sshexec.logged(call=remote.rshell.get_port_service, args=[22]):
        if sshexec.logged(call=remote.rshell.chatmail_version):
            out.red("Can not deploy proxy on the chatmail relay itself, use another server")
            return 1
    cmd = f"{pyinf} --ssh-user root {args.ip_address} {deploy_path} -y"
    out.check_call(cmd, env=env)  # during first try, only set SSH port to 2222

    # Second pass connects on the port the first pass just configured.
    cmd = f"{pyinf} --ssh-port 2222 --ssh-user root {args.ip_address} {deploy_path} -y"
    try:
        retcode = out.check_call(cmd, env=env)
        if retcode == 0:
            out.green("Reverse proxy deployed - you can distribute the IP address now.")
        else:
            out.red("Deploying reverse proxy failed")
    except subprocess.CalledProcessError:
        out.red("Deploying reverse proxy failed")
        retcode = 1
    return retcode
|
|
||||||
|
|
||||||
|
|
||||||
def fmt_cmd_options(parser):
    """Attach the option flags for `cmdeploy fmt`."""
    parser.add_argument(
        "--check",
        "-c",
        action="store_true",
        help="only check but don't fix problems",
    )
|
|
||||||
|
|
||||||
|
|
||||||
def fmt_cmd(args, out):
    """Run formatting fixes on all chatmail source code."""
    # Fix: docstring typo "formattting" — the first docstring line becomes
    # the subcommand's help text shown by `cmdeploy --help`.

    sources = [str(importlib.resources.files(x)) for x in ("chatmaild", "cmdeploy")]
    format_args = [shutil.which("ruff"), "format"]
    check_args = [shutil.which("ruff"), "check"]

    if args.check:
        # Check-only mode: show what `ruff format` would change.
        format_args.append("--diff")
    else:
        # Fix mode: let `ruff check` apply auto-fixes.
        check_args.append("--fix")

    if not args.verbose:
        check_args.append("--quiet")
        format_args.append("--quiet")

    format_args.extend(sources)
    check_args.extend(sources)

    out.check_call(" ".join(format_args), quiet=not args.verbose)
    out.check_call(" ".join(check_args), quiet=not args.verbose)
|
|
||||||
|
|
||||||
|
|
||||||
def bench_cmd(args, out):
    """Run benchmarks against an online chatmail instance."""
    # Fix: the original rebound the `args` parameter to this local list,
    # shadowing the CLI namespace; use a distinct name instead.
    pytest_args = ["pytest", "--pyargs", "cmdeploy.tests.online.benchmark", "-vrx"]
    cmdstring = " ".join(pytest_args)
    out.green(f"[$ {cmdstring}]")
    subprocess.check_call(pytest_args)
|
|
||||||
|
|
||||||
|
|
||||||
def webdev_cmd(args, out):
    """Run local web development loop for static web pages."""
    # Imported lazily so the CLI starts fast for the other subcommands.
    from . import www

    www.main()
|
|
||||||
|
|
||||||
|
|
||||||
#
|
|
||||||
# Parsing command line options and starting commands
|
|
||||||
#
|
|
||||||
|
|
||||||
|
|
||||||
class Out:
    """Convenience output printer providing coloring."""

    def red(self, msg, file=sys.stderr):
        # Error/warning channel: red text on stderr by default.
        print(colored(msg, "red"), file=file)

    def green(self, msg, file=sys.stderr):
        # Success channel: green text on stderr by default.
        print(colored(msg, "green"), file=file)

    def __call__(self, msg, red=False, green=False, file=sys.stdout):
        # Plain print with optional coloring; `red` wins over `green`.
        if red:
            color = "red"
        elif green:
            color = "green"
        else:
            color = None
        print(colored(msg, color), file=file)

    def check_call(self, arg, env=None, quiet=False):
        # Echo the shell command (unless quiet), then run it; raises
        # CalledProcessError on non-zero exit, otherwise returns 0.
        if not quiet:
            self(f"[$ {arg}]", file=sys.stderr)
        return subprocess.check_call(arg, shell=True, env=env)

    def run_ret(self, args, env=None, quiet=False):
        # Like check_call but for an argv list; returns the exit code
        # instead of raising.
        if not quiet:
            self("[$ " + " ".join(args) + "]", file=sys.stderr)
        completed = subprocess.run(args, env=env, check=False)
        return completed.returncode
|
|
||||||
|
|
||||||
|
|
||||||
def add_config_option(parser):
    """Attach the shared --config/--verbose options used by every subcommand."""
    parser.add_argument(
        "--config",
        dest="inipath",
        action="store",
        default=Path("chatmail.ini"),
        type=Path,
        help="path to the chatmail.ini file",
    )
    parser.add_argument(
        "--verbose",
        "-v",
        dest="verbose",
        action="store_true",
        default=False,
        help="provide verbose logging",
    )
|
|
||||||
|
|
||||||
|
|
||||||
def add_subcommand(subparsers, func):
    """Register `func` as a subcommand named after it minus the `_cmd` suffix.

    The function's docstring becomes the subcommand description; its first
    line (trailing period stripped) becomes the short help text.
    """
    name = func.__name__
    assert name.endswith("_cmd")
    name = name[: -len("_cmd")]
    doc = func.__doc__.strip()
    help = doc.split("\n")[0].strip(".")
    subparser = subparsers.add_parser(name, description=doc, help=help)
    subparser.set_defaults(func=func)
    add_config_option(subparser)
    return subparser
|
|
||||||
|
|
||||||
|
|
||||||
description = """
|
|
||||||
Setup your chatmail server configuration and
|
|
||||||
deploy it via SSH to your remote location.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
def get_parser():
|
|
||||||
"""Return an ArgumentParser for the 'cmdeploy' CLI"""
|
|
||||||
|
|
||||||
parser = argparse.ArgumentParser(description=description.strip())
|
|
||||||
subparsers = parser.add_subparsers(title="subcommands")
|
|
||||||
|
|
||||||
# find all subcommands in the module namespace
|
|
||||||
glob = globals()
|
|
||||||
for name, func in glob.items():
|
|
||||||
if name.endswith("_cmd"):
|
|
||||||
subparser = add_subcommand(subparsers, func)
|
|
||||||
addopts = glob.get(name + "_options")
|
|
||||||
if addopts is not None:
|
|
||||||
addopts(subparser)
|
|
||||||
|
|
||||||
return parser
|
|
||||||
|
|
||||||
|
|
||||||
def get_sshexec(ssh_host: str, verbose=True):
|
|
||||||
if ssh_host in ["localhost", "@local"]:
|
|
||||||
return "localhost"
|
|
||||||
if verbose:
|
|
||||||
print(f"[ssh] login to {ssh_host}")
|
|
||||||
return SSHExec(ssh_host, verbose=verbose)
|
|
||||||
|
|
||||||
|
|
||||||
def main(args=None):
|
|
||||||
"""Provide main entry point for 'cmdeploy' CLI invocation."""
|
|
||||||
parser = get_parser()
|
|
||||||
args = parser.parse_args(args=args)
|
|
||||||
if not hasattr(args, "func"):
|
|
||||||
return parser.parse_args(["-h"])
|
|
||||||
|
|
||||||
out = Out()
|
|
||||||
kwargs = {}
|
|
||||||
if args.func.__name__ not in ("init_cmd", "fmt_cmd"):
|
|
||||||
if not args.inipath.exists():
|
|
||||||
out.red(f"expecting {args.inipath} to exist, run init first?")
|
|
||||||
raise SystemExit(1)
|
|
||||||
try:
|
|
||||||
args.config = read_config(args.inipath)
|
|
||||||
except Exception as ex:
|
|
||||||
out.red(ex)
|
|
||||||
raise SystemExit(1)
|
|
||||||
|
|
||||||
try:
|
|
||||||
res = args.func(args, out, **kwargs)
|
|
||||||
if res is None:
|
|
||||||
res = 0
|
|
||||||
return res
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
out.red("KeyboardInterrupt")
|
|
||||||
sys.exit(130)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
||||||
Binary file not shown.
|
Before Width: | Height: | Size: 13 KiB |
Binary file not shown.
@@ -1,20 +0,0 @@
|
|||||||
import importlib.resources
|
|
||||||
import os
|
|
||||||
|
|
||||||
import pyinfra
|
|
||||||
|
|
||||||
from cmdeploy import deploy_chatmail
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
config_path = os.getenv(
|
|
||||||
"CHATMAIL_INI",
|
|
||||||
importlib.resources.files("cmdeploy").joinpath("../../../chatmail.ini"),
|
|
||||||
)
|
|
||||||
disable_mail = bool(os.environ.get("CHATMAIL_DISABLE_MAIL"))
|
|
||||||
|
|
||||||
deploy_chatmail(config_path, disable_mail)
|
|
||||||
|
|
||||||
|
|
||||||
if pyinfra.is_cli:
|
|
||||||
main()
|
|
||||||
@@ -1,79 +0,0 @@
|
|||||||
import datetime
|
|
||||||
import importlib
|
|
||||||
|
|
||||||
from jinja2 import Template
|
|
||||||
|
|
||||||
from . import remote
|
|
||||||
|
|
||||||
|
|
||||||
def get_initial_remote_data(sshexec, mail_domain):
|
|
||||||
if sshexec == "localhost":
|
|
||||||
result = remote.rdns.perform_initial_checks(mail_domain)
|
|
||||||
else:
|
|
||||||
result = sshexec.logged(
|
|
||||||
call=remote.rdns.perform_initial_checks, kwargs=dict(mail_domain=mail_domain)
|
|
||||||
)
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
def check_initial_remote_data(remote_data, *, print=print):
|
|
||||||
mail_domain = remote_data["mail_domain"]
|
|
||||||
if not remote_data["A"] and not remote_data["AAAA"]:
|
|
||||||
print(f"Missing A and/or AAAA DNS records for {mail_domain}!")
|
|
||||||
elif remote_data["MTA_STS"] != f"{mail_domain}.":
|
|
||||||
print("Missing MTA-STS CNAME record:")
|
|
||||||
print(f"mta-sts.{mail_domain}. CNAME {mail_domain}.")
|
|
||||||
elif remote_data["WWW"] != f"{mail_domain}.":
|
|
||||||
print("Missing www CNAME record:")
|
|
||||||
print(f"www.{mail_domain}. CNAME {mail_domain}.")
|
|
||||||
else:
|
|
||||||
return remote_data
|
|
||||||
|
|
||||||
|
|
||||||
def get_filled_zone_file(remote_data):
|
|
||||||
sts_id = remote_data.get("sts_id")
|
|
||||||
if not sts_id:
|
|
||||||
remote_data["sts_id"] = datetime.datetime.now().strftime("%Y%m%d%H%M")
|
|
||||||
|
|
||||||
template = importlib.resources.files(__package__).joinpath("chatmail.zone.j2")
|
|
||||||
content = template.read_text()
|
|
||||||
zonefile = Template(content).render(**remote_data)
|
|
||||||
lines = [x.strip() for x in zonefile.split("\n") if x.strip()]
|
|
||||||
lines.append("")
|
|
||||||
zonefile = "\n".join(lines)
|
|
||||||
return zonefile
|
|
||||||
|
|
||||||
|
|
||||||
def check_full_zone(sshexec, remote_data, out, zonefile) -> int:
|
|
||||||
"""Check existing DNS records, optionally write them to zone file
|
|
||||||
and return (exitcode, remote_data) tuple."""
|
|
||||||
|
|
||||||
if sshexec == "localhost":
|
|
||||||
required_diff, recommended_diff = remote.rdns.check_zonefile(
|
|
||||||
zonefile=zonefile, verbose=False
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
required_diff, recommended_diff = sshexec.logged(
|
|
||||||
remote.rdns.check_zonefile, kwargs=dict(zonefile=zonefile, verbose=False),
|
|
||||||
)
|
|
||||||
|
|
||||||
returncode = 0
|
|
||||||
if required_diff:
|
|
||||||
out.red("Please set required DNS entries at your DNS provider:\n")
|
|
||||||
for line in required_diff:
|
|
||||||
out(line)
|
|
||||||
out("")
|
|
||||||
returncode = 1
|
|
||||||
if remote_data.get("dkim_entry") in required_diff:
|
|
||||||
out(
|
|
||||||
"If the DKIM entry above does not work with your DNS provider, you can try this one:\n"
|
|
||||||
)
|
|
||||||
out(remote_data.get("web_dkim_entry") + "\n")
|
|
||||||
if recommended_diff:
|
|
||||||
out("WARNING: these recommended DNS entries are not set:\n")
|
|
||||||
for line in recommended_diff:
|
|
||||||
out(line)
|
|
||||||
|
|
||||||
if not (recommended_diff or required_diff):
|
|
||||||
out.green("Great! All your DNS entries are verified and correct.")
|
|
||||||
return returncode
|
|
||||||
@@ -1,12 +0,0 @@
|
|||||||
uri = proxy:/run/doveauth/doveauth.socket:auth
|
|
||||||
iterate_disable = no
|
|
||||||
iterate_prefix = userdb/
|
|
||||||
|
|
||||||
default_pass_scheme = plain
|
|
||||||
# %E escapes characters " (double quote), ' (single quote) and \ (backslash) with \ (backslash).
|
|
||||||
# See <https://doc.dovecot.org/configuration_manual/config_file/config_variables/#modifiers>
|
|
||||||
# for documentation.
|
|
||||||
#
|
|
||||||
# We escape user-provided input and use double quote as a separator.
|
|
||||||
password_key = passdb/%Ew"%Eu
|
|
||||||
user_key = userdb/%Eu
|
|
||||||
@@ -1,248 +0,0 @@
|
|||||||
## Dovecot configuration file
|
|
||||||
|
|
||||||
{% if disable_ipv6 %}
|
|
||||||
listen = *
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
protocols = imap lmtp
|
|
||||||
|
|
||||||
auth_mechanisms = plain
|
|
||||||
|
|
||||||
{% if debug == true %}
|
|
||||||
auth_verbose = yes
|
|
||||||
auth_debug = yes
|
|
||||||
auth_debug_passwords = yes
|
|
||||||
auth_verbose_passwords = plain
|
|
||||||
auth_cache_size = 100M
|
|
||||||
mail_debug = yes
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
# Prevent warnings similar to:
|
|
||||||
# config: Warning: service auth { client_limit=1000 } is lower than required under max. load (10200). Counted for protocol services with service_count != 1: service lmtp { process_limit=100 } + service imap-urlauth-login { process_limit=100 } + service imap-login { process_limit=10000 }
|
|
||||||
# config: Warning: service anvil { client_limit=1000 } is lower than required under max. load (10103). Counted with: service imap-urlauth-login { process_limit=100 } + service imap-login { process_limit=10000 } + service auth { process_limit=1 }
|
|
||||||
# master: Warning: service(stats): client_limit (1000) reached, client connections are being dropped
|
|
||||||
default_client_limit = 20000
|
|
||||||
|
|
||||||
# Increase number of logged in IMAP connections.
|
|
||||||
# Each connection is handled by a separate `imap` process.
|
|
||||||
# `imap` process should have `client_limit=1` as described in
|
|
||||||
# <https://doc.dovecot.org/configuration_manual/service_configuration/#service-limits>
|
|
||||||
# so each logged in IMAP session will need its own `imap` process.
|
|
||||||
#
|
|
||||||
# If this limit is reached,
|
|
||||||
# users will fail to LOGIN as `imap-login` process
|
|
||||||
# will accept them logging in but fail to transfer logged in
|
|
||||||
# connection to `imap` process until someone logs out and
|
|
||||||
# the following warning will be logged:
|
|
||||||
# Warning: service(imap): process_limit (1024) reached, client connections are being dropped
|
|
||||||
service imap {
|
|
||||||
process_limit = 50000
|
|
||||||
}
|
|
||||||
|
|
||||||
mail_server_admin = mailto:root@{{ config.mail_domain }}
|
|
||||||
mail_server_comment = Chatmail server
|
|
||||||
|
|
||||||
# `zlib` enables compressing messages stored in the maildir.
|
|
||||||
# See
|
|
||||||
# <https://doc.dovecot.org/configuration_manual/zlib_plugin/>
|
|
||||||
# for documentation.
|
|
||||||
#
|
|
||||||
# quota plugin documentation:
|
|
||||||
# <https://doc.dovecot.org/configuration_manual/quota_plugin/>
|
|
||||||
mail_plugins = zlib quota
|
|
||||||
|
|
||||||
imap_capability = +XDELTAPUSH XCHATMAIL
|
|
||||||
|
|
||||||
|
|
||||||
# Authentication for system users.
|
|
||||||
passdb {
|
|
||||||
driver = dict
|
|
||||||
args = /etc/dovecot/auth.conf
|
|
||||||
}
|
|
||||||
userdb {
|
|
||||||
driver = dict
|
|
||||||
args = /etc/dovecot/auth.conf
|
|
||||||
}
|
|
||||||
##
|
|
||||||
## Mailbox locations and namespaces
|
|
||||||
##
|
|
||||||
|
|
||||||
# Mailboxes are stored in the "mail" directory of the vmail user home.
|
|
||||||
mail_location = maildir:{{ config.mailboxes_dir }}/%u
|
|
||||||
|
|
||||||
namespace inbox {
|
|
||||||
inbox = yes
|
|
||||||
|
|
||||||
mailbox Drafts {
|
|
||||||
special_use = \Drafts
|
|
||||||
}
|
|
||||||
mailbox Junk {
|
|
||||||
special_use = \Junk
|
|
||||||
}
|
|
||||||
mailbox Trash {
|
|
||||||
special_use = \Trash
|
|
||||||
}
|
|
||||||
|
|
||||||
# For \Sent mailboxes there are two widely used names. We'll mark both of
|
|
||||||
# them as \Sent. User typically deletes one of them if duplicates are created.
|
|
||||||
mailbox Sent {
|
|
||||||
special_use = \Sent
|
|
||||||
}
|
|
||||||
mailbox "Sent Messages" {
|
|
||||||
special_use = \Sent
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
mail_uid = vmail
|
|
||||||
mail_gid = vmail
|
|
||||||
mail_privileged_group = vmail
|
|
||||||
|
|
||||||
##
|
|
||||||
## Mail processes
|
|
||||||
##
|
|
||||||
|
|
||||||
# Pass all IMAP METADATA requests to the server implementing Dovecot's dict protocol.
|
|
||||||
mail_attribute_dict = proxy:/run/chatmail-metadata/metadata.socket:metadata
|
|
||||||
|
|
||||||
# `imap_zlib` enables IMAP COMPRESS (RFC 4978).
|
|
||||||
# <https://datatracker.ietf.org/doc/html/rfc4978.html>
|
|
||||||
protocol imap {
|
|
||||||
mail_plugins = $mail_plugins imap_zlib imap_quota last_login
|
|
||||||
imap_metadata = yes
|
|
||||||
}
|
|
||||||
|
|
||||||
plugin {
|
|
||||||
last_login_dict = proxy:/run/chatmail-lastlogin/lastlogin.socket:lastlogin
|
|
||||||
#last_login_key = last-login/%u # default
|
|
||||||
last_login_precision = s
|
|
||||||
}
|
|
||||||
|
|
||||||
protocol lmtp {
|
|
||||||
# notify plugin is a dependency of push_notification plugin:
|
|
||||||
# <https://doc.dovecot.org/settings/plugin/notify-plugin/>
|
|
||||||
#
|
|
||||||
# push_notification plugin documentation:
|
|
||||||
# <https://doc.dovecot.org/configuration_manual/push_notification/>
|
|
||||||
#
|
|
||||||
# mail_lua and push_notification_lua are needed for Lua push notification handler.
|
|
||||||
# <https://doc.dovecot.org/configuration_manual/push_notification/#configuration>
|
|
||||||
mail_plugins = $mail_plugins mail_lua notify push_notification push_notification_lua
|
|
||||||
}
|
|
||||||
|
|
||||||
plugin {
|
|
||||||
zlib_save = gz
|
|
||||||
}
|
|
||||||
|
|
||||||
plugin {
|
|
||||||
imap_compress_deflate_level = 6
|
|
||||||
}
|
|
||||||
|
|
||||||
plugin {
|
|
||||||
# for now we define static quota-rules for all users
|
|
||||||
quota = maildir:User quota
|
|
||||||
quota_rule = *:storage={{ config.max_mailbox_size }}
|
|
||||||
quota_max_mail_size={{ config.max_message_size }}
|
|
||||||
quota_grace = 0
|
|
||||||
# quota_over_flag_value = TRUE
|
|
||||||
}
|
|
||||||
|
|
||||||
# push_notification configuration
|
|
||||||
plugin {
|
|
||||||
# <https://doc.dovecot.org/configuration_manual/push_notification/#lua-lua>
|
|
||||||
push_notification_driver = lua:file=/etc/dovecot/push_notification.lua
|
|
||||||
}
|
|
||||||
|
|
||||||
service lmtp {
|
|
||||||
user=vmail
|
|
||||||
|
|
||||||
unix_listener /var/spool/postfix/private/dovecot-lmtp {
|
|
||||||
group = postfix
|
|
||||||
mode = 0600
|
|
||||||
user = postfix
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
service auth {
|
|
||||||
unix_listener /var/spool/postfix/private/auth {
|
|
||||||
mode = 0660
|
|
||||||
user = postfix
|
|
||||||
group = postfix
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
service auth-worker {
|
|
||||||
# Default is root.
|
|
||||||
# Drop privileges we don't need.
|
|
||||||
user = vmail
|
|
||||||
}
|
|
||||||
|
|
||||||
service imap-login {
|
|
||||||
# High-performance mode as described in
|
|
||||||
# <https://doc.dovecot.org/2.3/admin_manual/login_processes/#high-performance-mode>
|
|
||||||
#
|
|
||||||
# So-called high-security mode described in
|
|
||||||
# <https://doc.dovecot.org/2.3/admin_manual/login_processes/#high-security-mode>
|
|
||||||
# and enabled by default with `service_count = 1` starts one process per connection
|
|
||||||
# and has problems logging in thousands of users after Dovecot restart.
|
|
||||||
service_count = 0
|
|
||||||
|
|
||||||
# Increase virtual memory size limit.
|
|
||||||
# Since imap-login processes handle TLS connections
|
|
||||||
# even after logging users in
|
|
||||||
# and many connections are handled by each process,
|
|
||||||
# memory size limit should be increased.
|
|
||||||
#
|
|
||||||
# Otherwise the whole process eventually dies
|
|
||||||
# with an error similar to
|
|
||||||
# imap-login: Fatal: master: service(imap-login):
|
|
||||||
# child 1422951 returned error 83
|
|
||||||
# (Out of memory (service imap-login { vsz_limit=256 MB },
|
|
||||||
# you may need to increase it)
|
|
||||||
# and takes down all its TLS connections at once.
|
|
||||||
vsz_limit = 1G
|
|
||||||
|
|
||||||
# Avoid startup latency for new connections.
|
|
||||||
#
|
|
||||||
# Should be set to at least the number of CPU cores
|
|
||||||
# according to the documentation.
|
|
||||||
process_min_avail = 10
|
|
||||||
}
|
|
||||||
|
|
||||||
service anvil {
|
|
||||||
# We are disabling anvil penalty on failed login attempts
|
|
||||||
# because it can only detect brute forcing by IP address
|
|
||||||
# not by username. As the correct IP address is not handed
|
|
||||||
# to dovecot anyway, it is more of hindrance than of use.
|
|
||||||
# See <https://www.dovecot.org/list/dovecot/2012-May/135485.html> for details.
|
|
||||||
unix_listener anvil-auth-penalty {
|
|
||||||
mode = 0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
ssl = required
|
|
||||||
ssl_cert = </var/lib/acme/live/{{ config.mail_domain }}/fullchain
|
|
||||||
ssl_key = </var/lib/acme/live/{{ config.mail_domain }}/privkey
|
|
||||||
ssl_dh = </usr/share/dovecot/dh.pem
|
|
||||||
ssl_min_protocol = TLSv1.3
|
|
||||||
ssl_prefer_server_ciphers = yes
|
|
||||||
|
|
||||||
|
|
||||||
{% if config.imap_rawlog %}
|
|
||||||
service postlogin {
|
|
||||||
executable = script-login -d rawlog
|
|
||||||
unix_listener postlogin {
|
|
||||||
}
|
|
||||||
}
|
|
||||||
service imap {
|
|
||||||
executable = imap postlogin
|
|
||||||
}
|
|
||||||
|
|
||||||
protocol imap {
|
|
||||||
#rawlog_dir = /tmp/rawlog/%u
|
|
||||||
# Put .in and .out imap protocol logging files into per-user homedir
|
|
||||||
# You can use a command like this to combine into one protocol stream:
|
|
||||||
# sort -sn <(sed 's/ / C: /' *.in) <(sed 's/ / S: /' cat *.out)
|
|
||||||
|
|
||||||
rawlog_dir = %h
|
|
||||||
}
|
|
||||||
{% endif %}
|
|
||||||
@@ -1,14 +0,0 @@
|
|||||||
# delete already seen big mails after 7 days, in the INBOX
|
|
||||||
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/cur/*' -mtime +{{ config.delete_large_after }} -size +200k -type f -delete
|
|
||||||
# delete all mails after {{ config.delete_mails_after }} days, in the Inbox
|
|
||||||
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/cur/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
|
||||||
# or in any IMAP subfolder
|
|
||||||
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/.*/cur/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
|
||||||
# even if they are unseen
|
|
||||||
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/new/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
|
||||||
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/.*/new/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
|
||||||
# or only temporary (but then they shouldn't be around after {{ config.delete_mails_after }} days anyway).
|
|
||||||
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/tmp/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
|
||||||
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/.*/tmp/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
|
||||||
3 0 * * * vmail find {{ config.mailboxes_dir }} -name 'maildirsize' -type f -delete
|
|
||||||
4 0 * * * vmail /usr/local/lib/chatmaild/venv/bin/delete_inactive_users /usr/local/lib/chatmaild/chatmail.ini
|
|
||||||
@@ -1,19 +0,0 @@
|
|||||||
function dovecot_lua_notify_begin_txn(user)
|
|
||||||
return user
|
|
||||||
end
|
|
||||||
|
|
||||||
function dovecot_lua_notify_event_message_new(user, event)
|
|
||||||
local mbox = user:mailbox(event.mailbox)
|
|
||||||
mbox:sync()
|
|
||||||
|
|
||||||
if user.username ~= event.from_address then
|
|
||||||
-- Incoming message
|
|
||||||
-- Notify METADATA server about new message.
|
|
||||||
mbox:metadata_set("/private/messagenew", "")
|
|
||||||
end
|
|
||||||
|
|
||||||
mbox:free()
|
|
||||||
end
|
|
||||||
|
|
||||||
function dovecot_lua_notify_end_txn(ctx, success)
|
|
||||||
end
|
|
||||||
@@ -1,88 +0,0 @@
|
|||||||
import importlib
|
|
||||||
import io
|
|
||||||
import os
|
|
||||||
|
|
||||||
import qrcode
|
|
||||||
from PIL import Image, ImageDraw, ImageFont
|
|
||||||
|
|
||||||
|
|
||||||
def gen_qr_png_data(maildomain):
|
|
||||||
url = f"DCACCOUNT:https://{maildomain}/new"
|
|
||||||
image = gen_qr(maildomain, url)
|
|
||||||
temp = io.BytesIO()
|
|
||||||
image.save(temp, format="png")
|
|
||||||
temp.seek(0)
|
|
||||||
return temp
|
|
||||||
|
|
||||||
|
|
||||||
def gen_qr(maildomain, url):
|
|
||||||
# taken and modified from
|
|
||||||
# https://github.com/deltachat/mailadm/blob/master/src/mailadm/gen_qr.py
|
|
||||||
|
|
||||||
# info = f"{maildomain} invite code"
|
|
||||||
info = ""
|
|
||||||
|
|
||||||
# load QR code
|
|
||||||
qr = qrcode.QRCode(
|
|
||||||
version=1,
|
|
||||||
error_correction=qrcode.constants.ERROR_CORRECT_H,
|
|
||||||
box_size=1,
|
|
||||||
border=1,
|
|
||||||
)
|
|
||||||
qr.add_data(url)
|
|
||||||
qr.make(fit=True)
|
|
||||||
qr_img = qr.make_image(fill_color="black", back_color="white")
|
|
||||||
|
|
||||||
# paint all elements
|
|
||||||
ttf_path = str(
|
|
||||||
importlib.resources.files(__package__).joinpath("data/opensans-regular.ttf")
|
|
||||||
)
|
|
||||||
logo_red_path = str(
|
|
||||||
importlib.resources.files(__package__).joinpath("data/delta-chat-bw.png")
|
|
||||||
)
|
|
||||||
|
|
||||||
assert os.path.exists(ttf_path), ttf_path
|
|
||||||
font_size = 16
|
|
||||||
font = ImageFont.truetype(font=ttf_path, size=font_size)
|
|
||||||
|
|
||||||
num_lines = ((info).count("\n") + 1) if info else 0
|
|
||||||
|
|
||||||
size = width = 384
|
|
||||||
qr_padding = 6
|
|
||||||
text_height = font_size * num_lines
|
|
||||||
height = size + text_height
|
|
||||||
|
|
||||||
image = Image.new("RGBA", (width, height), "white")
|
|
||||||
qr_final_size = width - (qr_padding * 2)
|
|
||||||
|
|
||||||
if num_lines:
|
|
||||||
draw = ImageDraw.Draw(image)
|
|
||||||
|
|
||||||
# draw text
|
|
||||||
if hasattr(font, "getsize"):
|
|
||||||
info_pos = (width - font.getsize(info.strip())[0]) // 2
|
|
||||||
else:
|
|
||||||
info_pos = (width - font.getbbox(info.strip())[3]) // 2
|
|
||||||
|
|
||||||
draw.multiline_text(
|
|
||||||
(info_pos, size - qr_padding // 2),
|
|
||||||
info,
|
|
||||||
font=font,
|
|
||||||
fill="black",
|
|
||||||
align="right",
|
|
||||||
)
|
|
||||||
|
|
||||||
# paste QR code
|
|
||||||
image.paste(
|
|
||||||
qr_img.resize((qr_final_size, qr_final_size), resample=Image.NEAREST),
|
|
||||||
(qr_padding, qr_padding),
|
|
||||||
)
|
|
||||||
|
|
||||||
# background delta logo
|
|
||||||
logo2_img = Image.open(logo_red_path)
|
|
||||||
logo2_width = int(size / 6)
|
|
||||||
logo2 = logo2_img.resize((logo2_width, logo2_width), resample=Image.NEAREST)
|
|
||||||
pos = int((size / 2) - (logo2_width / 2))
|
|
||||||
image.paste(logo2, (pos, pos), mask=logo2)
|
|
||||||
|
|
||||||
return image
|
|
||||||
@@ -1,12 +0,0 @@
|
|||||||
[Unit]
|
|
||||||
Description=Iroh relay
|
|
||||||
|
|
||||||
[Service]
|
|
||||||
ExecStart=/usr/local/bin/iroh-relay --config-path /etc/iroh-relay.toml
|
|
||||||
Restart=on-failure
|
|
||||||
RestartSec=5s
|
|
||||||
User=iroh
|
|
||||||
Group=iroh
|
|
||||||
|
|
||||||
[Install]
|
|
||||||
WantedBy=multi-user.target
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
enable_relay = true
|
|
||||||
http_bind_addr = "[::]:3340"
|
|
||||||
enable_stun = true
|
|
||||||
enable_metrics = false
|
|
||||||
metrics_bind_addr = "127.0.0.1:9092"
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
*/5 * * * * root {{ config.execpath }} {{ config.mailboxes_dir }} >/var/www/html/metrics
|
|
||||||
@@ -1,64 +0,0 @@
|
|||||||
counter delivered_mail
|
|
||||||
/saved mail to INBOX$/ {
|
|
||||||
delivered_mail++
|
|
||||||
}
|
|
||||||
|
|
||||||
counter quota_exceeded
|
|
||||||
/Quota exceeded \(mailbox for user is full\)$/ {
|
|
||||||
quota_exceeded++
|
|
||||||
}
|
|
||||||
|
|
||||||
# Essentially the number of outgoing messages.
|
|
||||||
counter dkim_signed
|
|
||||||
/DKIM-Signature field added/ {
|
|
||||||
dkim_signed++
|
|
||||||
}
|
|
||||||
|
|
||||||
counter created_accounts
|
|
||||||
counter created_ci_accounts
|
|
||||||
counter created_nonci_accounts
|
|
||||||
|
|
||||||
/: Created address: (?P<addr>.*)$/ {
|
|
||||||
created_accounts++
|
|
||||||
|
|
||||||
$addr =~ /ci-/ {
|
|
||||||
created_ci_accounts++
|
|
||||||
} else {
|
|
||||||
created_nonci_accounts++
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
counter postfix_timeouts
|
|
||||||
/timeout after DATA/ {
|
|
||||||
postfix_timeouts++
|
|
||||||
}
|
|
||||||
|
|
||||||
counter postfix_noqueue
|
|
||||||
/postfix\/.*NOQUEUE/ {
|
|
||||||
postfix_noqueue++
|
|
||||||
}
|
|
||||||
|
|
||||||
counter warning_count
|
|
||||||
/warning/ {
|
|
||||||
warning_count++
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
counter filtered_mail_count
|
|
||||||
|
|
||||||
counter encrypted_mail_count
|
|
||||||
/Filtering encrypted mail\./ {
|
|
||||||
encrypted_mail_count++
|
|
||||||
filtered_mail_count++
|
|
||||||
}
|
|
||||||
|
|
||||||
counter unencrypted_mail_count
|
|
||||||
/Filtering unencrypted mail\./ {
|
|
||||||
unencrypted_mail_count++
|
|
||||||
filtered_mail_count++
|
|
||||||
}
|
|
||||||
|
|
||||||
counter rejected_unencrypted_mail_count
|
|
||||||
/Rejected unencrypted mail\./ {
|
|
||||||
rejected_unencrypted_mail_count++
|
|
||||||
}
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
[Unit]
|
|
||||||
Description=mtail
|
|
||||||
|
|
||||||
[Service]
|
|
||||||
Type=simple
|
|
||||||
ExecStart=/bin/sh -c "journalctl -f -o short-iso -n 0 | /usr/local/bin/mtail --address={{ address }} --port={{ port }} --progs /etc/mtail --logtostderr --logs -"
|
|
||||||
Restart=on-failure
|
|
||||||
|
|
||||||
[Install]
|
|
||||||
WantedBy=multi-user.target
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
version: STSv1
|
|
||||||
mode: enforce
|
|
||||||
mx: {{ config.domain_name }}
|
|
||||||
max_age: 2419200
|
|
||||||
@@ -1,139 +0,0 @@
|
|||||||
load_module modules/ngx_stream_module.so;
|
|
||||||
|
|
||||||
user www-data;
|
|
||||||
worker_processes auto;
|
|
||||||
|
|
||||||
# Increase the number of connections
|
|
||||||
# that a worker process can open
|
|
||||||
# to avoid errors such as
|
|
||||||
# accept4() failed (24: Too many open files)
|
|
||||||
# and
|
|
||||||
# socket() failed (24: Too many open files) while connecting to upstream
|
|
||||||
# in the logs.
|
|
||||||
# <https://nginx.org/en/docs/ngx_core_module.html#worker_rlimit_nofile>
|
|
||||||
worker_rlimit_nofile 2048;
|
|
||||||
pid /run/nginx.pid;
|
|
||||||
error_log syslog:server=unix:/dev/log,facility=local3;
|
|
||||||
|
|
||||||
events {
|
|
||||||
# Increase to avoid errors such as
|
|
||||||
# 768 worker_connections are not enough while connecting to upstream
|
|
||||||
# in the logs.
|
|
||||||
# <https://nginx.org/en/docs/ngx_core_module.html#worker_connections>
|
|
||||||
worker_connections 2048;
|
|
||||||
# multi_accept on;
|
|
||||||
}
|
|
||||||
|
|
||||||
stream {
|
|
||||||
map $ssl_preread_alpn_protocols $proxy {
|
|
||||||
default 127.0.0.1:8443;
|
|
||||||
~\bsmtp\b 127.0.0.1:465;
|
|
||||||
~\bimap\b 127.0.0.1:993;
|
|
||||||
}
|
|
||||||
|
|
||||||
server {
|
|
||||||
listen 443;
|
|
||||||
{% if not disable_ipv6 %}
|
|
||||||
listen [::]:443;
|
|
||||||
{% endif %}
|
|
||||||
proxy_pass $proxy;
|
|
||||||
ssl_preread on;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
http {
|
|
||||||
sendfile on;
|
|
||||||
tcp_nopush on;
|
|
||||||
|
|
||||||
# Do not emit nginx version on error pages.
|
|
||||||
server_tokens off;
|
|
||||||
|
|
||||||
include /etc/nginx/mime.types;
|
|
||||||
default_type application/octet-stream;
|
|
||||||
|
|
||||||
ssl_protocols TLSv1 TLSv1.1 TLSv1.2 TLSv1.3;
|
|
||||||
ssl_prefer_server_ciphers on;
|
|
||||||
ssl_certificate /var/lib/acme/live/{{ config.domain_name }}/fullchain;
|
|
||||||
ssl_certificate_key /var/lib/acme/live/{{ config.domain_name }}/privkey;
|
|
||||||
|
|
||||||
gzip on;
|
|
||||||
|
|
||||||
server {
|
|
||||||
|
|
||||||
listen 127.0.0.1:8443 ssl default_server;
|
|
||||||
|
|
||||||
root /var/www/html;
|
|
||||||
|
|
||||||
index index.html index.htm;
|
|
||||||
|
|
||||||
server_name _;
|
|
||||||
|
|
||||||
access_log syslog:server=unix:/dev/log,facility=local7;
|
|
||||||
|
|
||||||
location / {
|
|
||||||
# First attempt to serve request as file, then
|
|
||||||
# as directory, then fall back to displaying a 404.
|
|
||||||
try_files $uri $uri/ =404;
|
|
||||||
}
|
|
||||||
|
|
||||||
location /metrics {
|
|
||||||
default_type text/plain;
|
|
||||||
}
|
|
||||||
|
|
||||||
location /new {
|
|
||||||
if ($request_method = GET) {
|
|
||||||
# Redirect to Delta Chat,
|
|
||||||
# which will in turn do a POST request.
|
|
||||||
return 301 dcaccount:https://{{ config.domain_name }}/new;
|
|
||||||
}
|
|
||||||
|
|
||||||
fastcgi_pass unix:/run/fcgiwrap.socket;
|
|
||||||
include /etc/nginx/fastcgi_params;
|
|
||||||
fastcgi_param SCRIPT_FILENAME /usr/lib/cgi-bin/newemail.py;
|
|
||||||
}
|
|
||||||
|
|
||||||
# Old URL for compatibility with e.g. printed QR codes.
|
|
||||||
#
|
|
||||||
# Copy-paste instead of redirect to /new
|
|
||||||
# because Delta Chat core does not follow redirects.
|
|
||||||
#
|
|
||||||
# Redirects are only for browsers.
|
|
||||||
location /cgi-bin/newemail.py {
|
|
||||||
if ($request_method = GET) {
|
|
||||||
return 301 dcaccount:https://{{ config.domain_name }}/new;
|
|
||||||
}
|
|
||||||
|
|
||||||
fastcgi_pass unix:/run/fcgiwrap.socket;
|
|
||||||
include /etc/nginx/fastcgi_params;
|
|
||||||
fastcgi_param SCRIPT_FILENAME /usr/lib/cgi-bin/newemail.py;
|
|
||||||
}
|
|
||||||
|
|
||||||
# Proxy to iroh-relay service.
|
|
||||||
location /relay {
|
|
||||||
proxy_pass http://127.0.0.1:3340;
|
|
||||||
proxy_http_version 1.1;
|
|
||||||
|
|
||||||
# Upgrade header is normally set to "iroh derp http" or "websocket".
|
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
|
||||||
proxy_set_header Connection "upgrade";
|
|
||||||
}
|
|
||||||
|
|
||||||
location /relay/probe {
|
|
||||||
proxy_pass http://127.0.0.1:3340;
|
|
||||||
proxy_http_version 1.1;
|
|
||||||
}
|
|
||||||
|
|
||||||
location /generate_204 {
|
|
||||||
proxy_pass http://127.0.0.1:3340;
|
|
||||||
proxy_http_version 1.1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
# Redirect www. to non-www
|
|
||||||
server {
|
|
||||||
listen 127.0.0.1:8443 ssl;
|
|
||||||
server_name www.{{ config.domain_name }};
|
|
||||||
return 301 $scheme://{{ config.domain_name }}$request_uri;
|
|
||||||
access_log syslog:server=unix:/dev/log,facility=local7;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Binary file not shown.
@@ -1 +0,0 @@
|
|||||||
{{ config.opendkim_selector }}._domainkey.{{ config.domain_name }} {{ config.domain_name }}:{{ config.opendkim_selector }}:/etc/dkimkeys/{{ config.opendkim_selector }}.private
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
*@{{ config.domain_name }} {{ config.opendkim_selector }}._domainkey.{{ config.domain_name }}
|
|
||||||
@@ -1,28 +0,0 @@
|
|||||||
if odkim.internal_ip(ctx) == 1 then
|
|
||||||
-- Outgoing message will be signed,
|
|
||||||
-- no need to look for signatures.
|
|
||||||
return nil
|
|
||||||
end
|
|
||||||
|
|
||||||
nsigs = odkim.get_sigcount(ctx)
|
|
||||||
if nsigs == nil then
|
|
||||||
return nil
|
|
||||||
end
|
|
||||||
|
|
||||||
for i = 1, nsigs do
|
|
||||||
sig = odkim.get_sighandle(ctx, i - 1)
|
|
||||||
sigres = odkim.sig_result(sig)
|
|
||||||
|
|
||||||
-- All signatures that do not correspond to From:
|
|
||||||
-- were ignored in screen.lua and return sigres -1.
|
|
||||||
--
|
|
||||||
-- Any valid signature that was not ignored like this
|
|
||||||
-- means the message is acceptable.
|
|
||||||
if sigres == 0 then
|
|
||||||
return nil
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
odkim.set_reply(ctx, "554", "5.7.1", "No valid DKIM signature found")
|
|
||||||
odkim.set_result(ctx, SMFIS_REJECT)
|
|
||||||
return nil
|
|
||||||
@@ -1,66 +0,0 @@
|
|||||||
# OpenDKIM configuration.
|
|
||||||
|
|
||||||
Syslog yes
|
|
||||||
SyslogSuccess yes
|
|
||||||
#LogWhy no
|
|
||||||
|
|
||||||
# Common signing and verification parameters. In Debian, the "From" header is
|
|
||||||
# oversigned, because it is often the identity key used by reputation systems
|
|
||||||
# and thus somewhat security sensitive.
|
|
||||||
Canonicalization relaxed/simple
|
|
||||||
OversignHeaders From
|
|
||||||
|
|
||||||
On-BadSignature reject
|
|
||||||
On-KeyNotFound reject
|
|
||||||
On-NoSignature reject
|
|
||||||
|
|
||||||
# Signing domain, selector, and key (required). For example, perform signing
|
|
||||||
# for domain "example.com" with selector "2020" (2020._domainkey.example.com),
|
|
||||||
# using the private key stored in /etc/dkimkeys/example.private. More granular
|
|
||||||
# setup options can be found in /usr/share/doc/opendkim/README.opendkim.
|
|
||||||
Domain {{ config.domain_name }}
|
|
||||||
Selector {{ config.opendkim_selector }}
|
|
||||||
KeyFile /etc/dkimkeys/{{ config.opendkim_selector }}.private
|
|
||||||
KeyTable /etc/dkimkeys/KeyTable
|
|
||||||
SigningTable refile:/etc/dkimkeys/SigningTable
|
|
||||||
|
|
||||||
# Sign Autocrypt header in addition to the default specified in RFC 6376.
|
|
||||||
#
|
|
||||||
# Default list is here:
|
|
||||||
# <https://github.com/trusteddomainproject/OpenDKIM/blob/5c539587561785a66c1f67f720f2fb741f320785/libopendkim/dkim.c#L221-L245>
|
|
||||||
SignHeaders *,+autocrypt,+content-type
|
|
||||||
|
|
||||||
# Prevent addition of second Content-Type header
|
|
||||||
# and other important headers that should not be added
|
|
||||||
# after signing the message.
|
|
||||||
# See
|
|
||||||
# <https://www.zone.eu/blog/2024/05/17/bimi-and-dmarc-cant-save-you/>
|
|
||||||
# and RFC 6376 (page 41) for reference.
|
|
||||||
#
|
|
||||||
# We don't use "l=" body length so the problem described in RFC 6376
|
|
||||||
# is not applicable, but adding e.g. a second "From" header
|
|
||||||
# or second "Autocrypt" header is better prevented in any case.
|
|
||||||
#
|
|
||||||
# Default is empty.
|
|
||||||
OversignHeaders from,reply-to,subject,date,to,cc,resent-date,resent-from,resent-sender,resent-to,resent-cc,in-reply-to,references,list-id,list-help,list-unsubscribe,list-subscribe,list-post,list-owner,list-archive,autocrypt
|
|
||||||
|
|
||||||
# Script to ignore signatures that do not correspond to the From: domain.
|
|
||||||
ScreenPolicyScript /etc/opendkim/screen.lua
|
|
||||||
|
|
||||||
# Script to reject mails without a valid DKIM signature.
|
|
||||||
FinalPolicyScript /etc/opendkim/final.lua
|
|
||||||
|
|
||||||
# In Debian, opendkim runs as user "opendkim". A umask of 007 is required when
|
|
||||||
# using a local socket with MTAs that access the socket as a non-privileged
|
|
||||||
# user (for example, Postfix). You may need to add user "postfix" to group
|
|
||||||
# "opendkim" in that case.
|
|
||||||
UserID opendkim
|
|
||||||
UMask 007
|
|
||||||
|
|
||||||
Socket local:/var/spool/postfix/opendkim/opendkim.sock
|
|
||||||
|
|
||||||
PidFile /run/opendkim/opendkim.pid
|
|
||||||
|
|
||||||
# The trust anchor enables DNSSEC. In Debian, the trust anchor file is provided
|
|
||||||
# by the package dns-root-data.
|
|
||||||
TrustAnchorFile /usr/share/dns/root.key
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
-- Ignore signatures that do not correspond to the From: domain.
|
|
||||||
|
|
||||||
from_domain = odkim.get_fromdomain(ctx)
|
|
||||||
if from_domain == nil then
|
|
||||||
return nil
|
|
||||||
end
|
|
||||||
|
|
||||||
n = odkim.get_sigcount(ctx)
|
|
||||||
if n == nil then
|
|
||||||
return nil
|
|
||||||
end
|
|
||||||
|
|
||||||
for i = 1, n do
|
|
||||||
sig = odkim.get_sighandle(ctx, i - 1)
|
|
||||||
sig_domain = odkim.sig_getdomain(sig)
|
|
||||||
if from_domain ~= sig_domain then
|
|
||||||
odkim.sig_ignore(sig)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
return nil
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
[Service]
|
|
||||||
Restart=always
|
|
||||||
RuntimeMaxSec=1d
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
/^(.*)$/ ${1}
|
|
||||||
@@ -1,89 +0,0 @@
|
|||||||
myorigin = {{ config.mail_domain }}
|
|
||||||
|
|
||||||
smtpd_banner = $myhostname ESMTP $mail_name (Debian/GNU)
|
|
||||||
biff = no
|
|
||||||
|
|
||||||
# appending .domain is the MUA's job.
|
|
||||||
append_dot_mydomain = no
|
|
||||||
|
|
||||||
# Uncomment the next line to generate "delayed mail" warnings
|
|
||||||
#delay_warning_time = 4h
|
|
||||||
|
|
||||||
readme_directory = no
|
|
||||||
|
|
||||||
# See http://www.postfix.org/COMPATIBILITY_README.html
|
|
||||||
compatibility_level = 3.6
|
|
||||||
|
|
||||||
# TLS parameters
|
|
||||||
smtpd_tls_cert_file=/var/lib/acme/live/{{ config.mail_domain }}/fullchain
|
|
||||||
smtpd_tls_key_file=/var/lib/acme/live/{{ config.mail_domain }}/privkey
|
|
||||||
smtpd_tls_security_level=may
|
|
||||||
|
|
||||||
smtp_tls_CApath=/etc/ssl/certs
|
|
||||||
smtp_tls_security_level=verify
|
|
||||||
# Send SNI extension when connecting to other servers.
|
|
||||||
# <https://www.postfix.org/postconf.5.html#smtp_tls_servername>
|
|
||||||
smtp_tls_servername = hostname
|
|
||||||
smtp_tls_session_cache_database = btree:${data_directory}/smtp_scache
|
|
||||||
smtp_tls_policy_maps = inline:{nauta.cu=may}
|
|
||||||
smtpd_tls_protocols = >=TLSv1.2
|
|
||||||
|
|
||||||
# Disable anonymous cipher suites
|
|
||||||
# and known insecure algorithms.
|
|
||||||
#
|
|
||||||
# Disabling anonymous ciphers
|
|
||||||
# does not generally improve security
|
|
||||||
# because clients that want to verify certificate
|
|
||||||
# will not select them anyway,
|
|
||||||
# but makes cipher suite list shorter and security scanners happy.
|
|
||||||
# See <https://www.postfix.org/TLS_README.html> for discussion.
|
|
||||||
#
|
|
||||||
# Only ancient insecure ciphers should be disabled here
|
|
||||||
# as MTA clients that do not support more secure cipher
|
|
||||||
# likely do not support MTA-STS either and will
|
|
||||||
# otherwise fall back to using plaintext connection.
|
|
||||||
smtpd_tls_exclude_ciphers = aNULL, RC4, MD5, DES
|
|
||||||
|
|
||||||
# Override client's preference order.
|
|
||||||
# <https://www.postfix.org/postconf.5.html#tls_preempt_cipherlist>
|
|
||||||
#
|
|
||||||
# This is mostly to ensure cipher suites with forward secrecy
|
|
||||||
# are preferred over non cipher suites without forward secrecy.
|
|
||||||
# See <https://www.postfix.org/FORWARD_SECRECY_README.html#server_fs>.
|
|
||||||
tls_preempt_cipherlist = yes
|
|
||||||
|
|
||||||
smtpd_relay_restrictions = permit_mynetworks permit_sasl_authenticated defer_unauth_destination
|
|
||||||
myhostname = {{ config.mail_domain }}
|
|
||||||
alias_maps = hash:/etc/aliases
|
|
||||||
alias_database = hash:/etc/aliases
|
|
||||||
|
|
||||||
# Postfix does not deliver mail for any domain by itself.
|
|
||||||
# Primary domain is listed in `virtual_mailbox_domains` instead
|
|
||||||
# and handed over to Dovecot.
|
|
||||||
mydestination =
|
|
||||||
|
|
||||||
relayhost =
|
|
||||||
mynetworks = 127.0.0.0/8 [::ffff:127.0.0.0]/104 [::1]/128
|
|
||||||
mailbox_size_limit = 0
|
|
||||||
message_size_limit = {{config.max_message_size}}
|
|
||||||
recipient_delimiter = +
|
|
||||||
inet_interfaces = all
|
|
||||||
{% if disable_ipv6 %}
|
|
||||||
inet_protocols = ipv4
|
|
||||||
{% else %}
|
|
||||||
inet_protocols = all
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
virtual_transport = lmtp:unix:private/dovecot-lmtp
|
|
||||||
virtual_mailbox_domains = {{ config.mail_domain }}
|
|
||||||
|
|
||||||
mua_client_restrictions = permit_sasl_authenticated, reject
|
|
||||||
mua_sender_restrictions = reject_sender_login_mismatch, permit_sasl_authenticated, reject
|
|
||||||
mua_helo_restrictions = permit_mynetworks, reject_invalid_helo_hostname, reject_non_fqdn_helo_hostname, permit
|
|
||||||
|
|
||||||
# 1:1 map MAIL FROM to SASL login name.
|
|
||||||
smtpd_sender_login_maps = regexp:/etc/postfix/login_map
|
|
||||||
|
|
||||||
# Do not lookup SMTP client hostnames to reduce delays
|
|
||||||
# and avoid unnecessary DNS requests.
|
|
||||||
smtpd_peername_lookup = no
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
[Unit]
|
|
||||||
Description=Postfix MTA-STS resolver daemon
|
|
||||||
|
|
||||||
[Service]
|
|
||||||
ExecStart=/usr/local/lib/postfix-mta-sts-resolver/bin/mta-sts-daemon
|
|
||||||
Restart=always
|
|
||||||
RestartSec=30
|
|
||||||
|
|
||||||
[Install]
|
|
||||||
WantedBy=multi-user.target
|
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
host: 127.0.0.1
|
|
||||||
port: 8461
|
|
||||||
reuse_port: true
|
|
||||||
shutdown_timeout: 20
|
|
||||||
cache:
|
|
||||||
type: internal
|
|
||||||
options:
|
|
||||||
cache_size: 10000
|
|
||||||
proactive_policy_fetching:
|
|
||||||
enabled: true
|
|
||||||
default_zone:
|
|
||||||
strict_testing: false
|
|
||||||
timeout: 4
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
/^Received:/ IGNORE
|
|
||||||
/^X-Originating-IP:/ IGNORE
|
|
||||||
/^X-Mailer:/ IGNORE
|
|
||||||
/^User-Agent:/ IGNORE
|
|
||||||
/^Subject:/ REPLACE Subject: [...]
|
|
||||||
@@ -1,19 +0,0 @@
|
|||||||
import os
|
|
||||||
|
|
||||||
import pyinfra
|
|
||||||
from pyinfra import host
|
|
||||||
|
|
||||||
from proxy import configure_ssh, configure_proxy
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
ipv4_relay = os.getenv("IPV4_RELAY")
|
|
||||||
ipv6_relay = os.getenv("IPV6_RELAY")
|
|
||||||
|
|
||||||
configure_ssh()
|
|
||||||
if host.data.get("ssh_port") not in (None, 22):
|
|
||||||
configure_proxy(ipv4_relay, ipv6_relay)
|
|
||||||
|
|
||||||
|
|
||||||
if pyinfra.is_cli:
|
|
||||||
main()
|
|
||||||
@@ -1,63 +0,0 @@
|
|||||||
import importlib
|
|
||||||
|
|
||||||
from pyinfra import host
|
|
||||||
from pyinfra.operations import files, server, apt, systemd
|
|
||||||
|
|
||||||
def configure_ssh():
|
|
||||||
files.replace(
|
|
||||||
name="Configure sshd to use port 2222",
|
|
||||||
path="/etc/ssh/sshd_config",
|
|
||||||
text="Port 22\n",
|
|
||||||
replace="Port 2222\n",
|
|
||||||
)
|
|
||||||
systemd.service(
|
|
||||||
name="apply SSH config",
|
|
||||||
service="ssh",
|
|
||||||
reloaded=True,
|
|
||||||
)
|
|
||||||
apt.update()
|
|
||||||
|
|
||||||
|
|
||||||
def configure_proxy(ipv4_relay, ipv6_relay):
|
|
||||||
files.put(
|
|
||||||
name="Configure nftables",
|
|
||||||
src=importlib.resources.files(__package__).joinpath("proxy_files/nftables.conf.j2"),
|
|
||||||
dest="/etc/nftables.conf",
|
|
||||||
ipv4_address=ipv4_relay, # :todo what if only one of them is specified?
|
|
||||||
ipv6_address=ipv6_relay,
|
|
||||||
)
|
|
||||||
|
|
||||||
server.sysctl(name="enable IPv4 forwarding", key="net.ipv4.ip_forward", value=1, persist=True)
|
|
||||||
|
|
||||||
server.sysctl(
|
|
||||||
name="enable IPv6 forwarding",
|
|
||||||
key="net.ipv6.conf.all.forwarding",
|
|
||||||
value=1,
|
|
||||||
persist=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
server.shell(
|
|
||||||
name="apply forwarding configuration",
|
|
||||||
commands=[
|
|
||||||
"sysctl -p",
|
|
||||||
"nft -f /etc/nftables.conf",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
if host.data.get("floating_ips"):
|
|
||||||
i = 0
|
|
||||||
for floating_ip in host.data.get("floating_ips"):
|
|
||||||
i += 1
|
|
||||||
files.template(
|
|
||||||
name="Add floating IPs",
|
|
||||||
src="servers/proxy-nine/files/60-floating.ip.cfg.j2",
|
|
||||||
dest=f"/etc/network/interfaces.d/{59 + i}-floating.ip.cfg",
|
|
||||||
ip_address=floating_ip,
|
|
||||||
i=i,
|
|
||||||
)
|
|
||||||
|
|
||||||
systemd.service(
|
|
||||||
name="apply floating IPs",
|
|
||||||
service="networking",
|
|
||||||
restarted=True,
|
|
||||||
)
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
auto eth0:{{ i }}
|
|
||||||
iface eth0:{{ i }} inet static
|
|
||||||
address {{ ip_address }}
|
|
||||||
netmask 32
|
|
||||||
@@ -1,67 +0,0 @@
|
|||||||
#!/usr/sbin/nft -f
|
|
||||||
|
|
||||||
flush ruleset
|
|
||||||
|
|
||||||
define wan = eth0
|
|
||||||
|
|
||||||
# which ports to proxy
|
|
||||||
define ports = { smtp, http, https, imap, imaps, submission, submissions }
|
|
||||||
|
|
||||||
# the host we want to proxy to
|
|
||||||
define ipv4_address = {{ ipv4_address }}
|
|
||||||
define ipv6_address = [{{ ipv6_address }}]
|
|
||||||
|
|
||||||
table ip nat {
|
|
||||||
chain prerouting {
|
|
||||||
type nat hook prerouting priority dstnat; policy accept;
|
|
||||||
iif $wan tcp dport $ports dnat to $ipv4_address
|
|
||||||
}
|
|
||||||
|
|
||||||
chain postrouting {
|
|
||||||
type nat hook postrouting priority 0;
|
|
||||||
|
|
||||||
oifname $wan masquerade
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
table ip6 nat {
|
|
||||||
chain prerouting {
|
|
||||||
type nat hook prerouting priority dstnat; policy accept;
|
|
||||||
iif $wan tcp dport $ports dnat to $ipv6_address
|
|
||||||
}
|
|
||||||
|
|
||||||
chain postrouting {
|
|
||||||
type nat hook postrouting priority 0;
|
|
||||||
|
|
||||||
oifname $wan masquerade
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
table inet filter {
|
|
||||||
chain input {
|
|
||||||
type filter hook input priority filter; policy drop;
|
|
||||||
|
|
||||||
# Accept ICMP.
|
|
||||||
# It is especially important to accept ICMPv6 ND messages,
|
|
||||||
# otherwise IPv6 connectivity breaks.
|
|
||||||
icmp type { echo-request } accept
|
|
||||||
icmpv6 type { echo-request, nd-neighbor-solicit, nd-router-advert, nd-neighbor-advert } accept
|
|
||||||
|
|
||||||
# Allow incoming SSH connections.
|
|
||||||
tcp dport { 22, 2222 } accept
|
|
||||||
# Allow incoming shadowsocks connections.
|
|
||||||
tcp dport { 8388 } accept
|
|
||||||
|
|
||||||
ct state established accept
|
|
||||||
}
|
|
||||||
chain forward {
|
|
||||||
type filter hook forward priority filter; policy drop;
|
|
||||||
|
|
||||||
ct state established accept
|
|
||||||
ip daddr $ipv4_address counter accept
|
|
||||||
ip6 daddr $ipv6_address counter accept
|
|
||||||
}
|
|
||||||
chain output {
|
|
||||||
type filter hook output priority filter;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,12 +0,0 @@
|
|||||||
"""
|
|
||||||
|
|
||||||
The 'cmdeploy.remote' sub package contains modules with remotely executing functions.
|
|
||||||
|
|
||||||
Its "_sshexec_bootstrap" module is executed remotely through `SSHExec`
|
|
||||||
and its main() loop there stays connected via a command channel,
|
|
||||||
ready to receive function invocations ("command") and return results.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from . import rdns, rshell
|
|
||||||
|
|
||||||
__all__ = ["rdns", "rshell"]
|
|
||||||
@@ -1,30 +0,0 @@
|
|||||||
import builtins
|
|
||||||
import importlib
|
|
||||||
import traceback
|
|
||||||
|
|
||||||
## Function Execution server
|
|
||||||
|
|
||||||
|
|
||||||
def _run_loop(cmd_channel):
|
|
||||||
while cmd := cmd_channel.receive():
|
|
||||||
cmd_channel.send(_handle_one_request(cmd))
|
|
||||||
|
|
||||||
|
|
||||||
def _handle_one_request(cmd):
|
|
||||||
pymod_path, func_name, kwargs = cmd
|
|
||||||
try:
|
|
||||||
mod = importlib.import_module(pymod_path)
|
|
||||||
func = getattr(mod, func_name)
|
|
||||||
res = func(**kwargs)
|
|
||||||
return ("finish", res)
|
|
||||||
except:
|
|
||||||
data = traceback.format_exc()
|
|
||||||
return ("error", data)
|
|
||||||
|
|
||||||
|
|
||||||
def main(channel):
|
|
||||||
# enable simple "print" logging
|
|
||||||
|
|
||||||
builtins.print = lambda x="": channel.send(("log", x))
|
|
||||||
|
|
||||||
_run_loop(channel)
|
|
||||||
@@ -1,105 +0,0 @@
|
|||||||
"""
|
|
||||||
Pure python functions which execute remotely in a system Python interpreter.
|
|
||||||
|
|
||||||
All functions of this module
|
|
||||||
|
|
||||||
- need to get and and return Python builtin data types only,
|
|
||||||
|
|
||||||
- can only use standard library dependencies,
|
|
||||||
|
|
||||||
- can freely call each other.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import re
|
|
||||||
|
|
||||||
from .rshell import CalledProcessError, shell, log_progress
|
|
||||||
|
|
||||||
|
|
||||||
def perform_initial_checks(mail_domain, pre_command=""):
|
|
||||||
"""Collecting initial DNS settings."""
|
|
||||||
assert mail_domain
|
|
||||||
if not shell("dig", fail_ok=True, print=log_progress):
|
|
||||||
shell("apt-get update && apt-get install -y dnsutils", print=log_progress)
|
|
||||||
A = query_dns("A", mail_domain)
|
|
||||||
AAAA = query_dns("AAAA", mail_domain)
|
|
||||||
MTA_STS = query_dns("CNAME", f"mta-sts.{mail_domain}")
|
|
||||||
WWW = query_dns("CNAME", f"www.{mail_domain}")
|
|
||||||
|
|
||||||
res = dict(mail_domain=mail_domain, A=A, AAAA=AAAA, MTA_STS=MTA_STS, WWW=WWW)
|
|
||||||
res["acme_account_url"] = shell(pre_command + "acmetool account-url", fail_ok=True, print=log_progress)
|
|
||||||
res["dkim_entry"], res["web_dkim_entry"] = get_dkim_entry(
|
|
||||||
mail_domain, pre_command, dkim_selector="opendkim"
|
|
||||||
)
|
|
||||||
|
|
||||||
if not MTA_STS or not WWW or (not A and not AAAA):
|
|
||||||
return res
|
|
||||||
|
|
||||||
# parse out sts-id if exists, example: "v=STSv1; id=2090123"
|
|
||||||
parts = query_dns("TXT", f"_mta-sts.{mail_domain}").split("id=")
|
|
||||||
res["sts_id"] = parts[1].rstrip('"') if len(parts) == 2 else ""
|
|
||||||
return res
|
|
||||||
|
|
||||||
|
|
||||||
def get_dkim_entry(mail_domain, pre_command, dkim_selector):
|
|
||||||
try:
|
|
||||||
dkim_pubkey = shell(
|
|
||||||
f"{pre_command}openssl rsa -in /etc/dkimkeys/{dkim_selector}.private "
|
|
||||||
"-pubout 2>/dev/null | awk '/-/{next}{printf(\"%s\",$0)}'",
|
|
||||||
print=log_progress
|
|
||||||
)
|
|
||||||
except CalledProcessError:
|
|
||||||
return
|
|
||||||
dkim_value_raw = f"v=DKIM1;k=rsa;p={dkim_pubkey};s=email;t=s"
|
|
||||||
dkim_value = '" "'.join(re.findall(".{1,255}", dkim_value_raw))
|
|
||||||
web_dkim_value = "".join(re.findall(".{1,255}", dkim_value_raw))
|
|
||||||
return (
|
|
||||||
f'{dkim_selector}._domainkey.{mail_domain}. TXT "{dkim_value}"',
|
|
||||||
f'{dkim_selector}._domainkey.{mail_domain}. TXT "{web_dkim_value}"',
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def query_dns(typ, domain):
|
|
||||||
# Get autoritative nameserver from the SOA record.
|
|
||||||
soa_answers = [
|
|
||||||
x.split()
|
|
||||||
for x in shell(f"dig -r -q {domain} -t SOA +noall +authority +answer", print=log_progress).split(
|
|
||||||
"\n"
|
|
||||||
)
|
|
||||||
]
|
|
||||||
soa = [a for a in soa_answers if len(a) >= 3 and a[3] == "SOA"]
|
|
||||||
if not soa:
|
|
||||||
return
|
|
||||||
ns = soa[0][4]
|
|
||||||
|
|
||||||
# Query authoritative nameserver directly to bypass DNS cache.
|
|
||||||
res = shell(f"dig @{ns} -r -q {domain} -t {typ} +short", print=log_progress)
|
|
||||||
if res:
|
|
||||||
return res.split("\n")[0]
|
|
||||||
return ""
|
|
||||||
|
|
||||||
|
|
||||||
def check_zonefile(zonefile, verbose=True):
|
|
||||||
"""Check expected zone file entries."""
|
|
||||||
required = True
|
|
||||||
required_diff = []
|
|
||||||
recommended_diff = []
|
|
||||||
|
|
||||||
for zf_line in zonefile.splitlines():
|
|
||||||
if "; Recommended" in zf_line:
|
|
||||||
required = False
|
|
||||||
continue
|
|
||||||
if not zf_line.strip() or zf_line.startswith(";"):
|
|
||||||
continue
|
|
||||||
print(f"dns-checking {zf_line!r}") if verbose else log_progress("")
|
|
||||||
zf_domain, zf_typ, zf_value = zf_line.split(maxsplit=2)
|
|
||||||
zf_domain = zf_domain.rstrip(".")
|
|
||||||
zf_value = zf_value.strip()
|
|
||||||
query_value = query_dns(zf_typ, zf_domain)
|
|
||||||
if zf_value != query_value:
|
|
||||||
assert zf_typ in ("A", "AAAA", "CNAME", "CAA", "SRV", "MX", "TXT"), zf_line
|
|
||||||
if required:
|
|
||||||
required_diff.append(zf_line)
|
|
||||||
else:
|
|
||||||
recommended_diff.append(zf_line)
|
|
||||||
|
|
||||||
return required_diff, recommended_diff
|
|
||||||
@@ -1,59 +0,0 @@
|
|||||||
import sys
|
|
||||||
|
|
||||||
from subprocess import DEVNULL, CalledProcessError, check_output
|
|
||||||
|
|
||||||
|
|
||||||
def log_progress(data):
|
|
||||||
sys.stderr.write(".")
|
|
||||||
sys.stderr.flush()
|
|
||||||
|
|
||||||
|
|
||||||
def shell(command, fail_ok=False, print=print):
|
|
||||||
print(f"$ {command}")
|
|
||||||
args = dict(shell=True)
|
|
||||||
if fail_ok:
|
|
||||||
args["stderr"] = DEVNULL
|
|
||||||
try:
|
|
||||||
return check_output(command, **args).decode().rstrip()
|
|
||||||
except CalledProcessError:
|
|
||||||
if not fail_ok:
|
|
||||||
raise
|
|
||||||
return ""
|
|
||||||
|
|
||||||
|
|
||||||
def get_port_service(port: int) -> str:
|
|
||||||
return shell(
|
|
||||||
"ss -lptn 'src :%d' | awk 'NR>1 {print $6,$7}' | sed 's/users:((\"//;s/\".*//'"
|
|
||||||
% (port,)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def chatmail_version():
|
|
||||||
version = shell("cat /etc/chatmail-version")
|
|
||||||
if "cat: /etc/chatmail-version:" in version:
|
|
||||||
version = None
|
|
||||||
return version
|
|
||||||
|
|
||||||
|
|
||||||
def get_systemd_running():
|
|
||||||
lines = shell("systemctl --type=service --state=running").split("\n")
|
|
||||||
return [line for line in lines if line.startswith(" ")]
|
|
||||||
|
|
||||||
|
|
||||||
def write_numbytes(path, num):
|
|
||||||
with open(path, "w") as f:
|
|
||||||
f.write("x" * num)
|
|
||||||
|
|
||||||
|
|
||||||
def dovecot_recalc_quota(user):
|
|
||||||
shell(f"doveadm quota recalc -u {user}")
|
|
||||||
output = shell(f"doveadm quota get -u {user}")
|
|
||||||
#
|
|
||||||
# Quota name Type Value Limit %
|
|
||||||
# User quota STORAGE 5 102400 0
|
|
||||||
# User quota MESSAGE 2 - 0
|
|
||||||
#
|
|
||||||
for line in output.split("\n"):
|
|
||||||
parts = line.split()
|
|
||||||
if parts[2] == "STORAGE":
|
|
||||||
return dict(value=int(parts[3]), limit=int(parts[4]), percent=int(parts[5]))
|
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
[Unit]
|
|
||||||
Description=Chatmail dict proxy for IMAP METADATA
|
|
||||||
|
|
||||||
[Service]
|
|
||||||
ExecStart={execpath} /run/chatmail-metadata/metadata.socket {config_path}
|
|
||||||
Restart=always
|
|
||||||
RestartSec=30
|
|
||||||
User=vmail
|
|
||||||
RuntimeDirectory=chatmail-metadata
|
|
||||||
UMask=0077
|
|
||||||
|
|
||||||
[Install]
|
|
||||||
WantedBy=multi-user.target
|
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
[Unit]
|
|
||||||
Description=Chatmail dict authentication proxy for dovecot
|
|
||||||
|
|
||||||
[Service]
|
|
||||||
ExecStart={execpath} /run/doveauth/doveauth.socket {config_path}
|
|
||||||
Restart=always
|
|
||||||
RestartSec=30
|
|
||||||
User=vmail
|
|
||||||
RuntimeDirectory=doveauth
|
|
||||||
UMask=0077
|
|
||||||
|
|
||||||
[Install]
|
|
||||||
WantedBy=multi-user.target
|
|
||||||
@@ -1,67 +0,0 @@
|
|||||||
[Unit]
|
|
||||||
Description=Chatmail echo bot for testing it works
|
|
||||||
|
|
||||||
[Service]
|
|
||||||
ExecStart={execpath} {config_path}
|
|
||||||
Environment="PATH={remote_venv_dir}:$PATH"
|
|
||||||
Restart=always
|
|
||||||
RestartSec=30
|
|
||||||
|
|
||||||
User=echobot
|
|
||||||
Group=echobot
|
|
||||||
|
|
||||||
# Create /var/lib/echobot
|
|
||||||
StateDirectory=echobot
|
|
||||||
|
|
||||||
# Create /run/echobot
|
|
||||||
#
|
|
||||||
# echobot stores /run/echobot/password
|
|
||||||
# with a password there, which doveauth then reads.
|
|
||||||
RuntimeDirectory=echobot
|
|
||||||
|
|
||||||
WorkingDirectory=/var/lib/echobot
|
|
||||||
|
|
||||||
# Apply security restrictions suggested by
|
|
||||||
# systemd-analyze security echobot.service
|
|
||||||
CapabilityBoundingSet=
|
|
||||||
LockPersonality=true
|
|
||||||
MemoryDenyWriteExecute=true
|
|
||||||
NoNewPrivileges=true
|
|
||||||
PrivateDevices=true
|
|
||||||
PrivateMounts=true
|
|
||||||
PrivateTmp=true
|
|
||||||
|
|
||||||
# We need to know about doveauth user to give it access to /run/echobot/password
|
|
||||||
PrivateUsers=false
|
|
||||||
|
|
||||||
ProtectClock=true
|
|
||||||
ProtectControlGroups=true
|
|
||||||
ProtectHostname=true
|
|
||||||
ProtectKernelLogs=true
|
|
||||||
ProtectKernelModules=true
|
|
||||||
ProtectKernelTunables=true
|
|
||||||
ProtectProc=noaccess
|
|
||||||
|
|
||||||
# Should be "strict", but we currently write /accounts folder in a protected path
|
|
||||||
ProtectSystem=full
|
|
||||||
|
|
||||||
RemoveIPC=true
|
|
||||||
RestrictAddressFamilies=AF_INET AF_INET6
|
|
||||||
RestrictNamespaces=true
|
|
||||||
RestrictRealtime=true
|
|
||||||
RestrictSUIDSGID=true
|
|
||||||
SystemCallArchitectures=native
|
|
||||||
SystemCallFilter=~@clock
|
|
||||||
SystemCallFilter=~@cpu-emulation
|
|
||||||
SystemCallFilter=~@debug
|
|
||||||
SystemCallFilter=~@module
|
|
||||||
SystemCallFilter=~@mount
|
|
||||||
SystemCallFilter=~@obsolete
|
|
||||||
SystemCallFilter=~@raw-io
|
|
||||||
SystemCallFilter=~@reboot
|
|
||||||
SystemCallFilter=~@resources
|
|
||||||
SystemCallFilter=~@swap
|
|
||||||
UMask=0077
|
|
||||||
|
|
||||||
[Install]
|
|
||||||
WantedBy=multi-user.target
|
|
||||||
@@ -1,12 +0,0 @@
|
|||||||
[Unit]
|
|
||||||
Description=Incoming Chatmail Postfix before queue filter
|
|
||||||
|
|
||||||
[Service]
|
|
||||||
ExecStart={execpath} {config_path} incoming
|
|
||||||
Restart=always
|
|
||||||
RestartSec=30
|
|
||||||
User=vmail
|
|
||||||
|
|
||||||
[Install]
|
|
||||||
WantedBy=multi-user.target
|
|
||||||
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user