mirror of
https://github.com/chatmail/relay.git
synced 2026-05-10 16:04:37 +00:00
Compare commits
189 Commits
hagi/metri
...
link2xt/do
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d96c9221c4 | ||
|
|
d0ed8830f7 | ||
|
|
a6bdbb748b | ||
|
|
ba811c2e1c | ||
|
|
3ef45c2ffd | ||
|
|
8d72d770a3 | ||
|
|
e32d81520a | ||
|
|
e973bc1f41 | ||
|
|
cdfce25494 | ||
|
|
a1e80fdca1 | ||
|
|
7aa876a0bb | ||
|
|
dee36638cf | ||
|
|
effd5bc6e9 | ||
|
|
29eabba5a0 | ||
|
|
e7a9bf2a6c | ||
|
|
93423ee1d1 | ||
|
|
888f7e669a | ||
|
|
1f1d1fdf59 | ||
|
|
dcab097e00 | ||
|
|
a9bdc3d1d0 | ||
|
|
a7101be284 | ||
|
|
3ee0b7e288 | ||
|
|
e3f0bb195d | ||
|
|
fae0863633 | ||
|
|
7a64333c25 | ||
|
|
1331e7e77a | ||
|
|
ac1f2dadad | ||
|
|
4858a67be1 | ||
|
|
1238ed95da | ||
|
|
b32a57105d | ||
|
|
87d6d2d5cb | ||
|
|
5b05e0194f | ||
|
|
24843abed3 | ||
|
|
1f96334f8e | ||
|
|
4db953b22b | ||
|
|
8e847093da | ||
|
|
023253ad9c | ||
|
|
89c65d30d3 | ||
|
|
c4499d6c85 | ||
|
|
29888c2f03 | ||
|
|
eaff92cebc | ||
|
|
4f4fd6a90c | ||
|
|
da3eb89b67 | ||
|
|
765f081f6f | ||
|
|
5c87d69d46 | ||
|
|
686f32d6b3 | ||
|
|
68a62537e1 | ||
|
|
e3ff82544a | ||
|
|
eddfadaf7f | ||
|
|
1b3e2b32f2 | ||
|
|
353d3bfb3f | ||
|
|
4a8fc84c82 | ||
|
|
641a6f8d2e | ||
|
|
7f3996ef58 | ||
|
|
dd770f7e10 | ||
|
|
4dbb19db46 | ||
|
|
ad151c2cc1 | ||
|
|
28f357b598 | ||
|
|
bf0f6e2303 | ||
|
|
35a0f07887 | ||
|
|
52aa7cad06 | ||
|
|
22d77f4680 | ||
|
|
46c34bfbea | ||
|
|
052fb64a3d | ||
|
|
e8bf051cd0 | ||
|
|
d3c29b2f6e | ||
|
|
ef7f4965d4 | ||
|
|
c593906c26 | ||
|
|
27eea671dc | ||
|
|
79a9d2345b | ||
|
|
c3caddcec9 | ||
|
|
6d90182d2e | ||
|
|
ea503a6075 | ||
|
|
ffe313528e | ||
|
|
9b5b4c3787 | ||
|
|
c5bf3188a4 | ||
|
|
c4f46dc499 | ||
|
|
c1fd573de2 | ||
|
|
c6b083472f | ||
|
|
254fe95394 | ||
|
|
ac61ac082e | ||
|
|
02df395dab | ||
|
|
39584c7b7d | ||
|
|
4ebc4f3069 | ||
|
|
1eca8aa143 | ||
|
|
9c09d50e8f | ||
|
|
d73e896e66 | ||
|
|
283045dc4a | ||
|
|
180cfb3951 | ||
|
|
610637da80 | ||
|
|
73e6f5e6da | ||
|
|
b7e6926880 | ||
|
|
a7ef6ee35b | ||
|
|
920e062293 | ||
|
|
794a0608a1 | ||
|
|
fc09653de3 | ||
|
|
c8661fd135 | ||
|
|
4b0600a453 | ||
|
|
f1c10cac2b | ||
|
|
af83ca0235 | ||
|
|
8f6870ebb7 | ||
|
|
0e8bdbd3e3 | ||
|
|
0d593c22d1 | ||
|
|
a1f0a3e23b | ||
|
|
9b15d8de24 | ||
|
|
aaa51cf234 | ||
|
|
66c7115cfc | ||
|
|
823386d824 | ||
|
|
433cb71211 | ||
|
|
62c60d3070 | ||
|
|
698d328620 | ||
|
|
4292355310 | ||
|
|
85bb301255 | ||
|
|
0d61c13c58 | ||
|
|
15f79e0826 | ||
|
|
3d96f0fdfa | ||
|
|
733b9604ba | ||
|
|
969fdd7995 | ||
|
|
b1d11d7747 | ||
|
|
e948bdaea8 | ||
|
|
17389b8667 | ||
|
|
635b5de304 | ||
|
|
67be981176 | ||
|
|
0b8402c187 | ||
|
|
7c98c1f8c9 | ||
|
|
0483603d4a | ||
|
|
6b59b8be44 | ||
|
|
07ffc003e4 | ||
|
|
4cb62df33f | ||
|
|
ef58f011fb | ||
|
|
f7ef236ac8 | ||
|
|
dbe906a331 | ||
|
|
3899f41c61 | ||
|
|
57c29c14a4 | ||
|
|
2b5d903cc5 | ||
|
|
c8d270a853 | ||
|
|
72f4e9edbf | ||
|
|
1ce0a2b0ba | ||
|
|
044ebfb9a2 | ||
|
|
a41b034aa2 | ||
|
|
e00f0b852d | ||
|
|
501b12564c | ||
|
|
229ad15a28 | ||
|
|
e4f35d8dae | ||
|
|
4271573e15 | ||
|
|
b651a9046b | ||
|
|
6b84eaf8af | ||
|
|
1b076bcd22 | ||
|
|
30437f6c46 | ||
|
|
3171e40a26 | ||
|
|
61c915995b | ||
|
|
073bd86344 | ||
|
|
777a7addd2 | ||
|
|
4f28476c47 | ||
|
|
b05aec72c2 | ||
|
|
610675452e | ||
|
|
83387f5d08 | ||
|
|
142206529c | ||
|
|
c0f200b1a9 | ||
|
|
6d55f75bee | ||
|
|
c68cbf1806 | ||
|
|
9677617c7f | ||
|
|
d8cf282953 | ||
|
|
b959f57058 | ||
|
|
8768e6fd0b | ||
|
|
acbf370383 | ||
|
|
80dfdaee06 | ||
|
|
4d15ae9452 | ||
|
|
9a68d42ee8 | ||
|
|
d732d099ac | ||
|
|
582a2af799 | ||
|
|
fba3963d47 | ||
|
|
e80d33e2e0 | ||
|
|
6a3001bf22 | ||
|
|
368c41ba27 | ||
|
|
fa0d8432bc | ||
|
|
2811e08563 | ||
|
|
846a4066d8 | ||
|
|
6e1477666e | ||
|
|
013def94f9 | ||
|
|
468bb04149 | ||
|
|
30a23dad17 | ||
|
|
17af249f90 | ||
|
|
4e65291304 | ||
|
|
505ad36b36 | ||
|
|
dcb614911a | ||
|
|
e06c3631b2 | ||
|
|
da236e6e1b | ||
|
|
2796730a87 |
4
.github/workflows/ci.yaml
vendored
4
.github/workflows/ci.yaml
vendored
@@ -9,7 +9,7 @@ jobs:
|
||||
name: isolated chatmaild tests
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: run chatmaild tests
|
||||
working-directory: chatmaild
|
||||
@@ -19,7 +19,7 @@ jobs:
|
||||
name: deploy-chatmail tests
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: initenv
|
||||
run: scripts/initenv.sh
|
||||
|
||||
20
.github/workflows/staging-ipv4.testrun.org-default.zone
vendored
Normal file
20
.github/workflows/staging-ipv4.testrun.org-default.zone
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
;; Zone file for staging-ipv4.testrun.org
|
||||
|
||||
$ORIGIN staging-ipv4.testrun.org.
|
||||
$TTL 300
|
||||
|
||||
@ IN SOA ns.testrun.org. root.nine.testrun.org (
|
||||
2023010101 ; Serial
|
||||
7200 ; Refresh
|
||||
3600 ; Retry
|
||||
1209600 ; Expire
|
||||
3600 ; Negative response caching TTL
|
||||
)
|
||||
|
||||
;; Nameservers.
|
||||
@ IN NS ns.testrun.org.
|
||||
|
||||
;; DNS records.
|
||||
@ IN A 37.27.95.249
|
||||
mta-sts.staging-ipv4.testrun.org. CNAME staging-ipv4.testrun.org.
|
||||
www.staging-ipv4.testrun.org. CNAME staging-ipv4.testrun.org.
|
||||
@@ -1,6 +1,6 @@
|
||||
;; Zone file for staging.testrun.org
|
||||
;; Zone file for staging2.testrun.org
|
||||
|
||||
$ORIGIN staging.testrun.org.
|
||||
$ORIGIN staging2.testrun.org.
|
||||
$TTL 300
|
||||
|
||||
@ IN SOA ns.testrun.org. root.nine.testrun.org (
|
||||
@@ -15,6 +15,7 @@ $TTL 300
|
||||
@ IN NS ns.testrun.org.
|
||||
|
||||
;; DNS records.
|
||||
@ IN A 37.27.37.98
|
||||
mta-sts.staging.testrun.org. CNAME staging.testrun.org.
|
||||
www.staging.testrun.org. CNAME staging.testrun.org.
|
||||
@ IN A 37.27.24.139
|
||||
mta-sts.staging2.testrun.org. CNAME staging2.testrun.org.
|
||||
www.staging2.testrun.org. CNAME staging2.testrun.org.
|
||||
|
||||
|
||||
98
.github/workflows/test-and-deploy-ipv4only.yaml
vendored
Normal file
98
.github/workflows/test-and-deploy-ipv4only.yaml
vendored
Normal file
@@ -0,0 +1,98 @@
|
||||
name: deploy on staging-ipv4.testrun.org, and run tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
pull_request:
|
||||
paths-ignore:
|
||||
- 'scripts/**'
|
||||
- '**/README.md'
|
||||
- 'CHANGELOG.md'
|
||||
- 'LICENSE'
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
name: deploy on staging-ipv4.testrun.org, and run tests
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
concurrency:
|
||||
group: ci-ipv4-${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: ${{ !contains(github.ref, '$GITHUB_REF') }}
|
||||
steps:
|
||||
- uses: jsok/serialize-workflow-action@v1
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: prepare SSH
|
||||
run: |
|
||||
mkdir ~/.ssh
|
||||
echo "${{ secrets.STAGING_SSH_KEY }}" >> ~/.ssh/id_ed25519
|
||||
chmod 600 ~/.ssh/id_ed25519
|
||||
ssh-keyscan staging-ipv4.testrun.org > ~/.ssh/known_hosts
|
||||
# save previous acme & dkim state
|
||||
rsync -avz root@staging-ipv4.testrun.org:/var/lib/acme acme-ipv4 || true
|
||||
rsync -avz root@staging-ipv4.testrun.org:/etc/dkimkeys dkimkeys-ipv4 || true
|
||||
# store previous acme & dkim state on ns.testrun.org, if it contains useful certs
|
||||
if [ -f dkimkeys-ipv4/dkimkeys/opendkim.private ]; then rsync -avz -e "ssh -o StrictHostKeyChecking=accept-new" dkimkeys-ipv4 root@ns.testrun.org:/tmp/ || true; fi
|
||||
if [ "$(ls -A acme-ipv4/acme/certs)" ]; then rsync -avz -e "ssh -o StrictHostKeyChecking=accept-new" acme-ipv4 root@ns.testrun.org:/tmp/ || true; fi
|
||||
# make sure CAA record isn't set
|
||||
ssh -o StrictHostKeyChecking=accept-new root@ns.testrun.org sed -i '/CAA/d' /etc/nsd/staging-ipv4.testrun.org.zone
|
||||
ssh root@ns.testrun.org systemctl reload nsd
|
||||
|
||||
- name: rebuild staging-ipv4.testrun.org to have a clean VPS
|
||||
run: |
|
||||
curl -X POST \
|
||||
-H "Authorization: Bearer ${{ secrets.HETZNER_API_TOKEN }}" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"image":"debian-12"}' \
|
||||
"https://api.hetzner.cloud/v1/servers/${{ secrets.STAGING_SERVER_ID }}/actions/rebuild"
|
||||
|
||||
- run: scripts/initenv.sh
|
||||
|
||||
- name: append venv/bin to PATH
|
||||
run: echo venv/bin >>$GITHUB_PATH
|
||||
|
||||
- name: upload TLS cert after rebuilding
|
||||
run: |
|
||||
echo " --- wait until staging-ipv4.testrun.org VPS is rebuilt --- "
|
||||
rm ~/.ssh/known_hosts
|
||||
while ! ssh -o ConnectTimeout=180 -o StrictHostKeyChecking=accept-new -v root@staging-ipv4.testrun.org id -u ; do sleep 1 ; done
|
||||
ssh -o StrictHostKeyChecking=accept-new -v root@staging-ipv4.testrun.org id -u
|
||||
# download acme & dkim state from ns.testrun.org
|
||||
rsync -e "ssh -o StrictHostKeyChecking=accept-new" -avz root@ns.testrun.org:/tmp/acme-ipv4 acme-restore || true
|
||||
rsync -avz root@ns.testrun.org:/tmp/dkimkeys-ipv4 dkimkeys-restore || true
|
||||
# restore acme & dkim state to staging2.testrun.org
|
||||
rsync -avz acme-restore/acme-ipv4/acme root@staging-ipv4.testrun.org:/var/lib/acme || true
|
||||
rsync -avz dkimkeys-restore/dkimkeys-ipv4/dkimkeys root@staging-ipv4.testrun.org:/etc/dkimkeys || true
|
||||
ssh -o StrictHostKeyChecking=accept-new -v root@staging-ipv4.testrun.org chown root:root -R /var/lib/acme || true
|
||||
|
||||
- name: run formatting checks
|
||||
run: cmdeploy fmt -v
|
||||
|
||||
- name: run deploy-chatmail offline tests
|
||||
run: pytest --pyargs cmdeploy
|
||||
|
||||
- run: |
|
||||
cmdeploy init staging-ipv4.testrun.org
|
||||
sed -i 's#disable_ipv6 = False#disable_ipv6 = True#' chatmail.ini
|
||||
|
||||
- run: cmdeploy run
|
||||
|
||||
- name: set DNS entries
|
||||
run: |
|
||||
ssh -o StrictHostKeyChecking=accept-new -v root@staging-ipv4.testrun.org chown opendkim:opendkim -R /etc/dkimkeys
|
||||
cmdeploy dns --zonefile staging-generated.zone
|
||||
cat staging-generated.zone >> .github/workflows/staging-ipv4.testrun.org-default.zone
|
||||
cat .github/workflows/staging-ipv4.testrun.org-default.zone
|
||||
scp .github/workflows/staging-ipv4.testrun.org-default.zone root@ns.testrun.org:/etc/nsd/staging-ipv4.testrun.org.zone
|
||||
ssh root@ns.testrun.org nsd-checkzone staging-ipv4.testrun.org /etc/nsd/staging-ipv4.testrun.org.zone
|
||||
ssh root@ns.testrun.org systemctl reload nsd
|
||||
|
||||
- name: cmdeploy test
|
||||
run: CHATMAIL_DOMAIN2=nine.testrun.org cmdeploy test --slow
|
||||
|
||||
- name: cmdeploy dns (try 3 times)
|
||||
run: cmdeploy dns || cmdeploy dns || cmdeploy dns
|
||||
|
||||
55
.github/workflows/test-and-deploy.yaml
vendored
55
.github/workflows/test-and-deploy.yaml
vendored
@@ -1,4 +1,4 @@
|
||||
name: deploy on staging.testrun.org, and run tests
|
||||
name: deploy on staging2.testrun.org, and run tests
|
||||
|
||||
on:
|
||||
push:
|
||||
@@ -13,28 +13,35 @@ on:
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
name: deploy on staging.testrun.org, and run tests
|
||||
name: deploy on staging2.testrun.org, and run tests
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
concurrency:
|
||||
group: staging-deploy
|
||||
cancel-in-progress: true
|
||||
group: ci-${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: ${{ !contains(github.ref, '$GITHUB_REF') }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: jsok/serialize-workflow-action@v1
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: prepare SSH
|
||||
run: |
|
||||
mkdir ~/.ssh
|
||||
echo "${{ secrets.STAGING_SSH_KEY }}" >> ~/.ssh/id_ed25519
|
||||
chmod 600 ~/.ssh/id_ed25519
|
||||
ssh-keyscan staging.testrun.org > ~/.ssh/known_hosts
|
||||
ssh-keyscan staging2.testrun.org > ~/.ssh/known_hosts
|
||||
# save previous acme & dkim state
|
||||
rsync -avz root@staging.testrun.org:/var/lib/acme . || true
|
||||
rsync -avz root@staging.testrun.org:/etc/dkimkeys . || true
|
||||
rsync -avz root@staging2.testrun.org:/var/lib/acme . || true
|
||||
rsync -avz root@staging2.testrun.org:/etc/dkimkeys . || true
|
||||
# store previous acme & dkim state on ns.testrun.org, if it contains useful certs
|
||||
if [ -f dkimkeys/opendkim.private ]; then rsync -avz -e "ssh -o StrictHostKeyChecking=accept-new" dkimkeys root@ns.testrun.org:/tmp/ || true; fi
|
||||
if [ -z "$(ls -A acme/certs)" ]; then rsync -avz -e "ssh -o StrictHostKeyChecking=accept-new" acme root@ns.testrun.org:/tmp/ || true; fi
|
||||
if [ "$(ls -A acme/certs)" ]; then rsync -avz -e "ssh -o StrictHostKeyChecking=accept-new" acme root@ns.testrun.org:/tmp/ || true; fi
|
||||
# make sure CAA record isn't set
|
||||
ssh -o StrictHostKeyChecking=accept-new root@ns.testrun.org sed -i '/CAA/d' /etc/nsd/staging2.testrun.org.zone
|
||||
ssh root@ns.testrun.org systemctl reload nsd
|
||||
|
||||
- name: rebuild staging.testrun.org to have a clean VPS
|
||||
- name: rebuild staging2.testrun.org to have a clean VPS
|
||||
run: |
|
||||
curl -X POST \
|
||||
-H "Authorization: Bearer ${{ secrets.HETZNER_API_TOKEN }}" \
|
||||
@@ -49,17 +56,17 @@ jobs:
|
||||
|
||||
- name: upload TLS cert after rebuilding
|
||||
run: |
|
||||
echo " --- wait until staging.testrun.org VPS is rebuilt --- "
|
||||
echo " --- wait until staging2.testrun.org VPS is rebuilt --- "
|
||||
rm ~/.ssh/known_hosts
|
||||
while ! ssh -o ConnectTimeout=180 -o StrictHostKeyChecking=accept-new -v root@staging.testrun.org id -u ; do sleep 1 ; done
|
||||
ssh -o StrictHostKeyChecking=accept-new -v root@staging.testrun.org id -u
|
||||
while ! ssh -o ConnectTimeout=180 -o StrictHostKeyChecking=accept-new -v root@staging2.testrun.org id -u ; do sleep 1 ; done
|
||||
ssh -o StrictHostKeyChecking=accept-new -v root@staging2.testrun.org id -u
|
||||
# download acme & dkim state from ns.testrun.org
|
||||
rsync -e "ssh -o StrictHostKeyChecking=accept-new" -avz root@ns.testrun.org:/tmp/acme acme-restore || true
|
||||
rsync -avz root@ns.testrun.org:/tmp/dkimkeys dkimkeys-restore || true
|
||||
# restore acme & dkim state to staging.testrun.org
|
||||
rsync -avz acme-restore/acme/ root@staging.testrun.org:/var/lib/acme || true
|
||||
rsync -avz dkimkeys-restore/dkimkeys/ root@staging.testrun.org:/etc/dkimkeys || true
|
||||
ssh -o StrictHostKeyChecking=accept-new -v root@staging.testrun.org chown root:root -R /var/lib/acme || true
|
||||
# restore acme & dkim state to staging2.testrun.org
|
||||
rsync -avz acme-restore/acme/ root@staging2.testrun.org:/var/lib/acme || true
|
||||
rsync -avz dkimkeys-restore/dkimkeys/ root@staging2.testrun.org:/etc/dkimkeys || true
|
||||
ssh -o StrictHostKeyChecking=accept-new -v root@staging2.testrun.org chown root:root -R /var/lib/acme || true
|
||||
|
||||
- name: run formatting checks
|
||||
run: cmdeploy fmt -v
|
||||
@@ -67,23 +74,23 @@ jobs:
|
||||
- name: run deploy-chatmail offline tests
|
||||
run: pytest --pyargs cmdeploy
|
||||
|
||||
- run: cmdeploy init staging.testrun.org
|
||||
- run: cmdeploy init staging2.testrun.org
|
||||
|
||||
- run: cmdeploy run
|
||||
- run: cmdeploy run --verbose
|
||||
|
||||
- name: set DNS entries
|
||||
run: |
|
||||
ssh -o StrictHostKeyChecking=accept-new -v root@staging.testrun.org chown opendkim:opendkim -R /etc/dkimkeys
|
||||
cmdeploy dns --zonefile staging-generated.zone
|
||||
ssh -o StrictHostKeyChecking=accept-new root@staging2.testrun.org chown opendkim:opendkim -R /etc/dkimkeys
|
||||
cmdeploy dns --zonefile staging-generated.zone --verbose
|
||||
cat staging-generated.zone >> .github/workflows/staging.testrun.org-default.zone
|
||||
cat .github/workflows/staging.testrun.org-default.zone
|
||||
scp .github/workflows/staging.testrun.org-default.zone root@ns.testrun.org:/etc/nsd/staging.testrun.org.zone
|
||||
ssh root@ns.testrun.org nsd-checkzone staging.testrun.org /etc/nsd/staging.testrun.org.zone
|
||||
scp .github/workflows/staging.testrun.org-default.zone root@ns.testrun.org:/etc/nsd/staging2.testrun.org.zone
|
||||
ssh root@ns.testrun.org nsd-checkzone staging2.testrun.org /etc/nsd/staging2.testrun.org.zone
|
||||
ssh root@ns.testrun.org systemctl reload nsd
|
||||
|
||||
- name: cmdeploy test
|
||||
run: CHATMAIL_DOMAIN2=nine.testrun.org cmdeploy test --slow
|
||||
|
||||
- name: cmdeploy dns (try 3 times)
|
||||
run: cmdeploy dns || cmdeploy dns || cmdeploy dns
|
||||
run: cmdeploy dns -v || cmdeploy dns -v || cmdeploy dns -v
|
||||
|
||||
|
||||
142
CHANGELOG.md
142
CHANGELOG.md
@@ -2,14 +2,148 @@
|
||||
|
||||
## untagged
|
||||
|
||||
- run metrics generation with systemd-timer instead of cron
|
||||
([#304](https://github.com/deltachat/chatmail/pull/304))
|
||||
- fix checking for required DNS records
|
||||
([#412](https://github.com/deltachat/chatmail/pull/412))
|
||||
|
||||
- add a paragraph about "account deletion" to info page
|
||||
([#405](https://github.com/deltachat/chatmail/pull/405))
|
||||
|
||||
- avoid nginx listening on ipv6 if v6 is dsiabled
|
||||
([#402](https://github.com/deltachat/chatmail/pull/402))
|
||||
|
||||
- refactor ssh-based execution to allow organizing remote functions in
|
||||
modules.
|
||||
([#396](https://github.com/deltachat/chatmail/pull/396))
|
||||
|
||||
- trigger "apt upgrade" during "cmdeploy run"
|
||||
([#398](https://github.com/deltachat/chatmail/pull/398))
|
||||
|
||||
- drop hispanilandia passthrough address
|
||||
([#401](https://github.com/deltachat/chatmail/pull/401))
|
||||
|
||||
- set CAA record flags to 0
|
||||
|
||||
- add IMAP capabilities instead of overwriting them
|
||||
([#413](https://github.com/deltachat/chatmail/pull/413))
|
||||
|
||||
|
||||
## 1.4.1 2024-07-31
|
||||
|
||||
- fix metadata dictproxy which would confuse transactions
|
||||
resulting in missed notifications and other issues.
|
||||
([#393](https://github.com/deltachat/chatmail/pull/393))
|
||||
([#394](https://github.com/deltachat/chatmail/pull/394))
|
||||
|
||||
- add optional "imap_rawlog" config option. If true,
|
||||
.in/.out files are created in user home dirs
|
||||
containing the imap protocol messages.
|
||||
([#389](https://github.com/deltachat/chatmail/pull/389))
|
||||
|
||||
## 1.4.0 2024-07-28
|
||||
|
||||
- Add `disable_ipv6` config option to chatmail.ini.
|
||||
Required if the server doesn't have IPv6 connectivity.
|
||||
([#312](https://github.com/deltachat/chatmail/pull/312))
|
||||
|
||||
- allow current K9/Thunderbird-mail releases to send encrypted messages
|
||||
outside by accepting their localized "encrypted subject" strings.
|
||||
([#370](https://github.com/deltachat/chatmail/pull/370))
|
||||
|
||||
- Migrate and remove sqlite database in favor of password/lastlogin tracking
|
||||
in a user's maildir.
|
||||
([#379](https://github.com/deltachat/chatmail/pull/379))
|
||||
|
||||
- Require pyinfra V3 installed on the client side,
|
||||
run `./scripts/initenv.sh` to upgrade locally.
|
||||
([#378](https://github.com/deltachat/chatmail/pull/378))
|
||||
|
||||
- don't hardcode "/home/vmail" paths but rather set them
|
||||
once in the config object and use it everywhere else,
|
||||
thereby also improving testability.
|
||||
([#351](https://github.com/deltachat/chatmail/pull/351))
|
||||
temporarily introduced obligatory "passdb_path" and "mailboxes_dir"
|
||||
settings but they were removed/obsoleted in
|
||||
([#380](https://github.com/deltachat/chatmail/pull/380))
|
||||
|
||||
- BREAKING: new required chatmail.ini value 'delete_inactive_users_after = 100'
|
||||
which removes users from database and mails after 100 days without any login.
|
||||
([#350](https://github.com/deltachat/chatmail/pull/350))
|
||||
|
||||
- Refine DNS checking to distinguish between "required" and "recommended" settings
|
||||
([#372](https://github.com/deltachat/chatmail/pull/372))
|
||||
|
||||
- reload nginx in the acmetool cronjob
|
||||
([#360](https://github.com/deltachat/chatmail/pull/360))
|
||||
|
||||
- remove checking of reverse-DNS PTR records. Chatmail-servers don't
|
||||
depend on it and even in the wider e-mail system it's not common anymore.
|
||||
If it's an issue, a chatmail operator can still care to properly set reverse DNS.
|
||||
([#348](https://github.com/deltachat/chatmail/pull/348))
|
||||
|
||||
- Make DNS-checking faster and more interactive, run it fully during "cmdeploy run",
|
||||
also introducing a generic mechanism for rapid remote ssh-based python function execution.
|
||||
([#346](https://github.com/deltachat/chatmail/pull/346))
|
||||
|
||||
- Don't fix file owner ship of /home/vmail
|
||||
([#345](https://github.com/deltachat/chatmail/pull/345))
|
||||
|
||||
- Support iterating over all users with doveadm commands
|
||||
([#344](https://github.com/deltachat/chatmail/pull/344))
|
||||
|
||||
- Test and fix for attempts to create inadmissible accounts
|
||||
([#333](https://github.com/deltachat/chatmail/pull/321))
|
||||
|
||||
- check that OpenPGP has only PKESK, SKESK and SEIPD packets
|
||||
([#323](https://github.com/deltachat/chatmail/pull/323),
|
||||
[#324](https://github.com/deltachat/chatmail/pull/324))
|
||||
|
||||
- improve filtermail checks for encrypted messages and drop support for unencrypted MDNs
|
||||
([#320](https://github.com/deltachat/chatmail/pull/320))
|
||||
|
||||
- replace `bash` with `/bin/sh`
|
||||
([#334](https://github.com/deltachat/chatmail/pull/334))
|
||||
|
||||
- Increase number of logged in IMAP sessions to 50000
|
||||
([#335](https://github.com/deltachat/chatmail/pull/335))
|
||||
|
||||
- filtermail: do not allow ASCII armor without actual payload
|
||||
([#325](https://github.com/deltachat/chatmail/pull/325))
|
||||
|
||||
- Remove sieve to enable hardlink deduplication in LMTP
|
||||
([#343](https://github.com/deltachat/chatmail/pull/343))
|
||||
|
||||
- dovecot: enable gzip compression on disk
|
||||
([#341](https://github.com/deltachat/chatmail/pull/341))
|
||||
|
||||
- DKIM-sign Content-Type and oversign all signed headers
|
||||
([#296](https://github.com/deltachat/chatmail/pull/296))
|
||||
|
||||
- Add nonci_accounts metric
|
||||
([#347](https://github.com/deltachat/chatmail/pull/347))
|
||||
|
||||
- doveauth: log when a new account is created
|
||||
([#349](https://github.com/deltachat/chatmail/pull/349))
|
||||
|
||||
- Multiplex HTTPS, IMAP and SMTP on port 443
|
||||
([#357](https://github.com/deltachat/chatmail/pull/357))
|
||||
|
||||
## 1.3.0 - 2024-06-06
|
||||
|
||||
- don't check necessary DNS records on cmdeploy init anymore
|
||||
([#316](https://github.com/deltachat/chatmail/pull/316))
|
||||
|
||||
- ensure cron and acl are installed
|
||||
([#293](https://github.com/deltachat/chatmail/pull/293),
|
||||
[#310](https://github.com/deltachat/chatmail/pull/310))
|
||||
|
||||
- change default for delete_mails_after from 40 to 20 days
|
||||
([#300]https://github.com/deltachat/chatmail/pull/300)
|
||||
([#300](https://github.com/deltachat/chatmail/pull/300))
|
||||
|
||||
- save journald logs only to memory and save nginx logs to journald instead of file
|
||||
([#299](https://github.com/deltachat/chatmail/pull/299))
|
||||
|
||||
- fix writing of multiple obs repositories in `/etc/apt/sources.list`
|
||||
([#272](https://github.com/deltachat/chatmail/issues/272))
|
||||
([#290](https://github.com/deltachat/chatmail/pull/290))
|
||||
|
||||
- metadata: add support for `/shared/vendor/deltachat/irohrelay`
|
||||
([#284](https://github.com/deltachat/chatmail/pull/284))
|
||||
|
||||
122
README.md
122
README.md
@@ -34,8 +34,8 @@ Please substitute it with your own domain.
|
||||
scripts/cmdeploy init chat.example.org # <-- use your domain
|
||||
```
|
||||
|
||||
3. Setup first DNS records for your chatmail domain,
|
||||
according to the hints provided by `cmdeploy init`.
|
||||
3. Point your domain to the server's IP address,
|
||||
if you haven't done so already.
|
||||
Verify that SSH root login works:
|
||||
|
||||
```
|
||||
@@ -47,7 +47,8 @@ Please substitute it with your own domain.
|
||||
```
|
||||
scripts/cmdeploy run
|
||||
```
|
||||
This script will also show you additional DNS records
|
||||
This script will check that you have all necessary DNS records.
|
||||
If DNS records are missing, it will recommend
|
||||
which you should configure at your DNS provider
|
||||
(it can take some time until they are public).
|
||||
|
||||
@@ -59,7 +60,7 @@ To check the status of your remotely running chatmail service:
|
||||
scripts/cmdeploy status
|
||||
```
|
||||
|
||||
To check whether your DNS records are correct:
|
||||
To display and check all recommended DNS records:
|
||||
|
||||
```
|
||||
scripts/cmdeploy dns
|
||||
@@ -155,7 +156,8 @@ While this file is present, account creation will be blocked.
|
||||
|
||||
[Postfix](http://www.postfix.org/) listens on ports 25 (smtp) and 587 (submission) and 465 (submissions).
|
||||
[Dovecot](https://www.dovecot.org/) listens on ports 143 (imap) and 993 (imaps).
|
||||
[nginx](https://www.nginx.com/) listens on port 443 (https).
|
||||
[nginx](https://www.nginx.com/) listens on port 8443 (https-alt) and 443 (https).
|
||||
Port 443 multiplexes HTTPS, IMAP and SMTP using ALPN to redirect connections to ports 8443, 465 or 993.
|
||||
[acmetool](https://hlandau.github.io/acmetool/) listens on port 80 (http).
|
||||
|
||||
Delta Chat apps will, however, discover all ports and configurations
|
||||
@@ -185,3 +187,113 @@ to MAIL FROM with
|
||||
and rejects incorrectly authenticated emails with [`reject_sender_login_mismatch`](reject_sender_login_mismatch) policy.
|
||||
`From:` header must correspond to envelope MAIL FROM,
|
||||
this is ensured by `filtermail` proxy.
|
||||
|
||||
## Migrating chatmail server to a new host
|
||||
|
||||
If you want to migrate your chatmail server to a new host,
|
||||
follow these steps:
|
||||
|
||||
1. Block all ports except 80 and 22 with firewall on a new server.
|
||||
|
||||
To do this, add the following config to `/etc/nftables.conf`:
|
||||
```
|
||||
#!/usr/sbin/nft -f
|
||||
|
||||
flush ruleset
|
||||
|
||||
table inet filter {
|
||||
chain input {
|
||||
type filter hook input priority filter; policy drop;
|
||||
|
||||
# Accept ICMP.
|
||||
# It is especially important to accept ICMPv6 ND messages,
|
||||
# otherwise IPv6 connectivity breaks.
|
||||
icmp type { echo-request } accept
|
||||
icmpv6 type { echo-request, nd-neighbor-solicit, nd-router-advert, nd-neighbor-advert } accept
|
||||
|
||||
tcp dport { ssh, http } accept
|
||||
|
||||
ct state established accept
|
||||
}
|
||||
chain forward {
|
||||
type filter hook forward priority filter;
|
||||
}
|
||||
chain output {
|
||||
type filter hook output priority filter;
|
||||
}
|
||||
}
|
||||
```
|
||||
Then execute `nft -f /etc/nftables.conf` as root.
|
||||
|
||||
This will ensure users will not connect to the new server
|
||||
and mails will not be delivered to the new server
|
||||
before you finish the setup.
|
||||
|
||||
Port 22 is needed for SSH access
|
||||
and port 80 is needed to get a TLS certificate.
|
||||
They are not used by Delta Chat
|
||||
or by other email servers trying to deliver the messages.
|
||||
|
||||
2. Point DNS to the new IP addresses.
|
||||
|
||||
You can already remove the old IP addresses from DNS.
|
||||
Existing Delta Chat users will still be able to connect
|
||||
to the old server, send and receive messages,
|
||||
but new users will fail to create new profiles
|
||||
with your chatmail server.
|
||||
|
||||
3. Setup the new server with `cmdeploy`.
|
||||
|
||||
This step is similar to initial setup.
|
||||
However, because ports Delta Chat uses are blocked,
|
||||
new server will not become usable immediately.
|
||||
If other servers try to deliver messages to your new server they will fail,
|
||||
but normally email servers will retry delivering messages
|
||||
for at least a week, so messages will not be lost.
|
||||
|
||||
4. Firewall all ports except `ssh` (22) on the old server.
|
||||
Existing users will not be able to connect from now on
|
||||
and no more messages will be delivered to your old chatmail server.
|
||||
|
||||
Blocking users from connecting to the new server
|
||||
until mailboxes are migrated is needed to avoid UID validity change.
|
||||
If Delta Chat connects to the new server before it is fully set up,
|
||||
it will lose track of the IMAP message UID
|
||||
and miss messages that arrived during migration.
|
||||
|
||||
Same for SMTP port 25, you want it blocked during migration so no new mails arrive
|
||||
while the server is moving.
|
||||
|
||||
5. Use `rsync -avz` over SSH to copy /home/vmail/mail from the old server to the new one
|
||||
preserving file permissions and timestamps.
|
||||
|
||||
6. Unblock ports used by Delta Chat and SMTP message exchange.
|
||||
For that you can modify `/etc/nftables.conf` as follows:
|
||||
```
|
||||
#!/usr/sbin/nft -f
|
||||
|
||||
flush ruleset
|
||||
|
||||
table inet filter {
|
||||
chain input {
|
||||
type filter hook input priority filter; policy drop;
|
||||
|
||||
# Accept ICMP.
|
||||
# It is especially important to accept ICMPv6 ND messages,
|
||||
# otherwise IPv6 connectivity breaks.
|
||||
icmp type { echo-request } accept
|
||||
icmpv6 type { echo-request, nd-neighbor-solicit, nd-router-advert, nd-neighbor-advert } accept
|
||||
|
||||
tcp dport { ssh, smtp, http, https, imap, imaps, submission, submissions } accept
|
||||
|
||||
ct state established accept
|
||||
}
|
||||
chain forward {
|
||||
type filter hook forward priority filter;
|
||||
}
|
||||
chain output {
|
||||
type filter hook output priority filter;
|
||||
}
|
||||
}
|
||||
```
|
||||
Execute `nft -f /etc/nftables.conf` as root to apply the changes.
|
||||
|
||||
@@ -26,6 +26,8 @@ chatmail-metadata = "chatmaild.metadata:main"
|
||||
filtermail = "chatmaild.filtermail:main"
|
||||
echobot = "chatmaild.echo:main"
|
||||
chatmail-metrics = "chatmaild.metrics:main"
|
||||
delete_inactive_users = "chatmaild.delete_inactive_users:main"
|
||||
lastlogin = "chatmaild.lastlogin:main"
|
||||
|
||||
[project.entry-points.pytest11]
|
||||
"chatmaild.testplugin" = "chatmaild.tests.plugin"
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
|
||||
|
||||
59
chatmaild/src/chatmaild/common_encrypted_subjects.py
Normal file
59
chatmaild/src/chatmaild/common_encrypted_subjects.py
Normal file
@@ -0,0 +1,59 @@
|
||||
"""Generated from deltachat, draft-ietf-lamps-header-protection, and
|
||||
encrypted_subject localizations in
|
||||
https://github.com/thunderbird/thunderbird-android/
|
||||
|
||||
"""
|
||||
|
||||
common_encrypted_subjects = {
|
||||
"...",
|
||||
"[...]",
|
||||
"암호화된 메시지",
|
||||
"Ĉifrita mesaĝo",
|
||||
"Courriel chiffré",
|
||||
"Dulrituð skilaboð",
|
||||
"Encrypted Message",
|
||||
"Fersifere berjocht",
|
||||
"Kemennadenn enrineget",
|
||||
"Krüptitud kiri",
|
||||
"Krypterat meddelande",
|
||||
"Krypteret besked",
|
||||
"Kryptert melding",
|
||||
"Mensagem criptografada",
|
||||
"Mensagem encriptada",
|
||||
"Mensaje cifrado",
|
||||
"Mensaxe cifrada",
|
||||
"Mesaj Criptat",
|
||||
"Mesazh i Fshehtëzuar",
|
||||
"Messaggio criptato",
|
||||
"Messaghju cifratu",
|
||||
"Missatge encriptat",
|
||||
"Neges wedi'i Hamgryptio",
|
||||
"Pesan terenkripsi",
|
||||
"Salattu viesti",
|
||||
"Şifreli İleti",
|
||||
"Šifrēta ziņa",
|
||||
"Šifrirana poruka",
|
||||
"Šifrirano sporočilo",
|
||||
"Šifruotas laiškas",
|
||||
"Tin nhắn được mã hóa",
|
||||
"Titkosított üzenet",
|
||||
"Verschlüsselte Nachricht",
|
||||
"Versleuteld bericht",
|
||||
"Zašifrovaná zpráva",
|
||||
"Zaszyfrowana wiadomość",
|
||||
"Zifratu mezua",
|
||||
"Κρυπτογραφημένο μήνυμα",
|
||||
"Зашифроване повідомлення",
|
||||
"Зашифрованное сообщение",
|
||||
"Зашыфраваны ліст",
|
||||
"Криптирано съобщение",
|
||||
"Шифрована порука",
|
||||
"დაშიფრული წერილი",
|
||||
"הודעה מוצפנת",
|
||||
"پیام رمزنگاریشده",
|
||||
"رسالة مشفّرة",
|
||||
"എൻക്രിപ്റ്റുചെയ്ത സന്ദേശം",
|
||||
"加密邮件",
|
||||
"已加密的訊息",
|
||||
"暗号化されたメッセージ",
|
||||
}
|
||||
@@ -1,9 +1,17 @@
|
||||
from pathlib import Path
|
||||
|
||||
import iniconfig
|
||||
|
||||
from chatmaild.user import User
|
||||
|
||||
echobot_password_path = Path("/run/echobot/password")
|
||||
|
||||
|
||||
def read_config(inipath):
|
||||
assert Path(inipath).exists(), inipath
|
||||
cfg = iniconfig.IniConfig(inipath)
|
||||
return Config(inipath, params=cfg.sections["params"])
|
||||
params = cfg.sections["params"]
|
||||
return Config(inipath, params=params)
|
||||
|
||||
|
||||
class Config:
|
||||
@@ -12,7 +20,9 @@ class Config:
|
||||
self.mail_domain = params["mail_domain"]
|
||||
self.max_user_send_per_minute = int(params["max_user_send_per_minute"])
|
||||
self.max_mailbox_size = params["max_mailbox_size"]
|
||||
self.max_message_size = int(params.get("max_message_size", "31457280"))
|
||||
self.delete_mails_after = params["delete_mails_after"]
|
||||
self.delete_inactive_users_after = int(params["delete_inactive_users_after"])
|
||||
self.username_min_length = int(params["username_min_length"])
|
||||
self.username_max_length = int(params["username_max_length"])
|
||||
self.password_min_length = int(params["password_min_length"])
|
||||
@@ -20,23 +30,64 @@ class Config:
|
||||
self.passthrough_recipients = params["passthrough_recipients"].split()
|
||||
self.filtermail_smtp_port = int(params["filtermail_smtp_port"])
|
||||
self.postfix_reinject_port = int(params["postfix_reinject_port"])
|
||||
self.disable_ipv6 = params.get("disable_ipv6", "false").lower() == "true"
|
||||
self.imap_rawlog = params.get("imap_rawlog", "false").lower() == "true"
|
||||
self.iroh_relay = params.get("iroh_relay")
|
||||
self.privacy_postal = params.get("privacy_postal")
|
||||
self.privacy_mail = params.get("privacy_mail")
|
||||
self.privacy_pdo = params.get("privacy_pdo")
|
||||
self.privacy_supervisor = params.get("privacy_supervisor")
|
||||
|
||||
# deprecated option
|
||||
mbdir = params.get("mailboxes_dir", f"/home/vmail/mail/{self.mail_domain}")
|
||||
self.mailboxes_dir = Path(mbdir.strip())
|
||||
|
||||
# old unused option (except for first migration from sqlite to maildir store)
|
||||
self.passdb_path = Path(params.get("passdb_path", "/home/vmail/passdb.sqlite"))
|
||||
|
||||
def _getbytefile(self):
|
||||
return open(self._inipath, "rb")
|
||||
|
||||
def get_user(self, addr):
|
||||
if not addr or "@" not in addr or "/" in addr:
|
||||
raise ValueError(f"invalid address {addr!r}")
|
||||
|
||||
def write_initial_config(inipath, mail_domain):
|
||||
maildir = self.mailboxes_dir.joinpath(addr)
|
||||
if addr.startswith("echo@"):
|
||||
password_path = echobot_password_path
|
||||
else:
|
||||
password_path = maildir.joinpath("password")
|
||||
|
||||
return User(maildir, addr, password_path, uid="vmail", gid="vmail")
|
||||
|
||||
|
||||
def write_initial_config(inipath, mail_domain, overrides):
|
||||
"""Write out default config file, using the specified config value overrides."""
|
||||
from importlib.resources import files
|
||||
|
||||
inidir = files(__package__).joinpath("ini")
|
||||
content = (
|
||||
inidir.joinpath("chatmail.ini.f").read_text().format(mail_domain=mail_domain)
|
||||
)
|
||||
source_inipath = inidir.joinpath("chatmail.ini.f")
|
||||
content = source_inipath.read_text().format(mail_domain=mail_domain)
|
||||
|
||||
# apply config overrides
|
||||
new_lines = []
|
||||
extra = overrides.copy()
|
||||
for line in content.split("\n"):
|
||||
new_line = line.strip()
|
||||
if new_line and new_line[0] not in "#[":
|
||||
name, value = map(str.strip, new_line.split("=", maxsplit=1))
|
||||
value = extra.pop(name, value)
|
||||
new_line = f"{name} = {value}"
|
||||
new_lines.append(new_line)
|
||||
|
||||
for name, value in extra.items():
|
||||
new_line = f"{name} = {value}"
|
||||
new_lines.append(new_line)
|
||||
|
||||
content = "\n".join(new_lines)
|
||||
|
||||
# apply testrun privacy overrides
|
||||
|
||||
if mail_domain.endswith(".testrun.org"):
|
||||
override_inipath = inidir.joinpath("override-testrun.ini")
|
||||
privacy = iniconfig.IniConfig(override_inipath)["privacy"]
|
||||
|
||||
@@ -1,133 +0,0 @@
|
||||
import contextlib
|
||||
import sqlite3
|
||||
import time
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
class DBError(Exception):
|
||||
"""error during an operation on the database."""
|
||||
|
||||
|
||||
class Connection:
|
||||
def __init__(self, sqlconn, write):
|
||||
self._sqlconn = sqlconn
|
||||
self._write = write
|
||||
|
||||
def close(self):
|
||||
self._sqlconn.close()
|
||||
|
||||
def commit(self):
|
||||
self._sqlconn.commit()
|
||||
|
||||
def rollback(self):
|
||||
self._sqlconn.rollback()
|
||||
|
||||
def execute(self, query, params=()):
|
||||
cur = self.cursor()
|
||||
try:
|
||||
cur.execute(query, params)
|
||||
except sqlite3.IntegrityError as e:
|
||||
raise DBError(e)
|
||||
return cur
|
||||
|
||||
def cursor(self):
|
||||
return self._sqlconn.cursor()
|
||||
|
||||
def get_user(self, addr: str) -> {}:
|
||||
"""Get a row from the users table."""
|
||||
q = "SELECT addr, password, last_login from users WHERE addr = ?"
|
||||
row = self._sqlconn.execute(q, (addr,)).fetchone()
|
||||
result = {}
|
||||
if row:
|
||||
result = dict(
|
||||
user=row[0],
|
||||
password=row[1],
|
||||
last_login=row[2],
|
||||
)
|
||||
return result
|
||||
|
||||
|
||||
class Database:
|
||||
def __init__(self, path: str):
|
||||
self.path = Path(path)
|
||||
self.ensure_tables()
|
||||
|
||||
def _get_connection(
|
||||
self, write=False, transaction=False, closing=False
|
||||
) -> Connection:
|
||||
# we let the database serialize all writers at connection time
|
||||
# to play it very safe (we don't have massive amounts of writes).
|
||||
mode = "ro"
|
||||
if write:
|
||||
mode = "rw"
|
||||
if not self.path.exists():
|
||||
mode = "rwc"
|
||||
uri = "file:%s?mode=%s" % (self.path, mode)
|
||||
sqlconn = sqlite3.connect(
|
||||
uri,
|
||||
timeout=60,
|
||||
isolation_level=None if transaction else "DEFERRED",
|
||||
uri=True,
|
||||
)
|
||||
|
||||
# Enable Write-Ahead Logging to avoid readers blocking writers and vice versa.
|
||||
if write:
|
||||
sqlconn.execute("PRAGMA journal_mode=wal")
|
||||
|
||||
if transaction:
|
||||
start_time = time.time()
|
||||
while 1:
|
||||
try:
|
||||
sqlconn.execute("begin immediate")
|
||||
break
|
||||
except sqlite3.OperationalError:
|
||||
# another thread may be writing, give it a chance to finish
|
||||
time.sleep(0.1)
|
||||
if time.time() - start_time > 5:
|
||||
# if it takes this long, something is wrong
|
||||
raise
|
||||
conn = Connection(sqlconn, write=write)
|
||||
if closing:
|
||||
conn = contextlib.closing(conn)
|
||||
return conn
|
||||
|
||||
@contextlib.contextmanager
|
||||
def write_transaction(self):
|
||||
conn = self._get_connection(closing=False, write=True, transaction=True)
|
||||
try:
|
||||
yield conn
|
||||
except Exception:
|
||||
conn.rollback()
|
||||
conn.close()
|
||||
raise
|
||||
else:
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
||||
def read_connection(self, closing=True) -> Connection:
|
||||
return self._get_connection(closing=closing, write=False)
|
||||
|
||||
def get_schema_version(self) -> int:
|
||||
with self.read_connection() as conn:
|
||||
dbversion = conn.execute("PRAGMA user_version").fetchone()[0]
|
||||
return dbversion
|
||||
|
||||
CURRENT_DBVERSION = 1
|
||||
|
||||
def ensure_tables(self):
|
||||
with self.write_transaction() as conn:
|
||||
if self.get_schema_version() > 1:
|
||||
raise DBError(
|
||||
"version is %s; downgrading schema is not supported"
|
||||
% (self.get_schema_version(),)
|
||||
)
|
||||
conn.execute(
|
||||
"""
|
||||
CREATE TABLE IF NOT EXISTS users (
|
||||
addr TEXT PRIMARY KEY,
|
||||
password TEXT,
|
||||
last_login INTEGER
|
||||
)
|
||||
""",
|
||||
)
|
||||
conn.execute("PRAGMA user_version=%s" % (self.CURRENT_DBVERSION,))
|
||||
31
chatmaild/src/chatmaild/delete_inactive_users.py
Normal file
31
chatmaild/src/chatmaild/delete_inactive_users.py
Normal file
@@ -0,0 +1,31 @@
|
||||
"""
|
||||
Remove inactive users
|
||||
"""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import time
|
||||
|
||||
from .config import read_config
|
||||
|
||||
|
||||
def delete_inactive_users(config):
|
||||
cutoff_date = time.time() - config.delete_inactive_users_after * 86400
|
||||
for addr in os.listdir(config.mailboxes_dir):
|
||||
try:
|
||||
user = config.get_user(addr)
|
||||
except ValueError:
|
||||
continue
|
||||
|
||||
read_timestamp = user.get_last_login_timestamp()
|
||||
if read_timestamp and read_timestamp < cutoff_date:
|
||||
path = config.mailboxes_dir.joinpath(addr)
|
||||
assert path == user.maildir
|
||||
shutil.rmtree(path, ignore_errors=True)
|
||||
|
||||
|
||||
def main():
|
||||
(cfgpath,) = sys.argv[1:]
|
||||
config = read_config(cfgpath)
|
||||
delete_inactive_users(config)
|
||||
94
chatmaild/src/chatmaild/dictproxy.py
Normal file
94
chatmaild/src/chatmaild/dictproxy.py
Normal file
@@ -0,0 +1,94 @@
|
||||
import logging
|
||||
import os
|
||||
from socketserver import StreamRequestHandler, ThreadingUnixStreamServer
|
||||
|
||||
|
||||
class DictProxy:
|
||||
def loop_forever(self, rfile, wfile):
|
||||
# Transaction storage is local to each handler loop.
|
||||
# Dovecot reuses transaction IDs across connections,
|
||||
# starting transaction with the name `1`
|
||||
# on two different connections to the same proxy sometimes.
|
||||
transactions = {}
|
||||
|
||||
while True:
|
||||
msg = rfile.readline().strip().decode()
|
||||
if not msg:
|
||||
break
|
||||
|
||||
res = self.handle_dovecot_request(msg, transactions)
|
||||
if res:
|
||||
wfile.write(res.encode("ascii"))
|
||||
wfile.flush()
|
||||
|
||||
def handle_dovecot_request(self, msg, transactions):
|
||||
# see https://doc.dovecot.org/developer_manual/design/dict_protocol/#dovecot-dict-protocol
|
||||
short_command = msg[0]
|
||||
parts = msg[1:].split("\t")
|
||||
|
||||
if short_command == "L":
|
||||
return self.handle_lookup(parts)
|
||||
elif short_command == "I":
|
||||
return self.handle_iterate(parts)
|
||||
elif short_command == "H":
|
||||
return # no version checking
|
||||
|
||||
if short_command not in ("BSC"):
|
||||
logging.warning(f"unknown dictproxy request: {msg!r}")
|
||||
return
|
||||
|
||||
transaction_id = parts[0]
|
||||
|
||||
if short_command == "B":
|
||||
return self.handle_begin_transaction(transaction_id, parts, transactions)
|
||||
elif short_command == "C":
|
||||
return self.handle_commit_transaction(transaction_id, parts, transactions)
|
||||
elif short_command == "S":
|
||||
addr = transactions[transaction_id]["addr"]
|
||||
if not self.handle_set(addr, parts):
|
||||
transactions[transaction_id]["res"] = "F\n"
|
||||
logging.error(f"dictproxy-set failed for {addr!r}: {msg!r}")
|
||||
|
||||
def handle_lookup(self, parts):
|
||||
logging.warning(f"lookup ignored: {parts!r}")
|
||||
return "N\n"
|
||||
|
||||
def handle_iterate(self, parts):
|
||||
# Empty line means ITER_FINISHED.
|
||||
# If we don't return empty line Dovecot will timeout.
|
||||
return "\n"
|
||||
|
||||
def handle_begin_transaction(self, transaction_id, parts, transactions):
|
||||
addr = parts[1]
|
||||
transactions[transaction_id] = dict(addr=addr, res="O\n")
|
||||
|
||||
def handle_set(self, addr, parts):
|
||||
# For documentation on key structure see
|
||||
# https://github.com/dovecot/core/blob/main/src/lib-storage/mailbox-attribute.h
|
||||
return False
|
||||
|
||||
def handle_commit_transaction(self, transaction_id, parts, transactions):
|
||||
# return whatever "set" command(s) set as result.
|
||||
return transactions.pop(transaction_id)["res"]
|
||||
|
||||
def serve_forever_from_socket(self, socket):
|
||||
dictproxy = self
|
||||
|
||||
class Handler(StreamRequestHandler):
|
||||
def handle(self):
|
||||
try:
|
||||
dictproxy.loop_forever(self.rfile, self.wfile)
|
||||
except Exception:
|
||||
logging.exception("Exception in the handler")
|
||||
raise
|
||||
|
||||
try:
|
||||
os.unlink(socket)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
|
||||
with ThreadingUnixStreamServer(socket, Handler) as server:
|
||||
try:
|
||||
server.serve_forever()
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
@@ -3,24 +3,14 @@ import json
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
from pathlib import Path
|
||||
from socketserver import (
|
||||
StreamRequestHandler,
|
||||
ThreadingMixIn,
|
||||
UnixStreamServer,
|
||||
)
|
||||
|
||||
from .config import Config, read_config
|
||||
from .database import Database
|
||||
from .dictproxy import DictProxy
|
||||
from .migrate_db import migrate_from_db_to_maildir
|
||||
|
||||
NOCREATE_FILE = "/etc/chatmail-nocreate"
|
||||
|
||||
|
||||
class UnknownCommand(ValueError):
|
||||
"""dictproxy handler received an unkown command"""
|
||||
|
||||
|
||||
def encrypt_password(password: str):
|
||||
# https://doc.dovecot.org/configuration_manual/authentication/password_schemes/
|
||||
passhash = crypt.crypt(password, crypt.METHOD_SHA512)
|
||||
@@ -60,74 +50,11 @@ def is_allowed_to_create(config: Config, user, cleartext_password) -> bool:
|
||||
config.username_min_length,
|
||||
config.username_max_length,
|
||||
)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def get_user_data(db, config: Config, user):
|
||||
if user == f"echo@{config.mail_domain}":
|
||||
return dict(
|
||||
home=f"/home/vmail/mail/{config.mail_domain}/echo@{config.mail_domain}",
|
||||
uid="vmail",
|
||||
gid="vmail",
|
||||
)
|
||||
|
||||
with db.read_connection() as conn:
|
||||
result = conn.get_user(user)
|
||||
if result:
|
||||
result["home"] = f"/home/vmail/mail/{config.mail_domain}/{user}"
|
||||
result["uid"] = "vmail"
|
||||
result["gid"] = "vmail"
|
||||
return result
|
||||
|
||||
|
||||
def lookup_userdb(db, config: Config, user):
|
||||
return get_user_data(db, config, user)
|
||||
|
||||
|
||||
def lookup_passdb(db, config: Config, user, cleartext_password):
|
||||
if user == f"echo@{config.mail_domain}":
|
||||
# Echobot writes password it wants to log in with into /run/echobot/password
|
||||
try:
|
||||
password = Path("/run/echobot/password").read_text()
|
||||
except Exception:
|
||||
logging.exception("Exception when trying to read /run/echobot/password")
|
||||
return None
|
||||
|
||||
return dict(
|
||||
home=f"/home/vmail/mail/{config.mail_domain}/echo@{config.mail_domain}",
|
||||
uid="vmail",
|
||||
gid="vmail",
|
||||
password=encrypt_password(password),
|
||||
)
|
||||
|
||||
with db.write_transaction() as conn:
|
||||
userdata = conn.get_user(user)
|
||||
if userdata:
|
||||
# Update last login time.
|
||||
conn.execute(
|
||||
"UPDATE users SET last_login=? WHERE addr=?", (int(time.time()), user)
|
||||
)
|
||||
|
||||
userdata["home"] = f"/home/vmail/mail/{config.mail_domain}/{user}"
|
||||
userdata["uid"] = "vmail"
|
||||
userdata["gid"] = "vmail"
|
||||
return userdata
|
||||
if not is_allowed_to_create(config, user, cleartext_password):
|
||||
return
|
||||
|
||||
encrypted_password = encrypt_password(cleartext_password)
|
||||
q = """INSERT INTO users (addr, password, last_login)
|
||||
VALUES (?, ?, ?)"""
|
||||
conn.execute(q, (user, encrypted_password, int(time.time())))
|
||||
return dict(
|
||||
home=f"/home/vmail/mail/{config.mail_domain}/{user}",
|
||||
uid="vmail",
|
||||
gid="vmail",
|
||||
password=encrypted_password,
|
||||
)
|
||||
|
||||
|
||||
def split_and_unescape(s):
|
||||
"""Split strings using double quote as a separator and backslash as escape character
|
||||
into parts."""
|
||||
@@ -154,15 +81,12 @@ def split_and_unescape(s):
|
||||
yield out
|
||||
|
||||
|
||||
def handle_dovecot_request(msg, db, config: Config):
|
||||
# see https://doc.dovecot.org/3.0/developer_manual/design/dict_protocol/
|
||||
short_command = msg[0]
|
||||
if short_command == "H": # HELLO
|
||||
# we don't do any checking on versions and just return
|
||||
return
|
||||
elif short_command == "L": # LOOKUP
|
||||
parts = msg[1:].split("\t")
|
||||
class AuthDictProxy(DictProxy):
|
||||
def __init__(self, config):
|
||||
super().__init__()
|
||||
self.config = config
|
||||
|
||||
def handle_lookup(self, parts):
|
||||
# Dovecot <2.3.17 has only one part,
|
||||
# do not attempt to read any other parts for compatibility.
|
||||
keyname = parts[0]
|
||||
@@ -170,13 +94,14 @@ def handle_dovecot_request(msg, db, config: Config):
|
||||
namespace, type, args = keyname.split("/", 2)
|
||||
args = list(split_and_unescape(args))
|
||||
|
||||
config = self.config
|
||||
reply_command = "F"
|
||||
res = ""
|
||||
if namespace == "shared":
|
||||
if type == "userdb":
|
||||
user = args[0]
|
||||
if user.endswith(f"@{config.mail_domain}"):
|
||||
res = lookup_userdb(db, config, user)
|
||||
res = self.lookup_userdb(user)
|
||||
if res:
|
||||
reply_command = "O"
|
||||
else:
|
||||
@@ -184,55 +109,48 @@ def handle_dovecot_request(msg, db, config: Config):
|
||||
elif type == "passdb":
|
||||
user = args[1]
|
||||
if user.endswith(f"@{config.mail_domain}"):
|
||||
res = lookup_passdb(db, config, user, cleartext_password=args[0])
|
||||
res = self.lookup_passdb(user, cleartext_password=args[0])
|
||||
if res:
|
||||
reply_command = "O"
|
||||
else:
|
||||
reply_command = "N"
|
||||
json_res = json.dumps(res) if res else ""
|
||||
return f"{reply_command}{json_res}\n"
|
||||
raise UnknownCommand(msg)
|
||||
|
||||
def handle_iterate(self, parts):
|
||||
# example: I0\t0\tshared/userdb/
|
||||
if parts[2] == "shared/userdb/":
|
||||
result = "".join(
|
||||
f"Oshared/userdb/{user}\t\n" for user in self.iter_userdb()
|
||||
)
|
||||
return f"{result}\n"
|
||||
|
||||
def handle_dovecot_protocol(rfile, wfile, db: Database, config: Config):
|
||||
while True:
|
||||
msg = rfile.readline().strip().decode()
|
||||
if not msg:
|
||||
break
|
||||
try:
|
||||
res = handle_dovecot_request(msg, db, config)
|
||||
except UnknownCommand:
|
||||
logging.warning("unknown command: %r", msg)
|
||||
else:
|
||||
if res:
|
||||
wfile.write(res.encode("ascii"))
|
||||
wfile.flush()
|
||||
def iter_userdb(self) -> list:
|
||||
"""Get a list of all user addresses."""
|
||||
return [x for x in os.listdir(self.config.mailboxes_dir) if "@" in x]
|
||||
|
||||
def lookup_userdb(self, addr):
|
||||
return self.config.get_user(addr).get_userdb_dict()
|
||||
|
||||
class ThreadedUnixStreamServer(ThreadingMixIn, UnixStreamServer):
|
||||
request_queue_size = 100
|
||||
def lookup_passdb(self, addr, cleartext_password):
|
||||
user = self.config.get_user(addr)
|
||||
userdata = user.get_userdb_dict()
|
||||
if userdata:
|
||||
return userdata
|
||||
if not is_allowed_to_create(self.config, addr, cleartext_password):
|
||||
return
|
||||
|
||||
user.set_password(encrypt_password(cleartext_password))
|
||||
print(f"Created address: {addr}", file=sys.stderr)
|
||||
return user.get_userdb_dict()
|
||||
|
||||
|
||||
def main():
|
||||
socket = sys.argv[1]
|
||||
db = Database(sys.argv[2])
|
||||
config = read_config(sys.argv[3])
|
||||
socket, cfgpath = sys.argv[1:]
|
||||
config = read_config(cfgpath)
|
||||
|
||||
class Handler(StreamRequestHandler):
|
||||
def handle(self):
|
||||
try:
|
||||
handle_dovecot_protocol(self.rfile, self.wfile, db, config)
|
||||
except Exception:
|
||||
logging.exception("Exception in the handler")
|
||||
raise
|
||||
migrate_from_db_to_maildir(config)
|
||||
|
||||
try:
|
||||
os.unlink(socket)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
dictproxy = AuthDictProxy(config=config)
|
||||
|
||||
with ThreadedUnixStreamServer(socket, Handler) as server:
|
||||
try:
|
||||
server.serve_forever()
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
dictproxy.serve_forever_from_socket(socket)
|
||||
|
||||
@@ -8,11 +8,11 @@ import logging
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from deltachat_rpc_client import Bot, DeltaChat, EventType, Rpc, events
|
||||
|
||||
from chatmaild.config import read_config
|
||||
from chatmaild.config import echobot_password_path, read_config
|
||||
from chatmaild.doveauth import encrypt_password
|
||||
from chatmaild.newemail import create_newemail_dict
|
||||
|
||||
hooks = events.HookCollection()
|
||||
@@ -21,9 +21,9 @@ hooks = events.HookCollection()
|
||||
@hooks.on(events.RawEvent)
|
||||
def log_event(event):
|
||||
if event.kind == EventType.INFO:
|
||||
logging.info("%s", event.msg)
|
||||
logging.info(event.msg)
|
||||
elif event.kind == EventType.WARNING:
|
||||
logging.warning("%s", event.msg)
|
||||
logging.warning(event.msg)
|
||||
|
||||
|
||||
@hooks.on(events.RawEvent(EventType.ERROR))
|
||||
@@ -45,7 +45,7 @@ def on_group_image_changed(event):
|
||||
|
||||
@hooks.on(events.GroupNameChanged)
|
||||
def on_group_name_changed(event):
|
||||
logging.info("group name changed, old name: %s", event.old_name)
|
||||
logging.info(f"group name changed, old name: {event.old_name}")
|
||||
|
||||
|
||||
@hooks.on(events.NewMessage(func=lambda e: not e.command))
|
||||
@@ -72,7 +72,7 @@ def main():
|
||||
with Rpc() as rpc:
|
||||
deltachat = DeltaChat(rpc)
|
||||
system_info = deltachat.get_system_info()
|
||||
logging.info("Running deltachat core %s", system_info.deltachat_core_version)
|
||||
logging.info(f"Running deltachat core {system_info.deltachat_core_version}")
|
||||
|
||||
accounts = deltachat.get_all_accounts()
|
||||
account = accounts[0] if accounts else deltachat.add_account()
|
||||
@@ -80,23 +80,23 @@ def main():
|
||||
bot = Bot(account, hooks)
|
||||
|
||||
config = read_config(sys.argv[1])
|
||||
addr = "echo@" + config.mail_domain
|
||||
|
||||
# Create password file
|
||||
if bot.is_configured():
|
||||
password = bot.account.get_config("mail_pw")
|
||||
else:
|
||||
password = create_newemail_dict(config)["password"]
|
||||
Path("/run/echobot/password").write_text(password)
|
||||
|
||||
echobot_password_path.write_text(encrypt_password(password))
|
||||
# Give the user which doveauth runs as access to the password file.
|
||||
subprocess.run(
|
||||
["/usr/bin/setfacl", "-m", "user:vmail:r", "/run/echobot/password"],
|
||||
check=True,
|
||||
subprocess.check_call(
|
||||
["/usr/bin/setfacl", "-m", "user:vmail:r", echobot_password_path],
|
||||
)
|
||||
|
||||
if not bot.is_configured():
|
||||
email = "echo@" + config.mail_domain
|
||||
bot.configure(email, password)
|
||||
bot.configure(addr, password)
|
||||
|
||||
bot.run_forever()
|
||||
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@ import json
|
||||
import logging
|
||||
import os
|
||||
from contextlib import contextmanager
|
||||
from random import randint
|
||||
|
||||
import filelock
|
||||
|
||||
@@ -32,5 +33,12 @@ class FileDict:
|
||||
except FileNotFoundError:
|
||||
return {}
|
||||
except Exception:
|
||||
logging.warning("corrupt serialization state at: %r", self.path)
|
||||
logging.warning(f"corrupt serialization state at: {self.path!r}")
|
||||
return {}
|
||||
|
||||
|
||||
def write_bytes_atomic(path, content):
|
||||
rint = randint(0, 10000000)
|
||||
tmp = path.with_name(path.name + f".tmp-{rint}")
|
||||
tmp.write_bytes(content)
|
||||
os.rename(tmp, path)
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
#!/usr/bin/env python3
|
||||
import asyncio
|
||||
import base64
|
||||
import binascii
|
||||
import logging
|
||||
import sys
|
||||
import time
|
||||
@@ -10,59 +12,136 @@ from smtplib import SMTP as SMTPClient
|
||||
|
||||
from aiosmtpd.controller import Controller
|
||||
|
||||
from .common_encrypted_subjects import common_encrypted_subjects
|
||||
from .config import read_config
|
||||
|
||||
|
||||
def check_openpgp_payload(payload: bytes):
|
||||
"""Checks the OpenPGP payload.
|
||||
|
||||
OpenPGP payload must consist only of PKESK and SKESK packets
|
||||
terminated by a single SEIPD packet.
|
||||
|
||||
Returns True if OpenPGP payload is correct,
|
||||
False otherwise.
|
||||
|
||||
May raise IndexError while trying to read OpenPGP packet header
|
||||
if it is truncated.
|
||||
"""
|
||||
i = 0
|
||||
while i < len(payload):
|
||||
# Only OpenPGP format is allowed.
|
||||
if payload[i] & 0xC0 != 0xC0:
|
||||
return False
|
||||
|
||||
packet_type_id = payload[i] & 0x3F
|
||||
i += 1
|
||||
if payload[i] < 192:
|
||||
# One-octet length.
|
||||
body_len = payload[i]
|
||||
i += 1
|
||||
elif payload[i] < 224:
|
||||
# Two-octet length.
|
||||
body_len = ((payload[i] - 192) << 8) + payload[i + 1] + 192
|
||||
i += 2
|
||||
elif payload[i] == 255:
|
||||
# Five-octet length.
|
||||
body_len = (
|
||||
(payload[i + 1] << 24)
|
||||
| (payload[i + 2] << 16)
|
||||
| (payload[i + 3] << 8)
|
||||
| payload[i + 4]
|
||||
)
|
||||
i += 5
|
||||
else:
|
||||
# Partial body length is not allowed.
|
||||
return False
|
||||
|
||||
i += body_len
|
||||
|
||||
if i == len(payload):
|
||||
if packet_type_id == 18:
|
||||
# Last packet should be
|
||||
# Symmetrically Encrypted and Integrity Protected Data Packet (SEIPD)
|
||||
return True
|
||||
elif packet_type_id not in [1, 3]:
|
||||
# All packets except the last one must be either
|
||||
# Public-Key Encrypted Session Key Packet (PKESK)
|
||||
# or
|
||||
# Symmetric-Key Encrypted Session Key Packet (SKESK)
|
||||
return False
|
||||
|
||||
if i == 0:
|
||||
return False
|
||||
|
||||
if i > len(payload):
|
||||
# Payload is truncated.
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def check_armored_payload(payload: str):
|
||||
prefix = "-----BEGIN PGP MESSAGE-----\r\n\r\n"
|
||||
if not payload.startswith(prefix):
|
||||
return False
|
||||
payload = payload.removeprefix(prefix)
|
||||
|
||||
suffix = "-----END PGP MESSAGE-----\r\n\r\n"
|
||||
if not payload.endswith(suffix):
|
||||
return False
|
||||
payload = payload.removesuffix(suffix)
|
||||
|
||||
# Remove CRC24.
|
||||
payload = payload.rpartition("=")[0]
|
||||
|
||||
try:
|
||||
payload = base64.b64decode(payload)
|
||||
except binascii.Error:
|
||||
return False
|
||||
|
||||
try:
|
||||
return check_openpgp_payload(payload)
|
||||
except IndexError:
|
||||
return False
|
||||
|
||||
|
||||
def check_encrypted(message):
|
||||
"""Check that the message is an OpenPGP-encrypted message."""
|
||||
"""Check that the message is an OpenPGP-encrypted message.
|
||||
|
||||
MIME structure of the message must correspond to <https://www.rfc-editor.org/rfc/rfc3156>.
|
||||
"""
|
||||
if not message.is_multipart():
|
||||
return False
|
||||
if message.get("subject") != "...":
|
||||
if message.get("subject") not in common_encrypted_subjects:
|
||||
return False
|
||||
if message.get_content_type() != "multipart/encrypted":
|
||||
return False
|
||||
parts_count = 0
|
||||
for part in message.iter_parts():
|
||||
# We explicitly check Content-Type of each part later,
|
||||
# but this is to be absolutely sure `get_payload()` returns string and not list.
|
||||
if part.is_multipart():
|
||||
return False
|
||||
|
||||
if parts_count == 0:
|
||||
if part.get_content_type() != "application/pgp-encrypted":
|
||||
return False
|
||||
|
||||
payload = part.get_payload()
|
||||
if payload.strip() != "Version: 1":
|
||||
return False
|
||||
elif parts_count == 1:
|
||||
if part.get_content_type() != "application/octet-stream":
|
||||
return False
|
||||
|
||||
if not check_armored_payload(part.get_payload()):
|
||||
return False
|
||||
else:
|
||||
return False
|
||||
parts_count += 1
|
||||
return True
|
||||
|
||||
|
||||
def check_mdn(message, envelope):
|
||||
if len(envelope.rcpt_tos) != 1:
|
||||
return False
|
||||
|
||||
for name in ["auto-submitted", "chat-version"]:
|
||||
if not message.get(name):
|
||||
return False
|
||||
|
||||
if message.get_content_type() != "multipart/report":
|
||||
return False
|
||||
|
||||
body = message.get_body()
|
||||
if body.get_content_type() != "text/plain":
|
||||
return False
|
||||
|
||||
if list(body.iter_attachments()) or list(body.iter_parts()):
|
||||
return False
|
||||
|
||||
# even with all mime-structural checks an attacker
|
||||
# could try to abuse the subject or body to contain links or other
|
||||
# annoyance -- we skip on checking subject/body for now as Delta Chat
|
||||
# should evolve to create E2E-encrypted read receipts anyway.
|
||||
# and then MDNs are just encrypted mail and can pass the border
|
||||
# to other instances.
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def asyncmain_beforequeue(config):
|
||||
port = config.filtermail_smtp_port
|
||||
Controller(BeforeQueueHandler(config), hostname="127.0.0.1", port=port).start()
|
||||
@@ -108,9 +187,6 @@ class BeforeQueueHandler:
|
||||
if envelope.mail_from.lower() != from_addr.lower():
|
||||
return f"500 Invalid FROM <{from_addr!r}> for <{envelope.mail_from!r}>"
|
||||
|
||||
if not mail_encrypted and check_mdn(message, envelope):
|
||||
return
|
||||
|
||||
if envelope.mail_from in self.config.passthrough_senders:
|
||||
return
|
||||
|
||||
|
||||
@@ -8,18 +8,24 @@ mail_domain = {mail_domain}
|
||||
#
|
||||
|
||||
#
|
||||
# Account Restrictions
|
||||
# Restrictions on user addresses
|
||||
#
|
||||
|
||||
# how many mails a user can send out per minute
|
||||
max_user_send_per_minute = 60
|
||||
|
||||
# maximum mailbox size of a chatmail account
|
||||
# maximum mailbox size of a chatmail address
|
||||
max_mailbox_size = 100M
|
||||
|
||||
# maximum message size for an e-mail in bytes
|
||||
max_message_size = 31457280
|
||||
|
||||
# days after which mails are unconditionally deleted
|
||||
delete_mails_after = 20
|
||||
|
||||
# days after which users without a successful login are deleted (database and mails)
|
||||
delete_inactive_users_after = 90
|
||||
|
||||
# minimum length a username must have
|
||||
username_min_length = 9
|
||||
|
||||
@@ -29,11 +35,12 @@ username_max_length = 9
|
||||
# minimum length a password must have
|
||||
password_min_length = 9
|
||||
|
||||
# list of chatmail accounts which can send outbound un-encrypted mail
|
||||
# list of chatmail addresses which can send outbound un-encrypted mail
|
||||
passthrough_senders =
|
||||
|
||||
# list of e-mail recipients for which to accept outbound un-encrypted mails
|
||||
passthrough_recipients = xstore@testrun.org groupsbot@hispanilandia.net
|
||||
# (space-separated)
|
||||
passthrough_recipients = xstore@testrun.org
|
||||
|
||||
#
|
||||
# Deployment Details
|
||||
@@ -45,6 +52,20 @@ filtermail_smtp_port = 10080
|
||||
# postfix accepts on the localhost reinject SMTP port
|
||||
postfix_reinject_port = 10025
|
||||
|
||||
# if set to "True" IPv6 is disabled
|
||||
disable_ipv6 = False
|
||||
|
||||
#
|
||||
# Debugging options
|
||||
#
|
||||
|
||||
# set to True if you want to track imap protocol execution
|
||||
# in per-maildir ".in/.out" files.
|
||||
# Note that you need to manually cleanup these files
|
||||
# so use this option with caution on production servers.
|
||||
imap_rawlog = false
|
||||
|
||||
|
||||
#
|
||||
# Privacy Policy
|
||||
#
|
||||
@@ -60,4 +81,3 @@ privacy_pdo =
|
||||
|
||||
# postal address of the privacy supervisor
|
||||
privacy_supervisor =
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
|
||||
[privacy]
|
||||
|
||||
passthrough_recipients = privacy@testrun.org xstore@testrun.org groupsbot@hispanilandia.net
|
||||
passthrough_recipients = privacy@testrun.org xstore@testrun.org
|
||||
|
||||
privacy_postal =
|
||||
Merlinux GmbH, Represented by the managing director H. Krekel,
|
||||
|
||||
31
chatmaild/src/chatmaild/lastlogin.py
Normal file
31
chatmaild/src/chatmaild/lastlogin.py
Normal file
@@ -0,0 +1,31 @@
|
||||
import sys
|
||||
|
||||
from .config import read_config
|
||||
from .dictproxy import DictProxy
|
||||
|
||||
|
||||
class LastLoginDictProxy(DictProxy):
|
||||
def __init__(self, config):
|
||||
super().__init__()
|
||||
self.config = config
|
||||
|
||||
def handle_set(self, addr, parts):
|
||||
keyname = parts[1].split("/")
|
||||
value = parts[2] if len(parts) > 2 else ""
|
||||
if keyname[0] == "shared" and keyname[1] == "last-login":
|
||||
if addr.startswith("echo@"):
|
||||
return True
|
||||
addr = keyname[2]
|
||||
timestamp = int(value)
|
||||
user = self.config.get_user(addr)
|
||||
user.set_last_login_timestamp(timestamp)
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def main():
|
||||
socket, config_path = sys.argv[1:]
|
||||
config = read_config(config_path)
|
||||
dictproxy = LastLoginDictProxy(config=config)
|
||||
dictproxy.serve_forever_from_socket(socket)
|
||||
@@ -1,25 +1,11 @@
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from socketserver import (
|
||||
StreamRequestHandler,
|
||||
ThreadingMixIn,
|
||||
UnixStreamServer,
|
||||
)
|
||||
|
||||
from .config import read_config
|
||||
from .dictproxy import DictProxy
|
||||
from .filedict import FileDict
|
||||
from .notifier import Notifier
|
||||
|
||||
DICTPROXY_HELLO_CHAR = "H"
|
||||
DICTPROXY_LOOKUP_CHAR = "L"
|
||||
DICTPROXY_ITERATE_CHAR = "I"
|
||||
DICTPROXY_BEGIN_TRANSACTION_CHAR = "B"
|
||||
DICTPROXY_SET_CHAR = "S"
|
||||
DICTPROXY_COMMIT_TRANSACTION_CHAR = "C"
|
||||
DICTPROXY_TRANSACTION_CHARS = "BSC"
|
||||
|
||||
|
||||
class Metadata:
|
||||
# each SETMETADATA on this key appends to a list of unique device tokens
|
||||
@@ -49,91 +35,55 @@ class Metadata:
|
||||
return mdict.get(self.DEVICETOKEN_KEY, [])
|
||||
|
||||
|
||||
def handle_dovecot_protocol(rfile, wfile, notifier, metadata, iroh_relay=None):
|
||||
transactions = {}
|
||||
while True:
|
||||
msg = rfile.readline().strip().decode()
|
||||
if not msg:
|
||||
break
|
||||
class MetadataDictProxy(DictProxy):
|
||||
def __init__(self, notifier, metadata, iroh_relay=None):
|
||||
super().__init__()
|
||||
self.notifier = notifier
|
||||
self.metadata = metadata
|
||||
self.iroh_relay = iroh_relay
|
||||
|
||||
res = handle_dovecot_request(msg, transactions, notifier, metadata, iroh_relay)
|
||||
if res:
|
||||
wfile.write(res.encode("ascii"))
|
||||
wfile.flush()
|
||||
|
||||
|
||||
def handle_dovecot_request(msg, transactions, notifier, metadata, iroh_relay=None):
|
||||
# see https://doc.dovecot.org/3.0/developer_manual/design/dict_protocol/
|
||||
short_command = msg[0]
|
||||
parts = msg[1:].split("\t")
|
||||
if short_command == DICTPROXY_LOOKUP_CHAR:
|
||||
def handle_lookup(self, parts):
|
||||
# Lpriv/43f5f508a7ea0366dff30200c15250e3/devicetoken\tlkj123poi@c2.testrun.org
|
||||
keyparts = parts[0].split("/", 2)
|
||||
if keyparts[0] == "priv":
|
||||
keyname = keyparts[2]
|
||||
addr = parts[1]
|
||||
if keyname == metadata.DEVICETOKEN_KEY:
|
||||
res = " ".join(metadata.get_tokens_for_addr(addr))
|
||||
if keyname == self.metadata.DEVICETOKEN_KEY:
|
||||
res = " ".join(self.metadata.get_tokens_for_addr(addr))
|
||||
return f"O{res}\n"
|
||||
elif keyparts[0] == "shared":
|
||||
keyname = keyparts[2]
|
||||
if (
|
||||
keyname == "vendor/vendor.dovecot/pvt/server/vendor/deltachat/irohrelay"
|
||||
and iroh_relay
|
||||
and self.iroh_relay
|
||||
):
|
||||
# Handle `GETMETADATA "" /shared/vendor/deltachat/irohrelay`
|
||||
return f"O{iroh_relay}\n"
|
||||
logging.warning("lookup ignored: %r", msg)
|
||||
return f"O{self.iroh_relay}\n"
|
||||
logging.warning(f"lookup ignored: {parts!r}")
|
||||
return "N\n"
|
||||
elif short_command == DICTPROXY_ITERATE_CHAR:
|
||||
# Empty line means ITER_FINISHED.
|
||||
# If we don't return empty line Dovecot will timeout.
|
||||
return "\n"
|
||||
elif short_command == DICTPROXY_HELLO_CHAR:
|
||||
return # no version checking
|
||||
|
||||
if short_command not in (DICTPROXY_TRANSACTION_CHARS):
|
||||
logging.warning("unknown dictproxy request: %r", msg)
|
||||
return
|
||||
|
||||
transaction_id = parts[0]
|
||||
|
||||
if short_command == DICTPROXY_BEGIN_TRANSACTION_CHAR:
|
||||
addr = parts[1]
|
||||
transactions[transaction_id] = dict(addr=addr, res="O\n")
|
||||
elif short_command == DICTPROXY_COMMIT_TRANSACTION_CHAR:
|
||||
# each set devicetoken operation persists directly
|
||||
# and does not wait until a "commit" comes
|
||||
# because our dovecot config does not involve
|
||||
# multiple set-operations in a single commit
|
||||
return transactions.pop(transaction_id)["res"]
|
||||
elif short_command == DICTPROXY_SET_CHAR:
|
||||
def handle_set(self, addr, parts):
|
||||
# For documentation on key structure see
|
||||
# https://github.com/dovecot/core/blob/main/src/lib-storage/mailbox-attribute.h
|
||||
|
||||
keyname = parts[1].split("/")
|
||||
value = parts[2] if len(parts) > 2 else ""
|
||||
addr = transactions[transaction_id]["addr"]
|
||||
if keyname[0] == "priv" and keyname[2] == metadata.DEVICETOKEN_KEY:
|
||||
metadata.add_token_to_addr(addr, value)
|
||||
if keyname[0] == "priv" and keyname[2] == self.metadata.DEVICETOKEN_KEY:
|
||||
self.metadata.add_token_to_addr(addr, value)
|
||||
return True
|
||||
elif keyname[0] == "priv" and keyname[2] == "messagenew":
|
||||
notifier.new_message_for_addr(addr, metadata)
|
||||
else:
|
||||
# Transaction failed.
|
||||
transactions[transaction_id]["res"] = "F\n"
|
||||
self.notifier.new_message_for_addr(addr, self.metadata)
|
||||
return True
|
||||
|
||||
|
||||
class ThreadedUnixStreamServer(ThreadingMixIn, UnixStreamServer):
|
||||
request_queue_size = 100
|
||||
return False
|
||||
|
||||
|
||||
def main():
|
||||
socket, vmail_dir, config_path = sys.argv[1:]
|
||||
socket, config_path = sys.argv[1:]
|
||||
|
||||
config = read_config(config_path)
|
||||
iroh_relay = config.iroh_relay
|
||||
|
||||
vmail_dir = Path(vmail_dir)
|
||||
vmail_dir = config.mailboxes_dir
|
||||
if not vmail_dir.exists():
|
||||
logging.error("vmail dir does not exist: %r", vmail_dir)
|
||||
return 1
|
||||
@@ -144,23 +94,8 @@ def main():
|
||||
notifier = Notifier(queue_dir)
|
||||
notifier.start_notification_threads(metadata.remove_token_from_addr)
|
||||
|
||||
class Handler(StreamRequestHandler):
|
||||
def handle(self):
|
||||
try:
|
||||
handle_dovecot_protocol(
|
||||
self.rfile, self.wfile, notifier, metadata, iroh_relay
|
||||
)
|
||||
except Exception:
|
||||
logging.exception("Exception in the dovecot dictproxy handler")
|
||||
raise
|
||||
dictproxy = MetadataDictProxy(
|
||||
notifier=notifier, metadata=metadata, iroh_relay=iroh_relay
|
||||
)
|
||||
|
||||
try:
|
||||
os.unlink(socket)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
|
||||
with ThreadedUnixStreamServer(socket, Handler) as server:
|
||||
try:
|
||||
server.serve_forever()
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
dictproxy.serve_forever_from_socket(socket)
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
#!/usr/bin/env python3
|
||||
import sys
|
||||
import time
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
@@ -16,9 +15,15 @@ def main(vmail_dir=None):
|
||||
if path.name[:3] in ("ci-", "ac_"):
|
||||
ci_accounts += 1
|
||||
|
||||
timestamp = int(time.time() * 1000)
|
||||
print(f"accounts {accounts} {timestamp}")
|
||||
print(f"ci_accounts {ci_accounts} {timestamp}")
|
||||
print("# HELP total number of accounts")
|
||||
print("# TYPE accounts gauge")
|
||||
print(f"accounts {accounts}")
|
||||
print("# HELP number of CI accounts")
|
||||
print("# TYPE ci_accounts gauge")
|
||||
print(f"ci_accounts {ci_accounts}")
|
||||
print("# HELP number of non-CI accounts")
|
||||
print("# TYPE nonci_accounts gauge")
|
||||
print(f"nonci_accounts {accounts - ci_accounts}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
63
chatmaild/src/chatmaild/migrate_db.py
Normal file
63
chatmaild/src/chatmaild/migrate_db.py
Normal file
@@ -0,0 +1,63 @@
|
||||
"""
|
||||
migration code from old sqlite databases into per-maildir "password" files
|
||||
where mtime reflects and is updated to be the "last-login" time.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sqlite3
|
||||
import sys
|
||||
|
||||
from chatmaild.config import read_config
|
||||
|
||||
|
||||
def get_all_rows(path):
|
||||
assert path.exists()
|
||||
uri = f"file:{path}?mode=ro"
|
||||
sqlconn = sqlite3.connect(uri, timeout=60, isolation_level="DEFERRED", uri=True)
|
||||
cur = sqlconn.cursor()
|
||||
cur.execute("SELECT * from users")
|
||||
rows = cur.fetchall()
|
||||
sqlconn.close()
|
||||
return rows
|
||||
|
||||
|
||||
def migrate_from_db_to_maildir(config, chunking=10000):
|
||||
path = config.passdb_path
|
||||
if not path.exists():
|
||||
return
|
||||
|
||||
all_rows = get_all_rows(path)
|
||||
|
||||
# don't transfer special/CI accounts
|
||||
rows = [row for row in all_rows if row[0][:3] not in ("ci-", "ac_")]
|
||||
|
||||
logging.info(f"ignoring {len(all_rows)-len(rows)} CI accounts")
|
||||
logging.info(f"migrating {len(rows)} sqlite database passwords to user dirs")
|
||||
|
||||
for i, row in enumerate(rows):
|
||||
addr = row[0]
|
||||
enc_password = row[1]
|
||||
user = config.get_user(addr)
|
||||
user.set_password(enc_password)
|
||||
|
||||
if len(row) == 3 and row[2]:
|
||||
timestamp = int(row[2])
|
||||
user.set_last_login_timestamp(timestamp)
|
||||
|
||||
if i > 0 and i % chunking == 0:
|
||||
logging.info(f"migration-progress: {i} passwords transferred")
|
||||
|
||||
logging.info("migration: all passwords migrated")
|
||||
oldpath = config.passdb_path.with_suffix(config.passdb_path.suffix + ".old")
|
||||
os.rename(config.passdb_path, oldpath)
|
||||
for path in config.passdb_path.parent.iterdir():
|
||||
if path.name.startswith(config.passdb_path.name + "-"):
|
||||
path.unlink()
|
||||
logging.info(f"migration: moved database to {oldpath!r}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
config = read_config(sys.argv[1])
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
migrate_from_db_to_maildir(config)
|
||||
@@ -92,7 +92,7 @@ class Notifier:
|
||||
def requeue_persistent_queue_items(self):
|
||||
for queue_path in self.queue_dir.iterdir():
|
||||
if queue_path.name.endswith(".tmp"):
|
||||
logging.warning("removing spurious queue item: %r", queue_path)
|
||||
logging.warning(f"removing spurious queue item: {queue_path!r}")
|
||||
queue_path.unlink()
|
||||
continue
|
||||
queue_item = PersistentQueueItem.read_from_path(queue_path)
|
||||
@@ -104,7 +104,7 @@ class Notifier:
|
||||
deadline = queue_item.start_ts + self.DROP_DEADLINE
|
||||
if retry_num >= len(self.retry_queues) or when > deadline:
|
||||
queue_item.delete()
|
||||
logging.error("notification exceeded deadline: %r", queue_item.token)
|
||||
logging.error(f"notification exceeded deadline: {queue_item.token!r}")
|
||||
return
|
||||
|
||||
self.retry_queues[retry_num].put((when, queue_item))
|
||||
@@ -162,5 +162,5 @@ class NotifyThread(Thread):
|
||||
queue_item.delete()
|
||||
return
|
||||
|
||||
logging.warning("Notification request failed: %r", res)
|
||||
logging.warning(f"Notification request failed: {res!r}")
|
||||
self.notifier.queue_for_retry(queue_item, retry_num=self.retry_num + 1)
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
From: {from_addr}
|
||||
To: {to_addr}
|
||||
Subject: ...
|
||||
Subject: {subject}
|
||||
Date: Sun, 15 Oct 2023 16:43:21 +0000
|
||||
Message-ID: <Mr.UVyJWZmkCKM.hGzNc6glBE_@c2.testrun.org>
|
||||
In-Reply-To: <Mr.MvmCz-GQbi_.6FGRkhDf05c@c2.testrun.org>
|
||||
|
||||
44
chatmaild/src/chatmaild/tests/mail-data/literal.eml
Normal file
44
chatmaild/src/chatmaild/tests/mail-data/literal.eml
Normal file
@@ -0,0 +1,44 @@
|
||||
From: {from_addr}
|
||||
|
||||
To: {to_addr}
|
||||
|
||||
Subject: ...
|
||||
|
||||
Date: Sun, 15 Oct 2023 16:43:21 +0000
|
||||
|
||||
Message-ID: <Mr.UVyJWZmkCKM.hGzNc6glBE_@c2.testrun.org>
|
||||
|
||||
In-Reply-To: <Mr.MvmCz-GQbi_.6FGRkhDf05c@c2.testrun.org>
|
||||
|
||||
References: <Mr.3gckbNy5bch.uK3Hd2Ws6-w@c2.testrun.org>
|
||||
|
||||
<Mr.MvmCz-GQbi_.6FGRkhDf05c@c2.testrun.org>
|
||||
|
||||
Chat-Version: 1.0
|
||||
|
||||
Autocrypt: addr={from_addr}; prefer-encrypt=mutual;
|
||||
|
||||
keydata=xjMEZSwWjhYJKwYBBAHaRw8BAQdAQBEhqeJh0GueHB6kF/DUQqYCxARNBVokg/AzT+7LqH
|
||||
|
||||
rNFzxiYXJiYXpAYzIudGVzdHJ1bi5vcmc+wosEEBYIADMCGQEFAmUsFo4CGwMECwkIBwYVCAkKCwID
|
||||
|
||||
FgIBFiEEFTfUNvVnY3b9F7yHnmme1PfUhX8ACgkQnmme1PfUhX9A4AEAnHWHp49eBCMHK5t66gYPiW
|
||||
|
||||
XQuB1mwUjzGfYWB+0RXUoA/0xcQ3FbUNlGKW7Blp6eMFfViv6Mv2d3kNSXACB6nmcMzjgEZSwWjhIK
|
||||
|
||||
KwYBBAGXVQEFAQEHQBpY5L2M1XHo0uxf8SX1wNLBp/OVvidoWHQF2Jz+kJsUAwEIB8J4BBgWCAAgBQ
|
||||
|
||||
JlLBaOAhsMFiEEFTfUNvVnY3b9F7yHnmme1PfUhX8ACgkQnmme1PfUhX/INgEA37AJaNvruYsJVanP
|
||||
|
||||
IXnYw4CKd55UAwl8Zcy+M2diAbkA/0fHHcGV4r78hpbbL1Os52DPOdqYQRauIeJUeG+G6bQO
|
||||
|
||||
MIME-Version: 1.0
|
||||
|
||||
Content-Type: multipart/encrypted; protocol="application/pgp-encrypted";
|
||||
|
||||
boundary="YFrteb74qSXmggbOxZL9dRnhymywAi"
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -7,16 +7,20 @@ from email.parser import BytesParser
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from chatmaild.config import read_config, write_initial_config
|
||||
from chatmaild.database import Database
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def make_config(tmp_path):
|
||||
inipath = tmp_path.joinpath("chatmail.ini")
|
||||
|
||||
def make_conf(mail_domain):
|
||||
write_initial_config(inipath, mail_domain=mail_domain)
|
||||
def make_conf(mail_domain, settings=None):
|
||||
basedir = tmp_path.joinpath(f"vmail/{mail_domain}")
|
||||
basedir.mkdir(parents=True, exist_ok=True)
|
||||
overrides = settings.copy() if settings else {}
|
||||
overrides["mailboxes_dir"] = str(basedir)
|
||||
write_initial_config(inipath, mail_domain, overrides=overrides)
|
||||
return read_config(inipath)
|
||||
|
||||
return make_conf
|
||||
@@ -32,6 +36,11 @@ def maildomain(example_config):
|
||||
return example_config.mail_domain
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def testaddr(maildomain):
|
||||
return f"user.name@{maildomain}"
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def gencreds(maildomain):
|
||||
count = itertools.count()
|
||||
@@ -50,13 +59,6 @@ def gencreds(maildomain):
|
||||
return lambda domain=None: next(gen(domain))
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def db(tmpdir):
|
||||
db_path = tmpdir / "passdb.sqlite"
|
||||
print("database path:", db_path)
|
||||
return Database(db_path)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def maildata(request):
|
||||
try:
|
||||
@@ -67,9 +69,30 @@ def maildata(request):
|
||||
|
||||
assert datadir.exists(), datadir
|
||||
|
||||
def maildata(name, from_addr, to_addr):
|
||||
data = datadir.joinpath(name).read_text()
|
||||
text = data.format(from_addr=from_addr, to_addr=to_addr)
|
||||
def maildata(name, from_addr, to_addr, subject="..."):
|
||||
# Using `.read_bytes().decode()` instead of `.read_text()` to preserve newlines.
|
||||
data = datadir.joinpath(name).read_bytes().decode()
|
||||
|
||||
text = data.format(from_addr=from_addr, to_addr=to_addr, subject=subject)
|
||||
return BytesParser(policy=policy.default).parsebytes(text.encode())
|
||||
|
||||
return maildata
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mockout():
|
||||
class MockOut:
|
||||
captured_red = []
|
||||
captured_green = []
|
||||
captured_plain = []
|
||||
|
||||
def red(self, msg):
|
||||
self.captured_red.append(msg)
|
||||
|
||||
def green(self, msg):
|
||||
self.captured_green.append(msg)
|
||||
|
||||
def __call__(self, msg):
|
||||
self.captured_plain.append(msg)
|
||||
|
||||
return MockOut()
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import pytest
|
||||
|
||||
from chatmaild.config import read_config
|
||||
|
||||
|
||||
@@ -30,3 +32,35 @@ def test_read_config_testrun(make_config):
|
||||
assert config.password_min_length == 9
|
||||
assert "privacy@testrun.org" in config.passthrough_recipients
|
||||
assert config.passthrough_senders == []
|
||||
|
||||
|
||||
def test_config_userstate_paths(make_config, tmp_path):
|
||||
config = make_config("something.testrun.org")
|
||||
mailboxes_dir = config.mailboxes_dir
|
||||
passdb_path = config.passdb_path
|
||||
assert mailboxes_dir.name == "something.testrun.org"
|
||||
assert str(passdb_path) == "/home/vmail/passdb.sqlite"
|
||||
assert config.mail_domain == "something.testrun.org"
|
||||
path = config.get_user("user1@something.testrun.org").maildir
|
||||
assert not path.exists()
|
||||
assert path == mailboxes_dir.joinpath("user1@something.testrun.org")
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
config.get_user("")
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
config.get_user(None)
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
config.get_user("../some@something.testrun.org").maildir
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
config.get_user("..").maildir
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
config.get_user(".")
|
||||
|
||||
|
||||
def test_config_max_message_size(make_config, tmp_path):
|
||||
config = make_config("something.testrun.org", dict(max_message_size="10000"))
|
||||
assert config.max_message_size == 10000
|
||||
|
||||
59
chatmaild/src/chatmaild/tests/test_delete_inactive_users.py
Normal file
59
chatmaild/src/chatmaild/tests/test_delete_inactive_users.py
Normal file
@@ -0,0 +1,59 @@
|
||||
import time
|
||||
|
||||
from chatmaild.delete_inactive_users import delete_inactive_users
|
||||
from chatmaild.doveauth import AuthDictProxy
|
||||
|
||||
|
||||
def test_login_timestamps(example_config):
|
||||
testaddr = "someuser@chat.example.org"
|
||||
user = example_config.get_user(testaddr)
|
||||
|
||||
# password file needs to be set because it's mtime tracks last-login time
|
||||
user.set_password("1l2k3j1l2k3j123")
|
||||
for i in range(10):
|
||||
user.set_last_login_timestamp(86400 * 4 + i)
|
||||
assert user.get_last_login_timestamp() == 86400 * 4
|
||||
|
||||
|
||||
def test_delete_inactive_users(example_config):
|
||||
new = time.time()
|
||||
old = new - (example_config.delete_inactive_users_after * 86400) - 1
|
||||
dictproxy = AuthDictProxy(example_config)
|
||||
|
||||
def create_user(addr, last_login):
|
||||
dictproxy.lookup_passdb(addr, "q9mr3faue")
|
||||
user = example_config.get_user(addr)
|
||||
user.maildir.joinpath("cur").mkdir()
|
||||
user.maildir.joinpath("cur", "something").mkdir()
|
||||
user.set_last_login_timestamp(timestamp=last_login)
|
||||
|
||||
# create some stale and some new accounts
|
||||
to_remove = []
|
||||
for i in range(150):
|
||||
addr = f"oldold{i:03}@chat.example.org"
|
||||
create_user(addr, last_login=old)
|
||||
to_remove.append(addr)
|
||||
|
||||
remain = []
|
||||
for i in range(5):
|
||||
addr = f"newnew{i:03}@chat.example.org"
|
||||
create_user(addr, last_login=new)
|
||||
remain.append(addr)
|
||||
|
||||
# check pre and post-conditions for delete_inactive_users()
|
||||
|
||||
for addr in to_remove:
|
||||
assert example_config.get_user(addr).maildir.exists()
|
||||
|
||||
delete_inactive_users(example_config)
|
||||
|
||||
for p in example_config.mailboxes_dir.iterdir():
|
||||
assert not p.name.startswith("old")
|
||||
|
||||
for addr in to_remove:
|
||||
assert not example_config.get_user(addr).maildir.exists()
|
||||
|
||||
for addr in remain:
|
||||
userdir = example_config.get_user(addr).maildir
|
||||
assert userdir.exists()
|
||||
assert userdir.joinpath("password").read_text()
|
||||
@@ -4,104 +4,132 @@ import queue
|
||||
import threading
|
||||
import traceback
|
||||
|
||||
import chatmaild.doveauth
|
||||
import pytest
|
||||
from chatmaild.database import DBError
|
||||
|
||||
import chatmaild.doveauth
|
||||
from chatmaild.doveauth import (
|
||||
get_user_data,
|
||||
handle_dovecot_protocol,
|
||||
handle_dovecot_request,
|
||||
lookup_passdb,
|
||||
AuthDictProxy,
|
||||
is_allowed_to_create,
|
||||
)
|
||||
from chatmaild.newemail import create_newemail_dict
|
||||
|
||||
|
||||
def test_basic(db, example_config):
|
||||
lookup_passdb(db, example_config, "asdf12345@chat.example.org", "q9mr3faue")
|
||||
data = get_user_data(db, example_config, "asdf12345@chat.example.org")
|
||||
@pytest.fixture
|
||||
def dictproxy(example_config):
|
||||
return AuthDictProxy(config=example_config)
|
||||
|
||||
|
||||
def test_basic(dictproxy, gencreds):
|
||||
addr, password = gencreds()
|
||||
dictproxy.lookup_passdb(addr, password)
|
||||
data = dictproxy.lookup_userdb(addr)
|
||||
assert data
|
||||
data2 = lookup_passdb(
|
||||
db, example_config, "asdf12345@chat.example.org", "q9mr3jewvadsfaue"
|
||||
)
|
||||
data2 = dictproxy.lookup_passdb(addr, password)
|
||||
assert data == data2
|
||||
|
||||
|
||||
def test_dont_overwrite_password_on_wrong_login(db, example_config):
|
||||
def test_iterate_addresses(dictproxy):
|
||||
addresses = []
|
||||
|
||||
for i in range(10):
|
||||
addresses.append(f"asdf1234{i}@chat.example.org")
|
||||
dictproxy.lookup_passdb(addresses[-1], "q9mr3faue")
|
||||
|
||||
res = dictproxy.iter_userdb()
|
||||
assert set(res) == set(addresses)
|
||||
|
||||
|
||||
def test_invalid_username_length(example_config):
|
||||
config = example_config
|
||||
config.username_min_length = 6
|
||||
config.username_max_length = 10
|
||||
password = create_newemail_dict(config)["password"]
|
||||
assert not is_allowed_to_create(config, f"a1234@{config.mail_domain}", password)
|
||||
assert is_allowed_to_create(config, f"012345@{config.mail_domain}", password)
|
||||
assert is_allowed_to_create(config, f"0123456@{config.mail_domain}", password)
|
||||
assert is_allowed_to_create(config, f"0123456789@{config.mail_domain}", password)
|
||||
assert not is_allowed_to_create(
|
||||
config, f"0123456789x@{config.mail_domain}", password
|
||||
)
|
||||
|
||||
|
||||
def test_dont_overwrite_password_on_wrong_login(dictproxy):
|
||||
"""Test that logging in with a different password doesn't create a new user"""
|
||||
res = lookup_passdb(
|
||||
db, example_config, "newuser12@chat.example.org", "kajdlkajsldk12l3kj1983"
|
||||
res = dictproxy.lookup_passdb(
|
||||
"newuser12@chat.example.org", "kajdlkajsldk12l3kj1983"
|
||||
)
|
||||
assert res["password"]
|
||||
res2 = lookup_passdb(db, example_config, "newuser12@chat.example.org", "kajdslqwe")
|
||||
res2 = dictproxy.lookup_passdb("newuser12@chat.example.org", "kajdslqwe")
|
||||
# this function always returns a password hash, which is actually compared by dovecot.
|
||||
assert res["password"] == res2["password"]
|
||||
|
||||
|
||||
def test_nocreate_file(db, monkeypatch, tmpdir, example_config):
|
||||
def test_nocreate_file(monkeypatch, tmpdir, dictproxy):
|
||||
p = tmpdir.join("nocreate")
|
||||
p.write("")
|
||||
monkeypatch.setattr(chatmaild.doveauth, "NOCREATE_FILE", str(p))
|
||||
lookup_passdb(
|
||||
db, example_config, "newuser12@chat.example.org", "zequ0Aimuchoodaechik"
|
||||
)
|
||||
assert not get_user_data(db, example_config, "newuser12@chat.example.org")
|
||||
dictproxy.lookup_passdb("newuser12@chat.example.org", "zequ0Aimuchoodaechik")
|
||||
assert not dictproxy.lookup_userdb("newuser12@chat.example.org")
|
||||
|
||||
|
||||
def test_db_version(db):
|
||||
assert db.get_schema_version() == 1
|
||||
|
||||
|
||||
def test_too_high_db_version(db):
|
||||
with db.write_transaction() as conn:
|
||||
conn.execute("PRAGMA user_version=%s;" % (999,))
|
||||
with pytest.raises(DBError):
|
||||
db.ensure_tables()
|
||||
|
||||
|
||||
def test_handle_dovecot_request(db, example_config):
|
||||
def test_handle_dovecot_request(dictproxy):
|
||||
transactions = {}
|
||||
# Test that password can contain ", ', \ and /
|
||||
msg = (
|
||||
'Lshared/passdb/laksjdlaksjdlak\\\\sjdlk\\"12j\\\'3l1/k2j3123"'
|
||||
"some42123@chat.example.org\tsome42123@chat.example.org"
|
||||
)
|
||||
res = handle_dovecot_request(msg, db, example_config)
|
||||
res = dictproxy.handle_dovecot_request(msg, transactions)
|
||||
assert res
|
||||
assert res[0] == "O" and res.endswith("\n")
|
||||
userdata = json.loads(res[1:].strip())
|
||||
assert (
|
||||
userdata["home"]
|
||||
== "/home/vmail/mail/chat.example.org/some42123@chat.example.org"
|
||||
)
|
||||
assert userdata["home"].endswith("chat.example.org/some42123@chat.example.org")
|
||||
assert userdata["uid"] == userdata["gid"] == "vmail"
|
||||
assert userdata["password"].startswith("{SHA512-CRYPT}")
|
||||
|
||||
|
||||
def test_handle_dovecot_protocol_hello_is_skipped(db, example_config, caplog):
|
||||
def test_handle_dovecot_protocol_hello_is_skipped(example_config, caplog):
|
||||
dictproxy = AuthDictProxy(config=example_config)
|
||||
rfile = io.BytesIO(b"H3\t2\t0\t\tauth\n")
|
||||
wfile = io.BytesIO()
|
||||
handle_dovecot_protocol(rfile, wfile, db, example_config)
|
||||
dictproxy.loop_forever(rfile, wfile)
|
||||
assert wfile.getvalue() == b""
|
||||
assert not caplog.messages
|
||||
|
||||
|
||||
def test_handle_dovecot_protocol(db, example_config):
|
||||
def test_handle_dovecot_protocol_user_not_exists(example_config):
|
||||
dictproxy = AuthDictProxy(config=example_config)
|
||||
rfile = io.BytesIO(
|
||||
b"H3\t2\t0\t\tauth\nLshared/userdb/foobar@chat.example.org\tfoobar@chat.example.org\n"
|
||||
)
|
||||
wfile = io.BytesIO()
|
||||
handle_dovecot_protocol(rfile, wfile, db, example_config)
|
||||
dictproxy.loop_forever(rfile, wfile)
|
||||
assert wfile.getvalue() == b"N\n"
|
||||
|
||||
|
||||
def test_50_concurrent_lookups_different_accounts(db, gencreds, example_config):
|
||||
def test_handle_dovecot_protocol_iterate(gencreds, example_config):
|
||||
dictproxy = AuthDictProxy(config=example_config)
|
||||
dictproxy.lookup_passdb("asdf00000@chat.example.org", "q9mr3faue")
|
||||
dictproxy.lookup_passdb("asdf11111@chat.example.org", "q9mr3faue")
|
||||
rfile = io.BytesIO(b"H3\t2\t0\t\tauth\nI0\t0\tshared/userdb/")
|
||||
wfile = io.BytesIO()
|
||||
dictproxy.loop_forever(rfile, wfile)
|
||||
lines = wfile.getvalue().decode("ascii").split("\n")
|
||||
assert "Oshared/userdb/asdf00000@chat.example.org\t" in lines
|
||||
assert "Oshared/userdb/asdf11111@chat.example.org\t" in lines
|
||||
assert not lines[2]
|
||||
|
||||
|
||||
def test_50_concurrent_lookups_different_accounts(gencreds, dictproxy):
|
||||
num_threads = 50
|
||||
req_per_thread = 5
|
||||
results = queue.Queue()
|
||||
|
||||
def lookup(db):
|
||||
def lookup():
|
||||
for i in range(req_per_thread):
|
||||
addr, password = gencreds()
|
||||
try:
|
||||
lookup_passdb(db, example_config, addr, password)
|
||||
dictproxy.lookup_passdb(addr, password)
|
||||
except Exception:
|
||||
results.put(traceback.format_exc())
|
||||
else:
|
||||
@@ -109,7 +137,7 @@ def test_50_concurrent_lookups_different_accounts(db, gencreds, example_config):
|
||||
|
||||
threads = []
|
||||
for i in range(num_threads):
|
||||
thread = threading.Thread(target=lookup, args=(db,), daemon=True)
|
||||
thread = threading.Thread(target=lookup, daemon=True)
|
||||
threads.append(thread)
|
||||
|
||||
print(f"created {num_threads} threads, starting them and waiting for results")
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
from chatmaild.filedict import FileDict
|
||||
import threading
|
||||
|
||||
from chatmaild.filedict import FileDict, write_bytes_atomic
|
||||
|
||||
|
||||
def test_basic(tmp_path):
|
||||
@@ -17,3 +19,21 @@ def test_bad_marshal_file(tmp_path, caplog):
|
||||
fdict1.path.write_bytes(b"l12k3l12k3l")
|
||||
assert fdict1.read() == {}
|
||||
assert "corrupt" in caplog.records[0].msg
|
||||
|
||||
|
||||
def test_write_bytes_atomic_concurrent(tmp_path):
|
||||
p = tmp_path.joinpath("somefile.ext")
|
||||
write_bytes_atomic(p, b"hello")
|
||||
|
||||
threads = []
|
||||
for i in range(30):
|
||||
content = f"hello{i}".encode("ascii")
|
||||
t = threading.Thread(target=lambda: write_bytes_atomic(p, content))
|
||||
t.start()
|
||||
threads.append(t)
|
||||
|
||||
for t in threads:
|
||||
t.join()
|
||||
|
||||
assert p.read_text().strip() != "hello"
|
||||
assert len(list(p.parent.iterdir())) == 1
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
import pytest
|
||||
|
||||
from chatmaild.filtermail import (
|
||||
BeforeQueueHandler,
|
||||
SendRateLimiter,
|
||||
check_armored_payload,
|
||||
check_encrypted,
|
||||
check_mdn,
|
||||
common_encrypted_subjects,
|
||||
)
|
||||
|
||||
|
||||
@@ -54,42 +56,33 @@ def test_filtermail_no_encryption_detection(maildata):
|
||||
|
||||
|
||||
def test_filtermail_encryption_detection(maildata):
|
||||
msg = maildata("encrypted.eml", from_addr="1@example.org", to_addr="2@example.org")
|
||||
assert check_encrypted(msg)
|
||||
for subject in common_encrypted_subjects:
|
||||
msg = maildata(
|
||||
"encrypted.eml",
|
||||
from_addr="1@example.org",
|
||||
to_addr="2@example.org",
|
||||
subject=subject,
|
||||
)
|
||||
assert check_encrypted(msg)
|
||||
|
||||
# if the subject is not "..." it is not considered ac-encrypted
|
||||
# if the subject is not a known encrypted subject value, it is not considered ac-encrypted
|
||||
msg.replace_header("Subject", "Click this link")
|
||||
assert not check_encrypted(msg)
|
||||
|
||||
|
||||
def test_filtermail_is_mdn(maildata, gencreds, handler):
|
||||
def test_filtermail_no_literal_packets(maildata):
|
||||
"""Test that literal OpenPGP packet is not considered an encrypted mail."""
|
||||
msg = maildata("literal.eml", from_addr="1@example.org", to_addr="2@example.org")
|
||||
assert not check_encrypted(msg)
|
||||
|
||||
|
||||
def test_filtermail_unencrypted_mdn(maildata, gencreds):
|
||||
"""Unencrypted MDNs should not pass."""
|
||||
from_addr = gencreds()[0]
|
||||
to_addr = gencreds()[0] + ".other"
|
||||
msg = maildata("mdn.eml", from_addr, to_addr)
|
||||
msg = maildata("mdn.eml", from_addr=from_addr, to_addr=to_addr)
|
||||
|
||||
class env:
|
||||
mail_from = from_addr
|
||||
rcpt_tos = [to_addr]
|
||||
content = msg.as_bytes()
|
||||
|
||||
assert check_mdn(msg, env)
|
||||
print(msg.as_string())
|
||||
|
||||
assert not handler.check_DATA(env)
|
||||
|
||||
|
||||
def test_filtermail_to_multiple_recipients_no_mdn(maildata, gencreds):
|
||||
from_addr = gencreds()[0]
|
||||
to_addr = gencreds()[0] + ".other"
|
||||
thirdaddr = gencreds()[0]
|
||||
msg = maildata("mdn.eml", from_addr, to_addr)
|
||||
|
||||
class env:
|
||||
mail_from = from_addr
|
||||
rcpt_tos = [to_addr, thirdaddr]
|
||||
content = msg.as_bytes()
|
||||
|
||||
assert not check_mdn(msg, env)
|
||||
assert not check_encrypted(msg)
|
||||
|
||||
|
||||
def test_send_rate_limiter():
|
||||
@@ -110,7 +103,7 @@ def test_excempt_privacy(maildata, gencreds, handler):
|
||||
handler.config.passthrough_recipients = [to_addr]
|
||||
false_to = "privacy@something.org"
|
||||
|
||||
msg = maildata("plain.eml", from_addr, to_addr)
|
||||
msg = maildata("plain.eml", from_addr=from_addr, to_addr=to_addr)
|
||||
|
||||
class env:
|
||||
mail_from = from_addr
|
||||
@@ -133,7 +126,7 @@ def test_passthrough_senders(gencreds, handler, maildata):
|
||||
to_addr = "recipient@something.org"
|
||||
handler.config.passthrough_senders = [acc1]
|
||||
|
||||
msg = maildata("plain.eml", acc1, to_addr)
|
||||
msg = maildata("plain.eml", from_addr=acc1, to_addr=to_addr)
|
||||
|
||||
class env:
|
||||
mail_from = acc1
|
||||
@@ -142,3 +135,59 @@ def test_passthrough_senders(gencreds, handler, maildata):
|
||||
|
||||
# assert that None/no error is returned
|
||||
assert not handler.check_DATA(envelope=env)
|
||||
|
||||
|
||||
def test_check_armored_payload():
|
||||
payload = """-----BEGIN PGP MESSAGE-----\r
|
||||
\r
|
||||
wU4DSqFx0d1yqAoSAQdAYkX/ZN/Az4B0k7X47zKyWrXxlDEdS3WOy0Yf2+GJTFgg\r
|
||||
Zk5ql0mLG8Ze+ZifCS0XMO4otlemSyJ0K1ZPdFMGzUDBTgNqzkFabxXoXRIBB0AM\r
|
||||
755wlX41X6Ay3KhnwBq7yEqSykVH6F3x11iHPKraLCAGZoaS8bKKNy/zg5slda1X\r
|
||||
pt14b4aC1VwtSnYhcRRELNLD/wE2TFif+g7poMmFY50VyMPLYjVP96Z5QCT4+z4H\r
|
||||
Ikh/pRRN8S3JNMrRJHc6prooSJmLcx47Y5un7VFy390MsJ+LiUJuQMDdYWRAinfs\r
|
||||
Ebm89Ezjm7F03qbFPXE0X4ZNzVXS/eKO0uhJQdiov/vmbn41rNtHmNpqjaO0vi5+\r
|
||||
sS9tR7yDUrIXiCUCN78eBLVioxtktsPZm5cDORbQWzv+7nmCEz9/JowCUcBVdCGn\r
|
||||
1ofOaH82JCAX/cRx08pLaDNj6iolVBsi56Dd+2bGxJOZOG2AMcEyz0pXY0dOAJCD\r
|
||||
iUThcQeGIdRnU3j8UBcnIEsjLu2+C+rrwMZQESMWKnJ0rnqTk0pK5kXScr6F/L0L\r
|
||||
UE49ccIexNm3xZvYr5drszr6wz3Tv5fdue87P4etBt90gF/Vzknck+g1LLlkzZkp\r
|
||||
d8dI0k2tOSPjUbDPnSy1x+X73WGpPZmj0kWT+RGvq0nH6UkJj3AQTG2qf1T8jK+3\r
|
||||
rTp3LR9vDkMwDjX4R8SA9c0wdnUzzr79OYQC9lTnzcx+fM6BBmgQ2GrS33jaFLp7\r
|
||||
L6/DFpCl5zhnPjM/2dKvMkw/Kd6XS/vjwsO405FQdjSDiQEEAZA+ZvAfcjdccbbU\r
|
||||
yCO+x0QNdeBsufDVnh3xvzuWy4CICdTQT4s1AWRPCzjOj+SGmx5WqCLWfsd8Ma0+\r
|
||||
w/C7SfTYu1FDQILLM+llpq1M/9GPley4QZ8JQjo262AyPXsPF/OW48uuZz0Db1xT\r
|
||||
Yh4iHBztj4VSdy7l2+IyaIf7cnL4EEBFxv/MwmVDXvDlxyvfAfIsd3D9SvJESzKZ\r
|
||||
VWDYwaocgeCN+ojKu1p885lu1EfRbX3fr3YO02K5/c2JYDkc0Py0W3wUP/J1XUax\r
|
||||
pbKpzwlkxEgtmzsGqsOfMJqBV3TNDrOA2uBsa+uBqP5MGYLZ49S/4v/bW9I01Cr1\r
|
||||
D2ZkV510Y1Vgo66WlP8mRqOTyt/5WRhPD+MxXdk67BNN/PmO6tMlVoJDuk+XwWPR\r
|
||||
t2TvNaND/yabT9eYI55Og4fzKD6RIjouUX8DvKLkm+7aXxVs2uuLQ3Jco3O82z55\r
|
||||
dbShU1jYsrw9oouXUz06MHPbkdhNbF/2hfhZ2qA31sNeovJw65iUv7sDKX3LVWgJ\r
|
||||
10jlywcDwqlU8CO7WC9lGixYTbnOkYZpXCGEl8e6Jbs79l42YFo4ogYpFK1NXFhV\r
|
||||
kOXRmDf/wmfj+c/ld3L2PkvwlgofhCudOQknZbo3ub1gjiTn7L+lMGHIj/3suMIl\r
|
||||
ID4EUxAXScIM1ZEz2fjtW5jATlqYcLjLTbf/olw6HFyPNH+9IssqXeZNKnGwPUB9\r
|
||||
3lTXsg0tpzl+x7F/2WjEw1DSNhjC0KnHt1vEYNMkUGDGFdN9y3ERLqX/FIgiASUb\r
|
||||
bTvAVupnAK3raBezGmhrs6LsQtLS9P0VvQiLU3uDhMqw8Z4SISLpcD+NnVBHzQqm\r
|
||||
6W5Qn/8xsCL6av18yUVTi2G3igt3QCNoYx9evt2ZcIkNoyyagUVjfZe5GHXh8Dnz\r
|
||||
GaBXW/hg3HlXLRGaQu4RYCzBMJILcO25OhZOg6jbkCLiEexQlm2e9krB5cXR49Al\r
|
||||
UN4fiB0KR9JyG2ayUdNJVkXZSZLnHyRgiaadlpUo16LVvw==\r
|
||||
=b5Kp\r
|
||||
-----END PGP MESSAGE-----\r
|
||||
\r
|
||||
"""
|
||||
|
||||
assert check_armored_payload(payload) == True
|
||||
|
||||
payload = """-----BEGIN PGP MESSAGE-----\r
|
||||
\r
|
||||
HELLOWORLD
|
||||
-----END PGP MESSAGE-----\r
|
||||
\r
|
||||
"""
|
||||
assert check_armored_payload(payload) == False
|
||||
|
||||
payload = """-----BEGIN PGP MESSAGE-----\r
|
||||
\r
|
||||
=njUN
|
||||
-----END PGP MESSAGE-----\r
|
||||
\r
|
||||
"""
|
||||
assert check_armored_payload(payload) == False
|
||||
|
||||
64
chatmaild/src/chatmaild/tests/test_lastlogin.py
Normal file
64
chatmaild/src/chatmaild/tests/test_lastlogin.py
Normal file
@@ -0,0 +1,64 @@
|
||||
import time
|
||||
|
||||
from chatmaild.doveauth import AuthDictProxy
|
||||
from chatmaild.lastlogin import (
|
||||
LastLoginDictProxy,
|
||||
)
|
||||
|
||||
|
||||
def test_handle_dovecot_request_last_login(testaddr, example_config):
|
||||
dictproxy = LastLoginDictProxy(config=example_config)
|
||||
|
||||
authproxy = AuthDictProxy(config=example_config)
|
||||
authproxy.lookup_passdb(testaddr, "1l2k3j1l2k3jl123")
|
||||
|
||||
dictproxy_transactions = {}
|
||||
|
||||
# Begin transaction
|
||||
tx = "1111"
|
||||
msg = f"B{tx}\t{testaddr}"
|
||||
res = dictproxy.handle_dovecot_request(msg, dictproxy_transactions)
|
||||
assert not res
|
||||
assert dictproxy_transactions == {tx: dict(addr=testaddr, res="O\n")}
|
||||
|
||||
# set last-login info for user
|
||||
user = dictproxy.config.get_user(testaddr)
|
||||
timestamp = int(time.time())
|
||||
msg = f"S{tx}\tshared/last-login/{testaddr}\t{timestamp}"
|
||||
res = dictproxy.handle_dovecot_request(msg, dictproxy_transactions)
|
||||
assert not res
|
||||
assert len(dictproxy_transactions) == 1
|
||||
read_timestamp = user.get_last_login_timestamp()
|
||||
assert read_timestamp == timestamp // 86400 * 86400
|
||||
|
||||
# finish transaction
|
||||
msg = f"C{tx}"
|
||||
res = dictproxy.handle_dovecot_request(msg, dictproxy_transactions)
|
||||
assert res == "O\n"
|
||||
assert len(dictproxy_transactions) == 0
|
||||
|
||||
|
||||
def test_handle_dovecot_request_last_login_echobot(example_config):
|
||||
dictproxy = LastLoginDictProxy(config=example_config)
|
||||
|
||||
authproxy = AuthDictProxy(config=example_config)
|
||||
testaddr = f"echo@{example_config.mail_domain}"
|
||||
authproxy.lookup_passdb(testaddr, "ignore")
|
||||
user = dictproxy.config.get_user(testaddr)
|
||||
|
||||
transactions = {}
|
||||
|
||||
# set last-login info for user
|
||||
tx = "1111"
|
||||
msg = f"B{tx}\t{testaddr}"
|
||||
res = dictproxy.handle_dovecot_request(msg, transactions)
|
||||
assert not res
|
||||
assert transactions == {tx: dict(addr=testaddr, res="O\n")}
|
||||
|
||||
timestamp = int(time.time())
|
||||
msg = f"S{tx}\tshared/last-login/{testaddr}\t{timestamp}"
|
||||
res = dictproxy.handle_dovecot_request(msg, transactions)
|
||||
assert not res
|
||||
assert len(transactions) == 1
|
||||
read_timestamp = user.get_last_login_timestamp()
|
||||
assert read_timestamp is None
|
||||
@@ -3,10 +3,10 @@ import time
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from chatmaild.metadata import (
|
||||
Metadata,
|
||||
handle_dovecot_protocol,
|
||||
handle_dovecot_request,
|
||||
MetadataDictProxy,
|
||||
)
|
||||
from chatmaild.notifier import (
|
||||
Notifier,
|
||||
@@ -30,8 +30,8 @@ def metadata(tmp_path):
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def testaddr():
|
||||
return "user.name@example.org"
|
||||
def dictproxy(notifier, metadata):
|
||||
return MetadataDictProxy(notifier=notifier, metadata=metadata)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@@ -88,51 +88,51 @@ def test_notifier_remove_without_set(metadata, testaddr):
|
||||
assert not metadata.get_tokens_for_addr(testaddr)
|
||||
|
||||
|
||||
def test_handle_dovecot_request_lookup_fails(notifier, metadata, testaddr):
|
||||
res = handle_dovecot_request(
|
||||
f"Lpriv/123/chatmail\t{testaddr}", {}, notifier, metadata
|
||||
def test_handle_dovecot_request_lookup_fails(dictproxy, testaddr):
|
||||
transactions = {}
|
||||
res = dictproxy.handle_dovecot_request(
|
||||
f"Lpriv/123/chatmail\t{testaddr}", transactions
|
||||
)
|
||||
assert res == "N\n"
|
||||
|
||||
|
||||
def test_handle_dovecot_request_happy_path(notifier, metadata, testaddr, token):
|
||||
def test_handle_dovecot_request_happy_path(dictproxy, testaddr, token):
|
||||
metadata = dictproxy.metadata
|
||||
transactions = {}
|
||||
notifier = dictproxy.notifier
|
||||
|
||||
# set device token in a transaction
|
||||
tx = "1111"
|
||||
msg = f"B{tx}\t{testaddr}"
|
||||
res = handle_dovecot_request(msg, transactions, notifier, metadata)
|
||||
res = dictproxy.handle_dovecot_request(msg, transactions)
|
||||
assert not res and not metadata.get_tokens_for_addr(testaddr)
|
||||
assert transactions == {tx: dict(addr=testaddr, res="O\n")}
|
||||
|
||||
msg = f"S{tx}\tpriv/guid00/devicetoken\t{token}"
|
||||
res = handle_dovecot_request(msg, transactions, notifier, metadata)
|
||||
res = dictproxy.handle_dovecot_request(msg, transactions)
|
||||
assert not res
|
||||
assert len(transactions) == 1
|
||||
assert metadata.get_tokens_for_addr(testaddr) == [token]
|
||||
|
||||
msg = f"C{tx}"
|
||||
res = handle_dovecot_request(msg, transactions, notifier, metadata)
|
||||
res = dictproxy.handle_dovecot_request(msg, transactions)
|
||||
assert res == "O\n"
|
||||
assert len(transactions) == 0
|
||||
assert metadata.get_tokens_for_addr(testaddr) == [token]
|
||||
|
||||
# trigger notification for incoming message
|
||||
tx2 = "2222"
|
||||
assert (
|
||||
handle_dovecot_request(f"B{tx2}\t{testaddr}", transactions, notifier, metadata)
|
||||
is None
|
||||
)
|
||||
assert dictproxy.handle_dovecot_request(f"B{tx2}\t{testaddr}", transactions) is None
|
||||
msg = f"S{tx2}\tpriv/guid00/messagenew"
|
||||
assert handle_dovecot_request(msg, transactions, notifier, metadata) is None
|
||||
assert dictproxy.handle_dovecot_request(msg, transactions) is None
|
||||
queue_item = notifier.retry_queues[0].get()[1]
|
||||
assert queue_item.token == token
|
||||
assert handle_dovecot_request(f"C{tx2}", transactions, notifier, metadata) == "O\n"
|
||||
assert dictproxy.handle_dovecot_request(f"C{tx2}", transactions) == "O\n"
|
||||
assert not transactions
|
||||
assert queue_item.path.exists()
|
||||
|
||||
|
||||
def test_handle_dovecot_protocol_set_devicetoken(metadata, notifier):
|
||||
def test_handle_dovecot_protocol_set_devicetoken(dictproxy):
|
||||
rfile = io.BytesIO(
|
||||
b"\n".join(
|
||||
[
|
||||
@@ -144,12 +144,12 @@ def test_handle_dovecot_protocol_set_devicetoken(metadata, notifier):
|
||||
)
|
||||
)
|
||||
wfile = io.BytesIO()
|
||||
handle_dovecot_protocol(rfile, wfile, notifier, metadata)
|
||||
dictproxy.loop_forever(rfile, wfile)
|
||||
assert wfile.getvalue() == b"O\n"
|
||||
assert metadata.get_tokens_for_addr("user@example.org") == ["01234"]
|
||||
assert dictproxy.metadata.get_tokens_for_addr("user@example.org") == ["01234"]
|
||||
|
||||
|
||||
def test_handle_dovecot_protocol_set_get_devicetoken(metadata, notifier):
|
||||
def test_handle_dovecot_protocol_set_get_devicetoken(dictproxy):
|
||||
rfile = io.BytesIO(
|
||||
b"\n".join(
|
||||
[
|
||||
@@ -161,19 +161,19 @@ def test_handle_dovecot_protocol_set_get_devicetoken(metadata, notifier):
|
||||
)
|
||||
)
|
||||
wfile = io.BytesIO()
|
||||
handle_dovecot_protocol(rfile, wfile, notifier, metadata)
|
||||
assert metadata.get_tokens_for_addr("user@example.org") == ["01234"]
|
||||
dictproxy.loop_forever(rfile, wfile)
|
||||
assert dictproxy.metadata.get_tokens_for_addr("user@example.org") == ["01234"]
|
||||
assert wfile.getvalue() == b"O\n"
|
||||
|
||||
rfile = io.BytesIO(
|
||||
b"\n".join([b"HELLO", b"Lpriv/0123/devicetoken\tuser@example.org"])
|
||||
)
|
||||
wfile = io.BytesIO()
|
||||
handle_dovecot_protocol(rfile, wfile, notifier, metadata)
|
||||
dictproxy.loop_forever(rfile, wfile)
|
||||
assert wfile.getvalue() == b"O01234\n"
|
||||
|
||||
|
||||
def test_handle_dovecot_protocol_iterate(metadata, notifier):
|
||||
def test_handle_dovecot_protocol_iterate(dictproxy):
|
||||
rfile = io.BytesIO(
|
||||
b"\n".join(
|
||||
[
|
||||
@@ -183,7 +183,7 @@ def test_handle_dovecot_protocol_iterate(metadata, notifier):
|
||||
)
|
||||
)
|
||||
wfile = io.BytesIO()
|
||||
handle_dovecot_protocol(rfile, wfile, notifier, metadata)
|
||||
dictproxy.loop_forever(rfile, wfile)
|
||||
assert wfile.getvalue() == b"\n"
|
||||
|
||||
|
||||
@@ -298,7 +298,7 @@ def test_persistent_queue_items(tmp_path, testaddr, token):
|
||||
assert not queue_item < item2 and not item2 < queue_item
|
||||
|
||||
|
||||
def test_iroh_relay(metadata):
|
||||
def test_iroh_relay(dictproxy):
|
||||
rfile = io.BytesIO(
|
||||
b"\n".join(
|
||||
[
|
||||
@@ -308,5 +308,6 @@ def test_iroh_relay(metadata):
|
||||
)
|
||||
)
|
||||
wfile = io.BytesIO()
|
||||
handle_dovecot_protocol(rfile, wfile, notifier, metadata, "https://example.org/")
|
||||
dictproxy.iroh_relay = "https://example.org/"
|
||||
dictproxy.loop_forever(rfile, wfile)
|
||||
assert wfile.getvalue() == b"Ohttps://example.org/\n"
|
||||
|
||||
@@ -8,9 +8,10 @@ def test_main(tmp_path, capsys):
|
||||
out, _ = capsys.readouterr()
|
||||
d = {}
|
||||
for line in out.split("\n"):
|
||||
if line.strip():
|
||||
name, num, _ = line.split()
|
||||
if line.strip() and not line.startswith("#"):
|
||||
name, num = line.split()
|
||||
d[name] = int(num)
|
||||
|
||||
assert d["accounts"] == 4
|
||||
assert d["ci_accounts"] == 3
|
||||
assert d["nonci_accounts"] == 1
|
||||
|
||||
67
chatmaild/src/chatmaild/tests/test_migrate_db.py
Normal file
67
chatmaild/src/chatmaild/tests/test_migrate_db.py
Normal file
@@ -0,0 +1,67 @@
|
||||
import sqlite3
|
||||
|
||||
from chatmaild.migrate_db import migrate_from_db_to_maildir
|
||||
|
||||
|
||||
def test_migration_not_exists(tmp_path, example_config):
|
||||
example_config.passdb_path = tmp_path.joinpath("sqlite")
|
||||
|
||||
|
||||
def test_migration(tmp_path, example_config, caplog):
|
||||
passdb_path = tmp_path.joinpath("passdb.sqlite")
|
||||
uri = f"file:{passdb_path}?mode=rwc"
|
||||
sqlconn = sqlite3.connect(uri, timeout=60, uri=True)
|
||||
sqlconn.execute(
|
||||
"""
|
||||
CREATE TABLE users (
|
||||
addr TEXT PRIMARY KEY,
|
||||
password TEXT,
|
||||
last_login INTEGER
|
||||
)
|
||||
"""
|
||||
)
|
||||
all = {}
|
||||
|
||||
for i in range(500):
|
||||
values = (f"somsom{i:03}@example.org", f"passwo{i:03}", i * 86400)
|
||||
sqlconn.execute(
|
||||
"""
|
||||
INSERT INTO users (addr, password, last_login)
|
||||
VALUES (?, ?, ?)""",
|
||||
values,
|
||||
)
|
||||
all[values[0]] = values[1:]
|
||||
|
||||
for i in range(500):
|
||||
values = (f"pompom{i:03}@example.org", f"wopass{i:03}", "")
|
||||
sqlconn.execute(
|
||||
"""
|
||||
INSERT INTO users (addr, password, last_login)
|
||||
VALUES (?, ?, ?)""",
|
||||
values,
|
||||
)
|
||||
all[values[0]] = values[1:]
|
||||
|
||||
sqlconn.commit()
|
||||
sqlconn.close()
|
||||
|
||||
assert passdb_path.stat().st_size > 10000
|
||||
|
||||
example_config.passdb_path = passdb_path
|
||||
|
||||
assert not caplog.records
|
||||
|
||||
migrate_from_db_to_maildir(example_config, chunking=500)
|
||||
assert len(caplog.records) > 3
|
||||
|
||||
for path in example_config.mailboxes_dir.iterdir():
|
||||
if "@" not in path.name:
|
||||
continue
|
||||
password, last_login = all.pop(path.name)
|
||||
user = example_config.get_user(path.name)
|
||||
if last_login:
|
||||
assert user.get_last_login_timestamp() == last_login
|
||||
assert password == user.get_userdb_dict()["password"]
|
||||
|
||||
assert not all
|
||||
assert not example_config.passdb_path.exists()
|
||||
42
chatmaild/src/chatmaild/tests/test_user.py
Normal file
42
chatmaild/src/chatmaild/tests/test_user.py
Normal file
@@ -0,0 +1,42 @@
|
||||
def test_login_timestamp(testaddr, example_config):
|
||||
user = example_config.get_user(testaddr)
|
||||
user.set_password("someeqkjwelkqwjleqwe")
|
||||
user.set_last_login_timestamp(100000)
|
||||
assert user.get_last_login_timestamp() == 86400
|
||||
|
||||
user.set_last_login_timestamp(200000)
|
||||
assert user.get_last_login_timestamp() == 86400 * 2
|
||||
|
||||
|
||||
def test_get_user_dict_not_set(testaddr, example_config, caplog):
|
||||
user = example_config.get_user(testaddr)
|
||||
assert not caplog.records
|
||||
assert user.get_userdb_dict() == {}
|
||||
assert len(caplog.records) == 0
|
||||
|
||||
user.set_password("")
|
||||
assert user.get_userdb_dict() == {}
|
||||
assert len(caplog.records) == 1
|
||||
|
||||
|
||||
def test_get_user_dict(make_config, tmp_path):
|
||||
config = make_config("something.testrun.org")
|
||||
addr = "user1@something.org"
|
||||
user = config.get_user(addr)
|
||||
enc_password = "l1k2j31lk2j3l1k23j123"
|
||||
user.set_password(enc_password)
|
||||
data = user.get_userdb_dict()
|
||||
assert addr in str(data["home"])
|
||||
assert data["uid"] == "vmail"
|
||||
assert data["gid"] == "vmail"
|
||||
assert data["password"] == enc_password
|
||||
|
||||
|
||||
def test_no_mailboxes_dir(testaddr, example_config, tmp_path):
|
||||
p = tmp_path.joinpath("a", "mailboxes")
|
||||
example_config.mailboxes_dir = p
|
||||
|
||||
user = example_config.get_user(testaddr)
|
||||
user.set_password("someeqkjwelkqwjleqwe")
|
||||
user.set_last_login_timestamp(100000)
|
||||
assert user.get_last_login_timestamp() == 86400
|
||||
74
chatmaild/src/chatmaild/user.py
Normal file
74
chatmaild/src/chatmaild/user.py
Normal file
@@ -0,0 +1,74 @@
|
||||
import logging
|
||||
import os
|
||||
|
||||
from chatmaild.filedict import write_bytes_atomic
|
||||
|
||||
|
||||
def get_daytimestamp(timestamp) -> int:
|
||||
return int(timestamp) // 86400 * 86400
|
||||
|
||||
|
||||
class User:
|
||||
def __init__(self, maildir, addr, password_path, uid, gid):
|
||||
self.maildir = maildir
|
||||
self.addr = addr
|
||||
self.password_path = password_path
|
||||
self.uid = uid
|
||||
self.gid = gid
|
||||
|
||||
@property
|
||||
def can_track(self):
|
||||
return "@" in self.addr and not self.addr.startswith("echo@")
|
||||
|
||||
def get_userdb_dict(self):
|
||||
"""Return a non-empty dovecot 'userdb' style dict
|
||||
if the user has an existing non-empty password"""
|
||||
try:
|
||||
pw = self.password_path.read_text()
|
||||
except FileNotFoundError:
|
||||
return {}
|
||||
|
||||
if not pw:
|
||||
logging.error(f"password is empty for: {self.addr}")
|
||||
return {}
|
||||
|
||||
home = str(self.maildir)
|
||||
return dict(addr=self.addr, home=home, uid=self.uid, gid=self.gid, password=pw)
|
||||
|
||||
def set_password(self, enc_password):
|
||||
"""Set the specified password for this user.
|
||||
|
||||
This method can be called concurrently
|
||||
but there is no guarantee which of the password-set calls will win.
|
||||
"""
|
||||
self.maildir.mkdir(exist_ok=True, parents=True)
|
||||
password = enc_password.encode("ascii")
|
||||
|
||||
try:
|
||||
write_bytes_atomic(self.password_path, password)
|
||||
except PermissionError:
|
||||
if not self.addr.startswith("echo@"):
|
||||
logging.error(f"could not write password for: {self.addr}")
|
||||
raise
|
||||
|
||||
def set_last_login_timestamp(self, timestamp):
|
||||
"""Track login time with daily granularity
|
||||
to minimize touching files and to minimize metadata leakage."""
|
||||
if not self.can_track:
|
||||
return
|
||||
try:
|
||||
mtime = int(os.stat(self.password_path).st_mtime)
|
||||
except FileNotFoundError:
|
||||
logging.error(f"Can not get last login timestamp for {self.addr}")
|
||||
return
|
||||
|
||||
timestamp = get_daytimestamp(timestamp)
|
||||
if mtime != timestamp:
|
||||
os.utime(self.password_path, (timestamp, timestamp))
|
||||
|
||||
def get_last_login_timestamp(self):
|
||||
if self.can_track:
|
||||
try:
|
||||
return int(self.password_path.stat().st_mtime)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
@@ -6,7 +6,7 @@ build-backend = "setuptools.build_meta"
|
||||
name = "cmdeploy"
|
||||
version = "0.2"
|
||||
dependencies = [
|
||||
"pyinfra",
|
||||
"pyinfra>=3",
|
||||
"pillow",
|
||||
"qrcode",
|
||||
"markdown",
|
||||
@@ -18,6 +18,7 @@ dependencies = [
|
||||
"ruff",
|
||||
"pytest",
|
||||
"pytest-xdist",
|
||||
"execnet",
|
||||
"imap_tools",
|
||||
]
|
||||
|
||||
|
||||
@@ -17,7 +17,6 @@ from pyinfra.operations import apt, files, pip, server, systemd
|
||||
|
||||
from .acmetool import deploy_acmetool
|
||||
|
||||
root_owned = dict(user="root", group="root", mode="644")
|
||||
|
||||
def _build_chatmaild(dist_dir) -> None:
|
||||
dist_dir = Path(dist_dir).resolve()
|
||||
@@ -51,6 +50,7 @@ def _install_remote_venv_with_chatmaild(config) -> None:
|
||||
remote_dist_file = f"{remote_base_dir}/dist/{dist_file.name}"
|
||||
remote_venv_dir = f"{remote_base_dir}/venv"
|
||||
remote_chatmail_inipath = f"{remote_base_dir}/chatmail.ini"
|
||||
root_owned = dict(user="root", group="root", mode="644")
|
||||
|
||||
apt.packages(
|
||||
name="apt install python3-virtualenv",
|
||||
@@ -85,43 +85,25 @@ def _install_remote_venv_with_chatmaild(config) -> None:
|
||||
],
|
||||
)
|
||||
|
||||
# create metrics every 5 minutes via systemd
|
||||
|
||||
files.put(
|
||||
name="Upload metrics.timer",
|
||||
src=importlib.resources.files(__package__).joinpath("service/metrics.timer"),
|
||||
dest=f"/etc/systemd/system/metrics.timer",
|
||||
**root_owned,
|
||||
)
|
||||
|
||||
files.template(
|
||||
name="upload metrics.service",
|
||||
src=importlib.resources.files(__package__).joinpath("service/metrics.service.j2"),
|
||||
dest="/etc/systemd/system/metrics.service",
|
||||
src=importlib.resources.files(__package__).joinpath("metrics.cron.j2"),
|
||||
dest="/etc/cron.d/chatmail-metrics",
|
||||
user="root",
|
||||
group="root",
|
||||
mode="644",
|
||||
config={
|
||||
"mail_domain": config.mail_domain,
|
||||
"mailboxes_dir": config.mailboxes_dir,
|
||||
"execpath": f"{remote_venv_dir}/bin/chatmail-metrics",
|
||||
},
|
||||
)
|
||||
|
||||
systemd.service(
|
||||
name=f"Setup metrics timer",
|
||||
service="metrics.timer",
|
||||
running=True,
|
||||
enabled=True,
|
||||
restarted=True,
|
||||
daemon_reload=True,
|
||||
)
|
||||
|
||||
# install systemd units
|
||||
for fn in (
|
||||
"doveauth",
|
||||
"filtermail",
|
||||
"echobot",
|
||||
"chatmail-metadata",
|
||||
"lastlogin",
|
||||
):
|
||||
params = dict(
|
||||
execpath=f"{remote_venv_dir}/bin/{fn}",
|
||||
@@ -286,6 +268,7 @@ def _configure_postfix(config: Config, debug: bool = False) -> bool:
|
||||
group="root",
|
||||
mode="644",
|
||||
config=config,
|
||||
disable_ipv6=config.disable_ipv6,
|
||||
)
|
||||
need_restart |= main_config.changed
|
||||
|
||||
@@ -336,6 +319,7 @@ def _configure_dovecot(config: Config, debug: bool = False) -> bool:
|
||||
mode="644",
|
||||
config=config,
|
||||
debug=debug,
|
||||
disable_ipv6=config.disable_ipv6,
|
||||
)
|
||||
need_restart |= main_config.changed
|
||||
auth_config = files.put(
|
||||
@@ -357,37 +341,6 @@ def _configure_dovecot(config: Config, debug: bool = False) -> bool:
|
||||
)
|
||||
need_restart |= lua_push_notification_script.changed
|
||||
|
||||
sieve_script = files.put(
|
||||
src=importlib.resources.files(__package__).joinpath("dovecot/default.sieve"),
|
||||
dest="/etc/dovecot/default.sieve",
|
||||
user="root",
|
||||
group="root",
|
||||
mode="644",
|
||||
)
|
||||
need_restart |= sieve_script.changed
|
||||
if sieve_script.changed:
|
||||
server.shell(
|
||||
name="compile sieve script",
|
||||
commands=["/usr/bin/sievec /etc/dovecot/default.sieve"],
|
||||
)
|
||||
|
||||
files.template(
|
||||
src=importlib.resources.files(__package__).joinpath("service/expunge.service.j2"),
|
||||
dest="/etc/systemd/system/expunge.service",
|
||||
config={
|
||||
"mail_domain": config.mail_domain,
|
||||
"delete_mails_after": config.delete_mails_after,
|
||||
},
|
||||
**root_owned,
|
||||
)
|
||||
|
||||
files.put(
|
||||
name="Upload expunge.timer",
|
||||
src=importlib.resources.files(__package__).joinpath("service/expunge.timer"),
|
||||
dest=f"/etc/systemd/system/expunge.timer",
|
||||
**root_owned,
|
||||
)
|
||||
|
||||
files.template(
|
||||
src=importlib.resources.files(__package__).joinpath("dovecot/expunge.cron.j2"),
|
||||
dest="/etc/cron.d/expunge",
|
||||
@@ -411,7 +364,7 @@ def _configure_dovecot(config: Config, debug: bool = False) -> bool:
|
||||
return need_restart
|
||||
|
||||
|
||||
def _configure_nginx(domain: str, debug: bool = False) -> bool:
|
||||
def _configure_nginx(config: Config, debug: bool = False) -> bool:
|
||||
"""Configures nginx HTTP server."""
|
||||
need_restart = False
|
||||
|
||||
@@ -421,7 +374,8 @@ def _configure_nginx(domain: str, debug: bool = False) -> bool:
|
||||
user="root",
|
||||
group="root",
|
||||
mode="644",
|
||||
config={"domain_name": domain},
|
||||
config={"domain_name": config.mail_domain},
|
||||
disable_ipv6=config.disable_ipv6,
|
||||
)
|
||||
need_restart |= main_config.changed
|
||||
|
||||
@@ -431,7 +385,7 @@ def _configure_nginx(domain: str, debug: bool = False) -> bool:
|
||||
user="root",
|
||||
group="root",
|
||||
mode="644",
|
||||
config={"domain_name": domain},
|
||||
config={"domain_name": config.mail_domain},
|
||||
)
|
||||
need_restart |= autoconfig.changed
|
||||
|
||||
@@ -441,7 +395,7 @@ def _configure_nginx(domain: str, debug: bool = False) -> bool:
|
||||
user="root",
|
||||
group="root",
|
||||
mode="644",
|
||||
config={"domain_name": domain},
|
||||
config={"domain_name": config.mail_domain},
|
||||
)
|
||||
need_restart |= mta_sts_config.changed
|
||||
|
||||
@@ -500,6 +454,7 @@ def deploy_chatmail(config_path: Path) -> None:
|
||||
|
||||
server.group(name="Create vmail group", group="vmail", system=True)
|
||||
server.user(name="Create vmail user", user="vmail", group="vmail", system=True)
|
||||
server.user(name="Create filtermail user", user="filtermail", system=True)
|
||||
server.group(name="Create opendkim group", group="opendkim", system=True)
|
||||
server.user(
|
||||
name="Create opendkim user",
|
||||
@@ -515,11 +470,6 @@ def deploy_chatmail(config_path: Path) -> None:
|
||||
)
|
||||
server.user(name="Create echobot user", user="echobot", system=True)
|
||||
|
||||
server.shell(
|
||||
name="Fix file owner in /home/vmail",
|
||||
commands=["test -d /home/vmail && chown -R vmail:vmail /home/vmail"],
|
||||
)
|
||||
|
||||
# Add our OBS repository for dovecot_no_delay
|
||||
files.put(
|
||||
name="Add Deltachat OBS GPG key to apt keyring",
|
||||
@@ -539,6 +489,7 @@ def deploy_chatmail(config_path: Path) -> None:
|
||||
)
|
||||
|
||||
apt.update(name="apt update", cache_time=24 * 3600)
|
||||
apt.upgrade(name="upgrade apt packages", auto_remove=True)
|
||||
|
||||
apt.packages(
|
||||
name="Install rsync",
|
||||
@@ -559,7 +510,6 @@ def deploy_chatmail(config_path: Path) -> None:
|
||||
"systemctl reset-failed unbound.service",
|
||||
],
|
||||
)
|
||||
|
||||
systemd.service(
|
||||
name="Start and enable unbound",
|
||||
service="unbound.service",
|
||||
@@ -585,12 +535,12 @@ def deploy_chatmail(config_path: Path) -> None:
|
||||
|
||||
apt.packages(
|
||||
name="Install Dovecot",
|
||||
packages=["dovecot-imapd", "dovecot-lmtpd", "dovecot-sieve"],
|
||||
packages=["dovecot-imapd", "dovecot-lmtpd"],
|
||||
)
|
||||
|
||||
apt.packages(
|
||||
name="Install nginx",
|
||||
packages=["nginx"],
|
||||
packages=["nginx", "libnginx-mod-stream"],
|
||||
)
|
||||
|
||||
apt.packages(
|
||||
@@ -610,7 +560,7 @@ def deploy_chatmail(config_path: Path) -> None:
|
||||
dovecot_need_restart = _configure_dovecot(config, debug=debug)
|
||||
postfix_need_restart = _configure_postfix(config, debug=debug)
|
||||
mta_sts_need_restart = _install_mta_sts_daemon()
|
||||
nginx_need_restart = _configure_nginx(mail_domain)
|
||||
nginx_need_restart = _configure_nginx(config)
|
||||
|
||||
_remove_rspamd()
|
||||
opendkim_need_restart = _configure_opendkim(mail_domain, "opendkim")
|
||||
@@ -679,5 +629,10 @@ def deploy_chatmail(config_path: Path) -> None:
|
||||
service="systemd-journald.service",
|
||||
running=True,
|
||||
enabled=True,
|
||||
restarted=journald_conf,
|
||||
restarted=journald_conf.changed,
|
||||
)
|
||||
|
||||
apt.packages(
|
||||
name="Ensure cron is installed",
|
||||
packages=["cron"],
|
||||
)
|
||||
|
||||
@@ -69,8 +69,7 @@ def deploy_acmetool(email="", domains=[]):
|
||||
restarted=service_file.changed,
|
||||
)
|
||||
|
||||
if str(host) != "staging.testrun.org":
|
||||
server.shell(
|
||||
name=f"Request certificate for: { ', '.join(domains) }",
|
||||
commands=[f"acmetool want --xlog.severity=debug { ' '.join(domains)}"],
|
||||
)
|
||||
server.shell(
|
||||
name=f"Request certificate for: { ', '.join(domains) }",
|
||||
commands=[f"acmetool want --xlog.severity=debug { ' '.join(domains)}"],
|
||||
)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
SHELL=/bin/sh
|
||||
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin
|
||||
MAILTO=root
|
||||
20 16 * * * root /usr/bin/acmetool --batch reconcile && systemctl reload dovecot && systemctl reload postfix
|
||||
20 16 * * * root /usr/bin/acmetool --batch reconcile && systemctl reload dovecot && systemctl reload postfix && systemctl reload nginx
|
||||
|
||||
@@ -1,15 +0,0 @@
|
||||
{chatmail_domain}. A {ipv4}
|
||||
{chatmail_domain}. AAAA {ipv6}
|
||||
{chatmail_domain}. MX 10 {chatmail_domain}.
|
||||
_submission._tcp.{chatmail_domain}. SRV 0 1 587 {chatmail_domain}.
|
||||
_submissions._tcp.{chatmail_domain}. SRV 0 1 465 {chatmail_domain}.
|
||||
_imap._tcp.{chatmail_domain}. SRV 0 1 143 {chatmail_domain}.
|
||||
_imaps._tcp.{chatmail_domain}. SRV 0 1 993 {chatmail_domain}.
|
||||
{chatmail_domain}. CAA 128 issue "letsencrypt.org;accounturi={acme_account_url}"
|
||||
{chatmail_domain}. TXT "v=spf1 a:{chatmail_domain} ~all"
|
||||
_dmarc.{chatmail_domain}. TXT "v=DMARC1;p=reject;adkim=s;aspf=s"
|
||||
_mta-sts.{chatmail_domain}. TXT "v=STSv1; id={sts_id}"
|
||||
mta-sts.{chatmail_domain}. CNAME {chatmail_domain}.
|
||||
www.{chatmail_domain}. CNAME {chatmail_domain}.
|
||||
{dkim_entry}
|
||||
_adsp._domainkey.{chatmail_domain}. TXT "dkim=discardable"
|
||||
30
cmdeploy/src/cmdeploy/chatmail.zone.j2
Normal file
30
cmdeploy/src/cmdeploy/chatmail.zone.j2
Normal file
@@ -0,0 +1,30 @@
|
||||
;
|
||||
; Required DNS entries for chatmail servers
|
||||
;
|
||||
{% if A %}
|
||||
{{ mail_domain }}. A {{ A }}
|
||||
{% endif %}
|
||||
{% if AAAA %}
|
||||
{{ mail_domain }}. AAAA {{ AAAA }}
|
||||
{% endif %}
|
||||
{{ mail_domain }}. MX 10 {{ mail_domain }}.
|
||||
_mta-sts.{{ mail_domain }}. TXT "v=STSv1; id={{ sts_id }}"
|
||||
mta-sts.{{ mail_domain }}. CNAME {{ mail_domain }}.
|
||||
www.{{ mail_domain }}. CNAME {{ mail_domain }}.
|
||||
{{ dkim_entry }}
|
||||
|
||||
;
|
||||
; Recommended DNS entries for interoperability and security-hardening
|
||||
;
|
||||
{{ mail_domain }}. TXT "v=spf1 a:{{ mail_domain }} ~all"
|
||||
_dmarc.{{ mail_domain }}. TXT "v=DMARC1;p=reject;adkim=s;aspf=s"
|
||||
|
||||
{% if acme_account_url %}
|
||||
{{ mail_domain }}. CAA 0 issue "letsencrypt.org;accounturi={{ acme_account_url }}"
|
||||
{% endif %}
|
||||
_adsp._domainkey.{{ mail_domain }}. TXT "dkim=discardable"
|
||||
|
||||
_submission._tcp.{{ mail_domain }}. SRV 0 1 587 {{ mail_domain }}.
|
||||
_submissions._tcp.{{ mail_domain }}. SRV 0 1 465 {{ mail_domain }}.
|
||||
_imap._tcp.{{ mail_domain }}. SRV 0 1 143 {{ mail_domain }}.
|
||||
_imaps._tcp.{{ mail_domain }}. SRV 0 1 993 {{ mail_domain }}.
|
||||
@@ -7,15 +7,19 @@ import argparse
|
||||
import importlib.resources
|
||||
import importlib.util
|
||||
import os
|
||||
import pathlib
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
import pyinfra
|
||||
from chatmaild.config import read_config, write_initial_config
|
||||
from packaging import version
|
||||
from termcolor import colored
|
||||
|
||||
from cmdeploy.dns import check_necessary_dns, show_dns
|
||||
from . import dns, remote
|
||||
from .sshexec import SSHExec
|
||||
|
||||
#
|
||||
# cmdeploy sub commands and options
|
||||
@@ -35,13 +39,10 @@ def init_cmd(args, out):
|
||||
mail_domain = args.chatmail_domain
|
||||
if args.inipath.exists():
|
||||
print(f"Path exists, not modifying: {args.inipath}")
|
||||
return 1
|
||||
else:
|
||||
write_initial_config(args.inipath, mail_domain)
|
||||
write_initial_config(args.inipath, mail_domain, overrides={})
|
||||
out.green(f"created config file for {mail_domain} in {args.inipath}")
|
||||
check_necessary_dns(
|
||||
out,
|
||||
mail_domain,
|
||||
)
|
||||
|
||||
|
||||
def run_cmd_options(parser):
|
||||
@@ -55,41 +56,75 @@ def run_cmd_options(parser):
|
||||
|
||||
def run_cmd(args, out):
|
||||
"""Deploy chatmail services on the remote server."""
|
||||
mail_domain = args.config.mail_domain
|
||||
if not check_necessary_dns(
|
||||
out,
|
||||
mail_domain,
|
||||
):
|
||||
sys.exit(1)
|
||||
|
||||
sshexec = args.get_sshexec()
|
||||
remote_data = dns.get_initial_remote_data(sshexec, args.config.mail_domain)
|
||||
if not dns.check_initial_remote_data(remote_data, print=out.red):
|
||||
return 1
|
||||
|
||||
env = os.environ.copy()
|
||||
env["CHATMAIL_INI"] = args.inipath
|
||||
deploy_path = importlib.resources.files(__package__).joinpath("deploy.py").resolve()
|
||||
pyinf = "pyinfra --dry" if args.dry_run else "pyinfra"
|
||||
cmd = f"{pyinf} --ssh-user root {args.config.mail_domain} {deploy_path}"
|
||||
cmd = f"{pyinf} --ssh-user root {args.config.mail_domain} {deploy_path} -y"
|
||||
if version.parse(pyinfra.__version__) < version.parse("3"):
|
||||
out.red("Please re-run scripts/initenv.sh to update pyinfra to version 3.")
|
||||
return 1
|
||||
|
||||
out.check_call(cmd, env=env)
|
||||
print("Deploy completed, call `cmdeploy dns` next.")
|
||||
retcode = out.check_call(cmd, env=env)
|
||||
if retcode == 0:
|
||||
out.green("Deploy completed, call `cmdeploy dns` next.")
|
||||
elif not remote_data["acme_account_url"]:
|
||||
out.red("Deploy completed but letsencrypt not configured")
|
||||
out.red("Run 'cmdeploy run' again")
|
||||
retcode = 0
|
||||
else:
|
||||
out.red("Deploy failed")
|
||||
return retcode
|
||||
|
||||
|
||||
def dns_cmd_options(parser):
|
||||
parser.add_argument(
|
||||
"--zonefile",
|
||||
dest="zonefile",
|
||||
help="print the whole zonefile for deploying directly",
|
||||
type=pathlib.Path,
|
||||
default=None,
|
||||
help="write out a zonefile",
|
||||
)
|
||||
|
||||
|
||||
def dns_cmd(args, out):
|
||||
"""Generate dns zone file."""
|
||||
exit_code = show_dns(args, out)
|
||||
exit(exit_code)
|
||||
"""Check DNS entries and optionally generate dns zone file."""
|
||||
sshexec = args.get_sshexec()
|
||||
remote_data = dns.get_initial_remote_data(sshexec, args.config.mail_domain)
|
||||
if not remote_data:
|
||||
return 1
|
||||
|
||||
if not remote_data["acme_account_url"]:
|
||||
out.red("could not get letsencrypt account url, please run 'cmdeploy run'")
|
||||
return 1
|
||||
|
||||
if not remote_data["dkim_entry"]:
|
||||
out.red("could not determine dkim_entry, please run 'cmdeploy run'")
|
||||
return 1
|
||||
|
||||
zonefile = dns.get_filled_zone_file(remote_data)
|
||||
|
||||
if args.zonefile:
|
||||
args.zonefile.write_text(zonefile)
|
||||
out.green(f"DNS records successfully written to: {args.zonefile}")
|
||||
return 0
|
||||
|
||||
retcode = dns.check_full_zone(
|
||||
sshexec, remote_data=remote_data, zonefile=zonefile, out=out
|
||||
)
|
||||
return retcode
|
||||
|
||||
|
||||
def status_cmd(args, out):
|
||||
"""Display status for online chatmail instance."""
|
||||
|
||||
ssh = f"ssh root@{args.config.mail_domain}"
|
||||
sshexec = args.get_sshexec()
|
||||
|
||||
out.green(f"chatmail domain: {args.config.mail_domain}")
|
||||
if args.config.privacy_mail:
|
||||
@@ -97,10 +132,8 @@ def status_cmd(args, out):
|
||||
else:
|
||||
out.red("no privacy settings")
|
||||
|
||||
s1 = "systemctl --type=service --state=running"
|
||||
for line in out.shell_output(f"{ssh} -- {s1}").split("\n"):
|
||||
if line.startswith(" "):
|
||||
print(line)
|
||||
for line in sshexec(remote.rshell.get_systemd_running):
|
||||
print(line)
|
||||
|
||||
|
||||
def test_cmd_options(parser):
|
||||
@@ -129,7 +162,7 @@ def test_cmd(args, out):
|
||||
"-n4",
|
||||
"-rs",
|
||||
"-x",
|
||||
"-vrx",
|
||||
"-v",
|
||||
"--durations=5",
|
||||
]
|
||||
if args.slow:
|
||||
@@ -139,14 +172,6 @@ def test_cmd(args, out):
|
||||
|
||||
|
||||
def fmt_cmd_options(parser):
|
||||
parser.add_argument(
|
||||
"--verbose",
|
||||
"-v",
|
||||
dest="verbose",
|
||||
action="store_true",
|
||||
help="provide information on invocations",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--check",
|
||||
"-c",
|
||||
@@ -176,7 +201,6 @@ def fmt_cmd(args, out):
|
||||
|
||||
out.check_call(" ".join(format_args), quiet=not args.verbose)
|
||||
out.check_call(" ".join(check_args), quiet=not args.verbose)
|
||||
return 0
|
||||
|
||||
|
||||
def bench_cmd(args, out):
|
||||
@@ -212,16 +236,6 @@ class Out:
|
||||
color = "red" if red else ("green" if green else None)
|
||||
print(colored(msg, color), file=file)
|
||||
|
||||
def shell_output(self, arg, no_print=False, timeout=10):
|
||||
if not no_print:
|
||||
self(f"[$ {arg}]", file=sys.stderr)
|
||||
output = subprocess.STDOUT
|
||||
else:
|
||||
output = subprocess.DEVNULL
|
||||
return subprocess.check_output(
|
||||
arg, shell=True, timeout=timeout, stderr=output
|
||||
).decode()
|
||||
|
||||
def check_call(self, arg, env=None, quiet=False):
|
||||
if not quiet:
|
||||
self(f"[$ {arg}]", file=sys.stderr)
|
||||
@@ -244,6 +258,14 @@ def add_config_option(parser):
|
||||
type=Path,
|
||||
help="path to the chatmail.ini file",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--verbose",
|
||||
"-v",
|
||||
dest="verbose",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="provide verbose logging",
|
||||
)
|
||||
|
||||
|
||||
def add_subcommand(subparsers, func):
|
||||
@@ -283,11 +305,18 @@ def get_parser():
|
||||
|
||||
|
||||
def main(args=None):
|
||||
"""Provide main entry point for 'xdcget' CLI invocation."""
|
||||
"""Provide main entry point for 'cmdeploy' CLI invocation."""
|
||||
parser = get_parser()
|
||||
args = parser.parse_args(args=args)
|
||||
if not hasattr(args, "func"):
|
||||
return parser.parse_args(["-h"])
|
||||
|
||||
def get_sshexec():
|
||||
print(f"[ssh] login to {args.config.mail_domain}")
|
||||
return SSHExec(args.config.mail_domain, verbose=args.verbose)
|
||||
|
||||
args.get_sshexec = get_sshexec
|
||||
|
||||
out = Out()
|
||||
kwargs = {}
|
||||
if args.func.__name__ not in ("init_cmd", "fmt_cmd"):
|
||||
@@ -305,7 +334,6 @@ def main(args=None):
|
||||
if res is None:
|
||||
res = 0
|
||||
return res
|
||||
|
||||
except KeyboardInterrupt:
|
||||
out.red("KeyboardInterrupt")
|
||||
sys.exit(130)
|
||||
|
||||
@@ -1,208 +1,64 @@
|
||||
import datetime
|
||||
import importlib
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
import requests
|
||||
from jinja2 import Template
|
||||
|
||||
from . import remote
|
||||
|
||||
|
||||
class DNS:
|
||||
def __init__(self, out, mail_domain):
|
||||
self.session = requests.Session()
|
||||
self.out = out
|
||||
self.ssh = f"ssh root@{mail_domain} -- "
|
||||
self.out.shell_output(
|
||||
f"{ self.ssh }'apt-get update && apt-get install -y dnsutils'",
|
||||
timeout=60,
|
||||
no_print=True,
|
||||
)
|
||||
try:
|
||||
self.shell(f"unbound-control flush_zone {mail_domain}")
|
||||
except subprocess.CalledProcessError:
|
||||
pass
|
||||
|
||||
def shell(self, cmd):
|
||||
try:
|
||||
return self.out.shell_output(f"{self.ssh}{cmd}", no_print=True)
|
||||
except (subprocess.CalledProcessError, subprocess.TimeoutExpired) as e:
|
||||
if "exit status 255" in str(e) or "timed out" in str(e):
|
||||
self.out.red(f"Error: can't reach the server with: {self.ssh[:-4]}")
|
||||
sys.exit(1)
|
||||
else:
|
||||
raise
|
||||
|
||||
def get_ipv4(self):
|
||||
cmd = "ip a | grep 'inet ' | grep 'scope global' | grep -oE '[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}' | head -1"
|
||||
return self.shell(cmd).strip()
|
||||
|
||||
def get_ipv6(self):
|
||||
cmd = "ip a | grep inet6 | grep 'scope global' | sed -e 's#/64 scope global##' | sed -e 's#inet6##'"
|
||||
return self.shell(cmd).strip()
|
||||
|
||||
def get(self, typ: str, domain: str) -> str:
|
||||
"""Get a DNS entry or empty string if there is none."""
|
||||
dig_result = self.shell(f"dig -r -q {domain} -t {typ} +short")
|
||||
line = dig_result.partition("\n")[0]
|
||||
return line
|
||||
|
||||
def check_ptr_record(self, ip: str, mail_domain) -> bool:
|
||||
"""Check the PTR record for an IPv4 or IPv6 address."""
|
||||
result = self.shell(f"dig -r -x {ip} +short").rstrip()
|
||||
return result == f"{mail_domain}."
|
||||
|
||||
|
||||
def show_dns(args, out) -> int:
|
||||
"""Check existing DNS records, optionally write them to zone file, return exit code 0 or 1."""
|
||||
template = importlib.resources.files(__package__).joinpath("chatmail.zone.f")
|
||||
mail_domain = args.config.mail_domain
|
||||
ssh = f"ssh root@{mail_domain}"
|
||||
dns = DNS(out, mail_domain)
|
||||
|
||||
print("Checking your DKIM keys and DNS entries...")
|
||||
try:
|
||||
acme_account_url = out.shell_output(f"{ssh} -- acmetool account-url")
|
||||
except subprocess.CalledProcessError:
|
||||
print("Please run `cmdeploy run` first.")
|
||||
return 1
|
||||
|
||||
dkim_selector = "opendkim"
|
||||
dkim_pubkey = out.shell_output(
|
||||
ssh + f" -- openssl rsa -in /etc/dkimkeys/{dkim_selector}.private"
|
||||
" -pubout 2>/dev/null | awk '/-/{next}{printf(\"%s\",$0)}'"
|
||||
def get_initial_remote_data(sshexec, mail_domain):
|
||||
return sshexec.logged(
|
||||
call=remote.rdns.perform_initial_checks, kwargs=dict(mail_domain=mail_domain)
|
||||
)
|
||||
dkim_entry_value = f"v=DKIM1;k=rsa;p={dkim_pubkey};s=email;t=s"
|
||||
dkim_entry_str = ""
|
||||
while len(dkim_entry_value) >= 255:
|
||||
dkim_entry_str += '"' + dkim_entry_value[:255] + '" '
|
||||
dkim_entry_value = dkim_entry_value[255:]
|
||||
dkim_entry_str += '"' + dkim_entry_value + '"'
|
||||
dkim_entry = f"{dkim_selector}._domainkey.{mail_domain}. TXT {dkim_entry_str}"
|
||||
|
||||
ipv6 = dns.get_ipv6()
|
||||
reverse_ipv6 = dns.check_ptr_record(ipv6, mail_domain)
|
||||
ipv4 = dns.get_ipv4()
|
||||
reverse_ipv4 = dns.check_ptr_record(ipv4, mail_domain)
|
||||
to_print = []
|
||||
|
||||
with open(template, "r") as f:
|
||||
zonefile = (
|
||||
f.read()
|
||||
.format(
|
||||
acme_account_url=acme_account_url,
|
||||
sts_id=datetime.datetime.now().strftime("%Y%m%d%H%M"),
|
||||
chatmail_domain=args.config.mail_domain,
|
||||
dkim_entry=dkim_entry,
|
||||
ipv6=ipv6,
|
||||
ipv4=ipv4,
|
||||
)
|
||||
.strip()
|
||||
)
|
||||
try:
|
||||
with open(args.zonefile, "w+") as zf:
|
||||
zf.write(zonefile)
|
||||
print(f"DNS records successfully written to: {args.zonefile}")
|
||||
return 0
|
||||
except TypeError:
|
||||
pass
|
||||
for raw_line in zonefile.splitlines():
|
||||
line = raw_line.format(
|
||||
acme_account_url=acme_account_url,
|
||||
sts_id=datetime.datetime.now().strftime("%Y%m%d%H%M"),
|
||||
chatmail_domain=args.config.mail_domain,
|
||||
dkim_entry=dkim_entry,
|
||||
ipv6=ipv6,
|
||||
).strip()
|
||||
for typ in ["A", "AAAA", "CNAME", "CAA"]:
|
||||
if f" {typ} " in line:
|
||||
domain, value = line.split(f" {typ} ")
|
||||
current = dns.get(typ, domain.strip()[:-1])
|
||||
if current != value.strip():
|
||||
to_print.append(line)
|
||||
if " MX " in line:
|
||||
domain, typ, prio, value = line.split()
|
||||
current = dns.get(typ, domain[:-1])
|
||||
if not current:
|
||||
to_print.append(line)
|
||||
elif current.split()[1] != value:
|
||||
print(line.replace(prio, str(int(current[0]) + 1)))
|
||||
if " SRV " in line:
|
||||
domain, typ, prio, weight, port, value = line.split()
|
||||
current = dns.get("SRV", domain[:-1])
|
||||
if current != f"{prio} {weight} {port} {value}":
|
||||
to_print.append(line)
|
||||
if " TXT " in line:
|
||||
domain, value = line.split(" TXT ")
|
||||
current = dns.get("TXT", domain.strip()[:-1])
|
||||
if domain.startswith("_mta-sts."):
|
||||
if current:
|
||||
if current.split("id=")[0] == value.split("id=")[0]:
|
||||
continue
|
||||
|
||||
# TXT records longer than 255 bytes
|
||||
# are split into multiple <character-string>s.
|
||||
# This typically happens with DKIM record
|
||||
# which contains long RSA key.
|
||||
#
|
||||
# Removing `" "` before comparison
|
||||
# to get back a single string.
|
||||
if current.replace('" "', "") != value.replace('" "', ""):
|
||||
to_print.append(line)
|
||||
|
||||
exit_code = 0
|
||||
if to_print:
|
||||
to_print.insert(
|
||||
0, "You should configure the following DNS entries at your provider:\n"
|
||||
)
|
||||
to_print.append(
|
||||
"\nIf you already configured the DNS entries, wait a bit until the DNS entries propagate to the Internet."
|
||||
)
|
||||
print("\n".join(to_print))
|
||||
exit_code = 1
|
||||
def check_initial_remote_data(remote_data, print=print):
|
||||
mail_domain = remote_data["mail_domain"]
|
||||
if not remote_data["A"] and not remote_data["AAAA"]:
|
||||
print(f"Missing A and/or AAAA DNS records for {mail_domain}!")
|
||||
elif remote_data["MTA_STS"] != f"{mail_domain}.":
|
||||
print("Missing MTA-STS CNAME record:")
|
||||
print(f"mta-sts.{mail_domain}. CNAME {mail_domain}.")
|
||||
elif remote_data["WWW"] != f"{mail_domain}.":
|
||||
print("Missing www CNAME record:")
|
||||
print(f"www.{mail_domain}. CNAME {mail_domain}.")
|
||||
else:
|
||||
out.green("Great! All your DNS entries are correct.")
|
||||
|
||||
to_print = []
|
||||
if not reverse_ipv4:
|
||||
to_print.append(f"\tIPv4:\t{ipv4}\t{args.config.mail_domain}")
|
||||
if not reverse_ipv6:
|
||||
to_print.append(f"\tIPv6:\t{ipv6}\t{args.config.mail_domain}")
|
||||
if len(to_print) > 0:
|
||||
if len(to_print) == 1:
|
||||
warning = "You should add the following PTR/reverse DNS entry:"
|
||||
else:
|
||||
warning = "You should add the following PTR/reverse DNS entries:"
|
||||
out.red(warning)
|
||||
for entry in to_print:
|
||||
print(entry)
|
||||
print(
|
||||
"You can do so at your hosting provider (maybe this isn't your DNS provider)."
|
||||
)
|
||||
exit_code = 1
|
||||
return exit_code
|
||||
return remote_data
|
||||
|
||||
|
||||
def check_necessary_dns(out, mail_domain):
|
||||
"""Check whether $mail_domain and mta-sts.$mail_domain resolve."""
|
||||
dns = DNS(out, mail_domain)
|
||||
ipv4 = dns.get("A", mail_domain)
|
||||
ipv6 = dns.get("AAAA", mail_domain)
|
||||
mta_entry = dns.get("CNAME", "mta-sts." + mail_domain)
|
||||
www_entry = dns.get("CNAME", "www." + mail_domain)
|
||||
to_print = []
|
||||
if not (ipv4 or ipv6):
|
||||
to_print.append(f"\t{mail_domain}.\t\t\tA<your server's IPv4 address>")
|
||||
if mta_entry != mail_domain + ".":
|
||||
to_print.append(f"\tmta-sts.{mail_domain}.\tCNAME\t{mail_domain}.")
|
||||
if www_entry != mail_domain + ".":
|
||||
to_print.append(f"\twww.{mail_domain}.\tCNAME\t{mail_domain}.")
|
||||
if to_print:
|
||||
to_print.insert(
|
||||
0,
|
||||
"\nFor chatmail to work, you need to configure this at your DNS provider:\n",
|
||||
)
|
||||
for line in to_print:
|
||||
print(line)
|
||||
print()
|
||||
else:
|
||||
dns.out.green("\nAll necessary DNS entries seem to be set.")
|
||||
return True
|
||||
def get_filled_zone_file(remote_data):
|
||||
sts_id = remote_data.get("sts_id")
|
||||
if not sts_id:
|
||||
sts_id = datetime.datetime.now().strftime("%Y%m%d%H%M")
|
||||
|
||||
template = importlib.resources.files(__package__).joinpath("chatmail.zone.j2")
|
||||
content = template.read_text()
|
||||
zonefile = Template(content).render(**remote_data)
|
||||
lines = [x.strip() for x in zonefile.split("\n") if x.strip()]
|
||||
lines.append("")
|
||||
zonefile = "\n".join(lines)
|
||||
return zonefile
|
||||
|
||||
|
||||
def check_full_zone(sshexec, remote_data, out, zonefile) -> int:
|
||||
"""Check existing DNS records, optionally write them to zone file
|
||||
and return (exitcode, remote_data) tuple."""
|
||||
|
||||
required_diff, recommended_diff = sshexec.logged(
|
||||
remote.rdns.check_zonefile,
|
||||
kwargs=dict(zonefile=zonefile, mail_domain=remote_data["mail_domain"]),
|
||||
)
|
||||
|
||||
if required_diff:
|
||||
out.red("Please set required DNS entries at your DNS provider:\n")
|
||||
for line in required_diff:
|
||||
out(line)
|
||||
return 1
|
||||
elif recommended_diff:
|
||||
out("WARNING: these recommended DNS entries are not set:\n")
|
||||
for line in recommended_diff:
|
||||
out(line)
|
||||
return 0
|
||||
|
||||
out.green("Great! All your DNS entries are verified and correct.")
|
||||
return 0
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
uri = proxy:/run/doveauth/doveauth.socket:auth
|
||||
iterate_disable = yes
|
||||
iterate_disable = no
|
||||
iterate_prefix = userdb/
|
||||
|
||||
default_pass_scheme = plain
|
||||
# %E escapes characters " (double quote), ' (single quote) and \ (backslash) with \ (backslash).
|
||||
# See <https://doc.dovecot.org/configuration_manual/config_file/config_variables/#modifiers>
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
require ["imap4flags"];
|
||||
|
||||
# flag the message so it doesn't cause a push notification
|
||||
|
||||
if header :is ["Auto-Submitted"] ["auto-replied", "auto-generated"] {
|
||||
addflag "$Auto";
|
||||
}
|
||||
@@ -1,5 +1,9 @@
|
||||
## Dovecot configuration file
|
||||
|
||||
{% if disable_ipv6 %}
|
||||
listen = *
|
||||
{% endif %}
|
||||
|
||||
protocols = imap lmtp
|
||||
|
||||
auth_mechanisms = plain
|
||||
@@ -19,15 +23,35 @@ mail_debug = yes
|
||||
# master: Warning: service(stats): client_limit (1000) reached, client connections are being dropped
|
||||
default_client_limit = 20000
|
||||
|
||||
# Increase number of logged in IMAP connections.
|
||||
# Each connection is handled by a separate `imap` process.
|
||||
# `imap` process should have `client_limit=1` as described in
|
||||
# <https://doc.dovecot.org/configuration_manual/service_configuration/#service-limits>
|
||||
# so each logged in IMAP session will need its own `imap` process.
|
||||
#
|
||||
# If this limit is reached,
|
||||
# users will fail to LOGIN as `imap-login` process
|
||||
# will accept them logging in but fail to transfer logged in
|
||||
# connection to `imap` process until someone logs out and
|
||||
# the following warning will be logged:
|
||||
# Warning: service(imap): process_limit (1024) reached, client connections are being dropped
|
||||
service imap {
|
||||
process_limit = 50000
|
||||
}
|
||||
|
||||
mail_server_admin = mailto:root@{{ config.mail_domain }}
|
||||
mail_server_comment = Chatmail server
|
||||
|
||||
mail_plugins = quota
|
||||
# `zlib` enables compressing messages stored in the maildir.
|
||||
# See
|
||||
# <https://doc.dovecot.org/configuration_manual/zlib_plugin/>
|
||||
# for documentation.
|
||||
#
|
||||
# quota plugin documentation:
|
||||
# <https://doc.dovecot.org/configuration_manual/quota_plugin/>
|
||||
mail_plugins = zlib quota
|
||||
|
||||
# these are the capabilities Delta Chat cares about actually
|
||||
# so let's keep the network overhead per login small
|
||||
# https://github.com/deltachat/deltachat-core-rust/blob/master/src/imap/capabilities.rs
|
||||
imap_capability = IMAP4rev1 IDLE MOVE QUOTA CONDSTORE NOTIFY METADATA XDELTAPUSH XCHATMAIL
|
||||
imap_capability = +XDELTAPUSH XCHATMAIL
|
||||
|
||||
|
||||
# Authentication for system users.
|
||||
@@ -44,7 +68,7 @@ userdb {
|
||||
##
|
||||
|
||||
# Mailboxes are stored in the "mail" directory of the vmail user home.
|
||||
mail_location = maildir:/home/vmail/mail/%d/%u
|
||||
mail_location = maildir:{{ config.mailboxes_dir }}/%u
|
||||
|
||||
namespace inbox {
|
||||
inbox = yes
|
||||
@@ -80,17 +104,20 @@ mail_privileged_group = vmail
|
||||
# Pass all IMAP METADATA requests to the server implementing Dovecot's dict protocol.
|
||||
mail_attribute_dict = proxy:/run/chatmail-metadata/metadata.socket:metadata
|
||||
|
||||
# Enable IMAP COMPRESS (RFC 4978).
|
||||
# `imap_zlib` enables IMAP COMPRESS (RFC 4978).
|
||||
# <https://datatracker.ietf.org/doc/html/rfc4978.html>
|
||||
protocol imap {
|
||||
mail_plugins = $mail_plugins imap_zlib imap_quota
|
||||
mail_plugins = $mail_plugins imap_zlib imap_quota last_login
|
||||
imap_metadata = yes
|
||||
}
|
||||
|
||||
plugin {
|
||||
last_login_dict = proxy:/run/chatmail-lastlogin/lastlogin.socket:lastlogin
|
||||
#last_login_key = last-login/%u # default
|
||||
last_login_precision = s
|
||||
}
|
||||
|
||||
protocol lmtp {
|
||||
# quota plugin documentation:
|
||||
# <https://doc.dovecot.org/configuration_manual/quota_plugin/>
|
||||
#
|
||||
# notify plugin is a dependency of push_notification plugin:
|
||||
# <https://doc.dovecot.org/settings/plugin/notify-plugin/>
|
||||
#
|
||||
@@ -99,10 +126,11 @@ protocol lmtp {
|
||||
#
|
||||
# mail_lua and push_notification_lua are needed for Lua push notification handler.
|
||||
# <https://doc.dovecot.org/configuration_manual/push_notification/#configuration>
|
||||
#
|
||||
# Sieve to mark messages that should not be notified as \Seen
|
||||
# <https://doc.dovecot.org/configuration_manual/sieve/configuration/>
|
||||
mail_plugins = $mail_plugins quota mail_lua notify push_notification push_notification_lua sieve
|
||||
mail_plugins = $mail_plugins mail_lua notify push_notification push_notification_lua
|
||||
}
|
||||
|
||||
plugin {
|
||||
zlib_save = gz
|
||||
}
|
||||
|
||||
plugin {
|
||||
@@ -124,10 +152,6 @@ plugin {
|
||||
push_notification_driver = lua:file=/etc/dovecot/push_notification.lua
|
||||
}
|
||||
|
||||
plugin {
|
||||
sieve_default = file:/etc/dovecot/default.sieve
|
||||
}
|
||||
|
||||
service lmtp {
|
||||
user=vmail
|
||||
|
||||
@@ -176,3 +200,24 @@ ssl_key = </var/lib/acme/live/{{ config.mail_domain }}/privkey
|
||||
ssl_dh = </usr/share/dovecot/dh.pem
|
||||
ssl_min_protocol = TLSv1.2
|
||||
ssl_prefer_server_ciphers = yes
|
||||
|
||||
|
||||
{% if config.imap_rawlog %}
|
||||
service postlogin {
|
||||
executable = script-login -d rawlog
|
||||
unix_listener postlogin {
|
||||
}
|
||||
}
|
||||
service imap {
|
||||
executable = imap postlogin
|
||||
}
|
||||
|
||||
protocol imap {
|
||||
#rawlog_dir = /tmp/rawlog/%u
|
||||
# Put .in and .out imap protocol logging files into per-user homedir
|
||||
# You can use a command like this to combine into one protocol stream:
|
||||
# sort -sn <(sed 's/ / C: /' *.in) <(sed 's/ / S: /' cat *.out)
|
||||
|
||||
rawlog_dir = %h
|
||||
}
|
||||
{% endif %}
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
# delete all mails after {{ config.delete_mails_after }} days, in the Inbox
|
||||
2 0 * * * vmail find /home/vmail/mail/{{ config.mail_domain }} -path '*/cur/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
||||
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/cur/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
||||
# or in any IMAP subfolder
|
||||
2 0 * * * vmail find /home/vmail/mail/{{ config.mail_domain }} -path '*/.*/cur/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
||||
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/.*/cur/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
||||
# even if they are unseen
|
||||
2 0 * * * vmail find /home/vmail/mail/{{ config.mail_domain }} -path '*/new/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
||||
2 0 * * * vmail find /home/vmail/mail/{{ config.mail_domain }} -path '*/.*/new/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
||||
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/new/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
||||
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/.*/new/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
||||
# or only temporary (but then they shouldn't be around after {{ config.delete_mails_after }} days anyway).
|
||||
2 0 * * * vmail find /home/vmail/mail/{{ config.mail_domain }} -path '*/tmp/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
||||
2 0 * * * vmail find /home/vmail/mail/{{ config.mail_domain }} -path '*/.*/tmp/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
||||
3 0 * * * vmail find /home/vmail/mail/{{ config.mail_domain }} -name 'maildirsize' -type f -delete
|
||||
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/tmp/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
||||
2 0 * * * vmail find {{ config.mailboxes_dir }} -path '*/.*/tmp/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
||||
3 0 * * * vmail find {{ config.mailboxes_dir }} -name 'maildirsize' -type f -delete
|
||||
4 0 * * * vmail /usr/local/lib/chatmaild/venv/bin/delete_inactive_users /usr/local/lib/chatmaild/chatmail.ini
|
||||
|
||||
@@ -17,12 +17,8 @@ function dovecot_lua_notify_event_message_new(user, event)
|
||||
|
||||
if user.username ~= event.from_address then
|
||||
-- Incoming message
|
||||
if not contains(event.keywords, "$Auto") then
|
||||
-- Not an Auto-Submitted message, notifying.
|
||||
|
||||
-- Notify METADATA server about new message.
|
||||
mbox:metadata_set("/private/messagenew", "")
|
||||
end
|
||||
-- Notify METADATA server about new message.
|
||||
mbox:metadata_set("/private/messagenew", "")
|
||||
end
|
||||
|
||||
mbox:free()
|
||||
|
||||
@@ -1,2 +1,3 @@
|
||||
[Journal]
|
||||
MaxRetentionSec=3d
|
||||
Storage=volatile
|
||||
|
||||
@@ -1 +1 @@
|
||||
*/5 * * * * root {{ config.execpath }} /home/vmail/mail/{{ config.mail_domain }} >/var/www/html/metrics
|
||||
*/5 * * * * root {{ config.execpath }} {{ config.mailboxes_dir }} >/var/www/html/metrics
|
||||
|
||||
@@ -19,6 +19,13 @@
|
||||
<authentication>password-cleartext</authentication>
|
||||
<username>%EMAILADDRESS%</username>
|
||||
</incomingServer>
|
||||
<incomingServer type="imap">
|
||||
<hostname>{{ config.domain_name }}</hostname>
|
||||
<port>443</port>
|
||||
<socketType>SSL</socketType>
|
||||
<authentication>password-cleartext</authentication>
|
||||
<username>%EMAILADDRESS%</username>
|
||||
</incomingServer>
|
||||
<outgoingServer type="smtp">
|
||||
<hostname>{{ config.domain_name }}</hostname>
|
||||
<port>465</port>
|
||||
@@ -33,5 +40,12 @@
|
||||
<authentication>password-cleartext</authentication>
|
||||
<username>%EMAILADDRESS%</username>
|
||||
</outgoingServer>
|
||||
<outgoingServer type="smtp">
|
||||
<hostname>{{ config.domain_name }}</hostname>
|
||||
<port>443</port>
|
||||
<socketType>SSL</socketType>
|
||||
<authentication>password-cleartext</authentication>
|
||||
<username>%EMAILADDRESS%</username>
|
||||
</outgoingServer>
|
||||
</emailProvider>
|
||||
</clientConfig>
|
||||
|
||||
@@ -1,13 +1,32 @@
|
||||
load_module modules/ngx_stream_module.so;
|
||||
|
||||
user www-data;
|
||||
worker_processes auto;
|
||||
pid /run/nginx.pid;
|
||||
error_log /var/log/nginx/error.log;
|
||||
error_log syslog:server=unix:/dev/log,facility=local3;
|
||||
|
||||
events {
|
||||
worker_connections 768;
|
||||
# multi_accept on;
|
||||
}
|
||||
|
||||
stream {
|
||||
map $ssl_preread_alpn_protocols $proxy {
|
||||
default 127.0.0.1:8443;
|
||||
~\bsmtp\b 127.0.0.1:465;
|
||||
~\bimap\b 127.0.0.1:993;
|
||||
}
|
||||
|
||||
server {
|
||||
listen 443;
|
||||
{% if not disable_ipv6 %}
|
||||
listen [::]:443;
|
||||
{% endif %}
|
||||
proxy_pass $proxy;
|
||||
ssl_preread on;
|
||||
}
|
||||
}
|
||||
|
||||
http {
|
||||
sendfile on;
|
||||
tcp_nopush on;
|
||||
@@ -26,8 +45,11 @@ http {
|
||||
gzip on;
|
||||
|
||||
server {
|
||||
listen 443 ssl default_server;
|
||||
listen [::]:443 ssl default_server;
|
||||
|
||||
listen 8443 ssl default_server;
|
||||
{% if not disable_ipv6 %}
|
||||
listen [::]:8443 ssl default_server;
|
||||
{% endif %}
|
||||
|
||||
root /var/www/html;
|
||||
|
||||
@@ -35,6 +57,8 @@ http {
|
||||
|
||||
server_name _;
|
||||
|
||||
access_log syslog:server=unix:/dev/log,facility=local7;
|
||||
|
||||
location / {
|
||||
# First attempt to serve request as file, then
|
||||
# as directory, then fall back to displaying a 404.
|
||||
@@ -76,9 +100,12 @@ http {
|
||||
|
||||
# Redirect www. to non-www
|
||||
server {
|
||||
listen 443 ssl;
|
||||
listen [::]:443 ssl;
|
||||
listen 8443 ssl;
|
||||
{% if not disable_ipv6 %}
|
||||
listen [::]:8443 ssl;
|
||||
{% endif %}
|
||||
server_name www.{{ config.domain_name }};
|
||||
return 301 $scheme://{{ config.domain_name }}$request_uri;
|
||||
access_log syslog:server=unix:/dev/log,facility=local7;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -25,7 +25,24 @@ KeyTable /etc/dkimkeys/KeyTable
|
||||
SigningTable refile:/etc/dkimkeys/SigningTable
|
||||
|
||||
# Sign Autocrypt header in addition to the default specified in RFC 6376.
|
||||
SignHeaders *,+autocrypt
|
||||
#
|
||||
# Default list is here:
|
||||
# <https://github.com/trusteddomainproject/OpenDKIM/blob/5c539587561785a66c1f67f720f2fb741f320785/libopendkim/dkim.c#L221-L245>
|
||||
SignHeaders *,+autocrypt,+content-type
|
||||
|
||||
# Prevent addition of second Content-Type header
|
||||
# and other important headers that should not be added
|
||||
# after signing the message.
|
||||
# See
|
||||
# <https://www.zone.eu/blog/2024/05/17/bimi-and-dmarc-cant-save-you/>
|
||||
# and RFC 6376 (page 41) for reference.
|
||||
#
|
||||
# We don't use "l=" body length so the problem described in RFC 6376
|
||||
# is not applicable, but adding e.g. a second "From" header
|
||||
# or second "Autocrypt" header is better prevented in any case.
|
||||
#
|
||||
# Default is empty.
|
||||
OversignHeaders from,reply-to,subject,date,to,cc,resent-date,resent-from,resent-sender,resent-to,resent-cc,in-reply-to,references,list-id,list-help,list-unsubscribe,list-subscribe,list-post,list-owner,list-archive,autocrypt
|
||||
|
||||
# Script to ignore signatures that do not correspond to the From: domain.
|
||||
ScreenPolicyScript /etc/opendkim/screen.lua
|
||||
|
||||
@@ -62,11 +62,14 @@ mydestination =
|
||||
relayhost =
|
||||
mynetworks = 127.0.0.0/8 [::ffff:127.0.0.0]/104 [::1]/128
|
||||
mailbox_size_limit = 0
|
||||
# maximum 30MB sized messages
|
||||
message_size_limit = 31457280
|
||||
message_size_limit = {{config.max_message_size}}
|
||||
recipient_delimiter = +
|
||||
inet_interfaces = all
|
||||
{% if disable_ipv6 %}
|
||||
inet_protocols = ipv4
|
||||
{% else %}
|
||||
inet_protocols = all
|
||||
{% endif %}
|
||||
|
||||
virtual_transport = lmtp:unix:private/dovecot-lmtp
|
||||
virtual_mailbox_domains = {{ config.mail_domain }}
|
||||
@@ -77,3 +80,7 @@ mua_helo_restrictions = permit_mynetworks, reject_invalid_helo_hostname, reject_
|
||||
|
||||
# 1:1 map MAIL FROM to SASL login name.
|
||||
smtpd_sender_login_maps = regexp:/etc/postfix/login_map
|
||||
|
||||
# Do not lookup SMTP client hostnames to reduce delays
|
||||
# and avoid unnecessary DNS requests.
|
||||
smtpd_peername_lookup = no
|
||||
|
||||
@@ -15,7 +15,7 @@ smtp inet n - y - - smtpd -v
|
||||
smtp inet n - y - - smtpd
|
||||
{%- endif %}
|
||||
-o smtpd_milters=unix:opendkim/opendkim.sock
|
||||
submission inet n - y - - smtpd
|
||||
submission inet n - y - 5000 smtpd
|
||||
-o syslog_name=postfix/submission
|
||||
-o smtpd_tls_security_level=encrypt
|
||||
-o smtpd_sasl_auth_enable=yes
|
||||
@@ -32,7 +32,7 @@ submission inet n - y - - smtpd
|
||||
-o smtpd_client_connection_count_limit=1000
|
||||
-o smtpd_proxy_filter=127.0.0.1:{{ config.filtermail_smtp_port }}
|
||||
-o cleanup_service_name=authclean
|
||||
smtps inet n - y - - smtpd
|
||||
smtps inet n - y - 5000 smtpd
|
||||
-o syslog_name=postfix/smtps
|
||||
-o smtpd_tls_wrappermode=yes
|
||||
-o smtpd_tls_security_level=encrypt
|
||||
|
||||
12
cmdeploy/src/cmdeploy/remote/__init__.py
Normal file
12
cmdeploy/src/cmdeploy/remote/__init__.py
Normal file
@@ -0,0 +1,12 @@
|
||||
"""
|
||||
|
||||
The 'cmdeploy.remote' sub package contains modules with remotely executing functions.
|
||||
|
||||
Its "_sshexec_bootstrap" module is executed remotely through `SSHExec`
|
||||
and its main() loop there stays connected via a command channel,
|
||||
ready to receive function invocations ("command") and return results.
|
||||
"""
|
||||
|
||||
from . import rdns, rshell
|
||||
|
||||
__all__ = ["rdns", "rshell"]
|
||||
30
cmdeploy/src/cmdeploy/remote/_sshexec_bootstrap.py
Normal file
30
cmdeploy/src/cmdeploy/remote/_sshexec_bootstrap.py
Normal file
@@ -0,0 +1,30 @@
|
||||
import builtins
|
||||
import importlib
|
||||
import traceback
|
||||
|
||||
## Function Execution server
|
||||
|
||||
|
||||
def _run_loop(cmd_channel):
|
||||
while cmd := cmd_channel.receive():
|
||||
cmd_channel.send(_handle_one_request(cmd))
|
||||
|
||||
|
||||
def _handle_one_request(cmd):
|
||||
pymod_path, func_name, kwargs = cmd
|
||||
try:
|
||||
mod = importlib.import_module(pymod_path)
|
||||
func = getattr(mod, func_name)
|
||||
res = func(**kwargs)
|
||||
return ("finish", res)
|
||||
except:
|
||||
data = traceback.format_exc()
|
||||
return ("error", data)
|
||||
|
||||
|
||||
def main(channel):
|
||||
# enable simple "print" logging
|
||||
|
||||
builtins.print = lambda x="": channel.send(("log", x))
|
||||
|
||||
_run_loop(channel)
|
||||
88
cmdeploy/src/cmdeploy/remote/rdns.py
Normal file
88
cmdeploy/src/cmdeploy/remote/rdns.py
Normal file
@@ -0,0 +1,88 @@
|
||||
"""
|
||||
Pure python functions which execute remotely in a system Python interpreter.
|
||||
|
||||
All functions of this module
|
||||
|
||||
- need to get and and return Python builtin data types only,
|
||||
|
||||
- can only use standard library dependencies,
|
||||
|
||||
- can freely call each other.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
from .rshell import CalledProcessError, shell
|
||||
|
||||
|
||||
def perform_initial_checks(mail_domain):
|
||||
"""Collecting initial DNS settings."""
|
||||
assert mail_domain
|
||||
if not shell("dig", fail_ok=True):
|
||||
shell("apt-get install -y dnsutils")
|
||||
shell(f"unbound-control flush_zone {mail_domain}", fail_ok=True)
|
||||
A = query_dns("A", mail_domain)
|
||||
AAAA = query_dns("AAAA", mail_domain)
|
||||
MTA_STS = query_dns("CNAME", f"mta-sts.{mail_domain}")
|
||||
WWW = query_dns("CNAME", f"www.{mail_domain}")
|
||||
|
||||
res = dict(mail_domain=mail_domain, A=A, AAAA=AAAA, MTA_STS=MTA_STS, WWW=WWW)
|
||||
if not MTA_STS or not WWW or (not A and not AAAA):
|
||||
return res
|
||||
|
||||
res["acme_account_url"] = shell("acmetool account-url", fail_ok=True)
|
||||
res["dkim_entry"] = get_dkim_entry(mail_domain, dkim_selector="opendkim")
|
||||
|
||||
# parse out sts-id if exists, example: "v=STSv1; id=2090123"
|
||||
parts = query_dns("TXT", f"_mta-sts.{mail_domain}").split("id=")
|
||||
res["sts_id"] = parts[1].rstrip('"') if len(parts) == 2 else ""
|
||||
return res
|
||||
|
||||
|
||||
def get_dkim_entry(mail_domain, dkim_selector):
|
||||
try:
|
||||
dkim_pubkey = shell(
|
||||
f"openssl rsa -in /etc/dkimkeys/{dkim_selector}.private "
|
||||
"-pubout 2>/dev/null | awk '/-/{next}{printf(\"%s\",$0)}'"
|
||||
)
|
||||
except CalledProcessError:
|
||||
return
|
||||
dkim_value_raw = f"v=DKIM1;k=rsa;p={dkim_pubkey};s=email;t=s"
|
||||
dkim_value = '" "'.join(re.findall(".{1,255}", dkim_value_raw))
|
||||
return f'{dkim_selector}._domainkey.{mail_domain}. TXT "{dkim_value}"'
|
||||
|
||||
|
||||
def query_dns(typ, domain):
|
||||
res = shell(f"dig -r -q {domain} -t {typ} +short")
|
||||
print(res)
|
||||
if res:
|
||||
return res.split("\n")[0]
|
||||
return ""
|
||||
|
||||
|
||||
def check_zonefile(zonefile, mail_domain):
|
||||
"""Check expected zone file entries."""
|
||||
shell(f"unbound-control flush_zone {mail_domain}", fail_ok=True)
|
||||
required = True
|
||||
required_diff = []
|
||||
recommended_diff = []
|
||||
|
||||
for zf_line in zonefile.splitlines():
|
||||
if "; Recommended" in zf_line:
|
||||
required = False
|
||||
continue
|
||||
if not zf_line.strip() or zf_line.startswith(";"):
|
||||
continue
|
||||
print(f"dns-checking {zf_line!r}")
|
||||
zf_domain, zf_typ, zf_value = zf_line.split(maxsplit=2)
|
||||
zf_domain = zf_domain.rstrip(".")
|
||||
zf_value = zf_value.strip()
|
||||
query_value = query_dns(zf_typ, zf_domain)
|
||||
if zf_value != query_value:
|
||||
assert zf_typ in ("A", "AAAA", "CNAME", "CAA", "SRV", "MX", "TXT"), zf_line
|
||||
if required:
|
||||
required_diff.append(zf_line)
|
||||
else:
|
||||
recommended_diff.append(zf_line)
|
||||
|
||||
return required_diff, recommended_diff
|
||||
16
cmdeploy/src/cmdeploy/remote/rshell.py
Normal file
16
cmdeploy/src/cmdeploy/remote/rshell.py
Normal file
@@ -0,0 +1,16 @@
|
||||
from subprocess import CalledProcessError, check_output
|
||||
|
||||
|
||||
def shell(command, fail_ok=False):
|
||||
print(f"$ {command}")
|
||||
try:
|
||||
return check_output(command, shell=True).decode().rstrip()
|
||||
except CalledProcessError:
|
||||
if not fail_ok:
|
||||
raise
|
||||
return ""
|
||||
|
||||
|
||||
def get_systemd_running():
|
||||
lines = shell("systemctl --type=service --state=running").split("\n")
|
||||
return [line for line in lines if line.startswith(" ")]
|
||||
@@ -2,7 +2,7 @@
|
||||
Description=Chatmail dict proxy for IMAP METADATA
|
||||
|
||||
[Service]
|
||||
ExecStart={execpath} /run/chatmail-metadata/metadata.socket /home/vmail/mail/{mail_domain} {config_path}
|
||||
ExecStart={execpath} /run/chatmail-metadata/metadata.socket {config_path}
|
||||
Restart=always
|
||||
RestartSec=30
|
||||
User=vmail
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
Description=Chatmail dict authentication proxy for dovecot
|
||||
|
||||
[Service]
|
||||
ExecStart={execpath} /run/doveauth/doveauth.socket /home/vmail/passdb.sqlite {config_path}
|
||||
ExecStart={execpath} /run/doveauth/doveauth.socket {config_path}
|
||||
Restart=always
|
||||
RestartSec=30
|
||||
User=vmail
|
||||
|
||||
@@ -1,16 +0,0 @@
|
||||
[Unit]
|
||||
Description=Expunge old mails after {{ config.delete_mails_after }} days
|
||||
|
||||
[Service]
|
||||
Type=oneshot
|
||||
# delete all mails after {{ config.delete_mails_after }} days, in the Inbox
|
||||
ExecStart=/home/vmail/mail/{{ config.mail_domain }} -path '*/cur/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
||||
# or in any IMAP subfolder
|
||||
ExecStart=vmail find /home/vmail/mail/{{ config.mail_domain }} -path '*/.*/cur/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
||||
# even if they are unseen
|
||||
ExecStart=vmail find /home/vmail/mail/{{ config.mail_domain }} -path '*/new/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
||||
ExecStart=vmail find /home/vmail/mail/{{ config.mail_domain }} -path '*/new/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
||||
# or only temporary (but then they shouldn't be around after {{ config.delete_mails_after }} days anyway).
|
||||
ExecStart=vmail find /home/vmail/mail/{{ config.mail_domain }} -path '*/tmp/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
||||
ExecStart=vmail find /home/vmail/mail/{{ config.mail_domain }} -path '*/.*/tmp/*' -mtime +{{ config.delete_mails_after }} -type f -delete
|
||||
ExecStart=vmail find /home/vmail/mail/{{ config.mail_domain }} -name 'maildirsize' -type f -delete
|
||||
@@ -1,9 +0,0 @@
|
||||
[Unit]
|
||||
Description=Run expunge.service daily
|
||||
|
||||
[Timer]
|
||||
OnCalendar=weekly
|
||||
Persistent=true
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
||||
@@ -5,6 +5,7 @@ Description=Chatmail Postfix before queue filter
|
||||
ExecStart={execpath} {config_path}
|
||||
Restart=always
|
||||
RestartSec=30
|
||||
User=filtermail
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
|
||||
12
cmdeploy/src/cmdeploy/service/lastlogin.service.f
Normal file
12
cmdeploy/src/cmdeploy/service/lastlogin.service.f
Normal file
@@ -0,0 +1,12 @@
|
||||
[Unit]
|
||||
Description=Dict proxy for last-login tracking
|
||||
|
||||
[Service]
|
||||
ExecStart={execpath} /run/chatmail-lastlogin/lastlogin.socket {config_path}
|
||||
Restart=always
|
||||
RestartSec=30
|
||||
User=vmail
|
||||
RuntimeDirectory=chatmail-lastlogin
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
@@ -1,5 +0,0 @@
|
||||
[Unit]
|
||||
Description=Generate metrics in /var/www/html/metrics
|
||||
|
||||
[Service]
|
||||
ExecStart={{ config.execpath }} /home/vmail/mail/{{ config.mail_domain }} > /var/www/html/metrics
|
||||
@@ -1,9 +0,0 @@
|
||||
[Unit]
|
||||
Description=Run metrics.service every 5 minutes
|
||||
|
||||
[Timer]
|
||||
OnBootSec=5min
|
||||
OnUnitActiveSec=5min
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
||||
87
cmdeploy/src/cmdeploy/sshexec.py
Normal file
87
cmdeploy/src/cmdeploy/sshexec.py
Normal file
@@ -0,0 +1,87 @@
|
||||
import inspect
|
||||
import os
|
||||
import sys
|
||||
from queue import Queue
|
||||
|
||||
import execnet
|
||||
|
||||
from . import remote
|
||||
|
||||
|
||||
class FuncError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def bootstrap_remote(gateway, remote=remote):
|
||||
"""Return a command channel which can execute remote functions."""
|
||||
source_init_path = inspect.getfile(remote)
|
||||
basedir = os.path.dirname(source_init_path)
|
||||
name = os.path.basename(basedir)
|
||||
|
||||
# rsync sourcedir to remote host
|
||||
remote_pkg_path = f"/root/from-cmdeploy/{name}"
|
||||
q = Queue()
|
||||
finish = lambda: q.put(None)
|
||||
rsync = execnet.RSync(sourcedir=basedir, verbose=False)
|
||||
rsync.add_target(gateway, remote_pkg_path, finishedcallback=finish, delete=True)
|
||||
rsync.send()
|
||||
q.get()
|
||||
|
||||
# start sshexec bootstrap and return its command channel
|
||||
remote_sys_path = os.path.dirname(remote_pkg_path)
|
||||
channel = gateway.remote_exec(
|
||||
f"""
|
||||
import sys
|
||||
sys.path.insert(0, {remote_sys_path!r})
|
||||
from remote._sshexec_bootstrap import main
|
||||
main(channel)
|
||||
"""
|
||||
)
|
||||
return channel
|
||||
|
||||
|
||||
def print_stderr(item="", end="\n"):
|
||||
print(item, file=sys.stderr, end=end)
|
||||
|
||||
|
||||
class SSHExec:
|
||||
RemoteError = execnet.RemoteError
|
||||
FuncError = FuncError
|
||||
|
||||
def __init__(self, host, verbose=False, python="python3", timeout=60):
|
||||
self.gateway = execnet.makegateway(f"ssh=root@{host}//python={python}")
|
||||
self._remote_cmdloop_channel = bootstrap_remote(self.gateway, remote)
|
||||
self.timeout = timeout
|
||||
self.verbose = verbose
|
||||
|
||||
def __call__(self, call, kwargs=None, log_callback=None):
|
||||
if kwargs is None:
|
||||
kwargs = {}
|
||||
assert call.__module__.startswith("cmdeploy.remote")
|
||||
modname = call.__module__.replace("cmdeploy.", "")
|
||||
self._remote_cmdloop_channel.send((modname, call.__name__, kwargs))
|
||||
while 1:
|
||||
code, data = self._remote_cmdloop_channel.receive(timeout=self.timeout)
|
||||
if log_callback is not None and code == "log":
|
||||
log_callback(data)
|
||||
elif code == "finish":
|
||||
return data
|
||||
elif code == "error":
|
||||
raise self.FuncError(data)
|
||||
|
||||
def logged(self, call, kwargs):
|
||||
def log_progress(data):
|
||||
sys.stderr.write(".")
|
||||
sys.stderr.flush()
|
||||
|
||||
title = call.__doc__
|
||||
if not title:
|
||||
title = call.__name__
|
||||
if self.verbose:
|
||||
print_stderr("[ssh] " + title)
|
||||
return self(call, kwargs, log_callback=print_stderr)
|
||||
else:
|
||||
print_stderr(title, end="")
|
||||
res = self(call, kwargs, log_callback=log_progress)
|
||||
print_stderr()
|
||||
return res
|
||||
17
cmdeploy/src/cmdeploy/tests/data/zftest.zone
Normal file
17
cmdeploy/src/cmdeploy/tests/data/zftest.zone
Normal file
@@ -0,0 +1,17 @@
|
||||
; Required DNS entries for chatmail servers
|
||||
zftest.testrun.org. A 135.181.204.127
|
||||
zftest.testrun.org. AAAA 2a01:4f9:c012:52f4::1
|
||||
zftest.testrun.org. MX 10 zftest.testrun.org.
|
||||
_mta-sts.zftest.testrun.org. TXT "v=STSv1; id=202403211706"
|
||||
mta-sts.zftest.testrun.org. CNAME zftest.testrun.org.
|
||||
www.zftest.testrun.org. CNAME zftest.testrun.org.
|
||||
opendkim._domainkey.zftest.testrun.org. TXT "v=DKIM1;k=rsa;p=MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAoYt82CVUyz2ouaqjX2kB+5J80knAyoOU3MGU5aWppmwUwwTvj/oSTSpkc5JMtVTRmKKr8NUDWAL1Yw7dfGqqPHdHfwwjS3BIvDzYx+hzgtz62RnfNgV+/2MAoNpfX7cAFIHdRzEHNtwugc3RDLquqPoupAE3Y2YRw2T5zG5fILh4vwIcJZL5Uq6B92j8wwJqOex" "33n+vm1NKQ9rxo/UsHAmZlJzpooXcG/4igTBxJyJlamVSRR6N7Nul1v//YJb7J6v2o0iPHW6uE0StzKaPPNC2IVosSRFbD9H2oqppltptFSNPlI0E+t0JBWHem6YK7xcugiO3ImMCaaU8g6Jt/wIDAQAB;s=email;t=s"
|
||||
; Recommended DNS entries
|
||||
_submission._tcp.zftest.testrun.org. SRV 0 1 587 zftest.testrun.org.
|
||||
_submissions._tcp.zftest.testrun.org. SRV 0 1 465 zftest.testrun.org.
|
||||
_imap._tcp.zftest.testrun.org. SRV 0 1 143 zftest.testrun.org.
|
||||
_imaps._tcp.zftest.testrun.org. SRV 0 1 993 zftest.testrun.org.
|
||||
zftest.testrun.org. CAA 0 issue "letsencrypt.org;accounturi=https://acme-v02.api.letsencrypt.org/acme/acct/1371472956"
|
||||
zftest.testrun.org. TXT "v=spf1 a:zftest.testrun.org ~all"
|
||||
_dmarc.zftest.testrun.org. TXT "v=DMARC1;p=reject;adkim=s;aspf=s"
|
||||
_adsp._domainkey.zftest.testrun.org. TXT "dkim=discardable"
|
||||
@@ -41,9 +41,9 @@ class TestDC:
|
||||
|
||||
def dc_ping_pong():
|
||||
chat.send_text("ping")
|
||||
msg = ac2.wait_next_incoming_message()
|
||||
msg = ac2._evtracker.wait_next_incoming_message()
|
||||
msg.chat.send_text("pong")
|
||||
ac1.wait_next_incoming_message()
|
||||
ac1._evtracker.wait_next_incoming_message()
|
||||
|
||||
benchmark(dc_ping_pong, 5)
|
||||
|
||||
@@ -55,6 +55,6 @@ class TestDC:
|
||||
for i in range(10):
|
||||
chat.send_text(f"hello {i}")
|
||||
for i in range(10):
|
||||
ac2.wait_next_incoming_message()
|
||||
ac2._evtracker.wait_next_incoming_message()
|
||||
|
||||
benchmark(dc_send_10_receive_10, 5)
|
||||
|
||||
@@ -2,6 +2,56 @@ import smtplib
|
||||
|
||||
import pytest
|
||||
|
||||
from cmdeploy import remote
|
||||
from cmdeploy.sshexec import SSHExec
|
||||
|
||||
|
||||
class TestSSHExecutor:
|
||||
@pytest.fixture(scope="class")
|
||||
def sshexec(self, sshdomain):
|
||||
return SSHExec(sshdomain)
|
||||
|
||||
def test_ls(self, sshexec):
|
||||
out = sshexec(call=remote.rdns.shell, kwargs=dict(command="ls"))
|
||||
out2 = sshexec(call=remote.rdns.shell, kwargs=dict(command="ls"))
|
||||
assert out == out2
|
||||
|
||||
def test_perform_initial(self, sshexec, maildomain):
|
||||
res = sshexec(
|
||||
remote.rdns.perform_initial_checks, kwargs=dict(mail_domain=maildomain)
|
||||
)
|
||||
assert res["A"] or res["AAAA"]
|
||||
|
||||
def test_logged(self, sshexec, maildomain, capsys):
|
||||
sshexec.logged(
|
||||
remote.rdns.perform_initial_checks, kwargs=dict(mail_domain=maildomain)
|
||||
)
|
||||
out, err = capsys.readouterr()
|
||||
assert err.startswith("Collecting")
|
||||
assert err.endswith("....\n")
|
||||
assert err.count("\n") == 1
|
||||
|
||||
sshexec.verbose = True
|
||||
sshexec.logged(
|
||||
remote.rdns.perform_initial_checks, kwargs=dict(mail_domain=maildomain)
|
||||
)
|
||||
out, err = capsys.readouterr()
|
||||
lines = err.split("\n")
|
||||
assert len(lines) > 4
|
||||
assert remote.rdns.perform_initial_checks.__doc__ in lines[0]
|
||||
|
||||
def test_exception(self, sshexec, capsys):
|
||||
try:
|
||||
sshexec.logged(
|
||||
remote.rdns.perform_initial_checks,
|
||||
kwargs=dict(mail_domain=None),
|
||||
)
|
||||
except sshexec.FuncError as e:
|
||||
assert "rdns.py" in str(e)
|
||||
assert "AssertionError" in str(e)
|
||||
else:
|
||||
pytest.fail("didn't raise exception")
|
||||
|
||||
|
||||
def test_remote(remote, imap_or_smtp):
|
||||
lineproducer = remote.iter_output(imap_or_smtp.logcmd)
|
||||
@@ -86,15 +136,16 @@ def test_exceed_rate_limit(cmsetup, gencreds, maildata, chatmail_config):
|
||||
pytest.fail("Rate limit was not exceeded")
|
||||
|
||||
|
||||
@pytest.mark.slow
|
||||
def test_expunged(remote, chatmail_config):
|
||||
outdated_days = int(chatmail_config.delete_mails_after) + 1
|
||||
find_cmds = [
|
||||
f"find /home/vmail/mail/{chatmail_config.mail_domain} -path '*/cur/*' -mtime +{outdated_days} -type f",
|
||||
f"find /home/vmail/mail/{chatmail_config.mail_domain} -path '*/.*/cur/*' -mtime +{outdated_days} -type f",
|
||||
f"find /home/vmail/mail/{chatmail_config.mail_domain} -path '*/new/*' -mtime +{outdated_days} -type f",
|
||||
f"find /home/vmail/mail/{chatmail_config.mail_domain} -path '*/.*/new/*' -mtime +{outdated_days} -type f",
|
||||
f"find /home/vmail/mail/{chatmail_config.mail_domain} -path '*/tmp/*' -mtime +{outdated_days} -type f",
|
||||
f"find /home/vmail/mail/{chatmail_config.mail_domain} -path '*/.*/tmp/*' -mtime +{outdated_days} -type f",
|
||||
f"find {chatmail_config.mailboxes_dir} -path '*/cur/*' -mtime +{outdated_days} -type f",
|
||||
f"find {chatmail_config.mailboxes_dir} -path '*/.*/cur/*' -mtime +{outdated_days} -type f",
|
||||
f"find {chatmail_config.mailboxes_dir} -path '*/new/*' -mtime +{outdated_days} -type f",
|
||||
f"find {chatmail_config.mailboxes_dir} -path '*/.*/new/*' -mtime +{outdated_days} -type f",
|
||||
f"find {chatmail_config.mailboxes_dir} -path '*/tmp/*' -mtime +{outdated_days} -type f",
|
||||
f"find {chatmail_config.mailboxes_dir} -path '*/.*/tmp/*' -mtime +{outdated_days} -type f",
|
||||
]
|
||||
for cmd in find_cmds:
|
||||
for line in remote.iter_output(cmd):
|
||||
|
||||
@@ -10,7 +10,6 @@ from pathlib import Path
|
||||
|
||||
import pytest
|
||||
from chatmaild.config import read_config
|
||||
from chatmaild.database import Database
|
||||
|
||||
conftestdir = Path(__file__).parent
|
||||
|
||||
@@ -35,7 +34,7 @@ def pytest_runtest_setup(item):
|
||||
pytest.skip("skipping slow test, use --slow to run")
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@pytest.fixture(scope="session")
|
||||
def chatmail_config(pytestconfig):
|
||||
current = basedir = Path().resolve()
|
||||
while 1:
|
||||
@@ -49,12 +48,12 @@ def chatmail_config(pytestconfig):
|
||||
pytest.skip(f"no chatmail.ini file found in {basedir} or parent dirs")
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@pytest.fixture(scope="session")
|
||||
def maildomain(chatmail_config):
|
||||
return chatmail_config.mail_domain
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@pytest.fixture(scope="session")
|
||||
def sshdomain(maildomain):
|
||||
return os.environ.get("CHATMAIL_SSH", maildomain)
|
||||
|
||||
@@ -79,6 +78,17 @@ def pytest_report_header():
|
||||
return ["-" * len(text), text, "-" * len(text)]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def cm_data(request):
|
||||
datadir = request.fspath.dirpath("data")
|
||||
|
||||
class CMData:
|
||||
def get(self, name):
|
||||
return datadir.join(name).read()
|
||||
|
||||
return CMData()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def benchmark(request):
|
||||
def bench(func, num, name=None, reportfunc=None):
|
||||
@@ -251,13 +261,6 @@ def gencreds(chatmail_config):
|
||||
return lambda domain=None: next(gen(domain))
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def db(tmpdir):
|
||||
db_path = tmpdir / "passdb.sqlite"
|
||||
print("database path:", db_path)
|
||||
return Database(db_path)
|
||||
|
||||
|
||||
#
|
||||
# Delta Chat testplugin re-use
|
||||
# use the cmfactory fixture to get chatmail instance accounts
|
||||
|
||||
@@ -21,8 +21,9 @@ class TestCmdline:
|
||||
run = parser.parse_args(["run"])
|
||||
assert init and run
|
||||
|
||||
@pytest.mark.xfail(reason="init doesn't exit anymore, check for CLI output instead")
|
||||
def test_init_not_overwrite(self):
|
||||
main(["init", "chat.example.org"])
|
||||
with pytest.raises(SystemExit):
|
||||
main(["init", "chat.example.org"])
|
||||
def test_init_not_overwrite(self, capsys):
|
||||
assert main(["init", "chat.example.org"]) == 0
|
||||
capsys.readouterr()
|
||||
assert main(["init", "chat.example.org"]) == 1
|
||||
out, err = capsys.readouterr()
|
||||
assert "path exists" in out.lower()
|
||||
|
||||
127
cmdeploy/src/cmdeploy/tests/test_dns.py
Normal file
127
cmdeploy/src/cmdeploy/tests/test_dns.py
Normal file
@@ -0,0 +1,127 @@
|
||||
import pytest
|
||||
|
||||
from cmdeploy import remote
|
||||
from cmdeploy.dns import check_full_zone, check_initial_remote_data
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mockdns_base(monkeypatch):
|
||||
qdict = {}
|
||||
|
||||
def query_dns(typ, domain):
|
||||
try:
|
||||
return qdict[typ][domain]
|
||||
except KeyError:
|
||||
return ""
|
||||
|
||||
monkeypatch.setattr(remote.rdns, query_dns.__name__, query_dns)
|
||||
return qdict
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mockdns(mockdns_base):
|
||||
mockdns_base.update(
|
||||
{
|
||||
"A": {"some.domain": "1.1.1.1"},
|
||||
"AAAA": {"some.domain": "fde5:cd7a:9e1c:3240:5a99:936f:cdac:53ae"},
|
||||
"CNAME": {
|
||||
"mta-sts.some.domain": "some.domain.",
|
||||
"www.some.domain": "some.domain.",
|
||||
},
|
||||
}
|
||||
)
|
||||
return mockdns_base
|
||||
|
||||
|
||||
class TestPerformInitialChecks:
|
||||
def test_perform_initial_checks_ok1(self, mockdns):
|
||||
remote_data = remote.rdns.perform_initial_checks("some.domain")
|
||||
assert remote_data["A"] == mockdns["A"]["some.domain"]
|
||||
assert remote_data["AAAA"] == mockdns["AAAA"]["some.domain"]
|
||||
assert remote_data["MTA_STS"] == mockdns["CNAME"]["mta-sts.some.domain"]
|
||||
assert remote_data["WWW"] == mockdns["CNAME"]["www.some.domain"]
|
||||
|
||||
@pytest.mark.parametrize("drop", ["A", "AAAA"])
|
||||
def test_perform_initial_checks_with_one_of_A_AAAA(self, mockdns, drop):
|
||||
del mockdns[drop]
|
||||
remote_data = remote.rdns.perform_initial_checks("some.domain")
|
||||
assert not remote_data[drop]
|
||||
|
||||
l = []
|
||||
res = check_initial_remote_data(remote_data, print=l.append)
|
||||
assert res
|
||||
assert not l
|
||||
|
||||
def test_perform_initial_checks_no_mta_sts(self, mockdns):
|
||||
del mockdns["CNAME"]["mta-sts.some.domain"]
|
||||
remote_data = remote.rdns.perform_initial_checks("some.domain")
|
||||
assert not remote_data["MTA_STS"]
|
||||
|
||||
l = []
|
||||
res = check_initial_remote_data(remote_data, print=l.append)
|
||||
assert not res
|
||||
assert len(l) == 2
|
||||
|
||||
|
||||
def parse_zonefile_into_dict(zonefile, mockdns_base, only_required=False):
|
||||
for zf_line in zonefile.split("\n"):
|
||||
if zf_line.startswith("#"):
|
||||
if "Recommended" in zf_line and only_required:
|
||||
return
|
||||
continue
|
||||
if not zf_line.strip():
|
||||
continue
|
||||
zf_domain, zf_typ, zf_value = zf_line.split(maxsplit=2)
|
||||
zf_domain = zf_domain.rstrip(".")
|
||||
zf_value = zf_value.strip()
|
||||
mockdns_base.setdefault(zf_typ, {})[zf_domain] = zf_value
|
||||
|
||||
|
||||
class MockSSHExec:
|
||||
def logged(self, func, kwargs):
|
||||
return func(**kwargs)
|
||||
|
||||
def call(self, func, kwargs):
|
||||
return func(**kwargs)
|
||||
|
||||
|
||||
class TestZonefileChecks:
|
||||
def test_check_zonefile_all_ok(self, cm_data, mockdns_base):
|
||||
zonefile = cm_data.get("zftest.zone")
|
||||
parse_zonefile_into_dict(zonefile, mockdns_base)
|
||||
required_diff, recommended_diff = remote.rdns.check_zonefile(
|
||||
zonefile, "some.domain"
|
||||
)
|
||||
assert not required_diff and not recommended_diff
|
||||
|
||||
def test_check_zonefile_recommended_not_set(self, cm_data, mockdns_base):
|
||||
zonefile = cm_data.get("zftest.zone")
|
||||
zonefile_mocked = zonefile.split("; Recommended")[0]
|
||||
parse_zonefile_into_dict(zonefile_mocked, mockdns_base)
|
||||
required_diff, recommended_diff = remote.rdns.check_zonefile(
|
||||
zonefile, "some.domain"
|
||||
)
|
||||
assert not required_diff
|
||||
assert len(recommended_diff) == 8
|
||||
|
||||
def test_check_zonefile_output_required_fine(self, cm_data, mockdns_base, mockout):
|
||||
zonefile = cm_data.get("zftest.zone")
|
||||
zonefile_mocked = zonefile.split("; Recommended")[0]
|
||||
parse_zonefile_into_dict(zonefile_mocked, mockdns_base, only_required=True)
|
||||
mssh = MockSSHExec()
|
||||
mockdns_base["mail_domain"] = "some.domain"
|
||||
res = check_full_zone(mssh, mockdns_base, out=mockout, zonefile=zonefile)
|
||||
assert res == 0
|
||||
assert "WARNING" in mockout.captured_plain[0]
|
||||
assert len(mockout.captured_plain) == 9
|
||||
|
||||
def test_check_zonefile_output_full(self, cm_data, mockdns_base, mockout):
|
||||
zonefile = cm_data.get("zftest.zone")
|
||||
parse_zonefile_into_dict(zonefile, mockdns_base)
|
||||
mssh = MockSSHExec()
|
||||
mockdns_base["mail_domain"] = "some.domain"
|
||||
res = check_full_zone(mssh, mockdns_base, out=mockout, zonefile=zonefile)
|
||||
assert res == 0
|
||||
assert not mockout.captured_red
|
||||
assert "correct" in mockout.captured_green[0]
|
||||
assert not mockout.captured_red
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env bash
|
||||
#!/bin/sh
|
||||
#
|
||||
# Wrapper for cmdelpoy to run it in activated virtualenv.
|
||||
set -e
|
||||
|
||||
80
scripts/dovecot/README.md
Normal file
80
scripts/dovecot/README.md
Normal file
@@ -0,0 +1,80 @@
|
||||
## Introduction to custom Dovecot builds
|
||||
|
||||
Chatmail servers use a custom Debian build of the IMAP 'dovecot' server software because
|
||||
|
||||
a) Dovecot developers did not yet merge a [pull request](https://github.com/dovecot/core/pull/216)
|
||||
which majorly speeds up message delivery by removing a hardcoded 0.5 second delay
|
||||
on relaying incoming messages.
|
||||
|
||||
b) Even if merged, it would take years for it to reach Debian stable.
|
||||
|
||||
c) The modified dovecot has been successfully used since December 2023 without issues
|
||||
and we see no noticeable downside (theoretically higher CPU usage but not measureable)
|
||||
but a considerable upside as the delay-removal facilitates end-to-end message
|
||||
delivery of 200 ms in real networks.
|
||||
|
||||
The modified forked dovecot code lives at
|
||||
[https://github.com/chatmail/dovecot](https://github.com/chatmail/dovecot).
|
||||
The remainder of this document describes the setup of the Debian repository
|
||||
containing the patched dovecot version.
|
||||
|
||||
## Building Debian packages at build.opensuse.org
|
||||
|
||||
Delta Chat developers maintain an [account](https://build.opensuse.org/project/show/home:deltachat)
|
||||
in the [Open Build Service (OBS)](https://openbuildservice.org/),
|
||||
where the [resulting package](https://build.opensuse.org/package/show/home:deltachat/dovecot)
|
||||
is now used in deploying chatmail servers.
|
||||
|
||||
The Open Build Service (OBS) is a platform for building and distributing software packages
|
||||
across various operating systems and architectures.
|
||||
It supports openSUSE, Fedora, Debian, Ubuntu and Arch.
|
||||
It's [primary instance](https://build.opensuse.org/) is ran by the openSUSE project
|
||||
and is part of the pipeline of the creation of SUSE Linux Enterprise.
|
||||
|
||||
The OBS provides a mercurial-like interface to create source repositories
|
||||
that are then automatically built.
|
||||
While in theory a package can be created entirely over the web interface,
|
||||
the use of the cli-tool `osc` is more convenient and is described in the [official documentation](https://openbuildservice.org/help/manuals/obs-user-guide/art.obs.bg#sec.obsbg.obsconfig).
|
||||
|
||||
### How to build the dovecot debian package on the OBS via our script
|
||||
|
||||
In scripts/dovecot/ is a shell script that prepares the required files and pushes them to build.opensuse.org.
|
||||
|
||||
Before using the script, you should have osc set up as described in the [official documentation](https://openbuildservice.org/help/manuals/obs-user-guide/art.obs.bg#sec.obsbg.obsconfig).
|
||||
|
||||
The script assumes you are on Debian. It automatically installs any needed dependencies and creates the source package. To upload the resulting source package to the OBS you need to enter the username and password for deltachat on build.opensuse.org in the last step of the script.
|
||||
|
||||
Use `source build-obs.sh` to run it.
|
||||
|
||||
### Adding the resulting OBS repository to Debian 12
|
||||
|
||||
Our dovecot fork is automatically installed as part of the chatmail deployment. You can see it in cmdeploy/src/cmdeploy/__init__.py. If you want to add our fork manually to a system, you can do the following:
|
||||
|
||||
First add our signing key to your apt keyring:
|
||||
|
||||
```
|
||||
sudo cp cmdeploy/src/cmdeploy/obs-home-deltachat.gpg /etc/apt/keyrings/obs-home-deltachat.gpg`
|
||||
```
|
||||
|
||||
Now add our repository and key to /etc/apt/sources.list with a text editor of your choice:
|
||||
|
||||
```
|
||||
deb [signed-by=/etc/apt/keyrings/obs-home-deltachat.gpg] https://download.opensuse.org/repositories/home:/deltachat/Debian_12/ ./
|
||||
```
|
||||
|
||||
You can now install dovecot like normal.
|
||||
|
||||
```
|
||||
sudo apt update
|
||||
sudo apt install dovecot-core
|
||||
```
|
||||
|
||||
### Security concerns
|
||||
|
||||
The signing of the patched dovecot package is done in the OBS and
|
||||
in theory SUSE could make changes to the package delivered.
|
||||
It is probably reasonable to trust SUSE to not mess with the build
|
||||
process because it would cause serious negative reputation damage for them
|
||||
if they tried and someone finds out.
|
||||
|
||||
Our dovecot fork will receive the same security backports as the dovecot package in Debian Sid.
|
||||
54
scripts/dovecot/build-obs.sh
Normal file
54
scripts/dovecot/build-obs.sh
Normal file
@@ -0,0 +1,54 @@
|
||||
#!/bin/sh
|
||||
|
||||
# Install dependencies
|
||||
echo "Installing dependencies for this script:"
|
||||
sudo apt install -y devscripts build-essential osc curl git debhelper-compat
|
||||
|
||||
# Define path of your local OBS repository
|
||||
SCRIPT_DIR=$PWD
|
||||
OBS_PATH=$SCRIPT_DIR/obs
|
||||
REPO_PATH=$OBS_PATH/home:deltachat/dovecot/
|
||||
|
||||
# Download Debian Source Files
|
||||
echo "Downloading precise files from Debian unstable repository..."
|
||||
mkdir dovecot-build
|
||||
cd dovecot-build
|
||||
|
||||
# taken May 6th 2024, from https://packages.debian.org/unstable/dovecot-core
|
||||
curl http://deb.debian.org/debian/pool/main/d/dovecot/dovecot_2.3.21+dfsg1-3.debian.tar.xz -O
|
||||
curl http://deb.debian.org/debian/pool/main/d/dovecot/dovecot_2.3.21+dfsg1.orig.tar.gz -O
|
||||
curl http://deb.debian.org/debian/pool/main/d/dovecot/dovecot_2.3.21+dfsg1.orig-pigeonhole.tar.gz -O
|
||||
|
||||
# Clone the Chatmail Dovecot Repo
|
||||
echo "Cloning the Chatmail Dovecot fork..."
|
||||
git clone https://github.com/chatmail/dovecot.git
|
||||
|
||||
# Build the source package
|
||||
echo "Building the source package"
|
||||
cd dovecot
|
||||
dpkg-source -b .
|
||||
|
||||
# Setting up OSC
|
||||
echo "Setting up OBS home repository"
|
||||
mkdir $OBS_PATH
|
||||
cd $OBS_PATH
|
||||
rm -rf home:deltachat/dovecot
|
||||
osc checkout home:deltachat/dovecot
|
||||
|
||||
# Copy Files to Your Local OBS Repository,
|
||||
echo "Copying files to your local OBS repository..."
|
||||
cd $SCRIPT_DIR/dovecot-build
|
||||
cp -rf dovecot_2.3.21+dfsg1-3.debian.tar.xz $REPO_PATH
|
||||
cp -rf dovecot_2.3.21+dfsg1.orig.tar.gz $REPO_PATH
|
||||
cp -rf dovecot_2.3.21+dfsg1.orig-pigeonhole.tar.gz $REPO_PATH
|
||||
cp -rf dovecot_2.3.21+dfsg1-3.dsc $REPO_PATH
|
||||
|
||||
# Push Changes to OBS
|
||||
echo "Pushing changes to OBS..."
|
||||
cd $REPO_PATH
|
||||
osc up
|
||||
osc add dovecot_2.3.21+dfsg1-3.debian.tar.xz
|
||||
osc add dovecot_2.3.21+dfsg1.orig.tar.gz
|
||||
osc add dovecot_2.3.21+dfsg1.orig-pigeonhole.tar.gz
|
||||
osc add dovecot_2.3.21+dfsg1-3.dsc
|
||||
osc commit
|
||||
Submodule scripts/dovecot/dovecot-build/dovecot deleted from 4b7f802ca1
@@ -1,4 +1,4 @@
|
||||
#!/bin/bash
|
||||
#!/bin/sh
|
||||
set -e
|
||||
python3 -m venv --upgrade-deps venv
|
||||
|
||||
|
||||
@@ -1,11 +1,21 @@
|
||||
|
||||
<img class="banner" src="collage-top.png"/>
|
||||
|
||||
## Dear [Delta Chat](https://get.delta.chat) users and newcomers,
|
||||
## Dear [Delta Chat](https://get.delta.chat) users and newcomers ...
|
||||
|
||||
{% if config.mail_domain != "nine.testrun.org" %}
|
||||
Welcome to instant, interoperable and [privacy-preserving](privacy.html) messaging :)
|
||||
{% else %}
|
||||
Welcome to the default onboarding server ({{ config.mail_domain }})
|
||||
for Delta Chat users. For details how it avoids storing personal information
|
||||
please see our [privacy policy](privacy.html).
|
||||
{% endif %}
|
||||
|
||||
👉 **Tap** or scan this QR code to get a random `@{{config.mail_domain}}` e-mail address
|
||||
<a class="cta-button" href="DCACCOUNT:https://{{ config.mail_domain }}/new">Get a {{config.mail_domain}} chat profile</a>
|
||||
|
||||
If you are viewing this page on a different device
|
||||
without a Delta Chat app,
|
||||
you can also **scan this QR code** with Delta Chat:
|
||||
|
||||
<a href="DCACCOUNT:https://{{ config.mail_domain }}/new">
|
||||
<img width=300 style="float: none;" src="qr-chatmail-invite-{{config.mail_domain}}.png" /></a>
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
|
||||
<img class="banner" src="collage-info.png"/>
|
||||
|
||||
## More information
|
||||
|
||||
{{ config.mail_domain }} provides a low-maintenance, resource efficient and
|
||||
@@ -10,11 +8,9 @@ for the usage in chats, especially DeltaChat.
|
||||
|
||||
### Choosing a chatmail address instead of using a random one
|
||||
|
||||
In the Delta Chat account setup
|
||||
you may tap `LOG INTO YOUR E-MAIL ACCOUNT`
|
||||
and fill the two fields like this:
|
||||
In the Delta Chat account setup you may tap `Create a profile` then `Use other server` and choose `Classic e-mail login`. Here fill the two fields like this:
|
||||
|
||||
- `Address`: invent a word with
|
||||
- `E-Mail Address`: invent a word with
|
||||
{% if username_min_length == username_max_length %}
|
||||
*exactly* {{ username_min_length }}
|
||||
{% else %}
|
||||
@@ -28,7 +24,7 @@ and fill the two fields like this:
|
||||
characters
|
||||
and append `@{{config.mail_domain}}` to it.
|
||||
|
||||
- `Password`: invent at least {{ password_min_length }} characters.
|
||||
- `Existing Password`: invent at least {{ password_min_length }} characters.
|
||||
|
||||
If the e-mail address is not yet taken, you'll get that account.
|
||||
The first login sets your password.
|
||||
@@ -47,6 +43,20 @@ The first login sets your password.
|
||||
- You can store up to [{{ config.max_mailbox_size }} messages on the server](https://delta.chat/en/help#what-happens-if-i-turn-on-delete-old-messages-from-server).
|
||||
|
||||
|
||||
### <a name="account-deletion"></a> Account deletion
|
||||
|
||||
If you remove a {{ config.mail_domain }} profile from within the Delta Chat app,
|
||||
then the corresponding account on the server, along with all associated data,
|
||||
is automatically deleted {{ config.delete_inactive_users_after }} days afterwards.
|
||||
|
||||
If you use multiple devices
|
||||
then you need to remove the corresponding chat profile from each device
|
||||
in order for all account data to be removed on the server side.
|
||||
|
||||
If you have any further questions or requests regarding account deletion
|
||||
please send a message from your account to {{ config.privacy_mail }}.
|
||||
|
||||
|
||||
### Who are the operators? Which software is running?
|
||||
|
||||
This chatmail provider is run by a small voluntary group of devs and sysadmins,
|
||||
|
||||
@@ -72,3 +72,15 @@ code {
|
||||
color: red;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.cta-button, .cta-button:hover, .cta-button:visited {
|
||||
border: 1.5px solid #a4c2d0;
|
||||
border-radius: 5px;
|
||||
padding: 10px;
|
||||
display: inline-block;
|
||||
margin: 10px 0;
|
||||
|
||||
background: linear-gradient(120deg, #77888f, #364e59);
|
||||
color: white !important;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
@@ -1,21 +1,41 @@
|
||||
<img class="banner" src="collage-privacy.png"/>
|
||||
|
||||
# Privacy Policy for {{ config.mail_domain }}
|
||||
|
||||
We want to show you in a fair and transparent way
|
||||
what personal data is processed by us.
|
||||
We follow a strict privacy-by-design approach
|
||||
and try to avoid processing your data in the first place,
|
||||
but as you may know,
|
||||
the internet,
|
||||
and in particular sending e-mail messages,
|
||||
does not work without data.
|
||||
Still,
|
||||
it's only fair that you know at all times
|
||||
what personal data is processed
|
||||
when you use our service.
|
||||
{% if config.mail_domain == "nine.testrun.org" %}
|
||||
Welcome to `{{config.mail_domain}}`, the default chatmail onboarding server for Delta Chat users.
|
||||
It is operated on the side by a small sysops team employed by [merlinux](https://merlinux.eu),
|
||||
an open-source R&D company also acting as the fiscal sponsor of Delta Chat app developments.
|
||||
See [other chatmail servers](https://delta.chat/en/chatmail) for alternative server operators.
|
||||
{% endif %}
|
||||
|
||||
|
||||
## Summary: No personal data asked or collected
|
||||
|
||||
This chatmail server neither asks for nor retains personal information.
|
||||
Chatmail servers exist to reliably transmit (store and deliver) end-to-end encrypted messages
|
||||
between users' devices running the Delta Chat messenger app.
|
||||
Technically, you may think of a Chatmail server as
|
||||
an end-to-end encrypted "messaging router" at Internet-scale.
|
||||
|
||||
A chatmail server is very unlike classic e-mail servers (for example Google Mail servers)
|
||||
that ask for personal data and permanently store messages.
|
||||
A chatmail server behaves more like the Signal messaging server
|
||||
but does not know about phone numbers and securely and automatically interoperates
|
||||
with other chatmail and classic e-mail servers.
|
||||
|
||||
In particular, this chatmail server
|
||||
|
||||
- unconditionally removes messages after {{ config.delete_mails_after }} days,
|
||||
|
||||
- prohibits sending out un-encrypted messages,
|
||||
|
||||
- only has temporary log files used for debugging purposes.
|
||||
|
||||
Legally, authorities might still regard chatmail as a "classic e-mail" server
|
||||
which collects and retains personal data.
|
||||
We do not agree with this interpretation. Nevertheless, we provide more legal details below
|
||||
to make life easier for data protection specialists and lawyers scrutinizing chatmail operations.
|
||||
|
||||
If you have any remaining questions about data protection, please contact us.
|
||||
|
||||
## 1. Name and contact information
|
||||
|
||||
@@ -57,7 +77,7 @@ we process the following data and details:
|
||||
- Users can retrieve or delete all stored messages
|
||||
without intervention from the operators using standard IMAP client tools.
|
||||
|
||||
### 3.1 Account setup
|
||||
### 2.1 Account setup
|
||||
|
||||
Creating an account happens in one of two ways on our mail servers:
|
||||
|
||||
@@ -78,7 +98,7 @@ Art. 6 (1) lit. b GDPR,
|
||||
as you have a usage contract with us
|
||||
by using our services.
|
||||
|
||||
## 3.2 Processing of E-Mail-Messages
|
||||
### 2.2 Processing of E-Mail-Messages
|
||||
|
||||
In addition,
|
||||
we will process data
|
||||
@@ -104,7 +124,7 @@ Therefore, limits are enforced:
|
||||
|
||||
- message size limits
|
||||
|
||||
- any other limit neccessary for the whole server to function in a healthy way
|
||||
- any other limit necessary for the whole server to function in a healthy way
|
||||
and to prevent abuse.
|
||||
|
||||
The processing and use of the above permissions
|
||||
@@ -178,8 +198,9 @@ for the purpose of drawing conclusions about your person.
|
||||
|
||||
## 4. Transfer of Data
|
||||
|
||||
Your personal data
|
||||
will not be transferred to third parties
|
||||
We do not retain any personal data but e-mail messages waiting to be delivered
|
||||
may contain personal data.
|
||||
Any such residual personal data will not be transferred to third parties
|
||||
for purposes other than those listed below:
|
||||
|
||||
a) you have given your express consent
|
||||
|
||||
Reference in New Issue
Block a user