Compare commits
96 Commits
MSC2140...c45f95ce3f
Commits in this comparison (SHA1):

c45f95ce3f
640fa8e9f1
0f3c37bf6a
ae5864cd91
e456724caf
ed9dcc4061
ea8e386939
e0ec887118
a71d32ba77
a0f6fe9b0d
c25647156a
e7c4c12a98
90b2b5301c
0d93a26e6d
c29fc0f0eb
e421c851c9
5b2b45233a
888f7a4209
0c301a49c7
1fda2dd3b7
c4a20efe5e
fc45f1b090
1480507d76
a1ab1e8e0a
1e5033b461
7323851c6e
08db73e55b
9fba20475b
9af5fce014
9843e14c1a
60e6f1e23c
6cdbcc69c7
ed7c714738
a9d783192b
2bb5a734d1
9aa5c4cca9
9c4faab5d8
53c4ffdc4e
e4144e923a
791361c10d
7c94bd4744
4b5eecd7e7
a6968fb7e9
d4853b1154
89df4b2425
0f89121b98
8a40ca185b
5baeb42623
072e5f66cb
b2f41d689b
9b4aff58c7
a20e41574d
72977d65ae
7555fff1a5
aed12e5536
75efd9921d
9219bd4723
73526be2ac
b827efca2c
6b7a4c8a23
47f6239268
0d6f65b469
be915aed94
ce938bb4a5
15db563e8d
82a538c750
84ca8ebbd9
774ebf4fa8
eb1326c56a
10cdb4360e
17ebc2a421
cbb9fced8d
7509174611
51d9225dda
6216113400
cb32441959
0ec4df2c06
86b880069b
a97273fe77
f9daf4d58a
9e4cabb69b
0b81de3cd0
698a16ec17
619b70d860
494c9e3941
94441d0446
b4776b50e2
2458b38b75
249e28a8b5
ba9e2d6121
f042b82a50
59071177ad
6450cd1f20
90bc244f3e
6e52a509db
5ca666981a
.github/workflows/codeql-analysis.yml (new file, 71 lines, vendored)
@@ -0,0 +1,71 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
name: "CodeQL"

on:
  push:
    branches: [master]
  pull_request:
    # The branches below must be a subset of the branches above
    branches: [master]
  schedule:
    - cron: '0 3 * * 6'

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        # Override automatic language detection by changing the below list
        # Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python']
        language: ['java']
        # Learn more...
        # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection

    steps:
    - name: Checkout repository
      uses: actions/checkout@v2
      with:
        # We must fetch at least the immediate parents so that if this is
        # a pull request then we can checkout the head.
        fetch-depth: 2

    # If this run was triggered by a pull request event, then checkout
    # the head of the pull request instead of the merge commit.
    - run: git checkout HEAD^2
      if: ${{ github.event_name == 'pull_request' }}

    # Initializes the CodeQL tools for scanning.
    - name: Initialize CodeQL
      uses: github/codeql-action/init@v1
      with:
        languages: ${{ matrix.language }}
        # If you wish to specify custom queries, you can do so here or in a config file.
        # By default, queries listed here will override any specified in a config file.
        # Prefix the list here with "+" to use these queries and those in the config file.
        # queries: ./path/to/local/query, your-org/your-repo/queries@main

    # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
    # If this step fails, then you should remove it and run the build manually (see below)
    - name: Autobuild
      uses: github/codeql-action/autobuild@v1

    # ℹ️ Command-line programs to run using the OS shell.
    # 📚 https://git.io/JvXDl

    # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
    # and modify them (or add more) to build your code if your project
    # uses a compiled language

    #- run: |
    #   make bootstrap
    #   make release

    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@v1
Dockerfile (10 changes)
@@ -1,3 +1,11 @@
+FROM --platform=$BUILDPLATFORM openjdk:8-jre-alpine AS builder
+
+RUN apk update && apk add gradle git && rm -rf /var/lib/apk/* /var/cache/apk/*
+
+WORKDIR /ma1sd
+COPY . .
+RUN ./gradlew shadowJar
+
 FROM openjdk:8-jre-alpine

 RUN apk update && apk add bash && rm -rf /var/lib/apk/* /var/cache/apk/*
@@ -15,4 +23,4 @@ CMD [ "/start.sh" ]

 ADD src/docker/start.sh /start.sh
 ADD src/script/ma1sd /app/ma1sd
-ADD build/libs/ma1sd.jar /app/ma1sd.jar
+COPY --from=builder /ma1sd/build/libs/ma1sd.jar /app/ma1sd.jar
DockerfileX (new file, 16 lines)
@@ -0,0 +1,16 @@
FROM --platform=$BUILDPLATFORM openjdk:11.0.7-jre-slim

VOLUME /etc/ma1sd
VOLUME /var/ma1sd
EXPOSE 8090

ENV JAVA_OPTS=""
ENV CONF_FILE_PATH="/etc/ma1sd/ma1sd.yaml"
ENV SIGN_KEY_PATH="/var/ma1sd/sign.key"
ENV SQLITE_DATABASE_PATH="/var/ma1sd/ma1sd.db"

CMD [ "/start.sh" ]

ADD src/docker/start.sh /start.sh
ADD src/script/ma1sd /app/ma1sd
ADD build/libs/ma1sd.jar /app/ma1sd.jar
build.gradle (95 changes)
@@ -20,7 +20,7 @@

 import java.util.regex.Pattern

-apply plugin: 'java'
+apply plugin: 'java-library'
 apply plugin: 'application'
 apply plugin: 'com.github.johnrengelman.shadow'
 apply plugin: 'idea'
@@ -73,90 +73,94 @@ String gitVersion() {

 buildscript {
     repositories {
         jcenter()
         gradlePluginPortal()
         mavenCentral()
     }

     dependencies {
-        classpath 'com.github.jengelman.gradle.plugins:shadow:5.1.0'
-        classpath 'com.github.ben-manes:gradle-versions-plugin:0.27.0'
+        classpath 'com.github.jengelman.gradle.plugins:shadow:6.1.0'
+        classpath 'com.github.ben-manes:gradle-versions-plugin:0.38.0'
     }
 }

 repositories {
     jcenter()
     mavenCentral()
 }

 dependencies {
     // Logging
-    compile 'org.slf4j:slf4j-simple:1.7.25'
+    api 'org.slf4j:slf4j-simple:1.7.25'

     // Easy file management
-    compile 'commons-io:commons-io:2.6'
+    api 'commons-io:commons-io:2.8.0'

     // Config management
-    compile 'org.yaml:snakeyaml:1.25'
+    api 'org.yaml:snakeyaml:1.28'

     // Dependencies from old Matrix-java-sdk
-    compile 'org.apache.commons:commons-lang3:3.9'
-    compile 'com.squareup.okhttp3:okhttp:4.2.2'
-    compile 'commons-codec:commons-codec:1.13'
+    api 'org.apache.commons:commons-lang3:3.12.0'
+    api 'com.squareup.okhttp3:okhttp:4.2.2'
+    api 'commons-codec:commons-codec:1.15'

     // ORMLite
-    compile 'com.j256.ormlite:ormlite-jdbc:5.1'
+    api 'com.j256.ormlite:ormlite-jdbc:5.3'

     // ed25519 handling
-    compile 'net.i2p.crypto:eddsa:0.3.0'
+    api 'net.i2p.crypto:eddsa:0.3.0'

     // LDAP connector
-    compile 'org.apache.directory.api:api-all:1.0.0'
+    api 'org.apache.directory.api:api-all:1.0.3'

     // DNS lookups
-    compile 'dnsjava:dnsjava:2.1.9'
+    api 'dnsjava:dnsjava:2.1.9'

     // HTTP connections
-    compile 'org.apache.httpcomponents:httpclient:4.5.10'
+    api 'org.apache.httpcomponents:httpclient:4.5.13'

     // Phone numbers validation
-    compile 'com.googlecode.libphonenumber:libphonenumber:8.10.22'
+    api 'com.googlecode.libphonenumber:libphonenumber:8.12.21'

     // E-mail sending
-    compile 'javax.mail:javax.mail-api:1.6.2'
-    compile 'com.sun.mail:javax.mail:1.6.2'
+    api 'javax.mail:javax.mail-api:1.6.2'
+    api 'com.sun.mail:javax.mail:1.6.2'

     // Google Firebase Authentication backend
-    compile 'com.google.firebase:firebase-admin:5.3.0'
+    api 'com.google.firebase:firebase-admin:5.3.0'

     // Connection Pool
-    compile 'com.mchange:c3p0:0.9.5.4'
+    api 'com.mchange:c3p0:0.9.5.5'

     // SQLite
-    compile 'org.xerial:sqlite-jdbc:3.28.0'
+    api 'org.xerial:sqlite-jdbc:3.34.0'

     // PostgreSQL
-    compile 'org.postgresql:postgresql:42.2.8'
+    api 'org.postgresql:postgresql:42.2.19'

     // MariaDB/MySQL
-    compile 'org.mariadb.jdbc:mariadb-java-client:2.5.1'
+    api 'org.mariadb.jdbc:mariadb-java-client:2.7.2'

+    // UNIX sockets
+    api 'com.kohlschutter.junixsocket:junixsocket-core:2.3.3'

     // Twilio SDK for SMS
-    compile 'com.twilio.sdk:twilio:7.45.0'
+    api 'com.twilio.sdk:twilio:7.45.0'

     // SendGrid SDK to send emails from GCE
-    compile 'com.sendgrid:sendgrid-java:2.2.2'
+    api 'com.sendgrid:sendgrid-java:2.2.2'

     // ZT-Exec for exec identity store
-    compile 'org.zeroturnaround:zt-exec:1.11'
+    api 'org.zeroturnaround:zt-exec:1.12'

     // HTTP server
-    compile 'io.undertow:undertow-core:2.0.27.Final'
+    api 'io.undertow:undertow-core:2.2.7.Final'

     // Command parser for AS interface
-    implementation 'commons-cli:commons-cli:1.4'
+    api 'commons-cli:commons-cli:1.4'

-    testCompile 'junit:junit:4.13-rc-1'
-    testCompile 'com.github.tomakehurst:wiremock:2.25.1'
-    testCompile 'com.unboundid:unboundid-ldapsdk:4.0.12'
-    testCompile 'com.icegreen:greenmail:1.5.11'
+    testImplementation 'junit:junit:4.13.2'
+    testImplementation 'com.github.tomakehurst:wiremock:2.27.2'
+    testImplementation 'com.unboundid:unboundid-ldapsdk:4.0.12'
+    testImplementation 'com.icegreen:greenmail:1.5.11'
 }

 jar {
@@ -239,7 +243,7 @@ task debBuild(dependsOn: shadowJar) {

     ant.chmod(
         file: "${debBuildDebianPath}/postinst",
-        perm: 'a+x'
+        perm: '0755'
     )

     ant.chmod(
@@ -264,7 +268,7 @@ task debBuild(dependsOn: shadowJar) {
     }
 }

-task dockerBuild(type: Exec, dependsOn: shadowJar) {
+task dockerBuild(type: Exec) {
     commandLine 'docker', 'build', '-t', dockerImageTag, project.rootDir

     doLast {
@@ -274,6 +278,15 @@ task dockerBuild(type: Exec, dependsOn: shadowJar) {
     }
 }

+task dockerBuildX(type: Exec, dependsOn: shadowJar) {
+    commandLine 'docker', 'buildx', 'build', '--push', '--platform', 'linux/arm64,linux/amd64,linux/arm/v7', '-t', dockerImageTag, project.rootDir
+    doLast {
+        exec {
+            commandLine 'docker', 'buildx', 'build', '--push', '--platform', 'linux/arm64,linux/amd64,linux/arm/v7', '-t', "${dockerImageName}:latest-dev", project.rootDir
+        }
+    }
+}
+
 task dockerPush(type: Exec) {
     commandLine 'docker', 'push', dockerImageTag

@@ -283,3 +296,15 @@ task dockerPush(type: Exec) {
         }
     }
 }
+
+task dockerPushX(type: Exec) {
+    commandLine 'docker', 'push', dockerImageTag
+
+    doLast {
+        exec {
+            commandLine 'docker', 'push', "${dockerImageName}:latest-dev"
+            commandLine 'docker', 'push', "${dockerImageName}:latest-amd64-dev"
+            commandLine 'docker', 'push', "${dockerImageName}:latest-arm64-dev"
+        }
+    }
+}
docs/MSC2140_MSC2134.md (new file, 146 lines)
@@ -0,0 +1,146 @@
# MSC2140

## V1 vs V2
[MSC2140](https://github.com/matrix-org/matrix-doc/pull/2140) introduced the v2 API prefix.

Default values:
```.yaml
matrix:
  v1: true # deprecated
  v2: false
```

To disable an API version, set its value to `false`.

NOTE: v1 is deprecated, so it is recommended to use only v2 and to disable v1 (the default value can be omitted):
```.yaml
matrix:
  v1: false
```
NOTE: Riot Web 1.5.5 and below still checks the v1 API for backward compatibility.

NOTE: v2 is disabled by default in order to preserve backward compatibility.

## Terms

###### Requires: No.

The administrator can omit the terms configuration; in that case the terms check is disabled.

Example:
```.yaml
policy:
  policies:
    term_name: # term name
      version: 1.0 # version
      terms:
        en: # lang
          name: term name en # localized name
          url: https://ma1sd.host.tld/term_en.html # localized url
        fe: # lang
          name: term name fr # localized name
          url: https://ma1sd.host.tld/term_fr.html # localized url
      regexp:
        - '/_matrix/identity/v2/account.*'
        - '/_matrix/identity/v2/hash_details'
        - '/_matrix/identity/v2/lookup'
```
Where:

- `term_name` -- the name of the terms document.
- `version` -- the terms version.
- `lang` -- the language of the terms.
- `name` -- the localized name of the terms.
- `url` -- the localized URL of the terms. This can be any URL (i.e. on another host) pointing to an HTML page.
- `regexp` -- regexp patterns for the APIs that should only be available after the terms have been accepted.

The terms check is applied only to requests that carry authorization. The affected APIs are:
- [`GET /_matrix/identity/v2/account`](https://matrix.org/docs/spec/identity_service/r0.3.0#get-matrix-identity-v2-account) - Gets information about what user owns the access token used in the request.
- [`POST /_matrix/identity/v2/account/logout`](https://matrix.org/docs/spec/identity_service/r0.3.0#post-matrix-identity-v2-account-logout) - Logs out the access token, preventing it from being used to authenticate future requests to the server.
- [`GET /_matrix/identity/v2/hash_details`](https://matrix.org/docs/spec/identity_service/r0.3.0#get-matrix-identity-v2-hash-details) - Gets parameters for hashing identifiers from the server. This can include any of the algorithms defined in this specification.
- [`POST /_matrix/identity/v2/lookup`](https://matrix.org/docs/spec/identity_service/r0.3.0#post-matrix-identity-v2-lookup) - Looks up the set of Matrix User IDs which have bound the 3PIDs given, if bindings are available. Note that the format of the addresses is defined later in this specification.
- [`POST /_matrix/identity/v2/validate/email/requestToken`](https://matrix.org/docs/spec/identity_service/r0.3.0#post-matrix-identity-v2-validate-email-requesttoken) - Create a session for validating an email address.
- [`POST /_matrix/identity/v2/validate/email/submitToken`](https://matrix.org/docs/spec/identity_service/r0.3.0#post-matrix-identity-v2-validate-email-submittoken) - Validate ownership of an email address.
- [`GET /_matrix/identity/v2/validate/email/submitToken`](https://matrix.org/docs/spec/identity_service/r0.3.0#get-matrix-identity-v2-validate-email-submittoken) - Validate ownership of an email address.
- [`POST /_matrix/identity/v2/validate/msisdn/requestToken`](https://matrix.org/docs/spec/identity_service/r0.3.0#post-matrix-identity-v2-validate-msisdn-requesttoken) - Create a session for validating a phone number.
- [`POST /_matrix/identity/v2/validate/msisdn/submitToken`](https://matrix.org/docs/spec/identity_service/r0.3.0#post-matrix-identity-v2-validate-msisdn-submittoken) - Validate ownership of a phone number.
- [`GET /_matrix/identity/v2/validate/msisdn/submitToken`](https://matrix.org/docs/spec/identity_service/r0.3.0#get-matrix-identity-v2-validate-msisdn-submittoken) - Validate ownership of a phone number.
- [`GET /_matrix/identity/v2/3pid/getValidated3pid`](https://matrix.org/docs/spec/identity_service/r0.3.0#get-matrix-identity-v2-3pid-getvalidated3pid) - Determines if a given 3pid has been validated by a user.
- [`POST /_matrix/identity/v2/3pid/bind`](https://matrix.org/docs/spec/identity_service/r0.3.0#post-matrix-identity-v2-3pid-bind) - Publish an association between a session and a Matrix user ID.
- [`POST /_matrix/identity/v2/3pid/unbind`](https://matrix.org/docs/spec/identity_service/r0.3.0#post-matrix-identity-v2-3pid-unbind) - Remove an association between a session and a Matrix user ID.
- [`POST /_matrix/identity/v2/store-invite`](https://matrix.org/docs/spec/identity_service/r0.3.0#post-matrix-identity-v2-store-invite) - Store pending invitations to a user's 3pid.
- [`POST /_matrix/identity/v2/sign-ed25519`](https://matrix.org/docs/spec/identity_service/r0.3.0#post-matrix-identity-v2-sign-ed25519) - Sign invitation details.

The only exception is [`POST /_matrix/identity/v2/terms`](https://matrix.org/docs/spec/identity_service/r0.3.0#post-matrix-identity-v2-terms), which is used to accept the terms: it requires authorization but is not itself gated behind the terms check.
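For illustration only (this is not ma1sd's implementation, and the class and method names below are made up), the gating described above boils down to matching the request path against the configured `regexp` patterns before an authorized request is allowed through:

```java
import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;

public class TermsGateSketch {

    // Patterns taken from the `regexp` list in the example policy above
    private static final List<Pattern> GATED = Arrays.asList(
            Pattern.compile("/_matrix/identity/v2/account.*"),
            Pattern.compile("/_matrix/identity/v2/hash_details"),
            Pattern.compile("/_matrix/identity/v2/lookup"));

    // A request needs the terms to be accepted when its path matches one of the
    // gated patterns and the caller has not yet accepted the configured version.
    static boolean requiresTerms(String path, boolean callerAcceptedTerms) {
        return !callerAcceptedTerms && GATED.stream().anyMatch(p -> p.matcher(path).matches());
    }

    public static void main(String[] args) {
        System.out.println(requiresTerms("/_matrix/identity/v2/lookup", false)); // true
        System.out.println(requiresTerms("/_matrix/identity/v1/lookup", false)); // false, v1 is not gated
    }
}
```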
## [Hash lookup](https://github.com/matrix-org/matrix-doc/blob/hs/hash-identity/proposals/2134-identity-hash-lookup.md)

The hashes and the pepper are rotated together according to the `rotationPolicy`.

###### Requires: No.

When only the `none` algorithm is configured, ma1sd performs lookups via the v1 bulk API.

```.yaml
hashing:
  enabled: true # enable or disable the hash lookup MSC2140 (default is false)
  pepperLength: 20 # length of the pepper value (default is 20)
  rotationPolicy: per_requests # or `per_seconds`; how often the hashes are rotated
  hashStorageType: sql # or `in_memory`; where the hashes are stored
  algorithms:
    - none # the same as the v1 bulk lookup
    - sha256 # hash of the 3PID and the pepper
  delay: 2m # how often hashes are rotated if rotationPolicy = per_seconds (default is 10s)
  requests: 10 # how many lookup requests are served before the hashes are rotated if rotationPolicy = per_requests (default is 10)
```

When hashing is enabled and the client requests the `none` algorithm, the hash lookup behaves like the v1 bulk lookup.
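To make the `sha256` option concrete, here is a minimal sketch of the lookup value a client is expected to send under MSC2134: SHA-256 over the string `"address medium pepper"`, encoded as URL-safe unpadded base64 (the pepper being the value advertised by `/_matrix/identity/v2/hash_details`). The class name is illustrative; this is not ma1sd's internal code.

```java
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.Base64;

public class HashLookupSketch {

    // Lookup value for a single 3PID, e.g. hashOf("alice@example.org", "email", pepper)
    static String hashOf(String address, String medium, String pepper) throws Exception {
        MessageDigest sha256 = MessageDigest.getInstance("SHA-256");
        byte[] digest = sha256.digest((address + " " + medium + " " + pepper).getBytes(StandardCharsets.UTF_8));
        return Base64.getUrlEncoder().withoutPadding().encodeToString(digest);
    }

    public static void main(String[] args) throws Exception {
        // The pepper below is made up; use the one returned by the server
        System.out.println(hashOf("alice@example.org", "email", "matrixrocks"));
    }
}
```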
The delay is specified in the format `2d 4h 12m 34s`, meaning 2 days, 4 hours, 12 minutes and 34 seconds. Zero units may be omitted. For example:

- 12s - 12 seconds
- 3m - 3 minutes
- 5m 6s - 5 minutes and 6 seconds
- 6h 3s - 6 hours and 3 seconds
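A minimal sketch of how such a duration string can be turned into a `java.time.Duration` (illustrative only; this is not ma1sd's parser):

```java
import java.time.Duration;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class DelayFormatSketch {

    private static final Pattern UNIT = Pattern.compile("(\\d+)([dhms])");

    // Accepts values such as "2d 4h 12m 34s", "5m 6s" or "12s"
    static Duration parse(String value) {
        Duration total = Duration.ZERO;
        Matcher m = UNIT.matcher(value);
        while (m.find()) {
            long amount = Long.parseLong(m.group(1));
            switch (m.group(2)) {
                case "d": total = total.plusDays(amount); break;
                case "h": total = total.plusHours(amount); break;
                case "m": total = total.plusMinutes(amount); break;
                default:  total = total.plusSeconds(amount); break;
            }
        }
        return total;
    }

    public static void main(String[] args) {
        System.out.println(parse("2d 4h 12m 34s")); // PT52H12M34S
    }
}
```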

The sha256 algorithm is supported only by the sql, memory and exec 3PID providers.
For a sql provider (for example `synapseSql`):
```.yaml
synapseSql:
  lookup:
    query: 'select user_id as mxid, medium, address from user_threepid_id_server' # query to retrieve the 3PIDs for hashing
```

For the general sql provider:
```.yaml
sql:
  lookup:
    query: 'select user as mxid, field1 as medium, field2 as address from some_table' # query to retrieve the 3PIDs for hashing
```

Each query should return the `mxid`, `medium` and `address` fields.

For the memory provider:
```.yaml
memory:
  hashEnabled: true # enable the hash lookup (default is false)
```

For the exec provider:
```.yaml
exec:
  identity:
    hashEnabled: true # enable the hash lookup (default is false)
```

For the ldap provider:
```.yaml
ldap:
  lookup: true
```

NOTE: Federation requests work only with the `none` algorithm.
@@ -9,6 +9,8 @@
 ## Binaries
 ### Requirements
 - JDK 1.8
+- OpenJDK 11
+- OpenJDK 14

 ### Build
 ```bash
@@ -70,5 +72,13 @@ Then follow the instruction in the [Debian package](install/debian.md) document.
 ```
 Then follow the instructions in the [Docker install](install/docker.md#configure) document.

+### Multi-platform builds
+
+Multi-platform images are provided via the experimental Docker [buildx](https://docs.docker.com/buildx/working-with-buildx/) feature.
+To build the arm64 and amd64 images, run:
+```bash
+./gradlew dockerBuildX
+```
+
 ## Next steps
 - [Integrate with your infrastructure](getting-started.md#integrate)
@@ -48,6 +48,9 @@ Create a list under the label `myOtherServers` containing two Identity servers:
 ## Unbind (MSC1915)
 - `session.policy.unbind.enabled`: Enable or disable unbind functionality (MSC1915). (Defaults to true).

+## Hash lookups, Terms and others (MSC2140, MSC2134)
+See the [dedicated document](MSC2140_MSC2134.md) for configuration.
+
 *Warning*: Unbind checks the incoming request in two ways:
 - session validation;
 - request signature via the `X-Matrix` header, which uses the `server.publicUrl` property to construct the signed JSON.
@@ -55,7 +58,53 @@ Commonly the `server.publicUrl` should be the same value as the `trusted_third_p

## Storage
### SQLite
`storage.provider.sqlite.database`: Absolute location of the SQLite database
```yaml
storage:
  backend: sqlite # default
  provider:
    sqlite:
      database: /var/lib/ma1sd/store.db # Absolute location of the SQLite database
```

### PostgreSQL
```yaml
storage:
  backend: postgresql
  provider:
    postgresql:
      database: //localhost:5432/ma1sd
      username: ma1sd
      password: secret_password
```
See [the migration instructions](migration-to-postgresql.md) for moving from SQLite to PostgreSQL.
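As a quick sanity check of the PostgreSQL settings above, the following sketch opens a connection with the same placeholder values (assumptions: the PostgreSQL JDBC driver is on the classpath, as it is in a ma1sd build, and the host, database and credentials are the examples shown, not real values):

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class StorageConnectionCheck {

    public static void main(String[] args) throws Exception {
        // Mirrors the example configuration: backend postgresql, database //localhost:5432/ma1sd
        String url = "jdbc:postgresql://localhost:5432/ma1sd";
        try (Connection conn = DriverManager.getConnection(url, "ma1sd", "secret_password");
             Statement st = conn.createStatement();
             ResultSet rs = st.executeQuery("SELECT version()")) {
            if (rs.next()) {
                System.out.println("Connected to: " + rs.getString(1));
            }
        }
    }
}
```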

## Logging
```yaml
logging:
  root: error # default level for all loggers (apps and third-party libraries)
  app: info # log level for ma1sd itself
  requests: false # log requests and responses
```

Possible values: `trace`, `debug`, `info`, `warn`, `error`, `off`.

Default value for the root level: `info`.

The app level can be set via the `MA1SD_LOG_LEVEL` environment variable, the configuration, or the start options.

Default value for the app level: `info`.

| start option | equivalent configuration |
| --- | --- |
|  | app: info |
| -v | app: debug |
| -vv | app: trace |

#### WARNING

The setting `logging.requests` *MUST NOT* be used in production: it prints the full, unmasked request and response into the log and can cause a data leak.
It should only be used for testing and for debugging errors.

## Identity stores
See the [Identity stores](stores/README.md) for specific configuration
@@ -56,8 +56,7 @@ Accounts cannot currently migrate/move from one server to another.
 See a [brief explanation document](concepts.md) about Matrix and ma1sd concepts and vocabulary.

 ### I already use the synapse LDAP3 auth provider. Why should I care about ma1sd?
-The [synapse LDAP3 auth provider](https://github.com/matrix-org/matrix-synapse-ldap3) is not longer maintained despite
-saying so and only handles on specific flow: validate credentials at login.
+The [synapse LDAP3 auth provider](https://github.com/matrix-org/matrix-synapse-ldap3) only handles one specific flow: validate credentials at login.

 It does not:
 - Auto-provision user profiles
@@ -1,5 +1,5 @@
 # Identity
-Implementation of the [Identity Service API r0.2.0](https://matrix.org/docs/spec/identity_service/r0.2.0.html).
+Implementation of the [Identity Service API r0.3.0](https://matrix.org/docs/spec/identity_service/r0.3.0.html).

 - [Lookups](#lookups)
 - [Invitations](#invitations)
@@ -121,15 +121,13 @@ server {
 }
 ```

-### Synapse
+### Synapse (Deprecated with synapse v1.4.0)
 Add your ma1sd domain into the `homeserver.yaml` at `trusted_third_party_id_servers` and restart synapse.
 In a typical configuration, you would end up with something similar to:
 ```yaml
 trusted_third_party_id_servers:
   - matrix.example.org
 ```
 It is **highly recommended** to remove `matrix.org` and `vector.im` (or any other default entry) from your configuration
 so only your own Identity server is authoritative for your HS.

 ## Validate (Under reconstruction)
 **NOTE:** In case your homeserver has no working federation, step 5 will not happen. If step 4 took place, consider
docs/migration-to-postgresql.md (new file, 41 lines)
@@ -0,0 +1,41 @@
# Migration from SQLite to PostgreSQL

Starting with version 2.3.0, ma1sd supports PostgreSQL for its internal storage in addition to SQLite (see the `storage.backend` parameter).

#### Migration steps

1. create the PostgreSQL database and user for the ma1sd storage
2. back up the SQLite storage (default location: /var/lib/ma1sd/store.db)
3. migrate the data from SQLite to PostgreSQL
4. change the ma1sd configuration to use PostgreSQL

For the data migration it is possible to use the https://pgloader.io tool.

Example of the migration command:
```shell script
pgloader --with "quote identifiers" /path/to/store.db pgsql://ma1sd_user:ma1sd_password@host:port/database
```
or (short version for a database on localhost):
```shell script
pgloader --with "quote identifiers" /path/to/store.db pgsql://ma1sd_user:ma1sd_password@localhost/ma1sd
```

The option `--with "quote identifiers"` is used to create case-sensitive tables.
- ma1sd_user - the PostgreSQL user for ma1sd
- ma1sd_password - the password of that PostgreSQL user
- host - the PostgreSQL host
- port - the database port (default 5432)
- database - the database name

Configuration example for the PostgreSQL storage:
```yaml
storage:
  backend: postgresql
  provider:
    postgresql:
      database: '//localhost/ma1sd' # or the full variant //192.168.1.100:5432/ma1sd_database
      username: 'ma1sd_user'
      password: 'ma1sd_password'
```
@@ -1,7 +1,10 @@
 # Operations Guide
-- [Overview](#overview)
-- [Maintenance](#maintenance)
-  - [Backuo](#backup)
+- [Operations Guide](#operations-guide)
+  - [Overview](#overview)
+  - [Maintenance](#maintenance)
+    - [Backup](#backup)
+      - [Run](#run)
+      - [Restore](#restore)

 ## Overview
 This document gives various information for the day-to-day management and operations of ma1sd.
@@ -89,7 +89,7 @@ ldap:
 #### 3PIDs
 You can also change the attribute lists for 3PID, like email or phone numbers.

-The following example would overwrite the [default list of attributes](../../src/main/java/io/kamax/ma1sd/config/ldap/LdapConfig.java#L64)
+The following example would overwrite the [default list of attributes](../../src/main/java/io/kamax/mxisd/config/ldap/LdapConfig.java#L64)
 for emails and phone number:
 ```yaml
 ldap:
@@ -136,7 +136,7 @@ sql:

 ```
 For the `role` query, `type` can be used to tell ma1sd how to inject the User ID in the query:
-- `localpart` will extract and set only the localpart.
+- `uid` will extract and set only the localpart.
 - `mxid` will use the ID as-is.

 On each query, the first parameter `?` is set as a string with the corresponding ID format.
@@ -103,8 +103,8 @@ session:
   validation:
     enabled: true
   unbind:
-    notification:
-      enabled: true
+    notifications: true
     enabled: true

 # DO NOT COPY/PASTE AS-IS IN YOUR CONFIGURATION
 # CONFIGURATION EXAMPLE
@@ -115,7 +115,7 @@ are allowed to do in terms of 3PID sessions. The policy has a global on/off switch

 ---

-`unbind` controls warning notifications for 3PID removal.
+`unbind` controls warning notifications for 3PID removal. Setting `notifications` for `unbind` to false will prevent unbind emails from sending.

 ### Web views
 Once a user click on a validation link, it is taken to the Identity Server validation page where the token is submitted.
gradle/wrapper/gradle-wrapper.properties (4 changes, vendored)
@@ -1,5 +1,5 @@
-#Thu Jul 04 22:47:59 MSK 2019
-distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip
+#Thu Dec 05 22:39:36 MSK 2019
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.0-all.zip
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
 zipStorePath=wrapper/dists
gradlew (6 changes, vendored)
@@ -7,7 +7,7 @@
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
 #
-#      http://www.apache.org/licenses/LICENSE-2.0
+#      https://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
@@ -125,8 +125,8 @@ if $darwin; then
     GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
 fi

-# For Cygwin, switch paths to Windows format before running java
-if $cygwin ; then
+# For Cygwin or MSYS, switch paths to Windows format before running java
+if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
     APP_HOME=`cygpath --path --mixed "$APP_HOME"`
     CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
     JAVACMD=`cygpath --unix "$JAVACMD"`
gradlew.bat (2 changes, vendored)
@@ -5,7 +5,7 @@
 @rem you may not use this file except in compliance with the License.
 @rem You may obtain a copy of the License at
 @rem
-@rem     http://www.apache.org/licenses/LICENSE-2.0
+@rem     https://www.apache.org/licenses/LICENSE-2.0
 @rem
 @rem Unless required by applicable law or agreed to in writing, software
 @rem distributed under the License is distributed on an "AS IS" BASIS,
@@ -21,6 +21,8 @@
 #
 matrix:
   domain: ''
+  v1: true # deprecated
+  v2: true # MSC2140 API v2. Riot requires the V2 API to be enabled.


 ################
@@ -49,10 +51,39 @@ key:
# - /var/lib/ma1sd/store.db
#
storage:
  # backend: sqlite # or postgresql
  provider:
    sqlite:
      database: '/path/to/ma1sd.db'

#    postgresql:
#      # Wrap all string values with quotes to avoid yaml parsing mistakes
#      database: '//localhost/ma1sd' # or full variant //192.168.1.100:5432/ma1sd_database
#      username: 'ma1sd_user'
#      password: 'ma1sd_password'
#
#      # Pool configuration for postgresql backend.
#      #######
#      # Enable or disable pooling
#      pool: false
#
#      #######
#      # Check database connection before get from pool
#      testBeforeGetFromPool: false # or true
#
#      #######
#      # There is an internal thread which checks each of the database connections as a keep-alive mechanism. This set the
#      # number of milliseconds it sleeps between checks -- default is 30000. To disable the checking thread, set this to
#      # 0 before you start using the connection source.
#      checkConnectionsEveryMillis: 30000
#
#      #######
#      # Set the number of connections that can be unused in the available list.
#      maxConnectionsFree: 5
#
#      #######
#      # Set the number of milliseconds that a connection can stay open before being closed. Set to 9223372036854775807 to have
#      # the connections never expire.
#      maxConnectionAgeMillis: 3600000

###################
# Identity Stores #
@@ -109,3 +140,80 @@ threepid:

    # Password for the account
    password: "ThePassword"


#### MSC2134 (hash lookup)

#hashing:
#  enabled: false # enable or disable the hash lookup MSC2140 (default is false)
#  pepperLength: 20 # length of the pepper value (default is 20)
#  rotationPolicy: per_requests # or `per_seconds` how often the hashes will be updated
#  hashStorageType: sql # or `in_memory` where the hashes will be stored
#  algorithms:
#    - none # the same as v1 bulk lookup
#    - sha256 # hash the 3PID and pepper.
#  delay: 2m # how often hashes will be updated if rotation policy = per_seconds (default is 10s)
#  requests: 10 # how many lookup requests will be performed before updating hashes if rotation policy = per_requests (default is 10)

### hash lookup for synapseSql provider.
# synapseSql:
#   lookup:
#     query: 'select user_id as mxid, medium, address from user_threepid_id_server' # query for retrieving 3PIDs for hashes.
#   legacyRoomNames: false # use the old query to get room names.

### hash lookup for ldap provider (with example of the ldap configuration)
# ldap:
#   enabled: true
#   lookup: true # hash lookup
#   activeDirectory: false
#   defaultDomain: ''
#   connection:
#     host: 'ldap.domain.tld'
#     port: 389
#     bindDn: 'cn=admin,dc=domain,dc=tld'
#     bindPassword: 'Secret'
#     baseDNs:
#       - 'dc=domain,dc=tld'
#   attribute:
#     uid:
#       type: 'uid' # or mxid
#       value: 'cn'
#     name: 'displayName'
#   identity:
#     filter: '(objectClass=inetOrgPerson)'

#### MSC2140 (Terms)
#policy:
#  policies:
#    term_name: # term name
#      version: 1.0 # version
#      terms:
#        en: # lang
#          name: term name en # localized name
#          url: https://ma1sd.host.tld/term_en.html # localized url
#        fe: # lang
#          name: term name fr # localized name
#          url: https://ma1sd.host.tld/term_fr.html # localized url
#      regexp:
#        - '/_matrix/identity/v2/account.*'
#        - '/_matrix/identity/v2/hash_details'
#        - '/_matrix/identity/v2/lookup'
#

# logging:
#   root: error # default level for all loggers (apps and thirdparty libraries)
#   app: info # log level only for the ma1sd
#   requests: false # or true to dump full requests and responses


# Config invitation manager
#invite:
#  fullDisplayName: true # print full name of the invited user (default false)
#  resolution:
#    timer: 10
#    period: seconds # search invites every 10 seconds (by default 5 minutes)


# Internal API
#internal:
#  enabled: true # default to false
@@ -53,7 +53,7 @@ public class MatrixPath {
     }

     public static MatrixPath clientR0() {
-        return client().add("r0");
+        return client().add("v3");
     }

     private StringBuilder path = new StringBuilder();
@@ -27,7 +27,7 @@ public class ThreePid implements _ThreePid {

     public ThreePid(String medium, String address) {
         this.medium = medium;
-        this.address = address;
+        this.address = address.toLowerCase();
     }

     @Override
@@ -375,13 +375,13 @@ public abstract class AMatrixHttpClient implements _MatrixClientRaw {
     }

     protected HttpUrl.Builder getClientPathBuilder(String... segments) {
-        String[] base = { "client", "r0" };
+        String[] base = { "client", "v3" };
         segments = ArrayUtils.addAll(base, segments);
         return getPathBuilder(segments);
     }

     protected HttpUrl.Builder getMediaPathBuilder(String... segments) {
-        String[] base = { "media", "r0" };
+        String[] base = { "media", "v3" };
         segments = ArrayUtils.addAll(base, segments);
         return getPathBuilder(segments);
     }
@@ -1,47 +0,0 @@
/*
 * matrix-java-sdk - Matrix Client SDK for Java
 * Copyright (C) 2017 Kamax Sarl
 *
 * https://www.kamax.io/
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

package io.kamax.matrix.codec;

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class MxSha256 {

    private MessageDigest md;

    public MxSha256() {
        try {
            md = MessageDigest.getInstance("SHA-256");
        } catch (NoSuchAlgorithmException e) {
            throw new RuntimeException(e);
        }
    }

    public String hash(byte[] data) {
        return MxBase64.encode(md.digest(data));
    }

    public String hash(String data) {
        return hash(data.getBytes(StandardCharsets.UTF_8));
    }

}
@@ -23,11 +23,13 @@ package io.kamax.mxisd;
 import io.kamax.mxisd.config.MatrixConfig;
 import io.kamax.mxisd.config.MxisdConfig;
 import io.kamax.mxisd.config.PolicyConfig;
+import io.kamax.mxisd.config.ServerConfig;
 import io.kamax.mxisd.http.undertow.handler.ApiHandler;
 import io.kamax.mxisd.http.undertow.handler.AuthorizationHandler;
 import io.kamax.mxisd.http.undertow.handler.CheckTermsHandler;
 import io.kamax.mxisd.http.undertow.handler.InternalInfoHandler;
 import io.kamax.mxisd.http.undertow.handler.OptionsHandler;
+import io.kamax.mxisd.http.undertow.handler.RequestDumpingHandler;
 import io.kamax.mxisd.http.undertow.handler.SaneHandler;
 import io.kamax.mxisd.http.undertow.handler.as.v1.AsNotFoundHandler;
 import io.kamax.mxisd.http.undertow.handler.as.v1.AsTransactionHandler;
@@ -52,9 +54,11 @@ import io.kamax.mxisd.http.undertow.handler.identity.share.SessionValidationGetH
 import io.kamax.mxisd.http.undertow.handler.identity.share.SessionValidationPostHandler;
 import io.kamax.mxisd.http.undertow.handler.identity.share.SignEd25519Handler;
 import io.kamax.mxisd.http.undertow.handler.identity.share.StoreInviteHandler;
-import io.kamax.mxisd.http.undertow.handler.identity.v1.*;
+import io.kamax.mxisd.http.undertow.handler.identity.v1.BulkLookupHandler;
+import io.kamax.mxisd.http.undertow.handler.identity.v1.SingleLookupHandler;
 import io.kamax.mxisd.http.undertow.handler.identity.v2.HashDetailsHandler;
 import io.kamax.mxisd.http.undertow.handler.identity.v2.HashLookupHandler;
+import io.kamax.mxisd.http.undertow.handler.internal.InternalInviteManagerHandler;
 import io.kamax.mxisd.http.undertow.handler.invite.v1.RoomInviteHandler;
 import io.kamax.mxisd.http.undertow.handler.profile.v1.InternalProfileHandler;
 import io.kamax.mxisd.http.undertow.handler.profile.v1.ProfileHandler;
@@ -98,42 +102,35 @@ public class HttpMxisd {
     public void start() {
         m.start();

-        HttpHandler asUserHandler = SaneHandler.around(new AsUserHandler(m.getAs()));
-        HttpHandler asTxnHandler = SaneHandler.around(new AsTransactionHandler(m.getAs()));
-        HttpHandler asNotFoundHandler = SaneHandler.around(new AsNotFoundHandler(m.getAs()));
+        HttpHandler asUserHandler = sane(new AsUserHandler(m.getAs()));
+        HttpHandler asTxnHandler = sane(new AsTransactionHandler(m.getAs()));
+        HttpHandler asNotFoundHandler = sane(new AsNotFoundHandler(m.getAs()));

         final RoutingHandler handler = Handlers.routing()
-                .add("OPTIONS", "/**", SaneHandler.around(new OptionsHandler()))
+                .add("OPTIONS", "/**", sane(new OptionsHandler()))

                 // Status endpoints
-                .get(StatusHandler.Path, SaneHandler.around(new StatusHandler()))
-                .get(VersionHandler.Path, SaneHandler.around(new VersionHandler()))
+                .get(StatusHandler.Path, sane(new StatusHandler()))
+                .get(VersionHandler.Path, sane(new VersionHandler()))

                 // Authentication endpoints
-                .get(LoginHandler.Path, SaneHandler.around(new LoginGetHandler(m.getAuth(), m.getHttpClient())))
-                .post(LoginHandler.Path, SaneHandler.around(new LoginPostHandler(m.getAuth())))
-                .post(RestAuthHandler.Path, SaneHandler.around(new RestAuthHandler(m.getAuth())))
-
-                // Account endpoints
-                .post(AccountRegisterHandler.Path, SaneHandler.around(new AccountRegisterHandler(m.getAccMgr())))
-                .get(AccountGetUserInfoHandler.Path,
-                        SaneHandler.around(AuthorizationHandler.around(m.getAccMgr(), new AccountGetUserInfoHandler(m.getAccMgr()))))
-                .post(AccountLogoutHandler.Path,
-                        SaneHandler.around(AuthorizationHandler.around(m.getAccMgr(), new AccountLogoutHandler(m.getAccMgr()))))
+                .get(LoginHandler.Path, sane(new LoginGetHandler(m.getAuth(), m.getHttpClient())))
+                .post(LoginHandler.Path, sane(new LoginPostHandler(m.getAuth())))
+                .post(RestAuthHandler.Path, sane(new RestAuthHandler(m.getAuth())))

                 // Directory endpoints
-                .post(UserDirectorySearchHandler.Path, SaneHandler.around(new UserDirectorySearchHandler(m.getDirectory())))
+                .post(UserDirectorySearchHandler.Path, sane(new UserDirectorySearchHandler(m.getDirectory())))

                 // Profile endpoints
-                .get(ProfileHandler.Path, SaneHandler.around(new ProfileHandler(m.getProfile())))
-                .get(InternalProfileHandler.Path, SaneHandler.around(new InternalProfileHandler(m.getProfile())))
+                .get(ProfileHandler.Path, sane(new ProfileHandler(m.getProfile())))
+                .get(InternalProfileHandler.Path, sane(new InternalProfileHandler(m.getProfile())))

                 // Registration endpoints
                 .post(Register3pidRequestTokenHandler.Path,
-                        SaneHandler.around(new Register3pidRequestTokenHandler(m.getReg(), m.getClientDns(), m.getHttpClient())))
+                        sane(new Register3pidRequestTokenHandler(m.getReg(), m.getClientDns(), m.getHttpClient())))

                 // Invite endpoints
-                .post(RoomInviteHandler.Path, SaneHandler.around(new RoomInviteHandler(m.getHttpClient(), m.getClientDns(), m.getInvite())))
+                .post(RoomInviteHandler.Path, sane(new RoomInviteHandler(m.getHttpClient(), m.getClientDns(), m.getInvite())))

                 // Application Service endpoints
                 .get(AsUserHandler.Path, asUserHandler)
@@ -145,12 +142,19 @@ public class HttpMxisd {
                 .put("/transactions/{" + AsTransactionHandler.ID + "}", asTxnHandler) // Legacy endpoint

                 // Banned endpoints
-                .get(InternalInfoHandler.Path, SaneHandler.around(new InternalInfoHandler()));
+                .get(InternalInfoHandler.Path, sane(new InternalInfoHandler()));
         keyEndpoints(handler);
         identityEndpoints(handler);
         termsEndpoints(handler);
         hashEndpoints(handler);
-        httpSrv = Undertow.builder().addHttpListener(m.getConfig().getServer().getPort(), "0.0.0.0").setHandler(handler).build();
+        accountEndpoints(handler);
+
+        if (m.getConfig().getInternal().isEnabled()) {
+            handler.get(InternalInviteManagerHandler.PATH, new InternalInviteManagerHandler(m.getInvite()));
+        }
+
+        ServerConfig serverConfig = m.getConfig().getServer();
+        httpSrv = Undertow.builder().addHttpListener(serverConfig.getPort(), serverConfig.getHostname()).setHandler(handler).build();

         httpSrv.start();
     }
@@ -193,17 +197,33 @@ public class HttpMxisd {
         );
     }

+    private void accountEndpoints(RoutingHandler routingHandler) {
+        MatrixConfig matrixConfig = m.getConfig().getMatrix();
+        if (matrixConfig.isV2()) {
+            routingHandler.post(AccountRegisterHandler.Path, sane(new AccountRegisterHandler(m.getAccMgr())));
+            wrapWithTokenAndAuthorizationHandlers(routingHandler, Methods.GET, new AccountGetUserInfoHandler(m.getAccMgr()),
+                    AccountGetUserInfoHandler.Path, true);
+            wrapWithTokenAndAuthorizationHandlers(routingHandler, Methods.GET, new AccountLogoutHandler(m.getAccMgr()),
+                    AccountLogoutHandler.Path, true);
+        }
+    }
+
     private void termsEndpoints(RoutingHandler routingHandler) {
-        routingHandler.get(GetTermsHandler.PATH, new GetTermsHandler(m.getConfig().getPolicy()));
-        routingHandler
-                .post(AcceptTermsHandler.PATH, AuthorizationHandler.around(m.getAccMgr(), sane(new AcceptTermsHandler(m.getAccMgr()))));
+        MatrixConfig matrixConfig = m.getConfig().getMatrix();
+        if (matrixConfig.isV2()) {
+            routingHandler.get(GetTermsHandler.PATH, sane(new GetTermsHandler(m.getConfig().getPolicy())));
+            routingHandler.post(AcceptTermsHandler.PATH, sane(new AcceptTermsHandler(m.getAccMgr())));
+        }
     }

     private void hashEndpoints(RoutingHandler routingHandler) {
-        routingHandler
-                .get(HashDetailsHandler.PATH, AuthorizationHandler.around(m.getAccMgr(), sane(new HashDetailsHandler(m.getHashManager()))));
-        routingHandler.post(HashLookupHandler.Path,
-                AuthorizationHandler.around(m.getAccMgr(), sane(new HashLookupHandler(m.getIdentity(), m.getHashManager()))));
+        MatrixConfig matrixConfig = m.getConfig().getMatrix();
+        if (matrixConfig.isV2()) {
+            wrapWithTokenAndAuthorizationHandlers(routingHandler, Methods.GET, new HashDetailsHandler(m.getHashManager()),
+                    HashDetailsHandler.PATH, true);
+            wrapWithTokenAndAuthorizationHandlers(routingHandler, Methods.POST,
+                    new HashLookupHandler(m.getIdentity(), m.getHashManager()), HashLookupHandler.Path, true);
+        }
     }

     private void addEndpoints(RoutingHandler routingHandler, HttpString method, boolean useAuthorization, ApiHandler... handlers) {
@@ -216,23 +236,35 @@ public class HttpMxisd {
                               HttpHandler httpHandler) {
         MatrixConfig matrixConfig = m.getConfig().getMatrix();
         if (matrixConfig.isV1()) {
-            routingHandler.add(method, apiHandler.getPath(IdentityServiceAPI.V1), httpHandler);
+            routingHandler.add(method, apiHandler.getPath(IdentityServiceAPI.V1), sane(httpHandler));
         }
         if (matrixConfig.isV2()) {
-            HttpHandler handlerWithTerms = CheckTermsHandler.around(m.getAccMgr(), httpHandler, getPolicyObjects(apiHandler));
-            HttpHandler wrappedHandler = useAuthorization ? AuthorizationHandler.around(m.getAccMgr(), handlerWithTerms) : handlerWithTerms;
-            routingHandler.add(method, apiHandler.getPath(IdentityServiceAPI.V2), wrappedHandler);
+            wrapWithTokenAndAuthorizationHandlers(routingHandler, method, httpHandler, apiHandler.getPath(IdentityServiceAPI.V2),
+                    useAuthorization);
         }
     }

+    private void wrapWithTokenAndAuthorizationHandlers(RoutingHandler routingHandler, HttpString method, HttpHandler httpHandler,
+                                                       String url, boolean useAuthorization) {
+        List<PolicyConfig.PolicyObject> policyObjects = getPolicyObjects(url);
+        HttpHandler wrappedHandler;
+        if (useAuthorization) {
+            wrappedHandler = policyObjects.isEmpty() ? httpHandler : CheckTermsHandler.around(m.getAccMgr(), httpHandler, policyObjects);
+            wrappedHandler = AuthorizationHandler.around(m.getAccMgr(), wrappedHandler);
+        } else {
+            wrappedHandler = httpHandler;
+        }
+        routingHandler.add(method, url, sane(wrappedHandler));
+    }
+
     @NotNull
-    private List<PolicyConfig.PolicyObject> getPolicyObjects(ApiHandler apiHandler) {
+    private List<PolicyConfig.PolicyObject> getPolicyObjects(String url) {
         PolicyConfig policyConfig = m.getConfig().getPolicy();
         List<PolicyConfig.PolicyObject> policies = new ArrayList<>();
         if (!policyConfig.getPolicies().isEmpty()) {
             for (PolicyConfig.PolicyObject policy : policyConfig.getPolicies().values()) {
                 for (Pattern pattern : policy.getPatterns()) {
-                    if (pattern.matcher(apiHandler.getHandlerPath()).matches()) {
+                    if (pattern.matcher(url).matches()) {
                         policies.add(policy);
                     }
                 }
@@ -242,6 +274,11 @@ public class HttpMxisd {
     }

     private HttpHandler sane(HttpHandler httpHandler) {
-        return SaneHandler.around(httpHandler);
+        SaneHandler handler = SaneHandler.around(httpHandler);
+        if (m.getConfig().getLogging().isRequests()) {
+            return new RequestDumpingHandler(handler);
+        } else {
+            return handler;
+        }
     }
 }
@@ -27,6 +27,7 @@ import io.kamax.mxisd.auth.AuthProviders;
|
||||
import io.kamax.mxisd.backend.IdentityStoreSupplier;
|
||||
import io.kamax.mxisd.backend.sql.synapse.Synapse;
|
||||
import io.kamax.mxisd.config.MxisdConfig;
|
||||
import io.kamax.mxisd.config.StorageConfig;
|
||||
import io.kamax.mxisd.crypto.CryptoFactory;
|
||||
import io.kamax.mxisd.crypto.KeyManager;
|
||||
import io.kamax.mxisd.crypto.SignatureManager;
|
||||
@@ -66,7 +67,7 @@ public class Mxisd {
|
||||
public static final String Version = StringUtils.defaultIfBlank(Mxisd.class.getPackage().getImplementationVersion(), "UNKNOWN");
|
||||
public static final String Agent = Name + "/" + Version;
|
||||
|
||||
private MxisdConfig cfg;
|
||||
private final MxisdConfig cfg;
|
||||
|
||||
private CloseableHttpClient httpClient;
|
||||
private IRemoteIdentityServerFetcher srvFetcher;
|
||||
@@ -109,7 +110,10 @@ public class Mxisd {
|
||||
IdentityServerUtils.setHttpClient(httpClient);
|
||||
srvFetcher = new RemoteIdentityServerFetcher(httpClient);
|
||||
|
||||
store = new OrmLiteSqlStorage(cfg);
|
||||
StorageConfig.BackendEnum storageBackend = cfg.getStorage().getBackend();
|
||||
StorageConfig.Provider storageProvider = cfg.getStorage().getProvider();
|
||||
store = new OrmLiteSqlStorage(storageBackend, storageProvider);
|
||||
|
||||
keyMgr = CryptoFactory.getKeyManager(cfg.getKey());
|
||||
signMgr = CryptoFactory.getSignatureManager(cfg, keyMgr);
|
||||
clientDns = new ClientDnsOverwrite(cfg.getDns().getOverwrite());
|
||||
@@ -125,13 +129,13 @@ public class Mxisd {
|
||||
idStrategy = new RecursivePriorityLookupStrategy(cfg.getLookup(), ThreePidProviders.get(), bridgeFetcher, hashManager);
|
||||
pMgr = new ProfileManager(ProfileProviders.get(), clientDns, httpClient);
|
||||
notifMgr = new NotificationManager(cfg.getNotification(), NotificationHandlers.get());
|
||||
sessMgr = new SessionManager(cfg, store, notifMgr, resolver, httpClient, signMgr);
|
||||
sessMgr = new SessionManager(cfg, store, notifMgr, resolver, signMgr);
|
||||
invMgr = new InvitationManager(cfg, store, idStrategy, keyMgr, signMgr, resolver, notifMgr, pMgr);
|
||||
authMgr = new AuthManager(cfg, AuthProviders.get(), idStrategy, invMgr, clientDns, httpClient);
|
||||
dirMgr = new DirectoryManager(cfg.getDirectory(), clientDns, httpClient, DirectoryProviders.get());
|
||||
regMgr = new RegistrationManager(cfg.getRegister(), httpClient, clientDns, invMgr);
|
||||
asHander = new AppSvcManager(this);
|
||||
accMgr = new AccountManager(store, resolver, getHttpClient(), cfg.getAccountConfig(), cfg.getMatrix());
|
||||
accMgr = new AccountManager(store, resolver, cfg.getAccountConfig(), cfg.getMatrix());
|
||||
}
|
||||
|
||||
public MxisdConfig getConfig() {
|
||||
|
@@ -44,31 +44,46 @@ public class MxisdStandaloneExec {
         try {
             MxisdConfig cfg = null;
             Iterator<String> argsIt = Arrays.asList(args).iterator();
+            boolean dump = false;
+            boolean exit = false;
             while (argsIt.hasNext()) {
                 String arg = argsIt.next();
-                if (StringUtils.equalsAny(arg, "-h", "--help", "-?", "--usage")) {
-                    System.out.println("Available arguments:" + System.lineSeparator());
-                    System.out.println("  -h, --help       Show this help message");
-                    System.out.println("  --version        Print the version then exit");
-                    System.out.println("  -c, --config     Set the configuration file location");
-                    System.out.println("  -v               Increase log level (log more info)");
-                    System.out.println("  -vv              Further increase log level");
-                    System.out.println("  ");
-                    System.exit(0);
-                } else if (StringUtils.equals(arg, "-v")) {
-                    System.setProperty("org.slf4j.simpleLogger.log.io.kamax.mxisd", "debug");
-                } else if (StringUtils.equals(arg, "-vv")) {
-                    System.setProperty("org.slf4j.simpleLogger.log.io.kamax.mxisd", "trace");
-                } else if (StringUtils.equalsAny(arg, "-c", "--config")) {
-                    String cfgFile = argsIt.next();
-                    cfg = YamlConfigLoader.loadFromFile(cfgFile);
-                } else if (StringUtils.equals("--version", arg)) {
-                    System.out.println(Mxisd.Version);
-                    System.exit(0);
-                } else {
-                    System.err.println("Invalid argument: " + arg);
-                    System.err.println("Try '--help' for available arguments");
-                    System.exit(1);
+                switch (arg) {
+                    case "-h":
+                    case "--help":
+                    case "-?":
+                    case "--usage":
+                        System.out.println("Available arguments:" + System.lineSeparator());
+                        System.out.println("  -h, --help       Show this help message");
+                        System.out.println("  --version        Print the version then exit");
+                        System.out.println("  -c, --config     Set the configuration file location");
+                        System.out.println("  -v               Increase log level (log more info)");
+                        System.out.println("  -vv              Further increase log level");
+                        System.out.println("  --dump           Dump the full ma1sd configuration");
+                        System.out.println("  --dump-and-exit  Dump the full ma1sd configuration and exit");
+                        System.out.println("  ");
+                        System.exit(0);
+                        return;
+                    case "-v":
+                        System.setProperty("org.slf4j.simpleLogger.log.io.kamax.mxisd", "debug");
+                        break;
+                    case "-vv":
+                        System.setProperty("org.slf4j.simpleLogger.log.io.kamax.mxisd", "trace");
+                        break;
+                    case "-c":
+                    case "--config":
+                        String cfgFile = argsIt.next();
+                        cfg = YamlConfigLoader.loadFromFile(cfgFile);
+                        break;
+                    case "--dump-and-exit":
+                        exit = true;
+                    case "--dump":
+                        dump = true;
+                        break;
+                    default:
+                        System.err.println("Invalid argument: " + arg);
+                        System.err.println("Try '--help' for available arguments");
+                        System.exit(1);
                 }
             }

@@ -76,6 +91,13 @@ public class MxisdStandaloneExec {
                 cfg = YamlConfigLoader.tryLoadFromFile("ma1sd.yaml").orElseGet(MxisdConfig::new);
             }

+            if (dump) {
+                YamlConfigLoader.dumpConfig(cfg);
+                if (exit) {
+                    System.exit(0);
+                }
+            }
+
             log.info("ma1sd starting");
             log.info("Version: {}", Mxisd.Version);
@@ -144,7 +144,13 @@ public class MembershipEventProcessor implements EventTypeProcessor {
                 .collect(Collectors.toList());
         log.info("Found {} email(s) in identity store for {}", tpids.size(), inviteeId);

-        for (_ThreePid tpid : tpids) {
+        log.info("Removing duplicates from identity store");
+        List<_ThreePid> uniqueTpids = tpids.stream()
+                .distinct()
+                .collect(Collectors.toList());
+        log.info("There are {} unique email(s) in identity store for {}", uniqueTpids.size(), inviteeId);
+
+        for (_ThreePid tpid : uniqueTpids) {
             log.info("Found Email to notify about room invitation: {}", tpid.getAddress());
             Map<String, String> properties = new HashMap<>();
             profiler.getDisplayName(sender).ifPresent(name -> properties.put("sender_display_name", name));
@@ -9,14 +9,15 @@ import io.kamax.mxisd.config.PolicyConfig;
import io.kamax.mxisd.exception.BadRequestException;
import io.kamax.mxisd.exception.InvalidCredentialsException;
import io.kamax.mxisd.exception.NotFoundException;
import io.kamax.mxisd.http.undertow.handler.auth.v1.LoginGetHandler;
import io.kamax.mxisd.matrix.HomeserverFederationResolver;
import io.kamax.mxisd.matrix.HomeserverVerifier;
import io.kamax.mxisd.storage.IStorage;
import io.kamax.mxisd.storage.ormlite.dao.AccountDao;
import org.apache.http.HttpStatus;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -33,15 +34,12 @@ public class AccountManager {

private final IStorage storage;
private final HomeserverFederationResolver resolver;
private final CloseableHttpClient httpClient;
private final AccountConfig accountConfig;
private final MatrixConfig matrixConfig;

public AccountManager(IStorage storage, HomeserverFederationResolver resolver,
CloseableHttpClient httpClient, AccountConfig accountConfig, MatrixConfig matrixConfig) {
public AccountManager(IStorage storage, HomeserverFederationResolver resolver, AccountConfig accountConfig, MatrixConfig matrixConfig) {
this.storage = storage;
this.resolver = resolver;
this.httpClient = httpClient;
this.accountConfig = accountConfig;
this.matrixConfig = matrixConfig;
}

@@ -57,7 +55,7 @@ public class AccountManager {

String token = UUID.randomUUID().toString();
AccountDao account = new AccountDao(openIdToken.getAccessToken(), openIdToken.getTokenType(),
openIdToken.getMatrixServerName(), openIdToken.getExpiredIn(),
openIdToken.getMatrixServerName(), openIdToken.getExpiresIn(),
Instant.now().getEpochSecond(), userId, token);
storage.insertToken(account);

@@ -67,24 +65,32 @@
}

private String getUserId(OpenIdToken openIdToken) {
String homeserverURL = resolver.resolve(openIdToken.getMatrixServerName()).toString();
LOGGER.info("Domain resolved: {} => {}", openIdToken.getMatrixServerName(), homeserverURL);
String matrixServerName = openIdToken.getMatrixServerName();
HomeserverFederationResolver.HomeserverTarget homeserverTarget = resolver.resolve(matrixServerName);
String homeserverURL = homeserverTarget.getUrl().toString();
LOGGER.info("Domain resolved: {} => {}", matrixServerName, homeserverURL);
HttpGet getUserInfo = new HttpGet(
homeserverURL + "/_matrix/federation/v1/openid/userinfo?access_token=" + openIdToken.getAccessToken());
String userId;
try (CloseableHttpResponse response = httpClient.execute(getUserInfo)) {
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode == HttpStatus.SC_OK) {
String content = EntityUtils.toString(response.getEntity());
LOGGER.trace("Response: {}", content);
JsonObject body = GsonUtil.parseObj(content);
userId = GsonUtil.getStringOrThrow(body, "sub");
} else {
LOGGER.error("Wrong response status: {}", statusCode);
try (CloseableHttpClient httpClient = HttpClients.custom()
.setSSLHostnameVerifier(new HomeserverVerifier(homeserverTarget.getDomain())).build()) {
try (CloseableHttpResponse response = httpClient.execute(getUserInfo)) {
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode == HttpStatus.SC_OK) {
String content = EntityUtils.toString(response.getEntity());
LOGGER.trace("Response: {}", content);
JsonObject body = GsonUtil.parseObj(content);
userId = GsonUtil.getStringOrThrow(body, "sub");
} else {
LOGGER.error("Wrong response status: {}", statusCode);
throw new InvalidCredentialsException();
}
} catch (IOException e) {
LOGGER.error("Unable to get user info.", e);
throw new InvalidCredentialsException();
}
} catch (IOException e) {
LOGGER.error("Unable to get user info.", e);
LOGGER.error("Unable to create a connection to host: " + homeserverURL, e);
throw new InvalidCredentialsException();
}
@@ -140,7 +140,7 @@ public class AuthManager {
}

try {
MatrixID.asValid(mxId);
MatrixID.asAcceptable(mxId);
} catch (IllegalArgumentException e) {
log.warn("The returned User ID {} is not a valid Matrix ID. Login might fail at the Homeserver level", mxId);
}
@@ -1,14 +1,20 @@
package io.kamax.mxisd.auth;

import com.google.gson.annotations.SerializedName;

public class OpenIdToken {

@SerializedName("access_token")
private String accessToken;

@SerializedName("token_type")
private String tokenType;

@SerializedName("matrix_server_name")
private String matrixServerName;

private long expiredIn;
@SerializedName("expires_in")
private long expiresIn;

public String getAccessToken() {
return accessToken;

@@ -34,11 +40,11 @@ public class OpenIdToken {
this.matrixServerName = matrixServerName;
}

public long getExpiredIn() {
return expiredIn;
public long getExpiresIn() {
return expiresIn;
}

public void setExpiredIn(long expiredIn) {
this.expiredIn = expiredIn;
public void setExpiresIn(long expiresIn) {
this.expiresIn = expiresIn;
}
}
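A hedged illustration of what the expiredIn/expiresIn rename changes (the JSON values below are made up): the Gson @SerializedName annotations bind the snake_case fields of the homeserver's OpenID token, so expires_in now actually populates the field, whereas the old unannotated expiredIn presumably never matched it.
// Illustrative only, not part of the diff:
OpenIdToken token = new com.google.gson.Gson().fromJson(
        "{\"access_token\":\"abc\",\"token_type\":\"Bearer\",\"matrix_server_name\":\"example.org\",\"expires_in\":3600}",
        OpenIdToken.class);
long ttl = token.getExpiresIn(); // 3600, thanks to @SerializedName("expires_in")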
@@ -173,6 +173,10 @@ public class ExecIdentityStore extends ExecStore implements IThreePidProvider {

@Override
public Iterable<ThreePidMapping> populateHashes() {
if (!cfg.isHashLookup()) {
return Collections.emptyList();
}

Processor<List<ThreePidMapping>> p = new Processor<>();
p.withConfig(cfg.getLookup().getBulk());
@@ -54,6 +54,8 @@ public class LdapAuthProvider extends LdapBackend implements AuthenticatorProvid

private transient final Logger log = LoggerFactory.getLogger(LdapAuthProvider.class);

public static final char[] CHARACTERS_TO_ESCAPE = ",#+<>;\"=*\\\\".toCharArray();

private PhoneNumberUtil phoneUtil = PhoneNumberUtil.getInstance();

public LdapAuthProvider(LdapConfig cfg, MatrixConfig mxCfg) {

@@ -94,7 +96,8 @@
return BackendAuthResult.failure();
}

String userFilter = "(" + getUidAtt() + "=" + userFilterValue + ")";
String filteredValue = escape(userFilterValue);
String userFilter = "(" + getUidAtt() + "=" + filteredValue + ")";
userFilter = buildWithFilter(userFilter, getCfg().getAuth().getFilter());

Set<String> attributes = new HashSet<>();

@@ -162,8 +165,21 @@
log.info("No match were found for {}", mxid);
return BackendAuthResult.failure();
} catch (LdapException | IOException | CursorException e) {
log.error("Unable to invoke query request: ", e);
throw new InternalServerError(e);
}
}

private String escape(String raw) {
StringBuilder sb = new StringBuilder();
boolean escape;
for (char c : raw.toCharArray()) {
escape = false;
for (int i = 0; i < CHARACTERS_TO_ESCAPE.length && !escape; i++) {
escape = CHARACTERS_TO_ESCAPE[i] == c;
}
sb.append(escape ? "\\" + c : c);
}
return sb.toString();
}
}
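A short illustration of the new LDAP filter escaping (input values are made up; escape() itself is private): every character listed in CHARACTERS_TO_ESCAPE is prefixed with a backslash before the value is placed into the search filter, which keeps user input from altering the filter structure.
// escape("jdoe,ou=users")  ->  "jdoe\,ou\=users"
// escape("*admin*")        ->  "\*admin\*"
// resulting filter: "(" + uidAttribute + "=" + escapedValue + ")"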
@@ -20,6 +20,7 @@

package io.kamax.mxisd.backend.ldap;

import io.kamax.matrix.MatrixID;
import io.kamax.matrix._MatrixID;
import io.kamax.mxisd.config.MatrixConfig;
import io.kamax.mxisd.config.ldap.LdapConfig;

@@ -116,10 +117,20 @@ public abstract class LdapBackend {

public String buildMatrixIdFromUid(String uid) {
String uidType = getCfg().getAttribute().getUid().getType();
String localpart = uid.toLowerCase();

if (!StringUtils.equals(uid, localpart)) {
log.info("UID {} from LDAP has been changed to lowercase to match the Synapse specifications", uid);
}

if (StringUtils.equals(UID, uidType)) {
return "@" + uid + ":" + mxCfg.getDomain();
if(getCfg().isActiveDirectory()) {
localpart = new UPN(uid.toLowerCase()).getMXID();
}

return "@" + localpart + ":" + mxCfg.getDomain();
} else if (StringUtils.equals(MATRIX_ID, uidType)) {
return uid;
return localpart;
} else {
throw new IllegalArgumentException("Bind type " + uidType + " is not supported");
}

@@ -128,6 +139,10 @@
public String buildUidFromMatrixId(_MatrixID mxId) {
String uidType = getCfg().getAttribute().getUid().getType();
if (StringUtils.equals(UID, uidType)) {
if(getCfg().isActiveDirectory()) {
return new UPN(mxId).getUPN();
}

return mxId.getLocalPart();
} else if (StringUtils.equals(MATRIX_ID, uidType)) {
return mxId.getId();

@@ -169,4 +184,58 @@
return values;
}

private class UPN {
private String login;
private String domain;

public UPN(String userPrincipalName) {
String[] uidParts = userPrincipalName.split("@");

if (uidParts.length != 2) {
throw new IllegalArgumentException(String.format("Wrong userPrincipalName provided: %s", userPrincipalName));
}

this.login = uidParts[0];
this.domain = uidParts[1];
}

public UPN(_MatrixID mxid) {
String[] idParts = mxid.getLocalPart().split("/");

if (idParts.length != 2) {
if(idParts.length == 1 && !StringUtils.isEmpty(getCfg().getDefaultDomain())) {
throw new IllegalArgumentException(String.format(
"Local part of mxid %s does not contains domain separator and default domain is not configured",
mxid.getLocalPart()
));
}

this.domain = getCfg().getDefaultDomain();
} else {
this.domain = idParts[1];
}

this.login = idParts[0];
}

public String getLogin() {
return login;
}

public String getDomain() {
return domain;
}

public String getMXID() {
if(StringUtils.equalsIgnoreCase(getCfg().getDefaultDomain(), this.domain)) {
return this.login;
}

return new StringBuilder(this.login).append("/").append(this.domain).toString();
}

public String getUPN() {
return new StringBuilder(this.login).append("@").append(this.domain).toString();
}
}
}
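Tracing the Active Directory mapping by hand (the defaultDomain and UPNs below are assumptions, not taken from the diff): with ldap defaultDomain set to example.org,
// buildMatrixIdFromUid("John@example.org") -> "@john:<matrix domain>"            (domain matches defaultDomain, so it is dropped)
// buildMatrixIdFromUid("John@other.org")   -> "@john/other.org:<matrix domain>"
// buildUidFromMatrixId(@john/other.org:…)  -> "john@other.org"                   (UPN rebuilt from the "/" separator)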
@@ -41,7 +41,10 @@ import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;

public class LdapThreePidProvider extends LdapBackend implements IThreePidProvider {

@@ -137,4 +140,65 @@ public class LdapThreePidProvider extends LdapBackend implements IThreePidProvid
return mappingsFound;
}

private List<String> getAttributes() {
final List<String> attributes = getCfg().getAttribute().getThreepid().values().stream().flatMap(List::stream)
.collect(Collectors.toList());
attributes.add(getUidAtt());
return attributes;
}

private Optional<String> getAttributeValue(Entry entry, List<String> attributes) {
return attributes.stream()
.map(attr -> getAttribute(entry, attr))
.filter(Objects::nonNull)
.filter(Optional::isPresent)
.map(Optional::get)
.findFirst();
}

@Override
public Iterable<ThreePidMapping> populateHashes() {
List<ThreePidMapping> result = new ArrayList<>();
if (!getCfg().getIdentity().isLookup()) {
return result;
}

String filter = getCfg().getIdentity().getFilter();

try (LdapConnection conn = getConn()) {
bind(conn);

log.debug("Query: {}", filter);
List<String> attributes = getAttributes();
log.debug("Attributes: {}", GsonUtil.build().toJson(attributes));

for (String baseDN : getBaseDNs()) {
log.debug("Base DN: {}", baseDN);

try (EntryCursor cursor = conn.search(baseDN, filter, SearchScope.SUBTREE, attributes.toArray(new String[0]))) {
while (cursor.next()) {
Entry entry = cursor.get();
log.info("Found possible match, DN: {}", entry.getDn().getName());
Optional<String> mxid = getAttribute(entry, getUidAtt());
if (!mxid.isPresent()) {
continue;
}

for (Map.Entry<String, List<String>> attributeEntry : getCfg().getAttribute().getThreepid().entrySet()) {
String medium = attributeEntry.getKey();
getAttributeValue(entry, attributeEntry.getValue())
.ifPresent(s -> result.add(new ThreePidMapping(medium, s, buildMatrixIdFromUid(mxid.get()))));
}
}
} catch (CursorLdapReferralException e) {
log.warn("3PID is only available via referral, skipping", e);
} catch (IOException | LdapException | CursorException e) {
log.error("Unable to fetch 3PID mappings", e);
}
}
} catch (LdapException | IOException e) {
log.error("Unable to fetch 3PID mappings", e);
}
return result;
}
}
@@ -174,6 +174,10 @@ public class MemoryIdentityStore implements AuthenticatorProvider, DirectoryProv

@Override
public Iterable<ThreePidMapping> populateHashes() {
if (!cfg.isHashEnabled()) {
return Collections.emptyList();
}

return cfg.getIdentities().stream()
.map(mic -> mic.getThreepids().stream().map(mtp -> new ThreePidMapping(mtp.getMedium(), mtp.getAddress(), mic.getUsername())))
.flatMap(s -> s).collect(
@@ -107,14 +107,16 @@ public abstract class SqlThreePidProvider implements IThreePidProvider {

@Override
public Iterable<ThreePidMapping> populateHashes() {
if (StringUtils.isBlank(cfg.getLookup().getQuery())) {
String query = cfg.getLookup().getQuery();
if (StringUtils.isBlank(query)) {
log.warn("Lookup query not configured, skip.");
return Collections.emptyList();
}

log.debug("Uses query to match users: {}", query);
List<ThreePidMapping> result = new ArrayList<>();
try (Connection connection = pool.get()) {
PreparedStatement statement = connection.prepareStatement(cfg.getLookup().getQuery());
PreparedStatement statement = connection.prepareStatement(query);
try (ResultSet resultSet = statement.executeQuery()) {
while (resultSet.next()) {
String mxid = resultSet.getString("mxid");
@@ -29,23 +29,27 @@ import java.util.Optional;

public class Synapse {

private SqlConnectionPool pool;
private final SqlConnectionPool pool;
private final SynapseSqlProviderConfig providerConfig;

public Synapse(SynapseSqlProviderConfig sqlCfg) {
this.pool = new SqlConnectionPool(sqlCfg);
providerConfig = sqlCfg;
}

public Optional<String> getRoomName(String id) {
return pool.withConnFunction(conn -> {
PreparedStatement stmt = conn.prepareStatement(SynapseQueries.getRoomName());
stmt.setString(1, id);
ResultSet rSet = stmt.executeQuery();
if (!rSet.next()) {
return Optional.empty();
}
String query = providerConfig.isLegacyRoomNames() ? SynapseQueries.getLegacyRoomName() : SynapseQueries.getRoomName();

return Optional.ofNullable(rSet.getString(1));
return pool.withConnFunction(conn -> {
try (PreparedStatement stmt = conn.prepareStatement(query)) {
stmt.setString(1, id);
ResultSet rSet = stmt.executeQuery();
if (!rSet.next()) {
return Optional.empty();
}

return Optional.ofNullable(rSet.getString(1));
}
});
}

}
@@ -51,7 +51,7 @@ public class SynapseQueries {
if (StringUtils.equals("sqlite", type)) {
return "select " + getUserId(type, domain) + ", displayname from profiles p where displayname like ?";
} else if (StringUtils.equals("postgresql", type)) {
return "select " + getUserId(type, domain) + ", displayname from profiles p where displayname ilike ?";
return "SELECT u.name,p.displayname FROM users u JOIN profiles p ON u.name LIKE concat('@',p.user_id,':%') WHERE u.is_guest = 0 AND u.appservice_id IS NULL AND p.displayname LIKE ?";
} else {
throw new ConfigurationException("Invalid Synapse SQL type: " + type);
}

@@ -72,7 +72,10 @@
}

public static String getRoomName() {
return "select r.name from room_names r, events e, (select r1.room_id,max(e1.origin_server_ts) ts from room_names r1, events e1 where r1.event_id = e1.event_id group by r1.room_id) rle where e.origin_server_ts = rle.ts and r.event_id = e.event_id and r.room_id = ?";
return "select name from room_stats_state where room_id = ? limit 1";
}

public static String getLegacyRoomName() {
return "select r.name from room_names r, events e, (select r1.room_id,max(e1.origin_server_ts) ts from room_names r1, events e1 where r1.event_id = e1.event_id group by r1.room_id) rle where e.origin_server_ts = rle.ts and r.event_id = e.event_id and r.room_id = ?";
}
}
@@ -0,0 +1,5 @@
package io.kamax.mxisd.config;

public interface DatabaseStorageConfig {
    String getDatabase();
}
@@ -0,0 +1,30 @@
package io.kamax.mxisd.config;

public class DurationDeserializer {

    public long deserialize(String argument) {
        long duration = 0L;
        for (String part : argument.split(" ")) {
            String unit = part.substring(part.length() - 1);
            long value = Long.parseLong(part.substring(0, part.length() - 1));
            switch (unit) {
                case "s":
                    duration += value;
                    break;
                case "m":
                    duration += value * 60;
                    break;
                case "h":
                    duration += value * 60 * 60;
                    break;
                case "d":
                    duration += value * 60 * 60 * 24;
                    break;
                default:
                    throw new IllegalArgumentException(String.format("Unknown duration unit: %s", unit));
            }
        }

        return duration;
    }
}
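A worked example of the duration format the new class accepts (the input string is illustrative): parts are space-separated and each ends in s, m, h or d.
// Example, not part of the diff:
long seconds = new DurationDeserializer().deserialize("1d 2h 30m 15s");
// 86400 + 7200 + 1800 + 15 = 95415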
@@ -309,6 +309,7 @@ public class ExecConfig {
private Boolean enabled;
private int priority;
private Lookup lookup = new Lookup();
private boolean hashLookup = false;

public Boolean isEnabled() {
return enabled;

@@ -334,6 +335,13 @@
this.lookup = lookup;
}

public boolean isHashLookup() {
return hashLookup;
}

public void setHashLookup(boolean hashLookup) {
this.hashLookup = hashLookup;
}
}

public static class Profile {
@@ -5,45 +5,58 @@ import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

public class HashingConfig {

private static final Logger LOGGER = LoggerFactory.getLogger(HashingConfig.class);

private boolean enabled = false;
private int pepperLength = 10;
private int pepperLength = 20;
private RotationPolicyEnum rotationPolicy;
private HashStorageEnum hashStorageType;
private long delay = 10;
private HashStorageEnum hashStorageType = HashStorageEnum.in_memory;
private String delay = "10s";
private transient long delayInSeconds = 10;
private int requests = 10;
private List<Algorithm> algorithms = new ArrayList<>();

public void build() {
public void build(MatrixConfig matrixConfig) {
if (isEnabled()) {
LOGGER.info("--- Hash configuration ---");
LOGGER.info(" Pepper length: {}", getPepperLength());
LOGGER.info(" Rotation policy: {}", getRotationPolicy());
LOGGER.info(" Hash storage type: {}", getHashStorageType());
if (RotationPolicyEnum.PER_SECONDS == rotationPolicy) {
LOGGER.info(" Rotation delay: {}", delay);
Objects.requireNonNull(getHashStorageType(), "Storage type must be specified");
if (RotationPolicyEnum.per_seconds == getRotationPolicy()) {
setDelayInSeconds(new DurationDeserializer().deserialize(getDelay()));
LOGGER.info(" Rotation delay: {}", getDelay());
LOGGER.info(" Rotation delay in seconds: {}", getDelayInSeconds());
}
if (RotationPolicyEnum.per_requests == getRotationPolicy()) {
LOGGER.info(" Rotation after requests: {}", getRequests());
}
LOGGER.info(" Algorithms: {}", getAlgorithms());
} else {
if (matrixConfig.isV2()) {
LOGGER.warn("V2 enabled without the hash configuration.");
}
LOGGER.info("Hash configuration disabled, used only `none` pepper.");
}
}

public enum Algorithm {
NONE,
SHA256
none,
sha256
}

public enum RotationPolicyEnum {
PER_REQUESTS,
PER_SECONDS
per_requests,
per_seconds
}

public enum HashStorageEnum {
IN_MEMORY,
SQL
in_memory,
sql
}

public boolean isEnabled() {

@@ -78,14 +91,30 @@ public class HashingConfig {
this.hashStorageType = hashStorageType;
}

public long getDelay() {
public String getDelay() {
return delay;
}

public void setDelay(long delay) {
public void setDelay(String delay) {
this.delay = delay;
}

public long getDelayInSeconds() {
return delayInSeconds;
}

public void setDelayInSeconds(long delayInSeconds) {
this.delayInSeconds = delayInSeconds;
}

public int getRequests() {
return requests;
}

public void setRequests(int requests) {
this.requests = requests;
}

public List<Algorithm> getAlgorithms() {
return algorithms;
}
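A minimal sketch of how the reworked hashing settings fit together (values are illustrative and only setters visible in this hunk are used): delay is now a duration string that build() converts to seconds through DurationDeserializer, and the enum constants are lowercase so they can be written as-is in the YAML configuration.
// Sketch, not part of the diff:
HashingConfig hashing = new HashingConfig();
hashing.setDelay("30m");                                        // rotation delay as a duration string
hashing.setRequests(100);                                       // barrier for the per_requests policy
hashing.setHashStorageType(HashingConfig.HashStorageEnum.sql);  // lowercase enum value
// after build(matrixConfig), getDelayInSeconds() would be 1800 when the policy is per_seconds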
src/main/java/io/kamax/mxisd/config/InternalAPIConfig.java (new file, 24 lines)

@@ -0,0 +1,24 @@
package io.kamax.mxisd.config;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class InternalAPIConfig {

    private final static Logger log = LoggerFactory.getLogger(InternalAPIConfig.class);

    private boolean enabled = false;

    public boolean isEnabled() {
        return enabled;
    }

    public void setEnabled(boolean enabled) {
        this.enabled = enabled;
    }

    public void build() {
        log.info("--- Internal API config ---");
        log.info("Internal API enabled: {}", isEnabled());
    }
}
@@ -67,6 +67,7 @@ public class InvitationConfig {

private boolean recursive = true;
private long timer = 5;
private PeriodDimension period = PeriodDimension.minutes;

public boolean isRecursive() {
return recursive;

@@ -84,6 +85,13 @@
this.timer = timer;
}

public PeriodDimension getPeriod() {
return period;
}

public void setPeriod(PeriodDimension period) {
this.period = period;
}
}

public static class SenderPolicy {

@@ -115,6 +123,7 @@
private Expiration expiration = new Expiration();
private Resolution resolution = new Resolution();
private Policies policy = new Policies();
private boolean fullDisplayName = false;

public Expiration getExpiration() {
return expiration;

@@ -140,11 +149,26 @@
this.policy = policy;
}

public boolean isFullDisplayName() {
return fullDisplayName;
}

public void setFullDisplayName(boolean fullDisplayName) {
this.fullDisplayName = fullDisplayName;
}

public void build() {
log.info("--- Invite config ---");
log.info("Expiration: {}", GsonUtil.get().toJson(getExpiration()));
log.info("Resolution: {}", GsonUtil.get().toJson(getResolution()));
log.info("Policies: {}", GsonUtil.get().toJson(getPolicy()));
log.info("Print full display name on invitation: {}", isFullDisplayName());
}

public enum PeriodDimension {

minutes,

seconds
}
}
src/main/java/io/kamax/mxisd/config/LoggingConfig.java (new file, 60 lines)

@@ -0,0 +1,60 @@
package io.kamax.mxisd.config;

import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingConfig {

    private static final Logger LOGGER = LoggerFactory.getLogger("App");

    private String root;
    private String app;
    private boolean requests = false;

    public String getRoot() {
        return root;
    }

    public void setRoot(String root) {
        this.root = root;
    }

    public String getApp() {
        return app;
    }

    public void setApp(String app) {
        this.app = app;
    }

    public boolean isRequests() {
        return requests;
    }

    public void setRequests(boolean requests) {
        this.requests = requests;
    }

    public void build() {
        LOGGER.info("Logging config:");
        if (StringUtils.isNotBlank(getRoot())) {
            LOGGER.info(" Default log level: {}", getRoot());
            System.setProperty("org.slf4j.simpleLogger.defaultLogLevel", getRoot());
        }

        String appLevel = System.getProperty("org.slf4j.simpleLogger.log.io.kamax.mxisd");
        if (StringUtils.isNotBlank(appLevel)) {
            LOGGER.info(" Logging level set by environment: {}", appLevel);
        } else if (StringUtils.isNotBlank(getApp())) {
            System.setProperty("org.slf4j.simpleLogger.log.io.kamax.mxisd", getApp());
            LOGGER.info(" Logging level set by the configuration: {}", getApp());
        } else {
            LOGGER.info(" Logging level hasn't set, use default");
        }
        LOGGER.info(" Log requests: {}", isRequests());
        if (isRequests()) {
            LOGGER.warn(" Request dumping enabled, use this only to debug purposes, don't use it in the production.");
        }
    }
}
@@ -64,7 +64,7 @@ public class MatrixConfig {
private String domain;
private Identity identity = new Identity();
private boolean v1 = true;
private boolean v2 = true;
private boolean v2 = false;

public String getDomain() {
return domain;
@@ -117,6 +117,8 @@ public class MxisdConfig {
private WordpressConfig wordpress = new WordpressConfig();
private PolicyConfig policy = new PolicyConfig();
private HashingConfig hashing = new HashingConfig();
private LoggingConfig logging = new LoggingConfig();
private InternalAPIConfig internal = new InternalAPIConfig();

public AppServiceConfig getAppsvc() {
return appsvc;

@@ -342,6 +344,14 @@
this.hashing = hashing;
}

public LoggingConfig getLogging() {
return logging;
}

public void setLogging(LoggingConfig logging) {
this.logging = logging;
}

public MxisdConfig inMemory() {
getKey().setPath(":memory:");
getStorage().getProvider().getSqlite().setDatabase(":memory:");

@@ -349,7 +359,17 @@
return this;
}

public InternalAPIConfig getInternal() {
return internal;
}

public void setInternal(InternalAPIConfig internal) {
this.internal = internal;
}

public MxisdConfig build() {
getLogging().build();

if (StringUtils.isBlank(getServer().getName())) {
getServer().setName(getMatrix().getDomain());
log.debug("server.name is empty, using matrix.domain");

@@ -359,6 +379,7 @@
getAuth().build();
getAccountConfig().build();
getDirectory().build();
getDns().build();
getExec().build();
getFirebase().build();
getForward().build();

@@ -381,7 +402,8 @@
getView().build();
getWordpress().build();
getPolicy().build();
getHashing().build();
getHashing().build(getMatrix());
getInternal().build();

return this;
}
|
||||
policyObjectItem.getValue().getPatterns().add(Pattern.compile(regexp));
|
||||
}
|
||||
sb.append(" terms:\n");
|
||||
for (Map.Entry<String, TermObject> termItem : policyObject.getTerms().entrySet()) {
|
||||
sb.append(" - lang: ").append(termItem.getKey()).append("\n");
|
||||
sb.append(" name: ").append(termItem.getValue().getName()).append("\n");
|
||||
sb.append(" url: ").append(termItem.getValue().getUrl()).append("\n");
|
||||
if (policyObject.getTerms() != null) {
|
||||
for (Map.Entry<String, TermObject> termItem : policyObject.getTerms().entrySet()) {
|
||||
sb.append(" - lang: ").append(termItem.getKey()).append("\n");
|
||||
sb.append(" name: ").append(termItem.getValue().getName()).append("\n");
|
||||
sb.append(" url: ").append(termItem.getValue().getUrl()).append("\n");
|
||||
}
|
||||
}
|
||||
LOGGER.info(sb.toString());
|
||||
}
|
||||
|
src/main/java/io/kamax/mxisd/config/PostgresqlStorageConfig.java (new file, 105 lines)

@@ -0,0 +1,105 @@
/*
 * mxisd - Matrix Identity Server Daemon
 * Copyright (C) 2017 Kamax Sarl
 *
 * https://www.kamax.io/
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

package io.kamax.mxisd.config;

public class PostgresqlStorageConfig implements DatabaseStorageConfig {

    private String database;

    private String username;

    private String password;

    private boolean pool;

    private int maxConnectionsFree = 1;

    private long maxConnectionAgeMillis = 60 * 60 * 1000;

    private long checkConnectionsEveryMillis = 30 * 1000;

    private boolean testBeforeGetFromPool = false;

    @Override
    public String getDatabase() {
        return database;
    }

    public void setDatabase(String database) {
        this.database = database;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public boolean isPool() {
        return pool;
    }

    public void setPool(boolean pool) {
        this.pool = pool;
    }

    public int getMaxConnectionsFree() {
        return maxConnectionsFree;
    }

    public void setMaxConnectionsFree(int maxConnectionsFree) {
        this.maxConnectionsFree = maxConnectionsFree;
    }

    public long getMaxConnectionAgeMillis() {
        return maxConnectionAgeMillis;
    }

    public void setMaxConnectionAgeMillis(long maxConnectionAgeMillis) {
        this.maxConnectionAgeMillis = maxConnectionAgeMillis;
    }

    public long getCheckConnectionsEveryMillis() {
        return checkConnectionsEveryMillis;
    }

    public void setCheckConnectionsEveryMillis(long checkConnectionsEveryMillis) {
        this.checkConnectionsEveryMillis = checkConnectionsEveryMillis;
    }

    public boolean isTestBeforeGetFromPool() {
        return testBeforeGetFromPool;
    }

    public void setTestBeforeGetFromPool(boolean testBeforeGetFromPool) {
        this.testBeforeGetFromPool = testBeforeGetFromPool;
    }
}
@@ -20,10 +20,11 @@

package io.kamax.mxisd.config;

public class SQLiteStorageConfig {
public class SQLiteStorageConfig implements DatabaseStorageConfig {

private String database;

@Override
public String getDatabase() {
return database;
}
@@ -34,6 +34,7 @@ public class ServerConfig {
private String name;
private int port = 8090;
private String publicUrl;
private String hostname;

public String getName() {
return name;

@@ -59,6 +60,14 @@
this.publicUrl = publicUrl;
}

public String getHostname() {
return hostname;
}

public void setHostname(String hostname) {
this.hostname = hostname;
}

public void build() {
log.info("--- Server config ---");

@@ -75,8 +84,13 @@
log.warn("Public URL is not valid: {}", StringUtils.defaultIfBlank(e.getMessage(), "<no reason provided>"));
}

if (StringUtils.isBlank(getHostname())) {
setHostname("0.0.0.0");
}

log.info("Name: {}", getName());
log.info("Port: {}", getPort());
log.info("Public URL: {}", getPublicUrl());
log.info("Hostname: {}", getHostname());
}
}
@@ -48,6 +48,8 @@ public class SessionConfig {

private boolean enabled = true;

private boolean notifications = true;

public boolean getEnabled() {
return enabled;
}

@@ -55,11 +57,20 @@
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}

public boolean shouldNotify() {
return notifications;
}

public void setNotifications(boolean notifications) {
this.notifications = notifications;
}
}

public Policy() {
validation.enabled = true;
unbind.enabled = true;
unbind.notifications = true;
}

private PolicyTemplate validation = new PolicyTemplate();
@@ -21,14 +21,21 @@
package io.kamax.mxisd.config;

import io.kamax.mxisd.exception.ConfigurationException;
import org.apache.commons.lang.StringUtils;

public class StorageConfig {

public enum BackendEnum {
sqlite,

postgresql
}

public static class Provider {

private SQLiteStorageConfig sqlite = new SQLiteStorageConfig();

private PostgresqlStorageConfig postgresql = new PostgresqlStorageConfig();

public SQLiteStorageConfig getSqlite() {
return sqlite;
}

@@ -37,16 +44,23 @@
this.sqlite = sqlite;
}

public PostgresqlStorageConfig getPostgresql() {
return postgresql;
}

public void setPostgresql(PostgresqlStorageConfig postgresql) {
this.postgresql = postgresql;
}
}

private String backend = "sqlite";
private BackendEnum backend = BackendEnum.sqlite; // or postgresql
private Provider provider = new Provider();

public String getBackend() {
public BackendEnum getBackend() {
return backend;
}

public void setBackend(String backend) {
public void setBackend(BackendEnum backend) {
this.backend = backend;
}

@@ -59,7 +73,7 @@
}

public void build() {
if (StringUtils.isBlank(getBackend())) {
if (getBackend() == null) {
throw new ConfigurationException("storage.backend");
}
}
@@ -76,4 +76,14 @@ public class YamlConfigLoader {
}
}

public static void dumpConfig(MxisdConfig cfg) {
Representer rep = new Representer();
rep.getPropertyUtils().setBeanAccess(BeanAccess.FIELD);
rep.getPropertyUtils().setAllowReadOnlyProperties(true);
rep.getPropertyUtils().setSkipMissingProperties(true);

Yaml yaml = new Yaml(new Constructor(MxisdConfig.class), rep);
String dump = yaml.dump(cfg);
log.info("Full configuration:\n{}", dump);
}
}
@@ -233,6 +233,7 @@ public abstract class LdapConfig {
private String filter;
private String token = "%3pid";
private Map<String, String> medium = new HashMap<>();
private boolean lookup = false;

public String getFilter() {
return filter;

@@ -262,6 +263,13 @@
this.medium = medium;
}

public boolean isLookup() {
return lookup;
}

public void setLookup(boolean lookup) {
this.lookup = lookup;
}
}

public static class Profile {

@@ -283,6 +291,9 @@
private boolean enabled;
private String filter;

private boolean activeDirectory;
private String defaultDomain;

private Connection connection = new Connection();
private Attribute attribute = new Attribute();
private Auth auth = new Auth();

@@ -308,6 +319,22 @@
this.filter = filter;
}

public boolean isActiveDirectory() {
return activeDirectory;
}

public void setActiveDirectory(boolean activeDirectory) {
this.activeDirectory = activeDirectory;
}

public String getDefaultDomain() {
return defaultDomain;
}

public void setDefaultDomain(String defaultDomain) {
this.defaultDomain = defaultDomain;
}

public Connection getConnection() {
return connection;
}

@@ -399,6 +426,15 @@
throw new ConfigurationException("ldap.identity.token");
}

if(isActiveDirectory()) {
if(!StringUtils.equals(LdapBackend.UID, uidType)) {
throw new IllegalArgumentException(String.format(
"Attribute UID type should be set to %s in Active Directory mode",
LdapBackend.UID
));
}
}

// Build queries
attribute.getThreepid().forEach((k, v) -> {
if (StringUtils.isBlank(identity.getMedium().get(k))) {
@@ -27,6 +27,7 @@ public class MemoryStoreConfig {

private boolean enabled;
private List<MemoryIdentityConfig> identities = new ArrayList<>();
private boolean hashEnabled = false;

public boolean isEnabled() {
return enabled;

@@ -44,6 +45,14 @@
this.identities = identities;
}

public boolean isHashEnabled() {
return hashEnabled;
}

public void setHashEnabled(boolean hashEnabled) {
this.hashEnabled = hashEnabled;
}

public void build() {
// no-op
}
@@ -45,4 +45,21 @@ public class MemoryThreePid implements _ThreePid {
this.address = address;
}

@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;

MemoryThreePid threePid = (MemoryThreePid) o;

if (!medium.equals(threePid.medium)) return false;
return address.equals(threePid.address);
}

@Override
public int hashCode() {
int result = medium.hashCode();
result = 31 * result + address.hashCode();
return result;
}
}
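A hedged reading of why equals/hashCode were added (the link is inferred, not stated in the diff): deduplication elsewhere in this change set goes through Stream.distinct(), as in the MembershipEventProcessor hunk above, and distinct() can only collapse two 3PIDs when they compare equal by medium and address.
// Illustrative only: pidA and pidB are assumed to share the same medium and address.
long unique = java.util.stream.Stream.of(pidA, pidB).distinct().count();
// 1 with the equals/hashCode above; 2 if identity equality were still in use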
@@ -125,7 +125,7 @@ public abstract class SqlConfig {
}

public static class Lookup {
private String query;
private String query = "SELECT user_id AS mxid, medium, address from user_threepid_id_server";

public String getQuery() {
return query;

@@ -140,7 +140,7 @@

private Boolean enabled;
private String type = "mxid";
private String query = "SELECT user_id AS uid FROM user_threepids WHERE medium = ? AND address = ?";
private String query = "SELECT user_id AS uid FROM user_threepid_id_server WHERE medium = ? AND address = ?";
private Map<String, String> medium = new HashMap<>();

public Boolean isEnabled() {
@@ -24,9 +24,23 @@ import io.kamax.mxisd.UserIdType;
import io.kamax.mxisd.backend.sql.synapse.SynapseQueries;
import io.kamax.mxisd.config.sql.SqlConfig;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class SynapseSqlProviderConfig extends SqlConfig {

private transient final Logger log = LoggerFactory.getLogger(SynapseSqlProviderConfig.class);

private boolean legacyRoomNames = false;

public boolean isLegacyRoomNames() {
return legacyRoomNames;
}

public void setLegacyRoomNames(boolean legacyRoomNames) {
this.legacyRoomNames = legacyRoomNames;
}

@Override
protected String getProviderName() {
return "Synapse SQL";

@@ -42,7 +56,7 @@

if (getIdentity().isEnabled() && StringUtils.isBlank(getIdentity().getType())) {
getIdentity().setType("mxid");
getIdentity().setQuery("SELECT user_id AS uid FROM user_threepids WHERE medium = ? AND address = ?");
getIdentity().setQuery("SELECT user_id AS uid FROM user_threepid_id_server WHERE medium = ? AND address = ?");
}

if (getProfile().isEnabled()) {

@@ -65,4 +79,12 @@
printConfig();
}

@Override
protected void printConfig() {
super.printConfig();

if (isEnabled()) {
log.info("Use legacy room name query: {}", isLegacyRoomNames());
}
}
}
@@ -23,5 +23,6 @@ package io.kamax.mxisd.exception;
public class InvalidParamException extends RuntimeException {

public InvalidParamException() {
super("The chosen hash algorithm is invalid or disallowed");
}
}

@@ -23,5 +23,6 @@ package io.kamax.mxisd.exception;
public class InvalidPepperException extends RuntimeException {

public InvalidPepperException() {
super("The provided pepper is invalid or expired");
}
}
@@ -0,0 +1,28 @@
/*
 * ma1sd - Matrix Identity Server Daemon
 * Copyright (C) 2020 Anatoliy SAblin
 *
 * https://www.github.com/ma1uta/ma1sd/
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

package io.kamax.mxisd.exception;

public class TermsNotSignedException extends RuntimeException {

    public TermsNotSignedException() {
        super("Please accept our updated terms of service before continuing");
    }
}
@@ -1,6 +1,9 @@
package io.kamax.mxisd.hash;

import io.kamax.mxisd.config.HashingConfig;
import io.kamax.mxisd.hash.engine.Engine;
import io.kamax.mxisd.hash.engine.HashEngine;
import io.kamax.mxisd.hash.engine.NoneEngine;
import io.kamax.mxisd.hash.rotation.HashRotationStrategy;
import io.kamax.mxisd.hash.rotation.NoOpRotationStrategy;
import io.kamax.mxisd.hash.rotation.RotationPerRequests;

@@ -21,7 +24,7 @@ public class HashManager {

private static final Logger LOGGER = LoggerFactory.getLogger(HashManager.class);

private HashEngine hashEngine;
private Engine engine;
private HashRotationStrategy rotationStrategy;
private HashStorage hashStorage;
private HashingConfig config;

@@ -32,7 +35,7 @@
this.config = config;
this.storage = storage;
initStorage();
hashEngine = new HashEngine(providers, getHashStorage(), config);
engine = config.isEnabled() ? new HashEngine(providers, getHashStorage(), config) : new NoneEngine();
initRotationStrategy();
configured.set(true);
}

@@ -40,10 +43,10 @@
private void initStorage() {
if (config.isEnabled()) {
switch (config.getHashStorageType()) {
case IN_MEMORY:
case in_memory:
this.hashStorage = new InMemoryHashStorage();
break;
case SQL:
case sql:
this.hashStorage = new SqlHashStorage(storage);
break;
default:

@@ -57,11 +60,11 @@
private void initRotationStrategy() {
if (config.isEnabled()) {
switch (config.getRotationPolicy()) {
case PER_REQUESTS:
this.rotationStrategy = new RotationPerRequests();
case per_requests:
this.rotationStrategy = new RotationPerRequests(config.getRequests());
break;
case PER_SECONDS:
this.rotationStrategy = new TimeBasedRotation(config.getDelay());
case per_seconds:
this.rotationStrategy = new TimeBasedRotation(config.getDelayInSeconds());
break;
default:
throw new IllegalArgumentException("Unknown rotation type: " + config.getHashStorageType());

@@ -73,8 +76,8 @@
this.rotationStrategy.register(getHashEngine());
}

public HashEngine getHashEngine() {
return hashEngine;
public Engine getHashEngine() {
return engine;
}

public HashRotationStrategy getRotationStrategy() {
src/main/java/io/kamax/mxisd/hash/engine/Engine.java (new file, 7 lines)

@@ -0,0 +1,7 @@
package io.kamax.mxisd.hash.engine;

public interface Engine {
    void updateHashes();

    String getPepper();
}
@@ -1,20 +1,25 @@
package io.kamax.mxisd.hash;
package io.kamax.mxisd.hash.engine;

import io.kamax.matrix.codec.MxSha256;
import io.kamax.mxisd.config.HashingConfig;
import io.kamax.mxisd.hash.storage.HashStorage;
import io.kamax.mxisd.lookup.ThreePidMapping;
import io.kamax.mxisd.lookup.provider.IThreePidProvider;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang3.RandomStringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Base64;
import java.util.List;

public class HashEngine {
public class HashEngine implements Engine {

private static final Logger LOGGER = LoggerFactory.getLogger(HashEngine.class);

private final List<? extends IThreePidProvider> providers;
private final HashStorage hashStorage;
private final MxSha256 sha256 = new MxSha256();
private final HashingConfig config;
private final Base64.Encoder base64 = Base64.getUrlEncoder().withoutPadding();
private String pepper;

public HashEngine(List<? extends IThreePidProvider> providers, HashStorage hashStorage, HashingConfig config) {

@@ -23,18 +28,28 @@
this.config = config;
}

@Override
public void updateHashes() {
LOGGER.info("Start update hashes.");
synchronized (hashStorage) {
this.pepper = newPepper();
hashStorage.clear();
for (IThreePidProvider provider : providers) {
for (ThreePidMapping pidMapping : provider.populateHashes()) {
hashStorage.add(pidMapping, hash(pidMapping));
try {
LOGGER.info("Populate hashes from the handler: {}", provider.getClass().getCanonicalName());
for (ThreePidMapping pidMapping : provider.populateHashes()) {
LOGGER.debug("Found 3PID: {}", pidMapping);
hashStorage.add(pidMapping, hash(pidMapping));
}
} catch (Exception e) {
LOGGER.error("Unable to update hashes of the provider: " + provider.toString(), e);
}
}
}
LOGGER.info("Finish update hashes.");
}

@Override
public String getPepper() {
synchronized (hashStorage) {
return pepper;

@@ -42,10 +57,10 @@
}

protected String hash(ThreePidMapping pidMapping) {
return sha256.hash(pidMapping.getMedium() + " " + pidMapping.getValue() + " " + getPepper());
return base64.encodeToString(DigestUtils.sha256(pidMapping.getValue() + " " + pidMapping.getMedium() + " " + getPepper()));
}

protected String newPepper() {
return RandomStringUtils.random(config.getPepperLength());
return RandomStringUtils.random(config.getPepperLength(), true, true);
}
}
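A self-contained sketch of the value hash() now produces (the 3PID and pepper are made up): the SHA-256 of "address medium pepper", encoded as URL-safe Base64 without padding, i.e. the MSC2134-style v2 hash lookup format.
// Illustrative only, mirrors the hash() implementation above:
String address = "alice@example.org";
String medium = "email";
String pepper = "matrixrocks";
String hash = java.util.Base64.getUrlEncoder().withoutPadding()
        .encodeToString(org.apache.commons.codec.digest.DigestUtils.sha256(address + " " + medium + " " + pepper));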
src/main/java/io/kamax/mxisd/hash/engine/NoneEngine.java (new file, 19 lines)

@@ -0,0 +1,19 @@
package io.kamax.mxisd.hash.engine;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class NoneEngine implements Engine {

    private static final Logger LOGGER = LoggerFactory.getLogger(NoneEngine.class);

    @Override
    public void updateHashes() {
        LOGGER.info("Nothing to update.");
    }

    @Override
    public String getPepper() {
        return "";
    }
}
@@ -1,12 +1,12 @@
package io.kamax.mxisd.hash.rotation;

import io.kamax.mxisd.hash.HashEngine;
import io.kamax.mxisd.hash.engine.Engine;

public interface HashRotationStrategy {

void register(HashEngine hashEngine);
void register(Engine engine);

HashEngine getHashEngine();
Engine getHashEngine();

void newRequest();
@@ -1,19 +1,19 @@
package io.kamax.mxisd.hash.rotation;

import io.kamax.mxisd.hash.HashEngine;
import io.kamax.mxisd.hash.engine.Engine;

public class NoOpRotationStrategy implements HashRotationStrategy {

private HashEngine hashEngine;
private Engine engine;

@Override
public void register(HashEngine hashEngine) {
this.hashEngine = hashEngine;
public void register(Engine engine) {
this.engine = engine;
}

@Override
public HashEngine getHashEngine() {
return hashEngine;
public Engine getHashEngine() {
return engine;
}

@Override
@@ -1,28 +1,34 @@
package io.kamax.mxisd.hash.rotation;

import io.kamax.mxisd.hash.HashEngine;
import io.kamax.mxisd.hash.engine.Engine;

import java.util.concurrent.atomic.AtomicInteger;

public class RotationPerRequests implements HashRotationStrategy {

private HashEngine hashEngine;
private Engine engine;
private final AtomicInteger counter = new AtomicInteger(0);
private final int barrier;

@Override
public void register(HashEngine hashEngine) {
this.hashEngine = hashEngine;
public RotationPerRequests(int barrier) {
this.barrier = barrier;
}

@Override
public HashEngine getHashEngine() {
return hashEngine;
public void register(Engine engine) {
this.engine = engine;
trigger();
}

@Override
public Engine getHashEngine() {
return engine;
}

@Override
public synchronized void newRequest() {
int newValue = counter.incrementAndGet();
if (newValue >= 10) {
if (newValue >= barrier) {
counter.set(0);
trigger();
}
@@ -1,6 +1,6 @@
package io.kamax.mxisd.hash.rotation;

import io.kamax.mxisd.hash.HashEngine;
import io.kamax.mxisd.hash.engine.Engine;

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
@@ -9,7 +9,7 @@ import java.util.concurrent.TimeUnit;

public class TimeBasedRotation implements HashRotationStrategy {

    private final long delay;
    private HashEngine hashEngine;
    private Engine engine;
    private final ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor();

    public TimeBasedRotation(long delay) {
@@ -17,15 +17,15 @@ public class TimeBasedRotation implements HashRotationStrategy {
    }

    @Override
    public void register(HashEngine hashEngine) {
        this.hashEngine = hashEngine;
    public void register(Engine engine) {
        this.engine = engine;
        Runtime.getRuntime().addShutdownHook(new Thread(executorService::shutdown));
        executorService.scheduleWithFixedDelay(this::trigger, 0, delay, TimeUnit.SECONDS);
    }

    @Override
    public HashEngine getHashEngine() {
        return hashEngine;
    public Engine getHashEngine() {
        return engine;
    }

    @Override
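The three rotation strategies above now take an io.kamax.mxisd.hash.engine.Engine instead of the old HashEngine, and RotationPerRequests reads its threshold from the constructor rather than a hard-coded 10. A minimal sketch of how a strategy is wired up under the new shape; the "engine" instance is assumed to come from the surrounding HashManager wiring, which is not part of these hunks:

    // Illustrative only: "engine" stands for whatever Engine implementation HashManager builds.
    HashRotationStrategy strategy = new RotationPerRequests(10); // rotate the pepper every 10 lookups
    strategy.register(engine);   // register() also performs an initial rotation via trigger()
    strategy.newRequest();       // each lookup counts towards the configured barrier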
@@ -12,6 +12,7 @@ public class SqlHashStorage implements HashStorage {

    public SqlHashStorage(IStorage storage) {
        this.storage = storage;
        Runtime.getRuntime().addShutdownHook(new Thread(storage::clearHashes));
    }

    @Override
@@ -20,11 +20,19 @@

package io.kamax.mxisd.http;

import static io.kamax.mxisd.util.RestClientUtils.urlEncode;

public class IsAPIv1 {

    public static final String Base = "/_matrix/identity/api/v1";

    public static String getValidate(String medium, String sid, String secret, String token) {
        return String.format("%s/validate/%s/submitToken?sid=%s&client_secret=%s&token=%s", Base, medium, sid, secret, token);
        return String.format("%s/validate/%s/submitToken?sid=%s&client_secret=%s&token=%s",
                Base,
                medium,
                urlEncode(sid),
                urlEncode(secret),
                urlEncode(token)
        );
    }
}
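The IsAPIv1 change above exists because sid, client_secret and token are caller-supplied and may contain characters that are not safe inside a query string. A rough illustration of the effect, assuming RestClientUtils.urlEncode behaves like java.net.URLEncoder with UTF-8 (its actual implementation is not shown in this diff):

    // Hypothetical value, for illustration only.
    String secret = "abc+def/123";
    // Unencoded: ...&client_secret=abc+def/123   ("+" is decoded as a space server-side)
    // Encoded:   ...&client_secret=abc%2Bdef%2F123
    String encoded = java.net.URLEncoder.encode(secret, java.nio.charset.StandardCharsets.UTF_8);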
@@ -0,0 +1,5 @@
package io.kamax.mxisd.http.undertow.conduit;

public interface ConduitWithDump {
    String dump();
}
@@ -0,0 +1,107 @@
/*
 * JBoss, Home of Professional Open Source.
 * Copyright 2014 Red Hat, Inc., and individual contributors
 * as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.kamax.mxisd.http.undertow.conduit;

import org.xnio.IoUtils;
import org.xnio.channels.StreamSourceChannel;
import org.xnio.conduits.AbstractStreamSinkConduit;
import org.xnio.conduits.ConduitWritableByteChannel;
import org.xnio.conduits.Conduits;
import org.xnio.conduits.StreamSinkConduit;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

/**
 * Conduit that saves all the data that is written through it and can dump it to the console
 * <p>
 * Obviously this should not be used in production.
 *
 * @author Stuart Douglas
 */
public class DebuggingStreamSinkConduit extends AbstractStreamSinkConduit<StreamSinkConduit> implements ConduitWithDump {

    private final List<byte[]> data = new CopyOnWriteArrayList<>();

    /**
     * Construct a new instance.
     *
     * @param next the delegate conduit to set
     */
    public DebuggingStreamSinkConduit(StreamSinkConduit next) {
        super(next);
    }

    @Override
    public int write(ByteBuffer src) throws IOException {
        int pos = src.position();
        int res = super.write(src);
        if (res > 0) {
            byte[] d = new byte[res];
            for (int i = 0; i < res; ++i) {
                d[i] = src.get(i + pos);
            }
            data.add(d);
        }
        return res;
    }

    @Override
    public long write(ByteBuffer[] dsts, int offs, int len) throws IOException {
        for (int i = offs; i < len; ++i) {
            if (dsts[i].hasRemaining()) {
                return write(dsts[i]);
            }
        }
        return 0;
    }

    @Override
    public long transferFrom(final FileChannel src, final long position, final long count) throws IOException {
        return src.transferTo(position, count, new ConduitWritableByteChannel(this));
    }

    @Override
    public long transferFrom(final StreamSourceChannel source, final long count, final ByteBuffer throughBuffer) throws IOException {
        return IoUtils.transfer(source, count, throughBuffer, new ConduitWritableByteChannel(this));
    }

    @Override
    public int writeFinal(ByteBuffer src) throws IOException {
        return Conduits.writeFinalBasic(this, src);
    }

    @Override
    public long writeFinal(ByteBuffer[] srcs, int offset, int length) throws IOException {
        return Conduits.writeFinalBasic(this, srcs, offset, length);
    }

    @Override
    public String dump() {
        StringBuilder sb = new StringBuilder();
        for (byte[] datum : data) {
            sb.append(new String(datum, StandardCharsets.UTF_8));
        }
        return sb.toString();
    }
}
@@ -0,0 +1,95 @@
|
||||
/*
|
||||
* JBoss, Home of Professional Open Source.
|
||||
* Copyright 2014 Red Hat, Inc., and individual contributors
|
||||
* as indicated by the @author tags.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package io.kamax.mxisd.http.undertow.conduit;
|
||||
|
||||
import org.xnio.IoUtils;
|
||||
import org.xnio.channels.StreamSinkChannel;
|
||||
import org.xnio.conduits.AbstractStreamSourceConduit;
|
||||
import org.xnio.conduits.ConduitReadableByteChannel;
|
||||
import org.xnio.conduits.StreamSourceConduit;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.channels.FileChannel;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.CopyOnWriteArrayList;
|
||||
|
||||
/**
|
||||
* Conduit that saves all the data that is written through it and can dump it to the console
|
||||
* <p>
|
||||
* Obviously this should not be used in production.
|
||||
*
|
||||
* @author Stuart Douglas
|
||||
*/
|
||||
public class DebuggingStreamSourceConduit extends AbstractStreamSourceConduit<StreamSourceConduit> implements ConduitWithDump {
|
||||
|
||||
private final List<byte[]> data = new CopyOnWriteArrayList<>();
|
||||
|
||||
/**
|
||||
* Construct a new instance.
|
||||
*
|
||||
* @param next the delegate conduit to set
|
||||
*/
|
||||
public DebuggingStreamSourceConduit(StreamSourceConduit next) {
|
||||
super(next);
|
||||
}
|
||||
|
||||
public long transferTo(final long position, final long count, final FileChannel target) throws IOException {
|
||||
return target.transferFrom(new ConduitReadableByteChannel(this), position, count);
|
||||
}
|
||||
|
||||
public long transferTo(final long count, final ByteBuffer throughBuffer, final StreamSinkChannel target) throws IOException {
|
||||
return IoUtils.transfer(new ConduitReadableByteChannel(this), count, throughBuffer, target);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int read(ByteBuffer dst) throws IOException {
|
||||
int pos = dst.position();
|
||||
int res = super.read(dst);
|
||||
if (res > 0) {
|
||||
byte[] d = new byte[res];
|
||||
for (int i = 0; i < res; ++i) {
|
||||
d[i] = dst.get(i + pos);
|
||||
}
|
||||
data.add(d);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long read(ByteBuffer[] dsts, int offs, int len) throws IOException {
|
||||
for (int i = offs; i < len; ++i) {
|
||||
if (dsts[i].hasRemaining()) {
|
||||
return read(dsts[i]);
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String dump() {
|
||||
|
||||
StringBuilder sb = new StringBuilder();
|
||||
for (byte[] datum : data) {
|
||||
sb.append(new String(datum, StandardCharsets.UTF_8));
|
||||
}
|
||||
return sb.toString();
|
||||
}
|
||||
}
|
@@ -0,0 +1,23 @@
package io.kamax.mxisd.http.undertow.conduit;

import io.undertow.server.ConduitWrapper;
import io.undertow.server.HttpServerExchange;
import io.undertow.util.ConduitFactory;
import org.xnio.conduits.Conduit;

public abstract class LazyConduitWrapper<T extends Conduit> implements ConduitWrapper<T> {

    private T conduit = null;

    protected abstract T create(ConduitFactory<T> factory, HttpServerExchange exchange);

    @Override
    public T wrap(ConduitFactory<T> factory, HttpServerExchange exchange) {
        conduit = create(factory, exchange);
        return conduit;
    }

    public T get() {
        return conduit;
    }
}
@@ -58,7 +58,8 @@ public class AuthorizationHandler extends BasicHttpHandler {
            log.error("Account not found from request from: {}", exchange.getHostAndPort());
            throw new InvalidCredentialsException();
        }
        if (account.getExpiresIn() < System.currentTimeMillis()) {
        long expiredAt = (account.getCreatedAt() + account.getExpiresIn()) * 1000; // expired in milliseconds
        if (expiredAt < System.currentTimeMillis()) {
            log.error("Account for '{}' from: {}", account.getUserId(), exchange.getHostAndPort());
            accountManager.deleteAccount(token);
            throw new InvalidCredentialsException();
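The corrected check above treats getExpiresIn() as a lifetime relative to getCreatedAt(), not as an absolute timestamp. In numbers, with hypothetical values and assuming both fields are in seconds as the "* 1000" conversion suggests:

    long createdAt = 1_600_000_000L; // account creation time, in seconds
    long expiresIn = 3_600L;         // token lifetime, in seconds
    long expiredAt = (createdAt + expiresIn) * 1000; // absolute expiry, in milliseconds
    boolean expired = expiredAt < System.currentTimeMillis();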
@@ -33,6 +33,7 @@ import io.kamax.mxisd.util.RestClientUtils;
import io.undertow.server.HttpHandler;
import io.undertow.server.HttpServerExchange;
import io.undertow.server.handlers.form.FormData;
import io.undertow.util.Headers;
import io.undertow.util.HttpString;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
@@ -189,7 +190,7 @@ public abstract class BasicHttpHandler implements HttpHandler {
    }

    protected void respond(HttpServerExchange ex, int status, String errCode, String error) {
        respond(ex, status, buildErrorBody(ex, errCode, error));
        respond(ex, status, buildErrorBody(ex, errCode, error != null ? error : "An error has occurred"));
    }

    protected void handleException(HttpServerExchange exchange, HttpMatrixException ex) {
@@ -203,26 +204,34 @@ public abstract class BasicHttpHandler implements HttpHandler {
    }

    protected void proxyPost(HttpServerExchange exchange, JsonObject body, CloseableHttpClient client, ClientDnsOverwrite dns) {
        proxyPost(exchange, body, client, dns, false);
    }

    protected void proxyPost(HttpServerExchange exchange, JsonObject body, CloseableHttpClient client, ClientDnsOverwrite dns,
                             boolean defaultJsonResponse) {
        String target = dns.transform(URI.create(exchange.getRequestURL())).toString();
        log.info("Requesting remote: {}", target);
        HttpPost req = RestClientUtils.post(target, GsonUtil.get(), body);

        exchange.getRequestHeaders().forEach(header -> {
            header.forEach(v -> {
                String name = header.getHeaderName().toString();
                if (!StringUtils.startsWithIgnoreCase(name, "content-")) {
                    req.addHeader(name, v);
                }
            });
        });
        exchange.getRequestHeaders().forEach(header -> header.forEach(v -> {
            String name = header.getHeaderName().toString();
            if (!StringUtils.startsWithIgnoreCase(name, "content-")) {
                req.addHeader(name, v);
            }
        }));

        boolean missingJsonResponse = true;
        try (CloseableHttpResponse res = client.execute(req)) {
            exchange.setStatusCode(res.getStatusLine().getStatusCode());
            for (Header h : res.getAllHeaders()) {
                for (HeaderElement el : h.getElements()) {
                    missingJsonResponse = !Headers.CONTENT_TYPE_STRING.equalsIgnoreCase(h.getName());
                    exchange.getResponseHeaders().add(HttpString.tryFromString(h.getName()), el.getValue());
                }
            }
            if (defaultJsonResponse && missingJsonResponse) {
                exchange.getRequestHeaders().add(Headers.CONTENT_TYPE, "application/json");
            }
            res.getEntity().writeTo(exchange.getOutputStream());
            exchange.endExchange();
        } catch (IOException e) {
@@ -23,7 +23,7 @@ package io.kamax.mxisd.http.undertow.handler;
import io.kamax.mxisd.auth.AccountManager;
import io.kamax.mxisd.config.PolicyConfig;
import io.kamax.mxisd.exception.InvalidCredentialsException;
import io.kamax.mxisd.storage.ormlite.dao.AccountDao;
import io.kamax.mxisd.exception.TermsNotSignedException;
import io.undertow.server.HttpHandler;
import io.undertow.server.HttpServerExchange;
import org.slf4j.Logger;
@@ -54,6 +54,11 @@ public class CheckTermsHandler extends BasicHttpHandler {

    @Override
    public void handleRequest(HttpServerExchange exchange) throws Exception {
        if (policies == null || policies.isEmpty()) {
            child.handleRequest(exchange);
            return;
        }

        String token = findAccessToken(exchange).orElse(null);
        if (token == null) {
            log.error("Unauthorized request from: {}", exchange.getHostAndPort());
@@ -62,7 +67,7 @@ public class CheckTermsHandler extends BasicHttpHandler {

        if (!accountManager.isTermAccepted(token, policies)) {
            log.error("Non accepting request from: {}", exchange.getHostAndPort());
            throw new InvalidCredentialsException();
            throw new TermsNotSignedException();
        }
        log.trace("Access granted");
        child.handleRequest(exchange);
@@ -0,0 +1,186 @@
|
||||
/*
|
||||
* JBoss, Home of Professional Open Source.
|
||||
* Copyright 2014 Red Hat, Inc., and individual contributors
|
||||
* as indicated by the @author tags.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package io.kamax.mxisd.http.undertow.handler;
|
||||
|
||||
import io.kamax.mxisd.http.undertow.conduit.ConduitWithDump;
|
||||
import io.kamax.mxisd.http.undertow.conduit.DebuggingStreamSinkConduit;
|
||||
import io.kamax.mxisd.http.undertow.conduit.DebuggingStreamSourceConduit;
|
||||
import io.kamax.mxisd.http.undertow.conduit.LazyConduitWrapper;
|
||||
import io.undertow.security.api.SecurityContext;
|
||||
import io.undertow.server.HttpHandler;
|
||||
import io.undertow.server.HttpServerExchange;
|
||||
import io.undertow.server.handlers.Cookie;
|
||||
import io.undertow.util.ConduitFactory;
|
||||
import io.undertow.util.HeaderValues;
|
||||
import io.undertow.util.Headers;
|
||||
import io.undertow.util.LocaleUtils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.xnio.conduits.StreamSinkConduit;
|
||||
import org.xnio.conduits.StreamSourceConduit;
|
||||
|
||||
import java.util.Deque;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Handler that dumps a exchange to a log.
|
||||
*
|
||||
* @author Stuart Douglas
|
||||
*/
|
||||
public class RequestDumpingHandler implements HttpHandler {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(RequestDumpingHandler.class);
|
||||
|
||||
private final HttpHandler next;
|
||||
|
||||
public RequestDumpingHandler(HttpHandler next) {
|
||||
this.next = next;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void handleRequest(HttpServerExchange exchange) throws Exception {
|
||||
LazyConduitWrapper<StreamSourceConduit> requestConduitWrapper = new LazyConduitWrapper<StreamSourceConduit>() {
|
||||
@Override
|
||||
protected StreamSourceConduit create(ConduitFactory<StreamSourceConduit> factory, HttpServerExchange exchange) {
|
||||
return new DebuggingStreamSourceConduit(factory.create());
|
||||
}
|
||||
};
|
||||
LazyConduitWrapper<StreamSinkConduit> responseConduitWrapper = new LazyConduitWrapper<StreamSinkConduit>() {
|
||||
@Override
|
||||
protected StreamSinkConduit create(ConduitFactory<StreamSinkConduit> factory, HttpServerExchange exchange) {
|
||||
return new DebuggingStreamSinkConduit(factory.create());
|
||||
}
|
||||
};
|
||||
exchange.addRequestWrapper(requestConduitWrapper);
|
||||
exchange.addResponseWrapper(responseConduitWrapper);
|
||||
|
||||
final StringBuilder sb = new StringBuilder();
|
||||
// Log pre-service information
|
||||
final SecurityContext sc = exchange.getSecurityContext();
|
||||
sb.append("\n----------------------------REQUEST---------------------------\n");
|
||||
sb.append(" URI=").append(exchange.getRequestURI()).append("\n");
|
||||
sb.append(" characterEncoding=").append(exchange.getRequestHeaders().get(Headers.CONTENT_ENCODING)).append("\n");
|
||||
sb.append(" contentLength=").append(exchange.getRequestContentLength()).append("\n");
|
||||
sb.append(" contentType=").append(exchange.getRequestHeaders().get(Headers.CONTENT_TYPE)).append("\n");
|
||||
//sb.append(" contextPath=" + exchange.getContextPath());
|
||||
if (sc != null) {
|
||||
if (sc.isAuthenticated()) {
|
||||
sb.append(" authType=").append(sc.getMechanismName()).append("\n");
|
||||
sb.append(" principle=").append(sc.getAuthenticatedAccount().getPrincipal()).append("\n");
|
||||
} else {
|
||||
sb.append(" authType=none\n");
|
||||
}
|
||||
}
|
||||
|
||||
Map<String, Cookie> cookies = exchange.getRequestCookies();
|
||||
if (cookies != null) {
|
||||
for (Map.Entry<String, Cookie> entry : cookies.entrySet()) {
|
||||
Cookie cookie = entry.getValue();
|
||||
sb.append(" cookie=").append(cookie.getName()).append("=").append(cookie.getValue()).append("\n");
|
||||
}
|
||||
}
|
||||
for (HeaderValues header : exchange.getRequestHeaders()) {
|
||||
for (String value : header) {
|
||||
sb.append(" header=").append(header.getHeaderName()).append("=").append(value).append("\n");
|
||||
}
|
||||
}
|
||||
sb.append(" locale=").append(LocaleUtils.getLocalesFromHeader(exchange.getRequestHeaders().get(Headers.ACCEPT_LANGUAGE)))
|
||||
.append("\n");
|
||||
sb.append(" method=").append(exchange.getRequestMethod()).append("\n");
|
||||
Map<String, Deque<String>> pnames = exchange.getQueryParameters();
|
||||
for (Map.Entry<String, Deque<String>> entry : pnames.entrySet()) {
|
||||
String pname = entry.getKey();
|
||||
Iterator<String> pvalues = entry.getValue().iterator();
|
||||
sb.append(" parameter=");
|
||||
sb.append(pname);
|
||||
sb.append('=');
|
||||
while (pvalues.hasNext()) {
|
||||
sb.append(pvalues.next());
|
||||
if (pvalues.hasNext()) {
|
||||
sb.append(", ");
|
||||
}
|
||||
}
|
||||
sb.append("\n");
|
||||
}
|
||||
//sb.append(" pathInfo=" + exchange.getPathInfo());
|
||||
sb.append(" protocol=").append(exchange.getProtocol()).append("\n");
|
||||
sb.append(" queryString=").append(exchange.getQueryString()).append("\n");
|
||||
sb.append(" remoteAddr=").append(exchange.getSourceAddress()).append("\n");
|
||||
sb.append(" remoteHost=").append(exchange.getSourceAddress().getHostName()).append("\n");
|
||||
//sb.append("requestedSessionId=" + exchange.getRequestedSessionId());
|
||||
sb.append(" scheme=").append(exchange.getRequestScheme()).append("\n");
|
||||
sb.append(" host=").append(exchange.getRequestHeaders().getFirst(Headers.HOST)).append("\n");
|
||||
sb.append(" serverPort=").append(exchange.getDestinationAddress().getPort()).append("\n");
|
||||
//sb.append(" servletPath=" + exchange.getServletPath());
|
||||
sb.append(" isSecure=").append(exchange.isSecure()).append("\n");
|
||||
|
||||
exchange.addExchangeCompleteListener((exchange1, nextListener) -> {
|
||||
StreamSourceConduit sourceConduit = requestConduitWrapper.get();
|
||||
if (sourceConduit instanceof ConduitWithDump) {
|
||||
ConduitWithDump conduitWithDump = (ConduitWithDump) sourceConduit;
|
||||
sb.append("body=\n");
|
||||
sb.append(conduitWithDump.dump()).append("\n");
|
||||
}
|
||||
|
||||
// Log post-service information
|
||||
sb.append("--------------------------RESPONSE--------------------------\n");
|
||||
if (sc != null) {
|
||||
if (sc.isAuthenticated()) {
|
||||
sb.append(" authType=").append(sc.getMechanismName()).append("\n");
|
||||
sb.append(" principle=").append(sc.getAuthenticatedAccount().getPrincipal()).append("\n");
|
||||
} else {
|
||||
sb.append(" authType=none\n");
|
||||
}
|
||||
}
|
||||
sb.append(" contentLength=").append(exchange1.getResponseContentLength()).append("\n");
|
||||
sb.append(" contentType=").append(exchange1.getResponseHeaders().getFirst(Headers.CONTENT_TYPE)).append("\n");
|
||||
Map<String, Cookie> cookies1 = exchange1.getResponseCookies();
|
||||
if (cookies1 != null) {
|
||||
for (Cookie cookie : cookies1.values()) {
|
||||
sb.append(" cookie=").append(cookie.getName()).append("=").append(cookie.getValue()).append("; domain=")
|
||||
.append(cookie.getDomain()).append("; path=").append(cookie.getPath()).append("\n");
|
||||
}
|
||||
}
|
||||
for (HeaderValues header : exchange1.getResponseHeaders()) {
|
||||
for (String value : header) {
|
||||
sb.append(" header=").append(header.getHeaderName()).append("=").append(value).append("\n");
|
||||
}
|
||||
}
|
||||
sb.append(" status=").append(exchange1.getStatusCode()).append("\n");
|
||||
StreamSinkConduit streamSinkConduit = responseConduitWrapper.get();
|
||||
if (streamSinkConduit instanceof ConduitWithDump) {
|
||||
ConduitWithDump conduitWithDump = (ConduitWithDump) streamSinkConduit;
|
||||
sb.append("body=\n");
|
||||
sb.append(conduitWithDump.dump());
|
||||
|
||||
}
|
||||
|
||||
sb.append("\n==============================================================");
|
||||
|
||||
|
||||
nextListener.proceed();
|
||||
LOGGER.info(sb.toString());
|
||||
});
|
||||
|
||||
|
||||
// Perform the exchange
|
||||
next.handleRequest(exchange);
|
||||
}
|
||||
}
|
@@ -82,7 +82,10 @@ public class SaneHandler extends BasicHttpHandler {
        } catch (InvalidJsonException e) {
            respond(exchange, HttpStatus.SC_BAD_REQUEST, e.getErrorCode(), e.getError());
        } catch (InvalidCredentialsException e) {
            log.error("Unauthorized: ", e);
            respond(exchange, HttpStatus.SC_UNAUTHORIZED, "M_UNAUTHORIZED", e.getMessage());
        } catch (TermsNotSignedException e) {
            respond(exchange, HttpStatus.SC_FORBIDDEN, "M_TERMS_NOT_SIGNED", e.getMessage());
        } catch (ObjectNotFoundException e) {
            respond(exchange, HttpStatus.SC_NOT_FOUND, "M_NOT_FOUND", e.getMessage());
        } catch (NotImplementedException e) {
@@ -29,10 +29,7 @@ import java.util.Optional;
public abstract class ApplicationServiceHandler extends BasicHttpHandler {

    protected String getToken(HttpServerExchange ex) {
        return Optional.ofNullable(ex.getQueryParameters()
                .getOrDefault("access_token", new LinkedList<>())
                .peekFirst()
        ).orElse("");
        return getAccessToken(ex);
    }

}
@@ -24,6 +24,6 @@ import io.kamax.mxisd.http.undertow.handler.BasicHttpHandler;

public abstract class LoginHandler extends BasicHttpHandler {

    public static final String Path = "/_matrix/client/r0/login";
    public static final String Path = "/_matrix/client/v3/login";

}
@@ -48,7 +48,7 @@ public class AccountRegisterHandler extends BasicHttpHandler {

        if (LOGGER.isInfoEnabled()) {
            LOGGER.info("Registration from domain: {}, expired at {}", openIdToken.getMatrixServerName(),
                    new Date(openIdToken.getExpiredIn()));
                    new Date(System.currentTimeMillis() + openIdToken.getExpiresIn()));
        }

        String token = accountManager.register(openIdToken);
@@ -31,7 +31,7 @@ import java.net.URI;

public class UserDirectorySearchHandler extends HomeserverProxyHandler {

    public static final String Path = "/_matrix/client/r0/user_directory/search";
    public static final String Path = "/_matrix/client/v3/user_directory/search";

    private DirectoryManager mgr;

@@ -31,6 +31,8 @@ public class HashDetailsHandler extends BasicHttpHandler {
            for (HashingConfig.Algorithm algorithm : config.getAlgorithms()) {
                algorithms.add(algorithm.name().toLowerCase());
            }
        } else {
            algorithms.add(HashingConfig.Algorithm.none.name().toLowerCase());
        }
        response.add("algorithms", algorithms);
        return response;
@@ -31,6 +31,8 @@ import io.kamax.mxisd.http.undertow.handler.ApiHandler;
import io.kamax.mxisd.http.undertow.handler.identity.share.LookupHandler;
import io.kamax.mxisd.lookup.BulkLookupRequest;
import io.kamax.mxisd.lookup.HashLookupRequest;
import io.kamax.mxisd.lookup.SingleLookupReply;
import io.kamax.mxisd.lookup.SingleLookupRequest;
import io.kamax.mxisd.lookup.ThreePidMapping;
import io.kamax.mxisd.lookup.strategy.LookupStrategy;
import io.undertow.server.HttpServerExchange;
@@ -40,6 +42,7 @@ import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.List;
import java.util.Optional;

public class HashLookupHandler extends LookupHandler implements ApiHandler {

@@ -60,13 +63,10 @@ public class HashLookupHandler extends LookupHandler implements ApiHandler {
        ClientHashLookupRequest input = parseJsonTo(exchange, ClientHashLookupRequest.class);
        HashLookupRequest lookupRequest = new HashLookupRequest();
        setRequesterInfo(lookupRequest, exchange);
        lookupRequest.setHashes(input.getAddresses());
        log.info("Got bulk lookup request from {} with client {} - Is recursive? {}",
                lookupRequest.getRequester(), lookupRequest.getUserAgent(), lookupRequest.isRecursive());

        if (!hashManager.getConfig().isEnabled()) {
            throw new InvalidParamException();
        }

        if (!hashManager.getHashEngine().getPepper().equals(input.getPepper())) {
            throw new InvalidPepperException();
        }
@@ -81,20 +81,33 @@ public class HashLookupHandler extends LookupHandler implements ApiHandler {
            default:
                throw new InvalidParamException();
        }
        hashManager.getRotationStrategy().newRequest();
    }

    private void noneAlgorithm(HttpServerExchange exchange, HashLookupRequest request, ClientHashLookupRequest input) throws Exception {
        if (!hashManager.getConfig().getAlgorithms().contains(HashingConfig.Algorithm.NONE)) {
        if (hashManager.getConfig().isEnabled() && !hashManager.getConfig().getAlgorithms().contains(HashingConfig.Algorithm.none)) {
            throw new InvalidParamException();
        }

        ClientHashLookupAnswer answer = null;
        if (input.getAddresses() != null && input.getAddresses().size() > 0) {
            if (input.getAddresses().size() == 1) {
                answer = noneSingleLookup(request, input);
            } else {
                answer = noneBulkLookup(request, input);
            }
        }
        respondJson(exchange, answer != null ? answer : new ClientHashLookupAnswer());
    }

    private ClientHashLookupAnswer noneBulkLookup(HashLookupRequest request, ClientHashLookupRequest input) throws Exception {
        BulkLookupRequest bulkLookupRequest = new BulkLookupRequest();
        List<ThreePidMapping> mappings = new ArrayList<>();
        for (String address : input.getAddresses()) {
            String[] parts = address.split(" ");
            ThreePidMapping mapping = new ThreePidMapping();
            mapping.setMedium(parts[0]);
            mapping.setValue(parts[1]);
            mapping.setMedium(parts[1]);
            mapping.setValue(parts[0]);
            mappings.add(mapping);
        }
        bulkLookupRequest.setMappings(mappings);
@@ -106,19 +119,42 @@ public class HashLookupHandler extends LookupHandler implements ApiHandler {
        }
        log.info("Finished bulk lookup request from {}", request.getRequester());

        respondJson(exchange, answer);
        return answer;
    }

    private ClientHashLookupAnswer noneSingleLookup(HashLookupRequest request, ClientHashLookupRequest input) {
        SingleLookupRequest singleLookupRequest = new SingleLookupRequest();
        String address = input.getAddresses().get(0);
        String[] parts = address.split(" ");
        singleLookupRequest.setThreePid(parts[0]);
        singleLookupRequest.setType(parts[1]);

        ClientHashLookupAnswer answer = new ClientHashLookupAnswer();

        Optional<SingleLookupReply> singleLookupReply = strategy.find(singleLookupRequest);
        if (singleLookupReply.isPresent()) {
            SingleLookupReply reply = singleLookupReply.get();
            answer.getMappings().put(address, reply.getMxid().toString());
        }
        log.info("Finished single lookup request from {}", request.getRequester());

        return answer;
    }

    private void sha256Algorithm(HttpServerExchange exchange, HashLookupRequest request, ClientHashLookupRequest input) {
        if (!hashManager.getConfig().getAlgorithms().contains(HashingConfig.Algorithm.SHA256)) {
        if (!hashManager.getConfig().getAlgorithms().contains(HashingConfig.Algorithm.sha256)) {
            throw new InvalidParamException();
        }

        ClientHashLookupAnswer answer = new ClientHashLookupAnswer();
        for (Pair<String, ThreePidMapping> pair : hashManager.getHashStorage().find(request.getHashes())) {
            answer.getMappings().put(pair.getKey(), pair.getValue().getMxid());
        if (request.getHashes() != null && !request.getHashes().isEmpty()) {
            for (Pair<String, ThreePidMapping> pair : hashManager.getHashStorage().find(request.getHashes())) {
                answer.getMappings().put(pair.getKey(), pair.getValue().getMxid());
            }
            log.info("Finished bulk lookup request from {}", request.getRequester());
        } else {
            log.warn("Empty request");
        }
        log.info("Finished bulk lookup request from {}", request.getRequester());

        respondJson(exchange, answer);
    }
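The bulk-lookup fix in this handler swaps the two halves of each entry: with the none algorithm a client submits plain "<address> <medium>" pairs, so the first token is the 3PID value and the second is the medium. A small sketch of the corrected parsing; the sample entry is hypothetical:

    String entry = "alice@example.org email"; // hypothetical client-supplied entry for the "none" algorithm
    String[] parts = entry.split(" ");
    ThreePidMapping mapping = new ThreePidMapping();
    mapping.setValue(parts[0]);  // "alice@example.org"
    mapping.setMedium(parts[1]); // "email"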
@@ -0,0 +1,30 @@
package io.kamax.mxisd.http.undertow.handler.internal;

import com.google.gson.JsonObject;
import io.kamax.mxisd.http.undertow.handler.BasicHttpHandler;
import io.kamax.mxisd.invitation.InvitationManager;
import io.undertow.server.HttpServerExchange;

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class InternalInviteManagerHandler extends BasicHttpHandler {

    public static final String PATH = "/_ma1sd/internal/admin/inv_manager";

    private final InvitationManager invitationManager;
    private final ExecutorService executors = Executors.newFixedThreadPool(1);

    public InternalInviteManagerHandler(InvitationManager invitationManager) {
        this.invitationManager = invitationManager;
    }

    @Override
    public void handleRequest(HttpServerExchange exchange) throws Exception {
        executors.submit(invitationManager::doMaintenance);

        JsonObject obj = new JsonObject();
        obj.addProperty("result", "ok");
        respond(exchange, obj);
    }
}
@@ -44,7 +44,7 @@ import java.util.Optional;

public class RoomInviteHandler extends BasicHttpHandler {

    public static final String Path = "/_matrix/client/r0/rooms/{roomId}/invite";
    public static final String Path = "/_matrix/client/v3/rooms/{roomId}/invite";

    private static final Logger log = LoggerFactory.getLogger(RoomInviteHandler.class);

@@ -62,7 +62,7 @@ public class RoomInviteHandler extends BasicHttpHandler {
    public void handleRequest(HttpServerExchange exchange) {
        String accessToken = getAccessToken(exchange);

        String whoamiUri = dns.transform(URI.create(exchange.getRequestURL()).resolve(URI.create("/_matrix/client/r0/account/whoami"))).toString();
        String whoamiUri = dns.transform(URI.create(exchange.getRequestURL()).resolve(URI.create("/_matrix/client/v3/account/whoami"))).toString();
        log.info("Who Am I URL: {}", whoamiUri);
        HttpGet whoAmIReq = new HttpGet(whoamiUri);
        whoAmIReq.addHeader("Authorization", "Bearer " + accessToken);
@@ -34,7 +34,7 @@ import java.util.Optional;
public class ProfileHandler extends HomeserverProxyHandler {

    public static final String UserID = "userId";
    public static final String Path = "/_matrix/client/r0/profile/{" + UserID + "}";
    public static final String Path = "/_matrix/client/v3/profile/{" + UserID + "}";

    protected ProfileManager mgr;

@@ -38,7 +38,7 @@ import org.slf4j.LoggerFactory;
public class Register3pidRequestTokenHandler extends BasicHttpHandler {

    public static final String Key = "medium";
    public static final String Path = "/_matrix/client/r0/register/{" + Key + "}/requestToken";
    public static final String Path = "/_matrix/client/v3/register/{" + Key + "}/requestToken";

    private static final Logger log = LoggerFactory.getLogger(Register3pidRequestTokenHandler.class);

@@ -71,7 +71,7 @@ public class Register3pidRequestTokenHandler extends BasicHttpHandler {
            throw new NotAllowedException("Your " + medium + " address cannot be used for registration");
        }

        proxyPost(exchange, body, client, dns);
        proxyPost(exchange, body, client, dns, true);
    }

}
@@ -2,7 +2,6 @@ package io.kamax.mxisd.http.undertow.handler.term.v2;

import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import io.kamax.matrix.json.GsonUtil;
import io.kamax.mxisd.auth.AccountManager;
import io.kamax.mxisd.exception.InvalidCredentialsException;
import io.kamax.mxisd.http.undertow.handler.BasicHttpHandler;
@@ -28,7 +27,7 @@ public class AcceptTermsHandler extends BasicHttpHandler {
        String token = getAccessToken(exchange);

        JsonObject request = parseJsonObject(exchange);
        JsonObject accepts = GsonUtil.getObj(request, "user_accepts");
        JsonElement accepts = request.get("user_accepts");
        AccountDao account = accountManager.findAccount(token);

        if (account == null) {
@@ -29,7 +29,11 @@ import io.kamax.matrix.json.GsonUtil;
import io.kamax.mxisd.config.InvitationConfig;
import io.kamax.mxisd.config.MxisdConfig;
import io.kamax.mxisd.config.ServerConfig;
import io.kamax.mxisd.crypto.*;
import io.kamax.mxisd.crypto.GenericKeyIdentifier;
import io.kamax.mxisd.crypto.KeyIdentifier;
import io.kamax.mxisd.crypto.KeyManager;
import io.kamax.mxisd.crypto.KeyType;
import io.kamax.mxisd.crypto.SignatureManager;
import io.kamax.mxisd.exception.BadRequestException;
import io.kamax.mxisd.exception.ConfigurationException;
import io.kamax.mxisd.exception.MappingAlreadyExistsException;
@@ -38,6 +42,7 @@ import io.kamax.mxisd.lookup.SingleLookupReply;
import io.kamax.mxisd.lookup.ThreePidMapping;
import io.kamax.mxisd.lookup.strategy.LookupStrategy;
import io.kamax.mxisd.matrix.HomeserverFederationResolver;
import io.kamax.mxisd.matrix.HomeserverVerifier;
import io.kamax.mxisd.notification.NotificationManager;
import io.kamax.mxisd.profile.ProfileManager;
import io.kamax.mxisd.storage.IStorage;
@@ -48,23 +53,26 @@ import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.conn.ssl.NoopHostnameVerifier;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.conn.ssl.TrustSelfSignedStrategy;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.ssl.SSLContextBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.SSLContext;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.time.DateTimeException;
import java.time.Instant;
import java.util.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.TimeUnit;
@@ -86,20 +94,19 @@ public class InvitationManager {
    private NotificationManager notifMgr;
    private ProfileManager profileMgr;

    private CloseableHttpClient client;
    private Timer refreshTimer;

    private Map<String, IThreePidInviteReply> invitations = new ConcurrentHashMap<>();

    public InvitationManager(
        MxisdConfig mxisdCfg,
        IStorage storage,
        LookupStrategy lookupMgr,
        KeyManager keyMgr,
        SignatureManager signMgr,
        HomeserverFederationResolver resolver,
        NotificationManager notifMgr,
        ProfileManager profileMgr
            MxisdConfig mxisdCfg,
            IStorage storage,
            LookupStrategy lookupMgr,
            KeyManager keyMgr,
            SignatureManager signMgr,
            HomeserverFederationResolver resolver,
            NotificationManager notifMgr,
            ProfileManager profileMgr
    ) {
        this.cfg = requireValid(mxisdCfg);
        this.srvCfg = mxisdCfg.getServer();
@@ -117,11 +124,11 @@ public class InvitationManager {
            io.getProperties().putIfAbsent(CreatedAtPropertyKey, defaultCreateTs);
            log.debug("Processing invite {}", GsonUtil.get().toJson(io));
            ThreePidInvite invite = new ThreePidInvite(
                MatrixID.asAcceptable(io.getSender()),
                io.getMedium(),
                io.getAddress(),
                io.getRoomId(),
                io.getProperties()
                    MatrixID.asAcceptable(io.getSender()),
                    io.getMedium(),
                    io.getAddress(),
                    io.getRoomId(),
                    io.getProperties()
            );

            ThreePidInviteReply reply = new ThreePidInviteReply(io.getId(), invite, io.getToken(), "", Collections.emptyList());
@@ -129,17 +136,6 @@ public class InvitationManager {
        });
        log.info("Loaded saved invites");

        // FIXME export such madness into matrix-java-sdk with a nice wrapper to talk to a homeserver
        try {
            SSLContext sslContext = SSLContextBuilder.create().loadTrustMaterial(new TrustSelfSignedStrategy()).build();
            HostnameVerifier hostnameVerifier = new NoopHostnameVerifier();
            SSLConnectionSocketFactory sslSocketFactory = new SSLConnectionSocketFactory(sslContext, hostnameVerifier);
            client = HttpClients.custom().setSSLSocketFactory(sslSocketFactory).build();
        } catch (Exception e) {
            // FIXME do better...
            throw new RuntimeException(e);
        }

        log.info("Setting up invitation mapping refresh timer");
        refreshTimer = new Timer();

@@ -159,7 +155,17 @@ public class InvitationManager {
                    log.error("Error when running background maintenance", t);
                }
            }
        }, 5000L, TimeUnit.MILLISECONDS.convert(cfg.getResolution().getTimer(), TimeUnit.MINUTES));
        }, 5000L, TimeUnit.MILLISECONDS.convert(cfg.getResolution().getTimer(), getTimeUnit()));
    }

    private TimeUnit getTimeUnit() {
        switch (cfg.getResolution().getPeriod()) {
            case seconds:
                return TimeUnit.SECONDS;
            case minutes:
            default:
                return TimeUnit.MINUTES;
        }
    }

    private InvitationConfig requireValid(MxisdConfig cfg) {
@@ -180,7 +186,8 @@ public class InvitationManager {
        if (StringUtils.isBlank(cfg.getInvite().getExpiration().getResolveTo())) {
            String localpart = cfg.getAppsvc().getUser().getInviteExpired();
            if (StringUtils.isBlank(localpart)) {
                throw new ConfigurationException("Could not compute the Invitation expiration resolution target from App service user: not set");
                throw new ConfigurationException(
                        "Could not compute the Invitation expiration resolution target from App service user: not set");
            }

            cfg.getInvite().getExpiration().setResolveTo(MatrixID.asAcceptable(localpart, cfg.getMatrix().getDomain()).getId());
@@ -202,7 +209,8 @@ public class InvitationManager {
    }

    private String getIdForLog(IThreePidInviteReply reply) {
        return reply.getInvite().getSender().getId() + ":" + reply.getInvite().getRoomId() + ":" + reply.getInvite().getMedium() + ":" + reply.getInvite().getAddress();
        return reply.getInvite().getSender().getId() + ":" + reply.getInvite().getRoomId() + ":" + reply.getInvite()
                .getMedium() + ":" + reply.getInvite().getAddress();
    }

    private Optional<SingleLookupReply> lookup3pid(String medium, String address) {
@@ -256,13 +264,16 @@ public class InvitationManager {
        }

        String invId = computeId(invitation);
        log.info("Handling invite for {}:{} from {} in room {}", invitation.getMedium(), invitation.getAddress(), invitation.getSender(), invitation.getRoomId());
        log.info("Handling invite for {}:{} from {} in room {}", invitation.getMedium(), invitation.getAddress(), invitation.getSender(),
                invitation.getRoomId());
        IThreePidInviteReply reply = invitations.get(invId);
        if (reply != null) {
            log.info("Invite is already pending for {}:{}, returning data", invitation.getMedium(), invitation.getAddress());
            if (!StringUtils.equals(invitation.getRoomId(), reply.getInvite().getRoomId())) {
                log.info("Sending new notification as new invite room {} is different from the original {}", invitation.getRoomId(), reply.getInvite().getRoomId());
                notifMgr.sendForReply(new ThreePidInviteReply(reply.getId(), invitation, reply.getToken(), reply.getDisplayName(), reply.getPublicKeys()));
                log.info("Sending new notification as new invite room {} is different from the original {}", invitation.getRoomId(),
                        reply.getInvite().getRoomId());
                notifMgr.sendForReply(
                        new ThreePidInviteReply(reply.getId(), invitation, reply.getToken(), reply.getDisplayName(), reply.getPublicKeys()));
            } else {
                // FIXME we should check attempt and send if bigger
            }
@@ -276,7 +287,7 @@ public class InvitationManager {
        }

        String token = RandomStringUtils.randomAlphanumeric(64);
        String displayName = invitation.getAddress().substring(0, 3) + "...";
        String displayName = getInvitedDisplayName(invitation.getAddress());
        KeyIdentifier pKeyId = keyMgr.getServerSigningKey().getId();
        KeyIdentifier eKeyId = keyMgr.generateKey(KeyType.Ephemeral);

@@ -299,11 +310,20 @@ public class InvitationManager {
        log.info("Storing invite under ID {}", invId);
        storage.insertInvite(reply);
        invitations.put(invId, reply);
        log.info("A new invite has been created for {}:{} on HS {}", invitation.getMedium(), invitation.getAddress(), invitation.getSender().getDomain());
        log.info("A new invite has been created for {}:{} on HS {}", invitation.getMedium(), invitation.getAddress(),
                invitation.getSender().getDomain());

        return reply;
    }

    private String getInvitedDisplayName(String origin) {
        if (cfg.isFullDisplayName()) {
            return origin;
        } else {
            return origin.substring(0, 3) + "...";
        }
    }

    public boolean hasInvite(ThreePid tpid) {
        for (IThreePidInviteReply reply : invitations.values()) {
            if (!StringUtils.equals(tpid.getMedium(), reply.getInvite().getMedium())) {
@@ -389,8 +409,10 @@ public class InvitationManager {
    public void publishMappingIfInvited(ThreePidMapping threePid) {
        log.info("Looking up possible pending invites for {}:{}", threePid.getMedium(), threePid.getValue());
        for (IThreePidInviteReply reply : invitations.values()) {
            if (StringUtils.equalsIgnoreCase(reply.getInvite().getMedium(), threePid.getMedium()) && StringUtils.equalsIgnoreCase(reply.getInvite().getAddress(), threePid.getValue())) {
                log.info("{}:{} has an invite pending on HS {}, publishing mapping", threePid.getMedium(), threePid.getValue(), reply.getInvite().getSender().getDomain());
            if (StringUtils.equalsIgnoreCase(reply.getInvite().getMedium(), threePid.getMedium()) && StringUtils
                    .equalsIgnoreCase(reply.getInvite().getAddress(), threePid.getValue())) {
                log.info("{}:{} has an invite pending on HS {}, publishing mapping", threePid.getMedium(), threePid.getValue(),
                        reply.getInvite().getSender().getDomain());
                publishMapping(reply, threePid.getMxid());
            }
        }
@@ -423,11 +445,11 @@ public class InvitationManager {
        String address = reply.getInvite().getAddress();
        String domain = reply.getInvite().getSender().getDomain();
        log.info("Discovering HS for domain {}", domain);
        String hsUrlOpt = resolver.resolve(domain).toString();
        HomeserverFederationResolver.HomeserverTarget hsUrlOpt = resolver.resolve(domain);

        // TODO this is needed as this will block if called during authentication cycle due to synapse implementation
        new Thread(() -> { // FIXME need to make this retry-able and within a general background working pool
            HttpPost req = new HttpPost(hsUrlOpt + "/_matrix/federation/v1/3pid/onbind");
            HttpPost req = new HttpPost(hsUrlOpt.getUrl().toString() + "/_matrix/federation/v1/3pid/onbind");
            // Expected body: https://matrix.to/#/!HUeDbmFUsWAhxHHvFG:matrix.org/$150469846739DCLWc:matrix.trancendances.fr
            JsonObject obj = new JsonObject();
            obj.addProperty("mxid", mxid);
@@ -459,36 +481,41 @@ public class InvitationManager {
            Instant resolvedAt = Instant.now();
            boolean couldPublish = false;
            boolean shouldArchive = true;
            try {
                log.info("Posting onBind event to {}", req.getURI());
                CloseableHttpResponse response = client.execute(req);
                int statusCode = response.getStatusLine().getStatusCode();
                log.info("Answer code: {}", statusCode);
                if (statusCode >= 300 && statusCode != 403) {
                    log.info("Answer body: {}", IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8));
                    log.warn("HS returned an error.");
            try (CloseableHttpClient httpClient = HttpClients.custom().setSSLHostnameVerifier(new HomeserverVerifier(hsUrlOpt.getDomain()))
                    .build()) {
                try {
                    log.info("Posting onBind event to {}", req.getURI());
                    CloseableHttpResponse response = httpClient.execute(req);
                    int statusCode = response.getStatusLine().getStatusCode();
                    log.info("Answer code: {}", statusCode);
                    if (statusCode >= 300 && statusCode != 403) {
                        log.info("Answer body: {}", IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8));
                        log.warn("HS returned an error.");

                    shouldArchive = statusCode != 502;
                        shouldArchive = statusCode != 502;
                        if (shouldArchive) {
                            log.info("Invite can be found in historical storage for manual re-processing");
                        }
                    } else {
                        couldPublish = true;
                        if (statusCode == 403) {
                            log.info("Invite is obsolete or no longer under our control");
                        }
                    }
                    response.close();
                } catch (IOException e) {
                    log.warn("Unable to tell HS {} about invite being mapped", domain, e);
                } finally {
                    if (shouldArchive) {
                        log.info("Invite can be found in historical storage for manual re-processing");
                    }
                } else {
                    couldPublish = true;
                    if (statusCode == 403) {
                        log.info("Invite is obsolete or no longer under our control");
                        synchronized (this) {
                            storage.insertHistoricalInvite(reply, mxid, resolvedAt, couldPublish);
                            removeInvite(reply);
                            log.info("Moved invite {} to historical table", reply.getId());
                        }
                    }
                }
                response.close();
            } catch (IOException e) {
                log.warn("Unable to tell HS {} about invite being mapped", domain, e);
            } finally {
                if (shouldArchive) {
                    synchronized (this) {
                        storage.insertHistoricalInvite(reply, mxid, resolvedAt, couldPublish);
                        removeInvite(reply);
                        log.info("Moved invite {} to historical table", reply.getId());
                    }
                }
                log.error("Unable to create client to the " + hsUrlOpt.getUrl().toString(), e);
            }
        }).start();
    }
@@ -46,6 +46,4 @@ public interface LookupStrategy {
    Optional<SingleLookupReply> findRecursive(SingleLookupRequest request);

    CompletableFuture<List<ThreePidMapping>> find(BulkLookupRequest requests);

    CompletableFuture<List<ThreePidMapping>> find(HashLookupRequest request);
}
@@ -26,17 +26,23 @@ import io.kamax.matrix.json.MatrixJson;
import io.kamax.mxisd.config.MxisdConfig;
import io.kamax.mxisd.exception.ConfigurationException;
import io.kamax.mxisd.hash.HashManager;
import io.kamax.mxisd.hash.storage.HashStorage;
import io.kamax.mxisd.lookup.*;
import io.kamax.mxisd.lookup.ALookupRequest;
import io.kamax.mxisd.lookup.BulkLookupRequest;
import io.kamax.mxisd.lookup.SingleLookupReply;
import io.kamax.mxisd.lookup.SingleLookupRequest;
import io.kamax.mxisd.lookup.ThreePidMapping;
import io.kamax.mxisd.lookup.fetcher.IBridgeFetcher;
import io.kamax.mxisd.lookup.provider.IThreePidProvider;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.net.UnknownHostException;
import java.util.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
@@ -238,13 +244,4 @@ public class RecursivePriorityLookupStrategy implements LookupStrategy {
        result.complete(mapFoundAll);
        return bulkLookupInProgress.remove(payloadId);
    }

    @Override
    public CompletableFuture<List<ThreePidMapping>> find(HashLookupRequest request) {
        HashStorage hashStorage = hashManager.getHashStorage();
        CompletableFuture<List<ThreePidMapping>> result = new CompletableFuture<>();
        result.complete(hashStorage.find(request.getHashes()).stream().map(Pair::getValue).collect(Collectors.toList()));
        hashManager.getRotationStrategy().newRequest();
        return result;
    }
}
@@ -130,7 +130,9 @@ public class HomeserverFederationResolver {

            return Optional.empty();
        } catch (IOException e) {
            throw new RuntimeException("Error while trying to lookup well-known for " + domain, e);
            log.info("Error while trying to lookup well-known for " + domain);
            log.trace("Error while trying to lookup well-known for " + domain, e);
            return Optional.empty();
        }
    }

@@ -178,26 +180,26 @@ public class HomeserverFederationResolver {
        }
    }

    public URL resolve(String domain) {
    public HomeserverTarget resolve(String domain) {
        Optional<URL> s1 = resolveOverwrite(domain);
        if (s1.isPresent()) {
            URL dest = s1.get();
            log.info("Resolution of {} via DNS overwrite to {}", domain, dest);
            return dest;
            return new HomeserverTarget(dest.getHost(), dest);
        }

        Optional<URL> s2 = resolveLiteral(domain);
        if (s2.isPresent()) {
            URL dest = s2.get();
            log.info("Resolution of {} as IP literal or IP/hostname with explicit port to {}", domain, dest);
            return dest;
            return new HomeserverTarget(dest.getHost(), dest);
        }

        Optional<URL> s3 = resolveWellKnown(domain);
        if (s3.isPresent()) {
            URL dest = s3.get();
            log.info("Resolution of {} via well-known to {}", domain, dest);
            return dest;
            return new HomeserverTarget(dest.getHost(), dest);
        }
        // The domain needs to be resolved

@@ -205,12 +207,30 @@ public class HomeserverFederationResolver {
        if (s4.isPresent()) {
            URL dest = s4.get();
            log.info("Resolution of {} via DNS SRV record to {}", domain, dest);
            return dest;
            return new HomeserverTarget(domain, dest);
        }

        URL dest = build(domain + ":" + getDefaultPort());
        log.info("Resolution of {} to {}", domain, dest);
        return dest;
        return new HomeserverTarget(dest.getHost(), dest);
    }

    public static class HomeserverTarget {

        private final String domain;
        private final URL url;

        HomeserverTarget(String domain, URL url) {
            this.domain = domain;
            this.url = url;
        }

        public String getDomain() {
            return domain;
        }

        public URL getUrl() {
            return url;
        }
    }
}
86 src/main/java/io/kamax/mxisd/matrix/HomeserverVerifier.java Normal file
@@ -0,0 +1,86 @@
package io.kamax.mxisd.matrix;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.security.cert.Certificate;
import java.security.cert.CertificateParsingException;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.SSLPeerUnverifiedException;
import javax.net.ssl.SSLSession;

public class HomeserverVerifier implements HostnameVerifier {

    private static final Logger LOGGER = LoggerFactory.getLogger(HomeserverVerifier.class);
    private static final String ALT_DNS_NAME_TYPE = "2";
    private static final String ALT_IP_ADDRESS_TYPE = "7";

    private final String matrixHostname;

    public HomeserverVerifier(String matrixHostname) {
        this.matrixHostname = matrixHostname;
    }

    @Override
    public boolean verify(String hostname, SSLSession session) {
        try {
            Certificate peerCertificate = session.getPeerCertificates()[0];
            if (peerCertificate instanceof X509Certificate) {
                X509Certificate x509Certificate = (X509Certificate) peerCertificate;
                if (x509Certificate.getSubjectAlternativeNames() == null) {
                    return false;
                }
                for (String altSubjectName : getAltSubjectNames(x509Certificate)) {
                    if (match(altSubjectName)) {
                        return true;
                    }
                }
            }
        } catch (SSLPeerUnverifiedException | CertificateParsingException e) {
            LOGGER.error("Unable to check remote host", e);
            return false;
        }

        return false;
    }

    private List<String> getAltSubjectNames(X509Certificate x509Certificate) {
        List<String> subjectNames = new ArrayList<>();
        try {
            for (List<?> subjectAlternativeNames : x509Certificate.getSubjectAlternativeNames()) {
                if (subjectAlternativeNames == null
                    || subjectAlternativeNames.size() < 2
                    || subjectAlternativeNames.get(0) == null
                    || subjectAlternativeNames.get(1) == null) {
                    continue;
                }
                String subjectType = subjectAlternativeNames.get(0).toString();
                switch (subjectType) {
                    case ALT_DNS_NAME_TYPE:
                    case ALT_IP_ADDRESS_TYPE:
                        subjectNames.add(subjectAlternativeNames.get(1).toString());
                        break;
                    default:
                        LOGGER.trace("Unusable subject type: " + subjectType);
                }
            }
        } catch (CertificateParsingException e) {
            LOGGER.error("Unable to parse the certificate", e);
            return Collections.emptyList();
        }
        return subjectNames;
    }

    private boolean match(String altSubjectName) {
        if (altSubjectName.startsWith("*.")) {
            String subjectNameWithoutMask = altSubjectName.substring(1); // remove wildcard
            return matrixHostname.toLowerCase().endsWith(subjectNameWithoutMask.toLowerCase());
        } else {
            return matrixHostname.equalsIgnoreCase(altSubjectName);
        }
    }
}
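HomeserverVerifier accepts a certificate whose subjectAltName entries (DNS or IP address types) either equal the expected Matrix hostname case-insensitively or cover it via a leading "*." wildcard. A hedged wiring sketch with Apache HttpClient, mirroring how the SessionManager change below uses it; hostnames and the URL are placeholders and error handling is trimmed:

    // Hedged sketch: the verifier is bound to the Matrix hostname, so the certificate
    // presented by the resolved endpoint must cover that name, not the endpoint's own name.
    // IOException handling omitted for brevity.
    try (CloseableHttpClient client = HttpClients.custom()
            .setSSLHostnameVerifier(new HomeserverVerifier("matrix.example.org"))
            .build();
         CloseableHttpResponse response = client.execute(
                 new HttpGet("https://federation.example.net/_matrix/key/v2/server"))) {
        int status = response.getStatusLine().getStatusCode(); // only reached if the certificate matched
    }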
@@ -39,6 +39,7 @@ import io.kamax.mxisd.lookup.SingleLookupReply;
import io.kamax.mxisd.lookup.SingleLookupRequest;
import io.kamax.mxisd.lookup.ThreePidValidation;
import io.kamax.mxisd.matrix.HomeserverFederationResolver;
import io.kamax.mxisd.matrix.HomeserverVerifier;
import io.kamax.mxisd.notification.NotificationManager;
import io.kamax.mxisd.storage.IStorage;
import io.kamax.mxisd.storage.dao.IThreePidSessionDao;
@@ -53,12 +54,14 @@ import org.apache.commons.lang3.StringUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.Calendar;
@@ -72,7 +75,6 @@ public class SessionManager {
    private IStorage storage;
    private NotificationManager notifMgr;
    private HomeserverFederationResolver resolver;
    private CloseableHttpClient client;
    private SignatureManager signatureManager;

    public SessionManager(
@@ -80,14 +82,12 @@ public class SessionManager {
            IStorage storage,
            NotificationManager notifMgr,
            HomeserverFederationResolver resolver,
            CloseableHttpClient client,
            SignatureManager signatureManager
    ) {
        this.cfg = cfg;
        this.storage = storage;
        this.notifMgr = notifMgr;
        this.resolver = resolver;
        this.client = client;
        this.signatureManager = signatureManager;
    }

@@ -236,7 +236,9 @@ public class SessionManager {
        }

        log.info("Unbinding of {} {} to {} is accepted", tpid.getMedium(), tpid.getAddress(), mxid.getId());
        notifMgr.sendForUnbind(tpid);
        if (cfg.getSession().getPolicy().getUnbind().shouldNotify()) {
            notifMgr.sendForUnbind(tpid);
        }
    }

    private String getDomain(String publicUrl) {
@@ -305,25 +307,34 @@ public class SessionManager {

        String canonical = MatrixJson.encodeCanonical(jsonObject);

        String originUrl = resolver.resolve(origin).toString();
        HomeserverFederationResolver.HomeserverTarget homeserverTarget = resolver.resolve(origin);

        validateServerKey(key, sig, canonical, originUrl);
        validateServerKey(key, sig, canonical, homeserverTarget);
    }

    private String removeQuotes(String origin) {
        return origin.startsWith("\"") && origin.endsWith("\"") ? origin.substring(1, origin.length() - 1) : origin;
    }

    private void validateServerKey(String key, String signature, String canonical, String originUrl) {
    private void validateServerKey(String key, String signature, String canonical,
                                   HomeserverFederationResolver.HomeserverTarget homeserverTarget) {
        String originUrl = homeserverTarget.getUrl().toString();
        HttpGet request = new HttpGet(originUrl + "/_matrix/key/v2/server");
        log.info("Get keys from the server {}", request.getURI());
        try (CloseableHttpResponse response = client.execute(request)) {
            int statusCode = response.getStatusLine().getStatusCode();
            log.info("Answer code: {}", statusCode);
            if (statusCode == 200) {
                verifyKey(key, signature, canonical, response);
            } else {
                throw new RemoteHomeServerException("Unable to fetch server keys.");
        try (CloseableHttpClient httpClient = HttpClients.custom()
                .setSSLHostnameVerifier(new HomeserverVerifier(homeserverTarget.getDomain())).build()) {
            try (CloseableHttpResponse response = httpClient.execute(request)) {
                int statusCode = response.getStatusLine().getStatusCode();
                log.info("Answer code: {}", statusCode);
                if (statusCode == 200) {
                    verifyKey(key, signature, canonical, response);
                } else {
                    throw new RemoteHomeServerException("Unable to fetch server keys.");
                }
            } catch (IOException e) {
                String message = "Unable to get server keys: " + originUrl;
                log.error(message, e);
                throw new IllegalArgumentException(message);
            }
        } catch (IOException e) {
            String message = "Unable to get server keys: " + originUrl;
@@ -24,12 +24,17 @@ import com.j256.ormlite.dao.CloseableWrappedIterable;
import com.j256.ormlite.dao.Dao;
import com.j256.ormlite.dao.DaoManager;
import com.j256.ormlite.jdbc.JdbcConnectionSource;
import com.j256.ormlite.jdbc.JdbcPooledConnectionSource;
import com.j256.ormlite.jdbc.db.PostgresDatabaseType;
import com.j256.ormlite.jdbc.db.SqliteDatabaseType;
import com.j256.ormlite.stmt.QueryBuilder;
import com.j256.ormlite.support.ConnectionSource;
import com.j256.ormlite.table.TableUtils;
import io.kamax.matrix.ThreePid;
import io.kamax.mxisd.config.MxisdConfig;
import io.kamax.mxisd.config.PolicyConfig;
import io.kamax.mxisd.config.PostgresqlStorageConfig;
import io.kamax.mxisd.config.SQLiteStorageConfig;
import io.kamax.mxisd.config.StorageConfig;
import io.kamax.mxisd.exception.ConfigurationException;
import io.kamax.mxisd.exception.InternalServerError;
import io.kamax.mxisd.exception.InvalidCredentialsException;
@@ -39,6 +44,7 @@ import io.kamax.mxisd.storage.IStorage;
import io.kamax.mxisd.storage.dao.IThreePidSessionDao;
import io.kamax.mxisd.storage.ormlite.dao.ASTransactionDao;
import io.kamax.mxisd.storage.ormlite.dao.AccountDao;
import io.kamax.mxisd.storage.ormlite.dao.ChangelogDao;
import io.kamax.mxisd.storage.ormlite.dao.HashDao;
import io.kamax.mxisd.storage.ormlite.dao.HistoricalThreePidInviteIO;
import io.kamax.mxisd.storage.ormlite.dao.AcceptedDao;
@@ -46,12 +52,15 @@ import io.kamax.mxisd.storage.ormlite.dao.ThreePidInviteIO;
import io.kamax.mxisd.storage.ormlite.dao.ThreePidSessionDao;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.sql.SQLException;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
@@ -59,6 +68,8 @@ import java.util.stream.Collectors;

public class OrmLiteSqlStorage implements IStorage {

    private static final Logger LOGGER = LoggerFactory.getLogger(OrmLiteSqlStorage.class);

    @FunctionalInterface
    private interface Getter<T> {

@@ -73,42 +84,168 @@ public class OrmLiteSqlStorage implements IStorage {

    }

    public static class Migrations {
        public static final String FIX_ACCEPTED_DAO = "2019_12_09__2254__fix_accepted_dao";
        public static final String FIX_HASH_DAO_UNIQUE_INDEX = "2020_03_22__1153__fix_hash_dao_unique_index";
        public static final String CHANGE_TYPE_TO_TEXT_INVITE = "2020_04_21__2338__change_type_table_invites";
        public static final String CHANGE_TYPE_TO_TEXT_INVITE_HISTORY = "2020_10_26__2200__change_type_table_invite_history";
    }

    private Dao<ThreePidInviteIO, String> invDao;
    private Dao<HistoricalThreePidInviteIO, String> expInvDao;
    private Dao<ThreePidSessionDao, String> sessionDao;
    private Dao<ASTransactionDao, String> asTxnDao;
    private Dao<AccountDao, String> accountDao;
    private Dao<AcceptedDao, String> acceptedDao;
    private Dao<AcceptedDao, Long> acceptedDao;
    private Dao<HashDao, String> hashDao;
    private Dao<ChangelogDao, String> changelogDao;
    private StorageConfig.BackendEnum backend;

    public OrmLiteSqlStorage(MxisdConfig cfg) {
        this(cfg.getStorage().getBackend(), cfg.getStorage().getProvider().getSqlite().getDatabase());
    }

    public OrmLiteSqlStorage(String backend, String path) {
        if (StringUtils.isBlank(backend)) {
    public OrmLiteSqlStorage(StorageConfig.BackendEnum backend, StorageConfig.Provider provider) {
        if (backend == null) {
            throw new ConfigurationException("storage.backend");
        }

        if (StringUtils.isBlank(path)) {
            throw new ConfigurationException("Storage destination cannot be empty");
        }
        this.backend = backend;

        withCatcher(() -> {
            ConnectionSource connPool = new JdbcConnectionSource("jdbc:" + backend + ":" + path);
            ConnectionSource connPool;
            switch (backend) {
                case postgresql:
                    connPool = createPostgresqlConnection(provider.getPostgresql());
                    break;
                case sqlite:
                    connPool = createSqliteConnection(provider.getSqlite());
                    break;
                default:
                    throw new ConfigurationException("storage.backend");
            }

            changelogDao = createDaoAndTable(connPool, ChangelogDao.class);
            invDao = createDaoAndTable(connPool, ThreePidInviteIO.class);
            expInvDao = createDaoAndTable(connPool, HistoricalThreePidInviteIO.class);
            sessionDao = createDaoAndTable(connPool, ThreePidSessionDao.class);
            asTxnDao = createDaoAndTable(connPool, ASTransactionDao.class);
            accountDao = createDaoAndTable(connPool, AccountDao.class);
            acceptedDao = createDaoAndTable(connPool, AcceptedDao.class);
            hashDao = createDaoAndTable(connPool, HashDao.class);
            acceptedDao = createDaoAndTable(connPool, AcceptedDao.class, true);
            hashDao = createDaoAndTable(connPool, HashDao.class, true);
            runMigration(connPool);
        });
    }

    private ConnectionSource createSqliteConnection(SQLiteStorageConfig config) throws SQLException {
        if (StringUtils.isBlank(config.getDatabase())) {
            throw new ConfigurationException("Storage destination cannot be empty");
        }

        return new JdbcConnectionSource("jdbc:" + backend + ":" + config.getDatabase(), null, null, new SqliteDatabaseType());
    }

    private ConnectionSource createPostgresqlConnection(PostgresqlStorageConfig config) throws SQLException {
        if (StringUtils.isBlank(config.getDatabase())) {
            throw new ConfigurationException("Storage destination cannot be empty");
        }

        if (config.isPool()) {
            LOGGER.info("Enable pooling");
            JdbcPooledConnectionSource source = new JdbcPooledConnectionSource(
                    "jdbc:" + backend + ":" + config.getDatabase(), config.getUsername(), config.getPassword(),
                    new PostgresDatabaseType());
            source.setMaxConnectionsFree(config.getMaxConnectionsFree());
            source.setMaxConnectionAgeMillis(config.getMaxConnectionAgeMillis());
            source.setCheckConnectionsEveryMillis(config.getCheckConnectionsEveryMillis());
            source.setTestBeforeGet(config.isTestBeforeGetFromPool());
            return source;
        } else {
            return new JdbcConnectionSource("jdbc:" + backend + ":" + config.getDatabase(), config.getUsername(), config.getPassword(),
                    new PostgresDatabaseType());
        }
    }
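When pooling is enabled for PostgreSQL, the connection source above is a JdbcPooledConnectionSource tuned from PostgresqlStorageConfig. A hedged standalone sketch of the same construction with literal values; the JDBC URL, credentials, and pool numbers are placeholders, not project defaults:

    // Hedged sketch of the pooled PostgreSQL connection source built above.
    JdbcPooledConnectionSource source = new JdbcPooledConnectionSource(
            "jdbc:postgresql://localhost:5432/mxisd", "mxisd", "secret", new PostgresDatabaseType());
    source.setMaxConnectionsFree(5);                    // idle connections kept open in the pool
    source.setMaxConnectionAgeMillis(30 * 60 * 1000L);  // recycle connections after 30 minutes
    source.setCheckConnectionsEveryMillis(60 * 1000L);  // background connection-test interval
    source.setTestBeforeGet(true);                      // validate a connection before handing it out
    Dao<ChangelogDao, String> changelog = DaoManager.createDao(source, ChangelogDao.class);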
    private void runMigration(ConnectionSource connPol) throws SQLException {
        ChangelogDao fixAcceptedDao = changelogDao.queryForId(Migrations.FIX_ACCEPTED_DAO);
        if (fixAcceptedDao == null) {
            fixAcceptedDao(connPol);
            changelogDao.create(new ChangelogDao(Migrations.FIX_ACCEPTED_DAO, new Date(), "Recreate the accepted table."));
        }
        ChangelogDao fixHashDaoUniqueIndex = changelogDao.queryForId(Migrations.FIX_HASH_DAO_UNIQUE_INDEX);
        if (fixHashDaoUniqueIndex == null) {
            fixHashDaoUniqueIndex(connPol);
            changelogDao
                .create(new ChangelogDao(Migrations.FIX_HASH_DAO_UNIQUE_INDEX, new Date(), "Add the id and migrate the unique index."));
        }
        ChangelogDao fixInviteTableColumnType = changelogDao.queryForId(Migrations.CHANGE_TYPE_TO_TEXT_INVITE);
        if (fixInviteTableColumnType == null) {
            fixInviteTableColumnType(connPol);
            changelogDao.create(new ChangelogDao(Migrations.CHANGE_TYPE_TO_TEXT_INVITE, new Date(), "Modify column type to text."));
        }
        ChangelogDao fixInviteHistoryTableColumnType = changelogDao.queryForId(Migrations.CHANGE_TYPE_TO_TEXT_INVITE_HISTORY);
        if (fixInviteHistoryTableColumnType == null) {
            fixInviteHistoryTableColumnType(connPol);
            changelogDao.create(new ChangelogDao(Migrations.CHANGE_TYPE_TO_TEXT_INVITE_HISTORY, new Date(), "Modify column type to text."));
        }
    }
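runMigration() uses the changelog table as a registry of applied migrations: each step runs at most once and is then recorded under its id. A hedged sketch of adding a further step under the same pattern; the migration id and the SQL below are invented placeholders, not part of this change set:

    // Hypothetical extra migration following the same changelog-guarded pattern.
    String exampleMigration = "2021_01_01__0000__example_migration";
    if (changelogDao.queryForId(exampleMigration) == null) {
        invDao.executeRawNoArgs("alter table invite_3pid alter column address type text"); // placeholder DDL
        changelogDao.create(new ChangelogDao(exampleMigration, new Date(), "Example migration."));
    }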
    private void fixAcceptedDao(ConnectionSource connPool) throws SQLException {
        LOGGER.info("Migration: {}", Migrations.FIX_ACCEPTED_DAO);
        TableUtils.dropTable(acceptedDao, true);
        TableUtils.createTableIfNotExists(connPool, AcceptedDao.class);
    }

    private void fixHashDaoUniqueIndex(ConnectionSource connPool) throws SQLException {
        LOGGER.info("Migration: {}", Migrations.FIX_HASH_DAO_UNIQUE_INDEX);
        TableUtils.dropTable(hashDao, true);
        TableUtils.createTableIfNotExists(connPool, HashDao.class);
    }

    private void fixInviteTableColumnType(ConnectionSource connPool) throws SQLException {
        LOGGER.info("Migration: {}", Migrations.CHANGE_TYPE_TO_TEXT_INVITE);
        if (StorageConfig.BackendEnum.postgresql == backend) {
            invDao.executeRawNoArgs("alter table invite_3pid alter column \"roomId\" type text");
            invDao.executeRawNoArgs("alter table invite_3pid alter column id type text");
            invDao.executeRawNoArgs("alter table invite_3pid alter column token type text");
            invDao.executeRawNoArgs("alter table invite_3pid alter column sender type text");
            invDao.executeRawNoArgs("alter table invite_3pid alter column medium type text");
            invDao.executeRawNoArgs("alter table invite_3pid alter column address type text");
            invDao.executeRawNoArgs("alter table invite_3pid alter column properties type text");
        }
    }

    private void fixInviteHistoryTableColumnType(ConnectionSource connPool) throws SQLException {
        LOGGER.info("Migration: {}", Migrations.CHANGE_TYPE_TO_TEXT_INVITE_HISTORY);
        if (StorageConfig.BackendEnum.postgresql == backend) {
            invDao.executeRawNoArgs("alter table invite_3pid_history alter column \"resolvedTo\" type text");
            invDao.executeRawNoArgs("alter table invite_3pid_history alter column id type text");
            invDao.executeRawNoArgs("alter table invite_3pid_history alter column token type text");
            invDao.executeRawNoArgs("alter table invite_3pid_history alter column sender type text");
            invDao.executeRawNoArgs("alter table invite_3pid_history alter column medium type text");
            invDao.executeRawNoArgs("alter table invite_3pid_history alter column address type text");
            invDao.executeRawNoArgs("alter table invite_3pid_history alter column \"roomId\" type text");
            invDao.executeRawNoArgs("alter table invite_3pid_history alter column properties type text");
        }
    }

    private <V, K> Dao<V, K> createDaoAndTable(ConnectionSource connPool, Class<V> c) throws SQLException {
        return createDaoAndTable(connPool, c, false);
    }

    /**
     * Workaround for https://github.com/j256/ormlite-core/issues/20.
     */
    private <V, K> Dao<V, K> createDaoAndTable(ConnectionSource connPool, Class<V> c, boolean workaround) throws SQLException {
        LOGGER.info("Create the dao: {}", c.getSimpleName());
        Dao<V, K> dao = DaoManager.createDao(connPool, c);
        TableUtils.createTableIfNotExists(connPool, c);
        if (workaround && StorageConfig.BackendEnum.postgresql.equals(backend)) {
            LOGGER.info("Workaround for postgresql on dao: {}", c.getSimpleName());
            try {
                dao.countOf();
                LOGGER.info("Table exists, do nothing");
            } catch (SQLException e) {
                LOGGER.info("Table doesn't exist, create");
                TableUtils.createTableIfNotExists(connPool, c);
            }
        } else {
            TableUtils.createTableIfNotExists(connPool, c);
        }
        return dao;
    }
@@ -294,6 +431,13 @@ public class OrmLiteSqlStorage implements IStorage {
    public void acceptTerm(String token, String url) {
        withCatcher(() -> {
            AccountDao account = findAccount(token).orElseThrow(InvalidCredentialsException::new);
            List<AcceptedDao> acceptedTerms = acceptedDao.queryForEq("userId", account.getUserId());
            for (AcceptedDao acceptedTerm : acceptedTerms) {
                if (acceptedTerm.getUrl().equalsIgnoreCase(url)) {
                    // already accepted
                    return;
                }
            }
            int created = acceptedDao.create(new AcceptedDao(url, account.getUserId(), System.currentTimeMillis()));
            if (created != 1) {
                throw new RuntimeException("Unexpected row count after DB action: " + created);
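Because AcceptedDao now uses a generated numeric id instead of the URL as its primary key (see the AcceptedDao change below), the same user/url pair could otherwise be inserted twice, so acceptTerm() first scans the user's existing acceptances. An equivalent hedged check with an ORMLite QueryBuilder; the "userId" and "url" column names are assumptions based on the DAO field names, and eq() compares exactly where the loop above compares URLs case-insensitively:

    // Hedged alternative to the Java-side scan above, pushing the duplicate check into a query.
    QueryBuilder<AcceptedDao, Long> qb = acceptedDao.queryBuilder();
    qb.where().eq("userId", account.getUserId()).and().eq("url", url);
    boolean alreadyAccepted = qb.countOf() > 0;
    if (!alreadyAccepted) {
        acceptedDao.create(new AcceptedDao(url, account.getUserId(), System.currentTimeMillis()));
    }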
@@ -26,7 +26,10 @@ import com.j256.ormlite.table.DatabaseTable;
@DatabaseTable(tableName = "accepted")
public class AcceptedDao {

    @DatabaseField(canBeNull = false, id = true)
    @DatabaseField(generatedId = true)
    private Long id;

    @DatabaseField(canBeNull = false)
    private String url;

    @DatabaseField(canBeNull = false)
@@ -45,6 +48,14 @@ public class AcceptedDao {
        this.acceptedAt = acceptedAt;
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getUrl() {
        return url;
    }
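With generatedId = true, ORMLite assigns the id on insert and writes it back into the entity, which is why the DAO is now typed Dao<AcceptedDao, Long>. A short hedged sketch; the connection source, URL, and user id are placeholders:

    // Hedged sketch: ORMLite populates the generated id on the entity after create().
    Dao<AcceptedDao, Long> dao = DaoManager.createDao(connectionSource, AcceptedDao.class);
    AcceptedDao accepted = new AcceptedDao("https://example.org/terms", "@alice:example.org", System.currentTimeMillis());
    dao.create(accepted);
    Long generatedId = accepted.getId(); // set by ORMLite because of generatedId = true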
Some files were not shown because too many files have changed in this diff.