Compare commits

30 Commits

| SHA1 |
|---|
| a9d783192b |
| 2bb5a734d1 |
| 9aa5c4cca9 |
| 9c4faab5d8 |
| 53c4ffdc4e |
| e4144e923a |
| 791361c10d |
| 4b5eecd7e7 |
| a6968fb7e9 |
| d4853b1154 |
| 89df4b2425 |
| 0f89121b98 |
| 8a40ca185b |
| 5baeb42623 |
| 072e5f66cb |
| b2f41d689b |
| 9b4aff58c7 |
| a20e41574d |
| 72977d65ae |
| 7555fff1a5 |
| aed12e5536 |
| 75efd9921d |
| 9219bd4723 |
| 73526be2ac |
| b827efca2c |
| 6b7a4c8a23 |
| 47f6239268 |
| 0d6f65b469 |
| be915aed94 |
| ce938bb4a5 |

DockerfileX (new file, +16 lines)
@@ -0,0 +1,16 @@
+FROM --platform=$BUILDPLATFORM openjdk:11.0.7-jre-slim
+
+VOLUME /etc/ma1sd
+VOLUME /var/ma1sd
+EXPOSE 8090
+
+ENV JAVA_OPTS=""
+ENV CONF_FILE_PATH="/etc/ma1sd/ma1sd.yaml"
+ENV SIGN_KEY_PATH="/var/ma1sd/sign.key"
+ENV SQLITE_DATABASE_PATH="/var/ma1sd/ma1sd.db"
+
+CMD [ "/start.sh" ]
+
+ADD src/docker/start.sh /start.sh
+ADD src/script/ma1sd /app/ma1sd
+ADD build/libs/ma1sd.jar /app/ma1sd.jar

build.gradle (+33 lines)
@@ -274,6 +274,27 @@ task dockerBuild(type: Exec, dependsOn: shadowJar) {
     }
 }
 
+task dockerBuildX(type: Exec, dependsOn: shadowJar) {
+    commandLine 'docker', 'buildx', 'build', '--load', '--platform', 'linux/arm64', '-t', dockerImageTag + '-arm64', project.rootDir
+    doLast {
+        exec {
+            commandLine 'docker', 'buildx', 'build', '--load', '--platform', 'linux/amd64', '-t', dockerImageTag + '-amd64', project.rootDir
+        }
+
+        exec {
+            commandLine 'docker', 'tag', dockerImageTag + '-arm64', "${dockerImageName}:latest-arm64-dev"
+        }
+
+        exec {
+            commandLine 'docker', 'tag', dockerImageTag + '-amd64', "${dockerImageName}:latest-amd64-dev"
+        }
+
+        exec {
+            commandLine 'docker', 'tag', dockerImageTag + '-amd64', "${dockerImageName}:latest-dev"
+        }
+    }
+}
+
 task dockerPush(type: Exec) {
     commandLine 'docker', 'push', dockerImageTag
 
@@ -283,3 +304,15 @@ task dockerPush(type: Exec) {
         }
     }
 }
+
+task dockerPushX(type: Exec) {
+    commandLine 'docker', 'push', dockerImageTag
+
+    doLast {
+        exec {
+            commandLine 'docker', 'push', "${dockerImageName}:latest-dev"
+            commandLine 'docker', 'push', "${dockerImageName}:latest-amd64-dev"
+            commandLine 'docker', 'push', "${dockerImageName}:latest-arm64-dev"
+        }
+    }
+}
@@ -110,7 +110,7 @@ For sql provider (i.e. for the `synapseSql`):
 ```.yaml
 synapseSql:
   lookup:
-    query: 'select user_id as mxid, medium, address from user_threepids' # query for retrive 3PIDs for hashes.
+    query: 'select user_id as mxid, medium, address from user_threepid_id_server' # query for retrive 3PIDs for hashes.
 ```
 
 For general sql provider:
@@ -9,6 +9,8 @@
 ## Binaries
 ### Requirements
 - JDK 1.8
+- OpenJDK 11
+- OpenJDK 14
 
 ### Build
 ```bash
@@ -70,5 +72,13 @@ Then follow the instruction in the [Debian package](install/debian.md) document.
 ```
 Then follow the instructions in the [Docker install](install/docker.md#configure) document.
 
+### Multi-platform builds
+
+Provided with experimental docker feature [buildx](https://docs.docker.com/buildx/working-with-buildx/)
+To build the arm64 and amd64 images run:
+```bash
+./gradlew dockerBuildX
+```
+
 ## Next steps
 - [Integrate with your infrastructure](getting-started.md#integrate)
@@ -58,7 +58,53 @@ Commonly the `server.publicUrl` should be the same value as the `trusted_third_p
 
 ## Storage
 ### SQLite
-`storage.provider.sqlite.database`: Absolute location of the SQLite database
+```yaml
+storage:
+  backend: sqlite # default
+  provider:
+    sqlite:
+      database: /var/lib/ma1sd/store.db #  Absolute location of the SQLite database
+```
+
+### Postgresql
+```yaml
+storage:
+  backend: postgresql
+  provider:
+    postgresql:
+      database: //localhost:5432/ma1sd
+      username: ma1sd
+      password: secret_password
+```
+See [the migration instruction](migration-to-postgresql.md) from sqlite to postgresql
+
+
+## Logging
+```yaml
+logging:
+  root: error     # default level for all loggers (apps and thirdparty libraries)
+  app: info       # log level only for the ma1sd
+  requests: false # log request and response
+```
+
+Possible value: `trace`, `debug`, `info`, `warn`, `error`, `off`.
+
+Default value for root level: `info`.
+
+Value for app level can be specified via `MA1SD_LOG_LEVEL` environment variable, configuration or start options.
+
+Default value for app level: `info`.
+
+| start option | equivalent configuration |
+| --- | --- |
+|  | app: info |
+| -v | app: debug |
+| -vv | app: trace |
+
+#### WARNING
+
+The setting `logging.requests` *MUST NOT* be used in production due it prints full unmasked request and response into the log and can be cause of the data leak.
+This setting can be used only to testing and debugging errors.
+
 ## Identity stores
 See the [Identity stores](stores/README.md) for specific configuration
@@ -1,5 +1,5 @@
 # Identity
-Implementation of the [Identity Service API r0.2.0](https://matrix.org/docs/spec/identity_service/r0.2.0.html).
+Implementation of the [Identity Service API r0.3.0](https://matrix.org/docs/spec/identity_service/r0.3.0.html).
 
 - [Lookups](#lookups)
 - [Invitations](#invitations)

docs/migration-to-postgresql.md (new file, +41 lines)
@@ -0,0 +1,41 @@
+# Migration from sqlite to postgresql
+
+Starting from the version 2.3.0 ma1sd support postgresql for internal storage in addition to sqlite (parameters `storage.backend`).
+
+#### Migration steps
+
+1. create the postgresql database and user for ma1sd storage
+2. create a backup for sqlite storage (default location: /var/lib/ma1sd/store.db)
+3. migrate data from sqlite to postgresql
+4. change ma1sd configuration to use the postgresql
+
+For data migration is it possible to use https://pgloader.io tool.
+
+Example of the migration command:
+```shell script
+pgloader --with "quote identifiers" /path/to/store.db pgsql://ma1sd_user:ma1sd_password@host:port/database
+```
+or (short version for database on localhost)
+```shell script
+pgloader --with "quote identifiers" /path/to/store.db pgsql://ma1sd_user:ma1sd_password@localhost/ma1sd
+```
+
+An option `--with "quote identifies"` used to create case sensitive tables.
+ma1sd_user - postgresql user for ma1sd.
+ma1sd_password - password of the postgresql user.
+host - postgresql host
+post - database port (default 5432)
+database - database name.
+
+
+Configuration example for postgresql storage:
+```yaml
+storage:
+  backend: postgresql
+  provider:
+    postgresql:
+      database: '//localhost/ma1sd' # or full variant //192.168.1.100:5432/ma1sd_database
+      username: 'ma1sd_user'
+      password: 'ma1sd_password'
+```
+
@@ -136,7 +136,7 @@ sql:
 
 ```
 For the `role` query, `type` can be used to tell ma1sd how to inject the User ID in the query:
-- `localpart` will extract and set only the localpart.
+- `uid` will extract and set only the localpart.
 - `mxid` will use the ID as-is.
 
 On each query, the first parameter `?` is set as a string with the corresponding ID format.
@@ -22,7 +22,7 @@
 matrix:
   domain: ''
   v1: true   # deprecated
-  v2: false  # MSC2140 API v2. Disabled by default in order to preserve backward compatibility.
+  v2: true   # MSC2140 API v2. Riot require enabled V2 API.
 
 
 ################
@@ -51,10 +51,39 @@ key:
 #  - /var/lib/ma1sd/store.db
 #
 storage:
+# backend: sqlite # or postgresql
   provider:
     sqlite:
       database: '/path/to/ma1sd.db'
+#    postgresql:
+#      # Wrap all string values with quotes to avoid yaml parsing mistakes
+#      database: '//localhost/ma1sd' # or full variant //192.168.1.100:5432/ma1sd_database
+#      username: 'ma1sd_user'
+#      password: 'ma1sd_password'
+#
+#      # Pool configuration for postgresql backend.
+#      #######
+#      # Enable or disable pooling
+#      pool: false
+#
+#      #######
+#      # Check database connection before get from pool
+#      testBeforeGetFromPool: false # or true
+#
+#      #######
+#      # There is an internal thread which checks each of the database connections as a keep-alive mechanism. This set the
+#      # number of milliseconds it sleeps between checks -- default is 30000. To disable the checking thread, set this to
+#      # 0 before you start using the connection source.
+#      checkConnectionsEveryMillis: 30000
+#
+#      #######
+#      # Set the number of connections that can be unused in the available list.
+#      maxConnectionsFree: 5
+#
+#      #######
+#      # Set the number of milliseconds that a connection can stay open before being closed. Set to 9223372036854775807 to have
+#      # the connections never expire.
+#      maxConnectionAgeMillis: 3600000
+
 ###################
 # Identity Stores #
@@ -129,7 +158,8 @@ threepid:
 ### hash lookup for synapseSql provider.
 # synapseSql:
 #   lookup:
-#     query: 'select user_id as mxid, medium, address from user_threepids' # query for retrive 3PIDs for hashes.
+#     query: 'select user_id as mxid, medium, address from user_threepid_id_server' # query for retrive 3PIDs for hashes.
+#   legacyRoomNames: false  # use the old query to get room names.
 
 ### hash lookup for ldap provider (with example of the ldap configuration)
 # ldap:
@@ -167,3 +197,8 @@ threepid:
 #        - '/_matrix/identity/v2/hash_details'
 #        - '/_matrix/identity/v2/lookup'
 #
+
+# logging:
+#   root: error     # default level for all loggers (apps and thirdparty libraries)
+#   app: info       # log level only for the ma1sd
+#   requests: false # or true to dump full requests and responses
@@ -23,11 +23,13 @@ package io.kamax.mxisd;
 import io.kamax.mxisd.config.MatrixConfig;
 import io.kamax.mxisd.config.MxisdConfig;
 import io.kamax.mxisd.config.PolicyConfig;
+import io.kamax.mxisd.config.ServerConfig;
 import io.kamax.mxisd.http.undertow.handler.ApiHandler;
 import io.kamax.mxisd.http.undertow.handler.AuthorizationHandler;
 import io.kamax.mxisd.http.undertow.handler.CheckTermsHandler;
 import io.kamax.mxisd.http.undertow.handler.InternalInfoHandler;
 import io.kamax.mxisd.http.undertow.handler.OptionsHandler;
+import io.kamax.mxisd.http.undertow.handler.RequestDumpingHandler;
 import io.kamax.mxisd.http.undertow.handler.SaneHandler;
 import io.kamax.mxisd.http.undertow.handler.as.v1.AsNotFoundHandler;
 import io.kamax.mxisd.http.undertow.handler.as.v1.AsTransactionHandler;
@@ -99,9 +101,9 @@ public class HttpMxisd {
     public void start() {
         m.start();
 
-        HttpHandler asUserHandler = SaneHandler.around(new AsUserHandler(m.getAs()));
-        HttpHandler asTxnHandler = SaneHandler.around(new AsTransactionHandler(m.getAs()));
-        HttpHandler asNotFoundHandler = SaneHandler.around(new AsNotFoundHandler(m.getAs()));
+        HttpHandler asUserHandler = sane(new AsUserHandler(m.getAs()));
+        HttpHandler asTxnHandler = sane(new AsTransactionHandler(m.getAs()));
+        HttpHandler asNotFoundHandler = sane(new AsNotFoundHandler(m.getAs()));
 
         final RoutingHandler handler = Handlers.routing()
             .add("OPTIONS", "/**", sane(new OptionsHandler()))
@@ -145,7 +147,8 @@ public class HttpMxisd {
         termsEndpoints(handler);
         hashEndpoints(handler);
         accountEndpoints(handler);
-        httpSrv = Undertow.builder().addHttpListener(m.getConfig().getServer().getPort(), "0.0.0.0").setHandler(handler).build();
+        ServerConfig serverConfig = m.getConfig().getServer();
+        httpSrv = Undertow.builder().addHttpListener(serverConfig.getPort(), serverConfig.getHostname()).setHandler(handler).build();
 
         httpSrv.start();
     }
@@ -265,6 +268,11 @@ public class HttpMxisd {
     }
 
     private HttpHandler sane(HttpHandler httpHandler) {
-        return SaneHandler.around(httpHandler);
+        SaneHandler handler = SaneHandler.around(httpHandler);
+        if (m.getConfig().getLogging().isRequests()) {
+            return new RequestDumpingHandler(handler);
+        } else {
+            return handler;
+        }
     }
 }
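The listener is now built from `ServerConfig` instead of a hard-coded `0.0.0.0` bind address, and every handler is wrapped in a request dumper when `logging.requests` is enabled. As a rough, standalone illustration of the underlying Undertow call only (not ma1sd's actual wiring; the class name and the stub handler below are made up for the example), binding to a single interface looks like this:

```java
import io.undertow.Undertow;
import io.undertow.util.Headers;

public class BindSketch {
    public static void main(String[] args) {
        // Equivalents of server.port / server.hostname; 127.0.0.1 restricts the listener to loopback.
        Undertow srv = Undertow.builder()
                .addHttpListener(8090, "127.0.0.1")
                .setHandler(exchange -> {
                    exchange.getResponseHeaders().put(Headers.CONTENT_TYPE, "text/plain");
                    exchange.getResponseSender().send("ma1sd bind sketch");
                })
                .build();
        srv.start();
    }
}
```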
@@ -27,6 +27,7 @@ import io.kamax.mxisd.auth.AuthProviders;
 import io.kamax.mxisd.backend.IdentityStoreSupplier;
 import io.kamax.mxisd.backend.sql.synapse.Synapse;
 import io.kamax.mxisd.config.MxisdConfig;
+import io.kamax.mxisd.config.StorageConfig;
 import io.kamax.mxisd.crypto.CryptoFactory;
 import io.kamax.mxisd.crypto.KeyManager;
 import io.kamax.mxisd.crypto.SignatureManager;
@@ -66,7 +67,7 @@ public class Mxisd {
     public static final String Version = StringUtils.defaultIfBlank(Mxisd.class.getPackage().getImplementationVersion(), "UNKNOWN");
     public static final String Agent = Name + "/" + Version;
 
-    private MxisdConfig cfg;
+    private final MxisdConfig cfg;
 
     private CloseableHttpClient httpClient;
     private IRemoteIdentityServerFetcher srvFetcher;
@@ -109,7 +110,10 @@ public class Mxisd {
         IdentityServerUtils.setHttpClient(httpClient);
         srvFetcher = new RemoteIdentityServerFetcher(httpClient);
 
-        store = new OrmLiteSqlStorage(cfg);
+        StorageConfig.BackendEnum storageBackend = cfg.getStorage().getBackend();
+        StorageConfig.Provider storageProvider = cfg.getStorage().getProvider();
+        store = new OrmLiteSqlStorage(storageBackend, storageProvider);
+
         keyMgr = CryptoFactory.getKeyManager(cfg.getKey());
         signMgr = CryptoFactory.getSignatureManager(cfg, keyMgr);
         clientDns = new ClientDnsOverwrite(cfg.getDns().getOverwrite());
@@ -44,31 +44,46 @@ public class MxisdStandaloneExec {
         try {
             MxisdConfig cfg = null;
             Iterator<String> argsIt = Arrays.asList(args).iterator();
+            boolean dump = false;
+            boolean exit = false;
             while (argsIt.hasNext()) {
                 String arg = argsIt.next();
-                if (StringUtils.equalsAny(arg, "-h", "--help", "-?", "--usage")) {
-                    System.out.println("Available arguments:" + System.lineSeparator());
-                    System.out.println("  -h, --help       Show this help message");
-                    System.out.println("  --version        Print the version then exit");
-                    System.out.println("  -c, --config     Set the configuration file location");
-                    System.out.println("  -v               Increase log level (log more info)");
-                    System.out.println("  -vv              Further increase log level");
-                    System.out.println(" ");
-                    System.exit(0);
-                } else if (StringUtils.equals(arg, "-v")) {
-                    System.setProperty("org.slf4j.simpleLogger.log.io.kamax.mxisd", "debug");
-                } else if (StringUtils.equals(arg, "-vv")) {
-                    System.setProperty("org.slf4j.simpleLogger.log.io.kamax.mxisd", "trace");
-                } else if (StringUtils.equalsAny(arg, "-c", "--config")) {
-                    String cfgFile = argsIt.next();
-                    cfg = YamlConfigLoader.loadFromFile(cfgFile);
-                } else if (StringUtils.equals("--version", arg)) {
-                    System.out.println(Mxisd.Version);
-                    System.exit(0);
-                } else {
-                    System.err.println("Invalid argument: " + arg);
-                    System.err.println("Try '--help' for available arguments");
-                    System.exit(1);
+                switch (arg) {
+                    case "-h":
+                    case "--help":
+                    case "-?":
+                    case "--usage":
+                        System.out.println("Available arguments:" + System.lineSeparator());
+                        System.out.println("  -h, --help       Show this help message");
+                        System.out.println("  --version        Print the version then exit");
+                        System.out.println("  -c, --config     Set the configuration file location");
+                        System.out.println("  -v               Increase log level (log more info)");
+                        System.out.println("  -vv              Further increase log level");
+                        System.out.println("  --dump           Dump the full ma1sd configuration");
+                        System.out.println("  --dump-and-exit  Dump the full ma1sd configuration and exit");
+                        System.out.println(" ");
+                        System.exit(0);
+                        return;
+                    case "-v":
+                        System.setProperty("org.slf4j.simpleLogger.log.io.kamax.mxisd", "debug");
+                        break;
+                    case "-vv":
+                        System.setProperty("org.slf4j.simpleLogger.log.io.kamax.mxisd", "trace");
+                        break;
+                    case "-c":
+                    case "--config":
+                        String cfgFile = argsIt.next();
+                        cfg = YamlConfigLoader.loadFromFile(cfgFile);
+                        break;
+                    case "--dump-and-exit":
+                        exit = true;
+                    case "--dump":
+                        dump = true;
+                        break;
+                    default:
+                        System.err.println("Invalid argument: " + arg);
+                        System.err.println("Try '--help' for available arguments");
+                        System.exit(1);
                 }
             }
 
@@ -76,6 +91,13 @@
                 cfg = YamlConfigLoader.tryLoadFromFile("ma1sd.yaml").orElseGet(MxisdConfig::new);
             }
 
+            if (dump) {
+                YamlConfigLoader.dumpConfig(cfg);
+                if (exit) {
+                    System.exit(0);
+                }
+            }
+
             log.info("ma1sd starting");
             log.info("Version: {}", Mxisd.Version);
 
@@ -144,7 +144,13 @@ public class MembershipEventProcessor implements EventTypeProcessor {
                 .collect(Collectors.toList());
         log.info("Found {} email(s) in identity store for {}", tpids.size(), inviteeId);
 
-        for (_ThreePid tpid : tpids) {
+        log.info("Removing duplicates from identity store");
+        List<_ThreePid> uniqueTpids = tpids.stream()
+                .distinct()
+                .collect(Collectors.toList());
+        log.info("There are {} unique email(s) in identity store for {}", uniqueTpids.size(), inviteeId);
+
+        for (_ThreePid tpid : uniqueTpids) {
             log.info("Found Email to notify about room invitation: {}", tpid.getAddress());
             Map<String, String> properties = new HashMap<>();
             profiler.getDisplayName(sender).ifPresent(name -> properties.put("sender_display_name", name));
@@ -162,6 +162,7 @@ public class LdapAuthProvider extends LdapBackend implements AuthenticatorProvid
             log.info("No match were found for {}", mxid);
             return BackendAuthResult.failure();
         } catch (LdapException | IOException | CursorException e) {
+            log.error("Unable to invoke query request: ", e);
             throw new InternalServerError(e);
         }
     }
@@ -29,23 +29,27 @@ import java.util.Optional;
 
 public class Synapse {
 
-    private SqlConnectionPool pool;
+    private final SqlConnectionPool pool;
+    private final SynapseSqlProviderConfig providerConfig;
 
     public Synapse(SynapseSqlProviderConfig sqlCfg) {
         this.pool = new SqlConnectionPool(sqlCfg);
+        providerConfig = sqlCfg;
     }
 
     public Optional<String> getRoomName(String id) {
-        return pool.withConnFunction(conn -> {
-            PreparedStatement stmt = conn.prepareStatement(SynapseQueries.getRoomName());
-            stmt.setString(1, id);
-            ResultSet rSet = stmt.executeQuery();
-            if (!rSet.next()) {
-                return Optional.empty();
-            }
+        String query = providerConfig.isLegacyRoomNames() ? SynapseQueries.getLegacyRoomName() : SynapseQueries.getRoomName();
 
-            return Optional.ofNullable(rSet.getString(1));
+        return pool.withConnFunction(conn -> {
+            try (PreparedStatement stmt = conn.prepareStatement(query)) {
+                stmt.setString(1, id);
+                ResultSet rSet = stmt.executeQuery();
+                if (!rSet.next()) {
+                    return Optional.empty();
+                }
+
+                return Optional.ofNullable(rSet.getString(1));
+            }
         });
     }
 
 }
@@ -51,7 +51,7 @@ public class SynapseQueries {
         if (StringUtils.equals("sqlite", type)) {
             return "select " + getUserId(type, domain) + ", displayname from profiles p where displayname like ?";
         } else if (StringUtils.equals("postgresql", type)) {
-            return "select " + getUserId(type, domain) + ", displayname from profiles p where displayname ilike ?";
+            return "SELECT u.name,p.displayname FROM users u JOIN profiles p ON u.name LIKE concat('@',p.user_id,':%') WHERE u.is_guest = 0 AND u.appservice_id IS NULL AND p.displayname LIKE ?";
         } else {
             throw new ConfigurationException("Invalid Synapse SQL type: " + type);
         }
@@ -72,7 +72,10 @@
     }
 
     public static String getRoomName() {
-        return "select r.name from room_names r, events e, (select r1.room_id,max(e1.origin_server_ts) ts from room_names r1, events e1 where r1.event_id = e1.event_id group by r1.room_id) rle where e.origin_server_ts = rle.ts and r.event_id = e.event_id and r.room_id = ?";
+        return "select name from room_stats_state where room_id = ? limit 1";
     }
 
+    public static String getLegacyRoomName() {
+        return "select r.name from room_names r, events e, (select r1.room_id,max(e1.origin_server_ts) ts from room_names r1, events e1 where r1.event_id = e1.event_id group by r1.room_id) rle where e.origin_server_ts = rle.ts and r.event_id = e.event_id and r.room_id = ?";
+    }
 }
@@ -0,0 +1,5 @@
+package io.kamax.mxisd.config;
+
+public interface DatabaseStorageConfig {
+    String getDatabase();
+}
@@ -19,7 +19,7 @@ public class HashingConfig {
     private int requests = 10;
     private List<Algorithm> algorithms = new ArrayList<>();
 
-    public void build() {
+    public void build(MatrixConfig matrixConfig) {
         if (isEnabled()) {
             LOGGER.info("--- Hash configuration ---");
             LOGGER.info("   Pepper length: {}", getPepperLength());
@@ -35,6 +35,9 @@
             }
             LOGGER.info("   Algorithms: {}", getAlgorithms());
         } else {
+            if (matrixConfig.isV2()) {
+                LOGGER.warn("V2 enabled without the hash configuration.");
+            }
             LOGGER.info("Hash configuration disabled, used only `none` pepper.");
         }
     }

src/main/java/io/kamax/mxisd/config/LoggingConfig.java (new file, +60 lines)
@@ -0,0 +1,60 @@
+package io.kamax.mxisd.config;
+
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class LoggingConfig {
+
+    private static final Logger LOGGER = LoggerFactory.getLogger("App");
+
+    private String root;
+    private String app;
+    private boolean requests = false;
+
+    public String getRoot() {
+        return root;
+    }
+
+    public void setRoot(String root) {
+        this.root = root;
+    }
+
+    public String getApp() {
+        return app;
+    }
+
+    public void setApp(String app) {
+        this.app = app;
+    }
+
+    public boolean isRequests() {
+        return requests;
+    }
+
+    public void setRequests(boolean requests) {
+        this.requests = requests;
+    }
+
+    public void build() {
+        LOGGER.info("Logging config:");
+        if (StringUtils.isNotBlank(getRoot())) {
+            LOGGER.info("  Default log level: {}", getRoot());
+            System.setProperty("org.slf4j.simpleLogger.defaultLogLevel", getRoot());
+        }
+
+        String appLevel = System.getProperty("org.slf4j.simpleLogger.log.io.kamax.mxisd");
+        if (StringUtils.isNotBlank(appLevel)) {
+            LOGGER.info("  Logging level set by environment: {}", appLevel);
+        } else if (StringUtils.isNotBlank(getApp())) {
+            System.setProperty("org.slf4j.simpleLogger.log.io.kamax.mxisd", getApp());
+            LOGGER.info("  Logging level set by the configuration: {}", getApp());
+        } else {
+            LOGGER.info("  Logging level hasn't set, use default");
+        }
+        LOGGER.info("  Log requests: {}", isRequests());
+        if (isRequests()) {
+            LOGGER.warn("  Request dumping enabled, use this only to debug purposes, don't use it in the production.");
+        }
+    }
+}
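`LoggingConfig.build()` steers log levels through slf4j-simple system properties. A minimal, self-contained sketch of how those two properties behave (assumes `slf4j-simple` is on the classpath; the class and logger names below are illustrative only):

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingSketch {
    public static void main(String[] args) {
        // Same properties LoggingConfig sets from `logging.root` and `logging.app`.
        // They must be set before the first logger is created, which is why build() runs early.
        System.setProperty("org.slf4j.simpleLogger.defaultLogLevel", "error");
        System.setProperty("org.slf4j.simpleLogger.log.io.kamax.mxisd", "debug");

        Logger app = LoggerFactory.getLogger("io.kamax.mxisd.Demo");
        Logger thirdParty = LoggerFactory.getLogger("org.example.ThirdParty");

        app.debug("printed: the io.kamax.mxisd category is at debug");
        thirdParty.info("suppressed: the root level is error");
    }
}
```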
@@ -117,6 +117,7 @@ public class MxisdConfig {
     private WordpressConfig wordpress = new WordpressConfig();
     private PolicyConfig policy = new PolicyConfig();
     private HashingConfig hashing = new HashingConfig();
+    private LoggingConfig logging = new LoggingConfig();
 
     public AppServiceConfig getAppsvc() {
         return appsvc;
@@ -342,6 +343,14 @@
         this.hashing = hashing;
     }
 
+    public LoggingConfig getLogging() {
+        return logging;
+    }
+
+    public void setLogging(LoggingConfig logging) {
+        this.logging = logging;
+    }
+
     public MxisdConfig inMemory() {
         getKey().setPath(":memory:");
         getStorage().getProvider().getSqlite().setDatabase(":memory:");
@@ -350,6 +359,8 @@
     }
 
     public MxisdConfig build() {
+        getLogging().build();
+
         if (StringUtils.isBlank(getServer().getName())) {
             getServer().setName(getMatrix().getDomain());
             log.debug("server.name is empty, using matrix.domain");
@@ -359,6 +370,7 @@
         getAuth().build();
         getAccountConfig().build();
         getDirectory().build();
+        getDns().build();
         getExec().build();
         getFirebase().build();
         getForward().build();
@@ -381,7 +393,7 @@
         getView().build();
         getWordpress().build();
         getPolicy().build();
-        getHashing().build();
+        getHashing().build(getMatrix());
 
         return this;
     }

src/main/java/io/kamax/mxisd/config/PostgresqlStorageConfig.java (new file, +105 lines)
@@ -0,0 +1,105 @@
+/*
+ * mxisd - Matrix Identity Server Daemon
+ * Copyright (C) 2017 Kamax Sarl
+ *
+ * https://www.kamax.io/
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program.  If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package io.kamax.mxisd.config;
+
+public class PostgresqlStorageConfig implements DatabaseStorageConfig {
+
+    private String database;
+
+    private String username;
+
+    private String password;
+
+    private boolean pool;
+
+    private int maxConnectionsFree = 1;
+
+    private long maxConnectionAgeMillis = 60 * 60 * 1000;
+
+    private long checkConnectionsEveryMillis = 30 * 1000;
+
+    private boolean testBeforeGetFromPool = false;
+
+    @Override
+    public String getDatabase() {
+        return database;
+    }
+
+    public void setDatabase(String database) {
+        this.database = database;
+    }
+
+    public String getUsername() {
+        return username;
+    }
+
+    public void setUsername(String username) {
+        this.username = username;
+    }
+
+    public String getPassword() {
+        return password;
+    }
+
+    public void setPassword(String password) {
+        this.password = password;
+    }
+
+    public boolean isPool() {
+        return pool;
+    }
+
+    public void setPool(boolean pool) {
+        this.pool = pool;
+    }
+
+    public int getMaxConnectionsFree() {
+        return maxConnectionsFree;
+    }
+
+    public void setMaxConnectionsFree(int maxConnectionsFree) {
+        this.maxConnectionsFree = maxConnectionsFree;
+    }
+
+    public long getMaxConnectionAgeMillis() {
+        return maxConnectionAgeMillis;
+    }
+
+    public void setMaxConnectionAgeMillis(long maxConnectionAgeMillis) {
+        this.maxConnectionAgeMillis = maxConnectionAgeMillis;
+    }
+
+    public long getCheckConnectionsEveryMillis() {
+        return checkConnectionsEveryMillis;
+    }
+
+    public void setCheckConnectionsEveryMillis(long checkConnectionsEveryMillis) {
+        this.checkConnectionsEveryMillis = checkConnectionsEveryMillis;
+    }
+
+    public boolean isTestBeforeGetFromPool() {
+        return testBeforeGetFromPool;
+    }
+
+    public void setTestBeforeGetFromPool(boolean testBeforeGetFromPool) {
+        this.testBeforeGetFromPool = testBeforeGetFromPool;
+    }
+}
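The pool options above (`pool`, `maxConnectionsFree`, `maxConnectionAgeMillis`, `checkConnectionsEveryMillis`, `testBeforeGetFromPool`) mirror the setter names of ORMLite's `JdbcPooledConnectionSource`. Whether ma1sd wires them exactly this way is not shown in this diff; a sketch under that assumption, with made-up connection values:

```java
import java.sql.SQLException;

import com.j256.ormlite.jdbc.JdbcPooledConnectionSource;

final class PooledSourceSketch {
    static JdbcPooledConnectionSource create() throws SQLException {
        // Hypothetical mapping of PostgresqlStorageConfig values onto an ORMLite pooled source.
        JdbcPooledConnectionSource src = new JdbcPooledConnectionSource(
                "jdbc:postgresql://localhost:5432/ma1sd", "ma1sd_user", "ma1sd_password");
        src.setMaxConnectionsFree(5);                 // maxConnectionsFree
        src.setMaxConnectionAgeMillis(3_600_000L);    // maxConnectionAgeMillis
        src.setCheckConnectionsEveryMillis(30_000L);  // checkConnectionsEveryMillis
        src.setTestBeforeGet(true);                   // testBeforeGetFromPool
        return src;
    }
}
```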
@@ -20,10 +20,11 @@
 
 package io.kamax.mxisd.config;
 
-public class SQLiteStorageConfig {
+public class SQLiteStorageConfig implements DatabaseStorageConfig {
 
     private String database;
 
+    @Override
     public String getDatabase() {
         return database;
     }
@@ -34,6 +34,7 @@ public class ServerConfig {
     private String name;
     private int port = 8090;
     private String publicUrl;
+    private String hostname;
 
     public String getName() {
         return name;
@@ -59,6 +60,14 @@
         this.publicUrl = publicUrl;
     }
 
+    public String getHostname() {
+        return hostname;
+    }
+
+    public void setHostname(String hostname) {
+        this.hostname = hostname;
+    }
+
     public void build() {
         log.info("--- Server config ---");
 
@@ -75,8 +84,13 @@
             log.warn("Public URL is not valid: {}", StringUtils.defaultIfBlank(e.getMessage(), "<no reason provided>"));
         }
 
+        if (StringUtils.isBlank(getHostname())) {
+            setHostname("0.0.0.0");
+        }
+
         log.info("Name: {}", getName());
         log.info("Port: {}", getPort());
         log.info("Public URL: {}", getPublicUrl());
+        log.info("Hostname: {}", getHostname());
     }
 }
@@ -21,14 +21,21 @@
 package io.kamax.mxisd.config;
 
 import io.kamax.mxisd.exception.ConfigurationException;
-import org.apache.commons.lang.StringUtils;
 
 public class StorageConfig {
 
+    public enum BackendEnum {
+        sqlite,
+
+        postgresql
+    }
+
     public static class Provider {
 
         private SQLiteStorageConfig sqlite = new SQLiteStorageConfig();
 
+        private PostgresqlStorageConfig postgresql = new PostgresqlStorageConfig();
+
         public SQLiteStorageConfig getSqlite() {
             return sqlite;
         }
@@ -37,16 +44,23 @@ public class StorageConfig {
             this.sqlite = sqlite;
         }
 
+        public PostgresqlStorageConfig getPostgresql() {
+            return postgresql;
+        }
+
+        public void setPostgresql(PostgresqlStorageConfig postgresql) {
+            this.postgresql = postgresql;
+        }
     }
 
-    private String backend = "sqlite";
+    private BackendEnum backend = BackendEnum.sqlite; // or postgresql
     private Provider provider = new Provider();
 
-    public String getBackend() {
+    public BackendEnum getBackend() {
         return backend;
     }
 
-    public void setBackend(String backend) {
+    public void setBackend(BackendEnum backend) {
         this.backend = backend;
     }
 
@@ -59,7 +73,7 @@
     }
 
     public void build() {
-        if (StringUtils.isBlank(getBackend())) {
+        if (getBackend() == null) {
            throw new ConfigurationException("storage.backend");
         }
     }
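With `storage.backend` now an enum, the storage layer can branch on it instead of comparing strings. A hypothetical helper (not the real `OrmLiteSqlStorage` code, which this diff does not show) combining the backend with the configured `database` value into a JDBC URL:

```java
final class JdbcUrlSketch {
    // Illustrative only: maps BackendEnum plus the `database` setting to a JDBC URL.
    static String jdbcUrl(StorageConfig.BackendEnum backend, String database) {
        switch (backend) {
            case sqlite:
                return "jdbc:sqlite:" + database;      // e.g. /var/lib/ma1sd/store.db
            case postgresql:
                return "jdbc:postgresql:" + database;  // e.g. //localhost:5432/ma1sd
            default:
                throw new IllegalArgumentException("storage.backend not supported: " + backend);
        }
    }
}
```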
@@ -76,4 +76,14 @@ public class YamlConfigLoader {
         }
     }
 
+    public static void dumpConfig(MxisdConfig cfg) {
+        Representer rep = new Representer();
+        rep.getPropertyUtils().setBeanAccess(BeanAccess.FIELD);
+        rep.getPropertyUtils().setAllowReadOnlyProperties(true);
+        rep.getPropertyUtils().setSkipMissingProperties(true);
+
+        Yaml yaml = new Yaml(new Constructor(MxisdConfig.class), rep);
+        String dump = yaml.dump(cfg);
+        log.info("Full configuration:\n{}", dump);
+    }
 }
@@ -45,4 +45,21 @@ public class MemoryThreePid implements _ThreePid {
         this.address = address;
     }
 
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+
+        MemoryThreePid threePid = (MemoryThreePid) o;
+
+        if (!medium.equals(threePid.medium)) return false;
+        return address.equals(threePid.address);
+    }
+
+    @Override
+    public int hashCode() {
+        int result = medium.hashCode();
+        result = 31 * result + address.hashCode();
+        return result;
+    }
 }
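This `equals`/`hashCode` pair is what lets the `tpids.stream().distinct()` call added in `MembershipEventProcessor` actually collapse duplicate 3PIDs reported by several identity stores. A small self-contained illustration of the same idea (the `ThreePid` class below is a demonstration stand-in, not the real `MemoryThreePid`):

```java
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

public class DistinctSketch {

    // Stand-in for MemoryThreePid: value equality on (medium, address).
    static final class ThreePid {
        final String medium;
        final String address;

        ThreePid(String medium, String address) {
            this.medium = medium;
            this.address = address;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (!(o instanceof ThreePid)) return false;
            ThreePid other = (ThreePid) o;
            return medium.equals(other.medium) && address.equals(other.address);
        }

        @Override
        public int hashCode() {
            return Objects.hash(medium, address);
        }
    }

    public static void main(String[] args) {
        List<ThreePid> tpids = Arrays.asList(
                new ThreePid("email", "john@example.org"),
                new ThreePid("email", "john@example.org"), // duplicate from a second store
                new ThreePid("email", "jane@example.org"));

        // distinct() relies on equals/hashCode; without them every instance would count as unique.
        List<ThreePid> unique = tpids.stream().distinct().collect(Collectors.toList());
        System.out.println(unique.size()); // prints 2
    }
}
```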
@@ -125,7 +125,7 @@ public abstract class SqlConfig {
     }
 
     public static class Lookup {
-        private String query = "SELECT user_id AS mxid, medium, address from user_threepids";
+        private String query = "SELECT user_id AS mxid, medium, address from user_threepid_id_server";
 
         public String getQuery() {
             return query;
@@ -140,7 +140,7 @@
 
         private Boolean enabled;
         private String type = "mxid";
-        private String query = "SELECT user_id AS uid FROM user_threepids WHERE medium = ? AND address = ?";
+        private String query = "SELECT user_id AS uid FROM user_threepid_id_server WHERE medium = ? AND address = ?";
         private Map<String, String> medium = new HashMap<>();
 
         public Boolean isEnabled() {
@@ -24,9 +24,23 @@ import io.kamax.mxisd.UserIdType;
 import io.kamax.mxisd.backend.sql.synapse.SynapseQueries;
 import io.kamax.mxisd.config.sql.SqlConfig;
 import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class SynapseSqlProviderConfig extends SqlConfig {
 
+    private transient final Logger log = LoggerFactory.getLogger(SynapseSqlProviderConfig.class);
+
+    private boolean legacyRoomNames = false;
+
+    public boolean isLegacyRoomNames() {
+        return legacyRoomNames;
+    }
+
+    public void setLegacyRoomNames(boolean legacyRoomNames) {
+        this.legacyRoomNames = legacyRoomNames;
+    }
+
     @Override
     protected String getProviderName() {
         return "Synapse SQL";
@@ -42,7 +56,7 @@ public class SynapseSqlProviderConfig extends SqlConfig {
 
         if (getIdentity().isEnabled() && StringUtils.isBlank(getIdentity().getType())) {
             getIdentity().setType("mxid");
-            getIdentity().setQuery("SELECT user_id AS uid FROM user_threepids WHERE medium = ? AND address = ?");
+            getIdentity().setQuery("SELECT user_id AS uid FROM user_threepid_id_server WHERE medium = ? AND address = ?");
         }
 
         if (getProfile().isEnabled()) {
@@ -65,4 +79,12 @@
         printConfig();
     }
 
+    @Override
+    protected void printConfig() {
+        super.printConfig();
+
+        if (isEnabled()) {
+            log.info("Use legacy room name query: {}", isLegacyRoomNames());
+        }
+    }
 }
| @@ -1,6 +1,9 @@ | |||||||
| package io.kamax.mxisd.hash; | package io.kamax.mxisd.hash; | ||||||
|  |  | ||||||
| import io.kamax.mxisd.config.HashingConfig; | import io.kamax.mxisd.config.HashingConfig; | ||||||
|  | import io.kamax.mxisd.hash.engine.Engine; | ||||||
|  | import io.kamax.mxisd.hash.engine.HashEngine; | ||||||
|  | import io.kamax.mxisd.hash.engine.NoneEngine; | ||||||
| import io.kamax.mxisd.hash.rotation.HashRotationStrategy; | import io.kamax.mxisd.hash.rotation.HashRotationStrategy; | ||||||
| import io.kamax.mxisd.hash.rotation.NoOpRotationStrategy; | import io.kamax.mxisd.hash.rotation.NoOpRotationStrategy; | ||||||
| import io.kamax.mxisd.hash.rotation.RotationPerRequests; | import io.kamax.mxisd.hash.rotation.RotationPerRequests; | ||||||
| @@ -21,7 +24,7 @@ public class HashManager { | |||||||
|  |  | ||||||
|     private static final Logger LOGGER = LoggerFactory.getLogger(HashManager.class); |     private static final Logger LOGGER = LoggerFactory.getLogger(HashManager.class); | ||||||
|  |  | ||||||
|     private HashEngine hashEngine; |     private Engine engine; | ||||||
|     private HashRotationStrategy rotationStrategy; |     private HashRotationStrategy rotationStrategy; | ||||||
|     private HashStorage hashStorage; |     private HashStorage hashStorage; | ||||||
|     private HashingConfig config; |     private HashingConfig config; | ||||||
| @@ -32,7 +35,7 @@ public class HashManager { | |||||||
|         this.config = config; |         this.config = config; | ||||||
|         this.storage = storage; |         this.storage = storage; | ||||||
|         initStorage(); |         initStorage(); | ||||||
|         hashEngine = new HashEngine(providers, getHashStorage(), config); |         engine = config.isEnabled() ? new HashEngine(providers, getHashStorage(), config) : new NoneEngine(); | ||||||
|         initRotationStrategy(); |         initRotationStrategy(); | ||||||
|         configured.set(true); |         configured.set(true); | ||||||
|     } |     } | ||||||
| @@ -73,8 +76,8 @@ public class HashManager { | |||||||
|         this.rotationStrategy.register(getHashEngine()); |         this.rotationStrategy.register(getHashEngine()); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     public HashEngine getHashEngine() { |     public Engine getHashEngine() { | ||||||
|         return hashEngine; |         return engine; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     public HashRotationStrategy getRotationStrategy() { |     public HashRotationStrategy getRotationStrategy() { | ||||||
|   | |||||||
							
								
								
									
7  src/main/java/io/kamax/mxisd/hash/engine/Engine.java  Normal file
							| @@ -0,0 +1,7 @@ | |||||||
|  | package io.kamax.mxisd.hash.engine; | ||||||
|  |  | ||||||
|  | public interface Engine { | ||||||
|  |     void updateHashes(); | ||||||
|  |  | ||||||
|  |     String getPepper(); | ||||||
|  | } | ||||||
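The new Engine interface above is what allows HashManager (earlier in this changeset) to swap between the real HashEngine and the NoneEngine added below when hashing is disabled. A minimal consumer of the contract might look like the following sketch; it is illustrative only and not part of the changeset:

```java
import io.kamax.mxisd.hash.engine.Engine;

// Sketch: callers depend on the Engine contract, so a no-op NoneEngine and a
// fully configured HashEngine are interchangeable from the caller's point of view.
public class PepperReporter {

    private final Engine engine;

    public PepperReporter(Engine engine) {
        this.engine = engine;
    }

    public String refresh() {
        engine.updateHashes();     // no-op for NoneEngine, recomputes hashes for HashEngine
        return engine.getPepper(); // empty string for NoneEngine
    }
}
```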
| @@ -1,4 +1,4 @@ | |||||||
| package io.kamax.mxisd.hash; | package io.kamax.mxisd.hash.engine; | ||||||
|  |  | ||||||
| import io.kamax.mxisd.config.HashingConfig; | import io.kamax.mxisd.config.HashingConfig; | ||||||
| import io.kamax.mxisd.hash.storage.HashStorage; | import io.kamax.mxisd.hash.storage.HashStorage; | ||||||
| @@ -12,7 +12,7 @@ import org.slf4j.LoggerFactory; | |||||||
| import java.util.Base64; | import java.util.Base64; | ||||||
| import java.util.List; | import java.util.List; | ||||||
|  |  | ||||||
| public class HashEngine { | public class HashEngine implements Engine { | ||||||
|  |  | ||||||
|     private static final Logger LOGGER = LoggerFactory.getLogger(HashEngine.class); |     private static final Logger LOGGER = LoggerFactory.getLogger(HashEngine.class); | ||||||
|  |  | ||||||
| @@ -28,6 +28,7 @@ public class HashEngine { | |||||||
|         this.config = config; |         this.config = config; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|     public void updateHashes() { |     public void updateHashes() { | ||||||
|         LOGGER.info("Start update hashes."); |         LOGGER.info("Start update hashes."); | ||||||
|         synchronized (hashStorage) { |         synchronized (hashStorage) { | ||||||
| @@ -48,6 +49,7 @@ public class HashEngine { | |||||||
|         LOGGER.info("Finish update hashes."); |         LOGGER.info("Finish update hashes."); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|     public String getPepper() { |     public String getPepper() { | ||||||
|         synchronized (hashStorage) { |         synchronized (hashStorage) { | ||||||
|             return pepper; |             return pepper; | ||||||
							
								
								
									
19  src/main/java/io/kamax/mxisd/hash/engine/NoneEngine.java  Normal file
							| @@ -0,0 +1,19 @@ | |||||||
|  | package io.kamax.mxisd.hash.engine; | ||||||
|  |  | ||||||
|  | import org.slf4j.Logger; | ||||||
|  | import org.slf4j.LoggerFactory; | ||||||
|  |  | ||||||
|  | public class NoneEngine implements Engine { | ||||||
|  |  | ||||||
|  |     private static final Logger LOGGER = LoggerFactory.getLogger(NoneEngine.class); | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public void updateHashes() { | ||||||
|  |         LOGGER.info("Nothing to update."); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public String getPepper() { | ||||||
|  |         return ""; | ||||||
|  |     } | ||||||
|  | } | ||||||
| @@ -1,12 +1,12 @@ | |||||||
| package io.kamax.mxisd.hash.rotation; | package io.kamax.mxisd.hash.rotation; | ||||||
|  |  | ||||||
| import io.kamax.mxisd.hash.HashEngine; | import io.kamax.mxisd.hash.engine.Engine; | ||||||
|  |  | ||||||
| public interface HashRotationStrategy { | public interface HashRotationStrategy { | ||||||
|  |  | ||||||
|     void register(HashEngine hashEngine); |     void register(Engine engine); | ||||||
|  |  | ||||||
|     HashEngine getHashEngine(); |     Engine getHashEngine(); | ||||||
|  |  | ||||||
|     void newRequest(); |     void newRequest(); | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,19 +1,19 @@ | |||||||
| package io.kamax.mxisd.hash.rotation; | package io.kamax.mxisd.hash.rotation; | ||||||
|  |  | ||||||
| import io.kamax.mxisd.hash.HashEngine; | import io.kamax.mxisd.hash.engine.Engine; | ||||||
|  |  | ||||||
| public class NoOpRotationStrategy implements HashRotationStrategy { | public class NoOpRotationStrategy implements HashRotationStrategy { | ||||||
|  |  | ||||||
|     private HashEngine hashEngine; |     private Engine engine; | ||||||
|  |  | ||||||
|     @Override |     @Override | ||||||
|     public void register(HashEngine hashEngine) { |     public void register(Engine engine) { | ||||||
|         this.hashEngine = hashEngine; |         this.engine = engine; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @Override |     @Override | ||||||
|     public HashEngine getHashEngine() { |     public Engine getHashEngine() { | ||||||
|         return hashEngine; |         return engine; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @Override |     @Override | ||||||
|   | |||||||
| @@ -1,12 +1,12 @@ | |||||||
| package io.kamax.mxisd.hash.rotation; | package io.kamax.mxisd.hash.rotation; | ||||||
|  |  | ||||||
| import io.kamax.mxisd.hash.HashEngine; | import io.kamax.mxisd.hash.engine.Engine; | ||||||
|  |  | ||||||
| import java.util.concurrent.atomic.AtomicInteger; | import java.util.concurrent.atomic.AtomicInteger; | ||||||
|  |  | ||||||
| public class RotationPerRequests implements HashRotationStrategy { | public class RotationPerRequests implements HashRotationStrategy { | ||||||
|  |  | ||||||
|     private HashEngine hashEngine; |     private Engine engine; | ||||||
|     private final AtomicInteger counter = new AtomicInteger(0); |     private final AtomicInteger counter = new AtomicInteger(0); | ||||||
|     private final int barrier; |     private final int barrier; | ||||||
|  |  | ||||||
| @@ -15,14 +15,14 @@ public class RotationPerRequests implements HashRotationStrategy { | |||||||
|     } |     } | ||||||
|  |  | ||||||
|     @Override |     @Override | ||||||
|     public void register(HashEngine hashEngine) { |     public void register(Engine engine) { | ||||||
|         this.hashEngine = hashEngine; |         this.engine = engine; | ||||||
|         trigger(); |         trigger(); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @Override |     @Override | ||||||
|     public HashEngine getHashEngine() { |     public Engine getHashEngine() { | ||||||
|         return hashEngine; |         return engine; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @Override |     @Override | ||||||
|   | |||||||
| @@ -1,6 +1,6 @@ | |||||||
| package io.kamax.mxisd.hash.rotation; | package io.kamax.mxisd.hash.rotation; | ||||||
|  |  | ||||||
| import io.kamax.mxisd.hash.HashEngine; | import io.kamax.mxisd.hash.engine.Engine; | ||||||
|  |  | ||||||
| import java.util.concurrent.Executors; | import java.util.concurrent.Executors; | ||||||
| import java.util.concurrent.ScheduledExecutorService; | import java.util.concurrent.ScheduledExecutorService; | ||||||
| @@ -9,7 +9,7 @@ import java.util.concurrent.TimeUnit; | |||||||
| public class TimeBasedRotation implements HashRotationStrategy { | public class TimeBasedRotation implements HashRotationStrategy { | ||||||
|  |  | ||||||
|     private final long delay; |     private final long delay; | ||||||
|     private HashEngine hashEngine; |     private Engine engine; | ||||||
|     private final ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor(); |     private final ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor(); | ||||||
|  |  | ||||||
|     public TimeBasedRotation(long delay) { |     public TimeBasedRotation(long delay) { | ||||||
| @@ -17,15 +17,15 @@ public class TimeBasedRotation implements HashRotationStrategy { | |||||||
|     } |     } | ||||||
|  |  | ||||||
|     @Override |     @Override | ||||||
|     public void register(HashEngine hashEngine) { |     public void register(Engine engine) { | ||||||
|         this.hashEngine = hashEngine; |         this.engine = engine; | ||||||
|         Runtime.getRuntime().addShutdownHook(new Thread(executorService::shutdown)); |         Runtime.getRuntime().addShutdownHook(new Thread(executorService::shutdown)); | ||||||
|         executorService.scheduleWithFixedDelay(this::trigger, 0, delay, TimeUnit.SECONDS); |         executorService.scheduleWithFixedDelay(this::trigger, 0, delay, TimeUnit.SECONDS); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @Override |     @Override | ||||||
|     public HashEngine getHashEngine() { |     public Engine getHashEngine() { | ||||||
|         return hashEngine; |         return engine; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @Override |     @Override | ||||||
|   | |||||||
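All rotation strategies above now accept the Engine abstraction instead of the concrete HashEngine, so even the no-op engine can be registered. A hedged wiring sketch follows; it is not part of the changeset, and it assumes that the strategy's trigger ends up calling updateHashes() as the scheduling code in TimeBasedRotation suggests:

```java
import io.kamax.mxisd.hash.engine.Engine;
import io.kamax.mxisd.hash.engine.NoneEngine;
import io.kamax.mxisd.hash.rotation.HashRotationStrategy;
import io.kamax.mxisd.hash.rotation.TimeBasedRotation;

// Illustrative only: registering an Engine with a time-based rotation strategy.
// TimeBasedRotation schedules its trigger with a fixed delay in seconds, which is
// expected to drive engine.updateHashes(); NoneEngine simply logs that there is
// nothing to update.
public class RotationWiringSketch {

    public static void main(String[] args) {
        Engine engine = new NoneEngine();
        HashRotationStrategy strategy = new TimeBasedRotation(3600L); // rotate hourly
        strategy.register(engine); // schedules the rotation, first run fires immediately
    }
}
```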
| @@ -0,0 +1,5 @@ | |||||||
|  | package io.kamax.mxisd.http.undertow.conduit; | ||||||
|  |  | ||||||
|  | public interface ConduitWithDump { | ||||||
|  |     String dump(); | ||||||
|  | } | ||||||
| @@ -0,0 +1,107 @@ | |||||||
|  | /* | ||||||
|  |  * JBoss, Home of Professional Open Source. | ||||||
|  |  * Copyright 2014 Red Hat, Inc., and individual contributors | ||||||
|  |  * as indicated by the @author tags. | ||||||
|  |  * | ||||||
|  |  * Licensed under the Apache License, Version 2.0 (the "License"); | ||||||
|  |  * you may not use this file except in compliance with the License. | ||||||
|  |  * You may obtain a copy of the License at | ||||||
|  |  * | ||||||
|  |  *     http://www.apache.org/licenses/LICENSE-2.0 | ||||||
|  |  * | ||||||
|  |  *  Unless required by applicable law or agreed to in writing, software | ||||||
|  |  *  distributed under the License is distributed on an "AS IS" BASIS, | ||||||
|  |  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||||||
|  |  *  See the License for the specific language governing permissions and | ||||||
|  |  *  limitations under the License. | ||||||
|  |  */ | ||||||
|  |  | ||||||
|  | package io.kamax.mxisd.http.undertow.conduit; | ||||||
|  |  | ||||||
|  | import org.xnio.IoUtils; | ||||||
|  | import org.xnio.channels.StreamSourceChannel; | ||||||
|  | import org.xnio.conduits.AbstractStreamSinkConduit; | ||||||
|  | import org.xnio.conduits.ConduitWritableByteChannel; | ||||||
|  | import org.xnio.conduits.Conduits; | ||||||
|  | import org.xnio.conduits.StreamSinkConduit; | ||||||
|  |  | ||||||
|  | import java.io.IOException; | ||||||
|  | import java.nio.ByteBuffer; | ||||||
|  | import java.nio.channels.FileChannel; | ||||||
|  | import java.nio.charset.StandardCharsets; | ||||||
|  | import java.util.List; | ||||||
|  | import java.util.concurrent.CopyOnWriteArrayList; | ||||||
|  |  | ||||||
|  | /** | ||||||
|  |  * Conduit that saves all the data that is written through it and can dump it to the console | ||||||
|  |  * <p> | ||||||
|  |  * Obviously this should not be used in production. | ||||||
|  |  * | ||||||
|  |  * @author Stuart Douglas | ||||||
|  |  */ | ||||||
|  | public class DebuggingStreamSinkConduit extends AbstractStreamSinkConduit<StreamSinkConduit> implements ConduitWithDump { | ||||||
|  |  | ||||||
|  |     private final List<byte[]> data = new CopyOnWriteArrayList<>(); | ||||||
|  |  | ||||||
|  |     /** | ||||||
|  |      * Construct a new instance. | ||||||
|  |      * | ||||||
|  |      * @param next the delegate conduit to set | ||||||
|  |      */ | ||||||
|  |     public DebuggingStreamSinkConduit(StreamSinkConduit next) { | ||||||
|  |         super(next); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public int write(ByteBuffer src) throws IOException { | ||||||
|  |         int pos = src.position(); | ||||||
|  |         int res = super.write(src); | ||||||
|  |         if (res > 0) { | ||||||
|  |             byte[] d = new byte[res]; | ||||||
|  |             for (int i = 0; i < res; ++i) { | ||||||
|  |                 d[i] = src.get(i + pos); | ||||||
|  |             } | ||||||
|  |             data.add(d); | ||||||
|  |         } | ||||||
|  |         return res; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public long write(ByteBuffer[] dsts, int offs, int len) throws IOException { | ||||||
|  |         for (int i = offs; i < len; ++i) { | ||||||
|  |             if (dsts[i].hasRemaining()) { | ||||||
|  |                 return write(dsts[i]); | ||||||
|  |             } | ||||||
|  |         } | ||||||
|  |         return 0; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public long transferFrom(final FileChannel src, final long position, final long count) throws IOException { | ||||||
|  |         return src.transferTo(position, count, new ConduitWritableByteChannel(this)); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public long transferFrom(final StreamSourceChannel source, final long count, final ByteBuffer throughBuffer) throws IOException { | ||||||
|  |         return IoUtils.transfer(source, count, throughBuffer, new ConduitWritableByteChannel(this)); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public int writeFinal(ByteBuffer src) throws IOException { | ||||||
|  |         return Conduits.writeFinalBasic(this, src); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public long writeFinal(ByteBuffer[] srcs, int offset, int length) throws IOException { | ||||||
|  |         return Conduits.writeFinalBasic(this, srcs, offset, length); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public String dump() { | ||||||
|  |         StringBuilder sb = new StringBuilder(); | ||||||
|  |         for (byte[] datum : data) { | ||||||
|  |             sb.append(new String(datum, StandardCharsets.UTF_8)); | ||||||
|  |         } | ||||||
|  |         return sb.toString(); | ||||||
|  |     } | ||||||
|  | } | ||||||
| @@ -0,0 +1,95 @@ | |||||||
|  | /* | ||||||
|  |  * JBoss, Home of Professional Open Source. | ||||||
|  |  * Copyright 2014 Red Hat, Inc., and individual contributors | ||||||
|  |  * as indicated by the @author tags. | ||||||
|  |  * | ||||||
|  |  * Licensed under the Apache License, Version 2.0 (the "License"); | ||||||
|  |  * you may not use this file except in compliance with the License. | ||||||
|  |  * You may obtain a copy of the License at | ||||||
|  |  * | ||||||
|  |  *     http://www.apache.org/licenses/LICENSE-2.0 | ||||||
|  |  * | ||||||
|  |  *  Unless required by applicable law or agreed to in writing, software | ||||||
|  |  *  distributed under the License is distributed on an "AS IS" BASIS, | ||||||
|  |  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||||||
|  |  *  See the License for the specific language governing permissions and | ||||||
|  |  *  limitations under the License. | ||||||
|  |  */ | ||||||
|  |  | ||||||
|  | package io.kamax.mxisd.http.undertow.conduit; | ||||||
|  |  | ||||||
|  | import org.xnio.IoUtils; | ||||||
|  | import org.xnio.channels.StreamSinkChannel; | ||||||
|  | import org.xnio.conduits.AbstractStreamSourceConduit; | ||||||
|  | import org.xnio.conduits.ConduitReadableByteChannel; | ||||||
|  | import org.xnio.conduits.StreamSourceConduit; | ||||||
|  |  | ||||||
|  | import java.io.IOException; | ||||||
|  | import java.nio.ByteBuffer; | ||||||
|  | import java.nio.channels.FileChannel; | ||||||
|  | import java.nio.charset.StandardCharsets; | ||||||
|  | import java.util.List; | ||||||
|  | import java.util.concurrent.CopyOnWriteArrayList; | ||||||
|  |  | ||||||
|  | /** | ||||||
|  |  * Conduit that saves all the data that is written through it and can dump it to the console | ||||||
|  |  * <p> | ||||||
|  |  * Obviously this should not be used in production. | ||||||
|  |  * | ||||||
|  |  * @author Stuart Douglas | ||||||
|  |  */ | ||||||
|  | public class DebuggingStreamSourceConduit extends AbstractStreamSourceConduit<StreamSourceConduit> implements ConduitWithDump { | ||||||
|  |  | ||||||
|  |     private final List<byte[]> data = new CopyOnWriteArrayList<>(); | ||||||
|  |  | ||||||
|  |     /** | ||||||
|  |      * Construct a new instance. | ||||||
|  |      * | ||||||
|  |      * @param next the delegate conduit to set | ||||||
|  |      */ | ||||||
|  |     public DebuggingStreamSourceConduit(StreamSourceConduit next) { | ||||||
|  |         super(next); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public long transferTo(final long position, final long count, final FileChannel target) throws IOException { | ||||||
|  |         return target.transferFrom(new ConduitReadableByteChannel(this), position, count); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public long transferTo(final long count, final ByteBuffer throughBuffer, final StreamSinkChannel target) throws IOException { | ||||||
|  |         return IoUtils.transfer(new ConduitReadableByteChannel(this), count, throughBuffer, target); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public int read(ByteBuffer dst) throws IOException { | ||||||
|  |         int pos = dst.position(); | ||||||
|  |         int res = super.read(dst); | ||||||
|  |         if (res > 0) { | ||||||
|  |             byte[] d = new byte[res]; | ||||||
|  |             for (int i = 0; i < res; ++i) { | ||||||
|  |                 d[i] = dst.get(i + pos); | ||||||
|  |             } | ||||||
|  |             data.add(d); | ||||||
|  |         } | ||||||
|  |         return res; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public long read(ByteBuffer[] dsts, int offs, int len) throws IOException { | ||||||
|  |         for (int i = offs; i < len; ++i) { | ||||||
|  |             if (dsts[i].hasRemaining()) { | ||||||
|  |                 return read(dsts[i]); | ||||||
|  |             } | ||||||
|  |         } | ||||||
|  |         return 0; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public String dump() { | ||||||
|  |  | ||||||
|  |         StringBuilder sb = new StringBuilder(); | ||||||
|  |         for (byte[] datum : data) { | ||||||
|  |             sb.append(new String(datum, StandardCharsets.UTF_8)); | ||||||
|  |         } | ||||||
|  |         return sb.toString(); | ||||||
|  |     } | ||||||
|  | } | ||||||
| @@ -0,0 +1,23 @@ | |||||||
|  | package io.kamax.mxisd.http.undertow.conduit; | ||||||
|  |  | ||||||
|  | import io.undertow.server.ConduitWrapper; | ||||||
|  | import io.undertow.server.HttpServerExchange; | ||||||
|  | import io.undertow.util.ConduitFactory; | ||||||
|  | import org.xnio.conduits.Conduit; | ||||||
|  |  | ||||||
|  | public abstract class LazyConduitWrapper<T extends Conduit> implements ConduitWrapper<T> { | ||||||
|  |  | ||||||
|  |     private T conduit = null; | ||||||
|  |  | ||||||
|  |     protected abstract T create(ConduitFactory<T> factory, HttpServerExchange exchange); | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public T wrap(ConduitFactory<T> factory, HttpServerExchange exchange) { | ||||||
|  |         conduit = create(factory, exchange); | ||||||
|  |         return conduit; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public T get() { | ||||||
|  |         return conduit; | ||||||
|  |     } | ||||||
|  | } | ||||||
| @@ -0,0 +1,186 @@ | |||||||
|  | /* | ||||||
|  |  * JBoss, Home of Professional Open Source. | ||||||
|  |  * Copyright 2014 Red Hat, Inc., and individual contributors | ||||||
|  |  * as indicated by the @author tags. | ||||||
|  |  * | ||||||
|  |  * Licensed under the Apache License, Version 2.0 (the "License"); | ||||||
|  |  * you may not use this file except in compliance with the License. | ||||||
|  |  * You may obtain a copy of the License at | ||||||
|  |  * | ||||||
|  |  *     http://www.apache.org/licenses/LICENSE-2.0 | ||||||
|  |  * | ||||||
|  |  *  Unless required by applicable law or agreed to in writing, software | ||||||
|  |  *  distributed under the License is distributed on an "AS IS" BASIS, | ||||||
|  |  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||||||
|  |  *  See the License for the specific language governing permissions and | ||||||
|  |  *  limitations under the License. | ||||||
|  |  */ | ||||||
|  |  | ||||||
|  | package io.kamax.mxisd.http.undertow.handler; | ||||||
|  |  | ||||||
|  | import io.kamax.mxisd.http.undertow.conduit.ConduitWithDump; | ||||||
|  | import io.kamax.mxisd.http.undertow.conduit.DebuggingStreamSinkConduit; | ||||||
|  | import io.kamax.mxisd.http.undertow.conduit.DebuggingStreamSourceConduit; | ||||||
|  | import io.kamax.mxisd.http.undertow.conduit.LazyConduitWrapper; | ||||||
|  | import io.undertow.security.api.SecurityContext; | ||||||
|  | import io.undertow.server.HttpHandler; | ||||||
|  | import io.undertow.server.HttpServerExchange; | ||||||
|  | import io.undertow.server.handlers.Cookie; | ||||||
|  | import io.undertow.util.ConduitFactory; | ||||||
|  | import io.undertow.util.HeaderValues; | ||||||
|  | import io.undertow.util.Headers; | ||||||
|  | import io.undertow.util.LocaleUtils; | ||||||
|  | import org.slf4j.Logger; | ||||||
|  | import org.slf4j.LoggerFactory; | ||||||
|  | import org.xnio.conduits.StreamSinkConduit; | ||||||
|  | import org.xnio.conduits.StreamSourceConduit; | ||||||
|  |  | ||||||
|  | import java.util.Deque; | ||||||
|  | import java.util.Iterator; | ||||||
|  | import java.util.Map; | ||||||
|  |  | ||||||
|  | /** | ||||||
|  |  * Handler that dumps an exchange to a log. | ||||||
|  |  * | ||||||
|  |  * @author Stuart Douglas | ||||||
|  |  */ | ||||||
|  | public class RequestDumpingHandler implements HttpHandler { | ||||||
|  |  | ||||||
|  |     private static final Logger LOGGER = LoggerFactory.getLogger(RequestDumpingHandler.class); | ||||||
|  |  | ||||||
|  |     private final HttpHandler next; | ||||||
|  |  | ||||||
|  |     public RequestDumpingHandler(HttpHandler next) { | ||||||
|  |         this.next = next; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @Override | ||||||
|  |     public void handleRequest(HttpServerExchange exchange) throws Exception { | ||||||
|  |         LazyConduitWrapper<StreamSourceConduit> requestConduitWrapper = new LazyConduitWrapper<StreamSourceConduit>() { | ||||||
|  |             @Override | ||||||
|  |             protected StreamSourceConduit create(ConduitFactory<StreamSourceConduit> factory, HttpServerExchange exchange) { | ||||||
|  |                 return new DebuggingStreamSourceConduit(factory.create()); | ||||||
|  |             } | ||||||
|  |         }; | ||||||
|  |         LazyConduitWrapper<StreamSinkConduit> responseConduitWrapper = new LazyConduitWrapper<StreamSinkConduit>() { | ||||||
|  |             @Override | ||||||
|  |             protected StreamSinkConduit create(ConduitFactory<StreamSinkConduit> factory, HttpServerExchange exchange) { | ||||||
|  |                 return new DebuggingStreamSinkConduit(factory.create()); | ||||||
|  |             } | ||||||
|  |         }; | ||||||
|  |         exchange.addRequestWrapper(requestConduitWrapper); | ||||||
|  |         exchange.addResponseWrapper(responseConduitWrapper); | ||||||
|  |  | ||||||
|  |         final StringBuilder sb = new StringBuilder(); | ||||||
|  | // Log pre-service information | ||||||
|  |         final SecurityContext sc = exchange.getSecurityContext(); | ||||||
|  |         sb.append("\n----------------------------REQUEST---------------------------\n"); | ||||||
|  |         sb.append("               URI=").append(exchange.getRequestURI()).append("\n"); | ||||||
|  |         sb.append(" characterEncoding=").append(exchange.getRequestHeaders().get(Headers.CONTENT_ENCODING)).append("\n"); | ||||||
|  |         sb.append("     contentLength=").append(exchange.getRequestContentLength()).append("\n"); | ||||||
|  |         sb.append("       contentType=").append(exchange.getRequestHeaders().get(Headers.CONTENT_TYPE)).append("\n"); | ||||||
|  |         //sb.append("       contextPath=" + exchange.getContextPath()); | ||||||
|  |         if (sc != null) { | ||||||
|  |             if (sc.isAuthenticated()) { | ||||||
|  |                 sb.append("          authType=").append(sc.getMechanismName()).append("\n"); | ||||||
|  |                 sb.append("         principle=").append(sc.getAuthenticatedAccount().getPrincipal()).append("\n"); | ||||||
|  |             } else { | ||||||
|  |                 sb.append("          authType=none\n"); | ||||||
|  |             } | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         Map<String, Cookie> cookies = exchange.getRequestCookies(); | ||||||
|  |         if (cookies != null) { | ||||||
|  |             for (Map.Entry<String, Cookie> entry : cookies.entrySet()) { | ||||||
|  |                 Cookie cookie = entry.getValue(); | ||||||
|  |                 sb.append("            cookie=").append(cookie.getName()).append("=").append(cookie.getValue()).append("\n"); | ||||||
|  |             } | ||||||
|  |         } | ||||||
|  |         for (HeaderValues header : exchange.getRequestHeaders()) { | ||||||
|  |             for (String value : header) { | ||||||
|  |                 sb.append("            header=").append(header.getHeaderName()).append("=").append(value).append("\n"); | ||||||
|  |             } | ||||||
|  |         } | ||||||
|  |         sb.append("            locale=").append(LocaleUtils.getLocalesFromHeader(exchange.getRequestHeaders().get(Headers.ACCEPT_LANGUAGE))) | ||||||
|  |             .append("\n"); | ||||||
|  |         sb.append("            method=").append(exchange.getRequestMethod()).append("\n"); | ||||||
|  |         Map<String, Deque<String>> pnames = exchange.getQueryParameters(); | ||||||
|  |         for (Map.Entry<String, Deque<String>> entry : pnames.entrySet()) { | ||||||
|  |             String pname = entry.getKey(); | ||||||
|  |             Iterator<String> pvalues = entry.getValue().iterator(); | ||||||
|  |             sb.append("         parameter="); | ||||||
|  |             sb.append(pname); | ||||||
|  |             sb.append('='); | ||||||
|  |             while (pvalues.hasNext()) { | ||||||
|  |                 sb.append(pvalues.next()); | ||||||
|  |                 if (pvalues.hasNext()) { | ||||||
|  |                     sb.append(", "); | ||||||
|  |                 } | ||||||
|  |             } | ||||||
|  |             sb.append("\n"); | ||||||
|  |         } | ||||||
|  |         //sb.append("          pathInfo=" + exchange.getPathInfo()); | ||||||
|  |         sb.append("          protocol=").append(exchange.getProtocol()).append("\n"); | ||||||
|  |         sb.append("       queryString=").append(exchange.getQueryString()).append("\n"); | ||||||
|  |         sb.append("        remoteAddr=").append(exchange.getSourceAddress()).append("\n"); | ||||||
|  |         sb.append("        remoteHost=").append(exchange.getSourceAddress().getHostName()).append("\n"); | ||||||
|  |         //sb.append("requestedSessionId=" + exchange.getRequestedSessionId()); | ||||||
|  |         sb.append("            scheme=").append(exchange.getRequestScheme()).append("\n"); | ||||||
|  |         sb.append("              host=").append(exchange.getRequestHeaders().getFirst(Headers.HOST)).append("\n"); | ||||||
|  |         sb.append("        serverPort=").append(exchange.getDestinationAddress().getPort()).append("\n"); | ||||||
|  |         //sb.append("       servletPath=" + exchange.getServletPath()); | ||||||
|  |         sb.append("          isSecure=").append(exchange.isSecure()).append("\n"); | ||||||
|  |  | ||||||
|  |         exchange.addExchangeCompleteListener((exchange1, nextListener) -> { | ||||||
|  |             StreamSourceConduit sourceConduit = requestConduitWrapper.get(); | ||||||
|  |             if (sourceConduit instanceof ConduitWithDump) { | ||||||
|  |                 ConduitWithDump conduitWithDump = (ConduitWithDump) sourceConduit; | ||||||
|  |                 sb.append("body=\n"); | ||||||
|  |                 sb.append(conduitWithDump.dump()).append("\n"); | ||||||
|  |             } | ||||||
|  |  | ||||||
|  |             // Log post-service information | ||||||
|  |             sb.append("--------------------------RESPONSE--------------------------\n"); | ||||||
|  |             if (sc != null) { | ||||||
|  |                 if (sc.isAuthenticated()) { | ||||||
|  |                     sb.append("          authType=").append(sc.getMechanismName()).append("\n"); | ||||||
|  |                     sb.append("         principle=").append(sc.getAuthenticatedAccount().getPrincipal()).append("\n"); | ||||||
|  |                 } else { | ||||||
|  |                     sb.append("          authType=none\n"); | ||||||
|  |                 } | ||||||
|  |             } | ||||||
|  |             sb.append("     contentLength=").append(exchange1.getResponseContentLength()).append("\n"); | ||||||
|  |             sb.append("       contentType=").append(exchange1.getResponseHeaders().getFirst(Headers.CONTENT_TYPE)).append("\n"); | ||||||
|  |             Map<String, Cookie> cookies1 = exchange1.getResponseCookies(); | ||||||
|  |             if (cookies1 != null) { | ||||||
|  |                 for (Cookie cookie : cookies1.values()) { | ||||||
|  |                     sb.append("            cookie=").append(cookie.getName()).append("=").append(cookie.getValue()).append("; domain=") | ||||||
|  |                         .append(cookie.getDomain()).append("; path=").append(cookie.getPath()).append("\n"); | ||||||
|  |                 } | ||||||
|  |             } | ||||||
|  |             for (HeaderValues header : exchange1.getResponseHeaders()) { | ||||||
|  |                 for (String value : header) { | ||||||
|  |                     sb.append("            header=").append(header.getHeaderName()).append("=").append(value).append("\n"); | ||||||
|  |                 } | ||||||
|  |             } | ||||||
|  |             sb.append("            status=").append(exchange1.getStatusCode()).append("\n"); | ||||||
|  |             StreamSinkConduit streamSinkConduit = responseConduitWrapper.get(); | ||||||
|  |             if (streamSinkConduit instanceof ConduitWithDump) { | ||||||
|  |                 ConduitWithDump conduitWithDump = (ConduitWithDump) streamSinkConduit; | ||||||
|  |                 sb.append("body=\n"); | ||||||
|  |                 sb.append(conduitWithDump.dump()); | ||||||
|  |  | ||||||
|  |             } | ||||||
|  |  | ||||||
|  |             sb.append("\n=============================================================="); | ||||||
|  |  | ||||||
|  |  | ||||||
|  |             nextListener.proceed(); | ||||||
|  |             LOGGER.info(sb.toString()); | ||||||
|  |         }); | ||||||
|  |  | ||||||
|  |  | ||||||
|  |         // Perform the exchange | ||||||
|  |         next.handleRequest(exchange); | ||||||
|  |     } | ||||||
|  | } | ||||||
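RequestDumpingHandler only needs to wrap the next handler in the chain; the debugging conduits are attached per exchange and dumped once the exchange completes. A standalone sketch of plugging it into an Undertow server follows (hypothetical wiring, not part of the changeset, and per the Javadoc above not intended for production):

```java
import io.kamax.mxisd.http.undertow.handler.RequestDumpingHandler;
import io.undertow.Undertow;
import io.undertow.util.Headers;

// Sketch: every request and response flowing through the wrapped handler is
// captured by the debugging conduits and logged when the exchange finishes.
public class DumpingServerSketch {

    public static void main(String[] args) {
        Undertow.builder()
                .addHttpListener(8090, "127.0.0.1")
                .setHandler(new RequestDumpingHandler(exchange -> {
                    exchange.getResponseHeaders().put(Headers.CONTENT_TYPE, "text/plain");
                    exchange.getResponseSender().send("OK");
                }))
                .build()
                .start();
    }
}
```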
| @@ -31,6 +31,8 @@ public class HashDetailsHandler extends BasicHttpHandler { | |||||||
|             for (HashingConfig.Algorithm algorithm : config.getAlgorithms()) { |             for (HashingConfig.Algorithm algorithm : config.getAlgorithms()) { | ||||||
|                 algorithms.add(algorithm.name().toLowerCase()); |                 algorithms.add(algorithm.name().toLowerCase()); | ||||||
|             } |             } | ||||||
|  |         } else { | ||||||
|  |             algorithms.add(HashingConfig.Algorithm.none.name().toLowerCase()); | ||||||
|         } |         } | ||||||
|         response.add("algorithms", algorithms); |         response.add("algorithms", algorithms); | ||||||
|         return response; |         return response; | ||||||
|   | |||||||
| @@ -67,10 +67,6 @@ public class HashLookupHandler extends LookupHandler implements ApiHandler { | |||||||
|         log.info("Got bulk lookup request from {} with client {} - Is recursive? {}", |         log.info("Got bulk lookup request from {} with client {} - Is recursive? {}", | ||||||
|             lookupRequest.getRequester(), lookupRequest.getUserAgent(), lookupRequest.isRecursive()); |             lookupRequest.getRequester(), lookupRequest.getUserAgent(), lookupRequest.isRecursive()); | ||||||
|  |  | ||||||
|         if (!hashManager.getConfig().isEnabled()) { |  | ||||||
|             throw new InvalidParamException(); |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|         if (!hashManager.getHashEngine().getPepper().equals(input.getPepper())) { |         if (!hashManager.getHashEngine().getPepper().equals(input.getPepper())) { | ||||||
|             throw new InvalidPepperException(); |             throw new InvalidPepperException(); | ||||||
|         } |         } | ||||||
| @@ -89,7 +85,7 @@ public class HashLookupHandler extends LookupHandler implements ApiHandler { | |||||||
|     } |     } | ||||||
|  |  | ||||||
|     private void noneAlgorithm(HttpServerExchange exchange, HashLookupRequest request, ClientHashLookupRequest input) throws Exception { |     private void noneAlgorithm(HttpServerExchange exchange, HashLookupRequest request, ClientHashLookupRequest input) throws Exception { | ||||||
|         if (!hashManager.getConfig().getAlgorithms().contains(HashingConfig.Algorithm.none)) { |         if (hashManager.getConfig().isEnabled() && !hashManager.getConfig().getAlgorithms().contains(HashingConfig.Algorithm.none)) { | ||||||
|             throw new InvalidParamException(); |             throw new InvalidParamException(); | ||||||
|         } |         } | ||||||
|  |  | ||||||
|   | |||||||
| @@ -23,13 +23,18 @@ package io.kamax.mxisd.storage.ormlite; | |||||||
| import com.j256.ormlite.dao.CloseableWrappedIterable; | import com.j256.ormlite.dao.CloseableWrappedIterable; | ||||||
| import com.j256.ormlite.dao.Dao; | import com.j256.ormlite.dao.Dao; | ||||||
| import com.j256.ormlite.dao.DaoManager; | import com.j256.ormlite.dao.DaoManager; | ||||||
|  | import com.j256.ormlite.db.PostgresDatabaseType; | ||||||
|  | import com.j256.ormlite.db.SqliteDatabaseType; | ||||||
| import com.j256.ormlite.jdbc.JdbcConnectionSource; | import com.j256.ormlite.jdbc.JdbcConnectionSource; | ||||||
|  | import com.j256.ormlite.jdbc.JdbcPooledConnectionSource; | ||||||
| import com.j256.ormlite.stmt.QueryBuilder; | import com.j256.ormlite.stmt.QueryBuilder; | ||||||
| import com.j256.ormlite.support.ConnectionSource; | import com.j256.ormlite.support.ConnectionSource; | ||||||
| import com.j256.ormlite.table.TableUtils; | import com.j256.ormlite.table.TableUtils; | ||||||
| import io.kamax.matrix.ThreePid; | import io.kamax.matrix.ThreePid; | ||||||
| import io.kamax.mxisd.config.MxisdConfig; |  | ||||||
| import io.kamax.mxisd.config.PolicyConfig; | import io.kamax.mxisd.config.PolicyConfig; | ||||||
|  | import io.kamax.mxisd.config.PostgresqlStorageConfig; | ||||||
|  | import io.kamax.mxisd.config.SQLiteStorageConfig; | ||||||
|  | import io.kamax.mxisd.config.StorageConfig; | ||||||
| import io.kamax.mxisd.exception.ConfigurationException; | import io.kamax.mxisd.exception.ConfigurationException; | ||||||
| import io.kamax.mxisd.exception.InternalServerError; | import io.kamax.mxisd.exception.InternalServerError; | ||||||
| import io.kamax.mxisd.exception.InvalidCredentialsException; | import io.kamax.mxisd.exception.InvalidCredentialsException; | ||||||
| @@ -81,6 +86,8 @@ public class OrmLiteSqlStorage implements IStorage { | |||||||
|  |  | ||||||
|     public static class Migrations { |     public static class Migrations { | ||||||
|         public static final String FIX_ACCEPTED_DAO = "2019_12_09__2254__fix_accepted_dao"; |         public static final String FIX_ACCEPTED_DAO = "2019_12_09__2254__fix_accepted_dao"; | ||||||
|  |         public static final String FIX_HASH_DAO_UNIQUE_INDEX = "2020_03_22__1153__fix_hash_dao_unique_index"; | ||||||
|  |         public static final String CHANGE_TYPE_TO_TEXT_INVITE = "2020_04_21__2338__change_type_table_invites"; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     private Dao<ThreePidInviteIO, String> invDao; |     private Dao<ThreePidInviteIO, String> invDao; | ||||||
| @@ -91,40 +98,85 @@ public class OrmLiteSqlStorage implements IStorage { | |||||||
|     private Dao<AcceptedDao, Long> acceptedDao; |     private Dao<AcceptedDao, Long> acceptedDao; | ||||||
|     private Dao<HashDao, String> hashDao; |     private Dao<HashDao, String> hashDao; | ||||||
|     private Dao<ChangelogDao, String> changelogDao; |     private Dao<ChangelogDao, String> changelogDao; | ||||||
|  |     private StorageConfig.BackendEnum backend; | ||||||
|  |  | ||||||
|     public OrmLiteSqlStorage(MxisdConfig cfg) { |     public OrmLiteSqlStorage(StorageConfig.BackendEnum backend, StorageConfig.Provider provider) { | ||||||
|         this(cfg.getStorage().getBackend(), cfg.getStorage().getProvider().getSqlite().getDatabase()); |         if (backend == null) { | ||||||
|     } |  | ||||||
|  |  | ||||||
|     public OrmLiteSqlStorage(String backend, String path) { |  | ||||||
|         if (StringUtils.isBlank(backend)) { |  | ||||||
|             throw new ConfigurationException("storage.backend"); |             throw new ConfigurationException("storage.backend"); | ||||||
|         } |         } | ||||||
|  |         this.backend = backend; | ||||||
|         if (StringUtils.isBlank(path)) { |  | ||||||
|             throw new ConfigurationException("Storage destination cannot be empty"); |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|         withCatcher(() -> { |         withCatcher(() -> { | ||||||
|             ConnectionSource connPool = new JdbcConnectionSource("jdbc:" + backend + ":" + path); |             ConnectionSource connPool; | ||||||
|  |             switch (backend) { | ||||||
|  |                 case postgresql: | ||||||
|  |                     connPool = createPostgresqlConnection(provider.getPostgresql()); | ||||||
|  |                     break; | ||||||
|  |                 case sqlite: | ||||||
|  |                     connPool = createSqliteConnection(provider.getSqlite()); | ||||||
|  |                     break; | ||||||
|  |                 default: | ||||||
|  |                     throw new ConfigurationException("storage.backend"); | ||||||
|  |             } | ||||||
|  |  | ||||||
|             changelogDao = createDaoAndTable(connPool, ChangelogDao.class); |             changelogDao = createDaoAndTable(connPool, ChangelogDao.class); | ||||||
|             invDao = createDaoAndTable(connPool, ThreePidInviteIO.class); |             invDao = createDaoAndTable(connPool, ThreePidInviteIO.class); | ||||||
|             expInvDao = createDaoAndTable(connPool, HistoricalThreePidInviteIO.class); |             expInvDao = createDaoAndTable(connPool, HistoricalThreePidInviteIO.class); | ||||||
|             sessionDao = createDaoAndTable(connPool, ThreePidSessionDao.class); |             sessionDao = createDaoAndTable(connPool, ThreePidSessionDao.class); | ||||||
|             asTxnDao = createDaoAndTable(connPool, ASTransactionDao.class); |             asTxnDao = createDaoAndTable(connPool, ASTransactionDao.class); | ||||||
|             accountDao = createDaoAndTable(connPool, AccountDao.class); |             accountDao = createDaoAndTable(connPool, AccountDao.class); | ||||||
|             acceptedDao = createDaoAndTable(connPool, AcceptedDao.class); |             acceptedDao = createDaoAndTable(connPool, AcceptedDao.class, true); | ||||||
|             hashDao = createDaoAndTable(connPool, HashDao.class); |             hashDao = createDaoAndTable(connPool, HashDao.class, true); | ||||||
|             runMigration(connPool); |             runMigration(connPool); | ||||||
|         }); |         }); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |     private ConnectionSource createSqliteConnection(SQLiteStorageConfig config) throws SQLException { | ||||||
|  |         if (StringUtils.isBlank(config.getDatabase())) { | ||||||
|  |             throw new ConfigurationException("Storage destination cannot be empty"); | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         return new JdbcConnectionSource("jdbc:" + backend + ":" + config.getDatabase(), null, null, new SqliteDatabaseType()); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     private ConnectionSource createPostgresqlConnection(PostgresqlStorageConfig config) throws SQLException { | ||||||
|  |         if (StringUtils.isBlank(config.getDatabase())) { | ||||||
|  |             throw new ConfigurationException("Storage destination cannot be empty"); | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         if (config.isPool()) { | ||||||
|  |             LOGGER.info("Enable pooling"); | ||||||
|  |             JdbcPooledConnectionSource source = new JdbcPooledConnectionSource( | ||||||
|  |                 "jdbc:" + backend + ":" + config.getDatabase(), config.getUsername(), config.getPassword(), | ||||||
|  |                 new PostgresDatabaseType()); | ||||||
|  |             source.setMaxConnectionsFree(config.getMaxConnectionsFree()); | ||||||
|  |             source.setMaxConnectionAgeMillis(config.getMaxConnectionAgeMillis()); | ||||||
|  |             source.setCheckConnectionsEveryMillis(config.getCheckConnectionsEveryMillis()); | ||||||
|  |             source.setTestBeforeGet(config.isTestBeforeGetFromPool()); | ||||||
|  |             return source; | ||||||
|  |         } else { | ||||||
|  |             return new JdbcConnectionSource("jdbc:" + backend + ":" + config.getDatabase(), config.getUsername(), config.getPassword(), | ||||||
|  |                 new PostgresDatabaseType()); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|     private void runMigration(ConnectionSource connPol) throws SQLException { |     private void runMigration(ConnectionSource connPol) throws SQLException { | ||||||
|         ChangelogDao fixAcceptedDao = changelogDao.queryForId(Migrations.FIX_ACCEPTED_DAO); |         ChangelogDao fixAcceptedDao = changelogDao.queryForId(Migrations.FIX_ACCEPTED_DAO); | ||||||
|         if (fixAcceptedDao == null) { |         if (fixAcceptedDao == null) { | ||||||
|             fixAcceptedDao(connPol); |             fixAcceptedDao(connPol); | ||||||
|             changelogDao.create(new ChangelogDao(Migrations.FIX_ACCEPTED_DAO, new Date(), "Recreate the accepted table.")); |             changelogDao.create(new ChangelogDao(Migrations.FIX_ACCEPTED_DAO, new Date(), "Recreate the accepted table.")); | ||||||
|         } |         } | ||||||
|  |         ChangelogDao fixHashDaoUniqueIndex = changelogDao.queryForId(Migrations.FIX_HASH_DAO_UNIQUE_INDEX); | ||||||
|  |         if (fixHashDaoUniqueIndex == null) { | ||||||
|  |             fixHashDaoUniqueIndex(connPol); | ||||||
|  |             changelogDao | ||||||
|  |                 .create(new ChangelogDao(Migrations.FIX_HASH_DAO_UNIQUE_INDEX, new Date(), "Add the id and migrate the unique index.")); | ||||||
|  |         } | ||||||
|  |         ChangelogDao fixInviteTableColumnType = changelogDao.queryForId(Migrations.CHANGE_TYPE_TO_TEXT_INVITE); | ||||||
|  |         if (fixInviteTableColumnType == null) { | ||||||
|  |             fixInviteTableColumnType(connPol); | ||||||
|  |             changelogDao.create(new ChangelogDao(Migrations.CHANGE_TYPE_TO_TEXT_INVITE, new Date(), "Modify column type to text.")); | ||||||
|  |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     private void fixAcceptedDao(ConnectionSource connPool) throws SQLException { |     private void fixAcceptedDao(ConnectionSource connPool) throws SQLException { | ||||||
| @@ -133,10 +185,47 @@ public class OrmLiteSqlStorage implements IStorage { | |||||||
|         TableUtils.createTableIfNotExists(connPool, AcceptedDao.class); |         TableUtils.createTableIfNotExists(connPool, AcceptedDao.class); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |     private void fixHashDaoUniqueIndex(ConnectionSource connPool) throws SQLException { | ||||||
|  |         LOGGER.info("Migration: {}", Migrations.FIX_HASH_DAO_UNIQUE_INDEX); | ||||||
|  |         TableUtils.dropTable(hashDao, true); | ||||||
|  |         TableUtils.createTableIfNotExists(connPool, HashDao.class); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     private void fixInviteTableColumnType(ConnectionSource connPool) throws SQLException { | ||||||
|  |         LOGGER.info("Migration: {}", Migrations.CHANGE_TYPE_TO_TEXT_INVITE); | ||||||
|  |         if (StorageConfig.BackendEnum.postgresql == backend) { | ||||||
|  |             invDao.executeRawNoArgs("alter table invite_3pid alter column \"roomId\" type text"); | ||||||
|  |             invDao.executeRawNoArgs("alter table invite_3pid alter column id type text"); | ||||||
|  |             invDao.executeRawNoArgs("alter table invite_3pid alter column token type text"); | ||||||
|  |             invDao.executeRawNoArgs("alter table invite_3pid alter column sender type text"); | ||||||
|  |             invDao.executeRawNoArgs("alter table invite_3pid alter column medium type text"); | ||||||
|  |             invDao.executeRawNoArgs("alter table invite_3pid alter column address type text"); | ||||||
|  |             invDao.executeRawNoArgs("alter table invite_3pid alter column properties type text"); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|     private <V, K> Dao<V, K> createDaoAndTable(ConnectionSource connPool, Class<V> c) throws SQLException { |     private <V, K> Dao<V, K> createDaoAndTable(ConnectionSource connPool, Class<V> c) throws SQLException { | ||||||
|  |         return createDaoAndTable(connPool, c, false); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     /** | ||||||
|  |      * Workaround for https://github.com/j256/ormlite-core/issues/20. | ||||||
|  |      */ | ||||||
|  |     private <V, K> Dao<V, K> createDaoAndTable(ConnectionSource connPool, Class<V> c, boolean workaround) throws SQLException { | ||||||
|         LOGGER.info("Create the dao: {}", c.getSimpleName()); |         LOGGER.info("Create the dao: {}", c.getSimpleName()); | ||||||
|         Dao<V, K> dao = DaoManager.createDao(connPool, c); |         Dao<V, K> dao = DaoManager.createDao(connPool, c); | ||||||
|         TableUtils.createTableIfNotExists(connPool, c); |         if (workaround && StorageConfig.BackendEnum.postgresql.equals(backend)) { | ||||||
|  |             LOGGER.info("Workaround for postgresql on dao: {}", c.getSimpleName()); | ||||||
|  |             try { | ||||||
|  |                 dao.countOf(); | ||||||
|  |                 LOGGER.info("Table exists, do nothing"); | ||||||
|  |             } catch (SQLException e) { | ||||||
|  |                 LOGGER.info("Table doesn't exist, create"); | ||||||
|  |                 TableUtils.createTableIfNotExists(connPool, c); | ||||||
|  |             } | ||||||
|  |         } else { | ||||||
|  |             TableUtils.createTableIfNotExists(connPool, c); | ||||||
|  |         } | ||||||
|         return dao; |         return dao; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|   | |||||||
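The storage layer now takes a backend enum plus the full provider block, which is what enables the PostgreSQL path (optionally pooled) alongside SQLite. The following construction sketch is an assumption-laden illustration only: the setter names on PostgresqlStorageConfig and StorageConfig.Provider are guessed to mirror the getters used above and the SQLite test further down.

```java
import io.kamax.mxisd.config.PostgresqlStorageConfig;
import io.kamax.mxisd.config.StorageConfig;
import io.kamax.mxisd.storage.ormlite.OrmLiteSqlStorage;

// Sketch only: building the storage against PostgreSQL. The JDBC URL becomes
// "jdbc:postgresql:" + database, so the database value carries host, port and name.
public class PostgresStorageSketch {

    public static void main(String[] args) {
        PostgresqlStorageConfig pgConfig = new PostgresqlStorageConfig();
        pgConfig.setDatabase("//localhost:5432/ma1sd"); // assumed setter matching getDatabase()
        pgConfig.setUsername("ma1sd");                  // assumed setter matching getUsername()
        pgConfig.setPassword("secret");                 // assumed setter matching getPassword()

        StorageConfig.Provider provider = new StorageConfig.Provider();
        provider.setPostgresql(pgConfig);               // assumed, mirrors setSqlite() in the test below

        new OrmLiteSqlStorage(StorageConfig.BackendEnum.postgresql, provider);
    }
}
```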
| @@ -6,13 +6,16 @@ import com.j256.ormlite.table.DatabaseTable; | |||||||
| @DatabaseTable(tableName = "hashes") | @DatabaseTable(tableName = "hashes") | ||||||
| public class HashDao { | public class HashDao { | ||||||
|  |  | ||||||
|     @DatabaseField(canBeNull = false, id = true) |     @DatabaseField(generatedId = true) | ||||||
|  |     private Long id; | ||||||
|  |  | ||||||
|  |     @DatabaseField(canBeNull = false, uniqueCombo = true) | ||||||
|     private String mxid; |     private String mxid; | ||||||
|  |  | ||||||
|     @DatabaseField(canBeNull = false) |     @DatabaseField(canBeNull = false, uniqueCombo = true) | ||||||
|     private String medium; |     private String medium; | ||||||
|  |  | ||||||
|     @DatabaseField(canBeNull = false) |     @DatabaseField(canBeNull = false, uniqueCombo = true) | ||||||
|     private String address; |     private String address; | ||||||
|  |  | ||||||
|     @DatabaseField(canBeNull = false) |     @DatabaseField(canBeNull = false) | ||||||
| @@ -28,6 +31,14 @@ public class HashDao { | |||||||
|         this.hash = hash; |         this.hash = hash; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |     public Long getId() { | ||||||
|  |         return id; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public void setId(Long id) { | ||||||
|  |         this.id = id; | ||||||
|  |     } | ||||||
|  |  | ||||||
|     public String getMxid() { |     public String getMxid() { | ||||||
|         return mxid; |         return mxid; | ||||||
|     } |     } | ||||||
|   | |||||||
| @@ -20,6 +20,8 @@ | |||||||
|  |  | ||||||
| package io.kamax.mxisd.test.storage; | package io.kamax.mxisd.test.storage; | ||||||
|  |  | ||||||
|  | import io.kamax.mxisd.config.SQLiteStorageConfig; | ||||||
|  | import io.kamax.mxisd.config.StorageConfig; | ||||||
| import io.kamax.mxisd.storage.ormlite.OrmLiteSqlStorage; | import io.kamax.mxisd.storage.ormlite.OrmLiteSqlStorage; | ||||||
| import org.junit.Test; | import org.junit.Test; | ||||||
|  |  | ||||||
| @@ -29,14 +31,22 @@ public class OrmLiteSqlStorageTest { | |||||||
|  |  | ||||||
|     @Test |     @Test | ||||||
|     public void insertAsTxnDuplicate() { |     public void insertAsTxnDuplicate() { | ||||||
|         OrmLiteSqlStorage store = new OrmLiteSqlStorage("sqlite", ":memory:"); |         StorageConfig.Provider provider = new StorageConfig.Provider(); | ||||||
|  |         SQLiteStorageConfig config = new SQLiteStorageConfig(); | ||||||
|  |         config.setDatabase(":memory:"); | ||||||
|  |         provider.setSqlite(config); | ||||||
|  |         OrmLiteSqlStorage store = new OrmLiteSqlStorage(StorageConfig.BackendEnum.sqlite, provider); | ||||||
|         store.insertTransactionResult("mxisd", "1", Instant.now(), "{}"); |         store.insertTransactionResult("mxisd", "1", Instant.now(), "{}"); | ||||||
|         store.insertTransactionResult("mxisd", "2", Instant.now(), "{}"); |         store.insertTransactionResult("mxisd", "2", Instant.now(), "{}"); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @Test(expected = RuntimeException.class) |     @Test(expected = RuntimeException.class) | ||||||
|     public void insertAsTxnSame() { |     public void insertAsTxnSame() { | ||||||
|         OrmLiteSqlStorage store = new OrmLiteSqlStorage("sqlite", ":memory:"); |         StorageConfig.Provider provider = new StorageConfig.Provider(); | ||||||
|  |         SQLiteStorageConfig config = new SQLiteStorageConfig(); | ||||||
|  |         config.setDatabase(":memory:"); | ||||||
|  |         provider.setSqlite(config); | ||||||
|  |         OrmLiteSqlStorage store = new OrmLiteSqlStorage(StorageConfig.BackendEnum.sqlite, provider); | ||||||
|         store.insertTransactionResult("mxisd", "1", Instant.now(), "{}"); |         store.insertTransactionResult("mxisd", "1", Instant.now(), "{}"); | ||||||
|         store.insertTransactionResult("mxisd", "1", Instant.now(), "{}"); |         store.insertTransactionResult("mxisd", "1", Instant.now(), "{}"); | ||||||
|     } |     } | ||||||
|   | |||||||