diff --git a/common/src/main/java/io/druid/metadata/PasswordProvider.java b/common/src/main/java/io/druid/metadata/PasswordProvider.java index 7d8292a2bf6b..ec8855aeb214 100644 --- a/common/src/main/java/io/druid/metadata/PasswordProvider.java +++ b/common/src/main/java/io/druid/metadata/PasswordProvider.java @@ -24,7 +24,7 @@ /** - * Implement this for different ways to (optionally securely) access db passwords. + * Implement this for different ways to (optionally securely) access secrets. */ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DefaultPasswordProvider.class) @JsonSubTypes(value = { diff --git a/common/src/test/java/io/druid/metadata/MetadataStorageConnectorConfigTest.java b/common/src/test/java/io/druid/metadata/MetadataStorageConnectorConfigTest.java index 8ad962fd9488..00be7fd284b5 100644 --- a/common/src/test/java/io/druid/metadata/MetadataStorageConnectorConfigTest.java +++ b/common/src/test/java/io/druid/metadata/MetadataStorageConnectorConfigTest.java @@ -76,7 +76,7 @@ public void testEquals() throws IOException private static final ObjectMapper jsonMapper = new ObjectMapper(); @Test - public void testMetadaStorageConnectionConfigSimplePassword() throws Exception + public void testMetadataStorageConnectionConfigSimplePassword() throws Exception { testMetadataStorageConnectionConfig( true, @@ -90,7 +90,7 @@ public void testMetadaStorageConnectionConfigSimplePassword() throws Exception } @Test - public void testMetadaStorageConnectionConfigWithDefaultProviderPassword() throws Exception + public void testMetadataStorageConnectionConfigWithDefaultProviderPassword() throws Exception { testMetadataStorageConnectionConfig( true, diff --git a/distribution/pom.xml b/distribution/pom.xml index 83b42df61ed0..e35f2ced7f17 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -110,6 +110,8 @@ io.druid.extensions:druid-stats -c io.druid.extensions:druid-examples + -c + io.druid.extensions:simple-client-sslcontext 
${druid.distribution.pulldeps.opts} diff --git a/docs/content/configuration/broker.md b/docs/content/configuration/broker.md index 98fa02f129cd..349d96442ffd 100644 --- a/docs/content/configuration/broker.md +++ b/docs/content/configuration/broker.md @@ -15,7 +15,8 @@ The broker node uses several of the global configs in [Configuration](../configu |Property|Description|Default| |--------|-----------|-------| |`druid.host`|The host for the current node. This is used to advertise the current processes location as reachable from another node and should generally be specified such that `http://${druid.host}/` could actually talk to this process|InetAddress.getLocalHost().getCanonicalHostName()| -|`druid.port`|This is the port to actually listen on; unless port mapping is used, this will be the same port as is on `druid.host`|8082| +|`druid.plaintextPort`|This is the port to actually listen on; unless port mapping is used, this will be the same port as is on `druid.host`|8082| +|`druid.tlsPort`|TLS port for HTTPS connector, if [druid.server.http.tls](../operations/tls-support.html) is set then this config will be used. If `druid.host` contains port then that port will be ignored. This should be a non-negative Integer.|8282| |`druid.service`|The name of the service. This is used as a dimension when emitting metrics and alerts to differentiate between the various services|druid/broker| ### Query Configs diff --git a/docs/content/configuration/coordinator.md b/docs/content/configuration/coordinator.md index cde94abe7e16..5a38a338fb1a 100644 --- a/docs/content/configuration/coordinator.md +++ b/docs/content/configuration/coordinator.md @@ -15,7 +15,8 @@ The coordinator node uses several of the global configs in [Configuration](../co |Property|Description|Default| |--------|-----------|-------| |`druid.host`|The host for the current node. 
This is used to advertise the current processes location as reachable from another node and should generally be specified such that `http://${druid.host}/` could actually talk to this process|InetAddress.getLocalHost().getCanonicalHostName()| -|`druid.port`|This is the port to actually listen on; unless port mapping is used, this will be the same port as is on `druid.host`|8081| +|`druid.plaintextPort`|This is the port to actually listen on; unless port mapping is used, this will be the same port as is on `druid.host`|8081| +|`druid.tlsPort`|TLS port for HTTPS connector, if [druid.server.http.tls](../operations/tls-support.html) is set then this config will be used. If `druid.host` contains port then that port will be ignored. This should be a non-negative Integer.|8281| |`druid.service`|The name of the service. This is used as a dimension when emitting metrics and alerts to differentiate between the various services|druid/coordinator| ### Coordinator Operation diff --git a/docs/content/configuration/historical.md b/docs/content/configuration/historical.md index d97c3e294f04..321408e78e16 100644 --- a/docs/content/configuration/historical.md +++ b/docs/content/configuration/historical.md @@ -15,7 +15,8 @@ The historical node uses several of the global configs in [Configuration](../con |Property|Description|Default| |--------|-----------|-------| |`druid.host`|The host for the current node. 
This is used to advertise the current processes location as reachable from another node and should generally be specified such that `http://${druid.host}/` could actually talk to this process|InetAddress.getLocalHost().getCanonicalHostName()| -|`druid.port`|This is the port to actually listen on; unless port mapping is used, this will be the same port as is on `druid.host`|8083| +|`druid.plaintextPort`|This is the port to actually listen on; unless port mapping is used, this will be the same port as is on `druid.host`|8083| +|`druid.tlsPort`|TLS port for HTTPS connector, if [druid.server.http.tls](../operations/tls-support.html) is set then this config will be used. If `druid.host` contains port then that port will be ignored. This should be a non-negative Integer.|8283| |`druid.service`|The name of the service. This is used as a dimension when emitting metrics and alerts to differentiate between the various services|druid/historical| ### General Configuration diff --git a/docs/content/configuration/index.md b/docs/content/configuration/index.md index 10609730caa9..5f054ea74d51 100644 --- a/docs/content/configuration/index.md +++ b/docs/content/configuration/index.md @@ -234,7 +234,7 @@ These properties specify the jdbc connection and other configuration around the |`druid.metadata.storage.type`|The type of metadata storage to use. 
Choose from "mysql", "postgresql", or "derby".|derby| |`druid.metadata.storage.connector.connectURI`|The jdbc uri for the database to connect to|none| |`druid.metadata.storage.connector.user`|The username to connect with.|none| -|`druid.metadata.storage.connector.password`|The password provider or String password used to connect with.|none| +|`druid.metadata.storage.connector.password`|The [Password Provider](../operations/password-provider.html) or String password used to connect with.|none| |`druid.metadata.storage.connector.createTables`|If Druid requires a table and it doesn't exist, create it?|true| |`druid.metadata.storage.tables.base`|The base name for tables.|druid| |`druid.metadata.storage.tables.segments`|The table to use to look for segments.|druid_segments| @@ -246,26 +246,6 @@ These properties specify the jdbc connection and other configuration around the |`druid.metadata.storage.tables.supervisors`|Used by the indexing service to store supervisor configurations.|druid_supervisors| |`druid.metadata.storage.tables.audit`|The table to use for audit history of configuration changes e.g. Coordinator rules.|druid_audit| -#### Password Provider - -Environment variable password provider provides password by looking at specified environment variable. Use this in order to avoid specifying password in runtime.properties file. -e.g - -```json -{ - "type": "environment", - "variable": "METADATA_STORAGE_PASSWORD" -} -``` - -The values are described below. - -|Field|Type|Description|Required| -|-----|----|-----------|--------| -|`type`|String|password provider type|Yes: `environment`| -|`variable`|String|environment variable to read password from|Yes| - - ### Deep Storage The configurations concern how to push and pull [Segments](../design/segments.html) from deep storage. 
diff --git a/docs/content/configuration/indexing-service.md b/docs/content/configuration/indexing-service.md index 2d00c140514f..36d6f8603a84 100644 --- a/docs/content/configuration/indexing-service.md +++ b/docs/content/configuration/indexing-service.md @@ -9,14 +9,24 @@ The indexing service uses several of the global configs in [Configuration](../co ### Must be set on Overlord and Middle Manager -#### Node Configs +#### Overlord Node Configs |Property|Description|Default| |--------|-----------|-------| |`druid.host`|The host for the current node. This is used to advertise the current processes location as reachable from another node and should generally be specified such that `http://${druid.host}/` could actually talk to this process|InetAddress.getLocalHost().getCanonicalHostName()| -|`druid.port`|This is the port to actually listen on; unless port mapping is used, this will be the same port as is on `druid.host`|8090| +|`druid.plaintextPort`|This is the port to actually listen on; unless port mapping is used, this will be the same port as is on `druid.host`|8090| +|`druid.tlsPort`|TLS port for HTTPS connector, if [druid.server.http.tls](../operations/tls-support.html) is set then this config will be used. If `druid.host` contains port then that port will be ignored. This should be a non-negative Integer.|8290| |`druid.service`|The name of the service. This is used as a dimension when emitting metrics and alerts to differentiate between the various services|druid/overlord| +#### MiddleManager Node Configs + +|Property|Description|Default| +|--------|-----------|-------| +|`druid.host`|The host for the current node. 
This is used to advertise the current processes location as reachable from another node and should generally be specified such that `http://${druid.host}/` could actually talk to this process|InetAddress.getLocalHost().getCanonicalHostName()| +|`druid.plaintextPort`|This is the port to actually listen on; unless port mapping is used, this will be the same port as is on `druid.host`|8091| +|`druid.tlsPort`|TLS port for HTTPS connector, if [druid.server.http.tls](../operations/tls-support.html) is set then this config will be used. If `druid.host` contains port then that port will be ignored. This should be a non-negative Integer.|8291| +|`druid.service`|The name of the service. This is used as a dimension when emitting metrics and alerts to differentiate between the various services|druid/middlemanager| + #### Task Logging If you are running the indexing service in remote mode, the task logs must be stored in S3, Azure Blob Store, Google Cloud Storage or HDFS. @@ -297,8 +307,9 @@ Middle managers pass their configurations down to their child peons. The middle |`druid.indexer.runner.javaOpts`|*DEPRECATED* A string of -X Java options to pass to the peon's JVM. Quotable parameters or parameters with spaces are encouraged to use javaOptsArray|""| |`druid.indexer.runner.javaOptsArray`|A json array of strings to be passed in as options to the peon's jvm. 
This is additive to javaOpts and is recommended for properly handling arguments which contain quotes or spaces like `["-XX:OnOutOfMemoryError=kill -9 %p"]`|`[]`| |`druid.indexer.runner.maxZnodeBytes`|The maximum size Znode in bytes that can be created in Zookeeper.|524288| -|`druid.indexer.runner.startPort`|The port that peons begin running on.|8100| -|`druid.indexer.runner.separateIngestionEndpoint`|*Deprecated.* Use separate server and consequently separate jetty thread pool for ingesting events|false| +|`druid.indexer.runner.startPort`|Starting port used for peon processes, should be greater than 1023.|8100| +|`druid.indexer.runner.tlsStartPort`|Starting TLS port for peon processes, should be greater than 1023.|8300| +|`druid.indexer.runner.separateIngestionEndpoint`|*Deprecated.* Use separate server and consequently separate jetty thread pool for ingesting events. Not supported with TLS.|false| |`druid.worker.ip`|The IP of the worker.|localhost| |`druid.worker.version`|Version identifier for the middle manager.|0| |`druid.worker.capacity`|Maximum number of tasks the middle manager can accept.|Number of available processors - 1| diff --git a/docs/content/configuration/realtime.md b/docs/content/configuration/realtime.md index ca634e6072df..8fc157b3f3ab 100644 --- a/docs/content/configuration/realtime.md +++ b/docs/content/configuration/realtime.md @@ -16,7 +16,8 @@ The realtime node uses several of the global configs in [Configuration](../confi |Property|Description|Default| |--------|-----------|-------| |`druid.host`|The host for the current node. 
This is used to advertise the current processes location as reachable from another node and should generally be specified such that `http://${druid.host}/` could actually talk to this process|InetAddress.getLocalHost().getCanonicalHostName()| -|`druid.port`|This is the port to actually listen on; unless port mapping is used, this will be the same port as is on `druid.host`|8084| +|`druid.plaintextPort`|This is the port to actually listen on; unless port mapping is used, this will be the same port as is on `druid.host`|8084| +|`druid.tlsPort`|TLS port for HTTPS connector, if [druid.server.http.tls](../operations/tls-support.html) is set then this config will be used. If `druid.host` contains port then that port will be ignored. This should be a non-negative Integer.|8284| |`druid.service`|The name of the service. This is used as a dimension when emitting metrics and alerts to differentiate between the various services|druid/realtime| ### Realtime Operation diff --git a/docs/content/development/extensions-core/simple-client-sslcontext.md b/docs/content/development/extensions-core/simple-client-sslcontext.md new file mode 100644 index 000000000000..de7dfb1d6641 --- /dev/null +++ b/docs/content/development/extensions-core/simple-client-sslcontext.md @@ -0,0 +1,22 @@ +--- +layout: doc_page +--- + +## Simple SSLContext Provider Module + +This module contains a simple implementation of [SSLContext](http://docs.oracle.com/javase/8/docs/api/javax/net/ssl/SSLContext.html) +that will be injected to be used with HttpClient that Druid nodes use internally to communicate with each other. To learn more about +Java's SSL support, please refer to [this](http://docs.oracle.com/javase/8/docs/technotes/guides/security/jsse/JSSERefGuide.html) guide. 
+ +# Configuration + +|Property|Description|Default|Required| +|--------|-----------|-------|--------| +|`druid.client.https.protocol`|SSL protocol to use.|`TLSv1.2`|no| +|`druid.client.https.trustStoreType`|The type of the key store where trusted root certificates are stored.|`java.security.KeyStore.getDefaultType()`|no| +|`druid.client.https.trustStorePath`|The file path or URL of the TLS/SSL Key store where trusted root certificates are stored.|none|yes| +|`druid.client.https.trustStoreAlgorithm`|Algorithm to be used by TrustManager to validate certificate chains|`javax.net.ssl.TrustManagerFactory.getDefaultAlgorithm()`|no| +|`druid.client.https.trustStorePassword`|The [Password Provider](../../operations/password-provider.html) or String password for the Trust Store.|none|yes| + +This [document](http://docs.oracle.com/javase/8/docs/technotes/guides/security/StandardNames.html) lists all the possible +values for the above mentioned configs among others provided by Java implementation. \ No newline at end of file diff --git a/docs/content/development/extensions.md b/docs/content/development/extensions.md index d6bfca71dba0..d2aedbe77fba 100644 --- a/docs/content/development/extensions.md +++ b/docs/content/development/extensions.md @@ -37,6 +37,7 @@ Core extensions are maintained by Druid committers. 
|druid-stats|Statistics related module including variance and standard deviation.|[link](../development/extensions-core/stats.html)| |mysql-metadata-storage|MySQL metadata store.|[link](../development/extensions-core/mysql.html)| |postgresql-metadata-storage|PostgreSQL metadata store.|[link](../development/extensions-core/postgresql.html)| +|simple-client-sslcontext|Simple SSLContext provider module to be used by internal HttpClient talking to other nodes over HTTPS.|[link](../development/extensions-core/simple-client-sslcontext.html)| # Community Extensions diff --git a/docs/content/development/modules.md b/docs/content/development/modules.md index e44d3db2de9e..33bfc1045186 100644 --- a/docs/content/development/modules.md +++ b/docs/content/development/modules.md @@ -178,6 +178,23 @@ Adding new Jersey resources to a module requires calling the following code to b Jerseys.addResource(binder, NewResource.class); ``` +### Adding a new Password Provider implementation + +You will need to implement `io.druid.metadata.PasswordProvider` interface. For every place where Druid uses PasswordProvider, a new instance of the implementation will be created, +thus make sure all the necessary information required for fetching each password is supplied during object instantiation. +In your implementation of `io.druid.initialization.DruidModule`, `getJacksonModules` should look something like this - + +``` java + return ImmutableList.of( + new SimpleModule("SomePasswordProviderModule") + .registerSubtypes( + new NamedType(SomePasswordProvider.class, "some") + ) + ); +``` + +where `SomePasswordProvider` is the implementation of `PasswordProvider` interface, you can have a look at `io.druid.metadata.EnvironmentVariablePasswordProvider` for example. + ### Bundle your extension with all the other Druid extensions When you do `mvn install`, Druid extensions will be packaged within the Druid tarball and `extensions` directory, which are both underneath `distribution/target/`. 
diff --git a/docs/content/operations/password-provider.md b/docs/content/operations/password-provider.md new file mode 100644 index 000000000000..42ddae14f30d --- /dev/null +++ b/docs/content/operations/password-provider.md @@ -0,0 +1,31 @@ +#### Password Provider + +Druid needs some passwords for accessing various secured systems like metadata store, Key Store containing server certificates etc. +All these passwords have corresponding runtime properties associated with them, for example `druid.metadata.storage.connector.password` corresponds to the metadata store password. + +By default users can directly set the passwords in plaintext for these runtime properties, for example `druid.metadata.storage.connector.password=pwd` sets the metadata store password +to be used by Druid to connect to metadata store to `pwd`. Apart from this, users can use environment variables to get password in following way - + +Environment variable password provider provides password by looking at specified environment variable. Use this in order to avoid specifying password in runtime.properties file. +e.g + +```json +{ "type": "environment", "variable": "METADATA_STORAGE_PASSWORD" } +``` + +The values are described below. + +|Field|Type|Description|Required| +|-----|----|-----------|--------| +|`type`|String|password provider type|Yes: `environment`| +|`variable`|String|environment variable to read password from|Yes| + +However, many times users may want their own way to optionally securely fetch password during runtime of the Druid process. +Druid allows this by users to implement their own `PasswordProvider` interface and create a Druid extension to register this implementation at Druid process startup. +Please have a look at "Adding a new Password Provider implementation" on this [page](../development/modules.html) to learn more. 
+ +To use this implementation, simply set the relevant password runtime property to something similar as was done for Environment variable password provider like - + +```json +{ "type": "", "": "", ... } +``` \ No newline at end of file diff --git a/docs/content/operations/tls-support.md b/docs/content/operations/tls-support.md new file mode 100644 index 000000000000..08747d769d1b --- /dev/null +++ b/docs/content/operations/tls-support.md @@ -0,0 +1,46 @@ +--- +layout: doc_page +--- + +TLS Support +=============== + +# General Configuration + +|Property|Description|Default| +|--------|-----------|-------| +|`druid.server.http.plaintext`|Enable/Disable HTTP connector.|`true`| +|`druid.server.http.tls`|Enable/Disable HTTPS connector.|`false`| + +Although not recommended, both HTTP and HTTPS connectors can be enabled at the same time and respective ports are configurable using `druid.plaintextPort` +and `druid.tlsPort` properties on each node. Please see `Configuration` section of individual nodes to check the valid and default values for these ports. + +# Jetty Server TLS Configuration + +Druid uses Jetty as an embedded web server. To get familiar with TLS/SSL in general and related concepts like Certificates etc. +reading this [Jetty documentation](http://www.eclipse.org/jetty/documentation/9.3.x/configuring-ssl.html) might be helpful. +To get more in depth knowledge of TLS/SSL support in Java in general, please refer to this [guide](http://docs.oracle.com/javase/8/docs/technotes/guides/security/jsse/JSSERefGuide.html). +The documentation [here](http://www.eclipse.org/jetty/documentation/9.3.x/configuring-ssl.html#configuring-sslcontextfactory) +can help in understanding TLS/SSL configurations listed below. This [document](http://docs.oracle.com/javase/8/docs/technotes/guides/security/StandardNames.html) lists all the possible +values for the below mentioned configs among others provided by Java implementation. 
|Property|Description|Default|Required| +|--------|-----------|-------|--------| +|`druid.server.https.keyStorePath`|The file path or URL of the TLS/SSL Key store.|none|yes| +|`druid.server.https.keyStoreType`|The type of the key store.|none|yes| +|`druid.server.https.certAlias`|Alias of TLS/SSL certificate for the connector.|none|yes| +|`druid.server.https.keyStorePassword`|The [Password Provider](../operations/password-provider.html) or String password for the Key Store.|none|yes| +|`druid.server.https.keyManagerPassword`|The [Password Provider](../operations/password-provider.html) or String password for the Key Manager.|none|no| + +# Druid's internal communication over TLS + +Whenever possible Druid nodes will use HTTPS to talk to each other. To enable this communication Druid's HttpClient needs to +be configured with a proper [SSLContext](http://docs.oracle.com/javase/8/docs/api/javax/net/ssl/SSLContext.html) that is able +to validate the Server Certificates, otherwise communication will fail. + +Since there are various ways to configure SSLContext, by default, Druid looks for an instance of SSLContext Guice binding +while creating the HttpClient. This binding can be achieved by writing a [Druid extension](../development/extensions.html) +which can provide an instance of SSLContext. Druid comes with a simple extension present [here](../development/extensions-core/simple-client-sslcontext.html) +which should be useful enough for most simple cases, see [this](./including-extensions.html) for how to include extensions. +If this extension does not satisfy the requirement then please follow the extension [implementation](https://github.com/druid-io/druid/tree/master/extensions-core/simple-client-sslcontext) +to create your own extension. 
\ No newline at end of file diff --git a/docs/content/toc.md b/docs/content/toc.md index 6b9474735f46..6eca667d36c9 100644 --- a/docs/content/toc.md +++ b/docs/content/toc.md @@ -78,6 +78,8 @@ layout: toc * [Insert Segment Tool](/docs/VERSION/operations/insert-segment-to-db.html) * [Pull Dependencies Tool](/docs/VERSION/operations/pull-deps.html) * [Recommendations](/docs/VERSION/operations/recommendations.html) + * [TLS Support](/docs/VERSION/operations/tls-support.html) + * [Password Provider](/docs/VERSION/operations/password-provider.html) ## Configuration * [Common Configuration](/docs/VERSION/configuration/index.html) diff --git a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcHadoopInputRowParserTest.java b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcHadoopInputRowParserTest.java index c9f7bede1eab..1dfe672d0fa2 100644 --- a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcHadoopInputRowParserTest.java +++ b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcHadoopInputRowParserTest.java @@ -59,6 +59,7 @@ public void configure(Binder binder) { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("test"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); } }, new OrcExtensionsModule() diff --git a/extensions-contrib/time-min-max/src/test/java/io/druid/query/aggregation/TimestampMinMaxAggregatorTest.java b/extensions-contrib/time-min-max/src/test/java/io/druid/query/aggregation/TimestampMinMaxAggregatorTest.java index b74f56ce40c3..340864ab479b 100644 --- a/extensions-contrib/time-min-max/src/test/java/io/druid/query/aggregation/TimestampMinMaxAggregatorTest.java +++ b/extensions-contrib/time-min-max/src/test/java/io/druid/query/aggregation/TimestampMinMaxAggregatorTest.java @@ -112,6 +112,7 @@ public void configure(Binder binder) { 
binder.bindConstant().annotatedWith(Names.named("serviceName")).to("test"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); } }, new TimestampMinMaxModule() diff --git a/extensions-core/caffeine-cache/src/test/java/io/druid/client/cache/CaffeineCacheTest.java b/extensions-core/caffeine-cache/src/test/java/io/druid/client/cache/CaffeineCacheTest.java index b842c4a34b09..2605673ba20c 100644 --- a/extensions-core/caffeine-cache/src/test/java/io/druid/client/cache/CaffeineCacheTest.java +++ b/extensions-core/caffeine-cache/src/test/java/io/druid/client/cache/CaffeineCacheTest.java @@ -76,6 +76,7 @@ public void testBasicInjection() throws Exception binder -> { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/test/redis"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); binder.bind(CaffeineCacheConfig.class).toInstance(config); binder.bind(Cache.class).toProvider(CaffeineCacheProviderWithConfig.class).in(ManageLifecycle.class); @@ -103,6 +104,7 @@ public void testSimpleInjection() binder -> { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/test/redis"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); binder.bind(Cache.class).toProvider(CacheProvider.class); JsonConfigProvider.bind(binder, uuid, CacheProvider.class); @@ -387,6 +389,7 @@ public void testFromProperties() binder -> { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/test"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); JsonConfigProvider.bind(binder, keyPrefix, CaffeineCacheConfig.class); } ) @@ -417,6 +420,7 @@ public void 
testMixedCaseFromProperties() binder -> { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/test"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); JsonConfigProvider.bind(binder, keyPrefix, CaffeineCacheConfig.class); } ) @@ -444,6 +448,7 @@ public void testDefaultFromProperties() binder -> { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/test"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); JsonConfigProvider.bind(binder, keyPrefix, CaffeineCacheConfig.class); } ) diff --git a/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/TestKafkaExtractionCluster.java b/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/TestKafkaExtractionCluster.java index 96c1970f773f..ca7b94a2fc05 100644 --- a/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/TestKafkaExtractionCluster.java +++ b/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/TestKafkaExtractionCluster.java @@ -248,6 +248,7 @@ public void configure(Binder binder) { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("test"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); } }, // These injections fail under IntelliJ but are required for maven diff --git a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTaskClient.java b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTaskClient.java index db1618bf1f02..d87d47bfc2f1 100644 --- a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTaskClient.java +++ 
b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTaskClient.java @@ -182,7 +182,9 @@ public Map pause(final String id, final long timeout) if (response.getStatus().equals(HttpResponseStatus.OK)) { log.info("Task [%s] paused successfully", id); - return jsonMapper.readValue(response.getContent(), new TypeReference>() {}); + return jsonMapper.readValue(response.getContent(), new TypeReference>() + { + }); } final RetryPolicy retryPolicy = retryPolicyFactory.makeRetryPolicy(); @@ -256,7 +258,9 @@ public Map getCurrentOffsets(final String id, final boolean retry try { final FullResponseHolder response = submitRequest(id, HttpMethod.GET, "offsets/current", null, retry); - return jsonMapper.readValue(response.getContent(), new TypeReference>() {}); + return jsonMapper.readValue(response.getContent(), new TypeReference>() + { + }); } catch (NoTaskLocationException e) { return ImmutableMap.of(); @@ -272,7 +276,9 @@ public Map getEndOffsets(final String id) try { final FullResponseHolder response = submitRequest(id, HttpMethod.GET, "offsets/end", null, true); - return jsonMapper.readValue(response.getContent(), new TypeReference>() {}); + return jsonMapper.readValue(response.getContent(), new TypeReference>() + { + }); } catch (NoTaskLocationException e) { return ImmutableMap.of(); @@ -480,18 +486,34 @@ private FullResponseHolder submitRequest( throw new TaskNotRunnableException(StringUtils.format("Aborting request because task [%s] is not runnable", id)); } + String host = location.getHost(); + String scheme = ""; + int port = -1; + try { location = taskInfoProvider.getTaskLocation(id); if (location.equals(TaskLocation.unknown())) { throw new NoTaskLocationException(StringUtils.format("No TaskLocation available for task [%s]", id)); } + host = location.getHost(); + scheme = location.getTlsPort() >= 0 ? "https" : "http"; + port = location.getTlsPort() >= 0 ? 
location.getTlsPort() : location.getPort(); + // Netty throws some annoying exceptions if a connection can't be opened, which happens relatively frequently // for tasks that happen to still be starting up, so test the connection first to keep the logs clean. - checkConnection(location.getHost(), location.getPort()); + checkConnection(host, port); try { - URI serviceUri = new URI("http", null, location.getHost(), location.getPort(), path, query, null); + URI serviceUri = new URI( + scheme, + null, + host, + port, + path, + query, + null + ); request = new Request(method, serviceUri.toURL()); // used to validate that we are talking to the correct worker @@ -543,10 +565,15 @@ private FullResponseHolder submitRequest( } else { delay = retryPolicy.getAndIncrementRetryDelay(); } - String urlForLog = (request != null ? request.getUrl().toString() - : StringUtils.format("http://%s:%d%s", location.getHost(), location.getPort(), path)); + : StringUtils.format( + "%s://%s:%d%s", + scheme, + host, + port, + path + )); if (!retry) { // if retry=false, we probably aren't too concerned if the operation doesn't succeed (i.e. 
the request was // for informational purposes only) so don't log a scary stack trace diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java index 01179cd8f6c6..a895de01e383 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java @@ -72,6 +72,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport private static final List TEST_IDS = Lists.newArrayList("test-id1", "test-id2", "test-id3", "test-id4"); private static final String TEST_HOST = "test-host"; private static final int TEST_PORT = 1234; + private static final int TEST_TLS_PORT = -1; private static final String TEST_DATASOURCE = "test-datasource"; private static final Duration TEST_HTTP_TIMEOUT = new Duration(5000); private static final long TEST_NUM_RETRIES = 0; @@ -106,11 +107,11 @@ public void setUp() throws Exception headers = createMock(HttpHeaders.class); client = new TestableKafkaIndexTaskClient(httpClient, objectMapper, taskInfoProvider); - expect(taskInfoProvider.getTaskLocation(TEST_ID)).andReturn(new TaskLocation(TEST_HOST, TEST_PORT)).anyTimes(); + expect(taskInfoProvider.getTaskLocation(TEST_ID)).andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT)).anyTimes(); expect(taskInfoProvider.getTaskStatus(TEST_ID)).andReturn(Optional.of(TaskStatus.running(TEST_ID))).anyTimes(); for (int i = 0; i < TEST_IDS.size(); i++) { - expect(taskInfoProvider.getTaskLocation(TEST_IDS.get(i))).andReturn(new TaskLocation(TEST_HOST, TEST_PORT)) + expect(taskInfoProvider.getTaskLocation(TEST_IDS.get(i))).andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT)) .anyTimes(); 
expect(taskInfoProvider.getTaskStatus(TEST_IDS.get(i))).andReturn(Optional.of(TaskStatus.running(TEST_IDS.get(i)))) .anyTimes(); @@ -149,7 +150,7 @@ public void testNoTaskLocation() throws Exception public void testTaskNotRunnableException() throws Exception { reset(taskInfoProvider); - expect(taskInfoProvider.getTaskLocation(TEST_ID)).andReturn(new TaskLocation(TEST_HOST, TEST_PORT)).anyTimes(); + expect(taskInfoProvider.getTaskLocation(TEST_ID)).andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT)).anyTimes(); expect(taskInfoProvider.getTaskStatus(TEST_ID)).andReturn(Optional.of(TaskStatus.failure(TEST_ID))).anyTimes(); replayAll(); diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java index c67dce5928bc..84ef4e92c5ef 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java @@ -878,7 +878,7 @@ public void testQueueNextTasksOnSuccess() throws Exception @Test public void testBeginPublishAndQueueNextTasks() throws Exception { - final TaskLocation location = new TaskLocation("testHost", 1234); + final TaskLocation location = new TaskLocation("testHost", 1234, -1); supervisor = getSupervisor(2, 2, true, "PT1M", null, false); addSomeEvents(100); @@ -961,7 +961,7 @@ public void testBeginPublishAndQueueNextTasks() throws Exception @Test public void testDiscoverExistingPublishingTask() throws Exception { - final TaskLocation location = new TaskLocation("testHost", 1234); + final TaskLocation location = new TaskLocation("testHost", 1234, -1); supervisor = getSupervisor(1, 1, true, "PT1H", null, false); addSomeEvents(1); @@ -1051,7 +1051,7 @@ public void testDiscoverExistingPublishingTask() 
throws Exception @Test public void testDiscoverExistingPublishingTaskWithDifferentPartitionAllocation() throws Exception { - final TaskLocation location = new TaskLocation("testHost", 1234); + final TaskLocation location = new TaskLocation("testHost", 1234, -1); supervisor = getSupervisor(1, 1, true, "PT1H", null, false); addSomeEvents(1); @@ -1141,8 +1141,8 @@ public void testDiscoverExistingPublishingTaskWithDifferentPartitionAllocation() @Test public void testDiscoverExistingPublishingAndReadingTask() throws Exception { - final TaskLocation location1 = new TaskLocation("testHost", 1234); - final TaskLocation location2 = new TaskLocation("testHost2", 145); + final TaskLocation location1 = new TaskLocation("testHost", 1234, -1); + final TaskLocation location2 = new TaskLocation("testHost2", 145, -1); final DateTime startTime = new DateTime(); supervisor = getSupervisor(1, 1, true, "PT1H", null, false); @@ -1281,7 +1281,7 @@ public void testKillUnresponsiveTasksWhileGettingStartTime() throws Exception @Test public void testKillUnresponsiveTasksWhilePausing() throws Exception { - final TaskLocation location = new TaskLocation("testHost", 1234); + final TaskLocation location = new TaskLocation("testHost", 1234, -1); supervisor = getSupervisor(2, 2, true, "PT1M", null, false); addSomeEvents(100); @@ -1348,7 +1348,7 @@ public void testKillUnresponsiveTasksWhilePausing() throws Exception @Test public void testKillUnresponsiveTasksWhileSettingEndOffsets() throws Exception { - final TaskLocation location = new TaskLocation("testHost", 1234); + final TaskLocation location = new TaskLocation("testHost", 1234, -1); supervisor = getSupervisor(2, 2, true, "PT1M", null, false); addSomeEvents(100); @@ -1445,8 +1445,8 @@ public void testStop() throws Exception @Test public void testStopGracefully() throws Exception { - final TaskLocation location1 = new TaskLocation("testHost", 1234); - final TaskLocation location2 = new TaskLocation("testHost2", 145); + final TaskLocation 
location1 = new TaskLocation("testHost", 1234, -1); + final TaskLocation location2 = new TaskLocation("testHost2", 145, -1); final DateTime startTime = new DateTime(); supervisor = getSupervisor(2, 1, true, "PT1H", null, false); @@ -1631,8 +1631,8 @@ public void testResetNoDataSourceMetadata() throws Exception @Test public void testResetRunningTasks() throws Exception { - final TaskLocation location1 = new TaskLocation("testHost", 1234); - final TaskLocation location2 = new TaskLocation("testHost2", 145); + final TaskLocation location1 = new TaskLocation("testHost", 1234, -1); + final TaskLocation location2 = new TaskLocation("testHost2", 145, -1); final DateTime startTime = new DateTime(); supervisor = getSupervisor(2, 1, true, "PT1H", null, false); diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/query/lookup/NamespaceLookupExtractorFactoryTest.java b/extensions-core/lookups-cached-global/src/test/java/io/druid/query/lookup/NamespaceLookupExtractorFactoryTest.java index 463235d69239..74e613d59772 100644 --- a/extensions-core/lookups-cached-global/src/test/java/io/druid/query/lookup/NamespaceLookupExtractorFactoryTest.java +++ b/extensions-core/lookups-cached-global/src/test/java/io/druid/query/lookup/NamespaceLookupExtractorFactoryTest.java @@ -41,6 +41,7 @@ import io.druid.query.lookup.namespace.ExtractionNamespace; import io.druid.query.lookup.namespace.UriExtractionNamespace; import io.druid.server.DruidNode; +import io.druid.server.initialization.ServerConfig; import io.druid.server.lookup.namespace.cache.CacheScheduler; import io.druid.server.lookup.namespace.cache.NamespaceExtractionCacheManager; import org.easymock.EasyMock; @@ -524,7 +525,7 @@ private Injector makeInjector() public void configure(Binder binder) { JsonConfigProvider.bindInstance( - binder, Key.get(DruidNode.class, Self.class), new DruidNode("test-inject", null, null) + binder, Key.get(DruidNode.class, Self.class), new DruidNode("test-inject", null, null, null, new 
ServerConfig()) ); } } diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/OffHeapNamespaceExtractionCacheManagerTest.java b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/OffHeapNamespaceExtractionCacheManagerTest.java index 570a26668137..25df1f53ebb6 100644 --- a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/OffHeapNamespaceExtractionCacheManagerTest.java +++ b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/OffHeapNamespaceExtractionCacheManagerTest.java @@ -29,6 +29,7 @@ import io.druid.guice.annotations.Self; import io.druid.initialization.Initialization; import io.druid.server.DruidNode; +import io.druid.server.initialization.ServerConfig; import io.druid.server.lookup.namespace.NamespaceExtractionModule; import org.junit.Assert; import org.junit.Test; @@ -49,7 +50,7 @@ public void testInjection() public void configure(Binder binder) { JsonConfigProvider.bindInstance( - binder, Key.get(DruidNode.class, Self.class), new DruidNode("test-inject", null, null) + binder, Key.get(DruidNode.class, Self.class), new DruidNode("test-inject", null, null, null, new ServerConfig()) ); } } diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/OnHeapNamespaceExtractionCacheManagerTest.java b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/OnHeapNamespaceExtractionCacheManagerTest.java index 57137f43ea7a..134e5fdd95de 100644 --- a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/OnHeapNamespaceExtractionCacheManagerTest.java +++ b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/OnHeapNamespaceExtractionCacheManagerTest.java @@ -29,6 +29,7 @@ import io.druid.guice.annotations.Self; import 
io.druid.initialization.Initialization; import io.druid.server.DruidNode; +import io.druid.server.initialization.ServerConfig; import io.druid.server.lookup.namespace.NamespaceExtractionModule; import org.junit.Assert; import org.junit.Test; @@ -49,7 +50,7 @@ public void testInjection() public void configure(Binder binder) { JsonConfigProvider.bindInstance( - binder, Key.get(DruidNode.class, Self.class), new DruidNode("test-inject", null, null) + binder, Key.get(DruidNode.class, Self.class), new DruidNode("test-inject", null, null, null, new ServerConfig()) ); } } diff --git a/extensions-core/simple-client-sslcontext/README.md b/extensions-core/simple-client-sslcontext/README.md new file mode 100644 index 000000000000..2233f4aba2b6 --- /dev/null +++ b/extensions-core/simple-client-sslcontext/README.md @@ -0,0 +1,3 @@ +This module contains a simple implementation of [SslContext](http://docs.oracle.com/javase/8/docs/api/javax/net/ssl/SSLContext.html) +that will be injected to be used with HttpClient that Druid nodes use internally to communicate with each other. +More details [here](http://druid.io/docs/latest/development/extensions-core/simple-client-sslcontext.html). 
diff --git a/extensions-core/simple-client-sslcontext/pom.xml b/extensions-core/simple-client-sslcontext/pom.xml new file mode 100644 index 000000000000..be4333b13fce --- /dev/null +++ b/extensions-core/simple-client-sslcontext/pom.xml @@ -0,0 +1,43 @@ + + + + druid + io.druid + 0.11.0-SNAPSHOT + ../../pom.xml + + 4.0.0 + + io.druid.extensions + simple-client-sslcontext + simple-client-sslcontext + simple-client-sslcontext + + + + io.druid + druid-api + ${project.parent.version} + provided + + + io.druid + druid-common + ${project.parent.version} + provided + + + io.druid + druid-server + ${project.parent.version} + provided + + + com.metamx + emitter + provided + + + diff --git a/extensions-core/simple-client-sslcontext/src/main/java/io/druid/https/SSLClientConfig.java b/extensions-core/simple-client-sslcontext/src/main/java/io/druid/https/SSLClientConfig.java new file mode 100644 index 000000000000..dbd7636ee5c1 --- /dev/null +++ b/extensions-core/simple-client-sslcontext/src/main/java/io/druid/https/SSLClientConfig.java @@ -0,0 +1,77 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.https; + +import com.fasterxml.jackson.annotation.JsonProperty; +import io.druid.metadata.PasswordProvider; + +public class SSLClientConfig +{ + @JsonProperty + private String protocol; + + @JsonProperty + private String trustStoreType; + + @JsonProperty + private String trustStorePath; + + @JsonProperty + private String trustStoreAlgorithm; + + @JsonProperty("trustStorePassword") + private PasswordProvider trustStorePasswordProvider; + + public String getProtocol() + { + return protocol; + } + + public String getTrustStoreType() + { + return trustStoreType; + } + + public String getTrustStorePath() + { + return trustStorePath; + } + + public String getTrustStoreAlgorithm() + { + return trustStoreAlgorithm; + } + + public PasswordProvider getTrustStorePasswordProvider() + { + return trustStorePasswordProvider; + } + + @Override + public String toString() + { + return "SSLClientConfig{" + + "protocol='" + protocol + '\'' + + ", trustStoreType='" + trustStoreType + '\'' + + ", trustStorePath='" + trustStorePath + '\'' + + ", trustStoreAlgorithm='" + trustStoreAlgorithm + '\'' + + '}'; + } +} diff --git a/extensions-core/simple-client-sslcontext/src/main/java/io/druid/https/SSLContextModule.java b/extensions-core/simple-client-sslcontext/src/main/java/io/druid/https/SSLContextModule.java new file mode 100644 index 000000000000..9705ee909548 --- /dev/null +++ b/extensions-core/simple-client-sslcontext/src/main/java/io/druid/https/SSLContextModule.java @@ -0,0 +1,50 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.https; + +import com.fasterxml.jackson.databind.Module; +import com.google.common.collect.ImmutableList; +import com.google.inject.Binder; +import io.druid.guice.JsonConfigProvider; +import io.druid.guice.annotations.Client; +import io.druid.guice.annotations.Global; +import io.druid.initialization.DruidModule; + +import javax.net.ssl.SSLContext; +import java.util.List; + +public class SSLContextModule implements DruidModule +{ + + @Override + public List getJacksonModules() + { + return ImmutableList.of(); + } + + @Override + public void configure(Binder binder) + { + JsonConfigProvider.bind(binder, "druid.client.https", SSLClientConfig.class); + binder.bind(SSLContext.class).toProvider(SSLContextProvider.class); + binder.bind(SSLContext.class).annotatedWith(Global.class).toProvider(SSLContextProvider.class); + binder.bind(SSLContext.class).annotatedWith(Client.class).toProvider(SSLContextProvider.class); + } +} diff --git a/extensions-core/simple-client-sslcontext/src/main/java/io/druid/https/SSLContextProvider.java b/extensions-core/simple-client-sslcontext/src/main/java/io/druid/https/SSLContextProvider.java new file mode 100644 index 000000000000..ccdacf1b733d --- /dev/null +++ b/extensions-core/simple-client-sslcontext/src/main/java/io/druid/https/SSLContextProvider.java @@ -0,0 +1,75 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.https; + +import com.google.common.base.Throwables; +import com.google.inject.Inject; +import com.google.inject.Provider; +import com.metamx.emitter.EmittingLogger; + +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManagerFactory; +import java.io.FileInputStream; +import java.io.IOException; +import java.security.KeyManagementException; +import java.security.KeyStore; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.security.cert.CertificateException; + +public class SSLContextProvider implements Provider +{ + private static final EmittingLogger log = new EmittingLogger(SSLContextProvider.class); + + private SSLClientConfig config; + + @Inject + public SSLContextProvider(SSLClientConfig config) + { + this.config = config; + } + + @Override + public SSLContext get() + { + log.info("Creating SslContext for https client using config [%s]", config); + + SSLContext sslContext = null; + try { + sslContext = SSLContext.getInstance(config.getProtocol() == null ? "TLSv1.2" : config.getProtocol()); + KeyStore keyStore = KeyStore.getInstance(config.getTrustStoreType() == null + ? 
KeyStore.getDefaultType() + : config.getTrustStoreType()); + keyStore.load( + new FileInputStream(config.getTrustStorePath()), + config.getTrustStorePasswordProvider().getPassword().toCharArray() + ); + TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(config.getTrustStoreAlgorithm() == null + ? TrustManagerFactory.getDefaultAlgorithm() + : config.getTrustStoreAlgorithm()); + trustManagerFactory.init(keyStore); + sslContext.init(null, trustManagerFactory.getTrustManagers(), null); + } + catch (CertificateException | KeyManagementException | IOException | KeyStoreException | NoSuchAlgorithmException e) { + Throwables.propagate(e); + } + return sslContext; + } +} diff --git a/extensions-core/simple-client-sslcontext/src/main/resources/META-INF/services/io.druid.initialization.DruidModule b/extensions-core/simple-client-sslcontext/src/main/resources/META-INF/services/io.druid.initialization.DruidModule new file mode 100644 index 000000000000..600c7b87254a --- /dev/null +++ b/extensions-core/simple-client-sslcontext/src/main/resources/META-INF/services/io.druid.initialization.DruidModule @@ -0,0 +1 @@ +io.druid.https.SSLContextModule \ No newline at end of file diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java index 51a46bd79468..851ecd68ee8f 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java @@ -56,6 +56,7 @@ import io.druid.segment.indexing.granularity.GranularitySpec; import io.druid.segment.loading.DataSegmentPusher; import io.druid.server.DruidNode; +import io.druid.server.initialization.ServerConfig; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.ShardSpec; import io.druid.timeline.partition.ShardSpecLookup; @@ -108,7 +109,7 @@ public class HadoopDruidIndexerConfig public void 
configure(Binder binder) { JsonConfigProvider.bindInstance( - binder, Key.get(DruidNode.class, Self.class), new DruidNode("hadoop-indexer", null, null) + binder, Key.get(DruidNode.class, Self.class), new DruidNode("hadoop-indexer", null, null, null, new ServerConfig()) ); JsonConfigProvider.bind(binder, "druid.hadoop.security.kerberos", HadoopKerberosConfig.class); } diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopDruidConverterConfig.java b/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopDruidConverterConfig.java index a3574f16cb25..c0d3d3cff17e 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopDruidConverterConfig.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopDruidConverterConfig.java @@ -39,6 +39,7 @@ import io.druid.segment.IndexSpec; import io.druid.segment.loading.DataSegmentPusher; import io.druid.server.DruidNode; +import io.druid.server.initialization.ServerConfig; import io.druid.timeline.DataSegment; import org.joda.time.Interval; @@ -65,7 +66,7 @@ public class HadoopDruidConverterConfig public void configure(Binder binder) { JsonConfigProvider.bindInstance( - binder, Key.get(DruidNode.class, Self.class), new DruidNode("hadoop-converter", null, null) + binder, Key.get(DruidNode.class, Self.class), new DruidNode("hadoop-converter", null, null, null, new ServerConfig()) ); } } diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java index a8cb0216705e..d0a74653b7d6 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java @@ -50,6 +50,7 @@ import io.druid.segment.indexing.DataSchema; import io.druid.segment.indexing.granularity.UniformGranularitySpec; import io.druid.server.DruidNode; +import 
io.druid.server.initialization.ServerConfig; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.apache.hadoop.conf.Configuration; @@ -135,7 +136,7 @@ public void configure(Binder binder) { binder.bind(UsedSegmentLister.class).toInstance(segmentList); JsonConfigProvider.bindInstance( - binder, Key.get(DruidNode.class, Self.class), new DruidNode("dummy-node", null, null) + binder, Key.get(DruidNode.class, Self.class), new DruidNode("dummy-node", null, null, null, new ServerConfig()) ); } } diff --git a/indexing-service/src/main/java/io/druid/indexing/common/TaskLocation.java b/indexing-service/src/main/java/io/druid/indexing/common/TaskLocation.java index 14f61646f9aa..8fd5b4df6fac 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/TaskLocation.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/TaskLocation.java @@ -26,14 +26,15 @@ public class TaskLocation { - private static final TaskLocation UNKNOWN = new TaskLocation(null, -1); + private static final TaskLocation UNKNOWN = new TaskLocation(null, -1, -1); private final String host; private final int port; + private final int tlsPort; - public static TaskLocation create(String host, int port) + public static TaskLocation create(String host, int port, int tlsPort) { - return new TaskLocation(host, port); + return new TaskLocation(host, port, tlsPort); } public static TaskLocation unknown() @@ -44,11 +45,13 @@ public static TaskLocation unknown() @JsonCreator public TaskLocation( @JsonProperty("host") String host, - @JsonProperty("port") int port + @JsonProperty("port") int port, + @JsonProperty("tlsPort") int tlsPort ) { this.host = host; this.port = port; + this.tlsPort = tlsPort; } @JsonProperty @@ -63,6 +66,11 @@ public int getPort() return port; } + @JsonProperty + public int getTlsPort() { + return tlsPort; + } + @Override public boolean equals(Object o) { @@ -72,15 +80,20 @@ public boolean equals(Object o) if (o == null || 
getClass() != o.getClass()) { return false; } + TaskLocation that = (TaskLocation) o; - return port == that.port && + + return port == that.port && tlsPort == that.tlsPort && Objects.equals(host, that.host); } @Override public int hashCode() { - return Objects.hash(host, port); + int result = host.hashCode(); + result = 31 * result + port; + result = 31 * result + tlsPort; + return result; } @Override @@ -89,6 +102,7 @@ public String toString() return "TaskLocation{" + "host='" + host + '\'' + ", port=" + port + + ", tlsPort=" + tlsPort + '}'; } } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java b/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java index c57959ed2def..fce07084a55e 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java @@ -62,6 +62,7 @@ import io.druid.java.util.common.logger.Logger; import io.druid.query.DruidMetrics; import io.druid.server.DruidNode; +import io.druid.server.initialization.ServerConfig; import io.druid.server.metrics.MonitorsConfig; import io.druid.tasklogs.TaskLogPusher; import io.druid.tasklogs.TaskLogStreamer; @@ -102,6 +103,8 @@ public class ForkingTaskRunner implements TaskRunner, TaskLogStreamer private final ListeningExecutorService exec; private final ObjectMapper jsonMapper; private final PortFinder portFinder; + private final PortFinder tlsPortFinder; + private final ServerConfig serverConfig; private final CopyOnWriteArrayList> listeners = new CopyOnWriteArrayList<>(); // Writes must be synchronized. This is only a ConcurrentMap so "informational" reads can occur without waiting. 
@@ -117,7 +120,8 @@ public ForkingTaskRunner( Properties props, TaskLogPusher taskLogPusher, ObjectMapper jsonMapper, - @Self DruidNode node + @Self DruidNode node, + ServerConfig serverConfig ) { this.config = config; @@ -127,7 +131,8 @@ public ForkingTaskRunner( this.jsonMapper = jsonMapper; this.node = node; this.portFinder = new PortFinder(config.getStartPort()); - + this.tlsPortFinder = new PortFinder(config.getTlsStartPort()); + this.serverConfig = serverConfig; this.exec = MoreExecutors.listeningDecorator( Execs.multiThreaded(workerConfig.getCapacity(), "forking-task-runner-%d") ); @@ -229,19 +234,25 @@ public TaskStatus call() final ProcessHolder processHolder; final String childHost = node.getHost(); - final int childPort; - final int childChatHandlerPort; - - if (config.isSeparateIngestionEndpoint()) { - Pair portPair = portFinder.findTwoConsecutiveUnusedPorts(); - childPort = portPair.lhs; - childChatHandlerPort = portPair.rhs; - } else { - childPort = portFinder.findUnusedPort(); - childChatHandlerPort = -1; + int childPort = -1; + int tlsChildPort = -1; + int childChatHandlerPort = -1; + + if(serverConfig.isPlaintext()) { + if (config.isSeparateIngestionEndpoint()) { + Pair portPair = portFinder.findTwoConsecutiveUnusedPorts(); + childPort = portPair.lhs; + childChatHandlerPort = portPair.rhs; + } else { + childPort = portFinder.findUnusedPort(); + } + } + + if(serverConfig.isTls()) { + tlsChildPort = tlsPortFinder.findUnusedPort(); } - final TaskLocation taskLocation = TaskLocation.create(childHost, childPort); + final TaskLocation taskLocation = TaskLocation.create(childHost, childPort, tlsChildPort); try { final Closer closer = Closer.create(); @@ -370,6 +381,7 @@ public TaskStatus call() command.add(StringUtils.format("-Ddruid.host=%s", childHost)); command.add(StringUtils.format("-Ddruid.port=%d", childPort)); + command.add(StringUtils.format("-Ddruid.tlsPort=%d", tlsChildPort)); /** * These are not enabled per default to allow the user to either 
set or not set them * Users are highly suggested to be set in druid.indexer.runner.javaOpts @@ -391,6 +403,8 @@ public TaskStatus call() "-Ddruid.indexer.task.chathandler.port=%d", childChatHandlerPort )); + // Note - TLS is not supported with separate ingestion config, + // if set then peon task will fail to start } command.add("io.druid.cli.Main"); @@ -413,7 +427,8 @@ public TaskStatus call() new ProcessBuilder(ImmutableList.copyOf(command)).redirectErrorStream(true).start(), logFile, taskLocation.getHost(), - taskLocation.getPort() + taskLocation.getPort(), + taskLocation.getTlsPort() ); processHolder = taskWorkItem.processHolder; @@ -485,7 +500,12 @@ public TaskStatus call() } } - portFinder.markPortUnused(childPort); + if(serverConfig.isPlaintext()) { + portFinder.markPortUnused(childPort); + } + if(serverConfig.isTls()) { + tlsPortFinder.markPortUnused(tlsChildPort); + } if (childChatHandlerPort > 0) { portFinder.markPortUnused(childChatHandlerPort); } @@ -746,7 +766,7 @@ public TaskLocation getLocation() if (processHolder == null) { return TaskLocation.unknown(); } else { - return TaskLocation.create(processHolder.host, processHolder.port); + return TaskLocation.create(processHolder.host, processHolder.port, processHolder.tlsPort); } } } @@ -757,13 +777,15 @@ private static class ProcessHolder private final File logFile; private final String host; private final int port; + private final int tlsPort; - private ProcessHolder(Process process, File logFile, String host, int port) + private ProcessHolder(Process process, File logFile, String host, int port, int tlsPort) { this.process = process; this.logFile = logFile; this.host = host; this.port = port; + this.tlsPort = tlsPort; } private void registerWithCloser(Closer closer) diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunnerFactory.java b/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunnerFactory.java index aa9161638db6..5778d1a68557 100644 --- 
a/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunnerFactory.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunnerFactory.java @@ -26,6 +26,7 @@ import io.druid.indexing.overlord.config.ForkingTaskRunnerConfig; import io.druid.indexing.worker.config.WorkerConfig; import io.druid.server.DruidNode; +import io.druid.server.initialization.ServerConfig; import io.druid.tasklogs.TaskLogPusher; import java.util.Properties; @@ -41,6 +42,7 @@ public class ForkingTaskRunnerFactory implements TaskRunnerFactory workerConfigRef; private final ScalingStats scalingStats; @@ -253,7 +255,7 @@ int getWorkersNeededToAssignTasks( } else { // None of the existing worker can run this task, we need to provision one worker for it. // create a dummy worker and try to simulate assigning task to it. - workerRunningTask = createDummyWorker("dummy" + need, capacity, workerTaskRunnerConfig.getMinWorkerVersion()); + workerRunningTask = createDummyWorker(SCHEME, "dummy" + need, capacity, workerTaskRunnerConfig.getMinWorkerVersion()); need++; } // Update map with worker running task @@ -400,10 +402,10 @@ private static ImmutableWorkerInfo workerWithTask(ImmutableWorkerInfo immutableW ); } - private static ImmutableWorkerInfo createDummyWorker(String host, int capacity, String version) + private static ImmutableWorkerInfo createDummyWorker(String scheme, String host, int capacity, String version) { return new ImmutableWorkerInfo( - new Worker(host, "-2", capacity, version), + new Worker(scheme, host, "-2", capacity, version), 0, Sets.newHashSet(), Sets.newHashSet(), diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/config/ForkingTaskRunnerConfig.java b/indexing-service/src/main/java/io/druid/indexing/overlord/config/ForkingTaskRunnerConfig.java index 7b3c6ba8bf32..b40a38b530d2 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/config/ForkingTaskRunnerConfig.java +++ 
b/indexing-service/src/main/java/io/druid/indexing/overlord/config/ForkingTaskRunnerConfig.java @@ -62,6 +62,11 @@ public class ForkingTaskRunnerConfig @Max(65535) private int startPort = 8100; + @JsonProperty + @Min(1024) + @Max(65535) + private int tlsStartPort = 8300; + @JsonProperty @NotNull List allowedPrefixes = Lists.newArrayList( @@ -107,6 +112,11 @@ public int getStartPort() return startPort; } + public int getTlsStartPort() + { + return tlsStartPort; + } + public List getAllowedPrefixes() { return allowedPrefixes; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/http/OverlordRedirectInfo.java b/indexing-service/src/main/java/io/druid/indexing/overlord/http/OverlordRedirectInfo.java index 7ea9597334a9..13ab836ac90a 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/http/OverlordRedirectInfo.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/http/OverlordRedirectInfo.java @@ -53,14 +53,14 @@ public boolean doLocal(String requestURI) } @Override - public URL getRedirectURL(String queryString, String requestURI) + public URL getRedirectURL(String scheme, String queryString, String requestURI) { try { final String leader = taskMaster.getCurrentLeader(); if (leader == null || leader.isEmpty()) { return null; } else { - return new URI("http", leader, requestURI, queryString, null).toURL(); + return new URI(scheme, leader, requestURI, queryString, null).toURL(); } } catch (Exception e) { diff --git a/indexing-service/src/main/java/io/druid/indexing/worker/Worker.java b/indexing-service/src/main/java/io/druid/indexing/worker/Worker.java index f349399b21a0..4a7b581e656d 100644 --- a/indexing-service/src/main/java/io/druid/indexing/worker/Worker.java +++ b/indexing-service/src/main/java/io/druid/indexing/worker/Worker.java @@ -27,6 +27,7 @@ */ public class Worker { + private final String scheme; private final String host; private final String ip; private final int capacity; @@ -34,18 +35,26 @@ public class 
Worker @JsonCreator public Worker( + @JsonProperty("scheme") String scheme, @JsonProperty("host") String host, @JsonProperty("ip") String ip, @JsonProperty("capacity") int capacity, @JsonProperty("version") String version ) { + this.scheme = scheme; this.host = host; this.ip = ip; this.capacity = capacity; this.version = version; } + @JsonProperty + public String getScheme() + { + return scheme; + } + @JsonProperty public String getHost() { @@ -70,17 +79,6 @@ public String getVersion() return version; } - @Override - public String toString() - { - return "Worker{" + - "host='" + host + '\'' + - ", ip='" + ip + '\'' + - ", capacity=" + capacity + - ", version='" + version + '\'' + - '}'; - } - @Override public boolean equals(Object o) { @@ -96,6 +94,9 @@ public boolean equals(Object o) if (capacity != worker.capacity) { return false; } + if (!scheme.equals(worker.scheme)) { + return false; + } if (!host.equals(worker.host)) { return false; } @@ -103,16 +104,29 @@ public boolean equals(Object o) return false; } return version.equals(worker.version); - } @Override public int hashCode() { - int result = host.hashCode(); + int result = scheme.hashCode(); + result = 31 * result + host.hashCode(); result = 31 * result + ip.hashCode(); result = 31 * result + capacity; result = 31 * result + version.hashCode(); return result; } + + @Override + public String toString() + { + return "Worker{" + + "scheme='" + scheme + '\'' + + ", host='" + host + '\'' + + ", ip='" + ip + '\'' + + ", capacity=" + capacity + + ", version='" + version + '\'' + + '}'; + } + } diff --git a/indexing-service/src/main/java/io/druid/indexing/worker/http/WorkerResource.java b/indexing-service/src/main/java/io/druid/indexing/worker/http/WorkerResource.java index 4272d778759f..13189a860fb6 100644 --- a/indexing-service/src/main/java/io/druid/indexing/worker/http/WorkerResource.java +++ b/indexing-service/src/main/java/io/druid/indexing/worker/http/WorkerResource.java @@ -82,6 +82,7 @@ public Response 
doDisable() { try { final Worker disabledWorker = new Worker( + enabledWorker.getScheme(), enabledWorker.getHost(), enabledWorker.getIp(), enabledWorker.getCapacity(), diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/ImmutableWorkerInfoTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/ImmutableWorkerInfoTest.java index 441b06a0cee1..f5e4eb0d58e5 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/ImmutableWorkerInfoTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/ImmutableWorkerInfoTest.java @@ -35,7 +35,7 @@ public void testSerde() throws Exception { ImmutableWorkerInfo workerInfo = new ImmutableWorkerInfo( new Worker( - "testWorker", "192.0.0.1", 10, "v1" + "http", "testWorker", "192.0.0.1", 10, "v1" ), 2, ImmutableSet.of("grp1", "grp2"), @@ -56,7 +56,7 @@ public void testEqualsAndSerde() // Everything equal assertEqualsAndHashCode(new ImmutableWorkerInfo( new Worker( - "testWorker", "192.0.0.1", 10, "v1" + "http", "testWorker", "192.0.0.1", 10, "v1" ), 2, ImmutableSet.of("grp1", "grp2"), @@ -64,7 +64,7 @@ public void testEqualsAndSerde() new DateTime("2015-01-01T01:01:01Z") ), new ImmutableWorkerInfo( new Worker( - "testWorker", "192.0.0.1", 10, "v1" + "http", "testWorker", "192.0.0.1", 10, "v1" ), 2, ImmutableSet.of("grp1", "grp2"), @@ -75,7 +75,7 @@ public void testEqualsAndSerde() // different worker same tasks assertEqualsAndHashCode(new ImmutableWorkerInfo( new Worker( - "testWorker1", "192.0.0.1", 10, "v1" + "http", "testWorker1", "192.0.0.1", 10, "v1" ), 2, ImmutableSet.of("grp1", "grp2"), @@ -83,7 +83,7 @@ public void testEqualsAndSerde() new DateTime("2015-01-01T01:01:01Z") ), new ImmutableWorkerInfo( new Worker( - "testWorker2", "192.0.0.1", 10, "v1" + "http", "testWorker2", "192.0.0.1", 10, "v1" ), 2, ImmutableSet.of("grp1", "grp2"), @@ -94,7 +94,7 @@ public void testEqualsAndSerde() // same worker different task groups assertEqualsAndHashCode(new 
ImmutableWorkerInfo( new Worker( - "testWorker", "192.0.0.1", 10, "v1" + "http", "testWorker", "192.0.0.1", 10, "v1" ), 2, ImmutableSet.of("grp3", "grp2"), @@ -102,7 +102,7 @@ public void testEqualsAndSerde() new DateTime("2015-01-01T01:01:01Z") ), new ImmutableWorkerInfo( new Worker( - "testWorker", "192.0.0.1", 10, "v1" + "http", "testWorker", "192.0.0.1", 10, "v1" ), 2, ImmutableSet.of("grp1", "grp2"), @@ -113,7 +113,7 @@ public void testEqualsAndSerde() // same worker different tasks assertEqualsAndHashCode(new ImmutableWorkerInfo( new Worker( - "testWorker1", "192.0.0.1", 10, "v1" + "http", "testWorker1", "192.0.0.1", 10, "v1" ), 2, ImmutableSet.of("grp1", "grp2"), @@ -121,7 +121,7 @@ public void testEqualsAndSerde() new DateTime("2015-01-01T01:01:01Z") ), new ImmutableWorkerInfo( new Worker( - "testWorker2", "192.0.0.1", 10, "v1" + "http", "testWorker2", "192.0.0.1", 10, "v1" ), 2, ImmutableSet.of("grp1", "grp2"), @@ -132,7 +132,7 @@ public void testEqualsAndSerde() // same worker different capacity assertEqualsAndHashCode(new ImmutableWorkerInfo( new Worker( - "testWorker1", "192.0.0.1", 10, "v1" + "http", "testWorker1", "192.0.0.1", 10, "v1" ), 3, ImmutableSet.of("grp1", "grp2"), @@ -140,7 +140,7 @@ public void testEqualsAndSerde() new DateTime("2015-01-01T01:01:01Z") ), new ImmutableWorkerInfo( new Worker( - "testWorker2", "192.0.0.1", 10, "v1" + "http", "testWorker2", "192.0.0.1", 10, "v1" ), 2, ImmutableSet.of("grp1", "grp2"), @@ -151,7 +151,7 @@ public void testEqualsAndSerde() // same worker different lastCompletedTaskTime assertEqualsAndHashCode(new ImmutableWorkerInfo( new Worker( - "testWorker1", "192.0.0.1", 10, "v1" + "http", "testWorker1", "192.0.0.1", 10, "v1" ), 3, ImmutableSet.of("grp1", "grp2"), @@ -159,7 +159,7 @@ public void testEqualsAndSerde() new DateTime("2015-01-01T01:01:01Z") ), new ImmutableWorkerInfo( new Worker( - "testWorker2", "192.0.0.1", 10, "v1" + "http", "testWorker2", "192.0.0.1", 10, "v1" ), 2, ImmutableSet.of("grp1", 
"grp2"), diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTestUtils.java b/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTestUtils.java index f1e2544622a8..4b4ac6da9c98 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTestUtils.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTestUtils.java @@ -57,7 +57,7 @@ public class RemoteTaskRunnerTestUtils static final String announcementsPath = StringUtils.format("%s/indexer/announcements", basePath); static final String tasksPath = StringUtils.format("%s/indexer/tasks", basePath); static final String statusPath = StringUtils.format("%s/indexer/status", basePath); - static final TaskLocation DUMMY_LOCATION = TaskLocation.create("dummy", 9000); + static final TaskLocation DUMMY_LOCATION = TaskLocation.create("dummy", 9000, -1); private TestingCluster testingCluster; @@ -132,6 +132,7 @@ public String getBase() Worker makeWorker(final String workerId) throws Exception { Worker worker = new Worker( + "http", workerId, workerId, 3, @@ -151,7 +152,7 @@ void disableWorker(Worker worker) throws Exception { cf.setData().forPath( joiner.join(announcementsPath, worker.getHost()), - jsonMapper.writeValueAsBytes(new Worker(worker.getHost(), worker.getIp(), worker.getCapacity(), "")) + jsonMapper.writeValueAsBytes(new Worker(worker.getScheme(), worker.getHost(), worker.getIp(), worker.getCapacity(), "")) ); } diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java index 47cc0b3ca641..430ca49919ca 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java @@ -104,6 +104,7 @@ import io.druid.server.DruidNode; import 
io.druid.server.coordination.DataSegmentAnnouncer; import io.druid.server.coordination.DataSegmentServerAnnouncer; +import io.druid.server.initialization.ServerConfig; import io.druid.server.metrics.NoopServiceEmitter; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; @@ -609,7 +610,7 @@ private TaskRunner setUpThreadPoolTaskRunner(TaskToolboxFactory tb) tb, taskConfig, emitter, - new DruidNode("dummy", "dummy", 10000) + new DruidNode("dummy", "dummy", 10000, null, new ServerConfig()) ); } diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedResourceManagementStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedResourceManagementStrategyTest.java index e9d08675a3bb..c185b981bf5f 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedResourceManagementStrategyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedResourceManagementStrategyTest.java @@ -179,7 +179,7 @@ public void testSuccessfulMinWorkersProvisionWithOldVersionNodeRunning() throws EasyMock.expect(runner.getWorkers()).andReturn( Arrays.asList( new TestZkWorker(testTask).toImmutable(), - new TestZkWorker(testTask, "h1", "n1", INVALID_VERSION).toImmutable() // Invalid version node + new TestZkWorker(testTask, "http", "h1", "n1", INVALID_VERSION).toImmutable() // Invalid version node ) ); EasyMock.expect(runner.getConfig()).andReturn(new RemoteTaskRunnerConfig()); @@ -216,7 +216,7 @@ public void testSomethingProvisioning() throws Exception EasyMock.expect(runner.getWorkers()).andReturn( Arrays.asList( new TestZkWorker(testTask).toImmutable(), - new TestZkWorker(testTask, "h1", "n1", INVALID_VERSION).toImmutable() // Invalid version node + new TestZkWorker(testTask, "http", "h1", "n1", INVALID_VERSION).toImmutable() // Invalid version node ) ).times(2); 
EasyMock.expect(runner.getConfig()).andReturn(new RemoteTaskRunnerConfig()).times(1); @@ -274,9 +274,9 @@ public void testProvisionAlert() throws Exception ).times(2); EasyMock.expect(runner.getWorkers()).andReturn( Arrays.asList( - new TestZkWorker(testTask, "hi", "lo", MIN_VERSION, 1).toImmutable(), - new TestZkWorker(testTask, "h1", "n1", INVALID_VERSION).toImmutable(), // Invalid version node - new TestZkWorker(testTask, "h2", "n1", INVALID_VERSION).toImmutable() // Invalid version node + new TestZkWorker(testTask, "http", "hi", "lo", MIN_VERSION, 1).toImmutable(), + new TestZkWorker(testTask, "http", "h1", "n1", INVALID_VERSION).toImmutable(), // Invalid version node + new TestZkWorker(testTask, "http", "h2", "n1", INVALID_VERSION).toImmutable() // Invalid version node ) ).times(2); EasyMock.expect(runner.getConfig()).andReturn(new RemoteTaskRunnerConfig()); @@ -464,7 +464,7 @@ public void testMinCountIncrease() throws Exception ).times(2); EasyMock.expect(runner.getWorkers()).andReturn( Arrays.asList( - new TestZkWorker(NoopTask.create(), "h1", "i1", MIN_VERSION).toImmutable() + new TestZkWorker(NoopTask.create(), "http", "h1", "i1", MIN_VERSION).toImmutable() ) ).times(3); EasyMock.expect(runner.getConfig()).andReturn(new RemoteTaskRunnerConfig()).times(2); @@ -559,28 +559,30 @@ public TestZkWorker( Task testTask ) { - this(testTask, "host", "ip", MIN_VERSION); + this(testTask, "http", "host", "ip", MIN_VERSION); } public TestZkWorker( Task testTask, + String scheme, String host, String ip, String version ) { - this(testTask, host, ip, version, 1); + this(testTask, scheme, host, ip, version, 1); } public TestZkWorker( Task testTask, + String scheme, String host, String ip, String version, int capacity ) { - super(new Worker(host, ip, capacity, version), null, new DefaultObjectMapper()); + super(new Worker(scheme, host, ip, capacity, version), null, new DefaultObjectMapper()); this.testTask = testTask; } diff --git 
a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/SimpleResourceManagementStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/SimpleResourceManagementStrategyTest.java index bded83982091..2d62fb2d5d10 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/SimpleResourceManagementStrategyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/SimpleResourceManagementStrategyTest.java @@ -403,7 +403,7 @@ public void testMinCountIncrease() throws Exception ).times(3); EasyMock.expect(runner.getWorkers()).andReturn( Collections.singletonList( - new TestZkWorker(NoopTask.create(), "h1", "i1", "0").toImmutable() + new TestZkWorker(NoopTask.create(), "http", "h1", "i1", "0").toImmutable() ) ).times(3); EasyMock.expect(runner.getLazyWorkers()).andReturn(Lists.newArrayList()); @@ -496,17 +496,18 @@ public TestZkWorker( Task testTask ) { - this(testTask, "host", "ip", "0"); + this(testTask, "http", "host", "ip", "0"); } public TestZkWorker( Task testTask, + String scheme, String host, String ip, String version ) { - super(new Worker(host, ip, 3, version), null, new DefaultObjectMapper()); + super(new Worker(scheme, host, ip, 3, version), null, new DefaultObjectMapper()); this.testTask = testTask; } diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/config/ForkingTaskRunnerConfigTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/config/ForkingTaskRunnerConfigTest.java index c5d433c9809f..73fb97421846 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/config/ForkingTaskRunnerConfigTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/config/ForkingTaskRunnerConfigTest.java @@ -51,6 +51,7 @@ public void configure(Binder binder) { binder.bind(Key.get(String.class, Names.named("serviceName"))).toInstance("some service"); binder.bind(Key.get(Integer.class, 
Names.named("servicePort"))).toInstance(0); + binder.bind(Key.get(Integer.class, Names.named("tlsServicePort"))).toInstance(-1); } } ) diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordRedirectInfoTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordRedirectInfoTest.java index a0db2b1bebdd..d907edfc6dac 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordRedirectInfoTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordRedirectInfoTest.java @@ -68,7 +68,7 @@ public void testGetRedirectURLNull() { EasyMock.expect(taskMaster.getCurrentLeader()).andReturn(null).anyTimes(); EasyMock.replay(taskMaster); - URL url = redirectInfo.getRedirectURL("query", "/request"); + URL url = redirectInfo.getRedirectURL("http","query", "/request"); Assert.assertNull(url); EasyMock.verify(taskMaster); } @@ -78,7 +78,7 @@ public void testGetRedirectURLEmpty() { EasyMock.expect(taskMaster.getCurrentLeader()).andReturn("").anyTimes(); EasyMock.replay(taskMaster); - URL url = redirectInfo.getRedirectURL("query", "/request"); + URL url = redirectInfo.getRedirectURL("http", "query", "/request"); Assert.assertNull(url); EasyMock.verify(taskMaster); } @@ -91,7 +91,7 @@ public void testGetRedirectURL() String request = "/request"; EasyMock.expect(taskMaster.getCurrentLeader()).andReturn(host).anyTimes(); EasyMock.replay(taskMaster); - URL url = redirectInfo.getRedirectURL(query, request); + URL url = redirectInfo.getRedirectURL("http", query, request); Assert.assertEquals("http://localhost/request?foo=bar&x=y", url.toString()); EasyMock.verify(taskMaster); } diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordTest.java index 61e0353b4bc1..2f2a79b09908 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordTest.java +++ 
b/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordTest.java @@ -55,6 +55,7 @@ import io.druid.server.DruidNode; import io.druid.server.coordinator.CoordinatorOverlordServiceConfig; import io.druid.server.initialization.IndexerZkConfig; +import io.druid.server.initialization.ServerConfig; import io.druid.server.initialization.ZkPathsConfig; import io.druid.server.metrics.NoopServiceEmitter; import io.druid.server.security.AuthConfig; @@ -84,7 +85,7 @@ public class OverlordTest { - private static final TaskLocation TASK_LOCATION = new TaskLocation("dummy", 1000); + private static final TaskLocation TASK_LOCATION = new TaskLocation("dummy", 1000, -1); private TestingServer server; private Timing timing; @@ -158,7 +159,7 @@ public void setUp() throws Exception curator.start(); curator.blockUntilConnected(); curator.create().creatingParentsIfNeeded().forPath(indexerZkConfig.getLeaderLatchPath()); - druidNode = new DruidNode("hey", "what", 1234); + druidNode = new DruidNode("hey", "what", 1234, null, new ServerConfig()); ServiceEmitter serviceEmitter = new NoopServiceEmitter(); taskMaster = new TaskMaster( new TaskQueueConfig(null, new Period(1), null, new Period(10)), diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategyTest.java index 1e576d3f7da7..722e5637a790 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategyTest.java @@ -46,28 +46,28 @@ public void testFindWorkerForTask() throws Exception ImmutableMap.of( "localhost0", new ImmutableWorkerInfo( - new Worker("localhost0", "localhost0", 2, "v1"), 0, + new Worker("http", "localhost0", "localhost0", 2, 
"v1"), 0, Sets.newHashSet(), Sets.newHashSet(), DateTime.now() ), "localhost1", new ImmutableWorkerInfo( - new Worker("localhost1", "localhost1", 2, "v1"), 0, + new Worker("http", "localhost1", "localhost1", 2, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), DateTime.now() ), "localhost2", new ImmutableWorkerInfo( - new Worker("localhost2", "localhost2", 2, "v1"), 1, + new Worker("http", "localhost2", "localhost2", 2, "v1"), 1, Sets.newHashSet(), Sets.newHashSet(), DateTime.now() ), "localhost3", new ImmutableWorkerInfo( - new Worker("localhost3", "localhost3", 2, "v1"), 1, + new Worker("http", "localhost3", "localhost3", 2, "v1"), 1, Sets.newHashSet(), Sets.newHashSet(), DateTime.now() @@ -98,14 +98,14 @@ public void testFindWorkerForTaskWithNulls() throws Exception ImmutableMap.of( "lhost", new ImmutableWorkerInfo( - new Worker("lhost", "lhost", 1, "v1"), 0, + new Worker("http", "lhost", "lhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), DateTime.now() ), "localhost", new ImmutableWorkerInfo( - new Worker("localhost", "localhost", 1, "v1"), 0, + new Worker("http", "localhost", "localhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), DateTime.now() @@ -129,7 +129,7 @@ public void testIsolation() throws Exception ImmutableMap.of( "localhost", new ImmutableWorkerInfo( - new Worker("localhost", "localhost", 1, "v1"), 0, + new Worker("http", "localhost", "localhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), DateTime.now() diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategyTest.java index aad319300edf..a22c4b09595d 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategyTest.java @@ -43,14 +43,14 @@ 
public void testFindWorkerForTask() throws Exception ImmutableMap.of( "lhost", new ImmutableWorkerInfo( - new Worker("lhost", "lhost", 1, "v1"), 0, + new Worker("http", "lhost", "lhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), DateTime.now() ), "localhost", new ImmutableWorkerInfo( - new Worker("localhost", "localhost", 1, "v1"), 1, + new Worker("http", "localhost", "localhost", 1, "v1"), 1, Sets.newHashSet(), Sets.newHashSet(), DateTime.now() @@ -79,14 +79,14 @@ public void testFindWorkerForTaskWhenSameCurrCapacityUsed() throws Exception ImmutableMap.of( "lhost", new ImmutableWorkerInfo( - new Worker("lhost", "lhost", 5, "v1"), 5, + new Worker("http","lhost", "lhost", 5, "v1"), 5, Sets.newHashSet(), Sets.newHashSet(), DateTime.now() ), "localhost", new ImmutableWorkerInfo( - new Worker("localhost", "localhost", 10, "v1"), 5, + new Worker("http","localhost", "localhost", 10, "v1"), 5, Sets.newHashSet(), Sets.newHashSet(), DateTime.now() @@ -116,14 +116,14 @@ public void testOneDisableWorkerDifferentUsedCapacity() throws Exception ImmutableMap.of( "lhost", new ImmutableWorkerInfo( - new Worker("disableHost", "disableHost", 10, DISABLED_VERSION), 2, + new Worker("http","disableHost", "disableHost", 10, DISABLED_VERSION), 2, Sets.newHashSet(), Sets.newHashSet(), DateTime.now() ), "localhost", new ImmutableWorkerInfo( - new Worker("enableHost", "enableHost", 10, "v1"), 5, + new Worker("http","enableHost", "enableHost", 10, "v1"), 5, Sets.newHashSet(), Sets.newHashSet(), DateTime.now() @@ -153,14 +153,14 @@ public void testOneDisableWorkerSameUsedCapacity() throws Exception ImmutableMap.of( "lhost", new ImmutableWorkerInfo( - new Worker("disableHost", "disableHost", 10, DISABLED_VERSION), 5, + new Worker("http","disableHost", "disableHost", 10, DISABLED_VERSION), 5, Sets.newHashSet(), Sets.newHashSet(), DateTime.now() ), "localhost", new ImmutableWorkerInfo( - new Worker("enableHost", "enableHost", 10, "v1"), 5, + new Worker("http","enableHost", "enableHost", 
10, "v1"), 5, Sets.newHashSet(), Sets.newHashSet(), DateTime.now() diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategyTest.java index d113e37d80df..43bcf7d6b5e3 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategyTest.java @@ -46,14 +46,14 @@ public void testFindWorkerForTask() throws Exception ImmutableMap.of( "lhost", new ImmutableWorkerInfo( - new Worker("lhost", "lhost", 1, "v1"), 0, + new Worker("http", "lhost", "lhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), DateTime.now() ), "localhost", new ImmutableWorkerInfo( - new Worker("localhost", "localhost", 1, "v1"), 0, + new Worker("http", "localhost", "localhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), DateTime.now() @@ -84,14 +84,14 @@ public void testFindWorkerForTaskWithNulls() throws Exception ImmutableMap.of( "lhost", new ImmutableWorkerInfo( - new Worker("lhost", "lhost", 1, "v1"), 0, + new Worker("http", "lhost", "lhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), DateTime.now() ), "localhost", new ImmutableWorkerInfo( - new Worker("localhost", "localhost", 1, "v1"), 0, + new Worker("http", "localhost", "localhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), DateTime.now() @@ -115,7 +115,7 @@ public void testIsolation() throws Exception ImmutableMap.of( "localhost", new ImmutableWorkerInfo( - new Worker("localhost", "localhost", 1, "v1"), 0, + new Worker("http", "localhost", "localhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), DateTime.now() diff --git a/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskMonitorTest.java 
b/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskMonitorTest.java index f128d9e9b066..0beab6ba7065 100644 --- a/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskMonitorTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskMonitorTest.java @@ -47,6 +47,7 @@ import io.druid.segment.realtime.plumber.SegmentHandoffNotifierFactory; import io.druid.server.DruidNode; import io.druid.server.initialization.IndexerZkConfig; +import io.druid.server.initialization.ServerConfig; import io.druid.server.initialization.ZkPathsConfig; import io.druid.server.metrics.NoopServiceEmitter; import org.apache.curator.framework.CuratorFramework; @@ -70,7 +71,7 @@ public class WorkerTaskMonitorTest private static final String basePath = "/test/druid"; private static final String tasksPath = StringUtils.format("%s/indexer/tasks/worker", basePath); private static final String statusPath = StringUtils.format("%s/indexer/status/worker", basePath); - private static final DruidNode DUMMY_NODE = new DruidNode("dummy", "dummy", 9000); + private static final DruidNode DUMMY_NODE = new DruidNode("dummy", "dummy", 9000, null, new ServerConfig()); private TestingCluster testingCluster; private CuratorFramework cf; @@ -108,6 +109,7 @@ public void setUp() throws Exception cf.create().creatingParentsIfNeeded().forPath(basePath); worker = new Worker( + "http", "worker", "localhost", 3, @@ -294,7 +296,7 @@ public boolean isValid() Assert.assertEquals(task.getId(), announcements.get(0).getTaskStatus().getId()); Assert.assertEquals(TaskStatus.Status.SUCCESS, announcements.get(0).getTaskStatus().getStatusCode()); Assert.assertEquals(DUMMY_NODE.getHost(), announcements.get(0).getTaskLocation().getHost()); - Assert.assertEquals(DUMMY_NODE.getPort(), announcements.get(0).getTaskLocation().getPort()); + Assert.assertEquals(DUMMY_NODE.getPlaintextPort(), announcements.get(0).getTaskLocation().getPort()); } @Test(timeout = 30_000L) diff --git 
a/indexing-service/src/test/java/io/druid/indexing/worker/http/WorkerResourceTest.java b/indexing-service/src/test/java/io/druid/indexing/worker/http/WorkerResourceTest.java index 987314aa14c5..6db3da26eb9a 100644 --- a/indexing-service/src/test/java/io/druid/indexing/worker/http/WorkerResourceTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/worker/http/WorkerResourceTest.java @@ -71,6 +71,7 @@ public void setUp() throws Exception cf.create().creatingParentsIfNeeded().forPath(basePath); worker = new Worker( + "http", "host", "ip", 3, diff --git a/indexing-service/src/test/java/io/druid/server/initialization/IndexerZkConfigTest.java b/indexing-service/src/test/java/io/druid/server/initialization/IndexerZkConfigTest.java index db8f973ff51a..08b267797fb3 100644 --- a/indexing-service/src/test/java/io/druid/server/initialization/IndexerZkConfigTest.java +++ b/indexing-service/src/test/java/io/druid/server/initialization/IndexerZkConfigTest.java @@ -62,6 +62,7 @@ public void configure(Binder binder) { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/test"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); // See IndexingServiceModuleHelper JsonConfigProvider.bind(binder, indexerPropertyString, IndexerZkConfig.class); JsonConfigProvider.bind( diff --git a/pom.xml b/pom.xml index eb8e844bb700..fbf262e53a33 100644 --- a/pom.xml +++ b/pom.xml @@ -114,6 +114,7 @@ extensions-core/lookups-cached-global extensions-core/lookups-cached-single extensions-core/s3-extensions + extensions-core/simple-client-sslcontext extensions-contrib/azure-extensions extensions-contrib/cassandra-storage diff --git a/server/src/main/java/io/druid/client/BrokerServerView.java b/server/src/main/java/io/druid/client/BrokerServerView.java index 01344db8eda3..a1e30a23a018 100644 --- a/server/src/main/java/io/druid/client/BrokerServerView.java +++ 
b/server/src/main/java/io/druid/client/BrokerServerView.java @@ -199,7 +199,7 @@ private QueryableDruidServer addServer(DruidServer server) private DirectDruidClient makeDirectClient(DruidServer server) { - return new DirectDruidClient(warehouse, queryWatcher, smileMapper, httpClient, server.getHost(), emitter); + return new DirectDruidClient(warehouse, queryWatcher, smileMapper, httpClient, server.getScheme(), server.getHost(), emitter); } private QueryableDruidServer removeServer(DruidServer server) diff --git a/server/src/main/java/io/druid/client/DirectDruidClient.java b/server/src/main/java/io/druid/client/DirectDruidClient.java index 35a86b0f320f..9e2c0c6fe77e 100644 --- a/server/src/main/java/io/druid/client/DirectDruidClient.java +++ b/server/src/main/java/io/druid/client/DirectDruidClient.java @@ -110,6 +110,7 @@ public class DirectDruidClient implements QueryRunner private final QueryWatcher queryWatcher; private final ObjectMapper objectMapper; private final HttpClient httpClient; + private final String scheme; private final String host; private final ServiceEmitter emitter; @@ -146,6 +147,7 @@ public DirectDruidClient( QueryWatcher queryWatcher, ObjectMapper objectMapper, HttpClient httpClient, + String scheme, String host, ServiceEmitter emitter ) @@ -154,6 +156,7 @@ public DirectDruidClient( this.queryWatcher = queryWatcher; this.objectMapper = objectMapper; this.httpClient = httpClient; + this.scheme = scheme; this.host = host; this.emitter = emitter; @@ -192,8 +195,8 @@ public Sequence run(final QueryPlus queryPlus, final Map c } final ListenableFuture future; - final String url = StringUtils.format("http://%s/druid/v2/", host); - final String cancelUrl = StringUtils.format("http://%s/druid/v2/%s", host, query.getId()); + final String url = StringUtils.format("%s://%s/druid/v2/", scheme, host); + final String cancelUrl = StringUtils.format("%s://%s/druid/v2/%s", scheme, host, query.getId()); try { log.debug("Querying queryId[%s] url[%s]", 
query.getId(), url); diff --git a/server/src/main/java/io/druid/client/DruidServer.java b/server/src/main/java/io/druid/client/DruidServer.java index 0f0c4ff6999a..be8eb13c1176 100644 --- a/server/src/main/java/io/druid/client/DruidServer.java +++ b/server/src/main/java/io/druid/client/DruidServer.java @@ -62,8 +62,9 @@ public DruidServer( ) { this( + node.getHostAndPortToUse(), node.getHostAndPort(), - node.getHostAndPort(), + node.getHostAndTlsPort(), config.getMaxSize(), type, config.getTier(), @@ -74,14 +75,15 @@ public DruidServer( @JsonCreator public DruidServer( @JsonProperty("name") String name, - @JsonProperty("host") String host, + @JsonProperty("host") String hostAndPort, + @JsonProperty("hostAndTlsPort") String hostAndTlsPort, @JsonProperty("maxSize") long maxSize, @JsonProperty("type") ServerType type, @JsonProperty("tier") String tier, @JsonProperty("priority") int priority ) { - this.metadata = new DruidServerMetadata(name, host, maxSize, type, tier, priority); + this.metadata = new DruidServerMetadata(name, hostAndPort, hostAndTlsPort, maxSize, type, tier, priority); this.dataSources = new ConcurrentHashMap(); this.segments = new ConcurrentHashMap(); @@ -98,10 +100,19 @@ public DruidServerMetadata getMetadata() return metadata; } - @JsonProperty public String getHost() { - return metadata.getHost(); + return getHostAndTlsPort() != null ? getHostAndTlsPort() : getHostAndPort(); + } + + @JsonProperty("host") + public String getHostAndPort() { + return metadata.getHostAndPort(); + } + + @JsonProperty + public String getHostAndTlsPort() { + return metadata.getHostAndTlsPort(); } public long getCurrSize() @@ -138,6 +149,11 @@ public int getPriority() return metadata.getPriority(); } + public String getScheme() + { + return metadata.getHostAndTlsPort() != null ? 
"https" : "http"; + } + public Map getSegments() { // Copying the map slows things down a lot here, don't use Immutable Map here diff --git a/server/src/main/java/io/druid/client/HttpServerInventoryView.java b/server/src/main/java/io/druid/client/HttpServerInventoryView.java index fb5b13f16ad5..06e3009d3ef6 100644 --- a/server/src/main/java/io/druid/client/HttpServerInventoryView.java +++ b/server/src/main/java/io/druid/client/HttpServerInventoryView.java @@ -520,7 +520,7 @@ Future updateSegmentsListAsync() config.getServerTimeout() ); } - URL url = new URL("http", serverHostAndPort.getHostText(), serverHostAndPort.getPort(), req); + URL url = new URL(druidServer.getScheme(), serverHostAndPort.getHostText(), serverHostAndPort.getPort(), req); BytesAccumulatingResponseHandler responseHandler = new BytesAccumulatingResponseHandler(); diff --git a/server/src/main/java/io/druid/curator/discovery/CuratorServiceAnnouncer.java b/server/src/main/java/io/druid/curator/discovery/CuratorServiceAnnouncer.java index 29b660bdaf1e..0020e179a390 100644 --- a/server/src/main/java/io/druid/curator/discovery/CuratorServiceAnnouncer.java +++ b/server/src/main/java/io/druid/curator/discovery/CuratorServiceAnnouncer.java @@ -61,10 +61,11 @@ public void announce(DruidNode service) } else { try { instance = ServiceInstance.builder() - .name(serviceName) - .address(service.getHost()) - .port(service.getPort()) - .build(); + .name(serviceName) + .address(service.getHost()) + .port(service.getPlaintextPort()) + .sslPort(service.getTlsPort()) + .build(); } catch (Exception e) { throw Throwables.propagate(e); diff --git a/server/src/main/java/io/druid/curator/discovery/ServerDiscoverySelector.java b/server/src/main/java/io/druid/curator/discovery/ServerDiscoverySelector.java index 6d78ff4a92c3..51ac2dfd16d7 100644 --- a/server/src/main/java/io/druid/curator/discovery/ServerDiscoverySelector.java +++ b/server/src/main/java/io/druid/curator/discovery/ServerDiscoverySelector.java @@ -20,15 +20,14 
@@ package io.druid.curator.discovery; import com.google.common.base.Function; +import com.google.common.base.Preconditions; import com.google.common.collect.Collections2; import com.google.common.net.HostAndPort; - import io.druid.client.selector.DiscoverySelector; import io.druid.client.selector.Server; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.java.util.common.logger.Logger; - import org.apache.curator.x.discovery.ServiceInstance; import org.apache.curator.x.discovery.ServiceProvider; @@ -54,6 +53,12 @@ public ServerDiscoverySelector(ServiceProvider serviceProvider) @Override public Server apply(final ServiceInstance instance) { + Preconditions.checkState( + instance.getPort() >= 0 || instance.getSslPort() >= 0, + "WTH?! Both port and sslPort not set" + ); + final int port = instance.getSslPort() >= 0 ? instance.getSslPort() : instance.getPort(); + final String scheme = instance.getSslPort() >= 0 ? 
"https" : "http"; return new Server() { @Override @@ -71,13 +76,13 @@ public String getAddress() @Override public int getPort() { - return instance.getPort(); + return port; } @Override public String getScheme() { - return "http"; + return scheme; } }; } diff --git a/server/src/main/java/io/druid/guice/StorageNodeModule.java b/server/src/main/java/io/druid/guice/StorageNodeModule.java index d4d74add21a0..2a1cee02b065 100644 --- a/server/src/main/java/io/druid/guice/StorageNodeModule.java +++ b/server/src/main/java/io/druid/guice/StorageNodeModule.java @@ -57,8 +57,9 @@ public DruidServerMetadata getMetadata(@Self DruidNode node, @Nullable NodeTypeC } return new DruidServerMetadata( + node.getHostAndPortToUse(), node.getHostAndPort(), - node.getHostAndPort(), + node.getHostAndTlsPort(), config.getMaxSize(), nodeType.getNodeType(), config.getTier(), diff --git a/server/src/main/java/io/druid/guice/http/AbstractHttpClientProvider.java b/server/src/main/java/io/druid/guice/http/AbstractHttpClientProvider.java index e6aaf7f929ba..d1b771db403d 100644 --- a/server/src/main/java/io/druid/guice/http/AbstractHttpClientProvider.java +++ b/server/src/main/java/io/druid/guice/http/AbstractHttpClientProvider.java @@ -26,7 +26,6 @@ import com.google.inject.Key; import com.google.inject.Provider; import com.google.inject.TypeLiteral; - import io.druid.java.util.common.lifecycle.Lifecycle; import javax.net.ssl.SSLContext; diff --git a/server/src/main/java/io/druid/guice/http/HttpClientModule.java b/server/src/main/java/io/druid/guice/http/HttpClientModule.java index 9d61c7005423..c8dc0915ac8f 100644 --- a/server/src/main/java/io/druid/guice/http/HttpClientModule.java +++ b/server/src/main/java/io/druid/guice/http/HttpClientModule.java @@ -117,6 +117,7 @@ public HttpClient get() if (getSslContextBinding() != null) { builder.withSslContext(getSslContextBinding().getProvider().get()); } + return HttpClientInit.createClient(builder.build(), 
LifecycleUtils.asMmxLifecycle(getLifecycleProvider().get())); } } diff --git a/server/src/main/java/io/druid/guice/http/JettyHttpClientModule.java b/server/src/main/java/io/druid/guice/http/JettyHttpClientModule.java index ab43adee3dae..5c7fec2da65c 100644 --- a/server/src/main/java/io/druid/guice/http/JettyHttpClientModule.java +++ b/server/src/main/java/io/druid/guice/http/JettyHttpClientModule.java @@ -22,12 +22,10 @@ import com.google.common.base.Throwables; import com.google.inject.Binder; import com.google.inject.Module; - import io.druid.guice.JsonConfigProvider; import io.druid.guice.LazySingleton; import io.druid.guice.annotations.Global; import io.druid.java.util.common.lifecycle.Lifecycle; - import org.eclipse.jetty.client.HttpClient; import org.eclipse.jetty.util.ssl.SslContextFactory; import org.eclipse.jetty.util.thread.QueuedThreadPool; diff --git a/server/src/main/java/io/druid/query/lookup/LookupModule.java b/server/src/main/java/io/druid/query/lookup/LookupModule.java index 6932822fa0d6..51b7ba9b36ae 100644 --- a/server/src/main/java/io/druid/query/lookup/LookupModule.java +++ b/server/src/main/java/io/druid/query/lookup/LookupModule.java @@ -30,7 +30,6 @@ import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.net.HostAndPort; import com.google.inject.Binder; import com.google.inject.Inject; import io.druid.common.utils.ServletResourceUtils; @@ -47,6 +46,7 @@ import io.druid.java.util.common.logger.Logger; import io.druid.query.expression.LookupExprMacro; import io.druid.server.DruidNode; +import io.druid.server.http.HostAndPortWithScheme; import io.druid.server.initialization.ZkPathsConfig; import io.druid.server.initialization.jetty.JettyBindings; import io.druid.server.listener.announcer.ListenerResourceAnnouncer; @@ -138,7 +138,9 @@ public Response handleUpdates( } catch (final IOException ex) { LOG.debug(ex, "Bad request"); - return 
Response.status(Response.Status.BAD_REQUEST).entity(ServletResourceUtils.sanitizeException(ex)).build(); + return Response.status(Response.Status.BAD_REQUEST) + .entity(ServletResourceUtils.sanitizeException(ex)) + .build(); } try { @@ -206,7 +208,7 @@ public LookupResourceListenerAnnouncer( announcer, lookupListeningAnnouncerConfig, lookupListeningAnnouncerConfig.getLookupKey(), - HostAndPort.fromString(node.getHostAndPort()) + HostAndPortWithScheme.fromString(node.getServiceScheme(), node.getHostAndPortToUse()) ); } } diff --git a/server/src/main/java/io/druid/segment/realtime/firehose/ServiceAnnouncingChatHandlerProvider.java b/server/src/main/java/io/druid/segment/realtime/firehose/ServiceAnnouncingChatHandlerProvider.java index 4efee5332c9a..8b5f3d81baee 100644 --- a/server/src/main/java/io/druid/segment/realtime/firehose/ServiceAnnouncingChatHandlerProvider.java +++ b/server/src/main/java/io/druid/segment/realtime/firehose/ServiceAnnouncingChatHandlerProvider.java @@ -28,6 +28,7 @@ import io.druid.guice.annotations.RemoteChatHandler; import io.druid.java.util.common.ISE; import io.druid.java.util.common.logger.Logger; +import io.druid.server.initialization.ServerConfig; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ConcurrentSkipListSet; @@ -43,17 +44,20 @@ public class ServiceAnnouncingChatHandlerProvider implements ChatHandlerProvider private final DruidNode node; private final ServiceAnnouncer serviceAnnouncer; + private final ServerConfig serverConfig; private final ConcurrentMap handlers; private final ConcurrentSkipListSet announcements; @Inject public ServiceAnnouncingChatHandlerProvider( @RemoteChatHandler DruidNode node, - ServiceAnnouncer serviceAnnouncer + ServiceAnnouncer serviceAnnouncer, + ServerConfig serverConfig ) { this.node = node; this.serviceAnnouncer = serviceAnnouncer; + this.serverConfig = serverConfig; this.handlers = Maps.newConcurrentMap(); this.announcements = new ConcurrentSkipListSet<>(); } @@ -122,6 
+126,6 @@ public Optional get(final String key) private DruidNode makeDruidNode(String key) { - return new DruidNode(key, node.getHost(), node.getPort()); + return new DruidNode(key, node.getHost(), node.getPlaintextPort(), node.getTlsPort(), serverConfig); } } diff --git a/server/src/main/java/io/druid/server/DruidNode.java b/server/src/main/java/io/druid/server/DruidNode.java index 7dca62d832d6..eb8c9f52acd1 100644 --- a/server/src/main/java/io/druid/server/DruidNode.java +++ b/server/src/main/java/io/druid/server/DruidNode.java @@ -25,13 +25,12 @@ import com.google.common.base.Preconditions; import com.google.common.net.HostAndPort; import com.google.inject.name.Named; - import io.druid.common.utils.SocketUtil; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; +import io.druid.server.initialization.ServerConfig; import javax.validation.constraints.Max; -import javax.validation.constraints.Min; import javax.validation.constraints.NotNull; import java.net.InetAddress; import java.net.UnknownHostException; @@ -48,10 +47,33 @@ public class DruidNode @NotNull private String host; + /** + * This property is now deprecated, this is present just so that JsonConfigurator does not fail if this is set. + * Please use {@link DruidNode#plaintextPort} instead, which if set will be used and hence this has -1 as default value. 
+ * */ + @Deprecated @JsonProperty - @Min(0) @Max(0xffff) + @Max(0xffff) private int port = -1; + @JsonProperty + @Max(0xffff) + private int plaintextPort = -1; + + @JsonProperty + @Max(0xffff) + private int tlsPort = -1; + + @JacksonInject + @NotNull + private ServerConfig serverConfig; + + public DruidNode(String serviceName, String host, Integer plaintextPort, Integer tlsPort, ServerConfig serverConfig) + { + this(serviceName, host, plaintextPort, null, tlsPort, serverConfig); + this.serverConfig = serverConfig; + } + /** * host = null , port = null -> host = _default_, port = -1 * host = "abc:123", port = null -> host = abc, port = 123 @@ -72,45 +94,69 @@ public class DruidNode public DruidNode( @JacksonInject @Named("serviceName") @JsonProperty("service") String serviceName, @JsonProperty("host") String host, - @JacksonInject @Named("servicePort") @JsonProperty("port") Integer port + @JsonProperty("plaintextPort") Integer plaintextPort, + @JacksonInject @Named("servicePort") @JsonProperty("port") Integer port, + @JacksonInject @Named("tlsServicePort") @JsonProperty("tlsPort") Integer tlsPort, + @JacksonInject ServerConfig serverConfig ) { - init(serviceName, host, port); + init(serviceName, host, plaintextPort != null ? 
plaintextPort : port, tlsPort, serverConfig); } - - private void init(String serviceName, String host, Integer port) + private void init(String serviceName, String host, Integer plainTextPort, Integer tlsPort, ServerConfig serverConfig) { Preconditions.checkNotNull(serviceName); - this.serviceName = serviceName; - if(host == null && port == null) { - host = getDefaultHost(); - port = -1; - } - else { - final HostAndPort hostAndPort; - if (host != null) { - hostAndPort = HostAndPort.fromString(host); - if (port != null && hostAndPort.hasPort() && port != hostAndPort.getPort()) { - throw new IAE("Conflicting host:port [%s] and port [%d] settings", host, port); - } - } else { - hostAndPort = HostAndPort.fromParts(getDefaultHost(), port); - } + if (!serverConfig.isTls() && !serverConfig.isPlaintext()) { + throw new IAE("At least one of the druid.server.http.plainText or druid.server.http.tls needs to be enabled"); + } + final boolean nullHost = host == null; + HostAndPort hostAndPort; + Integer portFromHostConfig; + if (host != null) { + hostAndPort = HostAndPort.fromString(host); host = hostAndPort.getHostText(); - - if (hostAndPort.hasPort()) { - port = hostAndPort.getPort(); + portFromHostConfig = hostAndPort.hasPort() ? 
hostAndPort.getPort() : null; + if (plainTextPort != null && portFromHostConfig != null && !plainTextPort.equals(portFromHostConfig)) { + throw new IAE("Conflicting host:port [%s] and port [%d] settings", host, plainTextPort); } + if (portFromHostConfig != null) { + plainTextPort = portFromHostConfig; + } + } else { + host = getDefaultHost(); + } - if (port == null) { - port = SocketUtil.findOpenPort(8080); + if (serverConfig.isPlaintext() && serverConfig.isTls() && ((plainTextPort == null || tlsPort == null) + || plainTextPort.equals(tlsPort))) { + // If both plainTExt and tls are enabled then do not allow plaintextPort to be null or + throw new IAE("plaintextPort and tlsPort cannot be null or same if both http and https connectors are enabled"); + } + if (serverConfig.isTls() && (tlsPort == null || tlsPort < 0)) { + throw new IAE("A valid tlsPort needs to specified when druid.server.http.tls is set"); + } + + if (serverConfig.isPlaintext()) { + // to preserve backwards compatible behaviour + if (nullHost && plainTextPort == null) { + plainTextPort = -1; + } else { + if (plainTextPort == null) { + plainTextPort = SocketUtil.findOpenPort(8080); + } } + this.plaintextPort = plainTextPort; + } else { + this.plaintextPort = -1; + } + if (serverConfig.isTls()) { + this.tlsPort = tlsPort; + } else { + this.tlsPort = -1; } - this.port = port; + this.serviceName = serviceName; this.host = host; } @@ -124,43 +170,61 @@ public String getHost() return host; } - public int getPort() + public int getPlaintextPort() { - return port; + return plaintextPort; + } + + public int getTlsPort() + { + return tlsPort; } public DruidNode withService(String service) { - return new DruidNode(service, host, port); + return new DruidNode(service, host, plaintextPort, tlsPort, serverConfig); + } + + public String getServiceScheme() + { + return tlsPort >= 0 ? "https" : "http"; } /** * Returns host and port together as something that can be used as part of a URI. 
*/ - public String getHostAndPort() { - if(port < 0) { - return HostAndPort.fromString(host).toString(); - } else { - return HostAndPort.fromParts(host, port).toString(); + public String getHostAndPort() + { + if (serverConfig.isPlaintext()) { + if (plaintextPort < 0) { + return HostAndPort.fromString(host).toString(); + } else { + return HostAndPort.fromParts(host, plaintextPort).toString(); + } } + return null; } - public static String getDefaultHost() { - try { - return InetAddress.getLocalHost().getCanonicalHostName(); - } catch(UnknownHostException e) { - throw new ISE(e, "Unable to determine host name"); + public String getHostAndTlsPort() + { + if (serverConfig.isTls()) { + return HostAndPort.fromParts(host, tlsPort).toString(); } + return null; } - @Override - public String toString() + public String getHostAndPortToUse() { + return getHostAndTlsPort() != null ? getHostAndTlsPort() : getHostAndPort(); + } + + public static String getDefaultHost() { - return "DruidNode{" + - "serviceName='" + serviceName + '\'' + - ", host='" + host + '\'' + - ", port=" + port + - '}'; + try { + return InetAddress.getLocalHost().getCanonicalHostName(); + } + catch (UnknownHostException e) { + throw new ISE(e, "Unable to determine host name"); + } } @Override @@ -173,24 +237,43 @@ public boolean equals(Object o) return false; } - DruidNode node = (DruidNode) o; + DruidNode druidNode = (DruidNode) o; - if (port != node.port) { + if (plaintextPort != druidNode.plaintextPort) { return false; } - if (!serviceName.equals(node.serviceName)) { + if (tlsPort != druidNode.tlsPort) { return false; } - return host.equals(node.host); - + if (serviceName != null ? !serviceName.equals(druidNode.serviceName) : druidNode.serviceName != null) { + return false; + } + if (host != null ? !host.equals(druidNode.host) : druidNode.host != null) { + return false; + } + return serverConfig != null ? 
serverConfig.equals(druidNode.serverConfig) : druidNode.serverConfig == null; } @Override public int hashCode() { - int result = serviceName.hashCode(); - result = 31 * result + host.hashCode(); - result = 31 * result + port; + int result = serviceName != null ? serviceName.hashCode() : 0; + result = 31 * result + (host != null ? host.hashCode() : 0); + result = 31 * result + plaintextPort; + result = 31 * result + tlsPort; + result = 31 * result + (serverConfig != null ? serverConfig.hashCode() : 0); return result; } + + @Override + public String toString() + { + return "DruidNode{" + + "serviceName='" + serviceName + '\'' + + ", host='" + host + '\'' + + ", plaintextPort=" + plaintextPort + + ", tlsPort=" + tlsPort + + ", serverConfig=" + serverConfig + + '}'; + } } diff --git a/server/src/main/java/io/druid/server/coordination/DruidServerMetadata.java b/server/src/main/java/io/druid/server/coordination/DruidServerMetadata.java index 7c4ff251a9ba..082050b59b5f 100644 --- a/server/src/main/java/io/druid/server/coordination/DruidServerMetadata.java +++ b/server/src/main/java/io/druid/server/coordination/DruidServerMetadata.java @@ -27,7 +27,8 @@ public class DruidServerMetadata { private final String name; - private final String host; + private final String hostAndPort; + private final String hostAndTlsPort; private final long maxSize; private final String tier; private final ServerType type; @@ -36,7 +37,8 @@ public class DruidServerMetadata @JsonCreator public DruidServerMetadata( @JsonProperty("name") String name, - @JsonProperty("host") String host, + @JsonProperty("host") String hostAndPort, + @JsonProperty("hostAndTlsPort") String hostAndTlsPort, @JsonProperty("maxSize") long maxSize, @JsonProperty("type") ServerType type, @JsonProperty("tier") String tier, @@ -44,7 +46,8 @@ public DruidServerMetadata( ) { this.name = name; - this.host = host; + this.hostAndPort = hostAndPort; + this.hostAndTlsPort = hostAndTlsPort; this.maxSize = maxSize; this.tier = tier; 
this.type = type; @@ -57,10 +60,20 @@ public String getName() return name; } + public String getHost() { + return getHostAndTlsPort() != null ? getHostAndTlsPort() : getHostAndPort(); + } + + @JsonProperty("host") + public String getHostAndPort() + { + return hostAndPort; + } + @JsonProperty - public String getHost() + public String getHostAndTlsPort() { - return host; + return hostAndTlsPort; } @JsonProperty @@ -102,35 +115,35 @@ public boolean equals(Object o) return false; } - DruidServerMetadata metadata = (DruidServerMetadata) o; + DruidServerMetadata that = (DruidServerMetadata) o; - if (maxSize != metadata.maxSize) { + if (maxSize != that.maxSize) { return false; } - if (priority != metadata.priority) { + if (priority != that.priority) { return false; } - if (host != null ? !host.equals(metadata.host) : metadata.host != null) { + if (name != null ? !name.equals(that.name) : that.name != null) { return false; } - if (name != null ? !name.equals(metadata.name) : metadata.name != null) { + if (hostAndPort != null ? !hostAndPort.equals(that.hostAndPort) : that.hostAndPort != null) { return false; } - if (tier != null ? !tier.equals(metadata.tier) : metadata.tier != null) { + if (hostAndTlsPort != null ? !hostAndTlsPort.equals(that.hostAndTlsPort) : that.hostAndTlsPort != null) { return false; } - if (type != null ? !type.equals(metadata.type) : metadata.type != null) { + if (tier != null ? !tier.equals(that.tier) : that.tier != null) { return false; } - - return true; + return type == that.type; } @Override public int hashCode() { int result = name != null ? name.hashCode() : 0; - result = 31 * result + (host != null ? host.hashCode() : 0); + result = 31 * result + (hostAndPort != null ? hostAndPort.hashCode() : 0); + result = 31 * result + (hostAndTlsPort != null ? hostAndTlsPort.hashCode() : 0); result = 31 * result + (int) (maxSize ^ (maxSize >>> 32)); result = 31 * result + (tier != null ? tier.hashCode() : 0); result = 31 * result + (type != null ? 
type.hashCode() : 0); @@ -143,11 +156,12 @@ public String toString() { return "DruidServerMetadata{" + "name='" + name + '\'' + - ", host='" + host + '\'' + + ", hostAndPort='" + hostAndPort + '\'' + + ", hostAndTlsPort='" + hostAndTlsPort + '\'' + ", maxSize=" + maxSize + ", tier='" + tier + '\'' + - ", type='" + type + '\'' + - ", priority='" + priority + '\'' + + ", type=" + type + + ", priority=" + priority + '}'; } } diff --git a/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java b/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java index af6c7a9f0685..0b68a670b901 100644 --- a/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java +++ b/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java @@ -510,7 +510,7 @@ public void start() private LeaderLatch createNewLeaderLatch() { final LeaderLatch newLeaderLatch = new LeaderLatch( - curator, ZKPaths.makePath(zkPaths.getCoordinatorPath(), COORDINATOR_OWNER_NODE), self.getHostAndPort() + curator, ZKPaths.makePath(zkPaths.getCoordinatorPath(), COORDINATOR_OWNER_NODE), self.getHostAndPortToUse() ); newLeaderLatch.addListener( diff --git a/server/src/main/java/io/druid/server/http/CoordinatorRedirectInfo.java b/server/src/main/java/io/druid/server/http/CoordinatorRedirectInfo.java index 632b5af4f6df..f603d12bccf7 100644 --- a/server/src/main/java/io/druid/server/http/CoordinatorRedirectInfo.java +++ b/server/src/main/java/io/druid/server/http/CoordinatorRedirectInfo.java @@ -52,7 +52,7 @@ public boolean doLocal(String requestURI) } @Override - public URL getRedirectURL(String queryString, String requestURI) + public URL getRedirectURL(String scheme, String queryString, String requestURI) { try { final String leader = coordinator.getCurrentLeader(); @@ -60,7 +60,7 @@ public URL getRedirectURL(String queryString, String requestURI) return null; } - String location = StringUtils.format("http://%s%s", leader, requestURI); + String location = 
StringUtils.format("%s://%s%s", scheme, leader, requestURI); if (queryString != null) { location = StringUtils.format("%s?%s", location, queryString); diff --git a/server/src/main/java/io/druid/server/http/HostAndPortWithScheme.java b/server/src/main/java/io/druid/server/http/HostAndPortWithScheme.java new file mode 100644 index 000000000000..8e6771c08ce2 --- /dev/null +++ b/server/src/main/java/io/druid/server/http/HostAndPortWithScheme.java @@ -0,0 +1,109 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.server.http; + +import com.google.common.base.Preconditions; +import com.google.common.net.HostAndPort; + +public class HostAndPortWithScheme +{ + private String scheme; + private HostAndPort hostAndPort; + + public HostAndPortWithScheme(String scheme, HostAndPort hostAndPort) { + this.scheme = scheme; + this.hostAndPort = hostAndPort; + } + + public static HostAndPortWithScheme fromParts(String scheme, String host, int port) { + return new HostAndPortWithScheme(scheme, HostAndPort.fromParts(host, port)); + } + + public static HostAndPortWithScheme fromString(String hostPortMaybeSchemeString) { + if (hostPortMaybeSchemeString.startsWith("http")) { + return HostAndPortWithScheme.fromString( + hostPortMaybeSchemeString.substring(0, hostPortMaybeSchemeString.indexOf(':')), + hostPortMaybeSchemeString.substring(hostPortMaybeSchemeString.indexOf(':') + 1) + ); + } + return HostAndPortWithScheme.fromString("http", hostPortMaybeSchemeString); + } + + public static HostAndPortWithScheme fromString(String scheme, String hostPortString) { + return new HostAndPortWithScheme(checkAndGetScheme(scheme), HostAndPort.fromString(hostPortString)); + } + + private static String checkAndGetScheme(String scheme) { + Preconditions.checkState(scheme.toLowerCase().equals("http") || scheme.toLowerCase().equals("https")); + return scheme.toLowerCase(); + } + + public String getScheme() { + return scheme; + } + + public String getHostText() { + return hostAndPort.getHostText(); + } + + public int getPort() { + return hostAndPort.getPort(); + } + + public int getPortOrDefault(int defaultPort) { + return hostAndPort.getPortOrDefault(defaultPort); + } + + public HostAndPort getHostAndPort() + { + return hostAndPort; + } + + @Override + public String toString() { + return String.format("%s:%s", scheme, hostAndPort.toString()); + } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + 
return false; + } + + HostAndPortWithScheme that = (HostAndPortWithScheme) o; + + if (!scheme.equals(that.scheme)) { + return false; + } + return hostAndPort.equals(that.hostAndPort); + } + + @Override + public int hashCode() + { + int result = scheme.hashCode(); + result = 31 * result + hostAndPort.hashCode(); + return result; + } +} diff --git a/server/src/main/java/io/druid/server/http/RedirectFilter.java b/server/src/main/java/io/druid/server/http/RedirectFilter.java index 71a518af9d47..868c887fbddf 100644 --- a/server/src/main/java/io/druid/server/http/RedirectFilter.java +++ b/server/src/main/java/io/druid/server/http/RedirectFilter.java @@ -71,7 +71,7 @@ public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) if (redirectInfo.doLocal(request.getRequestURI())) { chain.doFilter(request, response); } else { - URL url = redirectInfo.getRedirectURL(request.getQueryString(), request.getRequestURI()); + URL url = redirectInfo.getRedirectURL(request.getScheme(), request.getQueryString(), request.getRequestURI()); log.debug("Forwarding request to [%s]", url); if (url == null) { diff --git a/server/src/main/java/io/druid/server/http/RedirectInfo.java b/server/src/main/java/io/druid/server/http/RedirectInfo.java index 2c57fde40594..42172ecb208f 100644 --- a/server/src/main/java/io/druid/server/http/RedirectInfo.java +++ b/server/src/main/java/io/druid/server/http/RedirectInfo.java @@ -27,5 +27,5 @@ public interface RedirectInfo { public boolean doLocal(String requestURI); - public URL getRedirectURL(String queryString, String requestURI); + public URL getRedirectURL(String scheme, String queryString, String requestURI); } diff --git a/server/src/main/java/io/druid/server/initialization/EmitterModule.java b/server/src/main/java/io/druid/server/initialization/EmitterModule.java index 4db7bf2de6af..613a03886be6 100644 --- a/server/src/main/java/io/druid/server/initialization/EmitterModule.java +++ 
b/server/src/main/java/io/druid/server/initialization/EmitterModule.java @@ -89,7 +89,7 @@ public ServiceEmitter getServiceEmitter(@Self Supplier configSupplier ); final ServiceEmitter retVal = new ServiceEmitter( config.getServiceName(), - config.getHostAndPort(), + config.getHostAndPortToUse(), emitter, otherServiceDimensions ); diff --git a/server/src/main/java/io/druid/server/initialization/ServerConfig.java b/server/src/main/java/io/druid/server/initialization/ServerConfig.java index 97ba7b888b82..b299908bca4d 100644 --- a/server/src/main/java/io/druid/server/initialization/ServerConfig.java +++ b/server/src/main/java/io/druid/server/initialization/ServerConfig.java @@ -45,6 +45,12 @@ public class ServerConfig @Min(1) private long maxScatterGatherBytes = Long.MAX_VALUE; + @JsonProperty + private boolean plaintext = true; + + @JsonProperty + private boolean tls = false; + public int getNumThreads() { return numThreads; @@ -65,6 +71,16 @@ public long getMaxScatterGatherBytes() return maxScatterGatherBytes; } + public boolean isPlaintext() + { + return plaintext; + } + + public boolean isTls() + { + return tls; + } + @Override public String toString() { @@ -73,6 +89,8 @@ public String toString() ", maxIdleTime=" + maxIdleTime + ", defaultQueryTimeout=" + defaultQueryTimeout + ", maxScatterGatherBytes=" + maxScatterGatherBytes + + ", plaintext=" + plaintext + + ", tls=" + tls + '}'; } } diff --git a/server/src/main/java/io/druid/server/initialization/TLSServerConfig.java b/server/src/main/java/io/druid/server/initialization/TLSServerConfig.java new file mode 100644 index 000000000000..e70539545975 --- /dev/null +++ b/server/src/main/java/io/druid/server/initialization/TLSServerConfig.java @@ -0,0 +1,75 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package io.druid.server.initialization; + +import com.fasterxml.jackson.annotation.JsonProperty; +import io.druid.metadata.PasswordProvider; + +public class TLSServerConfig +{ + @JsonProperty + private String keyStorePath; + + @JsonProperty + private String keyStoreType; + + @JsonProperty + private String certAlias; + + @JsonProperty("keyStorePassword") + private PasswordProvider keyStorePasswordProvider; + + @JsonProperty("keyManagerPassword") + private PasswordProvider keyManagerPasswordProvider; + + public String getKeyStorePath() + { + return keyStorePath; + } + + public String getKeyStoreType() + { + return keyStoreType; + } + + public PasswordProvider getKeyStorePasswordProvider() + { + return keyStorePasswordProvider; + } + + public String getCertAlias() + { + return certAlias; + } + + public PasswordProvider getKeyManagerPasswordProvider() + { + return keyManagerPasswordProvider; + } + + @Override + public String toString() + { + return "TLSServerConfig{" + + "keyStorePath='" + keyStorePath + '\'' + + ", keyStoreType='" + keyStoreType + '\'' + + ", certAlias='" + certAlias + '\'' + + '}'; + } +} diff --git a/server/src/main/java/io/druid/server/initialization/jetty/ChatHandlerServerModule.java b/server/src/main/java/io/druid/server/initialization/jetty/ChatHandlerServerModule.java index 656c9f115f9b..11bb291c6d0f 100644 --- 
a/server/src/main/java/io/druid/server/initialization/jetty/ChatHandlerServerModule.java +++ b/server/src/main/java/io/druid/server/initialization/jetty/ChatHandlerServerModule.java @@ -37,6 +37,7 @@ import io.druid.segment.realtime.firehose.ChatHandlerResource; import io.druid.server.DruidNode; import io.druid.server.initialization.ServerConfig; +import io.druid.server.initialization.TLSServerConfig; import io.druid.server.metrics.DataSourceTaskIdHolder; import org.eclipse.jetty.server.Server; @@ -85,10 +86,17 @@ public void configure(Binder binder) ServerConfig.class, RemoteChatHandler.class ); + JsonConfigProvider.bind( + binder, + "druid.indexer.server.chathandler.https", + TLSServerConfig.class, + RemoteChatHandler.class + ); LifecycleModule.register(binder, Server.class, RemoteChatHandler.class); } else { binder.bind(DruidNode.class).annotatedWith(RemoteChatHandler.class).to(Key.get(DruidNode.class, Self.class)); binder.bind(ServerConfig.class).annotatedWith(RemoteChatHandler.class).to(Key.get(ServerConfig.class)); + binder.bind(TLSServerConfig.class).annotatedWith(RemoteChatHandler.class).to(Key.get(TLSServerConfig.class)); } } @@ -108,10 +116,11 @@ public Server getServer( Injector injector, Lifecycle lifecycle, @RemoteChatHandler DruidNode node, - @RemoteChatHandler ServerConfig config + @RemoteChatHandler ServerConfig config, + @RemoteChatHandler TLSServerConfig TLSServerConfig ) { - final Server server = JettyServerModule.makeJettyServer(node, config); + final Server server = JettyServerModule.makeJettyServer(node, config, TLSServerConfig); JettyServerModule.initializeServer(injector, lifecycle, server); return server; } diff --git a/server/src/main/java/io/druid/server/initialization/jetty/JettyServerModule.java b/server/src/main/java/io/druid/server/initialization/jetty/JettyServerModule.java index 7de440c07d5a..424eb21d322b 100644 --- a/server/src/main/java/io/druid/server/initialization/jetty/JettyServerModule.java +++ 
b/server/src/main/java/io/druid/server/initialization/jetty/JettyServerModule.java @@ -54,14 +54,20 @@ import io.druid.server.DruidNode; import io.druid.server.StatusResource; import io.druid.server.initialization.ServerConfig; +import io.druid.server.initialization.TLSServerConfig; import io.druid.server.metrics.DataSourceTaskIdHolder; import io.druid.server.metrics.MetricsModule; import io.druid.server.metrics.MonitorsConfig; +import org.apache.http.HttpVersion; import org.eclipse.jetty.server.ConnectionFactory; -import org.eclipse.jetty.server.Connector; import org.eclipse.jetty.server.Handler; +import org.eclipse.jetty.server.HttpConfiguration; +import org.eclipse.jetty.server.HttpConnectionFactory; +import org.eclipse.jetty.server.SecureRequestCustomizer; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.ServerConnector; +import org.eclipse.jetty.server.SslConnectionFactory; +import org.eclipse.jetty.util.ssl.SslContextFactory; import org.eclipse.jetty.util.thread.QueuedThreadPool; import org.eclipse.jetty.util.thread.ScheduledExecutorScheduler; @@ -86,6 +92,7 @@ protected void configureServlets() Binder binder = binder(); JsonConfigProvider.bind(binder, "druid.server.http", ServerConfig.class); + JsonConfigProvider.bind(binder, "druid.server.https", TLSServerConfig.class); binder.bind(GuiceContainer.class).to(DruidGuiceContainer.class); binder.bind(DruidGuiceContainer.class).in(Scopes.SINGLETON); @@ -130,10 +137,14 @@ protected ResourceConfig getDefaultResourceConfig( @Provides @LazySingleton public Server getServer( - final Injector injector, final Lifecycle lifecycle, @Self final DruidNode node, final ServerConfig config + final Injector injector, + final Lifecycle lifecycle, + @Self final DruidNode node, + final ServerConfig config, + final TLSServerConfig TLSServerConfig ) { - final Server server = makeJettyServer(node, config); + final Server server = makeJettyServer(node, config, TLSServerConfig); initializeServer(injector, 
lifecycle, server); return server; } @@ -156,7 +167,7 @@ public JacksonSmileProvider getJacksonSmileProvider(@Smile ObjectMapper objectMa return provider; } - static Server makeJettyServer(DruidNode node, ServerConfig config) + static Server makeJettyServer(DruidNode node, ServerConfig config, TLSServerConfig tlsServerConfig) { final QueuedThreadPool threadPool = new QueuedThreadPool(); threadPool.setMinThreads(config.getNumThreads()); @@ -169,20 +180,53 @@ static Server makeJettyServer(DruidNode node, ServerConfig config) // to fire on main exit. Related bug: https://github.com/druid-io/druid/pull/1627 server.addBean(new ScheduledExecutorScheduler("JettyScheduler", true), true); - ServerConnector connector = new ServerConnector(server); - connector.setPort(node.getPort()); - connector.setIdleTimeout(Ints.checkedCast(config.getMaxIdleTime().toStandardDuration().getMillis())); - // workaround suggested in - - // https://bugs.eclipse.org/bugs/show_bug.cgi?id=435322#c66 for jetty half open connection issues during failovers - connector.setAcceptorPriorityDelta(-1); + final List serverConnectors = new ArrayList<>(); - List monitoredConnFactories = new ArrayList<>(); - for (ConnectionFactory cf : connector.getConnectionFactories()) { - monitoredConnFactories.add(new JettyMonitoringConnectionFactory(cf, activeConnections)); + if (config.isPlaintext()) { + log.info("Creating http connector with port [%d]", node.getPlaintextPort()); + final ServerConnector connector = new ServerConnector(server); + connector.setPort(node.getPlaintextPort()); + serverConnectors.add(connector); + } + if (config.isTls()) { + log.info("Creating https connector with port [%d]", node.getTlsPort()); + final SslContextFactory sslContextFactory = new SslContextFactory(tlsServerConfig.getKeyStorePath()); + sslContextFactory.setKeyStoreType(tlsServerConfig.getKeyStoreType()); + sslContextFactory.setKeyStorePassword(tlsServerConfig.getKeyStorePasswordProvider().getPassword()); + 
sslContextFactory.setCertAlias(tlsServerConfig.getCertAlias()); + sslContextFactory.setKeyManagerPassword(tlsServerConfig.getKeyManagerPasswordProvider() == null + ? null + : tlsServerConfig.getKeyManagerPasswordProvider().getPassword()); + final HttpConfiguration httpsConfiguration = new HttpConfiguration(); + httpsConfiguration.setSecureScheme("https"); + httpsConfiguration.setSecurePort(node.getTlsPort()); + httpsConfiguration.addCustomizer(new SecureRequestCustomizer()); + final ServerConnector connector = new ServerConnector( + server, + new SslConnectionFactory(sslContextFactory, HttpVersion.HTTP_1_1.toString()), + new HttpConnectionFactory(httpsConfiguration) + ); + connector.setPort(node.getTlsPort()); + serverConnectors.add(connector); + } + + final ServerConnector[] connectors = new ServerConnector[serverConnectors.size()]; + int index = 0; + for (ServerConnector connector : serverConnectors) { + connectors[index++] = connector; + connector.setIdleTimeout(Ints.checkedCast(config.getMaxIdleTime().toStandardDuration().getMillis())); + // workaround suggested in - + // https://bugs.eclipse.org/bugs/show_bug.cgi?id=435322#c66 for jetty half open connection issues during failovers + connector.setAcceptorPriorityDelta(-1); + + List monitoredConnFactories = new ArrayList<>(); + for (ConnectionFactory cf : connector.getConnectionFactories()) { + monitoredConnFactories.add(new JettyMonitoringConnectionFactory(cf, activeConnections)); + } + connector.setConnectionFactories(monitoredConnFactories); } - connector.setConnectionFactories(monitoredConnFactories); - server.setConnectors(new Connector[]{connector}); + server.setConnectors(connectors); return server; } diff --git a/server/src/main/java/io/druid/server/listener/announcer/ListenerDiscoverer.java b/server/src/main/java/io/druid/server/listener/announcer/ListenerDiscoverer.java index 8aacf1f78539..e32669e1fe16 100644 --- a/server/src/main/java/io/druid/server/listener/announcer/ListenerDiscoverer.java +++ 
b/server/src/main/java/io/druid/server/listener/announcer/ListenerDiscoverer.java @@ -25,13 +25,11 @@ import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.net.HostAndPort; import com.google.inject.Inject; - import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.java.util.common.logger.Logger; - +import io.druid.server.http.HostAndPortWithScheme; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.utils.ZKPaths; import org.apache.zookeeper.KeeperException; @@ -46,7 +44,7 @@ public class ListenerDiscoverer { private static final Logger LOG = new Logger(ListenerDiscoverer.class); - private volatile Map lastSeenMap = ImmutableMap.of(); + private volatile Map lastSeenMap = ImmutableMap.of(); private final CuratorFramework cf; private final ListeningAnnouncerConfig listeningAnnouncerConfig; private final Object startStopSync = new Object(); @@ -98,14 +96,14 @@ public void stop() * * @throws IOException if there was an error refreshing the zookeeper cache */ - public Collection getNodes(final String listener_key) throws IOException + public Collection getNodes(final String listener_key) throws IOException { return getCurrentNodes(listener_key).keySet(); } - Map getCurrentNodes(final String listener_key) throws IOException + Map getCurrentNodes(final String listener_key) throws IOException { - final HashMap retVal = new HashMap<>(); + final HashMap retVal = new HashMap<>(); final String zkPath = listeningAnnouncerConfig.getAnnouncementPath(listener_key); final Collection children; try { @@ -132,9 +130,9 @@ Map getCurrentNodes(final String listener_key) throws IOExcep LOG.debug("Lost data at path [%s]", childPath); continue; } - final HostAndPort hostAndPort = HostAndPort.fromString(child); + final HostAndPortWithScheme hostAndPortWithScheme = 
HostAndPortWithScheme.fromString(child); final Long l = ByteBuffer.wrap(data).getLong(); - retVal.put(hostAndPort, l); + retVal.put(hostAndPortWithScheme, l); } catch (IllegalArgumentException iae) { LOG.warn(iae, "Error parsing [%s]", childPath); @@ -152,16 +150,16 @@ Map getCurrentNodes(final String listener_key) throws IOExcep * * @throws IOException If there was an error in refreshing the Zookeeper cache */ - public synchronized Collection getNewNodes(final String listener_key) throws IOException + public synchronized Collection getNewNodes(final String listener_key) throws IOException { - final Map priorSeenMap = lastSeenMap; - final Map currentMap = getCurrentNodes(listener_key); - final Collection retVal = Collections2.filter( + final Map priorSeenMap = lastSeenMap; + final Map currentMap = getCurrentNodes(listener_key); + final Collection retVal = Collections2.filter( currentMap.keySet(), - new Predicate() + new Predicate() { @Override - public boolean apply(HostAndPort input) + public boolean apply(HostAndPortWithScheme input) { final Long l = priorSeenMap.get(input); return l == null || l < currentMap.get(input); diff --git a/server/src/main/java/io/druid/server/listener/announcer/ListenerResourceAnnouncer.java b/server/src/main/java/io/druid/server/listener/announcer/ListenerResourceAnnouncer.java index e16c4c76c48a..f87c825d8c94 100644 --- a/server/src/main/java/io/druid/server/listener/announcer/ListenerResourceAnnouncer.java +++ b/server/src/main/java/io/druid/server/listener/announcer/ListenerResourceAnnouncer.java @@ -20,14 +20,12 @@ package io.druid.server.listener.announcer; import com.google.common.base.Throwables; -import com.google.common.net.HostAndPort; import com.google.common.primitives.Longs; - import io.druid.curator.announcement.Announcer; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.java.util.common.logger.Logger; - +import 
io.druid.server.http.HostAndPortWithScheme; import org.apache.curator.utils.ZKPaths; import org.joda.time.DateTime; @@ -52,7 +50,7 @@ public ListenerResourceAnnouncer( Announcer announcer, ListeningAnnouncerConfig listeningAnnouncerConfig, String listener_key, - HostAndPort node + HostAndPortWithScheme node ) { this( @@ -65,7 +63,7 @@ public ListenerResourceAnnouncer( ListenerResourceAnnouncer( Announcer announcer, String announceBasePath, - HostAndPort node + HostAndPortWithScheme node ) { this.announcePath = ZKPaths.makePath(announceBasePath, node.toString()); diff --git a/server/src/main/java/io/druid/server/lookup/cache/LookupCoordinatorManager.java b/server/src/main/java/io/druid/server/lookup/cache/LookupCoordinatorManager.java index fd36a8ea17ca..95a3226ebd91 100644 --- a/server/src/main/java/io/druid/server/lookup/cache/LookupCoordinatorManager.java +++ b/server/src/main/java/io/druid/server/lookup/cache/LookupCoordinatorManager.java @@ -23,8 +23,10 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.jaxrs.smile.SmileMediaTypes; import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Function; import com.google.common.base.Preconditions; import com.google.common.base.Throwables; +import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Sets; import com.google.common.net.HostAndPort; @@ -54,6 +56,7 @@ import io.druid.java.util.common.StringUtils; import io.druid.query.lookup.LookupModule; import io.druid.query.lookup.LookupsState; +import io.druid.server.http.HostAndPortWithScheme; import io.druid.server.listener.announcer.ListenerDiscoverer; import io.druid.server.listener.resource.ListenerResource; import org.jboss.netty.handler.codec.http.HttpHeaders; @@ -287,7 +290,17 @@ public Collection discoverNodesInTier(String tier) { try { Preconditions.checkState(lifecycleLock.awaitStarted(5, TimeUnit.SECONDS), "not started"); - 
return listenerDiscoverer.getNodes(LookupModule.getTierListenerPath(tier)); + return Collections2.transform( + listenerDiscoverer.getNodes(LookupModule.getTierListenerPath(tier)), + new Function() + { + @Override + public HostAndPort apply(HostAndPortWithScheme input) + { + return input.getHostAndPort(); + } + } + ); } catch (IOException e) { throw Throwables.propagate(e); } @@ -508,11 +521,12 @@ void lookupManagementLoop() LOG.debug("Starting lookup mgmt for tier [%s].", tierEntry.getKey()); final Map tierLookups = tierEntry.getValue(); - for (final HostAndPort node : listenerDiscoverer.getNodes(LookupModule.getTierListenerPath(tierEntry.getKey()))) { + for (final HostAndPortWithScheme node : listenerDiscoverer.getNodes(LookupModule.getTierListenerPath(tierEntry.getKey()))) { LOG.debug( - "Starting lookup mgmt for tier [%s] and host [%s:%s].", + "Starting lookup mgmt for tier [%s] and host [%s:%s:%s].", tierEntry.getKey(), + node.getScheme(), node.getHostText(), node.getPort() ); @@ -521,16 +535,17 @@ void lookupManagementLoop() executorService.submit( () -> { try { - return new AbstractMap.SimpleImmutableEntry<>(node, doLookupManagementOnNode(node, tierLookups)); + return new AbstractMap.SimpleImmutableEntry<>(node.getHostAndPort(), doLookupManagementOnNode(node, tierLookups)); } catch (InterruptedException ex) { - LOG.warn(ex, "lookup management on node [%s:%s] interrupted.", node.getHostText(), node.getPort()); + LOG.warn(ex, "lookup management on node [%s:%s:%s] interrupted.", node.getScheme(), node.getHostText(), node.getPort()); return null; } catch (Exception ex) { LOG.makeAlert( ex, - "Failed to finish lookup management on node [%s:%s]", + "Failed to finish lookup management on node [%s:%s:%s]", + node.getScheme(), node.getHostText(), node.getPort() ).emit(); @@ -569,7 +584,7 @@ void lookupManagementLoop() } private LookupsState doLookupManagementOnNode( - HostAndPort node, + HostAndPortWithScheme node, Map nodeTierLookupsToBe ) throws IOException, 
InterruptedException, ExecutionException { @@ -657,20 +672,20 @@ Set getToBeDroppedFromNode( return toDrop; } - static URL getLookupsURL(HostAndPort druidNode) throws MalformedURLException + static URL getLookupsURL(HostAndPortWithScheme druidNode) throws MalformedURLException { return new URL( - "http", + druidNode.getScheme(), druidNode.getHostText(), druidNode.getPortOrDefault(-1), LOOKUP_BASE_REQUEST_PATH ); } - static URL getLookupsUpdateURL(HostAndPort druidNode) throws MalformedURLException + static URL getLookupsUpdateURL(HostAndPortWithScheme druidNode) throws MalformedURLException { return new URL( - "http", + druidNode.getScheme(), druidNode.getHostText(), druidNode.getPortOrDefault(-1), LOOKUP_UPDATE_REQUEST_PATH @@ -714,7 +729,7 @@ public LookupsCommunicator( } public LookupsState updateNode( - HostAndPort node, + HostAndPortWithScheme node, LookupsState lookupsUpdate ) throws IOException, InterruptedException, ExecutionException @@ -771,7 +786,7 @@ public LookupsState updateNode( } public LookupsState getLookupStateForNode( - HostAndPort node + HostAndPortWithScheme node ) throws IOException, InterruptedException, ExecutionException { final URL url = getLookupsURL(node); diff --git a/server/src/test/java/io/druid/client/BrokerServerViewTest.java b/server/src/test/java/io/druid/client/BrokerServerViewTest.java index f2d5bcb12e5c..b16c6684f458 100644 --- a/server/src/test/java/io/druid/client/BrokerServerViewTest.java +++ b/server/src/test/java/io/druid/client/BrokerServerViewTest.java @@ -98,6 +98,7 @@ public void testSingleServerAddedRemovedSegment() throws Exception final DruidServer druidServer = new DruidServer( "localhost:1234", "localhost:1234", + null, 10000000L, ServerType.HISTORICAL, "default_tier", @@ -164,6 +165,7 @@ public DruidServer apply(String input) return new DruidServer( input, input, + null, 10000000L, ServerType.HISTORICAL, "default_tier", diff --git 
a/server/src/test/java/io/druid/client/CachingClusteredClientFunctionalityTest.java b/server/src/test/java/io/druid/client/CachingClusteredClientFunctionalityTest.java index 6aa88d4b6eda..176d99c2f2a6 100644 --- a/server/src/test/java/io/druid/client/CachingClusteredClientFunctionalityTest.java +++ b/server/src/test/java/io/druid/client/CachingClusteredClientFunctionalityTest.java @@ -173,7 +173,7 @@ public QueryableDruidServer pick( ) { return new QueryableDruidServer( - new DruidServer("localhost", "localhost", 100, ServerType.HISTORICAL, "a", 10), + new DruidServer("localhost", "localhost", null, 100, ServerType.HISTORICAL, "a", 10), EasyMock.createNiceMock(DirectDruidClient.class) ); } @@ -187,7 +187,7 @@ public List pick( { return Collections.singletonList( new QueryableDruidServer( - new DruidServer("localhost", "localhost", 100, ServerType.HISTORICAL, "a", 10), + new DruidServer("localhost", "localhost", null, 100, ServerType.HISTORICAL, "a", 10), EasyMock.createNiceMock(DirectDruidClient.class) ) ); diff --git a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java index 7d790f9e148f..f19c1faeedb1 100644 --- a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java +++ b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java @@ -331,11 +331,11 @@ public void setUp() throws Exception client = makeClient(MoreExecutors.sameThreadExecutor()); servers = new DruidServer[]{ - new DruidServer("test1", "test1", 10, ServerType.HISTORICAL, "bye", 0), - new DruidServer("test2", "test2", 10, ServerType.HISTORICAL, "bye", 0), - new DruidServer("test3", "test3", 10, ServerType.HISTORICAL, "bye", 0), - new DruidServer("test4", "test4", 10, ServerType.HISTORICAL, "bye", 0), - new DruidServer("test5", "test5", 10, ServerType.HISTORICAL, "bye", 0) + new DruidServer("test1", "test1", null, 10, ServerType.HISTORICAL, "bye", 0), + new DruidServer("test2", "test2", 
null, 10, ServerType.HISTORICAL, "bye", 0), + new DruidServer("test3", "test3", null, 10, ServerType.HISTORICAL, "bye", 0), + new DruidServer("test4", "test4", null, 10, ServerType.HISTORICAL, "bye", 0), + new DruidServer("test5", "test5", null, 10, ServerType.HISTORICAL, "bye", 0) }; } diff --git a/server/src/test/java/io/druid/client/CoordinatorServerViewTest.java b/server/src/test/java/io/druid/client/CoordinatorServerViewTest.java index 0ba1ac3ba392..49dc29b71ebe 100644 --- a/server/src/test/java/io/druid/client/CoordinatorServerViewTest.java +++ b/server/src/test/java/io/druid/client/CoordinatorServerViewTest.java @@ -91,6 +91,7 @@ public void testSingleServerAddedRemovedSegment() throws Exception final DruidServer druidServer = new DruidServer( "localhost:1234", "localhost:1234", + null, 10000000L, ServerType.HISTORICAL, "default_tier", @@ -149,7 +150,7 @@ public void testMultipleServerAddedRemovedSegment() throws Exception setupViews(); final List druidServers = Lists.transform( - ImmutableList.of("locahost:0", "localhost:1", "localhost:2", "localhost:3", "localhost:4"), + ImmutableList.of("localhost:0", "localhost:1", "localhost:2", "localhost:3", "localhost:4"), new Function() { @Override @@ -158,6 +159,7 @@ public DruidServer apply(String input) return new DruidServer( input, input, + null, 10000000L, ServerType.HISTORICAL, "default_tier", diff --git a/server/src/test/java/io/druid/client/DirectDruidClientTest.java b/server/src/test/java/io/druid/client/DirectDruidClientTest.java index f5fbe40d2402..d6e5db2dfbd0 100644 --- a/server/src/test/java/io/druid/client/DirectDruidClientTest.java +++ b/server/src/test/java/io/druid/client/DirectDruidClientTest.java @@ -138,6 +138,7 @@ public void testRun() throws Exception QueryRunnerTestHelper.NOOP_QUERYWATCHER, new DefaultObjectMapper(), httpClient, + "http", "foo", new NoopServiceEmitter() ); @@ -146,17 +147,18 @@ public void testRun() throws Exception QueryRunnerTestHelper.NOOP_QUERYWATCHER, new 
DefaultObjectMapper(), httpClient, + "http", "foo2", new NoopServiceEmitter() ); QueryableDruidServer queryableDruidServer1 = new QueryableDruidServer( - new DruidServer("test1", "localhost", 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 0), + new DruidServer("test1", "localhost", null, 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 0), client1 ); serverSelector.addServerAndUpdateSegment(queryableDruidServer1, serverSelector.getSegment()); QueryableDruidServer queryableDruidServer2 = new QueryableDruidServer( - new DruidServer("test1", "localhost", 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 0), + new DruidServer("test1", "localhost", null, 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 0), client2 ); serverSelector.addServerAndUpdateSegment(queryableDruidServer2, serverSelector.getSegment()); @@ -254,12 +256,13 @@ public void testCancel() throws Exception QueryRunnerTestHelper.NOOP_QUERYWATCHER, new DefaultObjectMapper(), httpClient, + "http", "foo", new NoopServiceEmitter() ); QueryableDruidServer queryableDruidServer1 = new QueryableDruidServer( - new DruidServer("test1", "localhost", 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 0), + new DruidServer("test1", "localhost", null, 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 0), client1 ); serverSelector.addServerAndUpdateSegment(queryableDruidServer1, serverSelector.getSegment()); @@ -323,12 +326,13 @@ public void testQueryInterruptionExceptionLogMessage() throws JsonProcessingExce QueryRunnerTestHelper.NOOP_QUERYWATCHER, new DefaultObjectMapper(), httpClient, + "http", hostName, new NoopServiceEmitter() ); QueryableDruidServer queryableDruidServer = new QueryableDruidServer( - new DruidServer("test1", hostName, 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 0), + new DruidServer("test1", hostName, null, 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 0), client1 ); diff --git a/server/src/test/java/io/druid/client/cache/CacheMonitorTest.java 
b/server/src/test/java/io/druid/client/cache/CacheMonitorTest.java index 65fa0736da2c..6bc7dcfba89f 100644 --- a/server/src/test/java/io/druid/client/cache/CacheMonitorTest.java +++ b/server/src/test/java/io/druid/client/cache/CacheMonitorTest.java @@ -29,6 +29,7 @@ import io.druid.guice.annotations.Self; import io.druid.initialization.Initialization; import io.druid.server.DruidNode; +import io.druid.server.initialization.ServerConfig; import org.junit.Assert; import org.junit.Test; @@ -43,7 +44,7 @@ public void testOptionalInject() throws Exception public void configure(Binder binder) { JsonConfigProvider.bindInstance( - binder, Key.get(DruidNode.class, Self.class), new DruidNode("test-inject", null, null) + binder, Key.get(DruidNode.class, Self.class), new DruidNode("test-inject", null, null, null, new ServerConfig()) ); } } @@ -62,7 +63,7 @@ public void testInject() throws Exception public void configure(Binder binder) { JsonConfigProvider.bindInstance( - binder, Key.get(DruidNode.class, Self.class), new DruidNode("test-inject", null, null) + binder, Key.get(DruidNode.class, Self.class), new DruidNode("test-inject", null, null, null, new ServerConfig()) ); binder.bind(Cache.class).toInstance(MapCache.create(0)); } diff --git a/server/src/test/java/io/druid/client/cache/HybridCacheTest.java b/server/src/test/java/io/druid/client/cache/HybridCacheTest.java index 8a35de69cdc6..ba0ede2c8212 100644 --- a/server/src/test/java/io/druid/client/cache/HybridCacheTest.java +++ b/server/src/test/java/io/druid/client/cache/HybridCacheTest.java @@ -60,7 +60,7 @@ public void configure(Binder binder) { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("hybridTest"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); - + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); binder.install(new CacheModule(prefix)); } } diff --git a/server/src/test/java/io/druid/client/cache/MemcachedCacheTest.java 
b/server/src/test/java/io/druid/client/cache/MemcachedCacheTest.java index b1fb4d5a9254..5bfbfa10038c 100644 --- a/server/src/test/java/io/druid/client/cache/MemcachedCacheTest.java +++ b/server/src/test/java/io/druid/client/cache/MemcachedCacheTest.java @@ -153,6 +153,7 @@ public void configure(Binder binder) { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/test/memcached"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); binder.bind(MemcachedCacheConfig.class).toInstance(config); binder.bind(Cache.class).toProvider(MemcachedProviderWithConfig.class).in(ManageLifecycle.class); @@ -186,6 +187,7 @@ public void configure(Binder binder) { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/test/memcached"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); binder.bind(Cache.class).toProvider(CacheProvider.class); JsonConfigProvider.bind(binder, uuid, CacheProvider.class); diff --git a/server/src/test/java/io/druid/client/client/BatchServerInventoryViewTest.java b/server/src/test/java/io/druid/client/client/BatchServerInventoryViewTest.java index 770e60444551..f28009fecffc 100644 --- a/server/src/test/java/io/druid/client/client/BatchServerInventoryViewTest.java +++ b/server/src/test/java/io/druid/client/client/BatchServerInventoryViewTest.java @@ -124,6 +124,7 @@ public void setUp() throws Exception DruidServerMetadata serverMetadata = new DruidServerMetadata( "id", "host", + null, Long.MAX_VALUE, ServerType.HISTORICAL, "tier", @@ -443,6 +444,7 @@ public BatchDataSegmentAnnouncer call() new DruidServerMetadata( "id", "host", + null, Long.MAX_VALUE, ServerType.HISTORICAL, "tier", diff --git a/server/src/test/java/io/druid/client/client/ImmutableSegmentLoadInfoTest.java 
b/server/src/test/java/io/druid/client/client/ImmutableSegmentLoadInfoTest.java index 5c99abaa8882..50cf50df6ca6 100644 --- a/server/src/test/java/io/druid/client/client/ImmutableSegmentLoadInfoTest.java +++ b/server/src/test/java/io/druid/client/client/ImmutableSegmentLoadInfoTest.java @@ -52,7 +52,7 @@ public void testSerde() throws IOException null, NoneShardSpec.instance(), 0, 0 - ), Sets.newHashSet(new DruidServerMetadata("a", "host", 10, ServerType.HISTORICAL, "tier", 1)) + ), Sets.newHashSet(new DruidServerMetadata("a", "host", null, 10, ServerType.HISTORICAL, "tier", 1)) ); ImmutableSegmentLoadInfo serde = mapper.readValue( diff --git a/server/src/test/java/io/druid/client/selector/ServerSelectorTest.java b/server/src/test/java/io/druid/client/selector/ServerSelectorTest.java index ffad1bd7facd..bdeeb461d364 100644 --- a/server/src/test/java/io/druid/client/selector/ServerSelectorTest.java +++ b/server/src/test/java/io/druid/client/selector/ServerSelectorTest.java @@ -72,7 +72,7 @@ public void testSegmentUpdate() throws Exception selector.addServerAndUpdateSegment( new QueryableDruidServer( - new DruidServer("test1", "localhost", 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 1), + new DruidServer("test1", "localhost", null, 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 1), EasyMock.createMock(DirectDruidClient.class) ), DataSegment.builder() diff --git a/server/src/test/java/io/druid/client/selector/TierSelectorStrategyTest.java b/server/src/test/java/io/druid/client/selector/TierSelectorStrategyTest.java index 74757f0ff88a..6787b79c3072 100644 --- a/server/src/test/java/io/druid/client/selector/TierSelectorStrategyTest.java +++ b/server/src/test/java/io/druid/client/selector/TierSelectorStrategyTest.java @@ -44,11 +44,11 @@ public void testHighestPriorityTierSelectorStrategy() { DirectDruidClient client = EasyMock.createMock(DirectDruidClient.class); QueryableDruidServer lowPriority = new QueryableDruidServer( - new DruidServer("test1", 
"localhost", 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 0), + new DruidServer("test1", "localhost", null, 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 0), client ); QueryableDruidServer highPriority = new QueryableDruidServer( - new DruidServer("test1", "localhost", 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 1), + new DruidServer("test1", "localhost", null, 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 1), client ); @@ -63,11 +63,11 @@ public void testLowestPriorityTierSelectorStrategy() { DirectDruidClient client = EasyMock.createMock(DirectDruidClient.class); QueryableDruidServer lowPriority = new QueryableDruidServer( - new DruidServer("test1", "localhost", 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 0), + new DruidServer("test1", "localhost", null, 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 0), client ); QueryableDruidServer highPriority = new QueryableDruidServer( - new DruidServer("test1", "localhost", 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 1), + new DruidServer("test1", "localhost", null, 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 1), client ); @@ -82,15 +82,15 @@ public void testCustomPriorityTierSelectorStrategy() { DirectDruidClient client = EasyMock.createMock(DirectDruidClient.class); QueryableDruidServer lowPriority = new QueryableDruidServer( - new DruidServer("test1", "localhost", 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, -1), + new DruidServer("test1", "localhost", null, 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, -1), client ); QueryableDruidServer mediumPriority = new QueryableDruidServer( - new DruidServer("test1", "localhost", 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 0), + new DruidServer("test1", "localhost", null, 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 0), client ); QueryableDruidServer highPriority = new QueryableDruidServer( - new DruidServer("test1", "localhost", 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 1), + new 
DruidServer("test1", "localhost", null, 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 1), client ); diff --git a/server/src/test/java/io/druid/curator/discovery/ServerDiscoverySelectorTest.java b/server/src/test/java/io/druid/curator/discovery/ServerDiscoverySelectorTest.java index c80598511e55..bb55b1ee1bce 100644 --- a/server/src/test/java/io/druid/curator/discovery/ServerDiscoverySelectorTest.java +++ b/server/src/test/java/io/druid/curator/discovery/ServerDiscoverySelectorTest.java @@ -54,6 +54,7 @@ public void testPick() throws Exception EasyMock.expect(serviceProvider.getInstance()).andReturn(instance).anyTimes(); EasyMock.expect(instance.getAddress()).andReturn(ADDRESS).anyTimes(); EasyMock.expect(instance.getPort()).andReturn(PORT).anyTimes(); + EasyMock.expect(instance.getSslPort()).andReturn(-1).anyTimes(); EasyMock.replay(instance, serviceProvider); Server server = serverDiscoverySelector.pick(); Assert.assertEquals(PORT, server.getPort()); @@ -84,6 +85,7 @@ public void testPickIPv6() throws Exception EasyMock.expect(serviceProvider.getInstance()).andReturn(instance).anyTimes(); EasyMock.expect(instance.getAddress()).andReturn(ADDRESS).anyTimes(); EasyMock.expect(instance.getPort()).andReturn(PORT).anyTimes(); + EasyMock.expect(instance.getSslPort()).andReturn(-1).anyTimes(); EasyMock.replay(instance, serviceProvider); Server server = serverDiscoverySelector.pick(); Assert.assertEquals(PORT, server.getPort()); @@ -114,6 +116,7 @@ public void testPickIPv6Bracket() throws Exception EasyMock.expect(serviceProvider.getInstance()).andReturn(instance).anyTimes(); EasyMock.expect(instance.getAddress()).andReturn(ADDRESS).anyTimes(); EasyMock.expect(instance.getPort()).andReturn(PORT).anyTimes(); + EasyMock.expect(instance.getSslPort()).andReturn(-1).anyTimes(); EasyMock.replay(instance, serviceProvider); Server server = serverDiscoverySelector.pick(); Assert.assertEquals(PORT, server.getPort()); diff --git 
a/server/src/test/java/io/druid/guice/JsonConfigTesterBase.java b/server/src/test/java/io/druid/guice/JsonConfigTesterBase.java index 06a665a15f53..87ef70454dfe 100644 --- a/server/src/test/java/io/druid/guice/JsonConfigTesterBase.java +++ b/server/src/test/java/io/druid/guice/JsonConfigTesterBase.java @@ -82,6 +82,7 @@ public void configure(Binder binder) { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/test"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); JsonConfigProvider.bind(binder, configPrefix, clazz); } }; diff --git a/server/src/test/java/io/druid/initialization/InitializationTest.java b/server/src/test/java/io/druid/initialization/InitializationTest.java index 80b0769c3c02..792de39b8d2c 100644 --- a/server/src/test/java/io/druid/initialization/InitializationTest.java +++ b/server/src/test/java/io/druid/initialization/InitializationTest.java @@ -34,6 +34,7 @@ import io.druid.guice.annotations.Self; import io.druid.java.util.common.ISE; import io.druid.server.DruidNode; +import io.druid.server.initialization.ServerConfig; import org.junit.Assert; import org.junit.FixMethodOrder; import org.junit.Rule; @@ -140,7 +141,7 @@ public void test05MakeInjectorWithModules() throws Exception public void configure(Binder binder) { JsonConfigProvider.bindInstance( - binder, Key.get(DruidNode.class, Self.class), new DruidNode("test-inject", null, null) + binder, Key.get(DruidNode.class, Self.class), new DruidNode("test-inject", null, null, null, new ServerConfig()) ); } } diff --git a/server/src/test/java/io/druid/query/LocatedSegmentDescriptorSerdeTest.java b/server/src/test/java/io/druid/query/LocatedSegmentDescriptorSerdeTest.java index aaa00c8701e1..970e4e9f3ea4 100644 --- a/server/src/test/java/io/druid/query/LocatedSegmentDescriptorSerdeTest.java +++ b/server/src/test/java/io/druid/query/LocatedSegmentDescriptorSerdeTest.java @@ -42,9 
+42,9 @@ public void testDimensionsSpecSerde() throws Exception new SegmentDescriptor(new Interval(100, 200), "version", 100), 65535, Arrays.asList( - new DruidServerMetadata("server1", "host1", 30000L, ServerType.HISTORICAL, "tier1", 0), - new DruidServerMetadata("server2", "host2", 40000L, ServerType.HISTORICAL, "tier1", 1), - new DruidServerMetadata("server3", "host3", 50000L, ServerType.REALTIME, "tier2", 2) + new DruidServerMetadata("server1", "host1", null, 30000L, ServerType.HISTORICAL, "tier1", 0), + new DruidServerMetadata("server2", "host2", null, 40000L, ServerType.HISTORICAL, "tier1", 1), + new DruidServerMetadata("server3", "host3", null, 50000L, ServerType.REALTIME, "tier2", 2) ) ); diff --git a/server/src/test/java/io/druid/query/lookup/LookupListeningAnnouncerConfigTest.java b/server/src/test/java/io/druid/query/lookup/LookupListeningAnnouncerConfigTest.java index a9b31a367b31..f1030ac1db17 100644 --- a/server/src/test/java/io/druid/query/lookup/LookupListeningAnnouncerConfigTest.java +++ b/server/src/test/java/io/druid/query/lookup/LookupListeningAnnouncerConfigTest.java @@ -31,6 +31,7 @@ import io.druid.guice.annotations.Self; import io.druid.initialization.Initialization; import io.druid.server.DruidNode; +import io.druid.server.initialization.ServerConfig; import io.druid.server.metrics.DataSourceTaskIdHolder; import org.junit.Assert; import org.junit.Before; @@ -50,7 +51,7 @@ public class LookupListeningAnnouncerConfigTest public void configure(Binder binder) { JsonConfigProvider.bindInstance( - binder, Key.get(DruidNode.class, Self.class), new DruidNode("test-inject", null, null) + binder, Key.get(DruidNode.class, Self.class), new DruidNode("test-inject", null, null, null, new ServerConfig()) ); binder.bind(Key.get( String.class, diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/DefaultOfflineAppenderatorFactoryTest.java 
b/server/src/test/java/io/druid/segment/realtime/appenderator/DefaultOfflineAppenderatorFactoryTest.java index bd7c2b418cd7..a286515a3f90 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/DefaultOfflineAppenderatorFactoryTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/DefaultOfflineAppenderatorFactoryTest.java @@ -70,6 +70,7 @@ public void configure(Binder binder) { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/tool"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(9999); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); binder.bind(DruidProcessingConfig.class).toInstance( new DruidProcessingConfig() { diff --git a/server/src/test/java/io/druid/segment/realtime/firehose/ServiceAnnouncingChatHandlerProviderTest.java b/server/src/test/java/io/druid/segment/realtime/firehose/ServiceAnnouncingChatHandlerProviderTest.java index bd9941f5b013..7d1284604d77 100644 --- a/server/src/test/java/io/druid/segment/realtime/firehose/ServiceAnnouncingChatHandlerProviderTest.java +++ b/server/src/test/java/io/druid/segment/realtime/firehose/ServiceAnnouncingChatHandlerProviderTest.java @@ -21,6 +21,7 @@ import io.druid.curator.discovery.ServiceAnnouncer; import io.druid.server.DruidNode; +import io.druid.server.initialization.ServerConfig; import org.easymock.Capture; import org.easymock.EasyMock; import org.easymock.EasyMockRunner; @@ -53,7 +54,7 @@ private static class TestChatHandler implements ChatHandler {} @Before public void setUp() throws Exception { - chatHandlerProvider = new ServiceAnnouncingChatHandlerProvider(node, serviceAnnouncer); + chatHandlerProvider = new ServiceAnnouncingChatHandlerProvider(node, serviceAnnouncer, new ServerConfig()); } @Test @@ -89,7 +90,8 @@ private void testRegistrationWithAnnounce(boolean useThreeArgConstructor) throws Capture captured = Capture.newInstance(); EasyMock.expect(node.getHost()).andReturn(TEST_HOST); - 
EasyMock.expect(node.getPort()).andReturn(TEST_PORT); + EasyMock.expect(node.getPlaintextPort()).andReturn(TEST_PORT); + EasyMock.expect(node.getTlsPort()).andReturn(-1); serviceAnnouncer.announce(EasyMock.capture(captured)); replayAll(); @@ -105,14 +107,17 @@ private void testRegistrationWithAnnounce(boolean useThreeArgConstructor) throws DruidNode param = captured.getValues().get(0); Assert.assertEquals(TEST_SERVICE_NAME, param.getServiceName()); Assert.assertEquals(TEST_HOST, param.getHost()); - Assert.assertEquals(TEST_PORT, param.getPort()); + Assert.assertEquals(TEST_PORT, param.getPlaintextPort()); + Assert.assertEquals(-1, param.getTlsPort()); + Assert.assertEquals(null, param.getHostAndTlsPort()); Assert.assertTrue("chatHandler did not register", chatHandlerProvider.get(TEST_SERVICE_NAME).isPresent()); Assert.assertEquals(testChatHandler, chatHandlerProvider.get(TEST_SERVICE_NAME).get()); captured.reset(); resetAll(); EasyMock.expect(node.getHost()).andReturn(TEST_HOST); - EasyMock.expect(node.getPort()).andReturn(TEST_PORT); + EasyMock.expect(node.getPlaintextPort()).andReturn(TEST_PORT); + EasyMock.expect(node.getTlsPort()).andReturn(-1); serviceAnnouncer.unannounce(EasyMock.capture(captured)); replayAll(); @@ -122,7 +127,9 @@ private void testRegistrationWithAnnounce(boolean useThreeArgConstructor) throws param = captured.getValues().get(0); Assert.assertEquals(TEST_SERVICE_NAME, param.getServiceName()); Assert.assertEquals(TEST_HOST, param.getHost()); - Assert.assertEquals(TEST_PORT, param.getPort()); + Assert.assertEquals(TEST_PORT, param.getPlaintextPort()); + Assert.assertEquals(-1, param.getTlsPort()); + Assert.assertEquals(null, param.getHostAndTlsPort()); Assert.assertFalse("chatHandler did not deregister", chatHandlerProvider.get(TEST_SERVICE_NAME).isPresent()); } } diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifierTest.java 
b/server/src/test/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifierTest.java index 8238366d6093..bc40f3dcb41e 100644 --- a/server/src/test/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifierTest.java +++ b/server/src/test/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifierTest.java @@ -332,6 +332,7 @@ private DruidServerMetadata createServerMetadata(String name, ServerType type) return new DruidServerMetadata( name, name, + null, 10000, type, "tier", diff --git a/server/src/test/java/io/druid/server/AsyncQueryForwardingServletTest.java b/server/src/test/java/io/druid/server/AsyncQueryForwardingServletTest.java index 6298ac108409..409652038386 100644 --- a/server/src/test/java/io/druid/server/AsyncQueryForwardingServletTest.java +++ b/server/src/test/java/io/druid/server/AsyncQueryForwardingServletTest.java @@ -46,6 +46,7 @@ import io.druid.query.Query; import io.druid.query.QueryToolChest; import io.druid.server.initialization.BaseJettyTest; +import io.druid.server.initialization.ServerConfig; import io.druid.server.initialization.jetty.JettyServerInitUtils; import io.druid.server.initialization.jetty.JettyServerInitializer; import io.druid.server.log.RequestLogger; @@ -85,7 +86,7 @@ public void setup() throws Exception setProperties(); Injector injector = setupInjector(); final DruidNode node = injector.getInstance(Key.get(DruidNode.class, Self.class)); - port = node.getPort(); + port = node.getPlaintextPort(); port1 = SocketUtil.findOpenPortFrom(port + 1); port2 = SocketUtil.findOpenPortFrom(port1 + 1); @@ -106,7 +107,7 @@ protected Injector setupInjector() public void configure(Binder binder) { JsonConfigProvider.bindInstance( - binder, Key.get(DruidNode.class, Self.class), new DruidNode("test", "localhost", null) + binder, Key.get(DruidNode.class, Self.class), new DruidNode("test", "localhost", null, null, new ServerConfig()) ); 
binder.bind(JettyServerInitializer.class).to(ProxyJettyServerInit.class).in(LazySingleton.class); Jerseys.addResource(binder, SlowResource.class); @@ -197,20 +198,20 @@ public void initialize(Server server, Injector injector) @Override public String getHost(Query query) { - return "localhost:" + node.getPort(); + return "localhost:" + node.getPlaintextPort(); } @Override public String getDefaultHost() { - return "localhost:" + node.getPort(); + return "localhost:" + node.getPlaintextPort(); } @Override public Collection getAllHosts() { return ImmutableList.of( - "localhost:" + node.getPort(), + "localhost:" + node.getPlaintextPort(), "localhost:" + port1, "localhost:" + port2 ); diff --git a/server/src/test/java/io/druid/server/ClientInfoResourceTest.java b/server/src/test/java/io/druid/server/ClientInfoResourceTest.java index 6818f625c12e..ae0988a25865 100644 --- a/server/src/test/java/io/druid/server/ClientInfoResourceTest.java +++ b/server/src/test/java/io/druid/server/ClientInfoResourceTest.java @@ -82,7 +82,7 @@ public class ClientInfoResourceTest public void setup() { VersionedIntervalTimeline timeline = new VersionedIntervalTimeline<>(Ordering.natural()); - DruidServer server = new DruidServer("name", "host", 1234, ServerType.HISTORICAL, "tier", 0); + DruidServer server = new DruidServer("name", "host", null, 1234, ServerType.HISTORICAL, "tier", 0); addSegment(timeline, server, "1960-02-13/1961-02-14", ImmutableList.of("d5"), ImmutableList.of("m5"), "v0"); diff --git a/server/src/test/java/io/druid/server/DruidNodeTest.java b/server/src/test/java/io/druid/server/DruidNodeTest.java index 10825d719f10..342a29a49e8f 100644 --- a/server/src/test/java/io/druid/server/DruidNodeTest.java +++ b/server/src/test/java/io/druid/server/DruidNodeTest.java @@ -20,11 +20,29 @@ package io.druid.server; import com.google.common.net.HostAndPort; +import io.druid.server.initialization.ServerConfig; import org.junit.Assert; +import org.junit.Before; import org.junit.Test; 
+import java.lang.reflect.Field; + public class DruidNodeTest { + private ServerConfig serverConfig; + private Field plainTextField; + private Field tlsField; + + @Before + public void setUp() throws Exception + { + serverConfig = new ServerConfig(); + plainTextField = serverConfig.getClass().getDeclaredField("plaintext"); + tlsField = serverConfig.getClass().getDeclaredField("tls"); + plainTextField.setAccessible(true); + tlsField.setAccessible(true); + } + @Test public void testDefaultsAndSanity() throws Exception { @@ -32,87 +50,250 @@ public void testDefaultsAndSanity() throws Exception DruidNode node; - node = new DruidNode(service, null, null); + node = new DruidNode(service, null, null, null, serverConfig); Assert.assertEquals(DruidNode.getDefaultHost(), node.getHost()); - Assert.assertEquals(-1, node.getPort()); + Assert.assertEquals(-1, node.getPlaintextPort()); // Hosts which report only ipv6 will have getDefaultHost() report something like fe80::6e40:8ff:fe93:9230 // but getHostAndPort() reports [fe80::6e40:8ff:fe93:9230] Assert.assertEquals(HostAndPort.fromString(DruidNode.getDefaultHost()).toString(), node.getHostAndPort()); - node = new DruidNode(service, "2001:db8:85a3::8a2e:370:7334", -1); + node = new DruidNode(service, "2001:db8:85a3::8a2e:370:7334", -1, null, serverConfig); Assert.assertEquals("2001:db8:85a3::8a2e:370:7334", node.getHost()); - Assert.assertEquals(-1, node.getPort()); + Assert.assertEquals(-1, node.getPlaintextPort()); Assert.assertEquals("[2001:db8:85a3::8a2e:370:7334]", node.getHostAndPort()); - node = new DruidNode(service, "abc:123", null); + node = new DruidNode(service, "abc:123", null, null, serverConfig); Assert.assertEquals("abc", node.getHost()); - Assert.assertEquals(123, node.getPort()); + Assert.assertEquals(123, node.getPlaintextPort()); Assert.assertEquals("abc:123", node.getHostAndPort()); - node = new DruidNode(service, "2001:db8:85a3::8a2e:370:7334", null); + node = new DruidNode(service, 
"2001:db8:85a3::8a2e:370:7334", null, null, serverConfig); Assert.assertEquals("2001:db8:85a3::8a2e:370:7334", node.getHost()); - Assert.assertTrue(8080 <= node.getPort()); + Assert.assertTrue(8080 <= node.getPlaintextPort()); - node = new DruidNode(service, "[2001:db8:85a3::8a2e:370:7334]", null); + node = new DruidNode(service, "[2001:db8:85a3::8a2e:370:7334]", null, null, serverConfig); Assert.assertEquals("2001:db8:85a3::8a2e:370:7334", node.getHost()); - Assert.assertTrue(8080 <= node.getPort()); + Assert.assertTrue(8080 <= node.getPlaintextPort()); - node = new DruidNode(service, "abc", null); + node = new DruidNode(service, "abc", null, null, serverConfig); Assert.assertEquals("abc", node.getHost()); - Assert.assertTrue(8080 <= node.getPort()); + Assert.assertTrue(8080 <= node.getPlaintextPort()); - node = new DruidNode(service, "abc", 123); + node = new DruidNode(service, "abc", 123, null, serverConfig); Assert.assertEquals("abc", node.getHost()); - Assert.assertEquals(123, node.getPort()); + Assert.assertEquals(123, node.getPlaintextPort()); Assert.assertEquals("abc:123", node.getHostAndPort()); - node = new DruidNode(service, "abc:123", 123); + node = new DruidNode(service, "abc:123", 123, null, serverConfig); Assert.assertEquals("abc", node.getHost()); - Assert.assertEquals(123, node.getPort()); + Assert.assertEquals(123, node.getPlaintextPort()); Assert.assertEquals("abc:123", node.getHostAndPort()); - node = new DruidNode(service, "[2001:db8:85a3::8a2e:370:7334]:123", null); + node = new DruidNode(service, "[2001:db8:85a3::8a2e:370:7334]:123", null, null, serverConfig); Assert.assertEquals("2001:db8:85a3::8a2e:370:7334", node.getHost()); - Assert.assertEquals(123, node.getPort()); + Assert.assertEquals(123, node.getPlaintextPort()); Assert.assertEquals("[2001:db8:85a3::8a2e:370:7334]:123", node.getHostAndPort()); - node = new DruidNode(service, "2001:db8:85a3::8a2e:370:7334", 123); + node = new DruidNode(service, "2001:db8:85a3::8a2e:370:7334", 123, 
null, serverConfig); Assert.assertEquals("2001:db8:85a3::8a2e:370:7334", node.getHost()); - Assert.assertEquals(123, node.getPort()); + Assert.assertEquals(123, node.getPlaintextPort()); Assert.assertEquals("[2001:db8:85a3::8a2e:370:7334]:123", node.getHostAndPort()); - node = new DruidNode(service, "[2001:db8:85a3::8a2e:370:7334]", 123); + node = new DruidNode(service, "[2001:db8:85a3::8a2e:370:7334]", 123, null, serverConfig); Assert.assertEquals("2001:db8:85a3::8a2e:370:7334", node.getHost()); - Assert.assertEquals(123, node.getPort()); + Assert.assertEquals(123, node.getPlaintextPort()); Assert.assertEquals("[2001:db8:85a3::8a2e:370:7334]:123", node.getHostAndPort()); - node = new DruidNode(service, null, 123); + node = new DruidNode(service, null, 123, null, serverConfig); + Assert.assertEquals(DruidNode.getDefaultHost(), node.getHost()); + Assert.assertEquals(123, node.getPlaintextPort()); + + node = new DruidNode(service, null, 123, 123, serverConfig); + Assert.assertEquals(DruidNode.getDefaultHost(), node.getHost()); + Assert.assertEquals(123, node.getPlaintextPort()); + Assert.assertEquals(-1, node.getTlsPort()); + + node = new DruidNode(service, "host", 123, 123, serverConfig); + Assert.assertEquals("host", node.getHost()); + Assert.assertEquals(123, node.getPlaintextPort()); + Assert.assertEquals(-1, node.getTlsPort()); + + node = new DruidNode(service, "host:123", null, 123, serverConfig); + Assert.assertEquals("host", node.getHost()); + Assert.assertEquals(123, node.getPlaintextPort()); + Assert.assertEquals(-1, node.getTlsPort()); + + plainTextField.setBoolean(serverConfig, true); + tlsField.setBoolean(serverConfig, true); + node = new DruidNode("test", "host:123", null, 214, serverConfig); + Assert.assertEquals("host", node.getHost()); + Assert.assertEquals(123, node.getPlaintextPort()); + Assert.assertEquals(214, node.getTlsPort()); + + plainTextField.setBoolean(serverConfig, true); + tlsField.setBoolean(serverConfig, true); + node = new 
DruidNode("test", "host", 123, 214, serverConfig); + Assert.assertEquals("host", node.getHost()); + Assert.assertEquals(123, node.getPlaintextPort()); + Assert.assertEquals(214, node.getTlsPort()); + + plainTextField.setBoolean(serverConfig, true); + tlsField.setBoolean(serverConfig, true); + node = new DruidNode("test", "host:123", 123, 214, serverConfig); + Assert.assertEquals("host", node.getHost()); + Assert.assertEquals(123, node.getPlaintextPort()); + Assert.assertEquals(214, node.getTlsPort()); + + plainTextField.setBoolean(serverConfig, true); + tlsField.setBoolean(serverConfig, true); + node = new DruidNode("test", null, 123, 214, serverConfig); Assert.assertEquals(DruidNode.getDefaultHost(), node.getHost()); - Assert.assertEquals(123, node.getPort()); + Assert.assertEquals(123, node.getPlaintextPort()); + Assert.assertEquals(214, node.getTlsPort()); + + plainTextField.setBoolean(serverConfig, false); + tlsField.setBoolean(serverConfig, true); + node = new DruidNode("test", "host:123", null, 214, serverConfig); + Assert.assertEquals("host", node.getHost()); + Assert.assertEquals(-1, node.getPlaintextPort()); + Assert.assertEquals(214, node.getTlsPort()); + + plainTextField.setBoolean(serverConfig, false); + tlsField.setBoolean(serverConfig, true); + node = new DruidNode("test", "host:123", null, 123, serverConfig); + Assert.assertEquals("host", node.getHost()); + Assert.assertEquals(-1, node.getPlaintextPort()); + Assert.assertEquals(123, node.getTlsPort()); + + plainTextField.setBoolean(serverConfig, false); + tlsField.setBoolean(serverConfig, true); + node = new DruidNode("test",null, null, 123, serverConfig); + Assert.assertEquals(DruidNode.getDefaultHost(), node.getHost()); + Assert.assertEquals(-1, node.getPlaintextPort()); + Assert.assertEquals(123, node.getTlsPort()); + + plainTextField.setBoolean(serverConfig, false); + tlsField.setBoolean(serverConfig, true); + node = new DruidNode("test",null, -1, 123, serverConfig); + 
Assert.assertEquals(DruidNode.getDefaultHost(), node.getHost()); + Assert.assertEquals(-1, node.getPlaintextPort()); + Assert.assertEquals(123, node.getTlsPort()); + + plainTextField.setBoolean(serverConfig, false); + tlsField.setBoolean(serverConfig, true); + node = new DruidNode("test","host", -1, 123, serverConfig); + Assert.assertEquals("host", node.getHost()); + Assert.assertEquals(-1, node.getPlaintextPort()); + Assert.assertEquals(123, node.getTlsPort()); + + plainTextField.setBoolean(serverConfig, true); + tlsField.setBoolean(serverConfig, false); + node = new DruidNode("test","host", -1, 123, serverConfig); + Assert.assertEquals("host", node.getHost()); + Assert.assertEquals(-1, node.getPlaintextPort()); + Assert.assertEquals(-1, node.getTlsPort()); + + plainTextField.setBoolean(serverConfig, true); + tlsField.setBoolean(serverConfig, false); + node = new DruidNode("test","host:123", 123, null, serverConfig); + Assert.assertEquals("host", node.getHost()); + Assert.assertEquals(123, node.getPlaintextPort()); + Assert.assertEquals(-1, node.getTlsPort()); + + plainTextField.setBoolean(serverConfig, true); + tlsField.setBoolean(serverConfig, false); + node = new DruidNode("test","host:123", null, 123, serverConfig); + Assert.assertEquals("host", node.getHost()); + Assert.assertEquals(123, node.getPlaintextPort()); + Assert.assertEquals(-1, node.getTlsPort()); } @Test(expected = IllegalArgumentException.class) public void testConflictingPorts() throws Exception { - new DruidNode("test/service", "abc:123", 456); + new DruidNode("test/service", "abc:123", 456, null, new ServerConfig()); + } + + @Test(expected = IllegalArgumentException.class) + public void testAtLeastTlsOrPlainTextIsSet() throws Exception + { + plainTextField.setBoolean(serverConfig, false); + tlsField.setBoolean(serverConfig, false); + new DruidNode("test", "host:123", null, 123, serverConfig); + } + + @Test(expected = IllegalArgumentException.class) + public void testSamePlainTextAndTlsPort() 
throws Exception + { + plainTextField.setBoolean(serverConfig, true); + tlsField.setBoolean(serverConfig, true); + new DruidNode("test", "host:123", null, 123, serverConfig); + } + + @Test(expected = IllegalArgumentException.class) + public void testSamePlainTextAndTlsPort1() throws Exception + { + plainTextField.setBoolean(serverConfig, true); + tlsField.setBoolean(serverConfig, true); + new DruidNode("test", "host", 123, 123, serverConfig); + } + + @Test(expected = IllegalArgumentException.class) + public void testNullTlsPort() throws Exception + { + plainTextField.setBoolean(serverConfig, true); + tlsField.setBoolean(serverConfig, true); + new DruidNode("test", "host:123", null, null, serverConfig); + } + + @Test(expected = IllegalArgumentException.class) + public void testNullPlainTextAndTlsPort1() throws Exception + { + plainTextField.setBoolean(serverConfig, true); + tlsField.setBoolean(serverConfig, true); + new DruidNode("test", "host", null, null, serverConfig); + } + + @Test(expected = IllegalArgumentException.class) + public void testNullTlsPort1() throws Exception + { + plainTextField.setBoolean(serverConfig, true); + tlsField.setBoolean(serverConfig, true); + new DruidNode("test", "host:123", 123, null, serverConfig); + } + + @Test(expected = IllegalArgumentException.class) + public void testNullPlainTextAndTlsPort() throws Exception + { + plainTextField.setBoolean(serverConfig, true); + tlsField.setBoolean(serverConfig, true); + new DruidNode("test", null, null, null, serverConfig); + } + + @Test(expected = IllegalArgumentException.class) + public void testConflictingPlainTextPort() throws Exception + { + plainTextField.setBoolean(serverConfig, true); + tlsField.setBoolean(serverConfig, true); + new DruidNode("test", "host:123", 321, null, serverConfig); } @Test(expected = IllegalArgumentException.class) public void testInvalidIPv6WithPort() throws Exception { - new DruidNode("test/service", "[abc:fff]:123", 456); + new DruidNode("test/service", 
"[abc:fff]:123", 456, null, new ServerConfig()); } @Test(expected = IllegalArgumentException.class) public void testInvalidIPv6() throws Exception { - new DruidNode("test/service", "abc:fff", 456); + new DruidNode("test/service", "abc:fff", 456, null, new ServerConfig()); } @Test(expected = IllegalArgumentException.class) public void testConflictingPortsNonsense() throws Exception { - new DruidNode("test/service", "[2001:db8:85a3::8a2e:370:7334]:123", 456); + new DruidNode("test/service", "[2001:db8:85a3::8a2e:370:7334]:123", 456, null, new ServerConfig()); } @Test @@ -121,10 +302,11 @@ public void testEquals() throws Exception final String serviceName = "serviceName"; final String host = "some.host"; final int port = 9898; - Assert.assertEquals(new DruidNode(serviceName, host, port), new DruidNode(serviceName, host, port)); - Assert.assertNotEquals(new DruidNode(serviceName, host, port), new DruidNode(serviceName, host, -1)); - Assert.assertNotEquals(new DruidNode(serviceName, host, port), new DruidNode(serviceName, "other.host", port)); - Assert.assertNotEquals(new DruidNode(serviceName, host, port), new DruidNode("otherServiceName", host, port)); + final ServerConfig serverConfig = new ServerConfig(); + Assert.assertEquals(new DruidNode(serviceName, host, port, null, serverConfig), new DruidNode(serviceName, host, port, null, serverConfig)); + Assert.assertNotEquals(new DruidNode(serviceName, host, port, null, serverConfig), new DruidNode(serviceName, host, -1, null, serverConfig)); + Assert.assertNotEquals(new DruidNode(serviceName, host, port, null, serverConfig), new DruidNode(serviceName, "other.host", port, null, serverConfig)); + Assert.assertNotEquals(new DruidNode(serviceName, host, port, null, serverConfig), new DruidNode("otherServiceName", host, port, null, serverConfig)); } @Test @@ -134,10 +316,11 @@ public void testHashCode() throws Exception final String serviceName = "serviceName"; final String host = "some.host"; final int port = 9898; - 
Assert.assertEquals(new DruidNode(serviceName, host, port).hashCode(), new DruidNode(serviceName, host, port).hashCode()); + final ServerConfig serverConfig = new ServerConfig(); + Assert.assertEquals(new DruidNode(serviceName, host, port, null, serverConfig).hashCode(), new DruidNode(serviceName, host, port, null, serverConfig).hashCode()); // Potential hash collision if hashCode method ever changes - Assert.assertNotEquals(new DruidNode(serviceName, host, port).hashCode(), new DruidNode(serviceName, host, -1).hashCode()); - Assert.assertNotEquals(new DruidNode(serviceName, host, port).hashCode(), new DruidNode(serviceName, "other.host", port).hashCode()); - Assert.assertNotEquals(new DruidNode(serviceName, host, port).hashCode(), new DruidNode("otherServiceName", host, port).hashCode()); + Assert.assertNotEquals(new DruidNode(serviceName, host, port, null, serverConfig).hashCode(), new DruidNode(serviceName, host, -1, null, serverConfig).hashCode()); + Assert.assertNotEquals(new DruidNode(serviceName, host, port, null, serverConfig).hashCode(), new DruidNode(serviceName, "other.host", port, null, serverConfig).hashCode()); + Assert.assertNotEquals(new DruidNode(serviceName, host, port, null, serverConfig).hashCode(), new DruidNode("otherServiceName", host, port, null, serverConfig).hashCode()); } } diff --git a/server/src/test/java/io/druid/server/coordination/ZkCoordinatorTest.java b/server/src/test/java/io/druid/server/coordination/ZkCoordinatorTest.java index d8233cbff2bf..a1456819540e 100644 --- a/server/src/test/java/io/druid/server/coordination/ZkCoordinatorTest.java +++ b/server/src/test/java/io/druid/server/coordination/ZkCoordinatorTest.java @@ -81,6 +81,7 @@ public class ZkCoordinatorTest extends CuratorTestBase private final DruidServerMetadata me = new DruidServerMetadata( "dummyServer", "dummyHost", + null, 0, ServerType.HISTORICAL, "normal", @@ -515,7 +516,7 @@ public String getBase() } ); binder.bind(DruidServerMetadata.class) - .toInstance(new 
DruidServerMetadata("dummyServer", "dummyHost", 0, ServerType.HISTORICAL, "normal", 0)); + .toInstance(new DruidServerMetadata("dummyServer", "dummyHost", null, 0, ServerType.HISTORICAL, "normal", 0)); binder.bind(DataSegmentAnnouncer.class).toInstance(announcer); binder.bind(DataSegmentServerAnnouncer.class).toInstance(EasyMock.createNiceMock(DataSegmentServerAnnouncer.class)); binder.bind(CuratorFramework.class).toInstance(curator); diff --git a/server/src/test/java/io/druid/server/coordination/coordination/BatchDataSegmentAnnouncerTest.java b/server/src/test/java/io/druid/server/coordination/coordination/BatchDataSegmentAnnouncerTest.java index 6790dad3cd9d..6286cbaf650c 100644 --- a/server/src/test/java/io/druid/server/coordination/coordination/BatchDataSegmentAnnouncerTest.java +++ b/server/src/test/java/io/druid/server/coordination/coordination/BatchDataSegmentAnnouncerTest.java @@ -106,6 +106,7 @@ public void setUp() throws Exception new DruidServerMetadata( "id", "host", + null, Long.MAX_VALUE, ServerType.HISTORICAL, "tier", diff --git a/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyTest.java b/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyTest.java index e1bff13b2874..521a33da504c 100644 --- a/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyTest.java +++ b/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyTest.java @@ -62,7 +62,7 @@ public static List setupDummyCluster(int serverCount, int maxSegme serverHolderList.add( new ServerHolder( new ImmutableDruidServer( - new DruidServerMetadata("DruidServer_Name_" + i, "localhost", 10000000L, ServerType.HISTORICAL, "hot", 1), + new DruidServerMetadata("DruidServer_Name_" + i, "localhost", null, 10000000L, ServerType.HISTORICAL, "hot", 1), 3000L, ImmutableMap.of("DUMMY", EasyMock.createMock(ImmutableDruidDataSource.class)), ImmutableMap.copyOf(segments) diff --git 
a/server/src/test/java/io/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java b/server/src/test/java/io/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java index f6b0267162e4..1faf9f3eed84 100644 --- a/server/src/test/java/io/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java @@ -61,7 +61,7 @@ public static List setupDummyCluster(int serverCount, int maxSegme serverHolderList.add( new ServerHolder( new ImmutableDruidServer( - new DruidServerMetadata("DruidServer_Name_" + i, "localhost", 10000000L, ServerType.HISTORICAL, "hot", 1), + new DruidServerMetadata("DruidServer_Name_" + i, "localhost", null, 10000000L, ServerType.HISTORICAL, "hot", 1), 3000L, ImmutableMap.of("DUMMY", EasyMock.createMock(ImmutableDruidDataSource.class)), ImmutableMap.copyOf(segments) diff --git a/server/src/test/java/io/druid/server/coordinator/DruidClusterTest.java b/server/src/test/java/io/druid/server/coordinator/DruidClusterTest.java index d91675f9e42f..70c080d0ef3b 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidClusterTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidClusterTest.java @@ -86,7 +86,7 @@ public class DruidClusterTest private static final ServerHolder newRealtime = new ServerHolder( new ImmutableDruidServer( - new DruidServerMetadata("name1", "host2", 100L, ServerType.REALTIME, "tier1", 0), + new DruidServerMetadata("name1", "host2", null, 100L, ServerType.REALTIME, "tier1", 0), 0L, ImmutableMap.of( "src1", @@ -102,7 +102,7 @@ public class DruidClusterTest private static final ServerHolder newHistorical = new ServerHolder( new ImmutableDruidServer( - new DruidServerMetadata("name1", "host2", 100L, ServerType.HISTORICAL, "tier1", 0), + new DruidServerMetadata("name1", "host2", null, 100L, ServerType.HISTORICAL, "tier1", 0), 0L, ImmutableMap.of( "src1", @@ -125,7 +125,7 @@ public void 
setup() ImmutableSet.of( new ServerHolder( new ImmutableDruidServer( - new DruidServerMetadata("name1", "host1", 100L, ServerType.REALTIME, "tier1", 0), + new DruidServerMetadata("name1", "host1", null, 100L, ServerType.REALTIME, "tier1", 0), 0L, ImmutableMap.of( "src1", @@ -145,7 +145,7 @@ public void setup() ImmutableList.of( new ServerHolder( new ImmutableDruidServer( - new DruidServerMetadata("name1", "host1", 100L, ServerType.HISTORICAL, "tier1", 0), + new DruidServerMetadata("name1", "host1", null, 100L, ServerType.HISTORICAL, "tier1", 0), 0L, ImmutableMap.of( "src1", diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java index f426d11a1def..2ad1723fdc3d 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java @@ -147,6 +147,7 @@ public void testRunThreeTiersOneReplicant() throws Exception new DruidServer( "serverHot", "hostHot", + null, 1000, ServerType.HISTORICAL, "hot", @@ -163,6 +164,7 @@ public void testRunThreeTiersOneReplicant() throws Exception new DruidServer( "serverNorm", "hostNorm", + null, 1000, ServerType.HISTORICAL, "normal", @@ -179,6 +181,7 @@ public void testRunThreeTiersOneReplicant() throws Exception new DruidServer( "serverCold", "hostCold", + null, 1000, ServerType.HISTORICAL, "cold", @@ -259,6 +262,7 @@ public void testRunTwoTiersTwoReplicants() throws Exception new DruidServer( "serverHot", "hostHot", + null, 1000, ServerType.HISTORICAL, "hot", @@ -270,6 +274,7 @@ public void testRunTwoTiersTwoReplicants() throws Exception new DruidServer( "serverHot2", "hostHot2", + null, 1000, ServerType.HISTORICAL, "hot", @@ -286,6 +291,7 @@ public void testRunTwoTiersTwoReplicants() throws Exception new DruidServer( "serverCold", "hostCold", + null, 1000, ServerType.HISTORICAL, "cold", @@ 
-357,6 +363,7 @@ public void testRunTwoTiersWithExistingSegments() throws Exception DruidServer normServer = new DruidServer( "serverNorm", "hostNorm", + null, 1000, ServerType.HISTORICAL, "normal", @@ -376,6 +383,7 @@ public void testRunTwoTiersWithExistingSegments() throws Exception new DruidServer( "serverHot", "hostHot", + null, 1000, ServerType.HISTORICAL, "hot", @@ -462,6 +470,7 @@ public void testRunTwoTiersTierDoesNotExist() throws Exception new DruidServer( "serverNorm", "hostNorm", + null, 1000, ServerType.HISTORICAL, "normal", @@ -525,6 +534,7 @@ public void testRunRuleDoesNotExist() throws Exception new DruidServer( "serverNorm", "hostNorm", + null, 1000, ServerType.HISTORICAL, "normal", @@ -579,6 +589,7 @@ public void testDropRemove() throws Exception DruidServer server = new DruidServer( "serverNorm", "hostNorm", + null, 1000, ServerType.HISTORICAL, "normal", @@ -651,6 +662,7 @@ public void testDropTooManyInSameTier() throws Exception DruidServer server1 = new DruidServer( "serverNorm", "hostNorm", + null, 1000, ServerType.HISTORICAL, "normal", @@ -661,6 +673,7 @@ public void testDropTooManyInSameTier() throws Exception DruidServer server2 = new DruidServer( "serverNorm2", "hostNorm2", + null, 1000, ServerType.HISTORICAL, "normal", @@ -740,6 +753,7 @@ public void testDropTooManyInDifferentTiers() throws Exception DruidServer server1 = new DruidServer( "server1", "host1", + null, 1000, ServerType.HISTORICAL, "hot", @@ -749,6 +763,7 @@ public void testDropTooManyInDifferentTiers() throws Exception DruidServer server2 = new DruidServer( "serverNorm2", "hostNorm2", + null, 1000, ServerType.HISTORICAL, "normal", @@ -831,6 +846,7 @@ public void testDontDropInDifferentTiers() throws Exception DruidServer server1 = new DruidServer( "server1", "host1", + null, 1000, ServerType.HISTORICAL, "hot", @@ -839,6 +855,7 @@ public void testDontDropInDifferentTiers() throws Exception DruidServer server2 = new DruidServer( "serverNorm2", "hostNorm2", + null, 1000, 
ServerType.HISTORICAL, "normal", @@ -915,6 +932,7 @@ public void testDropServerActuallyServesSegment() throws Exception DruidServer server1 = new DruidServer( "server1", "host1", + null, 1000, ServerType.HISTORICAL, "normal", @@ -924,6 +942,7 @@ public void testDropServerActuallyServesSegment() throws Exception DruidServer server2 = new DruidServer( "serverNorm2", "hostNorm2", + null, 1000, ServerType.HISTORICAL, "normal", @@ -933,6 +952,7 @@ public void testDropServerActuallyServesSegment() throws Exception DruidServer server3 = new DruidServer( "serverNorm3", "hostNorm3", + null, 1000, ServerType.HISTORICAL, "normal", @@ -1034,6 +1054,7 @@ public void testReplicantThrottle() throws Exception new DruidServer( "serverHot", "hostHot", + null, 1000, ServerType.HISTORICAL, "hot", @@ -1045,6 +1066,7 @@ public void testReplicantThrottle() throws Exception new DruidServer( "serverHot2", "hostHot2", + null, 1000, ServerType.HISTORICAL, "hot", @@ -1160,6 +1182,7 @@ public void testReplicantThrottleAcrossTiers() throws Exception new DruidServer( "serverHot", "hostHot", + null, 1000, ServerType.HISTORICAL, "hot", @@ -1176,6 +1199,7 @@ public void testReplicantThrottleAcrossTiers() throws Exception new DruidServer( "serverNorm", "hostNorm", + null, 1000, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, @@ -1251,6 +1275,7 @@ public void testDropReplicantThrottle() throws Exception DruidServer server1 = new DruidServer( "serverNorm1", "hostNorm1", + null, 1000, ServerType.HISTORICAL, "normal", @@ -1262,6 +1287,7 @@ public void testDropReplicantThrottle() throws Exception DruidServer server2 = new DruidServer( "serverNorm2", "hostNorm2", + null, 1000, ServerType.HISTORICAL, "normal", @@ -1366,6 +1392,7 @@ public void testRulesRunOnNonOvershadowedSegmentsOnly() throws Exception new DruidServer( "serverHot", "hostHot", + null, 1000, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java 
b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java index 111daf0bb757..051fc5a5d328 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java @@ -44,6 +44,7 @@ import io.druid.server.coordination.ServerType; import io.druid.server.coordinator.rules.ForeverLoadRule; import io.druid.server.coordinator.rules.Rule; +import io.druid.server.initialization.ServerConfig; import io.druid.server.initialization.ZkPathsConfig; import io.druid.server.lookup.cache.LookupCoordinatorManager; import io.druid.server.metrics.NoopServiceEmitter; @@ -141,7 +142,7 @@ public void setUp() throws Exception druidCoordinatorConfig ); loadQueuePeon.start(); - druidNode = new DruidNode("hey", "what", 1234); + druidNode = new DruidNode("hey", "what", 1234, null, new ServerConfig()); loadManagementPeons = new MapMaker().makeMap(); scheduledExecutorFactory = new ScheduledExecutorFactory() { @@ -223,7 +224,7 @@ public void testMoveSegment() throws Exception EasyMock.replay(metadataRuleManager); EasyMock.expect(druidServer.toImmutableDruidServer()).andReturn( new ImmutableDruidServer( - new DruidServerMetadata("from", null, 5L, ServerType.HISTORICAL, null, 0), + new DruidServerMetadata("from", null, null, 5L, ServerType.HISTORICAL, null, 0), 1L, null, ImmutableMap.of("dummySegment", segment) @@ -234,7 +235,7 @@ public void testMoveSegment() throws Exception druidServer2 = EasyMock.createMock(DruidServer.class); EasyMock.expect(druidServer2.toImmutableDruidServer()).andReturn( new ImmutableDruidServer( - new DruidServerMetadata("to", null, 5L, ServerType.HISTORICAL, null, 0), + new DruidServerMetadata("to", null, null, 5L, ServerType.HISTORICAL, null, 0), 1L, null, ImmutableMap.of("dummySegment2", segment) @@ -292,7 +293,7 @@ public void testCoordinatorRun() throws Exception{ EasyMock.replay(immutableDruidDataSource); // Setup ServerInventoryView - druidServer = 
new DruidServer("server1", "localhost", 5L, ServerType.HISTORICAL, tier, 0); + druidServer = new DruidServer("server1", "localhost", null, 5L, ServerType.HISTORICAL, tier, 0); loadManagementPeons.put("server1", loadQueuePeon); EasyMock.expect(serverInventoryView.getInventory()).andReturn( ImmutableList.of(druidServer) diff --git a/server/src/test/java/io/druid/server/coordinator/ServerHolderTest.java b/server/src/test/java/io/druid/server/coordinator/ServerHolderTest.java index 33e6417bd331..aa7bb5c8156f 100644 --- a/server/src/test/java/io/druid/server/coordinator/ServerHolderTest.java +++ b/server/src/test/java/io/druid/server/coordinator/ServerHolderTest.java @@ -85,7 +85,7 @@ public void testCompareTo() throws Exception // available size of 100 final ServerHolder h1 = new ServerHolder( new ImmutableDruidServer( - new DruidServerMetadata("name1", "host1", 100L, ServerType.HISTORICAL, "tier1", 0), + new DruidServerMetadata("name1", "host1", null, 100L, ServerType.HISTORICAL, "tier1", 0), 0L, ImmutableMap.of( "src1", @@ -102,7 +102,7 @@ public void testCompareTo() throws Exception // available size of 100 final ServerHolder h2 = new ServerHolder( new ImmutableDruidServer( - new DruidServerMetadata("name1", "host1", 200L, ServerType.HISTORICAL, "tier1", 0), + new DruidServerMetadata("name1", "host1", null, 200L, ServerType.HISTORICAL, "tier1", 0), 100L, ImmutableMap.of( "src1", @@ -119,7 +119,7 @@ public void testCompareTo() throws Exception // available size of 10 final ServerHolder h3 = new ServerHolder( new ImmutableDruidServer( - new DruidServerMetadata("name1", "host1", 1000L, ServerType.HISTORICAL, "tier1", 0), + new DruidServerMetadata("name1", "host1", null, 1000L, ServerType.HISTORICAL, "tier1", 0), 990L, ImmutableMap.of( "src1", @@ -136,7 +136,7 @@ public void testCompareTo() throws Exception // available size of 50 final ServerHolder h4 = new ServerHolder( new ImmutableDruidServer( - new DruidServerMetadata("name1", "host1", 50L, ServerType.HISTORICAL, 
"tier1", 0), + new DruidServerMetadata("name1", "host1", null, 50L, ServerType.HISTORICAL, "tier1", 0), 0L, ImmutableMap.of( "src1", @@ -160,7 +160,7 @@ public void testEquals() throws Exception { final ServerHolder h1 = new ServerHolder( new ImmutableDruidServer( - new DruidServerMetadata("name1", "host1", 100L, ServerType.HISTORICAL, "tier1", 0), + new DruidServerMetadata("name1", "host1", null, 100L, ServerType.HISTORICAL, "tier1", 0), 0L, ImmutableMap.of( "src1", @@ -176,7 +176,7 @@ public void testEquals() throws Exception final ServerHolder h2 = new ServerHolder( new ImmutableDruidServer( - new DruidServerMetadata("name2", "host1", 200L, ServerType.HISTORICAL, "tier1", 0), + new DruidServerMetadata("name2", "host1", null, 200L, ServerType.HISTORICAL, "tier1", 0), 100L, ImmutableMap.of( "src1", @@ -192,7 +192,7 @@ public void testEquals() throws Exception final ServerHolder h3 = new ServerHolder( new ImmutableDruidServer( - new DruidServerMetadata("name1", "host2", 200L, ServerType.HISTORICAL, "tier1", 0), + new DruidServerMetadata("name1", "host2", null, 200L, ServerType.HISTORICAL, "tier1", 0), 100L, ImmutableMap.of( "src1", @@ -208,7 +208,7 @@ public void testEquals() throws Exception final ServerHolder h4 = new ServerHolder( new ImmutableDruidServer( - new DruidServerMetadata("name1", "host1", 200L, ServerType.HISTORICAL, "tier2", 0), + new DruidServerMetadata("name1", "host1", null,200L, ServerType.HISTORICAL, "tier2", 0), 100L, ImmutableMap.of( "src1", @@ -224,7 +224,7 @@ public void testEquals() throws Exception final ServerHolder h5 = new ServerHolder( new ImmutableDruidServer( - new DruidServerMetadata("name1", "host1", 100L, ServerType.REALTIME, "tier1", 0), + new DruidServerMetadata("name1", "host1", null, 100L, ServerType.REALTIME, "tier1", 0), 0L, ImmutableMap.of( "src1", diff --git a/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java 
b/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java index fe461117bd06..9aa2ff77931f 100644 --- a/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java @@ -107,6 +107,7 @@ public void setUp() throws Exception new DruidServer( "serverHot2", "hostHot2", + null, 1000, ServerType.HISTORICAL, "hot", @@ -121,6 +122,7 @@ public void setUp() throws Exception new DruidServer( "serverHot1", "hostHot1", + null, 1000, ServerType.HISTORICAL, "hot", @@ -135,6 +137,7 @@ public void setUp() throws Exception new DruidServer( "serverNorm1", "hostNorm1", + null, 1000, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, @@ -149,6 +152,7 @@ public void setUp() throws Exception new DruidServer( "serverNorm2", "hostNorm2", + null, 100, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, @@ -164,6 +168,7 @@ public void setUp() throws Exception new DruidServer( "serverHot3", "hostHot3", + null, 1000, ServerType.HISTORICAL, "hot", @@ -178,6 +183,7 @@ public void setUp() throws Exception new DruidServer( "serverNorm3", "hostNorm3", + null, 100, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, diff --git a/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java index 2a584ce61c5d..b8937d2ebf14 100644 --- a/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java @@ -161,6 +161,7 @@ public boolean appliesTo(Interval interval, DateTime referenceTimestamp) new DruidServer( "serverHot", "hostHot", + null, 1000, ServerType.HISTORICAL, "hot", @@ -177,6 +178,7 @@ public boolean appliesTo(Interval interval, DateTime referenceTimestamp) new DruidServer( "serverNorm", "hostNorm", + null, 1000, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, @@ -262,6 
+264,7 @@ public boolean appliesTo(Interval interval, DateTime referenceTimestamp) DruidServer server1 = new DruidServer( "serverHot", "hostHot", + null, 1000, ServerType.HISTORICAL, "hot", @@ -271,6 +274,7 @@ public boolean appliesTo(Interval interval, DateTime referenceTimestamp) DruidServer server2 = new DruidServer( "serverNorm", "hostNorm", + null, 1000, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, @@ -381,6 +385,7 @@ public boolean appliesTo(Interval interval, DateTime referenceTimestamp) new DruidServer( "serverHot", "hostHot", + null, 1000, ServerType.HISTORICAL, "hot", @@ -465,6 +470,7 @@ public boolean appliesTo(Interval interval, DateTime referenceTimestamp) DruidServer server1 = new DruidServer( "serverHot", "hostHot", + null, 1000, ServerType.HISTORICAL, "hot", @@ -473,6 +479,7 @@ public boolean appliesTo(Interval interval, DateTime referenceTimestamp) DruidServer server2 = new DruidServer( "serverHo2t", "hostHot2", + null, 1000, ServerType.HISTORICAL, "hot", @@ -574,6 +581,7 @@ public boolean appliesTo(Interval interval, DateTime referenceTimestamp) new DruidServer( "serverHot", "hostHot", + null, 1000, ServerType.HISTORICAL, "hot", diff --git a/server/src/test/java/io/druid/server/http/CoordinatorRedirectInfoTest.java b/server/src/test/java/io/druid/server/http/CoordinatorRedirectInfoTest.java index 56feb114a901..2bb63c465c7a 100644 --- a/server/src/test/java/io/druid/server/http/CoordinatorRedirectInfoTest.java +++ b/server/src/test/java/io/druid/server/http/CoordinatorRedirectInfoTest.java @@ -68,7 +68,7 @@ public void testGetRedirectURLNull() { EasyMock.expect(druidCoordinator.getCurrentLeader()).andReturn(null).anyTimes(); EasyMock.replay(druidCoordinator); - URL url = coordinatorRedirectInfo.getRedirectURL("query", "/request"); + URL url = coordinatorRedirectInfo.getRedirectURL("http","query", "/request"); Assert.assertNull(url); EasyMock.verify(druidCoordinator); } @@ -81,7 +81,7 @@ public void testGetRedirectURL() String request = 
"/request"; EasyMock.expect(druidCoordinator.getCurrentLeader()).andReturn(host).anyTimes(); EasyMock.replay(druidCoordinator); - URL url = coordinatorRedirectInfo.getRedirectURL(query, request); + URL url = coordinatorRedirectInfo.getRedirectURL("http", query, request); Assert.assertEquals("http://localhost/request?foo=bar&x=y", url.toString()); EasyMock.verify(druidCoordinator); } diff --git a/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java b/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java index 6404ec2f38e8..74f98197488e 100644 --- a/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java +++ b/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java @@ -329,7 +329,7 @@ public void testSimpleGetTheDataSourceManyTiers() throws Exception @Test public void testGetSegmentDataSourceIntervals() { - server = new DruidServer("who", "host", 1234, ServerType.HISTORICAL, "tier1", 0); + server = new DruidServer("who", "host", null, 1234, ServerType.HISTORICAL, "tier1", 0); server.addDataSegment(dataSegmentList.get(0).getIdentifier(), dataSegmentList.get(0)); server.addDataSegment(dataSegmentList.get(1).getIdentifier(), dataSegmentList.get(1)); server.addDataSegment(dataSegmentList.get(2).getIdentifier(), dataSegmentList.get(2)); @@ -379,7 +379,7 @@ public void testGetSegmentDataSourceIntervals() @Test public void testGetSegmentDataSourceSpecificInterval() { - server = new DruidServer("who", "host", 1234, ServerType.HISTORICAL, "tier1", 0); + server = new DruidServer("who", "host", null, 1234, ServerType.HISTORICAL, "tier1", 0); server.addDataSegment(dataSegmentList.get(0).getIdentifier(), dataSegmentList.get(0)); server.addDataSegment(dataSegmentList.get(1).getIdentifier(), dataSegmentList.get(1)); server.addDataSegment(dataSegmentList.get(2).getIdentifier(), dataSegmentList.get(2)); diff --git a/server/src/test/java/io/druid/server/http/IntervalsResourceTest.java 
b/server/src/test/java/io/druid/server/http/IntervalsResourceTest.java index d3cb3aa157e9..3e7e69b86d27 100644 --- a/server/src/test/java/io/druid/server/http/IntervalsResourceTest.java +++ b/server/src/test/java/io/druid/server/http/IntervalsResourceTest.java @@ -93,7 +93,7 @@ public void setUp() 5 ) ); - server = new DruidServer("who", "host", 1234, ServerType.HISTORICAL, "tier1", 0); + server = new DruidServer("who", "host", null, 1234, ServerType.HISTORICAL, "tier1", 0); server.addDataSegment(dataSegmentList.get(0).getIdentifier(), dataSegmentList.get(0)); server.addDataSegment(dataSegmentList.get(1).getIdentifier(), dataSegmentList.get(1)); server.addDataSegment(dataSegmentList.get(2).getIdentifier(), dataSegmentList.get(2)); diff --git a/server/src/test/java/io/druid/server/http/ServersResourceTest.java b/server/src/test/java/io/druid/server/http/ServersResourceTest.java index 80e4b97ffb05..c9842cb195c0 100644 --- a/server/src/test/java/io/druid/server/http/ServersResourceTest.java +++ b/server/src/test/java/io/druid/server/http/ServersResourceTest.java @@ -35,7 +35,8 @@ import javax.ws.rs.core.Response; -public class ServersResourceTest { +public class ServersResourceTest +{ private DruidServer server; private ServersResource serversResource; private ObjectMapper objectMapper = new DefaultObjectMapper(); @@ -43,7 +44,7 @@ public class ServersResourceTest { @Before public void setUp() { - DruidServer dummyServer = new DruidServer("dummy", "host", 1234L, ServerType.HISTORICAL, "tier", 0); + DruidServer dummyServer = new DruidServer("dummy", "host", null, 1234L, ServerType.HISTORICAL, "tier", 0); DataSegment segment = DataSegment.builder() .dataSource("dataSource") .interval(new Interval("2016-03-22T14Z/2016-03-22T15Z")) @@ -115,9 +116,9 @@ public void testGetServerSimple() throws Exception @Test public void testDruidServerSerde() throws Exception { - DruidServer server = new DruidServer("dummy", "dummyHost", 1234, ServerType.HISTORICAL, "dummyTier", 1); + 
DruidServer server = new DruidServer("dummy", "dummyHost", null, 1234, ServerType.HISTORICAL, "dummyTier", 1); String serverJson = objectMapper.writeValueAsString(server); - String expected = "{\"name\":\"dummy\",\"host\":\"dummyHost\",\"maxSize\":1234,\"type\":\"historical\",\"tier\":\"dummyTier\",\"priority\":1}"; + String expected = "{\"name\":\"dummy\",\"host\":\"dummyHost\",\"hostAndTlsPort\":null,\"maxSize\":1234,\"type\":\"historical\",\"tier\":\"dummyTier\",\"priority\":1}"; Assert.assertEquals(expected, serverJson); DruidServer deserializedServer = objectMapper.readValue(serverJson, DruidServer.class); Assert.assertEquals(server, deserializedServer); @@ -126,11 +127,52 @@ public void testDruidServerSerde() throws Exception @Test public void testDruidServerMetadataSerde() throws Exception { - DruidServerMetadata metadata = new DruidServerMetadata("dummy", "host", 1234, ServerType.HISTORICAL, "tier", 1); + DruidServerMetadata metadata = new DruidServerMetadata( + "dummy", + "host", + null, + 1234, + ServerType.HISTORICAL, + "tier", + 1 + ); String metadataJson = objectMapper.writeValueAsString(metadata); - String expected = "{\"name\":\"dummy\",\"host\":\"host\",\"maxSize\":1234,\"type\":\"historical\",\"tier\":\"tier\",\"priority\":1}"; + String expected = "{\"name\":\"dummy\",\"host\":\"host\",\"hostAndTlsPort\":null,\"maxSize\":1234,\"type\":\"historical\",\"tier\":\"tier\",\"priority\":1}"; Assert.assertEquals(expected, metadataJson); DruidServerMetadata deserializedMetadata = objectMapper.readValue(metadataJson, DruidServerMetadata.class); Assert.assertEquals(metadata, deserializedMetadata); + + metadata = new DruidServerMetadata( + "host:123", + "host:123", + null, + 0, + ServerType.HISTORICAL, + "t1", + 0 + ); + + Assert.assertEquals(metadata, objectMapper.readValue( + "{\"name\":\"host:123\",\"maxSize\":0,\"type\":\"HISTORICAL\",\"tier\":\"t1\",\"priority\":0,\"host\":\"host:123\"}", + DruidServerMetadata.class + )); + + metadata = new 
DruidServerMetadata( + "host:123", + "host:123", + "host:214", + 0, + ServerType.HISTORICAL, + "t1", + 0 + ); + Assert.assertEquals(metadata, objectMapper.readValue( + "{\"name\":\"host:123\",\"maxSize\":0,\"type\":\"HISTORICAL\",\"tier\":\"t1\",\"priority\":0,\"host\":\"host:123\",\"hostAndTlsPort\":\"host:214\"}", + DruidServerMetadata.class + )); + Assert.assertEquals(metadata, objectMapper.readValue( + objectMapper.writeValueAsString(metadata), + DruidServerMetadata.class + )); } } diff --git a/server/src/test/java/io/druid/server/initialization/BaseJettyTest.java b/server/src/test/java/io/druid/server/initialization/BaseJettyTest.java index 7bd4697099da..643e8494ad75 100644 --- a/server/src/test/java/io/druid/server/initialization/BaseJettyTest.java +++ b/server/src/test/java/io/druid/server/initialization/BaseJettyTest.java @@ -84,7 +84,7 @@ public void setup() throws Exception setProperties(); Injector injector = setupInjector(); final DruidNode node = injector.getInstance(Key.get(DruidNode.class, Self.class)); - port = node.getPort(); + port = node.getPlaintextPort(); lifecycle = injector.getInstance(Lifecycle.class); lifecycle.start(); ClientHolder holder = injector.getInstance(ClientHolder.class); diff --git a/server/src/test/java/io/druid/server/initialization/JettyQosTest.java b/server/src/test/java/io/druid/server/initialization/JettyQosTest.java index a7d39dd130c2..8897c7f553f2 100644 --- a/server/src/test/java/io/druid/server/initialization/JettyQosTest.java +++ b/server/src/test/java/io/druid/server/initialization/JettyQosTest.java @@ -69,7 +69,7 @@ protected Injector setupInjector() public void configure(Binder binder) { JsonConfigProvider.bindInstance( - binder, Key.get(DruidNode.class, Self.class), new DruidNode("test", "localhost", null) + binder, Key.get(DruidNode.class, Self.class), new DruidNode("test", "localhost", null, null, new ServerConfig()) ); binder.bind(JettyServerInitializer.class).to(JettyServerInit.class).in(LazySingleton.class); 
Jerseys.addResource(binder, SlowResource.class); diff --git a/server/src/test/java/io/druid/server/initialization/JettyTest.java b/server/src/test/java/io/druid/server/initialization/JettyTest.java index 0feb5c0f8a1a..a38c1340f536 100644 --- a/server/src/test/java/io/druid/server/initialization/JettyTest.java +++ b/server/src/test/java/io/druid/server/initialization/JettyTest.java @@ -79,7 +79,7 @@ protected Injector setupInjector() public void configure(Binder binder) { JsonConfigProvider.bindInstance( - binder, Key.get(DruidNode.class, Self.class), new DruidNode("test", "localhost", null) + binder, Key.get(DruidNode.class, Self.class), new DruidNode("test", "localhost", null, null, new ServerConfig()) ); binder.bind(JettyServerInitializer.class).to(JettyServerInit.class).in(LazySingleton.class); diff --git a/server/src/test/java/io/druid/server/listener/announcer/ListenerDiscovererTest.java b/server/src/test/java/io/druid/server/listener/announcer/ListenerDiscovererTest.java index 2c246fb69661..5bee4731fa07 100644 --- a/server/src/test/java/io/druid/server/listener/announcer/ListenerDiscovererTest.java +++ b/server/src/test/java/io/druid/server/listener/announcer/ListenerDiscovererTest.java @@ -20,11 +20,11 @@ package io.druid.server.listener.announcer; import com.google.common.collect.ImmutableSet; -import com.google.common.net.HostAndPort; import io.druid.concurrent.Execs; import io.druid.curator.CuratorTestBase; import io.druid.curator.announcement.Announcer; import io.druid.segment.CloserRule; +import io.druid.server.http.HostAndPortWithScheme; import io.druid.server.initialization.ZkPathsConfig; import org.apache.curator.utils.ZKPaths; import org.junit.Assert; @@ -86,7 +86,7 @@ public void close() throws IOException }); Assert.assertTrue(listenerDiscoverer.getNodes(listenerKey).isEmpty()); - final HostAndPort node = HostAndPort.fromParts("someHost", 8888); + final HostAndPortWithScheme node = HostAndPortWithScheme.fromParts("http", "someHost", 8888); final 
ListenerResourceAnnouncer listenerResourceAnnouncer = new ListenerResourceAnnouncer( announcer, config, @@ -132,7 +132,7 @@ public void close() throws IOException Thread.sleep(1); } Assert.assertEquals( - ImmutableSet.of(HostAndPort.fromString(node.toString())), + ImmutableSet.of(HostAndPortWithScheme.fromString(node.toString())), listenerDiscoverer.getNodes(listenerKey) ); // 2nd call of two concurrent getNewNodes should return no entry collection diff --git a/server/src/test/java/io/druid/server/listener/announcer/ListenerResourceAnnouncerTest.java b/server/src/test/java/io/druid/server/listener/announcer/ListenerResourceAnnouncerTest.java index 7b0dc7dc6826..bb843506cc76 100644 --- a/server/src/test/java/io/druid/server/listener/announcer/ListenerResourceAnnouncerTest.java +++ b/server/src/test/java/io/druid/server/listener/announcer/ListenerResourceAnnouncerTest.java @@ -19,12 +19,12 @@ package io.druid.server.listener.announcer; -import com.google.common.net.HostAndPort; import com.google.common.primitives.Longs; import io.druid.concurrent.Execs; import io.druid.curator.CuratorTestBase; import io.druid.curator.announcement.Announcer; import io.druid.segment.CloserRule; +import io.druid.server.http.HostAndPortWithScheme; import io.druid.server.initialization.ZkPathsConfig; import org.apache.curator.utils.ZKPaths; import org.easymock.EasyMock; @@ -70,7 +70,7 @@ public void testAnnouncerBehaves() throws Exception Assert.assertNotNull(curator.create().forPath("/druid")); Assert.assertTrue(curator.blockUntilConnected(10, TimeUnit.SECONDS)); final Announcer announcer = new Announcer(curator, executorService); - final HostAndPort node = HostAndPort.fromString("localhost"); + final HostAndPortWithScheme node = HostAndPortWithScheme.fromString("localhost"); final ListenerResourceAnnouncer listenerResourceAnnouncer = new ListenerResourceAnnouncer( announcer, listeningAnnouncerConfig, @@ -90,7 +90,7 @@ public void close() throws IOException } }); 
Assert.assertNotNull(curator.checkExists().forPath(announcePath)); - final String nodePath = ZKPaths.makePath(announcePath, node.getHostText()); + final String nodePath = ZKPaths.makePath(announcePath, String.format("%s:%s", node.getScheme(), node.getHostText())); Assert.assertNotNull(curator.checkExists().forPath(nodePath)); Assert.assertEquals(Longs.BYTES, curator.getData().decompressed().forPath(nodePath).length); Assert.assertNull(curator.checkExists() @@ -111,7 +111,7 @@ public void close() throws IOException public void testStartCorrect() throws Exception { final Announcer announcer = EasyMock.createStrictMock(Announcer.class); - final HostAndPort node = HostAndPort.fromString("some_host"); + final HostAndPortWithScheme node = HostAndPortWithScheme.fromString("some_host"); final ListenerResourceAnnouncer resourceAnnouncer = new ListenerResourceAnnouncer( announcer, @@ -124,7 +124,7 @@ public void testStartCorrect() throws Exception announcer.announce( - EasyMock.eq(ZKPaths.makePath(announcePath, node.getHostText())), + EasyMock.eq(ZKPaths.makePath(announcePath, String.format("%s:%s", node.getScheme(), node.getHostText()))), EasyMock.aryEq(resourceAnnouncer.getAnnounceBytes()) ); EasyMock.expectLastCall().once(); diff --git a/server/src/test/java/io/druid/server/log/LoggingRequestLoggerProviderTest.java b/server/src/test/java/io/druid/server/log/LoggingRequestLoggerProviderTest.java index ab1ce25f2c15..187124bd31d4 100644 --- a/server/src/test/java/io/druid/server/log/LoggingRequestLoggerProviderTest.java +++ b/server/src/test/java/io/druid/server/log/LoggingRequestLoggerProviderTest.java @@ -83,6 +83,7 @@ public void configure(Binder binder) binder.bind(RequestLogger.class).toProvider(RequestLoggerProvider.class).in(ManageLifecycle.class); binder.bind(Key.get(String.class, Names.named("serviceName"))).toInstance("some service"); binder.bind(Key.get(Integer.class, Names.named("servicePort"))).toInstance(0); + binder.bind(Key.get(Integer.class, 
Names.named("tlsServicePort"))).toInstance(-1); JsonConfigProvider.bind(binder, propertyPrefix, RequestLoggerProvider.class); } } diff --git a/server/src/test/java/io/druid/server/lookup/cache/LookupCoordinatorManagerTest.java b/server/src/test/java/io/druid/server/lookup/cache/LookupCoordinatorManagerTest.java index f5ff3636a912..e2fad952dcc1 100644 --- a/server/src/test/java/io/druid/server/lookup/cache/LookupCoordinatorManagerTest.java +++ b/server/src/test/java/io/druid/server/lookup/cache/LookupCoordinatorManagerTest.java @@ -41,6 +41,7 @@ import io.druid.java.util.common.ISE; import io.druid.query.lookup.LookupModule; import io.druid.query.lookup.LookupsState; +import io.druid.server.http.HostAndPortWithScheme; import io.druid.server.listener.announcer.ListenerDiscoverer; import org.easymock.EasyMock; import org.hamcrest.BaseMatcher; @@ -205,7 +206,7 @@ HttpResponseHandler makeResponseHandler( }; LookupsState resp = lookupsCommunicator.updateNode( - HostAndPort.fromString("localhost"), + HostAndPortWithScheme.fromString("localhost"), LOOKUPS_STATE ); @@ -245,7 +246,7 @@ HttpResponseHandler makeResponseHandler( try { lookupsCommunicator.updateNode( - HostAndPort.fromString("localhost"), + HostAndPortWithScheme.fromString("localhost"), LOOKUPS_STATE ); Assert.fail(); @@ -288,7 +289,7 @@ HttpResponseHandler makeResponseHandler( try { lookupsCommunicator.updateNode( - HostAndPort.fromString("localhost"), + HostAndPortWithScheme.fromString("localhost"), LOOKUPS_STATE ); Assert.fail(); @@ -331,7 +332,7 @@ HttpResponseHandler makeResponseHandler( Thread.currentThread().interrupt(); try { lookupsCommunicator.updateNode( - HostAndPort.fromString("localhost"), + HostAndPortWithScheme.fromString("localhost"), LOOKUPS_STATE ); Assert.fail(); @@ -386,7 +387,7 @@ HttpResponseHandler makeResponseHandler( }; LookupsState resp = lookupsCommunicator.getLookupStateForNode( - HostAndPort.fromString("localhost") + HostAndPortWithScheme.fromString("localhost") ); 
EasyMock.verify(client, responseHandler); @@ -426,7 +427,7 @@ HttpResponseHandler makeResponseHandler( try { lookupsCommunicator.getLookupStateForNode( - HostAndPort.fromString("localhost") + HostAndPortWithScheme.fromString("localhost") ); Assert.fail(); } @@ -468,7 +469,7 @@ HttpResponseHandler makeResponseHandler( try { lookupsCommunicator.getLookupStateForNode( - HostAndPort.fromString("localhost") + HostAndPortWithScheme.fromString("localhost") ); Assert.fail(); } @@ -511,7 +512,7 @@ HttpResponseHandler makeResponseHandler( Thread.currentThread().interrupt(); try { lookupsCommunicator.getLookupStateForNode( - HostAndPort.fromString("localhost") + HostAndPortWithScheme.fromString("localhost") ); Assert.fail(); } @@ -1046,8 +1047,8 @@ public void testLookupManagementLoop() throws Exception new AtomicReference<>(configuredLookups)).once(); EasyMock.replay(configManager); - HostAndPort host1 = HostAndPort.fromParts("host1", 1234); - HostAndPort host2 = HostAndPort.fromParts("host2", 3456); + HostAndPortWithScheme host1 = HostAndPortWithScheme.fromParts("http", "host1", 1234); + HostAndPortWithScheme host2 = HostAndPortWithScheme.fromParts("http", "host2", 3456); EasyMock.reset(discoverer); EasyMock.expect( @@ -1142,9 +1143,9 @@ public int getThreadPoolSize() manager.start(); Map> expectedKnownState = ImmutableMap.of( - host1, + host1.getHostAndPort(), host1UpdatedState, - host2, + host2.getHostAndPort(), host2UpdatedState ); diff --git a/server/src/test/java/io/druid/server/metrics/MetricsModuleTest.java b/server/src/test/java/io/druid/server/metrics/MetricsModuleTest.java index d743c0525722..643b48428d0d 100644 --- a/server/src/test/java/io/druid/server/metrics/MetricsModuleTest.java +++ b/server/src/test/java/io/druid/server/metrics/MetricsModuleTest.java @@ -30,6 +30,7 @@ import io.druid.guice.annotations.Self; import io.druid.initialization.Initialization; import io.druid.server.DruidNode; +import io.druid.server.initialization.ServerConfig; import 
org.junit.Assert; import org.junit.Test; @@ -47,7 +48,7 @@ public void testSimpleInjection() public void configure(Binder binder) { JsonConfigProvider.bindInstance( - binder, Key.get(DruidNode.class, Self.class), new DruidNode("test-inject", null, null) + binder, Key.get(DruidNode.class, Self.class), new DruidNode("test-inject", null, null, null, new ServerConfig()) ); } }) @@ -72,7 +73,7 @@ public void testSimpleInjectionWithValues() public void configure(Binder binder) { JsonConfigProvider.bindInstance( - binder, Key.get(DruidNode.class, Self.class), new DruidNode("test-inject", null, null) + binder, Key.get(DruidNode.class, Self.class), new DruidNode("test-inject", null, null, null, new ServerConfig()) ); binder.bind(Key.get( String.class, diff --git a/services/src/main/java/io/druid/cli/CliBroker.java b/services/src/main/java/io/druid/cli/CliBroker.java index e4e748c44d5a..0745c4d02a0d 100644 --- a/services/src/main/java/io/druid/cli/CliBroker.java +++ b/services/src/main/java/io/druid/cli/CliBroker.java @@ -90,6 +90,7 @@ public void configure(Binder binder) TieredBrokerConfig.DEFAULT_BROKER_SERVICE_NAME ); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8082); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(8282); binder.bind(CachingClusteredClient.class).in(LazySingleton.class); binder.bind(BrokerServerView.class).in(LazySingleton.class); diff --git a/services/src/main/java/io/druid/cli/CliCoordinator.java b/services/src/main/java/io/druid/cli/CliCoordinator.java index 6ef0feebbe50..87fba0a3d145 100644 --- a/services/src/main/java/io/druid/cli/CliCoordinator.java +++ b/services/src/main/java/io/druid/cli/CliCoordinator.java @@ -127,6 +127,7 @@ public void configure(Binder binder) .annotatedWith(Names.named("serviceName")) .to(TieredBrokerConfig.DEFAULT_COORDINATOR_SERVICE_NAME); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8081); + 
binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(8281); ConfigProvider.bind(binder, DruidCoordinatorConfig.class); diff --git a/services/src/main/java/io/druid/cli/CliHistorical.java b/services/src/main/java/io/druid/cli/CliHistorical.java index 9845331dbe75..f6954df689fe 100644 --- a/services/src/main/java/io/druid/cli/CliHistorical.java +++ b/services/src/main/java/io/druid/cli/CliHistorical.java @@ -82,6 +82,7 @@ public void configure(Binder binder) { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/historical"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8083); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(8283); // register Server before binding ZkCoordinator to ensure HTTP endpoints are available immediately LifecycleModule.register(binder, Server.class); diff --git a/services/src/main/java/io/druid/cli/CliInternalHadoopIndexer.java b/services/src/main/java/io/druid/cli/CliInternalHadoopIndexer.java index 5182957ee67d..c31a5f15c4f3 100644 --- a/services/src/main/java/io/druid/cli/CliInternalHadoopIndexer.java +++ b/services/src/main/java/io/druid/cli/CliInternalHadoopIndexer.java @@ -85,6 +85,7 @@ public void configure(Binder binder) { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/internal-hadoop-indexer"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); // bind metadata storage config based on HadoopIOConfig MetadataStorageUpdaterJobSpec metadataSpec = getHadoopDruidIndexerConfig().getSchema() diff --git a/services/src/main/java/io/druid/cli/CliMiddleManager.java b/services/src/main/java/io/druid/cli/CliMiddleManager.java index def4d7c821e6..764c7cb9e025 100644 --- a/services/src/main/java/io/druid/cli/CliMiddleManager.java +++ b/services/src/main/java/io/druid/cli/CliMiddleManager.java @@ -78,6 +78,7 @@ public void configure(Binder 
binder) { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/middlemanager"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8091); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(8291); IndexingServiceModuleHelper.configureTaskRunnerConfigs(binder); @@ -104,7 +105,8 @@ public void configure(Binder binder) public Worker getWorker(@Self DruidNode node, WorkerConfig config) { return new Worker( - node.getHostAndPort(), + node.getServiceScheme(), + node.getHostAndPortToUse(), config.getIp(), config.getCapacity(), config.getVersion() diff --git a/services/src/main/java/io/druid/cli/CliOverlord.java b/services/src/main/java/io/druid/cli/CliOverlord.java index 82ef507f6f80..099fdf71e4ca 100644 --- a/services/src/main/java/io/druid/cli/CliOverlord.java +++ b/services/src/main/java/io/druid/cli/CliOverlord.java @@ -131,6 +131,7 @@ public void configure(Binder binder) .annotatedWith(Names.named("serviceName")) .to(IndexingServiceSelectorConfig.DEFAULT_SERVICE_NAME); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8090); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(8290); } JsonConfigProvider.bind(binder, "druid.coordinator.asOverlord", CoordinatorOverlordServiceConfig.class); diff --git a/services/src/main/java/io/druid/cli/CliPeon.java b/services/src/main/java/io/druid/cli/CliPeon.java index f0c455b81c04..aa845225f3c8 100644 --- a/services/src/main/java/io/druid/cli/CliPeon.java +++ b/services/src/main/java/io/druid/cli/CliPeon.java @@ -141,6 +141,7 @@ public void configure(Binder binder) { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/peon"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(-1); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); PolyBind.createChoice( binder, diff --git a/services/src/main/java/io/druid/cli/CliRealtime.java 
b/services/src/main/java/io/druid/cli/CliRealtime.java index 2c9236511c48..605825721104 100644 --- a/services/src/main/java/io/druid/cli/CliRealtime.java +++ b/services/src/main/java/io/druid/cli/CliRealtime.java @@ -69,6 +69,7 @@ public void configure(Binder binder) { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/realtime"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8084); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(8284); } }, new ChatHandlerServerModule(properties), diff --git a/services/src/main/java/io/druid/cli/CliRealtimeExample.java b/services/src/main/java/io/druid/cli/CliRealtimeExample.java index 5075b65b1a72..efadc9a8ad5d 100644 --- a/services/src/main/java/io/druid/cli/CliRealtimeExample.java +++ b/services/src/main/java/io/druid/cli/CliRealtimeExample.java @@ -82,6 +82,7 @@ public void configure(Binder binder) { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/realtime"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8084); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(8284); binder.bind(DataSegmentPusher.class).to(NoopDataSegmentPusher.class).in(LazySingleton.class); binder.bind(DataSegmentAnnouncer.class).to(NoopDataSegmentAnnouncer.class).in(LazySingleton.class); diff --git a/services/src/main/java/io/druid/cli/CliRouter.java b/services/src/main/java/io/druid/cli/CliRouter.java index a926252ff044..fc2e96feff93 100644 --- a/services/src/main/java/io/druid/cli/CliRouter.java +++ b/services/src/main/java/io/druid/cli/CliRouter.java @@ -84,6 +84,7 @@ public void configure(Binder binder) { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/router"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8888); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(9088); JsonConfigProvider.bind(binder, "druid.router", TieredBrokerConfig.class); diff 
--git a/services/src/main/java/io/druid/cli/CoordinatorOverlordRedirectInfo.java b/services/src/main/java/io/druid/cli/CoordinatorOverlordRedirectInfo.java index 755dce45f396..0cd587040c32 100644 --- a/services/src/main/java/io/druid/cli/CoordinatorOverlordRedirectInfo.java +++ b/services/src/main/java/io/druid/cli/CoordinatorOverlordRedirectInfo.java @@ -51,11 +51,11 @@ public boolean doLocal(String requestURI) } @Override - public URL getRedirectURL(String queryString, String requestURI) + public URL getRedirectURL(String scheme, String queryString, String requestURI) { return isOverlordRequest(requestURI) ? - overlordRedirectInfo.getRedirectURL(queryString, requestURI) : - coordinatorRedirectInfo.getRedirectURL(queryString, requestURI); + overlordRedirectInfo.getRedirectURL(scheme, queryString, requestURI) : + coordinatorRedirectInfo.getRedirectURL(scheme, queryString, requestURI); } private boolean isOverlordRequest(String requestURI) diff --git a/services/src/main/java/io/druid/cli/CreateTables.java b/services/src/main/java/io/druid/cli/CreateTables.java index abb5a7638acc..8e99b19df883 100644 --- a/services/src/main/java/io/druid/cli/CreateTables.java +++ b/services/src/main/java/io/druid/cli/CreateTables.java @@ -36,6 +36,7 @@ import io.druid.metadata.MetadataStorageConnectorConfig; import io.druid.metadata.MetadataStorageTablesConfig; import io.druid.server.DruidNode; +import io.druid.server.initialization.ServerConfig; import java.util.List; @@ -105,7 +106,7 @@ public String getPassword() binder, Key.get(MetadataStorageTablesConfig.class), MetadataStorageTablesConfig.fromBase(base) ); JsonConfigProvider.bindInstance( - binder, Key.get(DruidNode.class, Self.class), new DruidNode("tools", "localhost", -1) + binder, Key.get(DruidNode.class, Self.class), new DruidNode("tools", "localhost", -1, null, new ServerConfig()) ); } } diff --git a/services/src/main/java/io/druid/cli/DumpSegment.java b/services/src/main/java/io/druid/cli/DumpSegment.java index 
cb73808c9e31..f2d6935e2c85 100644 --- a/services/src/main/java/io/druid/cli/DumpSegment.java +++ b/services/src/main/java/io/druid/cli/DumpSegment.java @@ -439,6 +439,7 @@ public void configure(Binder binder) { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/tool"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(9999); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); binder.bind(DruidProcessingConfig.class).toInstance( new DruidProcessingConfig() { diff --git a/services/src/main/java/io/druid/cli/InsertSegment.java b/services/src/main/java/io/druid/cli/InsertSegment.java index ad585af418ae..cac1dc6a7130 100644 --- a/services/src/main/java/io/druid/cli/InsertSegment.java +++ b/services/src/main/java/io/druid/cli/InsertSegment.java @@ -40,6 +40,7 @@ import io.druid.segment.loading.DataSegmentFinder; import io.druid.segment.loading.SegmentLoadingException; import io.druid.server.DruidNode; +import io.druid.server.initialization.ServerConfig; import io.druid.timeline.DataSegment; import java.io.IOException; @@ -86,7 +87,7 @@ protected List getModules() public void configure(Binder binder) { JsonConfigProvider.bindInstance( - binder, Key.get(DruidNode.class, Self.class), new DruidNode("tools", "localhost", -1) + binder, Key.get(DruidNode.class, Self.class), new DruidNode("tools", "localhost", -1, null, new ServerConfig()) ); } } diff --git a/services/src/main/java/io/druid/cli/ResetCluster.java b/services/src/main/java/io/druid/cli/ResetCluster.java index 76c14f59c61a..b11e66f97b58 100644 --- a/services/src/main/java/io/druid/cli/ResetCluster.java +++ b/services/src/main/java/io/druid/cli/ResetCluster.java @@ -39,6 +39,7 @@ import io.druid.metadata.MetadataStorageTablesConfig; import io.druid.segment.loading.DataSegmentKiller; import io.druid.server.DruidNode; +import io.druid.server.initialization.ServerConfig; import io.druid.tasklogs.TaskLogKiller; import java.util.List; @@ -91,7 +92,7 @@ 
protected List getModules() public void configure(Binder binder) { JsonConfigProvider.bindInstance( - binder, Key.get(DruidNode.class, Self.class), new DruidNode("tools", "localhost", -1) + binder, Key.get(DruidNode.class, Self.class), new DruidNode("tools", "localhost", -1, null, new ServerConfig()) ); JsonConfigProvider.bind(binder, "druid.indexer.task", TaskConfig.class); } diff --git a/services/src/main/java/io/druid/cli/ValidateSegments.java b/services/src/main/java/io/druid/cli/ValidateSegments.java index b021c201692f..760d09148dee 100644 --- a/services/src/main/java/io/druid/cli/ValidateSegments.java +++ b/services/src/main/java/io/druid/cli/ValidateSegments.java @@ -88,6 +88,7 @@ public void configure(Binder binder) { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/tool"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(9999); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); binder.bind(DruidProcessingConfig.class).toInstance( new DruidProcessingConfig() { diff --git a/services/src/main/java/io/druid/cli/validate/DruidJsonValidator.java b/services/src/main/java/io/druid/cli/validate/DruidJsonValidator.java index e4b0c9d7af58..5a4efb721a66 100644 --- a/services/src/main/java/io/druid/cli/validate/DruidJsonValidator.java +++ b/services/src/main/java/io/druid/cli/validate/DruidJsonValidator.java @@ -109,6 +109,7 @@ public void configure(Binder binder) { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/validator"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); } } ); diff --git a/sql/src/main/java/io/druid/sql/avatica/DruidAvaticaHandler.java b/sql/src/main/java/io/druid/sql/avatica/DruidAvaticaHandler.java index 117fcdb8985c..e6779458b57c 100644 --- a/sql/src/main/java/io/druid/sql/avatica/DruidAvaticaHandler.java +++ 
b/sql/src/main/java/io/druid/sql/avatica/DruidAvaticaHandler.java @@ -45,7 +45,7 @@ public DruidAvaticaHandler( ) throws InstantiationException, IllegalAccessException, InvocationTargetException { super(new LocalService(druidMeta), avaticaMonitor); - setServerRpcMetadata(new Service.RpcMetadataResponse(druidNode.getHostAndPort())); + setServerRpcMetadata(new Service.RpcMetadataResponse(druidNode.getHostAndPortToUse())); } @Override diff --git a/sql/src/test/java/io/druid/sql/avatica/DruidAvaticaHandlerTest.java b/sql/src/test/java/io/druid/sql/avatica/DruidAvaticaHandlerTest.java index e3103a5416c6..3362aa60bfdd 100644 --- a/sql/src/test/java/io/druid/sql/avatica/DruidAvaticaHandlerTest.java +++ b/sql/src/test/java/io/druid/sql/avatica/DruidAvaticaHandlerTest.java @@ -130,7 +130,7 @@ public void setUp() throws Exception ); final DruidAvaticaHandler handler = new DruidAvaticaHandler( druidMeta, - new DruidNode("dummy", "dummy", 1), + new DruidNode("dummy", "dummy", 1, null, new ServerConfig()), new AvaticaMonitor() ); final int port = new Random().nextInt(9999) + 10000; @@ -575,7 +575,7 @@ public Frame fetch( final DruidAvaticaHandler handler = new DruidAvaticaHandler( smallFrameDruidMeta, - new DruidNode("dummy", "dummy", 1), + new DruidNode("dummy", "dummy", 1, null, new ServerConfig()), new AvaticaMonitor() ); final int port = new Random().nextInt(9999) + 20000; diff --git a/sql/src/test/java/io/druid/sql/calcite/util/TestServerInventoryView.java b/sql/src/test/java/io/druid/sql/calcite/util/TestServerInventoryView.java index aa34d2411de0..baccc0dd1254 100644 --- a/sql/src/test/java/io/druid/sql/calcite/util/TestServerInventoryView.java +++ b/sql/src/test/java/io/druid/sql/calcite/util/TestServerInventoryView.java @@ -52,7 +52,7 @@ public TimelineLookup getTimeline(DataSource dataSource) @Override public void registerSegmentCallback(Executor exec, final SegmentCallback callback) { - final DruidServerMetadata dummyServer = new DruidServerMetadata("dummy", "dummy", 
0, ServerType.HISTORICAL, "dummy", 0); + final DruidServerMetadata dummyServer = new DruidServerMetadata("dummy", "dummy", null, 0, ServerType.HISTORICAL, "dummy", 0); for (final DataSegment segment : segments) { exec.execute(