diff --git a/NOTICE b/NOTICE index 0436fa3885e3..752e2d646438 100644 --- a/NOTICE +++ b/NOTICE @@ -26,7 +26,7 @@ Copyright 2014-2019 The Apache Software Foundation -================= Apache Hadoop 2.8.3 ================= +================= Apache Hadoop 2.8.5 ================= Apache Hadoop Copyright 2009-2017 The Apache Software Foundation diff --git a/core/src/main/java/org/apache/druid/timeline/SegmentWithOvershadowedStatus.java b/core/src/main/java/org/apache/druid/timeline/SegmentWithOvershadowedStatus.java index 3f2972fd07e9..8904de7a381a 100644 --- a/core/src/main/java/org/apache/druid/timeline/SegmentWithOvershadowedStatus.java +++ b/core/src/main/java/org/apache/druid/timeline/SegmentWithOvershadowedStatus.java @@ -34,7 +34,7 @@ public class SegmentWithOvershadowedStatus implements Comparable LOAD_SPEC = ImmutableMap.of("something", "or_other"); + private static final boolean OVERSHADOWED = true; private static final int TEST_VERSION = 0x9; + private static final SegmentWithOvershadowedStatus SEGMENT = createSegmentWithOvershadowedStatus(); - @Before - public void setUp() + private static ObjectMapper createObjectMapper() { + ObjectMapper objectMapper = new TestObjectMapper(); InjectableValues.Std injectableValues = new InjectableValues.Std(); injectableValues.addValue(DataSegment.PruneLoadSpecHolder.class, DataSegment.PruneLoadSpecHolder.DEFAULT); - MAPPER.setInjectableValues(injectableValues); + objectMapper.setInjectableValues(injectableValues); + return objectMapper; } - @Test - public void testUnwrappedSegmentWithOvershadowedStatusDeserialization() throws Exception + private static SegmentWithOvershadowedStatus createSegmentWithOvershadowedStatus() { - final Interval interval = Intervals.of("2011-10-01/2011-10-02"); - final ImmutableMap loadSpec = ImmutableMap.of("something", "or_other"); - - final DataSegment dataSegment = new DataSegment( + DataSegment dataSegment = new DataSegment( "something", - interval, + INTERVAL, "1", - loadSpec, + LOAD_SPEC, 
Arrays.asList("dim1", "dim2"), Arrays.asList("met1", "met2"), NoneShardSpec.instance(), @@ -72,42 +72,58 @@ public void testUnwrappedSegmentWithOvershadowedStatusDeserialization() throws E 1 ); - final SegmentWithOvershadowedStatus segment = new SegmentWithOvershadowedStatus(dataSegment, false); + return new SegmentWithOvershadowedStatus(dataSegment, OVERSHADOWED); + } + @Test + public void testUnwrappedSegmentWithOvershadowedStatusDeserialization() throws Exception + { final Map objectMap = MAPPER.readValue( - MAPPER.writeValueAsString(segment), + MAPPER.writeValueAsString(SEGMENT), JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT ); Assert.assertEquals(11, objectMap.size()); Assert.assertEquals("something", objectMap.get("dataSource")); - Assert.assertEquals(interval.toString(), objectMap.get("interval")); + Assert.assertEquals(INTERVAL.toString(), objectMap.get("interval")); Assert.assertEquals("1", objectMap.get("version")); - Assert.assertEquals(loadSpec, objectMap.get("loadSpec")); + Assert.assertEquals(LOAD_SPEC, objectMap.get("loadSpec")); Assert.assertEquals("dim1,dim2", objectMap.get("dimensions")); Assert.assertEquals("met1,met2", objectMap.get("metrics")); Assert.assertEquals(ImmutableMap.of("type", "none"), objectMap.get("shardSpec")); Assert.assertEquals(TEST_VERSION, objectMap.get("binaryVersion")); Assert.assertEquals(1, objectMap.get("size")); - Assert.assertEquals(false, objectMap.get("overshadowed")); + Assert.assertEquals(OVERSHADOWED, objectMap.get("overshadowed")); + + final String json = MAPPER.writeValueAsString(SEGMENT); - final String json = MAPPER.writeValueAsString(segment); final TestSegmentWithOvershadowedStatus deserializedSegment = MAPPER.readValue( json, TestSegmentWithOvershadowedStatus.class ); - Assert.assertEquals(segment.getDataSegment().getDataSource(), deserializedSegment.getDataSource()); - Assert.assertEquals(segment.getDataSegment().getInterval(), deserializedSegment.getInterval()); - 
Assert.assertEquals(segment.getDataSegment().getVersion(), deserializedSegment.getVersion()); - Assert.assertEquals(segment.getDataSegment().getLoadSpec(), deserializedSegment.getLoadSpec()); - Assert.assertEquals(segment.getDataSegment().getDimensions(), deserializedSegment.getDimensions()); - Assert.assertEquals(segment.getDataSegment().getMetrics(), deserializedSegment.getMetrics()); - Assert.assertEquals(segment.getDataSegment().getShardSpec(), deserializedSegment.getShardSpec()); - Assert.assertEquals(segment.getDataSegment().getSize(), deserializedSegment.getSize()); - Assert.assertEquals(segment.getDataSegment().getId(), deserializedSegment.getId()); + DataSegment dataSegment = SEGMENT.getDataSegment(); + Assert.assertEquals(dataSegment.getDataSource(), deserializedSegment.getDataSource()); + Assert.assertEquals(dataSegment.getInterval(), deserializedSegment.getInterval()); + Assert.assertEquals(dataSegment.getVersion(), deserializedSegment.getVersion()); + Assert.assertEquals(dataSegment.getLoadSpec(), deserializedSegment.getLoadSpec()); + Assert.assertEquals(dataSegment.getDimensions(), deserializedSegment.getDimensions()); + Assert.assertEquals(dataSegment.getMetrics(), deserializedSegment.getMetrics()); + Assert.assertEquals(dataSegment.getShardSpec(), deserializedSegment.getShardSpec()); + Assert.assertEquals(dataSegment.getSize(), deserializedSegment.getSize()); + Assert.assertEquals(dataSegment.getId(), deserializedSegment.getId()); + } + // Previously, the implementation of SegmentWithOvershadowedStatus had @JsonCreator/@JsonProperty and @JsonUnwrapped + // on the same field (dataSegment), which used to work in Jackson 2.6, but does not work with Jackson 2.9: + // https://github.com/FasterXML/jackson-databind/issues/265#issuecomment-264344051 + @Test + public void testJsonCreatorAndJsonUnwrappedAnnotationsAreCompatible() throws Exception + { + String json = MAPPER.writeValueAsString(SEGMENT); + SegmentWithOvershadowedStatus segment = 
MAPPER.readValue(json, SegmentWithOvershadowedStatus.class); + Assert.assertEquals(SEGMENT, segment); } } diff --git a/distribution/bin/check-licenses.py b/distribution/bin/check-licenses.py index 45538782e1d5..7f7474af4214 100755 --- a/distribution/bin/check-licenses.py +++ b/distribution/bin/check-licenses.py @@ -408,4 +408,4 @@ def check_licenses(license_yaml, dependency_reports_root): check_licenses(license_yaml, dependency_reports_root) except KeyboardInterrupt: - print('Interrupted, closing.') \ No newline at end of file + print('Interrupted, closing.') diff --git a/docs/configuration/index.md b/docs/configuration/index.md index 3d0e7f6a90ae..b0c5be85c1fb 100644 --- a/docs/configuration/index.md +++ b/docs/configuration/index.md @@ -1135,7 +1135,7 @@ Additional peon configs include: |`druid.peon.mode`|Choices are "local" and "remote". Setting this to local means you intend to run the peon as a standalone process (Not recommended).|remote| |`druid.indexer.task.baseDir`|Base temporary working directory.|`System.getProperty("java.io.tmpdir")`| |`druid.indexer.task.baseTaskDir`|Base temporary working directory for tasks.|`${druid.indexer.task.baseDir}/persistent/tasks`| -|`druid.indexer.task.defaultHadoopCoordinates`|Hadoop version to use with HadoopIndexTasks that do not request a particular version.|org.apache.hadoop:hadoop-client:2.8.3| +|`druid.indexer.task.defaultHadoopCoordinates`|Hadoop version to use with HadoopIndexTasks that do not request a particular version.|org.apache.hadoop:hadoop-client:2.8.5| |`druid.indexer.task.defaultRowFlushBoundary`|Highest row count before persisting to disk. 
Used for indexing generating tasks.|75000| |`druid.indexer.task.directoryLockTimeout`|Wait this long for zombie peons to exit before giving up on their replacements.|PT10M| |`druid.indexer.task.gracefulShutdownTimeout`|Wait this long on middleManager restart for restorable tasks to gracefully exit.|PT5M| @@ -1157,7 +1157,7 @@ When new segments are created, Druid temporarily stores some preprocessed data i *medium* exist for those buffers: *temporary files* and *off-heap memory*. *Temporary files* (`tmpFile`) are stored under the task working directory (see `druid.indexer.task.baseTaskDir` -configuration above) and thus share it's mounting properties, e. g. they could be backed by HDD, SSD or memory (tmpfs). +configuration above) and thus share its mounting properties, e.g., they could be backed by HDD, SSD or memory (tmpfs). This type of medium may do unnecessary disk I/O and requires some disk space to be available. *Off-heap memory medium* (`offHeapMemory`) creates buffers in off-heap memory of a JVM process that is running a task.
@@ -1196,7 +1196,7 @@ then the value from the configuration below is used: |`druid.worker.numConcurrentMerges`|Maximum number of segment persist or merge operations that can run concurrently across all tasks.|`druid.worker.capacity` / 2, rounded down| |`druid.indexer.task.baseDir`|Base temporary working directory.|`System.getProperty("java.io.tmpdir")`| |`druid.indexer.task.baseTaskDir`|Base temporary working directory for tasks.|`${druid.indexer.task.baseDir}/persistent/tasks`| -|`druid.indexer.task.defaultHadoopCoordinates`|Hadoop version to use with HadoopIndexTasks that do not request a particular version.|org.apache.hadoop:hadoop-client:2.8.3| +|`druid.indexer.task.defaultHadoopCoordinates`|Hadoop version to use with HadoopIndexTasks that do not request a particular version.|org.apache.hadoop:hadoop-client:2.8.5| |`druid.indexer.task.gracefulShutdownTimeout`|Wait this long on Indexer restart for restorable tasks to gracefully exit.|PT5M| |`druid.indexer.task.hadoopWorkingPath`|Temporary working directory for Hadoop tasks.|`/tmp/druid-indexing`| |`druid.indexer.task.restoreTasksOnRestart`|If true, the Indexer will attempt to stop tasks gracefully on shutdown and restore them on restart.|false| diff --git a/docs/operations/other-hadoop.md b/docs/operations/other-hadoop.md index 303a0ac9f473..f76c0eb98926 100644 --- a/docs/operations/other-hadoop.md +++ b/docs/operations/other-hadoop.md @@ -89,7 +89,7 @@ classloader. 2. Batch ingestion uses jars from `hadoop-dependencies/` to submit Map/Reduce jobs (location customizable via the `druid.extensions.hadoopDependenciesDir` runtime property; see [Configuration](../configuration/index.html#extensions)). -`hadoop-client:2.8.3` is the default version of the Hadoop client bundled with Druid for both purposes. This works with +`hadoop-client:2.8.5` is the default version of the Hadoop client bundled with Druid for both purposes. 
This works with many Hadoop distributions (the version does not necessarily need to match), but if you run into issues, you can instead have Druid load libraries that exactly match your distribution. To do this, either copy the jars from your Hadoop cluster, or use the `pull-deps` tool to download the jars from a Maven repository. diff --git a/docs/tutorials/tutorial-batch-hadoop.md b/docs/tutorials/tutorial-batch-hadoop.md index 492c55b9103b..c7d5f0329be3 100644 --- a/docs/tutorials/tutorial-batch-hadoop.md +++ b/docs/tutorials/tutorial-batch-hadoop.md @@ -38,18 +38,18 @@ Once the Docker install is complete, please proceed to the next steps in the tut ## Build the Hadoop docker image -For this tutorial, we've provided a Dockerfile for a Hadoop 2.8.3 cluster, which we'll use to run the batch indexing task. +For this tutorial, we've provided a Dockerfile for a Hadoop 2.8.5 cluster, which we'll use to run the batch indexing task. This Dockerfile and related files are located at `quickstart/tutorial/hadoop/docker`. -From the apache-druid-{{DRUIDVERSION}} package root, run the following commands to build a Docker image named "druid-hadoop-demo" with version tag "2.8.3": +From the apache-druid-{{DRUIDVERSION}} package root, run the following commands to build a Docker image named "druid-hadoop-demo" with version tag "2.8.5": ```bash cd quickstart/tutorial/hadoop/docker -docker build -t druid-hadoop-demo:2.8.3 . +docker build -t druid-hadoop-demo:2.8.5 . ``` -This will start building the Hadoop image. Once the image build is done, you should see the message `Successfully tagged druid-hadoop-demo:2.8.3` printed to the console. +This will start building the Hadoop image. Once the image build is done, you should see the message `Successfully tagged druid-hadoop-demo:2.8.5` printed to the console. 
## Setup the Hadoop docker cluster @@ -77,7 +77,7 @@ On the host machine, add the following entry to `/etc/hosts`: Once the `/tmp/shared` folder has been created and the `etc/hosts` entry has been added, run the following command to start the Hadoop container. ```bash -docker run -it -h druid-hadoop-demo --name druid-hadoop-demo -p 2049:2049 -p 2122:2122 -p 8020:8020 -p 8021:8021 -p 8030:8030 -p 8031:8031 -p 8032:8032 -p 8033:8033 -p 8040:8040 -p 8042:8042 -p 8088:8088 -p 8443:8443 -p 9000:9000 -p 10020:10020 -p 19888:19888 -p 34455:34455 -p 49707:49707 -p 50010:50010 -p 50020:50020 -p 50030:50030 -p 50060:50060 -p 50070:50070 -p 50075:50075 -p 50090:50090 -p 51111:51111 -v /tmp/shared:/shared druid-hadoop-demo:2.8.3 /etc/bootstrap.sh -bash +docker run -it -h druid-hadoop-demo --name druid-hadoop-demo -p 2049:2049 -p 2122:2122 -p 8020:8020 -p 8021:8021 -p 8030:8030 -p 8031:8031 -p 8032:8032 -p 8033:8033 -p 8040:8040 -p 8042:8042 -p 8088:8088 -p 8443:8443 -p 9000:9000 -p 10020:10020 -p 19888:19888 -p 34455:34455 -p 49707:49707 -p 50010:50010 -p 50020:50020 -p 50030:50030 -p 50060:50060 -p 50070:50070 -p 50075:50075 -p 50090:50090 -p 51111:51111 -v /tmp/shared:/shared druid-hadoop-demo:2.8.5 /etc/bootstrap.sh -bash ``` Once the container is started, your terminal will attach to a bash shell running inside the container: diff --git a/examples/quickstart/tutorial/hadoop/docker/Dockerfile b/examples/quickstart/tutorial/hadoop/docker/Dockerfile index b02496fbb999..b43454ca6993 100644 --- a/examples/quickstart/tutorial/hadoop/docker/Dockerfile +++ b/examples/quickstart/tutorial/hadoop/docker/Dockerfile @@ -14,11 +14,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Creates pseudo distributed hadoop 2.8.3 with java 8 +# Creates pseudo distributed hadoop 2.8.5 with java 8 # # Modified from the SequenceIQ Dockerfiles at https://github.com/sequenceiq/hadoop-docker # -# docker build -t druid-hadoop-demo:2.8.3 . 
+# docker build -t druid-hadoop-demo:2.8.5 . FROM sequenceiq/pam:centos-6.5 MAINTAINER SequenceIQ @@ -31,8 +31,10 @@ RUN yum clean all \ && yum install -y curl which tar sudo openssh-server openssh-clients rsync yum-plugin-ovl\ && yum clean all \ && yum update -y libselinux \ + && yum update -y nss \ && yum clean all # update libselinux. see https://github.com/sequenceiq/hadoop-docker/issues/14 +# update nss. see https://unix.stackexchange.com/questions/280548/curl-doesnt-connect-to-https-while-wget-does-nss-error-12286 # passwordless ssh RUN ssh-keygen -q -N "" -t dsa -f /etc/ssh/ssh_host_dsa_key @@ -49,8 +51,8 @@ ENV JAVA_HOME /usr/lib/jvm/zulu-8 ENV PATH $PATH:$JAVA_HOME/bin # hadoop -RUN curl -s https://archive.apache.org/dist/hadoop/core/hadoop-2.8.3/hadoop-2.8.3.tar.gz | tar -xz -C /usr/local/ -RUN cd /usr/local && ln -s ./hadoop-2.8.3 hadoop +RUN curl -s https://archive.apache.org/dist/hadoop/core/hadoop-2.8.5/hadoop-2.8.5.tar.gz | tar -xz -C /usr/local/ +RUN cd /usr/local && ln -s ./hadoop-2.8.5 hadoop ENV HADOOP_PREFIX /usr/local/hadoop ENV HADOOP_COMMON_HOME /usr/local/hadoop diff --git a/examples/quickstart/tutorial/wikipedia-index-hadoop.json b/examples/quickstart/tutorial/wikipedia-index-hadoop.json index 1ad80747c04d..c727fcfcc38b 100644 --- a/examples/quickstart/tutorial/wikipedia-index-hadoop.json +++ b/examples/quickstart/tutorial/wikipedia-index-hadoop.json @@ -75,5 +75,5 @@ } } }, - "hadoopDependencyCoordinates": ["org.apache.hadoop:hadoop-client:2.8.3"] + "hadoopDependencyCoordinates": ["org.apache.hadoop:hadoop-client:2.8.5"] } diff --git a/extensions-core/postgresql-metadata-storage/pom.xml b/extensions-core/postgresql-metadata-storage/pom.xml index 2ddd17e560f9..43c6a4088465 100644 --- a/extensions-core/postgresql-metadata-storage/pom.xml +++ b/extensions-core/postgresql-metadata-storage/pom.xml @@ -50,7 +50,7 @@ org.postgresql postgresql - 9.4.1208.jre7 + 42.2.8.jre7 org.jdbi diff --git 
a/indexing-hadoop/src/main/java/org/apache/druid/indexer/IndexGeneratorJob.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/IndexGeneratorJob.java index 5ee8d5e11de8..a7e3f498fd6d 100644 --- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/IndexGeneratorJob.java +++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/IndexGeneratorJob.java @@ -77,6 +77,7 @@ import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; +import java.io.InputStream; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Iterator; @@ -117,7 +118,7 @@ public static List getPublishedSegments(HadoopDruidIndexerConfig co FileSystem fs = descriptorInfoDir.getFileSystem(conf); for (FileStatus status : fs.listStatus(descriptorInfoDir)) { - final DataSegment segment = jsonMapper.readValue(fs.open(status.getPath()), DataSegment.class); + final DataSegment segment = jsonMapper.readValue((InputStream) fs.open(status.getPath()), DataSegment.class); publishedSegmentsBuilder.add(segment); log.info("Adding segment %s to the list of published segments", segment.getId()); } diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/config/TaskConfig.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/config/TaskConfig.java index 52bf08357976..7c22dad5b62b 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/common/config/TaskConfig.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/config/TaskConfig.java @@ -34,7 +34,7 @@ public class TaskConfig { public static final List DEFAULT_DEFAULT_HADOOP_COORDINATES = ImmutableList.of( - "org.apache.hadoop:hadoop-client:2.8.3" + "org.apache.hadoop:hadoop-client:2.8.5" ); private static final Period DEFAULT_DIRECTORY_LOCK_TIMEOUT = new Period("PT10M"); diff --git a/integration-tests/src/test/resources/results/auth_test_sys_schema_segments.json 
b/integration-tests/src/test/resources/results/auth_test_sys_schema_segments.json index 4437e725e28b..a169cfe87361 100644 --- a/integration-tests/src/test/resources/results/auth_test_sys_schema_segments.json +++ b/integration-tests/src/test/resources/results/auth_test_sys_schema_segments.json @@ -13,6 +13,6 @@ "is_available": 1, "is_realtime": 0, "is_overshadowed": 0, - "payload": "{\"dataSource\":\"auth_test\",\"interval\":\"2012-12-29T00:00:00.000Z/2013-01-10T08:00:00.000Z\",\"version\":\"2013-01-10T08:13:47.830Z_v9\",\"loadSpec\":{\"load spec is pruned, because it's not needed on Brokers, but eats a lot of heap space\":\"\"},\"dimensions\":\"anonymous,area_code,city,continent_code,country_name,dma_code,geo,language,namespace,network,newpage,page,postal_code,region_lookup,robot,unpatrolled,user\",\"metrics\":\"added,count,deleted,delta,delta_hist,unique_users,variation\",\"shardSpec\":{\"type\":\"none\"},\"binaryVersion\":9,\"size\":446027801,\"identifier\":\"auth_test_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9\",\"overshadowed\":false}" + "payload": "{\"overshadowed\":false,\"dataSource\":\"auth_test\",\"interval\":\"2012-12-29T00:00:00.000Z/2013-01-10T08:00:00.000Z\",\"version\":\"2013-01-10T08:13:47.830Z_v9\",\"loadSpec\":{\"load spec is pruned, because it's not needed on Brokers, but eats a lot of heap space\":\"\"},\"dimensions\":\"anonymous,area_code,city,continent_code,country_name,dma_code,geo,language,namespace,network,newpage,page,postal_code,region_lookup,robot,unpatrolled,user\",\"metrics\":\"added,count,deleted,delta,delta_hist,unique_users,variation\",\"shardSpec\":{\"type\":\"none\"},\"binaryVersion\":9,\"size\":446027801,\"identifier\":\"auth_test_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9\"}" } ] diff --git a/licenses.yaml b/licenses.yaml index 026b626015de..a7e5f5524b9a 100644 --- a/licenses.yaml +++ b/licenses.yaml @@ -179,7 +179,7 @@ name: ClassMate license_category: binary 
module: java-core license_name: Apache License version 2.0 -version: 1.0.0 +version: 1.1.0 libraries: - com.fasterxml: classmate notices: @@ -196,7 +196,7 @@ name: Jackson license_category: binary module: java-core license_name: Apache License version 2.0 -version: 2.6.7 +version: 2.9.10 libraries: - com.fasterxml.jackson.core: jackson-annotations - com.fasterxml.jackson.core: jackson-core @@ -237,7 +237,7 @@ name: Caffeine license_category: binary module: java-core license_name: Apache License version 2.0 -version: 2.5.5 +version: 2.8.0 libraries: - com.github.ben-manes.caffeine: caffeine @@ -389,7 +389,7 @@ name: Apache Commons Codec license_category: binary module: java-core license_name: Apache License version 2.0 -version: 1.12 +version: 1.13 libraries: - commons-codec: commons-codec notices: @@ -504,7 +504,7 @@ name: Apache Commons Compress license_category: binary module: java-core license_name: Apache License version 2.0 -version: 1.18 +version: 1.19 libraries: - org.apache.commons: commons-compress notices: @@ -758,7 +758,7 @@ name: Netty license_category: binary module: java-core license_name: Apache License version 2.0 -version: 4.1.30.Final +version: 4.1.42.Final libraries: - io.netty: netty-buffer - io.netty: netty-codec @@ -1135,7 +1135,7 @@ name: Apache Calcite Avatica license_category: binary module: java-core license_name: Apache License version 2.0 -version: 1.12.0 +version: 1.15.0 libraries: - org.apache.calcite.avatica: avatica-core - org.apache.calcite.avatica: avatica-metrics @@ -1289,7 +1289,7 @@ name: Apache HttpClient license_category: binary module: hadoop-client license_name: Apache License version 2.0 -version: 4.5.2 +version: 4.5.6 libraries: - org.apache.httpcomponents: httpclient notices: @@ -1317,7 +1317,7 @@ name: Apache Log4j license_category: binary module: java-core license_name: Apache License version 2.0 -version: 2.5 +version: 2.8.2 libraries: - org.apache.logging.log4j: log4j-1.2-api - org.apache.logging.log4j: log4j-api @@ 
-1479,7 +1479,7 @@ name: Plexus Common Utilities license_category: binary module: java-core license_name: Apache License version 2.0 -version: 3.0.15 +version: 3.0.24 libraries: - org.codehaus.plexus: plexus-utils notices: @@ -1501,7 +1501,7 @@ name: Jetty license_category: binary module: java-core license_name: Apache License version 2.0 -version: 9.4.10.v20180503 +version: 9.4.12.v20180830 libraries: - org.eclipse.jetty: jetty-client - org.eclipse.jetty: jetty-continuation @@ -1649,7 +1649,7 @@ name: Hibernate Validator Engine license_category: binary module: java-core license_name: Apache License version 2.0 -version: 5.1.3.Final +version: 5.2.5.Final libraries: - org.hibernate: hibernate-validator @@ -1788,7 +1788,7 @@ name: JBoss Logging 3 license_category: binary module: java-core license_name: Apache License version 2.0 -version: 3.1.3.GA +version: 3.2.1.Final libraries: - org.jboss.logging: jboss-logging @@ -1879,7 +1879,7 @@ name: Apache Hadoop license_category: binary module: hadoop-client license_name: Apache License version 2.0 -version: 2.8.3 +version: 2.8.5 libraries: - org.apache.hadoop: hadoop-annotations - org.apache.hadoop: hadoop-auth @@ -2354,7 +2354,7 @@ name: Nimbus JOSE+JWT license_category: binary module: hadoop-client license_name: Apache License version 2.0 -version: 3.9 +version: 4.41.1 libraries: - com.nimbusds: nimbus-jose-jwt @@ -2581,125 +2581,6 @@ notices: file. --- -name: Netty -license_category: binary -module: hadoop-client -license_name: Apache License version 2.0 -version: 3.6.2.Final -libraries: - - io.netty: netty -notices: - - netty: | - == - The Netty Project - ================= - - Please visit the Netty web site for more information: - - * http://netty.io/ - - Copyright 2011 The Netty Project - - The Netty Project licenses this file to you under the Apache License, - version 2.0 (the "License"); you may not use this file except in compliance - with the License. 
You may obtain a copy of the License at: - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - - Also, please refer to each LICENSE..txt file, which is located in - the 'license' directory of the distribution file, for the license terms of the - components that this product depends on. - - ------------------------------------------------------------------------------- - This product contains the extensions to Java Collections Framework which has - been derived from the works by JSR-166 EG, Doug Lea, and Jason T. Greene: - - * LICENSE: - * license/LICENSE.jsr166y.txt (Public Domain) - * HOMEPAGE: - * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/ - * http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ - - This product contains a modified version of Robert Harder's Public Domain - Base64 Encoder and Decoder, which can be obtained at: - - * LICENSE: - * license/LICENSE.base64.txt (Public Domain) - * HOMEPAGE: - * http://iharder.sourceforge.net/current/java/base64/ - - This product contains a modified version of 'JZlib', a re-implementation of - zlib in pure Java, which can be obtained at: - - * LICENSE: - * license/LICENSE.jzlib.txt (BSD Style License) - * HOMEPAGE: - * http://www.jcraft.com/jzlib/ - - This product optionally depends on 'Protocol Buffers', Google's data - interchange format, which can be obtained at: - - * LICENSE: - * license/LICENSE.protobuf.txt (New BSD License) - * HOMEPAGE: - * http://code.google.com/p/protobuf/ - - This product optionally depends on 'SLF4J', a simple logging facade for Java, - which can be obtained at: - - * LICENSE: - * license/LICENSE.slf4j.txt (MIT License) - * HOMEPAGE: - * 
http://www.slf4j.org/ - - This product optionally depends on 'Apache Commons Logging', a logging - framework, which can be obtained at: - - * LICENSE: - * license/LICENSE.commons-logging.txt (Apache License 2.0) - * HOMEPAGE: - * http://commons.apache.org/logging/ - - This product optionally depends on 'Apache Log4J', a logging framework, - which can be obtained at: - - * LICENSE: - * license/LICENSE.log4j.txt (Apache License 2.0) - * HOMEPAGE: - * http://logging.apache.org/log4j/ - - This product optionally depends on 'JBoss Logging', a logging framework, - which can be obtained at: - - * LICENSE: - * license/LICENSE.jboss-logging.txt (GNU LGPL 2.1) - * HOMEPAGE: - * http://anonsvn.jboss.org/repos/common/common-logging-spi/ - - This product optionally depends on 'Apache Felix', an open source OSGi - framework implementation, which can be obtained at: - - * LICENSE: - * license/LICENSE.felix.txt (Apache License 2.0) - * HOMEPAGE: - * http://felix.apache.org/ - - This product optionally depends on 'Webbit', a Java event based - WebSocket and HTTP server: - - * LICENSE: - * license/LICENSE.webbit.txt (BSD License) - * HOMEPAGE: - * https://github.com/joewalnes/webbit - ---- - name: Apache Log4j license_category: binary module: hadoop-client @@ -2916,7 +2797,7 @@ name: Apache Avro license_category: binary module: extensions/druid-avro-extensions license_name: Apache License version 2.0 -version: 1.9.0 +version: 1.9.1 libraries: - org.apache.avro: avro - org.apache.avro: avro-mapred @@ -3456,7 +3337,7 @@ name: ICU4J license_category: binary module: java-core license_name: ICU License -version: 54.1.1 +version: 55.1 copyright: International Business Machines Corporation and others license_file_path: licenses/bin/icu4j.ICU libraries: @@ -3746,15 +3627,13 @@ libraries: --- -name: \"Java Concurrency In Practice\" Book Annotations +name: JCIP Annotations Under Apache License license_category: binary -module: hadoop-client -license_name: Creative Commons Attribution 2.5 
-version: 1.0 -copyright: Brian Goetz and Tim Peierls -license_file_path: licenses/bin/creative-commons-2.5.LICENSE +module: extensions/druid-hdfs-storage +license_name: Apache License version 2.0 +version: 1.0-1 libraries: - - net.jcip: jcip-annotations + - com.github.stephenc.jcip: jcip-annotations --- diff --git a/licenses/bin/creative-commons-2.5.LICENSE b/licenses/bin/creative-commons-2.5.LICENSE deleted file mode 100644 index f2aa747f5283..000000000000 --- a/licenses/bin/creative-commons-2.5.LICENSE +++ /dev/null @@ -1,60 +0,0 @@ - -Attribution-NonCommercial 2.5 - -CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE LEGAL SERVICES. DISTRIBUTION OF THIS LICENSE DOES NOT CREATE AN ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES REGARDING THE INFORMATION PROVIDED, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM ITS USE. -License - -THE WORK (AS DEFINED BELOW) IS PROVIDED UNDER THE TERMS OF THIS CREATIVE COMMONS PUBLIC LICENSE ("CCPL" OR "LICENSE"). THE WORK IS PROTECTED BY COPYRIGHT AND/OR OTHER APPLICABLE LAW. ANY USE OF THE WORK OTHER THAN AS AUTHORIZED UNDER THIS LICENSE OR COPYRIGHT LAW IS PROHIBITED. - -BY EXERCISING ANY RIGHTS TO THE WORK PROVIDED HERE, YOU ACCEPT AND AGREE TO BE BOUND BY THE TERMS OF THIS LICENSE. THE LICENSOR GRANTS YOU THE RIGHTS CONTAINED HERE IN CONSIDERATION OF YOUR ACCEPTANCE OF SUCH TERMS AND CONDITIONS. - -1. Definitions - -"Collective Work" means a work, such as a periodical issue, anthology or encyclopedia, in which the Work in its entirety in unmodified form, along with a number of other contributions, constituting separate and independent works in themselves, are assembled into a collective whole. A work that constitutes a Collective Work will not be considered a Derivative Work (as defined below) for the purposes of this License. 
-"Derivative Work" means a work based upon the Work or upon the Work and other pre-existing works, such as a translation, musical arrangement, dramatization, fictionalization, motion picture version, sound recording, art reproduction, abridgment, condensation, or any other form in which the Work may be recast, transformed, or adapted, except that a work that constitutes a Collective Work will not be considered a Derivative Work for the purpose of this License. For the avoidance of doubt, where the Work is a musical composition or sound recording, the synchronization of the Work in timed-relation with a moving image ("synching") will be considered a Derivative Work for the purpose of this License. -"Licensor" means the individual or entity that offers the Work under the terms of this License. -"Original Author" means the individual or entity who created the Work. -"Work" means the copyrightable work of authorship offered under the terms of this License. -"You" means an individual or entity exercising rights under this License who has not previously violated the terms of this License with respect to the Work, or who has received express permission from the Licensor to exercise rights under this License despite a previous violation. -2. Fair Use Rights. Nothing in this license is intended to reduce, limit, or restrict any rights arising from fair use, first sale or other limitations on the exclusive rights of the copyright owner under copyright law or other applicable laws. - -3. License Grant. 
Subject to the terms and conditions of this License, Licensor hereby grants You a worldwide, royalty-free, non-exclusive, perpetual (for the duration of the applicable copyright) license to exercise the rights in the Work as stated below: - -to reproduce the Work, to incorporate the Work into one or more Collective Works, and to reproduce the Work as incorporated in the Collective Works; -to create and reproduce Derivative Works; -to distribute copies or phonorecords of, display publicly, perform publicly, and perform publicly by means of a digital audio transmission the Work including as incorporated in Collective Works; -to distribute copies or phonorecords of, display publicly, perform publicly, and perform publicly by means of a digital audio transmission Derivative Works; -The above rights may be exercised in all media and formats whether now known or hereafter devised. The above rights include the right to make such modifications as are technically necessary to exercise the rights in other media and formats. All rights not expressly granted by Licensor are hereby reserved, including but not limited to the rights set forth in Sections 4(d) and 4(e). - -4. Restrictions.The license granted in Section 3 above is expressly made subject to and limited by the following restrictions: - -You may distribute, publicly display, publicly perform, or publicly digitally perform the Work only under the terms of this License, and You must include a copy of, or the Uniform Resource Identifier for, this License with every copy or phonorecord of the Work You distribute, publicly display, publicly perform, or publicly digitally perform. You may not offer or impose any terms on the Work that alter or restrict the terms of this License or the recipients' exercise of the rights granted hereunder. You may not sublicense the Work. You must keep intact all notices that refer to this License and to the disclaimer of warranties. 
You may not distribute, publicly display, publicly perform, or publicly digitally perform the Work with any technological measures that control access or use of the Work in a manner inconsistent with the terms of this License Agreement. The above applies to the Work as incorporated in a Collective Work, but this does not require the Collective Work apart from the Work itself to be made subject to the terms of this License. If You create a Collective Work, upon notice from any Licensor You must, to the extent practicable, remove from the Collective Work any credit as required by clause 4(c), as requested. If You create a Derivative Work, upon notice from any Licensor You must, to the extent practicable, remove from the Derivative Work any credit as required by clause 4(c), as requested. -You may not exercise any of the rights granted to You in Section 3 above in any manner that is primarily intended for or directed toward commercial advantage or private monetary compensation. The exchange of the Work for other copyrighted works by means of digital file-sharing or otherwise shall not be considered to be intended for or directed toward commercial advantage or private monetary compensation, provided there is no payment of any monetary compensation in connection with the exchange of copyrighted works. -If you distribute, publicly display, publicly perform, or publicly digitally perform the Work or any Derivative Works or Collective Works, You must keep intact all copyright notices for the Work and provide, reasonable to the medium or means You are utilizing: (i) the name of Original Author (or pseudonym, if applicable) if supplied, and/or (ii) if the Original Author and/or Licensor designate another party or parties (e.g. 
a sponsor institute, publishing entity, journal) for attribution in Licensor's copyright notice, terms of service or by other reasonable means, the name of such party or parties; the title of the Work if supplied; to the extent reasonably practicable, the Uniform Resource Identifier, if any, that Licensor specifies to be associated with the Work, unless such URI does not refer to the copyright notice or licensing information for the Work; and in the case of a Derivative Work, a credit identifying the use of the Work in the Derivative Work (e.g., "French translation of the Work by Original Author," or "Screenplay based on original Work by Original Author"). Such credit may be implemented in any reasonable manner; provided, however, that in the case of a Derivative Work or Collective Work, at a minimum such credit will appear where any other comparable authorship credit appears and in a manner at least as prominent as such other comparable authorship credit. -For the avoidance of doubt, where the Work is a musical composition: - -Performance Royalties Under Blanket Licenses. Licensor reserves the exclusive right to collect, whether individually or via a performance rights society (e.g. ASCAP, BMI, SESAC), royalties for the public performance or public digital performance (e.g. webcast) of the Work if that performance is primarily intended for or directed toward commercial advantage or private monetary compensation. -Mechanical Rights and Statutory Royalties. Licensor reserves the exclusive right to collect, whether individually or via a music rights agency or designated agent (e.g. Harry Fox Agency), royalties for any phonorecord You create from the Work ("cover version") and distribute, subject to the compulsory license created by 17 USC Section 115 of the US Copyright Act (or the equivalent in other jurisdictions), if Your distribution of such cover version is primarily intended for or directed toward commercial advantage or private monetary compensation. 
-Webcasting Rights and Statutory Royalties. For the avoidance of doubt, where the Work is a sound recording, Licensor reserves the exclusive right to collect, whether individually or via a performance-rights society (e.g. SoundExchange), royalties for the public digital performance (e.g. webcast) of the Work, subject to the compulsory license created by 17 USC Section 114 of the US Copyright Act (or the equivalent in other jurisdictions), if Your public digital performance is primarily intended for or directed toward commercial advantage or private monetary compensation. -5. Representations, Warranties and Disclaimer - -UNLESS OTHERWISE MUTUALLY AGREED TO BY THE PARTIES IN WRITING, LICENSOR OFFERS THE WORK AS-IS AND MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE WORK, EXPRESS, IMPLIED, STATUTORY OR OTHERWISE, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF TITLE, MERCHANTIBILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, ACCURACY, OR THE PRESENCE OF ABSENCE OF ERRORS, WHETHER OR NOT DISCOVERABLE. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OF IMPLIED WARRANTIES, SO SUCH EXCLUSION MAY NOT APPLY TO YOU. - -6. Limitation on Liability. EXCEPT TO THE EXTENT REQUIRED BY APPLICABLE LAW, IN NO EVENT WILL LICENSOR BE LIABLE TO YOU ON ANY LEGAL THEORY FOR ANY SPECIAL, INCIDENTAL, CONSEQUENTIAL, PUNITIVE OR EXEMPLARY DAMAGES ARISING OUT OF THIS LICENSE OR THE USE OF THE WORK, EVEN IF LICENSOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. - -7. Termination - -This License and the rights granted hereunder will terminate automatically upon any breach by You of the terms of this License. Individuals or entities who have received Derivative Works or Collective Works from You under this License, however, will not have their licenses terminated provided such individuals or entities remain in full compliance with those licenses. Sections 1, 2, 5, 6, 7, and 8 will survive any termination of this License. 
-Subject to the above terms and conditions, the license granted here is perpetual (for the duration of the applicable copyright in the Work). Notwithstanding the above, Licensor reserves the right to release the Work under different license terms or to stop distributing the Work at any time; provided, however that any such election will not serve to withdraw this License (or any other license that has been, or is required to be, granted under the terms of this License), and this License will continue in full force and effect unless terminated as stated above. -8. Miscellaneous - -Each time You distribute or publicly digitally perform the Work or a Collective Work, the Licensor offers to the recipient a license to the Work on the same terms and conditions as the license granted to You under this License. -Each time You distribute or publicly digitally perform a Derivative Work, Licensor offers to the recipient a license to the original Work on the same terms and conditions as the license granted to You under this License. -If any provision of this License is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this License, and without further action by the parties to this agreement, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable. -No term or provision of this License shall be deemed waived and no breach consented to unless such waiver or consent shall be in writing and signed by the party to be charged with such waiver or consent. -This License constitutes the entire agreement between the parties with respect to the Work licensed here. There are no understandings, agreements or representations with respect to the Work not specified here. Licensor shall not be bound by any additional provisions that may appear in any communication from You. 
This License may not be modified without the mutual written agreement of the Licensor and You. -Creative Commons is not a party to this License, and makes no warranty whatsoever in connection with the Work. Creative Commons will not be liable to You or any party on any legal theory for any damages whatsoever, including without limitation any general, special, incidental or consequential damages arising in connection to this license. Notwithstanding the foregoing two (2) sentences, if Creative Commons has expressly identified itself as the Licensor hereunder, it shall have all rights and obligations of Licensor. - -Except for the limited purpose of indicating to the public that the Work is licensed under the CCPL, neither party will use the trademark "Creative Commons" or any related trademark or logo of Creative Commons without the prior written consent of Creative Commons. Any permitted use will be in compliance with Creative Commons' then-current trademark usage guidelines, as may be published on its website or otherwise made available upon request from time to time. - -Creative Commons may be contacted at https://creativecommons.org/. 
\ No newline at end of file diff --git a/pom.xml b/pom.xml index 4f1fb16064b9..f794d645a202 100644 --- a/pom.xml +++ b/pom.xml @@ -78,31 +78,30 @@ 0.9.0.M2 4.1.0 2.12.0 - 1.12.0 - 1.9.0 + 1.15.0 + 1.9.1 1.17.0 10.14.2.0 4.0.0 16.0.1 4.1.0 1.3 - 9.4.10.v20180503 + 9.4.12.v20180830 1.19.3 - - 2.6.7 + 2.9.10 1.9.13 - 2.5 + 2.8.2 3.10.6.Final - 4.1.30.Final + 4.1.42.Final v10.14.2 6.5.0 1.7.12 - 2.8.3 + 2.8.5 2.0.2 1.11.199 - 2.5.5 + 2.8.0 3.4.14 2.5.7 @@ -201,7 +200,7 @@ commons-codec commons-codec - 1.12 + 1.13 commons-io @@ -322,9 +321,14 @@ - org.apache.curator - curator-x-discovery - ${apache.curator.version} + + org.apache.curator + curator-x-discovery + ${apache.curator.version} org.apache.calcite @@ -389,7 +393,7 @@ com.ibm.icu icu4j - 54.1.1 + 55.1 org.mozilla @@ -399,7 +403,7 @@ org.apache.commons commons-compress - 1.18 + 1.19 org.tukaani @@ -454,7 +458,7 @@ org.hibernate hibernate-validator - 5.1.3.Final + 5.2.5.Final javax.validation @@ -934,7 +938,7 @@ org.codehaus.plexus plexus-utils - 3.0.15 + 3.0.24 com.github.ben-manes.caffeine diff --git a/server/src/test/java/org/apache/druid/client/cache/CaffeineCacheTest.java b/server/src/test/java/org/apache/druid/client/cache/CaffeineCacheTest.java index 1c3853b0f5ba..30367ec4b003 100644 --- a/server/src/test/java/org/apache/druid/client/cache/CaffeineCacheTest.java +++ b/server/src/test/java/org/apache/druid/client/cache/CaffeineCacheTest.java @@ -36,8 +36,6 @@ import org.junit.Before; import org.junit.Test; -import java.lang.reflect.Field; -import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Collections; import java.util.Map; @@ -48,7 +46,6 @@ public class CaffeineCacheTest { - private static final int RANDOM_SEED = 3478178; private static final byte[] HI = StringUtils.toUtf8("hiiiiiiiiiiiiiiiiiii"); private static final byte[] HO = StringUtils.toUtf8("hooooooooooooooooooo"); @@ -194,7 +191,6 @@ public long getSizeInBytes() final Cache.NamedKey key1 = new Cache.NamedKey("the", s1); 
final Cache.NamedKey key2 = new Cache.NamedKey("the", s2); final CaffeineCache cache = CaffeineCache.create(config, Runnable::run); - forceRandomSeed(cache); Assert.assertNull(cache.get(key1)); Assert.assertNull(cache.get(key2)); @@ -475,18 +471,6 @@ public void put(Cache cache, Cache.NamedKey key, Integer value) { cache.put(key, Ints.toByteArray(value)); } - - // See - public static void forceRandomSeed(CaffeineCache cache) throws Exception - { - final Map map = cache.getCache().asMap(); - final Method getFrequencySketch = map.getClass().getDeclaredMethod("frequencySketch"); - getFrequencySketch.setAccessible(true); - final Object frequencySketch = getFrequencySketch.invoke(map); - final Field seedField = frequencySketch.getClass().getDeclaredField("randomSeed"); - seedField.setAccessible(true); - seedField.setInt(frequencySketch, RANDOM_SEED); - } } class CaffeineCacheProviderWithConfig extends CaffeineCacheProvider diff --git a/server/src/test/java/org/apache/druid/guice/FirehoseModuleTest.java b/server/src/test/java/org/apache/druid/guice/FirehoseModuleTest.java index e4f034032e01..8ecc93dece2b 100644 --- a/server/src/test/java/org/apache/druid/guice/FirehoseModuleTest.java +++ b/server/src/test/java/org/apache/druid/guice/FirehoseModuleTest.java @@ -19,7 +19,6 @@ package org.apache.druid.guice; -import com.fasterxml.jackson.databind.AnnotationIntrospector; import com.fasterxml.jackson.databind.Module; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.cfg.MapperConfig; @@ -69,8 +68,7 @@ private static Set getFirehoseFactorySubtypeClasses(ObjectMapper objectMa { Class parentClass = FirehoseFactory.class; MapperConfig config = objectMapper.getDeserializationConfig(); - AnnotationIntrospector annotationIntrospector = config.getAnnotationIntrospector(); - AnnotatedClass ac = AnnotatedClass.constructWithoutSuperTypes(parentClass, annotationIntrospector, config); + AnnotatedClass ac = 
AnnotatedClass.constructWithoutSuperTypes(parentClass, config); Collection subtypes = objectMapper.getSubtypeResolver().collectAndResolveSubtypesByClass(config, ac); Assert.assertNotNull(subtypes); return subtypes.stream() diff --git a/server/src/test/java/org/apache/druid/segment/indexing/DataSchemaTest.java b/server/src/test/java/org/apache/druid/segment/indexing/DataSchemaTest.java index 437efd313ce0..1490506f1941 100644 --- a/server/src/test/java/org/apache/druid/segment/indexing/DataSchemaTest.java +++ b/server/src/test/java/org/apache/druid/segment/indexing/DataSchemaTest.java @@ -282,7 +282,7 @@ public void testSerdeWithInvalidParserMap() throws Exception expectedException.expect(CoreMatchers.instanceOf(IllegalArgumentException.class)); expectedException.expectCause(CoreMatchers.instanceOf(JsonMappingException.class)); expectedException.expectMessage( - "Instantiation of [simple type, class org.apache.druid.data.input.impl.StringInputRowParser] value failed: parseSpec" + "Cannot construct instance of `org.apache.druid.data.input.impl.StringInputRowParser`, problem: parseSpec" ); // Jackson creates a default type parser (StringInputRowParser) for an invalid type. 
diff --git a/server/src/test/java/org/apache/druid/server/log/LoggingRequestLoggerTest.java b/server/src/test/java/org/apache/druid/server/log/LoggingRequestLoggerTest.java index b7e40bfe2175..ed56572e3d35 100644 --- a/server/src/test/java/org/apache/druid/server/log/LoggingRequestLoggerTest.java +++ b/server/src/test/java/org/apache/druid/server/log/LoggingRequestLoggerTest.java @@ -42,7 +42,9 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.core.Appender; import org.apache.logging.log4j.core.Logger; +import org.apache.logging.log4j.core.LoggerContext; import org.apache.logging.log4j.core.appender.OutputStreamAppender; +import org.apache.logging.log4j.core.config.Configuration; import org.apache.logging.log4j.core.layout.JsonLayout; import org.joda.time.DateTime; import org.joda.time.Interval; @@ -154,11 +156,25 @@ public QueryRunner lookup(Query query, QuerySegmentWalker walker) @BeforeClass public static void setUpStatic() { + LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false); + Configuration configuration = loggerContext.getConfiguration(); appender = OutputStreamAppender .newBuilder() .setName("test stream") .setTarget(BAOS) - .setLayout(JsonLayout.createLayout(false, true, false, true, true, StandardCharsets.UTF_8)) + .setLayout(JsonLayout.createLayout( + configuration, + false, + true, + true, + false, + true, + true, + "[", + "]", + StandardCharsets.UTF_8, + true + )) .build(); final Logger logger = (Logger) LogManager.getLogger(LoggingRequestLogger.class); diff --git a/server/src/test/java/org/apache/druid/server/log/RequestLoggerProviderTest.java b/server/src/test/java/org/apache/druid/server/log/RequestLoggerProviderTest.java index 49142550a66a..0f9ece45a118 100644 --- a/server/src/test/java/org/apache/druid/server/log/RequestLoggerProviderTest.java +++ b/server/src/test/java/org/apache/druid/server/log/RequestLoggerProviderTest.java @@ -85,7 +85,7 @@ public void testLoggerPropertiesWithNoType() ); 
expectedException.expect(ProvisionException.class); - expectedException.expectMessage("missing property 'type'"); + expectedException.expectMessage("missing type id property 'type'"); configurator.configurate( properties, diff --git a/services/src/main/java/org/apache/druid/cli/PullDependencies.java b/services/src/main/java/org/apache/druid/cli/PullDependencies.java index 2d98aecf7517..29b5ef0d7018 100644 --- a/services/src/main/java/org/apache/druid/cli/PullDependencies.java +++ b/services/src/main/java/org/apache/druid/cli/PullDependencies.java @@ -168,7 +168,7 @@ public class PullDependencies implements Runnable @Option( name = "--no-default-hadoop", - description = "Don't pull down the default hadoop coordinate, i.e., org.apache.hadoop:hadoop-client:2.8.3. If `-h` option is supplied, then default hadoop coordinate will not be downloaded.", + description = "Don't pull down the default hadoop coordinate, i.e., org.apache.hadoop:hadoop-client:2.8.5. If `-h` option is supplied, then default hadoop coordinate will not be downloaded.", required = false) public boolean noDefaultHadoop = false;