From 7b9a6c31e8a6480f5c72ad794ece5729010e36d2 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Tue, 13 Apr 2021 09:54:35 +0300 Subject: [PATCH 01/15] Issue #10953: ARM64 - Build and test on AWS Graviton2 node Documentation: - https://blog.travis-ci.com/2020-09-11-arm-on-aws - https://aws.amazon.com/blogs/opensource/getting-started-with-travis-ci-com-on-aws-graviton2/ Trying to fix the problem described at #10953 --- .travis.yml | 11 ++++++ .../druid/java/util/metrics/MonitorsTest.java | 10 +++++ .../java/util/metrics/SigarLoadTest.java | 13 ++++++- .../util/metrics/SigarPidDiscovererTest.java | 6 +++ .../hadoop/DatasourceIngestionSpecTest.java | 8 ++++ .../ExpressionVectorSelectorsTest.java | 37 ++++++++++++------- 6 files changed, 70 insertions(+), 15 deletions(-) diff --git a/.travis.yml b/.travis.yml index a25cc34bf2af..b6af32c761c2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -303,6 +303,17 @@ jobs: after_success: - (cd web-console && travis_retry npm run codecov) # retry in case of network error + - name: "Build and test on ARM64 CPU architecture" + stage: Tests - phase 2 + arch: arm64-graviton2 + dist: focal + virt: vm + group: edge + jdk: openjdk11 + env: + - MAVEN_PROJECTS='core,indexing-hadoop,indexing-service,processing,server,services' + script: ${MVN} test -B -pl ${MAVEN_PROJECTS} -Ddruid.console.skip=true -DargLine=-Xmx3000m -T1C + - name: "web console end-to-end test" before_install: *setup_generate_license install: web-console/script/druid build diff --git a/core/src/test/java/org/apache/druid/java/util/metrics/MonitorsTest.java b/core/src/test/java/org/apache/druid/java/util/metrics/MonitorsTest.java index 872af3dca22a..6f58fa47d454 100644 --- a/core/src/test/java/org/apache/druid/java/util/metrics/MonitorsTest.java +++ b/core/src/test/java/org/apache/druid/java/util/metrics/MonitorsTest.java @@ -23,12 +23,22 @@ import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.emitter.core.Event; import 
org.junit.Assert; +import org.junit.Assume; +import org.junit.Before; import org.junit.Test; import java.util.List; public class MonitorsTest { + private static final String CPU_ARCH = System.getProperty("os.arch"); + + @Before + public void before() + { + // Do not run the tests on ARM64. Sigar library has no binaries for ARM64 + Assume.assumeFalse("aarch64".equals(CPU_ARCH)); + } @Test public void testSetFeed() diff --git a/core/src/test/java/org/apache/druid/java/util/metrics/SigarLoadTest.java b/core/src/test/java/org/apache/druid/java/util/metrics/SigarLoadTest.java index 42e1003dd475..40d7dada3c82 100644 --- a/core/src/test/java/org/apache/druid/java/util/metrics/SigarLoadTest.java +++ b/core/src/test/java/org/apache/druid/java/util/metrics/SigarLoadTest.java @@ -19,12 +19,23 @@ package org.apache.druid.java.util.metrics; -import junit.framework.Assert; import org.hyperic.sigar.Sigar; +import org.junit.Assert; +import org.junit.Assume; +import org.junit.Before; import org.junit.Test; public class SigarLoadTest { + private static final String CPU_ARCH = System.getProperty("os.arch"); + + @Before + public void before() + { + // Do not run the tests on ARM64. 
Sigar library has no binaries for ARM64 + Assume.assumeFalse("aarch64".equals(CPU_ARCH)); + } + @Test public void testSigarLoad() { diff --git a/core/src/test/java/org/apache/druid/java/util/metrics/SigarPidDiscovererTest.java b/core/src/test/java/org/apache/druid/java/util/metrics/SigarPidDiscovererTest.java index 26357f8dc9a7..32711ec0294d 100644 --- a/core/src/test/java/org/apache/druid/java/util/metrics/SigarPidDiscovererTest.java +++ b/core/src/test/java/org/apache/druid/java/util/metrics/SigarPidDiscovererTest.java @@ -19,13 +19,19 @@ package org.apache.druid.java.util.metrics; +import org.junit.Assume; import org.junit.Test; public class SigarPidDiscovererTest { + private static final String CPU_ARCH = System.getProperty("os.arch"); + @Test public void simpleTest() { + // Do not run the tests on ARM64. Sigar library has no binaries for ARM64 + Assume.assumeFalse("aarch64".equals(CPU_ARCH)); + // Just make sure we don't crash SigarPidDiscoverer.instance().getPid(); } diff --git a/indexing-hadoop/src/test/java/org/apache/druid/indexer/hadoop/DatasourceIngestionSpecTest.java b/indexing-hadoop/src/test/java/org/apache/druid/indexer/hadoop/DatasourceIngestionSpecTest.java index 98da95fc77b0..f2f6131225d4 100644 --- a/indexing-hadoop/src/test/java/org/apache/druid/indexer/hadoop/DatasourceIngestionSpecTest.java +++ b/indexing-hadoop/src/test/java/org/apache/druid/indexer/hadoop/DatasourceIngestionSpecTest.java @@ -22,12 +22,14 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; +import org.apache.druid.common.config.NullHandling; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.query.filter.SelectorDimFilter; import org.apache.druid.segment.TestHelper; import org.apache.druid.timeline.DataSegment; import org.joda.time.Interval; import org.junit.Assert; +import org.junit.Before; import org.junit.Test; import java.util.List; @@ -38,6 +40,12 @@ public 
class DatasourceIngestionSpecTest { private static final ObjectMapper MAPPER = TestHelper.makeJsonMapper(); + @Before + public void before() + { + NullHandling.initializeForTests(); + } + @Test public void testSingleIntervalSerde() throws Exception { diff --git a/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java b/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java index c53676bea3ae..446932cc340b 100644 --- a/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java +++ b/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java @@ -20,7 +20,6 @@ package org.apache.druid.segment.virtual; import com.google.common.collect.ImmutableList; -import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.granularity.Granularities; import org.apache.druid.java.util.common.guava.Sequence; import org.apache.druid.java.util.common.io.Closer; @@ -48,6 +47,7 @@ import org.apache.druid.timeline.partition.LinearShardSpec; import org.junit.AfterClass; import org.junit.Assert; +import org.junit.Assume; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @@ -96,25 +96,31 @@ public class ExpressionVectorSelectorsTest private static QueryableIndex INDEX; private static Closer CLOSER; + private static final String CPU_ARCH = System.getProperty("os.arch"); + @BeforeClass public static void setupClass() { CLOSER = Closer.create(); - final GeneratorSchemaInfo schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get("expression-testbench"); + // Do not run the tests on ARM64. 
+ // SegmentGenerator#generate() fails with OutOfMemoryError on TravisCI ARM64 + if (!"aarch64".equals(CPU_ARCH)) { + final GeneratorSchemaInfo schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get("expression-testbench"); - final DataSegment dataSegment = DataSegment.builder() - .dataSource("foo") - .interval(schemaInfo.getDataInterval()) - .version("1") - .shardSpec(new LinearShardSpec(0)) - .size(0) - .build(); + final DataSegment dataSegment = DataSegment.builder() + .dataSource("foo") + .interval(schemaInfo.getDataInterval()) + .version("1") + .shardSpec(new LinearShardSpec(0)) + .size(0) + .build(); - final SegmentGenerator segmentGenerator = CLOSER.register(new SegmentGenerator()); - INDEX = CLOSER.register( - segmentGenerator.generate(dataSegment, schemaInfo, Granularities.HOUR, ROWS_PER_SEGMENT) - ); + final SegmentGenerator segmentGenerator = CLOSER.register(new SegmentGenerator()); + INDEX = CLOSER.register( + segmentGenerator.generate(dataSegment, schemaInfo, Granularities.HOUR, ROWS_PER_SEGMENT) + ); + } } @AfterClass @@ -140,6 +146,9 @@ public ExpressionVectorSelectorsTest(String expression) @Before public void setup() { + // Don't run the tests on ARM64. @BeforeClass fails with OutOfMemoryError on TravisCI + Assume.assumeFalse("aarch64".equals(CPU_ARCH)); + Expr parsed = Parser.parse(expression, ExprMacroTable.nil()); outputType = parsed.getOutputType( new ColumnInspector() @@ -269,7 +278,7 @@ public static void sanityTestVectorizedExpressionSelectors( int rows = 0; while (!nonVectorized.isDone()) { Assert.assertEquals( - StringUtils.format("Failed at row %s", rows), + "Failed at row " + rows, nonSelector.getObject(), results.get(rows) ); From bb00f6622c4c9c389e35979e6966fa7c4ae141b2 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Fri, 16 Apr 2021 13:35:13 +0300 Subject: [PATCH 02/15] Issue #10953: Split the modules tested on ARM64 in two jobs ... 
because at the moment they took 1h which is a little bit above the TravisCI limit of 50mins per job and because @clintropolis requested to add one more module - sql --- .travis.yml | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index b6af32c761c2..580b180dc98e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -303,7 +303,7 @@ jobs: after_success: - (cd web-console && travis_retry npm run codecov) # retry in case of network error - - name: "Build and test on ARM64 CPU architecture" + - name: "Build and test on ARM64 CPU architecture (1)" stage: Tests - phase 2 arch: arm64-graviton2 dist: focal @@ -311,7 +311,18 @@ jobs: group: edge jdk: openjdk11 env: - - MAVEN_PROJECTS='core,indexing-hadoop,indexing-service,processing,server,services' + - MAVEN_PROJECTS='core,indexing-hadoop,indexing-service,processing' + script: ${MVN} test -B -pl ${MAVEN_PROJECTS} -Ddruid.console.skip=true -DargLine=-Xmx3000m -T1C + + - name: "Build and test on ARM64 CPU architecture (2)" + stage: Tests - phase 2 + arch: arm64-graviton2 + dist: focal + virt: vm + group: edge + jdk: openjdk11 + env: + - MAVEN_PROJECTS='core,sql,server,services' script: ${MVN} test -B -pl ${MAVEN_PROJECTS} -Ddruid.console.skip=true -DargLine=-Xmx3000m -T1C - name: "web console end-to-end test" From 3eea9d22e04ab33beee37b7581bf7207f3942775 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Fri, 16 Apr 2021 13:40:07 +0300 Subject: [PATCH 03/15] Issue #10953: Try with twice smaller ROWS_PER_SEGMENT Suggested-by: clintropolis --- .../segment/virtual/ExpressionVectorSelectorsTest.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java b/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java index 446932cc340b..1340d269e968 100644 --- 
a/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java +++ b/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java @@ -91,7 +91,7 @@ public class ExpressionVectorSelectorsTest "string3" ); - private static final int ROWS_PER_SEGMENT = 100_000; + private static final int ROWS_PER_SEGMENT = 50_000; private static QueryableIndex INDEX; private static Closer CLOSER; @@ -105,7 +105,7 @@ public static void setupClass() // Do not run the tests on ARM64. // SegmentGenerator#generate() fails with OutOfMemoryError on TravisCI ARM64 - if (!"aarch64".equals(CPU_ARCH)) { +// if (!"aarch64".equals(CPU_ARCH)) { final GeneratorSchemaInfo schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get("expression-testbench"); final DataSegment dataSegment = DataSegment.builder() @@ -120,7 +120,7 @@ public static void setupClass() INDEX = CLOSER.register( segmentGenerator.generate(dataSegment, schemaInfo, Granularities.HOUR, ROWS_PER_SEGMENT) ); - } +// } } @AfterClass @@ -147,7 +147,7 @@ public ExpressionVectorSelectorsTest(String expression) public void setup() { // Don't run the tests on ARM64. 
@BeforeClass fails with OutOfMemoryError on TravisCI - Assume.assumeFalse("aarch64".equals(CPU_ARCH)); +// Assume.assumeFalse("aarch64".equals(CPU_ARCH)); Expr parsed = Parser.parse(expression, ExprMacroTable.nil()); outputType = parsed.getOutputType( From 3031571544fe79fd49af0cba0322fd4b93ff6a75 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 19 Apr 2021 09:41:11 +0300 Subject: [PATCH 04/15] Fix Checkstyle issues --- .../ExpressionVectorSelectorsTest.java | 30 ++++++++----------- 1 file changed, 12 insertions(+), 18 deletions(-) diff --git a/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java b/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java index 1340d269e968..ffd7b78bcc92 100644 --- a/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java +++ b/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java @@ -47,7 +47,6 @@ import org.apache.druid.timeline.partition.LinearShardSpec; import org.junit.AfterClass; import org.junit.Assert; -import org.junit.Assume; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @@ -105,22 +104,20 @@ public static void setupClass() // Do not run the tests on ARM64. 
// SegmentGenerator#generate() fails with OutOfMemoryError on TravisCI ARM64 -// if (!"aarch64".equals(CPU_ARCH)) { - final GeneratorSchemaInfo schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get("expression-testbench"); + final GeneratorSchemaInfo schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get("expression-testbench"); - final DataSegment dataSegment = DataSegment.builder() - .dataSource("foo") - .interval(schemaInfo.getDataInterval()) - .version("1") - .shardSpec(new LinearShardSpec(0)) - .size(0) - .build(); + final DataSegment dataSegment = DataSegment.builder() + .dataSource("foo") + .interval(schemaInfo.getDataInterval()) + .version("1") + .shardSpec(new LinearShardSpec(0)) + .size(0) + .build(); - final SegmentGenerator segmentGenerator = CLOSER.register(new SegmentGenerator()); - INDEX = CLOSER.register( - segmentGenerator.generate(dataSegment, schemaInfo, Granularities.HOUR, ROWS_PER_SEGMENT) - ); -// } + final SegmentGenerator segmentGenerator = CLOSER.register(new SegmentGenerator()); + INDEX = CLOSER.register( + segmentGenerator.generate(dataSegment, schemaInfo, Granularities.HOUR, ROWS_PER_SEGMENT) + ); } @AfterClass @@ -146,9 +143,6 @@ public ExpressionVectorSelectorsTest(String expression) @Before public void setup() { - // Don't run the tests on ARM64. 
@BeforeClass fails with OutOfMemoryError on TravisCI -// Assume.assumeFalse("aarch64".equals(CPU_ARCH)); - Expr parsed = Parser.parse(expression, ExprMacroTable.nil()); outputType = parsed.getOutputType( new ColumnInspector() From a78fbfbb3afffa5616f4f1d92f5ce90bf60b4438 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Tue, 20 Apr 2021 09:36:53 +0300 Subject: [PATCH 05/15] Issue #10953 - Remove unused constant and obsolete comments --- .../druid/segment/virtual/ExpressionVectorSelectorsTest.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java b/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java index ffd7b78bcc92..1dd7f1ef2c84 100644 --- a/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java +++ b/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java @@ -95,15 +95,11 @@ public class ExpressionVectorSelectorsTest private static QueryableIndex INDEX; private static Closer CLOSER; - private static final String CPU_ARCH = System.getProperty("os.arch"); - @BeforeClass public static void setupClass() { CLOSER = Closer.create(); - // Do not run the tests on ARM64. 
- // SegmentGenerator#generate() fails with OutOfMemoryError on TravisCI ARM64 final GeneratorSchemaInfo schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get("expression-testbench"); final DataSegment dataSegment = DataSegment.builder() From e87d8b08bdf2d7e04084c1ed6e344e2f6c38edb2 Mon Sep 17 00:00:00 2001 From: Will Xu <2bethere@gmail.com> Date: Mon, 18 Apr 2022 15:13:17 -0400 Subject: [PATCH 06/15] Focus on arm builds --- .travis.yml | 1200 +++++++++++++++++++++++++-------------------------- 1 file changed, 600 insertions(+), 600 deletions(-) diff --git a/.travis.yml b/.travis.yml index 580b180dc98e..27bfad7cf58f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -64,41 +64,41 @@ stages: jobs: include: - - name: "animal sniffer checks" - stage: Tests - phase 1 - script: ${MVN} animal-sniffer:check --fail-at-end - - - name: "checkstyle" - script: ${MVN} checkstyle:checkstyle --fail-at-end - - - name: "enforcer checks" - script: ${MVN} enforcer:enforce --fail-at-end - - - name: "forbidden api checks" - script: ${MVN} forbiddenapis:check forbiddenapis:testCheck --fail-at-end - - - name: "pmd checks" - script: ${MVN} pmd:check --fail-at-end # TODO: consider adding pmd:cpd-check - - - name: "spotbugs checks" - script: ${MVN} spotbugs:check --fail-at-end -pl '!benchmarks' - - - name: "license checks" - install: skip - before_script: &setup_generate_license - - sudo apt-get update && sudo apt-get install python3 python3-pip python3-setuptools -y - - pip3 install wheel # install wheel first explicitly - - pip3 install pyyaml - script: - - > - ${MVN} apache-rat:check -Prat --fail-at-end - -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn - -Drat.consoleOutput=true - # Generate dependency reports and checks they are valid. When running on Travis CI, 2 cores are available - # (https://docs.travis-ci.com/user/reference/overview/#virtualisation-environment-vs-operating-system). 
- - mkdir -p target - - distribution/bin/generate-license-dependency-reports.py . target --clean-maven-artifact-transfer --parallel 2 - - distribution/bin/check-licenses.py licenses.yaml target/license-reports +# - name: "animal sniffer checks" +# stage: Tests - phase 1 +# script: ${MVN} animal-sniffer:check --fail-at-end +# +# - name: "checkstyle" +# script: ${MVN} checkstyle:checkstyle --fail-at-end +# +# - name: "enforcer checks" +# script: ${MVN} enforcer:enforce --fail-at-end +# +# - name: "forbidden api checks" +# script: ${MVN} forbiddenapis:check forbiddenapis:testCheck --fail-at-end +# +# - name: "pmd checks" +# script: ${MVN} pmd:check --fail-at-end # TODO: consider adding pmd:cpd-check +# +# - name: "spotbugs checks" +# script: ${MVN} spotbugs:check --fail-at-end -pl '!benchmarks' +# +# - name: "license checks" +# install: skip +# before_script: &setup_generate_license +# - sudo apt-get update && sudo apt-get install python3 python3-pip python3-setuptools -y +# - pip3 install wheel # install wheel first explicitly +# - pip3 install pyyaml +# script: +# - > +# ${MVN} apache-rat:check -Prat --fail-at-end +# -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn +# -Drat.consoleOutput=true +# # Generate dependency reports and checks they are valid. When running on Travis CI, 2 cores are available +# # (https://docs.travis-ci.com/user/reference/overview/#virtualisation-environment-vs-operating-system). +# - mkdir -p target +# - distribution/bin/generate-license-dependency-reports.py . 
target --clean-maven-artifact-transfer --parallel 2 +# - distribution/bin/check-licenses.py licenses.yaml target/license-reports - name: "(openjdk8) strict compilation" install: skip @@ -107,201 +107,201 @@ jobs: MAVEN_OPTS='-Xmx3000m' ${MVN} clean -Pstrict compile test-compile --fail-at-end -pl '!benchmarks' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} - - name: "analyze dependencies" - script: |- - MAVEN_OPTS='-Xmx3000m' ${MVN} ${MAVEN_SKIP} dependency:analyze -DoutputXML=true -DignoreNonCompile=true -DfailOnWarning=true || { echo " - - The dependency analysis has found a dependency that is either: - - 1) Used and undeclared: These are available as a transitive dependency but should be explicitly - added to the POM to ensure the dependency version. The XML to add the dependencies to the POM is - shown above. - - 2) Unused and declared: These are not needed and removing them from the POM will speed up the build - and reduce the artifact size. The dependencies to remove are shown above. - - If there are false positive dependency analysis warnings, they can be suppressed: - https://maven.apache.org/plugins/maven-dependency-plugin/analyze-mojo.html#usedDependencies - https://maven.apache.org/plugins/maven-dependency-plugin/examples/exclude-dependencies-from-dependency-analysis.html - - For more information, refer to: - https://maven.apache.org/plugins/maven-dependency-plugin/analyze-mojo.html - - " && false; } - - - name: "intellij inspections" - script: > - docker run --rm - -v $(pwd):/project - -v ~/.m2:/home/inspect/.m2 - ccaominh/intellij-inspect:1.0.0 - /project/pom.xml - /project/.idea/inspectionProfiles/Druid.xml - --levels ERROR - --scope JavaInspectionsScope - - - &package - name: "(openjdk8) packaging check" - install: skip - before_script: *setup_generate_license - script: > - MAVEN_OPTS='-Xmx3000m' ${MVN} clean install -Prat -Pdist -Pbundle-contrib-exts --fail-at-end - -pl '!benchmarks' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} -Ddruid.console.skip=false -T1C - - - <<: 
*package - name: "(openjdk11) packaging check" - stage: Tests - phase 2 - jdk: openjdk11 - - - &test_processing_module - name: "(openjdk8) processing module test" - stage: Tests - phase 1 - env: - - MAVEN_PROJECTS='processing' - before_script: - - export DRUID_USE_DEFAULT_VALUE_FOR_NULL=true - script: - - unset _JAVA_OPTIONS - # Set MAVEN_OPTS for Surefire launcher. Skip remoteresources to avoid intermittent connection timeouts when - # resolving the SIGAR dependency. - - > - MAVEN_OPTS='-Xmx1100m' ${MVN} test -pl ${MAVEN_PROJECTS} - ${MAVEN_SKIP} -Dremoteresources.skip=true -Ddruid.generic.useDefaultValueForNull=${DRUID_USE_DEFAULT_VALUE_FOR_NULL} - - sh -c "dmesg | egrep -i '(oom|out of memory|kill process|killed).*' -C 1 || exit 0" - - free -m - - ${MVN} -pl ${MAVEN_PROJECTS} jacoco:report - # Add merge target branch to determine diff (see https://github.com/travis-ci/travis-ci/issues/6069). - # This is not needed for build triggered by tags, since there will be no code diff. - - echo "TRAVIS_BRANCH=${TRAVIS_BRANCH}" # for debugging - - if [[ -z "${TRAVIS_TAG}" ]]; then git remote set-branches --add origin ${TRAVIS_BRANCH} && git fetch; fi - # Determine the modified files that match the maven projects being tested. We use maven project lists that - # either exclude (starts with "!") or include (does not start with "!"), so both cases need to be handled. - # If the build is triggered by a tag, an error will be printed, but `all_files` will be correctly set to empty - # so that the coverage check is skipped. - - all_files="$(git diff --name-only origin/${TRAVIS_BRANCH}...HEAD | grep "\.java$" || [[ $? == 1 ]])" - - for f in ${all_files}; do echo $f; done # for debugging - - > - if [[ "${MAVEN_PROJECTS}" = \!* ]]; then - regex="${MAVEN_PROJECTS:1}"; - regex="^${regex//,\!/\\|^}"; - project_files="$(echo "${all_files}" | grep -v "${regex}" || [[ $? 
== 1 ]])"; - else - regex="^${MAVEN_PROJECTS//,/\\|^}"; - project_files="$(echo "${all_files}" | grep "${regex}" || [[ $? == 1 ]])"; - fi - - for f in ${project_files}; do echo $f; done # for debugging - # Check diff code coverage for the maven projects being tested (retry install in case of network error). - # Currently, the function coverage check is not reliable, so it is disabled. - - > - if [ -n "${project_files}" ]; then - travis_retry npm install @connectis/diff-test-coverage@1.5.3 - && git diff origin/${TRAVIS_BRANCH}...HEAD -- ${project_files} - | node_modules/.bin/diff-test-coverage - --coverage "**/target/site/jacoco/jacoco.xml" - --type jacoco - --line-coverage 50 - --branch-coverage 50 - --function-coverage 0 - --log-template "coverage-lines-complete" - --log-template "coverage-files-complete" - --log-template "totals-complete" - --log-template "errors" - -- - || { printf "\n\n****FAILED****\nDiff code coverage check failed. To view coverage report, run 'mvn clean test jacoco:report' and open 'target/site/jacoco/index.html'\nFor more details on how to run code coverage locally, follow instructions here - https://github.com/apache/druid/blob/master/dev/code-review/code-coverage.md#running-code-coverage-locally\n\n" && false; } - fi - after_success: - # retry in case of network error - - travis_retry curl -o codecov.sh -s https://codecov.io/bash - - travis_retry bash codecov.sh -X gcov - - - <<: *test_processing_module - name: "(openjdk11) processing module test" - stage: Tests - phase 2 - jdk: openjdk11 - - - &test_processing_module_sqlcompat - <<: *test_processing_module - name: "(openjdk8) processing module test (SQL Compatibility)" - stage: Tests - phase 1 - before_script: &setup_sqlcompat - - export DRUID_USE_DEFAULT_VALUE_FOR_NULL=false - - - <<: *test_processing_module_sqlcompat - name: "(openjdk11) processing module test (SQL Compatibility)" - stage: Tests - phase 2 - jdk: openjdk11 - - - &test_indexing_module - <<: *test_processing_module - 
name: "(openjdk8) indexing modules test" - env: - - MAVEN_PROJECTS='indexing-hadoop,indexing-service,extensions-core/kafka-indexing-service,extensions-core/kinesis-indexing-service' - - - <<: *test_indexing_module - name: "(openjdk11) indexing modules test" - stage: Tests - phase 2 - jdk: openjdk11 - - - &test_indexing_module_sqlcompat - <<: *test_indexing_module - name: "(openjdk8) indexing modules test (SQL Compatibility)" - stage: Tests - phase 1 - before_script: *setup_sqlcompat - - - <<: *test_indexing_module_sqlcompat - name: "(openjdk11) indexing modules test (SQL Compatibility)" - stage: Tests - phase 2 - jdk: openjdk11 - - - &test_server_module - <<: *test_processing_module - name: "(openjdk8) server module test" - env: - - MAVEN_PROJECTS='server' - - - <<: *test_server_module - name: "(openjdk11) server module test" - stage: Tests - phase 2 - jdk: openjdk11 - - - &test_server_module_sqlcompat - <<: *test_server_module - name: "(openjdk8) server module test (SQL Compatibility)" - before_script: *setup_sqlcompat - - - <<: *test_server_module_sqlcompat - name: "(openjdk11) server module test (SQL Compatibility)" - stage: Tests - phase 2 - jdk: openjdk11 - - - &test_other_modules - <<: *test_processing_module - name: "(openjdk8) other modules test" - env: - - MAVEN_PROJECTS='!processing,!indexing-hadoop,!indexing-service,!extensions-core/kafka-indexing-service,!extensions-core/kinesis-indexing-service,!server,!web-console,!integration-tests' - - - <<: *test_other_modules - name: "(openjdk11) other modules test" - stage: Tests - phase 2 - jdk: openjdk11 - - - &test_other_modules_sqlcompat - <<: *test_other_modules - name: "(openjdk8) other modules test (SQL Compatibility)" - before_script: *setup_sqlcompat - - - <<: *test_other_modules_sqlcompat - name: "(openjdk11) other modules test (SQL Compatibility)" - stage: Tests - phase 2 - jdk: openjdk11 - - - name: "web console" - install: skip - stage: Tests - phase 1 - script: - - ${MVN} test -pl 'web-console' - 
after_success: - - (cd web-console && travis_retry npm run codecov) # retry in case of network error +# - name: "analyze dependencies" +# script: |- +# MAVEN_OPTS='-Xmx3000m' ${MVN} ${MAVEN_SKIP} dependency:analyze -DoutputXML=true -DignoreNonCompile=true -DfailOnWarning=true || { echo " +# +# The dependency analysis has found a dependency that is either: +# +# 1) Used and undeclared: These are available as a transitive dependency but should be explicitly +# added to the POM to ensure the dependency version. The XML to add the dependencies to the POM is +# shown above. +# +# 2) Unused and declared: These are not needed and removing them from the POM will speed up the build +# and reduce the artifact size. The dependencies to remove are shown above. +# +# If there are false positive dependency analysis warnings, they can be suppressed: +# https://maven.apache.org/plugins/maven-dependency-plugin/analyze-mojo.html#usedDependencies +# https://maven.apache.org/plugins/maven-dependency-plugin/examples/exclude-dependencies-from-dependency-analysis.html +# +# For more information, refer to: +# https://maven.apache.org/plugins/maven-dependency-plugin/analyze-mojo.html +# +# " && false; } +# +# - name: "intellij inspections" +# script: > +# docker run --rm +# -v $(pwd):/project +# -v ~/.m2:/home/inspect/.m2 +# ccaominh/intellij-inspect:1.0.0 +# /project/pom.xml +# /project/.idea/inspectionProfiles/Druid.xml +# --levels ERROR +# --scope JavaInspectionsScope +# +# - &package +# name: "(openjdk8) packaging check" +# install: skip +# before_script: *setup_generate_license +# script: > +# MAVEN_OPTS='-Xmx3000m' ${MVN} clean install -Prat -Pdist -Pbundle-contrib-exts --fail-at-end +# -pl '!benchmarks' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} -Ddruid.console.skip=false -T1C +# +# - <<: *package +# name: "(openjdk11) packaging check" +# stage: Tests - phase 2 +# jdk: openjdk11 +# +# - &test_processing_module +# name: "(openjdk8) processing module test" +# stage: Tests - phase 1 +# env: +# 
- MAVEN_PROJECTS='processing' +# before_script: +# - export DRUID_USE_DEFAULT_VALUE_FOR_NULL=true +# script: +# - unset _JAVA_OPTIONS +# # Set MAVEN_OPTS for Surefire launcher. Skip remoteresources to avoid intermittent connection timeouts when +# # resolving the SIGAR dependency. +# - > +# MAVEN_OPTS='-Xmx1100m' ${MVN} test -pl ${MAVEN_PROJECTS} +# ${MAVEN_SKIP} -Dremoteresources.skip=true -Ddruid.generic.useDefaultValueForNull=${DRUID_USE_DEFAULT_VALUE_FOR_NULL} +# - sh -c "dmesg | egrep -i '(oom|out of memory|kill process|killed).*' -C 1 || exit 0" +# - free -m +# - ${MVN} -pl ${MAVEN_PROJECTS} jacoco:report +# # Add merge target branch to determine diff (see https://github.com/travis-ci/travis-ci/issues/6069). +# # This is not needed for build triggered by tags, since there will be no code diff. +# - echo "TRAVIS_BRANCH=${TRAVIS_BRANCH}" # for debugging +# - if [[ -z "${TRAVIS_TAG}" ]]; then git remote set-branches --add origin ${TRAVIS_BRANCH} && git fetch; fi +# # Determine the modified files that match the maven projects being tested. We use maven project lists that +# # either exclude (starts with "!") or include (does not start with "!"), so both cases need to be handled. +# # If the build is triggered by a tag, an error will be printed, but `all_files` will be correctly set to empty +# # so that the coverage check is skipped. +# - all_files="$(git diff --name-only origin/${TRAVIS_BRANCH}...HEAD | grep "\.java$" || [[ $? == 1 ]])" +# - for f in ${all_files}; do echo $f; done # for debugging +# - > +# if [[ "${MAVEN_PROJECTS}" = \!* ]]; then +# regex="${MAVEN_PROJECTS:1}"; +# regex="^${regex//,\!/\\|^}"; +# project_files="$(echo "${all_files}" | grep -v "${regex}" || [[ $? == 1 ]])"; +# else +# regex="^${MAVEN_PROJECTS//,/\\|^}"; +# project_files="$(echo "${all_files}" | grep "${regex}" || [[ $? 
== 1 ]])"; +# fi +# - for f in ${project_files}; do echo $f; done # for debugging +# # Check diff code coverage for the maven projects being tested (retry install in case of network error). +# # Currently, the function coverage check is not reliable, so it is disabled. +# - > +# if [ -n "${project_files}" ]; then +# travis_retry npm install @connectis/diff-test-coverage@1.5.3 +# && git diff origin/${TRAVIS_BRANCH}...HEAD -- ${project_files} +# | node_modules/.bin/diff-test-coverage +# --coverage "**/target/site/jacoco/jacoco.xml" +# --type jacoco +# --line-coverage 50 +# --branch-coverage 50 +# --function-coverage 0 +# --log-template "coverage-lines-complete" +# --log-template "coverage-files-complete" +# --log-template "totals-complete" +# --log-template "errors" +# -- +# || { printf "\n\n****FAILED****\nDiff code coverage check failed. To view coverage report, run 'mvn clean test jacoco:report' and open 'target/site/jacoco/index.html'\nFor more details on how to run code coverage locally, follow instructions here - https://github.com/apache/druid/blob/master/dev/code-review/code-coverage.md#running-code-coverage-locally\n\n" && false; } +# fi +# after_success: +# # retry in case of network error +# - travis_retry curl -o codecov.sh -s https://codecov.io/bash +# - travis_retry bash codecov.sh -X gcov +# +# - <<: *test_processing_module +# name: "(openjdk11) processing module test" +# stage: Tests - phase 2 +# jdk: openjdk11 +# +# - &test_processing_module_sqlcompat +# <<: *test_processing_module +# name: "(openjdk8) processing module test (SQL Compatibility)" +# stage: Tests - phase 1 +# before_script: &setup_sqlcompat +# - export DRUID_USE_DEFAULT_VALUE_FOR_NULL=false +# +# - <<: *test_processing_module_sqlcompat +# name: "(openjdk11) processing module test (SQL Compatibility)" +# stage: Tests - phase 2 +# jdk: openjdk11 +# +# - &test_indexing_module +# <<: *test_processing_module +# name: "(openjdk8) indexing modules test" +# env: +# - 
MAVEN_PROJECTS='indexing-hadoop,indexing-service,extensions-core/kafka-indexing-service,extensions-core/kinesis-indexing-service' +# +# - <<: *test_indexing_module +# name: "(openjdk11) indexing modules test" +# stage: Tests - phase 2 +# jdk: openjdk11 +# +# - &test_indexing_module_sqlcompat +# <<: *test_indexing_module +# name: "(openjdk8) indexing modules test (SQL Compatibility)" +# stage: Tests - phase 1 +# before_script: *setup_sqlcompat +# +# - <<: *test_indexing_module_sqlcompat +# name: "(openjdk11) indexing modules test (SQL Compatibility)" +# stage: Tests - phase 2 +# jdk: openjdk11 +# +# - &test_server_module +# <<: *test_processing_module +# name: "(openjdk8) server module test" +# env: +# - MAVEN_PROJECTS='server' +# +# - <<: *test_server_module +# name: "(openjdk11) server module test" +# stage: Tests - phase 2 +# jdk: openjdk11 +# +# - &test_server_module_sqlcompat +# <<: *test_server_module +# name: "(openjdk8) server module test (SQL Compatibility)" +# before_script: *setup_sqlcompat +# +# - <<: *test_server_module_sqlcompat +# name: "(openjdk11) server module test (SQL Compatibility)" +# stage: Tests - phase 2 +# jdk: openjdk11 +# +# - &test_other_modules +# <<: *test_processing_module +# name: "(openjdk8) other modules test" +# env: +# - MAVEN_PROJECTS='!processing,!indexing-hadoop,!indexing-service,!extensions-core/kafka-indexing-service,!extensions-core/kinesis-indexing-service,!server,!web-console,!integration-tests' +# +# - <<: *test_other_modules +# name: "(openjdk11) other modules test" +# stage: Tests - phase 2 +# jdk: openjdk11 +# +# - &test_other_modules_sqlcompat +# <<: *test_other_modules +# name: "(openjdk8) other modules test (SQL Compatibility)" +# before_script: *setup_sqlcompat +# +# - <<: *test_other_modules_sqlcompat +# name: "(openjdk11) other modules test (SQL Compatibility)" +# stage: Tests - phase 2 +# jdk: openjdk11 +# +# - name: "web console" +# install: skip +# stage: Tests - phase 1 +# script: +# - ${MVN} test -pl 
'web-console' +# after_success: +# - (cd web-console && travis_retry npm run codecov) # retry in case of network error - name: "Build and test on ARM64 CPU architecture (1)" stage: Tests - phase 2 @@ -325,376 +325,376 @@ jobs: - MAVEN_PROJECTS='core,sql,server,services' script: ${MVN} test -B -pl ${MAVEN_PROJECTS} -Ddruid.console.skip=true -DargLine=-Xmx3000m -T1C - - name: "web console end-to-end test" - before_install: *setup_generate_license - install: web-console/script/druid build - before_script: - - nvm install 10.24.0 - - web-console/script/druid start - script: (cd web-console && npm run test-e2e) - after_script: web-console/script/druid stop - - - name: "docs" - install: (cd website && npm install) - script: |- - (cd website && npm run lint && npm run spellcheck) || { echo " - - If there are spell check errors: - - 1) Suppressing False Positives: Edit website/.spelling to add suppressions. Instructions - are at the top of the file and explain how to suppress false positives either globally or - within a particular file. 
- - 2) Running Spell Check Locally: cd website && npm install && npm run spellcheck - - For more information, refer to: https://www.npmjs.com/package/markdown-spellcheck - - " && false; } - - # Integration tests Java Compile version is set by the machine environment jdk (set by the jdk key) - # Integration tests Java Runtime version is set by the JVM_RUNTIME env property (set env key to -Djvm.runtime=) - # Integration tests will either use MiddleManagers or Indexers - # (Currently integration tests only support running with jvm runtime 8 and 11) - # START - Integration tests for Compile with Java 8 and Run with Java 8 - - &integration_batch_index - name: "(Compile=openjdk8, Run=openjdk8) batch index integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: &integration_test_services - - docker - env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: &run_integration_test - - ${MVN} verify -pl integration-tests -P integration-tests ${TESTNG_GROUPS} ${JVM_RUNTIME} -Dit.indexer=${USE_INDEXER} -Dzk.version=${ZK_VERSION} ${MAVEN_SKIP} - after_failure: &integration_test_diags - - for v in ~/shared/logs/*.log ; do - echo $v logtail ======================== ; tail -100 $v ; - done - - for v in broker middlemanager overlord router coordinator historical ; do - echo $v dmesg ======================== ; - docker exec -it druid-$v sh -c 'dmesg | tail -3' ; - done - - - <<: *integration_batch_index - name: "(Compile=openjdk8, Run=openjdk8) batch index integration test with Indexer" - env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' - - - &integration_input_format - name: "(Compile=openjdk8, Run=openjdk8) input format integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: 
*integration_test_diags - - - <<: *integration_input_format - name: "(Compile=openjdk8, Run=openjdk8) input format integration test with Indexer" - env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' - - - &integration_input_source - name: "(Compile=openjdk8, Run=openjdk8) input source integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - <<: *integration_input_source - name: "(Compile=openjdk8, Run=openjdk8) input source integration test with Indexer" - env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' - - - &integration_perfect_rollup_parallel_batch_index - name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test" - jdk: openjdk8 - stage: Tests - phase 2 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - <<: *integration_perfect_rollup_parallel_batch_index - name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test with Indexer" - env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' - - - &integration_kafka_index - name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=kafka-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - <<: *integration_kafka_index - name: "(Compile=openjdk8, Run=openjdk8) kafka index, 
transactional kafka index integration test with Indexer" - env: TESTNG_GROUPS='-Dgroups=kafka-index,kafka-transactional-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' - - - &integration_kafka_index_slow - name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test slow" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=kafka-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - <<: *integration_kafka_index_slow - name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test slow with Indexer" - env: TESTNG_GROUPS='-Dgroups=kafka-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' - - - &integration_kafka_transactional_index - name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - &integration_kafka_transactional_index_slow - name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test slow" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - <<: *integration_kafka_transactional_index_slow - name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test slow with Indexer" - env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' - - - &integration_kafka_format_tests - name: "(Compile=openjdk8, Run=openjdk8) Kafka index integration test with various 
formats" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=kafka-data-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - <<: *integration_kafka_format_tests - name: "(Compile=openjdk8, Run=openjdk8) Kafka index integration test with various formats with Indexer" - env: TESTNG_GROUPS='-Dgroups=kafka-data-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' - - - &integration_query - name: "(Compile=openjdk8, Run=openjdk8) query integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - &integration_query_retry - name: "(Compile=openjdk8, Run=openjdk8) query retry integration test for missing segments" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=query-retry' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - &integration_query_error - name: "(Compile=openjdk8, Run=openjdk8) query error integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=query-error' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - &integration_security - name: "(Compile=openjdk8, Run=openjdk8) security integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=security' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - &integration_ldap_security - name: 
"(Compile=openjdk8, Run=openjdk8) ldap security integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=ldap-security' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - &integration_realtime_index - name: "(Compile=openjdk8, Run=openjdk8) realtime index integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=realtime-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - &integration_append_ingestion - name: "(Compile=openjdk8, Run=openjdk8) append ingestion integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - <<: *integration_append_ingestion - name: "(Compile=openjdk8, Run=openjdk8) append ingestion integration test with Indexer" - env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' - - - &integration_compaction_tests - name: "(Compile=openjdk8, Run=openjdk8) compaction integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - <<: *integration_compaction_tests - name: "(Compile=openjdk8, Run=openjdk8) compaction integration test with Indexer" - env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' - - - &integration_tests - name: "(Compile=openjdk8, Run=openjdk8) other integration tests" - stage: Tests - phase 2 - jdk: 
openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - <<: *integration_tests - name: "(Compile=openjdk8, Run=openjdk8) other integration tests with Indexer" - env: TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' - - - <<: *integration_tests - name: "(Compile=openjdk8, Run=openjdk8) leadership and high availability integration tests" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=high-availability' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - - # Subset of integration tests to run with ZooKeeper 3.4.x for backwards 
compatibility - - <<: *integration_tests - name: "(Compile=openjdk8, Run=openjdk8, ZK=3.4) leadership and high availability integration tests" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=high-availability' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' ZK_VERSION=3.4 - - - <<: *integration_kafka_format_tests - name: "(Compile=openjdk8, Run=openjdk8, ZK=3.4) Kafka index integration test with various formats" - env: TESTNG_GROUPS='-Dgroups=kafka-data-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' ZK_VERSION=3.4 - - # END - Integration tests for Compile with Java 8 and Run with Java 8 - - # START - Integration tests for Compile with Java 8 and Run with Java 11 - - <<: *integration_batch_index - name: "(Compile=openjdk8, Run=openjdk11) batch index integration test" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' - - - <<: *integration_input_format - name: "(Compile=openjdk8, Run=openjdk11) input format integration test" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' - - - <<: *integration_input_source - name: "(Compile=openjdk8, Run=openjdk11) input source integration test" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' - - - <<: *integration_perfect_rollup_parallel_batch_index - name: "(Compile=openjdk8, Run=openjdk11) perfect rollup parallel batch index integration test" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' - - - <<: *integration_query - name: "(Compile=openjdk8, Run=openjdk11) query integration test" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' - - - <<: *integration_query_retry - name: "(Compile=openjdk8, Run=openjdk11) query retry integration test 
for missing segments" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=query-retry' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' - - - <<: *integration_query_error - name: "(Compile=openjdk8, Run=openjdk11) query error integration test for missing segments" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=query-error' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' - - - <<: *integration_security - name: "(Compile=openjdk8, Run=openjdk11) security integration test" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=security' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' - - - <<: *integration_ldap_security - name: "(Compile=openjdk8, Run=openjdk11) ldap security integration test" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=ldap-security' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' - - - <<: *integration_realtime_index - name: "(Compile=openjdk8, Run=openjdk11) realtime index integration test" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=realtime-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' - - - <<: *integration_append_ingestion - name: "(Compile=openjdk8, Run=openjdk11) append ingestion integration test" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' - - - <<: *integration_compaction_tests - name: "(Compile=openjdk8, Run=openjdk11) compaction integration test" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' - - - <<: *integration_tests - name: "(Compile=openjdk8, Run=openjdk11) other integration test" - jdk: openjdk8 - env: 
TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' - - - <<: *integration_tests - name: "(Compile=openjdk8, Run=openjdk11) leadership and high availability integration tests" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=high-availability' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' - - # Subset of integration tests to run with ZooKeeper 3.4.x for backwards compatibility - - <<: *integration_tests - name: "(Compile=openjdk8, Run=openjdk11, ZK=3.4) leadership and high availability integration tests" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=high-availability' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' ZK_VERSION=3.4 - - # END - Integration tests for Compile with Java 8 and Run with Java 11 - - - &integration_batch_index_k8s - name: "(Compile=openjdk8, Run=openjdk8, Cluster Build On K8s) ITNestedQueryPushDownTest integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: &integration_test_services_k8s - - docker - env: CONFIG_FILE='k8s_run_config_file.json' IT_TEST='-Dit.test=ITNestedQueryPushDownTest' POD_NAME=int-test POD_NAMESPACE=default BUILD_DRUID_CLSUTER=true - script: &run_integration_test_k8s - - ${MVN} verify -pl integration-tests -P int-tests-config-file ${IT_TEST} ${MAVEN_SKIP} -Dpod.name=${POD_NAME} -Dpod.namespace=${POD_NAMESPACE} 
-Dbuild.druid.cluster=${BUILD_DRUID_CLSUTER} - after_failure: &integration_test_diags_k8s - - for v in broker middlemanager router coordinator historical ; do - echo "------------------------druid-tiny-cluster-"$v"s-0-------------------------"; - sudo /usr/local/bin/kubectl logs --tail 1000 druid-tiny-cluster-"$v"s-0; - done - - name: "security vulnerabilities" - stage: cron - install: skip - script: |- - ${MVN} dependency-check:aggregate -pl '!integration-tests' || { echo " - - The OWASP dependency check has found security vulnerabilities. Please use a newer version - of the dependency that does not have vulnerabilities. To see a report run - `mvn dependency-check:check` - If the analysis has false positives, - they can be suppressed by adding entries to owasp-dependency-check-suppressions.xml (for more - information, see https://jeremylong.github.io/DependencyCheck/general/suppression.html). - - " && false; } +# - name: "web console end-to-end test" +# before_install: *setup_generate_license +# install: web-console/script/druid build +# before_script: +# - nvm install 10.24.0 +# - web-console/script/druid start +# script: (cd web-console && npm run test-e2e) +# after_script: web-console/script/druid stop +# +# - name: "docs" +# install: (cd website && npm install) +# script: |- +# (cd website && npm run lint && npm run spellcheck) || { echo " +# +# If there are spell check errors: +# +# 1) Suppressing False Positives: Edit website/.spelling to add suppressions. Instructions +# are at the top of the file and explain how to suppress false positives either globally or +# within a particular file. 
+# +# 2) Running Spell Check Locally: cd website && npm install && npm run spellcheck +# +# For more information, refer to: https://www.npmjs.com/package/markdown-spellcheck +# +# " && false; } +# +# # Integration tests Java Compile version is set by the machine environment jdk (set by the jdk key) +# # Integration tests Java Runtime version is set by the JVM_RUNTIME env property (set env key to -Djvm.runtime=) +# # Integration tests will either use MiddleManagers or Indexers +# # (Currently integration tests only support running with jvm runtime 8 and 11) +# # START - Integration tests for Compile with Java 8 and Run with Java 8 +# - &integration_batch_index +# name: "(Compile=openjdk8, Run=openjdk8) batch index integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: &integration_test_services +# - docker +# env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: &run_integration_test +# - ${MVN} verify -pl integration-tests -P integration-tests ${TESTNG_GROUPS} ${JVM_RUNTIME} -Dit.indexer=${USE_INDEXER} -Dzk.version=${ZK_VERSION} ${MAVEN_SKIP} +# after_failure: &integration_test_diags +# - for v in ~/shared/logs/*.log ; do +# echo $v logtail ======================== ; tail -100 $v ; +# done +# - for v in broker middlemanager overlord router coordinator historical ; do +# echo $v dmesg ======================== ; +# docker exec -it druid-$v sh -c 'dmesg | tail -3' ; +# done +# +# - <<: *integration_batch_index +# name: "(Compile=openjdk8, Run=openjdk8) batch index integration test with Indexer" +# env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' +# +# - &integration_input_format +# name: "(Compile=openjdk8, Run=openjdk8) input format integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: 
*run_integration_test +# after_failure: *integration_test_diags +# +# - <<: *integration_input_format +# name: "(Compile=openjdk8, Run=openjdk8) input format integration test with Indexer" +# env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' +# +# - &integration_input_source +# name: "(Compile=openjdk8, Run=openjdk8) input source integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - <<: *integration_input_source +# name: "(Compile=openjdk8, Run=openjdk8) input source integration test with Indexer" +# env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' +# +# - &integration_perfect_rollup_parallel_batch_index +# name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test" +# jdk: openjdk8 +# stage: Tests - phase 2 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - <<: *integration_perfect_rollup_parallel_batch_index +# name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test with Indexer" +# env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' +# +# - &integration_kafka_index +# name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=kafka-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - <<: 
*integration_kafka_index +# name: "(Compile=openjdk8, Run=openjdk8) kafka index, transactional kafka index integration test with Indexer" +# env: TESTNG_GROUPS='-Dgroups=kafka-index,kafka-transactional-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' +# +# - &integration_kafka_index_slow +# name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test slow" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=kafka-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - <<: *integration_kafka_index_slow +# name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test slow with Indexer" +# env: TESTNG_GROUPS='-Dgroups=kafka-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' +# +# - &integration_kafka_transactional_index +# name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - &integration_kafka_transactional_index_slow +# name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test slow" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - <<: *integration_kafka_transactional_index_slow +# name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test slow with Indexer" +# env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' +# +# - 
&integration_kafka_format_tests +# name: "(Compile=openjdk8, Run=openjdk8) Kafka index integration test with various formats" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=kafka-data-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - <<: *integration_kafka_format_tests +# name: "(Compile=openjdk8, Run=openjdk8) Kafka index integration test with various formats with Indexer" +# env: TESTNG_GROUPS='-Dgroups=kafka-data-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' +# +# - &integration_query +# name: "(Compile=openjdk8, Run=openjdk8) query integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - &integration_query_retry +# name: "(Compile=openjdk8, Run=openjdk8) query retry integration test for missing segments" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=query-retry' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - &integration_query_error +# name: "(Compile=openjdk8, Run=openjdk8) query error integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=query-error' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - &integration_security +# name: "(Compile=openjdk8, Run=openjdk8) security integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=security' 
JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - &integration_ldap_security +# name: "(Compile=openjdk8, Run=openjdk8) ldap security integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=ldap-security' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - &integration_realtime_index +# name: "(Compile=openjdk8, Run=openjdk8) realtime index integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=realtime-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - &integration_append_ingestion +# name: "(Compile=openjdk8, Run=openjdk8) append ingestion integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - <<: *integration_append_ingestion +# name: "(Compile=openjdk8, Run=openjdk8) append ingestion integration test with Indexer" +# env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' +# +# - &integration_compaction_tests +# name: "(Compile=openjdk8, Run=openjdk8) compaction integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - <<: *integration_compaction_tests +# name: "(Compile=openjdk8, Run=openjdk8) compaction integration test with Indexer" +# 
env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' +# +# - &integration_tests +# name: "(Compile=openjdk8, Run=openjdk8) other integration tests" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - <<: *integration_tests +# name: "(Compile=openjdk8, Run=openjdk8) other integration tests with Indexer" +# env: TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' +# +# - <<: *integration_tests +# name: "(Compile=openjdk8, Run=openjdk8) leadership and high 
availability integration tests" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=high-availability' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# +# # Subset of integration tests to run with ZooKeeper 3.4.x for backwards compatibility +# - <<: *integration_tests +# name: "(Compile=openjdk8, Run=openjdk8, ZK=3.4) leadership and high availability integration tests" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=high-availability' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' ZK_VERSION=3.4 +# +# - <<: *integration_kafka_format_tests +# name: "(Compile=openjdk8, Run=openjdk8, ZK=3.4) Kafka index integration test with various formats" +# env: TESTNG_GROUPS='-Dgroups=kafka-data-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' ZK_VERSION=3.4 +# +# # END - Integration tests for Compile with Java 8 and Run with Java 8 +# +# # START - Integration tests for Compile with Java 8 and Run with Java 11 +# - <<: *integration_batch_index +# name: "(Compile=openjdk8, Run=openjdk11) batch index integration test" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' +# +# - <<: *integration_input_format +# name: "(Compile=openjdk8, Run=openjdk11) input format integration test" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' +# +# - <<: *integration_input_source +# name: "(Compile=openjdk8, Run=openjdk11) input source integration test" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' +# +# - <<: *integration_perfect_rollup_parallel_batch_index +# name: "(Compile=openjdk8, Run=openjdk11) perfect rollup parallel batch index integration test" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' +# +# - <<: *integration_query +# name: 
"(Compile=openjdk8, Run=openjdk11) query integration test" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' +# +# - <<: *integration_query_retry +# name: "(Compile=openjdk8, Run=openjdk11) query retry integration test for missing segments" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=query-retry' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' +# +# - <<: *integration_query_error +# name: "(Compile=openjdk8, Run=openjdk11) query error integration test for missing segments" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=query-error' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' +# +# - <<: *integration_security +# name: "(Compile=openjdk8, Run=openjdk11) security integration test" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=security' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' +# +# - <<: *integration_ldap_security +# name: "(Compile=openjdk8, Run=openjdk11) ldap security integration test" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=ldap-security' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' +# +# - <<: *integration_realtime_index +# name: "(Compile=openjdk8, Run=openjdk11) realtime index integration test" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=realtime-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' +# +# - <<: *integration_append_ingestion +# name: "(Compile=openjdk8, Run=openjdk11) append ingestion integration test" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' +# +# - <<: *integration_compaction_tests +# name: "(Compile=openjdk8, Run=openjdk11) compaction integration test" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' +# +# - <<: *integration_tests +# name: "(Compile=openjdk8, Run=openjdk11) other integration test" +# jdk: openjdk8 +# env: 
TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' +# +# - <<: *integration_tests +# name: "(Compile=openjdk8, Run=openjdk11) leadership and high availability integration tests" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=high-availability' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' +# +# # Subset of integration tests to run with ZooKeeper 3.4.x for backwards compatibility +# - <<: *integration_tests +# name: "(Compile=openjdk8, Run=openjdk11, ZK=3.4) leadership and high availability integration tests" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=high-availability' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' ZK_VERSION=3.4 +# +# # END - Integration tests for Compile with Java 8 and Run with Java 11 +# +# - &integration_batch_index_k8s +# name: "(Compile=openjdk8, Run=openjdk8, Cluster Build On K8s) ITNestedQueryPushDownTest integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: &integration_test_services_k8s +# - docker +# env: CONFIG_FILE='k8s_run_config_file.json' IT_TEST='-Dit.test=ITNestedQueryPushDownTest' POD_NAME=int-test POD_NAMESPACE=default BUILD_DRUID_CLSUTER=true +# script: &run_integration_test_k8s +# - ${MVN} verify -pl integration-tests -P int-tests-config-file ${IT_TEST} ${MAVEN_SKIP} -Dpod.name=${POD_NAME} 
-Dpod.namespace=${POD_NAMESPACE} -Dbuild.druid.cluster=${BUILD_DRUID_CLSUTER} +# after_failure: &integration_test_diags_k8s +# - for v in broker middlemanager router coordinator historical ; do +# echo "------------------------druid-tiny-cluster-"$v"s-0-------------------------"; +# sudo /usr/local/bin/kubectl logs --tail 1000 druid-tiny-cluster-"$v"s-0; +# done +# - name: "security vulnerabilities" +# stage: cron +# install: skip +# script: |- +# ${MVN} dependency-check:aggregate -pl '!integration-tests' || { echo " +# +# The OWASP dependency check has found security vulnerabilities. Please use a newer version +# of the dependency that does not have vulnerabilities. To see a report run +# `mvn dependency-check:check` +# If the analysis has false positives, +# they can be suppressed by adding entries to owasp-dependency-check-suppressions.xml (for more +# information, see https://jeremylong.github.io/DependencyCheck/general/suppression.html). +# +# " && false; } # Travis CI only supports per build (and not per-job notifications): https://github.com/travis-ci/travis-ci/issues/9888 notifications: From cdc5456913f67e8964463002fcc7ecd9501db3a7 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Tue, 13 Apr 2021 09:54:35 +0300 Subject: [PATCH 07/15] Issue #10953: ARM64 - Build and test on AWS Graviton2 node Documentation: - https://blog.travis-ci.com/2020-09-11-arm-on-aws - https://aws.amazon.com/blogs/opensource/getting-started-with-travis-ci-com-on-aws-graviton2/ Trying to fix the problem described at #10953 --- .travis.yml | 11 ++++++ .../druid/java/util/metrics/MonitorsTest.java | 10 +++++ .../java/util/metrics/SigarLoadTest.java | 13 ++++++- .../util/metrics/SigarPidDiscovererTest.java | 6 +++ .../hadoop/DatasourceIngestionSpecTest.java | 8 ++++ .../ExpressionVectorSelectorsTest.java | 37 ++++++++++++------- 6 files changed, 70 insertions(+), 15 deletions(-) diff --git a/.travis.yml b/.travis.yml index 7c4fc8589070..0dc9967791cc 100644 --- a/.travis.yml 
+++ b/.travis.yml @@ -382,6 +382,17 @@ jobs: after_success: - (cd web-console && travis_retry npm run codecov) # retry in case of network error + - name: "Build and test on ARM64 CPU architecture" + stage: Tests - phase 2 + arch: arm64-graviton2 + dist: focal + virt: vm + group: edge + jdk: openjdk11 + env: + - MAVEN_PROJECTS='core,indexing-hadoop,indexing-service,processing,server,services' + script: ${MVN} test -B -pl ${MAVEN_PROJECTS} -Ddruid.console.skip=true -DargLine=-Xmx3000m -T1C + - name: "web console end-to-end test" before_install: *setup_generate_license install: web-console/script/druid build diff --git a/core/src/test/java/org/apache/druid/java/util/metrics/MonitorsTest.java b/core/src/test/java/org/apache/druid/java/util/metrics/MonitorsTest.java index 872af3dca22a..6f58fa47d454 100644 --- a/core/src/test/java/org/apache/druid/java/util/metrics/MonitorsTest.java +++ b/core/src/test/java/org/apache/druid/java/util/metrics/MonitorsTest.java @@ -23,12 +23,22 @@ import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.emitter.core.Event; import org.junit.Assert; +import org.junit.Assume; +import org.junit.Before; import org.junit.Test; import java.util.List; public class MonitorsTest { + private static final String CPU_ARCH = System.getProperty("os.arch"); + + @Before + public void before() + { + // Do not run the tests on ARM64. 
Sigar library has no binaries for ARM64 + Assume.assumeFalse("aarch64".equals(CPU_ARCH)); + } @Test public void testSetFeed() diff --git a/core/src/test/java/org/apache/druid/java/util/metrics/SigarLoadTest.java b/core/src/test/java/org/apache/druid/java/util/metrics/SigarLoadTest.java index 42e1003dd475..40d7dada3c82 100644 --- a/core/src/test/java/org/apache/druid/java/util/metrics/SigarLoadTest.java +++ b/core/src/test/java/org/apache/druid/java/util/metrics/SigarLoadTest.java @@ -19,12 +19,23 @@ package org.apache.druid.java.util.metrics; -import junit.framework.Assert; import org.hyperic.sigar.Sigar; +import org.junit.Assert; +import org.junit.Assume; +import org.junit.Before; import org.junit.Test; public class SigarLoadTest { + private static final String CPU_ARCH = System.getProperty("os.arch"); + + @Before + public void before() + { + // Do not run the tests on ARM64. Sigar library has no binaries for ARM64 + Assume.assumeFalse("aarch64".equals(CPU_ARCH)); + } + @Test public void testSigarLoad() { diff --git a/core/src/test/java/org/apache/druid/java/util/metrics/SigarPidDiscovererTest.java b/core/src/test/java/org/apache/druid/java/util/metrics/SigarPidDiscovererTest.java index 26357f8dc9a7..32711ec0294d 100644 --- a/core/src/test/java/org/apache/druid/java/util/metrics/SigarPidDiscovererTest.java +++ b/core/src/test/java/org/apache/druid/java/util/metrics/SigarPidDiscovererTest.java @@ -19,13 +19,19 @@ package org.apache.druid.java.util.metrics; +import org.junit.Assume; import org.junit.Test; public class SigarPidDiscovererTest { + private static final String CPU_ARCH = System.getProperty("os.arch"); + @Test public void simpleTest() { + // Do not run the tests on ARM64. 
Sigar library has no binaries for ARM64 + Assume.assumeFalse("aarch64".equals(CPU_ARCH)); + // Just make sure we don't crash SigarPidDiscoverer.instance().getPid(); } diff --git a/indexing-hadoop/src/test/java/org/apache/druid/indexer/hadoop/DatasourceIngestionSpecTest.java b/indexing-hadoop/src/test/java/org/apache/druid/indexer/hadoop/DatasourceIngestionSpecTest.java index 98da95fc77b0..f2f6131225d4 100644 --- a/indexing-hadoop/src/test/java/org/apache/druid/indexer/hadoop/DatasourceIngestionSpecTest.java +++ b/indexing-hadoop/src/test/java/org/apache/druid/indexer/hadoop/DatasourceIngestionSpecTest.java @@ -22,12 +22,14 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; +import org.apache.druid.common.config.NullHandling; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.query.filter.SelectorDimFilter; import org.apache.druid.segment.TestHelper; import org.apache.druid.timeline.DataSegment; import org.joda.time.Interval; import org.junit.Assert; +import org.junit.Before; import org.junit.Test; import java.util.List; @@ -38,6 +40,12 @@ public class DatasourceIngestionSpecTest { private static final ObjectMapper MAPPER = TestHelper.makeJsonMapper(); + @Before + public void before() + { + NullHandling.initializeForTests(); + } + @Test public void testSingleIntervalSerde() throws Exception { diff --git a/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java b/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java index 54fc20deed02..5112c6d35282 100644 --- a/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java +++ b/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java @@ -20,7 +20,6 @@ package org.apache.druid.segment.virtual; import com.google.common.collect.ImmutableList; -import 
org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.granularity.Granularities; import org.apache.druid.java.util.common.guava.Sequence; import org.apache.druid.java.util.common.io.Closer; @@ -47,6 +46,7 @@ import org.apache.druid.timeline.partition.LinearShardSpec; import org.junit.AfterClass; import org.junit.Assert; +import org.junit.Assume; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @@ -99,25 +99,31 @@ public class ExpressionVectorSelectorsTest private static QueryableIndex INDEX; private static Closer CLOSER; + private static final String CPU_ARCH = System.getProperty("os.arch"); + @BeforeClass public static void setupClass() { CLOSER = Closer.create(); - final GeneratorSchemaInfo schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get("expression-testbench"); + // Do not run the tests on ARM64. + // SegmentGenerator#generate() fails with OutOfMemoryError on TravisCI ARM64 + if (!"aarch64".equals(CPU_ARCH)) { + final GeneratorSchemaInfo schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get("expression-testbench"); - final DataSegment dataSegment = DataSegment.builder() - .dataSource("foo") - .interval(schemaInfo.getDataInterval()) - .version("1") - .shardSpec(new LinearShardSpec(0)) - .size(0) - .build(); + final DataSegment dataSegment = DataSegment.builder() + .dataSource("foo") + .interval(schemaInfo.getDataInterval()) + .version("1") + .shardSpec(new LinearShardSpec(0)) + .size(0) + .build(); - final SegmentGenerator segmentGenerator = CLOSER.register(new SegmentGenerator()); - INDEX = CLOSER.register( - segmentGenerator.generate(dataSegment, schemaInfo, Granularities.HOUR, ROWS_PER_SEGMENT) - ); + final SegmentGenerator segmentGenerator = CLOSER.register(new SegmentGenerator()); + INDEX = CLOSER.register( + segmentGenerator.generate(dataSegment, schemaInfo, Granularities.HOUR, ROWS_PER_SEGMENT) + ); + } } @AfterClass @@ -143,6 +149,9 @@ public ExpressionVectorSelectorsTest(String expression) 
@Before public void setup() { + // Don't run the tests on ARM64. @BeforeClass fails with OutOfMemoryError on TravisCI + Assume.assumeFalse("aarch64".equals(CPU_ARCH)); + Expr parsed = Parser.parse(expression, ExprMacroTable.nil()); outputType = parsed.getOutputType(INDEX); if (outputType == null) { @@ -262,7 +271,7 @@ public static void sanityTestVectorizedExpressionSelectors( int rows = 0; while (!nonVectorized.isDone()) { Assert.assertEquals( - StringUtils.format("Failed at row %s", rows), + "Failed at row " + rows, nonSelector.getObject(), results.get(rows) ); From 3459d330bb54315bb0622cde7b5f070e67cfd2a3 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Fri, 16 Apr 2021 13:35:13 +0300 Subject: [PATCH 08/15] Issue #10953: Split the modules tested on ARM64 in two jobs ... because at the moment they took 1h which is a little bit above the TravisCI limit of 50mins per job and because @clintropolis requested to add one more module - sql --- .travis.yml | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 0dc9967791cc..3b046a9588be 100644 --- a/.travis.yml +++ b/.travis.yml @@ -382,7 +382,7 @@ jobs: after_success: - (cd web-console && travis_retry npm run codecov) # retry in case of network error - - name: "Build and test on ARM64 CPU architecture" + - name: "Build and test on ARM64 CPU architecture (1)" stage: Tests - phase 2 arch: arm64-graviton2 dist: focal @@ -390,7 +390,18 @@ jobs: group: edge jdk: openjdk11 env: - - MAVEN_PROJECTS='core,indexing-hadoop,indexing-service,processing,server,services' + - MAVEN_PROJECTS='core,indexing-hadoop,indexing-service,processing' + script: ${MVN} test -B -pl ${MAVEN_PROJECTS} -Ddruid.console.skip=true -DargLine=-Xmx3000m -T1C + + - name: "Build and test on ARM64 CPU architecture (2)" + stage: Tests - phase 2 + arch: arm64-graviton2 + dist: focal + virt: vm + group: edge + jdk: openjdk11 + env: + - MAVEN_PROJECTS='core,sql,server,services' script: 
${MVN} test -B -pl ${MAVEN_PROJECTS} -Ddruid.console.skip=true -DargLine=-Xmx3000m -T1C - name: "web console end-to-end test" From e6dd1a558f02a375455f365368b0d6b2ad40df33 Mon Sep 17 00:00:00 2001 From: Will Xu <2bethere@gmail.com> Date: Mon, 18 Apr 2022 15:19:00 -0400 Subject: [PATCH 09/15] Resovling merge conflict --- .../segment/virtual/ExpressionVectorSelectorsTest.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java b/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java index 5112c6d35282..36b34776f44d 100644 --- a/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java +++ b/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java @@ -94,7 +94,7 @@ public class ExpressionVectorSelectorsTest "concat(string1, nonexistent)" ); - private static final int ROWS_PER_SEGMENT = 10_000; + private static final int ROWS_PER_SEGMENT = 50_000; private static QueryableIndex INDEX; private static Closer CLOSER; @@ -108,7 +108,7 @@ public static void setupClass() // Do not run the tests on ARM64. // SegmentGenerator#generate() fails with OutOfMemoryError on TravisCI ARM64 - if (!"aarch64".equals(CPU_ARCH)) { +// if (!"aarch64".equals(CPU_ARCH)) { final GeneratorSchemaInfo schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get("expression-testbench"); final DataSegment dataSegment = DataSegment.builder() @@ -123,7 +123,7 @@ public static void setupClass() INDEX = CLOSER.register( segmentGenerator.generate(dataSegment, schemaInfo, Granularities.HOUR, ROWS_PER_SEGMENT) ); - } +// } } @AfterClass @@ -150,7 +150,7 @@ public ExpressionVectorSelectorsTest(String expression) public void setup() { // Don't run the tests on ARM64. 
@BeforeClass fails with OutOfMemoryError on TravisCI - Assume.assumeFalse("aarch64".equals(CPU_ARCH)); +// Assume.assumeFalse("aarch64".equals(CPU_ARCH)); Expr parsed = Parser.parse(expression, ExprMacroTable.nil()); outputType = parsed.getOutputType(INDEX); From 48c392c32075de21c4145e0cbd379a6948086797 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Mon, 19 Apr 2021 09:41:11 +0300 Subject: [PATCH 10/15] Fix Checkstyle issues --- .../ExpressionVectorSelectorsTest.java | 30 ++++++++----------- 1 file changed, 12 insertions(+), 18 deletions(-) diff --git a/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java b/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java index 36b34776f44d..0edbc8e421ba 100644 --- a/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java +++ b/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java @@ -46,7 +46,6 @@ import org.apache.druid.timeline.partition.LinearShardSpec; import org.junit.AfterClass; import org.junit.Assert; -import org.junit.Assume; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @@ -108,22 +107,20 @@ public static void setupClass() // Do not run the tests on ARM64. 
// SegmentGenerator#generate() fails with OutOfMemoryError on TravisCI ARM64 -// if (!"aarch64".equals(CPU_ARCH)) { - final GeneratorSchemaInfo schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get("expression-testbench"); + final GeneratorSchemaInfo schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get("expression-testbench"); - final DataSegment dataSegment = DataSegment.builder() - .dataSource("foo") - .interval(schemaInfo.getDataInterval()) - .version("1") - .shardSpec(new LinearShardSpec(0)) - .size(0) - .build(); + final DataSegment dataSegment = DataSegment.builder() + .dataSource("foo") + .interval(schemaInfo.getDataInterval()) + .version("1") + .shardSpec(new LinearShardSpec(0)) + .size(0) + .build(); - final SegmentGenerator segmentGenerator = CLOSER.register(new SegmentGenerator()); - INDEX = CLOSER.register( - segmentGenerator.generate(dataSegment, schemaInfo, Granularities.HOUR, ROWS_PER_SEGMENT) - ); -// } + final SegmentGenerator segmentGenerator = CLOSER.register(new SegmentGenerator()); + INDEX = CLOSER.register( + segmentGenerator.generate(dataSegment, schemaInfo, Granularities.HOUR, ROWS_PER_SEGMENT) + ); } @AfterClass @@ -149,9 +146,6 @@ public ExpressionVectorSelectorsTest(String expression) @Before public void setup() { - // Don't run the tests on ARM64. 
@BeforeClass fails with OutOfMemoryError on TravisCI -// Assume.assumeFalse("aarch64".equals(CPU_ARCH)); - Expr parsed = Parser.parse(expression, ExprMacroTable.nil()); outputType = parsed.getOutputType(INDEX); if (outputType == null) { From 0590d28574309fe64510e9beda4c0564dd5d2944 Mon Sep 17 00:00:00 2001 From: Martin Tzvetanov Grigorov Date: Tue, 20 Apr 2021 09:36:53 +0300 Subject: [PATCH 11/15] Issue #10953 - Remove unused constant and obsolete comments --- .../druid/segment/virtual/ExpressionVectorSelectorsTest.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java b/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java index 0edbc8e421ba..a8a473a611f1 100644 --- a/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java +++ b/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java @@ -98,15 +98,11 @@ public class ExpressionVectorSelectorsTest private static QueryableIndex INDEX; private static Closer CLOSER; - private static final String CPU_ARCH = System.getProperty("os.arch"); - @BeforeClass public static void setupClass() { CLOSER = Closer.create(); - // Do not run the tests on ARM64. 
- // SegmentGenerator#generate() fails with OutOfMemoryError on TravisCI ARM64 final GeneratorSchemaInfo schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get("expression-testbench"); final DataSegment dataSegment = DataSegment.builder() From 454ae2899af7e0bfa39368e14dac06461b8e5816 Mon Sep 17 00:00:00 2001 From: Will Xu <2bethere@gmail.com> Date: Mon, 18 Apr 2022 15:28:35 -0400 Subject: [PATCH 12/15] Testing out arm builds --- .travis.yml | 1382 +++++++++++++++++++++++++-------------------------- 1 file changed, 691 insertions(+), 691 deletions(-) diff --git a/.travis.yml b/.travis.yml index 3b046a9588be..2d2f94d81bfa 100644 --- a/.travis.yml +++ b/.travis.yml @@ -69,319 +69,319 @@ stages: jobs: include: - - name: "animal sniffer checks" - stage: Tests - phase 1 - script: ${MVN} animal-sniffer:check --fail-at-end - - - name: "checkstyle" - script: ${MVN} checkstyle:checkstyle --fail-at-end - - - name: "enforcer checks" - script: ${MVN} enforcer:enforce --fail-at-end - - - name: "forbidden api checks" - script: ${MVN} forbiddenapis:check forbiddenapis:testCheck --fail-at-end - - - name: "pmd checks" - script: ${MVN} pmd:check --fail-at-end # TODO: consider adding pmd:cpd-check - - - name: "spotbugs checks" - script: ${MVN} spotbugs:check --fail-at-end -pl '!benchmarks' - - - name: "license checks" - install: skip - before_script: &setup_generate_license - - sudo apt-get update && sudo apt-get install python3 python3-pip python3-setuptools -y - - ./check_test_suite.py && travis_terminate 0 || echo 'Continuing setup' - - pip3 install wheel # install wheel first explicitly - - pip3 install pyyaml==5.4.1 - script: - - > - ${MVN} apache-rat:check -Prat --fail-at-end - -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn - -Drat.consoleOutput=true - # Generate dependency reports and checks they are valid. 
When running on Travis CI, 2 cores are available - # (https://docs.travis-ci.com/user/reference/overview/#virtualisation-environment-vs-operating-system). - - mkdir -p target - - distribution/bin/generate-license-dependency-reports.py . target --clean-maven-artifact-transfer --parallel 2 - - distribution/bin/check-licenses.py licenses.yaml target/license-reports - - name: "script checks" - install: skip - # who watches the watchers? - script: ./check_test_suite_test.py - - - name: "(openjdk11) strict compilation" - install: skip - # errorprone requires JDK 11 - jdk: openjdk11 - # Strict compilation requires more than 2 GB - script: > - ./check_test_suite.py && travis_terminate 0 || MAVEN_OPTS='-Xmx3000m' ${MVN} clean -DstrictCompile compile test-compile --fail-at-end - ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} - - - name: "analyze dependencies" - script: |- - MAVEN_OPTS='-Xmx3000m' ${MVN} ${MAVEN_SKIP} dependency:analyze -DoutputXML=true -DignoreNonCompile=true -DfailOnWarning=true || { echo " - - The dependency analysis has found a dependency that is either: - - 1) Used and undeclared: These are available as a transitive dependency but should be explicitly - added to the POM to ensure the dependency version. The XML to add the dependencies to the POM is - shown above. - - 2) Unused and declared: These are not needed and removing them from the POM will speed up the build - and reduce the artifact size. The dependencies to remove are shown above. 
- - If there are false positive dependency analysis warnings, they can be suppressed: - https://maven.apache.org/plugins/maven-dependency-plugin/analyze-mojo.html#usedDependencies - https://maven.apache.org/plugins/maven-dependency-plugin/examples/exclude-dependencies-from-dependency-analysis.html - - For more information, refer to: - https://maven.apache.org/plugins/maven-dependency-plugin/analyze-mojo.html - - " && false; } - - - name: "analyze hadoop 3 dependencies" - script: |- - MAVEN_OPTS='-Xmx3000m' ${MVN} ${MAVEN_SKIP} dependency:analyze -DoutputXML=true -DignoreNonCompile=true -DfailOnWarning=true -Phadoop3 || { echo " - - The dependency analysis has found a dependency that is either: - - 1) Used and undeclared: These are available as a transitive dependency but should be explicitly - added to the POM to ensure the dependency version. The XML to add the dependencies to the POM is - shown above. - - 2) Unused and declared: These are not needed and removing them from the POM will speed up the build - and reduce the artifact size. The dependencies to remove are shown above. 
- - If there are false positive dependency analysis warnings, they can be suppressed: - https://maven.apache.org/plugins/maven-dependency-plugin/analyze-mojo.html#usedDependencies - https://maven.apache.org/plugins/maven-dependency-plugin/examples/exclude-dependencies-from-dependency-analysis.html - - For more information, refer to: - https://maven.apache.org/plugins/maven-dependency-plugin/analyze-mojo.html - - " && false; } - - - name: "intellij inspections" - script: > - ./check_test_suite.py && travis_terminate 0 || docker run --rm - -v $(pwd):/project - -v ~/.m2:/home/inspect/.m2 - ccaominh/intellij-inspect:1.0.0 - /project/pom.xml - /project/.idea/inspectionProfiles/Druid.xml - --levels ERROR - --scope JavaInspectionsScope - - - &package - name: "(openjdk8) packaging check" - install: skip - before_script: *setup_generate_license - script: > - MAVEN_OPTS='-Xmx3000m' ${MVN} clean install -Prat -Pdist -Pbundle-contrib-exts --fail-at-end - -pl '!benchmarks' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} -Ddruid.console.skip=false -T1C - - - <<: *package - name: "(openjdk11) packaging check" - stage: Tests - phase 2 - jdk: openjdk11 - - - <<: *package - name: "(openjdk15) packaging check" - stage: Tests - phase 2 - jdk: openjdk15 - - - &test_processing_module - name: "(openjdk8) processing module test" - stage: Tests - phase 1 - env: - - MAVEN_PROJECTS='processing' - before_script: - - export DRUID_USE_DEFAULT_VALUE_FOR_NULL=true - script: - - unset _JAVA_OPTIONS - # Set MAVEN_OPTS for Surefire launcher. Skip remoteresources to avoid intermittent connection timeouts when - # resolving the SIGAR dependency. 
- - > - MAVEN_OPTS='-Xmx2048m' ${MVN} test -pl ${MAVEN_PROJECTS} - ${MAVEN_SKIP} -Dremoteresources.skip=true -Ddruid.generic.useDefaultValueForNull=${DRUID_USE_DEFAULT_VALUE_FOR_NULL} - - sh -c "dmesg | egrep -i '(oom|out of memory|kill process|killed).*' -C 1 || exit 0" - - free -m - - ${MVN} -pl ${MAVEN_PROJECTS} jacoco:report - # Add merge target branch to determine diff (see https://github.com/travis-ci/travis-ci/issues/6069). - # This is not needed for build triggered by tags, since there will be no code diff. - - echo "TRAVIS_BRANCH=${TRAVIS_BRANCH}" # for debugging - - if [[ -z "${TRAVIS_TAG}" ]]; then git remote set-branches --add origin ${TRAVIS_BRANCH} && git fetch; fi - # Determine the modified files that match the maven projects being tested. We use maven project lists that - # either exclude (starts with "!") or include (does not start with "!"), so both cases need to be handled. - # If the build is triggered by a tag, an error will be printed, but `all_files` will be correctly set to empty - # so that the coverage check is skipped. - - all_files="$(git diff --name-only origin/${TRAVIS_BRANCH}...HEAD | grep "\.java$" || [[ $? == 1 ]])" - - for f in ${all_files}; do echo $f; done # for debugging - - > - if [[ "${MAVEN_PROJECTS}" = \!* ]]; then - regex="${MAVEN_PROJECTS:1}"; - regex="^${regex//,\!/\\|^}"; - project_files="$(echo "${all_files}" | grep -v "${regex}" || [[ $? == 1 ]])"; - else - regex="^${MAVEN_PROJECTS//,/\\|^}"; - project_files="$(echo "${all_files}" | grep "${regex}" || [[ $? == 1 ]])"; - fi - - for f in ${project_files}; do echo $f; done # for debugging - # Check diff code coverage for the maven projects being tested (retry install in case of network error). - # Currently, the function coverage check is not reliable, so it is disabled. 
- - > - if [ -n "${project_files}" ]; then - travis_retry npm install @connectis/diff-test-coverage@1.5.3 - && git diff origin/${TRAVIS_BRANCH}...HEAD -- ${project_files} - | node_modules/.bin/diff-test-coverage - --coverage "**/target/site/jacoco/jacoco.xml" - --type jacoco - --line-coverage 50 - --branch-coverage 50 - --function-coverage 0 - --log-template "coverage-lines-complete" - --log-template "coverage-files-complete" - --log-template "totals-complete" - --log-template "errors" - -- - || { printf "\n\n****FAILED****\nDiff code coverage check failed. To view coverage report, run 'mvn clean test jacoco:report' and open 'target/site/jacoco/index.html'\nFor more details on how to run code coverage locally, follow instructions here - https://github.com/apache/druid/blob/master/dev/code-review/code-coverage.md#running-code-coverage-locally\n\n" && false; } - fi - after_success: - # retry in case of network error - - travis_retry curl -o codecov.sh -s https://codecov.io/bash - - travis_retry bash codecov.sh -X gcov - - - <<: *test_processing_module - name: "(openjdk11) processing module test" - stage: Tests - phase 2 - jdk: openjdk11 - - - <<: *test_processing_module - name: "(openjdk15) processing module test" - stage: Tests - phase 2 - jdk: openjdk15 - - - &test_processing_module_sqlcompat - <<: *test_processing_module - name: "(openjdk8) processing module test (SQL Compatibility)" - stage: Tests - phase 1 - before_script: &setup_sqlcompat - - export DRUID_USE_DEFAULT_VALUE_FOR_NULL=false - - - <<: *test_processing_module_sqlcompat - name: "(openjdk11) processing module test (SQL Compatibility)" - stage: Tests - phase 2 - jdk: openjdk11 - - - <<: *test_processing_module_sqlcompat - name: "(openjdk15) processing module test (SQL Compatibility)" - stage: Tests - phase 2 - jdk: openjdk15 - - - &test_indexing_module - <<: *test_processing_module - name: "(openjdk8) indexing modules test" - env: - - 
MAVEN_PROJECTS='indexing-hadoop,indexing-service,extensions-core/kafka-indexing-service,extensions-core/kinesis-indexing-service' - - - <<: *test_indexing_module - name: "(openjdk11) indexing modules test" - stage: Tests - phase 2 - jdk: openjdk11 - - - <<: *test_indexing_module - name: "(openjdk15) indexing modules test" - stage: Tests - phase 2 - jdk: openjdk15 - - - &test_indexing_module_sqlcompat - <<: *test_indexing_module - name: "(openjdk8) indexing modules test (SQL Compatibility)" - stage: Tests - phase 1 - before_script: *setup_sqlcompat - - - <<: *test_indexing_module_sqlcompat - name: "(openjdk11) indexing modules test (SQL Compatibility)" - stage: Tests - phase 2 - jdk: openjdk11 - - - <<: *test_indexing_module_sqlcompat - name: "(openjdk15) indexing modules test (SQL Compatibility)" - stage: Tests - phase 2 - jdk: openjdk15 - - - &test_server_module - <<: *test_processing_module - name: "(openjdk8) server module test" - env: - - MAVEN_PROJECTS='server' - - - <<: *test_server_module - name: "(openjdk11) server module test" - stage: Tests - phase 2 - jdk: openjdk11 - - - <<: *test_server_module - name: "(openjdk15) server module test" - stage: Tests - phase 2 - jdk: openjdk15 - - - &test_server_module_sqlcompat - <<: *test_server_module - name: "(openjdk8) server module test (SQL Compatibility)" - before_script: *setup_sqlcompat - - - <<: *test_server_module_sqlcompat - name: "(openjdk11) server module test (SQL Compatibility)" - stage: Tests - phase 2 - jdk: openjdk11 - - - <<: *test_server_module_sqlcompat - name: "(openjdk15) server module test (SQL Compatibility)" - stage: Tests - phase 2 - jdk: openjdk15 - - - &test_other_modules - <<: *test_processing_module - name: "(openjdk8) other modules test" - env: - - MAVEN_PROJECTS='!processing,!indexing-hadoop,!indexing-service,!extensions-core/kafka-indexing-service,!extensions-core/kinesis-indexing-service,!server,!web-console,!integration-tests' - - - <<: *test_other_modules - name: "(openjdk11) other 
modules test" - stage: Tests - phase 2 - jdk: openjdk11 - - - <<: *test_other_modules - name: "(openjdk15) other modules test" - stage: Tests - phase 2 - jdk: openjdk15 - - - &test_other_modules_sqlcompat - <<: *test_other_modules - name: "(openjdk8) other modules test (SQL Compatibility)" - before_script: *setup_sqlcompat - - - <<: *test_other_modules_sqlcompat - name: "(openjdk11) other modules test (SQL Compatibility)" - stage: Tests - phase 2 - jdk: openjdk11 - - - <<: *test_other_modules_sqlcompat - name: "(openjdk15) other modules test (SQL Compatibility)" - stage: Tests - phase 2 - jdk: openjdk15 - - - name: "web console" - install: skip - stage: Tests - phase 1 - script: - - ./check_test_suite.py && travis_terminate 0 || ${MVN} test -pl 'web-console' - after_success: - - (cd web-console && travis_retry npm run codecov) # retry in case of network error - +# - name: "animal sniffer checks" +# stage: Tests - phase 1 +# script: ${MVN} animal-sniffer:check --fail-at-end +# +# - name: "checkstyle" +# script: ${MVN} checkstyle:checkstyle --fail-at-end +# +# - name: "enforcer checks" +# script: ${MVN} enforcer:enforce --fail-at-end +# +# - name: "forbidden api checks" +# script: ${MVN} forbiddenapis:check forbiddenapis:testCheck --fail-at-end +# +# - name: "pmd checks" +# script: ${MVN} pmd:check --fail-at-end # TODO: consider adding pmd:cpd-check +# +# - name: "spotbugs checks" +# script: ${MVN} spotbugs:check --fail-at-end -pl '!benchmarks' +# +# - name: "license checks" +# install: skip +# before_script: &setup_generate_license +# - sudo apt-get update && sudo apt-get install python3 python3-pip python3-setuptools -y +# - ./check_test_suite.py && travis_terminate 0 || echo 'Continuing setup' +# - pip3 install wheel # install wheel first explicitly +# - pip3 install pyyaml==5.4.1 +# script: +# - > +# ${MVN} apache-rat:check -Prat --fail-at-end +# -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn +# -Drat.consoleOutput=true 
+# # Generate dependency reports and checks they are valid. When running on Travis CI, 2 cores are available +# # (https://docs.travis-ci.com/user/reference/overview/#virtualisation-environment-vs-operating-system). +# - mkdir -p target +# - distribution/bin/generate-license-dependency-reports.py . target --clean-maven-artifact-transfer --parallel 2 +# - distribution/bin/check-licenses.py licenses.yaml target/license-reports +# - name: "script checks" +# install: skip +# # who watches the watchers? +# script: ./check_test_suite_test.py +# +# - name: "(openjdk11) strict compilation" +# install: skip +# # errorprone requires JDK 11 +# jdk: openjdk11 +# # Strict compilation requires more than 2 GB +# script: > +# ./check_test_suite.py && travis_terminate 0 || MAVEN_OPTS='-Xmx3000m' ${MVN} clean -DstrictCompile compile test-compile --fail-at-end +# ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} +# +# - name: "analyze dependencies" +# script: |- +# MAVEN_OPTS='-Xmx3000m' ${MVN} ${MAVEN_SKIP} dependency:analyze -DoutputXML=true -DignoreNonCompile=true -DfailOnWarning=true || { echo " +# +# The dependency analysis has found a dependency that is either: +# +# 1) Used and undeclared: These are available as a transitive dependency but should be explicitly +# added to the POM to ensure the dependency version. The XML to add the dependencies to the POM is +# shown above. +# +# 2) Unused and declared: These are not needed and removing them from the POM will speed up the build +# and reduce the artifact size. The dependencies to remove are shown above. 
+# +# If there are false positive dependency analysis warnings, they can be suppressed: +# https://maven.apache.org/plugins/maven-dependency-plugin/analyze-mojo.html#usedDependencies +# https://maven.apache.org/plugins/maven-dependency-plugin/examples/exclude-dependencies-from-dependency-analysis.html +# +# For more information, refer to: +# https://maven.apache.org/plugins/maven-dependency-plugin/analyze-mojo.html +# +# " && false; } +# +# - name: "analyze hadoop 3 dependencies" +# script: |- +# MAVEN_OPTS='-Xmx3000m' ${MVN} ${MAVEN_SKIP} dependency:analyze -DoutputXML=true -DignoreNonCompile=true -DfailOnWarning=true -Phadoop3 || { echo " +# +# The dependency analysis has found a dependency that is either: +# +# 1) Used and undeclared: These are available as a transitive dependency but should be explicitly +# added to the POM to ensure the dependency version. The XML to add the dependencies to the POM is +# shown above. +# +# 2) Unused and declared: These are not needed and removing them from the POM will speed up the build +# and reduce the artifact size. The dependencies to remove are shown above. 
+# +# If there are false positive dependency analysis warnings, they can be suppressed: +# https://maven.apache.org/plugins/maven-dependency-plugin/analyze-mojo.html#usedDependencies +# https://maven.apache.org/plugins/maven-dependency-plugin/examples/exclude-dependencies-from-dependency-analysis.html +# +# For more information, refer to: +# https://maven.apache.org/plugins/maven-dependency-plugin/analyze-mojo.html +# +# " && false; } +# +# - name: "intellij inspections" +# script: > +# ./check_test_suite.py && travis_terminate 0 || docker run --rm +# -v $(pwd):/project +# -v ~/.m2:/home/inspect/.m2 +# ccaominh/intellij-inspect:1.0.0 +# /project/pom.xml +# /project/.idea/inspectionProfiles/Druid.xml +# --levels ERROR +# --scope JavaInspectionsScope +# +# - &package +# name: "(openjdk8) packaging check" +# install: skip +# before_script: *setup_generate_license +# script: > +# MAVEN_OPTS='-Xmx3000m' ${MVN} clean install -Prat -Pdist -Pbundle-contrib-exts --fail-at-end +# -pl '!benchmarks' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} -Ddruid.console.skip=false -T1C +# +# - <<: *package +# name: "(openjdk11) packaging check" +# stage: Tests - phase 2 +# jdk: openjdk11 +# +# - <<: *package +# name: "(openjdk15) packaging check" +# stage: Tests - phase 2 +# jdk: openjdk15 +# +# - &test_processing_module +# name: "(openjdk8) processing module test" +# stage: Tests - phase 1 +# env: +# - MAVEN_PROJECTS='processing' +# before_script: +# - export DRUID_USE_DEFAULT_VALUE_FOR_NULL=true +# script: +# - unset _JAVA_OPTIONS +# # Set MAVEN_OPTS for Surefire launcher. Skip remoteresources to avoid intermittent connection timeouts when +# # resolving the SIGAR dependency. 
+# - > +# MAVEN_OPTS='-Xmx2048m' ${MVN} test -pl ${MAVEN_PROJECTS} +# ${MAVEN_SKIP} -Dremoteresources.skip=true -Ddruid.generic.useDefaultValueForNull=${DRUID_USE_DEFAULT_VALUE_FOR_NULL} +# - sh -c "dmesg | egrep -i '(oom|out of memory|kill process|killed).*' -C 1 || exit 0" +# - free -m +# - ${MVN} -pl ${MAVEN_PROJECTS} jacoco:report +# # Add merge target branch to determine diff (see https://github.com/travis-ci/travis-ci/issues/6069). +# # This is not needed for build triggered by tags, since there will be no code diff. +# - echo "TRAVIS_BRANCH=${TRAVIS_BRANCH}" # for debugging +# - if [[ -z "${TRAVIS_TAG}" ]]; then git remote set-branches --add origin ${TRAVIS_BRANCH} && git fetch; fi +# # Determine the modified files that match the maven projects being tested. We use maven project lists that +# # either exclude (starts with "!") or include (does not start with "!"), so both cases need to be handled. +# # If the build is triggered by a tag, an error will be printed, but `all_files` will be correctly set to empty +# # so that the coverage check is skipped. +# - all_files="$(git diff --name-only origin/${TRAVIS_BRANCH}...HEAD | grep "\.java$" || [[ $? == 1 ]])" +# - for f in ${all_files}; do echo $f; done # for debugging +# - > +# if [[ "${MAVEN_PROJECTS}" = \!* ]]; then +# regex="${MAVEN_PROJECTS:1}"; +# regex="^${regex//,\!/\\|^}"; +# project_files="$(echo "${all_files}" | grep -v "${regex}" || [[ $? == 1 ]])"; +# else +# regex="^${MAVEN_PROJECTS//,/\\|^}"; +# project_files="$(echo "${all_files}" | grep "${regex}" || [[ $? == 1 ]])"; +# fi +# - for f in ${project_files}; do echo $f; done # for debugging +# # Check diff code coverage for the maven projects being tested (retry install in case of network error). +# # Currently, the function coverage check is not reliable, so it is disabled. 
+# - > +# if [ -n "${project_files}" ]; then +# travis_retry npm install @connectis/diff-test-coverage@1.5.3 +# && git diff origin/${TRAVIS_BRANCH}...HEAD -- ${project_files} +# | node_modules/.bin/diff-test-coverage +# --coverage "**/target/site/jacoco/jacoco.xml" +# --type jacoco +# --line-coverage 50 +# --branch-coverage 50 +# --function-coverage 0 +# --log-template "coverage-lines-complete" +# --log-template "coverage-files-complete" +# --log-template "totals-complete" +# --log-template "errors" +# -- +# || { printf "\n\n****FAILED****\nDiff code coverage check failed. To view coverage report, run 'mvn clean test jacoco:report' and open 'target/site/jacoco/index.html'\nFor more details on how to run code coverage locally, follow instructions here - https://github.com/apache/druid/blob/master/dev/code-review/code-coverage.md#running-code-coverage-locally\n\n" && false; } +# fi +# after_success: +# # retry in case of network error +# - travis_retry curl -o codecov.sh -s https://codecov.io/bash +# - travis_retry bash codecov.sh -X gcov +# +# - <<: *test_processing_module +# name: "(openjdk11) processing module test" +# stage: Tests - phase 2 +# jdk: openjdk11 +# +# - <<: *test_processing_module +# name: "(openjdk15) processing module test" +# stage: Tests - phase 2 +# jdk: openjdk15 +# +# - &test_processing_module_sqlcompat +# <<: *test_processing_module +# name: "(openjdk8) processing module test (SQL Compatibility)" +# stage: Tests - phase 1 +# before_script: &setup_sqlcompat +# - export DRUID_USE_DEFAULT_VALUE_FOR_NULL=false +# +# - <<: *test_processing_module_sqlcompat +# name: "(openjdk11) processing module test (SQL Compatibility)" +# stage: Tests - phase 2 +# jdk: openjdk11 +# +# - <<: *test_processing_module_sqlcompat +# name: "(openjdk15) processing module test (SQL Compatibility)" +# stage: Tests - phase 2 +# jdk: openjdk15 +# +# - &test_indexing_module +# <<: *test_processing_module +# name: "(openjdk8) indexing modules test" +# env: +# - 
MAVEN_PROJECTS='indexing-hadoop,indexing-service,extensions-core/kafka-indexing-service,extensions-core/kinesis-indexing-service' +# +# - <<: *test_indexing_module +# name: "(openjdk11) indexing modules test" +# stage: Tests - phase 2 +# jdk: openjdk11 +# +# - <<: *test_indexing_module +# name: "(openjdk15) indexing modules test" +# stage: Tests - phase 2 +# jdk: openjdk15 +# +# - &test_indexing_module_sqlcompat +# <<: *test_indexing_module +# name: "(openjdk8) indexing modules test (SQL Compatibility)" +# stage: Tests - phase 1 +# before_script: *setup_sqlcompat +# +# - <<: *test_indexing_module_sqlcompat +# name: "(openjdk11) indexing modules test (SQL Compatibility)" +# stage: Tests - phase 2 +# jdk: openjdk11 +# +# - <<: *test_indexing_module_sqlcompat +# name: "(openjdk15) indexing modules test (SQL Compatibility)" +# stage: Tests - phase 2 +# jdk: openjdk15 +# +# - &test_server_module +# <<: *test_processing_module +# name: "(openjdk8) server module test" +# env: +# - MAVEN_PROJECTS='server' +# +# - <<: *test_server_module +# name: "(openjdk11) server module test" +# stage: Tests - phase 2 +# jdk: openjdk11 +# +# - <<: *test_server_module +# name: "(openjdk15) server module test" +# stage: Tests - phase 2 +# jdk: openjdk15 +# +# - &test_server_module_sqlcompat +# <<: *test_server_module +# name: "(openjdk8) server module test (SQL Compatibility)" +# before_script: *setup_sqlcompat +# +# - <<: *test_server_module_sqlcompat +# name: "(openjdk11) server module test (SQL Compatibility)" +# stage: Tests - phase 2 +# jdk: openjdk11 +# +# - <<: *test_server_module_sqlcompat +# name: "(openjdk15) server module test (SQL Compatibility)" +# stage: Tests - phase 2 +# jdk: openjdk15 +# +# - &test_other_modules +# <<: *test_processing_module +# name: "(openjdk8) other modules test" +# env: +# - 
MAVEN_PROJECTS='!processing,!indexing-hadoop,!indexing-service,!extensions-core/kafka-indexing-service,!extensions-core/kinesis-indexing-service,!server,!web-console,!integration-tests' +# +# - <<: *test_other_modules +# name: "(openjdk11) other modules test" +# stage: Tests - phase 2 +# jdk: openjdk11 +# +# - <<: *test_other_modules +# name: "(openjdk15) other modules test" +# stage: Tests - phase 2 +# jdk: openjdk15 +# +# - &test_other_modules_sqlcompat +# <<: *test_other_modules +# name: "(openjdk8) other modules test (SQL Compatibility)" +# before_script: *setup_sqlcompat +# +# - <<: *test_other_modules_sqlcompat +# name: "(openjdk11) other modules test (SQL Compatibility)" +# stage: Tests - phase 2 +# jdk: openjdk11 +# +# - <<: *test_other_modules_sqlcompat +# name: "(openjdk15) other modules test (SQL Compatibility)" +# stage: Tests - phase 2 +# jdk: openjdk15 +# +# - name: "web console" +# install: skip +# stage: Tests - phase 1 +# script: +# - ./check_test_suite.py && travis_terminate 0 || ${MVN} test -pl 'web-console' +# after_success: +# - (cd web-console && travis_retry npm run codecov) # retry in case of network error +# - name: "Build and test on ARM64 CPU architecture (1)" stage: Tests - phase 2 arch: arm64-graviton2 @@ -403,384 +403,384 @@ jobs: env: - MAVEN_PROJECTS='core,sql,server,services' script: ${MVN} test -B -pl ${MAVEN_PROJECTS} -Ddruid.console.skip=true -DargLine=-Xmx3000m -T1C - - - name: "web console end-to-end test" - before_install: *setup_generate_license - install: web-console/script/druid build - before_script: - - ./check_test_suite.py && travis_terminate 0 || echo 'Starting nvm install...' 
- - nvm install 14.19.0 - - web-console/script/druid start - script: (cd web-console && npm run test-e2e) - after_script: web-console/script/druid stop - - - name: "docs" - install: ./check_test_suite.py && travis_terminate 0 || (cd website && npm install) - script: |- - (cd website && npm run lint && npm run spellcheck) || { echo " - - If there are spell check errors: - - 1) Suppressing False Positives: Edit website/.spelling to add suppressions. Instructions - are at the top of the file and explain how to suppress false positives either globally or - within a particular file. - - 2) Running Spell Check Locally: cd website && npm install && npm run spellcheck - - For more information, refer to: https://www.npmjs.com/package/markdown-spellcheck - - " && false; } - - # Integration tests Java Compile version is set by the machine environment jdk (set by the jdk key) - # Integration tests Java Runtime version is set by the JVM_RUNTIME env property (set env key to -Djvm.runtime=) - # Integration tests will either use MiddleManagers or Indexers - # (Currently integration tests only support running with jvm runtime 8 and 11) - # START - Integration tests for Compile with Java 8 and Run with Java 8 - - &integration_batch_index - name: "(Compile=openjdk8, Run=openjdk8) batch index integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: &integration_test_services - - docker - env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: &run_integration_test - - ${MVN} verify -pl integration-tests -P integration-tests ${TESTNG_GROUPS} ${JVM_RUNTIME} -Dit.indexer=${USE_INDEXER} ${MAVEN_SKIP} -Doverride.config.path=${OVERRIDE_CONFIG_PATH} - after_failure: &integration_test_diags - - for v in ~/shared/logs/*.log ; do - echo $v logtail ======================== ; tail -100 $v ; - done - - for v in broker middlemanager overlord router coordinator historical ; do - echo $v dmesg ======================== ; - docker exec 
-it druid-$v sh -c 'dmesg | tail -3' ; - done - - - <<: *integration_batch_index - name: "(Compile=openjdk8, Run=openjdk8) batch index integration test with Indexer" - env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' - - - &integration_input_format - name: "(Compile=openjdk8, Run=openjdk8) input format integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - <<: *integration_input_format - name: "(Compile=openjdk8, Run=openjdk8) input format integration test with Indexer" - env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' - - - &integration_input_source - name: "(Compile=openjdk8, Run=openjdk8) input source integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - <<: *integration_input_source - name: "(Compile=openjdk8, Run=openjdk8) input source integration test with Indexer" - env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' - - - &integration_perfect_rollup_parallel_batch_index - name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test" - jdk: openjdk8 - stage: Tests - phase 2 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - <<: *integration_perfect_rollup_parallel_batch_index - name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index 
integration test with Indexer" - env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' - - - <<: *integration_perfect_rollup_parallel_batch_index - name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test with deep storage as intermediate store" - env: TESTNG_GROUPS='-Dgroups=shuffle-deep-store' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/shuffle-deep-store' - - - <<: *integration_perfect_rollup_parallel_batch_index - name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test with deep storage as intermediate store with indexer" - env: TESTNG_GROUPS='-Dgroups=shuffle-deep-store' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/shuffle-deep-store' - - - &integration_kafka_index - name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=kafka-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - <<: *integration_kafka_index - name: "(Compile=openjdk8, Run=openjdk8) kafka index, transactional kafka index integration test with Indexer" - env: TESTNG_GROUPS='-Dgroups=kafka-index,kafka-transactional-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' - - - <<: *integration_kafka_index - name: "(Compile=openjdk8, Run=openjdk8) custom coordinator duties integration test" - env: TESTNG_GROUPS='-Dgroups=custom-coordinator-duties' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/custom-coordinator-duties' - - - &integration_kafka_index_slow - name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test slow" - stage: Tests - 
phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=kafka-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - <<: *integration_kafka_index_slow - name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test slow with Indexer" - env: TESTNG_GROUPS='-Dgroups=kafka-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' - - - &integration_kafka_transactional_index - name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - &integration_kafka_transactional_index_slow - name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test slow" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - <<: *integration_kafka_transactional_index_slow - name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test slow with Indexer" - env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' - - - &integration_kafka_format_tests - name: "(Compile=openjdk8, Run=openjdk8) Kafka index integration test with various formats" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=kafka-data-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - <<: *integration_kafka_format_tests - 
name: "(Compile=openjdk8, Run=openjdk8) Kafka index integration test with various formats with Indexer" - env: TESTNG_GROUPS='-Dgroups=kafka-data-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' - - - &integration_query - name: "(Compile=openjdk8, Run=openjdk8) query integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' - script: *run_integration_test - after_failure: *integration_test_diags - - - &integration_query_retry - name: "(Compile=openjdk8, Run=openjdk8) query retry integration test for missing segments" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=query-retry' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' - script: *run_integration_test - after_failure: *integration_test_diags - - - &integration_query_error - name: "(Compile=openjdk8, Run=openjdk8) query error integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=query-error' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' - script: *run_integration_test - after_failure: *integration_test_diags - - - &integration_security - name: "(Compile=openjdk8, Run=openjdk8) security integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=security' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' - script: *run_integration_test - after_failure: *integration_test_diags - - - &integration_ldap_security - name: "(Compile=openjdk8, 
Run=openjdk8) ldap security integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=ldap-security' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - &integration_realtime_index - name: "(Compile=openjdk8, Run=openjdk8) realtime index integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=realtime-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - &integration_append_ingestion - name: "(Compile=openjdk8, Run=openjdk8) append ingestion integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - <<: *integration_append_ingestion - name: "(Compile=openjdk8, Run=openjdk8) append ingestion integration test with Indexer" - env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' - - - &integration_compaction_tests - name: "(Compile=openjdk8, Run=openjdk8) compaction integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - <<: *integration_compaction_tests - name: "(Compile=openjdk8, Run=openjdk8) compaction integration test with Indexer" - env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' - - - &integration_tests - name: "(Compile=openjdk8, Run=openjdk8) other integration tests" - stage: Tests - phase 2 - jdk: openjdk8 - services: 
*integration_test_services - env: TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability,upgrade,shuffle-deep-store,custom-coordinator-duties' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' - script: *run_integration_test - after_failure: *integration_test_diags - - - <<: *integration_tests - name: "(Compile=openjdk8, Run=openjdk8) other integration tests with Indexer" - env: TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability,upgrade,shuffle-deep-store,custom-coordinator-duties' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' - - - <<: *integration_tests - name: "(Compile=openjdk8, Run=openjdk8) leadership and high availability integration tests" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=high-availability' JVM_RUNTIME='-Djvm.runtime=8' 
USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' - - - <<: *integration_query - name: "(Compile=openjdk8, Run=openjdk8) query integration test (mariaDB)" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=query' USE_INDEXER='middleManager' MYSQL_DRIVER_CLASSNAME='org.mariadb.jdbc.Driver' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' - - # END - Integration tests for Compile with Java 8 and Run with Java 8 - - # START - Integration tests for Compile with Java 8 and Run with Java 11 - - <<: *integration_batch_index - name: "(Compile=openjdk8, Run=openjdk11) batch index integration test" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' - - - <<: *integration_input_format - name: "(Compile=openjdk8, Run=openjdk11) input format integration test" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' - - - <<: *integration_input_source - name: "(Compile=openjdk8, Run=openjdk11) input source integration test" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' - - - <<: *integration_perfect_rollup_parallel_batch_index - name: "(Compile=openjdk8, Run=openjdk11) perfect rollup parallel batch index integration test" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' - - - <<: *integration_query - name: "(Compile=openjdk8, Run=openjdk11) query integration test" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' - - - <<: *integration_query_retry - name: "(Compile=openjdk8, Run=openjdk11) query retry integration test for missing segments" - jdk: openjdk8 - env: 
TESTNG_GROUPS='-Dgroups=query-retry' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' - - - <<: *integration_query_error - name: "(Compile=openjdk8, Run=openjdk11) query error integration test for missing segments" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=query-error' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' - - - <<: *integration_security - name: "(Compile=openjdk8, Run=openjdk11) security integration test" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=security' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' - - - <<: *integration_ldap_security - name: "(Compile=openjdk8, Run=openjdk11) ldap security integration test" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=ldap-security' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' - - - <<: *integration_realtime_index - name: "(Compile=openjdk8, Run=openjdk11) realtime index integration test" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=realtime-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' - - - <<: *integration_append_ingestion - name: "(Compile=openjdk8, Run=openjdk11) append ingestion integration test" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' - - - <<: *integration_compaction_tests - name: "(Compile=openjdk8, Run=openjdk11) compaction integration test" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' - - - <<: *integration_tests - name: "(Compile=openjdk8, Run=openjdk11) other integration test" - jdk: openjdk8 - env: 
TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability,upgrade,shuffle-deep-store,custom-coordinator-duties' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' - - - <<: *integration_tests - name: "(Compile=openjdk8, Run=openjdk11) leadership and high availability integration tests" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=high-availability' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' - - - <<: *integration_query - name: "(Compile=openjdk8, Run=openjdk11) query integration test (mariaDB)" - jdk: openjdk8 - env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' MYSQL_DRIVER_CLASSNAME='org.mariadb.jdbc.Driver' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' - - # END - Integration tests for Compile with Java 8 and Run with Java 11 - - - &integration_batch_index_k8s - name: "(Compile=openjdk8, Run=openjdk8, Cluster Build On K8s) ITNestedQueryPushDownTest integration test" - stage: Tests - phase 2 - jdk: openjdk8 - services: &integration_test_services_k8s - - docker - env: CONFIG_FILE='k8s_run_config_file.json' IT_TEST='-Dit.test=ITNestedQueryPushDownTest' POD_NAME=int-test POD_NAMESPACE=default BUILD_DRUID_CLSUTER=true - script: &run_integration_test_k8s - - ${MVN} verify -pl integration-tests -P 
int-tests-config-file ${IT_TEST} ${MAVEN_SKIP} -Dpod.name=${POD_NAME} -Dpod.namespace=${POD_NAMESPACE} -Dbuild.druid.cluster=${BUILD_DRUID_CLSUTER} - after_failure: &integration_test_diags_k8s - - for v in broker middlemanager router coordinator historical ; do - echo "------------------------druid-tiny-cluster-"$v"s-0-------------------------"; - sudo /usr/local/bin/kubectl logs --tail 1000 druid-tiny-cluster-"$v"s-0; - done - - name: "security vulnerabilities" - stage: cron - install: skip - script: |- - ${MVN} dependency-check:purge dependency-check:check || { echo " - - The OWASP dependency check has found security vulnerabilities. Please use a newer version - of the dependency that does not have vulnerabilities. To see a report run - `mvn dependency-check:check` - If the analysis has false positives, - they can be suppressed by adding entries to owasp-dependency-check-suppressions.xml (for more - information, see https://jeremylong.github.io/DependencyCheck/general/suppression.html). - - " && false; } +# +# - name: "web console end-to-end test" +# before_install: *setup_generate_license +# install: web-console/script/druid build +# before_script: +# - ./check_test_suite.py && travis_terminate 0 || echo 'Starting nvm install...' +# - nvm install 14.19.0 +# - web-console/script/druid start +# script: (cd web-console && npm run test-e2e) +# after_script: web-console/script/druid stop +# +# - name: "docs" +# install: ./check_test_suite.py && travis_terminate 0 || (cd website && npm install) +# script: |- +# (cd website && npm run lint && npm run spellcheck) || { echo " +# +# If there are spell check errors: +# +# 1) Suppressing False Positives: Edit website/.spelling to add suppressions. Instructions +# are at the top of the file and explain how to suppress false positives either globally or +# within a particular file. 
+# +# 2) Running Spell Check Locally: cd website && npm install && npm run spellcheck +# +# For more information, refer to: https://www.npmjs.com/package/markdown-spellcheck +# +# " && false; } +# +# # Integration tests Java Compile version is set by the machine environment jdk (set by the jdk key) +# # Integration tests Java Runtime version is set by the JVM_RUNTIME env property (set env key to -Djvm.runtime=) +# # Integration tests will either use MiddleManagers or Indexers +# # (Currently integration tests only support running with jvm runtime 8 and 11) +# # START - Integration tests for Compile with Java 8 and Run with Java 8 +# - &integration_batch_index +# name: "(Compile=openjdk8, Run=openjdk8) batch index integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: &integration_test_services +# - docker +# env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: &run_integration_test +# - ${MVN} verify -pl integration-tests -P integration-tests ${TESTNG_GROUPS} ${JVM_RUNTIME} -Dit.indexer=${USE_INDEXER} ${MAVEN_SKIP} -Doverride.config.path=${OVERRIDE_CONFIG_PATH} +# after_failure: &integration_test_diags +# - for v in ~/shared/logs/*.log ; do +# echo $v logtail ======================== ; tail -100 $v ; +# done +# - for v in broker middlemanager overlord router coordinator historical ; do +# echo $v dmesg ======================== ; +# docker exec -it druid-$v sh -c 'dmesg | tail -3' ; +# done +# +# - <<: *integration_batch_index +# name: "(Compile=openjdk8, Run=openjdk8) batch index integration test with Indexer" +# env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' +# +# - &integration_input_format +# name: "(Compile=openjdk8, Run=openjdk8) input format integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=8' 
USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - <<: *integration_input_format +# name: "(Compile=openjdk8, Run=openjdk8) input format integration test with Indexer" +# env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' +# +# - &integration_input_source +# name: "(Compile=openjdk8, Run=openjdk8) input source integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - <<: *integration_input_source +# name: "(Compile=openjdk8, Run=openjdk8) input source integration test with Indexer" +# env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' +# +# - &integration_perfect_rollup_parallel_batch_index +# name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test" +# jdk: openjdk8 +# stage: Tests - phase 2 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - <<: *integration_perfect_rollup_parallel_batch_index +# name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test with Indexer" +# env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' +# +# - <<: *integration_perfect_rollup_parallel_batch_index +# name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test with deep storage as intermediate store" +# env: TESTNG_GROUPS='-Dgroups=shuffle-deep-store' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' 
OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/shuffle-deep-store' +# +# - <<: *integration_perfect_rollup_parallel_batch_index +# name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test with deep storage as intermediate store with indexer" +# env: TESTNG_GROUPS='-Dgroups=shuffle-deep-store' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/shuffle-deep-store' +# +# - &integration_kafka_index +# name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=kafka-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - <<: *integration_kafka_index +# name: "(Compile=openjdk8, Run=openjdk8) kafka index, transactional kafka index integration test with Indexer" +# env: TESTNG_GROUPS='-Dgroups=kafka-index,kafka-transactional-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' +# +# - <<: *integration_kafka_index +# name: "(Compile=openjdk8, Run=openjdk8) custom coordinator duties integration test" +# env: TESTNG_GROUPS='-Dgroups=custom-coordinator-duties' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/custom-coordinator-duties' +# +# - &integration_kafka_index_slow +# name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test slow" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=kafka-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - <<: *integration_kafka_index_slow +# name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test slow with Indexer" +# env: TESTNG_GROUPS='-Dgroups=kafka-index-slow' 
JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' +# +# - &integration_kafka_transactional_index +# name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - &integration_kafka_transactional_index_slow +# name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test slow" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - <<: *integration_kafka_transactional_index_slow +# name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test slow with Indexer" +# env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' +# +# - &integration_kafka_format_tests +# name: "(Compile=openjdk8, Run=openjdk8) Kafka index integration test with various formats" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=kafka-data-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - <<: *integration_kafka_format_tests +# name: "(Compile=openjdk8, Run=openjdk8) Kafka index integration test with various formats with Indexer" +# env: TESTNG_GROUPS='-Dgroups=kafka-data-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' +# +# - &integration_query +# name: "(Compile=openjdk8, Run=openjdk8) query integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# 
env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - &integration_query_retry +# name: "(Compile=openjdk8, Run=openjdk8) query retry integration test for missing segments" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=query-retry' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - &integration_query_error +# name: "(Compile=openjdk8, Run=openjdk8) query error integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=query-error' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - &integration_security +# name: "(Compile=openjdk8, Run=openjdk8) security integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=security' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - &integration_ldap_security +# name: "(Compile=openjdk8, Run=openjdk8) ldap security integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=ldap-security' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - &integration_realtime_index +# name: 
"(Compile=openjdk8, Run=openjdk8) realtime index integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=realtime-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - &integration_append_ingestion +# name: "(Compile=openjdk8, Run=openjdk8) append ingestion integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - <<: *integration_append_ingestion +# name: "(Compile=openjdk8, Run=openjdk8) append ingestion integration test with Indexer" +# env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' +# +# - &integration_compaction_tests +# name: "(Compile=openjdk8, Run=openjdk8) compaction integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - <<: *integration_compaction_tests +# name: "(Compile=openjdk8, Run=openjdk8) compaction integration test with Indexer" +# env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' +# +# - &integration_tests +# name: "(Compile=openjdk8, Run=openjdk8) other integration tests" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: *integration_test_services +# env: 
TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability,upgrade,shuffle-deep-store,custom-coordinator-duties' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' +# script: *run_integration_test +# after_failure: *integration_test_diags +# +# - <<: *integration_tests +# name: "(Compile=openjdk8, Run=openjdk8) other integration tests with Indexer" +# env: TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability,upgrade,shuffle-deep-store,custom-coordinator-duties' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' +# +# - <<: *integration_tests +# name: "(Compile=openjdk8, Run=openjdk8) leadership and high availability integration tests" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=high-availability' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' 
OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' +# +# - <<: *integration_query +# name: "(Compile=openjdk8, Run=openjdk8) query integration test (mariaDB)" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=query' USE_INDEXER='middleManager' MYSQL_DRIVER_CLASSNAME='org.mariadb.jdbc.Driver' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' +# +# # END - Integration tests for Compile with Java 8 and Run with Java 8 +# +# # START - Integration tests for Compile with Java 8 and Run with Java 11 +# - <<: *integration_batch_index +# name: "(Compile=openjdk8, Run=openjdk11) batch index integration test" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' +# +# - <<: *integration_input_format +# name: "(Compile=openjdk8, Run=openjdk11) input format integration test" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' +# +# - <<: *integration_input_source +# name: "(Compile=openjdk8, Run=openjdk11) input source integration test" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' +# +# - <<: *integration_perfect_rollup_parallel_batch_index +# name: "(Compile=openjdk8, Run=openjdk11) perfect rollup parallel batch index integration test" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' +# +# - <<: *integration_query +# name: "(Compile=openjdk8, Run=openjdk11) query integration test" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' +# +# - <<: *integration_query_retry +# name: "(Compile=openjdk8, Run=openjdk11) query retry integration test for missing segments" +# jdk: openjdk8 +# env: 
TESTNG_GROUPS='-Dgroups=query-retry' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' +# +# - <<: *integration_query_error +# name: "(Compile=openjdk8, Run=openjdk11) query error integration test for missing segments" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=query-error' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' +# +# - <<: *integration_security +# name: "(Compile=openjdk8, Run=openjdk11) security integration test" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=security' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' +# +# - <<: *integration_ldap_security +# name: "(Compile=openjdk8, Run=openjdk11) ldap security integration test" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=ldap-security' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' +# +# - <<: *integration_realtime_index +# name: "(Compile=openjdk8, Run=openjdk11) realtime index integration test" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=realtime-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' +# +# - <<: *integration_append_ingestion +# name: "(Compile=openjdk8, Run=openjdk11) append ingestion integration test" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' +# +# - <<: *integration_compaction_tests +# name: "(Compile=openjdk8, Run=openjdk11) compaction integration test" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' +# +# - <<: *integration_tests +# name: "(Compile=openjdk8, Run=openjdk11) other integration test" +# jdk: openjdk8 +# env: 
TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability,upgrade,shuffle-deep-store,custom-coordinator-duties' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' +# +# - <<: *integration_tests +# name: "(Compile=openjdk8, Run=openjdk11) leadership and high availability integration tests" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=high-availability' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' +# +# - <<: *integration_query +# name: "(Compile=openjdk8, Run=openjdk11) query integration test (mariaDB)" +# jdk: openjdk8 +# env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' MYSQL_DRIVER_CLASSNAME='org.mariadb.jdbc.Driver' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' +# +# # END - Integration tests for Compile with Java 8 and Run with Java 11 +# +# - &integration_batch_index_k8s +# name: "(Compile=openjdk8, Run=openjdk8, Cluster Build On K8s) ITNestedQueryPushDownTest integration test" +# stage: Tests - phase 2 +# jdk: openjdk8 +# services: &integration_test_services_k8s +# - docker +# env: CONFIG_FILE='k8s_run_config_file.json' IT_TEST='-Dit.test=ITNestedQueryPushDownTest' POD_NAME=int-test POD_NAMESPACE=default BUILD_DRUID_CLSUTER=true +# script: &run_integration_test_k8s +# - ${MVN} verify -pl 
integration-tests -P int-tests-config-file ${IT_TEST} ${MAVEN_SKIP} -Dpod.name=${POD_NAME} -Dpod.namespace=${POD_NAMESPACE} -Dbuild.druid.cluster=${BUILD_DRUID_CLSUTER} +# after_failure: &integration_test_diags_k8s +# - for v in broker middlemanager router coordinator historical ; do +# echo "------------------------druid-tiny-cluster-"$v"s-0-------------------------"; +# sudo /usr/local/bin/kubectl logs --tail 1000 druid-tiny-cluster-"$v"s-0; +# done +# - name: "security vulnerabilities" +# stage: cron +# install: skip +# script: |- +# ${MVN} dependency-check:purge dependency-check:check || { echo " +# +# The OWASP dependency check has found security vulnerabilities. Please use a newer version +# of the dependency that does not have vulnerabilities. To see a report run +# `mvn dependency-check:check` +# If the analysis has false positives, +# they can be suppressed by adding entries to owasp-dependency-check-suppressions.xml (for more +# information, see https://jeremylong.github.io/DependencyCheck/general/suppression.html). 
+# +# " && false; } # Travis CI only supports per build (and not per-job notifications): https://github.com/travis-ci/travis-ci/issues/9888 notifications: From 3976d2523396a0d2665def068979af1d75ee7867 Mon Sep 17 00:00:00 2001 From: Will Xu <2bethere@gmail.com> Date: Mon, 18 Apr 2022 22:00:06 -0400 Subject: [PATCH 13/15] Trying smaller row size --- .../druid/segment/virtual/ExpressionVectorSelectorsTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java b/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java index a8a473a611f1..a99c0d6369a5 100644 --- a/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java +++ b/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java @@ -93,7 +93,7 @@ public class ExpressionVectorSelectorsTest "concat(string1, nonexistent)" ); - private static final int ROWS_PER_SEGMENT = 50_000; + private static final int ROWS_PER_SEGMENT = 25_000; private static QueryableIndex INDEX; private static Closer CLOSER; From bc9ffd617d53d94f8bd98aa64efd405e5efd02ef Mon Sep 17 00:00:00 2001 From: Will Xu <2bethere@gmail.com> Date: Tue, 19 Apr 2022 11:02:34 -0400 Subject: [PATCH 14/15] Adjusting to reduce heap need --- .../druid/segment/virtual/ExpressionVectorSelectorsTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java b/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java index a99c0d6369a5..a7ca0ad079bc 100644 --- a/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java +++ b/processing/src/test/java/org/apache/druid/segment/virtual/ExpressionVectorSelectorsTest.java @@ -93,7 +93,7 @@ public class ExpressionVectorSelectorsTest 
"concat(string1, nonexistent)" ); - private static final int ROWS_PER_SEGMENT = 25_000; + private static final int ROWS_PER_SEGMENT = 10_000; private static QueryableIndex INDEX; private static Closer CLOSER; From 23a6da50ee6eefe311f7feb0da843cd92549c379 Mon Sep 17 00:00:00 2001 From: Will Xu <2bethere@gmail.com> Date: Tue, 19 Apr 2022 15:54:20 -0400 Subject: [PATCH 15/15] Moving back to normal travis --- .travis.yml | 1382 +++++++++++++++++++++++++-------------------------- 1 file changed, 691 insertions(+), 691 deletions(-) diff --git a/.travis.yml b/.travis.yml index 2d2f94d81bfa..af43cbe0ecf3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -69,319 +69,319 @@ stages: jobs: include: -# - name: "animal sniffer checks" -# stage: Tests - phase 1 -# script: ${MVN} animal-sniffer:check --fail-at-end -# -# - name: "checkstyle" -# script: ${MVN} checkstyle:checkstyle --fail-at-end -# -# - name: "enforcer checks" -# script: ${MVN} enforcer:enforce --fail-at-end -# -# - name: "forbidden api checks" -# script: ${MVN} forbiddenapis:check forbiddenapis:testCheck --fail-at-end -# -# - name: "pmd checks" -# script: ${MVN} pmd:check --fail-at-end # TODO: consider adding pmd:cpd-check -# -# - name: "spotbugs checks" -# script: ${MVN} spotbugs:check --fail-at-end -pl '!benchmarks' -# -# - name: "license checks" -# install: skip -# before_script: &setup_generate_license -# - sudo apt-get update && sudo apt-get install python3 python3-pip python3-setuptools -y -# - ./check_test_suite.py && travis_terminate 0 || echo 'Continuing setup' -# - pip3 install wheel # install wheel first explicitly -# - pip3 install pyyaml==5.4.1 -# script: -# - > -# ${MVN} apache-rat:check -Prat --fail-at-end -# -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn -# -Drat.consoleOutput=true -# # Generate dependency reports and checks they are valid. 
When running on Travis CI, 2 cores are available -# # (https://docs.travis-ci.com/user/reference/overview/#virtualisation-environment-vs-operating-system). -# - mkdir -p target -# - distribution/bin/generate-license-dependency-reports.py . target --clean-maven-artifact-transfer --parallel 2 -# - distribution/bin/check-licenses.py licenses.yaml target/license-reports -# - name: "script checks" -# install: skip -# # who watches the watchers? -# script: ./check_test_suite_test.py -# -# - name: "(openjdk11) strict compilation" -# install: skip -# # errorprone requires JDK 11 -# jdk: openjdk11 -# # Strict compilation requires more than 2 GB -# script: > -# ./check_test_suite.py && travis_terminate 0 || MAVEN_OPTS='-Xmx3000m' ${MVN} clean -DstrictCompile compile test-compile --fail-at-end -# ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} -# -# - name: "analyze dependencies" -# script: |- -# MAVEN_OPTS='-Xmx3000m' ${MVN} ${MAVEN_SKIP} dependency:analyze -DoutputXML=true -DignoreNonCompile=true -DfailOnWarning=true || { echo " -# -# The dependency analysis has found a dependency that is either: -# -# 1) Used and undeclared: These are available as a transitive dependency but should be explicitly -# added to the POM to ensure the dependency version. The XML to add the dependencies to the POM is -# shown above. -# -# 2) Unused and declared: These are not needed and removing them from the POM will speed up the build -# and reduce the artifact size. The dependencies to remove are shown above. 
-# -# If there are false positive dependency analysis warnings, they can be suppressed: -# https://maven.apache.org/plugins/maven-dependency-plugin/analyze-mojo.html#usedDependencies -# https://maven.apache.org/plugins/maven-dependency-plugin/examples/exclude-dependencies-from-dependency-analysis.html -# -# For more information, refer to: -# https://maven.apache.org/plugins/maven-dependency-plugin/analyze-mojo.html -# -# " && false; } -# -# - name: "analyze hadoop 3 dependencies" -# script: |- -# MAVEN_OPTS='-Xmx3000m' ${MVN} ${MAVEN_SKIP} dependency:analyze -DoutputXML=true -DignoreNonCompile=true -DfailOnWarning=true -Phadoop3 || { echo " -# -# The dependency analysis has found a dependency that is either: -# -# 1) Used and undeclared: These are available as a transitive dependency but should be explicitly -# added to the POM to ensure the dependency version. The XML to add the dependencies to the POM is -# shown above. -# -# 2) Unused and declared: These are not needed and removing them from the POM will speed up the build -# and reduce the artifact size. The dependencies to remove are shown above. 
-# -# If there are false positive dependency analysis warnings, they can be suppressed: -# https://maven.apache.org/plugins/maven-dependency-plugin/analyze-mojo.html#usedDependencies -# https://maven.apache.org/plugins/maven-dependency-plugin/examples/exclude-dependencies-from-dependency-analysis.html -# -# For more information, refer to: -# https://maven.apache.org/plugins/maven-dependency-plugin/analyze-mojo.html -# -# " && false; } -# -# - name: "intellij inspections" -# script: > -# ./check_test_suite.py && travis_terminate 0 || docker run --rm -# -v $(pwd):/project -# -v ~/.m2:/home/inspect/.m2 -# ccaominh/intellij-inspect:1.0.0 -# /project/pom.xml -# /project/.idea/inspectionProfiles/Druid.xml -# --levels ERROR -# --scope JavaInspectionsScope -# -# - &package -# name: "(openjdk8) packaging check" -# install: skip -# before_script: *setup_generate_license -# script: > -# MAVEN_OPTS='-Xmx3000m' ${MVN} clean install -Prat -Pdist -Pbundle-contrib-exts --fail-at-end -# -pl '!benchmarks' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} -Ddruid.console.skip=false -T1C -# -# - <<: *package -# name: "(openjdk11) packaging check" -# stage: Tests - phase 2 -# jdk: openjdk11 -# -# - <<: *package -# name: "(openjdk15) packaging check" -# stage: Tests - phase 2 -# jdk: openjdk15 -# -# - &test_processing_module -# name: "(openjdk8) processing module test" -# stage: Tests - phase 1 -# env: -# - MAVEN_PROJECTS='processing' -# before_script: -# - export DRUID_USE_DEFAULT_VALUE_FOR_NULL=true -# script: -# - unset _JAVA_OPTIONS -# # Set MAVEN_OPTS for Surefire launcher. Skip remoteresources to avoid intermittent connection timeouts when -# # resolving the SIGAR dependency. 
-# - > -# MAVEN_OPTS='-Xmx2048m' ${MVN} test -pl ${MAVEN_PROJECTS} -# ${MAVEN_SKIP} -Dremoteresources.skip=true -Ddruid.generic.useDefaultValueForNull=${DRUID_USE_DEFAULT_VALUE_FOR_NULL} -# - sh -c "dmesg | egrep -i '(oom|out of memory|kill process|killed).*' -C 1 || exit 0" -# - free -m -# - ${MVN} -pl ${MAVEN_PROJECTS} jacoco:report -# # Add merge target branch to determine diff (see https://github.com/travis-ci/travis-ci/issues/6069). -# # This is not needed for build triggered by tags, since there will be no code diff. -# - echo "TRAVIS_BRANCH=${TRAVIS_BRANCH}" # for debugging -# - if [[ -z "${TRAVIS_TAG}" ]]; then git remote set-branches --add origin ${TRAVIS_BRANCH} && git fetch; fi -# # Determine the modified files that match the maven projects being tested. We use maven project lists that -# # either exclude (starts with "!") or include (does not start with "!"), so both cases need to be handled. -# # If the build is triggered by a tag, an error will be printed, but `all_files` will be correctly set to empty -# # so that the coverage check is skipped. -# - all_files="$(git diff --name-only origin/${TRAVIS_BRANCH}...HEAD | grep "\.java$" || [[ $? == 1 ]])" -# - for f in ${all_files}; do echo $f; done # for debugging -# - > -# if [[ "${MAVEN_PROJECTS}" = \!* ]]; then -# regex="${MAVEN_PROJECTS:1}"; -# regex="^${regex//,\!/\\|^}"; -# project_files="$(echo "${all_files}" | grep -v "${regex}" || [[ $? == 1 ]])"; -# else -# regex="^${MAVEN_PROJECTS//,/\\|^}"; -# project_files="$(echo "${all_files}" | grep "${regex}" || [[ $? == 1 ]])"; -# fi -# - for f in ${project_files}; do echo $f; done # for debugging -# # Check diff code coverage for the maven projects being tested (retry install in case of network error). -# # Currently, the function coverage check is not reliable, so it is disabled. 
-# - > -# if [ -n "${project_files}" ]; then -# travis_retry npm install @connectis/diff-test-coverage@1.5.3 -# && git diff origin/${TRAVIS_BRANCH}...HEAD -- ${project_files} -# | node_modules/.bin/diff-test-coverage -# --coverage "**/target/site/jacoco/jacoco.xml" -# --type jacoco -# --line-coverage 50 -# --branch-coverage 50 -# --function-coverage 0 -# --log-template "coverage-lines-complete" -# --log-template "coverage-files-complete" -# --log-template "totals-complete" -# --log-template "errors" -# -- -# || { printf "\n\n****FAILED****\nDiff code coverage check failed. To view coverage report, run 'mvn clean test jacoco:report' and open 'target/site/jacoco/index.html'\nFor more details on how to run code coverage locally, follow instructions here - https://github.com/apache/druid/blob/master/dev/code-review/code-coverage.md#running-code-coverage-locally\n\n" && false; } -# fi -# after_success: -# # retry in case of network error -# - travis_retry curl -o codecov.sh -s https://codecov.io/bash -# - travis_retry bash codecov.sh -X gcov -# -# - <<: *test_processing_module -# name: "(openjdk11) processing module test" -# stage: Tests - phase 2 -# jdk: openjdk11 -# -# - <<: *test_processing_module -# name: "(openjdk15) processing module test" -# stage: Tests - phase 2 -# jdk: openjdk15 -# -# - &test_processing_module_sqlcompat -# <<: *test_processing_module -# name: "(openjdk8) processing module test (SQL Compatibility)" -# stage: Tests - phase 1 -# before_script: &setup_sqlcompat -# - export DRUID_USE_DEFAULT_VALUE_FOR_NULL=false -# -# - <<: *test_processing_module_sqlcompat -# name: "(openjdk11) processing module test (SQL Compatibility)" -# stage: Tests - phase 2 -# jdk: openjdk11 -# -# - <<: *test_processing_module_sqlcompat -# name: "(openjdk15) processing module test (SQL Compatibility)" -# stage: Tests - phase 2 -# jdk: openjdk15 -# -# - &test_indexing_module -# <<: *test_processing_module -# name: "(openjdk8) indexing modules test" -# env: -# - 
MAVEN_PROJECTS='indexing-hadoop,indexing-service,extensions-core/kafka-indexing-service,extensions-core/kinesis-indexing-service' -# -# - <<: *test_indexing_module -# name: "(openjdk11) indexing modules test" -# stage: Tests - phase 2 -# jdk: openjdk11 -# -# - <<: *test_indexing_module -# name: "(openjdk15) indexing modules test" -# stage: Tests - phase 2 -# jdk: openjdk15 -# -# - &test_indexing_module_sqlcompat -# <<: *test_indexing_module -# name: "(openjdk8) indexing modules test (SQL Compatibility)" -# stage: Tests - phase 1 -# before_script: *setup_sqlcompat -# -# - <<: *test_indexing_module_sqlcompat -# name: "(openjdk11) indexing modules test (SQL Compatibility)" -# stage: Tests - phase 2 -# jdk: openjdk11 -# -# - <<: *test_indexing_module_sqlcompat -# name: "(openjdk15) indexing modules test (SQL Compatibility)" -# stage: Tests - phase 2 -# jdk: openjdk15 -# -# - &test_server_module -# <<: *test_processing_module -# name: "(openjdk8) server module test" -# env: -# - MAVEN_PROJECTS='server' -# -# - <<: *test_server_module -# name: "(openjdk11) server module test" -# stage: Tests - phase 2 -# jdk: openjdk11 -# -# - <<: *test_server_module -# name: "(openjdk15) server module test" -# stage: Tests - phase 2 -# jdk: openjdk15 -# -# - &test_server_module_sqlcompat -# <<: *test_server_module -# name: "(openjdk8) server module test (SQL Compatibility)" -# before_script: *setup_sqlcompat -# -# - <<: *test_server_module_sqlcompat -# name: "(openjdk11) server module test (SQL Compatibility)" -# stage: Tests - phase 2 -# jdk: openjdk11 -# -# - <<: *test_server_module_sqlcompat -# name: "(openjdk15) server module test (SQL Compatibility)" -# stage: Tests - phase 2 -# jdk: openjdk15 -# -# - &test_other_modules -# <<: *test_processing_module -# name: "(openjdk8) other modules test" -# env: -# - 
MAVEN_PROJECTS='!processing,!indexing-hadoop,!indexing-service,!extensions-core/kafka-indexing-service,!extensions-core/kinesis-indexing-service,!server,!web-console,!integration-tests' -# -# - <<: *test_other_modules -# name: "(openjdk11) other modules test" -# stage: Tests - phase 2 -# jdk: openjdk11 -# -# - <<: *test_other_modules -# name: "(openjdk15) other modules test" -# stage: Tests - phase 2 -# jdk: openjdk15 -# -# - &test_other_modules_sqlcompat -# <<: *test_other_modules -# name: "(openjdk8) other modules test (SQL Compatibility)" -# before_script: *setup_sqlcompat -# -# - <<: *test_other_modules_sqlcompat -# name: "(openjdk11) other modules test (SQL Compatibility)" -# stage: Tests - phase 2 -# jdk: openjdk11 -# -# - <<: *test_other_modules_sqlcompat -# name: "(openjdk15) other modules test (SQL Compatibility)" -# stage: Tests - phase 2 -# jdk: openjdk15 -# -# - name: "web console" -# install: skip -# stage: Tests - phase 1 -# script: -# - ./check_test_suite.py && travis_terminate 0 || ${MVN} test -pl 'web-console' -# after_success: -# - (cd web-console && travis_retry npm run codecov) # retry in case of network error -# + - name: "animal sniffer checks" + stage: Tests - phase 1 + script: ${MVN} animal-sniffer:check --fail-at-end + + - name: "checkstyle" + script: ${MVN} checkstyle:checkstyle --fail-at-end + + - name: "enforcer checks" + script: ${MVN} enforcer:enforce --fail-at-end + + - name: "forbidden api checks" + script: ${MVN} forbiddenapis:check forbiddenapis:testCheck --fail-at-end + + - name: "pmd checks" + script: ${MVN} pmd:check --fail-at-end # TODO: consider adding pmd:cpd-check + + - name: "spotbugs checks" + script: ${MVN} spotbugs:check --fail-at-end -pl '!benchmarks' + + - name: "license checks" + install: skip + before_script: &setup_generate_license + - sudo apt-get update && sudo apt-get install python3 python3-pip python3-setuptools -y + - ./check_test_suite.py && travis_terminate 0 || echo 'Continuing setup' + - pip3 install wheel 
# install wheel first explicitly + - pip3 install pyyaml==5.4.1 + script: + - > + ${MVN} apache-rat:check -Prat --fail-at-end + -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn + -Drat.consoleOutput=true + # Generate dependency reports and checks they are valid. When running on Travis CI, 2 cores are available + # (https://docs.travis-ci.com/user/reference/overview/#virtualisation-environment-vs-operating-system). + - mkdir -p target + - distribution/bin/generate-license-dependency-reports.py . target --clean-maven-artifact-transfer --parallel 2 + - distribution/bin/check-licenses.py licenses.yaml target/license-reports + - name: "script checks" + install: skip + # who watches the watchers? + script: ./check_test_suite_test.py + + - name: "(openjdk11) strict compilation" + install: skip + # errorprone requires JDK 11 + jdk: openjdk11 + # Strict compilation requires more than 2 GB + script: > + ./check_test_suite.py && travis_terminate 0 || MAVEN_OPTS='-Xmx3000m' ${MVN} clean -DstrictCompile compile test-compile --fail-at-end + ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} + + - name: "analyze dependencies" + script: |- + MAVEN_OPTS='-Xmx3000m' ${MVN} ${MAVEN_SKIP} dependency:analyze -DoutputXML=true -DignoreNonCompile=true -DfailOnWarning=true || { echo " + + The dependency analysis has found a dependency that is either: + + 1) Used and undeclared: These are available as a transitive dependency but should be explicitly + added to the POM to ensure the dependency version. The XML to add the dependencies to the POM is + shown above. + + 2) Unused and declared: These are not needed and removing them from the POM will speed up the build + and reduce the artifact size. The dependencies to remove are shown above. 
+ + If there are false positive dependency analysis warnings, they can be suppressed: + https://maven.apache.org/plugins/maven-dependency-plugin/analyze-mojo.html#usedDependencies + https://maven.apache.org/plugins/maven-dependency-plugin/examples/exclude-dependencies-from-dependency-analysis.html + + For more information, refer to: + https://maven.apache.org/plugins/maven-dependency-plugin/analyze-mojo.html + + " && false; } + + - name: "analyze hadoop 3 dependencies" + script: |- + MAVEN_OPTS='-Xmx3000m' ${MVN} ${MAVEN_SKIP} dependency:analyze -DoutputXML=true -DignoreNonCompile=true -DfailOnWarning=true -Phadoop3 || { echo " + + The dependency analysis has found a dependency that is either: + + 1) Used and undeclared: These are available as a transitive dependency but should be explicitly + added to the POM to ensure the dependency version. The XML to add the dependencies to the POM is + shown above. + + 2) Unused and declared: These are not needed and removing them from the POM will speed up the build + and reduce the artifact size. The dependencies to remove are shown above. 
+ + If there are false positive dependency analysis warnings, they can be suppressed: + https://maven.apache.org/plugins/maven-dependency-plugin/analyze-mojo.html#usedDependencies + https://maven.apache.org/plugins/maven-dependency-plugin/examples/exclude-dependencies-from-dependency-analysis.html + + For more information, refer to: + https://maven.apache.org/plugins/maven-dependency-plugin/analyze-mojo.html + + " && false; } + + - name: "intellij inspections" + script: > + ./check_test_suite.py && travis_terminate 0 || docker run --rm + -v $(pwd):/project + -v ~/.m2:/home/inspect/.m2 + ccaominh/intellij-inspect:1.0.0 + /project/pom.xml + /project/.idea/inspectionProfiles/Druid.xml + --levels ERROR + --scope JavaInspectionsScope + + - &package + name: "(openjdk8) packaging check" + install: skip + before_script: *setup_generate_license + script: > + MAVEN_OPTS='-Xmx3000m' ${MVN} clean install -Prat -Pdist -Pbundle-contrib-exts --fail-at-end + -pl '!benchmarks' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} -Ddruid.console.skip=false -T1C + + - <<: *package + name: "(openjdk11) packaging check" + stage: Tests - phase 2 + jdk: openjdk11 + + - <<: *package + name: "(openjdk15) packaging check" + stage: Tests - phase 2 + jdk: openjdk15 + + - &test_processing_module + name: "(openjdk8) processing module test" + stage: Tests - phase 1 + env: + - MAVEN_PROJECTS='processing' + before_script: + - export DRUID_USE_DEFAULT_VALUE_FOR_NULL=true + script: + - unset _JAVA_OPTIONS + # Set MAVEN_OPTS for Surefire launcher. Skip remoteresources to avoid intermittent connection timeouts when + # resolving the SIGAR dependency. 
+ - > + MAVEN_OPTS='-Xmx2048m' ${MVN} test -pl ${MAVEN_PROJECTS} + ${MAVEN_SKIP} -Dremoteresources.skip=true -Ddruid.generic.useDefaultValueForNull=${DRUID_USE_DEFAULT_VALUE_FOR_NULL} + - sh -c "dmesg | egrep -i '(oom|out of memory|kill process|killed).*' -C 1 || exit 0" + - free -m + - ${MVN} -pl ${MAVEN_PROJECTS} jacoco:report + # Add merge target branch to determine diff (see https://github.com/travis-ci/travis-ci/issues/6069). + # This is not needed for build triggered by tags, since there will be no code diff. + - echo "TRAVIS_BRANCH=${TRAVIS_BRANCH}" # for debugging + - if [[ -z "${TRAVIS_TAG}" ]]; then git remote set-branches --add origin ${TRAVIS_BRANCH} && git fetch; fi + # Determine the modified files that match the maven projects being tested. We use maven project lists that + # either exclude (starts with "!") or include (does not start with "!"), so both cases need to be handled. + # If the build is triggered by a tag, an error will be printed, but `all_files` will be correctly set to empty + # so that the coverage check is skipped. + - all_files="$(git diff --name-only origin/${TRAVIS_BRANCH}...HEAD | grep "\.java$" || [[ $? == 1 ]])" + - for f in ${all_files}; do echo $f; done # for debugging + - > + if [[ "${MAVEN_PROJECTS}" = \!* ]]; then + regex="${MAVEN_PROJECTS:1}"; + regex="^${regex//,\!/\\|^}"; + project_files="$(echo "${all_files}" | grep -v "${regex}" || [[ $? == 1 ]])"; + else + regex="^${MAVEN_PROJECTS//,/\\|^}"; + project_files="$(echo "${all_files}" | grep "${regex}" || [[ $? == 1 ]])"; + fi + - for f in ${project_files}; do echo $f; done # for debugging + # Check diff code coverage for the maven projects being tested (retry install in case of network error). + # Currently, the function coverage check is not reliable, so it is disabled. 
+ - > + if [ -n "${project_files}" ]; then + travis_retry npm install @connectis/diff-test-coverage@1.5.3 + && git diff origin/${TRAVIS_BRANCH}...HEAD -- ${project_files} + | node_modules/.bin/diff-test-coverage + --coverage "**/target/site/jacoco/jacoco.xml" + --type jacoco + --line-coverage 50 + --branch-coverage 50 + --function-coverage 0 + --log-template "coverage-lines-complete" + --log-template "coverage-files-complete" + --log-template "totals-complete" + --log-template "errors" + -- + || { printf "\n\n****FAILED****\nDiff code coverage check failed. To view coverage report, run 'mvn clean test jacoco:report' and open 'target/site/jacoco/index.html'\nFor more details on how to run code coverage locally, follow instructions here - https://github.com/apache/druid/blob/master/dev/code-review/code-coverage.md#running-code-coverage-locally\n\n" && false; } + fi + after_success: + # retry in case of network error + - travis_retry curl -o codecov.sh -s https://codecov.io/bash + - travis_retry bash codecov.sh -X gcov + + - <<: *test_processing_module + name: "(openjdk11) processing module test" + stage: Tests - phase 2 + jdk: openjdk11 + + - <<: *test_processing_module + name: "(openjdk15) processing module test" + stage: Tests - phase 2 + jdk: openjdk15 + + - &test_processing_module_sqlcompat + <<: *test_processing_module + name: "(openjdk8) processing module test (SQL Compatibility)" + stage: Tests - phase 1 + before_script: &setup_sqlcompat + - export DRUID_USE_DEFAULT_VALUE_FOR_NULL=false + + - <<: *test_processing_module_sqlcompat + name: "(openjdk11) processing module test (SQL Compatibility)" + stage: Tests - phase 2 + jdk: openjdk11 + + - <<: *test_processing_module_sqlcompat + name: "(openjdk15) processing module test (SQL Compatibility)" + stage: Tests - phase 2 + jdk: openjdk15 + + - &test_indexing_module + <<: *test_processing_module + name: "(openjdk8) indexing modules test" + env: + - 
MAVEN_PROJECTS='indexing-hadoop,indexing-service,extensions-core/kafka-indexing-service,extensions-core/kinesis-indexing-service' + + - <<: *test_indexing_module + name: "(openjdk11) indexing modules test" + stage: Tests - phase 2 + jdk: openjdk11 + + - <<: *test_indexing_module + name: "(openjdk15) indexing modules test" + stage: Tests - phase 2 + jdk: openjdk15 + + - &test_indexing_module_sqlcompat + <<: *test_indexing_module + name: "(openjdk8) indexing modules test (SQL Compatibility)" + stage: Tests - phase 1 + before_script: *setup_sqlcompat + + - <<: *test_indexing_module_sqlcompat + name: "(openjdk11) indexing modules test (SQL Compatibility)" + stage: Tests - phase 2 + jdk: openjdk11 + + - <<: *test_indexing_module_sqlcompat + name: "(openjdk15) indexing modules test (SQL Compatibility)" + stage: Tests - phase 2 + jdk: openjdk15 + + - &test_server_module + <<: *test_processing_module + name: "(openjdk8) server module test" + env: + - MAVEN_PROJECTS='server' + + - <<: *test_server_module + name: "(openjdk11) server module test" + stage: Tests - phase 2 + jdk: openjdk11 + + - <<: *test_server_module + name: "(openjdk15) server module test" + stage: Tests - phase 2 + jdk: openjdk15 + + - &test_server_module_sqlcompat + <<: *test_server_module + name: "(openjdk8) server module test (SQL Compatibility)" + before_script: *setup_sqlcompat + + - <<: *test_server_module_sqlcompat + name: "(openjdk11) server module test (SQL Compatibility)" + stage: Tests - phase 2 + jdk: openjdk11 + + - <<: *test_server_module_sqlcompat + name: "(openjdk15) server module test (SQL Compatibility)" + stage: Tests - phase 2 + jdk: openjdk15 + + - &test_other_modules + <<: *test_processing_module + name: "(openjdk8) other modules test" + env: + - MAVEN_PROJECTS='!processing,!indexing-hadoop,!indexing-service,!extensions-core/kafka-indexing-service,!extensions-core/kinesis-indexing-service,!server,!web-console,!integration-tests' + + - <<: *test_other_modules + name: "(openjdk11) other 
modules test" + stage: Tests - phase 2 + jdk: openjdk11 + + - <<: *test_other_modules + name: "(openjdk15) other modules test" + stage: Tests - phase 2 + jdk: openjdk15 + + - &test_other_modules_sqlcompat + <<: *test_other_modules + name: "(openjdk8) other modules test (SQL Compatibility)" + before_script: *setup_sqlcompat + + - <<: *test_other_modules_sqlcompat + name: "(openjdk11) other modules test (SQL Compatibility)" + stage: Tests - phase 2 + jdk: openjdk11 + + - <<: *test_other_modules_sqlcompat + name: "(openjdk15) other modules test (SQL Compatibility)" + stage: Tests - phase 2 + jdk: openjdk15 + + - name: "web console" + install: skip + stage: Tests - phase 1 + script: + - ./check_test_suite.py && travis_terminate 0 || ${MVN} test -pl 'web-console' + after_success: + - (cd web-console && travis_retry npm run codecov) # retry in case of network error + - name: "Build and test on ARM64 CPU architecture (1)" stage: Tests - phase 2 arch: arm64-graviton2 @@ -403,384 +403,384 @@ jobs: env: - MAVEN_PROJECTS='core,sql,server,services' script: ${MVN} test -B -pl ${MAVEN_PROJECTS} -Ddruid.console.skip=true -DargLine=-Xmx3000m -T1C -# -# - name: "web console end-to-end test" -# before_install: *setup_generate_license -# install: web-console/script/druid build -# before_script: -# - ./check_test_suite.py && travis_terminate 0 || echo 'Starting nvm install...' -# - nvm install 14.19.0 -# - web-console/script/druid start -# script: (cd web-console && npm run test-e2e) -# after_script: web-console/script/druid stop -# -# - name: "docs" -# install: ./check_test_suite.py && travis_terminate 0 || (cd website && npm install) -# script: |- -# (cd website && npm run lint && npm run spellcheck) || { echo " -# -# If there are spell check errors: -# -# 1) Suppressing False Positives: Edit website/.spelling to add suppressions. Instructions -# are at the top of the file and explain how to suppress false positives either globally or -# within a particular file. 
-# -# 2) Running Spell Check Locally: cd website && npm install && npm run spellcheck -# -# For more information, refer to: https://www.npmjs.com/package/markdown-spellcheck -# -# " && false; } -# -# # Integration tests Java Compile version is set by the machine environment jdk (set by the jdk key) -# # Integration tests Java Runtime version is set by the JVM_RUNTIME env property (set env key to -Djvm.runtime=) -# # Integration tests will either use MiddleManagers or Indexers -# # (Currently integration tests only support running with jvm runtime 8 and 11) -# # START - Integration tests for Compile with Java 8 and Run with Java 8 -# - &integration_batch_index -# name: "(Compile=openjdk8, Run=openjdk8) batch index integration test" -# stage: Tests - phase 2 -# jdk: openjdk8 -# services: &integration_test_services -# - docker -# env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' -# script: &run_integration_test -# - ${MVN} verify -pl integration-tests -P integration-tests ${TESTNG_GROUPS} ${JVM_RUNTIME} -Dit.indexer=${USE_INDEXER} ${MAVEN_SKIP} -Doverride.config.path=${OVERRIDE_CONFIG_PATH} -# after_failure: &integration_test_diags -# - for v in ~/shared/logs/*.log ; do -# echo $v logtail ======================== ; tail -100 $v ; -# done -# - for v in broker middlemanager overlord router coordinator historical ; do -# echo $v dmesg ======================== ; -# docker exec -it druid-$v sh -c 'dmesg | tail -3' ; -# done -# -# - <<: *integration_batch_index -# name: "(Compile=openjdk8, Run=openjdk8) batch index integration test with Indexer" -# env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' -# -# - &integration_input_format -# name: "(Compile=openjdk8, Run=openjdk8) input format integration test" -# stage: Tests - phase 2 -# jdk: openjdk8 -# services: *integration_test_services -# env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=8' 
USE_INDEXER='middleManager' -# script: *run_integration_test -# after_failure: *integration_test_diags -# -# - <<: *integration_input_format -# name: "(Compile=openjdk8, Run=openjdk8) input format integration test with Indexer" -# env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' -# -# - &integration_input_source -# name: "(Compile=openjdk8, Run=openjdk8) input source integration test" -# stage: Tests - phase 2 -# jdk: openjdk8 -# services: *integration_test_services -# env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' -# script: *run_integration_test -# after_failure: *integration_test_diags -# -# - <<: *integration_input_source -# name: "(Compile=openjdk8, Run=openjdk8) input source integration test with Indexer" -# env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' -# -# - &integration_perfect_rollup_parallel_batch_index -# name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test" -# jdk: openjdk8 -# stage: Tests - phase 2 -# services: *integration_test_services -# env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' -# script: *run_integration_test -# after_failure: *integration_test_diags -# -# - <<: *integration_perfect_rollup_parallel_batch_index -# name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test with Indexer" -# env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' -# -# - <<: *integration_perfect_rollup_parallel_batch_index -# name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test with deep storage as intermediate store" -# env: TESTNG_GROUPS='-Dgroups=shuffle-deep-store' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' 
OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/shuffle-deep-store' -# -# - <<: *integration_perfect_rollup_parallel_batch_index -# name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test with deep storage as intermediate store with indexer" -# env: TESTNG_GROUPS='-Dgroups=shuffle-deep-store' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/shuffle-deep-store' -# -# - &integration_kafka_index -# name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test" -# stage: Tests - phase 2 -# jdk: openjdk8 -# services: *integration_test_services -# env: TESTNG_GROUPS='-Dgroups=kafka-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' -# script: *run_integration_test -# after_failure: *integration_test_diags -# -# - <<: *integration_kafka_index -# name: "(Compile=openjdk8, Run=openjdk8) kafka index, transactional kafka index integration test with Indexer" -# env: TESTNG_GROUPS='-Dgroups=kafka-index,kafka-transactional-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' -# -# - <<: *integration_kafka_index -# name: "(Compile=openjdk8, Run=openjdk8) custom coordinator duties integration test" -# env: TESTNG_GROUPS='-Dgroups=custom-coordinator-duties' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/custom-coordinator-duties' -# -# - &integration_kafka_index_slow -# name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test slow" -# stage: Tests - phase 2 -# jdk: openjdk8 -# services: *integration_test_services -# env: TESTNG_GROUPS='-Dgroups=kafka-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' -# script: *run_integration_test -# after_failure: *integration_test_diags -# -# - <<: *integration_kafka_index_slow -# name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test slow with Indexer" -# env: TESTNG_GROUPS='-Dgroups=kafka-index-slow' 
JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' -# -# - &integration_kafka_transactional_index -# name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test" -# stage: Tests - phase 2 -# jdk: openjdk8 -# services: *integration_test_services -# env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' -# script: *run_integration_test -# after_failure: *integration_test_diags -# -# - &integration_kafka_transactional_index_slow -# name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test slow" -# stage: Tests - phase 2 -# jdk: openjdk8 -# services: *integration_test_services -# env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' -# script: *run_integration_test -# after_failure: *integration_test_diags -# -# - <<: *integration_kafka_transactional_index_slow -# name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test slow with Indexer" -# env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' -# -# - &integration_kafka_format_tests -# name: "(Compile=openjdk8, Run=openjdk8) Kafka index integration test with various formats" -# stage: Tests - phase 2 -# jdk: openjdk8 -# services: *integration_test_services -# env: TESTNG_GROUPS='-Dgroups=kafka-data-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' -# script: *run_integration_test -# after_failure: *integration_test_diags -# -# - <<: *integration_kafka_format_tests -# name: "(Compile=openjdk8, Run=openjdk8) Kafka index integration test with various formats with Indexer" -# env: TESTNG_GROUPS='-Dgroups=kafka-data-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' -# -# - &integration_query -# name: "(Compile=openjdk8, Run=openjdk8) query integration test" -# stage: Tests - phase 2 -# jdk: openjdk8 -# services: *integration_test_services -# 
env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' -# script: *run_integration_test -# after_failure: *integration_test_diags -# -# - &integration_query_retry -# name: "(Compile=openjdk8, Run=openjdk8) query retry integration test for missing segments" -# stage: Tests - phase 2 -# jdk: openjdk8 -# services: *integration_test_services -# env: TESTNG_GROUPS='-Dgroups=query-retry' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' -# script: *run_integration_test -# after_failure: *integration_test_diags -# -# - &integration_query_error -# name: "(Compile=openjdk8, Run=openjdk8) query error integration test" -# stage: Tests - phase 2 -# jdk: openjdk8 -# services: *integration_test_services -# env: TESTNG_GROUPS='-Dgroups=query-error' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' -# script: *run_integration_test -# after_failure: *integration_test_diags -# -# - &integration_security -# name: "(Compile=openjdk8, Run=openjdk8) security integration test" -# stage: Tests - phase 2 -# jdk: openjdk8 -# services: *integration_test_services -# env: TESTNG_GROUPS='-Dgroups=security' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' -# script: *run_integration_test -# after_failure: *integration_test_diags -# -# - &integration_ldap_security -# name: "(Compile=openjdk8, Run=openjdk8) ldap security integration test" -# stage: Tests - phase 2 -# jdk: openjdk8 -# services: *integration_test_services -# env: TESTNG_GROUPS='-Dgroups=ldap-security' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' -# script: *run_integration_test -# after_failure: *integration_test_diags -# -# - &integration_realtime_index -# name: 
"(Compile=openjdk8, Run=openjdk8) realtime index integration test" -# stage: Tests - phase 2 -# jdk: openjdk8 -# services: *integration_test_services -# env: TESTNG_GROUPS='-Dgroups=realtime-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' -# script: *run_integration_test -# after_failure: *integration_test_diags -# -# - &integration_append_ingestion -# name: "(Compile=openjdk8, Run=openjdk8) append ingestion integration test" -# stage: Tests - phase 2 -# jdk: openjdk8 -# services: *integration_test_services -# env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' -# script: *run_integration_test -# after_failure: *integration_test_diags -# -# - <<: *integration_append_ingestion -# name: "(Compile=openjdk8, Run=openjdk8) append ingestion integration test with Indexer" -# env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' -# -# - &integration_compaction_tests -# name: "(Compile=openjdk8, Run=openjdk8) compaction integration test" -# stage: Tests - phase 2 -# jdk: openjdk8 -# services: *integration_test_services -# env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' -# script: *run_integration_test -# after_failure: *integration_test_diags -# -# - <<: *integration_compaction_tests -# name: "(Compile=openjdk8, Run=openjdk8) compaction integration test with Indexer" -# env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' -# -# - &integration_tests -# name: "(Compile=openjdk8, Run=openjdk8) other integration tests" -# stage: Tests - phase 2 -# jdk: openjdk8 -# services: *integration_test_services -# env: 
TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability,upgrade,shuffle-deep-store,custom-coordinator-duties' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' -# script: *run_integration_test -# after_failure: *integration_test_diags -# -# - <<: *integration_tests -# name: "(Compile=openjdk8, Run=openjdk8) other integration tests with Indexer" -# env: TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability,upgrade,shuffle-deep-store,custom-coordinator-duties' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' -# -# - <<: *integration_tests -# name: "(Compile=openjdk8, Run=openjdk8) leadership and high availability integration tests" -# jdk: openjdk8 -# env: TESTNG_GROUPS='-Dgroups=high-availability' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' 
OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' -# -# - <<: *integration_query -# name: "(Compile=openjdk8, Run=openjdk8) query integration test (mariaDB)" -# jdk: openjdk8 -# env: TESTNG_GROUPS='-Dgroups=query' USE_INDEXER='middleManager' MYSQL_DRIVER_CLASSNAME='org.mariadb.jdbc.Driver' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' -# -# # END - Integration tests for Compile with Java 8 and Run with Java 8 -# -# # START - Integration tests for Compile with Java 8 and Run with Java 11 -# - <<: *integration_batch_index -# name: "(Compile=openjdk8, Run=openjdk11) batch index integration test" -# jdk: openjdk8 -# env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' -# -# - <<: *integration_input_format -# name: "(Compile=openjdk8, Run=openjdk11) input format integration test" -# jdk: openjdk8 -# env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' -# -# - <<: *integration_input_source -# name: "(Compile=openjdk8, Run=openjdk11) input source integration test" -# jdk: openjdk8 -# env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' -# -# - <<: *integration_perfect_rollup_parallel_batch_index -# name: "(Compile=openjdk8, Run=openjdk11) perfect rollup parallel batch index integration test" -# jdk: openjdk8 -# env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' -# -# - <<: *integration_query -# name: "(Compile=openjdk8, Run=openjdk11) query integration test" -# jdk: openjdk8 -# env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' -# -# - <<: *integration_query_retry -# name: "(Compile=openjdk8, Run=openjdk11) query retry integration test for missing segments" -# jdk: openjdk8 -# env: 
TESTNG_GROUPS='-Dgroups=query-retry' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' -# -# - <<: *integration_query_error -# name: "(Compile=openjdk8, Run=openjdk11) query error integration test for missing segments" -# jdk: openjdk8 -# env: TESTNG_GROUPS='-Dgroups=query-error' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' -# -# - <<: *integration_security -# name: "(Compile=openjdk8, Run=openjdk11) security integration test" -# jdk: openjdk8 -# env: TESTNG_GROUPS='-Dgroups=security' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' -# -# - <<: *integration_ldap_security -# name: "(Compile=openjdk8, Run=openjdk11) ldap security integration test" -# jdk: openjdk8 -# env: TESTNG_GROUPS='-Dgroups=ldap-security' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' -# -# - <<: *integration_realtime_index -# name: "(Compile=openjdk8, Run=openjdk11) realtime index integration test" -# jdk: openjdk8 -# env: TESTNG_GROUPS='-Dgroups=realtime-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' -# -# - <<: *integration_append_ingestion -# name: "(Compile=openjdk8, Run=openjdk11) append ingestion integration test" -# jdk: openjdk8 -# env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' -# -# - <<: *integration_compaction_tests -# name: "(Compile=openjdk8, Run=openjdk11) compaction integration test" -# jdk: openjdk8 -# env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' -# -# - <<: *integration_tests -# name: "(Compile=openjdk8, Run=openjdk11) other integration test" -# jdk: openjdk8 -# env: 
TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability,upgrade,shuffle-deep-store,custom-coordinator-duties' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' -# -# - <<: *integration_tests -# name: "(Compile=openjdk8, Run=openjdk11) leadership and high availability integration tests" -# jdk: openjdk8 -# env: TESTNG_GROUPS='-Dgroups=high-availability' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' -# -# - <<: *integration_query -# name: "(Compile=openjdk8, Run=openjdk11) query integration test (mariaDB)" -# jdk: openjdk8 -# env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' MYSQL_DRIVER_CLASSNAME='org.mariadb.jdbc.Driver' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' -# -# # END - Integration tests for Compile with Java 8 and Run with Java 11 -# -# - &integration_batch_index_k8s -# name: "(Compile=openjdk8, Run=openjdk8, Cluster Build On K8s) ITNestedQueryPushDownTest integration test" -# stage: Tests - phase 2 -# jdk: openjdk8 -# services: &integration_test_services_k8s -# - docker -# env: CONFIG_FILE='k8s_run_config_file.json' IT_TEST='-Dit.test=ITNestedQueryPushDownTest' POD_NAME=int-test POD_NAMESPACE=default BUILD_DRUID_CLSUTER=true -# script: &run_integration_test_k8s -# - ${MVN} verify -pl 
integration-tests -P int-tests-config-file ${IT_TEST} ${MAVEN_SKIP} -Dpod.name=${POD_NAME} -Dpod.namespace=${POD_NAMESPACE} -Dbuild.druid.cluster=${BUILD_DRUID_CLSUTER} -# after_failure: &integration_test_diags_k8s -# - for v in broker middlemanager router coordinator historical ; do -# echo "------------------------druid-tiny-cluster-"$v"s-0-------------------------"; -# sudo /usr/local/bin/kubectl logs --tail 1000 druid-tiny-cluster-"$v"s-0; -# done -# - name: "security vulnerabilities" -# stage: cron -# install: skip -# script: |- -# ${MVN} dependency-check:purge dependency-check:check || { echo " -# -# The OWASP dependency check has found security vulnerabilities. Please use a newer version -# of the dependency that does not have vulnerabilities. To see a report run -# `mvn dependency-check:check` -# If the analysis has false positives, -# they can be suppressed by adding entries to owasp-dependency-check-suppressions.xml (for more -# information, see https://jeremylong.github.io/DependencyCheck/general/suppression.html). -# -# " && false; } + + - name: "web console end-to-end test" + before_install: *setup_generate_license + install: web-console/script/druid build + before_script: + - ./check_test_suite.py && travis_terminate 0 || echo 'Starting nvm install...' + - nvm install 14.19.0 + - web-console/script/druid start + script: (cd web-console && npm run test-e2e) + after_script: web-console/script/druid stop + + - name: "docs" + install: ./check_test_suite.py && travis_terminate 0 || (cd website && npm install) + script: |- + (cd website && npm run lint && npm run spellcheck) || { echo " + + If there are spell check errors: + + 1) Suppressing False Positives: Edit website/.spelling to add suppressions. Instructions + are at the top of the file and explain how to suppress false positives either globally or + within a particular file. 
+ + 2) Running Spell Check Locally: cd website && npm install && npm run spellcheck + + For more information, refer to: https://www.npmjs.com/package/markdown-spellcheck + + " && false; } + + # Integration tests Java Compile version is set by the machine environment jdk (set by the jdk key) + # Integration tests Java Runtime version is set by the JVM_RUNTIME env property (set env key to -Djvm.runtime=) + # Integration tests will either use MiddleManagers or Indexers + # (Currently integration tests only support running with jvm runtime 8 and 11) + # START - Integration tests for Compile with Java 8 and Run with Java 8 + - &integration_batch_index + name: "(Compile=openjdk8, Run=openjdk8) batch index integration test" + stage: Tests - phase 2 + jdk: openjdk8 + services: &integration_test_services + - docker + env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' + script: &run_integration_test + - ${MVN} verify -pl integration-tests -P integration-tests ${TESTNG_GROUPS} ${JVM_RUNTIME} -Dit.indexer=${USE_INDEXER} ${MAVEN_SKIP} -Doverride.config.path=${OVERRIDE_CONFIG_PATH} + after_failure: &integration_test_diags + - for v in ~/shared/logs/*.log ; do + echo $v logtail ======================== ; tail -100 $v ; + done + - for v in broker middlemanager overlord router coordinator historical ; do + echo $v dmesg ======================== ; + docker exec -it druid-$v sh -c 'dmesg | tail -3' ; + done + + - <<: *integration_batch_index + name: "(Compile=openjdk8, Run=openjdk8) batch index integration test with Indexer" + env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' + + - &integration_input_format + name: "(Compile=openjdk8, Run=openjdk8) input format integration test" + stage: Tests - phase 2 + jdk: openjdk8 + services: *integration_test_services + env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' + script: *run_integration_test + 
after_failure: *integration_test_diags + + - <<: *integration_input_format + name: "(Compile=openjdk8, Run=openjdk8) input format integration test with Indexer" + env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' + + - &integration_input_source + name: "(Compile=openjdk8, Run=openjdk8) input source integration test" + stage: Tests - phase 2 + jdk: openjdk8 + services: *integration_test_services + env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' + script: *run_integration_test + after_failure: *integration_test_diags + + - <<: *integration_input_source + name: "(Compile=openjdk8, Run=openjdk8) input source integration test with Indexer" + env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' + + - &integration_perfect_rollup_parallel_batch_index + name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test" + jdk: openjdk8 + stage: Tests - phase 2 + services: *integration_test_services + env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' + script: *run_integration_test + after_failure: *integration_test_diags + + - <<: *integration_perfect_rollup_parallel_batch_index + name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test with Indexer" + env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' + + - <<: *integration_perfect_rollup_parallel_batch_index + name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test with deep storage as intermediate store" + env: TESTNG_GROUPS='-Dgroups=shuffle-deep-store' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/shuffle-deep-store' + + - <<: *integration_perfect_rollup_parallel_batch_index + 
name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test with deep storage as intermediate store with indexer" + env: TESTNG_GROUPS='-Dgroups=shuffle-deep-store' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/shuffle-deep-store' + + - &integration_kafka_index + name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test" + stage: Tests - phase 2 + jdk: openjdk8 + services: *integration_test_services + env: TESTNG_GROUPS='-Dgroups=kafka-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' + script: *run_integration_test + after_failure: *integration_test_diags + + - <<: *integration_kafka_index + name: "(Compile=openjdk8, Run=openjdk8) kafka index, transactional kafka index integration test with Indexer" + env: TESTNG_GROUPS='-Dgroups=kafka-index,kafka-transactional-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' + + - <<: *integration_kafka_index + name: "(Compile=openjdk8, Run=openjdk8) custom coordinator duties integration test" + env: TESTNG_GROUPS='-Dgroups=custom-coordinator-duties' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/custom-coordinator-duties' + + - &integration_kafka_index_slow + name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test slow" + stage: Tests - phase 2 + jdk: openjdk8 + services: *integration_test_services + env: TESTNG_GROUPS='-Dgroups=kafka-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' + script: *run_integration_test + after_failure: *integration_test_diags + + - <<: *integration_kafka_index_slow + name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test slow with Indexer" + env: TESTNG_GROUPS='-Dgroups=kafka-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' + + - &integration_kafka_transactional_index + name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index 
integration test" + stage: Tests - phase 2 + jdk: openjdk8 + services: *integration_test_services + env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' + script: *run_integration_test + after_failure: *integration_test_diags + + - &integration_kafka_transactional_index_slow + name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test slow" + stage: Tests - phase 2 + jdk: openjdk8 + services: *integration_test_services + env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' + script: *run_integration_test + after_failure: *integration_test_diags + + - <<: *integration_kafka_transactional_index_slow + name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test slow with Indexer" + env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' + + - &integration_kafka_format_tests + name: "(Compile=openjdk8, Run=openjdk8) Kafka index integration test with various formats" + stage: Tests - phase 2 + jdk: openjdk8 + services: *integration_test_services + env: TESTNG_GROUPS='-Dgroups=kafka-data-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' + script: *run_integration_test + after_failure: *integration_test_diags + + - <<: *integration_kafka_format_tests + name: "(Compile=openjdk8, Run=openjdk8) Kafka index integration test with various formats with Indexer" + env: TESTNG_GROUPS='-Dgroups=kafka-data-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' + + - &integration_query + name: "(Compile=openjdk8, Run=openjdk8) query integration test" + stage: Tests - phase 2 + jdk: openjdk8 + services: *integration_test_services + env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' + script: *run_integration_test + 
after_failure: *integration_test_diags + + - &integration_query_retry + name: "(Compile=openjdk8, Run=openjdk8) query retry integration test for missing segments" + stage: Tests - phase 2 + jdk: openjdk8 + services: *integration_test_services + env: TESTNG_GROUPS='-Dgroups=query-retry' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' + script: *run_integration_test + after_failure: *integration_test_diags + + - &integration_query_error + name: "(Compile=openjdk8, Run=openjdk8) query error integration test" + stage: Tests - phase 2 + jdk: openjdk8 + services: *integration_test_services + env: TESTNG_GROUPS='-Dgroups=query-error' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' + script: *run_integration_test + after_failure: *integration_test_diags + + - &integration_security + name: "(Compile=openjdk8, Run=openjdk8) security integration test" + stage: Tests - phase 2 + jdk: openjdk8 + services: *integration_test_services + env: TESTNG_GROUPS='-Dgroups=security' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' + script: *run_integration_test + after_failure: *integration_test_diags + + - &integration_ldap_security + name: "(Compile=openjdk8, Run=openjdk8) ldap security integration test" + stage: Tests - phase 2 + jdk: openjdk8 + services: *integration_test_services + env: TESTNG_GROUPS='-Dgroups=ldap-security' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' + script: *run_integration_test + after_failure: *integration_test_diags + + - &integration_realtime_index + name: "(Compile=openjdk8, Run=openjdk8) realtime index integration test" + stage: Tests - phase 2 + jdk: openjdk8 + services: *integration_test_services + env: TESTNG_GROUPS='-Dgroups=realtime-index' JVM_RUNTIME='-Djvm.runtime=8' 
USE_INDEXER='middleManager' + script: *run_integration_test + after_failure: *integration_test_diags + + - &integration_append_ingestion + name: "(Compile=openjdk8, Run=openjdk8) append ingestion integration test" + stage: Tests - phase 2 + jdk: openjdk8 + services: *integration_test_services + env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' + script: *run_integration_test + after_failure: *integration_test_diags + + - <<: *integration_append_ingestion + name: "(Compile=openjdk8, Run=openjdk8) append ingestion integration test with Indexer" + env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' + + - &integration_compaction_tests + name: "(Compile=openjdk8, Run=openjdk8) compaction integration test" + stage: Tests - phase 2 + jdk: openjdk8 + services: *integration_test_services + env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' + script: *run_integration_test + after_failure: *integration_test_diags + + - <<: *integration_compaction_tests + name: "(Compile=openjdk8, Run=openjdk8) compaction integration test with Indexer" + env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' + + - &integration_tests + name: "(Compile=openjdk8, Run=openjdk8) other integration tests" + stage: Tests - phase 2 + jdk: openjdk8 + services: *integration_test_services + env: 
TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability,upgrade,shuffle-deep-store,custom-coordinator-duties' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' + script: *run_integration_test + after_failure: *integration_test_diags + + - <<: *integration_tests + name: "(Compile=openjdk8, Run=openjdk8) other integration tests with Indexer" + env: TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability,upgrade,shuffle-deep-store,custom-coordinator-duties' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' + + - <<: *integration_tests + name: "(Compile=openjdk8, Run=openjdk8) leadership and high availability integration tests" + jdk: openjdk8 + env: TESTNG_GROUPS='-Dgroups=high-availability' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' 
OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' + + - <<: *integration_query + name: "(Compile=openjdk8, Run=openjdk8) query integration test (mariaDB)" + jdk: openjdk8 + env: TESTNG_GROUPS='-Dgroups=query' USE_INDEXER='middleManager' MYSQL_DRIVER_CLASSNAME='org.mariadb.jdbc.Driver' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' + + # END - Integration tests for Compile with Java 8 and Run with Java 8 + + # START - Integration tests for Compile with Java 8 and Run with Java 11 + - <<: *integration_batch_index + name: "(Compile=openjdk8, Run=openjdk11) batch index integration test" + jdk: openjdk8 + env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' + + - <<: *integration_input_format + name: "(Compile=openjdk8, Run=openjdk11) input format integration test" + jdk: openjdk8 + env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' + + - <<: *integration_input_source + name: "(Compile=openjdk8, Run=openjdk11) input source integration test" + jdk: openjdk8 + env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' + + - <<: *integration_perfect_rollup_parallel_batch_index + name: "(Compile=openjdk8, Run=openjdk11) perfect rollup parallel batch index integration test" + jdk: openjdk8 + env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' + + - <<: *integration_query + name: "(Compile=openjdk8, Run=openjdk11) query integration test" + jdk: openjdk8 + env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' + + - <<: *integration_query_retry + name: "(Compile=openjdk8, Run=openjdk11) query retry integration test for missing segments" + jdk: openjdk8 + env: TESTNG_GROUPS='-Dgroups=query-retry' 
JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' + + - <<: *integration_query_error + name: "(Compile=openjdk8, Run=openjdk11) query error integration test for missing segments" + jdk: openjdk8 + env: TESTNG_GROUPS='-Dgroups=query-error' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' + + - <<: *integration_security + name: "(Compile=openjdk8, Run=openjdk11) security integration test" + jdk: openjdk8 + env: TESTNG_GROUPS='-Dgroups=security' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' + + - <<: *integration_ldap_security + name: "(Compile=openjdk8, Run=openjdk11) ldap security integration test" + jdk: openjdk8 + env: TESTNG_GROUPS='-Dgroups=ldap-security' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' + + - <<: *integration_realtime_index + name: "(Compile=openjdk8, Run=openjdk11) realtime index integration test" + jdk: openjdk8 + env: TESTNG_GROUPS='-Dgroups=realtime-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' + + - <<: *integration_append_ingestion + name: "(Compile=openjdk8, Run=openjdk11) append ingestion integration test" + jdk: openjdk8 + env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' + + - <<: *integration_compaction_tests + name: "(Compile=openjdk8, Run=openjdk11) compaction integration test" + jdk: openjdk8 + env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' + + - <<: *integration_tests + name: "(Compile=openjdk8, Run=openjdk11) other integration test" + jdk: openjdk8 + env: 
TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability,upgrade,shuffle-deep-store,custom-coordinator-duties' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' + + - <<: *integration_tests + name: "(Compile=openjdk8, Run=openjdk11) leadership and high availability integration tests" + jdk: openjdk8 + env: TESTNG_GROUPS='-Dgroups=high-availability' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' + + - <<: *integration_query + name: "(Compile=openjdk8, Run=openjdk11) query integration test (mariaDB)" + jdk: openjdk8 + env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' MYSQL_DRIVER_CLASSNAME='org.mariadb.jdbc.Driver' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data' + + # END - Integration tests for Compile with Java 8 and Run with Java 11 + + - &integration_batch_index_k8s + name: "(Compile=openjdk8, Run=openjdk8, Cluster Build On K8s) ITNestedQueryPushDownTest integration test" + stage: Tests - phase 2 + jdk: openjdk8 + services: &integration_test_services_k8s + - docker + env: CONFIG_FILE='k8s_run_config_file.json' IT_TEST='-Dit.test=ITNestedQueryPushDownTest' POD_NAME=int-test POD_NAMESPACE=default BUILD_DRUID_CLUSTER=true + script: &run_integration_test_k8s + - ${MVN} verify -pl integration-tests -P 
int-tests-config-file ${IT_TEST} ${MAVEN_SKIP} -Dpod.name=${POD_NAME} -Dpod.namespace=${POD_NAMESPACE} -Dbuild.druid.cluster=${BUILD_DRUID_CLUSTER} + after_failure: &integration_test_diags_k8s + - for v in broker middlemanager router coordinator historical ; do + echo "------------------------druid-tiny-cluster-"$v"s-0-------------------------"; + sudo /usr/local/bin/kubectl logs --tail 1000 druid-tiny-cluster-"$v"s-0; + done + - name: "security vulnerabilities" + stage: cron + install: skip + script: |- + ${MVN} dependency-check:purge dependency-check:check || { echo " + + The OWASP dependency check has found security vulnerabilities. Please use a newer version + of the dependency that does not have vulnerabilities. To see a report run + `mvn dependency-check:check` + If the analysis has false positives, + they can be suppressed by adding entries to owasp-dependency-check-suppressions.xml (for more + information, see https://jeremylong.github.io/DependencyCheck/general/suppression.html). + + " && false; } # Travis CI only supports per build (and not per-job notifications): https://github.com/travis-ci/travis-ci/issues/9888 notifications: