From b4ca576dbb3a80441544ed59eb4cf42e4d7a965d Mon Sep 17 00:00:00 2001
From: Benjamin Gonzalez
Date: Mon, 3 Jan 2022 18:54:36 -0600
Subject: [PATCH 1/5] [BEAM-12572] Add javaExamplesFlink gradle task

---
 runners/flink/flink_runner.gradle | 34 +++++++++++++++++++++++++++++++
 1 file changed, 34 insertions(+)

diff --git a/runners/flink/flink_runner.gradle b/runners/flink/flink_runner.gradle
index 608a24e11dd9..3ff352603d9e 100644
--- a/runners/flink/flink_runner.gradle
+++ b/runners/flink/flink_runner.gradle
@@ -123,6 +123,7 @@ test {
 configurations {
   validatesRunner
   miniCluster
+  examplesJavaIntegrationTest
 }
 
 dependencies {
@@ -177,8 +178,14 @@ dependencies {
   implementation library.java.jackson_databind
   implementation "org.apache.flink:flink-annotations:$flink_version"
   implementation "org.apache.flink:flink-optimizer_2.11:$flink_version"
+  examplesJavaIntegrationTest project(project.path)
+  examplesJavaIntegrationTest project(":examples:java")
+  examplesJavaIntegrationTest project(path: ":examples:java", configuration: "testRuntimeMigration")
 }
 
+def gcpProject = project.findProperty('gcpProject') ?: 'apache-beam-testing'
+def tempLocation = project.findProperty('tempLocation') ?: 'gs://temp-storage-for-end-to-end-tests'
+
 class ValidatesRunnerConfig {
   String name
   boolean streaming
@@ -274,6 +281,33 @@ tasks.register('validatesRunner') {
 // Generates :runners:flink:1.13:runQuickstartJavaFlinkLocal
 createJavaExamplesArchetypeValidationTask(type: 'Quickstart', runner: 'FlinkLocal')
+task examplesJavaFlinkIntegrationTest(type: Test) {
+  group = "Verification"
+  // Disable gradle cache
+  outputs.upToDateWhen { false }
+// def runnerType = config.streaming ? "streaming" : "batch"
+  def pipelineOptionsArray = ["--runner=TestFlinkRunner",
+//      "--streaming=${config.streaming}",
+      "--parallelism=2",
+      "--tempLocation=${tempLocation}",
+      "--tempRoot=${tempLocation}",
+      "--project=${gcpProject}",
+  ]
+
+  def pipelineOptions = JsonOutput.toJson(pipelineOptionsArray)
+  systemProperty "beamTestPipelineOptions", pipelineOptions
+
+  // The examples/java preCommit task already covers running WordCountIT/WindowedWordCountIT so
+  // this postCommit integration test excludes them.
+  include '**/*IT.class'
+  exclude '**/BigQueryTornadoesIT.class'
+  exclude '**/WindowedWordCountIT.class'
+  maxParallelForks 4
+  classpath = configurations.examplesJavaIntegrationTest
+  testClassesDirs = files(project(":examples:java").sourceSets.test.output.classesDirs)
+  useJUnit { }
+}
+
 /**
  * Updates the documentation with the current pipeline options.
  */

From af74d8a1c78b80469bb15e66c1e0bdfe350bba7f Mon Sep 17 00:00:00 2001
From: Benjamin Gonzalez
Date: Wed, 5 Jan 2022 11:56:17 -0600
Subject: [PATCH 2/5] [BEAM-12572] Add Java jobs to run examples on Spark,
 Flink, Direct Runners

---
 ...job_PostCommit_Java_Examples_Direct.groovy | 46 +++++++++++++++++++
 .../job_PostCommit_Java_Examples_Flink.groovy | 46 +++++++++++++++++++
 .../job_PostCommit_Java_Examples_Spark.groovy | 46 +++++++++++++++++++
 runners/direct-java/build.gradle              | 30 ++++++++++++
 runners/flink/flink_runner.gradle             | 10 ++--
 runners/spark/spark_runner.gradle             | 34 ++++++++++++++
 6 files changed, 206 insertions(+), 6 deletions(-)
 create mode 100644 .test-infra/jenkins/job_PostCommit_Java_Examples_Direct.groovy
 create mode 100644 .test-infra/jenkins/job_PostCommit_Java_Examples_Flink.groovy
 create mode 100644 .test-infra/jenkins/job_PostCommit_Java_Examples_Spark.groovy

diff --git a/.test-infra/jenkins/job_PostCommit_Java_Examples_Direct.groovy b/.test-infra/jenkins/job_PostCommit_Java_Examples_Direct.groovy
new file mode 100644
index 000000000000..c9c0cbdf3494
--- /dev/null
+++ b/.test-infra/jenkins/job_PostCommit_Java_Examples_Direct.groovy
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import CommonJobProperties as commonJobProperties
+
+// This job runs the Java examples tests with DirectRunner.
+job('beam_PostCommit_Java_Examples_Direct') {
+    description('Run Java Examples on Direct Runner')
+
+    // Set common parameters.
+    commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 120)
+
+    // Allows triggering this build against pull requests.
+    commonJobProperties.enablePhraseTriggeringFromPullRequest(
+        delegate,
+        'Java Direct Runner Examples',
+        'Run Java Examples_Direct')
+
+    publishers {
+        archiveJunit('**/build/test-results/**/*.xml')
+    }
+
+    // Execute shell command to run examples.
+    steps {
+        gradle {
+            rootBuildScriptDir(commonJobProperties.checkoutDir)
+            tasks(':runners:direct:examplesIntegrationTest')
+            commonJobProperties.setGradleSwitches(delegate)
+        }
+    }
+}
diff --git a/.test-infra/jenkins/job_PostCommit_Java_Examples_Flink.groovy b/.test-infra/jenkins/job_PostCommit_Java_Examples_Flink.groovy
new file mode 100644
index 000000000000..1b648ce02bc7
--- /dev/null
+++ b/.test-infra/jenkins/job_PostCommit_Java_Examples_Flink.groovy
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import CommonJobProperties as commonJobProperties
+
+// This job runs the Java examples tests with FlinkRunner.
+job('beam_PostCommit_Java_Examples_Flink') {
+    description('Run Java Examples on Flink Runner')
+
+    // Set common parameters.
+    commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 120)
+
+    // Allows triggering this build against pull requests.
+    commonJobProperties.enablePhraseTriggeringFromPullRequest(
+        delegate,
+        'Java Flink Runner Examples',
+        'Run Java Examples_Flink')
+
+    publishers {
+        archiveJunit('**/build/test-results/**/*.xml')
+    }
+
+    // Execute shell command to run examples.
+    steps {
+        gradle {
+            rootBuildScriptDir(commonJobProperties.checkoutDir)
+            tasks(':runners:flink:examplesIntegrationTest')
+            commonJobProperties.setGradleSwitches(delegate)
+        }
+    }
+}
diff --git a/.test-infra/jenkins/job_PostCommit_Java_Examples_Spark.groovy b/.test-infra/jenkins/job_PostCommit_Java_Examples_Spark.groovy
new file mode 100644
index 000000000000..2ec34762596e
--- /dev/null
+++ b/.test-infra/jenkins/job_PostCommit_Java_Examples_Spark.groovy
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import CommonJobProperties as commonJobProperties
+
+// This job runs the Java examples tests with SparkRunner.
+job('beam_PostCommit_Java_Examples_Spark') {
+    description('Run Java Examples on Spark Runner')
+
+    // Set common parameters.
+    commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 120)
+
+    // Allows triggering this build against pull requests.
+    commonJobProperties.enablePhraseTriggeringFromPullRequest(
+        delegate,
+        'Java Spark Runner Examples',
+        'Run Java Examples_Spark')
+
+    publishers {
+        archiveJunit('**/build/test-results/**/*.xml')
+    }
+
+    // Execute shell command to run examples.
+    steps {
+        gradle {
+            rootBuildScriptDir(commonJobProperties.checkoutDir)
+            tasks(':runners:spark:examplesIntegrationTest')
+            commonJobProperties.setGradleSwitches(delegate)
+        }
+    }
+}
diff --git a/runners/direct-java/build.gradle b/runners/direct-java/build.gradle
index ad8b36083a5d..ba2ad93f7c0b 100644
--- a/runners/direct-java/build.gradle
+++ b/runners/direct-java/build.gradle
@@ -61,6 +61,7 @@ evaluationDependsOn(":sdks:java:core")
 configurations {
   needsRunner
   validatesRunner
+  examplesJavaIntegrationTest
 }
 
 dependencies {
@@ -94,6 +95,9 @@ dependencies {
   permitUnusedDeclared library.java.vendored_grpc_1_36_0
   permitUnusedDeclared project(":runners:java-fn-execution")
   permitUnusedDeclared project(":sdks:java:fn-execution")
+  examplesJavaIntegrationTest project(project.path)
+  examplesJavaIntegrationTest project(":examples:java")
+  examplesJavaIntegrationTest project(path: ":examples:java", configuration: "testRuntimeMigration")
 }
 
 // windows handles quotes differently from linux,
@@ -173,6 +177,7 @@ def gcpProject = project.findProperty('gcpProject') ?: 'apache-beam-testing'
 def gcsBucket = project.findProperty('gcsBucket') ?: 'temp-storage-for-release-validation-tests/nightly-snapshot-validation'
 def bqDataset = project.findProperty('bqDataset') ?: 'beam_postrelease_mobile_gaming'
 def pubsubTopic = project.findProperty('pubsubTopic') ?: 'java_mobile_gaming_topic'
+def tempLocation = project.findProperty('tempLocation') ?: 'gs://temp-storage-for-end-to-end-tests'
 
 // Generates :runners:direct-java:runQuickstartJavaDirect
 createJavaExamplesArchetypeValidationTask(type: 'Quickstart', runner: 'Direct')
@@ -184,3 +189,28 @@ createJavaExamplesArchetypeValidationTask(type: 'MobileGaming',
     gcsBucket: gcsBucket,
     bqDataset: bqDataset,
     pubsubTopic: pubsubTopic)
+
+task examplesIntegrationTest(type: Test) {
+  description = "Runs examples tests on DirectRunner"
+
+  testLogging.showStandardStreams = true
+
+  String[] pipelineOptions = [
+      "--runner=DirectRunner",
+      "--runnerDeterminedSharding=false",
+      "--tempLocation=${tempLocation}",
+      "--tempRoot=${tempLocation}",
+      "--project=${gcpProject}",
+  ]
+  systemProperty "beamTestPipelineOptions", pipelineOptionsStringCrossPlatformHandling(pipelineOptions)
+
+  // TODO (BEAM-12572) Fix integration Tests to run with DirectRunner
+  include '**/*IT.class'
+  exclude '**/WindowedWordCountIT.class' // Timeouts
+  exclude '**/TopWikipediaSessionsIT.class' // OOM Exception
+  exclude '**/TfIdfIT.class' // Timeouts
+  maxParallelForks 4
+  classpath = configurations.examplesJavaIntegrationTest
+  testClassesDirs = files(project(":examples:java").sourceSets.test.output.classesDirs)
+  useJUnit { }
+}
\ No newline at end of file
diff --git a/runners/flink/flink_runner.gradle b/runners/flink/flink_runner.gradle
index 3ff352603d9e..0039f23ab636 100644
--- a/runners/flink/flink_runner.gradle
+++ b/runners/flink/flink_runner.gradle
@@ -281,13 +281,12 @@ tasks.register('validatesRunner') {
 // Generates :runners:flink:1.13:runQuickstartJavaFlinkLocal
 createJavaExamplesArchetypeValidationTask(type: 'Quickstart', runner: 'FlinkLocal')
-task examplesJavaFlinkIntegrationTest(type: Test) {
+task examplesIntegrationTest(type: Test) {
   group = "Verification"
   // Disable gradle cache
   outputs.upToDateWhen { false }
 // def runnerType = config.streaming ? "streaming" : "batch"
   def pipelineOptionsArray = ["--runner=TestFlinkRunner",
-//      "--streaming=${config.streaming}",
       "--parallelism=2",
       "--tempLocation=${tempLocation}",
       "--tempRoot=${tempLocation}",
       "--project=${gcpProject}",
@@ -297,11 +296,10 @@ task examplesJavaFlinkIntegrationTest(type: Test) {
   def pipelineOptions = JsonOutput.toJson(pipelineOptionsArray)
   systemProperty "beamTestPipelineOptions", pipelineOptions
 
-  // The examples/java preCommit task already covers running WordCountIT/WindowedWordCountIT so
-  // this postCommit integration test excludes them.
   include '**/*IT.class'
-  exclude '**/BigQueryTornadoesIT.class'
-  exclude '**/WindowedWordCountIT.class'
+  // TODO (BEAM-12572) Fix integration Tests to run with FlinkRunner
+  exclude '**/BigQueryTornadoesIT.class' // Fails with AssertionError
+  exclude '**/WindowedWordCountIT.class' // Failed with Grpc connection error
   maxParallelForks 4
   classpath = configurations.examplesJavaIntegrationTest
   testClassesDirs = files(project(":examples:java").sourceSets.test.output.classesDirs)
diff --git a/runners/spark/spark_runner.gradle b/runners/spark/spark_runner.gradle
index 5a84b93f55f1..a32295bd1fc1 100644
--- a/runners/spark/spark_runner.gradle
+++ b/runners/spark/spark_runner.gradle
@@ -44,6 +44,7 @@ evaluationDependsOn(":runners:core-java")
 
 configurations {
   validatesRunner
+  examplesJavaIntegrationTest
 }
 
 def hadoopVersions = [
@@ -215,8 +216,15 @@ dependencies {
   hadoopVersions.each {kv ->
     "hadoopVersion$kv.key" "org.apache.hadoop:hadoop-common:$kv.value"
   }
+  examplesJavaIntegrationTest project(project.path)
+  examplesJavaIntegrationTest project(":examples:java")
+  examplesJavaIntegrationTest project(path: ":examples:java", configuration: "testRuntimeMigration")
+  examplesJavaIntegrationTest project(":runners:spark:2")
 }
 
+def gcpProject = project.findProperty('gcpProject') ?: 'apache-beam-testing'
+def tempLocation = project.findProperty('tempLocation') ?: 'gs://temp-storage-for-end-to-end-tests'
+
 configurations.testRuntimeClasspath {
   // Testing the Spark runner causes a StackOverflowError if slf4j-jdk14 is on the classpath
   exclude group: "org.slf4j", module: "slf4j-jdk14"
@@ -398,6 +406,32 @@ task hadoopVersionsTest(group: "Verification") {
   dependsOn taskNames
 }
 
+task examplesSparkIntegrationTest(type: Test) {
+  group = "Verification"
+  // Disable gradle cache
+  outputs.upToDateWhen { false }
+  def pipelineOptions = JsonOutput.toJson([
+      "--runner=TestSparkRunner",
+      "--enableSparkMetricSinks=true",
+      "--tempLocation=${tempLocation}",
+      "--tempRoot=${tempLocation}",
+      "--project=${gcpProject}",
+  ])
+  systemProperty "beamTestPipelineOptions", pipelineOptions
+  systemProperty "beam.spark.test.reuseSparkContext", "true"
+  systemProperty "spark.ui.enabled", "false"
+  systemProperty "spark.ui.showConsoleProgress", "false"
+
+  include '**/*IT.class'
+// exclude '**/BigQueryTornadoesIT.class' // Fails with AssertionError
+// exclude '**/WindowedWordCountIT.class' // Failed with Grpc connection error
+  maxParallelForks 4
+  classpath = configurations.examplesJavaIntegrationTest
+  testClassesDirs += files(project(":examples:java").sourceSets.test.output.classesDirs)
+  useJUnit { }
+  jvmArgs '-Xmx3g'
+}
+
 hadoopVersions.each {kv ->
   task "hadoopVersion${kv.key}Test"(type: Test, group: "Verification") {
     description = "Runs Spark tests with Hadoop version $kv.value"

From fe215440525a2ca625294899dad5d551c73551b4 Mon Sep 17 00:00:00 2001
From: Benjamin Gonzalez
Date: Thu, 6 Jan 2022 18:00:49 -0600
Subject: [PATCH 3/5] [BEAM-12572] Add jenkins jobs to execute Java examples in
 Spark, Flink, Direct Runners

---
 .test-infra/jenkins/job_PostCommit_Java_Examples_Flink.groovy | 2 +-
 .test-infra/jenkins/job_PostCommit_Java_Examples_Spark.groovy | 3 ++-
 runners/direct-java/build.gradle                              | 1 +
 runners/flink/flink_runner.gradle                             | 2 +-
 4 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/.test-infra/jenkins/job_PostCommit_Java_Examples_Flink.groovy b/.test-infra/jenkins/job_PostCommit_Java_Examples_Flink.groovy
index 1b648ce02bc7..f2099bb863ce 100644
--- a/.test-infra/jenkins/job_PostCommit_Java_Examples_Flink.groovy
+++ b/.test-infra/jenkins/job_PostCommit_Java_Examples_Flink.groovy
@@ -39,7 +39,7 @@ job('beam_PostCommit_Java_Examples_Flink') {
     steps {
         gradle {
             rootBuildScriptDir(commonJobProperties.checkoutDir)
-            tasks(':runners:flink:examplesIntegrationTest')
+            tasks(":runners:flink:${CommonTestProperties.getFlinkVersion()}:examplesIntegrationTest")
             commonJobProperties.setGradleSwitches(delegate)
         }
     }
diff --git a/.test-infra/jenkins/job_PostCommit_Java_Examples_Spark.groovy b/.test-infra/jenkins/job_PostCommit_Java_Examples_Spark.groovy
index 2ec34762596e..57a6b2a36266 100644
--- a/.test-infra/jenkins/job_PostCommit_Java_Examples_Spark.groovy
+++ b/.test-infra/jenkins/job_PostCommit_Java_Examples_Spark.groovy
@@ -39,7 +39,8 @@ job('beam_PostCommit_Java_Examples_Spark') {
     steps {
         gradle {
             rootBuildScriptDir(commonJobProperties.checkoutDir)
-            tasks(':runners:spark:examplesIntegrationTest')
+            tasks(':runners:spark:2:examplesIntegrationTest')
+            tasks(':runners:spark:3:examplesIntegrationTest')
             commonJobProperties.setGradleSwitches(delegate)
         }
     }
diff --git a/runners/direct-java/build.gradle b/runners/direct-java/build.gradle
index ba2ad93f7c0b..efe129675783 100644
--- a/runners/direct-java/build.gradle
+++ b/runners/direct-java/build.gradle
@@ -57,6 +57,7 @@ description = "Apache Beam :: Runners :: Direct Java"
 evaluationDependsOn(":runners:core-construction-java")
 evaluationDependsOn(":runners:core-java")
 evaluationDependsOn(":sdks:java:core")
+evaluationDependsOn(":examples:java")
 
 configurations {
   needsRunner
diff --git a/runners/flink/flink_runner.gradle b/runners/flink/flink_runner.gradle
index 0039f23ab636..536715e22ce3 100644
--- a/runners/flink/flink_runner.gradle
+++ b/runners/flink/flink_runner.gradle
@@ -41,6 +41,7 @@ description = "Apache Beam :: Runners :: Flink $flink_version"
  */
 evaluationDependsOn(":sdks:java:core")
 evaluationDependsOn(":runners:core-java")
+evaluationDependsOn(":examples:java")
 
 /*
 * Copy & merge source overrides into build directory.
@@ -285,7 +286,6 @@ task examplesIntegrationTest(type: Test) {
   group = "Verification"
   // Disable gradle cache
   outputs.upToDateWhen { false }
-// def runnerType = config.streaming ? "streaming" : "batch"
   def pipelineOptionsArray = ["--runner=TestFlinkRunner",
       "--parallelism=2",
       "--tempLocation=${tempLocation}",

From 74b44788648c1b8ffb2f112b524215704e6d1a8e Mon Sep 17 00:00:00 2001
From: Benjamin Gonzalez
Date: Thu, 6 Jan 2022 18:44:18 -0600
Subject: [PATCH 4/5] [BEAM-12572] Spotless apply

---
 ...job_PostCommit_Java_Examples_Direct.groovy | 36 +++++++++---------
 .../job_PostCommit_Java_Examples_Flink.groovy | 36 +++++++++---------
 .../job_PostCommit_Java_Examples_Spark.groovy | 38 +++++++++----------
 runners/spark/spark_runner.gradle             | 11 ++----
 4 files changed, 59 insertions(+), 62 deletions(-)

diff --git a/.test-infra/jenkins/job_PostCommit_Java_Examples_Direct.groovy b/.test-infra/jenkins/job_PostCommit_Java_Examples_Direct.groovy
index c9c0cbdf3494..54badc4b514b 100644
--- a/.test-infra/jenkins/job_PostCommit_Java_Examples_Direct.groovy
+++ b/.test-infra/jenkins/job_PostCommit_Java_Examples_Direct.groovy
@@ -20,27 +20,27 @@ import CommonJobProperties as commonJobProperties
 
 // This job runs the Java examples tests with DirectRunner.
 job('beam_PostCommit_Java_Examples_Direct') {
-    description('Run Java Examples on Direct Runner')
+  description('Run Java Examples on Direct Runner')
 
-    // Set common parameters.
-    commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 120)
+  // Set common parameters.
+  commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 120)
 
-    // Allows triggering this build against pull requests.
-    commonJobProperties.enablePhraseTriggeringFromPullRequest(
-        delegate,
-        'Java Direct Runner Examples',
-        'Run Java Examples_Direct')
+  // Allows triggering this build against pull requests.
+  commonJobProperties.enablePhraseTriggeringFromPullRequest(
+      delegate,
+      'Java Direct Runner Examples',
+      'Run Java Examples_Direct')
 
-    publishers {
-        archiveJunit('**/build/test-results/**/*.xml')
-    }
+  publishers {
+    archiveJunit('**/build/test-results/**/*.xml')
+  }
 
-    // Execute shell command to run examples.
-    steps {
-        gradle {
-            rootBuildScriptDir(commonJobProperties.checkoutDir)
-            tasks(':runners:direct:examplesIntegrationTest')
-            commonJobProperties.setGradleSwitches(delegate)
-        }
+  // Execute shell command to run examples.
+  steps {
+    gradle {
+      rootBuildScriptDir(commonJobProperties.checkoutDir)
+      tasks(':runners:direct:examplesIntegrationTest')
+      commonJobProperties.setGradleSwitches(delegate)
     }
+  }
 }
diff --git a/.test-infra/jenkins/job_PostCommit_Java_Examples_Flink.groovy b/.test-infra/jenkins/job_PostCommit_Java_Examples_Flink.groovy
index f2099bb863ce..0e69fb08fca8 100644
--- a/.test-infra/jenkins/job_PostCommit_Java_Examples_Flink.groovy
+++ b/.test-infra/jenkins/job_PostCommit_Java_Examples_Flink.groovy
@@ -20,27 +20,27 @@ import CommonJobProperties as commonJobProperties
 
 // This job runs the Java examples tests with FlinkRunner.
 job('beam_PostCommit_Java_Examples_Flink') {
-    description('Run Java Examples on Flink Runner')
+  description('Run Java Examples on Flink Runner')
 
-    // Set common parameters.
-    commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 120)
+  // Set common parameters.
+  commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 120)
 
-    // Allows triggering this build against pull requests.
-    commonJobProperties.enablePhraseTriggeringFromPullRequest(
-        delegate,
-        'Java Flink Runner Examples',
-        'Run Java Examples_Flink')
+  // Allows triggering this build against pull requests.
+  commonJobProperties.enablePhraseTriggeringFromPullRequest(
+      delegate,
+      'Java Flink Runner Examples',
+      'Run Java Examples_Flink')
 
-    publishers {
-        archiveJunit('**/build/test-results/**/*.xml')
-    }
+  publishers {
+    archiveJunit('**/build/test-results/**/*.xml')
+  }
 
-    // Execute shell command to run examples.
-    steps {
-        gradle {
-            rootBuildScriptDir(commonJobProperties.checkoutDir)
-            tasks(":runners:flink:${CommonTestProperties.getFlinkVersion()}:examplesIntegrationTest")
-            commonJobProperties.setGradleSwitches(delegate)
-        }
+  // Execute shell command to run examples.
+  steps {
+    gradle {
+      rootBuildScriptDir(commonJobProperties.checkoutDir)
+      tasks(":runners:flink:${CommonTestProperties.getFlinkVersion()}:examplesIntegrationTest")
+      commonJobProperties.setGradleSwitches(delegate)
     }
+  }
 }
diff --git a/.test-infra/jenkins/job_PostCommit_Java_Examples_Spark.groovy b/.test-infra/jenkins/job_PostCommit_Java_Examples_Spark.groovy
index 57a6b2a36266..9e711c1fc7bd 100644
--- a/.test-infra/jenkins/job_PostCommit_Java_Examples_Spark.groovy
+++ b/.test-infra/jenkins/job_PostCommit_Java_Examples_Spark.groovy
@@ -20,28 +20,28 @@ import CommonJobProperties as commonJobProperties
 
 // This job runs the Java examples tests with SparkRunner.
 job('beam_PostCommit_Java_Examples_Spark') {
-    description('Run Java Examples on Spark Runner')
+  description('Run Java Examples on Spark Runner')
 
-    // Set common parameters.
-    commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 120)
+  // Set common parameters.
+  commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 120)
 
-    // Allows triggering this build against pull requests.
-    commonJobProperties.enablePhraseTriggeringFromPullRequest(
-        delegate,
-        'Java Spark Runner Examples',
-        'Run Java Examples_Spark')
+  // Allows triggering this build against pull requests.
+  commonJobProperties.enablePhraseTriggeringFromPullRequest(
+      delegate,
+      'Java Spark Runner Examples',
+      'Run Java Examples_Spark')
 
-    publishers {
-        archiveJunit('**/build/test-results/**/*.xml')
-    }
+  publishers {
+    archiveJunit('**/build/test-results/**/*.xml')
+  }
 
-    // Execute shell command to run examples.
-    steps {
-        gradle {
-            rootBuildScriptDir(commonJobProperties.checkoutDir)
-            tasks(':runners:spark:2:examplesIntegrationTest')
-            tasks(':runners:spark:3:examplesIntegrationTest')
-            commonJobProperties.setGradleSwitches(delegate)
-        }
+  // Execute shell command to run examples.
+  steps {
+    gradle {
+      rootBuildScriptDir(commonJobProperties.checkoutDir)
+      tasks(':runners:spark:2:examplesIntegrationTest')
+      tasks(':runners:spark:3:examplesIntegrationTest')
+      commonJobProperties.setGradleSwitches(delegate)
     }
+  }
 }
diff --git a/runners/spark/spark_runner.gradle b/runners/spark/spark_runner.gradle
index a32295bd1fc1..19fc8c40ee91 100644
--- a/runners/spark/spark_runner.gradle
+++ b/runners/spark/spark_runner.gradle
@@ -41,6 +41,7 @@ description = "Apache Beam :: Runners :: Spark $spark_version"
 evaluationDependsOn(":sdks:java:core")
 evaluationDependsOn(":sdks:java:io:hadoop-format")
 evaluationDependsOn(":runners:core-java")
+evaluationDependsOn(":examples:java")
 
 configurations {
   validatesRunner
@@ -216,10 +217,6 @@ dependencies {
   hadoopVersions.each {kv ->
     "hadoopVersion$kv.key" "org.apache.hadoop:hadoop-common:$kv.value"
   }
-  examplesJavaIntegrationTest project(project.path)
-  examplesJavaIntegrationTest project(":examples:java")
-  examplesJavaIntegrationTest project(path: ":examples:java", configuration: "testRuntimeMigration")
-  examplesJavaIntegrationTest project(":runners:spark:2")
 }
 
 def gcpProject = project.findProperty('gcpProject') ?: 'apache-beam-testing'
@@ -406,7 +403,7 @@ task hadoopVersionsTest(group: "Verification") {
   dependsOn taskNames
 }
 
-task examplesSparkIntegrationTest(type: Test) {
+task examplesIntegrationTest(type: Test) {
   group = "Verification"
   // Disable gradle cache
   outputs.upToDateWhen { false }
@@ -426,8 +423,8 @@ task examplesSparkIntegrationTest(type: Test) {
 // exclude '**/BigQueryTornadoesIT.class' // Fails with AssertionError
 // exclude '**/WindowedWordCountIT.class' // Failed with Grpc connection error
   maxParallelForks 4
-  classpath = configurations.examplesJavaIntegrationTest
-  testClassesDirs += files(project(":examples:java").sourceSets.test.output.classesDirs)
+  classpath = configurations.validatesRunner
+  testClassesDirs = files(project(":examples:java").sourceSets.test.output.classesDirs)
   useJUnit { }
   jvmArgs '-Xmx3g'
 }

From 678ffdb149b340957a4cdb5ef05038350ec35163 Mon Sep 17 00:00:00 2001
From: Benjamin Gonzalez
Date: Mon, 10 Jan 2022 16:46:47 -0600
Subject: [PATCH 5/5] [BEAM-12572] Fix spark exampleValidation tests

---
 runners/spark/spark_runner.gradle | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/runners/spark/spark_runner.gradle b/runners/spark/spark_runner.gradle
index fe46d3b74aef..10aca702308f 100644
--- a/runners/spark/spark_runner.gradle
+++ b/runners/spark/spark_runner.gradle
@@ -421,8 +421,10 @@ task examplesIntegrationTest(type: Test) {
   systemProperty "spark.ui.showConsoleProgress", "false"
 
   include '**/*IT.class'
-// exclude '**/BigQueryTornadoesIT.class' // Fails with AssertionError
-// exclude '**/WindowedWordCountIT.class' // Failed with Grpc connection error
+  exclude '**/WindowedWordCountIT.class' // Failed to read from sharded output
+  exclude '**/AutoCompleteIT.class' // Failed to serialize and deserialize property 'bigQuerySchema'
+  exclude '**/TrafficMaxLaneFlowIT.class' // Failed to serialize and deserialize property 'bigQuerySchema'
+  exclude '**/TrafficRoutesIT.class' // Property [windowDuration] is marked with contradictory annotations.
   maxParallelForks 4
   classpath = configurations.validatesRunner
   testClassesDirs = files(project(":examples:java").sourceSets.test.output.classesDirs)
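
For local verification, a sketch of how the tasks introduced above might be invoked. The task names, Gradle project paths and -P property names are taken from the diffs; the Flink version directory, the Direct runner project path, and the override values in the last command are illustrative assumptions, not part of the patches:

  # Flink: the runner builds per version directory, e.g. 1.13 as referenced in flink_runner.gradle
  ./gradlew :runners:flink:1.13:examplesIntegrationTest

  # Spark 2 and Spark 3, matching the tasks wired into the Jenkins job
  ./gradlew :runners:spark:2:examplesIntegrationTest :runners:spark:3:examplesIntegrationTest

  # Direct runner (project path assumed from the runners/direct-java build file)
  ./gradlew :runners:direct-java:examplesIntegrationTest

  # gcpProject and tempLocation default to apache-beam-testing and
  # gs://temp-storage-for-end-to-end-tests via project.findProperty; placeholder overrides shown here
  ./gradlew :runners:direct-java:examplesIntegrationTest -PgcpProject=my-gcp-project -PtempLocation=gs://my-bucket/tmp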