diff --git a/.test-infra/jenkins/job_PostCommit_Java_Examples_Direct.groovy b/.test-infra/jenkins/job_PostCommit_Java_Examples_Direct.groovy
new file mode 100644
index 000000000000..54badc4b514b
--- /dev/null
+++ b/.test-infra/jenkins/job_PostCommit_Java_Examples_Direct.groovy
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import CommonJobProperties as commonJobProperties
+
+// This job runs the Java examples integration tests with DirectRunner.
+job('beam_PostCommit_Java_Examples_Direct') {
+  description('Run Java Examples on Direct Runner')
+
+  // Set common parameters.
+  commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 120)
+
+  // Allows triggering this build against pull requests.
+  commonJobProperties.enablePhraseTriggeringFromPullRequest(
+      delegate,
+      'Java Direct Runner Examples',
+      'Run Java Examples_Direct')
+
+  publishers {
+    archiveJunit('**/build/test-results/**/*.xml')
+  }
+
+  // Execute the Gradle task that runs the examples.
+  steps {
+    gradle {
+      rootBuildScriptDir(commonJobProperties.checkoutDir)
+      tasks(':runners:direct-java:examplesIntegrationTest')
+      commonJobProperties.setGradleSwitches(delegate)
+    }
+  }
+}
diff --git a/.test-infra/jenkins/job_PostCommit_Java_Examples_Flink.groovy b/.test-infra/jenkins/job_PostCommit_Java_Examples_Flink.groovy
new file mode 100644
index 000000000000..0e69fb08fca8
--- /dev/null
+++ b/.test-infra/jenkins/job_PostCommit_Java_Examples_Flink.groovy
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import CommonJobProperties as commonJobProperties
+
+// This job runs the Java examples integration tests with FlinkRunner.
+job('beam_PostCommit_Java_Examples_Flink') {
+  description('Run Java Examples on Flink Runner')
+
+  // Set common parameters.
+  commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 120)
+
+  // Allows triggering this build against pull requests.
+  commonJobProperties.enablePhraseTriggeringFromPullRequest(
+      delegate,
+      'Java Flink Runner Examples',
+      'Run Java Examples_Flink')
+
+  publishers {
+    archiveJunit('**/build/test-results/**/*.xml')
+  }
+
+  // Execute the Gradle task that runs the examples.
+  steps {
+    gradle {
+      rootBuildScriptDir(commonJobProperties.checkoutDir)
+      tasks(":runners:flink:${CommonTestProperties.getFlinkVersion()}:examplesIntegrationTest")
+      commonJobProperties.setGradleSwitches(delegate)
+    }
+  }
+}
diff --git a/.test-infra/jenkins/job_PostCommit_Java_Examples_Spark.groovy b/.test-infra/jenkins/job_PostCommit_Java_Examples_Spark.groovy
new file mode 100644
index 000000000000..9e711c1fc7bd
--- /dev/null
+++ b/.test-infra/jenkins/job_PostCommit_Java_Examples_Spark.groovy
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import CommonJobProperties as commonJobProperties
+
+// This job runs the Java examples integration tests with SparkRunner.
+job('beam_PostCommit_Java_Examples_Spark') {
+  description('Run Java Examples on Spark Runner')
+
+  // Set common parameters.
+  commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 120)
+
+  // Allows triggering this build against pull requests.
+  commonJobProperties.enablePhraseTriggeringFromPullRequest(
+      delegate,
+      'Java Spark Runner Examples',
+      'Run Java Examples_Spark')
+
+  publishers {
+    archiveJunit('**/build/test-results/**/*.xml')
+  }
+
+  // Execute the Gradle tasks that run the examples.
+  steps {
+    gradle {
+      rootBuildScriptDir(commonJobProperties.checkoutDir)
+      tasks(':runners:spark:2:examplesIntegrationTest')
+      tasks(':runners:spark:3:examplesIntegrationTest')
+      commonJobProperties.setGradleSwitches(delegate)
+    }
+  }
+}
diff --git a/runners/direct-java/build.gradle b/runners/direct-java/build.gradle
index da9351cb797f..44dc78a42589 100644
--- a/runners/direct-java/build.gradle
+++ b/runners/direct-java/build.gradle
@@ -57,10 +57,12 @@ description = "Apache Beam :: Runners :: Direct Java"
 evaluationDependsOn(":runners:core-construction-java")
 evaluationDependsOn(":runners:core-java")
 evaluationDependsOn(":sdks:java:core")
+evaluationDependsOn(":examples:java")
 
 configurations {
   needsRunner
   validatesRunner
+  examplesJavaIntegrationTest
 }
 
 dependencies {
@@ -94,6 +96,9 @@ dependencies {
   permitUnusedDeclared library.java.vendored_grpc_1_36_0
   permitUnusedDeclared project(":runners:java-fn-execution")
   permitUnusedDeclared project(":sdks:java:fn-execution")
+  examplesJavaIntegrationTest project(project.path)
+  examplesJavaIntegrationTest project(":examples:java")
+  examplesJavaIntegrationTest project(path: ":examples:java", configuration: "testRuntimeMigration")
 }
 
 // windows handles quotes differently from linux,
@@ -177,6 +182,7 @@ def gcpProject = project.findProperty('gcpProject') ?: 'apache-beam-testing'
 def gcsBucket = project.findProperty('gcsBucket') ?: 'temp-storage-for-release-validation-tests/nightly-snapshot-validation'
 def bqDataset = project.findProperty('bqDataset') ?: 'beam_postrelease_mobile_gaming'
 def pubsubTopic = project.findProperty('pubsubTopic') ?: 'java_mobile_gaming_topic'
+def tempLocation = project.findProperty('tempLocation') ?: 'gs://temp-storage-for-end-to-end-tests'
 
 // Generates :runners:direct-java:runQuickstartJavaDirect
 createJavaExamplesArchetypeValidationTask(type: 'Quickstart', runner: 'Direct')
@@ -188,3 +194,28 @@ createJavaExamplesArchetypeValidationTask(type: 'MobileGaming',
   gcsBucket: gcsBucket,
   bqDataset: bqDataset,
   pubsubTopic: pubsubTopic)
+
+task examplesIntegrationTest(type: Test) {
+  description = "Runs examples tests on DirectRunner"
+
+  testLogging.showStandardStreams = true
+
+  String[] pipelineOptions = [
+      "--runner=DirectRunner",
+      "--runnerDeterminedSharding=false",
+      "--tempLocation=${tempLocation}",
+      "--tempRoot=${tempLocation}",
+      "--project=${gcpProject}",
+  ]
+  systemProperty "beamTestPipelineOptions", pipelineOptionsStringCrossPlatformHandling(pipelineOptions)
+
+  // TODO (BEAM-12572) Fix integration Tests to run with DirectRunner
+  include '**/*IT.class'
+  exclude '**/WindowedWordCountIT.class' // Timeouts
+  exclude '**/TopWikipediaSessionsIT.class' // OOM Exception
+  exclude '**/TfIdfIT.class' // Timeouts
+  maxParallelForks 4
+  classpath = configurations.examplesJavaIntegrationTest
+  testClassesDirs = files(project(":examples:java").sourceSets.test.output.classesDirs)
+  useJUnit { }
+}
\ No newline at end of file
diff --git a/runners/flink/flink_runner.gradle b/runners/flink/flink_runner.gradle
index 4a136fed86b4..cb653c0f1b65 100644
--- a/runners/flink/flink_runner.gradle
+++ b/runners/flink/flink_runner.gradle
@@ -41,6 +41,7 @@ description = "Apache Beam :: Runners :: Flink $flink_version"
  */
 evaluationDependsOn(":sdks:java:core")
 evaluationDependsOn(":runners:core-java")
+evaluationDependsOn(":examples:java")
 
 /*
  * Copy & merge source overrides into build directory.
@@ -123,6 +124,7 @@ test {
 configurations {
   validatesRunner
   miniCluster
+  examplesJavaIntegrationTest
 }
 
 dependencies {
@@ -177,8 +179,14 @@ dependencies {
   implementation library.java.jackson_databind
   implementation "org.apache.flink:flink-annotations:$flink_version"
   implementation "org.apache.flink:flink-optimizer_2.11:$flink_version"
+  examplesJavaIntegrationTest project(project.path)
+  examplesJavaIntegrationTest project(":examples:java")
+  examplesJavaIntegrationTest project(path: ":examples:java", configuration: "testRuntimeMigration")
 }
 
+def gcpProject = project.findProperty('gcpProject') ?: 'apache-beam-testing'
+def tempLocation = project.findProperty('tempLocation') ?: 'gs://temp-storage-for-end-to-end-tests'
+
 class ValidatesRunnerConfig {
   String name
   boolean streaming
@@ -275,6 +283,30 @@ tasks.register('validatesRunner') {
 // Generates :runners:flink:1.13:runQuickstartJavaFlinkLocal
 createJavaExamplesArchetypeValidationTask(type: 'Quickstart', runner: 'FlinkLocal')
 
+task examplesIntegrationTest(type: Test) {
+  group = "Verification"
+  // Disable gradle cache
+  outputs.upToDateWhen { false }
+  def pipelineOptionsArray = ["--runner=TestFlinkRunner",
+      "--parallelism=2",
+      "--tempLocation=${tempLocation}",
+      "--tempRoot=${tempLocation}",
+      "--project=${gcpProject}",
+  ]
+
+  def pipelineOptions = JsonOutput.toJson(pipelineOptionsArray)
+  systemProperty "beamTestPipelineOptions", pipelineOptions
+
+  include '**/*IT.class'
+  // TODO (BEAM-12572) Fix integration Tests to run with FlinkRunner
+  exclude '**/BigQueryTornadoesIT.class' // Fails with AssertionError
+  exclude '**/WindowedWordCountIT.class' // Failed with Grpc connection error
+  maxParallelForks 4
+  classpath = configurations.examplesJavaIntegrationTest
+  testClassesDirs = files(project(":examples:java").sourceSets.test.output.classesDirs)
+  useJUnit { }
+}
+
 /**
  * Updates the documentation with the current pipeline options.
  */
diff --git a/runners/spark/spark_runner.gradle b/runners/spark/spark_runner.gradle
index 85c84a3dd7d4..10aca702308f 100644
--- a/runners/spark/spark_runner.gradle
+++ b/runners/spark/spark_runner.gradle
@@ -41,9 +41,11 @@ description = "Apache Beam :: Runners :: Spark $spark_version"
 evaluationDependsOn(":sdks:java:core")
 evaluationDependsOn(":sdks:java:io:hadoop-format")
 evaluationDependsOn(":runners:core-java")
+evaluationDependsOn(":examples:java")
 
 configurations {
   validatesRunner
+  examplesJavaIntegrationTest
 }
 
 def hadoopVersions = [
@@ -211,6 +213,9 @@ dependencies {
   }
 }
 
+def gcpProject = project.findProperty('gcpProject') ?: 'apache-beam-testing'
+def tempLocation = project.findProperty('tempLocation') ?: 'gs://temp-storage-for-end-to-end-tests'
+
 configurations.testRuntimeClasspath {
   // Testing the Spark runner causes a StackOverflowError if slf4j-jdk14 is on the classpath
   exclude group: "org.slf4j", module: "slf4j-jdk14"
 }
@@ -399,6 +404,34 @@ task hadoopVersionsTest(group: "Verification") {
   dependsOn taskNames
 }
 
+task examplesIntegrationTest(type: Test) {
+  group = "Verification"
+  // Disable gradle cache
+  outputs.upToDateWhen { false }
+  def pipelineOptions = JsonOutput.toJson([
+      "--runner=TestSparkRunner",
+      "--enableSparkMetricSinks=true",
+      "--tempLocation=${tempLocation}",
+      "--tempRoot=${tempLocation}",
+      "--project=${gcpProject}",
+  ])
+  systemProperty "beamTestPipelineOptions", pipelineOptions
+  systemProperty "beam.spark.test.reuseSparkContext", "true"
+  systemProperty "spark.ui.enabled", "false"
+  systemProperty "spark.ui.showConsoleProgress", "false"
+
+  include '**/*IT.class'
+  exclude '**/WindowedWordCountIT.class' // Failed to read from sharded output
+  exclude '**/AutoCompleteIT.class' // Failed to serialize and deserialize property 'bigQuerySchema'
+  exclude '**/TrafficMaxLaneFlowIT.class' // Failed to serialize and deserialize property 'bigQuerySchema'
+  exclude '**/TrafficRoutesIT.class' // Property [windowDuration] is marked with contradictory annotations.
+  maxParallelForks 4
+  classpath = configurations.validatesRunner
+  testClassesDirs = files(project(":examples:java").sourceSets.test.output.classesDirs)
+  useJUnit { }
+  jvmArgs '-Xmx3g'
+}
+
 hadoopVersions.each {kv ->
   task "hadoopVersion${kv.key}Test"(type: Test, group: "Verification") {
     description = "Runs Spark tests with Hadoop version $kv.value"