From 15e4cf8d5d85a01c00ced6a1ffc3ed09b5e5b966 Mon Sep 17 00:00:00 2001
From: hyukjinkwon
Date: Tue, 22 May 2018 22:57:46 +0800
Subject: [PATCH 01/11] Run Java linter with Jenkins via SBT

---
 dev/run-tests.py         |  5 ++---
 dev/sbt-checkstyle       | 42 ++++++++++++++++++++++++++++++++++++++++
 project/SparkBuild.scala | 13 ++++++++++++-
 project/plugins.sbt      |  8 ++++++++
 4 files changed, 64 insertions(+), 4 deletions(-)
 create mode 100755 dev/sbt-checkstyle

diff --git a/dev/run-tests.py b/dev/run-tests.py
index 164c1e2200aa9..5e8c8590b5c34 100755
--- a/dev/run-tests.py
+++ b/dev/run-tests.py
@@ -204,7 +204,7 @@ def run_scala_style_checks():
 
 def run_java_style_checks():
     set_title_and_block("Running Java style checks", "BLOCK_JAVA_STYLE")
-    run_cmd([os.path.join(SPARK_HOME, "dev", "lint-java")])
+    run_cmd([os.path.join(SPARK_HOME, "dev", "sbt-checkstyle")])
 
 
 def run_python_style_checks():
@@ -574,8 +574,7 @@ def main():
                                 or f.endswith("checkstyle.xml")
                                 or f.endswith("checkstyle-suppressions.xml")
                                 for f in changed_files):
-        # run_java_style_checks()
-        pass
+        run_java_style_checks()
 
     if not changed_files or any(f.endswith("lint-python")
                                 or f.endswith("tox.ini")
                                 or f.endswith(".py")
diff --git a/dev/sbt-checkstyle b/dev/sbt-checkstyle
new file mode 100755
index 0000000000000..8821a7c0e4ccf
--- /dev/null
+++ b/dev/sbt-checkstyle
@@ -0,0 +1,42 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# NOTE: echo "q" is needed because SBT prompts the user for input on encountering a build file
+# with failure (either resolution or compilation); the "q" makes SBT quit.
+ERRORS=$(echo -e "q\n" \
+    | build/sbt \
+        -Pkinesis-asl \
+        -Pmesos \
+        -Pkafka-0-8 \
+        -Pkubernetes \
+        -Pyarn \
+        -Pflume \
+        -Phive \
+        -Phive-thriftserver \
+        checkstyle test:checkstyle \
+    | awk '{if($1~/error/)print}' \
+)
+
+if test ! -z "$ERRORS"; then
+    echo -e "Checkstyle failed at following occurrences:\n$ERRORS"
+    exit 1
+else
+    echo -e "Checkstyle checks passed."
+fi
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 7469f11df0294..1da0b01d8784b 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -27,6 +27,7 @@ import sbt._
 import sbt.Classpaths.publishTask
 import sbt.Keys._
 import sbtunidoc.Plugin.UnidocKeys.unidocGenjavadocVersion
+import com.etsy.sbt.checkstyle.CheckstylePlugin.autoImport._
 import com.simplytyped.Antlr4Plugin._
 import com.typesafe.sbt.pom.{PomBuild, SbtPomKeys}
 import com.typesafe.tools.mima.plugin.MimaKeys
@@ -317,7 +318,7 @@ object SparkBuild extends PomBuild {
   /* Enable shared settings on all projects */
   (allProjects ++ optionallyEnabledProjects ++ assemblyProjects ++ copyJarsProjects ++ Seq(spark, tools))
     .foreach(enable(sharedSettings ++ DependencyOverrides.settings ++
-      ExcludedDependencies.settings))
+      ExcludedDependencies.settings ++ CheckStyle.settings))
 
   /* Enable tests settings for all projects except examples, assembly and tools */
   (allProjects ++ optionallyEnabledProjects).foreach(enable(TestSettings.settings))
@@ -740,6 +741,16 @@ object Unidoc {
   )
 }
 
+object CheckStyle {
+  lazy val settings = Seq(
+    checkstyleSeverityLevel := Some(CheckstyleSeverityLevel.Error),
+    javaSource in Compile := baseDirectory.value / "src/main/java",
+    javaSource in Test := baseDirectory.value / "src/test/java",
+    checkstyleConfigLocation := CheckstyleConfigLocation.File("dev/checkstyle.xml"),
+    checkstyleOutputFile := baseDirectory.value / "target/checkstyle-output.xml"
+  )
+}
+
 object CopyDependencies {
 
   val copyDeps = TaskKey[Unit]("copyDeps", "Copies needed dependencies to the build directory.")
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 96bdb9067ae59..ffbd417b0f145 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -1,3 +1,11 @@
+addSbtPlugin("com.etsy" % "sbt-checkstyle-plugin" % "3.1.1")
+
+// sbt-checkstyle-plugin uses an old version of checkstyle. Match it to Maven's.
+libraryDependencies += "com.puppycrawl.tools" % "checkstyle" % "8.2"
+
+// checkstyle uses guava 23.0.
+libraryDependencies += "com.google.guava" % "guava" % "23.0"
+
 // need to make changes to uptake sbt 1.0 support in "com.eed3si9n" % "sbt-assembly" % "1.14.5"
 addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.11.2")

From b00404763531e67d0eb616114f11d2def88de80d Mon Sep 17 00:00:00 2001
From: hyukjinkwon
Date: Tue, 22 May 2018 23:42:09 +0800
Subject: [PATCH 02/11] Weird Java file to check the Jenkins output

---
 core/src/test/java/test/org/apache/spark/JavaAPISuite.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/src/test/java/test/org/apache/spark/JavaAPISuite.java b/core/src/test/java/test/org/apache/spark/JavaAPISuite.java
index 01b5fb7b46684..ce215150c4334 100644
--- a/core/src/test/java/test/org/apache/spark/JavaAPISuite.java
+++ b/core/src/test/java/test/org/apache/spark/JavaAPISuite.java
@@ -79,7 +79,7 @@ import org.apache.spark.util.StatCounter;
 
 // The test suite itself is Serializable so that anonymous Function implementations can be
-// serialized, as an alternative to converting these anonymous classes to static inner classes;
+// serialized, as an alternative to converting these anonymous classes to static inner classes; aaaaaaaa
 // see http://stackoverflow.com/questions/758570/.
 public class JavaAPISuite implements Serializable {
   private transient JavaSparkContext sc;

From 81ed6da0da007c1194f73c8b7521d564110a2bca Mon Sep 17 00:00:00 2001
From: hyukjinkwon
Date: Tue, 22 May 2018 23:50:21 +0800
Subject: [PATCH 03/11] Narrower scope (and test the weird Java file too)

---
 project/SparkBuild.scala | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 1da0b01d8784b..b3ac8437ef117 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -744,8 +744,8 @@ object Unidoc {
 object CheckStyle {
   lazy val settings = Seq(
     checkstyleSeverityLevel := Some(CheckstyleSeverityLevel.Error),
-    javaSource in Compile := baseDirectory.value / "src/main/java",
-    javaSource in Test := baseDirectory.value / "src/test/java",
+    javaSource in (Compile, checkstyle) := baseDirectory.value / "src/main/java",
+    javaSource in (Test, checkstyle) := baseDirectory.value / "src/test/java",
     checkstyleConfigLocation := CheckstyleConfigLocation.File("dev/checkstyle.xml"),
     checkstyleOutputFile := baseDirectory.value / "target/checkstyle-output.xml"
   )

From 558b1beaa73cc536588c9a99b291ea7dcd72a1ff Mon Sep 17 00:00:00 2001
From: hyukjinkwon
Date: Tue, 22 May 2018 23:51:34 +0800
Subject: [PATCH 04/11] Revert "Weird Java file to check the Jenkins output"

This reverts commit b00404763531e67d0eb616114f11d2def88de80d.

---
 core/src/test/java/test/org/apache/spark/JavaAPISuite.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/src/test/java/test/org/apache/spark/JavaAPISuite.java b/core/src/test/java/test/org/apache/spark/JavaAPISuite.java
index ce215150c4334..01b5fb7b46684 100644
--- a/core/src/test/java/test/org/apache/spark/JavaAPISuite.java
+++ b/core/src/test/java/test/org/apache/spark/JavaAPISuite.java
@@ -79,7 +79,7 @@ import org.apache.spark.util.StatCounter;
 
 // The test suite itself is Serializable so that anonymous Function implementations can be
-// serialized, as an alternative to converting these anonymous classes to static inner classes; aaaaaaaa
+// serialized, as an alternative to converting these anonymous classes to static inner classes;
 // see http://stackoverflow.com/questions/758570/.
 public class JavaAPISuite implements Serializable {
   private transient JavaSparkContext sc;

From 54d070d2517c2dd1db33347131f049d0d08f73dc Mon Sep 17 00:00:00 2001
From: hyukjinkwon
Date: Wed, 23 May 2018 00:11:31 +0800
Subject: [PATCH 05/11] `CheckStyle` to `Checkstyle`

---
 project/SparkBuild.scala | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index b3ac8437ef117..863cfddb9699c 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -318,7 +318,7 @@ object SparkBuild extends PomBuild {
   /* Enable shared settings on all projects */
   (allProjects ++ optionallyEnabledProjects ++ assemblyProjects ++ copyJarsProjects ++ Seq(spark, tools))
     .foreach(enable(sharedSettings ++ DependencyOverrides.settings ++
-      ExcludedDependencies.settings ++ CheckStyle.settings))
+      ExcludedDependencies.settings ++ Checkstyle.settings))
 
   /* Enable tests settings for all projects except examples, assembly and tools */
   (allProjects ++ optionallyEnabledProjects).foreach(enable(TestSettings.settings))
@@ -741,7 +741,7 @@ object Unidoc {
   )
 }
 
-object CheckStyle {
+object Checkstyle {
   lazy val settings = Seq(
     checkstyleSeverityLevel := Some(CheckstyleSeverityLevel.Error),
     javaSource in (Compile, checkstyle) := baseDirectory.value / "src/main/java",

From 0cf6de06e239187cd6eb0ade2cb4647cd60a9271 Mon Sep 17 00:00:00 2001
From: hyukjinkwon
Date: Wed, 23 May 2018 00:43:17 +0800
Subject: [PATCH 06/11] Weird Java file in test code

---
 core/src/test/java/test/org/apache/spark/JavaAPISuite.java | 2 +-
 project/SparkBuild.scala                                   | 3 ++-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/core/src/test/java/test/org/apache/spark/JavaAPISuite.java b/core/src/test/java/test/org/apache/spark/JavaAPISuite.java
index 01b5fb7b46684..2f1623d1f7297 100644
--- a/core/src/test/java/test/org/apache/spark/JavaAPISuite.java
+++ b/core/src/test/java/test/org/apache/spark/JavaAPISuite.java
@@ -79,7 +79,7 @@ import org.apache.spark.util.StatCounter;
 
 // The test suite itself is Serializable so that anonymous Function implementations can be
-// serialized, as an alternative to converting these anonymous classes to static inner classes;
+// serialized, as an alternative to converting these anonymous classes to static inner classes; aaaaaaaaaa
 // see http://stackoverflow.com/questions/758570/.
 public class JavaAPISuite implements Serializable {
   private transient JavaSparkContext sc;
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 863cfddb9699c..4cb6495a33b61 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -747,7 +747,8 @@ object Checkstyle {
     javaSource in (Compile, checkstyle) := baseDirectory.value / "src/main/java",
     javaSource in (Test, checkstyle) := baseDirectory.value / "src/test/java",
     checkstyleConfigLocation := CheckstyleConfigLocation.File("dev/checkstyle.xml"),
-    checkstyleOutputFile := baseDirectory.value / "target/checkstyle-output.xml"
+    checkstyleOutputFile := baseDirectory.value / "target/checkstyle-output.xml",
+    checkstyleOutputFile in Test := baseDirectory.value / "target/checkstyle-output.xml"
   )
 }

From 1df72859f8f08074be942e5f8e1e2e45442d63c1 Mon Sep 17 00:00:00 2001
From: hyukjinkwon
Date: Wed, 23 May 2018 00:46:42 +0800
Subject: [PATCH 07/11] Revert "[SPARK-24323][SQL] Fix lint-java errors"

This reverts commit e480eccd9754b4900c3e2c2036d69130a262cffe.
---
 .../spark/sql/sources/v2/reader/InputPartition.java          | 4 ++--
 .../spark/sql/sources/v2/reader/InputPartitionReader.java    | 8 ++++----
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartition.java b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartition.java
index f2038d0de3ffe..f53687e113ae0 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartition.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartition.java
@@ -36,8 +36,8 @@ public interface InputPartition<T> extends Serializable {
 
   /**
-   * The preferred locations where the input partition reader returned by this partition can run
-   * faster, but Spark does not guarantee to run the input partition reader on these locations.
+   * The preferred locations where the input partition reader returned by this partition can run faster,
+   * but Spark does not guarantee to run the input partition reader on these locations.
    * The implementations should make sure that it can be run on any location.
    * The location is a string representing the host name.
    *
diff --git a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartitionReader.java b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartitionReader.java
index 33fa7be4c1b20..f0d808536207a 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartitionReader.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartitionReader.java
@@ -23,12 +23,12 @@
 import org.apache.spark.annotation.InterfaceStability;
 
 /**
- * An input partition reader returned by {@link InputPartition#createPartitionReader()} and is
- * responsible for outputting data for a RDD partition.
+ * An input partition reader returned by {@link InputPartition#createPartitionReader()} and is responsible for
+ * outputting data for a RDD partition.
  *
  * Note that, Currently the type `T` can only be {@link org.apache.spark.sql.Row} for normal input
- * partition readers, or {@link org.apache.spark.sql.catalyst.expressions.UnsafeRow} for input
- * partition readers that mix in {@link SupportsScanUnsafeRow}.
+ * partition readers, or {@link org.apache.spark.sql.catalyst.expressions.UnsafeRow} for input partition
+ * readers that mix in {@link SupportsScanUnsafeRow}.
  */
 @InterfaceStability.Evolving
 public interface InputPartitionReader<T> extends Closeable {

From 6e4f6c370fb7d005ad9023d180a53deff06fae96 Mon Sep 17 00:00:00 2001
From: hyukjinkwon
Date: Wed, 23 May 2018 00:47:00 +0800
Subject: [PATCH 08/11] Revert test code change

---
 core/src/test/java/test/org/apache/spark/JavaAPISuite.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/src/test/java/test/org/apache/spark/JavaAPISuite.java b/core/src/test/java/test/org/apache/spark/JavaAPISuite.java
index 2f1623d1f7297..01b5fb7b46684 100644
--- a/core/src/test/java/test/org/apache/spark/JavaAPISuite.java
+++ b/core/src/test/java/test/org/apache/spark/JavaAPISuite.java
@@ -79,7 +79,7 @@ import org.apache.spark.util.StatCounter;
 
 // The test suite itself is Serializable so that anonymous Function implementations can be
-// serialized, as an alternative to converting these anonymous classes to static inner classes; aaaaaaaaaa
+// serialized, as an alternative to converting these anonymous classes to static inner classes;
 // see http://stackoverflow.com/questions/758570/.
 public class JavaAPISuite implements Serializable {
   private transient JavaSparkContext sc;

From 7bb0eb3be6619ea9d0c7a023da5b665fecbc799e Mon Sep 17 00:00:00 2001
From: hyukjinkwon
Date: Wed, 23 May 2018 00:49:07 +0800
Subject: [PATCH 09/11] Revert "Revert "[SPARK-24323][SQL] Fix lint-java errors""

This reverts commit 1df72859f8f08074be942e5f8e1e2e45442d63c1.

---
 .../spark/sql/sources/v2/reader/InputPartition.java          | 4 ++--
 .../spark/sql/sources/v2/reader/InputPartitionReader.java    | 8 ++++----
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartition.java b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartition.java
index f53687e113ae0..f2038d0de3ffe 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartition.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartition.java
@@ -36,8 +36,8 @@ public interface InputPartition<T> extends Serializable {
 
   /**
-   * The preferred locations where the input partition reader returned by this partition can run faster,
-   * but Spark does not guarantee to run the input partition reader on these locations.
+   * The preferred locations where the input partition reader returned by this partition can run
+   * faster, but Spark does not guarantee to run the input partition reader on these locations.
    * The implementations should make sure that it can be run on any location.
    * The location is a string representing the host name.
    *
diff --git a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartitionReader.java b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartitionReader.java
index f0d808536207a..33fa7be4c1b20 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartitionReader.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartitionReader.java
@@ -23,12 +23,12 @@
 import org.apache.spark.annotation.InterfaceStability;
 
 /**
- * An input partition reader returned by {@link InputPartition#createPartitionReader()} and is responsible for
- * outputting data for a RDD partition.
+ * An input partition reader returned by {@link InputPartition#createPartitionReader()} and is
+ * responsible for outputting data for a RDD partition.
  *
 * Note that, Currently the type `T` can only be {@link org.apache.spark.sql.Row} for normal input
- * partition readers, or {@link org.apache.spark.sql.catalyst.expressions.UnsafeRow} for input partition
- * readers that mix in {@link SupportsScanUnsafeRow}.
+ * partition readers, or {@link org.apache.spark.sql.catalyst.expressions.UnsafeRow} for input
+ * partition readers that mix in {@link SupportsScanUnsafeRow}.
  */
 @InterfaceStability.Evolving
 public interface InputPartitionReader<T> extends Closeable {

From 294e18925a6d4d0d216a6173fb3d7930da6985fe Mon Sep 17 00:00:00 2001
From: hyukjinkwon
Date: Thu, 24 May 2018 13:29:19 +0800
Subject: [PATCH 10/11] Revert "[SPARK-24323][SQL] Fix lint-java errors"

This reverts commit e480eccd9754b4900c3e2c2036d69130a262cffe.
---
 .../spark/sql/sources/v2/reader/InputPartition.java          | 4 ++--
 .../spark/sql/sources/v2/reader/InputPartitionReader.java    | 8 ++++----
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartition.java b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartition.java
index f2038d0de3ffe..f53687e113ae0 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartition.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartition.java
@@ -36,8 +36,8 @@ public interface InputPartition<T> extends Serializable {
 
   /**
-   * The preferred locations where the input partition reader returned by this partition can run
-   * faster, but Spark does not guarantee to run the input partition reader on these locations.
+   * The preferred locations where the input partition reader returned by this partition can run faster,
+   * but Spark does not guarantee to run the input partition reader on these locations.
    * The implementations should make sure that it can be run on any location.
    * The location is a string representing the host name.
    *
diff --git a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartitionReader.java b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartitionReader.java
index 33fa7be4c1b20..f0d808536207a 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartitionReader.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartitionReader.java
@@ -23,12 +23,12 @@
 import org.apache.spark.annotation.InterfaceStability;
 
 /**
- * An input partition reader returned by {@link InputPartition#createPartitionReader()} and is
- * responsible for outputting data for a RDD partition.
+ * An input partition reader returned by {@link InputPartition#createPartitionReader()} and is responsible for
+ * outputting data for a RDD partition.
  *
 * Note that, Currently the type `T` can only be {@link org.apache.spark.sql.Row} for normal input
- * partition readers, or {@link org.apache.spark.sql.catalyst.expressions.UnsafeRow} for input
- * partition readers that mix in {@link SupportsScanUnsafeRow}.
+ * partition readers, or {@link org.apache.spark.sql.catalyst.expressions.UnsafeRow} for input partition
+ * readers that mix in {@link SupportsScanUnsafeRow}.
  */
 @InterfaceStability.Evolving
 public interface InputPartitionReader<T> extends Closeable {

From 6943ff81e5b63314ffc78591dec289a73fc2dcd5 Mon Sep 17 00:00:00 2001
From: hyukjinkwon
Date: Thu, 24 May 2018 13:33:23 +0800
Subject: [PATCH 11/11] Revert "Revert "[SPARK-24323][SQL] Fix lint-java errors""

This reverts commit 294e18925a6d4d0d216a6173fb3d7930da6985fe.
---
 .../spark/sql/sources/v2/reader/InputPartition.java          | 4 ++--
 .../spark/sql/sources/v2/reader/InputPartitionReader.java    | 8 ++++----
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartition.java b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartition.java
index f53687e113ae0..f2038d0de3ffe 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartition.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartition.java
@@ -36,8 +36,8 @@ public interface InputPartition<T> extends Serializable {
 
   /**
-   * The preferred locations where the input partition reader returned by this partition can run faster,
-   * but Spark does not guarantee to run the input partition reader on these locations.
+   * The preferred locations where the input partition reader returned by this partition can run
+   * faster, but Spark does not guarantee to run the input partition reader on these locations.
    * The implementations should make sure that it can be run on any location.
    * The location is a string representing the host name.
    *
diff --git a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartitionReader.java b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartitionReader.java
index f0d808536207a..33fa7be4c1b20 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartitionReader.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/InputPartitionReader.java
@@ -23,12 +23,12 @@
 import org.apache.spark.annotation.InterfaceStability;
 
 /**
- * An input partition reader returned by {@link InputPartition#createPartitionReader()} and is responsible for
- * outputting data for a RDD partition.
+ * An input partition reader returned by {@link InputPartition#createPartitionReader()} and is
+ * responsible for outputting data for a RDD partition.
  *
 * Note that, Currently the type `T` can only be {@link org.apache.spark.sql.Row} for normal input
- * partition readers, or {@link org.apache.spark.sql.catalyst.expressions.UnsafeRow} for input partition
- * readers that mix in {@link SupportsScanUnsafeRow}.
+ * partition readers, or {@link org.apache.spark.sql.catalyst.expressions.UnsafeRow} for input
+ * partition readers that mix in {@link SupportsScanUnsafeRow}.
  */
 @InterfaceStability.Evolving
 public interface InputPartitionReader<T> extends Closeable {
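
For local verification, a minimal usage sketch of the check this series wires up (it assumes the series is applied on a Spark checkout where build/sbt resolves; the commands and paths below are taken from the patches above, not from anything new):

    # Run Checkstyle through SBT the same way dev/run-tests.py now does on Jenkins.
    ./dev/sbt-checkstyle

    # Or invoke the underlying SBT tasks directly. With the settings added to
    # project/SparkBuild.scala, each module writes its report to
    # target/checkstyle-output.xml. dev/sbt-checkstyle enables more profiles
    # (kinesis-asl, mesos, kafka-0-8, kubernetes, yarn, flume); only two are
    # shown here for brevity.
    build/sbt -Phive -Phive-thriftserver checkstyle test:checkstyle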