From 913312103c153ba9ba1e8a1a5c514c315428c39b Mon Sep 17 00:00:00 2001 From: yangjie01 Date: Wed, 11 Oct 2023 22:41:32 +0800 Subject: [PATCH] init --- .../spark/sql/avro/AvroRowReaderSuite.scala | 10 +++++----- .../sql/connect/client/CloseableIterator.scala | 2 +- ...ecutePlanResponseReattachableIterator.scala | 4 ++-- .../sql/connect/client/GrpcRetryHandler.scala | 2 +- .../KafkaDontFailOnDataLossSuite.scala | 2 +- .../streaming/kafka010/KafkaRDDSuite.scala | 4 ++-- .../org/apache/spark/TaskContextImpl.scala | 2 +- .../deploy/history/FsHistoryProvider.scala | 2 +- .../scala/org/apache/spark/rdd/PipedRDD.scala | 4 ++-- .../spark/scheduler/TaskSetManager.scala | 2 +- .../spark/storage/memory/MemoryStore.scala | 2 +- .../org/apache/spark/util/HadoopFSUtils.scala | 4 ++-- .../collection/ExternalAppendOnlyMap.scala | 2 +- .../spark/util/collection/ExternalSorter.scala | 2 +- .../scala/org/apache/spark/ShuffleSuite.scala | 2 +- pom.xml | 7 ------- project/SparkBuild.scala | 4 ---- .../catalyst/expressions/DynamicPruning.scala | 2 +- ...nctionTableSubqueryArgumentExpression.scala | 2 +- .../expressions/higherOrderFunctions.scala | 2 +- .../sql/catalyst/expressions/subquery.scala | 8 ++++---- .../ExternalAppendOnlyUnsafeRowArray.scala | 4 ++-- .../WholeStageCodegenEvaluatorFactory.scala | 2 +- .../aggregate/ObjectAggregationIterator.scala | 2 +- .../joins/BroadcastNestedLoopJoinExec.scala | 2 +- .../sql/execution/metric/SQLMetrics.scala | 2 +- .../state/HDFSBackedStateStoreMap.scala | 4 ++-- .../apache/spark/sql/execution/subquery.scala | 2 +- .../org/apache/spark/sql/SSBQuerySuite.scala | 2 +- .../spark/sql/SparkSessionExtensionSuite.scala | 4 ++-- .../command/v1/DropNamespaceSuite.scala | 2 +- .../sql/execution/metric/SQLMetricsSuite.scala | 18 +++++++++--------- .../execution/ui/AllExecutionsPageSuite.scala | 2 +- .../sql/hive/execution/Hive_2_1_DDLSuite.scala | 2 +- .../PruneHiveTablePartitionsSuite.scala | 2 +- .../spark/streaming/InputStreamsSuite.scala | 2 +- 36 files changed, 56 insertions(+), 67 deletions(-) diff --git a/connector/avro/src/test/scala/org/apache/spark/sql/avro/AvroRowReaderSuite.scala b/connector/avro/src/test/scala/org/apache/spark/sql/avro/AvroRowReaderSuite.scala index 70d0bc6c0ad10..1c450d4f16674 100644 --- a/connector/avro/src/test/scala/org/apache/spark/sql/avro/AvroRowReaderSuite.scala +++ b/connector/avro/src/test/scala/org/apache/spark/sql/avro/AvroRowReaderSuite.scala @@ -80,19 +80,19 @@ class AvroRowReaderSuite override def hasNext: Boolean = hasNextRow - override def next: InternalRow = nextRow + override def next(): InternalRow = nextRow } assert(it.hasNext == true) - assert(it.next.getInt(0) == 1) + assert(it.next().getInt(0) == 1) // test no intervening next assert(it.hasNext == true) assert(it.hasNext == true) // test no intervening hasNext - assert(it.next.getInt(0) == 2) - assert(it.next.getInt(0) == 3) + assert(it.next().getInt(0) == 2) + assert(it.next().getInt(0) == 3) assert(it.hasNext == false) assertThrows[NoSuchElementException] { - it.next + it.next() } } } diff --git a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/CloseableIterator.scala b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/CloseableIterator.scala index 810158b2ac8b3..4ec6828d885ab 100644 --- a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/CloseableIterator.scala +++ b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/CloseableIterator.scala @@ -33,7 +33,7 @@ 
private[sql] abstract class WrappedCloseableIterator[E] extends CloseableIterato override def next(): E = innerIterator.next() - override def hasNext(): Boolean = innerIterator.hasNext + override def hasNext: Boolean = innerIterator.hasNext override def close(): Unit = innerIterator match { case it: CloseableIterator[E] => it.close() diff --git a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/ExecutePlanResponseReattachableIterator.scala b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/ExecutePlanResponseReattachableIterator.scala index 4c0c1d1f39047..9d134f5935446 100644 --- a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/ExecutePlanResponseReattachableIterator.scala +++ b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/ExecutePlanResponseReattachableIterator.scala @@ -108,7 +108,7 @@ class ExecutePlanResponseReattachableIterator( override def next(): proto.ExecutePlanResponse = synchronized { // hasNext will trigger reattach in case the stream completed without resultComplete - if (!hasNext()) { + if (!hasNext) { throw new java.util.NoSuchElementException() } @@ -133,7 +133,7 @@ class ExecutePlanResponseReattachableIterator( } } - override def hasNext(): Boolean = synchronized { + override def hasNext: Boolean = synchronized { if (resultComplete) { // After response complete response return false diff --git a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/GrpcRetryHandler.scala b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/GrpcRetryHandler.scala index 3c0b750fd46e7..74c8423ce4313 100644 --- a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/GrpcRetryHandler.scala +++ b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/GrpcRetryHandler.scala @@ -74,7 +74,7 @@ private[sql] class GrpcRetryHandler( } } - override def next: U = { + override def next(): U = { retryIter(_.next) } diff --git a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaDontFailOnDataLossSuite.scala b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaDontFailOnDataLossSuite.scala index 54ce1717acc71..2e0690f2288a8 100644 --- a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaDontFailOnDataLossSuite.scala +++ b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaDontFailOnDataLossSuite.scala @@ -47,7 +47,7 @@ trait KafkaMissingOffsetsTest extends SharedSparkSession { protected var testUtils: KafkaTestUtils = _ - override def createSparkSession(): TestSparkSession = { + override def createSparkSession: TestSparkSession = { // Set maxRetries to 3 to handle NPE from `poll` when deleting a topic new TestSparkSession(new SparkContext("local[2,3]", "test-sql-context", sparkConf)) } diff --git a/connector/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/KafkaRDDSuite.scala b/connector/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/KafkaRDDSuite.scala index 735ec2f7b4484..986943ac6a1b8 100644 --- a/connector/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/KafkaRDDSuite.scala +++ b/connector/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/KafkaRDDSuite.scala @@ -48,14 +48,14 @@ class KafkaRDDSuite extends SparkFunSuite { private var sc: SparkContext = _ - override def beforeAll: Unit = { + override def beforeAll(): Unit = { super.beforeAll() sc = new 
SparkContext(sparkConf) kafkaTestUtils = new KafkaTestUtils kafkaTestUtils.setup() } - override def afterAll: Unit = { + override def afterAll(): Unit = { try { try { if (sc != null) { diff --git a/core/src/main/scala/org/apache/spark/TaskContextImpl.scala b/core/src/main/scala/org/apache/spark/TaskContextImpl.scala index 4cc5f165794e4..8d2c2ab9bc4ad 100644 --- a/core/src/main/scala/org/apache/spark/TaskContextImpl.scala +++ b/core/src/main/scala/org/apache/spark/TaskContextImpl.scala @@ -292,5 +292,5 @@ private[spark] class TaskContextImpl( private[spark] override def fetchFailed: Option[FetchFailedException] = _fetchFailedException - private[spark] override def getLocalProperties(): Properties = localProperties + private[spark] override def getLocalProperties: Properties = localProperties } diff --git a/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala b/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala index abb65c7311e3f..b5afa86180b3a 100644 --- a/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala +++ b/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala @@ -1288,7 +1288,7 @@ private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock) val diskStore = KVUtils.open(lease.tmpPath, metadata, conf, live = false) hybridStore.setDiskStore(diskStore) hybridStore.switchToDiskStore(new HybridStore.SwitchToDiskStoreListener { - override def onSwitchToDiskStoreSuccess: Unit = { + override def onSwitchToDiskStoreSuccess(): Unit = { logInfo(s"Completely switched to diskStore for app $appId / ${attempt.info.attemptId}.") diskStore.close() val newStorePath = lease.commit(appId, attempt.info.attemptId) diff --git a/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala index 0359f6c6d3868..4f3683ef2df86 100644 --- a/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala @@ -185,13 +185,13 @@ private[spark] class PipedRDD[T: ClassTag]( val lines = Source.fromInputStream(proc.getInputStream)(encoding).getLines new Iterator[String] { def next(): String = { - if (!hasNext()) { + if (!hasNext) { throw SparkCoreErrors.noSuchElementError() } lines.next() } - def hasNext(): Boolean = { + def hasNext: Boolean = { val result = if (lines.hasNext) { true } else { diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala index 4d5f32ac67b56..de9c58e8d909a 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala @@ -1032,7 +1032,7 @@ private[spark] class TaskSetManager( override def removeSchedulable(schedulable: Schedulable): Unit = {} - override def getSortedTaskSetQueue(): ArrayBuffer[TaskSetManager] = { + override def getSortedTaskSetQueue: ArrayBuffer[TaskSetManager] = { val sortedTaskSetQueue = new ArrayBuffer[TaskSetManager]() sortedTaskSetQueue += this sortedTaskSetQueue diff --git a/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala b/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala index dafa67b091d71..88bb3eb7105c8 100644 --- a/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala +++ b/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala @@ -746,7 +746,7 @@ private class SerializedValuesHolder[T]( // We 
successfully unrolled the entirety of this block serializationStream.close() - override def preciseSize(): Long = bbos.size + override def preciseSize: Long = bbos.size override def build(): MemoryEntry[T] = SerializedMemoryEntry[T](bbos.toChunkedByteBuffer, memoryMode, classTag) diff --git a/core/src/main/scala/org/apache/spark/util/HadoopFSUtils.scala b/core/src/main/scala/org/apache/spark/util/HadoopFSUtils.scala index a033b817f5a11..5cd93dfae3580 100644 --- a/core/src/main/scala/org/apache/spark/util/HadoopFSUtils.scala +++ b/core/src/main/scala/org/apache/spark/util/HadoopFSUtils.scala @@ -91,7 +91,7 @@ private[spark] object HadoopFSUtils extends Logging { val remoteIter = path.getFileSystem(hadoopConf).listFiles(path, true) val statues = new Iterator[LocatedFileStatus]() { def next(): LocatedFileStatus = remoteIter.next - def hasNext(): Boolean = remoteIter.hasNext + def hasNext: Boolean = remoteIter.hasNext }.filterNot(status => shouldFilterOutPath(status.getPath.toString.substring(prefixLength))) .filter(f => filter.accept(f.getPath)) .toArray @@ -209,7 +209,7 @@ private[spark] object HadoopFSUtils extends Logging { val remoteIter = fs.listLocatedStatus(path) new Iterator[LocatedFileStatus]() { def next(): LocatedFileStatus = remoteIter.next - def hasNext(): Boolean = remoteIter.hasNext + def hasNext: Boolean = remoteIter.hasNext }.toArray case _ => fs.listStatus(path) } diff --git a/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala b/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala index 93efaafa43b96..71f3b0b4b4d40 100644 --- a/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala +++ b/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala @@ -592,7 +592,7 @@ class ExternalAppendOnlyMap[K, V, C]( } } - override def hasNext(): Boolean = cur != null + override def hasNext: Boolean = cur != null override def next(): (K, C) = { val r = cur diff --git a/core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala b/core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala index a68e0de70c591..42501729e1b4d 100644 --- a/core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala +++ b/core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala @@ -842,7 +842,7 @@ private[spark] class ExternalSorter[K, V, C]( } } - override def hasNext(): Boolean = cur != null + override def hasNext: Boolean = cur != null override def next(): ((Int, K), C) = { val r = cur diff --git a/core/src/test/scala/org/apache/spark/ShuffleSuite.scala b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala index c1b9af37ce760..0b6fce57727f2 100644 --- a/core/src/test/scala/org/apache/spark/ShuffleSuite.scala +++ b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala @@ -502,7 +502,7 @@ class InterleaveIterators[T, R]( class BarrierIterator[E](id: Int, sub: Iterator[E]) extends Iterator[E] { def hasNext: Boolean = sub.hasNext - def next: E = { + def next(): E = { barrier.await() sub.next() } diff --git a/pom.xml b/pom.xml index e3a19257c8c12..a84a2c32b38bc 100644 --- a/pom.xml +++ b/pom.xml @@ -2964,12 +2964,7 @@ -target:17 -Wconf:cat=deprecation:wv,any:e -Wunused:imports - -Wconf:cat=scaladoc:wv - -Wconf:cat=lint-multiarg-infix:wv - -Wconf:cat=other-nullary-override:wv
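
Context for the changes above: Scala flags an override whose empty parameter list does not match the member it overrides (the `other-nullary-override` lint). This patch makes the overrides consistent with the usual convention, which the overridden declarations in `Iterator`, `Schedulable`, etc. already follow: pure accessors such as `hasNext` are declared without parentheses, while side-effecting methods such as `next()` and `beforeAll()` keep the empty parameter list. With the `-Wconf:cat=...:wv` escapes removed from `pom.xml`, these lints fall through to the blanket `any:e` rule already present in the compiler arguments and fail the build, which is why every call site is fixed in the same commit. A minimal sketch of the pattern (illustrative only, not taken from the patch):

```scala
// Iterator declares `hasNext: Boolean` without parentheses and `next(): A`
// with them, so overrides must match that arity or scalac reports an
// "other-nullary-override" lint, which `-Wconf:any:e` promotes to an error.
class CountdownIterator(from: Int) extends Iterator[Int] {
  private var current = from

  // Pure accessor: parameterless, matching Iterator.hasNext.
  override def hasNext: Boolean = current > 0

  // Side-effecting method: keeps its empty parameter list, matching Iterator.next().
  override def next(): Int = {
    if (!hasNext) throw new NoSuchElementException()
    current -= 1
    current + 1 // return the value before the decrement: from, from - 1, ..., 1
  }
}
```

The same reasoning applies at call sites: `it.next()` signals a mutation, while `it.hasNext` reads as a property, which is exactly the direction of each hunk in this patch.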