core/src/main/scala/org/apache/spark/rpc/netty/Inbox.scala (2 additions, 4 deletions)
@@ -21,8 +21,6 @@ import javax.annotation.concurrent.GuardedBy
 
 import scala.util.control.NonFatal
 
-import com.google.common.annotations.VisibleForTesting
-
 import org.apache.spark.{Logging, SparkException}
 import org.apache.spark.rpc.{RpcAddress, RpcEndpoint, ThreadSafeRpcEndpoint}

@@ -193,8 +191,8 @@ private[netty] class Inbox(
 
   def isEmpty: Boolean = inbox.synchronized { messages.isEmpty }
 
-  /** Called when we are dropping a message. Test cases override this to test message dropping. */
-  @VisibleForTesting
+  /** Called when we are dropping a message. Test cases override this to test message dropping.
+   * Exposed for testing. */
Contributor comment on the lines above:
should be java doc, not scala doc

   protected def onDrop(message: InboxMessage): Unit = {
     logWarning(s"Drop $message because $endpointRef is stopped")
   }
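
As a sketch of the formatting the reviewer is suggesting (assuming the standard Java-doc block style, which the comment itself does not spell out), the new comment would put /** and */ on their own lines:

  /**
   * Called when we are dropping a message. Test cases override this to test message dropping.
   * Exposed for testing.
   */
  protected def onDrop(message: InboxMessage): Unit = {
    logWarning(s"Drop $message because $endpointRef is stopped")
  }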
@@ -21,8 +21,6 @@ import java.util.concurrent.TimeoutException
 
 import scala.collection.mutable.{HashMap, HashSet, ListBuffer}
 
-import com.google.common.annotations.VisibleForTesting
-
 import org.apache.spark._
 import org.apache.spark.annotation.DeveloperApi
 import org.apache.spark.executor.TaskMetrics
@@ -20,7 +20,6 @@ package org.apache.spark.util
 import java.util.concurrent._
 import java.util.concurrent.atomic.AtomicBoolean
 
-import com.google.common.annotations.VisibleForTesting
 import org.apache.spark.SparkContext
 
 /**
@@ -122,8 +121,8 @@ private[spark] abstract class AsynchronousListenerBus[L <: AnyRef, E](name: Stri
    * For testing only. Wait until there are no more events in the queue, or until the specified
    * time has elapsed. Throw `TimeoutException` if the specified time elapsed before the queue
    * emptied.
+   * Exposed for testing.
    */
-  @VisibleForTesting
   @throws(classOf[TimeoutException])
   def waitUntilEmpty(timeoutMillis: Long): Unit = {
     val finishTime = System.currentTimeMillis + timeoutMillis
@@ -140,8 +139,8 @@ private[spark] abstract class AsynchronousListenerBus[L <: AnyRef, E](name: Stri
 
   /**
    * For testing only. Return whether the listener daemon thread is still alive.
+   * Exposed for testing.
    */
-  @VisibleForTesting
   def listenerThreadIsAlive: Boolean = listenerThread.isAlive
 
   /**
@@ -23,7 +23,6 @@ import java.util.Comparator
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.mutable
 
-import com.google.common.annotations.VisibleForTesting
 import com.google.common.io.ByteStreams
 
 import org.apache.spark._
@@ -608,8 +607,8 @@ private[spark] class ExternalSorter[K, V, C](
    *
    * For now, we just merge all the spilled files in once pass, but this can be modified to
    * support hierarchical merging.
+   * Exposed for testing.
    */
-  @VisibleForTesting
   def partitionedIterator: Iterator[(Int, Iterator[Product2[K, C]])] = {
     val usingMap = aggregator.isDefined
     val collection: WritablePartitionedPairCollection[K, C] = if (usingMap) map else buffer
scalastyle-config.xml (7 additions, 0 deletions)
@@ -150,6 +150,13 @@ This file is divided into 3 sections:
 // scalastyle:on println]]></customMessage>
   </check>
 
+  <check customId="visiblefortesting" level="error" class="org.scalastyle.file.RegexChecker" enabled="true">
+    <parameters><parameter name="regex">@VisibleForTesting</parameter></parameters>
+    <customMessage><![CDATA[
+      @VisibleForTesting causes classpath issues. Please note this in the java doc instead (SPARK-11615).
+    ]]></customMessage>
+  </check>
+
   <check customId="classforname" level="error" class="org.scalastyle.file.RegexChecker" enabled="true">
     <parameters><parameter name="regex">Class\.forName</parameter></parameters>
     <customMessage><![CDATA[
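
To illustrate the new visiblefortesting rule: any file containing the literal string @VisibleForTesting now fails scalastyle with the message above, so test-only members document the fact in their doc comment instead. Below is a minimal runnable sketch of the resulting pattern; the ToyListenerBus class is hypothetical and only mirrors the waitUntilEmpty change in AsynchronousListenerBus above, it is not part of this patch.

import java.util.concurrent.TimeoutException

// Hypothetical class, loosely modeled on AsynchronousListenerBus above.
class ToyListenerBus {
  @volatile private var pending: Int = 0

  def post(): Unit = { pending += 1 }
  def process(): Unit = { if (pending > 0) pending -= 1 }

  // Before SPARK-11615 this method would have carried the Guava annotation;
  // the new scalastyle rule rejects that string, so the doc comment carries
  // the note instead.
  /**
   * For testing only. Wait until there are no more pending events, or until
   * the specified time has elapsed. Throw `TimeoutException` if the specified
   * time elapsed before the queue emptied.
   * Exposed for testing.
   */
  @throws(classOf[TimeoutException])
  def waitUntilEmpty(timeoutMillis: Long): Unit = {
    val finishTime = System.currentTimeMillis + timeoutMillis
    while (pending > 0) {
      if (System.currentTimeMillis > finishTime) {
        throw new TimeoutException(
          s"The queue was not empty after $timeoutMillis milliseconds")
      }
      Thread.sleep(10)
    }
  }
}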
@@ -17,8 +17,6 @@
 
 package org.apache.spark.sql.execution
 
-import com.google.common.annotations.VisibleForTesting
-
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.SQLContext
 import org.apache.spark.sql.catalyst.InternalRow
@@ -33,7 +31,6 @@ import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
  */
 class QueryExecution(val sqlContext: SQLContext, val logical: LogicalPlan) {
 
-  @VisibleForTesting
   def assertAnalyzed(): Unit = sqlContext.analyzer.checkAnalysis(analyzed)
 
   lazy val analyzed: LogicalPlan = sqlContext.analyzer.execute(logical)
@@ -19,7 +19,6 @@ package org.apache.spark.network.shuffle
 import java.io.{IOException, File}
 import java.util.concurrent.ConcurrentMap
 
-import com.google.common.annotations.VisibleForTesting
 import org.apache.hadoop.yarn.api.records.ApplicationId
 import org.fusesource.leveldbjni.JniDBFactory
 import org.iq80.leveldb.{DB, Options}