From c034767abe196dad5acbddd4e2a1b437429813d3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E5=AD=99=E7=BF=94?=
Date: Wed, 19 Sep 2018 14:39:17 +0800
Subject: [PATCH 1/2] Update KafkaUtils.scala

remove annotation @Experimental
---
 .../spark/streaming/kafka010/KafkaUtils.scala | 14 --------------
 1 file changed, 14 deletions(-)

diff --git a/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaUtils.scala b/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaUtils.scala
index e6bdef04512d5..3a0e55f8bf537 100644
--- a/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaUtils.scala
+++ b/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaUtils.scala
@@ -32,13 +32,10 @@ import org.apache.spark.streaming.api.java.{ JavaInputDStream, JavaStreamingCont
 import org.apache.spark.streaming.dstream._
 
 /**
- * :: Experimental ::
  * object for constructing Kafka streams and RDDs
  */
-@Experimental
 object KafkaUtils extends Logging {
   /**
-   * :: Experimental ::
    * Scala constructor for a batch-oriented interface for consuming from Kafka.
    * Starting and ending offsets are specified in advance,
    * so that you can control exactly-once semantics.
@@ -52,7 +49,6 @@ object KafkaUtils extends Logging {
    * @tparam K type of Kafka message key
    * @tparam V type of Kafka message value
    */
-  @Experimental
   def createRDD[K, V](
       sc: SparkContext,
       kafkaParams: ju.Map[String, Object],
@@ -75,7 +71,6 @@ object KafkaUtils extends Logging {
   }
 
   /**
-   * :: Experimental ::
    * Java constructor for a batch-oriented interface for consuming from Kafka.
    * Starting and ending offsets are specified in advance,
    * so that you can control exactly-once semantics.
@@ -89,7 +84,6 @@ object KafkaUtils extends Logging {
    * @tparam K type of Kafka message key
    * @tparam V type of Kafka message value
    */
-  @Experimental
   def createRDD[K, V](
       jsc: JavaSparkContext,
       kafkaParams: ju.Map[String, Object],
@@ -101,7 +95,6 @@ object KafkaUtils extends Logging {
   }
 
   /**
-   * :: Experimental ::
    * Scala constructor for a DStream where
    * each given Kafka topic/partition corresponds to an RDD partition.
    * The spark configuration spark.streaming.kafka.maxRatePerPartition gives the maximum number
@@ -114,7 +107,6 @@ object KafkaUtils extends Logging {
    * @tparam K type of Kafka message key
    * @tparam V type of Kafka message value
    */
-  @Experimental
   def createDirectStream[K, V](
       ssc: StreamingContext,
       locationStrategy: LocationStrategy,
@@ -125,7 +117,6 @@ object KafkaUtils extends Logging {
   }
 
   /**
-   * :: Experimental ::
    * Scala constructor for a DStream where
    * each given Kafka topic/partition corresponds to an RDD partition.
    * @param locationStrategy In most cases, pass in [[LocationStrategies.PreferConsistent]],
@@ -137,7 +128,6 @@ object KafkaUtils extends Logging {
    * @tparam K type of Kafka message key
    * @tparam V type of Kafka message value
    */
-  @Experimental
   def createDirectStream[K, V](
       ssc: StreamingContext,
       locationStrategy: LocationStrategy,
@@ -148,7 +138,6 @@ object KafkaUtils extends Logging {
   }
 
   /**
-   * :: Experimental ::
    * Java constructor for a DStream where
    * each given Kafka topic/partition corresponds to an RDD partition.
    * @param locationStrategy In most cases, pass in [[LocationStrategies.PreferConsistent]],
@@ -158,7 +147,6 @@ object KafkaUtils extends Logging {
    * @tparam K type of Kafka message key
    * @tparam V type of Kafka message value
    */
-  @Experimental
   def createDirectStream[K, V](
       jssc: JavaStreamingContext,
       locationStrategy: LocationStrategy,
@@ -170,7 +158,6 @@ object KafkaUtils extends Logging {
   }
 
   /**
-   * :: Experimental ::
    * Java constructor for a DStream where
    * each given Kafka topic/partition corresponds to an RDD partition.
    * @param locationStrategy In most cases, pass in [[LocationStrategies.PreferConsistent]],
@@ -182,7 +169,6 @@ object KafkaUtils extends Logging {
    * @tparam K type of Kafka message key
    * @tparam V type of Kafka message value
    */
-  @Experimental
   def createDirectStream[K, V](
       jssc: JavaStreamingContext,
       locationStrategy: LocationStrategy,

From 11866c4627168bc270f03f398939ba1d3e2ed512 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E5=AD=99=E7=BF=94?=
Date: Wed, 19 Sep 2018 14:42:01 +0800
Subject: [PATCH 2/2] remove annotation @Experimental

---
 .../scala/org/apache/spark/streaming/kafka010/KafkaUtils.scala | 1 -
 1 file changed, 1 deletion(-)

diff --git a/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaUtils.scala b/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaUtils.scala
index 3a0e55f8bf537..64b6ef6c53b6d 100644
--- a/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaUtils.scala
+++ b/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaUtils.scala
@@ -23,7 +23,6 @@ import org.apache.kafka.clients.consumer._
 import org.apache.kafka.common.TopicPartition
 
 import org.apache.spark.SparkContext
-import org.apache.spark.annotation.Experimental
 import org.apache.spark.api.java.{ JavaRDD, JavaSparkContext }
 import org.apache.spark.internal.Logging
 import org.apache.spark.rdd.RDD
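
For context, the methods losing @Experimental above are the kafka010 entry points, createRDD and createDirectStream. Below is a minimal sketch of wiring up the now-stable createDirectStream, following the pattern in the Spark Streaming + Kafka 0.10 integration guide; the broker address, group id, topic name, batch interval, and app/object names are illustrative placeholders, not part of this patch:

    import org.apache.kafka.common.serialization.StringDeserializer
    import org.apache.spark.SparkConf
    import org.apache.spark.streaming.{Seconds, StreamingContext}
    import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
    import org.apache.spark.streaming.kafka010.KafkaUtils
    import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent

    object DirectStreamSketch {
      def main(args: Array[String]): Unit = {
        // Placeholder app name, master, and batch interval; adjust for a real deployment.
        val conf = new SparkConf().setAppName("DirectStreamSketch").setMaster("local[2]")
        val ssc = new StreamingContext(conf, Seconds(5))

        // Placeholder connection settings; "localhost:9092", the group id,
        // and the topic below are examples only.
        val kafkaParams = Map[String, Object](
          "bootstrap.servers" -> "localhost:9092",
          "key.deserializer" -> classOf[StringDeserializer],
          "value.deserializer" -> classOf[StringDeserializer],
          "group.id" -> "direct-stream-sketch",
          "auto.offset.reset" -> "latest",
          "enable.auto.commit" -> (false: java.lang.Boolean)
        )

        // The direct stream gives one RDD partition per Kafka topic/partition;
        // PreferConsistent spreads those partitions evenly across executors.
        val stream = KafkaUtils.createDirectStream[String, String](
          ssc,
          PreferConsistent,
          Subscribe[String, String](Array("example-topic"), kafkaParams)
        )

        stream.map(record => (record.key, record.value)).print()

        ssc.start()
        ssc.awaitTermination()
      }
    }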