From 4a5a7e8c88a53f363b58dada46fe9df13a814c0f Mon Sep 17 00:00:00 2001
From: Lee Dongjin
Date: Wed, 5 Sep 2018 22:02:44 +0900
Subject: [PATCH 1/3] Remove unused import from KafkaRelation

---
 .../main/scala/org/apache/spark/sql/kafka010/KafkaRelation.scala | 1 -
 1 file changed, 1 deletion(-)

diff --git a/external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaRelation.scala b/external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaRelation.scala
index 9d856c9494e10..e6f9d1259e43e 100644
--- a/external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaRelation.scala
+++ b/external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaRelation.scala
@@ -17,7 +17,6 @@
 
 package org.apache.spark.sql.kafka010
 
-import java.{util => ju}
 import java.util.UUID
 
 import org.apache.kafka.common.TopicPartition

From 3b9ea74bc5facf7a17f698bbaac6f08243fc11bc Mon Sep 17 00:00:00 2001
From: Lee Dongjin
Date: Wed, 5 Sep 2018 22:05:15 +0900
Subject: [PATCH 2/3] Remove unused import from KafkaOffsetRangeCalculator

---
 .../apache/spark/sql/kafka010/KafkaOffsetRangeCalculator.scala | 1 -
 1 file changed, 1 deletion(-)

diff --git a/external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaOffsetRangeCalculator.scala b/external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaOffsetRangeCalculator.scala
index 6631ae84167c8..fb209c724afba 100644
--- a/external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaOffsetRangeCalculator.scala
+++ b/external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaOffsetRangeCalculator.scala
@@ -29,7 +29,6 @@ import org.apache.spark.sql.sources.v2.DataSourceOptions
 private[kafka010] class KafkaOffsetRangeCalculator(val minPartitions: Option[Int]) {
   require(minPartitions.isEmpty || minPartitions.get > 0)
 
-  import KafkaOffsetRangeCalculator._
   /**
    * Calculate the offset ranges that we are going to process this batch. If `minPartitions`
    * is not set or is set less than or equal the number of `topicPartitions` that we're going to

From ee4e97a8187c236ccf52f1c882b607a9421d284e Mon Sep 17 00:00:00 2001
From: Lee Dongjin
Date: Wed, 5 Sep 2018 22:13:38 +0900
Subject: [PATCH 3/3] Fix comments on KafkaStreamWriterFactory: it does not
 extend DataWriterFactory from commit e754887.

---
 .../spark/sql/kafka010/KafkaStreamingWriteSupport.scala | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaStreamingWriteSupport.scala b/external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaStreamingWriteSupport.scala
index dc19312f79a22..927c56d9ce829 100644
--- a/external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaStreamingWriteSupport.scala
+++ b/external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaStreamingWriteSupport.scala
@@ -54,8 +54,8 @@ class KafkaStreamingWriteSupport(
 }
 
 /**
- * A [[DataWriterFactory]] for Kafka writing. Will be serialized and sent to executors to generate
- * the per-task data writers.
+ * A [[StreamingDataWriterFactory]] for Kafka writing. Will be serialized and sent to executors to
+ * generate the per-task data writers.
  * @param topic The topic that should be written to. If None, topic will be inferred from
  *              a `topic` field in the incoming data.
  * @param producerParams Parameters for Kafka producers in each task.
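
Note on PATCH 3/3: the Scaladoc previously referred to [[DataWriterFactory]], but KafkaStreamWriterFactory is the streaming-side factory, so the reference now points at [[StreamingDataWriterFactory]]. The short Scala sketch below is a self-contained toy mirror of that batch/streaming factory split, not the real Spark sources.v2 interfaces; every name prefixed with "Sketch" is hypothetical, and whether the real streaming trait threads an epoch id through createWriter depends on the Spark version. It only illustrates why the two factory traits are documented separately: the factory is built on the driver, serialized, and used on executors to create one writer per task, as the fixed comment says.

// Toy stand-ins, NOT the real Spark API.
final case class SketchRow(topic: Option[String], key: Array[Byte], value: Array[Byte])

// Writes individual rows on an executor and commits them at the end of the task.
trait SketchDataWriter {
  def write(row: SketchRow): Unit
  def commit(): Unit
}

// Batch-side factory: creates one writer per task.
trait SketchDataWriterFactory extends Serializable {
  def createWriter(partitionId: Int, taskId: Long): SketchDataWriter
}

// Streaming-side factory: in this sketch it also receives the epoch being processed.
trait SketchStreamingDataWriterFactory extends Serializable {
  def createWriter(partitionId: Int, taskId: Long, epochId: Long): SketchDataWriter
}

// Analogue of KafkaStreamWriterFactory: it implements the *streaming* trait, which is
// why the Scaladoc should reference the streaming factory, not the batch one.
final case class SketchKafkaStreamWriterFactory(
    topic: Option[String],
    producerParams: Map[String, String]) extends SketchStreamingDataWriterFactory {

  override def createWriter(partitionId: Int, taskId: Long, epochId: Long): SketchDataWriter =
    new SketchDataWriter {
      private val buffered = scala.collection.mutable.ArrayBuffer.empty[SketchRow]
      override def write(row: SketchRow): Unit = buffered += row
      override def commit(): Unit =
        println(s"partition=$partitionId task=$taskId epoch=$epochId rows=${buffered.size}")
    }
}

// Minimal usage: the driver would serialize the factory; each task calls createWriter.
object SketchExample extends App {
  val factory = SketchKafkaStreamWriterFactory(
    Some("events"), Map("bootstrap.servers" -> "localhost:9092"))
  val writer = factory.createWriter(partitionId = 0, taskId = 42L, epochId = 7L)
  writer.write(SketchRow(None, Array.emptyByteArray, "hello".getBytes("UTF-8")))
  writer.commit()
}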