Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions docs/content/development/extensions-core/kafka-ingestion.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,9 @@ currently designated as an *experimental feature* and is subject to the usual
[experimental caveats](../experimental.html).

<div class="note info">
The Kafka indexing service uses the Java consumer that was introduced in Kafka 0.9. As there were protocol changes
made in this version, Kafka 0.9 consumers are not compatible with older brokers. Ensure that your Kafka brokers are
version 0.9 or better before using this service.
The Kafka indexing service uses the Java consumer that was introduced in Kafka 0.10.x. As there were protocol changes
made in this version, Kafka 0.10.x consumers might not be compatible with older brokers. Ensure that your Kafka brokers are
version 0.10.x or better before using this service. Refer to the <a href="https://kafka.apache.org/documentation/#upgrade">Kafka upgrade guide</a> if you are using an older version of Kafka brokers.
</div>

## Submitting a Supervisor Spec
Expand Down
4 changes: 2 additions & 2 deletions extensions-core/kafka-indexing-service/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>0.9.0.1</version>
<version>0.10.2.0</version>
</dependency>

<!-- Tests -->
Expand All @@ -67,7 +67,7 @@
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.11</artifactId>
<version>0.9.0.0</version>
<version>0.10.2.0</version>
<scope>test</scope>
</dependency>
<dependency>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,7 @@
import java.util.Properties;
import java.util.Random;
import java.util.Set;
import java.util.Collections;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.TimeUnit;
Expand Down Expand Up @@ -1008,7 +1009,7 @@ private void possiblyResetOffsetsOrWait(
final TopicPartition topicPartition = outOfRangePartition.getKey();
final long nextOffset = outOfRangePartition.getValue();
// seek to the beginning to get the least available offset
consumer.seekToBeginning(topicPartition);
consumer.seekToBeginning(Collections.singletonList(topicPartition));
final long leastAvailableOffset = consumer.position(topicPartition);
// reset the seek
consumer.seek(topicPartition, nextOffset);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,7 @@
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.HashMap;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
Expand Down Expand Up @@ -1480,13 +1481,13 @@ private long getOffsetFromKafkaForPartition(int partition, boolean useEarliestOf
{
TopicPartition topicPartition = new TopicPartition(ioConfig.getTopic(), partition);
if (!consumer.assignment().contains(topicPartition)) {
consumer.assign(Lists.newArrayList(topicPartition));
consumer.assign(Collections.singletonList(topicPartition));
}

if (useEarliestOffset) {
consumer.seekToBeginning(topicPartition);
consumer.seekToBeginning(Collections.singletonList(topicPartition));
} else {
consumer.seekToEnd(topicPartition);
consumer.seekToEnd(Collections.singletonList(topicPartition));
}

return consumer.position(topicPartition);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,12 +23,13 @@
import com.google.common.collect.Maps;
import kafka.server.KafkaConfig;
import kafka.server.KafkaServer;
import kafka.utils.SystemTime$;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.common.utils.SystemTime;
import scala.Some;
import scala.collection.immutable.List$;

import java.io.Closeable;
import java.io.File;
Expand Down Expand Up @@ -69,7 +70,7 @@ public void start()

final KafkaConfig config = new KafkaConfig(props);

server = new KafkaServer(config, SystemTime$.MODULE$, Some.apply(String.format("TestingBroker[%d]-", id)));
server = new KafkaServer(config, SystemTime.SYSTEM, Some.apply(String.format("TestingBroker[%d]-", id)), List$.MODULE$.empty());
server.startup();
}

Expand Down