diff --git a/examples/java/build.gradle b/examples/java/build.gradle
index c305f0451f09..a1b6827a98ec 100644
--- a/examples/java/build.gradle
+++ b/examples/java/build.gradle
@@ -18,7 +18,12 @@
import groovy.json.JsonOutput
-plugins { id 'org.apache.beam.module' }
+plugins {
+ id 'java'
+ id 'org.apache.beam.module'
+ id 'com.github.johnrengelman.shadow'
+}
+
applyJavaNature(
exportJavadoc: false,
automaticModuleName: 'org.apache.beam.examples',
@@ -49,9 +54,11 @@ configurations.sparkRunnerPreCommit {
dependencies {
compile enforcedPlatform(library.java.google_cloud_platform_libraries_bom)
compile library.java.vendored_guava_26_0_jre
+ compile library.java.kafka_clients
compile project(path: ":sdks:java:core", configuration: "shadow")
compile project(":sdks:java:extensions:google-cloud-platform-core")
compile project(":sdks:java:io:google-cloud-platform")
+ compile project(":sdks:java:io:kafka")
compile project(":sdks:java:extensions:ml")
compile library.java.avro
compile library.java.bigdataoss_util
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/README.md b/examples/java/src/main/java/org/apache/beam/examples/complete/README.md
index 3f4842a5c7a7..b74ea44554d5 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/complete/README.md
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/README.md
@@ -32,6 +32,11 @@ This directory contains end-to-end example pipelines that perform complex data p
Pub/Sub topic, splits each line into individual words, capitalizes those
words, and writes the output to a BigQuery table.
+
+KafkaToPubsub
+— A streaming pipeline example that reads data from one or more Apache Kafka
+topics and writes it to a single topic in Google Cloud Pub/Sub.
+
TfIdf
— An example that computes a basic TF-IDF search table for a directory or
Cloud Storage prefix. Demonstrates joining data, side inputs, and logging.
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/kafkatopubsub/KafkaPubsubConstants.java b/examples/java/src/main/java/org/apache/beam/examples/complete/kafkatopubsub/KafkaPubsubConstants.java
new file mode 100644
index 000000000000..46f021a5ca75
--- /dev/null
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/kafkatopubsub/KafkaPubsubConstants.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.examples.complete.kafkatopubsub;
+
+/** Constant variables that are used across the template's parts. */
+public class KafkaPubsubConstants {
+
+ /* Config keywords */
+ public static final String KAFKA_CREDENTIALS = "kafka";
+ public static final String SSL_CREDENTIALS = "ssl";
+ public static final String USERNAME = "username";
+ public static final String PASSWORD = "password";
+ public static final String BUCKET = "bucket";
+}
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/kafkatopubsub/KafkaToPubsub.java b/examples/java/src/main/java/org/apache/beam/examples/complete/kafkatopubsub/KafkaToPubsub.java
new file mode 100644
index 000000000000..8b4e2b305927
--- /dev/null
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/kafkatopubsub/KafkaToPubsub.java
@@ -0,0 +1,235 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.examples.complete.kafkatopubsub;
+
+import static org.apache.beam.examples.complete.kafkatopubsub.kafka.consumer.Utils.configureKafka;
+import static org.apache.beam.examples.complete.kafkatopubsub.kafka.consumer.Utils.configureSsl;
+import static org.apache.beam.examples.complete.kafkatopubsub.kafka.consumer.Utils.getKafkaCredentialsFromVault;
+import static org.apache.beam.examples.complete.kafkatopubsub.kafka.consumer.Utils.isSslSpecified;
+import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkArgument;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.apache.beam.examples.complete.kafkatopubsub.avro.AvroDataClass;
+import org.apache.beam.examples.complete.kafkatopubsub.avro.AvroDataClassKafkaAvroDeserializer;
+import org.apache.beam.examples.complete.kafkatopubsub.options.KafkaToPubsubOptions;
+import org.apache.beam.examples.complete.kafkatopubsub.transforms.FormatTransform;
+import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.PipelineResult;
+import org.apache.beam.sdk.io.gcp.pubsub.PubsubIO;
+import org.apache.beam.sdk.options.PipelineOptionsFactory;
+import org.apache.beam.sdk.transforms.Values;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * The {@link KafkaToPubsub} pipeline is a streaming pipeline which ingests data in JSON format from
+ * Kafka, and outputs the resulting records to PubSub. Input topics, output topic, Bootstrap servers
+ * are specified by the user as template parameters.
+ * Kafka may be configured with the SASL/SCRAM security mechanism; in this case, a Vault secret
+ * storage with credentials should be provided. The URL to the credentials and the Vault token are
+ * specified by the user as template parameters.
+ *
+ * <p><b>Pipeline Requirements</b>
+ *
+ * <ul>
+ *   <li>Kafka Bootstrap Server(s).
+ *   <li>Existing Kafka topic(s).
+ *   <li>An existing Pub/Sub output topic.
+ *   <li>(Optional) An existing HashiCorp Vault secret storage.
+ *   <li>(Optional) A configured secure SSL connection for Kafka.
+ * </ul>
+ *
+ * <p><b>Example Usage</b>
+ *
+ * <p><b>Gradle preparation</b>
+ *
+ * <p>To run this example, your {@code build.gradle} file should contain the following task to
+ * execute the pipeline:
+ *
+ * <pre>{@code
+ * task execute (type:JavaExec) {
+ *   main = System.getProperty("mainClass")
+ *   classpath = sourceSets.main.runtimeClasspath
+ *   systemProperties System.getProperties()
+ *   args System.getProperty("exec.args", "").split()
+ * }
+ * }</pre>
+ *
+ * <p>This task allows you to run the pipeline via the following command:
+ *
+ * <pre>{@code
+ * gradle clean execute -DmainClass=org.apache.beam.examples.complete.kafkatopubsub.KafkaToPubsub \
+ *     -Dexec.args="--<argument>=<value> --<argument>=<value>"
+ * }</pre>
+ *
+ * <p><b>Running the pipeline</b>
+ *
+ * <p>To execute this pipeline, specify the parameters:
+ *
+ * <ul>
+ *   <li>Kafka Bootstrap servers
+ *   <li>Kafka input topics
+ *   <li>Pub/Sub output topic
+ *   <li>Output format
+ * </ul>
+ *
+ * in the following format:
+ *
+ * <pre>{@code
+ * --bootstrapServers=host:port \
+ * --inputTopics=your-input-topic \
+ * --outputTopic=projects/your-project-id/topics/your-topic-name \
+ * --outputFormat=AVRO|PUBSUB
+ * }</pre>
+ *
+ * <p>Optionally, to retrieve Kafka credentials for SASL/SCRAM, specify a URL to the credentials in
+ * HashiCorp Vault and the Vault access token:
+ *
+ * <pre>{@code
+ * --secretStoreUrl=http(s)://host:port/path/to/credentials
+ * --vaultToken=your-token
+ * }</pre>
+ *
+ * <p>Optionally, to configure a secure SSL connection between the Beam pipeline and Kafka, specify
+ * the parameters:
+ *
+ * <ul>
+ *   <li>A path to a truststore file (a local path or a GCS path starting with {@code gs://})
+ *   <li>A path to a keystore file (a local path or a GCS path starting with {@code gs://})
+ *   <li>Truststore password
+ *   <li>Keystore password
+ *   <li>Key password
+ * </ul>
+ *
+ * <pre>{@code
+ * --truststorePath=path/to/kafka.truststore.jks
+ * --keystorePath=path/to/kafka.keystore.jks
+ * --truststorePassword=your-truststore-password
+ * --keystorePassword=your-keystore-password
+ * --keyPassword=your-key-password
+ * }</pre>
+ *
+ * <p>By default, this will run the pipeline locally with the DirectRunner. To change the runner,
+ * specify:
+ *
+ * <pre>{@code
+ * --runner=YOUR_SELECTED_RUNNER
+ * }</pre>
+ *
+ * <p><b>Example Avro usage</b>
+ *
+ * <p>This template contains an example class to deserialize AVRO from Kafka and serialize it to
+ * AVRO in Pub/Sub.
+ *
+ * <p>To adapt this example to your specific case, follow these steps:
+ *
+ * <ul>
+ *   <li>Create your own class to describe the AVRO schema. As an example, use {@link
+ *       AvroDataClass}. Just define the necessary fields.
+ *   <li>Create your own Avro deserializer class. As an example, use {@link
+ *       AvroDataClassKafkaAvroDeserializer}. Just rename it, and put your own schema class in the
+ *       necessary type parameters.
+ *   <li>Modify the {@link FormatTransform}. Put your schema class and deserializer in the related
+ *       parameters.
+ *   <li>Modify the write step in {@link KafkaToPubsub} by putting your schema class into the
+ *       "writeAvrosToPubSub" step.
+ * </ul>
+ */
+public class KafkaToPubsub {
+
+  /* Logger for the class. */
+ private static final Logger LOG = LoggerFactory.getLogger(KafkaToPubsub.class);
+
+ /**
+ * Main entry point for pipeline execution.
+ *
+ * @param args Command line arguments to the pipeline.
+ */
+ public static void main(String[] args) {
+ KafkaToPubsubOptions options =
+ PipelineOptionsFactory.fromArgs(args).withValidation().as(KafkaToPubsubOptions.class);
+
+ run(options);
+ }
+
+ /**
+   * Runs a pipeline which reads messages from Kafka and writes them to Pub/Sub.
+ *
+ * @param options arguments to the pipeline
+ */
+ public static PipelineResult run(KafkaToPubsubOptions options) {
+ // Configure Kafka consumer properties
+    Map<String, Object> kafkaConfig = new HashMap<>();
+    Map<String, Object> sslConfig = new HashMap<>();
+    if (options.getSecretStoreUrl() != null && options.getVaultToken() != null) {
+      Map<String, Map<String, String>> credentials =
+ getKafkaCredentialsFromVault(options.getSecretStoreUrl(), options.getVaultToken());
+ kafkaConfig = configureKafka(credentials.get(KafkaPubsubConstants.KAFKA_CREDENTIALS));
+ } else {
+ LOG.warn(
+ "No information to retrieve Kafka credentials was provided. "
+ + "Trying to initiate an unauthorized connection.");
+ }
+
+ if (isSslSpecified(options)) {
+ sslConfig.putAll(configureSsl(options));
+ } else {
+ LOG.info(
+ "No information to retrieve SSL certificate was provided by parameters."
+ + "Trying to initiate a plain text connection.");
+ }
+
+    List<String> topicsList = new ArrayList<>(Arrays.asList(options.getInputTopics().split(",")));
+
+    checkArgument(
+        topicsList.size() > 0 && topicsList.get(0).length() > 0,
+        "inputTopics cannot be an empty string.");
+
+    List<String> bootstrapServersList =
+        new ArrayList<>(Arrays.asList(options.getBootstrapServers().split(",")));
+
+    checkArgument(
+        bootstrapServersList.size() > 0 && bootstrapServersList.get(0).length() > 0,
+        "bootstrapServers cannot be an empty string.");
+
+ // Create the pipeline
+ Pipeline pipeline = Pipeline.create(options);
+ LOG.info(
+ "Starting Kafka-To-PubSub pipeline with parameters bootstrap servers:"
+ + options.getBootstrapServers()
+ + " input topics: "
+ + options.getInputTopics()
+ + " output pubsub topic: "
+ + options.getOutputTopic());
+
+ /*
+ * Steps:
+ * 1) Read messages in from Kafka
+ * 2) Extract values only
+ * 3) Write successful records to PubSub
+ */
+
+ if (options.getOutputFormat() == FormatTransform.FORMAT.AVRO) {
+ pipeline
+ .apply(
+ "readAvrosFromKafka",
+ FormatTransform.readAvrosFromKafka(
+ options.getBootstrapServers(), topicsList, kafkaConfig, sslConfig))
+ .apply("createValues", Values.create())
+ .apply("writeAvrosToPubSub", PubsubIO.writeAvros(AvroDataClass.class));
+
+ } else {
+ pipeline
+ .apply(
+ "readFromKafka",
+ FormatTransform.readFromKafka(
+ options.getBootstrapServers(), topicsList, kafkaConfig, sslConfig))
+ .apply("createValues", Values.create())
+ .apply("writeToPubSub", new FormatTransform.FormatOutput(options));
+ }
+
+ return pipeline.run();
+ }
+}
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/kafkatopubsub/README.md b/examples/java/src/main/java/org/apache/beam/examples/complete/kafkatopubsub/README.md
new file mode 100644
index 000000000000..13f2d3a5f593
--- /dev/null
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/kafkatopubsub/README.md
@@ -0,0 +1,200 @@
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+    either express or implied.  See the License for the specific
+    language governing permissions and limitations under the License.
+-->
+
+# Apache Beam pipeline example to ingest data from Apache Kafka to Google Cloud Pub/Sub
+
+This directory contains an [Apache Beam](https://beam.apache.org/) pipeline example that reads data
+from one or more [Apache Kafka](https://kafka.apache.org/) topics and writes it to a single topic
+in [Google Cloud Pub/Sub](https://cloud.google.com/pubsub).
+
+Supported data formats:
+
+- Serializable plaintext formats, such as JSON
+- [PubSubMessage](https://cloud.google.com/pubsub/docs/reference/rest/v1/PubsubMessage).
+
+Supported input source configurations:
+
+- Single or multiple Apache Kafka bootstrap servers
+- Apache Kafka SASL/SCRAM authentication over plaintext or SSL connection
+- Secrets vault service [HashiCorp Vault](https://www.vaultproject.io/).
+
+Supported destination configuration:
+
+- Single Google Cloud Pub/Sub topic.
+
+In the simplest scenario, the example creates an Apache Beam pipeline that reads messages from a source Kafka topic and
+streams the text messages into the specified Pub/Sub destination topic. Other scenarios may need Kafka SASL/SCRAM
+authentication, which can be performed over a plaintext or SSL-encrypted connection. The example supports using a
+single Kafka user account to authenticate against the provided source Kafka servers and topics. To support SASL
+authentication over SSL, the example needs the SSL certificate location and access to a secrets vault service holding
+the Kafka username and password, currently HashiCorp Vault.
+
+## Requirements
+
+- Java 8
+- Kafka Bootstrap Server(s) up and running
+- Existing source Kafka topic(s)
+- An existing Pub/Sub destination output topic
+- (Optional) An existing HashiCorp Vault
+- (Optional) A configured secure SSL connection for Kafka
+
+## Getting Started
+
+This section describes what is needed to get the example up and running.
+
+- Gradle preparation
+- Local execution
+- Running as a Dataflow Template
+- Supported Output Formats
+ - PubSubMessage
+ - AVRO
+- E2E tests (TBD)
+
+## Gradle preparation
+
+To run this example, your `build.gradle` file should contain the following task to execute the pipeline:
+
+```
+task execute (type:JavaExec) {
+ main = System.getProperty("mainClass")
+ classpath = sourceSets.main.runtimeClasspath
+ systemProperties System.getProperties()
+ args System.getProperty("exec.args", "").split()
+}
+```
+
+This task allows you to run the pipeline via the following command:
+
+```bash
+gradle clean execute -DmainClass=org.apache.beam.examples.complete.kafkatopubsub.KafkaToPubsub \
+    -Dexec.args="--<argument>=<value> --<argument>=<value>"
+```
+
+## Running the pipeline
+
+To execute this pipeline, specify the parameters:
+
+- Kafka Bootstrap servers
+- Kafka input topics
+- Pub/Sub output topic
+- Output format
+
+in the following format:
+
+```bash
+--bootstrapServers=host:port \
+--inputTopics=your-input-topic \
+--outputTopic=projects/your-project-id/topics/your-topic-name \
+--outputFormat=AVRO|PUBSUB
+```
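+
+For instance, a complete local run might look like the following; the server, topic, and project
+names are hypothetical placeholders:
+
+```bash
+gradle clean execute -DmainClass=org.apache.beam.examples.complete.kafkatopubsub.KafkaToPubsub \
+    -Dexec.args="--bootstrapServers=localhost:9092 --inputTopics=messages --outputTopic=projects/my-project/topics/kafka-events --outputFormat=PUBSUB"
+```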
+
+Optionally, to retrieve Kafka credentials for SASL/SCRAM, specify a URL to the credentials in HashiCorp Vault and the
+vault access token:
+
+```bash
+--secretStoreUrl=http(s)://host:port/path/to/credentials
+--vaultToken=your-token
+```
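+
+As a sanity check before running the pipeline, you can fetch the secret yourself with `curl`;
+`X-Vault-Token` is the standard authentication header of the HashiCorp Vault HTTP API. The URL and
+token below are the same placeholders as above:
+
+```bash
+# Hypothetical check that the Vault URL and token are valid; the secret at this URL
+# is expected to hold the Kafka username and password used for SASL/SCRAM.
+curl --header "X-Vault-Token: your-token" http(s)://host:port/path/to/credentials
+```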
+
+Optionally, to configure a secure SSL connection between the Beam pipeline and Kafka, specify the parameters:
+
+- A path to a truststore file (it can be a local path or a GCS path, which should start with `gs://`)
+- A path to a keystore file (it can be a local path or a GCS path, which should start with `gs://`)
+- Truststore password
+- Keystore password
+- Key password
+
+```bash
+--truststorePath=path/to/kafka.truststore.jks
+--keystorePath=path/to/kafka.keystore.jks
+--truststorePassword=your-truststore-password
+--keystorePassword=your-keystore-password
+--keyPassword=your-key-password
+```
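+
+As noted at the end of this document, a job executed with a distributed runner can read the SSL
+certificate only from GCS. In that case, stage the truststore and keystore files to a bucket first
+(the bucket name below is a placeholder):
+
+```bash
+# Stage the SSL stores to GCS so that distributed workers can read them.
+gsutil cp kafka.truststore.jks kafka.keystore.jks gs://your-bucket/kafka-certs/
+# Then pass the gs:// paths via --truststorePath and --keystorePath.
+```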
+
+By default this will run the pipeline locally with the DirectRunner. To change the runner, specify:
+
+```bash
+--runner=YOUR_SELECTED_RUNNER
+```
+
+See the [documentation](http://beam.apache.org/get-started/quickstart/) and
+the [Examples README](../../../../../../../../../README.md) for more information about how to run this example.
+
+## Running as a Dataflow Template
+
+This example also exists as a Google Dataflow Template, which you can build and run on Google Cloud Platform. See
+its [README.md](https://github.com/GoogleCloudPlatform/DataflowTemplates/blob/master/v2/kafka-to-pubsub/README.md) for
+more information.
+
+## Supported Output Formats
+
+This pipeline can output data in PubSubMessage or AVRO format.
+
+### PubSubMessage
+
+This example supports PubSubMessage format for output out-of-the-box. No additional changes are required.
+
+### AVRO
+
+This example demonstrates AVRO format support; the following steps are needed to provide it:
+
+- Define a custom class to deserialize AVRO from Kafka [provided in the example]
+- Create custom data serialization in Apache Beam
+- Serialize the data to AVRO in Pub/Sub [provided in the example].
+
+To adapt this example to your specific case, follow these steps:
+
+- Create your own class to describe the AVRO schema. As an example, use [AvroDataClass](avro/AvroDataClass.java). Just
+  define the necessary fields.
+- Create your own Avro deserializer class. As an example,
+  use [AvroDataClassKafkaAvroDeserializer](avro/AvroDataClassKafkaAvroDeserializer.java). Just rename it, and put your
+  own schema class in the necessary type parameters.
+- Modify the [FormatTransform.readAvrosFromKafka method](transforms/FormatTransform.java). Put your schema class and
+  deserializer in the related parameters.
+
+```java
+return KafkaIO.<String, AvroDataClass>read()
+    ...
+    .withValueDeserializerAndCoder(
+        AvroDataClassKafkaAvroDeserializer.class, AvroCoder.of(AvroDataClass.class)) // put your classes here
+    ...
+```
+
+- [OPTIONAL TO IMPLEMENT] Add a [Beam Transform](https://beam.apache.org/documentation/programming-guide/#transforms)
+  if it is necessary in your case.
+- Modify the write step in the [KafkaToPubsub class](KafkaToPubsub.java) by putting your schema class into the
+  "writeAvrosToPubSub" step.
+  - NOTE: if the class changed during the transform, use the changed class definition here.
+
+```java
+if (options.getOutputFormat() == FormatTransform.FORMAT.AVRO) {
+  ...
+      .apply("writeAvrosToPubSub", PubsubIO.writeAvros(AvroDataClass.class)); // put your SCHEMA class here
+}
+```
+
+## End to end tests
+
+TBD
+
+_Note: When executed with a distributed runner, the Kafka to Pub/Sub job supports SSL configuration only with the
+certificate located in GCS._
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/kafkatopubsub/avro/AvroDataClass.java b/examples/java/src/main/java/org/apache/beam/examples/complete/kafkatopubsub/avro/AvroDataClass.java
new file mode 100644
index 000000000000..8c8702115f65
--- /dev/null
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/kafkatopubsub/avro/AvroDataClass.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.examples.complete.kafkatopubsub.avro;
+
+import org.apache.beam.sdk.coders.AvroCoder;
+import org.apache.beam.sdk.coders.DefaultCoder;
+
+/**
+ * Example of an AVRO serialization class. To configure your AVRO schema, change this class to
+ * match the required schema definition.
+ */
+@DefaultCoder(AvroCoder.class)
+public class AvroDataClass {
+
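+  // Example fields; replace them with the fields of your own AVRO schema.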
+ String field1;
+ Float field2;
+ Float field3;
+
+ public AvroDataClass(String field1, Float field2, Float field3) {
+ this.field1 = field1;
+ this.field2 = field2;
+ this.field3 = field3;
+ }
+
+ public String getField1() {
+ return field1;
+ }
+
+ public void setField1(String field1) {
+ this.field1 = field1;
+ }
+
+ public Float getField2() {
+ return field2;
+ }
+
+ public void setField2(Float field2) {
+ this.field2 = field2;
+ }
+
+ public Float getField3() {
+ return field3;
+ }
+
+ public void setField3(Float field3) {
+ this.field3 = field3;
+ }
+}
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/kafkatopubsub/avro/AvroDataClassKafkaAvroDeserializer.java b/examples/java/src/main/java/org/apache/beam/examples/complete/kafkatopubsub/avro/AvroDataClassKafkaAvroDeserializer.java
new file mode 100644
index 000000000000..a9aeb72eb196
--- /dev/null
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/kafkatopubsub/avro/AvroDataClassKafkaAvroDeserializer.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.examples.complete.kafkatopubsub.avro;
+
+import io.confluent.kafka.serializers.AbstractKafkaAvroDeserializer;
+import io.confluent.kafka.serializers.KafkaAvroDeserializerConfig;
+import java.util.Map;
+import org.apache.kafka.common.serialization.Deserializer;
+
+/** Example of a custom AVRO deserializer. */
+public class AvroDataClassKafkaAvroDeserializer extends AbstractKafkaAvroDeserializer
+    implements Deserializer<AvroDataClass> {
+
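+  // Configures the underlying Confluent Avro deserializer with the consumer properties.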
+ @Override
+  public void configure(Map<String, ?> configs, boolean isKey) {
+ configure(new KafkaAvroDeserializerConfig(configs));
+ }
+
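+  // Delegates to AbstractKafkaAvroDeserializer, which decodes Confluent-framed Avro bytes
+  // (schema id + payload) into the schema class.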
+ @Override
+ public AvroDataClass deserialize(String s, byte[] bytes) {
+ return (AvroDataClass) this.deserialize(bytes);
+ }
+
+ @Override
+ public void close() {}
+}
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/kafkatopubsub/avro/package-info.java b/examples/java/src/main/java/org/apache/beam/examples/complete/kafkatopubsub/avro/package-info.java
new file mode 100644
index 000000000000..1dc4f36ce656
--- /dev/null
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/kafkatopubsub/avro/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** Avro classes for the Kafka to Pub/Sub template. */
+package org.apache.beam.examples.complete.kafkatopubsub.avro;
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/kafkatopubsub/kafka/consumer/SslConsumerFactoryFn.java b/examples/java/src/main/java/org/apache/beam/examples/complete/kafkatopubsub/kafka/consumer/SslConsumerFactoryFn.java
new file mode 100644
index 000000000000..012f147b77fd
--- /dev/null
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/kafkatopubsub/kafka/consumer/SslConsumerFactoryFn.java
@@ -0,0 +1,130 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.examples.complete.kafkatopubsub.kafka.consumer;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.channels.FileChannel;
+import java.nio.channels.ReadableByteChannel;
+import java.nio.file.Paths;
+import java.nio.file.StandardOpenOption;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import org.apache.beam.sdk.io.FileSystems;
+import org.apache.beam.sdk.transforms.SerializableFunction;
+import org.apache.kafka.clients.CommonClientConfigs;
+import org.apache.kafka.clients.consumer.Consumer;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.config.SslConfigs;
+import org.apache.kafka.common.security.auth.SecurityProtocol;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** Class to create a Kafka Consumer with configured SSL. */
+public class SslConsumerFactoryFn
+    implements SerializableFunction<Map<String, Object>, Consumer<byte[], byte[]>> {