From ec45dae870bafc114ce7a730b036550eabaa137a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Baptiste=20Onofr=C3=A9?= Date: Wed, 30 Mar 2016 18:18:51 +0200 Subject: [PATCH 1/2] [BEAM-77] Create IOs module and move bigtable IO there --- ios/bigtable/pom.xml | 99 +++++++++++++++++++ .../apache/beam}/io/bigtable/BigtableIO.java | 10 +- .../beam}/io/bigtable/BigtableService.java | 4 +- .../io/bigtable/BigtableServiceImpl.java | 14 +-- .../beam}/io/bigtable/package-info.java | 2 +- .../beam}/io/bigtable/BigtableIOTest.java | 41 ++------ ios/pom.xml | 40 ++++++++ pom.xml | 1 + sdks/java/core/pom.xml | 17 ---- 9 files changed, 159 insertions(+), 69 deletions(-) create mode 100644 ios/bigtable/pom.xml rename {sdks/java/core/src/main/java/com/google/cloud/dataflow/sdk => ios/bigtable/src/main/java/org/apache/beam}/io/bigtable/BigtableIO.java (99%) rename {sdks/java/core/src/main/java/com/google/cloud/dataflow/sdk => ios/bigtable/src/main/java/org/apache/beam}/io/bigtable/BigtableService.java (96%) rename {sdks/java/core/src/main/java/com/google/cloud/dataflow/sdk => ios/bigtable/src/main/java/org/apache/beam}/io/bigtable/BigtableServiceImpl.java (94%) rename {sdks/java/core/src/main/java/com/google/cloud/dataflow/sdk => ios/bigtable/src/main/java/org/apache/beam}/io/bigtable/package-info.java (93%) rename {sdks/java/core/src/test/java/com/google/cloud/dataflow/sdk => ios/bigtable/src/test/java/org/apache/beam}/io/bigtable/BigtableIOTest.java (95%) create mode 100644 ios/pom.xml diff --git a/ios/bigtable/pom.xml b/ios/bigtable/pom.xml new file mode 100644 index 000000000000..8b3d1d251811 --- /dev/null +++ b/ios/bigtable/pom.xml @@ -0,0 +1,99 @@ + + + + + 4.0.0 + + + org.apache.beam + ios-parent + 0.1.0-incubating-SNAPSHOT + ../pom.xml + + + bigtable + jar + + Apache Beam :: IOs :: Google Bigtable + + + + org.apache.beam + java-sdk-all + 0.1.0-incubating-SNAPSHOT + + + + org.slf4j + slf4j-api + ${slf4j.version} + + + + io.grpc + grpc-all + 0.12.0 + + + + com.google.cloud.bigtable + bigtable-protos + ${bigtable.version} + + + + com.google.cloud.bigtable + bigtable-client-core + ${bigtable.version} + + + + + org.apache.beam + java-sdk-all + 0.1.0-incubating-SNAPSHOT + tests + test + + + + org.slf4j + slf4j-jdk14 + ${slf4j.version} + test + + + + junit + junit + ${junit.version} + test + + + + org.hamcrest + hamcrest-all + ${hamcrest.version} + test + + + + + \ No newline at end of file diff --git a/sdks/java/core/src/main/java/com/google/cloud/dataflow/sdk/io/bigtable/BigtableIO.java b/ios/bigtable/src/main/java/org/apache/beam/io/bigtable/BigtableIO.java similarity index 99% rename from sdks/java/core/src/main/java/com/google/cloud/dataflow/sdk/io/bigtable/BigtableIO.java rename to ios/bigtable/src/main/java/org/apache/beam/io/bigtable/BigtableIO.java index 7d59b09c8d80..05c7eb684280 100644 --- a/sdks/java/core/src/main/java/com/google/cloud/dataflow/sdk/io/bigtable/BigtableIO.java +++ b/ios/bigtable/src/main/java/org/apache/beam/io/bigtable/BigtableIO.java @@ -14,11 +14,7 @@ * the License. 
*/ -package com.google.cloud.dataflow.sdk.io.bigtable; - -import static com.google.common.base.Preconditions.checkArgument; -import static com.google.common.base.Preconditions.checkNotNull; -import static com.google.common.base.Preconditions.checkState; +package org.apache.beam.io.bigtable; import com.google.bigtable.v1.Mutation; import com.google.bigtable.v1.Row; @@ -50,10 +46,10 @@ import com.google.common.util.concurrent.Futures; import com.google.protobuf.ByteString; import com.google.protobuf.Empty; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import javax.annotation.Nullable; import java.io.IOException; import java.util.Collections; import java.util.Iterator; @@ -61,7 +57,7 @@ import java.util.NoSuchElementException; import java.util.concurrent.ConcurrentLinkedQueue; -import javax.annotation.Nullable; +import static com.google.common.base.Preconditions.*; /** * A bounded source and sink for Google Cloud Bigtable. diff --git a/sdks/java/core/src/main/java/com/google/cloud/dataflow/sdk/io/bigtable/BigtableService.java b/ios/bigtable/src/main/java/org/apache/beam/io/bigtable/BigtableService.java similarity index 96% rename from sdks/java/core/src/main/java/com/google/cloud/dataflow/sdk/io/bigtable/BigtableService.java rename to ios/bigtable/src/main/java/org/apache/beam/io/bigtable/BigtableService.java index 85d706cb0a67..f8240ae880cc 100644 --- a/sdks/java/core/src/main/java/com/google/cloud/dataflow/sdk/io/bigtable/BigtableService.java +++ b/ios/bigtable/src/main/java/org/apache/beam/io/bigtable/BigtableService.java @@ -13,16 +13,16 @@ * License for the specific language governing permissions and limitations under * the License. */ -package com.google.cloud.dataflow.sdk.io.bigtable; +package org.apache.beam.io.bigtable; import com.google.bigtable.v1.Mutation; import com.google.bigtable.v1.Row; import com.google.bigtable.v1.SampleRowKeysResponse; -import com.google.cloud.dataflow.sdk.io.bigtable.BigtableIO.BigtableSource; import com.google.cloud.dataflow.sdk.values.KV; import com.google.common.util.concurrent.ListenableFuture; import com.google.protobuf.ByteString; import com.google.protobuf.Empty; +import org.apache.beam.io.bigtable.BigtableIO.BigtableSource; import java.io.IOException; import java.io.Serializable; diff --git a/sdks/java/core/src/main/java/com/google/cloud/dataflow/sdk/io/bigtable/BigtableServiceImpl.java b/ios/bigtable/src/main/java/org/apache/beam/io/bigtable/BigtableServiceImpl.java similarity index 94% rename from sdks/java/core/src/main/java/com/google/cloud/dataflow/sdk/io/bigtable/BigtableServiceImpl.java rename to ios/bigtable/src/main/java/org/apache/beam/io/bigtable/BigtableServiceImpl.java index 5ab85827ec05..cba15a96fd7f 100644 --- a/sdks/java/core/src/main/java/com/google/cloud/dataflow/sdk/io/bigtable/BigtableServiceImpl.java +++ b/ios/bigtable/src/main/java/org/apache/beam/io/bigtable/BigtableServiceImpl.java @@ -13,32 +13,24 @@ * License for the specific language governing permissions and limitations under * the License. 
*/ -package com.google.cloud.dataflow.sdk.io.bigtable; +package org.apache.beam.io.bigtable; import com.google.bigtable.admin.table.v1.GetTableRequest; -import com.google.bigtable.v1.MutateRowRequest; -import com.google.bigtable.v1.Mutation; -import com.google.bigtable.v1.ReadRowsRequest; -import com.google.bigtable.v1.Row; -import com.google.bigtable.v1.RowRange; -import com.google.bigtable.v1.SampleRowKeysRequest; -import com.google.bigtable.v1.SampleRowKeysResponse; +import com.google.bigtable.v1.*; import com.google.cloud.bigtable.config.BigtableOptions; import com.google.cloud.bigtable.grpc.BigtableSession; import com.google.cloud.bigtable.grpc.async.AsyncExecutor; import com.google.cloud.bigtable.grpc.async.HeapSizeManager; import com.google.cloud.bigtable.grpc.scanner.ResultScanner; -import com.google.cloud.dataflow.sdk.io.bigtable.BigtableIO.BigtableSource; import com.google.cloud.dataflow.sdk.values.KV; import com.google.common.base.MoreObjects; import com.google.common.io.Closer; import com.google.common.util.concurrent.ListenableFuture; import com.google.protobuf.ByteString; import com.google.protobuf.Empty; - import io.grpc.Status.Code; import io.grpc.StatusRuntimeException; - +import org.apache.beam.io.bigtable.BigtableIO.BigtableSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/sdks/java/core/src/main/java/com/google/cloud/dataflow/sdk/io/bigtable/package-info.java b/ios/bigtable/src/main/java/org/apache/beam/io/bigtable/package-info.java similarity index 93% rename from sdks/java/core/src/main/java/com/google/cloud/dataflow/sdk/io/bigtable/package-info.java rename to ios/bigtable/src/main/java/org/apache/beam/io/bigtable/package-info.java index 112a954d71f6..a4ed1df7055b 100644 --- a/sdks/java/core/src/main/java/com/google/cloud/dataflow/sdk/io/bigtable/package-info.java +++ b/ios/bigtable/src/main/java/org/apache/beam/io/bigtable/package-info.java @@ -19,4 +19,4 @@ * * @see com.google.cloud.dataflow.sdk.io.bigtable.BigtableIO */ -package com.google.cloud.dataflow.sdk.io.bigtable; +package org.apache.beam.io.bigtable; diff --git a/sdks/java/core/src/test/java/com/google/cloud/dataflow/sdk/io/bigtable/BigtableIOTest.java b/ios/bigtable/src/test/java/org/apache/beam/io/bigtable/BigtableIOTest.java similarity index 95% rename from sdks/java/core/src/test/java/com/google/cloud/dataflow/sdk/io/bigtable/BigtableIOTest.java rename to ios/bigtable/src/test/java/org/apache/beam/io/bigtable/BigtableIOTest.java index 0afac13e2962..502fa06e098e 100644 --- a/sdks/java/core/src/test/java/com/google/cloud/dataflow/sdk/io/bigtable/BigtableIOTest.java +++ b/ios/bigtable/src/test/java/org/apache/beam/io/bigtable/BigtableIOTest.java @@ -14,31 +14,13 @@ * the License. 
*/ -package com.google.cloud.dataflow.sdk.io.bigtable; +package org.apache.beam.io.bigtable; -import static com.google.cloud.dataflow.sdk.testing.SourceTestUtils.assertSourcesEqualReferenceSource; -import static com.google.cloud.dataflow.sdk.testing.SourceTestUtils.assertSplitAtFractionExhaustive; -import static com.google.cloud.dataflow.sdk.testing.SourceTestUtils.assertSplitAtFractionFails; -import static com.google.cloud.dataflow.sdk.testing.SourceTestUtils.assertSplitAtFractionSucceedsAndConsistent; -import static com.google.common.base.Preconditions.checkArgument; -import static com.google.common.base.Verify.verifyNotNull; -import static org.hamcrest.Matchers.hasSize; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertThat; - -import com.google.bigtable.v1.Cell; -import com.google.bigtable.v1.Column; -import com.google.bigtable.v1.Family; -import com.google.bigtable.v1.Mutation; +import com.google.bigtable.v1.*; import com.google.bigtable.v1.Mutation.SetCell; -import com.google.bigtable.v1.Row; -import com.google.bigtable.v1.RowFilter; -import com.google.bigtable.v1.SampleRowKeysResponse; import com.google.cloud.bigtable.config.BigtableOptions; import com.google.cloud.dataflow.sdk.Pipeline.PipelineExecutionException; import com.google.cloud.dataflow.sdk.coders.Coder; -import com.google.cloud.dataflow.sdk.io.bigtable.BigtableIO.BigtableSource; import com.google.cloud.dataflow.sdk.io.range.ByteKey; import com.google.cloud.dataflow.sdk.io.range.ByteKeyRange; import com.google.cloud.dataflow.sdk.testing.DataflowAssert; @@ -56,7 +38,7 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.protobuf.ByteString; import com.google.protobuf.Empty; - +import org.apache.beam.io.bigtable.BigtableIO.BigtableSource; import org.hamcrest.Matchers; import org.junit.Before; import org.junit.Ignore; @@ -66,19 +48,16 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; +import javax.annotation.Nullable; import java.io.IOException; import java.io.Serializable; -import java.util.ArrayList; -import java.util.Comparator; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.NoSuchElementException; -import java.util.SortedMap; -import java.util.TreeMap; +import java.util.*; -import javax.annotation.Nullable; +import static com.google.cloud.dataflow.sdk.testing.SourceTestUtils.*; +import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.base.Verify.verifyNotNull; +import static org.hamcrest.Matchers.hasSize; +import static org.junit.Assert.*; /** * Unit tests for {@link BigtableIO}. 
diff --git a/ios/pom.xml b/ios/pom.xml new file mode 100644 index 000000000000..f7d253faf19b --- /dev/null +++ b/ios/pom.xml @@ -0,0 +1,40 @@ + + + + + 4.0.0 + + + org.apache.beam + parent + 0.1.0-incubating-SNAPSHOT + ../pom.xml + + + ios-parent + pom + + Apache Beam :: IOs + + + bigtable + + + \ No newline at end of file diff --git a/pom.xml b/pom.xml index b79ddf65e1fa..69c55ad22f57 100644 --- a/pom.xml +++ b/pom.xml @@ -129,6 +129,7 @@ runners examples/java sdks/java/maven-archetypes + ios diff --git a/sdks/java/core/pom.xml b/sdks/java/core/pom.xml index 2b9e4a974feb..380ec49c5639 100644 --- a/sdks/java/core/pom.xml +++ b/sdks/java/core/pom.xml @@ -383,23 +383,6 @@ - - io.grpc - grpc-all - 0.12.0 - - - - com.google.cloud.bigtable - bigtable-protos - ${bigtable.version} - - - - com.google.cloud.bigtable - bigtable-client-core - ${bigtable.version} - com.google.api-client From a6702581243092828901a676709fcfc1f1a79c34 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Baptiste=20Onofr=C3=A9?= Date: Wed, 30 Mar 2016 18:32:18 +0200 Subject: [PATCH 2/2] [BEAM-77] Move hadoop contrib as hdfs IO --- contrib/hadoop/AUTHORS.md | 7 - contrib/hadoop/README.md | 24 --- contrib/hadoop/pom.xml | 169 ------------------ ios/hdfs/pom.xml | 88 +++++++++ .../beam/io/hdfs}/HadoopFileSource.java | 8 +- .../apache/beam/io/hdfs}/WritableCoder.java | 4 +- .../beam/io/hdfs}/HadoopFileSourceTest.java | 2 +- .../beam/io/hdfs}/WritableCoderTest.java | 2 +- ios/pom.xml | 1 + 9 files changed, 97 insertions(+), 208 deletions(-) delete mode 100644 contrib/hadoop/AUTHORS.md delete mode 100644 contrib/hadoop/README.md delete mode 100644 contrib/hadoop/pom.xml create mode 100644 ios/hdfs/pom.xml rename {contrib/hadoop/src/main/java/com/google/cloud/dataflow/contrib/hadoop => ios/hdfs/src/main/java/org/apache/beam/io/hdfs}/HadoopFileSource.java (98%) rename {contrib/hadoop/src/main/java/com/google/cloud/dataflow/contrib/hadoop => ios/hdfs/src/main/java/org/apache/beam/io/hdfs}/WritableCoder.java (96%) rename {contrib/hadoop/src/test/java/com/google/cloud/dataflow/contrib/hadoop => ios/hdfs/src/test/java/org/apache/beam/io/hdfs}/HadoopFileSourceTest.java (99%) rename {contrib/hadoop/src/test/java/com/google/cloud/dataflow/contrib/hadoop => ios/hdfs/src/test/java/org/apache/beam/io/hdfs}/WritableCoderTest.java (95%) diff --git a/contrib/hadoop/AUTHORS.md b/contrib/hadoop/AUTHORS.md deleted file mode 100644 index 6effdb917d19..000000000000 --- a/contrib/hadoop/AUTHORS.md +++ /dev/null @@ -1,7 +0,0 @@ -# Authors of 'hadoop' module - -The following is the official list of authors for copyright purposes of this community-contributed module. - - Cloudera - Tom White, tom [at] cloudera [dot] com - Google Inc. \ No newline at end of file diff --git a/contrib/hadoop/README.md b/contrib/hadoop/README.md deleted file mode 100644 index 49bbf980e80a..000000000000 --- a/contrib/hadoop/README.md +++ /dev/null @@ -1,24 +0,0 @@ -# Hadoop module - -This library provides Dataflow sources and sinks to make it possible to read and -write Apache Hadoop file formats from Dataflow pipelines. - -Currently, only the read path is implemented. A `HadoopFileSource` allows any -Hadoop `FileInputFormat` to be read as a `PCollection`. - -A `HadoopFileSource` can be read from using the -`com.google.cloud.dataflow.sdk.io.Read` transform. 
For example: - -```java -HadoopFileSource source = HadoopFileSource.from(path, MyInputFormat.class, - MyKey.class, MyValue.class); -PCollection> records = Read.from(mySource); -``` - -Alternatively, the `readFrom` method is a convenience method that returns a read -transform. For example: - -```java -PCollection> records = HadoopFileSource.readFrom(path, - MyInputFormat.class, MyKey.class, MyValue.class); -``` diff --git a/contrib/hadoop/pom.xml b/contrib/hadoop/pom.xml deleted file mode 100644 index 8e5a207d1215..000000000000 --- a/contrib/hadoop/pom.xml +++ /dev/null @@ -1,169 +0,0 @@ - - - - 4.0.0 - - com.google.cloud.dataflow - google-cloud-dataflow-java-contrib-hadoop - Google Cloud Dataflow Hadoop Library - Library to read and write Hadoop file formats from Dataflow. - 0.0.1-SNAPSHOT - jar - - - - Apache License, Version 2.0 - http://www.apache.org/licenses/LICENSE-2.0.txt - repo - - - - - UTF-8 - [1.2.0,2.0.0) - - - - - - org.apache.maven.plugins - maven-compiler-plugin - 3.2 - - 1.7 - 1.7 - - - - - org.apache.maven.plugins - maven-checkstyle-plugin - 2.12 - - - com.puppycrawl.tools - checkstyle - 6.6 - - - - ../../checkstyle.xml - true - true - true - - - - - check - - - - - - - - org.apache.maven.plugins - maven-source-plugin - 2.4 - - - attach-sources - compile - - jar - - - - attach-test-sources - test-compile - - test-jar - - - - - - - org.apache.maven.plugins - maven-javadoc-plugin - - Google Cloud Dataflow Hadoop Contrib - Google Cloud Dataflow Hadoop Contrib - - com.google.cloud.dataflow.contrib.hadoop - false - ]]> - - - - https://cloud.google.com/dataflow/java-sdk/JavaDoc/ - ${basedir}/../../javadoc/dataflow-sdk-docs - - - http://docs.guava-libraries.googlecode.com/git-history/release18/javadoc/ - ${basedir}/../../javadoc/guava-docs - - - - - - - jar - - package - - - - - - - - - com.google.cloud.dataflow - google-cloud-dataflow-java-sdk-all - ${google-cloud-dataflow-version} - - - - - org.apache.hadoop - hadoop-client - 2.7.0 - provided - - - - - org.hamcrest - hamcrest-all - 1.3 - test - - - - junit - junit - 4.11 - test - - - diff --git a/ios/hdfs/pom.xml b/ios/hdfs/pom.xml new file mode 100644 index 000000000000..c96b3897ceda --- /dev/null +++ b/ios/hdfs/pom.xml @@ -0,0 +1,88 @@ + + + + + 4.0.0 + + + org.apache.beam + ios-parent + 0.1.0-incubating-SNAPSHOT + ../pom.xml + + + hdfs + jar + + Apache Beam :: IOs :: HDFS + + + + org.apache.beam + java-sdk-all + 0.1.0-incubating-SNAPSHOT + + + + org.slf4j + slf4j-api + ${slf4j.version} + + + + org.apache.hadoop + hadoop-client + 2.7.0 + provided + + + + + org.apache.beam + java-sdk-all + 0.1.0-incubating-SNAPSHOT + tests + test + + + + org.slf4j + slf4j-jdk14 + ${slf4j.version} + test + + + + junit + junit + ${junit.version} + test + + + + org.hamcrest + hamcrest-all + ${hamcrest.version} + test + + + + + \ No newline at end of file diff --git a/contrib/hadoop/src/main/java/com/google/cloud/dataflow/contrib/hadoop/HadoopFileSource.java b/ios/hdfs/src/main/java/org/apache/beam/io/hdfs/HadoopFileSource.java similarity index 98% rename from contrib/hadoop/src/main/java/com/google/cloud/dataflow/contrib/hadoop/HadoopFileSource.java rename to ios/hdfs/src/main/java/org/apache/beam/io/hdfs/HadoopFileSource.java index f24c3b7bd823..ff23ac3d01d9 100644 --- a/contrib/hadoop/src/main/java/com/google/cloud/dataflow/contrib/hadoop/HadoopFileSource.java +++ b/ios/hdfs/src/main/java/org/apache/beam/io/hdfs/HadoopFileSource.java @@ -14,7 +14,7 @@ * the License. 
 */
 
-package com.google.cloud.dataflow.contrib.hadoop;
+package org.apache.beam.io.hdfs;
 
 import com.google.cloud.dataflow.sdk.coders.Coder;
 import com.google.cloud.dataflow.sdk.coders.KvCoder;
@@ -56,13 +56,13 @@
  * Hadoop file-based input format.
  *
  * <p>To read a {@link com.google.cloud.dataflow.sdk.values.PCollection} of
- * {@link com.google.cloud.dataflow.sdk.values.KV} key-value pairs from one or more
+ * {@link KV} key-value pairs from one or more
  * Hadoop files, use {@link HadoopFileSource#from} to specify the path(s) of the files to
  * read, the Hadoop {@link org.apache.hadoop.mapreduce.lib.input.FileInputFormat}, the
  * key class and the value class.
 *
  * <p>A {@code HadoopFileSource} can be read from using the
- * {@link com.google.cloud.dataflow.sdk.io.Read} transform. For example:
+ * {@link Read} transform. For example:
  *
  * <pre>
  *   {@code
@@ -268,7 +268,7 @@ public boolean producesSortedKeys(PipelineOptions options) throws Exception {
     return false;
   }
 
-  static class HadoopFileReader<K, V> extends BoundedSource.BoundedReader<KV<K, V>> {
+  static class HadoopFileReader<K, V> extends BoundedReader<KV<K, V>> {
 
     private final BoundedSource<KV<K, V>> source;
     private final String filepattern;
diff --git a/contrib/hadoop/src/main/java/com/google/cloud/dataflow/contrib/hadoop/WritableCoder.java b/ios/hdfs/src/main/java/org/apache/beam/io/hdfs/WritableCoder.java
similarity index 96%
rename from contrib/hadoop/src/main/java/com/google/cloud/dataflow/contrib/hadoop/WritableCoder.java
rename to ios/hdfs/src/main/java/org/apache/beam/io/hdfs/WritableCoder.java
index 5dba58d39c19..5704743ac38d 100644
--- a/contrib/hadoop/src/main/java/com/google/cloud/dataflow/contrib/hadoop/WritableCoder.java
+++ b/ios/hdfs/src/main/java/org/apache/beam/io/hdfs/WritableCoder.java
@@ -14,7 +14,7 @@
  * the License.
  */
 
-package com.google.cloud.dataflow.contrib.hadoop;
+package org.apache.beam.io.hdfs;
 
 import com.google.cloud.dataflow.sdk.coders.Coder;
 import com.google.cloud.dataflow.sdk.coders.CoderException;
@@ -31,7 +31,7 @@
 import java.util.List;
 
 /**
- * A {@code WritableCoder} is a {@link com.google.cloud.dataflow.sdk.coders.Coder} for a
+ * A {@code WritableCoder} is a {@link Coder} for a
  * Java class that implements {@link org.apache.hadoop.io.Writable}.
  *
  * <p>To use, specify the coder type on a PCollection:
diff --git a/contrib/hadoop/src/test/java/com/google/cloud/dataflow/contrib/hadoop/HadoopFileSourceTest.java b/ios/hdfs/src/test/java/org/apache/beam/io/hdfs/HadoopFileSourceTest.java
similarity index 99%
rename from contrib/hadoop/src/test/java/com/google/cloud/dataflow/contrib/hadoop/HadoopFileSourceTest.java
rename to ios/hdfs/src/test/java/org/apache/beam/io/hdfs/HadoopFileSourceTest.java
index cef3c0834852..bc355caa1a3c 100644
--- a/contrib/hadoop/src/test/java/com/google/cloud/dataflow/contrib/hadoop/HadoopFileSourceTest.java
+++ b/ios/hdfs/src/test/java/org/apache/beam/io/hdfs/HadoopFileSourceTest.java
@@ -14,7 +14,7 @@
  * the License.
  */
 
-package com.google.cloud.dataflow.contrib.hadoop;
+package org.apache.beam.io.hdfs;
 
 import static com.google.cloud.dataflow.sdk.testing.SourceTestUtils.readFromSource;
 import static org.hamcrest.Matchers.containsInAnyOrder;
diff --git a/contrib/hadoop/src/test/java/com/google/cloud/dataflow/contrib/hadoop/WritableCoderTest.java b/ios/hdfs/src/test/java/org/apache/beam/io/hdfs/WritableCoderTest.java
similarity index 95%
rename from contrib/hadoop/src/test/java/com/google/cloud/dataflow/contrib/hadoop/WritableCoderTest.java
rename to ios/hdfs/src/test/java/org/apache/beam/io/hdfs/WritableCoderTest.java
index 8eeb5e5167ad..1d8637c56114 100644
--- a/contrib/hadoop/src/test/java/com/google/cloud/dataflow/contrib/hadoop/WritableCoderTest.java
+++ b/ios/hdfs/src/test/java/org/apache/beam/io/hdfs/WritableCoderTest.java
@@ -14,7 +14,7 @@
  * the License.
  */
 
-package com.google.cloud.dataflow.contrib.hadoop;
+package org.apache.beam.io.hdfs;
 
 import com.google.cloud.dataflow.sdk.testing.CoderProperties;
 
diff --git a/ios/pom.xml b/ios/pom.xml
index f7d253faf19b..8c639b3ce7f1 100644
--- a/ios/pom.xml
+++ b/ios/pom.xml
@@ -35,6 +35,7 @@
 
   <modules>
     <module>bigtable</module>
+    <module>hdfs</module>
   </modules>
 
 </project>
\ No newline at end of file