From 3cb48ab7b1e3db770f927b08d9b61cca21911bd4 Mon Sep 17 00:00:00 2001 From: Cheng Lian Date: Fri, 17 Jul 2015 19:15:42 +0800 Subject: [PATCH 1/6] Tests for Avro enum values --- .gitignore | 1 + sql/core/src/test/avro/parquet-compat.avdl | 9 + sql/core/src/test/avro/parquet-compat.avpr | 10 + .../parquet/test/avro/CompatibilityTest.java | 2 +- .../parquet/test/avro/ParquetAvroCompat.java | 332 ++++++++++++------ .../ParquetAvroCompatibilitySuite.scala | 7 +- 6 files changed, 247 insertions(+), 114 deletions(-) diff --git a/.gitignore b/.gitignore index debad77ec2ad3..b60b707f221e2 100644 --- a/.gitignore +++ b/.gitignore @@ -74,3 +74,4 @@ metastore/ warehouse/ TempStatsStore/ sql/hive-thriftserver/test_warehouses +sql/core/src/test/gen-java/ diff --git a/sql/core/src/test/avro/parquet-compat.avdl b/sql/core/src/test/avro/parquet-compat.avdl index 24729f6143e6c..6445742487c61 100644 --- a/sql/core/src/test/avro/parquet-compat.avdl +++ b/sql/core/src/test/avro/parquet-compat.avdl @@ -18,6 +18,13 @@ // This is a test protocol for testing parquet-avro compatibility. 
@namespace("org.apache.spark.sql.parquet.test.avro") protocol CompatibilityTest { + enum Suit { + SPADES, + HEARTS, + DIAMONDS, + CLUBS + } + record Nested { array nested_ints_column; string nested_string_column; @@ -31,6 +38,7 @@ protocol CompatibilityTest { double double_column; bytes binary_column; string string_column; + Suit enum_column; union { null, boolean } maybe_bool_column; union { null, int } maybe_int_column; @@ -39,6 +47,7 @@ union { null, double } maybe_double_column; union { null, bytes } maybe_binary_column; union { null, string } maybe_string_column; + union { null, Suit } maybe_enum_column; array strings_column; map string_to_int_column; diff --git a/sql/core/src/test/avro/parquet-compat.avpr b/sql/core/src/test/avro/parquet-compat.avpr index a83b7c990dd2e..9daaa6c500732 100644 --- a/sql/core/src/test/avro/parquet-compat.avpr +++ b/sql/core/src/test/avro/parquet-compat.avpr @@ -2,6 +2,10 @@ "protocol" : "CompatibilityTest", "namespace" : "org.apache.spark.sql.parquet.test.avro", "types" : [ { + "type" : "enum", + "name" : "Suit", + "symbols" : [ "SPADES", "HEARTS", "DIAMONDS", "CLUBS" ] + }, { "type" : "record", "name" : "Nested", "fields" : [ { @@ -38,6 +42,9 @@ }, { "name" : "string_column", "type" : "string" + }, { + "name" : "enum_column", + "type" : "Suit" }, { "name" : "maybe_bool_column", "type" : [ "null", "boolean" ] @@ -59,6 +66,9 @@ }, { "name" : "maybe_string_column", "type" : [ "null", "string" ] + }, { + "name" : "maybe_enum_column", + "type" : [ "null", "Suit" ] }, { "name" : "strings_column", "type" : { diff --git a/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/CompatibilityTest.java b/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/CompatibilityTest.java index daec65a5bbe57..70889e76b6a11 100644 --- a/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/CompatibilityTest.java +++ b/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/CompatibilityTest.java
@@ -8,7 +8,7 @@ @SuppressWarnings("all") @org.apache.avro.specific.AvroGenerated public interface CompatibilityTest { - public static final org.apache.avro.Protocol PROTOCOL = org.apache.avro.Protocol.parse("{\"protocol\":\"CompatibilityTest\",\"namespace\":\"org.apache.spark.sql.parquet.test.avro\",\"types\":[{\"type\":\"record\",\"name\":\"Nested\",\"fields\":[{\"name\":\"nested_ints_column\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"nested_string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]},{\"type\":\"record\",\"name\":\"ParquetAvroCompat\",\"fields\":[{\"name\":\"bool_column\",\"type\":\"boolean\"},{\"name\":\"int_column\",\"type\":\"int\"},{\"name\":\"long_column\",\"type\":\"long\"},{\"name\":\"float_column\",\"type\":\"float\"},{\"name\":\"double_column\",\"type\":\"double\"},{\"name\":\"binary_column\",\"type\":\"bytes\"},{\"name\":\"string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"maybe_bool_column\",\"type\":[\"null\",\"boolean\"]},{\"name\":\"maybe_int_column\",\"type\":[\"null\",\"int\"]},{\"name\":\"maybe_long_column\",\"type\":[\"null\",\"long\"]},{\"name\":\"maybe_float_column\",\"type\":[\"null\",\"float\"]},{\"name\":\"maybe_double_column\",\"type\":[\"null\",\"double\"]},{\"name\":\"maybe_binary_column\",\"type\":[\"null\",\"bytes\"]},{\"name\":\"maybe_string_column\",\"type\":[\"null\",{\"type\":\"string\",\"avro.java.string\":\"String\"}]},{\"name\":\"strings_column\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}},{\"name\":\"string_to_int_column\",\"type\":{\"type\":\"map\",\"values\":\"int\",\"avro.java.string\":\"String\"}},{\"name\":\"complex_column\",\"type\":{\"type\":\"map\",\"values\":{\"type\":\"array\",\"items\":\"Nested\"},\"avro.java.string\":\"String\"}}]}],\"messages\":{}}"); + public static final org.apache.avro.Protocol PROTOCOL = 
org.apache.avro.Protocol.parse("{\"protocol\":\"CompatibilityTest\",\"namespace\":\"org.apache.spark.sql.parquet.test.avro\",\"types\":[{\"type\":\"enum\",\"name\":\"Suit\",\"symbols\":[\"SPADES\",\"HEARTS\",\"DIAMONDS\",\"CLUBS\"]},{\"type\":\"record\",\"name\":\"Nested\",\"fields\":[{\"name\":\"nested_ints_column\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"nested_string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]},{\"type\":\"record\",\"name\":\"ParquetAvroCompat\",\"fields\":[{\"name\":\"bool_column\",\"type\":\"boolean\"},{\"name\":\"int_column\",\"type\":\"int\"},{\"name\":\"long_column\",\"type\":\"long\"},{\"name\":\"float_column\",\"type\":\"float\"},{\"name\":\"double_column\",\"type\":\"double\"},{\"name\":\"binary_column\",\"type\":\"bytes\"},{\"name\":\"string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"enum_column\",\"type\":\"Suit\"},{\"name\":\"maybe_bool_column\",\"type\":[\"null\",\"boolean\"]},{\"name\":\"maybe_int_column\",\"type\":[\"null\",\"int\"]},{\"name\":\"maybe_long_column\",\"type\":[\"null\",\"long\"]},{\"name\":\"maybe_float_column\",\"type\":[\"null\",\"float\"]},{\"name\":\"maybe_double_column\",\"type\":[\"null\",\"double\"]},{\"name\":\"maybe_binary_column\",\"type\":[\"null\",\"bytes\"]},{\"name\":\"maybe_string_column\",\"type\":[\"null\",{\"type\":\"string\",\"avro.java.string\":\"String\"}]},{\"name\":\"maybe_enum_column\",\"type\":[\"null\",\"Suit\"]},{\"name\":\"strings_column\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}},{\"name\":\"string_to_int_column\",\"type\":{\"type\":\"map\",\"values\":\"int\",\"avro.java.string\":\"String\"}},{\"name\":\"complex_column\",\"type\":{\"type\":\"map\",\"values\":{\"type\":\"array\",\"items\":\"Nested\"},\"avro.java.string\":\"String\"}}]}],\"messages\":{}}"); @SuppressWarnings("all") public interface Callback extends CompatibilityTest { diff --git 
a/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/ParquetAvroCompat.java b/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/ParquetAvroCompat.java index 354c9d73cca31..ea6b32fe1dfb8 100644 --- a/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/ParquetAvroCompat.java +++ b/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/ParquetAvroCompat.java @@ -7,7 +7,7 @@ @SuppressWarnings("all") @org.apache.avro.specific.AvroGenerated public class ParquetAvroCompat extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"ParquetAvroCompat\",\"namespace\":\"org.apache.spark.sql.parquet.test.avro\",\"fields\":[{\"name\":\"bool_column\",\"type\":\"boolean\"},{\"name\":\"int_column\",\"type\":\"int\"},{\"name\":\"long_column\",\"type\":\"long\"},{\"name\":\"float_column\",\"type\":\"float\"},{\"name\":\"double_column\",\"type\":\"double\"},{\"name\":\"binary_column\",\"type\":\"bytes\"},{\"name\":\"string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"maybe_bool_column\",\"type\":[\"null\",\"boolean\"]},{\"name\":\"maybe_int_column\",\"type\":[\"null\",\"int\"]},{\"name\":\"maybe_long_column\",\"type\":[\"null\",\"long\"]},{\"name\":\"maybe_float_column\",\"type\":[\"null\",\"float\"]},{\"name\":\"maybe_double_column\",\"type\":[\"null\",\"double\"]},{\"name\":\"maybe_binary_column\",\"type\":[\"null\",\"bytes\"]},{\"name\":\"maybe_string_column\",\"type\":[\"null\",{\"type\":\"string\",\"avro.java.string\":\"String\"}]},{\"name\":\"strings_column\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}},{\"name\":\"string_to_int_column\",\"type\":{\"type\":\"map\",\"values\":\"int\",\"avro.java.string\":\"String\"}},{\"name\":\"complex_column\",\"type\":{\"type\":
\"map\",\"values\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Nested\",\"fields\":[{\"name\":\"nested_ints_column\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"nested_string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]}},\"avro.java.string\":\"String\"}}]}"); + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"ParquetAvroCompat\",\"namespace\":\"org.apache.spark.sql.parquet.test.avro\",\"fields\":[{\"name\":\"bool_column\",\"type\":\"boolean\"},{\"name\":\"int_column\",\"type\":\"int\"},{\"name\":\"long_column\",\"type\":\"long\"},{\"name\":\"float_column\",\"type\":\"float\"},{\"name\":\"double_column\",\"type\":\"double\"},{\"name\":\"binary_column\",\"type\":\"bytes\"},{\"name\":\"string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"enum_column\",\"type\":{\"type\":\"enum\",\"name\":\"Suit\",\"symbols\":[\"SPADES\",\"HEARTS\",\"DIAMONDS\",\"CLUBS\"]}},{\"name\":\"maybe_bool_column\",\"type\":[\"null\",\"boolean\"]},{\"name\":\"maybe_int_column\",\"type\":[\"null\",\"int\"]},{\"name\":\"maybe_long_column\",\"type\":[\"null\",\"long\"]},{\"name\":\"maybe_float_column\",\"type\":[\"null\",\"float\"]},{\"name\":\"maybe_double_column\",\"type\":[\"null\",\"double\"]},{\"name\":\"maybe_binary_column\",\"type\":[\"null\",\"bytes\"]},{\"name\":\"maybe_string_column\",\"type\":[\"null\",{\"type\":\"string\",\"avro.java.string\":\"String\"}]},{\"name\":\"maybe_enum_column\",\"type\":[\"null\",\"Suit\"]},{\"name\":\"strings_column\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}},{\"name\":\"string_to_int_column\",\"type\":{\"type\":\"map\",\"values\":\"int\",\"avro.java.string\":\"String\"}},{\"name\":\"complex_column\",\"type\":{\"type\":\"map\",\"values\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Nested\",\"fields\":[{\"name\"
:\"nested_ints_column\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"nested_string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]}},\"avro.java.string\":\"String\"}}]}"); public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } @Deprecated public boolean bool_column; @Deprecated public int int_column; @@ -16,6 +16,7 @@ public class ParquetAvroCompat extends org.apache.avro.specific.SpecificRecordBa @Deprecated public double double_column; @Deprecated public java.nio.ByteBuffer binary_column; @Deprecated public java.lang.String string_column; + @Deprecated public org.apache.spark.sql.parquet.test.avro.Suit enum_column; @Deprecated public java.lang.Boolean maybe_bool_column; @Deprecated public java.lang.Integer maybe_int_column; @Deprecated public java.lang.Long maybe_long_column; @@ -23,6 +24,7 @@ public class ParquetAvroCompat extends org.apache.avro.specific.SpecificRecordBa @Deprecated public java.lang.Double maybe_double_column; @Deprecated public java.nio.ByteBuffer maybe_binary_column; @Deprecated public java.lang.String maybe_string_column; + @Deprecated public org.apache.spark.sql.parquet.test.avro.Suit maybe_enum_column; @Deprecated public java.util.List strings_column; @Deprecated public java.util.Map string_to_int_column; @Deprecated public java.util.Map> complex_column; @@ -37,7 +39,7 @@ public ParquetAvroCompat() {} /** * All-args constructor. 
*/ - public ParquetAvroCompat(java.lang.Boolean bool_column, java.lang.Integer int_column, java.lang.Long long_column, java.lang.Float float_column, java.lang.Double double_column, java.nio.ByteBuffer binary_column, java.lang.String string_column, java.lang.Boolean maybe_bool_column, java.lang.Integer maybe_int_column, java.lang.Long maybe_long_column, java.lang.Float maybe_float_column, java.lang.Double maybe_double_column, java.nio.ByteBuffer maybe_binary_column, java.lang.String maybe_string_column, java.util.List strings_column, java.util.Map string_to_int_column, java.util.Map> complex_column) { + public ParquetAvroCompat(java.lang.Boolean bool_column, java.lang.Integer int_column, java.lang.Long long_column, java.lang.Float float_column, java.lang.Double double_column, java.nio.ByteBuffer binary_column, java.lang.String string_column, org.apache.spark.sql.parquet.test.avro.Suit enum_column, java.lang.Boolean maybe_bool_column, java.lang.Integer maybe_int_column, java.lang.Long maybe_long_column, java.lang.Float maybe_float_column, java.lang.Double maybe_double_column, java.nio.ByteBuffer maybe_binary_column, java.lang.String maybe_string_column, org.apache.spark.sql.parquet.test.avro.Suit maybe_enum_column, java.util.List strings_column, java.util.Map string_to_int_column, java.util.Map> complex_column) { this.bool_column = bool_column; this.int_column = int_column; this.long_column = long_column; @@ -45,6 +47,7 @@ public ParquetAvroCompat(java.lang.Boolean bool_column, java.lang.Integer int_co this.double_column = double_column; this.binary_column = binary_column; this.string_column = string_column; + this.enum_column = enum_column; this.maybe_bool_column = maybe_bool_column; this.maybe_int_column = maybe_int_column; this.maybe_long_column = maybe_long_column; @@ -52,6 +55,7 @@ public ParquetAvroCompat(java.lang.Boolean bool_column, java.lang.Integer int_co this.maybe_double_column = maybe_double_column; this.maybe_binary_column = maybe_binary_column; 
this.maybe_string_column = maybe_string_column; + this.maybe_enum_column = maybe_enum_column; this.strings_column = strings_column; this.string_to_int_column = string_to_int_column; this.complex_column = complex_column; @@ -68,16 +72,18 @@ public java.lang.Object get(int field$) { case 4: return double_column; case 5: return binary_column; case 6: return string_column; - case 7: return maybe_bool_column; - case 8: return maybe_int_column; - case 9: return maybe_long_column; - case 10: return maybe_float_column; - case 11: return maybe_double_column; - case 12: return maybe_binary_column; - case 13: return maybe_string_column; - case 14: return strings_column; - case 15: return string_to_int_column; - case 16: return complex_column; + case 7: return enum_column; + case 8: return maybe_bool_column; + case 9: return maybe_int_column; + case 10: return maybe_long_column; + case 11: return maybe_float_column; + case 12: return maybe_double_column; + case 13: return maybe_binary_column; + case 14: return maybe_string_column; + case 15: return maybe_enum_column; + case 16: return strings_column; + case 17: return string_to_int_column; + case 18: return complex_column; default: throw new org.apache.avro.AvroRuntimeException("Bad index"); } } @@ -92,16 +98,18 @@ public void put(int field$, java.lang.Object value$) { case 4: double_column = (java.lang.Double)value$; break; case 5: binary_column = (java.nio.ByteBuffer)value$; break; case 6: string_column = (java.lang.String)value$; break; - case 7: maybe_bool_column = (java.lang.Boolean)value$; break; - case 8: maybe_int_column = (java.lang.Integer)value$; break; - case 9: maybe_long_column = (java.lang.Long)value$; break; - case 10: maybe_float_column = (java.lang.Float)value$; break; - case 11: maybe_double_column = (java.lang.Double)value$; break; - case 12: maybe_binary_column = (java.nio.ByteBuffer)value$; break; - case 13: maybe_string_column = (java.lang.String)value$; break; - case 14: strings_column = 
(java.util.List)value$; break; - case 15: string_to_int_column = (java.util.Map)value$; break; - case 16: complex_column = (java.util.Map>)value$; break; + case 7: enum_column = (org.apache.spark.sql.parquet.test.avro.Suit)value$; break; + case 8: maybe_bool_column = (java.lang.Boolean)value$; break; + case 9: maybe_int_column = (java.lang.Integer)value$; break; + case 10: maybe_long_column = (java.lang.Long)value$; break; + case 11: maybe_float_column = (java.lang.Float)value$; break; + case 12: maybe_double_column = (java.lang.Double)value$; break; + case 13: maybe_binary_column = (java.nio.ByteBuffer)value$; break; + case 14: maybe_string_column = (java.lang.String)value$; break; + case 15: maybe_enum_column = (org.apache.spark.sql.parquet.test.avro.Suit)value$; break; + case 16: strings_column = (java.util.List)value$; break; + case 17: string_to_int_column = (java.util.Map)value$; break; + case 18: complex_column = (java.util.Map>)value$; break; default: throw new org.apache.avro.AvroRuntimeException("Bad index"); } } @@ -211,6 +219,21 @@ public void setStringColumn(java.lang.String value) { this.string_column = value; } + /** + * Gets the value of the 'enum_column' field. + */ + public org.apache.spark.sql.parquet.test.avro.Suit getEnumColumn() { + return enum_column; + } + + /** + * Sets the value of the 'enum_column' field. + * @param value the value to set. + */ + public void setEnumColumn(org.apache.spark.sql.parquet.test.avro.Suit value) { + this.enum_column = value; + } + /** * Gets the value of the 'maybe_bool_column' field. */ @@ -316,6 +339,21 @@ public void setMaybeStringColumn(java.lang.String value) { this.maybe_string_column = value; } + /** + * Gets the value of the 'maybe_enum_column' field. + */ + public org.apache.spark.sql.parquet.test.avro.Suit getMaybeEnumColumn() { + return maybe_enum_column; + } + + /** + * Sets the value of the 'maybe_enum_column' field. + * @param value the value to set. 
+ */ + public void setMaybeEnumColumn(org.apache.spark.sql.parquet.test.avro.Suit value) { + this.maybe_enum_column = value; + } + /** * Gets the value of the 'strings_column' field. */ @@ -389,6 +427,7 @@ public static class Builder extends org.apache.avro.specific.SpecificRecordBuild private double double_column; private java.nio.ByteBuffer binary_column; private java.lang.String string_column; + private org.apache.spark.sql.parquet.test.avro.Suit enum_column; private java.lang.Boolean maybe_bool_column; private java.lang.Integer maybe_int_column; private java.lang.Long maybe_long_column; @@ -396,6 +435,7 @@ public static class Builder extends org.apache.avro.specific.SpecificRecordBuild private java.lang.Double maybe_double_column; private java.nio.ByteBuffer maybe_binary_column; private java.lang.String maybe_string_column; + private org.apache.spark.sql.parquet.test.avro.Suit maybe_enum_column; private java.util.List strings_column; private java.util.Map string_to_int_column; private java.util.Map> complex_column; @@ -436,46 +476,54 @@ private Builder(org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder this.string_column = data().deepCopy(fields()[6].schema(), other.string_column); fieldSetFlags()[6] = true; } - if (isValidValue(fields()[7], other.maybe_bool_column)) { - this.maybe_bool_column = data().deepCopy(fields()[7].schema(), other.maybe_bool_column); + if (isValidValue(fields()[7], other.enum_column)) { + this.enum_column = data().deepCopy(fields()[7].schema(), other.enum_column); fieldSetFlags()[7] = true; } - if (isValidValue(fields()[8], other.maybe_int_column)) { - this.maybe_int_column = data().deepCopy(fields()[8].schema(), other.maybe_int_column); + if (isValidValue(fields()[8], other.maybe_bool_column)) { + this.maybe_bool_column = data().deepCopy(fields()[8].schema(), other.maybe_bool_column); fieldSetFlags()[8] = true; } - if (isValidValue(fields()[9], other.maybe_long_column)) { - this.maybe_long_column = 
data().deepCopy(fields()[9].schema(), other.maybe_long_column); + if (isValidValue(fields()[9], other.maybe_int_column)) { + this.maybe_int_column = data().deepCopy(fields()[9].schema(), other.maybe_int_column); fieldSetFlags()[9] = true; } - if (isValidValue(fields()[10], other.maybe_float_column)) { - this.maybe_float_column = data().deepCopy(fields()[10].schema(), other.maybe_float_column); + if (isValidValue(fields()[10], other.maybe_long_column)) { + this.maybe_long_column = data().deepCopy(fields()[10].schema(), other.maybe_long_column); fieldSetFlags()[10] = true; } - if (isValidValue(fields()[11], other.maybe_double_column)) { - this.maybe_double_column = data().deepCopy(fields()[11].schema(), other.maybe_double_column); + if (isValidValue(fields()[11], other.maybe_float_column)) { + this.maybe_float_column = data().deepCopy(fields()[11].schema(), other.maybe_float_column); fieldSetFlags()[11] = true; } - if (isValidValue(fields()[12], other.maybe_binary_column)) { - this.maybe_binary_column = data().deepCopy(fields()[12].schema(), other.maybe_binary_column); + if (isValidValue(fields()[12], other.maybe_double_column)) { + this.maybe_double_column = data().deepCopy(fields()[12].schema(), other.maybe_double_column); fieldSetFlags()[12] = true; } - if (isValidValue(fields()[13], other.maybe_string_column)) { - this.maybe_string_column = data().deepCopy(fields()[13].schema(), other.maybe_string_column); + if (isValidValue(fields()[13], other.maybe_binary_column)) { + this.maybe_binary_column = data().deepCopy(fields()[13].schema(), other.maybe_binary_column); fieldSetFlags()[13] = true; } - if (isValidValue(fields()[14], other.strings_column)) { - this.strings_column = data().deepCopy(fields()[14].schema(), other.strings_column); + if (isValidValue(fields()[14], other.maybe_string_column)) { + this.maybe_string_column = data().deepCopy(fields()[14].schema(), other.maybe_string_column); fieldSetFlags()[14] = true; } - if (isValidValue(fields()[15], 
other.string_to_int_column)) { - this.string_to_int_column = data().deepCopy(fields()[15].schema(), other.string_to_int_column); + if (isValidValue(fields()[15], other.maybe_enum_column)) { + this.maybe_enum_column = data().deepCopy(fields()[15].schema(), other.maybe_enum_column); fieldSetFlags()[15] = true; } - if (isValidValue(fields()[16], other.complex_column)) { - this.complex_column = data().deepCopy(fields()[16].schema(), other.complex_column); + if (isValidValue(fields()[16], other.strings_column)) { + this.strings_column = data().deepCopy(fields()[16].schema(), other.strings_column); fieldSetFlags()[16] = true; } + if (isValidValue(fields()[17], other.string_to_int_column)) { + this.string_to_int_column = data().deepCopy(fields()[17].schema(), other.string_to_int_column); + fieldSetFlags()[17] = true; + } + if (isValidValue(fields()[18], other.complex_column)) { + this.complex_column = data().deepCopy(fields()[18].schema(), other.complex_column); + fieldSetFlags()[18] = true; + } } /** Creates a Builder by copying an existing ParquetAvroCompat instance */ @@ -509,46 +557,54 @@ private Builder(org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat other) this.string_column = data().deepCopy(fields()[6].schema(), other.string_column); fieldSetFlags()[6] = true; } - if (isValidValue(fields()[7], other.maybe_bool_column)) { - this.maybe_bool_column = data().deepCopy(fields()[7].schema(), other.maybe_bool_column); + if (isValidValue(fields()[7], other.enum_column)) { + this.enum_column = data().deepCopy(fields()[7].schema(), other.enum_column); fieldSetFlags()[7] = true; } - if (isValidValue(fields()[8], other.maybe_int_column)) { - this.maybe_int_column = data().deepCopy(fields()[8].schema(), other.maybe_int_column); + if (isValidValue(fields()[8], other.maybe_bool_column)) { + this.maybe_bool_column = data().deepCopy(fields()[8].schema(), other.maybe_bool_column); fieldSetFlags()[8] = true; } - if (isValidValue(fields()[9], other.maybe_long_column)) { - 
this.maybe_long_column = data().deepCopy(fields()[9].schema(), other.maybe_long_column); + if (isValidValue(fields()[9], other.maybe_int_column)) { + this.maybe_int_column = data().deepCopy(fields()[9].schema(), other.maybe_int_column); fieldSetFlags()[9] = true; } - if (isValidValue(fields()[10], other.maybe_float_column)) { - this.maybe_float_column = data().deepCopy(fields()[10].schema(), other.maybe_float_column); + if (isValidValue(fields()[10], other.maybe_long_column)) { + this.maybe_long_column = data().deepCopy(fields()[10].schema(), other.maybe_long_column); fieldSetFlags()[10] = true; } - if (isValidValue(fields()[11], other.maybe_double_column)) { - this.maybe_double_column = data().deepCopy(fields()[11].schema(), other.maybe_double_column); + if (isValidValue(fields()[11], other.maybe_float_column)) { + this.maybe_float_column = data().deepCopy(fields()[11].schema(), other.maybe_float_column); fieldSetFlags()[11] = true; } - if (isValidValue(fields()[12], other.maybe_binary_column)) { - this.maybe_binary_column = data().deepCopy(fields()[12].schema(), other.maybe_binary_column); + if (isValidValue(fields()[12], other.maybe_double_column)) { + this.maybe_double_column = data().deepCopy(fields()[12].schema(), other.maybe_double_column); fieldSetFlags()[12] = true; } - if (isValidValue(fields()[13], other.maybe_string_column)) { - this.maybe_string_column = data().deepCopy(fields()[13].schema(), other.maybe_string_column); + if (isValidValue(fields()[13], other.maybe_binary_column)) { + this.maybe_binary_column = data().deepCopy(fields()[13].schema(), other.maybe_binary_column); fieldSetFlags()[13] = true; } - if (isValidValue(fields()[14], other.strings_column)) { - this.strings_column = data().deepCopy(fields()[14].schema(), other.strings_column); + if (isValidValue(fields()[14], other.maybe_string_column)) { + this.maybe_string_column = data().deepCopy(fields()[14].schema(), other.maybe_string_column); fieldSetFlags()[14] = true; } - if 
(isValidValue(fields()[15], other.string_to_int_column)) { - this.string_to_int_column = data().deepCopy(fields()[15].schema(), other.string_to_int_column); + if (isValidValue(fields()[15], other.maybe_enum_column)) { + this.maybe_enum_column = data().deepCopy(fields()[15].schema(), other.maybe_enum_column); fieldSetFlags()[15] = true; } - if (isValidValue(fields()[16], other.complex_column)) { - this.complex_column = data().deepCopy(fields()[16].schema(), other.complex_column); + if (isValidValue(fields()[16], other.strings_column)) { + this.strings_column = data().deepCopy(fields()[16].schema(), other.strings_column); fieldSetFlags()[16] = true; } + if (isValidValue(fields()[17], other.string_to_int_column)) { + this.string_to_int_column = data().deepCopy(fields()[17].schema(), other.string_to_int_column); + fieldSetFlags()[17] = true; + } + if (isValidValue(fields()[18], other.complex_column)) { + this.complex_column = data().deepCopy(fields()[18].schema(), other.complex_column); + fieldSetFlags()[18] = true; + } } /** Gets the value of the 'bool_column' field */ @@ -721,6 +777,31 @@ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder clearStr return this; } + /** Gets the value of the 'enum_column' field */ + public org.apache.spark.sql.parquet.test.avro.Suit getEnumColumn() { + return enum_column; + } + + /** Sets the value of the 'enum_column' field */ + public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder setEnumColumn(org.apache.spark.sql.parquet.test.avro.Suit value) { + validate(fields()[7], value); + this.enum_column = value; + fieldSetFlags()[7] = true; + return this; + } + + /** Checks whether the 'enum_column' field has been set */ + public boolean hasEnumColumn() { + return fieldSetFlags()[7]; + } + + /** Clears the value of the 'enum_column' field */ + public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder clearEnumColumn() { + enum_column = null; + fieldSetFlags()[7] = false; + return this; + 
} + /** Gets the value of the 'maybe_bool_column' field */ public java.lang.Boolean getMaybeBoolColumn() { return maybe_bool_column; @@ -728,21 +809,21 @@ public java.lang.Boolean getMaybeBoolColumn() { /** Sets the value of the 'maybe_bool_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder setMaybeBoolColumn(java.lang.Boolean value) { - validate(fields()[7], value); + validate(fields()[8], value); this.maybe_bool_column = value; - fieldSetFlags()[7] = true; + fieldSetFlags()[8] = true; return this; } /** Checks whether the 'maybe_bool_column' field has been set */ public boolean hasMaybeBoolColumn() { - return fieldSetFlags()[7]; + return fieldSetFlags()[8]; } /** Clears the value of the 'maybe_bool_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder clearMaybeBoolColumn() { maybe_bool_column = null; - fieldSetFlags()[7] = false; + fieldSetFlags()[8] = false; return this; } @@ -753,21 +834,21 @@ public java.lang.Integer getMaybeIntColumn() { /** Sets the value of the 'maybe_int_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder setMaybeIntColumn(java.lang.Integer value) { - validate(fields()[8], value); + validate(fields()[9], value); this.maybe_int_column = value; - fieldSetFlags()[8] = true; + fieldSetFlags()[9] = true; return this; } /** Checks whether the 'maybe_int_column' field has been set */ public boolean hasMaybeIntColumn() { - return fieldSetFlags()[8]; + return fieldSetFlags()[9]; } /** Clears the value of the 'maybe_int_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder clearMaybeIntColumn() { maybe_int_column = null; - fieldSetFlags()[8] = false; + fieldSetFlags()[9] = false; return this; } @@ -778,21 +859,21 @@ public java.lang.Long getMaybeLongColumn() { /** Sets the value of the 'maybe_long_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder 
setMaybeLongColumn(java.lang.Long value) { - validate(fields()[9], value); + validate(fields()[10], value); this.maybe_long_column = value; - fieldSetFlags()[9] = true; + fieldSetFlags()[10] = true; return this; } /** Checks whether the 'maybe_long_column' field has been set */ public boolean hasMaybeLongColumn() { - return fieldSetFlags()[9]; + return fieldSetFlags()[10]; } /** Clears the value of the 'maybe_long_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder clearMaybeLongColumn() { maybe_long_column = null; - fieldSetFlags()[9] = false; + fieldSetFlags()[10] = false; return this; } @@ -803,21 +884,21 @@ public java.lang.Float getMaybeFloatColumn() { /** Sets the value of the 'maybe_float_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder setMaybeFloatColumn(java.lang.Float value) { - validate(fields()[10], value); + validate(fields()[11], value); this.maybe_float_column = value; - fieldSetFlags()[10] = true; + fieldSetFlags()[11] = true; return this; } /** Checks whether the 'maybe_float_column' field has been set */ public boolean hasMaybeFloatColumn() { - return fieldSetFlags()[10]; + return fieldSetFlags()[11]; } /** Clears the value of the 'maybe_float_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder clearMaybeFloatColumn() { maybe_float_column = null; - fieldSetFlags()[10] = false; + fieldSetFlags()[11] = false; return this; } @@ -828,21 +909,21 @@ public java.lang.Double getMaybeDoubleColumn() { /** Sets the value of the 'maybe_double_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder setMaybeDoubleColumn(java.lang.Double value) { - validate(fields()[11], value); + validate(fields()[12], value); this.maybe_double_column = value; - fieldSetFlags()[11] = true; + fieldSetFlags()[12] = true; return this; } /** Checks whether the 'maybe_double_column' field has been set */ public boolean 
hasMaybeDoubleColumn() { - return fieldSetFlags()[11]; + return fieldSetFlags()[12]; } /** Clears the value of the 'maybe_double_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder clearMaybeDoubleColumn() { maybe_double_column = null; - fieldSetFlags()[11] = false; + fieldSetFlags()[12] = false; return this; } @@ -853,21 +934,21 @@ public java.nio.ByteBuffer getMaybeBinaryColumn() { /** Sets the value of the 'maybe_binary_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder setMaybeBinaryColumn(java.nio.ByteBuffer value) { - validate(fields()[12], value); + validate(fields()[13], value); this.maybe_binary_column = value; - fieldSetFlags()[12] = true; + fieldSetFlags()[13] = true; return this; } /** Checks whether the 'maybe_binary_column' field has been set */ public boolean hasMaybeBinaryColumn() { - return fieldSetFlags()[12]; + return fieldSetFlags()[13]; } /** Clears the value of the 'maybe_binary_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder clearMaybeBinaryColumn() { maybe_binary_column = null; - fieldSetFlags()[12] = false; + fieldSetFlags()[13] = false; return this; } @@ -878,21 +959,46 @@ public java.lang.String getMaybeStringColumn() { /** Sets the value of the 'maybe_string_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder setMaybeStringColumn(java.lang.String value) { - validate(fields()[13], value); + validate(fields()[14], value); this.maybe_string_column = value; - fieldSetFlags()[13] = true; + fieldSetFlags()[14] = true; return this; } /** Checks whether the 'maybe_string_column' field has been set */ public boolean hasMaybeStringColumn() { - return fieldSetFlags()[13]; + return fieldSetFlags()[14]; } /** Clears the value of the 'maybe_string_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder clearMaybeStringColumn() { maybe_string_column = null; - 
fieldSetFlags()[13] = false; + fieldSetFlags()[14] = false; + return this; + } + + /** Gets the value of the 'maybe_enum_column' field */ + public org.apache.spark.sql.parquet.test.avro.Suit getMaybeEnumColumn() { + return maybe_enum_column; + } + + /** Sets the value of the 'maybe_enum_column' field */ + public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder setMaybeEnumColumn(org.apache.spark.sql.parquet.test.avro.Suit value) { + validate(fields()[15], value); + this.maybe_enum_column = value; + fieldSetFlags()[15] = true; + return this; + } + + /** Checks whether the 'maybe_enum_column' field has been set */ + public boolean hasMaybeEnumColumn() { + return fieldSetFlags()[15]; + } + + /** Clears the value of the 'maybe_enum_column' field */ + public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder clearMaybeEnumColumn() { + maybe_enum_column = null; + fieldSetFlags()[15] = false; return this; } @@ -903,21 +1009,21 @@ public java.util.List getStringsColumn() { /** Sets the value of the 'strings_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder setStringsColumn(java.util.List value) { - validate(fields()[14], value); + validate(fields()[16], value); this.strings_column = value; - fieldSetFlags()[14] = true; + fieldSetFlags()[16] = true; return this; } /** Checks whether the 'strings_column' field has been set */ public boolean hasStringsColumn() { - return fieldSetFlags()[14]; + return fieldSetFlags()[16]; } /** Clears the value of the 'strings_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder clearStringsColumn() { strings_column = null; - fieldSetFlags()[14] = false; + fieldSetFlags()[16] = false; return this; } @@ -928,21 +1034,21 @@ public java.util.Map getStringToIntColumn() /** Sets the value of the 'string_to_int_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder setStringToIntColumn(java.util.Map value) { - 
validate(fields()[15], value); + validate(fields()[17], value); this.string_to_int_column = value; - fieldSetFlags()[15] = true; + fieldSetFlags()[17] = true; return this; } /** Checks whether the 'string_to_int_column' field has been set */ public boolean hasStringToIntColumn() { - return fieldSetFlags()[15]; + return fieldSetFlags()[17]; } /** Clears the value of the 'string_to_int_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder clearStringToIntColumn() { string_to_int_column = null; - fieldSetFlags()[15] = false; + fieldSetFlags()[17] = false; return this; } @@ -953,21 +1059,21 @@ public java.util.Map> value) { - validate(fields()[16], value); + validate(fields()[18], value); this.complex_column = value; - fieldSetFlags()[16] = true; + fieldSetFlags()[18] = true; return this; } /** Checks whether the 'complex_column' field has been set */ public boolean hasComplexColumn() { - return fieldSetFlags()[16]; + return fieldSetFlags()[18]; } /** Clears the value of the 'complex_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder clearComplexColumn() { complex_column = null; - fieldSetFlags()[16] = false; + fieldSetFlags()[18] = false; return this; } @@ -982,16 +1088,18 @@ public ParquetAvroCompat build() { record.double_column = fieldSetFlags()[4] ? this.double_column : (java.lang.Double) defaultValue(fields()[4]); record.binary_column = fieldSetFlags()[5] ? this.binary_column : (java.nio.ByteBuffer) defaultValue(fields()[5]); record.string_column = fieldSetFlags()[6] ? this.string_column : (java.lang.String) defaultValue(fields()[6]); - record.maybe_bool_column = fieldSetFlags()[7] ? this.maybe_bool_column : (java.lang.Boolean) defaultValue(fields()[7]); - record.maybe_int_column = fieldSetFlags()[8] ? this.maybe_int_column : (java.lang.Integer) defaultValue(fields()[8]); - record.maybe_long_column = fieldSetFlags()[9] ? 
this.maybe_long_column : (java.lang.Long) defaultValue(fields()[9]); - record.maybe_float_column = fieldSetFlags()[10] ? this.maybe_float_column : (java.lang.Float) defaultValue(fields()[10]); - record.maybe_double_column = fieldSetFlags()[11] ? this.maybe_double_column : (java.lang.Double) defaultValue(fields()[11]); - record.maybe_binary_column = fieldSetFlags()[12] ? this.maybe_binary_column : (java.nio.ByteBuffer) defaultValue(fields()[12]); - record.maybe_string_column = fieldSetFlags()[13] ? this.maybe_string_column : (java.lang.String) defaultValue(fields()[13]); - record.strings_column = fieldSetFlags()[14] ? this.strings_column : (java.util.List) defaultValue(fields()[14]); - record.string_to_int_column = fieldSetFlags()[15] ? this.string_to_int_column : (java.util.Map) defaultValue(fields()[15]); - record.complex_column = fieldSetFlags()[16] ? this.complex_column : (java.util.Map>) defaultValue(fields()[16]); + record.enum_column = fieldSetFlags()[7] ? this.enum_column : (org.apache.spark.sql.parquet.test.avro.Suit) defaultValue(fields()[7]); + record.maybe_bool_column = fieldSetFlags()[8] ? this.maybe_bool_column : (java.lang.Boolean) defaultValue(fields()[8]); + record.maybe_int_column = fieldSetFlags()[9] ? this.maybe_int_column : (java.lang.Integer) defaultValue(fields()[9]); + record.maybe_long_column = fieldSetFlags()[10] ? this.maybe_long_column : (java.lang.Long) defaultValue(fields()[10]); + record.maybe_float_column = fieldSetFlags()[11] ? this.maybe_float_column : (java.lang.Float) defaultValue(fields()[11]); + record.maybe_double_column = fieldSetFlags()[12] ? this.maybe_double_column : (java.lang.Double) defaultValue(fields()[12]); + record.maybe_binary_column = fieldSetFlags()[13] ? this.maybe_binary_column : (java.nio.ByteBuffer) defaultValue(fields()[13]); + record.maybe_string_column = fieldSetFlags()[14] ? this.maybe_string_column : (java.lang.String) defaultValue(fields()[14]); + record.maybe_enum_column = fieldSetFlags()[15] ? 
this.maybe_enum_column : (org.apache.spark.sql.parquet.test.avro.Suit) defaultValue(fields()[15]); + record.strings_column = fieldSetFlags()[16] ? this.strings_column : (java.util.List) defaultValue(fields()[16]); + record.string_to_int_column = fieldSetFlags()[17] ? this.string_to_int_column : (java.util.Map) defaultValue(fields()[17]); + record.complex_column = fieldSetFlags()[18] ? this.complex_column : (java.util.Map>) defaultValue(fields()[18]); return record; } catch (Exception e) { throw new org.apache.avro.AvroRuntimeException(e); diff --git a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetAvroCompatibilitySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetAvroCompatibilitySuite.scala index bfa427349ff6a..96bd0189eb8f4 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetAvroCompatibilitySuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetAvroCompatibilitySuite.scala @@ -25,7 +25,7 @@ import scala.collection.JavaConversions._ import org.apache.hadoop.fs.Path import org.apache.parquet.avro.AvroParquetWriter -import org.apache.spark.sql.parquet.test.avro.{Nested, ParquetAvroCompat} +import org.apache.spark.sql.parquet.test.avro.{Suit, Nested, ParquetAvroCompat} import org.apache.spark.sql.test.TestSQLContext import org.apache.spark.sql.{Row, SQLContext} @@ -63,6 +63,8 @@ class ParquetAvroCompatibilitySuite extends ParquetCompatibilityTest { i.toDouble + 0.2d, s"val_$i".getBytes, s"val_$i", + // Avro enum values are converted to plain UTF-8 strings + Suit.values()(i % Suit.values().length).name(), nullable(i % 2 == 0: java.lang.Boolean), nullable(i: Integer), @@ -71,6 +73,7 @@ class ParquetAvroCompatibilitySuite extends ParquetCompatibilityTest { nullable(i.toDouble + 0.2d: java.lang.Double), nullable(s"val_$i".getBytes), nullable(s"val_$i"), + nullable(Suit.values()(i % Suit.values().length).name()), Seq.tabulate(3)(n => s"arr_${i + n}"), Seq.tabulate(3)(n => n.toString -> (i + 
n: Integer)).toMap, @@ -106,6 +109,7 @@ class ParquetAvroCompatibilitySuite extends ParquetCompatibilityTest { .setDoubleColumn(i.toDouble + 0.2d) .setBinaryColumn(ByteBuffer.wrap(s"val_$i".getBytes)) .setStringColumn(s"val_$i") + .setEnumColumn(Suit.values()(i % Suit.values().length)) .setMaybeBoolColumn(nullable(i % 2 == 0: java.lang.Boolean)) .setMaybeIntColumn(nullable(i: Integer)) @@ -114,6 +118,7 @@ class ParquetAvroCompatibilitySuite extends ParquetCompatibilityTest { .setMaybeDoubleColumn(nullable(i.toDouble + 0.2d: java.lang.Double)) .setMaybeBinaryColumn(nullable(ByteBuffer.wrap(s"val_$i".getBytes))) .setMaybeStringColumn(nullable(s"val_$i")) + .setMaybeEnumColumn(nullable(Suit.values()(i % Suit.values().length))) .setStringsColumn(Seq.tabulate(3)(n => s"arr_${i + n}")) .setStringToIntColumn( From e152ed6e81fa685bced59c07fbd0be8cd72a9e49 Mon Sep 17 00:00:00 2001 From: Cheng Lian Date: Fri, 17 Jul 2015 19:33:23 +0800 Subject: [PATCH 2/6] Makes implicit conversions more explicit --- .../ParquetAvroCompatibilitySuite.scala | 18 +++++++++--------- .../sql/parquet/ParquetCompatibilityTest.scala | 1 + 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetAvroCompatibilitySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetAvroCompatibilitySuite.scala index 96bd0189eb8f4..55a2601c984e9 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetAvroCompatibilitySuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetAvroCompatibilitySuite.scala @@ -20,12 +20,12 @@ package org.apache.spark.sql.parquet import java.nio.ByteBuffer import java.util.{List => JList, Map => JMap} -import scala.collection.JavaConversions._ +import scala.collection.JavaConverters.{mapAsJavaMapConverter, seqAsJavaListConverter} import org.apache.hadoop.fs.Path import org.apache.parquet.avro.AvroParquetWriter -import org.apache.spark.sql.parquet.test.avro.{Suit, 
Nested, ParquetAvroCompat} +import org.apache.spark.sql.parquet.test.avro.{Nested, ParquetAvroCompat, Suit} import org.apache.spark.sql.test.TestSQLContext import org.apache.spark.sql.{Row, SQLContext} @@ -89,15 +89,15 @@ class ParquetAvroCompatibilitySuite extends ParquetCompatibilityTest { def nullable[T <: AnyRef] = makeNullable[T](i) _ def makeComplexColumn(i: Int): JMap[String, JList[Nested]] = { - mapAsJavaMap(Seq.tabulate(3) { n => - (i + n).toString -> seqAsJavaList(Seq.tabulate(3) { m => + Seq.tabulate(3) { n => + (i + n).toString -> Seq.tabulate(3) { m => Nested .newBuilder() - .setNestedIntsColumn(seqAsJavaList(Seq.tabulate(3)(j => i + j + m))) + .setNestedIntsColumn(Seq.tabulate(3)(j => i + j + m: Integer).asJava) .setNestedStringColumn(s"val_${i + m}") .build() - }) - }.toMap) + }.asJava + }.toMap.asJava } ParquetAvroCompat @@ -120,9 +120,9 @@ class ParquetAvroCompatibilitySuite extends ParquetCompatibilityTest { .setMaybeStringColumn(nullable(s"val_$i")) .setMaybeEnumColumn(nullable(Suit.values()(i % Suit.values().length))) - .setStringsColumn(Seq.tabulate(3)(n => s"arr_${i + n}")) + .setStringsColumn(Seq.tabulate(3)(n => s"arr_${i + n}").asJava) .setStringToIntColumn( - mapAsJavaMap(Seq.tabulate(3)(n => n.toString -> (i + n: Integer)).toMap)) + Seq.tabulate(3)(n => n.toString -> (i + n: Integer)).toMap.asJava) .setComplexColumn(makeComplexColumn(i)) .build() diff --git a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetCompatibilityTest.scala b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetCompatibilityTest.scala index b4cdfd9e98f6f..0238f0f0a9a1a 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetCompatibilityTest.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetCompatibilityTest.scala @@ -16,6 +16,7 @@ */ package org.apache.spark.sql.parquet + import java.io.File import scala.collection.JavaConversions._ From 3d69df3a8e97877ed4de24ac0326efb675832419 Mon Sep 17 00:00:00 2001 From: 
Cheng Lian Date: Fri, 17 Jul 2015 19:41:31 +0800 Subject: [PATCH 3/6] Tests for nullable Avro complex types --- sql/core/src/test/avro/parquet-compat.avdl | 6 +- sql/core/src/test/avro/parquet-compat.avpr | 21 +++ .../parquet/test/avro/CompatibilityTest.java | 2 +- .../parquet/test/avro/ParquetAvroCompat.java | 166 +++++++++++++++++- .../ParquetAvroCompatibilitySuite.scala | 18 +- 5 files changed, 206 insertions(+), 7 deletions(-) diff --git a/sql/core/src/test/avro/parquet-compat.avdl b/sql/core/src/test/avro/parquet-compat.avdl index 6445742487c61..9a64f4e0f2bd3 100644 --- a/sql/core/src/test/avro/parquet-compat.avdl +++ b/sql/core/src/test/avro/parquet-compat.avdl @@ -47,10 +47,14 @@ protocol CompatibilityTest { union { null, double } maybe_double_column; union { null, bytes } maybe_binary_column; union { null, string } maybe_string_column; - union { null, Suit } + union { null, Suit } maybe_enum_column; array strings_column; map string_to_int_column; map> complex_column; + + union { null, array } maybe_strings_column; + union { null, map } maybe_string_to_int_column; + union { null, map> } maybe_complex_column; } } diff --git a/sql/core/src/test/avro/parquet-compat.avpr b/sql/core/src/test/avro/parquet-compat.avpr index 9daaa6c500732..7d73fb41e2ddd 100644 --- a/sql/core/src/test/avro/parquet-compat.avpr +++ b/sql/core/src/test/avro/parquet-compat.avpr @@ -90,6 +90,27 @@ "items" : "Nested" } } + }, { + "name" : "maybe_strings_column", + "type" : [ "null", { + "type" : "array", + "items" : "string" + } ] + }, { + "name" : "maybe_string_to_int_column", + "type" : [ "null", { + "type" : "map", + "values" : "int" + } ] + }, { + "name" : "maybe_complex_column", + "type" : [ "null", { + "type" : "map", + "values" : { + "type" : "array", + "items" : "Nested" + } + } ] } ] } ], "messages" : { } diff --git a/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/CompatibilityTest.java 
b/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/CompatibilityTest.java index 70889e76b6a11..8a8d1972c0d4a 100644 --- a/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/CompatibilityTest.java +++ b/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/CompatibilityTest.java @@ -8,7 +8,7 @@ @SuppressWarnings("all") @org.apache.avro.specific.AvroGenerated public interface CompatibilityTest { - public static final org.apache.avro.Protocol PROTOCOL = org.apache.avro.Protocol.parse("{\"protocol\":\"CompatibilityTest\",\"namespace\":\"org.apache.spark.sql.parquet.test.avro\",\"types\":[{\"type\":\"enum\",\"name\":\"Suit\",\"symbols\":[\"SPADES\",\"HEARTS\",\"DIAMONDS\",\"CLUBS\"]},{\"type\":\"record\",\"name\":\"Nested\",\"fields\":[{\"name\":\"nested_ints_column\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"nested_string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]},{\"type\":\"record\",\"name\":\"ParquetAvroCompat\",\"fields\":[{\"name\":\"bool_column\",\"type\":\"boolean\"},{\"name\":\"int_column\",\"type\":\"int\"},{\"name\":\"long_column\",\"type\":\"long\"},{\"name\":\"float_column\",\"type\":\"float\"},{\"name\":\"double_column\",\"type\":\"double\"},{\"name\":\"binary_column\",\"type\":\"bytes\"},{\"name\":\"string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"enum_column\",\"type\":\"Suit\"},{\"name\":\"maybe_bool_column\",\"type\":[\"null\",\"boolean\"]},{\"name\":\"maybe_int_column\",\"type\":[\"null\",\"int\"]},{\"name\":\"maybe_long_column\",\"type\":[\"null\",\"long\"]},{\"name\":\"maybe_float_column\",\"type\":[\"null\",\"float\"]},{\"name\":\"maybe_double_column\",\"type\":[\"null\",\"double\"]},{\"name\":\"maybe_binary_column\",\"type\":[\"null\",\"bytes\"]},{\"name\":\"maybe_string_column\",\"type\":[\"null\",{\"type\":\"string\",\"avro.java.string\":\"String\"}]},{\"name\":\"maybe_enum_column\",\"type\":[\"null\",\"Suit\"]
},{\"name\":\"strings_column\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}},{\"name\":\"string_to_int_column\",\"type\":{\"type\":\"map\",\"values\":\"int\",\"avro.java.string\":\"String\"}},{\"name\":\"complex_column\",\"type\":{\"type\":\"map\",\"values\":{\"type\":\"array\",\"items\":\"Nested\"},\"avro.java.string\":\"String\"}}]}],\"messages\":{}}"); + public static final org.apache.avro.Protocol PROTOCOL = org.apache.avro.Protocol.parse("{\"protocol\":\"CompatibilityTest\",\"namespace\":\"org.apache.spark.sql.parquet.test.avro\",\"types\":[{\"type\":\"enum\",\"name\":\"Suit\",\"symbols\":[\"SPADES\",\"HEARTS\",\"DIAMONDS\",\"CLUBS\"]},{\"type\":\"record\",\"name\":\"Nested\",\"fields\":[{\"name\":\"nested_ints_column\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"nested_string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]},{\"type\":\"record\",\"name\":\"ParquetAvroCompat\",\"fields\":[{\"name\":\"bool_column\",\"type\":\"boolean\"},{\"name\":\"int_column\",\"type\":\"int\"},{\"name\":\"long_column\",\"type\":\"long\"},{\"name\":\"float_column\",\"type\":\"float\"},{\"name\":\"double_column\",\"type\":\"double\"},{\"name\":\"binary_column\",\"type\":\"bytes\"},{\"name\":\"string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"enum_column\",\"type\":\"Suit\"},{\"name\":\"maybe_bool_column\",\"type\":[\"null\",\"boolean\"]},{\"name\":\"maybe_int_column\",\"type\":[\"null\",\"int\"]},{\"name\":\"maybe_long_column\",\"type\":[\"null\",\"long\"]},{\"name\":\"maybe_float_column\",\"type\":[\"null\",\"float\"]},{\"name\":\"maybe_double_column\",\"type\":[\"null\",\"double\"]},{\"name\":\"maybe_binary_column\",\"type\":[\"null\",\"bytes\"]},{\"name\":\"maybe_string_column\",\"type\":[\"null\",{\"type\":\"string\",\"avro.java.string\":\"String\"}]},{\"name\":\"maybe_enum_column\",\"type\":[\"null\",\"Suit\"]},{\"name\":\"strings_column\",\"t
ype\":{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}},{\"name\":\"string_to_int_column\",\"type\":{\"type\":\"map\",\"values\":\"int\",\"avro.java.string\":\"String\"}},{\"name\":\"complex_column\",\"type\":{\"type\":\"map\",\"values\":{\"type\":\"array\",\"items\":\"Nested\"},\"avro.java.string\":\"String\"}},{\"name\":\"maybe_strings_column\",\"type\":[\"null\",{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]},{\"name\":\"maybe_string_to_int_column\",\"type\":[\"null\",{\"type\":\"map\",\"values\":\"int\",\"avro.java.string\":\"String\"}]},{\"name\":\"maybe_complex_column\",\"type\":[\"null\",{\"type\":\"map\",\"values\":{\"type\":\"array\",\"items\":\"Nested\"},\"avro.java.string\":\"String\"}]}]}],\"messages\":{}}"); @SuppressWarnings("all") public interface Callback extends CompatibilityTest { diff --git a/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/ParquetAvroCompat.java b/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/ParquetAvroCompat.java index ea6b32fe1dfb8..fa4d5c7f247c1 100644 --- a/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/ParquetAvroCompat.java +++ b/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/ParquetAvroCompat.java @@ -7,7 +7,7 @@ @SuppressWarnings("all") @org.apache.avro.specific.AvroGenerated public class ParquetAvroCompat extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = new 
org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"ParquetAvroCompat\",\"namespace\":\"org.apache.spark.sql.parquet.test.avro\",\"fields\":[{\"name\":\"bool_column\",\"type\":\"boolean\"},{\"name\":\"int_column\",\"type\":\"int\"},{\"name\":\"long_column\",\"type\":\"long\"},{\"name\":\"float_column\",\"type\":\"float\"},{\"name\":\"double_column\",\"type\":\"double\"},{\"name\":\"binary_column\",\"type\":\"bytes\"},{\"name\":\"string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"enum_column\",\"type\":{\"type\":\"enum\",\"name\":\"Suit\",\"symbols\":[\"SPADES\",\"HEARTS\",\"DIAMONDS\",\"CLUBS\"]}},{\"name\":\"maybe_bool_column\",\"type\":[\"null\",\"boolean\"]},{\"name\":\"maybe_int_column\",\"type\":[\"null\",\"int\"]},{\"name\":\"maybe_long_column\",\"type\":[\"null\",\"long\"]},{\"name\":\"maybe_float_column\",\"type\":[\"null\",\"float\"]},{\"name\":\"maybe_double_column\",\"type\":[\"null\",\"double\"]},{\"name\":\"maybe_binary_column\",\"type\":[\"null\",\"bytes\"]},{\"name\":\"maybe_string_column\",\"type\":[\"null\",{\"type\":\"string\",\"avro.java.string\":\"String\"}]},{\"name\":\"maybe_enum_column\",\"type\":[\"null\",\"Suit\"]},{\"name\":\"strings_column\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}},{\"name\":\"string_to_int_column\",\"type\":{\"type\":\"map\",\"values\":\"int\",\"avro.java.string\":\"String\"}},{\"name\":\"complex_column\",\"type\":{\"type\":\"map\",\"values\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Nested\",\"fields\":[{\"name\":\"nested_ints_column\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"nested_string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]}},\"avro.java.string\":\"String\"}}]}"); + public static final org.apache.avro.Schema SCHEMA$ = new 
org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"ParquetAvroCompat\",\"namespace\":\"org.apache.spark.sql.parquet.test.avro\",\"fields\":[{\"name\":\"bool_column\",\"type\":\"boolean\"},{\"name\":\"int_column\",\"type\":\"int\"},{\"name\":\"long_column\",\"type\":\"long\"},{\"name\":\"float_column\",\"type\":\"float\"},{\"name\":\"double_column\",\"type\":\"double\"},{\"name\":\"binary_column\",\"type\":\"bytes\"},{\"name\":\"string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"enum_column\",\"type\":{\"type\":\"enum\",\"name\":\"Suit\",\"symbols\":[\"SPADES\",\"HEARTS\",\"DIAMONDS\",\"CLUBS\"]}},{\"name\":\"maybe_bool_column\",\"type\":[\"null\",\"boolean\"]},{\"name\":\"maybe_int_column\",\"type\":[\"null\",\"int\"]},{\"name\":\"maybe_long_column\",\"type\":[\"null\",\"long\"]},{\"name\":\"maybe_float_column\",\"type\":[\"null\",\"float\"]},{\"name\":\"maybe_double_column\",\"type\":[\"null\",\"double\"]},{\"name\":\"maybe_binary_column\",\"type\":[\"null\",\"bytes\"]},{\"name\":\"maybe_string_column\",\"type\":[\"null\",{\"type\":\"string\",\"avro.java.string\":\"String\"}]},{\"name\":\"maybe_enum_column\",\"type\":[\"null\",\"Suit\"]},{\"name\":\"strings_column\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}},{\"name\":\"string_to_int_column\",\"type\":{\"type\":\"map\",\"values\":\"int\",\"avro.java.string\":\"String\"}},{\"name\":\"complex_column\",\"type\":{\"type\":\"map\",\"values\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Nested\",\"fields\":[{\"name\":\"nested_ints_column\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"nested_string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]}},\"avro.java.string\":\"String\"}},{\"name\":\"maybe_strings_column\",\"type\":[\"null\",{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]},{\"name\":\"maybe_string_to_int_co
lumn\",\"type\":[\"null\",{\"type\":\"map\",\"values\":\"int\",\"avro.java.string\":\"String\"}]},{\"name\":\"maybe_complex_column\",\"type\":[\"null\",{\"type\":\"map\",\"values\":{\"type\":\"array\",\"items\":\"Nested\"},\"avro.java.string\":\"String\"}]}]}"); public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } @Deprecated public boolean bool_column; @Deprecated public int int_column; @@ -28,6 +28,9 @@ public class ParquetAvroCompat extends org.apache.avro.specific.SpecificRecordBa @Deprecated public java.util.List strings_column; @Deprecated public java.util.Map string_to_int_column; @Deprecated public java.util.Map> complex_column; + @Deprecated public java.util.List maybe_strings_column; + @Deprecated public java.util.Map maybe_string_to_int_column; + @Deprecated public java.util.Map> maybe_complex_column; /** * Default constructor. Note that this does not initialize fields @@ -39,7 +42,7 @@ public ParquetAvroCompat() {} /** * All-args constructor. */ - public ParquetAvroCompat(java.lang.Boolean bool_column, java.lang.Integer int_column, java.lang.Long long_column, java.lang.Float float_column, java.lang.Double double_column, java.nio.ByteBuffer binary_column, java.lang.String string_column, org.apache.spark.sql.parquet.test.avro.Suit enum_column, java.lang.Boolean maybe_bool_column, java.lang.Integer maybe_int_column, java.lang.Long maybe_long_column, java.lang.Float maybe_float_column, java.lang.Double maybe_double_column, java.nio.ByteBuffer maybe_binary_column, java.lang.String maybe_string_column, org.apache.spark.sql.parquet.test.avro.Suit maybe_enum_column, java.util.List strings_column, java.util.Map string_to_int_column, java.util.Map> complex_column) { + public ParquetAvroCompat(java.lang.Boolean bool_column, java.lang.Integer int_column, java.lang.Long long_column, java.lang.Float float_column, java.lang.Double double_column, java.nio.ByteBuffer binary_column, java.lang.String string_column, 
org.apache.spark.sql.parquet.test.avro.Suit enum_column, java.lang.Boolean maybe_bool_column, java.lang.Integer maybe_int_column, java.lang.Long maybe_long_column, java.lang.Float maybe_float_column, java.lang.Double maybe_double_column, java.nio.ByteBuffer maybe_binary_column, java.lang.String maybe_string_column, org.apache.spark.sql.parquet.test.avro.Suit maybe_enum_column, java.util.List strings_column, java.util.Map string_to_int_column, java.util.Map> complex_column, java.util.List maybe_strings_column, java.util.Map maybe_string_to_int_column, java.util.Map> maybe_complex_column) { this.bool_column = bool_column; this.int_column = int_column; this.long_column = long_column; @@ -59,6 +62,9 @@ public ParquetAvroCompat(java.lang.Boolean bool_column, java.lang.Integer int_co this.strings_column = strings_column; this.string_to_int_column = string_to_int_column; this.complex_column = complex_column; + this.maybe_strings_column = maybe_strings_column; + this.maybe_string_to_int_column = maybe_string_to_int_column; + this.maybe_complex_column = maybe_complex_column; } public org.apache.avro.Schema getSchema() { return SCHEMA$; } @@ -84,6 +90,9 @@ public java.lang.Object get(int field$) { case 16: return strings_column; case 17: return string_to_int_column; case 18: return complex_column; + case 19: return maybe_strings_column; + case 20: return maybe_string_to_int_column; + case 21: return maybe_complex_column; default: throw new org.apache.avro.AvroRuntimeException("Bad index"); } } @@ -110,6 +119,9 @@ public void put(int field$, java.lang.Object value$) { case 16: strings_column = (java.util.List)value$; break; case 17: string_to_int_column = (java.util.Map)value$; break; case 18: complex_column = (java.util.Map>)value$; break; + case 19: maybe_strings_column = (java.util.List)value$; break; + case 20: maybe_string_to_int_column = (java.util.Map)value$; break; + case 21: maybe_complex_column = (java.util.Map>)value$; break; default: throw new 
org.apache.avro.AvroRuntimeException("Bad index"); } } @@ -399,6 +411,51 @@ public void setComplexColumn(java.util.Map getMaybeStringsColumn() { + return maybe_strings_column; + } + + /** + * Sets the value of the 'maybe_strings_column' field. + * @param value the value to set. + */ + public void setMaybeStringsColumn(java.util.List value) { + this.maybe_strings_column = value; + } + + /** + * Gets the value of the 'maybe_string_to_int_column' field. + */ + public java.util.Map getMaybeStringToIntColumn() { + return maybe_string_to_int_column; + } + + /** + * Sets the value of the 'maybe_string_to_int_column' field. + * @param value the value to set. + */ + public void setMaybeStringToIntColumn(java.util.Map value) { + this.maybe_string_to_int_column = value; + } + + /** + * Gets the value of the 'maybe_complex_column' field. + */ + public java.util.Map> getMaybeComplexColumn() { + return maybe_complex_column; + } + + /** + * Sets the value of the 'maybe_complex_column' field. + * @param value the value to set. 
+ */ + public void setMaybeComplexColumn(java.util.Map> value) { + this.maybe_complex_column = value; + } + /** Creates a new ParquetAvroCompat RecordBuilder */ public static org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder newBuilder() { return new org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder(); @@ -439,6 +496,9 @@ public static class Builder extends org.apache.avro.specific.SpecificRecordBuild private java.util.List strings_column; private java.util.Map string_to_int_column; private java.util.Map> complex_column; + private java.util.List maybe_strings_column; + private java.util.Map maybe_string_to_int_column; + private java.util.Map> maybe_complex_column; /** Creates a new Builder */ private Builder() { @@ -524,6 +584,18 @@ private Builder(org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder this.complex_column = data().deepCopy(fields()[18].schema(), other.complex_column); fieldSetFlags()[18] = true; } + if (isValidValue(fields()[19], other.maybe_strings_column)) { + this.maybe_strings_column = data().deepCopy(fields()[19].schema(), other.maybe_strings_column); + fieldSetFlags()[19] = true; + } + if (isValidValue(fields()[20], other.maybe_string_to_int_column)) { + this.maybe_string_to_int_column = data().deepCopy(fields()[20].schema(), other.maybe_string_to_int_column); + fieldSetFlags()[20] = true; + } + if (isValidValue(fields()[21], other.maybe_complex_column)) { + this.maybe_complex_column = data().deepCopy(fields()[21].schema(), other.maybe_complex_column); + fieldSetFlags()[21] = true; + } } /** Creates a Builder by copying an existing ParquetAvroCompat instance */ @@ -605,6 +677,18 @@ private Builder(org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat other) this.complex_column = data().deepCopy(fields()[18].schema(), other.complex_column); fieldSetFlags()[18] = true; } + if (isValidValue(fields()[19], other.maybe_strings_column)) { + this.maybe_strings_column = data().deepCopy(fields()[19].schema(), 
other.maybe_strings_column); + fieldSetFlags()[19] = true; + } + if (isValidValue(fields()[20], other.maybe_string_to_int_column)) { + this.maybe_string_to_int_column = data().deepCopy(fields()[20].schema(), other.maybe_string_to_int_column); + fieldSetFlags()[20] = true; + } + if (isValidValue(fields()[21], other.maybe_complex_column)) { + this.maybe_complex_column = data().deepCopy(fields()[21].schema(), other.maybe_complex_column); + fieldSetFlags()[21] = true; + } } /** Gets the value of the 'bool_column' field */ @@ -1077,6 +1161,81 @@ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder clearCom return this; } + /** Gets the value of the 'maybe_strings_column' field */ + public java.util.List getMaybeStringsColumn() { + return maybe_strings_column; + } + + /** Sets the value of the 'maybe_strings_column' field */ + public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder setMaybeStringsColumn(java.util.List value) { + validate(fields()[19], value); + this.maybe_strings_column = value; + fieldSetFlags()[19] = true; + return this; + } + + /** Checks whether the 'maybe_strings_column' field has been set */ + public boolean hasMaybeStringsColumn() { + return fieldSetFlags()[19]; + } + + /** Clears the value of the 'maybe_strings_column' field */ + public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder clearMaybeStringsColumn() { + maybe_strings_column = null; + fieldSetFlags()[19] = false; + return this; + } + + /** Gets the value of the 'maybe_string_to_int_column' field */ + public java.util.Map getMaybeStringToIntColumn() { + return maybe_string_to_int_column; + } + + /** Sets the value of the 'maybe_string_to_int_column' field */ + public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder setMaybeStringToIntColumn(java.util.Map value) { + validate(fields()[20], value); + this.maybe_string_to_int_column = value; + fieldSetFlags()[20] = true; + return this; + } + + /** Checks whether the 
'maybe_string_to_int_column' field has been set */ + public boolean hasMaybeStringToIntColumn() { + return fieldSetFlags()[20]; + } + + /** Clears the value of the 'maybe_string_to_int_column' field */ + public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder clearMaybeStringToIntColumn() { + maybe_string_to_int_column = null; + fieldSetFlags()[20] = false; + return this; + } + + /** Gets the value of the 'maybe_complex_column' field */ + public java.util.Map> getMaybeComplexColumn() { + return maybe_complex_column; + } + + /** Sets the value of the 'maybe_complex_column' field */ + public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder setMaybeComplexColumn(java.util.Map> value) { + validate(fields()[21], value); + this.maybe_complex_column = value; + fieldSetFlags()[21] = true; + return this; + } + + /** Checks whether the 'maybe_complex_column' field has been set */ + public boolean hasMaybeComplexColumn() { + return fieldSetFlags()[21]; + } + + /** Clears the value of the 'maybe_complex_column' field */ + public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder clearMaybeComplexColumn() { + maybe_complex_column = null; + fieldSetFlags()[21] = false; + return this; + } + @Override public ParquetAvroCompat build() { try { @@ -1100,6 +1259,9 @@ public ParquetAvroCompat build() { record.strings_column = fieldSetFlags()[16] ? this.strings_column : (java.util.List) defaultValue(fields()[16]); record.string_to_int_column = fieldSetFlags()[17] ? this.string_to_int_column : (java.util.Map) defaultValue(fields()[17]); record.complex_column = fieldSetFlags()[18] ? this.complex_column : (java.util.Map>) defaultValue(fields()[18]); + record.maybe_strings_column = fieldSetFlags()[19] ? this.maybe_strings_column : (java.util.List) defaultValue(fields()[19]); + record.maybe_string_to_int_column = fieldSetFlags()[20] ? 
this.maybe_string_to_int_column : (java.util.Map) defaultValue(fields()[20]); + record.maybe_complex_column = fieldSetFlags()[21] ? this.maybe_complex_column : (java.util.Map>) defaultValue(fields()[21]); return record; } catch (Exception e) { throw new org.apache.avro.AvroRuntimeException(e); diff --git a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetAvroCompatibilitySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetAvroCompatibilitySuite.scala index 55a2601c984e9..15cf112267fc7 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetAvroCompatibilitySuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetAvroCompatibilitySuite.scala @@ -81,7 +81,15 @@ class ParquetAvroCompatibilitySuite extends ParquetCompatibilityTest { (i + n).toString -> Seq.tabulate(3) { m => Row(Seq.tabulate(3)(j => i + j + m), s"val_${i + m}") } - }.toMap) + }.toMap, + + nullable(Seq.tabulate(3)(n => s"arr_${i + n}")), + nullable(Seq.tabulate(3)(n => n.toString -> (i + n: Integer)).toMap), + nullable(Seq.tabulate(3) { n => + (i + n).toString -> Seq.tabulate(3) { m => + Row(Seq.tabulate(3)(j => i + j + m), s"val_${i + m}") + } + }.toMap)) }) } @@ -121,10 +129,14 @@ class ParquetAvroCompatibilitySuite extends ParquetCompatibilityTest { .setMaybeEnumColumn(nullable(Suit.values()(i % Suit.values().length))) .setStringsColumn(Seq.tabulate(3)(n => s"arr_${i + n}").asJava) - .setStringToIntColumn( - Seq.tabulate(3)(n => n.toString -> (i + n: Integer)).toMap.asJava) + .setStringToIntColumn(Seq.tabulate(3)(n => n.toString -> (i + n: Integer)).toMap.asJava) .setComplexColumn(makeComplexColumn(i)) + .setMaybeStringsColumn(nullable(Seq.tabulate(3)(n => s"arr_${i + n}").asJava)) + .setMaybeStringToIntColumn( + nullable(Seq.tabulate(3)(n => n.toString -> (i + n: Integer)).toMap.asJava)) + .setMaybeComplexColumn(nullable(makeComplexColumn(i))) + .build() } } From 43804700e82527afd3cfc731025ddef26c4dd380 Mon Sep 17 00:00:00 2001 
From: Cheng Lian Date: Fri, 17 Jul 2015 20:00:18 +0800 Subject: [PATCH 4/6] Tests for Hive complex types --- .../spark/sql/hive/ParquetHiveCompatibilitySuite.scala | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ParquetHiveCompatibilitySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ParquetHiveCompatibilitySuite.scala index bb5f1febe9ad4..abdae1f71fd9b 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ParquetHiveCompatibilitySuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ParquetHiveCompatibilitySuite.scala @@ -42,7 +42,9 @@ class ParquetHiveCompatibilitySuite extends ParquetCompatibilityTest { | double_column DOUBLE, | | strings_column ARRAY, - | int_to_string_column MAP + | int_to_string_column MAP, + | structs_column ARRAY>, + | maps_column ARRAY> |) |STORED AS PARQUET |LOCATION '${parquetStore.getCanonicalPath}' @@ -86,7 +88,11 @@ class ParquetHiveCompatibilitySuite extends ParquetCompatibilityTest { nullable(i.toFloat + 0.1f: java.lang.Float), nullable(i.toDouble + 0.2d: java.lang.Double), nullable(Seq.tabulate(3)(n => s"arr_${i + n}")), - nullable(Seq.tabulate(3)(n => (i + n: Integer) -> s"val_${i + n}").toMap)) + nullable(Seq.tabulate(3)(n => (i + n: Integer) -> s"val_${i + n}").toMap), + nullable(Seq.tabulate(3)(n => Row(s"val_a_${i + n}", s"val_b_${i + n}"))), + nullable(Seq.tabulate(3) { n => + Seq.tabulate(3)(m => (i + n + m: Integer) -> s"val_b_${i + n + m}").toMap + })) } } } From 87b59ab26cea95ed27eb43eda8cbcde6a69097db Mon Sep 17 00:00:00 2001 From: Cheng Lian Date: Fri, 17 Jul 2015 23:08:50 +0800 Subject: [PATCH 5/6] Tests for Avro array of struct --- sql/core/src/test/avro/parquet-compat.avdl | 2 + sql/core/src/test/avro/parquet-compat.avpr | 12 + .../parquet/test/avro/CompatibilityTest.java | 2 +- .../parquet/test/avro/ParquetAvroCompat.java | 222 +++++++++++++----- .../ParquetAvroCompatibilitySuite.scala | 16 ++ 5 files 
changed, 196 insertions(+), 58 deletions(-) diff --git a/sql/core/src/test/avro/parquet-compat.avdl b/sql/core/src/test/avro/parquet-compat.avdl index 9a64f4e0f2bd3..0f80a4596e5ff 100644 --- a/sql/core/src/test/avro/parquet-compat.avdl +++ b/sql/core/src/test/avro/parquet-compat.avdl @@ -50,10 +50,12 @@ protocol CompatibilityTest { union { null, Suit } maybe_enum_column; array strings_column; + array structs_column; map string_to_int_column; map> complex_column; union { null, array } maybe_strings_column; + union { null, array } maybe_structs_column; union { null, map } maybe_string_to_int_column; union { null, map> } maybe_complex_column; } diff --git a/sql/core/src/test/avro/parquet-compat.avpr b/sql/core/src/test/avro/parquet-compat.avpr index 7d73fb41e2ddd..d3165c55c6a5e 100644 --- a/sql/core/src/test/avro/parquet-compat.avpr +++ b/sql/core/src/test/avro/parquet-compat.avpr @@ -75,6 +75,12 @@ "type" : "array", "items" : "string" } + }, { + "name" : "structs_column", + "type" : { + "type" : "array", + "items" : "Nested" + } }, { "name" : "string_to_int_column", "type" : { @@ -96,6 +102,12 @@ "type" : "array", "items" : "string" } ] + }, { + "name" : "maybe_structs_column", + "type" : [ "null", { + "type" : "array", + "items" : "Nested" + } ] }, { "name" : "maybe_string_to_int_column", "type" : [ "null", { diff --git a/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/CompatibilityTest.java b/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/CompatibilityTest.java index 8a8d1972c0d4a..0368ab050e829 100644 --- a/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/CompatibilityTest.java +++ b/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/CompatibilityTest.java @@ -8,7 +8,7 @@ @SuppressWarnings("all") @org.apache.avro.specific.AvroGenerated public interface CompatibilityTest { - public static final org.apache.avro.Protocol PROTOCOL = 
org.apache.avro.Protocol.parse("{\"protocol\":\"CompatibilityTest\",\"namespace\":\"org.apache.spark.sql.parquet.test.avro\",\"types\":[{\"type\":\"enum\",\"name\":\"Suit\",\"symbols\":[\"SPADES\",\"HEARTS\",\"DIAMONDS\",\"CLUBS\"]},{\"type\":\"record\",\"name\":\"Nested\",\"fields\":[{\"name\":\"nested_ints_column\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"nested_string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]},{\"type\":\"record\",\"name\":\"ParquetAvroCompat\",\"fields\":[{\"name\":\"bool_column\",\"type\":\"boolean\"},{\"name\":\"int_column\",\"type\":\"int\"},{\"name\":\"long_column\",\"type\":\"long\"},{\"name\":\"float_column\",\"type\":\"float\"},{\"name\":\"double_column\",\"type\":\"double\"},{\"name\":\"binary_column\",\"type\":\"bytes\"},{\"name\":\"string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"enum_column\",\"type\":\"Suit\"},{\"name\":\"maybe_bool_column\",\"type\":[\"null\",\"boolean\"]},{\"name\":\"maybe_int_column\",\"type\":[\"null\",\"int\"]},{\"name\":\"maybe_long_column\",\"type\":[\"null\",\"long\"]},{\"name\":\"maybe_float_column\",\"type\":[\"null\",\"float\"]},{\"name\":\"maybe_double_column\",\"type\":[\"null\",\"double\"]},{\"name\":\"maybe_binary_column\",\"type\":[\"null\",\"bytes\"]},{\"name\":\"maybe_string_column\",\"type\":[\"null\",{\"type\":\"string\",\"avro.java.string\":\"String\"}]},{\"name\":\"maybe_enum_column\",\"type\":[\"null\",\"Suit\"]},{\"name\":\"strings_column\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}},{\"name\":\"string_to_int_column\",\"type\":{\"type\":\"map\",\"values\":\"int\",\"avro.java.string\":\"String\"}},{\"name\":\"complex_column\",\"type\":{\"type\":\"map\",\"values\":{\"type\":\"array\",\"items\":\"Nested\"},\"avro.java.string\":\"String\"}},{\"name\":\"maybe_strings_column\",\"type\":[\"null\",{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.jav
a.string\":\"String\"}}]},{\"name\":\"maybe_string_to_int_column\",\"type\":[\"null\",{\"type\":\"map\",\"values\":\"int\",\"avro.java.string\":\"String\"}]},{\"name\":\"maybe_complex_column\",\"type\":[\"null\",{\"type\":\"map\",\"values\":{\"type\":\"array\",\"items\":\"Nested\"},\"avro.java.string\":\"String\"}]}]}],\"messages\":{}}"); + public static final org.apache.avro.Protocol PROTOCOL = org.apache.avro.Protocol.parse("{\"protocol\":\"CompatibilityTest\",\"namespace\":\"org.apache.spark.sql.parquet.test.avro\",\"types\":[{\"type\":\"enum\",\"name\":\"Suit\",\"symbols\":[\"SPADES\",\"HEARTS\",\"DIAMONDS\",\"CLUBS\"]},{\"type\":\"record\",\"name\":\"Nested\",\"fields\":[{\"name\":\"nested_ints_column\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"nested_string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]},{\"type\":\"record\",\"name\":\"ParquetAvroCompat\",\"fields\":[{\"name\":\"bool_column\",\"type\":\"boolean\"},{\"name\":\"int_column\",\"type\":\"int\"},{\"name\":\"long_column\",\"type\":\"long\"},{\"name\":\"float_column\",\"type\":\"float\"},{\"name\":\"double_column\",\"type\":\"double\"},{\"name\":\"binary_column\",\"type\":\"bytes\"},{\"name\":\"string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"enum_column\",\"type\":\"Suit\"},{\"name\":\"maybe_bool_column\",\"type\":[\"null\",\"boolean\"]},{\"name\":\"maybe_int_column\",\"type\":[\"null\",\"int\"]},{\"name\":\"maybe_long_column\",\"type\":[\"null\",\"long\"]},{\"name\":\"maybe_float_column\",\"type\":[\"null\",\"float\"]},{\"name\":\"maybe_double_column\",\"type\":[\"null\",\"double\"]},{\"name\":\"maybe_binary_column\",\"type\":[\"null\",\"bytes\"]},{\"name\":\"maybe_string_column\",\"type\":[\"null\",{\"type\":\"string\",\"avro.java.string\":\"String\"}]},{\"name\":\"maybe_enum_column\",\"type\":[\"null\",\"Suit\"]},{\"name\":\"strings_column\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.ja
va.string\":\"String\"}}},{\"name\":\"structs_column\",\"type\":{\"type\":\"array\",\"items\":\"Nested\"}},{\"name\":\"string_to_int_column\",\"type\":{\"type\":\"map\",\"values\":\"int\",\"avro.java.string\":\"String\"}},{\"name\":\"complex_column\",\"type\":{\"type\":\"map\",\"values\":{\"type\":\"array\",\"items\":\"Nested\"},\"avro.java.string\":\"String\"}},{\"name\":\"maybe_strings_column\",\"type\":[\"null\",{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]},{\"name\":\"maybe_structs_column\",\"type\":[\"null\",{\"type\":\"array\",\"items\":\"Nested\"}]},{\"name\":\"maybe_string_to_int_column\",\"type\":[\"null\",{\"type\":\"map\",\"values\":\"int\",\"avro.java.string\":\"String\"}]},{\"name\":\"maybe_complex_column\",\"type\":[\"null\",{\"type\":\"map\",\"values\":{\"type\":\"array\",\"items\":\"Nested\"},\"avro.java.string\":\"String\"}]}]}],\"messages\":{}}"); @SuppressWarnings("all") public interface Callback extends CompatibilityTest { diff --git a/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/ParquetAvroCompat.java b/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/ParquetAvroCompat.java index fa4d5c7f247c1..ef2c9bae68e01 100644 --- a/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/ParquetAvroCompat.java +++ b/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/ParquetAvroCompat.java @@ -7,7 +7,7 @@ @SuppressWarnings("all") @org.apache.avro.specific.AvroGenerated public class ParquetAvroCompat extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { - public static final org.apache.avro.Schema SCHEMA$ = new 
org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"ParquetAvroCompat\",\"namespace\":\"org.apache.spark.sql.parquet.test.avro\",\"fields\":[{\"name\":\"bool_column\",\"type\":\"boolean\"},{\"name\":\"int_column\",\"type\":\"int\"},{\"name\":\"long_column\",\"type\":\"long\"},{\"name\":\"float_column\",\"type\":\"float\"},{\"name\":\"double_column\",\"type\":\"double\"},{\"name\":\"binary_column\",\"type\":\"bytes\"},{\"name\":\"string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"enum_column\",\"type\":{\"type\":\"enum\",\"name\":\"Suit\",\"symbols\":[\"SPADES\",\"HEARTS\",\"DIAMONDS\",\"CLUBS\"]}},{\"name\":\"maybe_bool_column\",\"type\":[\"null\",\"boolean\"]},{\"name\":\"maybe_int_column\",\"type\":[\"null\",\"int\"]},{\"name\":\"maybe_long_column\",\"type\":[\"null\",\"long\"]},{\"name\":\"maybe_float_column\",\"type\":[\"null\",\"float\"]},{\"name\":\"maybe_double_column\",\"type\":[\"null\",\"double\"]},{\"name\":\"maybe_binary_column\",\"type\":[\"null\",\"bytes\"]},{\"name\":\"maybe_string_column\",\"type\":[\"null\",{\"type\":\"string\",\"avro.java.string\":\"String\"}]},{\"name\":\"maybe_enum_column\",\"type\":[\"null\",\"Suit\"]},{\"name\":\"strings_column\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}},{\"name\":\"string_to_int_column\",\"type\":{\"type\":\"map\",\"values\":\"int\",\"avro.java.string\":\"String\"}},{\"name\":\"complex_column\",\"type\":{\"type\":\"map\",\"values\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Nested\",\"fields\":[{\"name\":\"nested_ints_column\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"nested_string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]}},\"avro.java.string\":\"String\"}},{\"name\":\"maybe_strings_column\",\"type\":[\"null\",{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]},{\"name\":\"maybe_string_to_int_co
lumn\",\"type\":[\"null\",{\"type\":\"map\",\"values\":\"int\",\"avro.java.string\":\"String\"}]},{\"name\":\"maybe_complex_column\",\"type\":[\"null\",{\"type\":\"map\",\"values\":{\"type\":\"array\",\"items\":\"Nested\"},\"avro.java.string\":\"String\"}]}]}"); + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"ParquetAvroCompat\",\"namespace\":\"org.apache.spark.sql.parquet.test.avro\",\"fields\":[{\"name\":\"bool_column\",\"type\":\"boolean\"},{\"name\":\"int_column\",\"type\":\"int\"},{\"name\":\"long_column\",\"type\":\"long\"},{\"name\":\"float_column\",\"type\":\"float\"},{\"name\":\"double_column\",\"type\":\"double\"},{\"name\":\"binary_column\",\"type\":\"bytes\"},{\"name\":\"string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"enum_column\",\"type\":{\"type\":\"enum\",\"name\":\"Suit\",\"symbols\":[\"SPADES\",\"HEARTS\",\"DIAMONDS\",\"CLUBS\"]}},{\"name\":\"maybe_bool_column\",\"type\":[\"null\",\"boolean\"]},{\"name\":\"maybe_int_column\",\"type\":[\"null\",\"int\"]},{\"name\":\"maybe_long_column\",\"type\":[\"null\",\"long\"]},{\"name\":\"maybe_float_column\",\"type\":[\"null\",\"float\"]},{\"name\":\"maybe_double_column\",\"type\":[\"null\",\"double\"]},{\"name\":\"maybe_binary_column\",\"type\":[\"null\",\"bytes\"]},{\"name\":\"maybe_string_column\",\"type\":[\"null\",{\"type\":\"string\",\"avro.java.string\":\"String\"}]},{\"name\":\"maybe_enum_column\",\"type\":[\"null\",\"Suit\"]},{\"name\":\"strings_column\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}},{\"name\":\"structs_column\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Nested\",\"fields\":[{\"name\":\"nested_ints_column\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"nested_string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]}}},{\"name\":\"string_to_int_colu
mn\",\"type\":{\"type\":\"map\",\"values\":\"int\",\"avro.java.string\":\"String\"}},{\"name\":\"complex_column\",\"type\":{\"type\":\"map\",\"values\":{\"type\":\"array\",\"items\":\"Nested\"},\"avro.java.string\":\"String\"}},{\"name\":\"maybe_strings_column\",\"type\":[\"null\",{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]},{\"name\":\"maybe_structs_column\",\"type\":[\"null\",{\"type\":\"array\",\"items\":\"Nested\"}]},{\"name\":\"maybe_string_to_int_column\",\"type\":[\"null\",{\"type\":\"map\",\"values\":\"int\",\"avro.java.string\":\"String\"}]},{\"name\":\"maybe_complex_column\",\"type\":[\"null\",{\"type\":\"map\",\"values\":{\"type\":\"array\",\"items\":\"Nested\"},\"avro.java.string\":\"String\"}]}]}"); public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } @Deprecated public boolean bool_column; @Deprecated public int int_column; @@ -26,9 +26,11 @@ public class ParquetAvroCompat extends org.apache.avro.specific.SpecificRecordBa @Deprecated public java.lang.String maybe_string_column; @Deprecated public org.apache.spark.sql.parquet.test.avro.Suit maybe_enum_column; @Deprecated public java.util.List strings_column; + @Deprecated public java.util.List structs_column; @Deprecated public java.util.Map string_to_int_column; @Deprecated public java.util.Map> complex_column; @Deprecated public java.util.List maybe_strings_column; + @Deprecated public java.util.List maybe_structs_column; @Deprecated public java.util.Map maybe_string_to_int_column; @Deprecated public java.util.Map> maybe_complex_column; @@ -42,7 +44,7 @@ public ParquetAvroCompat() {} /** * All-args constructor. 
*/ - public ParquetAvroCompat(java.lang.Boolean bool_column, java.lang.Integer int_column, java.lang.Long long_column, java.lang.Float float_column, java.lang.Double double_column, java.nio.ByteBuffer binary_column, java.lang.String string_column, org.apache.spark.sql.parquet.test.avro.Suit enum_column, java.lang.Boolean maybe_bool_column, java.lang.Integer maybe_int_column, java.lang.Long maybe_long_column, java.lang.Float maybe_float_column, java.lang.Double maybe_double_column, java.nio.ByteBuffer maybe_binary_column, java.lang.String maybe_string_column, org.apache.spark.sql.parquet.test.avro.Suit maybe_enum_column, java.util.List strings_column, java.util.Map string_to_int_column, java.util.Map> complex_column, java.util.List maybe_strings_column, java.util.Map maybe_string_to_int_column, java.util.Map> maybe_complex_column) { + public ParquetAvroCompat(java.lang.Boolean bool_column, java.lang.Integer int_column, java.lang.Long long_column, java.lang.Float float_column, java.lang.Double double_column, java.nio.ByteBuffer binary_column, java.lang.String string_column, org.apache.spark.sql.parquet.test.avro.Suit enum_column, java.lang.Boolean maybe_bool_column, java.lang.Integer maybe_int_column, java.lang.Long maybe_long_column, java.lang.Float maybe_float_column, java.lang.Double maybe_double_column, java.nio.ByteBuffer maybe_binary_column, java.lang.String maybe_string_column, org.apache.spark.sql.parquet.test.avro.Suit maybe_enum_column, java.util.List strings_column, java.util.List structs_column, java.util.Map string_to_int_column, java.util.Map> complex_column, java.util.List maybe_strings_column, java.util.List maybe_structs_column, java.util.Map maybe_string_to_int_column, java.util.Map> maybe_complex_column) { this.bool_column = bool_column; this.int_column = int_column; this.long_column = long_column; @@ -60,9 +62,11 @@ public ParquetAvroCompat(java.lang.Boolean bool_column, java.lang.Integer int_co this.maybe_string_column = maybe_string_column; 
this.maybe_enum_column = maybe_enum_column; this.strings_column = strings_column; + this.structs_column = structs_column; this.string_to_int_column = string_to_int_column; this.complex_column = complex_column; this.maybe_strings_column = maybe_strings_column; + this.maybe_structs_column = maybe_structs_column; this.maybe_string_to_int_column = maybe_string_to_int_column; this.maybe_complex_column = maybe_complex_column; } @@ -88,11 +92,13 @@ public java.lang.Object get(int field$) { case 14: return maybe_string_column; case 15: return maybe_enum_column; case 16: return strings_column; - case 17: return string_to_int_column; - case 18: return complex_column; - case 19: return maybe_strings_column; - case 20: return maybe_string_to_int_column; - case 21: return maybe_complex_column; + case 17: return structs_column; + case 18: return string_to_int_column; + case 19: return complex_column; + case 20: return maybe_strings_column; + case 21: return maybe_structs_column; + case 22: return maybe_string_to_int_column; + case 23: return maybe_complex_column; default: throw new org.apache.avro.AvroRuntimeException("Bad index"); } } @@ -117,11 +123,13 @@ public void put(int field$, java.lang.Object value$) { case 14: maybe_string_column = (java.lang.String)value$; break; case 15: maybe_enum_column = (org.apache.spark.sql.parquet.test.avro.Suit)value$; break; case 16: strings_column = (java.util.List)value$; break; - case 17: string_to_int_column = (java.util.Map)value$; break; - case 18: complex_column = (java.util.Map>)value$; break; - case 19: maybe_strings_column = (java.util.List)value$; break; - case 20: maybe_string_to_int_column = (java.util.Map)value$; break; - case 21: maybe_complex_column = (java.util.Map>)value$; break; + case 17: structs_column = (java.util.List)value$; break; + case 18: string_to_int_column = (java.util.Map)value$; break; + case 19: complex_column = (java.util.Map>)value$; break; + case 20: maybe_strings_column = (java.util.List)value$; break; + 
case 21: maybe_structs_column = (java.util.List)value$; break; + case 22: maybe_string_to_int_column = (java.util.Map)value$; break; + case 23: maybe_complex_column = (java.util.Map>)value$; break; default: throw new org.apache.avro.AvroRuntimeException("Bad index"); } } @@ -381,6 +389,21 @@ public void setStringsColumn(java.util.List value) { this.strings_column = value; } + /** + * Gets the value of the 'structs_column' field. + */ + public java.util.List getStructsColumn() { + return structs_column; + } + + /** + * Sets the value of the 'structs_column' field. + * @param value the value to set. + */ + public void setStructsColumn(java.util.List value) { + this.structs_column = value; + } + /** * Gets the value of the 'string_to_int_column' field. */ @@ -426,6 +449,21 @@ public void setMaybeStringsColumn(java.util.List value) { this.maybe_strings_column = value; } + /** + * Gets the value of the 'maybe_structs_column' field. + */ + public java.util.List getMaybeStructsColumn() { + return maybe_structs_column; + } + + /** + * Sets the value of the 'maybe_structs_column' field. + * @param value the value to set. + */ + public void setMaybeStructsColumn(java.util.List value) { + this.maybe_structs_column = value; + } + /** * Gets the value of the 'maybe_string_to_int_column' field. 
*/ @@ -494,9 +532,11 @@ public static class Builder extends org.apache.avro.specific.SpecificRecordBuild private java.lang.String maybe_string_column; private org.apache.spark.sql.parquet.test.avro.Suit maybe_enum_column; private java.util.List strings_column; + private java.util.List structs_column; private java.util.Map string_to_int_column; private java.util.Map> complex_column; private java.util.List maybe_strings_column; + private java.util.List maybe_structs_column; private java.util.Map maybe_string_to_int_column; private java.util.Map> maybe_complex_column; @@ -576,26 +616,34 @@ private Builder(org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder this.strings_column = data().deepCopy(fields()[16].schema(), other.strings_column); fieldSetFlags()[16] = true; } - if (isValidValue(fields()[17], other.string_to_int_column)) { - this.string_to_int_column = data().deepCopy(fields()[17].schema(), other.string_to_int_column); + if (isValidValue(fields()[17], other.structs_column)) { + this.structs_column = data().deepCopy(fields()[17].schema(), other.structs_column); fieldSetFlags()[17] = true; } - if (isValidValue(fields()[18], other.complex_column)) { - this.complex_column = data().deepCopy(fields()[18].schema(), other.complex_column); + if (isValidValue(fields()[18], other.string_to_int_column)) { + this.string_to_int_column = data().deepCopy(fields()[18].schema(), other.string_to_int_column); fieldSetFlags()[18] = true; } - if (isValidValue(fields()[19], other.maybe_strings_column)) { - this.maybe_strings_column = data().deepCopy(fields()[19].schema(), other.maybe_strings_column); + if (isValidValue(fields()[19], other.complex_column)) { + this.complex_column = data().deepCopy(fields()[19].schema(), other.complex_column); fieldSetFlags()[19] = true; } - if (isValidValue(fields()[20], other.maybe_string_to_int_column)) { - this.maybe_string_to_int_column = data().deepCopy(fields()[20].schema(), other.maybe_string_to_int_column); + if 
(isValidValue(fields()[20], other.maybe_strings_column)) { + this.maybe_strings_column = data().deepCopy(fields()[20].schema(), other.maybe_strings_column); fieldSetFlags()[20] = true; } - if (isValidValue(fields()[21], other.maybe_complex_column)) { - this.maybe_complex_column = data().deepCopy(fields()[21].schema(), other.maybe_complex_column); + if (isValidValue(fields()[21], other.maybe_structs_column)) { + this.maybe_structs_column = data().deepCopy(fields()[21].schema(), other.maybe_structs_column); fieldSetFlags()[21] = true; } + if (isValidValue(fields()[22], other.maybe_string_to_int_column)) { + this.maybe_string_to_int_column = data().deepCopy(fields()[22].schema(), other.maybe_string_to_int_column); + fieldSetFlags()[22] = true; + } + if (isValidValue(fields()[23], other.maybe_complex_column)) { + this.maybe_complex_column = data().deepCopy(fields()[23].schema(), other.maybe_complex_column); + fieldSetFlags()[23] = true; + } } /** Creates a Builder by copying an existing ParquetAvroCompat instance */ @@ -669,26 +717,34 @@ private Builder(org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat other) this.strings_column = data().deepCopy(fields()[16].schema(), other.strings_column); fieldSetFlags()[16] = true; } - if (isValidValue(fields()[17], other.string_to_int_column)) { - this.string_to_int_column = data().deepCopy(fields()[17].schema(), other.string_to_int_column); + if (isValidValue(fields()[17], other.structs_column)) { + this.structs_column = data().deepCopy(fields()[17].schema(), other.structs_column); fieldSetFlags()[17] = true; } - if (isValidValue(fields()[18], other.complex_column)) { - this.complex_column = data().deepCopy(fields()[18].schema(), other.complex_column); + if (isValidValue(fields()[18], other.string_to_int_column)) { + this.string_to_int_column = data().deepCopy(fields()[18].schema(), other.string_to_int_column); fieldSetFlags()[18] = true; } - if (isValidValue(fields()[19], other.maybe_strings_column)) { - 
this.maybe_strings_column = data().deepCopy(fields()[19].schema(), other.maybe_strings_column); + if (isValidValue(fields()[19], other.complex_column)) { + this.complex_column = data().deepCopy(fields()[19].schema(), other.complex_column); fieldSetFlags()[19] = true; } - if (isValidValue(fields()[20], other.maybe_string_to_int_column)) { - this.maybe_string_to_int_column = data().deepCopy(fields()[20].schema(), other.maybe_string_to_int_column); + if (isValidValue(fields()[20], other.maybe_strings_column)) { + this.maybe_strings_column = data().deepCopy(fields()[20].schema(), other.maybe_strings_column); fieldSetFlags()[20] = true; } - if (isValidValue(fields()[21], other.maybe_complex_column)) { - this.maybe_complex_column = data().deepCopy(fields()[21].schema(), other.maybe_complex_column); + if (isValidValue(fields()[21], other.maybe_structs_column)) { + this.maybe_structs_column = data().deepCopy(fields()[21].schema(), other.maybe_structs_column); fieldSetFlags()[21] = true; } + if (isValidValue(fields()[22], other.maybe_string_to_int_column)) { + this.maybe_string_to_int_column = data().deepCopy(fields()[22].schema(), other.maybe_string_to_int_column); + fieldSetFlags()[22] = true; + } + if (isValidValue(fields()[23], other.maybe_complex_column)) { + this.maybe_complex_column = data().deepCopy(fields()[23].schema(), other.maybe_complex_column); + fieldSetFlags()[23] = true; + } } /** Gets the value of the 'bool_column' field */ @@ -1111,6 +1167,31 @@ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder clearStr return this; } + /** Gets the value of the 'structs_column' field */ + public java.util.List getStructsColumn() { + return structs_column; + } + + /** Sets the value of the 'structs_column' field */ + public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder setStructsColumn(java.util.List value) { + validate(fields()[17], value); + this.structs_column = value; + fieldSetFlags()[17] = true; + return this; + } + + /** 
Checks whether the 'structs_column' field has been set */ + public boolean hasStructsColumn() { + return fieldSetFlags()[17]; + } + + /** Clears the value of the 'structs_column' field */ + public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder clearStructsColumn() { + structs_column = null; + fieldSetFlags()[17] = false; + return this; + } + /** Gets the value of the 'string_to_int_column' field */ public java.util.Map getStringToIntColumn() { return string_to_int_column; @@ -1118,21 +1199,21 @@ public java.util.Map getStringToIntColumn() /** Sets the value of the 'string_to_int_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder setStringToIntColumn(java.util.Map value) { - validate(fields()[17], value); + validate(fields()[18], value); this.string_to_int_column = value; - fieldSetFlags()[17] = true; + fieldSetFlags()[18] = true; return this; } /** Checks whether the 'string_to_int_column' field has been set */ public boolean hasStringToIntColumn() { - return fieldSetFlags()[17]; + return fieldSetFlags()[18]; } /** Clears the value of the 'string_to_int_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder clearStringToIntColumn() { string_to_int_column = null; - fieldSetFlags()[17] = false; + fieldSetFlags()[18] = false; return this; } @@ -1143,21 +1224,21 @@ public java.util.Map> value) { - validate(fields()[18], value); + validate(fields()[19], value); this.complex_column = value; - fieldSetFlags()[18] = true; + fieldSetFlags()[19] = true; return this; } /** Checks whether the 'complex_column' field has been set */ public boolean hasComplexColumn() { - return fieldSetFlags()[18]; + return fieldSetFlags()[19]; } /** Clears the value of the 'complex_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder clearComplexColumn() { complex_column = null; - fieldSetFlags()[18] = false; + fieldSetFlags()[19] = false; return this; } @@ -1168,21 
+1249,46 @@ public java.util.List getMaybeStringsColumn() { /** Sets the value of the 'maybe_strings_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder setMaybeStringsColumn(java.util.List value) { - validate(fields()[19], value); + validate(fields()[20], value); this.maybe_strings_column = value; - fieldSetFlags()[19] = true; + fieldSetFlags()[20] = true; return this; } /** Checks whether the 'maybe_strings_column' field has been set */ public boolean hasMaybeStringsColumn() { - return fieldSetFlags()[19]; + return fieldSetFlags()[20]; } /** Clears the value of the 'maybe_strings_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder clearMaybeStringsColumn() { maybe_strings_column = null; - fieldSetFlags()[19] = false; + fieldSetFlags()[20] = false; + return this; + } + + /** Gets the value of the 'maybe_structs_column' field */ + public java.util.List getMaybeStructsColumn() { + return maybe_structs_column; + } + + /** Sets the value of the 'maybe_structs_column' field */ + public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder setMaybeStructsColumn(java.util.List value) { + validate(fields()[21], value); + this.maybe_structs_column = value; + fieldSetFlags()[21] = true; + return this; + } + + /** Checks whether the 'maybe_structs_column' field has been set */ + public boolean hasMaybeStructsColumn() { + return fieldSetFlags()[21]; + } + + /** Clears the value of the 'maybe_structs_column' field */ + public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder clearMaybeStructsColumn() { + maybe_structs_column = null; + fieldSetFlags()[21] = false; return this; } @@ -1193,21 +1299,21 @@ public java.util.Map getMaybeStringToIntColu /** Sets the value of the 'maybe_string_to_int_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder setMaybeStringToIntColumn(java.util.Map value) { - validate(fields()[20], value); + 
validate(fields()[22], value); this.maybe_string_to_int_column = value; - fieldSetFlags()[20] = true; + fieldSetFlags()[22] = true; return this; } /** Checks whether the 'maybe_string_to_int_column' field has been set */ public boolean hasMaybeStringToIntColumn() { - return fieldSetFlags()[20]; + return fieldSetFlags()[22]; } /** Clears the value of the 'maybe_string_to_int_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder clearMaybeStringToIntColumn() { maybe_string_to_int_column = null; - fieldSetFlags()[20] = false; + fieldSetFlags()[22] = false; return this; } @@ -1218,21 +1324,21 @@ public java.util.Map> value) { - validate(fields()[21], value); + validate(fields()[23], value); this.maybe_complex_column = value; - fieldSetFlags()[21] = true; + fieldSetFlags()[23] = true; return this; } /** Checks whether the 'maybe_complex_column' field has been set */ public boolean hasMaybeComplexColumn() { - return fieldSetFlags()[21]; + return fieldSetFlags()[23]; } /** Clears the value of the 'maybe_complex_column' field */ public org.apache.spark.sql.parquet.test.avro.ParquetAvroCompat.Builder clearMaybeComplexColumn() { maybe_complex_column = null; - fieldSetFlags()[21] = false; + fieldSetFlags()[23] = false; return this; } @@ -1257,11 +1363,13 @@ public ParquetAvroCompat build() { record.maybe_string_column = fieldSetFlags()[14] ? this.maybe_string_column : (java.lang.String) defaultValue(fields()[14]); record.maybe_enum_column = fieldSetFlags()[15] ? this.maybe_enum_column : (org.apache.spark.sql.parquet.test.avro.Suit) defaultValue(fields()[15]); record.strings_column = fieldSetFlags()[16] ? this.strings_column : (java.util.List) defaultValue(fields()[16]); - record.string_to_int_column = fieldSetFlags()[17] ? this.string_to_int_column : (java.util.Map) defaultValue(fields()[17]); - record.complex_column = fieldSetFlags()[18] ? 
this.complex_column : (java.util.Map>) defaultValue(fields()[18]); - record.maybe_strings_column = fieldSetFlags()[19] ? this.maybe_strings_column : (java.util.List) defaultValue(fields()[19]); - record.maybe_string_to_int_column = fieldSetFlags()[20] ? this.maybe_string_to_int_column : (java.util.Map) defaultValue(fields()[20]); - record.maybe_complex_column = fieldSetFlags()[21] ? this.maybe_complex_column : (java.util.Map>) defaultValue(fields()[21]); + record.structs_column = fieldSetFlags()[17] ? this.structs_column : (java.util.List) defaultValue(fields()[17]); + record.string_to_int_column = fieldSetFlags()[18] ? this.string_to_int_column : (java.util.Map) defaultValue(fields()[18]); + record.complex_column = fieldSetFlags()[19] ? this.complex_column : (java.util.Map>) defaultValue(fields()[19]); + record.maybe_strings_column = fieldSetFlags()[20] ? this.maybe_strings_column : (java.util.List) defaultValue(fields()[20]); + record.maybe_structs_column = fieldSetFlags()[21] ? this.maybe_structs_column : (java.util.List) defaultValue(fields()[21]); + record.maybe_string_to_int_column = fieldSetFlags()[22] ? this.maybe_string_to_int_column : (java.util.Map) defaultValue(fields()[22]); + record.maybe_complex_column = fieldSetFlags()[23] ? 
this.maybe_complex_column : (java.util.Map>) defaultValue(fields()[23]); return record; } catch (Exception e) { throw new org.apache.avro.AvroRuntimeException(e); diff --git a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetAvroCompatibilitySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetAvroCompatibilitySuite.scala index 15cf112267fc7..20fe4e49878e1 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetAvroCompatibilitySuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetAvroCompatibilitySuite.scala @@ -76,6 +76,7 @@ class ParquetAvroCompatibilitySuite extends ParquetCompatibilityTest { nullable(Suit.values()(i % Suit.values().length).name()), Seq.tabulate(3)(n => s"arr_${i + n}"), + Seq.tabulate(3)(n => Row(Seq.tabulate(3)(j => i + j + n), s"val_${i + n}")), Seq.tabulate(3)(n => n.toString -> (i + n: Integer)).toMap, Seq.tabulate(3) { n => (i + n).toString -> Seq.tabulate(3) { m => @@ -84,6 +85,7 @@ class ParquetAvroCompatibilitySuite extends ParquetCompatibilityTest { }.toMap, nullable(Seq.tabulate(3)(n => s"arr_${i + n}")), + nullable(Seq.tabulate(3)(n => Row(Seq.tabulate(3)(j => i + j + n), s"val_${i + n}"))), nullable(Seq.tabulate(3)(n => n.toString -> (i + n: Integer)).toMap), nullable(Seq.tabulate(3) { n => (i + n).toString -> Seq.tabulate(3) { m => @@ -129,10 +131,24 @@ class ParquetAvroCompatibilitySuite extends ParquetCompatibilityTest { .setMaybeEnumColumn(nullable(Suit.values()(i % Suit.values().length))) .setStringsColumn(Seq.tabulate(3)(n => s"arr_${i + n}").asJava) + .setStructsColumn(Seq.tabulate(3) { n => + Nested + .newBuilder() + .setNestedIntsColumn(Seq.tabulate(3)(j => i + j + n: Integer).asJava) + .setNestedStringColumn(s"val_${i + n}") + .build() + }.asJava) .setStringToIntColumn(Seq.tabulate(3)(n => n.toString -> (i + n: Integer)).toMap.asJava) .setComplexColumn(makeComplexColumn(i)) .setMaybeStringsColumn(nullable(Seq.tabulate(3)(n => s"arr_${i + 
n}").asJava)) + .setMaybeStructsColumn(nullable(Seq.tabulate(3) { n => + Nested + .newBuilder() + .setNestedIntsColumn(Seq.tabulate(3)(j => i + j + n: Integer).asJava) + .setNestedStringColumn(s"val_${i + n}") + .build() + }.asJava)) .setMaybeStringToIntColumn( nullable(Seq.tabulate(3)(n => n.toString -> (i + n: Integer)).toMap.asJava)) .setMaybeComplexColumn(nullable(makeComplexColumn(i))) From b1be54986d52dfeebe0c06cf9c709fcca72e2436 Mon Sep 17 00:00:00 2001 From: Cheng Lian Date: Sat, 18 Jul 2015 10:45:01 +0800 Subject: [PATCH 6/6] Updates .gitignore and adds an Avro generated Java source file This file wasn't committed due to the wrongly configured .gitignore --- .gitignore | 2 +- .../apache/spark/sql/parquet/test/avro/Suit.java | 13 +++++++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) create mode 100644 sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/Suit.java diff --git a/.gitignore b/.gitignore index b60b707f221e2..5b1910a3faf56 100644 --- a/.gitignore +++ b/.gitignore @@ -74,4 +74,4 @@ metastore/ warehouse/ TempStatsStore/ sql/hive-thriftserver/test_warehouses -sql/core/src/test/gen-java/ +sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/thrift diff --git a/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/Suit.java b/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/Suit.java new file mode 100644 index 0000000000000..6f0cf171fcdb1 --- /dev/null +++ b/sql/core/src/test/gen-java/org/apache/spark/sql/parquet/test/avro/Suit.java @@ -0,0 +1,13 @@ +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package org.apache.spark.sql.parquet.test.avro; +@SuppressWarnings("all") +@org.apache.avro.specific.AvroGenerated +public enum Suit { + SPADES, HEARTS, DIAMONDS, CLUBS ; + public static final org.apache.avro.Schema SCHEMA$ = new 
org.apache.avro.Schema.Parser().parse("{\"type\":\"enum\",\"name\":\"Suit\",\"namespace\":\"org.apache.spark.sql.parquet.test.avro\",\"symbols\":[\"SPADES\",\"HEARTS\",\"DIAMONDS\",\"CLUBS\"]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } +}