@@ -18,8 +18,7 @@
 
 package org.apache.hadoop.hive.llap.metrics;
 
-import avro.shaded.com.google.common.annotations.VisibleForTesting;
-
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 
 import java.io.Serializable;
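Note: Avro 1.9 stopped bundling its shaded Guava classes, so imports under avro.shaded.com.google.common silently break on the upgrade; the fix is to depend on Guava's real packages, as this hunk does. A minimal sketch of the corrected pattern (the class and field below are hypothetical, purely for illustration; only the imports mirror this patch):

    import com.google.common.annotations.VisibleForTesting;
    import com.google.common.base.Preconditions;

    // Hypothetical holder class, for illustration only.
    public class MetricsNamePrefix {
      private final String prefix;

      public MetricsNamePrefix(String prefix) {
        // Guava now has to come from the real com.google.common packages.
        this.prefix = Preconditions.checkNotNull(prefix, "prefix must not be null");
      }

      @VisibleForTesting
      String prefix() {
        return prefix;
      }
    }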
pom.xml: 1 addition & 2 deletions
@@ -125,7 +125,7 @@
     <!-- Include arrow for LlapOutputFormatService -->
     <arrow.version>0.10.0</arrow.version>
     <avatica.version>1.12.0</avatica.version>
-    <avro.version>1.8.2</avro.version>
+    <avro.version>1.9.1</avro.version>
     <bonecp.version>0.8.0.RELEASE</bonecp.version>
     <calcite.version>1.19.0</calcite.version>
     <datanucleus-api-jdo.version>4.2.4</datanucleus-api-jdo.version>
@@ -462,7 +462,6 @@
     <dependency>
       <groupId>org.apache.avro</groupId>
       <artifactId>avro-mapred</artifactId>
-      <classifier>hadoop2</classifier>
       <version>${avro.version}</version>
     </dependency>
     <dependency>
ql/pom.xml: 0 additions & 1 deletion
@@ -157,7 +157,6 @@
     <dependency>
       <groupId>org.apache.avro</groupId>
       <artifactId>avro-mapred</artifactId>
-      <classifier>hadoop2</classifier>
       <version>${avro.version}</version>
     </dependency>
     <dependency>
@@ -18,24 +18,19 @@
 
 package org.apache.hadoop.hive.ql.optimizer.signature;
 
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.util.ArrayList;
+import java.util.List;
+
+import com.fasterxml.jackson.annotation.JsonIdentityInfo;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.ObjectIdGenerators;
+import com.google.common.base.Objects;
 import org.apache.calcite.rel.RelNode;
 import org.apache.calcite.rel.RelWriter;
 import org.apache.calcite.rel.externalize.RelWriterImpl;
 import org.apache.calcite.sql.SqlExplainLevel;
 import org.apache.hadoop.hive.ql.optimizer.calcite.HiveRelJsonImpl;
 import org.apache.hadoop.hive.ql.optimizer.calcite.HiveRelOptUtil;
 import org.apache.hadoop.hive.ql.optimizer.calcite.RelWriterImplCopy;
-
-import com.fasterxml.jackson.annotation.JsonIdentityInfo;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.annotation.ObjectIdGenerators;
-
-import avro.shaded.com.google.common.base.Objects;
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.util.ArrayList;
-import java.util.List;
 
 /**
  * Operator tree signature.
@@ -272,7 +272,7 @@ private Object deserializePrimitive(Object datum, Schema fileSchema, Schema reco
 
     int scale = 0;
     try {
-      scale = fileSchema.getJsonProp(AvroSerDe.AVRO_PROP_SCALE).asInt();
+      scale = (int) fileSchema.getObjectProp(AvroSerDe.AVRO_PROP_SCALE);
     } catch(Exception ex) {
       throw new AvroSerdeException("Failed to obtain scale value from file schema: " + fileSchema, ex);
     }
@@ -288,7 +288,7 @@ private Object deserializePrimitive(Object datum, Schema fileSchema, Schema reco
 
     int maxLength = 0;
     try {
-      maxLength = fileSchema.getJsonProp(AvroSerDe.AVRO_PROP_MAX_LENGTH).getValueAsInt();
+      maxLength = (int) fileSchema.getObjectProp(AvroSerDe.AVRO_PROP_MAX_LENGTH);
     } catch (Exception ex) {
       throw new AvroSerdeException("Failed to obtain maxLength value for char field from file schema: " + fileSchema, ex);
     }
@@ -303,7 +303,7 @@ private Object deserializePrimitive(Object datum, Schema fileSchema, Schema reco
 
     maxLength = 0;
     try {
-      maxLength = fileSchema.getJsonProp(AvroSerDe.AVRO_PROP_MAX_LENGTH).getValueAsInt();
+      maxLength = (int) fileSchema.getObjectProp(AvroSerDe.AVRO_PROP_MAX_LENGTH);
     } catch (Exception ex) {
       throw new AvroSerdeException("Failed to obtain maxLength value for varchar field from file schema: " + fileSchema, ex);
     }
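Schema.getJsonProp was removed in Avro 1.9; its replacement getObjectProp returns the property as a plain Object, with integer JSON values materialized as Integer, which is why the bare (int) casts above are safe for well-formed schemas. A self-contained sketch of the reader-side pattern (the inline schema literal is an assumption, shaped like what Hive's AvroSerDe writes for decimals):

    import org.apache.avro.Schema;

    public class ObjectPropDemo {
      public static void main(String[] args) {
        // A decimal-style schema carrying numeric precision/scale properties.
        Schema schema = new Schema.Parser().parse(
            "{\"type\":\"bytes\",\"logicalType\":\"decimal\",\"precision\":10,\"scale\":2}");

        // Avro 1.9: getObjectProp returns Object; integer JSON props arrive as Integer,
        // so the cast unboxes cleanly.
        int precision = (int) schema.getObjectProp("precision");
        int scale = (int) schema.getObjectProp("scale");
        System.out.println("precision=" + precision + ", scale=" + scale);
      }
    }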
@@ -136,8 +136,8 @@ public static TypeInfo generateTypeInfo(Schema schema,
     int precision = 0;
     int scale = 0;
     try {
-      precision = schema.getJsonProp(AvroSerDe.AVRO_PROP_PRECISION).getIntValue();
-      scale = schema.getJsonProp(AvroSerDe.AVRO_PROP_SCALE).getIntValue();
+      precision = (int) schema.getObjectProp(AvroSerDe.AVRO_PROP_PRECISION);
+      scale = (int) schema.getObjectProp(AvroSerDe.AVRO_PROP_SCALE);
     } catch (Exception ex) {
       throw new AvroSerdeException("Failed to obtain scale value from file schema: " + schema, ex);
     }
@@ -155,7 +155,7 @@ public static TypeInfo generateTypeInfo(Schema schema,
     AvroSerDe.CHAR_TYPE_NAME.equalsIgnoreCase(schema.getProp(AvroSerDe.AVRO_PROP_LOGICAL_TYPE))) {
       int maxLength = 0;
       try {
-        maxLength = schema.getJsonProp(AvroSerDe.AVRO_PROP_MAX_LENGTH).getValueAsInt();
+        maxLength = (int) schema.getObjectProp(AvroSerDe.AVRO_PROP_MAX_LENGTH);
       } catch (Exception ex) {
         throw new AvroSerdeException("Failed to obtain maxLength value from file schema: " + schema, ex);
       }
@@ -166,7 +166,7 @@ public static TypeInfo generateTypeInfo(Schema schema,
         .equalsIgnoreCase(schema.getProp(AvroSerDe.AVRO_PROP_LOGICAL_TYPE))) {
       int maxLength = 0;
       try {
-        maxLength = schema.getJsonProp(AvroSerDe.AVRO_PROP_MAX_LENGTH).getValueAsInt();
+        maxLength = (int) schema.getObjectProp(AvroSerDe.AVRO_PROP_MAX_LENGTH);
       } catch (Exception ex) {
         throw new AvroSerdeException("Failed to obtain maxLength value from file schema: " + schema, ex);
       }
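On the writer side the same migration applies: Avro 1.9's addProp(String, Object) takes plain Java values, so no Jackson node types are needed to produce the properties read back above. A small round-trip sketch (the literal "maxLength" mirrors AvroSerDe.AVRO_PROP_MAX_LENGTH, but is hard-coded here as an assumption):

    import org.apache.avro.Schema;

    public class PropRoundTrip {
      public static void main(String[] args) {
        Schema schema = Schema.create(Schema.Type.STRING);

        // Writer side: attach the property as a plain Object.
        schema.addProp("maxLength", 64);

        // Reader side: read it back without any Jackson types.
        int maxLength = (int) schema.getObjectProp("maxLength");
        System.out.println("maxLength=" + maxLength);
      }
    }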
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hive.serde2.avro;
 
+import static org.apache.avro.Schema.Field.NULL_DEFAULT_VALUE;
+
 import org.apache.avro.Schema;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
@@ -28,8 +30,6 @@
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
-import org.codehaus.jackson.JsonNode;
-import org.codehaus.jackson.node.JsonNodeFactory;
 
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -235,14 +235,13 @@ private Schema createAvroArray(TypeInfo typeInfo) {
   private List<Schema.Field> getFields(Schema.Field schemaField) {
     List<Schema.Field> fields = new ArrayList<Schema.Field>();
 
-    JsonNode nullDefault = JsonNodeFactory.instance.nullNode();
     if (schemaField.schema().getType() == Schema.Type.RECORD) {
       for (Schema.Field field : schemaField.schema().getFields()) {
-        fields.add(new Schema.Field(field.name(), field.schema(), field.doc(), nullDefault));
+        fields.add(new Schema.Field(field.name(), field.schema(), field.doc(), NULL_DEFAULT_VALUE));
       }
     } else {
       fields.add(new Schema.Field(schemaField.name(), schemaField.schema(), schemaField.doc(),
-          nullDefault));
+          NULL_DEFAULT_VALUE));
     }
 
     return fields;
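With Jackson gone from Avro's public API, a field's default value is now a plain Object, and an explicit null default is expressed through the Schema.Field.NULL_DEFAULT_VALUE sentinel; passing a bare null to the Schema.Field constructor means "no default" instead. A minimal sketch of building a nullable field this way (the record and field names are made up for illustration):

    import static org.apache.avro.Schema.Field.NULL_DEFAULT_VALUE;

    import java.util.Arrays;
    import java.util.Collections;

    import org.apache.avro.Schema;

    public class NullDefaultDemo {
      public static void main(String[] args) {
        // A [null, string] union, so a null default is legal for the field.
        Schema nullableString = Schema.createUnion(
            Arrays.asList(Schema.create(Schema.Type.NULL), Schema.create(Schema.Type.STRING)));

        // NULL_DEFAULT_VALUE marks an explicit null default.
        Schema.Field field = new Schema.Field("name", nullableString, "field doc", NULL_DEFAULT_VALUE);

        Schema record = Schema.createRecord("Example", null, "org.example", false,
            Collections.singletonList(field));
        System.out.println(record.toString(true));
      }
    }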