diff --git a/llap-common/src/java/org/apache/hadoop/hive/llap/metrics/ReadWriteLockMetrics.java b/llap-common/src/java/org/apache/hadoop/hive/llap/metrics/ReadWriteLockMetrics.java
index 7d52a15c35d4..189c684d28db 100644
--- a/llap-common/src/java/org/apache/hadoop/hive/llap/metrics/ReadWriteLockMetrics.java
+++ b/llap-common/src/java/org/apache/hadoop/hive/llap/metrics/ReadWriteLockMetrics.java
@@ -18,8 +18,7 @@
package org.apache.hadoop.hive.llap.metrics;
-import avro.shaded.com.google.common.annotations.VisibleForTesting;
-
+import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import java.io.Serializable;
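
Note: Avro 1.9 no longer bundles shaded Guava classes under avro.shaded.com.google.common, so this import now has to come from Guava itself; the old import only compiled because it leaked out of Avro 1.8's shaded jar.
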
diff --git a/pom.xml b/pom.xml
index e061f64fc166..4f1e5cab6987 100644
--- a/pom.xml
+++ b/pom.xml
@@ -125,7 +125,7 @@
     <arrow.version>0.10.0</arrow.version>
     <avatica.version>1.12.0</avatica.version>
-    <avro.version>1.8.2</avro.version>
+    <avro.version>1.9.1</avro.version>
     <bonecp.version>0.8.0.RELEASE</bonecp.version>
     <calcite.version>1.19.0</calcite.version>
     <datanucleus-api-jdo.version>4.2.4</datanucleus-api-jdo.version>
@@ -462,7 +462,6 @@
         <groupId>org.apache.avro</groupId>
         <artifactId>avro-mapred</artifactId>
-        <classifier>hadoop2</classifier>
         <version>${avro.version}</version>
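
Note: besides bumping avro.version from 1.8.2 to 1.9.1, the hadoop2 classifier is dropped because Avro 1.9 removed the hadoop1/hadoop2 split and now publishes avro-mapred as a single Hadoop-2-based artifact. The same classifier removal is applied to ql/pom.xml below.
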
diff --git a/ql/pom.xml b/ql/pom.xml
index 445911316403..3bcfbc5de8f3 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -157,7 +157,6 @@
       <groupId>org.apache.avro</groupId>
       <artifactId>avro-mapred</artifactId>
-      <classifier>hadoop2</classifier>
       <version>${avro.version}</version>
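
Note: mirrors the classifier removal from the root pom.xml above for ql's own avro-mapred dependency.
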
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/signature/RelTreeSignature.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/signature/RelTreeSignature.java
index 40e93f02a3a1..a4eaa1ad51db 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/signature/RelTreeSignature.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/signature/RelTreeSignature.java
@@ -18,24 +18,19 @@
package org.apache.hadoop.hive.ql.optimizer.signature;
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.util.ArrayList;
-import java.util.List;
-
+import com.fasterxml.jackson.annotation.JsonIdentityInfo;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.ObjectIdGenerators;
+import com.google.common.base.Objects;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.RelWriter;
-import org.apache.calcite.rel.externalize.RelWriterImpl;
import org.apache.calcite.sql.SqlExplainLevel;
-import org.apache.hadoop.hive.ql.optimizer.calcite.HiveRelJsonImpl;
-import org.apache.hadoop.hive.ql.optimizer.calcite.HiveRelOptUtil;
import org.apache.hadoop.hive.ql.optimizer.calcite.RelWriterImplCopy;
-import com.fasterxml.jackson.annotation.JsonIdentityInfo;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.annotation.ObjectIdGenerators;
-
-import avro.shaded.com.google.common.base.Objects;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.util.ArrayList;
+import java.util.List;
/**
* Operator tree signature.
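
Note: this file only swaps the Guava Objects import from Avro's shaded namespace to com.google.common.base.Objects, drops imports that appear unused after the change, and reorders the rest; no behavior changes.
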
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
index db8db1c9222a..8e882b41f313 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
@@ -272,7 +272,7 @@ private Object deserializePrimitive(Object datum, Schema fileSchema, Schema reco
int scale = 0;
try {
- scale = fileSchema.getJsonProp(AvroSerDe.AVRO_PROP_SCALE).asInt();
+ scale = (int) fileSchema.getObjectProp(AvroSerDe.AVRO_PROP_SCALE);
} catch(Exception ex) {
throw new AvroSerdeException("Failed to obtain scale value from file schema: " + fileSchema, ex);
}
@@ -288,7 +288,7 @@ private Object deserializePrimitive(Object datum, Schema fileSchema, Schema reco
int maxLength = 0;
try {
- maxLength = fileSchema.getJsonProp(AvroSerDe.AVRO_PROP_MAX_LENGTH).getValueAsInt();
+ maxLength = (int) fileSchema.getObjectProp(AvroSerDe.AVRO_PROP_MAX_LENGTH);
} catch (Exception ex) {
throw new AvroSerdeException("Failed to obtain maxLength value for char field from file schema: " + fileSchema, ex);
}
@@ -303,7 +303,7 @@ private Object deserializePrimitive(Object datum, Schema fileSchema, Schema reco
maxLength = 0;
try {
- maxLength = fileSchema.getJsonProp(AvroSerDe.AVRO_PROP_MAX_LENGTH).getValueAsInt();
+ maxLength = (int) fileSchema.getObjectProp(AvroSerDe.AVRO_PROP_MAX_LENGTH);
} catch (Exception ex) {
throw new AvroSerdeException("Failed to obtain maxLength value for varchar field from file schema: " + fileSchema, ex);
}
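
Avro 1.9 removed the Jackson-1-based Schema#getJsonProp, so the scale and maxLength reads above move to getObjectProp, which returns a plain java.lang.Object (an Integer for numeric properties). A minimal standalone sketch of the new call, assuming Avro 1.9.x on the classpath (the class name is illustrative, not part of the patch):

import org.apache.avro.Schema;

// Illustration only, not part of the patch: reading a numeric schema
// property under Avro 1.9.
public class ObjectPropExample {
  public static void main(String[] args) {
    Schema schema = new Schema.Parser().parse(
        "{\"type\": \"bytes\", \"logicalType\": \"decimal\","
            + " \"precision\": 10, \"scale\": 2}");
    Object raw = schema.getObjectProp("scale"); // an Integer under Avro 1.9
    int scale = (int) raw;                      // unboxes to 2
    System.out.println(scale);
  }
}

If the property is absent, getObjectProp returns null and the cast throws, which the surrounding catch(Exception ex) above still turns into an AvroSerdeException, so the old failure mode is preserved.
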
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/SchemaToTypeInfo.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/SchemaToTypeInfo.java
index 35d83bdb1af0..03f07bb8482d 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/SchemaToTypeInfo.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/SchemaToTypeInfo.java
@@ -136,8 +136,8 @@ public static TypeInfo generateTypeInfo(Schema schema,
int precision = 0;
int scale = 0;
try {
- precision = schema.getJsonProp(AvroSerDe.AVRO_PROP_PRECISION).getIntValue();
- scale = schema.getJsonProp(AvroSerDe.AVRO_PROP_SCALE).getIntValue();
+ precision = (int) schema.getObjectProp(AvroSerDe.AVRO_PROP_PRECISION);
+ scale = (int) schema.getObjectProp(AvroSerDe.AVRO_PROP_SCALE);
} catch (Exception ex) {
throw new AvroSerdeException("Failed to obtain scale value from file schema: " + schema, ex);
}
@@ -155,7 +155,7 @@ public static TypeInfo generateTypeInfo(Schema schema,
AvroSerDe.CHAR_TYPE_NAME.equalsIgnoreCase(schema.getProp(AvroSerDe.AVRO_PROP_LOGICAL_TYPE))) {
int maxLength = 0;
try {
- maxLength = schema.getJsonProp(AvroSerDe.AVRO_PROP_MAX_LENGTH).getValueAsInt();
+ maxLength = (int) schema.getObjectProp(AvroSerDe.AVRO_PROP_MAX_LENGTH);
} catch (Exception ex) {
throw new AvroSerdeException("Failed to obtain maxLength value from file schema: " + schema, ex);
}
@@ -166,7 +166,7 @@ public static TypeInfo generateTypeInfo(Schema schema,
.equalsIgnoreCase(schema.getProp(AvroSerDe.AVRO_PROP_LOGICAL_TYPE))) {
int maxLength = 0;
try {
- maxLength = schema.getJsonProp(AvroSerDe.AVRO_PROP_MAX_LENGTH).getValueAsInt();
+ maxLength = (int) schema.getObjectProp(AvroSerDe.AVRO_PROP_MAX_LENGTH);
} catch (Exception ex) {
throw new AvroSerdeException("Failed to obtain maxLength value from file schema: " + schema, ex);
}
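
Note: the same getJsonProp-to-getObjectProp migration as in AvroDeserializer above, applied to the precision, scale, and maxLength reads during schema-to-TypeInfo conversion; the existing catch blocks keep the error behavior intact.
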
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/TypeInfoToSchema.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/TypeInfoToSchema.java
index 4f8b05f5aeac..798d4632f6f0 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/TypeInfoToSchema.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/TypeInfoToSchema.java
@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.hive.serde2.avro;
+import static org.apache.avro.Schema.Field.NULL_DEFAULT_VALUE;
+
import org.apache.avro.Schema;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
@@ -28,8 +30,6 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
-import org.codehaus.jackson.JsonNode;
-import org.codehaus.jackson.node.JsonNodeFactory;
import java.util.ArrayList;
import java.util.Arrays;
@@ -235,14 +235,13 @@ private Schema createAvroArray(TypeInfo typeInfo) {
   private List<Schema.Field> getFields(Schema.Field schemaField) {
     List<Schema.Field> fields = new ArrayList<>();
-    JsonNode nullDefault = JsonNodeFactory.instance.nullNode();
     if (schemaField.schema().getType() == Schema.Type.RECORD) {
       for (Schema.Field field : schemaField.schema().getFields()) {
-        fields.add(new Schema.Field(field.name(), field.schema(), field.doc(), nullDefault));
+        fields.add(new Schema.Field(field.name(), field.schema(), field.doc(), NULL_DEFAULT_VALUE));
       }
     } else {
       fields.add(new Schema.Field(schemaField.name(), schemaField.schema(), schemaField.doc(),
-          nullDefault));
+          NULL_DEFAULT_VALUE));
     }
     return fields;
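
Avro 1.9 also removed the Schema.Field constructors that took a Jackson JsonNode default value; the replacement takes a plain Object, with the Schema.Field.NULL_DEFAULT_VALUE sentinel standing in for the old JsonNodeFactory.instance.nullNode(). A minimal sketch of the new constructor, assuming Avro 1.9.x (class and field names here are illustrative, not from the patch):

import static org.apache.avro.Schema.Field.NULL_DEFAULT_VALUE;

import org.apache.avro.Schema;

// Illustration only, not part of the patch: building a nullable field
// with an explicit null default via the Object-based constructor.
public class NullDefaultExample {
  public static void main(String[] args) {
    Schema nullableString = Schema.createUnion(
        Schema.create(Schema.Type.NULL), Schema.create(Schema.Type.STRING));
    Schema.Field field = new Schema.Field(
        "name", nullableString, "an optional name", NULL_DEFAULT_VALUE);
    System.out.println(field.hasDefaultValue()); // prints true
  }
}
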