Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,8 @@

import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;

/**
* ExprNodeConstantEvaluator.
Expand All @@ -32,19 +30,11 @@
public class ExprNodeConstantEvaluator extends ExprNodeEvaluator {

protected ExprNodeConstantDesc expr;
transient ObjectInspector writableObjectInspector;
transient Object writableValue;
transient ConstantObjectInspector writableObjectInspector;

public ExprNodeConstantEvaluator(ExprNodeConstantDesc expr) {
this.expr = expr;
PrimitiveCategory pc = ((PrimitiveTypeInfo) expr.getTypeInfo())
.getPrimitiveCategory();
writableObjectInspector = PrimitiveObjectInspectorFactory
.getPrimitiveWritableObjectInspector(pc);
// Convert from Java to Writable
writableValue = PrimitiveObjectInspectorFactory
.getPrimitiveJavaObjectInspector(pc).getPrimitiveWritableObject(
expr.getValue());
writableObjectInspector = expr.getWritableObjectInspector();
}

@Override
Expand All @@ -54,7 +44,7 @@ public ObjectInspector initialize(ObjectInspector rowInspector) throws HiveExcep

@Override
public Object evaluate(Object row) throws HiveException {
return writableValue;
return writableObjectInspector.getWritableConstantValue();
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -164,6 +164,7 @@
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFMap;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFMapKeys;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFMapValues;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFNamedStruct;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrGreaterThan;
Expand Down Expand Up @@ -404,6 +405,7 @@ public final class FunctionRegistry {
registerGenericUDF("array", GenericUDFArray.class);
registerGenericUDF("map", GenericUDFMap.class);
registerGenericUDF("struct", GenericUDFStruct.class);
registerGenericUDF("named_struct", GenericUDFNamedStruct.class);
registerGenericUDF("create_union", GenericUDFUnion.class);

registerGenericUDF("case", GenericUDFCase.class);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,10 @@
import java.io.Serializable;

import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

Expand Down Expand Up @@ -52,6 +56,19 @@ public Object getValue() {
return value;
}

@Override
public ConstantObjectInspector getWritableObjectInspector() {
  // Determine which primitive category this constant's type belongs to.
  PrimitiveCategory category =
      ((PrimitiveTypeInfo) getTypeInfo()).getPrimitiveCategory();
  // Translate the stored Java-native value into its Writable counterpart.
  Object writable = PrimitiveObjectInspectorFactory
      .getPrimitiveJavaObjectInspector(category)
      .getPrimitiveWritableObject(getValue());
  // Wrap type and value together in a constant object inspector.
  return PrimitiveObjectInspectorFactory
      .getPrimitiveWritableConstantObjectInspector(category, writable);
}


@Override
public String toString() {
return "Const " + typeInfo.toString() + " " + value;
Expand Down
7 changes: 7 additions & 0 deletions ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDesc.java
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,8 @@

import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

/**
* ExprNodeDesc.
Expand Down Expand Up @@ -64,6 +66,11 @@ public String getExprString() {
return null;
}

/**
 * Returns the standard writable ObjectInspector for this expression's type.
 * Subclasses representing constants may override this to return a more
 * specific (constant) inspector.
 */
public ObjectInspector getWritableObjectInspector() {
return TypeInfoUtils
.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
}

@Explain(displayName = "type")
public String getTypeString() {
return typeInfo.getTypeName();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -145,9 +145,7 @@ public static ExprNodeGenericFuncDesc newInstance(GenericUDF genericUDF,
List<ExprNodeDesc> children) throws UDFArgumentException {
ObjectInspector[] childrenOIs = new ObjectInspector[children.size()];
for (int i = 0; i < childrenOIs.length; i++) {
childrenOIs[i] = TypeInfoUtils
.getStandardWritableObjectInspectorFromTypeInfo(children.get(i)
.getTypeInfo());
childrenOIs[i] = children.get(i).getWritableObjectInspector();
}

ObjectInspector oi = genericUDF.initialize(childrenOIs);
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.hadoop.hive.ql.udf.generic;

import java.util.ArrayList;
import java.util.Arrays;

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantStringObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

@Description(name = "named_struct",
    value = "_FUNC_(name1, val1, name2, val2, ...) - Creates a struct with the given " +
    "field names and values")
public class GenericUDFNamedStruct extends GenericUDF {
  // Per-row buffer for the struct's field values, sized in initialize()
  // and reused on every evaluate() call.
  Object[] ret;

  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments)
      throws UDFArgumentException {
    // Arguments alternate: name0, value0, name1, value1, ...
    int numFields = arguments.length;
    if (numFields % 2 == 1) {
      throw new UDFArgumentLengthException(
          "NAMED_STRUCT expects an even number of arguments.");
    }
    int fieldCount = numFields / 2;
    ret = new Object[fieldCount];

    ArrayList<String> fieldNames = new ArrayList<String>(fieldCount);
    ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>(fieldCount);
    for (int f = 0; f < numFields; f += 2) {
      // Field names (even 0-based positions) must be constant strings so the
      // struct's schema can be fixed at initialization time.
      if (!(arguments[f] instanceof WritableConstantStringObjectInspector)) {
        throw new UDFArgumentTypeException(f, "Even arguments" +
            " to NAMED_STRUCT must be a constant STRING." + arguments[f].toString());
      }
      WritableConstantStringObjectInspector nameOI =
          (WritableConstantStringObjectInspector) arguments[f];
      fieldNames.add(nameOI.getWritableConstantValue().toString());
      fieldOIs.add(arguments[f + 1]);
    }
    return ObjectInspectorFactory.getStandardStructObjectInspector(
        fieldNames, fieldOIs);
  }

  @Override
  public Object evaluate(DeferredObject[] arguments) throws HiveException {
    // Values sit at the odd 0-based positions; copy them into the buffer.
    int fieldCount = ret.length;
    for (int i = 0; i < fieldCount; i++) {
      ret[i] = arguments[2 * i + 1].get();
    }
    return ret;
  }

  @Override
  public String getDisplayString(String[] children) {
    // Render as named_struct(arg0,arg1,...).
    StringBuilder buf = new StringBuilder("named_struct(");
    for (int i = 0; i < children.length; i++) {
      if (i != 0) {
        buf.append(',');
      }
      buf.append(children[i]);
    }
    return buf.append(')').toString();
  }
}
9 changes: 9 additions & 0 deletions ql/src/test/queries/clientpositive/udf_named_struct.q
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
DESCRIBE FUNCTION named_struct;
DESCRIBE FUNCTION EXTENDED named_struct;

EXPLAIN
SELECT named_struct("foo", 1, "bar", 2),
named_struct("foo", 1, "bar", 2).foo FROM src LIMIT 1;

SELECT named_struct("foo", 1, "bar", 2),
named_struct("foo", 1, "bar", 2).foo FROM src LIMIT 1;
1 change: 1 addition & 0 deletions ql/src/test/results/clientpositive/show_functions.q.out
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,7 @@ max
min
minute
month
named_struct
negative
ngrams
not
Expand Down
63 changes: 63 additions & 0 deletions ql/src/test/results/clientpositive/udf_named_struct.q.out
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
PREHOOK: query: DESCRIBE FUNCTION named_struct
PREHOOK: type: DESCFUNCTION
POSTHOOK: query: DESCRIBE FUNCTION named_struct
POSTHOOK: type: DESCFUNCTION
named_struct(name1, val1, name2, val2, ...) - Creates a struct with the given field names and values
PREHOOK: query: DESCRIBE FUNCTION EXTENDED named_struct
PREHOOK: type: DESCFUNCTION
POSTHOOK: query: DESCRIBE FUNCTION EXTENDED named_struct
POSTHOOK: type: DESCFUNCTION
named_struct(name1, val1, name2, val2, ...) - Creates a struct with the given field names and values
PREHOOK: query: EXPLAIN
SELECT named_struct("foo", 1, "bar", 2),
named_struct("foo", 1, "bar", 2).foo FROM src LIMIT 1
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
SELECT named_struct("foo", 1, "bar", 2),
named_struct("foo", 1, "bar", 2).foo FROM src LIMIT 1
POSTHOOK: type: QUERY
ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION named_struct "foo" 1 "bar" 2)) (TOK_SELEXPR (. (TOK_FUNCTION named_struct "foo" 1 "bar" 2) foo))) (TOK_LIMIT 1)))

STAGE DEPENDENCIES:
Stage-1 is a root stage
Stage-0 is a root stage

STAGE PLANS:
Stage: Stage-1
Map Reduce
Alias -> Map Operator Tree:
src
TableScan
alias: src
Select Operator
expressions:
expr: named_struct('foo',1,'bar',2)
type: struct<foo:int,bar:int>
expr: named_struct('foo',1,'bar',2).foo
type: int
outputColumnNames: _col0, _col1
Limit
File Output Operator
compressed: false
GlobalTableId: 0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

Stage: Stage-0
Fetch Operator
limit: 1


PREHOOK: query: SELECT named_struct("foo", 1, "bar", 2),
named_struct("foo", 1, "bar", 2).foo FROM src LIMIT 1
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: file:/var/folders/C4/C40caRNsEM4C4yVangruonVUe7Y/-Tmp-/jonchang/hive_2011-08-11_01-02-24_658_503462155153078291/-mr-10000
POSTHOOK: query: SELECT named_struct("foo", 1, "bar", 2),
named_struct("foo", 1, "bar", 2).foo FROM src LIMIT 1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: file:/var/folders/C4/C40caRNsEM4C4yVangruonVUe7Y/-Tmp-/jonchang/hive_2011-08-11_01-02-24_658_503462155153078291/-mr-10000
{"foo":1,"bar":2} 1
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.serde2.objectinspector;

/**
 * ConstantObjectInspector. This interface should be implemented by
 * ObjectInspectors which represent constant values and can return them without
 * an evaluation.
 */
public interface ConstantObjectInspector extends ObjectInspector {

/**
 * Returns the constant value, in its Writable representation, without
 * requiring a row object to inspect.
 */
Object getWritableConstantValue();

}
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,19 @@

import java.util.HashMap;

import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.io.Text;

/**
* PrimitiveObjectInspectorFactory is the primary way to create new
Expand Down Expand Up @@ -136,6 +145,40 @@ public static AbstractPrimitiveWritableObjectInspector getPrimitiveWritableObjec
return result;
}

/**
 * Returns a PrimitiveWritableObjectInspector which implements ConstantObjectInspector
 * for the PrimitiveCategory.
 *
 * @param primitiveCategory the primitive type of the constant
 * @param value the constant's value, already converted to the matching Hadoop
 *        Writable type for the category (e.g. IntWritable for INT); ignored
 *        for VOID
 * @return a constant object inspector wrapping the given writable value
 * @throws RuntimeException if no constant inspector exists for the category
 */
public static ConstantObjectInspector getPrimitiveWritableConstantObjectInspector(
PrimitiveCategory primitiveCategory, Object value) {
switch (primitiveCategory) {
case BOOLEAN:
return new WritableConstantBooleanObjectInspector((BooleanWritable)value);
case BYTE:
return new WritableConstantByteObjectInspector((ByteWritable)value);
case SHORT:
return new WritableConstantShortObjectInspector((ShortWritable)value);
case INT:
return new WritableConstantIntObjectInspector((IntWritable)value);
case LONG:
return new WritableConstantLongObjectInspector((LongWritable)value);
case FLOAT:
return new WritableConstantFloatObjectInspector((FloatWritable)value);
case DOUBLE:
return new WritableConstantDoubleObjectInspector((DoubleWritable)value);
case STRING:
return new WritableConstantStringObjectInspector((Text)value);
case VOID:
// VOID takes no value; the inspector itself represents the null constant.
return new WritableConstantVoidObjectInspector();
default:
throw new RuntimeException("Internal error: Cannot find "
+ "ConstantObjectInspector for " + primitiveCategory);
}
}

/**
* Returns the PrimitiveJavaObjectInspector for the PrimitiveCategory.
*
Expand Down
Loading