This is an automated email from the ASF dual-hosted git repository.

krisztiankasa pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new fd92b392639 HIVE-27690: Handle casting NULL literal to complex type (Krisztian Kasa, reviewed by Laszlo Vegh)
fd92b392639 is described below

commit fd92b3926393f0366b87cd55d5a0ad27968f18db
Author: Krisztian Kasa <[email protected]>
AuthorDate: Mon Dec 18 06:17:10 2023 +0100

    HIVE-27690: Handle casting NULL literal to complex type (Krisztian Kasa, reviewed by Laszlo Vegh)
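
    For illustration, the new qtest added by this change (cast_null_to_complex.q, included
    further down in this diff) exercises the syntax this patch enables, e.g.:

        select cast(null as map<int, string>),
               cast(null as array<map<int, string>>),
               cast(null as struct<f1:array<map<int, string>>, f2:struct<a:double, b:string>>);

    (Excerpt of the queries in that file; see the full qtest and its expected output below.)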
---
 .../hadoop/hive/ql/parse/IdentifiersParser.g       |   2 +-
 .../hadoop/hive/ql/exec/FunctionRegistry.java      |   3 +
 .../optimizer/calcite/translator/ASTConverter.java |  82 ++++------
 .../calcite/translator/RexNodeConverter.java       |   8 +-
 .../apache/hadoop/hive/ql/parse/ParseUtils.java    |  51 +++++-
 .../ql/parse/type/ExprNodeDescExprFactory.java     | 108 ++++++-------
 .../hive/ql/parse/type/TypeCheckProcFactory.java   |  11 +-
 .../hadoop/hive/ql/plan/ExprNodeConstantDesc.java  |   3 +
 .../hive/ql/udf/generic/GenericUDFToArray.java     |  59 +++++++
 .../hive/ql/udf/generic/GenericUDFToMap.java       |  63 ++++++++
 .../hive/ql/udf/generic/GenericUDFToStruct.java    |  58 +++++++
 .../calcite/translator/TestASTConverter.java       | 178 +++++++++++----------
 .../ql/parse/type/TestExprNodeDescExprFactory.java | 175 ++++++++++++++++++++
 .../queries/clientpositive/cast_null_to_complex.q  |  13 ++
 .../queries/clientpositive/empty_result_ctas.q     |  12 ++
 .../results/clientpositive/llap/analyze_npe.q.out  |   7 +-
 .../clientpositive/llap/cast_null_to_complex.q.out | 107 +++++++++++++
 .../clientpositive/llap/empty_result_ctas.q.out    | 124 +++++++++++++-
 .../clientpositive/llap/show_functions.q.out       |   6 +
 19 files changed, 875 insertions(+), 195 deletions(-)

diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
index 31b63e66345..fc34da64389 100644
--- a/parser/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
@@ -327,7 +327,7 @@ castExpression
     LPAREN
           expression
           KW_AS
-          toType=primitiveType
+          toType=type
           (fmt=KW_FORMAT StringLiteral)?
     RPAREN
     // simple cast
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index 1f411971a80..c0b9519944d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -486,6 +486,9 @@ public final class FunctionRegistry {
     system.registerGenericUDF("!", GenericUDFOPNot.class);
     system.registerGenericUDF("between", GenericUDFBetween.class);
     system.registerGenericUDF("in_bloom_filter", 
GenericUDFInBloomFilter.class);
+    system.registerGenericUDF("toMap", GenericUDFToMap.class);
+    system.registerGenericUDF("toArray", GenericUDFToArray.class);
+    system.registerGenericUDF("toStruct", GenericUDFToStruct.class);
 
     // Utility UDFs
     system.registerUDF("version", UDFVersion.class, false);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTConverter.java
index dec4deddd7d..2c0fe365dac 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTConverter.java
@@ -180,9 +180,7 @@ public class ASTConverter {
     ASTBuilder select = ASTBuilder.construct(HiveParser.TOK_SELECT, "TOK_SELECT");
     for (int i = 0; i < dataType.getFieldCount(); ++i) {
       RelDataTypeField fieldType = dataType.getFieldList().get(i);
-      select.add(ASTBuilder.selectExpr(
-              createNullField(fieldType.getType()),
-              fieldType.getName()));
+      select.add(ASTBuilder.selectExpr(createNullField(fieldType.getType()), fieldType.getName()));
     }
 
     ASTNode insert = ASTBuilder.
@@ -203,53 +201,52 @@ public class ASTConverter {
       return ASTBuilder.construct(HiveParser.TOK_NULL, "TOK_NULL").node();
     }
 
+    ASTNode astNode = convertType(fieldType);
+    return ASTBuilder.construct(HiveParser.TOK_FUNCTION, "TOK_FUNCTION")
+        .add(astNode)
+        .add(HiveParser.TOK_NULL, "TOK_NULL")
+        .node();
+  }
+
+  static ASTNode convertType(RelDataType fieldType) {
+    if (fieldType.getSqlTypeName() == SqlTypeName.NULL) {
+      return ASTBuilder.construct(HiveParser.TOK_NULL, "TOK_NULL").node();
+    }
+
     if (fieldType.getSqlTypeName() == SqlTypeName.ROW) {
-      ASTBuilder namedStructCallNode = ASTBuilder.construct(HiveParser.TOK_FUNCTION, "TOK_FUNCTION");
-      namedStructCallNode.add(HiveParser.Identifier, "named_struct");
+      ASTBuilder columnListNode = ASTBuilder.construct(HiveParser.TOK_TABCOLLIST, "TOK_TABCOLLIST");
       for (RelDataTypeField structFieldType : fieldType.getFieldList()) {
-        namedStructCallNode.add(HiveParser.Identifier, structFieldType.getName());
-        namedStructCallNode.add(createNullField(structFieldType.getType()));
+        ASTNode colNode = ASTBuilder.construct(HiveParser.TOK_TABCOL, "TOK_TABCOL")
+            .add(HiveParser.Identifier, structFieldType.getName())
+            .add(convertType(structFieldType.getType()))
+            .node();
+        columnListNode.add(colNode);
       }
-      return namedStructCallNode.node();
+      return ASTBuilder.construct(HiveParser.TOK_STRUCT, "TOK_STRUCT").add(columnListNode).node();
     }
 
     if (fieldType.getSqlTypeName() == SqlTypeName.MAP) {
-      ASTBuilder mapCallNode = ASTBuilder.construct(HiveParser.TOK_FUNCTION, "TOK_FUNCTION");
-      mapCallNode.add(HiveParser.Identifier, "map");
-      mapCallNode.add(createNullField(fieldType.getKeyType()));
-      mapCallNode.add(createNullField(fieldType.getValueType()));
+      ASTBuilder mapCallNode = ASTBuilder.construct(HiveParser.TOK_MAP, "TOK_MAP");
+      mapCallNode.add(convertType(fieldType.getKeyType()));
+      mapCallNode.add(convertType(fieldType.getValueType()));
       return mapCallNode.node();
     }
 
     if (fieldType.getSqlTypeName() == SqlTypeName.ARRAY) {
-      ASTBuilder arrayCallNode = ASTBuilder.construct(HiveParser.TOK_FUNCTION, "TOK_FUNCTION");
-      arrayCallNode.add(HiveParser.Identifier, "array");
-      arrayCallNode.add(createNullField(fieldType.getComponentType()));
+      ASTBuilder arrayCallNode = ASTBuilder.construct(HiveParser.TOK_LIST, "TOK_LIST");
+      arrayCallNode.add(convertType(fieldType.getComponentType()));
       return arrayCallNode.node();
     }
 
-    return createCastNull(fieldType);
-  }
-
-  private static ASTNode createCastNull(RelDataType fieldType) {
     HiveToken ht = TypeConverter.hiveToken(fieldType);
-    ASTNode typeNode;
-    if (ht == null) {
-      typeNode = ASTBuilder.construct(
-              HiveParser.Identifier, fieldType.getSqlTypeName().getName().toLowerCase()).node();
-    } else {
-      ASTBuilder typeNodeBuilder = ASTBuilder.construct(ht.type, ht.text);
-      if (ht.args != null) {
-        for (String castArg : ht.args) {
-          typeNodeBuilder.add(HiveParser.Identifier, castArg);
-        }
+    ASTBuilder astBldr = ASTBuilder.construct(ht.type, ht.text);
+    if (ht.args != null) {
+      for (String castArg : ht.args) {
+        astBldr.add(HiveParser.Identifier, castArg);
       }
-      typeNode = typeNodeBuilder.node();
     }
-    return ASTBuilder.construct(HiveParser.TOK_FUNCTION, "TOK_FUNCTION")
-            .add(typeNode)
-            .add(HiveParser.TOK_NULL, "TOK_NULL")
-            .node();
+
+    return astBldr.node();
   }
 
   private ASTNode convert() throws CalciteSemanticException {
@@ -1042,22 +1039,7 @@ public class ASTConverter {
           Collections.singletonList(SqlFunctionConverter.buildAST(SqlStdOperatorTable.IS_NOT_DISTINCT_FROM, astNodeLst, call.getType())), call.getType());
       case CAST:
         assert(call.getOperands().size() == 1);
-        if (call.getType().isStruct() ||
-            SqlTypeName.MAP.equals(call.getType().getSqlTypeName()) ||
-            SqlTypeName.ARRAY.equals(call.getType().getSqlTypeName())) {
-          // cast for complex types can be ignored safely because explicit casting on such
-          // types are not possible, implicit casting e.g. CAST(ROW__ID as <...>) can be ignored
-          return call.getOperands().get(0).accept(this);
-        }
-
-        HiveToken ht = TypeConverter.hiveToken(call.getType());
-        ASTBuilder astBldr = ASTBuilder.construct(ht.type, ht.text);
-        if (ht.args != null) {
-          for (String castArg : ht.args) {
-            astBldr.add(HiveParser.Identifier, castArg);
-          }
-        }
-        astNodeLst.add(astBldr.node());
+        astNodeLst.add(convertType(call.getType()));
         astNodeLst.add(call.getOperands().get(0).accept(this));
         break;
       case EXTRACT:
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
index 89d6024cc06..e6c41c3b620 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
@@ -76,11 +76,14 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCase;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFTimestamp;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToArray;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToBinary;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToChar;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToDate;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToDecimal;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToMap;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToString;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToStruct;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToTimestampLocalTZ;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUnixTimeStamp;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToVarchar;
@@ -334,7 +337,10 @@ public class RexNodeConverter {
           || (udf instanceof GenericUDFToString)
           || (udf instanceof GenericUDFToDecimal) || (udf instanceof GenericUDFToDate)
           || (udf instanceof GenericUDFTimestamp) || (udf instanceof GenericUDFToTimestampLocalTZ)
-          || (udf instanceof GenericUDFToBinary) || castExprUsingUDFBridge(udf)) {
+          || (udf instanceof GenericUDFToBinary) || castExprUsingUDFBridge(udf)
+          || (udf instanceof GenericUDFToMap)
+          || (udf instanceof GenericUDFToArray)
+          || (udf instanceof GenericUDFToStruct)) {
         castExpr = rexBuilder.makeAbstractCast(returnType, childRexNodeLst.get(0));
       }
     }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
index a3a60f3d5c4..db959192db7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
@@ -27,6 +27,7 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Queue;
@@ -60,13 +61,20 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.getTypeStringFromAST;
+import static org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.unescapeIdentifier;
+
 
 /**
  * Library of utility functions used in the parse code.
@@ -204,7 +212,7 @@ public final class ParseUtils {
 
     switch(filterCondn.getType()) {
     case HiveParser.TOK_TABLE_OR_COL:
-      String tableOrCol = SemanticAnalyzer.unescapeIdentifier(filterCondn.getChild(0).getText()
+      String tableOrCol = unescapeIdentifier(filterCondn.getChild(0).getText()
           .toLowerCase());
       return getIndex(tabAliases, tableOrCol);
     case HiveParser.Identifier:
@@ -725,4 +733,45 @@ public final class ParseUtils {
     }
   }
 
+  public static TypeInfo getComplexTypeTypeInfo(ASTNode typeNode) throws SemanticException {
+    switch (typeNode.getType()) {
+      case HiveParser.TOK_LIST:
+        ListTypeInfo listTypeInfo = new ListTypeInfo();
+        listTypeInfo.setListElementTypeInfo(getComplexTypeTypeInfo((ASTNode) typeNode.getChild(0)));
+        return listTypeInfo;
+      case HiveParser.TOK_MAP:
+        MapTypeInfo mapTypeInfo = new MapTypeInfo();
+        String keyTypeString = getTypeStringFromAST((ASTNode) typeNode.getChild(0));
+        mapTypeInfo.setMapKeyTypeInfo(TypeInfoFactory.getPrimitiveTypeInfo(keyTypeString));
+        mapTypeInfo.setMapValueTypeInfo(getComplexTypeTypeInfo((ASTNode) typeNode.getChild(1)));
+        return mapTypeInfo;
+      case HiveParser.TOK_STRUCT:
+        StructTypeInfo structTypeInfo = new StructTypeInfo();
+        Map<String, TypeInfo> fields = collectStructFieldNames(typeNode);
+        structTypeInfo.setAllStructFieldNames(new ArrayList<>(fields.keySet()));
+        structTypeInfo.setAllStructFieldTypeInfos(new ArrayList<>(fields.values()));
+        return structTypeInfo;
+      default:
+        String typeString = getTypeStringFromAST(typeNode);
+        return TypeInfoFactory.getPrimitiveTypeInfo(typeString);
+    }
+  }
+
+  private static Map<String, TypeInfo> collectStructFieldNames(ASTNode structTypeNode) throws SemanticException {
+    ASTNode fieldListNode = (ASTNode) structTypeNode.getChild(0);
+    assert fieldListNode.getType() == HiveParser.TOK_TABCOLLIST;
+
+    Map<String, TypeInfo> result = new LinkedHashMap<>(fieldListNode.getChildCount());
+    for (int i = 0; i < fieldListNode.getChildCount(); i++) {
+      ASTNode child = (ASTNode) fieldListNode.getChild(i);
+
+      String attributeIdentifier = unescapeIdentifier(child.getChild(0).getText());
+      if (result.containsKey(attributeIdentifier)) {
+        throw new SemanticException(ErrorMsg.AMBIGUOUS_STRUCT_ATTRIBUTE, attributeIdentifier);
+      } else {
+        result.put(attributeIdentifier, getComplexTypeTypeInfo((ASTNode) child.getChild(1)));
+      }
+    }
+    return result;
+  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/type/ExprNodeDescExprFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/type/ExprNodeDescExprFactory.java
index 40c5f6da192..b27b98f5f7e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/type/ExprNodeDescExprFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/type/ExprNodeDescExprFactory.java
@@ -113,30 +113,40 @@ public class ExprNodeDescExprFactory extends ExprFactory<ExprNodeDesc> {
   protected ExprNodeDesc toExpr(ColumnInfo colInfo, RowResolver rowResolver, int offset)
       throws SemanticException {
     ObjectInspector inspector = colInfo.getObjectInspector();
-    if (inspector instanceof ConstantObjectInspector && inspector instanceof PrimitiveObjectInspector) {
-      return toPrimitiveConstDesc(colInfo, inspector);
-    }
-    if (inspector instanceof ConstantObjectInspector && inspector instanceof ListObjectInspector) {
-      ObjectInspector listElementOI = ((ListObjectInspector)inspector).getListElementObjectInspector();
-      if (listElementOI instanceof PrimitiveObjectInspector) {
-        return toListConstDesc(colInfo, inspector, listElementOI);
+    if (inspector instanceof ConstantObjectInspector) {
+      if (inspector instanceof PrimitiveObjectInspector) {
+        return toPrimitiveConstDesc(colInfo, inspector);
       }
-    }
-    if (inspector instanceof ConstantObjectInspector && inspector instanceof MapObjectInspector) {
-      ObjectInspector keyOI = ((MapObjectInspector)inspector).getMapKeyObjectInspector();
-      ObjectInspector valueOI = ((MapObjectInspector)inspector).getMapValueObjectInspector();
-      if (keyOI instanceof PrimitiveObjectInspector && valueOI instanceof PrimitiveObjectInspector) {
-        return toMapConstDesc(colInfo, inspector, keyOI, valueOI);
+
+      Object inputConstantValue = ((ConstantObjectInspector) inspector).getWritableConstantValue();
+      if (inputConstantValue == null) {
+        return createExprNodeConstantDesc(colInfo, null);
       }
-    }
-    if (inspector instanceof ConstantObjectInspector && inspector instanceof StructObjectInspector) {
-      boolean allPrimitive = true;
-      List<? extends StructField> fields = ((StructObjectInspector)inspector).getAllStructFieldRefs();
-      for (StructField field : fields) {
-        allPrimitive &= field.getFieldObjectInspector() instanceof PrimitiveObjectInspector;
+
+      if (inspector instanceof ListObjectInspector) {
+        ObjectInspector listElementOI = ((ListObjectInspector) inspector).getListElementObjectInspector();
+        if (listElementOI instanceof PrimitiveObjectInspector) {
+          PrimitiveObjectInspector poi = (PrimitiveObjectInspector) listElementOI;
+          return createExprNodeConstantDesc(colInfo, toListConstant((List<?>) inputConstantValue, poi));
+        }
       }
-      if (allPrimitive) {
-        return toStructConstDesc(colInfo, inspector, fields);
+      if (inspector instanceof MapObjectInspector) {
+        ObjectInspector keyOI = ((MapObjectInspector)inspector).getMapKeyObjectInspector();
+        ObjectInspector valueOI = ((MapObjectInspector)inspector).getMapValueObjectInspector();
+        if (keyOI instanceof PrimitiveObjectInspector && valueOI instanceof PrimitiveObjectInspector) {
+          return createExprNodeConstantDesc(colInfo, toMapConstant((Map<?, ?>) inputConstantValue, keyOI, valueOI));
+        }
+      }
+      if (inspector instanceof StructObjectInspector) {
+        boolean allPrimitive = true;
+        List<? extends StructField> fields = ((StructObjectInspector)inspector).getAllStructFieldRefs();
+        for (StructField field : fields) {
+          allPrimitive &= field.getFieldObjectInspector() instanceof PrimitiveObjectInspector;
+        }
+        if (allPrimitive) {
+          return createExprNodeConstantDesc(colInfo, toStructConstDesc(
+              (List<?>) ((ConstantObjectInspector) inspector).getWritableConstantValue(), fields));
+        }
       }
     }
     // non-constant or non-primitive constants
@@ -145,6 +155,13 @@ public class ExprNodeDescExprFactory extends ExprFactory<ExprNodeDesc> {
     return column;
   }
 
+  private static ExprNodeConstantDesc createExprNodeConstantDesc(ColumnInfo colInfo, Object constantValue) {
+    ExprNodeConstantDesc constantExpr = new ExprNodeConstantDesc(colInfo.getType(), constantValue);
+    constantExpr.setFoldedFromCol(colInfo.getInternalName());
+    constantExpr.setFoldedFromTab(colInfo.getTabAlias());
+    return constantExpr;
+  }
+
   private static ExprNodeConstantDesc toPrimitiveConstDesc(ColumnInfo colInfo, ObjectInspector inspector) {
     PrimitiveObjectInspector poi = (PrimitiveObjectInspector) inspector;
     Object constant = ((ConstantObjectInspector) inspector).getWritableConstantValue();
@@ -155,50 +172,33 @@ public class ExprNodeDescExprFactory extends ExprFactory<ExprNodeDesc> {
     return constantExpr;
   }
 
-  private static ExprNodeConstantDesc toListConstDesc(ColumnInfo colInfo, 
ObjectInspector inspector,
-                                                      ObjectInspector 
listElementOI) {
-    PrimitiveObjectInspector poi = (PrimitiveObjectInspector)listElementOI;
-    List<?> values = (List<?>)((ConstantObjectInspector) 
inspector).getWritableConstantValue();
-    List<Object> constant = new ArrayList<Object>();
-    for (Object o : values) {
+  private static List<Object> toListConstant(List<?> constantValue, PrimitiveObjectInspector poi) {
+    List<Object> constant = new ArrayList<>(constantValue.size());
+    for (Object o : constantValue) {
       constant.add(poi.getPrimitiveJavaObject(o));
     }
-
-    ExprNodeConstantDesc constantExpr = new 
ExprNodeConstantDesc(colInfo.getType(), constant);
-    constantExpr.setFoldedFromCol(colInfo.getInternalName());
-    constantExpr.setFoldedFromTab(colInfo.getTabAlias());
-    return constantExpr;
+    return constant;
   }
 
-  private static ExprNodeConstantDesc toMapConstDesc(ColumnInfo colInfo, 
ObjectInspector inspector,
-                                                     ObjectInspector keyOI, 
ObjectInspector valueOI) {
-    PrimitiveObjectInspector keyPoi = (PrimitiveObjectInspector)keyOI;
-    PrimitiveObjectInspector valuePoi = (PrimitiveObjectInspector)valueOI;
-    Map<?, ?> values = (Map<?, ?>)((ConstantObjectInspector) 
inspector).getWritableConstantValue();
-    Map<Object, Object> constant = new LinkedHashMap<Object, Object>();
-    for (Map.Entry<?, ?> e : values.entrySet()) {
+  private static Map<Object, Object> toMapConstant(
+      Map<?, ?> constantValue, ObjectInspector keyOI, ObjectInspector valueOI) {
+    PrimitiveObjectInspector keyPoi = (PrimitiveObjectInspector) keyOI;
+    PrimitiveObjectInspector valuePoi = (PrimitiveObjectInspector) valueOI;
+    Map<Object, Object> constant = new LinkedHashMap<>(constantValue.size());
+    for (Map.Entry<?, ?> e : constantValue.entrySet()) {
       constant.put(keyPoi.getPrimitiveJavaObject(e.getKey()), 
valuePoi.getPrimitiveJavaObject(e.getValue()));
     }
-
-    ExprNodeConstantDesc constantExpr = new 
ExprNodeConstantDesc(colInfo.getType(), constant);
-    constantExpr.setFoldedFromCol(colInfo.getInternalName());
-    constantExpr.setFoldedFromTab(colInfo.getTabAlias());
-    return constantExpr;
+    return constant;
   }
 
-  private static ExprNodeConstantDesc toStructConstDesc(ColumnInfo colInfo, 
ObjectInspector inspector,
-                                                        List<? extends 
StructField> fields) {
-    List<?> values = (List<?>)((ConstantObjectInspector) 
inspector).getWritableConstantValue();
-    List<Object> constant =  new ArrayList<Object>();
-    for (int i = 0; i < values.size(); i++) {
-      Object value = values.get(i);
+  private static List<Object> toStructConstDesc(List<?> constantValue, List<? extends StructField> fields) {
+    List<Object> constant = new ArrayList<>(constantValue.size());
+    for (int i = 0; i < constantValue.size(); i++) {
+      Object value = constantValue.get(i);
       PrimitiveObjectInspector fieldPoi = (PrimitiveObjectInspector) 
fields.get(i).getFieldObjectInspector();
       constant.add(fieldPoi.getPrimitiveJavaObject(value));
     }
-    ExprNodeConstantDesc constantExpr = new 
ExprNodeConstantDesc(colInfo.getType(), constant);
-    constantExpr.setFoldedFromCol(colInfo.getInternalName());
-    constantExpr.setFoldedFromTab(colInfo.getTabAlias());
-    return constantExpr;
+    return constant;
   }
 
   /**
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/type/TypeCheckProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/type/TypeCheckProcFactory.java
index 9837b325230..318d0c87792 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/type/TypeCheckProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/type/TypeCheckProcFactory.java
@@ -139,6 +139,9 @@ public class TypeCheckProcFactory<T> {
         serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME);
     CONVERSION_FUNCTION_TEXT_MAP.put(HiveParser.TOK_DECIMAL,
         serdeConstants.DECIMAL_TYPE_NAME);
+    CONVERSION_FUNCTION_TEXT_MAP.put(HiveParser.TOK_MAP, "toMap");
+    CONVERSION_FUNCTION_TEXT_MAP.put(HiveParser.TOK_LIST, "toArray");
+    CONVERSION_FUNCTION_TEXT_MAP.put(HiveParser.TOK_STRUCT, "toStruct");
 
     WINDOWING_TOKENS = new HashSet<Integer>();
     WINDOWING_TOKENS.add(HiveParser.KW_OVER);
@@ -1134,6 +1137,10 @@ public class TypeCheckProcFactory<T> {
           return timestampLocalTZTypeInfo;
         case HiveParser.TOK_DECIMAL:
           return ParseUtils.getDecimalTypeTypeInfo(funcNameNode);
+        case HiveParser.TOK_MAP:
+        case HiveParser.TOK_LIST:
+        case HiveParser.TOK_STRUCT:
+          return ParseUtils.getComplexTypeTypeInfo(funcNameNode);
         default:
           return null;
       }
@@ -1409,7 +1416,9 @@ public class TypeCheckProcFactory<T> {
       // Return nulls for conversion operators
       if (CONVERSION_FUNCTION_TEXT_MAP.keySet().contains(expr.getType())
           || expr.getToken().getType() == HiveParser.CharSetName
-          || expr.getToken().getType() == HiveParser.CharSetLiteral) {
+          || expr.getToken().getType() == HiveParser.CharSetLiteral
+          || expr.getType() == HiveParser.TOK_TABCOL
+          || expr.getType() == HiveParser.TOK_TABCOLLIST) {
         return null;
       }
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
index f5e3828e2cd..6709dc319e8 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
@@ -173,6 +173,9 @@ public class ExprNodeConstantDesc extends ExprNodeDesc implements Serializable {
     if (typeInfo.getCategory() == Category.PRIMITIVE) {
       return getFormatted(typeInfo, value);
     } else if (typeInfo.getCategory() == Category.STRUCT) {
+      if (getWritableObjectInspector().getWritableConstantValue() == null) {
+        return getFormatted(typeInfo, value);
+      }
       StringBuilder sb = new StringBuilder();
       sb.append("const struct(");
       List<?> items = (List<?>) getWritableObjectInspector().getWritableConstantValue();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToArray.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToArray.java
new file mode 100644
index 00000000000..915188a363a
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToArray.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.SettableUDF;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+
+@Description(name = "toArray", value = "_FUNC_(x) - converts it's parameter to 
_FUNC_"
+    + "Currently only null literal is supported.")
+public class GenericUDFToArray extends GenericUDF implements SettableUDF {
+  private ListTypeInfo typeInfo;
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+    return TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    return null;
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    return String.format("toArray(%s)", String.join(",", children));
+  }
+
+  @Override
+  public void setTypeInfo(TypeInfo typeInfo) throws UDFArgumentException {
+    this.typeInfo = (ListTypeInfo) typeInfo;
+  }
+
+  @Override
+  public TypeInfo getTypeInfo() {
+    return typeInfo;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToMap.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToMap.java
new file mode 100644
index 00000000000..85d534ccbbf
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToMap.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.SettableUDF;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+
+/**
+ * GenericUDFToMap.
+ *
+ */
+@Description(name = "toMap", value = "_FUNC_(x) - converts it's parameter to 
_FUNC_"
+    + "Currently only null literal is supported.")
+public class GenericUDFToMap extends GenericUDF implements SettableUDF {
+  private MapTypeInfo typeInfo;
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+    return TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    return null;
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    return String.format("toMap(%s)", String.join(",", children));
+  }
+
+  @Override
+  public void setTypeInfo(TypeInfo typeInfo) throws UDFArgumentException {
+    this.typeInfo = (MapTypeInfo) typeInfo;
+  }
+
+  @Override
+  public TypeInfo getTypeInfo() {
+    return typeInfo;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToStruct.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToStruct.java
new file mode 100644
index 00000000000..f59d11ce773
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToStruct.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.SettableUDF;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+
+@Description(name = "toStruct", value = "_FUNC_(x) - converts it's parameter 
to _FUNC_"
+    + "Currently only null literal is supported.")
+public class GenericUDFToStruct extends GenericUDF implements SettableUDF {
+  private StructTypeInfo typeInfo;
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+    return TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    return null;
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    return String.format("toStruct(%s)", String.join(",", children));  }
+
+  @Override
+  public void setTypeInfo(TypeInfo typeInfo) throws UDFArgumentException {
+    this.typeInfo = (StructTypeInfo) typeInfo;
+  }
+
+  @Override
+  public TypeInfo getTypeInfo() {
+    return typeInfo;
+  }
+}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TestASTConverter.java b/ql/src/test/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TestASTConverter.java
index ddea68429f8..3c48447d51e 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TestASTConverter.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TestASTConverter.java
@@ -36,11 +36,40 @@ import java.util.List;
 
 import static java.util.Arrays.asList;
 import static java.util.Collections.singletonList;
+import static org.apache.hadoop.hive.ql.optimizer.calcite.translator.ASTConverter.convertType;
 import static org.apache.hadoop.hive.ql.optimizer.calcite.translator.ASTConverter.emptyPlan;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.core.Is.is;
 
 class TestASTConverter {
+
+  @Test
+  void testConvertTypeWhenInputIsStruct() {
+    List<RelDataTypeField> fields = asList(
+        new RelDataTypeFieldImpl("a", 0, new BasicSqlType(new 
HiveTypeSystemImpl(), SqlTypeName.INTEGER)),
+        new RelDataTypeFieldImpl("b", 1, new BasicSqlType(new 
HiveTypeSystemImpl(), SqlTypeName.CHAR, 30)),
+        new RelDataTypeFieldImpl("c", 2, new BasicSqlType(new 
HiveTypeSystemImpl(), SqlTypeName.NULL)));
+
+    RelDataType dataType = new RelRecordType(fields);
+
+    ASTNode tree = convertType(dataType);
+    assertThat(tree.dump(), is(EXPECTED_STRUCT_TREE));
+  }
+
+  private static final String EXPECTED_STRUCT_TREE = "\n" +
+      "TOK_STRUCT\n" +
+      "   TOK_TABCOLLIST\n" +
+      "      TOK_TABCOL\n" +
+      "         a\n" +
+      "         TOK_INT\n" +
+      "      TOK_TABCOL\n" +
+      "         b\n" +
+      "         TOK_CHAR\n" +
+      "            30\n" +
+      "      TOK_TABCOL\n" +
+      "         c\n" +
+      "         TOK_NULL\n";
+
   @Test
   void testEmptyPlanWhenInputSchemaIsEmpty() {
     RelRecordType dataType = new RelRecordType(Collections.emptyList());
@@ -54,9 +83,9 @@ class TestASTConverter {
   @Test
   void testEmptyPlan() {
     List<RelDataTypeField> fields = asList(
-            new RelDataTypeFieldImpl("a", 0, new BasicSqlType(new 
HiveTypeSystemImpl(), SqlTypeName.INTEGER)),
-            new RelDataTypeFieldImpl("b", 1, new BasicSqlType(new 
HiveTypeSystemImpl(), SqlTypeName.CHAR, 30)),
-            new RelDataTypeFieldImpl("c", 2, new BasicSqlType(new 
HiveTypeSystemImpl(), SqlTypeName.NULL)));
+        new RelDataTypeFieldImpl("a", 0, new BasicSqlType(new 
HiveTypeSystemImpl(), SqlTypeName.INTEGER)),
+        new RelDataTypeFieldImpl("b", 1, new BasicSqlType(new 
HiveTypeSystemImpl(), SqlTypeName.CHAR, 30)),
+        new RelDataTypeFieldImpl("c", 2, new BasicSqlType(new 
HiveTypeSystemImpl(), SqlTypeName.NULL)));
     RelDataType dataType = new RelRecordType(fields);
 
     ASTNode tree = emptyPlan(dataType);
@@ -65,96 +94,81 @@ class TestASTConverter {
   }
 
   private static final String EXPECTED_TREE = "\n" +
-          "TOK_QUERY\n" +
-          "   TOK_INSERT\n" +
-          "      TOK_DESTINATION\n" +
-          "         TOK_DIR\n" +
-          "            TOK_TMP_FILE\n" +
-          "      TOK_SELECT\n" +
-          "         TOK_SELEXPR\n" +
-          "            TOK_FUNCTION\n" +
-          "               TOK_INT\n" +
-          "               TOK_NULL\n" +
-          "            a\n" +
-          "         TOK_SELEXPR\n" +
-          "            TOK_FUNCTION\n" +
-          "               TOK_CHAR\n" +
-          "                  30\n" +
-          "               TOK_NULL\n" +
-          "            b\n" +
-          "         TOK_SELEXPR\n" +
-          "            TOK_NULL\n" +
-          "            c\n" +
-          "      TOK_LIMIT\n" +
-          "         0\n" +
-          "         0\n";
+      "TOK_QUERY\n" +
+      "   TOK_INSERT\n" +
+      "      TOK_DESTINATION\n" +
+      "         TOK_DIR\n" +
+      "            TOK_TMP_FILE\n" +
+      "      TOK_SELECT\n" +
+      "         TOK_SELEXPR\n" +
+      "            TOK_FUNCTION\n" +
+      "               TOK_INT\n" +
+      "               TOK_NULL\n" +
+      "            a\n" +
+      "         TOK_SELEXPR\n" +
+      "            TOK_FUNCTION\n" +
+      "               TOK_CHAR\n" +
+      "                  30\n" +
+      "               TOK_NULL\n" +
+      "            b\n" +
+      "         TOK_SELEXPR\n" +
+      "            TOK_NULL\n" +
+      "            c\n" +
+      "      TOK_LIMIT\n" +
+      "         0\n" +
+      "         0\n";
 
   @Test
-  void testEmptyPlanWithComplexTypes() {
+  void testEmptyPlanWithNestedComplexTypes() {
     List<RelDataTypeField> nestedStructFields = asList(
-            new RelDataTypeFieldImpl("nf1", 0, new BasicSqlType(new 
HiveTypeSystemImpl(), SqlTypeName.INTEGER)),
-            new RelDataTypeFieldImpl("nf2", 1, new BasicSqlType(new 
HiveTypeSystemImpl(), SqlTypeName.CHAR, 30)));
+        new RelDataTypeFieldImpl("nf1", 0, new BasicSqlType(new 
HiveTypeSystemImpl(), SqlTypeName.INTEGER)),
+        new RelDataTypeFieldImpl("nf2", 1, new BasicSqlType(new 
HiveTypeSystemImpl(), SqlTypeName.CHAR, 30)));
 
     List<RelDataTypeField> structFields = asList(
-            new RelDataTypeFieldImpl("f1", 0, new BasicSqlType(new 
HiveTypeSystemImpl(), SqlTypeName.INTEGER)),
-            new RelDataTypeFieldImpl("farray", 1,
-                    new ArraySqlType(new BasicSqlType(new 
HiveTypeSystemImpl(), SqlTypeName.INTEGER), true)),
-            new RelDataTypeFieldImpl("fmap", 2, new MapSqlType(
-                    new BasicSqlType(new HiveTypeSystemImpl(), 
SqlTypeName.INTEGER),
-                    new BasicSqlType(new HiveTypeSystemImpl(), 
SqlTypeName.INTEGER), true)),
-            new RelDataTypeFieldImpl("fstruct", 3,
-                    new RelRecordType(nestedStructFields)));
+        new RelDataTypeFieldImpl("f1", 0, new BasicSqlType(new 
HiveTypeSystemImpl(), SqlTypeName.INTEGER)),
+        new RelDataTypeFieldImpl("farray", 1,
+            new ArraySqlType(new BasicSqlType(new HiveTypeSystemImpl(), 
SqlTypeName.INTEGER), true)),
+        new RelDataTypeFieldImpl("fmap", 2, new MapSqlType(
+            new BasicSqlType(new HiveTypeSystemImpl(), SqlTypeName.INTEGER),
+            new BasicSqlType(new HiveTypeSystemImpl(), SqlTypeName.INTEGER), 
true)),
+        new RelDataTypeFieldImpl("fstruct", 3,
+            new RelRecordType(nestedStructFields)));
 
     List<RelDataTypeField> fields = singletonList(new 
RelDataTypeFieldImpl("a", 0, new RelRecordType(structFields)));
     RelDataType dataType = new RelRecordType(fields);
 
-    ASTNode tree = emptyPlan(dataType);
+    ASTNode tree = convertType(dataType);
     assertThat(tree.dump(), is(EXPECTED_COMPLEX_TREE));
   }
 
   private static final String EXPECTED_COMPLEX_TREE = "\n" +
-          "TOK_QUERY\n" +
-          "   TOK_INSERT\n" +
-          "      TOK_DESTINATION\n" +
-          "         TOK_DIR\n" +
-          "            TOK_TMP_FILE\n" +
-          "      TOK_SELECT\n" +
-          "         TOK_SELEXPR\n" +
-          "            TOK_FUNCTION\n" +
-          "               named_struct\n" +
-          "               f1\n" +
-          "               TOK_FUNCTION\n" +
-          "                  TOK_INT\n" +
-          "                  TOK_NULL\n" +
-          "               farray\n" +
-          "               TOK_FUNCTION\n" +
-          "                  array\n" +
-          "                  TOK_FUNCTION\n" +
-          "                     TOK_INT\n" +
-          "                     TOK_NULL\n" +
-          "               fmap\n" +
-          "               TOK_FUNCTION\n" +
-          "                  map\n" +
-          "                  TOK_FUNCTION\n" +
-          "                     TOK_INT\n" +
-          "                     TOK_NULL\n" +
-          "                  TOK_FUNCTION\n" +
-          "                     TOK_INT\n" +
-          "                     TOK_NULL\n" +
-          "               fstruct\n" +
-          "               TOK_FUNCTION\n" +
-          "                  named_struct\n" +
-          "                  nf1\n" +
-          "                  TOK_FUNCTION\n" +
-          "                     TOK_INT\n" +
-          "                     TOK_NULL\n" +
-          "                  nf2\n" +
-          "                  TOK_FUNCTION\n" +
-          "                     TOK_CHAR\n" +
-          "                        30\n" +
-          "                     TOK_NULL\n" +
-          "            a\n" +
-          "      TOK_LIMIT\n" +
-          "         0\n" +
-          "         0\n";
+      "TOK_STRUCT\n" +
+      "   TOK_TABCOLLIST\n" +
+      "      TOK_TABCOL\n" +
+      "         a\n" +
+      "         TOK_STRUCT\n" +
+      "            TOK_TABCOLLIST\n" +
+      "               TOK_TABCOL\n" +
+      "                  f1\n" +
+      "                  TOK_INT\n" +
+      "               TOK_TABCOL\n" +
+      "                  farray\n" +
+      "                  TOK_LIST\n" +
+      "                     TOK_INT\n" +
+      "               TOK_TABCOL\n" +
+      "                  fmap\n" +
+      "                  TOK_MAP\n" +
+      "                     TOK_INT\n" +
+      "                     TOK_INT\n" +
+      "               TOK_TABCOL\n" +
+      "                  fstruct\n" +
+      "                  TOK_STRUCT\n" +
+      "                     TOK_TABCOLLIST\n" +
+      "                        TOK_TABCOL\n" +
+      "                           nf1\n" +
+      "                           TOK_INT\n" +
+      "                        TOK_TABCOL\n" +
+      "                           nf2\n" +
+      "                           TOK_CHAR\n" +
+      "                              30\n";
 }
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/type/TestExprNodeDescExprFactory.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/type/TestExprNodeDescExprFactory.java
new file mode 100644
index 00000000000..b97c2261ee0
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/type/TestExprNodeDescExprFactory.java
@@ -0,0 +1,175 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.parse.type;
+
+import junit.framework.TestCase;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.ql.exec.ColumnInfo;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.Text;
+
+import java.util.HashMap;
+
+import static java.util.Arrays.asList;
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+public class TestExprNodeDescExprFactory extends TestCase {
+
+  public void testToExprWhenColumnIsPrimitive() throws SemanticException {
+    ExprNodeDescExprFactory exprFactory = new ExprNodeDescExprFactory();
+    ColumnInfo columnInfo = new ColumnInfo();
+    columnInfo.setTypeName("decimal(3,2)");
+    DecimalTypeInfo typeInfo = new DecimalTypeInfo(3, 2);
+    
columnInfo.setObjectinspector(PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
+        typeInfo, new HiveDecimalWritable(HiveDecimal.create(6.4))));
+
+    ExprNodeDesc exprNodeDesc = exprFactory.toExpr(columnInfo, null, 0);
+
+    assertThat(exprNodeDesc.getExprString(), is("6.4"));
+  }
+
+  public void testToExprWhenColumnIsPrimitiveNullValue() throws 
SemanticException {
+    ExprNodeDescExprFactory exprFactory = new ExprNodeDescExprFactory();
+    ColumnInfo columnInfo = new ColumnInfo();
+    columnInfo.setTypeName("decimal(3,2)");
+    DecimalTypeInfo typeInfo = new DecimalTypeInfo(3, 2);
+    
columnInfo.setObjectinspector(PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
+        typeInfo, null));
+
+    ExprNodeDesc exprNodeDesc = exprFactory.toExpr(columnInfo, null, 0);
+
+    assertThat(exprNodeDesc.getExprString(), is("null"));
+  }
+
+  public void testToExprWhenColumnIsList() throws SemanticException {
+    ExprNodeDescExprFactory exprFactory = new ExprNodeDescExprFactory();
+    ColumnInfo columnInfo = new ColumnInfo();
+    columnInfo.setTypeName("array<decimal(3,2)>");
+    DecimalTypeInfo typeInfo = new DecimalTypeInfo(3, 2);
+    
columnInfo.setObjectinspector(ObjectInspectorFactory.getStandardConstantListObjectInspector(
+        
PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(typeInfo),
+        asList(
+            new HiveDecimalWritable(HiveDecimal.create(5d)),
+            new HiveDecimalWritable(HiveDecimal.create(0.4)),
+            null)));
+
+    ExprNodeDesc exprNodeDesc = exprFactory.toExpr(columnInfo, null, 0);
+
+    assertThat(exprNodeDesc.getExprString(), is("Const array<decimal(3,2)> [5, 
0.4, null]"));
+  }
+
+  public void testToExprWhenColumnIsListWithNullValue() throws 
SemanticException {
+    ExprNodeDescExprFactory exprFactory = new ExprNodeDescExprFactory();
+    ColumnInfo columnInfo = new ColumnInfo();
+    columnInfo.setTypeName("array<decimal(3,2)>");
+    DecimalTypeInfo typeInfo = new DecimalTypeInfo(3, 2);
+    
columnInfo.setObjectinspector(ObjectInspectorFactory.getStandardConstantListObjectInspector(
+        
PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(typeInfo), 
null));
+
+    ExprNodeDesc exprNodeDesc = exprFactory.toExpr(columnInfo, null, 0);
+
+    assertThat(exprNodeDesc.getExprString(), is("Const array<decimal(3,2)> 
null"));
+  }
+
+  public void testToExprWhenColumnIsMap() throws SemanticException {
+    ExprNodeDescExprFactory exprFactory = new ExprNodeDescExprFactory();
+    ColumnInfo columnInfo = new ColumnInfo();
+    columnInfo.setTypeName("map<int,string>");
+    
columnInfo.setObjectinspector(ObjectInspectorFactory.getStandardConstantMapObjectInspector(
+        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
+            PrimitiveObjectInspector.PrimitiveCategory.INT),
+        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
+            PrimitiveObjectInspector.PrimitiveCategory.STRING),
+        new HashMap<IntWritable, Text>() {{ put(new IntWritable(4), new 
Text("foo")); put(null, null); }}));
+
+    ExprNodeDesc exprNodeDesc = exprFactory.toExpr(columnInfo, null, 0);
+
+    assertThat(exprNodeDesc.getExprString(), is("Const map<int,string> 
{null=null, 4=foo}"));
+  }
+
+  public void testToExprWhenColumnIsMapWithNullValue() throws 
SemanticException {
+    ExprNodeDescExprFactory exprFactory = new ExprNodeDescExprFactory();
+    ColumnInfo columnInfo = new ColumnInfo();
+    columnInfo.setTypeName("map<int,string>");
+    
columnInfo.setObjectinspector(ObjectInspectorFactory.getStandardConstantMapObjectInspector(
+        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
+            PrimitiveObjectInspector.PrimitiveCategory.INT),
+        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
+            PrimitiveObjectInspector.PrimitiveCategory.STRING),
+        null));
+
+    ExprNodeDesc exprNodeDesc = exprFactory.toExpr(columnInfo, null, 0);
+
+    assertThat(exprNodeDesc.getExprString(), is("Const map<int,string> null"));
+  }
+
+  public void testToExprWhenColumnIsStruct() throws SemanticException {
+    ExprNodeDescExprFactory exprFactory = new ExprNodeDescExprFactory();
+    ColumnInfo columnInfo = new ColumnInfo();
+    columnInfo.setTypeName("struct<f1:int,f2:string>");
+    
columnInfo.setObjectinspector(ObjectInspectorFactory.getStandardConstantStructObjectInspector(
+        asList("f1", "f2"),
+        
asList(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveObjectInspector.PrimitiveCategory.INT),
+            
PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveObjectInspector.PrimitiveCategory.STRING)),
+        asList(new IntWritable(4), new Text("foo"))));
+
+    ExprNodeDesc exprNodeDesc = exprFactory.toExpr(columnInfo, null, 0);
+
+    assertThat(exprNodeDesc.getExprString(), is("const struct(4,'foo')"));
+  }
+
+  public void testToExprWhenColumnIsStructWithNullFields() throws 
SemanticException {
+    ExprNodeDescExprFactory exprFactory = new ExprNodeDescExprFactory();
+    ColumnInfo columnInfo = new ColumnInfo();
+    columnInfo.setTypeName("struct<f1:int,f2:string>");
+    
columnInfo.setObjectinspector(ObjectInspectorFactory.getStandardConstantStructObjectInspector(
+        asList("f1", "f2"),
+        
asList(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveObjectInspector.PrimitiveCategory.INT),
+            
PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveObjectInspector.PrimitiveCategory.STRING)),
+        asList(null, null)));
+
+    ExprNodeDesc exprNodeDesc = exprFactory.toExpr(columnInfo, null, 0);
+
+    assertThat(exprNodeDesc.getExprString(), is("const struct(null,null)"));
+  }
+
+  public void testToExprWhenColumnIsStructWithNullValue() throws 
SemanticException {
+    ExprNodeDescExprFactory exprFactory = new ExprNodeDescExprFactory();
+    ColumnInfo columnInfo = new ColumnInfo();
+    columnInfo.setTypeName("struct<f1:int,f2:string>");
+    
columnInfo.setObjectinspector(ObjectInspectorFactory.getStandardConstantStructObjectInspector(
+        asList("f1", "f2"),
+        
asList(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveObjectInspector.PrimitiveCategory.INT),
+            
PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveObjectInspector.PrimitiveCategory.STRING)),
+        null));
+
+    ExprNodeDesc exprNodeDesc = exprFactory.toExpr(columnInfo, null, 0);
+
+    assertThat(exprNodeDesc.getExprString(), is("null"));
+  }
+
+}
diff --git a/ql/src/test/queries/clientpositive/cast_null_to_complex.q b/ql/src/test/queries/clientpositive/cast_null_to_complex.q
new file mode 100644
index 00000000000..323e4bda0fc
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/cast_null_to_complex.q
@@ -0,0 +1,13 @@
+SET hive.cli.print.header=true;
+
+explain cbo
+select cast(null as map<int, string>), cast(null as array<map<int, string>>), cast(null as int), cast(null as struct<f1:array<map<int, string>>, f2:struct<a:double, b:string>>);
+explain
+select cast(null as map<int, string>), cast(null as array<map<int, string>>), cast(null as int), cast(null as struct<f1:array<map<int, string>>, f2:struct<a:double, b:string>>);
+select cast(null as map<int, string>), cast(null as array<map<int, string>>), cast(null as int), cast(null as struct<f1:array<map<int, string>>, f2:struct<a:double, b:string>>);
+
+
+create table t1 as
+select cast(null as map<int, string>), cast(null as array<map<int, string>>), cast(null as int), cast(null as struct<f1:array<map<int, string>>, f2:struct<a:double, b:string>>);
+
+describe formatted t1;
diff --git a/ql/src/test/queries/clientpositive/empty_result_ctas.q b/ql/src/test/queries/clientpositive/empty_result_ctas.q
index 0a1fc91c11a..9437e0c1e6a 100644
--- a/ql/src/test/queries/clientpositive/empty_result_ctas.q
+++ b/ql/src/test/queries/clientpositive/empty_result_ctas.q
@@ -3,3 +3,15 @@ SET hive.cli.print.header=true;
 CREATE TABLE T1 (c_primitive int, c_array array<int>, c_nested array<struct<f1:int, f2:map<int, double>, f3:array<char(10)>>>);
 CREATE TABLE T2 AS SELECT * FROM T1 LIMIT 0;
 DESCRIBE FORMATTED t2;
+
+-- empty source table
+CREATE TABLE T3 AS SELECT * FROM T1;
+DESCRIBE FORMATTED t3;
+
+create table table1 (a string, b string);
+create table table2 (complex_column array<struct<`family`:struct<`code`:string>, `values`:array<struct<`code`:string, `description`:string, `categories`:array<string>>>>>);
+
+-- empty result subquery
+create table table3 as with t1 as (select * from table1), t2 as (select * from table2 where 1=0) select t1.*, t2.* from t1 left join t2;
+
+describe formatted table3;
diff --git a/ql/src/test/results/clientpositive/llap/analyze_npe.q.out b/ql/src/test/results/clientpositive/llap/analyze_npe.q.out
index df9097d44cc..c63f23c4fb4 100644
--- a/ql/src/test/results/clientpositive/llap/analyze_npe.q.out
+++ b/ql/src/test/results/clientpositive/llap/analyze_npe.q.out
@@ -114,6 +114,7 @@ STAGE PLANS:
           Filter Operator
             predicate: c1 is null (type: boolean)
             Select Operator
+              expressions: null (type: struct<name:string,age:int>)
               outputColumnNames: _col0
               ListSink
 
@@ -139,7 +140,7 @@ STAGE PLANS:
           Filter Operator
             predicate: c1 is null (type: boolean)
             Select Operator
-              expressions: null (type: void)
+              expressions: Const map<string,string> null (type: map<string,string>)
               outputColumnNames: _col0
               ListSink
 
@@ -165,7 +166,7 @@ STAGE PLANS:
           Filter Operator
             predicate: c1 is null (type: boolean)
             Select Operator
-              expressions: null (type: void)
+              expressions: Const array<string> null (type: array<string>)
               outputColumnNames: _col0
               ListSink
 
@@ -191,7 +192,7 @@ STAGE PLANS:
           Filter Operator
             predicate: c1 is null (type: boolean)
             Select Operator
-              expressions: null (type: void)
+              expressions: null (type: struct<name:string,age:int>)
               outputColumnNames: _col0
               ListSink
 
diff --git a/ql/src/test/results/clientpositive/llap/cast_null_to_complex.q.out b/ql/src/test/results/clientpositive/llap/cast_null_to_complex.q.out
new file mode 100644
index 00000000000..d6db733df7a
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/cast_null_to_complex.q.out
@@ -0,0 +1,107 @@
+PREHOOK: query: explain cbo
+select cast(null as map<int, string>), cast(null as array<map<int, string>>), cast(null as int), cast(null as struct<f1:array<map<int, string>>, f2:struct<a:double, b:string>>)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: explain cbo
+select cast(null as map<int, string>), cast(null as array<map<int, string>>), cast(null as int), cast(null as struct<f1:array<map<int, string>>, f2:struct<a:double, b:string>>)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+Explain
+CBO PLAN:
+HiveProject(_o__c0=[null:(INTEGER, VARCHAR(2147483647) CHARACTER SET "UTF-16LE") MAP], _o__c1=[null:(INTEGER, VARCHAR(2147483647) CHARACTER SET "UTF-16LE") MAP ARRAY], _o__c2=[null:INTEGER], _o__c3=[null:RecordType((INTEGER, VARCHAR(2147483647) CHARACTER SET "UTF-16LE") MAP ARRAY f1, RecordType(DOUBLE a, VARCHAR(2147483647) CHARACTER SET "UTF-16LE" b) f2)])
+  HiveTableScan(table=[[_dummy_database, _dummy_table]], table:alias=[_dummy_table])
+
+PREHOOK: query: explain
+select cast(null as map<int, string>), cast(null as array<map<int, string>>), cast(null as int), cast(null as struct<f1:array<map<int, string>>, f2:struct<a:double, b:string>>)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: explain
+select cast(null as map<int, string>), cast(null as array<map<int, string>>), cast(null as int), cast(null as struct<f1:array<map<int, string>>, f2:struct<a:double, b:string>>)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+Explain
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: _dummy_table
+          Row Limit Per Split: 1
+          Select Operator
+            expressions: Const map<int,string> null (type: map<int,string>), Const array<map<int,string>> null (type: array<map<int,string>>), null (type: int), null (type: struct<f1:array<map<int,string>>,f2:struct<a:double,b:string>>)
+            outputColumnNames: _col0, _col1, _col2, _col3
+            ListSink
+
+PREHOOK: query: select cast(null as map<int, string>), cast(null as array<map<int, string>>), cast(null as int), cast(null as struct<f1:array<map<int, string>>, f2:struct<a:double, b:string>>)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(null as map<int, string>), cast(null as array<map<int, string>>), cast(null as int), cast(null as struct<f1:array<map<int, string>>, f2:struct<a:double, b:string>>)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+_c0    _c1     _c2     _c3
+NULL   NULL    NULL    NULL
+PREHOOK: query: create table t1 as
+select cast(null as map<int, string>), cast(null as array<map<int, string>>), cast(null as int), cast(null as struct<f1:array<map<int, string>>, f2:struct<a:double, b:string>>)
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: database:default
+PREHOOK: Output: default@t1
+POSTHOOK: query: create table t1 as
+select cast(null as map<int, string>), cast(null as array<map<int, string>>), cast(null as int), cast(null as struct<f1:array<map<int, string>>, f2:struct<a:double, b:string>>)
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@t1
+POSTHOOK: Lineage: t1._c0 SIMPLE []
+POSTHOOK: Lineage: t1._c1 SIMPLE []
+POSTHOOK: Lineage: t1._c2 SIMPLE []
+POSTHOOK: Lineage: t1._c3 SIMPLE []
+_c0    _c1     _c2     _c3
+PREHOOK: query: describe formatted t1
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@t1
+POSTHOOK: query: describe formatted t1
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@t1
+col_name       data_type       comment
+# col_name             data_type               comment             
+_c0                    map<int,string>                             
+_c1                    array<map<int,string>>                      
+_c2                    int                                         
+_c3                    struct<f1:array<map<int,string>>,f2:struct<a:double,b:string>>                      
+                
+# Detailed Table Information            
+Database:              default                  
+#### A masked pattern was here ####
+Retention:             0                        
+#### A masked pattern was here ####
+Table Type:            MANAGED_TABLE            
+Table Parameters:               
+       COLUMN_STATS_ACCURATE   {\"BASIC_STATS\":\"true\"}
+       bucketing_version       2                   
+       numFiles                1                   
+       numRows                 1                   
+       rawDataSize             11                  
+       totalSize               12                  
+#### A masked pattern was here ####
+                
+# Storage Information           
+SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe       
+InputFormat:           org.apache.hadoop.mapred.TextInputFormat         
+OutputFormat:          org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat       
+Compressed:            No                       
+Num Buckets:           -1                       
+Bucket Columns:        []                       
+Sort Columns:          []                       
+Storage Desc Params:            
+       serialization.format    1                   
diff --git a/ql/src/test/results/clientpositive/llap/empty_result_ctas.q.out b/ql/src/test/results/clientpositive/llap/empty_result_ctas.q.out
index 57cc1b7e822..27eba3c3b62 100644
--- a/ql/src/test/results/clientpositive/llap/empty_result_ctas.q.out
+++ b/ql/src/test/results/clientpositive/llap/empty_result_ctas.q.out
@@ -18,8 +18,8 @@ POSTHOOK: Input: _dummy_database@_dummy_table
 POSTHOOK: Input: default@t1
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@T2
-POSTHOOK: Lineage: t2.c_array EXPRESSION []
-POSTHOOK: Lineage: t2.c_nested EXPRESSION []
+POSTHOOK: Lineage: t2.c_array SIMPLE []
+POSTHOOK: Lineage: t2.c_nested SIMPLE []
 POSTHOOK: Lineage: t2.c_primitive SIMPLE []
 t1.c_primitive t1.c_array      t1.c_nested
 PREHOOK: query: DESCRIBE FORMATTED t2
@@ -49,6 +49,126 @@ Table Parameters:
        totalSize               0                   
 #### A masked pattern was here ####
                 
+# Storage Information           
+SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe       
+InputFormat:           org.apache.hadoop.mapred.TextInputFormat         
+OutputFormat:          org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat       
+Compressed:            No                       
+Num Buckets:           -1                       
+Bucket Columns:        []                       
+Sort Columns:          []                       
+Storage Desc Params:            
+       serialization.format    1                   
+PREHOOK: query: CREATE TABLE T3 AS SELECT * FROM T1
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@t1
+PREHOOK: Output: database:default
+PREHOOK: Output: default@T3
+POSTHOOK: query: CREATE TABLE T3 AS SELECT * FROM T1
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@t1
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@T3
+POSTHOOK: Lineage: t3.c_array SIMPLE [(t1)t1.FieldSchema(name:c_array, type:array<int>, comment:null), ]
+POSTHOOK: Lineage: t3.c_nested SIMPLE [(t1)t1.FieldSchema(name:c_nested, type:array<struct<f1:int,f2:map<int,double>,f3:array<char(10)>>>, comment:null), ]
+POSTHOOK: Lineage: t3.c_primitive SIMPLE [(t1)t1.FieldSchema(name:c_primitive, type:int, comment:null), ]
+t1.c_primitive t1.c_array      t1.c_nested
+PREHOOK: query: DESCRIBE FORMATTED t3
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@t3
+POSTHOOK: query: DESCRIBE FORMATTED t3
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@t3
+col_name       data_type       comment
+# col_name             data_type               comment             
+c_primitive            int                                         
+c_array                array<int>                                  
+c_nested               array<struct<f1:int,f2:map<int,double>,f3:array<char(10)>>>                         
+                
+# Detailed Table Information            
+Database:              default                  
+#### A masked pattern was here ####
+Retention:             0                        
+#### A masked pattern was here ####
+Table Type:            MANAGED_TABLE            
+Table Parameters:               
+       COLUMN_STATS_ACCURATE   {\"BASIC_STATS\":\"true\"}
+       bucketing_version       2                   
+       numFiles                0                   
+       numRows                 0                   
+       rawDataSize             0                   
+       totalSize               0                   
+#### A masked pattern was here ####
+                
+# Storage Information           
+SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe       
+InputFormat:           org.apache.hadoop.mapred.TextInputFormat         
+OutputFormat:          org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat       
+Compressed:            No                       
+Num Buckets:           -1                       
+Bucket Columns:        []                       
+Sort Columns:          []                       
+Storage Desc Params:            
+       serialization.format    1                   
+PREHOOK: query: create table table1 (a string, b string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table1
+POSTHOOK: query: create table table1 (a string, b string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table1
+PREHOOK: query: create table table2 (complex_column array<struct<`family`:struct<`code`:string>, `values`:array<struct<`code`:string, `description`:string, `categories`:array<string>>>>>)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table2
+POSTHOOK: query: create table table2 (complex_column array<struct<`family`:struct<`code`:string>, `values`:array<struct<`code`:string, `description`:string, `categories`:array<string>>>>>)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table2
+PREHOOK: query: create table table3 as with t1 as (select * from table1), t2 as (select * from table2 where 1=0) select t1.*, t2.* from t1 left join t2
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@table1
+PREHOOK: Input: default@table2
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table3
+POSTHOOK: query: create table table3 as with t1 as (select * from table1), t2 as (select * from table2 where 1=0) select t1.*, t2.* from t1 left join t2
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@table1
+POSTHOOK: Input: default@table2
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table3
+POSTHOOK: Lineage: table3.a SIMPLE [(table1)table1.FieldSchema(name:a, type:string, comment:null), ]
+POSTHOOK: Lineage: table3.b SIMPLE [(table1)table1.FieldSchema(name:b, type:string, comment:null), ]
+POSTHOOK: Lineage: table3.complex_column SIMPLE []
+t1.a   t1.b    t2.complex_column
+PREHOOK: query: describe formatted table3
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@table3
+POSTHOOK: query: describe formatted table3
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@table3
+col_name       data_type       comment
+# col_name             data_type               comment             
+a                      string                                      
+b                      string                                      
+complex_column         array<struct<family:struct<code:string>,values:array<struct<code:string,description:string,categories:array<string>>>>>                     
+                
+# Detailed Table Information            
+Database:              default                  
+#### A masked pattern was here ####
+Retention:             0                        
+#### A masked pattern was here ####
+Table Type:            MANAGED_TABLE            
+Table Parameters:               
+       COLUMN_STATS_ACCURATE   {\"BASIC_STATS\":\"true\"}
+       bucketing_version       2                   
+       numFiles                0                   
+       numRows                 0                   
+       rawDataSize             0                   
+       totalSize               0                   
+#### A masked pattern was here ####
+                
 # Storage Information           
 SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe       
 InputFormat:           org.apache.hadoop.mapred.TextInputFormat         
diff --git a/ql/src/test/results/clientpositive/llap/show_functions.q.out b/ql/src/test/results/clientpositive/llap/show_functions.q.out
index 3eab5bdb7cc..e5a24964711 100644
--- a/ql/src/test/results/clientpositive/llap/show_functions.q.out
+++ b/ql/src/test/results/clientpositive/llap/show_functions.q.out
@@ -454,6 +454,9 @@ to_date
 to_epoch_milli
 to_unix_timestamp
 to_utc_timestamp
+toarray
+tomap
+tostruct
 translate
 trim
 trunc
@@ -1081,6 +1084,9 @@ to_date
 to_epoch_milli
 to_unix_timestamp
 to_utc_timestamp
+toarray
+tomap
+tostruct
 translate
 trim
 trunc
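
The new names in this output (toarray, tomap, tostruct) are the functions backing the NULL-to-complex casts, so they are now visible to SHOW FUNCTIONS. A quick way to list them interactively, assuming a standard Hive CLI or Beeline session:

    SHOW FUNCTIONS LIKE "to%";
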
