http://git-wip-us.apache.org/repos/asf/hive/blob/9687dcc9/ql/src/test/results/clientpositive/list_bucket_dml_9.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_9.q.out b/ql/src/test/results/clientpositive/list_bucket_dml_9.q.out
new file mode 100644
index 0000000..81f3af3
--- /dev/null
+++ b/ql/src/test/results/clientpositive/list_bucket_dml_9.q.out
@@ -0,0 +1,811 @@
+PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+-- SORT_QUERY_RESULTS
+
+-- list bucketing DML: static partition. multiple skewed columns. merge.
+-- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
+--  5263 000000_0
+--  5263 000001_0
+-- ds=2008-04-08/hr=11/key=103:
+-- 99 000000_0
+-- 99 000001_0
+-- after merge
+-- 142 000000_0
+-- ds=2008-04-08/hr=11/key=484:
+-- 87 000000_0
+-- 87 000001_0
+-- after merge
+-- 118 000001_0
+
+-- create a skewed table
+create table list_bucketing_static_part (key String, value String) 
+    partitioned by (ds String, hr String) 
+    skewed by (key) on ('484','103')
+    stored as DIRECTORIES
+    STORED AS RCFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@list_bucketing_static_part
+POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+-- SORT_QUERY_RESULTS
+
+-- list bucketing DML: static partition. multiple skewed columns. merge.
+-- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
+--  5263 000000_0
+--  5263 000001_0
+-- ds=2008-04-08/hr=11/key=103:
+-- 99 000000_0
+-- 99 000001_0
+-- after merge
+-- 142 000000_0
+-- ds=2008-04-08/hr=11/key=484:
+-- 87 000000_0
+-- 87 000001_0
+-- after merge
+-- 118 000001_0
+
+-- create a skewed table
+create table list_bucketing_static_part (key String, value String) 
+    partitioned by (ds String, hr String) 
+    skewed by (key) on ('484','103')
+    stored as DIRECTORIES
+    STORED AS RCFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@list_bucketing_static_part
+PREHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+POSTHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: srcpart
+            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Select Operator
+              expressions: key (type: string), value (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+                Static Partition Specification: ds=2008-04-08/hr=11/
+                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                table:
+                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                    properties:
+                      bucket_count -1
+                      columns key,value
+                      columns.comments 
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.list_bucketing_static_part
+                      partition_columns ds/hr
+                      partition_columns.types string:string
+                      serialization.ddl struct list_bucketing_static_part { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                    name: default.list_bucketing_static_part
+                TotalFiles: 1
+                GatherStats: true
+                MultiFileSpray: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=11
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=12
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 12
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+      Truncated Path -> Alias:
+        /srcpart/ds=2008-04-08/hr=11 [srcpart]
+        /srcpart/ds=2008-04-08/hr=12 [srcpart]
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 2008-04-08
+            hr 11
+          replace: true
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_static_part
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_static_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_static_part
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+#### A masked pattern was here ####
+
+PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: -- check DML result
+show partitions list_bucketing_static_part
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: query: -- check DML result
+show partitions list_bucketing_static_part
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@list_bucketing_static_part
+ds=2008-04-08/hr=11
+PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@list_bucketing_static_part
+# col_name             data_type               comment             
+                
+key                    string                                      
+value                  string                                      
+                
+# Partition Information                 
+# col_name             data_type               comment             
+                
+ds                     string                                      
+hr                     string                                      
+                
+# Detailed Partition Information                
+Partition Value:       [2008-04-08, 11]         
+Database:              default                  
+Table:                 list_bucketing_static_part       
+#### A masked pattern was here ####
+Partition Parameters:           
+       COLUMN_STATS_ACCURATE   {\"BASIC_STATS\":\"true\"}
+       numFiles                6                   
+       numRows                 1000                
+       rawDataSize             9624                
+       totalSize               10898               
+#### A masked pattern was here ####
+                
+# Storage Information           
+SerDe Library:         org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+InputFormat:           org.apache.hadoop.hive.ql.io.RCFileInputFormat   
+OutputFormat:          org.apache.hadoop.hive.ql.io.RCFileOutputFormat  
+Compressed:            No                       
+Num Buckets:           -1                       
+Bucket Columns:        []                       
+Sort Columns:          []                       
+Stored As SubDirectories:      Yes                      
+Skewed Columns:        [key]                    
+Skewed Values:         [[484], [103]]           
+#### A masked pattern was here ####
+Skewed Value to Truncated Path:        {[103]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103, [484]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484}
+Storage Desc Params:            
+       serialization.format    1                   
+PREHOOK: query: -- list bucketing DML with merge. use bucketize to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+POSTHOOK: query: -- list bucketing DML with merge. use bucketize to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
+  Stage-4
+  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
+  Stage-2 depends on stages: Stage-0
+  Stage-3
+  Stage-5
+  Stage-6 depends on stages: Stage-5
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: srcpart
+            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Select Operator
+              expressions: key (type: string), value (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+                Static Partition Specification: ds=2008-04-08/hr=11/
+                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                table:
+                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                    properties:
+                      bucket_count -1
+                      columns key,value
+                      columns.comments 
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.list_bucketing_static_part
+                      partition_columns.types string:string
+                      serialization.ddl struct list_bucketing_static_part { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                    name: default.list_bucketing_static_part
+                TotalFiles: 1
+                GatherStats: true
+                MultiFileSpray: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=11
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=12
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 12
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+      Truncated Path -> Alias:
+        /srcpart/ds=2008-04-08/hr=11 [srcpart]
+        /srcpart/ds=2008-04-08/hr=12 [srcpart]
+
+  Stage: Stage-7
+    Conditional Operator
+
+  Stage: Stage-4
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 2008-04-08
+            hr 11
+          replace: true
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_static_part
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_static_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_static_part
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+#### A masked pattern was here ####
+
+  Stage: Stage-3
+    Merge File Operator
+      Map Operator Tree:
+          RCFile Merge Operator
+      merge level: block
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_static_part
+              partition_columns.types string:string
+              serialization.ddl struct list_bucketing_static_part { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_static_part
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_static_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_static_part
+            name: default.list_bucketing_static_part
+      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+      Truncated Path -> Alias:
+#### A masked pattern was here ####
+
+  Stage: Stage-5
+    Merge File Operator
+      Map Operator Tree:
+          RCFile Merge Operator
+      merge level: block
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_static_part
+              partition_columns.types string:string
+              serialization.ddl struct list_bucketing_static_part { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_static_part
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_static_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_static_part
+            name: default.list_bucketing_static_part
+      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+      Truncated Path -> Alias:
+#### A masked pattern was here ####
+
+  Stage: Stage-6
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: -- check DML result
+show partitions list_bucketing_static_part
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: query: -- check DML result
+show partitions list_bucketing_static_part
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@list_bucketing_static_part
+ds=2008-04-08/hr=11
+PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@list_bucketing_static_part
+# col_name             data_type               comment             
+                
+key                    string                                      
+value                  string                                      
+                
+# Partition Information                 
+# col_name             data_type               comment             
+                
+ds                     string                                      
+hr                     string                                      
+                
+# Detailed Partition Information                
+Partition Value:       [2008-04-08, 11]         
+Database:              default                  
+Table:                 list_bucketing_static_part       
+#### A masked pattern was here ####
+Partition Parameters:           
+       COLUMN_STATS_ACCURATE   {\"BASIC_STATS\":\"true\"}
+       numFiles                4                   
+       numRows                 1000                
+       rawDataSize             9624                
+       totalSize               10786               
+#### A masked pattern was here ####
+                
+# Storage Information           
+SerDe Library:         org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+InputFormat:           org.apache.hadoop.hive.ql.io.RCFileInputFormat   
+OutputFormat:          org.apache.hadoop.hive.ql.io.RCFileOutputFormat  
+Compressed:            No                       
+Num Buckets:           -1                       
+Bucket Columns:        []                       
+Sort Columns:          []                       
+Stored As SubDirectories:      Yes                      
+Skewed Columns:        [key]                    
+Skewed Values:         [[484], [103]]           
+#### A masked pattern was here ####
+Skewed Value to Truncated Path:        {[103]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103, [484]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484}
+Storage Desc Params:            
+       serialization.format    1                   
+PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+1000
+PREHOOK: query: select count(*) from list_bucketing_static_part
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_static_part
+PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from list_bucketing_static_part
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+1000
+PREHOOK: query: explain extended
+select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Partition Description:
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_static_part
+              numFiles 4
+              numRows 1000
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 9624
+              serialization.ddl struct list_bucketing_static_part { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              totalSize 10786
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_static_part
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_static_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_static_part
+            name: default.list_bucketing_static_part
+      Processor Tree:
+        TableScan
+          alias: list_bucketing_static_part
+          Statistics: Num rows: 1000 Data size: 9624 Basic stats: COMPLETE Column stats: NONE
+          GatherStats: false
+          Filter Operator
+            isSamplingPred: false
+            predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
+            Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: '484' (type: string), 'val_484' (type: string), '2008-04-08' (type: string), '11' (type: string)
+              outputColumnNames: _col0, _col1, _col2, _col3
+              Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
+              ListSink
+
+PREHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_static_part
+PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+POSTHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+484    val_484 2008-04-08      11
+484    val_484 2008-04-08      11
+PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+484    val_484 2008-04-08      11
+484    val_484 2008-04-08      12
+PREHOOK: query: -- clean up
+drop table list_bucketing_static_part
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@list_bucketing_static_part
+PREHOOK: Output: default@list_bucketing_static_part
+POSTHOOK: query: -- clean up
+drop table list_bucketing_static_part
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: Output: default@list_bucketing_static_part

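A note on reading the hunk above: the golden file encodes a complete list-bucketing scenario, and the sketch below restates it as a runnable HiveQL session. The DDL and DML are taken verbatim from the output; the set commands are assumptions (the list_bucket_dml_9.q source is not part of this diff), inferred from the RCFile merge stages (Stage-3/Stage-5) and the subdirectory storage shown in the plan:

    -- assumed session settings; the actual list_bucket_dml_9.q settings are not shown in this diff
    set hive.mapred.supports.subdirectories=true;
    set hive.merge.mapfiles=true;
    set hive.merge.mapredfiles=true;

    -- skewed keys '484' and '103' each get their own subdirectory under the partition
    create table list_bucketing_static_part (key string, value string)
        partitioned by (ds string, hr string)
        skewed by (key) on ('484','103')
        stored as directories
        stored as rcfile;

    -- static-partition insert; with merging enabled, the per-directory small files are
    -- concatenated afterwards, which is why numFiles drops from 6 to 4 between the two
    -- "desc formatted" outputs above
    insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
    select key, value from srcpart where ds = '2008-04-08';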
http://git-wip-us.apache.org/repos/asf/hive/blob/9687dcc9/ql/src/test/results/clientpositive/llap/join0.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/join0.q.java1.7.out b/ql/src/test/results/clientpositive/llap/join0.q.java1.7.out
deleted file mode 100644
index 5651839..0000000
--- a/ql/src/test/results/clientpositive/llap/join0.q.java1.7.out
+++ /dev/null
@@ -1,242 +0,0 @@
-Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
--- SORT_QUERY_RESULTS
-
-EXPLAIN
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
--- SORT_QUERY_RESULTS
-
-EXPLAIN
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-      Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: (key < 10) (type: boolean)
-                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string), value (type: string)
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: string), _col1 (type: string)
-            Execution mode: llap
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: (key < 10) (type: boolean)
-                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string), value (type: string)
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: string), _col1 (type: string)
-            Execution mode: llap
-        Reducer 2 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Merge Join Operator
-                condition map:
-                     Inner Join 0 to 1
-                keys:
-                  0 
-                  1 
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
-                  sort order: ++++
-                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-        Reducer 3 
-            Execution mode: uber
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
-PREHOOK: query: EXPLAIN FORMATTED
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN FORMATTED
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-#### A masked pattern was here ####
-Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
-PREHOOK: query: SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-#### A masked pattern was here ####
-0      val_0   0       val_0
-0      val_0   0       val_0
-0      val_0   0       val_0
-0      val_0   0       val_0
-0      val_0   0       val_0
-0      val_0   0       val_0
-0      val_0   0       val_0
-0      val_0   0       val_0
-0      val_0   0       val_0
-0      val_0   2       val_2
-0      val_0   2       val_2
-0      val_0   2       val_2
-0      val_0   4       val_4
-0      val_0   4       val_4
-0      val_0   4       val_4
-0      val_0   5       val_5
-0      val_0   5       val_5
-0      val_0   5       val_5
-0      val_0   5       val_5
-0      val_0   5       val_5
-0      val_0   5       val_5
-0      val_0   5       val_5
-0      val_0   5       val_5
-0      val_0   5       val_5
-0      val_0   8       val_8
-0      val_0   8       val_8
-0      val_0   8       val_8
-0      val_0   9       val_9
-0      val_0   9       val_9
-0      val_0   9       val_9
-2      val_2   0       val_0
-2      val_2   0       val_0
-2      val_2   0       val_0
-2      val_2   2       val_2
-2      val_2   4       val_4
-2      val_2   5       val_5
-2      val_2   5       val_5
-2      val_2   5       val_5
-2      val_2   8       val_8
-2      val_2   9       val_9
-4      val_4   0       val_0
-4      val_4   0       val_0
-4      val_4   0       val_0
-4      val_4   2       val_2
-4      val_4   4       val_4
-4      val_4   5       val_5
-4      val_4   5       val_5
-4      val_4   5       val_5
-4      val_4   8       val_8
-4      val_4   9       val_9
-5      val_5   0       val_0
-5      val_5   0       val_0
-5      val_5   0       val_0
-5      val_5   0       val_0
-5      val_5   0       val_0
-5      val_5   0       val_0
-5      val_5   0       val_0
-5      val_5   0       val_0
-5      val_5   0       val_0
-5      val_5   2       val_2
-5      val_5   2       val_2
-5      val_5   2       val_2
-5      val_5   4       val_4
-5      val_5   4       val_4
-5      val_5   4       val_4
-5      val_5   5       val_5
-5      val_5   5       val_5
-5      val_5   5       val_5
-5      val_5   5       val_5
-5      val_5   5       val_5
-5      val_5   5       val_5
-5      val_5   5       val_5
-5      val_5   5       val_5
-5      val_5   5       val_5
-5      val_5   8       val_8
-5      val_5   8       val_8
-5      val_5   8       val_8
-5      val_5   9       val_9
-5      val_5   9       val_9
-5      val_5   9       val_9
-8      val_8   0       val_0
-8      val_8   0       val_0
-8      val_8   0       val_0
-8      val_8   2       val_2
-8      val_8   4       val_4
-8      val_8   5       val_5
-8      val_8   5       val_5
-8      val_8   5       val_5
-8      val_8   8       val_8
-8      val_8   9       val_9
-9      val_9   0       val_0
-9      val_9   0       val_0
-9      val_9   0       val_0
-9      val_9   2       val_2
-9      val_9   4       val_4
-9      val_9   5       val_5
-9      val_9   5       val_5
-9      val_9   5       val_5
-9      val_9   8       val_8
-9      val_9   9       val_9

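The MERGEJOIN warning in these join0 hunks flags that Reducer 2 joins with empty key lists (the keys: entries for inputs 0 and 1 carry no expressions), so every qualifying row on one side pairs with every qualifying row on the other: the ten src rows with key < 10 per side produce the 100 result rows listed above. A sketch of the same query with the cross product made explicit (equivalent here because no ON clause constrains the join; not part of the test itself):

    SELECT src1.key AS k1, src1.value AS v1,
           src2.key AS k2, src2.value AS v2
    FROM (SELECT * FROM src WHERE src.key < 10) src1
    CROSS JOIN
         (SELECT * FROM src WHERE src.key < 10) src2
    SORT BY k1, v1, k2, v2;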
http://git-wip-us.apache.org/repos/asf/hive/blob/9687dcc9/ql/src/test/results/clientpositive/llap/join0.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/join0.q.java1.8.out b/ql/src/test/results/clientpositive/llap/join0.q.java1.8.out
deleted file mode 100644
index 5651839..0000000
--- a/ql/src/test/results/clientpositive/llap/join0.q.java1.8.out
+++ /dev/null
@@ -1,242 +0,0 @@
-Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
--- SORT_QUERY_RESULTS
-
-EXPLAIN
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
--- SORT_QUERY_RESULTS
-
-EXPLAIN
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-      Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: (key < 10) (type: boolean)
-                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string), value (type: string)
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: string), _col1 (type: string)
-            Execution mode: llap
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: (key < 10) (type: boolean)
-                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string), value (type: string)
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: string), _col1 (type: string)
-            Execution mode: llap
-        Reducer 2 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Merge Join Operator
-                condition map:
-                     Inner Join 0 to 1
-                keys:
-                  0 
-                  1 
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
-                  sort order: ++++
-                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-        Reducer 3 
-            Execution mode: uber
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
-PREHOOK: query: EXPLAIN FORMATTED
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN FORMATTED
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-#### A masked pattern was here ####
-Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
-PREHOOK: query: SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-#### A masked pattern was here ####
-0      val_0   0       val_0
-0      val_0   0       val_0
-0      val_0   0       val_0
-0      val_0   0       val_0
-0      val_0   0       val_0
-0      val_0   0       val_0
-0      val_0   0       val_0
-0      val_0   0       val_0
-0      val_0   0       val_0
-0      val_0   2       val_2
-0      val_0   2       val_2
-0      val_0   2       val_2
-0      val_0   4       val_4
-0      val_0   4       val_4
-0      val_0   4       val_4
-0      val_0   5       val_5
-0      val_0   5       val_5
-0      val_0   5       val_5
-0      val_0   5       val_5
-0      val_0   5       val_5
-0      val_0   5       val_5
-0      val_0   5       val_5
-0      val_0   5       val_5
-0      val_0   5       val_5
-0      val_0   8       val_8
-0      val_0   8       val_8
-0      val_0   8       val_8
-0      val_0   9       val_9
-0      val_0   9       val_9
-0      val_0   9       val_9
-2      val_2   0       val_0
-2      val_2   0       val_0
-2      val_2   0       val_0
-2      val_2   2       val_2
-2      val_2   4       val_4
-2      val_2   5       val_5
-2      val_2   5       val_5
-2      val_2   5       val_5
-2      val_2   8       val_8
-2      val_2   9       val_9
-4      val_4   0       val_0
-4      val_4   0       val_0
-4      val_4   0       val_0
-4      val_4   2       val_2
-4      val_4   4       val_4
-4      val_4   5       val_5
-4      val_4   5       val_5
-4      val_4   5       val_5
-4      val_4   8       val_8
-4      val_4   9       val_9
-5      val_5   0       val_0
-5      val_5   0       val_0
-5      val_5   0       val_0
-5      val_5   0       val_0
-5      val_5   0       val_0
-5      val_5   0       val_0
-5      val_5   0       val_0
-5      val_5   0       val_0
-5      val_5   0       val_0
-5      val_5   2       val_2
-5      val_5   2       val_2
-5      val_5   2       val_2
-5      val_5   4       val_4
-5      val_5   4       val_4
-5      val_5   4       val_4
-5      val_5   5       val_5
-5      val_5   5       val_5
-5      val_5   5       val_5
-5      val_5   5       val_5
-5      val_5   5       val_5
-5      val_5   5       val_5
-5      val_5   5       val_5
-5      val_5   5       val_5
-5      val_5   5       val_5
-5      val_5   8       val_8
-5      val_5   8       val_8
-5      val_5   8       val_8
-5      val_5   9       val_9
-5      val_5   9       val_9
-5      val_5   9       val_9
-8      val_8   0       val_0
-8      val_8   0       val_0
-8      val_8   0       val_0
-8      val_8   2       val_2
-8      val_8   4       val_4
-8      val_8   5       val_5
-8      val_8   5       val_5
-8      val_8   5       val_5
-8      val_8   8       val_8
-8      val_8   9       val_9
-9      val_9   0       val_0
-9      val_9   0       val_0
-9      val_9   0       val_0
-9      val_9   2       val_2
-9      val_9   4       val_4
-9      val_9   5       val_5
-9      val_9   5       val_5
-9      val_9   5       val_5
-9      val_9   8       val_8
-9      val_9   9       val_9
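
[Note: the cross-product warning above fires because the JOIN between src1 and src2 carries no ON clause, so Hive must plan a Cartesian product. A minimal HiveQL sketch of the same query with the product stated explicitly; the CROSS JOIN spelling is an illustration, not what the test file uses:]

-- Each subquery returns the 10 rows of src with key < 10 (key 0 and 5
-- appear three times each; 2, 4, 8, 9 once), so the cross product yields
-- 10 x 10 = 100 rows, exactly the result set recorded above.
SELECT src1.key AS k1, src1.value AS v1,
       src2.key AS k2, src2.value AS v2
FROM (SELECT * FROM src WHERE src.key < 10) src1
CROSS JOIN
     (SELECT * FROM src WHERE src.key < 10) src2
SORT BY k1, v1, k2, v2;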

http://git-wip-us.apache.org/repos/asf/hive/blob/9687dcc9/ql/src/test/results/clientpositive/llap/join0.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/join0.q.out b/ql/src/test/results/clientpositive/llap/join0.q.out
new file mode 100644
index 0000000..f177afc
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/join0.q.out
@@ -0,0 +1,243 @@
+Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+EXPLAIN
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+PREHOOK: type: QUERY
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+EXPLAIN
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (key < 10) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: string), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: string), _col1 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 4 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (key < 10) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: string), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: string), _col1 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Inner Join 0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
+                  sort order: ++++
+                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+        Reducer 3 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: EXPLAIN FORMATTED
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN FORMATTED
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+POSTHOOK: type: QUERY
+#### A masked pattern was here ####
+Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0      val_0   0       val_0
+0      val_0   0       val_0
+0      val_0   0       val_0
+0      val_0   0       val_0
+0      val_0   0       val_0
+0      val_0   0       val_0
+0      val_0   0       val_0
+0      val_0   0       val_0
+0      val_0   0       val_0
+0      val_0   2       val_2
+0      val_0   2       val_2
+0      val_0   2       val_2
+0      val_0   4       val_4
+0      val_0   4       val_4
+0      val_0   4       val_4
+0      val_0   5       val_5
+0      val_0   5       val_5
+0      val_0   5       val_5
+0      val_0   5       val_5
+0      val_0   5       val_5
+0      val_0   5       val_5
+0      val_0   5       val_5
+0      val_0   5       val_5
+0      val_0   5       val_5
+0      val_0   8       val_8
+0      val_0   8       val_8
+0      val_0   8       val_8
+0      val_0   9       val_9
+0      val_0   9       val_9
+0      val_0   9       val_9
+2      val_2   0       val_0
+2      val_2   0       val_0
+2      val_2   0       val_0
+2      val_2   2       val_2
+2      val_2   4       val_4
+2      val_2   5       val_5
+2      val_2   5       val_5
+2      val_2   5       val_5
+2      val_2   8       val_8
+2      val_2   9       val_9
+4      val_4   0       val_0
+4      val_4   0       val_0
+4      val_4   0       val_0
+4      val_4   2       val_2
+4      val_4   4       val_4
+4      val_4   5       val_5
+4      val_4   5       val_5
+4      val_4   5       val_5
+4      val_4   8       val_8
+4      val_4   9       val_9
+5      val_5   0       val_0
+5      val_5   0       val_0
+5      val_5   0       val_0
+5      val_5   0       val_0
+5      val_5   0       val_0
+5      val_5   0       val_0
+5      val_5   0       val_0
+5      val_5   0       val_0
+5      val_5   0       val_0
+5      val_5   2       val_2
+5      val_5   2       val_2
+5      val_5   2       val_2
+5      val_5   4       val_4
+5      val_5   4       val_4
+5      val_5   4       val_4
+5      val_5   5       val_5
+5      val_5   5       val_5
+5      val_5   5       val_5
+5      val_5   5       val_5
+5      val_5   5       val_5
+5      val_5   5       val_5
+5      val_5   5       val_5
+5      val_5   5       val_5
+5      val_5   5       val_5
+5      val_5   8       val_8
+5      val_5   8       val_8
+5      val_5   8       val_8
+5      val_5   9       val_9
+5      val_5   9       val_9
+5      val_5   9       val_9
+8      val_8   0       val_0
+8      val_8   0       val_0
+8      val_8   0       val_0
+8      val_8   2       val_2
+8      val_8   4       val_4
+8      val_8   5       val_5
+8      val_8   5       val_5
+8      val_8   5       val_5
+8      val_8   8       val_8
+8      val_8   9       val_9
+9      val_9   0       val_0
+9      val_9   0       val_0
+9      val_9   0       val_0
+9      val_9   2       val_2
+9      val_9   4       val_4
+9      val_9   5       val_5
+9      val_9   5       val_5
+9      val_9   5       val_5
+9      val_9   8       val_8
+9      val_9   9       val_9
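
[Note on determinism: the .q file carries the SORT_QUERY_RESULTS directive because SORT BY only orders rows within each reducer, so raw output order can differ between the MapReduce and LLAP/Tez drivers. A hedged sketch of how a total order would be requested in the query itself, which is not how the test harness does it:]

-- ORDER BY forces a single total order through one final reducer, whereas
-- the SORT BY above sorts per reducer and relies on the harness sorting
-- rows before diffing them against this golden file.
SELECT src1.key AS k1, src1.value AS v1,
       src2.key AS k2, src2.value AS v2
FROM (SELECT * FROM src WHERE src.key < 10) src1
JOIN (SELECT * FROM src WHERE src.key < 10) src2
ORDER BY k1, v1, k2, v2;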

http://git-wip-us.apache.org/repos/asf/hive/blob/9687dcc9/ql/src/test/results/clientpositive/llap/vector_cast_constant.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_cast_constant.q.java1.7.out b/ql/src/test/results/clientpositive/llap/vector_cast_constant.q.java1.7.out
deleted file mode 100644
index 22b5d93..0000000
--- a/ql/src/test/results/clientpositive/llap/vector_cast_constant.q.java1.7.out
+++ /dev/null
@@ -1,217 +0,0 @@
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE over1k
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE over1k
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: DROP TABLE over1korc
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE over1korc
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: -- data setup
-CREATE TABLE over1k(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1k
-POSTHOOK: query: -- data setup
-CREATE TABLE over1k(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1k
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over1k
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@over1k
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over1k
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@over1k
-PREHOOK: query: CREATE TABLE over1korc(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-STORED AS ORC
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1korc
-POSTHOOK: query: CREATE TABLE over1korc(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-STORED AS ORC
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1korc
-PREHOOK: query: INSERT INTO TABLE over1korc SELECT * FROM over1k
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k
-PREHOOK: Output: default@over1korc
-POSTHOOK: query: INSERT INTO TABLE over1korc SELECT * FROM over1k
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k
-POSTHOOK: Output: default@over1korc
-POSTHOOK: Lineage: over1korc.b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1korc.bin SIMPLE [(over1k)over1k.FieldSchema(name:bin, type:binary, comment:null), ]
-POSTHOOK: Lineage: over1korc.bo SIMPLE [(over1k)over1k.FieldSchema(name:bo, type:boolean, comment:null), ]
-POSTHOOK: Lineage: over1korc.d SIMPLE [(over1k)over1k.FieldSchema(name:d, type:double, comment:null), ]
-POSTHOOK: Lineage: over1korc.dec SIMPLE [(over1k)over1k.FieldSchema(name:dec, type:decimal(4,2), comment:null), ]
-POSTHOOK: Lineage: over1korc.f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1korc.i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1korc.s SIMPLE [(over1k)over1k.FieldSchema(name:s, type:string, comment:null), ]
-POSTHOOK: Lineage: over1korc.si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: over1korc.t SIMPLE [(over1k)over1k.FieldSchema(name:t, type:tinyint, comment:null), ]
-POSTHOOK: Lineage: over1korc.ts SIMPLE [(over1k)over1k.FieldSchema(name:ts, type:timestamp, comment:null), ]
-PREHOOK: query: EXPLAIN SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-      Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: over1korc
-                  Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: i (type: int)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                    Group By Operator
-                      aggregations: avg(50), avg(50.0), avg(50)
-                      keys: _col0 (type: int)
-                      mode: hash
-                      outputColumnNames: _col0, _col1, _col2, _col3
-                      Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: int)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col1 (type: struct<count:bigint,sum:double,input:int>), _col2 (type: struct<count:bigint,sum:double,input:double>), _col3 (type: struct<count:bigint,sum:decimal(12,0),input:decimal(10,0)>)
-            Execution mode: vectorized, llap
-        Reducer 2 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: avg(VALUE._col0), avg(VALUE._col1), avg(VALUE._col2)
-                keys: KEY._col0 (type: int)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: int)
-                  sort order: +
-                  Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: decimal(14,4))
-        Reducer 3 
-            Execution mode: vectorized, uber
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: decimal(14,4))
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-                Limit
-                  Number of rows: 10
-                  Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: 10
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1korc
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1korc
-#### A masked pattern was here ####
-65536  50.0    50.0    50
-65537  50.0    50.0    50
-65538  50.0    50.0    50
-65539  50.0    50.0    50
-65540  50.0    50.0    50
-65541  50.0    50.0    50
-65542  50.0    50.0    50
-65543  50.0    50.0    50
-65544  50.0    50.0    50
-65545  50.0    50.0    50
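
[Note: the deleted plan above records the type arithmetic behind the three AVG columns. A sketch inferred from the printed operator types, not from Hive's source:]

-- CAST(50 AS DECIMAL) defaults to decimal(10,0); the partial aggregate
-- carries its sum as decimal(12,0) and the final AVG widens to
-- decimal(14,4), while the INT and DOUBLE inputs both average to double.
-- Hence the row shape "50.0  50.0  50" in the results above.
SELECT AVG(CAST(50 AS INT)),      -- double
       AVG(CAST(50 AS DOUBLE)),   -- double
       AVG(CAST(50 AS DECIMAL))   -- decimal(14,4)
FROM over1korc;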

http://git-wip-us.apache.org/repos/asf/hive/blob/9687dcc9/ql/src/test/results/clientpositive/llap/vector_cast_constant.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_cast_constant.q.java1.8.out b/ql/src/test/results/clientpositive/llap/vector_cast_constant.q.java1.8.out
deleted file mode 100644
index 22b5d93..0000000
--- a/ql/src/test/results/clientpositive/llap/vector_cast_constant.q.java1.8.out
+++ /dev/null
@@ -1,217 +0,0 @@
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE over1k
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE over1k
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: DROP TABLE over1korc
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE over1korc
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: -- data setup
-CREATE TABLE over1k(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1k
-POSTHOOK: query: -- data setup
-CREATE TABLE over1k(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1k
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over1k
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@over1k
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over1k
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@over1k
-PREHOOK: query: CREATE TABLE over1korc(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-STORED AS ORC
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1korc
-POSTHOOK: query: CREATE TABLE over1korc(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-STORED AS ORC
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1korc
-PREHOOK: query: INSERT INTO TABLE over1korc SELECT * FROM over1k
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k
-PREHOOK: Output: default@over1korc
-POSTHOOK: query: INSERT INTO TABLE over1korc SELECT * FROM over1k
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k
-POSTHOOK: Output: default@over1korc
-POSTHOOK: Lineage: over1korc.b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1korc.bin SIMPLE [(over1k)over1k.FieldSchema(name:bin, type:binary, comment:null), ]
-POSTHOOK: Lineage: over1korc.bo SIMPLE [(over1k)over1k.FieldSchema(name:bo, type:boolean, comment:null), ]
-POSTHOOK: Lineage: over1korc.d SIMPLE [(over1k)over1k.FieldSchema(name:d, type:double, comment:null), ]
-POSTHOOK: Lineage: over1korc.dec SIMPLE [(over1k)over1k.FieldSchema(name:dec, type:decimal(4,2), comment:null), ]
-POSTHOOK: Lineage: over1korc.f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1korc.i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1korc.s SIMPLE [(over1k)over1k.FieldSchema(name:s, type:string, comment:null), ]
-POSTHOOK: Lineage: over1korc.si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: over1korc.t SIMPLE [(over1k)over1k.FieldSchema(name:t, type:tinyint, comment:null), ]
-POSTHOOK: Lineage: over1korc.ts SIMPLE [(over1k)over1k.FieldSchema(name:ts, type:timestamp, comment:null), ]
-PREHOOK: query: EXPLAIN SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-      Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: over1korc
-                  Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: i (type: int)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                    Group By Operator
-                      aggregations: avg(50), avg(50.0), avg(50)
-                      keys: _col0 (type: int)
-                      mode: hash
-                      outputColumnNames: _col0, _col1, _col2, _col3
-                      Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: int)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col1 (type: struct<count:bigint,sum:double,input:int>), _col2 (type: struct<count:bigint,sum:double,input:double>), _col3 (type: struct<count:bigint,sum:decimal(12,0),input:decimal(10,0)>)
-            Execution mode: vectorized, llap
-        Reducer 2 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: avg(VALUE._col0), avg(VALUE._col1), avg(VALUE._col2)
-                keys: KEY._col0 (type: int)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: int)
-                  sort order: +
-                  Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: decimal(14,4))
-        Reducer 3 
-            Execution mode: vectorized, uber
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: decimal(14,4))
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-                Limit
-                  Number of rows: 10
-                  Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: 10
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1korc
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1korc
-#### A masked pattern was here ####
-65536  50.0    50.0    50
-65537  50.0    50.0    50
-65538  50.0    50.0    50
-65539  50.0    50.0    50
-65540  50.0    50.0    50
-65541  50.0    50.0    50
-65542  50.0    50.0    50
-65543  50.0    50.0    50
-65544  50.0    50.0    50
-65545  50.0    50.0    50
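
[Note: the java1.7 and java1.8 golden files deleted here share the same blob hash (index 22b5d93), so their content is identical; the split existed only for the JAVA_VERSION_SPECIFIC_OUTPUT mechanism. For reference, a hedged sketch of session settings that produce the "vectorized, llap" execution modes recorded in the plans; whether the test driver sets them explicitly or inherits them from its conf is an assumption:]

-- Both properties are standard Hive settings for map-side and reduce-side
-- vectorization; values shown here are assumed, not taken from the patch.
set hive.vectorized.execution.enabled=true;
set hive.vectorized.execution.reduce.enabled=true;
EXPLAIN
SELECT i, AVG(CAST(50 AS DECIMAL)) AS avg_decimal_ok
FROM over1korc GROUP BY i ORDER BY i LIMIT 10;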
